repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_root_agent.rb | test/test_root_agent.rb | require_relative 'helper'
require 'fluent/event_router'
require 'fluent/system_config'
require 'timecop'
require_relative 'test_plugin_classes'
class RootAgentTest < ::Test::Unit::TestCase
include Fluent
include FluentTest
def test_initialize
ra = RootAgent.new(log: $log)
assert_equal 0, ra.instance_variable_get(:@suppress_emit_error_log_interval)
assert_nil ra.instance_variable_get(:@next_emit_error_log_time)
end
data(
'suppress interval' => [{'emit_error_log_interval' => 30}, {:@suppress_emit_error_log_interval => 30}],
'without source' => [{'without_source' => true}, {:@without_source => true}],
'enable input metrics' => [{'enable_input_metrics' => true}, {:@enable_input_metrics => true}],
'disable input metrics' => [{'enable_input_metrics' => false}, {:@enable_input_metrics => false}],
)
def test_initialize_with_opt(data)
opt, expected = data
ra = RootAgent.new(log: $log, system_config: SystemConfig.new(opt))
expected.each { |k, v|
assert_equal v, ra.instance_variable_get(k)
}
end
sub_test_case 'configure' do
setup do
@ra = RootAgent.new(log: $log)
stub(Engine).root_agent { @ra }
end
def configure_ra(conf_str)
conf = Config.parse(conf_str, "(test)", "(test_dir)", true)
@ra.configure(conf)
@ra
end
test 'empty' do
ra = configure_ra('')
assert_empty ra.inputs
assert_empty ra.labels
assert_empty ra.outputs
assert_empty ra.filters
assert_nil ra.context
assert_nil ra.error_collector
end
test 'raises configuration error for missing type of source' do
conf = <<-EOC
<source>
</source>
EOC
errmsg = "Missing '@type' parameter on <source> directive"
assert_raise Fluent::ConfigError.new(errmsg) do
configure_ra(conf)
end
end
test 'raises configuration error for missing type of match' do
conf = <<-EOC
<source>
@type test_in
</source>
<match *.**>
</match>
EOC
errmsg = "Missing '@type' parameter on <match> directive"
assert_raise Fluent::ConfigError.new(errmsg) do
configure_ra(conf)
end
end
test 'raises configuration error for missing type of filter' do
conf = <<-EOC
<source>
@type test_in
</source>
<filter *.**>
</filter>
<match *.**>
@type test_out
</match>
EOC
errmsg = "Missing '@type' parameter on <filter> directive"
assert_raise Fluent::ConfigError.new(errmsg) do
configure_ra(conf)
end
end
test 'raises configuration error if there are two same label section' do
conf = <<-EOC
<source>
@type test_in
@label @test
</source>
<label @test>
@type test_out
</label>
<label @test>
@type test_out
</label>
EOC
errmsg = "Section <label @test> appears twice"
assert_raise Fluent::ConfigError.new(errmsg) do
configure_ra(conf)
end
end
test 'raises configuration error for label without name' do
conf = <<-EOC
<label>
@type test_out
</label>
EOC
errmsg = "Missing symbol argument on <label> directive"
assert_raise Fluent::ConfigError.new(errmsg) do
configure_ra(conf)
end
end
test 'raises configuration error for <label @ROOT>' do
conf = <<-EOC
<source>
@type test_in
@label @ROOT
</source>
<label @ROOT>
@type test_out
</label>
EOC
errmsg = "@ROOT for <label> is not permitted, reserved for getting root router"
assert_raise Fluent::ConfigError.new(errmsg) do
configure_ra(conf)
end
end
test 'raises configuration error if there are not match sections in label section' do
conf = <<-EOC
<source>
@type test_in
@label @test
</source>
<label @test>
@type test_out
</label>
EOC
errmsg = "Missing <match> sections in <label @test> section"
assert_raise Fluent::ConfigError.new(errmsg) do
configure_ra(conf)
end
end
test 'with plugins' do
# check @type and type in one configuration
conf = <<-EOC
<source>
@type test_in
@id test_in
</source>
<filter>
type test_filter
id test_filter
</filter>
<match **>
@type relabel
@id test_relabel
@label @test
</match>
<label @test>
<match **>
type test_out
id test_out
</match>
</label>
<label @ERROR>
<match>
@type null
</match>
</label>
EOC
ra = configure_ra(conf)
assert_kind_of FluentTestInput, ra.inputs.first
assert_kind_of Plugin::RelabelOutput, ra.outputs.first
assert_kind_of FluentTestFilter, ra.filters.first
assert ra.error_collector
%W(@test @ERROR).each { |label_symbol|
assert_include ra.labels, label_symbol
assert_kind_of Label, ra.labels[label_symbol]
}
test_label = ra.labels['@test']
assert_kind_of FluentTestOutput, test_label.outputs.first
assert_equal ra, test_label.root_agent
error_label = ra.labels['@ERROR']
assert_kind_of Fluent::Plugin::NullOutput, error_label.outputs.first
end
end
sub_test_case 'start/shutdown' do
def setup_root_agent(conf)
ra = RootAgent.new(log: $log)
stub(Engine).root_agent { ra }
ra.configure(Config.parse(conf, "(test)", "(test_dir)", true))
ra
end
test 'plugin status' do
ra = setup_root_agent(<<-EOC)
<source>
@type test_in
@id test_in
</source>
<filter>
type test_filter
id test_filter
</filter>
<match **>
@type test_out
@id test_out
</match>
EOC
ra.start
assert_true ra.inputs.first.started
assert_true ra.filters.first.started
assert_true ra.outputs.first.started
ra.shutdown
assert_false ra.inputs.first.started
assert_false ra.filters.first.started
assert_false ra.outputs.first.started
end
test 'output plugin threads should run before input plugin is blocked with buffer full' do
ra = setup_root_agent(<<-EOC)
<source>
@type test_in_gen
@id test_in_gen
</source>
<match **>
@type test_out_buffered
@id test_out_buffered
<buffer>
chunk_limit_size 1k
queue_limit_length 2
flush_thread_count 2
overflow_action block
</buffer>
</match>
EOC
waiting(5) { ra.start }
assert_true ra.inputs.first.started
assert_true ra.outputs.first.started
ra.shutdown
assert_false ra.inputs.first.started
assert_false ra.outputs.first.started
end
end
sub_test_case 'configured with label and secondary plugin' do
setup do
@ra = RootAgent.new(log: $log)
stub(Engine).root_agent{ @ra }
@ra.configure(Config.parse(<<-EOC, "(test)", "(test_dir)", true))
<source>
@type test_in
@label @route_a
</source>
<label @route_a>
<match a.**>
@type test_out_buffered
<secondary>
@type test_out_emit
</secondary>
</match>
</label>
<label @route_b>
<match b.**>
@type test_out
</match>
</label>
EOC
end
test 'secondary plugin has an event router for the label which the plugin is in' do
assert_equal 1, @ra.inputs.size
assert_equal 2, @ra.labels.size
assert_equal ['@route_a', '@route_b'], @ra.labels.keys
assert_equal '@route_a', @ra.labels['@route_a'].context
assert_equal '@route_b', @ra.labels['@route_b'].context
c1 = @ra.labels['@route_a']
assert_equal 1, c1.outputs.size
assert !c1.outputs.first.has_router?
assert c1.outputs.first.secondary
assert c1.outputs.first.secondary.has_router?
assert_equal c1.event_router, c1.outputs.first.secondary.router
end
end
sub_test_case 'configured with label and secondary plugin with @label specifier' do
setup do
@ra = RootAgent.new(log: $log)
stub(Engine).root_agent{ @ra }
@ra.configure(Config.parse(<<-EOC, "(test)", "(test_dir)", true))
<source>
@type test_in
@label @route_a
</source>
<label @route_a>
<match a.**>
@type test_out_buffered
<secondary>
@type test_out_emit
@label @route_b
</secondary>
</match>
</label>
<label @route_b>
<match b.**>
@type test_out
</match>
</label>
EOC
end
test 'secondary plugin has an event router for the label specified in secondary section' do
assert_equal 1, @ra.inputs.size
assert_equal 2, @ra.labels.size
assert_equal ['@route_a', '@route_b'], @ra.labels.keys
assert_equal '@route_a', @ra.labels['@route_a'].context
assert_equal '@route_b', @ra.labels['@route_b'].context
c1 = @ra.labels['@route_a']
c2 = @ra.labels['@route_b']
assert_equal 1, c1.outputs.size
assert !c1.outputs.first.has_router?
assert c1.outputs.first.secondary
assert c1.outputs.first.secondary.has_router?
assert_equal c2.event_router, c1.outputs.first.secondary.router
end
end
sub_test_case 'configured with label and secondary plugin with @label specifier in primary output' do
setup do
@ra = RootAgent.new(log: $log)
stub(Engine).root_agent{ @ra }
@ra.configure(Config.parse(<<-EOC, "(test)", "(test_dir)", true))
<source>
@type test_in
@label @route_a
</source>
<label @route_a>
<match a.**>
@type test_out_emit
@label @route_b
<secondary>
@type test_out_emit
</secondary>
</match>
</label>
<label @route_b>
<match b.**>
@type test_out
</match>
</label>
EOC
end
test 'secondary plugin has an event router for the label specified in secondary section' do
assert_equal 1, @ra.inputs.size
assert_equal 2, @ra.labels.size
assert_equal ['@route_a', '@route_b'], @ra.labels.keys
assert_equal '@route_a', @ra.labels['@route_a'].context
assert_equal '@route_b', @ra.labels['@route_b'].context
c1 = @ra.labels['@route_a']
c2 = @ra.labels['@route_b']
assert_equal 1, c1.outputs.size
assert c1.outputs.first.secondary
p1 = c1.outputs.first
assert p1.has_router?
assert_equal c1.event_router, p1.context_router
assert_equal c2.event_router, p1.router
s1 = p1.secondary
assert s1.has_router?
assert_equal c1.event_router, s1.context_router
assert_equal c2.event_router, s1.router
end
end
sub_test_case 'configured with MultiOutput plugins' do
setup do
@ra = RootAgent.new(log: $log)
stub(Engine).root_agent { @ra }
@ra.configure(Config.parse(<<-EOC, "(test)", "(test_dir)", true))
<source>
@type test_in
@id test_in
</source>
<filter>
@type test_filter
@id test_filter
</filter>
<match **>
@type copy
@id test_copy
<store>
@type test_out
@id test_out1
</store>
<store>
@type test_out
@id test_out2
</store>
</match>
EOC
@ra
end
test 'plugin status with multi output' do
assert_equal 1, @ra.inputs.size
assert_equal 1, @ra.filters.size
assert_equal 3, @ra.outputs.size
@ra.start
assert_equal [true], @ra.inputs.map{|i| i.started? }
assert_equal [true], @ra.filters.map{|i| i.started? }
assert_equal [true, true, true], @ra.outputs.map{|i| i.started? }
assert_equal [true], @ra.inputs.map{|i| i.after_started? }
assert_equal [true], @ra.filters.map{|i| i.after_started? }
assert_equal [true, true, true], @ra.outputs.map{|i| i.after_started? }
@ra.shutdown
assert_equal [true], @ra.inputs.map{|i| i.stopped? }
assert_equal [true], @ra.filters.map{|i| i.stopped? }
assert_equal [true, true, true], @ra.outputs.map{|i| i.stopped? }
assert_equal [true], @ra.inputs.map{|i| i.before_shutdown? }
assert_equal [true], @ra.filters.map{|i| i.before_shutdown? }
assert_equal [true, true, true], @ra.outputs.map{|i| i.before_shutdown? }
assert_equal [true], @ra.inputs.map{|i| i.shutdown? }
assert_equal [true], @ra.filters.map{|i| i.shutdown? }
assert_equal [true, true, true], @ra.outputs.map{|i| i.shutdown? }
assert_equal [true], @ra.inputs.map{|i| i.after_shutdown? }
assert_equal [true], @ra.filters.map{|i| i.after_shutdown? }
assert_equal [true, true, true], @ra.outputs.map{|i| i.after_shutdown? }
assert_equal [true], @ra.inputs.map{|i| i.closed? }
assert_equal [true], @ra.filters.map{|i| i.closed? }
assert_equal [true, true, true], @ra.outputs.map{|i| i.closed? }
assert_equal [true], @ra.inputs.map{|i| i.terminated? }
assert_equal [true], @ra.filters.map{|i| i.terminated? }
assert_equal [true, true, true], @ra.outputs.map{|i| i.terminated? }
end
end
sub_test_case 'configured with MultiOutput plugins and labels' do
setup do
@ra = RootAgent.new(log: $log)
stub(Engine).root_agent { @ra }
@ra.configure(Config.parse(<<-EOC, "(test)", "(test_dir)", true))
<source>
@type test_in
@id test_in
@label @testing
</source>
<label @testing>
<filter>
@type test_filter
@id test_filter
</filter>
<match **>
@type copy
@id test_copy
<store>
@type test_out
@id test_out1
</store>
<store>
@type test_out
@id test_out2
</store>
</match>
</label>
EOC
@ra
end
test 'plugin status with multi output' do
assert_equal 1, @ra.inputs.size
assert_equal 0, @ra.filters.size
assert_equal 0, @ra.outputs.size
assert_equal 1, @ra.labels.size
assert_equal '@testing', @ra.labels.keys.first
assert_equal 1, @ra.labels.values.first.filters.size
assert_equal 3, @ra.labels.values.first.outputs.size
label_filters = @ra.labels.values.first.filters
label_outputs = @ra.labels.values.first.outputs
@ra.start
assert_equal [true], @ra.inputs.map{|i| i.started? }
assert_equal [true], label_filters.map{|i| i.started? }
assert_equal [true, true, true], label_outputs.map{|i| i.started? }
@ra.shutdown
assert_equal [true], @ra.inputs.map{|i| i.stopped? }
assert_equal [true], label_filters.map{|i| i.stopped? }
assert_equal [true, true, true], label_outputs.map{|i| i.stopped? }
assert_equal [true], @ra.inputs.map{|i| i.before_shutdown? }
assert_equal [true], label_filters.map{|i| i.before_shutdown? }
assert_equal [true, true, true], label_outputs.map{|i| i.before_shutdown? }
assert_equal [true], @ra.inputs.map{|i| i.shutdown? }
assert_equal [true], label_filters.map{|i| i.shutdown? }
assert_equal [true, true, true], label_outputs.map{|i| i.shutdown? }
assert_equal [true], @ra.inputs.map{|i| i.after_shutdown? }
assert_equal [true], label_filters.map{|i| i.after_shutdown? }
assert_equal [true, true, true], label_outputs.map{|i| i.after_shutdown? }
assert_equal [true], @ra.inputs.map{|i| i.closed? }
assert_equal [true], label_filters.map{|i| i.closed? }
assert_equal [true, true, true], label_outputs.map{|i| i.closed? }
assert_equal [true], @ra.inputs.map{|i| i.terminated? }
assert_equal [true], label_filters.map{|i| i.terminated? }
assert_equal [true, true, true], label_outputs.map{|i| i.terminated? }
end
test 'plugin #shutdown is not called twice' do
assert_equal 1, @ra.inputs.size
assert_equal 0, @ra.filters.size
assert_equal 0, @ra.outputs.size
assert_equal 1, @ra.labels.size
assert_equal '@testing', @ra.labels.keys.first
assert_equal 1, @ra.labels.values.first.filters.size
assert_equal 3, @ra.labels.values.first.outputs.size
@ra.start
old_level = @ra.log.level
begin
@ra.log.instance_variable_get(:@logger).level = Fluent::Log::LEVEL_INFO - 1
assert_equal Fluent::Log::LEVEL_INFO, @ra.log.level
@ra.log.out.flush_logs = false
@ra.shutdown
test_out1_shutdown_logs = @ra.log.out.logs.select{|line| line =~ /shutting down output plugin type=:test_out plugin_id="test_out1"/ }
assert_equal 1, test_out1_shutdown_logs.size
ensure
@ra.log.out.flush_logs = true
@ra.log.out.reset
@ra.log.level = old_level
end
end
end
sub_test_case 'configured with MultiOutput plugin which creates plugin instances dynamically' do
setup do
@ra = RootAgent.new(log: $log)
stub(Engine).root_agent { @ra }
@ra.configure(Config.parse(<<-EOC, "(test)", "(test_dir)", true))
<source>
@type test_in
@id test_in
@label @testing
</source>
<label @testing>
<match **>
@type test_dynamic_out
@id test_dyn
</match>
</label>
EOC
@ra
end
test 'plugin status with multi output' do
assert_equal 1, @ra.inputs.size
assert_equal 0, @ra.filters.size
assert_equal 0, @ra.outputs.size
assert_equal 1, @ra.labels.size
assert_equal '@testing', @ra.labels.keys.first
assert_equal 0, @ra.labels.values.first.filters.size
assert_equal 1, @ra.labels.values.first.outputs.size
dyn_out = @ra.labels.values.first.outputs.first
assert_nil dyn_out.child
@ra.start
assert_equal 1, @ra.labels.values.first.outputs.size
assert dyn_out.child
assert_false dyn_out.child.outputs_statically_created
assert_equal 2, dyn_out.child.outputs.size
assert_equal true, dyn_out.child.outputs[0].started?
assert_equal true, dyn_out.child.outputs[1].started?
assert_equal true, dyn_out.child.outputs[0].after_started?
assert_equal true, dyn_out.child.outputs[1].after_started?
@ra.shutdown
assert_equal 1, @ra.labels.values.first.outputs.size
assert_false dyn_out.child.outputs_statically_created
assert_equal 2, dyn_out.child.outputs.size
assert_equal [true, true], dyn_out.child.outputs.map{|i| i.stopped? }
assert_equal [true, true], dyn_out.child.outputs.map{|i| i.before_shutdown? }
assert_equal [true, true], dyn_out.child.outputs.map{|i| i.shutdown? }
assert_equal [true, true], dyn_out.child.outputs.map{|i| i.after_shutdown? }
assert_equal [true, true], dyn_out.child.outputs.map{|i| i.closed? }
assert_equal [true, true], dyn_out.child.outputs.map{|i| i.terminated? }
end
end
sub_test_case 'configure emit_error_interval' do
setup do
system_config = SystemConfig.new
system_config.emit_error_log_interval = 30
@ra = RootAgent.new(log: $log, system_config: system_config)
stub(Engine).root_agent { @ra }
@ra.log.out.reset
one_minute_ago = Time.now.to_i - 60
Timecop.freeze(one_minute_ago)
end
teardown do
Timecop.return
end
test 'suppresses errors' do
mock(@ra.log).warn_backtrace()
e = StandardError.new('standard error')
begin
@ra.handle_emits_error("tag", nil, e)
rescue
end
begin
@ra.handle_emits_error("tag", nil, e)
rescue
end
assert_equal 1, @ra.log.out.logs.size
end
end
sub_test_case 'configured at worker2 with 4 workers environment' do
setup do
ENV['SERVERENGINE_WORKER_ID'] = '2'
@ra = RootAgent.new(log: $log)
system_config = SystemConfig.new
system_config.workers = 4
stub(Engine).worker_id { 2 }
stub(Engine).root_agent { @ra }
stub(Engine).system_config { system_config }
@ra
end
teardown '' do
ENV.delete('SERVERENGINE_WORKER_ID')
end
def configure_ra(conf_str)
conf = Config.parse(conf_str, "(test)", "(test_dir)", true)
@ra.configure(conf)
@ra
end
test 'raises configuration error for missing worker id' do
errmsg = 'Missing worker id on <worker> directive'
assert_raise Fluent::ConfigError.new(errmsg) do
conf = <<-EOC
<worker>
</worker>
EOC
configure_ra(conf)
end
end
test 'raises configuration error for too big worker id' do
errmsg = "worker id 4 specified by <worker> directive is not allowed. Available worker id is between 0 and 3"
assert_raise Fluent::ConfigError.new(errmsg) do
conf = <<-EOC
<worker 4>
</worker>
EOC
configure_ra(conf)
end
end
test 'raises configuration error for too big worker id on multi workers syntax' do
errmsg = "worker id 4 specified by <worker> directive is not allowed. Available worker id is between 0 and 3"
assert_raise Fluent::ConfigError.new(errmsg) do
conf = <<-EOC
<worker 1-4>
</worker>
EOC
configure_ra(conf)
end
end
test 'raises configuration error for worker id collisions on multi workers syntax' do
errmsg = "specified worker_id<2> collisions is detected on <worker> directive. Available worker id(s): [3]"
assert_raise Fluent::ConfigError.new(errmsg) do
conf = <<-EOC
<worker 0-2>
</worker>
<worker 2-4>
</worker>
EOC
configure_ra(conf)
end
end
test 'raises configuration error for worker id collisions on multi workers syntax when multi available worker_ids are left' do
errmsg = "specified worker_id<1> collisions is detected on <worker> directive. Available worker id(s): [2, 3]"
assert_raise Fluent::ConfigError.new(errmsg) do
conf = <<-EOC
<worker 0-1>
</worker>
<worker 1-3>
</worker>
EOC
configure_ra(conf)
end
end
test 'raises configuration error for too big worker id on invalid reversed multi workers syntax' do
errmsg = "greater first_worker_id<3> than last_worker_id<0> specified by <worker> directive is not allowed. Available multi worker assign syntax is <smaller_worker_id>-<greater_worker_id>"
assert_raise Fluent::ConfigError.new(errmsg) do
conf = <<-EOC
<worker 3-0>
</worker>
EOC
configure_ra(conf)
end
end
test 'raises configuration error for invalid elements as a child of worker section' do
errmsg = '<worker> section cannot have <system> directive'
assert_raise Fluent::ConfigError.new(errmsg) do
conf = <<-EOC
<worker 2>
<system>
</system>
</worker>
EOC
configure_ra(conf)
end
end
test 'raises configuration error when configured plugins do not have support multi worker configuration' do
errmsg = "Plugin 'test_out' does not support multi workers configuration (FluentTest::FluentTestOutput)"
assert_raise Fluent::ConfigError.new(errmsg) do
conf = <<-EOC
<match **>
@type test_out
</match>
EOC
configure_ra(conf)
end
end
test 'does not raise configuration error when configured plugins in worker section do not have support multi worker configuration' do
assert_nothing_raised do
conf = <<-EOC
<worker 2>
<match **>
@type test_out
</match>
</worker>
EOC
configure_ra(conf)
end
end
test 'does not raise configuration error when configured plugins as a children of MultiOutput in worker section do not have support multi worker configuration' do
assert_nothing_raised do
conf = <<-EOC
<worker 2>
<match **>
@type copy
<store>
@type test_out
</store>
<store>
@type test_out
</store>
</match>
</worker>
EOC
configure_ra(conf)
end
end
test 'does not raise configuration error when configured plugins owned by plugin do not have support multi worker configuration' do
assert_nothing_raised do
conf = <<-EOC
<worker 2>
<match **>
@type test_out_buffered
<buffer>
@type test_buffer
</buffer>
</match>
</worker>
EOC
configure_ra(conf)
end
end
test 'with plugins' do
conf = <<-EOC
<worker 2>
<source>
@type test_in
@id test_in
</source>
<filter>
type test_filter
id test_filter
</filter>
<match **>
@type relabel
@id test_relabel
@label @test
</match>
<label @test>
<match **>
type test_out
id test_out
</match>
</label>
<label @ERROR>
<match>
@type null
</match>
</label>
</worker>
EOC
ra = configure_ra(conf)
assert_kind_of FluentTestInput, ra.inputs.first
assert_kind_of Plugin::RelabelOutput, ra.outputs.first
assert_kind_of FluentTestFilter, ra.filters.first
assert ra.error_collector
%W(@test @ERROR).each { |label_symbol|
assert_include ra.labels, label_symbol
assert_kind_of Label, ra.labels[label_symbol]
}
test_label = ra.labels['@test']
assert_kind_of FluentTestOutput, test_label.outputs.first
assert_equal ra, test_label.root_agent
error_label = ra.labels['@ERROR']
assert_kind_of Fluent::Plugin::NullOutput, error_label.outputs.first
end
test 'with plugins but for another worker' do
conf = <<-EOC
<worker 0>
<source>
@type test_in
@id test_in
</source>
<filter>
type test_filter
id test_filter
</filter>
<match **>
@type relabel
@id test_relabel
@label @test
</match>
<label @test>
<match **>
type test_out
id test_out
</match>
</label>
<label @ERROR>
<match>
@type null
</match>
</label>
</worker>
EOC
ra = configure_ra(conf)
assert_equal 0, ra.inputs.size
assert_equal 0, ra.outputs.size
assert_equal 0, ra.filters.size
assert_equal 0, ra.labels.size
refute ra.error_collector
end
test 'with plugins for workers syntax should match worker_id equals to 2' do
conf = <<-EOC
<worker 0-2>
<source>
@type forward
</source>
<filter **>
@type test_filter
@id test_filter
</filter>
<match pattern>
@type stdout
</match>
<label @ERROR>
<match>
@type null
</match>
</label>
</worker>
EOC
ra = configure_ra(conf)
assert_kind_of Fluent::Plugin::ForwardInput, ra.inputs.first
assert_kind_of Fluent::Plugin::StdoutOutput, ra.outputs.first
assert_kind_of FluentTestFilter, ra.filters.first
assert ra.error_collector
end
end
sub_test_case 'start with-source-only' do
def conf
<<~EOC
<source>
@type test_in_gen
@id test_in_gen
num 20
interval_sec 0.1
async
</source>
<filter test.**>
@type record_transformer
@id record_transformer
<record>
foo foo
</record>
</filter>
<match test.**>
@type test_out
@id test_out
</match>
EOC
end
def setup
omit "Not supported on Windows" if Fluent.windows?
system_config = SystemConfig.new(
Config::Element.new('system', '', {
'with_source_only' => true,
}, [
Config::Element.new('source_only_buffer', '', {
'flush_interval' => 1,
}, []),
])
)
@root_agent = RootAgent.new(log: $log, system_config: system_config)
stub(Engine).root_agent { @root_agent }
stub(Engine).system_config { system_config }
@root_agent.configure(Config.parse(conf, "(test)", "(test_dir)"))
end
test 'only input plugins should start' do
@root_agent.start
assert_equal(
{
"input started?" => [true],
"filter started?" => [false],
"output started?" => [false],
},
{
"input started?" => @root_agent.inputs.map { |plugin| plugin.started? },
"filter started?" => @root_agent.filters.map { |plugin| plugin.started? },
"output started?" => @root_agent.outputs.map { |plugin| plugin.started? },
}
)
ensure
@root_agent.shutdown
# Buffer files remain because not cancelling source-only.
# As a test, they should be clean-up-ed.
buf_dir = @root_agent.instance_variable_get(:@source_only_buffer_agent).instance_variable_get(:@base_buffer_dir)
FileUtils.remove_dir(buf_dir)
end
test '#cancel_source_only! should start all plugins' do
@root_agent.start
@root_agent.cancel_source_only!
assert_equal(
{
"input started?" => [true],
"filter started?" => [true],
"output started?" => [true],
},
{
"input started?" => @root_agent.inputs.map { |plugin| plugin.started? },
"filter started?" => @root_agent.filters.map { |plugin| plugin.started? },
"output started?" => @root_agent.outputs.map { |plugin| plugin.started? },
}
)
ensure
@root_agent.shutdown
end
test 'buffer should be loaded after #cancel_source_only!' do
@root_agent.start
sleep 1
@root_agent.cancel_source_only!
waiting(3) do
# Wait buffer loaded after source-only cancelled
sleep 1 until @root_agent.outputs[0].events["test.event"].any? { |record| record["num"] == 0 }
end
waiting(3) do
# Wait the last data output
sleep 1 until @root_agent.outputs[0].events["test.event"].any? { |record| record["num"] == 19 }
end
# all data should be outputted
assert { @root_agent.outputs[0].events["test.event"].size == 20 }
ensure
@root_agent.shutdown
end
end
sub_test_case 'start_in_parallel' do
def conf
<<~EOC
<source>
@type test_in_gen
@id test_in_gen
num 20
interval_sec 0.1
async
</source>
<source>
@type test_in
@id test_in
</source>
<filter test.**>
@type record_transformer
@id record_transformer
<record>
foo foo
</record>
</filter>
<match test.**>
@type test_out
@id test_out
</match>
EOC
end
def setup
omit "Not supported on Windows" if Fluent.windows?
system_config = SystemConfig.new(
Config::Element.new('system', '', {}, [
Config::Element.new('source_only_buffer', '', {
'flush_interval' => 1,
}, []),
])
)
@root_agent = RootAgent.new(log: $log, system_config: system_config, start_in_parallel: true)
stub(Engine).root_agent { @root_agent }
stub(Engine).system_config { system_config }
@root_agent.configure(Config.parse(conf, "(test)", "(test_dir)"))
end
test 'only input plugins should start' do
@root_agent.start
assert_equal(
{
"input started?" => [true, false],
"filter started?" => [false],
"output started?" => [false],
},
{
"input started?" => @root_agent.inputs.map { |plugin| plugin.started? },
"filter started?" => @root_agent.filters.map { |plugin| plugin.started? },
"output started?" => @root_agent.outputs.map { |plugin| plugin.started? },
}
)
ensure
@root_agent.shutdown
# Buffer files remain because not cancelling source-only.
# As a test, they should be clean-up-ed.
buf_dir = @root_agent.instance_variable_get(:@source_only_buffer_agent).instance_variable_get(:@base_buffer_dir)
FileUtils.remove_dir(buf_dir)
end
test '#cancel_source_only! should start all plugins' do
@root_agent.start
@root_agent.cancel_source_only!
assert_equal(
{
"input started?" => [true, true],
"filter started?" => [true],
"output started?" => [true],
},
{
"input started?" => @root_agent.inputs.map { |plugin| plugin.started? },
"filter started?" => @root_agent.filters.map { |plugin| plugin.started? },
"output started?" => @root_agent.outputs.map { |plugin| plugin.started? },
}
)
ensure
@root_agent.shutdown
end
test 'buffer should be loaded after #cancel_source_only!' do
@root_agent.start
sleep 1
@root_agent.cancel_source_only!
waiting(3) do
# Wait buffer loaded after source-only cancelled
sleep 1 until @root_agent.outputs[0].events["test.event"].any? { |record| record["num"] == 0 }
end
waiting(3) do
# Wait the last data output
sleep 1 until @root_agent.outputs[0].events["test.event"].any? { |record| record["num"] == 19 }
end
# all data should be outputted
assert { @root_agent.outputs[0].events["test.event"].size == 20 }
ensure
@root_agent.shutdown
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/scripts/exec_script.rb | test/scripts/exec_script.rb | require 'json'
require 'msgpack'
def gen_tsv(time)
"#{time}\ttag1\tok"
end
def gen_json(time)
{'tag' => 'tag1', 'time' => time, 'k1' => 'ok'}.to_json
end
def gen_msgpack(time)
{'tagger' => 'tag1', 'datetime' => time, 'k1' => 'ok'}.to_msgpack
end
def gen_raw_string(time)
"#{time} hello"
end
time = ARGV.first
time = Integer(time) rescue time
case ARGV.last.to_i
when 0
puts gen_tsv(time)
when 1
puts gen_json(time)
when 2
print gen_msgpack(time)
when 3
print gen_raw_string(time)
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/scripts/fluent/plugin/parser_known.rb | test/scripts/fluent/plugin/parser_known.rb | module Fluent
TextParser.register_template('known_old', /^(?<message>.*)$/)
Plugin.register_parser('known', /^(?<message>.*)$/)
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/scripts/fluent/plugin/out_test.rb | test/scripts/fluent/plugin/out_test.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/plugin/output'
require 'fluent/event'
module Fluent::Plugin
class TestOutput < Output
Fluent::Plugin.register_output('test', self)
config_param :name, :string
config_section :buffer do
config_set_default :chunk_keys, ['tag']
end
def initialize
super
@emit_streams = []
end
attr_reader :emit_streams
def emits
all = []
@emit_streams.each {|tag,events|
events.each {|time,record|
all << [tag, time, record]
}
}
all
end
def events
all = []
@emit_streams.each {|tag,events|
all.concat events
}
all
end
def records
all = []
@emit_streams.each {|tag,events|
events.each {|time,record|
all << record
}
}
all
end
def prefer_buffered_processing
false
end
def process(tag, es)
@emit_streams << [tag, es.to_a]
end
def write(chunk)
es = Fluent::ArrayEventStream.new
chunk.each do |time, record|
es.add(time, record)
end
@emit_streams << [tag, es]
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/scripts/fluent/plugin/out_test2.rb | test/scripts/fluent/plugin/out_test2.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Test output plugin ('test2') used by the test suite. It records every event
# stream it receives so tests can inspect emitted data via #emits / #events /
# #records.
module Fluent::Plugin
  class Test2Output < Output
    Fluent::Plugin.register_output('test2', self)
    helpers :event_emitter
    config_param :name, :string
    config_section :buffer do
      config_set_default :chunk_keys, ['tag']
    end
    def initialize
      super
      @emit_streams = []
    end
    # Each entry is [tag, array_of_[time, record]_pairs].
    attr_reader :emit_streams
    # Flattened [tag, time, record] triples across all received streams.
    def emits
      all = []
      @emit_streams.each {|tag,events|
        events.each {|time,record|
          all << [tag, time, record]
        }
      }
      all
    end
    # Flattened [time, record] pairs across all received streams.
    def events
      all = []
      @emit_streams.each {|tag,events|
        all.concat events
      }
      all
    end
    # Just the record hashes, in arrival order.
    def records
      all = []
      @emit_streams.each {|tag,events|
        events.each {|time,record|
          all << record
        }
      }
      all
    end
    # Force synchronous (non-buffered) emission so #process is used.
    def prefer_buffered_processing
      false
    end
    def process(tag, es)
      @emit_streams << [tag, es.to_a]
    end
    def write(chunk)
      es = Fluent::ArrayEventStream.new
      chunk.each do |time, record|
        es.add(time, record)
      end
      # NOTE(review): `tag` is not defined in this scope, so this method would
      # raise NameError if invoked; it appears unreachable because
      # prefer_buffered_processing returns false — confirm before relying on it.
      @emit_streams << [tag, es]
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/scripts/fluent/plugin/formatter_known.rb | test/scripts/fluent/plugin/formatter_known.rb | module Fluent
TextFormatter.register_template('known_old', Proc.new { |tag, time, record|
"#{tag}:#{time}:#{record.size}"
})
Plugin.register_formatter('known', Proc.new { |tag, time, record|
"#{tag}:#{time}:#{record.size}"
})
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/scripts/fluent/plugin/formatter2/formatter_test2.rb | test/scripts/fluent/plugin/formatter2/formatter_test2.rb | module Fluent
Plugin.register_formatter(
'test2',
Proc.new { |tag, time, record|
"#{tag}:#{time}:#{record.size}"
})
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/scripts/fluent/plugin/formatter1/formatter_test1.rb | test/scripts/fluent/plugin/formatter1/formatter_test1.rb | module Fluent
Plugin.register_formatter(
'test1',
Proc.new { |tag, time, record|
"#{tag}:#{time}:#{record.size}"
})
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/compat/test_parser.rb | test/compat/test_parser.rb | require_relative '../helper'
require 'fluent/plugin/parser'
class TextParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
end
class MultiEventTestParser < ::Fluent::Parser
include Fluent::Configurable
def parse(text)
2.times { |i|
record = {}
record['message'] = text
record['number'] = i
yield Fluent::Engine.now, record
}
end
end
Fluent::TextParser.register_template('multi_event_test', Proc.new { MultiEventTestParser.new })
def test_lookup_unknown_format
assert_raise Fluent::NotFoundPluginError do
Fluent::Plugin.new_parser('unknown')
end
end
data('register_formatter' => 'known', 'register_template' => 'known_old')
def test_lookup_known_parser(data)
$LOAD_PATH.unshift(File.join(File.expand_path(File.dirname(__FILE__)), '..', 'scripts'))
assert_nothing_raised Fluent::ConfigError do
Fluent::Plugin.new_parser(data)
end
$LOAD_PATH.shift
end
def test_parse_with_return
parser = Fluent::TextParser.new
parser.configure(config_element('test', '', 'format' => 'none'))
_time, record = parser.parse('log message!')
assert_equal({'message' => 'log message!'}, record)
end
def test_parse_with_block
parser = Fluent::TextParser.new
parser.configure(config_element('test', '', 'format' => 'none'))
parser.parse('log message!') { |time, record|
assert_equal({'message' => 'log message!'}, record)
}
end
def test_multi_event_parser
parser = Fluent::TextParser.new
parser.configure(config_element('test', '', 'format' => 'multi_event_test'))
i = 0
parser.parse('log message!') { |time, record|
assert_equal('log message!', record['message'])
assert_equal(i, record['number'])
i += 1
}
end
def test_setting_estimate_current_event_value
p1 = Fluent::TextParser.new
assert_nil p1.estimate_current_event
assert_nil p1.parser
p1.configure(config_element('test', '', 'format' => 'none'))
assert_equal true, p1.parser.estimate_current_event
p2 = Fluent::TextParser.new
assert_nil p2.estimate_current_event
assert_nil p2.parser
p2.estimate_current_event = false
p2.configure(config_element('test', '', 'format' => 'none'))
assert_equal false, p2.parser.estimate_current_event
end
data(ignorecase: Regexp::IGNORECASE,
multiline: Regexp::MULTILINE,
both: Regexp::IGNORECASE & Regexp::MULTILINE)
def test_regexp_parser_config(options)
source = "(?<test>.*)"
parser = Fluent::TextParser::RegexpParser.new(Regexp.new(source, options), { "dummy" => "dummy" })
regexp = parser.instance_variable_get(:@regexp)
assert_equal(options, regexp.options)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/compat/test_calls_super.rb | test/compat/test_calls_super.rb | require_relative '../helper'
# these are Fluent::Compat::* in fact
require 'fluent/input'
require 'fluent/output'
require 'fluent/filter'
# Verifies the compat-layer behavior for old-API plugins: lifecycle methods
# (#start, #before_shutdown, #shutdown) that fail to call `super` must still
# have the superclass method invoked forcedly, with a warning logged.
class CompatCallsSuperTest < Test::Unit::TestCase
  # Old-API input plugin that calls super in every lifecycle hook.
  class DummyGoodInput < Fluent::Input
    def configure(conf); super; end
    def start; super; end
    def before_shutdown; super; end
    def shutdown; super; end
  end
  # Old-API input plugin that omits super in lifecycle hooks.
  class DummyBadInput < Fluent::Input
    def configure(conf); super; end
    def start; end
    def before_shutdown; end
    def shutdown; end
  end
  class DummyGoodOutput < Fluent::Output
    def configure(conf); super; end
    def start; super; end
    def before_shutdown; super; end
    def shutdown; super; end
  end
  class DummyBadOutput < Fluent::Output
    def configure(conf); super; end
    def start; end
    def before_shutdown; end
    def shutdown; end
  end
  class DummyGoodFilter < Fluent::Filter
    def configure(conf); super; end
    def filter(tag, time, record); end
    def start; super; end
    def before_shutdown; super; end
    def shutdown; super; end
  end
  class DummyBadFilter < Fluent::Filter
    def configure(conf); super; end
    def filter(tag, time, record); end
    def start; end
    def before_shutdown; end
    def shutdown; end
  end

  setup do
    Fluent::Test.setup
  end

  sub_test_case 'old API plugin which calls super properly' do
    test 'Input#start, #before_shutdown and #shutdown calls all superclass methods properly' do
      i = DummyGoodInput.new
      i.configure(config_element())
      assert i.configured?
      i.start
      assert i.started?
      i.before_shutdown
      assert i.before_shutdown?
      i.shutdown
      assert i.shutdown?
      # no warnings expected when super is called properly
      assert i.log.out.logs.empty?
    end
    test 'Output#start, #before_shutdown and #shutdown calls all superclass methods properly' do
      i = DummyGoodOutput.new
      i.configure(config_element())
      assert i.configured?
      i.start
      assert i.started?
      i.before_shutdown
      assert i.before_shutdown?
      i.shutdown
      assert i.shutdown?
      assert i.log.out.logs.empty?
    end
    test 'Filter#start, #before_shutdown and #shutdown calls all superclass methods properly' do
      i = DummyGoodFilter.new
      i.configure(config_element())
      assert i.configured?
      i.start
      assert i.started?
      i.before_shutdown
      assert i.before_shutdown?
      i.shutdown
      assert i.shutdown?
      assert i.log.out.logs.empty?
    end
  end

  sub_test_case 'old API plugin which does not call super' do
    test 'Input#start, #before_shutdown and #shutdown calls superclass methods forcedly with logs' do
      i = DummyBadInput.new
      i.configure(config_element())
      assert i.configured?
      i.start
      assert i.started?
      i.before_shutdown
      assert i.before_shutdown?
      i.shutdown
      assert i.shutdown?
      # lifecycle state flags must be set even though super was omitted,
      # and each forced call must be logged as a warning
      logs = i.log.out.logs
      assert{ logs.any?{|l| l.include?("[warn]: super was not called in #start: called it forcedly plugin=CompatCallsSuperTest::DummyBadInput") } }
      assert{ logs.any?{|l| l.include?("[warn]: super was not called in #before_shutdown: calling it forcedly plugin=CompatCallsSuperTest::DummyBadInput") } }
      assert{ logs.any?{|l| l.include?("[warn]: super was not called in #shutdown: calling it forcedly plugin=CompatCallsSuperTest::DummyBadInput") } }
    end
    test 'Output#start, #before_shutdown and #shutdown calls superclass methods forcedly with logs' do
      i = DummyBadOutput.new
      i.configure(config_element())
      assert i.configured?
      i.start
      assert i.started?
      i.before_shutdown
      assert i.before_shutdown?
      i.shutdown
      assert i.shutdown?
      logs = i.log.out.logs
      assert{ logs.any?{|l| l.include?("[warn]: super was not called in #start: called it forcedly plugin=CompatCallsSuperTest::DummyBadOutput") } }
      assert{ logs.any?{|l| l.include?("[warn]: super was not called in #before_shutdown: calling it forcedly plugin=CompatCallsSuperTest::DummyBadOutput") } }
      assert{ logs.any?{|l| l.include?("[warn]: super was not called in #shutdown: calling it forcedly plugin=CompatCallsSuperTest::DummyBadOutput") } }
    end
    test 'Filter#start, #before_shutdown and #shutdown calls superclass methods forcedly with logs' do
      i = DummyBadFilter.new
      i.configure(config_element())
      assert i.configured?
      i.start
      assert i.started?
      i.before_shutdown
      assert i.before_shutdown?
      i.shutdown
      assert i.shutdown?
      logs = i.log.out.logs
      assert{ logs.any?{|l| l.include?("[warn]: super was not called in #start: called it forcedly plugin=CompatCallsSuperTest::DummyBadFilter") } }
      assert{ logs.any?{|l| l.include?("[warn]: super was not called in #before_shutdown: calling it forcedly plugin=CompatCallsSuperTest::DummyBadFilter") } }
      assert{ logs.any?{|l| l.include?("[warn]: super was not called in #shutdown: calling it forcedly plugin=CompatCallsSuperTest::DummyBadFilter") } }
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_buffer_chunk.rb | test/plugin/test_buffer_chunk.rb | require_relative '../helper'
require 'fluent/plugin/buffer/chunk'
# Tests for the abstract Fluent::Plugin::Buffer::Chunk base class: initial
# state, the NotImplementedError contract for subclass hooks, the
# MessagePack streaming mix-in, and compression-mode decoration.
class BufferChunkTest < Test::Unit::TestCase
  sub_test_case 'blank buffer chunk' do
    test 'has generated unique id, given metadata, created_at and modified_at' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
      assert{ chunk.unique_id.bytesize == 16 }
      assert{ chunk.metadata.object_id == meta.object_id }
      assert{ chunk.created_at.is_a? Time }
      assert{ chunk.modified_at.is_a? Time }
      assert chunk.unstaged?
      assert !chunk.staged?
      assert !chunk.queued?
      assert !chunk.closed?
    end
    test 'has many methods for chunks, but not implemented' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
      assert chunk.respond_to?(:append)
      assert chunk.respond_to?(:concat)
      assert chunk.respond_to?(:commit)
      assert chunk.respond_to?(:rollback)
      assert chunk.respond_to?(:bytesize)
      assert chunk.respond_to?(:size)
      assert chunk.respond_to?(:length)
      assert chunk.respond_to?(:empty?)
      assert chunk.respond_to?(:read)
      assert chunk.respond_to?(:open)
      assert chunk.respond_to?(:write_to)
      assert_raise(NotImplementedError){ chunk.append([]) }
      assert_raise(NotImplementedError){ chunk.concat(nil, 0) }
      assert_raise(NotImplementedError){ chunk.commit }
      assert_raise(NotImplementedError){ chunk.rollback }
      assert_raise(NotImplementedError){ chunk.bytesize }
      assert_raise(NotImplementedError){ chunk.size }
      assert_raise(NotImplementedError){ chunk.length }
      assert_raise(NotImplementedError){ chunk.empty? }
      assert_raise(NotImplementedError){ chunk.read }
      assert_raise(NotImplementedError){ chunk.open(){} }
      assert_raise(NotImplementedError){ chunk.write_to(nil) }
      assert !chunk.respond_to?(:msgpack_each)
    end
    test 'has method #each and #msgpack_each only when extended by ChunkMessagePackEventStreamer' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
      assert !chunk.respond_to?(:each)
      assert !chunk.respond_to?(:msgpack_each)
      chunk.extend Fluent::ChunkMessagePackEventStreamer
      assert chunk.respond_to?(:each)
      assert chunk.respond_to?(:msgpack_each)
    end
    test 'unpacker arg is not implemented for ChunkMessagePackEventStreamer' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
      chunk.extend Fluent::ChunkMessagePackEventStreamer
      unpacker = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
      assert_raise(NotImplementedError){ chunk.each(unpacker: unpacker) }
      assert_raise(NotImplementedError){ chunk.msgpack_each(unpacker: unpacker) }
    end
    test 'some methods raise ArgumentError with an option of `compressed: :gzip` and without extending Compressble`' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
      assert_raise(ArgumentError){ chunk.read(compressed: :gzip) }
      assert_raise(ArgumentError){ chunk.open(compressed: :gzip){} }
      assert_raise(ArgumentError){ chunk.write_to(nil, compressed: :gzip) }
      assert_raise(ArgumentError){ chunk.append(nil, compress: :gzip) }
    end
    test 'some methods raise ArgumentError with an option of `compressed: :zstd` and without extending Compressble`' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
      assert_raise(ArgumentError){ chunk.read(compressed: :zstd) }
      assert_raise(ArgumentError){ chunk.open(compressed: :zstd){} }
      assert_raise(ArgumentError){ chunk.write_to(nil, compressed: :zstd) }
      assert_raise(ArgumentError){ chunk.append(nil, compress: :zstd) }
    end
  end

  # Minimal concrete chunk: stores payload in a String and exposes it via
  # StringIO, which is enough for base-class #empty?, #write_to, and the
  # MessagePack streamer to work.
  class TestChunk < Fluent::Plugin::Buffer::Chunk
    attr_accessor :data
    def initialize(meta)
      super
      @data = ''
    end
    def size
      @data.size
    end
    def open(**kwargs)
      require 'stringio'
      io = StringIO.new(@data)
      yield io
    end
  end

  sub_test_case 'minimum chunk implements #size and #open' do
    test 'chunk lifecycle' do
      c = TestChunk.new(Object.new)
      assert c.unstaged?
      assert !c.staged?
      assert !c.queued?
      assert !c.closed?
      assert c.writable?
      c.staged!
      assert !c.unstaged?
      assert c.staged?
      assert !c.queued?
      assert !c.closed?
      assert c.writable?
      c.enqueued!
      assert !c.unstaged?
      assert !c.staged?
      assert c.queued?
      assert !c.closed?
      assert !c.writable?
      c.close
      assert !c.unstaged?
      assert !c.staged?
      assert !c.queued?
      assert c.closed?
      assert !c.writable?
    end
    test 'chunk can be unstaged' do
      c = TestChunk.new(Object.new)
      assert c.unstaged?
      assert !c.staged?
      assert !c.queued?
      assert !c.closed?
      assert c.writable?
      c.staged!
      assert !c.unstaged?
      assert c.staged?
      assert !c.queued?
      assert !c.closed?
      assert c.writable?
      c.unstaged!
      assert c.unstaged?
      assert !c.staged?
      assert !c.queued?
      assert !c.closed?
      assert c.writable?
      c.enqueued!
      assert !c.unstaged?
      assert !c.staged?
      assert c.queued?
      assert !c.closed?
      assert !c.writable?
      c.close
      assert !c.unstaged?
      assert !c.staged?
      assert !c.queued?
      assert c.closed?
      assert !c.writable?
    end
    test 'can respond to #empty? correctly' do
      c = TestChunk.new(Object.new)
      assert_equal 0, c.size
      assert c.empty?
    end
    test 'can write its contents to io object' do
      c = TestChunk.new(Object.new)
      c.data << "my data\nyour data\n"
      io = StringIO.new
      c.write_to(io)
      # BUG FIX: this previously read `assert "my data\nyour data\n", io.to_s`,
      # which always passes (the string is truthy and the second argument is
      # only the failure message), and StringIO#to_s does not return the
      # buffer contents anyway. Compare the actual buffer via #string.
      assert_equal "my data\nyour data\n", io.string
    end
    test 'can feed objects into blocks with unpacking msgpack if ChunkMessagePackEventStreamer is included' do
      require 'msgpack'
      c = TestChunk.new(Object.new)
      c.extend Fluent::ChunkMessagePackEventStreamer
      c.data << MessagePack.pack(['my data', 1])
      c.data << MessagePack.pack(['your data', 2])
      ary = []
      c.msgpack_each do |obj|
        ary << obj
      end
      assert_equal ['my data', 1], ary[0]
      assert_equal ['your data', 2], ary[1]
    end
  end

  sub_test_case 'when compress is gzip' do
    test 'create decompressable chunk' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta, compress: :gzip)
      assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::GzipDecompressable)
    end
  end

  sub_test_case 'when compress is zstd' do
    test 'create decompressable chunk' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta, compress: :zstd)
      assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::ZstdDecompressable)
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_sd_file.rb | test/plugin/test_sd_file.rb | require_relative '../helper'
require 'fluent/plugin/sd_file'
require 'fileutils'
require 'json'
# Tests for the file-based service discovery plugin: config-file loading in
# several formats, and runtime change detection (service_in / service_out
# events) driven by a stat watcher.
class FileServiceDiscoveryTest < ::Test::Unit::TestCase
  setup do
    @dir = File.expand_path('data/sd_file', __dir__)
    FileUtils.mkdir_p(File.join(@dir, 'tmp'))
  end
  teardown do
    FileUtils.rm_r(File.join(@dir, 'tmp'))
  end

  sub_test_case 'configure' do
    test 'load yml' do
      sdf = Fluent::Plugin::FileServiceDiscovery.new
      sdf.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'config.yml') }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:file, '127.0.0.1', 24224, 'test1', 1, false, 'user1', 'pass1', 'key1'), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:file, '127.0.0.1', 24225, nil, 1), sdf.services[1]
    end
    test 'load yaml' do
      sdf = Fluent::Plugin::FileServiceDiscovery.new
      sdf.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'config.yaml') }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:file, '127.0.0.1', 24224, 'test1', 1, false, 'user1', 'pass1', 'key1'), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:file, '127.0.0.1', 24225, nil, 1), sdf.services[1]
    end
    test 'load json' do
      sdf = Fluent::Plugin::FileServiceDiscovery.new
      sdf.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'config.json') }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:file, '127.0.0.1', 24224, 'test1', 1, false, 'user1', 'pass1', 'key1'), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:file, '127.0.0.1', 24225, nil, 1), sdf.services[1]
    end
    test 'regard as yaml if ext is not given' do
      sdf = Fluent::Plugin::FileServiceDiscovery.new
      sdf.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'config') }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:file, '127.0.0.1', 24224, 'test1', 1, false, 'user1', 'pass1', 'key1'), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:file, '127.0.0.1', 24225, nil, 1), sdf.services[1]
    end
    test 'raise an error if config has error' do
      sdf = Fluent::Plugin::FileServiceDiscovery.new
      e = assert_raise Fluent::ConfigError do
        sdf.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'invalid_config.yaml') }))
      end
      assert_match(/path=/, e.message)
    end
    test 'raise an error if config file does not exist' do
      sdf = Fluent::Plugin::FileServiceDiscovery.new
      e = assert_raise Fluent::ConfigError do
        sdf.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'invalid_not_found.json') }))
      end
      assert_match(/not found/, e.message)
    end
  end

  sub_test_case '#start' do
    # Replaces the event-loop attachment of the plugin's StatWatcher with a
    # Fiber so tests can trigger "file changed" callbacks deterministically
    # via #resume instead of waiting on real filesystem events.
    module TestStatEventHelperWrapper
      # easy to control statsevent
      def event_loop_attach(watcher)
        unless watcher.is_a?(Fluent::Plugin::FileServiceDiscovery::StatWatcher)
          super
          return
        end
        @test_stat_event_helper_wrapper_watchers ||= []
        @test_stat_event_helper_wrapper_watchers << watcher
        @test_stat_event_helper_wrapper_context = Fiber.new do
          loop do
            @test_stat_event_helper_wrapper_watchers.each do |w|
              w.on_change('old', 'new')
            end
            if Fiber.yield == :finish
              break
            end
          end
        end
        # fire the initial on_change immediately
        resume
      end

      # Fire one round of on_change callbacks on all registered watchers.
      def resume
        @test_stat_event_helper_wrapper_context.resume(:resume)
      end

      def shutdown
        super
        if @test_stat_event_helper_wrapper_context
          @test_stat_event_helper_wrapper_context.resume(:finish)
        end
      end
    end

    def create_tmp_config(path, body)
      File.write(File.join(@dir, 'tmp', path), body)
    end

    setup do
      sdf = Fluent::Plugin::FileServiceDiscovery.new
      @sd_file = sdf
    end

    teardown do
      # walk the plugin through its full shutdown sequence
      if @sd_file
        @sd_file.stop unless @sd_file.stopped?
        @sd_file.before_shutdown unless @sd_file.before_shutdown?
        @sd_file.shutdown unless @sd_file.shutdown?
        @sd_file.after_shutdown unless @sd_file.after_shutdown?
        @sd_file.close unless @sd_file.closed?
        @sd_file.terminate unless @sd_file.terminated?
      end
    end

    test 'Skip if file is not updated' do
      @sd_file.extend(TestStatEventHelperWrapper)
      create_tmp_config('config.json', JSON.generate([{ port: 1233, host: '127.0.0.1' }]))
      @sd_file.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'config.yml') }))
      queue = []
      mock.proxy(@sd_file).refresh_file(queue).twice
      @sd_file.start(queue)
      assert_empty queue
      @sd_file.resume
      assert_empty queue
    end

    test 'Skip if file is invalid contents' do
      @sd_file.extend(TestStatEventHelperWrapper)
      create_tmp_config('config.json', JSON.generate([{ port: 1233, host: '127.0.0.1' }]))
      @sd_file.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'config.yml') }))
      queue = []
      @sd_file.start(queue)
      mock.proxy(@sd_file).refresh_file(queue).once
      create_tmp_config('test.json', 'invalid contents')
      @sd_file.resume
      assert_empty queue
    end

    test 'Skip if error is occurred' do
      @sd_file.extend(TestStatEventHelperWrapper)
      create_tmp_config('config.json', JSON.generate([{ port: 1233, host: '127.0.0.1' }]))
      @sd_file.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'config.yml') }))
      queue = []
      FileUtils.rm_r(File.join(@dir, 'tmp', 'config.json'))
      mock.proxy(@sd_file).refresh_file(queue).twice
      @sd_file.start(queue)
      assert_empty queue
      @sd_file.resume
      assert_empty queue
    end

    test 'if service is updated, service_in and service_out event happen' do
      @sd_file.extend(TestStatEventHelperWrapper)
      create_tmp_config('test.json', JSON.generate([{ port: 1233, host: '127.0.0.1' }]))
      @sd_file.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'tmp/test.json') }))
      queue = []
      @sd_file.start(queue)
      create_tmp_config('test.json', JSON.generate([{ port: 1234, host: '127.0.0.1' }]))
      @sd_file.resume
      assert_equal 2, queue.size
      join = queue.shift
      drain = queue.shift
      assert_equal Fluent::Plugin::ServiceDiscovery::SERVICE_IN, join.type
      assert_equal 1234, join.service.port
      assert_equal '127.0.0.1', join.service.host
      assert_equal Fluent::Plugin::ServiceDiscovery::SERVICE_OUT, drain.type
      assert_equal 1233, drain.service.port
      assert_equal '127.0.0.1', drain.service.host
    end

    test 'if service is deleted, service_out event happens' do
      @sd_file.extend(TestStatEventHelperWrapper)
      create_tmp_config('test.json', JSON.generate([{ port: 1233, host: '127.0.0.1' }, { port: 1234, host: '127.0.0.2' }]))
      @sd_file.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'tmp/test.json') }))
      queue = []
      @sd_file.start(queue)
      create_tmp_config('test.json', JSON.generate([{ port: 1233, host: '127.0.0.1' }]))
      @sd_file.resume
      assert_equal 1, queue.size
      drain = queue.shift
      assert_equal Fluent::Plugin::ServiceDiscovery::SERVICE_OUT, drain.type
      assert_equal 1234, drain.service.port
      assert_equal '127.0.0.2', drain.service.host
    end

    test 'if new service is added, service_in event happens' do
      @sd_file.extend(TestStatEventHelperWrapper)
      create_tmp_config('test.json', JSON.generate([{ port: 1233, host: '127.0.0.1' }]))
      @sd_file.configure(config_element('service_discovery', '', { 'path' => File.join(@dir, 'tmp/test.json') }))
      queue = []
      @sd_file.start(queue)
      create_tmp_config('test.json', JSON.generate([{ port: 1233, host: '127.0.0.1' }, { port: 1234, host: '127.0.0.2' }]))
      @sd_file.resume
      assert_equal 1, queue.size
      join = queue.shift
      assert_equal Fluent::Plugin::ServiceDiscovery::SERVICE_IN, join.type
      assert_equal 1234, join.service.port
      assert_equal '127.0.0.2', join.service.host
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_copy.rb | test/plugin/test_out_copy.rb | require_relative '../helper'
require 'fluent/test/driver/multi_output'
require 'fluent/plugin/out_copy'
require 'fluent/event'
require 'flexmock/test_unit'
# Tests for the copy output plugin: configuration validation, copy_mode
# semantics (no_copy / shallow / marshal / deep), and the ignore_error /
# ignore_if_prev_success store options.
class CopyOutputTest < Test::Unit::TestCase
  include FlexMock::TestCase

  class << self
    def startup
      # make the scripts/ test plugins ('test' and 'test2') loadable
      $LOAD_PATH.unshift File.expand_path(File.join(File.dirname(__FILE__), '..', 'scripts'))
      require 'fluent/plugin/out_test'
      require 'fluent/plugin/out_test2'
    end
    def shutdown
      $LOAD_PATH.shift
    end
  end

  def setup
    Fluent::Test.setup
  end

  CONFIG = %[
    <store>
      @type test
      name c0
    </store>
    <store>
      @type test2
      name c1
    </store>
    <store>
      @type test
      name c2
    </store>
  ]

  def create_driver(conf = CONFIG)
    Fluent::Test::Driver::MultiOutput.new(Fluent::Plugin::CopyOutput).configure(conf)
  end

  def test_configure
    d = create_driver
    outputs = d.instance.outputs
    assert_equal 3, outputs.size
    assert_equal Fluent::Plugin::TestOutput, outputs[0].class
    assert_equal Fluent::Plugin::Test2Output, outputs[1].class
    assert_equal Fluent::Plugin::TestOutput, outputs[2].class
    assert_equal "c0", outputs[0].name
    assert_equal "c1", outputs[1].name
    assert_equal "c2", outputs[2].name
    assert_false d.instance.deep_copy
    assert_equal :no_copy, d.instance.copy_mode
  end

  # invalid: ignore_if_prev_success on the first store has no predecessor
  ERRORNEOUS_IGNORE_IF_PREV_SUCCESS_CONFIG = %[
    <store ignore_if_prev_success ignore_error>
      @type test
      name c0
    </store>
    <store ignore_if_prev_success ignore_error>
      @type test
      name c1
    </store>
    <store ignore_if_prev_success>
      @type test
      name c2
    </store>
  ]

  def test_configure_with_errorneus_ignore_if_prev_success
    assert_raise(Fluent::ConfigError) do
      create_driver(ERRORNEOUS_IGNORE_IF_PREV_SUCCESS_CONFIG)
    end
  end

  ALL_IGNORE_ERROR_WITHOUT_IGNORE_IF_PREV_SUCCESS_CONFIG = %[
    @log_level info
    <store ignore_error>
      @type test
      name c0
    </store>
    <store ignore_error>
      @type test
      name c1
    </store>
    <store ignore_error>
      @type test
      name c2
    </store>
  ]

  def test_configure_all_ignore_errors_without_ignore_if_prev_success
    d = create_driver(ALL_IGNORE_ERROR_WITHOUT_IGNORE_IF_PREV_SUCCESS_CONFIG)
    expected = /ignore_errors are specified in all <store>, but ignore_if_prev_success is not specified./
    matches = d.logs.grep(expected)
    assert_equal(1, matches.length, "Logs do not contain '#{expected}' '#{d.logs}'")
  end

  def test_configure_with_deep_copy_and_use_shallow_copy_mode
    d = create_driver(%[
      deep_copy true
      <store>
        @type test
        name c0
      </store>
    ])
    outputs = d.instance.outputs
    assert_equal 1, outputs.size
    assert_equal Fluent::Plugin::TestOutput, outputs[0].class
    assert_equal "c0", outputs[0].name
    assert_true d.instance.deep_copy
    # legacy deep_copy flag maps to the :shallow copy_mode
    assert_equal :shallow, d.instance.copy_mode
  end

  def test_feed_events
    d = create_driver
    assert !d.instance.outputs[0].has_router?
    assert_not_nil d.instance.outputs[1].router
    assert !d.instance.outputs[2].has_router?
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a" => 1})
      d.feed(time, {"a" => 2})
    end
    d.instance.outputs.each {|o|
      assert_equal [ [time, {"a"=>1}], [time, {"a"=>2}] ], o.events
    }
  end

  def test_msgpack_unpacker_cache_bug_for_msgpack_event_stream
    d = create_driver
    time = event_time("2011-01-02 13:14:15 UTC")
    source = Fluent::ArrayEventStream.new([ [time, {"a" => 1}], [time, {"a" => 2}] ])
    es = Fluent::MessagePackEventStream.new(source.to_msgpack_stream)
    d.run(default_tag: 'test') do
      d.feed(es)
    end
    d.instance.outputs.each { |o|
      assert_equal [ [time, {"a"=>1}], [time, {"a"=>2}] ], o.events
    }
  end

  # Builds a two-store driver whose first output mutates each record
  # (adds 'foo' => 'bar'), to observe whether the second output sees the
  # mutation under each copy_mode.
  def create_event_test_driver(copy_mode = 'no_copy')
    config = %[
      copy_mode #{copy_mode}
      <store>
        @type test
        name output1
      </store>
      <store>
        @type test
        name output2
      </store>
    ]
    d = Fluent::Test::Driver::MultiOutput.new(Fluent::Plugin::CopyOutput).configure(config)
    d.instance.outputs[0].define_singleton_method(:process) do |tag, es|
      es.each do |time, record|
        record['foo'] = 'bar'
      end
      super(tag, es)
    end
    d
  end

  time = event_time("2013-05-26 06:37:22 UTC")
  gen_multi_es = Proc.new {
    es = Fluent::MultiEventStream.new
    es.add(time, {"a" => 1, "nest" => {'k' => 'v'}})
    es.add(time, {"b" => 1, "nest" => {'k' => 'v'}})
    es
  }

  data(
    "OneEventStream without copy" => ['no_copy', Fluent::OneEventStream.new(time, {"a" => 1, "nest" => {'k' => 'v'}})],
    "OneEventStream with shallow" => ['shallow', Fluent::OneEventStream.new(time, {"a" => 1, "nest" => {'k' => 'v'}})],
    "OneEventStream with marshal" => ['marshal', Fluent::OneEventStream.new(time, {"a" => 1, "nest" => {'k' => 'v'}})],
    "OneEventStream with deep" => ['deep', Fluent::OneEventStream.new(time, {"a" => 1, "nest" => {'k' => 'v'}})],
    "ArrayEventStream without copy" => ['no_copy', Fluent::ArrayEventStream.new([[time, {"a" => 1, "nest" => {'k' => 'v'}}], [time, {"b" => 2, "nest" => {'k' => 'v'}}]])],
    "ArrayEventStream with shallow" => ['shallow', Fluent::ArrayEventStream.new([[time, {"a" => 1, "nest" => {'k' => 'v'}}], [time, {"b" => 2, "nest" => {'k' => 'v'}}]])],
    "ArrayEventStream with marshal" => ['marshal', Fluent::ArrayEventStream.new([[time, {"a" => 1, "nest" => {'k' => 'v'}}], [time, {"b" => 2, "nest" => {'k' => 'v'}}]])],
    "ArrayEventStream with deep" => ['deep', Fluent::ArrayEventStream.new([[time, {"a" => 1, "nest" => {'k' => 'v'}}], [time, {"b" => 2, "nest" => {'k' => 'v'}}]])],
    "MultiEventStream without copy" => ['no_copy', gen_multi_es.call],
    "MultiEventStream with shallow" => ['shallow', gen_multi_es.call],
    "MultiEventStream with marshal" => ['marshal', gen_multi_es.call],
    "MultiEventStream with deep" => ['deep', gen_multi_es.call],
  )
  def test_copy_mode_with_event_streams(data)
    copy_mode, es = data
    d = create_event_test_driver(copy_mode)
    d.run(default_tag: 'test') do
      d.feed(es)
    end
    events = d.instance.outputs.map(&:events)
    if copy_mode != 'no_copy'
      # copied: output2's records must not see output1's mutation; nested
      # hashes are shared only under shallow copy
      events[0].each_with_index do |entry0, i|
        record0 = entry0.last
        record1 = events[1][i].last
        assert_not_equal record0.object_id, record1.object_id
        assert_equal "bar", record0["foo"]
        assert !record1.has_key?("foo")
        if copy_mode == 'shallow'
          assert_equal record0['nest'].object_id, record1['nest'].object_id
        else
          assert_not_equal record0['nest'].object_id, record1['nest'].object_id
        end
      end
    else
      # no_copy: both outputs share the very same record objects
      events[0].each_with_index do |entry0, i|
        record0 = entry0.last
        record1 = events[1][i].last
        assert_equal record0.object_id, record1.object_id
        assert_equal "bar", record0["foo"]
        assert_equal "bar", record1["foo"]
        assert_equal record0['nest'].object_id, record1['nest'].object_id
      end
    end
  end

  IGNORE_ERROR_CONFIG = %[
    <store ignore_error>
      @type test
      name c0
    </store>
    <store ignore_error>
      @type test
      name c1
    </store>
    <store>
      @type test
      name c2
    </store>
  ]

  def test_ignore_error
    d = create_driver(IGNORE_ERROR_CONFIG)
    # override to raise an error
    d.instance.outputs[0].define_singleton_method(:process) do |tag, es|
      raise ArgumentError, 'Failed'
    end
    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
    assert_nothing_raised do
      d.run(default_tag: 'test') do
        d.feed(time, {"a"=>1})
      end
    end
  end

  IGNORE_IF_PREV_SUCCESS_CONFIG = %[
    <store ignore_error>
      @type test
      name c0
    </store>
    <store ignore_if_prev_success ignore_error>
      @type test
      name c1
    </store>
    <store ignore_if_prev_success>
      @type test
      name c2
    </store>
  ]

  def test_ignore_if_prev_success
    d = create_driver(IGNORE_IF_PREV_SUCCESS_CONFIG)
    # override to raise an error
    d.instance.outputs[0].define_singleton_method(:process) do |tag, es|
      raise ArgumentError, 'Failed'
    end
    # check ignore_if_prev_success functionality:
    # 1. output 2 is succeeded.
    # 2. output 3 is not called.
    flexstub(d.instance.outputs[1]) do |output|
      output.should_receive(:process).once
    end
    flexstub(d.instance.outputs[2]) do |output|
      output.should_receive(:process).never
    end
    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
    assert_nothing_raised do
      d.run(default_tag: 'test') do
        d.feed(time, {"a"=>1})
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_labeled_tsv.rb | test/plugin/test_parser_labeled_tsv.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser'
class LabeledTSVParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
end
def test_config_params
parser = Fluent::Test::Driver::Parser.new(Fluent::TextParser::LabeledTSVParser)
assert_equal "\t", parser.instance.delimiter
assert_equal ":", parser.instance.label_delimiter
parser.configure(
'delimiter' => ',',
'label_delimiter' => '=',
)
assert_equal ",", parser.instance.delimiter
assert_equal "=", parser.instance.label_delimiter
end
def test_parse
parser = Fluent::Test::Driver::Parser.new(Fluent::TextParser::LabeledTSVParser)
parser.configure({})
parser.instance.parse("time:2013/02/28 12:00:00\thost:192.168.0.1\treq_id:111") { |time, record|
assert_equal(event_time('2013/02/28 12:00:00', format: '%Y/%m/%d %H:%M:%S'), time)
assert_equal({
'host' => '192.168.0.1',
'req_id' => '111',
}, record)
}
end
def test_parse_with_customized_delimiter
parser = Fluent::Test::Driver::Parser.new(Fluent::TextParser::LabeledTSVParser)
parser.configure(
'delimiter' => ',',
'label_delimiter' => '=',
)
parser.instance.parse('time=2013/02/28 12:00:00,host=192.168.0.1,req_id=111') { |time, record|
assert_equal(event_time('2013/02/28 12:00:00', format: '%Y/%m/%d %H:%M:%S'), time)
assert_equal({
'host' => '192.168.0.1',
'req_id' => '111',
}, record)
}
end
def test_parse_with_customized_time_format
parser = Fluent::Test::Driver::Parser.new(Fluent::TextParser::LabeledTSVParser)
parser.configure(
'time_key' => 'mytime',
'time_format' => '%d/%b/%Y:%H:%M:%S %z',
)
parser.instance.parse("mytime:28/Feb/2013:12:00:00 +0900\thost:192.168.0.1\treq_id:111") { |time, record|
assert_equal(event_time('28/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal({
'host' => '192.168.0.1',
'req_id' => '111',
}, record)
}
end
def test_parse_without_time
time_at_start = Time.now.to_i
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::LabeledTSVParser)
parser.configure({})
parser.instance.parse("host:192.168.0.1\treq_id:111") { |time, record|
assert time && time >= time_at_start, "parser puts current time without time input"
assert_equal({
'host' => '192.168.0.1',
'req_id' => '111',
}, record)
}
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::LabeledTSVParser)
parser.configure({'estimate_current_event' => 'no'})
parser.instance.parse("host:192.168.0.1\treq_id:111") { |time, record|
assert_equal({
'host' => '192.168.0.1',
'req_id' => '111',
}, record)
assert_nil time, "parser returns nil w/o time and if configured so"
}
end
def test_parse_with_keep_time_key
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::LabeledTSVParser)
parser.configure(
'time_format'=>"%d/%b/%Y:%H:%M:%S %z",
'keep_time_key'=>'true',
)
text = '28/Feb/2013:12:00:00 +0900'
parser.instance.parse("time:#{text}") do |time, record|
assert_equal text, record['time']
end
end
def test_parse_and_reject_invalid_kv_pairs
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::LabeledTSVParser)
parser.configure(
'delimiter' => ' ',
'label_delimiter' => '=',
)
text = 'A leading portion that is not LTSV : foo=bar baz=derp and a trailing portion'
expected = {'foo' => 'bar', 'baz' => 'derp'}
parser.instance.parse(text) do |time, record|
assert_equal expected, record
end
end
def test_parse_with_null_value_pattern
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::LabeledTSVParser)
parser.configure(
'null_value_pattern'=>'^(-|null|NULL)$'
)
parser.instance.parse("a:-\tb:null\tc:NULL\td:\te:--\tf:nuLL") do |time, record|
assert_nil record['a']
assert_nil record['b']
assert_nil record['c']
assert_equal record['d'], ''
assert_equal record['e'], '--'
assert_equal record['f'], 'nuLL'
end
end
def test_parse_with_null_empty_string
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::LabeledTSVParser)
parser.configure(
'null_empty_string'=>true
)
parser.instance.parse("a:\tb: ") do |time, record|
assert_nil record['a']
assert_equal record['b'], ' '
end
end
data("single space" => ["k1=v1 k2=v2", { "k1" => "v1", "k2" => "v2" }],
"multiple space" => ["k1=v1 k2=v2", { "k1" => "v1", "k2" => "v2" }],
"reverse" => ["k2=v2 k1=v1", { "k1" => "v1", "k2" => "v2" }],
"tab" => ["k2=v2\tk1=v1", { "k1" => "v1", "k2" => "v2" }],
"tab and space" => ["k2=v2\t k1=v1", { "k1" => "v1", "k2" => "v2" }])
def test_parse_with_delimiter_pattern(data)
text, expected = data
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::LabeledTSVParser)
parser.configure(
'delimiter_pattern' => '/\s+/',
'label_delimiter' => '='
)
parser.instance.parse(text) do |_time, record|
assert_equal(expected, record)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_storage.rb | test/plugin/test_storage.rb | require_relative '../helper'
require 'fluent/plugin/storage'
require 'fluent/plugin/base'
class DummyPlugin < Fluent::Plugin::TestBase
end
class BareStorage < Fluent::Plugin::Storage
Fluent::Plugin.register_storage('bare', self)
end
class BasicStorage < Fluent::Plugin::Storage
Fluent::Plugin.register_storage('example', self)
attr_reader :data, :saved
def initialize
super
@data = @saved = nil
end
def load
@data = {}
end
def save
@saved = @data.dup
end
def get(key)
@data[key]
end
def fetch(key, defval)
@data.fetch(key, defval)
end
def put(key, value)
@data[key] = value
end
def delete(key)
@data.delete(key)
end
def update(key, &block)
@data[key] = block.call(@data[key])
end
def close
@data = {}
super
end
def terminate
@saved = {}
super
end
end
class StorageTest < Test::Unit::TestCase
sub_test_case 'BareStorage' do
setup do
plugin = DummyPlugin.new
@s = BareStorage.new
@s.configure(config_element())
@s.owner = plugin
end
test 'is configured with plugin information and system config' do
plugin = DummyPlugin.new
plugin.system_config_override({'process_name' => 'mytest'})
plugin.configure(config_element('ROOT', '', {'@id' => '1'}))
s = BareStorage.new
s.configure(config_element())
s.owner = plugin
assert_equal 'mytest', s.owner.system_config.process_name
assert_equal '1', s.instance_eval{ @_plugin_id }
end
test 'does NOT have features for high-performance/high-consistent storages' do
assert_equal false, @s.persistent_always?
assert_equal false, @s.synchronized?
end
test 'does have default values which is conservative for almost all users' do
assert_equal false, @s.persistent
assert_equal true, @s.autosave
assert_equal 10, @s.autosave_interval
assert_equal true, @s.save_at_shutdown
end
test 'load/save does NOT anything: just as memory storage' do
assert_nothing_raised{ @s.load }
assert_nothing_raised{ @s.save }
end
test 'all operations are not defined yet' do
assert_raise NotImplementedError do
@s.get('key')
end
assert_raise NotImplementedError do
@s.fetch('key', 'value')
end
assert_raise NotImplementedError do
@s.put('key', 'value')
end
assert_raise NotImplementedError do
@s.delete('key')
end
assert_raise NotImplementedError do
@s.update('key'){ |v| v + '2' }
end
end
end
sub_test_case 'ExampleStorage' do
setup do
plugin = DummyPlugin.new
plugin.configure(config_element('ROOT', '', {'@id' => '1'}))
@s = BasicStorage.new
@s.configure(config_element())
@s.owner = plugin
end
test 'load/save works well as plugin internal state operations' do
plugin = DummyPlugin.new
plugin.configure(config_element('ROOT', '', {'@id' => '0'}))
s = BasicStorage.new
s.owner = plugin
assert_nothing_raised{ s.load }
assert s.data
assert_nil s.saved
assert_nothing_raised{ s.save }
assert s.saved
assert{ s.data == s.saved }
assert{ s.data.object_id != s.saved.object_id }
end
test 'all operations work well' do
@s.load
assert_nil @s.get('key')
assert_equal 'value', @s.fetch('key', 'value')
assert_nil @s.get('key')
assert_equal 'value', @s.put('key', 'value')
assert_equal 'value', @s.get('key')
assert_equal 'valuevalue', @s.update('key'){|v| v * 2 }
assert_equal 'valuevalue', @s.delete('key')
end
test 'close and terminate work to operate internal states' do
@s.load
@s.put('k1', 'v1')
@s.put('k2', 'v2')
assert_equal 2, @s.data.size
@s.save
assert_equal @s.data.size, @s.saved.size
assert_nothing_raised{ @s.close }
assert @s.data.empty?
assert !@s.saved.empty?
assert_nothing_raised{ @s.terminate }
assert @s.data.empty?
assert @s.saved.empty?
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_output_as_standard.rb | test/plugin/test_output_as_standard.rb | require_relative '../helper'
require 'fluent/plugin/output'
require 'fluent/plugin/buffer'
require 'fluent/msgpack_factory'
require 'fluent/event'
require 'json'
require 'time'
require 'timeout'
require 'flexmock/test_unit'
module FluentPluginStandardBufferedOutputTest
class DummyBareOutput < Fluent::Plugin::Output
def register(name, &block)
instance_variable_set("@#{name}", block)
end
end
class DummyAsyncOutput < DummyBareOutput
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
end
class DummyAsyncStandardOutput < DummyBareOutput
def write(chunk)
@write ? @write.call(chunk) : nil
end
end
end
class StandardBufferedOutputTest < Test::Unit::TestCase
def create_output(type=:full)
case type
when :bare then FluentPluginStandardBufferedOutputTest::DummyBareOutput.new
when :buffered then FluentPluginStandardBufferedOutputTest::DummyAsyncOutput.new
when :standard then FluentPluginStandardBufferedOutputTest::DummyAsyncStandardOutput.new
else
raise ArgumentError, "unknown type: #{type}"
end
end
def create_metadata(timekey: nil, tag: nil, variables: nil)
Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end
def waiting(seconds)
begin
Timeout.timeout(seconds) do
yield
end
rescue Timeout::Error
STDERR.print(*@i.log.out.logs)
raise
end
end
def test_event_stream
es = Fluent::MultiEventStream.new
es.add(event_time('2016-04-21 17:19:00 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es.add(event_time('2016-04-21 17:19:13 -0700'), {"key" => "my value", "name" => "moris2", "message" => "hello!"})
es.add(event_time('2016-04-21 17:19:25 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es.add(event_time('2016-04-21 17:20:01 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es.add(event_time('2016-04-21 17:20:13 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es.add(event_time('2016-04-21 17:21:32 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es
end
setup do
@i = nil
end
teardown do
if @i
@i.stop unless @i.stopped?
@i.before_shutdown unless @i.before_shutdown?
@i.shutdown unless @i.shutdown?
@i.after_shutdown unless @i.after_shutdown?
@i.close unless @i.closed?
@i.terminate unless @i.terminated?
end
end
sub_test_case 'standard buffered without any chunk keys' do
test '#execute_chunking calls @buffer.write(bulk: true) just once with predefined msgpack format' do
@i = create_output(:standard)
@i.configure(config_element())
@i.start
@i.after_start
m = create_metadata()
es = test_event_stream
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).once.with({m => es}, format: Fluent::Plugin::Output::FORMAT_MSGPACK_STREAM, enqueue: false)
@i.execute_chunking("mytag.test", es)
end
test '#execute_chunking calls @buffer.write(bulk: true) just once with predefined msgpack format, but time will be int if time_as_integer specified' do
@i = create_output(:standard)
@i.configure(config_element('ROOT','',{"time_as_integer"=>"true"}))
@i.start
@i.after_start
m = create_metadata()
es = test_event_stream
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).once.with({m => es}, format: Fluent::Plugin::Output::FORMAT_MSGPACK_STREAM_TIME_INT, enqueue: false)
@i.execute_chunking("mytag.test", es)
end
end
sub_test_case 'standard buffered with tag chunk key' do
test '#execute_chunking calls @buffer.write(bulk: true) just once with predefined msgpack format' do
@i = create_output(:standard)
@i.configure(config_element('ROOT','',{},[config_element('buffer','tag',{'flush_thread_burst_interval' => 0.01})]))
@i.start
@i.after_start
m = create_metadata(tag: "mytag.test")
es = test_event_stream
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).once.with({m => es}, format: Fluent::Plugin::Output::FORMAT_MSGPACK_STREAM, enqueue: false)
@i.execute_chunking("mytag.test", es)
end
test '#execute_chunking calls @buffer.write(bulk: true) just once with predefined msgpack format, but time will be int if time_as_integer specified' do
@i = create_output(:standard)
@i.configure(config_element('ROOT','',{"time_as_integer"=>"true"},[config_element('buffer','tag',{'flush_thread_burst_interval' => 0.01})]))
@i.start
@i.after_start
m = create_metadata(tag: "mytag.test")
es = test_event_stream
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).once.with({m => es}, format: Fluent::Plugin::Output::FORMAT_MSGPACK_STREAM_TIME_INT, enqueue: false)
@i.execute_chunking("mytag.test", es)
end
end
sub_test_case 'standard buffered with time chunk key' do
test '#execute_chunking calls @buffer.write(bulk: true) with predefined msgpack format' do
@i = create_output(:standard)
@i.configure(config_element('ROOT','',{},[config_element('buffer','time',{"timekey" => "60",'flush_thread_burst_interval' => 0.01})]))
@i.start
@i.after_start
m1 = create_metadata(timekey: Time.parse('2016-04-21 17:19:00 -0700').to_i)
m2 = create_metadata(timekey: Time.parse('2016-04-21 17:20:00 -0700').to_i)
m3 = create_metadata(timekey: Time.parse('2016-04-21 17:21:00 -0700').to_i)
es1 = Fluent::MultiEventStream.new
es1.add(event_time('2016-04-21 17:19:00 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:19:13 -0700'), {"key" => "my value", "name" => "moris2", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:19:25 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es2 = Fluent::MultiEventStream.new
es2.add(event_time('2016-04-21 17:20:01 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es2.add(event_time('2016-04-21 17:20:13 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es3 = Fluent::MultiEventStream.new
es3.add(event_time('2016-04-21 17:21:32 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).once.with({
m1 => es1,
m2 => es2,
m3 => es3,
}, format: Fluent::Plugin::Output::FORMAT_MSGPACK_STREAM, enqueue: false)
es = test_event_stream
@i.execute_chunking("mytag.test", es)
end
test '#execute_chunking calls @buffer.write(bulk: true) with predefined msgpack format, but time will be int if time_as_integer specified' do
@i = create_output(:standard)
@i.configure(config_element('ROOT','',{"time_as_integer" => "true"},[config_element('buffer','time',{"timekey" => "60",'flush_thread_burst_interval' => 0.01})]))
@i.start
@i.after_start
m1 = create_metadata(timekey: Time.parse('2016-04-21 17:19:00 -0700').to_i)
m2 = create_metadata(timekey: Time.parse('2016-04-21 17:20:00 -0700').to_i)
m3 = create_metadata(timekey: Time.parse('2016-04-21 17:21:00 -0700').to_i)
es1 = Fluent::MultiEventStream.new
es1.add(event_time('2016-04-21 17:19:00 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:19:13 -0700'), {"key" => "my value", "name" => "moris2", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:19:25 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es2 = Fluent::MultiEventStream.new
es2.add(event_time('2016-04-21 17:20:01 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es2.add(event_time('2016-04-21 17:20:13 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es3 = Fluent::MultiEventStream.new
es3.add(event_time('2016-04-21 17:21:32 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).with({
m1 => es1,
m2 => es2,
m3 => es3,
}, format: Fluent::Plugin::Output::FORMAT_MSGPACK_STREAM_TIME_INT, enqueue: false)
es = test_event_stream
@i.execute_chunking("mytag.test", es)
end
end
sub_test_case 'standard buffered with variable chunk keys' do
test '#execute_chunking calls @buffer.write(bulk: true) with predefined msgpack format' do
@i = create_output(:standard)
@i.configure(config_element('ROOT','',{},[config_element('buffer','key,name',{'flush_thread_burst_interval' => 0.01})]))
@i.start
@i.after_start
m1 = create_metadata(variables: {key: "my value", name: "moris1"})
es1 = Fluent::MultiEventStream.new
es1.add(event_time('2016-04-21 17:19:00 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:19:25 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:20:01 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:20:13 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:21:32 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
m2 = create_metadata(variables: {key: "my value", name: "moris2"})
es2 = Fluent::MultiEventStream.new
es2.add(event_time('2016-04-21 17:19:13 -0700'), {"key" => "my value", "name" => "moris2", "message" => "hello!"})
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).with({
m1 => es1,
m2 => es2,
}, format: Fluent::Plugin::Output::FORMAT_MSGPACK_STREAM, enqueue: false).once
es = test_event_stream
@i.execute_chunking("mytag.test", es)
end
test '#execute_chunking calls @buffer.write(bulk: true) in times of # of variable variations with predefined msgpack format, but time will be int if time_as_integer specified' do
@i = create_output(:standard)
@i.configure(config_element('ROOT','',{"time_as_integer" => "true"},[config_element('buffer','key,name',{'flush_thread_burst_interval' => 0.01})]))
@i.start
@i.after_start
m1 = create_metadata(variables: {key: "my value", name: "moris1"})
es1 = Fluent::MultiEventStream.new
es1.add(event_time('2016-04-21 17:19:00 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:19:25 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:20:01 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:20:13 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:21:32 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
m2 = create_metadata(variables: {key: "my value", name: "moris2"})
es2 = Fluent::MultiEventStream.new
es2.add(event_time('2016-04-21 17:19:13 -0700'), {"key" => "my value", "name" => "moris2", "message" => "hello!"})
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).with({
m1 => es1,
m2 => es2,
}, format: Fluent::Plugin::Output::FORMAT_MSGPACK_STREAM_TIME_INT, enqueue: false).once
es = test_event_stream
@i.execute_chunking("mytag.test", es)
end
end
sub_test_case 'custom format buffered without any chunk keys' do
test '#execute_chunking calls @buffer.write(bulk: true) just once with customized format' do
@i = create_output(:buffered)
@i.register(:format){|tag, time, record| [time, record].to_json }
@i.configure(config_element())
@i.start
@i.after_start
m = create_metadata()
es = test_event_stream
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).once.with({m => es.map{|t,r| [t,r].to_json }}, format: nil, enqueue: false)
@i.execute_chunking("mytag.test", es)
end
end
sub_test_case 'custom format buffered with tag chunk key' do
test '#execute_chunking calls @buffer.write(bulk: true) just once with customized format' do
@i = create_output(:buffered)
@i.register(:format){|tag, time, record| [time, record].to_json }
@i.configure(config_element('ROOT','',{},[config_element('buffer','tag',{'flush_thread_burst_interval' => 0.01})]))
@i.start
@i.after_start
m = create_metadata(tag: "mytag.test")
es = test_event_stream
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).once.with({m => es.map{|t,r| [t,r].to_json}}, format: nil, enqueue: false)
@i.execute_chunking("mytag.test", es)
end
end
sub_test_case 'custom format buffered with time chunk key' do
test '#execute_chunking calls @buffer.write with customized format' do
@i = create_output(:buffered)
@i.register(:format){|tag, time, record| [time, record].to_json }
@i.configure(config_element('ROOT','',{},[config_element('buffer','time',{"timekey" => "60",'flush_thread_burst_interval' => 0.01})]))
@i.start
@i.after_start
m1 = create_metadata(timekey: Time.parse('2016-04-21 17:19:00 -0700').to_i)
m2 = create_metadata(timekey: Time.parse('2016-04-21 17:20:00 -0700').to_i)
m3 = create_metadata(timekey: Time.parse('2016-04-21 17:21:00 -0700').to_i)
es1 = Fluent::MultiEventStream.new
es1.add(event_time('2016-04-21 17:19:00 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:19:13 -0700'), {"key" => "my value", "name" => "moris2", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:19:25 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es2 = Fluent::MultiEventStream.new
es2.add(event_time('2016-04-21 17:20:01 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es2.add(event_time('2016-04-21 17:20:13 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es3 = Fluent::MultiEventStream.new
es3.add(event_time('2016-04-21 17:21:32 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).with({
m1 => es1.map{|t,r| [t,r].to_json },
m2 => es2.map{|t,r| [t,r].to_json },
m3 => es3.map{|t,r| [t,r].to_json },
}, enqueue: false).once
es = test_event_stream
@i.execute_chunking("mytag.test", es)
end
end
sub_test_case 'custom format buffered with variable chunk keys' do
test '#execute_chunking calls @buffer.write in times of # of variable variations with customized format' do
@i = create_output(:buffered)
@i.register(:format){|tag, time, record| [time, record].to_json }
@i.configure(config_element('ROOT','',{},[config_element('buffer','key,name',{'flush_thread_burst_interval' => 0.01})]))
@i.start
@i.after_start
m1 = create_metadata(variables: {key: "my value", name: "moris1"})
es1 = Fluent::MultiEventStream.new
es1.add(event_time('2016-04-21 17:19:00 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:19:25 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:20:01 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:20:13 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
es1.add(event_time('2016-04-21 17:21:32 -0700'), {"key" => "my value", "name" => "moris1", "message" => "hello!"})
m2 = create_metadata(variables: {key: "my value", name: "moris2"})
es2 = Fluent::MultiEventStream.new
es2.add(event_time('2016-04-21 17:19:13 -0700'), {"key" => "my value", "name" => "moris2", "message" => "hello!"})
buffer_mock = flexmock(@i.buffer)
buffer_mock.should_receive(:write).with({
m1 => es1.map{|t,r| [t,r].to_json },
m2 => es2.map{|t,r| [t,r].to_json },
}, enqueue: false).once
es = test_event_stream
@i.execute_chunking("mytag.test", es)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_filter_grep.rb | test/plugin/test_filter_grep.rb | require_relative '../helper'
require 'fluent/plugin/filter_grep'
require 'fluent/test/driver/filter'
class GrepFilterTest < Test::Unit::TestCase
include Fluent
setup do
Fluent::Test.setup
@time = event_time
end
def create_driver(conf = '')
Fluent::Test::Driver::Filter.new(Fluent::Plugin::GrepFilter).configure(conf)
end
sub_test_case 'configure' do
test 'check default' do
d = create_driver
assert_empty(d.instance.regexps)
assert_empty(d.instance.excludes)
end
test "regexpN can contain a space" do
d = create_driver(%[regexp1 message foo])
d.instance._regexp_and_conditions.each { |value|
assert_equal(Regexp.compile(/ foo/), value.pattern)
}
end
test "excludeN can contain a space" do
d = create_driver(%[exclude1 message foo])
d.instance._exclude_or_conditions.each { |value|
assert_equal(Regexp.compile(/ foo/), value.pattern)
}
end
sub_test_case "duplicate key" do
test "flat" do
conf = %[
regexp1 message test
regexp2 message test2
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "section" do
conf = %[
<regexp>
key message
pattern test
</regexp>
<regexp>
key message
pattern test2
</regexp>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "mix" do
conf = %[
regexp1 message test
<regexp>
key message
pattern test
</regexp>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "and/regexp" do
conf = %[
<and>
<regexp>
key message
pattern test
</regexp>
<regexp>
key message
pattern test
</regexp>
</and>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "and/regexp, and/regexp" do
conf = %[
<and>
<regexp>
key message
pattern test
</regexp>
</and>
<and>
<regexp>
key message
pattern test
</regexp>
</and>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "regexp, and/regexp" do
conf = %[
<regexp>
key message
pattern test
</regexp>
<and>
<regexp>
key message
pattern test
</regexp>
</and>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "and/exclude" do
conf = %[
<and>
<exclude>
key message
pattern test
</exclude>
<exclude>
key message
pattern test
</exclude>
</and>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "and/exclude, and/exclude" do
conf = %[
<and>
<exclude>
key message
pattern test
</exclude>
</and>
<and>
<exclude>
key message
pattern test
</exclude>
</and>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "exclude, or/exclude" do
conf = %[
<exclude>
key message
pattern test
</exclude>
<or>
<exclude>
key message
pattern test
</exclude>
</or>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
end
sub_test_case "pattern with slashes" do
test "start with character classes" do
conf = %[
<regexp>
key message
pattern /[a-z]test/
</regexp>
<exclude>
key message
pattern /[A-Z]test/
</exclude>
]
d = create_driver(conf)
assert_equal(/[a-z]test/, d.instance.regexps.first.pattern)
assert_equal(/[A-Z]test/, d.instance.excludes.first.pattern)
end
end
sub_test_case "and/or section" do
test "<and> section cannot include both <regexp> and <exclude>" do
conf = %[
<and>
<regexp>
key message
pattern /test/
</regexp>
<exclude>
key level
pattern /debug/
</exclude>
</and>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "<or> section cannot include both <regexp> and <exclude>" do
conf = %[
<or>
<regexp>
key message
pattern /test/
</regexp>
<exclude>
key level
pattern /debug/
</exclude>
</or>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
end
end
sub_test_case 'filter_stream' do
def messages
[
"2013/01/13T07:02:11.124202 INFO GET /ping",
"2013/01/13T07:02:13.232645 WARN POST /auth",
"2013/01/13T07:02:21.542145 WARN GET /favicon.ico",
"2013/01/13T07:02:43.632145 WARN POST /login",
]
end
def filter(config, msgs)
d = create_driver(config)
d.run {
msgs.each { |msg|
d.feed("filter.test", @time, {'foo' => 'bar', 'message' => msg})
}
}
d.filtered_records
end
test 'empty config' do
filtered_records = filter('', messages)
assert_equal(4, filtered_records.size)
end
test 'regexpN' do
filtered_records = filter('regexp1 message WARN', messages)
assert_equal(3, filtered_records.size)
assert_block('only WARN logs') do
filtered_records.all? { |r|
!r['message'].include?('INFO')
}
end
end
test 'excludeN' do
filtered_records = filter('exclude1 message favicon', messages)
assert_equal(3, filtered_records.size)
assert_block('remove favicon logs') do
filtered_records.all? { |r|
!r['message'].include?('favicon')
}
end
end
test 'regexps' do
conf = %[
<regexp>
key message
pattern WARN
</regexp>
]
filtered_records = filter(conf, messages)
assert_equal(3, filtered_records.size)
assert_block('only WARN logs') do
filtered_records.all? { |r|
!r['message'].include?('INFO')
}
end
end
test 'excludes' do
conf = %[
<exclude>
key message
pattern favicon
</exclude>
]
filtered_records = filter(conf, messages)
assert_equal(3, filtered_records.size)
assert_block('remove favicon logs') do
filtered_records.all? { |r|
!r['message'].include?('favicon')
}
end
end
sub_test_case 'with invalid sequence' do
def messages
[
"\xff".force_encoding('UTF-8'),
]
end
test "don't raise an exception" do
assert_nothing_raised {
filter(%[regexp1 message WARN], ["\xff".force_encoding('UTF-8')])
}
end
end
sub_test_case "and/or section" do
def records
[
{ "time" => "2013/01/13T07:02:11.124202", "level" => "INFO", "method" => "GET", "path" => "/ping" },
{ "time" => "2013/01/13T07:02:13.232645", "level" => "WARN", "method" => "POST", "path" => "/auth" },
{ "time" => "2013/01/13T07:02:21.542145", "level" => "WARN", "method" => "GET", "path" => "/favicon.ico" },
{ "time" => "2013/01/13T07:02:43.632145", "level" => "WARN", "method" => "POST", "path" => "/login" },
]
end
def filter(conf, records)
d = create_driver(conf)
d.run do
records.each do |record|
d.feed("filter.test", @time, record)
end
end
d.filtered_records
end
test "basic and/regexp" do
conf = %[
<and>
<regexp>
key level
pattern ^INFO$
</regexp>
<regexp>
key method
pattern ^GET$
</regexp>
</and>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(0), filtered_records)
end
test "basic or/exclude" do
conf = %[
<or>
<exclude>
key level
pattern ^INFO$
</exclude>
<exclude>
key method
pattern ^GET$
</exclude>
</or>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(1, 3), filtered_records)
end
test "basic or/regexp" do
conf = %[
<or>
<regexp>
key level
pattern ^INFO$
</regexp>
<regexp>
key method
pattern ^GET$
</regexp>
</or>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(0, 2), filtered_records)
end
test "basic and/exclude" do
conf = %[
<and>
<exclude>
key level
pattern ^INFO$
</exclude>
<exclude>
key method
pattern ^GET$
</exclude>
</and>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(1, 2, 3), filtered_records)
end
sub_test_case "and/or combo" do
def records
[
{ "time" => "2013/01/13T07:02:11.124202", "level" => "INFO", "method" => "GET", "path" => "/ping" },
{ "time" => "2013/01/13T07:02:13.232645", "level" => "WARN", "method" => "POST", "path" => "/auth" },
{ "time" => "2013/01/13T07:02:21.542145", "level" => "WARN", "method" => "GET", "path" => "/favicon.ico" },
{ "time" => "2013/01/13T07:02:43.632145", "level" => "WARN", "method" => "POST", "path" => "/login" },
{ "time" => "2013/01/13T07:02:44.959307", "level" => "ERROR", "method" => "POST", "path" => "/login" },
{ "time" => "2013/01/13T07:02:45.444992", "level" => "ERROR", "method" => "GET", "path" => "/ping" },
{ "time" => "2013/01/13T07:02:51.247941", "level" => "WARN", "method" => "GET", "path" => "/info" },
{ "time" => "2013/01/13T07:02:53.108366", "level" => "WARN", "method" => "POST", "path" => "/ban" },
]
end
test "and/regexp, or/exclude" do
conf = %[
<and>
<regexp>
key level
pattern ^ERROR|WARN$
</regexp>
<regexp>
key method
pattern ^GET|POST$
</regexp>
</and>
<or>
<exclude>
key level
pattern ^WARN$
</exclude>
<exclude>
key method
pattern ^GET$
</exclude>
</or>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(4), filtered_records)
end
test "and/regexp, and/exclude" do
conf = %[
<and>
<regexp>
key level
pattern ^ERROR|WARN$
</regexp>
<regexp>
key method
pattern ^GET|POST$
</regexp>
</and>
<and>
<exclude>
key level
pattern ^WARN$
</exclude>
<exclude>
key method
pattern ^GET$
</exclude>
</and>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(1, 3, 4, 5, 7), filtered_records)
end
test "or/regexp, and/exclude" do
conf = %[
<or>
<regexp>
key level
pattern ^ERROR|WARN$
</regexp>
<regexp>
key method
pattern ^GET|POST$
</regexp>
</or>
<and>
<exclude>
key level
pattern ^WARN$
</exclude>
<exclude>
key method
pattern ^GET$
</exclude>
</and>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(0, 1, 3, 4, 5, 7), filtered_records)
end
test "or/regexp, or/exclude" do
conf = %[
<or>
<regexp>
key level
pattern ^ERROR|WARN$
</regexp>
<regexp>
key method
pattern ^GET|POST$
</regexp>
</or>
<or>
<exclude>
key level
pattern ^WARN$
</exclude>
<exclude>
key method
pattern ^GET$
</exclude>
</or>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(4), filtered_records)
end
test "regexp, and/regexp" do
conf = %[
<and>
<regexp>
key level
pattern ^ERROR|WARN$
</regexp>
<regexp>
key method
pattern ^GET|POST$
</regexp>
</and>
<regexp>
key path
pattern ^/login$
</regexp>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(3, 4), filtered_records)
end
test "regexp, or/exclude" do
conf = %[
<regexp>
key level
pattern ^ERROR|WARN$
</regexp>
<regexp>
key method
pattern ^GET|POST$
</regexp>
<or>
<exclude>
key level
pattern ^WARN$
</exclude>
<exclude>
key method
pattern ^GET$
</exclude>
</or>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(4), filtered_records)
end
test "regexp, and/exclude" do
conf = %[
<regexp>
key level
pattern ^ERROR|WARN$
</regexp>
<regexp>
key method
pattern ^GET|POST$
</regexp>
<and>
<exclude>
key level
pattern ^WARN$
</exclude>
<exclude>
key method
pattern ^GET$
</exclude>
</and>
]
filtered_records = filter(conf, records)
assert_equal(records.values_at(1, 3, 4, 5, 7), filtered_records)
end
end
end
end
sub_test_case 'nested keys' do
def messages
[
{"nest1" => {"nest2" => "INFO"}},
{"nest1" => {"nest2" => "WARN"}},
{"nest1" => {"nest2" => "WARN"}}
]
end
def filter(config, msgs)
d = create_driver(config)
d.run {
msgs.each { |msg|
d.feed("filter.test", @time, {'foo' => 'bar', 'message' => msg})
}
}
d.filtered_records
end
test 'regexps' do
conf = %[
<regexp>
key $.message.nest1.nest2
pattern WARN
</regexp>
]
filtered_records = filter(conf, messages)
assert_equal(2, filtered_records.size)
assert_block('only 2 nested logs') do
filtered_records.all? { |r|
r['message']['nest1']['nest2'] == 'WARN'
}
end
end
test 'excludes' do
conf = %[
<exclude>
key $.message.nest1.nest2
pattern WARN
</exclude>
]
filtered_records = filter(conf, messages)
assert_equal(1, filtered_records.size)
assert_block('only 2 nested logs') do
filtered_records.all? { |r|
r['message']['nest1']['nest2'] == 'INFO'
}
end
end
end
sub_test_case 'grep non-string jsonable values' do
def filter(msg, config = 'regexp1 message 0')
d = create_driver(config)
d.run do
d.feed("filter.test", @time, {'foo' => 'bar', 'message' => msg})
end
d.filtered_records
end
data(
'array' => ["0"],
'hash' => ["0" => "0"],
'integer' => 0,
'float' => 0.1)
test "value" do |data|
filtered_records = filter(data)
assert_equal(1, filtered_records.size)
end
test "value boolean" do
filtered_records = filter(true, %[regexp1 message true])
assert_equal(1, filtered_records.size)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_compressable.rb | test/plugin/test_compressable.rb | require_relative '../helper'
require 'fluent/plugin/compressable'
class CompressableTest < Test::Unit::TestCase
include Fluent::Plugin::Compressable
def compress_assert_equal(expected, actual)
e = Zlib::GzipReader.new(StringIO.new(expected)).read
a = Zlib::GzipReader.new(StringIO.new(actual)).read
assert_equal(e, a)
end
sub_test_case '#compress' do
setup do
@src = 'text data for compressing' * 5
@gzipped_src = compress(@src)
end
test 'compress data' do
assert compress(@src).size < @src.size
assert_not_equal @gzipped_src, @src
end
test 'write compressed data to IO with output_io option' do
io = StringIO.new
compress(@src, output_io: io)
compress_assert_equal @gzipped_src, io.string
end
end
sub_test_case '#decompress' do
setup do
@src = 'text data for compressing' * 5
@gzipped_src = compress(@src)
end
test 'decompress compressed data' do
assert_equal @src, decompress(@gzipped_src)
end
test 'write decompressed data to IO with output_io option' do
io = StringIO.new
decompress(@gzipped_src, output_io: io)
assert_equal @src, io.string
end
test 'return decompressed string with output_io option' do
io = StringIO.new(@gzipped_src)
assert_equal @src, decompress(input_io: io)
end
test 'decompress multiple compressed data' do
src1 = 'text data'
src2 = 'text data2'
gzipped_src = compress(src1) + compress(src2)
assert_equal src1 + src2, decompress(gzipped_src)
end
test 'decompress with input_io and output_io' do
input_io = StringIO.new(@gzipped_src)
output_io = StringIO.new
decompress(input_io: input_io, output_io: output_io)
assert_equal @src, output_io.string
end
test 'decompress multiple compressed data with input_io and output_io' do
src1 = 'text data'
src2 = 'text data2'
gzipped_src = compress(src1) + compress(src2)
input_io = StringIO.new(gzipped_src)
output_io = StringIO.new
decompress(input_io: input_io, output_io: output_io)
assert_equal src1 + src2, output_io.string
end
test 'return the received value as it is with empty string or nil' do
assert_equal nil, decompress
assert_equal nil, decompress(nil)
assert_equal '', decompress('')
assert_equal '', decompress('', output_io: StringIO.new)
end
test 'decompress large zstd compressed data' do
src1 = SecureRandom.random_bytes(1024)
src2 = SecureRandom.random_bytes(1024)
src3 = SecureRandom.random_bytes(1024)
zstd_compressed_data = compress(src1, type: :zstd) + compress(src2, type: :zstd) + compress(src3, type: :zstd)
assert_equal src1 + src2 + src3, decompress(zstd_compressed_data, type: :zstd)
end
test 'decompress large zstd compressed data with input_io and output_io' do
src1 = SecureRandom.random_bytes(1024)
src2 = SecureRandom.random_bytes(1024)
src3 = SecureRandom.random_bytes(1024)
zstd_compressed_data = compress(src1, type: :zstd) + compress(src2, type: :zstd) + compress(src3, type: :zstd)
input_io = StringIO.new(zstd_compressed_data)
output_io = StringIO.new
output_io.set_encoding(src1.encoding)
decompress(input_io: input_io, output_io: output_io, type: :zstd)
assert_equal src1 + src2 + src3, output_io.string
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_multi_output.rb | test/plugin/test_multi_output.rb | require_relative '../helper'
require 'fluent/plugin/multi_output'
require 'fluent/event'
require 'json'
require 'time'
require 'timeout'
module FluentPluginMultiOutputTest
class DummyMultiOutput < Fluent::Plugin::MultiOutput
attr_reader :events
def initialize
super
@events = []
end
def configure(conf)
super
end
def process(tag, es)
es.each do |time, record|
@events << [tag, time, record]
end
end
end
class DummyCompatMultiOutput < Fluent::Plugin::MultiOutput
def initialize
super
@compat = true
end
def configure(conf)
super
end
def process(tag, es)
# ...
end
end
class Dummy1Output < Fluent::Plugin::Output
Fluent::Plugin.register_output('dummy_test_multi_output_1', self)
attr_reader :configured
def configure(conf)
super
@configured = true
end
def process(tag, es)
end
end
class Dummy2Output < Fluent::Plugin::Output
Fluent::Plugin.register_output('dummy_test_multi_output_2', self)
attr_reader :configured
def configure(conf)
super
@configured = true
end
def process(tag, es)
end
end
class Dummy3Output < Fluent::Plugin::Output
Fluent::Plugin.register_output('dummy_test_multi_output_3', self)
attr_reader :configured
def configure(conf)
super
@configured = true
end
def process(tag, es)
end
end
class Dummy4Output < Fluent::Plugin::Output
Fluent::Plugin.register_output('dummy_test_multi_output_4', self)
attr_reader :configured
def configure(conf)
super
@configured = true
end
def process(tag, es)
end
end
end
class MultiOutputTest < Test::Unit::TestCase
def create_output(type=:multi)
case type
when :compat_multi
FluentPluginMultiOutputTest::DummyCompatMultiOutput.new
else
FluentPluginMultiOutputTest::DummyMultiOutput.new
end
end
sub_test_case 'basic multi output plugin' do
setup do
Fluent::Test.setup
@i = create_output()
end
teardown do
@i.log.out.reset
end
test '#configure raises error if <store> sections are missing' do
conf = config_element('ROOT', '', { '@type' => 'dummy_test_multi_output' }, [])
assert_raise Fluent::ConfigError do
@i.configure(conf)
end
end
test '#configure initialize child plugins and call these #configure' do
assert_equal [], @i.outputs
conf = config_element('ROOT', '', { '@type' => 'dummy_test_multi_output' },
[
config_element('store', '', { '@type' => 'dummy_test_multi_output_1' }),
config_element('store', '', { '@type' => 'dummy_test_multi_output_2' }),
config_element('store', '', { '@type' => 'dummy_test_multi_output_3' }),
config_element('store', '', { '@type' => 'dummy_test_multi_output_4' }),
]
)
@i.configure(conf)
assert_equal 4, @i.outputs.size
assert @i.outputs[0].is_a? FluentPluginMultiOutputTest::Dummy1Output
assert @i.outputs[0].configured
assert @i.outputs[1].is_a? FluentPluginMultiOutputTest::Dummy2Output
assert @i.outputs[1].configured
assert @i.outputs[2].is_a? FluentPluginMultiOutputTest::Dummy3Output
assert @i.outputs[2].configured
assert @i.outputs[3].is_a? FluentPluginMultiOutputTest::Dummy4Output
assert @i.outputs[3].configured
end
test '#configure warns if "type" is used in <store> sections instead of "@type"' do
assert_equal [], @i.log.out.logs
conf = config_element('ROOT', '', { '@type' => 'dummy_test_multi_output' },
[
config_element('store', '', { 'type' => 'dummy_test_multi_output_1' }),
config_element('store', '', { 'type' => 'dummy_test_multi_output_2' }),
config_element('store', '', { 'type' => 'dummy_test_multi_output_3' }),
config_element('store', '', { 'type' => 'dummy_test_multi_output_4' }),
]
)
@i.configure(conf)
assert_equal 4, @i.outputs.size
log_size_for_multi_output_itself = 4
log_size_for_metrics_plugin_helper = 4
expected_warn_log_size = log_size_for_multi_output_itself + log_size_for_metrics_plugin_helper
logs = @i.log.out.logs
assert{ logs.count{|log| log.include?('[warn]') && log.include?("'type' is deprecated parameter name. use '@type' instead.") } == expected_warn_log_size }
end
test '#emit_events calls #process always' do
conf = config_element('ROOT', '', { '@type' => 'dummy_test_multi_output' },
[
config_element('store', '', { '@type' => 'dummy_test_multi_output_1' }),
config_element('store', '', { '@type' => 'dummy_test_multi_output_2' }),
config_element('store', '', { '@type' => 'dummy_test_multi_output_3' }),
config_element('store', '', { '@type' => 'dummy_test_multi_output_4' }),
]
)
@i.configure(conf)
@i.start
assert @i.events.empty?
@i.emit_events(
'test.tag',
Fluent::ArrayEventStream.new(
[
[event_time(), {"message" => "multi test 1"}],
[event_time(), {"message" => "multi test 1"}],
]
)
)
assert_equal 2, @i.events.size
end
test 'can use metrics plugins and fallback methods' do
conf = config_element('ROOT', '', { '@type' => 'dummy_test_multi_output' },
[
config_element('store', '', { 'type' => 'dummy_test_multi_output_1' }),
config_element('store', '', { 'type' => 'dummy_test_multi_output_2' }),
config_element('store', '', { 'type' => 'dummy_test_multi_output_3' }),
config_element('store', '', { 'type' => 'dummy_test_multi_output_4' }),
]
)
@i.configure(conf)
%w[num_errors_metrics emit_count_metrics emit_size_metrics emit_records_metrics].each do |metric_name|
assert_true @i.instance_variable_get(:"@#{metric_name}").is_a?(Fluent::Plugin::Metrics)
end
assert_equal 0, @i.num_errors
assert_equal 0, @i.emit_count
assert_equal 0, @i.emit_size
assert_equal 0, @i.emit_records
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_forward.rb | test/plugin/test_in_forward.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/test/startup_shutdown'
require 'base64'
require 'fluent/env'
require 'fluent/event'
require 'fluent/plugin/in_forward'
require 'fluent/plugin/compressable'
require 'timecop'
class ForwardInputTest < Test::Unit::TestCase
include Fluent::Plugin::Compressable
def setup
Fluent::Test.setup
@responses = [] # for testing responses after sending data
@d = nil
# forward plugin uses TCP and UDP sockets on the same port number
@port = unused_port(protocol: :all)
end
def teardown
@d.instance_shutdown if @d
@port = nil
end
SHARED_KEY = 'foobar1'
USER_NAME = 'tagomoris'
USER_PASSWORD = 'fluentd'
def base_config
%[
port #{@port}
bind 127.0.0.1
]
end
LOCALHOST_HOSTNAME_GETTER = ->(){sock = UDPSocket.new(::Socket::AF_INET); sock.do_not_reverse_lookup = false; sock.connect("127.0.0.1", 2048); sock.peeraddr[2] }
LOCALHOST_HOSTNAME = LOCALHOST_HOSTNAME_GETTER.call
DUMMY_SOCK = Struct.new(:remote_host, :remote_addr, :remote_port).new(LOCALHOST_HOSTNAME, "127.0.0.1", 0)
def config_auth
base_config + %[
<security>
self_hostname localhost
shared_key foobar1
user_auth true
<user>
username #{USER_NAME}
password #{USER_PASSWORD}
</user>
<client>
network 127.0.0.0/8
shared_key #{SHARED_KEY}
users ["#{USER_NAME}"]
</client>
</security>
]
end
def create_driver(conf=base_config)
Fluent::Test::Driver::Input.new(Fluent::Plugin::ForwardInput).configure(conf)
end
sub_test_case '#configure' do
test 'simple' do
@d = d = create_driver
assert_equal @port, d.instance.port
assert_equal '127.0.0.1', d.instance.bind
assert_equal 0.5, d.instance.blocking_timeout
assert !d.instance.backlog
end
test 'auth' do
@d = d = create_driver(config_auth)
assert_equal @port, d.instance.port
assert_equal '127.0.0.1', d.instance.bind
assert !d.instance.backlog
assert d.instance.security
assert_equal 1, d.instance.security.users.size
assert_equal 1, d.instance.security.clients.size
end
data(tag: "tag",
add_tag_prefix: "add_tag_prefix")
test 'tag parameters' do |data|
assert_raise(Fluent::ConfigError.new("'#{data}' parameter must not be empty")) {
create_driver(base_config + "#{data} ''")
}
end
test 'send_keepalive_packet is disabled by default' do
@d = d = create_driver(config_auth)
assert_false d.instance.send_keepalive_packet
end
test 'send_keepalive_packet can be enabled' do
@d = d = create_driver(config_auth + %[
send_keepalive_packet true
])
assert_true d.instance.send_keepalive_packet
end
test 'both send_keepalive_packet and deny_keepalive cannot be enabled' do
assert_raise(Fluent::ConfigError.new("both 'send_keepalive_packet' and 'deny_keepalive' cannot be set to true")) do
create_driver(config_auth + %[
send_keepalive_packet true
deny_keepalive true
])
end
end
end
sub_test_case 'message' do
test 'time' do
time = event_time("2011-01-02 13:14:15 UTC")
begin
Timecop.freeze(Time.at(time))
@d = d = create_driver
records = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
d.run(expect_records: records.length, timeout: 5) do
records.each {|tag, _time, record|
send_data packer.write([tag, 0, record]).to_s
}
end
assert_equal(records, d.events.sort_by {|a| a[0] })
ensure
Timecop.return
end
end
test 'plain' do
@d = d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
records = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
d.run(expect_records: records.length, timeout: 5) do
records.each {|tag, _time, record|
send_data packer.write([tag, _time, record]).to_s
}
end
assert_equal(records, d.events)
end
test 'time_as_integer' do
@d = d = create_driver
time_i = event_time("2011-01-02 13:14:15 UTC").to_i
records = [
["tag1", time_i, {"a"=>1}],
["tag2", time_i, {"a"=>2}],
]
d.run(expect_records: records.length, timeout: 5) do
records.each {|tag, _time, record|
send_data packer.write([tag, _time, record]).to_s
}
end
assert_equal(records, d.events)
end
test 'skip_invalid_event' do
@d = d = create_driver(base_config + "skip_invalid_event true")
time = event_time("2011-01-02 13:14:15 UTC")
records = [
["tag1", time, {"a" => 1}],
["tag2", time, {"a" => 2}],
]
d.run(shutdown: false, expect_records: 2, timeout: 10) do
entries = []
# These entries are skipped
entries << ['tag1', true, {'a' => 3}] << ['tag2', time, 'invalid record']
entries += records.map { |tag, _time, record| [tag, _time, record] }
entries.each {|tag, _time, record|
# Without ack, logs are sometimes not saved to logs during test.
send_data packer.write([tag, _time, record]).to_s #, try_to_receive_response: true
}
end
logs = d.instance.log.logs
assert_equal 2, logs.count { |line| line =~ /got invalid event and drop it/ }
assert_equal records[0], d.events[0]
assert_equal records[1], d.events[1]
d.instance_shutdown
end
test 'json_using_integer_time' do
@d = d = create_driver
time_i = event_time("2011-01-02 13:14:15 UTC").to_i
records = [
["tag1", time_i, {"a"=>1}],
["tag2", time_i, {"a"=>2}],
]
d.run(expect_records: records.length, timeout: 20) do
records.each {|tag, _time, record|
send_data [tag, _time, record].to_json
}
end
assert_equal(records, d.events.sort_by {|a| a[0] })
end
test 'json_with_newline' do
@d = d = create_driver
time_i = event_time("2011-01-02 13:14:15 UTC").to_i
records = [
["tag1", time_i, {"a"=>1}],
["tag2", time_i, {"a"=>2}],
]
d.run(expect_records: records.length, timeout: 20, shutdown: true) do
records.each {|tag, _time, record|
send_data [tag, _time, record].to_json + "\n"
}
end
assert_equal(records, d.events.sort_by {|a| a[0] })
end
end
sub_test_case 'forward' do
data(tcp: {
options: {
auth: false
}
},
auth: {
options: {
auth: true
}
})
test 'plain' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config)
time = event_time("2011-01-02 13:14:15 UTC")
records = [
["tag1", time, {"a"=>1}],
["tag1", time, {"a"=>2}]
]
d.run(expect_records: records.length, timeout: 20) do
entries = []
records.each {|tag, _time, record|
entries << [_time, record]
}
send_data packer.write(["tag1", entries]).to_s, **options
end
assert_equal(records, d.events)
end
data(tag: {
param: "tag new_tag",
result: "new_tag"
},
add_tag_prefix: {
param: "add_tag_prefix new_prefix",
result: "new_prefix.tag1"
})
test 'tag parameters' do |data|
@d = create_driver(base_config + data[:param])
time = event_time("2011-01-02 13:14:15 UTC")
options = {auth: false}
records = [
["tag1", time, {"a"=>1}],
["tag1", time, {"a"=>2}],
]
@d.run(expect_records: records.length, timeout: 20) do
entries = []
records.each {|tag, _time, record|
entries << [_time, record]
}
send_data packer.write(["tag1", entries]).to_s, **options
end
assert_equal(data[:result], @d.events[0][0])
end
data(tcp: {
options: {
auth: false
}
},
auth: {
options: {
auth: true
}
})
test 'time_as_integer' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config)
time_i = event_time("2011-01-02 13:14:15 UTC")
records = [
["tag1", time_i, {"a"=>1}],
["tag1", time_i, {"a"=>2}]
]
d.run(expect_records: records.length, timeout: 20) do
entries = []
records.each {|tag, _time, record|
entries << [_time, record]
}
send_data packer.write(["tag1", entries]).to_s, **options
end
assert_equal(records, d.events)
end
data(tcp: {
options: {
auth: false
}
},
auth: {
options: {
auth: true
}
})
test 'skip_invalid_event' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config + "skip_invalid_event true")
time = event_time("2011-01-02 13:14:15 UTC")
records = [
["tag1", time, {"a" => 1}],
["tag1", time, {"a" => 2}],
]
d.run(shutdown: false, expect_records: records.length, timeout: 20) do
entries = records.map { |tag, _time, record| [_time, record] }
# These entries are skipped
entries << ['invalid time', {'a' => 3}] << [time, 'invalid record']
send_data packer.write(["tag1", entries]).to_s, **options
end
logs = d.instance.log.out.logs
assert{ logs.count{|line| line =~ /skip invalid event/ } == 2 }
d.instance_shutdown
end
end
sub_test_case 'packed forward' do
data(tcp: {
options: {
auth: false
}
},
auth: {
options: {
auth: true
}
})
test 'plain' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config)
time = event_time("2011-01-02 13:14:15 UTC")
records = [
["tag1", time, {"a"=>1}],
["tag1", time, {"a"=>2}],
]
d.run(expect_records: records.length, timeout: 20) do
entries = ''
records.each {|_tag, _time, record|
packer(entries).write([_time, record]).flush
}
send_data packer.write(["tag1", entries]).to_s, **options
end
assert_equal(records, d.events)
end
data(tag: {
param: "tag new_tag",
result: "new_tag"
},
add_tag_prefix: {
param: "add_tag_prefix new_prefix",
result: "new_prefix.tag1"
})
test 'tag parameters' do |data|
@d = create_driver(base_config + data[:param])
time = event_time("2011-01-02 13:14:15 UTC")
options = {auth: false}
records = [
["tag1", time, {"a"=>1}],
["tag1", time, {"a"=>2}],
]
@d.run(expect_records: records.length, timeout: 20) do
entries = ''
records.each {|_tag, _time, record|
packer(entries).write([_time, record]).flush
}
send_data packer.write(["tag1", entries]).to_s, **options
end
assert_equal(data[:result], @d.events[0][0])
end
data(tcp: {
options: {
auth: false
}
},
auth: {
options: {
auth: true
}
})
test 'time_as_integer' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config)
time_i = event_time("2011-01-02 13:14:15 UTC").to_i
records = [
["tag1", time_i, {"a"=>1}],
["tag1", time_i, {"a"=>2}],
]
d.run(expect_records: records.length, timeout: 20) do
entries = ''
records.each {|tag, _time, record|
packer(entries).write([_time, record]).flush
}
send_data packer.write(["tag1", entries]).to_s, **options
end
assert_equal(records, d.events)
end
data(tcp: {
options: {
auth: false
}
},
auth: {
options: {
auth: true
}
})
test 'skip_invalid_event' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config + "skip_invalid_event true")
time = event_time("2011-01-02 13:14:15 UTC")
records = [
["tag1", time, {"a" => 1}],
["tag1", time, {"a" => 2}],
]
d.run(shutdown: false, expect_records: records.length, timeout: 20) do
entries = records.map { |tag, _time, record| [_time, record] }
# These entries are skipped
entries << ['invalid time', {'a' => 3}] << [time, 'invalid record']
packed_entries = ''
entries.each { |_time, record|
packer(packed_entries).write([_time, record]).flush
}
send_data packer.write(["tag1", packed_entries]).to_s, **options
end
logs = d.instance.log.logs
assert_equal 2, logs.count { |line| line =~ /skip invalid event/ }
d.instance_shutdown
end
end
sub_test_case 'compressed packed forward' do
test 'set_compress_to_option_gzip' do
@d = d = create_driver
time_i = event_time("2011-01-02 13:14:15 UTC").to_i
events = [
["tag1", time_i, {"a"=>1}],
["tag1", time_i, {"a"=>2}]
]
# create compressed entries
entries = ''
events.each do |_tag, _time, record|
v = [_time, record].to_msgpack
entries << compress(v)
end
chunk = ["tag1", entries, { 'compressed' => 'gzip' }].to_msgpack
d.run do
Fluent::MessagePackFactory.msgpack_unpacker.feed_each(chunk) do |obj|
option = d.instance.send(:on_message, obj, chunk.size, DUMMY_SOCK)
assert_equal 'gzip', option['compressed']
end
end
assert_equal events, d.events
end
test 'set_compress_to_option_zstd' do
@d = d = create_driver
time_i = event_time("2011-01-02 13:14:15 UTC").to_i
events = [
["tag1", time_i, {"a"=>1}],
["tag1", time_i, {"a"=>2}]
]
# create compressed entries
entries = ''
events.each do |_tag, _time, record|
v = [_time, record].to_msgpack
entries << compress(v, type: :zstd)
end
chunk = ["tag1", entries, { 'compressed' => 'zstd' }].to_msgpack
d.run do
Fluent::MessagePackFactory.msgpack_unpacker.feed_each(chunk) do |obj|
option = d.instance.send(:on_message, obj, chunk.size, DUMMY_SOCK)
assert_equal 'zstd', option['compressed']
end
end
assert_equal events, d.events
end
test 'create_CompressedMessagePackEventStream_with_gzip_compress_option' do
@d = d = create_driver
time_i = event_time("2011-01-02 13:14:15 UTC").to_i
events = [
["tag1", time_i, {"a"=>1}],
["tag1", time_i, {"a"=>2}]
]
# create compressed entries
entries = ''
events.each do |_tag, _time, record|
v = [_time, record].to_msgpack
entries << compress(v)
end
chunk = ["tag1", entries, { 'compressed' => 'gzip' }].to_msgpack
d.run do
Fluent::MessagePackFactory.msgpack_unpacker.feed_each(chunk) do |obj|
option = d.instance.send(:on_message, obj, chunk.size, DUMMY_SOCK)
assert_equal 'gzip', option['compressed']
end
end
end
test 'create_CompressedMessagePackEventStream_with_zstd_compress_option' do
@d = d = create_driver
time_i = event_time("2011-01-02 13:14:15 UTC").to_i
events = [
["tag1", time_i, {"a"=>1}],
["tag1", time_i, {"a"=>2}]
]
# create compressed entries
entries = ''
events.each do |_tag, _time, record|
v = [_time, record].to_msgpack
entries << compress(v, type: :zstd)
end
chunk = ["tag1", entries, { 'compressed' => 'zstd' }].to_msgpack
d.run do
Fluent::MessagePackFactory.msgpack_unpacker.feed_each(chunk) do |obj|
option = d.instance.send(:on_message, obj, chunk.size, DUMMY_SOCK)
assert_equal 'zstd', option['compressed']
end
end
end
end
sub_test_case 'warning' do
test 'send_large_chunk_warning' do
@d = d = create_driver(base_config + %[
chunk_size_warn_limit 16M
chunk_size_limit 32M
])
time = event_time("2014-04-25 13:14:15 UTC")
# generate over 16M chunk
str = "X" * 1024 * 1024
chunk = [ "test.tag", (0...16).map{|i| [time + i, {"data" => str}] } ].to_msgpack
assert chunk.size > (16 * 1024 * 1024)
assert chunk.size < (32 * 1024 * 1024)
d.run(shutdown: false) do
Fluent::MessagePackFactory.msgpack_unpacker.feed_each(chunk) do |obj|
d.instance.send(:on_message, obj, chunk.size, DUMMY_SOCK)
end
end
# check emitted data
emits = d.events
assert_equal 16, emits.size
assert emits.map(&:first).all?("test.tag")
assert_equal (0...16).to_a, emits.map{|_tag, t, _record| t - time }
# check log
logs = d.instance.log.logs
assert_equal 1, logs.count{|line|
line =~ / \[warn\]: Input chunk size is larger than 'chunk_size_warn_limit':/ &&
line =~ / tag="test.tag" host="#{LOCALHOST_HOSTNAME}" limit=16777216 size=16777501/
}, "large chunk warning is not logged"
d.instance_shutdown
end
test 'send_large_chunk_only_warning' do
@d = d = create_driver(base_config + %[
chunk_size_warn_limit 16M
])
time = event_time("2014-04-25 13:14:15 UTC")
# generate over 16M chunk
str = "X" * 1024 * 1024
chunk = [ "test.tag", (0...16).map{|i| [time + i, {"data" => str}] } ].to_msgpack
d.run(shutdown: false) do
Fluent::MessagePackFactory.msgpack_unpacker.feed_each(chunk) do |obj|
d.instance.send(:on_message, obj, chunk.size, DUMMY_SOCK)
end
end
# check log
logs = d.instance.log.logs
assert_equal 1, logs.count{ |line|
line =~ / \[warn\]: Input chunk size is larger than 'chunk_size_warn_limit':/ &&
line =~ / tag="test.tag" host="#{LOCALHOST_HOSTNAME}" limit=16777216 size=16777501/
}, "large chunk warning is not logged"
d.instance_shutdown
end
test 'send_large_chunk_limit' do
@d = d = create_driver(base_config + %[
chunk_size_warn_limit 16M
chunk_size_limit 32M
])
time = event_time("2014-04-25 13:14:15 UTC")
# generate over 32M chunk
str = "X" * 1024 * 1024
chunk = [ "test.tag", (0...32).map{|i| [time + i, {"data" => str}] } ].to_msgpack
assert chunk.size > (32 * 1024 * 1024)
# d.run => send_data
d.run(shutdown: false) do
Fluent::MessagePackFactory.msgpack_unpacker.feed_each(chunk) do |obj|
d.instance.send(:on_message, obj, chunk.size, DUMMY_SOCK)
end
end
# check emitted data
emits = d.events
assert_equal 0, emits.size
# check log
logs = d.instance.log.logs
assert_equal 1, logs.count{|line|
line =~ / \[warn\]: Input chunk size is larger than 'chunk_size_limit', dropped:/ &&
line =~ / tag="test.tag" host="#{LOCALHOST_HOSTNAME}" limit=33554432 size=33554989/
}, "large chunk warning is not logged"
d.instance_shutdown
end
data('string chunk' => 'broken string',
'integer chunk' => 10)
test 'send_broken_chunk' do |data|
@d = d = create_driver
# d.run => send_data
d.run(shutdown: false) do
d.instance.send(:on_message, data, 1000000000, DUMMY_SOCK)
end
# check emitted data
assert_equal 0, d.events.size
# check log
logs = d.instance.log.logs
assert_equal 1, logs.count{|line|
line =~ / \[warn\]: incoming chunk is broken: host="#{LOCALHOST_HOSTNAME}" msg=#{data.inspect}/
}, "should not accept broken chunk"
d.instance_shutdown
end
end
sub_test_case 'respond to required ack' do
data(tcp: {
options: {
auth: false
}
},
auth: {
options: {
auth: true
}
})
test 'message' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config)
time = event_time("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}]
]
expected_acks = []
d.run(expect_records: events.size) do
events.each {|tag, _time, record|
op = { 'chunk' => Base64.encode64(record.object_id.to_s) }
expected_acks << op['chunk']
send_data([tag, _time, record, op].to_msgpack, try_to_receive_response: true, response_timeout: 1, **options)
}
end
assert_equal events, d.events
assert_equal expected_acks, @responses.map { |res| MessagePack.unpack(res)['ack'] }
end
# FIX: response is not pushed into @responses because IO.select has been blocked until InputForward shutdowns
data(tcp: {
options: {
auth: false
}
},
auth: {
options: {
auth: true
}
})
test 'forward' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config)
time = event_time("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
["tag1", time, {"a"=>2}]
]
expected_acks = []
d.run(expect_records: events.size) do
entries = []
events.each {|_tag, _time, record|
entries << [_time, record]
}
op = { 'chunk' => Base64.encode64(entries.object_id.to_s) }
expected_acks << op['chunk']
send_data(["tag1", entries, op].to_msgpack, try_to_receive_response: true, response_timeout: 1, **options)
end
assert_equal events, d.events
assert_equal expected_acks, @responses.map { |res| MessagePack.unpack(res)['ack'] }
end
data(tcp: {
options: {
auth: false
}
},
auth: {
options: {
auth: true
}
})
test 'packed_forward' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config)
time = event_time("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
["tag1", time, {"a"=>2}]
]
expected_acks = []
d.run(expect_records: events.size) do
entries = ''
events.each {|_tag, _time,record|
[time, record].to_msgpack(entries)
}
op = { 'chunk' => Base64.encode64(entries.object_id.to_s) }
expected_acks << op['chunk']
send_data(["tag1", entries, op].to_msgpack, try_to_receive_response: true, response_timeout: 1, **options)
end
assert_equal events, d.events
assert_equal expected_acks, @responses.map { |res| MessagePack.unpack(res)['ack'] }
end
data(
tcp: {
options: {
auth: false
}
},
### Auth is not supported with json
# auth: {
# options: {
# auth: true
# }
# },
)
test 'message_json' do |data|
options = data[:options]
config = options[:auth] ? config_auth : base_config
@d = d = create_driver(config)
time_i = event_time("2011-01-02 13:14:15 UTC")
events = [
["tag1", time_i, {"a"=>1}],
["tag2", time_i, {"a"=>2}]
]
expected_acks = []
d.run(expect_records: events.size, timeout: 20) do
events.each {|tag, _time, record|
op = { 'chunk' => Base64.encode64(record.object_id.to_s) }
expected_acks << op['chunk']
send_data([tag, _time, record, op].to_json, try_to_receive_response: true, response_timeout: 1, **options)
}
end
assert_equal events, d.events
assert_equal expected_acks, @responses.map { |res| JSON.parse(res)['ack'] }
end
end
# When the payload carries no option hash (no 'chunk' id), in_forward must not
# send any response: read_data() times out and nil is recorded per attempt.
sub_test_case 'not respond without required ack' do
  data(tcp: {
         options: {
           auth: false
         }
       },
       auth: {
         options: {
           auth: true
         }
       })
  test 'message' do |data|
    options = data[:options]
    config = options[:auth] ? config_auth : base_config
    @d = d = create_driver(config)
    time = event_time("2011-01-02 13:14:15 UTC")
    events = [
      ["tag1", time, {"a"=>1}],
      ["tag2", time, {"a"=>2}]
    ]
    d.run(expect_records: events.size, timeout: 20) do
      events.each {|tag, _time, record|
        send_data([tag, _time, record].to_msgpack, try_to_receive_response: true, response_timeout: 1, **options)
      }
    end
    assert_equal events, d.events
    # one nil (read timeout) per sent message
    assert_equal [nil, nil], @responses
  end

  data(tcp: {
         options: {
           auth: false
         }
       },
       auth: {
         options: {
           auth: true
         }
       })
  test 'forward' do |data|
    options = data[:options]
    config = options[:auth] ? config_auth : base_config
    @d = d = create_driver(config)
    time = event_time("2011-01-02 13:14:15 UTC")
    events = [
      ["tag1", time, {"a"=>1}],
      ["tag1", time, {"a"=>2}]
    ]
    d.run(expect_records: events.size, timeout: 20) do
      entries = []
      events.each {|tag, _time, record|
        entries << [_time, record]
      }
      send_data(["tag1", entries].to_msgpack, try_to_receive_response: true, response_timeout: 1, **options)
    end
    assert_equal events, d.events
    assert_equal [nil], @responses
  end

  data(tcp: {
         options: {
           auth: false
         }
       },
       auth: {
         options: {
           auth: true
         }
       })
  test 'packed_forward' do |data|
    options = data[:options]
    config = options[:auth] ? config_auth : base_config
    @d = d = create_driver(config)
    time = event_time("2011-01-02 13:14:15 UTC")
    events = [
      ["tag1", time, {"a"=>1}],
      ["tag1", time, {"a"=>2}]
    ]
    d.run(expect_records: events.size, timeout: 20) do
      entries = ''
      events.each {|tag, _time, record|
        [_time, record].to_msgpack(entries)
      }
      send_data(["tag1", entries].to_msgpack, try_to_receive_response: true, response_timeout: 1, **options)
    end
    assert_equal events, d.events
    assert_equal [nil], @responses
  end

  data(
    tcp: {
      options: {
        auth: false
      }
    },
    ### Auth is not supported with json
    # auth: {
    #   config: config_auth,
    #   options: {
    #     auth: true
    #   }
    # },
  )
  test 'message_json' do |data|
    options = data[:options]
    config = options[:auth] ? config_auth : base_config
    @d = d = create_driver(config)
    time_i = event_time("2011-01-02 13:14:15 UTC").to_i
    events = [
      ["tag1", time_i, {"a"=>1}],
      ["tag2", time_i, {"a"=>2}]
    ]
    d.run(expect_records: events.size, timeout: 20) do
      events.each {|tag, _time, record|
        send_data([tag, _time, record].to_json, try_to_receive_response: true, response_timeout: 1, **options)
      }
    end
    assert_equal events, d.events
    assert_equal [nil, nil], @responses
  end
end
# Shortcut for a msgpack packer built via fluentd's shared factory
def packer(*args)
  Fluent::MessagePackFactory.msgpack_packer(*args)
end
# Shortcut for a msgpack unpacker built via fluentd's shared factory
def unpacker
  Fluent::MessagePackFactory.msgpack_unpacker
end
# Reads from +io+ until the block returns truthy for the accumulated bytes,
# or +timeout+ seconds elapse.
#
# Returns:
#   ''  : socket was disconnected before any data arrived
#   nil : read timed out without any data arrival
#   data: accumulated bytes, once the block accepted them
def read_data(io, timeout, &block)
  res = ''
  select_timeout = 0.5
  # CLOCK_MONOTONIC_RAW is not available on every platform; fall back if missing
  clock_id = Process::CLOCK_MONOTONIC_RAW rescue Process::CLOCK_MONOTONIC
  timeout_at = Process.clock_gettime(clock_id) + timeout
  begin
    io_activated = false
    while Process.clock_gettime(clock_id) < timeout_at
      if IO.select([io], nil, nil, select_timeout)
        io_activated = true
        res << io.readpartial(2048)
        break if block.call(res)
      end
    end
    res = nil unless io_activated # timed out without any data arrival
  rescue Errno::EAGAIN
    sleep 0.01
    retry if res == ''
    # if res is not empty, all data in the socket buffer has been read, so do not retry
  rescue IOError, EOFError, Errno::ECONNRESET
    # socket disconnected: return whatever was read so far ('' if nothing)
  end
  res
end
# Plays the client side of the forward-protocol auth handshake on +io+:
# reads HELO, sends PING (with the shared-key digest and, when enabled, the
# user/password digest), then validates the PONG. Raises on timeout,
# disconnect, failed authentication, or shared-key digest mismatch.
def simulate_auth_sequence(io, shared_key=SHARED_KEY, username=USER_NAME, password=USER_PASSWORD)
  auth_response_timeout = 30
  shared_key_salt = 'salt'
  # reading helo
  helo_data = read_data(io, auth_response_timeout){|data| MessagePack.unpack(data) rescue nil }
  raise "Authentication packet timeout" unless helo_data
  raise "Authentication connection closed" if helo_data == ''
  # ['HELO', options(hash)]
  helo = MessagePack.unpack(helo_data)
  raise "Invalid HELO header" unless helo[0] == 'HELO'
  raise "Invalid HELO option object" unless helo[1].is_a?(Hash)
  @options = helo[1]
  # sending ping: digest binds the salt, our hostname, the server nonce and the shared key
  ping = [
    'PING',
    'selfhostname',
    shared_key_salt,
    Digest::SHA512.new
      .update(shared_key_salt)
      .update('selfhostname')
      .update(@options['nonce'])
      .update(shared_key).hexdigest,
  ]
  if @options['auth'] # auth enabled -> value is auth salt
    pass_digest = Digest::SHA512.new.update(@options['auth']).update(username).update(password).hexdigest
    ping.push(username, pass_digest)
  else
    ping.push('', '')
  end
  io.write ping.to_msgpack
  io.flush
  # reading pong
  pong_data = read_data(io, auth_response_timeout){|data| MessagePack.unpack(data) rescue nil }
  raise "PONG packet timeout" unless pong_data
  raise "PONG connection closed" if pong_data == ''
  # ['PONG', bool(auth_result), string(reason_if_failed), self_hostname, shared_key_digest]
  pong = MessagePack.unpack(pong_data)
  raise "Invalid PONG header" unless pong[0] == 'PONG'
  raise "Authentication Failure: #{pong[2]}" unless pong[1]
  # verify the server also knows the shared key (mutual authentication)
  clientside_calculated = Digest::SHA512.new
    .update(shared_key_salt)
    .update(pong[3])
    .update(@options['nonce'])
    .update(shared_key).hexdigest
  raise "Shared key digest mismatch" unless clientside_calculated == pong[4]
  # authentication success
  true
end
# Opens a plain TCP client connection to the in_forward instance under test
def connect
  TCPSocket.new('127.0.0.1', @port)
end
# Data ordering is not assured:
#   Records in different sockets are processed on different threads, so
#   scheduling affects the order of emitted records.
#   So, we MUST sort emitted records from different `send_data` calls before assertion.
def send_data(data, try_to_receive_response: false, response_timeout: 5, auth: false)
  io = connect
  if auth
    # complete the HELO/PING/PONG handshake before sending any events
    simulate_auth_sequence(io)
  end
  io.write data
  io.flush
  if try_to_receive_response
    # records nil on timeout, '' on disconnect, raw response bytes otherwise
    @responses << read_data(io, response_timeout){|d| MessagePack.unpack(d) rescue nil }
  end
ensure
  io.close rescue nil # SSL socket requires any writes to close sockets
end
sub_test_case 'source_hostname_key and source_address_key features' do
data(
both: [:hostname, :address],
hostname: [:hostname],
address: [:address],
)
test 'message protocol' do |keys|
execute_test_with_source_hostname_key(*keys) { |events|
events.each { |tag, time, record|
send_data [tag, time, record].to_msgpack
}
}
end
data(
both: [:hostname, :address],
hostname: [:hostname],
address: [:address],
)
test 'forward protocol' do |keys|
execute_test_with_source_hostname_key(*keys) { |events|
entries = []
events.each {|tag,time,record|
entries << [time, record]
}
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_regexp.rb | test/plugin/test_parser_regexp.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser'
# Tests the regexp parser through both its APIs:
# - legacy compat API (Fluent::Compat::TextParser::RegexpParser, regexp object
#   passed to the constructor)
# - current plugin API (Fluent::Plugin::RegexpParser, regexp given as the
#   'expression' config string, optionally with /i and /m flags)
class RegexpParserTest < ::Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  # Shared assertions: parses an apache-like access-log line and verifies each
  # named capture was extracted and type-converted per the 'types' setting.
  def internal_test_case(parser)
    text = '192.168.0.1 - - [28/Feb/2013:12:00:00 +0900] [14/Feb/2013:12:00:00 +0900] "true /,/user HTTP/1.1" 200 777'
    parser.parse(text) { |time, record|
      assert_equal(event_time('28/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z'), time)
      assert_equal({
        'user' => '-',
        'flag' => true,
        'code' => 200.0,
        'size' => 777,
        'date' => event_time('14/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z'),
        'host' => '192.168.0.1',
        'path' => ['/', '/user']
      }, record)
    }
  end

  sub_test_case "Fluent::Compat::TextParser::RegexpParser" do
    # initialize_conf: true passes conf to the constructor; false applies it
    # via #configure afterwards — both paths must behave identically.
    def create_driver(regexp, conf = {}, initialize_conf: false)
      if initialize_conf
        Fluent::Test::Driver::Parser.new(Fluent::Compat::TextParser::RegexpParser.new(regexp, conf))
      else
        Fluent::Test::Driver::Parser.new(Fluent::Compat::TextParser::RegexpParser.new(regexp)).configure(conf)
      end
    end

    def test_parse_with_typed
      # Use Regexp.new instead of // literal to avoid different parser behaviour in 1.9 and 2.0
      regexp = Regexp.new(%q!^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] \[(?<date>[^\]]*)\] "(?<flag>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)$!)
      conf = {
        'time_format' => "%d/%b/%Y:%H:%M:%S %z",
        'types' => 'user:string,date:time:%d/%b/%Y:%H:%M:%S %z,flag:bool,path:array,code:float,size:integer'
      }
      d = create_driver(regexp, conf, initialize_conf: true)
      internal_test_case(d.instance)
    end

    def test_parse_with_configure
      # Specify conf by configure method instead of initializer
      regexp = Regexp.new(%q!^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] \[(?<date>[^\]]*)\] "(?<flag>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)$!)
      conf = {
        'time_format' => "%d/%b/%Y:%H:%M:%S %z",
        'types' => 'user:string,date:time:%d/%b/%Y:%H:%M:%S %z,flag:bool,path:array,code:float,size:integer'
      }
      d = create_driver(regexp, conf)
      internal_test_case(d.instance)
      # configure must also expose the pattern/time_format through #patterns
      assert_equal(regexp, d.instance.patterns['format'])
      assert_equal("%d/%b/%Y:%H:%M:%S %z", d.instance.patterns['time_format'])
    end

    def test_parse_with_typed_and_name_separator
      # same as above, but with a custom name/type delimiter ('|' instead of ':')
      regexp = Regexp.new(%q!^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] \[(?<date>[^\]]*)\] "(?<flag>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)$!)
      conf = {
        'time_format' => "%d/%b/%Y:%H:%M:%S %z",
        'types' => 'user|string,date|time|%d/%b/%Y:%H:%M:%S %z,flag|bool,path|array,code|float,size|integer',
        'types_label_delimiter' => '|'
      }
      d = create_driver(regexp, conf)
      internal_test_case(d.instance)
    end

    def test_parse_with_time_key
      # the event time can be taken from an arbitrary named capture via time_key
      conf = {
        'time_format' => "%Y-%m-%d %H:%M:%S %z",
        'time_key' => 'logtime'
      }
      d = create_driver(/(?<logtime>[^\]]*)/, conf)
      text = '2013-02-28 12:00:00 +0900'
      d.instance.parse(text) do |time, _record|
        assert_equal Fluent::EventTime.parse(text), time
      end
    end

    def test_parse_without_time
      # with no 'time' capture, the parser falls back to the current time
      time_at_start = Time.now.to_i
      text = "tagomori_satoshi tagomoris 34\n"
      regexp = Regexp.new(%q!^(?<name>[^ ]*) (?<user>[^ ]*) (?<age>\d*)$!)
      conf = {
        'types' => 'name:string,user:string,age:integer'
      }
      d = create_driver(regexp, conf)
      d.instance.parse(text) { |time, record|
        assert time && time >= time_at_start, "parser puts current time without time input"
        assert_equal "tagomori_satoshi", record["name"]
        assert_equal "tagomoris", record["user"]
        assert_equal 34, record["age"]
      }
    end

    def test_parse_without_time_estimate_current_event_false
      # with estimate_current_event disabled, missing time yields nil
      text = "tagomori_satoshi tagomoris 34\n"
      regexp = Regexp.new(%q!^(?<name>[^ ]*) (?<user>[^ ]*) (?<age>\d*)$!)
      conf = {
        'types' => 'name:string,user:string,age:integer'
      }
      d = create_driver(regexp, conf)
      d.instance.estimate_current_event = false
      d.instance.parse(text) { |time, record|
        assert_equal "tagomori_satoshi", record["name"]
        assert_equal "tagomoris", record["user"]
        assert_equal 34, record["age"]
        assert_nil time, "parser returns nil if configured so"
      }
    end

    def test_parse_with_keep_time_key
      # keep_time_key retains the raw 'time' capture in the record
      regexp = Regexp.new(%q!(?<time>.*)!)
      conf = {
        'time_format' => "%d/%b/%Y:%H:%M:%S %z",
        'keep_time_key' => 'true',
      }
      d = create_driver(regexp, conf)
      text = '28/Feb/2013:12:00:00 +0900'
      d.instance.parse(text) do |_time, record|
        assert_equal text, record['time']
      end
    end

    def test_parse_with_keep_time_key_with_typecast
      # keep_time_key + a 'time' typecast converts the kept value to epoch seconds
      regexp = Regexp.new(%q!(?<time>.*)!)
      conf = {
        'time_format' => "%d/%b/%Y:%H:%M:%S %z",
        'keep_time_key' => 'true',
        'types' => 'time:time:%d/%b/%Y:%H:%M:%S %z',
      }
      d = create_driver(regexp, conf)
      text = '28/Feb/2013:12:00:00 +0900'
      d.instance.parse(text) do |_time, record|
        assert_equal 1362020400, record['time']
      end
    end
  end

  sub_test_case "Fluent::Plugin::RegexpParser" do
    def create_driver(conf)
      Fluent::Test::Driver::Parser.new(Fluent::Plugin::RegexpParser.new).configure(conf)
    end

    sub_test_case "configure" do
      def test_bad_expression
        # an unterminated or otherwise invalid /regexp/ string must be rejected
        conf = {
          'expression' => %q!/.*/!,
        }
        assert_raise Fluent::ConfigError do
          create_driver(conf)
        end
      end

      def test_default_options
        conf = {
          'expression' => %q!/^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] \[(?<date>[^\]]*)\] "(?<flag>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)$/!,
          'time_format' => "%d/%b/%Y:%H:%M:%S %z",
          'types' => 'user:string,date:time:%d/%b/%Y:%H:%M:%S %z,flag:bool,path:array,code:float,size:integer'
        }
        d = create_driver(conf)
        regexp = d.instance.expression
        # no trailing flags -> no regexp options set
        assert_equal(0, regexp.options)
      end

      data(
        ignorecase: ["i", Regexp::IGNORECASE],
        multiline: ["m", Regexp::MULTILINE],
        ignorecase_multiline: ["im", Regexp::IGNORECASE | Regexp::MULTILINE],
      )
      def test_options(data)
        # trailing /i and /m flags on the expression must map to Regexp options
        regexp_option, expected = data
        conf = {
          'expression' => %Q!/^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] \[(?<date>[^\]]*)\] "(?<flag>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)$/#{regexp_option}!,
          'time_format' => "%d/%b/%Y:%H:%M:%S %z",
          'types' => 'user:string,date:time:%d/%b/%Y:%H:%M:%S %z,flag:bool,path:array,code:float,size:integer'
        }
        d = create_driver(conf)
        regexp = d.instance.expression
        assert_equal(expected, regexp.options)
      end
    end

    def test_parse_with_typed
      conf = {
        'expression' => %q!/^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] \[(?<date>[^\]]*)\] "(?<flag>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)$/!,
        'time_format' => "%d/%b/%Y:%H:%M:%S %z",
        'types' => 'user:string,date:time:%d/%b/%Y:%H:%M:%S %z,flag:bool,path:array,code:float,size:integer'
      }
      d = create_driver(conf)
      internal_test_case(d.instance)
    end

    def test_parse_with_typed_by_json_hash
      # 'types' also accepts a JSON object mapping field name to type
      conf = {
        'expression' => %q!/^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] \[(?<date>[^\]]*)\] "(?<flag>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)$/!,
        'time_format' => "%d/%b/%Y:%H:%M:%S %z",
        'types' => '{"user":"string","date":"time:%d/%b/%Y:%H:%M:%S %z","flag":"bool","path":"array","code":"float","size":"integer"}',
      }
      d = create_driver(conf)
      internal_test_case(d.instance)
    end

    def test_parse_with_time_key
      conf = {
        'expression' => %q!/(?<logtime>[^\]]*)/!,
        'time_format' => "%Y-%m-%d %H:%M:%S %z",
        'time_key' => 'logtime'
      }
      d = create_driver(conf)
      text = '2013-02-28 12:00:00 +0900'
      d.instance.parse(text) do |time, _record|
        assert_equal Fluent::EventTime.parse(text), time
      end
    end

    def test_parse_without_time
      # with no 'time' capture, the parser falls back to the current time
      time_at_start = Time.now.to_i
      text = "tagomori_satoshi tagomoris 34\n"
      conf = {
        'expression' => %q!/^(?<name>[^ ]*) (?<user>[^ ]*) (?<age>\d*)$/!,
        'types' => 'name:string,user:string,age:integer'
      }
      d = create_driver(conf)
      d.instance.parse(text) { |time, record|
        assert time && time >= time_at_start, "parser puts current time without time input"
        assert_equal "tagomori_satoshi", record["name"]
        assert_equal "tagomoris", record["user"]
        assert_equal 34, record["age"]
      }
    end

    def test_parse_without_time_estimate_current_event_false
      text = "tagomori_satoshi tagomoris 34\n"
      conf = {
        'expression' => %q!/^(?<name>[^ ]*) (?<user>[^ ]*) (?<age>\d*)$/!,
        'types' => 'name:string,user:string,age:integer'
      }
      d = create_driver(conf)
      d.instance.estimate_current_event = false
      d.instance.parse(text) { |time, record|
        assert_equal "tagomori_satoshi", record["name"]
        assert_equal "tagomoris", record["user"]
        assert_equal 34, record["age"]
        assert_nil time, "parser returns nil if configured so"
      }
    end

    def test_parse_with_keep_time_key
      conf = {
        'expression' => %q!/(?<time>.*)/!,
        'time_format' => "%d/%b/%Y:%H:%M:%S %z",
        'keep_time_key' => 'true',
      }
      d = create_driver(conf)
      text = '28/Feb/2013:12:00:00 +0900'
      d.instance.parse(text) do |_time, record|
        assert_equal text, record['time']
      end
    end

    def test_parse_with_keep_time_key_with_typecast
      conf = {
        'expression' => %q!/(?<time>.*)/!,
        'time_format' => "%d/%b/%Y:%H:%M:%S %z",
        'keep_time_key' => 'true',
        'types' => 'time:time:%d/%b/%Y:%H:%M:%S %z',
      }
      d = create_driver(conf)
      text = '28/Feb/2013:12:00:00 +0900'
      d.instance.parse(text) do |_time, record|
        assert_equal 1362020400, record['time']
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_metadata.rb | test/plugin/test_metadata.rb | require_relative '../helper'
require 'fluent/plugin/buffer'
# Ordering semantics of Fluent::Plugin::Buffer::Metadata: `<=>` compares
# timekey first, then tag, then the variables hash (keys sorted), so metadata
# instances sort deterministically regardless of insertion order.
class BufferMetadataTest < Test::Unit::TestCase
  # Shorthand constructor; all three attributes default to nil
  def meta(timekey=nil, tag=nil, variables=nil)
    Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
  end

  setup do
    Fluent::Test.setup
  end

  sub_test_case 'about metadata' do
    test 'comparison of variables should be stable' do
      m = meta(nil, nil, nil)
      # different sets of keys: compared by the sorted key sets first
      assert_equal(-1, m.cmp_variables({}, {a: 1}))
      assert_equal(1, m.cmp_variables({a: 1}, {}))
      assert_equal(1, m.cmp_variables({c: 1}, {a: 1}))
      assert_equal(-1, m.cmp_variables({a: 1}, {a: 1, b: 2}))
      assert_equal(1, m.cmp_variables({a: 1, c: 1}, {a: 1, b: 2}))
      assert_equal(1, m.cmp_variables({a: 1, b: 0, c: 1}, {a: 1, b: 2}))
      # same set of keys: compared by values, in sorted key order
      assert_equal(-1, m.cmp_variables({a: 1}, {a: 2}))
      assert_equal(-1, m.cmp_variables({a: 1, b: 0}, {a: 1, b: 1}))
      assert_equal(-1, m.cmp_variables({a: 1, b: 1, c: 100}, {a: 1, b: 1, c: 200}))
      assert_equal(-1, m.cmp_variables({b: 1, c: 100, a: 1}, {a: 1, b: 1, c: 200})) # comparison sorts keys
      # nil values sort before non-nil values
      assert_equal(-1, m.cmp_variables({a: nil}, {a: 1}))
      assert_equal(-1, m.cmp_variables({a: 1, b: nil}, {a: 1, b: 1}))
    end

    test 'comparison of metadata should be stable' do
      n = Time.now.to_i
      # equal field-by-field -> equal
      assert_equal(0, meta(nil, nil, nil) <=> meta(nil, nil, nil))
      assert_equal(0, meta(n, nil, nil) <=> meta(n, nil, nil))
      assert_equal(0, meta(nil, "t1", nil) <=> meta(nil, "t1", nil))
      assert_equal(0, meta(nil, nil, {}) <=> meta(nil, nil, {}))
      assert_equal(0, meta(nil, nil, {a: "1"}) <=> meta(nil, nil, {a: "1"}))
      assert_equal(0, meta(n, nil, {}) <=> meta(n, nil, {}))
      assert_equal(0, meta(n, "t1", {}) <=> meta(n, "t1", {}))
      assert_equal(0, meta(n, "t1", {a: "x", b: 10}) <=> meta(n, "t1", {a: "x", b: 10}))

      # timekey is 1st comparison key
      assert_equal(-1, meta(n - 300, nil, nil) <=> meta(n - 270, nil, nil))
      assert_equal(1, meta(n + 1, "a", nil) <=> meta(n - 1, "b", nil))
      assert_equal(-1, meta(n - 1, nil, {a: 100}) <=> meta(n + 1, nil, {}))

      # tag is 2nd
      assert_equal(-1, meta(nil, "a", {}) <=> meta(nil, "b", {}))
      assert_equal(-1, meta(n, "a", {}) <=> meta(n, "b", {}))
      assert_equal(1, meta(nil, "x", {a: 1}) <=> meta(nil, "t", {}))
      assert_equal(1, meta(n, "x", {a: 1}) <=> meta(n, "t", {}))
      assert_equal(1, meta(nil, "x", {a: 1}) <=> meta(nil, "t", {a: 1}))
      assert_equal(1, meta(n, "x", {a: 1}) <=> meta(n, "t", {a: 2}))
      assert_equal(1, meta(n, "x", {a: 1}) <=> meta(n, "t", {a: 10, b: 1}))

      # variables is the last
      assert_equal(-1, meta(nil, nil, {}) <=> meta(nil, nil, {a: 1}))
      assert_equal(-1, meta(n, "t", {}) <=> meta(n, "t", {a: 1}))
      assert_equal(1, meta(n, "t", {a: 1}) <=> meta(n, "t", {}))
      assert_equal(-1, meta(n, "t", {a: 1}) <=> meta(n, "t", {a: 2}))
      assert_equal(-1, meta(n, "t", {a: 1}) <=> meta(n, "t", {a: 1, b: 1}))
      assert_equal(1, meta(nil, nil, {b: 1}) <=> meta(nil, nil, {a: 1}))
      assert_equal(1, meta(n, "t", {b: 1}) <=> meta(n, "t", {a: 1}))
    end

    test 'metadata can be sorted' do
      # m0..m10 are listed in ascending order; any shuffle must sort back to it
      n = Time.now.to_i
      m0 = meta(nil, nil, nil)
      m1 = meta(n - 1, nil, nil)
      m2 = meta(n - 1, "a", nil)
      m3 = meta(n - 1, "a", {a: 1})
      m4 = meta(n - 1, "a", {a: 100})
      m5 = meta(n - 1, "a", {a: 100, b: 1})
      m6 = meta(n - 1, "aa", nil)
      m7 = meta(n - 1, "aa", {a: 1})
      m8 = meta(n - 1, "b", nil)
      m9 = meta(n, nil, nil)
      m10 = meta(n + 1, nil, {a: 1})
      expected = [m0, m1, m2, m3, m4, m5, m6, m7, m8, m9, m10].freeze
      ary = expected.dup
      100.times do
        assert_equal expected, ary.shuffle.sort
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_input.rb | test/plugin/test_input.rb | require_relative '../helper'
require 'fluent/plugin/input'
require 'flexmock/test_unit'
# Minimal concrete Input subclass used to exercise base-class behaviour
module FluentPluginInputTest
  class DummyPlugin < Fluent::Plugin::Input
  end
end
# Behavioural tests for the Fluent::Plugin::Input base class: lifecycle state
# transitions, plugin id generation, per-plugin logger, helper loading,
# metrics fallbacks, worker availability and event routing.
class InputTest < Test::Unit::TestCase
  setup do
    Fluent::Test.setup
    @p = FluentPluginInputTest::DummyPlugin.new
  end

  test 'has healthy lifecycle' do
    assert !@p.configured?
    @p.configure(config_element())
    assert @p.configured?

    assert !@p.started?
    @p.start
    # BUGFIX: was `assert @p.start`, which called #start a second time and only
    # asserted its truthy return value; every other phase checks the predicate.
    assert @p.started?

    assert !@p.stopped?
    @p.stop
    assert @p.stopped?

    assert !@p.before_shutdown?
    @p.before_shutdown
    assert @p.before_shutdown?

    assert !@p.shutdown?
    @p.shutdown
    assert @p.shutdown?

    assert !@p.after_shutdown?
    @p.after_shutdown
    assert @p.after_shutdown?

    assert !@p.closed?
    @p.close
    assert @p.closed?

    assert !@p.terminated?
    @p.terminate
    assert @p.terminated?
  end

  test 'has plugin_id automatically generated' do
    assert @p.respond_to?(:plugin_id_configured?)
    assert @p.respond_to?(:plugin_id)

    @p.configure(config_element())

    # without @id, an id is generated but not marked as "configured"
    assert !@p.plugin_id_configured?
    assert @p.plugin_id
    assert{ @p.plugin_id != 'mytest' }
  end

  test 'has plugin_id manually configured' do
    @p.configure(config_element('ROOT', '', {'@id' => 'mytest'}))
    assert @p.plugin_id_configured?
    assert_equal 'mytest', @p.plugin_id
  end

  test 'has plugin logger' do
    assert @p.respond_to?(:log)
    assert @p.log

    # default logger
    original_logger = @p.log

    @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))

    # configuring @log_level must install a fresh per-plugin logger
    assert(@p.log.object_id != original_logger.object_id)
    assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
  end

  test 'can load plugin helpers' do
    assert_nothing_raised do
      class FluentPluginInputTest::DummyPlugin2 < Fluent::Plugin::Input
        helpers :storage
      end
    end
  end

  test 'can use metrics plugins and fallback methods' do
    @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))

    # metrics objects are created on configure; counters start at zero
    %w[emit_size_metrics emit_records_metrics].each do |metric_name|
      assert_true @p.instance_variable_get(:"@#{metric_name}").is_a?(Fluent::Plugin::Metrics)
    end
    assert_equal 0, @p.emit_size
    assert_equal 0, @p.emit_records
  end

  test 'are not available with multi workers configuration in default' do
    assert_false @p.multi_workers_ready?
  end

  test 'has router and can emit into it' do
    assert @p.has_router?

    @p.configure(config_element())
    assert @p.router

    # a router only needs to respond to #emit; substitute a recording stub
    DummyRouter = Struct.new(:emits) do
      def emit(tag, es)
        self.emits << [tag, es]
      end
    end
    @p.router = DummyRouter.new([])

    @p.router.emit('mytag', [])
    @p.router.emit('mytag.testing', ['it is not es, but no problem for tests'])

    assert_equal ['mytag', []], @p.router.emits[0]
    assert_equal ['mytag.testing', ['it is not es, but no problem for tests']], @p.router.emits[1]
  end

  test 'has router for specified label if configured' do
    @p.configure(config_element())
    original_router = @p.router

    # configuring @label must swap the router for the label's event router
    router_mock = flexmock('mytest')
    router_mock.should_receive(:emit).once.with('mytag.testing', ['for mock'])
    label_mock = flexmock('mylabel')
    label_mock.should_receive(:event_router).once.and_return(router_mock)
    Fluent::Engine.root_agent.labels['@mytest'] = label_mock

    @p.configure(config_element('ROOT', '', {'@label' => '@mytest'}))
    assert{ @p.router.object_id != original_router.object_id }
    @p.router.emit('mytag.testing', ['for mock'])
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_buffer_file_single_chunk.rb | test/plugin/test_buffer_file_single_chunk.rb | require_relative '../helper'
require 'fluent/plugin/buffer/file_single_chunk'
require 'fluent/plugin/compressable'
require 'fluent/unique_id'
require 'fileutils'
require 'msgpack'
require 'time'
class BufferFileSingleChunkTest < Test::Unit::TestCase
include Fluent::Plugin::Compressable
setup do
  @klass = Fluent::Plugin::Buffer::FileSingleChunk
  # recreate a clean scratch directory for chunk files before every test
  @chunkdir = File.expand_path('../../tmp/buffer_file_single_chunk', __FILE__)
  FileUtils.rm_r(@chunkdir) rescue nil
  FileUtils.mkdir_p(@chunkdir)
end
# Lightweight stand-in for Fluent::Plugin::Buffer::Metadata (same attributes)
Metadata = Struct.new(:timekey, :tag, :variables)

# Builds a metadata struct; defaults mimic a plain tag-keyed chunk
def gen_metadata(timekey: nil, tag: 'testing', variables: nil)
  Metadata.new(timekey, tag, variables)
end
# Absolute path of +path+ inside the per-test scratch directory
def gen_path(path)
  File.join(@chunkdir, path)
end
# Builds a fixed, reproducible 16-byte chunk unique_id
# (hex: "52fde6425d7406bdb19b936e1a1ba98c").
# Layout mirrors Fluent::UniqueId: 52-bit microsecond timestamp | 12 random
# bits, followed by two further random 32-bit words — with every random part
# replaced by a constant so tests are deterministic.
def gen_test_chunk_id
  fixed_time = Time.parse('2016-04-07 14:31:33 +0900')
  micros = fixed_time.to_i * 1000 * 1000 + fixed_time.usec
  hi = (micros << 12) | 1725    # 1725 stands in for rand(0xfff)
  rand_a = 2979763054           # stands in for rand(0xffffffff)
  rand_b = 438020492            # ditto
  [hi >> 32, hi & 0xffffffff, rand_a, rand_b].pack('NNNN')
end
# Renders a 16-byte chunk id as the concatenation of its four big-endian
# 32-bit words in hex (no zero padding), matching the ids embedded in
# buffer file names.
def hex_id(id)
  words = id.unpack('N*')
  words.inject(''.dup) { |hex, word| hex << word.to_s(16) }
end
# Class-level path helpers of FileSingleChunk: deriving chunk state from a
# file name, generating staged/queued paths, and parsing ids back out of them.
sub_test_case 'classmethods' do
  data(
    correct_staged: ['/mydir/mypath/fsb.b00ff.buf', :staged],
    correct_queued: ['/mydir/mypath/fsb.q00ff.buf', :queued],
    incorrect_staged: ['/mydir/mypath/fsb.b00ff.buf/unknown', :unknown],
    incorrect_queued: ['/mydir/mypath/fsb.q00ff.buf/unknown', :unknown],
    output_file: ['/mydir/mypath/fsb.20160716.buf', :unknown],
  )
  test 'can .assume_chunk_state' do |data|
    path, expected = data
    assert_equal expected, @klass.assume_chunk_state(path)
  end

  test '.generate_stage_chunk_path generates path with staged mark & chunk unique_id' do
    assert_equal gen_path("fsb.foo.b52fde6425d7406bdb19b936e1a1ba98c.buf"), @klass.generate_stage_chunk_path(gen_path("fsb.*.buf"), 'foo', gen_test_chunk_id)
    # the configured path must contain a '.*.' placeholder for the chunk id
    assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
      @klass.generate_stage_chunk_path(gen_path("fsb.buf"), 'foo', gen_test_chunk_id)
    end
    assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
      @klass.generate_stage_chunk_path(gen_path("fsb.*"), 'foo', gen_test_chunk_id)
    end
    assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
      @klass.generate_stage_chunk_path(gen_path("*.buf"), 'foo', gen_test_chunk_id)
    end
  end

  test '.generate_queued_chunk_path generates path with enqueued mark for staged chunk path' do
    # staged marker 'b<id>' is rewritten to queued marker 'q<id>'
    assert_equal(
      gen_path("fsb.q52fde6425d7406bdb19b936e1a1ba98c.buf"),
      @klass.generate_queued_chunk_path(gen_path("fsb.b52fde6425d7406bdb19b936e1a1ba98c.buf"), gen_test_chunk_id)
    )
  end

  test '.generate_queued_chunk_path generates special path with chunk unique_id for non staged chunk path' do
    # without a staged marker, a '.q<id>.chunk' suffix is appended instead
    assert_equal(
      gen_path("fsb.buf.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
      @klass.generate_queued_chunk_path(gen_path("fsb.buf"), gen_test_chunk_id)
    )
    assert_equal(
      gen_path("fsb.q55555555555555555555555555555555.buf.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
      @klass.generate_queued_chunk_path(gen_path("fsb.q55555555555555555555555555555555.buf"), gen_test_chunk_id)
    )
  end

  data('1 word tag' => 'foo',
       '2 words tag' => 'test.log',
       'empty' => '')
  test '.unique_id_and_key_from_path recreates unique_id and key from file path' do |key|
    path = @klass.unique_id_and_key_from_path(gen_path("fsb.#{key}.q52fde6425d7406bdb19b936e1a1ba98c.buf"))
    assert_equal [gen_test_chunk_id, key], path
  end
end
sub_test_case 'newly created chunk' do
setup do
@path_conf = File.join(@chunkdir, 'fsb.*.buf')
@chunk_path = File.join(@chunkdir, "fsb.testing.b#{hex_id(gen_test_chunk_id)}.buf")
@c = Fluent::Plugin::Buffer::FileSingleChunk.new(gen_metadata, @path_conf, :create, nil)
end
def gen_chunk_path(prefix, unique_id)
File.join(@chunkdir, "fsb.testing.#{prefix}#{Fluent::UniqueId.hex(unique_id)}.buf")
end
teardown do
if @c
@c.purge rescue nil
end
if File.exist?(@chunk_path)
File.unlink(@chunk_path)
end
end
test 'creates new files for chunk and metadata with specified path & permission' do
assert_equal 16, @c.unique_id.size
assert_equal gen_chunk_path('b', @c.unique_id), @c.path
assert File.exist?(gen_chunk_path('b', @c.unique_id))
assert { File.stat(gen_chunk_path('b', @c.unique_id)).mode.to_s(8).end_with?(Fluent::DEFAULT_FILE_PERMISSION.to_s(8)) }
assert_equal :unstaged, @c.state
assert @c.empty?
end
test 'can #append, #commit and #read it' do
assert @c.empty?
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
@c.commit
ds = @c.read.split("\n").select { |d| !d.empty? }
assert_equal 2, ds.size
assert_equal d1, JSON.parse(ds[0])
assert_equal d2, JSON.parse(ds[1])
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
@c.commit
ds = @c.read.split("\n").select{|d| !d.empty? }
assert_equal 4, ds.size
assert_equal d1, JSON.parse(ds[0])
assert_equal d2, JSON.parse(ds[1])
assert_equal d3, JSON.parse(ds[2])
assert_equal d4, JSON.parse(ds[3])
end
test 'can #concat, #commit and #read it' do
assert @c.empty?
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"].join
@c.concat(data, 2)
@c.commit
ds = @c.read.split("\n").select{|d| !d.empty? }
assert_equal 2, ds.size
assert_equal d1, JSON.parse(ds[0])
assert_equal d2, JSON.parse(ds[1])
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
@c.commit
ds = @c.read.split("\n").select { |d| !d.empty? }
assert_equal 4, ds.size
assert_equal d1, JSON.parse(ds[0])
assert_equal d2, JSON.parse(ds[1])
assert_equal d3, JSON.parse(ds[2])
assert_equal d4, JSON.parse(ds[3])
end
test 'has its contents in binary (ascii-8bit)' do
data1 = "aaa bbb ccc".force_encoding('utf-8')
@c.append([data1])
@c.commit
assert_equal Encoding::ASCII_8BIT, @c.instance_eval{ @chunk.external_encoding }
assert_equal Encoding::ASCII_8BIT, @c.read.encoding
end
test 'has #bytesize and #size' do
assert @c.empty?
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
assert_equal 2, @c.size
@c.commit
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
assert_equal 2, @c.size
first_bytesize = @c.bytesize
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
assert_equal 4, @c.size
@c.commit
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
assert_equal 4, @c.size
end
# #rollback must discard everything appended since the last #commit, both
# in the in-memory counters (bytesize/size) and in the chunk file on disk.
test 'can #rollback to revert non-committed data' do
assert @c.empty?
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
assert_equal 2, @c.size
# Rolling back before any commit empties the chunk and its file entirely.
@c.rollback
assert @c.empty?
assert_equal '', File.read(@c.path)
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
@c.commit
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
assert_equal 2, @c.size
first_bytesize = @c.bytesize
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
assert_equal 4, @c.size
# Rolling back after a commit restores exactly the committed state.
@c.rollback
assert_equal first_bytesize, @c.bytesize
assert_equal 2, @c.size
assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.read(@c.path)
end
# Same rollback contract as the #append variant, but the uncommitted data
# is written via #concat (pre-joined string plus an explicit record count).
test 'can #rollback to revert non-committed data from #concat' do
assert @c.empty?
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"].join
@c.concat(data, 2)
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
assert_equal 2, @c.size
@c.rollback
assert @c.empty?
assert_equal '', File.read(@c.path)
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
@c.commit
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
assert_equal 2, @c.size
first_bytesize = @c.bytesize
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
assert_equal 4, @c.size
# Rollback after commit must drop only the concatenated (uncommitted) tail.
@c.rollback
assert_equal first_bytesize, @c.bytesize
assert_equal 2, @c.size
assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.read(@c.path)
end
# After #close, the chunk file on disk must contain exactly what #read
# returned while the chunk was open.
test 'can store its data by #close' do
  first_batch = [
    {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'},
    {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'},
  ].map { |r| r.to_json + "\n" }
  @c.append(first_batch)
  @c.commit
  second_batch = [
    {"f1" => 'x', "f2" => 'y', "f3" => 'z'},
    {"f1" => 'a', "f2" => 'b', "f3" => 'c'},
  ].map { |r| r.to_json + "\n" }
  @c.append(second_batch)
  @c.commit
  expected = @c.read
  @c.close
  assert_equal expected, File.read(@c.path)
end
# #purge must delete the chunk file and reset the chunk to empty.
test 'deletes all data by #purge' do
  first_batch = [
    {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'},
    {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'},
  ].map { |r| r.to_json + "\n" }
  @c.append(first_batch)
  @c.commit
  second_batch = [
    {"f1" => 'x', "f2" => 'y', "f3" => 'z'},
    {"f1" => 'a', "f2" => 'b', "f3" => 'c'},
  ].map { |r| r.to_json + "\n" }
  @c.append(second_batch)
  @c.commit
  @c.purge
  assert @c.empty?
  assert_equal 0, @c.bytesize
  assert_equal 0, @c.size
  assert !File.exist?(@c.path)
end
# #open must yield an IO over the committed content, with lines coming
# back in append order.
test 'can #open its contents as io' do
  expected_lines = [
    {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'},
    {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'},
    {"f1" => 'x', "f2" => 'y', "f3" => 'z'},
    {"f1" => 'a', "f2" => 'b', "f3" => 'c'},
  ].map { |r| r.to_json + "\n" }
  @c.append(expected_lines[0, 2])
  @c.commit
  @c.append(expected_lines[2, 2])
  @c.commit
  actual_lines = []
  @c.open do |io|
    assert io
    actual_lines.concat(io.readlines)
  end
  expected_lines.each_with_index do |line, idx|
    assert_equal line, actual_lines[idx]
  end
end
end
# Loading an existing on-disk chunk file whose name carries the "b"
# (staged) state marker, and promoting it to the "q" (queued) name.
sub_test_case 'chunk with file for staged chunk' do
setup do
@chunk_id = gen_test_chunk_id
# "b" in the file name marks a staged chunk; "q" marks an enqueued one.
@staged_path = File.join(@chunkdir, "fsb.testing.b#{hex_id(@chunk_id)}.buf")
@enqueued_path = File.join(@chunkdir, "fsb.testing.q#{hex_id(@chunk_id)}.buf")
@d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
@d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
@d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
@d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
@d = [@d1, @d2, @d3, @d4].map{ |d| d.to_json + "\n" }.join
File.write(@staged_path, @d, :mode => 'wb')
@c = Fluent::Plugin::Buffer::FileSingleChunk.new(gen_metadata, @staged_path, :staged, nil)
end
teardown do
if @c
@c.purge rescue nil
end
# The chunk file may live under either name depending on the test.
[@staged_path, @enqueued_path].each do |path|
File.unlink(path) if File.exist?(path)
end
end
test 'can load as staged chunk from file with metadata' do
assert_equal @staged_path, @c.path
assert_equal :staged, @c.state
assert_nil @c.metadata.timekey
assert_equal 'testing', @c.metadata.tag
assert_nil @c.metadata.variables
# size is 0 right after load; restore_size recounts records from content.
assert_equal 0, @c.size
assert_equal @d, @c.read
@c.restore_size(:text)
assert_equal 4, @c.size
end
test 'can be enqueued' do
stage_path = @c.path
queue_path = @enqueued_path
assert File.exist?(stage_path)
assert !File.exist?(queue_path)
# enqueued! renames the file from the staged to the queued name.
@c.enqueued!
assert_equal queue_path, @c.path
assert !File.exist?(stage_path)
assert File.exist?(queue_path)
assert_nil @c.metadata.timekey
assert_equal 'testing', @c.metadata.tag
assert_nil @c.metadata.variables
assert_equal 0, @c.size
assert_equal @d, File.read(@c.path)
@c.restore_size(:text)
assert_equal 4, @c.size
end
test '#file_rename can rename chunk files even in windows, and call callback with file size' do
data = "aaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbccccccccccccccccccccccccccccc"
testing_file1 = gen_path('rename1.test')
testing_file2 = gen_path('rename2.test')
f = File.open(testing_file1, 'wb', @c.permission)
f.set_encoding(Encoding::ASCII_8BIT)
f.sync = true
f.binmode
f.write data
pos = f.pos
assert f.binmode?
assert f.sync
assert_equal data.bytesize, f.size
io = nil
@c.file_rename(f, testing_file1, testing_file2, ->(new_io){ io = new_io })
assert io
# On Windows the open file is reopened after rename, so the callback
# receives a different IO object; elsewhere the same IO is reused.
if Fluent.windows?
assert { f != io }
else
assert_equal f, io
end
# The new IO must preserve encoding, sync/binmode flags and file position.
assert_equal Encoding::ASCII_8BIT, io.external_encoding
assert io.sync
assert io.binmode?
assert_equal data.bytesize, io.size
assert_equal pos, io.pos
assert_equal '', io.read
io.rewind
assert_equal data, io.read
end
end
# Loading an on-disk chunk file whose name carries the "q" (enqueued)
# marker: the chunk must be read-only.
sub_test_case 'chunk with file for enqueued chunk' do
setup do
@chunk_id = gen_test_chunk_id
@enqueued_path = File.join(@chunkdir, "fsb.testing.q#{hex_id(@chunk_id)}.buf")
@d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
@d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
@d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
@d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
@d = [@d1, @d2, @d3, @d4].map { |d| d.to_json + "\n" }.join
File.write(@enqueued_path, @d, :mode => 'wb')
@c = Fluent::Plugin::Buffer::FileSingleChunk.new(gen_metadata, @enqueued_path, :queued, nil)
end
teardown do
if @c
@c.purge rescue nil
end
File.unlink(@enqueued_path) if File.exist?(@enqueued_path)
end
test 'can load as queued chunk (read only) with metadata' do
assert @c
assert_equal @chunk_id, @c.unique_id
assert_equal :queued, @c.state
# created_at/modified_at are taken from the file's ctime/mtime on load.
stat = File.stat(@enqueued_path)
assert_equal stat.ctime.to_i, @c.created_at.to_i
assert_equal stat.mtime.to_i, @c.modified_at.to_i
assert_equal 0, @c.size
assert_equal @d.bytesize, @c.bytesize
assert_equal @d, @c.read
@c.restore_size(:text)
assert_equal 4, @c.size
# Writing to a queued chunk must fail, both via the chunk API and via
# the underlying IO (opened read-only).
assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
@c.append(["queued chunk is read only"])
end
assert_raise IOError do
@c.instance_eval{ @chunk }.write "chunk io is opened as read only"
end
end
end
# NOTE(review): this sub_test_case largely duplicates the
# 'chunk with file for enqueued chunk' case above (same setup, overlapping
# assertions, plus a metadata comparison) — consider merging them.
sub_test_case 'chunk with queued chunk file' do
setup do
@chunk_id = gen_test_chunk_id
@chunk_path = File.join(@chunkdir, "fsb.testing.q#{hex_id(@chunk_id)}.buf")
@d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
@d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
@d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
@d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
@d = [@d1, @d2, @d3, @d4].map { |d| d.to_json + "\n" }.join
File.write(@chunk_path, @d, :mode => 'wb')
@c = Fluent::Plugin::Buffer::FileSingleChunk.new(gen_metadata, @chunk_path, :queued, nil)
end
teardown do
if @c
@c.purge rescue nil
end
File.unlink(@chunk_path) if File.exist?(@chunk_path)
end
test 'can load as queued chunk' do
assert @c
assert_equal :queued, @c.state
assert_equal @chunk_id, @c.unique_id
assert_equal gen_metadata, @c.metadata
assert_equal @d.bytesize, @c.bytesize
assert_equal 0, @c.size
assert_equal @d, @c.read
# Queued chunks are read-only at both the chunk and IO level.
assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
@c.append(["queued chunk is read only"])
end
assert_raise IOError do
@c.instance_eval{ @chunk }.write "chunk io is opened as read only"
end
end
end
# Compression round-trips for gzip and zstd chunks: append/concat store
# compressed bytes, while #read/#open/#write_to decompress by default and
# pass the raw compressed bytes through when asked via the compressed: option.
# Fix: the local variables were misspelled ("decomressed_data"/"comressed_data");
# renamed to decompressed_data/compressed_data. No behavior change.
sub_test_case 'compressed buffer' do
setup do
@src = 'text data for compressing' * 5
@gzipped_src = compress(@src)
@zstded_src = compress(@src, type: :zstd)
end
test '#append with compress option writes compressed data to chunk when compress is gzip' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
c.append([@src, @src], compress: :gzip)
c.commit
# check chunk is compressed
assert c.read(compressed: :gzip).size < [@src, @src].join("").size
assert_equal @src + @src, c.read
end
test '#open passes io object having decompressed data to a block when compress is gzip' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
c.concat(@gzipped_src, @src.size)
c.commit
decompressed_data = c.open do |io|
v = io.read
assert_equal @src, v
v
end
assert_equal @src, decompressed_data
end
test '#open with compressed option passes io object having decompressed data to a block when compress is gzip' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
c.concat(@gzipped_src, @src.size)
c.commit
compressed_data = c.open(compressed: :gzip) do |io|
v = io.read
assert_equal @gzipped_src, v
v
end
assert_equal @gzipped_src, compressed_data
end
test '#write_to writes decompressed data when compress is gzip' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
c.concat(@gzipped_src, @src.size)
c.commit
assert_equal @src, c.read
assert_equal @gzipped_src, c.read(compressed: :gzip)
io = StringIO.new
c.write_to(io)
assert_equal @src, io.string
end
test '#write_to with compressed option writes compressed data when compress is gzip' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
c.concat(@gzipped_src, @src.size)
c.commit
assert_equal @src, c.read
assert_equal @gzipped_src, c.read(compressed: :gzip)
io = StringIO.new
io.set_encoding(Encoding::ASCII_8BIT)
c.write_to(io, compressed: :gzip)
assert_equal @gzipped_src, io.string
end
test '#append with compress option writes compressed data to chunk when compress is zstd' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :zstd)
c.append([@src, @src], compress: :zstd)
c.commit
# check chunk is compressed
assert c.read(compressed: :zstd).size < [@src, @src].join("").size
assert_equal @src + @src, c.read
end
test '#open passes io object having decompressed data to a block when compress is zstd' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :zstd)
c.concat(@zstded_src, @src.size)
c.commit
decompressed_data = c.open do |io|
v = io.read
assert_equal @src, v
v
end
assert_equal @src, decompressed_data
end
test '#open with compressed option passes io object having decompressed data to a block when compress is zstd' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :zstd)
c.concat(@zstded_src, @src.size)
c.commit
compressed_data = c.open(compressed: :zstd) do |io|
v = io.read
assert_equal @zstded_src, v
v
end
assert_equal @zstded_src, compressed_data
end
test '#write_to writes decompressed data when compress is zstd' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :zstd)
c.concat(@zstded_src, @src.size)
c.commit
assert_equal @src, c.read
assert_equal @zstded_src, c.read(compressed: :zstd)
io = StringIO.new
c.write_to(io)
assert_equal @src, io.string
end
test '#write_to with compressed option writes compressed data when compress is zstd' do
c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :zstd)
c.concat(@zstded_src, @src.size)
c.commit
assert_equal @src, c.read
assert_equal @zstded_src, c.read(compressed: :zstd)
io = StringIO.new
io.set_encoding(Encoding::ASCII_8BIT)
c.write_to(io, compressed: :zstd)
assert_equal @zstded_src, io.string
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_formatter_ltsv.rb | test/plugin/test_formatter_ltsv.rb | require_relative '../helper'
require 'fluent/test/driver/formatter'
require 'fluent/plugin/formatter_ltsv'
# Tests for Fluent::Plugin::LabeledTSVFormatter (LTSV output formatter):
# default and custom delimiters, newline handling, and the add_newline flag.
class LabeledTSVFormatterTest < ::Test::Unit::TestCase
  def setup
    @time = event_time
  end

  # Builds a formatter test driver configured with +conf+.
  def create_driver(conf = "")
    Fluent::Test::Driver::Formatter.new(Fluent::Plugin::LabeledTSVFormatter).configure(conf)
  end

  # Fixed tag used by all format tests.
  def tag
    "tag"
  end

  # Fixed record used by all format tests.
  def record
    {'message' => 'awesome', 'greeting' => 'hello'}
  end

  def test_config_params
    default_driver = create_driver
    assert_equal "\t", default_driver.instance.delimiter
    assert_equal ":", default_driver.instance.label_delimiter
    assert_equal true, default_driver.instance.add_newline

    custom_driver = create_driver(
      'delimiter' => ',',
      'label_delimiter' => '=',
      'add_newline' => false,
    )
    assert_equal ",", custom_driver.instance.delimiter
    assert_equal "=", custom_driver.instance.label_delimiter
    assert_equal false, custom_driver.instance.add_newline
  end

  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  def test_format(data)
    newline_conf, newline = data
    driver = create_driver({"newline" => newline_conf})
    assert_equal("message:awesome\tgreeting:hello#{newline}",
                 driver.instance.format(tag, @time, record))
  end

  def test_format_without_newline
    driver = create_driver('add_newline' => false)
    assert_equal("message:awesome\tgreeting:hello",
                 driver.instance.format(tag, @time, record))
  end

  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  def test_format_with_customized_delimiters(data)
    newline_conf, newline = data
    driver = create_driver(
      'delimiter' => ',',
      'label_delimiter' => '=',
      'newline' => newline_conf,
    )
    assert_equal("message=awesome,greeting=hello#{newline}",
                 driver.instance.format(tag, @time, record))
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_exec_filter.rb | test/plugin/test_out_exec_filter.rb | require_relative '../helper'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_exec_filter'
require 'fileutils'
class ExecFilterOutputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
end
# Section-style configuration: inject/format shape records on the way into
# the child process; parse/extract rebuild events on the way out.
CONFIG = %[
command cat
num_children 3
<inject>
tag_key tag
time_key time_in
time_type string
time_format %Y-%m-%d %H:%M:%S
</inject>
<format>
keys ["time_in", "tag", "k1"]
</format>
<parse>
keys ["time_out", "tag", "k2"]
</parse>
<extract>
tag_key tag
time_key time_out
time_type string
time_format %Y-%m-%d %H:%M:%S
</extract>
]
# The same settings expressed with the older flat (compat) parameters.
CONFIG_COMPAT = %[
command cat
in_keys time_in,tag,k1
out_keys time_out,tag,k2
tag_key tag
in_time_key time_in
out_time_key time_out
time_format %Y-%m-%d %H:%M:%S
localtime
num_children 3
]
def create_driver(conf)
Fluent::Test::Driver::Output.new(Fluent::Plugin::ExecFilterOutput).configure(conf)
end
# Detect whether this platform's sed(1) accepts --unbuffered (GNU sed does,
# BSD sed does not) so tests can build a portable sed command line.
SED_SUPPORT_UNBUFFERED_OPTION = ->(){
system("echo xxx | sed --unbuffered -l -e 's/x/y/g' >#{IO::NULL} 2>&1")
$?.success?
}.call
SED_UNBUFFERED_OPTION = SED_SUPPORT_UNBUFFERED_OPTION ? '--unbuffered' : ''
# Every data-driven test below runs once with the section-style config and
# once with the flat compat config; both must behave identically.
data(
'with sections' => CONFIG,
'traditional' => CONFIG_COMPAT,
)
test 'configure' do |conf|
d = create_driver(conf)
assert_false d.instance.parser.estimate_current_event
assert_equal ["time_in","tag","k1"], d.instance.formatter.keys
assert_equal ["time_out","tag","k2"], d.instance.parser.keys
assert_equal "tag", d.instance.inject_config.tag_key
assert_equal "tag", d.instance.extract_config.tag_key
assert_equal "time_in", d.instance.inject_config.time_key
assert_equal "time_out", d.instance.extract_config.time_key
assert_equal "%Y-%m-%d %H:%M:%S", d.instance.inject_config.time_format
assert_equal "%Y-%m-%d %H:%M:%S", d.instance.extract_config.time_format
assert_equal true, d.instance.inject_config.localtime
assert_equal 3, d.instance.num_children
d = create_driver %[
command sed -l -e s/foo/bar/
in_keys time,k1
out_keys time,k2
tag xxx
time_key time
num_children 3
]
assert_equal "sed -l -e s/foo/bar/", d.instance.command
d = create_driver(conf + %[
remove_prefix before
add_prefix after
])
assert_equal "before", d.instance.remove_prefix
assert_equal "after" , d.instance.add_prefix
end
data(
'with sections' => CONFIG,
'traditional' => CONFIG_COMPAT,
)
test 'emit events with TSV format' do |conf|
d = create_driver(conf)
time = event_time("2011-01-02 13:14:15")
d.run(default_tag: 'test', expect_emits: 2, timeout: 10) do
# sleep 0.1 until d.instance.children && !d.instance.children.empty? && d.instance.children.all?{|c| c.finished == false }
d.feed(time, {"k1"=>1})
d.feed(time, {"k1"=>2})
end
# `cat` echoes each TSV line back, so every fed record is re-emitted.
assert_equal "2011-01-02 13:14:15\ttest\t1\n", d.formatted[0]
assert_equal "2011-01-02 13:14:15\ttest\t2\n", d.formatted[1]
events = d.events
assert_equal 2, events.length
assert_equal_event_time time, events[0][1]
assert_equal ["test", time, {"k2"=>"1"}], events[0]
assert_equal_event_time time, events[1][1]
assert_equal ["test", time, {"k2"=>"2"}], events[1]
end
# Without a time_format, time round-trips as a unix timestamp.
CONFIG_WITHOUT_TIME_FORMAT = %[
command cat
num_children 3
tag xxx
<inject>
time_key time
time_type unixtime
</inject>
<format>
keys time,k1
</format>
<parse>
keys time,k2
time_key time
time_type unixtime
</parse>
]
CONFIG_WITHOUT_TIME_FORMAT_COMPAT = %[
command cat
in_keys time,k1
out_keys time,k2
tag xxx
time_key time
num_children 3
]
data(
'with sections' => CONFIG_WITHOUT_TIME_FORMAT,
'traditional' => CONFIG_WITHOUT_TIME_FORMAT_COMPAT,
)
test 'emit events without time format configuration' do |conf|
d = create_driver(conf)
time = event_time("2011-01-02 13:14:15 +0900")
d.run(default_tag: 'test', expect_emits: 2, timeout: 10) do
d.feed(time, {"k1"=>1})
d.feed(time, {"k1"=>2})
end
assert_equal "1293941655\t1\n", d.formatted[0]
assert_equal "1293941655\t2\n", d.formatted[1]
events = d.events
assert_equal 2, events.length
assert_equal_event_time time, events[0][1]
assert_equal ["xxx", time, {"k2"=>"1"}], events[0]
assert_equal_event_time time, events[1][1]
assert_equal ["xxx", time, {"k2"=>"2"}], events[1]
end
# Child process is grep: records matching "poo" are filtered out, so fewer
# events come back than were fed.
CONFIG_TO_DO_GREP = %[
command grep --line-buffered -v poo
num_children 3
tag xxx
<inject>
time_key time
time_type unixtime
</inject>
<format>
keys time, val1
</format>
<parse>
keys time, val2
time_key time
time_type unixtime
</parse>
]
CONFIG_TO_DO_GREP_COMPAT = %[
command grep --line-buffered -v poo
in_keys time,val1
out_keys time,val2
tag xxx
time_key time
num_children 3
]
data(
'with sections' => CONFIG_TO_DO_GREP,
'traditional' => CONFIG_TO_DO_GREP_COMPAT,
)
test 'emit events through grep command' do |conf|
d = create_driver(conf)
time = event_time("2011-01-02 13:14:15 +0900")
d.run(default_tag: 'test', expect_emits: 1, timeout: 10) do
d.feed(time, {"val1"=>"sed-ed value poo"})
d.feed(time, {"val1"=>"sed-ed value foo"})
end
# Both records are formatted and sent to grep...
assert_equal "1293941655\tsed-ed value poo\n", d.formatted[0]
assert_equal "1293941655\tsed-ed value foo\n", d.formatted[1]
events = d.events
# ...but only the non-"poo" record survives grep -v and is re-emitted.
assert_equal 1, events.length
assert_equal_event_time time, events[0][1]
assert_equal ["xxx", time, {"val2"=>"sed-ed value foo"}], events[0]
end
# Child process is sed: "foo" is rewritten to "bar", all records pass.
CONFIG_TO_DO_SED = %[
command sed #{SED_UNBUFFERED_OPTION} -l -e s/foo/bar/
num_children 3
tag xxx
<inject>
time_key time
time_type unixtime
</inject>
<format>
keys time, val1
</format>
<parse>
keys time, val2
time_key time
time_type unixtime
</parse>
]
CONFIG_TO_DO_SED_COMPAT = %[
command sed #{SED_UNBUFFERED_OPTION} -l -e s/foo/bar/
in_keys time,val1
out_keys time,val2
tag xxx
time_key time
num_children 3
]
data(
'with sections' => CONFIG_TO_DO_SED,
'traditional' => CONFIG_TO_DO_SED_COMPAT,
)
test 'emit events through sed command' do |conf|
d = create_driver(conf)
time = event_time("2011-01-02 13:14:15 +0900")
d.run(default_tag: 'test', expect_emits: 1, timeout: 10) do
d.feed(time, {"val1"=>"sed-ed value poo"})
d.feed(time, {"val1"=>"sed-ed value foo"})
end
assert_equal "1293941655\tsed-ed value poo\n", d.formatted[0]
assert_equal "1293941655\tsed-ed value foo\n", d.formatted[1]
events = d.events
assert_equal 2, events.length
assert_equal_event_time time, events[0][1]
assert_equal ["xxx", time, {"val2"=>"sed-ed value poo"}], events[0]
assert_equal_event_time time, events[1][1]
assert_equal ["xxx", time, {"val2"=>"sed-ed value bar"}], events[1]
end
# remove_prefix strips "input." before formatting; add_prefix prepends
# "output." to the extracted tag on re-emit.
CONFIG_TO_DO_SED_WITH_TAG_MODIFY = %[
command sed #{SED_UNBUFFERED_OPTION} -l -e s/foo/bar/
num_children 3
remove_prefix input
add_prefix output
<inject>
tag_key tag
time_key time
</inject>
<format>
keys tag, time, val1
</format>
<parse>
keys tag, time, val2
</parse>
<extract>
tag_key tag
time_key time
</extract>
]
CONFIG_TO_DO_SED_WITH_TAG_MODIFY_COMPAT = %[
command sed #{SED_UNBUFFERED_OPTION} -l -e s/foo/bar/
in_keys tag,time,val1
remove_prefix input
out_keys tag,time,val2
add_prefix output
tag_key tag
time_key time
num_children 3
]
data(
'with sections' => CONFIG_TO_DO_SED_WITH_TAG_MODIFY,
'traditional' => CONFIG_TO_DO_SED_WITH_TAG_MODIFY_COMPAT,
)
test 'emit events with add/remove tag prefix' do |conf|
d = create_driver(conf)
time = event_time("2011-01-02 13:14:15 +0900")
d.run(default_tag: 'input.test', expect_emits: 2, timeout: 10) do
d.feed(time, {"val1"=>"sed-ed value foo"})
d.feed(time, {"val1"=>"sed-ed value poo"})
end
assert_equal "test\t1293941655\tsed-ed value foo\n", d.formatted[0]
assert_equal "test\t1293941655\tsed-ed value poo\n", d.formatted[1]
events = d.events
assert_equal 2, events.length
assert_equal_event_time time, events[0][1]
assert_equal ["output.test", time, {"val2"=>"sed-ed value bar"}], events[0]
assert_equal_event_time time, events[1][1]
assert_equal ["output.test", time, {"val2"=>"sed-ed value poo"}], events[1]
end
# Child output parsed as JSON: tag/time are extracted from JSON fields of
# the child's stdout rather than from fixed TSV columns.
CONFIG_JSON = %[
command cat
<format>
@type tsv
keys message
</format>
<parse>
@type json
stream_buffer_size 1
</parse>
<extract>
tag_key tag
time_key time
</extract>
]
CONFIG_JSON_COMPAT = %[
command cat
in_keys message
out_format json
out_stream_buffer_size 1
time_key time
tag_key tag
]
data(
'with sections' => CONFIG_JSON,
'traditional' => CONFIG_JSON_COMPAT,
)
test 'using json format' do |conf|
d = create_driver(conf)
time = event_time("2011-01-02 13:14:15 +0900")
d.run(default_tag: 'input.test', expect_emits: 1, timeout: 10) do
i = d.instance
assert{ i.router }
d.feed(time, {"message"=>%[{"time":#{time},"tag":"t1","k1":"v1"}]})
end
assert_equal '{"time":1293941655,"tag":"t1","k1":"v1"}' + "\n", d.formatted[0]
events = d.events
assert_equal 1, events.length
assert_equal_event_time time, events[0][1]
# tag and time are consumed by <extract>; only k1 remains in the record.
assert_equal ["t1", time, {"k1"=>"v1"}], events[0]
end
CONFIG_JSON_WITH_FLOAT_TIME = %[
command cat
<format>
@type tsv
keys message
</format>
<parse>
@type json
stream_buffer_size 1
</parse>
<extract>
tag_key tag
time_key time
</extract>
]
CONFIG_JSON_WITH_FLOAT_TIME_COMPAT = %[
command cat
in_keys message
out_format json
out_stream_buffer_size 1
time_key time
tag_key tag
]
data(
'with sections' => CONFIG_JSON_WITH_FLOAT_TIME,
'traditional' => CONFIG_JSON_WITH_FLOAT_TIME_COMPAT,
)
test 'using json format with float time' do |conf|
d = create_driver(conf)
time = event_time("2011-01-02 13:14:15.123 +0900")
d.run(default_tag: 'input.test', expect_emits: 1, timeout: 10) do
# Fed at time+10; the extracted "time" field must win over the feed time.
d.feed(time + 10, {"message"=>%[{"time":#{time.sec}.#{time.nsec},"tag":"t1","k1":"v1"}]})
end
assert_equal '{"time":1293941655.123000000,"tag":"t1","k1":"v1"}' + "\n", d.formatted[0]
events = d.events
assert_equal 1, events.length
assert_equal_event_time time, events[0][1]
assert_equal ["t1", time, {"k1"=>"v1"}], events[0]
end
CONFIG_JSON_WITH_TIME_FORMAT = %[
command cat
<format>
@type tsv
keys message
</format>
<parse>
@type json
stream_buffer_size 1
</parse>
<extract>
tag_key tag
time_key time
time_type string
time_format %d/%b/%Y %H:%M:%S.%N %z
</extract>
]
CONFIG_JSON_WITH_TIME_FORMAT_COMPAT = %[
command cat
in_keys message
out_format json
out_stream_buffer_size 1
time_key time
time_format %d/%b/%Y %H:%M:%S.%N %z
tag_key tag
]
data(
'with sections' => CONFIG_JSON_WITH_TIME_FORMAT,
'traditional' => CONFIG_JSON_WITH_TIME_FORMAT_COMPAT,
)
test 'using json format with custom time format' do |conf|
d = create_driver(conf)
time_str = "28/Feb/2013 12:00:00.123456789 +0900"
time = event_time(time_str, format: "%d/%b/%Y %H:%M:%S.%N %z")
d.run(default_tag: 'input.test', expect_emits: 1, timeout: 10) do
d.feed(time + 10, {"message"=>%[{"time":"#{time_str}","tag":"t1","k1":"v1"}]})
end
assert_equal '{"time":"28/Feb/2013 12:00:00.123456789 +0900","tag":"t1","k1":"v1"}' + "\n", d.formatted[0]
events = d.events
assert_equal 1, events.length
assert_equal_event_time time, events[0][1]
assert_equal ["t1", time, {"k1"=>"v1"}], events[0]
end
# The child command appends its own PID to each line so tests can observe
# which of the two child processes handled each record.
CONFIG_ROUND_ROBIN = %[
command ruby -e 'STDOUT.sync = true; STDIN.each_line{|line| puts line.chomp + "\t" + Process.pid.to_s }'
num_children 2
<inject>
tag_key tag
time_key time_in
time_type string
time_format %Y-%m-%d %H:%M:%S
</inject>
<format>
keys ["time_in", "tag", "k1"]
</format>
<parse>
keys ["time_out", "tag", "k2", "child_pid"]
</parse>
<extract>
tag_key tag
time_key time_out
time_type string
time_format %Y-%m-%d %H:%M:%S
</extract>
]
CONFIG_ROUND_ROBIN_COMPAT = %[
command ruby -e 'STDOUT.sync = true; STDIN.each_line{|line| puts line.chomp + "\t" + Process.pid.to_s }'
in_keys time_in,tag,k1
out_keys time_out,tag,k2,child_pid
tag_key tag
in_time_key time_in
out_time_key time_out
time_format %Y-%m-%d %H:%M:%S
localtime
num_children 2
]
data(
'with sections' => CONFIG_ROUND_ROBIN,
'traditional' => CONFIG_ROUND_ROBIN_COMPAT,
)
test 'using child processes by round robin' do |conf|
d = create_driver(conf)
time = event_time('2011-01-02 13:14:15')
# Flush + sleep between feeds so each record reaches a child separately,
# making the round-robin assignment deterministic.
d.run(default_tag: 'test', expect_emits: 4) do
d.feed(time, {"k1" => 0})
d.flush
sleep 0.5
d.feed(time, {"k1" => 1})
d.flush
sleep 0.5
d.feed(time, {"k1" => 2})
d.flush
sleep 0.5
d.feed(time, {"k1" => 3})
end
assert_equal "2011-01-02 13:14:15\ttest\t0\n", d.formatted[0]
assert_equal "2011-01-02 13:14:15\ttest\t1\n", d.formatted[1]
assert_equal "2011-01-02 13:14:15\ttest\t2\n", d.formatted[2]
assert_equal "2011-01-02 13:14:15\ttest\t3\n", d.formatted[3]
events = d.events
assert_equal 4, events.length
pid_list = []
events.each do |event|
pid = event[2]['child_pid']
pid_list << pid unless pid_list.include?(pid)
end
assert_equal 2, pid_list.size, "the number of pids should be same with number of child processes: #{pid_list.inspect}"
# Records alternate between the two children: 0,2 → first; 1,3 → second.
assert_equal pid_list[0], events[0][2]['child_pid']
assert_equal pid_list[1], events[1][2]['child_pid']
assert_equal pid_list[0], events[2][2]['child_pid']
assert_equal pid_list[1], events[3][2]['child_pid']
end
# child process exits per 3 lines
CONFIG_RESPAWN = %[
command ruby -e 'STDOUT.sync = true; proc = ->(){line = STDIN.readline.chomp; puts line + "\t" + Process.pid.to_s}; proc.call; proc.call; proc.call'
num_children 2
child_respawn -1
<inject>
tag_key tag
time_key time_in
time_type unixtime
</inject>
<format>
keys ["time_in", "tag", "k1"]
</format>
<parse>
keys ["time_out", "tag", "k2", "child_pid"]
</parse>
<extract>
tag_key tag
time_key time_out
time_type unixtime
</extract>
]
CONFIG_RESPAWN_COMPAT = %[
command ruby -e 'STDOUT.sync = true; proc = ->(){line = STDIN.readline.chomp; puts line + "\t" + Process.pid.to_s}; proc.call; proc.call; proc.call'
num_children 2
child_respawn -1
in_keys time_in,tag,k1
out_keys time_out,tag,k2,child_pid
tag_key tag
in_time_key time_in
out_time_key time_out
# time_format %Y-%m-%d %H:%M:%S
# localtime
]
data(
'with sections' => CONFIG_RESPAWN,
'traditional' => CONFIG_RESPAWN_COMPAT,
)
test 'emit events via child processes which exits sometimes' do |conf|
d = create_driver(conf)
time = event_time("2011-01-02 13:14:15")
countup = 0
d.run(start: true, shutdown: false)
assert_equal 2, d.instance.instance_eval{ @_child_process_processes.size }
# Each child exits after 3 lines; child_respawn -1 means respawn forever,
# so all 6 records must still be processed across respawns.
2.times do
d.run(default_tag: 'test', expect_emits: 3, timeout: 3, force_flush_retry: true, start: false, shutdown: false) do
d.feed(time, { "k1" => countup }); countup += 1
d.feed(time, { "k1" => countup }); countup += 1
d.feed(time, { "k1" => countup }); countup += 1
end
end
events = d.events
assert_equal 6, events.length
pid_list = []
events.each do |event|
pid = event[2]['child_pid']
pid_list << pid unless pid_list.include?(pid)
end
# the number of pids should be same with number of child processes
assert_equal 2, pid_list.size
logs = d.instance.log.out.logs
assert_equal 2, logs.count { |l| l.include?('child process exits with error code') }
assert_equal 2, logs.count { |l| l.include?('respawning child process') }
ensure
d.run(start: false, shutdown: true)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_udp.rb | test/plugin/test_in_udp.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_udp'
class UdpInputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
# Reserve a free UDP port for each test case.
@port = unused_port(protocol: :udp)
end
def teardown
@port = nil
end
# Minimal shared configuration; the ipv4/ipv6 variants add bind and a
# <parse>-compatible format regexp on top of it.
def base_config
%[
port #{@port}
tag udp
]
end
def ipv4_config
base_config + %!
bind 127.0.0.1
format /^\\[(?<time>[^\\]]*)\\] (?<message>.*)/
!
end
def ipv6_config
base_config + %!
bind ::1
format /^\\[(?<time>[^\\]]*)\\] (?<message>.*)/
!
end
def create_driver(conf)
  Fluent::Test::Driver::Input.new(Fluent::Plugin::UdpInput).configure(conf)
end

# Opens a UDP socket connected to host:port, choosing the address family
# from the resolved address. With a block, yields the socket and closes it
# afterwards (returning the block's value); without a block, returns the
# open socket.
def create_udp_socket(host, port)
  resolved = IPSocket.getaddress(host)
  family = IPAddr.new(resolved).ipv4? ? Socket::AF_INET : Socket::AF_INET6
  sock = UDPSocket.new(family)
  sock.do_not_reverse_lookup = false
  sock.connect(host, port)
  return sock unless block_given?
  begin
    yield sock
  ensure
    sock.close rescue nil
  end
end
data(
'ipv4' => ['127.0.0.1', :ipv4],
'ipv6' => ['::1', :ipv6],
)
test 'configure' do |data|
bind, protocol = data
conf = send("#{protocol}_config")
omit "IPv6 is not supported on this environment" if protocol == :ipv6 && !ipv6_enabled?
d = create_driver(conf)
assert_equal @port, d.instance.port
assert_equal bind, d.instance.bind
assert_equal 4096, d.instance.message_length_limit
assert_equal nil, d.instance.receive_buffer_size
end
test ' configure w/o parse section' do
assert_raise(Fluent::ConfigError.new("<parse> section is required.")) {
create_driver(base_config)
}
end
data(
'ipv4' => ['127.0.0.1', :ipv4],
'ipv6' => ['::1', :ipv6],
)
test 'time_format' do |data|
bind, protocol = data
conf = send("#{protocol}_config")
omit "IPv6 is not supported on this environment" if protocol == :ipv6 && !ipv6_enabled?
d = create_driver(conf)
tests = [
{'msg' => '[Sep 11 00:00:00] localhost logger: foo', 'expected' => event_time('Sep 11 00:00:00', format: '%b %d %H:%M:%S')},
{'msg' => '[Sep 1 00:00:00] localhost logger: foo', 'expected' => event_time('Sep 1 00:00:00', format: '%b %d %H:%M:%S')},
]
d.run(expect_records: 2) do
create_udp_socket(bind, @port) do |u|
tests.each do |test|
u.send(test['msg'], 0)
end
end
end
events = d.events
tests.each_with_index do |t, i|
assert_equal_event_time(t['expected'], events[i][1])
end
end
data(
'message_length_limit' => 'message_length_limit 2048',
'body_size_limit' => 'body_size_limit 2048'
)
test 'message_length_limit/body_size_limit compatibility' do |param|
d = create_driver(ipv4_config + param)
assert_equal 2048, d.instance.message_length_limit
end
data(
'none' => {
'format' => 'none',
'payloads' => ["tcptest1\n", "tcptest2\n"],
'expecteds' => [
{"message" => "tcptest1"},
{"message" => "tcptest2"},
],
},
'json' => {
'format' => 'json',
'payloads' => [
{'k' => 123, 'message' => 'tcptest1'}.to_json + "\n",
{'k' => 'tcptest2', 'message' => 456}.to_json + "\n",
],
'expecteds' => [
{'k' => 123, 'message' => 'tcptest1'},
{'k' => 'tcptest2', 'message' => 456},
],
},
'regexp' => {
'format' => '/^\\[(?<time>[^\\]]*)\\] (?<message>.*)/',
'payloads' => [
'[Sep 10 00:00:00] localhost: ' + 'x' * 100 + "\n",
'[Sep 10 00:00:00] localhost: ' + 'x' * 1024 + "\n"
],
'expecteds' => [
{"message" => 'localhost: ' + 'x' * 100},
{"message" => 'localhost: ' + 'x' * 1024},
],
},
)
test 'message size with format' do |data|
format = data['format']
payloads = data['payloads']
expecteds = data['expecteds']
d = create_driver(base_config + "format #{format}")
d.run(expect_records: 2) do
create_udp_socket('127.0.0.1', @port) do |u|
payloads.each do |payload|
u.send(payload, 0)
end
end
end
assert_equal 2, d.events.size
expecteds.each_with_index do |expected_record, i|
assert_equal "udp", d.events[i][0]
assert d.events[i][1].is_a?(Fluent::EventTime)
assert_equal expected_record, d.events[i][2]
end
end
test 'remove_newline' do
d = create_driver(base_config + %!
format none
remove_newline false
!)
payloads = ["test1\n", "test2\n"]
d.run(expect_records: 2) do
create_udp_socket('127.0.0.1', @port) do |u|
payloads.each do |payload|
u.send(payload, 0)
end
end
end
expecteds = payloads.map { |payload| {'message' => payload} }
assert_equal 2, d.events.size
expecteds.each_with_index do |expected_record, i|
assert_equal "udp", d.events[i][0]
assert d.events[i][1].is_a?(Fluent::EventTime)
assert_equal expected_record, d.events[i][2]
end
end
test 'source_hostname_key' do
d = create_driver(base_config + %!
format none
source_hostname_key host
!)
hostname = nil
d.run(expect_records: 1) do
create_udp_socket('127.0.0.1', @port) do |u|
u.send("test", 0)
hostname = u.peeraddr[2]
end
end
assert_equal 1, d.events.size
assert_equal "udp", d.events[0][0]
assert d.events[0][1].is_a?(Fluent::EventTime)
assert_equal hostname, d.events[0][2]['host']
end
test 'source_address_key' do
d = create_driver(base_config + %!
format none
source_address_key addr
!)
address = nil
d.run(expect_records: 1) do
create_udp_socket('127.0.0.1', @port) do |u|
u.send("test", 0)
address = u.peeraddr[3]
end
end
assert_equal 1, d.events.size
assert_equal "udp", d.events[0][0]
assert d.events[0][1].is_a?(Fluent::EventTime)
assert_equal address, d.events[0][2]['addr']
end
test 'receive_buffer_size' do
# doesn't check exact value because it depends on platform and condition
# check if default socket and in_udp's one without receive_buffer_size have same size buffer
d0 = create_driver(base_config + %!
format none
!)
d0.run do
sock = d0.instance.instance_variable_get(:@_servers)[0].server.instance_variable_get(:@sock)
begin
default_sock = UDPSocket.new
assert_equal(default_sock.getsockopt(Socket::SOL_SOCKET, Socket::SO_RCVBUF).int, sock.getsockopt(Socket::SOL_SOCKET, Socket::SO_RCVBUF).int)
ensure
default_sock.close
end
end
# check if default socket and in_udp's one with receive_buffer_size have different size buffer
d1 = create_driver(base_config + %!
format none
receive_buffer_size 1001
!)
d1.run do
sock = d1.instance.instance_variable_get(:@_servers)[0].server.instance_variable_get(:@sock)
begin
default_sock = UDPSocket.new
assert_not_equal(default_sock.getsockopt(Socket::SOL_SOCKET, Socket::SO_RCVBUF).int, sock.getsockopt(Socket::SOL_SOCKET, Socket::SO_RCVBUF).int)
ensure
default_sock.close
end
end
end
test 'message_length_limit' do
message_length_limit = 32
if Fluent.windows?
expected_records = ["0" * 30, "4" * 30]
else
expected_records = 1.upto(3).collect do |i|
"#{i}" * message_length_limit
end
expected_records.prepend("0" * 30)
expected_records.append("4" * 30)
end
d = create_driver(base_config + %!
format none
message_length_limit #{message_length_limit}
!)
d.run(expect_records: expected_records.size, timeout: 5) do
create_udp_socket('127.0.0.1', @port) do |u|
u.send("0" * 30 + "\n", 0)
1.upto(3) do |i|
u.send("#{i}" * 40 + "\n", 0)
end
u.send("4" * 30 + "\n", 0)
end
end
actual_records = d.events.collect do |event|
event[2]["message"]
end
assert_equal expected_records, actual_records
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_roundrobin.rb | test/plugin/test_out_roundrobin.rb | require_relative '../helper'
require 'fluent/test/driver/multi_output'
require 'fluent/plugin/out_roundrobin'
# Tests for Fluent::Plugin::RoundRobinOutput: <store> configuration, default
# and explicit weights, and round-robin distribution of fed events.
class RoundRobinOutputTest < Test::Unit::TestCase
  class << self
    # Make the test output plugins (out_test/out_test2 under test/scripts)
    # loadable before any test runs; shutdown restores $LOAD_PATH.
    def startup
      $LOAD_PATH.unshift File.expand_path(File.join(File.dirname(__FILE__), '..', 'scripts'))
      require 'fluent/plugin/out_test'
      require 'fluent/plugin/out_test2'
    end

    def shutdown
      $LOAD_PATH.shift
    end
  end

  def setup
    Fluent::Test.setup
  end

  # Three stores without weights: each defaults to weight 1.
  CONFIG = %[
    <store>
      @type test
      name c0
    </store>
    <store>
      @type test2
      name c1
    </store>
    <store>
      @type test
      name c2
    </store>
  ]

  # First two stores weighted 3; the last one keeps the default weight 1.
  CONFIG_WITH_WEIGHT = %[
    <store>
      @type test
      name c0
      weight 3
    </store>
    <store>
      @type test2
      name c1
      weight 3
    </store>
    <store>
      @type test
      name c2
    </store>
  ]

  def create_driver(conf = CONFIG)
    Fluent::Test::Driver::MultiOutput.new(Fluent::Plugin::RoundRobinOutput).configure(conf)
  end

  def test_configure
    d = create_driver
    outputs = d.instance.outputs
    assert_equal 3, outputs.size
    assert_equal Fluent::Plugin::TestOutput, outputs[0].class
    assert_equal Fluent::Plugin::Test2Output, outputs[1].class
    assert_equal Fluent::Plugin::TestOutput, outputs[2].class
    # out_test2 exposes a router; out_test does not (see test/scripts).
    assert !outputs[0].has_router?
    assert outputs[1].has_router?
    assert outputs[1].router
    assert !outputs[2].has_router?
    assert_equal "c0", outputs[0].name
    assert_equal "c1", outputs[1].name
    assert_equal "c2", outputs[2].name
    # Without explicit weights everything defaults to 1.
    weights = d.instance.weights
    assert_equal 3, weights.size
    assert_equal 1, weights[0]
    assert_equal 1, weights[1]
    assert_equal 1, weights[2]
    d = create_driver(CONFIG_WITH_WEIGHT)
    outputs = d.instance.outputs
    assert_equal 3, outputs.size
    assert_equal Fluent::Plugin::TestOutput, outputs[0].class
    assert_equal Fluent::Plugin::Test2Output, outputs[1].class
    assert_equal Fluent::Plugin::TestOutput, outputs[2].class
    assert_equal "c0", outputs[0].name
    assert_equal "c1", outputs[1].name
    assert_equal "c2", outputs[2].name
    weights = d.instance.weights
    assert_equal 3, weights.size
    assert_equal 3, weights[0]
    assert_equal 3, weights[1]
    assert_equal 1, weights[2]
  end

  # Equal weights: events 1..4 cycle through stores 0,1,2 and wrap to 0.
  def test_events_feeded_to_plugins_by_roundrobin
    d = create_driver
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a" => 1})
      d.feed(time, {"a" => 2})
      d.feed(time, {"a" => 3})
      d.feed(time, {"a" => 4})
    end
    os = d.instance.outputs
    assert_equal [
      [time, {"a"=>1}],
      [time, {"a"=>4}],
    ], os[0].events
    assert_equal [
      [time, {"a"=>2}],
    ], os[1].events
    assert_equal [
      [time, {"a"=>3}],
    ], os[2].events
  end

  # Weighted 3:3:1 — out of 14 events the split must be 6/6/2.
  def test_events_feeded_with_specified_weights
    d = create_driver(CONFIG_WITH_WEIGHT)
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      14.times do |i|
        d.feed(time, {"a" => i})
      end
    end
    os = d.instance.outputs
    assert_equal 6, os[0].events.size  # weight=3
    assert_equal 6, os[1].events.size  # weight=3
    assert_equal 2, os[2].events.size  # weight=1
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_storage_local.rb | test/plugin/test_storage_local.rb | require_relative '../helper'
require 'fluent/plugin/storage_local'
require 'fluent/plugin/input'
require 'fluent/system_config'
require 'fileutils'
# Tests for Fluent::Plugin::LocalStorage via the storage plugin helper:
# on-memory fallback, file-backed persistence, root_dir/@id path derivation,
# multi-worker behavior, the `persistent` flag and file permissions.
class LocalStorageTest < Test::Unit::TestCase
  TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/tmp/storage_local#{ENV['TEST_ENV_NUMBER']}")

  # Minimal input plugin that declares a <storage> section defaulting to the
  # 'local' storage type; used as the owner for storage_create.
  class MyInput < Fluent::Plugin::Input
    helpers :storage
    config_section :storage do
      config_set_default :@type, 'local'
    end
  end

  setup do
    FileUtils.rm_rf(TMP_DIR)
    FileUtils.mkdir_p(TMP_DIR)
    Fluent::Test.setup
    @d = MyInput.new
  end

  teardown do
    # Walk the owner plugin through every remaining lifecycle phase so each
    # test leaves a fully terminated instance behind.
    @d.stop unless @d.stopped?
    @d.before_shutdown unless @d.before_shutdown?
    @d.shutdown unless @d.shutdown?
    @d.after_shutdown unless @d.after_shutdown?
    @d.close unless @d.closed?
    @d.terminate unless @d.terminated?
  end

  sub_test_case 'without any configuration' do
    # No @id and no path: storage degrades to an on-memory store whose
    # contents do not survive a restart.
    test 'works as on-memory storage' do
      conf = config_element()
      @d.configure(conf)
      @d.start
      @p = @d.storage_create()
      assert_nil @p.path
      assert @p.store.empty?
      assert_nil @p.get('key1')
      assert_equal 'EMPTY', @p.fetch('key1', 'EMPTY')
      @p.put('key1', '1')
      assert_equal '1', @p.get('key1')
      @p.update('key1') do |v|
        (v.to_i * 2).to_s
      end
      assert_equal '2', @p.get('key1')
      @p.save # on-memory storage does nothing...
      @d.stop; @d.before_shutdown; @d.shutdown; @d.after_shutdown; @d.close; @d.terminate
      # re-create to reload storage contents
      @d = MyInput.new
      @d.configure(conf)
      @d.start
      @p = @d.storage_create()
      assert @p.store.empty?
    end

    test 'warns about on-memory storage if autosave is true' do
      @d.configure(config_element())
      @d.start
      @p = @d.storage_create()
      logs = @d.log.out.logs
      assert{ logs.any?{|log| log.include?("[warn]: both of Plugin @id and path for <storage> are not specified. Using on-memory store.") } }
    end

    test 'show info log about on-memory storage if autosave is false' do
      @d.configure(config_element('ROOT', '', {}, [config_element('storage', '', {'autosave' => 'false'})]))
      @d.start
      @p = @d.storage_create()
      logs = @d.log.out.logs
      assert{ logs.any?{|log| log.include?("[info]: both of Plugin @id and path for <storage> are not specified. Using on-memory store.") } }
    end
  end

  sub_test_case 'configured with file path' do
    # An explicit path: save writes JSON to disk and a fresh instance reads
    # the same contents back.
    test 'works as storage which stores data on disk' do
      storage_path = File.join(TMP_DIR, 'my_store.json')
      conf = config_element('ROOT', '', {}, [config_element('storage', '', {'path' => storage_path})])
      @d.configure(conf)
      @d.start
      @p = @d.storage_create()
      assert_equal storage_path, @p.path
      assert @p.store.empty?
      assert_nil @p.get('key1')
      assert_equal 'EMPTY', @p.fetch('key1', 'EMPTY')
      @p.put('key1', '1')
      assert_equal '1', @p.get('key1')
      @p.update('key1') do |v|
        (v.to_i * 2).to_s
      end
      assert_equal '2', @p.get('key1')
      @p.save # stores all data into file
      assert File.exist?(storage_path)
      @p.put('key2', 4)
      @d.stop; @d.before_shutdown; @d.shutdown; @d.after_shutdown; @d.close; @d.terminate
      assert_equal({'key1' => '2', 'key2' => 4}, File.open(storage_path){|f| JSON.parse(f.read) })
      # re-create to reload storage contents
      @d = MyInput.new
      @d.configure(conf)
      @d.start
      @p = @d.storage_create()
      assert_false @p.store.empty?
      assert_equal '2', @p.get('key1')
      assert_equal 4, @p.get('key2')
    end

    # A single fixed file cannot be shared by multiple workers.
    test 'raise configuration error if a file specified with multi worker configuration' do
      storage_path = File.join(TMP_DIR, 'my_store.json')
      conf = config_element('ROOT', '', {}, [config_element('storage', '', {'path' => storage_path})])
      @d.system_config_override('workers' => 3)
      assert_raise Fluent::ConfigError.new("Plugin 'local' does not support multi workers configuration (Fluent::Plugin::LocalStorage)") do
        @d.configure(conf)
      end
    end
  end

  sub_test_case 'configured with root-dir and plugin id' do
    # With root_dir + @id the storage file path is derived as
    # <root_dir>/worker<N>/<@id>/storage.json.
    test 'works as storage which stores data under root dir' do
      root_dir = File.join(TMP_DIR, 'root')
      expected_storage_path = File.join(root_dir, 'worker0', 'local_storage_test', 'storage.json')
      conf = config_element('ROOT', '', {'@id' => 'local_storage_test'})
      Fluent::SystemConfig.overwrite_system_config('root_dir' => root_dir) do
        @d.configure(conf)
      end
      @d.start
      @p = @d.storage_create()
      assert_equal expected_storage_path, @p.path
      assert @p.store.empty?
      assert_nil @p.get('key1')
      assert_equal 'EMPTY', @p.fetch('key1', 'EMPTY')
      @p.put('key1', '1')
      assert_equal '1', @p.get('key1')
      @p.update('key1') do |v|
        (v.to_i * 2).to_s
      end
      assert_equal '2', @p.get('key1')
      @p.save # stores all data into file
      assert File.exist?(expected_storage_path)
      @p.put('key2', 4)
      @d.stop; @d.before_shutdown; @d.shutdown; @d.after_shutdown; @d.close; @d.terminate
      assert_equal({'key1' => '2', 'key2' => 4}, File.open(expected_storage_path){|f| JSON.parse(f.read) })
      # re-create to reload storage contents
      @d = MyInput.new
      Fluent::SystemConfig.overwrite_system_config('root_dir' => root_dir) do
        @d.configure(conf)
      end
      @d.start
      @p = @d.storage_create()
      assert_false @p.store.empty?
      assert_equal '2', @p.get('key1')
      assert_equal 4, @p.get('key2')
    end

    # A usage string becomes part of the file name: storage.<usage>.json.
    test 'works with customized path by specified usage' do
      root_dir = File.join(TMP_DIR, 'root')
      expected_storage_path = File.join(root_dir, 'worker0', 'local_storage_test', 'storage.usage.json')
      conf = config_element('ROOT', 'usage', {'@id' => 'local_storage_test'})
      Fluent::SystemConfig.overwrite_system_config('root_dir' => root_dir) do
        @d.configure(conf)
      end
      @d.start
      @p = @d.storage_create(usage: 'usage', type: 'local')
      assert_equal expected_storage_path, @p.path
      assert @p.store.empty?
    end
  end

  sub_test_case 'configured with root-dir and plugin id, and multi workers' do
    # Derived paths are per-worker, so multi-worker setups are fine here.
    test 'works as storage which stores data under root dir, also in workers' do
      root_dir = File.join(TMP_DIR, 'root')
      expected_storage_path = File.join(root_dir, 'worker1', 'local_storage_test', 'storage.json')
      conf = config_element('ROOT', '', {'@id' => 'local_storage_test'})
      with_worker_config(root_dir: root_dir, workers: 2, worker_id: 1) do
        @d.configure(conf)
      end
      @d.start
      @p = @d.storage_create()
      assert_equal expected_storage_path, @p.path
      assert @p.store.empty?
    end
  end

  sub_test_case 'persistent specified' do
    # persistent=true flushes every mutation to disk immediately.
    test 'works well with path' do
      omit "It's hard to test on Windows" if Fluent.windows?
      storage_path = File.join(TMP_DIR, 'my_store.json')
      conf = config_element('ROOT', '', {}, [config_element('storage', '', {'path' => storage_path, 'persistent' => 'true'})])
      @d.configure(conf)
      @d.start
      @p = @d.storage_create()
      assert_equal storage_path, @p.path
      assert @p.store.empty?
      assert_nil @p.get('key1')
      assert_equal 'EMPTY', @p.fetch('key1', 'EMPTY')
      @p.put('key1', '1')
      assert_equal({'key1' => '1'}, File.open(storage_path){|f| JSON.parse(f.read) })
      @p.update('key1') do |v|
        (v.to_i * 2).to_s
      end
      assert_equal({'key1' => '2'}, File.open(storage_path){|f| JSON.parse(f.read) })
    end

    test 'works well with root-dir and plugin id' do
      omit "It's hard to test on Windows" if Fluent.windows?
      root_dir = File.join(TMP_DIR, 'root')
      expected_storage_path = File.join(root_dir, 'worker0', 'local_storage_test', 'storage.json')
      conf = config_element('ROOT', '', {'@id' => 'local_storage_test'}, [config_element('storage', '', {'persistent' => 'true'})])
      Fluent::SystemConfig.overwrite_system_config('root_dir' => root_dir) do
        @d.configure(conf)
      end
      @d.start
      @p = @d.storage_create()
      assert_equal expected_storage_path, @p.path
      assert @p.store.empty?
      assert_nil @p.get('key1')
      assert_equal 'EMPTY', @p.fetch('key1', 'EMPTY')
      @p.put('key1', '1')
      assert_equal({'key1' => '1'}, File.open(expected_storage_path){|f| JSON.parse(f.read) })
      @p.update('key1') do |v|
        (v.to_i * 2).to_s
      end
      assert_equal({'key1' => '2'}, File.open(expected_storage_path){|f| JSON.parse(f.read) })
    end

    # persistent=true is incompatible with the on-memory fallback.
    test 'raises error if it is configured to use on-memory storage' do
      assert_raise Fluent::ConfigError.new("Plugin @id or path for <storage> required when 'persistent' is true") do
        @d.configure(config_element('ROOT', '', {}, [config_element('storage', '', {'persistent' => 'true'})]))
      end
    end
  end

  sub_test_case 'with various configurations' do
    test 'mode and dir_mode controls permissions of stored data' do
      omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
      storage_path = File.join(TMP_DIR, 'dir', 'my_store.json')
      storage_conf = {
        'path' => storage_path,
        'mode' => '0600',
        'dir_mode' => '0777',
      }
      conf = config_element('ROOT', '', {}, [config_element('storage', '', storage_conf)])
      @d.configure(conf)
      @d.start
      @p = @d.storage_create()
      assert_equal storage_path, @p.path
      assert @p.store.empty?
      @p.put('key1', '1')
      assert_equal '1', @p.get('key1')
      @p.save # stores all data into file
      assert File.exist?(storage_path)
      # % 01000 masks off the file-type bits, leaving only the permission bits.
      assert_equal 0600, (File.stat(storage_path).mode % 01000)
      assert_equal 0777, (File.stat(File.dirname(storage_path)).mode % 01000)
    end

    test 'pretty_print controls to write data in files as human-easy-to-read' do
      storage_path = File.join(TMP_DIR, 'dir', 'my_store.json')
      storage_conf = {
        'path' => storage_path,
        'pretty_print' => 'true',
      }
      conf = config_element('ROOT', '', {}, [config_element('storage', '', storage_conf)])
      @d.configure(conf)
      @d.start
      @p = @d.storage_create()
      assert_equal storage_path, @p.path
      assert @p.store.empty?
      @p.put('key1', '1')
      assert_equal '1', @p.get('key1')
      @p.save # stores all data into file
      expected_pp_json = <<JSON.chomp
{
  "key1": "1"
}
JSON
      assert_equal expected_pp_json, File.read(storage_path)
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_monitor_agent.rb | test/plugin/test_in_monitor_agent.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_monitor_agent'
require 'fluent/engine'
require 'fluent/config'
require 'fluent/event_router'
require 'fluent/supervisor'
require 'fluent/version'
require 'net/http'
require 'json'
require_relative '../test_plugin_classes'
class MonitorAgentInputTest < Test::Unit::TestCase
include FuzzyAssert
CONFIG_DIR = File.expand_path('../tmp/in_monitor_agent', __dir__)
# Reset Fluent's global test state before each test case.
def setup
  Fluent::Test.setup
end
# Build an input test driver wrapping in_monitor_agent with the given config.
def create_driver(conf = '')
  Fluent::Test::Driver::Input.new(Fluent::Plugin::MonitorAgentInput).configure(conf)
end
# Parse conf_str (v1 syntax, with dummy file names for error reporting) and
# apply it to the given RootAgent. Returns the configured agent for chaining.
def configure_ra(ra, conf_str)
  parsed = Fluent::Config.parse(conf_str, "(test)", "(test_dir)", true)
  ra.tap { |agent| agent.configure(parsed) }
end
# Default parameter values of in_monitor_agent.
def test_configure
  d = create_driver
  assert_equal("0.0.0.0", d.instance.bind)
  assert_equal(24220, d.instance.port)
  assert_equal(nil, d.instance.tag)
  assert_equal(60, d.instance.emit_interval)
  assert_true d.instance.include_config
end
sub_test_case "collect in_monitor_agent plugin statistics" do
# Input Test Driver does not register metric callbacks.
# We should stub them here.
# Event router that, in addition to the normal test-router bookkeeping,
# forwards every emitted event stream to the plugin's metric_callback —
# the driver normally does not register metric callbacks (see the
# sub_test_case comment above).
class TestEventMetricRouter < Fluent::Test::Driver::TestEventRouter
  def initialize(driver)
    super
    # Fail fast when the wrapped plugin cannot receive metric callbacks.
    raise ArgumentError, "plugin does not respond metric_callback method" unless @driver.instance.respond_to?(:metric_callback)
  end

  def emit(tag, time, record)
    super
    @driver.instance.metric_callback(OneEventStream.new(time, record))
  end

  def emit_array(tag, array)
    super
    @driver.instance.metric_callback(ArrayEventStream.new(array))
  end

  def emit_stream(tag, es)
    super
    @driver.instance.metric_callback(es)
  end
end
# Input driver variant that swaps the plugin's event_emitter_router for
# TestEventMetricRouter so emitted streams also hit metric_callback.
class MetricInputDriver < Fluent::Test::Driver::Input
  def configure(conf, syntax: :v1)
    # Accept either an already-parsed Element or a raw config string.
    if conf.is_a?(Fluent::Config::Element)
      @config = conf
    else
      @config = Fluent::Config.parse(conf, "(test)", "(test_dir)", syntax: syntax)
    end
    if @instance.respond_to?(:router=)
      @event_streams = []
      @error_events = []
      driver = self
      # Prepend a module on the singleton class so only this instance's
      # event_emitter_router is overridden.
      mojule = Module.new do
        define_method(:event_emitter_router) do |label_name|
          TestEventMetricRouter.new(driver)
        end
      end
      @instance.singleton_class.prepend mojule
    end
    @instance.configure(@config)
    self
  end
end
setup do
  # check @type and type in one configuration
  # Pipeline: generating input -> filter -> relabel into @test; extra @copy
  # and @ERROR labels exercise multi-store and error-path outputs.
  conf = <<-EOC
<source>
  @type test_in_gen
  @id test_in_gen
  num 10
</source>
<filter>
  @type test_filter
  @id test_filter
</filter>
<match **>
  @type relabel
  @id test_relabel
  @label @test
</match>
<label @test>
  <match **>
    @type test_out
    @id test_out
  </match>
</label>
<label @copy>
  <match **>
    @type copy
    <store>
      @type test_out
      @id copy_out_1
    </store>
    <store>
      @type test_out
      @id copy_out_2
    </store>
  </match>
</label>
<label @ERROR>
  <match>
    @type null
    @id null
  </match>
</label>
  EOC
  @ra = Fluent::RootAgent.new(log: $log)
  # Engine.root_agent must resolve to our agent while tests run.
  stub(Fluent::Engine).root_agent { @ra }
  @ra = configure_ra(@ra, conf)
end
data(:with_config_yes => true,
     :with_config_no => false)
# Drives in_monitor_agent through MetricInputDriver (which wires up the
# metric callbacks) and checks the per-plugin monitor info plus the
# emit_records/emit_size statistics, with and without config inclusion.
def test_enable_input_metrics(with_config)
  monitor_agent_conf = <<-CONF
    tag test.monitor
    emit_interval 1
  CONF
  @ra.inputs.first.context_router.emit("test.event", Fluent::Engine.now, {"message":"ok"})
  d = MetricInputDriver.new(Fluent::Plugin::MonitorAgentInput).configure(monitor_agent_conf)
  d.run(expect_emits: 1, timeout: 3)
  test_label = @ra.labels['@test']
  error_label = @ra.labels['@ERROR']
  input_info = {
    "output_plugin" => false,
    "plugin_category"=> "input",
    "plugin_id" => "test_in_gen",
    "retry_count" => nil,
    "type" => "test_in_gen",
    "emit_records" => 0, # This field is not updated due to not to be assigned metric callback.
    "emit_size" => 0,    # Ditto.
  }
  input_info["config"] = {"@id" => "test_in_gen", "@type" => "test_in_gen", "num" => "10"} if with_config
  filter_info = {
    "output_plugin" => false,
    "plugin_category" => "filter",
    "plugin_id" => "test_filter",
    "retry_count" => nil,
    "type" => "test_filter",
    "emit_records" => Integer,
    "emit_size" => Integer,
  }
  filter_info["config"] = {"@id" => "test_filter", "@type" => "test_filter"} if with_config
  output_info = {
    "output_plugin" => true,
    "plugin_category" => "output",
    "plugin_id" => "test_out",
    "retry_count" => 0,
    "type" => "test_out",
    "emit_count" => Integer,
    "emit_records" => Integer,
    "emit_size" => Integer,
    "write_count" => Integer,
    "write_secondary_count" => Integer,
    "rollback_count" => Integer,
    "slow_flush_count" => Integer,
    "flush_time_count" => Integer,
    "drop_oldest_chunk_count" => Integer,
  }
  output_info["config"] = {"@id" => "test_out", "@type" => "test_out"} if with_config
  # The @ERROR output is buffered (null), so buffer stats appear as well.
  error_label_info = {
    "buffer_queue_length" => 0,
    "buffer_timekeys" => [],
    "buffer_total_queued_size" => 0,
    "output_plugin" => true,
    "plugin_category" => "output",
    "plugin_id" => "null",
    "retry_count" => 0,
    "type" => "null",
    "buffer_available_buffer_space_ratios" => Float,
    "buffer_queue_byte_size" => Integer,
    "buffer_stage_byte_size" => Integer,
    "buffer_stage_length" => Integer,
    "emit_count" => Integer,
    "emit_records" => Integer,
    "emit_size" => Integer,
    "write_count" => Integer,
    "write_secondary_count" => Integer,
    "rollback_count" => Integer,
    "slow_flush_count" => Integer,
    "flush_time_count" => Integer,
    "drop_oldest_chunk_count" => Integer,
  }
  error_label_info["config"] = {"@id"=>"null", "@type" => "null"} if with_config
  opts = {with_config: with_config}
  assert_equal(input_info, d.instance.get_monitor_info(@ra.inputs.first, opts))
  assert_fuzzy_equal(filter_info, d.instance.get_monitor_info(@ra.filters.first, opts))
  assert_fuzzy_equal(output_info, d.instance.get_monitor_info(test_label.outputs.first, opts))
  assert_fuzzy_equal(error_label_info, d.instance.get_monitor_info(error_label.outputs.first, opts))
  monitor_agent_emit_info = {
    "emit_records" => Integer,
    "emit_size" => Integer,
  }
  filter_statistics_info = {
    "emit_records" => Integer,
    "emit_size" => Integer,
  }
  assert_fuzzy_equal(monitor_agent_emit_info, d.instance.statistics["input"])
  assert_fuzzy_equal(filter_statistics_info, @ra.filters.first.statistics["filter"])
end
end
sub_test_case "collect plugin information" do
setup do
  # check @type and type in one configuration
  # Same topology as the metrics sub_test_case but with a plain test_in
  # source (no generated events).
  conf = <<-EOC
<source>
  @type test_in
  @id test_in
</source>
<filter>
  @type test_filter
  @id test_filter
</filter>
<match **>
  @type relabel
  @id test_relabel
  @label @test
</match>
<label @test>
  <match **>
    @type test_out
    @id test_out
  </match>
</label>
<label @copy>
  <match **>
    @type copy
    <store>
      @type test_out
      @id copy_out_1
    </store>
    <store>
      @type test_out
      @id copy_out_2
    </store>
  </match>
</label>
<label @ERROR>
  <match>
    @type null
    @id null
  </match>
</label>
  EOC
  @ra = Fluent::RootAgent.new(log: $log)
  stub(Fluent::Engine).root_agent { @ra }
  @ra = configure_ra(@ra, conf)
end
# plugin_category classifies inputs, filters and outputs (including the
# @ERROR label's output) correctly.
test "plugin_category" do
  d = create_driver
  test_label = @ra.labels['@test']
  error_label = @ra.labels['@ERROR']
  assert_equal("input", d.instance.plugin_category(@ra.inputs.first))
  assert_equal("filter", d.instance.plugin_category(@ra.filters.first))
  assert_equal("output", d.instance.plugin_category(test_label.outputs.first))
  assert_equal("output", d.instance.plugin_category(error_label.outputs.first))
end
data(:with_config_yes => true,
     :with_config_no => false)
# get_monitor_info returns the expected per-plugin hash for each plugin
# category; `config` is only included when requested via opts.
test "get_monitor_info" do |with_config|
  d = create_driver
  test_label = @ra.labels['@test']
  error_label = @ra.labels['@ERROR']
  input_info = {
    "output_plugin" => false,
    "plugin_category"=> "input",
    "plugin_id" => "test_in",
    "retry_count" => nil,
    "type" => "test_in",
    "emit_records" => 0,
    "emit_size" => 0,
  }
  input_info["config"] = {"@id" => "test_in", "@type" => "test_in"} if with_config
  filter_info = {
    "output_plugin" => false,
    "plugin_category" => "filter",
    "plugin_id" => "test_filter",
    "retry_count" => nil,
    "type" => "test_filter",
    "emit_records" => 0,
    "emit_size" => 0,
  }
  filter_info["config"] = {"@id" => "test_filter", "@type" => "test_filter"} if with_config
  output_info = {
    "output_plugin" => true,
    "plugin_category" => "output",
    "plugin_id" => "test_out",
    "retry_count" => 0,
    "type" => "test_out",
    "emit_count" => Integer,
    "emit_records" => Integer,
    "emit_size" => Integer,
    "write_count" => Integer,
    "write_secondary_count" => Integer,
    "rollback_count" => Integer,
    "slow_flush_count" => Integer,
    "flush_time_count" => Integer,
    "drop_oldest_chunk_count" => Integer,
  }
  output_info["config"] = {"@id" => "test_out", "@type" => "test_out"} if with_config
  # Buffered output (null) additionally reports buffer statistics.
  error_label_info = {
    "buffer_queue_length" => 0,
    "buffer_timekeys" => [],
    "buffer_total_queued_size" => 0,
    "output_plugin" => true,
    "plugin_category" => "output",
    "plugin_id" => "null",
    "retry_count" => 0,
    "type" => "null",
    "buffer_available_buffer_space_ratios" => Float,
    "buffer_queue_byte_size" => Integer,
    "buffer_stage_byte_size" => Integer,
    "buffer_stage_length" => Integer,
    "emit_count" => Integer,
    "emit_records" => Integer,
    "emit_size" => Integer,
    "write_count" => Integer,
    "write_secondary_count" => Integer,
    "rollback_count" => Integer,
    "slow_flush_count" => Integer,
    "flush_time_count" => Integer,
    "drop_oldest_chunk_count" => Integer,
  }
  error_label_info["config"] = {"@id"=>"null", "@type" => "null"} if with_config
  opts = {with_config: with_config}
  assert_equal(input_info, d.instance.get_monitor_info(@ra.inputs.first, opts))
  assert_fuzzy_equal(filter_info, d.instance.get_monitor_info(@ra.filters.first, opts))
  assert_fuzzy_equal(output_info, d.instance.get_monitor_info(test_label.outputs.first, opts))
  assert_fuzzy_equal(error_label_info, d.instance.get_monitor_info(error_label.outputs.first, opts))
end
# fluentd_opts exposes the supervisor's boot options after a Supervisor has
# been configured against a (temporary, empty) config file.
test "fluentd opts" do
  d = create_driver
  filepath = nil
  begin
    FileUtils.mkdir_p(CONFIG_DIR)
    filepath = File.expand_path('fluentd.conf', CONFIG_DIR)
    FileUtils.touch(filepath)
    s = Fluent::Supervisor.new({config_path: filepath})
    s.configure
  ensure
    # Fix: was `FileUtils.rm_r(CONFIG_DIR) rescue _` — `_` is an undefined
    # name, so a failing cleanup raised NameError instead of being ignored.
    FileUtils.rm_r(CONFIG_DIR) rescue nil
  end
  expected_opts = {
    "config_path" => filepath,
    "pid_file" => nil,
    "plugin_dirs" => ["/etc/fluent/plugin"],
    "log_path" => nil,
    "root_dir" => nil,
  }
  assert_equal(expected_opts, d.instance.fluentd_opts)
end
# all_plugins walks every configured plugin in deterministic order,
# including stores inside <copy> and the @ERROR label's output.
test "all_plugins" do
  d = create_driver
  plugins = []
  d.instance.all_plugins.each {|plugin| plugins << plugin.class }
  assert_equal([FluentTest::FluentTestInput,
                Fluent::Plugin::RelabelOutput,
                FluentTest::FluentTestFilter,
                FluentTest::FluentTestOutput, # in label @test
                Fluent::Plugin::CopyOutput,
                FluentTest::FluentTestOutput, # in label @copy 1
                FluentTest::FluentTestOutput, # in label @copy 2
                Fluent::Plugin::NullOutput], plugins)
end
# With a tag configured, in_monitor_agent periodically emits one record per
# plugin; check the records for the relabel and test_out outputs.
test "emit" do
  port = unused_port(protocol: :tcp)
  d = create_driver("
    @type monitor_agent
    bind '127.0.0.1'
    port #{port}
    tag monitor
    emit_interval 1
  ")
  d.instance.start
  # One emit cycle produces a record per plugin; wait for a full cycle.
  d.end_if do
    d.events.size >= 5
  end
  d.run
  expect_relabel_record = {
    "plugin_id" => "test_relabel",
    "plugin_category" => "output",
    "type" => "relabel",
    "output_plugin" => true,
    "retry_count" => 0,
    "emit_count" => Integer,
    "emit_records" => Integer,
    "emit_size" => Integer,
    "write_count" => Integer,
    "write_secondary_count" => Integer,
    "rollback_count" => Integer,
    "slow_flush_count" => Integer,
    "flush_time_count" => Integer,
    "drop_oldest_chunk_count" => Integer,
  }
  expect_test_out_record = {
    "plugin_id" => "test_out",
    "plugin_category" => "output",
    "type" => "test_out",
    "output_plugin" => true,
    "retry_count" => 0,
    "emit_count" => Integer,
    "emit_records" => Integer,
    "emit_size" => Integer,
    "write_count" => Integer,
    "write_secondary_count" => Integer,
    "rollback_count" => Integer,
    "slow_flush_count" => Integer,
    "flush_time_count" => Integer,
    "drop_oldest_chunk_count" => Integer,
  }
  # Events follow all_plugins order: index 1 is relabel, index 3 is test_out.
  assert_fuzzy_equal(expect_relabel_record, d.events[1][2])
  assert_fuzzy_equal(expect_test_out_record, d.events[3][2])
end
end
# Issue an HTTP GET to +uri+ with the given headers and return the
# Net::HTTPResponse. A Content-Type default is applied when the caller did
# not set one.
def get(uri, header = {})
  url = URI.parse(uri)
  # Fix: apply the default BEFORE building the request. Net::HTTP::Get
  # copies the init-header hash at construction time, so the old code's
  # mutation of `header` after `Net::HTTP::Get.new` never reached the
  # request being sent.
  unless header.has_key?('Content-Type')
    header['Content-Type'] = 'application/octet-stream'
  end
  req = Net::HTTP::Get.new(url, header)
  Net::HTTP.start(url.host, url.port) {|http|
    http.request(req)
  }
end
sub_test_case "servlets" do
setup do
  @port = unused_port(protocol: :tcp)
  # check @type and type in one configuration
  # Pipeline includes a live monitor_agent source bound to @port so the
  # servlet tests below can issue real HTTP requests against it.
  conf = <<-EOC
<source>
  @type test_in
  @id test_in
</source>
<source>
  @type monitor_agent
  bind "127.0.0.1"
  port #{@port}
  tag monitor
  @id monitor_agent
</source>
<filter>
  @type test_filter
  @id test_filter
</filter>
<match **>
  @type relabel
  @id test_relabel
  @label @test
</match>
<label @test>
  <match **>
    @type test_out
    @id test_out
  </match>
</label>
<label @ERROR>
  <match>
    @type null
    @id null
  </match>
</label>
  EOC
  begin
    @ra = Fluent::RootAgent.new(log: $log)
    stub(Fluent::Engine).root_agent { @ra }
    @ra = configure_ra(@ra, conf)
    # store Supervisor instance to avoid collected by GC
    FileUtils.mkdir_p(CONFIG_DIR)
    @filepath = File.expand_path('fluentd.conf', CONFIG_DIR)
    File.open(@filepath, 'w') do |v|
      v.puts(conf)
    end
    @supervisor = Fluent::Supervisor.new({config_path: @filepath})
    @supervisor.configure
  ensure
    # Fix: was `FileUtils.rm_r(CONFIG_DIR) rescue _` — `_` is an undefined
    # name, so a failing cleanup raised NameError instead of being ignored.
    FileUtils.rm_r(CONFIG_DIR) rescue nil
  end
end
# The plain-text /api/plugins endpoint renders one tab-separated line per
# plugin; check the input (line 0) and filter (line 3) rows.
test "/api/plugins" do
  d = create_driver("
    @type monitor_agent
    bind '127.0.0.1'
    port #{@port}
    tag monitor
  ")
  d.instance.start
  expected_test_in_response = "\
plugin_id:test_in\tplugin_category:input\ttype:test_in\toutput_plugin:false\tretry_count:\temit_records:0\temit_size:0"
  expected_test_filter_response = "\
plugin_id:test_filter\tplugin_category:filter\ttype:test_filter\toutput_plugin:false\tretry_count:\temit_records:0\temit_size:0"
  response = get("http://127.0.0.1:#{@port}/api/plugins").body
  test_in = response.split("\n")[0]
  test_filter = response.split("\n")[3]
  assert_equal(expected_test_in_response, test_in)
  assert_equal(expected_test_filter_response, test_filter)
end
data(:include_config_and_retry_yes => [true, true, "include_config yes", "include_retry yes"],
:include_config_and_retry_no => [false, false, "include_config no", "include_retry no"],)
test "/api/plugins.json" do |(with_config, with_retry, include_conf, retry_conf)|
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{@port}
tag monitor
#{include_conf}
#{retry_conf}
")
d.instance.start
expected_test_in_response = {
"output_plugin" => false,
"plugin_category" => "input",
"plugin_id" => "test_in",
"retry_count" => nil,
"type" => "test_in",
"emit_records" => 0,
"emit_size" => 0,
}
expected_test_in_response["config"] = {"@id" => "test_in", "@type" => "test_in"} if with_config
expected_null_response = {
"buffer_queue_length" => 0,
"buffer_timekeys" => [],
"buffer_total_queued_size" => 0,
"output_plugin" => true,
"plugin_category" => "output",
"plugin_id" => "null",
"retry_count" => 0,
"type" => "null",
"buffer_available_buffer_space_ratios" => Float,
"buffer_queue_byte_size" => Integer,
"buffer_stage_byte_size" => Integer,
"buffer_stage_length" => Integer,
"emit_count" => Integer,
"emit_records" => Integer,
"emit_size" => Integer,
"write_count" => Integer,
"write_secondary_count" => Integer,
"rollback_count" => Integer,
"slow_flush_count" => Integer,
"flush_time_count" => Integer,
"drop_oldest_chunk_count" => Integer,
}
expected_null_response["config"] = {"@id" => "null", "@type" => "null"} if with_config
expected_null_response["retry"] = {} if with_retry
response = JSON.parse(get("http://127.0.0.1:#{@port}/api/plugins.json").body)
test_in_response = response["plugins"][0]
null_response = response["plugins"][5]
assert_equal(expected_test_in_response, test_in_response)
assert_fuzzy_equal(expected_null_response, null_response)
end
test "/api/plugins.json/not_found" do
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{@port}
tag monitor
")
d.instance.start
resp = get("http://127.0.0.1:#{@port}/api/plugins.json/not_found")
assert_equal('404', resp.code)
body = JSON.parse(resp.body)
assert_equal(body['message'], 'Not found')
end
data(:with_config_and_retry_yes => [true, true, "?with_config=yes&with_retry"],
:with_config_and_retry_no => [false, false, "?with_config=no&with_retry=no"])
test "/api/plugins.json with query parameter. query parameter is preferred than include_config" do |(with_config, with_retry, query_param)|
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{@port}
tag monitor
")
d.instance.start
expected_test_in_response = {
"output_plugin" => false,
"plugin_category" => "input",
"plugin_id" => "test_in",
"retry_count" => nil,
"type" => "test_in",
"emit_records" => 0,
"emit_size" => 0,
}
expected_test_in_response["config"] = {"@id" => "test_in", "@type" => "test_in"} if with_config
expected_null_response = {
"buffer_queue_length" => 0,
"buffer_timekeys" => [],
"buffer_total_queued_size" => 0,
"output_plugin" => true,
"plugin_category" => "output",
"plugin_id" => "null",
"retry_count" => 0,
"type" => "null",
"buffer_available_buffer_space_ratios" => Float,
"buffer_queue_byte_size" => Integer,
"buffer_stage_byte_size" => Integer,
"buffer_stage_length" => Integer,
"emit_count" => Integer,
"emit_records" => Integer,
"emit_size" => Integer,
"write_count" => Integer,
"rollback_count" => Integer,
"slow_flush_count" => Integer,
"flush_time_count" => Integer,
"drop_oldest_chunk_count" => Integer,
}
expected_null_response["config"] = {"@id" => "null", "@type" => "null"} if with_config
expected_null_response["retry"] = {} if with_retry
response = JSON.parse(get("http://127.0.0.1:#{@port}/api/plugins.json#{query_param}").body)
test_in_response = response["plugins"][0]
null_response = response["plugins"][5]
assert_equal(expected_test_in_response, test_in_response)
assert_fuzzy_include(expected_null_response, null_response)
end
test "/api/plugins.json with 'with_ivars'. response contains specified instance variables of each plugin" do
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{@port}
tag monitor
")
d.instance.start
expected_test_in_response = {
"output_plugin" => false,
"plugin_category" => "input",
"plugin_id" => "test_in",
"retry_count" => nil,
"type" => "test_in",
"instance_variables" => {"id" => "test_in"},
"emit_records" => 0,
"emit_size" => 0,
}
expected_null_response = {
"buffer_queue_length" => 0,
"buffer_timekeys" => [],
"buffer_total_queued_size" => 0,
"output_plugin" => true,
"plugin_category" => "output",
"plugin_id" => "null",
"retry_count" => 0,
"type" => "null",
"instance_variables" => {"id" => "null"},
"buffer_available_buffer_space_ratios" => Float,
"buffer_queue_byte_size" => Integer,
"buffer_stage_byte_size" => Integer,
"buffer_stage_length" => Integer,
"emit_count" => Integer,
"emit_records" => Integer,
"emit_size" => Integer,
"write_count" => Integer,
"write_secondary_count" => Integer,
"rollback_count" => Integer,
"slow_flush_count" => Integer,
"flush_time_count" => Integer,
"drop_oldest_chunk_count" => Integer,
}
response = JSON.parse(get("http://127.0.0.1:#{@port}/api/plugins.json?with_config=no&with_retry=no&with_ivars=id,num_errors").body)
test_in_response = response["plugins"][0]
null_response = response["plugins"][5]
assert_equal(expected_test_in_response, test_in_response)
assert_fuzzy_equal(expected_null_response, null_response)
end
test "/api/config" do
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{@port}
tag monitor
")
d.instance.start
expected_response_regex = %r{pid:\d+\tppid:\d+\tversion:#{Fluent::VERSION}\tconfig_path:#{@filepath}\tpid_file:\tplugin_dirs:/etc/fluent/plugin\tlog_path:}
assert_match(expected_response_regex,
get("http://127.0.0.1:#{@port}/api/config").body)
end
test "/api/config.json" do
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{@port}
tag monitor
")
d.instance.start
res = JSON.parse(get("http://127.0.0.1:#{@port}/api/config.json").body)
assert_equal(@filepath, res["config_path"])
assert_nil(res["pid_file"])
assert_equal(["/etc/fluent/plugin"], res["plugin_dirs"])
assert_nil(res["log_path"])
assert_equal(Fluent::VERSION, res["version"])
end
test "/api/config.json?debug=1" do
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{@port}
tag monitor
")
d.instance.start
# To check pretty print
assert_true !get("http://127.0.0.1:#{@port}/api/config.json").body.include?("\n")
assert_true get("http://127.0.0.1:#{@port}/api/config.json?debug=1").body.include?("\n")
end
test "/api/config.json/not_found" do
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{@port}
tag monitor
")
d.instance.start
resp = get("http://127.0.0.1:#{@port}/api/config.json/not_found")
assert_equal('404', resp.code)
body = JSON.parse(resp.body)
assert_equal(body['message'], 'Not found')
end
end
sub_test_case "check retry of buffered plugins" do
class FluentTestFailWriteOutput < ::Fluent::Plugin::Output
::Fluent::Plugin.register_output('test_out_fail_write', self)
def write(chunk)
raise "chunk error!"
end
end
setup do
@port = unused_port(protocol: :tcp)
# check @type and type in one configuration
conf = <<-EOC
<source>
@type monitor_agent
@id monitor_agent
bind "127.0.0.1"
port #{@port}
</source>
<match **>
@type test_out_fail_write
@id test_out_fail_write
<buffer time>
timekey 1m
flush_mode immediate
</buffer>
</match>
EOC
@ra = Fluent::RootAgent.new(log: $log)
stub(Fluent::Engine).root_agent { @ra }
@ra = configure_ra(@ra, conf)
end
test "/api/plugins.json retry object should be filled if flush was failed" do
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{@port}
include_config no
")
d.instance.start
output = @ra.outputs[0]
output.start
output.after_start
expected_test_out_fail_write_response = {
"buffer_queue_length" => 1,
"buffer_timekeys" => [output.calculate_timekey(event_time)],
"buffer_total_queued_size" => 40,
"output_plugin" => true,
"plugin_category" => "output",
"plugin_id" => "test_out_fail_write",
"type" => "test_out_fail_write",
"buffer_newest_timekey" => output.calculate_timekey(event_time),
"buffer_oldest_timekey" => output.calculate_timekey(event_time),
"buffer_available_buffer_space_ratios" => Float,
"buffer_queue_byte_size" => Integer,
"buffer_stage_byte_size" => Integer,
"buffer_stage_length" => Integer,
"emit_count" => Integer,
"emit_records" => Integer,
"emit_size" => Integer,
"write_count" => Integer,
"write_secondary_count" => Integer,
"rollback_count" => Integer,
'slow_flush_count' => Integer,
'flush_time_count' => Integer,
"drop_oldest_chunk_count" => Integer,
}
output.emit_events('test.tag', Fluent::ArrayEventStream.new([[event_time, {"message" => "test failed flush 1"}]]))
# flush few times to check steps
2.times do
output.force_flush
# output.force_flush calls #submit_flush_all, but #submit_flush_all skips to call #submit_flush_once when @retry exists.
# So that forced flush in retry state should be done by calling #submit_flush_once directly.
output.submit_flush_once
sleep 0.1 until output.buffer.queued?
end
response = JSON.parse(get("http://127.0.0.1:#{@port}/api/plugins.json").body)
test_out_fail_write_response = response["plugins"][1]
# remove dynamic keys
response_retry_count = test_out_fail_write_response.delete("retry_count")
response_retry = test_out_fail_write_response.delete("retry")
assert_fuzzy_equal(expected_test_out_fail_write_response, test_out_fail_write_response)
assert{ response_retry.has_key?("steps") }
# it's very hard to check exact retry count (because retries are called by output flush thread scheduling)
assert{ response_retry_count >= 1 && response_retry["steps"] >= 0 }
assert{ response_retry_count == response_retry["steps"] + 1 }
end
end
sub_test_case "check the port number of http server" do
test "on single worker environment" do
port = unused_port(protocol: :tcp)
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{port}
")
d.instance.start
assert_equal("200", get("http://127.0.0.1:#{port}/api/plugins").code)
end
test "worker_id = 2 on multi worker environment" do
port = unused_port(protocol: :tcp)
Fluent::SystemConfig.overwrite_system_config('workers' => 4) do
d = Fluent::Test::Driver::Input.new(Fluent::Plugin::MonitorAgentInput)
d.instance.instance_eval{ @_fluentd_worker_id = 2 }
d.configure("
@type monitor_agent
bind '127.0.0.1'
port #{port - 2}
")
d.instance.start
end
assert_equal("200", get("http://127.0.0.1:#{port}/api/plugins").code)
end
end
sub_test_case "check NoMethodError does not happen" do
class FluentTestBufferVariableOutput < ::Fluent::Plugin::Output
::Fluent::Plugin.register_output('test_out_buffer_variable', self)
def configure(conf)
super
@buffer = []
end
def write(chunk)
end
end
class FluentTestBufferVariableFilter < ::Fluent::Plugin::Filter
::Fluent::Plugin.register_filter("test_filter_buffer_variable", self)
def initialize
super
@buffer = {}
end
def filter(tag, time, record)
record
end
end
setup do
conf = <<-EOC
<match **>
@type test_out_buffer_variable
@id test_out_buffer_variable
</match>
<filter **>
@type test_filter_buffer_variable
@id test_filter_buffer_variable
</filter>
EOC
@ra = Fluent::RootAgent.new(log: $log)
stub(Fluent::Engine).root_agent { @ra }
@ra = configure_ra(@ra, conf)
end
test "plugins have a variable named buffer does not throws NoMethodError" do
port = unused_port(protocol: :tcp)
d = create_driver("
@type monitor_agent
bind '127.0.0.1'
port #{port}
include_config no
")
d.instance.start
assert_equal("200", get("http://127.0.0.1:#{port}/api/plugins.json").code)
assert{ d.logs.none?{|log| log.include?("NoMethodError") } }
assert_equal(false, d.instance.instance_variable_get(:@first_warn))
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_formatter_json.rb | test/plugin/test_formatter_json.rb | require_relative '../helper'
require 'json'
require 'fluent/test/driver/formatter'
require 'fluent/plugin/formatter_json'
class JsonFormatterTest < ::Test::Unit::TestCase
def setup
@time = event_time
@default_newline = if Fluent.windows?
"\r\n"
else
"\n"
end
end
def create_driver(conf = "")
Fluent::Test::Driver::Formatter.new(Fluent::Plugin::JSONFormatter).configure(conf)
end
def tag
"tag"
end
def record
{'message' => 'awesome'}
end
def symbolic_record
{:message => :awesome}
end
data('oj with LF' => ['oj', "lf", "\n"],
'oj with CRLF' => ['oj', "crlf", "\r\n"],
'yajl with LF' => ['yajl', "lf", "\n"],
'yajl with CRLF' => ['yajl', "crlf", "\r\n"]
)
def test_format(data)
parser, newline_conf, newline = data
d = create_driver('json_parser' => parser, 'newline' => newline_conf)
formatted = d.instance.format(tag, @time, record)
assert_equal("#{JSON.generate(record)}#{newline}", formatted)
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_format_without_nl(data)
d = create_driver('json_parser' => data, 'add_newline' => false)
formatted = d.instance.format(tag, @time, record)
assert_equal(JSON.generate(record), formatted)
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_format_with_symbolic_record(data)
d = create_driver('json_parser' => data)
formatted = d.instance.format(tag, @time, symbolic_record)
assert_equal("#{JSON.generate(record)}#{@default_newline}", formatted)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_filter.rb | test/plugin/test_filter.rb | require_relative '../helper'
require 'fluent/plugin/filter'
require 'fluent/event'
require 'flexmock/test_unit'
module FluentPluginFilterTest
class DummyPlugin < Fluent::Plugin::Filter
end
class NumDoublePlugin < Fluent::Plugin::Filter
def filter(tag, time, record)
r = record.dup
r["num"] = r["num"].to_i * 2
r
end
end
class IgnoreForNumPlugin < Fluent::Plugin::Filter
def filter(tag, time, record)
if record["num"].is_a? Numeric
nil
else
record
end
end
end
class RaiseForNumPlugin < Fluent::Plugin::Filter
def filter(tag, time, record)
if record["num"].is_a? Numeric
raise "Value of num is Number!"
end
record
end
end
class NumDoublePluginWithTime < Fluent::Plugin::Filter
def filter_with_time(tag, time, record)
r = record.dup
r["num"] = r["num"].to_i * 2
[time, r]
end
end
class IgnoreForNumPluginWithTime < Fluent::Plugin::Filter
def filter_with_time(tag, time, record)
if record["num"].is_a? Numeric
nil
else
[time, record]
end
end
end
class InvalidPlugin < Fluent::Plugin::Filter
# Because of implementing `filter_with_time` and `filter` methods
def filter_with_time(tag, time, record); end
def filter(tag, time, record); end
end
end
class FilterPluginTest < Test::Unit::TestCase
DummyRouter = Struct.new(:emits) do
def emit_error_event(tag, time, record, error)
self.emits << [tag, time, record, error]
end
end
setup do
@p = nil
end
teardown do
if @p
@p.stop unless @p.stopped?
@p.before_shutdown unless @p.before_shutdown?
@p.shutdown unless @p.shutdown?
@p.after_shutdown unless @p.after_shutdown?
@p.close unless @p.closed?
@p.terminate unless @p.terminated?
end
end
sub_test_case 'for basic dummy plugin' do
setup do
Fluent::Test.setup
end
test 'plugin does not define #filter raises error' do
assert_raise NotImplementedError do
FluentPluginFilterTest::DummyPlugin.new
end
end
end
sub_test_case 'normal filter plugin' do
setup do
Fluent::Test.setup
@p = FluentPluginFilterTest::NumDoublePlugin.new
end
test 'has healthy lifecycle' do
assert !@p.configured?
@p.configure(config_element)
assert @p.configured?
assert !@p.started?
@p.start
assert @p.start
assert !@p.stopped?
@p.stop
assert @p.stopped?
assert !@p.before_shutdown?
@p.before_shutdown
assert @p.before_shutdown?
assert !@p.shutdown?
@p.shutdown
assert @p.shutdown?
assert !@p.after_shutdown?
@p.after_shutdown
assert @p.after_shutdown?
assert !@p.closed?
@p.close
assert @p.closed?
assert !@p.terminated?
@p.terminate
assert @p.terminated?
end
test 'has plugin_id automatically generated' do
assert @p.respond_to?(:plugin_id_configured?)
assert @p.respond_to?(:plugin_id)
@p.configure(config_element)
assert !@p.plugin_id_configured?
assert @p.plugin_id
assert{ @p.plugin_id != 'mytest' }
end
test 'has plugin_id manually configured' do
@p.configure(config_element('ROOT', '', {'@id' => 'mytest'}))
assert @p.plugin_id_configured?
assert_equal 'mytest', @p.plugin_id
end
test 'has plugin logger' do
assert @p.respond_to?(:log)
assert @p.log
# default logger
original_logger = @p.log
@p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
assert(@p.log.object_id != original_logger.object_id)
assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
end
test 'can load plugin helpers' do
assert_nothing_raised do
class FluentPluginFilterTest::DummyPlugin2 < Fluent::Plugin::Filter
helpers :storage
end
end
end
test 'can use metrics plugins and fallback methods' do
@p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
%w[emit_size_metrics emit_records_metrics].each do |metric_name|
assert_true @p.instance_variable_get(:"@#{metric_name}").is_a?(Fluent::Plugin::Metrics)
end
assert_equal 0, @p.emit_size
assert_equal 0, @p.emit_records
end
test 'are available with multi worker configuration in default' do
assert @p.multi_workers_ready?
end
test 'filters events correctly' do
test_es = [
[event_time('2016-04-19 13:01:00 -0700'), {"num" => "1", "message" => "Hello filters!"}],
[event_time('2016-04-19 13:01:03 -0700'), {"num" => "2", "message" => "Hello filters!"}],
[event_time('2016-04-19 13:01:05 -0700'), {"num" => "3", "message" => "Hello filters!"}],
]
@p.configure(config_element)
es = @p.filter_stream('testing', test_es)
assert es.is_a? Fluent::EventStream
ary = []
es.each do |time, r|
ary << [time, r]
end
assert_equal 3, ary.size
assert_equal event_time('2016-04-19 13:01:00 -0700'), ary[0][0]
assert_equal "Hello filters!", ary[0][1]["message"]
assert_equal 2, ary[0][1]["num"]
assert_equal event_time('2016-04-19 13:01:03 -0700'), ary[1][0]
assert_equal 4, ary[1][1]["num"]
assert_equal event_time('2016-04-19 13:01:05 -0700'), ary[2][0]
assert_equal 6, ary[2][1]["num"]
end
end
sub_test_case 'filter plugin returns nil for some records' do
setup do
Fluent::Test.setup
@p = FluentPluginFilterTest::IgnoreForNumPlugin.new
end
test 'filter_stream ignores records which #filter return nil' do
test_es = [
[event_time('2016-04-19 13:01:00 -0700'), {"num" => "1", "message" => "Hello filters!"}],
[event_time('2016-04-19 13:01:03 -0700'), {"num" => 2, "message" => "Ignored, yay!"}],
[event_time('2016-04-19 13:01:05 -0700'), {"num" => "3", "message" => "Hello filters!"}],
]
@p.configure(config_element)
es = @p.filter_stream('testing', test_es)
assert es.is_a? Fluent::EventStream
ary = []
es.each do |time, r|
ary << [time, r]
end
assert_equal 2, ary.size
assert_equal event_time('2016-04-19 13:01:00 -0700'), ary[0][0]
assert_equal "Hello filters!", ary[0][1]["message"]
assert_equal "1", ary[0][1]["num"]
assert_equal event_time('2016-04-19 13:01:05 -0700'), ary[1][0]
assert_equal "3", ary[1][1]["num"]
end
end
sub_test_case 'filter plugin raises error' do
setup do
Fluent::Test.setup
@p = FluentPluginFilterTest::RaiseForNumPlugin.new
end
test 'has router and can emit events to error streams' do
assert @p.has_router?
@p.configure(config_element)
assert @p.router
@p.router = DummyRouter.new([])
test_es = [
[event_time('2016-04-19 13:01:00 -0700'), {"num" => "1", "message" => "Hello filters!"}],
[event_time('2016-04-19 13:01:03 -0700'), {"num" => 2, "message" => "Hello error router!"}],
[event_time('2016-04-19 13:01:05 -0700'), {"num" => "3", "message" => "Hello filters!"}],
]
es = @p.filter_stream('testing', test_es)
assert es.is_a? Fluent::EventStream
ary = []
es.each do |time, r|
ary << [time, r]
end
assert_equal 2, ary.size
assert_equal event_time('2016-04-19 13:01:00 -0700'), ary[0][0]
assert_equal "Hello filters!", ary[0][1]["message"]
assert_equal "1", ary[0][1]["num"]
assert_equal event_time('2016-04-19 13:01:05 -0700'), ary[1][0]
assert_equal "3", ary[1][1]["num"]
assert_equal 1, @p.router.emits.size
error_emits = @p.router.emits
assert_equal "testing", error_emits[0][0]
assert_equal event_time('2016-04-19 13:01:03 -0700'), error_emits[0][1]
assert_equal({"num" => 2, "message" => "Hello error router!"}, error_emits[0][2])
assert{ error_emits[0][3].is_a? RuntimeError }
assert_equal "Value of num is Number!", error_emits[0][3].message
end
end
sub_test_case 'filter plugins that is implemented `filter_with_time`' do
setup do
Fluent::Test.setup
@p = FluentPluginFilterTest::NumDoublePluginWithTime.new
end
test 'filters events correctly' do
test_es = [
[event_time('2016-04-19 13:01:00 -0700'), {"num" => "1", "message" => "Hello filters!"}],
[event_time('2016-04-19 13:01:03 -0700'), {"num" => "2", "message" => "Hello filters!"}],
[event_time('2016-04-19 13:01:05 -0700'), {"num" => "3", "message" => "Hello filters!"}],
]
es = @p.filter_stream('testing', test_es)
assert es.is_a? Fluent::EventStream
ary = []
es.each do |time, r|
ary << [time, r]
end
assert_equal 3, ary.size
assert_equal event_time('2016-04-19 13:01:00 -0700'), ary[0][0]
assert_equal "Hello filters!", ary[0][1]["message"]
assert_equal 2, ary[0][1]["num"]
assert_equal event_time('2016-04-19 13:01:03 -0700'), ary[1][0]
assert_equal 4, ary[1][1]["num"]
assert_equal event_time('2016-04-19 13:01:05 -0700'), ary[2][0]
assert_equal 6, ary[2][1]["num"]
end
end
sub_test_case 'filter plugin that is implemented `filter_with_time` and returns nil for some records' do
setup do
Fluent::Test.setup
@p = FluentPluginFilterTest::IgnoreForNumPluginWithTime.new
end
test 'filter_stream ignores records which #filter_with_time return nil' do
test_es = [
[event_time('2016-04-19 13:01:00 -0700'), {"num" => "1", "message" => "Hello filters!"}],
[event_time('2016-04-19 13:01:03 -0700'), {"num" => 2, "message" => "Ignored, yay!"}],
[event_time('2016-04-19 13:01:05 -0700'), {"num" => "3", "message" => "Hello filters!"}],
]
@p.configure(config_element)
es = @p.filter_stream('testing', test_es)
assert es.is_a? Fluent::EventStream
ary = []
es.each do |time, r|
ary << [time, r]
end
assert_equal 2, ary.size
assert_equal event_time('2016-04-19 13:01:00 -0700'), ary[0][0]
assert_equal "Hello filters!", ary[0][1]["message"]
assert_equal "1", ary[0][1]["num"]
assert_equal event_time('2016-04-19 13:01:05 -0700'), ary[1][0]
assert_equal "3", ary[1][1]["num"]
end
end
sub_test_case 'filter plugins that is implemented both `filter_with_time` and `filter`' do
setup do
Fluent::Test.setup
end
test 'raises DuplicatedImplementError' do
assert_raise do
FluentPluginFilterTest::InvalidPlugin.new
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_gc_stat.rb | test/plugin/test_in_gc_stat.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_gc_stat'
class GCStatInputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
end
CONFIG = %[
emit_interval 1
tag t1
]
def create_driver(conf=CONFIG)
Fluent::Test::Driver::Input.new(Fluent::Plugin::GCStatInput).configure(conf)
end
def test_configure
d = create_driver
assert_equal(1, d.instance.emit_interval)
assert_equal("t1", d.instance.tag)
end
def setup_gc_stat
stat = GC.stat
stub(GC).stat { stat }
stat
end
def test_emit
stat = setup_gc_stat
d = create_driver
d.run(expect_emits: 2)
events = d.events
assert(events.length > 0)
events.each_index {|i|
assert_equal(stat, events[i][2])
assert(events[i][1].is_a?(Fluent::EventTime))
}
end
def test_emit_with_use_symbol_keys_false
stat = setup_gc_stat
result = {}
stat.each_pair { |k, v|
result[k.to_s] = v
}
d = create_driver(CONFIG + "use_symbol_keys false")
d.run(expect_emits: 2)
events = d.events
assert(events.length > 0)
events.each_index {|i|
assert_equal(result, events[i][2])
assert(events[i][1].is_a?(Fluent::EventTime))
}
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_formatter_csv.rb | test/plugin/test_formatter_csv.rb | require_relative '../helper'
require 'fluent/test/driver/formatter'
require 'fluent/plugin/formatter_csv'
class CsvFormatterTest < ::Test::Unit::TestCase
def setup
@time = event_time
end
CONF = %[
fields a,b,c
]
def create_driver(conf = CONF)
Fluent::Test::Driver::Formatter.new(Fluent::Plugin::CsvFormatter).configure(conf)
end
def tag
"tag"
end
def test_config_params
d = create_driver
assert_equal(',', d.instance.delimiter)
assert_equal(true, d.instance.force_quotes)
assert_equal(['a', 'b', 'c'], d.instance.fields)
end
data('empty array' => [],
'array including empty string' => ['', ''])
def test_empty_fields(param)
assert_raise Fluent::ConfigError do
create_driver('fields' => param)
end
end
data(
'tab_char' => ["\t", '\t'],
'tab_string' => ["\t", 'TAB'],
'pipe' => ['|', '|'])
def test_config_params_with_customized_delimiters(data)
expected, target = data
d = create_driver("delimiter" => target, 'fields' => 'a,b,c')
assert_equal expected, d.instance.delimiter
end
def test_format
d = create_driver("fields" => "message,message2")
formatted = d.instance.format(tag, @time, {
'message' => 'awesome',
'message2' => 'awesome2'
})
assert_equal("\"awesome\",\"awesome2\"\n", formatted)
end
def test_format_with_nested_fields
d = create_driver("fields" => "message,$.nest.key")
formatted = d.instance.format(tag, @time, {
'message' => 'awesome',
'nest' => {'key' => 'awesome2'}
})
assert_equal("\"awesome\",\"awesome2\"\n", formatted)
end
def test_format_without_newline
d = create_driver("fields" => "message,message2", "add_newline" => false)
formatted = d.instance.format(tag, @time, {
'message' => 'awesome',
'message2' => 'awesome2'
})
assert_equal("\"awesome\",\"awesome2\"", formatted)
end
def test_format_with_customized_delimiters
d = create_driver("fields" => "message,message2",
"delimiter" => "\t")
formatted = d.instance.format(tag, @time, {
'message' => 'awesome',
'message2' => 'awesome2'
})
assert_equal("\"awesome\"\t\"awesome2\"\n", formatted)
end
def test_format_with_non_quote
d = create_driver("fields" => "message,message2",
"force_quotes" => false)
formatted = d.instance.format(tag, @time, {
'message' => 'awesome',
'message2' => 'awesome2'
})
assert_equal("awesome,awesome2\n", formatted)
end
data(
'nil' => {
'message' => 'awesome',
'message2' => nil,
'message3' => 'awesome3'
},
'blank' => {
'message' => 'awesome',
'message2' => '',
'message3' => 'awesome3'
})
def test_format_with_empty_fields(data)
d = create_driver("fields" => "message,message2,message3")
formatted = d.instance.format(tag, @time, data)
assert_equal("\"awesome\",\"\",\"awesome3\"\n", formatted)
end
data(
'normally' => 'one,two,three',
'white_space' => 'one , two , three',
'blank' => 'one,,two,three')
def test_config_params_with_fields(data)
d = create_driver('fields' => data)
assert_equal %w(one two three), d.instance.fields
end
def test_format_with_multiple_records
d = create_driver("fields" => "message,message2")
r = {'message' => 'hello', 'message2' => 'fluentd'}
formatted = d.instance.format(tag, @time, r)
assert_equal("\"hello\",\"fluentd\"\n", formatted)
r = {'message' => 'hey', 'message2' => 'ho'}
formatted = d.instance.format(tag, @time, r)
assert_equal("\"hey\",\"ho\"\n", formatted)
r = {'message' => 'longer message', 'message2' => 'longer longer message'}
formatted = d.instance.format(tag, @time, r)
assert_equal("\"longer message\",\"longer longer message\"\n", formatted)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_json.rb | test/plugin/test_parser_json.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser'
class JsonParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
@parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::JSONParser)
end
sub_test_case "configure_json_parser" do
data("oj", [:oj, [Oj.method(:load), Oj::ParseError]])
data("json", [:json, [JSON.method(:parse), JSON::ParserError]])
data("yajl", [:yajl, [Yajl.method(:load), Yajl::ParseError]])
def test_return_each_loader((input, expected_return))
result = @parser.instance.configure_json_parser(input)
assert_equal expected_return, result
end
def test_raise_exception_for_unknown_input
assert_raise RuntimeError do
@parser.instance.configure_json_parser(:unknown)
end
end
def test_fall_back_oj_to_json_if_oj_not_available
stub(Fluent::OjOptions).available? { false }
result = @parser.instance.configure_json_parser(:oj)
assert_equal [JSON.method(:parse), JSON::ParserError], result
logs = @parser.logs.collect do |log|
log.gsub(/\A\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4} /, "")
end
assert_equal(
["[info]: Oj is not installed, and failing back to JSON for json parser\n"],
logs
)
end
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_parse(data)
@parser.configure('json_parser' => data)
@parser.instance.parse('{"time":1362020400,"host":"192.168.0.1","size":777,"method":"PUT"}') { |time, record|
assert_equal(event_time('2013-02-28 12:00:00 +0900').to_i, time)
assert_equal({
'host' => '192.168.0.1',
'size' => 777,
'method' => 'PUT',
}, record)
}
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_parse_with_large_float(data)
@parser.configure('json_parser' => data)
@parser.instance.parse('{"num":999999999999999999999999999999.99999}') { |time, record|
assert_equal(Float, record['num'].class)
}
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_parse_without_time(data)
time_at_start = Time.now.to_i
@parser.configure('json_parser' => data)
@parser.instance.parse('{"host":"192.168.0.1","size":777,"method":"PUT"}') { |time, record|
assert time && time >= time_at_start, "parser puts current time without time input"
assert_equal({
'host' => '192.168.0.1',
'size' => 777,
'method' => 'PUT',
}, record)
}
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::JSONParser)
parser.configure('json_parser' => data, 'estimate_current_event' => 'false')
parser.instance.parse('{"host":"192.168.0.1","size":777,"method":"PUT"}') { |time, record|
assert_equal({
'host' => '192.168.0.1',
'size' => 777,
'method' => 'PUT',
}, record)
assert_nil time, "parser return nil w/o time and if specified so"
}
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_parse_with_colon_string(data)
@parser.configure('json_parser' => data)
@parser.instance.parse('{"time":1362020400,"log":":message"}') { |time, record|
assert_equal(record['log'], ':message')
}
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_parse_with_invalid_time(data)
@parser.configure('json_parser' => data)
assert_raise Fluent::ParserError do
@parser.instance.parse('{"time":[],"k":"v"}') { |time, record| }
end
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_parse_float_time(data)
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::JSONParser)
parser.configure('json_parser' => data)
text = "100.1"
parser.instance.parse("{\"time\":\"#{text}\"}") do |time, record|
assert_equal 100, time.sec
assert_equal 100_000_000, time.nsec
end
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_parse_with_keep_time_key(data)
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::JSONParser)
format = "%d/%b/%Y:%H:%M:%S %z"
parser.configure(
'time_format' => format,
'keep_time_key' => 'true',
'json_parser' => data
)
text = "28/Feb/2013:12:00:00 +0900"
parser.instance.parse("{\"time\":\"#{text}\"}") do |time, record|
assert_equal Time.strptime(text, format).to_i, time.sec
assert_equal text, record['time']
end
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_parse_with_keep_time_key_without_time_format(data)
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::JSONParser)
parser.configure(
'keep_time_key' => 'true',
'json_parser' => data
)
text = "100"
parser.instance.parse("{\"time\":\"#{text}\"}") do |time, record|
assert_equal text.to_i, time.sec
assert_equal text, record['time']
end
end
def test_yajl_parse_io_with_buffer_smaller_than_input
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::JSONParser)
parser.configure(
'keep_time_key' => 'true',
'json_parser' => 'yajl',
'stream_buffer_size' => 1,
)
text = "100"
waiting(5) do
rd, wr = IO.pipe
wr.write "{\"time\":\"#{text}\"}"
parser.instance.parse_io(rd) do |time, record|
assert_equal text.to_i, time.sec
assert_equal text, record['time']
# Once a record has been received the 'write' end of the pipe must be
# closed, otherwise the test will block waiting for more input.
wr.close
end
end
end
sub_test_case "various record pattern" do
data("Only string", { record: '"message"', expected: [nil] }, keep: true)
data("Only string without quotation", { record: "message", expected: [nil] }, keep: true)
data("Only number", { record: "0", expected: [nil] }, keep: true)
data(
"Array of Hash",
{
record: '[{"k1": 1}, {"k2": 2}]',
expected: [{"k1" => 1}, {"k2" => 2}]
},
keep: true,
)
data(
"Array of both Hash and invalid",
{
record: '[{"k1": 1}, "string", {"k2": 2}, 0]',
expected: [{"k1" => 1}, nil, {"k2" => 2}, nil]
},
keep: true,
)
data(
"Array of all invalid",
{
record: '["string", 0, [{"k": 0}]]',
expected: [nil, nil, nil]
},
keep: true,
)
def test_oj(data)
parsed_records = []
@parser.configure("json_parser" => "oj")
@parser.instance.parse(data[:record]) { |time, record|
parsed_records.append(record)
}
assert_equal(data[:expected], parsed_records)
end
def test_yajl(data)
parsed_records = []
@parser.configure("json_parser" => "yajl")
@parser.instance.parse(data[:record]) { |time, record|
parsed_records.append(record)
}
assert_equal(data[:expected], parsed_records)
end
def test_json(json)
parsed_records = []
@parser.configure("json_parser" => "json")
@parser.instance.parse(data[:record]) { |time, record|
parsed_records.append(record)
}
assert_equal(data[:expected], parsed_records)
end
end
# This becomes NoMethodError if a non-Hash object is passed to convert_values.
# https://github.com/fluent/fluentd/issues/4100
sub_test_case "execute_convert_values with null_empty_string" do
data("Only string", { record: '"message"', expected: [nil] }, keep: true)
data(
"Hash",
{
record: '{"k1": 1, "k2": ""}',
expected: [{"k1" => 1, "k2" => nil}]
},
keep: true,
)
data(
"Array of Hash",
{
record: '[{"k1": 1}, {"k2": ""}]',
expected: [{"k1" => 1}, {"k2" => nil}]
},
keep: true,
)
def test_oj(data)
parsed_records = []
@parser.configure("json_parser" => "oj", "null_empty_string" => true)
@parser.instance.parse(data[:record]) { |time, record|
parsed_records.append(record)
}
assert_equal(data[:expected], parsed_records)
end
def test_yajl(data)
parsed_records = []
@parser.configure("json_parser" => "yajl", "null_empty_string" => true)
@parser.instance.parse(data[:record]) { |time, record|
parsed_records.append(record)
}
assert_equal(data[:expected], parsed_records)
end
def test_json(json)
parsed_records = []
@parser.configure("json_parser" => "json", "null_empty_string" => true)
@parser.instance.parse(data[:record]) { |time, record|
parsed_records.append(record)
}
assert_equal(data[:expected], parsed_records)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_null.rb | test/plugin/test_out_null.rb | require_relative '../helper'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_null'
class NullOutputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
end
def create_driver(conf = "")
Fluent::Test::Driver::Output.new(Fluent::Plugin::NullOutput).configure(conf)
end
sub_test_case 'non-buffered' do
test 'configure' do
assert_nothing_raised do
create_driver
end
end
test 'process' do
d = create_driver
assert_nothing_raised do
d.run do
d.feed("test", Fluent::EventTime.now, {"test" => "null"})
end
end
assert_equal([], d.events(tag: "test"))
end
end
sub_test_case 'buffered' do
test 'default chunk limit size is 100' do
d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
assert_equal 10 * 1024, d.instance.buffer_config.chunk_limit_size
assert d.instance.buffer_config.flush_at_shutdown
assert_equal ['tag'], d.instance.buffer_config.chunk_keys
assert d.instance.chunk_key_tag
assert !d.instance.chunk_key_time
assert_equal [], d.instance.chunk_keys
end
test 'writes standard formatted chunks' do
d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
t = event_time("2016-05-23 00:22:13 -0800")
d.run(default_tag: 'test', flush: true) do
d.feed(t, {"message" => "null null null"})
d.feed(t, {"message" => "null null"})
d.feed(t, {"message" => "null"})
end
assert_equal 3, d.instance.emit_count
assert_equal 3, d.instance.emit_records
end
test 'check for chunk passed to #write' do
d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
data = []
d.instance.feed_proc = ->(chunk){ data << [chunk.unique_id, chunk.metadata.tag, chunk.read] }
t = event_time("2016-05-23 00:22:13 -0800")
d.run(default_tag: 'test', flush: true) do
d.feed(t, {"message" => "null null null"})
d.feed(t, {"message" => "null null"})
d.feed(t, {"message" => "null"})
end
assert_equal 1, data.size
_, tag, binary = data.first
events = []
Fluent::MessagePackFactory.unpacker.feed_each(binary){|obj| events << obj }
assert_equal 'test', tag
assert_equal [ [t, {"message" => "null null null"}], [t, {"message" => "null null"}], [t, {"message" => "null"}] ], events
end
test 'check for chunk passed to #try_write' do
d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
data = []
d.instance.feed_proc = ->(chunk){ data << [chunk.unique_id, chunk.metadata.tag, chunk.read] }
d.instance.delayed = true
t = event_time("2016-05-23 00:22:13 -0800")
d.run(default_tag: 'test', flush: true, wait_flush_completion: false, shutdown: false) do
d.feed(t, {"message" => "null null null"})
d.feed(t, {"message" => "null null"})
d.feed(t, {"message" => "null"})
end
assert_equal 1, data.size
chunk_id, tag, binary = data.first
events = []
Fluent::MessagePackFactory.unpacker.feed_each(binary){|obj| events << obj }
assert_equal 'test', tag
assert_equal [ [t, {"message" => "null null null"}], [t, {"message" => "null null"}], [t, {"message" => "null"}] ], events
assert_equal [chunk_id], d.instance.buffer.dequeued.keys
d.instance.commit_write(chunk_id)
assert_equal [], d.instance.buffer.dequeued.keys
d.instance_shutdown
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_output.rb | test/plugin/test_output.rb | require_relative '../helper'
require 'fluent/plugin/output'
require 'fluent/plugin/buffer'
require 'fluent/event'
require 'json'
require 'time'
require 'timeout'
module FluentPluginOutputTest
class DummyBareOutput < Fluent::Plugin::Output
def register(name, &block)
instance_variable_set("@#{name}", block)
end
end
class DummySyncOutput < DummyBareOutput
def initialize
super
@process = nil
end
def process(tag, es)
@process ? @process.call(tag, es) : nil
end
end
class DummyAsyncOutput < DummyBareOutput
def initialize
super
@format = nil
@write = nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
end
class DummyAsyncStandardOutput < DummyBareOutput
def initialize
super
@write = nil
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
end
class DummyDelayedOutput < DummyBareOutput
def initialize
super
@format = nil
@try_write = nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
class DummyDelayedStandardOutput < DummyBareOutput
def initialize
super
@try_write = nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
class DummyFullFeatureOutput < DummyBareOutput
def initialize
super
@prefer_buffered_processing = nil
@prefer_delayed_commit = nil
@process = nil
@format = nil
@write = nil
@try_write = nil
end
def prefer_buffered_processing
@prefer_buffered_processing ? @prefer_buffered_processing.call : false
end
def prefer_delayed_commit
@prefer_delayed_commit ? @prefer_delayed_commit.call : false
end
def process(tag, es)
@process ? @process.call(tag, es) : nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
end
class OutputTest < Test::Unit::TestCase
class << self
def startup
$LOAD_PATH.unshift File.expand_path(File.join(File.dirname(__FILE__), '../scripts'))
require 'fluent/plugin/out_test'
end
def shutdown
$LOAD_PATH.shift
end
end
def create_output(type=:full)
case type
when :bare then FluentPluginOutputTest::DummyBareOutput.new
when :sync then FluentPluginOutputTest::DummySyncOutput.new
when :buffered then FluentPluginOutputTest::DummyAsyncOutput.new
when :standard then FluentPluginOutputTest::DummyAsyncStandardOutput.new
when :delayed then FluentPluginOutputTest::DummyDelayedOutput.new
when :sdelayed then FluentPluginOutputTest::DummyDelayedStandardOutput.new
when :full then FluentPluginOutputTest::DummyFullFeatureOutput.new
else
raise ArgumentError, "unknown type: #{type}"
end
end
def create_metadata(timekey: nil, tag: nil, variables: nil)
Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end
def create_chunk(timekey: nil, tag: nil, variables: nil)
m = Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
Fluent::Plugin::Buffer::MemoryChunk.new(m)
end
def waiting(seconds)
begin
Timeout.timeout(seconds) do
yield
end
rescue Timeout::Error
STDERR.print(*@i.log.out.logs)
raise
end
end
sub_test_case 'basic output feature' do
setup do
@i = create_output(:full)
end
test 'are not available with multi workers configuration in default' do
assert_false @i.multi_workers_ready?
end
test '#implement? can return features for plugin instances' do
i1 = FluentPluginOutputTest::DummyBareOutput.new
assert !i1.implement?(:synchronous)
assert !i1.implement?(:buffered)
assert !i1.implement?(:delayed_commit)
assert !i1.implement?(:custom_format)
i2 = FluentPluginOutputTest::DummySyncOutput.new
assert i2.implement?(:synchronous)
assert !i2.implement?(:buffered)
assert !i2.implement?(:delayed_commit)
assert !i2.implement?(:custom_format)
i3 = FluentPluginOutputTest::DummyAsyncOutput.new
assert !i3.implement?(:synchronous)
assert i3.implement?(:buffered)
assert !i3.implement?(:delayed_commit)
assert i3.implement?(:custom_format)
i4 = FluentPluginOutputTest::DummyAsyncStandardOutput.new
assert !i4.implement?(:synchronous)
assert i4.implement?(:buffered)
assert !i4.implement?(:delayed_commit)
assert !i4.implement?(:custom_format)
i5 = FluentPluginOutputTest::DummyDelayedOutput.new
assert !i5.implement?(:synchronous)
assert !i5.implement?(:buffered)
assert i5.implement?(:delayed_commit)
assert i5.implement?(:custom_format)
i6 = FluentPluginOutputTest::DummyDelayedStandardOutput.new
assert !i6.implement?(:synchronous)
assert !i6.implement?(:buffered)
assert i6.implement?(:delayed_commit)
assert !i6.implement?(:custom_format)
i6 = FluentPluginOutputTest::DummyFullFeatureOutput.new
assert i6.implement?(:synchronous)
assert i6.implement?(:buffered)
assert i6.implement?(:delayed_commit)
assert i6.implement?(:custom_format)
end
test 'plugin lifecycle for configure/start/stop/before_shutdown/shutdown/after_shutdown/close/terminate' do
assert !@i.configured?
@i.configure(config_element())
assert @i.configured?
assert !@i.started?
@i.start
assert @i.started?
assert !@i.after_started?
@i.after_start
assert @i.after_started?
assert !@i.stopped?
@i.stop
assert @i.stopped?
assert !@i.before_shutdown?
@i.before_shutdown
assert @i.before_shutdown?
assert !@i.shutdown?
@i.shutdown
assert @i.shutdown?
assert !@i.after_shutdown?
@i.after_shutdown
assert @i.after_shutdown?
assert !@i.closed?
@i.close
assert @i.closed?
assert !@i.terminated?
@i.terminate
assert @i.terminated?
end
test 'can use metrics plugins and fallback methods' do
@i.configure(config_element())
%w[num_errors_metrics emit_count_metrics emit_size_metrics emit_records_metrics write_count_metrics
write_secondary_count_metrics rollback_count_metrics flush_time_count_metrics slow_flush_count_metrics
drop_oldest_chunk_count_metrics].each do |metric_name|
assert_true @i.instance_variable_get(:"@#{metric_name}").is_a?(Fluent::Plugin::Metrics)
end
assert_equal 0, @i.num_errors
assert_equal 0, @i.emit_count
assert_equal 0, @i.emit_records
assert_equal 0, @i.emit_size
assert_equal 0, @i.emit_records
assert_equal 0, @i.write_count
assert_equal 0, @i.write_secondary_count
assert_equal 0, @i.rollback_count
assert_equal 0, @i.flush_time_count
assert_equal 0, @i.slow_flush_count
assert_equal 0, @i.drop_oldest_chunk_count
end
data(:new_api => :chunk,
:old_api => :metadata)
test '#extract_placeholders does nothing if chunk key is not specified' do |api|
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
assert !@i.chunk_key_time
assert !@i.chunk_key_tag
assert_equal [], @i.chunk_keys
tmpl = "/mypath/%Y/%m/%d/${tag}/${tag[1]}/${tag[2]}/${key1}/${key2}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
c = if api == :chunk
create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
else
create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
end
assert_equal tmpl, @i.extract_placeholders(tmpl, c)
end
data(:new_api => :chunk,
:old_api => :metadata)
test '#extract_placeholders can extract time if time key and range are configured' do |api|
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'time', {'timekey' => 60*30, 'timekey_zone' => "+0900"})]))
assert @i.chunk_key_time
assert !@i.chunk_key_tag
assert_equal [], @i.chunk_keys
tmpl = "/mypath/%Y/%m/%d/%H-%M/${tag}/${tag[1]}/${tag[2]}/${key1}/${key2}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
c = if api == :chunk
create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
else
create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
end
assert_equal "/mypath/2016/04/11/20-30/${tag}/${tag[1]}/${tag[2]}/${key1}/${key2}/tail", @i.extract_placeholders(tmpl, c)
end
data(:new_api => :chunk,
:old_api => :metadata)
test '#extract_placeholders can extract tag and parts of tag if tag is configured' do |api|
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'tag', {})]))
assert !@i.chunk_key_time
assert @i.chunk_key_tag
assert_equal [], @i.chunk_keys
tmpl = "/mypath/%Y/%m/%d/%H-%M/${tag}/${tag[1]}/${tag[2]}/${key1}/${key2}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
c = if api == :chunk
create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
else
create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
end
assert_equal "/mypath/%Y/%m/%d/%H-%M/fluentd.test.output/test/output/${key1}/${key2}/tail", @i.extract_placeholders(tmpl, c)
end
data(:new_api => :chunk,
:old_api => :metadata)
test '#extract_placeholders can extract variables if variables are configured' do |api|
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'key1,key2', {})]))
assert !@i.chunk_key_time
assert !@i.chunk_key_tag
assert_equal ['key1','key2'], @i.chunk_keys
tmpl = "/mypath/%Y/%m/%d/%H-%M/${tag}/${tag[1]}/${tag[2]}/${key1}/${key2}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
c = if api == :chunk
create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
else
create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
end
assert_equal "/mypath/%Y/%m/%d/%H-%M/${tag}/${tag[1]}/${tag[2]}/value1/value2/tail", @i.extract_placeholders(tmpl, c)
end
data(:new_api => :chunk,
:old_api => :metadata)
test '#extract_placeholders can extract nested variables if variables are configured with dot notation' do |api|
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'key,$.nest.key', {})]))
assert !@i.chunk_key_time
assert !@i.chunk_key_tag
assert_equal ['key','$.nest.key'], @i.chunk_keys
tmpl = "/mypath/%Y/%m/%d/%H-%M/${tag}/${tag[1]}/${tag[2]}/${key}/${$.nest.key}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {:key => "value1", :"$.nest.key" => "value2"}
c = if api == :chunk
create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
else
create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
end
assert_equal "/mypath/%Y/%m/%d/%H-%M/${tag}/${tag[1]}/${tag[2]}/value1/value2/tail", @i.extract_placeholders(tmpl, c)
end
data(:new_api => :chunk,
:old_api => :metadata)
test '#extract_placeholders can extract all chunk keys if configured' do |api|
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'time,tag,key1,key2', {'timekey' => 60*30, 'timekey_zone' => "+0900"})]))
assert @i.chunk_key_time
assert @i.chunk_key_tag
assert_equal ['key1','key2'], @i.chunk_keys
tmpl = "/mypath/%Y/%m/%d/%H-%M/${tag}/${tag[1]}/${tag[2]}/${key1}/${key2}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
c = if api == :chunk
create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
else
create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
end
assert_equal "/mypath/2016/04/11/20-30/fluentd.test.output/test/output/value1/value2/tail", @i.extract_placeholders(tmpl, c)
end
data(:new_api => :chunk,
:old_api => :metadata)
test '#extract_placeholders can extract negative index with tag' do |api|
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'time,tag,key1,key2', {'timekey' => 60*30, 'timekey_zone' => "+0900"})]))
assert @i.chunk_key_time
assert @i.chunk_key_tag
assert_equal ['key1','key2'], @i.chunk_keys
tmpl = "/mypath/%Y/%m/%d/%H-%M/${tag}/${tag[-1]}/${tag[-2]}/${key1}/${key2}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
c = if api == :chunk
create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
else
create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
end
assert_equal "/mypath/2016/04/11/20-30/fluentd.test.output/output/test/value1/value2/tail", @i.extract_placeholders(tmpl, c)
end
data(:new_api => :chunk,
:old_api => :metadata)
test '#extract_placeholders removes out-of-range tag part and unknown variable placeholders' do |api|
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'time,tag,key1,key2', {'timekey' => 60*30, 'timekey_zone' => "+0900"})]))
assert @i.chunk_key_time
assert @i.chunk_key_tag
assert_equal ['key1','key2'], @i.chunk_keys
tmpl = "/mypath/%Y/%m/%d/%H-%M/${tag}/${tag[3]}/${tag[-4]}/${key3}/${key4}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
c = if api == :chunk
create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
else
create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
end
assert_equal "/mypath/2016/04/11/20-30/fluentd.test.output/////tail", @i.extract_placeholders(tmpl, c)
end
test '#extract_placeholders logs warn message if metadata is passed for ${chunk_id} placeholder' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
tmpl = "/mypath/${chunk_id}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
m = create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
@i.extract_placeholders(tmpl, m)
logs = @i.log.out.logs
assert { logs.any? { |log| log.include?("${chunk_id} is not allowed in this plugin") } }
end
test '#extract_placeholders does not log for ${chunk_id} placeholder' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
tmpl = "/mypath/${chunk_id}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
c = create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
@i.log.out.logs.clear
@i.extract_placeholders(tmpl, c)
logs = @i.log.out.logs
assert { logs.none? { |log| log.include?("${chunk_id}") } }
end
test '#extract_placeholders does not log for ${chunk_id} placeholder (with @chunk_keys)' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'key1')]))
tmpl = "/mypath/${chunk_id}/${key1}/tail"
t = event_time('2016-04-11 20:30:00 +0900')
v = {key1: "value1", key2: "value2"}
c = create_chunk(timekey: t, tag: 'fluentd.test.output', variables: v)
@i.log.out.logs.clear
@i.extract_placeholders(tmpl, c)
logs = @i.log.out.logs
assert { logs.none? { |log| log.include?("${chunk_id}") } }
end
test '#extract_placeholders logs warn message with not replaced key' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
tmpl = "/mypath/${key1}/test"
t = event_time('2016-04-11 20:30:00 +0900')
v = { key1: "value1" }
m = create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
@i.extract_placeholders(tmpl, m)
logs = @i.log.out.logs
assert { logs.any? { |log| log.include?("chunk key placeholder 'key1' not replaced. template:#{tmpl}") } }
end
test '#extract_placeholders logs warn message with not replaced key if variables exist and chunk_key is not empty' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'key1')]))
tmpl = "/mypath/${key1}/${key2}/test"
t = event_time('2016-04-11 20:30:00 +0900')
v = { key1: "value1" }
m = create_metadata(timekey: t, tag: 'fluentd.test.output', variables: v)
@i.extract_placeholders(tmpl, m)
logs = @i.log.out.logs
assert { logs.any? { |log| log.include?("chunk key placeholder 'key2' not replaced. template:#{tmpl}") } }
end
sub_test_case '#placeholder_validators' do
test 'returns validators for time, tag and keys when a template has placeholders even if plugin is not configured with these keys' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
validators = @i.placeholder_validators(:path, "/my/path/${tag}/${username}/file.%Y%m%d_%H%M.log")
assert_equal 3, validators.size
assert_equal 1, validators.count(&:time?)
assert_equal 1, validators.count(&:tag?)
assert_equal 1, validators.count(&:keys?)
end
test 'returns validators for time, tag and keys when a plugin is configured with these keys even if a template does not have placeholders' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'time,tag,username', {'timekey' => 60})]))
validators = @i.placeholder_validators(:path, "/my/path/file.log")
assert_equal 3, validators.size
assert_equal 1, validators.count(&:time?)
assert_equal 1, validators.count(&:tag?)
assert_equal 1, validators.count(&:keys?)
end
test 'returns a validator for time if a template has timestamp placeholders' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
validators = @i.placeholder_validators(:path, "/my/path/file.%Y-%m-%d.log")
assert_equal 1, validators.size
assert_equal 1, validators.count(&:time?)
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/file.%Y-%m-%d.log' has timestamp placeholders, but chunk key 'time' is not configured") do
validators.first.validate!
end
end
test 'returns a validator for time if a plugin is configured with time key' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'time', {'timekey' => '30'})]))
validators = @i.placeholder_validators(:path, "/my/path/to/file.log")
assert_equal 1, validators.size
assert_equal 1, validators.count(&:time?)
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/to/file.log' doesn't have timestamp placeholders for timekey 30") do
validators.first.validate!
end
end
test 'returns a validator for tag if a template has tag placeholders' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
validators = @i.placeholder_validators(:path, "/my/path/${tag}/file.log")
assert_equal 1, validators.size
assert_equal 1, validators.count(&:tag?)
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/${tag}/file.log' has tag placeholders, but chunk key 'tag' is not configured") do
validators.first.validate!
end
end
test 'returns a validator for tag if a plugin is configured with tag key' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'tag')]))
validators = @i.placeholder_validators(:path, "/my/path/file.log")
assert_equal 1, validators.size
assert_equal 1, validators.count(&:tag?)
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/file.log' doesn't have tag placeholder") do
validators.first.validate!
end
end
test 'returns a validator for variable keys if a template has variable placeholders' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
validators = @i.placeholder_validators(:path, "/my/path/${username}/file.${group}.log")
assert_equal 1, validators.size
assert_equal 1, validators.count(&:keys?)
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/${username}/file.${group}.log' has placeholders, but chunk keys doesn't have keys group,username") do
validators.first.validate!
end
end
test 'returns a validator for variable keys if a plugin is configured with variable keys' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'username,group')]))
validators = @i.placeholder_validators(:path, "/my/path/file.log")
assert_equal 1, validators.size
assert_equal 1, validators.count(&:keys?)
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/file.log' doesn't have enough placeholders for keys group,username") do
validators.first.validate!
end
end
end
sub_test_case '#placeholder_validate!' do
test 'raises configuration error for a template when timestamp placeholders exist but time key is missing' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
assert_raise Fluent::ConfigError.new("Parameter 'path: /path/without/timestamp/file.%Y%m%d-%H%M.log' has timestamp placeholders, but chunk key 'time' is not configured") do
@i.placeholder_validate!(:path, "/path/without/timestamp/file.%Y%m%d-%H%M.log")
end
end
test 'raises configuration error for a template without timestamp placeholders when timekey is configured' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'time', {"timekey" => 180})]))
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/file.log' doesn't have timestamp placeholders for timekey 180") do
@i.placeholder_validate!(:path, "/my/path/file.log")
end
assert_nothing_raised do
@i.placeholder_validate!(:path, "/my/path/%Y%m%d/file.%H%M.log")
end
end
test 'raises configuration error for a template with timestamp placeholders when plugin is configured more fine timekey' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'time', {"timekey" => 180})]))
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/file.%Y%m%d_%H.log' doesn't have timestamp placeholder for hour('%H') for timekey 180") do
@i.placeholder_validate!(:path, "/my/path/file.%Y%m%d_%H.log")
end
assert_nothing_raised do
@i.placeholder_validate!(:path, "/my/path/file.%Y%m%d_%H%M.log")
end
end
test 'raises configuration error for a template when tag placeholders exist but tag key is missing' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/${tag}/file.${tag[2]}.log' has tag placeholders, but chunk key 'tag' is not configured") do
@i.placeholder_validate!(:path, "/my/path/${tag}/file.${tag[2]}.log")
end
end
test 'raises configuration error for a template without tag placeholders when tagkey is configured' do
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'tag')]))
assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/file.log' doesn't have tag placeholder") do
@i.placeholder_validate!(:path, "/my/path/file.log")
end
assert_nothing_raised do
@i.placeholder_validate!(:path, "/my/path/${tag}/file.${tag[2]}.log")
end
assert_nothing_raised do
@i.placeholder_validate!(:path, "/my/path/${tag}/file.${tag[-1]}.log")
end
end
# Validation of variable ("${key}") placeholders in templates against the
# configured buffer chunk keys: placeholder_validate! must raise
# Fluent::ConfigError on a mismatch in either direction.
test 'raises configuration error for a template when variable key placeholders exist but chunk keys are missing' do
  # No chunk keys configured, but the template references ${service}/${username}.
  @i.configure(config_element('ROOT', '', {}, [config_element('buffer', '')]))
  assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/${service}/file.${username}.log' has placeholders, but chunk keys doesn't have keys service,username") do
    @i.placeholder_validate!(:path, "/my/path/${service}/file.${username}.log")
  end
end
test 'raises configuration error for a template without variable key placeholders when chunk keys are configured' do
  # Every configured chunk key must appear as a placeholder in the template.
  @i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'username,service')]))
  assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/file.log' doesn't have enough placeholders for keys service,username") do
    @i.placeholder_validate!(:path, "/my/path/file.log")
  end
  assert_nothing_raised do
    @i.placeholder_validate!(:path, "/my/path/${service}/file.${username}.log")
  end
end
test 'raise configuration error for a template and configuration with keys mismatch' do
  # Partial coverage is also an error: only the missing keys are reported.
  @i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'username,service')]))
  assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/file.${username}.log' doesn't have enough placeholders for keys service") do
    @i.placeholder_validate!(:path, "/my/path/file.${username}.log")
  end
  assert_raise Fluent::ConfigError.new("Parameter 'path: /my/path/${service}/file.log' doesn't have enough placeholders for keys username") do
    @i.placeholder_validate!(:path, "/my/path/${service}/file.log")
  end
  assert_nothing_raised do
    @i.placeholder_validate!(:path, "/my/path/${service}/file.${username}.log")
  end
end
end
# Placeholder extraction helpers: strftime time formats, ${tag[...]} parts,
# and ${key} variable names.
test '#get_placeholders_time returns seconds,title and example placeholder for a template' do
  # No time format in the template -> all nil.
  s, t, e = @i.get_placeholders_time("/path/to/dir/yay")
  assert_nil s
  assert_nil t
  assert_nil e
  s, t, e = @i.get_placeholders_time("/path/to/%Y%m%d/yay")
  assert_equal 86400, s
  assert_equal :day, t
  assert_equal '%d', e
  # %F is equivalent to %Y-%m-%d, so it is day resolution too.
  s, t, e = @i.get_placeholders_time("my birthday! at %F")
  assert_equal 86400, s
  assert_equal :day, t
  assert_equal '%d', e
  s, t, e = @i.get_placeholders_time("myfile.%Y-%m-%d_%H.log")
  assert_equal 3600, s
  assert_equal :hour, t
  assert_equal '%H', e
  s, t, e = @i.get_placeholders_time("part-%Y%m%d-%H%M.ts")
  assert_equal 60, s
  assert_equal :minute, t
  assert_equal '%M', e
  # %T includes seconds -> the finest resolution wins.
  s, t, e = @i.get_placeholders_time("my first data at %F %T %z")
  assert_equal 1, s
  assert_equal :second, t
  assert_equal '%S', e
end
test '#get_placeholders_tag returns a list of tag part position for a template' do
  assert_equal [], @i.get_placeholders_tag("db.table")
  assert_equal [], @i.get_placeholders_tag("db.table_${non_tag}")
  # ${tag} (the whole tag) is represented by position -1.
  assert_equal [-1], @i.get_placeholders_tag("table_${tag}")
  assert_equal [0, 1], @i.get_placeholders_tag("db_${tag[0]}.table_${tag[1]}")
  assert_equal [-1, 0], @i.get_placeholders_tag("/treedir/${tag[0]}/${tag}")
end
test '#get_placeholders_keys returns a list of keys for a template' do
  assert_equal [], @i.get_placeholders_keys("/path/to/my/data/file.log")
  # ${tag} is not a variable key.
  assert_equal [], @i.get_placeholders_keys("/path/to/my/${tag}/file.log")
  assert_equal ['key1', 'key2'], @i.get_placeholders_keys("/path/to/${key2}/${tag}/file.${key1}.log")
  assert_equal ['.hidden', '0001', '@timestamp', 'a_key', 'my-domain'], @i.get_placeholders_keys("http://${my-domain}/${.hidden}/${0001}/${a_key}?timestamp=${@timestamp}")
end
# Chunk keys that are neither plain keys nor valid record-accessor syntax
# must be rejected at configure time.
data('include space' => 'ke y',
     'bracket notation' => "$['key']",
     'invalid notation' => "$.ke y")
test 'configure checks invalid chunk keys' do |chunk_keys|
  i = create_output(:buffered)
  assert_raise Fluent::ConfigError do
    i.configure(config_element('ROOT' , '', {}, [config_element('buffer', chunk_keys)]))
  end
end
# Metadata construction: tag / timekey / variables are extracted from the
# event according to the configured buffer chunk keys.
test '#metadata returns object which contains tag/timekey/variables from records as specified in configuration' do
  tag = 'test.output'
  time = event_time('2016-04-12 15:31:23 -0700')
  timekey = event_time('2016-04-12 15:00:00 -0700')
  record = {"key1" => "value1", "num1" => 1, "message" => "my message", "nest" => {"key" => "nested value"}}
  # No chunk keys -> empty metadata.
  i1 = create_output(:buffered)
  i1.configure(config_element('ROOT','',{},[config_element('buffer', '')]))
  assert_equal create_metadata(), i1.metadata(tag, time, record)
  i2 = create_output(:buffered)
  i2.configure(config_element('ROOT','',{},[config_element('buffer', 'tag')]))
  assert_equal create_metadata(tag: tag), i2.metadata(tag, time, record)
  # 'time' chunk key: event time is truncated to the timekey unit (1h here).
  i3 = create_output(:buffered)
  i3.configure(config_element('ROOT','',{},[config_element('buffer', 'time', {"timekey" => 3600, "timekey_zone" => "-0700"})]))
  assert_equal create_metadata(timekey: timekey), i3.metadata(tag, time, record)
  i4 = create_output(:buffered)
  i4.configure(config_element('ROOT','',{},[config_element('buffer', 'key1', {})]))
  assert_equal create_metadata(variables: {key1: "value1"}), i4.metadata(tag, time, record)
  i5 = create_output(:buffered)
  i5.configure(config_element('ROOT','',{},[config_element('buffer', 'key1,num1', {})]))
  assert_equal create_metadata(variables: {key1: "value1", num1: 1}), i5.metadata(tag, time, record)
  i6 = create_output(:buffered)
  i6.configure(config_element('ROOT','',{},[config_element('buffer', 'tag,time', {"timekey" => 3600, "timekey_zone" => "-0700"})]))
  assert_equal create_metadata(timekey: timekey, tag: tag), i6.metadata(tag, time, record)
  i7 = create_output(:buffered)
  i7.configure(config_element('ROOT','',{},[config_element('buffer', 'tag,num1', {"timekey" => 3600, "timekey_zone" => "-0700"})]))
  assert_equal create_metadata(tag: tag, variables: {num1: 1}), i7.metadata(tag, time, record)
  i8 = create_output(:buffered)
  i8.configure(config_element('ROOT','',{},[config_element('buffer', 'time,tag,key1', {"timekey" => 3600, "timekey_zone" => "-0700"})]))
  assert_equal create_metadata(timekey: timekey, tag: tag, variables: {key1: "value1"}), i8.metadata(tag, time, record)
  # Record-accessor syntax ($.nest.key) digs into nested hashes.
  i9 = create_output(:buffered)
  i9.configure(config_element('ROOT','',{},[config_element('buffer', 'key1,$.nest.key', {})]))
  assert_equal create_metadata(variables: {:key1 => "value1", :"$.nest.key" => 'nested value'}), i9.metadata(tag, time, record)
end
# A :sync (non-buffered) output must dispatch emitted events straight to
# #process through #emit_sync.
test '#emit calls #process via #emit_sync for non-buffered output' do
  output = create_output(:sync)
  invoked = false
  output.register(:process) { |_tag, _es| invoked = true }
  output.configure(config_element())
  output.start
  output.after_start

  now = event_time()
  events = Fluent::ArrayEventStream.new([[now, {"key" => "value1"}], [now, {"key" => "value2"}]])
  output.emit_events('tag', events)
  assert invoked

  # Walk the full plugin lifecycle to clean up.
  output.stop
  output.before_shutdown
  output.shutdown
  output.after_shutdown
  output.close
  output.terminate
end
# A :buffered output must run every emitted record through #format.
test '#emit calls #format for buffered output' do
  output = create_output(:buffered)
  format_count = 0
  output.register(:format) { |_tag, _time, _record| format_count += 1; '' }
  output.configure(config_element())
  output.start
  output.after_start

  now = event_time()
  events = Fluent::ArrayEventStream.new([[now, {"key" => "value1"}], [now, {"key" => "value2"}]])
  output.emit_events('tag', events)
  # Two records in -> two #format calls.
  assert_equal 2, format_count

  # Walk the full plugin lifecycle to clean up.
  output.stop
  output.before_shutdown
  output.shutdown
  output.after_shutdown
  output.close
  output.terminate
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_sd_srv.rb | test/plugin/test_sd_srv.rb | require_relative '../helper'
require 'fluent/plugin/sd_srv'
require 'fileutils'
require 'flexmock/test_unit'
require 'json'
# Tests for the "srv" service discovery plugin: services are built from DNS
# SRV records (RFC 2782), optionally resolving target names to addresses.
# DNS access is stubbed by replacing Resolv::DNS.new with a flexmock resolver.
class SrvServiceDiscoveryTest < ::Test::Unit::TestCase
  # priority 1, weight 10, port 8081 / priority 2, weight 20, port 8082
  SRV_RECORD1 = Resolv::DNS::Resource::IN::SRV.new(1, 10, 8081, 'service1.example.com')
  SRV_RECORD2 = Resolv::DNS::Resource::IN::SRV.new(2, 20, 8082, 'service2.example.com')

  sub_test_case 'configure' do
    test 'set services ordered by priority' do
      sdf = Fluent::Plugin::SrvServiceDiscovery.new
      # Records come back out of order; configure must sort them by priority.
      mock(Resolv::DNS).new { flexmock('dns_resolver', getresources: [SRV_RECORD2, SRV_RECORD1], getaddress: '127.0.0.1') }
      sdf.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com' }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8081, 'service1.example.com', 10, false, '', '', nil), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8082, 'service2.example.com', 20, false, '', '', nil), sdf.services[1]
    end

    test 'return host name without resolving name when dns_lookup is false' do
      sdf = Fluent::Plugin::SrvServiceDiscovery.new
      mock(Resolv::DNS).new { flexmock('dns_resolver', getresources: [SRV_RECORD1, SRV_RECORD2], getaddress: '127.0.0.1') }
      sdf.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com', 'dns_lookup' => false }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, 'service1.example.com', 8081, 'service1.example.com', 10, false, '', '', nil), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, 'service2.example.com', 8082, 'service2.example.com', 20, false, '', '', nil), sdf.services[1]
    end

    test 'pass a value as :nameserver to Resolve::DNS when dns_server_host is given' do
      sdf = Fluent::Plugin::SrvServiceDiscovery.new
      mock(Resolv::DNS).new(nameserver: '8.8.8.8') { flexmock('dns_resolver', getresources: [SRV_RECORD1, SRV_RECORD2], getaddress: '127.0.0.1') }
      sdf.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com', 'dns_server_host' => '8.8.8.8' }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8081, 'service1.example.com', 10, false, '', '', nil), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8082, 'service2.example.com', 20, false, '', '', nil), sdf.services[1]
    end

    test 'pass a value as :nameserver_port to Resolve::DNS when dns_server_host has port' do
      sdf = Fluent::Plugin::SrvServiceDiscovery.new
      mock(Resolv::DNS).new(nameserver_port: [['8.8.8.8', 8080]]) { flexmock('dns_resolver', getresources: [SRV_RECORD1, SRV_RECORD2], getaddress: '127.0.0.1') }
      sdf.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com', 'dns_server_host' => '8.8.8.8:8080' }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8081, 'service1.example.com', 10, false, '', '', nil), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8082, 'service2.example.com', 20, false, '', '', nil), sdf.services[1]
    end

    test 'target follows RFC2782' do
      sdf = Fluent::Plugin::SrvServiceDiscovery.new
      # NOTE(review): the local `mock` shadows RR's `mock` helper used below;
      # `mock(Resolv::DNS)` still dispatches to the method because the call
      # has arguments, but renaming the local would be clearer.
      mock = flexmock('dns_resolver', getaddress: '127.0.0.1')
             .should_receive(:getresources).with("_service1._tcp.example.com", Resolv::DNS::Resource::IN::SRV)
             .and_return([SRV_RECORD1, SRV_RECORD2])
             .mock
      mock(Resolv::DNS).new { mock }
      sdf.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com' }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8081, 'service1.example.com', 10, false, '', '', nil), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8082, 'service2.example.com', 20, false, '', '', nil), sdf.services[1]
    end

    test 'can change protocol' do
      sdf = Fluent::Plugin::SrvServiceDiscovery.new
      # proto=udp changes the queried SRV name to _service1._udp.example.com.
      mock = flexmock('dns_resolver', getaddress: '127.0.0.1')
             .should_receive(:getresources).with("_service1._udp.example.com", Resolv::DNS::Resource::IN::SRV)
             .and_return([SRV_RECORD1, SRV_RECORD2])
             .mock
      mock(Resolv::DNS).new { mock }
      sdf.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com', 'proto' => 'udp' }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8081, 'service1.example.com', 10, false, '', '', nil), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8082, 'service2.example.com', 20, false, '', '', nil), sdf.services[1]
    end

    test 'can set password, username, password' do
      sdf = Fluent::Plugin::SrvServiceDiscovery.new
      mock(Resolv::DNS).new { flexmock('dns_resolver', getresources: [SRV_RECORD2, SRV_RECORD1], getaddress: '127.0.0.1') }
      sdf.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com', 'shared_key' => 'key', 'username' => 'user', 'password' => 'pass' }))
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8081, 'service1.example.com', 10, false, 'user', 'pass', 'key'), sdf.services[0]
      assert_equal Fluent::Plugin::ServiceDiscovery::Service.new(:srv, '127.0.0.1', 8082, 'service2.example.com', 20, false, 'user', 'pass', 'key'), sdf.services[1]
    end
  end

  sub_test_case '#start' do
    # Replaces the plugin's timer helper with a Fiber-driven loop so tests can
    # step the periodic SRV refresh deterministically via #resume.
    module TestTimerEventHelperWrapper
      # easy to control statsevent
      def timer_execute(_name, _interval, &block)
        @test_timer_event_helper_wrapper_context = Fiber.new do
          loop do
            block.call
            # Yield back to the test; a :finish resume stops the loop.
            if Fiber.yield == :finish
              break
            end
          end
        end

        resume
      end

      # Run one more iteration of the timer block.
      def resume
        @test_timer_event_helper_wrapper_context.resume(:resume)
      end

      def shutdown
        super

        if @test_timer_event_helper_wrapper_context
          @test_timer_event_helper_wrapper_context.resume(:finish)
        end
      end
    end

    setup do
      sds = Fluent::Plugin::SrvServiceDiscovery.new
      @sd_srv = sds
    end

    teardown do
      # Drive the remaining lifecycle phases so each test leaves no state behind.
      if @sd_srv
        @sd_srv.stop unless @sd_srv.stopped?
        @sd_srv.before_shutdown unless @sd_srv.before_shutdown?
        @sd_srv.shutdown unless @sd_srv.shutdown?
        @sd_srv.after_shutdown unless @sd_srv.after_shutdown?
        @sd_srv.close unless @sd_srv.closed?
        @sd_srv.terminate unless @sd_srv.terminated?
      end
    end

    test 'Skip if srv record is not updated' do
      @sd_srv.extend(TestTimerEventHelperWrapper)
      mock(Resolv::DNS).new { flexmock('dns_resolver', getresources: [SRV_RECORD2, SRV_RECORD1], getaddress: '127.0.0.1') }
      @sd_srv.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com' }))
      queue = []
      @sd_srv.start(queue)
      assert_empty queue
      @sd_srv.resume
      assert_empty queue
    end

    test 'Skip if DNS resolver raise an error' do
      @sd_srv.extend(TestTimerEventHelperWrapper)
      mock = flexmock('dns_resolver', getaddress: '127.0.0.1')
             .should_receive(:getresources)
             .and_return([SRV_RECORD1, SRV_RECORD2])
             .and_return { raise 'some error' } # for start
             .and_return { raise 'some error' } # for resume
             .mock
      mock(Resolv::DNS).new { mock }
      @sd_srv.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com' }))
      queue = []
      @sd_srv.start(queue)
      assert_empty queue
      @sd_srv.resume
      assert_empty queue
    end

    test 'if service is updated, service_in and service_out event happen' do
      @sd_srv.extend(TestTimerEventHelperWrapper)
      # First lookup returns record 1, the refresh returns record 2.
      mock = flexmock('dns_resolver', getaddress: '127.0.0.1')
             .should_receive(:getresources)
             .and_return([SRV_RECORD1])
             .and_return([SRV_RECORD2])
             .mock
      mock(Resolv::DNS).new { mock }
      @sd_srv.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com', 'dns_lookup' => false }))
      queue = []
      @sd_srv.start(queue)
      join = queue.shift
      drain = queue.shift
      assert_equal Fluent::Plugin::ServiceDiscovery::SERVICE_IN, join.type
      assert_equal 8082, join.service.port
      assert_equal 'service2.example.com', join.service.host
      assert_equal Fluent::Plugin::ServiceDiscovery::SERVICE_OUT, drain.type
      assert_equal 8081, drain.service.port
      assert_equal 'service1.example.com', drain.service.host
    end

    test 'if service is deleted, service_out event happens' do
      @sd_srv.extend(TestTimerEventHelperWrapper)
      mock = flexmock('dns_resolver', getaddress: '127.0.0.1')
             .should_receive(:getresources)
             .and_return([SRV_RECORD1, SRV_RECORD2])
             .and_return([SRV_RECORD2])
             .mock
      mock(Resolv::DNS).new { mock }
      @sd_srv.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com', 'dns_lookup' => false }))
      queue = []
      @sd_srv.start(queue)
      assert_equal 1, queue.size
      drain = queue.shift
      assert_equal Fluent::Plugin::ServiceDiscovery::SERVICE_OUT, drain.type
      assert_equal 8081, drain.service.port
      assert_equal 'service1.example.com', drain.service.host
    end

    test 'if new service is added, service_in event happens' do
      @sd_srv.extend(TestTimerEventHelperWrapper)
      mock = flexmock('dns_resolver', getaddress: '127.0.0.1')
             .should_receive(:getresources)
             .and_return([SRV_RECORD2])
             .and_return([SRV_RECORD1, SRV_RECORD2])
             .mock
      mock(Resolv::DNS).new { mock }
      @sd_srv.configure(config_element('service_discovery', '', { 'service' => 'service1', 'hostname' => 'example.com', 'dns_lookup' => false }))
      queue = []
      @sd_srv.start(queue)
      assert_equal 1, queue.size
      join = queue.shift
      assert_equal Fluent::Plugin::ServiceDiscovery::SERVICE_IN, join.type
      assert_equal 8081, join.service.port
      assert_equal 'service1.example.com', join.service.host
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_relabel.rb | test/plugin/test_out_relabel.rb | require_relative '../helper'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_relabel'
# Tests for out_relabel: the plugin must pass events through unchanged
# (relabeling only redirects them to another <label> section).
class RelabelOutputTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  # Minimal configuration routing events to the @RELABELED label.
  def default_config
    attrs = {"@type"=>"relabel", "@label"=>"@RELABELED"}
    config_element('ROOT', '', attrs)
  end

  def create_driver(conf = default_config)
    driver = Fluent::Test::Driver::Output.new(Fluent::Plugin::RelabelOutput)
    driver.configure(conf)
  end

  # Records fed in must come out identical and in order.
  def test_process
    driver = create_driver
    stamp = event_time("2011-01-02 13:14:15 UTC")
    records = [{"a"=>1}, {"a"=>2}]
    driver.run(default_tag: 'test') do
      records.each { |record| driver.feed(stamp, record) }
    end
    expected = records.map { |record| ["test", stamp, record] }
    assert_equal expected, driver.events
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_filter_stdout.rb | test/plugin/test_filter_stdout.rb | require_relative '../helper'
require 'fluent/test/driver/filter'
require 'fluent/plugin/filter_stdout'
require 'timecop'
require 'flexmock/test_unit'
# Tests for filter_stdout: the filter prints each event via the logger and
# passes the record through unchanged. Formatting can be configured either
# with flat (v0.12-style) parameters or a <format> sub section.
# Time is frozen with Timecop and TZ pinned to UTC for stable output strings.
class StdoutFilterTest < Test::Unit::TestCase
  include FlexMock::TestCase

  def setup
    Fluent::Test.setup
    @old_tz = ENV["TZ"]
    ENV["TZ"] = "UTC"
    Timecop.freeze
    # Formatter newline differs per platform.
    @default_newline = if Fluent.windows?
                         "\r\n"
                       else
                         "\n"
                       end
  end

  def teardown
    super # FlexMock::TestCase requires this
    # http://flexmock.rubyforge.org/FlexMock/TestCase.html
    Timecop.return
    ENV["TZ"] = @old_tz
  end

  CONFIG = config_element('ROOT')

  def create_driver(conf = CONFIG)
    Fluent::Test::Driver::Filter.new(Fluent::Plugin::StdoutFilter).configure(conf)
  end

  # Feed one record through the filter and return the filtered records.
  def filter(d, time, record)
    d.run {
      d.feed("filter.test", time, record)
    }
    d.filtered_records
  end

  def test_through_record
    d = create_driver
    filtered = filter(d, event_time, {'test' => 'test'})
    # The record itself must be unchanged.
    assert_equal([{'test' => 'test'}], filtered)
  end

  sub_test_case "flat style parameters" do
    sub_test_case "configure" do
      def test_configure_default
        d = create_driver
        d.run {}
        assert_equal 'json', d.instance.formatter.output_type
      end

      data(json: "json",
           hash: "hash",
           ltsv: "ltsv")
      def test_output_type(data)
        d = create_driver(CONFIG + config_element("", "", { "output_type" => data }))
        d.run {}
        assert_equal data, d.instance.formatter.output_type
      end

      def test_invalid_output_type
        assert_raise(Fluent::NotFoundPluginError) do
          d = create_driver(CONFIG + config_element("", "", { "output_type" => "foo" }))
          d.run {}
        end
      end
    end

    def test_output_type_json
      d = create_driver(CONFIG + config_element("", "", { "output_type" => "json" }))
      etime = event_time("2016-10-07 21:09:31.012345678 UTC")
      out = capture_log(d) { filter(d, etime, {'test' => 'test'}) }
      assert_equal "2016-10-07 21:09:31.012345678 +0000 filter.test: {\"test\":\"test\"}\n", out

      # NOTE: Float::NAN is not jsonable
      d = create_driver(CONFIG + config_element("", "", { "output_type" => "json" }))
      flexmock(d.instance.router).should_receive(:emit_error_event)
      filter(d, etime, {'test' => Float::NAN})
    end

    def test_output_type_hash
      d = create_driver(CONFIG + config_element("", "", { "output_type" => "hash" }))
      etime = event_time("2016-10-07 21:09:31.012345678 UTC")
      out = capture_log(d) { filter(d, etime, {'test' => 'test'}) }
      # gsub normalizes hash-rocket spacing across Ruby versions.
      assert_equal "2016-10-07 21:09:31.012345678 +0000 filter.test: {\"test\"=>\"test\"}\n", out.gsub(' => ', '=>')

      # NOTE: Float::NAN is not jsonable, but hash string can output it.
      d = create_driver(CONFIG + config_element("", "", { "output_type" => "hash" }))
      out = capture_log(d) { filter(d, etime, {'test' => Float::NAN}) }
      assert_equal "2016-10-07 21:09:31.012345678 +0000 filter.test: {\"test\"=>NaN}\n", out.gsub(' => ', '=>')
    end

    # Use include_time_key to output the message's time
    def test_include_time_key
      config = config_element("", "", {
                                "output_type" => "json",
                                "include_time_key" => true,
                                "localtime" => false
                              })
      d = create_driver(config)
      etime = event_time("2016-10-07 21:09:31.012345678 UTC")
      out = capture_log(d) { filter(d, etime, {'test' => 'test'}) }
      assert_equal "2016-10-07 21:09:31.012345678 +0000 filter.test: {\"test\":\"test\",\"time\":\"2016-10-07T21:09:31Z\"}\n", out
    end

    # out_stdout formatter itself can also be replaced
    def test_format_json
      d = create_driver(CONFIG + config_element("", "", { "format" => "json" }))
      out = capture_log(d) { filter(d, event_time, {'test' => 'test'}) }
      assert_equal "{\"test\":\"test\"}#{@default_newline}", out
    end
  end

  sub_test_case "with <format> sub section" do
    sub_test_case "configure" do
      def test_default
        conf = config_element
        conf.elements << config_element("format", "", { "@type" => "stdout"})
        d = create_driver(conf)
        d.run {}
        assert_equal("json", d.instance.formatter.output_type)
      end

      data(json: "json",
           hash: "hash",
           ltsv: "ltsv")
      def test_output_type(data)
        conf = config_element
        conf.elements << config_element("format", "", { "@type" => "stdout", "output_type" => data })
        d = create_driver(conf)
        d.run {}
        assert_equal(data, d.instance.formatter.output_type)
      end

      def test_invalid_output_type
        conf = config_element
        conf.elements << config_element("format", "", { "@type" => "stdout", "output_type" => "foo" })
        assert_raise(Fluent::NotFoundPluginError) do
          d = create_driver(conf)
          d.run {}
        end
      end
    end

    sub_test_case "output_type" do
      def test_json
        conf = config_element
        conf.elements << config_element("format", "", { "@type" => "stdout", "output_type" => "json" })
        d = create_driver(conf)
        etime = event_time("2016-10-07 21:09:31.012345678 UTC")
        out = capture_log(d) { filter(d, etime, {'test' => 'test'}) }
        assert_equal "2016-10-07 21:09:31.012345678 +0000 filter.test: {\"test\":\"test\"}\n", out
      end

      def test_json_nan
        # NOTE: Float::NAN is not jsonable
        conf = config_element
        conf.elements << config_element("format", "", { "@type" => "stdout", "output_type" => "json" })
        d = create_driver(conf)
        etime = event_time("2016-10-07 21:09:31.012345678 UTC")
        flexmock(d.instance.router).should_receive(:emit_error_event)
        filter(d, etime, {'test' => Float::NAN})
      end

      def test_hash
        conf = config_element
        conf.elements << config_element("format", "", { "@type" => "stdout", "output_type" => "hash" })
        d = create_driver(conf)
        etime = event_time("2016-10-07 21:09:31.012345678 UTC")
        out = capture_log(d) { filter(d, etime, {'test' => 'test'}) }
        assert_equal "2016-10-07 21:09:31.012345678 +0000 filter.test: {\"test\"=>\"test\"}\n", out.gsub(' => ', '=>')
      end

      def test_hash_nan
        # NOTE: Float::NAN is not jsonable, but hash string can output it.
        conf = config_element
        conf.elements << config_element("format", "", { "@type" => "stdout", "output_type" => "hash" })
        d = create_driver(conf)
        etime = event_time("2016-10-07 21:09:31.012345678 UTC")
        out = capture_log(d) { filter(d, etime, {'test' => Float::NAN}) }
        assert_equal "2016-10-07 21:09:31.012345678 +0000 filter.test: {\"test\"=>NaN}\n", out.gsub(' => ', '=>')
      end

      # Use include_time_key to output the message's time
      def test_include_time_key
        conf = config_element
        conf.elements << config_element("format", "", {
                                          "@type" => "stdout",
                                          "output_type" => "json"
                                        })
        conf.elements << config_element("inject", "", {
                                          "time_key" => "time",
                                          "time_type" => "string",
                                          "localtime" => false
                                        })
        d = create_driver(conf)
        etime = event_time("2016-10-07 21:09:31.012345678 UTC")
        out = capture_log(d) { filter(d, etime, {'test' => 'test'}) }
        assert_equal "2016-10-07 21:09:31.012345678 +0000 filter.test: {\"test\":\"test\",\"time\":\"2016-10-07T21:09:31Z\"}\n", out
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_metrics.rb | test/plugin/test_metrics.rb | require_relative '../helper'
require 'fluent/plugin/metrics'
require 'fluent/plugin/base'
require 'fluent/system_config'
# Minimal Metrics fixture that implements none of the metric operations:
# used to verify the base class raises NotImplementedError for every one.
class BareMetrics < Fluent::Plugin::Metrics
  Fluent::Plugin.register_metrics('bare', self)

  private

  # Just override for tests.
  def has_methods_for_counter?
    false
  end
end
# Counter-style Metrics fixture backed by a plain integer in @data.
# Only the counter API (get/inc/add/set) is implemented; gauge-only
# operations (dec/sub) are left undefined on purpose.
class BasicCounterMetrics < Fluent::Plugin::Metrics
  Fluent::Plugin.register_metrics('example', self)

  attr_reader :data

  def initialize
    super
    @data = 0
  end

  # Reset the stored value on close.
  def close
    @data = 0
    super
  end

  def get
    @data
  end

  def set(value)
    @data = value
  end

  def inc
    @data = @data + 1
  end

  def add(value)
    @data = @data + value
  end
end
# Counter metrics fixture whose #set is provided as a singleton-class alias
# of #set_counter, installed during #configure: verifies that aliased
# counter methods are detected the same as directly defined ones.
class AliasedCounterMetrics < Fluent::Plugin::Metrics
  Fluent::Plugin.register_metrics('example', self)
  attr_reader :data
  def initialize
    super
    @data = 0
  end
  def configure(conf)
    super
    # Alias is installed on this instance's singleton class, after super.
    class << self
      alias_method :set, :set_counter
    end
  end
  def get
    @data
  end
  def inc
    @data +=1
  end
  def add(value)
    @data += value
  end
  def set_counter(value)
    @data = value
  end
  # Reset the stored value on close.
  def close
    @data = 0
    super
  end
end
# Gauge-style Metrics fixture: implements the full gauge API
# (get/inc/dec/add/sub/set) on top of an integer held in @data.
class BasicGaugeMetrics < Fluent::Plugin::Metrics
  Fluent::Plugin.register_metrics('example', self)

  attr_reader :data

  def initialize
    super
    @data = 0
  end

  # Reset the stored value on close.
  def close
    @data = 0
    super
  end

  def get
    @data
  end

  def set(value)
    @data = value
  end

  def inc
    @data = @data + 1
  end

  def dec
    @data = @data - 1
  end

  def add(value)
    @data = @data + value
  end

  def sub(value)
    @data = @data - value
  end
end
# Gauge metrics fixture where #dec/#set/#sub are singleton-class aliases of
# #dec_gauge/#set_gauge/#sub_gauge, installed during #configure: verifies
# that aliased gauge methods are detected the same as direct definitions.
class AliasedGaugeMetrics < Fluent::Plugin::Metrics
  Fluent::Plugin.register_metrics('example', self)
  attr_reader :data
  def initialize
    super
    @data = 0
  end
  def configure(conf)
    super
    # Aliases are installed on this instance's singleton class, after super.
    class << self
      alias_method :dec, :dec_gauge
      alias_method :set, :set_gauge
      alias_method :sub, :sub_gauge
    end
  end
  def get
    @data
  end
  def inc
    @data +=1
  end
  def dec_gauge
    @data -=1
  end
  def add(value)
    @data += value
  end
  def sub_gauge(value)
    @data -= value
  end
  def set_gauge(value)
    @data = value
  end
  # Reset the stored value on close.
  def close
    @data = 0
    super
  end
end
# Exercises the Metrics plugin base class through the fixture classes above
# (bare, counter and gauge variants, plain and aliased).
# NOTE(review): the class is named StorageTest but tests metrics plugins —
# presumably copied from the storage tests; consider renaming to MetricsTest.
class StorageTest < Test::Unit::TestCase
  sub_test_case 'BareMetrics' do
    setup do
      @m = BareMetrics.new
      @m.configure(config_element())
    end
    test 'is configured with plugin information and system config' do
      m = BareMetrics.new
      m.configure(config_element('metrics', '', {}))
      assert_false m.use_gauge_metric
      assert_false m.has_methods_for_counter
      assert_false m.has_methods_for_gauge
    end
    test 'all bare operations are not defined yet' do
      # Without any counter/gauge methods, every operation must raise.
      assert_raise NotImplementedError do
        @m.get
      end
      assert_raise NotImplementedError do
        @m.inc
      end
      assert_raise NotImplementedError do
        @m.dec
      end
      assert_raise NotImplementedError do
        @m.add(10)
      end
      assert_raise NotImplementedError do
        @m.sub(11)
      end
      assert_raise NotImplementedError do
        @m.set(123)
      end
    end
  end
  sub_test_case 'BasicCounterMetric' do
    setup do
      @m = BasicCounterMetrics.new
      @m.configure(config_element('metrics', '', {'@id' => '1'}))
    end
    test 'all basic counter operations work well' do
      assert_true @m.has_methods_for_counter
      assert_false @m.has_methods_for_gauge
      assert_equal 0, @m.get
      assert_equal 1, @m.inc
      @m.add(20)
      assert_equal 21, @m.get
      # Counters do not go down: dec/sub stay unimplemented.
      assert_raise NotImplementedError do
        @m.dec
      end
      @m.set(100)
      assert_equal 100, @m.get
      assert_raise NotImplementedError do
        @m.sub(11)
      end
    end
  end
  sub_test_case 'AliasedCounterMetric' do
    setup do
      @m = AliasedCounterMetrics.new
      @m.configure(config_element('metrics', '', {}))
    end
    test 'all aliased counter operations work well' do
      # Aliased #set must be detected the same as a directly defined one.
      assert_true @m.has_methods_for_counter
      assert_false @m.has_methods_for_gauge
      assert_equal 0, @m.get
      assert_equal 1, @m.inc
      @m.add(20)
      assert_equal 21, @m.get
      assert_raise NotImplementedError do
        @m.dec
      end
      @m.set(100)
      assert_equal 100, @m.get
      assert_raise NotImplementedError do
        @m.sub(11)
      end
    end
  end
  sub_test_case 'BasicGaugeMetric' do
    setup do
      @m = BasicGaugeMetrics.new
      @m.use_gauge_metric = true
      @m.configure(config_element('metrics', '', {}))
    end
    test 'all basic gauge operations work well' do
      assert_false @m.has_methods_for_counter
      assert_true @m.has_methods_for_gauge
      assert_equal 0, @m.get
      assert_equal 1, @m.inc
      @m.add(20)
      assert_equal 21, @m.get
      @m.dec
      assert_equal 20, @m.get
      @m.set(100)
      assert_equal 100, @m.get
      @m.sub(11)
      assert_equal 89, @m.get
    end
  end
  sub_test_case 'AliasedGaugeMetric' do
    setup do
      @m = AliasedGaugeMetrics.new
      @m.use_gauge_metric = true
      @m.configure(config_element('metrics', '', {}))
    end
    test 'all aliased gauge operations work well' do
      assert_false @m.has_methods_for_counter
      assert_true @m.has_methods_for_gauge
      assert_equal 0, @m.get
      assert_equal 1, @m.inc
      @m.add(20)
      assert_equal 21, @m.get
      @m.dec
      assert_equal 20, @m.get
      @m.set(100)
      assert_equal 100, @m.get
      @m.sub(11)
      assert_equal 89, @m.get
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_stdout.rb | test/plugin/test_out_stdout.rb | require_relative '../helper'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_stdout'
# Tests for out_stdout in both non-buffered and buffered modes, covering the
# json/hash output types, time formatting, and the use_logger option.
class StdoutOutputTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  CONFIG = %[
  ]
  # Default event-time format printed by the stdout formatter.
  TIME_FORMAT = '%Y-%m-%d %H:%M:%S.%9N %z'

  def create_driver(conf = CONFIG)
    Fluent::Test::Driver::Output.new(Fluent::Plugin::StdoutOutput).configure(conf)
  end

  sub_test_case 'non-buffered' do
    test 'configure' do
      d = create_driver
      assert_equal 1, d.instance.formatter_configs.size # init: true
      assert_kind_of Fluent::Plugin::StdoutFormatter, d.instance.formatter
      assert_equal 'json', d.instance.formatter.output_type
    end

    test 'configure output_type' do
      d = create_driver(CONFIG + "\noutput_type json")
      assert_kind_of Fluent::Plugin::StdoutFormatter, d.instance.formatter
      assert_equal 'json', d.instance.formatter.output_type

      d = create_driver(CONFIG + "\noutput_type hash")
      assert_kind_of Fluent::Plugin::StdoutFormatter, d.instance.formatter
      assert_equal 'hash', d.instance.formatter.output_type

      # Unknown output_type must be rejected at configure time.
      assert_raise(Fluent::NotFoundPluginError) do
        d = create_driver(CONFIG + "\noutput_type foo")
      end
    end

    test 'configure with time_format' do
      d = create_driver(CONFIG + <<-CONF)
        <format>
          @type stdout
          time_format %Y-%m-%dT%H:%M:%S.%L%z
        </format>
      CONF

      time = event_time
      out = capture_log do
        d.run(default_tag: 'test') do
          d.feed(time, {'test' => 'test'})
        end
      end
      t = Time.at(time).localtime.strftime("%Y-%m-%dT%H:%M:%S.%L%z")
      assert_equal "#{t} test: {\"test\":\"test\"}\n", out
    end

    test 'emit with default configuration' do
      d = create_driver
      time = event_time()
      out = capture_log do
        d.run(default_tag: 'test') do
          d.feed(time, {'test' => 'test1'})
        end
      end
      assert_equal "#{Time.at(time).localtime.strftime(TIME_FORMAT)} test: {\"test\":\"test1\"}\n", out
    end

    data('oj' => 'oj', 'yajl' => 'yajl')
    test 'emit in json format' do |data|
      d = create_driver(CONFIG + "\noutput_type json\njson_parser #{data}")
      time = event_time()
      out = capture_log do
        d.run(default_tag: 'test') do
          d.feed(time, {'test' => 'test1'})
        end
      end
      assert_equal "#{Time.at(time).localtime.strftime(TIME_FORMAT)} test: {\"test\":\"test1\"}\n", out

      if data == 'yajl'
        # NOTE: Float::NAN is not jsonable
        assert_raise(JSON::GeneratorError) { d.feed('test', time, {'test' => Float::NAN}) }
      else
        # oj serializes NaN literally instead of raising.
        out = capture_log { d.feed('test', time, {'test' => Float::NAN}) }
        assert_equal "#{Time.at(time).localtime.strftime(TIME_FORMAT)} test: {\"test\":NaN}\n", out
      end
    end

    test 'emit in hash format' do
      d = create_driver(CONFIG + "\noutput_type hash")
      time = event_time()
      out = capture_log do
        d.run(default_tag: 'test') do
          d.feed(time, {'test' => 'test2'})
        end
      end
      # gsub normalizes hash-rocket spacing across Ruby versions.
      assert_equal "#{Time.at(time).localtime.strftime(TIME_FORMAT)} test: {\"test\"=>\"test2\"}\n", out.gsub(' => ', '=>')

      # NOTE: Float::NAN is not jsonable, but hash string can output it.
      out = capture_log { d.feed('test', time, {'test' => Float::NAN}) }
      assert_equal "#{Time.at(time).localtime.strftime(TIME_FORMAT)} test: {\"test\"=>NaN}\n", out.gsub(' => ', '=>')
    end
  end

  sub_test_case 'buffered' do
    test 'configure' do
      d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
      assert_equal 1, d.instance.formatter_configs.size
      assert_kind_of Fluent::Plugin::StdoutFormatter, d.instance.formatter
      assert_equal 'json', d.instance.formatter.output_type
      # Buffering defaults for stdout output.
      assert_equal 10 * 1024, d.instance.buffer_config.chunk_limit_size
      assert d.instance.buffer_config.flush_at_shutdown
      assert_equal ['tag'], d.instance.buffer_config.chunk_keys
      assert d.instance.chunk_key_tag
      assert !d.instance.chunk_key_time
      assert_equal [], d.instance.chunk_keys
    end

    test 'configure with output_type' do
      d = create_driver(config_element("ROOT", "", {"output_type" => "json"}, [config_element("buffer")]))
      assert_kind_of Fluent::Plugin::StdoutFormatter, d.instance.formatter
      assert_equal 'json', d.instance.formatter.output_type

      d = create_driver(config_element("ROOT", "", {"output_type" => "hash"}, [config_element("buffer")]))
      assert_kind_of Fluent::Plugin::StdoutFormatter, d.instance.formatter
      assert_equal 'hash', d.instance.formatter.output_type

      assert_raise(Fluent::NotFoundPluginError) do
        create_driver(config_element("ROOT", "", {"output_type" => "foo"}, [config_element("buffer")]))
      end
    end

    sub_test_case "emit with default config" do
      test '#write(synchronous)' do
        d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
        time = event_time()

        out = capture_log do
          d.run(default_tag: 'test', flush: true) do
            d.feed(time, {'test' => 'test'})
          end
        end
        assert_equal "#{Time.at(time).localtime.strftime(TIME_FORMAT)} test: {\"test\":\"test\"}\n", out
      end
    end

    sub_test_case "emit json" do
      data('oj' => 'oj', 'yajl' => 'yajl')
      test '#write(synchronous)' do |data|
        d = create_driver(config_element("ROOT", "", {"output_type" => "json", "json_parser" => data}, [config_element("buffer")]))
        time = event_time()

        out = capture_log do
          d.run(default_tag: 'test', flush: true) do
            d.feed(time, {'test' => 'test'})
          end
        end
        assert_equal "#{Time.at(time).localtime.strftime(TIME_FORMAT)} test: {\"test\":\"test\"}\n", out
      end
    end

    sub_test_case 'emit hash' do
      test '#write(synchronous)' do
        d = create_driver(config_element("ROOT", "", {"output_type" => "hash"}, [config_element("buffer")]))
        time = event_time()

        out = capture_log do
          d.run(default_tag: 'test', flush: true) do
            d.feed(time, {'test' => 'test'})
          end
        end
        assert_equal "#{Time.at(time).localtime.strftime(TIME_FORMAT)} test: {\"test\"=>\"test\"}\n", out.gsub(' => ', '=>')
      end
    end
  end

  data(
    'utc and !localtime' => "utc true\nlocaltime false",
    '!utc and localtime' => "utc false\nlocaltime true")
  test 'success when configure with localtime and utc' do |c|
    assert_nothing_raised do
      create_driver(CONFIG + c)
    end
  end

  data('utc and localtime' => "utc true\nlocaltime true",
       '!utc and !localtime' => "utc false\nlocaltime false")
  test 'raise an error when configure with localtime and utc' do |c|
    # utc and localtime are mutually exclusive; agreeing flags are an error.
    assert_raise(Fluent::ConfigError.new('both of utc and localtime are specified, use only one of them')) do
      create_driver(CONFIG + c)
    end
  end

  test 'use_logger false' do
    # With use_logger false the plugin writes to $stdout instead of $log.
    d = create_driver(<<~EOC)
      use_logger false
    EOC
    time = event_time
    out = capture_stdout do
      d.run(default_tag: 'test', flush: true) do
        d.feed(time, {'test' => 'test'})
      end
    end
    assert_equal "#{Time.at(time).localtime.strftime(TIME_FORMAT)} test: {\"test\":\"test\"}\n", out
  end

  # Swap $log for a StringIO while the block runs and return what was logged.
  def capture_log
    tmp = $log
    $log = StringIO.new
    yield
    return $log.string
  ensure
    $log = tmp
  end

  # Swap $stdout for a StringIO while the block runs and return what was printed.
  def capture_stdout
    tmp = $stdout
    $stdout = StringIO.new
    yield
    return $stdout.string
  ensure
    $stdout = tmp
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_file_util.rb | test/plugin/test_file_util.rb | require_relative '../helper'
require 'fluent/plugin/file_util'
require 'fileutils'
class FileUtilTest < Test::Unit::TestCase
def setup
FileUtils.rm_rf(TEST_DIR)
FileUtils.mkdir_p(TEST_DIR)
end
TEST_DIR = File.expand_path(File.dirname(__FILE__) + "/../tmp/file_util")
sub_test_case 'writable?' do
test 'file exists and writable' do
FileUtils.touch("#{TEST_DIR}/test_file")
assert_true Fluent::FileUtil.writable?("#{TEST_DIR}/test_file")
end
test 'file exists and not writable' do
FileUtils.touch("#{TEST_DIR}/test_file")
File.chmod(0444, "#{TEST_DIR}/test_file")
assert_false Fluent::FileUtil.writable?("#{TEST_DIR}/test_file")
end
test 'directory exists' do
FileUtils.mkdir_p("#{TEST_DIR}/test_dir")
assert_false Fluent::FileUtil.writable?("#{TEST_DIR}/test_dir")
end
test 'file does not exist and parent directory is writable' do
FileUtils.mkdir_p("#{TEST_DIR}/test_dir")
assert_true Fluent::FileUtil.writable?("#{TEST_DIR}/test_dir/test_file")
end
test 'file does not exist and parent directory is not writable' do
FileUtils.mkdir_p("#{TEST_DIR}/test_dir")
File.chmod(0444, "#{TEST_DIR}/test_dir")
assert_false Fluent::FileUtil.writable?("#{TEST_DIR}/test_dir/test_file")
end
test 'parent directory does not exist' do
FileUtils.rm_rf("#{TEST_DIR}/test_dir")
assert_false Fluent::FileUtil.writable?("#{TEST_DIR}/test_dir/test_file")
end
test 'parent file (not directory) exists' do
FileUtils.touch("#{TEST_DIR}/test_file")
assert_false Fluent::FileUtil.writable?("#{TEST_DIR}/test_file/foo")
end
end
sub_test_case 'writable_p?' do
test 'file exists and writable' do
FileUtils.touch("#{TEST_DIR}/test_file")
assert_true Fluent::FileUtil.writable_p?("#{TEST_DIR}/test_file")
end
test 'file exists and not writable' do
FileUtils.touch("#{TEST_DIR}/test_file")
File.chmod(0444, "#{TEST_DIR}/test_file")
assert_false Fluent::FileUtil.writable_p?("#{TEST_DIR}/test_file")
end
test 'directory exists' do
FileUtils.mkdir_p("#{TEST_DIR}/test_dir")
assert_false Fluent::FileUtil.writable_p?("#{TEST_DIR}/test_dir")
end
test 'parent directory exists and writable' do
FileUtils.mkdir_p("#{TEST_DIR}/test_dir")
assert_true Fluent::FileUtil.writable_p?("#{TEST_DIR}/test_dir/test_file")
end
test 'parent directory exists and not writable' do
FileUtils.mkdir_p("#{TEST_DIR}/test_dir")
File.chmod(0555, "#{TEST_DIR}/test_dir")
assert_false Fluent::FileUtil.writable_p?("#{TEST_DIR}/test_dir/test_file")
end
test 'parent of parent (of parent ...) directory exists and writable' do
FileUtils.mkdir_p("#{TEST_DIR}/test_dir")
assert_true Fluent::FileUtil.writable_p?("#{TEST_DIR}/test_dir/foo/bar/baz")
end
test 'parent of parent (of parent ...) directory exists and not writable' do
FileUtils.mkdir_p("#{TEST_DIR}/test_dir")
File.chmod(0555, "#{TEST_DIR}/test_dir")
assert_false Fluent::FileUtil.writable_p?("#{TEST_DIR}/test_dir/foo/bar/baz")
end
test 'parent of parent (of parent ...) file (not directory) exists' do
FileUtils.touch("#{TEST_DIR}/test_file")
assert_false Fluent::FileUtil.writable_p?("#{TEST_DIR}/test_file/foo/bar/baz")
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_output_as_buffered_retries.rb | test/plugin/test_output_as_buffered_retries.rb | require_relative '../helper'
require 'fluent/plugin/output'
require 'fluent/plugin/buffer'
require 'fluent/event'
require 'json'
require 'time'
require 'timeout'
require 'timecop'
module FluentPluginOutputAsBufferedRetryTest
class DummyBareOutput < Fluent::Plugin::Output
def register(name, &block)
instance_variable_set("@#{name}", block)
end
end
class DummySyncOutput < DummyBareOutput
def initialize
super
@process = nil
end
def process(tag, es)
@process ? @process.call(tag, es) : nil
end
end
class DummyFullFeatureOutput < DummyBareOutput
def initialize
super
@prefer_buffered_processing = nil
@prefer_delayed_commit = nil
@process = nil
@format = nil
@write = nil
@try_write = nil
end
def prefer_buffered_processing
@prefer_buffered_processing ? @prefer_buffered_processing.call : false
end
def prefer_delayed_commit
@prefer_delayed_commit ? @prefer_delayed_commit.call : false
end
def process(tag, es)
@process ? @process.call(tag, es) : nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
class DummyFullFeatureOutput2 < DummyFullFeatureOutput
def prefer_buffered_processing; true; end
def prefer_delayed_commit; super; end
def format(tag, time, record); super; end
def write(chunk); super; end
def try_write(chunk); super; end
end
end
class BufferedOutputRetryTest < Test::Unit::TestCase
def create_output(type=:full)
case type
when :bare then FluentPluginOutputAsBufferedRetryTest::DummyBareOutput.new
when :sync then FluentPluginOutputAsBufferedRetryTest::DummySyncOutput.new
when :full then FluentPluginOutputAsBufferedRetryTest::DummyFullFeatureOutput.new
else
raise ArgumentError, "unknown type: #{type}"
end
end
def create_metadata(timekey: nil, tag: nil, variables: nil)
Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end
def waiting(seconds)
begin
Timeout.timeout(seconds) do
yield
end
rescue Timeout::Error
STDERR.print(*@i.log.out.logs)
raise
end
end
def dummy_event_stream
Fluent::ArrayEventStream.new([
[ event_time('2016-04-13 18:33:00'), {"name" => "moris", "age" => 36, "message" => "data1"} ],
[ event_time('2016-04-13 18:33:13'), {"name" => "moris", "age" => 36, "message" => "data2"} ],
[ event_time('2016-04-13 18:33:32'), {"name" => "moris", "age" => 36, "message" => "data3"} ],
])
end
def get_log_time(msg, logs)
log_time = nil
log = logs.find{|l| l.include?(msg) }
if log && /^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4}) \[error\]/ =~ log
log_time = Time.parse($1)
end
log_time
end
setup do
@i = create_output
end
teardown do
if @i
@i.stop unless @i.stopped?
@i.before_shutdown unless @i.before_shutdown?
@i.shutdown unless @i.shutdown?
@i.after_shutdown unless @i.after_shutdown?
@i.close unless @i.closed?
@i.terminate unless @i.terminated?
end
Timecop.return
end
sub_test_case 'buffered output for retries with exponential backoff' do
test 'exponential backoff is default strategy for retries' do
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_randomize' => false,
'queued_chunks_limit_size' => 100
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.start
@i.after_start
assert_equal :exponential_backoff, @i.buffer_config.retry_type
assert_equal 1, @i.buffer_config.retry_wait
assert_equal 2.0, @i.buffer_config.retry_exponential_backoff_base
assert !@i.buffer_config.retry_randomize
now = Time.parse('2016-04-13 18:17:00 -0700')
Timecop.freeze( now )
retry_state = @i.retry_state( @i.buffer_config.retry_randomize )
retry_state.step
assert_equal (1 * (2 ** 1)), (retry_state.next_time - now)
retry_state.step
assert_equal (1 * (2 ** 2)), (retry_state.next_time - now)
retry_state.step
assert_equal (1 * (2 ** 3)), (retry_state.next_time - now)
retry_state.step
assert_equal (1 * (2 ** 4)), (retry_state.next_time - now)
end
test 'does retries correctly when #write fails' do
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_randomize' => false,
'retry_max_interval' => 60 * 60,
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:32 -0700')
Timecop.freeze( now )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 0 }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
now = @i.next_flush_time
Timecop.freeze( now )
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 1 }
assert{ @i.write_count > 1 }
assert{ @i.num_errors > 1 }
end
test 'max retry interval is limited by retry_max_interval' do
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_randomize' => false,
'retry_max_interval' => 60,
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:32 -0700')
Timecop.freeze( now )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
10.times do
now = @i.next_flush_time
Timecop.freeze( now )
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
assert{ @i.write_count > prev_write_count }
assert{ @i.num_errors > prev_num_errors }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
end
# exponential backoff interval: 1 * 2 ** 10 == 1024
# but it should be limited by retry_max_interval=60
assert_equal 60, (@i.next_flush_time - now)
end
test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
written_tags = []
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_randomize' => false,
'retry_timeout' => 3600,
'queued_chunks_limit_size' => 100
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:31 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.2", dummy_event_stream())
assert_equal 0, @i.write_count
assert_equal 0, @i.num_errors
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
assert{ @i.buffer.queue.size > 0 }
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
first_failure = @i.retry.start
15.times do |i| # large enough
now = @i.next_flush_time
# p({i: i, now: now, diff: (now - Time.now)})
# * if loop count is 12:
# {:i=>0, :now=>2016-04-13 18:33:32 -0700, :diff=>1.0}
# {:i=>1, :now=>2016-04-13 18:33:34 -0700, :diff=>2.0}
# {:i=>2, :now=>2016-04-13 18:33:38 -0700, :diff=>4.0}
# {:i=>3, :now=>2016-04-13 18:33:46 -0700, :diff=>8.0}
# {:i=>4, :now=>2016-04-13 18:34:02 -0700, :diff=>16.0}
# {:i=>5, :now=>2016-04-13 18:34:34 -0700, :diff=>32.0}
# {:i=>6, :now=>2016-04-13 18:35:38 -0700, :diff=>64.0}
# {:i=>7, :now=>2016-04-13 18:37:46 -0700, :diff=>128.0}
# {:i=>8, :now=>2016-04-13 18:42:02 -0700, :diff=>256.0}
# {:i=>9, :now=>2016-04-13 18:50:34 -0700, :diff=>512.0}
# {:i=>10, :now=>2016-04-13 19:07:38 -0700, :diff=>1024.0}
# {:i=>11, :now=>2016-04-13 19:33:31 -0700, :diff=>1553.0} # clear_queue!
Timecop.freeze( now )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
assert{ @i.write_count > prev_write_count }
assert{ @i.num_errors > prev_num_errors }
break if @i.buffer.queue.size == 0
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
end
assert{ now >= first_failure + 3600 }
assert{ @i.buffer.stage.size == 0 }
assert{ written_tags.all?('test.tag.1') }
@i.emit_events("test.tag.3", dummy_event_stream())
logs = @i.log.out.logs
assert{ logs.any?{|l| l.include?("[error]: Hit limit for retries. dropping all chunks in the buffer queue.") } }
end
test 'output plugin give retries up by retry_max_times, and clear queue in buffer' do
written_tags = []
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_randomize' => false,
'retry_max_times' => 10,
'queued_chunks_limit_size' => 100
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:31 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.2", dummy_event_stream())
assert_equal 0, @i.write_count
assert_equal 0, @i.num_errors
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
assert{ @i.buffer.queue.size > 0 }
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
_first_failure = @i.retry.start
chunks = @i.buffer.queue.dup
20.times do |i| # large times enough
now = @i.next_flush_time
Timecop.freeze( now )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
assert{ @i.write_count > prev_write_count }
assert{ @i.num_errors > prev_num_errors }
break if @i.buffer.queue.size == 0
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
end
assert{ @i.buffer.stage.size == 0 }
assert{ written_tags.all?('test.tag.1') }
@i.emit_events("test.tag.3", dummy_event_stream())
logs = @i.log.out.logs
assert{ logs.any?{|l| l.include?("[error]: Hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }
assert{ @i.buffer.queue.size == 0 }
assert{ @i.buffer.stage.size == 1 }
assert{ chunks.all?{|c| c.empty? } }
end
test 'output plugin limits queued chunks via queued_chunks_limit_size' do
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_randomize' => false,
'retry_max_times' => 7,
'queued_chunks_limit_size' => 2,
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing) { true }
@i.register(:format) { |tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write) { |chunk| raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze(now)
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:31 -0700')
Timecop.freeze(now)
@i.emit_events("test.tag.2", dummy_event_stream())
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4) { Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
assert { @i.buffer.queue.size > 0 }
assert { @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
assert { @i.write_count > 0 }
assert { @i.num_errors > 0 }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
20.times do |i| # large times enough
now = @i.next_flush_time
Timecop.freeze(now)
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4) { Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
@i.emit_events("test.tag.1", dummy_event_stream())
assert { @i.buffer.queue.size <= 2 }
assert { @i.buffer.stage.size == 1 } # all new data is stored into staged chunk
break if @i.buffer.queue.size == 0
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
end
end
end
sub_test_case 'buffered output for retries with periodical retry' do
test 'periodical retries should retry to write in failing status per retry_wait' do
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_type' => :periodic,
'retry_wait' => 3,
'retry_randomize' => false,
'queued_chunks_limit_size' => 100
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:32 -0700')
Timecop.freeze( now )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 0 }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
now = @i.next_flush_time
Timecop.freeze( now )
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 1 }
assert{ @i.write_count > 1 }
assert{ @i.num_errors > 1 }
end
test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
written_tags = []
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_type' => :periodic,
'retry_wait' => 30,
'retry_randomize' => false,
'retry_timeout' => 120,
'queued_chunks_limit_size' => 100
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:31 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.2", dummy_event_stream())
assert_equal 0, @i.write_count
assert_equal 0, @i.num_errors
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
assert{ @i.buffer.queue.size > 0 }
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
first_failure = @i.retry.start
3.times do |i|
now = @i.next_flush_time
Timecop.freeze( now )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
assert{ @i.write_count > prev_write_count }
assert{ @i.num_errors > prev_num_errors }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
end
assert{ @i.next_flush_time >= first_failure + 120 }
assert{ @i.buffer.queue.size == 2 }
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
assert{ @i.buffer.stage.size == 0 }
assert{ written_tags.all?('test.tag.1') }
chunks = @i.buffer.queue.dup
@i.emit_events("test.tag.3", dummy_event_stream())
now = @i.next_flush_time
Timecop.freeze( now )
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
logs = @i.log.out.logs
target_time = Time.parse("2016-04-13 18:35:31 -0700")
target_msg = "[error]: Hit limit for retries. dropping all chunks in the buffer queue."
assert{ logs.any?{|l| l.include?(target_msg) } }
log_time = get_log_time(target_msg, logs)
assert_equal target_time.localtime, log_time.localtime
assert{ @i.buffer.queue.size == 0 }
assert{ @i.buffer.stage.size == 1 }
assert{ chunks.all?{|c| c.empty? } }
end
test 'retry_max_times can limit maximum times for retries' do
written_tags = []
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_type' => :periodic,
'retry_wait' => 3,
'retry_randomize' => false,
'retry_max_times' => 10,
'queued_chunks_limit_size' => 100
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:31 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.2", dummy_event_stream())
assert_equal 0, @i.write_count
assert_equal 0, @i.num_errors
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
assert{ @i.buffer.queue.size > 0 }
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
_first_failure = @i.retry.start
chunks = @i.buffer.queue.dup
20.times do |i|
now = @i.next_flush_time
Timecop.freeze( now )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
assert{ @i.write_count > prev_write_count }
assert{ @i.num_errors > prev_num_errors }
break if @i.buffer.queue.size == 0
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
end
assert{ @i.buffer.stage.size == 0 }
assert{ written_tags.all?('test.tag.1') }
@i.emit_events("test.tag.3", dummy_event_stream())
logs = @i.log.out.logs
assert{ logs.any?{|l| l.include?("[error]: Hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }
assert{ @i.buffer.queue.size == 0 }
assert{ @i.buffer.stage.size == 1 }
assert{ chunks.all?{|c| c.empty? } }
end
test 'Do not retry when retry_max_times is 0' do
written_tags = []
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_type' => :periodic,
'retry_wait' => 1,
'retry_randomize' => false,
'retry_max_times' => 0,
'queued_chunks_limit_size' => 100
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:31 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.2", dummy_event_stream())
assert_equal(0, @i.write_count)
assert_equal(0, @i.num_errors)
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(2){ Thread.pass until @i.write_count == 1 && @i.num_errors == 1 }
assert(@i.write_count == 1)
assert(@i.num_errors == 1)
assert(@i.log.out.logs.any?{|l| l.include?("[error]: Hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=0") })
assert(@i.buffer.queue.size == 0)
assert(@i.buffer.stage.size == 1)
assert(@i.buffer.queue.all?{|c| c.empty? })
end
end
sub_test_case 'buffered output configured as retry_forever' do
setup do
Fluent::Plugin.register_output('output_retries_secondary_test', FluentPluginOutputAsBufferedRetryTest::DummyFullFeatureOutput2)
end
test 'warning logs are generated if secondary section is configured' do
chunk_key = 'tag'
hash = {
'retry_forever' => true,
'retry_randomize' => false,
}
i = create_output()
i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash),config_element('secondary','', {'@type' => 'output_retries_secondary_test'})]))
logs = i.log.out.logs
assert { logs.any? { |l| l.include?("<secondary> with 'retry_forever', only unrecoverable errors are moved to secondary") } }
end
test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for exponential backoff' do
written_tags = []
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_type' => :exponential_backoff,
'retry_forever' => true,
'retry_randomize' => false,
'retry_timeout' => 3600,
'retry_max_times' => 10,
'queued_chunks_limit_size' => 100
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:31 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.2", dummy_event_stream())
assert_equal 0, @i.write_count
assert_equal 0, @i.num_errors
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
assert{ @i.buffer.queue.size > 0 }
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
first_failure = @i.retry.start
15.times do |i|
now = @i.next_flush_time
Timecop.freeze( now + 1 )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ sleep 0.1 until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
assert{ @i.write_count > prev_write_count }
assert{ @i.num_errors > prev_num_errors }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
end
assert{ @i.buffer.queue.size == 2 }
assert{ @i.retry.steps > 10 }
assert{ now > first_failure + 3600 }
end
test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for periodical retries' do
written_tags = []
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_type' => :periodic,
'retry_forever' => true,
'retry_randomize' => false,
'retry_wait' => 30,
'retry_timeout' => 360,
'retry_max_times' => 10,
'queued_chunks_limit_size' => 100
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:31 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.2", dummy_event_stream())
assert_equal 0, @i.write_count
assert_equal 0, @i.num_errors
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
assert{ @i.buffer.queue.size > 0 }
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
first_failure = @i.retry.start
15.times do |i|
now = @i.next_flush_time
Timecop.freeze( now + 1 )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ sleep 0.1 until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
assert{ @i.write_count > prev_write_count }
assert{ @i.num_errors > prev_num_errors }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
end
assert{ @i.buffer.queue.size == 2 }
assert{ @i.retry.steps > 10 }
assert{ now > first_failure + 360 }
end
end
sub_test_case 'buffered output with delayed commit' do
test 'does retries correctly when #try_write fails' do
chunk_key = 'tag'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'retry_randomize' => false,
'retry_max_interval' => 60 * 60,
}
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:prefer_delayed_commit){ true }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:try_write){|chunk| raise "yay, your #write must fail" }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:32 -0700')
Timecop.freeze( now )
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
waiting(4) do
state = @i.instance_variable_get(:@output_flush_threads).first
state.thread.status == 'sleep'
end
assert(@i.write_count > 0)
assert(@i.num_errors > 0)
now = @i.next_flush_time
Timecop.freeze( now )
@i.flush_thread_wakeup
waiting(4){ Thread.pass until @i.write_count > 1 && @i.num_errors > 1 }
waiting(4) do
state = @i.instance_variable_get(:@output_flush_threads).first
state.thread.status == 'sleep'
end
assert(@i.write_count > 1)
assert(@i.num_errors > 1)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_tsv.rb | test/plugin/test_parser_tsv.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser_tsv'
# Tests for the TSV (tab-separated values) parser plugin.
# NOTE(review): create_driver uses the compat constant
# Fluent::TextParser::TSVParser while test_parse_with_time instantiates
# Fluent::Plugin::TSVParser directly — presumably both resolve to the same
# plugin via the compat layer; confirm.
class TSVParserTest < ::Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  # Builds a parser test driver for the TSV parser with the given config hash.
  def create_driver(conf={})
    Fluent::Test::Driver::Parser.new(Fluent::TextParser::TSVParser).configure(conf)
  end

  # `keys` accepts either a JSON array string or a comma-separated string.
  data('array param' => '["a","b"]', 'string param' => 'a,b')
  def test_config_params(param)
    d = create_driver(
      'keys' => param,
    )

    # Default column delimiter is TAB.
    assert_equal "\t", d.instance.delimiter

    d = create_driver(
      'keys' => param,
      'delimiter' => ',',
    )

    assert_equal ['a', 'b'], d.instance.keys
    assert_equal ",", d.instance.delimiter
  end

  data('array param' => '["time","a","b"]', 'string param' => 'time,a,b')
  def test_parse(param)
    d = create_driver('keys' => param, 'time_key' => 'time')
    # The `time` column becomes the event time and is removed from the record.
    d.instance.parse("2013/02/28 12:00:00\t192.168.0.1\t111") { |time, record|
      assert_equal(event_time('2013/02/28 12:00:00', format: '%Y/%m/%d %H:%M:%S'), time)
      assert_equal({
        'a' => '192.168.0.1',
        'b' => '111',
      }, record)
    }
  end

  def test_parse_with_time
    time_at_start = Time.now.to_i

    # Without a time column the parser falls back to the current time.
    d = create_driver('keys' => 'a,b')
    d.instance.parse("192.168.0.1\t111") { |time, record|
      assert time && time >= time_at_start, "parser puts current time without time input"
      assert_equal({
        'a' => '192.168.0.1',
        'b' => '111',
      }, record)
    }

    # With estimate_current_event disabled and no time in the input,
    # the yielded time is nil.
    d = Fluent::Test::Driver::Parser.new(Fluent::Plugin::TSVParser)
    d.configure('keys' => 'a,b', 'time_key' => 'time', 'estimate_current_event' => 'no')
    d.instance.parse("192.168.0.1\t111") { |time, record|
      assert_equal({
        'a' => '192.168.0.1',
        'b' => '111',
      }, record)
      assert_nil time, "parser returns nil w/o time and if configured so"
    }
  end

  # Empty columns are preserved as empty strings, wherever they appear.
  # NOTE(review): "black" in the method name looks like a typo for "blank";
  # kept as-is because renaming a test method is out of scope for a doc pass.
  data(
    'left blank column' => ["\t@\t@", {"1" => "","2" => "@","3" => "@"}],
    'center blank column' => ["@\t\t@", {"1" => "@","2" => "","3" => "@"}],
    'right blank column' => ["@\t@\t", {"1" => "@","2" => "@","3" => ""}],
    '2 right blank columns' => ["@\t\t", {"1" => "@","2" => "","3" => ""}],
    'left blank columns' => ["\t\t@", {"1" => "","2" => "","3" => "@"}],
    'all blank columns' => ["\t\t", {"1" => "","2" => "","3" => ""}])
  def test_black_column(data)
    line, expected = data
    d = create_driver('keys' => '1,2,3')
    d.instance.parse(line) { |time, record|
      assert_equal(expected, record)
    }
  end

  # keep_time_key leaves the raw time string in the record.
  def test_parse_with_keep_time_key
    d = create_driver(
      'keys'=>'time',
      'time_key'=>'time',
      'time_format'=>"%d/%b/%Y:%H:%M:%S %z",
      'keep_time_key'=>'true',
    )
    text = '28/Feb/2013:12:00:00 +0900'
    d.instance.parse(text) do |time, record|
      assert_equal text, record['time']
    end
  end

  # Values fully matching null_value_pattern become nil; partial matches and
  # differently-cased values are kept verbatim.
  data('array param' => '["a","b","c","d","e","f"]', 'string param' => 'a,b,c,d,e,f')
  def test_parse_with_null_value_pattern(param)
    d = create_driver(
      'keys'=>param,
      'null_value_pattern'=>'^(-|null|NULL)$'
    )
    d.instance.parse("-\tnull\tNULL\t\t--\tnuLL") do |time, record|
      assert_nil record['a']
      assert_nil record['b']
      assert_nil record['c']
      assert_equal record['d'], ''
      assert_equal record['e'], '--'
      assert_equal record['f'], 'nuLL'
    end
  end

  # null_empty_string nils out empty values, but a single space is not empty.
  data('array param' => '["a","b"]', 'string param' => 'a,b')
  def test_parse_with_null_empty_string(param)
    d = create_driver(
      'keys'=>param,
      'null_empty_string'=>true
    )
    d.instance.parse("\t ") do |time, record|
      assert_nil record['a']
      assert_equal record['b'], ' '
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_tail.rb | test/plugin/test_in_tail.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_tail'
require 'fluent/plugin/buffer'
require 'fluent/system_config'
require 'fluent/file_wrapper'
require 'net/http'
require 'flexmock/test_unit'
require 'timecop'
require 'tmpdir'
require 'securerandom'
class TailInputTest < Test::Unit::TestCase
include FlexMock::TestCase
def tmp_dir
File.join(File.dirname(__FILE__), "..", "tmp", "tail#{ENV['TEST_ENV_NUMBER']}", SecureRandom.hex(10))
end
def setup
Fluent::Test.setup
@tmp_dir = tmp_dir
cleanup_directory(@tmp_dir)
end
def teardown
super
cleanup_directory(@tmp_dir)
Fluent::Engine.stop
Timecop.return
end
def cleanup_directory(path)
unless Dir.exist?(path)
FileUtils.mkdir_p(path)
return
end
FileUtils.remove_entry_secure(path, true)
end
def cleanup_file(path)
FileUtils.remove_entry_secure(path, true)
end
def create_target_info(path)
Fluent::Plugin::TailInput::TargetInfo.new(path, Fluent::FileWrapper.stat(path).ino)
end
ROOT_CONFIG = config_element("ROOT", "", {
"tag" => "t1",
"rotate_wait" => "2s",
"refresh_interval" => "1s"
})
def base_config
ROOT_CONFIG + config_element("", "", { "path" => "#{@tmp_dir}/tail.txt" })
end
def common_config
base_config + config_element("", "", { "pos_file" => "#{@tmp_dir}/tail.pos" })
end
def common_follow_inode_config
config_element("ROOT", "", {
"path" => "#{@tmp_dir}/tail.txt*",
"pos_file" => "#{@tmp_dir}/tail.pos",
"tag" => "t1",
"refresh_interval" => "1s",
"read_from_head" => "true",
"format" => "none",
"rotate_wait" => "1s",
"follow_inodes" => "true"
})
end
CONFIG_READ_FROM_HEAD = config_element("", "", { "read_from_head" => true })
CONFIG_DISABLE_WATCH_TIMER = config_element("", "", { "enable_watch_timer" => false })
CONFIG_DISABLE_STAT_WATCHER = config_element("", "", { "enable_stat_watcher" => false })
CONFIG_OPEN_ON_EVERY_UPDATE = config_element("", "", { "open_on_every_update" => true })
SINGLE_LINE_CONFIG = config_element("", "", { "format" => "/(?<message>.*)/" })
PARSE_SINGLE_LINE_CONFIG = config_element("", "", {}, [config_element("parse", "", { "@type" => "/(?<message>.*)/" })])
MULTILINE_CONFIG = config_element(
"", "", {
"format" => "multiline",
"format1" => "/^s (?<message1>[^\\n]+)(\\nf (?<message2>[^\\n]+))?(\\nf (?<message3>.*))?/",
"format_firstline" => "/^[s]/"
})
PARSE_MULTILINE_CONFIG = config_element(
"", "", {},
[config_element("parse", "", {
"@type" => "multiline",
"format1" => "/^s (?<message1>[^\\n]+)(\\nf (?<message2>[^\\n]+))?(\\nf (?<message3>.*))?/",
"format_firstline" => "/^[s]/"
})
])
MULTILINE_CONFIG_WITH_NEWLINE = config_element(
"", "", {
"format" => "multiline",
"format1" => "/^s (?<message1>[^\\n]+)(\\nf (?<message2>[^\\n]+))?(\\nf (?<message3>.[^\\n]+))?/",
"format_firstline" => "/^[s]/"
})
PARSE_MULTILINE_CONFIG_WITH_NEWLINE = config_element(
"", "", {},
[config_element("parse", "", {
"@type" => "multiline",
"format1" => "/^s (?<message1>[^\\n]+)(\\nf (?<message2>[^\\n]+))?(\\nf (?<message3>.[^\\n]+))?/",
"format_firstline" => "/^[s]/"
})
])
EX_ROTATE_WAIT = 0
EX_FOLLOW_INODES = false
def ex_config
config_element("", "", {
"tag" => "tail",
"path" => "test/plugin/*/%Y/%m/%Y%m%d-%H%M%S.log,test/plugin/data/log/**/*.log",
"format" => "none",
"pos_file" => "#{@tmp_dir}/tail.pos",
"read_from_head" => true,
"refresh_interval" => 30,
"rotate_wait" => "#{EX_ROTATE_WAIT}s",
"follow_inodes" => "#{EX_FOLLOW_INODES}",
})
end
def tailing_group_pattern
"/#{@tmp_dir}\/(?<podname>[a-z0-9]([-a-z0-9]*[a-z0-9])?(\/[a-z0-9]([-a-z0-9]*[a-z0-9])?)*)_(?<namespace>[^_]+)_(?<container>.+)-(?<docker_id>[a-z0-9]{6})\.log$/"
end
DEBUG_LOG_LEVEL = config_element("", "", {
"@log_level" => "debug"
})
def create_group_directive(pattern, rate_period, *rules)
config_element("", "", {}, [
config_element("group", "", {
"pattern" => pattern,
"rate_period" => rate_period
}, rules)
])
end
def create_rule_directive(match_named_captures, limit)
params = {
"limit" => limit,
"match" => match_named_captures,
}
config_element("rule", "", params)
end
def create_path_element(path)
config_element("source", "", { "path" => "#{@tmp_dir}/#{path}" })
end
def create_driver(conf = SINGLE_LINE_CONFIG, use_common_conf = true)
config = use_common_conf ? common_config + conf : conf
Fluent::Test::Driver::Input.new(Fluent::Plugin::TailInput).configure(config)
end
sub_test_case "configure" do
test "plain single line" do
d = create_driver
assert_equal(["#{@tmp_dir}/tail.txt"], d.instance.paths)
assert_equal("t1", d.instance.tag)
assert_equal(2, d.instance.rotate_wait)
assert_equal("#{@tmp_dir}/tail.pos", d.instance.pos_file)
assert_equal(1000, d.instance.read_lines_limit)
assert_equal(-1, d.instance.read_bytes_limit_per_second)
assert_equal(false, d.instance.ignore_repeated_permission_error)
assert_nothing_raised do
d.instance.have_read_capability?
end
end
data("empty" => config_element,
"w/o @type" => config_element("", "", {}, [config_element("parse", "", {})]))
test "w/o parse section" do |conf|
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "multi paths with path_delimiter" do
c = config_element("ROOT", "", { "path" => "tail.txt|test2|tmp,dev", "tag" => "t1", "path_delimiter" => "|" })
d = create_driver(c + PARSE_SINGLE_LINE_CONFIG, false)
assert_equal(["tail.txt", "test2", "tmp,dev"], d.instance.paths)
end
test "multi paths with same path configured twice" do
c = config_element("ROOT", "", { "path" => "test1.txt,test2.txt,test1.txt", "tag" => "t1", "path_delimiter" => "," })
d = create_driver(c + PARSE_SINGLE_LINE_CONFIG, false)
assert_equal(["test2.txt","test1.txt"].sort, d.instance.paths.sort)
end
test "multi paths with invalid path_delimiter" do
c = config_element("ROOT", "", { "path" => "tail.txt|test2|tmp,dev", "tag" => "t1", "path_delimiter" => "*" })
assert_raise(Fluent::ConfigError) do
create_driver(c + PARSE_SINGLE_LINE_CONFIG, false)
end
end
test "follow_inodes w/o pos file" do
assert_raise(Fluent::ConfigError) do
create_driver(base_config + config_element('', '', {'follow_inodes' => 'true'}))
end
end
sub_test_case "log throttling per file" do
test "w/o watcher timer is invalid" do
conf = CONFIG_DISABLE_WATCH_TIMER + config_element("ROOT", "", {"read_bytes_limit_per_second" => "8k"})
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "valid" do
conf = config_element("ROOT", "", {"read_bytes_limit_per_second" => "8k"})
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
end
test "both enable_watch_timer and enable_stat_watcher are false" do
assert_raise(Fluent::ConfigError) do
create_driver(CONFIG_DISABLE_WATCH_TIMER + CONFIG_DISABLE_STAT_WATCHER + PARSE_SINGLE_LINE_CONFIG)
end
end
sub_test_case "encoding" do
test "valid" do
conf = SINGLE_LINE_CONFIG + config_element("", "", { "encoding" => "utf-8" })
d = create_driver(conf)
assert_equal(Encoding::UTF_8, d.instance.encoding)
end
test "invalid" do
conf = SINGLE_LINE_CONFIG + config_element("", "", { "encoding" => "no-such-encoding" })
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
end
sub_test_case "from_encoding" do
test "only specified from_encoding raise ConfigError" do
conf = SINGLE_LINE_CONFIG + config_element("", "", { "from_encoding" => "utf-8" })
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "valid" do
conf = SINGLE_LINE_CONFIG + config_element("", "", {
"from_encoding" => "utf-8",
"encoding" => "utf-8"
})
d = create_driver(conf)
assert_equal(Encoding::UTF_8, d.instance.from_encoding)
end
test "invalid" do
conf = SINGLE_LINE_CONFIG + config_element("", "", {
"from_encoding" => "no-such-encoding",
"encoding" => "utf-8"
})
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
end
end
sub_test_case "configure group" do
test "<rule> required" do
conf = create_group_directive('.', '1m') + SINGLE_LINE_CONFIG
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test "valid configuration" do
rule1 = create_rule_directive({
"namespace"=> "/namespace-a/",
"podname"=> "/podname-[b|c]/"
}, 100)
rule2 = create_rule_directive({
"namespace"=> "/namespace-[d|e]/",
"podname"=> "/podname-f/",
}, 50)
rule3 = create_rule_directive({
"podname"=> "/podname-g/",
}, -1)
rule4 = create_rule_directive({
"namespace"=> "/namespace-h/",
}, 0)
conf = create_group_directive(tailing_group_pattern, '1m', rule1, rule2, rule3, rule4) + SINGLE_LINE_CONFIG
assert_nothing_raised do
create_driver(conf)
end
end
test "limit should be greater than DEFAULT_LIMIT (-1)" do
rule1 = create_rule_directive({
"namespace"=> "/namespace-a/",
"podname"=> "/podname-[b|c]/",
}, -100)
rule2 = create_rule_directive({
"namespace"=> "/namespace-[d|e]/",
"podname"=> "/podname-f/",
}, 50)
conf = create_group_directive(tailing_group_pattern, '1m', rule1, rule2) + SINGLE_LINE_CONFIG
assert_raise(RuntimeError) do
create_driver(conf)
end
end
end
sub_test_case "group rules line limit resolution" do
test "valid" do
rule1 = create_rule_directive({
"namespace"=> "/namespace-a/",
"podname"=> "/podname-[b|c]/",
}, 50)
rule2 = create_rule_directive({
"podname"=> "/podname-[b|c]/",
}, 400)
rule3 = create_rule_directive({
"namespace"=> "/namespace-a/",
}, 100)
conf = create_group_directive(tailing_group_pattern, '1m', rule3, rule1, rule2) + SINGLE_LINE_CONFIG
assert_nothing_raised do
d = create_driver(conf)
instance = d.instance
metadata = {
"namespace"=> "namespace-a",
"podname"=> "podname-b",
}
assert_equal(50, instance.find_group(metadata).limit)
metadata = {
"namespace" => "namespace-a",
"podname" => "podname-c",
}
assert_equal(50, instance.find_group(metadata).limit)
metadata = {
"namespace" => "namespace-a",
"podname" => "podname-d",
}
assert_equal(100, instance.find_group(metadata).limit)
metadata = {
"namespace" => "namespace-f",
"podname" => "podname-b",
}
assert_equal(400, instance.find_group(metadata).limit)
metadata = {
"podname" => "podname-c",
}
assert_equal(400, instance.find_group(metadata).limit)
assert_equal(-1, instance.find_group({}).limit)
end
end
end
sub_test_case "files should be placed in groups" do
test "invalid regex pattern places files in default group" do
rule1 = create_rule_directive({}, 100) ## limits default groups
conf = ROOT_CONFIG + DEBUG_LOG_LEVEL + create_group_directive(tailing_group_pattern, '1m', rule1) + create_path_element("test*.txt") + SINGLE_LINE_CONFIG
d = create_driver(conf, false)
Fluent::FileWrapper.open("#{@tmp_dir}/test1.txt", 'w')
Fluent::FileWrapper.open("#{@tmp_dir}/test2.txt", 'w')
Fluent::FileWrapper.open("#{@tmp_dir}/test3.txt", 'w')
d.run do
## checking default group_watcher's paths
instance = d.instance
key = instance.default_group_key
assert_equal(3, instance.log.logs.count{|a| a.match?("Cannot find group from metadata, Adding file in the default group\n")})
assert_equal(3, instance.group_watchers[key].size)
assert_true(instance.group_watchers[key].include? File.join(@tmp_dir, 'test1.txt'))
assert_true(instance.group_watchers[key].include? File.join(@tmp_dir, 'test2.txt'))
assert_true(instance.group_watchers[key].include? File.join(@tmp_dir, 'test3.txt'))
end
end
test "valid regex pattern places file in their respective groups" do
rule1 = create_rule_directive({
"namespace"=> "/test-namespace1/",
"podname"=> "/test-podname1/",
}, 100)
rule2 = create_rule_directive({
"namespace"=> "/test-namespace1/",
}, 200)
rule3 = create_rule_directive({
"podname"=> "/test-podname2/",
}, 300)
rule4 = create_rule_directive({}, 400)
path_element = create_path_element("test-podname*.log")
conf = ROOT_CONFIG + create_group_directive(tailing_group_pattern, '1m', rule4, rule3, rule2, rule1) + path_element + SINGLE_LINE_CONFIG
d = create_driver(conf, false)
file1 = File.join(@tmp_dir, "test-podname1_test-namespace1_test-container-15fabq.log")
file2 = File.join(@tmp_dir, "test-podname3_test-namespace1_test-container-15fabq.log")
file3 = File.join(@tmp_dir, "test-podname2_test-namespace2_test-container-15fabq.log")
file4 = File.join(@tmp_dir, "test-podname4_test-namespace3_test-container-15fabq.log")
d.run do
Fluent::FileWrapper.open(file1, 'w')
Fluent::FileWrapper.open(file2, 'w')
Fluent::FileWrapper.open(file3, 'w')
Fluent::FileWrapper.open(file4, 'w')
instance = d.instance
assert_equal(100, instance.find_group_from_metadata(file1).limit)
assert_equal(200, instance.find_group_from_metadata(file2).limit)
assert_equal(300, instance.find_group_from_metadata(file3).limit)
assert_equal(400, instance.find_group_from_metadata(file4).limit)
end
end
end
sub_test_case "singleline" do
data(flat: SINGLE_LINE_CONFIG,
parse: PARSE_SINGLE_LINE_CONFIG)
def test_emit(data)
config = data
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") {|f|
f.puts "test1"
f.puts "test2"
}
d = create_driver(config)
d.run(expect_emits: 1) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
f.puts "test3\ntest4"
}
end
events = d.events
assert_equal(true, events.length > 0)
assert_equal({"message" => "test3"}, events[0][2])
assert_equal({"message" => "test4"}, events[1][2])
assert(events[0][1].is_a?(Fluent::EventTime))
assert(events[1][1].is_a?(Fluent::EventTime))
assert_equal(1, d.emit_count)
end
def test_emit_with_emit_unmatched_lines_true
config = config_element("", "", { "format" => "/^(?<message>test.*)/", "emit_unmatched_lines" => true })
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") { |f| }
d = create_driver(config)
d.run(expect_emits: 1) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
f.puts "test line 1"
f.puts "test line 2"
f.puts "bad line 1"
f.puts "test line 3"
}
end
events = d.events
assert_equal(4, events.length)
assert_equal({"message" => "test line 1"}, events[0][2])
assert_equal({"message" => "test line 2"}, events[1][2])
assert_equal({"unmatched_line" => "bad line 1"}, events[2][2])
assert_equal({"message" => "test line 3"}, events[3][2])
end
data('flat 1' => [:flat, 1, 2],
'flat 10' => [:flat, 10, 1],
'parse 1' => [:parse, 1, 2],
'parse 10' => [:parse, 10, 1])
def test_emit_with_read_lines_limit(data)
config_style, limit, num_events = data
case config_style
when :flat
config = CONFIG_READ_FROM_HEAD + SINGLE_LINE_CONFIG + config_element("", "", { "read_lines_limit" => limit })
when :parse
config = CONFIG_READ_FROM_HEAD + config_element("", "", { "read_lines_limit" => limit }) + PARSE_SINGLE_LINE_CONFIG
end
d = create_driver(config)
msg = 'test' * 2000 # in_tail reads 8192 bytes at once.
d.run(expect_emits: num_events, timeout: 2) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
f.puts msg
f.puts msg
}
end
events = d.events
assert_equal(true, events.length > 0)
assert_equal({"message" => msg}, events[0][2])
assert_equal({"message" => msg}, events[1][2])
assert num_events <= d.emit_count
end
sub_test_case "log throttling per file" do
teardown do
cleanup_file("#{@tmp_dir}/tail.txt")
end
sub_test_case "reads_bytes_per_second w/o throttled" do
data("flat 8192 bytes, 2 events" => [:flat, 100, 8192, 2],
"flat 8192 bytes, 2 events w/o stat watcher" => [:flat_without_stat, 100, 8192, 2],
"flat #{8192*10} bytes, 20 events" => [:flat, 100, (8192 * 10), 20],
"flat #{8192*10} bytes, 20 events w/o stat watcher" => [:flat_without_stat, 100, (8192 * 10), 20],
"parse #{8192*4} bytes, 8 events" => [:parse, 100, (8192 * 4), 8],
"parse #{8192*4} bytes, 8 events w/o stat watcher" => [:parse_without_stat, 100, (8192 * 4), 8],
"parse #{8192*10} bytes, 20 events" => [:parse, 100, (8192 * 10), 20],
"parse #{8192*10} bytes, 20 events w/o stat watcher" => [:parse_without_stat, 100, (8192 * 10), 20],
"flat 8k bytes with unit, 2 events" => [:flat, 100, "8k", 2])
def test_emit_with_read_bytes_limit_per_second(data)
config_style, limit, limit_bytes, num_events = data
case config_style
when :flat
config = CONFIG_READ_FROM_HEAD + SINGLE_LINE_CONFIG + config_element("", "", { "read_lines_limit" => limit, "read_bytes_limit_per_second" => limit_bytes })
when :parse
config = CONFIG_READ_FROM_HEAD + config_element("", "", { "read_lines_limit" => limit, "read_bytes_limit_per_second" => limit_bytes }) + PARSE_SINGLE_LINE_CONFIG
when :flat_without_stat
config = CONFIG_READ_FROM_HEAD + SINGLE_LINE_CONFIG + CONFIG_DISABLE_STAT_WATCHER + config_element("", "", { "read_lines_limit" => limit, "read_bytes_limit_per_second" => limit_bytes })
when :parse_without_stat
config = CONFIG_READ_FROM_HEAD + CONFIG_DISABLE_STAT_WATCHER + config_element("", "", { "read_lines_limit" => limit, "read_bytes_limit_per_second" => limit_bytes }) + PARSE_SINGLE_LINE_CONFIG
end
msg = 'test' * 2000 # in_tail reads 8192 bytes at once.
start_time = Fluent::Clock.now
d = create_driver(config)
d.run(expect_emits: 2) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
100.times do
f.puts msg
end
}
end
assert_true(Fluent::Clock.now - start_time > 1)
assert_equal(Array.new(num_events) { {"message" => msg} },
d.events.collect { |event| event[2] })
end
def test_read_bytes_limit_precede_read_lines_limit
config = CONFIG_READ_FROM_HEAD +
SINGLE_LINE_CONFIG +
config_element("", "", {
"read_lines_limit" => 1000,
"read_bytes_limit_per_second" => 8192
})
msg = 'abc'
start_time = Fluent::Clock.now
d = create_driver(config)
d.run(expect_emits: 2) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
8000.times do
f.puts msg
end
}
end
assert_true(Fluent::Clock.now - start_time > 1)
assert_equal(Array.new(4096) { {"message" => msg} },
d.events.collect { |event| event[2] })
end
end
sub_test_case "reads_bytes_per_second w/ throttled already" do
data("flat 8192 bytes" => [:flat, 100, 8192],
"parse 8192 bytes" => [:parse, 100, 8192])
def test_emit_with_read_bytes_limit_per_second(data)
config_style, limit, limit_bytes = data
case config_style
when :flat
config = CONFIG_READ_FROM_HEAD + SINGLE_LINE_CONFIG + config_element("", "", { "read_lines_limit" => limit, "read_bytes_limit_per_second" => limit_bytes })
when :parse
config = CONFIG_READ_FROM_HEAD + config_element("", "", { "read_lines_limit" => limit, "read_bytes_limit_per_second" => limit_bytes }) + PARSE_SINGLE_LINE_CONFIG
end
d = create_driver(config)
msg = 'test' * 2000 # in_tail reads 8192 bytes at once.
mock.proxy(d.instance).io_handler(anything, anything) do |io_handler|
require 'fluent/config/types'
limit_bytes_value = Fluent::Config.size_value(limit_bytes)
io_handler.instance_variable_set(:@number_bytes_read, limit_bytes_value)
if Fluent.linux?
mock.proxy(io_handler).handle_notify.at_least(5)
else
mock.proxy(io_handler).handle_notify.twice
end
io_handler
end
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") do |f|
100.times do
f.puts msg
end
end
# We should not do shutdown here due to hard timeout.
d.run do
start_time = Fluent::Clock.now
while Fluent::Clock.now - start_time < 0.8 do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") do |f|
f.puts msg
f.flush
end
sleep 0.05
end
end
assert_equal([], d.events)
end
end
sub_test_case "EOF with reads_bytes_per_second" do
def test_longer_than_rotate_wait
limit_bytes = 8192
num_lines = 1024 * 3
msg = "08bytes"
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") do |f|
f.write("#{msg}\n" * num_lines)
end
config = CONFIG_READ_FROM_HEAD +
SINGLE_LINE_CONFIG +
config_element("", "", {
"read_bytes_limit_per_second" => limit_bytes,
"rotate_wait" => 0.1,
"refresh_interval" => 0.5,
})
rotated = false
d = create_driver(config)
d.run(timeout: 10) do
while d.events.size < num_lines do
if d.events.size > 0 && !rotated
cleanup_file("#{@tmp_dir}/tail.txt")
FileUtils.touch("#{@tmp_dir}/tail.txt")
rotated = true
end
sleep 0.3
end
end
assert_equal(num_lines,
d.events.count do |event|
event[2]["message"] == msg
end)
end
def test_shorter_than_rotate_wait
limit_bytes = 8192
num_lines = 1024 * 2
msg = "08bytes"
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") do |f|
f.write("#{msg}\n" * num_lines)
end
config = CONFIG_READ_FROM_HEAD +
SINGLE_LINE_CONFIG +
config_element("", "", {
"read_bytes_limit_per_second" => limit_bytes,
"rotate_wait" => 2,
"refresh_interval" => 0.5,
})
start_time = Fluent::Clock.now
rotated = false
detached = false
d = create_driver(config)
mock.proxy(d.instance).setup_watcher(anything, anything) do |tw|
mock.proxy(tw).detach(anything) do |v|
detached = true
v
end
tw
end.twice
d.run(timeout: 10) do
until detached do
if d.events.size > 0 && !rotated
cleanup_file("#{@tmp_dir}/tail.txt")
FileUtils.touch("#{@tmp_dir}/tail.txt")
rotated = true
end
sleep 0.3
end
end
assert_true(Fluent::Clock.now - start_time > 2)
assert_equal(num_lines,
d.events.count do |event|
event[2]["message"] == msg
end)
end
end
end
data(flat: CONFIG_READ_FROM_HEAD + SINGLE_LINE_CONFIG,
parse: CONFIG_READ_FROM_HEAD + PARSE_SINGLE_LINE_CONFIG)
def test_emit_with_read_from_head(data)
config = data
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") {|f|
f.puts "test1"
f.puts "test2"
}
d = create_driver(config)
d.run(expect_emits: 2) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
f.puts "test3"
f.puts "test4"
}
end
events = d.events
assert(events.length > 0)
assert_equal({"message" => "test1"}, events[0][2])
assert_equal({"message" => "test2"}, events[1][2])
assert_equal({"message" => "test3"}, events[2][2])
assert_equal({"message" => "test4"}, events[3][2])
end
data(flat: CONFIG_DISABLE_WATCH_TIMER + SINGLE_LINE_CONFIG,
parse: CONFIG_DISABLE_WATCH_TIMER + PARSE_SINGLE_LINE_CONFIG)
def test_emit_without_watch_timer(data)
config = data
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") {|f|
f.puts "test1"
f.puts "test2"
}
d = create_driver(config)
d.run(expect_emits: 1) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
f.puts "test3"
f.puts "test4"
}
# according to cool.io's stat_watcher.c, systems without inotify will use
# an "automatic" value, typically around 5 seconds
end
events = d.events
assert(events.length > 0)
assert_equal({"message" => "test3"}, events[0][2])
assert_equal({"message" => "test4"}, events[1][2])
end
# https://github.com/fluent/fluentd/pull/3541#discussion_r740197711
def test_watch_wildcard_path_without_watch_timer
omit "need inotify" unless Fluent.linux?
config = config_element("ROOT", "", {
"path" => "#{@tmp_dir}/tail*.txt",
"tag" => "t1",
})
config = config + CONFIG_DISABLE_WATCH_TIMER + SINGLE_LINE_CONFIG
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") {|f|
f.puts "test1"
f.puts "test2"
}
d = create_driver(config, false)
d.run(expect_emits: 1, timeout: 1) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
f.puts "test3"
f.puts "test4"
}
end
assert_equal(
[
{"message" => "test3"},
{"message" => "test4"},
],
d.events.collect { |event| event[2] })
end
data(flat: CONFIG_DISABLE_STAT_WATCHER + SINGLE_LINE_CONFIG,
parse: CONFIG_DISABLE_STAT_WATCHER + PARSE_SINGLE_LINE_CONFIG)
def test_emit_with_disable_stat_watcher(data)
config = data
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") {|f|
f.puts "test1"
f.puts "test2"
}
d = create_driver(config)
d.run(expect_emits: 1) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
f.puts "test3"
f.puts "test4"
}
end
events = d.events
assert(events.length > 0)
assert_equal({"message" => "test3"}, events[0][2])
assert_equal({"message" => "test4"}, events[1][2])
end
def test_always_read_from_head_on_detecting_a_new_file
d = create_driver(SINGLE_LINE_CONFIG)
d.run(expect_emits: 1, timeout: 3) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") {|f|
f.puts "test1\ntest2\n"
}
end
assert_equal(
[
{"message" => "test1"},
{"message" => "test2"},
],
d.events.collect { |event| event[2] })
end
end
class TestWithSystem < self
include Fluent::SystemConfig::Mixin
OVERRIDE_FILE_PERMISSION = 0620
CONFIG_SYSTEM = %[
<system>
file_permission #{OVERRIDE_FILE_PERMISSION}
</system>
]
def setup
omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
super
# Store default permission
@default_permission = system_config.instance_variable_get(:@file_permission)
end
def teardown
return if Fluent.windows?
super
# Restore default permission
system_config.instance_variable_set(:@file_permission, @default_permission)
end
def parse_system(text)
basepath = File.expand_path(File.dirname(__FILE__) + '/../../')
Fluent::Config.parse(text, '(test)', basepath, true).elements.find { |e| e.name == 'system' }
end
def test_emit_with_system
system_conf = parse_system(CONFIG_SYSTEM)
sc = Fluent::SystemConfig.new(system_conf)
Fluent::Engine.init(sc)
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "wb") {|f|
f.puts "test1"
f.puts "test2"
}
d = create_driver
d.run(expect_emits: 1) do
Fluent::FileWrapper.open("#{@tmp_dir}/tail.txt", "ab") {|f|
f.puts "test3"
f.puts "test4"
}
end
events = d.events
assert_equal(true, events.length > 0)
assert_equal({"message" => "test3"}, events[0][2])
assert_equal({"message" => "test4"}, events[1][2])
assert(events[0][1].is_a?(Fluent::EventTime))
assert(events[1][1].is_a?(Fluent::EventTime))
assert_equal(1, d.emit_count)
pos = d.instance.instance_variable_get(:@pf_file)
mode = "%o" % File.stat(pos).mode
assert_equal OVERRIDE_FILE_PERMISSION, mode[-3, 3].to_i
end
end
sub_test_case "rotate file" do
def create_driver(conf = SINGLE_LINE_CONFIG)
config = common_config + conf
Fluent::Test::Driver::Input.new(Fluent::Plugin::TailInput).configure(config)
end
data(flat: SINGLE_LINE_CONFIG,
parse: PARSE_SINGLE_LINE_CONFIG)
def test_rotate_file(data)
config = data
events = sub_test_rotate_file(config, expect_emits: 2)
assert_equal(3.upto(6).collect { |i| {"message" => "test#{i}"} },
events.collect { |event| event[2] })
end
data(flat: CONFIG_READ_FROM_HEAD + SINGLE_LINE_CONFIG,
parse: CONFIG_READ_FROM_HEAD + PARSE_SINGLE_LINE_CONFIG)
def test_rotate_file_with_read_from_head(data)
config = data
events = sub_test_rotate_file(config, expect_records: 6)
assert_equal(1.upto(6).collect { |i| {"message" => "test#{i}"} },
events.collect { |event| event[2] })
end
data(flat: CONFIG_OPEN_ON_EVERY_UPDATE + CONFIG_READ_FROM_HEAD + SINGLE_LINE_CONFIG,
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_base.rb | test/plugin/test_base.rb | require_relative '../helper'
require 'tmpdir'
require 'fluent/plugin/base'
# Namespace for test-only plugin classes so they cannot collide with real
# plugin constants.
module FluentPluginBaseTest
  # Minimal concrete plugin: Fluent::Plugin::Base with no overrides.
  class DummyPlugin < Fluent::Plugin::Base
  end
end
class BaseTest < Test::Unit::TestCase
setup do
@p = FluentPluginBaseTest::DummyPlugin.new
end
test 'has methods for phases of plugin life cycle, and methods to know "super"s were correctly called or not' do
assert !@p.configured?
@p.configure(config_element())
assert @p.configured?
assert !@p.started?
@p.start
assert @p.start
assert !@p.stopped?
@p.stop
assert @p.stopped?
assert !@p.before_shutdown?
@p.before_shutdown
assert @p.before_shutdown?
assert !@p.shutdown?
@p.shutdown
assert @p.shutdown?
assert !@p.after_shutdown?
@p.after_shutdown
assert @p.after_shutdown?
assert !@p.closed?
@p.close
assert @p.closed?
assert !@p.terminated?
@p.terminate
assert @p.terminated?
end
test 'can access system config' do
assert @p.system_config
@p.system_config_override({'process_name' => 'mytest'})
assert_equal 'mytest', @p.system_config.process_name
end
test 'does not have router in default' do
assert !@p.has_router?
end
sub_test_case '#fluentd_worker_id' do
test 'returns 0 in default' do
assert_equal 0, @p.fluentd_worker_id
end
test 'returns the value specified via SERVERENGINE_WORKER_ID env variable' do
pre_value = ENV['SERVERENGINE_WORKER_ID']
begin
ENV['SERVERENGINE_WORKER_ID'] = 7.to_s
assert_equal 7, @p.fluentd_worker_id
ensure
ENV['SERVERENGINE_WORKER_ID'] = pre_value
end
end
end
test 'does not have root dir in default' do
assert_nil @p.plugin_root_dir
end
test 'is configurable by config_param and config_section' do
assert_nothing_raised do
class FluentPluginBaseTest::DummyPlugin2 < Fluent::Plugin::TestBase
config_param :myparam1, :string
config_section :mysection, multi: false do
config_param :myparam2, :integer
end
end
end
p2 = FluentPluginBaseTest::DummyPlugin2.new
assert_nothing_raised do
p2.configure(config_element('ROOT', '', {'myparam1' => 'myvalue1'}, [config_element('mysection', '', {'myparam2' => 99})]))
end
assert_equal 'myvalue1', p2.myparam1
assert_equal 99, p2.mysection.myparam2
end
test 'plugins are available with multi worker configuration in default' do
assert @p.multi_workers_ready?
end
test 'provides #string_safe_encoding to scrub invalid sequence string with info logging' do
logger = Fluent::Test::TestLogger.new
m = Module.new do
define_method(:log) do
logger
end
end
@p.extend m
assert_equal [], logger.logs
ret = @p.string_safe_encoding("abc\xff.\x01f"){|s| s.split(".") }
assert_equal ['abc?', "\u0001f"], ret
assert_equal 1, logger.logs.size
assert{ logger.logs.first.include?("invalid byte sequence is replaced in ") }
end
test 'generates worker lock path safely' do
Dir.mktmpdir("test-fluentd-lock-") do |lock_dir|
ENV['FLUENTD_LOCK_DIR'] = lock_dir
p = FluentPluginBaseTest::DummyPlugin.new
path = p.get_lock_path("Aa\\|=~/_123")
assert_equal lock_dir, File.dirname(path)
assert_equal "fluentd-Aa______123.lock", File.basename(path)
end
end
test 'can acquire inter-worker locking' do
Dir.mktmpdir("test-fluentd-lock-") do |lock_dir|
ENV['FLUENTD_LOCK_DIR'] = lock_dir
p = FluentPluginBaseTest::DummyPlugin.new
lock_path = p.get_lock_path("test_base")
p.acquire_worker_lock("test_base") do
# With LOCK_NB set, flock() returns `false` when the
# file is already locked.
File.open(lock_path, "w") do |f|
assert_equal false, f.flock(File::LOCK_EX|File::LOCK_NB)
end
end
# Lock should be release by now. In that case, flock
# must return 0.
File.open(lock_path, "w") do |f|
assert_equal 0, f.flock(File::LOCK_EX|File::LOCK_NB)
end
end
end
test '`ArgumentError` when `conf` is not `Fluent::Config::Element`' do
assert_raise ArgumentError.new('BUG: type of conf must be Fluent::Config::Element, but Hash is passed.') do
@p.configure({})
end
end
sub_test_case 'system_config.workers value after configure' do
def assert_system_config_workers_value(data)
conf = config_element()
conf.set_target_worker_ids(data[:target_worker_ids])
@p.configure(conf)
assert{ @p.system_config.workers == data[:expected] }
end
def stub_supervisor_mode
stub(Fluent::Engine).supervisor_mode { true }
stub(Fluent::Engine).worker_id { -1 }
end
sub_test_case 'with <system> workers 3 </system>' do
setup do
system_config = Fluent::SystemConfig.new
system_config.workers = 3
stub(Fluent::Engine).system_config { system_config }
end
data(
'without <worker> directive',
{
target_worker_ids: [],
expected: 3
},
keep: true
)
data(
'with <worker 0>',
{
target_worker_ids: [0],
expected: 1
},
keep: true
)
data(
'with <worker 0-1>',
{
target_worker_ids: [0, 1],
expected: 2
},
keep: true
)
data(
'with <worker 0-2>',
{
target_worker_ids: [0, 1, 2],
expected: 3
},
keep: true
)
test 'system_config.workers value after configure' do
assert_system_config_workers_value(data)
end
test 'system_config.workers value after configure with supervisor_mode' do
stub_supervisor_mode
assert_system_config_workers_value(data)
end
end
sub_test_case 'without <system> directive' do
data(
'without <worker> directive',
{
target_worker_ids: [],
expected: 1
},
keep: true
)
data(
'with <worker 0>',
{
target_worker_ids: [0],
expected: 1
},
keep: true
)
test 'system_config.workers value after configure' do
assert_system_config_workers_value(data)
end
test 'system_config.workers value after configure with supervisor_mode' do
stub_supervisor_mode
assert_system_config_workers_value(data)
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_formatter_msgpack.rb | test/plugin/test_formatter_msgpack.rb | require_relative '../helper'
require 'fluent/test/driver/formatter'
require 'fluent/plugin/formatter_msgpack'
# Unit tests for Fluent::Plugin::MessagePackFormatter: the formatter must
# emit exactly the MessagePack serialization of the record.
class MessagePackFormatterTest < ::Test::Unit::TestCase
  def setup
    @time = event_time
  end

  # Builds a formatter test driver configured with +conf+.
  def create_driver(conf = "")
    Fluent::Test::Driver::Formatter.new(Fluent::Plugin::MessagePackFormatter).configure(conf)
  end

  # Fixture tag shared by the tests.
  def tag
    "tag"
  end

  # Fixture record shared by the tests.
  def record
    {'message' => 'awesome'}
  end

  def test_format
    driver = create_driver({})
    output = driver.instance.format(tag, @time, record)
    assert_equal(record.to_msgpack, output)
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_unix.rb | test/plugin/test_in_unix.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_unix'
# Tests for Fluent::Plugin::UnixInput, which receives events over a UNIX
# domain socket in msgpack (single message / forward / packed-forward) or
# JSON form. The whole suite is skipped on Windows (see the trailing
# `unless Fluent.windows?`), since UNIX sockets are unavailable there.
class UnixInputTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
    # Driver handle; teardown shuts it down only when a test created one.
    @d = nil
  end
  def teardown
    @d.instance_shutdown if @d
  end
  # Per-process temp dir so parallel test runs (TEST_ENV_NUMBER) don't collide.
  TMP_DIR = File.dirname(__FILE__) + "/../tmp/in_unix#{ENV['TEST_ENV_NUMBER']}"
  CONFIG = %[
path #{TMP_DIR}/unix
backlog 1000
]
  # Creates an input test driver configured with +conf+ (defaults to CONFIG).
  def create_driver(conf = CONFIG)
    Fluent::Test::Driver::Input.new(Fluent::Plugin::UnixInput).configure(conf)
  end
  # Msgpack packer built through Fluentd's factory (honors extension types).
  def packer(*args)
    Fluent::MessagePackFactory.msgpack_packer(*args)
  end
  def unpacker
    Fluent::MessagePackFactory.msgpack_unpacker
  end
  # Writes raw bytes to the plugin's listening socket, then closes the
  # connection so the plugin sees EOF.
  def send_data(data)
    io = UNIXSocket.new("#{TMP_DIR}/unix")
    begin
      io.write data
    ensure
      io.close
    end
  end
  def test_configure
    @d = create_driver
    assert_equal "#{TMP_DIR}/unix", @d.instance.path
    assert_equal 1000, @d.instance.backlog
  end
  # Events sent with time 0 or nil get the current time assigned by the plugin.
  def test_time
    @d = create_driver
    time = Fluent::EventTime.now
    records = [
      ["tag1", 0, {"a" => 1}],
      ["tag2", nil, {"a" => 2}],
    ]
    @d.run(expect_records: records.length, timeout: 5) do
      records.each {|tag, _time, record|
        send_data packer.write([tag, _time, record]).to_s
      }
    end
    @d.events.each_with_index { |e, i|
      orig = records[i]
      assert_equal(orig[0], e[0])
      # Assigned time must not precede the timestamp captured before sending.
      assert_true(time <= e[1])
      assert_equal(orig[2], e[2])
    }
  end
  # Single-message mode: one [tag, time, record] msgpack array per event.
  def test_message
    @d = create_driver
    time = Fluent::EventTime.now
    records = [
      ["tag1", time, {"a" => 1}],
      ["tag2", time, {"a" => 2}],
    ]
    @d.run(expect_records: records.length, timeout: 5) do
      records.each {|tag, _time, record|
        send_data packer.write([tag, _time, record]).to_s
      }
    end
    assert_equal(records, @d.events)
  end
  # Forward mode: one msgpack array [tag, [[time, record], ...]].
  def test_forward
    @d = create_driver
    time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
    records = [
      ["tag1", time, {"a" => 1}],
      ["tag1", time, {"a" => 2}]
    ]
    @d.run(expect_records: records.length, timeout: 20) do
      entries = []
      records.each {|tag, _time, record|
        entries << [_time, record]
      }
      send_data packer.write(["tag1", entries]).to_s
    end
    assert_equal(records, @d.events)
  end
  # Packed-forward mode: entries are a concatenated msgpack byte stream
  # rather than a nested array.
  def test_packed_forward
    @d = create_driver
    time = Fluent::EventTime.now
    records = [
      ["tag1", time, {"a" => 1}],
      ["tag1", time, {"a" => 2}],
    ]
    @d.run(expect_records: records.length, timeout: 20) do
      entries = ''
      records.each {|_tag, _time, record|
        # Appends each [time, record] msgpack blob onto the shared buffer.
        packer(entries).write([_time, record]).flush
      }
      send_data packer.write(["tag1", entries]).to_s
    end
    assert_equal(records, @d.events)
  end
  # JSON payloads are accepted too; time may be an integer or a float.
  def test_message_json
    @d = create_driver
    time = Fluent::EventTime.now
    records = [
      ["tag1", time, {"a" => 1}],
      ["tag2", time, {"a" => 2}],
    ]
    @d.run(expect_records: records.length, timeout: 5) do
      tag, _time, record = records[0]
      send_data [tag, _time.to_i, record].to_json
      tag, _time, record = records[1]
      send_data [tag, _time.to_f, record].to_json
    end
    assert_equal(records, @d.events)
  end
  # A configured `tag` overrides whatever tag the sender supplied.
  def test_message_with_tag
    @d = create_driver(CONFIG + "tag new_tag")
    time = Fluent::EventTime.now
    records = [
      ["tag1", time, {"a" => 1}],
      ["tag2", time, {"a" => 2}],
    ]
    @d.run(expect_records: records.length, timeout: 5) do
      records.each {|tag, _time, record|
        send_data packer.write([tag, _time, record]).to_s
      }
    end
    @d.events.each { |event|
      assert_equal("new_tag", event[0])
    }
  end
  data('string chunk' => 'broken string',
       'integer chunk' => 10)
  # Non-array payloads must be rejected with a warning, producing no events.
  def test_broken_message(data)
    @d = create_driver
    @d.run(shutdown: false, timeout: 5) do
      # Bypasses the socket and feeds the decoded object directly.
      @d.instance.__send__(:on_message, data)
    end
    assert_equal 0, @d.events.size
    logs = @d.instance.log.logs
    assert_equal 1, logs.count { |line|
      line =~ / \[warn\]: incoming data is broken: msg=#{data.inspect}/
    }, "should not accept broken chunk"
  end
end unless Fluent.windows?
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_formatter_tsv.rb | test/plugin/test_formatter_tsv.rb | require_relative '../helper'
require 'fluent/test/driver/formatter'
require 'fluent/plugin/formatter_tsv'
# Tests for Fluent::Plugin::TSVFormatter: selected keys are joined with a
# configurable delimiter and optionally terminated by a newline.
class TSVFormatterTest < ::Test::Unit::TestCase
  def setup
    @time = event_time
  end
  # Builds a formatter test driver configured with +conf+.
  def create_driver(conf = "")
    Fluent::Test::Driver::Formatter.new(Fluent::Plugin::TSVFormatter).configure(conf)
  end
  # Fixture tag shared by the tests.
  def tag
    "tag"
  end
  # Fixture record shared by the tests.
  def record
    {'message' => 'awesome', 'greeting' => 'hello'}
  end
  # Defaults: tab delimiter, newline appended; both are overridable.
  def test_config_params
    d = create_driver(
      'keys' => 'message,greeting',
    )
    assert_equal ["message", "greeting"], d.instance.keys
    assert_equal "\t", d.instance.delimiter
    assert_equal true, d.instance.add_newline
    d = create_driver(
      'keys' => 'message,greeting',
      'delimiter' => ',',
      'add_newline' => false,
    )
    assert_equal ["message", "greeting"], d.instance.keys
    assert_equal ",", d.instance.delimiter
    assert_equal false, d.instance.add_newline
  end
  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  # `newline` selects the terminator character sequence (lf vs crlf).
  def test_format(data)
    newline_conf, newline = data
    d = create_driver(
      'keys' => 'message,greeting',
      'newline' => newline_conf
    )
    formatted = d.instance.format(tag, @time, record)
    assert_equal("awesome\thello#{newline}", formatted)
  end
  # add_newline=false suppresses the trailing terminator entirely.
  def test_format_without_newline
    d = create_driver(
      'keys' => 'message,greeting',
      'add_newline' => false,
    )
    formatted = d.instance.format(tag, @time, record)
    assert_equal("awesome\thello", formatted)
  end
  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  # Custom delimiter combined with each newline style.
  def test_format_with_customized_delimiters(data)
    newline_conf, newline = data
    d = create_driver(
      'keys' => 'message,greeting',
      'delimiter' => ',',
      'newline' => newline_conf,
    )
    formatted = d.instance.format(tag, @time, record)
    assert_equal("awesome,hello#{newline}", formatted)
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_buffer.rb | test/plugin/test_buffer.rb | require_relative '../helper'
require 'fluent/plugin/buffer'
require 'fluent/plugin/buffer/memory_chunk'
require 'fluent/plugin/compressable'
require 'fluent/plugin/buffer/chunk'
require 'fluent/event'
require 'flexmock/test_unit'
require 'fluent/log'
require 'fluent/plugin_id'
require 'time'
# Test doubles used by BufferTest below.
module FluentPluginBufferTest
  # Minimal owner plugin: buffers need an owner that supplies a plugin id
  # and a logger, which these two mixins provide.
  class DummyOutputPlugin < Fluent::Plugin::Base
    include Fluent::PluginId
    include Fluent::PluginLoggerMixin
  end
  class DummyMemoryChunkError < StandardError; end
  # Memory chunk that records lifecycle calls (concat/rollback/close/purge)
  # so tests can assert on them, and that can be forced to fail on concat
  # by setting #failing = true.
  class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
    attr_reader :append_count, :rollbacked, :closed, :purged, :chunk
    attr_accessor :failing
    def initialize(metadata, compress: :text)
      super
      @append_count = 0
      @rollbacked = false
      @closed = false
      @purged = false
      @failing = false
    end
    def concat(data, size)
      # Counted before the failure check so a failing write still bumps it.
      @append_count += 1
      raise DummyMemoryChunkError if @failing
      super
    end
    def rollback
      super
      @rollbacked = true
    end
    def close
      super
      @closed = true
    end
    def purge
      super
      @purged = true
    end
  end
  # Buffer subclass whose #resume yields a fixed set of staged and queued
  # chunks, giving every test a known starting state.
  class DummyPlugin < Fluent::Plugin::Buffer
    def create_metadata(timekey=nil, tag=nil, variables=nil)
      Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
    end
    # Builds a committed chunk containing the raw +data+ strings.
    def create_chunk(metadata, data)
      c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
      c.append(data)
      c.commit
      c
    end
    # Builds a committed chunk from an event stream (msgpack-serialized).
    def create_chunk_es(metadata, es)
      c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
      c.concat(es.to_msgpack_stream, es.size)
      c.commit
      c
    end
    def resume
      # Four metadata at 10-minute timekey intervals; dm0/dm1 come back
      # queued, dm2/dm3 staged (100 bytes each except the 3-byte tail chunk).
      dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
      dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
      dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
      staged = {
        dm2 => create_chunk(dm2, ["b" * 100]).staged!,
        dm3 => create_chunk(dm3, ["c" * 100]).staged!,
      }
      queued = [
        create_chunk(dm0, ["0" * 100]).enqueued!,
        create_chunk(dm1, ["a" * 100]).enqueued!,
        create_chunk(dm1, ["a" * 3]).enqueued!,
      ]
      return staged, queued
    end
    def generate_chunk(metadata)
      DummyMemoryChunk.new(metadata, compress: @compress)
    end
  end
end
class BufferTest < Test::Unit::TestCase
# Builds a DummyPlugin buffer configured from +hash+ (the <buffer> section
# parameters) and owned by a freshly configured DummyOutputPlugin.
def create_buffer(hash)
  conf = config_element('buffer', '', hash, [])
  owner_plugin = FluentPluginBufferTest::DummyOutputPlugin.new
  owner_plugin.configure(config_element('ROOT', '', {}, [conf]))
  buffer = FluentPluginBufferTest::DummyPlugin.new
  buffer.owner = owner_plugin
  buffer.configure(conf)
  buffer
end
# Builds a buffer Metadata value object for the given timekey/tag/variables.
def create_metadata(timekey=nil, tag=nil, variables=nil)
  Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end
# Builds a committed DummyMemoryChunk containing the raw +data+ strings.
def create_chunk(metadata, data)
  c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
  c.append(data)
  c.commit
  c
end
# Builds a committed DummyMemoryChunk from an event stream, storing its
# msgpack serialization and event count.
def create_chunk_es(metadata, es)
  c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
  c.concat(es.to_msgpack_stream, es.size)
  c.commit
  c
end
# Common test-unit setup for every test in this class.
setup do
  Fluent::Test.setup
end
# Exercises the abstract Fluent::Plugin::Buffer directly (not the dummy
# subclass): defaults, storability accounting, and abstract-method errors.
sub_test_case 'using base buffer class' do
  setup do
    buffer_conf = config_element('buffer', '', {}, [])
    owner = FluentPluginBufferTest::DummyOutputPlugin.new
    owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
    p = Fluent::Plugin::Buffer.new
    p.owner = owner
    p.configure(buffer_conf)
    @p = p
  end
  test 'default persistency is false' do
    assert !@p.persistent?
  end
  test 'chunk bytes limit is 8MB, and total bytes limit is 512MB' do
    assert_equal 8*1024*1024, @p.chunk_limit_size
    assert_equal 512*1024*1024, @p.total_limit_size
  end
  test 'chunk records limit is ignored in default' do
    assert_nil @p.chunk_limit_records
  end
  test '#storable? checks total size of staged and enqueued(includes dequeued chunks) against total_limit_size' do
    assert_equal 512*1024*1024, @p.total_limit_size
    assert_equal 0, @p.stage_size
    assert_equal 0, @p.queue_size
    assert @p.storable?
    # One byte below the 512MB total: still storable.
    @p.stage_size = 256 * 1024 * 1024
    @p.queue_size = 256 * 1024 * 1024 - 1
    assert @p.storable?
    # Exactly at the limit: no longer storable.
    @p.queue_size = 256 * 1024 * 1024
    assert !@p.storable?
  end
  # The base class leaves resume/generate_chunk to subclasses.
  test '#resume must be implemented by subclass' do
    assert_raise NotImplementedError do
      @p.resume
    end
  end
  test '#generate_chunk must be implemented by subclass' do
    assert_raise NotImplementedError do
      @p.generate_chunk(Object.new)
    end
  end
end
sub_test_case 'with default configuration and dummy implementation' do
setup do
@p = create_buffer({'queued_chunks_limit_size' => 100})
@dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
@dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
@dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
@dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
@p.start
end
# After #start, the buffer reflects DummyPlugin#resume's fixed state:
# dm2/dm3 staged (100 bytes each), dm0/dm1 queued (100 + 100 + 3 bytes),
# and queued_num counts chunks per metadata.
test '#start resumes buffer states and update queued numbers per metadata' do
  plugin = create_buffer({})
  # Not yet started: all state containers are empty.
  assert_equal({}, plugin.stage)
  assert_equal([], plugin.queue)
  assert_equal({}, plugin.dequeued)
  assert_equal({}, plugin.queued_num)
  assert_equal 0, plugin.stage_size
  assert_equal 0, plugin.queue_size
  assert_equal [], plugin.timekeys
  # @p is started plugin
  assert_equal [@dm2,@dm3], @p.stage.keys
  assert_equal "b" * 100, @p.stage[@dm2].read
  assert_equal "c" * 100, @p.stage[@dm3].read
  assert_equal 200, @p.stage_size
  assert_equal 3, @p.queue.size
  assert_equal "0" * 100, @p.queue[0].read
  assert_equal "a" * 100, @p.queue[1].read
  assert_equal "a" * 3, @p.queue[2].read
  assert_equal 203, @p.queue_size
  # staged, queued
  assert_equal 1, @p.queued_num[@dm0]
  assert_equal 2, @p.queued_num[@dm1]
end
# #close must close every chunk regardless of which state it is in:
# dequeued (cx, added manually), staged, and queued.
test '#close closes all chunks in dequeued, enqueued and staged' do
  dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
  cx = create_chunk(dmx, ["x" * 1024])
  @p.dequeued[cx.unique_id] = cx
  # Capture references before #close wipes the buffer's collections.
  staged_chunks = @p.stage.values.dup
  queued_chunks = @p.queue.dup
  @p.close
  assert cx.closed
  assert{ staged_chunks.all?{|c| c.closed } }
  assert{ queued_chunks.all?{|c| c.closed } }
end
# #terminate (after #close) must nil out every internal container and
# metric object, leaving only an empty timekeys list.
test '#terminate initializes all internal states' do
  dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
  cx = create_chunk(dmx, ["x" * 1024])
  @p.dequeued[cx.unique_id] = cx
  @p.close
  @p.terminate
  assert_nil @p.stage
  assert_nil @p.queue
  assert_nil @p.dequeued
  assert_nil @p.queued_num
  assert_nil @p.stage_length_metrics
  assert_nil @p.stage_size_metrics
  assert_nil @p.queue_length_metrics
  assert_nil @p.queue_size_metrics
  assert_nil @p.available_buffer_space_ratios_metrics
  assert_nil @p.total_queued_size_metrics
  assert_nil @p.newest_timekey_metrics
  assert_nil @p.oldest_timekey_metrics
  assert_equal [], @p.timekeys
end
test '#queued_records returns total number of size in all chunks in queue' do
assert_equal 3, @p.queue.size
r0 = @p.queue[0].size
assert_equal 1, r0
r1 = @p.queue[1].size
assert_equal 1, r1
r2 = @p.queue[2].size
assert_equal 1, r2
assert_equal (r0+r1+r2), @p.queued_records
end
test '#queued? returns queue has any chunks or not without arguments' do
assert @p.queued?
@p.queue.reject!{|_c| true }
assert !@p.queued?
end
test '#queued? returns queue has chunks for specified metadata with an argument' do
assert @p.queued?(@dm0)
assert @p.queued?(@dm1)
assert !@p.queued?(@dm2)
end
test '#enqueue_chunk enqueues a chunk on stage with specified metadata' do
assert_equal 2, @p.stage.size
assert_equal [@dm2,@dm3], @p.stage.keys
assert_equal 3, @p.queue.size
assert_nil @p.queued_num[@dm2]
assert_equal 200, @p.stage_size
assert_equal 203, @p.queue_size
@p.enqueue_chunk(@dm2)
assert_equal [@dm3], @p.stage.keys
assert_equal @dm2, @p.queue.last.metadata
assert_equal 1, @p.queued_num[@dm2]
assert_equal 100, @p.stage_size
assert_equal 303, @p.queue_size
end
test '#enqueue_chunk ignores empty chunks' do
assert_equal 3, @p.queue.size
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
c = create_chunk(m, [''])
@p.stage[m] = c
assert @p.stage[m].empty?
assert !c.closed
@p.enqueue_chunk(m)
assert_nil @p.stage[m]
assert_equal 3, @p.queue.size
assert_nil @p.queued_num[m]
assert c.closed
end
test '#enqueue_chunk calls #enqueued! if chunk responds to it' do
assert_equal 3, @p.queue.size
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
c = create_chunk(m, ['c' * 256])
callback_called = false
(class << c; self; end).module_eval do
define_method(:enqueued!){ callback_called = true }
end
@p.stage[m] = c
@p.enqueue_chunk(m)
assert_equal c, @p.queue.last
assert callback_called
end
test '#enqueue_all enqueues chunks on stage which given block returns true with' do
m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
c1 = create_chunk(m1, ['c' * 256])
@p.stage[m1] = c1
m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
c2 = create_chunk(m2, ['c' * 256])
@p.stage[m2] = c2
assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
@p.enqueue_all{ |m, c| m.timekey < Time.parse('2016-04-11 16:41:00 +0000').to_i }
assert_equal [m2], @p.stage.keys
assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1], @p.queue.map(&:metadata)
end
test '#enqueue_all enqueues all chunks on stage without block' do
m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
c1 = create_chunk(m1, ['c' * 256])
@p.stage[m1] = c1
m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
c2 = create_chunk(m2, ['c' * 256])
@p.stage[m2] = c2
assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
@p.enqueue_all
assert_equal [], @p.stage.keys
assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1,m2], @p.queue.map(&:metadata)
end
test '#dequeue_chunk dequeues a chunk from queue if a chunk exists' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
m1 = @p.dequeue_chunk
assert_equal @dm0, m1.metadata
assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
m2 = @p.dequeue_chunk
assert_equal @dm1, m2.metadata
assert_equal @dm1, @p.dequeued[m2.unique_id].metadata
m3 = @p.dequeue_chunk
assert_equal @dm1, m3.metadata
assert_equal @dm1, @p.dequeued[m3.unique_id].metadata
m4 = @p.dequeue_chunk
assert_nil m4
end
test '#takeback_chunk resumes a chunk from dequeued to queued at the head of queue, and returns true' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
m1 = @p.dequeue_chunk
assert_equal @dm0, m1.metadata
assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({m1.unique_id => m1}, @p.dequeued)
assert @p.takeback_chunk(m1.unique_id)
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
end
test '#purge_chunk removes a chunk specified by argument id from dequeued chunks' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
m0 = @p.dequeue_chunk
m1 = @p.dequeue_chunk
assert @p.takeback_chunk(m0.unique_id)
assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
assert_equal({m1.unique_id => m1}, @p.dequeued)
assert !m1.purged
@p.purge_chunk(m1.unique_id)
assert m1.purged
assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
end
test '#purge_chunk removes an argument metadata if no chunks exist on stage or in queue' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
m0 = @p.dequeue_chunk
assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({m0.unique_id => m0}, @p.dequeued)
assert !m0.purged
@p.purge_chunk(m0.unique_id)
assert m0.purged
assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
end
test '#takeback_chunk returns false if specified chunk_id is already purged' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
m0 = @p.dequeue_chunk
assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({m0.unique_id => m0}, @p.dequeued)
assert !m0.purged
@p.purge_chunk(m0.unique_id)
assert m0.purged
assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
assert !@p.takeback_chunk(m0.unique_id)
assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal({}, @p.dequeued)
end
test '#clear_queue! removes all chunks in queue, but leaves staged chunks' do
qchunks = @p.queue.dup
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal 2, @p.stage.size
assert_equal({}, @p.dequeued)
@p.clear_queue!
assert_equal [], @p.queue
assert_equal 0, @p.queue_size
assert_equal 2, @p.stage.size
assert_equal({}, @p.dequeued)
assert{ qchunks.all?{ |c| c.purged } }
end
test '#write returns immediately if argument data is empty array' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
@p.write({m => []})
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
end
test '#write returns immediately if argument data is empty event stream' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
@p.write({m => Fluent::ArrayEventStream.new([])})
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
end
test '#write raises BufferOverflowError if buffer is not storable' do
@p.stage_size = 256 * 1024 * 1024
@p.queue_size = 256 * 1024 * 1024
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
@p.write({m => ["x" * 256]})
end
end
test '#write stores data into an existing chunk with metadata specified' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
dm3data = @p.stage[@dm3].read.dup
prev_stage_size = @p.stage_size
assert_equal 1, @p.stage[@dm3].append_count
@p.write({@dm3 => ["x" * 256, "y" * 256, "z" * 256]})
assert_equal 2, @p.stage[@dm3].append_count
assert_equal (dm3data + ("x" * 256) + ("y" * 256) + ("z" * 256)), @p.stage[@dm3].read
assert_equal (prev_stage_size + 768), @p.stage_size
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
end
test '#write creates new chunk and store data into it if there are no chunks for specified metadata' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
timekey = Time.parse('2016-04-11 16:40:00 +0000').to_i
assert !@p.timekeys.include?(timekey)
prev_stage_size = @p.stage_size
m = @p.metadata(timekey: timekey)
@p.write({m => ["x" * 256, "y" * 256, "z" * 256]})
assert_equal 1, @p.stage[m].append_count
assert_equal ("x" * 256 + "y" * 256 + "z" * 256), @p.stage[m].read
assert_equal (prev_stage_size + 768), @p.stage_size
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3,m], @p.stage.keys
@p.update_timekeys
assert @p.timekeys.include?(timekey)
end
test '#write tries to enqueue and store data into a new chunk if existing chunk is full' do
assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
assert_equal 0.95, @p.chunk_full_threshold
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
row = "x" * 1024 * 1024
small_row = "x" * 1024 * 512
@p.write({m => [row] * 7 + [small_row]})
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3,m], @p.stage.keys
assert_equal 1, @p.stage[m].append_count
@p.write({m => [row]})
assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3,m], @p.stage.keys
assert_equal 1, @p.stage[m].append_count
assert_equal 1024*1024, @p.stage[m].bytesize
assert_equal 3, @p.queue.last.append_count # 1 -> write (2) -> write_step_by_step (3)
assert @p.queue.last.rollbacked
end
test '#write rollbacks if commit raises errors' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
row = "x" * 1024
@p.write({m => [row] * 8})
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3,m], @p.stage.keys
target_chunk = @p.stage[m]
assert_equal 1, target_chunk.append_count
assert !target_chunk.rollbacked
(class << target_chunk; self; end).module_eval do
define_method(:commit){ raise "yay" }
end
assert_raise RuntimeError.new("yay") do
@p.write({m => [row]})
end
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3,m], @p.stage.keys
assert_equal 2, target_chunk.append_count
assert target_chunk.rollbacked
assert_equal row * 8, target_chunk.read
end
test '#write w/ format raises BufferOverflowError if buffer is not storable' do
@p.stage_size = 256 * 1024 * 1024
@p.queue_size = 256 * 1024 * 1024
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "xxxxxxxxxxxxxx"} ] ])
assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
@p.write({m => es}, format: ->(e){e.to_msgpack_stream})
end
end
test '#write w/ format stores data into an existing chunk with metadata specified' do
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
dm3data = @p.stage[@dm3].read.dup
prev_stage_size = @p.stage_size
assert_equal 1, @p.stage[@dm3].append_count
es = Fluent::ArrayEventStream.new(
[
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 128}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "y" * 128}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "z" * 128}],
]
)
@p.write({@dm3 => es}, format: ->(e){e.to_msgpack_stream})
assert_equal 2, @p.stage[@dm3].append_count
assert_equal (dm3data + es.to_msgpack_stream), @p.stage[@dm3].read
assert_equal (prev_stage_size + es.to_msgpack_stream.bytesize), @p.stage_size
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
end
test '#write w/ format creates new chunk and store data into it if there are not chunks for specified metadata' do
assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
timekey = Time.parse('2016-04-11 16:40:00 +0000').to_i
assert !@p.timekeys.include?(timekey)
m = @p.metadata(timekey: timekey)
es = Fluent::ArrayEventStream.new(
[
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
]
)
@p.write({m => es}, format: ->(e){e.to_msgpack_stream})
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3,m], @p.stage.keys
assert_equal 1, @p.stage[m].append_count
@p.update_timekeys
assert @p.timekeys.include?(timekey)
end
test '#write w/ format tries to enqueue and store data into a new chunk if existing chunk does not have enough space' do
assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
es = Fluent::ArrayEventStream.new(
[
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
[event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
]
)
@p.write({m => es}, format: ->(e){e.to_msgpack_stream})
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3,m], @p.stage.keys
assert_equal 1, @p.stage[m].append_count
es2 = Fluent::OneEventStream.new(event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 1024})
@p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3,m], @p.stage.keys
assert_equal 1, @p.stage[m].append_count
assert_equal es2.to_msgpack_stream.bytesize, @p.stage[m].bytesize
assert_equal 2, @p.queue.last.append_count # 1 -> write (2) -> rollback&enqueue
assert @p.queue.last.rollbacked
end
test '#write w/ format enqueues chunk if it is already full after adding data' do
assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
es = Fluent::ArrayEventStream.new(
[
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}], # 1024 * 1024 bytes as msgpack stream
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
[event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
]
)
@p.write({m => es}, format: ->(e){e.to_msgpack_stream})
assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
assert_equal [@dm2,@dm3], @p.stage.keys
assert_equal 1, @p.queue.last.append_count
end
# Verifies that #write appends formatted data to a staged chunk, but rolls the
# chunk back (restoring its previously committed content) when chunk.commit
# raises, leaving append_count incremented and the rollbacked flag set.
test '#write w/ format rollbacks if commit raises errors' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys
  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
  es = Fluent::ArrayEventStream.new(
    [
      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
      [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
    ]
  )
  # First write succeeds: a new chunk for metadata m is staged and committed once.
  @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys
  target_chunk = @p.stage[m]
  assert_equal 1, target_chunk.append_count
  assert !target_chunk.rollbacked
  # Force the next commit on this specific chunk to fail.
  (class << target_chunk; self; end).module_eval do
    define_method(:commit){ raise "yay" }
  end
  es2 = Fluent::ArrayEventStream.new(
    [
      [event_time('2016-04-11 16:40:04 +0000'), {"message" => "z" * 1024 * 128}],
    ]
  )
  assert_raise RuntimeError.new("yay") do
    @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
  end
  # The failed write was appended (append_count == 2) but then rolled back, so
  # the chunk content equals exactly the first, successfully committed stream.
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys
  assert_equal 2, target_chunk.append_count
  assert target_chunk.rollbacked
  assert_equal es.to_msgpack_stream, target_chunk.read
end
# Writing a hash with several metadata keys stages one chunk per new key in a
# single #write call.
test '#write writes many metadata and data pairs at once' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys
  payload = "x" * 1024
  @p.write({ @dm0 => [payload, payload, payload], @dm1 => [payload, payload] })
  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
end
# Verifies the atomicity of #write across multiple chunks: if appending to any
# one chunk fails, every chunk touched by the call is rolled back to its
# pre-write size and no partial commit remains.
test '#write does not commit on any chunks if any append operation on chunk fails' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys
  row = "x" * 1024
  @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })
  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
  # Record the pre-failure sizes so the rollback can be asserted precisely.
  dm2_size = @p.stage[@dm2].size
  assert !@p.stage[@dm2].rollbacked
  dm3_size = @p.stage[@dm3].size
  assert !@p.stage[@dm3].rollbacked
  assert{ @p.stage[@dm0].size == 3 }
  assert !@p.stage[@dm0].rollbacked
  assert{ @p.stage[@dm1].size == 2 }
  assert !@p.stage[@dm1].rollbacked
  # Make appending fail on the chunk whose metadata sorts last.
  meta_list = [@dm0, @dm1, @dm2, @dm3].sort
  @p.stage[meta_list.last].failing = true
  assert_raise(FluentPluginBufferTest::DummyMemoryChunkError) do
    @p.write({ @dm2 => [row], @dm3 => [row], @dm0 => [row, row, row], @dm1 => [row, row] })
  end
  # All four chunks must be rolled back to their pre-write sizes.
  assert{ @p.stage[@dm2].size == dm2_size }
  assert @p.stage[@dm2].rollbacked
  assert{ @p.stage[@dm3].size == dm3_size }
  assert @p.stage[@dm3].rollbacked
  assert{ @p.stage[@dm0].size == 3 }
  assert @p.stage[@dm0].rollbacked
  assert{ @p.stage[@dm1].size == 2 }
  assert @p.stage[@dm1].rollbacked
end
# The default buffer compression mode is :text (no compression).
test '#compress returns :text' do
  assert{ @p.compress == :text }
end
# https://github.com/fluent/fluentd/issues/3089
test "closed chunk should not be committed" do
assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
assert_equal 0.95, @p.chunk_full_threshold
purge_count = 0
stub.proxy(@p).generate_chunk(anything) do |chunk|
stub.proxy(chunk).purge do |result|
purge_count += 1
result
end
stub.proxy(chunk).commit do |result|
assert_false(chunk.closed?)
result
end
stub.proxy(chunk).rollback do |result|
assert_false(chunk.closed?)
result
end
chunk
end
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
small_row = "x" * 1024 * 400
big_row = "x" * 1024 * 1024 * 8 # just `chunk_size_limit`, it doesn't cause BufferOverFlowError.
# Write 42 events in 1 event stream, last one is for triggering `ShouldRetry`
@p.write({m => [small_row] * 40 + [big_row] + ["x"]})
# Above event stream will be splitted twice by `Buffer#write_step_by_step`
#
# 1. `write_once`: 42 [events] * 1 [stream]
# 2. `write_step_by_step`: 4 [events]* 10 [streams] + 2 [events] * 1 [stream]
# 3. `write_step_by_step` (by `ShouldRetry`): 1 [event] * 42 [streams]
#
# The problematic data is built in the 2nd stage.
# In the 2nd stage, 5 streams are packed in a chunk.
# ((1024 * 400) [bytes] * 4 [events] * 5 [streams] = 8192000 [bytes] < `chunk_limit_size` (8MB)).
# So 3 chunks are used to store all data.
# The 1st chunk is already staged by `write_once`.
# The 2nd & 3rd chunks are newly created as unstaged.
# The 3rd chunk is purged before `ShouldRetry`, it's no problem:
# https://github.com/fluent/fluentd/blob/7e9eba736ff40ad985341be800ddc46558be75f2/lib/fluent/plugin/buffer.rb#L850
# The 2nd chunk is purged in `rescue ShouldRetry`:
# https://github.com/fluent/fluentd/blob/7e9eba736ff40ad985341be800ddc46558be75f2/lib/fluent/plugin/buffer.rb#L862
# It causes the issue described in https://github.com/fluent/fluentd/issues/3089#issuecomment-1811839198
assert_equal 2, purge_count
end
# https://github.com/fluent/fluentd/issues/4446
test "#write_step_by_step keeps chunks kept in locked in entire #write process" do
assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
assert_equal 0.95, @p.chunk_full_threshold
mon_enter_counts_by_chunk = {}
mon_exit_counts_by_chunk = {}
stub.proxy(@p).generate_chunk(anything) do |chunk|
stub(chunk).mon_enter do
enter_count = 1 + mon_enter_counts_by_chunk.fetch(chunk, 0)
exit_count = mon_exit_counts_by_chunk.fetch(chunk, 0)
mon_enter_counts_by_chunk[chunk] = enter_count
# Assert that chunk is passed to &block of write_step_by_step before exiting the lock.
# (i.e. The lock count must be 2 greater than the exit count).
# Since ShouldRetry occurs once, the staged chunk takes the lock 3 times when calling the block.
if chunk.staged?
lock_in_block = enter_count == 3
assert_equal(enter_count - 2, exit_count) if lock_in_block
else
lock_in_block = enter_count == 2
assert_equal(enter_count - 2, exit_count) if lock_in_block
end
end
stub(chunk).mon_exit do
exit_count = 1 + mon_exit_counts_by_chunk.fetch(chunk, 0)
mon_exit_counts_by_chunk[chunk] = exit_count
end
chunk
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_output_as_buffered_secondary.rb | test/plugin/test_output_as_buffered_secondary.rb | require_relative '../helper'
require 'fluent/plugin/output'
require 'fluent/plugin/buffer'
require 'fluent/event'
require 'json'
require 'time'
require 'timeout'
require 'timecop'
module FluentPluginOutputAsBufferedSecondaryTest
  # Minimal output plugin whose behavior can be injected per-test: #register
  # stores a block into an instance variable named after the hook.
  class DummyBareOutput < Fluent::Plugin::Output
    def register(name, &block)
      instance_variable_set("@#{name}", block)
    end
  end

  # Non-buffered output: only #process is injectable.
  class DummySyncOutput < DummyBareOutput
    def initialize
      super
      @process = nil
    end

    def process(tag, es)
      @process&.call(tag, es)
    end
  end

  # Buffered output exposing every overridable hook as an injectable block.
  class DummyFullFeatureOutput < DummyBareOutput
    def initialize
      super
      @prefer_buffered_processing = nil
      @prefer_delayed_commit = nil
      @process = nil
      @format = nil
      @write = nil
      @try_write = nil
    end

    def prefer_buffered_processing
      return false unless @prefer_buffered_processing
      @prefer_buffered_processing.call
    end

    def prefer_delayed_commit
      return false unless @prefer_delayed_commit
      @prefer_delayed_commit.call
    end

    def process(tag, es)
      @process&.call(tag, es)
    end

    def format(tag, time, record)
      if @format
        @format.call(tag, time, record)
      else
        [tag, time, record].to_json
      end
    end

    def write(chunk)
      @write&.call(chunk)
    end

    def try_write(chunk)
      @try_write&.call(chunk)
    end
  end

  # Subclass used as a distinct registered plugin type. The explicit method
  # definitions (even the pure `super` delegations) keep these hooks defined
  # directly on this class.
  class DummyFullFeatureOutput2 < DummyFullFeatureOutput
    def prefer_buffered_processing
      true
    end

    def prefer_delayed_commit
      super
    end

    def format(tag, time, record)
      super
    end

    def write(chunk)
      super
    end

    def try_write(chunk)
      super
    end
  end
end
class BufferedOutputSecondaryTest < Test::Unit::TestCase
# Build a fresh dummy output instance.
#
# type :: :bare, :sync or :full (default). Any other value raises
#         ArgumentError.
def create_output(type=:full)
  klass =
    case type
    when :bare then FluentPluginOutputAsBufferedSecondaryTest::DummyBareOutput
    when :sync then FluentPluginOutputAsBufferedSecondaryTest::DummySyncOutput
    when :full then FluentPluginOutputAsBufferedSecondaryTest::DummyFullFeatureOutput
    else
      raise ArgumentError, "unknown type: #{type}"
    end
  klass.new
end
# Convenience wrapper building a buffer metadata object.
# Positional order of the underlying constructor is (timekey, tag, variables).
def create_metadata(timekey: nil, tag: nil, variables: nil)
  Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end
# Run the given block, aborting after +seconds+ of wall-clock time.
# On timeout, dump the plugin's captured log lines to STDERR to help debug
# the hang, then re-raise the Timeout::Error.
def waiting(seconds)
  Timeout.timeout(seconds) { yield }
rescue Timeout::Error
  STDERR.print(*@i.log.out.logs)
  raise
end
# Three fixed records sharing the same name/age, differing only in timestamp
# and message body.
def dummy_event_stream
  rows = [
    ['2016-04-13 18:33:00', 'data1'],
    ['2016-04-13 18:33:13', 'data2'],
    ['2016-04-13 18:33:32', 'data3'],
  ]
  events = rows.map do |ts, msg|
    [ event_time(ts), {"name" => "moris", "age" => 36, "message" => msg} ]
  end
  Fluent::ArrayEventStream.new(events)
end
# Every test starts from a fresh full-featured dummy output in @i.
setup do
  @i = create_output(:full)
end
# Drive the plugin through any lifecycle phases it has not yet completed,
# then restore real time.
teardown do
  if @i
    [
      [:stopped?,         :stop],
      [:before_shutdown?, :before_shutdown],
      [:shutdown?,        :shutdown],
      [:after_shutdown?,  :after_shutdown],
      [:closed?,          :close],
      [:terminated?,      :terminate],
    ].each do |done_check, step|
      @i.__send__(step) unless @i.__send__(done_check)
    end
  end
  Timecop.return
end
sub_test_case 'secondary plugin feature for buffered output with periodical retry' do
setup do
  # Register the dummy implementations under plugin names so that
  # <secondary> sections in the test configs can resolve them via @type.
  Fluent::Plugin.register_output('output_secondary_test', FluentPluginOutputAsBufferedSecondaryTest::DummyFullFeatureOutput)
  Fluent::Plugin.register_output('output_secondary_test2', FluentPluginOutputAsBufferedSecondaryTest::DummyFullFeatureOutput2)
end
# A sync (non-buffered) primary cannot host a <secondary> section.
test 'raises configuration error if primary does not support buffering' do
  primary = create_output(:sync)
  secondary_conf = config_element('secondary', '', {'@type' => 'output_secondary_test'})
  assert_raise Fluent::ConfigError do
    primary.configure(config_element('ROOT', '', {}, [secondary_conf]))
  end
end
# <buffer> and <secondary> sections nested inside <secondary> are rejected.
test 'raises configuration error if <buffer>/<secondary> section is specified in <secondary> section' do
  buffer_conf = config_element('buffer', 'tag', {'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 30, 'retry_randomize' => false})
  nested_buffer = config_element('secondary', '', {'@type' => 'output_secondary_test'}, [config_element('buffer', 'time')])
  nested_secondary = config_element('secondary', '', {'@type' => 'output_secondary_test'}, [config_element('secondary', '')])
  output = create_output()
  assert_raise Fluent::ConfigError do
    output.configure(config_element('ROOT', '', {}, [buffer_conf, nested_buffer]))
  end
  assert_raise Fluent::ConfigError do
    output.configure(config_element('ROOT', '', {}, [buffer_conf, nested_secondary]))
  end
end
# When <secondary> omits @type, the primary's plugin type is used, without
# emitting any warning.
test 'uses same plugin type with primary if @type is missing in secondary' do
  buffer_conf = config_element('buffer', 'tag', {'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 30, 'retry_randomize' => false})
  secondary_conf = config_element('secondary', '', {})
  root_conf = config_element('ROOT', '', {'@type' => 'output_secondary_test'}, [buffer_conf, secondary_conf])
  output = create_output()
  assert_nothing_raised do
    output.configure(root_conf)
  end
  assert{ output.log.out.logs.empty? }
  assert{ output.secondary.is_a? FluentPluginOutputAsBufferedSecondaryTest::DummyFullFeatureOutput }
end
# Using a secondary of a different plugin type than the primary logs a warning.
test 'warns if secondary plugin is different type from primary one' do
  buffer_conf = config_element('buffer', 'tag', {'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 30, 'retry_randomize' => false})
  secondary_conf = config_element('secondary', '', {'@type' => 'output_secondary_test2'})
  output = create_output()
  output.configure(config_element('ROOT', '', {}, [buffer_conf, secondary_conf]))
  expected = "Use different plugin for secondary. Check the plugin works with primary like secondary_file"
  assert{ output.log.out.logs.any?{|line| line.include?(expected) } }
end
# The primary drives its secondary through every lifecycle phase in order:
# configure -> start -> after_start -> stop -> before_shutdown -> shutdown
# -> after_shutdown -> close -> terminate. Each phase on the primary must
# flip the corresponding state flag on the secondary (checked before/after).
test 'secondary plugin lifecycle is kicked by primary' do
  priconf = config_element('buffer','tag',{'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 30, 'retry_randomize' => false})
  secconf = config_element('secondary','',{'@type' => 'output_secondary_test2'})
  i = create_output()
  i.configure(config_element('ROOT','',{},[priconf,secconf]))
  logs = i.log.out.logs
  assert{ logs.any?{|l| l.include?("Use different plugin for secondary. Check the plugin works with primary like secondary_file") } }
  assert i.secondary.configured?
  assert !i.secondary.started?
  i.start
  assert i.secondary.started?
  assert !i.secondary.after_started?
  i.after_start
  assert i.secondary.after_started?
  assert !i.secondary.stopped?
  i.stop
  assert i.secondary.stopped?
  assert !i.secondary.before_shutdown?
  i.before_shutdown
  assert i.secondary.before_shutdown?
  assert !i.secondary.shutdown?
  i.shutdown
  assert i.secondary.shutdown?
  assert !i.secondary.after_shutdown?
  i.after_shutdown
  assert i.secondary.after_shutdown?
  assert !i.secondary.closed?
  i.close
  assert i.secondary.closed?
  assert !i.secondary.terminated?
  i.terminate
  assert i.secondary.terminated?
end
# End-to-end: after retrying on the (always failing) primary until
# retry_timeout * retry_secondary_threshold (default 0.8) has elapsed,
# flushes are routed to the secondary, which succeeds and clears the retry.
test 'primary plugin will emit event streams to secondary after retries for time of retry_timeout * retry_secondary_threshold' do
  written = []
  priconf = config_element('buffer','tag',{'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 60, 'retry_randomize' => false})
  secconf = config_element('secondary','',{'@type' => 'output_secondary_test2'})
  @i.configure(config_element('ROOT','',{},[priconf,secconf]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:prefer_delayed_commit){ false }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:write){|chunk| raise "yay, your #write must fail" }
  @i.secondary.register(:prefer_delayed_commit){ false }
  @i.secondary.register(:write){|chunk| chunk.read.split("\n").each{|line| written << JSON.parse(line) } }
  @i.start
  @i.after_start
  @i.interrupt_flushes
  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.1", dummy_event_stream())
  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.2", dummy_event_stream())
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  # First flush attempt fails on the primary (its #write always raises).
  waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }
  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors
  first_failure = @i.retry.start
  # retry_timeout == 60(sec), retry_secondary_threshold == 0.8
  now = first_failure + 60 * 0.8 + 1 # to step from primary to secondary
  Timecop.freeze( now )
  unless @i.retry.secondary?
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
    prev_write_count = @i.write_count
    prev_num_errors = @i.num_errors
    # next step is on secondary
    now = first_failure + 60 * 0.8 + 10
    Timecop.freeze( now )
  end
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
  current_write_count = @i.write_count
  current_num_errors = @i.num_errors
  # Secondary write succeeded: counters advanced without new errors and the
  # retry state has been cleared.
  assert{ current_write_count > prev_write_count }
  assert{ current_num_errors == prev_num_errors }
  assert_nil @i.retry
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:00').to_i, {"name" => "moris", "age" => 36, "message" => "data1"} ], written[0]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:13').to_i, {"name" => "moris", "age" => 36, "message" => "data2"} ], written[1]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:32').to_i, {"name" => "moris", "age" => 36, "message" => "data3"} ], written[2]
  logs = @i.log.out.logs
  waiting(4){ sleep 0.1 until logs.any?{|l| l.include?("[warn]: retry succeeded by secondary.") } }
  assert{ logs.any?{|l| l.include?("[warn]: retry succeeded by secondary.") } }
end
# Even when the primary prefers delayed commit (#try_write), a secondary that
# prefers immediate commit uses #write; its success resolves the retry and
# bumps write_secondary_count.
test 'secondary can do non-delayed commit even if primary do delayed commit' do
  written = []
  priconf = config_element('buffer','tag',{'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 60, 'retry_randomize' => false})
  secconf = config_element('secondary','',{'@type' => 'output_secondary_test2'})
  @i.configure(config_element('ROOT','',{},[priconf,secconf]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:prefer_delayed_commit){ true }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:try_write){|chunk| raise "yay, your #write must fail" }
  @i.secondary.register(:prefer_delayed_commit){ false }
  @i.secondary.register(:write){|chunk| chunk.read.split("\n").each{|line| written << JSON.parse(line) } }
  @i.start
  @i.after_start
  @i.interrupt_flushes
  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.1", dummy_event_stream())
  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.2", dummy_event_stream())
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors
  assert_equal 0, @i.write_secondary_count
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  # First flush attempt fails on the primary (its #try_write always raises).
  waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }
  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors
  first_failure = @i.retry.start
  # retry_timeout == 60(sec), retry_secondary_threshold == 0.8
  now = first_failure + 60 * 0.8 + 1 # to step from primary to secondary
  Timecop.freeze( now )
  unless @i.retry.secondary?
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
    prev_write_count = @i.write_count
    prev_num_errors = @i.num_errors
    # next step is on secondary
    now = first_failure + 60 * 0.8 + 10
    Timecop.freeze( now )
  end
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
  assert{ @i.write_count > prev_write_count }
  assert{ @i.num_errors == prev_num_errors }
  assert{ @i.write_secondary_count > 0 }
  assert_nil @i.retry
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:00').to_i, {"name" => "moris", "age" => 36, "message" => "data1"} ], written[0]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:13').to_i, {"name" => "moris", "age" => 36, "message" => "data2"} ], written[1]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:32').to_i, {"name" => "moris", "age" => 36, "message" => "data3"} ], written[2]
  logs = @i.log.out.logs
  waiting(4){ sleep 0.1 until logs.any?{|l| l.include?("[warn]: retry succeeded by secondary.") } }
  assert{ logs.any?{|l| l.include?("[warn]: retry succeeded by secondary.") } }
end
# When both primary and secondary prefer delayed commit, the secondary's
# #try_write leaves the chunk dequeued; the retry state only resolves after
# the secondary's commit_write acknowledges the chunk.
test 'secondary plugin can do delayed commit if primary do it' do
  written = []
  chunks = []
  priconf = config_element('buffer','tag',{'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 60, 'retry_randomize' => false})
  secconf = config_element('secondary','',{'@type' => 'output_secondary_test2'})
  @i.configure(config_element('ROOT','',{},[priconf,secconf]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:prefer_delayed_commit){ true }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:try_write){|chunk| raise "yay, your #write must fail" }
  @i.secondary.register(:prefer_delayed_commit){ true }
  @i.secondary.register(:try_write){|chunk| chunks << chunk; chunk.read.split("\n").each{|line| written << JSON.parse(line) } }
  @i.start
  @i.after_start
  @i.interrupt_flushes
  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.1", dummy_event_stream())
  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.2", dummy_event_stream())
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  # First flush attempt fails on the primary (its #try_write always raises).
  waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }
  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors
  first_failure = @i.retry.start
  # retry_timeout == 60(sec), retry_secondary_threshold == 0.8
  now = first_failure + 60 * 0.8 + 1 # to step from primary to secondary
  Timecop.freeze( now )
  unless @i.retry.secondary?
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
    prev_write_count = @i.write_count
    prev_num_errors = @i.num_errors
    # next step is on secondary
    now = first_failure + 60 * 0.8 + 10
    Timecop.freeze( now )
  end
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
  assert{ @i.write_count > prev_write_count }
  assert{ @i.num_errors == prev_num_errors }
  # Retry state persists until the delayed commit is acknowledged.
  assert @i.retry
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:00').to_i, {"name" => "moris", "age" => 36, "message" => "data1"} ], written[0]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:13').to_i, {"name" => "moris", "age" => 36, "message" => "data2"} ], written[1]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:32').to_i, {"name" => "moris", "age" => 36, "message" => "data3"} ], written[2]
  assert{ @i.buffer.dequeued.size > 0 }
  assert{ chunks.size > 0 }
  assert{ !chunks.first.empty? }
  # Acknowledging the chunk purges it and clears the retry state.
  @i.secondary.commit_write(chunks[0].unique_id)
  assert{ @i.buffer.dequeued[chunks[0].unique_id].nil? }
  assert{ chunks.first.empty? }
  assert{ @i.write_secondary_count > 0 }
  assert_nil @i.retry
  logs = @i.log.out.logs
  waiting(4){ sleep 0.1 until logs.any?{|l| l.include?("[warn]: retry succeeded by secondary.") } }
  assert{ logs.any?{|l| l.include?("[warn]: retry succeeded by secondary.") } }
end
# A secondary may use delayed commit (#try_write) even though the primary uses
# immediate commit (#write); the retry resolves once commit_write acknowledges
# the chunk.
test 'secondary plugin can do delayed commit even if primary does not do it' do
  written = []
  chunks = []
  priconf = config_element('buffer','tag',{'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 60, 'retry_randomize' => false})
  secconf = config_element('secondary','',{'@type' => 'output_secondary_test2'})
  @i.configure(config_element('ROOT','',{},[priconf,secconf]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:prefer_delayed_commit){ false }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:write){|chunk| raise "yay, your #write must fail" }
  @i.secondary.register(:prefer_delayed_commit){ true }
  @i.secondary.register(:try_write){|chunk| chunks << chunk; chunk.read.split("\n").each{|line| written << JSON.parse(line) } }
  @i.start
  @i.after_start
  @i.interrupt_flushes
  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.1", dummy_event_stream())
  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.2", dummy_event_stream())
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  # First flush attempt fails on the primary (its #write always raises).
  waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }
  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors
  first_failure = @i.retry.start
  # retry_timeout == 60(sec), retry_secondary_threshold == 0.8
  now = first_failure + 60 * 0.8 + 1 # to step from primary to secondary
  Timecop.freeze( now )
  unless @i.retry.secondary?
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
    prev_write_count = @i.write_count
    prev_num_errors = @i.num_errors
    # next step is on secondary
    now = first_failure + 60 * 0.8 + 10
    Timecop.freeze( now )
  end
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
  assert{ @i.write_count > prev_write_count }
  assert{ @i.num_errors == prev_num_errors }
  # Retry state persists until the delayed commit is acknowledged.
  assert @i.retry
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:00').to_i, {"name" => "moris", "age" => 36, "message" => "data1"} ], written[0]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:13').to_i, {"name" => "moris", "age" => 36, "message" => "data2"} ], written[1]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:32').to_i, {"name" => "moris", "age" => 36, "message" => "data3"} ], written[2]
  assert{ @i.buffer.dequeued.size > 0 }
  assert{ chunks.size > 0 }
  assert{ !chunks.first.empty? }
  # Acknowledging the chunk purges it and clears the retry state.
  @i.secondary.commit_write(chunks[0].unique_id)
  assert{ @i.buffer.dequeued[chunks[0].unique_id].nil? }
  assert{ chunks.first.empty? }
  assert_nil @i.retry
  logs = @i.log.out.logs
  waiting(4){ sleep 0.1 until logs.any?{|l| l.include?("[warn]: retry succeeded by secondary.") } }
  assert{ logs.any?{|l| l.include?("[warn]: retry succeeded by secondary.") } }
end
# Delayed-commit chunks handed to the secondary that are never acknowledged
# must be rolled back by the primary once delayed_commit_timeout (2s here)
# expires — one timeout warning per dequeued chunk.
test 'secondary plugin can do delayed commit even if primary does not do it, and non-committed chunks will be rollbacked by primary' do
  written = []
  chunks = []
  priconf = config_element('buffer','tag',{'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 60, 'delayed_commit_timeout' => 2, 'retry_randomize' => false, 'queued_chunks_limit_size' => 10})
  secconf = config_element('secondary','',{'@type' => 'output_secondary_test2'})
  @i.configure(config_element('ROOT','',{},[priconf,secconf]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:prefer_delayed_commit){ false }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:write){|chunk| raise "yay, your #write must fail" }
  @i.secondary.register(:prefer_delayed_commit){ true }
  @i.secondary.register(:try_write){|chunk| chunks << chunk; chunk.read.split("\n").each{|line| written << JSON.parse(line) } }
  @i.secondary.register(:write){|chunk| raise "don't use this" }
  @i.start
  @i.after_start
  @i.interrupt_flushes
  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.1", dummy_event_stream())
  @i.emit_events("test.tag.2", dummy_event_stream())
  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  # First flush attempt fails on the primary (its #write always raises).
  waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
  assert{ @i.buffer.queue.size == 2 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }
  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors
  first_failure = @i.retry.start
  # retry_timeout == 60(sec), retry_secondary_threshold == 0.8
  now = first_failure + 60 * 0.8 + 1
  Timecop.freeze( now )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  now = first_failure + 60 * 0.8 + 2
  Timecop.freeze( now )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  # Both queued chunks are flushed via the secondary's #try_write.
  waiting(4){ sleep 0.1 until chunks.size == 2 }
  assert{ @i.write_count > prev_write_count }
  assert{ @i.num_errors == prev_num_errors }
  assert @i.retry
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:00').to_i, {"name" => "moris", "age" => 36, "message" => "data1"} ], written[0]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:13').to_i, {"name" => "moris", "age" => 36, "message" => "data2"} ], written[1]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:32').to_i, {"name" => "moris", "age" => 36, "message" => "data3"} ], written[2]
  assert_equal [ 'test.tag.2', event_time('2016-04-13 18:33:00').to_i, {"name" => "moris", "age" => 36, "message" => "data1"} ], written[3]
  assert_equal [ 'test.tag.2', event_time('2016-04-13 18:33:13').to_i, {"name" => "moris", "age" => 36, "message" => "data2"} ], written[4]
  assert_equal [ 'test.tag.2', event_time('2016-04-13 18:33:32').to_i, {"name" => "moris", "age" => 36, "message" => "data3"} ], written[5]
  assert{ @i.buffer.dequeued.size == 2 }
  assert{ chunks.size == 2 }
  assert{ !chunks[0].empty? }
  assert{ !chunks[1].empty? }
  30.times do |i| # large enough
    # In https://github.com/fluent/fluentd/blob/c90c024576b3d35f356a55fd33d1232947114a9a/lib/fluent/plugin_helper/retry_state.rb
    # @timeout_at is 2016-04-13 18:34:31, @next_time must be less than 2016-04-13 18:34:30
    #
    # first_failure + 60 * 0.8 + 2 # => 2016-04-13 18:34:21
    # @next_time is not added by 1, but by randomize(@retry_wait) https://github.com/fluent/fluentd/blob/c90c024576b3d35f356a55fd33d1232947114a9a/lib/fluent/plugin_helper/retry_state.rb#L196
    # current_time(=Time.now) + randomize(@retry_wait) < @timeout_at
    # (2016-04-13 18:34:21 + 6) + 3 < 2016-04-13 18:34:31
    # So, current_time must be at most 6
    now = first_failure + 60 * 0.8 + 2 + [i, 6].min
    Timecop.freeze( now )
    @i.flush_thread_wakeup
    break if @i.buffer.dequeued.size == 0
  end
  assert @i.retry
  logs = @i.log.out.logs
  waiting(4){ sleep 0.1 until logs.count{|l| l.include?("[warn]: failed to flush the buffer chunk, timeout to commit.") } == 2 }
  assert{ logs.count{|l| l.include?("[warn]: failed to flush the buffer chunk, timeout to commit.") } == 2 }
end
# With periodic retry, the secondary inherits the primary's retry_wait: after
# a failing secondary flush, the next attempt is scheduled retry_wait (3s)
# later.
test 'retry_wait for secondary is same with one for primary' do
  priconf = config_element('buffer','tag',{'flush_interval' => 1, 'retry_type' => :periodic, 'retry_wait' => 3, 'retry_timeout' => 60, 'retry_randomize' => false})
  secconf = config_element('secondary','',{'@type' => 'output_secondary_test2'})
  @i.configure(config_element('ROOT','',{},[priconf,secconf]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:prefer_delayed_commit){ false }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:write){|chunk| raise "yay, your #write must fail" }
  @i.secondary.register(:prefer_delayed_commit){ false }
  @i.secondary.register(:write){|chunk| raise "your secondary is also useless." }
  @i.start
  @i.after_start
  @i.interrupt_flushes
  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.1", dummy_event_stream())
  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.2", dummy_event_stream())
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  # First flush attempt fails on the primary (its #write always raises).
  waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }
  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors
  first_failure = @i.retry.start
  # retry_timeout == 60(sec), retry_secondary_threshold == 0.8
  now = first_failure + 60 * 0.8 + 1
  Timecop.freeze( now )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
  # The secondary also fails, so errors keep growing and the retry persists.
  assert{ @i.write_count > prev_write_count }
  assert{ @i.num_errors > prev_num_errors }
  assert @i.retry
  assert_equal 3, (@i.next_flush_time - Time.now)
  logs = @i.log.out.logs
  waiting(4){ sleep 0.1 until logs.any?{|l| l.include?("[warn]: failed to flush the buffer with secondary output.") } }
  assert{ logs.any?{|l| l.include?("[warn]: failed to flush the buffer with secondary output.") } }
end
end
sub_test_case 'secondary plugin feature for buffered output with exponential backoff' do
setup do
  # Register the dummy plugin classes so <secondary> @type lookups resolve.
  Fluent::Plugin.register_output('output_secondary_test', FluentPluginOutputAsBufferedSecondaryTest::DummyFullFeatureOutput)
  Fluent::Plugin.register_output('output_secondary_test2', FluentPluginOutputAsBufferedSecondaryTest::DummyFullFeatureOutput2)
end
# With exponential backoff, repeated primary failures eventually push virtual
# time past retry_timeout * retry_secondary_threshold; the secondary then
# drains the queue successfully and clears the retry state.
test 'primary plugin will emit event streams to secondary after retries for time of retry_timeout * retry_secondary_threshold' do
  written = []
  priconf = config_element('buffer','tag',{'flush_interval' => 1, 'retry_type' => :exponential_backoff, 'retry_wait' => 1, 'retry_timeout' => 60, 'retry_randomize' => false})
  secconf = config_element('secondary','',{'@type' => 'output_secondary_test2'})
  @i.configure(config_element('ROOT','',{},[priconf,secconf]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:prefer_delayed_commit){ false }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:write){|chunk| raise "yay, your #write must fail" }
  @i.secondary.register(:prefer_delayed_commit){ false }
  @i.secondary.register(:write){|chunk| chunk.read.split("\n").each{|line| written << JSON.parse(line) } }
  @i.start
  @i.after_start
  @i.interrupt_flushes
  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.1", dummy_event_stream())
  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )
  @i.emit_events("test.tag.2", dummy_event_stream())
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors
  assert_equal 0, @i.write_secondary_count
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  # First flush attempt fails on the primary (its #write always raises).
  waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }
  prev_write_count = @i.write_count
  first_failure = @i.retry.start
  # Advance virtual time to each scheduled retry until the queue drains.
  20.times do |i| # large enough
    now = @i.next_flush_time
    Timecop.freeze( now )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ sleep 0.1 until @i.write_count > prev_write_count }
    assert{ @i.write_count > prev_write_count }
    break if @i.buffer.queue.size == 0
    prev_write_count = @i.write_count
  end
  assert{ @i.write_secondary_count > 0 }
  # retry_timeout == 60(sec), retry_secondary_threshold == 0.8
  assert{ now >= first_failure + 60 * 0.8 }
  assert_nil @i.retry
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:00').to_i, {"name" => "moris", "age" => 36, "message" => "data1"} ], written[0]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:13').to_i, {"name" => "moris", "age" => 36, "message" => "data2"} ], written[1]
  assert_equal [ 'test.tag.1', event_time('2016-04-13 18:33:32').to_i, {"name" => "moris", "age" => 36, "message" => "data3"} ], written[2]
  assert(@i.log.out.logs.any?{|l| l.include?("[warn]: retry succeeded by secondary.") })
end
test 'exponential backoff interval will be initialized when switched to secondary' do
priconf = config_element('buffer','tag',{'flush_interval' => 1, 'retry_type' => :exponential_backoff, 'retry_wait' => 1, 'retry_timeout' => 60, 'retry_randomize' => false})
secconf = config_element('secondary','',{'@type' => 'output_secondary_test2'})
@i.configure(config_element('ROOT','',{},[priconf,secconf]))
@i.register(:prefer_buffered_processing){ true }
@i.register(:prefer_delayed_commit){ false }
@i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
@i.register(:write){|chunk| raise "yay, your #write must fail" }
@i.secondary.register(:prefer_delayed_commit){ false }
@i.secondary.register(:write){|chunk| raise "your secondary is also useless." }
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:31 -0700')
Timecop.freeze( now )
@i.emit_events("test.tag.2", dummy_event_stream())
assert_equal 0, @i.write_count
assert_equal 0, @i.num_errors
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
assert{ @i.buffer.queue.size > 0 }
assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
assert{ @i.write_count > 0 }
assert{ @i.num_errors > 0 }
prev_write_count = @i.write_count
prev_num_errors = @i.num_errors
first_failure = @i.retry.start
20.times do |i| # large enough
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_secondary_file.rb | test/plugin/test_out_secondary_file.rb | require_relative '../helper'
require 'time'
require 'fileutils'
require 'fluent/event'
require 'fluent/unique_id'
require 'fluent/plugin/buffer'
require 'fluent/plugin/out_secondary_file'
require 'fluent/plugin/buffer/memory_chunk'
require 'fluent/test/driver/output'
class FileOutputSecondaryTest < Test::Unit::TestCase
include Fluent::UniqueId::Mixin
def setup
Fluent::Test.setup
FileUtils.rm_rf(TMP_DIR)
FileUtils.mkdir_p(TMP_DIR)
end
TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/../tmp/out_secondary_file#{ENV['TEST_ENV_NUMBER']}")
CONFIG = %[
directory #{TMP_DIR}
basename out_file_test
compress gzip
]
class DummyOutput < Fluent::Plugin::Output
def write(chunk); end
end
def create_primary(buffer_config = config_element('buffer'))
DummyOutput.new.configure(config_element('ROOT','',{}, [buffer_config]))
end
def create_driver(conf = CONFIG, primary = create_primary)
c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
c.instance.acts_as_secondary(primary)
c.configure(conf)
end
sub_test_case 'configure' do
test 'default configuration' do
d = create_driver %[directory #{TMP_DIR}]
assert_equal 'dump.bin', d.instance.basename
assert_equal TMP_DIR, d.instance.directory
assert_equal :text, d.instance.compress
assert_equal false, d.instance.append
end
test 'should be configurable' do
d = create_driver %[
directory #{TMP_DIR}
basename out_file_test
compress gzip
append true
]
assert_equal 'out_file_test', d.instance.basename
assert_equal TMP_DIR, d.instance.directory
assert_equal :gzip, d.instance.compress
assert_equal true, d.instance.append
end
test 'should only use in secondary' do
c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
assert_raise Fluent::ConfigError.new("This plugin can only be used in the <secondary> section") do
c.configure(CONFIG)
end
end
test 'basename should not include `/`' do
assert_raise Fluent::ConfigError.new("basename should not include `/`") do
create_driver %[
directory #{TMP_DIR}
basename out/file
]
end
end
test 'directory should be writable' do
assert_nothing_raised do
create_driver %[directory #{TMP_DIR}/test_dir/foo/bar/]
end
assert_nothing_raised do
FileUtils.mkdir_p("#{TMP_DIR}/test_dir")
File.chmod(0777, "#{TMP_DIR}/test_dir")
create_driver %[directory #{TMP_DIR}/test_dir/foo/bar/]
end
if Process.uid.nonzero?
assert_raise Fluent::ConfigError.new("out_secondary_file: `#{TMP_DIR}/test_dir/foo/bar/` should be writable") do
FileUtils.mkdir_p("#{TMP_DIR}/test_dir")
File.chmod(0555, "#{TMP_DIR}/test_dir")
create_driver %[directory #{TMP_DIR}/test_dir/foo/bar/]
end
end
end
test 'should be passed directory' do
assert_raise Fluent::ConfigError do
i = Fluent::Plugin::SecondaryFileOutput.new
i.acts_as_secondary(create_primary)
i.configure(config_element())
end
assert_nothing_raised do
create_driver %[directory #{TMP_DIR}/test_dir/foo/bar/]
end
end
end
def check_gzipped_result(path, expect)
# Zlib::GzipReader has a bug of concatenated file: https://bugs.ruby-lang.org/issues/9790
# Following code from https://www.ruby-forum.com/topic/971591#979520
result = ""
waiting(10) do
# we can expect that GzipReader#read can wait unflushed raw data of `io` on disk
File.open(path, "rb") { |io|
loop do
gzr = Zlib::GzipReader.new(io)
result << gzr.read
unused = gzr.unused
gzr.finish
break if unused.nil?
io.pos -= unused.length
end
}
end
assert_equal expect, result
end
def create_chunk(primary, metadata, es)
primary.buffer.generate_chunk(metadata).tap do |c|
c.concat(es.to_msgpack_stream, es.size) # to_msgpack_stream is standard_format
c.commit
end
end
sub_test_case 'write' do
setup do
@record = { 'key' => 'value' }
@time = event_time
@es = Fluent::OneEventStream.new(@time, @record)
@primary = create_primary
metadata = @primary.buffer.new_metadata
@chunk = create_chunk(@primary, metadata, @es)
end
test 'should output compressed file when compress option is gzip' do
d = create_driver(CONFIG, @primary)
path = d.instance.write(@chunk)
assert_equal "#{TMP_DIR}/out_file_test.0.gz", path
check_gzipped_result(path, @es.to_msgpack_stream.force_encoding('ASCII-8BIT'))
end
test 'should output plain text when compress option is default(text)' do
d = create_driver(%[
directory #{TMP_DIR}/
basename out_file_test
], @primary)
msgpack_binary = @es.to_msgpack_stream.force_encoding('ASCII-8BIT')
path = d.instance.write(@chunk)
assert_equal "#{TMP_DIR}/out_file_test.0", path
waiting(5) do
sleep 0.1 until File.stat(path).size == msgpack_binary.size
end
assert_equal msgpack_binary, File.binread(path)
end
test 'path should be incremental when append option is false' do
d = create_driver(CONFIG, @primary)
packed_value = @es.to_msgpack_stream.force_encoding('ASCII-8BIT')
5.times do |i|
path = d.instance.write(@chunk)
assert_equal "#{TMP_DIR}/out_file_test.#{i}.gz", path
check_gzipped_result(path, packed_value)
end
end
test 'path should be unchanged when append option is true' do
d = create_driver(CONFIG + %[append true], @primary)
packed_value = @es.to_msgpack_stream.force_encoding('ASCII-8BIT')
[*1..5].each do |i|
path = d.instance.write(@chunk)
assert_equal "#{TMP_DIR}/out_file_test.gz", path
check_gzipped_result(path, packed_value * i)
end
end
end
sub_test_case 'Syntax of placeholders' do
data(
tag: '${tag}',
tag_index: '${tag[0]}',
tag_index1: '${tag[10]}',
variable: '${key1}',
variable2: '${key@value}',
variable3: '${key_value}',
variable4: '${key.value}',
variable5: '${key-value}',
variable6: '${KEYVALUE}',
variable7: '${tags}',
variable8: '${tag${key}', # matched ${key}
)
test 'matches with a valid placeholder' do |path|
assert Fluent::Plugin::SecondaryFileOutput::PLACEHOLDER_REGEX.match(path)
end
data(
invalid_tag: 'tag',
invalid_tag2: '{tag}',
invalid_tag3: '${tag',
invalid_tag4: '${tag0]}',
invalid_tag5: '${tag[]]}',
invalid_variable: '${key[0]}',
invalid_variable2: '${key{key2}}',
)
test "doesn't match with an invalid placeholder" do |path|
assert !Fluent::Plugin::SecondaryFileOutput::PLACEHOLDER_REGEX.match(path)
end
end
sub_test_case 'path' do
setup do
@record = { 'key' => 'value' }
@time = event_time
@es = Fluent::OneEventStream.new(@time, @record)
primary = create_primary
m = primary.buffer.new_metadata
@c = create_chunk(primary, m, @es)
end
test 'normal path when compress option is gzip' do
d = create_driver
path = d.instance.write(@c)
assert_equal "#{TMP_DIR}/out_file_test.0.gz", path
end
test 'normal path when compress option is default' do
d = create_driver %[
directory #{TMP_DIR}
basename out_file_test
]
path = d.instance.write(@c)
assert_equal "#{TMP_DIR}/out_file_test.0", path
end
test 'normal path when append option is true' do
d = create_driver %[
directory #{TMP_DIR}
append true
]
path = d.instance.write(@c)
assert_equal "#{TMP_DIR}/dump.bin", path
end
test 'path with ${chunk_id}' do
d = create_driver %[
directory #{TMP_DIR}
basename out_file_chunk_id_${chunk_id}
]
path = d.instance.write(@c)
if File.basename(path) =~ /out_file_chunk_id_([-_.@a-zA-Z0-9].*).0/
unique_id = Fluent::UniqueId.hex(Fluent::UniqueId.generate)
assert_equal unique_id.size, $1.size, "chunk_id size is mismatched"
else
flunk "chunk_id is not included in the path"
end
end
data(
invalid_tag: [/tag/, '${tag}'],
invalid_tag0: [/tag\[0\]/, '${tag[0]}'],
invalid_variable: [/dummy/, '${dummy}'],
invalid_timeformat: [/time/, '%Y%m%d'],
)
test 'raise an error when basename includes incompatible placeholder' do |(expected_message, invalid_basename)|
c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
c.instance.acts_as_secondary(DummyOutput.new)
assert_raise_message(expected_message) do
c.configure %[
directory #{TMP_DIR}/
basename #{invalid_basename}
compress gzip
]
end
end
data(
invalid_tag: [/tag/, '${tag}'],
invalid_tag0: [/tag\[0\]/, '${tag[0]}'],
invalid_variable: [/dummy/, '${dummy}'],
invalid_timeformat: [/time/, '%Y%m%d'],
)
test 'raise an error when directory includes incompatible placeholder' do |(expected_message, invalid_directory)|
c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
c.instance.acts_as_secondary(DummyOutput.new)
assert_raise_message(expected_message) do
c.configure %[
directory #{invalid_directory}/
compress gzip
]
end
end
test 'basename includes tag' do
primary = create_primary(config_element('buffer', 'tag'))
d = create_driver(%[
directory #{TMP_DIR}/
basename cool_${tag}
compress gzip
], primary)
m = primary.buffer.new_metadata(tag: 'test.dummy')
c = create_chunk(primary, m, @es)
path = d.instance.write(c)
assert_equal "#{TMP_DIR}/cool_test.dummy.0.gz", path
end
test 'basename includes /tag[\d+]/' do
primary = create_primary(config_element('buffer', 'tag'))
d = create_driver(%[
directory #{TMP_DIR}/
basename cool_${tag[0]}_${tag[1]}
compress gzip
], primary)
m = primary.buffer.new_metadata(tag: 'test.dummy')
c = create_chunk(primary, m, @es)
path = d.instance.write(c)
assert_equal "#{TMP_DIR}/cool_test_dummy.0.gz", path
end
test 'basename includes time format' do
primary = create_primary(
config_element('buffer', 'time', { 'timekey_zone' => '+0900', 'timekey' => 1 })
)
d = create_driver(%[
directory #{TMP_DIR}/
basename cool_%Y%m%d%H
compress gzip
], primary)
m = primary.buffer.new_metadata(timekey: event_time("2011-01-02 13:14:15 UTC"))
c = create_chunk(primary, m, @es)
path = d.instance.write(c)
assert_equal "#{TMP_DIR}/cool_2011010222.0.gz", path
end
test 'basename includes time format with timekey_use_utc option' do
primary = create_primary(
config_element('buffer', 'time', { 'timekey_zone' => '+0900', 'timekey' => 1, 'timekey_use_utc' => true })
)
d = create_driver(%[
directory #{TMP_DIR}/
basename cool_%Y%m%d%H
compress gzip
], primary)
m = primary.buffer.new_metadata(timekey: event_time("2011-01-02 13:14:15 UTC"))
c = create_chunk(primary, m, @es)
path = d.instance.write(c)
assert_equal "#{TMP_DIR}/cool_2011010213.0.gz", path
end
test 'basename includes variable' do
primary = create_primary(config_element('buffer', 'test1'))
d = create_driver(%[
directory #{TMP_DIR}/
basename cool_${test1}
compress gzip
], primary)
m = primary.buffer.new_metadata(variables: { "test1".to_sym => "dummy" })
c = create_chunk(primary, m, @es)
path = d.instance.write(c)
assert_equal "#{TMP_DIR}/cool_dummy.0.gz", path
end
test 'basename includes unnecessary variable' do
primary = create_primary(config_element('buffer', 'test1'))
c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
c.instance.acts_as_secondary(primary)
assert_raise_message(/test2/) do
c.configure %[
directory #{TMP_DIR}/
basename ${test1}_${test2}
compress gzip
]
end
end
test 'basename includes tag, time format, and variables' do
primary = create_primary(
config_element('buffer', 'time,tag,test1', { 'timekey_zone' => '+0000', 'timekey' => 1 })
)
d = create_driver(%[
directory #{TMP_DIR}/
basename cool_%Y%m%d%H_${tag}_${test1}
compress gzip
], primary)
m = primary.buffer.new_metadata(
timekey: event_time("2011-01-02 13:14:15 UTC"),
tag: 'test.tag',
variables: { "test1".to_sym => "dummy" }
)
c = create_chunk(primary, m, @es)
path = d.instance.write(c)
assert_equal "#{TMP_DIR}/cool_2011010213_test.tag_dummy.0.gz", path
end
test 'directory includes tag, time format, and variables' do
primary = create_primary(
config_element('buffer', 'time,tag,test1', { 'timekey_zone' => '+0000', 'timekey' => 1 })
)
d = create_driver(%[
directory #{TMP_DIR}/%Y%m%d%H/${tag}/${test1}
compress gzip
], primary)
m = primary.buffer.new_metadata(
timekey: event_time("2011-01-02 13:14:15 UTC"),
tag: 'test.tag',
variables: { "test1".to_sym => "dummy" }
)
c = create_chunk(primary, m, @es)
path = d.instance.write(c)
assert_equal "#{TMP_DIR}/2011010213/test.tag/dummy/dump.bin.0.gz", path
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_apache_error.rb | test/plugin/test_parser_apache_error.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser_apache_error'
class ApacheErrorParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
@expected = {
'level' => 'error',
'client' => '127.0.0.1',
'message' => 'client denied by server configuration'
}
end
def create_driver
Fluent::Test::Driver::Parser.new(Fluent::Plugin::ApacheErrorParser.new).configure({})
end
def test_parse
d = create_driver
d.instance.parse('[Wed Oct 11 14:32:52 2000] [error] [client 127.0.0.1] client denied by server configuration') { |time, record|
assert_equal(event_time('Wed Oct 11 14:32:52 2000'), time)
assert_equal(@expected, record)
}
end
def test_parse_with_pid
d = create_driver
d.instance.parse('[Wed Oct 11 14:32:52 2000] [error] [pid 1000] [client 127.0.0.1] client denied by server configuration') { |time, record|
assert_equal(event_time('Wed Oct 11 14:32:52 2000'), time)
assert_equal(@expected.merge('pid' => '1000'), record)
}
end
def test_parse_without_client
d = create_driver
d.instance.parse('[Wed Oct 11 14:32:52 2000] [notice] Apache/2.2.15 (Unix) DAV/2 configured -- resuming normal operations') { |time, record|
assert_equal(event_time('Wed Oct 11 14:32:52 2000'), time)
assert_equal({
'level' => 'notice',
'message' => 'Apache/2.2.15 (Unix) DAV/2 configured -- resuming normal operations'
}, record)
}
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_none.rb | test/plugin/test_parser_none.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser'
class NoneParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
end
def test_config_params
parser = Fluent::Test::Driver::Parser.new(Fluent::TextParser::NoneParser)
parser.configure({})
assert_equal "message", parser.instance.message_key
parser.configure('message_key' => 'foobar')
assert_equal "foobar", parser.instance.message_key
end
def test_parse
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin.new_parser('none'))
parser.configure({})
parser.instance.parse('log message!') { |time, record|
assert_equal({'message' => 'log message!'}, record)
}
end
def test_parse_with_message_key
parser = Fluent::Test::Driver::Parser.new(Fluent::TextParser::NoneParser)
parser.configure('message_key' => 'foobar')
parser.instance.parse('log message!') { |time, record|
assert_equal({'foobar' => 'log message!'}, record)
}
end
def test_parse_without_default_time
time_at_start = Time.now.to_i
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin.new_parser('none'))
parser.configure({})
parser.instance.parse('log message!') { |time, record|
assert time && time >= time_at_start, "parser puts current time without time input"
assert_equal({'message' => 'log message!'}, record)
}
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin.new_parser('none'))
parser.configure({'estimate_current_event' => 'false'})
parser.instance.parse('log message!') { |time, record|
assert_equal({'message' => 'log message!'}, record)
assert_nil time, "parser returns nil w/o time if configured so"
}
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_exec.rb | test/plugin/test_in_exec.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_exec'
require 'timecop'
class ExecInputTest < Test::Unit::TestCase
SCRIPT_PATH = File.expand_path(File.join(File.dirname(__FILE__), '..', 'scripts', 'exec_script.rb'))
TEST_TIME = "2011-01-02 13:14:15"
TEST_UNIX_TIME = Time.parse(TEST_TIME)
def setup
Fluent::Test.setup
@test_time = event_time()
end
def create_driver(conf)
Fluent::Test::Driver::Input.new(Fluent::Plugin::ExecInput).configure(conf)
end
DEFAULT_CONFIG_ONLY_WITH_KEYS = %[
command ruby #{SCRIPT_PATH} "#{TEST_TIME}" 0
run_interval 1s
tag "my.test.data"
<parse>
keys ["k1", "k2", "k3"]
</parse>
]
TSV_CONFIG = %[
command ruby #{SCRIPT_PATH} "#{TEST_TIME}" 0
run_interval 0.3
<parse>
@type tsv
keys time, tag, k1
</parse>
<extract>
tag_key tag
time_key time
time_type string
time_format %Y-%m-%d %H:%M:%S
</extract>
]
JSON_CONFIG = %[
command ruby #{SCRIPT_PATH} #{TEST_UNIX_TIME.to_i} 1
run_interval 0.3
<parse>
@type json
</parse>
<extract>
tag_key tag
time_key time
time_type unixtime
</extract>
]
MSGPACK_CONFIG = %[
command ruby #{SCRIPT_PATH} #{TEST_UNIX_TIME.to_i} 2
run_interval 0.3
<parse>
@type msgpack
</parse>
<extract>
tag_key tagger
time_key datetime
time_type unixtime
</extract>
]
# here document for not de-quoting backslashes
REGEXP_CONFIG = %[
command ruby #{SCRIPT_PATH} "#{TEST_TIME}" 3
run_interval 0.3
tag regex_tag
] + <<'EOC'
<parse>
@type regexp
expression "(?<time>[^\\]]*) (?<message>[^ ]*)"
time_key time
time_type string
time_format %Y-%m-%d %H:%M:%S
</parse>
EOC
sub_test_case 'with configuration with sections' do
test 'configure with default tsv format without extract' do
d = create_driver DEFAULT_CONFIG_ONLY_WITH_KEYS
assert{ d.instance.parser.is_a? Fluent::Plugin::TSVParser }
assert_equal "my.test.data", d.instance.tag
assert_equal ["k1", "k2", "k3"], d.instance.parser.keys
end
test 'configure raises error if both of tag and extract.tag_key are missing' do
assert_raise Fluent::ConfigError.new("'tag' or 'tag_key' option is required on exec input") do
create_driver %[
command ruby -e 'puts "yay"'
<parse>
keys y1
</parse>
]
end
end
test 'configure for tsv' do
d = create_driver TSV_CONFIG
assert{ d.instance.parser.is_a? Fluent::Plugin::TSVParser }
assert_equal ["time", "tag", "k1"], d.instance.parser.keys
assert_equal "tag", d.instance.extract_config.tag_key
assert_equal "time", d.instance.extract_config.time_key
assert_equal :string, d.instance.extract_config.time_type
assert_equal "%Y-%m-%d %H:%M:%S", d.instance.extract_config.time_format
end
test 'configure for json' do
d = create_driver JSON_CONFIG
assert{ d.instance.parser.is_a? Fluent::Plugin::JSONParser }
assert_equal "tag", d.instance.extract_config.tag_key
assert_equal "time", d.instance.extract_config.time_key
assert_equal :unixtime, d.instance.extract_config.time_type
end
test 'configure for msgpack' do
d = create_driver MSGPACK_CONFIG
assert{ d.instance.parser.is_a? Fluent::Plugin::MessagePackParser }
assert_equal "tagger", d.instance.extract_config.tag_key
assert_equal "datetime", d.instance.extract_config.time_key
assert_equal :unixtime, d.instance.extract_config.time_type
end
test 'configure for regexp' do
d = create_driver REGEXP_CONFIG
assert{ d.instance.parser.is_a? Fluent::Plugin::RegexpParser }
assert_equal "regex_tag", d.instance.tag
expression = /(?<time>[^\]]*) (?<message>[^ ]*)/
assert_equal expression, d.instance.parser.expression
assert_nil d.instance.extract_config
end
end
TSV_CONFIG_COMPAT = %[
command ruby #{SCRIPT_PATH} "#{TEST_TIME}" 0
keys time,tag,k1
time_key time
tag_key tag
time_format %Y-%m-%d %H:%M:%S
run_interval 0.3
]
JSON_CONFIG_COMPAT = %[
command ruby #{SCRIPT_PATH} #{TEST_UNIX_TIME.to_i} 1
format json
tag_key tag
time_key time
run_interval 0.3
]
MSGPACK_CONFIG_COMPAT = %[
command ruby #{SCRIPT_PATH} #{TEST_UNIX_TIME.to_i} 2
format msgpack
tag_key tagger
time_key datetime
run_interval 0.3
]
REGEXP_CONFIG_COMPAT = %[
command ruby #{SCRIPT_PATH} "#{TEST_TIME}" 3
format /(?<time>[^\\\]]*) (?<message>[^ ]*)/
tag regex_tag
run_interval 0.3
]
sub_test_case 'with traditional configuration' do
test 'configure' do
d = create_driver TSV_CONFIG_COMPAT
assert{ d.instance.parser.is_a? Fluent::Plugin::TSVParser }
assert_equal ["time","tag","k1"], d.instance.parser.keys
assert_equal "tag", d.instance.extract_config.tag_key
assert_equal "time", d.instance.extract_config.time_key
assert_equal "%Y-%m-%d %H:%M:%S", d.instance.extract_config.time_format
end
test 'configure_with_json' do
d = create_driver JSON_CONFIG_COMPAT
assert{ d.instance.parser.is_a? Fluent::Plugin::JSONParser }
end
test 'configure_with_msgpack' do
d = create_driver MSGPACK_CONFIG_COMPAT
assert{ d.instance.parser.is_a? Fluent::Plugin::MessagePackParser }
end
test 'configure_with_regexp' do
d = create_driver REGEXP_CONFIG_COMPAT
assert{ d.instance.parser.is_a? Fluent::Plugin::RegexpParser }
assert_equal(/(?<time>[^\]]*) (?<message>[^ ]*)/, d.instance.parser.expression)
assert_equal('regex_tag', d.instance.tag)
end
end
sub_test_case 'with default configuration' do
setup do
@current_event_time = event_time('2016-10-31 20:01:30.123 -0700')
Timecop.freeze(Time.at(@current_event_time))
end
teardown do
Timecop.return
end
test 'emits events with current timestamp if time key is not specified' do
d = create_driver DEFAULT_CONFIG_ONLY_WITH_KEYS
d.run(expect_records: 2, timeout: 10)
assert{ d.events.length > 0 }
d.events.each do |event|
assert_equal ["my.test.data", @current_event_time, {"k1"=>"2011-01-02 13:14:15", "k2"=>"tag1", "k3"=>"ok"}], event
end
end
end
sub_test_case 'encoding' do
data(immediate: "")
data(run_interval: "run_interval 1")
test 'can handle non-ascii characters' do |additional_setting|
content = 'ひらがな漢字'
d = create_driver %[
command ruby -e "puts '#{content}'"
tag test
encoding utf-8
<parse>
@type none
</parse>
#{additional_setting}
]
d.run(expect_records: 1, timeout: 10)
assert_equal 1, d.events.length
tag, time, record = d.events.first
assert_equal({"message" => content}, record)
end
test 'raise ConfigError for invalid encoding' do
assert_raise Fluent::ConfigError do
d = create_driver %[
command ruby -e "puts foo"
tag test
encoding invalid-encode
<parse>
@type none
</parse>
]
end
end
end
data(
'default' => [TSV_CONFIG, "tag1", event_time("2011-01-02 13:14:15"), {"k1"=>"ok"}],
'json' => [JSON_CONFIG, "tag1", event_time("2011-01-02 13:14:15"), {"k1"=>"ok"}],
'msgpack' => [MSGPACK_CONFIG, "tag1", event_time("2011-01-02 13:14:15"), {"k1"=>"ok"}],
'regexp' => [REGEXP_CONFIG, "regex_tag", event_time("2011-01-02 13:14:15"), {"message"=>"hello"}],
'default_c' => [TSV_CONFIG_COMPAT, "tag1", event_time("2011-01-02 13:14:15"), {"k1"=>"ok"}],
'json_c' => [JSON_CONFIG_COMPAT, "tag1", event_time("2011-01-02 13:14:15"), {"k1"=>"ok"}],
'msgpack_c' => [MSGPACK_CONFIG_COMPAT, "tag1", event_time("2011-01-02 13:14:15"), {"k1"=>"ok"}],
'regexp_c' => [REGEXP_CONFIG_COMPAT, "regex_tag", event_time("2011-01-02 13:14:15"), {"message"=>"hello"}],
)
test 'emit with formats' do |data|
config, tag, time, record = data
d = create_driver(config)
d.run(expect_emits: 2, timeout: 10)
assert{ d.events.length > 0 }
d.events.each {|event|
assert_equal_event_time(time, event[1])
assert_equal [tag, time, record], event
}
end
test 'emit error message with read_with_stderr' do
d = create_driver %[
tag test
command ruby #{File.join(File.dirname(SCRIPT_PATH), 'foo_bar_baz_no_existence.rb')}
connect_mode read_with_stderr
<parse>
@type none
</parse>
]
d.run(expect_records: 1, timeout: 10)
assert{ d.events.length > 0 }
d.events.each do |event|
assert_equal 'test', event[0]
assert_match(/LoadError/, event[2]['message'])
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_string_util.rb | test/plugin/test_string_util.rb | require_relative '../helper'
require 'fluent/plugin/string_util'
class StringUtilTest < Test::Unit::TestCase
def setup
@null_value_pattern = Regexp.new("^(-|null|NULL)$")
end
sub_test_case 'valid string' do
test 'null string' do
assert_equal Fluent::StringUtil.match_regexp(@null_value_pattern, "null").to_s, "null"
assert_equal Fluent::StringUtil.match_regexp(@null_value_pattern, "NULL").to_s, "NULL"
assert_equal Fluent::StringUtil.match_regexp(@null_value_pattern, "-").to_s, "-"
end
test 'normal string' do
assert_equal Fluent::StringUtil.match_regexp(@null_value_pattern, "fluentd"), nil
end
end
sub_test_case 'invalid string' do
test 'normal string' do
assert_equal Fluent::StringUtil.match_regexp(@null_value_pattern, "\xff"), nil
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_csv.rb | test/plugin/test_parser_csv.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser_csv'
class CSVParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
end
def create_driver(conf={})
Fluent::Test::Driver::Parser.new(Fluent::Plugin::CSVParser).configure(conf)
end
data('array param' => '["time","c","d"]', 'string param' => 'time,c,d')
def test_parse(param)
d = create_driver('keys' => param, 'time_key' => 'time')
d.instance.parse("2013/02/28 12:00:00,192.168.0.1,111") { |time, record|
assert_equal(event_time('2013/02/28 12:00:00', format: '%Y/%m/%d %H:%M:%S'), time)
assert_equal({
'c' => '192.168.0.1',
'd' => '111',
}, record)
}
end
data('array param' => '["c","d"]', 'string param' => 'c,d')
def test_parse_without_time(param)
time_at_start = Time.now.to_i
d = create_driver('keys' => param)
d.instance.parse("192.168.0.1,111") { |time, record|
assert time && time >= time_at_start, "parser puts current time without time input"
assert_equal({
'c' => '192.168.0.1',
'd' => '111',
}, record)
}
d = Fluent::Test::Driver::Parser.new(Fluent::Plugin::CSVParser)
d.configure('keys' => param, 'estimate_current_event' => 'no')
d.instance.parse("192.168.0.1,111") { |time, record|
assert_equal({
'c' => '192.168.0.1',
'd' => '111',
}, record)
assert_nil time, "parser returns nil w/o time and if configured so"
}
end
def test_parse_with_keep_time_key
d = create_driver(
'keys'=>'time',
'time_key'=>'time',
'time_format'=>"%d/%b/%Y:%H:%M:%S %z",
'keep_time_key'=>'true',
)
text = '28/Feb/2013:12:00:00 +0900'
d.instance.parse(text) do |time, record|
assert_equal text, record['time']
end
end
data('array param' => '["a","b","c","d","e","f"]', 'string param' => 'a,b,c,d,e,f')
def test_parse_with_null_value_pattern(param)
d = create_driver(
'keys'=>param,
'null_value_pattern'=>'^(-|null|NULL)$'
)
d.instance.parse("-,null,NULL,,--,nuLL") do |time, record|
assert_nil record['a']
assert_nil record['b']
assert_nil record['c']
assert_nil record['d']
assert_equal record['e'], '--'
assert_equal record['f'], 'nuLL'
end
end
data('array param' => '["a","b"]', 'string param' => 'a,b')
def test_parse_with_null_empty_string(param)
d = create_driver(
'keys'=>param,
'null_empty_string'=>true
)
d.instance.parse(", ") do |time, record|
assert_nil record['a']
assert_equal record['b'], ' '
end
end
data('array param' => '["a","b","c"]', 'string param' => 'a,b,c')
def test_parse_with_option_delimiter(param)
d = create_driver(
'keys'=>param,
'delimiter'=>' ',
)
d.instance.parse("123 456 789") do |time, record|
assert_equal record['a'], '123'
assert_equal record['b'], '456'
assert_equal record['c'], '789'
end
end
sub_test_case 'parser' do
data('normal' => 'normal',
'fast' => 'fast')
def test_compatibility_between_normal_and_fast_parser(param)
d = create_driver(
'keys' => 'time,key1,key2,key3,key4,key5',
'time_key' => 'time',
'time_format' => "%d/%b/%Y:%H:%M:%S %z",
'keep_time_key' => 'false',
'parser_type' => param
)
# non quoted
text = '28/Feb/2013:12:00:00 +0900,value1,value2,value3,value4,value5'
expected = {'key1' => 'value1', 'key2' => 'value2', 'key3' => "value3",
'key4' => 'value4', 'key5' => "value5"}
d.instance.parse(text) do |time, record|
assert_equal(event_time("28/Feb/2013:12:00:00 +0900", format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal expected, record
end
# quoted
text = '28/Feb/2013:12:00:00 +0900,"value1","val,ue2","va,lu,e3","val ue4",""'
expected = {'key1' => 'value1', 'key2' => 'val,ue2', 'key3' => "va,lu,e3",
'key4' => 'val ue4', 'key5' => ""}
d.instance.parse(text) do |time, record|
assert_equal(event_time("28/Feb/2013:12:00:00 +0900", format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal expected, record
end
# mixed
text = '28/Feb/2013:12:00:00 +0900,message,"mes,sage","me,ssa,ge",mess age,""'
expected = {'key1' => 'message', 'key2' => 'mes,sage', 'key3' => "me,ssa,ge",
'key4' => 'mess age', 'key5' => ""}
d.instance.parse(text) do |time, record|
assert_equal(event_time("28/Feb/2013:12:00:00 +0900", format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal expected, record
end
# escaped
text = '28/Feb/2013:12:00:00 +0900,"message","mes""sage","""message""",,""""""'
expected = {'key1' => 'message', 'key2' => 'mes"sage', 'key3' => '"message"',
'key4' => nil, 'key5' => '""'}
d.instance.parse(text) do |time, record|
assert_equal(event_time("28/Feb/2013:12:00:00 +0900", format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal expected, record
end
end
def test_incompatibility_between_normal_and_fast_parser
normal = create_driver(
'keys' => 'key1,key2',
'parser_type' => 'normal'
)
fast = create_driver(
'keys' => 'key1,key2',
'parser_type' => 'fast'
)
# unexpected quote position
text = 'a"b,"a"""c"'
assert_raise(CSV::MalformedCSVError) {
normal.instance.parse(text) { |t, r| }
}
assert_nothing_raised {
# generate broken record
fast.instance.parse(text) { |t, r| }
}
# incorrect the number of column
text = 'a,b,c'
expected = {"key1" => 'a', "key2" => 'b'}
normal.instance.parse(text) { |t, r|
assert_equal expected, r
}
fast.instance.parse(text) { |t, r|
assert_not_equal expected, r
}
# And more...
end
end
# "parser_type" config shouldn't hide Fluent::Plugin::Parser#plugin_type
# https://github.com/fluent/fluentd/issues/3296
data('normal' => :normal, 'fast' => :fast)
def test_parser_type_method(engine)
d = create_driver('keys' => '["time"]','time_key' => 'time', 'parser_type' => engine.to_s)
assert_equal(:text_per_line, d.instance.parser_type)
end
data('normal' => :normal, 'fast' => :fast)
def test_parser_engine(engine)
d = create_driver('keys' => '["time"]', 'time_key' => 'time', 'parser_engine' => engine.to_s)
assert_equal(engine, d.instance.parser_engine)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_output_as_buffered_backup.rb | test/plugin/test_output_as_buffered_backup.rb | require_relative '../helper'
require 'fluent/plugin/output'
require 'fluent/plugin/buffer'
require 'fluent/event'
require 'fluent/error'
require 'json'
require 'time'
require 'timeout'
require 'timecop'
class BufferedOutputBackupTest < Test::Unit::TestCase
class BareOutput < Fluent::Plugin::Output
def register(name, &block)
instance_variable_set("@#{name}", block)
end
end
class DummyOutput < BareOutput
def initialize
super
@process = nil
@format = nil
@write = nil
@try_write = nil
end
def prefer_buffered_processing
true
end
def prefer_delayed_commit
false
end
def process(tag, es)
@process ? @process.call(tag, es) : nil
end
def format(tag, time, record)
[tag, time.to_i, record].to_json + "\n"
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
class DummyOutputForSecondary < BareOutput
def initialize
super
@process = nil
@format = nil
@write = nil
@try_write = nil
end
def prefer_buffered_processing
true
end
def prefer_delayed_commit
false
end
def process(tag, es)
@process ? @process.call(tag, es) : nil
end
def format(tag, time, record)
[tag, time.to_i, record].to_json + "\n"
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
class DummyAsyncOutputForSecondary < BareOutput
def initialize
super
@process = nil
@format = nil
@write = nil
@try_write = nil
end
def prefer_buffered_processing
true
end
def prefer_delayed_commit
true
end
def process(tag, es)
@process ? @process.call(tag, es) : nil
end
def format(tag, time, record)
[tag, time.to_i, record].to_json + "\n"
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/../tmp/bu#{ENV['TEST_ENV_NUMBER']}")
def create_output
DummyOutput.new
end
def create_metadata(timekey: nil, tag: nil, variables: nil)
Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end
def waiting(seconds)
begin
Timeout.timeout(seconds) do
yield
end
rescue Timeout::Error
STDERR.print(*@i.log.out.logs)
raise
end
end
def dummy_event_stream
Fluent::ArrayEventStream.new([
[ event_time('2016-04-13 18:33:00'), {"name" => "moris", "age" => 36, "message" => "data1"} ],
[ event_time('2016-04-13 18:33:13'), {"name" => "moris", "age" => 36, "message" => "data2"} ],
[ event_time('2016-04-13 18:33:32'), {"name" => "moris", "age" => 36, "message" => "data3"} ],
])
end
setup do
@i = create_output
FileUtils.rm_rf(TMP_DIR)
FileUtils.mkdir_p(TMP_DIR)
Fluent::Plugin.register_output('backup_output', DummyOutput)
Fluent::Plugin.register_output('backup_output2', DummyOutputForSecondary)
Fluent::Plugin.register_output('backup_async_output', DummyAsyncOutputForSecondary)
end
teardown do
if @i
@i.stop unless @i.stopped?
@i.before_shutdown unless @i.before_shutdown?
@i.shutdown unless @i.shutdown?
@i.after_shutdown unless @i.after_shutdown?
@i.close unless @i.closed?
@i.terminate unless @i.terminated?
end
Timecop.return
end
sub_test_case 'buffered output for broken chunks' do
def flush_chunks
@i.start
@i.after_start
@i.interrupt_flushes
now = Time.parse('2016-04-13 18:33:30 -0700')
Timecop.freeze(now)
@i.emit_events("test.tag.1", dummy_event_stream())
now = Time.parse('2016-04-13 18:33:32 -0700')
Timecop.freeze(now)
@i.enqueue_thread_wait
@i.flush_thread_wakeup
waiting(4) { Thread.pass until @i.write_count > 0 }
assert { @i.write_count > 0 }
Timecop.freeze(now)
@i.flush_thread_wakeup
end
def wait_flush(target_file)
waiting(5) {
target_dir = File.join(File.dirname(target_file), "*")
while Dir.glob(target_dir).size.zero?
# Avoid to lose globbed entries on Windows in busy loop
sleep 0.1 if Fluent.windows?
end
}
end
data('unrecoverable error' => Fluent::UnrecoverableError,
'type error' => TypeError,
'argument error' => ArgumentError,
'no method error' => NoMethodError,
'msgpack unpack error' => MessagePack::UnpackError,
'encoding error' => EncodingError)
test 'backup chunk without secondary' do |error_class|
Fluent::SystemConfig.overwrite_system_config('root_dir' => TMP_DIR) do
id = 'backup_test'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
}
chunk_id = nil
@i.configure(config_element('ROOT', '', {'@id' => id}, [config_element('buffer', 'tag', hash)]))
@i.register(:write) { |chunk|
chunk_id = chunk.unique_id
raise error_class, "yay, your #write must fail"
}
flush_chunks
target = "#{TMP_DIR}/backup/worker0/#{id}/#{@i.dump_unique_id_hex(chunk_id)}.log"
wait_flush(target)
assert_true File.exist?(target)
logs = @i.log.out.logs
assert { logs.any? { |l| l.include?("got unrecoverable error in primary and no secondary") } }
end
end
test 'backup chunk with same type secondary' do
Fluent::SystemConfig.overwrite_system_config('root_dir' => TMP_DIR) do
id = 'backup_test_with_same_secondary'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
}
chunk_id = nil
secconf = config_element('secondary','',{'@type' => 'backup_output'})
@i.configure(config_element('ROOT', '', {'@id' => id}, [config_element('buffer', 'tag', hash), secconf]))
@i.register(:write) { |chunk|
chunk_id = chunk.unique_id
raise Fluent::UnrecoverableError, "yay, your #write must fail"
}
flush_chunks
target = "#{TMP_DIR}/backup/worker0/#{id}/#{@i.dump_unique_id_hex(chunk_id)}.log"
wait_flush(target)
assert_true File.exist?(target)
logs = @i.log.out.logs
assert { logs.any? { |l| l.include?("got unrecoverable error in primary and secondary type is same as primary") } }
end
end
test 'create directory' do
Fluent::SystemConfig.overwrite_system_config('root_dir' => TMP_DIR) do
id = 'backup_test_with_same_secondary'
hash = { 'flush_interval' => 1, 'flush_thread_burst_interval' => 0.1 }
chunk_id = nil
secconf = config_element('secondary', '', { '@type' => 'backup_output' })
@i.configure(config_element('ROOT', '', { '@id' => id }, [config_element('buffer', 'tag', hash), secconf]))
@i.register(:write) { |chunk|
chunk_id = chunk.unique_id
raise Fluent::UnrecoverableError, "yay, your #write must fail"
}
flush_chunks
target = "#{TMP_DIR}/backup/worker0/#{id}/#{@i.dump_unique_id_hex(chunk_id)}.log"
target_dir = File.dirname(target)
wait_flush(target)
assert_path_exist(target_dir)
assert_equal '755', File.stat(target_dir).mode.to_s(8)[-3, 3]
end
end
test 'create directory with specific mode' do
omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
Fluent::SystemConfig.overwrite_system_config('root_dir' => TMP_DIR, 'dir_permission' => '744') do
id = 'backup_test_with_same_secondary'
hash = { 'flush_interval' => 1, 'flush_thread_burst_interval' => 0.1 }
chunk_id = nil
secconf = config_element('secondary', '', { '@type' => 'backup_output' })
@i.configure(config_element('ROOT', '', { '@id' => id }, [config_element('buffer', 'tag', hash), secconf]))
@i.register(:write) { |chunk|
chunk_id = chunk.unique_id
raise Fluent::UnrecoverableError, "yay, your #write must fail"
}
flush_chunks
target = "#{TMP_DIR}/backup/worker0/#{id}/#{@i.dump_unique_id_hex(chunk_id)}.log"
target_dir = File.dirname(target)
wait_flush(target)
assert_path_exist(target_dir)
assert_equal '744', File.stat(target_dir).mode.to_s(8)[-3, 3]
end
end
test 'backup chunk with different type secondary' do
Fluent::SystemConfig.overwrite_system_config('root_dir' => TMP_DIR) do
id = 'backup_test_with_diff_secondary'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
}
chunk_id = nil
secconf = config_element('secondary','',{'@type' => 'backup_output2'})
@i.configure(config_element('ROOT', '', {'@id' => id}, [config_element('buffer', 'tag', hash), secconf]))
@i.register(:write) { |chunk|
chunk_id = chunk.unique_id
raise Fluent::UnrecoverableError, "yay, your #write must fail"
}
@i.secondary.register(:write) { |chunk|
raise Fluent::UnrecoverableError, "yay, your secondary #write must fail"
}
flush_chunks
target = "#{TMP_DIR}/backup/worker0/#{id}/#{@i.dump_unique_id_hex(chunk_id)}.log"
wait_flush(target)
assert_true File.exist?(target)
logs = @i.log.out.logs
assert { logs.any? { |l| l.include?("got unrecoverable error in primary. Skip retry and flush chunk to secondary") } }
assert { logs.any? { |l| l.include?("got an error in secondary for unrecoverable error") } }
end
end
test 'backup chunk with async secondary' do
Fluent::SystemConfig.overwrite_system_config('root_dir' => TMP_DIR) do
id = 'backup_test_with_diff_secondary'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
}
chunk_id = nil
secconf = config_element('secondary','',{'@type' => 'backup_async_output'})
@i.configure(config_element('ROOT', '', {'@id' => id}, [config_element('buffer', 'tag', hash), secconf]))
@i.register(:write) { |chunk|
chunk_id = chunk.unique_id
raise Fluent::UnrecoverableError, "yay, your #write must fail"
}
flush_chunks
target = "#{TMP_DIR}/backup/worker0/#{id}/#{@i.dump_unique_id_hex(chunk_id)}.log"
wait_flush(target)
assert_true File.exist?(target)
logs = @i.log.out.logs
assert { logs.any? { |l| l.include?("got unrecoverable error in primary and secondary is async output") } }
end
end
test 'chunk is thrown away when disable_chunk_backup is true' do
Fluent::SystemConfig.overwrite_system_config('root_dir' => TMP_DIR) do
id = 'backup_test'
hash = {
'flush_interval' => 1,
'flush_thread_burst_interval' => 0.1,
'disable_chunk_backup' => true
}
chunk_id = nil
@i.configure(config_element('ROOT', '', {'@id' => id}, [config_element('buffer', 'tag', hash)]))
@i.register(:write) { |chunk|
chunk_id = chunk.unique_id
raise Fluent::UnrecoverableError, "yay, your #write must fail"
}
flush_chunks
target = "#{TMP_DIR}/backup/worker0/#{id}/#{@i.dump_unique_id_hex(chunk_id)}.log"
assert_false File.exist?(target)
logs = @i.log.out.logs
assert { logs.any? { |l| l.include?("disable_chunk_backup is true") } }
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_msgpack.rb | test/plugin/test_parser_msgpack.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser_msgpack'
# Tests for Fluent::Plugin::MessagePackParser: both #parse (string input)
# and #parse_io (IO input). Inputs are raw msgpack byte strings; non-Hash
# top-level objects are expected to yield a nil record.
class MessagePackParserTest < ::Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end
  def create_driver(conf)
    Fluent::Test::Driver::Parser.new(Fluent::Plugin::MessagePackParser).configure(conf)
  end
  sub_test_case "simple setting" do
    data(
      "Normal Hash",
      {
        # {message: "Hello msgpack", num: 100}.to_msgpack
        input: "\x82\xA7message\xADHello msgpack\xA3numd",
        expected: [{"message" => "Hello msgpack", "num" => 100}]
      },
      keep: true
    )
    data(
      "Array of multiple Hash",
      {
        input: "\x92\x81\xA7message\xA3foo\x81\xA7message\xA3bar",
        expected: [{"message"=>"foo"}, {"message"=>"bar"}]
      },
      keep: true
    )
    data(
      "String",
      {
        # "Hello msgpack".to_msgpack
        input: "\xADHello msgpack",
        expected: [nil]
      },
      keep: true
    )
    data(
      "Array of String",
      {
        # ["foo", "bar"].to_msgpack
        input: "\x92\xA3foo\xA3bar",
        expected: [nil, nil]
      },
      keep: true
    )
    data(
      "Array of String and Hash",
      {
        # ["foo", {message: "bar"}].to_msgpack
        input: "\x92\xA3foo\x81\xA7message\xA3bar",
        expected: [nil, {"message"=>"bar"}]
      },
      keep: true
    )
    def test_parse(data)
      parsed_records = []
      create_driver("").instance.parse(data[:input]) do |time, record|
        parsed_records.append(record)
      end
      assert_equal(data[:expected], parsed_records)
    end
    def test_parse_io(data)
      parsed_records = []
      StringIO.open(data[:input]) do |io|
        create_driver("").instance.parse_io(io) do |time, record|
          parsed_records.append(record)
        end
      end
      assert_equal(data[:expected], parsed_records)
    end
  end
  # This becomes NoMethodError if a non-Hash object is passed to convert_values.
  # https://github.com/fluent/fluentd/issues/4100
  sub_test_case "execute_convert_values with null_empty_string" do
    data(
      "Normal hash",
      {
        # {message: "foo", empty: ""}.to_msgpack
        input: "\x82\xA7message\xA3foo\xA5empty\xA0",
        expected: [{"message" => "foo", "empty" => nil}]
      },
      keep: true
    )
    data(
      "Array of multiple Hash",
      {
        # [{message: "foo", empty: ""}, {message: "bar", empty: ""}].to_msgpack
        input: "\x92\x82\xA7message\xA3foo\xA5empty\xA0\x82\xA7message\xA3bar\xA5empty\xA0",
        expected: [{"message"=>"foo", "empty" => nil}, {"message"=>"bar", "empty" => nil}]
      },
      keep: true
    )
    data(
      "String",
      {
        # "Hello msgpack".to_msgpack
        input: "\xADHello msgpack",
        expected: [nil]
      },
      keep: true
    )
    def test_parse(data)
      parsed_records = []
      create_driver("null_empty_string").instance.parse(data[:input]) do |time, record|
        parsed_records.append(record)
      end
      assert_equal(data[:expected], parsed_records)
    end
    def test_parse_io(data)
      parsed_records = []
      StringIO.open(data[:input]) do |io|
        create_driver("null_empty_string").instance.parse_io(io) do |time, record|
          parsed_records.append(record)
        end
      end
      assert_equal(data[:expected], parsed_records)
    end
  end
end | ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_file.rb | test/plugin/test_out_file.rb | require_relative '../helper'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_file'
require 'fileutils'
require 'time'
require 'timecop'
require 'zlib'
require 'zstd-ruby'
require 'fluent/file_wrapper'
class FileOutputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
FileUtils.rm_rf(TMP_DIR)
FileUtils.mkdir_p(TMP_DIR)
@default_newline = if Fluent.windows?
"\r\n"
else
"\n"
end
end
TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/../tmp/out_file#{ENV['TEST_ENV_NUMBER']}")
CONFIG = %[
path #{TMP_DIR}/out_file_test
compress gz
utc
<buffer>
timekey_use_utc true
</buffer>
]
def create_driver(conf = CONFIG, opts = {})
Fluent::Test::Driver::Output.new(Fluent::Plugin::FileOutput, opts: opts).configure(conf)
end
sub_test_case 'configuration' do
test 'basic configuration' do
d = create_driver %[
path test_path
compress gz
]
assert_equal 'test_path', d.instance.path
assert_equal :gz, d.instance.compress
assert_equal :gzip, d.instance.instance_eval{ @compress_method }
end
test 'using root_dir for buffer path' do
system_conf_opts = {'root_dir' => File.join(TMP_DIR, 'testrootdir')}
buf_conf = config_element('buffer', '', {'flush_interval' => '1s'})
conf = config_element('match', '**', {'@id' => 'myout', 'path' => 'test_path', 'append' => 'true'}, [buf_conf])
d = create_driver(conf, system_conf_opts)
assert_equal 'test_path', d.instance.path
assert d.instance.append
assert d.instance.buffer.respond_to?(:path) # file buffer
assert_equal 1, d.instance.buffer_config.flush_interval
assert_equal File.join(TMP_DIR, 'testrootdir', 'worker0', 'myout'), d.instance.plugin_root_dir
buffer_path_under_root_dir = File.join(TMP_DIR, 'testrootdir', 'worker0', 'myout', 'buffer', 'buffer.*.log')
assert_equal buffer_path_under_root_dir, d.instance.buffer.path
end
test 'path should be writable' do
assert_raise(Fluent::ConfigError.new("'path' parameter is required")) do
create_driver ""
end
assert_nothing_raised do
create_driver %[path #{TMP_DIR}/test_path]
end
assert_nothing_raised do
FileUtils.mkdir_p("#{TMP_DIR}/test_dir")
File.chmod(0777, "#{TMP_DIR}/test_dir")
create_driver %[path #{TMP_DIR}/test_dir/foo/bar/baz]
end
if Process.uid.nonzero?
assert_raise(Fluent::ConfigError) do
FileUtils.mkdir_p("#{TMP_DIR}/test_dir")
File.chmod(0555, "#{TMP_DIR}/test_dir")
create_driver %[path #{TMP_DIR}/test_dir/foo/bar/baz]
end
end
end
test 'default timezone is localtime' do
d = create_driver(%[path #{TMP_DIR}/out_file_test])
time = event_time("2011-01-02 13:14:15 UTC")
with_timezone(Fluent.windows? ? 'NST-8' : 'Asia/Taipei') do
d.run(default_tag: 'test') do
d.feed(time, {"a"=>1})
end
end
assert_equal 1, d.formatted.size
assert_equal %[2011-01-02T21:14:15+08:00\ttest\t{"a":1}#{@default_newline}], d.formatted[0]
end
test 'no configuration error raised for basic configuration using "*" (v0.12 style)' do
conf = config_element('match', '**', {
'path' => "#{TMP_DIR}/test_out.*.log",
'time_slice_format' => '%Y%m%d',
})
assert_nothing_raised do
create_driver(conf)
end
end
if Process.uid.nonzero?
test 'configuration error raised if specified directory via template is not writable' do
Timecop.freeze(Time.parse("2016-10-04 21:33:27 UTC")) do
conf = config_element('match', '**', {
'path' => "#{TMP_DIR}/prohibited/${tag}/file.%Y%m%d.log",
}, [ config_element('buffer', 'time,tag', {'timekey' => 86400, 'timekey_zone' => '+0000'}) ])
FileUtils.mkdir_p("#{TMP_DIR}/prohibited")
File.chmod(0555, "#{TMP_DIR}/prohibited")
assert_raise Fluent::ConfigError.new("out_file: `#{TMP_DIR}/prohibited/a/file.20161004.log_**.log` is not writable") do
create_driver(conf)
end
end
end
end
test 'configuration using inject/format/buffer sections fully' do
conf = config_element('match', '**', {
'path' => "#{TMP_DIR}/${tag}/${type}/conf_test.%Y%m%d.%H%M.log",
'add_path_suffix' => 'false',
'append' => "true",
'symlink_path' => "#{TMP_DIR}/${tag}/conf_test.current.log",
'compress' => 'gzip',
'recompress' => 'true',
}, [
config_element('inject', '', {
'hostname_key' => 'hostname',
'hostname' => 'testing.local',
'tag_key' => 'tag',
'time_key' => 'time',
'time_type' => 'string',
'time_format' => '%Y/%m/%d %H:%M:%S %z',
'timezone' => '+0900',
}),
config_element('format', '', {
'@type' => 'out_file',
'include_tag' => 'true',
'include_time' => 'true',
'delimiter' => 'COMMA',
'time_type' => 'string',
'time_format' => '%Y-%m-%d %H:%M:%S %z',
'utc' => 'true',
}),
config_element('buffer', 'time,tag,type', {
'@type' => 'file',
'timekey' => '15m',
'timekey_wait' => '5s',
'timekey_zone' => '+0000',
'path' => "#{TMP_DIR}/buf_conf_test",
'chunk_limit_size' => '50m',
'total_limit_size' => '1g',
'compress' => 'gzip',
}),
])
assert_nothing_raised do
create_driver(conf)
end
end
test 'configured as secondary with primary using chunk_key_tag and not using chunk_key_time' do
require 'fluent/plugin/out_null'
conf = config_element('match', '**', {
}, [
config_element('buffer', 'tag', {
}),
config_element('secondary', '', {
'@type' => 'file',
'path' => "#{TMP_DIR}/testing_to_dump_by_out_file",
}),
])
assert_nothing_raised do
Fluent::Test::Driver::Output.new(Fluent::Plugin::NullOutput).configure(conf)
end
end
test 'warning for symlink_path not including correct placeholders corresponding to chunk keys' do
omit "Windows doesn't support symlink" if Fluent.windows?
conf = config_element('match', '**', {
'path' => "#{TMP_DIR}/${tag}/${key1}/${key2}/conf_test.%Y%m%d.%H%M.log",
'symlink_path' => "#{TMP_DIR}/conf_test.current.log",
}, [
config_element('buffer', 'time,tag,key1,key2', {
'@type' => 'file',
'timekey' => '1d',
'path' => "#{TMP_DIR}/buf_conf_test",
}),
])
assert_nothing_raised do
d = create_driver(conf)
assert do
d.logs.count { |log| log.include?("multiple chunks are competing for a single symlink_path") } == 2
end
end
end
end
sub_test_case 'fully configured output' do
setup do
Timecop.freeze(Time.parse("2016-10-03 23:58:00 UTC"))
conf = config_element('match', '**', {
'path' => "#{TMP_DIR}/${tag}/${type}/full.%Y%m%d.%H%M.log",
'add_path_suffix' => 'false',
'append' => "true",
'symlink_path' => "#{TMP_DIR}/full.current.log",
'compress' => 'gzip',
'recompress' => 'true',
}, [
config_element('inject', '', {
'hostname_key' => 'hostname',
'hostname' => 'testing.local',
'tag_key' => 'tag',
'time_key' => 'time',
'time_type' => 'string',
'time_format' => '%Y/%m/%d %H:%M:%S %z',
'timezone' => '+0900',
}),
config_element('format', '', {
'@type' => 'out_file',
'include_tag' => 'true',
'include_time' => 'true',
'delimiter' => 'COMMA',
'time_type' => 'string',
'time_format' => '%Y-%m-%d %H:%M:%S %z',
'utc' => 'true',
}),
config_element('buffer', 'time,tag,type', {
'@type' => 'file',
'timekey' => '15m',
'timekey_wait' => '5s',
'timekey_zone' => '+0000',
'path' => "#{TMP_DIR}/buf_full",
'chunk_limit_size' => '50m',
'total_limit_size' => '1g',
'compress' => 'gzip',
}),
])
@d = create_driver(conf)
end
teardown do
FileUtils.rm_rf("#{TMP_DIR}/buf_full")
FileUtils.rm_rf("#{TMP_DIR}/my.data")
FileUtils.rm_rf("#{TMP_DIR}/your.data")
FileUtils.rm_rf("#{TMP_DIR}/full.current.log")
Timecop.return
end
test 'can format/write data correctly' do
d = @d
assert_equal 50*1024*1024, d.instance.buffer.chunk_limit_size
assert_equal 1*1024*1024*1024, d.instance.buffer.total_limit_size
assert !(File.symlink?("#{TMP_DIR}/full.current.log"))
t1 = event_time("2016-10-03 23:58:09 UTC")
t2 = event_time("2016-10-03 23:59:33 UTC")
t3 = event_time("2016-10-03 23:59:57 UTC")
t4 = event_time("2016-10-04 00:00:17 UTC")
t5 = event_time("2016-10-04 00:01:59 UTC")
Timecop.freeze(Time.parse("2016-10-03 23:58:30 UTC"))
d.run(start: true, flush: false, shutdown: false) do
d.feed('my.data', t1, {"type" => "a", "message" => "data raw content"})
d.feed('my.data', t2, {"type" => "a", "message" => "data raw content"})
d.feed('your.data', t3, {"type" => "a", "message" => "data raw content"})
end
assert_equal 3, d.formatted.size
assert Dir.exist?("#{TMP_DIR}/buf_full")
assert !(Dir.exist?("#{TMP_DIR}/my.data/a"))
assert !(Dir.exist?("#{TMP_DIR}/your.data/a"))
buffer_files = Dir.entries("#{TMP_DIR}/buf_full").reject{|e| e =~ /^\.+$/ }
assert_equal 2, buffer_files.count{|n| n.end_with?('.meta') }
assert_equal 2, buffer_files.count{|n| !n.end_with?('.meta') }
m1 = d.instance.metadata('my.data', t1, {"type" => "a"})
m2 = d.instance.metadata('your.data', t3, {"type" => "a"})
assert_equal 2, d.instance.buffer.stage.size
b1_path = d.instance.buffer.stage[m1].path
b1_size = File.lstat(b1_path).size
unless Fluent.windows?
assert File.symlink?("#{TMP_DIR}/full.current.log")
assert_equal d.instance.buffer.stage[m2].path, File.readlink("#{TMP_DIR}/full.current.log")
end
Timecop.freeze(Time.parse("2016-10-04 00:00:06 UTC"))
d.run(start: false, flush: true, shutdown: true) do
d.feed('my.data', t4, {"type" => "a", "message" => "data raw content"})
d.feed('your.data', t5, {"type" => "a", "message" => "data raw content"})
end
assert Dir.exist?("#{TMP_DIR}/buf_full")
assert Dir.exist?("#{TMP_DIR}/my.data/a")
assert Dir.exist?("#{TMP_DIR}/your.data/a")
buffer_files = Dir.entries("#{TMP_DIR}/buf_full").reject{|e| e =~ /^\.+$/ }
assert_equal 0, buffer_files.size
assert File.exist?("#{TMP_DIR}/my.data/a/full.20161003.2345.log.gz")
assert File.exist?("#{TMP_DIR}/my.data/a/full.20161004.0000.log.gz")
assert File.exist?("#{TMP_DIR}/your.data/a/full.20161003.2345.log.gz")
assert File.exist?("#{TMP_DIR}/your.data/a/full.20161004.0000.log.gz")
assert{ File.lstat("#{TMP_DIR}/my.data/a/full.20161003.2345.log.gz").size < b1_size } # recompress
assert_equal 5, d.formatted.size
r1 = %!2016-10-03 23:58:09 +0000,my.data,{"type":"a","message":"data raw content","hostname":"testing.local","tag":"my.data","time":"2016/10/04 08:58:09 +0900"}#{@default_newline}!
r2 = %!2016-10-03 23:59:33 +0000,my.data,{"type":"a","message":"data raw content","hostname":"testing.local","tag":"my.data","time":"2016/10/04 08:59:33 +0900"}#{@default_newline}!
r3 = %!2016-10-03 23:59:57 +0000,your.data,{"type":"a","message":"data raw content","hostname":"testing.local","tag":"your.data","time":"2016/10/04 08:59:57 +0900"}#{@default_newline}!
r4 = %!2016-10-04 00:00:17 +0000,my.data,{"type":"a","message":"data raw content","hostname":"testing.local","tag":"my.data","time":"2016/10/04 09:00:17 +0900"}#{@default_newline}!
r5 = %!2016-10-04 00:01:59 +0000,your.data,{"type":"a","message":"data raw content","hostname":"testing.local","tag":"your.data","time":"2016/10/04 09:01:59 +0900"}#{@default_newline}!
assert_equal r1, d.formatted[0]
assert_equal r2, d.formatted[1]
assert_equal r3, d.formatted[2]
assert_equal r4, d.formatted[3]
assert_equal r5, d.formatted[4]
read_gunzip = ->(path){
File.open(path, 'rb'){ |fio|
Zlib::GzipReader.new(StringIO.new(fio.read)).read
}
}
assert_equal r1 + r2, read_gunzip.call("#{TMP_DIR}/my.data/a/full.20161003.2345.log.gz")
assert_equal r3, read_gunzip.call("#{TMP_DIR}/your.data/a/full.20161003.2345.log.gz")
assert_equal r4, read_gunzip.call("#{TMP_DIR}/my.data/a/full.20161004.0000.log.gz")
assert_equal r5, read_gunzip.call("#{TMP_DIR}/your.data/a/full.20161004.0000.log.gz")
end
end
sub_test_case 'format' do
test 'timezone UTC specified' do
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
d.run(default_tag: 'test') do
d.feed(time, {"a"=>1})
d.feed(time, {"a"=>2})
end
assert_equal 2, d.formatted.size
assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}#{@default_newline}], d.formatted[0]
assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":2}#{@default_newline}], d.formatted[1]
end
test 'time formatted with specified timezone, using area name' do
d = create_driver %[
path #{TMP_DIR}/out_file_test
timezone Asia/Taipei
]
time = event_time("2011-01-02 13:14:15 UTC")
d.run(default_tag: 'test') do
d.feed(time, {"a"=>1})
end
assert_equal 1, d.formatted.size
assert_equal %[2011-01-02T21:14:15+08:00\ttest\t{"a":1}#{@default_newline}], d.formatted[0]
end
test 'time formatted with specified timezone, using offset' do
d = create_driver %[
path #{TMP_DIR}/out_file_test
timezone -03:30
]
time = event_time("2011-01-02 13:14:15 UTC")
d.run(default_tag: 'test') do
d.feed(time, {"a"=>1})
end
assert_equal 1, d.formatted.size
assert_equal %[2011-01-02T09:44:15-03:30\ttest\t{"a":1}#{@default_newline}], d.formatted[0]
end
test 'configuration error raised for invalid timezone' do
assert_raise(Fluent::ConfigError) do
create_driver %[
path #{TMP_DIR}/out_file_test
timezone Invalid/Invalid
]
end
end
end
def check_zipped_result(path, expect, type: :gzip)
# Zlib::GzipReader has a bug of concatenated file: https://bugs.ruby-lang.org/issues/9790
# Following code from https://www.ruby-forum.com/topic/971591#979520
result = ''
if type == :gzip || type == :gz
File.open(path, "rb") { |io|
loop do
gzr = Zlib::GzipReader.new(StringIO.new(io.read))
result << gzr.read
unused = gzr.unused
gzr.finish
break if unused.nil?
io.pos -= unused.length
end
}
elsif type == :zstd
File.open(path, "rb") { |io|
loop do
reader = Zstd::StreamReader.new(StringIO.new(io.read))
result << reader.read(1024)
break if io.eof?
end
}
else
raise "Invalid compression type to check"
end
assert_equal expect, result
end
def check_result(path, expect)
result = File.read(path, mode: "rb")
assert_equal expect, result
end
sub_test_case 'write' do
test 'basic case with gz' do
d = create_driver
assert_false File.exist?("#{TMP_DIR}/out_file_test.20110102_0.log.gz")
time = event_time("2011-01-02 13:14:15 UTC")
d.run(default_tag: 'test') do
d.feed(time, {"a"=>1})
d.feed(time, {"a"=>2})
end
assert File.exist?("#{TMP_DIR}/out_file_test.20110102_0.log.gz")
check_zipped_result("#{TMP_DIR}/out_file_test.20110102_0.log.gz", %[2011-01-02T13:14:15Z\ttest\t{"a":1}#{@default_newline}] + %[2011-01-02T13:14:15Z\ttest\t{"a":2}#{@default_newline}])
end
test 'write with zstd compression' do
d = create_driver %[
path #{TMP_DIR}/out_file_test
compress zstd
utc
<buffer>
timekey_use_utc true
</buffer>
]
assert_false File.exist?("#{TMP_DIR}/out_file_test.20110102_0.log.zstd")
time = event_time("2011-01-02 13:14:15 UTC")
d.run(default_tag: 'test') do
d.feed(time, {"a"=>1})
d.feed(time, {"a"=>2})
end
assert File.exist?("#{TMP_DIR}/out_file_test.20110102_0.log.zstd")
check_zipped_result("#{TMP_DIR}/out_file_test.20110102_0.log.zstd", %[2011-01-02T13:14:15Z\ttest\t{"a":1}#{@default_newline}] + %[2011-01-02T13:14:15Z\ttest\t{"a":2}#{@default_newline}], type: :zstd)
end
end
# Verifies file_permission / dir_permission from the <system> section are
# applied to created files and directories. Skipped on Windows (NTFS has no
# UNIX-style permission bits).
sub_test_case 'file/directory permissions' do
  TMP_DIR_WITH_SYSTEM = File.expand_path(File.dirname(__FILE__) + "/../tmp/out_file_system#{ENV['TEST_ENV_NUMBER']}")
  # 0750 interprets as "488". "488".to_i(8) # => 4. So, it makes wrong permission. Umm....
  # (i.e. these constants are deliberately written so that their string
  # interpolation below and the [-3, 3].to_i comparison stay consistent)
  OVERRIDE_DIR_PERMISSION = 750
  OVERRIDE_FILE_PERMISSION = 0620
  CONFIG_WITH_SYSTEM = %[
    path #{TMP_DIR_WITH_SYSTEM}/out_file_test
    compress gz
    utc
    <buffer>
      timekey_use_utc true
    </buffer>
    <system>
      file_permission #{OVERRIDE_FILE_PERMISSION}
      dir_permission #{OVERRIDE_DIR_PERMISSION}
    </system>
  ]

  setup do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
    FileUtils.rm_rf(TMP_DIR_WITH_SYSTEM)
  end

  # Extracts the <system> element from a raw config string.
  def parse_system(text)
    basepath = File.expand_path(File.dirname(__FILE__) + '/../../')
    Fluent::Config.parse(text, '(test)', basepath, true).elements.find { |e| e.name == 'system' }
  end

  test 'write to file with permission specifications' do
    system_conf = parse_system(CONFIG_WITH_SYSTEM)
    sc = Fluent::SystemConfig.new(system_conf)
    Fluent::Engine.init(sc)
    d = create_driver CONFIG_WITH_SYSTEM
    assert_false File.exist?("#{TMP_DIR_WITH_SYSTEM}/out_file_test.20110102_0.log.gz")
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a"=>1})
      d.feed(time, {"a"=>2})
    end
    assert File.exist?("#{TMP_DIR_WITH_SYSTEM}/out_file_test.20110102_0.log.gz")
    check_zipped_result("#{TMP_DIR_WITH_SYSTEM}/out_file_test.20110102_0.log.gz", %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] + %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n])
    # Compare the last three octal digits of the created modes against the
    # overrides configured above.
    dir_mode = "%o" % File::stat(TMP_DIR_WITH_SYSTEM).mode
    assert_equal(OVERRIDE_DIR_PERMISSION, dir_mode[-3, 3].to_i)
    file_mode = "%o" % File::stat("#{TMP_DIR_WITH_SYSTEM}/out_file_test.20110102_0.log.gz").mode
    assert_equal(OVERRIDE_FILE_PERMISSION, file_mode[-3, 3].to_i)
  end
end
# Output format variations selected via the `format` parameter.
sub_test_case 'format specified' do
  test 'json' do
    d = create_driver [CONFIG, 'format json', 'include_time_key true', 'time_as_epoch'].join("\n")
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a"=>1})
      d.feed(time, {"a"=>2})
    end
    path = d.instance.last_written_path
    # time_as_epoch writes the injected time key as an integer epoch.
    check_zipped_result(path, %[#{JSON.generate({"a" => 1, 'time' => time.to_i})}#{@default_newline}] + %[#{JSON.generate({"a" => 2, 'time' => time.to_i})}#{@default_newline}])
  end

  test 'ltsv' do
    d = create_driver [CONFIG, 'format ltsv', 'include_time_key true'].join("\n")
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a"=>1})
      d.feed(time, {"a"=>2})
    end
    path = d.instance.last_written_path
    check_zipped_result(path, %[a:1\ttime:2011-01-02T13:14:15Z#{@default_newline}] + %[a:2\ttime:2011-01-02T13:14:15Z#{@default_newline}])
  end

  test 'single_value' do
    # single_value emits only the value of message_key per line.
    d = create_driver [CONFIG, 'format single_value', 'message_key a'].join("\n")
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a"=>1})
      d.feed(time, {"a"=>2})
    end
    path = d.instance.last_written_path
    check_zipped_result(path, %[1#{@default_newline}] + %[2#{@default_newline}])
  end
end
# Without `append`, each flush within the same time slice creates a new file
# with an incrementing index suffix (_0, _1, _2, ...) instead of overwriting.
test 'path with index number' do
  time = event_time("2011-01-02 13:14:15 UTC")
  formatted_lines = %[2011-01-02T13:14:15Z\ttest\t{"a":1}#{@default_newline}] + %[2011-01-02T13:14:15Z\ttest\t{"a":2}#{@default_newline}]
  # Creates a fresh driver, feeds two records, and returns the written path.
  write_once = ->(){
    d = create_driver
    d.run(default_tag: 'test'){
      d.feed(time, {"a"=>1})
      d.feed(time, {"a"=>2})
    }
    d.instance.last_written_path
  }
  assert !File.exist?("#{TMP_DIR}/out_file_test.20110102_0.log.gz")
  path = write_once.call
  assert_equal "#{TMP_DIR}/out_file_test.20110102_0.log.gz", path
  check_zipped_result(path, formatted_lines)
  assert_equal 1, Dir.glob("#{TMP_DIR}/out_file_test.*").size
  path = write_once.call
  assert_equal "#{TMP_DIR}/out_file_test.20110102_1.log.gz", path
  check_zipped_result(path, formatted_lines)
  assert_equal 2, Dir.glob("#{TMP_DIR}/out_file_test.*").size
  path = write_once.call
  assert_equal "#{TMP_DIR}/out_file_test.20110102_2.log.gz", path
  check_zipped_result(path, formatted_lines)
  assert_equal 3, Dir.glob("#{TMP_DIR}/out_file_test.*").size
end
data(
  "without compression" => "text",
  "with gzip compression" => "gz",
  "with zstd compression" => "zstd"
)
# With `append true`, repeated runs in the same time slice append to a single
# un-indexed file (no _0/_1 suffix) for each compression codec.
test 'append' do |compression|
  time = event_time("2011-01-02 13:14:15 UTC")
  formatted_lines = %[2011-01-02T13:14:15Z\ttest\t{"a":1}#{@default_newline}] + %[2011-01-02T13:14:15Z\ttest\t{"a":2}#{@default_newline}]
  write_once = ->(){
    config = %[
      path #{TMP_DIR}/out_file_test
      utc
      append true
      <buffer>
        timekey_use_utc true
      </buffer>
    ]
    # BUG FIX: the data values are strings, so the previous comparison against
    # the symbol :text was always true and `compress text` was appended even
    # for the "without compression" case. Compare against the string, matching
    # the checks below (text is the plugin default, so behavior is unchanged).
    if compression != "text"
      config << " compress #{compression}"
    end
    d = create_driver(config)
    d.run(default_tag: 'test'){
      d.feed(time, {"a"=>1})
      d.feed(time, {"a"=>2})
    }
    d.instance.last_written_path
  }
  log_file_name = "out_file_test.20110102.log"
  if compression != "text"
    log_file_name << ".#{compression}"
  end
  # Each run must append: after i runs the file holds i copies of the lines.
  1.upto(3) do |i|
    path = write_once.call
    assert_equal "#{TMP_DIR}/#{log_file_name}", path
    expect = formatted_lines * i
    if compression != "text"
      check_zipped_result(path, expect, type: compression.to_sym)
    else
      check_result(path, expect)
    end
  end
end
# Append behavior with a non-UTC local timezone (JST, +0900): the time slice
# in the filename follows the configured timekey_zone.
test 'append when JST' do
  with_timezone(Fluent.windows? ? "JST-9" : "Asia/Tokyo") do
    time = event_time("2011-01-02 03:14:15+09:00")
    formatted_lines = %[2011-01-02T03:14:15+09:00\ttest\t{"a":1}#{@default_newline}] + %[2011-01-02T03:14:15+09:00\ttest\t{"a":2}#{@default_newline}]
    write_once = ->(){
      d = create_driver %[
        path #{TMP_DIR}/out_file_test
        compress gz
        append true
        <buffer>
          timekey_use_utc false
          timekey_zone Asia/Tokyo
        </buffer>
      ]
      d.run(default_tag: 'test'){
        d.feed(time, {"a"=>1})
        d.feed(time, {"a"=>2})
      }
      d.instance.last_written_path
    }
    # Same file is appended to on every run.
    path = write_once.call
    assert_equal "#{TMP_DIR}/out_file_test.20110102.log.gz", path
    check_zipped_result(path, formatted_lines)
    path = write_once.call
    assert_equal "#{TMP_DIR}/out_file_test.20110102.log.gz", path
    check_zipped_result(path, formatted_lines * 2)
    path = write_once.call
    assert_equal "#{TMP_DIR}/out_file_test.20110102.log.gz", path
    check_zipped_result(path, formatted_lines * 3)
  end
end
# The date in the filename follows timekey_zone (+0900), not the process
# timezone (+0200).
test 'append when UTC-02 but timekey_zone is +0900' do
  with_timezone("UTC-02") do # +0200
    time = event_time("2011-01-02 17:14:15+02:00")
    formatted_lines = %[2011-01-02T17:14:15+02:00\ttest\t{"a":1}#{@default_newline}] + %[2011-01-02T17:14:15+02:00\ttest\t{"a":2}#{@default_newline}]
    write_once = ->(){
      d = create_driver %[
        path #{TMP_DIR}/out_file_test
        compress gz
        append true
        <buffer>
          timekey_use_utc false
          timekey_zone +0900
        </buffer>
      ]
      d.run(default_tag: 'test'){
        d.feed(time, {"a"=>1})
        d.feed(time, {"a"=>2})
      }
      d.instance.last_written_path
    }
    path = write_once.call
    # 2011-01-02 17:14:15+02:00 is 2011-01-03 00:14:15 in +0900, so the
    # time slice (and file date) is 20110103.
    assert_equal "#{TMP_DIR}/out_file_test.20110103.log.gz", path
    check_zipped_result(path, formatted_lines)
    path = write_once.call
    assert_equal "#{TMP_DIR}/out_file_test.20110103.log.gz", path
    check_zipped_result(path, formatted_lines * 2)
    path = write_once.call
    assert_equal "#{TMP_DIR}/out_file_test.20110103.log.gz", path
    check_zipped_result(path, formatted_lines * 3)
  end
end
# The ${chunk_id} placeholder in `path` expands to the chunk's unique id;
# the written filename must embed an id of the expected hex length.
test '${chunk_id}' do
  time = event_time("2011-01-02 13:14:15 UTC")
  write_once = ->(){
    d = create_driver %[
      path #{TMP_DIR}/out_file_chunk_id_${chunk_id}
      utc
      append true
      <buffer>
        timekey_use_utc true
      </buffer>
    ]
    d.run(default_tag: 'test'){
      d.feed(time, {"a"=>1})
      d.feed(time, {"a"=>2})
    }
    d.instance.last_written_path
  }
  path = write_once.call
  # NOTE(review): the character class is followed by `.*`, so this matches one
  # id character plus anything up to ".20110102.log"; likely intended as
  # `[-_.@a-zA-Z0-9]+` — confirm. The size assertion below still holds because
  # the capture spans exactly the id portion of the filename.
  if File.basename(path) =~ /out_file_chunk_id_([-_.@a-zA-Z0-9].*).20110102.log/
    unique_id = Fluent::UniqueId.hex(Fluent::UniqueId.generate)
    assert_equal unique_id.size, $1.size, "chunk_id size is mismatched"
  else
    flunk "chunk_id is not included in the path"
  end
end
SYMLINK_PATH = File.expand_path("#{TMP_DIR}/current")

# symlink_path keeps a symlink pointing at the currently-staged buffer chunk.
# All of these are skipped on Windows (no symlink support).
sub_test_case 'symlink' do
  test 'static symlink' do
    omit "Windows doesn't support symlink" if Fluent.windows?
    conf = CONFIG + %[
      symlink_path #{SYMLINK_PATH}
    ]
    symlink_path = "#{SYMLINK_PATH}"

    d = create_driver(conf)
    begin
      run_and_check(d, symlink_path)
    ensure
      FileUtils.rm_rf(symlink_path)
    end
  end

  test 'symlink with placeholders' do
    omit "Windows doesn't support symlink" if Fluent.windows?
    # ${tag} in symlink_path is expanded per buffer metadata.
    conf = %[
      path #{TMP_DIR}/${tag}/out_file_test
      symlink_path #{SYMLINK_PATH}/foo/${tag}
      <buffer tag,time>
      </buffer>
    ]
    symlink_path = "#{SYMLINK_PATH}/foo/tag"

    d = create_driver(conf)
    begin
      run_and_check(d, symlink_path)
    ensure
      FileUtils.rm_rf(symlink_path)
    end
  end

  test 'relative symlink' do
    omit "Windows doesn't support symlinks" if Fluent.windows?
    conf = CONFIG + %[
      symlink_path #{SYMLINK_PATH}
      symlink_path_use_relative true
    ]
    symlink_path = "#{SYMLINK_PATH}"

    d = create_driver(conf)
    begin
      run_and_check(d, symlink_path, relative_symlink=true)
    ensure
      FileUtils.rm_rf(symlink_path)
    end
  end

  # Feeds two events (in distinct time slices) and checks that the symlink
  # exists after each feed and finally points at the staged chunk's path
  # (absolute, or relative to the symlink's directory).
  def run_and_check(d, symlink_path, relative_symlink=false)
    d.run(default_tag: 'tag') do
      es = Fluent::OneEventStream.new(event_time("2011-01-02 13:14:15 UTC"), {"a"=>1})
      d.feed(es)
      assert File.symlink?(symlink_path)
      assert File.exist?(symlink_path) # This checks dest of symlink exists or not.

      es = Fluent::OneEventStream.new(event_time("2011-01-03 14:15:16 UTC"), {"a"=>2})
      d.feed(es)
      assert File.symlink?(symlink_path)
      assert File.exist?(symlink_path)

      meta = d.instance.metadata('tag', event_time("2011-01-03 14:15:16 UTC"), {})
      if relative_symlink
        target_path = d.instance.buffer.instance_eval{ @stage[meta].path }
        link_target = File.readlink(symlink_path)
        expected_path = Pathname.new(target_path).relative_path_from(Pathname.new(File.dirname(symlink_path))).to_s
        assert_equal expected_path, link_target
      else
        assert_equal d.instance.buffer.instance_eval{ @stage[meta].path }, File.readlink(symlink_path)
      end
    end
  end
end
# Path generation: index suffix placement with/without `append` and with an
# explicit '*' placeholder in the configured path.
sub_test_case 'path' do
  test 'normal' do
    d = create_driver(%[
      path #{TMP_DIR}/out_file_test
      time_slice_format %Y-%m-%d-%H
      utc true
    ])
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a"=>1})
    end
    path = d.instance.last_written_path
    # No '*': time slice and index are appended before the .log extension.
    assert_equal "#{TMP_DIR}/out_file_test.2011-01-02-13_0.log", path
  end

  test 'normal with append' do
    d = create_driver(%[
      path #{TMP_DIR}/out_file_test
      time_slice_format %Y-%m-%d-%H
      utc true
      append true
    ])
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a"=>1})
    end
    path = d.instance.last_written_path
    # append mode drops the index suffix.
    assert_equal "#{TMP_DIR}/out_file_test.2011-01-02-13.log", path
  end

  test '*' do
    d = create_driver(%[
      path #{TMP_DIR}/out_file_test.*.txt
      time_slice_format %Y-%m-%d-%H
      utc true
    ])
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a"=>1})
    end
    path = d.instance.last_written_path
    # '*' marks where the time slice (and index) is substituted.
    assert_equal "#{TMP_DIR}/out_file_test.2011-01-02-13_0.txt", path
  end

  test '* with append' do
    d = create_driver(%[
      path #{TMP_DIR}/out_file_test.*.txt
      time_slice_format %Y-%m-%d-%H
      utc true
      append true
    ])
    time = event_time("2011-01-02 13:14:15 UTC")
    d.run(default_tag: 'test') do
      d.feed(time, {"a"=>1})
    end
    path = d.instance.last_written_path
    assert_equal "#{TMP_DIR}/out_file_test.2011-01-02-13.txt", path
  end
end
# #timekey_to_timeformat picks the coarsest strftime format that still
# distinguishes adjacent time slices for a given timekey (in seconds).
sub_test_case '#timekey_to_timeformat' do
  setup do
    @d = create_driver
    @i = @d.instance
  end

  test 'returns empty string for nil' do
    assert_equal '', @i.timekey_to_timeformat(nil)
  end

  test 'returns timestamp string with seconds for timekey smaller than 60' do
    assert_equal '%Y%m%d%H%M%S', @i.timekey_to_timeformat(1)
    assert_equal '%Y%m%d%H%M%S', @i.timekey_to_timeformat(30)
    assert_equal '%Y%m%d%H%M%S', @i.timekey_to_timeformat(59)
  end

  test 'returns timestamp string with minutes for timekey smaller than 3600' do
    assert_equal '%Y%m%d%H%M', @i.timekey_to_timeformat(60)
    assert_equal '%Y%m%d%H%M', @i.timekey_to_timeformat(180)
    assert_equal '%Y%m%d%H%M', @i.timekey_to_timeformat(1800)
    assert_equal '%Y%m%d%H%M', @i.timekey_to_timeformat(3599)
  end

  test 'returns timestamp string with hours for timekey smaller than 86400 (1 day)' do
    assert_equal '%Y%m%d%H', @i.timekey_to_timeformat(3600)
    assert_equal '%Y%m%d%H', @i.timekey_to_timeformat(7200)
    assert_equal '%Y%m%d%H', @i.timekey_to_timeformat(86399)
  end

  test 'returns timestamp string with days for timekey equal or greater than 86400' do
    assert_equal '%Y%m%d', @i.timekey_to_timeformat(86400)
    assert_equal '%Y%m%d', @i.timekey_to_timeformat(1000000)
    assert_equal '%Y%m%d', @i.timekey_to_timeformat(1000000000)
  end
end
# #compression_suffix maps a compression method symbol to a file extension.
sub_test_case '#compression_suffix' do
  setup do
    @i = create_driver.instance
  end

  test 'returns empty string for nil (no compression method specified)' do
    assert_equal '', @i.compression_suffix(nil)
  end

  test 'returns .gz for gzip' do
    assert_equal '.gz', @i.compression_suffix(:gzip)
  end

  test 'returns .zstd for zstd' do
    assert_equal '.zstd', @i.compression_suffix(:zstd)
  end
end
sub_test_case '#generate_path_template' do
setup do
@i = create_driver.instance
end
data(
'day' => [86400, '%Y%m%d', '%Y-%m-%d'],
'hour' => [3600, '%Y%m%d%H', '%Y-%m-%d_%H'],
'minute' => [60, '%Y%m%d%H%M', '%Y-%m-%d_%H%M'],
)
test 'generates path with timestamp placeholder for original path with tailing star with timekey' do |data|
timekey, placeholder, time_slice_format = data
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_apache.rb | test/plugin/test_parser_apache.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser_apache'
class ApacheParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
end
def create_driver(conf = {})
Fluent::Test::Driver::Parser.new(Fluent::Plugin::ApacheParser.new).configure(conf)
end
data('parse' => :parse, 'call' => :call)
def test_call(method_name)
d = create_driver
m = d.instance.method(method_name)
m.call('192.168.0.1 - - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777') { |time, record|
assert_equal(event_time('28/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal({
'user' => '-',
'method' => 'GET',
'code' => '200',
'size' => '777',
'host' => '192.168.0.1',
'path' => '/'
}, record)
}
end
def test_parse_with_keep_time_key
conf = {
'time_format' => "%d/%b/%Y:%H:%M:%S %z",
'keep_time_key' => 'true',
}
d = create_driver(conf)
text = '192.168.0.1 - - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777'
d.instance.parse(text) do |_time, record|
assert_equal "28/Feb/2013:12:00:00 +0900", record['time']
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_debug_agent.rb | test/plugin/test_in_debug_agent.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_debug_agent'
require 'fileutils'
# Tests for Fluent::Plugin::DebugAgentInput: unix_path validation and
# per-worker behavior in multi-worker deployments.
class DebugAgentInputTest < Test::Unit::TestCase
  TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/../tmp/in_debug_agent")

  def setup
    Fluent::Test.setup
    # Start every test from a clean scratch directory for socket paths.
    FileUtils.rm_rf(TMP_DIR)
    FileUtils.mkdir_p(TMP_DIR)
  end

  def create_driver(conf = '')
    Fluent::Test::Driver::Input.new(Fluent::Plugin::DebugAgentInput).configure(conf)
  end

  def test_unix_path_writable
    # A socket path under an existing directory is accepted...
    assert_nothing_raised do
      create_driver %[unix_path #{TMP_DIR}/test_path]
    end

    # ...while one whose parent directory is missing is rejected at configure.
    assert_raise(Fluent::ConfigError) do
      create_driver %[unix_path #{TMP_DIR}/does_not_exist/test_path]
    end
  end

  def test_multi_worker_environment_with_port
    Fluent::SystemConfig.overwrite_system_config('workers' => 4) do
      driver = Fluent::Test::Driver::Input.new(Fluent::Plugin::DebugAgentInput)
      driver.instance.instance_eval { @_fluentd_worker_id = 2 }
      driver.configure('port 24230')

      # TCP mode supports multiple workers; each worker listens on
      # base port + worker id (24230 + 2).
      assert_true driver.instance.multi_workers_ready?
      assert_equal(24232, driver.instance.instance_variable_get(:@port))
    end
  end

  def test_multi_worker_environment_with_unix_path
    Fluent::SystemConfig.overwrite_system_config('workers' => 4) do
      driver = Fluent::Test::Driver::Input.new(Fluent::Plugin::DebugAgentInput)
      driver.instance.instance_eval { @_fluentd_worker_id = 2 }
      driver.configure("unix_path #{TMP_DIR}/test_path")

      # A single unix socket path cannot be shared across workers.
      assert_false driver.instance.multi_workers_ready?
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_formatter_hash.rb | test/plugin/test_formatter_hash.rb | require_relative '../helper'
require 'fluent/test/driver/formatter'
require 'fluent/plugin/formatter_hash'
# Tests for Fluent::Plugin::HashFormatter, which emits Ruby Hash#inspect
# style lines with an optional trailing newline.
class HashFormatterTest < ::Test::Unit::TestCase
  def setup
    @time = event_time
  end

  def tag
    "tag"
  end

  def record
    {'message' => 'awesome', 'greeting' => 'hello'}
  end

  def create_driver(conf = "")
    Fluent::Test::Driver::Formatter.new(Fluent::Plugin::HashFormatter).configure(conf)
  end

  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  def test_format(data)
    newline_conf, newline = data
    driver = create_driver({"newline" => newline_conf})
    formatted = driver.instance.format(tag, @time, record)
    # Normalize hash-rocket spacing (Ruby version dependent) before comparing.
    assert_equal(%Q!{"message"=>"awesome", "greeting"=>"hello"}#{newline}!, formatted.gsub(' => ', '=>').encode(Encoding::UTF_8))
  end

  def test_format_without_newline
    driver = create_driver('add_newline' => false)
    formatted = driver.instance.format(tag, @time, record)
    assert_equal(%Q!{"message"=>"awesome", "greeting"=>"hello"}!, formatted.gsub(' => ', '=>').encode(Encoding::UTF_8))
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_formatter_out_file.rb | test/plugin/test_formatter_out_file.rb | require_relative '../helper'
require 'fluent/test/driver/formatter'
require 'fluent/plugin/formatter_out_file'
# Tests for Fluent::Plugin::OutFileFormatter: "time\ttag\tjson" lines,
# utc/localtime handling, optional time/tag columns, and newline style.
class OutFileFormatterTest < ::Test::Unit::TestCase
  def setup
    @time = event_time
    # out_file uses the platform default newline: CRLF on Windows, LF elsewhere.
    @default_newline = if Fluent.windows?
      "\r\n"
    else
      "\n"
    end
  end

  # Builds a configured formatter driver. Hash configs default to UTC so the
  # expected strings below are timezone-independent unless overridden.
  def create_driver(conf = {})
    d = Fluent::Test::Driver::Formatter.new(Fluent::Plugin::OutFileFormatter)
    case conf
    when Fluent::Config::Element
      d.configure(conf)
    when Hash
      d.configure({'utc' => true}.merge(conf))
    else
      d.configure(conf)
    end
  end

  def tag
    "tag"
  end

  def record
    {'message' => 'awesome'}
  end

  # utc and localtime are mutually exclusive regardless of their values.
  data('both true' => 'true', 'both false' => 'false')
  def test_configured_with_both_of_utc_and_localtime(value)
    assert_raise(Fluent::ConfigError.new("both of utc and localtime are specified, use only one of them")) do
      create_driver({'utc' => value, 'localtime' => value})
    end
  end

  time_i = Time.parse("2016-07-26 21:08:30 -0700").to_i
  data(
    'configured for localtime by localtime' => ['localtime', 'true', time_i, "2016-07-26T21:08:30-07:00"],
    'configured for localtime by utc' => ['utc', 'false', time_i, "2016-07-26T21:08:30-07:00"],
    'configured for utc by localtime' => ['localtime', 'false', time_i, "2016-07-27T04:08:30Z"],
    'configured for utc by utc' => ['utc', 'true', time_i, "2016-07-27T04:08:30Z"],
  )
  def test_configured_with_utc_or_localtime(data)
    key, value, time_i, expected = data
    time = Time.at(time_i)
    begin
      # Pin TZ so the "localtime" expectations are deterministic.
      oldtz, ENV['TZ'] = ENV['TZ'], "UTC+07"
      d = create_driver(config_element('ROOT', '', {key => value}))
      tag = 'test'
      assert_equal "#{expected}\t#{tag}\t#{JSON.generate(record)}#{@default_newline}", d.instance.format(tag, time, record)
    ensure
      ENV['TZ'] = oldtz
    end
  end

  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  def test_format(data)
    newline_conf, newline = data
    d = create_driver({"newline" => newline_conf})
    formatted = d.instance.format(tag, @time, record)
    assert_equal("#{time2str(@time)}\t#{tag}\t#{JSON.generate(record)}#{newline}", formatted)
  end

  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  def test_format_without_time(data)
    newline_conf, newline = data
    d = create_driver('output_time' => 'false', 'newline' => newline_conf)
    formatted = d.instance.format(tag, @time, record)
    assert_equal("#{tag}\t#{JSON.generate(record)}#{newline}", formatted)
  end

  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  def test_format_without_tag(data)
    newline_conf, newline = data
    d = create_driver('output_tag' => 'false', 'newline' => newline_conf)
    formatted = d.instance.format(tag, @time, record)
    assert_equal("#{time2str(@time)}\t#{JSON.generate(record)}#{newline}", formatted)
  end

  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  # BUG FIX: this data-driven test was missing its `(data)` parameter, so
  # `newline_conf, newline = data` failed at run time instead of receiving the
  # injected dataset (compare the sibling tests above).
  def test_format_without_time_and_tag(data)
    newline_conf, newline = data
    d = create_driver('output_tag' => 'false', 'output_time' => 'false', 'newline' => newline_conf)
    formatted = d.instance.format('tag', @time, record)
    assert_equal("#{JSON.generate(record)}#{newline}", formatted)
  end

  data("newline (LF)" => ["lf", "\n"],
       "newline (CRLF)" => ["crlf", "\r\n"])
  def test_format_without_time_and_tag_against_string_literal_configure(data)
    newline_conf, newline = data
    d = create_driver(%[
      utc true
      output_tag false
      output_time false
      newline #{newline_conf}
    ])
    formatted = d.instance.format('tag', @time, record)
    assert_equal("#{JSON.generate(record)}#{newline}", formatted)
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_output_as_buffered_overflow.rb | test/plugin/test_output_as_buffered_overflow.rb | require_relative '../helper'
require 'fluent/plugin/output'
require 'fluent/plugin/buffer'
require 'fluent/event'
require 'json'
require 'time'
require 'timeout'
require 'timecop'
# Test doubles used by BufferedOutputOverflowTest.
module FluentPluginOutputAsBufferedOverflowTest
  # Output whose hook bodies can be swapped in at runtime:
  # register(:write) { |chunk| ... } stores the block in @write, which the
  # subclass consults from its hook methods.
  class DummyBareOutput < Fluent::Plugin::Output
    def register(name, &block)
      instance_variable_set("@#{name}", block)
    end
  end

  # Buffered output stub; defaults to JSON formatting and a no-op write
  # unless a callback has been registered.
  class DummyAsyncOutput < DummyBareOutput
    def initialize
      super
      @format = @write = nil
    end

    def format(tag, time, record)
      if @format
        @format.call(tag, time, record)
      else
        [tag, time, record].to_json
      end
    end

    def write(chunk)
      @write.call(chunk) if @write
    end
  end
end
# Exercises buffer-overflow handling of buffered outputs for each
# overflow_action: exception (the default), block, and drop_oldest_chunk.
class BufferedOutputOverflowTest < Test::Unit::TestCase
  def create_output
    FluentPluginOutputAsBufferedOverflowTest::DummyAsyncOutput.new
  end

  def create_metadata(timekey: nil, tag: nil, variables: nil)
    Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
  end

  # Runs the block under a timeout; on timeout, dumps the plugin's captured
  # logs to STDERR before re-raising, to aid debugging hung tests.
  def waiting(seconds)
    begin
      Timeout.timeout(seconds) do
        yield
      end
    rescue Timeout::Error
      logs = @i.log.out.logs
      STDERR.print(*logs)
      raise
    end
  end

  teardown do
    if @i
      # Walk the plugin shutdown sequence, skipping steps already performed.
      @i.stop unless @i.stopped?
      @i.before_shutdown unless @i.before_shutdown?
      @i.shutdown unless @i.shutdown?
      @i.after_shutdown unless @i.after_shutdown?
      @i.close unless @i.closed?
      @i.terminate unless @i.terminated?
    end
    Timecop.return
  end

  sub_test_case 'buffered output with default configuration (throws exception for buffer overflow)' do
    setup do
      # 1KiB chunks, 4KiB total; lazy flush so chunks stay buffered.
      hash = {
        'flush_mode' => 'lazy',
        'flush_thread_burst_interval' => 0.01,
        'chunk_limit_size' => 1024,
        'total_limit_size' => 4096,
      }
      @i = create_output()
      @i.configure(config_element('ROOT','',{},[config_element('buffer','tag',hash)]))
      @i.start
      @i.after_start
    end

    test '#emit_events raises error when buffer is full' do
      @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
      es = Fluent::ArrayEventStream.new([
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
      ])
      # 8 tags x 512 bytes fills the 4096-byte total limit.
      8.times do |i|
        @i.emit_events("tag#{i}", es)
      end
      assert !@i.buffer.storable?
      assert_raise(Fluent::Plugin::Buffer::BufferOverflowError) do
        @i.emit_events("tag9", es)
      end
      logs = @i.log.out.logs
      assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
    end
  end

  sub_test_case 'buffered output configured with "overflow_action block"' do
    setup do
      hash = {
        'flush_mode' => 'lazy',
        'flush_thread_burst_interval' => 0.01,
        'chunk_limit_size' => 1024,
        'total_limit_size' => 4096,
        'overflow_action' => "block",
      }
      @i = create_output()
      @i.configure(config_element('ROOT','',{'log_level' => 'debug'},[config_element('buffer','tag',hash)]))
      @i.start
      @i.after_start
    end

    test '#emit_events blocks until any queues are flushed' do
      failing = true
      flushed_chunks = []

      @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
      # write fails while `failing` is set, keeping the buffer full.
      @i.register(:write) do |chunk|
        if failing
          raise "blocking"
        end
        flushed_chunks << chunk
      end

      es = Fluent::ArrayEventStream.new([
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
      ])
      4.times do |i|
        @i.emit_events("tag#{i}", es)
      end
      assert !@i.buffer.storable?

      # Unblock writes after 1s; the pending emit should then succeed.
      Thread.new do
        sleep 1
        failing = false
      end

      assert_nothing_raised do
        @i.emit_events("tag9", es)
      end

      assert !failing
      assert{ flushed_chunks.size > 0 }

      logs = @i.log.out.logs
      assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
      assert{ logs.any?{|line| line.include?("buffer.write is now blocking") } }
      assert{ logs.any?{|line| line.include?("retrying buffer.write after blocked operation") } }
    end
  end

  sub_test_case 'buffered output configured with "overflow_action drop_oldest_chunk"' do
    setup do
      hash = {
        'flush_mode' => 'lazy',
        'flush_thread_burst_interval' => 0.01,
        'chunk_limit_size' => 1024,
        'total_limit_size' => 4096,
        'overflow_action' => "drop_oldest_chunk",
      }
      @i = create_output()
      @i.configure(config_element('ROOT','',{'log_level' => 'debug'},[config_element('buffer','tag',hash)]))
      @i.start
      @i.after_start
    end

    test '#emit_events will success by dropping oldest chunk' do
      failing = true
      flushed_chunks = []

      @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
      @i.register(:write) do |chunk|
        if failing
          raise "blocking"
        end
        flushed_chunks << chunk
      end

      es = Fluent::ArrayEventStream.new([
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
      ])
      4.times do |i|
        @i.emit_events("tag#{i}", es)
      end
      assert !@i.buffer.storable?

      assert{ @i.buffer.queue[0].metadata.tag == "tag0" }
      assert{ @i.buffer.queue[1].metadata.tag == "tag1" }

      assert_nothing_raised do
        @i.emit_events("tag9", es)
      end

      # Writes never succeeded; the oldest queued chunk (tag0) was dropped to
      # make room instead.
      assert failing
      assert{ flushed_chunks.size == 0 }
      assert{ @i.buffer.queue[0].metadata.tag == "tag1" }

      logs = @i.log.out.logs
      assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
      assert{ logs.any?{|line| line.include?("dropping oldest chunk to make space after buffer overflow") } }
      assert{ @i.drop_oldest_chunk_count > 0 }
    end

    test '#emit_events raises OverflowError if all buffer spaces are used by staged chunks' do
      @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
      es = Fluent::ArrayEventStream.new([
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
      ])
      8.times do |i|
        @i.emit_events("tag#{i}", es)
      end
      assert !@i.buffer.storable?

      # Everything is still staged (nothing enqueued), so there is no queued
      # chunk to drop and the overflow must be raised.
      assert{ @i.buffer.queue.size == 0 }
      assert{ @i.buffer.stage.size == 8 }

      assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
        @i.emit_events("tag9", es)
      end

      logs = @i.log.out.logs
      assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
      assert{ logs.any?{|line| line.include?("no queued chunks to be dropped for drop_oldest_chunk") } }
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_buffer_file_chunk.rb | test/plugin/test_buffer_file_chunk.rb | require_relative '../helper'
require 'fluent/plugin/buffer/file_chunk'
require 'fluent/plugin/compressable'
require 'fluent/unique_id'
require 'fileutils'
require 'msgpack'
require 'time'
require 'timecop'
class BufferFileChunkTest < Test::Unit::TestCase
include Fluent::Plugin::Compressable
setup do
  @klass = Fluent::Plugin::Buffer::FileChunk
  # Fresh scratch directory for chunk files created by each test.
  @chunkdir = File.expand_path('../../tmp/buffer_file_chunk', __FILE__)
  FileUtils.rm_r @chunkdir rescue nil
  FileUtils.mkdir_p @chunkdir
end
teardown do
  # Undo any Timecop time freezing done by individual tests.
  Timecop.return
end

Metadata = Fluent::Plugin::Buffer::Metadata

# Builds buffer metadata; all fields default to nil.
def gen_metadata(timekey: nil, tag: nil, variables: nil)
  Metadata.new(timekey, tag, variables)
end
# Reads a chunk .meta file and returns its metadata hash (symbolized keys),
# or nil when the file is too short to contain any payload.
# Supports both the current layout (2-byte BUFFER_HEADER + 4-byte big-endian
# length + msgpack payload) and the old bare-msgpack layout.
def read_metadata_file(path)
  File.open(path, 'rb') do |f|
    chunk = f.read
    if chunk.size <= 6 # size of BUFFER_HEADER (2) + size of data(4)
      return nil
    end

    data = nil
    if chunk.slice(0, 2) == Fluent::Plugin::Buffer::FileChunk::BUFFER_HEADER
      size = chunk.slice(2, 4).unpack('N').first
      if size
        data = MessagePack.unpack(chunk.slice(6, size), symbolize_keys: true)
      end
    else
      # old type
      data = MessagePack.unpack(chunk, symbolize_keys: true)
    end

    data
  end
end
# Resolves +filename+ inside the per-test chunk directory.
def gen_path(filename)
  File.join(@chunkdir, filename)
end
# Deterministic 16-byte chunk id: the same layout UniqueId uses (48+ bits of
# microsecond timestamp shifted past 12 "random" bits, plus two fixed 32-bit
# words standing in for the random tail).
def gen_test_chunk_id
  require 'time'
  now = Time.parse('2016-04-07 14:31:33 +0900')
  upper = ((now.to_i * 1000 * 1000 + now.usec) << 12 | 1725) # 1725 is one of `rand(0xfff)`
  rand_hi = 2979763054 # one of rand(0xffffffff)
  rand_lo = 438020492  # ditto
  [upper >> 32, upper & 0xffffffff, rand_hi, rand_lo].pack('NNNN')
  # unique_id.unpack('N*').map{|n| n.to_s(16)}.join => "52fde6425d7406bdb19b936e1a1ba98c"
end
# Hex representation of a binary unique_id (four big-endian 32-bit words).
def hex_id(id)
  id.unpack('N*').map { |word| word.to_s(16) }.join
end
# FileChunk class-level helpers: state detection from filenames and
# staged/queued path generation.
sub_test_case 'classmethods' do
  data(
    correct_staged: ['/mydir/mypath/myfile.b00ff.log', :staged],
    correct_queued: ['/mydir/mypath/myfile.q00ff.log', :queued],
    incorrect_staged: ['/mydir/mypath/myfile.b00ff.log/unknown', :unknown],
    incorrect_queued: ['/mydir/mypath/myfile.q00ff.log/unknown', :unknown],
    output_file: ['/mydir/mypath/myfile.20160716.log', :unknown],
  )
  test 'can .assume_chunk_state' do |data|
    path, expected = data
    assert_equal expected, @klass.assume_chunk_state(path)
  end

  test '.generate_stage_chunk_path generates path with staged mark & chunk unique_id' do
    # The '*' placeholder is replaced with 'b' + hex unique id; a path
    # without '.*.' is a programming error.
    assert_equal gen_path("mychunk.b52fde6425d7406bdb19b936e1a1ba98c.log"), @klass.generate_stage_chunk_path(gen_path("mychunk.*.log"), gen_test_chunk_id)
    assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
      @klass.generate_stage_chunk_path(gen_path("mychunk.log"), gen_test_chunk_id)
    end
    assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
      @klass.generate_stage_chunk_path(gen_path("mychunk.*"), gen_test_chunk_id)
    end
    assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
      @klass.generate_stage_chunk_path(gen_path("*.log"), gen_test_chunk_id)
    end
  end

  test '.generate_queued_chunk_path generates path with enqueued mark for staged chunk path' do
    # For a staged ('b'-marked) path, the 'b' marker becomes 'q'.
    assert_equal(
      gen_path("mychunk.q52fde6425d7406bdb19b936e1a1ba98c.log"),
      @klass.generate_queued_chunk_path(gen_path("mychunk.b52fde6425d7406bdb19b936e1a1ba98c.log"), gen_test_chunk_id)
    )
  end

  test '.generate_queued_chunk_path generates special path with chunk unique_id for non staged chunk path' do
    # Paths without this chunk's staged marker get a ".q<id>.chunk" suffix.
    assert_equal(
      gen_path("mychunk.log.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
      @klass.generate_queued_chunk_path(gen_path("mychunk.log"), gen_test_chunk_id)
    )
    assert_equal(
      gen_path("mychunk.q55555555555555555555555555555555.log.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
      @klass.generate_queued_chunk_path(gen_path("mychunk.q55555555555555555555555555555555.log"), gen_test_chunk_id)
    )
  end

  test '.unique_id_from_path recreates unique_id from file path to assume unique_id for v0.12 chunks' do
    assert_equal gen_test_chunk_id, @klass.unique_id_from_path(gen_path("mychunk.q52fde6425d7406bdb19b936e1a1ba98c.log"))
  end
end
sub_test_case 'newly created chunk' do
setup do
@chunk_path = File.join(@chunkdir, 'test.*.log')
@c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :create)
end
def gen_chunk_path(prefix, unique_id)
File.join(@chunkdir, "test.#{prefix}#{Fluent::UniqueId.hex(unique_id)}.log")
end
teardown do
if @c
@c.purge rescue nil
end
if File.exist? @chunk_path
File.unlink @chunk_path
end
end
test 'creates new files for chunk and metadata with specified path & permission' do
assert{ @c.unique_id.size == 16 }
assert_equal gen_chunk_path('b', @c.unique_id), @c.path
assert File.exist?(gen_chunk_path('b', @c.unique_id))
assert{ File.stat(gen_chunk_path('b', @c.unique_id)).mode.to_s(8).end_with?(Fluent::DEFAULT_FILE_PERMISSION.to_s(8)) }
assert File.exist?(gen_chunk_path('b', @c.unique_id) + '.meta')
assert{ File.stat(gen_chunk_path('b', @c.unique_id) + '.meta').mode.to_s(8).end_with?(Fluent::DEFAULT_FILE_PERMISSION.to_s(8)) }
assert_equal :unstaged, @c.state
assert @c.empty?
end
# #append takes an array of serialized events (one element per event);
# #commit makes them durable; #read returns the accumulated raw bytes.
# Repeated append/commit cycles must accumulate records in order.
test 'can #append, #commit and #read it' do
  assert @c.empty?
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]
  @c.append(data)
  @c.commit
  content = @c.read
  ds = content.split("\n").select{|d| !d.empty? }
  assert_equal 2, ds.size
  assert_equal d1, JSON.parse(ds[0])
  assert_equal d2, JSON.parse(ds[1])
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.append([d3.to_json + "\n", d4.to_json + "\n"])
  @c.commit
  content = @c.read
  ds = content.split("\n").select{|d| !d.empty? }
  assert_equal 4, ds.size
  assert_equal d1, JSON.parse(ds[0])
  assert_equal d2, JSON.parse(ds[1])
  assert_equal d3, JSON.parse(ds[2])
  assert_equal d4, JSON.parse(ds[3])
end

# #concat takes an already-joined bulk string plus an explicit record count
# (the chunk cannot count records itself); otherwise equivalent to #append.
test 'can #concat, #commit and #read it' do
  assert @c.empty?
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"].join
  @c.concat(data, 2)
  @c.commit
  content = @c.read
  ds = content.split("\n").select{|d| !d.empty? }
  assert_equal 2, ds.size
  assert_equal d1, JSON.parse(ds[0])
  assert_equal d2, JSON.parse(ds[1])
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
  @c.commit
  content = @c.read
  ds = content.split("\n").select{|d| !d.empty? }
  assert_equal 4, ds.size
  assert_equal d1, JSON.parse(ds[0])
  assert_equal d2, JSON.parse(ds[1])
  assert_equal d3, JSON.parse(ds[2])
  assert_equal d4, JSON.parse(ds[3])
end
# Chunk files are opened in binary mode: content must come back as
# ASCII-8BIT regardless of the input string's encoding.
test 'has its contents in binary (ascii-8bit)' do
  data1 = "aaa bbb ccc".force_encoding('utf-8')
  @c.append([data1])
  @c.commit
  assert_equal Encoding::ASCII_8BIT, @c.instance_eval{ @chunk.external_encoding }
  content = @c.read
  assert_equal Encoding::ASCII_8BIT, content.encoding
end

# #bytesize tracks raw bytes and #size tracks record count; both must include
# appended-but-uncommitted data and must survive #commit unchanged.
test 'has #bytesize and #size' do
  assert @c.empty?
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]
  @c.append(data)
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
  assert_equal 2, @c.size
  @c.commit
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
  assert_equal 2, @c.size
  first_bytesize = @c.bytesize
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.append([d3.to_json + "\n", d4.to_json + "\n"])
  assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
  assert_equal 4, @c.size
  @c.commit
  assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
  assert_equal 4, @c.size
end
# #rollback must discard everything appended since the last commit, both in
# the in-memory counters and in the file itself (file is truncated back).
test 'can #rollback to revert non-committed data' do
  assert @c.empty?
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]
  @c.append(data)
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
  assert_equal 2, @c.size
  @c.rollback
  assert @c.empty?
  # nothing was ever committed, so the file must be empty again
  assert_equal '', File.open(@c.path, 'rb'){|f| f.read }
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]
  @c.append(data)
  @c.commit
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
  assert_equal 2, @c.size
  first_bytesize = @c.bytesize
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.append([d3.to_json + "\n", d4.to_json + "\n"])
  assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
  assert_equal 4, @c.size
  @c.rollback
  # rollback after a commit must restore exactly the committed state
  assert_equal first_bytesize, @c.bytesize
  assert_equal 2, @c.size
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.open(@c.path, 'rb'){|f| f.read }
end

# Same rollback semantics must hold for data added via #concat.
test 'can #rollback to revert non-committed data from #concat' do
  assert @c.empty?
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"].join
  @c.concat(data, 2)
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
  assert_equal 2, @c.size
  @c.rollback
  assert @c.empty?
  assert_equal '', File.open(@c.path, 'rb'){|f| f.read }
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]
  @c.append(data)
  @c.commit
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
  assert_equal 2, @c.size
  first_bytesize = @c.bytesize
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
  assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
  assert_equal 4, @c.size
  @c.rollback
  assert_equal first_bytesize, @c.bytesize
  assert_equal 2, @c.size
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.open(@c.path, 'rb'){|f| f.read }
end
# #close must flush the committed data and persist the final metadata
# (unique id, size, created/modified timestamps) to the ".meta" file.
test 'can store its data by #close' do
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]
  @c.append(data)
  @c.commit
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.append([d3.to_json + "\n", d4.to_json + "\n"])
  @c.commit
  # capture the state before close; the chunk is unusable afterwards
  content = @c.read
  unique_id = @c.unique_id
  size = @c.size
  created_at = @c.created_at
  modified_at = @c.modified_at
  @c.close
  assert_equal content, File.open(@c.path, 'rb'){|f| f.read }
  stored_meta = {
    timekey: nil, tag: nil, variables: nil, seq: 0,
    id: unique_id,
    s: size,
    c: created_at.to_i,
    m: modified_at.to_i,
  }
  assert_equal stored_meta, read_metadata_file(@c.path + '.meta')
end

# #purge must reset counters and remove BOTH the chunk file and its ".meta".
test 'deletes all data by #purge' do
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]
  @c.append(data)
  @c.commit
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.append([d3.to_json + "\n", d4.to_json + "\n"])
  @c.commit
  @c.purge
  assert @c.empty?
  assert_equal 0, @c.bytesize
  assert_equal 0, @c.size
  assert !File.exist?(@c.path)
  assert !File.exist?(@c.path + '.meta')
end

# #open must yield a readable IO positioned over the committed content.
test 'can #open its contents as io' do
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]
  @c.append(data)
  @c.commit
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.append([d3.to_json + "\n", d4.to_json + "\n"])
  @c.commit
  lines = []
  @c.open do |io|
    assert io
    io.readlines.each do |l|
      lines << l
    end
  end
  assert_equal d1.to_json + "\n", lines[0]
  assert_equal d2.to_json + "\n", lines[1]
  assert_equal d3.to_json + "\n", lines[2]
  assert_equal d4.to_json + "\n", lines[3]
end
# #write_metadata must persist the chunk's current metadata (including
# uncommitted appends) to the ".meta" file, stamping modification time with
# "now"; commit and close then re-stamp it from the chunk's own timestamps.
test '#write_metadata tries to store metadata on file' do
  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]
  @c.append(data)
  @c.commit
  expected = {
    timekey: nil, tag: nil, variables: nil, seq: 0,
    id: @c.unique_id,
    s: @c.size,
    c: @c.created_at.to_i,
    m: @c.modified_at.to_i,
  }
  assert_equal expected, read_metadata_file(@c.path + '.meta')
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.append([d3.to_json + "\n", d4.to_json + "\n"])
  # append does write_metadata
  dummy_now = Time.parse('2016-04-07 16:59:59 +0900')
  # Use the block form so the frozen clock is always restored, even when an
  # assertion fails (bare Timecop.freeze leaked frozen time into later tests).
  Timecop.freeze(dummy_now) do
    @c.write_metadata
    expected = {
      timekey: nil, tag: nil, variables: nil, seq: 0,
      id: @c.unique_id,
      s: @c.size,
      c: @c.created_at.to_i,
      m: dummy_now.to_i, # explicit write_metadata stamps "now"
    }
    assert_equal expected, read_metadata_file(@c.path + '.meta')
  end
  @c.commit
  expected = {
    timekey: nil, tag: nil, variables: nil, seq: 0,
    id: @c.unique_id,
    s: @c.size,
    c: @c.created_at.to_i,
    m: @c.modified_at.to_i,
  }
  assert_equal expected, read_metadata_file(@c.path + '.meta')
  content = @c.read
  unique_id = @c.unique_id
  size = @c.size
  created_at = @c.created_at
  modified_at = @c.modified_at
  @c.close
  assert_equal content, File.open(@c.path, 'rb'){|f| f.read }
  stored_meta = {
    timekey: nil, tag: nil, variables: nil, seq: 0,
    id: unique_id,
    s: size,
    c: created_at.to_i,
    m: modified_at.to_i,
  }
  assert_equal stored_meta, read_metadata_file(@c.path + '.meta')
end
end
# If writing the initial metadata fails (e.g. disk full), chunk creation must
# raise BufferOverflowError and must NOT leave a dangling ".meta" file behind.
test 'ensure to remove metadata file if #write_metadata raise an error because of disk full' do
  chunk_path = File.join(@chunkdir, 'test.*.log')
  stub(Fluent::UniqueId).hex(anything) { 'id' } # to fix chunk id
  any_instance_of(Fluent::Plugin::Buffer::FileChunk) do |klass|
    stub(klass).write_metadata(anything) do |v|
      raise 'disk full'
    end
  end
  err = assert_raise(Fluent::Plugin::Buffer::BufferOverflowError) do
    Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, chunk_path, :create)
  end
  assert_false File.exist?(File.join(@chunkdir, 'test.bid.log.meta'))
  assert_match(/create buffer metadata/, err.message)
end
sub_test_case 'chunk with file for staged chunk' do
setup do
  @chunk_id = gen_test_chunk_id
  # pre-build a chunk file in staged ('b') form plus its expected queued ('q') path
  @chunk_path = File.join(@chunkdir, "test_staged.b#{hex_id(@chunk_id)}.log")
  @enqueued_path = File.join(@chunkdir, "test_staged.q#{hex_id(@chunk_id)}.log")
  @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
  @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
  File.open(@chunk_path, 'wb') do |f|
    f.write @d
  end
  # matching .meta file so the chunk loads with full metadata
  @metadata = {
    timekey: nil, tag: 'testing', variables: {k: "x"}, seq: 0,
    id: @chunk_id,
    s: 4,
    c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
    m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
  }
  File.open(@chunk_path + '.meta', 'wb') do |f|
    f.write @metadata.to_msgpack
  end
  @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :staged)
end

teardown do
  if @c
    @c.purge rescue nil # best-effort cleanup
  end
  # the chunk may be at either the staged or the enqueued path by now
  [@chunk_path, @chunk_path + '.meta', @enqueued_path, @enqueued_path + '.meta'].each do |path|
    File.unlink path if File.exist? path
  end
end
# Loading an existing 'b'-marked file with its .meta must restore all
# metadata fields and the full content in :staged state.
test 'can load as staged chunk from file with metadata' do
  assert_equal @chunk_path, @c.path
  assert_equal :staged, @c.state
  assert_nil @c.metadata.timekey
  assert_equal 'testing', @c.metadata.tag
  assert_equal({k: "x"}, @c.metadata.variables)
  assert_equal 4, @c.size
  assert_equal Time.parse('2016-04-07 17:44:00 +0900'), @c.created_at
  assert_equal Time.parse('2016-04-07 17:44:13 +0900'), @c.modified_at
  content = @c.read
  assert_equal @d, content
end

# #enqueued! must atomically rename both the data file and its .meta from the
# 'b' path to the 'q' path, preserving metadata and content byte-for-byte.
test 'can be enqueued' do
  stage_path = @c.path
  queue_path = @enqueued_path
  assert File.exist?(stage_path)
  assert File.exist?(stage_path + '.meta')
  assert !File.exist?(queue_path)
  assert !File.exist?(queue_path + '.meta')
  @c.enqueued!
  assert_equal queue_path, @c.path
  assert !File.exist?(stage_path)
  assert !File.exist?(stage_path + '.meta')
  assert File.exist?(queue_path)
  assert File.exist?(queue_path + '.meta')
  assert_nil @c.metadata.timekey
  assert_equal 'testing', @c.metadata.tag
  assert_equal({k: "x"}, @c.metadata.variables)
  assert_equal 4, @c.size
  assert_equal Time.parse('2016-04-07 17:44:00 +0900'), @c.created_at
  assert_equal Time.parse('2016-04-07 17:44:13 +0900'), @c.modified_at
  assert_equal @d, File.open(@c.path, 'rb'){|f| f.read }
  assert_equal @metadata, read_metadata_file(@c.path + '.meta')
end
# #write_metadata must include appended-but-uncommitted records in the stored
# size, and stamp the stored modification time with "now".
test '#write_metadata tries to store metadata on file with non-committed data' do
  d5 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
  d5s = d5.to_json + "\n"
  @c.append([d5s])
  # before an explicit write_metadata, on-disk metadata still shows the
  # loaded/committed state: 4 records, original mtime
  metadata = {
    timekey: nil, tag: 'testing', variables: {k: "x"}, seq: 0,
    id: @chunk_id,
    s: 4,
    c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
    m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
  }
  assert_equal metadata, read_metadata_file(@c.path + '.meta')
  @c.write_metadata
  # after write_metadata: size counts the uncommitted record (5) and the
  # mtime is stamped with "now" (frozen to a fixed instant below)
  metadata = {
    timekey: nil, tag: 'testing', variables: {k: "x"}, seq: 0,
    id: @chunk_id,
    s: 5,
    c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
    m: Time.parse('2016-04-07 17:44:38 +0900').to_i,
  }
  dummy_now = Time.parse('2016-04-07 17:44:38 +0900')
  # Block form guarantees the frozen clock is restored even on assertion
  # failure (bare Timecop.freeze leaked frozen time into subsequent tests).
  Timecop.freeze(dummy_now) do
    @c.write_metadata
    assert_equal metadata, read_metadata_file(@c.path + '.meta')
  end
end
# #file_rename must work on Windows too (where an open file cannot simply be
# renamed): the callback receives an IO over the renamed file that preserves
# encoding, sync, binmode, size and position. On non-Windows it may reuse the
# original IO object; on Windows it must be a fresh one.
test '#file_rename can rename chunk files even in windows, and call callback with file size' do
  data = "aaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbccccccccccccccccccccccccccccc"
  testing_file1 = gen_path('rename1.test')
  testing_file2 = gen_path('rename2.test')
  f = File.open(testing_file1, 'wb', @c.permission)
  f.set_encoding(Encoding::ASCII_8BIT)
  f.sync = true
  f.binmode
  f.write data
  pos = f.pos
  assert f.binmode?
  assert f.sync
  assert_equal data.bytesize, f.size
  io = nil
  @c.file_rename(f, testing_file1, testing_file2, ->(new_io){ io = new_io })
  assert io
  if Fluent.windows?
    assert{ f != io } # Windows requires reopening, so a new IO is handed back
  else
    assert_equal f, io
  end
  assert_equal Encoding::ASCII_8BIT, io.external_encoding
  assert io.sync
  assert io.binmode?
  assert_equal data.bytesize, io.size
  assert_equal pos, io.pos # position must be preserved (at EOF here)
  assert_equal '', io.read
  io.rewind
  assert_equal data, io.read
end
end
sub_test_case 'chunk with file for enqueued chunk' do
setup do
  @chunk_id = gen_test_chunk_id
  # pre-build a 'q'-marked (already enqueued) chunk file plus its .meta
  @enqueued_path = File.join(@chunkdir, "test_staged.q#{hex_id(@chunk_id)}.log")
  @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
  @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
  File.open(@enqueued_path, 'wb') do |f|
    f.write @d
  end
  @dummy_timekey = Time.parse('2016-04-07 17:40:00 +0900').to_i
  @metadata = {
    timekey: @dummy_timekey, tag: 'testing', variables: {k: "x"}, seq: 0,
    id: @chunk_id,
    s: 4,
    c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
    m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
  }
  File.open(@enqueued_path + '.meta', 'wb') do |f|
    f.write @metadata.to_msgpack
  end
  @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @enqueued_path, :queued)
end

teardown do
  if @c
    @c.purge rescue nil # best-effort cleanup
  end
  [@enqueued_path, @enqueued_path + '.meta'].each do |path|
    File.unlink path if File.exist? path
  end
end

# A chunk loaded as :queued must expose its stored metadata and content, but
# reject any further writes (read-only file handle + state guard).
test 'can load as queued chunk (read only) with metadata' do
  assert @c
  assert_equal @chunk_id, @c.unique_id
  assert_equal :queued, @c.state
  assert_equal gen_metadata(timekey: @dummy_timekey, tag: 'testing', variables: {k: "x"}), @c.metadata
  assert_equal Time.at(@metadata[:c]), @c.created_at
  assert_equal Time.at(@metadata[:m]), @c.modified_at
  assert_equal @metadata[:s], @c.size
  assert_equal @d.bytesize, @c.bytesize
  assert_equal @d, @c.read
  assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
    @c.append(["queued chunk is read only"])
  end
  assert_raise IOError do
    @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
  end
end
end
sub_test_case 'chunk with queued chunk file of v0.12, without metadata' do
setup do
  @chunk_id = gen_test_chunk_id
  # v0.12-style queued chunk file: no .meta companion exists
  @chunk_path = File.join(@chunkdir, "test_v12.2016040811.q#{hex_id(@chunk_id)}.log")
  @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
  @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
  File.open(@chunk_path, 'wb') do |f|
    f.write @d
  end
  @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :queued)
end

teardown do
  if @c
    @c.purge rescue nil # best-effort cleanup
  end
  File.unlink @chunk_path if File.exist? @chunk_path
end

# Without a .meta file the unique_id comes from the file name, the record
# count is unknown (0), and the chunk is strictly read-only.
test 'can load as queued chunk from file without metadata' do
  assert @c
  assert_equal :queued, @c.state
  assert_equal @chunk_id, @c.unique_id
  assert_equal gen_metadata, @c.metadata
  assert_equal @d.bytesize, @c.bytesize
  assert_equal 0, @c.size
  assert_equal @d, @c.read
  assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
    @c.append(["queued chunk is read only"])
  end
  assert_raise IOError do
    @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
  end
end
end
sub_test_case 'chunk with staged chunk file of v0.12, without metadata' do
setup do
  @chunk_id = gen_test_chunk_id
  # v0.12-style STAGED ('b'-marked) chunk file, again without .meta
  @chunk_path = File.join(@chunkdir, "test_v12.2016040811.b#{hex_id(@chunk_id)}.log")
  @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
  @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
  File.open(@chunk_path, 'wb') do |f|
    f.write @d
  end
  @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :staged)
end

teardown do
  if @c
    @c.purge rescue nil # best-effort cleanup
  end
  File.unlink @chunk_path if File.exist? @chunk_path
end

# A metadata-less v0.12 chunk cannot safely be resumed for writing, so even
# when requested as :staged it must be demoted to read-only :queued.
test 'can load as queued chunk from file without metadata even if it was loaded as staged chunk' do
  assert @c
  assert_equal :queued, @c.state
  assert_equal @chunk_id, @c.unique_id
  assert_equal gen_metadata, @c.metadata
  assert_equal @d.bytesize, @c.bytesize
  assert_equal 0, @c.size
  assert_equal @d, @c.read
  assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
    @c.append(["queued chunk is read only"])
  end
  assert_raise IOError do
    @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
  end
end
end
# Compression round-trip behavior for gzip and zstd chunks: #read/#open/#write_to
# decompress by default, and return/emit raw compressed bytes when the matching
# compressed:/compress: option is given.
sub_test_case 'compressed buffer' do
  setup do
    @src = 'text data for compressing' * 5
    @gzipped_src = compress(@src)
    @zstded_src = compress(@src, type: :zstd)
  end

  test '#append with compress option writes compressed data to chunk when compress is gzip' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
    c.append([@src, @src], compress: :gzip)
    c.commit
    # check chunk is compressed
    assert c.read(compressed: :gzip).size < [@src, @src].join("").size
    assert_equal @src + @src, c.read
  end

  test '#open passes io object having decompressed data to a block when compress is gzip' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
    c.concat(@gzipped_src, @src.size)
    c.commit
    # fixed local variable typo: was "decomressed_data"
    decompressed_data = c.open do |io|
      v = io.read
      assert_equal @src, v
      v
    end
    assert_equal @src, decompressed_data
  end

  test '#open with compressed option passes io object having decompressed data to a block when compress is gzip' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
    c.concat(@gzipped_src, @src.size)
    c.commit
    # fixed local variable typo: was "comressed_data"
    compressed_data = c.open(compressed: :gzip) do |io|
      v = io.read
      assert_equal @gzipped_src, v
      v
    end
    assert_equal @gzipped_src, compressed_data
  end

  test '#write_to writes decompressed data when compress is gzip' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
    c.concat(@gzipped_src, @src.size)
    c.commit
    assert_equal @src, c.read
    assert_equal @gzipped_src, c.read(compressed: :gzip)
    io = StringIO.new
    c.write_to(io)
    assert_equal @src, io.string
  end

  test '#write_to with compressed option writes compressed data when compress is gzip' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
    c.concat(@gzipped_src, @src.size)
    c.commit
    assert_equal @src, c.read
    assert_equal @gzipped_src, c.read(compressed: :gzip)
    io = StringIO.new
    io.set_encoding(Encoding::ASCII_8BIT) # compressed bytes are binary
    c.write_to(io, compressed: :gzip)
    assert_equal @gzipped_src, io.string
  end

  test '#append with compress option writes compressed data to chunk when compress is zstd' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :zstd)
    c.append([@src, @src], compress: :zstd)
    c.commit
    # check chunk is compressed
    assert c.read(compressed: :zstd).size < [@src, @src].join("").size
    assert_equal @src + @src, c.read
  end

  test '#open passes io object having decompressed data to a block when compress is zstd' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :zstd)
    c.concat(@zstded_src, @src.size)
    c.commit
    # fixed local variable typo: was "decomressed_data"
    decompressed_data = c.open do |io|
      v = io.read
      assert_equal @src, v
      v
    end
    assert_equal @src, decompressed_data
  end

  test '#open with compressed option passes io object having decompressed data to a block when compress is zstd' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :zstd)
    c.concat(@zstded_src, @src.size)
    c.commit
    # fixed local variable typo: was "comressed_data"
    compressed_data = c.open(compressed: :zstd) do |io|
      v = io.read
      assert_equal @zstded_src, v
      v
    end
    assert_equal @zstded_src, compressed_data
  end

  test '#write_to writes decompressed data when compress is zstd' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :zstd)
    c.concat(@zstded_src, @src.size)
    c.commit
    assert_equal @src, c.read
    assert_equal @zstded_src, c.read(compressed: :zstd)
    io = StringIO.new
    c.write_to(io)
    assert_equal @src, io.string
  end

  test '#write_to with compressed option writes compressed data when compress is zstd' do
    c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :zstd)
    c.concat(@zstded_src, @src.size)
    c.commit
    assert_equal @src, c.read
    assert_equal @zstded_src, c.read(compressed: :zstd)
    io = StringIO.new
    io.set_encoding(Encoding::ASCII_8BIT) # compressed bytes are binary
    c.write_to(io, compressed: :zstd)
    assert_equal @zstded_src, io.string
  end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_object_space.rb | test/plugin/test_in_object_space.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_object_space'
require 'timeout'
class ObjectSpaceInputTest < Test::Unit::TestCase
  # Run the block with a timeout; on expiry, dump the plugin's captured logs
  # to STDERR before re-raising so the failure is diagnosable.
  def waiting(seconds, instance)
    begin
      Timeout.timeout(seconds) do
        yield
      end
    rescue Timeout::Error
      STDERR.print(*instance.log.out.logs)
      raise
    end
  end

  # Object whose #class is made to raise, to verify the plugin tolerates
  # objects that fail during introspection.
  class FailObject
  end

  def setup
    Fluent::Test.setup
    # Overriding this behavior in the global scope will have an unexpected influence on other tests.
    # So this should be overridden here and be removed in `teardown`.
    def FailObject.class
      raise "FailObject error for tests in ObjectSpaceInputTest."
    end
  end

  def teardown
    # undo the singleton override added in setup
    FailObject.singleton_class.remove_method(:class)
  end

  # minimal config: emit every 0.2s, tag "t1", report top 2 object classes
  TESTCONFIG = %[
emit_interval 0.2
tag t1
top 2
]

  def create_driver(conf=TESTCONFIG)
    Fluent::Test::Driver::Input.new(Fluent::Plugin::ObjectSpaceInput).configure(conf)
  end

  def test_configure
    d = create_driver
    assert_equal 0.2, d.instance.emit_interval
    assert_equal "t1", d.instance.tag
    assert_equal 2, d.instance.top
  end

  def test_emit
    # Force release garbaged objects due to avoid unexpected error by mock objects on `on_timer`
    # https://github.com/fluent/fluentd/pull/5055
    GC.start
    d = create_driver
    d.run(expect_emits: 3)
    emits = d.events
    assert{ emits.length > 0 }
    # every event carries the configured tag, `top` keys, and an EventTime
    emits.each { |tag, time, record|
      assert_equal d.instance.tag, tag
      assert_equal d.instance.top, record.keys.size
      assert(time.is_a?(Fluent::EventTime))
    }
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_stream.rb | test/plugin/test_out_stream.rb | require_relative '../helper'
require 'fluent/test'
require 'fluent/plugin/out_stream'
# Shared examples for stream outputs (TCP/unix socket): mixed into the
# concrete test classes below, which each supply their own create_driver.
module StreamOutputTest
  def setup
    Fluent::Test.setup
  end

  # The serialized stream is [tag, concatenated msgpack'd [time, record] pairs].
  def test_write
    d = create_driver
    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
    d.emit({"a"=>1}, time)
    d.emit({"a"=>2}, time)
    expect = ["test",
      [time,{"a"=>1}].to_msgpack +
      [time,{"a"=>2}].to_msgpack
    ].to_msgpack
    result = d.run
    assert_equal(expect, result)
  end

  # EventTime (sub-second) timestamps need Fluentd's msgpack factory, which
  # registers the EventTime extension type.
  def test_write_event_time
    d = create_driver
    time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
    d.emit({"a"=>1}, time)
    d.emit({"a"=>2}, time)
    expect = ["test",
      Fluent::MessagePackFactory.msgpack_packer.write([time,{"a"=>1}]).to_s +
      Fluent::MessagePackFactory.msgpack_packer.write([time,{"a"=>2}]).to_s
    ]
    expect = Fluent::MessagePackFactory.msgpack_packer.write(expect).to_s
    result = d.run
    assert_equal(expect, result)
  end

  # Build a buffered-output driver whose #write just returns the chunk
  # contents, so d.run yields the serialized stream for assertion.
  def create_driver(klass, conf)
    Fluent::Test::BufferedOutputTestDriver.new(klass) do
      def write(chunk)
        chunk.read
      end
    end.configure(conf)
  end
end
# TCP variant of the shared stream-output tests.
class TcpOutputTest < Test::Unit::TestCase
  include StreamOutputTest

  def setup
    super
    # grab a free TCP port per test to avoid collisions with parallel runs
    @port = unused_port(protocol: :tcp)
  end

  def teardown
    @port = nil
  end

  def config
    %[
port #{@port}
host 127.0.0.1
send_timeout 51
]
  end

  def create_driver(conf=config)
    super(Fluent::TcpOutput, conf)
  end

  def test_configure
    d = create_driver
    assert_equal @port, d.instance.port
    assert_equal '127.0.0.1', d.instance.host
    assert_equal 51, d.instance.send_timeout
  end
end
# Unix-domain-socket variant of the shared stream-output tests.
class UnixOutputTest < Test::Unit::TestCase
  include StreamOutputTest

  # TEST_ENV_NUMBER keeps tmp dirs distinct under parallel test runners
  TMP_DIR = File.dirname(__FILE__) + "/../tmp/out_unix#{ENV['TEST_ENV_NUMBER']}"
  CONFIG = %[
path #{TMP_DIR}/unix
send_timeout 52
]

  def create_driver(conf=CONFIG)
    super(Fluent::UnixOutput, conf)
  end

  def test_configure
    d = create_driver
    assert_equal "#{TMP_DIR}/unix", d.instance.path
    assert_equal 52, d.instance.send_timeout
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_syslog.rb | test/plugin/test_parser_syslog.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser'
class SyslogParserTest < ::Test::Unit::TestCase
def setup
  Fluent::Test.setup
  @parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::SyslogParser)
  # record every RFC3164 test expects when parsing the canonical sample line
  @expected = {
    'host' => '192.168.0.1',
    'ident' => 'fluentd',
    'pid' => '11111',
    'message' => '[error] Syslog test'
  }
end
data('regexp' => 'regexp', 'string' => 'string')
# Default parse: RFC3164 without time/pri, "%b %d %H:%M:%S" time format;
# parametrized over both parser engines.
def test_parse(param)
  @parser.configure({'parser_type' => param})
  @parser.instance.parse('Feb 28 12:00:00 192.168.0.1 fluentd[11111]: [error] Syslog test') { |time, record|
    assert_equal(event_time('Feb 28 12:00:00', format: '%b %d %H:%M:%S'), time)
    assert_equal(@expected, record)
  }
  assert_equal(Fluent::Plugin::SyslogParser::RFC3164_WITHOUT_TIME_AND_PRI_REGEXP, @parser.instance.patterns['format'])
  assert_equal("%b %d %H:%M:%S", @parser.instance.patterns['time_format'])
end

data('regexp' => 'regexp', 'string' => 'string')
# A custom time_format must be honored (deliberately scrambled field order
# here so a wrong format would produce a different event time).
def test_parse_with_time_format(param)
  @parser.configure('time_format' => '%b %d %M:%S:%H', 'parser_type' => param)
  @parser.instance.parse('Feb 28 00:00:12 192.168.0.1 fluentd[11111]: [error] Syslog test') { |time, record|
    assert_equal(event_time('Feb 28 12:00:00', format: '%b %d %H:%M:%S'), time)
    assert_equal(@expected, record)
  }
  assert_equal('%b %d %M:%S:%H', @parser.instance.patterns['time_format'])
end

data('regexp' => 'regexp', 'string' => 'string')
# ISO8601-style custom time format (year taken from "now" for the expectation)
def test_parse_with_time_format2(param)
  @parser.configure('time_format' => '%Y-%m-%dT%H:%M:%SZ', 'parser_type' => param)
  @parser.instance.parse("#{Time.now.year}-03-03T10:14:29Z 192.168.0.1 fluentd[11111]: [error] Syslog test") { |time, record|
    assert_equal(event_time('Mar 03 10:14:29', format: '%b %d %H:%M:%S'), time)
    assert_equal(@expected, record)
  }
  assert_equal('%Y-%m-%dT%H:%M:%SZ', @parser.instance.patterns['time_format'])
end
# RFC5424 message format adds msgid/extradata fields to the record.
def test_parse_with_time_format_rfc5424
  @parser.configure('time_format' => '%Y-%m-%dT%H:%M:%SZ', 'message_format' => 'rfc5424')
  @parser.instance.parse("#{Time.now.year}-03-03T10:14:29Z 192.168.0.1 fluentd 11111 - - [error] Syslog test") { |time, record|
    assert_equal(event_time('Mar 03 10:14:29', format: '%b %d %H:%M:%S'), time)
    assert_equal(@expected.merge('host' => '192.168.0.1', 'msgid' => '-', 'extradata' => '-'), record)
  }
  assert_equal('%Y-%m-%dT%H:%M:%SZ', @parser.instance.patterns['time_format'])
end

data('regexp' => 'regexp', 'string' => 'string')
# Sub-second (%N) timestamps must survive into the event time.
def test_parse_with_subsecond_time(param)
  @parser.configure('time_format' => '%b %d %H:%M:%S.%N', 'parser_type' => param)
  @parser.instance.parse('Feb 28 12:00:00.456 192.168.0.1 fluentd[11111]: [error] Syslog test') { |time, record|
    assert_equal(event_time('Feb 28 12:00:00.456', format: '%b %d %H:%M:%S.%N'), time)
    assert_equal(@expected, record)
  }
end

data('regexp' => 'regexp', 'string' => 'string')
# with_priority extracts the leading "<PRI>" into a numeric 'pri' field.
def test_parse_with_priority(param)
  @parser.configure('with_priority' => true, 'parser_type' => param)
  @parser.instance.parse('<6>Feb 28 12:00:00 192.168.0.1 fluentd[11111]: [error] Syslog test') { |time, record|
    assert_equal(event_time('Feb 28 12:00:00', format: '%b %d %H:%M:%S'), time)
    assert_equal(@expected.merge('pri' => 6), record)
  }
  assert_equal(Fluent::Plugin::SyslogParser::RFC3164_WITHOUT_TIME_AND_PRI_REGEXP, @parser.instance.patterns['format'])
  assert_equal("%b %d %H:%M:%S", @parser.instance.patterns['time_format'])
end
data('regexp' => 'regexp', 'string' => 'string')
# RFC5424 + priority: full structured record including msgid/extradata.
# NOTE: method name fixed — the RFC is 5424 (was misspelled "rfc5452").
def test_parse_rfc5424_with_priority(param)
  @parser.configure('with_priority' => true, 'parser_type' => param, 'message_format' => 'rfc5424')
  @parser.instance.parse('<30>1 2020-03-31T20:32:54Z myhostname 02abaf0687f5 10339 02abaf0687f5 - method=POST db=0.00') do |time, record|
    assert_equal(event_time('2020-03-31T20:32:54Z', format: '%Y-%m-%dT%H:%M:%S%z'), time)
    expected = { 'extradata' => '-', 'host' => 'myhostname', 'ident' => '02abaf0687f5', 'message' => 'method=POST db=0.00', 'msgid' => '02abaf0687f5', 'pid' => '10339', 'pri' => 30 }
    assert_equal(expected, record)
  end
end
data('regexp' => 'regexp', 'string' => 'string')
# An empty "<>" priority is unparseable: the block is invoked with nils.
def test_parse_with_empty_priority(param)
  @parser.configure('with_priority' => true, 'parser_type' => param)
  @parser.instance.parse('<>Feb 28 12:00:00 192.168.0.1 fluentd[11111]: [error] Syslog test') { |time, record|
    assert_nil time
    assert_nil record
  }
end

data('regexp' => 'regexp', 'string' => 'string')
# "ident[pid]" without the trailing colon must parse identically.
def test_parse_without_colon(param)
  @parser.configure({'parser_type' => param})
  @parser.instance.parse('Feb 28 12:00:00 192.168.0.1 fluentd[11111] [error] Syslog test') { |time, record|
    assert_equal(event_time('Feb 28 12:00:00', format: '%b %d %H:%M:%S'), time)
    assert_equal(@expected, record)
  }
  assert_equal(Fluent::Plugin::SyslogParser::RFC3164_WITHOUT_TIME_AND_PRI_REGEXP, @parser.instance.patterns['format'])
  assert_equal("%b %d %H:%M:%S", @parser.instance.patterns['time_format'])
end

data('regexp' => 'regexp', 'string' => 'string')
# keep_time_key preserves the raw timestamp string in record['time'].
def test_parse_with_keep_time_key(param)
  @parser.configure(
    'time_format' => '%b %d %M:%S:%H',
    'keep_time_key'=>'true',
    'parser_type' => param
  )
  text = 'Feb 28 00:00:12 192.168.0.1 fluentd[11111]: [error] Syslog test'
  @parser.instance.parse(text) do |time, record|
    assert_equal "Feb 28 00:00:12", record['time']
  end
end

data('regexp' => 'regexp', 'string' => 'string')
# ident may contain almost any punctuation; it must still be captured intact.
def test_parse_various_characters_for_tag(param)
  ident = '~!@#$%^&*()_+=-`]{};"\'/?\\,.<>'
  @parser.configure({'parser_type' => param})
  @parser.instance.parse("Feb 28 12:00:00 192.168.0.1 #{ident}[11111]: [error] Syslog test") { |time, record|
    assert_equal(event_time('Feb 28 12:00:00', format: '%b %d %H:%M:%S'), time)
    assert_equal(@expected.merge('ident' => ident), record)
  }
end

data('regexp' => 'regexp', 'string' => 'string')
# Same punctuation-heavy ident, combined with priority extraction.
def test_parse_various_characters_for_tag_with_priority(param)
  ident = '~!@#$%^&*()_+=-`]{};"\'/?\\,.<>'
  @parser.configure('with_priority' => true, 'parser_type' => param)
  @parser.instance.parse("<6>Feb 28 12:00:00 192.168.0.1 #{ident}[11111]: [error] Syslog test") { |time, record|
    assert_equal(event_time('Feb 28 12:00:00', format: '%b %d %H:%M:%S'), time)
    assert_equal(@expected.merge('pri' => 6, 'ident' => ident), record)
  }
end
# Documents known behavioral differences between the 'regexp' and 'string'
# parser engines on malformed or ambiguous RFC3164 input.
sub_test_case 'Check the difference of regexp and string parser' do
  # examples from rfc3164
  data('regexp' => 'regexp', 'string' => 'string')
  test 'wrong result with no ident message by default' do |param|
    # both engines misparse an ident-less message: first word becomes 'ident'
    @parser.configure('parser_type' => param)
    @parser.instance.parse('Feb 5 17:32:18 10.0.0.99 Use the BFG!') { |time, record|
      assert_equal({'host' => '10.0.0.99', 'ident' => 'Use', 'message' => 'the BFG!'}, record)
    }
  end

  test "proper result with no ident message by 'support_colonless_ident false'" do
    # only the string parser exposes this knob to fix the case above
    @parser.configure('parser_type' => 'string', 'support_colonless_ident' => false)
    @parser.instance.parse('Feb 5 17:32:18 10.0.0.99 Use the BFG!') { |time, record|
      assert_equal({'host' => '10.0.0.99', 'message' => 'Use the BFG!'}, record)
    }
  end

  test "string parsers can't parse broken syslog message and generate wrong record" do
    @parser.configure('parser_type' => 'string')
    @parser.instance.parse("1990 Oct 22 10:52:01 TZ-6 scapegoat.dmz.example.org 10.1.2.32 sched[0]: That's All Folks!") { |time, record|
      expected = {'host' => 'scapegoat.dmz.example.org', 'ident' => 'sched', 'pid' => '0', 'message' => "That's All Folks!"}
      assert_not_equal(expected, record)
    }
  end

  test "regexp parsers can't parse broken syslog message and raises an error" do
    # regexp engine fails harder: the bogus leading year breaks time parsing
    @parser.configure('parser_type' => 'regexp')
    assert_raise(Fluent::TimeParser::TimeParseError) {
      @parser.instance.parse("1990 Oct 22 10:52:01 TZ-6 scapegoat.dmz.example.org 10.1.2.32 sched[0]: That's All Folks!") { |time, record| }
    }
  end

  data('regexp' => 'regexp', 'string' => 'string')
  test "':' included message breaks regexp parser" do |param|
    @parser.configure('parser_type' => param)
    @parser.instance.parse('Aug 10 12:00:00 127.0.0.1 test foo:bar') { |time, record|
      expected = {'host' => '127.0.0.1', 'ident' => 'test', 'message' => 'foo:bar'}
      if param == 'string'
        assert_equal(expected, record)
      else
        assert_not_equal(expected, record)
      end
    }
  end

  data('regexp' => 'regexp', 'string' => 'string')
  test "Only no whitespace content in MSG causes different result" do |param|
    @parser.configure('parser_type' => param)
    @parser.instance.parse('Aug 10 12:00:00 127.0.0.1 value1,value2,value3,value4') { |time, record|
      # 'message' is correct but regexp set it as 'ident'
      if param == 'string'
        expected = {'host' => '127.0.0.1', 'message' => 'value1,value2,value3,value4'}
        assert_equal(expected, record)
      else
        expected = {'host' => '127.0.0.1', 'ident' => 'value1,value2,value3,value4', 'message' => ''}
        assert_equal(expected, record)
      end
    }
  end
end
# Tests for RFC 5424 message parsing ('message_format rfc5424'),
# exercised with both the 'regexp' and 'string' parser engines.
class TestRFC5424Regexp < self
data('regexp' => 'regexp', 'string' => 'string')
# Baseline RFC 5424 message with '-' (nil) PID/MSGID/SD fields.
def test_parse_with_rfc5424_message(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - - Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
# NOTE(review): checks the internally selected pattern — presumably both
# engines expose the same regexp via #patterns; confirm for 'string'.
assert_equal(Fluent::Plugin::SyslogParser::RFC5424_WITHOUT_TIME_AND_PRI_REGEXP, @parser.instance.patterns['format'])
end
data('regexp' => 'regexp', 'string' => 'string')
# A trailing newline must not leak into the parsed message.
def test_parse_with_rfc5424_message_trailing_eol(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = "<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - - Hi, from Fluentd!\n"
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
assert_equal(Fluent::Plugin::SyslogParser::RFC5424_WITHOUT_TIME_AND_PRI_REGEXP, @parser.instance.patterns['format'])
end
data('regexp' => 'regexp', 'string' => 'string')
# Embedded newlines inside MSG are preserved verbatim.
def test_parse_with_rfc5424_multiline_message(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = "<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - - Hi,\nfrom\nFluentd!"
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi,\nfrom\nFluentd!", record["message"]
end
assert_equal(Fluent::Plugin::SyslogParser::RFC5424_WITHOUT_TIME_AND_PRI_REGEXP, @parser.instance.patterns['format'])
end
data('regexp' => 'regexp', 'string' => 'string')
# Without 'with_priority' the message starts at the timestamp.
def test_parse_with_rfc5424_message_and_without_priority(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'rfc5424',
'parser_type' => param
)
text = '2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - - Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
assert_equal(Fluent::Plugin::SyslogParser::RFC5424_WITHOUT_TIME_AND_PRI_REGEXP, @parser.instance.patterns['format'])
end
data('regexp' => 'regexp', 'string' => 'string')
# No MSG part at all: record['message'] must be nil, not empty string.
def test_parse_with_rfc5424_empty_message_and_without_priority(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'rfc5424',
'parser_type' => param
)
text = '2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - -'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_nil record["message"]
end
assert_equal(Fluent::Plugin::SyslogParser::RFC5424_WITHOUT_TIME_AND_PRI_REGEXP, @parser.instance.patterns['format'])
end
data('regexp' => 'regexp', 'string' => 'string')
# With no explicit time_format the plugin's default RFC 5424 format applies.
def test_parse_with_rfc5424_message_without_time_format(param)
@parser.configure(
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - - Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# Numeric PID and a timezone offset; note the expected message retains
# its leading space here.
def test_parse_with_rfc5424_message_with_priority_and_pid(param)
@parser.configure(
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<28>1 2018-09-26T15:54:26.620412+09:00 machine minissdpd 1298 - - peer 192.168.0.5:50123 is not from a LAN'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2018-09-26T15:54:26.620412+0900", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "1298", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal " peer 192.168.0.5:50123 is not from a LAN", record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# A single structured-data element lands in 'extradata' as-is.
def test_parse_with_rfc5424_structured_message(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"] [Hi] from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "11111", record["pid"]
assert_equal "ID24224", record["msgid"]
assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
record["extradata"]
assert_equal "[Hi] from Fluentd!", record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# Multiple adjacent SD elements stay concatenated in 'extradata'.
def test_parse_with_rfc5424_multiple_structured_message(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"][exampleSDID@20224 class="high"] Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "11111", record["pid"]
assert_equal "ID24224", record["msgid"]
assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"][exampleSDID@20224 class=\"high\"]",
record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# A ']' inside MSG must not be mistaken for the end of structured data.
def test_parse_with_rfc5424_message_includes_right_bracket(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"] [Hi] from Fluentd]!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "11111", record["pid"]
assert_equal "ID24224", record["msgid"]
assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
record["extradata"]
assert_equal "[Hi] from Fluentd]!", record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# Structured data present but no MSG: 'message' must be nil.
def test_parse_with_rfc5424_empty_message(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"]'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "11111", record["pid"]
assert_equal "ID24224", record["msgid"]
assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
record["extradata"]
assert_nil record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# Trailing space after SD: engines diverge — string yields an empty
# message, regexp yields nil time/record (parse failure).
def test_parse_with_rfc5424_space_empty_message(param)
@parser.configure(
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"] '
@parser.instance.parse(text) do |time, record|
if param == 'string'
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "11111", record["pid"]
assert_equal "ID24224", record["msgid"]
assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
record["extradata"]
assert_equal '', record["message"]
else
assert_nil time
assert_nil record
end
end
end
data('regexp' => 'regexp', 'string' => 'string')
# Timestamps without fractional seconds must also parse.
def test_parse_with_rfc5424_message_without_subseconds(param)
@parser.configure(
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15Z 192.168.0.1 fluentd - - - Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15Z", format: '%Y-%m-%dT%H:%M:%S%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# The same parser instance must handle both subsecond and whole-second
# timestamps across successive parse calls.
def test_parse_with_rfc5424_message_both_timestamp(param)
@parser.configure(
'message_format' => 'rfc5424',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15Z 192.168.0.1 fluentd - - - Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15Z", format: '%Y-%m-%dT%H:%M:%S%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - - Hi, from Fluentd with subseconds!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd with subseconds!", record["message"]
end
end
end
# Tests for 'message_format auto': the parser must detect RFC3164 vs
# RFC5424 per message, including switching back and forth on the same
# parser instance.
class TestAutoRegexp < self
data('regexp' => 'regexp', 'string' => 'string')
# NOTE(review): time_format '%b %d %M:%S:%H' (min:sec:hour) looks like a
# transposed '%H:%M:%S', but it is used consistently for both configure
# and the expected event_time, so the assertions still hold — confirm
# whether the transposition is intentional.
def test_auto_with_legacy_syslog_message(param)
@parser.configure(
'time_format' => '%b %d %M:%S:%H',
'message_format' => 'auto',
'parser_type' => param
)
text = 'Feb 28 00:00:12 192.168.0.1 fluentd[11111]: [error] Syslog test'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("Feb 28 00:00:12", format: '%b %d %M:%S:%H'), time)
assert_equal(@expected, record)
end
end
data('regexp' => 'regexp', 'string' => 'string')
# Legacy (RFC3164) message with <PRI> under auto-detection.
def test_auto_with_legacy_syslog_priority_message(param)
@parser.configure(
'time_format' => '%b %d %M:%S:%H',
'with_priority' => true,
'message_format' => 'auto',
'parser_type' => param
)
text = '<6>Feb 28 12:00:00 192.168.0.1 fluentd[11111]: [error] Syslog test'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("Feb 28 12:00:00", format: '%b %d %M:%S:%H'), time)
assert_equal(@expected.merge('pri' => 6), record)
end
end
data('regexp' => 'regexp', 'string' => 'string')
# RFC5424 message auto-detected; 'pri' is numeric here (16).
def test_parse_with_rfc5424_message(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'auto',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - - Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal 16, record["pri"]
assert_equal "Hi, from Fluentd!", record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# RFC5424 with structured data, auto-detected.
def test_parse_with_rfc5424_structured_message(param)
@parser.configure(
'time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'auto',
'with_priority' => true,
'parser_type' => param
)
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"] Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "11111", record["pid"]
assert_equal "ID24224", record["msgid"]
assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# Alternating RFC3164 / RFC5424 messages on one parser instance; each
# format uses its own time format ('time_format' vs 'rfc5424_time_format').
def test_parse_with_both_message_type(param)
@parser.configure(
'time_format' => '%b %d %M:%S:%H',
'rfc5424_time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'message_format' => 'auto',
'with_priority' => true,
'parser_type' => param
)
text = '<1>Feb 28 12:00:00 192.168.0.1 fluentd[11111]: [error] Syslog test'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("Feb 28 12:00:00", format: '%b %d %M:%S:%H'), time)
assert_equal(@expected.merge('pri' => 1), record)
end
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"] Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "11111", record["pid"]
assert_equal "ID24224", record["msgid"]
assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
# A '>' in the body must not trick auto-detection into RFC5424 mode.
text = '<1>Feb 28 12:00:02 192.168.0.1 fluentd[11111]: [error] Syslog test 2>1'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("Feb 28 12:00:02", format: '%b %d %M:%S:%H'), time)
assert_equal(@expected.merge('pri' => 1, 'message'=> '[error] Syslog test 2>1'), record)
end
text = '<1>Feb 28 12:00:02 192.168.0.1 fluentd[11111]: [error] Syslog test'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("Feb 28 12:00:02", format: '%b %d %M:%S:%H'), time)
assert_equal(@expected.merge('pri' => 1), record)
end
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - - Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
end
data('regexp' => 'regexp', 'string' => 'string')
# As above, also covering priority values shared between formats and an
# RFC5424 timestamp without subseconds on the final message.
def test_parse_with_both_message_type_and_priority(param)
@parser.configure(
'time_format' => '%b %d %M:%S:%H',
'rfc5424_time_format' => '%Y-%m-%dT%H:%M:%S.%L%z',
'with_priority' => true,
'message_format' => 'auto',
'parser_type' => param
)
text = '<6>Feb 28 12:00:00 192.168.0.1 fluentd[11111]: [error] Syslog test'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("Feb 28 12:00:00", format: '%b %d %M:%S:%H'), time)
assert_equal(@expected.merge('pri' => 6), record)
end
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd 11111 ID24224 [exampleSDID@20224 iut="3" eventSource="Application" eventID="11211"] Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "11111", record["pid"]
assert_equal "ID24224", record["msgid"]
assert_equal "[exampleSDID@20224 iut=\"3\" eventSource=\"Application\" eventID=\"11211\"]",
record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
text = '<16>Feb 28 12:00:02 192.168.0.1 fluentd[11111]: [error] Syslog test'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("Feb 28 12:00:02", format: '%b %d %M:%S:%H'), time)
assert_equal(@expected.merge('pri' => 16), record)
end
text = '<16>1 2017-02-06T13:14:15.003Z 192.168.0.1 fluentd - - - Hi, from Fluentd!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15.003Z", format: '%Y-%m-%dT%H:%M:%S.%L%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd!", record["message"]
end
text = '<16>1 2017-02-06T13:14:15Z 192.168.0.1 fluentd - - - Hi, from Fluentd without subseconds!'
@parser.instance.parse(text) do |time, record|
assert_equal(event_time("2017-02-06T13:14:15Z", format: '%Y-%m-%dT%H:%M:%S%z'), time)
assert_equal "-", record["pid"]
assert_equal "-", record["msgid"]
assert_equal "-", record["extradata"]
assert_equal "Hi, from Fluentd without subseconds!", record["message"]
end
end
end
# "parser_type" config shouldn't hide Fluent::Plugin::Parser#plugin_type
# https://github.com/fluent/fluentd/issues/3296
data('regexp' => :regexp, 'fast' => :string)
def test_parser_type_method(engine)
  @parser.configure('parser_type' => engine.to_s)
  # #parser_type (the plugin-level text mode) must stay :text_per_line
  # no matter which internal engine is selected.
  assert_equal(:text_per_line, @parser.instance.parser_type)
end
# The 'parser_engine' setting must round-trip through the plugin as a
# symbol, for both supported engines.
data('regexp' => :regexp, 'string' => :string)
def test_parser_engine(engine)
  @parser.configure('parser_engine' => engine.to_s)
  assert_equal(engine, @parser.instance.parser_engine)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_syslog.rb | test/plugin/test_in_syslog.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_syslog'
# Integration-style tests for the in_syslog input plugin: configuration,
# UDP/TCP transports, RFC5424 parsing, octet-counting framing, unmatched
# line emission and keepalive settings.
class SyslogInputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
# Reserve a free UDP port for the test; TCP-based tests reserve their own.
@port = unused_port(protocol: :udp)
end
# NOTE(review): only clears the reference; the port reserved in setup is
# not explicitly released here — confirm unused_port has no cleanup hook.
def teardown
@port = nil
end
# Minimal IPv4 listener configuration (UDP by default).
def ipv4_config(port = @port)
%[
port #{port}
bind 127.0.0.1
tag syslog
]
end
# Minimal IPv6 listener configuration.
def ipv6_config(port = @port)
%[
port #{port}
bind ::1
tag syslog
]
end
def create_driver(conf=ipv4_config)
Fluent::Test::Driver::Input.new(Fluent::Plugin::SyslogInput).configure(conf)
end
data(
ipv4: ['127.0.0.1', :ipv4, ::Socket::AF_INET],
ipv6: ['::1', :ipv6, ::Socket::AF_INET6],
)
def test_configure(data)
bind_addr, protocol, family = data
config = send("#{protocol}_config")
omit "IPv6 unavailable" if family == ::Socket::AF_INET6 && !ipv6_enabled?
d = create_driver(config)
assert_equal @port, d.instance.port
assert_equal bind_addr, d.instance.bind
end
sub_test_case 'source_hostname_key and source_address_key features' do
# source_hostname_key requires name resolution to be enabled.
test 'resolve_hostname must be true with source_hostname_key' do
assert_raise(Fluent::ConfigError) {
create_driver(ipv4_config + <<EOS)
resolve_hostname false
source_hostname_key hostname
EOS
}
end
data('resolve_hostname' => 'resolve_hostname true',
'source_hostname_key' => 'source_hostname_key source_host')
# Setting source_hostname_key implicitly turns resolve_hostname on.
def test_configure_resolve_hostname(param)
d = create_driver([ipv4_config, param].join("\n"))
assert_true d.instance.resolve_hostname
end
end
data('Use protocol_type' => ['protocol_type tcp', :tcp, :udp],
'Use transport' => ["<transport tcp>\n </transport>", nil, :tcp],
'Use transport and protocol' => ["protocol_type udp\n<transport tcp>\n </transport>", :udp, :tcp])
def test_configure_protocol(param)
conf, proto_type, transport_proto_type = *param
port = unused_port(protocol: proto_type ? proto_type : transport_proto_type)
d = create_driver([ipv4_config(port), conf].join("\n"))
# NOTE(review): assert_equal arguments here are (actual, expected) —
# reversed from the conventional (expected, actual) order.
assert_equal(d.instance.protocol_type, proto_type)
assert_equal(d.instance.transport_config.protocol, transport_proto_type)
end
# For backward compat
# When both are given, the legacy protocol_type wins over <transport>:
# messages sent over UDP must still be received.
def test_respect_protocol_type_than_transport
d = create_driver([ipv4_config, "<transport tcp> \n</transport>", "protocol_type udp"].join("\n"))
tests = create_test_case
d.run(expect_emits: 2) do
u = UDPSocket.new
u.connect('127.0.0.1', @port)
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests)
end
data(
ipv4: ['127.0.0.1', :ipv4, ::Socket::AF_INET],
ipv6: ['::1', :ipv6, ::Socket::AF_INET6],
)
# Both padded ("Dec 11") and space-padded ("Dec  1") day numbers must parse.
def test_time_format(data)
bind_addr, protocol, family = data
config = send("#{protocol}_config")
omit "IPv6 unavailable" if family == ::Socket::AF_INET6 && !ipv6_enabled?
d = create_driver(config)
tests = [
{'msg' => '<6>Dec 11 00:00:00 localhost logger: foo', 'expected' => Fluent::EventTime.from_time(Time.strptime('Dec 11 00:00:00', '%b %d %H:%M:%S'))},
{'msg' => '<6>Dec 1 00:00:00 localhost logger: foo', 'expected' => Fluent::EventTime.from_time(Time.strptime('Dec 1 00:00:00', '%b %d %H:%M:%S'))},
]
d.run(expect_emits: 2) do
u = UDPSocket.new(family)
u.connect(bind_addr, @port)
tests.each {|test|
u.send(test['msg'], 0)
}
end
events = d.events
assert(events.size > 0)
events.each_index {|i|
assert_equal_event_time(tests[i]['expected'], events[i][1])
}
end
def test_msg_size
d = create_driver
tests = create_test_case
d.run(expect_emits: 2) do
u = UDPSocket.new
u.connect('127.0.0.1', @port)
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests)
end
# message_length_limit 5k must accept the 4096-byte large message.
def test_msg_size_udp_for_large_msg
d = create_driver(ipv4_config + %[
message_length_limit 5k
])
tests = create_test_case(large_message: true)
d.run(expect_emits: 3) do
u = UDPSocket.new
u.connect('127.0.0.1', @port)
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests)
end
# One TCP connection per message.
def test_msg_size_with_tcp
port = unused_port(protocol: :tcp)
d = create_driver([ipv4_config(port), "<transport tcp> \n</transport>"].join("\n"))
tests = create_test_case
d.run(expect_emits: 2) do
tests.each {|test|
TCPSocket.open('127.0.0.1', port) do |s|
s.send(test['msg'], 0)
end
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests)
end
# NOTE(review): 'rfc5452' in the method name looks like a typo for
# 'rfc5424' — the parse section below configures message_format rfc5424.
def test_emit_rfc5452
d = create_driver([ipv4_config, "facility_key pri\n<parse>\n message_format rfc5424\nwith_priority true\n</parse>"].join("\n"))
msg = '<1>1 2017-02-06T13:14:15.003Z myhostname 02abaf0687f5 10339 02abaf0687f5 - method=POST db=0.00'
d.run(expect_emits: 1, timeout: 2) do
u = UDPSocket.new
u.connect('127.0.0.1', @port)
u.send(msg, 0)
end
tag, _, event = d.events[0]
assert_equal('syslog.kern.alert', tag)
assert_equal('kern', event['pri'])
end
# Multiple messages over one long-lived TCP connection.
def test_msg_size_with_same_tcp_connection
port = unused_port(protocol: :tcp)
d = create_driver([ipv4_config(port), "<transport tcp> \n</transport>"].join("\n"))
tests = create_test_case
d.run(expect_emits: 2) do
TCPSocket.open('127.0.0.1', port) do |s|
tests.each {|test|
s.send(test['msg'], 0)
}
end
end
assert(d.events.size > 0)
compare_test_result(d.events, tests)
end
# Payload after <PRI> may be JSON when 'format json' is configured.
def test_msg_size_with_json_format
d = create_driver([ipv4_config, 'format json'].join("\n"))
time = Time.parse('2013-09-18 12:00:00 +0900').to_i
tests = ['Hello!', 'Syslog!'].map { |msg|
event = {'time' => time, 'message' => msg}
{'msg' => '<6>' + event.to_json + "\n", 'expected' => msg}
}
d.run(expect_emits: 2) do
u = UDPSocket.new
u.connect('127.0.0.1', @port)
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests)
end
def test_msg_size_with_include_source_host
d = create_driver([ipv4_config, 'include_source_host true'].join("\n"))
tests = create_test_case
host = nil
d.run(expect_emits: 2) do
u = UDPSocket.new
u.connect('127.0.0.1', @port)
host = u.peeraddr[2]
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests, {host: host})
end
data(
severity_key: 'severity_key',
priority_key: 'priority_key',
)
# severity_key and its deprecated alias priority_key behave identically.
def test_msg_size_with_severity_key(param_name)
d = create_driver([ipv4_config, "#{param_name} severity"].join("\n"))
tests = create_test_case
severity = 'info'
d.run(expect_emits: 2) do
u = UDPSocket.new
u.connect('127.0.0.1', @port)
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests, {severity: severity})
end
def test_msg_size_with_facility_key
d = create_driver([ipv4_config, 'facility_key facility'].join("\n"))
tests = create_test_case
facility = 'kern'
d.run(expect_emits: 2) do
u = UDPSocket.new
u.connect('127.0.0.1', @port)
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests, {facility: facility})
end
def test_msg_size_with_source_address_key
d = create_driver([ipv4_config, 'source_address_key source_address'].join("\n"))
tests = create_test_case
address = nil
d.run(expect_emits: 2) do
u = UDPSocket.new
u.connect('127.0.0.1', @port)
address = u.peeraddr[3]
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests, {address: address})
end
def test_msg_size_with_source_hostname_key
d = create_driver([ipv4_config, 'source_hostname_key source_hostname'].join("\n"))
tests = create_test_case
hostname = nil
d.run(expect_emits: 2) do
u = UDPSocket.new
u.do_not_reverse_lookup = false
u.connect('127.0.0.1', @port)
hostname = u.peeraddr[2]
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests, {hostname: hostname})
end
# Builds {msg, expected} fixtures; large_message adds a 4096-byte payload.
def create_test_case(large_message: false)
# actual syslog message has "\n"
if large_message
[
{'msg' => '<6>Sep 10 00:00:00 localhost logger: ' + 'x' * 100 + "\n", 'expected' => 'x' * 100},
{'msg' => '<6>Sep 10 00:00:00 localhost logger: ' + 'x' * 1024 + "\n", 'expected' => 'x' * 1024},
{'msg' => '<6>Sep 10 00:00:00 localhost logger: ' + 'x' * 4096 + "\n", 'expected' => 'x' * 4096},
]
else
[
{'msg' => '<6>Sep 10 00:00:00 localhost logger: ' + 'x' * 100 + "\n", 'expected' => 'x' * 100},
{'msg' => '<6>Sep 10 00:00:00 localhost logger: ' + 'x' * 1024 + "\n", 'expected' => 'x' * 1024},
]
end
end
# Checks tag, message and any optional record keys requested via options.
def compare_test_result(events, tests, options = {})
events.each_index { |i|
assert_equal('syslog.kern.info', events[i][0]) # <6> means kern.info
assert_equal(tests[i]['expected'], events[i][2]['message'])
assert_equal(options[:host], events[i][2]['source_host']) if options[:host]
assert_equal(options[:address], events[i][2]['source_address']) if options[:address]
assert_equal(options[:hostname], events[i][2]['source_hostname']) if options[:hostname]
assert_equal(options[:severity], events[i][2]['severity']) if options[:severity]
assert_equal(options[:facility], events[i][2]['facility']) if options[:facility]
}
end
# frame_type octet_count: each message is prefixed with "<len> ".
sub_test_case 'octet counting frame' do
def test_msg_size_with_tcp
port = unused_port(protocol: :tcp)
d = create_driver([ipv4_config(port), "<transport tcp> \n</transport>", 'frame_type octet_count'].join("\n"))
tests = create_test_case
d.run(expect_emits: 2) do
tests.each {|test|
TCPSocket.open('127.0.0.1', port) do |s|
s.send(test['msg'], 0)
end
}
end
assert(d.events.size > 0)
compare_test_result(d.events, tests)
end
def test_msg_size_with_same_tcp_connection
port = unused_port(protocol: :tcp)
d = create_driver([ipv4_config(port), "<transport tcp> \n</transport>", 'frame_type octet_count'].join("\n"))
tests = create_test_case
d.run(expect_emits: 2) do
TCPSocket.open('127.0.0.1', port) do |s|
tests.each {|test|
s.send(test['msg'], 0)
}
end
end
assert(d.events.size > 0)
compare_test_result(d.events, tests)
end
# Overrides the outer fixture builder to prepend the octet-count header.
def create_test_case(large_message: false)
msgs = [
{'msg' => '<6>Sep 10 00:00:00 localhost logger: ' + 'x' * 100, 'expected' => 'x' * 100},
{'msg' => '<6>Sep 10 00:00:00 localhost logger: ' + 'x' * 1024, 'expected' => 'x' * 1024},
]
msgs.each { |msg|
m = msg['msg']
msg['msg'] = "#{m.size} #{m}"
}
msgs
end
end
def create_unmatched_lines_test_case
[
# valid message
{'msg' => '<6>Sep 10 00:00:00 localhost logger: xxx', 'expected' => {'host'=>'localhost', 'ident'=>'logger', 'message'=>'xxx'}},
# missing priority
{'msg' => 'hello world', 'expected' => {'unmatched_line' => 'hello world'}},
# timestamp parsing failure
{'msg' => '<6>ZZZ 99 99:99:99 localhost logger: xxx', 'expected' => {'unmatched_line' => '<6>ZZZ 99 99:99:99 localhost logger: xxx'}},
]
end
# The first fixture is the valid message, so the tag check is skipped for
# index 0 (unless i==0).
def compare_unmatched_lines_test_result(events, tests, options = {})
events.each_index { |i|
tests[i]['expected'].each { |k,v|
assert_equal v, events[i][2][k], "No key <#{k}> in response or value mismatch"
}
assert_equal('syslog.unmatched', events[i][0], 'tag does not match syslog.unmatched') unless i==0
assert_equal(options[:address], events[i][2]['source_address'], 'response has no source_address or mismatch') if options[:address]
assert_equal(options[:hostname], events[i][2]['source_hostname'], 'response has no source_hostname or mismatch') if options[:hostname]
}
end
def test_emit_unmatched_lines
d = create_driver([ipv4_config, 'emit_unmatched_lines true'].join("\n"))
tests = create_unmatched_lines_test_case
d.run(expect_emits: 3) do
u = UDPSocket.new
u.do_not_reverse_lookup = false
u.connect('127.0.0.1', @port)
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert_equal tests.size, d.events.size
compare_unmatched_lines_test_result(d.events, tests)
end
def test_emit_unmatched_lines_with_hostname
d = create_driver([ipv4_config, 'emit_unmatched_lines true', 'source_hostname_key source_hostname'].join("\n"))
tests = create_unmatched_lines_test_case
hostname = nil
d.run(expect_emits: 3) do
u = UDPSocket.new
u.do_not_reverse_lookup = false
u.connect('127.0.0.1', @port)
hostname = u.peeraddr[2]
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert_equal tests.size, d.events.size
compare_unmatched_lines_test_result(d.events, tests, {hostname: hostname})
end
def test_emit_unmatched_lines_with_address
d = create_driver([ipv4_config, 'emit_unmatched_lines true', 'source_address_key source_address'].join("\n"))
tests = create_unmatched_lines_test_case
address = nil
d.run(expect_emits: 3) do
u = UDPSocket.new
u.do_not_reverse_lookup = false
u.connect('127.0.0.1', @port)
address = u.peeraddr[3]
tests.each {|test|
u.send(test['msg'], 0)
}
end
assert_equal tests.size, d.events.size
compare_unmatched_lines_test_result(d.events, tests, {address: address})
end
def test_send_keepalive_packet_is_disabled_by_default
port = unused_port(protocol: :tcp)
d = create_driver(ipv4_config(port) + %[
<transport tcp>
</transport>
protocol tcp
])
assert_false d.instance.send_keepalive_packet
end
# Verifies the keepalive flag is passed through to server_create_connection.
def test_send_keepalive_packet_can_be_enabled
addr = "127.0.0.1"
port = unused_port(protocol: :tcp)
d = create_driver(ipv4_config(port) + %[
<transport tcp>
</transport>
send_keepalive_packet true
])
assert_true d.instance.send_keepalive_packet
mock.proxy(d.instance).server_create_connection(
:in_syslog_tcp_server, port,
bind: addr,
resolve_name: nil,
send_keepalive_packet: true)
d.run do
TCPSocket.open(addr, port)
end
end
# send_keepalive_packet is a TCP-only option: rejected for UDP transport.
def test_send_keepalive_packet_can_not_be_enabled_for_udp
assert_raise(Fluent::ConfigError) do
create_driver(ipv4_config + %[
send_keepalive_packet true
])
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_multiline.rb | test/plugin/test_parser_multiline.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser'
class MultilineParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
end
def create_parser(conf)
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::MultilineParser).configure(conf)
parser
end
def test_configure_with_invalid_params
[{'format100' => '/(?<msg>.*)/'}, {'format1' => '/(?<msg>.*)/', 'format3' => '/(?<msg>.*)/'}, 'format1' => '/(?<msg>.*)'].each { |config|
assert_raise(Fluent::ConfigError) {
create_parser(config)
}
}
end
def test_parse
parser = create_parser('format1' => '/^(?<time>\d{4}-\d{1,2}-\d{1,2} \d{1,2}:\d{1,2}:\d{1,2}) \[(?<thread>.*)\] (?<level>[^\s]+)(?<message>.*)/')
parser.instance.parse(<<EOS.chomp) { |time, record|
2013-3-03 14:27:33 [main] ERROR Main - Exception
javax.management.RuntimeErrorException: null
\tat Main.main(Main.java:16) ~[bin/:na]
EOS
assert_equal(event_time('2013-3-03 14:27:33').to_i, time)
assert_equal({
"thread" => "main",
"level" => "ERROR",
"message" => " Main - Exception\njavax.management.RuntimeErrorException: null\n\tat Main.main(Main.java:16) ~[bin/:na]"
}, record)
}
end
def test_parse_with_firstline
parser = create_parser('format_firstline' => '/----/', 'format1' => '/time=(?<time>\d{4}-\d{1,2}-\d{1,2} \d{1,2}:\d{1,2}:\d{1,2}).*message=(?<message>.*)/')
parser.instance.parse(<<EOS.chomp) { |time, record|
----
time=2013-3-03 14:27:33
message=test1
EOS
assert(parser.instance.firstline?('----'))
assert_equal(event_time('2013-3-03 14:27:33').to_i, time)
assert_equal({"message" => "test1"}, record)
}
end
def test_parse_with_multiple_formats
parser = create_parser('format_firstline' => '/^Started/',
'format1' => '/Started (?<method>[^ ]+) "(?<path>[^"]+)" for (?<host>[^ ]+) at (?<time>[^ ]+ [^ ]+ [^ ]+)\n/',
'format2' => '/Processing by (?<controller>[^\u0023]+)\u0023(?<controller_method>[^ ]+) as (?<format>[^ ]+?)\n/',
'format3' => '/( Parameters: (?<parameters>[^ ]+)\n)?/',
'format4' => '/ Rendered (?<template>[^ ]+) within (?<layout>.+) \([\d\.]+ms\)\n/',
'format5' => '/Completed (?<code>[^ ]+) [^ ]+ in (?<runtime>[\d\.]+)ms \(Views: (?<view_runtime>[\d\.]+)ms \| ActiveRecord: (?<ar_runtime>[\d\.]+)ms\)/'
)
parser.instance.parse(<<EOS.chomp) { |time, record|
Started GET "/users/123/" for 127.0.0.1 at 2013-06-14 12:00:11 +0900
Processing by UsersController#show as HTML
Parameters: {"user_id"=>"123"}
Rendered users/show.html.erb within layouts/application (0.3ms)
Completed 200 OK in 4ms (Views: 3.2ms | ActiveRecord: 0.0ms)
EOS
assert(parser.instance.firstline?('Started GET "/users/123/" for 127.0.0.1...'))
assert_equal(event_time('2013-06-14 12:00:11 +0900').to_i, time)
assert_equal({
"method" => "GET",
"path" => "/users/123/",
"host" => "127.0.0.1",
"controller" => "UsersController",
"controller_method" => "show",
"format" => "HTML",
"parameters" => "{\"user_id\"=>\"123\"}",
"template" => "users/show.html.erb",
"layout" => "layouts/application",
"code" => "200",
"runtime" => "4",
"view_runtime" => "3.2",
"ar_runtime" => "0.0"
}, record)
}
end
def test_parse_with_keep_time_key
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::MultilineParser).configure(
'format1' => '/^(?<time>\d{4}-\d{1,2}-\d{1,2} \d{1,2}:\d{1,2}:\d{1,2})/',
'keep_time_key' => 'true'
)
text = '2013-3-03 14:27:33'
parser.instance.parse(text) { |time, record|
assert_equal text, record['time']
}
end
def test_parse_unmatched_lines
parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::MultilineParser).configure(
'format1' => '/^message (?<message_id>\d)/',
'unmatched_lines' => true,
)
text = "message 1\nmessage a"
r = []
parser.instance.parse(text) { |_, record| r << record }
assert_equal [{ 'message_id' => '1' }, { 'unmatched_line' => 'message a'}], r
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_bare_output.rb | test/plugin/test_bare_output.rb | require_relative '../helper'
require 'fluent/plugin/bare_output'
require 'fluent/event'
module FluentPluginBareOutputTest
class DummyPlugin < Fluent::Plugin::BareOutput
attr_reader :store
def initialize
super
@store = []
end
def process(tag, es)
es.each do |time, record|
@store << [tag, time, record]
end
end
end
end
class BareOutputTest < Test::Unit::TestCase
setup do
Fluent::Test.setup
@p = FluentPluginBareOutputTest::DummyPlugin.new
end
test 'has healthy lifecycle' do
assert !@p.configured?
@p.configure(config_element())
assert @p.configured?
assert !@p.started?
@p.start
assert @p.start
assert !@p.stopped?
@p.stop
assert @p.stopped?
assert !@p.before_shutdown?
@p.before_shutdown
assert @p.before_shutdown?
assert !@p.shutdown?
@p.shutdown
assert @p.shutdown?
assert !@p.after_shutdown?
@p.after_shutdown
assert @p.after_shutdown?
assert !@p.closed?
@p.close
assert @p.closed?
assert !@p.terminated?
@p.terminate
assert @p.terminated?
end
test 'has plugin_id automatically generated' do
assert @p.respond_to?(:plugin_id_configured?)
assert @p.respond_to?(:plugin_id)
@p.configure(config_element())
assert !@p.plugin_id_configured?
assert @p.plugin_id
assert{ @p.plugin_id != 'mytest' }
end
test 'has plugin_id manually configured' do
@p.configure(config_element('ROOT', '', {'@id' => 'mytest'}))
assert @p.plugin_id_configured?
assert_equal 'mytest', @p.plugin_id
end
test 'has plugin logger' do
assert @p.respond_to?(:log)
assert @p.log
# default logger
original_logger = @p.log
@p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
assert(@p.log.object_id != original_logger.object_id)
assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
end
test 'can load plugin helpers' do
assert_nothing_raised do
class FluentPluginBareOutputTest::DummyPlugin2 < Fluent::Plugin::BareOutput
helpers :storage
end
end
end
test 'can use metrics plugins and fallback methods' do
@p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
%w[num_errors_metrics emit_count_metrics emit_size_metrics emit_records_metrics].each do |metric_name|
assert_true @p.instance_variable_get(:"@#{metric_name}").is_a?(Fluent::Plugin::Metrics)
end
assert_equal 0, @p.num_errors
assert_equal 0, @p.emit_count
assert_equal 0, @p.emit_size
assert_equal 0, @p.emit_records
end
test 'can get input event stream to write' do
@p.configure(config_element('ROOT'))
@p.start
es1 = Fluent::OneEventStream.new(event_time('2016-05-21 18:37:31 +0900'), {'k1' => 'v1'})
es2 = Fluent::ArrayEventStream.new([
[event_time('2016-05-21 18:38:33 +0900'), {'k2' => 'v2'}],
[event_time('2016-05-21 18:39:10 +0900'), {'k3' => 'v3'}],
])
@p.emit_events('mytest1', es1)
@p.emit_events('mytest2', es2)
all_events = [
['mytest1', event_time('2016-05-21 18:37:31 +0900'), {'k1' => 'v1'}],
['mytest2', event_time('2016-05-21 18:38:33 +0900'), {'k2' => 'v2'}],
['mytest2', event_time('2016-05-21 18:39:10 +0900'), {'k3' => 'v3'}],
]
assert_equal all_events, @p.store
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_buffer.rb | test/plugin/test_out_buffer.rb | require_relative '../helper'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_buffer'
class BufferOutputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
end
def create_driver(conf = "")
Fluent::Test::Driver::Output.new(Fluent::Plugin::BufferOutput).configure(conf)
end
test "default setting" do
d = create_driver(
config_element(
"ROOT", "", {},
[config_element("buffer", "", {"path" => "test"})]
)
)
assert_equal(
[
"file",
["tag"],
:interval,
10,
],
[
d.instance.buffer_config["@type"],
d.instance.buffer_config.chunk_keys,
d.instance.buffer_config.flush_mode,
d.instance.buffer_config.flush_interval,
]
)
end
test "#write" do
d = create_driver(
config_element(
"ROOT", "", {},
[config_element("buffer", "", {"@type" => "memory", "flush_mode" => "immediate"})]
)
)
time = event_time
record = {"message" => "test"}
d.run(default_tag: 'test') do
d.feed(time, record)
end
assert_equal [["test", time, record]], d.events
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_filter_record_transformer.rb | test/plugin/test_filter_record_transformer.rb | require_relative '../helper'
require 'timecop'
require 'fluent/test/driver/filter'
require 'fluent/plugin/filter_record_transformer'
require 'flexmock/test_unit'
class RecordTransformerFilterTest < Test::Unit::TestCase
include Fluent
include FlexMock::TestCase
setup do
Test.setup
@hostname = Socket.gethostname.chomp
@tag = 'test.tag'
@tag_parts = @tag.split('.')
@time = event_time('2010-05-04 03:02:01 UTC')
Timecop.freeze(@time.to_time)
end
teardown do
Timecop.return
end
def create_driver(conf = '')
Fluent::Test::Driver::Filter.new(Fluent::Plugin::RecordTransformerFilter).configure(conf)
end
sub_test_case 'configure' do
test 'check default' do
assert_nothing_raised do
create_driver
end
end
test "keep_keys must be specified together with renew_record true" do
assert_raise(Fluent::ConfigError) do
create_driver(%[keep_keys a])
end
end
end
sub_test_case "test options" do
def filter(config, msgs = [''])
d = create_driver(config)
d.run {
msgs.each { |msg|
d.feed(@tag, @time, {'foo' => 'bar', 'message' => msg, 'nest' => {'k1' => 'v1', 'k2' => 'v2'}})
}
}
d.filtered
end
CONFIG = %[
<record>
hostname ${hostname}
tag ${tag}
time ${time}
message ${hostname} ${tag_parts[-1]} ${record["message"]}
</record>
]
test 'typical usage' do
msgs = ['1', '2']
filtered = filter(CONFIG, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal('bar', r['foo'])
assert_equal(@hostname, r['hostname'])
assert_equal(@tag, r['tag'])
assert_equal(Time.at(@time).localtime.to_s, r['time'])
assert_equal("#{@hostname} #{@tag_parts[-1]} #{msgs[i]}", r['message'])
assert_equal({'k1' => 'v1', 'k2' => 'v2'}, r['nest'])
end
end
test 'remove_keys' do
config = CONFIG + %[remove_keys foo,message]
filtered = filter(config)
filtered.each_with_index do |(_t, r), i|
assert_not_include(r, 'foo')
assert_equal(@hostname, r['hostname'])
assert_equal(@tag, r['tag'])
assert_equal(Time.at(@time).localtime.to_s, r['time'])
assert_not_include(r, 'message')
end
end
test 'remove_keys with nested key' do
config = CONFIG + %[remove_keys $.nest.k1]
filtered = filter(config)
filtered.each_with_index do |(_t, r), i|
assert_not_include(r['nest'], 'k1')
end
end
test 'renew_record' do
config = CONFIG + %[renew_record true]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_not_include(r, 'foo')
assert_equal(@hostname, r['hostname'])
assert_equal(@tag, r['tag'])
assert_equal(Time.at(@time).localtime.to_s, r['time'])
assert_equal("#{@hostname} #{@tag_parts[-1]} #{msgs[i]}", r['message'])
end
end
test 'renew_time_key' do
config = %[renew_time_key message]
times = [ Time.local(2,2,3,4,5,2010,nil,nil,nil,nil), Time.local(3,2,3,4,5,2010,nil,nil,nil,nil) ]
msgs = times.map{|t| t.to_f.to_s }
filtered = filter(config, msgs)
filtered.each_with_index do |(time, _record), i|
assert_equal(times[i].to_i, time)
assert(time.is_a?(Fluent::EventTime))
assert_true(_record.has_key?('message'))
end
end
test 'renew_time_key and remove_keys' do
config = %[
renew_time_key event_time_key
remove_keys event_time_key
auto_typecast true
<record>
event_time_key ${record["message"]}
</record>
]
times = [Time.local(2, 2, 3, 4, 5, 2010, nil, nil, nil, nil), Time.local(3, 2, 3, 4, 5, 2010, nil, nil, nil, nil)]
msgs = times.map { |t| t.to_f.to_s }
filtered = filter(config, msgs)
filtered.each_with_index do |(time, _record), i|
assert_equal(times[i].to_i, time)
assert(time.is_a?(Fluent::EventTime))
assert_false(_record.has_key?('event_time_key'))
end
end
test 'keep_keys' do
config = %[renew_record true\nkeep_keys foo,message]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal('bar', r['foo'])
assert_equal(msgs[i], r['message'])
end
end
test 'keep_keys that are not present in the original record should not be included in the result record' do
config = %[renew_record true\nkeep_keys foo, bar, baz, message]
msgs = ['1', '2', nil]
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal('bar', r['foo'])
assert_equal(msgs[i], r['message'])
assert_equal(false, r.has_key?('bar'))
assert_equal(false, r.has_key?('baz'))
end
end
test 'enable_ruby' do
config = %[
enable_ruby yes
<record>
message ${hostname} ${tag_parts.last} ${"'" + record["message"] + "'"}
</record>
]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal("#{@hostname} #{@tag_parts[-1]} '#{msgs[i]}'", r['message'])
end
end
test 'hash_value' do
config = %[
<record>
hash_field {"k1":100, "k2":"foobar"}
</record>
%]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal({"k1"=>100, "k2"=>"foobar"}, r['hash_field'])
end
end
test 'array_value' do
config = %[
<record>
array_field [1, 2, 3]
</record>
%]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal([1,2,3], r['array_field'])
end
end
test 'array_hash_mixed' do
config = %[
<record>
mixed_field {"hello":[1,2,3], "world":{"foo":"bar"}}
</record>
%]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal({"hello"=>[1,2,3], "world"=>{"foo"=>"bar"}}, r['mixed_field'])
end
end
end
sub_test_case 'test placeholders' do
def filter(config, msgs = [''])
d = create_driver(config)
yield d if block_given?
d.run {
records = msgs.map do |msg|
next msg if msg.is_a?(Hash)
{ 'eventType0' => 'bar', 'message' => msg }
end
records.each do |record|
d.feed(@tag, @time, record)
end
}
d.filtered
end
%w[yes no].each do |enable_ruby|
test "hostname with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
<record>
message ${hostname}
</record>
]
filtered = filter(config)
filtered.each do |t, r|
assert_equal(@hostname, r['message'])
end
end
test "tag with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
<record>
message ${tag}
</record>
]
filtered = filter(config)
filtered.each do |t, r|
assert_equal(@tag, r['message'])
end
end
test "tag_parts with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
<record>
message ${tag_parts[0]} ${tag_parts[-1]}
</record>
]
expected = "#{@tag.split('.').first} #{@tag.split('.').last}"
filtered = filter(config)
filtered.each do |t, r|
assert_equal(expected, r['message'])
end
end
test "${tag_prefix[N]} and ${tag_suffix[N]} with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
<record>
message ${tag_prefix[1]} ${tag_prefix[-2]} ${tag_suffix[2]} ${tag_suffix[-3]}
</record>
]
@tag = 'prefix.test.tag.suffix'
expected = "prefix.test prefix.test.tag tag.suffix test.tag.suffix"
filtered = filter(config)
filtered.each do |t, r|
assert_equal(expected, r['message'])
end
end
test "time with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
<record>
message ${time}
</record>
]
filtered = filter(config)
filtered.each do |t, r|
if enable_ruby == "yes"
assert_equal(Time.at(@time).localtime, r['message'])
else
assert_equal(Time.at(@time).localtime.to_s, r['message'])
end
end
end
test "record keys with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
remove_keys eventType0
<record>
message bar ${record["message"]}
eventtype ${record["eventType0"]}
</record>
]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_not_include(r, 'eventType0')
assert_equal("bar", r['eventtype'])
assert_equal("bar #{msgs[i]}", r['message'])
end
end
test "Prevent overwriting reserved keys such as tag with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
<record>
new_tag ${tag}
new_record_tag ${record["tag"]}
</record>
]
records = [{'tag' => 'tag', 'time' => 'time'}]
filtered = filter(config, records)
filtered.each_with_index do |(_t, r), i|
assert_not_equal('tag', r['new_tag'])
assert_equal(@tag, r['new_tag'])
assert_equal('tag', r['new_record_tag'])
end
end
test "hash values with placeholders with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
<record>
hash_field {
"hostname":"${hostname}",
"tag":"${tag}",
"${tag}":100
}
</record>
]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal({"hostname" => @hostname, "tag" => @tag, "#{@tag}" => 100}, r['hash_field'])
end
end
test "array values with placeholders with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
<record>
array_field ["${hostname}", "${tag}"]
</record>
]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal([@hostname, @tag], r['array_field'])
end
end
test "array and hash values with placeholders with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
<record>
mixed_field [{"tag":"${tag}"}]
</record>
]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal([{"tag" => @tag}], r['mixed_field'])
end
end
test "keys with placeholders with enable_ruby #{enable_ruby}" do
config = %[
enable_ruby #{enable_ruby}
renew_record true
<record>
${hostname} hostname
foo.${tag} tag
</record>
]
msgs = ['1', '2']
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
assert_equal({@hostname=>'hostname',"foo.#{@tag}"=>'tag'}, r)
end
end
test "disabled typecasting of values with enable_ruby #{enable_ruby}" do
config = %[
auto_typecast false
enable_ruby #{enable_ruby}
<record>
single ${record["source"]}
multiple ${record["source"]}${record["source"]}
with_prefix prefix-${record["source"]}
with_suffix ${record["source"]}-suffix
with_quote record["source"][""]
</record>
]
msgs = [
{ "source" => "string" },
{ "source" => 123 },
{ "source" => [1, 2] },
{ "source" => {a:1, b:2} },
{ "source" => nil },
]
expected_results = [
{ single: "string",
multiple: "stringstring",
with_prefix: "prefix-string",
with_suffix: "string-suffix",
with_quote: %Q{record["source"][""]} },
{ single: 123.to_s,
multiple: "#{123.to_s}#{123.to_s}",
with_prefix: "prefix-#{123.to_s}",
with_suffix: "#{123.to_s}-suffix",
with_quote: %Q{record["source"][""]} },
{ single: [1, 2].to_s,
multiple: "#{[1, 2].to_s}#{[1, 2].to_s}",
with_prefix: "prefix-#{[1, 2].to_s}",
with_suffix: "#{[1, 2].to_s}-suffix",
with_quote: %Q{record["source"][""]} },
{ single: {a:1, b:2}.to_s,
multiple: "#{{a:1, b:2}.to_s}#{{a:1, b:2}.to_s}",
with_prefix: "prefix-#{{a:1, b:2}.to_s}",
with_suffix: "#{{a:1, b:2}.to_s}-suffix",
with_quote: %Q{record["source"][""]} },
{ single: nil.to_s,
multiple: "#{nil.to_s}#{nil.to_s}",
with_prefix: "prefix-#{nil.to_s}",
with_suffix: "#{nil.to_s}-suffix",
with_quote: %Q{record["source"][""]} },
]
actual_results = []
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
actual_results << {
single: r["single"],
multiple: r["multiple"],
with_prefix: r["with_prefix"],
with_suffix: r["with_suffix"],
with_quote: r["with_quote"],
}
end
assert_equal(expected_results, actual_results)
end
test "enabled typecasting of values with enable_ruby #{enable_ruby}" do
config = %[
auto_typecast yes
enable_ruby #{enable_ruby}
<record>
single ${record["source"]}
multiple ${record["source"]}${record["source"]}
with_prefix prefix-${record["source"]}
with_suffix ${record["source"]}-suffix
</record>
]
msgs = [
{ "source" => "string" },
{ "source" => 123 },
{ "source" => [1, 2] },
{ "source" => {a:1, b:2} },
{ "source" => nil },
]
expected_results = [
{ single: "string",
multiple: "stringstring",
with_prefix: "prefix-string",
with_suffix: "string-suffix" },
{ single: 123,
multiple: "#{123.to_s}#{123.to_s}",
with_prefix: "prefix-#{123.to_s}",
with_suffix: "#{123.to_s}-suffix" },
{ single: [1, 2],
multiple: "#{[1, 2].to_s}#{[1, 2].to_s}",
with_prefix: "prefix-#{[1, 2].to_s}",
with_suffix: "#{[1, 2].to_s}-suffix" },
{ single: {a:1, b:2},
multiple: "#{{a:1, b:2}.to_s}#{{a:1, b:2}.to_s}",
with_prefix: "prefix-#{{a:1, b:2}.to_s}",
with_suffix: "#{{a:1, b:2}.to_s}-suffix" },
{ single: nil,
multiple: "#{nil.to_s}#{nil.to_s}",
with_prefix: "prefix-#{nil.to_s}",
with_suffix: "#{nil.to_s}-suffix" },
]
actual_results = []
filtered = filter(config, msgs)
filtered.each_with_index do |(_t, r), i|
actual_results << {
single: r["single"],
multiple: r["multiple"],
with_prefix: r["with_prefix"],
with_suffix: r["with_suffix"],
}
end
assert_equal(expected_results, actual_results)
end
end
test 'unknown placeholder (enable_ruby no)' do
config = %[
enable_ruby no
<record>
message ${unknown}
</record>
]
filter(config) { |d|
mock(d.instance.log).warn("unknown placeholder `${unknown}` found")
}
end
test 'expand fields starting with @ (enable_ruby no)' do
config = %[
enable_ruby no
<record>
foo ${record["@timestamp"]}
</record>
]
d = create_driver(config)
message = {"@timestamp" => "foo"}
d.run { d.feed(@tag, @time, message) }
filtered = d.filtered
filtered.each do |t, r|
assert_equal(message["@timestamp"], r['foo'])
end
end
test 'auto_typecast placeholder containing {} (enable_ruby yes)' do
config = %[
tag tag
enable_ruby yes
auto_typecast yes
<record>
foo ${record.map{|k,v|v}}
</record>
]
d = create_driver(config)
message = {"@timestamp" => "foo"}
d.run { d.feed(@tag, @time, message) }
filtered = d.filtered
filtered.each do |t, r|
assert_equal([message["@timestamp"]], r['foo'])
end
end
end # test placeholders
sub_test_case 'test error record' do
test 'invalid record for placeholders' do
d = create_driver(%[
enable_ruby yes
<record>
foo ${record["unknown"]["key"]}
</record>
])
flexmock(d.instance.router).should_receive(:emit_error_event).
with(String, Fluent::EventTime, Hash, RuntimeError).once
d.run do
d.feed(@tag, Fluent::EventTime.now, {'key' => 'value'})
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_buf_file.rb | test/plugin/test_buf_file.rb | require_relative '../helper'
require 'fluent/plugin/buf_file'
require 'fluent/plugin/output'
require 'fluent/unique_id'
require 'fluent/system_config'
require 'fluent/env'
require 'msgpack'
module FluentPluginFileBufferTest
class DummyOutputPlugin < Fluent::Plugin::Output
Fluent::Plugin.register_output('buffer_file_test_output', self)
config_section :buffer do
config_set_default :@type, 'file'
end
def multi_workers_ready?
true
end
def write(chunk)
# drop
end
end
class DummyErrorOutputPlugin < DummyOutputPlugin
def register_write(&block)
instance_variable_set(:@write, block)
end
def initialize
super
@should_fail_writing = true
@write = nil
end
def recover
@should_fail_writing = false
end
def write(chunk)
if @should_fail_writing
raise "failed writing chunk"
else
@write ? @write.call(chunk) : nil
end
end
def format(tag, time, record)
[tag, time.to_i, record].to_json + "\n"
end
end
end
class FileBufferTest < Test::Unit::TestCase
def metadata(timekey: nil, tag: nil, variables: nil, seq: 0)
m = Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
m.seq = seq
m
end
def write_metadata_old(path, chunk_id, metadata, size, ctime, mtime)
metadata = {
timekey: metadata.timekey, tag: metadata.tag, variables: metadata.variables,
id: chunk_id,
s: size,
c: ctime,
m: mtime,
}
File.open(path, 'wb') do |f|
f.write metadata.to_msgpack
end
end
def write_metadata(path, chunk_id, metadata, size, ctime, mtime)
metadata = {
timekey: metadata.timekey, tag: metadata.tag, variables: metadata.variables,
seq: metadata.seq,
id: chunk_id,
s: size,
c: ctime,
m: mtime,
}
data = metadata.to_msgpack
size = [data.size].pack('N')
File.open(path, 'wb') do |f|
f.write(Fluent::Plugin::Buffer::FileChunk::BUFFER_HEADER + size + data)
end
end
sub_test_case 'non configured buffer plugin instance' do
setup do
Fluent::Test.setup
@dir = File.expand_path('../../tmp/buffer_file_dir', __FILE__)
FileUtils.rm_rf @dir
FileUtils.mkdir_p @dir
end
test 'path should include * normally' do
d = FluentPluginFileBufferTest::DummyOutputPlugin.new
p = Fluent::Plugin::FileBuffer.new
p.owner = d
p.configure(config_element('buffer', '', {'path' => File.join(@dir, 'buffer.*.file')}))
assert_equal File.join(@dir, 'buffer.*.file'), p.path
end
data('default' => [nil, 'log'],
'conf' => ['.buf', 'buf'])
test 'existing directory will be used with additional default file name' do |params|
conf, suffix = params
d = FluentPluginFileBufferTest::DummyOutputPlugin.new
p = Fluent::Plugin::FileBuffer.new
p.owner = d
c = {'path' => @dir}
c['path_suffix'] = conf if conf
p.configure(config_element('buffer', '', c))
assert_equal File.join(@dir, "buffer.*.#{suffix}"), p.path
end
data('default' => [nil, 'log'],
'conf' => ['.buf', 'buf'])
test 'unexisting path without * handled as directory' do |params|
conf, suffix = params
d = FluentPluginFileBufferTest::DummyOutputPlugin.new
p = Fluent::Plugin::FileBuffer.new
p.owner = d
c = {'path' => File.join(@dir, 'buffer')}
c['path_suffix'] = conf if conf
p.configure(config_element('buffer', '', c))
assert_equal File.join(@dir, 'buffer', "buffer.*.#{suffix}"), p.path
end
end
sub_test_case 'buffer configurations and workers' do
setup do
@bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
FileUtils.rm_rf @bufdir
Fluent::Test.setup
@d = FluentPluginFileBufferTest::DummyOutputPlugin.new
@p = Fluent::Plugin::FileBuffer.new
@p.owner = @d
end
test 'raise error if configured path is of existing file' do
@bufpath = File.join(@bufdir, 'buf')
FileUtils.mkdir_p @bufdir
File.open(@bufpath, 'w'){|f| } # create and close the file
assert File.exist?(@bufpath)
assert File.file?(@bufpath)
buf_conf = config_element('buffer', '', {'path' => @bufpath})
assert_raise Fluent::ConfigError.new("Plugin 'file' does not support multi workers configuration (Fluent::Plugin::FileBuffer)") do
Fluent::SystemConfig.overwrite_system_config('workers' => 4) do
@d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
end
end
end
test 'raise error if fluentd is configured to use file path pattern and multi workers' do
@bufpath = File.join(@bufdir, 'testbuf.*.log')
buf_conf = config_element('buffer', '', {'path' => @bufpath})
assert_raise Fluent::ConfigError.new("Plugin 'file' does not support multi workers configuration (Fluent::Plugin::FileBuffer)") do
Fluent::SystemConfig.overwrite_system_config('workers' => 4) do
@d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
end
end
end
test 'enables multi worker configuration with unexisting directory path' do
assert_false File.exist?(@bufdir)
buf_conf = config_element('buffer', '', {'path' => @bufdir})
assert_nothing_raised do
Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
@d.configure(config_element('ROOT', '', {}, [buf_conf]))
end
end
end
test 'enables multi worker configuration with existing directory path' do
FileUtils.mkdir_p @bufdir
buf_conf = config_element('buffer', '', {'path' => @bufdir})
assert_nothing_raised do
Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
@d.configure(config_element('ROOT', '', {}, [buf_conf]))
end
end
end
test 'enables multi worker configuration with root dir' do
buf_conf = config_element('buffer', '')
assert_nothing_raised do
Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
@d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
end
end
end
end
# Behavior of the file buffer when configured with only a 'path' option:
# directory creation, permissions, and chunk-file generation.
sub_test_case 'buffer plugin configured only with path' do
  setup do
    @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
    @bufpath = File.join(@bufdir, 'testbuf.*.log')
    # Start each test from a clean buffer directory.
    FileUtils.rm_r @bufdir if File.exist?(@bufdir)
    Fluent::Test.setup
    @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
    @p = Fluent::Plugin::FileBuffer.new
    @p.owner = @d
    @p.configure(config_element('buffer', '', {'path' => @bufpath}))
    @p.start
  end
  teardown do
    # Walk the plugin through the remaining shutdown phases, skipping any
    # phase already performed by the test itself.
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    # Delete leftover chunk/metadata files so later tests see an empty dir.
    if @bufdir
      Dir.glob(File.join(@bufdir, '*')).each do |path|
        next if ['.', '..'].include?(File.basename(path))
        File.delete(path)
      end
    end
  end
  test 'this is persistent plugin' do
    assert @p.persistent?
  end
  test '#start creates directory for buffer chunks' do
    plugin = Fluent::Plugin::FileBuffer.new
    plugin.owner = @d
    rand_num = rand(0..100)
    bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
    bufdir = File.dirname(bufpath)
    FileUtils.rm_r bufdir if File.exist?(bufdir)
    assert !File.exist?(bufdir)
    plugin.configure(config_element('buffer', '', {'path' => bufpath}))
    # The directory must be created by #start, not by #configure.
    assert !File.exist?(bufdir)
    plugin.start
    assert File.exist?(bufdir)
    # Default directory permission is 0755.
    assert{ File.stat(bufdir).mode.to_s(8).end_with?('755') }
    plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
    FileUtils.rm_r bufdir
  end
  test '#start creates directory for buffer chunks with specified permission' do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
    plugin = Fluent::Plugin::FileBuffer.new
    plugin.owner = @d
    rand_num = rand(0..100)
    bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
    bufdir = File.dirname(bufpath)
    FileUtils.rm_r bufdir if File.exist?(bufdir)
    assert !File.exist?(bufdir)
    # Per-plugin dir_permission overrides the 0755 default.
    plugin.configure(config_element('buffer', '', {'path' => bufpath, 'dir_permission' => '0700'}))
    assert !File.exist?(bufdir)
    plugin.start
    assert File.exist?(bufdir)
    assert{ File.stat(bufdir).mode.to_s(8).end_with?('700') }
    plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
    FileUtils.rm_r bufdir
  end
  test '#start creates directory for buffer chunks with specified permission via system config' do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
    # System-level dir_permission applies when the buffer config omits it.
    sysconf = {'dir_permission' => '700'}
    Fluent::SystemConfig.overwrite_system_config(sysconf) do
      plugin = Fluent::Plugin::FileBuffer.new
      plugin.owner = @d
      rand_num = rand(0..100)
      bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
      bufdir = File.dirname(bufpath)
      FileUtils.rm_r bufdir if File.exist?(bufdir)
      assert !File.exist?(bufdir)
      plugin.configure(config_element('buffer', '', {'path' => bufpath}))
      assert !File.exist?(bufdir)
      plugin.start
      assert File.exist?(bufdir)
      assert{ File.stat(bufdir).mode.to_s(8).end_with?('700') }
      plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
      FileUtils.rm_r bufdir
    end
  end
  test '#generate_chunk generates blank file chunk on path from unique_id of metadata' do
    m1 = metadata()
    c1 = @p.generate_chunk(m1)
    assert c1.is_a? Fluent::Plugin::Buffer::FileChunk
    assert_equal m1, c1.metadata
    assert c1.empty?
    assert_equal :unstaged, c1.state
    assert_equal Fluent::DEFAULT_FILE_PERMISSION, c1.permission
    # The '*' in the configured path is replaced by 'b' + hex unique_id.
    assert_equal @bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c1.unique_id)}."), c1.path
    assert{ File.stat(c1.path).mode.to_s(8).end_with?('644') }
    m2 = metadata(timekey: event_time('2016-04-17 11:15:00 -0700').to_i)
    c2 = @p.generate_chunk(m2)
    assert c2.is_a? Fluent::Plugin::Buffer::FileChunk
    assert_equal m2, c2.metadata
    assert c2.empty?
    assert_equal :unstaged, c2.state
    assert_equal Fluent::DEFAULT_FILE_PERMISSION, c2.permission
    assert_equal @bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c2.unique_id)}."), c2.path
    assert{ File.stat(c2.path).mode.to_s(8).end_with?('644') }
    c1.purge
    c2.purge
  end
  test '#generate_chunk generates blank file chunk with specified permission' do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
    plugin = Fluent::Plugin::FileBuffer.new
    plugin.owner = @d
    rand_num = rand(0..100)
    bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
    bufdir = File.dirname(bufpath)
    FileUtils.rm_r bufdir if File.exist?(bufdir)
    assert !File.exist?(bufdir)
    # Per-plugin file_permission overrides the default chunk permission.
    plugin.configure(config_element('buffer', '', {'path' => bufpath, 'file_permission' => '0600'}))
    assert !File.exist?(bufdir)
    plugin.start
    m = metadata()
    c = plugin.generate_chunk(m)
    assert c.is_a? Fluent::Plugin::Buffer::FileChunk
    assert_equal m, c.metadata
    assert c.empty?
    assert_equal :unstaged, c.state
    assert_equal 0600, c.permission
    assert_equal bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c.unique_id)}."), c.path
    assert{ File.stat(c.path).mode.to_s(8).end_with?('600') }
    c.purge
    plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
    FileUtils.rm_r bufdir
  end
  test '#generate_chunk generates blank file chunk with specified permission with system_config' do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
    begin
      plugin = Fluent::Plugin::FileBuffer.new
      plugin.owner = @d
      rand_num = rand(0..100)
      bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
      bufdir = File.dirname(bufpath)
      FileUtils.rm_r bufdir if File.exist?(bufdir)
      assert !File.exist?(bufdir)
      plugin.configure(config_element('buffer', '', { 'path' => bufpath }))
      assert !File.exist?(bufdir)
      plugin.start
      m = metadata()
      c = nil
      # System-level file_permission is consulted at chunk-generation time.
      Fluent::SystemConfig.overwrite_system_config("file_permission" => "700") do
        c = plugin.generate_chunk(m)
      end
      assert c.is_a? Fluent::Plugin::Buffer::FileChunk
      assert_equal m, c.metadata
      assert c.empty?
      assert_equal :unstaged, c.state
      assert_equal 0700, c.permission
      assert_equal bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c.unique_id)}."), c.path
      assert{ File.stat(c.path).mode.to_s(8).end_with?('700') }
      c.purge
      plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
    ensure
      FileUtils.rm_r bufdir
    end
  end
end
# When no buffer path is given, the path is derived from the system root
# directory, the worker id, and the owning plugin's @id.
sub_test_case 'configured with system root directory and plugin @id' do
  setup do
    @root_dir = File.expand_path('../../tmp/buffer_file_root', __FILE__)
    FileUtils.rm_rf @root_dir
    Fluent::Test.setup
    @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
    @p = Fluent::Plugin::FileBuffer.new
    @p.owner = @d
  end
  teardown do
    # Complete any remaining shutdown phases of the buffer plugin.
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
  end
  # Data-driven: default chunk suffix is 'log'; 'path_suffix .buf' yields 'buf'.
  data('default' => [nil, 'log'],
       'conf' => ['.buf', 'buf'])
  test '#start creates directory for buffer chunks' do |params|
    conf, suffix = params
    c = {}
    c['path_suffix'] = conf if conf
    Fluent::SystemConfig.overwrite_system_config('root_dir' => @root_dir) do
      @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}))
      @p.configure(config_element('buffer', '', c))
    end
    # Derived layout: <root_dir>/worker0/<plugin @id>/buffer/buffer.*.<suffix>
    expected_buffer_path = File.join(@root_dir, 'worker0', 'dummy_output_with_buf', 'buffer', "buffer.*.#{suffix}")
    expected_buffer_dir = File.dirname(expected_buffer_path)
    assert_equal expected_buffer_path, @p.path
    assert_false Dir.exist?(expected_buffer_dir)
    @p.start
    assert Dir.exist?(expected_buffer_dir)
  end
end
# #resume with an empty buffer directory must yield no staged or queued chunks.
sub_test_case 'there are no existing file chunks' do
  setup do
    @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
    @bufpath = File.join(@bufdir, 'testbuf.*.log')
    FileUtils.rm_r @bufdir if File.exist?(@bufdir)
    Fluent::Test.setup
    @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
    @p = Fluent::Plugin::FileBuffer.new
    @p.owner = @d
    @p.configure(config_element('buffer', '', {'path' => @bufpath}))
    @p.start
  end
  teardown do
    # Complete the plugin lifecycle and clean up any files left behind.
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    if @bufdir
      Dir.glob(File.join(@bufdir, '*')).each do |path|
        next if ['.', '..'].include?(File.basename(path))
        File.delete(path)
      end
    end
  end
  test '#resume returns empty buffer state' do
    # resume returns [staged_hash, queued_array].
    ary = @p.resume
    assert_equal({}, ary[0])
    assert_equal([], ary[1])
  end
end
# #resume with pre-existing chunk files: 'q'-prefixed files become queued
# chunks, 'b'-prefixed files become staged chunks, restored from .meta files.
sub_test_case 'there are some existing file chunks' do
  setup do
    @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
    FileUtils.mkdir_p @bufdir unless File.exist?(@bufdir)
    # Two enqueued chunks ('q' prefix) ...
    @c1id = Fluent::UniqueId.generate
    p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
    File.open(p1, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    write_metadata(
      p1 + '.meta', @c1id, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
      4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
    )
    @c2id = Fluent::UniqueId.generate
    p2 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c2id)}.log")
    File.open(p2, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    write_metadata(
      p2 + '.meta', @c2id, metadata(timekey: event_time('2016-04-17 13:59:00 -0700').to_i),
      3, event_time('2016-04-17 13:59:00 -0700').to_i, event_time('2016-04-17 13:59:23 -0700').to_i
    )
    # ... and two staged chunks ('b' prefix).
    @c3id = Fluent::UniqueId.generate
    p3 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c3id)}.log")
    File.open(p3, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    write_metadata(
      p3 + '.meta', @c3id, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
      4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
    )
    @c4id = Fluent::UniqueId.generate
    p4 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c4id)}.log")
    File.open(p4, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    write_metadata(
      p4 + '.meta', @c4id, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
      3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
    )
    @bufpath = File.join(@bufdir, 'etest.*.log')
    Fluent::Test.setup
    @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
    @p = Fluent::Plugin::FileBuffer.new
    @p.owner = @d
    @p.configure(config_element('buffer', '', {'path' => @bufpath}))
    # #start triggers #resume, restoring the four chunks created above.
    @p.start
  end
  teardown do
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    if @bufdir
      Dir.glob(File.join(@bufdir, '*')).each do |path|
        next if ['.', '..'].include?(File.basename(path))
        File.delete(path)
      end
    end
  end
  test '#resume returns staged/queued chunks with metadata' do
    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size
    stage = @p.stage
    m3 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
    assert_equal @c3id, stage[m3].unique_id
    assert_equal 4, stage[m3].size
    assert_equal :staged, stage[m3].state
    m4 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
    assert_equal @c4id, stage[m4].unique_id
    assert_equal 3, stage[m4].size
    assert_equal :staged, stage[m4].state
  end
  test '#resume returns queued chunks ordered by last modified time (FIFO)' do
    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size
    queue = @p.queue
    # Oldest modified chunk first: FIFO replay order.
    assert{ queue[0].modified_at < queue[1].modified_at }
    assert_equal @c1id, queue[0].unique_id
    assert_equal :queued, queue[0].state
    assert_equal event_time('2016-04-17 13:58:00 -0700').to_i, queue[0].metadata.timekey
    assert_nil queue[0].metadata.tag
    assert_nil queue[0].metadata.variables
    assert_equal Time.parse('2016-04-17 13:58:00 -0700').localtime, queue[0].created_at
    assert_equal Time.parse('2016-04-17 13:58:22 -0700').localtime, queue[0].modified_at
    assert_equal 4, queue[0].size
    assert_equal @c2id, queue[1].unique_id
    assert_equal :queued, queue[1].state
    assert_equal event_time('2016-04-17 13:59:00 -0700').to_i, queue[1].metadata.timekey
    assert_nil queue[1].metadata.tag
    assert_nil queue[1].metadata.variables
    assert_equal Time.parse('2016-04-17 13:59:00 -0700').localtime, queue[1].created_at
    assert_equal Time.parse('2016-04-17 13:59:23 -0700').localtime, queue[1].modified_at
    assert_equal 3, queue[1].size
  end
end
# #resume must still find existing chunks when the configured path contains
# an unexpanded placeholder (literal '${test}' in the directory name).
sub_test_case 'there are some existing file chunks with placeholders path' do
  setup do
    @bufdir = File.expand_path('../../tmp/buffer_${test}_file', __FILE__)
    FileUtils.rm_rf(@bufdir)
    FileUtils.mkdir_p(@bufdir)
    # One enqueued ('q') chunk ...
    @c1id = Fluent::UniqueId.generate
    p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
    File.open(p1, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    write_metadata(
      p1 + '.meta', @c1id, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
      1, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
    )
    # ... and one staged ('b') chunk.
    @c2id = Fluent::UniqueId.generate
    p2 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c2id)}.log")
    File.open(p2, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    write_metadata(
      p2 + '.meta', @c2id, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
      1, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
    )
    @bufpath = File.join(@bufdir, 'etest.*.log')
    Fluent::Test.setup
    @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
    @p = Fluent::Plugin::FileBuffer.new
    @p.owner = @d
    @p.configure(config_element('buffer', '', {'path' => @bufpath}))
    @p.start
  end
  teardown do
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    FileUtils.rm_rf(@bufdir)
  end
  test '#resume returns staged/queued chunks with metadata' do
    assert_equal 1, @p.stage.size
    assert_equal 1, @p.queue.size
  end
end
# Multi-worker resume: chunks may live both directly under the configured
# path and under per-worker subdirectories ('worker0', 'worker1'). Worker 0
# must additionally adopt the legacy chunks in the top-level directory;
# other workers must only pick up their own subdirectory.
sub_test_case 'there are some existing file chunks, both in specified path and per-worker directory under specified path, configured as multi workers' do
  setup do
    @bufdir = File.expand_path('../../tmp/buffer_file/path', __FILE__)
    @worker0_dir = File.join(@bufdir, "worker0")
    @worker1_dir = File.join(@bufdir, "worker1")
    FileUtils.rm_rf @bufdir
    FileUtils.mkdir_p @worker0_dir
    FileUtils.mkdir_p @worker1_dir
    # Two enqueued chunks directly under @bufdir (legacy, pre-multi-worker layout).
    @bufdir_chunk_1 = Fluent::UniqueId.generate
    bc1 = File.join(@bufdir, "buffer.q#{Fluent::UniqueId.hex(@bufdir_chunk_1)}.log")
    File.open(bc1, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    write_metadata(
      bc1 + '.meta', @bufdir_chunk_1, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
      4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
    )
    @bufdir_chunk_2 = Fluent::UniqueId.generate
    bc2 = File.join(@bufdir, "buffer.q#{Fluent::UniqueId.hex(@bufdir_chunk_2)}.log")
    File.open(bc2, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    write_metadata(
      bc2 + '.meta', @bufdir_chunk_2, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
      4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
    )
    # One enqueued chunk in each worker directory (same id, separate files).
    @worker_dir_chunk_1 = Fluent::UniqueId.generate
    wc0_1 = File.join(@worker0_dir, "buffer.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.log")
    wc1_1 = File.join(@worker1_dir, "buffer.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.log")
    [wc0_1, wc1_1].each do |chunk_path|
      File.open(chunk_path, 'wb') do |f|
        f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      end
      write_metadata(
        chunk_path + '.meta', @worker_dir_chunk_1, metadata(timekey: event_time('2016-04-17 13:59:00 -0700').to_i),
        3, event_time('2016-04-17 13:59:00 -0700').to_i, event_time('2016-04-17 13:59:23 -0700').to_i
      )
    end
    # Two staged chunks in each worker directory.
    @worker_dir_chunk_2 = Fluent::UniqueId.generate
    wc0_2 = File.join(@worker0_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.log")
    wc1_2 = File.join(@worker1_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.log")
    [wc0_2, wc1_2].each do |chunk_path|
      File.open(chunk_path, 'wb') do |f|
        f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      end
      write_metadata(
        chunk_path + '.meta', @worker_dir_chunk_2, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
        4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
      )
    end
    @worker_dir_chunk_3 = Fluent::UniqueId.generate
    wc0_3 = File.join(@worker0_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.log")
    wc1_3 = File.join(@worker1_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.log")
    [wc0_3, wc1_3].each do |chunk_path|
      File.open(chunk_path, 'wb') do |f|
        f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      end
      write_metadata(
        chunk_path + '.meta', @worker_dir_chunk_3, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
        3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
      )
    end
    Fluent::Test.setup
  end
  teardown do
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    # BUGFIX: the worker(id=0) test sets this env var and previously never
    # removed it, leaking process-global state into every later test.
    ENV.delete('SERVERENGINE_WORKER_ID')
  end
  test 'worker(id=0) #resume returns staged/queued chunks with metadata, not only in worker dir, including the directory specified by path' do
    ENV['SERVERENGINE_WORKER_ID'] = '0'
    buf_conf = config_element('buffer', '', {'path' => @bufdir})
    @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
    with_worker_config(workers: 2, worker_id: 0) do
      @d.configure(config_element('output', '', {}, [buf_conf]))
    end
    @d.start
    @p = @d.buffer
    assert_equal 2, @p.stage.size
    assert_equal 3, @p.queue.size
    stage = @p.stage
    m1 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
    assert_equal @worker_dir_chunk_2, stage[m1].unique_id
    assert_equal 4, stage[m1].size
    assert_equal :staged, stage[m1].state
    m2 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
    assert_equal @worker_dir_chunk_3, stage[m2].unique_id
    assert_equal 3, stage[m2].size
    assert_equal :staged, stage[m2].state
    queue = @p.queue
    # Worker 0 adopts the two legacy top-level chunks plus its own queued chunk.
    assert_equal [@bufdir_chunk_1, @bufdir_chunk_2, @worker_dir_chunk_1].sort, queue.map(&:unique_id).sort
    assert_equal [3, 4, 4], queue.map(&:size).sort
    assert_equal [:queued, :queued, :queued], queue.map(&:state)
  end
  test 'worker(id=1) #resume returns staged/queued chunks with metadata, only in worker dir' do
    buf_conf = config_element('buffer', '', {'path' => @bufdir})
    @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
    with_worker_config(workers: 2, worker_id: 1) do
      @d.configure(config_element('output', '', {}, [buf_conf]))
    end
    @d.start
    @p = @d.buffer
    assert_equal 2, @p.stage.size
    assert_equal 1, @p.queue.size
    stage = @p.stage
    m1 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
    assert_equal @worker_dir_chunk_2, stage[m1].unique_id
    assert_equal 4, stage[m1].size
    assert_equal :staged, stage[m1].state
    m2 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
    assert_equal @worker_dir_chunk_3, stage[m2].unique_id
    assert_equal 3, stage[m2].size
    assert_equal :staged, stage[m2].state
    queue = @p.queue
    assert_equal @worker_dir_chunk_1, queue[0].unique_id
    assert_equal 3, queue[0].size
    assert_equal :queued, queue[0].state
  end
end
sub_test_case 'there are some existing file chunks with old format metadata' do
setup do
@bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
FileUtils.mkdir_p @bufdir unless File.exist?(@bufdir)
@c1id = Fluent::UniqueId.generate
p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
File.open(p1, 'wb') do |f|
f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser.rb | test/plugin/test_parser.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser'
require 'json'
require 'timecop'
class ParserTest < ::Test::Unit::TestCase
# Minimal concrete parser used throughout these tests: decodes a JSON
# payload and yields (time, record) via the base-class helpers.
class ExampleParser < Fluent::Plugin::Parser
  def parse(text)
    record = JSON.parse(text)
    yield convert_values(parse_time(record), record)
  end
end
# Builds a parser test driver for the base Parser class, configured
# with the given configuration (empty by default).
def create_driver(conf = {})
  driver = Fluent::Test::Driver::Parser.new(Fluent::Plugin::Parser)
  driver.configure(conf)
end
# test-unit hook: reset Fluent's test environment before each test.
def setup
  Fluent::Test.setup
end
# Sanity checks on the abstract Parser base class itself.
sub_test_case 'base class works as plugin' do
  def test_init
    # Defaults after bare construction, before #configure.
    i = Fluent::Plugin::Parser.new
    assert_nil i.types
    assert_nil i.null_value_pattern
    assert !i.null_empty_string
    assert i.estimate_current_event
    assert !i.keep_time_key
  end
  def test_configure_against_string_literal
    # Config values given as string literals are coerced (here: bool).
    d = create_driver('keep_time_key true')
    assert_true d.instance.keep_time_key
  end
  def test_parse
    # The base class leaves #parse abstract.
    d = create_driver
    assert_raise NotImplementedError do
      d.instance.parse('')
    end
  end
end
# #string_like_null(value, null_empty_string, null_value_regexp) decides
# whether a string value should be treated as null.
sub_test_case '#string_like_null' do
  setup do
    @i = ExampleParser.new
  end
  test 'returns false if null_empty_string is false and null_value_regexp is nil' do
    assert ! @i.string_like_null('a', false, nil)
    assert ! @i.string_like_null('', false, nil)
  end
  test 'returns true if null_empty_string is true and string value is empty' do
    assert ! @i.string_like_null('a', true, nil)
    assert @i.string_like_null('', true, nil)
  end
  test 'returns true if null_value_regexp has regexp and it matches string value' do
    assert ! @i.string_like_null('a', false, /null/i)
    assert @i.string_like_null('NULL', false, /null/i)
    assert @i.string_like_null('empty', false, /null|empty/i)
  end
end
# One converter per field name is built from the 'types' config; each test
# below exercises a single converter from the shared types_config.
sub_test_case '#build_type_converters converters' do
  setup do
    @i = ExampleParser.new
    # Field name => declared type (optionally with format/delimiter suffix).
    types_config = {
      "s" => "string",
      "i" => "integer",
      "f" => "float",
      "b" => "bool",
      "t1" => "time",
      "t2" => "time:%Y-%m-%d %H:%M:%S.%N",
      "t3" => "time:+0100:%Y-%m-%d %H:%M:%S.%N",
      "t4" => "time:unixtime",
      "t5" => "time:float",
      "a1" => "array",
      "a2" => "array:|",
    }
    @hash = {
      'types' => JSON.dump(types_config),
    }
  end
  test 'to do #to_s by "string" type' do
    @i.configure(config_element('parse', '', @hash))
    c = @i.type_converters["s"]
    assert_equal "", c.call("")
    assert_equal "a", c.call("a")
    assert_equal "1", c.call(1)
    assert_equal "1.01", c.call(1.01)
    assert_equal "true", c.call(true)
    assert_equal "false", c.call(false)
  end
  test 'to do #to_i by "integer" type' do
    @i.configure(config_element('parse', '', @hash))
    c = @i.type_converters["i"]
    # Non-numeric strings coerce to 0, like String#to_i.
    assert_equal 0, c.call("")
    assert_equal 0, c.call("0")
    assert_equal 0, c.call("a")
    assert_equal(-1000, c.call("-1000"))
    assert_equal 1, c.call(1)
    assert_equal 1, c.call(1.01)
    assert_equal 0, c.call(true)
    assert_equal 0, c.call(false)
  end
  test 'to do #to_f by "float" type' do
    @i.configure(config_element('parse', '', @hash))
    c = @i.type_converters["f"]
    assert_equal 0.0, c.call("")
    assert_equal 0.0, c.call("0")
    assert_equal 0.0, c.call("a")
    assert_equal(-1000.0, c.call("-1000"))
    assert_equal 1.0, c.call(1)
    assert_equal 1.01, c.call(1.01)
    assert_equal 0.0, c.call(true)
    assert_equal 0.0, c.call(false)
  end
  test 'to return true/false, which returns true only for true/yes/1 (C & perl style), by "bool"' do
    @i.configure(config_element('parse', '', @hash))
    c = @i.type_converters["b"]
    assert_false c.call("")
    assert_false c.call("0")
    assert_false c.call("a")
    assert_true c.call("1")
    assert_true c.call("true")
    assert_true c.call("True")
    assert_true c.call("YES")
    assert_true c.call(true)
    assert_false c.call(false)
    assert_false c.call("1.0")
  end
  test 'to parse time string by ruby default time parser without any options' do
    # "t1" => "time",
    with_timezone("UTC+02") do # -0200
      @i.configure(config_element('parse', '', @hash))
      c = @i.type_converters["t1"]
      assert_nil c.call("")
      # Without an offset in the input, the local timezone (-0200) applies.
      assert_equal_event_time event_time("2016-10-21 01:54:30 -0200"), c.call("2016-10-21 01:54:30")
      assert_equal_event_time event_time("2016-10-21 03:54:30 -0200"), c.call("2016-10-21 01:54:30 -0400")
      assert_equal_event_time event_time("2016-10-21 01:55:24 -0200"), c.call("2016-10-21T01:55:24-02:00")
      assert_equal_event_time event_time("2016-10-21 01:55:24 -0200"), c.call("2016-10-21T03:55:24Z")
    end
  end
  test 'to parse time string with specified time format' do
    # "t2" => "time:%Y-%m-%d %H:%M:%S.%N",
    with_timezone("UTC+02") do # -0200
      @i.configure(config_element('parse', '', @hash))
      c = @i.type_converters["t2"]
      assert_nil c.call("")
      assert_equal_event_time event_time("2016-10-21 01:54:30.123000000 -0200"), c.call("2016-10-21 01:54:30.123")
      assert_equal_event_time event_time("2016-10-21 01:54:30.012345678 -0200"), c.call("2016-10-21 01:54:30.012345678")
      # Input not matching the strptime format yields nil.
      assert_nil c.call("2016/10/21 015430")
    end
  end
  test 'to parse time string with specified time format and timezone' do
    # "t3" => "time:+0100:%Y-%m-%d %H:%M:%S.%N",
    with_timezone("UTC+02") do # -0200
      @i.configure(config_element('parse', '', @hash))
      c = @i.type_converters["t3"]
      assert_nil c.call("")
      # The +0100 from the type spec wins over the local -0200 timezone.
      assert_equal_event_time event_time("2016-10-21 01:54:30.123000000 +0100"), c.call("2016-10-21 01:54:30.123")
      assert_equal_event_time event_time("2016-10-21 01:54:30.012345678 +0100"), c.call("2016-10-21 01:54:30.012345678")
    end
  end
  test 'to parse time string in unix timestamp' do
    # "t4" => "time:unixtime",
    with_timezone("UTC+02") do # -0200
      @i.configure(config_element('parse', '', @hash))
      c = @i.type_converters["t4"]
      # Empty string coerces to 0, i.e. the epoch; fractions are truncated.
      assert_equal_event_time event_time("1970-01-01 00:00:00.0 +0000"), c.call("")
      assert_equal_event_time event_time("2016-10-21 01:54:30.0 -0200"), c.call("1477022070")
      assert_equal_event_time event_time("2016-10-21 01:54:30.0 -0200"), c.call("1477022070.01")
    end
  end
  test 'to parse time string in floating point value' do
    # "t5" => "time:float",
    with_timezone("UTC+02") do # -0200
      @i.configure(config_element('parse', '', @hash))
      c = @i.type_converters["t5"]
      assert_equal_event_time event_time("1970-01-01 00:00:00.0 +0000"), c.call("")
      # Sub-second precision is preserved, down to nanoseconds.
      assert_equal_event_time event_time("2016-10-21 01:54:30.012 -0200"), c.call("1477022070.012")
      assert_equal_event_time event_time("2016-10-21 01:54:30.123456789 -0200"), c.call("1477022070.123456789")
    end
  end
  test 'to return array of string' do
    @i.configure(config_element('parse', '', @hash))
    c = @i.type_converters["a1"]
    # Default delimiter is ','.
    assert_equal [], c.call("")
    assert_equal ["0"], c.call("0")
    assert_equal ["0"], c.call(0)
    assert_equal ["0", "1"], c.call("0,1")
    assert_equal ["0|1", "2"], c.call("0|1,2")
    assert_equal ["true"], c.call(true)
  end
  test 'to return array of string using specified delimiter' do
    @i.configure(config_element('parse', '', @hash))
    c = @i.type_converters["a2"]
    # 'array:|' splits on '|' instead of ','.
    assert_equal [], c.call("")
    assert_equal ["0"], c.call("0")
    assert_equal ["0"], c.call(0)
    assert_equal ["0,1"], c.call("0,1")
    assert_equal ["0", "1,2"], c.call("0|1,2")
    assert_equal ["true"], c.call(true)
  end
end
# With no configuration, the parser keeps all fields as-is and stamps
# records with the (frozen) current time.
sub_test_case 'example parser without any configurations' do
  setup do
    @current_time = Time.parse("2016-10-21 14:22:01.0 +1000")
    @current_event_time = Fluent::EventTime.new(@current_time.to_i, 0)
    # @current_time.to_i #=> 1477023721
    # Freeze the clock so "current time" is deterministic in assertions.
    Timecop.freeze(@current_time)
    @i = ExampleParser.new
    @i.configure(config_element('parse', '', {}))
  end
  teardown do
    Timecop.return
  end
  test 'parser returns parsed JSON object, leaving empty/NULL strings, with current time' do
    json = '{"t1":"1477023720.101","s1":"","s2":"NULL","s3":"null","k1":1,"k2":"13.1","k3":"1","k4":"yes"}'
    @i.parse(json) do |time, record|
      # No time_key configured, so t1 stays a string and the frozen clock is used.
      assert_equal_event_time @current_event_time, time
      assert_equal "1477023720.101", record["t1"]
      assert_equal "", record["s1"]
      assert_equal "NULL", record["s2"]
      assert_equal "null", record["s3"]
      assert_equal 1, record["k1"]
      assert_equal "13.1", record["k2"]
      assert_equal "1", record["k3"]
      assert_equal "yes", record["k4"]
    end
  end
end
# Full configuration: time extracted from 't1' (removed from the record),
# empty/NULL-like strings nullified, and per-field type conversion applied.
sub_test_case 'example parser fully configured' do
  setup do
    @current_time = Time.parse("2016-10-21 14:22:01.0 +1000")
    @current_event_time = Fluent::EventTime.new(@current_time.to_i, 0)
    # @current_time.to_i #=> 1477023721
    Timecop.freeze(@current_time)
    @i = ExampleParser.new
    hash = {
      'keep_time_key' => "no",
      'estimate_current_event' => "yes",
      'time_key' => "t1",
      'time_type' => "float",
      'null_empty_string' => 'yes',
      'null_value_pattern' => 'NULL|null',
      'types' => "k1:string, k2:integer, k3:float, k4:bool",
    }
    @i.configure(config_element('parse', '', hash))
  end
  teardown do
    Timecop.return
  end
  test 'parser returns parsed JSON object, leaving empty/NULL strings, with current time' do
    json = '{"t1":"1477023720.101","s1":"","s2":"NULL","s3":"null","k1":1,"k2":"13.1","k3":"1","k4":"yes"}'
    @i.parse(json) do |time, record|
      # Time comes from t1 (float), and t1 is dropped (keep_time_key no).
      assert_equal_event_time Fluent::EventTime.new(1477023720, 101_000_000), time
      assert !record.has_key?("t1")
      # Keys survive nullification; only their values become nil.
      assert{ record.has_key?("s1") && record["s1"].nil? }
      assert{ record.has_key?("s2") && record["s2"].nil? }
      assert{ record.has_key?("s3") && record["s3"].nil? }
      assert_equal "1", record["k1"]
      assert_equal 13, record["k2"]
      assert_equal 1.0, record["k3"]
      assert_equal true, record["k4"]
    end
  end
  test 'parser returns current time if a field is missing specified by time_key' do
    json = '{"s1":"","s2":"NULL","s3":"null","k1":1,"k2":"13.1","k3":"1","k4":"yes"}'
    @i.parse(json) do |time, record|
      # estimate_current_event yes => frozen clock used when t1 is absent.
      assert_equal_event_time @current_event_time, time
      assert !record.has_key?("t1")
      assert{ record.has_key?("s1") && record["s1"].nil? }
      assert{ record.has_key?("s2") && record["s2"].nil? }
      assert{ record.has_key?("s3") && record["s3"].nil? }
      assert_equal "1", record["k1"]
      assert_equal 13, record["k2"]
      assert_equal 1.0, record["k3"]
      assert_equal true, record["k4"]
    end
  end
end
# keep_time_key yes preserves the time field in the record;
# estimate_current_event no makes a missing time field yield nil time.
sub_test_case 'example parser configured not to estimate current time, and to keep time key' do
  setup do
    @current_time = Time.parse("2016-10-21 14:22:01.0 +1000")
    @current_event_time = Fluent::EventTime.new(@current_time.to_i, 0)
    # @current_time.to_i #=> 1477023721
    Timecop.freeze(@current_time)
    @i = ExampleParser.new
    hash = {
      'keep_time_key' => "yes",
      'estimate_current_event' => "no",
      'time_key' => "t1",
      'time_type' => "float",
      'null_empty_string' => 'yes',
      'null_value_pattern' => 'NULL|null',
      'types' => "k1:string, k2:integer, k3:float, k4:bool",
    }
    @i.configure(config_element('parse', '', hash))
  end
  teardown do
    Timecop.return
  end
  test 'parser returns parsed time with original field and value if the field of time exists' do
    json = '{"t1":"1477023720.101","s1":"","s2":"NULL","s3":"null","k1":1,"k2":"13.1","k3":"1","k4":"yes"}'
    @i.parse(json) do |time, record|
      assert_equal_event_time Fluent::EventTime.new(1477023720, 101_000_000), time
      # keep_time_key yes => t1 stays in the record, unmodified.
      assert_equal "1477023720.101", record["t1"]
      assert{ record.has_key?("s1") && record["s1"].nil? }
      assert{ record.has_key?("s2") && record["s2"].nil? }
      assert{ record.has_key?("s3") && record["s3"].nil? }
      assert_equal "1", record["k1"]
      assert_equal 13, record["k2"]
      assert_equal 1.0, record["k3"]
      assert_equal true, record["k4"]
    end
  end
  test 'parser returns nil as time if the field of time is missing' do
    json = '{"s1":"","s2":"NULL","s3":"null","k1":1,"k2":"13.1","k3":"1","k4":"yes"}'
    @i.parse(json) do |time, record|
      # estimate_current_event no => no fallback to the current clock.
      assert_nil time
      assert !record.has_key?("t1")
      assert{ record.has_key?("s1") && record["s1"].nil? }
      assert{ record.has_key?("s2") && record["s2"].nil? }
      assert{ record.has_key?("s3") && record["s3"].nil? }
      assert_equal "1", record["k1"]
      assert_equal 13, record["k2"]
      assert_equal 1.0, record["k3"]
      assert_equal true, record["k4"]
    end
  end
end
sub_test_case 'timeout' do
class SleepParser < Fluent::Plugin::Parser
attr :test_value
def configure(conf)
super
@test_value = nil
end
def parse(data)
sleep 10
@test_value = :passed
yield JSON.parse(data), Fluent::EventTime.now
end
end
setup do
@i = SleepParser.new
@i.instance_variable_set(:@log, Fluent::Test::TestLogger.new)
@i.configure(config_element('parse', '', {'timeout' => '1.0'}))
@i.start
end
teardown do
@i.stop
end
test 'stop longer processing and return nil' do
waiting(10) {
@i.parse('{"k":"v"}') do |time, record|
assert_nil @i.test_value
assert_nil time
assert_nil record
end
assert_true @i.log.out.logs.first.include?('parsing timed out')
}
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_formatter_single_value.rb | test/plugin/test_formatter_single_value.rb | require_relative '../helper'
require 'fluent/test/driver/formatter'
require 'fluent/plugin/formatter_single_value'
class SingleValueFormatterTest < ::Test::Unit::TestCase
def create_driver(conf = "")
Fluent::Test::Driver::Formatter.new(Fluent::Plugin::SingleValueFormatter).configure(conf)
end
def test_config_params
d = create_driver
assert_equal "message", d.instance.message_key
end
def test_config_params_message_key
d = create_driver('message_key' => 'foobar')
assert_equal "foobar", d.instance.message_key
end
data("newline (LF)" => ["lf", "\n"],
"newline (CRLF)" => ["crlf", "\r\n"])
def test_format(data)
newline_conf, newline = data
d = create_driver('newline' => newline_conf)
formatted = d.instance.format('tag', event_time, {'message' => 'awesome'})
assert_equal("awesome#{newline}", formatted)
end
def test_format_without_newline
d = create_driver('add_newline' => 'false')
formatted = d.instance.format('tag', event_time, {'message' => 'awesome'})
assert_equal("awesome", formatted)
end
data("newline (LF)" => ["lf", "\n"],
"newline (CRLF)" => ["crlf", "\r\n"])
def test_format_with_message_key(data)
newline_conf, newline = data
d = create_driver('message_key' => 'foobar', 'newline' => newline_conf)
formatted = d.instance.format('tag', event_time, {'foobar' => 'foo'})
assert_equal("foo#{newline}", formatted)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_output_as_buffered.rb | test/plugin/test_output_as_buffered.rb | require_relative '../helper'
require 'fluent/plugin/output'
require 'fluent/plugin/buffer'
require 'fluent/output'
require 'fluent/event'
require 'json'
require 'time'
require 'timeout'
require 'timecop'
module FluentPluginOutputAsBufferedTest
class DummyBareOutput < Fluent::Plugin::Output
def register(name, &block)
instance_variable_set("@#{name}", block)
end
end
class DummySyncOutput < DummyBareOutput
def initialize
super
@process = nil
end
def process(tag, es)
@process ? @process.call(tag, es) : nil
end
end
class DummyAsyncOutput < DummyBareOutput
def initialize
super
@format = nil
@write = nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
end
class DummyDelayedOutput < DummyBareOutput
def initialize
super
@format = nil
@try_write = nil
@shutdown_hook = nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
def shutdown
if @shutdown_hook
@shutdown_hook.call
end
super
end
end
class DummyStandardBufferedOutput < DummyBareOutput
def initialize
super
@prefer_delayed_commit = nil
@write = nil
@try_write = nil
end
def prefer_delayed_commit
@prefer_delayed_commit ? @prefer_delayed_commit.call : false
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
class DummyCustomFormatBufferedOutput < DummyBareOutput
def initialize
super
@format_type_is_msgpack = nil
@prefer_delayed_commit = nil
@write = nil
@try_write = nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def formatted_to_msgpack_binary?
@format_type_is_msgpack ? @format_type_is_msgpack.call : false
end
def prefer_delayed_commit
@prefer_delayed_commit ? @prefer_delayed_commit.call : false
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
# check for formatted_to_msgpack_binary compatibility
class DummyOldCustomFormatBufferedOutput < DummyBareOutput
def initialize
super
@format_type_is_msgpack = nil
@prefer_delayed_commit = nil
@write = nil
@try_write = nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def formatted_to_msgpack_binary
@format_type_is_msgpack ? @format_type_is_msgpack.call : false
end
def prefer_delayed_commit
@prefer_delayed_commit ? @prefer_delayed_commit.call : false
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
class DummyFullFeatureOutput < DummyBareOutput
def initialize
super
@prefer_buffered_processing = nil
@prefer_delayed_commit = nil
@process = nil
@format = nil
@write = nil
@try_write = nil
end
def prefer_buffered_processing
@prefer_buffered_processing ? @prefer_buffered_processing.call : false
end
def prefer_delayed_commit
@prefer_delayed_commit ? @prefer_delayed_commit.call : false
end
def process(tag, es)
@process ? @process.call(tag, es) : nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def try_write(chunk)
@try_write ? @try_write.call(chunk) : nil
end
end
module OldPluginMethodMixin
def initialize
super
@format = nil
@write = nil
end
def register(name, &block)
instance_variable_set("@#{name}", block)
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
end
class DummyOldBufferedOutput < Fluent::BufferedOutput
include OldPluginMethodMixin
end
class DummyOldObjectBufferedOutput < Fluent::ObjectBufferedOutput
include OldPluginMethodMixin
end
end
class BufferedOutputTest < Test::Unit::TestCase
def create_output(type=:full)
case type
when :bare then FluentPluginOutputAsBufferedTest::DummyBareOutput.new
when :sync then FluentPluginOutputAsBufferedTest::DummySyncOutput.new
when :buffered then FluentPluginOutputAsBufferedTest::DummyAsyncOutput.new
when :delayed then FluentPluginOutputAsBufferedTest::DummyDelayedOutput.new
when :standard then FluentPluginOutputAsBufferedTest::DummyStandardBufferedOutput.new
when :custom then FluentPluginOutputAsBufferedTest::DummyCustomFormatBufferedOutput.new
when :full then FluentPluginOutputAsBufferedTest::DummyFullFeatureOutput.new
when :old_buf then FluentPluginOutputAsBufferedTest::DummyOldBufferedOutput.new
when :old_obj then FluentPluginOutputAsBufferedTest::DummyOldObjectBufferedOutput.new
when :old_custom then FluentPluginOutputAsBufferedTest::DummyOldCustomFormatBufferedOutput.new
else
raise ArgumentError, "unknown type: #{type}"
end
end
def create_metadata(timekey: nil, tag: nil, variables: nil)
Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end
def waiting(seconds)
begin
Timeout.timeout(seconds) do
yield
end
rescue Timeout::Error
STDERR.print(*@i.log.out.logs)
raise
end
end
setup do
@i = nil
end
teardown do
if @i
@i.stop unless @i.stopped?
@i.before_shutdown unless @i.before_shutdown?
@i.shutdown unless @i.shutdown?
@i.after_shutdown unless @i.after_shutdown?
@i.close unless @i.closed?
@i.terminate unless @i.terminated?
end
Timecop.return
end
test 'queued_chunks_limit_size is same as flush_thread_count by default' do
hash = {'flush_thread_count' => 4}
i = create_output
i.register(:prefer_buffered_processing) { true }
i.configure(config_element('ROOT', '', {}, [config_element('buffer','tag',hash)]))
assert_equal 4, i.buffer.queued_chunks_limit_size
end
test 'prefer queued_chunks_limit_size parameter than flush_thread_count' do
hash = {'flush_thread_count' => 4, 'queued_chunks_limit_size' => 2}
i = create_output
i.register(:prefer_buffered_processing) { true }
i.configure(config_element('ROOT', '', {}, [config_element('buffer','tag',hash)]))
assert_equal 2, i.buffer.queued_chunks_limit_size
end
sub_test_case 'chunk feature in #write for output plugins' do
setup do
@stored_global_logger = $log
$log = Fluent::Test::TestLogger.new
@hash = {
'flush_mode' => 'immediate',
'flush_thread_interval' => '0.01',
'flush_thread_burst_interval' => '0.01',
}
end
teardown do
$log = @stored_global_logger
end
test 'plugin using standard format can iterate chunk for time, record in #write' do
events_from_chunk = []
@i = create_output(:standard)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
@i.register(:prefer_delayed_commit){ false }
@i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:write, e] }
@i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:try_write, e] }
@i.start
@i.after_start
events = [
[event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
[event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
]
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
assert_equal 2, events_from_chunk.size
2.times.each do |i|
assert_equal :write, events_from_chunk[i][0]
assert_equal events, events_from_chunk[i][1]
end
end
test 'plugin using standard format can iterate chunk for time, record in #try_write' do
events_from_chunk = []
@i = create_output(:standard)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
@i.register(:prefer_delayed_commit){ true }
@i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:write, e] }
@i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:try_write, e] }
@i.start
@i.after_start
events = [
[event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
[event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
]
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
assert_equal 2, events_from_chunk.size
2.times.each do |i|
assert_equal :try_write, events_from_chunk[i][0]
assert_equal events, events_from_chunk[i][1]
end
end
test 'plugin using custom format cannot iterate chunk in #write' do
events_from_chunk = []
@i = create_output(:custom)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
@i.register(:prefer_delayed_commit){ false }
@i.register(:format){ |tag, time, record| [tag,time,record].to_json }
@i.register(:format_type_is_msgpack){ false }
@i.register(:write){ |chunk| assert !(chunk.respond_to?(:each)) }
@i.register(:try_write){ |chunk| assert !(chunk.respond_to?(:each)) }
@i.start
@i.after_start
events = [
[event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
[event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
]
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
assert_equal 0, events_from_chunk.size
end
test 'plugin using custom format cannot iterate chunk in #try_write' do
events_from_chunk = []
@i = create_output(:custom)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
@i.register(:prefer_delayed_commit){ true }
@i.register(:format){ |tag, time, record| [tag,time,record].to_json }
@i.register(:format_type_is_msgpack){ false }
@i.register(:write){ |chunk| assert !(chunk.respond_to?(:each)) }
@i.register(:try_write){ |chunk| assert !(chunk.respond_to?(:each)) }
@i.start
@i.after_start
events = [
[event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
[event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
]
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
assert_equal 0, events_from_chunk.size
end
data('formatted_to_msgpack_binary?' => :custom,
'formatted_to_msgpack_binary' => :old_custom)
test 'plugin using custom format can iterate chunk in #write if #format returns msgpack' do |out_type|
events_from_chunk = []
@i = create_output(out_type)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
@i.register(:prefer_delayed_commit){ false }
@i.register(:format){ |tag, time, record| [tag,time,record].to_msgpack }
@i.register(:format_type_is_msgpack){ true }
@i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:write, e] }
@i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:try_write, e] }
@i.start
@i.after_start
events = [
[event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
[event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
]
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
assert_equal 2, events_from_chunk.size
2.times.each do |i|
assert_equal :write, events_from_chunk[i][0]
each_pushed = events_from_chunk[i][1]
assert_equal 2, each_pushed.size
assert_equal 'test.tag', each_pushed[0][0]
assert_equal 'test.tag', each_pushed[1][0]
assert_equal events, each_pushed.map{|tag,time,record| [time,record]}
end
end
data(:handle_stream_simple => '',
:handle_stream_with_custom_format => 'tag,message')
test 'plugin using custom format can skip record chunk when format return nil' do |chunk_keys|
events_from_chunk = []
@i = create_output(:custom)
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', chunk_keys, @hash)]))
@i.register(:prefer_delayed_commit) { false }
@i.register(:format) { |tag, time, record|
if record['message'] == 'test1'
nil
else
[tag,time,record].to_msgpack
end
}
@i.register(:format_type_is_msgpack) { true }
@i.register(:write){ |chunk| e = []; chunk.each { |ta, t, r| e << [ta, t, r] }; events_from_chunk << [:write, e] }
@i.start
@i.after_start
events = [
[event_time('2016-10-05 16:16:16 -0700'), {"message" => "test1"}],
[event_time('2016-10-05 16:16:17 -0700'), {"message" => "test2"}],
]
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
waiting(5) { sleep 0.1 until events_from_chunk.size == 1 }
assert_equal 1, events_from_chunk.size
assert_equal :write, events_from_chunk[0][0]
each_pushed = events_from_chunk[0][1]
assert_equal 1, each_pushed.size
assert_equal 'test.tag', each_pushed[0][0]
assert_equal "test2", each_pushed[0][2]['message']
end
test 'plugin using custom format can iterate chunk in #try_write if #format returns msgpack' do
events_from_chunk = []
@i = create_output(:custom)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
@i.register(:prefer_delayed_commit){ true }
@i.register(:format){ |tag, time, record| [tag,time,record].to_msgpack }
@i.register(:format_type_is_msgpack){ true }
@i.register(:write){ |chunk| events_from_chunk = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:write, e] }
@i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:try_write, e] }
@i.start
@i.after_start
events = [
[event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
[event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
]
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
assert_equal 2, events_from_chunk.size
2.times.each do |i|
assert_equal :try_write, events_from_chunk[i][0]
each_pushed = events_from_chunk[i][1]
assert_equal 2, each_pushed.size
assert_equal 'test.tag', each_pushed[0][0]
assert_equal 'test.tag', each_pushed[1][0]
assert_equal events, each_pushed.map{|tag,time,record| [time,record]}
end
end
data(:BufferedOutput => :old_buf,
:ObjectBufferedOutput => :old_obj)
test 'old plugin types can iterate chunk by msgpack_each in #write' do |plugin_type|
events_from_chunk = []
# event_emitter helper requires Engine.root_agent for routing
ra = Fluent::RootAgent.new(log: $log)
stub(Fluent::Engine).root_agent { ra }
@i = create_output(plugin_type)
@i.configure(config_element('ROOT', '', {}, [config_element('buffer', '', @hash)]))
@i.register(:format) { |tag, time, record| [time, record].to_msgpack }
@i.register(:write) { |chunk| e = []; chunk.msgpack_each { |t, r| e << [t, r] }; events_from_chunk << [:write, e]; }
@i.start
@i.after_start
events = [
[event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
[event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
]
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
@i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
waiting(5) { sleep 0.1 until events_from_chunk.size == 2 }
assert_equal 2, events_from_chunk.size
2.times.each do |i|
assert_equal :write, events_from_chunk[i][0]
assert_equal events, events_from_chunk[i][1]
end
end
end
sub_test_case 'buffered output configured with many chunk keys' do
setup do
@stored_global_logger = $log
$log = Fluent::Test::TestLogger.new
@hash = {
'flush_mode' => 'interval',
'flush_thread_burst_interval' => 0.01,
'chunk_limit_size' => 1024,
'timekey' => 60,
}
@i = create_output(:buffered)
end
teardown do
$log = @stored_global_logger
end
test 'nothing are warned with less chunk keys' do
chunk_keys = 'time,key1,key2,key3'
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
logs = @i.log.out.logs.dup
@i.start
@i.after_start
assert{ logs.count{|log| log.include?('[warn]') } == 0 }
end
test 'a warning reported with 4 chunk keys' do
chunk_keys = 'key1,key2,key3,key4'
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
logs = @i.log.out.logs.dup
@i.start # this calls `log.reset`... capturing logs about configure must be done before this line
@i.after_start
assert_equal ['key1', 'key2', 'key3', 'key4'], @i.chunk_keys
assert{ logs.count{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') } == 1 }
end
test 'a warning reported with 4 chunk keys including "tag"' do
chunk_keys = 'tag,key1,key2,key3'
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
logs = @i.log.out.logs.dup
@i.start # this calls `log.reset`... capturing logs about configure must be done before this line
@i.after_start
assert{ logs.count{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') } == 1 }
end
test 'time key is not included for warned chunk keys' do
chunk_keys = 'time,key1,key2,key3'
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
logs = @i.log.out.logs.dup
@i.start
@i.after_start
assert{ logs.count{|log| log.include?('[warn]') } == 0 }
end
end
sub_test_case 'buffered output feature without any buffer key, flush_mode: lazy' do
setup do
hash = {
'flush_mode' => 'lazy',
'flush_thread_burst_interval' => 0.01,
'flush_thread_count' => 2,
'chunk_limit_size' => 1024,
}
@i = create_output(:buffered)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
@i.start
@i.after_start
end
test '#start does not create enqueue thread, but creates flush threads' do
@i.thread_wait_until_start
assert @i.thread_exist?(:flush_thread_0)
assert @i.thread_exist?(:flush_thread_1)
assert !@i.thread_exist?(:enqueue_thread)
end
test '#format is called for each events' do
ary = []
@i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
t = event_time()
es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
4.times do
@i.emit_events('tag.test', es)
end
assert_equal 8, ary.size
4.times do |i|
assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
end
end
test '#write is called only when chunk bytes limit exceeded, and buffer chunk is purged' do
ary = []
@i.register(:write){|chunk| ary << chunk.read }
tag = "test.tag"
t = event_time()
r = {}
(0...10).each do |i|
r["key#{i}"] = "value #{i}"
end
event_size = [tag, t, r].to_json.size # 195
(1024 * 0.9 / event_size).to_i.times do |i|
@i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
end
assert{ @i.buffer.queue.size == 0 && ary.size == 0 }
staged_chunk = @i.buffer.stage[@i.buffer.stage.keys.first]
assert{ staged_chunk.size != 0 }
@i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
assert{ @i.buffer.queue.size > 0 || @i.buffer.dequeued.size > 0 || ary.size > 0 }
waiting(10) do
Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
Thread.pass until staged_chunk.size == 0
end
assert_equal 1, ary.size
assert_equal [tag,t,r].to_json * (1024 / event_size), ary.first
end
test 'flush_at_shutdown work well when plugin is shutdown' do
ary = []
@i.register(:write){|chunk| ary << chunk.read }
tag = "test.tag"
t = event_time()
r = {}
(0...10).each do |i|
r["key#{i}"] = "value #{i}"
end
event_size = [tag, t, r].to_json.size # 195
(1024 * 0.9 / event_size).to_i.times do |i|
@i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
end
assert{ @i.buffer.queue.size == 0 && ary.size == 0 }
@i.stop
@i.before_shutdown
@i.shutdown
@i.after_shutdown
waiting(10) do
Thread.pass until ary.size == 1
end
assert_equal [tag,t,r].to_json * (1024 * 0.9 / event_size), ary.first
end
end
sub_test_case 'buffered output feature without any buffer key, flush_mode: interval' do
setup do
hash = {
'flush_mode' => 'interval',
'flush_interval' => 1,
'flush_thread_count' => 1,
'flush_thread_burst_interval' => 0.01,
'chunk_limit_size' => 1024,
}
@i = create_output(:buffered)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
@i.start
@i.after_start
end
test '#start creates enqueue thread and flush threads' do
@i.thread_wait_until_start
assert @i.thread_exist?(:flush_thread_0)
assert @i.thread_exist?(:enqueue_thread)
end
test '#format is called for each event streams' do
ary = []
@i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
t = event_time()
es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
4.times do
@i.emit_events('tag.test', es)
end
assert_equal 8, ary.size
4.times do |i|
assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
end
end
test '#write is called per flush_interval, and buffer chunk is purged' do
@i.thread_wait_until_start
ary = []
@i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
@i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| ary << data } }
t = event_time()
r = {}
(0...10).each do |i|
r["key#{i}"] = "value #{i}"
end
2.times do |i|
rand_records = rand(1..4)
es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
assert_equal rand_records, es.size
@i.interrupt_flushes
assert{ @i.buffer.queue.size == 0 }
@i.emit_events("test.tag", es)
assert{ @i.buffer.queue.size == 0 }
assert{ @i.buffer.stage.size == 1 }
staged_chunk = @i.instance_eval{ @buffer.stage[@buffer.stage.keys.first] }
assert{ staged_chunk.size != 0 }
@i.enqueue_thread_wait
waiting(10) do
Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
Thread.pass until staged_chunk.size == 0
end
assert_equal rand_records, ary.size
ary.reject!{|e| true }
end
end
end
sub_test_case 'with much longer flush_interval' do
setup do
hash = {
'flush_mode' => 'interval',
'flush_interval' => 3000,
'flush_thread_count' => 1,
'flush_thread_burst_interval' => 0.01,
'chunk_limit_size' => 1024,
}
@i = create_output(:buffered)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
@i.start
@i.after_start
end
test 'flush_at_shutdown work well when plugin is shutdown' do
ary = []
@i.register(:write){|chunk| ary << chunk.read }
tag = "test.tag"
t = event_time()
r = {}
(0...10).each do |i|
r["key#{i}"] = "value #{i}"
end
event_size = [tag, t, r].to_json.size # 195
(1024 * 0.9 / event_size).to_i.times do |i|
@i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
end
queue_size = @i.buffer.queue.size
assert{ queue_size == 0 && ary.size == 0 }
@i.stop
@i.before_shutdown
@i.shutdown
@i.after_shutdown
waiting(10){ sleep 0.1 until ary.size == 1 }
assert_equal [tag,t,r].to_json * (1024 * 0.9 / event_size), ary.first
end
end
sub_test_case 'buffered output feature without any buffer key, flush_mode: immediate' do
setup do
hash = {
'flush_mode' => 'immediate',
'flush_thread_count' => 1,
'flush_thread_burst_interval' => 0.01,
'chunk_limit_size' => 1024,
}
@i = create_output(:buffered)
@i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
@i.start
@i.after_start
end
test '#start does not create enqueue thread, but creates flush threads' do
@i.thread_wait_until_start
assert @i.thread_exist?(:flush_thread_0)
assert !@i.thread_exist?(:enqueue_thread)
end
test '#format is called for each event streams' do
ary = []
@i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
t = event_time()
es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
4.times do
@i.emit_events('tag.test', es)
end
assert_equal 8, ary.size
4.times do |i|
assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
end
end
test '#write is called every time for each emits, and buffer chunk is purged' do
@i.thread_wait_until_start
ary = []
@i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
@i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| ary << data } }
t = event_time()
r = {}
(0...10).each do |i|
r["key#{i}"] = "value #{i}"
end
3.times do |i|
rand_records = rand(1..5)
es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
assert_equal rand_records, es.size
@i.emit_events("test.tag", es)
waiting(10){ sleep 0.1 until @i.buffer.stage.size == 0 } # make sure that the emitted es is enqueued by "flush_mode immediate"
waiting(10){ sleep 0.1 until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0 }
waiting(10){ sleep 0.1 until ary.size == rand_records }
assert_equal rand_records, ary.size
ary.reject!{|e| true }
end
end
test 'flush_at_shutdown work well when plugin is shutdown' do
ary = []
@i.register(:write){|chunk| ary << chunk.read }
tag = "test.tag"
t = event_time()
r = {}
(0...10).each do |i|
r["key#{i}"] = "value #{i}"
end
@i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
@i.stop
@i.before_shutdown
@i.shutdown
@i.after_shutdown
waiting(10) do
Thread.pass until ary.size == 1
end
assert_equal [tag,t,r].to_json, ary.first
end
end
sub_test_case 'buffered output feature with timekey and range' do
setup do
chunk_key = 'time'
hash = {
'timekey' => 30, # per 30seconds
'timekey_wait' => 5, # 5 second delay for flush
'flush_thread_count' => 1,
'flush_thread_burst_interval' => 0.01,
}
@i = create_output(:buffered)
@i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
@i.start
@i.after_start
end
test '#configure raises config error if timekey is not specified' do
i = create_output(:buffered)
assert_raise Fluent::ConfigError do
i.configure(config_element('ROOT','',{},[config_element('buffer','time',)]))
end
end
test 'default flush_mode is set to :lazy' do
assert_equal :lazy, @i.instance_eval{ @flush_mode }
end
test '#start creates enqueue thread and flush threads' do
@i.thread_wait_until_start
assert @i.thread_exist?(:flush_thread_0)
assert @i.thread_exist?(:enqueue_thread)
end
test '#format is called for each event streams' do
ary = []
@i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
t = event_time()
es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
5.times do
@i.emit_events('tag.test', es)
end
assert_equal 10, ary.size
5.times do |i|
assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
end
end
test '#write is called per time ranges after timekey_wait, and buffer chunk is purged' do
Timecop.freeze( Time.parse('2016-04-13 14:04:00 +0900') )
@i.thread_wait_until_start
ary = []
metachecks = []
@i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
@i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.timekey.to_i <= e[1].to_i && e[1].to_i < chunk.metadata.timekey.to_i + 30) } }
r = {}
(0...10).each do |i|
r["key#{i}"] = "value #{i}"
end
ts = [
Fluent::EventTime.parse('2016-04-13 14:03:21 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:23 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:29 +0900'),
Fluent::EventTime.parse('2016-04-13 14:03:30 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:33 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:38 +0900'),
Fluent::EventTime.parse('2016-04-13 14:03:43 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:49 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:51 +0900'),
Fluent::EventTime.parse('2016-04-13 14:04:00 +0900'), Fluent::EventTime.parse('2016-04-13 14:04:01 +0900'),
]
events = [
["test.tag.1", ts[0], r], # range 14:03:00 - 03:29
["test.tag.2", ts[1], r],
["test.tag.1", ts[2], r],
["test.tag.1", ts[3], r], # range 14:03:30 - 04:00
["test.tag.1", ts[4], r],
["test.tag.1", ts[5], r],
["test.tag.1", ts[6], r],
["test.tag.1", ts[7], r],
["test.tag.2", ts[8], r],
["test.tag.1", ts[9], r], # range 14:04:00 - 04:29
["test.tag.2", ts[10], r],
]
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_exec.rb | test/plugin/test_out_exec.rb | require_relative '../helper'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_exec'
require 'fileutils'
# Tests for Fluent::Plugin::ExecOutput (out_exec): events are formatted
# (tsv / json / msgpack), buffered, and the buffer chunk is handed to an
# external command (here mostly `cat`, redirected into TMP_DIR/out).
class ExecOutputTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
    FileUtils.rm_rf(TMP_DIR, secure: true)
    if File.exist?(TMP_DIR)
      # ensure files are closed for Windows, on which deleted files
      # are still visible from filesystem
      GC.start(full_mark: true, immediate_sweep: true)
      FileUtils.remove_entry_secure(TMP_DIR)
    end
    FileUtils.mkdir_p(TMP_DIR)
  end

  # Per-process temp dir; TEST_ENV_NUMBER keeps parallel test workers apart.
  # Referenced from #setup above, which is fine because the constant is
  # resolved at runtime, not when the method is defined.
  TMP_DIR = File.dirname(__FILE__) + "/../tmp/out_exec#{ENV['TEST_ENV_NUMBER']}"

  def create_driver(config)
    Fluent::Test::Driver::Output.new(Fluent::Plugin::ExecOutput).configure(config)
  end

  # Fixed event time (with sub-second part) plus two sample records shared
  # by most tests below.
  def create_test_data
    time = event_time("2011-01-02 13:14:15.123")
    records = [{"k1"=>"v1","kx"=>"vx"}, {"k1"=>"v2","kx"=>"vx"}]
    return time, records
  end

  # Minimal config: only <format> keys, no <inject> section.
  DEFAULT_CONFIG_ONLY_WITH_KEYS = %[
    command cat >#{TMP_DIR}/out
    <format>
      keys ["k1", "kx"]
    </format>
  ]

  test 'configure in default' do
    d = create_driver DEFAULT_CONFIG_ONLY_WITH_KEYS
    assert{ d.instance.formatter.is_a? Fluent::Plugin::TSVFormatter }
    assert_equal ["k1", "kx"], d.instance.formatter.keys
    assert_nil d.instance.inject_config
  end

  # Section-style (v1) configurations.
  TSV_CONFIG = %[
    command cat >#{TMP_DIR}/out
    <inject>
      tag_key tag
      time_key time
      time_format %Y-%m-%d %H:%M:%S
      localtime yes
    </inject>
    <format>
      @type tsv
      keys time, tag, k1
    </format>
  ]

  TSV_CONFIG_WITH_SUBSEC = %[
    command cat >#{TMP_DIR}/out
    <inject>
      tag_key tag
      time_key time
      time_format %Y-%m-%d %H:%M:%S.%3N
      localtime yes
    </inject>
    <format>
      @type tsv
      keys time, tag, k1
    </format>
  ]

  TSV_CONFIG_WITH_BUFFER = TSV_CONFIG + %[
    <buffer time>
      @type memory
      timekey 3600
      flush_thread_count 5
      chunk_limit_size 50m
      total_limit_size #{50 * 1024 * 1024 * 128}
      flush_at_shutdown yes
    </buffer>
  ]

  JSON_CONFIG = %[
    command cat >#{TMP_DIR}/out
    <format>
      @type json
    </format>
  ]

  MSGPACK_CONFIG = %[
    command cat >#{TMP_DIR}/out
    <format>
      @type msgpack
    </format>
  ]

  # Traditional (v0 / flat-parameter) configurations; these should be
  # converted to the section-style equivalents above by the plugin.
  CONFIG_COMPAT = %[
    buffer_path #{TMP_DIR}/buffer
    command cat >#{TMP_DIR}/out
    localtime
  ]

  TSV_CONFIG_COMPAT = %[
    keys "time,tag,k1"
    tag_key "tag"
    time_key "time"
    time_format %Y-%m-%d %H:%M:%S
  ]

  BUFFER_CONFIG_COMPAT = %[
    buffer_type memory
    time_slice_format %Y%m%d%H
    num_threads 5
    buffer_chunk_limit 50m
    buffer_queue_limit 128
    flush_at_shutdown yes
  ]

  TSV_CONFIG_WITH_SUBSEC_COMPAT = %[
    keys "time,tag,k1"
    tag_key "tag"
    time_key "time"
    time_format %Y-%m-%d %H:%M:%S.%3N
  ]

  # Each data-driven test below runs once with the section-style config and
  # once with the traditional config; both must behave identically.
  data(
    'with sections' => TSV_CONFIG,
    'traditional' => CONFIG_COMPAT + TSV_CONFIG_COMPAT,
  )
  test 'configure for tsv' do |conf|
    d = create_driver(conf)
    assert_equal ["time","tag","k1"], d.instance.formatter.keys
    assert_equal "tag", d.instance.inject_config.tag_key
    assert_equal "time", d.instance.inject_config.time_key
    assert_equal "%Y-%m-%d %H:%M:%S", d.instance.inject_config.time_format
    assert_equal true, d.instance.inject_config.localtime
  end

  data(
    'with sections' => TSV_CONFIG_WITH_BUFFER,
    'traditional' => CONFIG_COMPAT + TSV_CONFIG_COMPAT + BUFFER_CONFIG_COMPAT,
  )
  test 'configure_with_compat_buffer_parameters' do |conf|
    d = create_driver(conf)
    assert_equal 3600, d.instance.buffer_config.timekey
    assert_equal 5, d.instance.buffer_config.flush_thread_count
    assert_equal 50*1024*1024, d.instance.buffer.chunk_limit_size
    assert_equal 50*1024*1024*128, d.instance.buffer.total_limit_size
    assert d.instance.buffer_config.flush_at_shutdown
  end

  data(
    'with sections' => TSV_CONFIG,
    'traditional' => CONFIG_COMPAT + TSV_CONFIG_COMPAT,
  )
  test 'format' do |conf|
    d = create_driver(conf)
    time, records = create_test_data
    d.run(default_tag: 'test') do
      d.feed(time, records[0])
      d.feed(time, records[1])
    end
    assert_equal %[2011-01-02 13:14:15\ttest\tv1\n], d.formatted[0]
    assert_equal %[2011-01-02 13:14:15\ttest\tv2\n], d.formatted[1]
  end

  data(
    'with sections' => JSON_CONFIG,
    'traditional' => CONFIG_COMPAT + "format json",
  )
  test 'format_json' do |conf|
    d = create_driver(conf)
    time, records = create_test_data
    d.run(default_tag: 'test') do
      d.feed(time, records[0])
      d.feed(time, records[1])
    end
    assert_equal JSON.generate(records[0]) + "\n", d.formatted[0]
    assert_equal JSON.generate(records[1]) + "\n", d.formatted[1]
  end

  data(
    'with sections' => MSGPACK_CONFIG,
    'traditional' => CONFIG_COMPAT + "format msgpack"
  )
  test 'format_msgpack' do |conf|
    d = create_driver(conf)
    time, records = create_test_data
    d.run(default_tag: 'test') do
      d.feed(time, records[0])
      d.feed(time, records[1])
    end
    assert_equal records[0].to_msgpack, d.formatted[0]
    assert_equal records[1].to_msgpack, d.formatted[1]
  end

  data(
    'with sections' => TSV_CONFIG_WITH_SUBSEC,
    'traditional' => CONFIG_COMPAT + TSV_CONFIG_WITH_SUBSEC_COMPAT,
  )
  test 'format subsecond time' do |conf|
    d = create_driver(conf)
    time, records = create_test_data
    d.run(default_tag: 'test') do
      d.feed(time, records[0])
      d.feed(time, records[1])
    end
    assert_equal %[2011-01-02 13:14:15.123\ttest\tv1\n], d.formatted[0]
    assert_equal %[2011-01-02 13:14:15.123\ttest\tv2\n], d.formatted[1]
  end

  data(
    'with sections' => TSV_CONFIG,
    'traditional' => CONFIG_COMPAT + TSV_CONFIG_COMPAT,
  )
  test 'write' do |conf|
    d = create_driver(conf)
    time, records = create_test_data
    d.run(default_tag: 'test', flush: true) do
      d.feed(time, records[0])
      d.feed(time, records[1])
    end
    expect_path = "#{TMP_DIR}/out"
    # the child command writes asynchronously; poll until the file appears
    waiting(10, plugin: d.instance) do
      sleep(0.1) until File.exist?(expect_path)
    end
    assert_equal true, File.exist?(expect_path)
    data = File.read(expect_path)
    expect_data =
      %[2011-01-02 13:14:15\ttest\tv1\n] +
      %[2011-01-02 13:14:15\ttest\tv2\n]
    assert_equal expect_data, data
  end

  sub_test_case 'when executed process dies unexpectedly' do
    setup do
      # Generates a config whose command exits with status `num`:
      # 0 => copy the chunk file (last argv, appended by out_exec) to stdout,
      # non-zero => sleep briefly and exit with that status.
      @gen_config = ->(num){ <<EOC
command ruby -e "ARGV.first.to_i == 0 ? open(ARGV[1]){|f| STDOUT.write(f.read); STDOUT.flush} : (sleep 1 ; exit ARGV.first.to_i)" #{num} >#{TMP_DIR}/fail_out
<inject>
  tag_key tag
  time_key time
  time_format %Y-%m-%d %H:%M:%S
  localtime yes
</inject>
<format>
  @type tsv
  keys time, tag, k1
</format>
EOC
      }
    end

    test 'flushed chunk will be committed after child process successfully exits' do
      d = create_driver(@gen_config.call(0))
      time, records = create_test_data
      expect_path = "#{TMP_DIR}/fail_out"
      d.end_if{ File.exist?(expect_path) }
      d.run(default_tag: 'test', flush: true, wait_flush_completion: true, shutdown: false) do
        d.feed(time, records[0])
        d.feed(time, records[1])
      end
      assert{ File.exist?(expect_path) }
      data = File.read(expect_path)
      expect_data =
        %[2011-01-02 13:14:15\ttest\tv1\n] +
        %[2011-01-02 13:14:15\ttest\tv2\n]
      assert_equal expect_data, data
      # chunk must be committed: neither queued nor dequeued-but-unacked
      assert{ d.instance.buffer.queue.empty? }
      assert{ d.instance.dequeued_chunks.empty? }
    ensure
      d.instance_shutdown if d&.instance
    end

    test 'flushed chunk will be taken back after child process unexpectedly exits' do
      d = create_driver(@gen_config.call(3))
      time, records = create_test_data
      expect_path = "#{TMP_DIR}/fail_out"
      d.end_if{ d.instance.log.out.logs.any?{|line| line.include?("command exits with error code") } }
      d.run(default_tag: 'test', flush: true, wait_flush_completion: false, shutdown: false) do
        d.feed(time, records[0])
        d.feed(time, records[1])
      end
      assert{ d.instance.dequeued_chunks.empty? } # because it's already taken back
      assert{ d.instance.buffer.queue.size == 1 }
      logs = d.instance.log.out.logs
      assert{ logs.any?{|line| line.include?("command exits with error code") && line.include?("status=3") } }
      # the failing command wrote nothing before exiting
      assert{ File.exist?(expect_path) && File.size(expect_path) == 0 }
    ensure
      d.instance_shutdown if d&.instance
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_forward.rb | test/plugin/test_out_forward.rb | require_relative '../helper'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_forward'
require 'flexmock/test_unit'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_forward'
class ForwardOutputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
FileUtils.rm_rf(TMP_DIR)
FileUtils.mkdir_p(TMP_DIR)
@d = nil
# forward plugin uses TCP and UDP sockets on the same port number
@target_port = unused_port(protocol: :all)
end
def teardown
@d.instance_shutdown if @d
@port = nil
end
TMP_DIR = File.join(__dir__, "../tmp/out_forward#{ENV['TEST_ENV_NUMBER']}")
TARGET_HOST = '127.0.0.1'
def config
%[
send_timeout 51
heartbeat_type udp
<server>
name test
host #{TARGET_HOST}
port #{@target_port}
</server>
]
end
def target_config
%[
port #{@target_port}
bind #{TARGET_HOST}
]
end
def create_driver(conf=config)
Fluent::Test::Driver::Output.new(Fluent::Plugin::ForwardOutput) {
attr_reader :sent_chunk_ids, :ack_handler, :discovery_manager
def initialize
super
@sent_chunk_ids = []
end
def try_write(chunk)
retval = super
@sent_chunk_ids << chunk.unique_id
retval
end
}.configure(conf)
end
test 'configure' do
@d = d = create_driver(%[
self_hostname localhost
<server>
name test
host #{TARGET_HOST}
port #{@target_port}
</server>
])
nodes = d.instance.nodes
assert_equal 60, d.instance.send_timeout
assert_equal :transport, d.instance.heartbeat_type
assert_equal 1, nodes.length
assert_nil d.instance.connect_timeout
node = nodes.first
assert_equal "test", node.name
assert_equal '127.0.0.1', node.host
assert_equal @target_port, node.port
end
test 'configure_traditional' do
@d = d = create_driver(<<EOL)
self_hostname localhost
<server>
name test
host #{TARGET_HOST}
port #{@target_port}
</server>
buffer_chunk_limit 10m
EOL
instance = d.instance
assert instance.chunk_key_tag
assert !instance.chunk_key_time
assert_equal [], instance.chunk_keys
assert{ instance.buffer.is_a?(Fluent::Plugin::MemoryBuffer) }
assert_equal( 10*1024*1024, instance.buffer.chunk_limit_size )
end
test 'configure timeouts' do
@d = d = create_driver(%[
send_timeout 30
connect_timeout 10
hard_timeout 15
ack_response_timeout 20
<server>
host #{TARGET_HOST}
port #{@target_port}
</server>
])
assert_equal 30, d.instance.send_timeout
assert_equal 10, d.instance.connect_timeout
assert_equal 15, d.instance.hard_timeout
assert_equal 20, d.instance.ack_response_timeout
end
test 'configure_udp_heartbeat' do
@d = d = create_driver(config + "\nheartbeat_type udp")
assert_equal :udp, d.instance.heartbeat_type
end
test 'configure_none_heartbeat' do
@d = d = create_driver(config + "\nheartbeat_type none")
assert_equal :none, d.instance.heartbeat_type
end
test 'configure_expire_dns_cache' do
@d = d = create_driver(config + "\nexpire_dns_cache 5")
assert_equal 5, d.instance.expire_dns_cache
end
test 'configure_dns_round_robin udp' do
assert_raise(Fluent::ConfigError) do
create_driver(config + "\nheartbeat_type udp\ndns_round_robin true")
end
end
test 'configure_dns_round_robin transport' do
@d = d = create_driver(config + "\nheartbeat_type transport\ndns_round_robin true")
assert_equal true, d.instance.dns_round_robin
end
test 'configure_dns_round_robin none' do
@d = d = create_driver(config + "\nheartbeat_type none\ndns_round_robin true")
assert_equal true, d.instance.dns_round_robin
end
test 'configure_no_server' do
assert_raise(Fluent::ConfigError, 'forward output plugin requires at least one <server> is required') do
create_driver('')
end
end
test 'configure with ignore_network_errors_at_startup' do
normal_conf = config_element('match', '**', {}, [
config_element('server', '', {'name' => 'test', 'host' => 'unexisting.yaaaaaaaaaaaaaay.host.example.com'})
])
if Socket.const_defined?(:ResolutionError) # as of Ruby 3.3
error_class = Socket::ResolutionError
else
error_class = SocketError
end
assert_raise error_class do
create_driver(normal_conf)
end
conf = config_element('match', '**', {'ignore_network_errors_at_startup' => 'true'}, [
config_element('server', '', {'name' => 'test', 'host' => 'unexisting.yaaaaaaaaaaaaaay.host.example.com'})
])
@d = d = create_driver(conf)
expected_log = "failed to resolve node name when configured"
expected_detail = "server=\"test\" error_class=#{error_class.name}"
logs = d.logs
assert{ logs.any?{|log| log.include?(expected_log) && log.include?(expected_detail) } }
end
sub_test_case 'configure compress' do
data('default', ['', :text])
data('gzip', ['compress gzip', :gzip])
data('zstd', ['compress zstd', :zstd])
test 'should be applied' do |(option, expected)|
@d = d = create_driver(config + option)
node = d.instance.nodes.first
assert_equal(
[expected, expected],
[d.instance.compress, node.instance_variable_get(:@compress)]
)
end
data('default' => '')
data('gzip' => 'compress gzip')
data('zstd' => 'compress zstd')
test 'should log as experimental only for zstd' do |option|
@d = d = create_driver(config + option)
log_message = "zstd compression feature is an experimental new feature"
assert do
if d.instance.compress == :zstd
d.logs.any? { |log| log.include?(log_message) }
else
d.logs.none? { |log| log.include?(log_message) }
end
end
end
# TODO add tests that we cannot configure the different compress type between owner and buffer except for :text
data('gzip', ['compress gzip', :text, :gzip])
data('zstd', ['compress zstd', :text, :zstd])
test 'can configure buffer compress separately when owner uses :text' do |(buffer_option, expected_owner_compress, expected_buffer_compress)|
@d = d = create_driver(config + %[
<buffer>
type memory
#{buffer_option}
</buffer>
])
node = d.instance.nodes.first
assert_equal(
[expected_owner_compress, expected_owner_compress, expected_buffer_compress],
[d.instance.compress, node.instance_variable_get(:@compress), d.instance.buffer.compress],
)
log_message = "buffer is compressed. If you also want to save the bandwidth of a network, Add `compress` configuration in <match>"
assert do
d.logs.any? { |log| log.include?(log_message) }
end
end
end
data('CA cert' => 'tls_ca_cert_path',
'non CA cert' => 'tls_cert_path')
test 'configure tls_cert_path/tls_ca_cert_path' do |param|
dummy_cert_path = File.join(TMP_DIR, "dummy_cert.pem")
FileUtils.touch(dummy_cert_path)
conf = %[
send_timeout 5
transport tls
tls_insecure_mode true
#{param} #{dummy_cert_path}
<server>
host #{TARGET_HOST}
port #{@target_port}
</server>
]
@d = d = create_driver(conf)
# In the plugin, tls_ca_cert_path is used for both cases
assert_equal([dummy_cert_path], d.instance.tls_ca_cert_path)
end
sub_test_case "certstore loading parameters for Windows" do
test 'certstore related config parameters' do
omit "certstore related values raise error on not Windows" if Fluent.windows?
conf = %[
send_timeout 5
transport tls
tls_cert_logical_store_name Root
tls_cert_thumbprint a909502dd82ae41433e6f83886b00d4277a32a7b
<server>
host #{TARGET_HOST}
port #{@target_port}
</server>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test 'cert_logical_store_name and tls_cert_thumbprint default values' do
conf = %[
send_timeout 5
transport tls
<server>
host #{TARGET_HOST}
port #{@target_port}
</server>
]
@d = d = create_driver(conf)
assert_nil d.instance.tls_cert_logical_store_name
assert_nil d.instance.tls_cert_thumbprint
end
data('CA cert' => 'tls_ca_cert_path',
'non CA cert' => 'tls_cert_path')
test 'specify tls_cert_logical_store_name and tls_cert_path should raise error' do |param|
omit "Loading CertStore feature works only Windows" unless Fluent.windows?
dummy_cert_path = File.join(TMP_DIR, "dummy_cert.pem")
FileUtils.touch(dummy_cert_path)
conf = %[
send_timeout 5
transport tls
#{param} #{dummy_cert_path}
tls_cert_logical_store_name Root
<server>
host #{TARGET_HOST}
port #{@target_port}
</server>
]
assert_raise(Fluent::ConfigError) do
create_driver(conf)
end
end
test 'configure cert_logical_store_name and tls_cert_thumbprint' do
omit "Loading CertStore feature works only Windows" unless Fluent.windows?
conf = %[
send_timeout 5
transport tls
tls_cert_logical_store_name Root
tls_cert_thumbprint a909502dd82ae41433e6f83886b00d4277a32a7b
<server>
host #{TARGET_HOST}
port #{@target_port}
</server>
]
@d = d = create_driver(conf)
assert_equal "Root", d.instance.tls_cert_logical_store_name
assert_equal "a909502dd82ae41433e6f83886b00d4277a32a7b", d.instance.tls_cert_thumbprint
end
end
test 'server is an abbreviation of static type of service_discovery' do
@d = d = create_driver(%[
<server>
host 127.0.0.1
port 1234
</server>
<service_discovery>
@type static
<service>
host 127.0.0.1
port 1235
</service>
</service_discovery>
])
assert_equal(
[
{ host: '127.0.0.1', port: 1234 },
{ host: '127.0.0.1', port: 1235 },
],
d.instance.discovery_manager.services.collect do |service|
{ host: service.host, port: service.port }
end
)
end
test 'pass username and password as empty string to HandshakeProtocol' do
config_path = File.join(TMP_DIR, "sd_file.conf")
File.open(config_path, 'w') do |file|
file.write(%[
- 'host': 127.0.0.1
'port': 1234
'weight': 1
])
end
mock(Fluent::Plugin::ForwardOutput::HandshakeProtocol).new(log: anything, hostname: nil, shared_key: anything, password: '', username: '')
@d = d = create_driver(%[
<service_discovery>
@type file
path #{config_path}
</service_discovery>
])
assert_equal 1, d.instance.discovery_manager.services.size
assert_equal '127.0.0.1', d.instance.discovery_manager.services[0].host
assert_equal 1234, d.instance.discovery_manager.services[0].port
end
test 'phi_failure_detector disabled' do
@d = d = create_driver(config + %[phi_failure_detector false \n phi_threshold 0])
node = d.instance.nodes.first
stub(node.failure).phi { raise 'Should not be called' }
node.tick
assert_true node.available?
end
test 'phi_failure_detector enabled' do
@d = d = create_driver(config + %[phi_failure_detector true \n phi_threshold 0])
node = d.instance.nodes.first
node.tick
assert_false node.available?
end
test 'require_ack_response is disabled in default' do
@d = d = create_driver(config)
assert_equal false, d.instance.require_ack_response
assert_equal 190, d.instance.ack_response_timeout
end
test 'require_ack_response can be enabled' do
@d = d = create_driver(config + %[
require_ack_response true
ack_response_timeout 2s
])
d.instance_start
assert d.instance.require_ack_response
assert_equal 2, d.instance.ack_response_timeout
end
test 'suspend_flush is disable before before_shutdown' do
@d = d = create_driver(config + %[
require_ack_response true
ack_response_timeout 2s
])
d.instance_start
assert_false d.instance.instance_variable_get(:@suspend_flush)
end
test 'suspend_flush should be enabled and try_flush returns nil after before_shutdown' do
@d = d = create_driver(config + %[
require_ack_response true
ack_response_timeout 2s
])
d.instance_start
d.instance.before_shutdown
assert_true d.instance.instance_variable_get(:@suspend_flush)
assert_nil d.instance.try_flush
end
test 'verify_connection_at_startup is disabled in default' do
@d = d = create_driver(config)
assert_false d.instance.verify_connection_at_startup
end
test 'verify_connection_at_startup can be enabled' do
@d = d = create_driver(config + %[
verify_connection_at_startup true
])
assert_true d.instance.verify_connection_at_startup
end
test 'send tags in str (utf-8 strings)' do
target_input_driver = create_target_input_driver
@d = d = create_driver(config + %[flush_interval 1s])
time = event_time("2011-01-02 13:14:15 UTC")
tag_in_utf8 = "test.utf8".encode("utf-8")
tag_in_ascii = "test.ascii".encode("ascii-8bit")
emit_events = [
[tag_in_utf8, time, {"a" => 1}],
[tag_in_ascii, time, {"a" => 2}],
]
stub(d.instance.ack_handler).read_ack_from_sock(anything).never
assert_rr do
target_input_driver.run(expect_records: 2) do
d.run do
emit_events.each do |tag, t, record|
d.feed(tag, t, record)
end
end
end
end
events = target_input_driver.events
assert_equal_event_time(time, events[0][1])
assert_equal ['test.utf8', time, emit_events[0][2]], events[0]
assert_equal Encoding::UTF_8, events[0][0].encoding
assert_equal_event_time(time, events[1][1])
assert_equal ['test.ascii', time, emit_events[1][2]], events[1]
assert_equal Encoding::UTF_8, events[1][0].encoding
end
test 'send_with_time_as_integer' do
target_input_driver = create_target_input_driver
@d = d = create_driver(config + %[flush_interval 1s])
time = event_time("2011-01-02 13:14:15 UTC")
records = [
{"a" => 1},
{"a" => 2}
]
stub(d.instance.ack_handler).read_ack_from_sock(anything).never
assert_rr do
target_input_driver.run(expect_records: 2) do
d.run(default_tag: 'test') do
records.each do |record|
d.feed(time, record)
end
end
end
end
events = target_input_driver.events
assert_equal_event_time(time, events[0][1])
assert_equal ['test', time, records[0]], events[0]
assert_equal_event_time(time, events[1][1])
assert_equal ['test', time, records[1]], events[1]
end
test 'send_without_time_as_integer' do
target_input_driver = create_target_input_driver
@d = d = create_driver(config + %[
flush_interval 1s
time_as_integer false
])
time = event_time("2011-01-02 13:14:15 UTC")
records = [
{"a" => 1},
{"a" => 2}
]
stub(d.instance.ack_handler).read_ack_from_sock(anything).never
assert_rr do
target_input_driver.run(expect_records: 2) do
d.run(default_tag: 'test') do
records.each do |record|
d.feed(time, record)
end
end
end
end
events = target_input_driver.events
assert_equal_event_time(time, events[0][1])
assert_equal ['test', time, records[0]], events[0]
assert_equal_event_time(time, events[1][1])
assert_equal ['test', time, records[1]], events[1]
end
test 'send_compressed_message_pack_stream_if_compress_is_gzip' do
target_input_driver = create_target_input_driver(conf: target_config + "skip_invalid_event false")
@d = d = create_driver(config + %[
flush_interval 1s
compress gzip
])
time = event_time('2011-01-02 13:14:15 UTC')
records = [
{"a" => 1},
{"a" => 2}
]
target_input_driver.run(expect_records: 2) do
d.run(default_tag: 'test') do
records.each do |record|
d.feed(time, record)
end
end
end
event_streams = target_input_driver.event_streams
assert_true event_streams[0][1].is_a?(Fluent::CompressedMessagePackEventStream)
events = target_input_driver.events
assert_equal ['test', time, records[0]], events[0]
assert_equal ['test', time, records[1]], events[1]
end
test 'send_compressed_message_pack_stream_if_compress_is_zstd' do
target_input_driver = create_target_input_driver(conf: target_config + "skip_invalid_event false")
@d = d = create_driver(config + %[
flush_interval 1s
compress zstd
])
time = event_time('2011-01-02 13:14:15 UTC')
records = [
{"a" => 1},
{"a" => 2}
]
target_input_driver.run(expect_records: 2) do
d.run(default_tag: 'test') do
records.each do |record|
d.feed(time, record)
end
end
end
event_streams = target_input_driver.event_streams
assert_true event_streams[0][1].is_a?(Fluent::CompressedMessagePackEventStream)
events = target_input_driver.events
assert_equal ['test', time, records[0]], events[0]
assert_equal ['test', time, records[1]], events[1]
end
test 'send_to_a_node_supporting_responses' do
target_input_driver = create_target_input_driver
@d = d = create_driver(config + %[flush_interval 1s])
time = event_time("2011-01-02 13:14:15 UTC")
records = [
{"a" => 1},
{"a" => 2}
]
# not attempt to receive responses
stub(d.instance.ack_handler).read_ack_from_sock(anything).never
assert_rr do
target_input_driver.run(expect_records: 2) do
d.run(default_tag: 'test') do
records.each do |record|
d.feed(time, record)
end
end
end
end
events = target_input_driver.events
assert_equal ['test', time, records[0]], events[0]
assert_equal ['test', time, records[1]], events[1]
end
test 'send_to_a_node_not_supporting_responses' do
target_input_driver = create_target_input_driver
@d = d = create_driver(config + %[flush_interval 1s])
time = event_time("2011-01-02 13:14:15 UTC")
records = [
{"a" => 1},
{"a" => 2}
]
# not attempt to receive responses
stub(d.instance.ack_handler).read_ack_from_sock(anything).never
assert_rr do
target_input_driver.run(expect_records: 2) do
d.run(default_tag: 'test') do
records.each do |record|
d.feed(time, record)
end
end
end
end
events = target_input_driver.events
assert_equal ['test', time, records[0]], events[0]
assert_equal ['test', time, records[1]], events[1]
end
test 'a node supporting responses' do
target_input_driver = create_target_input_driver
@d = d = create_driver(config + %[
require_ack_response true
<buffer tag>
flush_mode immediate
retry_type periodic
retry_wait 30s
flush_at_shutdown true
</buffer>
])
time = event_time("2011-01-02 13:14:15 UTC")
acked_chunk_ids = []
nacked = false
mock.proxy(d.instance.ack_handler).read_ack_from_sock(anything) do |info, success|
if success
acked_chunk_ids << info.chunk_id
else
nacked = true
end
[info, success]
end
records = [
{"a" => 1},
{"a" => 2}
]
target_input_driver.run(expect_records: 2, timeout: 5) do
d.end_if { acked_chunk_ids.size > 0 || nacked }
d.run(default_tag: 'test', wait_flush_completion: false, shutdown: false) do
d.feed([[time, records[0]], [time,records[1]]])
end
end
assert(!nacked, d.instance.log.logs.join)
events = target_input_driver.events
assert_equal ['test', time, records[0]], events[0]
assert_equal ['test', time, records[1]], events[1]
assert_equal 1, acked_chunk_ids.size
assert_equal d.instance.sent_chunk_ids.first, acked_chunk_ids.first
end
test 'a node supporting responses after stop' do
target_input_driver = create_target_input_driver
@d = d = create_driver(config + %[
require_ack_response true
<buffer tag>
flush_mode immediate
retry_type periodic
retry_wait 30s
flush_at_shutdown true
</buffer>
])
time = event_time("2011-01-02 13:14:15 UTC")
acked_chunk_ids = []
nacked = false
mock.proxy(d.instance.ack_handler).read_ack_from_sock(anything) do |info, success|
if success
acked_chunk_ids << info.chunk_id
else
nacked = true
end
[info, success]
end
records = [
{"a" => 1},
{"a" => 2}
]
target_input_driver.run(expect_records: 2, timeout: 5) do
d.end_if { acked_chunk_ids.size > 0 || nacked }
d.run(default_tag: 'test', wait_flush_completion: false, shutdown: false) do
d.instance.stop
d.feed([[time, records[0]], [time,records[1]]])
d.instance.before_shutdown
d.instance.shutdown
d.instance.after_shutdown
end
end
assert(!nacked, d.instance.log.logs.join)
events = target_input_driver.events
assert_equal ['test', time, records[0]], events[0]
assert_equal ['test', time, records[1]], events[1]
assert_equal 1, acked_chunk_ids.size
assert_equal d.instance.sent_chunk_ids.first, acked_chunk_ids.first
end
data('ack true' => true,
'ack false' => false)
test 'TLS transport and ack parameter combination' do |ack|
omit "TLS and 'ack false' always fails on AppVeyor. Need to debug" if Fluent.windows? && !ack
input_conf = target_config + %[
<transport tls>
insecure true
</transport>
]
target_input_driver = create_target_input_driver(conf: input_conf)
output_conf = %[
send_timeout 5
require_ack_response #{ack}
transport tls
tls_insecure_mode true
<server>
host #{TARGET_HOST}
port #{@target_port}
</server>
<buffer>
#flush_mode immediate
flush_interval 0s
flush_at_shutdown false # suppress errors in d.instance_shutdown
</buffer>
]
@d = d = create_driver(output_conf)
time = event_time("2011-01-02 13:14:15 UTC")
records = [{"a" => 1}, {"a" => 2}]
target_input_driver.run(expect_records: 2, timeout: 3) do
d.run(default_tag: 'test', wait_flush_completion: false, shutdown: false) do
records.each do |record|
d.feed(time, record)
end
end
end
events = target_input_driver.events
assert{ events != [] }
assert_equal(['test', time, records[0]], events[0])
assert_equal(['test', time, records[1]], events[1])
end
test 'a destination node not supporting responses by just ignoring' do
target_input_driver = create_target_input_driver(response_stub: ->(_option) { nil }, disconnect: false)
@d = d = create_driver(config + %[
require_ack_response true
ack_response_timeout 1s
<buffer tag>
flush_mode immediate
retry_type periodic
retry_wait 30s
flush_at_shutdown false # suppress errors in d.instance_shutdown
flush_thread_interval 30s
</buffer>
])
node = d.instance.nodes.first
delayed_commit_timeout_value = nil
time = event_time("2011-01-02 13:14:15 UTC")
records = [
{"a" => 1},
{"a" => 2}
]
target_input_driver.end_if{ d.instance.rollback_count > 0 }
target_input_driver.end_if{ !node.available? }
target_input_driver.run(expect_records: 2, timeout: 25) do
d.run(default_tag: 'test', timeout: 20, wait_flush_completion: false, shutdown: false, flush: false) do
delayed_commit_timeout_value = d.instance.delayed_commit_timeout
d.feed([[time, records[0]], [time,records[1]]])
end
end
assert_equal (1 + 2), delayed_commit_timeout_value
events = target_input_driver.events
assert_equal ['test', time, records[0]], events[0]
assert_equal ['test', time, records[1]], events[1]
assert{ d.instance.rollback_count > 0 }
logs = d.instance.log.logs
assert{ logs.any?{|log| log.include?("no response from node. regard it as unavailable.") } }
end
test 'a destination node not supporting responses by disconnection' do
target_input_driver = create_target_input_driver(response_stub: ->(_option) { nil }, disconnect: true)
@d = d = create_driver(config + %[
require_ack_response true
ack_response_timeout 1s
<buffer tag>
flush_mode immediate
retry_type periodic
retry_wait 30s
flush_at_shutdown false # suppress errors in d.instance_shutdown
flush_thread_interval 30s
</buffer>
])
node = d.instance.nodes.first
delayed_commit_timeout_value = nil
time = event_time("2011-01-02 13:14:15 UTC")
records = [
{"a" => 1},
{"a" => 2}
]
target_input_driver.end_if{ d.instance.rollback_count > 0 }
target_input_driver.end_if{ !node.available? }
target_input_driver.run(expect_records: 2, timeout: 25) do
d.run(default_tag: 'test', timeout: 20, wait_flush_completion: false, shutdown: false, flush: false) do
delayed_commit_timeout_value = d.instance.delayed_commit_timeout
d.feed([[time, records[0]], [time,records[1]]])
end
end
assert_equal (1 + 2), delayed_commit_timeout_value
events = target_input_driver.events
assert_equal ['test', time, records[0]], events[0]
assert_equal ['test', time, records[1]], events[1]
assert{ d.instance.rollback_count > 0 }
logs = d.instance.log.logs
assert{ logs.any?{|log| log.include?("no response from node. regard it as unavailable.") } }
end
test 'authentication_with_shared_key' do
input_conf = target_config + %[
<security>
self_hostname in.localhost
shared_key fluentd-sharedkey
<client>
host 127.0.0.1
</client>
</security>
]
target_input_driver = create_target_input_driver(conf: input_conf)
output_conf = %[
send_timeout 51
<security>
self_hostname localhost
shared_key fluentd-sharedkey
</security>
<server>
name test
host #{TARGET_HOST}
port #{@target_port}
shared_key fluentd-sharedkey
</server>
]
@d = d = create_driver(output_conf)
time = event_time("2011-01-02 13:14:15 UTC")
records = [
{"a" => 1},
{"a" => 2}
]
target_input_driver.run(expect_records: 2, timeout: 15) do
d.run(default_tag: 'test') do
records.each do |record|
d.feed(time, record)
end
end
end
events = target_input_driver.events
assert{ events != [] }
assert_equal(['test', time, records[0]], events[0])
assert_equal(['test', time, records[1]], events[1])
end
test 'keepalive + shared_key' do
input_conf = target_config + %[
<security>
self_hostname in.localhost
shared_key fluentd-sharedkey
</security>
]
target_input_driver = create_target_input_driver(conf: input_conf)
output_conf = %[
send_timeout 51
keepalive true
<security>
self_hostname localhost
shared_key fluentd-sharedkey
</security>
<server>
name test
host #{TARGET_HOST}
port #{@target_port}
</server>
]
@d = d = create_driver(output_conf)
time = event_time('2011-01-02 13:14:15 UTC')
records = [{ 'a' => 1 }, { 'a' => 2 }]
records2 = [{ 'b' => 1}, { 'b' => 2}]
target_input_driver.run(expect_records: 4, timeout: 15) do
d.run(default_tag: 'test') do
records.each do |record|
d.feed(time, record)
end
d.flush # emit buffer to reuse same socket later
records2.each do |record|
d.feed(time, record)
end
end
end
events = target_input_driver.events
assert{ events != [] }
assert_equal(['test', time, records[0]], events[0])
assert_equal(['test', time, records[1]], events[1])
assert_equal(['test', time, records2[0]], events[2])
assert_equal(['test', time, records2[1]], events[3])
end
# Verifies username/password authentication: the receiver requires
# user_auth plus a client allow-list, and the sender supplies matching
# credentials in its <server> section. Both records must be delivered.
test 'authentication_with_user_auth' do
input_conf = target_config + %[
<security>
self_hostname in.localhost
shared_key fluentd-sharedkey
user_auth true
<user>
username fluentd
password fluentd
</user>
<client>
host 127.0.0.1
</client>
</security>
]
target_input_driver = create_target_input_driver(conf: input_conf)
output_conf = %[
send_timeout 51
<security>
self_hostname localhost
shared_key fluentd-sharedkey
</security>
<server>
name test
host #{TARGET_HOST}
port #{@target_port}
shared_key fluentd-sharedkey
username fluentd
password fluentd
</server>
]
@d = d = create_driver(output_conf)
time = event_time("2011-01-02 13:14:15 UTC")
records = [
{"a" => 1},
{"a" => 2}
]
target_input_driver.run(expect_records: 2, timeout: 15) do
d.run(default_tag: 'test') do
records.each do |record|
d.feed(time, record)
end
end
end
events = target_input_driver.events
assert{ events != [] }
# Records must arrive unmodified with the tag and time they were fed.
assert_equal(['test', time, records[0]], events[0])
assert_equal(['test', time, records[1]], events[1])
end
# This test is not 100% but test failed with previous Node implementation which has race condition
test 'Node with security is thread-safe on multi threads' do
input_conf = target_config + %[
<security>
self_hostname in.localhost
shared_key fluentd-sharedkey
<client>
host 127.0.0.1
</client>
</security>
]
target_input_driver = create_target_input_driver(conf: input_conf)
output_conf = %[
send_timeout 51
<security>
self_hostname localhost
shared_key fluentd-sharedkey
</security>
<server>
name test
host #{TARGET_HOST}
port #{@target_port}
shared_key fluentd-sharedkey
</server>
]
@d = d = create_driver(output_conf)
chunk = Fluent::Plugin::Buffer::MemoryChunk.new(Fluent::Plugin::Buffer::Metadata.new(nil, nil, nil))
target_input_driver.run(timeout: 15) do
d.run(shutdown: false) do
node = d.instance.nodes.first
arr = []
# Hammer the same node from 4 threads; send errors are swallowed
# because only the handshake-related log lines below matter here.
4.times {
arr << Thread.new {
node.send_data('test', chunk) rescue nil
}
}
arr.each { |a| a.join }
end
end
logs = d.logs
# The old racy Node implementation surfaced as one of these two
# handshake errors; neither may appear in the log.
assert_false(logs.any? { |log|
log.include?("invalid format for PONG message") || log.include?("shared key mismatch")
}, "Actual log:\n#{logs.join}")
end
# Builds an in_forward input driver to act as the receiving endpoint.
#
# response_stub: optional callable overriding the ack response, used to
#   exercise error paths; when nil, in_forward's default ack handling runs.
# conf: receiver configuration (defaults to target_config).
# NOTE(review): `disconnect` is accepted but not used in this body —
#   presumably consumed by callers elsewhere; confirm before removing.
def create_target_input_driver(response_stub: nil, disconnect: false, conf: target_config)
require 'fluent/plugin/in_forward'
# TODO: Support actual TCP heartbeat test
Fluent::Test::Driver::Input.new(Fluent::Plugin::ForwardInput) {
if response_stub.nil?
# do nothing because in_forward responds for ack option in default
else
define_method(:response) do |options|
return response_stub.(options)
end
end
}.configure(conf)
end
test 'heartbeat_type_none' do
@d = d = create_driver(config + "\nheartbeat_type none")
node = d.instance.nodes.first
assert_equal Fluent::Plugin::ForwardOutput::NoneHeartbeatNode, node.class
d.instance_start
assert_nil d.instance.instance_variable_get(:@loop) # no HeartbeatHandler, or HeartbeatRequestTimer
assert_nil d.instance.instance_variable_get(:@thread) # no HeartbeatHandler, or HeartbeatRequestTimer
stub(node.failure).phi { raise 'Should not be called' }
node.tick
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_apache2.rb | test/plugin/test_parser_apache2.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser'
# Tests for Fluent::Plugin::Apache2Parser: Apache "combined" access-log
# lines must be parsed into the expected record fields and event time.
class Apache2ParserTest < ::Test::Unit::TestCase
  # strftime pattern used by Apache access-log timestamps.
  APACHE_TIME_FORMAT = '%d/%b/%Y:%H:%M:%S %z'

  def setup
    Fluent::Test.setup
    @parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::Apache2Parser)
    @parser.configure({})
    # Record expected for the fixture request below ("-" maps to nil).
    @expected = {
      'user' => nil,
      'method' => 'GET',
      'code' => 200,
      'size' => 777,
      'host' => '192.168.0.1',
      'path' => '/',
      'referer' => nil,
      'agent' => 'Opera/12.0'
    }
  end

  # Event time shared by every fixture log line.
  def expected_time
    event_time('28/Feb/2013:12:00:00 +0900', format: APACHE_TIME_FORMAT)
  end

  def test_parse
    line = '192.168.0.1 - - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777 "-" "Opera/12.0"'
    @parser.instance.parse(line) do |time, record|
      assert_equal(expected_time, time)
      assert_equal(@expected, record)
    end
    # The parser must expose its regexp and time format via #patterns.
    assert_equal(Fluent::Plugin::Apache2Parser::REGEXP,
                 @parser.instance.patterns['format'])
    assert_equal(Fluent::Plugin::Apache2Parser::TIME_FORMAT,
                 @parser.instance.patterns['time_format'])
  end

  def test_parse_without_http_version
    # Same request, but the quoted section omits "HTTP/1.1".
    line = '192.168.0.1 - - [28/Feb/2013:12:00:00 +0900] "GET /" 200 777 "-" "Opera/12.0"'
    @parser.instance.parse(line) do |time, record|
      assert_equal(expected_time, time)
      assert_equal(@expected, record)
    end
  end

  def test_parse_with_escape_sequence
    # Escaped quotes/backslashes inside path, referer and agent must be
    # preserved verbatim in the parsed record.
    line = '192.168.0.1 - - [28/Feb/2013:12:00:00 +0900] "GET /\" HTTP/1.1" 200 777 "referer \\\ \"" "user agent \\\ \""'
    @parser.instance.parse(line) do |_, record|
      assert_equal('/\"', record['path'])
      assert_equal('referer \\\ \"', record['referer'])
      assert_equal('user agent \\\ \"', record['agent'])
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_tcp.rb | test/plugin/test_in_tcp.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_tcp'
# Tests for Fluent::Plugin::TcpInput: configuration, parsing of incoming
# payloads, source host/address enrichment, client access control and
# message length limiting.
class TcpInputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
# Pick a free TCP port per test to avoid collisions between test workers.
@port = unused_port(protocol: :tcp)
end
def teardown
@port = nil
end
def base_config
%[
port #{@port}
tag tcp
]
end
def ipv4_config
base_config + %[
bind 127.0.0.1
format none
]
end
def ipv6_config
base_config + %[
bind ::1
format none
]
end
def create_driver(conf)
Fluent::Test::Driver::Input.new(Fluent::Plugin::TcpInput).configure(conf)
end
# Thin wrapper around TCPSocket.open that forwards an optional block
# (the block form closes the socket automatically).
def create_tcp_socket(host, port, &block)
if block_given?
TCPSocket.open(host, port, &block)
else
TCPSocket.open(host, port)
end
end
data(
'ipv4' => ['127.0.0.1', :ipv4],
'ipv6' => ['::1', :ipv6],
)
test 'configure' do |data|
bind, protocol = data
conf = send("#{protocol}_config")
omit "IPv6 is not supported on this environment" if protocol == :ipv6 && !ipv6_enabled?
d = create_driver(conf)
assert_equal @port, d.instance.port
assert_equal bind, d.instance.bind
assert_equal "\n", d.instance.delimiter
end
test ' configure w/o parse section' do
assert_raise(Fluent::ConfigError.new("<parse> section is required.")) {
create_driver(base_config)
}
end
# Shared payload/expectation matrix for the `none` and `json` parsers.
test_case_data = {
'none' => {
'format' => 'none',
'payloads' => [ "tcptest1\n", "tcptest2\n" ],
'expecteds' => [
{'message' => 'tcptest1'},
{'message' => 'tcptest2'},
],
},
'json' => {
'format' => 'json',
'payloads' => [
{'k' => 123, 'message' => 'tcptest1'}.to_json + "\n",
{'k' => 'tcptest2', 'message' => 456}.to_json + "\n",
],
'expecteds' => [
{'k' => 123, 'message' => 'tcptest1'},
{'k' => 'tcptest2', 'message' => 456}
],
},
}
data(test_case_data)
test 'test_msg_size' do |data|
format = data['format']
payloads = data['payloads']
expecteds = data['expecteds']
d = create_driver(base_config + "format #{format}")
# One connection per payload.
d.run(expect_records: 2) do
payloads.each do |payload|
create_tcp_socket('127.0.0.1', @port) do |sock|
sock.send(payload, 0)
end
end
end
assert_equal 2, d.events.size
expecteds.each_with_index do |expected_record, i|
assert_equal "tcp", d.events[i][0]
assert d.events[i][1].is_a?(Fluent::EventTime)
assert_equal expected_record, d.events[i][2]
end
end
data(test_case_data)
test 'test data in a connection' do |data|
format = data['format']
payloads = data['payloads']
expecteds = data['expecteds']
d = create_driver(base_config + "format #{format}")
# All payloads over a single connection.
d.run(expect_records: 2) do
create_tcp_socket('127.0.0.1', @port) do |sock|
payloads.each do |payload|
sock.send(payload, 0)
end
end
end
assert_equal 2, d.events.size
expecteds.each_with_index do |expected_record, i|
assert_equal "tcp", d.events[i][0]
assert d.events[i][1].is_a?(Fluent::EventTime)
assert_equal expected_record, d.events[i][2]
end
end
test 'source_hostname_key' do
d = create_driver(base_config + %!
format none
source_hostname_key host
!)
hostname = nil
d.run(expect_records: 1) do
create_tcp_socket('127.0.0.1', @port) do |sock|
sock.do_not_reverse_lookup = false
hostname = sock.peeraddr[2]
sock.send("test\n", 0)
end
end
assert_equal 1, d.events.size
event = d.events[0]
assert_equal "tcp", event[0]
assert event[1].is_a?(Fluent::EventTime)
# The resolved peer hostname must be injected under the configured key.
assert_equal hostname, event[2]['host']
end
test "send_keepalive_packet_can_be_enabled" do
d = create_driver(base_config + %!
format none
send_keepalive_packet true
!)
assert_true d.instance.send_keepalive_packet
d = create_driver(base_config + %!
format none
!)
assert_false d.instance.send_keepalive_packet
end
test 'source_address_key' do
d = create_driver(base_config + %!
format none
source_address_key addr
!)
address = nil
d.run(expect_records: 1) do
create_tcp_socket('127.0.0.1', @port) do |sock|
address = sock.peeraddr[3]
sock.send("test\n", 0)
end
end
assert_equal 1, d.events.size
event = d.events[0]
assert_equal "tcp", event[0]
assert event[1].is_a?(Fluent::EventTime)
# The peer IP address must be injected under the configured key.
assert_equal address, event[2]['addr']
end
sub_test_case '<security>' do
test 'accept from allowed client' do
d = create_driver(ipv4_config + %!
<security>
<client>
network 127.0.0.1
</client>
</security>
!)
d.run(expect_records: 1) do
create_tcp_socket('127.0.0.1', @port) do |sock|
sock.send("hello\n", 0)
end
end
assert_equal 1, d.events.size
event = d.events[0]
assert_equal 'tcp', event[0]
assert_equal 'hello', event[2]['message']
end
test 'deny from disallowed client' do
d = create_driver(ipv4_config + %!
<security>
<client>
network 200.0.0.0
</client>
</security>
!)
d.run(expect_records: 1, timeout: 2) do
create_tcp_socket('127.0.0.1', @port) do |sock|
sock.send("hello\n", 0)
end
end
# The rejected client must be logged once and nothing emitted.
assert_equal 1, d.logs.count { |l| l =~ /anonymous client/ }
assert_equal 0, d.events.size
end
end
sub_test_case '<extract>' do
test 'extract tag from record field' do
d = create_driver(base_config + %!
<parse>
@type json
</parse>
<extract>
tag_key tag
</extract>
!)
d.run(expect_records: 1) do
create_tcp_socket('127.0.0.1', @port) do |sock|
data = {'msg' => 'hello', 'tag' => 'helper_test'}
sock.send("#{data.to_json}\n", 0)
end
end
assert_equal 1, d.events.size
event = d.events[0]
# The event tag comes from the record's 'tag' field, not the plugin tag.
assert_equal 'helper_test', event[0]
assert event[1].is_a?(Fluent::EventTime)
assert_equal 'hello', event[2]['msg']
end
end
sub_test_case "message_length_limit" do
data("batch_emit", { extract: "" }, keep: true)
data("single_emit", { extract: "<extract>\ntag_key tag\n</extract>\n" }, keep: true)
test "drop records exceeding limit" do |data|
message_length_limit = 10
d = create_driver(base_config + %!
message_length_limit #{message_length_limit}
<parse>
@type none
</parse>
#{data[:extract]}
!)
d.run(expect_records: 2, timeout: 10) do
create_tcp_socket('127.0.0.1', @port) do |sock|
sock.send("a" * message_length_limit + "\n", 0)
sock.send("b" * (message_length_limit + 1) + "\n", 0)
sock.send("c" * (message_length_limit - 1) + "\n", 0)
end
end
# Only records within the limit survive; the over-length "b…" is dropped.
expected_records = [
"a" * message_length_limit,
"c" * (message_length_limit - 1)
]
actual_records = d.events.collect do |event|
event[2]["message"]
end
assert_equal expected_records, actual_records
end
test "clear buffer and discard the subsequent data until the next delimiter" do |data|
message_length_limit = 12
d = create_driver(base_config + %!
message_length_limit #{message_length_limit}
delimiter ";"
<parse>
@type json
</parse>
#{data[:extract]}
!)
d.run(expect_records: 1, timeout: 10) do
create_tcp_socket('127.0.0.1', @port) do |sock|
sock.send('{"message":', 0)
sock.send('"hello', 0)
sleep 1 # To make the server read data and clear the buffer here.
sock.send('world!"};', 0) # This subsequent data must be discarded so that a parsing failure doesn't occur.
sock.send('{"k":"v"};', 0) # This will succeed to parse.
end
end
# Strip leading timestamps so log lines can be compared literally.
logs = d.logs.collect do |log|
log.gsub(/\A\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4} /, "")
end
actual_records = d.events.collect do |event|
event[2]
end
assert_equal(
{
# Asserting that '[warn]: pattern not matched message="world!\"}"' warning does not occur.
logs: ['[info]: The buffer size exceeds \'message_length_limit\', cleared: limit=12 size=17 head="{\"message\":\"hello"' + "\n"],
records: [{"k" => "v"}],
},
{
logs: logs[1..],
records: actual_records,
}
)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_out_http.rb | test/plugin/test_out_http.rb | require_relative "../helper"
require 'fluent/test/driver/output'
require 'fluent/plugin/out_http'
require 'webrick'
require 'webrick/https'
require 'net/http'
require 'uri'
require 'json'
require 'aws-sdk-core'
# WEBrick's ProcHandler doesn't handle PUT by default
module WEBrick::HTTPServlet
  class ProcHandler < AbstractServlet
    # Route PUT requests through the same handler proc as GET.
    alias_method :do_PUT, :do_GET
  end
end
# End-to-end tests for Fluent::Plugin::HTTPOutput against a local WEBrick
# server. The server thread, the last captured request (@@result) and an
# optional auth hook (@@auth_handler) are shared through class variables so
# each test doesn't pay server start/shutdown cost. Sub test cases override
# #server_port (and sometimes #server_config/#http_client) to isolate their
# own server instances.
class HTTPOutputTest < Test::Unit::TestCase
include Fluent::Test::Helpers
TMP_DIR = File.join(__dir__, "../tmp/out_http#{ENV['TEST_ENV_NUMBER']}")
DEFAULT_LOGGER = ::WEBrick::Log.new(::STDOUT, ::WEBrick::BasicLog::FATAL)
class << self
# Use class variable to reduce server start/shutdown time
def startup
@@result = nil
@@auth_handler = nil
@@http_server_thread = nil
end
def shutdown
@@http_server_thread.kill
@@http_server_thread.join
rescue
end
end
# Overridden by sub test cases to run their server on a dedicated port.
def server_port
19880
end
def base_endpoint
"http://127.0.0.1:#{server_port}"
end
def server_config
config = {BindAddress: '127.0.0.1', Port: server_port}
# Suppress webrick logs
config[:Logger] = DEFAULT_LOGGER
config[:AccessLog] = []
config
end
# Short-timeout Net::HTTP client against the local test server.
def http_client(**opts, &block)
opts = opts.merge(open_timeout: 1, read_timeout: 1)
if block_given?
Net::HTTP.start('127.0.0.1', server_port, **opts, &block)
else
Net::HTTP.start('127.0.0.1', server_port, **opts)
end
end
# Mounts /test (captures method/headers/decoded body into @@result),
# /503 and /404 (fixed error responses) and / (liveness probe), then
# serves until the owning thread is killed.
def run_http_server
server = ::WEBrick::HTTPServer.new(server_config)
server.mount_proc('/test') { |req, res|
if @@auth_handler
@@auth_handler.call(req, res)
end
@@result.method = req.request_method
@@result.content_type = req.content_type
req.each do |key, value|
@@result.headers[key] = value
end
body = ""
data = []
case req['content-encoding']
when 'gzip'
body = Zlib::GzipReader.new(StringIO.new(req.body)).read
else
body = req.body
end
# Decode the body per content type so tests can assert on structures.
case req.content_type
when 'application/x-ndjson'
body.each_line { |l|
data << JSON.parse(l)
}
when 'application/json'
data = JSON.parse(body)
when 'text/plain'
# Use single_value in this test
body.each_line { |line|
data << line.chomp
}
else
data << body
end
@@result.data = data
res.status = 200
res.body = "success"
}
server.mount_proc('/503') { |_, res|
res.status = 503
res.body = 'Service Unavailable'
}
server.mount_proc('/404') { |_, res|
res.status = 404
res.body = 'Not Found'
}
# For start check
server.mount_proc('/') { |_, res|
res.status = 200
res.body = 'Hello Fluentd!'
}
server.start
ensure
server.shutdown rescue nil
end
# Captures what the test server received from the plugin.
Result = Struct.new("Result", :method, :content_type, :headers, :data)
setup do
Fluent::Test.setup
FileUtils.rm_rf(TMP_DIR)
@@result = Result.new(nil, nil, {}, nil)
@@http_server_thread ||= Thread.new do
run_http_server
end
now = Time.now
started = false
# Poll "/" until the server answers (up to 10 seconds).
until started
raise "Server not started" if (Time.now - now > 10.0)
begin
http_client { |c| c.request_get('/') }
started = true
rescue
sleep 0.5
end
end
end
teardown do
@@result = nil
@@auth_handler = nil
end
def create_driver(conf)
Fluent::Test::Driver::Output.new(Fluent::Plugin::HTTPOutput).configure(conf)
end
def config
%[
endpoint #{base_endpoint}/test
]
end
# Two-record fixture used by the write tests.
def test_events
[
{"message" => "hello", "num" => 10, "bool" => true},
{"message" => "hello", "num" => 11, "bool" => false}
]
end
def test_configure
d = create_driver(config)
assert_equal "http://127.0.0.1:#{server_port}/test", d.instance.endpoint
assert_equal :post, d.instance.http_method
assert_equal 'application/x-ndjson', d.instance.content_type
assert_equal [503], d.instance.retryable_response_codes
assert_true d.instance.error_response_as_unrecoverable
assert_nil d.instance.proxy
assert_nil d.instance.headers
end
def test_configure_with_warn
d = create_driver(config)
assert_match(/Status code 503 is going to be removed/, d.instance.log.out.logs.join)
end
def test_configure_without_warn
d = create_driver(<<~CONFIG)
endpoint #{base_endpoint}/test
retryable_response_codes [503]
CONFIG
assert_not_match(/Status code 503 is going to be removed/, d.instance.log.out.logs.join)
end
# Check if an exception is raised on not JSON format use
data('not_json' => 'msgpack')
def test_configure_with_json_array_err(format_type)
assert_raise(Fluent::ConfigError) do
create_driver(config + %[
json_array true
<format>
@type #{format_type}
</format>
])
end
end
data('json' => ['json', 'application/x-ndjson'],
'ltsv' => ['ltsv', 'text/tab-separated-values'],
'msgpack' => ['msgpack', 'application/x-msgpack'],
'single_value' => ['single_value', 'text/plain'])
def test_configure_content_type(types)
format_type, content_type = types
d = create_driver(config + %[
<format>
@type #{format_type}
</format>
])
assert_equal content_type, d.instance.content_type
end
# Check that json_array setting sets content_type = application/json
data('json' => 'application/json')
def test_configure_content_type_json_array(content_type)
d = create_driver(config + "json_array true")
assert_equal content_type, d.instance.content_type
end
data('PUT' => 'put', 'POST' => 'post')
def test_write_with_method(method)
d = create_driver(config + "http_method #{method}")
d.run(default_tag: 'test.http') do
test_events.each { |event|
d.feed(event)
}
end
result = @@result
assert_equal method.upcase, result.method
assert_equal 'application/x-ndjson', result.content_type
assert_equal test_events, result.data
assert_not_empty result.headers
end
# Check that JSON at HTTP request body is valid
def test_write_with_json_array_setting
d = create_driver(config + "json_array true")
d.run(default_tag: 'test.http') do
test_events.each { |event|
d.feed(event)
}
end
result = @@result
assert_equal 'application/json', result.content_type
assert_equal test_events, result.data
assert_not_empty result.headers
end
def test_write_with_single_value_format
d = create_driver(config + %[
<format>
@type single_value
</format>
])
d.run(default_tag: 'test.http') do
test_events.each { |event|
d.feed(event)
}
end
result = @@result
assert_equal 'text/plain', result.content_type
# single_value emits only the 'message' field per line.
assert_equal (test_events.map { |e| e['message'] }), result.data
assert_not_empty result.headers
end
def test_write_with_headers
d = create_driver(config + 'headers {"test_header":"fluentd!"}')
d.run(default_tag: 'test.http') do
test_events.each { |event|
d.feed(event)
}
end
result = @@result
assert_true result.headers.has_key?('test_header')
assert_equal "fluentd!", result.headers['test_header']
end
def test_write_with_headers_from_placeholders
d = create_driver(config + %[
headers_from_placeholders {"x-test":"${$.foo.bar}-test","x-tag":"${tag}"}
<buffer tag,$.foo.bar>
</buffer>
])
d.run(default_tag: 'test.http') do
test_events.each { |event|
ev = event.dup
ev['foo'] = {'bar' => 'abcd'}
d.feed(ev)
}
end
result = @@result
# Placeholders are resolved from the chunk keys (tag and $.foo.bar).
assert_equal "abcd-test", result.headers['x-test']
assert_equal "test.http", result.headers['x-tag']
end
def test_write_with_retryable_response
old_report_on_exception = Thread.report_on_exception
Thread.report_on_exception = false # thread finished as invalid state since RetryableResponse raises.
d = create_driver("endpoint #{base_endpoint}/503")
assert_raise(Fluent::Plugin::HTTPOutput::RetryableResponse) do
d.run(default_tag: 'test.http', shutdown: false) do
test_events.each { |event|
d.feed(event)
}
end
end
d.instance_shutdown(log: $log)
ensure
Thread.report_on_exception = old_report_on_exception
end
def test_write_with_disabled_unrecoverable
d = create_driver(%[
endpoint #{base_endpoint}/404
error_response_as_unrecoverable false
])
d.run(default_tag: 'test.http', shutdown: false) do
test_events.each { |event|
d.feed(event)
}
end
# With the flag disabled, a 404 is logged as an error, not raised.
assert_match(/got error response from.*404 Not Found Not Found/, d.instance.log.out.logs.join)
d.instance_shutdown
end
sub_test_case 'basic auth' do
setup do
FileUtils.mkdir_p(TMP_DIR)
htpd = WEBrick::HTTPAuth::Htpasswd.new(File.join(TMP_DIR, 'dot.htpasswd'))
htpd.set_passwd(nil, 'test', 'hey')
authenticator = WEBrick::HTTPAuth::BasicAuth.new(:UserDB => htpd, :Realm => 'test', :Logger => DEFAULT_LOGGER)
@@auth_handler = Proc.new { |req, res| authenticator.authenticate(req, res) }
end
teardown do
FileUtils.rm_rf(TMP_DIR)
end
def server_port
19881
end
def test_basic_auth
d = create_driver(config + %[
<auth>
method basic
username test
password hey
</auth>
])
d.run(default_tag: 'test.http') do
test_events.each { |event|
d.feed(event)
}
end
result = @@result
assert_equal 'POST', result.method
assert_equal 'application/x-ndjson', result.content_type
assert_equal test_events, result.data
assert_not_empty result.headers
end
# This test includes `error_response_as_unrecoverable true` behaviour check
def test_basic_auth_with_invalid_auth
d = create_driver(config + %[
<auth>
method basic
username ayaya
password hello?
</auth>
])
d.instance.system_config_override(root_dir: TMP_DIR) # Backup files are generated in TMP_DIR.
d.run(default_tag: 'test.http', shutdown: false) do
test_events.each { |event|
d.feed(event)
}
end
assert_match(/got unrecoverable error/, d.instance.log.out.logs.join)
d.instance_shutdown
end
end
sub_test_case 'aws sigv4 auth' do
setup do
@@fake_aws_credentials = Aws::Credentials.new(
'fakeaccess',
'fakesecret',
'fake session token'
)
end
def server_port
19883
end
def test_aws_sigv4_sts_role_arn
# Stub STS assume-role so no real AWS call is made.
stub(Aws::AssumeRoleCredentials).new do |credentials_provider|
stub(credentials_provider).credentials {
@@fake_aws_credentials
}
credentials_provider
end
d = create_driver(config + %[
<auth>
method aws_sigv4
aws_service someservice
aws_region my-region-1
aws_role_arn arn:aws:iam::123456789012:role/MyRole
</auth>
])
d.run(default_tag: 'test.http') do
test_events.each { |event|
d.feed(event)
}
end
result = @@result
assert_equal 'POST', result.method
assert_equal 'application/x-ndjson', result.content_type
assert_equal test_events, result.data
assert_not_empty result.headers
assert_not_nil result.headers['authorization']
assert_match(/AWS4-HMAC-SHA256 Credential=[a-zA-Z0-9]*\/\d+\/my-region-1\/someservice\/aws4_request/, result.headers['authorization'])
assert_match(/SignedHeaders=content-type;host;x-amz-content-sha256;x-amz-date;x-amz-security-token/, result.headers['authorization'])
assert_equal @@fake_aws_credentials.session_token, result.headers['x-amz-security-token']
assert_not_nil result.headers['x-amz-content-sha256']
assert_not_empty result.headers['x-amz-content-sha256']
assert_not_nil result.headers['x-amz-security-token']
assert_not_empty result.headers['x-amz-security-token']
assert_not_nil result.headers['x-amz-date']
assert_not_empty result.headers['x-amz-date']
end
def test_aws_sigv4_no_role
# Stub the default credential chain so no real AWS lookup is made.
stub(Aws::CredentialProviderChain).new do |provider_chain|
stub(provider_chain).resolve {
@@fake_aws_credentials
}
provider_chain
end
d = create_driver(config + %[
<auth>
method aws_sigv4
aws_service someservice
aws_region my-region-1
</auth>
])
d.run(default_tag: 'test.http') do
test_events.each { |event|
d.feed(event)
}
end
result = @@result
assert_equal 'POST', result.method
assert_equal 'application/x-ndjson', result.content_type
assert_equal test_events, result.data
assert_not_empty result.headers
assert_not_nil result.headers['authorization']
assert_match(/AWS4-HMAC-SHA256 Credential=[a-zA-Z0-9]*\/\d+\/my-region-1\/someservice\/aws4_request/, result.headers['authorization'])
assert_match(/SignedHeaders=content-type;host;x-amz-content-sha256;x-amz-date;x-amz-security-token/, result.headers['authorization'])
assert_equal @@fake_aws_credentials.session_token, result.headers['x-amz-security-token']
assert_not_nil result.headers['x-amz-content-sha256']
assert_not_empty result.headers['x-amz-content-sha256']
assert_not_nil result.headers['x-amz-security-token']
assert_not_empty result.headers['x-amz-security-token']
assert_not_nil result.headers['x-amz-date']
assert_not_empty result.headers['x-amz-date']
end
end
sub_test_case 'HTTPS' do
def server_port
19882
end
def server_config
config = super
# WEBrick supports self-generated self-signed certificate
config[:SSLEnable] = true
config[:SSLCertName] = [["CN", WEBrick::Utils::getservername]]
config[:SSLMaxVersion] = OpenSSL::SSL::TLS1_3_VERSION
config
end
def http_client(&block)
super(use_ssl: true, verify_mode: OpenSSL::SSL::VERIFY_NONE, &block)
end
def test_write_with_https
d = create_driver(%[
endpoint https://127.0.0.1:#{server_port}/test
tls_verify_mode none
tls_version TLSv1_3
ssl_timeout 2s
])
d.run(default_tag: 'test.http') do
test_events.each { |event|
d.feed(event)
}
end
result = @@result
assert_equal 'POST', result.method
assert_equal 'application/x-ndjson', result.content_type
assert_equal test_events, result.data
assert_not_empty result.headers
end
end
sub_test_case 'GZIP' do
def server_port
19882
end
data(:json_array, [false, true])
data(:buffer_compress, ["text", "gzip"])
def test_write_with_gzip
d = create_driver(%[
endpoint http://127.0.0.1:#{server_port}/test
compress gzip
json_array #{data[:json_array]}
<buffer>
@type memory
compress #{data[:buffer_compress]}
</buffer>
])
d.run(default_tag: 'test.http') do
test_events.each { |event|
d.feed(event)
}
end
result = @@result
assert_equal 'POST', result.method
assert_equal(
data[:json_array] ? 'application/json' : 'application/x-ndjson',
result.content_type
)
assert_equal 'gzip', result.headers['content-encoding']
assert_equal test_events, result.data
assert_not_empty result.headers
end
end
sub_test_case 'connection_reuse' do
def server_port
19883
end
def test_connection_recreation
d = create_driver(%[
endpoint http://127.0.0.1:#{server_port}/test
reuse_connections true
])
d.run(default_tag: 'test.http', shutdown: false) do
d.feed(test_events[0])
end
data = @@result.data
# Restart server to simulate connection loss
@@http_server_thread.kill
@@http_server_thread.join
@@http_server_thread = Thread.new do
run_http_server
end
d.run(default_tag: 'test.http') do
d.feed(test_events[1])
end
result = @@result
assert_equal 'POST', result.method
assert_equal 'application/x-ndjson', result.content_type
# Both events — before and after the restart — must have been delivered.
assert_equal test_events, data.concat(result.data)
assert_not_empty result.headers
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_metrics_local.rb | test/plugin/test_metrics_local.rb | require_relative '../helper'
require 'fluent/plugin/metrics_local'
require 'fluent/system_config'
# Tests for Fluent::Plugin::LocalMetrics covering both counter mode
# (monotonic: dec/sub forbidden, set only raises the value) and gauge mode
# (freely settable in both directions).
class LocalMetricsTest < ::Test::Unit::TestCase
sub_test_case 'configure' do
test "configured for counter mode" do
m = Fluent::Plugin::LocalMetrics.new
m.configure(config_element('metrics', '', {"labels" => {test: "test-unit", language: "Ruby"}}))
assert_false m.use_gauge_metric
assert_equal({agent: "Fluentd", hostname: "#{Socket.gethostname}"}, m.default_labels)
assert_equal({test: "test-unit", language: "Ruby"}, m.labels)
assert_true m.has_methods_for_counter
assert_false m.has_methods_for_gauge
end
test "configured for gauge mode" do
m = Fluent::Plugin::LocalMetrics.new
m.use_gauge_metric = true
m.configure(config_element('metrics', '', {"labels" => {test: "test-unit", language: "Ruby"}}))
assert_true m.use_gauge_metric
assert_equal({agent: "Fluentd", hostname: "#{Socket.gethostname}"}, m.default_labels)
assert_equal({test: "test-unit", language: "Ruby"}, m.labels)
assert_false m.has_methods_for_counter
assert_true m.has_methods_for_gauge
end
end
sub_test_case 'LocalMetric' do
sub_test_case "counter" do
setup do
@m = Fluent::Plugin::LocalMetrics.new
@m.configure(config_element('metrics', '', {}))
end
test '#configure' do
assert_true @m.has_methods_for_counter
assert_false @m.has_methods_for_gauge
end
test 'all local counter operations work well' do
assert_equal 0, @m.get
assert_equal 1, @m.inc
@m.add(20)
assert_equal 21, @m.get
# Counters are monotonic: decrement must be rejected.
assert_raise NotImplementedError do
@m.dec
end
@m.set(100)
assert_equal 100, @m.get
@m.set(10)
assert_equal 100, @m.get # On counter, value should be overwritten bigger than stored one.
assert_raise NotImplementedError do
@m.sub(11)
end
end
end
sub_test_case "gauge" do
setup do
@m = Fluent::Plugin::LocalMetrics.new
@m.use_gauge_metric = true
@m.configure(config_element('metrics', '', {}))
end
test '#configure' do
assert_false @m.has_methods_for_counter
assert_true @m.has_methods_for_gauge
end
test 'all local gauge operations work well' do
assert_equal 0, @m.get
assert_equal 1, @m.inc
@m.add(20)
assert_equal 21, @m.get
@m.dec
assert_equal 20, @m.get
@m.set(100)
assert_equal 100, @m.get
@m.sub(11)
assert_equal 89, @m.get
@m.set(10)
assert_equal 10, @m.get # On gauge, value always should be overwritten.
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_output_as_buffered_compress.rb | test/plugin/test_output_as_buffered_compress.rb | require_relative '../helper'
require 'fluent/plugin/output'
require 'fluent/plugin/buffer'
require 'fluent/plugin/compressable'
require 'fluent/event'
require 'timeout'
# Test doubles for the buffered-output compression tests: minimal output
# plugins whose #write (and optionally #format) hooks can be stubbed per
# test via #register, plus a shared event-stream fixture.
module FluentPluginOutputAsBufferedCompressTest
class DummyBareOutput < Fluent::Plugin::Output
# Store a block as instance variable @<name> so the hook methods below
# can delegate to it (e.g. register(:write) { |chunk| ... }).
def register(name, &block)
instance_variable_set("@#{name}", block)
end
end
class DummyAsyncOutput < DummyBareOutput
def initialize
super
@format = @write = nil
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
end
class DummyAsyncOutputWithFormat < DummyBareOutput
def initialize
super
# NOTE(review): only @format is initialized here; @write stays unset
# (nil) until #register(:write) is called — confirm this is intended.
@format = nil
end
def write(chunk)
@write ? @write.call(chunk) : nil
end
def format(tag, time, record)
@format ? @format.call(tag, time, record) : [tag, time, record].to_json
end
end
# Three-record fixture stream shared by the compression tests.
def self.dummy_event_stream
Fluent::ArrayEventStream.new(
[
[event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
[event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
[event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
]
)
end
end
class BufferedOutputCompressTest < Test::Unit::TestCase
include Fluent::Plugin::Compressable
def create_output(type=:async)
case type
when :async then FluentPluginOutputAsBufferedCompressTest::DummyAsyncOutput.new
when :async_with_format then FluentPluginOutputAsBufferedCompressTest::DummyAsyncOutputWithFormat.new
else
raise ArgumentError, "unknown type: #{type}"
end
end
def waiting(seconds)
begin
Timeout.timeout(seconds) do
yield
end
rescue Timeout::Error
STDERR.print(*@i.log.out.logs)
raise
end
end
TMP_DIR = File.expand_path('../../tmp/test_output_as_buffered_compress', __FILE__)
setup do
FileUtils.rm_r TMP_DIR rescue nil
FileUtils.mkdir_p TMP_DIR
end
teardown do
if @i
@i.stop unless @i.stopped?
@i.before_shutdown unless @i.before_shutdown?
@i.shutdown unless @i.shutdown?
@i.after_shutdown unless @i.after_shutdown?
@i.close unless @i.closed?
@i.terminate unless @i.terminated?
end
end
# Test matrix axis 1: memory/file buffers, with and without a tag chunk key,
# all with gzip compression enabled.
data(
  :buffer_config,
  [
    config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
    config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
    config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
    config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
  ],
)
# Test matrix axis 2: a plain event stream and an already-compressed one.
data(
  :input_es,
  [
    FluentPluginOutputAsBufferedCompressTest.dummy_event_stream,
    # If already compressed data is incoming, it must be written as is (i.e. without decompressed).
    # https://github.com/fluent/fluentd/issues/4146
    Fluent::CompressedMessagePackEventStream.new(FluentPluginOutputAsBufferedCompressTest.dummy_event_stream.to_compressed_msgpack_stream),
  ],
)
test 'call a standard format when output plugin adds data to chunk' do |data|
  buffer_config = data[:buffer_config]
  es = data[:input_es].dup # Note: the data matrix is shared in all patterns, so we need `dup` here.
  @i = create_output(:async)
  @i.configure(config_element('ROOT','', {}, [buffer_config]))
  @i.start
  @i.after_start
  io = StringIO.new
  expected = es.dup.map { |t, r| [t, r] }
  compressed_data = ''
  assert_equal :gzip, @i.buffer.compress
  # Capture both the raw gzip chunk bytes and the decompressed stream
  # that write_to emits.
  @i.register(:write) do |c|
    compressed_data = c.read(compressed: :gzip)
    c.write_to(io)
  end
  @i.emit_events('tag', es)
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  # Wait (up to 4s) for the flush thread to deliver the chunk.
  waiting(4) { Thread.pass until io.size > 0 }
  # Compressed payload must round-trip to the original events, and
  # write_to must produce them already decompressed.
  assert_equal expected, Fluent::MessagePackEventStream.new(decompress(compressed_data)).map { |t, r| [t, r] }
  assert_equal expected, Fluent::MessagePackEventStream.new(io.string).map { |t, r| [t, r] }
end
# Same buffer matrix as above, but the output plugin supplies a custom
# #format, so chunks contain newline-delimited formatted records.
data(
  handle_simple_stream: config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
  handle_stream_with_custom_format: config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
  handle_simple_stream_and_file_chunk: config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
  handle_stream_with_custom_format_and_file_chunk: config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
)
test 'call a custom format when output plugin adds data to chunk' do |buffer_config|
  @i = create_output(:async_with_format)
  @i.configure(config_element('ROOT','', {}, [buffer_config]))
  @i.start
  @i.after_start
  io = StringIO.new
  es = FluentPluginOutputAsBufferedCompressTest.dummy_event_stream
  expected = es.map { |e| "#{e[1]}\n" }.join # e[1] is record
  compressed_data = ''
  assert_equal :gzip, @i.buffer.compress
  # Custom format: one stringified record per line.
  @i.register(:format) { |tag, time, record| "#{record}\n" }
  @i.register(:write) { |c|
    compressed_data = c.read(compressed: :gzip)
    c.write_to(io)
  }
  @i.emit_events('tag', es)
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4) { sleep 0.1 until io.size > 0 }
  assert_equal expected, decompress(compressed_data)
  assert_equal expected, io.string
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_buf_file_single.rb | test/plugin/test_buf_file_single.rb | require_relative '../helper'
require 'fluent/plugin/buf_file_single'
require 'fluent/plugin/output'
require 'fluent/unique_id'
require 'fluent/system_config'
require 'fluent/env'
require 'fluent/test/driver/output'
require 'msgpack'
module FluentPluginFileSingleBufferTest
  # Minimal output plugin whose buffer defaults to file_single.
  # Chunks are discarded on write; tests only inspect buffer state.
  class DummyOutputPlugin < Fluent::Plugin::Output
    Fluent::Plugin.register_output('buf_file_single_test', self)

    config_section :buffer do
      config_set_default :@type, 'file_single'
    end

    # Accept multi-worker configurations.
    def multi_workers_ready?
      true
    end

    def write(chunk)
      # drop
    end
  end

  # Like DummyOutputPlugin, but announces msgpack-formatted chunks so the
  # buffer auto-detects chunk_format :msgpack.
  class DummyOutputMPPlugin < Fluent::Plugin::Output
    Fluent::Plugin.register_output('buf_file_single_mp_test', self)

    config_section :buffer do
      config_set_default :@type, 'file_single'
    end

    def formatted_to_msgpack_binary?
      true
    end

    def multi_workers_ready?
      true
    end

    def write(chunk)
      # drop
    end
  end

  # Output plugin that fails every write until #recover is called; after
  # recovery it forwards chunks to the callback installed via #register_write.
  class DummyErrorOutputPlugin < DummyOutputPlugin
    def initialize
      super
      @should_fail_writing = true
      @write = nil
    end

    # Install a callback to receive chunks once writes succeed.
    def register_write(&block)
      @write = block
    end

    # Stop simulating write failures.
    def recover
      @should_fail_writing = false
    end

    def write(chunk)
      raise "failed writing chunk" if @should_fail_writing

      @write&.call(chunk)
    end

    # Text formatting used by tests: one JSON array per line.
    def format(tag, time, record)
      [tag, time.to_i, record].to_json + "\n"
    end
  end
end
class FileSingleBufferTest < Test::Unit::TestCase
# Build buffer metadata; tag defaults to 'testing' to match the chunk
# file fixtures used throughout this test case.
def metadata(timekey: nil, tag: 'testing', variables: nil)
  Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end

# Directory that holds file_single buffer chunks under test.
PATH = File.expand_path('../../tmp/buffer_file_single_dir', __FILE__)

# Buffer config chunked by tag.
TAG_CONF = %[
  <buffer tag>
    @type file_single
    path #{PATH}
  </buffer>
]
# Buffer config chunked by the record field 'k'.
FIELD_CONF = %[
  <buffer k>
    @type file_single
    path #{PATH}
  </buffer>
]

setup do
  Fluent::Test.setup
  @d = nil
  @bufdir = PATH
  # Start each test from an empty buffer directory (best-effort removal).
  FileUtils.rm_rf(@bufdir) rescue nil
  FileUtils.mkdir_p(@bufdir)
end

teardown do
  FileUtils.rm_rf(@bufdir) rescue nil
end

# Configure an output driver around +klass+ (dummy plugin by default).
def create_driver(conf = TAG_CONF, klass = FluentPluginFileSingleBufferTest::DummyOutputPlugin)
  Fluent::Test::Driver::Output.new(klass).configure(conf)
end
sub_test_case 'configuration' do
  test 'path has "fsb" prefix and "buf" suffix by default' do
    @d = create_driver
    p = @d.instance.buffer
    assert_equal File.join(@bufdir, 'fsb.*.buf'), p.path
  end

  # chunk_format is derived from the owner plugin's formatted_to_msgpack_binary?.
  data('text based chunk' => [FluentPluginFileSingleBufferTest::DummyOutputPlugin, :text],
       'msgpack based chunk' => [FluentPluginFileSingleBufferTest::DummyOutputMPPlugin, :msgpack])
  test 'detect chunk_format' do |param|
    klass, expected = param
    @d = create_driver(TAG_CONF, klass)
    p = @d.instance.buffer
    assert_equal expected, p.chunk_format
  end

  # A user-supplied "prefix.*.suffix" pattern is discarded in favor of
  # the canonical fsb.*.buf name in the same directory.
  test '"prefix.*.suffix" path will be replaced with default' do
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{@bufdir}/foo.*.bar
      </buffer>
    ])
    p = @d.instance.buffer
    assert_equal File.join(@bufdir, 'fsb.*.buf'), p.path
  end
end

sub_test_case 'buffer configurations and workers' do
  setup do
    @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
  end

  test 'enables multi worker configuration with unexisting directory path' do
    FileUtils.rm_rf(@bufdir)
    buf_conf = config_element('buffer', '', {'path' => @bufdir})
    assert_nothing_raised do
      Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
        @d.configure(config_element('ROOT', '', {}, [buf_conf]))
      end
    end
  end

  test 'enables multi worker configuration with existing directory path' do
    FileUtils.mkdir_p @bufdir
    buf_conf = config_element('buffer', '', {'path' => @bufdir})
    assert_nothing_raised do
      Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
        @d.configure(config_element('ROOT', '', {}, [buf_conf]))
      end
    end
  end

  # Without an explicit path, the buffer path is derived from root_dir + @id.
  test 'enables multi worker configuration with root dir' do
    buf_conf = config_element('buffer', '')
    assert_nothing_raised do
      Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
        @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
      end
    end
  end
end
# Two plugin instances must not share one buffer path; the second
# configure must fail. called_in_test? is stubbed to false so the
# path-collision guard is not bypassed in test mode.
test 'raise config error when using same file path' do
  d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
  d2 = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
  Fluent::SystemConfig.overwrite_system_config({}) do
    d.configure(config_element('ROOT', '', {}, [config_element('buffer', '', { 'path' => File.join(PATH, 'foo.*.bar') })]))
  end
  any_instance_of(Fluent::Plugin::FileSingleBuffer) do |klass|
    stub(klass).called_in_test? { false }
  end
  err = assert_raise(Fluent::ConfigError) do
    Fluent::SystemConfig.overwrite_system_config({}) do
      d2.configure(config_element('ROOT', '', {}, [config_element('buffer', '', { 'path' => PATH })]))
    end
  end
  assert_match(/plugin already uses same buffer path/, err.message)
end
sub_test_case 'buffer plugin configured only with path' do
  setup do
    @bufpath = File.join(@bufdir, 'testbuf.*.buf')
    FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
    @d = create_driver
    @p = @d.instance.buffer
  end
  teardown do
    if @p
      # Full buffer shutdown sequence; skip phases already performed.
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
  end

  test 'this is persistent plugin' do
    assert @p.persistent?
  end

  test '#start creates directory for buffer chunks' do
    @d = create_driver
    @p = @d.instance.buffer
    FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
    assert !File.exist?(@bufdir)
    @p.start
    assert File.exist?(@bufdir)
    # Default directory permission is 0755.
    assert { File.stat(@bufdir).mode.to_s(8).end_with?('755') }
  end

  test '#start creates directory for buffer chunks with specified permission' do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{PATH}
        dir_permission 700
      </buffer>
    ])
    @p = @d.instance.buffer
    FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
    assert !File.exist?(@bufdir)
    @p.start
    assert File.exist?(@bufdir)
    assert { File.stat(@bufdir).mode.to_s(8).end_with?('700') }
  end

  test '#start creates directory for buffer chunks with specified permission via system config' do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
    sysconf = {'dir_permission' => '700'}
    Fluent::SystemConfig.overwrite_system_config(sysconf) do
      @d = create_driver
      @p = @d.instance.buffer
      FileUtils.rm_r @bufdir if File.exist?(@bufdir)
      assert !File.exist?(@bufdir)
      @p.start
      assert File.exist?(@bufdir)
      assert { File.stat(@bufdir).mode.to_s(8).end_with?('700') }
    end
  end

  # Chunk filename embeds the tag and the hex unique_id; 'b' marks staged.
  test '#generate_chunk generates blank file chunk on path with unique_id and tag' do
    FileUtils.mkdir_p(@bufdir) unless File.exist?(@bufdir)
    m1 = metadata()
    c1 = @p.generate_chunk(m1)
    assert c1.is_a? Fluent::Plugin::Buffer::FileSingleChunk
    assert_equal m1, c1.metadata
    assert c1.empty?
    assert_equal :unstaged, c1.state
    assert_equal Fluent::DEFAULT_FILE_PERMISSION, c1.permission
    assert_equal File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(c1.unique_id)}.buf"), c1.path
    assert{ File.stat(c1.path).mode.to_s(8).end_with?('644') }
    c1.purge
  end

  # With a field chunk key, the field value replaces the tag in the filename.
  test '#generate_chunk generates blank file chunk on path with unique_id and field key' do
    FileUtils.mkdir_p(@bufdir) unless File.exist?(@bufdir)
    @d = create_driver(FIELD_CONF)
    @p = @d.instance.buffer
    m1 = metadata(tag: nil, variables: {:k => 'foo_bar'})
    c1 = @p.generate_chunk(m1)
    assert c1.is_a? Fluent::Plugin::Buffer::FileSingleChunk
    assert_equal m1, c1.metadata
    assert c1.empty?
    assert_equal :unstaged, c1.state
    assert_equal Fluent::DEFAULT_FILE_PERMISSION, c1.permission
    assert_equal File.join(@bufdir, "fsb.foo_bar.b#{Fluent::UniqueId.hex(c1.unique_id)}.buf"), c1.path
    c1.purge
  end

  test '#generate_chunk generates blank file chunk with specified permission' do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{PATH}
        file_permission 600
      </buffer>
    ])
    @p = @d.instance.buffer
    FileUtils.rm_r @bufdir if File.exist?(@bufdir)
    assert !File.exist?(@bufdir)
    @p.start
    m = metadata()
    c = @p.generate_chunk(m)
    assert c.is_a? Fluent::Plugin::Buffer::FileSingleChunk
    assert_equal m, c.metadata
    assert c.empty?
    assert_equal :unstaged, c.state
    assert_equal 0600, c.permission
    assert_equal File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(c.unique_id)}.buf"), c.path
    assert{ File.stat(c.path).mode.to_s(8).end_with?('600') }
    c.purge
  end

  test '#generate_chunk generates blank file chunk with specified permission with system_config' do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{PATH}
      </buffer>
    ])
    @p = @d.instance.buffer
    FileUtils.rm_r @bufdir if File.exist?(@bufdir)
    assert !File.exist?(@bufdir)
    @p.start
    m = metadata()
    c = nil
    # system-level file_permission is consulted at chunk-creation time.
    Fluent::SystemConfig.overwrite_system_config("file_permission" => "700") do
      c = @p.generate_chunk(m)
    end
    assert c.is_a? Fluent::Plugin::Buffer::FileSingleChunk
    assert_equal m, c.metadata
    assert c.empty?
    assert_equal :unstaged, c.state
    assert_equal 0700, c.permission
    assert_equal File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(c.unique_id)}.buf"), c.path
    assert{ File.stat(c.path).mode.to_s(8).end_with?('700') }
    c.purge
  end
end
sub_test_case 'configured with system root directory and plugin @id' do
  setup do
    @root_dir = File.expand_path('../../tmp/buffer_file_single_root', __FILE__)
    FileUtils.rm_rf(@root_dir)
    @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
    @p = nil
  end
  teardown do
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
  end

  # Path layout is root_dir/worker0/<@id>/buffer/fsb.*.buf; the directory
  # is created lazily by #start, not by #configure.
  test '#start creates directory for buffer chunks' do
    Fluent::SystemConfig.overwrite_system_config('root_dir' => @root_dir) do
      @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [config_element('buffer', '', {})]))
      @p = @d.buffer
    end
    expected_buffer_path = File.join(@root_dir, 'worker0', 'dummy_output_with_buf', 'buffer', "fsb.*.buf")
    expected_buffer_dir = File.dirname(expected_buffer_path)
    assert_equal expected_buffer_path, @d.buffer.path
    assert_false Dir.exist?(expected_buffer_dir)
    @p.start
    assert Dir.exist?(expected_buffer_dir)
  end
end

sub_test_case 'buffer plugin configuration errors' do
  # file_single supports at most one chunk key; tag+key or multiple keys fail.
  data('tag and key' => 'tag,key',
       'multiple keys' => 'key1,key2')
  test 'invalid chunk keys' do |param|
    assert_raise Fluent::ConfigError do
      @d = create_driver(%[
        <buffer #{param}>
          @type file_single
          path #{PATH}
          calc_num_records false
        </buffer>
      ])
    end
  end

  test 'path is not specified' do
    assert_raise Fluent::ConfigError do
      @d = create_driver(%[
        <buffer tag>
          @type file_single
        </buffer>
      ])
    end
  end
end
sub_test_case 'there are no existing file chunks' do
  setup do
    FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
    @d = create_driver
    @p = @d.instance.buffer
    @p.start
  end
  teardown do
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    # Remove any chunk files left behind in the buffer directory.
    if @bufdir
      Dir.glob(File.join(@bufdir, '*')).each do |path|
        next if ['.', '..'].include?(File.basename(path))
        File.delete(path)
      end
    end
  end

  # resume returns [staged_hash, queued_array]; both empty here.
  test '#resume returns empty buffer state' do
    ary = @p.resume
    assert_equal({}, ary[0])
    assert_equal([], ary[1])
  end
end
sub_test_case 'there are some existing file chunks' do
  # Fixture layout: two enqueued chunks ('q' marker, mtimes pushed into the
  # past to fix FIFO order) and two staged chunks ('b' marker), one of them
  # under a different tag ('foo'). Chunk bodies are JSON lines.
  setup do
    @c1id = Fluent::UniqueId.generate
    p1 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c1id)}.buf")
    File.open(p1, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    t = Time.now - 50000
    File.utime(t, t, p1)

    @c2id = Fluent::UniqueId.generate
    p2 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c2id)}.buf")
    File.open(p2, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    t = Time.now - 40000
    File.utime(t, t, p2)

    @c3id = Fluent::UniqueId.generate
    p3 = File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(@c3id)}.buf")
    File.open(p3, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end

    @c4id = Fluent::UniqueId.generate
    p4 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(@c4id)}.buf")
    File.open(p4, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
  end
  teardown do
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    if @bufdir
      Dir.glob(File.join(@bufdir, '*')).each do |path|
        next if ['.', '..'].include?(File.basename(path))
        File.delete(path)
      end
    end
  end

  test '#resume returns staged/queued chunks with metadata' do
    @d = create_driver
    @p = @d.instance.buffer
    @p.start
    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size

    stage = @p.stage
    m3 = metadata()
    assert_equal @c3id, stage[m3].unique_id
    assert_equal 4, stage[m3].size
    assert_equal :staged, stage[m3].state

    m4 = metadata(tag: 'foo')
    assert_equal @c4id, stage[m4].unique_id
    assert_equal 3, stage[m4].size
    assert_equal :staged, stage[m4].state
  end

  # The chunk with the older mtime (c1) must come first in the queue.
  test '#resume returns queued chunks ordered by last modified time (FIFO)' do
    @d = create_driver
    @p = @d.instance.buffer
    @p.start
    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size

    queue = @p.queue
    assert{ queue[0].modified_at <= queue[1].modified_at }

    assert_equal @c1id, queue[0].unique_id
    assert_equal :queued, queue[0].state
    assert_equal 'testing', queue[0].metadata.tag
    assert_nil queue[0].metadata.variables
    assert_equal 4, queue[0].size

    assert_equal @c2id, queue[1].unique_id
    assert_equal :queued, queue[1].state
    assert_equal 'testing', queue[1].metadata.tag
    assert_nil queue[1].metadata.variables
    assert_equal 3, queue[1].size
  end

  # With calc_num_records false, resumed chunks report size 0 (record
  # counting is skipped for speed).
  test '#resume returns staged/queued chunks but skips size calculation by calc_num_records' do
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{PATH}
        calc_num_records false
      </buffer>
    ])
    @p = @d.instance.buffer
    @p.start
    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size

    stage = @p.stage
    m3 = metadata()
    assert_equal @c3id, stage[m3].unique_id
    assert_equal 0, stage[m3].size
    assert_equal :staged, stage[m3].state

    m4 = metadata(tag: 'foo')
    assert_equal @c4id, stage[m4].unique_id
    assert_equal 0, stage[m4].size
    assert_equal :staged, stage[m4].state
  end
end
sub_test_case 'there are some existing file chunks with placeholders path' do
  # The buffer directory itself contains a literal ${test} placeholder;
  # resume must still find the chunks under it.
  setup do
    @buf_ph_dir = File.expand_path('../../tmp/buffer_${test}_file_single_dir', __FILE__)
    FileUtils.rm_rf(@buf_ph_dir)
    FileUtils.mkdir_p(@buf_ph_dir)

    @c1id = Fluent::UniqueId.generate
    p1 = File.join(@buf_ph_dir, "fsb.testing.q#{Fluent::UniqueId.hex(@c1id)}.buf")
    File.open(p1, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    t = Time.now - 50000
    File.utime(t, t, p1)

    @c2id = Fluent::UniqueId.generate
    p2 = File.join(@buf_ph_dir, "fsb.testing.b#{Fluent::UniqueId.hex(@c2id)}.buf")
    File.open(p2, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
  end
  teardown do
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    FileUtils.rm_rf(@buf_ph_dir)
  end

  test '#resume returns staged/queued chunks with metadata' do
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{@buf_ph_dir}
      </buffer>
    ])
    @p = @d.instance.buffer
    @p.start
    assert_equal 1, @p.stage.size
    assert_equal 1, @p.queue.size
  end
end
sub_test_case 'there are some existing msgpack file chunks' do
  # Same fixture shape as the JSON case, but chunk bodies are msgpack
  # streams, matching `chunk_format msgpack`.
  setup do
    packer = Fluent::MessagePackFactory.packer
    @c1id = Fluent::UniqueId.generate
    p1 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c1id)}.buf")
    File.open(p1, 'wb') do |f|
      packer.write(["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}])
      packer.write(["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}])
      packer.write(["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}])
      packer.write(["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}])
      f.write packer.full_pack
    end
    t = Time.now - 50000
    File.utime(t, t, p1)

    @c2id = Fluent::UniqueId.generate
    p2 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c2id)}.buf")
    File.open(p2, 'wb') do |f|
      packer.write(["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}])
      packer.write(["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}])
      packer.write(["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}])
      f.write packer.full_pack
    end
    t = Time.now - 40000
    File.utime(t, t, p2)

    @c3id = Fluent::UniqueId.generate
    p3 = File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(@c3id)}.buf")
    File.open(p3, 'wb') do |f|
      packer.write(["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}])
      packer.write(["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}])
      packer.write(["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}])
      packer.write(["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}])
      f.write packer.full_pack
    end

    @c4id = Fluent::UniqueId.generate
    p4 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(@c4id)}.buf")
    File.open(p4, 'wb') do |f|
      packer.write(["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}])
      packer.write(["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}])
      packer.write(["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}])
      f.write packer.full_pack
    end
  end
  teardown do
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    if @bufdir
      Dir.glob(File.join(@bufdir, '*')).each do |path|
        next if ['.', '..'].include?(File.basename(path))
        File.delete(path)
      end
    end
  end

  test '#resume returns staged/queued chunks with msgpack format' do
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{PATH}
        chunk_format msgpack
      </buffer>
    ])
    @p = @d.instance.buffer
    @p.start
    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size

    stage = @p.stage
    m3 = metadata()
    assert_equal @c3id, stage[m3].unique_id
    assert_equal 4, stage[m3].size
    assert_equal :staged, stage[m3].state

    m4 = metadata(tag: 'foo')
    assert_equal @c4id, stage[m4].unique_id
    assert_equal 3, stage[m4].size
    assert_equal :staged, stage[m4].state
  end
end
sub_test_case 'there are some existing file chunks, both in specified path and per-worker directory under specified path, configured as multi workers' do
  # Fixture: chunks directly under @bufdir (owned by worker 0 only) plus
  # per-worker chunk sets under worker0/ and worker1/ subdirectories.
  setup do
    @worker0_dir = File.join(@bufdir, "worker0")
    @worker1_dir = File.join(@bufdir, "worker1")
    FileUtils.rm_rf(@bufdir)
    FileUtils.mkdir_p(@worker0_dir)
    FileUtils.mkdir_p(@worker1_dir)

    @bufdir_chunk_1 = Fluent::UniqueId.generate
    bc1 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@bufdir_chunk_1)}.buf")
    File.open(bc1, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end

    @bufdir_chunk_2 = Fluent::UniqueId.generate
    bc2 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@bufdir_chunk_2)}.buf")
    File.open(bc2, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end

    @worker_dir_chunk_1 = Fluent::UniqueId.generate
    wc0_1 = File.join(@worker0_dir, "fsb.testing.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.buf")
    wc1_1 = File.join(@worker1_dir, "fsb.testing.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.buf")
    [wc0_1, wc1_1].each do |chunk_path|
      File.open(chunk_path, 'wb') do |f|
        f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      end
    end

    @worker_dir_chunk_2 = Fluent::UniqueId.generate
    wc0_2 = File.join(@worker0_dir, "fsb.testing.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.buf")
    wc1_2 = File.join(@worker1_dir, "fsb.foo.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.buf")
    [wc0_2, wc1_2].each do |chunk_path|
      File.open(chunk_path, 'wb') do |f|
        f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      end
    end

    @worker_dir_chunk_3 = Fluent::UniqueId.generate
    wc0_3 = File.join(@worker0_dir, "fsb.bar.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.buf")
    wc1_3 = File.join(@worker1_dir, "fsb.baz.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.buf")
    [wc0_3, wc1_3].each do |chunk_path|
      File.open(chunk_path, 'wb') do |f|
        f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      end
    end
  end
  teardown do
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
  end

  test 'worker(id=0) #resume returns staged/queued chunks with metadata, not only in worker dir, including the directory specified by path' do
    # FIX: the original assigned ENV['SERVERENGINE_WORKER_ID'] and never
    # restored it, leaking process-global state into every test that runs
    # afterwards. Save and restore the previous value.
    prev_worker_id = ENV['SERVERENGINE_WORKER_ID']
    ENV['SERVERENGINE_WORKER_ID'] = '0'
    begin
      buf_conf = config_element('buffer', '', {'path' => @bufdir})
      @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
      with_worker_config(workers: 2, worker_id: 0) do
        @d.configure(config_element('output', '', {}, [buf_conf]))
      end

      @d.start
      @p = @d.buffer

      assert_equal 2, @p.stage.size
      assert_equal 3, @p.queue.size

      stage = @p.stage
      m1 = metadata(tag: 'testing')
      assert_equal @worker_dir_chunk_2, stage[m1].unique_id
      assert_equal 4, stage[m1].size
      assert_equal :staged, stage[m1].state

      m2 = metadata(tag: 'bar')
      assert_equal @worker_dir_chunk_3, stage[m2].unique_id
      assert_equal 3, stage[m2].size
      assert_equal :staged, stage[m2].state

      queue = @p.queue
      assert_equal [@bufdir_chunk_1, @bufdir_chunk_2, @worker_dir_chunk_1].sort, queue.map(&:unique_id).sort
      assert_equal [3, 4, 4], queue.map(&:size).sort
      assert_equal [:queued, :queued, :queued], queue.map(&:state)
    ensure
      if prev_worker_id
        ENV['SERVERENGINE_WORKER_ID'] = prev_worker_id
      else
        ENV.delete('SERVERENGINE_WORKER_ID')
      end
    end
  end

  # Worker 1 must only pick up chunks from its own worker1/ directory.
  test 'worker(id=1) #resume returns staged/queued chunks with metadata, only in worker dir' do
    buf_conf = config_element('buffer', '', {'path' => @bufdir})
    @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
    with_worker_config(workers: 2, worker_id: 1) do
      @d.configure(config_element('output', '', {}, [buf_conf]))
    end

    @d.start
    @p = @d.buffer

    assert_equal 2, @p.stage.size
    assert_equal 1, @p.queue.size

    stage = @p.stage
    m1 = metadata(tag: 'foo')
    assert_equal @worker_dir_chunk_2, stage[m1].unique_id
    assert_equal 4, stage[m1].size
    assert_equal :staged, stage[m1].state

    m2 = metadata(tag: 'baz')
    assert_equal @worker_dir_chunk_3, stage[m2].unique_id
    assert_equal 3, stage[m2].size
    assert_equal :staged, stage[m2].state

    queue = @p.queue
    assert_equal @worker_dir_chunk_1, queue[0].unique_id
    assert_equal 3, queue[0].size
    assert_equal :queued, queue[0].state
  end
end
sub_test_case 'there are existing broken file chunks' do
  setup do
    FileUtils.rm_rf(@bufdir) rescue nil
    FileUtils.mkdir_p(@bufdir)
  end
  teardown do
    return unless @p
    @p.stop unless @p.stopped?
    @p.before_shutdown unless @p.before_shutdown?
    @p.shutdown unless @p.shutdown?
    @p.after_shutdown unless @p.after_shutdown?
    @p.close unless @p.closed?
    @p.terminate unless @p.terminated?
  end

  # An empty (0-byte) chunk file is treated as broken: it is removed from
  # the buffer dir and copied to root_dir/backup/worker0/<@id>/<id>.log.
  test '#resume backups empty chunk' do
    id_output = 'backup_test'
    @d = create_driver(%[
      @id #{id_output}
      <buffer tag>
        @type file_single
        path #{PATH}
      </buffer>
    ])
    @p = @d.instance.buffer

    c1id = Fluent::UniqueId.generate
    p1 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(c1id)}.buf")
    File.open(p1, 'wb') { |f| } # create empty chunk file

    Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir) do
      @p.start
    end

    assert { not File.exist?(p1) }
    assert { File.exist?("#{@bufdir}/backup/worker0/#{id_output}/#{@d.instance.dump_unique_id_hex(c1id)}.log") }
  end

  # With disable_chunk_backup, the broken chunk is deleted without backup.
  test '#resume throws away broken chunk with disable_chunk_backup' do
    id_output = 'backup_test'
    @d = create_driver(%[
      @id #{id_output}
      <buffer tag>
        @type file_single
        path #{PATH}
        disable_chunk_backup true
      </buffer>
    ])
    @p = @d.instance.buffer

    c1id = Fluent::UniqueId.generate
    p1 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(c1id)}.buf")
    File.open(p1, 'wb') { |f| } # create empty chunk file

    Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir) do
      @p.start
    end

    assert { not File.exist?(p1) }
    assert { not File.exist?("#{@bufdir}/backup/worker0/#{id_output}/#{@d.instance.dump_unique_id_hex(c1id)}.log") }
  end
end
sub_test_case 'evacuate_chunk' do
# test-unit setup defined with a yield: the whole test body runs inside
# the overwritten system config and the frozen clock; the ensure clause
# guarantees Timecop and the temp dirs are cleaned up per test.
def setup
  Fluent::Test.setup

  @now = Time.local(2025, 5, 30, 17, 0, 0)
  @base_dir = File.expand_path("../../tmp/evacuate_chunk", __FILE__)
  @buf_dir = File.join(@base_dir, "buffer")
  @root_dir = File.join(@base_dir, "root")
  FileUtils.mkdir_p(@root_dir)

  Fluent::SystemConfig.overwrite_system_config("root_dir" => @root_dir) do
    Timecop.freeze(@now)
    yield
  end
ensure
  Timecop.return
  FileUtils.rm_rf(@base_dir)
end

# Bring a plugin up to the running state.
def start_plugin(plugin)
  plugin.start
  plugin.after_start
end

# Tear a plugin down through the full lifecycle, skipping completed phases.
def stop_plugin(plugin)
  plugin.stop unless plugin.stopped?
  plugin.before_shutdown unless plugin.before_shutdown?
  plugin.shutdown unless plugin.shutdown?
  plugin.after_shutdown unless plugin.after_shutdown?
  plugin.close unless plugin.closed?
  plugin.terminate unless plugin.terminated?
end
def configure_output(id, chunk_key, buffer_conf)
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_http.rb | test/plugin/test_in_http.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_http'
require 'net/http'
require 'timecop'
class HttpInputTest < Test::Unit::TestCase
class << self
  # Open one shared SocketManager server for the whole test class; the
  # path is exported via ENV so in_http can attach to it.
  def startup
    @server = ServerEngine::SocketManager::Server.open
    ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = @server.path.to_s
  end

  def shutdown
    @server.close
  end
end

def setup
  Fluent::Test.setup
  @port = unused_port(protocol: :tcp)
end

def teardown
  # Undo any Timecop.freeze performed by individual tests.
  Timecop.return
  @port = nil
end

# Base in_http configuration used by most tests; the port is picked
# fresh per test in #setup.
def config
  %[
    port #{@port}
    bind "127.0.0.1"
    body_size_limit 10m
    keepalive_timeout 5
    respond_with_empty_img true
    use_204_response false
  ]
end

def create_driver(conf=config)
  Fluent::Test::Driver::Input.new(Fluent::Plugin::HttpInput).configure(conf)
end
# Configuration values are parsed into the expected native types
# (e.g. '10m' size string becomes bytes).
def test_configure
  d = create_driver
  assert_equal @port, d.instance.port
  assert_equal '127.0.0.1', d.instance.bind
  assert_equal 10*1024*1024, d.instance.body_size_limit
  assert_equal 5, d.instance.keepalive_timeout
  assert_equal false, d.instance.add_http_headers
  assert_equal false, d.instance.add_query_params
end
# When no time parameter is posted, events are stamped with the current
# (frozen) wall clock.
def test_time
  d = create_driver
  time = event_time("2011-01-02 13:14:15.123 UTC")
  Timecop.freeze(Time.at(time))
  events = [
    ["tag1", time, {"a" => 1}],
    ["tag2", time, {"a" => 2}],
  ]
  res_codes = []

  d.run(expect_records: 2) do
    events.each do |tag, _time, record|
      res = post("/#{tag}", {"json"=>record.to_json})
      res_codes << res.code
    end
  end
  assert_equal ["200", "200"], res_codes
  assert_equal events, d.events
  assert_equal_event_time time, d.events[0][1]
  assert_equal_event_time time, d.events[1][1]
end
# A float 'time' form parameter is accepted and preserves sub-second
# precision in the resulting event time.
def test_time_as_float
  d = create_driver
  time = event_time("2011-01-02 13:14:15.123 UTC")
  float_time = time.to_f
  events = [
    ["tag1", time, {"a"=>1}],
  ]
  res_codes = []

  d.run(expect_records: 1) do
    events.each do |tag, t, record|
      res = post("/#{tag}", {"json"=>record.to_json, "time"=>float_time.to_s})
      res_codes << res.code
    end
  end
  assert_equal ["200"], res_codes
  assert_equal events, d.events
  assert_equal_event_time time, d.events[0][1]
end

# An integer 'time' form parameter sets the event time; the URL path
# becomes the tag.
def test_json
  d = create_driver
  time = event_time("2011-01-02 13:14:15 UTC")
  time_i = time.to_i
  events = [
    ["tag1", time_i, {"a"=>1}],
    ["tag2", time_i, {"a"=>2}],
  ]
  res_codes = []

  d.run(expect_records: 2) do
    events.each do |tag, t, record|
      res = post("/#{tag}", {"json"=>record.to_json, "time"=>t.to_s})
      res_codes << res.code
    end
  end
  assert_equal ["200", "200"], res_codes
  assert_equal events, d.events
  assert_equal_event_time time, d.events[0][1]
  assert_equal_event_time time, d.events[1][1]
end
data('json' => ['json', :to_json],
'msgpack' => ['msgpack', :to_msgpack])
def test_default_with_time_format(data)
param, method_name = data
d = create_driver(config + %[
<parse>
keep_time_key
time_format %iso8601
</parse>
])
time = event_time("2020-06-10T01:14:27+00:00")
events = [
["tag1", time, {"a" => 1, "time" => '2020-06-10T01:14:27+00:00'}],
["tag2", time, {"a" => 2, "time" => '2020-06-10T01:14:27+00:00'}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, t, record|
res = post("/#{tag}", {param => record.__send__(method_name)})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_multi_json
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
records = [{"a"=>1},{"a"=>2}]
events = [
["tag1", time_i, records[0]],
["tag1", time_i, records[1]],
]
tag = "tag1"
res_codes = []
d.run(expect_records: 2, timeout: 5) do
res = post("/#{tag}", {"json"=>records.to_json, "time"=>time_i.to_s})
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_multi_json_with_time_field
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
time_f = time.to_f
records = [{"a" => 1, 'time' => time_i},{"a" => 2, 'time' => time_f}]
events = [
["tag1", time, {'a' => 1}],
["tag1", time, {'a' => 2}],
]
tag = "tag1"
res_codes = []
d.run(expect_records: 2, timeout: 5) do
res = post("/#{tag}", {"json" => records.to_json})
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal events, d.events
assert_instance_of Fluent::EventTime, d.events[0][1]
assert_instance_of Fluent::EventTime, d.events[1][1]
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
data('json' => ['json', :to_json],
'msgpack' => ['msgpack', :to_msgpack])
def test_default_multi_with_time_format(data)
param, method_name = data
d = create_driver(config + %[
<parse>
keep_time_key
time_format %iso8601
</parse>
])
time = event_time("2020-06-10T01:14:27+00:00")
events = [
["tag1", time, {'a' => 1, 'time' => "2020-06-10T01:14:27+00:00"}],
["tag1", time, {'a' => 2, 'time' => "2020-06-10T01:14:27+00:00"}],
]
tag = "tag1"
res_codes = []
d.run(expect_records: 2, timeout: 5) do
res = post("/#{tag}", {param => events.map { |e| e[2] }.__send__(method_name)})
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_multi_json_with_nonexistent_time_key
d = create_driver(config + %[
<parse>
time_key missing
</parse>
])
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
time_f = time.to_f
records = [{"a" => 1, 'time' => time_i},{"a" => 2, 'time' => time_f}]
tag = "tag1"
res_codes = []
d.run(expect_records: 2, timeout: 5) do
res = post("/#{tag}", {"json" => records.to_json})
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal 2, d.events.size
assert_not_equal time_i, d.events[0][1].sec # current time is used because "missing" field doesn't exist
assert_not_equal time_i, d.events[1][1].sec
end
def test_json_with_add_remote_addr
d = create_driver(config + "add_remote_addr true")
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"REMOTE_ADDR"=>"127.0.0.1", "a"=>1}],
["tag2", time, {"REMOTE_ADDR"=>"127.0.0.1", "a"=>2}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, _t, record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time_i.to_s})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_exact_match_for_expect
d = create_driver(config)
records = [{ "a" => 1}, { "a" => 2 }]
tag = "tag1"
res_codes = []
d.run(expect_records: 0, timeout: 5) do
res = post("/#{tag}", { "json" => records.to_json }, { 'Expect' => 'something' })
res_codes << res.code
end
assert_equal ["417"], res_codes
end
def test_exact_match_for_expect_with_other_header
d = create_driver(config)
records = [{ "a" => 1}, { "a" => 2 }]
tag = "tag1"
res_codes = []
d.run(expect_records: 2, timeout: 5) do
res = post("/#{tag}", { "json" => records.to_json, 'x-envoy-expected-rq-timeout-ms' => 4 })
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal "tag1", d.events[0][0]
assert_equal 1, d.events[0][2]["a"]
assert_equal "tag1", d.events[1][0]
assert_equal 2, d.events[1][2]["a"]
end
def test_multi_json_with_add_remote_addr
d = create_driver(config + "add_remote_addr true")
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
records = [{"a"=>1},{"a"=>2}]
tag = "tag1"
res_codes = []
d.run(expect_records: 2, timeout: 5) do
res = post("/#{tag}", {"json"=>records.to_json, "time"=>time_i.to_s})
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal "tag1", d.events[0][0]
assert_equal_event_time time, d.events[0][1]
assert_equal 1, d.events[0][2]["a"]
assert{ d.events[0][2].has_key?("REMOTE_ADDR") && d.events[0][2]["REMOTE_ADDR"] =~ /^\d{1,4}(\.\d{1,4}){3}$/ }
assert_equal "tag1", d.events[1][0]
assert_equal_event_time time, d.events[1][1]
assert_equal 2, d.events[1][2]["a"]
end
def test_json_with_add_remote_addr_given_x_forwarded_for
d = create_driver(config + "add_remote_addr true")
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, _t, record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time_i.to_s}, {"X-Forwarded-For"=>"129.78.138.66, 127.0.0.1"})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal "tag1", d.events[0][0]
assert_equal_event_time time, d.events[0][1]
assert_equal({"REMOTE_ADDR"=>"129.78.138.66", "a"=>1}, d.events[0][2])
assert_equal "tag2", d.events[1][0]
assert_equal_event_time time, d.events[1][1]
assert_equal({"REMOTE_ADDR"=>"129.78.138.66", "a"=>2}, d.events[1][2])
end
def test_multi_json_with_add_remote_addr_given_x_forwarded_for
d = create_driver(config + "add_remote_addr true")
tag = "tag1"
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
records = [{"a"=>1},{"a"=>2}]
events = [
[tag, time, {"REMOTE_ADDR"=>"129.78.138.66", "a"=>1}],
[tag, time, {"REMOTE_ADDR"=>"129.78.138.66", "a"=>2}],
]
res_codes = []
d.run(expect_records: 2, timeout: 5) do
res = post("/#{tag}", {"json"=>records.to_json, "time"=>time_i.to_s}, {"X-Forwarded-For"=>"129.78.138.66, 127.0.0.1"})
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_add_remote_addr_given_multi_x_forwarded_for
d = create_driver(config + "add_remote_addr true")
tag = "tag1"
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
record = {"a" => 1}
event = ["tag1", time, {"REMOTE_ADDR" => "129.78.138.66", "a" => 1}]
res_code = nil
d.run(expect_records: 1, timeout: 5) do
res = post("/#{tag}", {"json" => record.to_json, "time" => time_i.to_s}) { |http, req|
# net/http can't send multiple headers so overwrite it.
def req.each_capitalized
block_given? or return enum_for(__method__) { @header.size }
@header.each do |k, vs|
vs.each { |v|
yield capitalize(k), v
}
end
end
req.add_field("X-Forwarded-For", "129.78.138.66, 127.0.0.1")
req.add_field("X-Forwarded-For", "8.8.8.8")
}
res_code = res.code
end
assert_equal "200", res_code
assert_equal event, d.events.first
assert_equal_event_time time, d.events.first[1]
end
def test_multi_json_with_add_http_headers
d = create_driver(config + "add_http_headers true")
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
records = [{"a"=>1},{"a"=>2}]
tag = "tag1"
res_codes = []
d.run(expect_records: 2, timeout: 5) do
res = post("/#{tag}", {"json"=>records.to_json, "time"=>time_i.to_s})
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal "tag1", d.events[0][0]
assert_equal_event_time time, d.events[0][1]
assert_equal 1, d.events[0][2]["a"]
assert_equal "tag1", d.events[1][0]
assert_equal_event_time time, d.events[1][1]
assert_equal 2, d.events[1][2]["a"]
assert include_http_header?(d.events[0][2])
assert include_http_header?(d.events[1][2])
end
def test_json_with_add_http_headers
d = create_driver(config + "add_http_headers true")
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, t, record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time_i.to_s})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal "tag1", d.events[0][0]
assert_equal_event_time time, d.events[0][1]
assert_equal 1, d.events[0][2]["a"]
assert_equal "tag2", d.events[1][0]
assert_equal_event_time time, d.events[1][1]
assert_equal 2, d.events[1][2]["a"]
assert include_http_header?(d.events[0][2])
assert include_http_header?(d.events[1][2])
end
def test_multi_json_with_custom_parser
d = create_driver(config + %[
<parse>
@type json
keep_time_key true
time_key foo
time_format %iso8601
</parse>
])
time = event_time("2011-01-02 13:14:15 UTC")
time_s = Time.at(time).iso8601
records = [{"foo"=>time_s,"bar"=>"test1"},{"foo"=>time_s,"bar"=>"test2"}]
tag = "tag1"
res_codes = []
d.run(expect_records: 2, timeout: 5) do
res = post("/#{tag}", records.to_json, {"Content-Type"=>"application/octet-stream"})
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal "tag1", d.events[0][0]
assert_equal_event_time time, d.events[0][1]
assert_equal d.events[0][2], records[0]
assert_equal "tag1", d.events[1][0]
assert_equal_event_time time, d.events[1][1]
assert_equal d.events[1][2], records[1]
end
def test_application_json
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, t, record|
res = post("/#{tag}?time=#{time_i.to_s}", record.to_json, {"Content-Type"=>"application/json; charset=utf-8"})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_csp_report
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, t, record|
res = post("/#{tag}?time=#{time_i.to_s}", record.to_json, {"Content-Type"=>"application/csp-report; charset=utf-8"})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_application_msgpack
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, t, record|
res = post("/#{tag}?time=#{time_i.to_s}", record.to_msgpack, {"Content-Type"=>"application/msgpack"})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_application_ndjson
d = create_driver
events = [
["tag1", 1643935663, "{\"a\":1}\n{\"b\":2}"],
["tag2", 1643935664, "{\"a\":3}\r\n{\"b\":4}"]
]
expected = [
["tag1", 1643935663, {"a"=>1}],
["tag1", 1643935663, {"b"=>2}],
["tag2", 1643935664, {"a"=>3}],
["tag2", 1643935664, {"b"=>4}]
]
d.run(expect_records: 1) do
events.each do |tag, time, record|
res = post("/#{tag}?time=#{time}", record, {"Content-Type"=>"application/x-ndjson"})
assert_equal("200", res.code)
end
end
assert_equal(expected, d.events)
end
def test_multipart_formdata
tag = "tag"
time = event_time_without_nsec
event = [tag, time, {"key" => 0}]
body = "--TESTBOUNDARY\r\n" +
"Content-Disposition: form-data; name=\"json\"\r\n" +
"\r\n" +
%[{"key":0}\r\n] +
"--TESTBOUNDARY\r\n"
d = create_driver
res = nil
d.run(expect_records: 1) do
res = post("/#{tag}?time=#{time.to_s}", body, {"Content-Type"=>"multipart/form-data; boundary=TESTBOUNDARY"})
end
assert_equal(
["200", [event]],
[res.code, d.events],
)
end
def test_msgpack
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, t, record|
res = post("/#{tag}", {"msgpack"=>record.to_msgpack, "time"=>time_i.to_s})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_multi_msgpack
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
records = [{"a"=>1},{"a"=>2}]
events = [
["tag1", time, records[0]],
["tag1", time, records[1]],
]
tag = "tag1"
res_codes = []
d.run(expect_records: 2) do
res = post("/#{tag}", {"msgpack"=>records.to_msgpack, "time"=>time_i.to_s})
res_codes << res.code
end
assert_equal ["200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_with_regexp
d = create_driver(config + %[
format /^(?<field_1>\\d+):(?<field_2>\\w+)$/
types field_1:integer
])
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"field_1" => 1, "field_2" => 'str'}],
["tag2", time, {"field_1" => 2, "field_2" => 'str'}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, t, record|
body = record.map { |k, v|
v.to_s
}.join(':')
res = post("/#{tag}?time=#{time_i.to_s}", body, {'Content-Type' => 'application/octet-stream'})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_with_csv
require 'csv'
d = create_driver(config + %[
format csv
keys foo,bar
])
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"foo" => "1", "bar" => 'st"r'}],
["tag2", time, {"foo" => "2", "bar" => 'str'}],
]
res_codes = []
d.run(expect_records: 2) do
events.each do |tag, t, record|
body = record.map { |k, v| v }.to_csv
res = post("/#{tag}?time=#{time_i.to_s}", body, {'Content-Type' => 'text/comma-separated-values'})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_response_with_empty_img
d = create_driver(config)
assert_equal true, d.instance.respond_with_empty_img
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
res_bodies = []
d.run do
events.each do |tag, _t, record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time_i.to_s})
res_codes << res.code
# Ruby returns ASCII-8 encoded string for GIF.
res_bodies << res.body.force_encoding("UTF-8")
end
end
assert_equal ["200", "200"], res_codes
assert_equal [Fluent::Plugin::HttpInput::EMPTY_GIF_IMAGE, Fluent::Plugin::HttpInput::EMPTY_GIF_IMAGE], res_bodies
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_response_without_empty_img
d = create_driver(config + "respond_with_empty_img false")
assert_equal false, d.instance.respond_with_empty_img
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
res_bodies = []
d.run do
events.each do |tag, _t, record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time_i.to_s})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal [], res_bodies
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_response_use_204_response
d = create_driver(config + %[
respond_with_empty_img false
use_204_response true
])
assert_equal true, d.instance.use_204_response
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
res_bodies = []
d.run do
events.each do |tag, _t, record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time_i.to_s})
res_codes << res.code
end
end
assert_equal ["204", "204"], res_codes
assert_equal [], res_bodies
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_cors_allowed
d = create_driver(config + "cors_allow_origins [\"http://foo.com\"]")
assert_equal ["http://foo.com"], d.instance.cors_allow_origins
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
res_headers = []
d.run do
events.each do |tag, _t, record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time_i.to_s}, {"Origin"=>"http://foo.com"})
res_codes << res.code
res_headers << res["Access-Control-Allow-Origin"]
end
end
assert_equal ["200", "200"], res_codes
assert_equal ["http://foo.com", "http://foo.com"], res_headers
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_cors_allowed_wildcard
d = create_driver(config + 'cors_allow_origins ["*"]')
time = event_time("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
]
d.run do
events.each do |tag, time, record|
headers = {"Origin" => "http://foo.com"}
res = post("/#{tag}", {"json" => record.to_json, "time" => time.to_i}, headers)
assert_equal "200", res.code
assert_equal "*", res["Access-Control-Allow-Origin"]
end
end
end
def test_get_request
d = create_driver(config)
d.run do
res = get("/cors.test", {}, {})
assert_equal "200", res.code
end
end
def test_cors_preflight
d = create_driver(config + 'cors_allow_origins ["*"]')
d.run do
header = {
"Origin" => "http://foo.com",
"Access-Control-Request-Method" => "POST",
"Access-Control-Request-Headers" => "Content-Type",
}
res = options("/cors.test", {}, header)
assert_equal "200", res.code
assert_equal "*", res["Access-Control-Allow-Origin"]
assert_equal "POST", res["Access-Control-Allow-Methods"]
end
end
def test_cors_allowed_wildcard_for_subdomain
d = create_driver(config + 'cors_allow_origins ["http://*.foo.com"]')
time = event_time("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
]
d.run do
events.each do |tag, time, record|
headers = {"Origin" => "http://subdomain.foo.com"}
res = post("/#{tag}", {"json" => record.to_json, "time" => time.to_i}, headers)
assert_equal "200", res.code
assert_equal "http://subdomain.foo.com", res["Access-Control-Allow-Origin"]
end
end
end
def test_cors_allowed_exclude_empty_string
d = create_driver(config + 'cors_allow_origins ["", "http://*.foo.com"]')
time = event_time("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
]
d.run do
events.each do |tag, time, record|
headers = {"Origin" => "http://subdomain.foo.com"}
res = post("/#{tag}", {"json" => record.to_json, "time" => time.to_i}, headers)
assert_equal "200", res.code
assert_equal "http://subdomain.foo.com", res["Access-Control-Allow-Origin"]
end
end
end
def test_cors_allowed_wildcard_preflight_for_subdomain
d = create_driver(config + 'cors_allow_origins ["http://*.foo.com"]')
d.run do
header = {
"Origin" => "http://subdomain.foo.com",
"Access-Control-Request-Method" => "POST",
"Access-Control-Request-Headers" => "Content-Type",
}
res = options("/cors.test", {}, header)
assert_equal "200", res.code
assert_equal "http://subdomain.foo.com", res["Access-Control-Allow-Origin"]
assert_equal "POST", res["Access-Control-Allow-Methods"]
end
end
def test_cors_allow_credentials
d = create_driver(config + %[
cors_allow_origins ["http://foo.com"]
cors_allow_credentials
])
assert_equal true, d.instance.cors_allow_credentials
time = event_time("2011-01-02 13:14:15 UTC")
event = ["tag1", time, {"a"=>1}]
res_code = nil
res_header = nil
d.run do
res = post("/#{event[0]}", {"json"=>event[2].to_json, "time"=>time.to_i.to_s}, {"Origin"=>"http://foo.com"})
res_code = res.code
res_header = res["Access-Control-Allow-Credentials"]
end
assert_equal(
{
response_code: "200",
allow_credentials_header: "true",
events: [event]
},
{
response_code: res_code,
allow_credentials_header: res_header,
events: d.events
}
)
end
def test_cors_allow_credentials_for_wildcard_origins
assert_raise(Fluent::ConfigError) do
create_driver(config + %[
cors_allow_origins ["*"]
cors_allow_credentials
])
end
end
def test_cors_with_nil_origin
d = create_driver(config + %[
cors_allow_origins ["http://foo.com"]
])
assert_equal ["http://foo.com"], d.instance.cors_allow_origins
time = event_time("2011-01-02 13:14:15 UTC")
event = ["tag1", time, {"a"=>1}]
res_code = nil
d.run do
res = post("/#{event[0]}", {"json"=>event[2].to_json, "time"=>time.to_i.to_s})
res_code = res.code
end
assert_equal "200", res_code
assert_equal [event], d.events
assert_equal_event_time time, d.events[0][1]
end
def test_content_encoding_gzip
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
d.run do
events.each do |tag, time, record|
header = {'Content-Type'=>'application/json', 'Content-Encoding'=>'gzip'}
res = post("/#{tag}?time=#{time}", compress_gzip(record.to_json), header)
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_content_encoding_deflate
d = create_driver
time = event_time("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
["tag2", time, {"a"=>2}],
]
res_codes = []
d.run do
events.each do |tag, time, record|
header = {'Content-Type'=>'application/msgpack', 'Content-Encoding'=>'deflate'}
res = post("/#{tag}?time=#{time}", Zlib.deflate(record.to_msgpack), header)
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal events, d.events
assert_equal_event_time time, d.events[0][1]
assert_equal_event_time time, d.events[1][1]
end
def test_cors_disallowed
d = create_driver(config + "cors_allow_origins [\"http://foo.com\"]")
assert_equal ["http://foo.com"], d.instance.cors_allow_origins
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
res_codes = []
d.end_if{ res_codes.size == 2 }
d.run do
res = post("/tag1", {"json"=>{"a"=>1}.to_json, "time"=>time_i.to_s}, {"Origin"=>"http://bar.com"})
res_codes << res.code
res = post("/tag2", {"json"=>{"a"=>1}.to_json, "time"=>time_i.to_s}, {"Origin"=>"http://bar.com"})
res_codes << res.code
end
assert_equal ["403", "403"], res_codes
end
def test_add_query_params
d = create_driver(config + "add_query_params true")
assert_equal true, d.instance.add_query_params
time = event_time("2011-01-02 13:14:15 UTC")
time_i = time.to_i
events = [
["tag1", time, {"a"=>1, "QUERY_A"=>"b"}],
["tag2", time, {"a"=>2, "QUERY_A"=>"b"}],
]
res_codes = []
res_bodies = []
d.run do
events.each do |tag, _t, record|
res = post("/#{tag}?a=b", {"json"=>record.to_json, "time"=>time_i.to_s})
res_codes << res.code
end
end
assert_equal ["200", "200"], res_codes
assert_equal [], res_bodies
assert_equal events, d.events
end
def test_add_tag_prefix
d = create_driver(config + "add_tag_prefix test")
assert_equal "test", d.instance.add_tag_prefix
time = event_time("2011-01-02 13:14:15.123 UTC")
float_time = time.to_f
events = [
["tag1", time, {"a"=>1}],
]
res_codes = []
d.run(expect_records: 1) do
events.each do |tag, t, record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>float_time.to_s})
res_codes << res.code
end
end
assert_equal ["200"], res_codes
assert_equal [["test.tag1", time, {"a"=>1}]], d.events
end
$test_in_http_connection_object_ids = []
$test_in_http_content_types = []
$test_in_http_content_types_flag = false
module ContentTypeHook
def initialize(*args)
@io_handler = nil
super
end
def on_headers_complete(headers)
super
if $test_in_http_content_types_flag
$test_in_http_content_types << self.content_type
end
end
def on_message_begin
super
if $test_in_http_content_types_flag
$test_in_http_connection_object_ids << @io_handler.object_id
end
end
end
class Fluent::Plugin::HttpInput::Handler
prepend ContentTypeHook
end
def test_if_content_type_is_initialized_properly
# This test is to check if Fluent::HttpInput::Handler's @content_type is initialized properly.
# Especially when in Keep-Alive and the second request has no 'Content-Type'.
begin
d = create_driver
$test_in_http_content_types_flag = true
d.run do
# Send two requests the second one has no Content-Type in Keep-Alive
Net::HTTP.start("127.0.0.1", @port) do |http|
req = Net::HTTP::Post.new("/foodb/bartbl", {"connection" => "keepalive", "Content-Type" => "application/json"})
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_buf_memory.rb | test/plugin/test_buf_memory.rb | require_relative '../helper'
require 'fluent/plugin/buf_memory'
require 'fluent/plugin/output'
require 'flexmock/test_unit'
module FluentPluginMemoryBufferTest
class DummyOutputPlugin < Fluent::Plugin::Output
end
end
class MemoryBufferTest < Test::Unit::TestCase
setup do
Fluent::Test.setup
@d = FluentPluginMemoryBufferTest::DummyOutputPlugin.new
@p = Fluent::Plugin::MemoryBuffer.new
@p.owner = @d
end
test 'this is non persistent plugin' do
assert !@p.persistent?
end
test '#resume always returns empty stage and queue' do
ary = @p.resume
assert_equal({}, ary[0])
assert_equal([], ary[1])
end
test '#generate_chunk returns memory chunk instance' do
m1 = Fluent::Plugin::Buffer::Metadata.new(nil, nil, nil)
c1 = @p.generate_chunk(m1)
assert c1.is_a? Fluent::Plugin::Buffer::MemoryChunk
assert_equal m1, c1.metadata
require 'time'
t2 = Time.parse('2016-04-08 19:55:00 +0900').to_i
m2 = Fluent::Plugin::Buffer::Metadata.new(t2, 'test.tag', {k1: 'v1', k2: 0})
c2 = @p.generate_chunk(m2)
assert c2.is_a? Fluent::Plugin::Buffer::MemoryChunk
assert_equal m2, c2.metadata
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_parser_nginx.rb | test/plugin/test_parser_nginx.rb | require_relative '../helper'
require 'fluent/test/driver/parser'
require 'fluent/plugin/parser_nginx'
class NginxParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
@expected = {
'remote' => '127.0.0.1',
'host' => '192.168.0.1',
'user' => '-',
'method' => 'GET',
'path' => '/',
'code' => '200',
'size' => '777',
'referer' => '-',
'agent' => 'Opera/12.0'
}
@expected_extended = {
'remote' => '127.0.0.1',
'host' => '192.168.0.1',
'user' => '-',
'method' => 'GET',
'path' => '/',
'code' => '200',
'size' => '777',
'referer' => '-',
'agent' => 'Opera/12.0',
'http_x_forwarded_for' => '-'
}
@expected_extended_multiple_ip = {
'remote' => '127.0.0.1',
'host' => '192.168.0.1',
'user' => '-',
'method' => 'GET',
'path' => '/',
'code' => '200',
'size' => '777',
'referer' => '-',
'agent' => 'Opera/12.0',
'http_x_forwarded_for' => '127.0.0.1, 192.168.0.1'
}
end
def create_driver
Fluent::Test::Driver::Parser.new(Fluent::Plugin::NginxParser.new).configure({})
end
def test_parse
d = create_driver
d.instance.parse('127.0.0.1 192.168.0.1 - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777 "-" "Opera/12.0"') { |time, record|
assert_equal(event_time('28/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal(@expected, record)
}
end
def test_parse_with_empty_included_path
d = create_driver
d.instance.parse('127.0.0.1 192.168.0.1 - [28/Feb/2013:12:00:00 +0900] "GET /a[ ]b HTTP/1.1" 200 777 "-" "Opera/12.0"') { |time, record|
assert_equal(event_time('28/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal(@expected.merge('path' => '/a[ ]b'), record)
}
end
def test_parse_without_http_version
d = create_driver
d.instance.parse('127.0.0.1 192.168.0.1 - [28/Feb/2013:12:00:00 +0900] "GET /" 200 777 "-" "Opera/12.0"') { |time, record|
assert_equal(event_time('28/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal(@expected, record)
}
end
def test_parse_with_http_x_forwarded_for
d = create_driver
d.instance.parse('127.0.0.1 192.168.0.1 - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777 "-" "Opera/12.0" -') { |time, record|
assert_equal(event_time('28/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal(@expected_extended, record)
}
end
def test_parse_with_http_x_forwarded_for_multiple_ip
d = create_driver
d.instance.parse('127.0.0.1 192.168.0.1 - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777 "-" "Opera/12.0" "127.0.0.1, 192.168.0.1"') { |time, record|
assert_equal(event_time('28/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z'), time)
assert_equal(@expected_extended_multiple_ip, record)
}
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_buffer_memory_chunk.rb | test/plugin/test_buffer_memory_chunk.rb | require_relative '../helper'
require 'fluent/plugin/buffer/memory_chunk'
require 'fluent/plugin/compressable'
require 'json'
class BufferMemoryChunkTest < Test::Unit::TestCase
include Fluent::Plugin::Compressable
setup do
@c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new)
end
test 'has blank chunk initially' do
assert @c.empty?
assert_equal '', @c.instance_eval{ @chunk }
assert_equal 0, @c.instance_eval{ @chunk_bytes }
assert_equal 0, @c.instance_eval{ @adding_bytes }
assert_equal 0, @c.instance_eval{ @adding_size }
end
test 'can #append, #commit and #read it' do
assert @c.empty?
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
@c.commit
content = @c.read
ds = content.split("\n").select{|d| !d.empty? }
assert_equal 2, ds.size
assert_equal d1, JSON.parse(ds[0])
assert_equal d2, JSON.parse(ds[1])
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
@c.commit
content = @c.read
ds = content.split("\n").select{|d| !d.empty? }
assert_equal 4, ds.size
assert_equal d1, JSON.parse(ds[0])
assert_equal d2, JSON.parse(ds[1])
assert_equal d3, JSON.parse(ds[2])
assert_equal d4, JSON.parse(ds[3])
end
test 'can #concat, #commit and #read it' do
assert @c.empty?
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"].join
@c.concat(data, 2)
@c.commit
content = @c.read
ds = content.split("\n").select{|d| !d.empty? }
assert_equal 2, ds.size
assert_equal d1, JSON.parse(ds[0])
assert_equal d2, JSON.parse(ds[1])
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
@c.commit
content = @c.read
ds = content.split("\n").select{|d| !d.empty? }
assert_equal 4, ds.size
assert_equal d1, JSON.parse(ds[0])
assert_equal d2, JSON.parse(ds[1])
assert_equal d3, JSON.parse(ds[2])
assert_equal d4, JSON.parse(ds[3])
end
test 'has its contents in binary (ascii-8bit)' do
data1 = "aaa bbb ccc".force_encoding('utf-8')
@c.append([data1])
@c.commit
assert_equal Encoding::ASCII_8BIT, @c.instance_eval{ @chunk.encoding }
content = @c.read
assert_equal Encoding::ASCII_8BIT, content.encoding
end
test 'has #bytesize and #size' do
assert @c.empty?
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
assert_equal 2, @c.size
@c.commit
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
assert_equal 2, @c.size
first_bytesize = @c.bytesize
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
assert_equal 4, @c.size
@c.commit
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
assert_equal 4, @c.size
end
# #rollback must discard everything staged since the last #commit,
# restoring both the byte size and the record count of the chunk.
# (Removed a duplicated `assert @c.empty?` and a redundant re-creation
# of identical d1/d2 fixtures from the original.)
test 'can #rollback to revert non-committed data' do
  assert @c.empty?

  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  data = [d1.to_json + "\n", d2.to_json + "\n"]

  # Append without commit, then roll back: the chunk must be empty again.
  @c.append(data)
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
  assert_equal 2, @c.size
  @c.rollback
  assert @c.empty?

  # Commit the first two records for real this time.
  @c.append(data)
  @c.commit
  assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
  assert_equal 2, @c.size

  first_bytesize = @c.bytesize

  # Stage two more records and roll back: only the committed part remains.
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.append([d3.to_json + "\n", d4.to_json + "\n"])
  assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
  assert_equal 4, @c.size
  @c.rollback
  assert_equal first_bytesize, @c.bytesize
  assert_equal 2, @c.size
end
# Same rollback contract as the #append variant, but staging data via
# #concat (pre-joined payload plus an explicit record count).
# (Removed a duplicated `assert @c.empty?` and redundant fixture
# re-creation from the original.)
test 'can #rollback to revert non-committed data from #concat' do
  assert @c.empty?

  d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
  d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
  payload = d1.to_json + "\n" + d2.to_json + "\n"

  # Concat without commit, then roll back: the chunk must be empty again.
  @c.concat(payload, 2)
  assert_equal payload.bytesize, @c.bytesize
  assert_equal 2, @c.size
  @c.rollback
  assert @c.empty?

  # Commit the first two records (via #append, as in the original test).
  @c.append([d1.to_json + "\n", d2.to_json + "\n"])
  @c.commit
  assert_equal payload.bytesize, @c.bytesize
  assert_equal 2, @c.size

  first_bytesize = @c.bytesize

  # Stage two more records via #concat and roll back.
  d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
  d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
  @c.concat(d3.to_json + "\n" + d4.to_json + "\n", 2)
  assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
  assert_equal 4, @c.size
  @c.rollback
  assert_equal first_bytesize, @c.bytesize
  assert_equal 2, @c.size
end
test 'does nothing for #close' do
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
@c.commit
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
@c.commit
content = @c.read
@c.close
assert_equal content, @c.read
end
test 'deletes all data by #purge' do
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
@c.commit
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
@c.commit
@c.purge
assert @c.empty?
assert_equal 0, @c.bytesize
assert_equal 0, @c.size
assert_equal '', @c.read
end
test 'can #open its contents as io' do
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
data = [d1.to_json + "\n", d2.to_json + "\n"]
@c.append(data)
@c.commit
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
@c.commit
lines = []
@c.open do |io|
assert io
io.readlines.each do |l|
lines << l
end
end
assert_equal d1.to_json + "\n", lines[0]
assert_equal d2.to_json + "\n", lines[1]
assert_equal d3.to_json + "\n", lines[2]
assert_equal d4.to_json + "\n", lines[3]
end
sub_test_case 'compressed buffer' do
setup do
@src = 'text data for compressing' * 5
@gzipped_src = compress(@src)
@zstded_src = compress(@src, type: :zstd)
end
test '#append with compress option writes compressed data to chunk when compress is gzip' do
c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :gzip)
c.append([@src, @src], compress: :gzip)
c.commit
# check chunk is compressed
assert c.read(compressed: :gzip).size < [@src, @src].join("").size
assert_equal @src + @src, c.read
end
# #open without options must yield an IO over the *decompressed* content
# of a gzip chunk, and return the block's result.
test '#open passes io object having decompressed data to a block when compress is gzip' do
  c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :gzip)
  c.concat(@gzipped_src, @src.size)
  c.commit
  # Fixed typo in local name: decomressed_data -> decompressed_data.
  decompressed_data = c.open do |io|
    v = io.read
    assert_equal @src, v
    v
  end
  assert_equal @src, decompressed_data
end
# #open(compressed: :gzip) must yield the raw (still gzipped) bytes
# without decompressing them.
test '#open with compressed option passes io object having decompressed data to a block when compress is gzip' do
  c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :gzip)
  c.concat(@gzipped_src, @src.size)
  c.commit
  # Fixed typo in local name: comressed_data -> compressed_data.
  compressed_data = c.open(compressed: :gzip) do |io|
    v = io.read
    assert_equal @gzipped_src, v
    v
  end
  assert_equal @gzipped_src, compressed_data
end
test '#write_to writes decompressed data when compress is gzip' do
c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :gzip)
c.concat(@gzipped_src, @src.size)
c.commit
assert_equal @src, c.read
assert_equal @gzipped_src, c.read(compressed: :gzip)
io = StringIO.new
c.write_to(io)
assert_equal @src, io.string
end
test '#write_to with compressed option writes compressed data when compress is gzip' do
c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :gzip)
c.concat(@gzipped_src, @src.size)
c.commit
assert_equal @src, c.read
assert_equal @gzipped_src, c.read(compressed: :gzip)
io = StringIO.new
io.set_encoding(Encoding::ASCII_8BIT)
c.write_to(io, compressed: :gzip)
assert_equal @gzipped_src, io.string
end
test '#append with compress option writes compressed data to chunk when compress is zstd' do
c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :zstd)
c.append([@src, @src], compress: :zstd)
c.commit
# check chunk is compressed
assert c.read(compressed: :zstd).size < [@src, @src].join("").size
assert_equal @src + @src, c.read
end
# #open without options must yield an IO over the *decompressed* content
# of a zstd chunk, and return the block's result.
test '#open passes io object having decompressed data to a block when compress is zstd' do
  c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :zstd)
  c.concat(@zstded_src, @src.size)
  c.commit
  # Fixed typo in local name: decomressed_data -> decompressed_data.
  decompressed_data = c.open do |io|
    v = io.read
    assert_equal @src, v
    v
  end
  assert_equal @src, decompressed_data
end
# #open(compressed: :zstd) must yield the raw (still zstd-compressed)
# bytes without decompressing them.
test '#open with compressed option passes io object having decompressed data to a block when compress is zstd' do
  c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :zstd)
  c.concat(@zstded_src, @src.size)
  c.commit
  # Fixed typo in local name: comressed_data -> compressed_data.
  compressed_data = c.open(compressed: :zstd) do |io|
    v = io.read
    assert_equal @zstded_src, v
    v
  end
  assert_equal @zstded_src, compressed_data
end
test '#write_to writes decompressed data when compress is zstd' do
c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :zstd)
c.concat(@zstded_src, @src.size)
c.commit
assert_equal @src, c.read
assert_equal @zstded_src, c.read(compressed: :zstd)
io = StringIO.new
c.write_to(io)
assert_equal @src, io.string
end
test '#write_to with compressed option writes compressed data when compress is zstd' do
c = Fluent::Plugin::Buffer::MemoryChunk.new(Object.new, compress: :zstd)
c.concat(@zstded_src, @src.size)
c.commit
assert_equal @src, c.read
assert_equal @zstded_src, c.read(compressed: :zstd)
io = StringIO.new
io.set_encoding(Encoding::ASCII_8BIT)
c.write_to(io, compressed: :zstd)
assert_equal @zstded_src, io.string
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_filter_parser.rb | test/plugin/test_filter_parser.rb | require_relative '../helper'
require 'timecop'
require 'fluent/test/driver/filter'
require 'fluent/plugin/filter_parser'
class ParserFilterTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
@tag = 'test'
@default_time = Time.parse('2010-05-04 03:02:01 UTC')
Timecop.freeze(@default_time)
end
def teardown
super
Timecop.return
end
def assert_equal_parsed_time(expected, actual)
if expected.is_a?(Integer)
assert_equal(expected, actual.to_i)
else
assert_equal_event_time(expected, actual)
end
end
ParserError = Fluent::Plugin::Parser::ParserError
CONFIG = %[
key_name message
reserve_data true
<parse>
@type regexp
expression /^(?<x>.)(?<y>.) (?<time>.+)$/
time_format %Y%m%d%H%M%S
</parse>
]
def create_driver(conf=CONFIG)
Fluent::Test::Driver::Filter.new(Fluent::Plugin::ParserFilter).configure(conf)
end
def test_configure
assert_raise(Fluent::ConfigError) {
create_driver('')
}
assert_raise(Fluent::NotFoundPluginError) {
create_driver %[
key_name foo
<parse>
@type unknown_format_that_will_never_be_implemented
</parse>
]
}
assert_nothing_raised {
create_driver %[
key_name foo
<parse>
@type regexp
expression /(?<x>.)/
</parse>
]
}
assert_nothing_raised {
create_driver %[
key_name foo
<parse>
@type json
</parse>
]
}
assert_nothing_raised {
create_driver %[
key_name foo
format json
]
}
assert_nothing_raised {
create_driver %[
key_name foo
<parse>
@type ltsv
</parse>
]
}
assert_nothing_raised {
create_driver %[
key_name message
<parse>
@type regexp
expression /^col1=(?<col1>.+) col2=(?<col2>.+)$/
</parse>
]
}
d = create_driver %[
key_name foo
<parse>
@type regexp
expression /(?<x>.)/
</parse>
]
assert_false d.instance.reserve_data
end
# CONFIG = %[
# remove_prefix test
# add_prefix parsed
# key_name message
# format /^(?<x>.)(?<y>.) (?<time>.+)$/
# time_format %Y%m%d%H%M%S
# reserve_data true
# ]
def test_filter
d1 = create_driver(CONFIG)
time = event_time("2012-01-02 13:14:15")
d1.run(default_tag: @tag) do
d1.feed(time, {'message' => '12 20120402182059'})
d1.feed(time, {'message' => '34 20120402182100'})
d1.feed(time, {'message' => '56 20120402182100'})
d1.feed(time, {'message' => '78 20120402182101'})
d1.feed(time, {'message' => '90 20120402182100'})
end
filtered = d1.filtered
assert_equal 5, filtered.length
first = filtered[0]
assert_equal_event_time event_time("2012-04-02 18:20:59"), first[0]
assert_equal '1', first[1]['x']
assert_equal '2', first[1]['y']
assert_equal '12 20120402182059', first[1]['message']
second = filtered[1]
assert_equal_event_time event_time("2012-04-02 18:21:00"), second[0]
assert_equal '3', second[1]['x']
assert_equal '4', second[1]['y']
third = filtered[2]
assert_equal_event_time event_time("2012-04-02 18:21:00"), third[0]
assert_equal '5', third[1]['x']
assert_equal '6', third[1]['y']
fourth = filtered[3]
assert_equal_event_time event_time("2012-04-02 18:21:01"), fourth[0]
assert_equal '7', fourth[1]['x']
assert_equal '8', fourth[1]['y']
fifth = filtered[4]
assert_equal_event_time event_time("2012-04-02 18:21:00"), fifth[0]
assert_equal '9', fifth[1]['x']
assert_equal '0', fifth[1]['y']
d2 = create_driver(%[
key_name data
format /^(?<x>.)(?<y>.) (?<t>.+)$/
])
time = Fluent::EventTime.from_time(@default_time) # EventTime emit test
d2.run(default_tag: @tag) do
d2.feed(time, {'data' => '12 20120402182059'})
d2.feed(time, {'data' => '34 20120402182100'})
end
filtered = d2.filtered
assert_equal 2, filtered.length
first = filtered[0]
assert_equal_event_time time, first[0]
assert_nil first[1]['data']
assert_equal '1', first[1]['x']
assert_equal '2', first[1]['y']
assert_equal '20120402182059', first[1]['t']
second = filtered[1]
assert_equal_event_time time, second[0]
assert_nil second[1]['data']
assert_equal '3', second[1]['x']
assert_equal '4', second[1]['y']
assert_equal '20120402182100', second[1]['t']
d3 = create_driver(%[
key_name data
<parse>
@type regexp
expression /^(?<x>[0-9])(?<y>[0-9]) (?<t>.+)$/
</parse>
])
time = Time.parse("2012-04-02 18:20:59").to_i
d3.run(default_tag: @tag) do
d3.feed(time, {'data' => '12 20120402182059'})
d3.feed(time, {'data' => '34 20120402182100'})
d3.feed(time, {'data' => 'xy 20120402182101'})
end
filtered = d3.filtered
assert_equal 2, filtered.length
d4 = create_driver(%[
key_name data
<parse>
@type json
</parse>
])
time = Time.parse("2012-04-02 18:20:59").to_i
d4.run(default_tag: @tag) do
d4.feed(time, {'data' => '{"xxx":"first","yyy":"second"}', 'xxx' => 'x', 'yyy' => 'y'})
d4.feed(time, {'data' => 'foobar', 'xxx' => 'x', 'yyy' => 'y'})
end
filtered = d4.filtered
assert_equal 1, filtered.length
end
def test_filter_with_multiple_records
d1 = create_driver(%[
key_name data
<parse>
@type json
</parse>
])
time = Fluent::EventTime.from_time(@default_time)
d1.run(default_tag: @tag) do
d1.feed(time, {'data' => '[{"xxx_1":"first","yyy":"second"}, {"xxx_2":"first", "yyy_2":"second"}]'})
end
filtered = d1.filtered
assert_equal 2, filtered.length
assert_equal ({"xxx_1"=>"first", "yyy"=>"second"}), filtered[0][1]
assert_equal ({"xxx_2"=>"first", "yyy_2"=>"second"}), filtered[1][1]
end
data(:keep_key_name => false,
:remove_key_name => true)
def test_filter_with_reserved_data(remove_key_name)
d1 = create_driver(%[
key_name data
reserve_data yes
remove_key_name_field #{remove_key_name}
<parse>
@type regexp
expression /^(?<x>\\d)(?<y>\\d) (?<t>.+)$/
</parse>
])
time = event_time("2012-04-02 18:20:59")
d1.run(default_tag: @tag) do
d1.feed(time, {'data' => '12 20120402182059'})
d1.feed(time, {'data' => '34 20120402182100'})
d1.feed(time, {'data' => 'xy 20120402182101'})
end
filtered = d1.filtered
assert_equal 3, filtered.length
d2 = create_driver(%[
key_name data
reserve_data yes
remove_key_name_field #{remove_key_name}
<parse>
@type json
</parse>
])
time = Fluent::EventTime.from_time(@default_time)
d2.run(default_tag: @tag) do
d2.feed(time, {'data' => '{"xxx":"first","yyy":"second"}', 'xxx' => 'x', 'yyy' => 'y'})
d2.feed(time, {'data' => 'foobar', 'xxx' => 'x', 'yyy' => 'y'})
end
filtered = d2.filtered
assert_equal 2, filtered.length
first = filtered[0]
assert_equal_event_time time, first[0]
if remove_key_name
assert_not_include first[1], 'data'
else
assert_equal '{"xxx":"first","yyy":"second"}', first[1]['data']
end
assert_equal 'first', first[1]['xxx']
assert_equal 'second', first[1]['yyy']
second = filtered[1]
assert_equal_event_time time, second[0]
assert_equal 'foobar', second[1]['data']
assert_equal 'x', second[1]['xxx']
assert_equal 'y', second[1]['yyy']
end
CONFIG_LTSV = %[
key_name data
<parse>
@type ltsv
</parse>
]
CONFIG_LTSV_WITH_TYPES = %[
key_name data
<parse>
@type ltsv
types i:integer,s:string,f:float,b:bool
</parse>
]
data(:event_time => lambda { |time| Fluent::EventTime.from_time(time) },
     :int_time => lambda { |time| time.to_i })
def test_filter_ltsv(time_parse)
  # Plain LTSV parsing: the parsed fields replace the source key ('data').
  d = create_driver(CONFIG_LTSV)
  time = time_parse.call(@default_time)
  d.run(default_tag: @tag) do
    d.feed(time, {'data' => "xxx:first\tyyy:second", 'xxx' => 'x', 'yyy' => 'y'})
    d.feed(time, {'data' => "xxx:first\tyyy:second2", 'xxx' => 'x', 'yyy' => 'y'})
  end
  filtered = d.filtered
  assert_equal 2, filtered.length
  first = filtered[0]
  assert_equal_parsed_time time, first[0]
  assert_nil first[1]['data']
  assert_equal 'first', first[1]['xxx']
  assert_equal 'second', first[1]['yyy']
  second = filtered[1]
  assert_equal_parsed_time time, second[0]
  # Fix: the original re-checked `first` here; verify the second record.
  assert_nil second[1]['data']
  assert_equal 'first', second[1]['xxx']
  assert_equal 'second2', second[1]['yyy']

  # With reserve_data, each record must keep its own original 'data' field.
  d = create_driver(CONFIG_LTSV + %[reserve_data yes])
  time = @default_time.to_i
  d.run(default_tag: @tag) do
    d.feed(time, {'data' => "xxx:first\tyyy:second", 'xxx' => 'x', 'yyy' => 'y'})
    d.feed(time, {'data' => "xxx:first\tyyy:second2", 'xxx' => 'x', 'yyy' => 'y'})
  end
  filtered = d.filtered
  assert_equal 2, filtered.length
  first = filtered[0]
  assert_equal_parsed_time time, first[0]
  assert_equal "xxx:first\tyyy:second", first[1]['data']
  assert_equal 'first', first[1]['xxx']
  assert_equal 'second', first[1]['yyy']
  second = filtered[1]
  assert_equal_parsed_time time, second[0]
  # Fix: assert the second record's own reserved data
  # (the original asserted first's 'data' again).
  assert_equal "xxx:first\tyyy:second2", second[1]['data']
  assert_equal 'first', second[1]['xxx']
  assert_equal 'second2', second[1]['yyy']

  # Type conversion via the parser's `types` option.
  d = create_driver(CONFIG_LTSV_WITH_TYPES)
  time = @default_time.to_i
  d.run do
    d.feed(@tag, time, {'data' => "i:1\ts:2\tf:3\tb:true\tx:123"})
  end
  filtered = d.filtered
  assert_equal 1, filtered.length
  first = filtered[0]
  assert_equal_parsed_time time, first[0]
  assert_equal 1, first[1]['i']
  assert_equal '2', first[1]['s']
  assert_equal 3.0, first[1]['f']
  assert_equal true, first[1]['b']
  # 'x' has no declared type, so it stays a string.
  assert_equal '123', first[1]['x']
end
CONFIG_TSV = %[
key_name data
<parse>
@type tsv
keys key1,key2,key3
</parse>
]
data(:event_time => lambda { |time| Fluent::EventTime.from_time(time) },
:int_time => lambda { |time| time.to_i })
def test_filter_tsv(time_parse)
d = create_driver(CONFIG_TSV)
time = time_parse.call(@default_time)
d.run do
d.feed(@tag, time, {'data' => "value1\tvalue2\tvalueThree", 'xxx' => 'x', 'yyy' => 'y'})
end
filtered = d.filtered
assert_equal 1, filtered.length
first = filtered[0]
assert_equal_parsed_time time, first[0]
assert_nil first[1]['data']
assert_equal 'value1', first[1]['key1']
assert_equal 'value2', first[1]['key2']
assert_equal 'valueThree', first[1]['key3']
end
CONFIG_CSV = %[
key_name data
<parse>
@type csv
keys key1,key2,key3
</parse>
]
CONFIG_CSV_COMPAT = %[
key_name data
format csv
keys key1,key2,key3
]
data(new_conf: CONFIG_CSV,
compat_conf: CONFIG_CSV_COMPAT)
def test_filter_csv(conf)
d = create_driver(conf)
time = @default_time.to_i
d.run do
d.feed(@tag, time, {'data' => 'value1,"value2","value""ThreeYes!"', 'xxx' => 'x', 'yyy' => 'y'})
end
filtered = d.filtered
assert_equal 1, filtered.length
first = filtered[0]
assert_equal time, first[0]
assert_nil first[1]['data']
assert_equal 'value1', first[1]['key1']
assert_equal 'value2', first[1]['key2']
assert_equal 'value"ThreeYes!', first[1]['key3']
end
def test_filter_with_nested_record
d = create_driver(%[
key_name $.data.log
<parse>
@type csv
keys key1,key2,key3
</parse>
])
time = @default_time.to_i
d.run do
d.feed(@tag, time, {'data' => {'log' => 'value1,"value2","value""ThreeYes!"'}, 'xxx' => 'x', 'yyy' => 'y'})
end
filtered = d.filtered
assert_equal 1, filtered.length
first = filtered[0]
assert_equal time, first[0]
assert_nil first[1]['data']
assert_equal 'value1', first[1]['key1']
assert_equal 'value2', first[1]['key2']
assert_equal 'value"ThreeYes!', first[1]['key3']
end
CONFIG_HASH_VALUE_FIELD = %[
key_name data
hash_value_field parsed
<parse>
@type json
</parse>
]
CONFIG_HASH_VALUE_FIELD_RESERVE_DATA = %[
key_name data
reserve_data yes
hash_value_field parsed
<parse>
@type json
</parse>
]
CONFIG_HASH_VALUE_FIELD_WITH_INJECT_KEY_PREFIX = %[
key_name data
hash_value_field parsed
inject_key_prefix data.
<parse>
@type json
</parse>
]
data(:event_time => lambda { |time| Fluent::EventTime.from_time(time) },
:int_time => lambda { |time| time.to_i })
def test_filter_inject_hash_value_field(time_parse)
original = {'data' => '{"xxx":"first","yyy":"second"}', 'xxx' => 'x', 'yyy' => 'y'}
d = create_driver(CONFIG_HASH_VALUE_FIELD)
time = time_parse.call(@default_time)
d.run do
d.feed(@tag, time, original)
end
filtered = d.filtered
assert_equal 1, filtered.length
first = filtered[0]
assert_equal_parsed_time time, first[0]
record = first[1]
assert_equal 1, record.keys.size
assert_equal({"xxx"=>"first","yyy"=>"second"}, record['parsed'])
d = create_driver(CONFIG_HASH_VALUE_FIELD_RESERVE_DATA)
time = @default_time.to_i
d.run do
d.feed(@tag, time, original)
end
filtered = d.filtered
assert_equal 1, filtered.length
first = filtered[0]
assert_equal_parsed_time time, first[0]
record = first[1]
assert_equal 4, record.keys.size
assert_equal original['data'], record['data']
assert_equal original['xxx'], record['xxx']
assert_equal original['yyy'], record['yyy']
assert_equal({"xxx"=>"first","yyy"=>"second"}, record['parsed'])
d = create_driver(CONFIG_HASH_VALUE_FIELD_WITH_INJECT_KEY_PREFIX)
time = @default_time.to_i
d.run do
d.feed(@tag, time, original)
end
filtered = d.filtered
assert_equal 1, filtered.length
first = filtered[0]
assert_equal_parsed_time time, first[0]
record = first[1]
assert_equal 1, record.keys.size
assert_equal({"data.xxx"=>"first","data.yyy"=>"second"}, record['parsed'])
end
CONFIG_DONT_PARSE_TIME = %[
key_name data
reserve_time true
<parse>
@type json
keep_time_key true
</parse>
]
CONFIG_DONT_PARSE_TIME_COMPAT = %[
key_name data
reserve_time true
format json
keep_time_key true
]
data(new_conf: CONFIG_DONT_PARSE_TIME,
compat_conf: CONFIG_DONT_PARSE_TIME_COMPAT)
def test_time_should_be_reserved(conf)
t = Time.now.to_i
d = create_driver(conf)
d.run(default_tag: @tag) do
d.feed(t, {'data' => '{"time":1383190430, "f1":"v1"}'})
d.feed(t, {'data' => '{"time":"1383190430", "f1":"v1"}'})
d.feed(t, {'data' => '{"time":"2013-10-31 12:34:03 +0900", "f1":"v1"}'})
end
filtered = d.filtered
assert_equal 3, filtered.length
assert_equal 'v1', filtered[0][1]['f1']
assert_equal 1383190430, filtered[0][1]['time']
assert_equal t, filtered[0][0]
assert_equal 'v1', filtered[1][1]['f1']
assert_equal "1383190430", filtered[1][1]['time']
assert_equal t, filtered[1][0]
assert_equal 'v1', filtered[2][1]['f1']
assert_equal '2013-10-31 12:34:03 +0900', filtered[2][1]['time']
assert_equal t, filtered[2][0]
end
sub_test_case "abnormal cases" do
module HashExcept
refine Hash do
def except(*keys)
reject do |key, _|
keys.include?(key)
end
end
end
end
# Ruby 2.x does not support Hash#except.
using HashExcept unless {}.respond_to?(:except)
def run_and_assert(driver, records:, expected_records:, expected_error_records:, expected_errors:)
driver.run do
records.each do |record|
driver.feed(@tag, Fluent::EventTime.now.to_i, record)
end
end
assert_equal(
[
expected_records,
expected_error_records,
expected_errors.collect { |e| [e.class, e.message] },
],
[
driver.filtered_records,
driver.error_events.collect { |_, _, record, _| record },
driver.error_events.collect { |_, _, _, e| [e.class, e.message] },
]
)
end
data(
"with default" => {
records: [{"foo" => "bar"}],
additional_config: "",
expected_records: [],
expected_error_records: [{"foo" => "bar"}],
expected_errors: [ArgumentError.new("data does not exist")],
},
"with reserve_data" => {
records: [{"foo" => "bar"}],
additional_config: "reserve_data",
expected_records: [{"foo" => "bar"}],
expected_error_records: [{"foo" => "bar"}],
expected_errors: [ArgumentError.new("data does not exist")],
},
"with disabled emit_invalid_record_to_error" => {
records: [{"foo" => "bar"}],
additional_config: "emit_invalid_record_to_error false",
expected_records: [],
expected_error_records: [],
expected_errors: [],
},
"with reserve_data and disabled emit_invalid_record_to_error" => {
records: [{"foo" => "bar"}],
additional_config: ["reserve_data", "emit_invalid_record_to_error false"].join("\n"),
expected_records: [{"foo" => "bar"}],
expected_error_records: [],
expected_errors: [],
},
"with reserve_data and hash_value_field" => {
records: [{"foo" => "bar"}],
additional_config: ["reserve_data", "hash_value_field parsed"].join("\n"),
expected_records: [{"foo" => "bar", "parsed" => {}}],
expected_error_records: [{"foo" => "bar"}],
expected_errors: [ArgumentError.new("data does not exist")],
},
)
def test_filter_key_not_exist(data)
driver = create_driver(<<~EOC)
key_name data
#{data[:additional_config]}
<parse>
@type json
</parse>
EOC
run_and_assert(driver, **data.except(:additional_config))
end
data(
"with default" => {
records: [{"data" => "foo bar"}],
additional_config: "",
expected_records: [],
expected_error_records: [{"data" => "foo bar"}],
expected_errors: [Fluent::Plugin::Parser::ParserError.new("pattern not matched with data 'foo bar'")],
},
"with reserve_data" => {
records: [{"data" => "foo bar"}],
additional_config: "reserve_data",
expected_records: [{"data" => "foo bar"}],
expected_error_records: [{"data" => "foo bar"}],
expected_errors: [Fluent::Plugin::Parser::ParserError.new("pattern not matched with data 'foo bar'")],
},
"with disabled emit_invalid_record_to_error" => {
records: [{"data" => "foo bar"}],
additional_config: "emit_invalid_record_to_error false",
expected_records: [],
expected_error_records: [],
expected_errors: [],
},
"with reserve_data and disabled emit_invalid_record_to_error" => {
records: [{"data" => "foo bar"}],
additional_config: ["reserve_data", "emit_invalid_record_to_error false"].join("\n"),
expected_records: [{"data" => "foo bar"}],
expected_error_records: [],
expected_errors: [],
},
"with matched pattern" => {
records: [{"data" => "col1=foo col2=bar"}],
additional_config: "",
expected_records: [{"col1" => "foo", "col2" => "bar"}],
expected_error_records: [],
expected_errors: [],
},
)
def test_pattern_not_matched(data)
driver = create_driver(<<~EOC)
key_name data
#{data[:additional_config]}
<parse>
@type regexp
expression /^col1=(?<col1>.+) col2=(?<col2>.+)$/
</parse>
EOC
run_and_assert(driver, **data.except(:additional_config))
end
data(
"invalid format with default" => {
records: [{'data' => '{"time":[], "f1":"v1"}'}],
additional_config: "",
expected_records: [],
expected_error_records: [{'data' => '{"time":[], "f1":"v1"}'}],
expected_errors: [Fluent::Plugin::Parser::ParserError.new("value must be a string or a number: [](Array)")],
},
"invalid format with disabled emit_invalid_record_to_error" => {
records: [{'data' => '{"time":[], "f1":"v1"}'}],
additional_config: "emit_invalid_record_to_error false",
expected_records: [],
expected_error_records: [],
expected_errors: [],
},
"mixed valid and invalid with default" => {
records: [{'data' => '{"time":[], "f1":"v1"}'}, {'data' => '{"time":0, "f1":"v1"}'}],
additional_config: "",
expected_records: [{"f1" => "v1"}],
expected_error_records: [{'data' => '{"time":[], "f1":"v1"}'}],
expected_errors: [Fluent::Plugin::Parser::ParserError.new("value must be a string or a number: [](Array)")],
},
)
def test_parser_error(data)
driver = create_driver(<<~EOC)
key_name data
#{data[:additional_config]}
<parse>
@type json
</parse>
EOC
run_and_assert(driver, **data.except(:additional_config))
end
data(
"UTF-8 with default" => {
records: [{"data" => "\xff"}],
additional_config: "",
expected_records: [],
expected_error_records: [{"data" => "\xff"}],
expected_errors: [ArgumentError.new("invalid byte sequence in UTF-8")],
},
"UTF-8 with replace_invalid_sequence" => {
records: [{"data" => "\xff"}],
additional_config: "replace_invalid_sequence",
expected_records: [{"message" => "?"}],
expected_error_records: [],
expected_errors: [],
},
"UTF-8 with replace_invalid_sequence and reserve_data" => {
records: [{"data" => "\xff"}],
additional_config: ["replace_invalid_sequence", "reserve_data"].join("\n"),
expected_records: [{"message" => "?", "data" => "\xff"}],
expected_error_records: [],
expected_errors: [],
},
"US-ASCII with default" => {
records: [{"data" => "\xff".force_encoding("US-ASCII")}],
additional_config: "",
expected_records: [],
expected_error_records: [{"data" => "\xff".force_encoding("US-ASCII")}],
expected_errors: [ArgumentError.new("invalid byte sequence in US-ASCII")],
},
"US-ASCII with replace_invalid_sequence" => {
records: [{"data" => "\xff".force_encoding("US-ASCII")}],
additional_config: "replace_invalid_sequence",
expected_records: [{"message" => "?".force_encoding("US-ASCII")}],
expected_error_records: [],
expected_errors: [],
},
"US-ASCII with replace_invalid_sequence and reserve_data" => {
records: [{"data" => "\xff".force_encoding("US-ASCII")}],
additional_config: ["replace_invalid_sequence", "reserve_data"].join("\n"),
expected_records: [{"message" => "?".force_encoding("US-ASCII"), "data" => "\xff".force_encoding("US-ASCII")}],
expected_error_records: [],
expected_errors: [],
},
)
def test_invalid_byte(data)
driver = create_driver(<<~EOC)
key_name data
#{data[:additional_config]}
<parse>
@type regexp
expression /^(?<message>.*)$/
</parse>
EOC
run_and_assert(driver, **data.except(:additional_config))
end
test "replace_invalid_sequence should be applied only to invalid byte sequence errors" do
error_msg = "This is a dummy ArgumentError other than invalid byte sequence"
any_instance_of(Fluent::Plugin::JSONParser) do |klass|
stub(klass).parse do
raise ArgumentError, error_msg
end
end
driver = create_driver(<<~EOC)
key_name data
replace_invalid_sequence
<parse>
@type json
</parse>
EOC
# replace_invalid_sequence should not applied
run_and_assert(
driver,
records: [{'data' => '{"foo":"bar"}'}],
expected_records: [],
expected_error_records: [{'data' => '{"foo":"bar"}'}],
expected_errors: [ArgumentError.new(error_msg)]
)
end
data(
"with default" => {
records: [{'data' => '{"foo":"bar"}'}],
additional_config: "",
expected_records: [],
expected_error_records: [{'data' => '{"foo":"bar"}'}],
expected_errors: [Fluent::Plugin::Parser::ParserError.new("parse failed This is a dummy unassumed error")],
},
"with disabled emit_invalid_record_to_error" => {
records: [{'data' => '{"foo":"bar"}'}],
additional_config: "emit_invalid_record_to_error false",
expected_records: [],
expected_error_records: [],
expected_errors: [],
},
)
def test_unassumed_error(data)
any_instance_of(Fluent::Plugin::JSONParser) do |klass|
stub(klass).parse do
raise RuntimeError, "This is a dummy unassumed error"
end
end
driver = create_driver(<<~EOC)
key_name data
#{data[:additional_config]}
<parse>
@type json
</parse>
EOC
run_and_assert(driver, **data.except(:additional_config))
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_owned_by.rb | test/plugin/test_owned_by.rb | require_relative '../helper'
require 'fluent/plugin/base'
require 'fluent/plugin/input'
require 'fluent/plugin/owned_by_mixin'
# Fixture plugins for OwnedByMixinTest below.
module OwnedByMixinTestEnv
# Parent plugin: a bare input, registered so it can be instantiated by name.
class DummyParent < Fluent::Plugin::Input
Fluent::Plugin.register_input('dummy_parent', self)
end
# Owned plugin: includes OwnedByMixin so that, when created with a parent,
# it can pick up state (plugin id, logger) from its owner.
class DummyChild < Fluent::Plugin::Base
include Fluent::Plugin::OwnedByMixin
Fluent::Plugin.register_parser('dummy_child', self)
end
end
class OwnedByMixinTest < Test::Unit::TestCase
sub_test_case 'Owned plugins' do
setup do
Fluent::Test.setup
end
# Creating a parser with `parent:` must wire up ownership: the child
# reports the parent as its owner and inherits the parent's @id and
# configured log level.
test 'inherits plugin id and logger from parent' do
parent = Fluent::Plugin.new_input('dummy_parent')
parent.configure(config_element('ROOT', '', {'@id' => 'my_parent_id', '@log_level' => 'trace'}))
child = Fluent::Plugin.new_parser('dummy_child', parent: parent)
# Owner must be the very same object as the parent, not a copy.
assert_equal parent.object_id, child.owner.object_id
assert_equal 'my_parent_id', child.instance_eval{ @_plugin_id }
assert_equal Fluent::Log::LEVEL_TRACE, child.log.level
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/test_in_sample.rb | test/plugin/test_in_sample.rb | require_relative '../helper'
require 'fluent/test/driver/input'
require 'fluent/plugin/in_sample'
require 'fileutils'
# Tests for Fluent::Plugin::SampleInput (in_sample): configuration
# validation, event emission, and persistence of the auto-increment
# counter through a <storage> section.
class SampleTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  def create_driver(conf)
    Fluent::Test::Driver::Input.new(Fluent::Plugin::SampleInput).configure(conf)
  end

  sub_test_case 'configure' do
    test 'required parameters' do
      assert_raise_message("'tag' parameter is required") do
        Fluent::Plugin::SampleInput.new.configure(config_element('ROOT',''))
      end
    end

    test 'tag' do
      d = create_driver(%[
        tag sample
      ])
      assert_equal "sample", d.instance.tag
    end

    # Shared minimal config for the parameter tests below.
    config = %[
      tag sample
    ]

    test 'auto_increment_key' do
      d = create_driver(config + %[
        auto_increment_key id
      ])
      assert_equal "id", d.instance.auto_increment_key
    end

    test 'rate' do
      d = create_driver(config + %[
        rate 10
      ])
      assert_equal 10, d.instance.rate
    end

    test 'sample' do
      # hash is okay
      d = create_driver(config + %[sample {"foo":"bar"}])
      assert_equal [{"foo"=>"bar"}], d.instance.sample

      # array of hash is okay
      d = create_driver(config + %[sample [{"foo":"bar"}]])
      assert_equal [{"foo"=>"bar"}], d.instance.sample

      # a bare string or an array of non-hashes must be rejected
      assert_raise_message(/JSON::ParserError|got incomplete JSON/) do
        create_driver(config + %[sample "foo"])
      end

      assert_raise_message(/is not a hash/) do
        create_driver(config + %[sample ["foo"]])
      end
    end
  end

  sub_test_case "emit" do
    config = %[
      tag sample
      rate 10
      sample {"foo":"bar"}
    ]

    test 'simple' do
      d = create_driver(config)
      d.run(timeout: 0.5)

      # Every emitted event carries the configured tag, the sample record
      # and an EventTime timestamp.
      d.events.each do |tag, time, record|
        assert_equal("sample", tag)
        assert_equal({"foo"=>"bar"}, record)
        assert(time.is_a?(Fluent::EventTime))
      end
    end

    test 'with auto_increment_key' do
      d = create_driver(config + %[auto_increment_key id])
      d.run(timeout: 0.5)

      # The counter starts at 0 and increases by 1 per event.
      d.events.each_with_index do |(tag, _time, record), i|
        assert_equal("sample", tag)
        assert_equal({"foo"=>"bar", "id"=>i}, record)
      end
    end
  end

  TEST_PLUGIN_STORAGE_PATH = File.join( File.dirname(File.dirname(__FILE__)), 'tmp', 'in_sample', 'store' )
  FileUtils.mkdir_p TEST_PLUGIN_STORAGE_PATH

  sub_test_case 'when sample plugin has storage which is not specified the path' do
    config1 = {
      'tag' => 'sample',
      'rate' => '0',
      'sample' => '[{"x": 1, "y": "1"}, {"x": 2, "y": "2"}, {"x": 3, "y": "3"}]',
      'auto_increment_key' => 'id',
    }
    conf1 = config_element('ROOT', '', config1, [])

    test "value of auto increment key is not kept after stop-and-start" do
      # No <storage> path is given, so nothing should appear on disk
      # at any point in this test.
      assert !File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-01.json'))

      d1 = create_driver(conf1)
      d1.run(timeout: 0.5) do
        d1.instance.emit(2)
      end

      events = d1.events.sort{|a,b| a[2]['id'] <=> b[2]['id'] }
      first_id1 = events.first[2]['id']
      assert_equal 0, first_id1

      last_id1 = events.last[2]['id']
      assert { last_id1 > 0 }

      assert !File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-01.json'))

      # Restart: the counter must reset to zero because nothing was persisted.
      d2 = create_driver(conf1)
      d2.run(timeout: 0.5) do
        d2.instance.emit(2)
      end

      events = d2.events.sort{|a,b| a[2]['id'] <=> b[2]['id'] }
      first_id2 = events.first[2]['id']
      assert_equal 0, first_id2

      assert !File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-01.json'))
    end
  end

  sub_test_case 'when sample plugin has storage which is specified the path' do
    setup do
      # Start from an empty, writable storage directory for every test.
      FileUtils.rm_rf(TEST_PLUGIN_STORAGE_PATH)
      FileUtils.mkdir_p(File.join(TEST_PLUGIN_STORAGE_PATH, 'json'))
      FileUtils.chmod_R(0755, File.join(TEST_PLUGIN_STORAGE_PATH, 'json'))
    end

    config2 = {
      '@id' => 'test-02',
      'tag' => 'sample',
      'rate' => '0',
      'sample' => '[{"x": 1, "y": "1"}, {"x": 2, "y": "2"}, {"x": 3, "y": "3"}]',
      'auto_increment_key' => 'id',
    }
    conf2 = config_element('ROOT', '', config2, [
      config_element(
        'storage', '',
        {'@type' => 'local',
         '@id' => 'test-02',
         'path' => File.join(TEST_PLUGIN_STORAGE_PATH,
                             'json', 'test-02.json'),
         'persistent' => true,
        })
    ])

    test "value of auto increment key is kept after stop-and-start" do
      assert !File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-02.json'))

      d1 = create_driver(conf2)
      d1.run(timeout: 1) do
        d1.instance.emit(2)
      end

      first_id1 = d1.events.first[2]['id']
      assert_equal 0, first_id1

      last_id1 = d1.events.last[2]['id']
      assert { last_id1 > 0 }

      assert File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-02.json'))

      # Restart with the same persistent storage: the counter continues
      # from the last persisted value instead of resetting.
      d2 = create_driver(conf2)
      d2.run(timeout: 1) do
        d2.instance.emit(2)
      end

      # NOTE(review): this bare call has no effect — likely a leftover.
      d2.events
      first_id2 = d2.events.first[2]['id']
      assert_equal last_id1 + 1, first_id2

      assert File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-02.json'))
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/out_forward/test_load_balancer.rb | test/plugin/out_forward/test_load_balancer.rb | require_relative '../../helper'
require 'flexmock/test_unit'
require 'fluent/plugin/out_forward/load_balancer'
# Tests for ForwardOutput's LoadBalancer: node selection honors
# availability and weight, and cycles round-robin among usable nodes.
#
# Fix: the nested select_healthy_node blocks reused |node| as the block
# parameter, shadowing the outer block's |node| (Ruby emits a
# "shadowing outer local variable" warning). Inner/outer params are now
# named distinctly; behavior is unchanged.
class LoadBalancerTest < Test::Unit::TestCase
  sub_test_case 'select_healthy_node' do
    test 'select healthy node' do
      lb = Fluent::Plugin::ForwardOutput::LoadBalancer.new($log)
      # n1 is unavailable, so every selection must yield n2.
      n1 = flexmock('node', :'standby?' => false, :'available?' => false, weight: 1)
      n2 = flexmock('node', :'standby?' => false, :'available?' => true, weight: 1)

      lb.rebuild_weight_array([n1, n2])
      lb.select_healthy_node do |node|
        assert_equal(node, n2)
      end

      lb.select_healthy_node do |node|
        assert_equal(node, n2)
      end
    end

    test 'call like round robin' do
      lb = Fluent::Plugin::ForwardOutput::LoadBalancer.new($log)
      n1 = flexmock('node', :'standby?' => false, :'available?' => true, weight: 1)
      n2 = flexmock('node', :'standby?' => false, :'available?' => true, weight: 1)

      lb.rebuild_weight_array([n1, n2])
      # The starting node is random, so branch on the first pick and then
      # assert that the following picks alternate.
      lb.select_healthy_node do |first_pick|
        if first_pick == n1
          lb.select_healthy_node do |node|
            assert_equal(node, n2)
          end
          lb.select_healthy_node do |node|
            assert_equal(node, n1)
          end
        else
          lb.select_healthy_node do |node|
            assert_equal(node, n1)
          end
          lb.select_healthy_node do |node|
            assert_equal(node, n2)
          end
        end
      end
    end

    test 'call like round robin without weight=0 node' do
      lb = Fluent::Plugin::ForwardOutput::LoadBalancer.new($log)
      n1 = flexmock('node', :'standby?' => false, :'available?' => true, weight: 1)
      n2 = flexmock('node', :'standby?' => false, :'available?' => true, weight: 1)
      # A weight-0 node must never be selected: only n1 and n2 rotate.
      n3 = flexmock('node', :'standby?' => false, :'available?' => true, weight: 0)

      lb.rebuild_weight_array([n1, n2, n3])
      lb.select_healthy_node do |first_pick|
        if first_pick == n1
          lb.select_healthy_node do |node|
            assert_equal(node, n2)
          end
          lb.select_healthy_node do |node|
            assert_equal(node, n1)
          end
          lb.select_healthy_node do |node|
            assert_equal(node, n2)
          end
        else
          lb.select_healthy_node do |node|
            assert_equal(node, n1)
          end
          lb.select_healthy_node do |node|
            assert_equal(node, n2)
          end
          lb.select_healthy_node do |node|
            assert_equal(node, n1)
          end
        end
      end
    end

    test 'raise an error if all node are unavailable' do
      lb = Fluent::Plugin::ForwardOutput::LoadBalancer.new($log)
      lb.rebuild_weight_array([flexmock('node', :'standby?' => false, :'available?' => false, weight: 1)])

      assert_raise(Fluent::Plugin::ForwardOutput::NoNodesAvailable) do
        lb.select_healthy_node
      end
    end

    test 'it regards weight=0 node as unavailable' do
      lb = Fluent::Plugin::ForwardOutput::LoadBalancer.new($log)
      lb.rebuild_weight_array([flexmock('node', :'standby?' => false, :'available?' => true, weight: 0)])

      assert_raise(Fluent::Plugin::ForwardOutput::NoNodesAvailable) do
        lb.select_healthy_node
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/out_forward/test_connection_manager.rb | test/plugin/out_forward/test_connection_manager.rb | require_relative '../../helper'
require 'fluent/test/driver/output'
require 'flexmock/test_unit'
require 'fluent/plugin/out_forward'
require 'fluent/plugin/out_forward/connection_manager'
require 'fluent/plugin/out_forward/socket_cache'
# Tests for ConnectionManager#connect, covering ad-hoc connections
# (socket_cache: nil) and keepalive connections (socket_cache given),
# with and without a block, and with delayed-ACK handling.
# NOTE(review): class name lacks the conventional `Test` suffix used by
# the sibling forward-output test files.
class ConnectionManager < Test::Unit::TestCase
  sub_test_case '#connect' do
    sub_test_case 'when socket_cache is nil' do
      test 'creates socket and does not close when block is not given' do
        cm = Fluent::Plugin::ForwardOutput::ConnectionManager.new(
          log: $log,
          secure: false,
          # Without a block the caller owns the socket: close must never be called.
          connection_factory: -> (_, _, _) { sock = 'sock'; mock(sock).close.never; sock },
          socket_cache: nil,
        )
        mock.proxy(cm).connect_keepalive(anything).never

        sock, ri = cm.connect(host: 'host', port: 1234, hostname: 'hostname', ack: nil)
        assert_equal(sock, 'sock')
        assert_equal(ri.state, :established)
      end

      test 'creates socket and calls close when block is given' do
        cm = Fluent::Plugin::ForwardOutput::ConnectionManager.new(
          log: $log,
          secure: false,
          connection_factory: -> (_, _, _) {
            sock = 'sock'
            # Block form: the manager must close the socket itself when done.
            mock(sock).close.once
            mock(sock).close_write.once
            sock
          },
          socket_cache: nil,
        )
        mock.proxy(cm).connect_keepalive(anything).never

        cm.connect(host: 'host', port: 1234, hostname: 'hostname', ack: nil) do |sock, ri|
          assert_equal(sock, 'sock')
          assert_equal(ri.state, :established)
        end
      end

      test 'when secure is true, state is helo' do
        cm = Fluent::Plugin::ForwardOutput::ConnectionManager.new(
          log: $log,
          # Secure connections start in :helo (handshake pending) instead of :established.
          secure: true,
          connection_factory: -> (_, _, _) { sock = 'sock'; mock(sock).close.never; sock },
          socket_cache: nil,
        )
        mock.proxy(cm).connect_keepalive(anything).never

        sock, ri = cm.connect(host: 'host', port: 1234, hostname: 'hostname', ack: nil)
        assert_equal(sock, 'sock')
        assert_equal(ri.state, :helo)
      end

      test 'when passed ack' do
        sock = 'sock'
        cm = Fluent::Plugin::ForwardOutput::ConnectionManager.new(
          log: $log,
          secure: false,
          connection_factory: -> (_, _, _) {
            # The socket is handed over to the ack handler, so it stays open here.
            mock(sock).close.never
            mock(sock).close_write.never
            sock
          },
          socket_cache: nil,
        )
        mock.proxy(cm).connect_keepalive(anything).never
        ack = mock('ack').enqueue(sock).once.subject

        cm.connect(host: 'host', port: 1234, hostname: 'hostname', ack: ack) do |sock, ri|
          assert_equal(sock, 'sock')
          assert_equal(ri.state, :established)
        end
      end
    end

    sub_test_case 'when socket_cache exists' do
      test 'calls connect_keepalive' do
        cache = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
        # Without a block the socket is not returned to the cache.
        mock(cache).checkin('sock').never
        cm = Fluent::Plugin::ForwardOutput::ConnectionManager.new(
          log: $log,
          secure: false,
          connection_factory: -> (_, _, _) { sock = 'sock'; mock(sock).close.never; sock },
          socket_cache: cache,
        )
        mock.proxy(cm).connect_keepalive(host: 'host', port: 1234, hostname: 'hostname', ack: nil).once

        sock, ri = cm.connect(host: 'host', port: 1234, hostname: 'hostname', ack: nil)
        assert_equal(sock, 'sock')
        assert_equal(ri.state, :established)
      end

      test 'calls connect_keepalive and closes socket with block' do
        cache = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
        # Block form with a cache: the socket is checked back in, not closed.
        mock(cache).checkin('sock').once
        cm = Fluent::Plugin::ForwardOutput::ConnectionManager.new(
          log: $log,
          secure: false,
          connection_factory: -> (_, _, _) { sock = 'sock'; mock(sock); sock },
          socket_cache: cache,
        )
        mock.proxy(cm).connect_keepalive(host: 'host', port: 1234, hostname: 'hostname', ack: nil).once

        cm.connect(host: 'host', port: 1234, hostname: 'hostname', ack: nil) do |sock, ri|
          assert_equal(sock, 'sock')
          assert_equal(ri.state, :established)
        end
      end

      test 'does not call dec_ref when ack is passed' do
        cache = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
        # The ack handler takes over the socket, so no checkin even with a block.
        mock(cache).checkin('sock').never
        sock = 'sock'
        ack = stub('ack').enqueue(sock).once.subject
        cm = Fluent::Plugin::ForwardOutput::ConnectionManager.new(
          log: $log,
          secure: false,
          connection_factory: -> (_, _, _) {
            mock(sock).close.never
            mock(sock).close_write.never
            sock
          },
          socket_cache: cache,
        )
        mock.proxy(cm).connect_keepalive(host: 'host', port: 1234, hostname: 'hostname', ack: ack).once

        cm.connect(host: 'host', port: 1234, hostname: 'hostname', ack: ack) do |sock, ri|
          assert_equal(sock, 'sock')
          assert_equal(ri.state, :established)
        end
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/out_forward/test_handshake_protocol.rb | test/plugin/out_forward/test_handshake_protocol.rb | require_relative '../../helper'
require 'flexmock/test_unit'
require 'fluent/plugin/out_forward'
require 'fluent/plugin/out_forward/handshake_protocol'
require 'fluent/plugin/out_forward/connection_manager'
# Tests for the client side of the forward-protocol handshake
# (HELO -> PING -> PONG), tracked via ConnectionManager::RequestInfo.
#
# Fixes: both sub_test_case descriptions had a typo ('#invok'), and the
# pingpong-state test description was copied from the helo test ("sends
# PING message and change state to pingpong") although it asserts the
# state becomes :established after a valid PONG.
class HandshakeProtocolTest < Test::Unit::TestCase
  sub_test_case '#invoke when helo state' do
    test 'sends PING message and change state to pingpong' do
      hostname = 'hostname'
      handshake = Fluent::Plugin::ForwardOutput::HandshakeProtocol.new(log: $log, hostname: hostname, shared_key: 'shared_key', password: nil, username: nil)
      ri = Fluent::Plugin::ForwardOutput::ConnectionManager::RequestInfo.new(:helo)
      sock = StringIO.new('')
      handshake.invoke(sock, ri, ['HELO', {}])

      assert_equal(ri.state, :pingpong)
      # PING layout: [type, hostname, salt digest, shared-key digest, user, pass]
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(sock.string) do |ping|
        assert_equal(ping.size, 6)
        assert_equal(ping[0], 'PING')
        assert_equal(ping[1], hostname)
        assert(ping[2].is_a?(String)) # content is hashed value
        assert(ping[3].is_a?(String)) # content is hashed value
        # No auth requested: username/password fields stay empty.
        assert_equal(ping[4], '')
        assert_equal(ping[5], '')
      end
    end

    test 'returns PING message with username if auth exists' do
      hostname = 'hostname'
      username = 'username'
      pass = 'pass'
      handshake = Fluent::Plugin::ForwardOutput::HandshakeProtocol.new(log: $log, hostname: hostname, shared_key: 'shared_key', password: pass, username: username)
      ri = Fluent::Plugin::ForwardOutput::ConnectionManager::RequestInfo.new(:helo)
      sock = StringIO.new('')
      handshake.invoke(sock, ri, ['HELO', { 'auth' => 'auth' }])

      assert_equal(ri.state, :pingpong)
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(sock.string) do |ping|
        assert_equal(ping.size, 6)
        assert_equal(ping[0], 'PING')
        assert_equal(ping[1], hostname)
        assert(ping[2].is_a?(String)) # content is hashed value
        assert(ping[3].is_a?(String)) # content is hashed value
        assert_equal(ping[4], username)
        assert_not_equal(ping[5], pass) # should be hashed
      end
    end

    data(
      lack_of_elem: ['HELO'],
      wrong_message: ['HELLO!', {}],
    )
    test 'raises an error when message is' do |msg|
      handshake = Fluent::Plugin::ForwardOutput::HandshakeProtocol.new(log: $log, hostname: 'hostname', shared_key: 'shared_key', password: nil, username: nil)
      ri = Fluent::Plugin::ForwardOutput::ConnectionManager::RequestInfo.new(:helo)
      sock = StringIO.new('')
      assert_raise(Fluent::Plugin::ForwardOutput::HeloError) do
        handshake.invoke(sock, ri, msg)
      end

      # A failed HELO must leave the state machine where it was.
      assert_equal(ri.state, :helo)
    end
  end

  sub_test_case '#invoke when pingpong state' do
    test 'validates PONG and changes state to established' do
      handshake = Fluent::Plugin::ForwardOutput::HandshakeProtocol.new(log: $log, hostname: 'hostname', shared_key: 'shared_key', password: nil, username: nil)
      handshake.instance_variable_set(:@shared_key_salt, 'ce1897b0d3dbd76b90d7fb96010dcac3') # to fix salt
      ri = Fluent::Plugin::ForwardOutput::ConnectionManager::RequestInfo.new(:pingpong, '', '')
      handshake.invoke(
        '',
        ri,
        # 40a3.... = Digest::SHA512.new.update('ce1897b0d3dbd76b90d7fb96010dcac3').update('client_hostname').update('').update('shared_key').hexdigest
        ['PONG', true, '', 'client_hostname', '40a3c5943cc6256e0c5dcf176e97db3826b0909698c330dc8e53d15af63efb47e030d113130255dd6e7ced5176d2999cc2e02a44852d45152503af317b73b33f']
      )

      assert_equal(ri.state, :established)
    end

    test 'raises an error when password and username are nil if auth exists' do
      handshake = Fluent::Plugin::ForwardOutput::HandshakeProtocol.new(log: $log, hostname: 'hostname', shared_key: 'shared_key', password: nil, username: nil)
      ri = Fluent::Plugin::ForwardOutput::ConnectionManager::RequestInfo.new(:helo)
      assert_raise(Fluent::Plugin::ForwardOutput::PingpongError.new('username and password are required')) do
        handshake.invoke('', ri, ['HELO', { 'auth' => 'auth' }])
      end
    end

    data(
      lack_of_elem: ['PONG', true, '', 'client_hostname'],
      wrong_message: ['WRONG_PONG', true, '', 'client_hostname', '40a3c5943cc6256e0c5dcf176e97db3826b0909698c330dc8e53d15af63efb47e030d113130255dd6e7ced5176d2999cc2e02a44852d45152503af317b73b33f'],
      error_by_server: ['PONG', false, 'error', 'client_hostname', '40a3c5943cc6256e0c5dcf176e97db3826b0909698c330dc8e53d15af63efb47e030d113130255dd6e7ced5176d2999cc2e02a44852d45152503af317b73b33f'],
      same_hostname_as_server: ['PONG', true, '', 'hostname', '40a3c5943cc6256e0c5dcf176e97db3826b0909698c330dc8e53d15af63efb47e030d113130255dd6e7ced5176d2999cc2e02a44852d45152503af317b73b33f'],
      wrong_key: ['PONG', true, '', 'hostname', 'wrong_key'],
    )
    test 'raises an error when message is' do |msg|
      handshake = Fluent::Plugin::ForwardOutput::HandshakeProtocol.new(log: $log, hostname: 'hostname', shared_key: 'shared_key', password: '', username: '')
      handshake.instance_variable_set(:@shared_key_salt, 'ce1897b0d3dbd76b90d7fb96010dcac3') # to fix salt
      ri = Fluent::Plugin::ForwardOutput::ConnectionManager::RequestInfo.new(:pingpong, '', '')
      assert_raise(Fluent::Plugin::ForwardOutput::PingpongError) do
        handshake.invoke('', ri, msg)
      end

      # A failed PONG must leave the state machine in :pingpong.
      assert_equal(ri.state, :pingpong)
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/out_forward/test_socket_cache.rb | test/plugin/out_forward/test_socket_cache.rb | require_relative '../../helper'
require 'fluent/plugin/out_forward/socket_cache'
require 'timecop'
# Tests for SocketCache: checkout/checkin lifecycle, revocation, clearing
# and time-based purging (the clock is driven with Timecop).
class SocketCacheTest < Test::Unit::TestCase
  sub_test_case 'checkout_or' do
    test 'when given key does not exist' do
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      # Cache miss: the block is invoked to open a new socket.
      sock = mock!.open { 'socket' }.subject
      assert_equal('socket', c.checkout_or('key') { sock.open })
    end

    test 'when given key exists' do
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      socket = 'socket'
      assert_equal(socket, c.checkout_or('key') { socket })
      c.checkin(socket)

      # Cache hit after checkin: the open block must never run.
      sock = mock!.open.never.subject
      assert_equal(socket, c.checkout_or('key') { sock.open })
    end

    test 'when given key exists but used by other' do
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      assert_equal('sock', c.checkout_or('key') { 'sock' })

      # The first socket is still checked out, so a second checkout opens a new one.
      new_sock = 'new sock'
      sock = mock!.open { new_sock }.subject
      assert_equal(new_sock, c.checkout_or('key') { sock.open })
    end

    test "when given key's value was expired" do
      # Timeout 0: the cached socket is already expired at the next checkout.
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(0, $log)
      assert_equal('sock', c.checkout_or('key') { 'sock' })

      new_sock = 'new sock'
      sock = mock!.open { new_sock }.subject
      assert_equal(new_sock, c.checkout_or('key') { sock.open })
    end

    test 'reuse same hash object after calling purge_obsolete_socks' do
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      c.checkout_or('key') { 'socket' }
      c.purge_obsolete_socks

      assert_nothing_raised(NoMethodError) do
        c.checkout_or('key') { 'new socket' }
      end
    end
  end

  sub_test_case 'checkin' do
    test 'when value exists' do
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      socket = 'socket'
      c.checkout_or('key') { socket }
      c.checkin(socket)

      # Checkin moves the socket from inflight back to available.
      assert_equal(socket, c.instance_variable_get(:@available_sockets)['key'].first.sock)
      assert_equal(1, c.instance_variable_get(:@available_sockets)['key'].size)
      assert_equal(0, c.instance_variable_get(:@inflight_sockets).size)
    end

    test 'when value does not exist' do
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      c.checkout_or('key') { 'sock' }
      # Checking in an unknown object is a no-op; the real socket stays inflight.
      c.checkin('other sock')

      assert_equal(0, c.instance_variable_get(:@available_sockets)['key'].size)
      assert_equal(1, c.instance_variable_get(:@inflight_sockets).size)
    end
  end

  test 'revoke' do
    c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
    socket = 'socket'
    c.checkout_or('key') { socket }
    # Revoke parks the socket in the inactive list; the next checkout reopens.
    c.revoke(socket)

    assert_equal(1, c.instance_variable_get(:@inactive_sockets).size)
    assert_equal(0, c.instance_variable_get(:@inflight_sockets).size)
    assert_equal(0, c.instance_variable_get(:@available_sockets)['key'].size)

    sock = mock!.open { 1 }.subject
    assert_equal(1, c.checkout_or('key') { sock.open })
  end

  sub_test_case 'clear' do
    test 'when value is in available_sockets' do
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      # Put one socket into each bucket (inactive, available, inflight);
      # clear must close every one of them.
      m = mock!.close { 'closed' }.subject
      m2 = mock!.close { 'closed' }.subject
      m3 = mock!.close { 'closed' }.subject
      c.checkout_or('key') { m }
      c.revoke(m)
      c.checkout_or('key') { m2 }
      c.checkin(m2)
      c.checkout_or('key2') { m3 }

      assert_equal(1, c.instance_variable_get(:@inflight_sockets).size)
      assert_equal(1, c.instance_variable_get(:@available_sockets)['key'].size)
      assert_equal(1, c.instance_variable_get(:@inactive_sockets).size)

      c.clear

      assert_equal(0, c.instance_variable_get(:@inflight_sockets).size)
      assert_equal(0, c.instance_variable_get(:@available_sockets)['key'].size)
      assert_equal(0, c.instance_variable_get(:@inactive_sockets).size)
    end
  end

  sub_test_case 'purge_obsolete_socks' do
    def teardown
      # Undo any Timecop.freeze from the tests below.
      Timecop.return
    end

    test 'delete key in inactive_socks' do
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      sock = mock!.close { 'closed' }.subject
      c.checkout_or('key') { sock }
      c.revoke(sock)
      assert_false(c.instance_variable_get(:@inactive_sockets).empty?)

      c.purge_obsolete_socks
      assert_true(c.instance_variable_get(:@inactive_sockets).empty?)
    end

    test 'move key from available_sockets to inactive_sockets' do
      Timecop.freeze(Time.parse('2016-04-13 14:00:00 +0900'))
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      sock = mock!.close { 'closed' }.subject
      sock2 = mock!.close.never.subject
      stub(sock).inspect
      stub(sock2).inspect
      c.checkout_or('key') { sock }
      c.checkin(sock)

      # wait timeout
      Timecop.freeze(Time.parse('2016-04-13 14:00:11 +0900'))
      c.checkout_or('key') { sock2 }
      assert_equal(1, c.instance_variable_get(:@inflight_sockets).size)
      assert_equal(sock2, c.instance_variable_get(:@inflight_sockets).values.first.sock)

      c.purge_obsolete_socks
      # The inflight socket (sock2) must survive the purge.
      assert_equal(1, c.instance_variable_get(:@inflight_sockets).size)
      assert_equal(sock2, c.instance_variable_get(:@inflight_sockets).values.first.sock)
    end

    test 'should not purge just after checkin and purge after timeout' do
      Timecop.freeze(Time.parse('2016-04-13 14:00:00 +0900'))
      c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
      sock = mock!.close.never.subject
      stub(sock).inspect
      c.checkout_or('key') { sock }

      # The expiry clock restarts at checkin time, not checkout time.
      Timecop.freeze(Time.parse('2016-04-13 14:00:11 +0900'))
      c.checkin(sock)

      assert_equal(1, c.instance_variable_get(:@available_sockets).size)
      c.purge_obsolete_socks
      assert_equal(1, c.instance_variable_get(:@available_sockets).size)

      Timecop.freeze(Time.parse('2016-04-13 14:00:22 +0900'))
      assert_equal(1, c.instance_variable_get(:@available_sockets).size)
      c.purge_obsolete_socks
      assert_equal(0, c.instance_variable_get(:@available_sockets).size)
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/out_forward/test_ack_handler.rb | test/plugin/out_forward/test_ack_handler.rb | require_relative '../../helper'
require 'fluent/test/driver/output'
require 'flexmock/test_unit'
require 'fluent/plugin/out_forward'
require 'fluent/plugin/out_forward/ack_handler'
# Tests for AckHandler, which matches ACK responses read from peer
# sockets against the chunk ids of in-flight requests.
class AckHandlerTest < Test::Unit::TestCase
  data(
    'chunk_id is matched' => [MessagePack.pack({ 'ack' => Base64.encode64('chunk_id 111') }), Fluent::Plugin::ForwardOutput::AckHandler::Result::SUCCESS],
    'chunk_id is not matched' => [MessagePack.pack({ 'ack' => 'unmatched' }), Fluent::Plugin::ForwardOutput::AckHandler::Result::CHUNKID_UNMATCHED],
    'chunk_id is empty' => ['', Fluent::Plugin::ForwardOutput::AckHandler::Result::FAILED],
  )
  test 'returns chunk_id, node, sock and result status' do |args|
    received_payload, expected_result = args
    ack_handler = Fluent::Plugin::ForwardOutput::AckHandler.new(timeout: 10, log: $log, read_length: 100)

    node = flexmock('node', host: '127.0.0.1', port: '1000') # for log
    chunk_id = 'chunk_id 111'
    ack = ack_handler.create_ack(chunk_id, node)

    reader, writer = IO.pipe
    begin
      writer.write(chunk_id)
      mock(reader).recv(anything) { |_| received_payload } # IO does not have recv
      ack.enqueue(reader)

      actual_chunk_id = actual_node = actual_sock = actual_result = nil
      ack_handler.collect_response(1) do |cid, n, s, ret|
        # This block is rescued by ack_handler so it needs to invoke assertion outside of this block
        actual_chunk_id = cid
        actual_node = n
        actual_sock = s
        actual_result = ret
      end

      assert_equal chunk_id, actual_chunk_id
      assert_equal node, actual_node
      assert_equal reader, actual_sock
      assert_equal expected_result, actual_result
    ensure
      reader.close rescue nil
      writer.close rescue nil
    end
  end

  test 'returns nil if raise an error' do
    ack_handler = Fluent::Plugin::ForwardOutput::AckHandler.new(timeout: 10, log: $log, read_length: 100)

    node = flexmock('node', host: '127.0.0.1', port: '1000') # for log
    chunk_id = 'chunk_id 111'
    ack = ack_handler.create_ack(chunk_id, node)

    reader, writer = IO.pipe
    begin
      writer.write(chunk_id)
      mock(reader).recv(anything) { |_| raise 'unexpected error' } # IO does not have recv
      ack.enqueue(reader)

      actual_chunk_id = actual_node = actual_sock = actual_result = nil
      ack_handler.collect_response(1) do |cid, n, s, ret|
        # This block is rescued by ack_handler so it needs to invoke assertion outside of this block
        actual_chunk_id = cid
        actual_node = n
        actual_sock = s
        actual_result = ret
      end

      # On an internal error only the FAILED status is reported.
      assert_nil actual_chunk_id
      assert_nil actual_node
      assert_nil actual_sock
      assert_equal Fluent::Plugin::ForwardOutput::AckHandler::Result::FAILED, actual_result
    ensure
      reader.close rescue nil
      writer.close rescue nil
    end
  end

  test 'when ack is expired' do
    # timeout: 0 expires the ack immediately; the socket must not be read.
    ack_handler = Fluent::Plugin::ForwardOutput::AckHandler.new(timeout: 0, log: $log, read_length: 100)

    node = flexmock('node', host: '127.0.0.1', port: '1000') # for log
    chunk_id = 'chunk_id 111'
    ack = ack_handler.create_ack(chunk_id, node)

    reader, writer = IO.pipe
    begin
      writer.write(chunk_id)
      mock(reader).recv(anything).never
      ack.enqueue(reader)

      actual_chunk_id = actual_node = actual_sock = actual_result = nil
      ack_handler.collect_response(1) do |cid, n, s, ret|
        # This block is rescued by ack_handler so it needs to invoke assertion outside of this block
        actual_chunk_id = cid
        actual_node = n
        actual_sock = s
        actual_result = ret
      end

      assert_equal chunk_id, actual_chunk_id
      assert_equal node, actual_node
      assert_equal reader, actual_sock
      assert_equal Fluent::Plugin::ForwardOutput::AckHandler::Result::FAILED, actual_result
    ensure
      reader.close rescue nil
      writer.close rescue nil
    end
  end

  # ForwardOutput uses AckHandler in multiple threads, so we need to assume this case.
  # If exclusive control for this case is implemented, this test may not be necessary.
  test 'raises no error when another thread closes a socket' do
    ack_handler = Fluent::Plugin::ForwardOutput::AckHandler.new(timeout: 10, log: $log, read_length: 100)

    node = flexmock('node', host: '127.0.0.1', port: '1000') # for log
    chunk_id = 'chunk_id 111'
    ack = ack_handler.create_ack(chunk_id, node)

    reader, writer = IO.pipe
    begin
      writer.write(chunk_id)
      def reader.recv(arg)
        sleep(1) # To ensure that multiple threads select the socket before closing.
        raise IOError, 'stream closed in another thread' if self.closed?
        MessagePack.pack({ 'ack' => Base64.encode64('chunk_id 111') })
      end
      ack.enqueue(reader)

      workers = Array.new(2) do
        Thread.new do
          ack_handler.collect_response(1) do |_cid, _n, s, _ret|
            s&.close
          end
        end
      end

      assert_true workers.map { |t| t.join(10) }.all?
      assert_false(
        $log.out.logs.any? { |log| log.include?('[error]') },
        $log.out.logs.select { |log| log.include?('[error]') }.join('\n')
      )
    ensure
      reader.close rescue nil
      writer.close rescue nil
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/in_tail/test_position_file.rb | test/plugin/in_tail/test_position_file.rb | require_relative '../../helper'
require 'fluent/plugin/in_tail/position_file'
require 'fluent/plugin/in_tail'
require 'fileutils'
require 'tempfile'
class IntailPositionFileTest < Test::Unit::TestCase
# Runs each test inside Tempfile.create, so @file is a fresh binmode'd
# temporary file that is removed automatically when the test finishes.
# (test-unit allows setup to yield to the test body like this.)
def setup
  Tempfile.create('intail_position_file_test') do |file|
    file.binmode
    @file = file
    yield
  end
end
# Position value marking an entry as unwatched, as a 16-digit hex string.
UNWATCHED_STR = '%016x' % Fluent::Plugin::TailInput::PositionFile::UNWATCHED_POSITION
# Fixture file body (format per line: path \t pos(hex16) \t inode(hex)):
# a valid line, a line with a short (32bit-style) inode field, a malformed
# line, and an unwatched line.
TEST_CONTENT = <<~EOF
  valid_path\t0000000000000002\t0000000000000001
  inode23bit\t0000000000000000\t00000000
  invalidpath100000000000000000000000000000000
  unwatched\t#{UNWATCHED_STR}\t0000000000000000
EOF
# Existing targets keyed by path, matching the fixture lines above.
TEST_CONTENT_PATHS = {
  "valid_path" => Fluent::Plugin::TailInput::TargetInfo.new("valid_path", 1),
  "inode23bit" => Fluent::Plugin::TailInput::TargetInfo.new("inode23bit", 0),
}
# The same targets keyed by inode, for follow_inodes mode.
TEST_CONTENT_INODES = {
  1 => Fluent::Plugin::TailInput::TargetInfo.new("valid_path", 1),
  0 => Fluent::Plugin::TailInput::TargetInfo.new("inode23bit", 0),
}
# Seeds +f+ with +content+ and rewinds the IO so subsequent reads (or
# PositionFile loading) start from the beginning of the file.
def write_data(f, content)
  f << content
  f.seek(0)
end
# Yields once with follow_inodes enabled and once with it disabled, so a
# test body can be exercised under both modes.
def follow_inodes_block
  [true, false].each { |follow_inodes| yield(follow_inodes) }
end
# .load should drop the malformed and unwatched fixture entries and
# rewrite the surviving ones (normalizing the short inode field to
# 16 hex digits).
test '.load' do
  write_data(@file, TEST_CONTENT)
  Fluent::Plugin::TailInput::PositionFile.load(@file, false, TEST_CONTENT_PATHS, **{logger: $log})

  @file.seek(0)
  lines = @file.readlines
  assert_equal 2, lines.size
  assert_equal "valid_path\t0000000000000002\t0000000000000001\n", lines[0]
  assert_equal "inode23bit\t0000000000000000\t0000000000000000\n", lines[1]
end
sub_test_case '#try_compact' do
  test 'compact invalid and convert 32 bit inode value' do
    write_data(@file, TEST_CONTENT)
    Fluent::Plugin::TailInput::PositionFile.new(@file, false, **{logger: $log}).try_compact

    @file.seek(0)
    lines = @file.readlines
    assert_equal 2, lines.size
    assert_equal "valid_path\t0000000000000002\t0000000000000001\n", lines[0]
    assert_equal "inode23bit\t0000000000000000\t0000000000000000\n", lines[1]
  end

  test 'compact data if duplicated line' do
    write_data(@file, <<~EOF)
      valid_path\t0000000000000002\t0000000000000001
      valid_path\t0000000000000003\t0000000000000004
    EOF
    Fluent::Plugin::TailInput::PositionFile.new(@file, false, **{logger: $log}).try_compact

    @file.seek(0)
    lines = @file.readlines
    # The later entry for the same path wins.
    assert_equal "valid_path\t0000000000000003\t0000000000000004\n", lines[0]
  end

  test 'does not change when the file is changed' do
    write_data(@file, TEST_CONTENT)
    pf = Fluent::Plugin::TailInput::PositionFile.new(@file, false, **{logger: $log})

    # Simulate a concurrent append between reading the entries and
    # rewriting the file: try_compact must then leave the file untouched.
    mock.proxy(pf).fetch_compacted_entries do |r|
      @file.write("unwatched\t#{UNWATCHED_STR}\t0000000000000000\n")
      r
    end

    pf.try_compact

    @file.seek(0)
    lines = @file.readlines
    # Original 4 fixture lines + the concurrently appended line.
    assert_equal 5, lines.size
  end

  test 'update seek position of remained position entry' do
    pf = Fluent::Plugin::TailInput::PositionFile.new(@file, false, **{logger: $log})
    target_info1 = Fluent::Plugin::TailInput::TargetInfo.new('path1', -1)
    target_info2 = Fluent::Plugin::TailInput::TargetInfo.new('path2', -1)
    target_info3 = Fluent::Plugin::TailInput::TargetInfo.new('path3', -1)
    pf[target_info1]
    pf[target_info2]
    pf[target_info3]

    target_info1_2 = Fluent::Plugin::TailInput::TargetInfo.new('path1', 1234)
    pf.unwatch(target_info1_2)

    pf.try_compact

    @file.seek(0)
    lines = @file.readlines
    assert_equal "path2\t0000000000000000\t0000000000000000\n", lines[0]
    assert_equal "path3\t0000000000000000\t0000000000000000\n", lines[1]
    assert_equal 2, lines.size

    # After compaction the surviving entries must still write to their
    # (shifted) file offsets correctly.
    target_info2_2 = Fluent::Plugin::TailInput::TargetInfo.new('path2', 1235)
    target_info3_2 = Fluent::Plugin::TailInput::TargetInfo.new('path3', 1236)
    pf.unwatch(target_info2_2)
    pf.unwatch(target_info3_2)

    @file.seek(0)
    lines = @file.readlines
    assert_equal "path2\t#{UNWATCHED_STR}\t0000000000000000\n", lines[0]
    assert_equal "path3\t#{UNWATCHED_STR}\t0000000000000000\n", lines[1]
    assert_equal 2, lines.size
  end

  test 'should ignore initial existing files on follow_inode' do
    write_data(@file, TEST_CONTENT)
    # follow_inodes = true: loading discards all pre-existing entries.
    pos_file = Fluent::Plugin::TailInput::PositionFile.load(@file, true, TEST_CONTENT_PATHS, **{logger: $log})
    @file.seek(0)
    assert_equal([], @file.readlines)

    @file.seek(0)
    write_data(@file, TEST_CONTENT)
    pos_file.try_compact

    @file.seek(0)
    assert_equal([
                   "valid_path\t0000000000000002\t0000000000000001\n",
                   "inode23bit\t0000000000000000\t0000000000000000\n",
                 ],
                 @file.readlines)
  end
end
sub_test_case '#load' do
  test 'compact invalid and convert 32 bit inode value' do
    write_data(@file, TEST_CONTENT)
    Fluent::Plugin::TailInput::PositionFile.load(@file, false, TEST_CONTENT_PATHS, **{logger: $log})

    @file.seek(0)
    lines = @file.readlines
    assert_equal 2, lines.size
    assert_equal "valid_path\t0000000000000002\t0000000000000001\n", lines[0]
    assert_equal "inode23bit\t0000000000000000\t0000000000000000\n", lines[1]
  end

  test 'compact deleted paths' do
    write_data(@file, TEST_CONTENT)
    # Empty existing-targets hash: every entry counts as deleted.
    Fluent::Plugin::TailInput::PositionFile.load(@file, false, {}, **{logger: $log})

    @file.seek(0)
    lines = @file.readlines
    assert_equal [], lines
  end

  test 'compact data if duplicated line' do
    write_data(@file, <<~EOF)
      valid_path\t0000000000000002\t0000000000000001
      valid_path\t0000000000000003\t0000000000000004
    EOF
    Fluent::Plugin::TailInput::PositionFile.new(@file, false, **{logger: $log}).load

    @file.seek(0)
    lines = @file.readlines
    # The later entry for the same path wins.
    assert_equal "valid_path\t0000000000000003\t0000000000000004\n", lines[0]
  end
end
sub_test_case '#[]' do
  test 'return entry' do
    write_data(@file, TEST_CONTENT)
    pf = Fluent::Plugin::TailInput::PositionFile.load(@file, false, TEST_CONTENT_PATHS, **{logger: $log})

    # Looking up a known path returns its persisted entry.
    known_target = Fluent::Plugin::TailInput::TargetInfo.new('valid_path', File.stat(@file).ino)
    entry = pf[known_target]
    assert_equal Fluent::Plugin::TailInput::FilePositionEntry, entry.class
    assert_equal 2, entry.read_pos
    assert_equal 1, entry.read_inode

    @file.seek(0)
    assert_equal 2, @file.readlines.length

    # Looking up an unknown path allocates a zeroed entry and appends it
    # to the position file.
    unknown_target = Fluent::Plugin::TailInput::TargetInfo.new('nonexist_path', -1)
    entry = pf[unknown_target]
    assert_equal Fluent::Plugin::TailInput::FilePositionEntry, entry.class
    assert_equal 0, entry.read_pos
    assert_equal 0, entry.read_inode

    @file.seek(0)
    appended = @file.readlines
    assert_equal 3, appended.length
    assert_equal "nonexist_path\t0000000000000000\t0000000000000000\n", appended[2]
  end

  test 'does not change other value position if other entry try to write' do
    write_data(@file, TEST_CONTENT)
    pf = Fluent::Plugin::TailInput::PositionFile.load(@file, false, {}, logger: $log)

    missing_target = Fluent::Plugin::TailInput::TargetInfo.new('nonexist_path', -1)
    entry = pf[missing_target]
    assert_equal 0, entry.read_inode
    assert_equal 0, entry.read_pos

    # Updating a different path's entry must leave this one untouched.
    existing_target = Fluent::Plugin::TailInput::TargetInfo.new('valid_path', File.stat(@file).ino)
    pf[existing_target].update(1, 2)
    entry = pf[missing_target]
    assert_equal 0, entry.read_inode
    assert_equal 0, entry.read_pos

    # Updating this path's own entry is reflected on re-read.
    pf[missing_target].update(1, 2)
    assert_equal 1, entry.read_inode
    assert_equal 2, entry.read_pos
  end
end
sub_test_case '#unwatch' do
  test 'unwatch entry by path' do
    write_data(@file, TEST_CONTENT)
    pf = Fluent::Plugin::TailInput::PositionFile.load(@file, false, {}, logger: $log)

    inode1 = File.stat(@file).ino
    target_info1 = Fluent::Plugin::TailInput::TargetInfo.new('valid_path', inode1)
    p1 = pf[target_info1]
    assert_equal Fluent::Plugin::TailInput::FilePositionEntry, p1.class

    pf.unwatch(target_info1)
    # FIX: test-unit's assert_equal takes (expected, actual); the original
    # had the arguments reversed, which produces a misleading failure message.
    assert_equal Fluent::Plugin::TailInput::PositionFile::UNWATCHED_POSITION, p1.read_pos

    inode2 = File.stat(@file).ino
    target_info2 = Fluent::Plugin::TailInput::TargetInfo.new('valid_path', inode2)
    p2 = pf[target_info2]
    assert_equal Fluent::Plugin::TailInput::FilePositionEntry, p2.class
    # Re-watching the same path yields a fresh entry, not the unwatched one.
    assert_not_equal p1, p2
  end

  test 'unwatch entries by inode' do
    write_data(@file, TEST_CONTENT)
    pf = Fluent::Plugin::TailInput::PositionFile.load(@file, true, TEST_CONTENT_INODES, logger: $log)

    # Keep only inode 1 as "existing"; everything else should be unwatched.
    # (Unused block param underscore-prefixed to silence lint warnings.)
    existing_targets = TEST_CONTENT_INODES.select do |inode, _target_info|
      inode == 1
    end
    pe_to_unwatch = pf[TEST_CONTENT_INODES[0]]

    pf.unwatch_removed_targets(existing_targets)

    assert_equal(
      {
        map_keys: [TEST_CONTENT_INODES[1].ino],
        unwatched_pe_pos: Fluent::Plugin::TailInput::PositionFile::UNWATCHED_POSITION,
      },
      {
        map_keys: pf.instance_variable_get(:@map).keys,
        unwatched_pe_pos: pe_to_unwatch.read_pos,
      }
    )

    # Fetching the unwatched inode again allocates a new entry.
    unwatched_pe_retaken = pf[TEST_CONTENT_INODES[0]]
    assert_not_equal pe_to_unwatch, unwatched_pe_retaken
  end
end
sub_test_case 'FilePositionEntry' do
  FILE_POS_CONTENT = <<~EOF
    valid_path\t0000000000000002\t0000000000000001
    valid_path2\t0000000000000003\t0000000000000002
  EOF

  # Parse the position-file lines currently in +file+ into a Hash mapping
  # each path to a FilePositionEntry bound to that line's position field.
  def build_files(file)
    entries = {}
    file.each_line do |line|
      fields = /^([^\t]+)\t([0-9a-fA-F]+)\t([0-9a-fA-F]+)/.match(line)
      path = fields[1]
      pos = fields[2].to_i(16)
      ino = fields[3].to_i(16)
      # Byte offset of this line's position field: rewind from the current
      # file position to the line start, then skip the path and its tab.
      seek = file.pos - line.bytesize + path.bytesize + 1
      entries[path] = Fluent::Plugin::TailInput::FilePositionEntry.new(@file, Mutex.new, seek, pos, ino)
    end
    entries
  end

  test '#update' do
    write_data(@file, FILE_POS_CONTENT)
    entry = build_files(@file)['valid_path']
    entry.update(11, 10)

    @file.seek(0)
    written = @file.readlines
    # Only the targeted line changes; its neighbor is untouched.
    assert_equal 2, written.length
    assert_equal "valid_path\t000000000000000a\t000000000000000b\n", written[0]
    assert_equal "valid_path2\t0000000000000003\t0000000000000002\n", written[1]
  end

  test '#update_pos' do
    write_data(@file, FILE_POS_CONTENT)
    entry = build_files(@file)['valid_path']
    entry.update_pos(10)

    @file.seek(0)
    written = @file.readlines
    # Position is rewritten; the inode field keeps its old value.
    assert_equal 2, written.length
    assert_equal "valid_path\t000000000000000a\t0000000000000001\n", written[0]
    assert_equal "valid_path2\t0000000000000003\t0000000000000002\n", written[1]
  end

  test '#read_pos' do
    write_data(@file, FILE_POS_CONTENT)
    entry = build_files(@file)['valid_path']

    assert_equal 2, entry.read_pos
    entry.update_pos(10)
    assert_equal 10, entry.read_pos
    entry.update(2, 11)
    assert_equal 11, entry.read_pos
  end

  test '#read_inode' do
    write_data(@file, FILE_POS_CONTENT)
    entry = build_files(@file)['valid_path']

    assert_equal 1, entry.read_inode
    # update_pos must not touch the inode; update(ino, pos) does.
    entry.update_pos(10)
    assert_equal 1, entry.read_inode
    entry.update(2, 11)
    assert_equal 2, entry.read_inode
  end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
# NOTE(review): the following three lines were web-extraction residue (dataset
# viewer UI boilerplate), not part of this Ruby source file. Commented out so
# the file stays parseable.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.