repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/in_tail/test_fifo.rb | test/plugin/in_tail/test_fifo.rb | require_relative '../../helper'
require 'fluent/plugin/in_tail'
class IntailFIFO < Test::Unit::TestCase
sub_test_case '#read_line' do
test 'returns lines splitting per `\n`' do
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::ASCII_8BIT, $log)
text = ("test\n" * 3).force_encoding(Encoding::ASCII_8BIT)
fifo << text
lines = []
fifo.read_lines(lines)
assert_equal Encoding::ASCII_8BIT, lines[0].encoding
assert_equal ["test\n", "test\n", "test\n"], lines
end
test 'concat line when line is separated' do
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::ASCII_8BIT, $log)
text = ("test\n" * 3 + 'test').force_encoding(Encoding::ASCII_8BIT)
fifo << text
lines = []
fifo.read_lines(lines)
assert_equal Encoding::ASCII_8BIT, lines[0].encoding
assert_equal ["test\n", "test\n", "test\n"], lines
fifo << "2\n"
fifo.read_lines(lines)
assert_equal Encoding::ASCII_8BIT, lines[0].encoding
assert_equal ["test\n", "test\n", "test\n", "test2\n"], lines
end
test 'returns lines which convert encoding' do
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::ASCII_8BIT, $log, nil, Encoding::UTF_8)
text = ("test\n" * 3).force_encoding(Encoding::ASCII_8BIT)
fifo << text
lines = []
fifo.read_lines(lines)
assert_equal Encoding::UTF_8, lines[0].encoding
assert_equal ["test\n", "test\n", "test\n"], lines
end
test 'reads lines as from_encoding' do
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::UTF_8, $log, nil, Encoding::ASCII_8BIT)
text = ("test\n" * 3).force_encoding(Encoding::UTF_8)
fifo << text
lines = []
fifo.read_lines(lines)
assert_equal Encoding::ASCII_8BIT, lines[0].encoding
assert_equal ["test\n", "test\n", "test\n"], lines
end
sub_test_case 'when it includes multi byte chars' do
test 'handles it as ascii_8bit' do
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::ASCII_8BIT, $log)
text = ("てすと\n" * 3).force_encoding(Encoding::ASCII_8BIT)
fifo << text
lines = []
fifo.read_lines(lines)
assert_equal Encoding::ASCII_8BIT, lines[0].encoding
assert_equal ["てすと\n", "てすと\n", "てすと\n"].map { |e| e.force_encoding(Encoding::ASCII_8BIT) }, lines
end
test 'replaces character with ? when convert error happens' do
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::UTF_8, $log, nil, Encoding::ASCII_8BIT)
text = ("てすと\n" * 3).force_encoding(Encoding::UTF_8)
fifo << text
lines = []
fifo.read_lines(lines)
assert_equal Encoding::ASCII_8BIT, lines[0].encoding
assert_equal ["???\n", "???\n", "???\n"].map { |e| e.force_encoding(Encoding::ASCII_8BIT) }, lines
end
end
test 'returns nothing when buffer is empty' do
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::ASCII_8BIT, $log)
lines = []
fifo.read_lines(lines)
assert_equal [], lines
text = "test\n" * 3
fifo << text
fifo.read_lines(lines)
assert_equal ["test\n", "test\n", "test\n"], lines
lines = []
fifo.read_lines(lines)
assert_equal [], lines
end
data('bigger than max_line_size', [
["test test test\n" * 3],
[],
])
data('less than or equal to max_line_size', [
["test\n" * 2],
["test\n", "test\n"],
])
data('mix', [
["test test test\ntest\ntest test test\ntest\ntest test test\n"],
["test\n", "test\n"],
])
data('mix and multiple', [
[
"test test test\ntest\n",
"test",
" test test\nt",
"est\nt"
],
["test\n", "test\n"],
])
data('remaining data bigger than max_line_size should be discarded', [
[
"test\nlong line still not having EOL",
"following texts to the previous long line\ntest\n",
],
["test\n", "test\n"],
])
test 'return lines only that size is less than or equal to max_line_size' do |(input_texts, expected)|
max_line_size = 5
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::ASCII_8BIT, $log, max_line_size)
lines = []
input_texts.each do |text|
fifo << text.force_encoding(Encoding::ASCII_8BIT)
fifo.read_lines(lines)
# The size of remaining buffer (i.e. a line still not having EOL) must not exceed max_line_size.
assert { fifo.buffer.bytesize <= max_line_size }
end
assert_equal expected, lines
end
end
sub_test_case '#<<' do
test 'does not make any change about encoding to an argument' do
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::ASCII_8BIT, $log)
text = ("test\n" * 3).force_encoding(Encoding::UTF_8)
assert_equal Encoding::UTF_8, text.encoding
fifo << text
assert_equal Encoding::UTF_8, text.encoding
end
end
sub_test_case '#reading_bytesize' do
test 'returns buffer size' do
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::ASCII_8BIT, $log)
text = "test\n" * 3 + 'test'
fifo << text
assert_equal text.bytesize, fifo.reading_bytesize
lines = []
fifo.read_lines(lines)
assert_equal ["test\n", "test\n", "test\n"], lines
assert_equal 'test'.bytesize, fifo.reading_bytesize
fifo << "2\n"
fifo.read_lines(lines)
assert_equal ["test\n", "test\n", "test\n", "test2\n"], lines
assert_equal 0, fifo.reading_bytesize
end
test 'returns the entire line size even if the size is over max_line_size' do
max_line_size = 20
fifo = Fluent::Plugin::TailInput::TailWatcher::FIFO.new(Encoding::ASCII_8BIT, $log, max_line_size)
lines = []
text = "long line still not having EOL"
fifo << text
fifo.read_lines(lines)
assert_equal [], lines
assert_equal 0, fifo.buffer.bytesize
assert_equal text.bytesize, fifo.reading_bytesize
text2 = " following texts"
fifo << text2
fifo.read_lines(lines)
assert_equal [], lines
assert_equal 0, fifo.buffer.bytesize
assert_equal text.bytesize + text2.bytesize, fifo.reading_bytesize
text3 = " end of the line\n"
fifo << text3
fifo.read_lines(lines)
assert_equal [], lines
assert_equal 0, fifo.buffer.bytesize
assert_equal 0, fifo.reading_bytesize
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin/in_tail/test_io_handler.rb | test/plugin/in_tail/test_io_handler.rb | require_relative '../../helper'
require 'fluent/plugin/in_tail'
require 'fluent/plugin/metrics_local'
require 'tempfile'
class IntailIOHandlerTest < Test::Unit::TestCase
def setup
Tempfile.create('intail_io_handler') do |file|
file.binmode
@file = file
opened_file_metrics = Fluent::Plugin::LocalMetrics.new
opened_file_metrics.configure(config_element('metrics', '', {}))
closed_file_metrics = Fluent::Plugin::LocalMetrics.new
closed_file_metrics.configure(config_element('metrics', '', {}))
rotated_file_metrics = Fluent::Plugin::LocalMetrics.new
rotated_file_metrics.configure(config_element('metrics', '', {}))
throttling_metrics = Fluent::Plugin::LocalMetrics.new
throttling_metrics.configure(config_element('metrics', '', {}))
@metrics = Fluent::Plugin::TailInput::MetricsInfo.new(opened_file_metrics, closed_file_metrics, rotated_file_metrics, throttling_metrics)
yield
end
end
def create_target_info
Fluent::Plugin::TailInput::TargetInfo.new(@file.path, Fluent::FileWrapper.stat(@file.path).ino)
end
def create_watcher
Fluent::Plugin::TailInput::TailWatcher.new(create_target_info, nil, nil, nil, nil, nil, nil, nil, nil)
end
test '#on_notify load file content and passed it to receive_lines method' do
text = "this line is test\ntest line is test\n"
@file.write(text)
@file.close
watcher = create_watcher
update_pos = 0
stub(watcher).pe do
pe = 'position_file'
stub(pe).read_pos { 0 }
stub(pe).update_pos { |val| update_pos = val }
pe
end
returned_lines = ''
r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 100, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: false, metrics: @metrics) do |lines, _watcher|
returned_lines << lines.join
true
end
r.on_notify
assert_equal text.bytesize, update_pos
assert_equal text, returned_lines
r.on_notify
assert_equal text.bytesize, update_pos
assert_equal text, returned_lines
end
sub_test_case 'when open_on_every_update is true and read_pos returns always 0' do
test 'open new IO and change pos to 0 and read it' do
text = "this line is test\ntest line is test\n"
@file.write(text)
@file.close
update_pos = 0
watcher = create_watcher
stub(watcher).pe do
pe = 'position_file'
stub(pe).read_pos { 0 }
stub(pe).update_pos { |val| update_pos = val }
pe
end
returned_lines = ''
r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 100, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: true, metrics: @metrics) do |lines, _watcher|
returned_lines << lines.join
true
end
r.on_notify
assert_equal text.bytesize, update_pos
assert_equal text, returned_lines
r.on_notify
assert_equal text * 2, returned_lines
end
end
sub_test_case 'when limit is 5' do
test 'call receive_lines once when short line(less than 8192)' do
text = "line\n" * 8
@file.write(text)
@file.close
update_pos = 0
watcher = create_watcher
stub(watcher).pe do
pe = 'position_file'
stub(pe).read_pos { 0 }
stub(pe).update_pos { |val| update_pos = val }
pe
end
returned_lines = []
r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 5, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: false, metrics: @metrics) do |lines, _watcher|
returned_lines << lines.dup
true
end
r.on_notify
assert_equal text.bytesize, update_pos
assert_equal 8, returned_lines[0].size
end
test 'call receive_lines some times when long line(more than 8192)' do
t = 'line' * (8192 / 8)
text = "#{t}\n" * 8
@file.write(text)
@file.close
update_pos = 0
watcher = create_watcher
stub(watcher).pe do
pe = 'position_file'
stub(pe).read_pos { 0 }
stub(pe).update_pos { |val| update_pos = val }
pe
end
returned_lines = []
r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 5, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: false, metrics: @metrics) do |lines, _watcher|
returned_lines << lines.dup
true
end
r.on_notify
assert_equal text.bytesize, update_pos
assert_equal 5, returned_lines[0].size
assert_equal 3, returned_lines[1].size
end
end
sub_test_case 'max_line_size' do
test 'does not call receive_lines when line_size exceeds max_line_size' do
t = 'x' * (8192)
text = "#{t}\n"
max_line_size = 8192
@file.write(text)
@file.close
update_pos = 0
watcher = create_watcher
stub(watcher).pe do
pe = 'position_file'
stub(pe).read_pos {0}
stub(pe).update_pos { |val| update_pos = val }
pe
end
returned_lines = []
r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 1000, read_bytes_limit_per_second: -1, max_line_size: max_line_size, log: $log, open_on_every_update: false, metrics: @metrics) do |lines, _watcher|
returned_lines << lines.dup
true
end
r.on_notify
assert_equal text.bytesize, update_pos
assert_equal 0, returned_lines.size
end
data(
"open_on_every_update false" => false,
"open_on_every_update true" => true,
)
test 'manage pos correctly if a long line not having EOL occurs' do |open_on_every_update|
max_line_size = 20
returned_lines = []
pos = 0
watcher = create_watcher
stub(watcher).pe do
pe = 'position_file'
stub(pe).read_pos { pos }
stub(pe).update_pos { |val| pos = val }
pe
end
io_handler = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(
watcher, path: @file.path, read_lines_limit: 1000, read_bytes_limit_per_second: -1,
max_line_size: max_line_size, log: $log, open_on_every_update: open_on_every_update,
metrics: @metrics
) do |lines, _watcher|
returned_lines << lines.dup
true
end
short_line = "short line\n"
long_lines = [
"long line still not having EOL",
" end of the line\n",
]
@file.write(short_line)
@file.write(long_lines[0])
@file.flush
io_handler.on_notify
assert_equal [[short_line]], returned_lines
assert_equal short_line.bytesize, pos
@file.write(long_lines[1])
@file.flush
io_handler.on_notify
assert_equal [[short_line]], returned_lines
expected_size = short_line.bytesize + long_lines[0..1].map{|l| l.bytesize}.sum
assert_equal expected_size, pos
io_handler.close
end
data(
"open_on_every_update false" => false,
"open_on_every_update true" => true,
)
test 'discards a subsequent data in a long line even if restarting occurs between' do |open_on_every_update|
max_line_size = 20
returned_lines = []
pos = 0
watcher = create_watcher
stub(watcher).pe do
pe = 'position_file'
stub(pe).read_pos { pos }
stub(pe).update_pos { |val| pos = val }
pe
end
io_handler = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(
watcher, path: @file.path, read_lines_limit: 1000, read_bytes_limit_per_second: -1,
max_line_size: max_line_size, log: $log, open_on_every_update: open_on_every_update,
metrics: @metrics
) do |lines, _watcher|
returned_lines << lines.dup
true
end
short_line = "short line\n"
long_lines = [
"long line still not having EOL",
" end of the line\n",
]
@file.write(short_line)
@file.write(long_lines[0])
@file.flush
io_handler.on_notify
assert_equal [[short_line]], returned_lines
io_handler.close
io_handler = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(
watcher, path: @file.path, read_lines_limit: 1000, read_bytes_limit_per_second: -1,
max_line_size: max_line_size, log: $log, open_on_every_update: open_on_every_update,
metrics: @metrics
) do |lines, _watcher|
returned_lines << lines.dup
true
end
@file.write(long_lines[1])
@file.flush
io_handler.on_notify
assert_equal [[short_line]], returned_lines
io_handler.close
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/command/test_plugin_config_formatter.rb | test/command/test_plugin_config_formatter.rb | require_relative '../helper'
require 'pathname'
require 'fluent/command/plugin_config_formatter'
require 'fluent/plugin/input'
require 'fluent/plugin/output'
require 'fluent/plugin/filter'
require 'fluent/plugin/parser'
require 'fluent/plugin/formatter'
class TestFluentPluginConfigFormatter < Test::Unit::TestCase
class FakeInput < ::Fluent::Plugin::Input
::Fluent::Plugin.register_input("fake", self)
desc "path to something"
config_param :path, :string
end
class FakeOutput < ::Fluent::Plugin::Output
::Fluent::Plugin.register_output("fake", self)
desc "path to something"
config_param :path, :string
def process(tag, es)
end
end
class FakeFilter < ::Fluent::Plugin::Filter
::Fluent::Plugin.register_filter("fake", self)
desc "path to something"
config_param :path, :string
def filter(tag, time, record)
end
end
class FakeParser < ::Fluent::Plugin::Parser
::Fluent::Plugin.register_parser("fake", self)
desc "path to something"
config_param :path, :string
def parse(text)
end
end
class FakeFormatter < ::Fluent::Plugin::Formatter
::Fluent::Plugin.register_formatter("fake", self)
desc "path to something"
config_param :path, :string
def format(tag, time, record)
end
end
class FakeStorage < ::Fluent::Plugin::Storage
::Fluent::Plugin.register_storage('fake', self)
def get(key)
end
def fetch(key, defval)
end
def put(key, value)
end
def delete(key)
end
def update(key, &block)
end
end
class FakeServiceDiscovery < ::Fluent::Plugin::ServiceDiscovery
::Fluent::Plugin.register_sd('fake', self)
desc "hostname"
config_param :hostname, :string
end
class SimpleInput < ::Fluent::Plugin::Input
::Fluent::Plugin.register_input("simple", self)
helpers :inject, :compat_parameters
desc "path to something"
config_param :path, :string
end
class ComplexOutput < ::Fluent::Plugin::Output
::Fluent::Plugin.register_output("complex", self)
helpers :inject, :compat_parameters
config_section :authentication, required: true, multi: false do
desc "username"
config_param :username, :string
desc "password"
config_param :password, :string, secret: true
end
config_section :parent do
config_section :child do
desc "names"
config_param :names, :array
desc "difficulty"
config_param :difficulty, :enum, list: [:easy, :normal, :hard], default: :normal
end
end
def process(tag, es)
end
end
class SimpleServiceDiscovery < ::Fluent::Plugin::ServiceDiscovery
::Fluent::Plugin.register_sd('simple', self)
desc "servers"
config_param :servers, :array
end
sub_test_case "json" do
data(input: [FakeInput, "input"],
output: [FakeOutput, "output"],
filter: [FakeFilter, "filter"],
parser: [FakeParser, "parser"],
formatter: [FakeFormatter, "formatter"])
test "dumped config should be valid JSON" do |(klass, type)|
dumped_config = capture_stdout do
FluentPluginConfigFormatter.new(["--format=json", type, "fake"]).call
end
expected = {
path: {
desc: "path to something",
type: "string",
required: true
}
}
assert_equal(expected, JSON.parse(dumped_config, symbolize_names: true)[klass.name.to_sym])
end
end
sub_test_case "text" do
test "input simple" do
dumped_config = capture_stdout do
FluentPluginConfigFormatter.new(["--format=txt", "input", "simple"]).call
end
expected = <<TEXT
helpers: inject,compat_parameters
@log_level: string: (nil)
path: string: (nil)
TEXT
assert_equal(expected, dumped_config)
end
test "output complex" do
dumped_config = capture_stdout do
FluentPluginConfigFormatter.new(["--format=txt", "output", "complex"]).call
end
expected = <<TEXT
helpers: inject,compat_parameters
@log_level: string: (nil)
time_as_integer: bool: (false)
slow_flush_log_threshold: float: (20.0)
<buffer>: optional, single
chunk_keys: array: ([])
@type: string: ("memory")
timekey: time: (nil)
timekey_wait: time: (600)
timekey_use_utc: bool: (false)
timekey_zone: string: ("#{Time.now.strftime('%z')}")
flush_at_shutdown: bool: (nil)
flush_mode: enum: (:default)
flush_interval: time: (60)
flush_thread_count: integer: (1)
flush_thread_interval: float: (1.0)
flush_thread_burst_interval: float: (1.0)
delayed_commit_timeout: time: (60)
overflow_action: enum: (:throw_exception)
retry_forever: bool: (false)
retry_timeout: time: (259200)
retry_max_times: integer: (nil)
retry_secondary_threshold: float: (0.8)
retry_type: enum: (:exponential_backoff)
retry_wait: time: (1)
retry_exponential_backoff_base: float: (2)
retry_max_interval: time: (nil)
retry_randomize: bool: (true)
<secondary>: optional, single
@type: string: (nil)
<buffer>: optional, single
<secondary>: optional, single
<authentication>: required, single
username: string: (nil)
password: string: (nil)
<parent>: optional, multiple
<child>: optional, multiple
names: array: (nil)
difficulty: enum: (:normal)
TEXT
assert_equal(expected, dumped_config)
end
end
sub_test_case "markdown" do
test "input simple" do
dumped_config = capture_stdout do
FluentPluginConfigFormatter.new(["--format=markdown", "input", "simple"]).call
end
expected = <<TEXT
## Plugin helpers
* [inject](https://docs.fluentd.org/v/1.0/plugin-helper-overview/api-plugin-helper-inject)
* [compat_parameters](https://docs.fluentd.org/v/1.0/plugin-helper-overview/api-plugin-helper-compat_parameters)
* See also: [Input Plugin Overview](https://docs.fluentd.org/v/1.0/input#overview)
## TestFluentPluginConfigFormatter::SimpleInput
### path (string) (required)
path to something
TEXT
assert_equal(expected, dumped_config)
end
test "input simple (table)" do
dumped_config = capture_stdout do
FluentPluginConfigFormatter.new(["--format=markdown", "--table", "input", "simple"]).call
end
expected = <<TEXT
## Plugin helpers
* [inject](https://docs.fluentd.org/v/1.0/plugin-helper-overview/api-plugin-helper-inject)
* [compat_parameters](https://docs.fluentd.org/v/1.0/plugin-helper-overview/api-plugin-helper-compat_parameters)
* See also: [Input Plugin Overview](https://docs.fluentd.org/v/1.0/input#overview)
## TestFluentPluginConfigFormatter::SimpleInput
### Configuration
|parameter|type|description|default|
|---|---|---|---|
|path|string (required)|path to something||
TEXT
assert_equal(expected, dumped_config)
end
data("abbrev" => "sd",
"normal" => "service_discovery")
test "service_discovery simple" do |data|
plugin_type = data
dumped_config = capture_stdout do
FluentPluginConfigFormatter.new(["--format=markdown", plugin_type, "simple"]).call
end
expected = <<TEXT
* See also: [ServiceDiscovery Plugin Overview](https://docs.fluentd.org/v/1.0/servicediscovery#overview)
## TestFluentPluginConfigFormatter::SimpleServiceDiscovery
### servers (array) (required)
servers
TEXT
assert_equal(expected, dumped_config)
end
test "output complex" do
dumped_config = capture_stdout do
FluentPluginConfigFormatter.new(["--format=markdown", "output", "complex"]).call
end
expected = <<TEXT
## Plugin helpers
* [inject](https://docs.fluentd.org/v/1.0/plugin-helper-overview/api-plugin-helper-inject)
* [compat_parameters](https://docs.fluentd.org/v/1.0/plugin-helper-overview/api-plugin-helper-compat_parameters)
* See also: [Output Plugin Overview](https://docs.fluentd.org/v/1.0/output#overview)
## TestFluentPluginConfigFormatter::ComplexOutput
### \\<authentication\\> section (required) (single)
#### username (string) (required)
username
#### password (string) (required)
password
### \\<parent\\> section (optional) (multiple)
#### \\<child\\> section (optional) (multiple)
##### names (array) (required)
names
##### difficulty (enum) (optional)
difficulty
Available values: easy, normal, hard
Default value: `normal`.
TEXT
assert_equal(expected, dumped_config)
end
test "output complex (table)" do
dumped_config = capture_stdout do
FluentPluginConfigFormatter.new(["--format=markdown", "--table", "output", "complex"]).call
end
expected = <<TEXT
## Plugin helpers
* [inject](https://docs.fluentd.org/v/1.0/plugin-helper-overview/api-plugin-helper-inject)
* [compat_parameters](https://docs.fluentd.org/v/1.0/plugin-helper-overview/api-plugin-helper-compat_parameters)
* See also: [Output Plugin Overview](https://docs.fluentd.org/v/1.0/output#overview)
## TestFluentPluginConfigFormatter::ComplexOutput
### \\<authentication\\> section (required) (single)
### Configuration
|parameter|type|description|default|
|---|---|---|---|
|username|string (required)|username||
|password|string (required)|password||
### \\<parent\\> section (optional) (multiple)
#### \\<child\\> section (optional) (multiple)
### Configuration
|parameter|type|description|default|
|---|---|---|---|
|names|array (required)|names||
|difficulty|enum (optional)|difficulty (`easy`, `normal`, `hard`)|`normal`|
TEXT
assert_equal(expected, dumped_config)
end
end
sub_test_case "arguments" do
data do
hash = {}
["input", "output", "filter", "parser", "formatter", "storage", "service_discovery"].each do |type|
["txt", "json", "markdown"].each do |format|
argv = ["--format=#{format}"]
[
["--verbose", "--compact"],
["--verbose"],
["--compact"]
].each do |options|
hash["[#{type}] " + (argv + options).join(" ")] = argv + options + [type, "fake"]
end
end
end
hash
end
test "dump txt" do |argv|
capture_stdout do
assert_nothing_raised do
FluentPluginConfigFormatter.new(argv).call
end
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/command/test_cap_ctl.rb | test/command/test_cap_ctl.rb | require_relative '../helper'
require 'tempfile'
require 'fluent/command/cap_ctl'
class TestFluentCapCtl < Test::Unit::TestCase
setup do
omit "This environment does not handle Linux capability" unless defined?(CapNG)
end
sub_test_case "success" do
test "clear capability" do
logs = capture_stdout do
Fluent::CapCtl.new(["--clear"]).call
end
expression = /\AClear capabilities .*\n/m
assert_match expression, logs
end
test "add capability" do
logs = capture_stdout do
Fluent::CapCtl.new(["--add", "dac_override"]).call
end
expression = /\AUpdating .* done.\nAdding .*\n/m
assert_match expression, logs
end
test "drop capability" do
logs = capture_stdout do
Fluent::CapCtl.new(["--drop", "chown"]).call
end
expression = /\AUpdating .* done.\nDropping .*\n/m
assert_match expression, logs
end
test "get capability" do
logs = capture_stdout do
Fluent::CapCtl.new(["--get"]).call
end
expression = /\ACapabilities in .*,\nEffective: .*\nInheritable: .*\nPermitted: .*/m
assert_match expression, logs
end
end
sub_test_case "success with file" do
test "clear capability" do
logs = capture_stdout do
Tempfile.create("fluent-cap-") do |tempfile|
Fluent::CapCtl.new(["--clear-cap", "-f", tempfile.path]).call
end
end
expression = /\AClear capabilities .*\n/m
assert_match expression, logs
end
test "add capability" do
logs = capture_stdout do
Tempfile.create("fluent-cap-") do |tempfile|
Fluent::CapCtl.new(["--add", "dac_override", "-f", tempfile.path]).call
end
end
expression = /\AUpdating .* done.\nAdding .*\n/m
assert_match expression, logs
end
test "drop capability" do
logs = capture_stdout do
Tempfile.create("fluent-cap-") do |tempfile|
Fluent::CapCtl.new(["--drop", "chown", "-f", tempfile.path]).call
end
end
expression = /\AUpdating .* done.\nDropping .*\n/m
assert_match expression, logs
end
test "get capability" do
logs = capture_stdout do
Tempfile.create("fluent-cap-") do |tempfile|
Fluent::CapCtl.new(["--get", "-f", tempfile.path]).call
end
end
expression = /\ACapabilities in .*,\nEffective: .*\nInheritable: .*\nPermitted: .*/m
assert_match expression, logs
end
end
sub_test_case "invalid" do
test "add capability" do
assert_raise(ArgumentError) do
Fluent::CapCtl.new(["--add", "nonexitent"]).call
end
end
test "drop capability" do
assert_raise(ArgumentError) do
Fluent::CapCtl.new(["--drop", "invalid"]).call
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/command/test_ca_generate.rb | test/command/test_ca_generate.rb | require_relative '../helper'
require 'flexmock/test_unit'
require 'tmpdir'
require 'fluent/command/ca_generate'
require 'fluent/event'
class TestFluentCaGenerate < ::Test::Unit::TestCase
def test_generate_ca_pair
cert, key = Fluent::CaGenerate.generate_ca_pair(Fluent::CaGenerate::DEFAULT_OPTIONS)
assert_equal(OpenSSL::X509::Certificate, cert.class)
assert_true(key.private?)
end
def test_ca_generate
dumped_output = capture_stdout do
Dir.mktmpdir do |dir|
Fluent::CaGenerate.new([dir, "fluentd"]).call
assert_true(File.exist?(File.join(dir, "ca_key.pem")))
assert_true(File.exist?(File.join(dir, "ca_cert.pem")))
end
end
expected = <<TEXT
successfully generated: ca_key.pem, ca_cert.pem
copy and use ca_cert.pem to client(out_forward)
TEXT
assert_equal(expected, dumped_output)
end
sub_test_case "configure options" do
test "should respond multiple options" do
dumped_output = capture_stdout do
Dir.mktmpdir do |dir|
Fluent::CaGenerate.new([dir, "fluentd",
"--country", "JP", "--key-length", "4096",
"--state", "Tokyo", "--locality", "Chiyoda-ku",
"--common-name", "Forward CA"]).call
assert_true(File.exist?(File.join(dir, "ca_key.pem")))
assert_true(File.exist?(File.join(dir, "ca_cert.pem")))
end
end
expected = <<TEXT
successfully generated: ca_key.pem, ca_cert.pem
copy and use ca_cert.pem to client(out_forward)
TEXT
assert_equal(expected, dumped_output)
end
test "invalid options" do
Dir.mktmpdir do |dir|
assert_raise(OptionParser::InvalidOption) do
Fluent::CaGenerate.new([dir, "fluentd",
"--invalid"]).call
end
assert_false(File.exist?(File.join(dir, "ca_key.pem")))
assert_false(File.exist?(File.join(dir, "ca_cert.pem")))
end
end
test "empty options" do
assert_raise(SystemExit) do
capture_stdout do
Fluent::CaGenerate.new([]).call
end
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/command/test_ctl.rb | test/command/test_ctl.rb | require_relative '../helper'
require 'test-unit'
require 'win32/event' if Fluent.windows?
require 'fluent/command/ctl'
class TestFluentdCtl < ::Test::Unit::TestCase
def assert_win32_event(event_name, command, pid_or_svcname)
event = Win32::Event.new(event_name)
ipc = Win32::Ipc.new(event.handle)
ret = Win32::Ipc::TIMEOUT
wait_thread = Thread.new do
ret = ipc.wait(1)
end
Fluent::Ctl.new([command, pid_or_svcname]).call
wait_thread.join
assert_equal(Win32::Ipc::SIGNALED, ret)
end
data("shutdown" => ["shutdown", "TERM", ""],
"restart" => ["restart", "HUP", "HUP"],
"flush" => ["flush", "USR1", "USR1"],
"reload" => ["reload", "USR2", "USR2"])
def test_commands(data)
command, signal, event_suffix = data
if Fluent.windows?
event_name = "fluentd_54321"
event_name << "_#{event_suffix}" unless event_suffix.empty?
assert_win32_event(event_name, command, "54321")
else
got_signal = false
Signal.trap(signal) do
got_signal = true
end
Fluent::Ctl.new([command, Process.pid.to_s]).call
assert_true(got_signal)
end
end
data("shutdown" => ["shutdown", ""],
"restart" => ["restart", "HUP"],
"flush" => ["flush", "USR1"],
"reload" => ["reload", "USR2"])
def test_commands_with_winsvcname(data)
omit "Only for Windows" unless Fluent.windows?
command, event_suffix = data
event_name = "testfluentdwinsvc"
event_name << "_#{event_suffix}" unless event_suffix.empty?
assert_win32_event(event_name, command, "testfluentdwinsvc")
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/command/test_plugin_generator.rb | test/command/test_plugin_generator.rb | require_relative '../helper'
require 'pathname'
require 'fluent/command/plugin_generator'
class TestFluentPluginGenerator < Test::Unit::TestCase
TEMP_DIR = "tmp/plugin_generator"
setup do
FileUtils.mkdir_p(TEMP_DIR)
@pwd = Dir.pwd
Dir.chdir(TEMP_DIR)
end
teardown do
Dir.chdir(@pwd)
FileUtils.rm_rf(TEMP_DIR)
end
def stub_git_process(target)
stub(target).spawn do |cmd, arg1, arg2|
assert_equal %w[git init .], [cmd, arg1, arg2]
-1
end
stub(Process).wait { |pid| assert_equal(pid, -1) }
end
sub_test_case "generate plugin" do
data(input: ["input", "in"],
output: ["output", "out"],
filter: ["filter", "filter"],
parser: ["parser", "parser"],
formatter: ["formatter", "formatter"],
storage: ["storage", "storage"])
test "generate plugin" do |(type, part)|
generator = FluentPluginGenerator.new([type, "fake"])
stub_git_process(generator)
capture_stdout do
generator.call
end
plugin_base_dir = Pathname("fluent-plugin-fake")
assert { plugin_base_dir.directory? }
expected = [
"fluent-plugin-fake",
"fluent-plugin-fake/Gemfile",
"fluent-plugin-fake/LICENSE",
"fluent-plugin-fake/README.md",
"fluent-plugin-fake/Rakefile",
"fluent-plugin-fake/fluent-plugin-fake.gemspec",
"fluent-plugin-fake/lib",
"fluent-plugin-fake/lib/fluent",
"fluent-plugin-fake/lib/fluent/plugin",
"fluent-plugin-fake/lib/fluent/plugin/#{part}_fake.rb",
"fluent-plugin-fake/test",
"fluent-plugin-fake/test/helper.rb",
"fluent-plugin-fake/test/plugin",
"fluent-plugin-fake/test/plugin/test_#{part}_fake.rb",
]
actual = plugin_base_dir.find.reject {|f| f.fnmatch("*/.git*") }.map(&:to_s).sort
assert_equal(expected, actual)
end
test "no license" do
generator = FluentPluginGenerator.new(["--no-license", "filter", "fake"])
stub_git_process(generator)
capture_stdout do
generator.call
end
assert { !Pathname("fluent-plugin-fake/LICENSE").exist? }
assert { Pathname("fluent-plugin-fake/Gemfile").exist? }
end
test "unknown license" do
out = capture_stdout do
assert_raise(SystemExit) do
FluentPluginGenerator.new(["--license=unknown", "filter", "fake"]).call
end
end
assert { out.lines.include?("License: unknown\n") }
end
end
sub_test_case "unify plugin name" do
data("word" => ["fake", "fake"],
"underscore" => ["rewrite_tag_filter", "rewrite_tag_filter"],
"dash" => ["rewrite-tag-filter", "rewrite_tag_filter"])
test "plugin_name" do |(name, plugin_name)|
generator = FluentPluginGenerator.new(["filter", name])
stub_git_process(generator)
stub(Process).wait { |pid| assert_equal(pid, -1) }
capture_stdout do
generator.call
end
assert_equal(plugin_name, generator.__send__(:plugin_name))
end
data("word" => ["fake", "fluent-plugin-fake"],
"underscore" => ["rewrite_tag_filter", "fluent-plugin-rewrite-tag-filter"],
"dash" => ["rewrite-tag-filter", "fluent-plugin-rewrite-tag-filter"])
test "gem_name" do |(name, gem_name)|
generator = FluentPluginGenerator.new(["output", name])
stub_git_process(generator)
stub(Process).wait { |pid| assert_equal(pid, -1) }
capture_stdout do
generator.call
end
assert_equal(gem_name, generator.__send__(:gem_name))
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/command/test_fluentd.rb | test/command/test_fluentd.rb | require_relative '../helper'
# require 'fluent/command/fluentd'
# don't require it... it runs immediately
require 'fileutils'
require 'timeout'
require 'securerandom'
require 'fluent/file_wrapper'
class TestFluentdCommand < ::Test::Unit::TestCase
SUPERVISOR_PID_PATTERN = /starting fluentd-[.0-9]+ pid=(\d+)/
WORKER_PID_PATTERN = /starting fluentd worker pid=(\d+) /
def tmp_dir
File.join(File.dirname(__FILE__), "..", "tmp", "command" "fluentd#{ENV['TEST_ENV_NUMBER']}", SecureRandom.hex(10))
end
setup do
@tmp_dir = tmp_dir
FileUtils.mkdir_p(@tmp_dir)
@supervisor_pid = nil
@worker_pids = []
ENV["TEST_RUBY_PATH"] = nil
end
teardown do
begin
FileUtils.rm_rf(@tmp_dir)
rescue Errno::EACCES
# It may occur on Windows because of delete pending state due to delayed GC.
# Ruby 3.2 or later doesn't ignore Errno::EACCES:
# https://github.com/ruby/ruby/commit/983115cf3c8f75b1afbe3274f02c1529e1ce3a81
end
end
def process_exist?(pid)
begin
r = Process.waitpid(pid, Process::WNOHANG)
return true if r.nil?
false
rescue SystemCallError
false
end
end
def create_conf_file(name, content, ext_enc = 'utf-8')
conf_path = File.join(@tmp_dir, name)
Fluent::FileWrapper.open(conf_path, "w:#{ext_enc}:utf-8") do |file|
file.write content
end
conf_path
end
def create_plugin_file(name, content)
file_path = File.join(@tmp_dir, 'plugin', name)
FileUtils.mkdir_p(File.dirname(file_path))
Fluent::FileWrapper.open(file_path, 'w') do |file|
file.write content
end
file_path
end
def create_cmdline(conf_path, *fluentd_options)
if Fluent.windows?
cmd_path = File.expand_path(File.dirname(__FILE__) + "../../../bin/fluentd")
["bundle", "exec", ServerEngine.ruby_bin_path, cmd_path, "-c", conf_path, *fluentd_options]
else
cmd_path = File.expand_path(File.dirname(__FILE__) + "../../../bin/fluentd")
["bundle", "exec", cmd_path, "-c", conf_path, *fluentd_options]
end
end
def process_kill(pid)
if Fluent.windows?
Process.kill(:KILL, pid) rescue nil
return
end
begin
Process.kill(:TERM, pid) rescue nil
Timeout.timeout(10){ sleep 0.1 while process_exist?(pid) }
rescue Timeout::Error
Process.kill(:KILL, pid) rescue nil
end
end
def execute_command(cmdline, chdir=@tmp_dir, env = {})
null_stream = Fluent::FileWrapper.open(File::NULL, 'w')
gemfile_path = File.expand_path(File.dirname(__FILE__) + "../../../Gemfile")
env = { "BUNDLE_GEMFILE" => gemfile_path }.merge(env)
cmdname = cmdline.shift
arg0 = "testing-fluentd"
# p(here: "executing process", env: env, cmdname: cmdname, arg0: arg0, args: cmdline)
IO.popen(env, [[cmdname, arg0], *cmdline], chdir: chdir, err: [:child, :out]) do |io|
pid = io.pid
begin
yield pid, io
# p(here: "execute command", pid: pid, worker_pids: @worker_pids)
ensure
process_kill(pid)
if @supervisor_pid
process_kill(@supervisor_pid)
end
@worker_pids.each do |cpid|
process_kill(cpid)
end
# p(here: "execute command", pid: pid, exist: process_exist?(pid), worker_pids: @worker_pids, exists: @worker_pids.map{|i| process_exist?(i) })
Timeout.timeout(10){ sleep 0.1 while process_exist?(pid) }
end
end
ensure
null_stream.close rescue nil
end
def eager_read(io)
buf = +''
loop do
b = io.read_nonblock(1024, nil, exception: false)
if b == :wait_readable || b.nil?
return buf
end
buf << b
end
end
# ATTENTION: This stops taking logs when all `pattern_list` match or timeout,
# so `patterns_not_match` can test only logs up to that point.
# You can pass a block to assert something after log matching.
def assert_log_matches(cmdline, *pattern_list, patterns_not_match: [], timeout: 20, env: {})
matched = false
matched_wrongly = false
error_msg_match = ""
stdio_buf = ""
succeeded_block = true
error_msg_block = ""
begin
execute_command(cmdline, @tmp_dir, env) do |pid, stdout|
begin
waiting(timeout) do
while process_exist?(pid)
readables, _, _ = IO.select([stdout], nil, nil, 1)
next unless readables
break if readables.first.eof?
buf = eager_read(readables.first)
# puts buf
stdio_buf << buf
lines = stdio_buf.split("\n")
if pattern_list.all?{|ptn| lines.any?{|line| ptn.is_a?(Regexp) ? ptn.match(line) : line.include?(ptn) } }
matched = true
end
if Fluent.windows?
# https://github.com/fluent/fluentd/issues/4095
# On Windows, the initial process is different from the supervisor process,
# so we need to wait until `SUPERVISOR_PID_PATTERN` appears in the logs to get the pid.
# (Worker processes will be killed by the supervisor process, so we don't need it-)
break if matched && SUPERVISOR_PID_PATTERN =~ stdio_buf
else
# On Non-Windows, the initial process is the supervisor process,
# so we don't need to wait `SUPERVISOR_PID_PATTERN`.
break if matched
end
end
end
begin
yield if block_given?
rescue => e
succeeded_block = false
error_msg_block = "failed block execution after matching: #{e}"
end
ensure
if SUPERVISOR_PID_PATTERN =~ stdio_buf
@supervisor_pid = $1.to_i
end
stdio_buf.scan(WORKER_PID_PATTERN) do |worker_pid|
@worker_pids << worker_pid.first.to_i
end
end
end
rescue Timeout::Error
error_msg_match = "execution timeout"
# https://github.com/fluent/fluentd/issues/4095
# On Windows, timeout without `@supervisor_pid` means that the test is invalid,
# since the supervisor process will survive without being killed correctly.
flunk("Invalid test: The pid of supervisor could not be taken, which is necessary on Windows.") if Fluent.windows? && @supervisor_pid.nil?
rescue => e
error_msg_match = "unexpected error in launching fluentd: #{e.inspect}"
else
error_msg_match = "log doesn't match" unless matched
end
if patterns_not_match.empty?
error_msg_match = build_message(error_msg_match,
"<?>\nwas expected to include:\n<?>",
stdio_buf, pattern_list)
else
lines = stdio_buf.split("\n")
patterns_not_match.each do |ptn|
matched_wrongly = if ptn.is_a? Regexp
lines.any?{|line| ptn.match(line) }
else
lines.any?{|line| line.include?(ptn) }
end
if matched_wrongly
error_msg_match << "\n" unless error_msg_match.empty?
error_msg_match << "pattern exists in logs wrongly: #{ptn}"
end
end
error_msg_match = build_message(error_msg_match,
"<?>\nwas expected to include:\n<?>\nand not include:\n<?>",
stdio_buf, pattern_list, patterns_not_match)
end
assert matched && !matched_wrongly, error_msg_match
assert succeeded_block, error_msg_block if block_given?
end
def assert_fluentd_fails_to_start(cmdline, *pattern_list, timeout: 20)
# empty_list.all?{ ... } is always true
matched = false
running = false
assert_error_msg = "failed to start correctly"
stdio_buf = ""
begin
execute_command(cmdline) do |pid, stdout|
begin
waiting(timeout) do
while process_exist?(pid) && !running
readables, _, _ = IO.select([stdout], nil, nil, 1)
next unless readables
next if readables.first.eof?
stdio_buf << eager_read(readables.first)
lines = stdio_buf.split("\n")
if lines.any?{|line| line.include?("fluentd worker is now running") }
running = true
end
if pattern_list.all?{|ptn| lines.any?{|line| ptn.is_a?(Regexp) ? ptn.match(line) : line.include?(ptn) } }
matched = true
end
end
end
ensure
if SUPERVISOR_PID_PATTERN =~ stdio_buf
@supervisor_pid = $1.to_i
end
stdio_buf.scan(WORKER_PID_PATTERN) do |worker_pid|
@worker_pids << worker_pid.first.to_i
end
end
end
rescue Timeout::Error
assert_error_msg = "execution timeout with command out:\n" + stdio_buf
# https://github.com/fluent/fluentd/issues/4095
# On Windows, timeout without `@supervisor_pid` means that the test is invalid,
# since the supervisor process will survive without being killed correctly.
flunk("Invalid test: The pid of supervisor could not be taken, which is necessary on Windows.") if Fluent.windows? && @supervisor_pid.nil?
rescue => e
assert_error_msg = "unexpected error in launching fluentd: #{e.inspect}\n" + stdio_buf
assert false, assert_error_msg
end
assert !running, "fluentd started to run incorrectly:\n" + stdio_buf
unless matched
assert_error_msg = "fluentd failed to start, without specified regular expressions:\n" + stdio_buf
end
assert matched, assert_error_msg
end
sub_test_case 'with valid configuration' do
test 'runs successfully' do
conf = <<CONF
<source>
@type dummy
@id dummy
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
conf_path = create_conf_file('valid.conf', conf)
assert File.exist?(conf_path)
assert_log_matches(create_cmdline(conf_path), "fluentd worker is now running", 'worker=0')
end
end
sub_test_case 'with --conf-encoding' do
test 'runs successfully' do
conf = <<CONF
# テスト
<source>
@type dummy
tag dummy
dummy {"message": "yay!"}
</source>
<match dummy>
@type null
</match>
CONF
conf_path = create_conf_file('shift_jis.conf', conf, 'shift_jis')
assert_log_matches(create_cmdline(conf_path, '--conf-encoding', 'shift_jis'), "fluentd worker is now running", 'worker=0')
end
test 'failed to run by invalid encoding' do
conf = <<CONF
# テスト
<source>
@type dummy
tag dummy
dummy {"message": "yay!"}
</source>
<match dummy>
@type null
</match>
CONF
conf_path = create_conf_file('shift_jis.conf', conf, 'shift_jis')
assert_fluentd_fails_to_start(create_cmdline(conf_path), "invalid byte sequence in UTF-8")
end
end
sub_test_case 'with system configuration about root directory' do
setup do
@root_path = File.join(@tmp_dir, "rootpath")
FileUtils.rm_rf(@root_path)
@conf = <<CONF
<system>
root_dir #{@root_path}
</system>
<source>
@type dummy
@id dummy
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
end
test 'use the specified existing directory as root' do
FileUtils.mkdir_p(@root_path)
conf_path = create_conf_file('existing_root_dir.conf', @conf)
assert Dir.exist?(@root_path)
assert_log_matches(create_cmdline(conf_path), "fluentd worker is now running", 'worker=0')
end
test 'creates the specified root directory if missing' do
conf_path = create_conf_file('missing_root_dir.conf', @conf)
assert_false Dir.exist?(@root_path)
assert_log_matches(create_cmdline(conf_path), "fluentd worker is now running", 'worker=0')
assert Dir.exist?(@root_path)
end
test 'fails to launch fluentd if specified root path is invalid path for directory' do
Fluent::FileWrapper.open(@root_path, 'w') do |_|
# create file and close it
end
conf_path = create_conf_file('existing_root_dir.conf', @conf)
assert_fluentd_fails_to_start(
create_cmdline(conf_path),
"non directory entry exists:#{@root_path}",
)
end
end
sub_test_case 'configured to route log events to plugins' do
setup do
@basic_conf = <<CONF
<source>
@type dummy
@id dummy
tag dummy
dummy {"message": "yay!"}
</source>
<match dummy>
@type null
@id blackhole
</match>
CONF
end
test 'by top level <match fluent.*> section' do
conf = @basic_conf + <<CONF
<match fluent.**>
@type stdout
</match>
CONF
conf_path = create_conf_file('logevent_1.conf', conf)
assert_log_matches(
create_cmdline(conf_path),
"fluentd worker is now running",
'fluent.info: {"worker":0,"message":"fluentd worker is now running worker=0"}',
"define <match fluent.**> to capture fluentd logs in top level is deprecated. Use <label @FLUENT_LOG> instead",
patterns_not_match: ['[warn]: some tags for log events are not defined in top level (to be ignored) tags=["fluent.trace", "fluent.debug"]'],
)
end
test 'by top level <match> section with warning for missing log levels (and warnings for each log event records)' do
conf = @basic_conf + <<CONF
<match fluent.warn fluent.error fluent.fatal>
@type stdout
</match>
CONF
conf_path = create_conf_file('logevent_2.conf', conf)
assert_log_matches(
create_cmdline(conf_path),
"fluentd worker is now running",
'[warn]: #0 match for some tags of log events are not defined in top level (to be ignored) tags=["fluent.trace", "fluent.debug", "fluent.info"]',
"define <match fluent.warn>, <match fluent.error>, <match fluent.fatal> to capture fluentd logs in top level is deprecated. Use <label @FLUENT_LOG> instead",
'[warn]: #0 no patterns matched tag="fluent.info"',
)
end
test 'by <label @FLUENT_LOG> section' do
conf = @basic_conf + <<CONF
<label @FLUENT_LOG>
<match **>
@type stdout
</match>
</label>
CONF
conf_path = create_conf_file('logevent_3.conf', conf)
assert_log_matches(
create_cmdline(conf_path),
"fluentd worker is now running",
'fluent.info: {"worker":0,"message":"fluentd worker is now running worker=0"}',
patterns_not_match: ['[warn]: some tags for log events are not defined in @FLUENT_LOG label (to be ignored)'],
)
end
test 'by <label> section with warning for missing log levels' do
conf = @basic_conf + <<CONF
<label @FLUENT_LOG>
<match fluent.{trace,debug}>
@type null
</match>
<match fluent.warn fluent.error>
@type stdout
</match>
</label>
CONF
conf_path = create_conf_file('logevent_4.conf', conf)
assert_log_matches(
create_cmdline(conf_path),
"fluentd worker is now running",
'[warn]: #0 match for some tags of log events are not defined in @FLUENT_LOG label (to be ignored) tags=["fluent.info", "fluent.fatal"]',
patterns_not_match: ['[warn]: no patterns matched tag="fluent.info"'],
)
end
end
sub_test_case 'configured to suppress configuration dump' do
setup do
@basic_conf = <<CONF
<source>
@type dummy
@id dummy
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
end
test 'configured by system config' do
conf = <<SYSTEM + @basic_conf
<system>
suppress_config_dump
</system>
SYSTEM
conf_path = create_conf_file('suppress_conf_dump_1.conf', conf)
assert_log_matches(create_cmdline(conf_path), "fluentd worker is now running", patterns_not_match: ["tag dummy"])
end
test 'configured by command line option' do
conf_path = create_conf_file('suppress_conf_dump_2.conf', @basic_conf)
assert_log_matches(create_cmdline(conf_path, '--suppress-config-dump'), "fluentd worker is now running", patterns_not_match: ["tag dummy"])
end
test 'configured as false by system config, but overridden as true by command line option' do
conf = <<SYSTEM + @basic_conf
<system>
suppress_config_dump false
</system>
SYSTEM
conf_path = create_conf_file('suppress_conf_dump_3.conf', conf)
assert_log_matches(create_cmdline(conf_path, '--suppress-config-dump'), "fluentd worker is now running", patterns_not_match: ["tag dummy"])
end
end
sub_test_case 'configuration with wrong plugin type' do
test 'failed to start' do
conf = <<CONF
<source>
@type
@id dummy
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
conf_path = create_conf_file('type_missing.conf', conf)
assert File.exist?(conf_path)
assert_fluentd_fails_to_start(
create_cmdline(conf_path),
"config error",
"error=\"Unknown input plugin ''. Run 'gem search -rd fluent-plugin' to find plugins",
)
end
end
sub_test_case 'configuration to load plugin file with syntax error' do
test 'failed to start' do
script = "require 'fluent/plugin/input'\n"
script << "module Fluent::Plugin\n"
script << " class BuggyInput < Input\n"
script << " Fluent::Plugin.register_input('buggy', self)\n"
script << " end\n"
plugin_path = create_plugin_file('in_buggy.rb', script)
conf = <<CONF
<source>
@type buggy
@id dummy
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
conf_path = create_conf_file('buggy_plugin.conf', conf)
assert File.exist?(conf_path)
assert_fluentd_fails_to_start(
create_cmdline(conf_path, "-p", File.dirname(plugin_path)),
/\[error\]: .+in_buggy.rb:\d+: syntax error/
)
end
end
sub_test_case 'configuration to load plugin which raises unrecoverable error in #start' do
test 'failed to start' do
script = "require 'fluent/plugin/input'\n"
script << "require 'fluent/error'\n"
script << "module Fluent::Plugin\n"
script << " class CrashingInput < Input\n"
script << " Fluent::Plugin.register_input('crashing', self)\n"
script << " def start\n"
script << " raise Fluent::UnrecoverableError"
script << " end\n"
script << " end\n"
script << "end\n"
plugin_path = create_plugin_file('in_crashing.rb', script)
conf = <<CONF
<source>
@type crashing
@id dummy
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
conf_path = create_conf_file('crashing_plugin.conf', conf)
assert File.exist?(conf_path)
assert_fluentd_fails_to_start(
create_cmdline(conf_path, "-p", File.dirname(plugin_path)),
'unexpected error error_class=Fluent::UnrecoverableError error="an unrecoverable error occurs in Fluentd process"',
)
end
end
sub_test_case 'configured to run 2 workers' do
setup do
@root_path = File.join(@tmp_dir, "rootpath")
FileUtils.rm_rf(@root_path)
FileUtils.mkdir_p(@root_path)
end
test 'success to start the number of workers specified in configuration' do
conf = <<'CONF'
<system>
workers 2
root_dir #{@root_path}
</system>
<source>
@type dummy
@id "dummy#{worker_id}" # check worker_id works or not with actual command
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
conf_path = create_conf_file('workers1.conf', conf)
assert Dir.exist?(@root_path)
assert_log_matches(
create_cmdline(conf_path),
"#0 fluentd worker is now running worker=0",
"#1 fluentd worker is now running worker=1"
)
end
sub_test_case "YAML config format" do
test 'success to start the number of workers specified in configuration' do
conf = <<'CONF'
system:
workers: 2
root_dir: "#{@root_path}"
config:
- source:
$type: dummy
$id: !fluent/s "dummy.#{worker_id}" # check worker_id works or not with actual command
$label: '@dummydata'
tag: dummy
dummy: !fluent/json {"message": !fluent/s "yay from #{hostname}!"}
- label:
$name: '@dummydata'
config:
- match:
$tag: dummy
$type: "null"
$id: blackhole
CONF
conf_path = create_conf_file('workers1.yaml', conf)
assert Dir.exist?(@root_path)
assert_log_matches(
create_cmdline(conf_path),
"#0 fluentd worker is now running worker=0",
"#1 fluentd worker is now running worker=1"
)
end
end
test 'success to start the number of workers specified by command line option' do
conf = <<CONF
<system>
root_dir #{@root_path}
</system>
<source>
@type dummy
@id dummy
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
conf_path = create_conf_file('workers2.conf', conf)
assert_log_matches(
create_cmdline(conf_path, '--workers', '2'),
"#0 fluentd worker is now running worker=0",
"#1 fluentd worker is now running worker=1"
)
end
test 'failed to start workers when configured plugins do not support multi worker configuration' do
script = "require 'fluent/plugin/input'\n"
script << "module Fluent::Plugin\n"
script << " class SingleInput < Input\n"
script << " Fluent::Plugin.register_input('single', self)\n"
script << " def multi_workers_ready?\n"
script << " false\n"
script << " end\n"
script << " end\n"
script << "end\n"
plugin_path = create_plugin_file('in_single.rb', script)
conf = <<CONF
<system>
workers 2
</system>
<source>
@type single
@id single
@label @dummydata
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
conf_path = create_conf_file('workers_invalid1.conf', conf)
assert_fluentd_fails_to_start(
create_cmdline(conf_path, "-p", File.dirname(plugin_path)),
"Plugin 'single' does not support multi workers configuration (Fluent::Plugin::SingleInput)",
)
end
test 'failed to start workers when file buffer is configured in non-workers way' do
conf = <<CONF
<system>
workers 2
</system>
<source>
@type dummy
tag dummy
@id single
@label @dummydata
</source>
<label @dummydata>
<match dummy>
@type null
@id blackhole
<buffer>
@type file
path #{File.join(@root_path, "buf", "file.*.log")}
</buffer>
</match>
</label>
CONF
conf_path = create_conf_file('workers_invalid2.conf', conf)
assert_fluentd_fails_to_start(
create_cmdline(conf_path),
"[blackhole] file buffer with multi workers should be configured to use directory 'path', or system root_dir and plugin id",
"config error file=\"#{conf_path}\" error_class=Fluent::ConfigError error=\"Plugin 'file' does not support multi workers configuration (Fluent::Plugin::FileBuffer)\"",
)
end
test 'failed to start workers when configured plugins as children of MultiOutput do not support multi worker configuration' do
script = <<-EOC
require 'fluent/plugin/output'
module Fluent::Plugin
class SingleOutput < Output
Fluent::Plugin.register_output('single', self)
def multi_workers_ready?
false
end
def write(chunk)
end
end
end
EOC
plugin_path = create_plugin_file('out_single.rb', script)
conf = <<CONF
<system>
workers 2
</system>
<source>
@type dummy
tag dummy
@id single
@label @dummydata
</source>
<label @dummydata>
<match dummy>
@type copy
<store>
@type single
</store>
<store>
@type single
</store>
</match>
</label>
CONF
conf_path = create_conf_file('workers_invalid3.conf', conf)
assert_fluentd_fails_to_start(
create_cmdline(conf_path, "-p", File.dirname(plugin_path)),
"Plugin 'single' does not support multi workers configuration (Fluent::Plugin::SingleOutput)",
)
end
test 'success to start a worker2 with worker specific configuration' do
conf = <<CONF
<system>
root_dir #{@root_path}
dir_permission 0744
</system>
CONF
conf_path = create_conf_file('worker_section0.conf', conf)
FileUtils.rm_rf(@root_path) rescue nil
assert_path_not_exist(@root_path)
assert_log_matches(create_cmdline(conf_path), 'spawn command to main') # any message is ok
assert_path_exist(@root_path)
if Fluent.windows?
# In Windows, dir permission is always 755.
assert_equal '755', File.stat(@root_path).mode.to_s(8)[-3, 3]
else
assert_equal '744', File.stat(@root_path).mode.to_s(8)[-3, 3]
end
end
test 'success to start a worker with worker specific configuration' do
conf = <<CONF
<system>
workers 2
root_dir #{@root_path}
</system>
<source>
@type dummy
@id dummy
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
<worker 1>
<source>
@type dummy
@id dummy_in_worker
@label @dummydata
tag dummy
dummy {"message": "yay!"}
</source>
</worker>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
conf_path = create_conf_file('worker_section0.conf', conf)
assert Dir.exist?(@root_path)
assert_log_matches(
create_cmdline(conf_path),
"#0 fluentd worker is now running worker=0",
"#1 fluentd worker is now running worker=1",
/(?!#\d) adding source type="dummy"/,
'#1 adding source type="dummy"'
)
end
test 'success to start workers when configured plugins only for specific worker do not support multi worker configuration' do
script = <<-EOC
require 'fluent/plugin/input'
module Fluent::Plugin
class SingleInput < Input
Fluent::Plugin.register_input('single', self)
def multi_workers_ready?
false
end
end
end
EOC
plugin_path = create_plugin_file('in_single.rb', script)
conf = <<CONF
<system>
workers 2
</system>
<worker 1>
<source>
@type single
@id single
@label @dummydata
</source>
</worker>
<label @dummydata>
<match dummy>
@type null
@id blackhole
</match>
</label>
CONF
conf_path = create_conf_file('worker_section1.conf', conf)
assert Dir.exist?(@root_path)
assert_log_matches(
create_cmdline(conf_path, "-p", File.dirname(plugin_path)),
"#0 fluentd worker is now running worker=0",
"#1 fluentd worker is now running worker=1",
'#1 adding source type="single"'
)
end
test "multiple values are set to RUBYOPT" do
conf = <<CONF
<source>
@type dummy
tag dummy
</source>
<match>
@type null
</match>
CONF
conf_path = create_conf_file('rubyopt_test.conf', conf)
assert_log_matches(
create_cmdline(conf_path),
'#0 fluentd worker is now running worker=0',
patterns_not_match: ['(LoadError)'],
env: { 'RUBYOPT' => '-rtest-unit -rbundler/setup' },
)
end
data(
'-E' => '-Eutf-8',
'-encoding' => '--encoding=utf-8',
'-external-encoding' => '--external-encoding=utf-8',
'-internal-encoding' => '--internal-encoding=utf-8',
)
test "-E option is set to RUBYOPT" do |base_opt|
conf = <<CONF
<source>
@type dummy
tag dummy
</source>
<match>
@type null
</match>
CONF
conf_path = create_conf_file('rubyopt_test.conf', conf)
opt = base_opt.dup
opt << " #{ENV['RUBYOPT']}" if ENV['RUBYOPT']
assert_log_matches(
create_cmdline(conf_path),
*opt.split(' '),
patterns_not_match: ['-Eascii-8bit:ascii-8bit'],
env: { 'RUBYOPT' => opt },
)
end
test "without RUBYOPT" do
saved_ruby_opt = ENV["RUBYOPT"]
ENV["RUBYOPT"] = nil
conf = <<CONF
<source>
@type dummy
tag dummy
</source>
<match>
@type null
</match>
CONF
conf_path = create_conf_file('rubyopt_test.conf', conf)
assert_log_matches(create_cmdline(conf_path), '-Eascii-8bit:ascii-8bit')
ensure
ENV["RUBYOPT"] = saved_ruby_opt
end
test 'invalid values are set to RUBYOPT' do
omit "hard to run correctly because RUBYOPT=-r/path/to/bundler/setup is required on Windows while this test set invalid RUBYOPT" if Fluent.windows?
conf = <<CONF
<source>
@type dummy
tag dummy
</source>
<match>
@type null
</match>
CONF
conf_path = create_conf_file('rubyopt_invalid_test.conf', conf)
if Gem::Version.create(RUBY_VERSION) >= Gem::Version.create('3.3.0')
expected_phrase = 'ruby: invalid switch in RUBYOPT'
else
expected_phrase = 'Invalid option is passed to RUBYOPT'
end
assert_log_matches(
create_cmdline(conf_path),
expected_phrase,
env: { 'RUBYOPT' => 'a' },
)
end
# https://github.com/fluent/fluentd/issues/2915
test "ruby path contains spaces" do
saved_ruby_opt = ENV["RUBYOPT"]
ENV["RUBYOPT"] = nil
conf = <<CONF
<source>
@type dummy
tag dummy
</source>
<match>
@type null
</match>
CONF
ruby_path = ServerEngine.ruby_bin_path
tmp_ruby_path = File.join(@tmp_dir, "ruby with spaces")
if Fluent.windows?
tmp_ruby_path << ".bat"
Fluent::FileWrapper.open(tmp_ruby_path, "w") do |file|
file.write "#{ruby_path} %*"
end
else
FileUtils.ln_sf(ruby_path, tmp_ruby_path)
end
ENV["TEST_RUBY_PATH"] = tmp_ruby_path
cmd_path = File.expand_path(File.dirname(__FILE__) + "../../../bin/fluentd")
conf_path = create_conf_file('space_mixed_ruby_path_test.conf', conf)
args = ["bundle", "exec", tmp_ruby_path, cmd_path, "-c", conf_path]
assert_log_matches(
args,
'spawn command to main:',
'-Eascii-8bit:ascii-8bit'
)
ensure
ENV["RUBYOPT"] = saved_ruby_opt
end
test 'success to start workers when file buffer is configured in non-workers way only for specific worker' do
conf = <<CONF
<system>
workers 2
</system>
<source>
@type dummy
@id dummy
tag dummy
dummy {"message": "yay!"}
</source>
<worker 1>
<match dummy>
@type null
@id blackhole
<buffer>
@type file
path #{File.join(@root_path, "buf")}
</buffer>
</match>
</worker>
CONF
conf_path = create_conf_file('worker_section2.conf', conf)
assert_log_matches(
create_cmdline(conf_path),
"#0 fluentd worker is now running worker=0",
"#1 fluentd worker is now running worker=1",
'#1 adding match pattern="dummy" type="null"'
)
end
test 'success to start workers when configured plugins as a children of MultiOutput only for specific worker do not support multi worker configuration' do
script = <<-EOC
require 'fluent/plugin/output'
module Fluent::Plugin
class SingleOutput < Output
Fluent::Plugin.register_output('single', self)
def multi_workers_ready?
false
end
def write(chunk)
end
end
end
EOC
plugin_path = create_plugin_file('out_single.rb', script)
conf = <<CONF
<system>
workers 2
</system>
<source>
@type dummy
@id dummy
tag dummy
dummy {"message": "yay!"}
</source>
<worker 1>
<match dummy>
@type copy
<store>
@type single
</store>
<store>
@type single
</store>
</match>
</worker>
CONF
conf_path = create_conf_file('worker_section3.conf', conf)
assert_log_matches(
create_cmdline(conf_path, "-p", File.dirname(plugin_path)),
"#0 fluentd worker is now running worker=0",
"#1 fluentd worker is now running worker=1",
'#1 adding match pattern="dummy" type="copy"'
)
end
end
sub_test_case 'config dump' do
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/command/test_binlog_reader.rb | test/command/test_binlog_reader.rb | require_relative '../helper'
require 'json'
require 'flexmock/test_unit'
require 'fluent/command/binlog_reader'
require 'fluent/event'
class TestFluentBinlogReader < ::Test::Unit::TestCase
module ::BinlogReaderCommand
class Dummy < Base
def call; end
end
end
def suppress_stdout
out = StringIO.new
$stdout = out
yield
ensure
$stdout = STDOUT
end
sub_test_case 'call' do
data(
empty: [],
invalid: %w(invalid packed.log),
)
test 'should fail when invalid command' do |argv|
fu = FluentBinlogReader.new(argv)
assert_raise(SystemExit) do
suppress_stdout { fu.call }
end
end
data(
cat: %w(cat packed.log),
head: %w(head packed.log),
formats: %w(formats packed.log)
)
test 'should succeed when valid command' do |argv|
fu = FluentBinlogReader.new(argv)
flexstub(::BinlogReaderCommand) do |command|
command.should_receive(:const_get).once.and_return(::BinlogReaderCommand::Dummy)
assert_nothing_raised do
fu.call
end
end
end
end
end
class TestBaseCommand < ::Test::Unit::TestCase
TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/../tmp/command/binlog_reader#{ENV['TEST_ENV_NUMBER']}")
def create_message_packed_file(path, times = [event_time], records = [{ 'message' => 'dummy' }])
es = Fluent::MultiEventStream.new(times, records)
v = es.to_msgpack_stream
out_path = "#{TMP_DIR}/#{path}"
File.open(out_path, 'wb') do |f|
f.print(v)
end
waiting(5) do
sleep 0.5 until File.size(out_path) == v.bytesize
end
end
def setup
FileUtils.rm_rf(TMP_DIR)
FileUtils.mkdir_p(TMP_DIR)
end
def timezone(timezone = 'UTC')
old = ENV['TZ']
ENV['TZ'] = timezone
yield
ensure
ENV['TZ'] = old
end
end
class TestHead < TestBaseCommand
setup do
@default_newline = if Fluent.windows?
"\r\n"
else
"\n"
end
end
sub_test_case 'initialize' do
data(
'file is not passed' => %w(),
'file is not found' => %w(invalid_path.log)
)
test 'should fail if file is invalid' do |argv|
assert_raise(SystemExit) do
capture_stdout { BinlogReaderCommand::Head.new(argv) }
end
end
test 'should succeed if a file is valid' do
file_name = 'packed.log'
argv = ["#{TMP_DIR}/#{file_name}"]
create_message_packed_file(file_name)
assert_nothing_raised do
BinlogReaderCommand::Head.new(argv)
end
end
test 'should fail when config_params format is invalid' do
file_name = 'packed.log'
argv = ["#{TMP_DIR}/#{file_name}", '--format=csv', '-e', 'only_key']
create_message_packed_file(file_name)
assert_raise(SystemExit) do
capture_stdout { BinlogReaderCommand::Head.new(argv) }
end
end
test 'should succeed if config_params format is valid' do
file_name = 'packed.log'
argv = ["#{TMP_DIR}/#{file_name}", '--format=csv', '-e', 'fields=message']
create_message_packed_file(file_name)
assert_nothing_raised do
capture_stdout { BinlogReaderCommand::Head.new(argv) }
end
end
end
sub_test_case 'call' do
setup do
@file_name = 'packed.log'
@t = '2011-01-02 13:14:15 UTC'
@record = { 'message' => 'dummy' }
end
test 'should output the beginning of the file with default format (out_file)' do
argv = ["#{TMP_DIR}/#{@file_name}"]
timezone do
create_message_packed_file(@file_name, [event_time(@t).to_i] * 6, [@record] * 6)
head = BinlogReaderCommand::Head.new(argv)
out = capture_stdout { head.call }
assert_equal "2011-01-02T13:14:15+00:00\t#{TMP_DIR}/#{@file_name}\t#{JSON.generate(@record)}#{@default_newline}" * 5, out
end
end
test 'should set the number of lines to display' do
argv = ["#{TMP_DIR}/#{@file_name}", '-n', '1']
timezone do
create_message_packed_file(@file_name, [event_time(@t).to_i] * 6, [@record] * 6)
head = BinlogReaderCommand::Head.new(argv)
out = capture_stdout { head.call }
assert_equal "2011-01-02T13:14:15+00:00\t#{TMP_DIR}/#{@file_name}\t#{JSON.generate(@record)}#{@default_newline}", out
end
end
test 'should fail when the number of lines is invalid' do
argv = ["#{TMP_DIR}/#{@file_name}", '-n', '0']
create_message_packed_file(@file_name)
assert_raise(SystemExit) do
capture_stdout { BinlogReaderCommand::Head.new(argv) }
end
end
test 'should output content of a file with json format' do
argv = ["#{TMP_DIR}/#{@file_name}", '--format=json']
timezone do
create_message_packed_file(@file_name, [event_time(@t).to_i], [@record])
head = BinlogReaderCommand::Head.new(argv)
out = capture_stdout { head.call }
assert_equal "#{JSON.generate(@record)}#{@default_newline}", out
end
end
test 'should fail with an invalid format' do
argv = ["#{TMP_DIR}/#{@file_name}", '--format=invalid']
timezone do
create_message_packed_file(@file_name, [event_time(@t).to_i], [@record])
head = BinlogReaderCommand::Head.new(argv)
assert_raise(SystemExit) do
capture_stdout { head.call }
end
end
end
test 'should succeed if multiple config_params format' do
file_name = 'packed.log'
argv = ["#{TMP_DIR}/#{file_name}", '--format=csv', '-e', 'fields=message,fo', '-e', 'delimiter=|']
create_message_packed_file(file_name, [event_time], [{ 'message' => 'dummy', 'fo' => 'dummy2' }])
head = BinlogReaderCommand::Head.new(argv)
assert_equal "\"dummy\"|\"dummy2\"\n", capture_stdout { head.call }
end
end
end
# Tests for `fluent-binlog-reader cat`, which prints every event stored in a
# binary log file (unlike `head`, which is limited by `-n`'s default).
class TestCat < TestBaseCommand
  setup do
    # The out_file formatter terminates records with the platform newline.
    @default_newline = if Fluent.windows?
                         "\r\n"
                       else
                         "\n"
                       end
  end

  sub_test_case 'initialize' do
    data(
      'file is not passed' => [],
      'file is not found' => %w(invalid_path.log)
    )
    test 'should fail if a file is invalid' do |argv|
      assert_raise(SystemExit) do
        # Fix: exercise Cat (this suite's subject). The previous version
        # instantiated Head here by copy-paste from TestHead, so Cat's
        # argument validation was never covered by this test.
        capture_stdout { BinlogReaderCommand::Cat.new(argv) }
      end
    end

    test 'should succeed if a file is valid' do
      file_name = 'packed.log'
      argv = ["#{TMP_DIR}/#{file_name}"]
      create_message_packed_file(file_name)
      assert_nothing_raised do
        BinlogReaderCommand::Cat.new(argv)
      end
    end

    test 'should fail when config_params format is invalid' do
      file_name = 'packed.log'
      # '-e only_key' lacks a value, so option parsing must abort.
      argv = ["#{TMP_DIR}/#{file_name}", '--format=json', '-e', 'only_key']
      create_message_packed_file(file_name)
      assert_raise(SystemExit) do
        capture_stdout { BinlogReaderCommand::Cat.new(argv) }
      end
    end

    test 'should succeed when config_params format is valid' do
      file_name = 'packed.log'
      argv = ["#{TMP_DIR}/#{file_name}", '--format=csv', '-e', 'fields=message']
      create_message_packed_file(file_name)
      assert_nothing_raised do
        capture_stdout { BinlogReaderCommand::Cat.new(argv) }
      end
    end
  end

  sub_test_case 'call' do
    setup do
      @file_name = 'packed.log'
      @t = '2011-01-02 13:14:15 UTC'
      @record = { 'message' => 'dummy' }
    end

    test 'should output the file with default format(out_file)' do
      argv = ["#{TMP_DIR}/#{@file_name}"]
      timezone do
        create_message_packed_file(@file_name, [event_time(@t).to_i] * 6, [@record] * 6)
        # Renamed local from the misleading `head` — this drives Cat.
        cat = BinlogReaderCommand::Cat.new(argv)
        out = capture_stdout { cat.call }
        # cat prints all 6 stored records.
        assert_equal "2011-01-02T13:14:15+00:00\t#{TMP_DIR}/#{@file_name}\t#{JSON.generate(@record)}#{@default_newline}" * 6, out
      end
    end

    test 'should set the number of lines to display' do
      argv = ["#{TMP_DIR}/#{@file_name}", '-n', '1']
      timezone do
        create_message_packed_file(@file_name, [event_time(@t).to_i] * 6, [@record] * 6)
        cat = BinlogReaderCommand::Cat.new(argv)
        out = capture_stdout { cat.call }
        assert_equal "2011-01-02T13:14:15+00:00\t#{TMP_DIR}/#{@file_name}\t#{JSON.generate(@record)}#{@default_newline}", out
      end
    end

    test 'should output content of a file with json format' do
      argv = ["#{TMP_DIR}/#{@file_name}", '--format=json']
      timezone do
        create_message_packed_file(@file_name, [event_time(@t).to_i], [@record])
        cat = BinlogReaderCommand::Cat.new(argv)
        out = capture_stdout { cat.call }
        assert_equal "#{JSON.generate(@record)}#{@default_newline}", out
      end
    end

    test 'should fail with an invalid format' do
      argv = ["#{TMP_DIR}/#{@file_name}", '--format=invalid']
      timezone do
        create_message_packed_file(@file_name, [event_time(@t).to_i], [@record])
        cat = BinlogReaderCommand::Cat.new(argv)
        assert_raise(SystemExit) do
          capture_stdout { cat.call }
        end
      end
    end

    test 'should succeed if multiple config_params format' do
      file_name = 'packed.log'
      argv = ["#{TMP_DIR}/#{file_name}", '--format=csv', '-e', 'fields=message,fo', '-e', 'delimiter=|']
      create_message_packed_file(file_name, [event_time], [{ 'message' => 'dummy', 'fo' => 'dummy2' }])
      cat = BinlogReaderCommand::Cat.new(argv)
      assert_equal "\"dummy\"|\"dummy2\"\n", capture_stdout { cat.call }
    end
  end
end
# Tests for `fluent-binlog-reader formats`, which lists usable formatter
# plugins, optionally extended with extra plugin directories via --plugin/-p.
class TestFormats < TestBaseCommand
  test 'parse_option!' do
    # A bogus --plugin directory must terminate the command.
    assert_raise(SystemExit) do
      capture_stdout do
        BinlogReaderCommand::Formats.new(['--plugin=invalid_dir_path'])
      end
    end
  end

  sub_test_case 'call' do
    test 'display available plugins' do
      output = capture_stdout { BinlogReaderCommand::Formats.new.call }
      assert output.include?('json')
      assert output.include?('csv')
    end

    test 'add new plugins using --plugin option' do
      plugin_dir = File.expand_path('../scripts/fluent/plugin/formatter1', File.dirname(__FILE__))
      formats = BinlogReaderCommand::Formats.new(["--plugin=#{plugin_dir}"])
      output = capture_stdout { formats.call }
      assert output.include?('json')
      assert output.include?('csv')
      assert output.include?('test1')
    end

    test 'add multiple plugins using --plugin option' do
      base_dir = File.dirname(__FILE__)
      plugin_dir1 = File.expand_path('../scripts/fluent/plugin/formatter1', base_dir)
      plugin_dir2 = File.expand_path('../scripts/fluent/plugin/formatter2', base_dir)
      formats = BinlogReaderCommand::Formats.new(["--plugin=#{plugin_dir1}", '-p', plugin_dir2])
      output = capture_stdout { formats.call }
      assert output.include?('json')
      assert output.include?('csv')
      assert output.include?('test1')
      assert output.include?('test2')
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/command/test_cat.rb | test/command/test_cat.rb | require_relative '../helper'
require 'test-unit'
require 'open3'
require 'fluent/plugin/output'
require 'fluent/plugin/in_forward'
require 'fluent/plugin/out_secondary_file'
require 'fluent/test/driver/output'
require 'fluent/test/driver/input'
# End-to-end tests for the bin/fluent-cat command: a real ForwardInput driver
# listens on a free local port and fluent-cat is spawned as a subprocess that
# pipes events to it via stdin.
class TestFluentCat < ::Test::Unit::TestCase
  def setup
    Fluent::Test.setup
    FileUtils.mkdir_p(TMP_DIR)
    @record = { 'key' => 'value' }
    @time = event_time
    @es = Fluent::OneEventStream.new(@time, @record)
    # A primary output plus a chunk written through it are prepared so the
    # msgpack test can replay a secondary-file dump through fluent-cat.
    @primary = create_primary
    metadata = @primary.buffer.new_metadata
    @chunk = create_chunk(@primary, metadata, @es)
    @port = unused_port(protocol: :all)
  end

  def teardown
    FileUtils.rm_rf(TMP_DIR)
    @port = nil
  end

  # TEST_ENV_NUMBER keeps tmp dirs distinct under parallel test runs.
  TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/../tmp/command/fluent_cat#{ENV['TEST_ENV_NUMBER']}")
  FLUENT_CAT_COMMAND = File.expand_path(File.dirname(__FILE__) + "/../../bin/fluent-cat")

  # forward input config bound to the per-test free port.
  def config
    %[
      port #{@port}
      bind 127.0.0.1
    ]
  end

  SECONDARY_CONFIG = %[
    directory #{TMP_DIR}
  ]

  # Minimal output plugin: accepts writes and discards them.
  class DummyOutput < Fluent::Plugin::Output
    def write(chunk); end
  end

  def create_driver(conf=config)
    Fluent::Test::Driver::Input.new(Fluent::Plugin::ForwardInput).configure(conf)
  end

  def create_primary(buffer_config = config_element('buffer'))
    DummyOutput.new.configure(config_element('ROOT','',{}, [buffer_config]))
  end

  # Secondary file output acting on behalf of @primary; used to produce a
  # msgpack file on disk that fluent-cat can re-emit.
  def create_secondary_driver(conf=SECONDARY_CONFIG)
    c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
    c.instance.acts_as_secondary(@primary)
    c.configure(conf)
  end

  # Builds a committed buffer chunk containing the given event stream.
  def create_chunk(primary, metadata, es)
    primary.buffer.generate_chunk(metadata).tap do |c|
      c.concat(es.to_msgpack_stream, es.size)
      c.commit
    end
  end

  sub_test_case "json" do
    def test_cat_json
      d = create_driver
      d.run(expect_records: 1) do
        # Pipe one JSON line to `fluent-cat json`; it should arrive tagged "json".
        Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} json") do |stdin|
          stdin.puts('{"key":"value"}')
          stdin.close
        end
      end
      event = d.events.first
      assert_equal([1, "json", @record],
                   [d.events.size, event.first, event.last])
    end
  end

  sub_test_case "msgpack" do
    def test_cat_secondary_file
      # Write the prepared chunk to disk via the secondary file output, then
      # feed the raw file to `fluent-cat --format msgpack`.
      d = create_secondary_driver
      path = d.instance.write(@chunk)
      d = create_driver
      d.run(expect_records: 1) do
        Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} --format msgpack secondary") do |stdin|
          stdin.write(File.read(path, File.size(path)))
          stdin.close
        end
      end
      event = d.events.first
      assert_equal([1, "secondary", @record],
                   [d.events.size, event.first, event.last])
    end
  end

  sub_test_case "send specific event time" do
    def test_without_event_time
      event_time = Fluent::EventTime.now
      d = create_driver
      d.run(expect_records: 1) do
        Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} tag") do |stdin|
          stdin.puts('{"key":"value"}')
          stdin.close
        end
      end
      event = d.events.first
      # Without --event-time the record is stamped by fluent-cat itself, so
      # the received time must be "now-ish" and after this test started.
      assert_in_delta(event_time.to_f, event[1].to_f, 3.0) # expect command to be finished in 3 seconds
      assert_equal([1, "tag", true, @record],
                   [d.events.size, event.first, event_time.to_f < event[1].to_f, event.last])
    end

    def test_with_event_time
      event_time = "2021-01-02 13:14:15.0+00:00"
      d = create_driver
      d.run(expect_records: 1) do
        # --event-time pins the timestamp exactly.
        Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} --event-time '#{event_time}' tag") do |stdin|
          stdin.puts('{"key":"value"}')
          stdin.close
        end
      end
      assert_equal([["tag", Fluent::EventTime.parse(event_time), @record]], d.events)
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/helpers/process_extenstion.rb | test/helpers/process_extenstion.rb | require 'timecop'
# Routes Process.clock_gettime(CLOCK_REALTIME, ...) through Time.now so that
# Timecop's frozen/travelled time is also observed by code that reads the
# realtime clock directly. All other clock ids keep the native behavior.
module Process
  class << self
    alias_method :clock_gettime_original, :clock_gettime

    # @param clock_id [Integer] clock to read; only CLOCK_REALTIME is faked
    # @param unit [Symbol] same unit symbols as the native API
    # @return [Integer, Float] current time expressed in +unit+
    # @raise [ArgumentError] for an unknown unit — mirrors the native API;
    #   the previous stub silently returned nil here instead
    def clock_gettime(clock_id, unit = :float_second)
      # now only support CLOCK_REALTIME; everything else stays native.
      return clock_gettime_original(clock_id, unit) unless Process::CLOCK_REALTIME == clock_id

      t = Time.now
      case unit
      when :float_second
        t.to_i + t.nsec / 1_000_000_000.0
      when :float_millisecond
        t.to_i * 1_000 + t.nsec / 1_000_000.0
      when :float_microsecond
        t.to_i * 1_000_000 + t.nsec / 1_000.0
      when :second
        t.to_i
      when :millisecond
        t.to_i * 1_000 + t.nsec / 1_000_000
      when :microsecond
        t.to_i * 1_000_000 + t.nsec / 1_000
      when :nanosecond
        t.to_i * 1_000_000_000 + t.nsec
      else
        raise ArgumentError, "unexpected unit: #{unit}"
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/helpers/fuzzy_assert.rb | test/helpers/fuzzy_assert.rb | require 'test/unit'
# Asserts that +actual+ "fuzzily includes" +expected+: scalars are compared
# with case equality (===, so classes, ranges and regexps all work as
# matchers), while arrays/hashes are walked recursively and extra elements
# in +actual+ are ignored.
class FuzzyIncludeAssertion
  include Test::Unit::Assertions

  # Convenience entry point mirroring Test::Unit's assert_* style.
  def self.assert(expected, actual, message = nil)
    new(expected, actual, message).assert
  end

  def initialize(expected, actual, message)
    @expected = expected
    @actual = actual
    @message = message
  end

  def assert
    collection? ? assert_same_collection : assert_same_value
  end

  private

  def assert_same_value
    failure = "expected(#{@expected}) !== actual(#{@actual.inspect})"
    failure = "#{@message}: #{failure}" if @message
    # === lets matchers such as Integer, 1..10 or /regexp/ act as expectations.
    assert_true(@expected === @actual, failure)
  end

  def assert_same_class
    return if @expected.class == @actual.class
    # NOTE(review): the union (`|`) of two ancestor chains can never be
    # empty, so this comparison is effectively unreachable; kept verbatim
    # for compatibility.
    if (@expected.class.ancestors | @actual.class.ancestors).empty?
      assert_equal(@expected.class, @actual.class, @message)
    end
  end

  def assert_same_collection
    assert_same_class
    assert_same_values
  end

  def assert_same_values
    if @expected.is_a?(Array)
      @expected.each_with_index do |expected_element, index|
        self.class.assert(expected_element, @actual[index], @message)
      end
    else
      @expected.each do |key, expected_element|
        self.class.assert(expected_element, @actual[key], "#{key}: ")
      end
    end
  end

  def collection?
    @actual.is_a?(Array) || @actual.is_a?(Hash)
  end
end
# Strict variant of FuzzyIncludeAssertion: values are still matched with ===,
# but collections must also have exactly the same size (arrays) or key set
# (hashes), so extra elements in +actual+ fail instead of being ignored.
class FuzzyAssertion < FuzzyIncludeAssertion
  private

  def assert_same_collection
    super
    assert_same_keys
  end

  def assert_same_keys
    if @expected.is_a?(Array)
      # Fix: the failure message used to interpolate @expected twice, which
      # made size mismatches impossible to diagnose from the output.
      assert_equal(@expected.size, @actual.size, "expected.size(#{@expected}) != actual.size(#{@actual})")
    else
      assert_equal(@expected.keys.sort, @actual.keys.sort)
    end
  end
end
# Mixin exposing the fuzzy assertions as ordinary assert_* helpers for
# Test::Unit test cases.
module FuzzyAssert
  # Passes when +right+ includes everything described by +left+.
  def assert_fuzzy_include(left, right, message = nil)
    FuzzyIncludeAssertion.assert(left, right, message)
  end

  # Passes only when +right+ matches +left+ exactly (sizes/keys included).
  def assert_fuzzy_equal(left, right, message = nil)
    FuzzyAssertion.assert(left, right, message)
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/counter/test_store.rb | test/counter/test_store.rb | require_relative '../helper'
require 'fluent/counter/store'
require 'fluent/time'
require 'timecop'
# Unit tests for Fluent::Counter::Store — the storage backing the counter
# server — covering init/get/key?/delete/inc/reset. Wall-clock time is frozen
# with Timecop so elapsed-time and last_reset_at values are deterministic.
class CounterStoreTest < ::Test::Unit::TestCase
  setup do
    @name = 'key_name'
    # Scope components are tab-separated: server, worker, plugin.
    @scope = "server\tworker\tplugin"
    # timecop isn't compatible with EventTime
    t = Time.parse('2016-09-22 16:59:59 +0900')
    Timecop.freeze(t)
    @now = Fluent::EventTime.now
  end

  teardown do
    Timecop.return
  end

  # Test-only backdoor into the store's underlying Hash.
  def extract_value_from_counter(counter, key)
    store = counter.instance_variable_get(:@storage).instance_variable_get(:@store)
    store[key]
  end

  # Advances the frozen clock by +sec+ seconds.
  def travel(sec)
    # Since Timecop.travel() causes test failures on Windows/AppVeyor by inducing
    # rounding errors to Time.now, we need to use Timecop.freeze() instead.
    Timecop.freeze(Time.now + sec)
  end

  sub_test_case 'init' do
    setup do
      @reset_interval = 10
      @store = Fluent::Counter::Store.new
      @data = { 'name' => @name, 'reset_interval' => @reset_interval }
      @key = Fluent::Counter::Store.gen_key(@scope, @name)
    end

    test 'create new value in the counter' do
      v = @store.init(@key, @data)
      assert_equal @name, v['name']
      assert_equal @reset_interval, v['reset_interval']
      # The returned response must mirror what was actually persisted.
      v2 = extract_value_from_counter(@store, @key)
      v2 = @store.send(:build_response, v2)
      assert_equal v, v2
    end

    test 'raise an error when a passed key already exists' do
      @store.init(@key, @data)
      assert_raise Fluent::Counter::InvalidParams do
        @store.init(@key, @data)
      end
    end

    test 'return a value when passed key already exists and a ignore option is true' do
      # With ignore: true a duplicate init returns the existing value
      # instead of raising.
      v = @store.init(@key, @data)
      v1 = extract_value_from_counter(@store, @key)
      v1 = @store.send(:build_response, v1)
      v2 = @store.init(@key, @data, ignore: true)
      assert_equal v, v2
      assert_equal v1, v2
    end
  end

  sub_test_case 'get' do
    setup do
      @store = Fluent::Counter::Store.new
      data = { 'name' => @name, 'reset_interval' => 10 }
      @key = Fluent::Counter::Store.gen_key(@scope, @name)
      @store.init(@key, data)
    end

    test 'return a value from the counter' do
      v = extract_value_from_counter(@store, @key)
      expected = @store.send(:build_response, v)
      assert_equal expected, @store.get(@key)
    end

    test 'return a raw value from the counter when raw option is true' do
      # raw: true skips build_response and exposes the stored structure.
      v = extract_value_from_counter(@store, @key)
      assert_equal v, @store.get(@key, raw: true)
    end

    test "return nil when a passed key doesn't exist" do
      assert_equal nil, @store.get('unknown_key')
    end

    test "raise a error when a passed key doesn't exist and raise_error option is true" do
      assert_raise Fluent::Counter::UnknownKey do
        @store.get('unknown_key', raise_error: true)
      end
    end
  end

  sub_test_case 'key?' do
    setup do
      @store = Fluent::Counter::Store.new
      data = { 'name' => @name, 'reset_interval' => 10 }
      @key = Fluent::Counter::Store.gen_key(@scope, @name)
      @store.init(@key, data)
    end

    test 'return true when passed key exists' do
      assert_true @store.key?(@key)
    end

    test "return false when passed key doesn't exist" do
      assert_true !@store.key?('unknown_key')
    end
  end

  sub_test_case 'delete' do
    setup do
      @store = Fluent::Counter::Store.new
      data = { 'name' => @name, 'reset_interval' => 10 }
      @key = Fluent::Counter::Store.gen_key(@scope, @name)
      @init_value = @store.init(@key, data)
    end

    test 'delete a value from the counter' do
      # delete returns the removed value and clears the storage slot.
      v = @store.delete(@key)
      assert_equal @init_value, v
      assert_nil extract_value_from_counter(@store, @key)
    end

    test "raise an error when passed key doesn't exist" do
      assert_raise Fluent::Counter::UnknownKey do
        @store.delete('unknown_key')
      end
    end
  end

  sub_test_case 'inc' do
    setup do
      @store = Fluent::Counter::Store.new
      @init_data = { 'name' => @name, 'reset_interval' => 10 }
      @travel_sec = 10
    end

    data(
      positive: 10,
      negative: -10
    )
    test 'increment or decrement a value in the counter' do |value|
      key = Fluent::Counter::Store.gen_key(@scope, @name)
      @store.init(key, @init_data)
      travel(@travel_sec)
      v = @store.inc(key, { 'value' => value })
      assert_equal value, v['total']
      assert_equal value, v['current']
      assert_equal @now, v['last_reset_at'] # last_reset_at doesn't change
      v1 = extract_value_from_counter(@store, key)
      v1 = @store.send(:build_response, v1)
      assert_equal v, v1
    end

    test "raise an error when passed key doesn't exist" do
      assert_raise Fluent::Counter::UnknownKey do
        @store.inc('unknown_key', { 'value' => 1 })
      end
    end

    test 'raise an error when a type of passed value is incompatible with a stored value' do
      key1 = Fluent::Counter::Store.gen_key(@scope, @name)
      key2 = Fluent::Counter::Store.gen_key(@scope, 'name2')
      key3 = Fluent::Counter::Store.gen_key(@scope, 'name3')
      v1 = @store.init(key1, @init_data.merge('type' => 'integer'))
      v2 = @store.init(key2, @init_data.merge('type' => 'float'))
      v3 = @store.init(key3, @init_data.merge('type' => 'numeric'))
      assert_equal 'integer', v1['type']
      assert_equal 'float', v2['type']
      assert_equal 'numeric', v3['type']
      # 'integer' rejects a Float increment, 'float' rejects an Integer...
      assert_raise Fluent::Counter::InvalidParams do
        @store.inc(key1, { 'value' => 1.1 })
      end
      assert_raise Fluent::Counter::InvalidParams do
        @store.inc(key2, { 'value' => 1 })
      end
      # ...while 'numeric' accepts both.
      assert_nothing_raised do
        @store.inc(key3, { 'value' => 1 })
        @store.inc(key3, { 'value' => 1.0 })
      end
    end
  end

  sub_test_case 'reset' do
    setup do
      @store = Fluent::Counter::Store.new
      @travel_sec = 10
      @inc_value = 10
      @key = Fluent::Counter::Store.gen_key(@scope, @name)
      @store.init(@key, { 'name' => @name, 'reset_interval' => 10 })
      @store.inc(@key, { 'value' => 10 })
    end

    test 'reset a value in the counter' do
      travel(@travel_sec)
      v = @store.reset(@key)
      assert_equal @travel_sec, v['elapsed_time']
      assert_true v['success']
      # counter_data is a snapshot of the value *before* the reset.
      counter = v['counter_data']
      assert_equal @name, counter['name']
      assert_equal @inc_value, counter['total']
      assert_equal @inc_value, counter['current']
      assert_equal 'numeric', counter['type']
      assert_equal @now, counter['last_reset_at']
      assert_equal 10, counter['reset_interval']
      # After the reset `current` returns to 0 but `total` is preserved,
      # and both timestamps move to the (travelled) reset time.
      v1 = extract_value_from_counter(@store, @key)
      assert_equal 0, v1['current']
      assert_true v1['current'].is_a?(Integer)
      assert_equal @inc_value, v1['total']
      assert_equal (@now + @travel_sec), Fluent::EventTime.new(*v1['last_reset_at'])
      assert_equal (@now + @travel_sec), Fluent::EventTime.new(*v1['last_modified_at'])
    end

    test 'reset a value after `reset_interval` passed' do
      first_travel_sec = 5
      travel(first_travel_sec) # jump time less than reset_interval
      v = @store.reset(@key)
      # Too early: reset is refused and the stored value is untouched.
      assert_equal false, v['success']
      assert_equal first_travel_sec, v['elapsed_time']
      store = extract_value_from_counter(@store, @key)
      assert_equal 10, store['current']
      assert_equal @now, Fluent::EventTime.new(*store['last_reset_at'])
      # time is passed greater than reset_interval
      travel(@travel_sec)
      v = @store.reset(@key)
      assert_true v['success']
      assert_equal @travel_sec + first_travel_sec, v['elapsed_time']
      v1 = extract_value_from_counter(@store, @key)
      assert_equal 0, v1['current']
      assert_equal (@now + @travel_sec + first_travel_sec), Fluent::EventTime.new(*v1['last_reset_at'])
      assert_equal (@now + @travel_sec + first_travel_sec), Fluent::EventTime.new(*v1['last_modified_at'])
    end

    test "raise an error when passed key doesn't exist" do
      assert_raise Fluent::Counter::UnknownKey do
        @store.reset('unknown_key')
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/counter/test_mutex_hash.rb | test/counter/test_mutex_hash.rb | require_relative '../helper'
require 'fluent/counter/mutex_hash'
require 'fluent/counter/store'
require 'flexmock/test_unit'
require 'timecop'
# Tests for Fluent::Counter::MutexHash#synchronize and #synchronize_keys:
# both must create one Mutex per key, reuse it on subsequent access, and
# yield (store, key) to the caller's block.
class MutexHashTest < ::Test::Unit::TestCase
  setup do
    @store = {}
    @value = 'sample value'
    @counter_store_mutex = Fluent::Counter::MutexHash.new(@store)
  end

  # Reads the internal Hash that maps keys to their Mutex objects.
  def internal_mutexes
    @counter_store_mutex.instance_variable_get(:@mutex_hash)
  end

  sub_test_case 'synchronize' do
    test "create new mutex values if keys don't exist" do
      keys = ['key', 'key1']
      @counter_store_mutex.synchronize(*keys) do |store, k|
        store[k] = @value
      end
      registry = internal_mutexes
      keys.each do |key|
        assert_true registry[key].is_a?(Mutex)
        assert_equal @value, @store[key]
      end
    end

    test 'nothing to do when an empty array passed' do
      @counter_store_mutex.synchronize do |store, k|
        store[k] = @value
      end
      assert_true internal_mutexes.empty?
      assert_true @store.empty?
    end

    test 'use a one mutex value when the same key specified' do
      key = 'key'
      @counter_store_mutex.synchronize(key) do |store, k|
        store[k] = @value
      end
      first_mutex = internal_mutexes[key]
      assert_true first_mutex.is_a?(Mutex)
      assert_equal @value, @store[key]
      # Touch the same key again: the stored value changes but the mutex
      # object must be reused.
      second_value = 'test value2'
      @counter_store_mutex.synchronize(key) do |store, k|
        store[k] = second_value
      end
      second_mutex = internal_mutexes[key]
      assert_true second_mutex.is_a?(Mutex)
      assert_equal second_value, @store[key]
      assert_equal first_mutex, second_mutex
    end
  end

  sub_test_case 'synchronize_key' do
    test "create new mutex values if keys don't exist" do
      keys = ['key', 'key1']
      @counter_store_mutex.synchronize_keys(*keys) do |store, k|
        store[k] = @value
      end
      registry = internal_mutexes
      keys.each do |key|
        assert_true registry[key].is_a?(Mutex)
        assert_equal @value, @store[key]
      end
    end

    test 'nothing to do when an empty array passed' do
      @counter_store_mutex.synchronize_keys do |store, k|
        store[k] = @value
      end
      assert_true internal_mutexes.empty?
      assert_true @store.empty?
    end

    test 'use a one mutex value when the same key specified' do
      key = 'key'
      @counter_store_mutex.synchronize_keys(key) do |store, k|
        store[k] = @value
      end
      first_mutex = internal_mutexes[key]
      assert_true first_mutex.is_a?(Mutex)
      assert_equal @value, @store[key]
      # Same key again: value replaced, mutex object reused.
      second_value = 'test value2'
      @counter_store_mutex.synchronize_keys(key) do |store, k|
        store[k] = second_value
      end
      second_mutex = internal_mutexes[key]
      assert_true second_mutex.is_a?(Mutex)
      assert_equal second_value, @store[key]
      assert_equal first_mutex, second_mutex
    end
  end
end
# Tests for MutexHash's background cleanup thread, which evicts mutexes whose
# counters have not been modified for longer than the cleanup interval.
class CleanupThreadTest < ::Test::Unit::TestCase
  # NOTE(review): StoreValue is not referenced by the tests visible here —
  # confirm whether it is still needed.
  StoreValue = Struct.new(:last_modified_at)

  setup do
    # timecop isn't compatible with EventTime
    t = Time.parse('2016-09-22 16:59:59 +0900')
    Timecop.freeze(t)
    @store = Fluent::Counter::Store.new
    @mhash = Fluent::Counter::MutexHash.new(@store)
    # stub sleep method to avoid waiting CLEANUP_INTERVAL
    ct = @mhash.instance_variable_get(:@cleanup_thread)
    flexstub(ct).should_receive(:sleep)
  end

  teardown do
    @mhash.stop
    Timecop.return
  end

  test 'clean up unused mutex' do
    name = 'key1'
    init_obj = { 'name' => name, 'reset_interval' => 2 }
    @mhash.synchronize(init_obj['name']) do
      @store.init(name, init_obj)
    end
    ct = @mhash.instance_variable_get(:@mutex_hash)
    assert ct[name]
    # Jump past the cleanup threshold so the entry looks stale.
    Timecop.travel(15 * 60 + 1) # 15 min
    @mhash.start # start cleanup
    sleep 1 # give the cleanup thread a chance to run one pass
    ct = @mhash.instance_variable_get(:@mutex_hash)
    assert_empty ct
    @mhash.stop
  end

  test "don't remove when `last_modified_at` is greater than (Time.now - CLEANUP_INTERVAL)" do
    name = 'key1'
    init_obj = { 'name' => name, 'reset_interval' => 2 }
    @mhash.synchronize(init_obj['name']) do
      @store.init(name, init_obj)
    end
    ct = @mhash.instance_variable_get(:@mutex_hash)
    assert ct[name]
    # No time travel here: the entry is fresh and must survive a cleanup pass.
    @mhash.start # start cleanup
    sleep 1
    ct = @mhash.instance_variable_get(:@mutex_hash)
    assert ct[name]
    @mhash.stop
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/counter/test_error.rb | test/counter/test_error.rb | require_relative '../helper'
require 'fluent/counter/error'
# Verifies that every Fluent::Counter error class serializes itself into the
# protocol's { 'code' => ..., 'message' => ... } hash form via #to_hash.
class CounterErrorTest < ::Test::Unit::TestCase
  setup do
    @message = 'error message'
  end

  # Builds the error and checks its wire representation.
  def assert_error_hash(error_class, code)
    expected = { 'code' => code, 'message' => @message }
    assert_equal expected, error_class.new(@message).to_hash
  end

  test 'invalid_params' do
    assert_error_hash(Fluent::Counter::InvalidParams, 'invalid_params')
  end

  test 'unknown_key' do
    assert_error_hash(Fluent::Counter::UnknownKey, 'unknown_key')
  end

  test 'parse_error' do
    assert_error_hash(Fluent::Counter::ParseError, 'parse_error')
  end

  test 'method_not_found' do
    assert_error_hash(Fluent::Counter::MethodNotFound, 'method_not_found')
  end

  test 'invalid_request' do
    assert_error_hash(Fluent::Counter::InvalidRequest, 'invalid_request')
  end

  test 'internal_server_error' do
    assert_error_hash(Fluent::Counter::InternalServerError, 'internal_server_error')
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/counter/test_validator.rb | test/counter/test_validator.rb | require_relative '../helper'
require 'fluent/counter/validator'
# Tests for Fluent::Counter::Validator: the VALID_NAME pattern, request-level
# validation (Validator.request), and dispatch of validate_* methods via #call.
class CounterValidatorTest < ::Test::Unit::TestCase
  data(
    invalid_name1: '',
    invalid_name3: '_',
    invalid_name4: 'A',
    invalid_name5: 'a*',
    invalid_name6: "a\t",
    invalid_name7: "\n",
  )
  test 'invalid name' do |invalid_name|
    # None of these may match the name pattern.
    assert_nil(Fluent::Counter::Validator::VALID_NAME =~ invalid_name)
  end

  sub_test_case 'request' do
    test 'return an empty array' do
      # A well-formed request produces no errors.
      data = { 'id' => 0, 'method' => 'init' }
      errors = Fluent::Counter::Validator.request(data)
      assert_empty errors
    end

    data(
      missing_id: [
        { 'method' => 'init' },
        { 'code' => 'invalid_request', 'message' => 'Request should include `id`' }
      ],
      missing_method: [
        { 'id' => 0 },
        { 'code' => 'invalid_request', 'message' => 'Request should include `method`' }
      ],
      invalid_method: [
        { 'id' => 0, 'method' => "A\t" },
        { 'code' => 'invalid_request', 'message' => '`method` is the invalid format' }
      ],
      unknown_method: [
        { 'id' => 0, 'method' => 'unknown_method' },
        { 'code' => 'method_not_found', 'message' => 'Unknown method name passed: unknown_method' }
      ]
    )
    test 'return an error array' do |(data, expected_error)|
      errors = Fluent::Counter::Validator.request(data)
      assert_equal [expected_error], errors
    end
  end

  sub_test_case 'call' do
    test "return an error hash when passed method doesn't exist" do
      # :unknown has no validate_unknown! implementation, so every param
      # lands in the error list as an internal server error.
      v = Fluent::Counter::Validator.new(:unknown)
      success, errors = v.call(['key1'])
      assert_empty success
      assert_equal 'internal_server_error', errors.first.to_hash['code']
    end
  end

  test 'validate_empty!' do
    v = Fluent::Counter::Validator.new(:empty)
    success, errors = v.call([])
    assert_empty success
    assert_equal [Fluent::Counter::InvalidParams.new('One or more `params` are required')], errors
  end
end
# ArrayValidator#call partitions raw keys into valid entries and error
# objects; only well-formed String keys pass.
class CounterArrayValidatorTest < ::Test::Unit::TestCase
  test 'validate_key!' do
    input = ['key', 100, '_']
    expected_errors = [
      { 'code' => 'invalid_params', 'message' => 'The type of `key` should be String' },
      { 'code' => 'invalid_params', 'message' => '`key` is the invalid format' }
    ]
    validator = Fluent::Counter::ArrayValidator.new(:key)
    valid, invalid = validator.call(input)
    assert_equal ['key'], valid
    assert_equal expected_errors, invalid.map(&:to_hash)
  end
end
# HashValidator#call partitions parameter hashes into valid entries and error
# objects, one validate_* rule per constructor symbol.
class CounterHashValidatorTest < ::Test::Unit::TestCase
  test 'validate_name!' do
    params = [
      { 'name' => 'key' },
      {},
      { 'name' => 10 },
      { 'name' => '_' }
    ]
    expected_errors = [
      { 'code' => 'invalid_params', 'message' => '`name` is required' },
      { 'code' => 'invalid_params', 'message' => 'The type of `name` should be String' },
      { 'code' => 'invalid_params', 'message' => '`name` is the invalid format' },
    ]
    validator = Fluent::Counter::HashValidator.new(:name)
    valid, invalid = validator.call(params)
    assert_equal [{ 'name' => 'key' }], valid
    assert_equal expected_errors, invalid.map(&:to_hash)
  end

  test 'validate_value!' do
    params = [
      { 'value' => 1 },
      { 'value' => -1 },
      {},
      { 'value' => 'str' }
    ]
    expected_errors = [
      { 'code' => 'invalid_params', 'message' => '`value` is required' },
      { 'code' => 'invalid_params', 'message' => 'The type of `value` type should be Numeric' },
    ]
    validator = Fluent::Counter::HashValidator.new(:value)
    valid, invalid = validator.call(params)
    # Negative values are allowed; only missing or non-numeric ones fail.
    assert_equal [{ 'value' => 1 }, { 'value' => -1 }], valid
    assert_equal expected_errors, invalid.map(&:to_hash)
  end

  test 'validate_reset_interval!' do
    params = [
      { 'reset_interval' => 1 },
      { 'reset_interval' => 1.0 },
      {},
      { 'reset_interval' => -1 },
      { 'reset_interval' => 'str' }
    ]
    expected_errors = [
      { 'code' => 'invalid_params', 'message' => '`reset_interval` is required' },
      { 'code' => 'invalid_params', 'message' => '`reset_interval` should be a positive number' },
      { 'code' => 'invalid_params', 'message' => 'The type of `reset_interval` should be Numeric' },
    ]
    validator = Fluent::Counter::HashValidator.new(:reset_interval)
    valid, invalid = validator.call(params)
    assert_equal [{ 'reset_interval' => 1 }, { 'reset_interval' => 1.0 }], valid
    # (The original compared error_expected.map(&:to_hash); Hash#to_hash is
    # the identity, so comparing the hashes directly is equivalent.)
    assert_equal expected_errors, invalid.map(&:to_hash)
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/counter/test_server.rb | test/counter/test_server.rb | require_relative '../helper'
require 'fluent/counter/server'
require 'fluent/counter/store'
require 'fluent/time'
require 'flexmock/test_unit'
require 'timecop'
class CounterServerTest < ::Test::Unit::TestCase
setup do
  # timecop isn't compatible with EventTime, so freeze plain Time first and
  # derive the EventTime from the frozen clock.
  t = Time.parse('2016-09-22 16:59:59 +0900')
  Timecop.freeze(t)
  @now = Fluent::EventTime.now
  @scope = "server\tworker\tplugin"
  @server_name = 'server1'
  @server = Fluent::Counter::Server.new(@server_name, opt: { log: $log })
  # Close the listener immediately: these tests drive on_message/send
  # directly and never need a live socket.
  # NOTE(review): inside instance_eval, `@server` reads the Server object's
  # own internal @server ivar (its listener), not this test's ivar — confirm
  # that is the intended target.
  @server.instance_eval { @server.close }
end
teardown do
  # Unfreeze the clock so later test classes see real time again.
  Timecop.return
end
# Digs through the server's private storage chain (@store -> @storage ->
# @store) and returns the raw stored value for +name+ under +scope+.
# Test-only backdoor; there is no public API for this.
def extract_value_from_counter(counter, scope, name)
  storage = counter.instance_variable_get(:@store).instance_variable_get(:@storage)
  raw_store = storage.instance_variable_get(:@store)
  raw_store[Fluent::Counter::Store.gen_key(scope, name)]
end
# Advances the frozen clock by +sec+ seconds.
def travel(sec)
  # Since Timecop.travel() causes test failures on Windows/AppVeyor by inducing
  # rounding errors to Time.now, we need to use Timecop.freeze() instead.
  Timecop.freeze(Time.now + sec)
end
# Server names become components of tab-separated scope keys, so a name
# containing a tab must be rejected at construction time.
test 'raise an error when server name is invalid' do
  assert_raise do
    Fluent::Counter::Server.new("\tinvalid_name")
  end
end
# on_message: request validation and dispatch to the named handler method.
sub_test_case 'on_message' do
  data(
    establish: 'establish',
    init: 'init',
    delete: 'delete',
    inc: 'inc',
    get: 'get',
    reset: 'reset',
  )
  test 'call valid methods' do |method|
    # Stub the dispatch target so only routing is verified here, not the
    # behavior of the individual handlers.
    stub(@server).send do |_m, params, scope, options|
      { 'data' => [params, scope, options] }
    end
    request = { 'id' => 0, 'method' => method }
    expected = { 'id' => 0, 'data' => [nil, nil, nil] }
    assert_equal expected, @server.on_message(request)
  end

  data(
    missing_id: [
      { 'method' => 'init' },
      { 'code' => 'invalid_request', 'message' => 'Request should include `id`' }
    ],
    missing_method: [
      { 'id' => 0 },
      { 'code' => 'invalid_request', 'message' => 'Request should include `method`' }
    ],
    invalid_method: [
      { 'id' => 0, 'method' => 'invalid_method' },
      { 'code' => 'method_not_found', 'message' => 'Unknown method name passed: invalid_method' }
    ]
  )
  test 'invalid request' do |(request, error)|
    # Malformed requests are answered with an error payload, echoing the id.
    expected = {
      'id' => request['id'],
      'data' => [],
      'errors' => [error],
    }
    assert_equal expected, @server.on_message(request)
  end

  test 'return an `internal_server_error` error object when an error raises in safe_run' do
    # Exceptions inside the handler must be converted into an
    # internal_server_error response rather than propagating.
    stub(@server).send do |_m, _params, _scope, _options|
      raise 'Error in safe_run'
    end
    request = { 'id' => 0, 'method' => 'init' }
    expected = {
      'id' => request['id'],
      'data' => [],
      'errors' => [
        { 'code' => 'internal_server_error', 'message' => 'Error in safe_run' }
      ]
    }
    assert_equal expected, @server.on_message(request)
  end

  test 'output an error log when passed data is not Hash' do
    data = 'this is not a hash'
    mock($log).error("Received data is not Hash: #{data}")
    @server.on_message(data)
  end
end
# establish: registers a scope, returning it prefixed with the server name.
sub_test_case 'establish' do
  test 'establish a scope in a counter' do
    result = @server.send('establish', ['key'], nil, nil)
    # The returned scope is "<server_name>\t<key>".
    expected = { 'data' => ["#{@server_name}\tkey"] }
    assert_equal expected, result
  end

  data(
    empty: [[], 'One or more `params` are required'],
    empty_key: [[''], '`scope` is the invalid format'],
    invalid_key: [['_key'], '`scope` is the invalid format'],
  )
  test 'raise an error: invalid_params' do |(params, msg)|
    result = @server.send('establish', params, nil, nil)
    expected = {
      'data' => [],
      'errors' => [{ 'code' => 'invalid_params', 'message' => msg }]
    }
    assert_equal expected, result
  end
end
# `init` creates a named counter inside the established scope.
sub_test_case 'init' do
  setup do
    @name = 'key1'
    # Full storage key is "<scope>\t<name>".
    @key = Fluent::Counter::Store.gen_key(@scope, @name)
  end

  test 'create new value in a counter' do
    assert_nil extract_value_from_counter(@server, @scope, @name)
    result = @server.send('init', [{ 'name' => @name, 'reset_interval' => 1 }], @scope, {})
    assert_nil result['errors']
    counter = result['data'].first
    # Fresh counters start at zero with the default 'numeric' type.
    assert_equal 'numeric', counter['type']
    assert_equal @name, counter['name']
    assert_equal 0, counter['current']
    assert_equal 0, counter['total']
    assert_equal @now, counter['last_reset_at']
    assert extract_value_from_counter(@server, @scope, @name)
  end

  data(
    numeric: 'numeric',
    integer: 'integer',
    float: 'float'
  )
  test 'set the type of a counter value' do |type|
    result = @server.send('init', [{ 'name' => @name, 'reset_interval' => 1, 'type' => type }], @scope, {})
    counter = result['data'].first
    assert_equal type, counter['type']
    v = extract_value_from_counter(@server, @scope, @name)
    assert_equal type, v['type']
  end

  # Parameter validation failures all come back as `invalid_params`.
  data(
    empty: [[], 'One or more `params` are required'],
    missing_name: [
      [{ 'rest_interval' => 20 }],
      '`name` is required'
    ],
    invalid_name: [
      [{ 'name' => '_test', 'reset_interval' => 20 }],
      '`name` is the invalid format'
    ],
    missing_interval: [
      [{ 'name' => 'test' }],
      '`reset_interval` is required'
    ],
    minus_interval: [
      [{ 'name' => 'test', 'reset_interval' => -1 }],
      '`reset_interval` should be a positive number'
    ],
    invalid_type: [
      [{ 'name' => 'test', 'reset_interval' => 1, 'type' => 'invalid_type' }],
      '`type` should be integer, float, or numeric'
    ]
  )
  test 'return an error object: invalid_params' do |(params, msg)|
    result = @server.send('init', params, @scope, {})
    assert_empty result['data']
    error = result['errors'].first
    assert_equal 'invalid_params', error['code']
    assert_equal msg, error['message']
  end

  test 'return error objects when passed key already exists' do
    @server.send('init', [{ 'name' => @name, 'reset_interval' => 1 }], @scope, {})
    # call `init` to same key twice
    result = @server.send('init', [{ 'name' => @name, 'reset_interval' => 1 }], @scope, {})
    assert_empty result['data']
    error = result['errors'].first
    expected = { 'code' => 'invalid_params', 'message' => "#{@key} already exists in counter" }
    assert_equal expected, error
  end

  test 'return existing value when passed key already exists and ignore option is true' do
    v1 = @server.send('init', [{ 'name' => @name, 'reset_interval' => 1 }], @scope, {})
    # call `init` to same key twice
    v2 = @server.send('init', [{ 'name' => @name, 'reset_interval' => 1 }], @scope, { 'ignore' => true })
    assert_nil v2['errors']
    assert_equal v1['data'], v2['data']
  end

  test 'call `synchronize_keys` when random option is true' do
    # Default path locks via `synchronize`; the 'random' option switches the
    # server to `synchronize_keys`. Mocks verify exactly one call each.
    mhash = @server.instance_variable_get(:@mutex_hash)
    mock(mhash).synchronize(@key).once
    @server.send('init', [{ 'name' => @name, 'reset_interval' => 1 }], @scope, {})
    mhash = @server.instance_variable_get(:@mutex_hash)
    mock(mhash).synchronize_keys(@key).once
    @server.send('init', [{ 'name' => @name, 'reset_interval' => 1 }], @scope, { 'random' => true })
  end
end
# `delete` removes a counter and returns its final state.
sub_test_case 'delete' do
  setup do
    @name = 'key1'
    @key = Fluent::Counter::Store.gen_key(@scope, @name)
    # Seed one counter so the happy path has something to delete.
    @server.send('init', [{ 'name' => @name, 'reset_interval' => 20 }], @scope, {})
  end

  test 'delete a value from a counter' do
    assert extract_value_from_counter(@server, @scope, @name)
    result = @server.send('delete', [@name], @scope, {})
    assert_nil result['errors']
    # The response carries the deleted counter's last state.
    counter = result['data'].first
    assert_equal 0, counter['current']
    assert_equal 0, counter['total']
    assert_equal 'numeric', counter['type']
    assert_equal @name, counter['name']
    assert_equal @now, counter['last_reset_at']
    assert_nil extract_value_from_counter(@server, @scope, @name)
  end

  data(
    empty: [[], 'One or more `params` are required'],
    empty_key: [[''], '`key` is the invalid format'],
    invalid_key: [['_key'], '`key` is the invalid format'],
  )
  test 'return an error object: invalid_params' do |(params, msg)|
    result = @server.send('delete', params, @scope, {})
    assert_empty result['data']
    error = result['errors'].first
    assert_equal 'invalid_params', error['code']
    assert_equal msg, error['message']
  end

  test 'return an error object: unknown_key' do
    unknown_key = 'unknown_key'
    result = @server.send('delete', [unknown_key], @scope, {})
    assert_empty result['data']
    error = result['errors'].first
    assert_equal unknown_key, error['code']
    assert_equal "`#{@scope}\t#{unknown_key}` doesn't exist in counter", error['message']
  end

  test 'call `synchronize_keys` when random option is true' do
    # Same locking contract as `init`: plain call uses `synchronize`,
    # 'random' => true switches to `synchronize_keys`.
    mhash = @server.instance_variable_get(:@mutex_hash)
    mock(mhash).synchronize(@key).once
    @server.send('delete', [@name], @scope, {})
    mhash = @server.instance_variable_get(:@mutex_hash)
    mock(mhash).synchronize_keys(@key).once
    @server.send('delete', [@name], @scope, { 'random' => true })
  end
end
# `inc` adds a (possibly negative) value to an existing counter.
sub_test_case 'inc' do
  setup do
    @name1 = 'key1'
    @name2 = 'key2'
    @key1 = Fluent::Counter::Store.gen_key(@scope, @name1)
    # key1 is 'numeric' (default), key2 is 'integer' — used by the
    # invalid_type case below.
    inc_objects = [
      { 'name' => @name1, 'reset_interval' => 20 },
      { 'name' => @name2, 'type' => 'integer', 'reset_interval' => 20 }
    ]
    @server.send('init', inc_objects, @scope, {})
  end

  test 'increment or decrement a value in counter' do
    result = @server.send('inc', [{ 'name' => @name1, 'value' => 10 }], @scope, {})
    assert_nil result['errors']
    counter = result['data'].first
    assert_equal 10, counter['current']
    assert_equal 10, counter['total']
    assert_equal 'numeric', counter['type']
    assert_equal @name1, counter['name']
    assert_equal @now, counter['last_reset_at']
    # The stored record keeps timestamps as (sec, nsec) tuples.
    c = extract_value_from_counter(@server, @scope, @name1)
    assert_equal 10, c['current']
    assert_equal 10, c['total']
    assert_equal @now, Fluent::EventTime.new(*c['last_reset_at'])
    assert_equal @now, Fluent::EventTime.new(*c['last_modified_at'])
  end

  test 'create new value and increment/decrement its value when `force` option is true' do
    new_name = 'new_key'
    assert_nil extract_value_from_counter(@server, @scope, new_name)
    # Without `force`, incrementing an unknown key is an error...
    v1 = @server.send('inc', [{ 'name' => new_name, 'value' => 10 }], @scope, {})
    assert_empty v1['data']
    error = v1['errors'].first
    assert_equal 'unknown_key', error['code']
    assert_nil extract_value_from_counter(@server, @scope, new_name)
    # ...with `force` (plus a reset_interval) the counter is auto-created.
    v2 = @server.send(
      'inc',
      [{ 'name' => new_name, 'value' => 10, 'reset_interval' => 20 }],
      @scope,
      { 'force' => true }
    )
    assert_nil v2['errors']
    counter = v2['data'].first
    assert_equal 10, counter['current']
    assert_equal 10, counter['total']
    assert_equal 'numeric', counter['type']
    assert_equal new_name, counter['name']
    assert_equal @now, counter['last_reset_at']
    assert extract_value_from_counter(@server, @scope, new_name)
  end

  # Third data element is the options hash passed to `inc`.
  data(
    empty: [[], 'One or more `params` are required', {}],
    missing_name: [
      [{ 'value' => 10 }],
      '`name` is required', {}
    ],
    missing_value: [
      [{ 'name' => 'key1' }],
      '`value` is required', {}
    ],
    invalid_type: [
      [{ 'name' => 'key2', 'value' => 10.0 }],
      '`type` is integer. You should pass integer value as a `value`', {}
    ],
    missing_reset_interval: [
      [{ 'name' => 'key1', 'value' => 1 }],
      '`reset_interval` is required',
      { 'force' => true }
    ]
  )
  test 'return an error object: invalid_params' do |(params, msg, opt)|
    result = @server.send('inc', params, @scope, opt)
    assert_empty result['data']
    error = result['errors'].first
    assert_equal 'invalid_params', error['code']
    assert_equal msg, error['message']
  end

  test 'call `synchronize_keys` when random option is true' do
    # Same locking contract as `init`/`delete`.
    mhash = @server.instance_variable_get(:@mutex_hash)
    mock(mhash).synchronize(@key1).once
    params = [{ 'name' => @name1, 'value' => 1 }]
    @server.send('inc', params, @scope, {})
    mhash = @server.instance_variable_get(:@mutex_hash)
    mock(mhash).synchronize_keys(@key1).once
    @server.send('inc', params, @scope, { 'random' => true })
  end
end
# `reset` zeroes `current` (keeping `total`) once `reset_interval`
# seconds have elapsed since the last reset. Time is advanced with the
# Timecop-based `travel` helper.
sub_test_case 'reset' do
  setup do
    @name = 'key'
    @travel_sec = 10
    @server.send('init', [{ 'name' => @name, 'reset_interval' => 10 }], @scope, {})
    @server.send('inc', [{ 'name' => @name, 'value' => 10 }], @scope, {})
  end

  test 'reset a value in the counter' do
    travel(@travel_sec)
    result = @server.send('reset', [@name], @scope, {})
    assert_nil result['errors']
    data = result['data'].first
    assert_true data['success']
    assert_equal @travel_sec, data['elapsed_time']
    # 'counter_data' is a snapshot taken before the reset was applied.
    counter = data['counter_data']
    assert_equal 10, counter['current']
    assert_equal 10, counter['total']
    assert_equal 'numeric', counter['type']
    assert_equal @name, counter['name']
    assert_equal @now, counter['last_reset_at']
    # The stored value has current zeroed but total preserved.
    v = extract_value_from_counter(@server, @scope, @name)
    assert_equal 0, v['current']
    assert_equal 10, v['total']
    assert_equal (@now + @travel_sec), Fluent::EventTime.new(*v['last_reset_at'])
    assert_equal (@now + @travel_sec), Fluent::EventTime.new(*v['last_modified_at'])
  end

  test 'reset a value after `reset_interval` passed' do
    first_travel_sec = 5
    travel(first_travel_sec) # jump time less than reset_interval
    result = @server.send('reset', [@name], @scope, {})
    v = result['data'].first
    # Too early: the reset is reported as unsuccessful and nothing changes.
    assert_equal false, v['success']
    assert_equal first_travel_sec, v['elapsed_time']
    store = extract_value_from_counter(@server, @scope, @name)
    assert_equal 10, store['current']
    assert_equal @now, Fluent::EventTime.new(*store['last_reset_at'])
    # time is passed greater than reset_interval
    travel(@travel_sec)
    result = @server.send('reset', [@name], @scope, {})
    v = result['data'].first
    assert_true v['success']
    assert_equal @travel_sec + first_travel_sec, v['elapsed_time']
    v1 = extract_value_from_counter(@server, @scope, @name)
    assert_equal 0, v1['current']
    assert_equal (@now + @travel_sec + first_travel_sec), Fluent::EventTime.new(*v1['last_reset_at'])
    assert_equal (@now + @travel_sec + first_travel_sec), Fluent::EventTime.new(*v1['last_modified_at'])
  end

  data(
    empty: [[], 'One or more `params` are required'],
    empty_key: [[''], '`key` is the invalid format'],
    invalid_key: [['_key'], '`key` is the invalid format'],
  )
  test 'return an error object: invalid_params' do |(params, msg)|
    result = @server.send('reset', params, @scope, {})
    assert_empty result['data']
    assert_equal 'invalid_params', result['errors'].first['code']
    assert_equal msg, result['errors'].first['message']
  end

  test 'return an error object: unknown_key' do
    unknown_key = 'unknown_key'
    result = @server.send('reset', [unknown_key], @scope, {})
    assert_empty result['data']
    error = result['errors'].first
    assert_equal unknown_key, error['code']
    assert_equal "`#{@scope}\t#{unknown_key}` doesn't exist in counter", error['message']
  end
end
# `get` reads one or more counters without modifying them.
sub_test_case 'get' do
  setup do
    @name1 = 'key1'
    @name2 = 'key2'
    params = [@name1, @name2].map { |n| { 'name' => n, 'reset_interval' => 0 } }
    @server.send('init', params, @scope, {})
  end

  test 'get a counter value' do
    response = @server.send('get', [@name1], @scope, {})
    assert_nil response['errors']
    counter = response['data'].first
    assert_equal 0, counter['current']
    assert_equal 0, counter['total']
    assert_equal 'numeric', counter['type']
    assert_equal @name1, counter['name']
  end

  test 'get counter values' do
    response = @server.send('get', [@name1, @name2], @scope, {})
    assert_nil response['errors']
    # One fresh zero-valued counter per requested key, in request order.
    response['data'].zip([@name1, @name2]).each do |counter, name|
      assert_equal 0, counter['current']
      assert_equal 0, counter['total']
      assert_equal 'numeric', counter['type']
      assert_equal name, counter['name']
    end
  end

  data(
    empty: [[], 'One or more `params` are required'],
    empty_key: [[''], '`key` is the invalid format'],
    invalid_key: [['_key'], '`key` is the invalid format'],
  )
  test 'return an error object: invalid_params' do |(params, msg)|
    response = @server.send('get', params, @scope, {})
    assert_empty response['data']
    first_error = response['errors'].first
    assert_equal 'invalid_params', first_error['code']
    assert_equal msg, first_error['message']
  end

  test 'return an error object: unknown_key' do
    unknown_key = 'unknown_key'
    response = @server.send('get', [unknown_key], @scope, {})
    assert_empty response['data']
    first_error = response['errors'].first
    assert_equal unknown_key, first_error['code']
    assert_equal "`#{@scope}\t#{unknown_key}` doesn't exist in counter", first_error['message']
  end
end
end
# Tests for Fluent::Counter::Server::Response — the accumulator that
# collects pushed data/error objects and serializes them with #to_hash.
class CounterCounterResponseTest < ::Test::Unit::TestCase
  setup do
    @response = Fluent::Counter::Server::Response.new
    @errors = [
      StandardError.new('standard error'),
      Fluent::Counter::InternalServerError.new('internal server error')
    ]
    @now = Fluent::EventTime.now
    counter_value = {
      'name' => 'name',
      'total' => 100,
      'current' => 11,
      'type' => 'numeric',
      'reset_interval' => 10,
      'last_reset_at' => @now,
    }
    @values = [counter_value, 'test']
  end

  test 'push_error' do
    @errors.each { |e| @response.push_error(e) }
    assert_equal @errors, @response.instance_variable_get(:@errors)
  end

  test 'push_data' do
    @values.each { |v| @response.push_data v }
    assert_equal @values, @response.instance_variable_get(:@data)
  end

  test 'to_hash' do
    @errors.each { |e| @response.push_error(e) }
    @values.each { |v| @response.push_data v }
    hash = @response.to_hash
    # Both error classes serialize under the `internal_server_error` code.
    serialized_errors = [
      { 'code' => 'internal_server_error', 'message' => 'standard error' },
      { 'code' => 'internal_server_error', 'message' => 'internal server error' }
    ]
    assert_equal serialized_errors, hash['errors']
    assert_equal @values, hash['data']
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/counter/test_client.rb | test/counter/test_client.rb | require_relative '../helper'
require 'fluent/counter/client'
require 'fluent/counter/store'
require 'fluent/counter/server'
require 'flexmock/test_unit'
require 'timecop'
# Integration tests for Fluent::Counter::Client driving a real
# Fluent::Counter::Server over a local TCP socket (127.0.0.1:8277).
# Each API call returns a Fluent::Counter::Future; `.get` appears to block
# until the server's reply arrives — TODO confirm against Future impl.
class CounterClientTest < ::Test::Unit::TestCase
  TEST_ADDR = '127.0.0.1'
  TEST_PORT = '8277'

  setup do
    # timecop isn't compatible with EventTime
    t = Time.parse('2016-09-22 16:59:59 +0900')
    Timecop.freeze(t)
    @now = Fluent::EventTime.now

    @options = {
      addr: TEST_ADDR,
      port: TEST_PORT,
      log: $log,
    }
    @server_name = 'server1'
    @scope = "worker1\tplugin1"
    @loop = Coolio::Loop.new
    # Live server/client pair; torn down after every test.
    @server = Fluent::Counter::Server.new(@server_name, @options).start
    @client = Fluent::Counter::Client.new(@loop, @options).start
  end

  teardown do
    Timecop.return
    @server.stop
    @client.stop
  end

  test 'Callable API' do
    [:establish, :init, :delete, :inc, :reset, :get].each do |m|
      assert_true @client.respond_to?(m)
    end
  end

  # on_message routes a server reply to the Future registered under its id.
  sub_test_case 'on_message' do
    setup do
      @future = flexmock('future')
      @client.instance_variable_set(:@responses, { 1 => @future })
    end

    test 'call a set method to a corresponding object' do
      @future.should_receive(:set).once.with(Hash)
      @client.send(:on_message, { 'id' => 1 })
    end

    test "output a warning log when passed id doesn't exist" do
      data = { 'id' => 2 }
      mock($log).warn("Receiving missing id data: #{data}")
      @client.send(:on_message, data)
    end
  end

  # Digs the raw stored record for (scope, name) out of the server's
  # internal storage, bypassing the client API entirely.
  def extract_value_from_server(server, scope, name)
    store = server.instance_variable_get(:@store).instance_variable_get(:@storage).instance_variable_get(:@store)
    key = Fluent::Counter::Store.gen_key(scope, name)
    store[key]
  end

  def travel(sec)
    # Since Timecop.travel() causes test failures on Windows/AppVeyor by inducing
    # rounding errors to Time.now, we need to use Timecop.freeze() instead.
    Timecop.freeze(Time.now + sec)
  end

  sub_test_case 'establish' do
    test 'establish a scope' do
      @client.establish(@scope)
      # The client stores the scope prefixed with the server name.
      assert_equal "#{@server_name}\t#{@scope}", @client.instance_variable_get(:@scope)
    end

    data(
      empty: '',
      invalid_string: '_scope',
      invalid_string2: 'Scope'
    )
    test 'raise an error when passed scope is invalid' do |scope|
      assert_raise do
        @client.establish(scope)
      end
    end
  end

  sub_test_case 'init' do
    setup do
      # Skip the establish handshake by injecting the scope directly.
      @client.instance_variable_set(:@scope, @scope)
    end

    data(
      numeric_type: [
        { name: 'key', reset_interval: 20, type: 'numeric' }, 0
      ],
      float_type: [
        { name: 'key', reset_interval: 20, type: 'float' }, 0.0
      ],
      integer_type: [
        { name: 'key', reset_interval: 20, type: 'integer' }, 0
      ]
    )
    test 'create a value' do |(param, initial_value)|
      assert_nil extract_value_from_server(@server, @scope, param[:name])
      response = @client.init(param).get
      data = response.data.first
      assert_nil response.errors
      # Response payload and server-side record must both reflect the params.
      assert_equal param[:name], data['name']
      assert_equal param[:reset_interval], data['reset_interval']
      assert_equal param[:type], data['type']
      assert_equal initial_value, data['current']
      assert_equal initial_value, data['total']
      v = extract_value_from_server(@server, @scope, param[:name])
      assert_equal param[:name], v['name']
      assert_equal param[:reset_interval], v['reset_interval']
      assert_equal param[:type], v['type']
      assert_equal initial_value, v['total']
      assert_equal initial_value, v['current']
    end

    test 'raise an error when @scope is nil' do
      @client.instance_variable_set(:@scope, nil)
      assert_raise 'Call `establish` method to get a `scope` before calling this method' do
        params = { name: 'key1', reset_interval: 10 }
        @client.init(params).get
      end
    end

    data(
      already_exist_key: [
        { name: 'key1', reset_interval: 10 },
        { 'code' => 'invalid_params', 'message' => "worker1\tplugin1\tkey1 already exists in counter" }
      ],
      missing_name: [
        { reset_interval: 10 },
        { 'code' => 'invalid_params', 'message' => '`name` is required' },
      ],
      missing_reset_interval: [
        { name: 'key' },
        { 'code' => 'invalid_params', 'message' => '`reset_interval` is required' },
      ],
      invalid_name: [
        { name: '\tkey' },
        { 'code' => 'invalid_params', 'message' => '`name` is the invalid format' }
      ]
    )
    test 'return an error object' do |(param, expected_error)|
      # Pre-create 'key1' so the already_exist_key case collides.
      params = { name: 'key1', reset_interval: 10 }
      @client.init(params).get
      response = @client.init(param).get
      errors = response.errors.first
      assert_empty response.data
      assert_equal expected_error, errors
      # `wait` (unlike `get`) raises on an error response.
      assert_raise {
        @client.init(param).wait
      }
    end

    test 'return an existing value when passed key already exists and ignore option is true' do
      params = { name: 'key1', reset_interval: 10 }
      res1 = @client.init(params).get
      res2 = nil
      assert_nothing_raised do
        res2 = @client.init({ name: 'key1', reset_interval: 10 }, options: { ignore: true }).get
      end
      assert_equal res1.data, res2.data
    end

    test 'return an error object and data object' do
      # A batched init can partially succeed: new key in data, dup in errors.
      param = { name: 'key1', reset_interval: 10 }
      param2 = { name: 'key2', reset_interval: 10 }
      @client.init(param).get
      response = @client.init([param2, param]).get
      data = response.data.first
      error = response.errors.first
      assert_equal param2[:name], data['name']
      assert_equal param2[:reset_interval], data['reset_interval']
      assert_equal 'invalid_params', error['code']
      assert_equal "#{@scope}\t#{param[:name]} already exists in counter", error['message']
    end

    test 'return a future object when async call' do
      param = { name: 'key', reset_interval: 10 }
      r = @client.init(param)
      assert_true r.is_a?(Fluent::Counter::Future)
      assert_nil r.errors
    end
  end

  sub_test_case 'delete' do
    setup do
      @client.instance_variable_set(:@scope, @scope)
      @name = 'key'
      @key = Fluent::Counter::Store.gen_key(@scope, @name)
      @init_obj = { name: @name, reset_interval: 20, type: 'numeric' }
      @client.init(@init_obj).get
    end

    test 'delete a value' do
      assert extract_value_from_server(@server, @scope, @name)
      response = @client.delete(@name).get
      # Response carries the deleted counter's last state.
      v = response.data.first
      assert_nil response.errors
      assert_equal @init_obj[:name], v['name']
      assert_equal @init_obj[:type], v['type']
      assert_equal @init_obj[:reset_interval], v['reset_interval']
      assert_nil extract_value_from_server(@server, @scope, @name)
    end

    test 'raise an error when @scope is nil' do
      @client.instance_variable_set(:@scope, nil)
      assert_raise 'Call `establish` method to get a `scope` before calling this method' do
        @client.delete(@name).get
      end
    end

    data(
      key_not_found: [
        'key2',
        { 'code' => 'unknown_key', 'message' => "`worker1\tplugin1\tkey2` doesn't exist in counter" }
      ],
      invalid_key: [
        '\tkey',
        { 'code' => 'invalid_params', 'message' => '`key` is the invalid format' }
      ]
    )
    test 'return an error object' do |(param, expected_error)|
      response = @client.delete(param).get
      errors = response.errors.first
      assert_empty response.data
      assert_equal expected_error, errors
    end

    test 'return an error object and data object' do
      # Batched delete: the existing key is deleted even though the
      # unknown key in the same request produces an error.
      unknown_name = 'key2'
      response = @client.delete(@name, unknown_name).get
      data = response.data.first
      error = response.errors.first
      assert_equal @name, data['name']
      assert_equal @init_obj[:reset_interval], data['reset_interval']
      assert_equal 'unknown_key', error['code']
      assert_equal "`#{@scope}\t#{unknown_name}` doesn't exist in counter", error['message']
      assert_nil extract_value_from_server(@server, @scope, @name)
    end

    test 'return a future object when async call' do
      r = @client.delete(@name)
      assert_true r.is_a?(Fluent::Counter::Future)
      assert_nil r.errors
    end
  end

  sub_test_case 'inc' do
    setup do
      @client.instance_variable_set(:@scope, @scope)
      @name = 'key'
      @key = Fluent::Counter::Store.gen_key(@scope, @name)
      @init_obj = { name: @name, reset_interval: 20, type: 'numeric' }
      @client.init(@init_obj).get
    end

    test 'increment a value' do
      v = extract_value_from_server(@server, @scope, @name)
      assert_equal 0, v['total']
      assert_equal 0, v['current']
      travel(1)
      inc_obj = { name: @name, value: 10 }
      @client.inc(inc_obj).get
      v = extract_value_from_server(@server, @scope, @name)
      assert_equal inc_obj[:value], v['total']
      assert_equal inc_obj[:value], v['current']
      # last_modified_at tracks the (frozen, then advanced) clock.
      assert_equal (@now + 1), Fluent::EventTime.new(*v['last_modified_at'])
    end

    test 'create and increment a value when force option is true' do
      name = 'new_key'
      param = { name: name, value: 11, reset_interval: 1 }
      assert_nil extract_value_from_server(@server, @scope, name)
      @client.inc(param, options: { force: true }).get
      v = extract_value_from_server(@server, @scope, name)
      assert v
      assert_equal param[:name], v['name']
      assert_equal 1, v['reset_interval']
      assert_equal param[:value], v['current']
      assert_equal param[:value], v['total']
    end

    test 'raise an error when @scope is nil' do
      @client.instance_variable_set(:@scope, nil)
      assert_raise 'Call `establish` method to get a `scope` before calling this method' do
        params = { name: 'name', value: 1 }
        @client.inc(params).get
      end
    end

    data(
      not_exist_key: [
        { name: 'key2', value: 10 },
        { 'code' => 'unknown_key', 'message' => "`worker1\tplugin1\tkey2` doesn't exist in counter" }
      ],
      missing_name: [
        { value: 10 },
        { 'code' => 'invalid_params', 'message' => '`name` is required' },
      ],
      missing_value: [
        { name: 'key' },
        { 'code' => 'invalid_params', 'message' => '`value` is required' },
      ],
      invalid_name: [
        { name: '\tkey' },
        { 'code' => 'invalid_params', 'message' => '`name` is the invalid format' }
      ]
    )
    test 'return an error object' do |(param, expected_error)|
      response = @client.inc(param).get
      errors = response.errors.first
      assert_empty response.data
      assert_equal expected_error, errors
    end

    test 'return an error object and data object' do
      parmas = [
        { name: @name, value: 10 },
        { name: 'unknown_key', value: 9 },
      ]
      response = @client.inc(parmas).get
      data = response.data.first
      error = response.errors.first
      assert_equal @name, data['name']
      assert_equal 10, data['current']
      assert_equal 10, data['total']
      assert_equal 'unknown_key', error['code']
      assert_equal "`#{@scope}\tunknown_key` doesn't exist in counter", error['message']
    end

    test 'return a future object when async call' do
      param = { name: 'key', value: 10 }
      r = @client.inc(param)
      assert_true r.is_a?(Fluent::Counter::Future)
      assert_nil r.errors
    end
  end

  sub_test_case 'get' do
    setup do
      @client.instance_variable_set(:@scope, @scope)
      @name = 'key'
      @init_obj = { name: @name, reset_interval: 20, type: 'numeric' }
      @client.init(@init_obj).get
    end

    test 'get a value' do
      v1 = extract_value_from_server(@server, @scope, @name)
      # `.data` is read straight off the Future here (no explicit `.get`);
      # presumably it waits for the reply itself — TODO confirm.
      v2 = @client.get(@name).data.first
      assert_equal v1['name'], v2['name']
      assert_equal v1['current'], v2['current']
      assert_equal v1['total'], v2['total']
      assert_equal v1['type'], v2['type']
    end

    test 'raise an error when @scope is nil' do
      @client.instance_variable_set(:@scope, nil)
      assert_raise 'Call `establish` method to get a `scope` before calling this method' do
        @client.get(@name).get
      end
    end

    data(
      key_not_found: [
        'key2',
        { 'code' => 'unknown_key', 'message' => "`worker1\tplugin1\tkey2` doesn't exist in counter" }
      ],
      invalid_key: [
        '\tkey',
        { 'code' => 'invalid_params', 'message' => '`key` is the invalid format' }
      ]
    )
    test 'return an error object' do |(param, expected_error)|
      response = @client.get(param).get
      errors = response.errors.first
      assert_empty response.data
      assert_equal expected_error, errors
    end

    test 'return an error object and data object' do
      unknown_name = 'key2'
      response = @client.get(@name, unknown_name).get
      data = response.data.first
      error = response.errors.first
      assert_equal @name, data['name']
      assert_equal @init_obj[:reset_interval], data['reset_interval']
      assert_equal 'unknown_key', error['code']
      assert_equal "`#{@scope}\t#{unknown_name}` doesn't exist in counter", error['message']
    end

    test 'return a future object when async call' do
      r = @client.get(@name)
      assert_true r.is_a?(Fluent::Counter::Future)
      assert_nil r.errors
    end
  end

  sub_test_case 'reset' do
    setup do
      @client.instance_variable_set(:@scope, @scope)
      @name = 'key'
      @key = Fluent::Counter::Store.gen_key(@scope, @name)
      @init_obj = { name: @name, reset_interval: 5, type: 'numeric' }
      @client.init(@init_obj).get
      @inc_obj = { name: @name, value: 10 }
      @client.inc(@inc_obj).get
    end

    test 'reset a value after `reset_interval` passed' do
      v1 = extract_value_from_server(@server, @scope, @name)
      assert_equal @inc_obj[:value], v1['total']
      assert_equal @inc_obj[:value], v1['current']
      assert_equal @now, Fluent::EventTime.new(*v1['last_reset_at'])

      travel_sec = 6 # greater than reset_interval
      travel(travel_sec)

      v2 = @client.reset(@name).get
      data = v2.data.first
      # counter_data is a snapshot taken before the reset was applied.
      c = data['counter_data']
      assert_equal travel_sec, data['elapsed_time']
      assert_true data['success']
      assert_equal @inc_obj[:value], c['current']
      assert_equal @inc_obj[:value], c['total']
      assert_equal @now, c['last_reset_at']
      v1 = extract_value_from_server(@server, @scope, @name)
      assert_equal 0, v1['current']
      assert_equal @inc_obj[:value], v1['total']
      assert_equal (@now + travel_sec), Fluent::EventTime.new(*v1['last_reset_at'])
      assert_equal (@now + travel_sec), Fluent::EventTime.new(*v1['last_modified_at'])
    end

    test 'return a value object before `reset_interval` passed' do
      v1 = extract_value_from_server(@server, @scope, @name)
      assert_equal @inc_obj[:value], v1['total']
      assert_equal @inc_obj[:value], v1['current']
      assert_equal @now, Fluent::EventTime.new(*v1['last_reset_at'])

      travel_sec = 4 # less than reset_interval
      travel(travel_sec)

      v2 = @client.reset(@name).get
      data = v2.data.first
      c = data['counter_data']
      assert_equal travel_sec, data['elapsed_time']
      # Too early: reported unsuccessful and the stored value is untouched.
      assert_equal false, data['success']
      assert_equal @inc_obj[:value], c['current']
      assert_equal @inc_obj[:value], c['total']
      assert_equal @now, c['last_reset_at']
      v1 = extract_value_from_server(@server, @scope, @name)
      assert_equal @inc_obj[:value], v1['current']
      assert_equal @inc_obj[:value], v1['total']
      assert_equal @now, Fluent::EventTime.new(*v1['last_reset_at'])
    end

    test 'raise an error when @scope is nil' do
      @client.instance_variable_set(:@scope, nil)
      assert_raise 'Call `establish` method to get a `scope` before calling this method' do
        @client.reset(@name).get
      end
    end

    data(
      key_not_found: [
        'key2',
        { 'code' => 'unknown_key', 'message' => "`worker1\tplugin1\tkey2` doesn't exist in counter" }
      ],
      invalid_key: [
        '\tkey',
        { 'code' => 'invalid_params', 'message' => '`key` is the invalid format' }
      ]
    )
    test 'return an error object' do |(param, expected_error)|
      response = @client.reset(param).get
      errors = response.errors.first
      assert_empty response.data
      assert_equal expected_error, errors
    end

    test 'return an error object and data object' do
      unknown_name = 'key2'
      travel_sec = 6 # greater than reset_interval
      travel(travel_sec)
      response = @client.reset(@name, unknown_name).get
      data = response.data.first
      error = response.errors.first
      counter = data['counter_data']
      assert_true data['success']
      assert_equal travel_sec, data['elapsed_time']
      assert_equal @name, counter['name']
      assert_equal @init_obj[:reset_interval], counter['reset_interval']
      assert_equal @inc_obj[:value], counter['total']
      assert_equal @inc_obj[:value], counter['current']
      assert_equal 'unknown_key', error['code']
      assert_equal "`#{@scope}\t#{unknown_name}` doesn't exist in counter", error['message']
      v1 = extract_value_from_server(@server, @scope, @name)
      assert_equal 0, v1['current']
      assert_equal @inc_obj[:value], v1['total']
      assert_equal (@now + travel_sec), Fluent::EventTime.new(*v1['last_reset_at'])
      assert_equal (@now + travel_sec), Fluent::EventTime.new(*v1['last_modified_at'])
    end

    test 'return a future object when async call' do
      r = @client.reset(@name)
      assert_true r.is_a?(Fluent::Counter::Future)
      assert_nil r.errors
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/assertions.rb | test/config/assertions.rb | require 'test/unit/assertions'
# Custom assertions for config-parsing tests. Each assertion expects the
# including context to provide a `parse_text(text)` method.
module Test::Unit::Assertions
  # Asserts that `parse_text(actual)` yields `expected`.
  # Floats are compared with an epsilon (1e-6) and NaN is treated as
  # equal to NaN, since parsed literals may not round-trip exactly.
  def assert_text_parsed_as(expected, actual)
    msg = parse_text(actual).inspect rescue 'failed'
    msg = "expected that #{actual.inspect} would be parsed as #{expected.inspect} but got #{msg}"
    assert_block(msg) {
      v = parse_text(actual)
      if expected.is_a?(Float)
        # BUG FIX: this previously compared against an undefined local `obj`,
        # raising NameError whenever `expected` was a Float.
        v.is_a?(Float) && (v == expected || (v.nan? && expected.nan?) || (v - expected).abs < 0.000001)
      else
        v == expected
      end
    }
  end

  # Asserts that `parse_text(actual)` produces a JSON string whose parsed
  # value equals `expected`.
  def assert_text_parsed_as_json(expected, actual)
    msg = parse_text(actual).inspect rescue 'failed'
    msg = "expected that #{actual.inspect} would be parsed as #{expected.inspect} but got #{msg}"
    assert_block(msg) {
      v = JSON.parse(parse_text(actual))
      v == expected
    }
  end

  # Asserts that `parse_text(actual)` raises Fluent::ConfigParseError.
  # Any other outcome (a value, or a different exception) fails the block.
  def assert_parse_error(actual)
    msg = begin
            parse_text(actual).inspect
          rescue => e
            e.inspect
          end
    msg = "expected that #{actual.inspect} would cause a parse error but got #{msg}"
    assert_block(msg) {
      begin
        parse_text(actual)
        false
      rescue Fluent::ConfigParseError
        true
      end
    }
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_section.rb | test/config/test_section.rb | require_relative '../helper'
require 'fluent/config/section'
require 'pp'
module Fluent::Config
  # Unit tests for Fluent::Config::Section, the BasicObject-based wrapper
  # that exposes a configuration hash through []-style and method-style
  # accessors.
  class TestSection < ::Test::Unit::TestCase
    sub_test_case Fluent::Config::Section do
      sub_test_case 'class' do
        sub_test_case '.name' do
          test 'returns its full module name as String' do
            assert_equal('Fluent::Config::Section', Fluent::Config::Section.name)
          end
        end
      end

      sub_test_case 'instance object' do
        sub_test_case '#initialize' do
          test 'creates blank object without argument' do
            s = Fluent::Config::Section.new
            # Section keeps its parameters in the @params ivar.
            assert_equal({}, s.instance_eval{ @params })
          end

          test 'creates object which contains specified hash object itself' do
            hash = {
              name: 'tagomoris',
              age: 34,
              send: 'email',
              klass: 'normal',
              keys: 5,
            }
            s1 = Fluent::Config::Section.new(hash)
            assert_equal(hash, s1.instance_eval { @params })
            assert_equal("tagomoris", s1[:name])
            assert_equal(34, s1[:age])
            assert_equal("email", s1[:send])
            assert_equal("normal", s1[:klass])
            assert_equal(5, s1[:keys])
            # Method-style access works even for keys colliding with Object
            # methods (send, klass, keys) because Section < BasicObject.
            assert_equal("tagomoris", s1.name)
            assert_equal(34, s1.age)
            assert_equal("email", s1.send)
            assert_equal("normal", s1.klass)
            assert_equal(5, s1.keys)
          end

          test 'creates object which contains specified hash object itself, including fields with at prefix' do
            hash = {
              name: 'tagomoris',
              age: 34,
              send: 'email',
              klass: 'normal',
              keys: 5,
            }
            # '@id' is not a valid method name, so access goes through [] or __send__.
            hash['@id'.to_sym] = 'myid'
            s1 = Fluent::Config::Section.new(hash)
            assert_equal('myid', s1['@id'])
            assert_equal('myid', s1['@id'.to_sym])
            assert_equal('myid', s1.__send__('@id'.to_sym))
          end

          test 'creates object and config element which corresponds to section object itself' do
            hash = {
              name: 'tagomoris',
              age: 34,
              send: 'email',
              klass: 'normal',
              keys: 5,
            }
            hash['@id'.to_sym] = 'myid'
            conf = config_element('section', '', {'name' => 'tagomoris', 'age' => 34, 'send' => 'email', 'klass' => 'normal', 'keys' => 5})
            s2 = Fluent::Config::Section.new(hash, conf)
            assert s2.corresponding_config_element.is_a?(Fluent::Config::Element)
          end
        end

        sub_test_case '#class' do
          test 'returns class constant' do
            assert_equal Fluent::Config::Section, Fluent::Config::Section.new({}).class
          end
        end

        sub_test_case '#object_id' do
          test 'returns its object id' do
            s1 = Fluent::Config::Section.new({})
            assert s1.object_id
            s2 = Fluent::Config::Section.new({})
            assert s2.object_id
            assert_not_equal s1.object_id, s2.object_id
          end
        end

        sub_test_case '#to_h' do
          test 'returns internal hash itself' do
            hash = {
              name: 'tagomoris',
              age: 34,
              send: 'email',
              klass: 'normal',
              keys: 5,
            }
            s = Fluent::Config::Section.new(hash)
            assert_equal(hash, s.to_h)
            assert_instance_of(Hash, s.to_h)
          end
        end

        sub_test_case '#instance_of?' do
          test 'can judge whether it is a Section object or not' do
            s = Fluent::Config::Section.new
            assert_true(s.instance_of?(Fluent::Config::Section))
            assert_false(s.instance_of?(BasicObject))
          end
        end

        sub_test_case '#is_a?' do
          test 'can judge whether it belongs to or not' do
            s = Fluent::Config::Section.new
            assert_true(s.is_a?(Fluent::Config::Section))
            assert_true(s.kind_of?(Fluent::Config::Section))
            assert_true(s.is_a?(BasicObject))
          end
        end

        sub_test_case '#+' do
          test 'can merge 2 sections: argument side is primary, internal hash is newly created' do
            h1 = {name: "s1", num: 10, klass: "A"}
            s1 = Fluent::Config::Section.new(h1)
            h2 = {name: "s2", klass: "A", num2: "5", num3: "8"}
            s2 = Fluent::Config::Section.new(h2)
            s = s1 + s2
            # The merged section must not alias either source hash.
            assert_not_equal(h1.object_id, s.to_h.object_id)
            assert_not_equal(h2.object_id, s.to_h.object_id)
            # Right-hand side wins on conflicting keys (name).
            assert_equal("s2", s.name)
            assert_equal(10, s.num)
            assert_equal("A", s.klass)
            assert_equal("5", s.num2)
            assert_equal("8", s.num3)
          end
        end

        sub_test_case '#to_s' do
          test '#to_s == #inspect' do
            h1 = {name: "s1", num: 10, klass: "A"}
            s1 = Fluent::Config::Section.new(h1)
            assert_equal(s1.to_s, s1.inspect)
          end
        end

        # Section must claim to respond to its public surface and to the
        # BasicObject/housekeeping hooks, but not to arbitrary methods.
        data("inspect" => [:inspect, true],
             "nil?" => [:nil?, true],
             "to_h" => [:to_h, true],
             "+" => [:+, true],
             "instance_of?" => [:instance_of?, true],
             "kind_of?" => [:kind_of?, true],
             "[]" => [:[], true],
             "respond_to?" => [:respond_to?, true],
             "respond_to_missing?" => [:respond_to_missing?, true],
             "!" => [:!, true],
             "!=" => [:!=, true],
             "==" => [:==, true],
             "equal?" => [:equal?, true],
             "instance_eval" => [:instance_eval, true],
             "instance_exec" => [:instance_exec, true],
             "method_missing" => [:method_missing, false],
             "singleton_method_added" => [:singleton_method_added, false],
             "singleton_method_removed" => [:singleton_method_removed, false],
             "singleton_method_undefined" => [:singleton_method_undefined, false],
             "no_such_method" => [:no_such_method, false])
        test '#respond_to?' do |data|
          method, expected = data
          h1 = {name: "s1", num: 10, klass: "A"}
          s1 = Fluent::Config::Section.new(h1)
          assert_equal(expected, s1.respond_to?(method))
        end

        test '#pretty_print' do
          q = PP.new
          h1 = {name: "s1", klass: "A"}
          s1 = Fluent::Config::Section.new(h1)
          s1.pretty_print(q)
          # pretty_print output should match inspect for a flat section.
          assert_equal s1.inspect, q.output
        end
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_config_parser.rb | test/config/test_config_parser.rb | require_relative '../helper'
require_relative "assertions"
require "json"
require "fluent/config/error"
require "fluent/config/basic_parser"
require "fluent/config/literal_parser"
require "fluent/config/v1_parser"
require 'fluent/config/parser'
module Fluent::Config
module V1TestHelper
  # Builds a ROOT config element. When the first argument is already an
  # Element, all arguments are treated as child elements with empty
  # attributes; otherwise the first argument (or {} when nil) is taken as
  # the attribute hash and the remainder become children.
  def root(*elements)
    attrs = elements.first.is_a?(Fluent::Config::Element) ? {} : (elements.shift || {})
    Fluent::Config::Element.new('ROOT', '', attrs, elements)
  end

  # Shorthand constructor for a Fluent::Config::Element with sensible
  # defaults for argument, attributes, and children.
  def e(name, arg = '', attrs = {}, elements = [])
    Fluent::Config::Element.new(name, arg, attrs, elements)
  end
end
# Configurable fixture declaring one parameter of every built-in config
# value type; used by the #to_s round-trip tests in both parser test cases.
class AllTypes
  include Fluent::Configurable

  config_param :param_string, :string
  config_param :param_enum, :enum, list: [:foo, :bar, :baz]
  config_param :param_integer, :integer
  config_param :param_float, :float
  config_param :param_size, :size
  config_param :param_bool, :bool
  config_param :param_time, :time
  config_param :param_hash, :hash
  config_param :param_array, :array
  config_param :param_regexp, :regexp
end
# Tests for the v1 configuration parser: attribute/value lexing, quoting
# rules, element nesting, embedded Ruby expansion, @include resolution,
# and Element#to_s round-trips.
class TestV1Parser < ::Test::Unit::TestCase
  # Reads and parses a v1 config file from disk.
  def read_config(path)
    path = File.expand_path(path)
    data = File.read(path)
    Fluent::Config::V1Parser.parse(data, File.basename(path), File.dirname(path))
  end

  # Parses inline v1 config text rooted at the repository base directory.
  def parse_text(text)
    basepath = File.expand_path(File.dirname(__FILE__) + '/../../')
    Fluent::Config::V1Parser.parse(text, '(test)', basepath, nil)
  end

  include V1TestHelper
  extend V1TestHelper

  sub_test_case 'attribute parsing' do
    test "parses attributes" do
      assert_text_parsed_as(e('ROOT', '', {"k1"=>"v1", "k2"=>"v2"}), %[
k1 v1
k2 v2
])
    end

    test "allows attribute without value" do
      assert_text_parsed_as(e('ROOT', '', {"k1"=>"", "k2"=>"v2"}), %[
k1
k2 v2
])
    end

    test "parses attribute key always string" do
      assert_text_parsed_as(e('ROOT', '', {"1" => "1"}), "1 1")
    end

    data("_.%$!," => "_.%$!,",
         "/=~-~@\`:?" => "/=~-~@\`:?",
         "()*{}.[]" => "()*{}.[]")
    test "parses a value with symbols" do |v|
      assert_text_parsed_as(e('ROOT', '', {"k" => v}), "k #{v}")
    end

    test "ignores spacing around value" do
      assert_text_parsed_as(e('ROOT', '', {"k1" => "a"}), " k1 a ")
    end

    test "allows spaces in value" do
      assert_text_parsed_as(e('ROOT', '', {"k1" => "a b c"}), "k1 a b c")
    end

    test "parses value into empty string if only key exists" do
      # value parser parses empty string as true for bool type
      assert_text_parsed_as(e('ROOT', '', {"k1" => ""}), "k1\n")
      assert_text_parsed_as(e('ROOT', '', {"k1" => ""}), "k1")
    end

    sub_test_case 'non-quoted string' do
      test "remains text starting with '#'" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => "#not_comment"}), " k1 #not_comment")
      end

      test "remains text just after '#'" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => "a#not_comment"}), " k1 a#not_comment")
      end

      test "remove text after ` #` (comment)" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => "a"}), " k1 a #comment")
      end

      test "does not require escaping backslash" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => "\\\\"}), " k1 \\\\")
        assert_text_parsed_as(e('ROOT', '', {"k1" => "\\"}), " k1 \\")
      end

      test "remains backslash in front of a normal character" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => '\['}), " k1 \\[")
      end

      test "does not accept escape characters" do
        # Unquoted values keep backslash sequences literally.
        assert_text_parsed_as(e('ROOT', '', {"k1" => '\n'}), " k1 \\n")
      end
    end

    sub_test_case 'double quoted string' do
      test "allows # in value" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => "a#comment"}), ' k1 "a#comment"')
      end

      test "rejects characters after double quoted string" do
        assert_parse_error(' k1 "a" 1')
      end

      test "requires escaping backslash" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => "\\"}), ' k1 "\\\\"')
        assert_parse_error(' k1 "\\"')
      end

      test "requires escaping double quote" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => '"'}), ' k1 "\\""')
        assert_parse_error(' k1 """')
        assert_parse_error(' k1 ""\'')
      end

      test "removes backslash in front of a normal character" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => '['}), ' k1 "\\["')
      end

      test "accepts escape characters" do
        # Double quotes interpret \n etc. as escape sequences.
        assert_text_parsed_as(e('ROOT', '', {"k1" => "\n"}), ' k1 "\\n"')
      end

      test "support multiline string" do
        assert_text_parsed_as(e('ROOT', '',
                                {"k1" => %[line1
line2]
                                }),
                              %[k1 "line1
line2"]
                             )
        # A trailing backslash joins the next line (line continuation).
        assert_text_parsed_as(e('ROOT', '',
                                {"k1" => %[line1 line2]
                                }),
                              %[k1 "line1\\
line2"]
                             )
        assert_text_parsed_as(e('ROOT', '',
                                {"k1" => %[line1
line2
line3]
                                }),
                              %[k1 "line1
line2
line3"]
                             )
        assert_text_parsed_as(e('ROOT', '',
                                {"k1" => %[line1
line2 line3]
                                }),
                              %[k1 "line1
line2\\
line3"]
                             )
      end
    end

    sub_test_case 'single quoted string' do
      test "allows # in value" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => "a#comment"}), " k1 'a#comment'")
      end

      test "rejects characters after single quoted string" do
        assert_parse_error(" k1 'a' 1")
      end

      test "requires escaping backslash" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => "\\"}), " k1 '\\\\'")
        assert_parse_error(" k1 '\\'")
      end

      test "requires escaping single quote" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => "'"}), " k1 '\\''")
        assert_parse_error(" k1 '''")
      end

      test "remains backslash in front of a normal character" do
        assert_text_parsed_as(e('ROOT', '', {"k1" => '\\['}), " k1 '\\['")
      end

      test "does not accept escape characters" do
        # Single quotes keep backslash sequences literally (no \n escape).
        assert_text_parsed_as(e('ROOT', '', {"k1" => "\\n"}), " k1 '\\n'")
      end
    end

    # '@'-prefixed parameter names are reserved for system use in plugin
    # sections and at top level, but allowed in nested data sections.
    data(
      "in match" => %[
<match>
@k v
</match>
],
      "in source" => %[
<source>
@k v
</source>
],
      "in filter" => %[
<filter>
@k v
</filter>
],
      "in top-level" => ' @k v '
    )
    def test_rejects_at_prefix_in_the_parameter_name(data)
      assert_parse_error(data)
    end

    data(
      "in nested" => %[
<match>
<record>
@k v
</record>
</match>
]
    )
    def test_not_reject_at_prefix_in_the_parameter_name(data)
      assert_nothing_raised { parse_text(data) }
    end
  end

  sub_test_case 'element parsing' do
    data(
      'root' => [root, ""],
      "accepts empty element" => [root(e("test")), %[
<test>
</test>
]],
      "accepts argument and attributes" => [root(e("test", 'var', {'key'=>"val"})), %[
<test var>
key val
</test>
]],
      "accepts nested elements" => [root(
          e("test", 'var', {'key'=>'1'}, [
            e('nested1'),
            e('nested2')
          ])), %[
<test var>
key 1
<nested1>
</nested1>
<nested2>
</nested2>
</test>
]],
      "accepts multiline json values" => [root(e("test", 'var', {'key'=>"[\"a\",\"b\",\"c\",\"d\"]"})), %[
<test var>
key ["a",
"b", "c",
"d"]
</test>
]],
      "parses empty element argument to nil" => [root(e("test", '')), %[
<test >
</test>
]],
      "ignores spacing around element argument" => [root(e("test", "a")), %[
<test a >
</test>
]],
      "accepts spacing inside element argument (for multiple tags)" => [root(e("test", "a.** b.**")), %[
<test a.** b.** >
</test>
]])
    def test_parse_element(data)
      expected, target = data
      assert_text_parsed_as(expected, target)
    end

    [
      "**",
      "*.*",
      "1",
      "_.%$!",
      "/",
      "()*{}.[]",
    ].each do |arg|
      test "parses symbol element argument:#{arg}" do
        assert_text_parsed_as(root(e("test", arg)), %[
<test #{arg}>
</test>
])
      end
    end

    data(
      "considers comments in element argument" => %[
<test #a>
</test>
],
      "requires line_end after begin tag" => %[
<test></test>
],
      "requires line_end after end tag" => %[
<test>
</test><test>
</test>
])
    def test_parse_error(data)
      assert_parse_error(data)
    end
  end

  sub_test_case "Embedded Ruby Code in section attributes" do
    setup do
      ENV["EMBEDDED_VAR"] = "embedded"
      ENV["NESTED_EMBEDDED_VAR"] = "nested-embedded"
      @hostname = Socket.gethostname
    end

    teardown do
      ENV["EMBEDDED_VAR"] = nil
      ENV["NESTED_EMBEDDED_VAR"] = nil
    end

    test "embedded Ruby code should be expanded" do
      # Double-quoted element arguments are evaluated as Ruby, so ENV and
      # Socket calls in the config text must expand to the same values here.
      assert_text_parsed_as(root(
          e("test", 'embedded', {'key'=>'1'}, [
            e('nested1', 'nested-embedded'),
            e('nested2', "#{@hostname}")
          ])), <<-EOF
<test "#{ENV["EMBEDDED_VAR"]}">
key 1
<nested1 "#{ENV["NESTED_EMBEDDED_VAR"]}">
</nested1>
<nested2 "#{Socket.gethostname}">
</nested2>
</test>
EOF
      )
    end
  end

  # port from test_config.rb
  sub_test_case '@include parsing' do
    TMP_DIR = File.dirname(__FILE__) + "/tmp/v1_config#{ENV['TEST_ENV_NUMBER']}"
    TMP_DIR_WITH_SPACES = File.dirname(__FILE__) + "/tmp/folder with spaces/v1_config#{ENV['TEST_ENV_NUMBER']}"

    # Writes config text to path, creating parent directories as needed.
    def write_config(path, data)
      FileUtils.mkdir_p(File.dirname(path))
      File.open(path, "w") { |f| f.write data }
    end

    # Lays out a tree of config files covering relative, absolute, file://
    # and wildcard @include forms, plus `include` as a plain parameter name
    # inside nested sections.
    def prepare_config(tmp_dir)
      write_config "#{tmp_dir}/config_test_1.conf", %[
k1 root_config
include dir/config_test_2.conf #
@include #{tmp_dir}/config_test_4.conf
include file://#{tmp_dir}/config_test_5.conf
@include config.d/*.conf
]
      write_config "#{tmp_dir}/dir/config_test_2.conf", %[
k2 relative_path_include
@include ../config_test_3.conf
]
      write_config "#{tmp_dir}/config_test_3.conf", %[
k3 relative_include_in_included_file
]
      write_config "#{tmp_dir}/config_test_4.conf", %[
k4 absolute_path_include
]
      write_config "#{tmp_dir}/config_test_5.conf", %[
k5 uri_include
]
      write_config "#{tmp_dir}/config.d/config_test_6.conf", %[
k6 wildcard_include_1
<elem1 name>
include normal_parameter
</elem1>
]
      write_config "#{tmp_dir}/config.d/config_test_7.conf", %[
k7 wildcard_include_2
]
      write_config "#{tmp_dir}/config.d/config_test_8.conf", %[
<elem2 name>
@include ../dir/config_test_9.conf
</elem2>
]
      write_config "#{tmp_dir}/dir/config_test_9.conf", %[
k9 embedded
<elem3 name>
nested nested_value
include hoge
</elem3>
]
      write_config "#{tmp_dir}/config.d/00_config_test_8.conf", %[
k8 wildcard_include_3
<elem4 name>
include normal_parameter
</elem4>
]
    end

    data("TMP_DIR without spaces" => TMP_DIR,
         "TMP_DIR with spaces" => TMP_DIR_WITH_SPACES)
    test 'parses @include / include correctly' do |data|
      prepare_config(data)
      c = read_config("#{data}/config_test_1.conf")
      assert_equal('root_config', c['k1'])
      assert_equal('relative_path_include', c['k2'])
      assert_equal('relative_include_in_included_file', c['k3'])
      assert_equal('absolute_path_include', c['k4'])
      assert_equal('uri_include', c['k5'])
      assert_equal('wildcard_include_1', c['k6'])
      assert_equal('wildcard_include_2', c['k7'])
      assert_equal('wildcard_include_3', c['k8'])
      # Wildcard includes are expanded in file-name order.
      assert_equal([
        'k1',
        'k2',
        'k3',
        'k4',
        'k5',
        'k8', # Because of the file name this comes first.
        'k6',
        'k7',
      ], c.keys)

      elem1 = c.elements.find { |e| e.name == 'elem1' }
      assert(elem1)
      assert_equal('name', elem1.arg)
      assert_equal('normal_parameter', elem1['include'])

      elem2 = c.elements.find { |e| e.name == 'elem2' }
      assert(elem2)
      assert_equal('name', elem2.arg)
      assert_equal('embedded', elem2['k9'])
      assert_not_include(elem2, 'include')

      elem3 = elem2.elements.find { |e| e.name == 'elem3' }
      assert(elem3)
      assert_equal('nested_value', elem3['nested'])
      assert_equal('hoge', elem3['include'])
    end

    # TODO: Add uri based include spec
  end

  sub_test_case '#to_s' do
    test 'parses dumpped configuration' do
      original = %q!a\\\n\r\f\b'"z!
      expected = %q!a\\\n\r\f\b'"z!
      conf = parse_text(%[k1 #{original}])
      assert_equal(expected, conf['k1']) # escape check
      conf2 = parse_text(conf.to_s) # use dumpped configuration to check unescape
      assert_equal(expected, conf2.elements.first['k1'])
    end

    test 'all types' do
      conf = parse_text(%[
param_string "value"
param_enum foo
param_integer 999
param_float 55.55
param_size 4k
param_bool true
param_time 10m
param_hash { "key1": "value1", "key2": 2 }
param_array ["value1", "value2", 100]
param_regexp /pattern/
])
      target = AllTypes.new.configure(conf)
      assert_equal(conf.to_s, target.config.to_s)
      # v1 dump normalizes hash/array values to compact JSON.
      expected = <<DUMP
<ROOT>
param_string "value"
param_enum foo
param_integer 999
param_float 55.55
param_size 4k
param_bool true
param_time 10m
param_hash {"key1":"value1","key2":2}
param_array ["value1","value2",100]
param_regexp /pattern/
</ROOT>
DUMP
      assert_equal(expected, conf.to_s)
    end
  end
end
# Tests for the legacy (v0) configuration parser's Element#to_s output.
class TestV0Parser < ::Test::Unit::TestCase
  # Parses inline config text with the v0 parser (stream-based interface).
  def parse_text(text)
    basepath = File.expand_path(File.dirname(__FILE__) + '/../../')
    Fluent::Config::Parser.parse(StringIO.new(text), '(test)', basepath)
  end

  sub_test_case "Fluent::Config::Element#to_s" do
    test 'all types' do
      conf = parse_text(%[
param_string value
param_enum foo
param_integer 999
param_float 55.55
param_size 4k
param_bool true
param_time 10m
param_hash { "key1": "value1", "key2": 2 }
param_array ["value1", "value2", 100]
param_regexp /pattern/
])
      target = AllTypes.new.configure(conf)
      assert_equal(conf.to_s, target.config.to_s)
      # Unlike v1, the v0 dump keeps hash/array values verbatim
      # (no JSON normalization).
      expected = <<DUMP
<ROOT>
param_string value
param_enum foo
param_integer 999
param_float 55.55
param_size 4k
param_bool true
param_time 10m
param_hash { "key1": "value1", "key2": 2 }
param_array ["value1", "value2", 100]
param_regexp /pattern/
</ROOT>
DUMP
      assert_equal(expected, conf.to_s)
    end
  end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_configurable.rb | test/config/test_configurable.rb | require_relative '../helper'
require 'fluent/configurable'
require 'fluent/config/element'
require 'fluent/config/section'
# Fixture classes exercising Fluent::Configurable: parameter defaults, enum
# lists, inheritance via config_set_default, nested sections, aliases,
# secrets, finalized sections, and deprecated/obsoleted parameters.
module ConfigurableSpec
  # Baseline: required and defaulted params of several types.
  class Base1
    include Fluent::Configurable

    config_param :node, :string, default: "node"
    config_param :flag1, :bool, default: false
    config_param :flag2, :bool, default: true
    config_param :name1, :string
    config_param :name2, :string
    config_param :name3, :string, default: "base1"
    config_param :name4, :string, default: "base1"
    config_param :opt1, :enum, list: [:foo, :bar, :baz]
    config_param :opt2, :enum, list: [:foo, :bar, :baz], default: :foo

    def get_all
      [@node, @flag1, @flag2, @name1, @name2, @name3, @name4]
    end
  end

  # Subclass that gives defaults to every required param of Base1.
  class Base1Safe < Base1
    config_set_default :name1, "basex1"
    config_set_default :name2, "basex2"
    config_set_default :opt1, :baz
  end

  # Subclass that defaults required params to nil.
  class Base1Nil < Base1
    config_set_default :name1, nil
    config_set_default :name2, nil
    config_set_default :opt1, nil
    config_param :name5, :string, default: nil
  end

  # Subclass overriding some inherited defaults and adding params.
  class Base2 < Base1
    config_set_default :name2, "base2"
    config_set_default :name4, "base2"
    config_set_default :opt1, :bar
    config_param :name5, :string
    config_param :name6, :string, default: "base2"
    config_param :opt3, :enum, list: [:a, :b]

    def get_all
      ary = super
      ary + [@name5, @name6]
    end
  end

  # Adds nested, multi, and required sections.
  class Base3 < Base2
    config_set_default :opt3, :a
    config_section :node do
      config_param :name, :string, default: "node"
      config_param :type, :string
    end
    config_section :branch, required: true, multi: true do
      config_argument :name, :string
      config_param :size, :integer, default: 10
      config_section :leaf, required: false, multi: true do
        config_param :weight, :integer
        config_section :worm, param_name: 'worms', multi: true do
          config_param :type, :string, default: 'ladybird'
        end
      end
    end

    def get_all
      ary = super
      ary + [@branch]
    end
  end

  # Sections with param_name mapping, defaulted arguments, and a block-
  # converted argument (description3's note).
  class Base4 < Base2
    config_set_default :opt3, :a
    config_section :node, param_name: :nodes do
      config_argument :num, :integer
      config_param :name, :string, default: "node"
      config_param :type, :string, default: "b4"
    end
    config_section :description1, required: false, multi: false do
      config_argument :note, :string, default: "desc1"
      config_param :text, :string
    end
    config_section :description2, required: true, multi: false do
      config_argument :note, :string, default: "desc2"
      config_param :text, :string
    end
    config_section :description3, required: true, multi: true do
      config_argument :note, default: "desc3" do |val|
        "desc3: #{val}"
      end
      config_param :text, :string
    end

    def get_all
      ary = super
      ary + [@nodes, @description1, @description2, @description3]
    end
  end

  # Gives defaults inside inherited sections so Base4 can be instantiated
  # without configuration.
  class Base4Safe < Base4
    # config_section :node, param_name: :nodes do
    #   config_argument :num, :integer
    #   config_param :name, :string, default: "node"
    #   config_param :type, :string, default: "b4"
    # end
    # config_section :description1, required: false, multi: false do
    #   config_argument :note, :string, default: "desc1"
    #   config_param :text, :string
    # end
    # config_section :description2, required: true, multi: false do
    #   config_argument :note, :string, default: "desc2"
    #   config_param :text, :string
    # end
    # config_section :description3, required: true, multi: true do
    #   config_argument :note, default: "desc3" do |val|
    #     "desc3: #{val}"
    #   end
    #   config_param :text, :string
    # end
    config_section :node do
      config_set_default :num, 0
    end
    config_section :description1 do
      config_set_default :text, "teeeext"
    end
    config_section :description2 do
      config_set_default :text, nil
    end
    config_section :description3 do
      config_set_default :text, "yay"
    end
  end

  # Sections created automatically (init: true), single and multi.
  class Init0
    include Fluent::Configurable
    config_section :sec1, init: true, multi: false do
      config_param :name, :string, default: 'sec1'
    end
    config_section :sec2, init: true, multi: true do
      config_param :name, :string, default: 'sec1'
    end
  end

  # One required param per basic value type.
  class Example0
    include Fluent::Configurable

    config_param :stringvalue, :string
    config_param :boolvalue, :bool
    config_param :integervalue, :integer
    config_param :sizevalue, :size
    config_param :timevalue, :time
    config_param :floatvalue, :float
    config_param :hashvalue, :hash
    config_param :arrayvalue, :array
  end

  # Params and a section reachable under alternative (alias) names.
  class ExampleWithAlias
    include Fluent::Configurable

    config_param :name, :string, alias: :fullname
    config_param :bool, :bool, alias: :flag
    config_section :detail, required: false, multi: false, alias: "information" do
      config_param :address, :string, default: "x"
    end

    def get_all
      [@name, @detail]
    end
  end

  # secret: true params must be masked when the config is dumped.
  class ExampleWithSecret
    include Fluent::Configurable

    config_param :normal_param, :string
    config_param :secret_param, :string, secret: true
    config_section :section do
      config_param :normal_param2, :string
      config_param :secret_param2, :string, secret: true
    end
  end

  class ExampleWithDefaultHashAndArray
    include Fluent::Configurable
    config_param :obj1, :hash, default: {}
    config_param :obj2, :array, default: []
  end

  # skip_accessor: true should suppress generation of the reader method.
  class ExampleWithSkipAccessor
    include Fluent::Configurable
    config_param :name, :string, default: 'example7', skip_accessor: true
  end

  # Handles <custom_section> elements manually in #configure instead of
  # declaring them with config_section.
  class ExampleWithCustomSection
    include Fluent::Configurable

    config_param :name_param, :string
    config_section :normal_section do
      config_param :normal_section_param, :string
    end

    class CustomSection
      include Fluent::Configurable
      config_param :custom_section_param, :string
    end

    class AnotherElement
      include Fluent::Configurable
    end

    def configure(conf)
      super

      conf.elements.each do |e|
        next if e.name != 'custom_section'
        CustomSection.new.configure(e)
      end
    end
  end

  class ExampleWithIntFloat
    include Fluent::Configurable
    config_param :int1, :integer
    config_param :float1, :float
  end

  # Fixtures for overwriting section options in subclasses.
  module Overwrite
    class Base
      include Fluent::Configurable

      config_param :name, :string, alias: :fullname
      config_param :bool, :bool, alias: :flag
      config_section :detail, required: false, multi: false, alias: "information" do
        config_param :address, :string, default: "x"
      end
    end

    class Required < Base
      config_section :detail, required: true do
        config_param :address, :string, default: "x"
      end
    end

    class Multi < Base
      config_section :detail, multi: true do
        config_param :address, :string, default: "x"
      end
    end

    class Alias < Base
      config_section :detail, alias: "information2" do
        config_param :address, :string, default: "x"
      end
    end

    class DefaultOptions < Base
      config_section :detail do
        config_param :address, :string, default: "x"
      end
    end

    class DetailAddressDefault < Base
      config_section :detail do
        config_param :address, :string, default: "y"
      end
    end

    class AddParam < Base
      config_section :detail do
        config_param :phone_no, :string
      end
    end

    class AddParamOverwriteAddress < Base
      config_section :detail do
        config_param :address, :string, default: "y"
        config_param :phone_no, :string
      end
    end
  end

  # Fixtures for final: true section semantics.
  module Final
    # Show what is allowed in finalized sections
    # InheritsFinalized < Finalized < Base
    class Base
      include Fluent::Configurable
      config_section :appendix, multi: false, final: false do
        config_param :code, :string
        config_param :name, :string
        config_param :address, :string, default: ""
      end
    end

    class Finalized < Base
      # to non-finalized section
      # subclass can change type (code)
      # add default value (name)
      # change default value (address)
      # add field (age)
      config_section :appendix, final: true do
        config_param :code, :integer
        config_set_default :name, "y"
        config_set_default :address, "-"
        config_param :age, :integer, default: 10
      end
    end

    class InheritsFinalized < Finalized
      # to finalized section
      # subclass can add default value (code)
      # change default value (age)
      # add field (phone_no)
      config_section :appendix do
        config_set_default :code, 2
        config_set_default :age, 0
        config_param :phone_no, :string
      end
    end

    # Show what is allowed/prohibited for finalized sections
    class FinalizedBase
      include Fluent::Configurable
      config_section :appendix, param_name: :apd, init: false, required: true, multi: false, alias: "options", final: true do
        config_param :name, :string
      end
    end

    class FinalizedBase2
      include Fluent::Configurable
      config_section :appendix, param_name: :apd, init: false, required: false, multi: false, alias: "options", final: true do
        config_param :name, :string
      end
    end

    # subclass can change init with adding default values
    class OverwriteInit < FinalizedBase2
      config_section :appendix, init: true do
        config_set_default :name, "moris"
        config_param :code, :integer, default: 0
      end
    end

    # subclass cannot change type (name)
    class Subclass < FinalizedBase
      config_section :appendix do
        config_param :name, :integer
      end
    end

    # subclass cannot change param_name
    class OverwriteParamName < FinalizedBase
      config_section :appendix, param_name: :adx do
      end
    end

    # subclass cannot change final (section)
    class OverwriteFinal < FinalizedBase
      config_section :appendix, final: false do
        config_param :name, :integer
      end
    end

    # subclass cannot change required
    class OverwriteRequired < FinalizedBase
      config_section :appendix, required: false do
      end
    end

    # subclass cannot change multi
    class OverwriteMulti < FinalizedBase
      config_section :appendix, multi: true do
      end
    end

    # subclass cannot change alias
    class OverwriteAlias < FinalizedBase
      config_section :appendix, alias: "options2" do
      end
    end
  end

  # Fixtures for owner/owned plugin default propagation.
  module OverwriteDefaults
    class Owner
      include Fluent::Configurable
      config_set_default :key1, "V1"
      config_section :buffer do
        config_set_default :size_of_something, 1024
      end
    end

    class SubOwner < Owner
      config_section :buffer do
        config_set_default :size_of_something, 2048
      end
    end

    class NilOwner < Owner
      config_section :buffer do
        config_set_default :size_of_something, nil
      end
    end

    class FlatChild
      include Fluent::Configurable
      attr_accessor :owner
      config_param :key1, :string, default: "v1"
    end

    class BufferChild
      include Fluent::Configurable
      attr_accessor :owner
      configured_in :buffer
      config_param :size_of_something, :size, default: 128
    end

    class BufferBase
      include Fluent::Configurable
    end

    class BufferSubclass < BufferBase
      attr_accessor :owner
      configured_in :buffer
      config_param :size_of_something, :size, default: 512
    end

    class BufferSubSubclass < BufferSubclass
    end
  end

  # deprecated logs a warning on use; obsoleted raises a ConfigError.
  class UnRecommended
    include Fluent::Configurable
    attr_accessor :log
    config_param :key1, :string, default: 'deprecated', deprecated: "key1 will be removed."
    config_param :key2, :string, default: 'obsoleted', obsoleted: "key2 has been removed."
  end
end
module Fluent::Config
class TestConfigurable < ::Test::Unit::TestCase
sub_test_case 'class defined without config_section' do
sub_test_case '#initialize' do
test 'create instance methods and default values by config_param and config_set_default' do
obj1 = ConfigurableSpec::Base1.new
assert_equal("node", obj1.node)
assert_false(obj1.flag1)
assert_true(obj1.flag2)
assert_nil(obj1.name1)
assert_nil(obj1.name2)
assert_equal("base1", obj1.name3)
assert_equal("base1", obj1.name4)
assert_nil(obj1.opt1)
assert_equal(:foo, obj1.opt2)
end
test 'create instance methods and default values overwritten by sub class definition' do
obj2 = ConfigurableSpec::Base2.new
assert_equal("node", obj2.node)
assert_false(obj2.flag1)
assert_true(obj2.flag2)
assert_nil(obj2.name1)
assert_equal("base2", obj2.name2)
assert_equal("base1", obj2.name3)
assert_equal("base2", obj2.name4)
assert_nil(obj2.name5)
assert_equal("base2", obj2.name6)
assert_equal(:bar, obj2.opt1)
assert_equal(:foo, obj2.opt2)
end
end
sub_test_case '#configured_section_create' do
test 'raises configuration error if required param exists but no configuration element is specified' do
obj = ConfigurableSpec::Base1.new
assert_raise(Fluent::ConfigError.new("'name1' parameter is required")) do
obj.configured_section_create(nil)
end
end
test 'creates root section with default values if name and config are specified with nil' do
obj = ConfigurableSpec::Base1Safe.new
root = obj.configured_section_create(nil)
assert_equal "node", root.node
assert_false root.flag1
assert_true root.flag2
assert_equal "basex1", root.name1
assert_equal "basex2", root.name2
assert_equal "base1", root.name3
assert_equal "base1", root.name4
assert_equal :baz, root.opt1
assert_equal :foo, root.opt2
end
test 'creates root section with default values if name is nil and config is empty element' do
obj = ConfigurableSpec::Base1Safe.new
root = obj.configured_section_create(nil, config_element())
assert_equal "node", root.node
assert_false root.flag1
assert_true root.flag2
assert_equal "basex1", root.name1
assert_equal "basex2", root.name2
assert_equal "base1", root.name3
assert_equal "base1", root.name4
assert_equal :baz, root.opt1
assert_equal :foo, root.opt2
end
test 'creates root section with specified value if name is nil and configuration element is specified' do
obj = ConfigurableSpec::Base1Safe.new
root = obj.configured_section_create(nil, config_element('match', '', {'node' => "nodename", 'flag1' => 'true', 'name1' => 'fixed1', 'opt1' => 'foo'}))
assert_equal "nodename", root.node
assert_equal "fixed1", root.name1
assert_true root.flag1
assert_equal :foo, root.opt1
assert_true root.flag2
assert_equal "basex2", root.name2
assert_equal "base1", root.name3
assert_equal "base1", root.name4
assert_equal :foo, root.opt2
end
end
sub_test_case '#configure' do
test 'returns configurable object itself' do
b2 = ConfigurableSpec::Base2.new
assert_instance_of(ConfigurableSpec::Base2, b2.configure(config_element("", "", {"name1" => "t1", "name5" => "t5", "opt3" => "a"})))
end
test 'can accept frozen string' do
b2 = ConfigurableSpec::Base2.new
assert_instance_of(ConfigurableSpec::Base2, b2.configure(config_element("", "", {"name1" => "t1".freeze, "name5" => "t5", "opt3" => "a"})))
end
test 'raise errors without any specifications for param without defaults' do
b2 = ConfigurableSpec::Base2.new
assert_raise(Fluent::ConfigError) { b2.configure(config_element("", "", {})) }
assert_raise(Fluent::ConfigError) { b2.configure(config_element("", "", {"name1" => "t1"})) }
assert_raise(Fluent::ConfigError) { b2.configure(config_element("", "", {"name5" => "t5"})) }
assert_raise(Fluent::ConfigError) { b2.configure(config_element("", "", {"name1" => "t1", "name5" => "t5"})) }
assert_nothing_raised { b2.configure(config_element("", "", {"name1" => "t1", "name5" => "t5", "opt3" => "a"})) }
assert_equal(["node", false, true, "t1", "base2", "base1", "base2", "t5", "base2"], b2.get_all)
assert_equal(:a, b2.opt3)
end
test 'can configure bool values' do
b2a = ConfigurableSpec::Base2.new
assert_nothing_raised { b2a.configure(config_element("", "", {"flag1" => "true", "flag2" => "yes", "name1" => "t1", "name5" => "t5", "opt3" => "a"})) }
assert_true(b2a.flag1)
assert_true(b2a.flag2)
b2b = ConfigurableSpec::Base2.new
assert_nothing_raised { b2b.configure(config_element("", "", {"flag1" => false, "flag2" => "no", "name1" => "t1", "name5" => "t5", "opt3" => "a"})) }
assert_false(b2b.flag1)
assert_false(b2b.flag2)
end
test 'overwrites values of defaults' do
b2 = ConfigurableSpec::Base2.new
b2.configure(config_element("", "", {"name1" => "t1", "name2" => "t2", "name3" => "t3", "name4" => "t4", "name5" => "t5", "opt1" => "foo", "opt3" => "b"}))
assert_equal("t1", b2.name1)
assert_equal("t2", b2.name2)
assert_equal("t3", b2.name3)
assert_equal("t4", b2.name4)
assert_equal("t5", b2.name5)
assert_equal("base2", b2.name6)
assert_equal(:foo, b2.opt1)
assert_equal(:b, b2.opt3)
assert_equal(["node", false, true, "t1", "t2", "t3", "t4", "t5", "base2"], b2.get_all)
end
test 'enum type rejects values which does not exist in list' do
default = config_element("", "", {"name1" => "t1", "name2" => "t2", "name3" => "t3", "name4" => "t4", "name5" => "t5", "opt1" => "foo", "opt3" => "b"})
b2 = ConfigurableSpec::Base2.new
assert_nothing_raised { b2.configure(default) }
assert_raise(Fluent::ConfigError) { b2.configure(default.merge({"opt1" => "bazz"})) }
assert_raise(Fluent::ConfigError) { b2.configure(default.merge({"opt2" => "fooooooo"})) }
assert_raise(Fluent::ConfigError) { b2.configure(default.merge({"opt3" => "c"})) }
end
sub_test_case 'default values should be duplicated before touched in plugin code' do
test 'default object should be dupped for cases configured twice' do
x6a = ConfigurableSpec::ExampleWithDefaultHashAndArray.new
assert_nothing_raised { x6a.configure(config_element("")) }
assert_equal({}, x6a.obj1)
assert_equal([], x6a.obj2)
x6b = ConfigurableSpec::ExampleWithDefaultHashAndArray.new
assert_nothing_raised { x6b.configure(config_element("")) }
assert_equal({}, x6b.obj1)
assert_equal([], x6b.obj2)
assert { x6a.obj1.object_id != x6b.obj1.object_id }
assert { x6a.obj2.object_id != x6b.obj2.object_id }
x6c = ConfigurableSpec::ExampleWithDefaultHashAndArray.new
assert_nothing_raised { x6c.configure(config_element("")) }
assert_equal({}, x6c.obj1)
assert_equal([], x6c.obj2)
x6c.obj1['k'] = 'v'
x6c.obj2 << 'v'
assert_equal({'k' => 'v'}, x6c.obj1)
assert_equal(['v'], x6c.obj2)
assert_equal({}, x6a.obj1)
assert_equal([], x6a.obj2)
end
end
test 'strict value type' do
default = config_element("", "", {"int1" => "1", "float1" => ""})
c = ConfigurableSpec::ExampleWithIntFloat.new
assert_nothing_raised { c.configure(default) }
assert_raise(Fluent::ConfigError) { c.configure(default, true) }
end
end
test 'set nil for a parameter which has no default value' do
obj = ConfigurableSpec::Base2.new
conf = config_element("", "", {"name1" => nil, "name5" => "t5", "opt3" => "a"})
assert_raise(Fluent::ConfigError.new("'name1' parameter is required but nil is specified")) do
obj.configure(conf)
end
end
test 'set nil for a parameter which has non-nil default value' do
obj = ConfigurableSpec::Base2.new
conf = config_element("", "", {"name1" => "t1", "name3" => nil, "name5" => "t5", "opt3" => "a"})
assert_raise(Fluent::ConfigError.new("'name3' parameter is required but nil is specified")) do
obj.configure(conf)
end
end
test 'set nil for a parameter whose default value is nil' do
obj = ConfigurableSpec::Base1Nil.new
conf = config_element("", "", {"name5" => nil})
obj.configure(conf)
assert_nil obj.name5
end
test 'set nil for parameters whose default values are overwritten by nil' do
obj = ConfigurableSpec::Base1Nil.new
conf = config_element("", "", {"name1" => nil, "name2" => nil, "opt1" => nil})
obj.configure(conf)
assert_nil obj.name1
assert_nil obj.name2
assert_nil obj.opt1
end
test 'set :default' do
obj = ConfigurableSpec::Base2.new
conf = config_element("", "", {"name1" => "t1", "name3" => :default, "name5" => "t5", "opt3" => "a"})
obj.configure(conf)
assert_equal "base1", obj.name3
end
test 'set :default for a parameter which has no default value' do
obj = ConfigurableSpec::Base2.new
conf = config_element("", "", {"name1" => :default, "name5" => "t5", "opt3" => "a"})
assert_raise(Fluent::ConfigError.new("'name1' doesn't have default value")) do
obj.configure(conf)
end
end
test 'set :default for a parameter which has an overwritten default value' do
obj = ConfigurableSpec::Base2.new
conf = config_element("", "", {"name1" => "t1", "name3" => "t3", "name4" => :default, "name5" => "t5", "opt3" => "a"})
obj.configure(conf)
assert_equal "base2", obj.name4
end
end
sub_test_case 'class defined with config_section' do
sub_test_case '#initialize' do
test 'create instance methods and default values as nil for params from config_section specified as non-multi' do
b4 = ConfigurableSpec::Base4.new
assert_nil(b4.description1)
assert_nil(b4.description2)
end
test 'create instance methods and default values as [] for params from config_section specified as multi' do
b4 = ConfigurableSpec::Base4.new
assert_equal([], b4.description3)
end
test 'overwrite base class definition by config_section of sub class definition' do
b3 = ConfigurableSpec::Base3.new
assert_equal([], b3.node)
end
test 'create instance methods and default values by param_name' do
b4 = ConfigurableSpec::Base4.new
assert_equal([], b4.nodes)
assert_equal("node", b4.node)
end
test 'create non-required and multi without any specifications' do
b3 = ConfigurableSpec::Base3.new
assert_false(b3.class.merged_configure_proxy.sections[:node].required?)
assert_true(b3.class.merged_configure_proxy.sections[:node].multi?)
end
end
sub_test_case '#configured_section_create' do
test 'raises configuration error if required param exists but no configuration element is specified' do
obj = ConfigurableSpec::Base4.new
assert_raise(Fluent::ConfigError.new("'<node ARG>' section requires argument")) do
obj.configured_section_create(:node)
end
assert_raise(Fluent::ConfigError.new("'text' parameter is required")) do
obj.configured_section_create(:description1)
end
end
test 'creates any defined section with default values if name is nil and config is not specified' do
obj = ConfigurableSpec::Base4Safe.new
node = obj.configured_section_create(:node)
assert_equal 0, node.num
assert_equal "node", node.name
assert_equal "b4", node.type
desc1 = obj.configured_section_create(:description1)
assert_equal "desc1", desc1.note
assert_equal "teeeext", desc1.text
end
test 'creates any defined section with default values if name is nil and config is empty element' do
obj = ConfigurableSpec::Base4Safe.new
node = obj.configured_section_create(:node, config_element())
assert_equal 0, node.num
assert_equal "node", node.name
assert_equal "b4", node.type
desc1 = obj.configured_section_create(:description1, config_element())
assert_equal "desc1", desc1.note
assert_equal "teeeext", desc1.text
end
test 'creates any defined section with specified value if name is nil and configuration element is specified' do
obj = ConfigurableSpec::Base4Safe.new
node = obj.configured_section_create(:node, config_element('node', '1', {'name' => 'node1', 'type' => 'b1'}))
assert_equal 1, node.num
assert_equal "node1", node.name
assert_equal "b1", node.type
desc1 = obj.configured_section_create(:description1, config_element('description1', 'desc one', {'text' => 't'}))
assert_equal "desc one", desc1.note
assert_equal "t", desc1.text
end
test 'creates a defined section instance even if it is defined as multi:true' do
obj = ConfigurableSpec::Base4Safe.new
desc3 = obj.configured_section_create(:description3)
assert_equal "desc3", desc3.note
assert_equal "yay", desc3.text
desc3 = obj.configured_section_create(:description3, config_element('description3', 'foo'))
assert_equal "desc3: foo", desc3.note
assert_equal "yay", desc3.text
end
end
sub_test_case '#configure' do
BASE_ATTRS = {
"name1" => "1", "name2" => "2", "name3" => "3",
"name4" => "4", "name5" => "5", "name6" => "6",
}
test 'checks required subsections' do
b3 = ConfigurableSpec::Base3.new
# branch sections required
assert_raise(Fluent::ConfigError) { b3.configure(config_element('ROOT', '', BASE_ATTRS, [])) }
# branch argument required
msg = "'<branch ARG>' section requires argument, in section branch"
#expect{ b3.configure(e('ROOT', '', BASE_ATTRS, [e('branch', '')])) }.to raise_error(Fluent::ConfigError, msg)
assert_raise(Fluent::ConfigError.new(msg)) { b3.configure(config_element('ROOT', '', BASE_ATTRS, [config_element('branch', '')])) }
# leaf is not required
assert_nothing_raised { b3.configure(config_element('ROOT', '', BASE_ATTRS, [config_element('branch', 'branch_name')])) }
# leaf weight required
msg = "'weight' parameter is required, in section branch > leaf"
branch1 = config_element('branch', 'branch_name', {size: 1}, [config_element('leaf', '10', {"weight" => 1})])
assert_nothing_raised { b3.configure(config_element('ROOT', '', BASE_ATTRS, [branch1])) }
branch2 = config_element('branch', 'branch_name', {size: 1}, [config_element('leaf', '20')])
assert_raise(Fluent::ConfigError.new(msg)) { b3.configure(config_element('ROOT', '', BASE_ATTRS, [branch1, branch2])) }
branch3 = config_element('branch', 'branch_name', {size: 1}, [config_element('leaf', '10', {"weight" => 3}), config_element('leaf', '20')])
assert_raise(Fluent::ConfigError.new(msg)) { b3.configure(config_element('ROOT', '', BASE_ATTRS, [branch3])) }
### worm not required
b4 = ConfigurableSpec::Base4.new
d1 = config_element('description1', '', {"text" => "d1"})
d2 = config_element('description2', '', {"text" => "d2"})
d3 = config_element('description3', '', {"text" => "d3"})
assert_nothing_raised { b4.configure(config_element('ROOT', '', BASE_ATTRS, [d1.dup, d2.dup, d3.dup])) }
# description1 cannot be specified 2 or more
msg = "'<description1>' section cannot be written twice or more"
assert_raise(Fluent::ConfigError.new(msg)) { b4.configure(config_element('ROOT', '', BASE_ATTRS, [d1.dup, d2.dup, d1.dup, d3.dup])) }
# description2 cannot be specified 2 or more
msg = "'<description2>' section cannot be written twice or more"
assert_raise(Fluent::ConfigError.new(msg)) { b4.configure(config_element('ROOT', '', BASE_ATTRS, [d1.dup, d2.dup, d3.dup, d2.dup])) }
# description3 can be specified 2 or more
assert_nothing_raised { b4.configure(config_element('ROOT', '', BASE_ATTRS, [d1.dup, d2.dup, d3.dup, d3.dup])) }
end
test 'constructs configuration object tree for Base3' do
conf = config_element(
'ROOT',
'',
BASE_ATTRS,
[
config_element('node', '', {"type" => "1"}), config_element('node', '', {"name" => "node2","type" => "2"}),
config_element('branch', 'b1.*', {}, []),
config_element('branch',
'b2.*',
{"size" => 5},
[
config_element('leaf', 'THIS IS IGNORED', {"weight" => 55}, []),
config_element('leaf', 'THIS IS IGNORED', {"weight" => 50}, [ config_element('worm', '', {}) ]),
config_element('leaf', 'THIS IS IGNORED', {"weight" => 50}, [ config_element('worm', '', {"type" => "w1"}), config_element('worm', '', {"type" => "w2"}) ]),
]
),
config_element('branch',
'b3.*',
{"size" => "503"},
[
config_element('leaf', 'THIS IS IGNORED', {"weight" => 55}, []),
]
)
],
)
b3 = ConfigurableSpec::Base3.new.configure(conf)
assert_not_equal("node", b3.node) # overwritten
assert_equal("1", b3.name1)
assert_equal("2", b3.name2)
assert_equal("3", b3.name3)
assert_equal("4", b3.name4)
assert_equal("5", b3.name5)
assert_equal("6", b3.name6)
assert_instance_of(Array, b3.node)
assert_equal(2, b3.node.size)
assert_equal("node", b3.node[0].name)
assert_equal("1", b3.node[0].type)
assert_equal(b3.node[0].type, b3.node[0][:type])
assert_equal("node2", b3.node[1].name)
assert_equal("2", b3.node[1].type)
assert_equal(b3.node[1].type, b3.node[1][:type])
assert_instance_of(Array, b3.branch)
assert_equal(3, b3.branch.size)
assert_equal('b1.*', b3.branch[0].name)
assert_equal(10, b3.branch[0].size)
assert_equal([], b3.branch[0].leaf)
assert_equal('b2.*', b3.branch[1].name)
assert_equal(5, b3.branch[1].size)
assert_equal(3, b3.branch[1].leaf.size)
assert_equal(b3.branch[1].leaf, b3.branch[1][:leaf])
assert_equal(55, b3.branch[1].leaf[0].weight)
assert_equal(0, b3.branch[1].leaf[0].worms.size)
assert_equal(50, b3.branch[1].leaf[1].weight)
assert_equal(1, b3.branch[1].leaf[1].worms.size)
assert_equal("ladybird", b3.branch[1].leaf[1].worms[0].type)
assert_equal(50, b3.branch[1].leaf[2].weight)
assert_equal(2, b3.branch[1].leaf[2].worms.size)
assert_equal("w1", b3.branch[1].leaf[2].worms[0].type)
assert_equal("w2", b3.branch[1].leaf[2].worms[1].type)
assert_equal('b3.*', b3.branch[2].name)
assert_equal(503, b3.branch[2].size)
assert_equal(1, b3.branch[2].leaf.size)
assert_equal(55, b3.branch[2].leaf[0].weight)
end
test 'constructs configuration object tree for Base4' do
conf = config_element(
'ROOT',
'',
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_literal_parser.rb | test/config/test_literal_parser.rb | require_relative "../helper"
require_relative 'assertions'
require "fluent/config/error"
require "fluent/config/literal_parser"
require "fluent/config/v1_parser"
require 'json'
module Fluent::Config
class TestLiteralParser < ::Test::Unit::TestCase
def parse_text(text)
basepath = File.expand_path(File.dirname(__FILE__)+'/../../')
ss = StringScanner.new(text)
parser = Fluent::Config::V1Parser.new(ss, basepath, "(test)", eval_context)
parser.parse_literal
end
TestLiteralParserContext = Struct.new(:v1, :v2, :v3)
def v1
:test
end
def v2
true
end
def v3
nil
end
def eval_context
@eval_context ||= TestLiteralParserContext.new(v1, v2, v3)
end
sub_test_case 'boolean parsing' do
def test_true
assert_text_parsed_as('true', "true")
end
def test_false
assert_text_parsed_as('false', "false")
end
def test_trueX
assert_text_parsed_as('trueX', "trueX")
end
def test_falseX
assert_text_parsed_as('falseX', "falseX")
end
end
sub_test_case 'integer parsing' do
test('0') { assert_text_parsed_as('0', "0") }
test('1') { assert_text_parsed_as('1', "1") }
test('10') { assert_text_parsed_as('10', "10") }
test('-1') { assert_text_parsed_as('-1', "-1") }
test('-10') { assert_text_parsed_as('-10', "-10") }
test('0 ') { assert_text_parsed_as('0', "0 ") }
test(' -1 ') { assert_text_parsed_as("-1", ' -1 ') }
# string
test('01') { assert_text_parsed_as('01', "01") }
test('00') { assert_text_parsed_as('00', "00") }
test('-01') { assert_text_parsed_as('-01', "-01") }
test('-00') { assert_text_parsed_as('-00', "-00") }
test('0x61') { assert_text_parsed_as('0x61', "0x61") }
test('0s') { assert_text_parsed_as('0s', "0s") }
end
sub_test_case 'float parsing' do
test('1.1') { assert_text_parsed_as('1.1', "1.1") }
test('0.1') { assert_text_parsed_as('0.1', "0.1") }
test('0.0') { assert_text_parsed_as('0.0', "0.0") }
test('-1.1') { assert_text_parsed_as('-1.1', "-1.1") }
test('-0.1') { assert_text_parsed_as('-0.1', "-0.1") }
test('1.10') { assert_text_parsed_as('1.10', "1.10") }
# string
test('12e8') { assert_text_parsed_as('12e8', "12e8") }
test('12.1e7') { assert_text_parsed_as('12.1e7', "12.1e7") }
test('-12e8') { assert_text_parsed_as('-12e8', "-12e8") }
test('-12.1e7') { assert_text_parsed_as('-12.1e7', "-12.1e7") }
test('.0') { assert_text_parsed_as('.0', ".0") }
test('.1') { assert_text_parsed_as('.1', ".1") }
test('0.') { assert_text_parsed_as('0.', "0.") }
test('1.') { assert_text_parsed_as('1.', "1.") }
test('.0a') { assert_text_parsed_as('.0a', ".0a") }
test('1.a') { assert_text_parsed_as('1.a', "1.a") }
test('0@') { assert_text_parsed_as('0@', "0@") }
end
sub_test_case 'float keywords parsing' do
test('NaN') { assert_text_parsed_as('NaN', "NaN") }
test('Infinity') { assert_text_parsed_as('Infinity', "Infinity") }
test('-Infinity') { assert_text_parsed_as('-Infinity', "-Infinity") }
test('NaNX') { assert_text_parsed_as('NaNX', "NaNX") }
test('InfinityX') { assert_text_parsed_as('InfinityX', "InfinityX") }
test('-InfinityX') { assert_text_parsed_as('-InfinityX', "-InfinityX") }
end
sub_test_case 'double quoted string' do
test('""') { assert_text_parsed_as("", '""') }
test('"text"') { assert_text_parsed_as("text", '"text"') }
test('"\\""') { assert_text_parsed_as("\"", '"\\""') }
test('"\\t"') { assert_text_parsed_as("\t", '"\\t"') }
test('"\\n"') { assert_text_parsed_as("\n", '"\\n"') }
test('"\\r\\n"') { assert_text_parsed_as("\r\n", '"\\r\\n"') }
test('"\\f\\b"') { assert_text_parsed_as("\f\b", '"\\f\\b"') }
test('"\\.t"') { assert_text_parsed_as(".t", '"\\.t"') }
test('"\\$t"') { assert_text_parsed_as("$t", '"\\$t"') }
test('"\\"') { assert_text_parsed_as("#t", '"\\#t"') }
test('"\\0"') { assert_text_parsed_as("\0", '"\\0"') }
test('"\\z"') { assert_parse_error('"\\z"') } # unknown escaped character
test('"\\1"') { assert_parse_error('"\\1"') } # unknown escaped character
test('"t') { assert_parse_error('"t') } # non-terminated quoted character
test("\"t\nt\"") { assert_text_parsed_as("t\nt", "\"t\nt\"" ) } # multiline string
test("\"t\\\nt\"") { assert_text_parsed_as("tt", "\"t\\\nt\"" ) } # multiline string
test('t"') { assert_text_parsed_as('t"', 't"') }
test('"."') { assert_text_parsed_as('.', '"."') }
test('"*"') { assert_text_parsed_as('*', '"*"') }
test('"@"') { assert_text_parsed_as('@', '"@"') }
test('"\\#{test}"') { assert_text_parsed_as("\#{test}", '"\\#{test}"') }
test('"$"') { assert_text_parsed_as('$', '"$"') }
test('"$t"') { assert_text_parsed_as('$t', '"$t"') }
test('"$}"') { assert_text_parsed_as('$}', '"$}"') }
test('"\\\\"') { assert_text_parsed_as("\\", '"\\\\"') }
test('"\\["') { assert_text_parsed_as("[", '"\\["') }
end
sub_test_case 'single quoted string' do
test("''") { assert_text_parsed_as("", "''") }
test("'text'") { assert_text_parsed_as("text", "'text'") }
test("'\\''") { assert_text_parsed_as('\'', "'\\''") }
test("'\\t'") { assert_text_parsed_as('\t', "'\\t'") }
test("'\\n'") { assert_text_parsed_as('\n', "'\\n'") }
test("'\\r\\n'") { assert_text_parsed_as('\r\n', "'\\r\\n'") }
test("'\\f\\b'") { assert_text_parsed_as('\f\b', "'\\f\\b'") }
test("'\\.t'") { assert_text_parsed_as('\.t', "'\\.t'") }
test("'\\$t'") { assert_text_parsed_as('\$t', "'\\$t'") }
test("'\\#t'") { assert_text_parsed_as('\#t', "'\\#t'") }
test("'\\z'") { assert_text_parsed_as('\z', "'\\z'") }
test("'\\0'") { assert_text_parsed_as('\0', "'\\0'") }
test("'\\1'") { assert_text_parsed_as('\1', "'\\1'") }
test("'t") { assert_parse_error("'t") } # non-terminated quoted character
test("t'") { assert_text_parsed_as("t'", "t'") }
test("'.'") { assert_text_parsed_as('.', "'.'") }
test("'*'") { assert_text_parsed_as('*', "'*'") }
test("'@'") { assert_text_parsed_as('@', "'@'") }
test(%q['#{test}']) { assert_text_parsed_as('#{test}', %q['#{test}']) }
test("'$'") { assert_text_parsed_as('$', "'$'") }
test("'$t'") { assert_text_parsed_as('$t', "'$t'") }
test("'$}'") { assert_text_parsed_as('$}', "'$}'") }
test("'\\\\'") { assert_text_parsed_as('\\', "'\\\\'") }
test("'\\['") { assert_text_parsed_as('\[', "'\\['") }
end
sub_test_case 'nonquoted string parsing' do
test("''") { assert_text_parsed_as(nil, '') }
test('text') { assert_text_parsed_as('text', 'text') }
test('\"') { assert_text_parsed_as('\"', '\"') }
test('\t') { assert_text_parsed_as('\t', '\t') }
test('\n') { assert_text_parsed_as('\n', '\n') }
test('\r\n') { assert_text_parsed_as('\r\n', '\r\n') }
test('\f\b') { assert_text_parsed_as('\f\b', '\f\b') }
test('\.t') { assert_text_parsed_as('\.t', '\.t') }
test('\$t') { assert_text_parsed_as('\$t', '\$t') }
test('\#t') { assert_text_parsed_as('\#t', '\#t') }
test('\z') { assert_text_parsed_as('\z', '\z') }
test('\0') { assert_text_parsed_as('\0', '\0') }
test('\1') { assert_text_parsed_as('\1', '\1') }
test('.') { assert_text_parsed_as('.', '.') }
test('*') { assert_text_parsed_as('*', '*') }
test('@') { assert_text_parsed_as('@', '@') }
test('#{test}') { assert_text_parsed_as('#{test}', '#{test}') }
test('$') { assert_text_parsed_as('$', '$') }
test('$t') { assert_text_parsed_as('$t', '$t') }
test('$}') { assert_text_parsed_as('$}', '$}') }
test('\\\\') { assert_text_parsed_as('\\\\', '\\\\') }
test('\[') { assert_text_parsed_as('\[', '\[') }
test('#foo') { assert_text_parsed_as('#foo', '#foo') } # not comment out
test('foo#bar') { assert_text_parsed_as('foo#bar', 'foo#bar') } # not comment out
test(' text') { assert_text_parsed_as('text', ' text') } # remove starting spaces
test(' #foo') { assert_text_parsed_as('#foo', ' #foo') } # remove starting spaces
test('foo #bar') { assert_text_parsed_as('foo', 'foo #bar') } # comment out
test('foo\t#bar') { assert_text_parsed_as('foo', "foo\t#bar") } # comment out
test('t') { assert_text_parsed_as('t', 't') }
test('T') { assert_text_parsed_as('T', 'T') }
test('_') { assert_text_parsed_as('_', '_') }
test('T1') { assert_text_parsed_as('T1', 'T1') }
test('_2') { assert_text_parsed_as('_2', '_2') }
test('t0') { assert_text_parsed_as('t0', 't0') }
test('t@') { assert_text_parsed_as('t@', 't@') }
test('t-') { assert_text_parsed_as('t-', 't-') }
test('t.') { assert_text_parsed_as('t.', 't.') }
test('t+') { assert_text_parsed_as('t+', 't+') }
test('t/') { assert_text_parsed_as('t/', 't/') }
test('t=') { assert_text_parsed_as('t=', 't=') }
test('t,') { assert_text_parsed_as('t,', 't,') }
test('0t') { assert_text_parsed_as('0t', "0t") }
test('@1t') { assert_text_parsed_as('@1t', '@1t') }
test('-1t') { assert_text_parsed_as('-1t', '-1t') }
test('.1t') { assert_text_parsed_as('.1t', '.1t') }
test(',1t') { assert_text_parsed_as(',1t', ',1t') }
test('.t') { assert_text_parsed_as('.t', '.t') }
test('*t') { assert_text_parsed_as('*t', '*t') }
test('@t') { assert_text_parsed_as('@t', '@t') }
test('{t') { assert_parse_error('{t') } # '{' begins map
test('t{') { assert_text_parsed_as('t{', 't{') }
test('}t') { assert_text_parsed_as('}t', '}t') }
test('[t') { assert_parse_error('[t') } # '[' begins array
test('t[') { assert_text_parsed_as('t[', 't[') }
test(']t') { assert_text_parsed_as(']t', ']t') }
test('t:') { assert_text_parsed_as('t:', 't:') }
test('t;') { assert_text_parsed_as('t;', 't;') }
test('t?') { assert_text_parsed_as('t?', 't?') }
test('t^') { assert_text_parsed_as('t^', 't^') }
test('t`') { assert_text_parsed_as('t`', 't`') }
test('t~') { assert_text_parsed_as('t~', 't~') }
test('t|') { assert_text_parsed_as('t|', 't|') }
test('t>') { assert_text_parsed_as('t>', 't>') }
test('t<') { assert_text_parsed_as('t<', 't<') }
test('t(') { assert_text_parsed_as('t(', 't(') }
end
sub_test_case 'embedded ruby code parsing' do
test('"#{v1}"') { assert_text_parsed_as("#{v1}", '"#{v1}"') }
test('"#{v2}"') { assert_text_parsed_as("#{v2}", '"#{v2}"') }
test('"#{v3}"') { assert_text_parsed_as("#{v3}", '"#{v3}"') }
test('"#{1+1}"') { assert_text_parsed_as("2", '"#{1+1}"') }
test('"#{}"') { assert_text_parsed_as("", '"#{}"') }
test('"t#{v1}"') { assert_text_parsed_as("t#{v1}", '"t#{v1}"') }
test('"t#{v1}t"') { assert_text_parsed_as("t#{v1}t", '"t#{v1}t"') }
test('"#{"}"}"') { assert_text_parsed_as("}", '"#{"}"}"') }
test('"#{#}"') { assert_parse_error('"#{#}"') } # error in embedded ruby code
test("\"\#{\n=begin\n}\"") { assert_parse_error("\"\#{\n=begin\n}\"") } # error in embedded ruby code
test('"#{v1}foo#{v2}"') { assert_text_parsed_as("#{v1}foo#{v2}", '"#{v1}foo#{v2}"') }
test('"#{1+1}foo#{2+2}bar"') { assert_text_parsed_as("#{1+1}foo#{2+2}bar", '"#{1+1}foo#{2+2}bar"') }
test('"foo#{hostname}"') { assert_text_parsed_as("foo#{Socket.gethostname}", '"foo#{hostname}"') }
test('"foo#{worker_id}"') {
ENV.delete('SERVERENGINE_WORKER_ID')
assert_text_parsed_as("foo", '"foo#{worker_id}"')
ENV['SERVERENGINE_WORKER_ID'] = '1'
assert_text_parsed_as("foo1", '"foo#{worker_id}"')
ENV.delete('SERVERENGINE_WORKER_ID')
}
test('nil') { assert_text_parsed_as(nil, '"#{raise SetNil}"') }
test('default') { assert_text_parsed_as(:default, '"#{raise SetDefault}"') }
test('nil helper') { assert_text_parsed_as(nil, '"#{use_nil}"') }
test('default helper') { assert_text_parsed_as(:default, '"#{use_default}"') }
end
sub_test_case 'array parsing' do
test('[]') { assert_text_parsed_as_json([], '[]') }
test('[1]') { assert_text_parsed_as_json([1], '[1]') }
test('[1,2]') { assert_text_parsed_as_json([1,2], '[1,2]') }
test('[1, 2]') { assert_text_parsed_as_json([1,2], '[1, 2]') }
test('[ 1 , 2 ]') { assert_text_parsed_as_json([1,2], '[ 1 , 2 ]') }
test('[1,2,]') { assert_parse_error('[1,2,]') } # TODO: Need trailing commas support?
test("[\n1\n,\n2\n]") { assert_text_parsed_as_json([1,2], "[\n1\n,\n2\n]") }
test('["a"]') { assert_text_parsed_as_json(["a"], '["a"]') }
test('["a","b"]') { assert_text_parsed_as_json(["a","b"], '["a","b"]') }
test('[ "a" , "b" ]') { assert_text_parsed_as_json(["a","b"], '[ "a" , "b" ]') }
test("[\n\"a\"\n,\n\"b\"\n]") { assert_text_parsed_as_json(["a","b"], "[\n\"a\"\n,\n\"b\"\n]") }
test('["ab","cd"]') { assert_text_parsed_as_json(["ab","cd"], '["ab","cd"]') }
test('["a","#{v1}"') { assert_text_parsed_as_json(["a","#{v1}"], '["a","#{v1}"]') }
test('["a","#{v1}","#{v2}"]') { assert_text_parsed_as_json(["a","#{v1}","#{v2}"], '["a","#{v1}","#{v2}"]') }
test('["a","#{v1} #{v2}"]') { assert_text_parsed_as_json(["a","#{v1} #{v2}"], '["a","#{v1} #{v2}"]') }
test('["a","#{hostname}"]') { assert_text_parsed_as_json(["a","#{Socket.gethostname}"], '["a","#{hostname}"]') }
test('["a","foo#{worker_id}"]') {
ENV.delete('SERVERENGINE_WORKER_ID')
assert_text_parsed_as('["a","foo"]', '["a","foo#{worker_id}"]')
ENV['SERVERENGINE_WORKER_ID'] = '1'
assert_text_parsed_as('["a","foo1"]', '["a","foo#{worker_id}"]')
ENV.delete('SERVERENGINE_WORKER_ID')
}
json_array_with_js_comment = <<EOA
[
"a", // this is a
"b", // this is b
"c" // this is c
]
EOA
test(json_array_with_js_comment) { assert_text_parsed_as_json(["a","b","c"], json_array_with_js_comment) }
json_array_with_comment = <<EOA
[
"a", # this is a
"b", # this is b
"c" # this is c
]
EOA
test(json_array_with_comment) { assert_text_parsed_as_json(["a","b","c"], json_array_with_comment) }
json_array_with_tailing_comma = <<EOA
[
"a", # this is a
"b", # this is b
"c", # this is c
]
EOA
test(json_array_with_tailing_comma) { assert_parse_error(json_array_with_tailing_comma) }
end
sub_test_case 'map parsing' do
test('{}') { assert_text_parsed_as_json({}, '{}') }
test('{"a":1}') { assert_text_parsed_as_json({"a"=>1}, '{"a":1}') }
test('{"a":1,"b":2}') { assert_text_parsed_as_json({"a"=>1,"b"=>2}, '{"a":1,"b":2}') }
test('{ "a" : 1 , "b" : 2 }') { assert_text_parsed_as_json({"a"=>1,"b"=>2}, '{ "a" : 1 , "b" : 2 }') }
test('{"a":1,"b":2,}') { assert_parse_error('{"a":1,"b":2,}') } # TODO: Need trailing commas support?
test('{\n\"a\"\n:\n1\n,\n\"b\"\n:\n2\n}') { assert_text_parsed_as_json({"a"=>1,"b"=>2}, "{\n\"a\"\n:\n1\n,\n\"b\"\n:\n2\n}") }
test('{"a":"b"}') { assert_text_parsed_as_json({"a"=>"b"}, '{"a":"b"}') }
test('{"a":"b","c":"d"}') { assert_text_parsed_as_json({"a"=>"b","c"=>"d"}, '{"a":"b","c":"d"}') }
test('{ "a" : "b" , "c" : "d" }') { assert_text_parsed_as_json({"a"=>"b","c"=>"d"}, '{ "a" : "b" , "c" : "d" }') }
test('{\n\"a\"\n:\n\"b\"\n,\n\"c\"\n:\n\"d\"\n}') { assert_text_parsed_as_json({"a"=>"b","c"=>"d"}, "{\n\"a\"\n:\n\"b\"\n,\n\"c\"\n:\n\"d\"\n}") }
test('{"a":"b","c":"#{v1}"}') { assert_text_parsed_as_json({"a"=>"b","c"=>"#{v1}"}, '{"a":"b","c":"#{v1}"}') }
test('{"a":"b","#{v1}":"d"}') { assert_text_parsed_as_json({"a"=>"b","#{v1}"=>"d"}, '{"a":"b","#{v1}":"d"}') }
test('{"a":"#{v1}","c":"#{v2}"}') { assert_text_parsed_as_json({"a"=>"#{v1}","c"=>"#{v2}"}, '{"a":"#{v1}","c":"#{v2}"}') }
test('{"a":"b","c":"d #{v1} #{v2}"}') { assert_text_parsed_as_json({"a"=>"b","c"=>"d #{v1} #{v2}"}, '{"a":"b","c":"d #{v1} #{v2}"}') }
test('{"a":"#{hostname}"}') { assert_text_parsed_as_json({"a"=>"#{Socket.gethostname}"}, '{"a":"#{hostname}"}') }
test('{"a":"foo#{worker_id}"}') {
ENV.delete('SERVERENGINE_WORKER_ID')
assert_text_parsed_as('{"a":"foo"}', '{"a":"foo#{worker_id}"}')
ENV['SERVERENGINE_WORKER_ID'] = '1'
assert_text_parsed_as('{"a":"foo1"}', '{"a":"foo#{worker_id}"}')
ENV.delete('SERVERENGINE_WORKER_ID')
}
test('no quote') { assert_text_parsed_as_json({'a'=>'b','c'=>'test'}, '{"a":"b","c":"#{v1}"}') }
test('single quote') { assert_text_parsed_as_json({'a'=>'b','c'=>'#{v1}'}, '\'{"a":"b","c":"#{v1}"}\'') }
test('double quote') { assert_text_parsed_as_json({'a'=>'b','c'=>'test'}, '"{\"a\":\"b\",\"c\":\"#{v1}\"}"') }
json_hash_with_comment = <<EOH
{
"a": 1, # this is a
"b": 2, # this is b
"c": 3 # this is c
}
EOH
test(json_hash_with_comment) { assert_text_parsed_as_json({"a"=>1,"b"=>2,"c"=>3}, json_hash_with_comment) }
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_system_config.rb | test/config/test_system_config.rb | require_relative '../helper'
require 'fluent/configurable'
require 'fluent/config/element'
require 'fluent/config/section'
require 'fluent/system_config'
module Fluent::Config
class FakeSupervisor
attr_writer :log_level
def initialize(**opt)
@system_config = nil
@cl_opt = {
workers: nil,
restart_worker_interval: nil,
root_dir: nil,
log_level: Fluent::Log::LEVEL_INFO,
suppress_interval: nil,
suppress_config_dump: nil,
suppress_repeated_stacktrace: nil,
log_event_label: nil,
log_event_verbose: nil,
without_source: nil,
with_source_only: nil,
enable_input_metrics: nil,
enable_size_metrics: nil,
emit_error_log_interval: nil,
file_permission: nil,
dir_permission: nil,
}.merge(opt)
end
def for_system_config
opt = {}
# this is copy from Supervisor#build_system_config
Fluent::SystemConfig::SYSTEM_CONFIG_PARAMETERS.each do |param|
if @cl_opt.key?(param) && !@cl_opt[param].nil?
if param == :log_level && @cl_opt[:log_level] == Fluent::Log::LEVEL_INFO
# info level can't be specified via command line option.
# log_level is info here, it is default value and <system>'s log_level should be applied if exists.
next
end
opt[param] = @cl_opt[param]
end
end
opt
end
end
class TestSystemConfig < ::Test::Unit::TestCase
TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/tmp/system_config/#{ENV['TEST_ENV_NUMBER']}")
def parse_text(text)
basepath = File.expand_path(File.dirname(__FILE__) + '/../../')
Fluent::Config.parse(text, '(test)', basepath, true).elements.find { |e| e.name == 'system' }
end
test 'should not override default configurations when no parameters' do
conf = parse_text(<<-EOS)
<system>
</system>
EOS
s = FakeSupervisor.new
sc = Fluent::SystemConfig.new(conf)
sc.overwrite_variables(**s.for_system_config)
assert_equal(1, sc.workers)
assert_equal(0, sc.restart_worker_interval)
assert_nil(sc.root_dir)
assert_equal(Fluent::Log::LEVEL_INFO, sc.log_level)
assert_nil(sc.suppress_repeated_stacktrace)
assert_nil(sc.ignore_repeated_log_interval)
assert_nil(sc.emit_error_log_interval)
assert_nil(sc.suppress_config_dump)
assert_nil(sc.without_source)
assert_nil(sc.with_source_only)
assert_true(sc.enable_input_metrics)
assert_nil(sc.enable_size_metrics)
assert_nil(sc.enable_msgpack_time_support)
assert(!sc.enable_jit)
assert_nil(sc.log.path)
assert_equal(:text, sc.log.format)
assert_equal('%Y-%m-%d %H:%M:%S %z', sc.log.time_format)
assert_equal(:none, sc.log.forced_stacktrace_level)
end
data(
'workers' => ['workers', 3],
'restart_worker_interval' => ['restart_worker_interval', 60],
'root_dir' => ['root_dir', File.join(TMP_DIR, 'root')],
'log_level' => ['log_level', 'error'],
'suppress_repeated_stacktrace' => ['suppress_repeated_stacktrace', true],
'ignore_repeated_log_interval' => ['ignore_repeated_log_interval', 10],
'log_event_verbose' => ['log_event_verbose', true],
'suppress_config_dump' => ['suppress_config_dump', true],
'without_source' => ['without_source', true],
'with_source_only' => ['with_source_only', true],
'strict_config_value' => ['strict_config_value', true],
'enable_msgpack_time_support' => ['enable_msgpack_time_support', true],
'enable_input_metrics' => ['enable_input_metrics', false],
'enable_size_metrics' => ['enable_size_metrics', true],
'enable_jit' => ['enable_jit', true],
)
test "accepts parameters" do |(k, v)|
conf = parse_text(<<-EOS)
<system>
#{k} #{v}
</system>
EOS
s = FakeSupervisor.new
sc = Fluent::SystemConfig.new(conf)
sc.overwrite_variables(**s.for_system_config)
if k == 'log_level'
assert_equal(Fluent::Log::LEVEL_ERROR, sc.__send__(k))
else
assert_equal(v, sc.__send__(k))
end
end
test "log parameters" do
conf = parse_text(<<-EOS)
<system>
<log>
path /tmp/fluentd.log
format json
time_format %Y
forced_stacktrace_level info
</log>
</system>
EOS
s = FakeSupervisor.new
sc = Fluent::SystemConfig.new(conf)
sc.overwrite_variables(**s.for_system_config)
assert_equal('/tmp/fluentd.log', sc.log.path)
assert_equal(:json, sc.log.format)
assert_equal('%Y', sc.log.time_format)
assert_equal(Fluent::Log::LEVEL_INFO, sc.log.forced_stacktrace_level)
end
# info is removed because info level can't be specified via command line
data('trace' => Fluent::Log::LEVEL_TRACE,
'debug' => Fluent::Log::LEVEL_DEBUG,
'warn' => Fluent::Log::LEVEL_WARN,
'error' => Fluent::Log::LEVEL_ERROR,
'fatal' => Fluent::Log::LEVEL_FATAL)
test 'log_level is ignored when log_level related command line option is passed' do |level|
conf = parse_text(<<-EOS)
<system>
log_level info
</system>
EOS
s = FakeSupervisor.new(log_level: level)
sc = Fluent::SystemConfig.new(conf)
sc.overwrite_variables(**s.for_system_config)
assert_equal(level, sc.log_level)
end
sub_test_case "log rotation" do
data('daily' => "daily",
'weekly' => 'weekly',
'monthly' => 'monthly')
test "strings for rotate_age" do |age|
conf = parse_text(<<-EOS)
<system>
<log>
rotate_age #{age}
</log>
</system>
EOS
sc = Fluent::SystemConfig.new(conf)
assert_equal(age, sc.log.rotate_age)
end
test "numeric number for rotate age" do
conf = parse_text(<<-EOS)
<system>
<log>
rotate_age 3
</log>
</system>
EOS
sc = Fluent::SystemConfig.new(conf)
assert_equal(3, sc.log.rotate_age)
end
data(h: ['100', 100],
k: ['1k', 1024],
m: ['1m', 1024 * 1024],
g: ['1g', 1024 * 1024 * 1024])
test "numeric and SI prefix for rotate_size" do |(label, size)|
conf = parse_text(<<-EOS)
<system>
<log>
rotate_size #{label}
</log>
</system>
EOS
sc = Fluent::SystemConfig.new(conf)
assert_equal(size, sc.log.rotate_size)
end
end
test "source-only-buffer parameters" do
conf = parse_text(<<~EOS)
<system>
<source_only_buffer>
flush_thread_count 4
overflow_action throw_exception
path /tmp/source-only-buffer
flush_interval 1
chunk_limit_size 100
total_limit_size 1000
compress gzip
</source_only_buffer>
</system>
EOS
s = FakeSupervisor.new
sc = Fluent::SystemConfig.new(conf)
sc.overwrite_variables(**s.for_system_config)
assert_equal(
[
4,
:throw_exception,
"/tmp/source-only-buffer",
1,
100,
1000,
:gzip,
],
[
sc.source_only_buffer.flush_thread_count,
sc.source_only_buffer.overflow_action,
sc.source_only_buffer.path,
sc.source_only_buffer.flush_interval,
sc.source_only_buffer.chunk_limit_size,
sc.source_only_buffer.total_limit_size,
sc.source_only_buffer.compress,
]
)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_element.rb | test/config/test_element.rb | require_relative '../helper'
require 'fluent/config/element'
require 'fluent/config/configure_proxy'
require 'fluent/configurable'
require 'pp'
class TestConfigElement < ::Test::Unit::TestCase
def element(name = 'ROOT', arg = '', attrs = {}, elements = [], unused = nil)
Fluent::Config::Element.new(name, arg, attrs, elements, unused)
end
sub_test_case '#elements=' do
test 'elements can be set by others' do
e = element()
assert_equal [], e.elements
e1 = element('e1')
e2 = element('e2')
e.elements = [e1, e2]
assert_equal [e1, e2], e.elements
end
end
sub_test_case '#elements' do
setup do
@c1 = element('source')
@c2 = element('source', 'yay')
@c3 = element('match', 'test.**')
@c4 = element('label', '@mytest', {}, [ element('filter', '**'), element('match', '**') ])
children = [@c1, @c2, @c3, @c4]
@e = Fluent::Config::Element.new('ROOT', '', {}, children)
end
test 'returns all elements without arguments' do
assert_equal [@c1, @c2, @c3, @c4], @e.elements
end
test 'returns elements with specified names' do
assert_equal [@c1, @c2, @c3], @e.elements('source', 'match')
assert_equal [@c3, @c4], @e.elements('match', 'label')
end
test 'returns elements with specified name, and arg if specified' do
assert_equal [@c1, @c2], @e.elements(name: 'source')
assert_equal [@c2], @e.elements(name: 'source', arg: 'yay')
end
test 'keyword argument name/arg and names are exclusive' do
assert_raise ArgumentError do
@e.elements('source', name: 'match')
end
assert_raise ArgumentError do
@e.elements('source', 'match', name: 'label', arg: '@mytest')
end
end
test 'specifying only arg without name is invalid' do
assert_raise ArgumentError do
@e.elements(arg: '@mytest')
end
end
end
sub_test_case '#initialize' do
test 'creates object with blank attrs and elements' do
e = element('ROOT', '', {}, [])
assert_equal([], e.elements)
end
test 'creates object which contains attrs and elements' do
e = element('ROOT', '', {"k1" => "v1", "k2" => "v2"}, [
element('test', 'mydata', {'k3' => 'v3'}, [])
])
assert_equal('ROOT', e.name)
assert_equal('', e.arg)
assert_equal('v1', e["k1"])
assert_equal('v2', e["k2"])
assert_equal(1, e.elements.size)
e.each_element('test') do |el|
assert_equal('test', el.name)
assert_equal('mydata', el.arg)
assert_equal('v3', el["k3"])
end
end
test 'creates object which contains attrs, elements and unused' do
e = element('ROOT', '', {"k1" => "v1", "k2" => "v2", "k4" => "v4"}, [
element('test', 'mydata', {'k3' => 'v3'}, [])
], "k3")
assert_equal("k3", e.unused)
assert_equal('ROOT', e.name)
assert_equal('', e.arg)
assert_equal('v1', e["k1"])
assert_equal('v2', e["k2"])
assert_equal('v4', e["k4"])
assert_equal(1, e.elements.size)
e.each_element('test') do |el|
assert_equal('test', el.name)
assert_equal('mydata', el.arg)
assert_equal('v3', el["k3"])
end
assert_equal("k3", e.unused)
end
end
sub_test_case "@unused" do
sub_test_case '#[] has side effect for @unused' do
test 'without unused argument' do
e = element('ROOT', '', {"k1" => "v1", "k2" => "v2", "k4" => "v4"}, [
element('test', 'mydata', {'k3' => 'v3'}, [])
])
assert_equal(["k1", "k2", "k4"], e.unused)
assert_equal('v1', e["k1"])
assert_equal(["k2", "k4"], e.unused)
assert_equal('v2', e["k2"])
assert_equal(["k4"], e.unused)
assert_equal('v4', e["k4"])
assert_equal([], e.unused)
end
test 'with unused argument' do
e = element('ROOT', '', {"k1" => "v1", "k2" => "v2", "k4" => "v4"}, [
element('test', 'mydata', {'k3' => 'v3'}, [])
], ["k4"])
assert_equal(["k4"], e.unused)
assert_equal('v1', e["k1"])
assert_equal(["k4"], e.unused)
assert_equal('v2', e["k2"])
assert_equal(["k4"], e.unused)
assert_equal('v4', e["k4"])
# only consume for "k4"
assert_equal([], e.unused)
end
end
end
sub_test_case '#add_element' do
test 'elements can be set by #add_element' do
e = element()
assert_equal [], e.elements
e.add_element('e1', '')
e.add_element('e2', '')
assert_equal [element('e1', ''), element('e2', '')], e.elements
end
end
sub_test_case '#==' do
sub_test_case 'compare with two element objects' do
test 'equal' do
e1 = element('ROOT', '', {}, [])
e2 = element('ROOT', '', {}, [])
assert_true(e1 == e2)
end
data("differ args" => [Fluent::Config::Element.new('ROOT', '', {}, []),
Fluent::Config::Element.new('ROOT', 'mydata', {}, [])],
"differ keys" => [Fluent::Config::Element.new('ROOT', 'mydata', {}, []),
Fluent::Config::Element.new('ROOT', 'mydata', {"k1" => "v1"}, [])],
"differ elements" =>
[Fluent::Config::Element.new('ROOT', 'mydata', {"k1" => "v1"}, []),
Fluent::Config::Element.new('ROOT', 'mydata', {"k1" => "v1"}, [
Fluent::Config::Element.new('test', 'mydata', {'k3' => 'v3'}, [])
])])
test 'not equal' do |data|
e1, e2 = data
assert_false(e1 == e2)
end
end
end
sub_test_case '#+' do
test 'can merge 2 elements: object side is primary' do
e1 = element('ROOT', 'mydata', {"k1" => "v1"}, [])
e2 = element('ROOT', 'mydata2', {"k1" => "ignored", "k2" => "v2"}, [
element('test', 'ext', {'k3' => 'v3'}, [])
])
e = e1 + e2
assert_equal('ROOT', e.name)
assert_equal('mydata', e.arg)
assert_equal('v1', e['k1'])
assert_equal('v2', e['k2'])
assert_equal(1, e.elements.size)
e.each_element('test') do |el|
assert_equal('test', el.name)
assert_equal('ext', el.arg)
assert_equal('v3', el["k3"])
end
end
end
sub_test_case '#check_not_fetched' do
sub_test_case 'without unused' do
test 'can get attribute keys and original Config::Element' do
e = element('ROOT', 'mydata', {"k1" => "v1"}, [])
e.check_not_fetched { |key, elem|
assert_equal("k1", key)
assert_equal(e, elem)
}
end
end
sub_test_case 'with unused' do
test 'can get unused marked attribute keys and original Config::Element' do
e = element('ROOT', 'mydata', {"k1" => "v1", "k2" => "unused", "k3" => "k3"})
e.unused = "k2"
e.check_not_fetched { |key, elem|
assert_equal("k2", key)
assert_equal(e, elem)
}
end
end
end
sub_test_case '#has_key?' do
test 'can get boolean with key name' do
e = element('ROOT', 'mydata', {"k1" => "v1"}, [])
assert_true(e.has_key?("k1"))
assert_false(e.has_key?("noexistent"))
end
end
sub_test_case '#to_s' do
data("without v1_config" => [false, <<-CONF
<ROOT>
k1 v1
k2 "stringVal"
<test ext>
k2 v2
</test>
</ROOT>
CONF
],
"with v1_config" => [true, <<-CONF
<ROOT>
k1 v1
k2 "stringVal"
<test ext>
k2 v2
</test>
</ROOT>
CONF
],
)
test 'dump config element with #to_s' do |data|
v1_config, expected = data
e = element('ROOT', '', {'k1' => 'v1', "k2" =>"\"stringVal\""}, [
element('test', 'ext', {'k2' => 'v2'}, [])
])
e.v1_config = v1_config
dump = expected
assert_not_equal(e.inspect, e.to_s)
assert_equal(dump, e.to_s)
end
test 'dump nil and default for v1' do
expected = <<-CONF
<ROOT>
str1
str2 defstring
</ROOT>
CONF
e = element('ROOT', '', {'str1' => nil, "str2" => :default}, [])
type_lookup = ->(type){ Fluent::Configurable.lookup_type(type) }
p = Fluent::Config::ConfigureProxy.new("test", type_lookup: type_lookup)
p.config_param :str1, :string
p.config_param :str2, :string, default: "defstring"
e.corresponding_proxies << p
e.v1_config = true
assert_not_equal(e.inspect, e.to_s)
assert_equal(expected, e.to_s)
end
end
sub_test_case '#inspect' do
test 'dump config element with #inspect' do
e = element('ROOT', '', {'k1' => 'v1'}, [
element('test', 'ext', {'k2' => 'v2'}, [])
])
dump = <<-CONF
<name:ROOT, arg:, attrs:{\"k1\"=>\"v1\"}, elements:[<name:test, arg:ext, attrs:{\"k2\"=>\"v2\"}, elements:[]>]>
CONF
assert_not_equal(e.to_s, e.inspect.gsub(' => ', '=>'))
assert_equal(dump.chomp, e.inspect.gsub(' => ', '=>'))
end
end
sub_test_case 'for sections which has secret parameter' do
setup do
@type_lookup = ->(type){ Fluent::Configurable.lookup_type(type) }
p1 = Fluent::Config::ConfigureProxy.new(:match, type_lookup: @type_lookup)
p1.config_param :str1, :string
p1.config_param :str2, :string, secret: true
p1.config_param :enum1, :enum, list: [:a, :b, :c]
p1.config_param :enum2, :enum, list: [:a, :b, :c], secret: true
p1.config_param :bool1, :bool
p1.config_param :bool2, :bool, secret: true
p1.config_param :int1, :integer
p1.config_param :int2, :integer, secret: true
p1.config_param :float1, :float
p1.config_param :float2, :float, secret: true
p2 = Fluent::Config::ConfigureProxy.new(:match, type_lookup: @type_lookup)
p2.config_param :size1, :size
p2.config_param :size2, :size, secret: true
p2.config_param :time1, :time
p2.config_param :time2, :time, secret: true
p2.config_param :array1, :array
p2.config_param :array2, :array, secret: true
p2.config_param :hash1, :hash
p2.config_param :hash2, :hash, secret: true
p1.config_section :mysection do
config_param :str1, :string
config_param :str2, :string, secret: true
config_param :enum1, :enum, list: [:a, :b, :c]
config_param :enum2, :enum, list: [:a, :b, :c], secret: true
config_param :bool1, :bool
config_param :bool2, :bool, secret: true
config_param :int1, :integer
config_param :int2, :integer, secret: true
config_param :float1, :float
config_param :float2, :float, secret: true
config_param :size1, :size
config_param :size2, :size, secret: true
config_param :time1, :time
config_param :time2, :time, secret: true
config_param :array1, :array
config_param :array2, :array, secret: true
config_param :hash1, :hash
config_param :hash2, :hash, secret: true
end
params = {
'str1' => 'aaa', 'str2' => 'bbb', 'enum1' => 'a', 'enum2' => 'b', 'bool1' => 'true', 'bool2' => 'yes',
'int1' => '1', 'int2' => '2', 'float1' => '1.0', 'float2' => '0.5', 'size1' => '1k', 'size2' => '1m',
'time1' => '5m', 'time2' => '3h', 'array1' => 'a,b,c', 'array2' => 'd,e,f',
'hash1' => 'a:1,b:2', 'hash2' => 'a:2,b:4',
'unknown1' => 'yay', 'unknown2' => 'boo',
}
e2 = Fluent::Config::Element.new('mysection', '', params.dup, [])
e2.corresponding_proxies << p1.sections.values.first
@e = Fluent::Config::Element.new('match', '**', params, [e2])
@e.corresponding_proxies << p1
@e.corresponding_proxies << p2
end
sub_test_case '#to_masked_element' do
test 'returns a new element object which has masked values for secret parameters and elements' do
e = @e.to_masked_element
assert_equal 'aaa', e['str1']
assert_equal 'xxxxxx', e['str2']
assert_equal 'a', e['enum1']
assert_equal 'xxxxxx', e['enum2']
assert_equal 'true', e['bool1']
assert_equal 'xxxxxx', e['bool2']
assert_equal '1', e['int1']
assert_equal 'xxxxxx', e['int2']
assert_equal '1.0', e['float1']
assert_equal 'xxxxxx', e['float2']
assert_equal '1k', e['size1']
assert_equal 'xxxxxx', e['size2']
assert_equal '5m', e['time1']
assert_equal 'xxxxxx', e['time2']
assert_equal 'a,b,c', e['array1']
assert_equal 'xxxxxx', e['array2']
assert_equal 'a:1,b:2', e['hash1']
assert_equal 'xxxxxx', e['hash2']
assert_equal 'yay', e['unknown1']
assert_equal 'boo', e['unknown2']
e2 = e.elements.first
assert_equal 'aaa', e2['str1']
assert_equal 'xxxxxx', e2['str2']
assert_equal 'a', e2['enum1']
assert_equal 'xxxxxx', e2['enum2']
assert_equal 'true', e2['bool1']
assert_equal 'xxxxxx', e2['bool2']
assert_equal '1', e2['int1']
assert_equal 'xxxxxx', e2['int2']
assert_equal '1.0', e2['float1']
assert_equal 'xxxxxx', e2['float2']
assert_equal '1k', e2['size1']
assert_equal 'xxxxxx', e2['size2']
assert_equal '5m', e2['time1']
assert_equal 'xxxxxx', e2['time2']
assert_equal 'a,b,c', e2['array1']
assert_equal 'xxxxxx', e2['array2']
assert_equal 'a:1,b:2', e2['hash1']
assert_equal 'xxxxxx', e2['hash2']
assert_equal 'yay', e2['unknown1']
assert_equal 'boo', e2['unknown2']
end
end
sub_test_case '#secret_param?' do
test 'returns boolean which shows values of given key will be masked' do
assert !@e.secret_param?('str1')
assert @e.secret_param?('str2')
assert !@e.elements.first.secret_param?('str1')
assert @e.elements.first.secret_param?('str2')
end
end
sub_test_case '#param_type' do
test 'returns parameter type which are registered in corresponding proxy' do
assert_equal :string, @e.param_type('str1')
assert_equal :string, @e.param_type('str2')
assert_equal :enum, @e.param_type('enum1')
assert_equal :enum, @e.param_type('enum2')
assert_nil @e.param_type('unknown1')
assert_nil @e.param_type('unknown2')
end
end
# sub_test_case '#dump_value'
sub_test_case '#dump_value' do
test 'dumps parameter_name and values with leading indentation' do
assert_equal "str1 aaa\n", @e.dump_value("str1", @e["str1"], "")
assert_equal "str2 xxxxxx\n", @e.dump_value("str2", @e["str2"], "")
end
end
end
sub_test_case '#set_target_worker' do
test 'set target_worker_id recursively' do
e = element('label', '@mytest', {}, [ element('filter', '**'), element('match', '**', {}, [ element('store'), element('store') ]) ])
e.set_target_worker_id(1)
assert_equal [1], e.target_worker_ids
assert_equal [1], e.elements[0].target_worker_ids
assert_equal [1], e.elements[1].target_worker_ids
assert_equal [1], e.elements[1].elements[0].target_worker_ids
assert_equal [1], e.elements[1].elements[1].target_worker_ids
end
end
sub_test_case '#for_every_workers?' do
test 'has target_worker_id' do
e = element()
e.set_target_worker_id(1)
assert_false e.for_every_workers?
end
test "doesn't have target_worker_id" do
e = element()
assert e.for_every_workers?
end
end
sub_test_case '#for_this_workers?' do
test 'target_worker_id == current worker_id' do
e = element()
e.set_target_worker_id(0)
assert e.for_this_worker?
end
test 'target_worker_ids includes current worker_id' do
e = element()
e.set_target_worker_ids([0])
assert e.for_this_worker?
end
test 'target_worker_id != current worker_id' do
e = element()
e.set_target_worker_id(1)
assert_false e.for_this_worker?
end
test 'target_worker_ids does not includes current worker_id' do
e = element()
e.set_target_worker_ids([1, 2])
assert_false e.for_this_worker?
end
test "doesn't have target_worker_id" do
e = element()
assert_false e.for_this_worker?
end
end
sub_test_case '#for_another_worker?' do
test 'target_worker_id == current worker_id' do
e = element()
e.set_target_worker_id(0)
assert_false e.for_another_worker?
end
test 'target_worker_ids contains current worker_id' do
e = element()
e.set_target_worker_ids([0, 1])
assert_false e.for_another_worker?
end
test 'target_worker_id != current worker_id' do
e = element()
e.set_target_worker_id(1)
assert e.for_another_worker?
end
test 'target_worker_ids does not contains current worker_id' do
e = element()
e.set_target_worker_ids([1, 2])
assert e.for_another_worker?
end
test "doesn't have target_worker_id" do
e = element()
assert_false e.for_another_worker?
end
end
sub_test_case '#pretty_print' do
test 'prints inspect to pp object' do
q = PP.new
e = element()
e.pretty_print(q)
assert_equal e.inspect, q.output
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_configure_proxy.rb | test/config/test_configure_proxy.rb | require_relative '../helper'
require 'fluent/config/configure_proxy'
module Fluent::Config
class TestConfigureProxy < ::Test::Unit::TestCase
setup do
@type_lookup = ->(type) { Fluent::Configurable.lookup_type(type) }
end
sub_test_case 'to generate a instance' do
sub_test_case '#initialize' do
test 'has default values' do
proxy = Fluent::Config::ConfigureProxy.new('section', type_lookup: @type_lookup)
assert_equal(:section, proxy.name)
proxy = Fluent::Config::ConfigureProxy.new(:section, type_lookup: @type_lookup)
assert_equal(:section, proxy.name)
assert_nil(proxy.param_name)
assert_equal(:section, proxy.variable_name)
assert_false(proxy.root?)
assert_nil(proxy.init)
assert_nil(proxy.required)
assert_false(proxy.required?)
assert_nil(proxy.multi)
assert_true(proxy.multi?)
end
test 'can specify param_name/required/multi with optional arguments' do
proxy = Fluent::Config::ConfigureProxy.new(:section, param_name: 'sections', init: true, required: false, multi: true, type_lookup: @type_lookup)
assert_equal(:section, proxy.name)
assert_equal(:sections, proxy.param_name)
assert_equal(:sections, proxy.variable_name)
assert_false(proxy.required)
assert_false(proxy.required?)
assert_true(proxy.multi)
assert_true(proxy.multi?)
proxy = Fluent::Config::ConfigureProxy.new(:section, param_name: :sections, init: false, required: true, multi: false, type_lookup: @type_lookup)
assert_equal(:section, proxy.name)
assert_equal(:sections, proxy.param_name)
assert_equal(:sections, proxy.variable_name)
assert_true(proxy.required)
assert_true(proxy.required?)
assert_false(proxy.multi)
assert_false(proxy.multi?)
end
test 'raise error if both of init and required are true' do
assert_raise RuntimeError.new("init and required are exclusive") do
Fluent::Config::ConfigureProxy.new(:section, init: true, required: true, type_lookup: @type_lookup)
end
end
end
sub_test_case '#merge' do
test 'generate a new instance which values are overwritten by the argument object' do
proxy = p1 = Fluent::Config::ConfigureProxy.new(:section, type_lookup: @type_lookup)
assert_equal(:section, proxy.name)
assert_nil(proxy.param_name)
assert_equal(:section, proxy.variable_name)
assert_nil(proxy.init)
assert_nil(proxy.required)
assert_false(proxy.required?)
assert_nil(proxy.multi)
assert_true(proxy.multi?)
assert_nil(proxy.configured_in_section)
p2 = Fluent::Config::ConfigureProxy.new(:section, init: false, required: true, multi: false, type_lookup: @type_lookup)
proxy = p1.merge(p2)
assert_equal(:section, proxy.name)
assert_nil(proxy.param_name)
assert_equal(:section, proxy.variable_name)
assert_false(proxy.init)
assert_false(proxy.init?)
assert_true(proxy.required)
assert_true(proxy.required?)
assert_false(proxy.multi)
assert_false(proxy.multi?)
assert_nil(proxy.configured_in_section)
end
test 'does not overwrite with argument object without any specifications of required/multi' do
p1 = Fluent::Config::ConfigureProxy.new(:section1, param_name: :sections, type_lookup: @type_lookup)
p1.configured_in_section = :subsection
p2 = Fluent::Config::ConfigureProxy.new(:section2, init: false, required: true, multi: false, type_lookup: @type_lookup)
p3 = Fluent::Config::ConfigureProxy.new(:section3, type_lookup: @type_lookup)
proxy = p1.merge(p2).merge(p3)
assert_equal(:section1, proxy.name)
assert_equal(:sections, proxy.param_name)
assert_equal(:sections, proxy.variable_name)
assert_false(proxy.init)
assert_false(proxy.init?)
assert_true(proxy.required)
assert_true(proxy.required?)
assert_false(proxy.multi)
assert_false(proxy.multi?)
assert_equal :subsection, proxy.configured_in_section
end
test "does overwrite name of proxy for root sections which are used for plugins" do
# latest plugin class shows actual plugin implementation
p1 = Fluent::Config::ConfigureProxy.new('Fluent::Plugin::MyP1'.to_sym, root: true, required: true, multi: false, type_lookup: @type_lookup)
p1.config_param :key1, :integer
p2 = Fluent::Config::ConfigureProxy.new('Fluent::Plugin::MyP2'.to_sym, root: true, required: true, multi: false, type_lookup: @type_lookup)
p2.config_param :key2, :string, default: "value2"
merged = p1.merge(p2)
assert_equal 'Fluent::Plugin::MyP2'.to_sym, merged.name
assert_true merged.root?
end
end
sub_test_case '#overwrite_defaults' do
test 'overwrites only defaults with others defaults' do
type_lookup = ->(type) { Fluent::Configurable.lookup_type(type) }
p1 = Fluent::Config::ConfigureProxy.new(:mychild, type_lookup: type_lookup)
p1.configured_in_section = :child
p1.config_param(:k1a, :string)
p1.config_param(:k1b, :string)
p1.config_param(:k2a, :integer, default: 0)
p1.config_param(:k2b, :integer, default: 0)
p1.config_section(:sub1) do
config_param :k3, :time, default: 30
end
p0 = Fluent::Config::ConfigureProxy.new(:myparent, type_lookup: type_lookup)
p0.config_section(:child) do
config_set_default :k1a, "v1a"
config_param :k1b, :string, default: "v1b"
config_set_default :k2a, 21
config_param :k2b, :integer, default: 22
config_section :sub1 do
config_set_default :k3, 60
end
end
p1.overwrite_defaults(p0.sections[:child])
assert_equal "v1a", p1.defaults[:k1a]
assert_equal "v1b", p1.defaults[:k1b]
assert_equal 21, p1.defaults[:k2a]
assert_equal 22, p1.defaults[:k2b]
assert_equal 60, p1.sections[:sub1].defaults[:k3]
end
end
sub_test_case '#configured_in' do
test 'sets a section name which have configuration parameters of target plugin in owners configuration' do
proxy = Fluent::Config::ConfigureProxy.new(:section, type_lookup: @type_lookup)
proxy.configured_in(:mysection)
assert_equal :mysection, proxy.configured_in_section
end
test 'do not permit to be called twice' do
proxy = Fluent::Config::ConfigureProxy.new(:section, type_lookup: @type_lookup)
proxy.configured_in(:mysection)
assert_raise(ArgumentError) { proxy.configured_in(:myothersection) }
end
end
sub_test_case '#config_param / #config_set_default / #config_argument' do
setup do
@proxy = Fluent::Config::ConfigureProxy.new(:section, type_lookup: @type_lookup)
end
test 'handles configuration parameters without type as string' do
@proxy.config_argument(:label)
@proxy.config_param(:name)
assert_equal :label, @proxy.argument[0]
assert_equal :string, @proxy.argument[2][:type]
assert_equal :string, @proxy.params[:name][1][:type]
end
data(
default: [:default, nil],
alias: [:alias, :alias_name_in_config],
secret: [:secret, true],
skip_accessor: [:skip_accessor, true],
deprecated: [:deprecated, 'it is deprecated'],
obsoleted: [:obsoleted, 'it is obsoleted'],
desc: [:desc, "description"],
)
test 'always allow options for all types' do |(option, value)|
opt = {option => value}
assert_nothing_raised{ @proxy.config_argument(:param0, **opt) }
assert_nothing_raised{ @proxy.config_param(:p1, :string, **opt) }
assert_nothing_raised{ @proxy.config_param(:p2, :enum, list: [:a, :b, :c], **opt) }
assert_nothing_raised{ @proxy.config_param(:p3, :integer, **opt) }
assert_nothing_raised{ @proxy.config_param(:p4, :float, **opt) }
assert_nothing_raised{ @proxy.config_param(:p5, :size, **opt) }
assert_nothing_raised{ @proxy.config_param(:p6, :bool, **opt) }
assert_nothing_raised{ @proxy.config_param(:p7, :time, **opt) }
assert_nothing_raised{ @proxy.config_param(:p8, :hash, **opt) }
assert_nothing_raised{ @proxy.config_param(:p9, :array, **opt) }
assert_nothing_raised{ @proxy.config_param(:pa, :regexp, **opt) }
end
data(string: :string, integer: :integer, float: :float, size: :size, bool: :bool, time: :time, hash: :hash, array: :array, regexp: :regexp)
test 'deny list for non-enum types' do |type|
assert_raise ArgumentError.new(":list is valid only for :enum type, but #{type}: arg") do
@proxy.config_argument(:arg, type, list: [:a, :b])
end
assert_raise ArgumentError.new(":list is valid only for :enum type, but #{type}: p1") do
@proxy.config_param(:p1, type, list: [:a, :b])
end
end
data(string: :string, integer: :integer, float: :float, size: :size, bool: :bool, time: :time, regexp: :regexp)
test 'deny value_type for non-hash/array types' do |type|
assert_raise ArgumentError.new(":value_type is valid only for :hash and :array, but #{type}: arg") do
@proxy.config_argument(:arg, type, value_type: :string)
end
assert_raise ArgumentError.new(":value_type is valid only for :hash and :array, but #{type}: p1") do
@proxy.config_param(:p1, type, value_type: :integer)
end
end
data(string: :string, integer: :integer, float: :float, size: :size, bool: :bool, time: :time, array: :array, regexp: :regexp)
test 'deny symbolize_keys for non-hash types' do |type|
assert_raise ArgumentError.new(":symbolize_keys is valid only for :hash, but #{type}: arg") do
@proxy.config_argument(:arg, type, symbolize_keys: true)
end
assert_raise ArgumentError.new(":symbolize_keys is valid only for :hash, but #{type}: p1") do
@proxy.config_param(:p1, type, symbolize_keys: true)
end
end
data(string: :string, integer: :integer, float: :float, size: :size, bool: :bool, time: :time, hash: :hash, array: :array)
test 'deny unknown options' do |type|
assert_raise ArgumentError.new("unknown option 'required' for configuration parameter: arg") do
@proxy.config_argument(:arg, type, required: true)
end
assert_raise ArgumentError.new("unknown option 'param_name' for configuration parameter: p1") do
@proxy.config_argument(:p1, type, param_name: :yay)
end
end
test 'desc gets string' do
assert_nothing_raised do
@proxy.config_param(:name, :string, desc: "it is description")
end
assert_raise ArgumentError.new("name1: desc must be a String, but Symbol") do
@proxy.config_param(:name1, :string, desc: :yaaaaaaaay)
end
end
test 'alias gets symbol' do
assert_nothing_raised do
@proxy.config_param(:name, :string, alias: :label)
end
assert_raise ArgumentError.new("name1: alias must be a Symbol, but String") do
@proxy.config_param(:name1, :string, alias: 'label1')
end
end
test 'secret gets true/false' do
assert_nothing_raised do
@proxy.config_param(:name1, :string, secret: false)
end
assert_nothing_raised do
@proxy.config_param(:name2, :string, secret: true)
end
assert_raise ArgumentError.new("name3: secret must be true or false, but String") do
@proxy.config_param(:name3, :string, secret: 'yes')
end
assert_raise ArgumentError.new("name4: secret must be true or false, but NilClass") do
@proxy.config_param(:name4, :string, secret: nil)
end
end
test 'symbolize_keys gets true/false' do
assert_nothing_raised do
@proxy.config_param(:data1, :hash, symbolize_keys: false)
end
assert_nothing_raised do
@proxy.config_param(:data2, :hash, symbolize_keys: true)
end
assert_raise ArgumentError.new("data3: symbolize_keys must be true or false, but NilClass") do
@proxy.config_param(:data3, :hash, symbolize_keys: nil)
end
end
test 'value_type gets symbol' do
assert_nothing_raised do
@proxy.config_param(:data1, :array, value_type: :integer)
end
assert_raise ArgumentError.new("data2: value_type must be a Symbol, but Class") do
@proxy.config_param(:data2, :array, value_type: Integer)
end
end
test 'list gets an array of symbols' do
  assert_nothing_raised { @proxy.config_param(:proto1, :enum, list: [:a, :b]) }
  # nil, an array of Strings, and an empty array are all invalid :list values.
  { proto2: nil, proto3: ['a', 'b'], proto4: [] }.each do |name, invalid_list|
    assert_raise(ArgumentError.new("#{name}: enum parameter requires :list of Symbols")) do
      @proxy.config_param(name, :enum, list: invalid_list)
    end
  end
end
test 'deprecated gets string' do
  # :deprecated carries a human-readable migration message, so it must be a String.
  assert_nothing_raised { @proxy.config_param(:name1, :string, deprecated: "use name2 instead") }
  assert_raise(ArgumentError.new("name2: deprecated must be a String, but TrueClass")) do
    @proxy.config_param(:name2, :string, deprecated: true)
  end
end
test 'obsoleted gets string' do
  # Same contract as :deprecated — the option value is a message String.
  assert_nothing_raised { @proxy.config_param(:name1, :string, obsoleted: "use name2 instead") }
  assert_raise(ArgumentError.new("name2: obsoleted must be a String, but TrueClass")) do
    @proxy.config_param(:name2, :string, obsoleted: true)
  end
end
test 'skip_accessor gets true/false' do
  # Boolean literals are accepted; anything else reports the offending class.
  assert_nothing_raised { @proxy.config_param(:format1, :string, skip_accessor: false) }
  assert_nothing_raised { @proxy.config_param(:format2, :string, skip_accessor: true) }
  assert_raise(ArgumentError.new("format2: skip_accessor must be true or false, but String")) do
    @proxy.config_param(:format2, :string, skip_accessor: 'yes')
  end
end
test 'list is required for :enum' do
  assert_nothing_raised { @proxy.config_param(:proto1, :enum, list: [:a, :b]) }
  # Omitting :list is an error even when a default value is supplied.
  assert_raise(ArgumentError.new("proto1: enum parameter requires :list of Symbols")) do
    @proxy.config_param(:proto1, :enum, default: :a)
  end
end
# A default may be declared exactly once per parameter, regardless of whether
# it comes from the :default option, config_set_default, or config_argument.
test 'does not permit config_set_default for param w/ :default option' do
  @proxy.config_param(:name, :string, default: "name1")
  assert_raise(ArgumentError) { @proxy.config_set_default(:name, "name2") }
end
test 'does not permit default value specification twice' do
  @proxy.config_param(:name, :string)
  @proxy.config_set_default(:name, "name1")
  assert_raise(ArgumentError) { @proxy.config_set_default(:name, "name2") }
end
test 'does not permit default value specification twice, even on config_argument' do
  @proxy.config_param(:name, :string)
  @proxy.config_set_default(:name, "name1")
  @proxy.config_argument(:name)
  assert_raise(ArgumentError) { @proxy.config_argument(:name, default: "name2") }
end
end
sub_test_case '#config_set_desc' do
  setup do
    @proxy = Fluent::Config::ConfigureProxy.new(:section, type_lookup: @type_lookup)
  end
  # Like defaults, a description can be attached to a parameter only once,
  # whether via the :desc option or a later config_set_desc call.
  test 'does not permit description specification twice w/ :desc option' do
    @proxy.config_param(:name, :string, desc: "description")
    assert_raise(ArgumentError) { @proxy.config_set_desc(:name, "description2") }
  end
  test 'does not permit description specification twice' do
    @proxy.config_param(:name, :string)
    @proxy.config_set_desc(:name, "description")
    assert_raise(ArgumentError) { @proxy.config_set_desc(:name, "description2") }
  end
end
sub_test_case '#desc' do
  setup do
    @proxy = Fluent::Config::ConfigureProxy.new(:section, type_lookup: @type_lookup)
  end
  # desc() may be called repeatedly before a param; the last call wins.
  test 'permit to specify description twice' do
    @proxy.desc("description1")
    @proxy.desc("description2")
    @proxy.config_param(:name, :string)
    assert_equal("description2", @proxy.descriptions[:name])
  end
  # ...but combining a pending desc() with an inline :desc option is rejected.
  test 'does not permit description specification twice' do
    @proxy.desc("description1")
    assert_raise(ArgumentError) do
      @proxy.config_param(:name, :string, desc: "description2")
    end
  end
end
# dump_config_definition serializes the proxy's declared params/sections into a
# plain Hash (type, default, required, desc, and nested section metadata).
sub_test_case '#dump_config_definition' do
  setup do
    @proxy = Fluent::Config::ConfigureProxy.new(:section, type_lookup: @type_lookup)
  end
  test 'empty proxy' do
    assert_equal({}, @proxy.dump_config_definition)
  end
  # A param without a default is dumped as required: true and has no :default key.
  test 'plain proxy w/o default value' do
    @proxy.config_param(:name, :string)
    expected = {
      name: { type: :string, required: true }
    }
    assert_equal(expected, @proxy.dump_config_definition)
  end
  test 'plain proxy w/ default value' do
    @proxy.config_param(:name, :string, default: "name1")
    expected = {
      name: { type: :string, default: "name1", required: false }
    }
    assert_equal(expected, @proxy.dump_config_definition)
  end
  test 'plain proxy w/ default value using config_set_default' do
    @proxy.config_param(:name, :string)
    @proxy.config_set_default(:name, "name1")
    expected = {
      name: { type: :string, default: "name1", required: false }
    }
    assert_equal(expected, @proxy.dump_config_definition)
  end
  # config_argument entries carry an extra argument: true marker.
  test 'plain proxy w/ argument' do
    @proxy.instance_eval do
      config_argument(:argname, :string)
      config_param(:name, :string, default: "name1")
    end
    expected = {
      argname: { type: :string, required: true, argument: true },
      name: { type: :string, default: "name1", required: false }
    }
    assert_equal(expected, @proxy.dump_config_definition)
  end
  test 'plain proxy w/ argument default value' do
    @proxy.instance_eval do
      config_argument(:argname, :string, default: "value")
      config_param(:name, :string, default: "name1")
    end
    expected = {
      argname: { type: :string, default: "value", required: false, argument: true },
      name: { type: :string, default: "name1", required: false }
    }
    assert_equal(expected, @proxy.dump_config_definition)
  end
  test 'plain proxy w/ argument overwriting default value' do
    @proxy.instance_eval do
      config_argument(:argname, :string)
      config_param(:name, :string, default: "name1")
      config_set_default(:argname, "value1")
    end
    expected = {
      argname: { type: :string, default: "value1", required: false, argument: true },
      name: { type: :string, default: "name1", required: false }
    }
    assert_equal(expected, @proxy.dump_config_definition)
  end
  # Sections are dumped inline with section: true plus alias/multi/required flags.
  test 'single sub proxy' do
    @proxy.config_section(:sub) do
      config_param(:name, :string, default: "name1")
    end
    expected = {
      sub: {
        alias: nil,
        multi: true,
        required: false,
        section: true,
        name: { type: :string, default: "name1", required: false }
      }
    }
    assert_equal(expected, @proxy.dump_config_definition)
  end
  test 'nested sub proxy' do
    @proxy.config_section(:sub) do
      config_param(:name1, :string, default: "name1")
      config_param(:name2, :string, default: "name2")
      config_section(:sub2) do
        config_param(:name3, :string, default: "name3")
        config_param(:name4, :string, default: "name4")
      end
    end
    expected = {
      sub: {
        alias: nil,
        multi: true,
        required: false,
        section: true,
        name1: { type: :string, default: "name1", required: false },
        name2: { type: :string, default: "name2", required: false },
        sub2: {
          alias: nil,
          multi: true,
          required: false,
          section: true,
          name3: { type: :string, default: "name3", required: false },
          name4: { type: :string, default: "name4", required: false },
        }
      }
    }
    assert_equal(expected, @proxy.dump_config_definition)
  end
  # Descriptions (from :desc, config_set_desc, or desc()) appear as :desc keys.
  sub_test_case 'w/ description' do
    test 'single proxy' do
      @proxy.config_param(:name, :string, desc: "description for name")
      expected = {
        name: { type: :string, desc: "description for name", required: true }
      }
      assert_equal(expected, @proxy.dump_config_definition)
    end
    test 'single proxy using config_set_desc' do
      @proxy.config_param(:name, :string)
      @proxy.config_set_desc(:name, "description for name")
      expected = {
        name: { type: :string, desc: "description for name", required: true }
      }
      assert_equal(expected, @proxy.dump_config_definition)
    end
    test 'sub proxy' do
      @proxy.config_section(:sub) do
        config_param(:name1, :string, default: "name1", desc: "desc1")
        config_param(:name2, :string, default: "name2", desc: "desc2")
        config_section(:sub2) do
          config_param(:name3, :string, default: "name3")
          config_param(:name4, :string, default: "name4", desc: "desc4")
        end
      end
      expected = {
        sub: {
          alias: nil,
          multi: true,
          required: false,
          section: true,
          name1: { type: :string, default: "name1", desc: "desc1", required: false },
          name2: { type: :string, default: "name2", desc: "desc2", required: false },
          sub2: {
            alias: nil,
            multi: true,
            required: false,
            section: true,
            name3: { type: :string, default: "name3", required: false },
            name4: { type: :string, default: "name4", desc: "desc4", required: false },
          }
        }
      }
      assert_equal(expected, @proxy.dump_config_definition)
    end
    test 'sub proxy w/ desc method' do
      @proxy.config_section(:sub) do
        desc("desc1")
        config_param(:name1, :string, default: "name1")
        config_param(:name2, :string, default: "name2", desc: "desc2")
        config_section(:sub2) do
          config_param(:name3, :string, default: "name3")
          desc("desc4")
          config_param(:name4, :string, default: "name4")
        end
      end
      expected = {
        sub: {
          alias: nil,
          multi: true,
          required: false,
          section: true,
          name1: { type: :string, default: "name1", desc: "desc1", required: false },
          name2: { type: :string, default: "name2", desc: "desc2", required: false },
          sub2: {
            alias: nil,
            multi: true,
            required: false,
            section: true,
            name3: { type: :string, default: "name3", required: false },
            name4: { type: :string, default: "name4", desc: "desc4", required: false },
          }
        }
      }
      assert_equal(expected, @proxy.dump_config_definition)
    end
  end
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_dsl.rb | test/config/test_dsl.rb | require_relative '../helper'
require 'fluent/config/element'
require "fluent/config/dsl"
require 'tempfile'
# Per-process scratch dir; TEST_ENV_NUMBER keeps parallel test runs isolated.
TMP_DIR = File.dirname(__FILE__) + "/tmp/config_dsl#{ENV['TEST_ENV_NUMBER']}"
# Write +data+ to +path+, creating any missing parent directories first.
def write_config(path, data)
  FileUtils.mkdir_p(File.dirname(path))
  File.write(path, data)
end
# Builds a tree of fixture config files under TMP_DIR exercising every include
# form: relative path, absolute path, file:// URI, and wildcard directory
# includes, via both the bare `include` and `@include` directives.
def prepare_config1
  write_config "#{TMP_DIR}/config_test_1.conf", %[
k1 root_config
include dir/config_test_2.conf #
@include #{TMP_DIR}/config_test_4.conf
include file://#{TMP_DIR}/config_test_5.conf
@include config.d/*.conf
]
  # Included file that itself includes a sibling via a relative path.
  write_config "#{TMP_DIR}/dir/config_test_2.conf", %[
k2 relative_path_include
@include ../config_test_3.conf
]
  write_config "#{TMP_DIR}/config_test_3.conf", %[
k3 relative_include_in_included_file
]
  write_config "#{TMP_DIR}/config_test_4.conf", %[
k4 absolute_path_include
]
  write_config "#{TMP_DIR}/config_test_5.conf", %[
k5 uri_include
]
  # Inside an element, `include` with a single token is a plain parameter,
  # not an include directive.
  write_config "#{TMP_DIR}/config.d/config_test_6.conf", %[
k6 wildcard_include_1
<elem1 name>
include normal_parameter
</elem1>
]
  write_config "#{TMP_DIR}/config.d/config_test_7.conf", %[
k7 wildcard_include_2
]
  write_config "#{TMP_DIR}/config.d/config_test_8.conf", %[
<elem2 name>
@include ../dir/config_test_9.conf
</elem2>
]
  write_config "#{TMP_DIR}/dir/config_test_9.conf", %[
k9 embedded
<elem3 name>
nested nested_value
include hoge
</elem3>
]
  # "00_" prefix makes this sort before the other config.d files in the glob.
  write_config "#{TMP_DIR}/config.d/00_config_test_8.conf", %[
k8 wildcard_include_3
<elem4 name>
include normal_parameter
</elem4>
]
end
# Writes the Ruby-DSL example config as a fixture for `include ....rb` tests.
def prepare_config2
  write_config "#{TMP_DIR}/config_test_1.rb", DSL_CONFIG_EXAMPLE
end
# Canonical Ruby-DSL config: a worker with 10 generated sources, each with
# filters/matches using local-variable interpolation (hostname, loop index).
DSL_CONFIG_EXAMPLE = %q[
worker {
hostname = "myhostname"
(0..9).each { |i|
source {
type :tail
path "/var/log/httpd/access.part#{i}.log"
filter ('bar.**') {
type :hoge
val1 "moge"
val2 ["foo", "bar", "baz"]
val3 10
id :hoge
subsection {
foo "bar"
}
subsection {
foo "baz"
}
}
filter ('foo.**') {
type "pass"
}
match ('{foo,bar}.**') {
type "file"
path "/var/log/httpd/access.#{hostname}.#{i}.log"
}
}
}
}
]
# Same DSL but with no top-level worker{} wrapper (no v10 compat mode).
DSL_CONFIG_EXAMPLE_WITHOUT_WORKER = %q[
hostname = "myhostname"
source {
type :tail
path "/var/log/httpd/access.part.log"
element {
name "foo"
}
match ('{foo,bar}.**') {
type "file"
path "/var/log/httpd/access.full.log"
}
}
]
# Minimal DSL snippets that include an external .conf / .rb fixture.
DSL_CONFIG_EXAMPLE_FOR_INCLUDE_CONF = %q[
include "#{TMP_DIR}/config_test_1.conf"
]
DSL_CONFIG_EXAMPLE_FOR_INCLUDE_RB = %q[
include "#{TMP_DIR}/config_test_1.rb"
]
# The script's last expression is not an Element; the parser must tolerate it.
DSL_CONFIG_RETURNS_NON_ELEMENT = %q[
worker {
}
[]
]
# Malformed DSL snippets that must raise ArgumentError.
DSL_CONFIG_WRONG_SYNTAX1 = %q[
match
]
DSL_CONFIG_WRONG_SYNTAX2 = %q[
match('aa','bb'){
type :null
}
]
DSL_CONFIG_WRONG_SYNTAX3 = %q[
match('aa','bb')
]
DSL_CONFIG_WRONG_SYNTAX4 = %q[
include
]
module Fluent::Config
  # Tests for the Ruby-DSL config parser (Fluent::Config::DSL::Parser):
  # element-tree construction, variable interpolation, include handling,
  # error reporting, and the `ruby` escape hatch into Kernel.
  class TestDSLParser < ::Test::Unit::TestCase
    sub_test_case 'with worker tag on top level' do
      def setup
        @root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE, 'dsl_config.rb')
      end
      sub_test_case '.parse' do
        test 'makes root element' do
          assert_equal('ROOT', @root.name)
          assert_predicate(@root.arg, :empty?)
          assert_equal(0, @root.keys.size)
        end
        test 'makes worker element for worker tag' do
          assert_equal(1, @root.elements.size)
          worker = @root.elements.first
          assert_equal('worker', worker.name)
          assert_predicate(worker.arg, :empty?)
          assert_equal(0, worker.keys.size)
          # One source element per loop iteration (0..9).
          assert_equal(10, worker.elements.size)
        end
        test 'makes subsections for blocks, with variable substitution' do
          ele4 = @root.elements.first.elements[4]
          assert_equal('source', ele4.name)
          assert_predicate(ele4.arg, :empty?)
          assert_equal(2, ele4.keys.size)
          # `type` in the DSL is stored under the '@type' key.
          assert_equal('tail', ele4['@type'])
          assert_equal("/var/log/httpd/access.part4.log", ele4['path'])
        end
        test 'makes user-defined sections with blocks' do
          filter0 = @root.elements.first.elements[4].elements.first
          assert_equal('filter', filter0.name)
          assert_equal('bar.**', filter0.arg)
          assert_equal('hoge', filter0['@type'])
          assert_equal('moge', filter0['val1'])
          # Array values are serialized as JSON strings.
          assert_equal(JSON.dump(['foo', 'bar', 'baz']), filter0['val2'])
          assert_equal('10', filter0['val3'])
          assert_equal('hoge', filter0['@id'])
          assert_equal(2, filter0.elements.size)
          assert_equal('subsection', filter0.elements[0].name)
          assert_equal('bar', filter0.elements[0]['foo'])
          assert_equal('subsection', filter0.elements[1].name)
          assert_equal('baz', filter0.elements[1]['foo'])
        end
        test 'makes values with user-assigned variable substitutions' do
          match0 = @root.elements.first.elements[4].elements.last
          assert_equal('match', match0.name)
          assert_equal('{foo,bar}.**', match0.arg)
          assert_equal('file', match0['@type'])
          assert_equal('/var/log/httpd/access.myhostname.4.log', match0['path'])
        end
      end
    end
    sub_test_case 'without worker tag on top level' do
      def setup
        @root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE_WITHOUT_WORKER, 'dsl_config_without_worker.rb')
      end
      sub_test_case '.parse' do
        test 'makes root element' do
          assert_equal('ROOT', @root.name)
          assert_predicate(@root.arg, :empty?)
          assert_equal(0, @root.keys.size)
        end
        test 'does not make worker element implicitly because DSL configuration does not support v10 compat mode' do
          assert_equal(1, @root.elements.size)
          assert_equal('source', @root.elements.first.name)
          refute(@root.elements.find { |e| e.name == 'worker' })
        end
      end
    end
    sub_test_case 'with include conf' do
      def setup
        prepare_config1
        @root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE_FOR_INCLUDE_CONF, 'dsl_config_for_include.conf')
      end
      test 'include config' do
        assert_equal('root_config', @root['k1'])
        assert_equal('relative_path_include', @root['k2'])
        assert_equal('relative_include_in_included_file', @root['k3'])
        assert_equal('absolute_path_include', @root['k4'])
        assert_equal('uri_include', @root['k5'])
        assert_equal('wildcard_include_1', @root['k6'])
        assert_equal('wildcard_include_2', @root['k7'])
        assert_equal('wildcard_include_3', @root['k8'])
        # Wildcard includes are expanded in lexical filename order.
        assert_equal([
          'k1',
          'k2',
          'k3',
          'k4',
          'k5',
          'k8', # Because of the file name this comes first.
          'k6',
          'k7',
        ], @root.keys)
        elem1 = @root.elements.find { |e| e.name == 'elem1' }
        assert(elem1)
        assert_equal('name', elem1.arg)
        # Inside an element, a bare `include` token is a normal parameter.
        assert_equal('normal_parameter', elem1['include'])
        elem2 = @root.elements.find { |e| e.name == 'elem2' }
        assert(elem2)
        assert_equal('name', elem2.arg)
        assert_equal('embedded', elem2['k9'])
        assert_not_include(elem2, 'include')
        elem3 = elem2.elements.find { |e| e.name == 'elem3' }
        assert(elem3)
        assert_equal('nested_value', elem3['nested'])
        assert_equal('hoge', elem3['include'])
      end
      # TODO: Add uri based include spec
    end
    sub_test_case 'with include rb' do
      def setup
        prepare_config2
        @root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE_FOR_INCLUDE_RB, 'dsl_config_for_include.rb')
      end
      # Including a .rb fixture must yield the same tree as parsing it directly
      # (mirrors the 'with worker tag on top level' assertions above).
      sub_test_case '.parse' do
        test 'makes root element' do
          assert_equal('ROOT', @root.name)
          assert_predicate(@root.arg, :empty?)
          assert_equal(0, @root.keys.size)
        end
        test 'makes worker element for worker tag' do
          assert_equal(1, @root.elements.size)
          worker = @root.elements.first
          assert_equal('worker', worker.name)
          assert_predicate(worker.arg, :empty?)
          assert_equal(0, worker.keys.size)
          assert_equal(10, worker.elements.size)
        end
        test 'makes subsections for blocks, with variable substitution' do
          ele4 = @root.elements.first.elements[4]
          assert_equal('source', ele4.name)
          assert_predicate(ele4.arg, :empty?)
          assert_equal(2, ele4.keys.size)
          assert_equal('tail', ele4['@type'])
          assert_equal("/var/log/httpd/access.part4.log", ele4['path'])
        end
        test 'makes user-defined sections with blocks' do
          filter0 = @root.elements.first.elements[4].elements.first
          assert_equal('filter', filter0.name)
          assert_equal('bar.**', filter0.arg)
          assert_equal('hoge', filter0['@type'])
          assert_equal('moge', filter0['val1'])
          assert_equal(JSON.dump(['foo', 'bar', 'baz']), filter0['val2'])
          assert_equal('10', filter0['val3'])
          assert_equal('hoge', filter0['@id'])
          assert_equal(2, filter0.elements.size)
          assert_equal('subsection', filter0.elements[0].name)
          assert_equal('bar', filter0.elements[0]['foo'])
          assert_equal('subsection', filter0.elements[1].name)
          assert_equal('baz', filter0.elements[1]['foo'])
        end
        test 'makes values with user-assigned variable substitutions' do
          match0 = @root.elements.first.elements[4].elements.last
          assert_equal('match', match0.name)
          assert_equal('{foo,bar}.**', match0.arg)
          assert_equal('file', match0['@type'])
          assert_equal('/var/log/httpd/access.myhostname.4.log', match0['path'])
        end
      end
    end
    sub_test_case 'with configuration that returns non element on top' do
      sub_test_case '.parse' do
        test 'does not crash' do
          Fluent::Config::DSL::Parser.parse(DSL_CONFIG_RETURNS_NON_ELEMENT, 'dsl_config_returns_non_element.rb')
        end
      end
    end
    sub_test_case 'with configuration with wrong arguments for specific elements' do
      sub_test_case '.parse' do
        test 'raises ArgumentError correctly' do
          assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX1, 'dsl_config_wrong_syntax1') }
          assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX2, 'dsl_config_wrong_syntax2') }
          assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX3, 'dsl_config_wrong_syntax3') }
          assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX4, 'dsl_config_wrong_syntax4') }
        end
      end
    end
    sub_test_case 'with ruby keyword, that provides ruby Kernel module features' do
      sub_test_case '.parse' do
        test 'can get result of Kernel.open() by ruby.open()' do
          uname_string = `uname -a`
          tmpfile = Tempfile.create('fluentd-test')
          tmpfile.write(uname_string)
          tmpfile.close
          root = Fluent::Config::DSL::Parser.parse(<<DSL)
worker {
uname_str = ruby.open("#{tmpfile.path}"){|out| out.read}
source {
uname uname_str
}
}
DSL
          worker = root.elements.first
          assert_equal('worker', worker.name)
          source = worker.elements.first
          assert_equal('source', source.name)
          assert_equal(1, source.keys.size)
          assert_equal(uname_string, source['uname'])
        ensure
          File.delete(tmpfile.path)
        end
        test 'accepts ruby keyword with block, which allow to use methods included from ::Kernel' do
          root = Fluent::Config::DSL::Parser.parse(<<DSL)
worker {
ruby_version = ruby {
require 'erb'
ERB.new('<%= RUBY_VERSION %> from erb').result
}
source {
version ruby_version
}
}
DSL
          worker = root.elements.first
          assert_equal('worker', worker.name)
          source = worker.elements.first
          assert_equal('source', source.name)
          assert_equal(1, source.keys.size)
          assert_equal("#{RUBY_VERSION} from erb", source['version'])
        end
        test 'raises NoMethodError when configuration DSL elements are written in ruby block' do
          conf = <<DSL
worker {
ruby {
source {
type "tail"
}
}
source {
uname uname_str
}
}
DSL
          assert_raise(NoMethodError) { Fluent::Config::DSL::Parser.parse(conf) }
        end
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_types.rb | test/config/test_types.rb | require 'helper'
require 'fluent/config/types'
class TestConfigTypes < ::Test::Unit::TestCase
include Fluent
sub_test_case 'Config.size_value' do
  # Suffixes k/m/g/t (either case) scale the number by powers of 1024.
  data("2k" => [2048, "2k"],
       "2K" => [2048, "2K"],
       "3m" => [3145728, "3m"],
       "3M" => [3145728, "3M"],
       "4g" => [4294967296, "4g"],
       "4G" => [4294967296, "4G"],
       "5t" => [5497558138880, "5t"],
       "5T" => [5497558138880, "5T"],
       "6" => [6, "6"])
  test 'normal case' do |(expected, val)|
    assert_equal(expected, Config.size_value(val))
    assert_equal(expected, Config.size_value(val, { strict: true }))
  end
  # Non-strict mode silently coerces unparsable input to 0.
  data("integer" => [6, 6],
       "hoge" => [0, "hoge"],
       "empty" => [0, ""])
  test 'not assumed case' do |(expected, val)|
    assert_equal(expected, Config.size_value(val))
  end
  test 'nil' do
    assert_equal(nil, Config.size_value(nil))
  end
  # Strict mode raises ConfigError (prefixed with the param name) instead.
  data("integer" => [6, 6],
       "hoge" => [Fluent::ConfigError.new('name1: invalid value for Integer(): "hoge"'), "hoge"],
       "empty" => [Fluent::ConfigError.new('name1: invalid value for Integer(): ""'), ""])
  test 'not assumed case with strict' do |(expected, val)|
    if expected.kind_of? Exception
      assert_raise(expected) do
        Config.size_value(val, { strict: true }, "name1")
      end
    else
      assert_equal(expected, Config.size_value(val, { strict: true }, "name1"))
    end
  end
  test 'nil with strict' do
    assert_equal(nil, Config.size_value(nil, { strict: true }))
  end
end
sub_test_case 'Config.time_value' do
  # Suffixes s/sec, m, h, d convert to seconds.
  data("10s" => [10, "10s"],
       "10sec" => [10, "10sec"],
       "2m" => [120, "2m"],
       "3h" => [10800, "3h"],
       "4d" => [345600, "4d"])
  test 'normal case' do |(expected, val)|
    assert_equal(expected, Config.time_value(val))
    assert_equal(expected, Config.time_value(val, { strict: true }))
  end
  # Non-strict mode returns Float values and coerces junk to 0.0.
  data("integer" => [4.0, 4],
       "float" => [0.4, 0.4],
       "hoge" => [0.0, "hoge"],
       "empty" => [0.0, ""])
  test 'not assumed case' do |(expected, val)|
    assert_equal(expected, Config.time_value(val))
  end
  test 'nil' do
    assert_equal(nil, Config.time_value(nil))
  end
  # Strict mode raises ConfigError (prefixed with the param name) instead.
  data("integer" => [6, 6],
       "hoge" => [Fluent::ConfigError.new('name1: invalid value for Float(): "hoge"'), "hoge"],
       "empty" => [Fluent::ConfigError.new('name1: invalid value for Float(): ""'), ""])
  test 'not assumed case with strict' do |(expected, val)|
    if expected.kind_of? Exception
      assert_raise(expected) do
        Config.time_value(val, { strict: true }, "name1")
      end
    else
      assert_equal(expected, Config.time_value(val, { strict: true }, "name1"))
    end
  end
  test 'nil with strict' do
    assert_equal(nil, Config.time_value(nil, { strict: true }))
  end
end
sub_test_case 'Config.bool_value' do
  # "true"/"yes"/"" map to true; "false"/"no" map to false.
  data("true" => [true, "true"],
       "yes" => [true, "yes"],
       "empty" => [true, ""],
       "false" => [false, "false"],
       "no" => [false, "no"])
  test 'normal case' do |(expected, val)|
    assert_equal(expected, Config.bool_value(val))
  end
  # Booleans pass through; anything unrecognized yields nil when not strict.
  data("true" => [true, true],
       "false" => [false, false],
       "hoge" => [nil, "hoge"],
       "nil" => [nil, nil],
       "integer" => [nil, 10])
  test 'not assumed case' do |(expected, val)|
    assert_equal(expected, Config.bool_value(val))
  end
  # Strict mode raises on unrecognized input (nil still passes through).
  data("true" => [true, true],
       "false" => [false, false],
       "hoge" => [Fluent::ConfigError.new("name1: invalid bool value: hoge"), "hoge"],
       "nil" => [nil, nil],
       "integer" => [Fluent::ConfigError.new("name1: invalid bool value: 10"), 10])
  test 'not assumed case with strict' do |(expected, val)|
    if expected.kind_of? Exception
      assert_raise(expected) do
        Config.bool_value(val, { strict: true }, "name1")
      end
    else
      assert_equal(expected, Config.bool_value(val, { strict: true }, "name1"))
    end
  end
end
sub_test_case 'Config.regexp_value' do
  # Slash-delimited patterns are parsed as regexp literals.
  data("empty" => [//, "//"],
       "plain" => [/regexp/, "/regexp/"],
       "zero width" => [/^$/, "/^$/"],
       "character classes" => [/[a-z]/, "/[a-z]/"],
       "meta characters" => [/.+.*?\d\w\s\S/, '/.+.*?\d\w\s\S/'])
  test 'normal case' do |(expected, str)|
    assert_equal(expected, Config.regexp_value(str))
  end
  # Undelimited strings are accepted and compiled as-is.
  data("empty" => [//, ""],
       "plain" => [/regexp/, "regexp"],
       "zero width" => [/^$/, "^$"],
       "character classes" => [/[a-z]/, "[a-z]"],
       "meta characters" => [/.+.*?\d\w\s\S/, '.+.*?\d\w\s\S'])
  test 'w/o slashes' do |(expected, str)|
    assert_equal(expected, Config.regexp_value(str))
  end
  # Malformed slash-delimited patterns raise ConfigError.
  data("missing right slash" => "/regexp",
       "too many options" => "/regexp/imx")
  test 'invalid regexp' do |(str)|
    assert_raise(Fluent::ConfigError.new("invalid regexp: missing right slash: #{str}")) do
      Config.regexp_value(str)
    end
  end
  test 'nil' do
    assert_equal nil, Config.regexp_value(nil)
  end
end
sub_test_case 'type converters for config_param definitions' do
# STRING_TYPE passes values through, always yielding UTF-8 strings.
data("test" => ['test', 'test'],
     "1" => ['1', '1'],
     "spaces" => [' ', ' '])
test 'string' do |(expected, val)|
  assert_equal expected, Config::STRING_TYPE.call(val, {})
  assert_equal Encoding::UTF_8, Config::STRING_TYPE.call(val, {}).encoding
end
test 'string nil' do
  assert_equal nil, Config::STRING_TYPE.call(nil, {})
end
# Binary (ASCII-8BIT) input is re-encoded to UTF-8 without data loss.
data('latin' => 'Märch',
     'ascii' => 'ascii',
     'space' => ' ',
     'number' => '1',
     'Hiragana' => 'あいうえお')
test 'string w/ binary' do |str|
  actual = Config::STRING_TYPE.call(str.b, {})
  assert_equal str, actual
  assert_equal Encoding::UTF_8, actual.encoding
end
# SYMBOL_TYPE accepts values with or without a leading colon; '' maps to nil.
data('starts_with_semicolon' => [:conor, ':conor'],
     'simple_string' => [:conor, 'conor'],
     'empty_string' => [nil, ''])
test 'symbol' do |(expected, val)|
  # Fixed argument order: assert_equal(expected, actual), matching the rest of the file.
  assert_equal expected, Config::SYMBOL_TYPE.call(val, {})
end
data("val" => [:val, 'val'],
     "v" => [:v, 'v'],
     "value" => [:value, 'value'])
test 'enum' do |(expected, val)|
  assert_equal expected, Config::ENUM_TYPE.call(val, {list: [:val, :value, :v]})
end
test 'enum: pick unknown choice' do
  assert_raise(Fluent::ConfigError.new("valid options are val,value,v but got x")) do
    Config::ENUM_TYPE.call('x', {list: [:val, :value, :v]})
  end
end
# :list must be a non-empty array of Symbols; otherwise it is a plugin bug.
data("empty list" => {},
     "string list" => {list: ["val", "value", "v"]})
test 'enum: invalid choices' do |list|
  assert_raise(RuntimeError.new("Plugin BUG: config type 'enum' requires :list of symbols")) do
    Config::ENUM_TYPE.call('val', list)
  end
end
test 'enum: nil' do
  assert_equal nil, Config::ENUM_TYPE.call(nil)
end
# Lenient parsing: fractions and trailing junk truncate to the integer prefix.
data("1" => [1, '1'],
     "1.0" => [1, '1.0'],
     "1_000" => [1000, '1_000'],
     "1x" => [1, '1x'])
test 'integer' do |(expected, val)|
  assert_equal expected, Config::INTEGER_TYPE.call(val, {})
end
# Non-strict mode coerces unparsable input to 0.
data("integer" => [6, 6],
     "hoge" => [0, "hoge"],
     "empty" => [0, ""])
test 'integer: not assumed case' do |(expected, val)|
  assert_equal expected, Config::INTEGER_TYPE.call(val, {})
end
test 'integer: nil' do
  assert_equal nil, Config::INTEGER_TYPE.call(nil, {})
end
# Strict mode raises ConfigError with the param name prefixed.
data("integer" => [6, 6],
     "hoge" => [Fluent::ConfigError.new('name1: invalid value for Integer(): "hoge"'), "hoge"],
     "empty" => [Fluent::ConfigError.new('name1: invalid value for Integer(): ""'), ""])
test 'integer: not assumed case with strict' do |(expected, val)|
  if expected.kind_of? Exception
    assert_raise(expected) do
      Config::INTEGER_TYPE.call(val, { strict: true }, "name1")
    end
  else
    assert_equal expected, Config::INTEGER_TYPE.call(val, { strict: true }, "name1")
  end
end
test 'integer: nil with strict' do
  assert_equal nil, Config::INTEGER_TYPE.call(nil, { strict: true })
end
# FLOAT_TYPE accepts integer, decimal, and exponent notations.
data("1" => [1.0, '1'],
     "1.0" => [1.0, '1.0'],
     "1.00" => [1.0, '1.00'],
     "1e0" => [1.0, '1e0'])
test 'float' do |(expected, val)|
  assert_equal expected, Config::FLOAT_TYPE.call(val, {})
end
# Non-strict mode coerces unparsable input to 0.
data("integer" => [6, 6],
     "hoge" => [0, "hoge"],
     "empty" => [0, ""])
test 'float: not assumed case' do |(expected, val)|
  assert_equal expected, Config::FLOAT_TYPE.call(val, {})
end
test 'float: nil' do
  assert_equal nil, Config::FLOAT_TYPE.call(nil, {})
end
# Strict mode raises ConfigError with the param name prefixed.
data("integer" => [6, 6],
     "hoge" => [Fluent::ConfigError.new('name1: invalid value for Float(): "hoge"'), "hoge"],
     "empty" => [Fluent::ConfigError.new('name1: invalid value for Float(): ""'), ""])
test 'float: not assumed case with strict' do |(expected, val)|
  if expected.kind_of? Exception
    assert_raise(expected) do
      Config::FLOAT_TYPE.call(val, { strict: true }, "name1")
    end
  else
    assert_equal expected, Config::FLOAT_TYPE.call(val, { strict: true }, "name1")
  end
end
test 'float: nil with strict' do
  assert_equal nil, Config::FLOAT_TYPE.call(nil, { strict: true })
end
# SIZE_TYPE applies the same 1024-based suffixes as Config.size_value.
data("1000" => [1000, '1000'],
     "1k" => [1024, '1k'],
     "1m" => [1024*1024, '1m'])
test 'size' do |(expected, val)|
  assert_equal expected, Config::SIZE_TYPE.call(val, {})
end
# Only lowercase true/yes/"" and false/no are recognized; everything else
# (including capitalized variants) maps to nil in non-strict mode.
data("true" => [true, 'true'],
     "yes" => [true, 'yes'],
     "no" => [false, 'no'],
     "false" => [false, 'false'],
     "TRUE" => [nil, 'TRUE'],
     "True" => [nil, 'True'],
     "Yes" => [nil, 'Yes'],
     "No" => [nil, 'No'],
     "empty" => [true, ''],
     "unexpected_string" => [nil, 'unexpected_string'])
test 'bool' do |(expected, val)|
  assert_equal expected, Config::BOOL_TYPE.call(val, {})
end
# TIME_TYPE converts s/m/h/d suffixes to seconds; bare numbers pass through.
data("0" => [0, '0'],
     "1" => [1.0, '1'],
     "1.01" => [1.01, '1.01'],
     "1s" => [1, '1s'],
     "1m" => [60, '1m'],
     "1h" => [3600, '1h'],
     "1d" => [86400, '1d'])
test 'time' do |(expected, val)|
  assert_equal expected, Config::TIME_TYPE.call(val, {})
end
# REGEXP_TYPE mirrors Config.regexp_value for slash-delimited patterns.
data("empty" => [//, "//"],
     "plain" => [/regexp/, "/regexp/"],
     "zero width" => [/^$/, "/^$/"],
     "character classes" => [/[a-z]/, "/[a-z]/"],
     "meta characters" => [/.+.*?\d\w\s\S/, '/.+.*?\d\w\s\S/'])
test 'regexp' do |(expected, str)|
  assert_equal(expected, Config::REGEXP_TYPE.call(str, {}))
end
# HASH_TYPE accepts both JSON objects and "k:v,k:v" shorthand, with optional
# key symbolization and per-value type reformatting.
data("string and integer" => [{"x"=>"v","k"=>1}, '{"x":"v","k":1}', {}],
     "strings" => [{"x"=>"v","k"=>"1"}, 'x:v,k:1', {}],
     "w/ space" => [{"x"=>"v","k"=>"1"}, 'x:v, k:1', {}],
     "heading space" => [{"x"=>"v","k"=>"1"}, ' x:v, k:1 ', {}],
     "trailing space" => [{"x"=>"v","k"=>"1"}, 'x:v , k:1 ', {}],
     "multiple colons" => [{"x"=>"v:v","k"=>"1"}, 'x:v:v, k:1', {}],
     "symbolize keys" => [{x: "v", k: 1}, '{"x":"v","k":1}', {symbolize_keys: true}],
     "value_type: :string" => [{x: "v", k: "1"}, 'x:v,k:1', {symbolize_keys: true, value_type: :string}],
     "value_type: :string 2" => [{x: "v", k: "1"}, '{"x":"v","k":1}', {symbolize_keys: true, value_type: :string}],
     "value_type: :integer" => [{x: 0, k: 1}, 'x:0,k:1', {symbolize_keys: true, value_type: :integer}],
     "time 1" => [{"x"=>1,"y"=>60,"z"=>3600}, '{"x":"1s","y":"1m","z":"1h"}', {value_type: :time}],
     "time 2" => [{"x"=>1,"y"=>60,"z"=>3600}, 'x:1s,y:1m,z:1h', {value_type: :time}])
test 'hash' do |(expected, val, opts)|
  assert_equal(expected, Config::HASH_TYPE.call(val, opts))
end
# An unknown :value_type is a plugin bug reported via RuntimeError.
test 'hash w/ unknown type' do
  assert_raise(RuntimeError.new("unknown type in REFORMAT: foo")) do
    Config::HASH_TYPE.call("x:1,y:2", {value_type: :foo})
  end
end
# Strict value conversion surfaces ConfigError keyed by the failing hash key.
test 'hash w/ strict option' do
  assert_raise(Fluent::ConfigError.new('y: invalid value for Integer(): "hoge"')) do
    Config::HASH_TYPE.call("x:1,y:hoge", {value_type: :integer, strict: true})
  end
end
# Binary input is decoded before parsing, preserving non-ASCII characters.
data('latin' => ['3:Märch', {"3"=>"Märch"}],
     'ascii' => ['ascii:ascii', {"ascii"=>"ascii"}],
     'number' => ['number:1', {"number"=>"1"}],
     'Hiragana' => ['hiragana:あいうえお', {"hiragana"=>"あいうえお"}])
test 'hash w/ binary' do |(target, expected)|
  assert_equal(expected, Config::HASH_TYPE.call(target.b, { value_type: :string }))
end
test 'hash w/ nil' do
  assert_equal(nil, Config::HASH_TYPE.call(nil))
end
# ARRAY_TYPE accepts both JSON arrays and comma-separated shorthand, with
# optional per-element type reformatting.
data("strings and integer" => [["1","2",1], '["1","2",1]', {}],
     "number strings" => [["1","2","1"], '1,2,1', {}],
     "alphabets" => [["a","b","c"], '["a","b","c"]', {}],
     "alphabets w/o quote" => [["a","b","c"], 'a,b,c', {}],
     "w/ spaces" => [["a","b","c"], 'a, b, c', {}],
     "w/ space before comma" => [["a","b","c"], 'a , b , c', {}],
     "comma or space w/ qupte" => [["a a","b,b"," c "], '["a a","b,b"," c "]', {}],
     "space in a value w/o qupte" => [["a a","b","c"], 'a a,b,c', {}],
     "integers" => [[1,2,1], '[1,2,1]', {}],
     "value_type: :integer w/ quote" => [[1,2,1], '["1","2","1"]', {value_type: :integer}],
     "value_type: :integer w/o quote" => [[1,2,1], '1,2,1', {value_type: :integer}])
test 'array' do |(expected, val, opts)|
  assert_equal(expected, Config::ARRAY_TYPE.call(val, opts))
end
# The configured value replaces (not merges with) the :default array.
data('["1","2"]' => [["1","2"], '["1","2"]'],
     '["3"]' => [["3"], '["3"]'])
test 'array w/ default values' do |(expected, val)|
  array_options = {
    default: [],
  }
  assert_equal(expected, Config::ARRAY_TYPE.call(val, array_options))
end
test 'array w/ unknown type' do
  assert_raise(RuntimeError.new("unknown type in REFORMAT: foo")) do
    Config::ARRAY_TYPE.call("1,2", {value_type: :foo})
  end
end
test 'array w/ strict option' do
  assert_raise(Fluent::ConfigError.new(': invalid value for Integer(): "hoge"')) do
    Config::ARRAY_TYPE.call("1,hoge", {value_type: :integer, strict: true}, "name1")
  end
end
test 'array w/ nil' do
  assert_equal(nil, Config::ARRAY_TYPE.call(nil))
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/config/test_plugin_configuration.rb | test/config/test_plugin_configuration.rb | require_relative '../helper'
require 'fluent/plugin/input'
require 'fluent/test/driver/input'
module ConfigurationForPlugins
class AllBooleanParams < Fluent::Plugin::Input
config_param :flag1, :bool, default: true
config_param :flag2, :bool, default: true
config_param :flag3, :bool, default: false
config_param :flag4, :bool, default: false
config_section :child, param_name: :children, multi: true, required: true do
config_param :flag1, :bool, default: true
config_param :flag2, :bool, default: true
config_param :flag3, :bool, default: false
config_param :flag4, :bool, default: false
end
end
class BooleanParamsWithoutValue < ::Test::Unit::TestCase
CONFIG = <<CONFIG
flag1
flag2 # yaaaaaaaaaay
flag3
flag4 # yaaaaaaaaaay
<child>
flag1
flag2 # yaaaaaaaaaay
flag3
flag4 # yaaaaaaaaaay
</child>
<child>
flag1 # yaaaaaaaaaay
flag2
flag3 # yaaaaaaaaaay
flag4
</child>
# with following whitespace
<child>
flag1
flag2
flag3
flag4
</child>
CONFIG
test 'create plugin via driver' do
d = Fluent::Test::Driver::Input.new(AllBooleanParams)
d.configure(CONFIG)
assert_equal([true] * 4, [d.instance.flag1, d.instance.flag2, d.instance.flag3, d.instance.flag4])
num_of_sections = 3
assert_equal num_of_sections, d.instance.children.size
assert_equal([true] * (num_of_sections * 4), d.instance.children.map{|c| [c.flag1, c.flag2, c.flag3, c.flag4]}.flatten)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_storage.rb | test/plugin_helper/test_storage.rb | require_relative '../helper'
require 'fluent/plugin_helper/storage'
require 'fluent/plugin/base'
class ExampleStorage < Fluent::Plugin::Storage
Fluent::Plugin.register_storage('example', self)
attr_reader :data, :saved, :load_times, :save_times
def initialize
super
@data = {}
@saved = {}
@load_times = 0
@save_times = 0
end
def load
@data ||= {}
@load_times += 1
end
def save
@saved = @data.dup
@save_times += 1
end
def get(key)
@data[key]
end
def fetch(key, defval)
@data.fetch(key, defval)
end
def put(key, value)
@data[key] = value
end
def delete(key)
@data.delete(key)
end
def update(key, &block)
@data[key] = block.call(@data[key])
end
def close
@data = {}
super
end
def terminate
@saved = {}
@load_times = @save_times = 0
super
end
end
class Example2Storage < ExampleStorage
Fluent::Plugin.register_storage('ex2', self)
config_param :dummy_path, :string, default: 'dummy'
end
class Example3Storage < ExampleStorage
Fluent::Plugin.register_storage('ex3', self)
def synchronized?
true
end
end
class Example4Storage < ExampleStorage
Fluent::Plugin.register_storage('ex4', self)
def persistent_always?
true
end
def synchronized?
true
end
end
class StorageHelperTest < Test::Unit::TestCase
class Dummy < Fluent::Plugin::TestBase
helpers :storage
end
class Dummy2 < Fluent::Plugin::TestBase
helpers :storage
config_section :storage do
config_set_default :@type, 'ex2'
config_set_default :dummy_path, '/tmp/yay'
end
end
setup do
@d = nil
end
teardown do
if @d
@d.stop unless @d.stopped?
@d.shutdown unless @d.shutdown?
@d.close unless @d.closed?
@d.terminate unless @d.terminated?
end
end
test 'can be initialized without any storages at first' do
d = Dummy.new
assert_equal 0, d._storages.size
end
test 'can be configured with hash' do
d = Dummy.new
d.configure(config_element())
conf = { '@type' => 'example' }
assert_nothing_raised do
d.storage_create(conf: conf)
end
end
test 'can override default configuration parameters, but not overwrite whole definition' do
d = Dummy.new
d.configure(config_element())
assert_equal 1, d.storage_configs.size
assert_equal 'local', d.storage_configs.first[:@type]
d = Dummy2.new
d.configure(config_element('ROOT', '', {}, [config_element('storage', '', {}, [])]))
assert_raise NoMethodError do
d.storage
end
assert_equal 1, d.storage_configs.size
assert_equal 'ex2', d.storage_configs.first[:@type]
assert_equal '/tmp/yay', d.storage_configs.first.dummy_path
end
test 'creates instance of type specified by conf, or default_type if @type is missing in conf' do
d = Dummy2.new
d.configure(config_element())
i = d.storage_create(conf: config_element('format', '', {'@type' => 'example'}), default_type: 'ex2')
assert{ i.is_a?(Fluent::PluginHelper::Storage::SynchronizeWrapper) && i.instance_eval{ @storage }.is_a?(ExampleStorage) }
d = Dummy2.new
d.configure(config_element())
i = d.storage_create(conf: nil, default_type: 'ex2')
assert{ i.is_a?(Fluent::PluginHelper::Storage::SynchronizeWrapper) && i.instance_eval{ @storage }.is_a?(Example2Storage) }
end
test 'raises config error if config section is specified, but @type is not specified' do
d = Dummy2.new
d.configure(config_element())
assert_raise Fluent::ConfigError.new("@type is required in <storage>") do
d.storage_create(conf: config_element('storage', 'foo', {}), default_type: 'ex2')
end
end
test 'raises config error if config argument has invalid characters' do
d = Dummy.new
assert_raise Fluent::ConfigError.new("Argument in <storage ARG> uses invalid characters: 'yaa y'") do
d.configure(config_element('root', '', {}, [config_element('storage', 'yaa y', {'@type' => 'local'})]))
end
d.configure(config_element())
assert_raise Fluent::ConfigError.new("Argument in <storage ARG> uses invalid characters: 'a,b'") do
d.storage_create(usage: 'a,b', type: 'local')
end
end
test 'can be configured without storage sections' do
d = Dummy.new
assert_nothing_raised do
d.configure(config_element())
end
assert_equal 1, d._storages.size
end
test 'can be configured with a storage section' do
d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', '', {'@type' => 'example'})
])
assert_nothing_raised do
d.configure(conf)
end
assert_equal 1, d._storages.size
assert{ d._storages.values.all?{ |s| !s.running } }
end
test 'can be configured with 2 or more storage sections with different usages with each other' do
d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'default', {'@type' => 'example'}),
config_element('storage', 'extra', {'@type' => 'ex2', 'dummy_path' => 'v'}),
])
assert_nothing_raised do
d.configure(conf)
end
assert_equal 2, d._storages.size
assert{ d._storages.values.all?{ |s| !s.running } }
end
test 'cannot be configured with 2 storage sections with same usage' do
d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'default', {'@type' => 'example'}),
config_element('storage', 'extra', {'@type' => 'ex2', 'dummy_path' => 'v'}),
config_element('storage', 'extra', {'@type' => 'ex2', 'dummy_path' => 'v2'}),
])
assert_raises Fluent::ConfigError do
d.configure(conf)
end
end
test 'creates a storage plugin instance which is already configured without usage' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', '', {'@type' => 'example'})
])
d.configure(conf)
d.start
s = d.storage_create
assert{ s.implementation.is_a? ExampleStorage }
end
test 'creates a storage plugin instance which is already configured with usage' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example'})
])
d.configure(conf)
d.start
s = d.storage_create(usage: 'mydata')
assert{ s.implementation.is_a? ExampleStorage }
end
test 'creates a storage plugin without configurations' do
@d = d = Dummy.new
d.configure(config_element())
d.start
s = d.storage_create(usage: 'mydata', type: 'example', conf: config_element('storage', 'mydata'))
assert{ s.implementation.is_a? ExampleStorage }
end
test 'creates 2 or more storage plugin instances' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example'}),
config_element('storage', 'secret', {'@type' => 'ex2', 'dummy_path' => 'yay!'}),
])
d.configure(conf)
d.start
s1 = d.storage_create(usage: 'mydata')
s2 = d.storage_create(usage: 'secret')
assert{ s1.implementation.is_a? ExampleStorage }
assert{ s2.implementation.is_a? Example2Storage }
assert_equal 'yay!', s2.implementation.dummy_path
end
test 'creates wrapped instances for non-synchronized plugin in default' do # and check operations
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example'})
])
d.configure(conf)
d.start
s = d.storage_create(usage: 'mydata')
assert !s.implementation.synchronized?
assert{ s.is_a? Fluent::PluginHelper::Storage::SynchronizeWrapper }
assert s.synchronized?
assert s.autosave
assert s.save_at_shutdown
assert_nil s.get('key')
assert_equal 'value', s.put('key', 'value')
assert_equal 'value', s.fetch('key', 'v1')
assert_equal 'v2', s.update('key'){|v| v[0] + '2' }
assert_equal 'v2', s.get('key')
assert_equal 'v2', s.delete('key')
end
test 'creates wrapped instances for non-persistent plugins when configured as persistent' do # and check operations
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example', 'persistent' => 'true'})
])
d.configure(conf)
d.start
s = d.storage_create(usage: 'mydata')
assert !s.implementation.persistent_always?
assert{ s.is_a? Fluent::PluginHelper::Storage::PersistentWrapper }
assert s.persistent
assert s.persistent_always?
assert s.synchronized?
assert !s.autosave
assert s.save_at_shutdown
assert_nil s.get('key')
assert_equal 'value', s.put('key', 'value')
assert_equal 'value', s.fetch('key', 'v1')
assert_equal 'v2', s.update('key'){|v| v[0] + '2' }
assert_equal 'v2', s.get('key')
assert_equal 'v2', s.delete('key')
end
test 'creates bare instances for synchronized plugin in default' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'ex3'})
])
d.configure(conf)
d.start
s = d.storage_create(usage: 'mydata')
assert s.implementation.synchronized?
assert{ s.is_a? Example3Storage }
assert s.synchronized?
end
test 'creates bare instances for persistent-always plugin when configured as persistent' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'ex4', 'persistent' => 'true'})
])
d.configure(conf)
d.start
s = d.storage_create(usage: 'mydata')
assert s.implementation.persistent_always?
assert{ s.is_a? Example4Storage }
assert s.persistent
assert s.persistent_always?
assert s.synchronized?
end
test 'does not execute timer if autosave is not specified' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example', 'autosave' => 'false'})
])
d.configure(conf)
d.start
d.storage_create(usage: 'mydata')
assert_equal 0, d._timers.size
end
test 'executes timer if autosave is specified and plugin is not persistent' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example', 'autosave_interval' => '1s', 'persistent' => 'false'})
])
d.configure(conf)
d.start
d.storage_create(usage: 'mydata')
assert_equal 1, d._timers.size
end
test 'executes timer for autosave, which calls #save periodically' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example', 'autosave_interval' => '1s', 'persistent' => 'false'})
])
d.configure(conf)
d.start
s = d.storage_create(usage: 'mydata')
assert_equal 1, d._timers.size
timeout = Time.now + 3
while Time.now < timeout
s.put('k', 'v')
sleep 0.2
end
d.stop
assert{ s.implementation.save_times > 0 }
end
test 'saves data for each operations if plugin storage is configured as persistent, and wrapped' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example', 'persistent' => 'true'})
])
d.configure(conf)
d.start
s = d.storage_create(usage: 'mydata')
assert_equal 1, s.implementation.load_times
assert_equal 0, s.implementation.save_times
s.get('k1')
assert_equal 2, s.implementation.load_times
assert_equal 0, s.implementation.save_times
s.put('k1', 'v1')
assert_equal 3, s.implementation.load_times
assert_equal 1, s.implementation.save_times
s.fetch('k2', 'v2')
assert_equal 4, s.implementation.load_times
assert_equal 1, s.implementation.save_times
s.delete('k1')
assert_equal 5, s.implementation.load_times
assert_equal 2, s.implementation.save_times
end
test 'stops timer for autosave by #stop, calls #save by #shutdown if save_at_shutdown is specified' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example', 'save_at_shutdown' => 'true'})
])
d.configure(conf)
assert !d.timer_running?
d.start
assert d.timer_running?
s = d.storage_create(usage: 'mydata')
assert s.autosave
assert_equal 1, d._timers.size
d.stop
assert !d.timer_running?
assert_equal 1, s.implementation.load_times
assert_equal 0, s.implementation.save_times
d.before_shutdown
d.shutdown
assert_equal 1, s.implementation.load_times
assert_equal 1, s.implementation.save_times
end
test 'calls #close and #terminate for all plugin instances by #close/#shutdown' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('storage', 'mydata', {'@type' => 'example', 'autosave' => 'false', 'save_at_shutdown' => 'true'})
])
d.configure(conf)
d.start
s = d.storage_create(usage: 'mydata')
s.put('k1', 'v1')
s.put('k2', 2)
s.put('k3', true)
d.stop
assert_equal 3, s.implementation.data.size
assert_equal 0, s.implementation.saved.size
d.shutdown
assert_equal 3, s.implementation.data.size
assert_equal 3, s.implementation.saved.size
d.close
assert_equal 0, s.implementation.data.size
assert_equal 3, s.implementation.saved.size
d.shutdown
assert_equal 0, s.implementation.data.size
assert_equal 0, s.implementation.saved.size
end
test 'calls lifecycle methods for all plugin instances via owner plugin' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [ config_element('storage', '', {'@type' => 'example'}), config_element('storage', 'e2', {'@type' => 'example'}) ])
d.configure(conf)
d.start
i1 = d.storage_create(usage: '')
i2 = d.storage_create(usage: 'e2')
i3 = d.storage_create(usage: 'e3', type: 'ex2')
assert i1.started?
assert i2.started?
assert i3.started?
assert !i1.stopped?
assert !i2.stopped?
assert !i3.stopped?
d.stop
assert i1.stopped?
assert i2.stopped?
assert i3.stopped?
assert !i1.before_shutdown?
assert !i2.before_shutdown?
assert !i3.before_shutdown?
d.before_shutdown
assert i1.before_shutdown?
assert i2.before_shutdown?
assert i3.before_shutdown?
assert !i1.shutdown?
assert !i2.shutdown?
assert !i3.shutdown?
d.shutdown
assert i1.shutdown?
assert i2.shutdown?
assert i3.shutdown?
assert !i1.after_shutdown?
assert !i2.after_shutdown?
assert !i3.after_shutdown?
d.after_shutdown
assert i1.after_shutdown?
assert i2.after_shutdown?
assert i3.after_shutdown?
assert !i1.closed?
assert !i2.closed?
assert !i3.closed?
d.close
assert i1.closed?
assert i2.closed?
assert i3.closed?
assert !i1.terminated?
assert !i2.terminated?
assert !i3.terminated?
d.terminate
assert i1.terminated?
assert i2.terminated?
assert i3.terminated?
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_formatter.rb | test/plugin_helper/test_formatter.rb | require_relative '../helper'
require 'fluent/plugin_helper/formatter'
require 'fluent/plugin/base'
class FormatterHelperTest < Test::Unit::TestCase
class ExampleFormatter < Fluent::Plugin::Formatter
Fluent::Plugin.register_formatter('example', self)
def format(tag, time, record)
"#{tag},#{time.to_i},#{record.keys.sort.join(',')}" # hey, you miss values! :P
end
end
class Example2Formatter < Fluent::Plugin::Formatter
Fluent::Plugin.register_formatter('example2', self)
def format(tag, time, record)
"#{tag},#{time.to_i},#{record.values.sort.join(',')}" # key...
end
end
class Dummy < Fluent::Plugin::TestBase
helpers :formatter
config_section :format do
config_set_default :@type, 'example'
end
end
class Dummy2 < Fluent::Plugin::TestBase
helpers :formatter
config_section :format do
config_set_default :@type, 'example2'
end
end
setup do
@d = nil
end
teardown do
if @d
@d.stop unless @d.stopped?
@d.shutdown unless @d.shutdown?
@d.close unless @d.closed?
@d.terminate unless @d.terminated?
end
end
test 'can be initialized without any formatters at first' do
d = Dummy.new
assert_equal 0, d._formatters.size
end
test 'can override default configuration parameters, but not overwrite whole definition' do
d = Dummy.new
assert_equal [], d.formatter_configs
d = Dummy2.new
d.configure(config_element('ROOT', '', {}, [config_element('format', '', {}, [])]))
assert_raise NoMethodError do
d.format
end
assert_equal 1, d.formatter_configs.size
assert_equal 'example2', d.formatter_configs.first[:@type]
end
test 'creates instance of type specified by conf, or default_type if @type is missing in conf' do
d = Dummy2.new
d.configure(config_element())
i = d.formatter_create(conf: config_element('format', '', {'@type' => 'example'}), default_type: 'example2')
assert{ i.is_a?(ExampleFormatter) }
d = Dummy2.new
d.configure(config_element())
i = d.formatter_create(conf: nil, default_type: 'example2')
assert{ i.is_a?(Example2Formatter) }
end
test 'raises config error if config section is specified, but @type is not specified' do
d = Dummy2.new
d.configure(config_element())
assert_raise Fluent::ConfigError.new("@type is required in <format>") do
d.formatter_create(conf: config_element('format', '', {}), default_type: 'example2')
end
end
test 'can be configured with default type without format sections' do
d = Dummy.new
assert_nothing_raised do
d.configure(config_element())
end
assert_equal 1, d._formatters.size
end
test 'can be configured with a format section' do
d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('format', '', {'@type' => 'example'})
])
assert_nothing_raised do
d.configure(conf)
end
assert_equal 1, d._formatters.size
assert{ d._formatters.values.all?{ |formatter| !formatter.started? } }
end
test 'can be configured with 2 or more format sections with different usages with each other' do
d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('format', 'default', {'@type' => 'example'}),
config_element('format', 'extra', {'@type' => 'example2'}),
])
assert_nothing_raised do
d.configure(conf)
end
assert_equal 2, d._formatters.size
assert{ d._formatters.values.all?{ |formatter| !formatter.started? } }
end
test 'cannot be configured with 2 format sections with same usage' do
d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('format', 'default', {'@type' => 'example'}),
config_element('format', 'extra', {'@type' => 'example2'}),
config_element('format', 'extra', {'@type' => 'example2'}),
])
assert_raises Fluent::ConfigError do
d.configure(conf)
end
end
test 'creates a format plugin instance which is already configured without usage' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('format', '', {'@type' => 'example'})
])
d.configure(conf)
d.start
formatter = d.formatter_create
assert{ formatter.is_a? ExampleFormatter }
assert formatter.started?
end
test 'creates a formatter plugin instance which is already configured with usage' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('format', 'mydata', {'@type' => 'example'})
])
d.configure(conf)
d.start
formatter = d.formatter_create(usage: 'mydata')
assert{ formatter.is_a? ExampleFormatter }
assert formatter.started?
end
test 'creates a formatter plugin without configurations' do
@d = d = Dummy.new
d.configure(config_element())
d.start
formatter = d.formatter_create(usage: 'mydata', type: 'example', conf: config_element('format', 'mydata'))
assert{ formatter.is_a? ExampleFormatter }
assert formatter.started?
end
test 'creates 2 or more formatter plugin instances' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [
config_element('format', 'mydata', {'@type' => 'example'}),
config_element('format', 'secret', {'@type' => 'example2'})
])
d.configure(conf)
d.start
p1 = d.formatter_create(usage: 'mydata')
p2 = d.formatter_create(usage: 'secret')
assert{ p1.is_a? ExampleFormatter }
assert p1.started?
assert{ p2.is_a? Example2Formatter }
assert p2.started?
end
test 'calls lifecycle methods for all plugin instances via owner plugin' do
@d = d = Dummy.new
conf = config_element('ROOT', '', {}, [ config_element('format', '', {'@type' => 'example'}), config_element('format', 'e2', {'@type' => 'example'}) ])
d.configure(conf)
d.start
i1 = d.formatter_create(usage: '')
i2 = d.formatter_create(usage: 'e2')
i3 = d.formatter_create(usage: 'e3', type: 'example2')
assert i1.started?
assert i2.started?
assert i3.started?
assert !i1.stopped?
assert !i2.stopped?
assert !i3.stopped?
d.stop
assert i1.stopped?
assert i2.stopped?
assert i3.stopped?
assert !i1.before_shutdown?
assert !i2.before_shutdown?
assert !i3.before_shutdown?
d.before_shutdown
assert i1.before_shutdown?
assert i2.before_shutdown?
assert i3.before_shutdown?
assert !i1.shutdown?
assert !i2.shutdown?
assert !i3.shutdown?
d.shutdown
assert i1.shutdown?
assert i2.shutdown?
assert i3.shutdown?
assert !i1.after_shutdown?
assert !i2.after_shutdown?
assert !i3.after_shutdown?
d.after_shutdown
assert i1.after_shutdown?
assert i2.after_shutdown?
assert i3.after_shutdown?
assert !i1.closed?
assert !i2.closed?
assert !i3.closed?
d.close
assert i1.closed?
assert i2.closed?
assert i3.closed?
assert !i1.terminated?
assert !i2.terminated?
assert !i3.terminated?
d.terminate
assert i1.terminated?
assert i2.terminated?
assert i3.terminated?
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_event_loop.rb | test/plugin_helper/test_event_loop.rb | require_relative '../helper'
require 'fluent/plugin_helper/event_loop'
require 'fluent/plugin/base'
class EventLoopTest < Test::Unit::TestCase
class Dummy < Fluent::Plugin::TestBase
helpers :event_loop
def configure(conf)
super
@_event_loop_run_timeout = 0.1
end
end
test 'can be instantiated to be able to create event loop' do
d1 = Dummy.new
assert d1.respond_to?(:event_loop_attach)
assert d1.respond_to?(:event_loop_running?)
assert d1.respond_to?(:_event_loop)
assert d1._event_loop
assert !d1.event_loop_running?
end
test 'can be configured' do
d1 = Dummy.new
assert_nothing_raised do
d1.configure(config_element())
end
assert d1.plugin_id
assert d1.log
end
test 'can run event loop by start, stop by shutdown/close and clear by terminate' do
d1 = Dummy.new
d1.configure(config_element())
assert !d1.event_loop_running?
d1.start
d1.event_loop_wait_until_start
assert d1.event_loop_running?
assert_equal 1, d1._event_loop.watchers.size
d1.shutdown
d1.close
assert !d1.event_loop_running?
d1.terminate
assert_nil d1._event_loop
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_record_accessor.rb | test/plugin_helper/test_record_accessor.rb | require_relative '../helper'
require 'fluent/plugin_helper/record_accessor'
require 'fluent/plugin/base'
require 'time'
class RecordAccessorHelperTest < Test::Unit::TestCase
class Dummy < Fluent::Plugin::TestBase
helpers :record_accessor
end
sub_test_case 'parse nested key expression' do
data('normal' => 'key1',
'space' => 'ke y2',
'dot key' => 'this.is.key3')
test 'parse single key' do |param|
result = Fluent::PluginHelper::RecordAccessor::Accessor.parse_parameter(param)
assert_equal param, result
end
test "nested bracket keys with dot" do
result = Fluent::PluginHelper::RecordAccessor::Accessor.parse_parameter("$['key1']['this.is.key3']")
assert_equal ['key1', 'this.is.key3'], result
end
data('dot' => '$.key1.key2[0]',
'bracket' => "$['key1']['key2'][0]",
'bracket w/ double quotes' => '$["key1"]["key2"][0]')
test "nested keys ['key1', 'key2', 0]" do |param|
result = Fluent::PluginHelper::RecordAccessor::Accessor.parse_parameter(param)
assert_equal ['key1', 'key2', 0], result
end
data('bracket' => "$['key1'][0]['ke y2']",
'bracket w/ double quotes' => '$["key1"][0]["ke y2"]')
test "nested keys ['key1', 0, 'ke y2']" do |param|
result = Fluent::PluginHelper::RecordAccessor::Accessor.parse_parameter(param)
assert_equal ['key1', 0, 'ke y2'], result
end
data('dot' => '$.[0].key1.[1].key2',
'bracket' => "$[0]['key1'][1]['key2']",
'bracket w/ double quotes' => '$[0]["key1"][1]["key2"]')
test "nested keys [0, 'key1', 1, 'key2']" do |param|
result = Fluent::PluginHelper::RecordAccessor::Accessor.parse_parameter(param)
assert_equal [0, 'key1', 1, 'key2'], result
end
data("missing ']'" => "$['key1'",
"missing array index with dot" => "$.hello[]",
"missing array index with bracket" => "$[]",
"more chars" => "$.key1[0]foo",
"whitespace char included key in dot notation" => "$.key[0].ke y",
"empty keys with dot" => "$.",
"empty keys with bracket" => "$[",
"mismatched quotes1" => "$['key1']['key2\"]",
"mismatched quotes2" => '$["key1"]["key2\']')
test 'invalid syntax' do |param|
assert_raise Fluent::ConfigError do
Fluent::PluginHelper::RecordAccessor::Accessor.parse_parameter(param)
end
end
end
sub_test_case 'attr_reader :keys' do
setup do
@d = Dummy.new
end
data('normal' => 'key1',
'space' => 'ke y2',
'dot key' => 'this.is.key3')
test 'access single key' do |param|
accessor = @d.record_accessor_create(param)
assert_equal param, accessor.keys
end
test "nested bracket keys with dot" do
accessor = @d.record_accessor_create("$['key1']['this.is.key3']")
assert_equal ['key1','this.is.key3'], accessor.keys
end
data('dot' => '$.key1.key2[0]',
'bracket' => "$['key1']['key2'][0]",
'bracket w/ double quotes' => '$["key1"]["key2"][0]')
test "nested keys ['key1', 'key2', 0]" do |param|
accessor = @d.record_accessor_create(param)
assert_equal ['key1', 'key2', 0], accessor.keys
end
data('bracket' => "$['key1'][0]['ke y2']",
'bracket w/ double quotes' => '$["key1"][0]["ke y2"]')
test "nested keys ['key1', 0, 'ke y2']" do |param|
accessor = @d.record_accessor_create(param)
assert_equal ['key1', 0, 'ke y2'], accessor.keys
end
end
sub_test_case Fluent::PluginHelper::RecordAccessor::Accessor do
setup do
@d = Dummy.new
end
data('normal' => 'key1',
'space' => 'ke y2',
'dot key' => 'this.is.key3')
test 'access single key' do |param|
r = {'key1' => 'v1', 'ke y2' => 'v2', 'this.is.key3' => 'v3'}
accessor = @d.record_accessor_create(param)
assert_equal r[param], accessor.call(r)
end
test "access single dot key using bracket style" do
r = {'key1' => 'v1', 'ke y2' => 'v2', 'this.is.key3' => 'v3'}
accessor = @d.record_accessor_create('$["this.is.key3"]')
assert_equal 'v3', accessor.call(r)
end
test "nested bracket keys with dot" do
r = {'key1' => {'this.is.key3' => 'value'}}
accessor = @d.record_accessor_create("$['key1']['this.is.key3']")
assert_equal 'value', accessor.call(r)
end
data('dot' => '$.key1.key2[0]',
'bracket' => "$['key1']['key2'][0]",
'bracket w/ double quotes' => '$["key1"]["key2"][0]')
test "nested keys ['key1', 'key2', 0]" do |param|
r = {'key1' => {'key2' => [1, 2, 3]}}
accessor = @d.record_accessor_create(param)
assert_equal 1, accessor.call(r)
end
data('bracket' => "$['key1'][0]['ke y2']",
'bracket w/ double quotes' => '$["key1"][0]["ke y2"]')
test "nested keys ['key1', 0, 'ke y2']" do |param|
r = {'key1' => [{'ke y2' => "value"}]}
accessor = @d.record_accessor_create(param)
assert_equal 'value', accessor.call(r)
end
data("missing ']'" => "$['key1'",
"missing array index with dot" => "$.hello[]",
"missing array index with bracket" => "$['hello'][]",
"whitespace char included key in dot notation" => "$.key[0].ke y",
"more chars" => "$.key1[0]foo",
"empty keys with dot" => "$.",
"empty keys with bracket" => "$[",
"mismatched quotes1" => "$['key1']['key2\"]",
"mismatched quotes2" => '$["key1"]["key2\']')
test 'invalid syntax' do |param|
assert_raise Fluent::ConfigError do
@d.record_accessor_create(param)
end
end
end
sub_test_case 'Fluent::PluginHelper::RecordAccessor::Accessor#delete' do
setup do
@d = Dummy.new
end
data('normal' => 'key1',
'space' => 'ke y2',
'dot key' => 'this.is.key3')
test 'delete top key' do |param|
r = {'key1' => 'v1', 'ke y2' => 'v2', 'this.is.key3' => 'v3'}
accessor = @d.record_accessor_create(param)
accessor.delete(r)
assert_not_include(r, param)
end
test "delete top key using bracket style" do
r = {'key1' => 'v1', 'ke y2' => 'v2', 'this.is.key3' => 'v3'}
accessor = @d.record_accessor_create('$["this.is.key3"]')
accessor.delete(r)
assert_not_include(r, 'this.is.key3')
end
data('bracket' => "$['key1'][0]['ke y2']",
'bracket w/ double quotes' => '$["key1"][0]["ke y2"]')
test "delete nested keys ['key1', 0, 'ke y2']" do |param|
r = {'key1' => [{'ke y2' => "value"}]}
accessor = @d.record_accessor_create(param)
accessor.delete(r)
assert_not_include(r['key1'][0], 'ke y2')
end
test "don't raise an error when unexpected record is coming" do
r = {'key1' => [{'key3' => "value"}]}
accessor = @d.record_accessor_create("$['key1']['key2']['key3']")
assert_nothing_raised do
assert_nil accessor.delete(r)
end
end
end
sub_test_case 'Fluent::PluginHelper::RecordAccessor::Accessor#set' do
setup do
@d = Dummy.new
end
data('normal' => 'key1',
'space' => 'ke y2',
'dot key' => 'this.is.key3')
test 'set top key' do |param|
r = {'key1' => 'v1', 'ke y2' => 'v2', 'this.is.key3' => 'v3'}
accessor = @d.record_accessor_create(param)
accessor.set(r, "test")
assert_equal "test", r[param]
end
test "set top key using bracket style" do
r = {'key1' => 'v1', 'ke y2' => 'v2', 'this.is.key3' => 'v3'}
accessor = @d.record_accessor_create('$["this.is.key3"]')
accessor.set(r, "test")
assert_equal "test", r["this.is.key3"]
end
data('bracket' => "$['key1'][0]['ke y2']",
'bracket w/ double quotes' => '$["key1"][0]["ke y2"]')
test "set nested keys ['key1', 0, 'ke y2']" do |param|
r = {'key1' => [{'ke y2' => "value"}]}
accessor = @d.record_accessor_create(param)
accessor.set(r, "nested_message")
assert_equal "nested_message", r['key1'][0]['ke y2']
end
test "don't raise an error when unexpected record is coming" do
r = {'key1' => [{'key3' => "value"}]}
accessor = @d.record_accessor_create("$['key1']['key2']['key3']")
assert_nothing_raised do
accessor.set(r, "unknown field")
end
assert_equal({'key1' => [{'key3' => "value"}]}, r)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_metrics.rb | test/plugin_helper/test_metrics.rb | require_relative '../helper'
require 'fluent/plugin_helper/metrics'
require 'fluent/plugin/base'
# Tests for the `metrics` plugin helper: creation of LocalMetrics
# instances, counter vs. gauge mode, lifecycle propagation from the
# owner plugin, and the auto-generated getter methods.
class MetricsTest < Test::Unit::TestCase
  # Minimal plugin that mixes in the metrics helper under test.
  class Dummy < Fluent::Plugin::TestBase
    helpers :metrics
    def configure(conf)
      super
    end
  end

  setup do
    # Tests that need teardown-driven shutdown assign their plugin to @d.
    @d = nil
  end

  teardown do
    if @d
      # Run the remaining lifecycle steps; each is guarded so a test may
      # have already advanced the plugin to any stage.
      @d.stop unless @d.stopped?
      @d.shutdown unless @d.shutdown?
      @d.close unless @d.closed?
      @d.terminate unless @d.terminated?
    end
  end

  test 'can be initialized without any metrics at first' do
    d = Dummy.new
    assert_equal 0, d._metrics.size
  end

  test 'can be configured' do
    d1 = Dummy.new
    assert_nothing_raised do
      d1.configure(config_element())
    end
    assert d1.plugin_id
    assert d1.log
  end

  test 'creates metrics instances' do
    d = Dummy.new
    # Default instance behaves as a counter (no gauge methods).
    i = d.metrics_create(namespace: "fluentd_test", subsystem: "unit-test", name: "metrics1", help_text: "metrics testing")
    d.configure(config_element())
    assert do
      d.instance_variable_get(:@plugin_type_or_id).include?("dummy.object")
    end
    assert{ i.is_a?(Fluent::Plugin::LocalMetrics) }
    assert_true i.has_methods_for_counter
    assert_false i.has_methods_for_gauge

    # With prefer_gauge: true the instance exposes gauge methods instead.
    d = Dummy.new
    i = d.metrics_create(namespace: "fluentd_test", subsystem: "unit-test", name: "metrics2", help_text: "metrics testing", prefer_gauge: true)
    d.configure(config_element())
    assert do
      d.instance_variable_get(:@plugin_type_or_id).include?("dummy.object")
    end
    assert{ i.is_a?(Fluent::Plugin::LocalMetrics) }
    assert_false i.has_methods_for_counter
    assert_true i.has_methods_for_gauge
  end

  test 'calls lifecycle methods for all plugin instances via owner plugin' do
    @d = d = Dummy.new
    i1 = d.metrics_create(namespace: "fluentd_test", subsystem: "unit-test", name: "metrics1", help_text: "metrics testing")
    i2 = d.metrics_create(namespace: "fluentd_test", subsystem: "unit-test", name: "metrics2", help_text: "metrics testing", prefer_gauge: true)
    i3 = d.metrics_create(namespace: "fluentd_test", subsystem: "unit-test", name: "metrics3", help_text: "metrics testing")
    d.configure(config_element())
    assert do
      d.instance_variable_get(:@plugin_type_or_id).include?("dummy.object")
    end
    d.start

    # Each owner transition must propagate to every metrics instance, and
    # must NOT advance them to later stages prematurely.
    assert i1.started?
    assert i2.started?
    assert i3.started?

    assert !i1.stopped?
    assert !i2.stopped?
    assert !i3.stopped?

    d.stop

    assert i1.stopped?
    assert i2.stopped?
    assert i3.stopped?

    assert !i1.before_shutdown?
    assert !i2.before_shutdown?
    assert !i3.before_shutdown?

    d.before_shutdown

    assert i1.before_shutdown?
    assert i2.before_shutdown?
    assert i3.before_shutdown?

    assert !i1.shutdown?
    assert !i2.shutdown?
    assert !i3.shutdown?

    d.shutdown

    assert i1.shutdown?
    assert i2.shutdown?
    assert i3.shutdown?

    assert !i1.after_shutdown?
    assert !i2.after_shutdown?
    assert !i3.after_shutdown?

    d.after_shutdown

    assert i1.after_shutdown?
    assert i2.after_shutdown?
    assert i3.after_shutdown?

    assert !i1.closed?
    assert !i2.closed?
    assert !i3.closed?

    d.close

    assert i1.closed?
    assert i2.closed?
    assert i3.closed?

    assert !i1.terminated?
    assert !i2.terminated?
    assert !i3.terminated?

    d.terminate

    assert i1.terminated?
    assert i2.terminated?
    assert i3.terminated?
  end

  test 'can create getter method by metrics name' do
    @d = d = Dummy.new
    # The getter does not exist before the metrics instance is created...
    assert_raise(NoMethodError) do
      d.foobarbaz
    end
    # ...and afterwards it reflects the current metrics value.
    metrics = d.metrics_create(namespace: "fluentd_test", subsystem: "unit-test", name: "foobarbaz", help_text: "metrics testing")
    metrics.inc
    assert_equal(1, d.foobarbaz)
    assert_equal(1, metrics.get)
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_server.rb | test/plugin_helper/test_server.rb | require_relative '../helper'
require 'fluent/plugin_helper/server'
require 'fluent/plugin_helper/cert_option' # to create certs for tests
require 'fluent/plugin/base'
require 'timeout'
require 'serverengine'
require 'fileutils'
class ServerPluginHelperTest < Test::Unit::TestCase
class Dummy < Fluent::Plugin::TestBase
helpers :server
end
TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/../tmp/plugin_helper_server")
setup do
@port = unused_port(protocol: :tcp)
if Fluent.windows?
@socket_manager_server = ServerEngine::SocketManager::Server.open
@socket_manager_path = @socket_manager_server.path
else
@socket_manager_path = ServerEngine::SocketManager::Server.generate_path
if @socket_manager_path.is_a?(String) && File.exist?(@socket_manager_path)
FileUtils.rm_f @socket_manager_path
end
@socket_manager_server = ServerEngine::SocketManager::Server.open(@socket_manager_path)
end
ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = @socket_manager_path.to_s
@d = Dummy.new
@d.under_plugin_development = true
@d.start
@d.after_start
end
teardown do
(@d.stopped? || @d.stop) rescue nil
(@d.before_shutdown? || @d.before_shutdown) rescue nil
(@d.shutdown? || @d.shutdown) rescue nil
(@d.after_shutdown? || @d.after_shutdown) rescue nil
(@d.closed? || @d.close) rescue nil
(@d.terminated? || @d.terminate) rescue nil
@socket_manager_server.close
if @socket_manager_path.is_a?(String) && File.exist?(@socket_manager_path)
FileUtils.rm_f @socket_manager_path
end
end
sub_test_case 'plugin instance' do
test 'can be instantiated to be able to create threads' do
d = Dummy.new
assert d.respond_to?(:_servers)
assert d._servers.empty?
assert d.respond_to?(:server_wait_until_start)
assert d.respond_to?(:server_wait_until_stop)
assert d.respond_to?(:server_create_connection)
assert d.respond_to?(:server_create)
assert d.respond_to?(:server_create_tcp)
assert d.respond_to?(:server_create_udp)
assert d.respond_to?(:server_create_tls)
end
test 'can be configured' do
d = Dummy.new
assert_nothing_raised do
d.configure(config_element())
end
assert d.plugin_id
assert d.log
assert_equal 0, d.transport_config.linger_timeout
end
test 'can change linger_timeout option' do
d = Dummy.new
transport_opts = {
'linger_timeout' => 1,
}
transport_conf = config_element('transport', 'tcp', transport_opts)
conf = config_element('source', 'tag.*', {}, [transport_conf])
assert_nothing_raised do
d.configure(conf)
end
assert d.plugin_id
assert d.log
assert_equal 1, d.transport_config.linger_timeout
end
test 'can change receive_buffer_size option' do
d = Dummy.new
transport_opts = {
'receive_buffer_size' => 1024,
}
transport_conf = config_element('transport', 'tcp', transport_opts)
conf = config_element('source', 'tag.*', {}, [transport_conf])
assert_nothing_raised do
d.configure(conf)
end
assert d.plugin_id
assert d.log
assert_equal 1024, d.transport_config.receive_buffer_size
end
end
# run tests for tcp, udp, tls and unix
sub_test_case '#server_create and #server_create_connection' do
methods = {server_create: :server_create, server_create_connection: :server_create_connection}
data(methods)
test 'raise error if title is not specified or not a symbol' do |m|
assert_raise(ArgumentError.new("BUG: title must be a symbol")) do
@d.__send__(m, nil, @port){|x| x }
end
assert_raise(ArgumentError.new("BUG: title must be a symbol")) do
@d.__send__(m, "", @port){|x| x }
end
assert_raise(ArgumentError.new("BUG: title must be a symbol")) do
@d.__send__(m, "title", @port){|x| x }
end
assert_nothing_raised do
@d.__send__(m, :myserver, @port){|x| x }
end
end
data(methods)
test 'raise error if port is not specified or not an integer' do |m|
assert_raise(ArgumentError.new("BUG: port must be an integer")) do
@d.__send__(m, :myserver, nil){|x| x }
end
assert_raise(ArgumentError.new("BUG: port must be an integer")) do
@d.__send__(m, :myserver, "1"){|x| x }
end
assert_raise(ArgumentError.new("BUG: port must be an integer")) do
@d.__send__(m, :myserver, 1.5){|x| x }
end
assert_nothing_raised do
@d.__send__(m, :myserver, @port){|x| x }
end
end
data(methods)
test 'raise error if block is not specified' do |m|
assert_raise(ArgumentError) do
@d.__send__(m, :myserver, @port)
end
assert_nothing_raised do
@d.__send__(m, :myserver, @port){|x| x }
end
end
data(methods)
test 'creates tcp server, binds 0.0.0.0 in default' do |m|
@d.__send__(m, :myserver, @port){|x| x }
assert_equal 1, @d._servers.size
created_server_info = @d._servers.first
assert_equal :myserver, created_server_info.title
assert_equal @port, created_server_info.port
assert_equal :tcp, created_server_info.proto
assert_equal "0.0.0.0", created_server_info.bind
created_server = created_server_info.server
assert created_server.is_a?(Coolio::TCPServer)
assert_equal "0.0.0.0", created_server.instance_eval{ @listen_socket }.addr[3]
end
data(methods)
test 'creates tcp server if specified in proto' do |m|
@d.__send__(m, :myserver, @port, proto: :tcp){|x| x }
created_server_info = @d._servers.first
assert_equal :tcp, created_server_info.proto
created_server = created_server_info.server
assert created_server.is_a?(Coolio::TCPServer)
end
data(methods)
test 'creates tls server in default if transport section and tcp protocol specified' do |m|
@d = d = Dummy.new
transport_conf = config_element('transport', 'tcp', {}, [])
d.configure(config_element('ROOT', '', {}, [transport_conf]))
d.start
d.after_start
d.__send__(m, :myserver, @port){|x| x }
created_server_info = @d._servers.first
assert_equal :tcp, created_server_info.proto
created_server = created_server_info.server
assert created_server.is_a?(Coolio::TCPServer)
end
data(methods)
test 'creates tls server if specified in proto' do |m|
assert_raise(ArgumentError.new("BUG: TLS transport specified, but certification options are not specified")) do
@d.__send__(m, :myserver, @port, proto: :tls){|x| x }
end
@d.__send__(m, :myserver, @port, proto: :tls, tls_options: {insecure: true}){|x| x }
created_server_info = @d._servers.first
assert_equal :tls, created_server_info.proto
created_server = created_server_info.server
assert created_server.is_a?(Coolio::TCPServer) # yes, TCP here
end
data(methods)
test 'creates tls server in default if transport section and tls protocol specified' do |m|
@d = d = Dummy.new
transport_conf = config_element('transport', 'tls', {'insecure' => 'true'}, [])
d.configure(config_element('ROOT', '', {}, [transport_conf]))
d.start
d.after_start
d.__send__(m, :myserver, @port){|x| x }
created_server_info = @d._servers.first
assert_equal :tls, created_server_info.proto
created_server = created_server_info.server
assert created_server.is_a?(Coolio::TCPServer) # OK, it's Coolio::TCPServer
end
data(methods)
test 'creates unix server if specified in proto' do |m|
# pend "not implemented yet"
end
data(methods)
test 'raise error if unknown protocol specified' do |m|
assert_raise(ArgumentError.new("BUG: invalid protocol name")) do
@d.__send__(m, :myserver, @port, proto: :quic){|x| x }
end
end
data(
'server_create tcp' => [:server_create, :tcp],
'server_create tls' => [:server_create, :tls],
# 'server_create unix' => [:server_create, :unix],
'server_create_connection tcp' => [:server_create_connection, :tcp],
'server_create_connection tls' => [:server_create_connection, :tls],
# 'server_create_connection tcp' => [:server_create_connection, :unix],
)
test 'raise error if udp options specified for tcp/tls/unix' do |(m, proto)|
port = unused_port(protocol: proto)
assert_raise ArgumentError do
@d.__send__(m, :myserver, port, proto: proto, max_bytes: 128){|x| x }
end
assert_raise ArgumentError do
@d.__send__(m, :myserver, port, proto: proto, flags: 1){|x| x }
end
end
data(
'server_create udp' => [:server_create, :udp],
)
test 'raise error if tcp/tls options specified for udp' do |(m, proto)|
port = unused_port(protocol: proto)
assert_raise(ArgumentError.new("BUG: linger_timeout is available for tcp/tls")) do
@d.__send__(m, :myserver, port, proto: proto, linger_timeout: 1, max_bytes: 128){|x| x }
end
end
data(
'server_create udp' => [:server_create, :udp],
)
test 'raise error if tcp/tls/unix backlog options specified for udp' do |(m, proto)|
port = unused_port(protocol: proto)
assert_raise(ArgumentError.new("BUG: backlog is available for tcp/tls")) do
@d.__send__(m, :myserver, port, proto: proto, backlog: 500){|x| x }
end
end
data(
'server_create udp' => [:server_create, :udp],
)
test 'raise error if tcp/tls send_keepalive_packet option is specified for udp' do |(m, proto)|
port = unused_port(protocol: proto)
assert_raise(ArgumentError.new("BUG: send_keepalive_packet is available for tcp/tls")) do
@d.__send__(m, :myserver, port, proto: proto, send_keepalive_packet: true){|x| x }
end
end
data(
'server_create tcp' => [:server_create, :tcp, {}],
'server_create udp' => [:server_create, :udp, {max_bytes: 128}],
# 'server_create unix' => [:server_create, :unix, {}],
'server_create_connection tcp' => [:server_create_connection, :tcp, {}],
# 'server_create_connection unix' => [:server_create_connection, :unix, {}],
)
test 'raise error if tls options specified for tcp/udp/unix' do |(m, proto, kwargs)|
port = unused_port(protocol: proto)
assert_raise(ArgumentError.new("BUG: tls_options is available only for tls")) do
@d.__send__(m, :myserver, port, proto: proto, tls_options: {}, **kwargs){|x| x }
end
end
data(
'server_create tcp' => [:server_create, :tcp, {}],
'server_create udp' => [:server_create, :udp, {max_bytes: 128}],
'server_create tls' => [:server_create, :tls, {tls_options: {insecure: true}}],
'server_create_connection tcp' => [:server_create_connection, :tcp, {}],
'server_create_connection tls' => [:server_create_connection, :tls, {tls_options: {insecure: true}}],
)
test 'can bind specified IPv4 address' do |(m, proto, kwargs)|
port = unused_port(protocol: proto)
@d.__send__(m, :myserver, port, proto: proto, bind: "127.0.0.1", **kwargs){|x| x }
assert_equal "127.0.0.1", @d._servers.first.bind
assert_equal "127.0.0.1", @d._servers.first.server.instance_eval{ instance_variable_defined?(:@listen_socket) ? @listen_socket : @_io }.addr[3]
end
data(
'server_create tcp' => [:server_create, :tcp, {}],
'server_create udp' => [:server_create, :udp, {max_bytes: 128}],
'server_create tls' => [:server_create, :tls, {tls_options: {insecure: true}}],
'server_create_connection tcp' => [:server_create_connection, :tcp, {}],
'server_create_connection tls' => [:server_create_connection, :tls, {tls_options: {insecure: true}}],
)
test 'can bind specified IPv6 address' do |(m, proto, kwargs)| # if available
omit "IPv6 unavailable here" unless ipv6_enabled?
port = unused_port(protocol: proto)
@d.__send__(m, :myserver, port, proto: proto, bind: "::1", **kwargs){|x| x }
assert_equal "::1", @d._servers.first.bind
assert_equal "::1", @d._servers.first.server.instance_eval{ instance_variable_defined?(:@listen_socket) ? @listen_socket : @_io }.addr[3]
end
data(
'server_create tcp' => [:server_create, :tcp, {}],
'server_create udp' => [:server_create, :udp, {max_bytes: 128}],
'server_create tls' => [:server_create, :tls, {tls_options: {insecure: true}}],
# 'server_create unix' => [:server_create, :unix, {}],
'server_create_connection tcp' => [:server_create, :tcp, {}],
'server_create_connection tls' => [:server_create, :tls, {tls_options: {insecure: true}}],
# 'server_create_connection unix' => [:server_create, :unix, {}],
)
test 'can create 2 or more servers which share same bind address and port if shared option is true' do |(m, proto, kwargs)|
begin
d2 = Dummy.new; d2.start; d2.after_start
port = unused_port(protocol: proto)
assert_nothing_raised do
@d.__send__(m, :myserver, port, proto: proto, **kwargs){|x| x }
d2.__send__(m, :myserver, port, proto: proto, **kwargs){|x| x }
end
ensure
d2.stop; d2.before_shutdown; d2.shutdown; d2.after_shutdown; d2.close; d2.terminate
end
end
data(
'server_create tcp' => [:server_create, :tcp, {}],
# Disable udp test because the behaviour of SO_REUSEXXX option is different between BSD, Linux and others...
# Need to find good way for testing on local, CI service and others.
#'server_create udp' => [:server_create, :udp, {max_bytes: 128}],
'server_create tls' => [:server_create, :tls, {tls_options: {insecure: true}}],
# 'server_create unix' => [:server_create, :unix, {}],
'server_create_connection tcp' => [:server_create, :tcp, {}],
'server_create_connection tls' => [:server_create, :tls, {tls_options: {insecure: true}}],
# 'server_create_connection unix' => [:server_create, :unix, {}],
)
test 'cannot create 2 or more servers using same bind address and port if shared option is false' do |(m, proto, kwargs)|
begin
d2 = Dummy.new; d2.start; d2.after_start
port = unused_port(protocol: proto)
assert_nothing_raised do
@d.__send__(m, :myserver, port, proto: proto, shared: false, **kwargs){|x| x }
end
assert_raise(Errno::EADDRINUSE, Errno::EACCES) do
d2.__send__(m, :myserver, port, proto: proto, **kwargs){|x| x }
end
ensure
d2.stop; d2.before_shutdown; d2.shutdown; d2.after_shutdown; d2.close; d2.terminate
end
end
test 'close all connections by shutdown' do
@d.server_create_tcp(:s, @port) do |data, conn|
end
client_sockets = []
5.times do
client_sockets << TCPSocket.open("127.0.0.1", @port)
end
waiting(4){ sleep 0.1 until @d.instance_variable_get(:@_server_connections).size == 5 }
@d.stop
@d.before_shutdown
@d.shutdown
assert_true @d.instance_variable_get(:@_server_connections).empty?
ensure
client_sockets.each(&:close)
end
end
sub_test_case '#server_create' do
data(
'tcp' => [:tcp, {}],
'udp' => [:udp, {max_bytes: 128}],
'tls' => [:tls, {tls_options: {insecure: true}}],
# 'unix' => [:unix, {}],
)
test 'raise error if block argument is not specified or too many' do |(proto, kwargs)|
port = unused_port(protocol: proto)
assert_raise(ArgumentError.new("BUG: block must have 1 or 2 arguments")) do
@d.server_create(:myserver, port, proto: proto, **kwargs){ 1 }
end
assert_raise(ArgumentError.new("BUG: block must have 1 or 2 arguments")) do
@d.server_create(:myserver, port, proto: proto, **kwargs){|sock, conn, what_is_this| 1 }
end
end
test 'creates udp server if specified in proto' do
port = unused_port(protocol: :udp)
@d.server_create(:myserver, port, proto: :udp, max_bytes: 512){|x| x }
created_server_info = @d._servers.first
assert_equal :udp, created_server_info.proto
created_server = created_server_info.server
assert created_server.is_a?(Fluent::PluginHelper::Server::EventHandler::UDPServer)
end
end
sub_test_case '#server_create_tcp' do
test 'can accept all keyword arguments valid for tcp server' do
assert_nothing_raised do
@d.server_create_tcp(:s, @port, bind: '127.0.0.1', shared: false, resolve_name: true, linger_timeout: 10, backlog: 500, send_keepalive_packet: true) do |data, conn|
# ...
end
end
end
test 'creates a tcp server just to read data' do
received = ""
@d.server_create_tcp(:s, @port) do |data|
received << data
end
3.times do
sock = TCPSocket.new("127.0.0.1", @port)
sock.puts "yay"
sock.puts "foo"
sock.close
end
waiting(10){ sleep 0.1 until received.bytesize == 24 }
assert_equal "yay\nfoo\nyay\nfoo\nyay\nfoo\n", received
end
test 'creates a tcp server to read and write data' do
received = ""
responses = []
@d.server_create_tcp(:s, @port) do |data, conn|
received << data
conn.write "ack\n"
end
3.times do
TCPSocket.open("127.0.0.1", @port) do |sock|
sock.puts "yay"
sock.puts "foo"
responses << sock.readline
end
end
waiting(10){ sleep 0.1 until received.bytesize == 24 }
assert_equal "yay\nfoo\nyay\nfoo\nyay\nfoo\n", received
assert_equal ["ack\n","ack\n","ack\n"], responses
end
test 'creates a tcp server to read and write data using IPv6' do
omit "IPv6 unavailable here" unless ipv6_enabled?
received = ""
responses = []
@d.server_create_tcp(:s, @port, bind: "::1") do |data, conn|
received << data
conn.write "ack\n"
end
3.times do
TCPSocket.open("::1", @port) do |sock|
sock.puts "yay"
sock.puts "foo"
responses << sock.readline
end
end
waiting(10){ sleep 0.1 until received.bytesize == 24 }
assert_equal "yay\nfoo\nyay\nfoo\nyay\nfoo\n", received
assert_equal ["ack\n","ack\n","ack\n"], responses
end
test 'does not resolve name of client address in default' do
received = ""
sources = []
@d.server_create_tcp(:s, @port) do |data, conn|
received << data
sources << conn.remote_host
end
3.times do
TCPSocket.open("127.0.0.1", @port) do |sock|
sock.puts "yay"
end
end
waiting(10){ sleep 0.1 until received.bytesize == 12 }
assert_equal "yay\nyay\nyay\n", received
assert{ sources.all?("127.0.0.1") }
end
test 'does resolve name of client address if resolve_name is true' do
hostname = Socket.getnameinfo([nil, nil, nil, "127.0.0.1"])[0]
received = ""
sources = []
@d.server_create_tcp(:s, @port, resolve_name: true) do |data, conn|
received << data
sources << conn.remote_host
end
3.times do
TCPSocket.open("127.0.0.1", @port) do |sock|
sock.puts "yay"
end
end
waiting(10){ sleep 0.1 until received.bytesize == 12 }
assert_equal "yay\nyay\nyay\n", received
assert{ sources.all?(hostname) }
end
test 'can keep connections alive for tcp if keepalive specified' do
# pend "not implemented yet"
end
test 'raises error if plugin registers data callback for connection object from #server_create' do
received = ""
errors = []
@d.server_create_tcp(:s, @port) do |data, conn|
received << data
begin
conn.data{|d| received << d.upcase }
rescue => e
errors << e
end
end
TCPSocket.open("127.0.0.1", @port) do |sock|
sock.puts "foo"
end
waiting(10){ sleep 0.1 until received.bytesize == 4 || errors.size == 1 }
assert_equal "foo\n", received
assert{ errors.size > 0 } # it might be called twice (or more) when connection was accepted, and then data arrived (or more)
assert_equal "data callback can be registered just once, but registered twice", errors.first.message
end
test 'can call write_complete callback if registered' do
buffer = ""
lines = []
responses = []
response_completes = []
@d.server_create_tcp(:s, @port) do |data, conn|
conn.on(:write_complete){|c| response_completes << true }
buffer << data
if idx = buffer.index("\n")
lines << buffer.slice!(0,idx+1)
conn.write "ack\n"
end
end
3.times do
TCPSocket.open("127.0.0.1", @port) do |sock|
sock.write "yay"
sock.write "foo\n"
begin
responses << sock.readline
rescue EOFError, IOError, Errno::ECONNRESET
# ignore
end
sock.close
end
end
waiting(10){ sleep 0.1 until lines.size == 3 && response_completes.size == 3 }
assert_equal ["yayfoo\n", "yayfoo\n", "yayfoo\n"], lines
assert_equal ["ack\n","ack\n","ack\n"], responses
assert_equal [true, true, true], response_completes
end
test 'can call close callback if registered' do
buffer = ""
lines = []
callback_results = []
@d.server_create_tcp(:s, @port) do |data, conn|
conn.on(:close){|c| callback_results << "closed" }
buffer << data
if idx = buffer.index("\n")
lines << buffer.slice!(0,idx+1)
conn.write "ack\n"
end
end
3.times do
TCPSocket.open("127.0.0.1", @port) do |sock|
sock.write "yay"
sock.write "foo\n"
begin
while line = sock.readline
if line == "ack\n"
sock.close
end
end
rescue EOFError, IOError, Errno::ECONNRESET
# ignore
end
end
end
waiting(10){ sleep 0.1 until lines.size == 3 && callback_results.size == 3 }
assert_equal ["yayfoo\n", "yayfoo\n", "yayfoo\n"], lines
assert_equal ["closed", "closed", "closed"], callback_results
end
test 'can listen IPv4 / IPv6 together' do
omit "IPv6 unavailable here" unless ipv6_enabled?
assert_nothing_raised do
@d.server_create_tcp(:s_ipv4, @port, bind: '0.0.0.0', shared: false) do |data, conn|
# ...
end
@d.server_create_tcp(:s_ipv6, @port, bind: '::', shared: false) do |data, conn|
# ...
end
end
end
end
sub_test_case '#server_create_udp' do
test 'can accept all keyword arguments valid for udp server' do
assert_nothing_raised do
port = unused_port(protocol: :udp)
@d.server_create_udp(:s, port, bind: '127.0.0.1', shared: false, resolve_name: true, max_bytes: 100, flags: 1) do |data, conn|
# ...
end
end
end
test 'creates a udp server just to read data' do
received = ""
port = unused_port(protocol: :udp)
@d.server_create_udp(:s, port, max_bytes: 128) do |data|
received << data
end
bind_port = unused_port(protocol: :udp, bind: "127.0.0.1")
3.times do
sock = UDPSocket.new(Socket::AF_INET)
sock.bind("127.0.0.1", bind_port)
sock.connect("127.0.0.1", port)
sock.puts "yay"
sock.puts "foo"
sock.close
end
waiting(10){ sleep 0.1 until received.bytesize == 24 }
assert_equal "yay\nfoo\nyay\nfoo\nyay\nfoo\n", received
end
test 'creates a udp server to read and write data' do
received = ""
responses = []
port = unused_port(protocol: :udp)
@d.server_create_udp(:s, port, max_bytes: 128) do |data, sock|
received << data
sock.write "ack\n"
end
bind_port = unused_port(protocol: :udp)
3.times do
begin
sock = UDPSocket.new(Socket::AF_INET)
sock.bind("127.0.0.1", bind_port)
sock.connect("127.0.0.1", port)
th = Thread.new do
while true
begin
in_data, _addr = sock.recvfrom_nonblock(16)
if in_data
responses << in_data
break
end
rescue IO::WaitReadable
IO.select([sock])
end
end
true
end
sock.write "yay\nfoo\n"
th.join(5)
ensure
sock.close
end
end
waiting(10){ sleep 0.1 until received.bytesize == 24 }
assert_equal "yay\nfoo\nyay\nfoo\nyay\nfoo\n", received
assert_equal ["ack\n","ack\n","ack\n"], responses
end
test 'creates a udp server to read and write data using IPv6' do
omit "IPv6 unavailable here" unless ipv6_enabled?
received = ""
responses = []
port = unused_port(protocol: :udp, bind: "::1")
@d.server_create_udp(:s, port, bind: "::1", max_bytes: 128) do |data, sock|
received << data
sock.write "ack\n"
end
bind_port = unused_port(protocol: :udp, bind: "::1")
3.times do
begin
sock = UDPSocket.new(Socket::AF_INET6)
sock.bind("::1", bind_port)
th = Thread.new do
responses << sock.recv(16)
true
end
sock.connect("::1", port)
sock.write "yay\nfoo\n"
th.join(5)
ensure
sock.close
end
end
waiting(10){ sleep 0.1 until received.bytesize == 24 }
assert_equal "yay\nfoo\nyay\nfoo\nyay\nfoo\n", received
assert_equal ["ack\n","ack\n","ack\n"], responses
end
test 'does not resolve name of client address in default' do
received = ""
sources = []
port = unused_port(protocol: :udp)
@d.server_create_udp(:s, port, max_bytes: 128) do |data, sock|
received << data
sources << sock.remote_host
end
3.times do
sock = UDPSocket.new(Socket::AF_INET)
sock.connect("127.0.0.1", port)
sock.puts "yay"
sock.close
end
waiting(10){ sleep 0.1 until received.bytesize == 12 }
assert_equal "yay\nyay\nyay\n", received
assert{ sources.all?("127.0.0.1") }
end
test 'does resolve name of client address if resolve_name is true' do
hostname = Socket.getnameinfo([nil, nil, nil, "127.0.0.1"])[0]
received = ""
sources = []
port = unused_port(protocol: :udp)
@d.server_create_udp(:s, port, resolve_name: true, max_bytes: 128) do |data, sock|
received << data
sources << sock.remote_host
end
3.times do
sock = UDPSocket.new(Socket::AF_INET)
sock.connect("127.0.0.1", port)
sock.puts "yay"
sock.close
end
waiting(10){ sleep 0.1 until received.bytesize == 12 }
assert_equal "yay\nyay\nyay\n", received
assert{ sources.all?(hostname) }
end
test 'raises error if plugin registers data callback for connection object from #server_create' do
received = ""
errors = []
port = unused_port(protocol: :udp)
@d.server_create_udp(:s, port, max_bytes: 128) do |data, sock|
received << data
begin
sock.data{|d| received << d.upcase }
rescue => e
errors << e
end
end
sock = UDPSocket.new(Socket::AF_INET)
sock.connect("127.0.0.1", port)
sock.write "foo\n"
sock.close
waiting(10){ sleep 0.1 until received.bytesize == 4 && errors.size == 1 }
assert_equal "foo\n", received
assert_equal 1, errors.size
assert_equal "BUG: this event is disabled for udp: data", errors.first.message
end
test 'raise error if plugin registers write_complete callback for udp' do
received = ""
errors = []
port = unused_port(protocol: :udp)
@d.server_create_udp(:s, port, max_bytes: 128) do |data, sock|
received << data
begin
sock.on(:write_complete){|conn| "" }
rescue => e
errors << e
end
end
sock = UDPSocket.new(Socket::AF_INET)
sock.connect("127.0.0.1", port)
sock.write "foo\n"
sock.close
waiting(10){ sleep 0.1 until received.bytesize == 4 && errors.size == 1 }
assert_equal "foo\n", received
assert_equal 1, errors.size
assert_equal "BUG: this event is disabled for udp: write_complete", errors.first.message
end
test 'raises error if plugin registers close callback for udp' do
received = ""
errors = []
port = unused_port(protocol: :udp)
@d.server_create_udp(:s, port, max_bytes: 128) do |data, sock|
received << data
begin
sock.on(:close){|d| "" }
rescue => e
errors << e
end
end
sock = UDPSocket.new(Socket::AF_INET)
sock.connect("127.0.0.1", port)
sock.write "foo\n"
sock.close
waiting(10){ sleep 0.1 until received.bytesize == 4 && errors.size == 1 }
assert_equal "foo\n", received
assert_equal 1, errors.size
assert_equal "BUG: this event is disabled for udp: close", errors.first.message
end
test 'can bind IPv4 / IPv6 together' do
omit "IPv6 unavailable here" unless ipv6_enabled?
port = unused_port(protocol: :udp)
assert_nothing_raised do
@d.server_create_udp(:s_ipv4_udp, port, bind: '0.0.0.0', shared: false, max_bytes: 128) do |data, sock|
# ...
end
@d.server_create_udp(:s_ipv6_udp, port, bind: '::', shared: false, max_bytes: 128) do |data, sock|
# ...
end
end
end
sub_test_case 'over max_bytes' do
data("cut off on Non-Windows", { max_bytes: 32, records: ["a" * 40], expected: ["a" * 32] }, keep: true) unless Fluent.windows?
data("drop on Windows", { max_bytes: 32, records: ["a" * 40], expected: [] }, keep: true) if Fluent.windows?
test 'with sock' do |data|
max_bytes, records, expected = data.values
actual_records = []
port = unused_port(protocol: :udp)
@d.server_create_udp(:myserver, port, max_bytes: max_bytes) do |data, sock|
actual_records << data
end
open_client(:udp, "127.0.0.1", port) do |sock|
records.each do |record|
sock.send(record, 0)
end
end
waiting(10) { sleep 0.1 until actual_records.size >= expected.size }
sleep 1 if expected.size == 0 # To confirm no record recieved.
assert_equal expected, actual_records
end
test 'without sock' do |data|
max_bytes, records, expected = data.values
actual_records = []
port = unused_port(protocol: :udp)
@d.server_create_udp(:myserver, port, max_bytes: max_bytes) do |data|
actual_records << data
end
open_client(:udp, "127.0.0.1", port) do |sock|
records.each do |record|
sock.send(record, 0)
end
end
waiting(10) { sleep 0.1 until actual_records.size >= expected.size }
sleep 1 if expected.size == 0 # To confirm no record received.
assert_equal expected, actual_records
end
end
end
module CertUtil
extend Fluent::PluginHelper::CertOption
end
def create_ca_options
{
private_key_length: 2048,
country: 'US',
state: 'CA',
locality: 'Mountain View',
common_name: 'ca.testing.fluentd.org',
expiration: 30 * 86400,
digest: :sha256,
}
end
def create_server_options
{
private_key_length: 2048,
country: 'US',
state: 'CA',
locality: 'Mountain View',
common_name: 'server.testing.fluentd.org',
expiration: 30 * 86400,
digest: :sha256,
}
end
def write_cert_and_key(cert_path, cert, key_path, key, passphrase)
File.open(cert_path, "w"){|f| f.write(cert.to_pem) }
# Write the secret key (raw or encrypted by AES256) in PEM format
key_str = passphrase ? key.export(OpenSSL::Cipher.new("AES-256-CBC"), passphrase) : key.export
File.open(key_path, "w"){|f| f.write(key_str) }
File.chmod(0600, cert_path, key_path)
end
def create_server_pair_signed_by_self(cert_path, private_key_path, passphrase)
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_thread.rb | test/plugin_helper/test_thread.rb | require_relative '../helper'
require 'fluent/plugin_helper/thread'
require 'fluent/plugin/base'
require 'timeout'
# Tests for the `thread` plugin helper: thread creation/bookkeeping,
# start synchronization, cooperative stop and forced termination.
class ThreadTest < Test::Unit::TestCase
  # Minimal plugin mixing in the thread helper; shortens the internal
  # wait interval so lifecycle tests run quickly.
  class Dummy < Fluent::Plugin::TestBase
    helpers :thread
    def configure(conf)
      super
      @_thread_wait_seconds = 0.1
      self
    end
  end

  test 'can be instantiated to be able to create threads' do
    d1 = Dummy.new
    assert d1.respond_to?(:thread_current_running?)
    assert d1.respond_to?(:thread_create)
    assert d1.respond_to?(:_threads)
    assert !d1.thread_current_running?
    assert d1._threads.empty?
  end

  test 'can be configured' do
    d1 = Dummy.new
    assert_nothing_raised do
      d1.configure(config_element())
    end
    assert d1.plugin_id
    assert d1.log
  end

  test 'can create thread after prepared' do
    d1 = Dummy.new
    d1.configure(config_element())
    d1.start

    # m1 blocks the worker until assertions about the running state are
    # done; m2 lets the main thread wait for the worker to get going.
    m1 = Mutex.new
    m2 = Mutex.new
    m1.lock
    thread_run = false
    Timeout.timeout(10) do
      t = d1.thread_create(:test1) do
        m2.lock
        assert !d1._threads.empty? # this must be true always
        assert d1.thread_current_running?
        thread_run = true
        m2.unlock
        m1.lock
      end
      Thread.pass until m2.locked? || thread_run
      m2.lock; m2.unlock
      assert_equal 1, d1._threads.size
      assert_equal :test1, t[:_fluentd_plugin_helper_thread_title]
      assert t[:_fluentd_plugin_helper_thread_running]
      assert !d1._threads.empty?
      m1.unlock
      # Wait for the worker to finish and deregister itself.
      while t[:_fluentd_plugin_helper_thread_running]
        Thread.pass
      end
    end
    assert d1._threads.empty?

    d1.stop; d1.shutdown; d1.close; d1.terminate
  end

  test 'can wait until all threads start' do
    d1 = Dummy.new.configure(config_element()).start
    ary = []
    d1.thread_create(:t1) do
      ary << 1
    end
    d1.thread_create(:t2) do
      ary << 2
    end
    d1.thread_create(:t3) do
      ary << 3
    end
    Timeout.timeout(10) do
      d1.thread_wait_until_start
    end
    # All three bodies must have begun executing by now.
    assert_equal [1,2,3], ary.sort

    d1.stop; d1.shutdown; d1.close; d1.terminate
  end

  test 'can stop threads which is watching thread_current_running?, and then close it' do
    d1 = Dummy.new.configure(config_element()).start
    m1 = Mutex.new
    thread_in_run = false
    Timeout.timeout(10) do
      t = d1.thread_create(:test2) do
        thread_in_run = true
        m1.lock
        # A well-behaved worker loop: exits when the helper flips the
        # running flag at stop.
        while d1.thread_current_running?
          Thread.pass
        end
        thread_in_run = false
        m1.unlock
      end
      Thread.pass until m1.locked?
      assert thread_in_run
      assert !d1._threads.empty?
      d1.stop
      Thread.pass while m1.locked?
      assert !t[:_fluentd_plugin_helper_thread_running]
      assert t.stop?
    end
    assert d1._threads.empty?
    d1.stop; d1.shutdown; d1.close; d1.terminate
  end

  test 'can terminate threads forcedly which is running forever' do
    d1 = Dummy.new.configure(config_element()).start
    m1 = Mutex.new
    thread_in_run = false
    Timeout.timeout(10) do
      t = d1.thread_create(:test2) do
        thread_in_run = true
        m1.lock
        # Misbehaving worker: ignores thread_current_running? and spins.
        while true
          Thread.pass
        end
        thread_in_run = false
      end
      Thread.pass until m1.locked?
      assert thread_in_run
      assert !d1._threads.empty?
      # The thread survives stop/shutdown/close and is only killed by
      # terminate.
      d1.stop
      assert !t[:_fluentd_plugin_helper_thread_running]
      assert t.alive?
      d1.shutdown
      assert t.alive?
      assert !d1._threads.empty?
      d1.close
      assert t.alive?
      assert !d1._threads.empty?
      d1.terminate
      assert t.stop?
      assert d1._threads.empty?
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_service_discovery.rb | test/plugin_helper/test_service_discovery.rb | require_relative '../helper'
require 'flexmock/test_unit'
require 'fluent/plugin_helper/service_discovery'
require 'fluent/plugin/output'
class ServiceDiscoveryHelper < Test::Unit::TestCase
class Dummy < Fluent::Plugin::TestBase
helpers :service_discovery
# Make these method public
def service_discovery_create_manager(title, configurations:, load_balancer: nil, custom_build_method: nil, interval: 3)
super
end
def discovery_manager
super
end
end
class DummyPlugin < Fluent::Plugin::TestBase
helpers :service_discovery
def configure(conf)
super
service_discovery_configure(:service_discovery_helper_test, static_default_service_directive: 'node')
end
def select_service(&block)
service_discovery_select_service(&block)
end
# Make these method public
def discovery_manager
super
end
end
setup do
@sd_file_dir = File.expand_path('../plugin/data/sd_file', __dir__)
@d = nil
end
teardown do
if @d
@d.stop unless @d.stopped?
@d.shutdown unless @d.shutdown?
@d.after_shutdown unless @d.after_shutdown?
@d.close unless @d.closed?
@d.terminate unless @d.terminated?
end
end
test 'support calling #service_discovery_create_manager and #discovery_manager from plugin' do
d = @d = Dummy.new
d.service_discovery_create_manager(
:service_discovery_helper_test,
configurations: [{ type: :static, conf: config_element('root', '', {}, [config_element('service', '', { 'host' => '127.0.0.1', 'port' => '1234' })]) }],
)
assert_true !!d.discovery_manager
mock.proxy(d.discovery_manager).start.once
mock.proxy(d).timer_execute(:service_discovery_helper_test, anything).never
d.start
d.event_loop_wait_until_start
services = d.discovery_manager.services
assert_equal 1, services.size
assert_equal '127.0.0.1', services[0].host
assert_equal 1234, services[0].port
end
test 'start discovery manager' do
d = @d = DummyPlugin.new
services = [config_element('service', '', { 'host' => '127.0.0.1', 'port' => '1234' })]
d.configure(config_element('root', '', {}, [config_element('service_discovery', '', {'@type' => 'static'}, services)]))
assert_true !!d.discovery_manager
mock.proxy(d.discovery_manager).start.once
mock.proxy(d).timer_execute(:service_discovery_helper_test, anything).never
d.start
d.event_loop_wait_until_start
assert_equal 1, d.discovery_manager.services.size
d.select_service do |serv|
assert_equal "127.0.0.1", serv.host
assert_equal 1234, serv.port
end
end
test 'call timer_execute if dynamic configuration' do
d = @d = DummyPlugin.new
d.configure(config_element('root', '', {}, [config_element('service_discovery', '', { '@type' => 'file', 'path' => File.join(@sd_file_dir, 'config.yml' )})]))
assert_true !!d.discovery_manager
mock.proxy(d.discovery_manager).start.once
mock(d).timer_execute(:service_discovery_helper_test, anything).once
d.start
d.event_loop_wait_until_start
end
test 'exits service discovery instances without any errors' do
d = @d = DummyPlugin.new
mockv = flexmock('dns_resolver', getaddress: '127.0.0.1')
.should_receive(:getresources)
.and_return([Resolv::DNS::Resource::IN::SRV.new(1, 10, 8081, 'service1.example.com')])
.mock
mock(Resolv::DNS).new { mockv }
d.configure(config_element('root', '', {}, [config_element('service_discovery', '', { '@type' => 'srv', 'service' => 'service1', 'hostname' => 'example.com' })]))
assert_true !!d.discovery_manager
mock.proxy(d.discovery_manager).start.once
mock(d).timer_execute(:service_discovery_helper_test, anything).once
# To avoid clearing `@logs` during `terminate` step
# https://github.com/fluent/fluentd/blob/bc78d889f93dad8c2a4e0ad1ca802546185dacba/lib/fluent/test/log.rb#L33
mock(d.log).reset.times(3)
d.start
d.event_loop_wait_until_start
d.stop unless d.stopped?
d.shutdown unless d.shutdown?
d.after_shutdown unless d.after_shutdown?
d.close unless d.closed?
d.terminate unless d.terminated?
assert_false(d.log.out.logs.any? { |e| e.match?(/thread doesn't exit correctly/) })
end
test 'static service discovery will be configured automatically when default service directive is specified' do
d = @d = DummyPlugin.new
nodes = [
config_element('node', '', { 'host' => '192.168.0.1', 'port' => '24224' }),
config_element('node', '', { 'host' => '192.168.0.2', 'port' => '24224' })
]
d.configure(config_element('root', '', {}, nodes))
assert_true !!d.discovery_manager
mock.proxy(d.discovery_manager).start.once
mock.proxy(d).timer_execute(:service_discovery_helper_test, anything).never
d.start
d.event_loop_wait_until_start
assert_equal 2, d.discovery_manager.services.size
d.select_service do |serv|
assert_equal "192.168.0.1", serv.host
assert_equal 24224, serv.port
end
d.select_service do |serv|
assert_equal "192.168.0.2", serv.host
assert_equal 24224, serv.port
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_extract.rb | test/plugin_helper/test_extract.rb | require_relative '../helper'
require 'fluent/plugin_helper/extract'
require 'fluent/time'
class ExtractHelperTest < Test::Unit::TestCase
class Dummy < Fluent::Plugin::TestBase
helpers :extract
end
class Dummy2 < Fluent::Plugin::TestBase
helpers :extract
config_section :extract do
config_set_default :tag_key, 'tag2'
end
end
def config_extract_section(hash = {})
config_element('ROOT', '', {}, [config_element('extract', '', hash)])
end
setup do
Fluent::Test.setup
@d = Dummy.new
end
teardown do
if @d
@d.stop unless @d.stopped?
@d.shutdown unless @d.shutdown?
@d.close unless @d.closed?
@d.terminate unless @d.terminated?
end
end
test 'can override default parameters, but not overwrite whole definition' do
d = Dummy.new
d.configure(config_element())
assert_nil d.extract_config
d = Dummy2.new
d.configure(config_element('ROOT', '', {}, [config_element('extract')]))
assert d.extract_config
assert_equal 'tag2', d.extract_config.tag_key
end
test 'returns nil in default' do
@d.configure(config_extract_section())
@d.start
assert_nil @d.instance_eval{ @_extract_tag_key }
assert_nil @d.instance_eval{ @_extract_time_key }
assert_nil @d.instance_eval{ @_extract_time_parser }
record = {"key1" => "value1", "key2" => 2, "tag" => "yay", "time" => Time.now.to_i}
assert_nil @d.extract_tag_from_record(record)
assert_nil @d.extract_time_from_record(record)
end
test 'can be configured as specified' do
@d.configure(config_extract_section(
"tag_key" => "tag",
"time_key" => "time",
"time_type" => "unixtime",
))
assert_equal "tag", @d.instance_eval{ @_extract_tag_key }
assert_equal "time", @d.instance_eval{ @_extract_time_key }
assert_equal :unixtime, @d.instance_eval{ @extract_config.time_type }
assert_not_nil @d.instance_eval{ @_extract_time_parser }
end
sub_test_case 'extract_tag_from_record' do
test 'returns tag string from specified tag_key field' do
@d.configure(config_extract_section("tag_key" => "tag"))
@d.start
@d.after_start
tag = @d.extract_tag_from_record({"tag" => "tag.test.code", "message" => "yay!"})
assert_equal "tag.test.code", tag
end
test 'returns tag as string by stringifying values from specified key' do
@d.configure(config_extract_section("tag_key" => "tag"))
@d.start
@d.after_start
tag = @d.extract_tag_from_record({"tag" => 100, "message" => "yay!"})
assert_equal "100", tag
end
end
sub_test_case 'extract_time_from_record' do
test 'returns EventTime object from specified time_key field, parsed as float in default' do
@d.configure(config_extract_section("time_key" => "t"))
@d.start
@d.after_start
# 1473135272 => 2016-09-06 04:14:32 UTC
t = @d.extract_time_from_record({"t" => 1473135272.5, "message" => "yay!"})
assert_equal_event_time(Fluent::EventTime.new(1473135272, 500_000_000), t)
t = @d.extract_time_from_record({"t" => "1473135272.5", "message" => "yay!"})
assert_equal_event_time(Fluent::EventTime.new(1473135272, 500_000_000), t)
end
test 'returns EventTime object, parsed as unixtime when configured so' do
@d.configure(config_extract_section("time_key" => "t", "time_type" => "unixtime"))
@d.start
@d.after_start
t = @d.extract_time_from_record({"t" => 1473135272, "message" => "yay!"})
assert_equal_event_time(Fluent::EventTime.new(1473135272, 0), t)
t = @d.extract_time_from_record({"t" => "1473135272", "message" => "yay!"})
assert_equal_event_time(Fluent::EventTime.new(1473135272, 0), t)
t = @d.extract_time_from_record({"t" => 1473135272.5, "message" => "yay!"})
assert_equal_event_time(Fluent::EventTime.new(1473135272, 0), t)
end
test 'returns EventTime object, parsed by default time parser of ruby with timezone in data' do
t = with_timezone("UTC-02") do
@d.configure(config_extract_section("time_key" => "t", "time_type" => "string"))
@d.start
@d.after_start
@d.extract_time_from_record({"t" => "2016-09-06 13:27:01 +0900", "message" => "yay!"})
end
assert_equal_event_time(event_time("2016-09-06 13:27:01 +0900"), t)
end
test 'returns EventTime object, parsed by default time parser of ruby as localtime' do
t = with_timezone("UTC-02") do
@d.configure(config_extract_section("time_key" => "t", "time_type" => "string"))
@d.start
@d.after_start
@d.extract_time_from_record({"t" => "2016-09-06 13:27:01", "message" => "yay!"})
end
assert_equal_event_time(event_time("2016-09-06 13:27:01 +0200"), t)
end
test 'returns EventTime object, parsed as configured time_format with timezone' do
t = with_timezone("UTC-02") do
@d.configure(config_extract_section("time_key" => "t", "time_type" => "string", "time_format" => "%H:%M:%S, %m/%d/%Y, %z"))
@d.start
@d.after_start
@d.extract_time_from_record({"t" => "13:27:01, 09/06/2016, -0700", "message" => "yay!"})
end
assert_equal_event_time(event_time("2016-09-06 13:27:01 -0700"), t)
end
test 'returns EventTime object, parsed as configured time_format in localtime without timezone' do
t = with_timezone("UTC-02") do
@d.configure(config_extract_section("time_key" => "t", "time_type" => "string", "time_format" => "%H:%M:%S, %m/%d/%Y"))
@d.start
@d.after_start
@d.extract_time_from_record({"t" => "13:27:01, 09/06/2016", "message" => "yay!"})
end
assert_equal_event_time(event_time("2016-09-06 13:27:01 +0200"), t)
end
test 'returns EventTime object, parsed as configured time_format in utc without timezone, localtime: false' do
t = with_timezone("UTC-02") do
c = config_extract_section("time_key" => "t", "time_type" => "string", "time_format" => "%H:%M:%S, %m/%d/%Y", "localtime" => "false")
@d.configure(c)
@d.start
@d.after_start
@d.extract_time_from_record({"t" => "13:27:01, 09/06/2016", "message" => "yay!"})
end
assert_equal_event_time(event_time("2016-09-06 13:27:01 UTC"), t)
end
test 'returns EventTime object, parsed as configured time_format in utc without timezone, utc: true' do
t = with_timezone("UTC-02") do
c = config_extract_section("time_key" => "t", "time_type" => "string", "time_format" => "%H:%M:%S, %m/%d/%Y", "utc" => "true")
@d.configure(c)
@d.start
@d.after_start
@d.extract_time_from_record({"t" => "13:27:01, 09/06/2016", "message" => "yay!"})
end
assert_equal_event_time(event_time("2016-09-06 13:27:01 UTC"), t)
end
test 'returns EventTime object, parsed as configured time_format in configured timezone' do
t = with_timezone("UTC-02") do
c = config_extract_section("time_key" => "t", "time_type" => "string", "time_format" => "%H:%M:%S, %m/%d/%Y", "timezone" => "+09:00")
@d.configure(c)
@d.start
@d.after_start
@d.extract_time_from_record({"t" => "13:27:01, 09/06/2016", "message" => "yay!"})
end
assert_equal_event_time(event_time("2016-09-06 13:27:01 +0900"), t)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_event_emitter.rb | test/plugin_helper/test_event_emitter.rb | require_relative '../helper'
require 'fluent/plugin_helper/event_emitter'
require 'fluent/plugin/base'
require 'flexmock/test_unit'
class EventEmitterTest < Test::Unit::TestCase
setup do
Fluent::Test.setup
end
class Dummy0 < Fluent::Plugin::TestBase
end
class Dummy < Fluent::Plugin::TestBase
helpers :event_emitter
end
test 'can be instantiated to be able to emit with router' do
d0 = Dummy0.new
assert d0.respond_to?(:has_router?)
assert !d0.has_router?
assert !d0.respond_to?(:router)
d1 = Dummy.new
assert d1.respond_to?(:has_router?)
assert d1.has_router?
assert d1.respond_to?(:router)
d1.stop; d1.shutdown; d1.close; d1.terminate
end
test 'can be configured with valid router' do
d1 = Dummy.new
assert d1.has_router?
assert_nil d1.router
assert_nothing_raised do
d1.configure(config_element())
end
assert d1.router
d1.shutdown
assert d1.router
d1.close
assert_nil d1.router
d1.terminate
end
test 'should not have event_emitter_router' do
d0 = Dummy0.new
assert !d0.respond_to?(:event_emitter_router)
end
test 'should have event_emitter_router' do
d = Dummy.new
assert d.respond_to?(:event_emitter_router)
end
test 'get router' do
router_mock = flexmock('mytest')
label_mock = flexmock('mylabel')
label_mock.should_receive(:event_router).twice.and_return(router_mock)
Fluent::Engine.root_agent.labels['@mytest'] = label_mock
d = Dummy.new
d.configure(config_element('ROOT', '', {'@label' => '@mytest'}))
router = d.event_emitter_router("@mytest")
assert_equal router_mock, router
end
test 'get root router' do
d = Dummy.new
router = d.event_emitter_router("@ROOT")
assert_equal Fluent::Engine.root_agent.event_router, router
end
test '#router should return the root router by default' do
stub(Fluent::Engine.root_agent).event_router { "root_event_router" }
stub(Fluent::Engine.root_agent).source_only_router { "source_only_router" }
d = Dummy.new
d.configure(Fluent::Config::Element.new('source', '', {}, []))
assert_equal "root_event_router", d.router
end
test '#router should return source_only_router during source-only' do
stub(Fluent::Engine.root_agent).event_router { "root_event_router" }
stub(Fluent::Engine.root_agent).source_only_router { "source_only_router" }
d = Dummy.new
d.configure(Fluent::Config::Element.new('source', '', {}, []))
d.event_emitter_apply_source_only
router_when_source_only = d.router
d.event_emitter_cancel_source_only
router_after_canceled = d.router
assert_equal(
["source_only_router", "root_event_router"],
[router_when_source_only, router_after_canceled]
)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_cert_option.rb | test/plugin_helper/test_cert_option.rb | require_relative '../helper'
require 'fluent/plugin_helper/server'
require 'fluent/plugin_helper/cert_option'
class CertOptionPluginHelperTest < Test::Unit::TestCase
class Dummy < Fluent::Plugin::TestBase
helpers :cert_option
end
class DummyServer < Fluent::Plugin::TestBase
helpers :server
end
test 'can load PEM encoded certificate file' do
d = Dummy.new
certs = d.cert_option_certificates_from_file("test/plugin_helper/data/cert/cert.pem")
assert_equal(1, certs.length)
certs = d.cert_option_certificates_from_file("test/plugin_helper/data/cert/cert-with-no-newline.pem")
assert_equal(1, certs.length)
certs = d.cert_option_certificates_from_file("test/plugin_helper/data/cert/cert-with-CRLF.pem")
assert_equal(1, certs.length)
end
test 'raise an error for broken certificates_from_file file' do
d = Dummy.new
assert_raise Fluent::ConfigError do
d.cert_option_certificates_from_file("test/plugin_helper/data/cert/empty.pem")
end
end
sub_test_case "ensure OpenSSL FIPS mode" do
setup do
cert_dir = File.expand_path(File.join(File.dirname(__FILE__), "../plugin_helper/data/cert/"))
@tls_options = {
cert_path: File.join(cert_dir, "cert.pem"),
private_key_path: File.join(cert_dir, "cert-key.pem"),
}
@d = DummyServer.new
end
data(
enabled_fips_mode: [true, true, nil],
skip_checking_fips_mode: [true, false, nil],
block_incompatible_fips_mode: [false, true,
Fluent::ConfigError.new("Cannot enable FIPS compliant mode. OpenSSL FIPS configuration is disabled")],
not_care_fips_mode: [false, false, nil]
)
test 'ensure FIPS error' do |(fips_mode, ensure_fips, expected)|
stub(OpenSSL).fips_mode { fips_mode }
conf = @d.server_create_transport_section_object(@tls_options.merge({ensure_fips: ensure_fips}))
if expected
assert_raise(expected) do
@d.cert_option_create_context(Fluent::TLS::DEFAULT_VERSION,
false,
Fluent::TLS::CIPHERS_DEFAULT,
conf)
end
else
assert_nothing_raised do
@d.cert_option_create_context(Fluent::TLS::DEFAULT_VERSION,
false,
Fluent::TLS::CIPHERS_DEFAULT,
conf)
end
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_child_process.rb | test/plugin_helper/test_child_process.rb | # coding: utf-8
require_relative '../helper'
require 'fluent/plugin_helper/child_process'
require 'fluent/plugin/base'
require 'timeout'
require 'tempfile'
class ChildProcessTest < Test::Unit::TestCase
TEST_DEADLOCK_TIMEOUT = 30
TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING = 0.1 # This may be shorter than ruby's threading timer, but work well
# @nalsh says that ruby's cpu assignments for threads are almost 200ms or so.
# Loop interval (expected that it work as specified) should be longer than it.
TEST_WAIT_INTERVAL_FOR_LOOP = 0.5
setup do
@d = Dummy.new
@d.configure(config_element())
@d.start
end
teardown do
if @d
@d.stop unless @d.stopped?
@d.shutdown unless @d.shutdown?
@d.close unless @d.closed?
@d.terminate unless @d.terminated?
@d.log.reset
end
end
class Dummy < Fluent::Plugin::TestBase
helpers :child_process
def configure(conf)
super
@_child_process_kill_timeout = 1
end
end
test 'can be instantiated' do
d1 = Dummy.new
assert d1.respond_to?(:_child_process_processes)
end
test 'can be configured and started' do
d1 = Dummy.new
assert_nothing_raised do
d1.configure(config_element())
end
assert d1.plugin_id
assert d1.log
d1.start
end
test 'can execute external command asynchronously' do
m = Mutex.new
m.lock
ary = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
@d.child_process_execute(:t0, 'echo', arguments: ['foo', 'bar'], mode: [:read]) do |io|
m.lock
ran = true
io.read # discard
ary << 2
m.unlock
end
ary << 1
m.unlock
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
m.unlock
end
assert_equal [1,2], ary
end
test 'can execute external command at just once, which finishes immediately' do
m = Mutex.new
t1 = Time.now
ary = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
@d.child_process_execute(:t1, 'echo', arguments: ['foo', 'bar'], mode: [:read]) do |io|
m.lock
ran = true
ary << io.read
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
m.unlock
end
assert{ Time.now - t1 < 4.0 }
end
test 'can execute external command at just once, which can handle both of read and write' do
m = Mutex.new
ary = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
cmd = "ruby -e 'while !STDIN.eof? && line = STDIN.readline; puts line.chomp; STDOUT.flush rescue nil; end'"
@d.child_process_execute(:t2, cmd, mode: [:write, :read]) do |writeio, readio|
m.lock
ran = true
[[1,2],[3,4],[5,6]].each do |i,j|
writeio.write "my data#{i}\n"
writeio.write "my data#{j}\n"
writeio.flush
end
writeio.close
while line = readio.readline
ary << line
end
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
m.unlock
end
assert_equal [], @d.log.out.logs
expected = (1..6).map{|i| "my data#{i}\n" }
assert_equal expected, ary
end
test 'can execute external command at just once, which can handle both of read and write. Ignore stderr message/no block' do
m = Mutex.new
ary = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
# lots of stderr message should not be blocked and message should not be printed in test.
cmd = "ruby -e 'while !STDIN.eof? && line = STDIN.readline; STDERR.puts line.chomp * 1000; STDOUT.puts line.chomp; STDOUT.flush rescue nil; end'"
@d.child_process_execute(:t2_and_ignore_stderr, cmd, mode: [:write, :read]) do |writeio, readio|
m.lock
ran = true
[[1,2],[3,4],[5,6]].each do |i,j|
writeio.write "my data#{i}\n"
writeio.write "my data#{j}\n"
writeio.flush
end
writeio.close
while line = readio.readline
ary << line
end
m.unlock
end
begin
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
rescue
ensure
m.unlock
end
end
assert_equal [], @d.log.out.logs
expected = (1..6).map{|i| "my data#{i}\n" }
assert_equal expected, ary
end
test 'can execute external command at just once, which can handle all of read, write and stderr' do
m = Mutex.new
ary1 = []
ary2 = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
cmd = "ruby -e 'while !STDIN.eof? && line = STDIN.readline; puts line.chomp; STDOUT.flush rescue nil; STDERR.puts line.chomp; STDERR.flush rescue nil; end'"
@d.child_process_execute(:t2a, cmd, mode: [:write, :read, :stderr]) do |writeio, readio, stderrio|
m.lock
ran = true
[[1,2],[3,4],[5,6]].each do |i,j|
writeio.write "my data#{i}\n"
writeio.write "my data#{j}\n"
writeio.flush
end
writeio.close
while (line1 = readio.readline) && (line2 = stderrio.readline)
ary1 << line1
ary2 << line2
end
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
m.unlock
end
assert_equal [], @d.log.out.logs
expected = (1..6).map{|i| "my data#{i}\n" }
assert_equal expected, ary1
assert_equal expected, ary2
end
test 'can execute external command at just once, which can handle both of write and read (with stderr)' do
m = Mutex.new
ary = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
cmd = "ruby"
args = ['-e', 'while !STDIN.eof? && line = STDIN.readline; puts "[s]" + line.chomp; STDOUT.flush rescue nil; STDERR.puts "[e]" + line.chomp; STDERR.flush rescue nil; end']
@d.child_process_execute(:t2b, cmd, arguments: args, mode: [:write, :read_with_stderr]) do |writeio, readio|
m.lock
ran = true
[[1,2],[3,4],[5,6]].each do |i,j|
writeio.write "my data#{i}\n"
writeio.write "my data#{j}\n"
writeio.flush
end
writeio.close
while line = readio.readline
ary << line
end
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
m.unlock
end
assert_equal [], @d.log.out.logs
expected = (1..6).map{|i| ["[s]my data#{i}\n", "[e]my data#{i}\n"] }.flatten
assert_equal expected, ary
end
test 'can execute external command at just once, which runs forever' do
ary = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
args = ["-e", "while sleep 0.1; puts 1; STDOUT.flush; end"]
@d.child_process_execute(:t3, "ruby", arguments: args, mode: [:read]) do |io|
begin
while @d.child_process_running? && line = io.readline
ran ||= true
ary << line
end
rescue
# ignore
end
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until ran
sleep 1
@d.stop # nothing occurs
@d.shutdown
assert { ary.size >= 4 }
@d.close
@d.terminate
assert @d._child_process_processes.empty?
end
end
# In windows environment, child_process try KILL at first (because there's no SIGTERM)
test 'can execute external command just once, and can terminate it forcedly when shutdown/terminate even if it ignore SIGTERM' do
omit "SIGTERM is unavailable on Windows" if Fluent.windows?
m = Mutex.new
ary = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
@d.child_process_execute(:t4, "ruby -e 'Signal.trap(:TERM, nil); while sleep 0.1; puts 1; STDOUT.flush rescue nil; end'", mode: [:read]) do |io|
begin
while line = io.readline
unless ran
m.lock
ran = true
end
ary << line
end
rescue
# ignore
ensure
m.unlock
end
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
assert_equal [], @d.log.out.logs
@d.stop # nothing occurs
lines1 = nil
waiting(TEST_WAIT_INTERVAL_FOR_LOOP * 3) do
lines1 = ary.size
lines1 > 1
end
pid = @d._child_process_processes.keys.first
# default value 10 is too long for test
@d.instance_eval { @_child_process_exit_timeout = 1 }
@d.shutdown
sleep TEST_WAIT_INTERVAL_FOR_LOOP
lines2 = ary.size
assert { lines2 > lines1 }
@d.close
assert_nil((Process.waitpid(pid, Process::WNOHANG) rescue nil))
@d.terminate
assert @d._child_process_processes.empty?
begin
Process.waitpid(pid)
rescue Errno::ECHILD
end
# Process successfully KILLed if test reaches here
assert true
end
end
test 'can execute external command many times, which finishes immediately' do
ary = []
arguments = ["okay"]
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
start_time = Fluent::Clock.now
@d.child_process_execute(:t5, "echo", arguments: arguments, interval: 1, mode: [:read]) do |io|
ary << io.read.split("\n").map(&:chomp).join
end
1.upto(2) do |i|
sleep 0.1 while ary.size < i
elapsed = Fluent::Clock.now - start_time
assert_equal(i, ary.size)
assert_true(elapsed > i && elapsed < i + 0.5,
"actual elapsed: #{elapsed}")
end
assert_equal [], @d.log.out.logs
@d.stop
assert_equal [], @d.log.out.logs
@d.shutdown; @d.close; @d.terminate
end
end
test 'can execute external command many times, with leading once executed immediately' do
ary = []
arguments = ["okay"]
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
start_time = Fluent::Clock.now
@d.child_process_execute(:t6, "echo", arguments: arguments, interval: 1, immediate: true, mode: [:read]) do |io|
ary << io.read.split("\n").map(&:chomp).join
end
0.upto(1) do |i|
sleep 0.1 while ary.size < i + 1
elapsed = Fluent::Clock.now - start_time
assert_equal(i + 1, ary.size)
assert_true(elapsed > i && elapsed < i + 0.5,
"actual elapsed: #{elapsed}")
end
@d.stop; @d.shutdown; @d.close; @d.terminate
assert_equal [], @d.log.out.logs
end
end
test 'does not execute long running external command in parallel in default' do
ary = []
arguments = ["-e", "10.times{ sleep #{TEST_WAIT_INTERVAL_FOR_LOOP} }"] # 5 sec
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
assert_equal [], @d.log.out.logs
@d.log.out.singleton_class.module_eval do
define_method(:write){|message|
raise "boo" if message.include?('test: {"test":"test"}') || message.include?('test: {"test"=>"test"}')
@logs.push message
}
end
@d.child_process_execute(:t7, "ruby", arguments: arguments, interval: 1, immediate: true, mode: [:read]) do |io|
ary << io.read.split("\n").map(&:chomp).join
end
sleep 2
assert_equal 1, @d._child_process_processes.size
@d.stop
warn_msg = '[warn]: previous child process is still running. skipped. title=:t7 command="ruby" arguments=["-e", "10.times{ sleep 0.5 }"] interval=1 parallel=false' + "\n"
logs = @d.log.out.logs
assert{ logs.first.end_with?(warn_msg) }
assert{ logs.all?{|line| line.end_with?(warn_msg) } }
@d.shutdown; @d.close; @d.terminate
assert_equal [], @d.log.out.logs
end
end
test 'can execute long running external command in parallel if specified' do
ary = []
arguments = ["-e", "10.times{ puts 'okay'; STDOUT.flush rescue nil; sleep #{TEST_WAIT_INTERVAL_FOR_LOOP} }"] # 5 sec
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
@d.child_process_execute(:t8, "ruby", arguments: arguments, interval: 1, immediate: true, parallel: true, mode: [:read]) do |io|
ary << io.read.split("\n").map(&:chomp).join
end
sleep 2
processes = @d._child_process_processes.size
assert { processes >= 2 && processes <= 4 }
@d.stop; @d.shutdown; @d.close; @d.terminate
assert_equal [], @d.log.out.logs
end
end
test 'execute external processes only for writing' do
m = Mutex.new
unreadable = false
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
@d.child_process_execute(:t9, "ruby", arguments: ['-e', 'a=""; while b=STDIN.readline; a+=b; end'], mode: [:write]) do |io|
m.lock
ran = true
begin
io.read
rescue IOError
unreadable = true
end
50.times do
io.write "hahaha\n"
end
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
m.unlock
assert unreadable
@d.stop; @d.shutdown; @d.close; @d.terminate
assert_equal [], @d.log.out.logs
end
end
test 'execute external processes only for reading' do
m = Mutex.new
unwritable = false
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
@d.child_process_execute(:t10, "ruby", arguments: ["-e", "while sleep #{TEST_WAIT_INTERVAL_FOR_LOOP}; puts 1; STDOUT.flush rescue nil; end"], mode: [:read]) do |io|
m.lock
ran = true
begin
io.write "foobar"
rescue IOError
unwritable = true
end
_data = io.readline
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
m.unlock
@d.stop; @d.shutdown; @d.close; @d.terminate
assert unwritable
assert_equal [], @d.log.out.logs
end
end
test 'can control external encodings' do
m = Mutex.new
encodings = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
@d.child_process_execute(:t11, "ruby -e 'sleep 10'", external_encoding: 'ascii-8bit') do |r, w|
m.lock
ran = true
encodings << r.external_encoding
encodings << w.external_encoding
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
assert_equal Encoding::ASCII_8BIT, encodings[0]
assert_equal Encoding::ASCII_8BIT, encodings[1]
@d.stop; @d.shutdown; @d.close; @d.terminate
end
end
test 'can control internal encodings' do
m = Mutex.new
encodings = []
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
@d.child_process_execute(:t12, "ruby -e 'sleep 10'", external_encoding: 'utf-8', internal_encoding: 'ascii-8bit') do |r, w|
m.lock
ran = true
encodings << r.internal_encoding
encodings << w.internal_encoding
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
assert_equal Encoding::ASCII_8BIT, encodings[0]
assert_equal Encoding::ASCII_8BIT, encodings[1]
@d.stop; @d.shutdown; @d.close; @d.terminate
end
end
test 'can convert encodings from ascii-8bit to utf-8' do
m = Mutex.new
str = nil
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
args = ['-e', 'STDOUT.set_encoding("ascii-8bit"); STDOUT.write "\xA4\xB5\xA4\xC8\xA4\xB7"']
@d.child_process_execute(:t13, "ruby", arguments: args, external_encoding: 'euc-jp', internal_encoding: 'windows-31j', mode: [:read]) do |io|
m.lock
ran = true
str = io.read
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
assert_equal Encoding.find('windows-31j'), str.encoding
expected = "さとし".encode('windows-31j')
assert_equal expected, str
@d.stop; @d.shutdown; @d.close; @d.terminate
end
end
test 'can scrub characters without exceptions' do
if Gem::Version.create(RUBY_VERSION) >= Gem::Version.create('3.3.0')
pend "Behaviour of IO#set_encoding is changed as of Ruby 3.3 (#4058)"
end
m = Mutex.new
str = nil
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
args = ['-e', 'STDOUT.set_encoding("ascii-8bit"); STDOUT.write "\xFF\xFF\x00\xF0\xF0"']
@d.child_process_execute(:t13a, "ruby", arguments: args, mode: [:read]) do |io|
m.lock
ran = true
str = io.read
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
assert_equal Encoding.find('utf-8'), str.encoding
replacement = "\uFFFD" # U+FFFD (REPLACEMENT CHARACTER)
nul = "\x00" # U+0000 (NUL)
expected = replacement * 2 + nul + replacement * 2
assert_equal expected, str
@d.stop; @d.shutdown; @d.close; @d.terminate
end
end
test 'can scrub characters without exceptions and replace specified chars' do
if Gem::Version.create(RUBY_VERSION) >= Gem::Version.create('3.3.0')
pend "Behaviour of IO#set_encoding is changed as of Ruby 3.3 (#4058)"
end
m = Mutex.new
str = nil
replacement = "?"
Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
ran = false
args = ['-e', 'STDOUT.set_encoding("ascii-8bit"); STDOUT.write "\xFF\xFF\x00\xF0\xF0"']
@d.child_process_execute(:t13b, "ruby", arguments: args, mode: [:read], scrub: true, replace_string: replacement) do |io|
m.lock
ran = true
str = io.read
m.unlock
end
sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
m.lock
assert_equal Encoding.find('utf-8'), str.encoding
nul = "\x00" # U+0000 (NUL)
expected = replacement * 2 + nul + replacement * 2
assert_equal expected, str
@d.stop; @d.shutdown; @d.close; @d.terminate
end
end
unless Fluent.windows?
  # subprocess_name: should replace the command name reported by ps(1).
  test 'can specify subprocess name' do
    # Probe whether this environment actually honors process renaming
    # (some sandboxes/CI hosts do not) by renaming a throwaway `cat`.
    io = IO.popen([["cat", "caaaaaaaaaaat"], '-'])
    process_naming_enabled = (IO.popen(["ps", "opid,cmd"]){|_io| _io.readlines }.count{|line| line.include?("caaaaaaaaaaat") } > 0)
    Process.kill(:TERM, io.pid) rescue nil
    io.close rescue nil
    # Does TravisCI prohibit process renaming?
    # This test will be passed in such environment
    pend unless process_naming_enabled
    m = Mutex.new
    pids = []
    proc_lines = []
    Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
      ran = false
      @d.child_process_execute(:t14, "/bin/sh", arguments:['-c', 'sleep 10; echo "hello"'], subprocess_name: "sleeeeeeeeeper", mode: [:read]) do |readio|
        m.lock
        ran = true
        pids << @d.child_process_id
        # Snapshot the process table while the child is still sleeping.
        proc_lines += IO.popen(["ps", "opid,cmd"]){|_io| _io.readlines }
        m.unlock
        readio.read
      end
      sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
      m.lock
      pid = pids.first
      # 16357 sleeeeeeeeeper -e sleep 10; puts "hello"
      assert{ proc_lines.find{|line| line =~ /^\s*#{pid}\s/ }.strip.split(/\s+/)[1] == "sleeeeeeeeeper" }
      @d.stop; @d.shutdown; @d.close; @d.terminate
    end
  end
end
test 'can set ENV variables' do
  # Entries passed via env: must be visible to the spawned child process.
  mutex = Mutex.new
  output = nil
  Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
    block_ran = false
    ruby_args = ['-e', 'puts ENV["testing_child_process"]']
    @d.child_process_execute(:t15a, "ruby", arguments: ruby_args, mode: [:read], env: {'testing_child_process' => 'Yes! True!'}) do |io|
      mutex.synchronize do
        block_ran = true
        output = io.read
      end
    end
    # Wait for the callback thread to start, then wait for it to finish.
    sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until mutex.locked? || block_ran
    mutex.lock
    assert_equal "Yes! True!\n", output
    @d.stop; @d.shutdown; @d.close; @d.terminate
  end
end
test 'can unset ENV variables of Fluentd process' do
  # With unsetenv: true the child starts from a clean environment: a variable
  # set in the parent must NOT leak through; only env: entries survive
  # (PATH is passed back explicitly so the "ruby" binary is still found).
  mutex = Mutex.new
  output = nil
  Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
    current_env_path = ENV['PATH']
    block_ran = false
    ruby_args = ['-e', 'puts ENV["testing_child_process1"].to_s + ENV["testing_child_process2"].to_s']
    ENV['testing_child_process1'] = "No! False!"
    @d.child_process_execute(:t15b, "ruby", arguments: ruby_args, mode: [:read], unsetenv: true, env: {'testing_child_process2' => 'Yes! True!', 'PATH' => current_env_path}) do |io|
      mutex.synchronize do
        block_ran = true
        output = io.read
      end
    end
    sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until mutex.locked? || block_ran
    mutex.lock
    assert_equal "Yes! True!\n", output
    @d.stop; @d.shutdown; @d.close; @d.terminate
  end
end
unless Fluent.windows?
  # chdir: should set the working directory of the child process.
  test 'can change working directory' do
    # check my real /tmp directory (for mac)
    # (macOS symlinks /tmp to /private/tmp, so resolve what a child actually sees)
    cmd = ['ruby', '-e', 'Dir.chdir("/tmp"); puts Dir.pwd']
    mytmpdir = IO.popen(cmd){|io| io.read.chomp }
    m = Mutex.new
    str = nil
    Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
      ran = false
      args = ['-e', 'puts Dir.pwd']
      @d.child_process_execute(:t16, "ruby", arguments: args, mode: [:read], chdir: "/tmp") do |io|
        m.lock
        ran = true
        str = io.read.chomp
        m.unlock
      end
      sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
      m.lock
      assert_equal mytmpdir, str
      @d.stop; @d.shutdown; @d.close; @d.terminate
    end
  end
end
sub_test_case 'on_exit_callback is specified' do
setup do
  # Create a marker file; child processes delete it to prove they ran to completion.
  @temp = Tempfile.create("child_process_wait_with_on_exit_callback")
  @temp_path = @temp.path
  @temp.close
end
teardown do
  # Remove the marker if a test's child never got around to deleting it.
  File.unlink @temp_path if File.exist?(@temp_path)
end
# A child that exits normally yields exitstatus 0 via on_exit_callback, and
# the callback must fire only after the process has actually exited.
test 'can return exit status for child process successfully exits using on_exit_callback' do
  assert File.exist?(@temp_path)
  block_exits = false
  callback_called = false
  exit_status = nil
  # The child prints a line and deletes the marker file to prove completion.
  args = ['-e', 'puts "yay"; File.unlink ARGV[0]', @temp_path]
  cb = ->(status){ exit_status = status; callback_called = true }
  str = nil
  pid = nil
  @d.child_process_execute(:st1, "ruby", arguments: args, mode: [:read], on_exit_callback: cb) do |readio|
    assert !callback_called # ensure yet to be called
    pid = @d.instance_eval { child_process_id }
    str = readio.read.chomp
    block_exits = true
  end
  waiting(TEST_DEADLOCK_TIMEOUT){ sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING while @d.child_process_exist?(pid) } # to get exit status
  waiting(TEST_DEADLOCK_TIMEOUT){ sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until block_exits }
  waiting(TEST_DEADLOCK_TIMEOUT){ sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until callback_called }
  assert callback_called
  assert exit_status
  assert_equal 0, exit_status.exitstatus
  assert !File.exist?(@temp_path)
  assert_equal "yay", str
end
# A child killed by a signal has no exitstatus; termsig carries the signal
# number instead (SIGQUIT == 3). The marker file must survive because the
# child never reached its unlink step.
test 'can return exit status with signal code for child process killed by signal using on_exit_callback' do
  omit "SIGQUIT is unsupported on Windows" if Fluent.windows?
  assert File.exist?(@temp_path)
  block_exits = false
  callback_called = false
  exit_status = nil
  args = ['-e', 'sleep ARGV[0].to_i; puts "yay"; File.unlink ARGV[1]', '100', @temp_path]
  cb = ->(status){ exit_status = status; callback_called = true }
  str = nil
  pid = nil
  @d.child_process_execute(:st1, "ruby", arguments: args, mode: [:read], on_exit_callback: cb) do |readio|
    pid = @d.instance_eval { child_process_id }
    sleep 1
    Process.kill(:QUIT, pid)
    str = readio.read.chomp rescue nil # empty string before EOF
    block_exits = true
  end
  waiting(TEST_DEADLOCK_TIMEOUT){ sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING while @d.child_process_exist?(pid) } # to get exit status
  waiting(TEST_DEADLOCK_TIMEOUT){ sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until block_exits }
  waiting(TEST_DEADLOCK_TIMEOUT){ sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until callback_called }
  assert callback_called
  assert exit_status
  # assert_nil: assert_equal(nil, ...) is deprecated by test-unit, and this
  # matches the signal-exit assertions used elsewhere in this file.
  assert_nil exit_status.exitstatus
  assert_equal 3, exit_status.termsig
  assert File.exist?(@temp_path)
  assert_equal '', str
end
test 'calls on_exit_callback for each process exits for interval call using on_exit_callback' do
  # With interval: the command is re-spawned repeatedly; every completed run
  # must deliver its output and invoke the exit callback once.
  outputs = []
  statuses = []
  collect_status = lambda { |status| statuses << status }
  Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
    @d.child_process_execute(:st1, "echo", arguments: ['yay'], immediate: true, interval: 1, mode: [:read], on_exit_callback: collect_status) do |readio|
      outputs << readio.read.chomp
    end
    sleep 2.5
  end
  # immediate + 1s interval over a 2.5s window => at least two runs.
  assert { outputs.size >= 2 }
  assert { statuses.size >= 2 }
end
test 'waits lasting child process until wait_timeout if block is not specified' do
  # The fast child finishes within wait_timeout, so the callback fires with a
  # successful status and the marker file has been removed by the child.
  assert File.exist?(@temp_path)
  finished = false
  status_seen = nil
  on_exit = lambda do |status|
    status_seen = status
    finished = true
  end
  Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
    @d.child_process_execute(:t17, "ruby", arguments: ['-e', 'File.unlink ARGV[0]', @temp_path], on_exit_callback: on_exit, wait_timeout: 2)
    sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until finished
  end
  assert finished
  assert status_seen
  assert_equal 0, status_seen.exitstatus
  assert !File.exist?(@temp_path)
end
test 'waits lasting child process until wait_timeout after block rans if block is specified' do
  # After the block returns, the helper keeps waiting (up to wait_timeout) for
  # the child; the quick child exits in time so the callback sees success.
  assert File.exist?(@temp_path)
  finished = false
  status_seen = nil
  on_exit = lambda do |status|
    status_seen = status
    finished = true
  end
  Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
    @d.child_process_execute(:t17, "ruby", arguments: ['-e', 'File.unlink ARGV[0]', @temp_path], mode: nil, on_exit_callback: on_exit, wait_timeout: 10) do
      sleep 1
    end
    sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until finished
  end
  assert finished
  assert status_seen
  assert_equal 0, status_seen.exitstatus
  assert !File.exist?(@temp_path)
end
# A child still running when wait_timeout expires is killed (SIGKILL on
# POSIX); the callback then reports termsig 9 and no exitstatus, and the
# marker file survives because the child never reached its unlink.
test 'kills lasting child process after wait_timeout if block is not specified' do
  assert File.exist?(@temp_path)
  callback_called = false
  exit_status = nil
  # Child sleeps 20s, far beyond the 1s wait_timeout.
  args = ['-e', 'sleep ARGV[0].to_i; File.unlink ARGV[1]', '20', @temp_path]
  cb = ->(status){ exit_status = status; callback_called = true }
  Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
    @d.child_process_execute(:t17, "ruby", arguments: args, on_exit_callback: cb, wait_timeout: 1)
    sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until callback_called
  end
  assert callback_called
  assert exit_status
  unless Fluent.windows? # On Windows, exitstatus is always 0 and termsig is nil
    assert_nil exit_status.exitstatus
    assert_equal 9, exit_status.termsig # SIGKILL
  end
  assert File.exist?(@temp_path)
end
# When a block is given, the kill countdown (wait_timeout 1s) starts after
# the block returns; the 20s child is still running and gets SIGKILL.
test 'kills lasting child process after block ran and wait_timeout expires if block is specified' do
  assert File.exist?(@temp_path)
  callback_called = false
  exit_status = nil
  args = ['-e', 'sleep ARGV[0].to_i; File.unlink ARGV[1]', '20', @temp_path]
  cb = ->(status){ exit_status = status; callback_called = true }
  Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
    @d.child_process_execute(:t17, "ruby", arguments: args, mode: nil, on_exit_callback: cb, wait_timeout: 1) do
      sleep 1
    end
    sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until callback_called
  end
  assert callback_called
  assert exit_status
  unless Fluent.windows? # On Windows, exitstatus is always 0 and termsig is nil
    assert_nil exit_status.exitstatus
    assert_equal 9, exit_status.termsig # SIGKILL
  end
  assert File.exist?(@temp_path)
end
test 'execute child process writing data to stdout which is unread' do
  # Even when nobody reads stdout (mode: []), the child must exit cleanly and
  # the exit callback must still be invoked with a successful status.
  finished = false
  status_seen = nil
  on_exit = lambda do |status|
    status_seen = status
    finished = true
  end
  Timeout.timeout(TEST_DEADLOCK_TIMEOUT) do
    @d.child_process_execute(:out_exec_process, "echo writing to stdout", stderr: :connect, immediate: true, parallel: true, mode: [], wait_timeout: 1, on_exit_callback: on_exit)
    sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until finished
  end
  assert finished
  assert status_seen
  assert status_seen.success?
  assert_equal 0, status_seen.exitstatus
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_timer.rb | test/plugin_helper/test_timer.rb | require_relative '../helper'
require 'fluent/plugin_helper/timer'
require 'fluent/plugin/base'
# Tests for the timer plugin helper: repeating and one-shot timers driven by
# the helper's event loop, and error isolation between concurrent timers.
class TimerTest < Test::Unit::TestCase
  # Minimal plugin that mixes in the timer helper under test.
  class Dummy < Fluent::Plugin::TestBase
    helpers :timer
  end
  test 'can be instantiated under state that timer is not running' do
    d1 = Dummy.new
    assert d1.respond_to?(:timer_running?)
    assert !d1.timer_running?
  end
  test 'can be configured' do
    d1 = Dummy.new
    assert_nothing_raised do
      d1.configure(config_element())
    end
    assert d1.plugin_id
    assert d1.log
  end
  test 'can start timers by start' do
    d1 = Dummy.new
    d1.configure(config_element())
    assert !d1.timer_running?
    d1.start
    assert d1.timer_running?
    counter = 0
    d1.timer_execute(:test, 1) do
      counter += 1
    end
    sleep 2
    d1.stop
    assert !d1.timer_running?
    # Loose bounds: scheduling jitter makes the exact tick count nondeterministic.
    assert{ counter >= 1 && counter <= 2 }
    d1.shutdown; d1.close; d1.terminate
  end
  test 'can run many timers' do
    # Two independent 0.2s timers should both tick ~5 times in one second.
    d1 = Dummy.new
    d1.configure(config_element())
    d1.start
    counter1 = 0
    counter2 = 0
    d1.timer_execute(:t1, 0.2) do
      counter1 += 1
    end
    d1.timer_execute(:t2, 0.2) do
      counter2 += 1
    end
    sleep 1
    d1.stop
    assert{ counter1 >= 4 && counter1 <= 5 }
    assert{ counter2 >= 4 && counter2 <= 5 }
    d1.shutdown; d1.close; d1.terminate
  end
  test 'aborts timer which raises exceptions' do
    # An exception in one timer must detach only that timer (with an error
    # log) while the sibling timer keeps running.
    d1 = Dummy.new
    d1.configure(config_element())
    d1.start
    counter1 = 0
    counter2 = 0
    d1.timer_execute(:t1, 0.2) do
      counter1 += 1
    end
    d1.timer_execute(:t2, 0.2) do
      raise "abort!!!!!!" if counter2 > 1
      counter2 += 1
    end
    sleep 1
    d1.stop
    assert{ counter1 >= 4 && counter1 <= 5 }
    assert{ counter2 == 2 }
    msg = "Unexpected error raised. Stopping the timer. title=:t2"
    assert(d1.log.out.logs.any?{|line| line.include?("[error]:") && line.include?(msg) && line.include?("abort!!!!!!") })
    assert(d1.log.out.logs.any?{|line| line.include?("[error]:") && line.include?("Timer detached. title=:t2") })
    d1.shutdown; d1.close; d1.terminate
  end
  test 'can run at once' do
    # repeat: false should fire exactly once and then detach its watcher
    # from the event loop.
    d1 = Dummy.new
    d1.configure(config_element())
    assert !d1.timer_running?
    d1.start
    assert d1.timer_running?
    waiting_assertion = true
    waiting_timer = true
    counter = 0
    d1.timer_execute(:test, 1, repeat: false) do
      # Hold the timer callback open until the watcher assertions below ran.
      sleep(0.1) while waiting_assertion
      counter += 1
      waiting_timer = false
    end
    watchers = d1._event_loop.watchers.reject {|w| w.is_a?(Fluent::PluginHelper::EventLoop::DefaultWatcher) }
    assert_equal(1, watchers.size)
    assert(watchers.first.attached?)
    waiting_assertion = false
    sleep(0.1) while waiting_timer
    assert_equal(1, counter)
    # The one-shot watcher should detach itself after firing.
    waiting(4){ sleep 0.1 while watchers.first.attached? }
    assert_false(watchers.first.attached?)
    watchers = d1._event_loop.watchers.reject {|w| w.is_a?(Fluent::PluginHelper::EventLoop::DefaultWatcher) }
    assert_equal(0, watchers.size)
    d1.shutdown; d1.close; d1.terminate
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_retry_state.rb | test/plugin_helper/test_retry_state.rb | require_relative '../helper'
require 'fluent/plugin_helper/retry_state'
require 'fluent/plugin/base'
require 'time'
class RetryStateHelperTest < Test::Unit::TestCase
# Stub RetryState#current_time on a single state object so tests can advance
# a virtual clock deterministically instead of sleeping. Prepending a fresh
# module each call means the most recent override always wins.
def override_current_time(state, time)
  clock_stub = Module.new do
    define_method(:current_time) { time }
  end
  state.singleton_class.prepend(clock_stub)
end
# Minimal plugin that mixes in the retry_state helper under test.
class Dummy < Fluent::Plugin::TestBase
  helpers :retry_state
end
# Plain value object describing one expected retry attempt: which retry it
# is (1-based), seconds elapsed since start, and whether it uses the
# secondary output.
class RetryRecord
  attr_reader :retry_count, :elapsed_sec, :is_secondary

  def initialize(retry_count, elapsed_sec, is_secondary)
    @retry_count = retry_count # This is Nth retryment
    @elapsed_sec = elapsed_sec
    @is_secondary = is_secondary
  end

  # Value equality over all three attributes.
  def ==(obj)
    obj.retry_count == @retry_count &&
      obj.elapsed_sec == @elapsed_sec &&
      obj.is_secondary == @is_secondary
  end
end
setup do
  # Fresh helper-enabled plugin instance for every test.
  @d = Dummy.new
end
test 'randomize can generate value within specified +/- range' do
  # Default randomize_width is 0.125: results fall in [base*0.875, base*1.125).
  state = @d.retry_state_create(:t1, :exponential_backoff, 0.1, 30) # default enabled w/ 0.125
  500.times do
    value = state.randomize(1000)
    assert{ (875...1125).cover?(value) }
  end
  # An explicit width of 0.25 widens the band to [750, 1250).
  state = @d.retry_state_create(:t1, :exponential_backoff, 0.1, 30, randomize_width: 0.25)
  500.times do
    value = state.randomize(1000)
    assert{ (750...1250).cover?(value) }
  end
end
# retry_state_create returns an ExponentialBackOffRetry in the :primary
# state, with next_time = start + wait and timeout_at = start + timeout.
test 'plugin can create retry_state machine' do
  s = @d.retry_state_create(:t1, :exponential_backoff, 0.1, 30)
  # attr_reader :title, :start, :steps, :next_time, :timeout_at, :current, :secondary_transition_at, :secondary_transition_times
  assert_equal :t1, s.title
  start_time = s.start
  assert_equal 0, s.steps
  # Compare seconds and nanoseconds separately to sidestep Float rounding.
  assert_equal (start_time + 0.1).to_i, s.next_time.to_i
  assert_equal (start_time + 0.1).nsec, s.next_time.nsec
  assert_equal (start_time + 30), s.timeout_at
  assert_equal :primary, s.current
  assert{ s.is_a? Fluent::PluginHelper::RetryState::ExponentialBackOffRetry }
end
# Periodic strategy: constant 3s interval inside a 29s window. Nine full
# intervals fit (27s); the tenth next_time is clamped to timeout_at, and the
# step after that hits the limit.
test 'periodic retries' do
  s = @d.retry_state_create(:t2, :periodic, 3, 29, randomize: false)
  dummy_current_time = s.start
  override_current_time(s, dummy_current_time)
  assert_equal dummy_current_time, s.current_time
  assert_equal (dummy_current_time + 29), s.timeout_at
  assert_equal (dummy_current_time + 3), s.next_time
  i = 1
  while i < 9
    override_current_time(s, s.next_time)
    s.step
    assert_equal i, s.steps
    assert_equal (s.current_time + 3), s.next_time
    assert !s.limit?
    i += 1
  end
  assert_equal 9, i
  override_current_time(s, s.next_time)
  s.step
  # The next full interval would overshoot the timeout, so it is clamped.
  assert_equal s.timeout_at, s.next_time
  s.step
  assert s.limit?
end
# max_steps caps the retry count: after the 5th step the state reports
# limit? even though the 29s timeout window is not yet exhausted.
test 'periodic retries with max_steps' do
  s = @d.retry_state_create(:t2, :periodic, 3, 29, randomize: false, max_steps: 5)
  dummy_current_time = s.start
  override_current_time(s, dummy_current_time)
  assert_equal dummy_current_time, s.current_time
  assert_equal (dummy_current_time + 29), s.timeout_at
  assert_equal (dummy_current_time + 3), s.next_time
  i = 1
  while i < 5
    override_current_time(s, s.next_time)
    s.step
    assert_equal i, s.steps
    assert_equal (s.current_time + 3), s.next_time
    assert !s.limit?
    i += 1
  end
  assert_equal 5, i
  override_current_time(s, s.next_time)
  s.step
  assert s.limit?
end
# With secondary enabled (default threshold 0.8), the state flips from
# :primary to :secondary at 80% of the timeout window (t=80 of 100), then
# continues periodic steps until the timeout clamps the final next_time.
test 'periodic retries with secondary' do
  s = @d.retry_state_create(:t3, :periodic, 3, 100, randomize: false, secondary: true) # threshold 0.8
  dummy_current_time = s.start
  override_current_time(s, dummy_current_time)
  assert_equal dummy_current_time, s.current_time
  assert_equal (dummy_current_time + 100), s.timeout_at
  assert_equal (dummy_current_time + 100 * 0.8), s.secondary_transition_at
  assert_equal (dummy_current_time + 3), s.next_time
  assert !s.secondary?
  # Steps 1..25 are plain primary retries every 3s (up to t=78).
  i = 1
  while i < 26
    override_current_time(s, s.next_time)
    assert !s.secondary?
    s.step
    assert_equal i, s.steps
    assert_equal (s.current_time + 3), s.next_time
    assert !s.limit?
    i += 1
  end
  assert_equal 26, i
  override_current_time(s, s.next_time) # 78
  assert !s.secondary?
  s.step
  assert_equal 26, s.steps
  # The next retry is pulled forward to the secondary transition point.
  assert_equal s.secondary_transition_at, s.next_time
  assert !s.limit?
  i += 1
  assert_equal 27, i
  override_current_time(s, s.next_time) # 80
  assert s.secondary?
  s.step
  assert_equal (s.current_time + 3), s.next_time
  assert_equal s.steps, s.secondary_transition_steps
  assert !s.limit?
  i += 1
  # Secondary retries keep the same 3s cadence until near the timeout.
  while i < 33
    override_current_time(s, s.next_time)
    assert s.secondary?
    s.step
    assert_equal (s.current_time + 3), s.next_time
    assert !s.limit?
    i += 1
  end
  assert_equal 33, i
  override_current_time(s, s.next_time) # 98
  assert s.secondary?
  s.step
  assert_equal s.timeout_at, s.next_time # 100
  s.step
  assert s.limit?
end
test 'periodic retries with secondary and specified threshold' do
  # secondary_threshold: 0.75 moves the transition point from the default
  # 80% of the timeout window down to 75%.
  state = @d.retry_state_create(:t3, :periodic, 3, 100, randomize: false, secondary: true, secondary_threshold: 0.75)
  now = state.start
  override_current_time(state, now)
  assert_equal now, state.current_time
  assert_equal (now + 100), state.timeout_at
  assert_equal (now + 100 * 0.75), state.secondary_transition_at
end
test 'periodic retries with secondary and max_steps' do
  # With max_steps, the secondary transition is computed against the span the
  # steps can actually cover (interval * max_steps), not the full timeout.
  state = @d.retry_state_create(:t3, :periodic, 3, 100, max_steps: 5, randomize: false, secondary: true)
  now = state.start
  override_current_time(state, now)
  assert_equal now, state.current_time
  assert_equal (now + 100), state.timeout_at
  assert_equal (now + (3 * 5) * 0.8), state.secondary_transition_at
end
# forever: true ignores the timeout: intervals keep doubling (backoff_base 2)
# from the initial 0.1s wait and limit? is never reached.
test 'exponential backoff forever without randomization' do
  s = @d.retry_state_create(:t11, :exponential_backoff, 0.1, 300, randomize: false, forever: true, backoff_base: 2)
  dummy_current_time = s.start
  override_current_time(s, dummy_current_time)
  assert_equal dummy_current_time, s.current_time
  assert_equal 0, s.steps
  assert_equal (dummy_current_time + 0.1), s.next_time
  i = 1
  while i < 300
    s.step
    assert_equal i, s.steps
    # Interval after N steps is wait * base**N, measured from the fixed start.
    assert_equal (dummy_current_time + 0.1 * (2 ** i)), s.next_time
    assert !s.limit?
    i += 1
  end
end
# max_interval caps the doubling: 0.1 * 2**10 would be 102.4s, so from step
# 10 onward the interval sticks at the 100s ceiling.
test 'exponential backoff with max_interval' do
  s = @d.retry_state_create(:t12, :exponential_backoff, 0.1, 300, randomize: false, forever: true, backoff_base: 2, max_interval: 100)
  dummy_current_time = s.start
  override_current_time(s, dummy_current_time)
  assert_equal dummy_current_time, s.current_time
  assert_equal 0, s.steps
  assert_equal (dummy_current_time + 0.1), s.next_time
  # 0.1 * 2 ** 9 == 51.2
  # 0.1 * 2 ** 10 == 102.4
  i = 1
  while i < 10
    s.step
    assert_equal i, s.steps
    assert_equal (dummy_current_time + 0.1 * (2 ** i)), s.next_time, "start:#{dummy_current_time}, i:#{i}"
    i += 1
  end
  s.step
  assert_equal 10, s.steps
  assert_equal (dummy_current_time + 100), s.next_time
  s.step
  assert_equal 11, s.steps
  assert_equal (dummy_current_time + 100), s.next_time
end
# With a 12s timeout: intervals 1, 2, 4 fit (7s elapsed); the next doubling
# (8s) would overshoot, so next_time is clamped to timeout_at and the
# following step hits the limit.
test 'exponential backoff with shorter timeout' do
  s = @d.retry_state_create(:t13, :exponential_backoff, 1, 12, randomize: false, backoff_base: 2, max_interval: 10)
  dummy_current_time = s.start
  override_current_time(s, dummy_current_time)
  assert_equal dummy_current_time, s.current_time
  assert_equal (dummy_current_time + 12), s.timeout_at
  assert_equal 0, s.steps
  assert_equal (dummy_current_time + 1), s.next_time
  # 1 + 2 + 4 (=7)
  override_current_time(s, s.next_time)
  s.step
  assert_equal 1, s.steps
  assert_equal (s.current_time + 2), s.next_time
  override_current_time(s, s.next_time)
  s.step
  assert_equal 2, s.steps
  assert_equal (s.current_time + 4), s.next_time
  assert !s.limit?
  # + 8 (=15) > 12
  override_current_time(s, s.next_time)
  s.step
  assert_equal 3, s.steps
  assert_equal s.timeout_at, s.next_time
  s.step
  assert s.limit?
end
# Combines max_interval (caps doubling at 10s) and max_steps (limit after
# the 6th step) while the 120s timeout stays far from being exhausted.
test 'exponential backoff with max_steps' do
  s = @d.retry_state_create(:t14, :exponential_backoff, 1, 120, randomize: false, backoff_base: 2, max_interval: 10, max_steps: 6)
  dummy_current_time = s.start
  override_current_time(s, dummy_current_time)
  assert_equal dummy_current_time, s.current_time
  assert_equal (dummy_current_time + 120), s.timeout_at
  assert_equal 0, s.steps
  assert_equal (dummy_current_time + 1), s.next_time
  override_current_time(s, s.next_time)
  s.step
  assert_equal 1, s.steps
  assert_equal (s.current_time + 2), s.next_time
  override_current_time(s, s.next_time)
  s.step
  assert_equal 2, s.steps
  assert_equal (s.current_time + 4), s.next_time
  override_current_time(s, s.next_time)
  s.step
  assert_equal 3, s.steps
  assert_equal (s.current_time + 8), s.next_time
  assert !s.limit?
  # 2**4 = 16 exceeds max_interval, so the interval is capped at 10s.
  override_current_time(s, s.next_time)
  s.step
  assert_equal 4, s.steps
  assert_equal (s.current_time + 10), s.next_time
  assert !s.limit?
  override_current_time(s, s.next_time)
  s.step
  assert_equal 5, s.steps
  assert_equal (s.current_time + 10), s.next_time
  assert !s.limit?
  override_current_time(s, s.next_time)
  s.step
  assert_equal 6, s.steps
  assert s.limit?
end
# Exponential backoff with secondary: primary retries double until the 80%
# transition point (t=80 of 100), then the secondary phase restarts the
# exponential sequence from the transition time.
test 'exponential backoff retries with secondary' do
  s = @d.retry_state_create(:t15, :exponential_backoff, 1, 100, randomize: false, backoff_base: 2, secondary: true) # threshold 0.8
  dummy_current_time = s.start
  override_current_time(s, dummy_current_time)
  assert_equal dummy_current_time, s.current_time
  assert_equal (dummy_current_time + 100), s.timeout_at
  assert_equal (dummy_current_time + 100 * 0.8), s.secondary_transition_at
  assert_equal (dummy_current_time + 1), s.next_time
  assert !s.secondary?
  # primary: 3, 7, 15, 31, 63, 80 (timeout * threshold)
  # secondary: 81, 83, 87, 95, 100
  i = 1
  while i < 6
    override_current_time(s, s.next_time)
    assert !s.secondary?
    s.step
    assert_equal i, s.steps
    assert_equal (s.current_time + 1 * (2 ** i)), s.next_time
    assert !s.limit?
    i += 1
  end
  assert_equal 6, i
  override_current_time(s, s.next_time) # 63
  assert !s.secondary?
  s.step
  assert_equal 6, s.steps
  # The next retry is pulled forward to the secondary transition point.
  assert_equal s.secondary_transition_at, s.next_time
  assert !s.limit?
  i += 1
  assert_equal 7, i
  override_current_time(s, s.next_time) # 80
  assert s.secondary?
  s.step
  assert_equal 7, s.steps
  assert_equal s.steps, s.secondary_transition_steps
  assert_equal (s.secondary_transition_at + 1.0), s.next_time # 81
  assert !s.limit?
  assert_equal :secondary, s.current
  # Secondary phase restarts the exponential sequence: 83, 87, 95, 100.
  j = 1
  while j < 4
    override_current_time(s, s.next_time)
    assert s.secondary?
    assert_equal :secondary, s.current
    s.step
    assert_equal (7 + j), s.steps
    assert_equal (s.current_time + (1 * (2 ** j))), s.next_time
    assert !s.limit?, "j:#{j}"
    j += 1
  end
  assert_equal 4, j
  override_current_time(s, s.next_time) # 95
  assert s.secondary?
  s.step
  assert_equal s.timeout_at, s.next_time # 100
  s.step
  assert s.limit?
end
test 'exponential backoff retries with secondary and specified threshold' do
  # A custom secondary_threshold shifts the transition to 75% of the timeout.
  state = @d.retry_state_create(:t16, :exponential_backoff, 1, 100, randomize: false, secondary: true, backoff_base: 2, secondary_threshold: 0.75)
  now = state.start
  override_current_time(state, now)
  assert_equal now, state.current_time
  assert_equal (now + 100), state.timeout_at
  assert_equal (now + 100 * 0.75), state.secondary_transition_at
end
test 'exponential backoff retries with secondary and max_steps' do
  # With max_steps, the secondary transition is based on the total span the
  # capped exponential sequence can cover (1+2+4+8+16 = 31s), not the timeout.
  state = @d.retry_state_create(:t15, :exponential_backoff, 1, 100, randomize: false, max_steps: 5, backoff_base: 2, secondary: true) # threshold 0.8
  now = state.start
  override_current_time(state, now)
  max_span = (0...5).sum { |i| 1.0 * (2 ** i) }
  assert_equal now, state.current_time
  assert_equal (now + 100), state.timeout_at
  assert_equal (now + max_span * 0.8), state.secondary_transition_at
end
sub_test_case 'exponential backoff' do
  # Regression guard: once 2**steps overflows Float to Infinity, the state
  # must keep using the last finite interval instead of raising.
  test 'too big steps(check inf handling)' do
    s = @d.retry_state_create(:t11, :exponential_backoff, 1, 300, randomize: false, forever: true, backoff_base: 2)
    dummy_current_time = s.start
    override_current_time(s, dummy_current_time)
    i = 1
    while i < 1027
      if i >= 1025
        # With this setting, 1025+ number causes inf in `calc_interval`, so 1024 value is used for next_time
        assert_nothing_raised(FloatDomainError) { s.step }
        assert_equal (dummy_current_time + (2 ** (1024 - 1))), s.next_time
      else
        s.step
      end
      i += 1
    end
  end
end
sub_test_case "ExponentialBackOff_ScenarioTests" do
data("Simple timeout", {
timeout: 100, max_steps: nil, max_interval: nil, use_sec: false, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 100, false),
],
})
data("Simple timeout with secondary", {
timeout: 100, max_steps: nil, max_interval: nil, use_sec: true, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 80, true),
RetryRecord.new(8, 81, true),
RetryRecord.new(9, 83, true),
RetryRecord.new(10, 87, true),
RetryRecord.new(11, 95, true),
RetryRecord.new(12, 100, true),
],
})
data("Simple timeout with custom wait and backoff_base", {
timeout: 1000, max_steps: nil, max_interval: nil, use_sec: false, sec_thres: 0.8, wait: 2, backoff_base: 3,
expected: [
RetryRecord.new(1, 2, false),
RetryRecord.new(2, 8, false),
RetryRecord.new(3, 26, false),
RetryRecord.new(4, 80, false),
RetryRecord.new(5, 242, false),
RetryRecord.new(6, 728, false),
RetryRecord.new(7, 1000, false),
],
})
data("Simple timeout with custom wait and backoff_base and secondary", {
timeout: 1000, max_steps: nil, max_interval: nil, use_sec: true, sec_thres: 0.8, wait: 2, backoff_base: 3,
expected: [
RetryRecord.new(1, 2, false),
RetryRecord.new(2, 8, false),
RetryRecord.new(3, 26, false),
RetryRecord.new(4, 80, false),
RetryRecord.new(5, 242, false),
RetryRecord.new(6, 728, false),
RetryRecord.new(7, 800, true),
RetryRecord.new(8, 802, true),
RetryRecord.new(9, 808, true),
RetryRecord.new(10, 826, true),
RetryRecord.new(11, 880, true),
RetryRecord.new(12, 1000, true),
],
})
data("Default timeout", {
timeout: 72*3600, max_steps: nil, max_interval: nil, use_sec: false, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 1023, false),
RetryRecord.new(11, 2047, false),
RetryRecord.new(12, 4095, false),
RetryRecord.new(13, 8191, false),
RetryRecord.new(14, 16383, false),
RetryRecord.new(15, 32767, false),
RetryRecord.new(16, 65535, false),
RetryRecord.new(17, 131071, false),
RetryRecord.new(18, 259200, false),
],
})
data("Default timeout with secondary", {
timeout: 72*3600, max_steps: nil, max_interval: nil, use_sec: true, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 1023, false),
RetryRecord.new(11, 2047, false),
RetryRecord.new(12, 4095, false),
RetryRecord.new(13, 8191, false),
RetryRecord.new(14, 16383, false),
RetryRecord.new(15, 32767, false),
RetryRecord.new(16, 65535, false),
RetryRecord.new(17, 131071, false),
RetryRecord.new(18, 207360, true),
RetryRecord.new(19, 207361, true),
RetryRecord.new(20, 207363, true),
RetryRecord.new(21, 207367, true),
RetryRecord.new(22, 207375, true),
RetryRecord.new(23, 207391, true),
RetryRecord.new(24, 207423, true),
RetryRecord.new(25, 207487, true),
RetryRecord.new(26, 207615, true),
RetryRecord.new(27, 207871, true),
RetryRecord.new(28, 208383, true),
RetryRecord.new(29, 209407, true),
RetryRecord.new(30, 211455, true),
RetryRecord.new(31, 215551, true),
RetryRecord.new(32, 223743, true),
RetryRecord.new(33, 240127, true),
RetryRecord.new(34, 259200, true),
],
})
data("Default timeout with secondary and custom threshold", {
timeout: 72*3600, max_steps: nil, max_interval: nil, use_sec: true, sec_thres: 0.5, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 1023, false),
RetryRecord.new(11, 2047, false),
RetryRecord.new(12, 4095, false),
RetryRecord.new(13, 8191, false),
RetryRecord.new(14, 16383, false),
RetryRecord.new(15, 32767, false),
RetryRecord.new(16, 65535, false),
RetryRecord.new(17, 129600, true),
RetryRecord.new(18, 129601, true),
RetryRecord.new(19, 129603, true),
RetryRecord.new(20, 129607, true),
RetryRecord.new(21, 129615, true),
RetryRecord.new(22, 129631, true),
RetryRecord.new(23, 129663, true),
RetryRecord.new(24, 129727, true),
RetryRecord.new(25, 129855, true),
RetryRecord.new(26, 130111, true),
RetryRecord.new(27, 130623, true),
RetryRecord.new(28, 131647, true),
RetryRecord.new(29, 133695, true),
RetryRecord.new(30, 137791, true),
RetryRecord.new(31, 145983, true),
RetryRecord.new(32, 162367, true),
RetryRecord.new(33, 195135, true),
RetryRecord.new(34, 259200, true),
],
})
data("Simple max_steps", {
timeout: 72*3600, max_steps: 10, max_interval: nil, use_sec: false, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 1023, false),
],
})
data("Simple max_steps with secondary", {
timeout: 72*3600, max_steps: 10, max_interval: nil, use_sec: true, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 818, true),
],
})
data("Simple interval", {
timeout: 72*3600, max_steps: nil, max_interval: 3600, use_sec: false, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 1023, false),
RetryRecord.new(11, 2047, false),
RetryRecord.new(12, 4095, false),
RetryRecord.new(13, 7695, false),
RetryRecord.new(14, 11295, false),
RetryRecord.new(15, 14895, false),
RetryRecord.new(16, 18495, false),
RetryRecord.new(17, 22095, false),
RetryRecord.new(18, 25695, false),
RetryRecord.new(19, 29295, false),
RetryRecord.new(20, 32895, false),
RetryRecord.new(21, 36495, false),
RetryRecord.new(22, 40095, false),
RetryRecord.new(23, 43695, false),
RetryRecord.new(24, 47295, false),
RetryRecord.new(25, 50895, false),
RetryRecord.new(26, 54495, false),
RetryRecord.new(27, 58095, false),
RetryRecord.new(28, 61695, false),
RetryRecord.new(29, 65295, false),
RetryRecord.new(30, 68895, false),
RetryRecord.new(31, 72495, false),
RetryRecord.new(32, 76095, false),
RetryRecord.new(33, 79695, false),
RetryRecord.new(34, 83295, false),
RetryRecord.new(35, 86895, false),
RetryRecord.new(36, 90495, false),
RetryRecord.new(37, 94095, false),
RetryRecord.new(38, 97695, false),
RetryRecord.new(39, 101295, false),
RetryRecord.new(40, 104895, false),
RetryRecord.new(41, 108495, false),
RetryRecord.new(42, 112095, false),
RetryRecord.new(43, 115695, false),
RetryRecord.new(44, 119295, false),
RetryRecord.new(45, 122895, false),
RetryRecord.new(46, 126495, false),
RetryRecord.new(47, 130095, false),
RetryRecord.new(48, 133695, false),
RetryRecord.new(49, 137295, false),
RetryRecord.new(50, 140895, false),
RetryRecord.new(51, 144495, false),
RetryRecord.new(52, 148095, false),
RetryRecord.new(53, 151695, false),
RetryRecord.new(54, 155295, false),
RetryRecord.new(55, 158895, false),
RetryRecord.new(56, 162495, false),
RetryRecord.new(57, 166095, false),
RetryRecord.new(58, 169695, false),
RetryRecord.new(59, 173295, false),
RetryRecord.new(60, 176895, false),
RetryRecord.new(61, 180495, false),
RetryRecord.new(62, 184095, false),
RetryRecord.new(63, 187695, false),
RetryRecord.new(64, 191295, false),
RetryRecord.new(65, 194895, false),
RetryRecord.new(66, 198495, false),
RetryRecord.new(67, 202095, false),
RetryRecord.new(68, 205695, false),
RetryRecord.new(69, 209295, false),
RetryRecord.new(70, 212895, false),
RetryRecord.new(71, 216495, false),
RetryRecord.new(72, 220095, false),
RetryRecord.new(73, 223695, false),
RetryRecord.new(74, 227295, false),
RetryRecord.new(75, 230895, false),
RetryRecord.new(76, 234495, false),
RetryRecord.new(77, 238095, false),
RetryRecord.new(78, 241695, false),
RetryRecord.new(79, 245295, false),
RetryRecord.new(80, 248895, false),
RetryRecord.new(81, 252495, false),
RetryRecord.new(82, 256095, false),
RetryRecord.new(83, 259200, false),
],
})
data("Simple interval with secondary", {
timeout: 72*3600, max_steps: nil, max_interval: 3600, use_sec: true, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 1023, false),
RetryRecord.new(11, 2047, false),
RetryRecord.new(12, 4095, false),
RetryRecord.new(13, 7695, false),
RetryRecord.new(14, 11295, false),
RetryRecord.new(15, 14895, false),
RetryRecord.new(16, 18495, false),
RetryRecord.new(17, 22095, false),
RetryRecord.new(18, 25695, false),
RetryRecord.new(19, 29295, false),
RetryRecord.new(20, 32895, false),
RetryRecord.new(21, 36495, false),
RetryRecord.new(22, 40095, false),
RetryRecord.new(23, 43695, false),
RetryRecord.new(24, 47295, false),
RetryRecord.new(25, 50895, false),
RetryRecord.new(26, 54495, false),
RetryRecord.new(27, 58095, false),
RetryRecord.new(28, 61695, false),
RetryRecord.new(29, 65295, false),
RetryRecord.new(30, 68895, false),
RetryRecord.new(31, 72495, false),
RetryRecord.new(32, 76095, false),
RetryRecord.new(33, 79695, false),
RetryRecord.new(34, 83295, false),
RetryRecord.new(35, 86895, false),
RetryRecord.new(36, 90495, false),
RetryRecord.new(37, 94095, false),
RetryRecord.new(38, 97695, false),
RetryRecord.new(39, 101295, false),
RetryRecord.new(40, 104895, false),
RetryRecord.new(41, 108495, false),
RetryRecord.new(42, 112095, false),
RetryRecord.new(43, 115695, false),
RetryRecord.new(44, 119295, false),
RetryRecord.new(45, 122895, false),
RetryRecord.new(46, 126495, false),
RetryRecord.new(47, 130095, false),
RetryRecord.new(48, 133695, false),
RetryRecord.new(49, 137295, false),
RetryRecord.new(50, 140895, false),
RetryRecord.new(51, 144495, false),
RetryRecord.new(52, 148095, false),
RetryRecord.new(53, 151695, false),
RetryRecord.new(54, 155295, false),
RetryRecord.new(55, 158895, false),
RetryRecord.new(56, 162495, false),
RetryRecord.new(57, 166095, false),
RetryRecord.new(58, 169695, false),
RetryRecord.new(59, 173295, false),
RetryRecord.new(60, 176895, false),
RetryRecord.new(61, 180495, false),
RetryRecord.new(62, 184095, false),
RetryRecord.new(63, 187695, false),
RetryRecord.new(64, 191295, false),
RetryRecord.new(65, 194895, false),
RetryRecord.new(66, 198495, false),
RetryRecord.new(67, 202095, false),
RetryRecord.new(68, 205695, false),
RetryRecord.new(69, 207360, true),
RetryRecord.new(70, 207361, true),
RetryRecord.new(71, 207363, true),
RetryRecord.new(72, 207367, true),
RetryRecord.new(73, 207375, true),
RetryRecord.new(74, 207391, true),
RetryRecord.new(75, 207423, true),
RetryRecord.new(76, 207487, true),
RetryRecord.new(77, 207615, true),
RetryRecord.new(78, 207871, true),
RetryRecord.new(79, 208383, true),
RetryRecord.new(80, 209407, true),
RetryRecord.new(81, 211455, true),
RetryRecord.new(82, 215055, true),
RetryRecord.new(83, 218655, true),
RetryRecord.new(84, 222255, true),
RetryRecord.new(85, 225855, true),
RetryRecord.new(86, 229455, true),
RetryRecord.new(87, 233055, true),
RetryRecord.new(88, 236655, true),
RetryRecord.new(89, 240255, true),
RetryRecord.new(90, 243855, true),
RetryRecord.new(91, 247455, true),
RetryRecord.new(92, 251055, true),
RetryRecord.new(93, 254655, true),
RetryRecord.new(94, 258255, true),
RetryRecord.new(95, 259200, true),
],
})
data("Max_steps and max_interval", {
timeout: 72*3600, max_steps: 30, max_interval: 3600, use_sec: false, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 1023, false),
RetryRecord.new(11, 2047, false),
RetryRecord.new(12, 4095, false),
RetryRecord.new(13, 7695, false),
RetryRecord.new(14, 11295, false),
RetryRecord.new(15, 14895, false),
RetryRecord.new(16, 18495, false),
RetryRecord.new(17, 22095, false),
RetryRecord.new(18, 25695, false),
RetryRecord.new(19, 29295, false),
RetryRecord.new(20, 32895, false),
RetryRecord.new(21, 36495, false),
RetryRecord.new(22, 40095, false),
RetryRecord.new(23, 43695, false),
RetryRecord.new(24, 47295, false),
RetryRecord.new(25, 50895, false),
RetryRecord.new(26, 54495, false),
RetryRecord.new(27, 58095, false),
RetryRecord.new(28, 61695, false),
RetryRecord.new(29, 65295, false),
RetryRecord.new(30, 68895, false),
],
})
data("Max_steps and max_interval with secondary", {
timeout: 72*3600, max_steps: 30, max_interval: 3600, use_sec: true, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 1023, false),
RetryRecord.new(11, 2047, false),
RetryRecord.new(12, 4095, false),
RetryRecord.new(13, 7695, false),
RetryRecord.new(14, 11295, false),
RetryRecord.new(15, 14895, false),
RetryRecord.new(16, 18495, false),
RetryRecord.new(17, 22095, false),
RetryRecord.new(18, 25695, false),
RetryRecord.new(19, 29295, false),
RetryRecord.new(20, 32895, false),
RetryRecord.new(21, 36495, false),
RetryRecord.new(22, 40095, false),
RetryRecord.new(23, 43695, false),
RetryRecord.new(24, 47295, false),
RetryRecord.new(25, 50895, false),
RetryRecord.new(26, 54495, false),
RetryRecord.new(27, 55116, true),
RetryRecord.new(28, 55117, true),
RetryRecord.new(29, 55119, true),
RetryRecord.new(30, 55123, true),
],
})
data("Max_steps and max_interval with timeout", {
timeout: 10000, max_steps: 30, max_interval: 1000, use_sec: false, sec_thres: 0.8, wait: 1, backoff_base: 2,
expected: [
RetryRecord.new(1, 1, false),
RetryRecord.new(2, 3, false),
RetryRecord.new(3, 7, false),
RetryRecord.new(4, 15, false),
RetryRecord.new(5, 31, false),
RetryRecord.new(6, 63, false),
RetryRecord.new(7, 127, false),
RetryRecord.new(8, 255, false),
RetryRecord.new(9, 511, false),
RetryRecord.new(10, 1023, false),
RetryRecord.new(11, 2023, false),
RetryRecord.new(12, 3023, false),
RetryRecord.new(13, 4023, false),
RetryRecord.new(14, 5023, false),
RetryRecord.new(15, 6023, false),
RetryRecord.new(16, 7023, false),
RetryRecord.new(17, 8023, false),
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_parser.rb | test/plugin_helper/test_parser.rb | require_relative '../helper'
require 'fluent/plugin_helper/parser'
require 'fluent/plugin/base'
require 'fluent/time'
class ParserHelperTest < Test::Unit::TestCase
  # Minimal parser plugin used as a fixture: splits comma-separated text into
  # a record of {"0" => first, "1" => second, ...} and yields it with the
  # current event time.
  class ExampleParser < Fluent::Plugin::Parser
    Fluent::Plugin.register_parser('example', self)
    def parse(text)
      ary = text.split(/\s*,\s*/)
      r = {}
      ary.each_with_index do |v, i|
        r[i.to_s] = v
      end
      yield Fluent::EventTime.now, r
    end
  end

  # Behaves exactly like ExampleParser but is registered under a distinct
  # type name. Inheriting removes the previously duplicated #parse body;
  # instances remain is_a?(Example2Parser) for the assertions below.
  class Example2Parser < ExampleParser
    Fluent::Plugin.register_parser('example2', self)
  end

  # Owner plugin whose <parse> section defaults to @type 'example'.
  class Dummy < Fluent::Plugin::TestBase
    helpers :parser
    config_section :parse do
      config_set_default :@type, 'example'
    end
  end

  # Owner plugin whose <parse> section defaults to @type 'example2'.
  class Dummy2 < Fluent::Plugin::TestBase
    helpers :parser
    config_section :parse do
      config_set_default :@type, 'example2'
    end
  end

  setup do
    @d = nil
  end

  teardown do
    if @d
      @d.stop unless @d.stopped?
      @d.shutdown unless @d.shutdown?
      @d.close unless @d.closed?
      @d.terminate unless @d.terminated?
    end
  end

  test 'can be initialized without any parsers at first' do
    d = Dummy.new
    assert_equal 0, d._parsers.size
  end

  test 'can override default configuration parameters, but not overwrite whole definition' do
    d = Dummy.new
    assert_equal [], d.parser_configs

    d = Dummy2.new
    d.configure(config_element('ROOT', '', {}, [config_element('parse', '', {}, [])]))
    # the helper must not define a #parse method on the owner itself
    assert_raise NoMethodError do
      d.parse
    end
    assert_equal 1, d.parser_configs.size
    assert_equal 'example2', d.parser_configs.first[:@type]
  end

  test 'creates instance of type specified by conf, or default_type if @type is missing in conf' do
    d = Dummy2.new
    d.configure(config_element())
    i = d.parser_create(conf: config_element('parse', '', {'@type' => 'example'}), default_type: 'example2')
    assert{ i.is_a?(ExampleParser) }

    d = Dummy2.new
    d.configure(config_element())
    i = d.parser_create(conf: nil, default_type: 'example2')
    assert{ i.is_a?(Example2Parser) }
  end

  test 'raises config error if config section is specified, but @type is not specified' do
    d = Dummy2.new
    d.configure(config_element())
    assert_raise Fluent::ConfigError.new("@type is required in <parse>") do
      d.parser_create(conf: config_element('parse', '', {}), default_type: 'example2')
    end
  end

  test 'can be configured with default type without parse sections' do
    d = Dummy.new
    d.configure(config_element())
    assert_equal 1, d._parsers.size
  end

  test 'can be configured with a parse section' do
    d = Dummy.new
    conf = config_element('ROOT', '', {}, [
      config_element('parse', '', {'@type' => 'example'})
    ])
    assert_nothing_raised do
      d.configure(conf)
    end
    assert_equal 1, d._parsers.size
    # parsers are instantiated at configure time but not started yet
    assert{ d._parsers.values.all?{ |parser| !parser.started? } }
  end

  test 'can be configured with 2 or more parse sections with different usages with each other' do
    d = Dummy.new
    conf = config_element('ROOT', '', {}, [
      config_element('parse', 'default', {'@type' => 'example'}),
      config_element('parse', 'extra', {'@type' => 'example2'}),
    ])
    assert_nothing_raised do
      d.configure(conf)
    end
    assert_equal 2, d._parsers.size
    assert{ d._parsers.values.all?{ |parser| !parser.started? } }
  end

  test 'cannot be configured with 2 parse sections with same usage' do
    d = Dummy.new
    conf = config_element('ROOT', '', {}, [
      config_element('parse', 'default', {'@type' => 'example'}),
      config_element('parse', 'extra', {'@type' => 'example2'}),
      config_element('parse', 'extra', {'@type' => 'example2'}),
    ])
    assert_raises Fluent::ConfigError do
      d.configure(conf)
    end
  end

  test 'creates a parse plugin instance which is already configured without usage' do
    @d = d = Dummy.new
    conf = config_element('ROOT', '', {}, [
      config_element('parse', '', {'@type' => 'example'})
    ])
    d.configure(conf)
    d.start

    parser = d.parser_create
    assert{ parser.is_a? ExampleParser }
    assert parser.started?
  end

  test 'creates a parser plugin instance which is already configured with usage' do
    @d = d = Dummy.new
    conf = config_element('ROOT', '', {}, [
      config_element('parse', 'mydata', {'@type' => 'example'})
    ])
    d.configure(conf)
    d.start

    parser = d.parser_create(usage: 'mydata')
    assert{ parser.is_a? ExampleParser }
    assert parser.started?
  end

  test 'creates a parser plugin without configurations' do
    @d = d = Dummy.new
    d.configure(config_element())
    d.start

    parser = d.parser_create(usage: 'mydata', type: 'example', conf: config_element('parse', 'mydata'))
    assert{ parser.is_a? ExampleParser }
    assert parser.started?
  end

  test 'creates 2 or more parser plugin instances' do
    @d = d = Dummy.new
    conf = config_element('ROOT', '', {}, [
      config_element('parse', 'mydata', {'@type' => 'example'}),
      config_element('parse', 'secret', {'@type' => 'example2'})
    ])
    d.configure(conf)
    d.start

    p1 = d.parser_create(usage: 'mydata')
    p2 = d.parser_create(usage: 'secret')
    assert{ p1.is_a? ExampleParser }
    assert p1.started?
    assert{ p2.is_a? Example2Parser }
    assert p2.started?
  end

  test 'calls lifecycle methods for all plugin instances via owner plugin' do
    @d = d = Dummy.new
    conf = config_element('ROOT', '', {}, [ config_element('parse', '', {'@type' => 'example'}), config_element('parse', 'e2', {'@type' => 'example'}) ])
    d.configure(conf)
    d.start

    i1 = d.parser_create(usage: '')
    i2 = d.parser_create(usage: 'e2')
    i3 = d.parser_create(usage: 'e3', type: 'example2')

    assert i1.started?
    assert i2.started?
    assert i3.started?

    # every lifecycle transition on the owner must propagate to all parsers
    assert !i1.stopped?
    assert !i2.stopped?
    assert !i3.stopped?

    d.stop

    assert i1.stopped?
    assert i2.stopped?
    assert i3.stopped?

    assert !i1.before_shutdown?
    assert !i2.before_shutdown?
    assert !i3.before_shutdown?

    d.before_shutdown

    assert i1.before_shutdown?
    assert i2.before_shutdown?
    assert i3.before_shutdown?

    assert !i1.shutdown?
    assert !i2.shutdown?
    assert !i3.shutdown?

    d.shutdown

    assert i1.shutdown?
    assert i2.shutdown?
    assert i3.shutdown?

    assert !i1.after_shutdown?
    assert !i2.after_shutdown?
    assert !i3.after_shutdown?

    d.after_shutdown

    assert i1.after_shutdown?
    assert i2.after_shutdown?
    assert i3.after_shutdown?

    assert !i1.closed?
    assert !i2.closed?
    assert !i3.closed?

    d.close

    assert i1.closed?
    assert i2.closed?
    assert i3.closed?

    assert !i1.terminated?
    assert !i2.terminated?
    assert !i3.terminated?

    d.terminate

    assert i1.terminated?
    assert i2.terminated?
    assert i3.terminated?
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_http_server_helper.rb | test/plugin_helper/test_http_server_helper.rb | require_relative '../helper'
require 'flexmock/test_unit'
require 'fluent/plugin_helper/http_server'
require 'fluent/plugin/output'
require 'fluent/event'
require 'net/http'
require 'uri'
require 'openssl'
require 'async'
class HttpHelperTest < Test::Unit::TestCase
  NULL_LOGGER = Logger.new(nil)
  CERT_DIR = File.expand_path(File.dirname(__FILE__) + '/data/cert/without_ca')
  CERT_CA_DIR = File.expand_path(File.dirname(__FILE__) + '/data/cert/with_ca')

  def setup
    @port = unused_port(protocol: :tcp)
  end

  def teardown
    @port = nil
  end

  class Dummy < Fluent::Plugin::TestBase
    helpers :http_server
  end

  # Configures and starts a Dummy plugin, yields it, and always walks the
  # plugin through the remaining lifecycle steps afterwards (each step is
  # best-effort: failures are ignored so one broken step doesn't mask others).
  def on_driver(config = nil)
    config ||= Fluent::Config.parse('', '(name)', '')
    Fluent::Test.setup
    driver = Dummy.new
    driver.configure(config)
    driver.start
    driver.after_start

    yield(driver)
  ensure
    if driver # driver is nil when Dummy.new itself raised
      unless driver.stopped?
        driver.stop rescue nil
      end

      unless driver.before_shutdown?
        driver.before_shutdown rescue nil
      end

      unless driver.shutdown?
        driver.shutdown rescue nil
      end

      unless driver.after_shutdown?
        driver.after_shutdown rescue nil
      end

      unless driver.closed?
        driver.close rescue nil
      end

      unless driver.terminated?
        # BUG FIX: this used to call `driver.terminated` (a typo hidden by the
        # rescue), so #terminate was never actually invoked.
        driver.terminate rescue nil
      end
    end
  end

  # Like on_driver, but wraps opts in a <transport tls> section.
  def on_driver_transport(opts = {}, &block)
    transport_conf = config_element('transport', 'tls', opts)
    c = config_element('ROOT', '', {}, [transport_conf])
    on_driver(c, &block)
  end

  %w[get head].each do |n|
    define_method(n) do |uri, header = {}|
      url = URI.parse(uri)
      headers = { 'Content-Type' => 'application/x-www-form-urlencoded/' }.merge(header)
      req = Net::HTTP.const_get(n.capitalize).new(url, headers)
      Net::HTTP.start(url.host, url.port) do |http|
        http.request(req)
      end
    end

    define_method("secure_#{n}") do |uri, header = {}, verify: true, cert_path: nil, selfsigned: true, hostname: false|
      url = URI.parse(uri)
      headers = { 'Content-Type' => 'application/x-www-form-urlencoded/' }.merge(header)
      # BUG FIX: `hostname` is now forwarded to start_https_request; it was
      # previously accepted but silently dropped.
      start_https_request(url.host, url.port, verify: verify, cert_path: cert_path, selfsigned: selfsigned, hostname: hostname) do |https|
        https.send(n, url.path, headers.to_a)
      end
    end
  end

  %w[post put patch delete options trace].each do |n|
    define_method(n) do |uri, body = '', header = {}|
      url = URI.parse(uri)
      headers = { 'Content-Type' => 'application/x-www-form-urlencoded/' }.merge(header)
      req = Net::HTTP.const_get(n.capitalize).new(url, headers)
      req.body = body
      Net::HTTP.start(url.host, url.port) do |http|
        http.request(req)
      end
    end
  end

  # wrapper for net/http
  Response = Struct.new(:code, :body, :headers)

  # Use async-http as http client since net/http can't be set verify_hostname= now
  # will be replaced when net/http supports verify_hostname=
  def start_https_request(addr, port, verify: true, cert_path: nil, selfsigned: true, hostname: nil)
    context = OpenSSL::SSL::SSLContext.new
    context.set_params({})

    if verify
      cert_store = OpenSSL::X509::Store.new
      cert_store.set_default_paths
      if selfsigned && OpenSSL::X509.const_defined?('V_FLAG_CHECK_SS_SIGNATURE')
        cert_store.flags = OpenSSL::X509::V_FLAG_CHECK_SS_SIGNATURE
      end

      if cert_path
        cert_store.add_file(cert_path)
      end

      context.cert_store = cert_store
      if !hostname
        context.verify_hostname = false # In test code, using hostname to be connected is very difficult
      end

      context.verify_mode = OpenSSL::SSL::VERIFY_PEER
    else
      context.verify_mode = OpenSSL::SSL::VERIFY_NONE
    end

    client = Async::HTTP::Client.new(Async::HTTP::Endpoint.parse("https://#{addr}:#{port}", ssl_context: context))

    Console.logger = Fluent::Log::ConsoleAdapter.wrap(NULL_LOGGER)
    resp = nil
    error = nil

    Sync do
      Console.logger = Fluent::Log::ConsoleAdapter.wrap(NULL_LOGGER)
      begin
        response = yield(client)
      rescue => e # Async::Reactor rescue all error. handle it by myself
        error = e
      end

      if response
        resp = Response.new(response.status.to_s, response.read, response.headers)
      end
    end

    if error
      raise error
    else
      resp
    end
  end

  sub_test_case 'Create a HTTP server' do
    test 'mount given path' do
      on_driver do |driver|
        driver.http_server_create_http_server(:http_server_helper_test, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do |s|
          s.get('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello get'] }
          s.post('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello post'] }
          s.head('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello head'] }
          s.put('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello put'] }
          s.patch('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello patch'] }
          s.delete('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello delete'] }
          s.trace('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello trace'] }
          s.options('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello options'] }
        end

        resp = head("http://127.0.0.1:#{@port}/example/hello")
        assert_equal('200', resp.code)
        assert_equal(nil, resp.body)
        assert_equal('text/plain', resp['Content-Type'])

        # BUG FIX: was `%w[get put post put delete options trace]` — `put` was
        # checked twice and the mounted `patch` route was never exercised.
        %w[get post put patch delete options trace].each do |n|
          resp = send(n, "http://127.0.0.1:#{@port}/example/hello")
          assert_equal('200', resp.code)
          assert_equal("hello #{n}", resp.body)
          assert_equal('text/plain', resp['Content-Type'])
        end
      end
    end

    test 'mount frozen path' do
      on_driver do |driver|
        driver.http_server_create_http_server(:http_server_helper_test, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do |s|
          s.get('/example/hello'.freeze) { [200, { 'Content-Type' => 'text/plain' }, 'hello get'] }
        end

        resp = get("http://127.0.0.1:#{@port}/example/hello/")
        assert_equal('200', resp.code)
      end
    end

    test 'when path does not start with `/` or ends with `/`' do
      on_driver do |driver|
        driver.http_server_create_http_server(:http_server_helper_test, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do |s|
          s.get('example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello get'] }
          s.get('/example/hello2/') { [200, { 'Content-Type' => 'text/plain' }, 'hello get'] }
        end

        resp = get("http://127.0.0.1:#{@port}/example/hello")
        assert_equal('404', resp.code)

        resp = get("http://127.0.0.1:#{@port}/example/hello2")
        assert_equal('200', resp.code)
      end
    end

    test 'when error raised' do
      on_driver do |driver|
        driver.http_server_create_http_server(:http_server_helper_test, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do |s|
          s.get('/example/hello') { raise 'error!' }
        end

        resp = get("http://127.0.0.1:#{@port}/example/hello")
        assert_equal('500', resp.code)
      end
    end

    test 'when path is not found' do
      on_driver do |driver|
        driver.http_server_create_http_server(:http_server_helper_test, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do |s|
          s.get('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello get'] }
        end

        resp = get("http://127.0.0.1:#{@port}/example/hello/not_found")
        assert_equal('404', resp.code)
      end
    end

    test 'params and body' do
      on_driver do |driver|
        driver.http_server_create_http_server(:http_server_helper_test, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do |s|
          s.get('/example/hello') do |req|
            assert_equal(req.query_string, nil)
            assert_equal(req.body, nil)
            [200, { 'Content-Type' => 'text/plain' }, 'hello get']
          end

          s.post('/example/hello') do |req|
            assert_equal(req.query_string, nil)
            assert_equal(req.body, 'this is body')
            [200, { 'Content-Type' => 'text/plain' }, 'hello post']
          end

          s.get('/example/hello/params') do |req|
            assert_equal(req.query_string, 'test=true')
            assert_equal(req.body, nil)
            [200, { 'Content-Type' => 'text/plain' }, 'hello get']
          end

          s.post('/example/hello/params') do |req|
            assert_equal(req.query_string, 'test=true')
            assert_equal(req.body, 'this is body')
            [200, { 'Content-Type' => 'text/plain' }, 'hello post']
          end
        end

        resp = get("http://127.0.0.1:#{@port}/example/hello")
        assert_equal('200', resp.code)

        resp = post("http://127.0.0.1:#{@port}/example/hello", 'this is body')
        assert_equal('200', resp.code)

        resp = get("http://127.0.0.1:#{@port}/example/hello/params?test=true")
        assert_equal('200', resp.code)

        resp = post("http://127.0.0.1:#{@port}/example/hello/params?test=true", 'this is body')
        assert_equal('200', resp.code)
      end
    end

    sub_test_case 'create a HTTPS server' do
      test '#configure' do
        driver = Dummy.new
        transport_conf = config_element('transport', 'tls', { 'version' => 'TLSv1_1' })
        driver.configure(config_element('ROOT', '', {}, [transport_conf]))
        assert_equal :tls, driver.transport_config.protocol
        assert_equal :TLSv1_1, driver.transport_config.version
      end

      sub_test_case '#http_server_create_https_server' do
        test 'can overwrite settings by using tls_context' do
          on_driver_transport({ 'insecure' => 'false' }) do |driver|
            tls = { 'insecure' => 'true' } # overwrite
            driver.http_server_create_https_server(:http_server_helper_test_tls, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER, tls_opts: tls) do |s|
              s.get('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello get'] }
            end

            resp = secure_get("https://127.0.0.1:#{@port}/example/hello", verify: false)
            assert_equal('200', resp.code)
            assert_equal('hello get', resp.body)
          end
        end

        test 'with insecure in transport section' do
          on_driver_transport({ 'insecure' => 'true' }) do |driver|
            driver.http_server_create_https_server(:http_server_helper_test_tls, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do |s|
              s.get('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello get'] }
            end

            omit "TLS connection should be aborted due to `Errno::ECONNABORTED`. Need to debug." if Fluent.windows?

            resp = secure_get("https://127.0.0.1:#{@port}/example/hello", verify: false)
            assert_equal('200', resp.code)
            assert_equal('hello get', resp.body)

            assert_raise OpenSSL::SSL::SSLError do
              secure_get("https://127.0.0.1:#{@port}/example/hello")
            end
          end
        end

        data(
          'with passphrase' => ['apple', 'cert-pass.pem', 'cert-key-pass.pem'],
          'without passphrase' => [nil, 'cert.pem', 'cert-key.pem'])
        test 'load self-signed cert/key pair, verified from clients using cert files' do |(passphrase, cert, private_key)|
          omit "Self signed certificate blocks TLS connection. Need to debug." if Fluent.windows?

          cert_path = File.join(CERT_DIR, cert)
          private_key_path = File.join(CERT_DIR, private_key)
          opt = { 'insecure' => 'false', 'private_key_path' => private_key_path, 'cert_path' => cert_path }
          if passphrase
            opt['private_key_passphrase'] = passphrase
          end

          on_driver_transport(opt) do |driver|
            driver.http_server_create_https_server(:http_server_helper_test_tls, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do |s|
              s.get('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello get'] }
            end

            resp = secure_get("https://127.0.0.1:#{@port}/example/hello", cert_path: cert_path)
            assert_equal('200', resp.code)
            assert_equal('hello get', resp.body)
          end
        end

        data(
          'with passphrase' => ['apple', 'cert-pass.pem', 'cert-key-pass.pem', 'ca-cert-pass.pem'],
          'without passphrase' => [nil, 'cert.pem', 'cert-key.pem', 'ca-cert.pem'])
        test 'load cert by private CA cert file, verified from clients using CA cert file' do |(passphrase, cert, cert_key, ca_cert)|
          omit "Self signed certificate blocks TLS connection. Need to debug." if Fluent.windows?

          cert_path = File.join(CERT_CA_DIR, cert)
          private_key_path = File.join(CERT_CA_DIR, cert_key)
          ca_cert_path = File.join(CERT_CA_DIR, ca_cert)
          opt = { 'insecure' => 'false', 'cert_path' => cert_path, 'private_key_path' => private_key_path }
          if passphrase
            opt['private_key_passphrase'] = passphrase
          end

          on_driver_transport(opt) do |driver|
            driver.http_server_create_https_server(:http_server_helper_test_tls, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do |s|
              s.get('/example/hello') { [200, { 'Content-Type' => 'text/plain' }, 'hello get'] }
            end

            resp = secure_get("https://127.0.0.1:#{@port}/example/hello", cert_path: ca_cert_path)
            assert_equal('200', resp.code)
            assert_equal('hello get', resp.body)
          end
        end
      end
    end

    test 'must be called #start and #stop' do
      on_driver do |driver|
        server = flexmock('Server') do |watcher|
          watcher.should_receive(:start).once.and_return do |que|
            que.push(:start)
          end
          watcher.should_receive(:stop).once
        end

        stub(Fluent::PluginHelper::HttpServer::Server).new(addr: anything, port: anything, logger: anything, default_app: anything) { server }
        driver.http_server_create_http_server(:http_server_helper_test, addr: '127.0.0.1', port: @port, logger: NULL_LOGGER) do
          # nothing
        end
        driver.stop
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_socket.rb | test/plugin_helper/test_socket.rb | require_relative '../helper'
require 'fluent/plugin_helper/socket'
require 'fluent/plugin/base'
require 'socket'
require 'openssl'
class SocketHelperTest < Test::Unit::TestCase
  CERT_DIR = File.expand_path(File.dirname(__FILE__) + '/data/cert/without_ca')
  CA_CERT_DIR = File.expand_path(File.dirname(__FILE__) + '/data/cert/with_ca')
  CERT_CHAINS_DIR = File.expand_path(File.dirname(__FILE__) + '/data/cert/cert_chains')

  def setup
    @port = unused_port(protocol: :tcp)
  end

  def teardown
    @port = nil
  end

  class SocketHelperTestPlugin < Fluent::Plugin::TestBase
    helpers :socket
  end

  # Tiny single-connection TLS echo server used by the tests below:
  # accepts one client, echoes everything it reads, then closes.
  class EchoTLSServer
    def initialize(port, host: '127.0.0.1', cert_path: nil, private_key_path: nil, ca_path: nil)
      server = TCPServer.open(host, port)
      ctx = OpenSSL::SSL::SSLContext.new
      # BUG FIX: File.read instead of File.open — the File objects handed to
      # Certificate.new / RSA.new were never closed, leaking descriptors.
      ctx.cert = OpenSSL::X509::Certificate.new(File.read(cert_path)) if cert_path
      cert_store = OpenSSL::X509::Store.new
      cert_store.set_default_paths
      cert_store.add_file(ca_path) if ca_path
      ctx.cert_store = cert_store
      ctx.key = OpenSSL::PKey::RSA.new(File.read(private_key_path)) if private_key_path
      ctx.verify_mode = OpenSSL::SSL::VERIFY_PEER
      ctx.verify_hostname = false
      @server = OpenSSL::SSL::SSLServer.new(server, ctx)
      @thread = nil
      @r, @w = IO.pipe # written to wake up the accept loop on #stop
    end

    # Starts the echo thread; with a block, yields, waits for the echo to
    # finish, and always stops the server.
    def start
      do_start

      if block_given?
        begin
          yield
          @thread.join(5)
        ensure
          stop
        end
      end
    end

    # Signals the accept loop via the pipe, closes all sockets and joins the thread.
    def stop
      unless @w.closed?
        @w.write('stop')
      end

      [@server, @w, @r].each do |s|
        next if s.closed?
        s.close
      end

      @thread.join(5)
    end

    private

    def do_start
      @thread = Thread.new(@server) do |s|
        socks, _, _ = IO.select([s.accept, @r], nil, nil)
        if socks.include?(@r)
          break # #stop was requested before/while a client connected
        end

        sock = socks.first
        buf = +''
        loop do
          b = sock.read_nonblock(1024, nil, exception: false)
          if b == :wait_readable || b.nil?
            break
          end
          buf << b
        end

        sock.write(buf)
        sock.close
      end
    end
  end

  test 'with self-signed cert/key pair' do
    cert_path = File.join(CERT_DIR, 'cert.pem')
    private_key_path = File.join(CERT_DIR, 'cert-key.pem')

    EchoTLSServer.new(@port, cert_path: cert_path, private_key_path: private_key_path).start do
      client = SocketHelperTestPlugin.new.socket_create_tls('127.0.0.1', @port, verify_fqdn: false, cert_paths: [cert_path])
      client.write('hello')
      assert_equal 'hello', client.readpartial(100)
      client.close
    end
  end

  test 'with cert/key signed by self-signed CA' do
    cert_path = File.join(CA_CERT_DIR, 'cert.pem')
    private_key_path = File.join(CA_CERT_DIR, 'cert-key.pem')
    ca_cert_path = File.join(CA_CERT_DIR, 'ca-cert.pem')

    EchoTLSServer.new(@port, cert_path: cert_path, private_key_path: private_key_path).start do
      client = SocketHelperTestPlugin.new.socket_create_tls('127.0.0.1', @port, verify_fqdn: false, cert_paths: [ca_cert_path])
      client.write('hello')
      assert_equal 'hello', client.readpartial(100)
      client.close
    end
  end

  test 'with cert/key signed by self-signed CA in server and client cert chain' do
    cert_path = File.join(CERT_DIR, 'cert.pem')
    private_key_path = File.join(CERT_DIR, 'cert-key.pem')

    client_ca_cert_path = File.join(CERT_CHAINS_DIR, 'ca-cert.pem')
    client_cert_path = File.join(CERT_CHAINS_DIR, 'cert.pem')
    client_private_key_path = File.join(CERT_CHAINS_DIR, 'cert-key.pem')

    EchoTLSServer.new(@port, cert_path: cert_path, private_key_path: private_key_path, ca_path: client_ca_cert_path).start do
      client = SocketHelperTestPlugin.new.socket_create_tls('127.0.0.1', @port, verify_fqdn: false, cert_path: client_cert_path, private_key_path: client_private_key_path, cert_paths: [cert_path])
      client.write('hello')
      assert_equal 'hello', client.readpartial(100)
      client.close
    end
  end

  test 'with empty cert file' do
    cert_path = File.expand_path(File.dirname(__FILE__) + '/data/cert/empty.pem')

    assert_raise Fluent::ConfigError do
      SocketHelperTestPlugin.new.socket_create_tls('127.0.0.1', @port, cert_path: cert_path)
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_compat_parameters.rb | test/plugin_helper/test_compat_parameters.rb | require_relative '../helper'
require 'fluent/plugin_helper/compat_parameters'
require 'fluent/plugin/input'
require 'fluent/plugin/output'
require 'fluent/time'
require 'time'
# Tests for the compat_parameters plugin helper, which translates v0.12-era
# flat configuration parameters (buffer_type, flush_interval, format,
# include_time_key, ...) into the v1 API sections (<buffer>, <inject>,
# <format>, <parse>, <extract>).
class CompatParameterTest < Test::Unit::TestCase
  setup do
    Fluent::Test.setup
    @i = nil
    # Formatters terminate each record with the platform newline.
    @default_newline = if Fluent.windows?
                         "\r\n"
                       else
                         "\n"
                       end
  end

  teardown do
    # Walk the plugin through the full shutdown sequence so buffers and
    # threads opened by a test are released even if the test body failed.
    if @i
      @i.stop unless @i.stopped?
      @i.before_shutdown unless @i.before_shutdown?
      @i.shutdown unless @i.shutdown?
      @i.after_shutdown unless @i.after_shutdown?
      @i.close unless @i.closed?
      @i.terminate unless @i.terminated?
    end
  end

  # Input plugin exercising compat conversion for <extract> and <parse>.
  class DummyI0 < Fluent::Plugin::Input
    helpers :compat_parameters, :parser, :extract
    attr_reader :parser
    def configure(conf)
      compat_parameters_convert(conf, :extract, :parser)
      super
    end
    def start
      super
      @parser = parser_create
    end
    # Parses +input_data+ and returns [tag, time, record] triples, with the
    # tag extracted from the record when possible.
    def produce_events(input_data)
      emit_events = [] # tag, time, record
      @parser.parse(input_data) do |time, record|
        tag = extract_tag_from_record(record) || 'dummy_tag'
        emit_events << [tag, time, record]
      end
      emit_events
    end
  end

  # Output plugin with no default chunk key.
  class DummyO0 < Fluent::Plugin::Output
    helpers :compat_parameters
    def configure(conf)
      compat_parameters_buffer(conf, default_chunk_key: '')
      super
    end
    def write(chunk); end # dummy
  end

  # Output plugin whose default chunk key is "time".
  class DummyO1 < Fluent::Plugin::Output
    helpers :compat_parameters
    def configure(conf)
      compat_parameters_buffer(conf, default_chunk_key: 'time')
      super
    end
    def write(chunk); end # dummy
  end

  # Same defaults as DummyO1; used by the time_slice_format test below.
  class DummyO2 < Fluent::Plugin::Output
    helpers :compat_parameters
    def configure(conf)
      compat_parameters_buffer(conf, default_chunk_key: 'time')
      super
    end
    def write(chunk); end # dummy
  end

  # Output plugin whose default chunk key is "tag".
  class DummyO3 < Fluent::Plugin::Output
    helpers :compat_parameters
    def configure(conf)
      compat_parameters_buffer(conf, default_chunk_key: 'tag')
      super
    end
    def write(chunk); end # dummy
  end

  # Output plugin converting buffer, inject and formatter parameters at once.
  class DummyO4 < Fluent::Plugin::Output
    helpers :compat_parameters, :inject, :formatter
    attr_reader :f
    def configure(conf)
      compat_parameters_convert(conf, :buffer, :inject, :formatter, default_chunk_key: 'tag')
      super
    end
    def start
      super
      @f = formatter_create()
    end
    def write(chunk); end # dummy
  end

  sub_test_case 'output plugins which does not have default chunk key' do
    test 'plugin helper converts parameters into plugin configuration parameters' do
      hash = {
        'num_threads' => 8,
        'flush_interval' => '10s',
        'buffer_chunk_limit' => '8m',
        'buffer_queue_limit' => '1024',
        'flush_at_shutdown' => 'yes',
      }
      conf = config_element('ROOT', '', hash)
      @i = DummyO0.new
      @i.configure(conf)
      assert_equal 'memory', @i.buffer_config[:@type]
      assert_equal [], @i.buffer_config.chunk_keys
      assert_equal 8, @i.buffer_config.flush_thread_count
      assert_equal 10, @i.buffer_config.flush_interval
      assert_equal :default, @i.buffer_config.flush_mode
      assert @i.buffer_config.flush_at_shutdown
      assert_equal 8*1024*1024, @i.buffer.chunk_limit_size
      assert_equal 1024, @i.buffer.queue_limit_length
    end
  end

  sub_test_case 'output plugins which has default chunk key: time' do
    test 'plugin helper converts parameters into plugin configuration parameters' do
      hash = {
        'buffer_type' => 'file',
        'buffer_path' => '/tmp/mybuffer',
        'disable_retry_limit' => 'yes',
        'max_retry_wait' => '1h',
        'buffer_queue_full_action' => 'block',
      }
      conf = config_element('ROOT', '', hash)
      @i = DummyO1.new
      @i.configure(conf)
      assert_equal 'file', @i.buffer_config[:@type]
      # Without time_slice_format, timekey falls back to one day.
      assert_equal 24*60*60, @i.buffer_config.timekey
      assert @i.buffer_config.retry_forever
      assert_equal 60*60, @i.buffer_config.retry_max_interval
      assert_equal :block, @i.buffer_config.overflow_action
      assert_equal :default, @i.buffer_config.flush_mode
      assert !@i.chunk_key_tag
      assert_equal [], @i.chunk_keys
      assert_equal '/tmp/mybuffer/buffer.*.log', @i.buffer.path
    end
  end

  # DummyO2 also defaults the chunk key to "time"; here time_slice_format
  # (%Y%m%d%H => hourly) overrides the timekey.
  sub_test_case 'output plugins which has default chunk key: time (using time_slice_format)' do
    test 'plugin helper converts parameters into plugin configuration parameters' do
      hash = {
        'buffer_type' => 'file',
        'buffer_path' => '/tmp/mybuffer',
        'time_slice_format' => '%Y%m%d%H',
        'time_slice_wait' => '10',
        'retry_limit' => '1024',
        'buffer_queue_full_action' => 'drop_oldest_chunk',
      }
      conf = config_element('ROOT', '', hash)
      @i = DummyO2.new
      @i.configure(conf)
      assert_equal 'file', @i.buffer_config[:@type]
      assert_equal 60*60, @i.buffer_config.timekey
      assert_equal 10, @i.buffer_config.timekey_wait
      assert_equal 1024, @i.buffer_config.retry_max_times
      assert_equal :drop_oldest_chunk, @i.buffer_config.overflow_action
      assert @i.chunk_key_time
      assert !@i.chunk_key_tag
      assert_equal [], @i.chunk_keys
      assert_equal '/tmp/mybuffer/buffer.*.log', @i.buffer.path
    end
  end

  sub_test_case 'output plugins which has default chunk key: tag' do
    test 'plugin helper converts parameters into plugin configuration parameters' do
      hash = {
        'buffer_type' => 'memory',
        'num_threads' => '10',
        'flush_interval' => '10s',
        'try_flush_interval' => '0.1',
        'queued_chunk_flush_interval' => '0.5',
      }
      conf = config_element('ROOT', '', hash)
      @i = DummyO3.new
      @i.configure(conf)
      assert_equal 'memory', @i.buffer_config[:@type]
      assert_equal 10, @i.buffer_config.flush_thread_count
      assert_equal 10, @i.buffer_config.flush_interval
      assert_equal 0.1, @i.buffer_config.flush_thread_interval
      assert_equal 0.5, @i.buffer_config.flush_thread_burst_interval
      assert !@i.chunk_key_time
      assert @i.chunk_key_tag
      assert_equal [], @i.chunk_keys
    end
  end

  sub_test_case 'output plugins which has default chunk key: tag, and enables inject and formatter' do
    test 'plugin helper converts parameters into plugin configuration parameters for all of buffer, inject and formatter' do
      hash = {
        'buffer_type' => 'file',
        'buffer_path' => File.expand_path('../../tmp/compat_parameters/mybuffer.*.log', __FILE__),
        'num_threads' => '10',
        'format' => 'ltsv',
        'delimiter' => ',',
        'label_delimiter' => '%',
        'include_time_key' => 'true', # default time_key 'time' and default time format (iso8601: 2016-06-24T15:57:38) at localtime
        'include_tag_key' => 'yes', # default tag_key 'tag'
      }
      conf = config_element('ROOT', '', hash)
      @i = DummyO4.new
      @i.configure(conf)
      @i.start
      @i.after_start
      assert_equal 'file', @i.buffer_config[:@type]
      assert_equal 10, @i.buffer_config.flush_thread_count
      formatter = @i.f
      assert{ formatter.is_a? Fluent::Plugin::LabeledTSVFormatter }
      assert_equal ',', @i.f.delimiter
      assert_equal '%', @i.f.label_delimiter
      assert !@i.chunk_key_time
      assert @i.chunk_key_tag
      assert_equal [], @i.chunk_keys

      t = event_time('2016-06-24 16:05:01') # localtime
      iso8601str = Time.at(t.to_i).iso8601
      formatted = @i.f.format('tag.test', t, @i.inject_values_to_record('tag.test', t, {"value" => 1}))
      assert_equal "value%1,tag%tag.test,time%#{iso8601str}#{@default_newline}", formatted
    end

    # NOTE(review): this test body is identical to the one above and still
    # asserts an iso8601 string, not an integer unix time — the unix-time
    # injection case does not appear to be covered; TODO confirm intent.
    test 'plugin helper setups time injecting as unix time (integer from epoch)' do
      hash = {
        'buffer_type' => 'file',
        'buffer_path' => File.expand_path('../../tmp/compat_parameters/mybuffer.*.log', __FILE__),
        'num_threads' => '10',
        'format' => 'ltsv',
        'delimiter' => ',',
        'label_delimiter' => '%',
        'include_time_key' => 'true', # default time_key 'time' and default time format (iso8601: 2016-06-24T15:57:38) at localtime
        'include_tag_key' => 'yes', # default tag_key 'tag'
      }
      conf = config_element('ROOT', '', hash)
      @i = DummyO4.new
      @i.configure(conf)
      @i.start
      @i.after_start
      assert_equal 'file', @i.buffer_config[:@type]
      assert_equal 10, @i.buffer_config.flush_thread_count
      formatter = @i.f
      assert{ formatter.is_a? Fluent::Plugin::LabeledTSVFormatter }
      assert_equal ',', @i.f.delimiter
      assert_equal '%', @i.f.label_delimiter
      assert !@i.chunk_key_time
      assert @i.chunk_key_tag
      assert_equal [], @i.chunk_keys

      t = event_time('2016-06-24 16:05:01') # localtime
      iso8601str = Time.at(t.to_i).iso8601
      formatted = @i.f.format('tag.test', t, @i.inject_values_to_record('tag.test', t, {"value" => 1}))
      assert_equal "value%1,tag%tag.test,time%#{iso8601str}#{@default_newline}", formatted
    end
  end

  sub_test_case 'input plugins' do
    test 'plugin helper converts parameters into plugin configuration parameters for extract and parser' do
      hash = {
        'format' => 'ltsv',
        'delimiter' => ',',
        'label_delimiter' => '%',
        'tag_key' => 't2',
        'time_key' => 't',
        'time_format' => '%Y-%m-%d.%H:%M:%S.%N',
        'utc' => 'yes',
        'types' => 'A integer|B string|C bool',
        'types_delimiter' => '|',
        'types_label_delimiter' => ' ',
      }
      conf = config_element('ROOT', '', hash)
      @i = DummyI0.new
      @i.configure(conf)
      @i.start
      @i.after_start
      parser = @i.parser
      assert{ parser.is_a? Fluent::Plugin::LabeledTSVParser }
      assert_equal ',', parser.delimiter
      assert_equal '%', parser.label_delimiter

      events = @i.produce_events("A%1,B%x,C%true,t2%mytag,t%2016-10-20.03:50:11.987654321")
      assert_equal 1, events.size
      tag, time, record = events.first
      assert_equal 'mytag', tag
      assert_equal_event_time event_time("2016-10-20 03:50:11.987654321 +0000"), time
      # tag/time keys are extracted out of the record; only typed fields remain
      assert_equal 3, record.keys.size
      assert_equal ['A','B','C'], record.keys.sort
      assert_equal 1, record['A']
      assert_equal 'x', record['B']
      assert_equal true, record['C']
    end

    test 'plugin helper converts parameters into plugin configuration parameters for extract and parser, using numeric time' do
      hash = {
        'format' => 'ltsv',
        'delimiter' => ',',
        'label_delimiter' => '%',
        'tag_key' => 't2',
        'time_key' => 't',
        'time_type' => 'float',
        'localtime' => 'yes',
      }
      conf = config_element('ROOT', '', hash)
      @i = DummyI0.new
      @i.configure(conf)
      @i.start
      @i.after_start
      parser = @i.parser
      assert{ parser.is_a? Fluent::Plugin::LabeledTSVParser }
      assert_equal ',', parser.delimiter
      assert_equal '%', parser.label_delimiter
    end

    test 'plugin helper setups time extraction as unix time (integer from epoch)' do
      # TODO:
    end
  end

  sub_test_case 'parser plugins' do
    test 'syslog parser parameters' do
      hash = {
        'format' => 'syslog',
        'message_format' => 'rfc5424',
        'with_priority' => 'true',
        'rfc5424_time_format' => '%Y'
      }
      conf = config_element('ROOT', '', hash)
      @i = DummyI0.new
      @i.configure(conf)
      @i.start
      @i.after_start
      parser = @i.parser
      assert_kind_of(Fluent::Plugin::SyslogParser, parser)
      assert_equal :rfc5424, parser.message_format
      assert_equal true, parser.with_priority
      assert_equal '%Y', parser.rfc5424_time_format
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/test_inject.rb | test/plugin_helper/test_inject.rb | require_relative '../helper'
require 'fluent/plugin_helper/inject'
require 'fluent/plugin/output'
require 'fluent/event'
require 'time'
# Tests for the inject plugin helper, which adds hostname / worker id / tag /
# time fields into records (inject_values_to_record) and whole event streams
# (inject_values_to_event_stream) according to the <inject> section.
class InjectHelperTest < Test::Unit::TestCase
  # Bare plugin using the inject helper with library defaults.
  class Dummy < Fluent::Plugin::TestBase
    helpers :inject
  end

  # Plugin overriding only the hostname_key default in its <inject> section.
  class Dummy2 < Fluent::Plugin::TestBase
    helpers :inject
    config_section :inject do
      config_set_default :hostname_key, 'host'
    end
  end

  # Output plugin, used to verify the inject/buffer chunk-key conflict check.
  class Dummy3 < Fluent::Plugin::Output
    helpers :inject
    def write(chunk)
      # dummy
    end
  end

  # Builds a ROOT config element containing one <inject> section with +hash+.
  def config_inject_section(hash = {})
    config_element('ROOT', '', {}, [config_element('inject', '', hash)])
  end

  setup do
    Fluent::Test.setup
    @d = Dummy.new
  end

  teardown do
    # Run the complete plugin shutdown sequence to release resources.
    if @d
      @d.stop unless @d.stopped?
      @d.before_shutdown unless @d.before_shutdown?
      @d.shutdown unless @d.shutdown?
      @d.after_shutdown unless @d.after_shutdown?
      @d.close unless @d.closed?
      @d.terminate unless @d.terminated?
    end
  end

  test 'can override default parameters, but not overwrite whole definition' do
    d = Dummy.new
    d.configure(config_element())
    assert_nil d.inject_config

    d = Dummy2.new
    d.configure(config_element('ROOT', '', {}, [config_element('inject')]))
    assert d.inject_config
    assert_equal 'host', d.inject_config.hostname_key
  end

  test 'do nothing in default' do
    @d.configure(config_inject_section())
    @d.start
    assert_nil @d.instance_eval{ @_inject_hostname_key }
    assert_nil @d.instance_eval{ @_inject_hostname }
    assert_nil @d.instance_eval{ @_inject_worker_id_key }
    assert_nil @d.instance_eval{ @_inject_worker_id }
    assert_nil @d.instance_eval{ @_inject_tag_key }
    assert_nil @d.instance_eval{ @_inject_time_key }
    assert_nil @d.instance_eval{ @_inject_time_formatter }

    time = event_time()
    record = {"key1" => "value1", "key2" => 2}
    # With nothing configured, the record must be returned unmodified —
    # and as the very same object (no needless copy).
    assert_equal record, @d.inject_values_to_record('tag', time, record)
    assert_equal record.object_id, @d.inject_values_to_record('tag', time, record).object_id

    es0 = Fluent::OneEventStream.new(time, {"key1" => "v", "key2" => 0})
    es1 = Fluent::ArrayEventStream.new([ [time, {"key1" => "a", "key2" => 1}], [time, {"key1" => "b", "key2" => 2}] ])
    es2 = Fluent::MultiEventStream.new
    es2.add(event_time(), {"key1" => "a", "key2" => 1})
    es2.add(event_time(), {"key1" => "b", "key2" => 2})
    es3 = Fluent::MessagePackEventStream.new(es2.to_msgpack_stream)
    [es0, es1, es2, es3].each do |es|
      assert_equal es, @d.inject_values_to_event_stream('tag', es), "failed for #{es.class}"
      assert_equal es.object_id, @d.inject_values_to_event_stream('tag', es).object_id, "failed for #{es.class}"
    end
  end

  test 'can be configured as specified' do
    with_worker_config(workers: 1, worker_id: 0) do
      @d.configure(config_inject_section(
                     "hostname_key" => "hostname",
                     "hostname" => "myhost.local",
                     "worker_id_key" => "worker_id",
                     "tag_key" => "tag",
                     "time_key" => "time",
                     "time_type" => "string",
                     "time_format" => "%Y-%m-%d %H:%M:%S.%N",
                     "timezone" => "-0700",
                   ))
    end

    assert_equal "hostname", @d.instance_eval{ @_inject_hostname_key }
    assert_equal "myhost.local", @d.instance_eval{ @_inject_hostname }
    assert_equal "worker_id", @d.instance_eval{ @_inject_worker_id_key }
    assert_equal 0, @d.instance_eval{ @_inject_worker_id }
    assert_equal "tag", @d.instance_eval{ @_inject_tag_key }
    assert_equal "time", @d.instance_eval{ @_inject_time_key }
    assert_equal :string, @d.instance_eval{ @inject_config.time_type }
    assert_not_nil @d.instance_eval{ @_inject_time_formatter }
  end

  test 'raise an error when injected hostname is used in buffer chunk key too' do
    @d = Dummy3.new
    conf = config_element('ROOT', '', {}, [
                            config_element('inject', '', {'hostname_key' => 'h'}),
                            config_element('buffer', 'tag,h'),
                          ])
    assert_raise Fluent::ConfigError.new("the key specified by 'hostname_key' in <inject> cannot be used in buffering chunk key.") do
      @d.configure(conf)
    end
  end

  sub_test_case 'using inject_values_to_record' do
    test 'injects hostname automatically detected' do
      detected_hostname = `hostname`.chomp
      @d.configure(config_inject_section("hostname_key" => "host"))
      logs = @d.log.out.logs
      assert{ logs.any?{|l| l.include?("[info]: using hostname for specified field host_key=\"host\" host_name=\"#{detected_hostname}\"") } }
      @d.start
      time = event_time()
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"host" => detected_hostname}), @d.inject_values_to_record('tag', time, record)
    end

    test 'injects hostname as specified value' do
      @d.configure(config_inject_section("hostname_key" => "host", "hostname" => "myhost.yay.local"))
      @d.start
      time = event_time()
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"host" => "myhost.yay.local"}), @d.inject_values_to_record('tag', time, record)
    end

    test 'injects worker id' do
      with_worker_config(workers: 3, worker_id: 2) do
        @d.configure(config_inject_section("worker_id_key" => "workerid"))
      end
      @d.start
      time = event_time()
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"workerid" => 2}), @d.inject_values_to_record('tag', time, record)
    end

    test 'injects tag into specified key' do
      @d.configure(config_inject_section("tag_key" => "mytag"))
      @d.start
      time = event_time()
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"mytag" => "tag.test"}), @d.inject_values_to_record('tag.test', time, record)
    end

    test 'injects time as floating point value into specified key as default' do
      time_in_unix = Time.parse("2016-06-21 08:10:11 +0900").to_i # 1466464211 in unix time
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      float_time = 1466464211.320101 # microsecond precision in float
      @d.configure(config_inject_section("time_key" => "timedata"))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"timedata" => float_time}), @d.inject_values_to_record('tag', time, record)
    end

    test 'injects time as unix time millis into specified key' do
      time_in_unix = Time.parse("2016-06-21 08:10:11 +0900").to_i
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      unixtime_millis = 1466464211320
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "unixtime_millis"))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"timedata" => unixtime_millis}), @d.inject_values_to_record('tag', time, record)
      # Plain integer times (no subsecond part) must also be handled.
      assert_equal record.merge({"timedata" => time_in_unix * 1_000}), @d.inject_values_to_record('tag', time_in_unix, record)
    end

    test 'injects time as unix time micros into specified key' do
      time_in_unix = Time.parse("2016-06-21 08:10:11 +0900").to_i
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      unixtime_micros = 1466464211320101
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "unixtime_micros"))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"timedata" => unixtime_micros}), @d.inject_values_to_record('tag', time, record)
      assert_equal record.merge({"timedata" => time_in_unix * 1_000_000}), @d.inject_values_to_record('tag', time_in_unix, record)
    end

    test 'injects time as unix time nanos into specified key' do
      time_in_unix = Time.parse("2016-06-21 08:10:11 +0900").to_i
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      unixtime_nanos = 1466464211320101224
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "unixtime_nanos"))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"timedata" => unixtime_nanos}), @d.inject_values_to_record('tag', time, record)
      assert_equal record.merge({"timedata" => time_in_unix * 1_000_000_000}), @d.inject_values_to_record('tag', time_in_unix, record)
    end

    test 'injects time as unix time into specified key' do
      time_in_unix = Time.parse("2016-06-21 08:10:11 +0900").to_i
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      int_time = 1466464211
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "unixtime"))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"timedata" => int_time}), @d.inject_values_to_record('tag', time, record)
    end

    test 'injects time as formatted string in localtime if timezone not specified' do
      local_timezone = Time.now.strftime('%z')
      time_in_unix = Time.parse("2016-06-21 08:10:11 #{local_timezone}").to_i
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "string", "time_format" => "%Y_%m_%d %H:%M:%S %z"))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"timedata" => "2016_06_21 08:10:11 #{local_timezone}"}), @d.inject_values_to_record('tag', time, record)
    end

    test 'injects time as formatted string with nanosecond in localtime if timezone not specified' do
      local_timezone = Time.now.strftime('%z')
      time_in_unix = Time.parse("2016-06-21 08:10:11 #{local_timezone}").to_i
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "string", "time_format" => "%Y_%m_%d %H:%M:%S.%N %z"))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"timedata" => "2016_06_21 08:10:11.320101224 #{local_timezone}"}), @d.inject_values_to_record('tag', time, record)
    end

    test 'injects time as formatted string with millisecond in localtime if timezone not specified' do
      local_timezone = Time.now.strftime('%z')
      time_in_unix = Time.parse("2016-06-21 08:10:11 #{local_timezone}").to_i
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "string", "time_format" => "%Y_%m_%d %H:%M:%S.%3N %z"))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"timedata" => "2016_06_21 08:10:11.320 #{local_timezone}"}), @d.inject_values_to_record('tag', time, record)
    end

    test 'injects time as formatted string in specified timezone' do
      time_in_unix = Time.parse("2016-06-21 08:10:11 +0000").to_i
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "string", "time_format" => "%Y_%m_%d %H:%M:%S %z", "timezone" => "-0800"))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      assert_equal record.merge({"timedata" => "2016_06_21 00:10:11 -0800"}), @d.inject_values_to_record('tag', time, record)
    end

    test 'injects hostname, tag and time' do
      time_in_unix = Time.parse("2016-06-21 08:10:11 +0900").to_i
      time_subsecond = 320_101_224
      time = Fluent::EventTime.new(time_in_unix, time_subsecond)
      @d.configure(config_inject_section(
                     "hostname_key" => "hostnamedata",
                     "hostname" => "myname.local",
                     "tag_key" => "tagdata",
                     "time_key" => "timedata",
                     "time_type" => "string",
                     "time_format" => "%Y_%m_%d %H:%M:%S.%N %z",
                     "timezone" => "+0000",
                   ))
      @d.start
      record = {"key1" => "value1", "key2" => 2}
      injected = {"hostnamedata" => "myname.local", "tagdata" => "tag", "timedata" => "2016_06_20 23:10:11.320101224 +0000"}
      assert_equal record.merge(injected), @d.inject_values_to_record('tag', time, record)
    end
  end

  sub_test_case 'using inject_values_to_event_stream' do
    # Shared fixtures evaluated once at class-definition time and captured
    # by the data() declarations and test blocks below.
    local_timezone = Time.now.strftime('%z')
    time_in_unix = Time.parse("2016-06-21 08:10:11 #{local_timezone}").to_i
    time_subsecond = 320_101_224
    time_in_rational = Rational(time_in_unix * 1_000_000_000 + time_subsecond, 1_000_000_000)
    time_in_localtime = Time.at(time_in_rational).localtime
    time_in_utc = Time.at(time_in_rational).utc
    time = Fluent::EventTime.new(time_in_unix, time_subsecond)
    time_float = time.to_r.truncate(+6).to_f

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects hostname automatically detected' do |data|
      detected_hostname = `hostname`.chomp
      @d.configure(config_inject_section("hostname_key" => "host"))
      logs = @d.log.out.logs
      assert{ logs.any?{|l| l.include?("[info]: using hostname for specified field host_key=\"host\" host_name=\"#{detected_hostname}\"") } }
      @d.start
      injected = {"host" => detected_hostname}
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects hostname as specified value' do |data|
      @d.configure(config_inject_section("hostname_key" => "host", "hostname" => "myhost.yay.local"))
      @d.start
      injected = {"host" => "myhost.yay.local"}
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects tag into specified key' do |data|
      @d.configure(config_inject_section("tag_key" => "mytag"))
      @d.start
      injected = {"mytag" => "tag"}
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects time as floating point value into specified key as default' do |data|
      @d.configure(config_inject_section("time_key" => "timedata"))
      @d.start
      injected = {"timedata" => time_float }
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects time as unix time into specified key' do |data|
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "unixtime"))
      @d.start
      injected = {"timedata" => time_in_localtime.to_i}
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects time as formatted string in localtime if timezone not specified' do |data|
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "string", "time_format" => "%Y_%m_%d %H:%M:%S %z"))
      @d.start
      injected = {"timedata" => time_in_localtime.strftime("%Y_%m_%d %H:%M:%S %z")}
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects time as formatted string with nanosecond in localtime if timezone not specified' do |data|
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "string", "time_format" => "%Y_%m_%d %H:%M:%S.%N %z"))
      @d.start
      injected = {"timedata" => time_in_localtime.strftime("%Y_%m_%d %H:%M:%S.%N %z")}
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects time as formatted string with millisecond in localtime if timezone not specified' do |data|
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "string", "time_format" => "%Y_%m_%d %H:%M:%S.%3N %z"))
      @d.start
      injected = {"timedata" => time_in_localtime.strftime("%Y_%m_%d %H:%M:%S.%3N %z")}
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects time as formatted string in specified timezone' do |data|
      @d.configure(config_inject_section("time_key" => "timedata", "time_type" => "string", "time_format" => "%Y_%m_%d %H:%M:%S %z", "timezone" => "-0800"))
      @d.start
      injected = {"timedata" => Time.at(time_in_unix).localtime("-08:00").strftime("%Y_%m_%d %H:%M:%S -0800")}
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end

    data(
      "OneEventStream" => Fluent::OneEventStream.new(time, {"key1" => "value1", "key2" => 0}),
      "ArrayEventStream" => Fluent::ArrayEventStream.new([ [time, {"key1" => "value1", "key2" => 1}], [time, {"key1" => "value2", "key2" => 2}] ]),
    )
    test 'injects hostname, tag and time' do |data|
      @d.configure(config_inject_section(
                     "hostname_key" => "hostnamedata",
                     "hostname" => "myname.local",
                     "tag_key" => "tagdata",
                     "time_key" => "timedata",
                     "time_type" => "string",
                     "time_format" => "%Y_%m_%d %H:%M:%S.%N %z",
                     "timezone" => "+0000",
                   ))
      @d.start
      injected = {"hostnamedata" => "myname.local", "tagdata" => "tag", "timedata" => time_in_utc.strftime("%Y_%m_%d %H:%M:%S.%N %z")}
      expected_es = Fluent::MultiEventStream.new
      data.each do |t, r|
        expected_es.add(t, r.merge(injected))
      end
      assert_equal expected_es, @d.inject_values_to_event_stream('tag', data)
    end
  end

  # NOTE(review): a dead `format(conf)` helper was removed from this
  # sub_test_case: it was never called, referenced an undefined local `time`
  # and an undefined `create_driver`, and shadowed Kernel#format.
  sub_test_case 'time formatting with modified timezone' do
    setup do
      @time = event_time("2014-09-27 00:00:00 +00:00").to_i
    end

    def test_nothing_specified_about_time_formatting
      with_timezone("UTC-01") do
        # 'localtime' is true by default.
        @d.configure(config_inject_section("time_key" => "t", "time_type" => "string"))
        @d.start
        record = @d.inject_values_to_record('tag', @time, {"message" => "yay"})
        assert_equal("2014-09-27T01:00:00+01:00", record['t'])
      end
    end

    def test_utc
      with_timezone("UTC-01") do
        # 'utc' takes precedence over 'localtime'.
        @d.configure(config_inject_section("time_key" => "t", "time_type" => "string", "utc" => "true"))
        @d.start
        record = @d.inject_values_to_record('tag', @time, {"message" => "yay"})
        assert_equal("2014-09-27T00:00:00Z", record['t'])
      end
    end

    def test_timezone
      with_timezone("UTC-01") do
        # 'timezone' takes precedence over 'localtime'.
        @d.configure(config_inject_section("time_key" => "t", "time_type" => "string", "timezone" => "+02"))
        @d.start
        record = @d.inject_values_to_record('tag', @time, {"message" => "yay"})
        assert_equal("2014-09-27T02:00:00+02:00", record['t'])
      end
    end

    def test_utc_timezone
      with_timezone("UTC-01") do
        # 'timezone' takes precedence over 'utc'.
        @d.configure(config_inject_section("time_key" => "t", "time_type" => "string", "timezone" => "Asia/Tokyo", "utc" => "true"))
        @d.start
        record = @d.inject_values_to_record('tag', @time, {"message" => "yay"})
        assert_equal("2014-09-27T09:00:00+09:00", record['t'])
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/http_server/test_route.rb | test/plugin_helper/http_server/test_route.rb | require_relative '../../helper'
require 'flexmock/test_unit'
# The router helper may be unavailable (e.g. missing optional HTTP server
# dependencies); remember whether it loaded so the tests can be skipped.
skip =
  begin
    require 'fluent/plugin_helper/http_server/router'
    false
  rescue LoadError
    true
  end
unless skip
  # Tests for the HTTP server helper's Router: registering handlers with
  # #mount and dispatching with #route!, including the 404 default app.
  class HttpHelperRouterTest < Test::Unit::TestCase
    sub_test_case '#mount' do
      test 'mount with method and path' do
        router = Fluent::PluginHelper::HttpServer::Router.new
        echo = ->(request) { request }
        router.mount(:get, '/path/', echo)
        assert_equal(router.route!(:get, '/path/', 'request'), 'request')
      end

      test 'use default app if path is not found' do
        router = Fluent::PluginHelper::HttpServer::Router.new
        request = flexmock('request', path: 'path/')
        not_found = [404, { 'Content-Type' => 'text/plain' }, "404 Not Found\n"]
        assert_equal(router.route!(:get, '/path/', request), not_found)
      end

      test 'default app is configurable' do
        fallback = ->(request) { request }
        router = Fluent::PluginHelper::HttpServer::Router.new(fallback)
        assert_equal(router.route!(:get, '/path/', 'hello'), 'hello')
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/http_server/test_app.rb | test/plugin_helper/http_server/test_app.rb | require_relative '../../helper'
require 'flexmock/test_unit'
# The app helper may be unavailable (e.g. missing optional HTTP server
# dependencies); remember whether it loaded so the tests can be skipped.
skip =
  begin
    require 'fluent/plugin_helper/http_server/app'
    false
  rescue LoadError
    true
  end
unless skip
  # Tests for the HTTP server helper's App: dispatching requests through a
  # router for every HTTP verb, plus HEAD handling (response body suppressed).
  class HttpHelperAppTest < Test::Unit::TestCase
    NULL_LOGGER = Logger.new(nil)

    # Minimal stand-in router: maps method symbol -> path -> body and always
    # answers 200 with an empty header hash.
    class DummyRouter
      def initialize(table = {})
        @table = table
      end

      def route!(method, path, _req)
        body = @table.fetch(method).fetch(path)
        [200, {}, body]
      end
    end

    sub_test_case '#call' do
      data(
        'GET request' => 'GET',
        'POST request' => 'POST',
        'DELETE request' => 'DELETE',
        'PUT request' => 'PUT',
        'PATCH request' => 'PATCH',
        'OPTION request' => 'OPTIONS',
        'CONNECT request' => 'CONNECT',
        'TRACE request' => 'TRACE',
      )
      test 'dispatch correct path' do |method|
        router = DummyRouter.new(method.downcase.to_sym => { '/path/' => 'hi' })
        app = Fluent::PluginHelper::HttpServer::App.new(router, NULL_LOGGER)
        request = flexmock('request', method: method, path: '/path/')
        response = app.call(request)
        assert_equal(response.body.read, 'hi')
        assert_equal(response.status, 200)
      end

      test 'dispatch correct path for head' do |method|
        router = DummyRouter.new(head: { '/path/' => 'hi' })
        app = Fluent::PluginHelper::HttpServer::App.new(router, NULL_LOGGER)
        request = flexmock('request', method: method, path: '/path')
        response = app.call(request)
        assert_equal(response.body.read, '')
        assert_equal(response.status, 200)
      end

      test 'if path does not end with `/`' do |method|
        router = DummyRouter.new(head: { '/path/' => 'hi' })
        app = Fluent::PluginHelper::HttpServer::App.new(router, NULL_LOGGER)
        request = flexmock('request', method: method, path: '/path')
        response = app.call(request)
        assert_equal(response.body.read, '')
        assert_equal(response.status, 200)
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/http_server/test_request.rb | test/plugin_helper/http_server/test_request.rb | require_relative '../../helper'
require 'flexmock/test_unit'
require 'fluent/plugin_helper/http_server/request'
# Tests for Fluent::PluginHelper::HttpServer::Request, a wrapper around a
# protocol-level request exposing path, query string, parsed query and headers.
class HttpHelperRequestTest < Test::Unit::TestCase
  def test_request
    headers = Protocol::HTTP::Headers.new({ 'Content-Type' => 'text/html', 'Content-Encoding' => 'gzip' })
    req = flexmock('request', path: '/path?foo=42', headers: headers)
    request = Fluent::PluginHelper::HttpServer::Request.new(req)

    # The query string is split off the path; #query parses it into
    # a Hash of String => Array values.
    assert_equal('/path', request.path)
    assert_equal('foo=42', request.query_string)
    assert_equal({'foo'=>['42']}, request.query)
    # Header lookup is lowercase; note Content-Type yields a scalar while
    # Content-Encoding yields an array (behavior of Protocol::HTTP::Headers).
    assert_equal('text/html', request.headers['content-type'])
    assert_equal(['gzip'], request.headers['content-encoding'])
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/data/cert/generate_cert.rb | test/plugin_helper/data/cert/generate_cert.rb | require 'fluent/plugin_helper/cert_option'
require 'fileutils'
# Expose Fluent's cert_option_* generator helpers as module methods.
module CertUtil
  extend Fluent::PluginHelper::CertOption
end
# Output directories for the generated certificate fixture sets.
WITHOUT_CA_DIR = './without_ca'.freeze
WITH_CA_DIR = './with_ca'.freeze
WITH_CERT_CHAIN_DIR = './cert_chains'.freeze

# Subject/key parameters for CA certificates.
# expiration is 30 * 86400 * 12 * 100 seconds (~100 years of 30-day months),
# so the fixtures effectively never expire.
CA_OPTION = {
  private_key_length: 2048,
  country: 'US',
  state: 'CA',
  locality: 'Mountain View',
  common_name: 'ca.testing.fluentd.org',
  expiration: 30 * 86400 * 12 * 100,
  digest: :sha256,
}

# Subject/key parameters for server (leaf) certificates.
SERVER_OPTION = {
  private_key_length: 2048,
  country: 'US',
  state: 'CA',
  locality: 'Mountain View',
  common_name: 'server.testing.fluentd.org',
  expiration: 30 * 86400 * 12 * 100,
  digest: :sha256,
}
# Persist a certificate and its private key as PEM files with 0600 permissions.
# When a passphrase is given, the key is encrypted with AES-256-CBC.
def write_cert_and_key(cert_path, cert, key_path, key, passphrase)
  File.write(cert_path, cert.to_pem)
  # Write the secret key (raw or encrypted by AES256) in PEM format
  if passphrase
    key_pem = key.export(OpenSSL::Cipher.new('AES-256-CBC'), passphrase)
  else
    key_pem = key.export
  end
  File.write(key_path, key_pem)
  File.chmod(0o600, cert_path, key_path)
end
# Generate a self-signed server cert/key pair, write both files, and return
# the certificate object.
def create_server_pair_signed_by_self(cert_path, private_key_path, passphrase)
  pair = CertUtil.cert_option_generate_server_pair_self_signed(SERVER_OPTION)
  cert = pair[0]
  key = pair[1]
  write_cert_and_key(cert_path, cert, private_key_path, key, passphrase)
  cert
end
# Generate a self-signed CA cert/key pair, write both files, and return the
# certificate object.
def create_ca_pair_signed_by_self(cert_path, private_key_path, passphrase)
  pair = CertUtil.cert_option_generate_ca_pair_self_signed(CA_OPTION)
  cert = pair[0]
  key = pair[1]
  write_cert_and_key(cert_path, cert, private_key_path, key, passphrase)
  cert
end
# Issue a server certificate signed by the CA stored at ca_cert_path, write
# the cert/key files, and return the certificate object.
def create_server_pair_signed_by_ca(ca_cert_path, ca_key_path, ca_key_passphrase, cert_path, private_key_path, passphrase)
  pair = CertUtil.cert_option_generate_server_pair_by_ca(ca_cert_path, ca_key_path, ca_key_passphrase, SERVER_OPTION)
  cert = pair[0]
  key = pair[1]
  write_cert_and_key(cert_path, cert, private_key_path, key, passphrase)
  cert
end
# Fixture set: self-signed server certificates without any CA —
# one with a plain key and one with a passphrase-protected key.
def create_without_ca
  FileUtils.mkdir_p(WITHOUT_CA_DIR)

  plain_cert, plain_key = ['cert.pem', 'cert-key.pem'].map { |f| File.join(WITHOUT_CA_DIR, f) }
  create_server_pair_signed_by_self(plain_cert, plain_key, nil)

  pass_cert, pass_key = ['cert-pass.pem', 'cert-key-pass.pem'].map { |f| File.join(WITHOUT_CA_DIR, f) }
  create_server_pair_signed_by_self(pass_cert, pass_key, 'apple') # with passphrase
end
# Fixture set: CA-signed server certificates — one plain set and one set whose
# CA key ('orange') and server key ('apple') are passphrase-protected.
def create_with_ca
  FileUtils.mkdir_p(WITH_CA_DIR)
  path = ->(name) { File.join(WITH_CA_DIR, name) }

  # Plain CA and server pair.
  create_ca_pair_signed_by_self(path.call('ca-cert.pem'), path.call('ca-cert-key.pem'), nil)
  create_server_pair_signed_by_ca(path.call('ca-cert.pem'), path.call('ca-cert-key.pem'), nil,
                                  path.call('cert.pem'), path.call('cert-key.pem'), nil)

  # Passphrase-protected CA and server pair.
  create_ca_pair_signed_by_self(path.call('ca-cert-pass.pem'), path.call('ca-cert-key-pass.pem'), 'orange')
  create_server_pair_signed_by_ca(path.call('ca-cert-pass.pem'), path.call('ca-cert-key-pass.pem'), 'orange',
                                  path.call('cert-pass.pem'), path.call('cert-key-pass.pem'), 'apple')
end
# Build a three-level chain: root CA -> intermediate CA -> server certificate.
# Writes the root CA pair to ca_cert_path/ca_key_path and a chained PEM
# (server cert followed by the intermediate cert) to cert_path.
def create_cert_pair_chained_with_root_ca(ca_cert_path, ca_key_path, ca_key_passphrase, cert_path, private_key_path, passphrase)
  root_cert, root_key, _ = CertUtil.cert_option_generate_ca_pair_self_signed(CA_OPTION)
  write_cert_and_key(ca_cert_path, root_cert, ca_key_path, root_key, ca_key_passphrase)

  # Intermediate CA: same options as the root but a distinct common name,
  # issued by the root's subject and marked CA:TRUE via basicConstraints.
  intermediate_ca_options = CA_OPTION.dup
  intermediate_ca_options[:common_name] = 'ca2.testing.fluentd.org'
  chain_cert, chain_key = CertUtil.cert_option_generate_pair(intermediate_ca_options, root_cert.subject)
  chain_cert.add_extension(OpenSSL::X509::Extension.new('basicConstraints', OpenSSL::ASN1.Sequence([OpenSSL::ASN1::Boolean(true)])))
  chain_cert.sign(root_key, 'sha256')

  # Leaf (server) certificate: issued by the intermediate, CA:FALSE.
  cert, server_key, _ = CertUtil.cert_option_generate_pair(SERVER_OPTION, chain_cert.subject)
  cert.add_extension OpenSSL::X509::Extension.new('basicConstraints', OpenSSL::ASN1.Sequence([OpenSSL::ASN1::Boolean(false)]))
  cert.sign(chain_key, 'sha256')

  # write chained cert
  File.open(cert_path, 'w') do |f|
    f.write(cert.to_pem)
    f.write(chain_cert.to_pem)
  end
  # The server key is written directly here (not via write_cert_and_key,
  # which would also rewrite the cert file).
  key_str = passphrase ? server_key.export(OpenSSL::Cipher.new("AES-256-CBC"), passphrase) : server_key.export
  File.open(private_key_path, "w") { |f| f.write(key_str) }
  File.chmod(0600, cert_path, private_key_path)
end
# Fixture set: a server certificate chained with an intermediate CA and signed
# by a root CA (see create_cert_pair_chained_with_root_ca above).
def create_cert_chain
  FileUtils.mkdir_p(WITH_CERT_CHAIN_DIR)

  ca_cert_path = File.join(WITH_CERT_CHAIN_DIR, 'ca-cert.pem')
  ca_key_path = File.join(WITH_CERT_CHAIN_DIR, 'ca-cert-key.pem')
  cert_path = File.join(WITH_CERT_CHAIN_DIR, 'cert.pem')
  private_key_path = File.join(WITH_CERT_CHAIN_DIR, 'cert-key.pem')

  # Fix: the generator defined above is `create_cert_pair_chained_with_root_ca`;
  # the previous call to the undefined `create_server_pair_chained_with_root_ca`
  # raised NameError when this script ran.
  create_cert_pair_chained_with_root_ca(ca_cert_path, ca_key_path, nil, cert_path, private_key_path, nil)
end
# Generate every fixture set when this script is run.
create_without_ca
create_with_ca
create_cert_chain
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/service_discovery/test_round_robin_balancer.rb | test/plugin_helper/service_discovery/test_round_robin_balancer.rb | require_relative '../../helper'
require 'fluent/plugin_helper/service_discovery/round_robin_balancer'
# Tests for the round-robin balancer used by the service_discovery helper.
class TestRoundRobinBalancer < ::Test::Unit::TestCase
  test 'select_service' do
    rrb = Fluent::PluginHelper::ServiceDiscovery::RoundRobinBalancer.new
    rrb.rebalance([1, 2, 3])

    # Services are yielded one per call, cycling through the list in order.
    rrb.select_service { |n| assert_equal 1, n }
    rrb.select_service { |n| assert_equal 2, n }
    rrb.select_service { |n| assert_equal 3, n }
    rrb.select_service { |n| assert_equal 1, n }
    rrb.select_service { |n| assert_equal 2, n }
    rrb.select_service { |n| assert_equal 3, n }
    # Rebalancing replaces the list and restarts the rotation from the head.
    rrb.rebalance([1, 2, 3, 4])
    rrb.select_service { |n| assert_equal 1, n }
    rrb.select_service { |n| assert_equal 2, n }
    rrb.select_service { |n| assert_equal 3, n }
    rrb.select_service { |n| assert_equal 4, n }
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/plugin_helper/service_discovery/test_manager.rb | test/plugin_helper/service_discovery/test_manager.rb | require_relative '../../helper'
require 'fluent/plugin_helper/service_discovery/manager'
# Tests for Fluent::PluginHelper::ServiceDiscovery::Manager: building services
# from configured sd plugins and applying join/leave messages on run_once.
class TestServiceDiscoveryManager < ::Test::Unit::TestCase
  setup do
    @sd_file_dir = File.expand_path('../../plugin/data/sd_file', __dir__)
  end

  # Minimal sd plugin that lets the test push service_in/service_out messages
  # into the manager's queue directly.
  class TestSdPlugin < Fluent::Plugin::ServiceDiscovery
    Fluent::Plugin.register_sd('test_sd', self)

    def initialize
      super
    end

    def service_in(host, port)
      s = Fluent::Plugin::ServiceDiscovery::Service.new(:sd_test, host, port)
      @queue << Fluent::Plugin::ServiceDiscovery.service_in_msg(s)
    end

    def service_out(host, port)
      s = Fluent::Plugin::ServiceDiscovery::Service.new(:sd_test, host, port)
      @queue << Fluent::Plugin::ServiceDiscovery.service_out_msg(s)
    end

    def start(queue)
      @queue = queue
      super
    end
  end

  sub_test_case '#configure' do
    test 'build sd plugins and services' do
      sdm = Fluent::PluginHelper::ServiceDiscovery::Manager.new(log: $log)
      sdm.configure(
        [
          { type: :file, conf: config_element('service_discovery', '', { 'path' => File.join(@sd_file_dir, 'config.yml') }) },
          { type: :static, conf: config_element('root', '', {}, [config_element('service', '', { 'host' => '127.0.0.2', 'port' => '5432' })]) },
        ],
      )

      # Two services from the file-based discovery plus one static service.
      assert_equal 3, sdm.services.size
      assert_equal 24224, sdm.services[0].port
      assert_equal '127.0.0.1', sdm.services[0].host
      assert_equal 24225, sdm.services[1].port
      assert_equal '127.0.0.1', sdm.services[1].host
      assert_equal 5432, sdm.services[2].port
      assert_equal '127.0.0.2', sdm.services[2].host
      # Mixed file/static config is not considered static-only.
      assert_false sdm.static_config?
    end

    test 'no need to timer if only static' do
      sdm = Fluent::PluginHelper::ServiceDiscovery::Manager.new(log: $log)
      sdm.configure(
        [{ type: :static, conf: config_element('root', '', {}, [config_element('service', '', { 'host' => '127.0.0.2', 'port' => '5432' })]) }]
      )

      assert_equal 1, sdm.services.size
      assert_equal 5432, sdm.services[0].port
      assert_equal '127.0.0.2', sdm.services[0].host
      assert_true sdm.static_config?
    end
  end

  sub_test_case '#run_once' do
    test 'if new service added and deleted' do
      sdm = Fluent::PluginHelper::ServiceDiscovery::Manager.new(log: $log)
      t = TestSdPlugin.new
      # Make the manager pick up our hand-built plugin instance.
      mock(Fluent::Plugin).new_sd(:sd_test, parent: anything) { t }
      sdm.configure([{ type: :sd_test, conf: config_element('service_discovery', '', {})}])
      sdm.start

      assert_equal 0, sdm.services.size
      t.service_in('127.0.0.1', '1234')
      sdm.run_once
      assert_equal 1, sdm.services.size
      assert_equal '127.0.0.1', sdm.services[0].host
      assert_equal '1234', sdm.services[0].port

      t.service_out('127.0.0.1', '1234')
      sdm.run_once
      assert_equal 0, sdm.services.size
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/log/test_console_adapter.rb | test/log/test_console_adapter.rb | require_relative '../helper'
require 'fluent/log'
require 'fluent/log/console_adapter'
# Tests for Fluent::Log::ConsoleAdapter, which adapts Fluent's logger to the
# `console` gem's Logger interface.
class ConsoleAdapterTest < Test::Unit::TestCase
  def setup
    # Freeze time so each log line carries a deterministic timestamp prefix.
    @timestamp = Time.parse("2023-01-01 15:32:41 +0000")
    @timestamp_str = @timestamp.strftime("%Y-%m-%d %H:%M:%S %z")
    Timecop.freeze(@timestamp)
    @logdev = Fluent::Test::DummyLogDevice.new
    @logger = ServerEngine::DaemonLogger.new(@logdev)
    @fluent_log = Fluent::Log.new(@logger)
    @console_logger = Fluent::Log::ConsoleAdapter.wrap(@fluent_log)
  end

  def teardown
    Timecop.return
  end

  # Guard against the console gem changing its severity mapping.
  def test_expected_log_levels
    assert_equal({debug: 0, info: 1, warn: 2, error: 3, fatal: 4},
                 Console::Logger::LEVELS)
  end

  data(trace: [Fluent::Log::LEVEL_TRACE, :debug],
       debug: [Fluent::Log::LEVEL_DEBUG, :debug],
       info: [Fluent::Log::LEVEL_INFO, :info],
       warn: [Fluent::Log::LEVEL_WARN, :warn],
       error: [Fluent::Log::LEVEL_ERROR, :error],
       fatal: [Fluent::Log::LEVEL_FATAL, :fatal])
  def test_reflect_log_level(data)
    # Fluent's TRACE has no console-gem equivalent and maps to :debug.
    level, expected = data
    @fluent_log.level = level
    console_logger = Fluent::Log::ConsoleAdapter.wrap(@fluent_log)
    assert_equal(Console::Logger::LEVELS[expected],
                 console_logger.level)
  end

  data(debug: :debug,
       info: :info,
       warn: :warn,
       error: :error,
       fatal: :fatal)
  def test_string_subject(level)
    @console_logger.send(level, "subject")
    assert_equal(["#{@timestamp_str} [#{level}]: 0.0s: subject\n"],
                 @logdev.logs)
  end

  data(debug: :debug,
       info: :info,
       warn: :warn,
       error: :error,
       fatal: :fatal)
  def test_args(level)
    # Extra positional arguments are rendered one per " | " continuation line.
    @console_logger.send(level, "subject", 1, 2, 3)
    assert_equal([
                   "#{@timestamp_str} [#{level}]: 0.0s: subject\n" +
                   " | 1\n" +
                   " | 2\n" +
                   " | 3\n"
                 ],
                 @logdev.logs)
  end

  data(debug: :debug,
       info: :info,
       warn: :warn,
       error: :error,
       fatal: :fatal)
  def test_options(level)
    # Keyword options are serialized as JSON spread over continuation lines.
    @console_logger.send(level, "subject", kwarg1: "opt1", kwarg2: "opt2")
    lines = @logdev.logs[0].split("\n")
    args = JSON.parse(lines[1..].collect { |str| str.sub(/\s+\|/, "") }.join("\n"))
    assert_equal([
                   1,
                   "#{@timestamp_str} [#{level}]: 0.0s: subject",
                   { "kwarg1" => "opt1", "kwarg2" => "opt2" }
                 ],
                 [
                   @logdev.logs.size,
                   lines[0],
                   args
                 ])
  end

  data(debug: :debug,
       info: :info,
       warn: :warn,
       error: :error,
       fatal: :fatal)
  def test_block(level)
    # A block's return value is rendered as a continuation line.
    @console_logger.send(level, "subject") { "block message" }
    assert_equal([
                   "#{@timestamp_str} [#{level}]: 0.0s: subject\n" +
                   " | block message\n"
                 ],
                 @logdev.logs)
  end

  data(debug: :debug,
       info: :info,
       warn: :warn,
       error: :error,
       fatal: :fatal)
  def test_multiple_entries(level)
    # Each call produces its own entry in the log device.
    @console_logger.send(level, "subject1")
    @console_logger.send(level, "line2")
    assert_equal([
                   "#{@timestamp_str} [#{level}]: 0.0s: subject1\n",
                   "#{@timestamp_str} [#{level}]: 0.0s: line2\n"
                 ],
                 @logdev.logs)
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/supervisor.rb | lib/fluent/supervisor.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fileutils'
require 'open3'
require 'pathname'
require 'find'
require 'fluent/config'
require 'fluent/counter'
require 'fluent/env'
require 'fluent/engine'
require 'fluent/error'
require 'fluent/log'
require 'fluent/plugin'
require 'fluent/rpc'
require 'fluent/system_config'
require 'fluent/msgpack_factory'
require 'fluent/variable_store'
require 'serverengine'
if Fluent.windows?
require 'win32/ipc'
require 'win32/event'
end
module Fluent
module ServerModule
# ServerEngine hook run before workers are spawned. Sets up supervisor-global
# facilities: RPC server, counter server, signal/event handlers, the lock
# directory, and the (possibly taken-over) shared socket manager.
def before_run
  @fluentd_conf = config[:fluentd_conf]
  @rpc_endpoint = nil
  @rpc_server = nil
  @counter = nil

  @socket_manager_server = nil
  @starting_new_supervisor_with_zero_downtime = false
  @new_supervisor_pid = nil
  # Set when this process was spawned by an old supervisor doing a
  # zero-downtime restart (see #zero_downtime_restart).
  start_in_parallel = ENV.key?("FLUENT_RUNNING_IN_PARALLEL_WITH_OLD")
  @zero_downtime_restart_mutex = Mutex.new

  @fluentd_lock_dir = Dir.mktmpdir("fluentd-lock-")
  ENV['FLUENTD_LOCK_DIR'] = @fluentd_lock_dir

  # While running in parallel with the old supervisor, RPC/counter servers are
  # deferred until the old one stops (see #cancel_source_only) to avoid
  # conflicting with the old process's listeners.
  if config[:rpc_endpoint] and not start_in_parallel
    @rpc_endpoint = config[:rpc_endpoint]
    @enable_get_dump = config[:enable_get_dump]
    run_rpc_server
  end

  if Fluent.windows?
    install_windows_event_handler
  else
    install_supervisor_signal_handlers
  end

  if counter = config[:counter_server] and not start_in_parallel
    run_counter_server(counter)
  end

  if config[:disable_shared_socket]
    $log.info "shared socket for multiple workers is disabled"
  elsif start_in_parallel
    begin
      raise "[BUG] SERVERENGINE_SOCKETMANAGER_PATH env var must exist when starting in parallel" unless ENV.key?('SERVERENGINE_SOCKETMANAGER_PATH')
      @socket_manager_server = ServerEngine::SocketManager::Server.share_sockets_with_another_server(ENV['SERVERENGINE_SOCKETMANAGER_PATH'])
      $log.info "zero-downtime-restart: took over the shared sockets", path: ENV['SERVERENGINE_SOCKETMANAGER_PATH']
    rescue => e
      $log.error "zero-downtime-restart: cancel sequence because failed to take over the shared sockets", error: e
      raise
    end
  else
    @socket_manager_server = ServerEngine::SocketManager::Server.open
    ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = @socket_manager_server.path.to_s
  end

  stop_parallel_old_supervisor_after_delay if start_in_parallel
end
# ServerEngine hook run after all workers have stopped; tears down everything
# #before_run created.
def after_run
  stop_windows_event_thread if Fluent.windows?
  stop_rpc_server if @rpc_endpoint
  stop_counter_server if @counter
  cleanup_lock_dir
  # Keep the socket path when a new supervisor is taking over the sockets.
  Fluent::Supervisor.cleanup_socketmanager_path unless @starting_new_supervisor_with_zero_downtime
ensure
  notify_new_supervisor_that_old_one_has_stopped if @starting_new_supervisor_with_zero_downtime
end
# Remove the per-plugin lock files created under @fluentd_lock_dir, then the
# (now empty) directory itself.
def cleanup_lock_dir
  lock_files = Dir.glob(File.join(@fluentd_lock_dir, "fluentd-*.lock"))
  lock_files.each { |lock_file| FileUtils.rm(lock_file) }
  FileUtils.rmdir(@fluentd_lock_dir)
end
# Start the HTTP RPC server on config[:rpc_endpoint], exposing the management
# endpoints (flush/kill workers, reload/dump config). Most endpoints delegate
# by sending the corresponding UNIX signal to ourselves; on Windows the
# handler is invoked directly instead.
def run_rpc_server
  @rpc_server = RPC::Server.new(@rpc_endpoint, $log)

  # built-in RPC for signals
  @rpc_server.mount_proc('/api/processes.interruptWorkers') { |req, res|
    $log.debug "fluentd RPC got /api/processes.interruptWorkers request"
    Process.kill :INT, Process.pid
    nil
  }
  @rpc_server.mount_proc('/api/processes.killWorkers') { |req, res|
    $log.debug "fluentd RPC got /api/processes.killWorkers request"
    Process.kill :TERM, Process.pid
    nil
  }
  @rpc_server.mount_proc('/api/processes.flushBuffersAndKillWorkers') { |req, res|
    $log.debug "fluentd RPC got /api/processes.flushBuffersAndKillWorkers request"
    if Fluent.windows?
      supervisor_sigusr1_handler
      stop(true)
    else
      Process.kill :USR1, Process.pid
      Process.kill :TERM, Process.pid
    end
    nil
  }
  # Zero-downtime restart is POSIX-only (relies on SIGUSR2/SIGWINCH handover).
  unless Fluent.windows?
    @rpc_server.mount_proc('/api/processes.zeroDowntimeRestart') { |req, res|
      $log.debug "fluentd RPC got /api/processes.zeroDowntimeRestart request"
      Process.kill :USR2, Process.pid
      nil
    }
  end
  @rpc_server.mount_proc('/api/plugins.flushBuffers') { |req, res|
    $log.debug "fluentd RPC got /api/plugins.flushBuffers request"
    if Fluent.windows?
      supervisor_sigusr1_handler
    else
      Process.kill :USR1, Process.pid
    end
    nil
  }
  @rpc_server.mount_proc('/api/config.reload') { |req, res|
    $log.debug "fluentd RPC got /api/config.reload request"
    if Fluent.windows?
      # restart worker with auto restarting by killing
      kill_worker
    else
      Process.kill :HUP, Process.pid
    end
    nil
  }
  @rpc_server.mount_proc('/api/config.dump') { |req, res|
    $log.debug "fluentd RPC got /api/config.dump request"
    $log.info "dump in-memory config"
    supervisor_dump_config_handler
    nil
  }
  @rpc_server.mount_proc('/api/config.gracefulReload') { |req, res|
    $log.debug "fluentd RPC got /api/config.gracefulReload request"
    graceful_reload
    nil
  }
  # Returning the raw config over HTTP is opt-in via enable_get_dump.
  if @enable_get_dump
    @rpc_server.mount_proc('/api/config.getDump') { |req, res|
      $log.debug "fluentd RPC got /api/config.getDump request"
      $log.info "get dump in-memory config via HTTP"
      res.body = supervisor_get_dump_config_handler
      [nil, nil, res]
    }
  end

  @rpc_server.start
end
# Shut down the RPC server if it was started; a no-op otherwise.
def stop_rpc_server
  @rpc_server.shutdown if @rpc_server
end
# Boot the counter service configured via the system <counter_server> section
# and keep a handle for shutdown in #stop_counter_server.
def run_counter_server(counter_conf)
  server_options = {
    host: counter_conf.bind,
    port: counter_conf.port,
    log: $log,
    path: counter_conf.backup_path,
  }
  @counter = Fluent::Counter::Server.new(counter_conf.scope, server_options)
  @counter.start
end
# Stop the counter server started by #run_counter_server.
# Callers guard on @counter (see #after_run), so @counter is non-nil here.
def stop_counter_server
  @counter.stop
end
# While this (new) supervisor runs in parallel with the old one during a
# zero-downtime restart, stop the old supervisor after a grace period.
def stop_parallel_old_supervisor_after_delay
  Thread.new do
    # Delay to wait the new workers to start up.
    # Even if it takes a long time to start the new workers and stop the old Fluentd first,
    # it is no problem because the socket buffer works, as long as the capacity is not exceeded.
    sleep 10
    old_pid = ENV["FLUENT_RUNNING_IN_PARALLEL_WITH_OLD"]&.to_i
    if old_pid
      $log.info "zero-downtime-restart: stop the old supervisor"
      Process.kill :TERM, old_pid
    end
  rescue => e
    $log.warn "zero-downtime-restart: failed to stop the old supervisor." +
              " If the old one does not exist, please send SIGWINCH to this new process to start to work fully." +
              " If it exists, something went wrong. Please kill the old one manually.",
              error: e
  end
end
# Old-supervisor side of the zero-downtime handover: tell the new supervisor
# (via SIGWINCH) that this process has fully stopped, so it can start its
# deferred RPC/counter servers and leave source-only mode.
def notify_new_supervisor_that_old_one_has_stopped
  if config[:pid_path]
    # Daemonized: the new supervisor's pid comes from its pid file.
    new_pid = File.read(config[:pid_path]).to_i
  else
    raise "[BUG] new_supervisor_pid is not saved" unless @new_supervisor_pid
    new_pid = @new_supervisor_pid
  end

  $log.info "zero-downtime-restart: notify the new supervisor (pid: #{new_pid}) that old one has stopped"
  Process.kill :WINCH, new_pid
rescue => e
  $log.error(
    "zero-downtime-restart: failed to notify the new supervisor." +
    " Please send SIGWINCH to the new supervisor process manually" +
    " if it does not start to work fully.",
    error: e
  )
end
# Install the supervisor's POSIX signal handlers (no-op on Windows, which uses
# named events instead — see #install_windows_event_handler).
def install_supervisor_signal_handlers
  return if Fluent.windows?

  trap :HUP do
    $log.debug "fluentd supervisor process get SIGHUP"
    supervisor_sighup_handler
  end

  trap :USR1 do
    $log.debug "fluentd supervisor process get SIGUSR1"
    supervisor_sigusr1_handler
  end

  trap :USR2 do
    $log.debug 'fluentd supervisor process got SIGUSR2'
    # Fix: this trap is only ever installed on non-Windows (see the guard
    # above), so the former `if Fluent.windows? then graceful_reload` branch
    # was unreachable dead code; SIGUSR2 always triggers zero-downtime restart.
    zero_downtime_restart
  end

  trap :WINCH do
    $log.debug 'fluentd supervisor process got SIGWINCH'
    cancel_source_only
  end
end
if Fluent.windows?
  # Override some methods of ServerEngine::MultiSpawnWorker
  # Since Fluentd's Supervisor doesn't use ServerEngine's HUP, USR1 and USR2
  # handlers (see install_supervisor_signal_handlers), they should be
  # disabled also on Windows, just send commands to workers instead.

  # Ask each worker (via its pipe monitor) to restart; graceful restarts
  # flush before exiting, immediate ones do not.
  def restart(graceful)
    @monitors.each do |m|
      m.send_command(graceful ? "GRACEFUL_RESTART\n" : "IMMEDIATE_RESTART\n")
    end
  end

  # Ask each worker to reload its configuration in place.
  def reload
    @monitors.each do |m|
      m.send_command("RELOAD\n")
    end
  end
end
# Windows substitute for POSIX signals: a background thread waits on named
# Win32 events (per-pid "fluentd_<pid>*" names plus optional user-chosen
# config[:signame] aliases) and dispatches them to the same handlers the
# signal traps would use.
def install_windows_event_handler
  return unless Fluent.windows?

  @pid_signame = "fluentd_#{Process.pid}"
  @signame = config[:signame]

  Thread.new do
    ipc = Win32::Ipc.new(nil)
    events = [
      {win32_event: Win32::Event.new("#{@pid_signame}_STOP_EVENT_THREAD"), action: :stop_event_thread},
      {win32_event: Win32::Event.new("#{@pid_signame}"), action: :stop},
      {win32_event: Win32::Event.new("#{@pid_signame}_HUP"), action: :hup},
      {win32_event: Win32::Event.new("#{@pid_signame}_USR1"), action: :usr1},
      {win32_event: Win32::Event.new("#{@pid_signame}_USR2"), action: :usr2},
      {win32_event: Win32::Event.new("#{@pid_signame}_CONT"), action: :cont},
    ]
    if @signame
      signame_events = [
        {win32_event: Win32::Event.new("#{@signame}"), action: :stop},
        {win32_event: Win32::Event.new("#{@signame}_HUP"), action: :hup},
        {win32_event: Win32::Event.new("#{@signame}_USR1"), action: :usr1},
        {win32_event: Win32::Event.new("#{@signame}_USR2"), action: :usr2},
        {win32_event: Win32::Event.new("#{@signame}_CONT"), action: :cont},
      ]
      events.concat(signame_events)
    end
    begin
      loop do
        infinite = 0xFFFFFFFF
        # wait_any returns a 1-based index into the waited event list.
        ipc_idx = ipc.wait_any(events.map {|e| e[:win32_event]}, infinite)
        event_idx = ipc_idx - 1

        if event_idx >= 0 && event_idx < events.length
          $log.debug("Got Win32 event \"#{events[event_idx][:win32_event].name}\"")
        else
          $log.warn("Unexpected return value of Win32::Ipc#wait_any: #{ipc_idx}")
        end
        # NOTE(review): when ipc_idx is 0 (unexpected), event_idx is -1 and
        # events[-1] selects the LAST event below — confirm this fallthrough
        # is intended rather than skipping the iteration.
        case events[event_idx][:action]
        when :stop
          stop(true)
        when :hup
          supervisor_sighup_handler
        when :usr1
          supervisor_sigusr1_handler
        when :usr2
          graceful_reload
        when :cont
          supervisor_dump_handler_for_windows
        when :stop_event_thread
          break
        end
      end
    ensure
      # Always release the Win32 event handles when the thread exits.
      events.each { |event| event[:win32_event].close }
    end
  end
end
# Wake the Windows event-watcher thread (started in
# #install_windows_event_handler) so it breaks out of its wait loop and exits.
def stop_windows_event_thread
  return unless Fluent.windows?

  event = Win32::Event.open("#{@pid_signame}_STOP_EVENT_THREAD")
  event.set
  event.close
end
# SIGHUP: kill the workers so ServerEngine respawns them with fresh config.
def supervisor_sighup_handler
  kill_worker
end
# SIGUSR1: reopen the supervisor's own log file and forward USR1 to workers.
def supervisor_sigusr1_handler
  reopen_log
  send_signal_to_workers(:USR1)
end
# Reload configuration without restarting worker processes. The new config is
# loaded and validated first; on any failure the current config stays active.
def graceful_reload
  conf = nil
  # A fresh thread avoids mutex restrictions when invoked from trap context.
  t = Thread.new do
    $log.info 'Reloading new config'

    # Validate that loading config is valid at first
    conf = Fluent::Config.build(
      config_path: config[:config_path],
      encoding: config[:conf_encoding],
      additional_config: config[:inline_config],
      use_v1_config: config[:use_v1_config],
    )

    Fluent::VariableStore.try_to_reset do
      Fluent::Engine.reload_config(conf, supervisor: true)
    end
  end
  t.report_on_exception = false # Error is handled by myself
  t.join

  reopen_log
  send_signal_to_workers(:USR2)
  @fluentd_conf = conf.to_s
rescue => e
  $log.error "Failed to reload config file: #{e}"
end
# SIGUSR2 (non-Windows): spawn a brand-new supervisor process that runs in
# parallel with this one, takes over the shared sockets, and eventually stops
# this process (see #before_run / #cancel_source_only on the new side).
def zero_downtime_restart
  Thread.new do
    @zero_downtime_restart_mutex.synchronize do
      $log.info "start zero-downtime-restart sequence"

      # Guard against overlapping restart sequences.
      if @starting_new_supervisor_with_zero_downtime
        $log.warn "zero-downtime-restart: canceled because it is already starting"
        Thread.exit
      end
      if ENV.key?("FLUENT_RUNNING_IN_PARALLEL_WITH_OLD")
        $log.warn "zero-downtime-restart: canceled because the previous sequence is still running"
        Thread.exit
      end
      @starting_new_supervisor_with_zero_downtime = true

      # Re-exec ourselves with the same script and arguments.
      commands = [ServerEngine.ruby_bin_path, $0] + ARGV
      env_to_add = {
        "SERVERENGINE_SOCKETMANAGER_INTERNAL_TOKEN" => ServerEngine::SocketManager::INTERNAL_TOKEN,
        "FLUENT_RUNNING_IN_PARALLEL_WITH_OLD" => "#{Process.pid}",
      }
      # NOTE(review): joining with spaces executes via the shell and would
      # break if ruby_bin_path, $0 or any ARGV element contains spaces —
      # confirm whether spawning with the argument array was intended.
      pid = Process.spawn(env_to_add, commands.join(" "))
      @new_supervisor_pid = pid unless config[:daemonize]

      if config[:daemonize]
        Thread.new(pid) do |pid|
          _, status = Process.wait2(pid)
          # check if `ServerEngine::Daemon#daemonize_with_double_fork` succeeded or not
          unless status.success?
            @starting_new_supervisor_with_zero_downtime = false
            $log.error "zero-downtime-restart: failed because new supervisor exits unexpectedly"
          end
        end
      else
        # Non-daemonized: the new supervisor should outlive this process, so
        # reaching wait2 means it died prematurely.
        Thread.new(pid) do |pid|
          _, status = Process.wait2(pid)
          @starting_new_supervisor_with_zero_downtime = false
          $log.error "zero-downtime-restart: failed because new supervisor exits unexpectedly", status: status
        end
      end
    end
  rescue => e
    $log.error "zero-downtime-restart: failed", error: e
    @starting_new_supervisor_with_zero_downtime = false
  end
end
# SIGWINCH: on the new supervisor this is sent by the old one once it has
# fully stopped. Finish the handover by starting the RPC/counter servers that
# #before_run deferred, then tell the workers to leave source-only mode.
def cancel_source_only
  if ENV.key?("FLUENT_RUNNING_IN_PARALLEL_WITH_OLD")
    if config[:rpc_endpoint]
      begin
        @rpc_endpoint = config[:rpc_endpoint]
        @enable_get_dump = config[:enable_get_dump]
        run_rpc_server
      rescue => e
        $log.error "failed to start RPC server", error: e
      end
    end

    if counter = config[:counter_server]
      begin
        run_counter_server(counter)
      rescue => e
        $log.error "failed to start counter server", error: e
      end
    end

    $log.info "zero-downtime-restart: done all sequences, now new processes start to work fully"
    ENV.delete("FLUENT_RUNNING_IN_PARALLEL_WITH_OLD")
  end

  send_signal_to_workers(:WINCH)
end
# Windows CONT event: dump the supervisor's state (sigdump) and ask every
# worker to do the same.
def supervisor_dump_handler_for_windows
  # As for UNIX-like, SIGCONT signal to each process makes the process output its dump-file,
  # and it is implemented before the implementation of the function for Windows.
  # It is possible to trap SIGCONT and handle it here also on UNIX-like,
  # but for backward compatibility, this handler is currently for a Windows-only.
  raise "[BUG] This function is for Windows ONLY." unless Fluent.windows?

  # Dump asynchronously so the event thread is not blocked.
  Thread.new do
    begin
      FluentSigdump.dump_windows
    rescue => e
      $log.error "failed to dump: #{e}"
    end
  end

  send_signal_to_workers(:CONT)
rescue => e
  $log.error "failed to dump: #{e}"
end
# Terminate every registered worker process, clearing the pid table first so
# the same worker is never signaled twice. Windows has no graceful TERM, so
# workers are hard-killed there.
def kill_worker
  return unless config[:worker_pid]

  pids = config[:worker_pid].clone
  config[:worker_pid].clear
  pids.each_value do |pid|
    Process.kill(Fluent.windows? ? :KILL : :TERM, pid)
  end
end
# Log the full in-memory configuration (RPC /api/config.dump).
def supervisor_dump_config_handler
  $log.info @fluentd_conf
end
# Return the in-memory configuration for the RPC /api/config.getDump endpoint.
def supervisor_get_dump_config_handler
  response = {}
  response[:conf] = @fluentd_conf
  response
end
# Sigdump hook; skipped once shutdown (@stop) has begun.
def dump
  super unless @stop
end
private
# Reopen the supervisor's own log file (e.g. after external rotation).
def reopen_log
  if $log
    # Creating new thread due to mutex can't lock
    # in main thread during trap context
    Thread.new do
      $log.reopen!
    end
  end
end
# Deliver a control signal to every worker; on Windows, where inter-process
# UNIX signals are unavailable, translate it to a pipe command instead.
def send_signal_to_workers(signal)
  return unless config[:worker_pid]

  if Fluent.windows?
    send_command_to_workers(signal)
  else
    config[:worker_pid].each_value do |pid|
      # don't rescue Errno::ESRCH here (invalid status)
      Process.kill(signal, pid)
    end
  end
end
# Translate a UNIX signal name into ServerEngine's pipe-based worker command
# (used on Windows, where signals cannot be delivered to workers).
def send_command_to_workers(signal)
  if signal == :HUP
    restart(false)
  elsif signal == :USR1
    restart(true)
  elsif signal == :USR2
    reload
  elsif signal == :CONT
    dump_all_windows_workers
  end
end
# Ask every worker (via its pipe monitor) to write a sigdump file.
def dump_all_windows_workers
  @monitors.each { |monitor| monitor.send_command("DUMP\n") }
end
end
# Worker-side ServerEngine hooks: spawn the worker process with its identity
# in the environment, and register its pid for the supervisor's handlers.
module WorkerModule
  def spawn(process_manager)
    worker_env = {
      'SERVERENGINE_WORKER_ID' => @worker_id.to_i.to_s,
      'FLUENT_INSTANCE_ID' => Fluent::INSTANCE_ID,
    }
    @pm = process_manager.spawn(worker_env, *config[:main_cmd])
  end

  def after_start
    # Record this worker's pid so the supervisor can signal it later.
    config[:worker_pid] ||= {}
    config[:worker_pid][@worker_id] = @pm.pid
  end

  def dump
    # Skip sigdump once shutdown has started.
    return if @stop
    super
  end
end
class Supervisor
# Translate Fluentd's flat parameter hash into ServerEngine's configuration.
# Note: params['daemonize'] doubles as the pid-file path (truthy => daemonize).
def self.serverengine_config(params = {})
  # ServerEngine's "daemonize" option is boolean, and path of pid file is brought by "pid_path"
  pid_path = params['daemonize']
  daemonize = !!params['daemonize']
  se_config = {
    worker_type: 'spawn',
    workers: params['workers'],
    log_stdin: false,
    log_stdout: false,
    log_stderr: false,
    enable_heartbeat: true,
    auto_heartbeat: false,
    # Exit code 2 (e.g. config errors) must not trigger endless respawning.
    unrecoverable_exit_codes: [2],
    stop_immediately_at_unrecoverable_exit: true,
    root_dir: params['root_dir'],
    logger: $log,
    log: $log&.out,
    log_level: params['log_level'],
    chuser: params['chuser'],
    chgroup: params['chgroup'],
    # chumask may arrive as an Integer or as an octal string like "0022".
    chumask: params['chumask'].is_a?(Integer) ? params['chumask'] : params['chumask']&.to_i(8),
    daemonize: daemonize,
    rpc_endpoint: params['rpc_endpoint'],
    counter_server: params['counter_server'],
    enable_get_dump: params['enable_get_dump'],
    windows_daemon_cmdline: [ServerEngine.ruby_bin_path,
                             File.join(File.dirname(__FILE__), 'daemon.rb'),
                             ServerModule.name,
                             WorkerModule.name,
                             JSON.dump(params)],
    command_sender: Fluent.windows? ? "pipe" : "signal",
    config_path: params['fluentd_conf_path'],
    fluentd_conf: params['fluentd_conf'],
    conf_encoding: params['conf_encoding'],
    inline_config: params['inline_config'],
    main_cmd: params['main_cmd'],
    signame: params['signame'],
    disable_shared_socket: params['disable_shared_socket'],
    restart_worker_interval: params['restart_worker_interval'],
  }
  se_config[:pid_path] = pid_path if daemonize
  se_config
end
# Baseline option values; the hash passed to #initialize is merged over these.
def self.default_options
  {
    config_path: Fluent::DEFAULT_CONFIG_PATH,
    plugin_dirs: [Fluent::DEFAULT_PLUGIN_DIR],
    log_level: Fluent::Log::LEVEL_INFO,
    log_path: nil,
    daemonize: nil,
    libs: [],
    setup_path: nil,
    chuser: nil,
    chgroup: nil,
    chumask: "0",
    root_dir: nil,
    suppress_interval: 0,
    suppress_repeated_stacktrace: true,
    ignore_repeated_log_interval: nil,
    without_source: nil,
    with_source_only: nil,
    enable_input_metrics: true,
    enable_size_metrics: nil,
    use_v1_config: true,
    strict_config_value: nil,
    supervise: true,
    standalone_worker: false,
    signame: nil,
    conf_encoding: 'utf-8',
    disable_shared_socket: nil,
    # :guess lets the config loader infer the format from the file extension.
    config_file_type: :guess,
  }
end
# Remove the SocketManager UNIX socket left behind by this process.
# POSIX-only: on Windows SocketManager does not use a filesystem path.
def self.cleanup_socketmanager_path
  return if Fluent.windows? || !ENV.key?('SERVERENGINE_SOCKETMANAGER_PATH')

  FileUtils.rm_f(ENV['SERVERENGINE_SOCKETMANAGER_PATH'])
end
# Stores the command-line options (merged over self.class.default_options)
# into instance variables used by the later phases
# (configure / run_supervisor / run_worker).
def initialize(cl_opt)
@cl_opt = cl_opt
opt = self.class.default_options.merge(cl_opt)
@config_file_type = opt[:config_file_type]
@daemonize = opt[:daemonize]
@standalone_worker= opt[:standalone_worker]
@config_path = opt[:config_path]
@inline_config = opt[:inline_config]
@use_v1_config = opt[:use_v1_config]
@conf_encoding = opt[:conf_encoding]
@show_plugin_config = opt[:show_plugin_config]
@libs = opt[:libs]
@plugin_dirs = opt[:plugin_dirs]
@chgroup = opt[:chgroup]
@chuser = opt[:chuser]
@chumask = opt[:chumask]
@signame = opt[:signame]
# TODO: `@log_path`, `@log_rotate_age` and `@log_rotate_size` should be removed
# since it should be merged with SystemConfig in `build_system_config()`.
# We should always use `system_config.log.path`, `system_config.log.rotate_age`
# and `system_config.log.rotate_size`.
# However, currently, there is a bug that `system_config.log` parameters
# are not in `Fluent::SystemConfig::SYSTEM_CONFIG_PARAMETERS`, and these
# parameters are not merged in `build_system_config()`.
# Until we fix the bug of `Fluent::SystemConfig`, we need to use these instance variables.
@log_path = opt[:log_path]
@log_rotate_age = opt[:log_rotate_age]
@log_rotate_size = opt[:log_rotate_size]
@finished = false
end
# Validates the system configuration, prepares the root directory, runs the
# configuration through the engine in supervisor mode, and finally hands
# control to ServerEngine via #supervise (unless dry_run).
# Configuration/load errors terminate the process with exit status 1.
def run_supervisor(dry_run: false)
if dry_run
$log.info "starting fluentd-#{Fluent::VERSION} as dry run mode", ruby: RUBY_VERSION
end
if @system_config.workers < 1
raise Fluent::ConfigError, "invalid number of workers (must be > 0):#{@system_config.workers}"
end
if Fluent.windows? && @system_config.with_source_only
raise Fluent::ConfigError, "with-source-only is not supported on Windows"
end
root_dir = @system_config.root_dir
if root_dir
if File.exist?(root_dir)
# an existing non-directory entry at root_dir is a hard error
unless Dir.exist?(root_dir)
raise Fluent::InvalidRootDirectory, "non directory entry exists:#{root_dir}"
end
else
begin
FileUtils.mkdir_p(root_dir, mode: @system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION)
rescue => e
raise Fluent::InvalidRootDirectory, "failed to create root directory:#{root_dir}, #{e.inspect}"
end
end
end
begin
ServerEngine::Privilege.change(@chuser, @chgroup)
MessagePackFactory.init(enable_time_support: @system_config.enable_msgpack_time_support)
Fluent::Engine.init(@system_config, supervisor_mode: true, start_in_parallel: ENV.key?("FLUENT_RUNNING_IN_PARALLEL_WITH_OLD"))
Fluent::Engine.run_configure(@conf, dry_run: dry_run)
rescue Fluent::ConfigError => e
$log.error 'config error', file: @config_path, error: e
$log.debug_backtrace
# exit! performs a hard exit (skips at_exit handlers)
exit!(1)
rescue ScriptError => e # LoadError, NotImplementedError, SyntaxError
if e.respond_to?(:path)
$log.error e.message, path: e.path, error: e
else
$log.error e.message, error: e
end
$log.debug_backtrace
exit!(1)
rescue => e
$log.error "unexpected error", error: e
$log.debug_backtrace
exit!(1)
end
if dry_run
$log.info 'finished dry run mode'
exit 0
else
supervise
end
end
# Snapshot of key runtime settings as a String-keyed Hash.
# NOTE(review): 'pid_file' is populated from @daemonize — presumably the
# daemonize option carries the pid file path; confirm against option parsing.
def options
  snapshot = {}
  snapshot['config_path'] = @config_path
  snapshot['pid_file'] = @daemonize
  snapshot['plugin_dirs'] = @plugin_dirs
  snapshot['log_path'] = @log_path
  snapshot['root_dir'] = @system_config.root_dir
  snapshot
end
# Runs the engine in this process (worker mode). When @standalone_worker is
# set (--no-supervisor), this process also performs the supervisor duties:
# opening the socket manager, dropping privileges and setting the umask.
def run_worker
# NOTE(review): this guard reads @process_name, which is never assigned in
# the visible code (#initialize sets other ivars only) — confirm whether it
# should be @system_config.process_name as in #supervise.
Process.setproctitle("worker:#{@system_config.process_name}") if @process_name
if @standalone_worker && @system_config.workers != 1
raise Fluent::ConfigError, "invalid number of workers (must be 1 or unspecified) with --no-supervisor: #{@system_config.workers}"
end
if Fluent.windows? && @system_config.with_source_only
raise Fluent::ConfigError, "with-source-only is not supported on Windows"
end
install_main_process_signal_handlers
# This is the only log message for @standalone_worker
$log.info "starting fluentd-#{Fluent::VERSION} without supervision", pid: Process.pid, ruby: RUBY_VERSION if @standalone_worker
main_process do
create_socket_manager if @standalone_worker
if @standalone_worker
ServerEngine::Privilege.change(@chuser, @chgroup)
File.umask(@chumask.to_i(8))
end
MessagePackFactory.init(enable_time_support: @system_config.enable_msgpack_time_support)
Fluent::Engine.init(@system_config, start_in_parallel: ENV.key?("FLUENT_RUNNING_IN_PARALLEL_WITH_OLD"))
Fluent::Engine.run_configure(@conf)
Fluent::Engine.run
self.class.cleanup_socketmanager_path if @standalone_worker
exit 0
end
end
# Sets up the global logger, parses the configuration (file plus optional
# inline config), builds @system_config, loads --require libs and registers
# plugin directories. With supervisor: true it also logs the installed
# fluentd-related gems.
def configure(supervisor: false)
setup_global_logger(supervisor: supervisor)
if @show_plugin_config
show_plugin_config
end
if @inline_config == '-'
$log.warn('the value "-" for `inline_config` is deprecated. See https://github.com/fluent/fluentd/issues/2711')
@inline_config = STDIN.read
end
@conf = Fluent::Config.build(
config_path: @config_path,
encoding: @conf_encoding,
additional_config: @inline_config,
use_v1_config: @use_v1_config,
type: @config_file_type,
)
@system_config = build_system_config(@conf)
$log.info :supervisor, 'parsing config file is succeeded', path: @config_path
build_additional_configurations do |additional_conf|
@conf += additional_conf
end
@libs.each do |lib|
require lib
end
@plugin_dirs.each do |dir|
# silently ignores plugin dirs that do not exist
if Dir.exist?(dir)
dir = File.expand_path(dir)
Fluent::Plugin.add_plugin_dir(dir)
end
end
if supervisor
# plugins / configuration dumps
Gem::Specification.find_all.select { |x| x.name =~ /^fluent(d|-(plugin|mixin)-.*)$/ }.each do |spec|
$log.info("gem '#{spec.name}' version '#{spec.version}'")
end
end
end
private
# Builds the process-global $log. Parses the configuration once here only to
# extract logging settings; the caller re-parses it afterwards so that
# parse-time messages can be logged through the initialized logger.
def setup_global_logger(supervisor: false)
if supervisor
worker_id = 0
process_type = :supervisor
else
# worker processes receive their id from ServerEngine via the environment
worker_id = ENV['SERVERENGINE_WORKER_ID'].to_i
process_type = case
when @standalone_worker then :standalone
when worker_id == 0 then :worker0
else :workers
end
end
# Parse configuration immediately to initialize logger in early stage.
# Since we can't confirm the log messages in this parsing process,
# we must parse the config again after initializing logger.
conf = Fluent::Config.build(
config_path: @config_path,
encoding: @conf_encoding,
additional_config: @inline_config,
use_v1_config: @use_v1_config,
type: @config_file_type,
)
system_config = build_system_config(conf)
# TODO: we should remove this logic. This merging process should be done
# in `build_system_config()`.
@log_path ||= system_config.log.path
@log_rotate_age ||= system_config.log.rotate_age
@log_rotate_size ||= system_config.log.rotate_size
rotate = @log_rotate_age || @log_rotate_size
actual_log_path = @log_path
# We need to prepare a unique path for each worker since Windows locks files.
if Fluent.windows? && rotate && @log_path && @log_path != "-"
actual_log_path = Fluent::Log.per_process_path(@log_path, process_type, worker_id)
end
# "-" (or no path) means log to STDOUT; any other path gets a file device
if actual_log_path && actual_log_path != "-"
FileUtils.mkdir_p(File.dirname(actual_log_path)) unless File.exist?(actual_log_path)
if rotate
logdev = Fluent::LogDeviceIO.new(
actual_log_path,
shift_age: @log_rotate_age,
shift_size: @log_rotate_size,
)
else
logdev = File.open(actual_log_path, "a")
end
# hand ownership of the log file to the user/group we will drop to
if @chuser || @chgroup
chuid = @chuser ? ServerEngine::Privilege.get_etc_passwd(@chuser).uid : nil
chgid = @chgroup ? ServerEngine::Privilege.get_etc_group(@chgroup).gid : nil
File.chown(chuid, chgid, actual_log_path)
end
if system_config.dir_permission
# NOTE(review): the `|| Fluent::DEFAULT_DIR_PERMISSION` fallback looks
# dead here, because this branch only runs when dir_permission is set.
File.chmod(system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION, File.dirname(actual_log_path))
end
else
logdev = STDOUT
end
$log = Fluent::Log.new(
# log_level: subtract 1 to match serverengine daemon logger side logging severity.
ServerEngine::DaemonLogger.new(logdev, log_level: system_config.log_level - 1),
path: actual_log_path,
process_type: process_type,
worker_id: worker_id,
format: system_config.log.format,
time_format: system_config.log.time_format,
suppress_repeated_stacktrace: system_config.suppress_repeated_stacktrace,
ignore_repeated_log_interval: system_config.ignore_repeated_log_interval,
ignore_same_log_interval: system_config.ignore_same_log_interval,
)
$log.force_stacktrace_level(system_config.log.forced_stacktrace_level) if system_config.force_stacktrace_level?
$log.enable_color(false) if actual_log_path
$log.enable_debug if system_config.log_level <= Fluent::Log::LEVEL_DEBUG
$log.info "init #{process_type} logger",
path: actual_log_path,
rotate_age: @log_rotate_age,
rotate_size: @log_rotate_size
end
# Opens a ServerEngine SocketManager server and exports its path through
# the environment so that spawned processes can attach to shared sockets.
def create_socket_manager
  socket_server = ServerEngine::SocketManager::Server.open
  ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = socket_server.path.to_s
end
# Handles the deprecated --show-plugin-config option: logs a pointer to the
# fluent-plugin-config-format command and exits the process immediately.
def show_plugin_config
name, type = @show_plugin_config.split(":") # input:tail
$log.info "show_plugin_config option is deprecated. Use fluent-plugin-config-format --format=txt #{name} #{type}"
exit 0
end
# Spawns and monitors the worker process(es) via ServerEngine, passing the
# already-parsed configuration and runtime settings through `params`.
def supervise
Process.setproctitle("supervisor:#{@system_config.process_name}") if @system_config.process_name
$log.info "starting fluentd-#{Fluent::VERSION}", pid: Process.pid, ruby: RUBY_VERSION
fluentd_spawn_cmd = build_spawn_command
$log.info "spawn command to main: ", cmdline: fluentd_spawn_cmd
# serialized into ServerEngine config; consumed by Supervisor.serverengine_config
params = {
'main_cmd' => fluentd_spawn_cmd,
'daemonize' => @daemonize,
'inline_config' => @inline_config,
'chuser' => @chuser,
'chgroup' => @chgroup,
'chumask' => @chumask,
'fluentd_conf_path' => @config_path,
'fluentd_conf' => @conf.to_s,
'use_v1_config' => @use_v1_config,
'conf_encoding' => @conf_encoding,
'signame' => @signame,
'workers' => @system_config.workers,
'root_dir' => @system_config.root_dir,
'log_level' => @system_config.log_level,
'rpc_endpoint' => @system_config.rpc_endpoint,
'enable_get_dump' => @system_config.enable_get_dump,
'counter_server' => @system_config.counter_server,
'disable_shared_socket' => @system_config.disable_shared_socket,
'restart_worker_interval' => @system_config.restart_worker_interval,
}
se = ServerEngine.create(ServerModule, WorkerModule) {
# Note: This is called only at the initialization of ServerEngine, since
# Fluentd overwrites all related SIGNAL(HUP,USR1,USR2) and have own reloading feature.
Fluent::Supervisor.serverengine_config(params)
}
se.run
end
def install_main_process_signal_handlers
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/oj_options.rb | lib/fluent/oj_options.rb | require 'fluent/config/types'
module Fluent
  # Configures Oj (the optional fast JSON library) from environment
  # variables of the form FLUENT_OJ_OPTION_<NAME>.
  class OjOptions
    # Supported option names, mapped to the config type used to cast the
    # raw environment-variable string.
    OPTIONS = {
      'bigdecimal_load': :symbol,
      'mode': :symbol,
      'use_to_json': :bool
    }.freeze

    # Whitelist of acceptable values for the enum-like options.
    ALLOWED_VALUES = {
      'bigdecimal_load': %i[bigdecimal float auto],
      'mode': %i[strict null compat json rails custom]
    }.freeze

    # Values applied when no environment override is present.
    DEFAULTS = {
      'bigdecimal_load': :float,
      'mode': :compat,
      'use_to_json': true
    }.freeze

    @@available = false

    # true once `load_env` successfully required the oj gem.
    def self.available?
      @@available
    end

    # Builds the option hash (defaults + env overrides) and applies it to
    # Oj when the gem is installed. Returns the option hash either way.
    def self.load_env
      options = self.get_options
      begin
        require 'oj'
        Oj.default_options = options
        @@available = true
      rescue LoadError
        @@available = false
      end
      options
    end

    private

    # Merges FLUENT_OJ_OPTION_* environment overrides over DEFAULTS.
    # Unparsable or non-whitelisted values are silently ignored.
    # (Note: `private` does not affect `def self.` singleton methods, so
    # this remains externally callable, exactly as before.)
    def self.get_options
      options = DEFAULTS.dup
      OPTIONS.each do |key, type|
        env_value = ENV["FLUENT_OJ_OPTION_#{key.upcase}"]
        next if env_value.nil?

        # use the declared type directly instead of re-looking it up
        cast_value = Fluent::Config.reformatted_value(type, env_value, { strict: true })
        next if cast_value.nil?
        next if ALLOWED_VALUES[key] && !ALLOWED_VALUES[key].include?(cast_value)

        # keys are already Symbols (quoted-symbol hash literals above)
        options[key] = cast_value
      end
      options
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/test.rb | lib/fluent/test.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'test/unit'
require 'fluent/env' # for Fluent.windows?
require 'fluent/test/log'
require 'fluent/test/base'
require 'fluent/test/input_test'
require 'fluent/test/output_test'
require 'fluent/test/filter_test'
require 'fluent/test/parser_test'
require 'fluent/test/formatter_test'
require 'serverengine'
module Fluent
module Test
# Builds a Fluent::Log backed by an in-memory DummyLogDevice at INFO level,
# so tests can run without writing to real log files.
def self.dummy_logger
dl_opts = {log_level: ServerEngine::DaemonLogger::INFO}
logdev = Fluent::Test::DummyLogDevice.new
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
Fluent::Log.new(logger)
end
# Replaces the global Fluent::Engine with a fresh EngineClass instance for
# test isolation, and installs a settable Engine.now for time control.
def self.setup
ENV['SERVERENGINE_WORKER_ID'] = '0'
$log = dummy_logger
old_engine = Fluent.__send__(:remove_const, :Engine)
# Ensure that GC can remove the objects of the old engine.
# Some objects can still exist after `remove_const`. See https://github.com/fluent/fluentd/issues/5054.
old_engine.instance_variable_set(:@root_agent, nil)
engine = Fluent.const_set(:Engine, EngineClass.new).init(SystemConfig.new)
# tests may assign a fixed time via Engine.now = ...
engine.define_singleton_method(:now=) {|n|
@now = n
}
engine.define_singleton_method(:now) {
# fall back to the real engine clock until a test assigns Engine.now=
@now ||= super()
}
nil
end
end
end
# Provide a default logger for test files that never call Test.setup.
$log ||= Fluent::Test.dummy_logger
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/agent.rb | lib/fluent/agent.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/configurable'
require 'fluent/plugin'
require 'fluent/output'
require 'fluent/match'
module Fluent
#
# Agent is a resource unit who manages emittable plugins
#
# Next step: `fluentd/root_agent.rb`
# Next step: `fluentd/label.rb`
#
# Agent manages a set of emit-capable plugins (outputs and filters) and the
# event routing between them.
class Agent
include Configurable
# log: the Fluent::Log instance shared with this agent's plugins.
def initialize(log:)
super()
@context = nil
@outputs = []
@filters = []
@lifecycle_control_list = nil
# lifecycle_control_list is the list of plugins in this agent, and ordered
# from plugins which DOES emit, then DOESN'T emit
# (input -> output w/ router -> filter -> output w/o router)
# for start: use this order DESC
# (because plugins which appears later in configurations will receive events from plugins which appears earlier)
# for stop/before_shutdown/shutdown/after_shutdown/close/terminate: use this order ASC
@lifecycle_cache = nil
@log = log
@event_router = EventRouter.new(NoMatchMatch.new(log), self)
@error_collector = nil
end
attr_reader :log
attr_reader :outputs
attr_reader :filters
attr_reader :context
attr_reader :event_router
attr_reader :error_collector
# Instantiates and configures every <filter> and <match> section of `conf`.
# Sections targeted at another worker are skipped unless running in
# supervisor mode.
def configure(conf)
super
# initialize <match> and <filter> elements
conf.elements('filter', 'match').each { |e|
if !Fluent::Engine.supervisor_mode && e.for_another_worker?
next
end
# a section without an argument matches every tag
pattern = e.arg.empty? ? '**' : e.arg
type = e['@type']
raise ConfigError, "Missing '@type' parameter on <#{e.name}> directive" unless type
if e.name == 'filter'
add_filter(type, pattern, e)
else
add_match(type, pattern, e)
end
}
end
# Memoized classification of this agent's plugins by lifecycle kind.
# The :input list is only populated when the concrete class defines #inputs.
def lifecycle_control_list
return @lifecycle_control_list if @lifecycle_control_list
lifecycle_control_list = {
input: [],
output_with_router: [],
filter: [],
output: [],
}
if self.respond_to?(:inputs)
inputs.each do |i|
lifecycle_control_list[:input] << i
end
end
outputs.each do |o|
if o.has_router?
lifecycle_control_list[:output_with_router] << o
else
lifecycle_control_list[:output] << o
end
end
filters.each do |f|
lifecycle_control_list[:filter] << f
end
@lifecycle_control_list = lifecycle_control_list
end
# Yields [plugin, display_kind] for each output/filter in lifecycle order;
# desc: true reverses the order (see the ordering note in #initialize).
# NOTE(review): :input plugins are never yielded here — input lifecycle is
# presumably driven elsewhere (root_agent.rb); confirm before relying on it.
def lifecycle(desc: false)
kind_list = if desc
[:output, :filter, :output_with_router]
else
[:output_with_router, :filter, :output]
end
kind_list.each do |kind|
list = if desc
lifecycle_control_list[kind].reverse
else
lifecycle_control_list[kind]
end
# routing-capable outputs are still reported to callers as :output
display_kind = (kind == :output_with_router ? :output : kind)
list.each do |instance|
yield instance, display_kind
end
end
end
# Creates, configures and registers an output plugin for `pattern`, wiring
# it into the event router. Multi-output plugins also register their nested
# outputs so the lifecycle machinery sees every child. Returns the plugin.
def add_match(type, pattern, conf)
log_type = conf.for_this_worker? ? :default : :worker0
log.info log_type, "adding match#{@context.nil? ? '' : " in #{@context}"}", pattern: pattern, type: type
output = Plugin.new_output(type)
output.context_router = @event_router
output.configure(conf)
@outputs << output
if output.respond_to?(:outputs) && output.respond_to?(:multi_output?) && output.multi_output?
# TODO: ruby 2.3 or later: replace `output.respond_to?(:multi_output?) && output.multi_output?` with output&.multi_output?
outputs = if output.respond_to?(:static_outputs)
output.static_outputs
else
output.outputs
end
@outputs.push(*outputs)
end
@event_router.add_rule(pattern, output)
output
end
# Creates, configures and registers a filter plugin for `pattern`, wiring
# it into the event router. Returns the plugin.
def add_filter(type, pattern, conf)
log_type = conf.for_this_worker? ? :default : :worker0
log.info log_type, "adding filter#{@context.nil? ? '' : " in #{@context}"}", pattern: pattern, type: type
filter = Plugin.new_filter(type)
filter.context_router = @event_router
filter.configure(conf)
@filters << filter
@event_router.add_rule(pattern, filter)
filter
end
# For handling invalid record
# No-op hook; presumably overridden by subclasses that collect errors
# (see root_agent.rb) — confirm.
def emit_error_event(tag, time, record, error)
end
# No-op hook for emit failures; see the note on #emit_error_event.
def handle_emits_error(tag, es, error)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/clock.rb | lib/fluent/clock.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Fluent
  # Process-wide monotonic clock with test support: the clock can be frozen
  # at a fixed value, either globally (freeze / return) or for the duration
  # of a block (freeze(dst) { ... }, which nests and restores on exit).
  module Clock
    # Prefer the raw monotonic clock when the platform provides it.
    CLOCK_ID = Process::CLOCK_MONOTONIC_RAW rescue Process::CLOCK_MONOTONIC

    @@block_level = 0
    @@frozen_clock = nil

    # Current clock value: the frozen value if one is set, otherwise the
    # real monotonic clock.
    def self.now
      @@frozen_clock.nil? ? now_raw : @@frozen_clock
    end

    # Freezes the clock at `dst` (a clock value, a Time, or nil for "now").
    # With a block, the freeze lasts only for the block's execution.
    def self.freeze(dst = nil, &block)
      return freeze_block(dst, &block) if block

      dst = dst_clock_from_time(dst) if dst.is_a?(Time)
      @@frozen_clock = dst || now_raw
    end

    # Undoes a global freeze. Raises when called inside a freeze block.
    def self.return
      raise "invalid return while running code in blocks" if @@block_level.positive?
      @@frozen_clock = nil
    end

    # internal use: the real (never frozen) monotonic clock value
    def self.now_raw
      Process.clock_gettime(CLOCK_ID)
    end

    # Wall-clock time in the requested unit (realtime clock, not monotonic).
    def self.real_now(unit = :second)
      Process.clock_gettime(Process::CLOCK_REALTIME, unit)
    end

    # Converts a wall-clock Time into the equivalent monotonic clock value.
    def self.dst_clock_from_time(time)
      now_raw - (Time.now - time)
    end

    # Freezes the clock while the block runs, then restores the previous
    # frozen state (supports nesting).
    def self.freeze_block(dst)
      dst = dst_clock_from_time(dst) if dst.is_a?(Time)
      previous = @@frozen_clock
      @@frozen_clock = dst || now_raw
      @@block_level += 1
      begin
        yield
      ensure
        @@block_level -= 1
        @@frozen_clock = previous
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/mixin.rb | lib/fluent/mixin.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/compat/record_filter_mixin'
require 'fluent/compat/handle_tag_name_mixin'
require 'fluent/compat/set_time_key_mixin'
require 'fluent/compat/set_tag_key_mixin'
require 'fluent/compat/type_converter'
require 'fluent/time' # Fluent::TimeFormatter
module Fluent
# Backward-compatibility aliases: these mixins now live under Fluent::Compat,
# but plugins written against older fluentd still reference the old
# top-level constant names.
RecordFilterMixin = Fluent::Compat::RecordFilterMixin
HandleTagNameMixin = Fluent::Compat::HandleTagNameMixin
SetTimeKeyMixin = Fluent::Compat::SetTimeKeyMixin
SetTagKeyMixin = Fluent::Compat::SetTagKeyMixin
TypeConverter = Fluent::Compat::TypeConverter
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/version.rb | lib/fluent/version.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Fluent
# Current fluentd release version string.
VERSION = '1.19.0'
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/event.rb | lib/fluent/event.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/msgpack_factory'
require 'fluent/plugin/compressable'
module Fluent
# Abstract base class for event streams: ordered collections of
# (time, record) pairs. Concrete subclasses must implement #dup, #size,
# #slice and #each.
class EventStream
  include Enumerable
  include Fluent::Plugin::Compressable

  # dup does deep copy for event stream
  def dup
    raise NotImplementedError, "DO NOT USE THIS CLASS directly."
  end

  def size
    raise NotImplementedError, "DO NOT USE THIS CLASS directly."
  end
  alias :length :size

  def empty?
    size.zero?
  end

  # for tests: streams compare equal when their serialized forms match
  def ==(other)
    return false unless other.is_a?(EventStream)
    to_msgpack_stream == other.to_msgpack_stream
  end

  def repeatable?
    false
  end

  def slice(index, num)
    raise NotImplementedError, "DO NOT USE THIS CLASS directly."
  end

  def each(unpacker: nil, &block)
    raise NotImplementedError, "DO NOT USE THIS CLASS directly."
  end

  # Serializes every event into a single msgpack byte string.
  def to_msgpack_stream(time_int: false, packer: nil)
    return to_msgpack_stream_forced_integer(packer: packer) if time_int

    stream_packer = packer || Fluent::MessagePackFactory.msgpack_packer
    each do |time, record|
      stream_packer.write([time, record])
    end
    stream_packer.full_pack
  end

  # Serializes and then compresses the packed stream (type defaults to :gzip).
  def to_compressed_msgpack_stream(time_int: false, packer: nil, type: :gzip)
    compress(to_msgpack_stream(time_int: time_int, packer: packer), type: type)
  end

  # Like #to_msgpack_stream, but coerces event times to plain Integers.
  def to_msgpack_stream_forced_integer(packer: nil)
    stream_packer = packer || Fluent::MessagePackFactory.msgpack_packer
    each do |time, record|
      stream_packer.write([time.to_i, record])
    end
    stream_packer.full_pack
  end
end
# Event stream holding exactly one (time, record) pair.
class OneEventStream < EventStream
  def initialize(time, record)
    @time = time
    @record = record
  end

  # Deep copy: the record is duplicated.
  def dup
    OneEventStream.new(@time, @record.dup)
  end

  def empty?
    false
  end

  def size
    1
  end

  def repeatable?
    true
  end

  # Slicing past the single element yields an empty stream.
  def slice(index, num)
    return ArrayEventStream.new([]) if index > 0 || num == 0
    dup
  end

  def each(unpacker: nil, &block)
    block.call(@time, @record)
    nil
  end
end
# EventStream from entries: Array of [time, record]
#
# Use this class for many events data with a tag
# and its representation is [ [time, record], [time, record], .. ]
# Event stream backed by an array of [time, record] pairs.
class ArrayEventStream < EventStream
  def initialize(entries)
    @entries = entries
  end

  # Deep copy: duplicates every record; times are shared.
  def dup
    copied = @entries.map { |t, r| [t, r.dup] }
    ArrayEventStream.new(copied)
  end

  def size
    @entries.size
  end

  def repeatable?
    true
  end

  def empty?
    @entries.empty?
  end

  def slice(index, num)
    ArrayEventStream.new(@entries.slice(index, num))
  end

  def each(unpacker: nil, &block)
    @entries.each(&block)
    nil
  end
end
# EventStream from entries: numbers of pairs of time and record.
#
# This class can handle many events more efficiently than ArrayEventStream
# because this class generate less objects than ArrayEventStream.
#
# Use this class as below, in loop of data-enumeration:
# 1. initialize blank stream:
# streams[tag] ||= MultiEventStream.new
# 2. add events
# stream[tag].add(time, record)
class MultiEventStream < EventStream
  def initialize(time_array = [], record_array = [])
    @time_array = time_array
    @record_array = record_array
  end

  # Deep copy: times are duplicated shallowly, records deeply.
  def dup
    MultiEventStream.new(@time_array.dup, @record_array.map(&:dup))
  end

  def size
    @time_array.size
  end

  # Appends one event to the stream.
  def add(time, record)
    @time_array << time
    @record_array << record
  end

  def repeatable?
    true
  end

  def empty?
    @time_array.empty?
  end

  def slice(index, num)
    MultiEventStream.new(@time_array.slice(index, num), @record_array.slice(index, num))
  end

  def each(unpacker: nil, &block)
    # Use each_index instead of the original `for i in 0..length-1` loop:
    # `for` leaks its loop variable into the enclosing scope and is
    # non-idiomatic Ruby; iteration order and yielded values are unchanged.
    @time_array.each_index do |i|
      block.call(@time_array[i], @record_array[i])
    end
    nil
  end
end
# Event stream whose events arrive as an already-packed msgpack byte string.
# Events are unpacked lazily into parallel time/record arrays and cached.
class MessagePackEventStream < EventStream
# https://github.com/msgpack/msgpack-ruby/issues/119
# Keep cached_unpacker argument for existing plugins
def initialize(data, cached_unpacker = nil, size = 0, unpacked_times: nil, unpacked_records: nil)
@data = data
@size = size
@unpacked_times = unpacked_times
@unpacked_records = unpacked_records
end
def empty?
@data.empty?
end
# Deep copy; the unpack cache (when present) is carried over with records duplicated.
def dup
if @unpacked_times
self.class.new(@data.dup, nil, @size, unpacked_times: @unpacked_times, unpacked_records: @unpacked_records.map(&:dup))
else
self.class.new(@data.dup, nil, @size)
end
end
def size
# @size is unbelievable always when @size == 0
# If the number of events is really zero, unpacking events takes very short time.
ensure_unpacked! if @size == 0
@size
end
def repeatable?
true
end
# Unpacks @data into the time/record caches exactly once (idempotent).
def ensure_unpacked!(unpacker: nil)
return if @unpacked_times && @unpacked_records
@unpacked_times = []
@unpacked_records = []
(unpacker || Fluent::MessagePackFactory.msgpack_unpacker).feed_each(@data) do |time, record|
@unpacked_times << time
@unpacked_records << record
end
# @size should be updated always right after unpack.
# The real size of unpacked objects are correct, rather than given size.
@size = @unpacked_times.size
end
# This method returns MultiEventStream, because there are no reason
# to survey binary serialized by msgpack.
def slice(index, num)
ensure_unpacked!
MultiEventStream.new(@unpacked_times.slice(index, num), @unpacked_records.slice(index, num))
end
def each(unpacker: nil, &block)
ensure_unpacked!(unpacker: unpacker)
@unpacked_times.each_with_index do |time, i|
block.call(time, @unpacked_records[i])
end
nil
end
def to_msgpack_stream(time_int: false, packer: nil)
# time_int is always ignored because @data is always packed binary in this class
@data
end
end
# Msgpack event stream whose payload arrives compressed. The raw compressed
# bytes are kept so they can be re-emitted without a compression round trip;
# decompression happens lazily on first event access.
class CompressedMessagePackEventStream < MessagePackEventStream
  def initialize(data, cached_unpacker = nil, size = 0, unpacked_times: nil, unpacked_records: nil, compress: :gzip)
    super(data, cached_unpacker, size, unpacked_times: unpacked_times, unpacked_records: unpacked_records)
    @compressed_data = data
    @decompressed_data = nil
    @type = compress
  end

  def empty?
    ensure_decompressed!
    super
  end

  def ensure_unpacked!(unpacker: nil)
    ensure_decompressed!
    super
  end

  def each(unpacker: nil, &block)
    ensure_decompressed!
    super
  end

  def to_msgpack_stream(time_int: false, packer: nil)
    ensure_decompressed!
    super
  end

  # time_int is always ignored because @data is always packed binary in this class
  def to_compressed_msgpack_stream(time_int: false, packer: nil)
    @compressed_data
  end

  private

  # Swaps @data from compressed to decompressed bytes exactly once.
  def ensure_decompressed!
    return unless @decompressed_data.nil?
    @decompressed_data = decompress(@data, type: @type)
    @data = @decompressed_data
  end
end
# Mixin extended onto buffer chunks so they can be iterated as event
# streams. Assumes the extended object provides #open (yielding an IO)
# and #read (returning the packed payload) — the chunk API.
module ChunkMessagePackEventStreamer
# chunk.extend(ChunkMessagePackEventStreamer)
# => chunk.each{|time, record| ... }
def each(unpacker: nil, &block)
# Note: If need to use `unpacker`, then implement it,
# e.g., `unpacker.feed_each(io.read, &block)` (Not tested)
raise NotImplementedError, "'unpacker' argument is not implemented." if unpacker
open do |io|
Fluent::MessagePackFactory.msgpack_unpacker(io).each(&block)
end
nil
end
alias :msgpack_each :each
def to_msgpack_stream(time_int: false, packer: nil)
# time_int is always ignored because data is already packed and written in chunk
read
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/daemon.rb | lib/fluent/daemon.rb | #!/usr/bin/env ruby
# -*- coding: utf-8 -*-
# Entry point for the daemonized ServerEngine process spawned by the
# supervisor. ARGV contract: [0] server module name and [1] worker module
# name (both resolved under the Fluent namespace), [2] JSON-encoded params
# for Fluent::Supervisor.serverengine_config.
here = File.dirname(__FILE__)
$LOAD_PATH << File.expand_path(File.join(here, '..'))
require 'serverengine'
require 'fluent/supervisor'
server_module = Fluent.const_get(ARGV[0])
worker_module = Fluent.const_get(ARGV[1])
params = JSON.parse(ARGV[2])
ServerEngine::Daemon.run_server(server_module, worker_module) { Fluent::Supervisor.serverengine_config(params) }
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/tls.rb | lib/fluent/tls.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'openssl'
require 'fluent/config/error'
module Fluent
# Translates fluentd's TLS version settings into OpenSSL SSLContext /
# option-hash configuration across OpenSSL and Ruby versions.
module TLS
DEFAULT_VERSION = :'TLSv1_2'
SUPPORTED_VERSIONS = if defined?(OpenSSL::SSL::TLS1_3_VERSION)
[:'TLSv1_1', :'TLSv1_2', :'TLSv1_3', :'TLS1_1', :'TLS1_2', :'TLS1_3'].freeze
else
[:'TLSv1_1', :'TLSv1_2', :'TLS1_1', :'TLS1_2'].freeze
end
### follow httpclient configuration by nahi
# OpenSSL 0.9.8 default: "ALL:!ADH:!LOW:!EXP:!MD5:+SSLv2:@STRENGTH"
CIPHERS_DEFAULT = "ALL:!aNULL:!eNULL:!SSLv2".freeze # OpenSSL >1.0.0 default
# Maps version symbols to the values accepted by min_version=/max_version=
# (modern OpenSSL bindings) or by ssl_version= (old rubies that lack the
# TLS*_VERSION constants). MIN_MAX_AVAILABLE records which API to use.
METHODS_MAP = begin
map = {
TLSv1: OpenSSL::SSL::TLS1_VERSION,
TLSv1_1: OpenSSL::SSL::TLS1_1_VERSION,
TLSv1_2: OpenSSL::SSL::TLS1_2_VERSION
}
map[:'TLSv1_3'] = OpenSSL::SSL::TLS1_3_VERSION if defined?(OpenSSL::SSL::TLS1_3_VERSION)
MIN_MAX_AVAILABLE = true
map.freeze
rescue NameError
# ruby 2.4 doesn't have OpenSSL::SSL::TLSXXX constants and min_version=/max_version= methods
map = {
TLS1: :'TLSv1',
TLS1_1: :'TLSv1_1',
TLS1_2: :'TLSv1_2',
}.freeze
MIN_MAX_AVAILABLE = false
map
end
private_constant :METHODS_MAP
# Helper for old syntax/method support:
# ruby 2.4 uses ssl_version= but this method is now deprecated.
# min_version=/max_version= use 'TLS1_2' but ssl_version= uses 'TLSv1_2'
# Applies the requested TLS version bounds to an SSLContext. min_version
# and max_version must be given together or not at all; when absent,
# `version` is used for both bounds. Returns ctx.
def set_version_to_context(ctx, version, min_version, max_version)
if MIN_MAX_AVAILABLE
case
when min_version.nil? && max_version.nil?
min_version = METHODS_MAP[version] || version
max_version = METHODS_MAP[version] || version
when min_version.nil? && max_version
raise Fluent::ConfigError, "When you set max_version, must set min_version together"
when min_version && max_version.nil?
raise Fluent::ConfigError, "When you set min_version, must set max_version together"
else
min_version = METHODS_MAP[min_version] || min_version
max_version = METHODS_MAP[max_version] || max_version
end
ctx.min_version = min_version
ctx.max_version = max_version
else
ctx.ssl_version = METHODS_MAP[version] || version
end
ctx
end
module_function :set_version_to_context
# Same logic as set_version_to_context, but writes the settings into an
# options Hash instead of an SSLContext. Returns opt.
def set_version_to_options(opt, version, min_version, max_version)
if MIN_MAX_AVAILABLE
case
when min_version.nil? && max_version.nil?
min_version = METHODS_MAP[version] || version
max_version = METHODS_MAP[version] || version
when min_version.nil? && max_version
raise Fluent::ConfigError, "When you set max_version, must set min_version together"
when min_version && max_version.nil?
raise Fluent::ConfigError, "When you set min_version, must set max_version together"
else
min_version = METHODS_MAP[min_version] || min_version
max_version = METHODS_MAP[max_version] || max_version
end
opt[:min_version] = min_version
opt[:max_version] = max_version
else
opt[:ssl_version] = METHODS_MAP[version] || version
end
opt
end
module_function :set_version_to_options
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/load.rb | lib/fluent/load.rb | require 'socket'
require 'fcntl'
require 'time'
require 'stringio'
require 'fileutils'
require 'json'
require 'yajl'
require 'uri'
require 'msgpack'
require 'strptime'
begin
require 'sigdump/setup'
rescue
# ignore setup error on Win or similar platform which doesn't support signal
end
require 'cool.io'
require 'fluent/time'
require 'fluent/env'
require 'fluent/version'
require 'fluent/log'
require 'fluent/config'
require 'fluent/engine'
require 'fluent/rpc'
require 'fluent/mixin'
require 'fluent/plugin'
require 'fluent/parser'
require 'fluent/formatter'
require 'fluent/event'
require 'fluent/input'
require 'fluent/output'
require 'fluent/filter'
require 'fluent/match'
require 'fluent/ext_monitor_require'
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/rpc.rb | lib/fluent/rpc.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'webrick'
module Fluent
module RPC
class Server
def initialize(endpoint, log)
m = endpoint.match(/^\[?(?<host>[0-9a-zA-Z:\-\.]+)\]?:(?<port>[0-9]+)$/)
raise Fluent::ConfigError, "Invalid rpc_endpoint: #{endpoint}" unless m
@bind = m[:host]
@port = m[:port]
@log = log
@server = WEBrick::HTTPServer.new(
BindAddress: @bind,
Port: @port,
Logger: WEBrick::Log.new(STDERR, WEBrick::Log::FATAL),
AccessLog: [],
)
end
def mount(path, servlet, *args)
@server.mount(path, servlet, *args)
@log.debug "register #{path} RPC servlet"
end
def mount_proc(path, &block)
@server.mount_proc(path) { |req, res|
begin
code, header, response = block.call(req, res)
rescue => e
@log.warn "failed to handle RPC request", path: path, error: e.to_s
@log.warn_backtrace e.backtrace
code = 500
body = {
'message '=> 'Internal Server Error',
'error' => "#{e}",
'backtrace'=> e.backtrace,
}
end
code = 200 if code.nil?
header = {'Content-Type' => 'application/json'} if header.nil?
body = if response.nil?
'{"ok":true}'
else
response.body['ok'] = code == 200
response.body.to_json
end
res.status = code
header.each_pair { |k, v|
res[k] = v
}
res.body = body
}
@log.debug "register #{path} RPC handler"
end
def start
@log.debug "listening RPC http server on http://#{@bind}:#{@port}/"
@thread = Thread.new {
@server.start
}
end
def shutdown
if @server
@server.shutdown
@server = nil
end
if @thread
@thread.join
@thread = nil
end
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/log.rb | lib/fluent/log.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'forwardable'
require 'logger'
module Fluent
class Log
module TTYColor
RESET = "\033]R"
CRE = "\033[K"
CLEAR = "\033c"
NORMAL = "\033[0;39m"
RED = "\033[1;31m"
GREEN = "\033[1;32m"
YELLOW = "\033[1;33m"
BLUE = "\033[1;34m"
MAGENTA = "\033[1;35m"
CYAN = "\033[1;36m"
WHITE = "\033[1;37m"
end
LEVEL_TRACE = 0
LEVEL_DEBUG = 1
LEVEL_INFO = 2
LEVEL_WARN = 3
LEVEL_ERROR = 4
LEVEL_FATAL = 5
LEVEL_TEXT = %w(trace debug info warn error fatal)
LOG_EVENT_TAG_PREFIX = 'fluent'
LOG_EVENT_LABEL = '@FLUENT_LOG'
LOG_TYPE_SUPERVISOR = :supervisor # only in supervisor, or a worker with --no-supervisor
LOG_TYPE_WORKER0 = :worker0 # only in a worker with worker_id=0 (without showing worker id)
LOG_TYPE_DEFAULT = :default # show logs in all supervisor/workers, with worker id in workers (default)
LOG_TYPES = [LOG_TYPE_SUPERVISOR, LOG_TYPE_WORKER0, LOG_TYPE_DEFAULT].freeze
LOG_ROTATE_AGE = %w(daily weekly monthly)
IGNORE_SAME_LOG_MAX_CACHE_SIZE = 1000 # If need, make this an option of system config.
def self.str_to_level(log_level_str)
case log_level_str.downcase
when "trace" then LEVEL_TRACE
when "debug" then LEVEL_DEBUG
when "info" then LEVEL_INFO
when "warn" then LEVEL_WARN
when "error" then LEVEL_ERROR
when "fatal" then LEVEL_FATAL
else raise "Unknown log level: level = #{log_level_str}"
end
end
def self.event_tags
LEVEL_TEXT.map{|t| "#{LOG_EVENT_TAG_PREFIX}.#{t}" }
end
# Create a unique path for each process.
#
# >>> per_process_path("C:/tmp/test.log", :worker, 1)
# C:/tmp/test-1.log
# >>> per_process_path("C:/tmp/test.log", :supervisor, 0)
# C:/tmp/test-supervisor-0.log
def self.per_process_path(path, process_type, worker_id)
path = Pathname(path)
ext = path.extname
if process_type == :supervisor
suffix = "-#{process_type}-0#{ext}" # "-0" for backword compatibility.
else
suffix = "-#{worker_id}#{ext}"
end
return path.sub_ext(suffix).to_s
end
def initialize(logger, opts={})
# When ServerEngine changes the logger.level, the Fluentd logger level should also change.
# So overwrites logger.level= below.
# However, currently Fluentd doesn't use the ServerEngine's reloading feature,
# so maybe we don't need this overwriting anymore.
orig_logger_level_setter = logger.class.public_instance_method(:level=).bind(logger)
me = self
# The original ruby logger sets the number as each log level like below.
# DEBUG = 0
# INFO = 1
# WARN = 2
# ERROR = 3
# FATAL = 4
# Serverengine use this original log number. In addition to this, serverengine sets -1 as TRACE level.
# TRACE = -1
#
# On the other hand, in fluentd side, it sets the number like below.
# TRACE = 0
# DEBUG = 1
# INFO = 2
# WARN = 3
# ERROR = 4
# FATAL = 5
#
# Then fluentd's level is set as serverengine's level + 1.
# So if serverengine's logger level is changed, fluentd's log level will be changed to that + 1.
logger.define_singleton_method(:level=) {|level| orig_logger_level_setter.call(level); me.level = self.level + 1 }
@path = opts[:path]
@logger = logger
@out = logger.instance_variable_get(:@logdev)
@level = logger.level + 1
@debug_mode = false
@log_event_enabled = false
@depth_offset = 1
@format = nil
@time_format = nil
@formatter = nil
self.format = opts.fetch(:format, :text)
self.time_format = opts[:time_format] if opts.key?(:time_format)
enable_color out.tty?
# TODO: This variable name is unclear so we should change to better name.
@threads_exclude_events = []
# Fluent::Engine requires Fluent::Log, so we must take that object lazily
@engine = Fluent.const_get('Engine')
@optional_header = nil
@optional_attrs = nil
@suppress_repeated_stacktrace = opts[:suppress_repeated_stacktrace]
@forced_stacktrace_level = nil
@ignore_repeated_log_interval = opts[:ignore_repeated_log_interval]
@ignore_same_log_interval = opts[:ignore_same_log_interval]
@process_type = opts[:process_type] # :supervisor, :worker0, :workers Or :standalone
@process_type ||= :standalone # to keep behavior of existing code
case @process_type
when :supervisor
@show_supervisor_log = true
@show_worker0_log = false
when :worker0
@show_supervisor_log = false
@show_worker0_log = true
when :workers
@show_supervisor_log = false
@show_worker0_log = false
when :standalone
@show_supervisor_log = true
@show_worker0_log = true
else
raise "BUG: unknown process type for logger:#{@process_type}"
end
@worker_id = opts[:worker_id]
@worker_id_part = "##{@worker_id} " # used only for :default log type in workers
end
def dup
dl_opts = {}
dl_opts[:log_level] = @level - 1
logger = ServerEngine::DaemonLogger.new(@out, dl_opts)
clone = self.class.new(logger, suppress_repeated_stacktrace: @suppress_repeated_stacktrace, process_type: @process_type,
worker_id: @worker_id, ignore_repeated_log_interval: @ignore_repeated_log_interval,
ignore_same_log_interval: @ignore_same_log_interval)
clone.format = @format
clone.time_format = @time_format
clone.log_event_enabled = @log_event_enabled
clone.force_stacktrace_level(@forced_stacktrace_level)
# optional headers/attrs are not copied, because new PluginLogger should have another one of it
clone
end
attr_reader :format
attr_reader :time_format
attr_accessor :log_event_enabled, :ignore_repeated_log_interval, :ignore_same_log_interval, :suppress_repeated_stacktrace
attr_accessor :out
# Strictly speaking, we should also change @logger.level when the setter of @level is called.
# Currently, we don't need to do it, since Fluentd::Log doesn't use ServerEngine::DaemonLogger.level.
# Since We overwrites logger.level= so that @logger.level is applied to @level,
# we need to find a good way to do this, otherwise we will end up in an endless loop.
attr_accessor :level
attr_accessor :optional_header, :optional_attrs
def logdev=(logdev)
@out = logdev
@logger.instance_variable_set(:@logdev, logdev)
nil
end
def format=(fmt)
return if @format == fmt
@time_format = '%Y-%m-%d %H:%M:%S %z'
@time_formatter = Strftime.new(@time_format) rescue nil
case fmt
when :text
@format = :text
@formatter = Proc.new { |type, time, level, msg|
r = caller_line(type, time, @depth_offset, level)
r << msg
r
}
when :json
@format = :json
@formatter = Proc.new { |type, time, level, msg|
r = {
'time' => format_time(time),
'level' => LEVEL_TEXT[level],
'message' => msg
}
if wid = get_worker_id(type)
r['worker_id'] = wid
end
JSON.generate(r)
}
end
nil
end
def time_format=(time_fmt)
@time_format = time_fmt
@time_formatter = Strftime.new(@time_format) rescue nil
end
def stdout?
@out == $stdout
end
def reopen!
@out.reopen(@path, "a") if @path && @path != "-"
nil
end
def force_stacktrace_level?
not @forced_stacktrace_level.nil?
end
def force_stacktrace_level(level)
@forced_stacktrace_level = level
end
def enable_debug(b=true)
@debug_mode = b
self
end
def enable_event(b=true)
@log_event_enabled = b
self
end
# If you want to suppress event emitting in specific thread, please use this method.
# Events in passed thread are never emitted.
def disable_events(thread)
# this method is not symmetric with #enable_event.
@threads_exclude_events.push(thread) unless @threads_exclude_events.include?(thread)
end
def enable_color?
!@color_reset.empty?
end
def enable_color(b=true)
if b
@color_trace = TTYColor::BLUE
@color_debug = TTYColor::WHITE
@color_info = TTYColor::GREEN
@color_warn = TTYColor::YELLOW
@color_error = TTYColor::MAGENTA
@color_fatal = TTYColor::RED
@color_reset = TTYColor::NORMAL
else
@color_trace = ''
@color_debug = ''
@color_info = ''
@color_warn = ''
@color_error = ''
@color_fatal = ''
@color_reset = ''
end
self
end
def log_type(args)
if LOG_TYPES.include?(args.first)
args.shift
else
LOG_TYPE_DEFAULT
end
end
# TODO: skip :worker0 logs when Fluentd gracefully restarted
def skipped_type?(type)
case type
when LOG_TYPE_DEFAULT
false
when LOG_TYPE_WORKER0
!@show_worker0_log
when LOG_TYPE_SUPERVISOR
!@show_supervisor_log
else
raise "BUG: unknown log type:#{type}"
end
end
def on_trace
return if @level > LEVEL_TRACE
yield
end
def trace(*args, &block)
return if @level > LEVEL_TRACE
type = log_type(args)
return if skipped_type?(type)
args << block.call if block
time, msg = event(:trace, args)
return if time.nil?
puts [@color_trace, @formatter.call(type, time, LEVEL_TRACE, msg), @color_reset].join
rescue
# logger should not raise an exception. This rescue prevents unexpected behaviour.
end
alias TRACE trace
def trace_backtrace(backtrace=$!.backtrace, type: :default)
dump_stacktrace(type, backtrace, LEVEL_TRACE)
end
def on_debug
return if @level > LEVEL_DEBUG
yield
end
def debug(*args, &block)
return if @level > LEVEL_DEBUG
type = log_type(args)
return if skipped_type?(type)
args << block.call if block
time, msg = event(:debug, args)
return if time.nil?
puts [@color_debug, @formatter.call(type, time, LEVEL_DEBUG, msg), @color_reset].join
rescue
end
alias DEBUG debug
def debug_backtrace(backtrace=$!.backtrace, type: :default)
dump_stacktrace(type, backtrace, LEVEL_DEBUG)
end
def on_info
return if @level > LEVEL_INFO
yield
end
def info(*args, &block)
return if @level > LEVEL_INFO
type = log_type(args)
return if skipped_type?(type)
args << block.call if block
time, msg = event(:info, args)
return if time.nil?
puts [@color_info, @formatter.call(type, time, LEVEL_INFO, msg), @color_reset].join
rescue
end
alias INFO info
def info_backtrace(backtrace=$!.backtrace, type: :default)
dump_stacktrace(type, backtrace, LEVEL_INFO)
end
def on_warn
return if @level > LEVEL_WARN
yield
end
def warn(*args, &block)
return if @level > LEVEL_WARN
type = log_type(args)
return if skipped_type?(type)
args << block.call if block
time, msg = event(:warn, args)
return if time.nil?
puts [@color_warn, @formatter.call(type, time, LEVEL_WARN, msg), @color_reset].join
rescue
end
alias WARN warn
def warn_backtrace(backtrace=$!.backtrace, type: :default)
dump_stacktrace(type, backtrace, LEVEL_WARN)
end
def on_error
return if @level > LEVEL_ERROR
yield
end
def error(*args, &block)
return if @level > LEVEL_ERROR
type = log_type(args)
return if skipped_type?(type)
args << block.call if block
time, msg = event(:error, args)
return if time.nil?
puts [@color_error, @formatter.call(type, time, LEVEL_ERROR, msg), @color_reset].join
rescue
end
alias ERROR error
def error_backtrace(backtrace=$!.backtrace, type: :default)
dump_stacktrace(type, backtrace, LEVEL_ERROR)
end
def on_fatal
return if @level > LEVEL_FATAL
yield
end
def fatal(*args, &block)
return if @level > LEVEL_FATAL
type = log_type(args)
return if skipped_type?(type)
args << block.call if block
time, msg = event(:fatal, args)
return if time.nil?
puts [@color_fatal, @formatter.call(type, time, LEVEL_FATAL, msg), @color_reset].join
rescue
end
alias FATAL fatal
def fatal_backtrace(backtrace=$!.backtrace, type: :default)
dump_stacktrace(type, backtrace, LEVEL_FATAL)
end
def puts(msg)
@logger << msg + "\n"
@out.flush
msg
rescue
# FIXME
nil
end
def write(data)
@out.write(data)
end
# We need `#<<` method to use this logger class with other
# libraries such as aws-sdk
alias << write
def flush
@out.flush
end
def reset
@out.reset if @out.respond_to?(:reset)
end
CachedLog = Struct.new(:msg, :time)
def ignore_repeated_log?(key, time, message)
cached_log = Thread.current[key]
return false if cached_log.nil?
(cached_log.msg == message) && (time - cached_log.time <= @ignore_repeated_log_interval)
end
def ignore_same_log?(time, message)
cached_log = Thread.current[:last_same_log]
if cached_log.nil?
Thread.current[:last_same_log] = {message => time}
return false
end
prev_time = cached_log[message]
if prev_time
if (time - prev_time) <= @ignore_same_log_interval
true
else
cached_log[message] = time
false
end
else
if cached_log.size >= IGNORE_SAME_LOG_MAX_CACHE_SIZE
cached_log.reject! do |_, cached_time|
(time - cached_time) > @ignore_same_log_interval
end
end
# If the size is still over, we have no choice but to clear it.
cached_log.clear if cached_log.size >= IGNORE_SAME_LOG_MAX_CACHE_SIZE
cached_log[message] = time
false
end
end
def suppress_stacktrace?(backtrace)
cached_log = Thread.current[:last_repeated_stacktrace]
return false if cached_log.nil?
cached_log.msg == backtrace
end
def dump_stacktrace(type, backtrace, level)
return if @level > level
dump_stacktrace_internal(
type,
backtrace,
force_stacktrace_level? ? @forced_stacktrace_level : level,
)
end
def dump_stacktrace_internal(type, backtrace, level)
return if @level > level
time = Time.now
if @format == :text
line = caller_line(type, time, 5, level)
if @ignore_repeated_log_interval && ignore_repeated_log?(:last_repeated_stacktrace, time, backtrace)
return
elsif @suppress_repeated_stacktrace && suppress_stacktrace?(backtrace)
puts [" ", line, 'suppressed same stacktrace'].join
Thread.current[:last_repeated_stacktrace] = CachedLog.new(backtrace, time) if @ignore_repeated_log_interval
else
backtrace.each { |msg|
puts [" ", line, msg].join
}
Thread.current[:last_repeated_stacktrace] = CachedLog.new(backtrace, time) if @suppress_repeated_stacktrace
end
else
r = {
'time' => format_time(time),
'level' => LEVEL_TEXT[level],
}
if wid = get_worker_id(type)
r['worker_id'] = wid
end
if @ignore_repeated_log_interval && ignore_repeated_log?(:last_repeated_stacktrace, time, backtrace)
return
elsif @suppress_repeated_stacktrace && suppress_stacktrace?(backtrace)
r['message'] = 'suppressed same stacktrace'
Thread.current[:last_repeated_stacktrace] = CachedLog.new(backtrace, time) if @ignore_repeated_log_interval
else
r['message'] = backtrace.join("\n")
Thread.current[:last_repeated_stacktrace] = CachedLog.new(backtrace, time) if @suppress_repeated_stacktrace
end
puts JSON.generate(r)
end
nil
end
def get_worker_id(type)
if type == :default && (@process_type == :worker0 || @process_type == :workers)
@worker_id
else
nil
end
end
def event(level, args)
time = Time.now
message = @optional_header ? @optional_header.dup : ''
map = @optional_attrs ? @optional_attrs.dup : {}
args.each {|a|
if a.is_a?(Hash)
a.each_pair {|k,v|
map[k.to_s] = v
}
else
message << a.to_s
end
}
map.each_pair {|k,v|
if k == "error".freeze && v.is_a?(Exception) && !map.has_key?("error_class")
message << " error_class=#{v.class.to_s} error=#{v.to_s.inspect}"
else
message << " #{k}=#{v.inspect}"
end
}
if @ignore_same_log_interval
if ignore_same_log?(time, message)
return nil, nil
end
elsif @ignore_repeated_log_interval
if ignore_repeated_log?(:last_repeated_log, time, message)
return nil, nil
else
Thread.current[:last_repeated_log] = CachedLog.new(message, time)
end
end
if @log_event_enabled && !@threads_exclude_events.include?(Thread.current)
record = map.dup
record.keys.each {|key|
record[key] = record[key].inspect unless record[key].respond_to?(:to_msgpack)
}
record['message'] = message.dup
@engine.push_log_event("#{LOG_EVENT_TAG_PREFIX}.#{level}", Fluent::EventTime.from_time(time), record)
end
return time, message
end
def caller_line(type, time, depth, level)
worker_id_part = if type == :default && (@process_type == :worker0 || @process_type == :workers)
@worker_id_part
else
"".freeze
end
log_msg = "#{format_time(time)} [#{LEVEL_TEXT[level]}]: #{worker_id_part}"
if @debug_mode
line = caller(depth+1)[0]
if match = /^(.+?):(\d+)(?::in `(.*)')?/.match(line)
file = match[1].split('/')[-2,2].join('/')
line = match[2]
method = match[3]
return "#{log_msg}#{file}:#{line}:#{method}: "
end
end
return log_msg
end
def format_time(time)
@time_formatter ? @time_formatter.exec(time) : time.strftime(@time_format)
end
end
# PluginLogger has own log level separated from global $log object.
# This class enables log_level option in each plugin.
#
# PluginLogger has same functionality as Log but some methods are forwarded to internal logger
# for keeping logging action consistency in the process, e.g. color, event, etc.
class PluginLogger < Log
def initialize(logger)
@logger = logger
@level = @logger.level
@format = nil
@depth_offset = 2
if logger.instance_variable_defined?(:@suppress_repeated_stacktrace)
@suppress_repeated_stacktrace = logger.instance_variable_get(:@suppress_repeated_stacktrace)
end
if logger.instance_variable_defined?(:@forced_stacktrace_level)
@forced_stacktrace_level = logger.instance_variable_get(:@forced_stacktrace_level)
end
if logger.instance_variable_defined?(:@ignore_repeated_log_interval)
@ignore_repeated_log_interval = logger.instance_variable_get(:@ignore_repeated_log_interval)
end
if logger.instance_variable_defined?(:@ignore_same_log_interval)
@ignore_same_log_interval = logger.instance_variable_get(:@ignore_same_log_interval)
end
self.format = @logger.format
self.time_format = @logger.time_format
enable_color @logger.enable_color?
end
def level=(log_level_str)
@level = Log.str_to_level(log_level_str)
end
alias orig_format= format=
alias orig_time_format= time_format=
alias orig_enable_color enable_color
def format=(fmt)
self.orig_format = fmt
@logger.format = fmt
end
def time_format=(fmt)
self.orig_time_format = fmt
@logger.time_format = fmt
end
def enable_color(b = true)
orig_enable_color b
@logger.enable_color b
end
extend Forwardable
def_delegators '@logger', :get_worker_id, :enable_color?, :enable_debug, :enable_event,
:disable_events, :log_event_enabled, :log_event_enabled=, :event, :caller_line, :puts, :write,
:<<, :flush, :reset, :out, :out=, :optional_header, :optional_header=, :optional_attrs,
:optional_attrs=
end
module PluginLoggerMixin
def self.included(klass)
klass.instance_eval {
desc 'Allows the user to set different levels of logging for each plugin.'
config_param :@log_level, :string, default: nil, alias: :log_level # 'log_level' will be warned as deprecated
}
end
def initialize
super
@log = $log # Use $log object directly by default
end
attr_accessor :log
def configure(conf)
super
if plugin_id_configured? || conf['@log_level']
@log = PluginLogger.new($log.dup) unless @log.is_a?(PluginLogger)
@log.optional_attrs = {}
if level = conf['@log_level']
@log.level = level
end
if plugin_id_configured?
@log.optional_header = "[#{@id}] "
end
end
end
def terminate
super
@log.reset
end
end
# This class delegates some methods which are used in `Fluent::Logger` to a instance variable(`dev`) in `Logger::LogDevice` class
# https://github.com/ruby/ruby/blob/7b2d47132ff8ee950b0f978ab772dee868d9f1b0/lib/logger.rb#L661
class LogDeviceIO < ::Logger::LogDevice
def flush
if @dev.respond_to?(:flush)
@dev.flush
else
super
end
end
def tty?
if @dev.respond_to?(:tty?)
@dev.tty?
else
super
end
end
def sync=(v)
if @dev.respond_to?(:sync=)
@dev.sync = v
else
super
end
end
def reopen(path, mode)
if mode != 'a'
raise "Unsupported mode: #{mode}"
end
super(path)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/timezone.rb | lib/fluent/timezone.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'tzinfo'
require 'fluent/config/error'
# For v0.12. Will be removed after v2
module IntegerExt
refine Integer do
def to_time
Time.at(self)
end
end
end
module Fluent
class Timezone
# [+-]HH:MM, [+-]HHMM, [+-]HH
NUMERIC_PATTERN = %r{\A[+-]\d\d(:?\d\d)?\z}
# Region/Zone, Region/Zone/Zone
NAME_PATTERN = %r{\A[^/]+/[^/]+(/[^/]+)?\z}
# Validate the format of the specified timezone.
#
# Valid formats are as follows. Note that timezone abbreviations
# such as PST and JST are not supported intentionally.
#
# 1. [+-]HH:MM (e.g. "+09:00")
# 2. [+-]HHMM (e.g. "+0900")
# 3. [+-]HH (e.g. "+09")
# 4. Region/Zone (e.g. "Asia/Tokyo")
# 5. Region/Zone/Zone (e.g. "America/Argentina/Buenos_Aires")
#
# In the 4th and 5th cases, it is checked whether the specified
# timezone exists in the timezone database.
#
# When the given timezone is valid, true is returned. Otherwise,
# false is returned. When nil is given, false is returned.
def self.validate(timezone)
# If the specified timezone is nil.
if timezone.nil?
# Invalid.
return false
end
# [+-]HH:MM, [+-]HHMM, [+-]HH
if NUMERIC_PATTERN === timezone
# Valid. It can be parsed by Time.zone_offset method.
return true
end
# Region/Zone, Region/Zone/Zone
if NAME_PATTERN === timezone
begin
# Get a Timezone instance for the specified timezone.
TZInfo::Timezone.get(timezone)
rescue
# Invalid. The string does not exist in the timezone database.
return false
else
# Valid. The string was found in the timezone database.
return true
end
else
# Invalid. Timezone abbreviations are not supported.
return false
end
end
# Validate the format of the specified timezone.
#
# The implementation of this method calls validate(timezone) method
# to check whether the given timezone is valid. When invalid, this
# method raises a ConfigError.
def self.validate!(timezone)
unless validate(timezone)
raise ConfigError, "Unsupported timezone '#{timezone}'"
end
end
using IntegerExt
# Create a formatter for a timezone and optionally a format.
#
# An Proc object is returned. If the given timezone is invalid,
# nil is returned.
def self.formatter(timezone = nil, format = nil)
if timezone.nil?
return nil
end
# [+-]HH:MM, [+-]HHMM, [+-]HH
if NUMERIC_PATTERN === timezone
offset = Time.zone_offset(timezone)
case
when format.is_a?(String)
return Proc.new {|time|
time.to_time.localtime(offset).strftime(format)
}
when format.is_a?(Strftime)
return Proc.new {|time|
format.exec(time.to_time.localtime(offset))
}
else
return Proc.new {|time|
time.to_time.localtime(offset).iso8601
}
end
end
# Region/Zone, Region/Zone/Zone
if NAME_PATTERN === timezone
begin
tz = TZInfo::Timezone.get(timezone)
rescue
return nil
end
case
when format.is_a?(String)
return Proc.new {|time|
time = time.to_time
time.localtime(tz.period_for_utc(time).utc_total_offset).strftime(format)
}
when format.is_a?(Strftime)
return Proc.new {|time|
time = time.to_time
format.exec(time.localtime(tz.period_for_utc(time).utc_total_offset))
}
else
return Proc.new {|time|
time = time.to_time
time.localtime(tz.period_for_utc(time).utc_total_offset).iso8601
}
end
end
return nil
end
def self.utc_offset(timezone)
return 0 if timezone.nil?
case timezone
when NUMERIC_PATTERN
Time.zone_offset(timezone)
when NAME_PATTERN
tz = TZInfo::Timezone.get(timezone)
->(time) {
tz.period_for_utc(time.to_time).utc_total_offset
}
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/filter.rb | lib/fluent/filter.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/compat/filter'
module Fluent
Filter = Fluent::Compat::Filter
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/time.rb | lib/fluent/time.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'time'
require 'msgpack'
require 'strptime'
require 'fluent/timezone'
require 'fluent/configurable'
require 'fluent/config/error'
module Fluent
class EventTime
TYPE = 0
FORMATTER = Strftime.new('%Y-%m-%d %H:%M:%S.%N %z')
def initialize(sec, nsec = 0)
@sec = sec
@nsec = nsec
end
def ==(other)
if other.is_a?(Fluent::EventTime)
@sec == other.sec
else
@sec == other
end
end
def sec
@sec
end
def nsec
@nsec
end
def to_int
@sec
end
alias :to_i :to_int
def to_f
@sec + @nsec / 1_000_000_000.0
end
# for Time.at
def to_r
Rational(@sec * 1_000_000_000 + @nsec, 1_000_000_000)
end
# for > and others
def coerce(other)
[other, @sec]
end
def to_s
@sec.to_s
end
begin
# ruby 2.5 or later
Time.at(0, 0, :nanosecond)
def to_time
Time.at(@sec, @nsec, :nanosecond)
end
rescue
def to_time
Time.at(Rational(@sec * 1_000_000_000 + @nsec, 1_000_000_000))
end
end
def to_json(*args)
@sec.to_s
end
def to_msgpack(io = nil)
@sec.to_msgpack(io)
end
def to_msgpack_ext
[@sec, @nsec].pack('NN')
end
def self.from_msgpack_ext(data)
new(*data.unpack('NN'))
end
def self.from_time(time)
Fluent::EventTime.new(time.to_i, time.nsec)
end
def self.eq?(a, b)
if a.is_a?(Fluent::EventTime) && b.is_a?(Fluent::EventTime)
a.sec == b.sec && a.nsec == b.nsec
else
a == b
end
end
def self.now
# This method is called many time. so call Process.clock_gettime directly instead of Fluent::Clock.real_now
now = Process.clock_gettime(Process::CLOCK_REALTIME, :nanosecond)
Fluent::EventTime.new(now / 1_000_000_000, now % 1_000_000_000)
end
def self.parse(*args)
from_time(Time.parse(*args))
end
## TODO: For performance, implement +, -, and so on
def method_missing(name, *args, &block)
@sec.send(name, *args, &block)
end
def inspect
FORMATTER.exec(Time.at(self))
end
end
# Shared time-related configuration (<time_format>, <localtime>, ...) and
# factory helpers mixed into parser/formatter plugins.
module TimeMixin
TIME_TYPES = ['string', 'unixtime', 'float', 'mixed']
# [name, type, options] triples fed to config_param below.
TIME_PARAMETERS = [
[:time_format, :string, {default: nil}],
[:localtime, :bool, {default: true}], # UTC if :localtime is false and :timezone is nil
[:utc, :bool, {default: false}], # to turn :localtime false
[:timezone, :string, {default: nil}],
[:time_format_fallbacks, :array, {default: []}], # try time_format, then try fallbacks
]
TIME_FULL_PARAMETERS = [
# To avoid to define :time_type twice (in plugin_helper/inject)
[:time_type, :enum, {default: :string, list: TIME_TYPES.map(&:to_sym)}],
] + TIME_PARAMETERS
module TimeParameters
include Fluent::Configurable
TIME_FULL_PARAMETERS.each do |name, type, opts|
config_param(name, type, **opts)
end
# Normalizes 'localtime'/'utc' before Configurable#configure (super) runs,
# then validates the combination of time-related parameters.
def configure(conf)
if conf.has_key?('localtime') || conf.has_key?('utc')
if conf.has_key?('localtime')
conf['localtime'] = Fluent::Config.bool_value(conf['localtime'])
elsif conf.has_key?('utc')
# 'utc true' is translated into 'localtime false' here.
conf['localtime'] = !(Fluent::Config.bool_value(conf['utc']))
# Specifying "localtime false" means using UTC in TimeFormatter
# And specifying "utc" is different from specifying "timezone +0000"(it's not always UTC).
# There are difference between "Z" and "+0000" in timezone formatting.
# TODO: add kwargs to TimeFormatter to specify "using localtime", "using UTC" or "using specified timezone" in more explicit way
end
end
super
# When both keys are present their values must contradict each other
# exactly (localtime XOR utc); anything else is ambiguous.
if conf.has_key?('localtime') && conf.has_key?('utc') && !(@localtime ^ @utc)
raise Fluent::ConfigError, "both of utc and localtime are specified, use only one of them"
end
if conf.has_key?('time_type') and @time_type == :mixed
if @time_format.nil? and @time_format_fallbacks.empty?
raise Fluent::ConfigError, "time_type is :mixed but time_format and time_format_fallbacks is empty."
end
end
Fluent::Timezone.validate!(@timezone) if @timezone
end
end
module Parser
def self.included(mod)
mod.include TimeParameters
end
# Factory: build a time parser from this plugin's time parameters.
def time_parser_create(type: @time_type, format: @time_format, timezone: @timezone, force_localtime: false)
return MixedTimeParser.new(type, format, @localtime, timezone, @utc, force_localtime, @time_format_fallbacks) if type == :mixed
return NumericTimeParser.new(type) if type != :string
return TimeParser.new(format, true, nil) if force_localtime
localtime = @localtime && (timezone.nil? && !@utc)
TimeParser.new(format, localtime, timezone)
end
end
module Formatter
def self.included(mod)
mod.include TimeParameters
end
# Factory: build a time formatter from this plugin's time parameters.
def time_formatter_create(type: @time_type, format: @time_format, timezone: @timezone, force_localtime: false)
return NumericTimeFormatter.new(type) if type != :string
return TimeFormatter.new(format, true, nil) if force_localtime
localtime = @localtime && (timezone.nil? && !@utc)
TimeFormatter.new(format, localtime, timezone)
end
end
end
# Parses time strings into Fluent::EventTime. The fastest applicable
# strategy (Strptime gem, Time.strptime, Time.iso8601 or Time.parse) is
# chosen once at construction, and the last two parsed values are cached.
class TimeParser
class TimeParseError < StandardError; end
# @param format [String, nil] strptime-style format; nil falls back to Time.parse
# @param localtime [Boolean] interpret times as local time when no timezone is given
# @param timezone [String, nil] explicit timezone string
def initialize(format = nil, localtime = true, timezone = nil)
if format.nil? && (timezone || !localtime)
raise Fluent::ConfigError, "specifying timezone requires time format"
end
# Two-slot cache of the most recently parsed values (see #parse).
@cache1_key = nil
@cache1_time = nil
@cache2_key = nil
@cache2_time = nil
# If the format itself carries a timezone (%z/%Z), no fixed offset applies.
format_with_timezone = format && (format.include?("%z") || format.include?("%Z"))
utc_offset = case
when format_with_timezone then
nil
when timezone then
Fluent::Timezone.utc_offset(timezone)
when localtime then
nil
else
0 # utc
end
# Prefer the native Strptime gem when it can compile this format.
strptime = format && (Strptime.new(format) rescue nil)
# NOTE(review): the respond_to?(:call) branches suggest
# Fluent::Timezone.utc_offset can return a callable (time-dependent
# offset) — confirm against fluent/timezone.rb.
@parse = case
when format_with_timezone && strptime then ->(v){ Fluent::EventTime.from_time(strptime.exec(v)) }
when format_with_timezone then ->(v){ Fluent::EventTime.from_time(Time.strptime(v, format)) }
when format == '%iso8601' then ->(v){ Fluent::EventTime.from_time(Time.iso8601(v)) }
when strptime then
if utc_offset.nil?
->(v){ t = strptime.exec(v); Fluent::EventTime.new(t.to_i, t.nsec) }
elsif utc_offset.respond_to?(:call)
->(v) { t = strptime.exec(v); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset.call(t), t.nsec) }
else
->(v) { t = strptime.exec(v); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset, t.nsec) }
end
when format then
if utc_offset.nil?
->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i, t.nsec) }
elsif utc_offset.respond_to?(:call)
->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset.call(t), t.nsec) }
else
->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset, t.nsec) }
end
else ->(v){ Fluent::EventTime.parse(v) }
end
end
# TODO: new cache mechanism using format string
# Parse a time string, consulting the two-entry cache first.
# @raise [TimeParseError] when value is not a String or cannot be parsed
def parse(value)
unless value.is_a?(String)
raise TimeParseError, "value must be string: #{value}"
end
if @cache1_key == value
return @cache1_time
elsif @cache2_key == value
return @cache2_time
else
begin
time = @parse.call(value)
rescue => e
raise TimeParseError, "invalid time format: value = #{value}, error_class = #{e.class.name}, error = #{e.message}"
end
# Shift cache slots: slot1 takes the old slot2, slot2 takes the new entry.
@cache1_key = @cache2_key
@cache1_time = @cache2_time
@cache2_key = value
@cache2_time = time
return time
end
end
alias :call :parse
end
# Parses numeric timestamps (integer seconds, or "sec.nsec" floats) into
# Fluent::EventTime, with the same two-slot value cache as TimeParser.
class NumericTimeParser < TimeParser # to include TimeParseError
  # @param type [Symbol, String] :unixtime (integer seconds) or :float ("sec.fraction")
  # localtime/timezone are accepted for interface parity but unused here.
  def initialize(type, localtime = nil, timezone = nil)
    @cache1_key = @cache1_time = @cache2_key = @cache2_time = nil
    if type == :unixtime
      define_singleton_method(:parse, method(:parse_unixtime))
      define_singleton_method(:call, method(:parse_unixtime))
    else # :float
      define_singleton_method(:parse, method(:parse_float))
      define_singleton_method(:call, method(:parse_float))
    end
  end

  # Parse integer epoch seconds (String or Numeric); nsec is always 0.
  # @raise [TimeParseError] on unsupported value types
  def parse_unixtime(value)
    unless value.is_a?(String) || value.is_a?(Numeric)
      raise TimeParseError, "value must be a string or a number: #{value}(#{value.class})"
    end
    if @cache1_key == value
      return @cache1_time
    elsif @cache2_key == value
      return @cache2_time
    end
    begin
      time = Fluent::EventTime.new(value.to_i)
    rescue => e
      raise TimeParseError, "invalid time format: value = #{value}, error_class = #{e.class.name}, error = #{e.message}"
    end
    @cache1_key = @cache2_key
    @cache1_time = @cache2_time
    @cache2_key = value
    @cache2_time = time
    time
  end

  # rough benchmark result to compare handmade parser vs Fluent::EventTime.from_time(Time.at(value.to_r))
  # full: with 9-digits of nsec after dot
  # msec: with 3-digits of msec after dot
  # 10_000_000 times loop on MacBookAir
  ## parse_by_myself(full): 12.162475 sec
  ## parse_by_myself(msec): 15.050435 sec
  ## parse_by_to_r (full): 28.722362 sec
  ## parse_by_to_r (msec): 28.232856 sec
  # Parse "sec.fraction" (String or Numeric) into (sec, nsec) by hand —
  # much faster than going through Rational (see benchmark above).
  def parse_float(value)
    unless value.is_a?(String) || value.is_a?(Numeric)
      raise TimeParseError, "value must be a string or a number: #{value}(#{value.class})"
    end
    if @cache1_key == value
      return @cache1_time
    elsif @cache2_key == value
      return @cache2_time
    end
    begin
      sec_s, nsec_s, _ = value.to_s.split('.', 3) # throw away second-dot and later
      # Keep at most 9 fractional digits (indices 0..8). The previous
      # [0..9] slice kept 10 digits, which could produce nsec >= 1e9.
      nsec_s = nsec_s && nsec_s[0..8] || '0'
      # Right-pad the fraction to a full 9-digit nanosecond count.
      nsec_s += '0' * (9 - nsec_s.size) if nsec_s.size < 9
      time = Fluent::EventTime.new(sec_s.to_i, nsec_s.to_i)
    rescue => e
      raise TimeParseError, "invalid time format: value = #{value}, error_class = #{e.class.name}, error = #{e.message}"
    end
    @cache1_key = @cache2_key
    @cache1_time = @cache2_time
    @cache2_key = value
    @cache2_time = time
    time
  end
end
# Formats a time value into a string with a two-slot result cache.
# Subsecond-aware cache equality is used only when the format contains an
# unescaped %L or %N; otherwise second-precision equality is sufficient.
class TimeFormatter
def initialize(format = nil, localtime = true, timezone = nil)
# Two-slot cache: @tc1/@tc2 hold time keys, @tc1_str/@tc2_str the strings.
@tc1 = 0
@tc1_str = nil
@tc2 = 0
@tc2_str = nil
# Prefer the native Strftime gem when it can compile this format.
strftime = format && (Strftime.new(format) rescue nil)
# The regexp matches an unescaped %L or %<digits>N (subsecond directives).
if format && format =~ /(^|[^%])(%%)*%L|(^|[^%])(%%)*%\d*N/
define_singleton_method(:format, method(:format_with_subsec))
define_singleton_method(:call, method(:format_with_subsec))
else
define_singleton_method(:format, method(:format_without_subsec))
define_singleton_method(:call, method(:format_without_subsec))
end
formatter = Fluent::Timezone.formatter(timezone, strftime ? strftime : format)
@format_nocache = case
when formatter then formatter
when strftime && localtime then ->(time){ strftime.exec(Time.at(time)) }
when format && localtime then ->(time){ Time.at(time).strftime(format) }
when strftime then ->(time){ strftime.exec(Time.at(time).utc) }
when format then ->(time){ Time.at(time).utc.strftime(format) }
when localtime then ->(time){ Time.at(time).iso8601 }
else ->(time){ Time.at(time).utc.iso8601 }
end
end
# Cached formatting with second-precision equality (EventTime#== delegates
# to integer seconds, so all times within one second share a cache slot).
def format_without_subsec(time)
if @tc1 == time
return @tc1_str
elsif @tc2 == time
return @tc2_str
else
str = format_nocache(time)
# Evict the slot holding the smaller (older) time value.
if @tc1 < @tc2
@tc1 = time
@tc1_str = str
else
@tc2 = time
@tc2_str = str
end
return str
end
end
# Cached formatting with exact (sec, nsec) equality via EventTime.eq?.
def format_with_subsec(time)
if Fluent::EventTime.eq?(@tc1, time)
return @tc1_str
elsif Fluent::EventTime.eq?(@tc2, time)
return @tc2_str
else
str = format_nocache(time)
if @tc1 < @tc2
@tc1 = time
@tc1_str = str
else
@tc2 = time
@tc2_str = str
end
return str
end
end
## Dynamically defined in #initialize
# def format(time)
# end
# Format without consulting the cache.
def format_nocache(time)
@format_nocache.call(time)
end
end
# Formats a time as an integer-seconds string (:unixtime) or as
# "sec.nnnnnnnnn" with a 9-digit nanosecond fraction (:float).
class NumericTimeFormatter < TimeFormatter
  # @param type [Symbol] :unixtime or :float
  # localtime/timezone are accepted for interface parity but unused here.
  def initialize(type, localtime = nil, timezone = nil)
    @cache1_key = @cache1_time = @cache2_key = @cache2_time = nil
    if type == :unixtime
      define_singleton_method(:format, method(:format_unixtime))
      define_singleton_method(:call, method(:format_unixtime))
    else # :float
      define_singleton_method(:format, method(:format_float))
      define_singleton_method(:call, method(:format_float))
    end
  end

  def format_unixtime(time)
    time.to_i.to_s
  end

  def format_float(time)
    if time.is_a?(Fluent::EventTime) || time.is_a?(Time)
      # 10.015 secs for 10_000_000 times call on MacBookAir
      nsec_s = time.nsec.to_s
      # Left-pad to 9 digits so nsec=15 renders ".000000015".
      # BUG FIX: the padding previously REPLACED nsec_s with zeros
      # ('0' * (9 - size)), discarding the digits entirely.
      nsec_s = '0' * (9 - nsec_s.size) + nsec_s if nsec_s.size < 9
      "#{time.sec}.#{nsec_s}"
    else # integer (or float?)
      time.to_f.to_s
    end
  end
end
# MixedTimeParser is available when time_type is set to :mixed
#
# Use Case 1: primary format is specified explicitly in time_format
# time_type mixed
# time_format %iso8601
# time_format_fallbacks unixtime
# Use Case 2: time_format is omitted
# time_type mixed
# time_format_fallbacks %iso8601, unixtime
#
class MixedTimeParser < TimeParser # to include TimeParseError
# Build one sub-parser per configured format: the primary format (may be
# nil) is tried first, then each entry of time_format_fallbacks in order.
def initialize(type, format = nil, localtime = nil, timezone = nil, utc = nil, force_localtime = nil, fallbacks = [])
@parsers = []
fallbacks.unshift(format).each do |fallback|
next unless fallback
case fallback
when 'unixtime', 'float'
@parsers << NumericTimeParser.new(fallback, localtime, timezone)
else
if force_localtime
@parsers << TimeParser.new(fallback, true, nil)
else
localtime = localtime && (timezone.nil? && !utc)
@parsers << TimeParser.new(fallback, localtime, timezone)
end
end
end
end
# Try each sub-parser in order; the first successful parse wins.
# @raise [TimeParseError] listing the tried parser classes when all fail
def parse(value)
@parsers.each do |parser|
begin
# Numeric parsers would to_i almost any string (see parse_unixtime),
# so skip them unless the value actually looks like a number.
Float(value) if parser.class == Fluent::NumericTimeParser
rescue
next
end
begin
return parser.parse(value)
rescue
# skip TimeParseError
end
end
fallback_class = @parsers.collect do |parser| parser.class end.join(",")
raise TimeParseError, "invalid time format: value = #{value}, even though fallbacks: #{fallback_class}"
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/daemonizer.rb | lib/fluent/daemonizer.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/config/error'
module Fluent
# Detaches fluentd from the terminal and records its pid. On platforms
# where Process.daemon is unavailable, spawns a fresh 'fluentd' process
# instead.
class Daemonizer
# Convenience wrapper over #daemonize.
def self.daemonize(pid_path, args = [], &block)
new.daemonize(pid_path, args, &block)
end
# Daemonize via Process.daemon and run the given block, or fall back to
# spawning on NotImplementedError (platforms without fork).
def daemonize(pid_path, args = [])
pid_fullpath = File.absolute_path(pid_path)
check_pidfile(pid_fullpath)
begin
Process.daemon(false, false)
File.write(pid_fullpath, Process.pid.to_s)
# install signal and set process name are performed by supervisor
install_at_exit_handlers(pid_fullpath)
yield
rescue NotImplementedError
daemonize_with_spawn(pid_fullpath, args)
end
end
private
# Fallback daemonization: spawn 'fluentd' with the original arguments and
# record the child's pid. Returns the child pid.
def daemonize_with_spawn(pid_fullpath, args)
pid = Process.spawn(*['fluentd'].concat(args))
File.write(pid_fullpath, pid.to_s)
pid
end
# Refuse to start when an existing pidfile points at a live process, or
# when the pidfile (or its directory, if absent) is not accessible.
def check_pidfile(pid_path)
if File.exist?(pid_path)
if !File.readable?(pid_path) || !File.writable?(pid_path)
raise Fluent::ConfigError, "Cannot access pid file: #{pid_path}"
end
pid =
begin
Integer(File.read(pid_path), 10)
rescue TypeError, ArgumentError
return # ignore
end
begin
# Signal 0 only probes whether the process exists.
Process.kill(0, pid)
raise Fluent::ConfigError, "pid(#{pid}) is running"
rescue Errno::EPERM
# Process exists but belongs to another user: still running.
raise Fluent::ConfigError, "pid(#{pid}) is running"
rescue Errno::ESRCH
# No such process: a stale pidfile is acceptable.
end
else
unless File.writable?(File.dirname(pid_path))
raise Fluent::ConfigError, "Cannot access directory for pid file: #{File.dirname(pid_path)}"
end
end
end
# Remove the pidfile when this process exits.
def install_at_exit_handlers(pidfile)
at_exit do
if File.exist?(pidfile)
File.delete(pidfile)
end
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/output.rb | lib/fluent/output.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/compat/output'
require 'fluent/output_chain'
module Fluent
# Backward-compatibility aliases: the v0.12-era output base classes now
# live under Fluent::Compat, but existing plugins still reference the old
# top-level constants.
Output = Fluent::Compat::Output
BufferedOutput = Fluent::Compat::BufferedOutput
ObjectBufferedOutput = Fluent::Compat::ObjectBufferedOutput
TimeSlicedOutput = Fluent::Compat::TimeSlicedOutput
MultiOutput = Fluent::Compat::MultiOutput
# Some input plugins refer BufferQueueLimitError for throttling
BufferQueueLimitError = Fluent::Compat::BufferQueueLimitError
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/plugin_id.rb | lib/fluent/plugin_id.rb | #
# Fluent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'set'
require 'fluent/env'
require 'fluent/variable_store'
module Fluent
# Mix-in giving every plugin a (possibly user-configured) '@id': used for
# duplicate detection across the configuration and for the per-plugin
# root directory.
module PluginId
def initialize
super
@_plugin_id_variable_store = nil
@_plugin_root_dir = nil
@id = nil
end
def configure(conf)
# Process-wide set of ids, shared to detect duplicated '@id's.
@_plugin_id_variable_store = Fluent::VariableStore.fetch_or_build(:plugin_id, default_value: Set.new)
@id = conf['@id']
@_id_configured = !!@id # plugin id is explicitly configured by users (or not)
if @id
@id = @id.to_s
if @_plugin_id_variable_store.include?(@id) && !plugin_id_for_test?
raise Fluent::ConfigError, "Duplicated plugin id `#{@id}`. Check whole configuration and fix it."
end
@_plugin_id_variable_store.add(@id)
end
super
end
# True when any caller is a test file (test_*.rb); duplicated plugin ids
# are tolerated in that case.
def plugin_id_for_test?
caller_locations.each do |location|
# Thread::Backtrace::Location#path returns base filename or absolute path.
# #absolute_path returns absolute_path always.
# https://bugs.ruby-lang.org/issues/12159
if /\/test_[^\/]+\.rb$/.match?(location.absolute_path) # location.path =~ /test_.+\.rb$/
return true
end
end
false
end
# Whether '@id' was explicitly given in configuration (nil before #configure).
def plugin_id_configured?
if instance_variable_defined?(:@_id_configured)
@_id_configured
end
end
# The configured id, or a fallback derived from the Ruby object id.
def plugin_id
if instance_variable_defined?(:@id)
@id || "object:#{object_id.to_s(16)}"
else
"object:#{object_id.to_s(16)}"
end
end
# Per-plugin directory under <root_dir>/worker<N>/<plugin_id>, created on
# first access. Requires both system root_dir and an explicit plugin id.
def plugin_root_dir
return @_plugin_root_dir if @_plugin_root_dir
return nil unless system_config.root_dir
return nil unless plugin_id_configured?
# Fluent::Plugin::Base#fluentd_worker_id
dir = File.join(system_config.root_dir, "worker#{fluentd_worker_id}", plugin_id)
FileUtils.mkdir_p(dir, mode: system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION) unless Dir.exist?(dir)
@_plugin_root_dir = dir.freeze
dir
end
def stop
# Release the id so it may be reused after this plugin stops.
if @_plugin_id_variable_store
@_plugin_id_variable_store.delete(@id)
end
super
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/file_wrapper.rb | lib/fluent/file_wrapper.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# On non-Windows platforms plain File already behaves as needed: alias it.
unless Fluent.windows?
Fluent::FileWrapper = File
else
require 'fluent/win32api'
module Fluent
# Windows replacement for File.open/File.stat that opens files with
# FILE_SHARE_DELETE, so other processes can rotate or delete a file
# while fluentd keeps a handle open.
module FileWrapper
def self.open(path, mode='r')
io = WindowsFile.new(path, mode).io
if block_given?
v = yield io
io.close
v
else
io
end
end
def self.stat(path)
f = WindowsFile.new(path)
s = f.stat
f.close
s
end
end
class WindowsFile
include File::Constants
attr_reader :io
INVALID_HANDLE_VALUE = -1
def initialize(path, mode_enc='r')
@path = path
mode, enc = mode_enc.split(":", 2)
@io = File.open(path, mode2flags(mode))
@io.set_encoding(enc) if enc
@file_handle = Win32API._get_osfhandle(@io.to_i)
# Expose the Windows file index as io.ino, mirroring File#ino on unix.
@io.instance_variable_set(:@file_index, self.ino)
def @io.ino
@file_index
end
end
def close
@io.close
@file_handle = INVALID_HANDLE_VALUE
end
# To keep backward compatibility, we continue to use GetFileInformationByHandle()
# to get file id.
# Note that Ruby's File.stat uses GetFileInformationByHandleEx() with FileIdInfo
# and returned value is different with above one, former one is 64 bit while
# later one is 128bit.
# Returns 0 when the API call fails.
def ino
# NOTE(review): '\0' in single quotes is a two-character backslash-zero
# sequence, so this output buffer is larger than the commented 72 bytes;
# harmless for an out-buffer, but the size comment is off.
by_handle_file_information = '\0'*(4+8+8+8+4+4+4+4+4+4) #72bytes
unless Win32API.GetFileInformationByHandle(@file_handle, by_handle_file_information)
return 0
end
by_handle_file_information.unpack("I11Q1")[11] # fileindex
end
# File.stat plus a DeletePending check; the returned stat's #ino is
# overridden with the Windows file index from GetFileInformationByHandle.
def stat
raise Errno::ENOENT if delete_pending
s = File.stat(@path)
s.instance_variable_set :@ino, self.ino
def s.ino; @ino; end
s
end
private
# Translate an fopen-style mode string ("r", "w+", "ab", ...) into
# File::Constants open flags.
def mode2flags(mode)
# Always inject File::Constants::SHARE_DELETE
# https://github.com/fluent/fluentd/pull/3585#issuecomment-1101502617
# To enable SHARE_DELETE, BINARY is also required.
# https://bugs.ruby-lang.org/issues/11218
# https://github.com/ruby/ruby/blob/d6684f063bc53e3cab025bd39526eca3b480b5e7/win32/win32.c#L6332-L6345
flags = BINARY | SHARE_DELETE
case mode.delete("b")
when "r"
flags |= RDONLY
when "r+"
flags |= RDWR
when "w"
flags |= WRONLY | CREAT | TRUNC
when "w+"
flags |= RDWR | CREAT | TRUNC
when "a"
flags |= WRONLY | CREAT | APPEND
when "a+"
flags |= RDWR | CREAT | APPEND
else
raise Errno::EINVAL.new("Unsupported mode by Fluent::FileWrapper: #{mode}")
end
end
# DeletePending is a Windows-specific file state that roughly means
# "this file is queued for deletion, so close any open handlers"
#
# This flag can be retrieved via GetFileInformationByHandleEx().
#
# https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-getfileinformationbyhandleex
#
# Returns false when the API call fails.
def delete_pending
file_standard_info = 0x01
bufsize = 1024
buf = '\0' * bufsize
unless Win32API.GetFileInformationByHandleEx(@file_handle, file_standard_info, buf, bufsize)
return false
end
return buf.unpack("QQICC")[3] != 0
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/registry.rb | lib/fluent/registry.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/config/error'
module Fluent
# Plugin registry: maps plugin type symbols to implementations (a Class or
# a factory callable), lazily requiring plugin files from additional
# plugin dirs, $LOAD_PATH, installed gems, and fluentd's built-ins.
class Registry
  DEFAULT_PLUGIN_PATH = File.expand_path('../plugin', __FILE__)
  FLUENT_LIB_PATH = File.dirname(File.dirname(DEFAULT_PLUGIN_PATH))

  # @param kind [Symbol] plugin kind (:input, :output, ...), used in error messages
  # @param search_prefix [String] require-path prefix, e.g. 'fluent/plugin/in_'
  # @param dir_search_prefix [String, nil] filename prefix inside additional plugin dirs
  def initialize(kind, search_prefix, dir_search_prefix: nil)
    @kind = kind
    @search_prefix = search_prefix
    @dir_search_prefix = dir_search_prefix
    @map = {}
    @paths = []
  end

  attr_reader :kind, :paths, :map, :dir_search_prefix

  # Register an implementation under the given type (normalized to a Symbol).
  def register(type, value)
    type = type.to_sym
    @map[type] = value
  end

  # Look up a type, trying to load its plugin file on a cache miss.
  # @raise [NotFoundPluginError] when no plugin provides the type
  def lookup(type)
    type = type.to_sym
    if value = @map[type]
      return value
    end
    search(type)
    if value = @map[type]
      return value
    end
    raise NotFoundPluginError.new("Unknown #{@kind} plugin '#{type}'. Run 'gem search -rd fluent-plugin' to find plugins",
                                  kind: @kind, type: type)
  end

  # Find the registered type symbol for an implementation, or nil.
  def reverse_lookup(value)
    # Hash#key returns the first key whose value == value (nil when absent);
    # replaces the previous hand-rolled each loop.
    @map.key(value)
  end

  # Try to require the file that should register the given plugin type.
  # Search order: additional plugin dirs, $LOAD_PATH (excluding fluentd's
  # own lib dir), installed gems (newest version), then built-in plugins.
  def search(type)
    # search from additional plugin directories
    if @dir_search_prefix
      path = "#{@dir_search_prefix}#{type}"
      files = @paths.filter_map { |lp|
        lpath = File.expand_path(File.join(lp, "#{path}.rb"))
        File.exist?(lpath) ? lpath : nil
      }
      unless files.empty?
        # prefer newer version
        require files.max
        return
      end
    end

    path = "#{@search_prefix}#{type}"
    # prefer LOAD_PATH than gems
    files = $LOAD_PATH.filter_map { |lp|
      if lp == FLUENT_LIB_PATH
        nil
      else
        lpath = File.expand_path(File.join(lp, "#{path}.rb"))
        File.exist?(lpath) ? lpath : nil
      end
    }
    unless files.empty?
      # prefer newer version
      require files.max
      return
    end

    # Find from gems and prefer newer version
    specs = Gem::Specification.find_all { |spec|
      if spec.name == 'fluentd'.freeze
        false
      else
        spec.contains_requirable_file? path
      end
    }.sort_by { |spec| spec.version }
    if spec = specs.last
      spec.require_paths.each { |lib|
        file = "#{spec.full_gem_path}/#{lib}/#{path}"
        if File.exist?("#{file}.rb")
          require file
          return
        end
      }
    end

    # Lastly, load built-in plugin
    lpath = File.expand_path(File.join(FLUENT_LIB_PATH, "#{@search_prefix}#{type}.rb"))
    if File.exist?(lpath)
      require lpath
      return
    end
  end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/plugin.rb | lib/fluent/plugin.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/registry'
require 'fluent/config/error'
module Fluent
# Central plugin registry façade: one Registry per plugin kind, with
# register_* methods called by plugin files at load time and new_* methods
# that look up (lazily requiring) and instantiate plugins.
module Plugin
SEARCH_PATHS = []
# plugins for fluentd: fluent/plugin/type_NAME.rb
# plugins for fluentd plugins: fluent/plugin/type/NAME.rb
# ex: storage, buffer chunk, ...
# first class plugins (instantiated by Engine)
INPUT_REGISTRY = Registry.new(:input, 'fluent/plugin/in_', dir_search_prefix: 'in_')
OUTPUT_REGISTRY = Registry.new(:output, 'fluent/plugin/out_', dir_search_prefix: 'out_')
FILTER_REGISTRY = Registry.new(:filter, 'fluent/plugin/filter_', dir_search_prefix: 'filter_')
# feature plugin: second class plugins (instantiated by Plugins or Helpers)
BUFFER_REGISTRY = Registry.new(:buffer, 'fluent/plugin/buf_', dir_search_prefix: 'buf_')
PARSER_REGISTRY = Registry.new(:parser, 'fluent/plugin/parser_', dir_search_prefix: 'parser_')
FORMATTER_REGISTRY = Registry.new(:formatter, 'fluent/plugin/formatter_', dir_search_prefix: 'formatter_')
STORAGE_REGISTRY = Registry.new(:storage, 'fluent/plugin/storage_', dir_search_prefix: 'storage_')
SD_REGISTRY = Registry.new(:sd, 'fluent/plugin/sd_', dir_search_prefix: 'sd_')
METRICS_REGISTRY = Registry.new(:metrics, 'fluent/plugin/metrics_', dir_search_prefix: 'metrics_')
REGISTRIES = [INPUT_REGISTRY, OUTPUT_REGISTRY, FILTER_REGISTRY, BUFFER_REGISTRY, PARSER_REGISTRY, FORMATTER_REGISTRY, STORAGE_REGISTRY, SD_REGISTRY, METRICS_REGISTRY]
def self.register_input(type, klass)
register_impl('input', INPUT_REGISTRY, type, klass)
end
def self.register_output(type, klass)
register_impl('output', OUTPUT_REGISTRY, type, klass)
end
def self.register_filter(type, klass)
register_impl('filter', FILTER_REGISTRY, type, klass)
end
def self.register_buffer(type, klass)
register_impl('buffer', BUFFER_REGISTRY, type, klass)
end
def self.register_sd(type, klass)
register_impl('sd', SD_REGISTRY, type, klass)
end
def self.register_metrics(type, klass)
register_impl('metrics', METRICS_REGISTRY, type, klass)
end
# Parsers may be registered as a Regexp for legacy plugins; it is wrapped
# into a factory producing a RegexpParser.
def self.register_parser(type, klass_or_proc)
if klass_or_proc.is_a?(Regexp)
# This usage is not recommended for new API
require 'fluent/parser'
register_impl('parser', PARSER_REGISTRY, type, Proc.new { Fluent::TextParser::RegexpParser.new(klass_or_proc) })
else
register_impl('parser', PARSER_REGISTRY, type, klass_or_proc)
end
end
# Formatters may be registered as a 3-arity proc (tag, time, record) for
# legacy plugins; it is wrapped into a ProcWrappedFormatter factory.
def self.register_formatter(type, klass_or_proc)
if klass_or_proc.respond_to?(:call) && klass_or_proc.arity == 3 # Proc.new { |tag, time, record| }
# This usage is not recommended for new API
require 'fluent/formatter'
register_impl('formatter', FORMATTER_REGISTRY, type, Proc.new { Fluent::TextFormatter::ProcWrappedFormatter.new(klass_or_proc) })
else
register_impl('formatter', FORMATTER_REGISTRY, type, klass_or_proc)
end
end
def self.register_storage(type, klass)
register_impl('storage', STORAGE_REGISTRY, type, klass)
end
# Reverse lookup across all registries: find the registered type symbol
# for a plugin class (or its fully-qualified name as a String).
def self.lookup_type_from_class(klass_or_its_name)
klass = if klass_or_its_name.is_a? Class
klass_or_its_name
elsif klass_or_its_name.is_a? String
# NOTE: eval assumes trusted input (class names supplied by plugin code,
# not user data).
eval(klass_or_its_name) # const_get can't handle qualified klass name (ex: A::B)
else
raise ArgumentError, "invalid argument type #{klass_or_its_name.class}: #{klass_or_its_name}"
end
REGISTRIES.reduce(nil){|a, r| a || r.reverse_lookup(klass) }
end
# Add an extra directory that every registry searches for plugin files.
def self.add_plugin_dir(dir)
REGISTRIES.each do |r|
r.paths.push(dir)
end
nil
end
def self.new_input(type)
new_impl('input', INPUT_REGISTRY, type)
end
def self.new_output(type)
new_impl('output', OUTPUT_REGISTRY, type)
end
def self.new_filter(type)
new_impl('filter', FILTER_REGISTRY, type)
end
def self.new_buffer(type, parent: nil)
new_impl('buffer', BUFFER_REGISTRY, type, parent)
end
def self.new_sd(type, parent: nil)
new_impl('sd', SD_REGISTRY, type, parent)
end
def self.new_metrics(type, parent: nil)
new_impl('metrics', METRICS_REGISTRY, type, parent)
end
class << self
# This should be defined for fluent-plugin-config-formatter type arguments.
alias_method :new_service_discovery, :new_sd
end
# '/regexp/'-style parser types build a legacy RegexpParser directly.
def self.new_parser(type, parent: nil)
if type[0] == '/' && type[-1] == '/'
# This usage is not recommended for new API... create RegexpParser directly
require 'fluent/parser'
impl = Fluent::TextParser.lookup(type)
impl.extend FeatureAvailabilityChecker
impl
else
new_impl('parser', PARSER_REGISTRY, type, parent)
end
end
def self.new_formatter(type, parent: nil)
new_impl('formatter', FORMATTER_REGISTRY, type, parent)
end
def self.new_storage(type, parent: nil)
new_impl('storage', STORAGE_REGISTRY, type, parent)
end
# Shared registration: value must be a Class or a factory callable.
def self.register_impl(kind, registry, type, value)
if !value.is_a?(Class) && !value.respond_to?(:call)
raise Fluent::ConfigError, "Invalid implementation as #{kind} plugin: '#{type}'. It must be a Class, or callable."
end
registry.register(type, value)
$log.trace "registered #{kind} plugin '#{type}'" if defined?($log)
nil
end
# Shared instantiation: look up the type, instantiate it (Class#new or a
# zero-arity factory call), assign the owner for second-class plugins, and
# mix in the multi-worker availability checker.
def self.new_impl(kind, registry, type, parent=nil)
# "'type' not found" is handled by registry
obj = registry.lookup(type)
impl = case
when obj.is_a?(Class)
obj.new
when obj.respond_to?(:call) && obj.arity == 0
obj.call
else
raise Fluent::ConfigError, "#{kind} plugin '#{type}' is not a Class nor callable (without arguments)."
end
if parent && impl.respond_to?(:owner=)
impl.owner = parent
end
impl.extend FeatureAvailabilityChecker
impl
end
# Mixed into every instantiated plugin: after configure, verify the plugin
# supports multi-worker mode when the system config uses multiple workers.
module FeatureAvailabilityChecker
def configure(conf)
super
# extend plugin instance by this module
# to run this check after all #configure methods of plugins and plugin helpers
sysconf = if self.respond_to?(:owner) && owner.respond_to?(:system_config)
owner.system_config
elsif self.respond_to?(:system_config)
self.system_config
else
nil
end
if sysconf && sysconf.workers > 1 && !self.multi_workers_ready?
type = Fluent::Plugin.lookup_type_from_class(self.class)
raise Fluent::ConfigError, "Plugin '#{type}' does not support multi workers configuration (#{self.class})"
end
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/parser.rb | lib/fluent/parser.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/compat/parser'
module Fluent
# Backward-compatibility aliases: the parser classes moved under
# Fluent::Compat, but plugins still reference the old top-level constants.
ParserError = Fluent::Compat::Parser::ParserError
Parser = Fluent::Compat::Parser
TextParser = Fluent::Compat::TextParser
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/fluent_log_event_router.rb | lib/fluent/fluent_log_event_router.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/log'
module Fluent
# DO NOT write any logic here
# Null-object used when fluentd's internal log events have no configured
# destination: every lifecycle hook is a no-op and events are discarded.
class NullFluentLogEventRouter
  def start; end

  def stop; end

  def graceful_stop; end

  # Silently drop the given log event.
  def emit_event(_event); end

  # False for this null object itself; true for any subclass that really
  # routes events.
  def emittable?
    !instance_of?(NullFluentLogEventRouter)
  end
end
# This class is for handling fluentd's inner log
# e.g. <label @FLUNT_LOG> section and <match fluent.**> section
class FluentLogEventRouter < NullFluentLogEventRouter
# @param root_agent [Fluent::RootAgent]
# Build the router for fluentd's own log events:
# - when <label @FLUENT_LOG> exists, use its event router (with missing
#   match suppression), warning about any unmatched fluent.* tags;
# - otherwise, when top-level sections match fluent.* tags, use the root
#   event router (deprecated; warnings are emitted);
# - otherwise return a NullFluentLogEventRouter.
def self.build(root_agent)
log_event_router = nil
begin
log_event_agent = root_agent.find_label(Fluent::Log::LOG_EVENT_LABEL)
log_event_router = log_event_agent.event_router
# suppress mismatched tags only for <label @FLUENT_LOG> label.
# it's not suppressed in default event router for non-log-event events
log_event_router.suppress_missing_match!
unmatched_tags = Fluent::Log.event_tags.select { |t| !log_event_router.match?(t) }
unless unmatched_tags.empty?
$log.warn "match for some tags of log events are not defined in @FLUENT_LOG label (to be ignored)", tags: unmatched_tags
end
rescue ArgumentError # ArgumentError "#{label_name} label not found"
# use default event router if <label @FLUENT_LOG> is missing in configuration
root_log_event_router = root_agent.event_router
event_tags = Fluent::Log.event_tags
if event_tags.any? { |t| root_log_event_router.match?(t) }
log_event_router = root_log_event_router
unmatched_tags = event_tags.select { |t| !log_event_router.match?(t) }
if unmatched_tags.empty?
$log.warn "define <match fluent.**> to capture fluentd logs in top level is deprecated. Use <label @FLUENT_LOG> instead"
else
matched_sections = (event_tags - unmatched_tags).map { |tag| "<match #{tag}>" }.join(', ')
$log.warn "define #{matched_sections} to capture fluentd logs in top level is deprecated. Use <label @FLUENT_LOG> instead"
$log.warn "match for some tags of log events are not defined in top level (to be ignored)", tags: unmatched_tags
end
end
end
if log_event_router
FluentLogEventRouter.new(log_event_router)
else
$log.debug('No fluent logger for internal event')
NullFluentLogEventRouter.new
end
end
STOP = :stop
GRACEFUL_STOP = :graceful_stop
# @param event_router [Fluent::EventRouter]
def initialize(event_router)
@event_router = event_router
@thread = nil
@graceful_stop = false
@event_queue = Queue.new
end
def start
@thread = Thread.new do
$log.disable_events(Thread.current)
loop do
event = @event_queue.pop
case event
when GRACEFUL_STOP
@graceful_stop = true
when STOP
break
else
begin
tag, time, record = event
@event_router.emit(tag, time, record)
rescue => e
# This $log.error doesn't emit log events, because of `$log.disable_events(Thread.current)` above
$log.error "failed to emit fluentd's log event", tag: tag, event: record, error: e
end
end
if @graceful_stop && @event_queue.empty?
break
end
end
end
@thread.abort_on_exception = true
end
def stop
@event_queue.push(STOP)
# there is no problem calling Thread#join multiple times.
@thread && @thread.join
end
def graceful_stop
# to make sure to emit all log events into router, before shutting down
@event_queue.push(GRACEFUL_STOP)
@thread && @thread.join
end
def emit_event(event)
@event_queue.push(event)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/env.rb | lib/fluent/env.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'securerandom'
require 'serverengine/utils'
require 'fluent/oj_options'
module Fluent
  # Default paths and settings; each is overridable via the corresponding
  # environment variable.
  DEFAULT_CONFIG_PATH = ENV['FLUENT_CONF'] || '/etc/fluent/fluent.conf'
  DEFAULT_CONFIG_INCLUDE_DIR = ENV["FLUENT_CONF_INCLUDE_DIR"] || '/etc/fluent/conf.d'
  DEFAULT_PLUGIN_DIR = ENV['FLUENT_PLUGIN'] || '/etc/fluent/plugin'
  DEFAULT_SOCKET_PATH = ENV['FLUENT_SOCKET'] || '/var/run/fluent/fluent.sock'
  DEFAULT_BACKUP_DIR = ENV['FLUENT_BACKUP_DIR'] || '/tmp/fluent'
  # Oj serializer options resolved from the environment once, at load time.
  DEFAULT_OJ_OPTIONS = Fluent::OjOptions.load_env
  DEFAULT_DIR_PERMISSION = 0755
  DEFAULT_FILE_PERMISSION = 0644
  # Identifier for this fluentd instance; a fresh UUID per process unless
  # FLUENT_INSTANCE_ID is set.
  INSTANCE_ID = ENV['FLUENT_INSTANCE_ID'] || SecureRandom.uuid

  # Platform predicates. Windows detection is delegated to ServerEngine.
  def self.windows?
    ServerEngine.windows?
  end

  def self.linux?
    RUBY_PLATFORM.include?("linux")
  end

  def self.macos?
    RUBY_PLATFORM.include?("darwin")
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/event_router.rb | lib/fluent/event_router.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/match'
require 'fluent/event'
require 'fluent/filter'
require 'fluent/msgpack_factory'
module Fluent
#
# EventRouter is responsible to route events to a collector.
#
# It has a list of MatchPattern and Collector pairs:
#
# +----------------+ +-----------------+
# | MatchPattern | | Collector |
# +----------------+ +-----------------+
# | access.** ---------> type forward |
# | logs.** ---------> type copy |
# | archive.** ---------> type s3 |
# +----------------+ +-----------------+
#
# EventRouter does:
#
# 1) receive an event at `#emit` methods
# 2) match the event's tag with the MatchPatterns
# 3) forward the event to the corresponding Collector
#
# Collector is either of Output, Filter or other EventRouter.
#
class EventRouter
def initialize(default_collector, emit_error_handler)
@match_rules = []
@match_cache = MatchCache.new
@default_collector = default_collector
@emit_error_handler = emit_error_handler
@metric_callbacks = {}
@caller_plugin_id = nil
end
attr_accessor :default_collector
attr_accessor :emit_error_handler
class Rule
def initialize(pattern, collector)
patterns = pattern.split(/\s+/).map { |str| MatchPattern.create(str) }
@pattern = if patterns.length == 1
patterns[0]
else
OrMatchPattern.new(patterns)
end
@pattern_str = pattern
@collector = collector
end
def match?(tag)
@pattern.match(tag)
end
attr_reader :collector
attr_reader :pattern_str
end
def suppress_missing_match!
if @default_collector.respond_to?(:suppress_missing_match!)
@default_collector.suppress_missing_match!
end
end
# called by Agent to add new match pattern and collector
def add_rule(pattern, collector)
@match_rules << Rule.new(pattern, collector)
end
def add_metric_callbacks(caller_plugin_id, callback)
@metric_callbacks[caller_plugin_id] = callback
end
def caller_plugin_id=(caller_plugin_id)
@caller_plugin_id = caller_plugin_id
end
def find_callback
if @caller_plugin_id
@metric_callbacks[@caller_plugin_id]
else
nil
end
end
def emit(tag, time, record)
unless record.nil?
emit_stream(tag, OneEventStream.new(time, record))
end
end
def emit_array(tag, array)
emit_stream(tag, ArrayEventStream.new(array))
end
def emit_stream(tag, es)
match(tag).emit_events(tag, es)
if callback = find_callback
callback.call(es)
end
rescue Pipeline::OutputError => e
@emit_error_handler.handle_emits_error(tag, e.processed_es, e.internal_error)
rescue => e
@emit_error_handler.handle_emits_error(tag, es, e)
end
def emit_error_event(tag, time, record, error)
@emit_error_handler.emit_error_event(tag, time, record, error)
end
def match?(tag)
!!find(tag)
end
def match(tag)
collector = @match_cache.get(tag) {
find(tag) || @default_collector
}
collector
end
class MatchCache
MATCH_CACHE_SIZE = 1024
def initialize
super
@map = {}
@keys = []
end
def get(key)
if collector = @map[key]
return collector
end
collector = @map[key] = yield
if @keys.size >= MATCH_CACHE_SIZE
# expire the oldest key
@map.delete @keys.shift
end
@keys << key
collector
end
end
private
class Pipeline
class OutputError < StandardError
attr_reader :internal_error
attr_reader :processed_es
def initialize(internal_error, processed_es)
@internal_error = internal_error
@processed_es = processed_es
end
end
def initialize
@filters = []
@output = nil
@optimizer = FilterOptimizer.new
end
def add_filter(filter)
@filters << filter
@optimizer.filters = @filters
end
def set_output(output)
@output = output
end
def emit_events(tag, es)
processed = @optimizer.filter_stream(tag, es)
begin
@output.emit_events(tag, processed)
rescue => e
raise OutputError.new(e, processed)
end
end
class FilterOptimizer
def initialize(filters = [])
@filters = filters
@optimizable = nil
end
def filters=(filters)
@filters = filters
reset_optimization
end
def filter_stream(tag, es)
if optimizable?
optimized_filter_stream(tag, es)
else
@filters.reduce(es) { |acc, filter|
filtered_es = filter.filter_stream(tag, acc)
filter.measure_metrics(filtered_es)
filtered_es
}
end
end
private
def optimized_filter_stream(tag, es)
new_es = MultiEventStream.new
es.each(unpacker: Fluent::MessagePackFactory.thread_local_msgpack_unpacker) do |time, record|
filtered_record = record
filtered_time = time
catch :break_loop do
@filters.each do |filter|
if filter.has_filter_with_time
begin
filtered_time, filtered_record = filter.filter_with_time(tag, filtered_time, filtered_record)
throw :break_loop unless filtered_record && filtered_time
filter.measure_metrics(OneEventStream.new(time, record))
rescue => e
filter.router.emit_error_event(tag, filtered_time, filtered_record, e)
end
else
begin
filtered_record = filter.filter(tag, filtered_time, filtered_record)
throw :break_loop unless filtered_record
filter.measure_metrics(OneEventStream.new(time, record))
rescue => e
filter.router.emit_error_event(tag, filtered_time, filtered_record, e)
end
end
end
new_es.add(filtered_time, filtered_record)
end
end
new_es
end
def optimizable?
return @optimizable unless @optimizable.nil?
fs_filters = filters_having_filter_stream
@optimizable = if fs_filters.empty?
true
else
# skip log message when filter is only 1, because its performance is same as non optimized chain.
if @filters.size > 1 && fs_filters.size >= 1
$log.info "disable filter chain optimization because #{fs_filters.map(&:class)} uses `#filter_stream` method."
end
false
end
end
def filters_having_filter_stream
@filters_having_filter_stream ||= @filters.select do |filter|
filter.class.instance_methods(false).include?(:filter_stream)
end
end
def reset_optimization
@optimizable = nil
@filters_having_filter_stream = nil
end
end
end
def find(tag)
pipeline = nil
@match_rules.each do |rule|
if rule.match?(tag)
if rule.collector.is_a?(Plugin::Filter)
pipeline ||= Pipeline.new
pipeline.add_filter(rule.collector)
else
if pipeline
pipeline.set_output(rule.collector)
else
# Use Output directly when filter is not matched
pipeline = rule.collector
end
return pipeline
end
end
end
if pipeline
# filter is matched but no match
pipeline.set_output(@default_collector)
pipeline
else
nil
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/ext_monitor_require.rb | lib/fluent/ext_monitor_require.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# To avoid duplicated requirements, extract this logic as file.
#
# Ruby >= 2.7 already bundles the improved monitor implementation in the
# stdlib 'monitor' library; on older Rubies, prefer the 'ext_monitor'
# backport gem and fall back to plain 'monitor' when it is not installed.
ruby_27_or_later = Gem::Version.create(RUBY_VERSION) >= Gem::Version.create('2.7.0')
if ruby_27_or_later
  require 'monitor'
else
  begin
    require 'ext_monitor'
  rescue LoadError
    require 'monitor'
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/match.rb | lib/fluent/match.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Fluent
class MatchPattern
def self.create(str)
if str == '**'
AllMatchPattern.new
else
GlobMatchPattern.new(str)
end
end
end
class AllMatchPattern < MatchPattern
def match(str)
true
end
end
class GlobMatchPattern < MatchPattern
def initialize(pat)
if pat.start_with?('/')
if pat.end_with?('/')
@regex = Regexp.new("\\A"+pat[1..-2]+"\\Z")
return
else
raise Fluent::ConfigError, "invalid match - regex"
end
end
stack = []
regex = ['']
escape = false
dot = false
i = 0
while i < pat.length
c = pat[i,1]
if escape
regex.last << Regexp.escape(c)
escape = false
i += 1
next
elsif pat[i,2] == "**"
# recursive any
if dot
regex.last << "(?![^\\.])"
dot = false
end
if pat[i+2,1] == "."
regex.last << "(?:.*\\.|\\A)"
i += 3
else
regex.last << ".*"
i += 2
end
next
elsif dot
regex.last << "\\."
dot = false
end
if c == "\\"
escape = true
elsif c == "."
dot = true
elsif c == "*"
# any
regex.last << "[^\\.]*"
# TODO
#elsif c == "["
# # character class
# chars = ''
# while i < pat.length
# c = pat[i,1]
# if c == "]"
# break
# else
# chars << c
# end
# i += 1
# end
# regex.last << '['+Regexp.escape(chars).gsub("\\-",'-')+']'
elsif c == "{"
# or
stack.push []
regex.push ''
elsif c == "}" && !stack.empty?
stack.last << regex.pop
regex.last << Regexp.union(*stack.pop.map {|r| Regexp.new(r) }).to_s
elsif c == "," && !stack.empty?
stack.last << regex.pop
regex.push ''
elsif /[a-zA-Z0-9_]/.match?(c)
regex.last << c
else
regex.last << "\\#{c}"
end
i += 1
end
until stack.empty?
stack.last << regex.pop
regex.last << Regexp.union(*stack.pop).to_s
end
@regex = Regexp.new("\\A"+regex.last+"\\Z")
end
def match(str)
@regex.match?(str)
end
end
class OrMatchPattern < MatchPattern
def initialize(patterns)
@patterns = patterns
end
def match(str)
@patterns.any? {|pattern| pattern.match(str) }
end
end
class NoMatchMatch
def initialize(log)
@log = log
@count = 0
@warn_not_matched = true
end
def suppress_missing_match!
# for <label @FLUENT_LOG>
@warn_not_matched = false
end
def emit_events(tag, es)
return unless @warn_not_matched
# TODO use time instead of num of records
c = (@count += 1)
if c < 512
if Math.log(c) / Math.log(2) % 1.0 == 0
@log.warn "no patterns matched", tag: tag
return
end
else
if c % 512 == 0
@log.warn "no patterns matched", tag: tag
return
end
end
@log.on_trace { @log.trace "no patterns matched", tag: tag }
end
def start
end
def shutdown
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/input.rb | lib/fluent/input.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/compat/input'
module Fluent
  # Backward-compatibility alias: plugins referring to the old Fluent::Input
  # base class get the compat-layer implementation.
  Input = Fluent::Compat::Input
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/process.rb | lib/fluent/process.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/compat/detach_process_mixin'
module Fluent
  # Backward-compatibility aliases for the old detach-process mixins,
  # now provided by the compat layer.
  DetachProcessMixin = Fluent::Compat::DetachProcessMixin
  DetachMultiProcessMixin = Fluent::Compat::DetachMultiProcessMixin
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/counter.rb | lib/fluent/counter.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/counter/client'
require 'fluent/counter/server'
module Fluent
  # Namespace placeholder; the actual client/server implementations are
  # defined by the requires above (fluent/counter/client, fluent/counter/server).
  module Counter
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/system_config.rb | lib/fluent/system_config.rb | #
# Fluent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/configurable'
require 'fluent/config/element'
require 'fluent/env'
module Fluent
class SystemConfig
include Configurable
SYSTEM_CONFIG_PARAMETERS = [
:workers, :restart_worker_interval, :root_dir, :log_level,
:suppress_repeated_stacktrace, :emit_error_log_interval, :suppress_config_dump,
:log_event_verbose, :ignore_repeated_log_interval, :ignore_same_log_interval,
:without_source, :with_source_only, :rpc_endpoint, :enable_get_dump, :process_name,
:file_permission, :dir_permission, :counter_server, :counter_client,
:strict_config_value, :enable_msgpack_time_support, :disable_shared_socket,
:metrics, :enable_input_metrics, :enable_size_metrics, :enable_jit, :source_only_buffer,
:config_include_dir
]
config_param :workers, :integer, default: 1
config_param :restart_worker_interval, :time, default: 0
config_param :root_dir, :string, default: nil
config_param :log_level, :enum, list: [:trace, :debug, :info, :warn, :error, :fatal], default: 'info'
config_param :suppress_repeated_stacktrace, :bool, default: nil
config_param :ignore_repeated_log_interval, :time, default: nil
config_param :ignore_same_log_interval, :time, default: nil
config_param :emit_error_log_interval, :time, default: nil
config_param :suppress_config_dump, :bool, default: nil
config_param :log_event_verbose, :bool, default: nil
config_param :without_source, :bool, default: nil
config_param :with_source_only, :bool, default: nil
config_param :rpc_endpoint, :string, default: nil
config_param :enable_get_dump, :bool, default: nil
config_param :process_name, :string, default: nil
config_param :strict_config_value, :bool, default: nil
config_param :enable_msgpack_time_support, :bool, default: nil
config_param :disable_shared_socket, :bool, default: nil
config_param :enable_input_metrics, :bool, default: true
config_param :enable_size_metrics, :bool, default: nil
config_param :enable_jit, :bool, default: false
config_param :file_permission, default: nil do |v|
v.to_i(8)
end
config_param :dir_permission, default: nil do |v|
v.to_i(8)
end
config_param :config_include_dir, default: Fluent::DEFAULT_CONFIG_INCLUDE_DIR
config_section :log, required: false, init: true, multi: false do
config_param :path, :string, default: nil
config_param :format, :enum, list: [:text, :json], default: :text
config_param :time_format, :string, default: '%Y-%m-%d %H:%M:%S %z'
config_param :rotate_age, default: nil do |v|
if Fluent::Log::LOG_ROTATE_AGE.include?(v)
v
else
begin
Integer(v)
rescue ArgumentError => e
raise Fluent::ConfigError, e.message
else
v.to_i
end
end
end
config_param :rotate_size, :size, default: nil
config_param :forced_stacktrace_level, :enum, list: [:none, :trace, :debug, :info, :warn, :error, :fatal], default: :none
end
config_section :counter_server, multi: false do
desc 'scope name of counter server'
config_param :scope, :string
desc 'the port of counter server to listen to'
config_param :port, :integer, default: nil
desc 'the bind address of counter server to listen to'
config_param :bind, :string, default: nil
desc 'backup file path of counter values'
config_param :backup_path, :string
end
config_section :counter_client, multi: false do
desc 'the port of counter server'
config_param :port, :integer, default: nil
desc 'the IP address or hostname of counter server'
config_param :host, :string
desc 'the timeout of each operation'
config_param :timeout, :time, default: nil
end
config_section :metrics, multi: false do
config_param :@type, :string, default: "local"
config_param :labels, :hash, default: {}
end
config_section :source_only_buffer, init: true, multi: false do
config_param :flush_thread_count, :integer, default: 1
config_param :overflow_action, :enum, list: [:throw_exception, :block, :drop_oldest_chunk], default: :drop_oldest_chunk
config_param :path, :string, default: nil
config_param :flush_interval, :time, default: nil
config_param :chunk_limit_size, :size, default: nil
config_param :total_limit_size, :size, default: nil
config_param :compress, :enum, list: [:text, :gzip], default: nil
end
def force_stacktrace_level?
@log.forced_stacktrace_level != :none
end
def self.create(conf, strict_config_value=false)
systems = conf.elements(name: 'system')
return SystemConfig.new if systems.empty?
raise Fluent::ConfigError, "<system> is duplicated. <system> should be only one" if systems.size > 1
SystemConfig.new(systems.first, strict_config_value)
end
def self.blank_system_config
Fluent::Config::Element.new('<SYSTEM>', '', {}, [])
end
def self.overwrite_system_config(hash)
older = defined?($_system_config) ? $_system_config : nil
begin
$_system_config = SystemConfig.new(Fluent::Config::Element.new('system', '', hash, []))
yield
ensure
$_system_config = older
end
end
def initialize(conf=nil, strict_config_value=false)
super()
conf ||= SystemConfig.blank_system_config
configure(conf, strict_config_value)
end
def configure(conf, strict_config_value=false)
strict = strict_config_value
if !strict && conf && conf.has_key?("strict_config_value")
strict = Fluent::Config.bool_value(conf["strict_config_value"])
end
begin
super(conf, strict)
rescue ConfigError => e
$log.error "config error in:\n#{conf}"
$log.error 'config error', error: e
$log.debug_backtrace
exit!(1)
end
@log_level = Log.str_to_level(@log_level.to_s) if @log_level
@log[:forced_stacktrace_level] = Log.str_to_level(@log.forced_stacktrace_level.to_s) if force_stacktrace_level?
end
def dup
s = SystemConfig.new
SYSTEM_CONFIG_PARAMETERS.each do |param|
s.__send__("#{param}=", instance_variable_get("@#{param}"))
end
s
end
def overwrite_variables(**opt)
SYSTEM_CONFIG_PARAMETERS.each do |param|
if opt.key?(param) && !opt[param].nil? && instance_variable_defined?("@#{param}")
instance_variable_set("@#{param}", opt[param])
end
end
end
module Mixin
def system_config
require 'fluent/engine'
unless defined?($_system_config)
$_system_config = nil
end
(instance_variable_defined?(:@_system_config) && @_system_config) ||
$_system_config || Fluent::Engine.system_config
end
def system_config_override(opts={})
require 'fluent/engine'
if !instance_variable_defined?(:@_system_config) || @_system_config.nil?
@_system_config = (defined?($_system_config) && $_system_config ? $_system_config : Fluent::Engine.system_config).dup
end
opts.each_pair do |key, value|
@_system_config.__send__(:"#{key.to_s}=", value)
end
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/winsvc.rb | lib/fluent/winsvc.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Windows Service wrapper: spawns the fluentd supervisor as a child process
# and translates Windows service control codes into the named events the
# supervisor listens on (STOP / HUP / USR1 / USR2 / CONT).
begin
  require 'optparse'
  require 'win32/daemon'
  require 'win32/event'
  require 'win32/registry'
  require 'serverengine'

  include Win32

  op = OptionParser.new
  opts = {service_name: nil}
  op.on('--service-name NAME', "The name of the Windows Service") {|name|
    opts[:service_name] = name
  }
  op.parse(ARGV)

  if opts[:service_name] == nil
    raise "Error: No Windows Service name set. Use '--service-name'"
  end

  # Reads extra fluentd options from the service's registry key
  # (value "fluentdopt"); empty string when absent.
  def read_fluentdopt(service_name)
    Win32::Registry::HKEY_LOCAL_MACHINE.open("SYSTEM\\CurrentControlSet\\Services\\#{service_name}") do |reg|
      reg.read("fluentdopt")[1] rescue ""
    end
  end

  # Spawns the fluentd supervisor with the same ruby installation that runs
  # this wrapper; returns the child pid.
  def service_main_start(service_name)
    ruby_path = ServerEngine.ruby_bin_path
    rubybin_dir = ruby_path[0, ruby_path.rindex("/")]
    opt = read_fluentdopt(service_name)
    Process.spawn("\"#{rubybin_dir}/ruby.exe\" \"#{rubybin_dir}/fluentd\" #{opt} -x #{service_name}")
  end

  class FluentdService < Daemon
    ERROR_WAIT_NO_CHILDREN = 128

    # NOTE(review): these are class-level instance variables, distinct from
    # the per-instance @pid/@service_name set in #initialize; they look like
    # unused leftovers — confirm before removing.
    @pid = 0
    @service_name = ''

    def initialize(service_name)
      @service_name = service_name
    end

    # Main service loop: poll the child every 5s; when it exits unexpectedly
    # report SERVICE_STOPPED with ERROR_WAIT_NO_CHILDREN.
    # (@@hStopEvent / SetTheServiceStatus come from win32/daemon internals.)
    def service_main
      @pid = service_main_start(@service_name)
      begin
        loop do
          sleep 5
          break unless running?
          raise Errno::ECHILD if Process.waitpid(@pid, Process::WNOHANG)
        end
      rescue Errno::ECHILD
        @pid = 0
        SetEvent(@@hStopEvent)
        SetTheServiceStatus.call(SERVICE_STOPPED, ERROR_WAIT_NO_CHILDREN, 0, 0)
      end
    end

    def service_stop
      wait_supervisor_finished if @pid > 0
    end

    # PARAMCHANGE control -> graceful restart (USR2) of the supervisor.
    def service_paramchange
      set_event("#{@service_name}_USR2")
    end

    # Custom control codes map to supervisor signals:
    # 128 -> HUP (reload), 129 -> USR1 (log rotation), 130 -> CONT (dump).
    def service_user_defined_control(code)
      case code
      when 128
        set_event("#{@service_name}_HUP")
      when 129
        set_event("#{@service_name}_USR1")
      when 130
        set_event("#{@service_name}_CONT")
      end
    end

    private

    # Signals the supervisor by setting its named Windows event.
    def set_event(event_name)
      ev = Win32::Event.open(event_name)
      ev.set
      ev.close
    end

    def repeat_set_event_several_times_until_success(event_name)
      retries = 0
      max_retries = 10
      delay_sec = 3

      begin
        set_event(event_name)
      rescue Errno::ENOENT
        # This error occurs when the supervisor process has not yet created the event.
        # If STOP is immediately executed, this state will occur.
        # Retry `set_event' to wait for the initialization of the supervisor.
        retries += 1
        raise if max_retries < retries
        sleep(delay_sec)
        retry
      end
    end

    def wait_supervisor_finished
      repeat_set_event_several_times_until_success(@service_name)
      Process.waitpid(@pid)
    end
  end

  FluentdService.new(opts[:service_name]).mainloop
rescue Exception => err
  # NOTE(review): rescuing Exception only to re-raise is a no-op wrapper
  # (and `err` is unused); kept as-is to preserve behavior.
  raise
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/config.rb | lib/fluent/config.rb | #
# Fluent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/config/error'
require 'fluent/config/element'
require 'fluent/configurable'
require 'fluent/config/yaml_parser'
module Fluent
  module Config
    # Loads and parses a configuration file.
    #
    # @param config_path [String] config file path
    # @param encoding [String] encoding of config file
    # @param additional_config [String] config which is added to last of config body
    # @param use_v1_config [Bool] config is formatted with v1 or not
    # @param type [Symbol, nil] :yaml/:yml to force YAML, :guess to decide
    #   from the file extension, nil for the classic format
    # @return [Fluent::Config]
    def self.build(config_path:, encoding: 'utf-8', additional_config: nil, use_v1_config: true, type: nil)
      if type == :guess
        config_file_ext = File.extname(config_path)
        if config_file_ext == '.yaml' || config_file_ext == '.yml'
          type = :yaml
        end
      end

      if type == :yaml || type == :yml
        return Fluent::Config::YamlParser.parse(config_path)
      end

      config_fname = File.basename(config_path)
      config_basedir = File.dirname(config_path)
      config_data = File.open(config_path, "r:#{encoding}:utf-8") do |f|
        s = f.read
        if additional_config
          # the CLI passes literal "\n" sequences; expand them to newlines
          c = additional_config.gsub("\\n", "\n")
          s += "\n#{c}"
        end
        s
      end

      Fluent::Config.parse(config_data, config_fname, config_basedir, use_v1_config)
    end

    # Parses a configuration string, choosing the parser in this precedence:
    # .rb filename or syntax: :ruby -> Ruby DSL (deprecated);
    # otherwise the explicit v1_config flag wins over the syntax: keyword.
    def self.parse(str, fname, basepath = Dir.pwd, v1_config = nil, syntax: :v1)
      parser = if fname =~ /\.rb$/ || syntax == :ruby
                 :ruby
               elsif v1_config.nil?
                 case syntax
                 when :v1 then :v1
                 when :v0 then :v0
                 else
                   raise ArgumentError, "Unknown Fluentd configuration syntax: '#{syntax}'"
                 end
               elsif v1_config then :v1
               else :v0
               end

      case parser
      when :v1
        require 'fluent/config/v1_parser'
        V1Parser.parse(str, fname, basepath, Kernel.binding)
      when :v0
        # TODO: show deprecated message in v1
        require 'fluent/config/parser'
        Parser.parse(str, fname, basepath)
      when :ruby
        require 'fluent/config/dsl'
        $log.warn("Ruby DSL configuration format is deprecated. Please use original configuration format. https://docs.fluentd.org/configuration/config-file") if $log
        Config::DSL::Parser.parse(str, File.join(basepath, fname))
      else
        raise "[BUG] unknown configuration parser specification:'#{parser}'"
      end
    end

    # Creates an empty configuration element (legacy convenience).
    def self.new(name = '')
      Element.new(name, '', {}, [])
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/formatter.rb | lib/fluent/formatter.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/compat/formatter'
module Fluent
  # Backward-compatibility aliases: the implementations now live under
  # Fluent::Compat. Kept so old plugins referencing Fluent::Formatter /
  # Fluent::TextFormatter keep working.
  Formatter = Fluent::Compat::Formatter
  TextFormatter = Fluent::Compat::TextFormatter
  # deprecate_constant is ruby 2.3 feature
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/unique_id.rb | lib/fluent/unique_id.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Fluent
  # Generates 128-bit (16 byte) identifiers for buffer chunks etc.
  # Layout: 52 bits of microsecond timestamp, 12 random bits, then 64
  # fully random bits — packed big-endian as four 32-bit words.
  module UniqueId
    def self.generate
      current = Time.now.utc
      micros = current.to_i * 1_000_000 + current.usec
      upper = (micros << 12) | rand(0xfff)
      [upper >> 32, upper & 0xffffffff, rand(0xffffffff), rand(0xffffffff)].pack('NNNN')
    end

    # Hex-encodes a binary unique id (16 bytes -> 32 hex chars).
    def self.hex(unique_id)
      unique_id.unpack1('H*')
    end

    # Convenience wrappers mixed into plugins.
    module Mixin
      def generate_unique_id
        Fluent::UniqueId.generate
      end

      def dump_unique_id_hex(unique_id)
        Fluent::UniqueId.hex(unique_id)
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/engine.rb | lib/fluent/engine.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/config'
require 'fluent/event'
require 'fluent/event_router'
require 'fluent/msgpack_factory'
require 'fluent/root_agent'
require 'fluent/time'
require 'fluent/system_config'
require 'fluent/plugin'
require 'fluent/fluent_log_event_router'
require 'fluent/static_config_analysis'
module Fluent
  # Process-wide engine (instantiated once as Fluent::Engine below).
  # Owns the RootAgent plugin tree and drives the worker lifecycle:
  # init -> configure -> run (start, main loop, stop phase) -> shutdown,
  # plus zero-downtime reload via #reload_config.
  class EngineClass
    # For compat. remove it in fluentd v2
    include Fluent::MessagePackFactory::Mixin

    def initialize
      @root_agent = nil
      @engine_stopped = false
      @_worker_id = nil
      @log_event_verbose = false
      @suppress_config_dump = false
      @without_source = false
      @fluent_log_event_router = nil
      @system_config = SystemConfig.new
      @supervisor_mode = false
      # Guards @root_agent swaps/lifecycle calls (run, flush!, reload_config).
      @root_agent_mutex = Mutex.new
    end

    # Polling interval (seconds) of the main loop waiting for #stop.
    MAINLOOP_SLEEP_INTERVAL = 0.3

    attr_reader :root_agent, :system_config, :supervisor_mode

    # Applies system configuration flags and builds the RootAgent.
    # @return [self]
    def init(system_config, supervisor_mode: false, start_in_parallel: false)
      @system_config = system_config
      @supervisor_mode = supervisor_mode

      # nil means "not specified"; keep the initialize defaults in that case.
      @suppress_config_dump = system_config.suppress_config_dump unless system_config.suppress_config_dump.nil?
      @without_source = system_config.without_source unless system_config.without_source.nil?
      @log_event_verbose = system_config.log_event_verbose unless system_config.log_event_verbose.nil?

      @root_agent = RootAgent.new(log: log, system_config: @system_config, start_in_parallel: start_in_parallel)

      self
    end

    def log
      $log
    end

    # Parses a config file body; *.rb files go through the Ruby DSL parser.
    def parse_config(io, fname, basepath = Dir.pwd, v1_config = false)
      if /\.rb$/.match?(fname)
        require 'fluent/config/dsl'
        Config::DSL::Parser.parse(io, File.join(basepath, fname))
      else
        Config.parse(io, fname, basepath, v1_config)
      end
    end

    # Configures plugins, then warns about any keys/sections nobody fetched
    # (typo detection for user configs).
    def run_configure(conf, dry_run: false)
      configure(conf)
      conf.check_not_fetched do |key, e|
        parent_name, plugin_name = e.unused_in
        message = if parent_name && plugin_name
                    "section <#{e.name}> is not used in <#{parent_name}> of #{plugin_name} plugin"
                  elsif parent_name
                    "section <#{e.name}> is not used in <#{parent_name}>"
                  elsif e.name != 'system' && !(@without_source && e.name == 'source')
                    "parameter '#{key}' in #{e.to_s.strip} is not used."
                  else
                    nil
                  end
        next if message.nil?

        # Route the warning to the proper logger context depending on mode.
        if dry_run && @supervisor_mode
          $log.warn :supervisor, message
        elsif e.for_every_workers?
          $log.warn :worker0, message
        elsif e.for_this_worker?
          $log.warn message
        end
      end
    end

    def configure(conf)
      @root_agent.configure(conf)

      @fluent_log_event_router = FluentLogEventRouter.build(@root_agent)

      if @fluent_log_event_router.emittable?
        $log.enable_event(true)
      end

      unless @suppress_config_dump
        $log.info :supervisor, "using configuration file: #{conf.to_s.rstrip}"
      end
    end

    def add_plugin_dir(dir)
      $log.warn('Deprecated method: this method is going to be deleted. Use Fluent::Plugin.add_plugin_dir')
      Plugin.add_plugin_dir(dir)
    end

    # Dead entry points kept to fail loudly: emission must go through the
    # per-plugin router, not the engine singleton.
    def emit(tag, time, record)
      raise "BUG: use router.emit instead of Engine.emit"
    end

    def emit_array(tag, array)
      raise "BUG: use router.emit_array instead of Engine.emit_array"
    end

    def emit_stream(tag, es)
      raise "BUG: use router.emit_stream instead of Engine.emit_stream"
    end

    def flush!
      @root_agent_mutex.synchronize do
        @root_agent.flush!
      end
    end

    def cancel_source_only!
      @root_agent_mutex.synchronize do
        @root_agent.cancel_source_only!
      end
    end

    def now
      # TODO thread update
      Fluent::EventTime.now
    end

    # Worker main loop: starts plugins, sleeps until #stop flips
    # @engine_stopped, then runs the stop phase. Unexpected errors are
    # logged and re-raised (killing the worker).
    def run
      begin
        $log.info "starting fluentd worker", pid: Process.pid, ppid: Process.ppid, worker: worker_id

        @root_agent_mutex.synchronize do
          start
        end

        @fluent_log_event_router.start

        $log.info "fluentd worker is now running", worker: worker_id
        sleep MAINLOOP_SLEEP_INTERVAL until @engine_stopped
        $log.info "fluentd worker is now stopping", worker: worker_id

      rescue Exception => e
        $log.error "unexpected error", error: e
        $log.error_backtrace
        raise
      end

      @root_agent_mutex.synchronize do
        stop_phase(@root_agent)
      end
    end

    # @param conf [Fluent::Config]
    # @param supervisor [Bool]
    # @return nil
    def reload_config(conf, supervisor: false)
      @root_agent_mutex.synchronize do
        # configure first to reduce down time while restarting
        new_agent = RootAgent.new(log: log, system_config: @system_config)
        ret = Fluent::StaticConfigAnalysis.call(conf, workers: system_config.workers)

        # Abort early if any configured plugin declares itself unreloadable.
        ret.all_plugins.each do |plugin|
          if plugin.respond_to?(:reloadable_plugin?) && !plugin.reloadable_plugin?
            raise Fluent::ConfigError, "Unreloadable plugin plugin: #{Fluent::Plugin.lookup_type_from_class(plugin.class)}, plugin_id: #{plugin.plugin_id}, class_name: #{plugin.class})"
          end
        end

        # Assign @root_agent to new root_agent
        # for https://github.com/fluent/fluentd/blob/fcef949ce40472547fde295ddd2cfe297e1eddd6/lib/fluent/plugin_helper/event_emitter.rb#L50
        old_agent, @root_agent = @root_agent, new_agent
        begin
          @root_agent.configure(conf)
        rescue
          # Roll back to the previous agent if the new config fails.
          @root_agent = old_agent
          raise
        end

        unless @suppress_config_dump
          $log.info :supervisor, "using configuration file: #{conf.to_s.rstrip}"
        end

        # supervisor doesn't handle actual data. so the following code is unnecessary.
        if supervisor
          old_agent.shutdown # to close thread created in #configure
          return
        end

        stop_phase(old_agent)

        $log.info 'restart fluentd worker', worker: worker_id
        start_phase(new_agent)
      end
    end

    # Signals the main loop in #run to exit; does not stop plugins itself.
    def stop
      @engine_stopped = true
      nil
    end

    def push_log_event(tag, time, record)
      @fluent_log_event_router.emit_event([tag, time, record])
    end

    # Worker index: -1 under the supervisor; otherwise taken from the
    # SERVERENGINE_WORKER_ID env var (memoized), defaulting to 0.
    def worker_id
      if @supervisor_mode
        return -1
      end
      return @_worker_id if @_worker_id

      # if ENV doesn't have SERVERENGINE_WORKER_ID, it is a worker under --no-supervisor or in tests
      # so it's (almost) a single worker, worker_id=0
      @_worker_id = (ENV['SERVERENGINE_WORKER_ID'] || 0).to_i
      @_worker_id
    end

    private

    # Tears down one root agent (used both by #run exit and #reload_config).
    def stop_phase(root_agent)
      unless @log_event_verbose
        $log.enable_event(false)
        @fluent_log_event_router.graceful_stop
      end
      $log.info 'shutting down fluentd worker', worker: worker_id
      root_agent.shutdown

      @fluent_log_event_router.stop
    end

    # Boots a (new) root agent and its log event router.
    def start_phase(root_agent)
      @fluent_log_event_router = FluentLogEventRouter.build(root_agent)
      if @fluent_log_event_router.emittable?
        $log.enable_event(true)
      end

      @root_agent.start
    end

    def start
      @root_agent.start
    end
  end

  # The process-wide singleton used throughout fluentd.
  Engine = EngineClass.new
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/output_chain.rb | lib/fluent/output_chain.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/compat/output_chain'
module Fluent
  # Backward-compatibility aliases: the chain implementations now live under
  # Fluent::Compat. Kept so old plugins referencing these names keep working.
  OutputChain = Fluent::Compat::OutputChain
  CopyOutputChain = Fluent::Compat::CopyOutputChain
  NullOutputChain = Fluent::Compat::NullOutputChain
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/configurable.rb | lib/fluent/configurable.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/config/configure_proxy'
require 'fluent/config/section'
require 'fluent/config/error'
require 'fluent/registry'
require 'fluent/plugin'
require 'fluent/config/types'
module Fluent
  # Mixin providing the configuration DSL (config_param / config_section /
  # desc ...) and the #configure entry point used by all plugins. Class-level
  # declarations accumulate into ConfigureProxy objects; #configure turns a
  # config Element into instance variables via SectionGenerator.
  module Configurable
    def self.included(mod)
      mod.extend(ClassMethods)
    end

    def initialize
      super
      # to simulate implicit 'attr_accessor' by config_param / config_section and its value by config_set_default
      proxy = self.class.merged_configure_proxy
      proxy.params.each_key do |name|
        # '@'-prefixed names are reserved system parameters; skip them.
        next if name.to_s.start_with?('@')
        if proxy.defaults.has_key?(name)
          instance_variable_set("@#{name}".to_sym, proxy.defaults[name])
        end
      end
      proxy.sections.each_key do |name|
        next if name.to_s.start_with?('@')
        subproxy = proxy.sections[name]
        # Multi-occurrence sections default to [], single ones to nil.
        if subproxy.multi?
          instance_variable_set("@#{subproxy.variable_name}".to_sym, [])
        else
          instance_variable_set("@#{subproxy.variable_name}".to_sym, nil)
        end
      end
    end

    # Builds the effective proxy for this instance; when this object is owned
    # by another plugin (e.g. a parser inside an input), the owner's defaults
    # overwrite this proxy's defaults.
    def configure_proxy_generate
      proxy = self.class.merged_configure_proxy

      if self.respond_to?(:owner) && self.owner
        owner_proxy = owner.class.merged_configure_proxy
        if proxy.configured_in_section
          owner_proxy = owner_proxy.sections[proxy.configured_in_section]
        end
        proxy.overwrite_defaults(owner_proxy) if owner_proxy
      end

      proxy
    end

    # Creates a configured Section object for one named section (or the root
    # when name is nil) from an optional config Element.
    def configured_section_create(name, conf = nil)
      conf ||= Fluent::Config::Element.new(name.to_s, '', {}, [])
      root_proxy = configure_proxy_generate
      proxy = if name.nil? # root
                root_proxy
              else
                root_proxy.sections[name]
              end
      # take care to raise Fluent::ConfigError if conf mismatched to proxy
      Fluent::Config::SectionGenerator.generate(proxy, conf, nil, nil)
    end

    # Main entry point: validates `conf` against the declared parameters and
    # assigns each resulting value to an instance variable. Returns self.
    def configure(conf, strict_config_value=false)
      @config = conf

      # Pick the most specific logger available for validation warnings.
      logger = if self.respond_to?(:log)
                 self.log
               elsif self.respond_to?(:owner) && self.owner.respond_to?(:log)
                 self.owner.log
               elsif defined?($log)
                 $log
               else
                 nil
               end
      proxy = configure_proxy_generate
      conf.corresponding_proxies << proxy

      # In the nested section, can't get plugin class through proxies so get plugin class here
      plugin_class = Fluent::Plugin.lookup_type_from_class(proxy.name.to_s)
      root = Fluent::Config::SectionGenerator.generate(proxy, conf, logger, plugin_class, [], strict_config_value)
      @config_root_section = root

      root.instance_eval{ @params.keys }.each do |param_name|
        next if param_name.to_s.start_with?('@')
        varname = "@#{param_name}".to_sym
        instance_variable_set(varname, root[param_name])
      end

      self
    end

    # Config with secret parameters masked; memoized.
    def config
      @masked_config ||= @config.to_masked_element
    end

    # Registry of value-type coercers usable as config_param(:x, :type).
    CONFIG_TYPE_REGISTRY = Registry.new(:config_type, 'fluent/plugin/type_')

    def self.register_type(type, callable = nil, &block)
      callable ||= block
      CONFIG_TYPE_REGISTRY.register(type, callable)
    end

    def self.lookup_type(type)
      CONFIG_TYPE_REGISTRY.lookup(type)
    end

    # Register the built-in value types.
    {
      string: Config::STRING_TYPE,
      enum: Config::ENUM_TYPE,
      integer: Config::INTEGER_TYPE,
      float: Config::FLOAT_TYPE,
      size: Config::SIZE_TYPE,
      bool: Config::BOOL_TYPE,
      time: Config::TIME_TYPE,
      hash: Config::HASH_TYPE,
      array: Config::ARRAY_TYPE,
      regexp: Config::REGEXP_TYPE,
    }.each do |name, type|
      register_type(name, type)
    end

    module ClassMethods
      # Lazily creates a per-class map of name -> ConfigureProxy; the
      # singleton-method redefinition memoizes it on first access.
      def configure_proxy_map
        map = {}
        self.define_singleton_method(:configure_proxy_map){ map }
        map
      end

      def configure_proxy(mod_name)
        map = configure_proxy_map
        unless map[mod_name]
          type_lookup = ->(type) { Fluent::Configurable.lookup_type(type) }
          proxy = Fluent::Config::ConfigureProxy.new(mod_name, root: true, required: true, multi: false, type_lookup: type_lookup)
          map[mod_name] = proxy
        end
        map[mod_name]
      end

      def configured_in(section_name)
        configure_proxy(self.name).configured_in(section_name)
      end

      # Declares one configuration parameter and (usually) an accessor for it.
      def config_param(name, type = nil, **kwargs, &block)
        configure_proxy(self.name).config_param(name, type, **kwargs, &block)
        # reserved names '@foo' are invalid as attr_accessor name
        attr_accessor(name) unless kwargs[:skip_accessor] || Fluent::Config::Element::RESERVED_PARAMETERS.include?(name.to_s)
      end

      def config_set_default(name, defval)
        configure_proxy(self.name).config_set_default(name, defval)
      end

      def config_set_desc(name, desc)
        configure_proxy(self.name).config_set_desc(name, desc)
      end

      # Declares a nested <section>; adds the accessor only the first time
      # the section appears in the ancestor chain.
      def config_section(name, **kwargs, &block)
        section_already_exists = !!merged_configure_proxy.sections[name]
        configure_proxy(self.name).config_section(name, **kwargs, &block)
        variable_name = configure_proxy(self.name).sections[name].variable_name
        if !section_already_exists && !self.respond_to?(variable_name)
          attr_accessor variable_name
        end
      end

      def desc(description)
        configure_proxy(self.name).desc(description)
      end

      # Merges proxies along the ancestor chain (superclass first) so
      # subclasses inherit and can override parameter declarations.
      def merged_configure_proxy
        configurables = ancestors.reverse.select{ |a| a.respond_to?(:configure_proxy) }

        # 'a.object_id.to_s' is to support anonymous class
        # which created in tests to overwrite original behavior temporally
        #
        # p Module.new.name   #=> nil
        # p Class.new.name    #=> nil
        # p AnyGreatClass.dup.name #=> nil
        configurables.map{ |a| a.configure_proxy(a.name || a.object_id.to_s) }.reduce(:merge)
      end

      def dump_config_definition
        configure_proxy_map[self.to_s].dump_config_definition
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/root_agent.rb | lib/fluent/root_agent.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'delegate'
require 'fluent/config/error'
require 'fluent/agent'
require 'fluent/label'
require 'fluent/plugin'
require 'fluent/system_config'
require 'fluent/time'
require 'fluent/source_only_buffer_agent'
module Fluent
  #
  # Fluentd forms a tree structure to manage plugins:
  #
  #                      RootAgent
  #                          |
  #             +------------+-------------+
  #             |            |             |
  #          <label>      <source>     <filter>/<match>
  #             |
  #        +----+----+
  #        |         |
  #     <filter>  <match>
  #
  # Relation:
  # * RootAgent has many <label>, <source>, <filter> and <match>
  # * <label> has many <match> and <filter>
  #
  # Next step: `fluentd/agent.rb`
  # Next step: 'fluentd/label.rb'
  #
  class RootAgent < Agent
    ERROR_LABEL = "@ERROR".freeze # @ERROR is built-in error label

    # Tri-state describing how with-source-only / zero-downtime-restart
    # startup should behave.
    class SourceOnlyMode
      DISABLED = 0
      NORMAL = 1
      ONLY_ZERO_DOWNTIME_RESTART_READY = 2

      def initialize(with_source_only, start_in_parallel)
        if start_in_parallel
          @mode = ONLY_ZERO_DOWNTIME_RESTART_READY
        elsif with_source_only
          @mode = NORMAL
        else
          @mode = DISABLED
        end
      end

      def enabled?
        @mode != DISABLED
      end

      def only_zero_downtime_restart_ready?
        @mode == ONLY_ZERO_DOWNTIME_RESTART_READY
      end

      def disable!
        @mode = DISABLED
      end
    end

    def initialize(log:, system_config: SystemConfig.new, start_in_parallel: false)
      super(log: log)

      @labels = {}
      @inputs = []
      @suppress_emit_error_log_interval = 0
      @next_emit_error_log_time = nil
      @without_source = system_config.without_source || false
      @source_only_mode = SourceOnlyMode.new(system_config.with_source_only, start_in_parallel)
      @source_only_buffer_agent = nil
      @enable_input_metrics = system_config.enable_input_metrics

      suppress_interval(system_config.emit_error_log_interval) unless system_config.emit_error_log_interval.nil?
    end

    attr_reader :inputs
    attr_reader :labels

    # Router that inputs emit to while running in source-only mode.
    def source_only_router
      raise "[BUG] 'RootAgent#source_only_router' should not be called when 'with_source_only' is false" unless @source_only_mode.enabled?
      @source_only_buffer_agent.event_router
    end

    # Expands <worker> sections onto their target elements, instantiates all
    # <label> sections first (so routing targets exist), configures the rest
    # via `super`, then adds <source> plugins last.
    def configure(conf)
      used_worker_ids = []
      available_worker_ids = (0..Fluent::Engine.system_config.workers - 1).to_a
      # initialize <worker> elements
      supported_directives = ['source', 'match', 'filter', 'label']
      conf.elements(name: 'worker').each do |e|
        target_worker_id_str = e.arg
        if target_worker_id_str.empty?
          raise Fluent::ConfigError, "Missing worker id on <worker> directive"
        end

        target_worker_ids = target_worker_id_str.split("-")
        if target_worker_ids.size == 2
          # Range form: <worker first-last>.
          first_worker_id = target_worker_ids.first.to_i
          last_worker_id = target_worker_ids.last.to_i
          if first_worker_id > last_worker_id
            raise Fluent::ConfigError, "greater first_worker_id<#{first_worker_id}> than last_worker_id<#{last_worker_id}> specified by <worker> directive is not allowed. Available multi worker assign syntax is <smaller_worker_id>-<greater_worker_id>"
          end
          target_worker_ids = []
          first_worker_id.step(last_worker_id, 1) do |worker_id|
            target_worker_id = worker_id.to_i
            target_worker_ids << target_worker_id

            if target_worker_id < 0 || target_worker_id > (Fluent::Engine.system_config.workers - 1)
              raise Fluent::ConfigError, "worker id #{target_worker_id} specified by <worker> directive is not allowed. Available worker id is between 0 and #{(Fluent::Engine.system_config.workers - 1)}"
            end
            available_worker_ids.delete(target_worker_id) if available_worker_ids.include?(target_worker_id)
            if used_worker_ids.include?(target_worker_id)
              raise Fluent::ConfigError, "specified worker_id<#{worker_id}> collisions is detected on <worker> directive. Available worker id(s): #{available_worker_ids}"
            end
            used_worker_ids << target_worker_id

            e.elements.each do |elem|
              unless supported_directives.include?(elem.name)
                raise Fluent::ConfigError, "<worker> section cannot have <#{elem.name}> directive"
              end
            end

            unless target_worker_ids.empty?
              e.set_target_worker_ids(target_worker_ids.uniq)
            end
          end
        else
          # Single worker id form: <worker N>.
          target_worker_id = target_worker_id_str.to_i
          if target_worker_id < 0 || target_worker_id > (Fluent::Engine.system_config.workers - 1)
            raise Fluent::ConfigError, "worker id #{target_worker_id} specified by <worker> directive is not allowed. Available worker id is between 0 and #{(Fluent::Engine.system_config.workers - 1)}"
          end

          e.elements.each do |elem|
            unless supported_directives.include?(elem.name)
              raise Fluent::ConfigError, "<worker> section cannot have <#{elem.name}> directive"
            end
            elem.set_target_worker_id(target_worker_id)
          end
        end
        conf += e
      end
      conf.elements.delete_if{|e| e.name == 'worker'}

      error_label_config = nil

      # initialize <label> elements before configuring all plugins to avoid 'label not found' in input, filter and output.
      label_configs = {}
      conf.elements(name: 'label').each { |e|
        if !Fluent::Engine.supervisor_mode && e.for_another_worker?
          next
        end
        name = e.arg
        raise ConfigError, "Missing symbol argument on <label> directive" if name.empty?
        raise ConfigError, "@ROOT for <label> is not permitted, reserved for getting root router" if name == '@ROOT'

        if name == ERROR_LABEL
          error_label_config = e
        else
          add_label(name)
          label_configs[name] = e
        end
      }
      # Call 'configure' here to avoid 'label not found'
      label_configs.each { |name, e| @labels[name].configure(e) }
      setup_error_label(error_label_config) if error_label_config

      super

      setup_source_only_buffer_agent if @source_only_mode.enabled?

      # initialize <source> elements
      if @without_source
        log.info :worker0, "'--without-source' is applied. Ignore <source> sections"
      else
        conf.elements(name: 'source').each { |e|
          if !Fluent::Engine.supervisor_mode && e.for_another_worker?
            next
          end
          type = e['@type']
          raise ConfigError, "Missing '@type' parameter on <source> directive" unless type
          add_source(type, e)
        }
      end
    end

    def setup_error_label(e)
      error_label = add_label(ERROR_LABEL)
      error_label.configure(e)
      @error_collector = error_label.event_router
    end

    def setup_source_only_buffer_agent(flush: false)
      @source_only_buffer_agent = SourceOnlyBufferAgent.new(log: log, system_config: Fluent::Engine.system_config)
      @source_only_buffer_agent.configure(flush: flush)
    end

    def cleanup_source_only_buffer_agent
      @source_only_buffer_agent&.cleanup
    end

    # Yields every plugin instance (and nested agent) in lifecycle order;
    # `desc: true` reverses the order. `kind_callback` runs after each kind
    # completes (used to join threads per phase during shutdown).
    def lifecycle(desc: false, kind_callback: nil, kind_or_agent_list: nil)
      only_zero_downtime_restart_ready = false
      unless kind_or_agent_list
        if @source_only_mode.enabled?
          kind_or_agent_list = [:input, @source_only_buffer_agent]
          only_zero_downtime_restart_ready = @source_only_mode.only_zero_downtime_restart_ready?
        elsif @source_only_buffer_agent
          # source_only_buffer_agent can re-reroute events, so the priority is equal to output_with_router.
          kind_or_agent_list = [:input, :output_with_router, @source_only_buffer_agent, @labels.values, :filter, :output].flatten
        else
          kind_or_agent_list = [:input, :output_with_router, @labels.values, :filter, :output].flatten
        end
        kind_or_agent_list.reverse! if desc
      end

      kind_or_agent_list.each do |kind|
        if kind.respond_to?(:lifecycle)
          # Nested agent (label / source-only buffer agent): recurse.
          agent = kind
          agent.lifecycle(desc: desc) do |plugin, display_kind|
            yield plugin, display_kind
          end
        else
          list = if desc
                   lifecycle_control_list[kind].reverse
                 else
                   lifecycle_control_list[kind]
                 end
          display_kind = (kind == :output_with_router ? :output : kind)
          list.each do |instance|
            if only_zero_downtime_restart_ready
              next unless instance.respond_to?(:zero_downtime_restart_ready?) and instance.zero_downtime_restart_ready?
            end
            yield instance, display_kind
          end
        end
        if kind_callback
          kind_callback.call
        end
      end
    end

    def start(kind_or_agent_list: nil)
      lifecycle(desc: true, kind_or_agent_list: kind_or_agent_list) do |i| # instance
        i.start unless i.started?
        # Input#start sometimes emits lots of events with in_tail/`read_from_head true` case
        # and it causes deadlock for small buffer/queue output. To avoid such problem,
        # buffer related output threads should be run before `Input#start`.
        # This is why after_start should be called immediately after start call.
        # This depends on `desc: true` because calling plugin order of `desc: true` is
        # Output, Filter, Label, Output with Router, then Input.
        i.after_start unless i.after_started?
      end
    end

    # Forces a flush on every buffered output, one thread per plugin.
    def flush!
      log.info "flushing all buffer forcedly"
      flushing_threads = []
      lifecycle(desc: true) do |instance|
        if instance.respond_to?(:force_flush)
          t = Thread.new do
            Thread.current.abort_on_exception = true
            begin
              instance.force_flush
            rescue => e
              log.warn "unexpected error while flushing buffer", plugin: instance.class, plugin_id: instance.plugin_id, error: e
              log.warn_backtrace
            end
          end
          flushing_threads << t
        end
      end
      flushing_threads.each{|t| t.join }
    end

    # Leaves source-only mode: starts all remaining plugins, re-routes inputs
    # back to the normal router, replaces the buffer agent with a flushing
    # one, and disables the mode flag.
    def cancel_source_only!
      unless @source_only_mode.enabled?
        log.info "do nothing for canceling with-source-only because the current mode is not with-source-only."
        return
      end

      log.info "cancel with-source-only mode and start the other plugins"
      all_plugins = [:input, :output_with_router, @labels.values, :filter, :output].flatten.reverse
      start(kind_or_agent_list: all_plugins)

      lifecycle_control_list[:input].each(&:event_emitter_cancel_source_only)

      # Want to make sure that the source_only_router finishes all process before
      # shutting down the agent.
      # Strictly speaking, it would be necessary to have exclusive lock between
      # EventRouter and the shutting down process of this agent.
      # However, adding lock to EventRouter would worsen its performance, and
      # the entire shutting down process does not care about it either.
      # So, sleep here just in case.
      sleep 1

      shutdown(kind_or_agent_list: [@source_only_buffer_agent])
      @source_only_buffer_agent = nil

      # This agent can stop after flushing its all buffer, but it is not implemented for now.
      log.info "starts the loading agent for with-source-only"
      setup_source_only_buffer_agent(flush: true)
      start(kind_or_agent_list: [@source_only_buffer_agent])

      @source_only_mode.disable!
    end

    def shutdown(kind_or_agent_list: nil)
      # Fluentd's shutdown sequence is stop, before_shutdown, shutdown, after_shutdown, close, terminate for plugins
      # These method callers does `rescue Exception` to call methods of shutdown sequence as far as possible
      # if plugin methods does something like infinite recursive call, `exit`, unregistering signal handlers or others.
      # Plugins should be separated and be in sandbox to protect data in each plugins/buffers.

      # Runs one phase synchronously, swallowing (but logging) any error so
      # the sequence proceeds for every plugin.
      lifecycle_safe_sequence = ->(method, checker) {
        lifecycle(kind_or_agent_list: kind_or_agent_list) do |instance, kind|
          begin
            log.debug "calling #{method} on #{kind} plugin", type: Plugin.lookup_type_from_class(instance.class), plugin_id: instance.plugin_id
            instance.__send__(method) unless instance.__send__(checker)
          rescue Exception => e
            log.warn "unexpected error while calling #{method} on #{kind} plugin", plugin: instance.class, plugin_id: instance.plugin_id, error: e
            log.warn_backtrace
          end
        end
      }

      # Runs a potentially-blocking phase in one thread per plugin, joining
      # all threads at each kind boundary via the kind_callback.
      lifecycle_unsafe_sequence = ->(method, checker) {
        operation = case method
                    when :shutdown then "shutting down"
                    when :close then "closing"
                    else
                      raise "BUG: unknown method name '#{method}'"
                    end

        operation_threads = []
        callback = ->(){
          operation_threads.each{|t| t.join }
          operation_threads.clear
        }
        lifecycle(kind_callback: callback, kind_or_agent_list: kind_or_agent_list) do |instance, kind|
          t = Thread.new do
            Thread.current.abort_on_exception = true
            begin
              if method == :shutdown
                # To avoid Input#shutdown and Output#before_shutdown mismatch problem, combine before_shutdown and shutdown call in one sequence.
                # The problem is in_tail flushes buffered multiline in shutdown but output's flush_at_shutdown is invoked in before_shutdown
                operation = "preparing shutdown" # for logging
                log.debug "#{operation} #{kind} plugin", type: Plugin.lookup_type_from_class(instance.class), plugin_id: instance.plugin_id
                begin
                  instance.__send__(:before_shutdown) unless instance.__send__(:before_shutdown?)
                rescue Exception => e
                  log.warn "unexpected error while #{operation} on #{kind} plugin", plugin: instance.class, plugin_id: instance.plugin_id, error: e
                  log.warn_backtrace
                end
                operation = "shutting down"
                log.info "#{operation} #{kind} plugin", type: Plugin.lookup_type_from_class(instance.class), plugin_id: instance.plugin_id
                instance.__send__(:shutdown) unless instance.__send__(:shutdown?)
              else
                log.debug "#{operation} #{kind} plugin", type: Plugin.lookup_type_from_class(instance.class), plugin_id: instance.plugin_id
                instance.__send__(method) unless instance.__send__(checker)
              end
            rescue Exception => e
              log.warn "unexpected error while #{operation} on #{kind} plugin", plugin: instance.class, plugin_id: instance.plugin_id, error: e
              log.warn_backtrace
            end
          end
          operation_threads << t
        end
      }

      lifecycle_safe_sequence.call(:stop, :stopped?)

      # before_shutdown does force_flush for output plugins: it should block, so it's unsafe operation
      lifecycle_unsafe_sequence.call(:shutdown, :shutdown?)

      lifecycle_safe_sequence.call(:after_shutdown, :after_shutdown?)

      lifecycle_unsafe_sequence.call(:close, :closed?)

      lifecycle_safe_sequence.call(:terminate, :terminated?)

      cleanup_source_only_buffer_agent unless kind_or_agent_list
    end

    # Enables rate-limiting of "emit transaction failed" warnings.
    def suppress_interval(interval_time)
      @suppress_emit_error_log_interval = interval_time
      @next_emit_error_log_time = Time.now.to_i
    end

    def add_source(type, conf)
      log_type = conf.for_this_worker? ? :default : :worker0
      log.info log_type, "adding source", type: type

      input = Plugin.new_input(type)
      # <source> emits events to the top-level event router (RootAgent#event_router).
      # Input#configure overwrites event_router to a label's event_router if it has `@label` parameter.
      # See also 'fluentd/plugin/input.rb'
      input.context_router = @event_router
      input.configure(conf)
      input.event_emitter_apply_source_only if @source_only_mode.enabled?
      if @enable_input_metrics
        @event_router.add_metric_callbacks(input.plugin_id, Proc.new {|es| input.metric_callback(es) })
      end
      @inputs << input

      input
    end

    def add_label(name)
      label = Label.new(name, log: log)
      raise ConfigError, "Section <label #{name}> appears twice" if @labels[name]
      label.root_agent = self
      @labels[name] = label
    end

    def find_label(label_name)
      if label = @labels[label_name]
        label
      else
        raise ArgumentError, "#{label_name} label not found"
      end
    end

    # Routes a failed record to the user's <@ERROR> label if configured,
    # otherwise dumps it into the log.
    def emit_error_event(tag, time, record, error)
      error_info = {error: error, location: (error.backtrace ? error.backtrace.first : nil), tag: tag, time: time}
      if @error_collector
        # A record is not included in the logs because <@ERROR> handles it. This warn is for the notification
        log.warn "send an error event to @ERROR:", error_info
        @error_collector.emit(tag, time, record)
      else
        error_info[:record] = record
        log.warn "dump an error event:", error_info
      end
    end

    # Same as emit_error_event but for a whole event stream; re-raises when
    # no <@ERROR> label exists (with rate-limited logging).
    def handle_emits_error(tag, es, error)
      error_info = {error: error, location: (error.backtrace ? error.backtrace.first : nil), tag: tag}
      if @error_collector
        log.warn "send an error event stream to @ERROR:", error_info
        @error_collector.emit_stream(tag, es)
      else
        now = Time.now.to_i
        if @suppress_emit_error_log_interval.zero? || now > @next_emit_error_log_time
          log.warn "emit transaction failed:", error_info
          log.warn_backtrace
          @next_emit_error_log_time = now + @suppress_emit_error_log_interval
        end
        raise error
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/plugin_helper.rb | lib/fluent/plugin_helper.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/plugin_helper/event_emitter'
require 'fluent/plugin_helper/thread'
require 'fluent/plugin_helper/event_loop'
require 'fluent/plugin_helper/timer'
require 'fluent/plugin_helper/child_process'
require 'fluent/plugin_helper/storage'
require 'fluent/plugin_helper/parser'
require 'fluent/plugin_helper/formatter'
require 'fluent/plugin_helper/http_server'
require 'fluent/plugin_helper/inject'
require 'fluent/plugin_helper/extract'
require 'fluent/plugin_helper/socket'
require 'fluent/plugin_helper/server'
require 'fluent/plugin_helper/counter'
require 'fluent/plugin_helper/retry_state'
require 'fluent/plugin_helper/record_accessor'
require 'fluent/plugin_helper/compat_parameters'
require 'fluent/plugin_helper/service_discovery'
require 'fluent/plugin_helper/metrics'
module Fluent
  # Infrastructure behind the `helpers :timer, :thread, ...` DSL: plugin
  # classes declare helper mixins by snake_case name and get the matching
  # Fluent::PluginHelper::* module mixed in.
  module PluginHelper
    # Include this into a plugin base class to expose the `helpers` DSL on
    # the class itself (via extend).
    module Mixin
      def self.included(mod)
        mod.extend(Fluent::PluginHelper)
      end
    end

    # When a class gains the DSL, install an `inherited` hook so that every
    # subclass starts with its own empty helper list instead of sharing the
    # parent's state.
    def self.extended(mod)
      mod.define_singleton_method(:inherited) do |subclass|
        subclass.instance_variable_set(:@_plugin_helpers_list, [])
      end
    end

    # Resolves each snake_case symbol to a Fluent::PluginHelper::* constant
    # (snake_case -> CamelCase) and mixes the modules in. Does NOT record
    # them in the helper list; use #helpers for that.
    def helpers_internal(*snake_case_symbols)
      helper_modules = snake_case_symbols.map do |name|
        camelized = name.to_s.split('_').map(&:capitalize).join
        begin
          Fluent::PluginHelper.const_get(camelized)
        rescue NameError
          raise "Unknown plugin helper:#{name}"
        end
      end
      include(*helper_modules)
    end

    # Public DSL entry point: records the requested helper names, then mixes
    # the corresponding modules in.
    def helpers(*snake_case_symbols)
      @_plugin_helpers_list ||= []
      @_plugin_helpers_list.concat(snake_case_symbols)
      helpers_internal(*snake_case_symbols)
    end

    # Names of the helpers requested on this class (empty array if none).
    def plugin_helpers
      @_plugin_helpers_list || []
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/win32api.rb | lib/fluent/win32api.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/env'
module Fluent
  # Fiddle-based FFI bindings for the few CRT / Win32 functions Fluentd needs
  # on Windows. The whole module is defined only when Fluent.windows? is true,
  # so loading this file on other platforms is a no-op.
  module Win32API
    require 'fiddle/import'
    require 'fiddle/types'
    extend Fiddle::Importer

    # Ruby built with the UCRT toolchain ("-ucrt" platform suffix) links
    # against ucrtbase.dll; older builds use msvcrt.dll.
    if RUBY_PLATFORM.split('-')[-1] == "ucrt"
      MSVCRT_DLL = 'ucrtbase.dll'
    else
      MSVCRT_DLL = 'msvcrt.dll'
    end

    dlload MSVCRT_DLL, "kernel32.dll"
    include Fiddle::Win32Types

    # CRT: converts a C file descriptor into the underlying Win32 HANDLE.
    extern "intptr_t _get_osfhandle(int)"
    # kernel32: fills a BY_HANDLE_FILE_INFORMATION struct for the HANDLE
    # (caller passes a raw buffer as the void * argument).
    extern "BOOL GetFileInformationByHandle(HANDLE, void *)"
    # kernel32: extended variant taking a FILE_INFO_BY_HANDLE_CLASS value;
    # presumably used where richer file identity info is available — see
    # call sites (e.g. file watching code) to confirm which class is passed.
    extern "BOOL GetFileInformationByHandleEx(HANDLE, int, void *, DWORD)"
  end if Fluent.windows?
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/msgpack_factory.rb | lib/fluent/msgpack_factory.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'msgpack'
require 'fluent/time'
module Fluent
  # Builds and caches MessagePack::Factory instances that understand
  # Fluentd's EventTime extension type (and, optionally, Ruby Time via the
  # msgpack timestamp type).
  module MessagePackFactory
    # Process-wide cached factory; set eagerly by .init or left nil so
    # .engine_factory builds one lazily per call.
    @@engine_factory = nil

    # Deprecated instance-level API kept for backward compatibility; each
    # method forwards to the module-level equivalent after logging once.
    module Mixin
      def msgpack_factory
        unless @deprecated_log_done
          deprecated_log('Deprecated method: this method is going to be deleted. Use Fluent::MessagePackFactory.engine_factory')
        end
        MessagePackFactory.engine_factory
      end

      def msgpack_packer(*args)
        unless @deprecated_log_done
          deprecated_log('Deprecated method: this method is going to be deleted. Use Fluent::MessagePackFactory.msgpack_packer')
        end
        MessagePackFactory.msgpack_packer(*args)
      end

      def msgpack_unpacker(*args)
        unless @deprecated_log_done
          deprecated_log('Deprecated method: this method is going to be deleted. Use Fluent::MessagePackFactory.msgpack_unpacker')
        end
        MessagePackFactory.msgpack_unpacker(*args)
      end

      # Logs the deprecation warning at most once per object. If $log is not
      # set up yet, the flag stays unset so the warning is retried (and
      # eventually emitted) on a later call.
      def deprecated_log(str)
        if $log
          $log.warn(str)
          @deprecated_log_done = true
        end
      end
    end

    # Returns the cached factory, building a fresh one on each call when
    # nothing is cached. NOTE: once a factory is cached (e.g. via .init),
    # the enable_time_support argument is ignored.
    def self.engine_factory(enable_time_support: false)
      @@engine_factory || factory(enable_time_support: enable_time_support)
    end

    def self.msgpack_packer(*args)
      engine_factory.packer(*args)
    end

    def self.msgpack_unpacker(*args)
      engine_factory.unpacker(*args)
    end

    # Builds a new factory with Fluent::EventTime registered, plus Time
    # support when requested. Does not touch the cache.
    def self.factory(enable_time_support: false)
      factory = MessagePack::Factory.new
      factory.register_type(Fluent::EventTime::TYPE, Fluent::EventTime)
      if enable_time_support
        factory.register_type(
          MessagePack::Timestamp::TYPE, Time,
          packer: MessagePack::Time::Packer,
          unpacker: MessagePack::Time::Unpacker)
      end
      factory
    end

    def self.packer(*args)
      factory.packer(*args)
    end

    def self.unpacker(*args)
      factory.unpacker(*args)
    end

    # Eagerly builds and caches the process-wide factory.
    # Fix: previously duplicated the construction logic of .factory verbatim;
    # now delegates so the two code paths cannot drift apart.
    def self.init(enable_time_support: false)
      @@engine_factory = factory(enable_time_support: enable_time_support)
    end

    def self.thread_local_msgpack_packer
      Thread.current[:local_msgpack_packer] ||= MessagePackFactory.engine_factory.packer
    end

    # Returns a per-thread unpacker. An existing unpacker is reset before
    # reuse so stale buffered bytes from a previous stream cannot leak into
    # the next decode.
    def self.thread_local_msgpack_unpacker
      unpacker = Thread.current[:local_msgpack_unpacker]
      if unpacker.nil?
        return Thread.current[:local_msgpack_unpacker] = MessagePackFactory.engine_factory.unpacker
      end
      unpacker.reset
      unpacker
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/variable_store.rb | lib/fluent/variable_store.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Fluent
  # VariableStore gives plugins a place to share state without resorting to
  # class variables, so that state can be swapped out safely on reload.
  class VariableStore
    @data = {}

    class << self
      # Returns the value stored under +namespace+, initializing it with
      # +default_value+ on first access. Note the ||= semantics: a falsy
      # stored value would also be replaced by the default.
      def fetch_or_build(namespace, default_value: {})
        @data[namespace] ||= default_value
      end

      # Swaps in an empty store for the duration of the block. If the block
      # raises, the previous contents are restored and the error re-raised;
      # on success the store stays reset.
      def try_to_reset
        previous = @data
        @data = {}
        begin
          yield
        rescue
          @data = previous
          raise
        end
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/lib/fluent/error.rb | lib/fluent/error.rb | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Fluent
  # Error signalling a condition Fluentd cannot recover from (its default
  # message says as much). Callers may pass a more specific message.
  class UnrecoverableError < StandardError
    def initialize(error_message = nil)
      @message = error_message || "an unrecoverable error occurs in Fluentd process"
    end

    def to_s
      @message
    end
  end

  # Unrecoverable: problem with the configured root directory.
  class InvalidRootDirectory < UnrecoverableError; end

  # Unrecoverable: problem with the configured lock directory.
  class InvalidLockDirectory < UnrecoverableError; end

  # For internal use. Inherits from Exception rather than StandardError so a
  # plain `rescue` clause cannot swallow it.
  class UncatchableError < Exception; end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.