repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/declared.rb | lib/grape/dsl/declared.rb | # frozen_string_literal: true
module Grape
  module DSL
    module Declared
      # Denotes a situation where a DSL method has been invoked in a
      # filter which it should not yet be available in.
      class MethodNotYetAvailable < StandardError
        def initialize(msg = '#declared is not available prior to parameter validation')
          super
        end
      end

      # A filtering method that will return a hash
      # consisting only of keys that have been declared by a
      # `params` statement against the current/target endpoint or parent
      # namespaces.
      #
      # @param passed_params [Hash] the initial hash to filter. Usually this will just be `params`
      # @param include_parent_namespaces [Boolean] when false, only params declared
      #   against the current/target endpoint are returned (defaults to true)
      # @param include_missing [Boolean] include declared-but-absent keys in the result
      # @param evaluate_given [Boolean] evaluate `given` dependencies while filtering
      # @param stringify [Boolean] return string keys instead of symbol keys
      # @raise [MethodNotYetAvailable] when called before parameter validation has run
      def declared(passed_params, include_parent_namespaces: true, include_missing: true, evaluate_given: false, stringify: false)
        raise MethodNotYetAvailable unless before_filter_passed

        contract_key_map = inheritable_setting.namespace_stackable[:contract_key_map]
        # Hash value omission (Ruby 3.1+) used consistently for all keywords,
        # matching the shorthand already used for the other arguments here.
        handler = DeclaredParamsHandler.new(include_missing:, evaluate_given:, stringify:, contract_key_map:)
        declared_params = include_parent_namespaces ? inheritable_setting.route[:declared_params] : (inheritable_setting.namespace_stackable[:declared_params].last || [])
        renamed_params = inheritable_setting.route[:renamed_params] || {}
        route_params = options.dig(:route_options, :params) || {} # options = endpoint's option
        handler.call(passed_params, declared_params, route_params, renamed_params)
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/logger.rb | lib/grape/dsl/logger.rb | # frozen_string_literal: true
module Grape
  module DSL
    module Logger
      # Set or retrieve the configured logger. If none was configured, this
      # method will create a new one, logging to stdout.
      #
      # @param logger [Object] the new logger to use; pass nil to read the current one
      # @return [Object] the configured (or newly created) logger
      def logger(logger = nil)
        global_settings = inheritable_setting.global
        if logger
          global_settings[:logger] = logger
        else
          # Memoize a default stdout logger on first read (idiomatic ||=
          # instead of the manual `a || a = x` form).
          global_settings[:logger] ||= ::Logger.new($stdout)
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/middleware.rb | lib/grape/dsl/middleware.rb | # frozen_string_literal: true
module Grape
  module DSL
    module Middleware
      # Apply a custom middleware to the API. Applies to the current
      # namespace and any children, but not parents.
      #
      # @param middleware_class [Class] the middleware class to inject
      def use(middleware_class, *args, &block)
        entry = [:use, middleware_class, *args]
        entry.push(block) unless block.nil?
        inheritable_setting.namespace_stackable[:middleware] = entry
      end

      # Rack-style insertion variants, each recording its operation name
      # alongside the middleware arguments.
      %i[insert insert_before insert_after].each do |operation|
        define_method(operation) do |*args, &block|
          entry = [operation, *args]
          entry.push(block) unless block.nil?
          inheritable_setting.namespace_stackable[:middleware] = entry
        end
      end

      # Retrieve an array of the middleware classes and arguments that are
      # currently applied to the application.
      def middleware
        inheritable_setting.namespace_stackable[:middleware] || []
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/routing.rb | lib/grape/dsl/routing.rb | # frozen_string_literal: true
module Grape
  module DSL
    module Routing
      # All endpoints registered in this scope so far.
      attr_reader :endpoints

      # Evaluate the given block in the context of the API, but only when
      # +conditional_option+ is truthy; otherwise the block is skipped.
      def given(conditional_option, &)
        return unless conditional_option

        mounted(&)
      end

      # Evaluate the block lazily against the mount-time configuration, so
      # DSL calls inside it can depend on values supplied via `mount ... with:`.
      def mounted(&block)
        evaluate_as_instance_with_configuration(block, lazy: true)
      end

      # Set or query whether this API cascades (passes unmatched requests on
      # to the next Rack app). With no argument, reports the current setting,
      # defaulting to true when nothing was configured.
      # NOTE(review): the reader returns whether a non-nil value was stored,
      # not the stored value itself — confirm this is intentional.
      def cascade(value = nil)
        return inheritable_setting.namespace_inheritable.key?(:cascade) ? !inheritable_setting.namespace_inheritable[:cascade].nil? : true if value.nil?

        inheritable_setting.namespace_inheritable[:cascade] = value
      end

      # Specify an API version.
      #
      # @example API with legacy support.
      #   class MyAPI < Grape::API
      #     version 'v2'
      #
      #     get '/main' do
      #       {some: 'data'}
      #     end
      #
      #     version 'v1' do
      #       get '/main' do
      #         {legacy: 'data'}
      #       end
      #     end
      #   end
      #
      # @return [String, nil] the last declared version
      def version(*args, **options, &block)
        if args.any?
          options = options.reverse_merge(using: :path)
          requested_versions = args.flatten.map(&:to_s)

          # Header-based versioning requires a vendor string for the Accept header.
          raise Grape::Exceptions::MissingVendorOption.new if options[:using] == :header && !options.key?(:vendor)

          @versions = versions | requested_versions

          if block
            # With a block, the version applies only within that block's namespace.
            within_namespace do
              inheritable_setting.namespace_inheritable[:version] = requested_versions
              inheritable_setting.namespace_inheritable[:version_options] = options
              instance_eval(&block)
            end
          else
            inheritable_setting.namespace_inheritable[:version] = requested_versions
            inheritable_setting.namespace_inheritable[:version_options] = options
          end
        end

        @versions.last if instance_variable_defined?(:@versions) && @versions
      end

      # Define a root URL prefix for your entire API. With no argument,
      # returns the currently configured prefix.
      def prefix(prefix = nil)
        return inheritable_setting.namespace_inheritable[:root_prefix] if prefix.nil?

        inheritable_setting.namespace_inheritable[:root_prefix] = prefix.to_s
      end

      # Create a scope without affecting the URL.
      #
      # @param _name [Symbol] Purely placebo, just allows to name the scope to
      #   make the code more readable.
      def scope(_name = nil, &block)
        within_namespace do
          nest(block)
        end
      end

      # Choose the params builder used to construct request.params in this
      # namespace (see Grape::ParamsBuilder).
      def build_with(build_with)
        inheritable_setting.namespace_inheritable[:build_params_with] = build_with
      end

      # Do not route HEAD requests to GET requests automatically.
      def do_not_route_head!
        inheritable_setting.namespace_inheritable[:do_not_route_head] = true
      end

      # Do not automatically route OPTIONS.
      def do_not_route_options!
        inheritable_setting.namespace_inheritable[:do_not_route_options] = true
      end

      # Enable Rack::Lint middleware for this namespace.
      def lint!
        inheritable_setting.namespace_inheritable[:lint] = true
      end

      # Exclude this namespace's routes from generated documentation.
      def do_not_document!
        inheritable_setting.namespace_inheritable[:do_not_document] = true
      end

      # Mount a Rack application or another Grape API at a path.
      # Accepts either a single app (mounted at '/') or a hash of
      # app => path pairs. Options may carry `:with` (mount-time
      # configuration) and `:refresh_already_mounted`.
      def mount(mounts, *opts)
        mounts = { mounts => '/' } unless mounts.respond_to?(:each_pair)
        mounts.each_pair do |app, path|
          if app.respond_to?(:mount_instance)
            # A mountable API class: instantiate it with the supplied
            # configuration and re-mount the resulting instance.
            opts_with = opts.any? ? opts.first[:with] : {}
            mount({ app.mount_instance(configuration: opts_with) => path }, *opts)
            next
          end

          in_setting = inheritable_setting

          if app.respond_to?(:inheritable_setting, true)
            # The mounted app is a Grape API: wire its settings into ours so
            # inheritable configuration flows into the mounted endpoints.
            mount_path = Grape::Router.normalize_path(path)
            app.top_level_setting.namespace_stackable[:mount_path] = mount_path

            app.inherit_settings(inheritable_setting)

            in_setting = app.top_level_setting

            app.change!
            change!
          end

          # When trying to mount multiple times the same endpoint, remove the previous ones
          # from the list of endpoints if refresh_already_mounted parameter is true
          refresh_already_mounted = opts.any? ? opts.first[:refresh_already_mounted] : false
          if refresh_already_mounted && !endpoints.empty?
            endpoints.delete_if do |endpoint|
              endpoint.options[:app].to_s == app.to_s
            end
          end

          endpoints << Grape::Endpoint.new(
            in_setting,
            method: :any,
            path: path,
            app: app,
            route_options: { anchor: false },
            forward_match: !app.respond_to?(:inheritable_setting),
            for: self
          )
        end
      end

      # Defines a route that will be recognized
      # by the Grape API.
      #
      # @param methods [HTTP Verb] One or more HTTP verbs that are accepted by this route. Set to `:any` if you want any verb to be accepted.
      # @param paths [String] One or more strings representing the URL segment(s) for this route.
      #
      # @example Defining a basic route.
      #   class MyAPI < Grape::API
      #     route(:any, '/hello') do
      #       {hello: 'world'}
      #     end
      #   end
      def route(methods, paths = ['/'], route_options = {}, &)
        method = methods == :any ? '*' : methods
        endpoint_params = inheritable_setting.namespace_stackable_with_hash(:params) || {}
        endpoint_description = inheritable_setting.route[:description]
        # Accumulated route options: declared params, then the `desc` block
        # metadata, then any options passed directly to this call.
        all_route_options = { params: endpoint_params }
        all_route_options.deep_merge!(endpoint_description) if endpoint_description
        all_route_options.deep_merge!(route_options) if route_options&.any?

        new_endpoint = Grape::Endpoint.new(
          inheritable_setting,
          method: method,
          path: paths,
          for: self,
          route_options: all_route_options,
          &
        )
        # Skip registration when an equivalent endpoint already exists.
        endpoints << new_endpoint unless endpoints.any? { |e| e.equals?(new_endpoint) }

        # Close the per-route settings scope and clear validations so the
        # next endpoint starts clean.
        inheritable_setting.route_end
        reset_validations!
      end

      # Define one DSL method per supported HTTP verb (get, post, put, ...),
      # each delegating to #route with that verb.
      Grape::HTTP_SUPPORTED_METHODS.each do |supported_method|
        define_method supported_method.downcase do |*args, **options, &block|
          paths = args.first || ['/']
          route(supported_method, paths, options, &block)
        end
      end

      # Declare a "namespace", which prefixes all subordinate routes with its
      # name. Any endpoints within a namespace, group, resource or segment,
      # etc., will share their parent context as well as any configuration
      # done in the namespace context.
      #
      # @example
      #
      #   namespace :foo do
      #     get 'bar' do
      #       # defines the endpoint: GET /foo/bar
      #     end
      #   end
      #
      # With no space and no block, returns the joined path of the current
      # namespace stack.
      def namespace(space = nil, requirements: nil, **options, &block)
        return Namespace.joined_space_path(inheritable_setting.namespace_stackable[:namespace]) unless space || block

        within_namespace do
          nest(block) do
            inheritable_setting.namespace_stackable[:namespace] = Grape::Namespace.new(space, requirements: requirements, **options) if space
          end
        end
      end

      alias group namespace
      alias resource namespace
      alias resources namespace
      alias segment namespace

      # An array of API routes.
      def routes
        @routes ||= endpoints.map(&:routes).flatten
      end

      # This method allows you to quickly define a parameter route segment
      # in your API.
      #
      # @param param [Symbol] The name of the parameter you wish to declare.
      # @option options [Regexp] You may supply a regular expression that the declared parameter must meet.
      def route_param(param, requirements: nil, type: nil, **options, &)
        requirements = { param.to_sym => requirements } if requirements.is_a?(Regexp)

        # When a type is given, declare the coercion for the path parameter.
        Grape::Validations::ParamsScope.new(api: self) do
          requires param, type: type
        end if type

        namespace(":#{param}", requirements: requirements, **options, &)
      end

      # @return array of defined versions
      def versions
        @versions ||= []
      end

      private

      # Remove all defined routes.
      def reset_routes!
        endpoints.each(&:reset_routes!)
        @routes = nil
      end

      # Drop every endpoint registered so far.
      def reset_endpoints!
        @endpoints = []
      end

      # Re-mount the given apps, replacing previously mounted copies.
      def refresh_mounted_api(mounts, *opts)
        opts << { refresh_already_mounted: true }
        mount(mounts, *opts)
      end

      # Execute first the provided block, then each of the
      # block passed in. Allows for simple 'before' setups
      # of settings stack pushes.
      def nest(*blocks, &block)
        blocks.compact!
        if blocks.any?
          evaluate_as_instance_with_configuration(block) if block
          blocks.each { |b| evaluate_as_instance_with_configuration(b) }
          reset_validations!
        else
          instance_eval(&block)
        end
      end

      # Run +block+ as this API instance with the evaluated configuration
      # temporarily swapped in. When +lazy+ is true on a base instance,
      # return the un-evaluated lazy block instead of its result, deferring
      # evaluation to mount time.
      def evaluate_as_instance_with_configuration(block, lazy: false)
        lazy_block = Grape::Util::Lazy::Block.new do |configuration|
          value_for_configuration = configuration
          self.configuration = value_for_configuration.evaluate if value_for_configuration.respond_to?(:lazy?) && value_for_configuration.lazy?
          response = instance_eval(&block)
          # Restore the (possibly lazy) configuration after evaluation.
          self.configuration = value_for_configuration
          response
        end
        if @base && base_instance? && lazy
          lazy_block
        else
          lazy_block.evaluate_from(configuration)
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/inside_route.rb | lib/grape/dsl/inside_route.rb | # frozen_string_literal: true
module Grape
  module DSL
    module InsideRoute
      include Declared

      # Backward compatibility: alias exception class to previous location
      MethodNotYetAvailable = Declared::MethodNotYetAvailable

      # The API version as specified in the URL.
      def version
        env[Grape::Env::API_VERSION]
      end

      # The evaluated configuration of the API this endpoint was built for.
      def configuration
        options[:for].configuration.evaluate
      end

      # End the request and display an error to the
      # end user with the specified message.
      #
      # @param message [String] The message to display.
      # @param status [Integer] The HTTP Status Code. Defaults to default_error_status, 500 if not set.
      # @param additional_headers [Hash] Additional headers for the response.
      # @param backtrace [Array<String>] The backtrace of the exception that caused the error.
      # @param original_exception [Exception] The original exception that caused the error.
      def error!(message, status = nil, additional_headers = nil, backtrace = nil, original_exception = nil)
        status = self.status(status || inheritable_setting.namespace_inheritable[:default_error_status])
        headers = additional_headers.present? ? header.merge(additional_headers) : header
        # Unwinds to the error middleware, which renders the error response.
        throw :error,
              message: message,
              status: status,
              headers: headers,
              backtrace: backtrace,
              original_exception: original_exception
      end

      # Redirect to a new url.
      #
      # @param url [String] The url to be redirect.
      # @param permanent [Boolean] default false.
      # @param body default a short message including the URL.
      def redirect(url, permanent: false, body: nil)
        body_message = body
        if permanent
          status 301
          body_message ||= "This resource has been moved permanently to #{url}."
        elsif http_version == 'HTTP/1.1' && !request.get?
          # HTTP/1.1 non-GET: 303 See Other so the client follows up with GET.
          status 303
          body_message ||= "An alternate resource is located at #{url}."
        else
          status 302
          body_message ||= "This resource has been moved temporarily to #{url}."
        end
        header 'Location', url
        content_type 'text/plain'
        body body_message
      end

      # Set or retrieve the HTTP status code.
      #
      # @param status [Integer, Symbol, nil] The HTTP Status Code to return for this request.
      # @raise [ArgumentError] for an unknown symbol or a non-Integer/Symbol argument
      def status(status = nil)
        case status
        when Symbol
          raise ArgumentError, "Status code :#{status} is invalid." unless Rack::Utils::SYMBOL_TO_STATUS_CODE.key?(status)

          @status = Rack::Utils.status_code(status)
        when Integer
          @status = status
        when nil
          return @status if instance_variable_defined?(:@status) && @status

          # No explicit status set: the default depends on the HTTP verb.
          if request.post?
            201
          elsif request.delete?
            if instance_variable_defined?(:@body) && @body.present?
              200
            else
              204
            end
          else
            200
          end
        else
          raise ArgumentError, 'Status code must be Integer or Symbol.'
        end
      end

      # Set response content-type
      def content_type(val = nil)
        if val
          header(Rack::CONTENT_TYPE, val)
        else
          header[Rack::CONTENT_TYPE]
        end
      end

      # Allows you to define the response body as something other than the
      # return value.
      #
      # @example
      #   get '/body' do
      #     body "Body"
      #     "Not the Body"
      #   end
      #
      #   GET /body # => "Body"
      def body(value = nil)
        if value
          @body = value
        elsif value == false
          # Reached only when value is exactly false (falsy, so it skips the
          # branch above): clear the body and respond 204 No Content.
          @body = ''
          status 204
        else
          # value is nil: act as a reader.
          instance_variable_defined?(:@body) ? @body : nil
        end
      end

      # Allows you to explicitly return no content.
      #
      # @example
      #   delete :id do
      #     return_no_content
      #     "not returned"
      #   end
      #
      #   DELETE /12 # => 204 No Content, ""
      def return_no_content
        status 204
        body false
      end

      # Allows you to send a file to the client via sendfile.
      #
      # @example
      #   get '/file' do
      #     sendfile FileStreamer.new(...)
      #   end
      #
      #   GET /file # => "contents of file"
      #
      # @raise [ArgumentError] when the argument is neither a String path nor nil
      def sendfile(value = nil)
        if value.is_a?(String)
          file_body = Grape::ServeStream::FileBody.new(value)
          @stream = Grape::ServeStream::StreamResponse.new(file_body)
        elsif !value.is_a?(NilClass)
          raise ArgumentError, 'Argument must be a file path'
        else
          stream
        end
      end

      # Allows you to define the response as a streamable object.
      #
      # If Content-Length and Transfer-Encoding are blank (among other conditions),
      # Rack assumes this response can be streamed in chunks.
      #
      # @example
      #   get '/stream' do
      #     stream FileStreamer.new(...)
      #   end
      #
      #   GET /stream # => "chunked contents of file"
      #
      # See:
      # * https://github.com/rack/rack/blob/99293fa13d86cd48021630fcc4bd5acc9de5bdc3/lib/rack/chunked.rb
      # * https://github.com/rack/rack/blob/99293fa13d86cd48021630fcc4bd5acc9de5bdc3/lib/rack/etag.rb
      def stream(value = nil)
        return if value.nil? && @stream.nil?

        # Blank out length/encoding headers so Rack streams the response.
        header Rack::CONTENT_LENGTH, nil
        header 'Transfer-Encoding', nil
        header Rack::CACHE_CONTROL, 'no-cache' # Skips ETag generation (reading the response up front)
        if value.is_a?(String)
          file_body = Grape::ServeStream::FileBody.new(value)
          @stream = Grape::ServeStream::StreamResponse.new(file_body)
        elsif value.respond_to?(:each)
          @stream = Grape::ServeStream::StreamResponse.new(value)
        elsif !value.is_a?(NilClass)
          raise ArgumentError, 'Stream object must respond to :each.'
        else
          @stream
        end
      end

      # Allows you to make use of Grape Entities by setting
      # the response body to the serializable hash of the
      # entity provided in the `:with` option. This has the
      # added benefit of automatically passing along environment
      # and version information to the serialization, making it
      # very easy to do conditional exposures. See Entity docs
      # for more info.
      #
      # @example
      #
      #   get '/users/:id' do
      #     present User.find(params[:id]),
      #       with: API::Entities::User,
      #       admin: current_user.admin?
      #   end
      def present(*args, **options)
        # Optional leading Symbol names the key under which to merge the
        # representation into the existing body.
        key, object = if args.count == 2 && args.first.is_a?(Symbol)
                        args
                      else
                        [nil, args.first]
                      end
        entity_class = entity_class_for_obj(object, options)
        root = options.delete(:root)
        representation = if entity_class
                           entity_representation_for(entity_class, object, options)
                         else
                           object
                         end
        representation = { root => representation } if root
        if key
          representation = (body || {}).merge(key => representation)
        elsif entity_class.present? && body
          raise ArgumentError, "Representation of type #{representation.class} cannot be merged." unless representation.respond_to?(:merge)

          representation = body.merge(representation)
        end
        body representation
      end

      # Returns route information for the current request.
      #
      # @example
      #
      #   desc "Returns the route description."
      #   get '/' do
      #     route.description
      #   end
      def route
        env[Grape::Env::GRAPE_ROUTING_ARGS][:route_info]
      end

      # Attempt to locate the Entity class for a given object, if not given
      # explicitly. This is done by looking for the presence of Klass::Entity,
      # where Klass is the class of the `object` parameter, or one of its
      # ancestors.
      # @param object [Object] the object to locate the Entity class for
      # @param options [Hash]
      # @option options :with [Class] the explicit entity class to use
      # @return [Class] the located Entity class, or nil if none is found
      def entity_class_for_obj(object, options)
        entity_class = options.delete(:with)
        return entity_class if entity_class

        # entity class not explicitly defined, auto-detect from relation#klass or first object in the collection
        object_class = if object.respond_to?(:klass)
                         object.klass
                       else
                         object.respond_to?(:first) ? object.first.class : object.class
                       end
        # Check the `represent` registrations declared for this namespace.
        representations = inheritable_setting.namespace_stackable_with_hash(:representations)
        if representations
          potential = object_class.ancestors.detect { |potential| representations.key?(potential) }
          entity_class = representations[potential] if potential
        end
        entity_class = object_class.const_get(:Entity) if !entity_class && object_class.const_defined?(:Entity) && object_class.const_get(:Entity).respond_to?(:represent)
        entity_class
      end

      # @return the representation of the given object as done through
      #   the given entity_class.
      def entity_representation_for(entity_class, object, options)
        embeds = { env: env }
        embeds[:version] = env[Grape::Env::API_VERSION] if env.key?(Grape::Env::API_VERSION)
        entity_class.represent(object, **embeds, **options)
      end

      # The request's protocol version string (e.g. 'HTTP/1.1'), falling
      # back to Rack's SERVER_PROTOCOL when HTTP_VERSION is absent.
      def http_version
        env.fetch('HTTP_VERSION') { env[Rack::SERVER_PROTOCOL] }
      end

      # Record the negotiated response format in the Rack env.
      def api_format(format)
        env[Grape::Env::API_FORMAT] = format
      end

      # The evaluation context for handlers: the endpoint instance itself.
      def context
        self
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/callbacks.rb | lib/grape/dsl/callbacks.rb | # frozen_string_literal: true
module Grape
  module DSL
    module Callbacks
      # Lifecycle hooks, each storing its block under the pluralized key
      # (e.g. :befores, :finallies) in the namespace's settings stack:
      #
      # before: runs before validation, coercion, or any endpoint code
      # before_validation: runs after `before`, prior to validation/coercion
      # after_validation: runs after validations/coercions, before the endpoint
      # after: runs after the endpoint, except on unsuccessful requests
      # finally: runs after the endpoint even on unsuccessful requests
      %w[before before_validation after_validation after finally].each do |hook|
        define_method(hook.to_sym) do |&hook_block|
          inheritable_setting.namespace_stackable[hook.pluralize.to_sym] = hook_block
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/validations.rb | lib/grape/dsl/validations.rb | # frozen_string_literal: true
module Grape
  module DSL
    module Validations
      # Opens a root-level ParamsScope, defining parameter coercions and
      # validations for the endpoint.
      # @yield instance context of the new scope
      def params(&)
        Grape::Validations::ParamsScope.new(api: self, type: Hash, &)
      end

      # Declare the contract to be used for the endpoint's parameters.
      # @param contract [Class<Dry::Validation::Contract> | Dry::Schema::Processor]
      #   The contract or schema to be used for validation. Optional.
      # @yield a block yielding a new instance of Dry::Schema::Params
      #   subclass, allowing to define the schema inline. When the
      #   +contract+ parameter is a schema, it will be used as a parent. Optional.
      # @raise [ArgumentError] when neither argument nor block is given, or
      #   when a block is combined with a full contract (only schemas can be inherited)
      def contract(contract = nil, &block)
        raise ArgumentError, 'Either contract or block must be provided' if !contract && !block
        raise ArgumentError, 'Cannot inherit from contract, only schema' if block && contract.respond_to?(:schema)

        Grape::Validations::ContractScope.new(self, contract, &block)
      end

      private

      # Clears all defined parameters and validations. The main purpose of it is to clean up
      # settings, so next endpoint won't interfere with previous one.
      #
      #    params do
      #      # params for the endpoint below this block
      #    end
      #    post '/current' do
      #      # whatever
      #    end
      #
      #    # somewhere between them the reset_validations! method gets called
      #
      #    params do
      #      # params for the endpoint below this block
      #    end
      #    post '/next' do
      #      # whatever
      #    end
      def reset_validations!
        inheritable_setting.namespace_stackable.delete(:declared_params, :params, :validations)
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/helpers.rb | lib/grape/dsl/helpers.rb | # frozen_string_literal: true
module Grape
  module DSL
    module Helpers
      # Add helper methods that will be accessible from any
      # endpoint within this namespace (and child namespaces).
      #
      # When called without a block, all known helpers within this scope
      # are included.
      #
      # @param [Array] new_modules optional array of modules to include
      # @param [Block] block optional block of methods to include
      #
      # @example Define some helpers.
      #
      #   class ExampleAPI < Grape::API
      #     helpers do
      #       def current_user
      #         User.find_by_id(params[:token])
      #       end
      #     end
      #   end
      #
      # @example Include many modules
      #
      #   class ExampleAPI < Grape::API
      #     helpers Authentication, Mailer, OtherModule
      #   end
      #
      def helpers(*new_modules, &block)
        include_new_modules(new_modules)
        include_block(block)
        include_all_in_scope if !block && new_modules.empty?
      end

      private

      # Register each given module as a helper module in this namespace.
      def include_new_modules(modules)
        return if modules.empty?

        modules.each { |mod| make_inclusion(mod) }
      end

      # Wrap the given block's methods in an anonymous module and register it.
      def include_block(block)
        return unless block

        Module.new.tap do |mod|
          make_inclusion(mod) { mod.class_eval(&block) }
        end
      end

      # Prepare +mod+ (Boolean shorthand constant, API helper plumbing) and
      # push it onto this namespace's stack of helper modules.
      def make_inclusion(mod, &)
        define_boolean_in_mod(mod)
        inject_api_helpers_to_mod(mod, &)
        inheritable_setting.namespace_stackable[:helpers] = mod
      end

      # Combine every helper module known in this scope into a single module.
      # NOTE(review): this uses the `namespace_stackable(:helpers)` method
      # form while the rest of this file reads via
      # `inheritable_setting.namespace_stackable[...]` — confirm the method
      # form is still provided by the including class.
      def include_all_in_scope
        Module.new.tap do |mod|
          namespace_stackable(:helpers).each { |mod_to_include| mod.include mod_to_include }
          change!
        end
      end

      # Ensure the helper module exposes Grape's Boolean shorthand type.
      def define_boolean_in_mod(mod)
        return if defined? mod::Boolean

        mod.const_set(:Boolean, Grape::API::Boolean)
      end

      # Extend +mod+ with BaseHelper (once) and notify it of the owning API.
      def inject_api_helpers_to_mod(mod, &block)
        mod.extend(BaseHelper) unless mod.is_a?(BaseHelper)
        yield if block

        mod.api_changed(self)
      end

      # This module extends user defined helpers
      # to provide some API-specific functionality.
      module BaseHelper
        attr_accessor :api

        # Store a named, reusable params block (later consumed via `use :name`
        # inside a `params` scope).
        def params(name, &block)
          @named_params ||= {}
          @named_params[name] = block
        end

        # Called whenever the helper module is (re-)attached to an API.
        def api_changed(new_api)
          @api = new_api
          process_named_params
        end

        protected

        # Push any named params blocks into the owning API's settings stack.
        def process_named_params
          return if @named_params.blank?

          api.inheritable_setting.namespace_stackable[:named_params] = @named_params
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/parameters.rb | lib/grape/dsl/parameters.rb | # frozen_string_literal: true
module Grape
module DSL
# Defines DSL methods, meant to be applied to a ParamsScope, which define
# and describe the parameters accepted by an endpoint, or all endpoints
# within a namespace.
module Parameters
# Set the module used to build the request.params.
#
# @param build_with the ParamBuilder module to use when building request.params
# Available builders are:
#
# * Grape::Extensions::ActiveSupport::HashWithIndifferentAccess::ParamBuilder (default)
# * Grape::Extensions::Hash::ParamBuilder
# * Grape::Extensions::Hashie::Mash::ParamBuilder
#
# @example
#
# require 'grape/extenstions/hashie_mash'
# class API < Grape::API
# desc "Get collection"
# params do
# build_with :hashie_mash
# requires :user_id, type: Integer
# end
# get do
# params['user_id']
# end
# end
def build_with(build_with)
@api.inheritable_setting.namespace_inheritable[:build_params_with] = build_with
end
# Include reusable params rules among current.
# You can define reusable params with helpers method.
#
# @example
#
# class API < Grape::API
# helpers do
# params :pagination do
# optional :page, type: Integer
# optional :per_page, type: Integer
# end
# end
#
# desc "Get collection"
# params do
# use :pagination
# end
# get do
# Collection.page(params[:page]).per(params[:per_page])
# end
# end
def use(*names, **options)
named_params = @api.inheritable_setting.namespace_stackable_with_hash(:named_params) || {}
names.each do |name|
params_block = named_params.fetch(name) do
raise "Params :#{name} not found!"
end
if options.empty?
instance_exec(options, ¶ms_block)
else
instance_exec(**options, ¶ms_block)
end
end
end
alias use_scope use
alias includes use
# Require one or more parameters for the current endpoint.
#
# @param attrs list of parameters names, or, if :using is
# passed as an option, which keys to include (:all or :none) from
# the :using hash. The last key can be a hash, which specifies
# options for the parameters
# @option attrs :type [Class] the type to coerce this parameter to before
# passing it to the endpoint. See {Grape::Validations::Types} for a list of
# types that are supported automatically. Custom classes may be used
# where they define a class-level `::parse` method, or in conjunction
# with the `:coerce_with` parameter. `JSON` may be supplied to denote
# `JSON`-formatted objects or arrays of objects. `Array[JSON]` accepts
# the same values as `JSON` but will wrap single objects in an `Array`.
# @option attrs :types [Array<Class>] may be supplied in place of +:type+
# to declare an attribute that has multiple allowed types. See
# {Validations::Types::MultipleTypeCoercer} for more details on coercion
# and validation rules for variant-type parameters.
# @option attrs :desc [String] description to document this parameter
# @option attrs :default [Object] default value, if parameter is optional
# @option attrs :values [Array] permissable values for this field. If any
# other value is given, it will be handled as a validation error
# @option attrs :using [Hash[Symbol => Hash]] a hash defining keys and
# options, like that returned by {Grape::Entity#documentation}. The value
# of each key is an options hash accepting the same parameters
# @option attrs :except [Array[Symbol]] a list of keys to exclude from
# the :using Hash. The meaning of this depends on if :all or :none was
# passed; :all + :except will make the :except fields optional, whereas
# :none + :except will make the :except fields required
# @option attrs :coerce_with [#parse, #call] method to be used when coercing
# the parameter to the type named by `attrs[:type]`. Any class or object
# that defines `::parse` or `::call` may be used.
#
# @example
#
# params do
# # Basic usage: require a parameter of a certain type
# requires :user_id, type: Integer
#
# # You don't need to specify type; String is default
# requires :foo
#
# # Multiple params can be specified at once if they share
# # the same options.
# requires :x, :y, :z, type: Date
#
# # Nested parameters can be handled as hashes. You must
# # pass in a block, within which you can use any of the
# # parameters DSL methods.
# requires :user, type: Hash do
# requires :name, type: String
# end
# end
def requires(*attrs, **opts, &block)
opts[:presence] = { value: true, message: opts[:message] }
opts = @group.deep_merge(opts) if instance_variable_defined?(:@group) && @group
if opts[:using]
require_required_and_optional_fields(attrs.first, opts)
else
validate_attributes(attrs, opts, &block)
block ? new_scope(attrs, opts, &block) : push_declared_params(attrs, opts.slice(:as))
end
end
# Allow, but don't require, one or more parameters for the current
# endpoint.
# @param (see #requires)
# @option (see #requires)
def optional(*attrs, **opts, &block)
type = opts[:type]
opts = @group.deep_merge(opts) if instance_variable_defined?(:@group) && @group
# check type for optional parameter group
if attrs && block
raise Grape::Exceptions::MissingGroupType if type.nil?
raise Grape::Exceptions::UnsupportedGroupType unless Grape::Validations::Types.group?(type)
end
if opts[:using]
require_optional_fields(attrs.first, opts)
else
validate_attributes(attrs, opts, &block)
block ? new_scope(attrs, opts, true, &block) : push_declared_params(attrs, opts.slice(:as))
end
end
# Define common settings for one or more parameters
# @param (see #requires)
# @option (see #requires)
def with(*attrs, &)
new_group_attrs = [@group, attrs.clone.first].compact.reduce(&:deep_merge)
new_group_scope([new_group_attrs], &)
end
%i[mutually_exclusive exactly_one_of at_least_one_of all_or_none_of].each do |validator|
define_method validator do |*attrs, message: nil|
validates(attrs, validator => { value: true, message: message })
end
end
# Define a block of validations which should be applied if and only if
# the given parameter is present. The parameters are not nested.
# @param attr [Symbol] the parameter which, if present, triggers the
# validations
# @raise Grape::Exceptions::UnknownParameter if `attr` has not been
# defined in this scope yet
# @yield a parameter definition DSL
def given(*attrs, &)
attrs.each do |attr|
proxy_attr = first_hash_key_or_param(attr)
raise Grape::Exceptions::UnknownParameter.new(proxy_attr) unless declared_param?(proxy_attr)
end
new_lateral_scope(dependent_on: attrs, &)
end
# Test for whether a certain parameter has been defined in this params
# block yet.
# @return [Boolean] whether the parameter has been defined
def declared_param?(param)
if lateral?
# Elements of @declared_params of lateral scope are pushed in @parent. So check them in @parent.
@parent.declared_param?(param)
else
# @declared_params also includes hashes of options and such, but those
# won't be flattened out.
@declared_params.flatten.any? do |declared_param_attr|
first_hash_key_or_param(declared_param_attr.key) == param
end
end
end
# +group+ is an alias of +requires+ for declaring nested parameter groups.
alias group requires

# Sentinel returned by #map_params for optional values missing inside an
# array, so element indexing stays aligned during validation.
class EmptyOptionalValue; end # rubocop:disable Lint/EmptyClass
# Extract the sub-parameters relevant to +element+ out of +params+,
# recursing into arrays so each member is mapped individually.
# A missing optional value inside an array is replaced with the
# EmptyOptionalValue sentinel; anything unmappable becomes +{}+.
def map_params(params, element, is_array = false)
  case params
  when Array
    params.map { |entry| map_params(entry, element, true) }
  when Hash
    params[element] || (@optional && is_array ? EmptyOptionalValue : {})
  else
    params == EmptyOptionalValue ? EmptyOptionalValue : {}
  end
end
# @param params [Hash] initial hash of parameters
# @return hash of parameters relevant for the current scope
# @api private
def params(params)
  # Narrow through the parent scope first (if any), then down to this
  # scope's own element.
  if instance_variable_defined?(:@parent) && @parent
    params = @parent.params_meeting_dependency.presence || @parent.params(params)
  end
  params = map_params(params, @element) if instance_variable_defined?(:@element) && @element
  params
end
private

# Return the first key when given a Hash (as produced by nested dependency
# declarations like +{ a: [:b] }+), otherwise the parameter itself.
def first_hash_key_or_param(parameter)
  return parameter.keys.first if parameter.is_a?(Hash)

  parameter
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/settings.rb | lib/grape/dsl/settings.rb | # frozen_string_literal: true
module Grape
  module DSL
    # Keeps track of settings (implemented as key-value pairs, grouped by
    # types) in two contexts: top-level settings, which apply globally no
    # matter where they're defined, and inheritable settings, which apply
    # only in the current scope and scopes nested under it.
    module Settings
      attr_writer :inheritable_setting

      # Fetch our top-level settings, which apply to all endpoints in the API.
      def top_level_setting
        @top_level_setting ||= begin
          setting = Grape::Util::InheritableSetting.new
          # Doesn't try to inherit settings from +Grape::API::Instance+ which also responds to
          # +inheritable_setting+, however, it doesn't contain any user-defined settings.
          # Otherwise, it would lead to an extra instance of +Grape::Util::InheritableSetting+
          # in the chain for every endpoint.
          setting.inherit_from superclass.inheritable_setting if defined?(superclass) && superclass.respond_to?(:inheritable_setting) && superclass != Grape::API::Instance
          setting
        end
      end

      # Fetch our current inheritable settings, which are inherited by
      # nested scopes but not shared across siblings.
      def inheritable_setting
        @inheritable_setting ||= begin
          settings = Grape::Util::InheritableSetting.new
          settings.inherit_from top_level_setting
          settings
        end
      end

      # Read (when +value+ is omitted) or write a key in the global group.
      def global_setting(key, value = nil)
        get_or_set inheritable_setting.global, key, value
      end

      # Read or write a key in the route group.
      def route_setting(key, value = nil)
        get_or_set inheritable_setting.route, key, value
      end

      # Read or write a key in the namespace group.
      def namespace_setting(key, value = nil)
        get_or_set inheritable_setting.namespace, key, value
      end

      private

      # Execute the block within a context where our inheritable settings are
      # forked to a new copy (see #namespace_start), restoring the parent
      # settings and resetting validations afterwards.
      def within_namespace
        forked_settings = Grape::Util::InheritableSetting.new
        forked_settings.inherit_from inheritable_setting
        @inheritable_setting = forked_settings

        result = yield

        inheritable_setting.route_end
        @inheritable_setting = inheritable_setting.parent
        reset_validations!

        result
      end

      # Return setting[key] when no value is given; otherwise store +value+
      # under +key+. Note that a nil value therefore always reads, never
      # writes.
      def get_or_set(setting, key, value)
        if value.nil?
          setting[key]
        else
          setting[key] = value
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/request_response.rb | lib/grape/dsl/request_response.rb | # frozen_string_literal: true
module Grape
  module DSL
    # DSL methods controlling request parsing and response rendering:
    # formats, content types, parsers, (error) formatters, rescue handlers
    # and entity representations. Everything is recorded in the endpoint's
    # inheritable settings.
    module RequestResponse
      # Specify the default format for the API's serializers.
      # May be `:json` or `:txt` (default).
      def default_format(new_format = nil)
        return inheritable_setting.namespace_inheritable[:default_format] if new_format.nil?

        inheritable_setting.namespace_inheritable[:default_format] = new_format.to_sym
      end

      # Specify the format for the API's serializers.
      # May be `:json`, `:xml`, `:txt`, etc.
      #
      # Side effects: also installs the matching default error formatter and
      # restricts the served content types to the chosen format.
      def format(new_format = nil)
        return inheritable_setting.namespace_inheritable[:format] if new_format.nil?

        symbolic_new_format = new_format.to_sym
        inheritable_setting.namespace_inheritable[:format] = symbolic_new_format
        inheritable_setting.namespace_inheritable[:default_error_formatter] = Grape::ErrorFormatter.formatter_for(symbolic_new_format)
        content_type = content_types[symbolic_new_format]
        # A format without a registered MIME type cannot be served.
        raise Grape::Exceptions::MissingMimeType.new(new_format) unless content_type

        inheritable_setting.namespace_stackable[:content_types] = { symbolic_new_format => content_type }
      end

      # Specify a custom formatter for a content-type.
      def formatter(content_type, new_formatter)
        inheritable_setting.namespace_stackable[:formatters] = { content_type.to_sym => new_formatter }
      end

      # Specify a custom parser for a content-type.
      def parser(content_type, new_parser)
        inheritable_setting.namespace_stackable[:parsers] = { content_type.to_sym => new_parser }
      end

      # Specify a default error formatter, looked up by registered name
      # (e.g. :json).
      def default_error_formatter(new_formatter_name = nil)
        return inheritable_setting.namespace_inheritable[:default_error_formatter] if new_formatter_name.nil?

        new_formatter = Grape::ErrorFormatter.formatter_for(new_formatter_name)
        inheritable_setting.namespace_inheritable[:default_error_formatter] = new_formatter
      end

      # Register an error formatter for a specific format. +options+ is
      # either the formatter itself or a Hash with a +with:+ key.
      def error_formatter(format, options)
        formatter = if options.is_a?(Hash) && options.key?(:with)
                      options[:with]
                    else
                      options
                    end

        inheritable_setting.namespace_stackable[:error_formatters] = { format.to_sym => formatter }
      end

      # Specify additional content-types, e.g.:
      #   content_type :xls, 'application/vnd.ms-excel'
      def content_type(key, val)
        inheritable_setting.namespace_stackable[:content_types] = { key.to_sym => val }
      end

      # All available content types.
      def content_types
        c_types = inheritable_setting.namespace_stackable_with_hash(:content_types)
        Grape::ContentTypes.content_types_for c_types
      end

      # Specify the default status code for errors.
      def default_error_status(new_status = nil)
        return inheritable_setting.namespace_inheritable[:default_error_status] if new_status.nil?

        inheritable_setting.namespace_inheritable[:default_error_status] = new_status
      end

      # Allows you to rescue certain exceptions that occur to return
      # a grape error rather than raising all the way to the
      # server level.
      #
      # @example Rescue from custom exceptions
      #     class ExampleAPI < Grape::API
      #       class CustomError < StandardError; end
      #
      #       rescue_from CustomError
      #     end
      #
      # @overload rescue_from(*exception_classes, **options)
      #   @param [Array] exception_classes A list of classes that you want to rescue, or
      #     the symbol :all to rescue from all exceptions.
      #   @param [Block] block Execution block to handle the given exception.
      #   @param [Hash] options Options for the rescue usage.
      #   @option options [Boolean] :backtrace Include a backtrace in the rescue response.
      #   @option options [Boolean] :rescue_subclasses Also rescue subclasses of exception classes
      #   @param [Proc] handler Execution proc to handle the given exception as an
      #     alternative to passing a block.
      def rescue_from(*args, **options, &block)
        # Handler precedence: trailing Proc argument, then block, then the
        # :with option extracted below.
        if args.last.is_a?(Proc)
          handler = args.pop
        elsif block
          handler = block
        end

        raise ArgumentError, 'both :with option and block cannot be passed' if block && options.key?(:with)

        handler ||= extract_with(options)

        if args.include?(:all)
          inheritable_setting.namespace_inheritable[:rescue_all] = true
          inheritable_setting.namespace_inheritable[:all_rescue_handler] = handler
        elsif args.include?(:grape_exceptions)
          inheritable_setting.namespace_inheritable[:rescue_all] = true
          inheritable_setting.namespace_inheritable[:rescue_grape_exceptions] = true
          inheritable_setting.namespace_inheritable[:grape_exceptions_rescue_handler] = handler
        else
          # :rescue_subclasses defaults to true when not specified.
          handler_type =
            case options[:rescue_subclasses]
            when nil, true
              :rescue_handlers
            else
              :base_only_rescue_handlers
            end

          inheritable_setting.namespace_reverse_stackable[handler_type] = args.to_h { |arg| [arg, handler] }
        end

        inheritable_setting.namespace_stackable[:rescue_options] = options
      end

      # Allows you to specify a default representation entity for a
      # class. This allows you to map your models to their respective
      # entities once and then simply call `present` with the model.
      #
      # @example
      #   class ExampleAPI < Grape::API
      #     represent User, with: Entity::User
      #
      #     get '/me' do
      #       present current_user # with: Entity::User is assumed
      #     end
      #   end
      #
      # Note that Grape will automatically go up the class ancestry to
      # try to find a representing entity, so if you, for example, define
      # an entity to represent `Object` then all presented objects will
      # bubble up and utilize the entity provided on that `represent` call.
      #
      # @param model_class [Class] The model class that will be represented.
      # @option options [Class] :with The entity class that will represent the model.
      def represent(model_class, options)
        raise Grape::Exceptions::InvalidWithOptionForRepresent.new unless options[:with].is_a?(Class)

        inheritable_setting.namespace_stackable[:representations] = { model_class => options[:with] }
      end

      private

      # Normalize the :with option of #rescue_from into a handler: a Proc is
      # used as-is, a Symbol/String becomes a method name to be called later.
      def extract_with(options)
        return unless options.key?(:with)

        with_option = options.delete(:with)
        return with_option if with_option.instance_of?(Proc)
        return with_option.to_sym if with_option.instance_of?(Symbol) || with_option.instance_of?(String)

        raise ArgumentError, "with: #{with_option.class}, expected Symbol, String or Proc"
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/dsl/headers.rb | lib/grape/dsl/headers.rb | # frozen_string_literal: true
module Grape
  module DSL
    module Headers
      # This method has four responsibilities:
      # 1. Set a specific header value by key (key and value given)
      # 2. Delete a specific header key-value pair (key given, value nil —
      #    Hash#delete returns the removed value)
      # 3. Retrieve all headers that have been set (no arguments)
      # 4. Lazily initialize the header store on first access
      def header(key = nil, val = nil)
        if key
          val ? header[key] = val : header.delete(key)
        else
          @header ||= Grape::Util::Header.new
        end
      end
      alias headers header
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/error_formatter/txt.rb | lib/grape/error_formatter/txt.rb | # frozen_string_literal: true
module Grape
  module ErrorFormatter
    # Plain-text error formatter: renders the message (or the whole
    # structured hash as JSON when no :message key exists), appending
    # backtrace and original-exception sections when present.
    class Txt < Base
      def self.format_structured_message(structured_message)
        message = structured_message[:message] || Grape::Json.dump(structured_message)
        lines = Array.wrap(message)
        if structured_message.key?(:backtrace)
          lines << 'backtrace:'
          lines.concat(structured_message[:backtrace])
        end
        if structured_message.key?(:original_exception)
          lines << 'original exception:'
          lines << structured_message[:original_exception]
        end
        lines.join("\r\n ")
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/error_formatter/xml.rb | lib/grape/error_formatter/xml.rb | # frozen_string_literal: true
module Grape
  module ErrorFormatter
    # XML error formatter: serializes via #to_xml (under an +error+ root)
    # when the message supports it, falling back to #to_s.
    class Xml < Base
      def self.format_structured_message(structured_message)
        return structured_message.to_xml(root: :error) if structured_message.respond_to?(:to_xml)

        structured_message.to_s
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/error_formatter/json.rb | lib/grape/error_formatter/json.rb | # frozen_string_literal: true
module Grape
  module ErrorFormatter
    # JSON error formatter.
    class Json < Base
      class << self
        # Serialize the structured message to a JSON string.
        def format_structured_message(structured_message)
          ::Grape::Json.dump(structured_message)
        end

        private

        # Hashes pass through untouched, validation errors provide their own
        # JSON form, everything else is wrapped under an +error+ key.
        def wrap_message(message)
          case message
          when Hash
            message
          when Exceptions::ValidationErrors
            message.as_json
          else
            { error: ensure_utf8(message) }
          end
        end

        # Re-encode as UTF-8 with replacement of invalid/undefined bytes so
        # serialization cannot fail on malformed input.
        def ensure_utf8(message)
          return message unless message.respond_to? :encode

          message.encode('UTF-8', invalid: :replace, undef: :replace)
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/error_formatter/base.rb | lib/grape/error_formatter/base.rb | # frozen_string_literal: true
module Grape
  module ErrorFormatter
    # Abstract base class for error formatters. Subclasses implement
    # +format_structured_message+ and are auto-registered through the
    # +inherited+ hook below.
    class Base
      class << self
        # Entry point: present the message, optionally merge backtrace and
        # original-exception details (per :rescue_options), then delegate
        # final serialization to the subclass.
        def call(message, backtrace, options = {}, env = nil, original_exception = nil)
          merge_backtrace = backtrace.present? && options.dig(:rescue_options, :backtrace)
          merge_original_exception = original_exception && options.dig(:rescue_options, :original_exception)
          wrapped_message = wrap_message(present(message, env))
          if wrapped_message.is_a?(Hash)
            wrapped_message[:backtrace] = backtrace if merge_backtrace
            wrapped_message[:original_exception] = original_exception.inspect if merge_original_exception
          end
          format_structured_message(wrapped_message)
        end

        # Run the message through an entity presenter when one can be
        # resolved — either via a +with:+ key inside a Hash message or from
        # the route's documented http_codes.
        def present(message, env)
          present_options = {}
          presented_message = message
          if presented_message.is_a?(Hash)
            presented_message = presented_message.dup
            present_options[:with] = presented_message.delete(:with)
          end
          presenter = env[Grape::Env::API_ENDPOINT].entity_class_for_obj(presented_message, present_options)
          unless presenter || env[Grape::Env::GRAPE_ROUTING_ARGS].nil?
            # env['api.endpoint'].route does not work when the error occurs within a middleware
            # the Endpoint does not have a valid env at this moment
            http_codes = env[Grape::Env::GRAPE_ROUTING_ARGS][:route_info].http_codes || []
            # The trailing `if` modifier guards the whole multi-line find
            # block; found_code stays nil when the request is absent.
            found_code = http_codes.find do |http_code|
              (http_code[0].to_i == env[Grape::Env::API_ENDPOINT].status) && http_code[2].respond_to?(:represent)
            end if env[Grape::Env::API_ENDPOINT].request
            presenter = found_code[2] if found_code
          end
          if presenter
            embeds = { env: env }
            embeds[:version] = env[Grape::Env::API_VERSION] if env.key?(Grape::Env::API_VERSION)
            presented_message = presenter.represent(presented_message, embeds).serializable_hash
          end
          presented_message
        end

        # Wrap non-Hash messages so formatters always receive a Hash.
        def wrap_message(message)
          return message if message.is_a?(Hash)

          { message: message }
        end

        # Subclass responsibility.
        def format_structured_message(_structured_message)
          raise NotImplementedError
        end

        private

        # Auto-register every concrete formatter subclass.
        def inherited(klass)
          super
          ErrorFormatter.register(klass)
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/error_formatter/serializable_hash.rb | lib/grape/error_formatter/serializable_hash.rb | # frozen_string_literal: true
module Grape
  module ErrorFormatter
    # Behaves exactly like the JSON formatter; exists as a separate class so
    # it is registered under its own name (see Base.inherited).
    class SerializableHash < Json; end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/api/instance.rb | lib/grape/api/instance.rb | # frozen_string_literal: true
module Grape
  class API
    # The API Instance class, is the engine behind Grape::API. Each class that inherits
    # from this will represent a different API instance
    class Instance
      extend Grape::DSL::Settings
      extend Grape::DSL::Desc
      extend Grape::DSL::Validations
      extend Grape::DSL::Callbacks
      extend Grape::DSL::Logger
      extend Grape::DSL::Middleware
      extend Grape::DSL::RequestResponse
      extend Grape::DSL::Routing
      extend Grape::DSL::Helpers
      extend Grape::Middleware::Auth::DSL

      Boolean = Grape::API::Boolean

      class << self
        extend Forwardable
        attr_accessor :configuration

        def_delegators :@base, :to_s

        # Link this instance to its owning Grape::API class and register it
        # in the owner's instance list.
        def base=(grape_api)
          @base = grape_api
          grape_api.instances << self
        end

        def base_instance?
          self == @base.base_instance
        end

        # A class-level lock to ensure the API is not compiled by multiple
        # threads simultaneously within the same process.
        LOCK = Mutex.new

        # Clears all defined routes, endpoints, etc., on this API.
        def reset!
          reset_endpoints!
          reset_routes!
          reset_validations!
        end

        # This is the interface point between Rack and Grape; it accepts a request
        # from Rack and ultimately returns an array of three values: the status,
        # the headers, and the body. See [the rack specification]
        # (http://www.rubydoc.info/github/rack/rack/master/file/SPEC) for more.
        def call(env)
          compile!
          @instance.call(env)
        end

        # Lazily build the compiled instance, guarded by LOCK. The unguarded
        # early return is a fast path once compilation has happened.
        def compile!
          return if @instance

          LOCK.synchronize { @instance ||= new }
        end

        # see Grape::Router#recognize_path
        def recognize_path(path)
          compile!
          @instance.router.recognize_path(path)
        end

        # Wipe the compiled API so we can recompile after changes were made.
        def change!
          @instance = nil
        end

        protected

        # Adopt settings from another settings chain and propagate them to
        # every endpoint, invalidating compiled routes.
        def inherit_settings(other_settings)
          top_level_setting.inherit_from other_settings.point_in_time_copy

          # Propagate any inherited params down to our endpoints, and reset any
          # compiled routes.
          endpoints.each do |e|
            e.inherit_settings(top_level_setting.namespace_stackable)
            e.reset_routes!
          end
          reset_routes!
        end

        private

        # Each subclass starts with a clean slate and a copy of the logger.
        def inherited(subclass)
          super
          subclass.reset!
          subclass.logger logger.clone
        end
      end

      attr_reader :router

      # Builds the routes from the defined endpoints, effectively compiling
      # this API into a usable form.
      def initialize
        @router = Router.new
        add_head_not_allowed_methods_and_options_methods
        self.class.endpoints.each do |endpoint|
          endpoint.mount_in(@router)
        end

        @router.compile!
        @router.freeze
      end

      # Handle a request. See Rack documentation for what `env` is.
      def call(env)
        status, headers, response = @router.call(env)
        unless cascade?
          headers = Grape::Util::Header.new.merge(headers)
          headers.delete('X-Cascade')
        end

        [status, headers, response]
      end

      # Some requests may return a HTTP 404 error if grape cannot find a matching
      # route. In this case, Grape::Router adds a X-Cascade header to the response
      # and sets it to 'pass', indicating to grape's parents they should keep
      # looking for a matching route on other resources.
      #
      # In some applications (e.g. mounting grape on rails), one might need to trap
      # errors from reaching upstream. This is effectivelly done by unsetting
      # X-Cascade. Default :cascade is true.
      def cascade?
        namespace_inheritable = self.class.inheritable_setting.namespace_inheritable
        return namespace_inheritable[:cascade] if namespace_inheritable.key?(:cascade)
        return namespace_inheritable[:version_options][:cascade] if namespace_inheritable[:version_options]&.key?(:cascade)

        true
      end

      reset!

      private

      # For every resource add a 'OPTIONS' route that returns an HTTP 204 response
      # with a list of HTTP methods that can be called. Also add a route that
      # will return an HTTP 405 response for any HTTP method that the resource
      # cannot handle.
      def add_head_not_allowed_methods_and_options_methods
        # The paths we collected are prepared (cf. Path#prepare), so they
        # contain already versioning information when using path versioning.
        all_routes = self.class.endpoints.flat_map(&:routes)
        # Disable versioning so adding a route won't prepend versioning
        # informations again.
        without_root_prefix_and_versioning { collect_route_config_per_pattern(all_routes) }
      end

      # Group routes by path pattern and register a greedy catch-all route
      # per pattern carrying the Allow header for 405/OPTIONS handling.
      def collect_route_config_per_pattern(all_routes)
        routes_by_regexp = all_routes.group_by(&:pattern_regexp)
        namespace_inheritable = self.class.inheritable_setting.namespace_inheritable

        # Build the configuration based on the first endpoint and the collection of methods supported.
        routes_by_regexp.each_value do |routes|
          next if routes.any? { |route| route.request_method == '*' }

          last_route = routes.last # Most of the configuration is taken from the last endpoint
          allowed_methods = routes.map(&:request_method)
          allowed_methods |= [Rack::HEAD] if !namespace_inheritable[:do_not_route_head] && allowed_methods.include?(Rack::GET)
          allow_header = namespace_inheritable[:do_not_route_options] ? allowed_methods : [Rack::OPTIONS] | allowed_methods
          last_route.app.options[:options_route_enabled] = true unless namespace_inheritable[:do_not_route_options] || allowed_methods.include?(Rack::OPTIONS)
          greedy_route = Grape::Router::GreedyRoute.new(last_route.pattern, endpoint: last_route.app, allow_header: allow_header)
          @router.associate_routes(greedy_route)
        end
      end

      ROOT_PREFIX_VERSIONING_KEY = %i[version version_options root_prefix].freeze
      private_constant :ROOT_PREFIX_VERSIONING_KEY

      # Allows definition of endpoints that ignore the versioning configuration
      # used by the rest of your API. The deleted settings are restored in
      # the ensure clause even if the block raises.
      def without_root_prefix_and_versioning
        inheritable_setting = self.class.inheritable_setting
        deleted_values = inheritable_setting.namespace_inheritable.delete(*ROOT_PREFIX_VERSIONING_KEY)
        yield
      ensure
        ROOT_PREFIX_VERSIONING_KEY.each_with_index do |key, index|
          inheritable_setting.namespace_inheritable[key] = deleted_values[index]
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/single_attribute_iterator.rb | lib/grape/validations/single_attribute_iterator.rb | # frozen_string_literal: true
module Grape
  module Validations
    # Iterator that yields each (value, attribute) pair separately, used by
    # validators that operate on one attribute at a time.
    class SingleAttributeIterator < AttributesIterator
      private

      # Yield every requested attribute for +val+, together with a flag
      # telling the validator whether the value is empty/nil.
      def yield_attributes(val, attrs)
        return if skip?(val)

        val_empty = empty?(val)
        attrs.each do |attr_name|
          yield val, attr_name, val_empty
        end
      end

      # Primitives like Integers and Booleans don't respond to +empty?+.
      # It could be possible to use +blank?+ instead, but
      #
      #   false.blank?
      #   => true
      def empty?(val)
        if val.respond_to?(:empty?)
          val.empty?
        else
          val.nil?
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/attributes_iterator.rb | lib/grape/validations/attributes_iterator.rb | # frozen_string_literal: true
module Grape
  module Validations
    # Base class for iterating over the attributes a validator targets
    # within a params scope, recursing into nested arrays while keeping
    # array index bookkeeping in sync on the scope chain.
    class AttributesIterator
      include Enumerable

      attr_reader :scope

      # @param validator [Object] validator supplying the target attrs
      # @param scope [ParamsScope] the scope the validator was declared in
      # @param params [Hash, Array] raw request parameters
      def initialize(validator, scope, params)
        @scope = scope
        @attrs = validator.attrs
        @original_params = scope.params(params)
        @params = Array.wrap(@original_params)
      end

      def each(&)
        do_each(@params, &) # because we need recursion for nested arrays
      end

      private

      def do_each(params_to_process, parent_indicies = [], &block)
        @scope.reset_index # gets updated depending on the size of params_to_process
        params_to_process.each_with_index do |resource_params, index|
          # when we get arrays of arrays it means that target element located inside array
          # we need this because we want to know parent arrays indicies
          if resource_params.is_a?(Array)
            do_each(resource_params, [index] + parent_indicies, &block)
            next
          end

          if @scope.type == Array
            next unless @original_params.is_a?(Array) # do not validate content of array if it isn't array

            # fill current and parent scopes with correct array indicies
            parent_scope = @scope.parent
            parent_indicies.each do |parent_index|
              parent_scope.index = parent_index
              parent_scope = parent_scope.parent
            end
            @scope.index = index
          end

          yield_attributes(resource_params, @attrs, &block)
        end
      end

      # Subclass responsibility: yield validator-specific arguments.
      def yield_attributes(_resource_params, _attrs)
        raise NotImplementedError
      end

      # Special case so we can ignore subtrees where optional values are
      # missing lower down. Unfortunately this cannot be removed at the
      # parameter parsing stage, as the sentinels are required to ensure
      # the correct indexing is maintained.
      def skip?(val)
        val == Grape::DSL::Parameters::EmptyOptionalValue
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/multiple_attributes_iterator.rb | lib/grape/validations/multiple_attributes_iterator.rb | # frozen_string_literal: true
module Grape
  module Validations
    # Iterator that hands the whole resource params to the validator at
    # once, used by validators that inspect several attributes together.
    class MultipleAttributesIterator < AttributesIterator
      private

      # Yield the resource params unless they are the skip sentinel.
      def yield_attributes(resource_params, _attrs)
        return if skip?(resource_params)

        yield resource_params
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/params_documentation.rb | lib/grape/validations/params_documentation.rb | # frozen_string_literal: true
module Grape
  module Validations
    # Documents parameters of an endpoint. If documentation isn't needed (for
    # instance, it is an internal API), the module only cleans up attributes
    # to avoid junk in RAM.
    module ParamsDocumentation
      # Record documentation for +attrs+ under the namespace-stackable
      # :params setting, or strip the documentation keys when the
      # :do_not_document setting is enabled.
      def document_params(attrs, validations, type = nil, values = nil, except_values = nil)
        return validations.except!(:desc, :description, :documentation) if @api.inheritable_setting.namespace_inheritable[:do_not_document]

        documented_attrs = {}
        attrs.each do |name|
          documented_attrs[full_name(name)] = extract_details(validations, type, values, except_values)
        end
        @api.inheritable_setting.namespace_stackable[:params] = documented_attrs
      end

      private

      # Build the documentation hash for a single parameter. Note this
      # mutates +validations+: the desc/description/documentation keys are
      # removed so they don't leak into validator options.
      def extract_details(validations, type, values, except_values)
        details = { required: validations.key?(:presence) }
        details[:type] = TypeCache[type] if type
        details[:values] = values if values
        details[:except_values] = except_values if except_values
        details[:default] = validations[:default] if validations.key?(:default)
        if validations.key?(:length)
          details[:min_length] = validations[:length][:min] if validations[:length].key?(:min)
          details[:max_length] = validations[:length][:max] if validations[:length].key?(:max)
        end
        desc = validations.delete(:desc) || validations.delete(:description)
        details[:desc] = desc if desc
        documentation = validations.delete(:documentation)
        details[:documentation] = documentation if documentation
        details
      end

      # Memoizes the String representation of documented parameter types.
      class TypeCache < Grape::Util::Cache
        def initialize
          super
          @cache = Hash.new do |h, type|
            h[type] = type.to_s
          end
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/contract_scope.rb | lib/grape/validations/contract_scope.rb | # frozen_string_literal: true
module Grape
  module Validations
    # Hooks a Dry::Schema / Dry::Validation contract into an endpoint as an
    # alternative to the classic +params+ DSL.
    class ContractScope
      # Declare the contract to be used for the endpoint's parameters.
      # @param api [API] the API endpoint to modify.
      # @param contract the contract or schema to be used for validation. Optional.
      # @yield a block yielding a new schema class. Optional.
      def initialize(api, contract = nil, &block)
        # When block is passed, the first arg is either schema or nil.
        contract = Dry::Schema.Params(parent: contract, &block) if block

        if contract.respond_to?(:schema)
          # It's a Dry::Validation::Contract, then.
          contract = contract.new
          key_map = contract.schema.key_map
        else
          # Dry::Schema::Processor, hopefully.
          key_map = contract.key_map
        end

        # The key map is what #declared uses to filter params on the
        # endpoint side.
        api.inheritable_setting.namespace_stackable[:contract_key_map] = key_map

        validator_options = {
          validator_class: Grape::Validations.require_validator(:contract_scope),
          opts: { schema: contract, fail_fast: false }
        }

        api.inheritable_setting.namespace_stackable[:validations] = validator_options
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types.rb | lib/grape/validations/types.rb | # frozen_string_literal: true
module Grape
module Validations
# Module for code related to grape's system for
# coercion and type validation of incoming request
# parameters.
#
# Grape uses a number of tests and assertions to
# work out exactly how a parameter should be handled,
# based on the +type+ and +coerce_with+ options that
# may be supplied to {Grape::Dsl::Parameters#requires}
# and {Grape::Dsl::Parameters#optional}. The main
# entry point for this process is {Types.build_coercer}.
module Types
module_function
PRIMITIVES = [
# Numerical
Integer,
Float,
BigDecimal,
Numeric,
# Date/time
Date,
DateTime,
Time,
# Misc
Grape::API::Boolean,
String,
Symbol,
TrueClass,
FalseClass
].freeze
# Types representing data structures.
STRUCTURES = [Hash, Array, Set].freeze
SPECIAL = {
::JSON => Json,
Array[JSON] => JsonArray,
::File => File,
Rack::Multipart::UploadedFile => File
}.freeze
GROUPS = [Array, Hash, JSON, Array[JSON]].freeze
# Is the given class a primitive type as recognized by Grape?
#
# @param type [Class] type to check
# @return [Boolean] whether or not the type is known by Grape as a valid
# type for a single value
def primitive?(type)
PRIMITIVES.include?(type)
end
# Is the given class a standard data structure (collection or map)
# as recognized by Grape?
#
# @param type [Class] type to check
# @return [Boolean] whether or not the type is known by Grape as a valid
# data structure type
def structure?(type)
STRUCTURES.include?(type)
end
# Is the declared type in fact an array of multiple allowed types?
# For example the declaration +types: [Integer,String]+ will attempt
# first to coerce given values to integer, but will also accept any
# other string.
#
# @param type [Array<Class>,Set<Class>] type (or type list!) to check
# @return [Boolean] +true+ if the given value will be treated as
# a list of types.
def multiple?(type)
(type.is_a?(Array) || type.is_a?(Set)) && type.size > 1
end
# Does Grape provide special coercion and validation
# routines for the given class? This does not include
# automatic handling for primitives, structures and
# otherwise recognized types. See {Types::SPECIAL}.
#
# @param type [Class] type to check
# @return [Boolean] +true+ if special routines are available
def special?(type)
SPECIAL.key? type
end
# Is the declared type a supported group type?
# Currently supported group types are Array, Hash, JSON, and Array[JSON]
#
# @param type [Array<Class>,Class] type to check
# @return [Boolean] +true+ if the type is a supported group type
def group?(type)
GROUPS.include? type
end
# A valid custom type must implement a class-level `parse` method, taking
# one String argument and returning the parsed value in its correct type.
#
# @param type [Class] type to check
# @return [Boolean] whether or not the type can be used as a custom type
def custom?(type)
!primitive?(type) &&
!structure?(type) &&
!multiple?(type) &&
type.respond_to?(:parse) &&
type.method(:parse).arity == 1
end
# Is the declared type an +Array+ or +Set+ of a {#custom?} type?
#
# @param type [Array<Class>,Class] type to check
# @return [Boolean] true if +type+ is a collection of a type that implements
# its own +#parse+ method.
def collection_of_custom?(type)
(type.is_a?(Array) || type.is_a?(Set)) &&
type.length == 1 &&
(custom?(type.first) || special?(type.first))
end
def map_special(type)
SPECIAL.fetch(type, type)
end
# Chooses the best coercer for the given type, e.g. Integer yields a
# coercer able to turn input into an integer.
#
# Special cases:
# * +MultipleTypeCoercer+ — the type allows several member types in an
#   array, e.g. +[Integer, String]+.
# * +CustomTypeCoercer+ — a +coerce_with+ method was given, or the type
#   itself implements the custom-type contract.
# * +CustomTypeCollectionCoercer+ — an array/set of such custom types.
# Grape's own special types are listed in
# +Grape::Validations::Types::SPECIAL+.
#
# @param type [Class] the type to which input strings should be coerced
# @param method [Class,#call] the coercion method to use
# @return [Object] object to be used for coercion and type validation
def build_coercer(type, method: nil, strict: false)
  # A callable coercion method is unique to its declaration, so caching
  # it would only grow the cache without ever producing a hit.
  if method.respond_to?(:call)
    create_coercer_instance(type, method, strict)
  else
    CoercerCache[[type, method, strict]]
  end
end
# Builds a fresh coercer for +type+ (uncached); see {#build_coercer} for
# the dispatch rules. Branch order matters: an explicit +method+ wins
# over collection handling.
def create_coercer_instance(type, method, strict)
  # Maps a special type provided by Grape; types wrapped inside
  # collections are NOT mapped here (see the collection branch below).
  type = Types.map_special(type)
  # Use a special coercer for multiply-typed parameters.
  if Types.multiple?(type)
    MultipleTypeCoercer.new(type, method)
  # Use a special coercer for custom types and coercion methods.
  elsif method || Types.custom?(type)
    CustomTypeCoercer.new(type, method)
  # Special coercer for collections of types that implement a parse method.
  # CustomTypeCoercer (above) already handles such types when an explicit coercion
  # method is supplied.
  elsif Types.collection_of_custom?(type)
    Types::CustomTypeCollectionCoercer.new(
      Types.map_special(type.first), type.is_a?(Set)
    )
  else
    # Fall back to dry-types based coercion for everything else.
    DryTypeCoercer.coercer_instance_for(type, strict)
  end
end
# Memoizes coercer instances keyed by +[type, method, strict]+ so that
# repeated param declarations with the same signature share one coercer.
# Only non-callable +method+ values reach this cache (see #build_coercer).
class CoercerCache < Grape::Util::Cache
  def initialize
    super
    # Lazily builds and stores a coercer on first lookup of each key.
    @cache = Hash.new do |h, (type, method, strict)|
      h[[type, method, strict]] = Grape::Validations::Types.create_coercer_instance(type, method, strict)
    end
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validator_factory.rb | lib/grape/validations/validator_factory.rb | # frozen_string_literal: true
module Grape
module Validations
# Instantiates validator objects from the options hash that the params
# DSL accumulated (see ParamsScope#validate).
class ValidatorFactory
  # @param options [Hash] must contain :validator_class plus the five
  #   positional arguments every Validators::Base subclass expects
  # @return [Validators::Base] the constructed validator
  def self.create_validator(options)
    args = options.values_at(:attributes, :options, :required, :params_scope, :opts)
    options[:validator_class].new(*args)
  end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/params_scope.rb | lib/grape/validations/params_scope.rb | # frozen_string_literal: true
module Grape
module Validations
class ParamsScope
attr_accessor :element, :parent, :index
attr_reader :type, :params_meeting_dependency
include Grape::DSL::Parameters
include Grape::Validations::ParamsDocumentation
# There are a number of documentation options on entities that don't have
# corresponding validators. Since there is nowhere that enumerates them all,
# we maintain a list of them here and skip looking up validators for them.
RESERVED_DOCUMENTATION_KEYWORDS = %i[as required param_type is_array format example].freeze
SPECIAL_JSON = [JSON, Array[JSON]].freeze
# Wraps a declared parameter key together with the scope it was declared
# in, so +#declared+ can later resolve renames/dependencies per scope.
class Attr
  attr_accessor :key, :scope

  # @param key [Hash, Symbol] key of the declared attribute
  # @param scope [Grape::Validations::ParamsScope] scope the attribute
  #   was declared in
  def initialize(key, scope)
    @key = key
    @scope = scope
  end

  # Recursively strips Attr wrappers from a declared_params list.
  # @return [Array[Symbol, Hash[Symbol => Array]]] plain keys/hashes
  def self.attrs_keys(declared_params)
    declared_params.map { |declared_param_attr| attr_key(declared_param_attr) }
  end

  # Unwraps a single entry: an Attr yields its (unwrapped) key, a Hash
  # has its values unwrapped recursively, anything else passes through.
  def self.attr_key(declared_param_attr)
    case declared_param_attr
    when self
      attr_key(declared_param_attr.key)
    when Hash
      declared_param_attr.transform_values { |value| attrs_keys(value) }
    else
      declared_param_attr
    end
  end
end
# Open up a new ParamsScope, allowing parameter definitions per
# Grape::DSL::Params.
# @param opts [Hash] options for this scope
# @option opts :element [Symbol] the element that contains this scope; for
# this to be relevant, @parent must be set
# @option opts :element_renamed [Symbol, nil] whenever this scope should
# be renamed and to what, given +nil+ no renaming is done
# @option opts :parent [ParamsScope] the scope containing this scope
# @option opts :api [API] the API endpoint to modify
# @option opts :optional [Boolean] whether or not this scope needs to have
# any parameters set or not
# @option opts :type [Class] a type meant to govern this scope (deprecated)
# @option opts :type [Hash] group options for this scope
# @option opts :dependent_on [Symbol] if present, this scope should only
# validate if this param is present in the parent scope
# @yield the instance context, open for parameter definitions
def initialize(opts, &block)
  @element = opts[:element]
  @element_renamed = opts[:element_renamed]
  @parent = opts[:parent]
  @api = opts[:api]
  @optional = opts[:optional] || false
  @type = opts[:type]
  @group = opts[:group]
  @dependent_on = opts[:dependent_on]
  @params_meeting_dependency = []
  @declared_params = []
  @index = nil
  # The block holds the `requires`/`optional` DSL calls; evaluating it in
  # this scope's context accumulates declarations into @declared_params.
  instance_eval(&block) if block
  # Propagate what was declared to the parent scope / API settings.
  configure_declared_params
end
# Resolves the API's configuration, preferring the lazily-evaluated form
# when the configuration object supports (and yields a truthy) #evaluate.
def configuration
  config = @api.configuration
  (config.respond_to?(:evaluate) && config.evaluate) || config
end
# @return [Boolean] whether or not this entire scope needs to be
#   validated
def should_validate?(parameters)
  scoped_params = params(parameters)
  # An optional group whose values are absent or all blank is skipped.
  return false if @optional && (scoped_params.blank? || all_element_blank?(scoped_params))
  # Skip when a `given ...` dependency of this scope is not satisfied.
  return false unless meets_dependency?(scoped_params, parameters)
  return true if parent.nil?
  # Every ancestor scope must also want validation.
  parent.should_validate?(parameters)
end
# Whether this scope's `given` dependency is satisfied by +params+.
# For array params, the member hashes that satisfy the dependency are
# remembered in +@params_meeting_dependency+ so only those are validated.
def meets_dependency?(params, request_params)
  return true unless @dependent_on
  # A child scope can only validate when its parent's dependency holds.
  return false if @parent.present? && !@parent.meets_dependency?(@parent.params(request_params), request_params)
  if params.is_a?(Array)
    @params_meeting_dependency = params.flatten.filter { |param| meets_dependency?(param, request_params) }
    return @params_meeting_dependency.present?
  end
  meets_hash_dependency?(params)
end
# Like #meets_dependency?, but checks a single hash-like params object
# (no array handling and no request-wide parent re-scoping).
def attr_meets_dependency?(params)
  return true unless @dependent_on
  return false if @parent.present? && !@parent.attr_meets_dependency?(params)
  meets_hash_dependency?(params)
end
# True when every declared dependency is satisfied by the hash-like
# +params+: a Hash dependency pairs a key with a predicate proc that must
# return truthy for the key's value; a plain key must just be non-blank.
def meets_hash_dependency?(params)
  # params might be anything hash-like, so it only has to implement `key?`
  return false unless params.respond_to?(:key?)

  @dependent_on.all? do |dependency|
    if dependency.is_a?(Hash)
      key, condition = dependency.first
      condition.call(params[key])
    else
      params[dependency].present?
    end
  end
end
# @return [String] the proper attribute name, with nesting considered,
#   e.g. "parent[3][child]" for an indexed nested scope.
def full_name(name, index: nil)
  if nested?
    # Find our containing element's name, and append ours.
    "#{@parent.full_name(@element)}#{brackets(index || @index)}#{brackets(name)}"
  elsif lateral?
    # Find the name of the element as if it was at the same nesting level
    # as our parent. We need to forward our index upward to achieve this.
    @parent.full_name(name, index: @index)
  else
    # We must be the root scope, so no prefix needed.
    name.to_s
  end
end
# Wraps +val+ in square brackets for rack-style nested names; returns
# nil for a nil/false value so interpolation yields an empty string.
def brackets(val)
  return unless val

  "[#{val}]"
end
# @return [Boolean] whether or not this scope is the root-level scope
def root?
  !@parent
end
# A nested scope is contained in one of its parent's elements.
# @return [Boolean] whether or not this scope is nested
#   (note: returns the truthy @element rather than a strict boolean)
def nested?
  @parent && @element
end
# A lateral scope is subordinate to its parent, but its keys are at the
# same level as its parent and thus is not contained within an element.
# @return [Boolean] whether or not this scope is lateral
def lateral?
  @parent && !@element
end
# @return [Boolean] whether or not this scope needs to be present, or can
#   be blank
def required?
  !@optional
end
# Clears the array index tracked while iterating multi-element params.
def reset_index
  @index = nil
end
protected
# Adds a parameter declaration to our list of validations.
# @param attrs [Array] (see Grape::DSL::Parameters#requires)
# @param opts [Hash] may carry :as (rename target) and
#   :declared_params_scope (scope to attribute the declaration to)
def push_declared_params(attrs, opts = {})
  opts[:declared_params_scope] = self unless opts.key?(:declared_params_scope)
  # Lateral scopes share their parent's declared_params list.
  return @parent.push_declared_params(attrs, opts) if lateral?
  # Register the rename mapping used by `declared` when `as:` was given.
  push_renamed_param(full_path + [attrs.first], opts[:as]) if opts[:as]
  @declared_params.concat(attrs.map { |attr| ::Grape::Validations::ParamsScope::Attr.new(attr, opts[:declared_params_scope]) })
end
# Get the full path of the parameter scope in the hierarchy.
#
# @return [Array<Symbol>] the nesting/path of the current parameter scope
def full_path
  return @parent.full_path + [@element] if nested?
  return @parent.full_path if lateral?

  []
end
private
# Registers a parameter rename (`as: ...`) in the endpoint's route
# settings so +#declared+ can later map the original path to the new name.
#
# @param path [Array<String, Symbol>] the full path of the parameter
#   (including the parameter name as last array element)
# @param new_name [String, Symbol] the new (renamed) name
def push_renamed_param(path, new_name)
  route_setting = @api.inheritable_setting.route
  renamed = route_setting[:renamed_params] || {}
  renamed[Array(path).map(&:to_s)] = new_name.to_s
  route_setting[:renamed_params] = renamed
end
# Declares entity-derived fields (requires ... using: Entity.documentation).
# With context :all, every +using+ field is required except those listed in
# :except (which become optional); with :none the roles are reversed.
def require_required_and_optional_fields(context, opts)
  except_fields = Array.wrap(opts[:except])
  using_fields = opts[:using].keys.delete_if { |f| except_fields.include?(f) }
  if context == :all
    optional_fields = except_fields
    required_fields = using_fields
  else # context == :none
    required_fields = except_fields
    optional_fields = using_fields
  end
  required_fields.each do |field|
    field_opts = opts[:using][field]
    # A required :except field must exist in the :using documentation.
    raise ArgumentError, "required field not exist: #{field}" unless field_opts
    requires(field, **field_opts)
  end
  # Optional fields missing from :using are silently skipped.
  optional_fields.each do |field|
    field_opts = opts[:using][field]
    optional(field, **field_opts) if field_opts
  end
end
# Declares every +using+ field as optional; unless context is :all, the
# fields listed in :except are dropped entirely.
def require_optional_fields(context, opts)
  optional_fields = opts[:using].keys
  unless context == :all
    except_fields = Array.wrap(opts[:except])
    optional_fields.delete_if { |f| except_fields.include?(f) }
  end
  optional_fields.each do |field|
    field_opts = opts[:using][field]
    optional(field, **field_opts) if field_opts
  end
end
# Runs the standard validations for +attrs+. When a block (nested group)
# is attached and no explicit type was given, the group defaults to Array.
def validate_attributes(attrs, opts, &block)
  validations = opts.clone
  validations[:type] = Array if block && !validations[:type]
  validates(attrs, validations)
end
# Returns a new parameter scope, subordinate to the current one and nested
# under the parameter corresponding to `attrs.first`.
# @param attrs [Array] the attributes passed to the `requires` or
#   `optional` invocation that opened this scope.
# @param optional [Boolean] whether the parameter this are nested under
#   is optional or not (and hence, whether this block's params will be).
# @raise [Grape::Exceptions::MissingGroupType] required group without a type
# @raise [Grape::Exceptions::UnsupportedGroupType] type is not a group type
# @yield parameter scope
def new_scope(attrs, opts, optional = false, &)
  # if required params are grouped and no type or unsupported type is provided, raise an error
  type = opts[:type]
  if attrs.first && !optional
    raise Grape::Exceptions::MissingGroupType if type.nil?
    raise Grape::Exceptions::UnsupportedGroupType unless Grape::Validations::Types.group?(type)
  end
  self.class.new(
    api: @api,
    element: attrs.first,
    element_renamed: opts[:as],
    parent: self,
    optional: optional,
    type: type || Array,
    group: @group,
    &
  )
end
# Returns a new parameter scope, not nested under any current-level param
# but instead at the same level as the current scope.
# @param options [Hash] options to control how this new scope behaves
# @option options :dependent_on [Symbol] if given, specifies that this
#   scope should only validate if this parameter from the above scope is
#   present
# @yield parameter scope
def new_lateral_scope(options, &)
  self.class.new(
    api: @api,
    element: nil,
    parent: self,
    # NOTE(review): `options:` is never read by #initialize, which reads
    # opts[:optional] — this looks like a long-standing typo for
    # `optional: @optional`; confirm upstream intent before changing, as
    # fixing it would make lateral scopes inherit optionality.
    options: @optional,
    type: type == Array ? Array : Hash,
    dependent_on: options[:dependent_on],
    &
  )
end
# Returns a new parameter scope, subordinate to the current one and nested
# under the parameter corresponding to `attrs.first`.
# @param attrs [Array] the attributes passed to the `requires` or
#   `optional` invocation that opened this scope; the first entry (an
#   options hash) becomes the group marker shared by the child scope.
# @yield parameter scope
def new_group_scope(attrs, &)
  self.class.new(api: @api, parent: self, group: attrs.first, &)
end
# Pushes declared params to parent or settings; called once at the end
# of #initialize, after the DSL block has run.
def configure_declared_params
  # Apply an `as:` rename of the scope's own element, if requested.
  push_renamed_param(full_path, @element_renamed) if @element_renamed
  if nested?
    @parent.push_declared_params [element => @declared_params]
  else
    @api.inheritable_setting.namespace_stackable[:declared_params] = @declared_params
  end
  # params were stored in settings, it can be cleaned from the params scope
  @declared_params = nil
end
# Central validation pipeline for one `requires`/`optional` declaration:
# infers coercion, sanity-checks option combinations, documents the
# params, then registers presence, coercion, and all remaining validators
# — in that order, which the request-time validation relies on.
def validates(attrs, validations)
  coerce_type = infer_coercion(validations)
  required = validations.key?(:presence)
  default = validations[:default]
  values = validations[:values].is_a?(Hash) ? validations.dig(:values, :value) : validations[:values]
  except_values = validations[:except_values].is_a?(Hash) ? validations.dig(:except_values, :value) : validations[:except_values]
  # NB. values and excepts should be nil, Proc, Array, or Range.
  # Specifically, values should NOT be a Hash
  # use values or excepts to guess coerce type when stated type is Array
  coerce_type = guess_coerce_type(coerce_type, values, except_values)
  # default value should be present in values array, if both exist and are not procs
  check_incompatible_option_values(default, values, except_values)
  # type should be compatible with values array, if both exist
  validate_value_coercion(coerce_type, values, except_values)
  document_params attrs, validations, coerce_type, values, except_values
  opts = derive_validator_options(validations)
  # Validate for presence before any other validators
  validates_presence(validations, attrs, opts)
  # Register the coercion validator next so later validators see properly
  # type-cast values. NOTE: despite the local variable of the same name,
  # this parses as a call to the private #coerce_type method.
  coerce_type validations, attrs, required, opts
  validations.each do |type, options|
    # Don't try to look up validators for documentation params that don't have one.
    next if RESERVED_DOCUMENTATION_KEYWORDS.include?(type)
    validate(type, options, attrs, required, opts)
  end
end
# Validate and comprehend the +:type+, +:types+, and +:coerce_with+
# options that have been supplied to the parameter declaration.
# The +:type+ and +:types+ options will be removed from the
# validations list, replaced appropriately with +:coerce+ and
# +:coerce_with+ options that will later be passed to
# {Validators::CoerceValidator}. The type that is returned may be
# used for documentation and further validation of parameter
# options.
#
# @param validations [Hash] list of validations supplied to the
#   parameter declaration (mutated in place)
# @return [class-like] type to which the parameter will be coerced
# @raise [ArgumentError] if the given type options are invalid
def infer_coercion(validations)
  raise ArgumentError, ':type may not be supplied with :types' if validations.key?(:type) && validations.key?(:types)
  # Both :type and :types may be either a plain type or a
  # { value:, message: } hash; normalize to :coerce / :coerce_message.
  validations[:coerce] = (options_key?(:type, :value, validations) ? validations[:type][:value] : validations[:type]) if validations.key?(:type)
  validations[:coerce_message] = (options_key?(:type, :message, validations) ? validations[:type][:message] : nil) if validations.key?(:type)
  validations[:coerce] = (options_key?(:types, :value, validations) ? validations[:types][:value] : validations[:types]) if validations.key?(:types)
  validations[:coerce_message] = (options_key?(:types, :message, validations) ? validations[:types][:message] : nil) if validations.key?(:types)
  validations.delete(:types) if validations.key?(:types)
  coerce_type = validations[:coerce]
  # Special case - when the argument is a single type that is a
  # variant-type collection (e.g. type: [Integer, String]).
  if Types.multiple?(coerce_type) && validations.key?(:type)
    validations[:coerce] = Types::VariantCollectionCoercer.new(
      coerce_type,
      validations.delete(:coerce_with)
    )
  end
  validations.delete(:type)
  coerce_type
end
# Enforce correct usage of :coerce_with: it requires an accompanying
# type, and is disallowed for the special +JSON+ types, which already
# define their own coercion method.
# @raise [ArgumentError] on invalid :coerce_with usage
def check_coerce_with(validations)
  return unless validations.key?(:coerce_with)

  raise ArgumentError, 'must supply type for coerce_with' unless validations.key?(:coerce)
  raise ArgumentError, 'coerce_with disallowed for type: JSON' if SPECIAL_JSON.include?(validations[:coerce])
end
# Add type coercion validation to this scope,
# if any has been specified.
# This validation has special handling since it is
# composited from more than one +requires+/+optional+
# parameter, and needs to be run before most other
# validations. Consumes :coerce, :coerce_with and :coerce_message from
# +validations+ so the generic validator loop never sees them.
def coerce_type(validations, attrs, required, opts)
  check_coerce_with(validations)
  return unless validations.key?(:coerce)
  coerce_options = {
    type: validations[:coerce],
    method: validations[:coerce_with],
    message: validations[:coerce_message]
  }
  validate('coerce', coerce_options, attrs, required, opts)
  validations.delete(:coerce_with)
  validations.delete(:coerce)
  validations.delete(:coerce_message)
end
# When the declared type is a bare Array, infer the member type from the
# first non-empty values/except list (class of its first element); Procs
# and nils are skipped. Any non-Array type is returned unchanged.
def guess_coerce_type(coerce_type, *values_list)
  return coerce_type unless coerce_type == Array

  values_list.each do |candidate|
    next if !candidate || candidate.is_a?(Proc)
    return candidate.first.class if candidate.is_a?(Range) || !candidate.empty?
  end
  coerce_type
end
# Ensures a literal :default is contained in :values and absent from
# :except_values; Proc-valued options are skipped since they can only be
# evaluated at request time.
# @raise [Grape::Exceptions::IncompatibleOptionValues] on conflict
def check_incompatible_option_values(default, values, except_values)
  return if !default || default.is_a?(Proc)

  defaults = Array(default)
  if values && !values.is_a?(Proc) && defaults.any? { |def_val| !values.include?(def_val) }
    raise Grape::Exceptions::IncompatibleOptionValues.new(:default, default, :values, values)
  end
  return unless except_values && !except_values.is_a?(Proc)
  return if defaults.none? { |def_val| except_values.include?(def_val) }

  raise Grape::Exceptions::IncompatibleOptionValues.new(:default, default, :except, except_values)
end
# Resolves the validator class for +type+ and pushes a fully-populated
# options hash onto the namespace's :validations stack; the actual
# validator instance is built later by ValidatorFactory.
def validate(type, options, attrs, required, opts)
  validator_options = {
    attributes: attrs,
    options: options,
    required: required,
    params_scope: self,
    opts: opts,
    validator_class: Validations.require_validator(type)
  }
  @api.inheritable_setting.namespace_stackable[:validations] = validator_options
end
# Checks that every literal entry of :values / :except_values is already
# an instance of the declared type (Ranges are checked via their bounds).
# Proc-valued lists are skipped; Boolean values are normalized first.
# @raise [Grape::Exceptions::IncompatibleOptionValues] on mismatch
def validate_value_coercion(coerce_type, *values_list)
  return unless coerce_type
  # For collection types, validate members against the element type.
  coerce_type = coerce_type.first if coerce_type.is_a?(Enumerable)
  values_list.each do |values|
    next if !values || values.is_a?(Proc)
    value_types = values.is_a?(Range) ? [values.begin, values.end].compact : values
    value_types = value_types.map { |type| Grape::API::Boolean.build(type) } if coerce_type == Grape::API::Boolean
    raise Grape::Exceptions::IncompatibleOptionValues.new(:type, coerce_type, :values, values) unless value_types.all?(coerce_type)
  end
end
# Pops a trailing options hash off +attrs+ (mutating it) and extracts a
# non-nil :message from it, if present.
# @return [Object, nil] the message, or nil when absent
def extract_message_option(attrs)
  return nil unless attrs.is_a?(Array)

  opts = attrs.last.is_a?(Hash) ? attrs.pop : {}
  return opts.delete(:message) if opts.key?(:message) && !opts[:message].nil?

  nil
end
# True when validations[type] is hash-like and carries a non-nil +key+
# entry (i.e. the option was given in { value:, message: } form).
def options_key?(type, key, validations)
  option = validations[type]
  option.respond_to?(:key?) && option.key?(key) && !option[key].nil?
end
# True when +scoped_params+ is enumerable and every element is blank
# (per ActiveSupport's Object#blank?); false for non-enumerables.
def all_element_blank?(scoped_params)
  return false unless scoped_params.respond_to?(:all?)

  scoped_params.all?(&:blank?)
end
# Validators don't have access to each other, yet a few options
# (allow_blank, fail_fast) influence several of them — normalize those
# into a shared opts hash. NOTE: :fail_fast is consumed (deleted) from
# +validations+ so the generic validator loop never sees it.
def derive_validator_options(validations)
  blank_option = validations[:allow_blank]
  blank_option = blank_option[:value] if blank_option.is_a?(Hash)
  {
    allow_blank: blank_option,
    fail_fast: validations.delete(:fail_fast) || false
  }
end
# Registers the presence validator ahead of all others when a truthy
# :presence option was declared, consuming :presence and any top-level
# :message from +validations+.
def validates_presence(validations, attrs, opts)
  return unless validations[:presence]

  validate('presence', validations.delete(:presence), attrs, true, opts)
  validations.delete(:message)
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/as_validator.rb | lib/grape/validations/validators/as_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class AsValidator < Base
  # We use a validator for renaming parameters. This is just a marker for
  # the parameter scope to handle the renaming (see
  # ParamsScope#push_renamed_param). No actual validation happens here;
  # the method intentionally accepts and ignores all arguments.
  def validate_param!(*); end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/all_or_none_of_validator.rb | lib/grape/validations/validators/all_or_none_of_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class AllOrNoneOfValidator < MultipleParamsBase
  # Requires the declared params to appear together: either every one of
  # them is present in +params+ or none are.
  # @raise [Grape::Exceptions::Validation] when only a subset is present
  def validate_params!(params)
    keys = keys_in_common(params)
    return if keys.empty? || keys.length == all_keys.length
    raise Grape::Exceptions::Validation.new(params: all_keys, message: message(:all_or_none))
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/same_as_validator.rb | lib/grape/validations/validators/same_as_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class SameAsValidator < Base
  # Validates that the parameter equals (==) another parameter's value,
  # e.g. password confirmation. The option is either the other param's
  # key or a { value:, message: } hash.
  def validate_param!(attr_name, params)
    confirmation = options_key?(:value) ? @option[:value] : @option
    return if params[attr_name] == params[confirmation]
    raise Grape::Exceptions::Validation.new(
      params: [@scope.full_name(attr_name)],
      message: build_message
    )
  end
  private
  # Prefers a user-supplied :message; otherwise formats the i18n
  # `same_as` template with the compared parameter name.
  def build_message
    if options_key?(:message)
      @option[:message]
    else
      format I18n.t(:same_as, scope: 'grape.errors.messages'), parameter: @option
    end
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/default_validator.rb | lib/grape/validations/validators/default_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class DefaultValidator < Base
  # Fills in missing/nil params with the declared default. A Proc default
  # is evaluated per request (optionally receiving the params hash);
  # non-Proc defaults are dup'ed so requests never share mutable state.
  def initialize(attrs, options, required, scope, opts = {})
    @default = options
    super
  end
  def validate_param!(attr_name, params)
    params[attr_name] = if @default.is_a? Proc
                          if @default.parameters.empty?
                            @default.call
                          else
                            # Arity > 0: give the proc access to the other params.
                            @default.call(params)
                          end
                        elsif @default.frozen? || !@default.duplicable?
                          @default
                        else
                          @default.dup
                        end
  end
  # Overrides Base#validate! — defaults only apply to hash params whose
  # value is currently nil, and never to array members failing a `given`
  # dependency.
  def validate!(params)
    attrs = SingleAttributeIterator.new(self, @scope, params)
    attrs.each do |resource_params, attr_name|
      next unless @scope.meets_dependency?(resource_params, params)
      validate_param!(attr_name, resource_params) if resource_params.is_a?(Hash) && resource_params[attr_name].nil?
    end
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/contract_scope_validator.rb | lib/grape/validations/validators/contract_scope_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
# Runs a dry-schema/contract against the whole request's params and
# merges the coerced result back on success.
class ContractScopeValidator < Base
  attr_reader :schema
  def initialize(_attrs, _options, _required, _scope, opts)
    super
    @schema = opts.fetch(:schema)
  end
  # Validates a given request.
  # @param request [Grape::Request] the request currently being handled
  # @raise [Grape::Exceptions::ValidationArrayErrors] if validation failed
  # @return [void]
  def validate(request)
    res = schema.call(request.params)
    if res.success?
      # Overwrite params with the schema's coerced output.
      request.params.deep_merge!(res.to_h)
      return
    end
    raise Grape::Exceptions::ValidationArrayErrors.new(build_errors_from_messages(res.errors.messages))
  end
  private
  # Converts schema error messages into Grape validation errors, turning
  # nested paths into rack-style names, e.g. user[address][zip].
  def build_errors_from_messages(messages)
    messages.map do |message|
      full_name = message.path.first.to_s
      full_name << "[#{message.path[1..].join('][')}]" if message.path.size > 1
      Grape::Exceptions::Validation.new(params: [full_name], message: message.text)
    end
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/coerce_validator.rb | lib/grape/validations/validators/coerce_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
# Coerces a parameter to its declared type via a coercer chosen by
# Types.build_coercer, replacing the raw value in the params hash.
class CoerceValidator < Base
  def initialize(attrs, options, required, scope, opts)
    super
    @converter = if type.is_a?(Grape::Validations::Types::VariantCollectionCoercer)
                   # Variant collections arrive pre-built by infer_coercion.
                   type
                 else
                   Types.build_coercer(type, method: @option[:method])
                 end
  end
  def validate_param!(attr_name, params)
    raise validation_exception(attr_name) unless params.is_a? Hash
    new_value = coerce_value(params[attr_name])
    raise validation_exception(attr_name, new_value.message) unless valid_type?(new_value)
    # Don't assign a value if it is identical. It fixes a problem with Hashie::Mash
    # which looses wrappers for hashes and arrays after reassigning values
    #
    # h = Hashie::Mash.new(list: [1, 2, 3, 4])
    # => #<Hashie::Mash list=#<Hashie::Array [1, 2, 3, 4]>>
    # list = h.list
    # h[:list] = list
    # h
    # => #<Hashie::Mash list=[1, 2, 3, 4]>
    return if params[attr_name].instance_of?(new_value.class) && params[attr_name] == new_value
    params[attr_name] = new_value
  end
  private
  # @!attribute [r] converter
  # Object that will be used for parameter coercion and type checking.
  #
  # See {Types.build_coercer}
  #
  # @return [Object]
  attr_reader :converter
  # A coercion failure is signalled by an InvalidValue sentinel.
  def valid_type?(val)
    !val.is_a?(Types::InvalidValue)
  end
  def coerce_value(val)
    converter.call(val)
  # Some custom types might fail, so it should be treated as an invalid value
  rescue StandardError
    Types::InvalidValue.new
  end
  # Type to which the parameter will be coerced; unwraps the
  # { value:, message: } option form.
  #
  # @return [Class]
  def type
    @option[:type].is_a?(Hash) ? @option[:type][:value] : @option[:type]
  end
  def validation_exception(attr_name, custom_msg = nil)
    Grape::Exceptions::Validation.new(
      params: [@scope.full_name(attr_name)],
      message: custom_msg || message(:coerce)
    )
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/presence_validator.rb | lib/grape/validations/validators/presence_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
# Fails unless the attribute key exists in the params hash; a nil value
# under an existing key still counts as present.
class PresenceValidator < Base
  def validate_param!(attr_name, params)
    key_present = params.respond_to?(:key?) && params.key?(attr_name)
    return if key_present

    raise Grape::Exceptions::Validation.new(params: [@scope.full_name(attr_name)], message: message(:presence))
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/values_validator.rb | lib/grape/validations/validators/values_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
# Validates that a parameter's value(s) belong to the declared :values —
# an Array/Range/Set-like, or a Proc evaluated either once (arity 0) or
# per value (arity 1).
class ValuesValidator < Base
  def initialize(attrs, options, required, scope, opts)
    @values = options.is_a?(Hash) ? options[:value] : options
    super
  end
  def validate_param!(attr_name, params)
    return unless params.is_a?(Hash)
    val = params[attr_name]
    # nil is only rejected when the param is required at the root scope.
    return if val.nil? && !required_for_root_scope?
    # Replace invalid byte sequences so include?/match checks can't raise.
    val = val.scrub if val.respond_to?(:valid_encoding?) && !val.valid_encoding?
    # don't forget that +false.blank?+ is true
    return if val != false && val.blank? && @allow_blank
    return if check_values?(val, attr_name)
    raise Grape::Exceptions::Validation.new(
      params: [@scope.full_name(attr_name)],
      message: message(:values)
    )
  end
  private
  # True when every element of val (or [nil] for nil) is allowed. A proc
  # raising during evaluation is logged and treated as a failure.
  def check_values?(val, attr_name)
    values = @values.is_a?(Proc) && @values.arity.zero? ? @values.call : @values
    return true if values.nil?
    param_array = val.nil? ? [nil] : Array.wrap(val)
    return param_array.all? { |param| values.include?(param) } unless values.is_a?(Proc)
    begin
      param_array.all? { |param| values.call(param) }
    rescue StandardError => e
      warn "Error '#{e}' raised while validating attribute '#{attr_name}'"
      false
    end
  end
  # Required params declared at (or laterally attached to) the root scope
  # must not be nil.
  def required_for_root_scope?
    return false unless @required
    scope = @scope
    scope = scope.parent while scope.lateral?
    scope.root?
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/regexp_validator.rb | lib/grape/validations/validators/regexp_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
# Validates that each (string-converted) value of the parameter matches
# the configured regular expression. nil values always pass; combine
# with `presence`/`allow_blank: false` to reject them.
class RegexpValidator < Base
  def validate_param!(attr_name, params)
    # Fix: guard with :key? (as PresenceValidator and
    # ExceptValuesValidator do) — :key is Hash's value-lookup method and
    # is not implemented by every hash-like params object.
    return unless params.respond_to?(:key?) && params.key?(attr_name)
    value = options_key?(:value) ? @option[:value] : @option
    return if Array.wrap(params[attr_name]).all? { |param| param.nil? || scrub(param.to_s).match?(value) }
    raise Grape::Exceptions::Validation.new(params: [@scope.full_name(attr_name)], message: message(:regexp))
  end
  private
  # Replaces invalid byte sequences so match? never raises on bad encodings.
  def scrub(param)
    return param if param.valid_encoding?
    param.scrub
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/mutually_exclusive_validator.rb | lib/grape/validations/validators/mutually_exclusive_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
# Ensures at most one of the declared params is present in the request.
class MutuallyExclusiveValidator < MultipleParamsBase
  # @raise [Grape::Exceptions::Validation] when two or more are present
  def validate_params!(params)
    present_keys = keys_in_common(params)
    return if present_keys.length <= 1

    raise Grape::Exceptions::Validation.new(params: present_keys, message: message(:mutual_exclusion))
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/base.rb | lib/grape/validations/validators/base.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class Base
attr_reader :attrs
# Creates a new Validator from options specified
# by a +requires+ or +optional+ directive during
# parameter definition.
# @param attrs [Array] names of attributes to which the Validator applies
# @param options [Object] implementation-dependent Validator options
# @param required [Boolean] attribute(s) are required or optional
# @param scope [ParamsScope] parent scope for this Validator
# @param opts [Hash] additional validation options
def initialize(attrs, options, required, scope, opts)
@attrs = Array(attrs)
@option = options
@required = required
@scope = scope
@fail_fast = opts[:fail_fast]
@allow_blank = opts[:allow_blank]
end
# Validates a given request.
# @note Override #validate! unless you need to access the entire request.
# @param request [Grape::Request] the request currently being handled
# @raise [Grape::Exceptions::Validation] if validation failed
# @return [void]
def validate(request)
return unless @scope.should_validate?(request.params)
validate!(request.params)
end
# Validates a given parameter hash.
# @note Override #validate if you need to access the entire request.
# @param params [Hash] parameters to validate
# @raise [Grape::Exceptions::Validation] if validation failed
# @return [void]
def validate!(params)
attributes = SingleAttributeIterator.new(self, @scope, params)
# we collect errors inside array because
# there may be more than one error per field
array_errors = []
attributes.each do |val, attr_name, empty_val|
next if !@scope.required? && empty_val
next unless @scope.meets_dependency?(val, params)
validate_param!(attr_name, val) if @required || (val.respond_to?(:key?) && val.key?(attr_name))
rescue Grape::Exceptions::Validation => e
array_errors << e
end
raise Grape::Exceptions::ValidationArrayErrors.new(array_errors) if array_errors.any?
end
def self.inherited(klass)
super
Validations.register(klass)
end
def message(default_key = nil)
options = instance_variable_get(:@option)
options_key?(:message) ? options[:message] : default_key
end
# Whether +key+ is present with a non-nil value in +options+
# (defaults to this validator's option object).
# @param key [Symbol] option key to look up
# @param options [Object, nil] options to inspect; defaults to @option
# @return [Boolean]
def options_key?(key, options = nil)
  opts = options.nil? ? instance_variable_get(:@option) : options
  return false unless opts.respond_to?(:key?)

  opts.key?(key) && !opts[key].nil?
end
# Whether validation should stop at the first failing validator.
def fail_fast?
  @fail_fast
end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/allow_blank_validator.rb | lib/grape/validations/validators/allow_blank_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class AllowBlankValidator < Base
  # Fails when the parameter value is blank, unless blanks are allowed
  # for this attribute.
  def validate_param!(attr_name, params)
    # the option may be given as `allow_blank: true` or `allow_blank: { value: true, ... }`
    allow_blank = options_key?(:value) ? @option[:value] : @option
    return if allow_blank
    return unless params.is_a?(Hash)

    value = params[attr_name]
    # repair invalid byte sequences so +present?+ cannot raise
    value = value.scrub if value.respond_to?(:valid_encoding?) && !value.valid_encoding?
    return if value == false || value.present?

    raise Grape::Exceptions::Validation.new(params: [@scope.full_name(attr_name)], message: message(:blank))
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/except_values_validator.rb | lib/grape/validations/validators/except_values_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class ExceptValuesValidator < Base
  # Extracts the list of forbidden values from either the +:value+
  # option or the raw option itself.
  def initialize(attrs, options, required, scope, opts)
    @except = options.is_a?(Hash) ? options[:value] : options
    super
  end

  # Rejects the parameter when any of its values is among the
  # forbidden ones.
  def validate_param!(attr_name, params)
    return unless params.respond_to?(:key?) && params.key?(attr_name)

    # the except list may be computed lazily via a proc
    excepts = @except.is_a?(Proc) ? @except.call : @except
    return if excepts.nil?

    values = params[attr_name].nil? ? [nil] : Array.wrap(params[attr_name])
    return if values.none? { |value| excepts.include?(value) }

    raise Grape::Exceptions::Validation.new(params: [@scope.full_name(attr_name)], message: message(:except_values))
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/exactly_one_of_validator.rb | lib/grape/validations/validators/exactly_one_of_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class ExactlyOneOfValidator < MultipleParamsBase
  # Ensures exactly one of the mutually exclusive parameters is present.
  def validate_params!(params)
    present = keys_in_common(params)
    case present.length
    when 1
      nil # exactly one present: valid
    when 0
      raise Grape::Exceptions::Validation.new(params: all_keys, message: message(:exactly_one))
    else
      raise Grape::Exceptions::Validation.new(params: present, message: message(:mutual_exclusion))
    end
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/at_least_one_of_validator.rb | lib/grape/validations/validators/at_least_one_of_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class AtLeastOneOfValidator < MultipleParamsBase
  # Ensures at least one of the listed parameters is present.
  def validate_params!(params)
    raise Grape::Exceptions::Validation.new(params: all_keys, message: message(:at_least_one)) if keys_in_common(params).empty?
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/length_validator.rb | lib/grape/validations/validators/length_validator.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
class LengthValidator < Base
  # Validates the :min/:max/:is length options up front; :is is
  # mutually exclusive with :min/:max.
  # @raise [ArgumentError] when the options are malformed
  def initialize(attrs, options, required, scope, opts)
    @min = options[:min]
    @max = options[:max]
    @is = options[:is]
    super
    raise ArgumentError, 'min must be an integer greater than or equal to zero' if !@min.nil? && (!@min.is_a?(Integer) || @min.negative?)
    raise ArgumentError, 'max must be an integer greater than or equal to zero' if !@max.nil? && (!@max.is_a?(Integer) || @max.negative?)
    raise ArgumentError, "min #{@min} cannot be greater than max #{@max}" if !@min.nil? && !@max.nil? && @min > @max
    return if @is.nil?

    raise ArgumentError, 'is must be an integer greater than zero' if !@is.is_a?(Integer) || !@is.positive?
    raise ArgumentError, 'is cannot be combined with min or max' if !@min.nil? || !@max.nil?
  end

  # Fails when the value's length violates the configured bounds.
  # Values without a #length (e.g. integers) are skipped.
  def validate_param!(attr_name, params)
    param = params[attr_name]
    return unless param.respond_to?(:length)
    return unless (!@min.nil? && param.length < @min) || (!@max.nil? && param.length > @max) || (!@is.nil? && param.length != @is)

    raise Grape::Exceptions::Validation.new(params: [@scope.full_name(attr_name)], message: build_message)
  end

  # Picks the user-supplied :message or the i18n message matching the
  # combination of bounds that was configured.
  def build_message
    if options_key?(:message)
      @option[:message]
    elsif @min && @max
      format I18n.t(:length, scope: 'grape.errors.messages'), min: @min, max: @max
    elsif @min
      format I18n.t(:length_min, scope: 'grape.errors.messages'), min: @min
    elsif @max
      format I18n.t(:length_max, scope: 'grape.errors.messages'), max: @max
    else
      format I18n.t(:length_is, scope: 'grape.errors.messages'), is: @is
    end
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/validators/multiple_params_base.rb | lib/grape/validations/validators/multiple_params_base.rb | # frozen_string_literal: true
module Grape
module Validations
module Validators
# Base class for validators that operate on several parameters at once
# (e.g. mutually_exclusive, exactly_one_of).
class MultipleParamsBase < Base
  # Runs #validate_params! for every (possibly nested) resource-params
  # set, collecting all raised validation errors.
  # @raise [Grape::Exceptions::ValidationArrayErrors] if any set failed
  def validate!(params)
    attributes = MultipleAttributesIterator.new(self, @scope, params)
    # there may be more than one error, so collect them all
    array_errors = []
    attributes.each do |resource_params|
      validate_params!(resource_params)
    rescue Grape::Exceptions::Validation => e
      array_errors << e
    end
    raise Grape::Exceptions::ValidationArrayErrors.new(array_errors) if array_errors.any?
  end

  private

  # Full names of the declared attributes that are actually present in
  # the given resource params.
  def keys_in_common(resource_params)
    return [] unless resource_params.is_a?(Hash)

    all_keys & resource_params.keys.map! { |attr| @scope.full_name(attr) }
  end

  # Full names of all attributes this validator was declared with.
  def all_keys
    attrs.map { |attr| @scope.full_name(attr) }
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/set_coercer.rb | lib/grape/validations/types/set_coercer.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# Takes the given array and converts it to a set. Every element of the set
# is also coerced.
# Takes the given array and converts it to a set. Every element of the set
# is also coerced.
class SetCoercer < ArrayCoercer
  def initialize(type, strict = false)
    super
    # the collection itself is handled here, so no collection-level
    # dry-types coercer is needed
    @coercer = nil
  end

  # Coerces an Array into a Set, coercing every element on the way.
  # @return [Set, InvalidValue]
  def call(value)
    return InvalidValue.new unless value.is_a?(Array)

    coerce_elements(value)
  end

  protected

  # Builds the result Set, short-circuiting with InvalidValue as soon
  # as any element fails to coerce.
  def coerce_elements(collection)
    result = Set.new
    collection.each do |elem|
      coerced = elem_coercer.call(elem)
      return coerced if coerced.is_a?(InvalidValue)

      result.add(coerced)
    end
    result
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/custom_type_collection_coercer.rb | lib/grape/validations/types/custom_type_collection_coercer.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# See {CustomTypeCoercer} for details on types
# that will be supported by this by this coercer.
# This coercer works in the same way as +CustomTypeCoercer+
# except that it expects to receive an array of strings to
# coerce and will return an array (or optionally, a set)
# of coerced values.
#
# +CustomTypeCoercer+ is already capable of providing type
# checking for arrays where an independent coercion method
# is supplied. As such, +CustomTypeCollectionCoercer+ does
# not allow for such a method to be supplied independently
# of the type.
class CustomTypeCollectionCoercer < CustomTypeCoercer
  # A new coercer for collections of the given type.
  #
  # @param type [Class,#parse]
  #   type to which items in the array should be coerced.
  #   Must implement a +parse+ method which accepts a string,
  #   and for the purposes of type-checking it may either be
  #   a class, or it may implement a +coerced?+, +parsed?+ or
  #   +call+ method (in that order of precedence) which
  #   accepts a single argument and returns true if the given
  #   array item has been coerced correctly.
  # @param set [Boolean]
  #   when true, a +Set+ will be returned by {#call} instead
  #   of an +Array+ and duplicate items will be discarded.
  def initialize(type, set = false)
    super(type)
    @set = set
  end

  # Coerces the given value.
  #
  # @param value [Array<String>] an array of values to be coerced
  # @return [Array,Set,InvalidValue] the coerced result. May be an +Array+ or a
  #   +Set+ depending on the setting given to the constructor; the first
  #   item that fails coercion aborts and is returned as an InvalidValue.
  def call(value)
    coerced = value.map do |item|
      coerced_item = super(item)
      # abort on the first item the element coercer rejects
      return coerced_item if coerced_item.is_a?(InvalidValue)

      coerced_item
    end
    @set ? Set.new(coerced) : coerced
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/file.rb | lib/grape/validations/types/file.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# Implementation for parameters that are multipart file objects.
# Actual handling of these objects is provided by +Rack::Request+;
# this class is here only to assert that rack's handling has succeeded.
# Implementation for parameters that are multipart file objects.
# Actual handling of these objects is provided by +Rack::Request+;
# this class is here only to assert that rack's handling has succeeded.
class File
  class << self
    # Passes through a multipart file object that Rack::Request has
    # already processed, merely asserting that processing succeeded.
    #
    # @param input [Object] the parameter value
    # @return [Hash, InvalidValue, nil] the input unchanged, nil for a
    #   nil input, or InvalidValue when it doesn't look like an upload
    def parse(input)
      return if input.nil?

      # Rack::Request has already done the heavy lifting; just type-check.
      parsed?(input) ? input : InvalidValue.new
    end

    # Duck-types the Hash that Rack::Request builds for uploads:
    # it must carry a Tempfile under the :tempfile key.
    def parsed?(value)
      value.is_a?(::Hash) && value.key?(:tempfile) && value[:tempfile].is_a?(Tempfile)
    end
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/dry_type_coercer.rb | lib/grape/validations/types/dry_type_coercer.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# A base class for classes which must identify a coercer to be used.
# If the +strict+ argument is true, it won't coerce the given value
# but check its type. More information there
# https://dry-rb.org/gems/dry-types/main/built-in-types/
class DryTypeCoercer
  class << self
    # Returns a collection coercer which corresponds to a given type.
    # Example:
    #
    #   collection_coercer_for(Array)
    #   #=> Grape::Validations::Types::ArrayCoercer
    #
    # @raise [ArgumentError] for unsupported collection types
    def collection_coercer_for(type)
      case type
      when Array
        ArrayCoercer
      when Set
        SetCoercer
      else
        raise ArgumentError, "Unknown type: #{type}"
      end
    end

    # Returns an instance of a coercer for a given type.
    # A Class (e.g. Integer) gets a PrimitiveCoercer; a collection
    # instance (e.g. [Integer]) gets the matching collection coercer.
    def coercer_instance_for(type, strict = false)
      klass = type.instance_of?(Class) ? PrimitiveCoercer : collection_coercer_for(type)
      klass.new(type, strict)
    end
  end

  def initialize(type, strict = false)
    @type = type
    @strict = strict
    # strict coercers type-check the value instead of converting it
    @cache_coercer = strict ? DryTypes::StrictCache : DryTypes::ParamsCache
  end

  # Coerces the given value to a type which was specified during
  # initialization as a type argument.
  # Returns InvalidValue when dry-types cannot coerce the value.
  #
  # @param val [Object]
  def call(val)
    return if val.nil?

    @coercer[val]
  rescue Dry::Types::CoercionError
    InvalidValue.new
  end

  protected

  attr_reader :type, :strict, :cache_coercer
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/json.rb | lib/grape/validations/types/json.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# Handles coercion and type checking for parameters that are complex
# types given as JSON-encoded strings. It accepts both JSON objects
# and arrays of objects, and will coerce the input to a +Hash+
# or +Array+ object respectively. In either case the Grape
# validation system will apply nested validation rules to
# all returned objects.
# Handles coercion and type checking for parameters that are complex
# types given as JSON-encoded strings.
class Json
  class << self
    # Coerce the input into a JSON-like data structure.
    #
    # @param input [String, nil] a JSON-encoded parameter value
    # @return [Hash,Array<Hash>,nil] nil for nil/blank input
    # @raise [JSON::ParserError] if the input is malformed JSON
    def parse(input)
      return input if parsed?(input)
      # Allow nulls and blank strings. Anchored with \A/\z so only a
      # fully blank string is treated as absent; the previous ^/$
      # anchors matched any blank *line*, wrongly discarding JSON
      # documents that start with a blank line.
      return if input.nil? || input.match?(/\A\s*\z/)

      JSON.parse(input, symbolize_names: true)
    end

    # Checks that the input was parsed successfully
    # and isn't something odd such as an array of primitives.
    #
    # @param value [Object] result of {#parse}
    # @return [true,false]
    def parsed?(value)
      value.is_a?(::Hash) || coerced_collection?(value)
    end

    protected

    # Is the value an array of JSON-like objects?
    #
    # @param value [Object] result of {#parse}
    # @return [true,false]
    def coerced_collection?(value)
      value.is_a?(::Array) && value.all?(::Hash)
    end
  end
end
# Specialization of the {Json} attribute that is guaranteed
# to return an array of objects. Accepts both JSON-encoded
# objects and arrays of objects, but wraps single objects
# in an Array.
class JsonArray < Json
  class << self
    # See {Json#parse}. Wraps single objects in an array.
    #
    # @param input [String] JSON-encoded parameter value
    # @return [Array<Hash>, nil] nil when the input was nil/blank
    def parse(input)
      json = super
      Array.wrap(json) unless json.nil?
    end

    # See {Json#coerced_collection?}. Only an array of objects counts
    # as already-parsed for the array variant.
    def parsed?(value)
      coerced_collection? value
    end
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/multiple_type_coercer.rb | lib/grape/validations/types/multiple_type_coercer.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# This class is intended for use with Grape endpoint parameters that
# have been declared to be of variant-type using the +:types+ option.
# +MultipleTypeCoercer+ will build a coercer for each type declared
# in the array passed to +:types+ using {Types.build_coercer}. It will
# apply these coercers to parameter values in the order given to
# +:types+, and will return the value returned by the first coercer
# to successfully coerce the parameter value. Therefore if +String+ is
# an allowed type it should be declared last, since it will always
# successfully "coerce" the value.
class MultipleTypeCoercer
  # Construct a new coercer that will attempt to coerce
  # values to the given list of types in the given order.
  #
  # @param types [Array<Class>] list of allowed types
  # @param method [#call,#parse] method by which values should be
  #   coerced. See class docs for default behaviour.
  def initialize(types, method = nil)
    @method = method.respond_to?(:parse) ? method.method(:parse) : method
    @type_coercers = types.map do |type|
      if Types.multiple? type
        # nested variant-type collection, e.g. [[Integer, String]]
        VariantCollectionCoercer.new type, @method
      else
        # with a custom coercion method the type is only checked, not coerced
        Types.build_coercer type, strict: !@method.nil?
      end
    end
  end

  # Coerces the given value.
  #
  # @param val [String] value to be coerced, in grape
  #   this should always be a string.
  # @return [Object,InvalidValue] the result of the first coercer that
  #   succeeds, or an instance of {InvalidValue} if none did.
  def call(val)
    # once the value is coerced by the custom method, its type should be checked
    val = @method.call(val) if @method
    coerced_val = InvalidValue.new
    @type_coercers.each do |coercer|
      coerced_val = coercer.call(val)
      return coerced_val unless coerced_val.is_a?(InvalidValue)
    end
    coerced_val
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/primitive_coercer.rb | lib/grape/validations/types/primitive_coercer.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# Coerces the given value to a type defined via a +type+ argument during
# initialization. When +strict+ is true, it doesn't coerce a value but check
# that it has the proper type.
class PrimitiveCoercer < DryTypeCoercer
  def initialize(type, strict = false)
    super
    # look up the dry-types coercer for this primitive type
    @coercer = cache_coercer[type]
  end

  # Coerces the value, rejecting Virtus-incompatible combinations and
  # treating an empty string as nil (except for String targets).
  # @return [Object, InvalidValue, nil]
  def call(val)
    return InvalidValue.new if reject?(val)
    return nil if val.nil? || treat_as_nil?(val)

    super
  end

  protected

  attr_reader :type

  # This method maintains logic which was defined by Virtus. For example,
  # dry-types is ok to convert an array or a hash to a string, it is supported,
  # but Virtus wouldn't accept it. So, this method only exists to not introduce
  # breaking changes.
  def reject?(val)
    (val.is_a?(Array) && type == String) ||
      (val.is_a?(String) && type == Hash) ||
      (val.is_a?(Hash) && type == String)
  end

  # Dry-Types treats an empty string as invalid. However, Grape considers an empty string as
  # absence of a value and coerces it into nil. See a discussion there
  # https://github.com/ruby-grape/grape/pull/2045
  def treat_as_nil?(val)
    val == '' && type != String
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/variant_collection_coercer.rb | lib/grape/validations/types/variant_collection_coercer.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# This class wraps {MultipleTypeCoercer}, for use with collections
# that allow members of more than one type.
class VariantCollectionCoercer
  # Construct a new coercer that will attempt to coerce
  # a list of values such that all members are of one of
  # the given types. The container may also optionally be
  # coerced to a +Set+. An arbitrary coercion +method+ may
  # be supplied, which will be passed the entire collection
  # as a parameter and should return a new collection, or
  # may return the same one if no coercion was required.
  #
  # @param types [Array<Class>,Set<Class>] list of allowed types,
  #   also specifying the container type
  # @param method [#call,#parse] method by which values should be coerced
  def initialize(types, method = nil)
    @types = types
    @method = method.respond_to?(:parse) ? method.method(:parse) : method
    # If we have a coercion method, pass it in here to save
    # building another one, even though we call it directly.
    @member_coercer = MultipleTypeCoercer.new types, method
  end

  # Coerce the given value.
  #
  # @param value [Array<String>] collection of values to be coerced
  # @return [Array<Object>,Set<Object>,InvalidValue,nil]
  #   the coerced result, or an instance
  #   of {InvalidValue} if the value could not be coerced.
  def call(value)
    # non-arrays yield nil rather than InvalidValue here
    return unless value.is_a? Array

    value =
      if @method
        @method.call(value)
      else
        # NOTE(review): members that fail coercion are kept in the array
        # as InvalidValue instances rather than returned early — confirm
        # downstream validation handles them.
        value.map { |v| @member_coercer.call(v) }
      end
    # a Set of types doubles as a request for a Set container
    return Set.new value if @types.is_a? Set

    value
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/array_coercer.rb | lib/grape/validations/types/array_coercer.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# Coerces elements in an array. It might be an array of strings or integers or
# an array of arrays of integers.
#
# It could've been possible to use an +of+
# method (https://dry-rb.org/gems/dry-types/main/array-with-member/)
# provided by dry-types. Unfortunately, it doesn't work for Grape because of
# behavior of Virtus which was used earlier, a `Grape::Validations::Types::PrimitiveCoercer`
# maintains Virtus behavior in coercing.
class ArrayCoercer < DryTypeCoercer
  def initialize(type, strict = false)
    super
    @coercer = strict ? DryTypes::Strict::Array : DryTypes::Params::Array
    # the element type, e.g. Integer for [Integer]
    @subtype = type.first
  end

  # Coerces the value to an array via dry-types, then coerces each
  # element with the subtype's coercer.
  def call(_val)
    collection = super
    return collection if collection.is_a?(InvalidValue)

    coerce_elements collection
  end

  protected

  attr_reader :subtype

  # Coerces every element in place; short-circuits with InvalidValue on
  # the first element that is nil or fails to coerce.
  def coerce_elements(collection)
    return if collection.nil?

    collection.each_with_index do |elem, index|
      return InvalidValue.new if reject?(elem)

      coerced_elem = elem_coercer.call(elem)
      return coerced_elem if coerced_elem.is_a?(InvalidValue)

      collection[index] = coerced_elem
    end
    collection
  end

  # This method maintains logic which was defined by Virtus for arrays.
  # Virtus doesn't allow nil in arrays.
  def reject?(val)
    val.nil?
  end

  # Memoized coercer for individual elements.
  def elem_coercer
    @elem_coercer ||= DryTypeCoercer.coercer_instance_for(subtype, strict)
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/invalid_value.rb | lib/grape/validations/types/invalid_value.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# Instances of this class may be used as tokens to denote that a parameter value could not be
# coerced. The given message will be used as a validation error.
# Token object denoting that a parameter value could not be coerced.
# The optional message is surfaced as a validation error.
class InvalidValue
  attr_reader :message

  # @param message [String, nil] validation error message to surface
  def initialize(message = nil)
    @message = message
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/validations/types/custom_type_coercer.rb | lib/grape/validations/types/custom_type_coercer.rb | # frozen_string_literal: true
module Grape
module Validations
module Types
# This class will detect type classes that implement
# a class-level +parse+ method. The method should accept one
# +String+ argument and should return the value coerced to
# the appropriate type. The method may raise an exception if
# there are any problems parsing the string.
#
# Alternately an optional +method+ may be supplied (see the
# +coerce_with+ option of {Grape::Dsl::Parameters#requires}).
# This may be any class or object implementing +parse+ or +call+,
# with the same contract as described above.
#
# Type Checking
# -------------
#
# Calls to +coerced?+ will consult this class to check
# that the coerced value produced above is in fact of the
# expected type. By default this class performs a basic check
# against the type supplied, but this behaviour will be
# overridden if the class implements a class-level
# +coerced?+ or +parsed?+ method. This method
# will receive a single parameter that is the coerced value
# and should return +true+ if the value meets type expectations.
# Arbitrary assertions may be made here but the grape validation
# system should be preferred.
#
# Alternately a proc or other object responding to +call+ may be
# supplied in place of a type. This should implement the same
# contract as +coerced?+, and must be supplied with a coercion
# +method+.
class CustomTypeCoercer
  # A new coercer for the given type specification
  # and coercion method.
  #
  # @param type [Class,#coerced?,#parsed?,#call?]
  #   specifier for the target type. See class docs.
  # @param method [#parse,#call]
  #   optional coercion method. See class docs.
  def initialize(type, method = nil)
    coercion_method = infer_coercion_method type, method
    @method = enforce_symbolized_keys type, coercion_method
    @type_check = infer_type_check(type)
  end

  # Coerces the given value.
  #
  # @param val [String] value to be coerced, in grape
  #   this should always be a string.
  # @return [Object,InvalidValue] the coerced result, or InvalidValue
  #   when coercion failed or produced an unexpected type
  def call(val)
    coerced_val = @method.call(val)
    return coerced_val if coerced_val.is_a?(InvalidValue)
    return InvalidValue.new unless coerced?(coerced_val)

    coerced_val
  end

  # Whether +val+ satisfies the inferred type check; nil always passes.
  def coerced?(val)
    val.nil? || @type_check.call(val)
  end

  private

  # Determine the coercion method we're expected to use
  # based on the parameters given.
  #
  # @param type see #new
  # @param method see #new
  # @return [#call] coercion method
  def infer_coercion_method(type, method)
    if method
      if method.respond_to? :parse
        method.method :parse
      else
        method
      end
    else
      # Try to use parse() declared on the target type.
      # This may raise an exception, but we are out of ideas anyway.
      type.method :parse
    end
  end

  # Determine how the type validity of a coerced
  # value should be decided.
  #
  # @param type see #new
  # @return [#call] a procedure which accepts a single parameter
  #   and returns +true+ if the passed object is of the correct type.
  def infer_type_check(type)
    # First check for special class methods
    if type.respond_to? :coerced?
      type.method :coerced?
    elsif type.respond_to? :parsed?
      type.method :parsed?
    elsif type.respond_to? :call
      # Arbitrary proc passed for type validation.
      # Note that this will fail unless a method is also
      # passed, or if the type also implements a parse() method.
      type
    elsif type.is_a?(Enumerable)
      # collection specifier, e.g. [Integer] — check every member
      lambda do |value|
        value.is_a?(Enumerable) && value.all? do |val|
          recursive_type_check(type.first, val)
        end
      end
    else
      # By default, do a simple type check
      ->(value) { value.is_a? type }
    end
  end

  # Walks nested collection specifiers (e.g. [[Integer]]) and values in
  # lockstep, type-checking leaf values.
  def recursive_type_check(type, value)
    if type.is_a?(Enumerable) && value.is_a?(Enumerable)
      value.all? { |val| recursive_type_check(type.first, val) }
    else
      !type.is_a?(Enumerable) && value.is_a?(type)
    end
  end

  # Enforce symbolized keys for complex types
  # by wrapping the coercion method such that
  # any Hash objects in the immediate hierarchy
  # have their keys recursively symbolized.
  # This helps common libs such as JSON to work easily.
  #
  # @param type see #new
  # @param method see #infer_coercion_method
  # @return [#call] +method+ wrapped in an additional
  #   key-conversion step, or just returns +method+
  #   itself if no conversion is deemed to be
  #   necessary.
  def enforce_symbolized_keys(type, method)
    # Collections have all values processed individually
    if [Array, Set].include?(type)
      lambda do |val|
        method.call(val).tap do |new_val|
          new_val.map do |item|
            item.is_a?(Hash) ? item.deep_symbolize_keys : item
          end
        end
      end
    # Hash objects are processed directly
    elsif type == Hash
      lambda do |val|
        method.call(val).deep_symbolize_keys
      end
    # Simple types are not processed.
    # This includes Array<primitive> types.
    else
      method
    end
  end
end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/router/greedy_route.rb | lib/grape/router/greedy_route.rb | # frozen_string_literal: true
# Act like a Grape::Router::Route but for greedy_match
# see @neutral_map
module Grape
class Router
class GreedyRoute < BaseRoute
  extend Forwardable

  # Rack #call is served directly by the wrapped endpoint.
  def_delegators :@endpoint, :call

  attr_reader :endpoint, :allow_header

  # @param pattern [Grape::Router::Pattern] path pattern to match
  # @param endpoint [#call] rack app handling the greedy match
  # @param allow_header [Array<String>] methods for the Allow header
  def initialize(pattern, endpoint:, allow_header:)
    super(pattern)
    @endpoint = endpoint
    @allow_header = allow_header
  end

  # Greedy routes carry no extractable path params.
  def params(_input = nil)
    nil
  end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/router/route.rb | lib/grape/router/route.rb | # frozen_string_literal: true
module Grape
class Router
class Route < BaseRoute
  extend Forwardable

  # Matches when the request path merely starts with the pattern origin
  # (used for forward_match routes).
  FORWARD_MATCH_METHOD = ->(input, pattern) { input.start_with?(pattern.origin) }
  # Matches when the pattern fully matches the request path.
  NON_FORWARD_MATCH_METHOD = ->(input, pattern) { pattern.match?(input) }

  attr_reader :app, :request_method, :index

  def_delegators :@app, :call

  def initialize(endpoint, method, pattern, options)
    super(pattern, options)
    @app = endpoint
    @request_method = upcase_method(method)
    @match_function = options[:forward_match] ? FORWARD_MATCH_METHOD : NON_FORWARD_MATCH_METHOD
  end

  # Rewrites this route to answer HEAD requests (automatic HEAD support).
  def convert_to_head_request!
    @request_method = Rack::HEAD
  end

  # Replaces the wrapped rack app (e.g. with a middleware-wrapped one)
  # and returns self for chaining.
  def apply(app)
    @app = app
    self
  end

  # Whether the given path matches this route.
  def match?(input)
    return false if input.blank?

    @match_function.call(input, pattern)
  end

  # Extracts path params from +input+, or returns the route's default
  # params when no input is given.
  def params(input = nil)
    return params_without_input if input.blank?

    parsed = pattern.params(input)
    return unless parsed

    parsed.compact.symbolize_keys
  end

  private

  # Memoized defaults: pattern capture defaults merged with declared params.
  def params_without_input
    @params_without_input ||= pattern.captures_default.merge(options[:params])
  end

  # Normalizes an HTTP method name to its canonical uppercase form,
  # reusing the interned constant when it is a supported method.
  def upcase_method(method)
    method_s = method.to_s
    Grape::HTTP_SUPPORTED_METHODS.detect { |m| m.casecmp(method_s).zero? } || method_s.upcase
  end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/router/base_route.rb | lib/grape/router/base_route.rb | # frozen_string_literal: true
module Grape
class Router
class BaseRoute
  extend Forwardable

  # Unknown messages fall through to the options object.
  delegate_missing_to :@options

  attr_reader :options, :pattern

  def_delegators :@pattern, :path, :origin
  def_delegators :@options, :description, :version, :requirements, :prefix, :anchor, :settings, :forward_match, *Grape::Util::ApiDescription::DSL_METHODS

  def initialize(pattern, options = {})
    @pattern = pattern
    # normalize to OrderedOptions so option keys respond as methods
    @options = options.is_a?(ActiveSupport::OrderedOptions) ? options : ActiveSupport::OrderedOptions.new.update(options)
  end

  # see https://github.com/ruby-grape/grape/issues/1348
  def namespace
    @namespace ||= @options[:namespace]
  end

  # Memoized named-capture identifier derived from this route's index.
  def regexp_capture_index
    @regexp_capture_index ||= CaptureIndexCache[@index]
  end

  def pattern_regexp
    @pattern.to_regexp
  end

  # Wraps the pattern regexp in a named capture group so the router can
  # tell which route matched inside a combined regexp.
  def to_regexp(index)
    @index = index
    Regexp.new("(?<#{regexp_capture_index}>#{pattern_regexp})")
  end

  # Caches "_<index>" capture-group names per route index.
  class CaptureIndexCache < Grape::Util::Cache
    def initialize
      super
      @cache = Hash.new do |h, index|
        h[index] = "_#{index}"
      end
    end
  end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/router/pattern.rb | lib/grape/router/pattern.rb | # frozen_string_literal: true
module Grape
class Router
class Pattern
  extend Forwardable

  # Internal capture names excluded from the default captures hash.
  DEFAULT_CAPTURES = %w[format version].freeze

  attr_reader :origin, :path, :pattern, :to_regexp

  def_delegators :pattern, :params
  def_delegators :to_regexp, :===
  alias match? ===

  def initialize(origin:, suffix:, anchor:, params:, format:, version:, requirements:)
    @origin = origin
    @path = PatternCache[[build_path_from_pattern(@origin, anchor), suffix]]
    @pattern = Mustermann::Grape.new(@path, uri_decode: true, params: params, capture: extract_capture(format, version, requirements))
    @to_regexp = @pattern.to_regexp
  end

  # Named captures of the compiled regexp (minus internal ones),
  # each defaulted to an empty string.
  def captures_default
    to_regexp.names
             .delete_if { |n| DEFAULT_CAPTURES.include?(n) }
             .to_h { |k| [k, ''] }
  end

  private

  # Builds the Mustermann capture table from format/version values and
  # any user-supplied requirements.
  def extract_capture(format, version, requirements)
    capture = {}
    capture[:format] = map_str(format) if format.present?
    capture[:version] = map_str(version) if version.present?
    return capture if requirements.blank?

    requirements.merge(capture)
  end

  # Appends an optional catch-all path segment unless the route is
  # anchored or already ends with a wildcard segment.
  def build_path_from_pattern(pattern, anchor)
    if pattern.end_with?('*path')
      # make the segment before the wildcard optional
      pattern.dup.insert(pattern.rindex('/') + 1, '?')
    elsif anchor
      pattern
    elsif pattern.end_with?('/')
      "#{pattern}?*path"
    else
      "#{pattern}/?*path"
    end
  end

  def map_str(value)
    Array.wrap(value).map(&:to_s)
  end

  # Interns "#{pattern}#{suffix}" strings keyed by the pair.
  class PatternCache < Grape::Util::Cache
    def initialize
      super
      @cache = Hash.new do |h, (pattern, suffix)|
        h[[pattern, suffix]] = -"#{pattern}#{suffix}"
      end
    end
  end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/parser/xml.rb | lib/grape/parser/xml.rb | # frozen_string_literal: true
module Grape
module Parser
class Xml < Base
  # Parses an XML request body.
  # @param object [String] raw request body
  # @param _env [Hash] rack env (unused)
  # @raise [Grape::Exceptions::InvalidMessageBody] when the body is not valid XML
  def self.call(object, _env)
    ::Grape::Xml.parse(object)
  rescue ::Grape::Xml::ParseError
    # handle XML parsing errors via the rescue handlers or provide error message
    raise Grape::Exceptions::InvalidMessageBody.new('application/xml')
  end
end
end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/parser/json.rb | lib/grape/parser/json.rb | # frozen_string_literal: true
module Grape
  module Parser
    # Parses JSON request bodies for Grape endpoints.
    class Json < Base
      class << self
        # Parse +object+ (the raw request body) as JSON.
        # @raise [Grape::Exceptions::InvalidMessageBody] when parsing fails,
        #   so rescue handlers can report a content-type-specific error.
        def call(object, _env)
          begin
            ::Grape::Json.load(object)
          rescue ::Grape::Json::ParseError
            # handle JSON parsing errors via the rescue handlers or provide error message
            raise Grape::Exceptions::InvalidMessageBody.new('application/json')
          end
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
ruby-grape/grape | https://github.com/ruby-grape/grape/blob/17fb0cf64296e9c9c968ea7b62f7614538070be4/lib/grape/parser/base.rb | lib/grape/parser/base.rb | # frozen_string_literal: true
module Grape
  module Parser
    # Abstract base class for request-body parsers. Every subclass is
    # automatically registered with the parser registry when defined.
    class Base
      class << self
        # Abstract entry point; concrete parsers must override this.
        def call(_object, _env)
          raise NotImplementedError
        end

        # Hook: register each concrete parser subclass.
        def inherited(klass)
          super
          Parser.register(klass)
        end
      end
    end
  end
end
| ruby | MIT | 17fb0cf64296e9c9c968ea7b62f7614538070be4 | 2026-01-04T15:38:22.454413Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/tasks/benchmark.rb | tasks/benchmark.rb | require "json"
require "fileutils"
# Target size (bytes) of the generated benchmark input: 1 GiB.
BENCHMARK_FILE_SIZE = 1 * 1024 * 1024 * 1024
# Location of the generated benchmark data file.
BENCHMARK_FILE_PATH = File.expand_path("./tmp/benchmark/data.log")

namespace :benchmark do
  task :init do
    # Synchronize stdout because the output order is not as intended on Windows environment
    STDOUT.sync = true
  end

  # Generates a ~1 GiB file of one-line JSON records used as in_tail input.
  task :prepare_1GB do
    FileUtils.mkdir_p(File.dirname(BENCHMARK_FILE_PATH))
    File.open(BENCHMARK_FILE_PATH, "w") do |f|
      data = { "message": "a" * 1024 }.to_json
      loop do
        f.puts data
        # NOTE(review): File.size reads the on-disk size while writes are
        # still buffered, so the final file can overshoot the target by a
        # buffer's worth — presumably acceptable for a benchmark; confirm.
        break if File.size(BENCHMARK_FILE_PATH) > BENCHMARK_FILE_SIZE
      end
    end
  end

  # Prints the Ruby/fluentd versions of the environment under test.
  task :show_info do
    # Output the information with markdown format
    puts "### Environment"
    puts "```"
    system "bundle exec ruby --version"
    system "bundle exec ruby bin/fluentd --version"
    puts "```\n"
  end

  desc "Run in_tail benchmark"
  # Runs fluentd once over the 1 GiB file with patch_in_tail.rb preloaded
  # (which times the first read and exits), then cleans up the data.
  task :"run:in_tail" => [:init, :prepare_1GB, :show_info] do
    # Output the results with markdown format
    puts "### in_tail with 1 GB file"
    puts "```"
    system "bundle exec ruby bin/fluentd -r ./tasks/benchmark/patch_in_tail.rb --no-supervisor -c ./tasks/benchmark/conf/in_tail.conf -o ./tmp/benchmark/fluent.log"
    puts "```"
    Rake::Task["benchmark:clean"].invoke
  end

  # Removes the generated benchmark directory and everything in it.
  task :clean do
    FileUtils.rm_rf(File.dirname(BENCHMARK_FILE_PATH))
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/tasks/backport.rb | tasks/backport.rb | require_relative 'backport/backporter'
=begin
When you want to manually execute backporting, set the following
environment variables:
* GITHUB_REPOSITORY: fluent/fluentd
* GITHUB_TOKEN: ${PERSONAL_ACCESS_TOKEN}
Optional:
* REPOSITORY_REMOTE: origin
If you execute in forked repository, it might be 'upstream'
=end
# Appends optional CLI flags derived from environment variables:
#   DRY_RUN           -> --dry-run
#   GITHUB_REPOSITORY -> --upstream <owner/repo>
#   REPOSITORY_REMOTE -> --remote <remote name>
# Mutates and returns +commands+.
def append_additional_arguments(commands)
  commands << '--dry-run' if ENV['DRY_RUN']
  if ENV['GITHUB_REPOSITORY']
    commands << '--upstream'
    commands << ENV['GITHUB_REPOSITORY']
  end
  if ENV['REPOSITORY_REMOTE']
    commands << '--remote'
    commands << ENV['REPOSITORY_REMOTE']
  end
  commands
end

namespace :backport do
  # One task per maintained release branch. The task bodies used to be
  # copy-pasted (and had drifted in statement order); generating them from
  # a single template keeps them consistent. Task names and descriptions
  # are unchanged: backport:v1_16 / backport:v1_19.
  { 'v1.16' => :v1_16, 'v1.19' => :v1_19 }.each do |branch, task_name|
    desc "Backport PR to #{branch} branch"
    task task_name do
      commands = append_additional_arguments(['--branch', branch, '--log-level', 'debug'])
      PullRequestBackporter.new.run(commands)
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/tasks/backport/backporter.rb | tasks/backport/backporter.rb | require 'open-uri'
require 'json'
require 'optparse'
require 'logger'
# Automates backporting of merged GitHub pull requests that carry a
# "backport to <branch>" label: for each such PR it creates a branch off
# the release branch, cherry-picks the PR's merge commit, pushes it, and
# opens a new pull request via the `gh` CLI.
#
# External requirements: `git` and `gh` on PATH, and a GITHUB_TOKEN
# environment variable for the GitHub REST API calls.
class PullRequestBackporter
  def initialize
    @logger = Logger.new(STDOUT)
    # Defaults; overridable via the CLI flags parsed in #parse_command_line.
    # NOTE(review): @options[:log_level] is parsed and stored but never
    # assigned to @logger.level, so --log-level appears to have no effect
    # on filtering — confirm whether that is intentional.
    @options = {
      upstream: "fluent/fluentd",
      branch: "v1.16",
      dry_run: false,
      log_level: Logger::Severity::INFO,
      remote: 'origin'
    }
  end

  # Returns the name of the branch currently checked out.
  # NOTE(review): this takes the last whitespace-separated token of
  # `git branch --contains` output, which is fragile (e.g. detached HEAD
  # prints "(HEAD detached at ...)") — confirm expected environments.
  def current_branch
    branch = IO.popen(["git", "branch", "--contains"]) do |io|
      io.read
    end
    branch.split.last
  end

  # Parses CLI-style arguments into @options. Exits the process with
  # status 1 on an unknown --log-level value.
  def parse_command_line(argv)
    opt = OptionParser.new
    opt.on('--upstream REPOSITORY',
           'Specify upstream repository (e.g. fluent/fluentd)') {|v| @options[:upstream] = v }
    opt.on('--branch BRANCH') {|v| @options[:branch] = v }
    opt.on('--dry-run') {|v| @options[:dry_run] = true }
    opt.on('--log-level LOG_LEVEL (e.g. debug,info)') {|v|
      @options[:log_level] = case v
                             when "error"
                               Logger::Severity::ERROR
                             when "warn"
                               Logger::Severity::WARN
                             when "debug"
                               Logger::Severity::DEBUG
                             when "info"
                               Logger::Severity::INFO
                             else
                               puts "unknown log level: <#{v}>"
                               exit 1
                             end
    }
    opt.on('--remote REMOTE') {|v| @options[:remote] = v }
    opt.parse!(argv)
  end

  # Scans the newest 5 pages (100 PRs each) of closed pull requests in
  # the upstream repository and returns those labelled
  # "backport to <branch>" but not yet "backported". Each entry carries
  # the PR number, the merge commit SHA to cherry-pick, and the title/body
  # for the backport PR to be created.
  # NOTE(review): PRs older than the newest 500 closed ones are silently
  # ignored — presumably acceptable for routine runs; confirm.
  def collect_backports
    backports = []
    pages = 5
    pages.times.each do |page|
      @logger.debug "Collecting backport information (#{page + 1}/#{pages})"
      URI.open("https://api.github.com/repos/#{@options[:upstream]}/pulls?state=closed&per_page=100&page=#{page+1}",
               "Accept" => "application/vnd.github+json",
               "Authorization" => "Bearer #{ENV['GITHUB_TOKEN']}",
               "X-GitHub-Api-Version" => "2022-11-28") do |request|
        JSON.parse(request.read).each do |pull_request|
          unless pull_request["labels"].empty?
            labels = pull_request["labels"].collect { |label| label["name"] }
            unless labels.include?("backport to #{@options[:branch]}")
              next
            end
            if labels.include?("backported")
              @logger.info "[DONE] \##{pull_request['number']} #{pull_request['title']} LABELS: #{pull_request['labels'].collect { |label| label['name'] }}"
              next
            end
            @logger.info "* \##{pull_request['number']} #{pull_request['title']} LABELS: #{pull_request['labels'].collect { |label| label['name'] }}"
            # merged into this commit
            @logger.debug "MERGE_COMMIT_SHA: #{pull_request['merge_commit_sha']}"
            # Inject a "Backport #NNN" reference into the PR body template.
            body = pull_request["body"].gsub(/\*\*Which issue\(s\) this PR fixes\*\*: \r\n/,
                                             "**Which issue(s) this PR fixes**: \r\nBackport \##{pull_request['number']}\r\n")
            backports << {
              number: pull_request["number"],
              merge_commit_sha: pull_request["merge_commit_sha"],
              title: "Backport(#{@options[:branch]}): #{pull_request['title']} (\##{pull_request['number']})",
              body: body
            }
          end
        end
      end
    end
    backports
  end

  # For each collected backport: create a working branch tracking the
  # release branch on @options[:remote], cherry-pick the merge commit,
  # push, and open a PR against the release branch with `gh api`.
  # Failed cherry-picks are aborted and reported at the end; the original
  # branch is always restored via the ensure block.
  def create_pull_requests
    backports = collect_backports
    if backports.empty?
      @logger.info "No need to backport pull requests"
      return
    end
    failed = []
    original_branch = current_branch
    backports.each do |backport|
      @logger.info "Backport #{backport[:number]} #{backport[:title]}"
      if @options[:dry_run]
        @logger.info "DRY_RUN: PR was created: \##{backport[:number]} #{backport[:title]}"
        next
      end
      begin
        branch = "backport-to-#{@options[:branch]}/pr#{backport[:number]}"
        @logger.debug "git switch --create #{branch} --track #{@options[:remote]}/#{@options[:branch]}"
        IO.popen(["git", "switch", "--create", branch, "--track", "#{@options[:remote]}/#{@options[:branch]}"]) do |io|
          @logger.debug io.read
        end
        @logger.info `git branch`
        @logger.info "cherry-pick for #{backport[:number]}"
        @logger.debug "git cherry-pick --signoff #{backport[:merge_commit_sha]}"
        IO.popen(["git", "cherry-pick", "--signoff", backport[:merge_commit_sha]]) do |io|
          @logger.debug io.read
        end
        # NOTE(review): `$? != 0` leans on Process::Status integer
        # comparison; `$?.success?` would be clearer. Works as-is.
        if $? != 0
          @logger.warn "Give up cherry-pick for #{backport[:number]}"
          @logger.debug `git cherry-pick --abort`
          failed << backport
          next
        else
          @logger.info "Push branch: #{branch}"
          # NOTE(review): pushes to the hard-coded "origin" remote rather
          # than @options[:remote] — presumably intentional for forks
          # (track upstream, push to your fork), but confirm.
          @logger.debug `git push origin #{branch}`
        end
        upstream_repo = "/repos/#{@options[:upstream]}/pulls"
        owner = @options[:upstream].split('/').first
        head = "#{owner}:#{branch}"
        @logger.debug "Create pull request repo: #{upstream_repo} head: #{head} base: #{@options[:branch]}"
        IO.popen(["gh", "api", "--method", "POST",
                  "-H", "Accept: application/vnd.github+json",
                  "-H", "X-GitHub-Api-Version: 2022-11-28",
                  upstream_repo,
                  "-f", "title=#{backport[:title]}",
                  "-f", "body=#{backport[:body]}",
                  "-f", "head=#{head}",
                  "-f", "base=#{@options[:branch]}"]) do |io|
          json = JSON.parse(io.read)
          @logger.info "PR was created: #{json['url']}"
        end
      rescue => e
        @logger.error "ERROR: #{backport[:number]} #{e.message}"
      ensure
        # Always return to the branch the user started on.
        IO.popen(["git", "checkout", original_branch]) do |io|
          @logger.debug io.read
        end
      end
    end
    failed.each do |backport|
      @logger.error "FAILED: #{backport[:number]} #{backport[:title]}"
    end
  end

  # Entry point: parse flags, then collect and backport PRs.
  def run(argv)
    parse_command_line(argv)
    @logger.info("Target upstream: #{@options[:upstream]} target branch: #{@options[:branch]}")
    create_pull_requests
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/tasks/benchmark/patch_in_tail.rb | tasks/benchmark/patch_in_tail.rb | require 'benchmark'
require 'fluent/plugin/in_tail'
# Monkey-patch for the in_tail benchmark: wraps the first file-read pass
# of in_tail's IOHandler in Benchmark.bm, prints the timing report, and
# terminates the process so exactly one pass is measured.
class Fluent::Plugin::TailInput::TailWatcher::IOHandler
  # Keep the original implementation reachable for the measured call.
  alias_method :original_with_io, :with_io

  def with_io(&block)
    @benchmark_measured_in_tail ||= false
    # Measure the benchmark only once.
    # NOTE(review): since the process exits right after measuring, this
    # guard only matters if with_io is re-entered during the measured
    # call itself — confirm.
    return original_with_io(&block) if @benchmark_measured_in_tail

    Benchmark.bm do |x|
      x.report {
        original_with_io(&block)
        @benchmark_measured_in_tail = true
      }
    end
    exit 0
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_event.rb | test/test_event.rb | require_relative 'helper'
require 'fluent/test'
require 'fluent/event'
require 'fluent/plugin/compressable'
# Unit tests for Fluent's EventStream implementations: each concrete
# stream class is exercised for emptiness/size, repeatability, deep dup,
# slicing, iteration, and (compressed) msgpack serialization.
module EventTest
  # Shared assertion: two streams hold equal-length record lists whose
  # record objects are distinct (i.e. dup performed a deep copy).
  module DeepCopyAssertion
    def assert_duplicated_records(es1, es2)
      ary1 = []
      es1.each do |_, record|
        ary1 << record
      end
      ary2 = []
      es2.each do |_, record|
        ary2 << record
      end
      assert_equal ary1.size, ary2.size
      ary1.each_with_index do |r, i|
        assert_not_equal r.object_id, ary2[i].object_id
      end
    end
  end

  # Tests for OneEventStream: a stream holding exactly one (time, record).
  class OneEventStreamTest < ::Test::Unit::TestCase
    include Fluent
    include DeepCopyAssertion
    include Fluent::Plugin::Compressable

    def setup
      @time = event_time()
      @record = {'k' => 'v', 'n' => 1}
      @es = OneEventStream.new(@time, @record)
    end

    test 'empty?' do
      assert_false @es.empty?
    end

    test 'size' do
      assert_equal 1, @es.size
    end

    test 'repeatable?' do
      assert_true @es.repeatable?
    end

    test 'dup' do
      dupped = @es.dup
      assert_kind_of OneEventStream, dupped
      assert_not_equal @es.object_id, dupped.object_id
      assert_duplicated_records @es, dupped
    end

    test 'slice' do
      # Out-of-range slices yield empty streams.
      assert_equal 0, @es.slice(1, 1).size
      assert_equal 0, @es.slice(0, 0).size

      sliced = @es.slice(0, 1)
      assert_kind_of EventStream, sliced
      assert_equal 1, sliced.size
      sliced.each do |time, record|
        assert_equal @time, time
        assert_equal @record, record
      end
    end

    test 'each' do
      @es.each { |time, record|
        assert_equal @time, time
        assert_equal @record, record
      }
    end

    test 'to_msgpack_stream' do
      stream = @es.to_msgpack_stream
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @time, time
        assert_equal @record, record
      }
    end

    test 'to_msgpack_stream with time_int argument' do
      # time_int: true serializes EventTime as a plain integer.
      stream = @es.to_msgpack_stream(time_int: true)
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @time.to_i, time
        assert_equal @record, record
      }
    end

    test 'to_compressed_msgpack_stream' do
      stream = @es.to_compressed_msgpack_stream
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(decompress(stream)) { |time, record|
        assert_equal @time, time
        assert_equal @record, record
      }
    end

    test 'to_compressed_msgpack_stream with time_int argument' do
      stream = @es.to_compressed_msgpack_stream(time_int: true)
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(decompress(stream)) { |time, record|
        assert_equal @time.to_i, time
        assert_equal @record, record
      }
    end
  end

  # Tests for ArrayEventStream: a stream backed by an array of entries.
  class ArrayEventStreamTest < ::Test::Unit::TestCase
    include Fluent
    include DeepCopyAssertion
    include Fluent::Plugin::Compressable

    def setup
      time = Engine.now
      @times = [Fluent::EventTime.new(time.sec), Fluent::EventTime.new(time.sec + 1)]
      @records = [{'k' => 'v1', 'n' => 1}, {'k' => 'v2', 'n' => 2}]
      @es = ArrayEventStream.new(@times.zip(@records))
    end

    test 'repeatable?' do
      assert_true @es.repeatable?
    end

    test 'dup' do
      dupped = @es.dup
      assert_kind_of ArrayEventStream, dupped
      assert_not_equal @es.object_id, dupped.object_id
      assert_duplicated_records @es, dupped
    end

    test 'empty?' do
      assert_not_empty @es
      assert_true ArrayEventStream.new([]).empty?
    end

    test 'size' do
      assert_equal 2, @es.size
      assert_equal 0, ArrayEventStream.new([]).size
    end

    test 'slice' do
      sliced = @es.slice(1,1)
      assert_kind_of EventStream, sliced
      assert_equal 1, sliced.size
      sliced.each do |time, record|
        assert_equal @times[1], time
        assert_equal 'v2', record['k']
        assert_equal 2, record['n']
      end

      sliced = @es.slice(0,2)
      assert_kind_of EventStream, sliced
      assert_equal 2, sliced.size

      counter = 0
      sliced.each do |time, record|
        assert_equal @times[counter], time
        assert_equal @records[counter]['k'], record['k']
        assert_equal @records[counter]['n'], record['n']
        counter += 1
      end
    end

    test 'each' do
      i = 0
      @es.each { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    test 'to_msgpack_stream' do
      i = 0
      stream = @es.to_msgpack_stream
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    test 'to_compressed_msgpack_stream' do
      i = 0
      compressed_stream = @es.to_compressed_msgpack_stream
      stream = decompress(compressed_stream)
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    test 'to_compressed_msgpack_stream with time_int argument' do
      i = 0
      compressed_stream = @es.to_compressed_msgpack_stream(time_int: true)
      stream = decompress(compressed_stream)
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i].to_i, time
        assert_equal @records[i], record
        i += 1
      }
    end
  end

  # Tests for MultiEventStream: a mutable stream built via #add.
  class MultiEventStreamTest < ::Test::Unit::TestCase
    include Fluent
    include DeepCopyAssertion
    include Fluent::Plugin::Compressable

    def setup
      time = Engine.now
      @times = [Fluent::EventTime.new(time.sec), Fluent::EventTime.new(time.sec + 1)]
      @records = [{'k' => 'v1', 'n' => 1}, {'k' => 'v2', 'n' => 2}]
      @es = MultiEventStream.new
      @times.zip(@records).each { |_time, record|
        @es.add(_time, record)
      }
    end

    test 'repeatable?' do
      assert_true @es.repeatable?
    end

    test 'dup' do
      dupped = @es.dup
      assert_kind_of MultiEventStream, dupped
      assert_not_equal @es.object_id, dupped.object_id
      assert_duplicated_records @es, dupped
    end

    test 'empty?' do
      assert_not_empty @es
      assert_true MultiEventStream.new.empty?
    end

    test 'size' do
      assert_equal 2, @es.size
      assert_equal 0, MultiEventStream.new.size
    end

    test 'slice' do
      sliced = @es.slice(1,1)
      assert_kind_of EventStream, sliced
      assert_equal 1, sliced.size
      sliced.each do |time, record|
        assert_equal @times[1], time
        assert_equal 'v2', record['k']
        assert_equal 2, record['n']
      end

      sliced = @es.slice(0,2)
      assert_kind_of EventStream, sliced
      assert_equal 2, sliced.size

      counter = 0
      sliced.each do |time, record|
        assert_equal @times[counter], time
        assert_equal @records[counter]['k'], record['k']
        assert_equal @records[counter]['n'], record['n']
        counter += 1
      end
    end

    test 'each' do
      i = 0
      @es.each { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    test 'to_msgpack_stream' do
      i = 0
      stream = @es.to_msgpack_stream
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    test 'to_compressed_msgpack_stream' do
      i = 0
      compressed_stream = @es.to_compressed_msgpack_stream
      stream = decompress(compressed_stream)
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    test 'to_compressed_msgpack_stream with time_int argument' do
      i = 0
      compressed_stream = @es.to_compressed_msgpack_stream(time_int: true)
      stream = decompress(compressed_stream)
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i].to_i, time
        assert_equal @records[i], record
        i += 1
      }
    end
  end

  # Tests for MessagePackEventStream: a stream wrapping pre-packed bytes.
  class MessagePackEventStreamTest < ::Test::Unit::TestCase
    include Fluent
    include DeepCopyAssertion
    include Fluent::Plugin::Compressable

    def setup
      pk = Fluent::MessagePackFactory.msgpack_packer
      time = Engine.now
      @times = [Fluent::EventTime.new(time.sec), Fluent::EventTime.new(time.sec + 1)]
      @records = [{'k' => 'v1', 'n' => 1}, {'k' => 'v2', 'n' => 2}]
      @times.zip(@records).each { |_time, record|
        pk.write([_time, record])
      }
      @es = MessagePackEventStream.new(pk.to_s)
    end

    test 'dup' do
      dupped = @es.dup
      assert_kind_of MessagePackEventStream, dupped
      assert_not_equal @es.object_id, dupped.object_id
      assert_duplicated_records @es, dupped
      # After iteration of events (done in assert_duplicated_records),
      # duplicated event stream still has unpacked objects and correct size
      dupped = @es.dup
      assert_equal 2, dupped.instance_eval{ @size }
    end

    test 'empty?' do
      assert_false @es.empty?
      assert_true MessagePackEventStream.new('', 0).empty?
    end

    test 'size' do
      assert_equal 2, @es.size
      assert_equal 0, MessagePackEventStream.new('').size
    end

    test 'repeatable?' do
      assert_true @es.repeatable?
    end

    test 'slice' do
      sliced = @es.slice(1,1)
      assert_kind_of EventStream, sliced
      assert_equal 1, sliced.size
      sliced.each do |time, record|
        assert_equal @times[1], time
        assert_equal 'v2', record['k']
        assert_equal 2, record['n']
      end

      sliced = @es.slice(0,2)
      assert_kind_of EventStream, sliced
      assert_equal 2, sliced.size

      counter = 0
      sliced.each do |time, record|
        assert_equal @times[counter], time
        assert_equal @records[counter]['k'], record['k']
        assert_equal @records[counter]['n'], record['n']
        counter += 1
      end
    end

    test 'each' do
      i = 0
      @es.each { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    test 'to_msgpack_stream' do
      i = 0
      stream = @es.to_msgpack_stream
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    test 'to_compressed_msgpack_stream' do
      i = 0
      compressed_stream = @es.to_compressed_msgpack_stream
      stream = decompress(compressed_stream)
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    # `any?` represents an Enumerable method which calls `each` internally
    test 'size_after_any' do
      @es.any?
      assert_equal 2, @es.size
    end

    # `any?` represents an Enumerable method which calls `each` internally
    test 'each_after_any' do
      @es.any?
      count = 0
      @es.each { |time, record| count += 1 }
      assert_equal 2, count
    end
  end

  # Tests for CompressedMessagePackEventStream: packed bytes that are
  # stored compressed and lazily decompressed on first access.
  class CompressedMessagePackEventStreamTest < ::Test::Unit::TestCase
    include Fluent
    include DeepCopyAssertion
    include Fluent::Plugin::Compressable

    def setup
      time = Engine.now
      @times = [Fluent::EventTime.new(time.sec), Fluent::EventTime.new(time.sec + 1)]
      @records = [{ 'k' => 'v1', 'n' => 1 }, { 'k' => 'v2', 'n' => 2 }]
      @packed_record = ''
      @entries = ''
      @times.zip(@records).each do |_time, record|
        v = [_time, record].to_msgpack
        @packed_record += v
        @entries += compress(v)
      end
      @es = CompressedMessagePackEventStream.new(@entries)
    end

    # Asserts that @data is still compressed before the yielded operation
    # and has been decompressed (matches the raw packed bytes) afterwards.
    def ensure_data_is_decompressed
      assert_equal @entries, @es.instance_variable_get(:@data)
      yield
      assert_equal @packed_record, @es.instance_variable_get(:@data)
    end

    test 'dup' do
      dupped = @es.dup
      assert_kind_of CompressedMessagePackEventStream, dupped
      assert_not_equal @es.object_id, dupped.object_id
      assert_duplicated_records @es, dupped
      # After iteration of events (done in assert_duplicated_records),
      # duplicated event stream still has unpacked objects and correct size
      dupped = @es.dup
      assert_equal 2, dupped.instance_eval{ @size }
    end

    test 'repeatable?' do
      assert_true @es.repeatable?
    end

    test 'size' do
      assert_equal 0, CompressedMessagePackEventStream.new('').size
      ensure_data_is_decompressed { assert_equal 2, @es.size }
    end

    test 'each' do
      i = 0
      ensure_data_is_decompressed do
        @es.each do |time, record|
          assert_equal @times[i], time
          assert_equal @records[i], record
          i += 1
        end
      end
    end

    test 'slice' do
      sliced = nil
      ensure_data_is_decompressed { sliced = @es.slice(1,1) }
      assert_kind_of EventStream, sliced
      assert_equal 1, sliced.size
      sliced.each do |time, record|
        assert_equal @times[1], time
        assert_equal 'v2', record['k']
        assert_equal 2, record['n']
      end

      sliced = @es.slice(0,2)
      assert_kind_of EventStream, sliced
      assert_equal 2, sliced.size

      counter = 0
      sliced.each do |time, record|
        assert_equal @times[counter], time
        assert_equal @records[counter]['k'], record['k']
        assert_equal @records[counter]['n'], record['n']
        counter += 1
      end
    end

    test 'to_msgpack_stream' do
      i = 0
      stream = nil
      ensure_data_is_decompressed { stream = @es.to_msgpack_stream }
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end

    test 'to_compressed_msgpack_stream' do
      i = 0
      # Do not call ensure_decompressed!
      assert_equal @entries, @es.instance_variable_get(:@data)
      compressed_stream = @es.to_compressed_msgpack_stream
      assert_equal @entries, @es.instance_variable_get(:@data)
      stream = decompress(compressed_stream)
      Fluent::MessagePackFactory.msgpack_unpacker.feed_each(stream) { |time, record|
        assert_equal @times[i], time
        assert_equal @records[i], record
        i += 1
      }
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_event_router.rb | test/test_event_router.rb | require_relative 'helper'
require 'fluent/event_router'
require_relative 'test_plugin_classes'
# Tests for Fluent::EventRouter and its internals (MatchCache, Pipeline):
# routing of events to outputs, filter chaining and optimization, the
# default collector fallback, and emit-error handling. Mocks use the rr
# framework (mock/stub/assert_rr).
class EventRouterTest < ::Test::Unit::TestCase
  include Fluent
  include FluentTest

  teardown do
    # Drop memoized fixtures so each test builds fresh plugin instances.
    @output = nil
    @filter = nil
    @compat_filter = nil
    @error_output = nil
    @emit_handler = nil
    @default_collector = nil
  end

  # Lazily-built fixture plugins (defined in test_plugin_classes).
  def output
    @output ||= FluentTestOutput.new
  end

  def filter
    @filter ||= FluentTestFilter.new
  end

  def compat_filter
    @compat_filter ||= FluentCompatTestFilter.new
  end

  def error_output
    @error_output ||= FluentTestErrorOutput.new
  end

  def emit_handler
    @emit_handler ||= TestEmitErrorHandler.new
  end

  def default_collector
    @default_collector ||= FluentTestOutput.new
  end

  # Wraps a single record in a OneEventStream.
  def event(record, time = Engine.now)
    OneEventStream.new(time, record)
  end

  DEFAULT_EVENT_NUM = 5

  # Builds a MultiEventStream of +num+ records keyed 'key' => "value<i>".
  def events(num = DEFAULT_EVENT_NUM)
    es = MultiEventStream.new
    num.times { |i|
      es.add(Engine.now, 'key' => "value#{i}")
    }
    es
  end

  sub_test_case EventRouter::MatchCache do
    setup do
      @match_cache = EventRouter::MatchCache.new
    end

    test "call block when non-cached key" do
      assert_raise(RuntimeError.new('Test!')) {
        @match_cache.get('test') { raise 'Test!' }
      }
    end

    test "don't call block when cached key" do
      @match_cache.get('test') { "I'm cached" }
      assert_nothing_raised {
        @match_cache.get('test') { raise 'Test!' }
      }
      assert_equal "I'm cached", @match_cache.get('test') { raise 'Test!' }
    end

    test "call block when keys are expired" do
      cache_size = EventRouter::MatchCache::MATCH_CACHE_SIZE
      cache_size.times { |i|
        @match_cache.get("test#{i}") { "I'm cached #{i}" }
      }
      assert_nothing_raised {
        cache_size.times { |i|
          @match_cache.get("test#{i}") { raise "Why called?" }
        }
      }
      # expire old keys
      cache_size.times { |i|
        @match_cache.get("new_test#{i}") { "I'm young #{i}" }
      }
      num_called = 0
      cache_size.times { |i|
        @match_cache.get("test#{i}") { num_called += 1 }
      }
      assert_equal cache_size, num_called
    end
  end

  sub_test_case EventRouter::Pipeline do
    setup do
      @pipeline = EventRouter::Pipeline.new
      @es = event('key' => 'value')
    end

    test 'set one output' do
      @pipeline.set_output(output)
      @pipeline.emit_events('test', @es)
      assert_equal 1, output.events.size
      assert_equal 'value', output.events['test'].first['key']
    end

    sub_test_case 'with filter' do
      setup do
        @pipeline.set_output(output)
      end

      data('Filter plugin' => 'filter',
           'Compat::Filter plugin' => 'compat_filter')
      test 'set one filer' do |filter_type|
        @pipeline.add_filter(filter_type == 'filter' ? filter : compat_filter)
        @pipeline.emit_events('test', @es)
        assert_equal 1, output.events.size
        assert_equal 'value', output.events['test'].first['key']
        assert_equal 0, output.events['test'].first['__test__']
      end

      data('Filter plugin' => 'filter',
           'Compat::Filter plugin' => 'compat_filter')
      test 'set one filer with multi events' do |filter_type|
        @pipeline.add_filter(filter_type == 'filter' ? filter : compat_filter)
        @pipeline.emit_events('test', events)
        assert_equal 1, output.events.size
        assert_equal 5, output.events['test'].size
        DEFAULT_EVENT_NUM.times { |i|
          assert_equal "value#{i}", output.events['test'][i]['key']
          assert_equal i, output.events['test'][i]['__test__']
        }
      end
    end
  end

  sub_test_case EventRouter do
    teardown do
      @event_router = nil
    end

    def event_router
      @event_router ||= EventRouter.new(default_collector, emit_handler)
    end

    sub_test_case 'default collector' do
      test 'call default collector when no output' do
        assert_rr do
          mock(default_collector).emit_events('test', is_a(OneEventStream))
          event_router.emit('test', Engine.now, 'k' => 'v')
        end
      end

      test "call default collector when only filter" do
        event_router.add_rule('test', filter)
        assert_rr do
          # After apply Filter, EventStream becomes MultiEventStream by default
          mock(default_collector).emit_events('test', is_a(MultiEventStream))
          event_router.emit('test', Engine.now, 'k' => 'v')
        end
        assert_equal 1, filter.num
      end

      test "call default collector when no matched with output" do
        event_router.add_rule('test', output)
        assert_rr do
          mock(default_collector).emit_events('dummy', is_a(OneEventStream))
          event_router.emit('dummy', Engine.now, 'k' => 'v')
        end
      end

      test "don't call default collector when tag matched" do
        event_router.add_rule('test', output)
        assert_rr do
          mock(default_collector).emit_events('test', is_a(OneEventStream)).never
          event_router.emit('test', Engine.now, 'k' => 'v')
        end
        # check emit handler doesn't catch rr error
        assert_empty emit_handler.events
      end
    end

    sub_test_case 'filter' do
      test 'filter should be called when tag matched' do
        filter = Class.new(FluentTestFilter) { |x|
          def filter_stream(_tag, es); end
        }.new
        event_router.add_rule('test', filter)
        assert_rr do
          mock(filter).filter_stream('test', is_a(OneEventStream)) { events }
          event_router.emit('test', Engine.now, 'k' => 'v')
        end
      end

      test 'filter should not be called when tag mismatched' do
        event_router.add_rule('test', filter)
        assert_rr do
          mock(filter).filter_stream('test', is_a(OneEventStream)).never
          event_router.emit('foo', Engine.now, 'k' => 'v')
        end
      end

      test 'filter changes records' do
        event_router.add_rule('test', filter)
        event_router.add_rule('test', output)
        event_router.emit('test', Engine.now, 'k' => 'v')
        assert_equal 1, filter.num
        assert_equal 1, output.events['test'].size
        assert_equal 0, output.events['test'].first['__test__']
        assert_equal 'v', output.events['test'].first['k']
      end

      test 'filter can be chained' do
        other_filter = FluentTestFilter.new('__hoge__')
        event_router.add_rule('test', filter)
        event_router.add_rule('test', other_filter)
        event_router.add_rule('test', output)
        event_router.emit('test', Engine.now, 'k' => 'v')
        assert_equal 1, filter.num
        assert_equal 1, other_filter.num
        assert_equal 1, output.events['test'].size
        assert_equal 0, output.events['test'].first['__test__']
        assert_equal 0, output.events['test'].first['__hoge__']
        assert_equal 'v', output.events['test'].first['k']
      end
    end

    # The router "optimizes" a filter chain (per-record #filter /
    # #filter_with_time calls) only when no filter defines #filter_stream.
    sub_test_case 'optimized filter' do
      setup do
        @record = { 'k' => 'v' }
        @now = Engine.now
      end

      test 'call optimized filter when the filter plugin implements #filter without #filter_stream' do
        event_router.add_rule('test', filter)
        assert_rr do
          mock(filter).filter('test', @now, @record) { @record }
          event_router.emit('test', @now, @record)
        end
      end

      test 'call optimized filter when the filter plugin implements #filter_with_time without #filter_stream' do
        filter = Class.new(FluentTestFilter) {
          undef_method :filter
          def filter_with_time(tag, time, record); end
        }.new
        event_router.add_rule('test', filter)
        assert_rr do
          # NOTE(review): `time` inside this return block does not appear to
          # be defined in test scope (likely meant @now) — confirm.
          mock(filter).filter_with_time('test', @now, @record) { [time, @record] }
          event_router.emit('test', @now, @record)
        end
      end

      test "don't call optimized filter when filter plugins implement #filter_stream" do
        filter = Class.new(FluentTestFilter) {
          undef_method :filter
          def filter_stream(tag, time, record); end
        }.new
        event_router.add_rule('test', filter)
        assert_rr do
          mock(filter).filter_stream('test', is_a(OneEventStream)) { OneEventStream.new(@now, @record) }
          event_router.emit('test', @now, @record)
        end
      end

      test 'call optimized filter when filter plugins have #filter_with_time instead of #filter' do
        filter_with_time = Class.new(FluentTestFilter) {
          undef_method :filter
          def filter_with_time(tag, time, record); end
        }.new
        event_router.add_rule('test', filter_with_time)
        event_router.add_rule('test', filter)
        assert_rr do
          mock(filter_with_time).filter_with_time('test', @now, @record) { [@now + 1, @record] }
          mock(filter).filter('test', @now + 1, @record) { @record }
          event_router.emit('test', @now, @record)
        end
      end

      test "don't call optimized filter even if just a filter of some filters implements #filter_stream method" do
        filter_stream = Class.new(FluentTestFilter) {
          def filter_stream(tag, record); end
        }.new
        filter_with_time = Class.new(FluentTestFilter) {
          undef_method :filter
          def filter_with_time(tag, time, record); end
        }.new
        filters = [filter_stream, filter_with_time, filter]
        filters.each { |f| event_router.add_rule('test', f) }
        e = OneEventStream.new(@now, @record)
        assert_rr do
          mock($log).info("disable filter chain optimization because #{[filter_stream].map(&:class)} uses `#filter_stream` method.")
          mock(filter_stream).filter_stream('test', is_a(OneEventStream)) { e }
          mock(filter).filter_stream('test', is_a(OneEventStream)) { e }
          mock(filter_with_time).filter_stream('test', is_a(OneEventStream)) { e }
          event_router.emit('test', @now, @record)
        end
      end
    end

    sub_test_case 'emit_error_handler' do
      test 'call handle_emits_error when emit failed' do
        event_router.add_rule('test', error_output)
        event_router.emit('test', Engine.now, 'k' => 'v')
        assert_rr do
          mock(emit_handler).handle_emits_error('test', is_a(OneEventStream), is_a(RuntimeError))
          event_router.emit('test', Engine.now, 'k' => 'v')
        end
      end

      test 'can pass records modified by filters to handle_emits_error' do
        filter = Class.new(FluentTestFilter) {
          def filter_stream(_tag, es); end
        }.new
        event_router.add_rule('test', filter)
        event_router.add_rule('test', error_output)
        time = Engine.now
        modified_es = OneEventStream.new(time, 'modified_label' => 'modified_value')
        assert_rr do
          stub(filter).filter_stream { modified_es }
          mock(emit_handler).handle_emits_error('test', modified_es, is_a(RuntimeError))
          event_router.emit('test', time, 'pre_label' => 'pre_value')
        end
      end
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_tls.rb | test/test_tls.rb | require_relative 'helper'
require 'fluent/tls'
# Tests for Fluent::TLS version constants and the helpers that apply a
# TLS version to an OpenSSL SSLContext or an options hash.
# NOTE(review): the class name "UniqueIdTest" looks copy-pasted from
# another test file — everything here exercises Fluent::TLS; consider
# renaming (test-unit collects by subclass, so behavior is unaffected).
class UniqueIdTest < Test::Unit::TestCase
  # Both the modern (:TLS1_1) and legacy (:TLSv1_1) symbol spellings
  # must be accepted.
  TEST_TLS1_1_CASES = {
    'New TLS v1.1' => :'TLS1_1',
    'Old TLS v1.1' => :'TLSv1_1',
  }
  TEST_TLS1_2_CASES = {
    'New TLS v1.2' => :'TLS1_2',
    'Old TLS v1.2' => :'TLSv1_2'
  }
  # Only defined when the linked OpenSSL supports TLS 1.3.
  TEST_TLS1_3_CASES = {
    'New TLS v1.3' => :'TLS1_3',
    'Old TLS v1.3' => :'TLSv1_3'
  } if defined?(OpenSSL::SSL::TLS1_3_VERSION)
  TEST_TLS_CASES = TEST_TLS1_1_CASES.merge(TEST_TLS1_2_CASES)

  sub_test_case 'constants' do
    test 'default version' do
      assert_equal :'TLSv1_2', Fluent::TLS::DEFAULT_VERSION
    end

    data(TEST_TLS_CASES)
    test 'supported versions' do |ver|
      assert_include Fluent::TLS::SUPPORTED_VERSIONS, ver
    end

    test 'default ciphers' do
      assert_equal "ALL:!aNULL:!eNULL:!SSLv2", Fluent::TLS::CIPHERS_DEFAULT
    end
  end

  sub_test_case 'set_version_to_context' do
    setup do
      @ctx = OpenSSL::SSL::SSLContext.new
    end

    # TODO: After openssl module supports min_version/max_version accessor, add assert for it.
    data(TEST_TLS_CASES)
    test 'with version' do |ver|
      assert_nothing_raised {
        Fluent::TLS.set_version_to_context(@ctx, ver, nil, nil)
      }
    end

    data(TEST_TLS_CASES)
    test 'can specify old/new syntax to min_version/max_version' do |ver|
      omit "min_version=/max_version= is not supported" unless Fluent::TLS::MIN_MAX_AVAILABLE
      assert_nothing_raised {
        Fluent::TLS.set_version_to_context(@ctx, Fluent::TLS::DEFAULT_VERSION, ver, ver)
      }
    end

    test 'raise ConfigError when either one of min_version/max_version is not specified' do
      omit "min_version=/max_version= is not supported" unless Fluent::TLS::MIN_MAX_AVAILABLE
      ver = Fluent::TLS::DEFAULT_VERSION
      assert_raise(Fluent::ConfigError) {
        Fluent::TLS.set_version_to_context(@ctx, ver, ver, nil)
      }
      assert_raise(Fluent::ConfigError) {
        Fluent::TLS.set_version_to_context(@ctx, ver, nil, ver)
      }
    end
  end

  sub_test_case 'set_version_to_options' do
    setup do
      @opt = {}
    end

    test 'set min_version/max_version when supported' do
      omit "min_version=/max_version= is not supported" unless Fluent::TLS::MIN_MAX_AVAILABLE
      ver = Fluent::TLS::DEFAULT_VERSION
      assert_raise(Fluent::ConfigError) {
        Fluent::TLS.set_version_to_options(@opt, ver, ver, nil)
      }
      assert_raise(Fluent::ConfigError) {
        Fluent::TLS.set_version_to_options(@opt, ver, nil, ver)
      }
      # On OpenSSL without TLS 1.3 this keeps ver = DEFAULT_VERSION.
      ver = :'TLSv1_3' if defined?(OpenSSL::SSL::TLS1_3_VERSION)
      assert_equal Fluent::TLS.const_get(:METHODS_MAP)[ver], Fluent::TLS.set_version_to_options(@opt, ver, nil, nil)[:min_version]
      assert_equal Fluent::TLS.const_get(:METHODS_MAP)[ver], Fluent::TLS.set_version_to_options(@opt, ver, nil, nil)[:max_version]
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_configdsl.rb | test/test_configdsl.rb | require_relative 'helper'
require 'fluent/config/dsl'
require 'fluent/test'
require 'tempfile'
class ConfigDSLTest < Test::Unit::TestCase
# TEST_CONFIG1 = %[
# <source>
# type forward
# port 24224
# </source>
# <match test.**>
# type forward
# flush_interval 1s
# <server>
# host host0.example.com
# port 24224
# </server>
# <server>
# host host1.example.com
# port 24224
# </server>
# </match>
# ]
TEST_DSL_CONFIG1 = %q[
source {
type "forward"
port 24224
}
match('test.**') {
type "forward"
flush_interval "1s"
(0..1).each do |i|
server {
host "host#{i}.example.com"
port 24224
}
end
}
]
TEST_DSL_CONFIG2 = %q[
v = [0, 1, 2]
]
TEST_DSL_CONFIG3 = %q[
match
]
TEST_DSL_CONFIG4 = %q[
match('aa', 'bb'){
type :null
}
]
TEST_DSL_CONFIG5 = %q[
match('aa')
]
def test_parse
root = Fluent::Config::DSL::Parser.parse(TEST_DSL_CONFIG1)
assert_equal 0, root.keys.size
assert_equal 2, root.elements.size
e0 = root.elements[0]
assert_equal 'source', e0.name
assert_equal '', e0.arg
assert_equal 'forward', e0['@type']
assert_equal '24224', e0['port']
e1 = root.elements[1]
assert_equal 'match', e1.name
assert_equal 'test.**', e1.arg
assert_equal 'forward', e1['@type']
assert_equal '1s', e1['flush_interval']
assert_equal 2, e1.elements.size
e1s0 = e1.elements[0]
assert_equal 'server', e1s0.name
assert_equal 'host0.example.com', e1s0['host']
assert_equal '24224', e1s0['port']
e1s1 = e1.elements[1]
assert_equal 'server', e1s1.name
assert_equal 'host1.example.com', e1s1['host']
assert_equal '24224', e1s1['port']
end
def test_parse2
root = Fluent::Config::DSL::Parser.parse(TEST_DSL_CONFIG2)
assert_equal 0, root.keys.size
assert_equal 0, root.elements.size
end
def test_config_error
assert_raise(ArgumentError) {
Fluent::Config::DSL::Parser.parse(TEST_DSL_CONFIG3)
}
assert_raise(ArgumentError) {
Fluent::Config::DSL::Parser.parse(TEST_DSL_CONFIG4)
}
assert_raise(ArgumentError) {
Fluent::Config::DSL::Parser.parse(TEST_DSL_CONFIG5)
}
end
def test_with_ruby_keyword
uname_string = `uname -a`
tmpfile = Tempfile.create('fluentd-test')
tmpfile.write(uname_string)
tmpfile.close
root1 = Fluent::Config::DSL::Parser.parse(<<DSL)
uname_str = ruby.open("#{tmpfile.path}"){|out| out.read}
source {
uname uname_str
}
DSL
source1 = root1.elements.first
assert_equal 'source', source1.name
assert_equal 1, source1.keys.size
assert_equal uname_string, source1['uname']
root2 = Fluent::Config::DSL::Parser.parse(<<DSL)
ruby_version = ruby {
require 'erb'
ERB.new('<%= RUBY_VERSION %> from erb').result
}
source {
version ruby_version
}
DSL
source2 = root2.elements.first
assert_equal 'source', source2.name
assert_equal 1, source2.keys.size
assert_equal "#{RUBY_VERSION} from erb", source2['version']
# Parser#parse raises NoMethodError when configuration dsl elements are written in ruby block
conf3 = <<DSL
ruby {
source {
type "tail"
}
}
source {
uname uname_str
}
DSL
assert_raise (NoMethodError) { Fluent::Config::DSL::Parser.parse(conf3) }
ensure
File.delete(tmpfile.path)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_variable_store.rb | test/test_variable_store.rb | require_relative 'helper'
require 'fluent/variable_store'
class VariableStoreTest < Test::Unit::TestCase
def setup
end
def teardown
Fluent::VariableStore.try_to_reset do
# nothing
end
end
sub_test_case '#fetch_or_build' do
test 'fetch same object when the same key is passed' do
c1 = Fluent::VariableStore.fetch_or_build(:test)
c2 = Fluent::VariableStore.fetch_or_build(:test)
assert_equal c1, c2
assert_equal c1.object_id, c2.object_id
c3 = Fluent::VariableStore.fetch_or_build(:test2)
assert_not_equal c1.object_id, c3.object_id
end
test 'can be passed a default value' do
c1 = Fluent::VariableStore.fetch_or_build(:test, default_value: Set.new)
c2 = Fluent::VariableStore.fetch_or_build(:test)
assert_kind_of Set, c1
assert_equal c1, c2
assert_equal c1.object_id, c2.object_id
end
end
sub_test_case '#try_to_reset' do
test 'reset all values' do
c1 = Fluent::VariableStore.fetch_or_build(:test)
c1[:k1] = 1
assert_equal 1, c1[:k1]
Fluent::VariableStore.try_to_reset do
# nothing
end
c1 = Fluent::VariableStore.fetch_or_build(:test)
assert_nil c1[:k1]
end
test 'rollback resetting if error raised' do
c1 = Fluent::VariableStore.fetch_or_build(:test)
c1[:k1] = 1
assert_equal 1, c1[:k1]
assert_raise(RuntimeError.new('pass')) do
Fluent::VariableStore.try_to_reset do
raise 'pass'
end
end
c1 = Fluent::VariableStore.fetch_or_build(:test)
assert_equal 1, c1[:k1]
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_formatter.rb | test/test_formatter.rb | require_relative 'helper'
require 'fluent/test'
require 'fluent/formatter'
module FormatterTest
include Fluent
def tag
'tag'
end
def record
{'message' => 'awesome', 'greeting' => 'hello'}
end
class BaseFormatterTest < ::Test::Unit::TestCase
include FormatterTest
def test_call
formatter = Formatter.new
formatter.configure(config_element())
assert_raise NotImplementedError do
formatter.format('tag', Engine.now, {})
end
end
end
class BaseFormatterTestWithTestDriver < ::Test::Unit::TestCase
include FormatterTest
def create_driver(conf={})
Fluent::Test::FormatterTestDriver.new(Formatter).configure(conf)
end
def test_call
d = create_driver
assert_raise NotImplementedError do
d.format('tag', Engine.now, {})
end
end
def test_call_with_string_literal_configure
d = create_driver('')
assert_raise NotImplementedError do
d.format('tag', Engine.now, {})
end
end
end
class OutFileFormatterTest < ::Test::Unit::TestCase
include FormatterTest
def setup
@formatter = Fluent::Test::FormatterTestDriver.new('out_file')
@time = Engine.now
@newline = if Fluent.windows?
"\r\n"
else
"\n"
end
end
def configure(conf)
@formatter.configure({'utc' => true}.merge(conf))
end
def test_format
configure({})
formatted = @formatter.format(tag, @time, record)
assert_equal("#{time2str(@time)}\t#{tag}\t#{Yajl.dump(record)}#{@newline}", formatted)
end
def test_format_without_time
configure('output_time' => 'false')
formatted = @formatter.format(tag, @time, record)
assert_equal("#{tag}\t#{Yajl.dump(record)}#{@newline}", formatted)
end
def test_format_without_tag
configure('output_tag' => 'false')
formatted = @formatter.format(tag, @time, record)
assert_equal("#{time2str(@time)}\t#{Yajl.dump(record)}#{@newline}", formatted)
end
def test_format_without_time_and_tag
configure('output_tag' => 'false', 'output_time' => 'false')
formatted = @formatter.format('tag', @time, record)
assert_equal("#{Yajl.dump(record)}#{@newline}", formatted)
end
def test_format_without_time_and_tag_against_string_literal_configure
@formatter.configure(%[
utc true
output_tag false
output_time false
])
formatted = @formatter.format('tag', @time, record)
assert_equal("#{Yajl.dump(record)}#{@newline}", formatted)
end
end
class JsonFormatterTest < ::Test::Unit::TestCase
include FormatterTest
def setup
@formatter = Fluent::Test::FormatterTestDriver.new(TextFormatter::JSONFormatter)
@time = Engine.now
@newline = if Fluent.windows?
"\r\n"
else
"\n"
end
end
data('oj' => 'oj', 'yajl' => 'yajl')
def test_format(data)
@formatter.configure('json_parser' => data)
formatted = @formatter.format(tag, @time, record)
assert_equal("#{Yajl.dump(record)}#{@newline}", formatted)
end
end
class MessagePackFormatterTest < ::Test::Unit::TestCase
include FormatterTest
def setup
@formatter = Fluent::Test::FormatterTestDriver.new(TextFormatter::MessagePackFormatter)
@time = Engine.now
end
def test_format
@formatter.configure({})
formatted = @formatter.format(tag, @time, record)
assert_equal(record.to_msgpack, formatted)
end
end
class LabeledTSVFormatterTest < ::Test::Unit::TestCase
include FormatterTest
def setup
@formatter = Fluent::Test::FormatterTestDriver.new(TextFormatter::LabeledTSVFormatter)
@time = Engine.now
@newline = if Fluent.windows?
"\r\n"
else
"\n"
end
end
def test_config_params
assert_equal "\t", @formatter.instance.delimiter
assert_equal ":", @formatter.instance.label_delimiter
@formatter.configure(
'delimiter' => ',',
'label_delimiter' => '=',
)
assert_equal ",", @formatter.instance.delimiter
assert_equal "=", @formatter.instance.label_delimiter
end
def test_format
@formatter.configure({})
formatted = @formatter.format(tag, @time, record)
assert_equal("message:awesome\tgreeting:hello#{@newline}", formatted)
end
def test_format_with_customized_delimiters
@formatter.configure(
'delimiter' => ',',
'label_delimiter' => '=',
)
formatted = @formatter.format(tag, @time, record)
assert_equal("message=awesome,greeting=hello#{@newline}", formatted)
end
def record_with_tab
{'message' => "awe\tsome", 'greeting' => "hello\t"}
end
def test_format_suppresses_tab
@formatter.configure({})
formatted = @formatter.format(tag, @time, record_with_tab)
assert_equal("message:awe some\tgreeting:hello #{@newline}", formatted)
end
def test_format_suppresses_tab_custom_replacement
@formatter.configure(
'replacement' => 'X',
)
formatted = @formatter.format(tag, @time, record_with_tab)
assert_equal("message:aweXsome\tgreeting:helloX#{@newline}", formatted)
end
def test_format_suppresses_custom_delimiter
@formatter.configure(
'delimiter' => 'w',
'label_delimiter' => '=',
)
formatted = @formatter.format(tag, @time, record)
assert_equal("message=a esomewgreeting=hello#{@newline}", formatted)
end
end
class CsvFormatterTest < ::Test::Unit::TestCase
include FormatterTest
def setup
@formatter = Fluent::Test::FormatterTestDriver.new(TextFormatter::CsvFormatter)
@time = Engine.now
end
def test_config_params
assert_equal ',', @formatter.instance.delimiter
assert_equal true, @formatter.instance.force_quotes
assert_nil @formatter.instance.fields
end
data(
'tab_char' => ["\t", '\t'],
'tab_string' => ["\t", 'TAB'],
'pipe' => ['|', '|'])
def test_config_params_with_customized_delimiters(data)
expected, target = data
@formatter.configure('delimiter' => target, 'fields' => 'a,b,c')
assert_equal expected, @formatter.instance.delimiter
assert_equal ['a', 'b', 'c'], @formatter.instance.fields
end
def test_format
@formatter.configure('fields' => 'message,message2')
formatted = @formatter.format(tag, @time, {
'message' => 'awesome',
'message2' => 'awesome2'
})
assert_equal("\"awesome\",\"awesome2\"\n", formatted)
end
def test_format_with_customized_delimiters
@formatter.configure(
'fields' => 'message,message2',
'delimiter' => '\t'
)
formatted = @formatter.format(tag, @time, {
'message' => 'awesome',
'message2' => 'awesome2'
})
assert_equal("\"awesome\"\t\"awesome2\"\n", formatted)
end
def test_format_with_non_quote
@formatter.configure(
'fields' => 'message,message2',
'force_quotes' => 'false'
)
formatted = @formatter.format(tag, @time, {
'message' => 'awesome',
'message2' => 'awesome2'
})
assert_equal("awesome,awesome2\n", formatted)
end
data(
'nil' => {
'message' => 'awesome',
'message2' => nil,
'message3' => 'awesome3'
},
'blank' => {
'message' => 'awesome',
'message2' => '',
'message3' => 'awesome3'
})
def test_format_with_empty_fields(data)
@formatter.configure(
'fields' => 'message,message2,message3'
)
formatted = @formatter.format(tag, @time, data)
assert_equal("\"awesome\",\"\",\"awesome3\"\n", formatted)
end
data(
'normally' => 'one,two,three',
'white_space' => 'one , two , three',
'blank' => 'one,,two,three')
def test_config_params_with_fields(data)
@formatter.configure('fields' => data)
assert_equal %w(one two three), @formatter.instance.fields
end
end
class SingleValueFormatterTest < ::Test::Unit::TestCase
include FormatterTest
def setup
@newline = if Fluent.windows?
"\r\n"
else
"\n"
end
end
def create_driver(klass_or_str)
Fluent::Test::FormatterTestDriver.new(klass_or_str)
end
def test_config_params
formatter = create_driver(TextFormatter::SingleValueFormatter)
assert_equal "message", formatter.instance.message_key
formatter.configure('message_key' => 'foobar')
assert_equal "foobar", formatter.instance.message_key
end
def test_format
formatter = create_driver('single_value')
formatter.configure({})
formatted = formatter.format('tag', Engine.now, {'message' => 'awesome'})
assert_equal("awesome#{@newline}", formatted)
end
def test_format_without_newline
formatter = create_driver('single_value')
formatter.configure('add_newline' => 'false')
formatted = formatter.format('tag', Engine.now, {'message' => 'awesome'})
assert_equal("awesome", formatted)
end
def test_format_with_message_key
formatter = create_driver(TextFormatter::SingleValueFormatter)
formatter.configure('message_key' => 'foobar')
formatted = formatter.format('tag', Engine.now, {'foobar' => 'foo'})
assert_equal("foo#{@newline}", formatted)
end
end
class FormatterLookupTest < ::Test::Unit::TestCase
include FormatterTest
def test_unknown_format
assert_raise NotFoundPluginError do
Fluent::Plugin.new_formatter('unknown')
end
end
data('register_formatter' => 'known', 'register_template' => 'known_old')
def test_find_formatter(data)
$LOAD_PATH.unshift(File.join(File.expand_path(File.dirname(__FILE__)), 'scripts'))
assert_nothing_raised ConfigError do
Fluent::Plugin.new_formatter(data)
end
$LOAD_PATH.shift
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_msgpack_factory.rb | test/test_msgpack_factory.rb | require_relative 'helper'
require 'fluent/msgpack_factory'
class MessagePackFactoryTest < Test::Unit::TestCase
test 'call log.warn only once' do
klass = Class.new do
include Fluent::MessagePackFactory::Mixin
end
mp = klass.new
mock.proxy($log).warn(anything).once
assert mp.msgpack_factory
assert mp.msgpack_factory
assert mp.msgpack_factory
end
sub_test_case 'thread_local_msgpack_packer' do
test 'packer is cached' do
packer1 = Fluent::MessagePackFactory.thread_local_msgpack_packer
packer2 = Fluent::MessagePackFactory.thread_local_msgpack_packer
assert_equal packer1, packer2
end
end
sub_test_case 'thread_local_msgpack_unpacker' do
test 'unpacker is cached' do
unpacker1 = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
unpacker2 = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
assert_equal unpacker1, unpacker2
end
# We need to reset the buffer every time so that received incomplete data
# must not affect data from other senders.
test 'reset the internal buffer of unpacker every time' do
unpacker1 = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
unpacker1.feed_each("\xA6foo") do |result|
flunk("This callback must not be called since the data is uncomplete.")
end
records = []
unpacker2 = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
unpacker2.feed_each("\xA3foo") do |result|
records.append(result)
end
assert_equal ["foo"], records
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_input.rb | test/test_input.rb | require_relative 'helper'
require 'fluent/input'
class FluentInputTest < ::Test::Unit::TestCase
include Fluent
def setup
Fluent::Test.setup
end
def create_driver(conf = '')
Fluent::Test::InputTestDriver.new(Fluent::Input).configure(conf, true)
end
def test_router
d = create_driver
assert_equal Engine.root_agent.event_router, d.instance.router
d = nil
assert_nothing_raised {
d = create_driver('@label @known')
}
expected = Engine.root_agent.find_label('@known').event_router
assert_equal expected, d.instance.router
# TestDriver helps to create a label instance automatically, so directly test here
assert_raise(ArgumentError) {
Fluent::Input.new.configure(Config.parse('@label @unknown', '(test)', '(test_dir)', true))
}
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_filter.rb | test/test_filter.rb | require_relative 'helper'
require 'fluent/filter'
class FilterTest < Test::Unit::TestCase
include Fluent
setup do
Fluent::Test.setup
@time = Fluent::Engine.now
end
def create_driver(klass = Fluent::Filter, conf = '')
Test::FilterTestDriver.new(klass).configure(conf, true)
end
def emit(klass, msgs, conf = '')
d = create_driver(klass, conf)
d.run {
msgs.each {|msg|
d.emit({'message' => msg}, @time)
}
}.filtered
end
sub_test_case 'configure' do
test 'check to implement `filter` method' do
klass = Class.new(Fluent::Filter) do |c|
def filter(tag, time, record); end
end
assert_nothing_raised do
klass.new
end
end
test 'check to implement `filter_with_time` method' do
klass = Class.new(Fluent::Filter) do |c|
def filter_with_time(tag, time, record); end
end
assert_nothing_raised do
klass.new
end
end
test 'DO NOT check when implement `filter_stream`' do
klass = Class.new(Fluent::Filter) do |c|
def filter_stream(tag, es); end
end
assert_nothing_raised do
klass.new
end
end
test 'NotImplementedError' do
klass = Class.new(Fluent::Filter)
assert_raise NotImplementedError do
klass.new
end
end
test 'duplicated method implementation' do
klass = Class.new(Fluent::Filter) do |c|
def filter(tag, time, record); end
def filter_with_time(tag, time, record); end
end
assert_raise do
klass.new
end
end
end
sub_test_case 'filter' do
test 'null filter' do
null_filter = Class.new(Fluent::Filter) do |c|
def filter(tag, time, record)
nil
end
end
es = emit(null_filter, ['foo'])
assert_equal(0, es.instance_variable_get(:@record_array).size)
end
test 'pass filter' do
pass_filter = Class.new(Fluent::Filter) do |c|
def filter(tag, time, record)
record
end
end
es = emit(pass_filter, ['foo'])
assert_equal(1, es.instance_variable_get(:@record_array).size)
end
end
sub_test_case 'filter_stream' do
test 'null filter' do
null_filter = Class.new(Fluent::Filter) do |c|
def filter_stream(tag, es)
MultiEventStream.new
end
def filter(tag, time, record); record; end
end
es = emit(null_filter, ['foo'])
assert_equal(0, es.instance_variable_get(:@record_array).size)
end
test 'pass filter' do
pass_filter = Class.new(Fluent::Filter) do |c|
def filter_stream(tag, es)
es
end
def filter(tag, time, record); record; end
end
es = emit(pass_filter, ['foo'])
assert_equal(1, es.instance_variable_get(:@record_array).size)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_source_only_buffer_agent.rb | test/test_source_only_buffer_agent.rb | require_relative 'helper'
class SourceOnlyBufferAgentTest < ::Test::Unit::TestCase
def log
logger = ServerEngine::DaemonLogger.new(
Fluent::Test::DummyLogDevice.new,
{ log_level: ServerEngine::DaemonLogger::INFO }
)
Fluent::Log.new(logger)
end
def setup
omit "Not supported on Windows" if Fluent.windows?
@log = log
end
sub_test_case "#configure" do
test "default" do
system_config = Fluent::SystemConfig.new
root_agent = Fluent::RootAgent.new(log: @log, system_config: system_config)
stub(Fluent::Engine).root_agent { root_agent }
stub(Fluent::Engine).system_config { system_config }
root_agent.configure(config_element)
agent = Fluent::SourceOnlyBufferAgent.new(log: @log, system_config: system_config)
agent.configure
assert_equal(
{
"num of filter plugins" => 0,
"num of output plugins" => 1,
"base_buffer_dir" => agent.instance_variable_get(:@default_buffer_path),
"actual_buffer_dir" => agent.instance_variable_get(:@default_buffer_path),
"EventRouter of BufferOutput" => root_agent.event_router.object_id,
"flush_thread_count" => 0,
"flush_at_shutdown" => false,
},
{
"num of filter plugins" => agent.filters.size,
"num of output plugins" => agent.outputs.size,
"base_buffer_dir" => agent.instance_variable_get(:@base_buffer_dir),
"actual_buffer_dir" => agent.instance_variable_get(:@actual_buffer_dir),
"EventRouter of BufferOutput" => agent.outputs[0].router.object_id,
"flush_thread_count" => agent.outputs[0].buffer_config.flush_thread_count,
"flush_at_shutdown" => agent.outputs[0].buffer_config.flush_at_shutdown,
}
)
assert do
@log.out.logs.any? { |log| log.include? "the emitted data will be stored in the buffer files" }
end
end
test "flush: true" do
system_config = Fluent::SystemConfig.new
root_agent = Fluent::RootAgent.new(log: @log, system_config: system_config)
stub(Fluent::Engine).root_agent { root_agent }
stub(Fluent::Engine).system_config { system_config }
root_agent.configure(config_element)
agent = Fluent::SourceOnlyBufferAgent.new(log: @log, system_config: system_config)
agent.configure(flush: true)
assert_equal(
{
"num of filter plugins" => 0,
"num of output plugins" => 1,
"base_buffer_dir" => agent.instance_variable_get(:@default_buffer_path),
"actual_buffer_dir" => agent.instance_variable_get(:@default_buffer_path),
"EventRouter of BufferOutput" => root_agent.event_router.object_id,
"flush_thread_count" => 1,
"flush_at_shutdown" => true,
},
{
"num of filter plugins" => agent.filters.size,
"num of output plugins" => agent.outputs.size,
"base_buffer_dir" => agent.instance_variable_get(:@base_buffer_dir),
"actual_buffer_dir" => agent.instance_variable_get(:@actual_buffer_dir),
"EventRouter of BufferOutput" => agent.outputs[0].router.object_id,
"flush_thread_count" => agent.outputs[0].buffer_config.flush_thread_count,
"flush_at_shutdown" => agent.outputs[0].buffer_config.flush_at_shutdown,
}
)
assert do
not @log.out.logs.any? { |log| log.include? "the emitted data will be stored in the buffer files" }
end
end
test "multiple workers" do
system_config = Fluent::SystemConfig.new(config_element("system", "", {"workers" => 2}))
root_agent = Fluent::RootAgent.new(log: @log, system_config: system_config)
stub(Fluent::Engine).root_agent { root_agent }
stub(Fluent::Engine).system_config { system_config }
root_agent.configure(config_element)
agent = Fluent::SourceOnlyBufferAgent.new(log: @log, system_config: system_config)
agent.configure
assert_equal(
{
"num of filter plugins" => 0,
"num of output plugins" => 1,
"base_buffer_dir" => agent.instance_variable_get(:@default_buffer_path),
"actual_buffer_dir" => "#{agent.instance_variable_get(:@default_buffer_path)}/worker0",
"EventRouter of BufferOutput" => root_agent.event_router.object_id,
"flush_thread_count" => 0,
"flush_at_shutdown" => false,
},
{
"num of filter plugins" => agent.filters.size,
"num of output plugins" => agent.outputs.size,
"base_buffer_dir" => agent.instance_variable_get(:@base_buffer_dir),
"actual_buffer_dir" => agent.instance_variable_get(:@actual_buffer_dir),
"EventRouter of BufferOutput" => agent.outputs[0].router.object_id,
"flush_thread_count" => agent.outputs[0].buffer_config.flush_thread_count,
"flush_at_shutdown" => agent.outputs[0].buffer_config.flush_at_shutdown,
}
)
end
test "full setting with flush:true" do
system_config = Fluent::SystemConfig.new(config_element("system", "", {}, [
config_element("source_only_buffer", "", {
"flush_thread_count" => 4,
"overflow_action" => :throw_exception,
"path" => "tmp_buffer_path",
"flush_interval" => 1,
"chunk_limit_size" => 100,
"total_limit_size" => 1000,
"compress" => :gzip,
})
]))
root_agent = Fluent::RootAgent.new(log: @log, system_config: system_config)
stub(Fluent::Engine).root_agent { root_agent }
stub(Fluent::Engine).system_config { system_config }
root_agent.configure(config_element)
agent = Fluent::SourceOnlyBufferAgent.new(log: @log, system_config: system_config)
agent.configure(flush: true)
assert_equal(
{
"num of filter plugins" => 0,
"num of output plugins" => 1,
"base_buffer_dir" => "tmp_buffer_path",
"actual_buffer_dir" => "tmp_buffer_path",
"EventRouter of BufferOutput" => root_agent.event_router.object_id,
"flush_thread_count" => 4,
"flush_at_shutdown" => true,
"overflow_action" => :throw_exception,
"flush_interval" => 1,
"chunk_limit_size" => 100,
"total_limit_size" => 1000,
"compress" => :gzip,
},
{
"num of filter plugins" => agent.filters.size,
"num of output plugins" => agent.outputs.size,
"base_buffer_dir" => agent.instance_variable_get(:@base_buffer_dir),
"actual_buffer_dir" => agent.instance_variable_get(:@actual_buffer_dir),
"EventRouter of BufferOutput" => agent.outputs[0].router.object_id,
"flush_thread_count" => agent.outputs[0].buffer_config.flush_thread_count,
"flush_at_shutdown" => agent.outputs[0].buffer_config.flush_at_shutdown,
"overflow_action" => agent.outputs[0].buffer_config.overflow_action,
"flush_interval" => agent.outputs[0].buffer_config.flush_interval,
"chunk_limit_size" => agent.outputs[0].buffer.chunk_limit_size,
"total_limit_size" => agent.outputs[0].buffer.total_limit_size,
"compress" => agent.outputs[0].buffer.compress,
}
)
end
end
sub_test_case "#cleanup" do
test "do not remove the buffer if it is not empty" do
system_config = Fluent::SystemConfig.new
root_agent = Fluent::RootAgent.new(log: @log, system_config: system_config)
stub(Fluent::Engine).root_agent { root_agent }
stub(Fluent::Engine).system_config { system_config }
root_agent.configure(config_element)
agent = Fluent::SourceOnlyBufferAgent.new(log: @log, system_config: system_config)
agent.configure
stub(Dir).empty?(agent.instance_variable_get(:@actual_buffer_dir)) { false }
mock(FileUtils).remove_dir.never
agent.cleanup
assert do
@log.out.logs.any? { |log| log.include? "some buffer files remain in" }
end
end
test "remove the buffer if it is empty" do
system_config = Fluent::SystemConfig.new
root_agent = Fluent::RootAgent.new(log: @log, system_config: system_config)
stub(Fluent::Engine).root_agent { root_agent }
stub(Fluent::Engine).system_config { system_config }
root_agent.configure(config_element)
agent = Fluent::SourceOnlyBufferAgent.new(log: @log, system_config: system_config)
agent.configure
stub(Dir).empty?(agent.instance_variable_get(:@actual_buffer_dir)) { true }
mock(FileUtils).remove_dir(agent.instance_variable_get(:@base_buffer_dir)).times(1)
agent.cleanup
assert do
not @log.out.logs.any? { |log| log.include? "some buffer files remain in" }
end
end
end
sub_test_case "error" do
test "#emit_error_event" do
system_config = Fluent::SystemConfig.new
root_agent = Fluent::RootAgent.new(log: @log, system_config: system_config)
stub(Fluent::Engine).root_agent { root_agent }
stub(Fluent::Engine).system_config { system_config }
root_agent.configure(config_element)
agent = Fluent::SourceOnlyBufferAgent.new(log: @log, system_config: system_config)
agent.configure
agent.event_router.emit_error_event("tag", 0, "hello", Exception.new)
assert do
@log.out.logs.any? { |log| log.include? "SourceOnlyBufferAgent: dump an error event" }
end
end
test "#handle_emits_error" do
system_config = Fluent::SystemConfig.new
root_agent = Fluent::RootAgent.new(log: @log, system_config: system_config)
stub(Fluent::Engine).root_agent { root_agent }
stub(Fluent::Engine).system_config { system_config }
root_agent.configure(config_element)
agent = Fluent::SourceOnlyBufferAgent.new(log: @log, system_config: system_config)
agent.configure
stub(agent.outputs[0]).emit_events { raise "test error" }
agent.event_router.emit("foo", 0, "hello")
assert do
@log.out.logs.any? { |log| log.include? "SourceOnlyBufferAgent: emit transaction failed" }
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_static_config_analysis.rb | test/test_static_config_analysis.rb | require_relative 'helper'
require 'fluent/config'
require 'fluent/static_config_analysis'
require 'fluent/plugin/out_forward'
require 'fluent/plugin/out_stdout'
require 'fluent/plugin/out_exec'
require 'fluent/plugin/in_forward'
require 'fluent/plugin/in_sample'
require 'fluent/plugin/filter_grep'
require 'fluent/plugin/filter_stdout'
require 'fluent/plugin/filter_parser'
class StaticConfigAnalysisTest < ::Test::Unit::TestCase
sub_test_case '.call' do
test 'returns outputs, inputs and filters' do
conf_data = <<-CONF
<source>
@type forward
</source>
<filter>
@type grep
</filter>
<match>
@type forward
</match>
CONF
c = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
ret = Fluent::StaticConfigAnalysis.call(c)
assert_equal 1, ret.outputs.size
assert_kind_of Fluent::Plugin::ForwardOutput, ret.outputs[0].plugin
assert_equal 1, ret.inputs.size
assert_kind_of Fluent::Plugin::ForwardInput, ret.inputs[0].plugin
assert_equal 1, ret.filters.size
assert_kind_of Fluent::Plugin::GrepFilter, ret.filters[0].plugin
assert_empty ret.labels
assert_equal [Fluent::Plugin::ForwardOutput, Fluent::Plugin::ForwardInput, Fluent::Plugin::GrepFilter], ret.all_plugins.map(&:class)
end
test 'returns wrapped element with worker and label section' do
conf_data = <<-CONF
<source>
@type forward
</source>
<filter>
@type grep
</filter>
<match>
@type forward
</match>
<worker 0>
<source>
@type dummy
</source>
<filter>
@type parser
</filter>
<match>
@type exec
</match>
</worker>
<label @test>
<filter>
@type stdout
</filter>
<match>
@type stdout
</match>
</label>
CONF
c = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
ret = Fluent::StaticConfigAnalysis.call(c)
assert_equal [Fluent::Plugin::ExecOutput, Fluent::Plugin::StdoutOutput, Fluent::Plugin::ForwardOutput], ret.outputs.map { |x| x.plugin.class }
assert_equal [Fluent::Plugin::SampleInput, Fluent::Plugin::ForwardInput], ret.inputs.map { |x| x.plugin.class }
assert_equal [Fluent::Plugin::ParserFilter, Fluent::Plugin::StdoutFilter, Fluent::Plugin::GrepFilter], ret.filters.map { |x| x.plugin.class }
assert_equal 1, ret.labels.size
assert_equal '@test', ret.labels[0].name
end
sub_test_case 'raises config error' do
data(
'empty' => ['', 'Missing worker id on <worker> directive'],
'invalid number' => ['a', 'worker id should be integer: a'],
'worker id is negative' => ['-1', 'worker id should be integer: -1'],
'min worker id is less than 0' => ['-1-1', 'worker id should be integer: -1-1'],
'max worker id is less than 0' => ['1--1', 'worker id -1 specified by <worker> directive is not allowed. Available worker id is between 0 and 1'],
'min worker id is greater than workers' => ['0-2', 'worker id 2 specified by <worker> directive is not allowed. Available worker id is between 0 and 1'],
'max worker is less than min worker' => ['1-0', "greater first_worker_id<1> than last_worker_id<0> specified by <worker> directive is not allowed. Available multi worker assign syntax is <smaller_worker_id>-<greater_worker_id>"],
)
test 'when worker number is invalid' do |v|
val, msg = v
conf_data = <<-CONF
<worker #{val}>
</worker>
CONF
c = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
assert_raise(Fluent::ConfigError.new(msg)) do
Fluent::StaticConfigAnalysis.call(c, workers: 2)
end
end
test 'when worker number is duplicated' do
conf_data = <<-CONF
<worker 0-1>
</worker>
<worker 0-1>
</worker>
CONF
c = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
assert_raise(Fluent::ConfigError.new("specified worker_id<0> collisions is detected on <worker> directive. Available worker id(s): []")) do
Fluent::StaticConfigAnalysis.call(c, workers: 2)
end
end
test 'duplicated label exits' do
conf_data = <<-CONF
<label @dup>
</label>
<label @dup>
</label>
CONF
c = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
assert_raise(Fluent::ConfigError.new('Section <label @dup> appears twice')) do
Fluent::StaticConfigAnalysis.call(c, workers: 2)
end
end
test 'empty label' do
conf_data = <<-CONF
<label>
</label>
CONF
c = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
assert_raise(Fluent::ConfigError.new('Missing symbol argument on <label> directive')) do
Fluent::StaticConfigAnalysis.call(c, workers: 2)
end
end
data(
'in filter' => 'filter',
'in source' => 'source',
'in match' => 'match',
)
test 'when @type is missing' do |name|
conf_data = <<-CONF
<#{name}>
@type
</#{name}>
CONF
c = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
assert_raise(Fluent::ConfigError.new("Missing '@type' parameter on <#{name}> directive")) do
Fluent::StaticConfigAnalysis.call(c)
end
end
test 'when worker has worker section' do
conf_data = <<-CONF
<worker 0>
<worker 0>
</worker>
</worker>
CONF
c = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
assert_raise(Fluent::ConfigError.new("<worker> section cannot have <worker> directive")) do
Fluent::StaticConfigAnalysis.call(c)
end
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_output.rb | test/test_output.rb | require_relative 'helper'
require 'fluent/test'
require 'fluent/output'
require 'fluent/output_chain'
require 'fluent/plugin/buffer'
require 'timecop'
require 'flexmock/test_unit'
module FluentOutputTest
include Fluent
include FlexMock::TestCase
class BufferedOutputTest < ::Test::Unit::TestCase
include FluentOutputTest
class << self
def startup
$LOAD_PATH.unshift File.expand_path(File.join(File.dirname(__FILE__), 'scripts'))
require 'fluent/plugin/out_test'
require 'fluent/plugin/out_test2'
end
def shutdown
$LOAD_PATH.shift
end
end
def setup
Fluent::Test.setup
end
CONFIG = %[]
def create_driver(conf=CONFIG)
Fluent::Test::BufferedOutputTestDriver.new(Fluent::BufferedOutput) do
def write(chunk)
chunk.read
end
end.configure(conf)
end
def test_configure
# default
d = create_driver
assert_equal 'memory', d.instance.buffer_type
assert_equal 60, d.instance.flush_interval
assert_equal false, d.instance.disable_retry_limit
assert_equal 17, d.instance.retry_limit
assert_equal 1.0, d.instance.retry_wait
assert_equal nil, d.instance.max_retry_wait
assert_equal 1.0, d.instance.retry_wait
assert_equal 1, d.instance.num_threads
assert_equal 1, d.instance.queued_chunk_flush_interval
# max_retry_wait
d = create_driver(CONFIG + %[max_retry_wait 4])
assert_equal 4, d.instance.max_retry_wait
# disable_retry_limit
d = create_driver(CONFIG + %[disable_retry_limit true])
assert_equal true, d.instance.disable_retry_limit
#### retry_state cares it
# # retry_wait is converted to Float for calc_retry_wait
# d = create_driver(CONFIG + %[retry_wait 1s])
# assert_equal Float, d.instance.retry_wait.class
end
class FormatterInjectTestOutput < Fluent::Output
def initialize
super
@formatter = nil
end
end
def test_start
i = FormatterInjectTestOutput.new
i.configure(config_element('ROOT', '', {}, [config_element('inject', '', {'hostname_key' => "host"})]))
assert_nothing_raised do
i.start
end
end
def create_mock_driver(conf=CONFIG)
Fluent::Test::BufferedOutputTestDriver.new(Fluent::BufferedOutput) do
attr_accessor :submit_flush_threads
def start_mock
@started = false
start
# ensure OutputThread to start successfully
submit_flush
sleep 0.5
while !@started
submit_flush
sleep 0.5
end
end
def try_flush
@started = true
@submit_flush_threads ||= {}
@submit_flush_threads[Thread.current] ||= 0
@submit_flush_threads[Thread.current] += 1
end
def write(chunk)
chunk.read
end
end.configure(conf)
end
def test_secondary
d = Fluent::Test::BufferedOutputTestDriver.new(Fluent::BufferedOutput) do
def write(chunk)
chunk.read
end
end
mock(d.instance.log).warn("Use different plugin for secondary. Check the plugin works with primary like secondary_file",
primary: d.instance.class.to_s, secondary: "Fluent::Plugin::Test2Output")
d.configure(CONFIG + %[
<secondary>
type test2
name c0
</secondary>
])
assert_not_nil d.instance.instance_variable_get(:@secondary).router
end
def test_secondary_with_no_warn_log
# ObjectBufferedOutput doesn't implement `custom_filter`
d = Fluent::Test::BufferedOutputTestDriver.new(Fluent::ObjectBufferedOutput)
mock(d.instance.log).warn("Use different plugin for secondary. Check the plugin works with primary like secondary_file",
primary: d.instance.class.to_s, secondary: "Fluent::Plugin::Test2Output").never
d.configure(CONFIG + %[
<secondary>
type test2
name c0
</secondary>
])
assert_not_nil d.instance.instance_variable_get(:@secondary).router
end
test 'BufferQueueLimitError compatibility' do
assert_equal Fluent::Plugin::Buffer::BufferOverflowError, Fluent::BufferQueueLimitError
end
end
class ObjectBufferedOutputTest < ::Test::Unit::TestCase
include FluentOutputTest
def setup
Fluent::Test.setup
end
CONFIG = %[]
def create_driver(conf=CONFIG)
Fluent::Test::OutputTestDriver.new(Fluent::ObjectBufferedOutput).configure(conf, true)
end
def test_configure
# default
d = create_driver
assert_equal true, d.instance.time_as_integer
end
end
class TimeSlicedOutputTest < ::Test::Unit::TestCase
include FluentOutputTest
include FlexMock::TestCase
def setup
Fluent::Test.setup
FileUtils.rm_rf(TMP_DIR)
FileUtils.mkdir_p(TMP_DIR)
end
TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/tmp/time_sliced_output")
CONFIG = %[
buffer_path #{TMP_DIR}/foo
time_slice_format %Y%m%d%H
]
class TimeSlicedOutputTestPlugin < Fluent::TimeSlicedOutput
attr_reader :written_chunk_keys, :errors_in_write
def initialize
super
@written_chunk_keys = []
@errors_in_write = []
end
def configure(conf)
super
@formatter = Fluent::Plugin.new_formatter('out_file')
@formatter.configure(conf)
end
def format(tag, time, record)
@formatter.format(tag, time, record)
end
def write(chunk)
@written_chunk_keys << chunk.key
true
rescue => e
@errors_in_write << e
end
end
def create_driver(conf=CONFIG)
Fluent::Test::TimeSlicedOutputTestDriver.new(TimeSlicedOutputTestPlugin).configure(conf, true)
end
data(:none => '',
:utc => "utc",
:localtime => 'localtime',
:timezone => 'timezone +0000')
test 'configure with timezone related parameters' do |param|
assert_nothing_raised {
create_driver(CONFIG + param)
}
end
sub_test_case "test emit" do
setup do
@time = Time.parse("2011-01-02 13:14:15 UTC")
Timecop.freeze(@time)
@newline = if Fluent.windows?
"\r\n"
else
"\n"
end
end
teardown do
Timecop.return
end
test "emit with invalid event" do
d = create_driver
d.instance.start
d.instance.after_start
assert_raise ArgumentError, "time must be a Fluent::EventTime (or Integer)" do
d.instance.emit_events('test', OneEventStream.new('string', 10))
end
end
test "plugin can get key of chunk in #write" do
d = create_driver
d.instance.start
d.instance.after_start
d.instance.emit_events('test', OneEventStream.new(event_time("2016-11-08 17:44:30 +0900"), {"message" => "yay"}))
d.instance.force_flush
waiting(10) do
sleep 0.1 until d.instance.written_chunk_keys.size == 1
end
assert_equal [], d.instance.errors_in_write
assert_equal ["2016110808"], d.instance.written_chunk_keys # default timezone is UTC
end
test "check formatted time compatibility with utc. Should Z, not +00:00" do
d = create_driver(CONFIG + %[
utc
include_time_key
])
time = Time.parse("2016-11-08 12:00:00 UTC").to_i
d.emit({"a" => 1}, time)
d.expect_format %[2016-11-08T12:00:00Z\ttest\t{"a":1,"time":"2016-11-08T12:00:00Z"}#{@newline}]
d.run
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_fluent_log_event_router.rb | test/test_fluent_log_event_router.rb | require_relative 'helper'
require 'fluent/fluent_log_event_router'
require 'fluent/root_agent'
require 'fluent/system_config'
class FluentLogEventRouterTest < ::Test::Unit::TestCase
# @param config [String]
def build_config(config)
Fluent::Config.parse(config, 'fluent_log_event', '', syntax: :v1)
end
sub_test_case 'NullFluentLogEventRouter does nothing' do
test 'emittable? returns false but others does nothing' do
null_event_router = Fluent::NullFluentLogEventRouter.new
null_event_router.start
null_event_router.stop
null_event_router.graceful_stop
null_event_router.emit_event(nil)
assert_false null_event_router.emittable?
end
end
sub_test_case '#build' do
test 'NullFluentLogEventRouter if root_agent have not internal logger' do
root_agent = Fluent::RootAgent.new(log: $log, system_config: Fluent::SystemConfig.new)
root_agent.configure(build_config(''))
d = Fluent::FluentLogEventRouter.build(root_agent)
assert_equal Fluent::NullFluentLogEventRouter, d.class
end
test 'FluentLogEventRouter if <match fluent.*> exists in config' do
root_agent = Fluent::RootAgent.new(log: $log, system_config: Fluent::SystemConfig.new)
root_agent.configure(build_config(<<-CONFIG))
<match fluent.*>
@type null
</match>
CONFIG
d = Fluent::FluentLogEventRouter.build(root_agent)
assert_equal Fluent::FluentLogEventRouter, d.class
end
test 'FluentLogEventRouter if <label @FLUENT_LOG> exists in config' do
root_agent = Fluent::RootAgent.new(log: $log, system_config: Fluent::SystemConfig.new)
root_agent.configure(build_config(<<-CONFIG))
<label @FLUENT_LOG>
<match *>
@type null
</match>
</label>
CONFIG
d = Fluent::FluentLogEventRouter.build(root_agent)
assert_equal Fluent::FluentLogEventRouter, d.class
end
end
test 'when calling graceful_stop, it flushes all events' do
event_router = []
stub(event_router).emit do |tag, time, record|
event_router.push([tag, time, record])
end
d = Fluent::FluentLogEventRouter.new(event_router)
t = Time.now
msg = ['tag', t, { 'key' => 'value' }]
d.emit_event(msg)
d.graceful_stop
d.emit_event(msg)
d.start
d.graceful_stop # to call join
assert_equal 2, event_router.size
assert_equal msg, event_router[0]
assert_equal msg, event_router[1]
end
test 'when calling stop, it ignores existing events' do
event_router = []
stub(event_router).emit do |tag, time, record|
event_router.push([tag, time, record])
end
d = Fluent::FluentLogEventRouter.new(event_router)
t = Time.now
msg = ['tag', t, { 'key' => 'value' }]
d.emit_event(msg)
d.stop
d.emit_event(msg)
d.start
d.stop # to call join
assert_equal 1, event_router.size
assert_equal msg, event_router[0]
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_capability.rb | test/test_capability.rb | require_relative 'helper'
require 'fluent/test'
require 'fluent/capability'
class FluentCapabilityTest < ::Test::Unit::TestCase
setup do
@capability = Fluent::Capability.new(:current_process)
omit "Fluent::Capability class is not usable on this environment" unless @capability.usable?
end
sub_test_case "check capability" do
test "effective" do
@capability.clear(:both)
assert_true @capability.update(:add, :effective, :dac_read_search)
assert_equal CapNG::Result::PARTIAL, @capability.have_capabilities?(:caps)
assert_nothing_raised do
@capability.apply(:caps)
end
assert_equal CapNG::Result::NONE, @capability.have_capabilities?(:bounds)
assert_true @capability.have_capability?(:effective, :dac_read_search)
assert_false @capability.have_capability?(:inheritable, :dac_read_search)
assert_false @capability.have_capability?(:permitted, :dac_read_search)
end
test "inheritable" do
@capability.clear(:both)
capabilities = [:chown, :dac_override]
assert_equal [true, true], @capability.update(:add, :inheritable, capabilities)
assert_equal CapNG::Result::NONE, @capability.have_capabilities?(:caps)
assert_nothing_raised do
@capability.apply(:caps)
end
assert_equal CapNG::Result::NONE, @capability.have_capabilities?(:bounds)
capabilities.each do |capability|
assert_false @capability.have_capability?(:effective, capability)
assert_true @capability.have_capability?(:inheritable, capability)
assert_false @capability.have_capability?(:permitted, capability)
end
end
test "permitted" do
@capability.clear(:both)
capabilities = [:fowner, :fsetid, :kill]
assert_equal [true, true, true], @capability.update(:add, :permitted, capabilities)
assert_equal CapNG::Result::NONE, @capability.have_capabilities?(:caps)
assert_nothing_raised do
@capability.apply(:caps)
end
assert_equal CapNG::Result::NONE, @capability.have_capabilities?(:bounds)
capabilities.each do |capability|
assert_false @capability.have_capability?(:effective, capability)
assert_false @capability.have_capability?(:inheritable, capability)
assert_true @capability.have_capability?(:permitted, capability)
end
end
test "effective/inheritable/permitted" do
@capability.clear(:both)
capabilities = [:setpcap, :net_admin, :net_raw, :sys_boot, :sys_time]
update_type = CapNG::Type::EFFECTIVE | CapNG::Type::INHERITABLE | CapNG::Type::PERMITTED
assert_equal [true, true, true, true, true], @capability.update(:add, update_type, capabilities)
assert_equal CapNG::Result::PARTIAL, @capability.have_capabilities?(:caps)
assert_nothing_raised do
@capability.apply(:caps)
end
assert_equal CapNG::Result::NONE, @capability.have_capabilities?(:bounds)
capabilities.each do |capability|
assert_true @capability.have_capability?(:effective, capability)
assert_true @capability.have_capability?(:inheritable, capability)
assert_true @capability.have_capability?(:permitted, capability)
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_plugin.rb | test/test_plugin.rb | require_relative 'helper'
require 'fluent/plugin'
require 'fluent/plugin/input'
require 'fluent/plugin/filter'
require 'fluent/plugin/output'
require 'fluent/plugin/buffer'
require 'fluent/plugin/parser'
require 'fluent/plugin/formatter'
require 'fluent/plugin/storage'
class PluginTest < Test::Unit::TestCase
class Dummy1Input < Fluent::Plugin::Input
Fluent::Plugin.register_input('plugin_test_dummy1', self)
end
class Dummy2Input < Fluent::Plugin::Input
Fluent::Plugin.register_input('plugin_test_dummy2', self)
helpers :storage
config_section :storage do
config_set_default :@type, 'plugin_test_dummy1'
end
def multi_workers_ready?
true
end
end
class DummyFilter < Fluent::Plugin::Filter
Fluent::Plugin.register_filter('plugin_test_dummy', self)
helpers :parser, :formatter
config_section :parse do
config_set_default :@type, 'plugin_test_dummy'
end
config_section :format do
config_set_default :@type, 'plugin_test_dummy'
end
def filter(tag, time, record)
record
end
end
class Dummy1Output < Fluent::Plugin::Output
Fluent::Plugin.register_output('plugin_test_dummy1', self)
def write(chunk)
# drop
end
end
class Dummy2Output < Fluent::Plugin::Output
Fluent::Plugin.register_output('plugin_test_dummy2', self)
config_section :buffer do
config_set_default :@type, 'plugin_test_dummy1'
end
def multi_workers_ready?
true
end
def write(chunk)
# drop
end
end
class Dummy1Buffer < Fluent::Plugin::Buffer
Fluent::Plugin.register_buffer('plugin_test_dummy1', self)
end
class Dummy2Buffer < Fluent::Plugin::Buffer
Fluent::Plugin.register_buffer('plugin_test_dummy2', self)
def multi_workers_ready?
false
end
end
class DummyParser < Fluent::Plugin::Parser
Fluent::Plugin.register_parser('plugin_test_dummy', self)
end
class DummyFormatter < Fluent::Plugin::Formatter
Fluent::Plugin.register_formatter('plugin_test_dummy', self)
end
class Dummy1Storage < Fluent::Plugin::Storage
Fluent::Plugin.register_storage('plugin_test_dummy1', self)
end
class Dummy2Storage < Fluent::Plugin::Storage
Fluent::Plugin.register_storage('plugin_test_dummy2', self)
def multi_workers_ready?
false
end
end
class DummyOwner < Fluent::Plugin::Base
include Fluent::PluginId
include Fluent::PluginLoggerMixin
end
class DummyEventRouter
def emit(tag, time, record); end
def emit_array(tag, array); end
def emit_stream(tag, es); end
def emit_error_event(tag, time, record, error); end
end
sub_test_case '#new_* methods' do
data(
input1: ['plugin_test_dummy1', Dummy1Input, :new_input],
input2: ['plugin_test_dummy2', Dummy2Input, :new_input],
filter: ['plugin_test_dummy', DummyFilter, :new_filter],
output1: ['plugin_test_dummy1', Dummy1Output, :new_output],
output2: ['plugin_test_dummy2', Dummy2Output, :new_output],
)
test 'returns plugin instances of registered plugin classes' do |(type, klass, m)|
instance = Fluent::Plugin.__send__(m, type)
assert_kind_of klass, instance
end
data(
buffer1: ['plugin_test_dummy1', Dummy1Buffer, :new_buffer],
buffer2: ['plugin_test_dummy2', Dummy2Buffer, :new_buffer],
parser: ['plugin_test_dummy', DummyParser, :new_parser],
formatter: ['plugin_test_dummy', DummyFormatter, :new_formatter],
storage1: ['plugin_test_dummy1', Dummy1Storage, :new_storage],
storage2: ['plugin_test_dummy2', Dummy2Storage, :new_storage],
)
test 'returns plugin instances of registered owned plugin classes' do |(type, klass, m)|
owner = DummyOwner.new
instance = Fluent::Plugin.__send__(m, type, parent: owner)
assert_kind_of klass, instance
end
data(
input1: ['plugin_test_dummy1', Dummy1Input, :new_input, nil],
input2: ['plugin_test_dummy2', Dummy2Input, :new_input, nil],
filter: ['plugin_test_dummy', DummyFilter, :new_filter, nil],
output1: ['plugin_test_dummy1', Dummy1Output, :new_output, nil],
output2: ['plugin_test_dummy2', Dummy2Output, :new_output, nil],
buffer1: ['plugin_test_dummy1', Dummy1Buffer, :new_buffer, {parent: DummyOwner.new}],
buffer2: ['plugin_test_dummy2', Dummy2Buffer, :new_buffer, {parent: DummyOwner.new}],
parser: ['plugin_test_dummy', DummyParser, :new_parser, {parent: DummyOwner.new}],
formatter: ['plugin_test_dummy', DummyFormatter, :new_formatter, {parent: DummyOwner.new}],
storage1: ['plugin_test_dummy1', Dummy1Storage, :new_storage, {parent: DummyOwner.new}],
storage2: ['plugin_test_dummy2', Dummy2Storage, :new_storage, {parent: DummyOwner.new}],
)
test 'returns plugin instances which are extended by FeatureAvailabilityChecker module' do |(type, _, m, kwargs)|
instance = if kwargs
Fluent::Plugin.__send__(m, type, **kwargs)
else
Fluent::Plugin.__send__(m, type)
end
assert_kind_of Fluent::Plugin::FeatureAvailabilityChecker, instance
end
end
sub_test_case 'with default system configuration' do
data(
input1: ['plugin_test_dummy1', Dummy1Input, :new_input, nil],
input2: ['plugin_test_dummy2', Dummy2Input, :new_input, nil],
filter: ['plugin_test_dummy', DummyFilter, :new_filter, nil],
output1: ['plugin_test_dummy1', Dummy1Output, :new_output, nil],
output2: ['plugin_test_dummy2', Dummy2Output, :new_output, nil],
buffer1: ['plugin_test_dummy1', Dummy1Buffer, :new_buffer, {parent: DummyOwner.new}],
buffer2: ['plugin_test_dummy2', Dummy2Buffer, :new_buffer, {parent: DummyOwner.new}],
parser: ['plugin_test_dummy', DummyParser, :new_parser, {parent: DummyOwner.new}],
formatter: ['plugin_test_dummy', DummyFormatter, :new_formatter, {parent: DummyOwner.new}],
storage1: ['plugin_test_dummy1', Dummy1Storage, :new_storage, {parent: DummyOwner.new}],
storage2: ['plugin_test_dummy2', Dummy2Storage, :new_storage, {parent: DummyOwner.new}],
)
test '#configure does not raise anything' do |(type, _, m, kwargs)|
instance = if kwargs
Fluent::Plugin.__send__(m, type, **kwargs)
else
Fluent::Plugin.__send__(m, type)
end
if instance.respond_to?(:context_router=)
instance.context_router = DummyEventRouter.new
end
assert_nothing_raised do
instance.configure(config_element())
end
end
end
sub_test_case 'with single worker configuration' do
data(
input1: ['plugin_test_dummy1', Dummy1Input, :new_input, nil],
input2: ['plugin_test_dummy2', Dummy2Input, :new_input, nil],
filter: ['plugin_test_dummy', DummyFilter, :new_filter, nil],
output1: ['plugin_test_dummy1', Dummy1Output, :new_output, nil],
output2: ['plugin_test_dummy2', Dummy2Output, :new_output, nil],
buffer1: ['plugin_test_dummy1', Dummy1Buffer, :new_buffer, {parent: DummyOwner.new}],
buffer2: ['plugin_test_dummy2', Dummy2Buffer, :new_buffer, {parent: DummyOwner.new}],
parser: ['plugin_test_dummy', DummyParser, :new_parser, {parent: DummyOwner.new}],
formatter: ['plugin_test_dummy', DummyFormatter, :new_formatter, {parent: DummyOwner.new}],
storage1: ['plugin_test_dummy1', Dummy1Storage, :new_storage, {parent: DummyOwner.new}],
storage2: ['plugin_test_dummy2', Dummy2Storage, :new_storage, {parent: DummyOwner.new}],
)
test '#configure does not raise anything' do |(type, _, m, kwargs)|
instance = if kwargs
Fluent::Plugin.__send__(m, type, **kwargs)
else
Fluent::Plugin.__send__(m, type)
end
if instance.respond_to?(:context_router=)
instance.context_router = DummyEventRouter.new
end
assert_nothing_raised do
instance.system_config_override('workers' => 1)
instance.configure(config_element())
end
end
end
sub_test_case 'with multi workers configuration' do
data(
input1: ['plugin_test_dummy1', Dummy1Input, :new_input],
output1: ['plugin_test_dummy1', Dummy1Output, :new_output],
)
test '#configure raise configuration error if plugins are not ready for multi workers' do |(type, klass, new_method)|
conf = config_element()
instance = Fluent::Plugin.__send__(new_method, type)
if instance.respond_to?(:context_router=)
instance.context_router = DummyEventRouter.new
end
assert_raise Fluent::ConfigError.new("Plugin '#{type}' does not support multi workers configuration (#{klass})") do
instance.system_config_override('workers' => 3)
instance.configure(conf)
end
end
data(
input2: ['plugin_test_dummy2', Dummy2Input, :new_input], # with Dummy1Storage
filter: ['plugin_test_dummy', DummyFilter, :new_filter], # with DummyParser and DummyFormatter
output2: ['plugin_test_dummy2', Dummy2Output, :new_output], # with Dummy1Buffer
)
test '#configure does not raise any errors if plugins and its owned plugins are ready for multi workers' do |(type, _klass, new_method)|
conf = config_element()
instance = Fluent::Plugin.__send__(new_method, type)
if instance.respond_to?(:context_router=)
instance.context_router = DummyEventRouter.new
end
assert_nothing_raised do
instance.system_config_override('workers' => 3)
instance.configure(conf)
end
end
data(
input2: ['plugin_test_dummy2', Dummy2Input, :new_input, 'storage', 'plugin_test_dummy2', Dummy2Storage],
output2: ['plugin_test_dummy2', Dummy2Output, :new_output, 'buffer', 'plugin_test_dummy2', Dummy2Buffer],
)
test '#configure raise configuration error if configured owned plugins are not ready for multi workers' do |(type, _klass, new_method, subsection, subsection_type, problematic)|
conf = config_element('root', '', {}, [config_element(subsection, '', {'@type' => subsection_type})])
instance = Fluent::Plugin.__send__(new_method, type)
if instance.respond_to?(:context_router=)
instance.context_router = DummyEventRouter.new
end
assert_raise Fluent::ConfigError.new("Plugin '#{subsection_type}' does not support multi workers configuration (#{problematic})") do
instance.system_config_override('workers' => 3)
instance.configure(conf)
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_time_formatter.rb | test/test_time_formatter.rb | require_relative 'helper'
require 'fluent/test'
require 'fluent/time'
class TimeFormatterTest < ::Test::Unit::TestCase
setup do
@fmt ="%Y%m%d %H%M%z" # YYYYMMDD HHMM[+-]HHMM
end
def format(format, localtime, timezone)
formatter = Fluent::TimeFormatter.new(format, localtime, timezone)
formatter.format(@time)
end
module TestLists
def test_default_utc_nil
assert_equal("2014-09-27T00:00:00Z", format(nil, false, nil))
end
def test_default_utc_pHH_MM
assert_equal("2014-09-27T01:30:00+01:30", format(nil, false, "+01:30"))
end
def test_default_utc_nHH_MM
assert_equal("2014-09-26T22:30:00-01:30", format(nil, false, "-01:30"))
end
def test_default_utc_pHHMM
assert_equal("2014-09-27T02:30:00+02:30", format(nil, false, "+0230"))
end
def test_default_utc_nHHMM
assert_equal("2014-09-26T21:30:00-02:30", format(nil, false, "-0230"))
end
def test_default_utc_pHH
assert_equal("2014-09-27T03:00:00+03:00", format(nil, false, "+03"))
end
def test_default_utc_nHH
assert_equal("2014-09-26T21:00:00-03:00", format(nil, false, "-03"))
end
def test_default_utc_timezone_1
# Asia/Tokyo (+09:00) does not have daylight saving time.
assert_equal("2014-09-27T09:00:00+09:00", format(nil, false, "Asia/Tokyo"))
end
def test_default_utc_timezone_2
# Pacific/Honolulu (-10:00) does not have daylight saving time.
assert_equal("2014-09-26T14:00:00-10:00", format(nil, false, "Pacific/Honolulu"))
end
def test_default_utc_timezone_3
# America/Argentina/Buenos_Aires (-03:00) does not have daylight saving time.
assert_equal("2014-09-26T21:00:00-03:00", format(nil, false, "America/Argentina/Buenos_Aires"))
end
def test_default_utc_timezone_4
# Europe/Paris has daylight saving time. Its UTC offset is +01:00 and its
# UTC offset in DST is +02:00. In September, Europe/Paris is in DST.
assert_equal("2014-09-27T02:00:00+02:00", format(nil, false, "Europe/Paris"))
end
def test_default_utc_timezone_5
# Europe/Paris has daylight saving time. Its UTC offset is +01:00 and its
# UTC offset in DST is +02:00. In January, Europe/Paris is not in DST.
@time = Time.new(2014, 1, 24, 0, 0, 0, 0).to_i
assert_equal("2014-01-24T01:00:00+01:00", format(nil, false, "Europe/Paris"))
end
def test_default_utc_invalid
assert_equal("2014-09-27T00:00:00Z", format(nil, false, "Invalid"))
end
def test_default_localtime_nil_1
with_timezone("UTC-04") do
assert_equal("2014-09-27T04:00:00+04:00", format(nil, true, nil))
end
end
def test_default_localtime_nil_2
with_timezone("UTC+05") do
assert_equal("2014-09-26T19:00:00-05:00", format(nil, true, nil))
end
end
def test_default_localtime_timezone
# 'timezone' takes precedence over 'localtime'.
with_timezone("UTC-06") do
assert_equal("2014-09-27T07:00:00+07:00", format(nil, true, "+07"))
end
end
def test_specific_utc_nil
assert_equal("20140927 0000+0000", format(@fmt, false, nil))
end
def test_specific_utc_pHH_MM
assert_equal("20140927 0830+0830", format(@fmt, false, "+08:30"))
end
def test_specific_utc_nHH_MM
assert_equal("20140926 1430-0930", format(@fmt, false, "-09:30"))
end
def test_specific_utc_pHHMM
assert_equal("20140927 1030+1030", format(@fmt, false, "+1030"))
end
def test_specific_utc_nHHMM
assert_equal("20140926 1230-1130", format(@fmt, false, "-1130"))
end
def test_specific_utc_pHH
assert_equal("20140927 1200+1200", format(@fmt, false, "+12"))
end
def test_specific_utc_nHH
assert_equal("20140926 1100-1300", format(@fmt, false, "-13"))
end
def test_specific_utc_timezone_1
# Europe/Moscow (+04:00) does not have daylight saving time.
assert_equal("20140927 0400+0400", format(@fmt, false, "Europe/Moscow"))
end
def test_specific_utc_timezone_2
# Pacific/Galapagos (-06:00) does not have daylight saving time.
assert_equal("20140926 1800-0600", format(@fmt, false, "Pacific/Galapagos"))
end
def test_specific_utc_timezone_3
# America/Argentina/Buenos_Aires (-03:00) does not have daylight saving time.
assert_equal("20140926 2100-0300", format(@fmt, false, "America/Argentina/Buenos_Aires"))
end
def test_specific_utc_timezone_4
# America/Los_Angeles has daylight saving time. Its UTC offset is -08:00 and its
# UTC offset in DST is -07:00. In September, America/Los_Angeles is in DST.
assert_equal("20140926 1700-0700", format(@fmt, false, "America/Los_Angeles"))
end
def test_specific_utc_timezone_5
# America/Los_Angeles has daylight saving time. Its UTC offset is -08:00 and its
# UTC offset in DST is -07:00. In January, America/Los_Angeles is not in DST.
@time = Time.new(2014, 1, 24, 0, 0, 0, 0).to_i
assert_equal("20140123 1600-0800", format(@fmt, false, "America/Los_Angeles"))
end
def test_specific_utc_invalid
assert_equal("20140927 0000+0000", format(@fmt, false, "Invalid"))
end
def test_specific_localtime_nil_1
with_timezone("UTC-07") do
assert_equal("20140927 0700+0700", format(@fmt, true, nil))
end
end
def test_specific_localtime_nil_2
with_timezone("UTC+08") do
assert_equal("20140926 1600-0800", format(@fmt, true, nil))
end
end
def test_specific_localtime_timezone
# 'timezone' takes precedence over 'localtime'.
with_timezone("UTC-09") do
assert_equal("20140926 1400-1000", format(@fmt, true, "-10"))
end
end
end
sub_test_case 'Fluent::EventTime time' do
setup do
@time = Fluent::EventTime.from_time(Time.new(2014, 9, 27, 0, 0, 0, 0))
end
include TestLists
end
# for v0.12 compatibility
sub_test_case 'Integer time' do
setup do
@time = Time.new(2014, 9, 27, 0, 0, 0, 0).to_i
end
include TestLists
end
def test_format_with_subsec
time = Time.new(2014, 9, 27, 0, 0, 0, 0).to_i
time = Fluent::EventTime.new(time)
formatter = Fluent::TimeFormatter.new("%Y%m%d %H%M.%N", false, nil)
assert_equal("20140927 0000.000000000", formatter.format(time))
end
# Exercises the TimeMixin::Formatter mixin: its config params (time_format,
# localtime/utc, timezone) and #time_formatter_create overrides.
sub_test_case 'TimeMixin::Formatter' do
# Bare configurable host class so the mixin can be tested in isolation.
class DummyForTimeFormatter
include Fluent::Configurable
include Fluent::TimeMixin::Formatter
end
# Defaults: no time_format, localtime on, utc off, no timezone;
# output falls back to ISO8601 in the local TZ.
test 'provides configuration parameters for TimeFormatter with default values for localtime' do
str = with_timezone("UTC+07") do
i = DummyForTimeFormatter.new
i.configure(config_element('format'))
assert_nil i.time_format
assert_true i.localtime
assert_false i.utc
assert_nil i.timezone
fmt = i.time_formatter_create
fmt.format(event_time("2016-09-02 18:42:31.012345678 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
assert_equal "2016-09-02T11:42:31-07:00", str
end
# A configured time_format is honored (still rendered in the local TZ).
test 'provides configuration parameters for TimeFormatter, configurable for any time format' do
str = with_timezone("UTC+07") do
i = DummyForTimeFormatter.new
i.configure(config_element('format', '', {'time_format' => '%Y-%m-%d %H:%M:%S.%N %z'}))
fmt = i.time_formatter_create
fmt.format(event_time("2016-09-02 18:42:31.012345678 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
assert_equal "2016-09-02 11:42:31.012345678 -0700", str
end
# utc=true overrides the local TZ.
test 'provides configuration parameters for TimeFormatter, configurable for UTC' do
str = with_timezone("UTC+07") do
i = DummyForTimeFormatter.new
i.configure(config_element('format', '', {'time_format' => '%Y-%m-%d %H:%M:%S.%N %z', 'utc' => 'true'}))
fmt = i.time_formatter_create
fmt.format(event_time("2016-09-02 18:42:31.012345678 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
assert_equal "2016-09-02 18:42:31.012345678 +0000", str
end
# An explicit timezone param overrides the local TZ.
test 'provides configuration parameters for TimeFormatter, configurable for any timezone' do
str = with_timezone("UTC+07") do
i = DummyForTimeFormatter.new
i.configure(config_element('format', '', {'time_format' => '%Y-%m-%d %H:%M:%S.%N %z', 'timezone' => '+0900'}))
fmt = i.time_formatter_create
fmt.format(event_time("2016-09-02 18:42:31.012345678 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
assert_equal "2016-09-03 03:42:31.012345678 +0900", str
end
# Keyword args to #time_formatter_create override the configured values.
test '#time_formatter_create returns TimeFormatter with specified time format and timezone' do
str = with_timezone("UTC+07") do
i = DummyForTimeFormatter.new
i.configure(config_element('format', '', {'time_format' => '%Y-%m-%d %H:%M:%S.%N %z', 'timezone' => '+0900'}))
fmt = i.time_formatter_create(format: '%m/%d/%Y %H-%M-%S %N', timezone: '+0000')
fmt.format(event_time("2016-09-02 18:42:31.012345678 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
assert_equal "09/02/2016 18-42-31 012345678", str
end
# force_localtime beats both utc=true and an explicit timezone config.
test '#time_formatter_create returns TimeFormatter with localtime besides any configuration parameters' do
str = with_timezone("UTC+07") do
i = DummyForTimeFormatter.new
i.configure(config_element('format', '', {'time_format' => '%Y-%m-%d %H:%M:%S.%N %z', 'utc' => 'true'}))
fmt = i.time_formatter_create(format: '%m/%d/%Y %H-%M-%S %N', force_localtime: true)
fmt.format(event_time("2016-09-02 18:42:31.012345678 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
assert_equal "09/02/2016 11-42-31 012345678", str
str = with_timezone("UTC+07") do
i = DummyForTimeFormatter.new
i.configure(config_element('format', '', {'time_format' => '%Y-%m-%d %H:%M:%S.%N %z', 'timezone' => '+0900'}))
fmt = i.time_formatter_create(format: '%m/%d/%Y %H-%M-%S %N', force_localtime: true)
fmt.format(event_time("2016-09-02 18:42:31.012345678 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
assert_equal "09/02/2016 11-42-31 012345678", str
end
end
# NOTE(review): these two tests appear after the 'TimeMixin::Formatter'
# sub_test_case's closing `end` yet reference DummyForTimeFormatter; the
# constant resolves because the class was defined lexically in this file's
# outer scope — confirm the placement is intentional.
test '#time_formatter_create returns NumericTimeFormatter to format time as unixtime when time_type unixtime specified' do
i = DummyForTimeFormatter.new
i.configure(config_element('format', '', {'time_type' => 'unixtime'}))
fmt = i.time_formatter_create
time = event_time("2016-10-03 20:08:30.123456789 +0100", format: '%Y-%m-%d %H:%M:%S.%N %z')
# unixtime output keeps only integral seconds
assert_equal "#{time.sec}", fmt.format(time)
end
test '#time_formatter_create returns NumericTimeFormatter to format time as float when time_type float specified' do
i = DummyForTimeFormatter.new
i.configure(config_element('format', '', {'time_type' => 'float'}))
fmt = i.time_formatter_create
time = event_time("2016-10-03 20:08:30.123456789 +0100", format: '%Y-%m-%d %H:%M:%S.%N %z')
# float output is "<seconds>.<nanoseconds>"
assert_equal "#{time.sec}.#{time.nsec}", fmt.format(time)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_plugin_classes.rb | test/test_plugin_classes.rb | require_relative 'helper'
require 'fluent/plugin/input'
require 'fluent/plugin/output'
require 'fluent/plugin/bare_output'
require 'fluent/plugin/filter'
module FluentTest
# Counter-style metrics stub backed by a plain integer, so tests can read
# metric values directly without a real metrics backend.
class FluentTestCounterMetrics < Fluent::Plugin::Metrics
Fluent::Plugin.register_metrics('test_counter', self)
attr_reader :data
def initialize
super
@data = 0
end
def get
@data
end
def inc
@data +=1
end
def add(value)
@data += value
end
def set(value)
@data = value
end
# Reset the stored value when the plugin is closed.
def close
@data = 0
super
end
end
# Gauge-style metrics stub: like the counter stub but additionally supports
# decrement (#dec) and subtraction (#sub).
class FluentTestGaugeMetrics < Fluent::Plugin::Metrics
Fluent::Plugin.register_metrics('test_gauge', self)
attr_reader :data
def initialize
super
@data = 0
end
def get
@data
end
def inc
@data += 1
end
def dec
@data -=1
end
def add(value)
@data += value
end
def sub(value)
@data -= value
end
def set(value)
@data = value
end
# Reset the stored value when the plugin is closed.
def close
@data = 0
super
end
end
# Minimal input plugin that only tracks whether it has been started.
class FluentTestInput < ::Fluent::Plugin::Input
::Fluent::Plugin.register_input('test_in', self)
attr_reader :started
def initialize
super
# stub metrics instances
@emit_records_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_size_metrics = FluentTest::FluentTestCounterMetrics.new
end
def start
super
@started = true
end
def shutdown
@started = false
super
end
end
# Input plugin that generates `num` dummy "test.event" records, either
# inline during #start or on a plugin thread when `async` is enabled.
class FluentTestGenInput < ::Fluent::Plugin::Input
::Fluent::Plugin.register_input('test_in_gen', self)
helpers :thread
attr_reader :started
config_param :num, :integer, default: 10000
config_param :interval_sec, :float, default: nil
config_param :async, :bool, default: false
def initialize
super
# stub metrics instances
@emit_records_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_size_metrics = FluentTest::FluentTestCounterMetrics.new
end
def multi_workers_ready?
true
end
def zero_downtime_restart_ready?
true
end
def start
super
@started = true
if @async
thread_create(:test_in_gen, &method(:emit))
else
emit
end
end
def emit
@num.times { |i|
# In async mode, stop generating as soon as the plugin thread is asked to stop.
break if @async and not thread_current_running?
router.emit("test.event", Fluent::EventTime.now, {'message' => 'Hello!', 'key' => "value#{i}", 'num' => i})
sleep @interval_sec if @interval_sec
}
end
def shutdown
@started = false
super
end
end
# Output plugin that collects every processed record into #events, keyed by
# tag, so tests can assert on what was routed to it.
class FluentTestOutput < ::Fluent::Plugin::Output
::Fluent::Plugin.register_output('test_out', self)
def initialize
super
# per-tag record store; missing tags auto-initialize to []
@events = Hash.new { |h, k| h[k] = [] }
# stub metrics instances
@num_errors_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_count_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_records_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_size_metrics = FluentTest::FluentTestCounterMetrics.new
@write_count_metrics = FluentTest::FluentTestCounterMetrics.new
@write_secondary_count_metrics = FluentTest::FluentTestCounterMetrics.new
@rollback_count_metrics = FluentTest::FluentTestCounterMetrics.new
@flush_time_count_metrics = FluentTest::FluentTestCounterMetrics.new
@slow_flush_count_metrics = FluentTest::FluentTestCounterMetrics.new
end
attr_reader :events
attr_reader :started
def start
super
@started = true
end
def shutdown
@started = false
super
end
def process(tag, es)
es.each do |time, record|
@events[tag] << record
end
end
end
# BareOutput that dynamically instantiates a 'copy' output with two
# 'test_out' children at #start and forwards every plugin lifecycle phase
# (stop, shutdown, close, terminate, ...) to that child.
class FluentTestDynamicOutput < ::Fluent::Plugin::BareOutput
::Fluent::Plugin.register_output('test_dynamic_out', self)
attr_reader :child
attr_reader :started
attr_reader :events
def initialize
super
# Fix: @events was never initialized anywhere in this class (unlike
# FluentTestOutput), so #process crashed with NoMethodError on nil
# whenever events were routed here. Initialize the per-tag store.
@events = Hash.new { |h, k| h[k] = [] }
end
def start
super
@started = true
# build the child 'copy' output with two test_out stores
@child = Fluent::Plugin.new_output('copy')
conf = config_element('DYNAMIC', '', {}, [
config_element('store', '', {'@type' => 'test_out', '@id' => 'dyn_out1'}),
config_element('store', '', {'@type' => 'test_out', '@id' => 'dyn_out2'}),
])
@child.configure(conf)
@child.start
end
def after_start
super
@child.after_start
end
def stop
super
@child.stop
end
def before_shutdown
super
@child.before_shutdown
end
def shutdown
@started = false
super
@child.shutdown
end
def after_shutdown
super
@child.after_shutdown
end
def close
super
@child.close
end
def terminate
super
@child.terminate
end
# Record processed records per tag (mirrors FluentTestOutput#process).
def process(tag, es)
es.each do |time, record|
@events[tag] << record
end
end
end
# Buffered output whose #write silently discards all chunks; used where a
# test only cares about buffering behavior, not the written data.
class FluentTestBufferedOutput < ::Fluent::Plugin::Output
::Fluent::Plugin.register_output('test_out_buffered', self)
attr_reader :started
def start
super
@started = true
end
def shutdown
@started = false
super
end
def write(chunk)
# drop everything
end
end
# Output that re-emits every buffered chunk back into the event router,
# preserving the chunk's tag (or 'test' when the chunk has none).
class FluentTestEmitOutput < ::Fluent::Plugin::Output
::Fluent::Plugin.register_output('test_out_emit', self)
helpers :event_emitter
def write(chunk)
tag = chunk.metadata.tag || 'test'
array = []
chunk.each do |time, record|
array << [time, record]
end
router.emit_array(tag, array)
end
end
# Output that always fails: #format raises during emit and #write raises
# during flush, for exercising error paths.
class FluentTestErrorOutput < ::Fluent::Plugin::Output
::Fluent::Plugin.register_output('test_out_error', self)
def initialize
super
# stub metrics instances
@num_errors_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_count_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_records_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_size_metrics = FluentTest::FluentTestCounterMetrics.new
@write_count_metrics = FluentTest::FluentTestCounterMetrics.new
@write_secondary_count_metrics = FluentTest::FluentTestCounterMetrics.new
@rollback_count_metrics = FluentTest::FluentTestCounterMetrics.new
@flush_time_count_metrics = FluentTest::FluentTestCounterMetrics.new
@slow_flush_count_metrics = FluentTest::FluentTestCounterMetrics.new
end
def format(tag, time, record)
raise "emit error!"
end
def write(chunk)
raise "chunk error!"
end
end
# v0.12-compatible filter (subclasses ::Fluent::Filter, not
# ::Fluent::Plugin::Filter) that stamps each record with a running counter.
class FluentCompatTestFilter < ::Fluent::Filter
::Fluent::Plugin.register_filter('test_compat_filter', self)
def initialize(field = '__test__')
super()
@num = 0
@field = field
# stub metrics instances
@emit_records_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_size_metrics = FluentTest::FluentTestCounterMetrics.new
end
attr_reader :num
attr_reader :started
def start
super
@started = true
end
def shutdown
@started = false
super
end
# Adds record[@field] = <sequence number>, incrementing per record.
def filter(tag, time, record)
record[@field] = @num
@num += 1
record
end
end
# Modern-API counterpart of FluentCompatTestFilter: stamps each record
# with a running counter under @field.
class FluentTestFilter < ::Fluent::Plugin::Filter
::Fluent::Plugin.register_filter('test_filter', self)
def initialize(field = '__test__')
super()
@num = 0
@field = field
# stub metrics instances
@emit_records_metrics = FluentTest::FluentTestCounterMetrics.new
@emit_size_metrics = FluentTest::FluentTestCounterMetrics.new
end
attr_reader :num
attr_reader :started
def start
super
@started = true
end
def shutdown
@started = false
super
end
# Adds record[@field] = <sequence number>, incrementing per record.
def filter(tag, time, record)
record[@field] = @num
@num += 1
record
end
end
# No-op buffer stub: resumes with an empty stage and queue, produces no
# chunks, and declares itself not multi-worker ready.
class FluentTestBuffer < Fluent::Plugin::Buffer
::Fluent::Plugin.register_buffer('test_buffer', self)
# Returns (stage, queue) — both empty.
def resume
return {}, []
end
def generate_chunk(metadata)
end
def multi_workers_ready?
false
end
end
# Collects records that failed to emit, keyed by tag, so tests can inspect
# exactly what the error handler received.
class TestEmitErrorHandler
attr_reader :events

def initialize
# missing tags auto-initialize to an empty record list
@events = Hash.new { |h, k| h[k] = [] }
end

# Record a single failed event under its tag; time/error are ignored.
def handle_emit_error(tag, time, record, error)
events[tag].push(record)
end

# Record every event of a failed event stream under its tag.
def handle_emits_error(tag, es, error)
es.each do |time, record|
handle_emit_error(tag, time, record, error)
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_supervisor.rb | test/test_supervisor.rb | require_relative 'helper'
require 'fluent/event_router'
require 'fluent/system_config'
require 'fluent/supervisor'
require 'fluent/file_wrapper'
require_relative 'test_plugin_classes'
require 'net/http'
require 'uri'
require 'fileutils'
require 'tempfile'
require 'securerandom'
require 'pathname'
if Fluent.windows?
require 'win32/event'
end
class SupervisorTest < ::Test::Unit::TestCase
# Minimal stand-in for the supervisor server process: mixes in ServerModule
# so its signal/RPC handlers can be exercised without a full ServerEngine.
class DummyServer
include Fluent::ServerModule
attr_accessor :rpc_endpoint, :enable_get_dump, :socket_manager_server
# ServerModule reads settings from #config; individual tests redefine this
# per instance to inject values such as :signame.
def config
{}
end
end
# Build a unique per-test temporary directory path: namespaced per parallel
# test process (TEST_ENV_NUMBER) and randomized per invocation.
def tmp_dir
base = File.join(File.dirname(__FILE__), "tmp", "supervisor#{ENV['TEST_ENV_NUMBER']}")
File.join(base, SecureRandom.hex(10))
end
# Per-test setup: snapshot the global logger and create fresh tmp/root dirs.
def setup
@stored_global_logger = $log
@tmp_dir = tmp_dir
@tmp_root_dir = File.join(@tmp_dir, 'root')
FileUtils.mkdir_p(@tmp_dir)
# default sigdump output path asserted on by the SIGCONT tests
@sigdump_path = "/tmp/sigdump-#{Process.pid}.log"
end
# Per-test teardown: restore the global logger and remove the tmp dir.
def teardown
$log = @stored_global_logger
begin
FileUtils.rm_rf(@tmp_dir)
rescue Errno::EACCES
# It may occur on Windows because of delete pending state due to delayed GC.
# Ruby 3.2 or later doesn't ignore Errno::EACCES:
# https://github.com/ruby/ruby/commit/983115cf3c8f75b1afbe3274f02c1529e1ce3a81
end
end
# Write config `data` to `path`, creating parent directories as needed.
def write_config(path, data)
FileUtils.mkdir_p(File.dirname(path))
Fluent::FileWrapper.open(path, "w") {|f| f.write data }
end
# Verifies that every <system> directive in a v1 config (including nested
# <log>, <counter_server>, <counter_client>, <source_only_buffer>) is
# mapped onto the corresponding SystemConfig attribute.
def test_system_config
sv = Fluent::Supervisor.new({})
conf_data = <<-EOC
<system>
rpc_endpoint 127.0.0.1:24445
suppress_repeated_stacktrace false
suppress_config_dump true
without_source true
with_source_only true
enable_get_dump true
enable_input_metrics false
process_name "process_name"
log_level info
root_dir #{@tmp_root_dir}
<log>
path /tmp/fluentd.log
format json
time_format %Y
</log>
<counter_server>
bind 127.0.0.1
port 24321
scope server1
backup_path /tmp/backup
</counter_server>
<counter_client>
host 127.0.0.1
port 24321
timeout 2
</counter_client>
<source_only_buffer>
flush_thread_count 4
overflow_action throw_exception
path /tmp/source-only-buffer
flush_interval 1
chunk_limit_size 100
total_limit_size 1000
compress gzip
</source_only_buffer>
</system>
EOC
conf = Fluent::Config.parse(conf_data, "(test)", "(test_dir)", true)
# build_system_config is private; invoked directly to avoid full startup
sys_conf = sv.__send__(:build_system_config, conf)
assert_equal '127.0.0.1:24445', sys_conf.rpc_endpoint
assert_equal false, sys_conf.suppress_repeated_stacktrace
assert_equal true, sys_conf.suppress_config_dump
assert_equal true, sys_conf.without_source
assert_equal true, sys_conf.with_source_only
assert_equal true, sys_conf.enable_get_dump
assert_equal false, sys_conf.enable_input_metrics
assert_equal "process_name", sys_conf.process_name
assert_equal 2, sys_conf.log_level
assert_equal @tmp_root_dir, sys_conf.root_dir
assert_equal "/tmp/fluentd.log", sys_conf.log.path
assert_equal :json, sys_conf.log.format
assert_equal '%Y', sys_conf.log.time_format
counter_server = sys_conf.counter_server
assert_equal '127.0.0.1', counter_server.bind
assert_equal 24321, counter_server.port
assert_equal 'server1', counter_server.scope
assert_equal '/tmp/backup', counter_server.backup_path
counter_client = sys_conf.counter_client
assert_equal '127.0.0.1', counter_client.host
assert_equal 24321, counter_client.port
assert_equal 2, counter_client.timeout
source_only_buffer = sys_conf.source_only_buffer
assert_equal 4, source_only_buffer.flush_thread_count
assert_equal :throw_exception, source_only_buffer.overflow_action
assert_equal "/tmp/source-only-buffer", source_only_buffer.path
assert_equal 1, source_only_buffer.flush_interval
assert_equal 100, source_only_buffer.chunk_limit_size
assert_equal 1000, source_only_buffer.total_limit_size
assert_equal :gzip, source_only_buffer.compress
end
# Same <system> mapping checks as test_system_config, but driven by the
# YAML config format instead of the classic v1 syntax.
sub_test_case "yaml config" do
# Parse a YAML config string through the YamlParser via a temp file.
# NOTE(review): Pathname.new is given the Tempfile object itself rather
# than file.path — confirm the Loader accepts non-String path objects.
def parse_yaml(yaml)
context = Kernel.binding
config = nil
Tempfile.open do |file|
file.puts(yaml)
file.flush
s = Fluent::Config::YamlParser::Loader.new(context).load(Pathname.new(file))
config = Fluent::Config::YamlParser::Parser.new(s).build.to_element
end
config
end
def test_system_config
sv = Fluent::Supervisor.new({})
conf_data = <<-EOC
system:
rpc_endpoint: 127.0.0.1:24445
suppress_repeated_stacktrace: true
suppress_config_dump: true
without_source: true
with_source_only: true
enable_get_dump: true
process_name: "process_name"
log_level: info
root_dir: !fluent/s "#{@tmp_root_dir}"
log:
path: /tmp/fluentd.log
format: json
time_format: "%Y"
counter_server:
bind: 127.0.0.1
port: 24321
scope: server1
backup_path: /tmp/backup
counter_client:
host: 127.0.0.1
port: 24321
timeout: 2
source_only_buffer:
flush_thread_count: 4
overflow_action: throw_exception
path: /tmp/source-only-buffer
flush_interval: 1
chunk_limit_size: 100
total_limit_size: 1000
compress: gzip
EOC
conf = parse_yaml(conf_data)
sys_conf = sv.__send__(:build_system_config, conf)
counter_client = sys_conf.counter_client
counter_server = sys_conf.counter_server
source_only_buffer = sys_conf.source_only_buffer
# Expected and actual values are compared as two parallel arrays.
assert_equal(
[
'127.0.0.1:24445',
true,
true,
true,
true,
true,
"process_name",
2,
@tmp_root_dir,
"/tmp/fluentd.log",
:json,
'%Y',
'127.0.0.1',
24321,
'server1',
'/tmp/backup',
'127.0.0.1',
24321,
2,
4,
:throw_exception,
"/tmp/source-only-buffer",
1,
100,
1000,
:gzip,
],
[
sys_conf.rpc_endpoint,
sys_conf.suppress_repeated_stacktrace,
sys_conf.suppress_config_dump,
sys_conf.without_source,
sys_conf.with_source_only,
sys_conf.enable_get_dump,
sys_conf.process_name,
sys_conf.log_level,
sys_conf.root_dir,
sys_conf.log.path,
sys_conf.log.format,
sys_conf.log.time_format,
counter_server.bind,
counter_server.port,
counter_server.scope,
counter_server.backup_path,
counter_client.host,
counter_client.port,
counter_client.timeout,
source_only_buffer.flush_thread_count,
source_only_buffer.overflow_action,
source_only_buffer.path,
source_only_buffer.flush_interval,
source_only_buffer.chunk_limit_size,
source_only_buffer.total_limit_size,
source_only_buffer.compress,
])
end
end
# SIGUSR1 on the main (worker) process forces a buffered-event flush.
def test_usr1_in_main_process_signal_handlers
omit "Windows cannot handle signals" if Fluent.windows?
create_info_dummy_logger
sv = Fluent::Supervisor.new({})
sv.send(:install_main_process_signal_handlers)
Process.kill :USR1, Process.pid
sleep 1
info_msg = "[info]: force flushing buffered events\n"
assert{ $log.out.logs.first.end_with?(info_msg) }
ensure
$log.out.reset if $log&.out&.respond_to?(:reset)
end
# SIGCONT makes the main process write a sigdump file.
def test_cont_in_main_process_signal_handlers
omit "Windows cannot handle signals" if Fluent.windows?
# https://github.com/fluent/fluentd/issues/4063
GC.start
sv = Fluent::Supervisor.new({})
sv.send(:install_main_process_signal_handlers)
Process.kill :CONT, Process.pid
sleep 1
assert{ File.exist?(@sigdump_path) }
ensure
File.delete(@sigdump_path) if File.exist?(@sigdump_path)
end
# SIGCONT received after SIGTERM must NOT dump (process is shutting down).
def test_term_cont_in_main_process_signal_handlers
omit "Windows cannot handle signals" if Fluent.windows?
# https://github.com/fluent/fluentd/issues/4063
GC.start
create_debug_dummy_logger
sv = Fluent::Supervisor.new({})
sv.send(:install_main_process_signal_handlers)
Process.kill :TERM, Process.pid
Process.kill :CONT, Process.pid
sleep 1
debug_msg = "[debug]: fluentd main process get SIGTERM\n"
logs = $log.out.logs
assert{ logs.any?{|log| log.include?(debug_msg) } }
assert{ not File.exist?(@sigdump_path) }
ensure
$log.out.reset if $log&.out&.respond_to?(:reset)
File.delete(@sigdump_path) if File.exist?(@sigdump_path)
end
# SIGWINCH cancels with-source-only mode.
def test_winch_in_main_process_signal_handlers
omit "Windows cannot handle signals" if Fluent.windows?
mock(Fluent::Engine).cancel_source_only!
create_info_dummy_logger
sv = Fluent::Supervisor.new({})
sv.send(:install_main_process_signal_handlers)
Process.kill :WINCH, Process.pid
sleep 1
info_msg = "[info]: try to cancel with-source-only mode\n"
assert{ $log.out.logs.first.end_with?(info_msg) }
ensure
$log.out.reset if $log&.out&.respond_to?(:reset)
end
# On Windows, commands piped through stdin replace UNIX signals; a
# GRACEFUL_RESTART command triggers the same flush as SIGUSR1.
def test_main_process_command_handlers
omit "Only for Windows, alternative to UNIX signals" unless Fluent.windows?
create_info_dummy_logger
sv = Fluent::Supervisor.new({})
r, w = IO.pipe
$stdin = r
sv.send(:install_main_process_signal_handlers)
begin
w.write("GRACEFUL_RESTART\n")
w.flush
ensure
$stdin = STDIN
end
sleep 1
info_msg = "[info]: force flushing buffered events\n"
assert{ $log.out.logs.first.end_with?(info_msg) }
ensure
$log.out.reset if $log&.out&.respond_to?(:reset)
end
# SIGUSR1 on the supervisor process is logged at debug level.
def test_usr1_in_supervisor_signal_handler
omit "Windows cannot handle signals" if Fluent.windows?
create_debug_dummy_logger
server = DummyServer.new
server.install_supervisor_signal_handlers
Process.kill :USR1, Process.pid
sleep 1
debug_msg = '[debug]: fluentd supervisor process get SIGUSR1'
logs = $log.out.logs
assert{ logs.any?{|log| log.include?(debug_msg) } }
ensure
$log.out.reset if $log&.out&.respond_to?(:reset)
end
# SIGCONT makes the supervisor write a sigdump file.
def test_cont_in_supervisor_signal_handler
omit "Windows cannot handle signals" if Fluent.windows?
# https://github.com/fluent/fluentd/issues/4063
GC.start
server = DummyServer.new
server.install_supervisor_signal_handlers
Process.kill :CONT, Process.pid
sleep 1
assert{ File.exist?(@sigdump_path) }
ensure
File.delete(@sigdump_path) if File.exist?(@sigdump_path)
end
# SIGCONT after SIGTERM must not dump (supervisor is shutting down).
def test_term_cont_in_supervisor_signal_handler
omit "Windows cannot handle signals" if Fluent.windows?
# https://github.com/fluent/fluentd/issues/4063
GC.start
server = DummyServer.new
server.install_supervisor_signal_handlers
Process.kill :TERM, Process.pid
Process.kill :CONT, Process.pid
assert{ not File.exist?(@sigdump_path) }
ensure
File.delete(@sigdump_path) if File.exist?(@sigdump_path)
end
# SIGWINCH on the supervisor is logged at debug level.
def test_winch_in_supervisor_signal_handler
omit "Windows cannot handle signals" if Fluent.windows?
create_debug_dummy_logger
server = DummyServer.new
server.install_supervisor_signal_handlers
Process.kill :WINCH, Process.pid
sleep 1
debug_msg = '[debug]: fluentd supervisor process got SIGWINCH'
logs = $log.out.logs
assert{ logs.any?{|log| log.include?(debug_msg) } }
ensure
$log.out.reset if $log&.out&.respond_to?(:reset)
end
# The named Win32 event should stop the server exactly once, without
# any Process.kill fallback.
def test_windows_shutdown_event
omit "Only for Windows platform" unless Fluent.windows?
create_debug_dummy_logger
server = DummyServer.new
def server.config
{:signame => "TestFluentdEvent"}
end
mock(server).stop(true)
stub(Process).kill.times(0)
server.install_windows_event_handler
begin
sleep 0.1 # Wait for starting windows event thread
event = Win32::Event.open("TestFluentdEvent")
event.set
event.close
sleep 1.0 # Wait for dumping
ensure
server.stop_windows_event_thread
end
debug_msg = '[debug]: Got Win32 event "TestFluentdEvent"'
logs = $log.out.logs
assert{ logs.any?{|log| log.include?(debug_msg) } }
ensure
$log.out.reset if $log&.out&.respond_to?(:reset)
end
# The "<signame>_USR1" Win32 event is the Windows analogue of SIGUSR1.
def test_supervisor_event_handler
omit "Only for Windows, alternative to UNIX signals" unless Fluent.windows?
create_debug_dummy_logger
server = DummyServer.new
def server.config
{:signame => "TestFluentdEvent"}
end
server.install_windows_event_handler
begin
sleep 0.1 # Wait for starting windows event thread
event = Win32::Event.open("TestFluentdEvent_USR1")
event.set
event.close
sleep 1.0 # Wait for dumping
ensure
server.stop_windows_event_thread
end
debug_msg = '[debug]: Got Win32 event "TestFluentdEvent_USR1"'
logs = $log.out.logs
assert{ logs.any?{|log| log.include?(debug_msg) } }
ensure
$log.out.reset if $log&.out&.respond_to?(:reset)
end
# The PID must be inserted before the (last) extension of the sigdump path.
data("Normal", {raw_path: "C:\\Windows\\Temp\\sigdump.log", expected: "C:\\Windows\\Temp\\sigdump-#{Process.pid}.log"})
data("UNIX style", {raw_path: "/Windows/Temp/sigdump.log", expected: "/Windows/Temp/sigdump-#{Process.pid}.log"})
data("No extension", {raw_path: "C:\\Windows\\Temp\\sigdump", expected: "C:\\Windows\\Temp\\sigdump-#{Process.pid}"})
data("Multi-extension", {raw_path: "C:\\Windows\\Temp\\sig.dump.bk", expected: "C:\\Windows\\Temp\\sig.dump-#{Process.pid}.bk"})
def test_fluentsigdump_get_path_with_pid(data)
path = Fluent::FluentSigdump.get_path_with_pid(data[:raw_path])
assert_equal(data[:expected], path)
end
# The "<signame>_CONT" Win32 event (SIGCONT analogue) writes a sigdump
# file into SIGDUMP_PATH's directory.
def test_supervisor_event_dump_windows
omit "Only for Windows, alternative to UNIX signals" unless Fluent.windows?
# https://github.com/fluent/fluentd/issues/4063
GC.start
ENV['SIGDUMP_PATH'] = @tmp_dir + "/sigdump.log"
server = DummyServer.new
def server.config
{:signame => "TestFluentdEvent"}
end
server.install_windows_event_handler
begin
sleep 0.1 # Wait for starting windows event thread
event = Win32::Event.open("TestFluentdEvent_CONT")
event.set
event.close
sleep 1.0 # Wait for dumping
ensure
server.stop_windows_event_thread
end
result_filepaths = Dir.glob("#{@tmp_dir}/*")
assert {result_filepaths.length > 0}
ensure
ENV.delete('SIGDUMP_PATH')
end
# RPC server binds to the configured address; flushBuffers endpoint
# answers {"ok":true} and logs the forced flush.
data(:ipv4 => ["0.0.0.0", "127.0.0.1", false],
:ipv6 => ["[::]", "[::1]", true],
:localhost_ipv4 => ["localhost", "127.0.0.1", false])
def test_rpc_server(data)
omit "Windows cannot handle signals" if Fluent.windows?
bindaddr, localhost, ipv6 = data
omit "IPv6 is not supported on this environment" if ipv6 && !ipv6_enabled?
create_info_dummy_logger
sv = Fluent::Supervisor.new({})
conf_data = <<-EOC
<system>
rpc_endpoint "#{bindaddr}:24447"
</system>
EOC
conf = Fluent::Config.parse(conf_data, "(test)", "(test_dir)", true)
sys_conf = sv.__send__(:build_system_config, conf)
server = DummyServer.new
server.rpc_endpoint = sys_conf.rpc_endpoint
server.enable_get_dump = sys_conf.enable_get_dump
server.run_rpc_server
sv.send(:install_main_process_signal_handlers)
response = Net::HTTP.get(URI.parse("http://#{localhost}:24447/api/plugins.flushBuffers"))
info_msg = "[info]: force flushing buffered events\n"
server.stop_rpc_server
# In TravisCI with OSX(Xcode), it seems that can't use rpc server.
# This test will be passed in such environment.
pend unless $log.out.logs.first
assert_equal('{"ok":true}', response)
assert{ $log.out.logs.first.end_with?(info_msg) }
ensure
$log.out.reset if $log.out.is_a?(Fluent::Test::DummyLogDevice)
end
# A malformed rpc_endpoint must raise a ConfigError on startup.
data(:no_port => ["127.0.0.1"],
:invalid_addr => ["*:24447"])
def test_invalid_rpc_endpoint(data)
endpoint = data[0]
sv = Fluent::Supervisor.new({})
conf_data = <<-EOC
<system>
rpc_endpoint "#{endpoint}"
</system>
EOC
conf = Fluent::Config.parse(conf_data, "(test)", "(test_dir)", true)
sys_conf = sv.__send__(:build_system_config, conf)
server = DummyServer.new
server.rpc_endpoint = sys_conf.rpc_endpoint
assert_raise(Fluent::ConfigError.new("Invalid rpc_endpoint: #{endpoint}")) do
server.run_rpc_server
end
end
# Windows variant: flushBuffers goes through a restart instead of SIGUSR1.
data(:ipv4 => ["0.0.0.0", "127.0.0.1", false],
:ipv6 => ["[::]", "[::1]", true],
:localhost_ipv4 => ["localhost", "127.0.0.1", true])
def test_rpc_server_windows(data)
omit "Only for windows platform" unless Fluent.windows?
bindaddr, localhost, ipv6 = data
omit "IPv6 is not supported on this environment" if ipv6 && !ipv6_enabled?
create_info_dummy_logger
sv = Fluent::Supervisor.new({})
conf_data = <<-EOC
<system>
rpc_endpoint "#{bindaddr}:24447"
</system>
EOC
conf = Fluent::Config.parse(conf_data, "(test)", "(test_dir)", true)
sys_conf = sv.__send__(:build_system_config, conf)
server = DummyServer.new
def server.config
{
:signame => "TestFluentdEvent",
:worker_pid => 5963,
}
end
server.rpc_endpoint = sys_conf.rpc_endpoint
server.run_rpc_server
mock(server).restart(true) { nil }
response = Net::HTTP.get(URI.parse("http://#{localhost}:24447/api/plugins.flushBuffers"))
server.stop_rpc_server
assert_equal('{"ok":true}', response)
end
# Supervisor.serverengine_config: mapping of CLI/param hash entries onto
# the ServerEngine daemon configuration.
sub_test_case "serverengine_config" do
# Non-daemonized defaults: spawn worker type, heartbeat on, no pid file.
def test_normal
params = {}
params['workers'] = 1
params['fluentd_conf_path'] = "fluentd.conf"
params['use_v1_config'] = true
params['conf_encoding'] = 'utf-8'
params['log_level'] = Fluent::Log::LEVEL_INFO
load_config_proc = Proc.new { Fluent::Supervisor.serverengine_config(params) }
se_config = load_config_proc.call
assert_equal Fluent::Log::LEVEL_INFO, se_config[:log_level]
assert_equal 'spawn', se_config[:worker_type]
assert_equal 1, se_config[:workers]
assert_equal false, se_config[:log_stdin]
assert_equal false, se_config[:log_stdout]
assert_equal false, se_config[:log_stderr]
assert_equal true, se_config[:enable_heartbeat]
assert_equal false, se_config[:auto_heartbeat]
assert_equal "fluentd.conf", se_config[:config_path]
assert_equal false, se_config[:daemonize]
assert_nil se_config[:pid_path]
end
# 'daemonize' carries the pid-file path and flips the daemonize flag.
def test_daemonize
params = {}
params['workers'] = 1
params['fluentd_conf_path'] = "fluentd.conf"
params['use_v1_config'] = true
params['conf_encoding'] = 'utf-8'
params['log_level'] = Fluent::Log::LEVEL_INFO
params['daemonize'] = './fluentd.pid'
load_config_proc = Proc.new { Fluent::Supervisor.serverengine_config(params) }
se_config = load_config_proc.call
assert_equal Fluent::Log::LEVEL_INFO, se_config[:log_level]
assert_equal 'spawn', se_config[:worker_type]
assert_equal 1, se_config[:workers]
assert_equal false, se_config[:log_stdin]
assert_equal false, se_config[:log_stdout]
assert_equal false, se_config[:log_stderr]
assert_equal true, se_config[:enable_heartbeat]
assert_equal false, se_config[:auto_heartbeat]
assert_equal "fluentd.conf", se_config[:config_path]
assert_equal true, se_config[:daemonize]
assert_equal './fluentd.pid', se_config[:pid_path]
end
# chumask strings are parsed as octal (e.g. "222" -> 0o222 == 146).
data("nil", [nil, nil])
data("default", ["0", 0])
data("000", ["000", 0])
data("0000", ["0000", 0])
data("2", ["2", 2])
data("222", ["222", 146])
data("0222", ["0222", 146])
data("0 as integer", [0, 0])
def test_chumask((chumask, expected))
params = { "chumask" => chumask }
load_config_proc = Proc.new { Fluent::Supervisor.serverengine_config(params) }
se_config = load_config_proc.call
assert_equal expected, se_config[:chumask]
end
end
# The chumask CLI option must be forwarded verbatim into the
# serverengine_config params ("0" when not given).
data("default", [{}, "0"])
data("222", [{chumask: "222"}, "222"])
def test_chumask_should_be_passed_to_ServerEngine((cl_opt, expected_chumask_value))
proxy.mock(Fluent::Supervisor).serverengine_config(hash_including("chumask" => expected_chumask_value))
any_instance_of(ServerEngine::Daemon) { |daemon| mock(daemon).run.once }
supervisor = Fluent::Supervisor.new(cl_opt)
stub(Fluent::Config).build { config_element('ROOT') }
stub(supervisor).build_spawn_command { "dummy command line" }
supervisor.configure(supervisor: true)
supervisor.run_supervisor
end
# Global-logger initialization: <system>/<log> settings, CLI rotation
# options, and their interaction, for both supervisor and worker roles.
sub_test_case "init logger" do
data(supervisor: true)
data(worker: false)
def test_init_for_logger(supervisor)
tmp_conf_path = "#{@tmp_dir}/dir/test_init_for_logger.conf"
conf_info_str = <<~EOC
<system>
log_level warn # To suppress logs
suppress_repeated_stacktrace false
ignore_repeated_log_interval 10s
ignore_same_log_interval 20s
<log>
format json
time_format %FT%T.%L%z
forced_stacktrace_level info
</log>
</system>
EOC
write_config tmp_conf_path, conf_info_str
s = Fluent::Supervisor.new({config_path: tmp_conf_path})
s.configure(supervisor: supervisor)
assert_equal :json, $log.format
assert_equal '%FT%T.%L%z', $log.time_format
assert_equal false, $log.suppress_repeated_stacktrace
assert_equal 10, $log.ignore_repeated_log_interval
assert_equal 20, $log.ignore_same_log_interval
assert_equal Fluent::Log::LEVEL_INFO, $log.instance_variable_get(:@forced_stacktrace_level)
assert_true $log.force_stacktrace_level?
end
# CLI rotate options create a LogDeviceIO with the given age/size.
data(
daily_age: 'daily',
weekly_age: 'weekly',
monthly_age: 'monthly',
integer_age: 2,
)
def test_logger_with_rotate_age_and_rotate_size(rotate_age)
config_path = "#{@tmp_dir}/empty.conf"
write_config config_path, ""
sv = Fluent::Supervisor.new(
config_path: config_path,
log_path: "#{@tmp_dir}/test",
log_rotate_age: rotate_age,
log_rotate_size: 10,
)
sv.__send__(:setup_global_logger)
assert_equal Fluent::LogDeviceIO, $log.out.class
assert_equal rotate_age, $log.out.instance_variable_get(:@shift_age)
assert_equal 10, $log.out.instance_variable_get(:@shift_size)
end
# Rotation without a log path must not crash; logging stays on stdout.
def test_can_start_with_rotate_but_no_log_path
config_path = "#{@tmp_dir}/empty.conf"
write_config config_path, ""
sv = Fluent::Supervisor.new(
config_path: config_path,
log_rotate_age: 5,
)
sv.__send__(:setup_global_logger)
assert_true $log.stdout?
end
# <log> rotate_age/rotate_size in the system config override defaults.
sub_test_case "system log rotation" do
def parse_text(text)
basepath = File.expand_path(File.dirname(__FILE__) + '/../../')
Fluent::Config.parse(text, '(test)', basepath, true).elements.find { |e| e.name == 'system' }
end
def test_override_default_log_rotate
Tempfile.open do |file|
config = parse_text(<<-EOS)
<system>
<log>
rotate_age 3
rotate_size 300
</log>
</system>
EOS
file.puts(config)
file.flush
sv = Fluent::Supervisor.new({log_path: "#{@tmp_dir}/test.log", config_path: file.path})
sv.__send__(:setup_global_logger)
# NOTE(review): `logger` local is assigned but never used below.
logger = $log.instance_variable_get(:@logger)
assert_equal Fluent::LogDeviceIO, $log.out.class
assert_equal 3, $log.out.instance_variable_get(:@shift_age)
assert_equal 300, $log.out.instance_variable_get(:@shift_size)
end
end
# Same override via the YAML config format.
def test_override_default_log_rotate_with_yaml_config
Tempfile.open do |file|
config = <<-EOS
system:
log:
rotate_age: 3
rotate_size: 300
EOS
file.puts(config)
file.flush
sv = Fluent::Supervisor.new({log_path: "#{@tmp_dir}/test.log", config_path: file.path, config_file_type: :yaml})
sv.__send__(:setup_global_logger)
# NOTE(review): `logger` local is assigned but never used below.
logger = $log.instance_variable_get(:@logger)
assert_equal Fluent::LogDeviceIO, $log.out.class
assert_equal 3, $log.out.instance_variable_get(:@shift_age)
assert_equal 300, $log.out.instance_variable_get(:@shift_size)
end
end
end
def test_log_level_affects
sv = Fluent::Supervisor.new({})
c = Fluent::Config::Element.new('system', '', { 'log_level' => 'error' }, [])
stub(Fluent::Config).build { config_element('ROOT', '', {}, [c]) }
sv.configure
assert_equal Fluent::Log::LEVEL_ERROR, $log.level
end
data(supervisor: true)
data(worker: false)
def test_log_path(supervisor)
log_path = Pathname(@tmp_dir) + "fluentd.log"
config_path = Pathname(@tmp_dir) + "fluentd.conf"
write_config config_path.to_s, ""
s = Fluent::Supervisor.new(config_path: config_path.to_s, log_path: log_path.to_s)
assert_rr do
mock.proxy(File).chmod(0o777, log_path.parent.to_s).never
s.__send__(:setup_global_logger, supervisor: supervisor)
end
assert { log_path.parent.exist? }
ensure
$log.out.close
end
data(supervisor: true)
data(worker: false)
def test_dir_permission(supervisor)
omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
log_path = Pathname(@tmp_dir) + "fluentd.log"
config_path = Pathname(@tmp_dir) + "fluentd.conf"
conf = <<~EOC
<system>
dir_permission 0o777
</system>
EOC
write_config config_path.to_s, conf
s = Fluent::Supervisor.new(config_path: config_path.to_s, log_path: log_path.to_s)
assert_rr do
mock.proxy(File).chmod(0o777, log_path.parent.to_s).once
s.__send__(:setup_global_logger, supervisor: supervisor)
end
assert { log_path.parent.exist? }
assert { (File.stat(log_path.parent).mode & 0xFFF) == 0o777 }
ensure
$log.out.close
end
def test_files_for_each_process_with_rotate_on_windows
omit "Only for Windows." unless Fluent.windows?
log_path = Pathname(@tmp_dir) + "log" + "fluentd.log"
config_path = Pathname(@tmp_dir) + "fluentd.conf"
conf = <<~EOC
<system>
<log>
rotate_age 5
</log>
</system>
EOC
write_config config_path.to_s, conf
s = Fluent::Supervisor.new(config_path: config_path.to_s, log_path: log_path.to_s)
s.__send__(:setup_global_logger, supervisor: true)
$log.out.close
s = Fluent::Supervisor.new(config_path: config_path.to_s, log_path: log_path.to_s)
s.__send__(:setup_global_logger, supervisor: false)
$log.out.close
ENV["SERVERENGINE_WORKER_ID"] = "1"
s = Fluent::Supervisor.new(config_path: config_path.to_s, log_path: log_path.to_s)
s.__send__(:setup_global_logger, supervisor: false)
$log.out.close
assert { log_path.parent.entries.size == 5 } # [".", "..", "logfile.log", ...]
ensure
ENV.delete("SERVERENGINE_WORKER_ID")
end
end
def test_enable_shared_socket
server = DummyServer.new
begin
ENV.delete('SERVERENGINE_SOCKETMANAGER_PATH')
server.before_run
sleep 0.1 if Fluent.windows? # Wait for starting windows event thread
assert_not_nil(ENV['SERVERENGINE_SOCKETMANAGER_PATH'])
ensure
server.after_run
ENV.delete('SERVERENGINE_SOCKETMANAGER_PATH')
end
end
def test_disable_shared_socket
server = DummyServer.new
def server.config
{
:disable_shared_socket => true,
}
end
begin
ENV.delete('SERVERENGINE_SOCKETMANAGER_PATH')
server.before_run
sleep 0.1 if Fluent.windows? # Wait for starting windows event thread
assert_nil(ENV['SERVERENGINE_SOCKETMANAGER_PATH'])
ensure
server.after_run
ENV.delete('SERVERENGINE_SOCKETMANAGER_PATH')
end
end
sub_test_case "zero_downtime_restart" do
setup do
omit "Not supported on Windows" if Fluent.windows?
end
data(
# When daemonize, exit-status is important. The new spawned process does double-fork and exits soon.
"daemonize and succeeded double-fork of new process" => [true, true, 0, false],
"daemonize and failed double-fork of new process" => [true, false, 0, true],
# When no daemon, whether the new spawned process is alive is important, not exit-status.
"no daemon and new process alive" => [false, false, 3, false],
"no daemon and new process dead" => [false, false, 0, true],
)
def test_zero_downtime_restart((daemonize, wait_success, wait_sleep, restart_canceled))
# == Arrange ==
env_spawn = {}
pid_wait = nil
server = DummyServer.new
stub(server).config do
{
daemonize: daemonize,
pid_path: "test-pid-file",
}
end
process_stub = stub(Process)
process_stub.spawn do |env, commands|
env_spawn = env
-1
end
process_stub.wait2 do |pid|
pid_wait = pid
sleep wait_sleep
if wait_success
status = Class.new{def success?; true; end}.new
else
status = Class.new{def success?; false; end}.new
end
[pid, status]
end
stub(File).read("test-pid-file") { -1 }
# mock to check notify_new_supervisor_that_old_one_has_stopped sends SIGWINCH
if restart_canceled
mock(Process).kill(:WINCH, -1).never
else
mock(Process).kill(:WINCH, -1)
end
# == Act and Assert ==
server.before_run
server.zero_downtime_restart.join
sleep 1 # To wait a sub thread for waitpid in zero_downtime_restart
server.after_run
assert_equal(
[
!restart_canceled,
true,
Process.pid,
-1,
],
[
server.instance_variable_get(:@starting_new_supervisor_with_zero_downtime),
env_spawn.key?("SERVERENGINE_SOCKETMANAGER_INTERNAL_TOKEN"),
env_spawn["FLUENT_RUNNING_IN_PARALLEL_WITH_OLD"].to_i,
pid_wait,
]
)
ensure
Fluent::Supervisor.cleanup_socketmanager_path
ENV.delete('SERVERENGINE_SOCKETMANAGER_PATH')
end
def test_share_sockets
server = DummyServer.new
server.before_run
path = ENV['SERVERENGINE_SOCKETMANAGER_PATH']
client = ServerEngine::SocketManager::Client.new(path)
udp_port = unused_port(protocol: :udp)
tcp_port = unused_port(protocol: :tcp)
client.listen_udp("localhost", udp_port)
client.listen_tcp("localhost", tcp_port)
ENV['FLUENT_RUNNING_IN_PARALLEL_WITH_OLD'] = ""
new_server = DummyServer.new
stub(new_server).stop_parallel_old_supervisor_after_delay
new_server.before_run
assert_equal(
[[udp_port], [tcp_port]],
[
new_server.socket_manager_server.udp_sockets.values.map { |v| v.addr[1] },
new_server.socket_manager_server.tcp_sockets.values.map { |v| v.addr[1] },
]
)
ensure
server&.after_run
new_server&.after_run
ENV.delete('SERVERENGINE_SOCKETMANAGER_PATH')
ENV.delete("FLUENT_RUNNING_IN_PARALLEL_WITH_OLD")
end
def test_stop_parallel_old_supervisor_after_delay
ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = ""
ENV['FLUENT_RUNNING_IN_PARALLEL_WITH_OLD'] = "-1"
stub(ServerEngine::SocketManager::Server).share_sockets_with_another_server
mock(Process).kill(:TERM, -1)
server = DummyServer.new
server.before_run
sleep 12 # Can't we skip the delay for this test?
ensure
server&.after_run
ENV.delete('SERVERENGINE_SOCKETMANAGER_PATH')
ENV.delete("FLUENT_RUNNING_IN_PARALLEL_WITH_OLD")
end
end
sub_test_case "include additional configuration" do
setup do
@config_include_dir = File.join(@tmp_dir, "conf.d")
FileUtils.mkdir_p(@config_include_dir)
end
test "no additional configuration" do
c = Fluent::Config::Element.new('system', '', { 'config_include_dir' => '' }, [])
stub(Fluent::Config).build { config_element('ROOT', '', {}, [c]) }
supervisor = Fluent::Supervisor.new({})
supervisor.configure(supervisor: true)
assert_equal([c], supervisor.instance_variable_get(:@conf).elements)
end
data(
"single source" => ["forward"],
"multiple sources" => ["forward", "tcp"])
test "additional configuration" do |sources|
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_config.rb | test/test_config.rb | require_relative 'helper'
require 'fluent/config'
require 'fluent/config/parser'
require 'fluent/supervisor'
require 'fluent/load'
require 'fileutils'
class ConfigTest < Test::Unit::TestCase
include Fluent
TMP_DIR = File.dirname(__FILE__) + "/tmp/config#{ENV['TEST_ENV_NUMBER']}"
def read_config(path, use_yaml: false)
path = File.expand_path(path)
if use_yaml
context = Kernel.binding
s = Fluent::Config::YamlParser::Loader.new(context).load(Pathname.new(path))
Fluent::Config::YamlParser::Parser.new(s).build.to_element
else
File.open(path) { |io|
Fluent::Config::Parser.parse(io, File.basename(path), File.dirname(path))
}
end
end
def prepare_config
write_config "#{TMP_DIR}/config_test_1.conf", %[
k1 root_config
include dir/config_test_2.conf #
include #{TMP_DIR}/config_test_4.conf
include file://#{TMP_DIR}/config_test_5.conf
<include config.d/*.conf />
]
write_config "#{TMP_DIR}/dir/config_test_2.conf", %[
k2 relative_path_include
include ../config_test_3.conf
]
write_config "#{TMP_DIR}/config_test_3.conf", %[
k3 relative_include_in_included_file
]
write_config "#{TMP_DIR}/config_test_4.conf", %[
k4 absolute_path_include
]
write_config "#{TMP_DIR}/config_test_5.conf", %[
k5 uri_include
]
write_config "#{TMP_DIR}/config.d/config_test_6.conf", %[
k6 wildcard_include_1
<elem1 name>
include normal_parameter
</elem1>
]
write_config "#{TMP_DIR}/config.d/config_test_7.conf", %[
k7 wildcard_include_2
]
write_config "#{TMP_DIR}/config.d/config_test_8.conf", %[
<elem2 name>
<include ../dir/config_test_9.conf />
</elem2>
]
write_config "#{TMP_DIR}/dir/config_test_9.conf", %[
k9 embedded
<elem3 name>
nested nested_value
include hoge
</elem3>
]
write_config "#{TMP_DIR}/config.d/00_config_test_8.conf", %[
k8 wildcard_include_3
<elem4 name>
include normal_parameter
</elem4>
]
end
def test_include
prepare_config
c = read_config("#{TMP_DIR}/config_test_1.conf")
assert_equal 'root_config', c['k1']
assert_equal 'relative_path_include', c['k2']
assert_equal 'relative_include_in_included_file', c['k3']
assert_equal 'absolute_path_include', c['k4']
assert_equal 'uri_include', c['k5']
assert_equal 'wildcard_include_1', c['k6']
assert_equal 'wildcard_include_2', c['k7']
assert_equal 'wildcard_include_3', c['k8']
assert_equal [
'k1',
'k2',
'k3',
'k4',
'k5',
'k8', # Because of the file name this comes first.
'k6',
'k7',
], c.keys
elem1 = c.elements.find { |e| e.name == 'elem1' }
assert_not_nil elem1
assert_equal 'name', elem1.arg
assert_equal 'normal_parameter', elem1['include']
elem2 = c.elements.find { |e| e.name == 'elem2' }
assert_not_nil elem2
assert_equal 'name', elem2.arg
assert_equal 'embedded', elem2['k9']
assert !elem2.has_key?('include')
elem3 = elem2.elements.find { |e| e.name == 'elem3' }
assert_not_nil elem3
assert_equal 'nested_value', elem3['nested']
assert_equal 'hoge', elem3['include']
end
def test_check_not_fetchd
write_config "#{TMP_DIR}/config_test_not_fetched.conf", %[
<match dummy>
type rewrite
add_prefix filtered
<rule>
key path
pattern ^[A-Z]+
replace
</rule>
</match>
]
root_conf = read_config("#{TMP_DIR}/config_test_not_fetched.conf")
match_conf = root_conf.elements.first
rule_conf = match_conf.elements.first
not_fetched = []; root_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[type add_prefix key pattern replace], not_fetched
not_fetched = []; match_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[type add_prefix key pattern replace], not_fetched
not_fetched = []; rule_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[key pattern replace], not_fetched
# accessing should delete
match_conf['type']
rule_conf['key']
not_fetched = []; root_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[add_prefix pattern replace], not_fetched
not_fetched = []; match_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[add_prefix pattern replace], not_fetched
not_fetched = []; rule_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[pattern replace], not_fetched
# repeatedly accessing should not grow memory usage
before_size = match_conf.unused.size
10.times { match_conf['type'] }
assert_equal before_size, match_conf.unused.size
end
sub_test_case "yaml config" do
def test_included
write_config "#{TMP_DIR}/config_test_not_fetched.yaml", <<-EOS
config:
- source:
$type: dummy
tag: tag.dummy
- source:
$type: tcp
$log_level: info
tag: tag.tcp
parse:
$arg:
- why.parse.section.doesnot.have.arg
- huh
$type: none
- match:
$tag: tag.*
$type: stdout
$log_level: debug
buffer:
$type: memory
flush_interval: 1s
- !include fluent-included.yaml
EOS
write_config "#{TMP_DIR}/fluent-included.yaml", <<-EOS
- label:
$name: '@FLUENT_LOG'
config:
- match:
$type: "null"
$tag: "**"
buffer:
$type: memory
flush_mode: interval
flush_interval: 1s
EOS
root_conf = read_config("#{TMP_DIR}/config_test_not_fetched.yaml", use_yaml: true)
dummy_source_conf = root_conf.elements.first
tcp_source_conf = root_conf.elements[1]
parse_tcp_conf = tcp_source_conf.elements.first
match_conf = root_conf.elements[2]
label_conf = root_conf.elements[3]
fluent_log_conf = label_conf.elements.first
fluent_log_buffer_conf = fluent_log_conf.elements.first
assert_equal(
[
'dummy',
'tag.dummy',
'tcp',
'tag.tcp',
'info',
'none',
'why.parse.section.doesnot.have.arg,huh',
'stdout',
'tag.*',
'debug',
'null',
'**',
'@FLUENT_LOG',
'memory',
'interval',
'1s',
],
[
dummy_source_conf['@type'],
dummy_source_conf['tag'],
tcp_source_conf['@type'],
tcp_source_conf['tag'],
tcp_source_conf['@log_level'],
parse_tcp_conf['@type'],
parse_tcp_conf.arg,
match_conf['@type'],
match_conf.arg,
match_conf['@log_level'],
fluent_log_conf['@type'],
fluent_log_conf.arg,
label_conf.arg,
fluent_log_buffer_conf['@type'],
fluent_log_buffer_conf['flush_mode'],
fluent_log_buffer_conf['flush_interval'],
])
end
def test_included_glob
write_config "#{TMP_DIR}/config.yaml", <<-EOS
config:
- !include "include/*.yaml"
EOS
write_config "#{TMP_DIR}/include/02_source2.yaml", <<-EOS
- source:
$type: dummy
tag: tag.dummy
EOS
write_config "#{TMP_DIR}/include/01_source1.yaml", <<-EOS
- source:
$type: tcp
tag: tag.tcp
parse:
$arg:
- why.parse.section.doesnot.have.arg
- huh
$type: none
EOS
write_config "#{TMP_DIR}/include/03_match1.yaml", <<-EOS
- match:
$tag: tag.*
$type: stdout
buffer:
$type: memory
flush_interval: 1s
EOS
root_conf = read_config("#{TMP_DIR}/config.yaml", use_yaml: true)
tcp_source_conf = root_conf.elements.first
dummy_source_conf = root_conf.elements[1]
parse_tcp_conf = tcp_source_conf.elements.first
match_conf = root_conf.elements[2]
assert_equal(
[
'tcp',
'tag.tcp',
'none',
'why.parse.section.doesnot.have.arg,huh',
'dummy',
'tag.dummy',
'stdout',
'tag.*',
],
[
tcp_source_conf['@type'],
tcp_source_conf['tag'],
parse_tcp_conf['@type'],
parse_tcp_conf.arg,
dummy_source_conf['@type'],
dummy_source_conf['tag'],
match_conf['@type'],
match_conf.arg,
])
end
def test_check_not_fetchd
write_config "#{TMP_DIR}/config_test_not_fetched.yaml", <<-EOS
config:
- match:
$arg: dummy
$type: rewrite
add_prefix: filtered
rule:
key: path
pattern: "^[A-Z]+"
replace: true
EOS
root_conf = read_config("#{TMP_DIR}/config_test_not_fetched.yaml", use_yaml: true)
match_conf = root_conf.elements.first
rule_conf = match_conf.elements.first
not_fetched = []; root_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[@type $arg add_prefix key pattern replace], not_fetched
not_fetched = []; match_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[@type $arg add_prefix key pattern replace], not_fetched
not_fetched = []; rule_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[key pattern replace], not_fetched
# accessing should delete
match_conf['type']
rule_conf['key']
not_fetched = []; root_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[@type $arg add_prefix pattern replace], not_fetched
not_fetched = []; match_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[@type $arg add_prefix pattern replace], not_fetched
not_fetched = []; rule_conf.check_not_fetched {|key, e| not_fetched << key }
assert_equal %w[pattern replace], not_fetched
# repeatedly accessing should not grow memory usage
before_size = match_conf.unused.size
10.times { match_conf['type'] }
assert_equal before_size, match_conf.unused.size
end
data(
"One String for $arg" => <<~CONF,
config:
- source:
$type: sample
tag: test
- match:
$type: stdout
$tag: test.**
buffer:
$arg: tag
$type: memory
flush_mode: immediate
CONF
"Comma-separated String for $arg" => <<~CONF,
config:
- source:
$type: sample
tag: test
- match:
$type: stdout
$tag: test.**
buffer:
$arg: tag, time
$type: memory
timekey: 1h
flush_mode: immediate
CONF
"One-liner Array for $arg" => <<~CONF,
config:
- source:
$type: sample
tag: test
- match:
$type: stdout
$tag: test.**
buffer:
$arg: [tag, time]
$type: memory
timekey: 1h
flush_mode: immediate
CONF
"Multi-liner Array for $arg" => <<~CONF,
config:
- source:
$type: sample
tag: test
- match:
$type: stdout
$tag: test.**
buffer:
$arg:
- tag
- time
$type: memory
timekey: 1h
flush_mode: immediate
CONF
"One String for normal Array option" => <<~CONF,
config:
- source:
$type: sample
tag: test
- match:
$type: stdout
$tag: test.**
format:
$type: csv
fields: message
CONF
"Comma-separated String for normal Array option" => <<~CONF,
config:
- source:
$type: sample
tag: test
- match:
$type: stdout
$tag: test.**
inject:
time_key: timestamp
time_type: string
format:
$type: csv
fields: timestamp, message
CONF
"One-liner Array for normal Array option" => <<~CONF,
config:
- source:
$type: sample
tag: test
- match:
$type: stdout
$tag: test.**
inject:
time_key: timestamp
time_type: string
format:
$type: csv
fields: [timestamp, message]
CONF
"Multi-liner Array for normal Array option" => <<~CONF,
config:
- source:
$type: sample
tag: test
- match:
$type: stdout
$tag: test.**
inject:
time_key: timestamp
time_type: string
format:
$type: csv
fields:
- timestamp
- message
CONF
"Multiple sections" => <<~CONF,
config:
- source:
$type: sample
tag: test
- match:
$type: copy
$tag: test.**
store:
- $type: relabel
$label: "@foo"
- $type: relabel
$label: "@bar"
- label:
$name: "@foo"
config:
- match:
$type: stdout
$tag: test.**
- label:
$name: "@bar"
config:
- match:
$type: stdout
$tag: test.**
CONF
)
test "Can parse config without error" do |conf|
write_config "#{TMP_DIR}/config.yaml", conf
read_config("#{TMP_DIR}/config.yaml", use_yaml: true)
end
end
def write_config(path, data, encoding: 'utf-8')
FileUtils.mkdir_p(File.dirname(path))
File.open(path, "w:#{encoding}:utf-8") {|f| f.write data }
end
sub_test_case '.build' do
test 'read config' do
write_config("#{TMP_DIR}/build/config_build.conf", 'key value')
c = Fluent::Config.build(config_path: "#{TMP_DIR}/build/config_build.conf")
assert_equal('value', c['key'])
end
test 'read config with encoding' do
write_config("#{TMP_DIR}/build/config_build2.conf", "#てすと\nkey value", encoding: 'shift_jis')
c = Fluent::Config.build(config_path: "#{TMP_DIR}/build/config_build2.conf", encoding: 'shift_jis')
assert_equal('value', c['key'])
end
test 'read config with additional_config' do
write_config("#{TMP_DIR}/build/config_build2.conf", "key value")
c = Fluent::Config.build(config_path: "#{TMP_DIR}/build/config_build2.conf", additional_config: 'key2 value2')
assert_equal('value', c['key'])
assert_equal('value2', c['key2'])
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_event_time.rb | test/test_event_time.rb | require_relative 'helper'
require 'timecop'
require 'oj'
require 'yajl'
# Unit tests for Fluent::EventTime: accessors, conversions, JSON dumping via
# several serializers, and the integer-like comparison/arithmetic semantics.
class EventTimeTest < Test::Unit::TestCase
  setup do
    # Freeze the clock so EventTime.now is deterministic within a test.
    @now = Time.now
    Timecop.freeze(@now)
  end

  teardown do
    Timecop.return
  end

  test '#sec' do
    assert_equal(1, Fluent::EventTime.new(1, 2).sec)
  end

  test '#nsec' do
    assert_equal(2, Fluent::EventTime.new(1, 2).nsec)
    assert_equal(0, Fluent::EventTime.new(1).nsec)
  end

  test '#to_int' do
    # Integer conversion drops the nanosecond part.
    assert_equal(1, Fluent::EventTime.new(1, 2).to_int)
  end

  test '#to_r' do
    assert_equal(Rational(1_000_000_002, 1_000_000_000), Fluent::EventTime.new(1, 2).to_r)
  end

  test '#to_s' do
    # String form is the seconds only, also via interpolation.
    time = Fluent::EventTime.new(100)
    assert_equal('100', time.to_s)
    assert_equal('100', "#{time}")
  end

  test '#to_time' do
    time = Fluent::EventTime.new(@now.to_i, @now.nsec).to_time
    assert_instance_of(Time, time)
    assert_equal(@now.to_i, time.to_i)
    begin
      ::Time.at(0, 0, :nanosecond)
      assert_equal(@now.nsec, time.nsec)
    rescue
      # Time.at(@sec, @nsec / 1000.0) sometimes cause 1 diff error in nsec by 1000.0
      assert_in_delta(@now.nsec, time.nsec, 1)
    end
  end

  test '#to_json' do
    # Serializes as a plain integer, standalone and nested in containers.
    time = Fluent::EventTime.new(100)
    assert_equal('100', time.to_json)
    assert_equal('{"time":100}', {'time' => time}.to_json)
    assert_equal('["tag",100,{"key":"value"}]', ["tag", time, {"key" => "value"}].to_json)
  end

  test 'JSON.dump' do
    time = Fluent::EventTime.new(100)
    assert_equal('{"time":100}', JSON.dump({'time' => time}))
    assert_equal('["tag",100,{"key":"value"}]', JSON.dump(["tag", time, {"key" => "value"}]))
  end

  test 'Oj.dump' do
    time = Fluent::EventTime.new(100)
    require 'fluent/oj_options'
    Fluent::OjOptions.load_env
    assert_equal('{"time":100}', Oj.dump({'time' => time}))
    assert_equal('["tag",100,{"key":"value"}]', Oj.dump(["tag", time, {"key" => "value"}], mode: :compat))
  end

  test 'Yajl.dump' do
    time = Fluent::EventTime.new(100)
    assert_equal('{"time":100}', Yajl.dump({'time' => time}))
    assert_equal('["tag",100,{"key":"value"}]', Yajl.dump(["tag", time, {"key" => "value"}]))
  end

  test '.from_time' do
    # Time carries microseconds; EventTime stores nanoseconds.
    sec = 1000
    usec = 2
    time = Fluent::EventTime.from_time(Time.at(sec, usec))
    assert_equal(time.sec, sec)
    assert_equal(time.nsec, usec * 1000)
  end

  test 'now' do
    assert_equal(@now.to_i, Fluent::EventTime.now.sec)
    assert_equal(@now.nsec, Fluent::EventTime.now.nsec)
  end

  test 'parse' do
    assert_equal(Time.parse("2011-01-02 13:14:15").to_i, Fluent::EventTime.parse("2011-01-02 13:14:15").sec)
    assert_equal(Time.parse("2011-01-02 13:14:15").nsec, Fluent::EventTime.parse("2011-01-02 13:14:15").nsec)
  end

  test 'eq?' do
    # eq? compares sec AND nsec (stricter than ==); against an Integer only sec.
    assert(Fluent::EventTime.eq?(Fluent::EventTime.new(1, 2), Fluent::EventTime.new(1, 2)))
    refute(Fluent::EventTime.eq?(Fluent::EventTime.new(1, 2), Fluent::EventTime.new(1, 3)))
    refute(Fluent::EventTime.eq?(Fluent::EventTime.new(1, 2), Fluent::EventTime.new(3, 2)))
    refute(Fluent::EventTime.eq?(Fluent::EventTime.new(1, 2), Fluent::EventTime.new(3, 4)))
    assert(Fluent::EventTime.eq?(Fluent::EventTime.new(1, 2), 1))
    refute(Fluent::EventTime.eq?(Fluent::EventTime.new(1, 2), 2))
    assert(Fluent::EventTime.eq?(1, Fluent::EventTime.new(1, 2)))
    refute(Fluent::EventTime.eq?(2, Fluent::EventTime.new(1, 2)))
  end

  test '==' do
    # == compares only the integer seconds: (1, 2) == (1, 3) holds.
    assert(Fluent::EventTime.new(1, 2) == Fluent::EventTime.new(1, 2))
    assert(Fluent::EventTime.new(1, 2) == Fluent::EventTime.new(1, 3))
    refute(Fluent::EventTime.new(1, 2) == Fluent::EventTime.new(3, 2))
    refute(Fluent::EventTime.new(1, 2) == Fluent::EventTime.new(3, 4))
    assert(Fluent::EventTime.new(1, 2) == 1)
    refute(Fluent::EventTime.new(1, 2) == 2)
    assert(1 == Fluent::EventTime.new(1, 2))
    refute(2 == Fluent::EventTime.new(1, 2))
  end

  test '+' do
    # Arithmetic operates on the seconds part only.
    assert_equal(4, Fluent::EventTime.new(1, 2) + Fluent::EventTime.new(3, 4))
    assert_equal(6, Fluent::EventTime.new(1, 2) + 5)
    assert_equal(6, 5 + Fluent::EventTime.new(1, 2))
  end

  test '-' do
    assert_equal(-2, Fluent::EventTime.new(1, 2) - Fluent::EventTime.new(3, 4))
    assert_equal(-4, Fluent::EventTime.new(1, 2) - 5)
    assert_equal(4, 5 - Fluent::EventTime.new(1, 2))
  end

  test '>' do
    assert(Fluent::EventTime.new(2) > Fluent::EventTime.new(1))
    refute(Fluent::EventTime.new(1) > Fluent::EventTime.new(1))
    refute(Fluent::EventTime.new(1) > Fluent::EventTime.new(2))
    assert(Fluent::EventTime.new(2) > 1)
    refute(Fluent::EventTime.new(1) > 1)
    refute(Fluent::EventTime.new(1) > 2)
    assert(2 > Fluent::EventTime.new(1))
    refute(1 > Fluent::EventTime.new(1))
    refute(1 > Fluent::EventTime.new(2))
  end

  test '>=' do
    assert(Fluent::EventTime.new(2) >= Fluent::EventTime.new(1))
    assert(Fluent::EventTime.new(1) >= Fluent::EventTime.new(1))
    refute(Fluent::EventTime.new(1) >= Fluent::EventTime.new(2))
    assert(Fluent::EventTime.new(2) >= 1)
    assert(Fluent::EventTime.new(1) >= 1)
    refute(Fluent::EventTime.new(1) >= 2)
    assert(2 >= Fluent::EventTime.new(1))
    assert(1 >= Fluent::EventTime.new(1))
    refute(1 >= Fluent::EventTime.new(2))
  end

  test '<' do
    assert(Fluent::EventTime.new(1) < Fluent::EventTime.new(2))
    refute(Fluent::EventTime.new(1) < Fluent::EventTime.new(1))
    refute(Fluent::EventTime.new(2) < Fluent::EventTime.new(1))
    assert(Fluent::EventTime.new(1) < 2)
    refute(Fluent::EventTime.new(1) < 1)
    refute(Fluent::EventTime.new(2) < 1)
    assert(1 < Fluent::EventTime.new(2))
    refute(1 < Fluent::EventTime.new(1))
    refute(2 < Fluent::EventTime.new(1))
  end

  test '=<' do
    assert(Fluent::EventTime.new(1) <= Fluent::EventTime.new(2))
    assert(Fluent::EventTime.new(1) <= Fluent::EventTime.new(1))
    refute(Fluent::EventTime.new(2) <= Fluent::EventTime.new(1))
    assert(Fluent::EventTime.new(1) <= 2)
    assert(Fluent::EventTime.new(1) <= 1)
    refute(Fluent::EventTime.new(2) <= 1)
    assert(1 <= Fluent::EventTime.new(2))
    assert(1 <= Fluent::EventTime.new(1))
    refute(2 <= Fluent::EventTime.new(1))
  end

  test 'Time.at' do
    # Time.at must accept an EventTime and preserve full nanosecond precision.
    sec = 1000
    nsec = 2000
    ntime = Fluent::EventTime.new(sec, nsec)
    time = Time.at(ntime)
    assert_equal(sec, time.to_i)
    assert_equal(nsec, time.nsec)
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_plugin_id.rb | test/test_plugin_id.rb | require_relative 'helper'
require 'fluent/plugin/base'
require 'fluent/system_config'
require 'fileutils'
# Tests for the Fluent::PluginId mixin: @id handling, generated ids, and
# plugin_root_dir creation (worker subdirectory, permissions, memoization).
class PluginIdTest < Test::Unit::TestCase
  TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/tmp/plugin_id/#{ENV['TEST_ENV_NUMBER']}")

  # Minimal host plugin carrying only the mixin under test.
  class MyPlugin < Fluent::Plugin::Base
    include Fluent::PluginId
  end

  setup do
    @p = MyPlugin.new
  end

  sub_test_case '#plugin_id_for_test?' do
    test 'returns true always in test files' do
      assert @p.plugin_id_for_test?
    end

    test 'returns false always out of test files' do
      # TODO: no good way to write this test....
    end
  end

  sub_test_case 'configured without @id' do
    setup do
      @p.configure(config_element())
    end

    test '#plugin_id_configured? returns false' do
      assert_false @p.plugin_id_configured?
    end

    test '#plugin_id returns object_id based string' do
      # Fallback id is derived from the Ruby object id.
      assert_kind_of String, @p.plugin_id
      assert @p.plugin_id =~ /^object:[0-9a-f]+/
    end

    test '#plugin_root_dir returns nil' do
      assert_nil @p.plugin_root_dir
    end
  end

  sub_test_case 'configured with @id' do
    setup do
      FileUtils.rm_rf(TMP_DIR)
      FileUtils.mkdir_p(TMP_DIR)
      @p.configure(config_element('ROOT', '', {'@id' => 'testing_plugin_id'}))
    end

    test '#plugin_id_configured? returns true' do
      assert @p.plugin_id_configured?
    end

    test '#plugin_id returns the configured value' do
      assert_equal 'testing_plugin_id', @p.plugin_id
    end

    test '#plugin_root_dir returns nil without system root directory configuration' do
      assert_nil @p.plugin_root_dir
    end

    test '#plugin_root_dir returns an existing directory path frozen String' do
      root_dir = Fluent::SystemConfig.overwrite_system_config('root_dir' => File.join(TMP_DIR, "myroot")) do
        @p.plugin_root_dir
      end
      assert_kind_of String, root_dir
      assert Dir.exist?(root_dir)
      # Default worker id is 0 when SERVERENGINE_WORKER_ID is unset.
      assert root_dir =~ %r!/worker0/!
      assert root_dir.frozen?
    end

    test '#plugin_root_dir returns the same value for 2nd or more call' do
      # The path is memoized: identical object, not just equal string.
      root_dir = Fluent::SystemConfig.overwrite_system_config('root_dir' => File.join(TMP_DIR, "myroot")) do
        @p.plugin_root_dir
      end
      twice = Fluent::SystemConfig.overwrite_system_config('root_dir' => File.join(TMP_DIR, "myroot")) do
        @p.plugin_root_dir
      end
      assert_equal root_dir.object_id, twice.object_id
    end

    test '#plugin_root_dir refers SERVERENGINE_WORKER_ID environment path to create it' do
      prev_env_val = ENV['SERVERENGINE_WORKER_ID']
      begin
        ENV['SERVERENGINE_WORKER_ID'] = '7'
        root_dir = Fluent::SystemConfig.overwrite_system_config('root_dir' => File.join(TMP_DIR, "myroot")) do
          @p.plugin_root_dir
        end
        assert_kind_of String, root_dir
        assert Dir.exist?(root_dir)
        assert root_dir =~ %r!/worker7/!
        assert root_dir.frozen?
      ensure
        ENV['SERVERENGINE_WORKER_ID'] = prev_env_val
      end
    end

    test '#plugin_root_dir create directory with specify mode if not exists ' do
      omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
      root_dir = Fluent::SystemConfig.overwrite_system_config({ 'root_dir' => File.join(TMP_DIR, "myroot"), 'dir_permission' => '0777' }) do
        @p.plugin_root_dir
      end
      assert_equal '777', File.stat(root_dir).mode.to_s(8)[-3, 3]
    end

    test '#plugin_root_dir create directory with default permission if not exists ' do
      root_dir = Fluent::SystemConfig.overwrite_system_config({ 'root_dir' => File.join(TMP_DIR, "myroot") }) do
        @p.plugin_root_dir
      end
      assert_equal '755', File.stat(root_dir).mode.to_s(8)[-3, 3]
    end
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_process.rb | test/test_process.rb | require_relative 'helper'
require 'fluent/process'
# Backward-compatibility: the legacy top-level mixin constants must still
# resolve and alias their Fluent::Compat counterparts.
class ProcessCompatibilityTest < ::Test::Unit::TestCase
  test 'DetachProcessMixin is defined' do
    assert defined?(::Fluent::DetachProcessMixin)
    assert_equal ::Fluent::DetachProcessMixin, ::Fluent::Compat::DetachProcessMixin
  end
  test 'DetachMultiProcessMixin is defined' do
    assert defined?(::Fluent::DetachMultiProcessMixin)
    assert_equal ::Fluent::DetachMultiProcessMixin, ::Fluent::Compat::DetachMultiProcessMixin
  end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_engine.rb | test/test_engine.rb | require_relative 'helper'
require 'fluent/engine'
require 'fluent/config'
require 'fluent/input'
require 'fluent/system_config'
class EngineTest < ::Test::Unit::TestCase
class DummyEngineTestOutput < Fluent::Plugin::Output
Fluent::Plugin.register_output('dummy_engine_test', self)
def write(chunk); end
end
class DummyEngineTest2Output < Fluent::Plugin::Output
Fluent::Plugin.register_output('dummy_engine_test2', self)
def write(chunk); end
end
class DummyEngineTestInput < Fluent::Plugin::Input
Fluent::Plugin.register_input('dummy_engine_test', self)
def multi_workers_ready?; true; end
end
class DummyEngineTest2Input < Fluent::Plugin::Input
Fluent::Plugin.register_input('dummy_engine_test2', self)
def multi_workers_ready?; true; end
end
class DummyEngineClassVarTestInput < Fluent::Plugin::Input
Fluent::Plugin.register_input('dummy_engine_class_var_test', self)
@@test = nil
def multi_workers_ready?; true; end
end
sub_test_case '#reload_config' do
test 'reload new configuration' do
conf_data = <<-CONF
<source>
@type dummy_engine_test
</source>
<match>
@type dummy_engine_test
</match>
CONF
conf = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
system_config = Fluent::SystemConfig.create(conf)
engine = Fluent::EngineClass.new
engine.init(system_config)
engine.configure(conf)
assert_kind_of DummyEngineTestInput, engine.root_agent.inputs[0]
assert_kind_of DummyEngineTestOutput, engine.root_agent.outputs[0]
new_conf_data = <<-CONF
<source>
@type dummy_engine_test2
</source>
<match>
@type dummy_engine_test2
</match>
CONF
new_conf = Fluent::Config.parse(new_conf_data, '(test)', '(test_dir)', true)
agent = Fluent::RootAgent.new(log: $log, system_config: system_config)
stub(Fluent::RootAgent).new do
stub(agent).start.once
agent
end
engine.reload_config(new_conf)
assert_kind_of DummyEngineTest2Input, engine.root_agent.inputs[0]
assert_kind_of DummyEngineTest2Output, engine.root_agent.outputs[0]
end
test "doesn't start RootAgent when supervisor is true" do
conf_data = <<-CONF
<source>
@type dummy_engine_test
</source>
<match>
@type dummy_engine_test
</match>
CONF
conf = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
system_config = Fluent::SystemConfig.create(conf)
engine = Fluent::EngineClass.new
engine.init(system_config)
engine.configure(conf)
assert_kind_of DummyEngineTestInput, engine.root_agent.inputs[0]
assert_kind_of DummyEngineTestOutput, engine.root_agent.outputs[0]
new_conf_data = <<-CONF
<source>
@type dummy_engine_test2
</source>
<match>
@type dummy_engine_test2
</match>
CONF
new_conf = Fluent::Config.parse(new_conf_data, '(test)', '(test_dir)', true)
agent = Fluent::RootAgent.new(log: $log, system_config: system_config)
stub(Fluent::RootAgent).new do
stub(agent).start.never
agent
end
engine.reload_config(new_conf, supervisor: true)
assert_kind_of DummyEngineTest2Input, engine.root_agent.inputs[0]
assert_kind_of DummyEngineTest2Output, engine.root_agent.outputs[0]
end
test 'raise an error when conf is invalid' do
conf_data = <<-CONF
<source>
@type dummy_engine_test
</source>
<match>
@type dummy_engine_test
</match>
CONF
conf = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
system_config = Fluent::SystemConfig.create(conf)
engine = Fluent::EngineClass.new
engine.init(system_config)
engine.configure(conf)
assert_kind_of DummyEngineTestInput, engine.root_agent.inputs[0]
assert_kind_of DummyEngineTestOutput, engine.root_agent.outputs[0]
new_conf_data = <<-CONF
<source>
@type
</source>
CONF
new_conf = Fluent::Config.parse(new_conf_data, '(test)', '(test_dir)', true)
agent = Fluent::RootAgent.new(log: $log, system_config: system_config)
stub(Fluent::RootAgent).new do
stub(agent).start.never
agent
end
assert_raise(Fluent::ConfigError.new("Missing '@type' parameter on <source> directive")) do
engine.reload_config(new_conf)
end
assert_kind_of DummyEngineTestInput, engine.root_agent.inputs[0]
assert_kind_of DummyEngineTestOutput, engine.root_agent.outputs[0]
end
test 'raise an error when unreloadable exists' do
conf_data = <<-CONF
<source>
@type dummy_engine_test
</source>
<match>
@type dummy_engine_test
</match>
CONF
conf = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
system_config = Fluent::SystemConfig.create(conf)
engine = Fluent::EngineClass.new
engine.init(system_config)
engine.configure(conf)
assert_kind_of DummyEngineTestInput, engine.root_agent.inputs[0]
assert_kind_of DummyEngineTestOutput, engine.root_agent.outputs[0]
conf_data = <<-CONF
<source>
@type dummy_engine_class_var_test
</source>
<match>
@type dummy_engine_test
</match>
CONF
new_conf = Fluent::Config.parse(conf_data, '(test)', '(test_dir)', true)
e = assert_raise(Fluent::ConfigError) do
engine.reload_config(new_conf)
end
assert e.message.match?('Unreloadable plugin plugin: dummy_engine_class_var_test')
assert_kind_of DummyEngineTestInput, engine.root_agent.inputs[0]
assert_kind_of DummyEngineTestOutput, engine.root_agent.outputs[0]
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_clock.rb | test/test_clock.rb | require_relative 'helper'
require 'fluent/clock'
require 'timecop'
class ClockTest < ::Test::Unit::TestCase
teardown do
Fluent::Clock.return # call it always not to affect other tests
end
sub_test_case 'without any pre-operation' do
test 'clock can provides incremental floating point number based on second' do
c1 = Fluent::Clock.now
assert_kind_of Float, c1
sleep 1.1
c2 = Fluent::Clock.now
assert{ c2 >= c1 + 1.0 && c2 < c1 + 9.0 } # if clock returns deci-second (fantastic!), c2 should be larger than c1 + 10
end
test 'clock value will proceed even if timecop freezes Time' do
Timecop.freeze(Time.now) do
c1 = Fluent::Clock.now
assert_kind_of Float, c1
sleep 1.1
c2 = Fluent::Clock.now
assert{ c2 >= c1 + 1.0 && c2 < c1 + 9.0 }
end
end
end
sub_test_case 'using #freeze without any arguments' do
test 'Clock.freeze without arguments freezes clock with current clock value' do
c0 = Fluent::Clock.now
Fluent::Clock.freeze
c1 = Fluent::Clock.now
Fluent::Clock.return
c2 = Fluent::Clock.now
assert{ c0 <= c1 && c1 <= c2 }
end
test 'Clock.return raises an error if it is called in block' do
assert_raise RuntimeError.new("invalid return while running code in blocks") do
Fluent::Clock.freeze do
Fluent::Clock.return
end
end
end
end
sub_test_case 'using #freeze with clock value' do
test 'Clock.now always returns frozen time until #return called' do
c0 = Fluent::Clock.now
Fluent::Clock.freeze(c0)
assert_equal c0, Fluent::Clock.now
sleep 0.5
assert_equal c0, Fluent::Clock.now
sleep 0.6
assert_equal c0, Fluent::Clock.now
Fluent::Clock.return
c1 = Fluent::Clock.now
assert{ c1 >= c0 + 1.0 }
end
test 'Clock.now returns frozen time in the block argument of #freeze' do
c0 = Fluent::Clock.now
Fluent::Clock.freeze(c0) do
assert_equal c0, Fluent::Clock.now
sleep 0.5
assert_equal c0, Fluent::Clock.now
sleep 0.6
assert_equal c0, Fluent::Clock.now
end
c1 = Fluent::Clock.now
assert{ c1 >= c0 + 1.0 }
end
test 'Clock.now returns unfrozen value after jumping out from block by raising errors' do
c0 = Fluent::Clock.now
rescued_error = nil
begin
Fluent::Clock.freeze(c0) do
assert_equal c0, Fluent::Clock.now
sleep 0.5
assert_equal c0, Fluent::Clock.now
sleep 0.6
assert_equal c0, Fluent::Clock.now
raise "bye!"
end
rescue => e
rescued_error = e
end
assert rescued_error # ensure to rescue an error
c1 = Fluent::Clock.now
assert{ c1 >= c0 + 1.0 }
end
test 'Clock.return cancels all Clock.freeze effects by just once' do
c0 = Fluent::Clock.now
sleep 0.1
c1 = Fluent::Clock.now
sleep 0.1
c2 = Fluent::Clock.now
Fluent::Clock.freeze(c0)
sleep 0.1
assert_equal c0, Fluent::Clock.now
Fluent::Clock.freeze(c1)
sleep 0.1
assert_equal c1, Fluent::Clock.now
Fluent::Clock.freeze(c2)
sleep 0.1
assert_equal c2, Fluent::Clock.now
Fluent::Clock.return
assert{ Fluent::Clock.now > c2 }
end
test 'Clock.freeze allows nested blocks by itself' do
c0 = Fluent::Clock.now
sleep 0.1
c1 = Fluent::Clock.now
sleep 0.1
c2 = Fluent::Clock.now
Fluent::Clock.freeze(c0) do
sleep 0.1
assert_equal c0, Fluent::Clock.now
Fluent::Clock.freeze(c1) do
sleep 0.1
assert_equal c1, Fluent::Clock.now
Fluent::Clock.freeze(c2) do
sleep 0.1
assert_equal c2, Fluent::Clock.now
end
assert_equal c1, Fluent::Clock.now
end
assert_equal c0, Fluent::Clock.now
end
assert{ Fluent::Clock.now > c0 }
end
end
sub_test_case 'using #freeze with Time argument' do
test 'Clock.freeze returns the clock value which should be produced when the time is at the specified time' do
c0 = Fluent::Clock.now
t0 = Time.now
t1 = t0 - 30
assert_kind_of Time, t1
t2 = t0 + 30
assert_kind_of Time, t2
# 31 is for error of floating point value
Fluent::Clock.freeze(t1) do
c1 = Fluent::Clock.now
assert{ c1 >= c0 - 31 && c1 <= c0 - 31 + 10 } # +10 is for threading schedule error
end
# 29 is for error of floating point value
Fluent::Clock.freeze(t2) do
c2 = Fluent::Clock.now
assert{ c2 >= c0 + 29 && c2 <= c0 + 29 + 10 } # +10 is for threading schedule error
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_plugin_helper.rb | test/test_plugin_helper.rb | require_relative 'helper'
require 'fluent/plugin_helper'
require 'fluent/plugin/base'
class PluginHelperTest < Test::Unit::TestCase
module FluentTest; end
sub_test_case 'Fluent::Plugin::Base.helpers method works as shortcut to include helper modules' do
class FluentTest::PluginTest1 < Fluent::Plugin::TestBase
helpers :event_emitter
end
class FluentTest::PluginTest2 < Fluent::Plugin::TestBase
helpers :thread
end
class FluentTest::PluginTest3 < Fluent::Plugin::TestBase
helpers :event_loop
end
class FluentTest::PluginTest4 < Fluent::Plugin::TestBase
helpers :timer
end
class FluentTest::PluginTest5 < Fluent::Plugin::TestBase
helpers :child_process
end
class FluentTest::PluginTest6 < Fluent::Plugin::TestBase
helpers :retry_state
end
class FluentTest::PluginTest0 < Fluent::Plugin::TestBase
helpers :event_emitter, :thread, :event_loop, :timer, :child_process, :retry_state
end
test 'plugin can include helper event_emitter' do
assert FluentTest::PluginTest1.include?(Fluent::PluginHelper::EventEmitter)
p1 = FluentTest::PluginTest1.new
assert p1.respond_to?(:has_router?)
assert p1.has_router?
end
test 'plugin can include helper thread' do
assert FluentTest::PluginTest2.include?(Fluent::PluginHelper::Thread)
p2 = FluentTest::PluginTest2.new
assert p2.respond_to?(:thread_current_running?)
assert p2.respond_to?(:thread_create)
end
test 'plugin can include helper event_loop' do
assert FluentTest::PluginTest3.include?(Fluent::PluginHelper::EventLoop)
p3 = FluentTest::PluginTest3.new
assert p3.respond_to?(:event_loop_attach)
assert p3.respond_to?(:event_loop_running?)
end
test 'plugin can include helper timer' do
assert FluentTest::PluginTest4.include?(Fluent::PluginHelper::Timer)
p4 = FluentTest::PluginTest4.new
assert p4.respond_to?(:timer_execute)
end
test 'plugin can include helper child_process' do
assert FluentTest::PluginTest5.include?(Fluent::PluginHelper::ChildProcess)
p5 = FluentTest::PluginTest5.new
assert p5.respond_to?(:child_process_execute)
end
test 'plugin can 2 or more helpers at once' do
assert FluentTest::PluginTest0.include?(Fluent::PluginHelper::EventEmitter)
assert FluentTest::PluginTest0.include?(Fluent::PluginHelper::Thread)
assert FluentTest::PluginTest0.include?(Fluent::PluginHelper::EventLoop)
assert FluentTest::PluginTest0.include?(Fluent::PluginHelper::Timer)
assert FluentTest::PluginTest0.include?(Fluent::PluginHelper::ChildProcess)
p0 = FluentTest::PluginTest0.new
assert p0.respond_to?(:child_process_execute)
assert p0.respond_to?(:timer_execute)
assert p0.respond_to?(:event_loop_attach)
assert p0.respond_to?(:event_loop_running?)
assert p0.respond_to?(:thread_current_running?)
assert p0.respond_to?(:thread_create)
assert p0.respond_to?(:has_router?)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_mixin.rb | test/test_mixin.rb | require_relative 'helper'
require 'fluent/mixin'
require 'fluent/env'
require 'fluent/plugin'
require 'fluent/config'
require 'fluent/test'
require 'timecop'
module MixinTest
module Utils
def setup
super
Fluent::Test.setup
@time = Time.utc(1,2,3,4,5,2010,nil,nil,nil,nil)
Timecop.freeze(@time)
end
def teardown
super
Timecop.return
GC.start
end
module Checker
extend self
def format_check(tag, time, record); end
end
@@num = 0
def create_register_output_name
@@num += 1
"mixin_text_#{@@num}"
end
def format_check(hash, tagname = 'test')
mock(Checker).format_check(tagname, @time.to_i, hash)
end
def create_driver(include_klass, conf = '', tag = "test", &block)
register_output_name = create_register_output_name
include_klasses = [include_klass].flatten
klass = Class.new(Fluent::BufferedOutput) {
include_klasses.each {|k| include k }
Fluent::Plugin.register_output(register_output_name, self)
def format(tag, time, record)
Checker.format_check(tag, time, record)
[tag, time, record].to_msgpack
end
def write(chunk); end
}
if block
Utils.const_set("MixinTestClass#{@@num}", klass)
klass.module_eval(&block)
end
Fluent::Test::BufferedOutputTestDriver.new(klass, tag) {
}.configure("type #{register_output_name}" + conf)
end
end
class SetTagKeyMixinText < Test::Unit::TestCase
include Utils
def test_tag_key_default
format_check({
'a' => 1
})
d = create_driver(Fluent::SetTagKeyMixin, %[
])
d.emit({'a' => 1})
d.run
end
def test_include_tag_key_true
format_check({
'tag' => 'test',
'a' => 1
})
d = create_driver(Fluent::SetTagKeyMixin, %[
include_tag_key true
])
d.emit({'a' => 1})
d.run
end
def test_include_tag_key_false
format_check({
'a' => 1
})
d = create_driver(Fluent::SetTagKeyMixin, %[
include_tag_key false
])
d.emit({'a' => 1})
d.run
end
def test_tag_key_set
format_check({
'tag_key_changed' => 'test',
'a' => 1
})
d = create_driver(Fluent::SetTagKeyMixin, %[
include_tag_key true
tag_key tag_key_changed
])
d.emit({'a' => 1})
d.run
end
sub_test_case "mixin" do
data(
'true' => true,
'false' => false)
test 'include_tag_key' do |param|
d = create_driver(Fluent::SetTagKeyMixin) {
config_set_default :include_tag_key, param
}
assert_equal(param, d.instance.include_tag_key)
end
end
end
class SetTimeKeyMixinText < Test::Unit::TestCase
include Utils
def test_time_key_default
format_check({
'a' => 1
})
d = create_driver(Fluent::SetTimeKeyMixin, %[
])
d.emit({'a' => 1})
d.run
end
def test_include_time_key_true
format_check({
'time' => "2010-05-04T03:02:01Z",
'a' => 1
})
d = create_driver(Fluent::SetTimeKeyMixin, %[
include_time_key true
])
d.emit({'a' => 1})
d.run
end
def test_time_format
format_check({
'time' => "20100504",
'a' => 1
})
d = create_driver(Fluent::SetTimeKeyMixin, %[
include_time_key true
time_format %Y%m%d
])
d.emit({'a' => 1})
d.run
end
def test_timezone_1
format_check({
'time' => "2010-05-03T17:02:01-10:00",
'a' => 1
})
d = create_driver(Fluent::SetTimeKeyMixin, %[
include_time_key true
timezone Pacific/Honolulu
])
d.emit({'a' => 1})
d.run
end
def test_timezone_2
format_check({
'time' => "2010-05-04T08:32:01+05:30",
'a' => 1
})
d = create_driver(Fluent::SetTimeKeyMixin, %[
include_time_key true
timezone +05:30
])
d.emit({'a' => 1})
d.run
end
def test_timezone_invalid
assert_raise(Fluent::ConfigError) do
create_driver(Fluent::SetTimeKeyMixin, %[
include_time_key true
timezone Invalid/Invalid
])
end
end
sub_test_case "mixin" do
data(
'true' => true,
'false' => false)
test 'include_time_key' do |param|
d = create_driver(Fluent::SetTimeKeyMixin) {
config_set_default :include_time_key, param
}
assert_equal(param, d.instance.include_time_key)
end
end
end
class HandleTagMixinTest < Test::Unit::TestCase
include Utils
def test_add_tag_prefix
format_check({
'a' => 1
}, 'tag_prefix.test')
format_check({
'a' => 2
}, 'tag_prefix.test')
d = create_driver(Fluent::HandleTagNameMixin, %[
add_tag_prefix tag_prefix.
include_tag_key true
])
d.emit({'a' => 1})
d.emit({'a' => 2})
d.run
end
def test_add_tag_suffix
format_check({
'a' => 1
}, 'test.test_suffix')
format_check({
'a' => 2
}, 'test.test_suffix')
d = create_driver(Fluent::HandleTagNameMixin, %[
add_tag_suffix .test_suffix
include_tag_key true
])
d.emit({'a' => 1})
d.emit({'a' => 2})
d.run
end
def test_remove_tag_prefix
format_check({
'a' => 1
}, 'test')
format_check({
'a' => 2
}, 'test')
d = create_driver(Fluent::HandleTagNameMixin, %[
remove_tag_prefix te
include_tag_key true
], "tetest")
d.emit({'a' => 1})
d.emit({'a' => 2})
d.run
end
def test_remove_tag_suffix
format_check({
'a' => 1
}, 'test')
format_check({
'a' => 2
}, 'test')
d = create_driver(Fluent::HandleTagNameMixin, %[
remove_tag_suffix st
include_tag_key true
], "testst")
d.emit({'a' => 1})
d.emit({'a' => 2})
d.run
end
def test_mix_tag_handle
format_check({
'a' => 1
}, 'prefix.t')
d = create_driver(Fluent::HandleTagNameMixin, %[
remove_tag_prefix tes
add_tag_prefix prefix.
])
d.emit({'a' => 1})
d.run
end
def test_with_set_tag_key_mixin
format_check({
'tag' => 'tag_prefix.test',
'a' => 1
}, 'tag_prefix.test')
d = create_driver([Fluent::SetTagKeyMixin, Fluent::HandleTagNameMixin], %[
add_tag_prefix tag_prefix.
include_tag_key true
])
d.emit({'a' => 1})
d.run
end
def test_with_set_tag_key_mixin_include_order_reverse
format_check({
'tag' => 'tag_prefix.test',
'a' => 1
}, 'tag_prefix.test')
d = create_driver([Fluent::HandleTagNameMixin, Fluent::SetTagKeyMixin], %[
add_tag_prefix tag_prefix.
include_tag_key true
])
d.emit({'a' => 1})
d.run
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_daemonizer.rb | test/test_daemonizer.rb | require_relative 'helper'
require 'fluent/daemonizer'
class DaemonizerTest < ::Test::Unit::TestCase
TMP_DIR = File.join(File.dirname(__FILE__), 'tmp', 'daemonizer')
setup do
FileUtils.mkdir_p(TMP_DIR)
end
teardown do
FileUtils.rm_rf(TMP_DIR) rescue nil
end
test 'makes pid file' do
pid_path = File.join(TMP_DIR, 'file.pid')
mock(Process).daemon(anything, anything).once
r = Fluent::Daemonizer.daemonize(pid_path) { 'ret' }
assert_equal 'ret', r
assert File.exist?(pid_path)
assert Process.pid.to_s, File.read(pid_path).to_s
end
test 'in platforms which do not support fork' do
pid_path = File.join(TMP_DIR, 'file.pid')
mock(Process).daemon(anything, anything) { raise NotImplementedError }
args = ['-c', 'test.conf']
mock(Process).spawn(anything, *args) { Process.pid }
Fluent::Daemonizer.daemonize(pid_path, args) { 'ret' }
assert File.exist?(pid_path)
assert Process.pid.to_s, File.read(pid_path).to_s
end
sub_test_case 'when pid file already exists' do
test 'raise an error when process is running' do
omit 'chmod of file does not affect root user' if Process.uid.zero?
pid_path = File.join(TMP_DIR, 'file.pid')
File.write(pid_path, '1')
mock(Process).daemon(anything, anything).never
mock(Process).kill(0, 1).once
assert_raise(Fluent::ConfigError.new('pid(1) is running')) do
Fluent::Daemonizer.daemonize(pid_path) { 'ret' }
end
end
test 'raise an error when file is not readable' do
omit 'chmod of file does not affect root user' if Process.uid.zero?
not_readable_path = File.join(TMP_DIR, 'not_readable.pid')
File.write(not_readable_path, '1')
FileUtils.chmod(0333, not_readable_path)
mock(Process).daemon(anything, anything).never
assert_raise(Fluent::ConfigError.new("Cannot access pid file: #{File.absolute_path(not_readable_path)}")) do
Fluent::Daemonizer.daemonize(not_readable_path) { 'ret' }
end
end
test 'raise an error when file is not writable' do
omit 'chmod of file does not affect root user' if Process.uid.zero?
not_writable_path = File.join(TMP_DIR, 'not_writable.pid')
File.write(not_writable_path, '1')
FileUtils.chmod(0555, not_writable_path)
mock(Process).daemon(anything, anything).never
assert_raise(Fluent::ConfigError.new("Cannot access pid file: #{File.absolute_path(not_writable_path)}")) do
Fluent::Daemonizer.daemonize(not_writable_path) { 'ret' }
end
end
test 'raise an error when directory is not writable' do
omit 'chmod of file does not affect root user' if Process.uid.zero?
not_writable_dir = File.join(TMP_DIR, 'not_writable')
pid_path = File.join(not_writable_dir, 'file.pid')
FileUtils.mkdir_p(not_writable_dir)
FileUtils.chmod(0555, not_writable_dir)
mock(Process).daemon(anything, anything).never
assert_raise(Fluent::ConfigError.new("Cannot access directory for pid file: #{File.absolute_path(not_writable_dir)}")) do
Fluent::Daemonizer.daemonize(pid_path) { 'ret' }
end
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/helper.rb | test/helper.rb | # simplecov must be loaded before any of target code
if ENV['SIMPLE_COV']
require 'simplecov'
if defined?(SimpleCov::SourceFile)
mod = SimpleCov::SourceFile
def mod.new(*args, &block)
m = allocate
m.instance_eval do
begin
initialize(*args, &block)
rescue Encoding::UndefinedConversionError
@src = "".force_encoding('UTF-8')
end
end
m
end
end
unless SimpleCov.running
SimpleCov.start do
add_filter '/test/'
add_filter '/gems/'
end
end
end
# Some tests use Hash instead of Element for configure.
# We should rewrite these tests in the future and remove this ad-hoc code
class Hash
def corresponding_proxies
@corresponding_proxies ||= []
end
def to_masked_element
self
end
end
require 'rr'
require 'test/unit'
require 'test/unit/rr'
require 'fileutils'
require 'fluent/config/element'
require 'fluent/log'
require 'fluent/test'
require 'fluent/test/helpers'
require 'fluent/plugin/base'
require 'fluent/plugin_id'
require 'fluent/plugin_helper'
require 'fluent/msgpack_factory'
require 'fluent/time'
require 'serverengine'
require_relative 'helpers/fuzzy_assert'
require_relative 'helpers/process_extenstion'
module Fluent
module Plugin
class TestBase < Base
# a base plugin class, but not input nor output
# mainly for helpers and owned plugins
include PluginId
include PluginLoggerMixin
include PluginHelper::Mixin
end
end
end
unless defined?(Test::Unit::AssertionFailedError)
class Test::Unit::AssertionFailedError < StandardError
end
end
include Fluent::Test::Helpers
def unused_port(num = 1, protocol:, bind: "0.0.0.0")
case protocol
when :tcp, :tls
unused_port_tcp(num)
when :udp
unused_port_udp(num, bind: bind)
when :all
unused_port_tcp_udp(num)
else
raise ArgumentError, "unknown protocol: #{protocol}"
end
end
def unused_port_tcp_udp(num = 1)
raise "not support num > 1" if num > 1
# The default maximum number of file descriptors in macOS is 256.
# It might need to set num to a smaller value than that.
tcp_ports = unused_port_tcp(200)
port = unused_port_udp(1, port_list: tcp_ports)
raise "can't find unused port" unless port
port
end
def unused_port_tcp(num = 1)
ports = []
sockets = []
num.times do
s = TCPServer.open(0)
sockets << s
ports << s.addr[1]
end
sockets.each(&:close)
if num == 1
return ports.first
else
return *ports
end
end
PORT_RANGE_AVAILABLE = (1024...65535)
def unused_port_udp(num = 1, port_list: [], bind: "0.0.0.0")
family = IPAddr.new(IPSocket.getaddress(bind)).ipv4? ? ::Socket::AF_INET : ::Socket::AF_INET6
ports = []
sockets = []
use_random_port = port_list.empty?
i = 0
loop do
port = use_random_port ? rand(PORT_RANGE_AVAILABLE) : port_list[i]
u = UDPSocket.new(family)
if (u.bind(bind, port) rescue nil)
ports << port
sockets << u
else
u.close
end
i += 1
break if ports.size >= num
break if !use_random_port && i >= port_list.size
end
sockets.each(&:close)
if num == 1
return ports.first
else
return *ports
end
end
def waiting(seconds, logs: nil, plugin: nil)
begin
Timeout.timeout(seconds) do
yield
end
rescue Timeout::Error
if logs
STDERR.print(*logs)
elsif plugin
STDERR.print(*plugin.log.out.logs)
end
raise
end
end
def ipv6_enabled?
require 'socket'
begin
TCPServer.open("::1", 0)
true
rescue
false
end
end
dl_opts = {}
dl_opts[:log_level] = ServerEngine::DaemonLogger::WARN
logdev = Fluent::Test::DummyLogDevice.new
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
$log ||= Fluent::Log.new(logger)
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_match.rb | test/test_match.rb | require_relative 'helper'
require 'fluent/match'
class MatchTest < Test::Unit::TestCase
include Fluent
def test_simple
assert_glob_match('a', 'a')
assert_glob_match('a.b', 'a.b')
assert_glob_not_match('a', 'b')
assert_glob_not_match('a.b', 'aab')
end
def test_wildcard
assert_glob_match('a*', 'a')
assert_glob_match('a*', 'ab')
assert_glob_match('a*', 'abc')
assert_glob_match('*a', 'a')
assert_glob_match('*a', 'ba')
assert_glob_match('*a', 'cba')
assert_glob_match('*a*', 'a')
assert_glob_match('*a*', 'ba')
assert_glob_match('*a*', 'ac')
assert_glob_match('*a*', 'bac')
assert_glob_not_match('a*', 'a.b')
assert_glob_not_match('a*', 'ab.c')
assert_glob_not_match('a*', 'ba')
assert_glob_not_match('*a', 'ab')
assert_glob_match('a.*', 'a.b')
assert_glob_match('a.*', 'a.c')
assert_glob_not_match('a.*', 'ab')
assert_glob_match('a.*.c', 'a.b.c')
assert_glob_match('a.*.c', 'a.c.c')
assert_glob_not_match('a.*.c', 'a.c')
end
def test_recursive_wildcard
assert_glob_match('a.**', 'a')
assert_glob_not_match('a.**', 'ab')
assert_glob_not_match('a.**', 'abc')
assert_glob_match('a.**', 'a.b')
assert_glob_not_match('a.**', 'ab.c')
assert_glob_not_match('a.**', 'ab.d.e')
assert_glob_match('a**', 'a')
assert_glob_match('a**', 'ab')
assert_glob_match('a**', 'abc')
assert_glob_match('a**', 'a.b')
assert_glob_match('a**', 'ab.c')
assert_glob_match('a**', 'ab.d.e')
assert_glob_match('**.a', 'a')
assert_glob_not_match('**.a', 'ba')
assert_glob_not_match('**.a', 'c.ba')
assert_glob_match('**.a', 'b.a')
assert_glob_match('**.a', 'cb.a')
assert_glob_match('**.a', 'd.e.a')
assert_glob_match('**a', 'a')
assert_glob_match('**a', 'ba')
assert_glob_match('**a', 'c.ba')
assert_glob_match('**a', 'b.a')
assert_glob_match('**a', 'cb.a')
assert_glob_match('**a', 'd.e.a')
end
def test_or
assert_glob_match('a.{b,c}', 'a.b')
assert_glob_match('a.{b,c}', 'a.c')
assert_glob_not_match('a.{b,c}', 'a.d')
assert_glob_match('a.{b,c}.**', 'a.b')
assert_glob_match('a.{b,c}.**', 'a.c')
assert_glob_not_match('a.{b,c}.**', 'a.d')
assert_glob_not_match('a.{b,c}.**', 'a.cd')
assert_glob_match('a.{b.**,c}', 'a.b')
assert_glob_match('a.{b.**,c}', 'a.b.c')
assert_glob_match('a.{b.**,c}', 'a.c')
assert_glob_not_match('a.{b.**,c}', 'a.c.d')
end
def test_multi_pattern_or
assert_or_match('a.b a.c', 'a.b')
assert_or_match('a.b a.c', 'a.c')
assert_or_not_match('a.b a.c', 'a.d')
assert_or_match('a.b.** a.c.**', 'a.b')
assert_or_match('a.b.** a.c.**', 'a.c')
assert_or_not_match('a.b.** a.c.**', 'a.d')
assert_or_not_match('a.b.** a.c.**', 'a.cd')
assert_or_match('a.b.** a.c', 'a.b')
assert_or_match('a.b.** a.c', 'a.b.c')
assert_or_match('a.b.** a.c', 'a.c')
assert_or_not_match('a.b.** a.c', 'a.c.d')
end
def test_regex_pattern
assert_glob_match('/a/', 'a')
assert_glob_not_match('/a/', 'abc')
assert_glob_match('/a.*/', 'abc')
assert_glob_not_match('/b.*/', 'abc')
assert_glob_match('/a\..*/', 'a.b.c')
assert_glob_not_match('/(?!a\.).*/', 'a.b.c')
assert_glob_not_match('/a\..*/', 'b.b.c')
assert_glob_match('/(?!a\.).*/', 'b.b.c')
end
#def test_character_class
# assert_match('[a]', 'a')
# assert_match('[ab]', 'a')
# assert_match('[ab]', 'b')
# assert_not_match('[ab]', 'c')
#
# assert_match('[a-b]', 'a')
# assert_match('[a-b]', 'a')
# assert_match('[a-b]', 'b')
# assert_not_match('[a-b]', 'c')
#
# assert_match('[a-b0-9]', 'a')
# assert_match('[a-b0-9]', '0')
# assert_not_match('[a-b0-9]', 'c')
#end
def assert_glob_match(pat, str)
assert_true GlobMatchPattern.new(pat).match(str)
assert_true EventRouter::Rule.new(pat, nil).match?(str)
end
def assert_glob_not_match(pat, str)
assert_false GlobMatchPattern.new(pat).match(str)
assert_false EventRouter::Rule.new(pat, nil).match?(str)
end
def assert_or_match(pats, str)
assert_true EventRouter::Rule.new(pats, nil).match?(str)
end
def assert_or_not_match(pats, str)
assert_false EventRouter::Rule.new(pats, nil).match?(str)
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_file_wrapper.rb | test/test_file_wrapper.rb | require_relative 'helper'
require 'fluent/file_wrapper'
class FileWrapperTest < Test::Unit::TestCase
TMP_DIR = File.dirname(__FILE__) + "/../tmp/file_wrapper#{ENV['TEST_ENV_NUMBER']}"
def setup
FileUtils.mkdir_p(TMP_DIR)
end
def teardown
FileUtils.rm_rf(TMP_DIR)
end
sub_test_case 'WindowsFile exceptions' do
test 'nothing raised' do
begin
path = "#{TMP_DIR}/test_windows_file.txt"
file1 = file2 = nil
file1 = File.open(path, "wb") do |f|
end
assert_nothing_raised do
file2 = Fluent::WindowsFile.new(path)
ensure
file2.close
end
ensure
file1.close if file1
end
end
test 'Errno::ENOENT raised' do
path = "#{TMP_DIR}/nofile.txt"
file = nil
assert_raise(Errno::ENOENT) do
file = Fluent::WindowsFile.new(path)
ensure
file.close if file
end
end
test 'Errno::ENOENT raised on DeletePending' do
path = "#{TMP_DIR}/deletepending.txt"
file = Fluent::WindowsFile.new(path, mode='w')
File.delete(path)
assert_raise(Errno::ENOENT) do
file.stat
ensure
file.close if file
end
end
end
end if Fluent.windows?
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_time_parser.rb | test/test_time_parser.rb | require_relative 'helper'
require 'fluent/test'
require 'fluent/time'
class TimeParserTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
end
def test_call_with_parse
parser = Fluent::TimeParser.new
assert(parser.parse('2013-09-18 12:00:00 +0900').is_a?(Fluent::EventTime))
time = event_time('2013-09-18 12:00:00 +0900')
assert_equal(time, parser.parse('2013-09-18 12:00:00 +0900'))
end
def test_parse_with_strptime
parser = Fluent::TimeParser.new('%d/%b/%Y:%H:%M:%S %z')
assert(parser.parse('28/Feb/2013:12:00:00 +0900').is_a?(Fluent::EventTime))
time = event_time('28/Feb/2013:12:00:00 +0900', format: '%d/%b/%Y:%H:%M:%S %z')
assert_equal(time, parser.parse('28/Feb/2013:12:00:00 +0900'))
end
def test_parse_nsec_with_strptime
parser = Fluent::TimeParser.new('%d/%b/%Y:%H:%M:%S:%N %z')
assert(parser.parse('28/Feb/2013:12:00:00:123456789 +0900').is_a?(Fluent::EventTime))
time = event_time('28/Feb/2013:12:00:00:123456789 +0900', format: '%d/%b/%Y:%H:%M:%S:%N %z')
assert_equal_event_time(time, parser.parse('28/Feb/2013:12:00:00:123456789 +0900'))
end
def test_parse_iso8601
parser = Fluent::TimeParser.new('%iso8601')
assert(parser.parse('2017-01-01T12:00:00+09:00').is_a?(Fluent::EventTime))
time = event_time('2017-01-01T12:00:00+09:00')
assert_equal(time, parser.parse('2017-01-01T12:00:00+09:00'))
time_with_msec = event_time('2017-01-01T12:00:00.123+09:00')
assert_equal(time_with_msec, parser.parse('2017-01-01T12:00:00.123+09:00'))
end
def test_parse_with_invalid_argument
parser = Fluent::TimeParser.new
[[], {}, nil, true, 10000, //, ->{}, '', :symbol].each { |v|
assert_raise Fluent::TimeParser::TimeParseError do
parser.parse(v)
end
}
end
def test_parse_time_in_localtime
time = with_timezone("UTC+02") do
parser = Fluent::TimeParser.new("%Y-%m-%d %H:%M:%S.%N", true)
parser.parse("2016-09-02 18:42:31.123456789")
end
assert_equal_event_time(time, event_time("2016-09-02 18:42:31.123456789 -02:00", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
def test_parse_time_in_utc
time = with_timezone("UTC-09") do
parser = Fluent::TimeParser.new("%Y-%m-%d %H:%M:%S.%N", false)
parser.parse("2016-09-02 18:42:31.123456789")
end
assert_equal_event_time(time, event_time("2016-09-02 18:42:31.123456789 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
def test_parse_string_with_expected_timezone
time = with_timezone("UTC-09") do
parser = Fluent::TimeParser.new("%Y-%m-%d %H:%M:%S.%N", nil, "-07:00")
parser.parse("2016-09-02 18:42:31.123456789")
end
assert_equal_event_time(time, event_time("2016-09-02 18:42:31.123456789 -07:00", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
def test_parse_time_with_expected_timezone_name
time = with_timezone("UTC-09") do
parser = Fluent::TimeParser.new("%Y-%m-%d %H:%M:%S.%N", nil, "Europe/Zurich")
parser.parse("2016-12-02 18:42:31.123456789")
end
assert_equal_event_time(time, event_time("2016-12-02 18:42:31.123456789 +01:00", format: '%Y-%m-%d %H:%M:%S.%N %z'))
end
# Tests for the TimeMixin::Parser module: it adds time_format / localtime /
# utc / timezone configuration parameters and a #time_parser_create factory
# to any Fluent::Configurable class.
sub_test_case 'TimeMixin::Parser' do
# Minimal host class carrying only the mixin under test.
class DummyForTimeParser
include Fluent::Configurable
include Fluent::TimeMixin::Parser
end
# Defaults: no time_format, localtime on, utc off, no timezone.
test 'provides configuration parameters for TimeParser with default values for localtime' do
time = with_timezone("UTC+07") do
i = DummyForTimeParser.new
i.configure(config_element('parse'))
assert_nil i.time_format
assert_true i.localtime
assert_false i.utc
assert_nil i.timezone
parser = i.time_parser_create
# time_format unspecified
# localtime
parser.parse("2016-09-02 18:42:31.012345678")
end
assert_equal_event_time(event_time("2016-09-02 18:42:31.012345678 -07:00", format: '%Y-%m-%d %H:%M:%S.%N %z'), time)
end
# time_format is honored while localtime remains the default.
test 'provides configuration parameters for TimeParser, configurable for any time format' do
time = with_timezone("UTC+07") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_format' => '%m/%d/%Y %H-%M-%S %N'}))
parser = i.time_parser_create
# time_format specified
# localtime
parser.parse("09/02/2016 18-42-31 012345678")
end
assert_equal_event_time(event_time("2016-09-02 18:42:31.012345678 -07:00", format: '%Y-%m-%d %H:%M:%S.%N %z'), time)
end
# localtime=false switches interpretation to UTC.
test 'provides configuration parameters for TimeParser, configurable for UTC by localtime=false' do
time = with_timezone("UTC+07") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_format' => '%m/%d/%Y %H-%M-%S %N', 'localtime' => 'false'}))
parser = i.time_parser_create
# time_format specified
# utc
parser.parse("09/02/2016 18-42-31 012345678")
end
assert_equal_event_time(event_time("2016-09-02 18:42:31.012345678 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'), time)
end
# utc=true is the equivalent, alternative spelling of localtime=false.
test 'provides configuration parameters for TimeParser, configurable for UTC by utc=true' do
time = with_timezone("UTC+07") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_format' => '%m/%d/%Y %H-%M-%S %N', 'utc' => 'true'}))
parser = i.time_parser_create
# time_format specified
# utc
parser.parse("09/02/2016 18-42-31 012345678")
end
assert_equal_event_time(event_time("2016-09-02 18:42:31.012345678 UTC", format: '%Y-%m-%d %H:%M:%S.%N %z'), time)
end
# An explicit timezone wins over the process-local one.
test 'provides configuration parameters for TimeParser, configurable for any timezone' do
time = with_timezone("UTC+07") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_format' => '%m/%d/%Y %H-%M-%S %N', 'timezone' => '-01:00'}))
parser = i.time_parser_create
# time_format specified
# -01:00
parser.parse("09/02/2016 18-42-31 012345678")
end
assert_equal_event_time(event_time("2016-09-02 18:42:31.012345678 -01:00", format: '%Y-%m-%d %H:%M:%S.%N %z'), time)
end
# Any of utc / localtime=false / timezone without a time_format is rejected
# at #time_parser_create time with the same ConfigError.
test 'specifying timezone without time format raises configuration error' do
assert_raise Fluent::ConfigError.new("specifying timezone requires time format") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'utc' => 'true'}))
i.time_parser_create
end
assert_raise Fluent::ConfigError.new("specifying timezone requires time format") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'localtime' => 'false'}))
i.time_parser_create
end
assert_raise Fluent::ConfigError.new("specifying timezone requires time format") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'timezone' => '-0700'}))
i.time_parser_create
end
end
# The format: keyword on #time_parser_create overrides the configured format.
test '#time_parser_create returns TimeParser with specified time format and timezone' do
time = with_timezone("UTC-09") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_format' => '%m/%d/%Y %H-%M-%S %N'}))
assert_equal '%m/%d/%Y %H-%M-%S %N', i.time_format
assert_true i.localtime
parser = i.time_parser_create(format: '%Y-%m-%d %H:%M:%S.%N %z')
parser.parse("2016-09-05 17:59:38.987654321 -03:00")
end
assert_equal_event_time(event_time("2016-09-05 17:59:38.987654321 -03:00", format: '%Y-%m-%d %H:%M:%S.%N %z'), time)
end
# force_localtime: true overrides utc=true and an explicit timezone alike
# (UTC-09 process zone -> +09:00 offset in the expected values).
test '#time_parser_create returns TimeParser with localtime when specified it forcedly besides any configuration parameters' do
time = with_timezone("UTC-09") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_format' => '%m/%d/%Y %H-%M-%S', 'utc' => 'true'}))
assert_equal '%m/%d/%Y %H-%M-%S', i.time_format
assert_true i.utc
parser = i.time_parser_create(format: '%Y-%m-%d %H:%M:%S.%N', force_localtime: true)
parser.parse("2016-09-05 17:59:38.987654321")
end
assert_equal_event_time(event_time("2016-09-05 17:59:38.987654321 +09:00", format: '%Y-%m-%d %H:%M:%S.%N %z'), time)
time = with_timezone("UTC-09") do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_format' => '%m/%d/%Y %H-%M-%S', 'timezone' => '+0000'}))
assert_equal '%m/%d/%Y %H-%M-%S', i.time_format
assert_equal '+0000', i.timezone
parser = i.time_parser_create(format: '%Y-%m-%d %H:%M:%S.%N', force_localtime: true)
parser.parse("2016-09-05 17:59:38.987654321")
end
assert_equal_event_time(event_time("2016-09-05 17:59:38.987654321 +09:00", format: '%Y-%m-%d %H:%M:%S.%N %z'), time)
end
# time_type unixtime: the factory yields a numeric parser; sub-second
# precision is dropped (seconds only).
test '#time_parser_create returns NumericTimeParser to parse time as unixtime when time_type unixtime specified' do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_type' => 'unixtime'}))
parser = i.time_parser_create
time = event_time("2016-10-03 20:08:30.123456789 +0100", format: '%Y-%m-%d %H:%M:%S.%N %z')
assert_equal_event_time(Fluent::EventTime.new(time.to_i), parser.parse("#{time.sec}"))
end
# time_type float: fractional seconds are preserved.
test '#time_parser_create returns NumericTimeParser to parse time as float when time_type float specified' do
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_type' => 'float'}))
parser = i.time_parser_create
time = event_time("2016-10-03 20:08:30.123456789 +0100", format: '%Y-%m-%d %H:%M:%S.%N %z')
assert_equal_event_time(time, parser.parse("#{time.sec}.#{time.nsec}"))
end
end
# Tests for time_type :mixed (MixedTimeParser): the primary time_format is
# tried first, then each entry of time_format_fallbacks in order.
sub_test_case 'MixedTimeParser fallback' do
  # Minimal host class carrying only the TimeMixin::Parser mixin under test.
  class DummyForTimeParser
    include Fluent::Configurable
    include Fluent::TimeMixin::Parser
  end

  test 'no time_format_fallbacks failure' do
    i = DummyForTimeParser.new
    # :mixed with neither a primary format nor fallbacks is a configuration error.
    assert_raise(Fluent::ConfigError.new("time_type is :mixed but time_format and time_format_fallbacks is empty.")) do
      i.configure(config_element('parse', '', {'time_type' => 'mixed'}))
    end
  end

  test 'fallback time format failure' do
    i = DummyForTimeParser.new
    i.configure(config_element('parse', '',
                               {'time_type' => 'mixed',
                                'time_format_fallbacks' => ['%iso8601']}))
    parser = i.time_parser_create
    # When every fallback fails, the error message lists the parsers tried.
    assert_raise(Fluent::TimeParser::TimeParseError.new("invalid time format: value = INVALID, even though fallbacks: Fluent::TimeParser")) do
      parser.parse("INVALID")
    end
  end

  test 'primary format is unixtime, secondary %iso8601 is used' do
    i = DummyForTimeParser.new
    i.configure(config_element('parse', '',
                               {'time_type' => 'mixed',
                                'time_format' => 'unixtime',
                                'time_format_fallbacks' => ['%iso8601']}))
    parser = i.time_parser_create
    time = event_time('2021-01-01T12:00:00+0900')
    assert_equal_event_time(time, parser.parse('2021-01-01T12:00:00+0900'))
  end

  test 'primary format is %iso8601, secondary unixtime is used' do
    i = DummyForTimeParser.new
    i.configure(config_element('parse', '',
                               {'time_type' => 'mixed',
                                'time_format' => '%iso8601',
                                'time_format_fallbacks' => ['unixtime']}))
    parser = i.time_parser_create
    time = event_time('2021-01-01T12:00:00+0900')
    assert_equal_event_time(time, parser.parse("#{time.sec}"))
  end

  test 'primary format is %iso8601, no secondary is used' do
    i = DummyForTimeParser.new
    i.configure(config_element('parse', '',
                               {'time_type' => 'mixed',
                                'time_format' => '%iso8601'}))
    parser = i.time_parser_create
    time = event_time('2021-01-01T12:00:00+0900')
    assert_equal_event_time(time, parser.parse("2021-01-01T12:00:00+0900"))
  end

  test 'primary format is unixtime, no secondary is used' do
    i = DummyForTimeParser.new
    i.configure(config_element('parse', '',
                               {'time_type' => 'mixed',
                                'time_format' => 'unixtime'}))
    parser = i.time_parser_create
    time = event_time('2021-01-01T12:00:00+0900')
    assert_equal_event_time(time, parser.parse("#{time.sec}"))
  end

  test 'primary format is %iso8601, raise error because of no appropriate secondary' do
    i = DummyForTimeParser.new
    i.configure(config_element('parse', '',
                               {'time_type' => 'mixed',
                                'time_format' => '%iso8601'}))
    parser = i.time_parser_create
    time = event_time('2021-01-01T12:00:00+0900')
    # BUGFIX: this previously passed a bare String to assert_raise; test-unit
    # treats a lone String argument as the assertion *message* and would have
    # accepted ANY raised exception. Assert the concrete exception class.
    assert_raise(Fluent::TimeParser::TimeParseError) do
      parser.parse("#{time.sec}")
    end
  end

  test 'primary format is unixtime, raise error because of no appropriate secondary' do
    i = DummyForTimeParser.new
    i.configure(config_element('parse', '',
                               {'time_type' => 'mixed',
                                'time_format' => 'unixtime'}))
    parser = i.time_parser_create
    # BUGFIX: same as above — assert the exception class, not a String message.
    assert_raise(Fluent::TimeParser::TimeParseError) do
      parser.parse("2021-01-01T12:00:00+0900")
    end
  end

  test 'fallback to unixtime' do
    i = DummyForTimeParser.new
    i.configure(config_element('parse', '', {'time_type' => 'mixed',
                                             'time_format_fallbacks' => ['%iso8601', 'unixtime']}))
    parser = i.time_parser_create
    time = event_time('2021-01-01T12:00:00+0900')
    # The unixtime fallback drops sub-second precision (seconds only).
    assert_equal_event_time(Fluent::EventTime.new(time.to_i), parser.parse("#{time.sec}"))
  end

  test 'fallback to %iso8601' do
    i = DummyForTimeParser.new
    i.configure(config_element('parse', '', {'time_type' => 'mixed',
                                             'time_format_fallbacks' => ['unixtime', '%iso8601']}))
    parser = i.time_parser_create
    time = event_time('2021-01-01T12:00:00+0900')
    assert_equal_event_time(time, parser.parse('2021-01-01T12:00:00+0900'))
  end
end
# https://github.com/fluent/fluentd/issues/3195
# Regression test: a parser built with utc=true and a zone-less format must
# keep interpreting input as UTC even when the process timezone changes
# between parse calls (i.e. no stale cached offset).
test 'change timezone without zone specifier in a format' do
expected = 1607457600 # 2020-12-08T20:00:00Z
time1 = time2 = nil
with_timezone("UTC-05") do # EST
i = DummyForTimeParser.new
i.configure(config_element('parse', '', {'time_type' => 'string',
'time_format' => '%Y-%m-%dT%H:%M:%SZ',
'utc' => true}))
parser = i.time_parser_create
time1 = parser.parse('2020-12-08T20:00:00Z').to_i
time2 = with_timezone("UTC-04") do # EDT
# to avoid using cache, increment 1 sec
parser.parse('2020-12-08T20:00:01Z').to_i
end
end
assert_equal([expected, expected + 1], [time1, time2])
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_test_drivers.rb | test/test_test_drivers.rb | require_relative 'helper'
require 'fluent/plugin/input'
require 'fluent/test/driver/input'
require 'fluent/plugin/output'
require 'fluent/test/driver/output'
require 'fluent/plugin/filter'
require 'fluent/test/driver/filter'
require 'fluent/plugin/multi_output'
require 'fluent/test/driver/multi_output'
require 'fluent/plugin/parser'
require 'fluent/test/driver/parser'
require 'fluent/plugin/formatter'
require 'fluent/test/driver/formatter'
require 'timecop'
# Exercises the generic plugin test drivers (Fluent::Test::Driver::*):
# propagation of the block's value out of #run, hard-timeout enforcement,
# soft-timeout behavior under Timecop, and re-raising of errors from
# plugin-created threads.
class TestDriverTest < ::Test::Unit::TestCase
def setup
Fluent::Test.setup
end
sub_test_case 'plugin test driver' do
data(
'input plugin test driver' => [Fluent::Test::Driver::Input, Fluent::Plugin::Input],
'multi_output plugin test driver' => [Fluent::Test::Driver::MultiOutput, Fluent::Plugin::MultiOutput],
'parser plugin test driver' => [Fluent::Test::Driver::Parser, Fluent::Plugin::Parser],
'formatter plugin test driver' => [Fluent::Test::Driver::Formatter, Fluent::Plugin::Formatter],
)
# The last expression of the block (3) must become #run's return value.
test 'returns the block value as the return value of #run' do |args|
driver_class, plugin_class = args
d = driver_class.new(Class.new(plugin_class))
v = d.run do
x = 1 + 2
y = 2 + 4
3 || x || y
end
assert_equal 3, v
end
data(
'input plugin test driver' => [Fluent::Test::Driver::Input, Fluent::Plugin::Input],
'multi_output plugin test driver' => [Fluent::Test::Driver::MultiOutput, Fluent::Plugin::MultiOutput],
'parser plugin test driver' => [Fluent::Test::Driver::Parser, Fluent::Plugin::Parser],
'formatter plugin test driver' => [Fluent::Test::Driver::Formatter, Fluent::Plugin::Formatter],
)
# A block outliving timeout: must raise TestTimedOut (hard timeout).
test 'raises error for hard timeout' do |args|
driver_class, plugin_class = args
d = driver_class.new(Class.new(plugin_class))
assert_raise Fluent::Test::Driver::TestTimedOut do
d.run(timeout: 0.5) do
sleep 2
end
end
end
data(
'input plugin test driver' => [Fluent::Test::Driver::Input, Fluent::Plugin::Input],
'multi_output plugin test driver' => [Fluent::Test::Driver::MultiOutput, Fluent::Plugin::MultiOutput],
'parser plugin test driver' => [Fluent::Test::Driver::Parser, Fluent::Plugin::Parser],
'formatter plugin test driver' => [Fluent::Test::Driver::Formatter, Fluent::Plugin::Formatter],
)
# The soft timeout must be based on the monotonic clock, so it works even
# while Timecop has frozen wall-clock time.
test 'can stop with soft timeout for blocks never stops, even with Timecop' do |args|
Timecop.freeze(Time.parse("2016-11-04 18:49:00"))
begin
driver_class, plugin_class = args
d = driver_class.new(Class.new(plugin_class))
assert_nothing_raised do
before = Process.clock_gettime(Process::CLOCK_MONOTONIC)
d.end_if{ false }
d.run(timeout: 1) do
sleep 0.1 until d.stop?
end
after = Process.clock_gettime(Process::CLOCK_MONOTONIC)
assert{ after >= before + 1.0 }
end
ensure
Timecop.return
end
end
# Exceptions raised inside plugin threads must surface out of #run.
test 'raise errors raised in threads' do
d = Fluent::Test::Driver::Input.new(Fluent::Plugin::Input) do
helpers :thread
def start
super
thread_create(:input_thread_for_test_driver_test) do
sleep 0.5
raise "yaaaaaaaaaay!"
end
end
end
assert_raise RuntimeError.new("yaaaaaaaaaay!") do
d.end_if{ false }
d.run(timeout: 3) do
sleep 0.1 until d.stop?
end
end
end
end
sub_test_case 'output plugin test driver' do
# Output driver needs a concrete (non-buffered) process implementation;
# the block value must still propagate out of #run.
test 'returns the block value as the return value of #run' do
d = Fluent::Test::Driver::Output.new(Fluent::Plugin::Output) do
def prefer_buffered_processing
false
end
def process(tag, es)
# drop
end
end
v = d.run do
x = 1 + 2
y = 2 + 4
3 || x || y
end
assert_equal 3, v
end
end
sub_test_case 'filter plugin test driver' do
# Same contract for the filter driver, with a pass-through filter.
test 'returns the block value as the return value of #run' do
d = Fluent::Test::Driver::Filter.new(Fluent::Plugin::Filter) do
def filter(tag, time, record)
record
end
end
v = d.run do
x = 1 + 2
y = 2 + 4
3 || x || y
end
assert_equal 3, v
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_log.rb | test/test_log.rb | require_relative 'helper'
require 'fluent/test/driver/input'
require 'fluent/engine'
require 'fluent/log'
require 'timecop'
require 'logger'
require 'securerandom'
require 'pathname'
class LogTest < Test::Unit::TestCase
# Build a unique scratch-directory path for this test run, namespaced by
# the parallel-test worker number (TEST_ENV_NUMBER, empty when unset) and
# a random hex suffix so concurrent runs never collide.
def tmp_dir
  base = File.dirname(__FILE__)
  worker = "#{ENV['TEST_ENV_NUMBER']}"
  File.join(base, "tmp", "log", worker, SecureRandom.hex(10))
end
# Freeze the clock so every log line carries a predictable timestamp, and
# prepare a fresh in-memory log device plus a scratch directory.
def setup
@tmp_dir = tmp_dir
FileUtils.mkdir_p(@tmp_dir)
@log_device = Fluent::Test::DummyLogDevice.new
@timestamp = Time.parse("2016-04-21 02:58:41 +0000")
@timestamp_str = @timestamp.strftime("%Y-%m-%d %H:%M:%S %z")
Timecop.freeze(@timestamp)
end
# Undo setup: reset the log device, release the frozen clock, clear the
# per-thread stacktrace-suppression cache, and remove the scratch directory
# (best-effort on Windows, see rescue below).
def teardown
@log_device.reset
Timecop.return
Thread.current[:last_repeated_stacktrace] = nil
begin
FileUtils.rm_rf(@tmp_dir)
rescue Errno::EACCES
# It may occur on Windows because of delete pending state due to delayed GC.
# Ruby 3.2 or later doesn't ignore Errno::EACCES:
# https://github.com/ruby/ruby/commit/983115cf3c8f75b1afbe3274f02c1529e1ce3a81
end
end
# Per-process log paths: supervisor logs get a "-supervisor-<worker_id>"
# suffix while worker logs get a plain "-<worker_id>" suffix.
def test_per_process_path
  supervisor_path = Fluent::Log.per_process_path("C:/tmp/test.log", :supervisor, 0)
  assert_equal(supervisor_path, "C:/tmp/test-supervisor-0.log")
  worker_path = Fluent::Log.per_process_path("C:/tmp/test.log", :worker, 1)
  assert_equal(worker_path, "C:/tmp/test-1.log")
end
# Severity filtering: with level N configured, messages (and backtraces)
# below N are dropped. Each data row is [configured_level, index of the
# first expected line] — the expected array is sliced from that index.
sub_test_case "log level" do
data(
trace: [Fluent::Log::LEVEL_TRACE, 0],
debug: [Fluent::Log::LEVEL_DEBUG, 1],
info: [Fluent::Log::LEVEL_INFO, 2],
warn: [Fluent::Log::LEVEL_WARN, 3],
error: [Fluent::Log::LEVEL_ERROR, 4],
fatal: [Fluent::Log::LEVEL_FATAL, 5],
)
# Level set via Fluent::Log#level=.
def test_output(data)
log_level, start = data
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev)
log = Fluent::Log.new(logger)
log.level = log_level
log.trace "trace log"
log.debug "debug log"
log.info "info log"
log.warn "warn log"
log.error "error log"
log.fatal "fatal log"
expected = [
"#{@timestamp_str} [trace]: trace log\n",
"#{@timestamp_str} [debug]: debug log\n",
"#{@timestamp_str} [info]: info log\n",
"#{@timestamp_str} [warn]: warn log\n",
"#{@timestamp_str} [error]: error log\n",
"#{@timestamp_str} [fatal]: fatal log\n"
][start..-1]
assert_equal(expected, log.out.logs)
end
data(
trace: [ServerEngine::DaemonLogger::TRACE, 0],
debug: [ServerEngine::DaemonLogger::DEBUG, 1],
info: [ServerEngine::DaemonLogger::INFO, 2],
warn: [ServerEngine::DaemonLogger::WARN, 3],
error: [ServerEngine::DaemonLogger::ERROR, 4],
fatal: [ServerEngine::DaemonLogger::FATAL, 5],
)
# Same filtering when the level comes from the underlying DaemonLogger.
def test_output_with_serverengine_loglevel(data)
log_level, start = data
dl_opts = {}
dl_opts[:log_level] = log_level
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
log = Fluent::Log.new(logger)
log.trace "trace log"
log.debug "debug log"
log.info "info log"
log.warn "warn log"
log.error "error log"
log.fatal "fatal log"
expected = [
"#{@timestamp_str} [trace]: trace log\n",
"#{@timestamp_str} [debug]: debug log\n",
"#{@timestamp_str} [info]: info log\n",
"#{@timestamp_str} [warn]: warn log\n",
"#{@timestamp_str} [error]: error log\n",
"#{@timestamp_str} [fatal]: fatal log\n"
][start..-1]
assert_equal(expected, log.out.logs)
end
data(
trace: [Fluent::Log::LEVEL_TRACE, 0],
debug: [Fluent::Log::LEVEL_DEBUG, 1],
info: [Fluent::Log::LEVEL_INFO, 2],
warn: [Fluent::Log::LEVEL_WARN, 3],
error: [Fluent::Log::LEVEL_ERROR, 4],
fatal: [Fluent::Log::LEVEL_FATAL, 5],
)
# Block form of each severity method must produce identical output.
def test_output_with_block(data)
log_level, start = data
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev)
log = Fluent::Log.new(logger)
log.level = log_level
log.trace { "trace log" }
log.debug { "debug log" }
log.info { "info log" }
log.warn { "warn log" }
log.error { "error log" }
log.fatal { "fatal log" }
expected = [
"#{@timestamp_str} [trace]: trace log\n",
"#{@timestamp_str} [debug]: debug log\n",
"#{@timestamp_str} [info]: info log\n",
"#{@timestamp_str} [warn]: warn log\n",
"#{@timestamp_str} [error]: error log\n",
"#{@timestamp_str} [fatal]: fatal log\n"
][start..-1]
assert_equal(expected, log.out.logs)
end
data(
trace: [ServerEngine::DaemonLogger::TRACE, 0],
debug: [ServerEngine::DaemonLogger::DEBUG, 1],
info: [ServerEngine::DaemonLogger::INFO, 2],
warn: [ServerEngine::DaemonLogger::WARN, 3],
error: [ServerEngine::DaemonLogger::ERROR, 4],
fatal: [ServerEngine::DaemonLogger::FATAL, 5],
)
# Block form with the level supplied by DaemonLogger.
def test_output_with_block_with_serverengine_loglevel(data)
log_level, start = data
dl_opts = {}
dl_opts[:log_level] = log_level
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
log = Fluent::Log.new(logger)
log.trace { "trace log" }
log.debug { "debug log" }
log.info { "info log" }
log.warn { "warn log" }
log.error { "error log" }
log.fatal { "fatal log" }
expected = [
"#{@timestamp_str} [trace]: trace log\n",
"#{@timestamp_str} [debug]: debug log\n",
"#{@timestamp_str} [info]: info log\n",
"#{@timestamp_str} [warn]: warn log\n",
"#{@timestamp_str} [error]: error log\n",
"#{@timestamp_str} [fatal]: fatal log\n"
][start..-1]
assert_equal(expected, log.out.logs)
end
data(
trace: [Fluent::Log::LEVEL_TRACE, { trace: true, debug: true, info: true, warn: true, error: true, fatal: true }],
debug: [Fluent::Log::LEVEL_DEBUG, { trace: false, debug: true, info: true, warn: true, error: true, fatal: true }],
info: [Fluent::Log::LEVEL_INFO, { trace: false, debug: false, info: true, warn: true, error: true, fatal: true }],
warn: [Fluent::Log::LEVEL_WARN, { trace: false, debug: false, info: false, warn: true, error: true, fatal: true }],
error: [Fluent::Log::LEVEL_ERROR, { trace: false, debug: false, info: false, warn: false, error: true, fatal: true }],
fatal: [Fluent::Log::LEVEL_FATAL, { trace: false, debug: false, info: false, warn: false, error: false, fatal: true }],
)
# Lazy evaluation: blocks for severities below the level must NOT run.
def test_execute_block(data)
log_level, expected = data
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev)
log = Fluent::Log.new(logger)
log.level = log_level
block_called = {
trace: false,
debug: false,
info: false,
warn: false,
error: false,
fatal: false,
}
log.trace { block_called[:trace] = true }
log.debug { block_called[:debug] = true }
log.info { block_called[:info] = true }
log.warn { block_called[:warn] = true }
log.error { block_called[:error] = true }
log.fatal { block_called[:fatal] = true }
assert_equal(expected, block_called)
end
data(
trace: [ServerEngine::DaemonLogger::TRACE, { trace: true, debug: true, info: true, warn: true, error: true, fatal: true }],
debug: [ServerEngine::DaemonLogger::DEBUG, { trace: false, debug: true, info: true, warn: true, error: true, fatal: true }],
info: [ServerEngine::DaemonLogger::INFO, { trace: false, debug: false, info: true, warn: true, error: true, fatal: true }],
warn: [ServerEngine::DaemonLogger::WARN, { trace: false, debug: false, info: false, warn: true, error: true, fatal: true }],
error: [ServerEngine::DaemonLogger::ERROR, { trace: false, debug: false, info: false, warn: false, error: true, fatal: true }],
fatal: [ServerEngine::DaemonLogger::FATAL, { trace: false, debug: false, info: false, warn: false, error: false, fatal: true }],
)
# Lazy evaluation with the level supplied by DaemonLogger.
def test_execute_block_with_serverengine_loglevel(data)
log_level, expected = data
dl_opts = {}
dl_opts[:log_level] = log_level
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
log = Fluent::Log.new(logger)
block_called = {
trace: false,
debug: false,
info: false,
warn: false,
error: false,
fatal: false,
}
log.trace { block_called[:trace] = true }
log.debug { block_called[:debug] = true }
log.info { block_called[:info] = true }
log.warn { block_called[:warn] = true }
log.error { block_called[:error] = true }
log.fatal { block_called[:fatal] = true }
assert_equal(expected, block_called)
end
data(
trace: [Fluent::Log::LEVEL_TRACE, 0],
debug: [Fluent::Log::LEVEL_DEBUG, 3],
info: [Fluent::Log::LEVEL_INFO, 6],
warn: [Fluent::Log::LEVEL_WARN, 9],
error: [Fluent::Log::LEVEL_ERROR, 12],
fatal: [Fluent::Log::LEVEL_FATAL, 15],
)
# Backtrace output: each backtrace line becomes one indented log line, and
# whole 3-line traces are dropped below the configured level.
def test_backtrace(data)
log_level, start = data
backtrace = ["line 1", "line 2", "line 3"]
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev)
log = Fluent::Log.new(logger)
log.level = log_level
log.trace_backtrace(backtrace)
log.debug_backtrace(backtrace)
log.info_backtrace(backtrace)
log.warn_backtrace(backtrace)
log.error_backtrace(backtrace)
log.fatal_backtrace(backtrace)
expected = [
"  #{@timestamp_str} [trace]: line 1\n",
"  #{@timestamp_str} [trace]: line 2\n",
"  #{@timestamp_str} [trace]: line 3\n",
"  #{@timestamp_str} [debug]: line 1\n",
"  #{@timestamp_str} [debug]: line 2\n",
"  #{@timestamp_str} [debug]: line 3\n",
"  #{@timestamp_str} [info]: line 1\n",
"  #{@timestamp_str} [info]: line 2\n",
"  #{@timestamp_str} [info]: line 3\n",
"  #{@timestamp_str} [warn]: line 1\n",
"  #{@timestamp_str} [warn]: line 2\n",
"  #{@timestamp_str} [warn]: line 3\n",
"  #{@timestamp_str} [error]: line 1\n",
"  #{@timestamp_str} [error]: line 2\n",
"  #{@timestamp_str} [error]: line 3\n",
"  #{@timestamp_str} [fatal]: line 1\n",
"  #{@timestamp_str} [fatal]: line 2\n",
"  #{@timestamp_str} [fatal]: line 3\n"
][start..-1]
assert_equal(expected, log.out.logs)
end
data(
trace: [ServerEngine::DaemonLogger::TRACE, 0],
debug: [ServerEngine::DaemonLogger::DEBUG, 3],
info: [ServerEngine::DaemonLogger::INFO, 6],
warn: [ServerEngine::DaemonLogger::WARN, 9],
error: [ServerEngine::DaemonLogger::ERROR, 12],
fatal: [ServerEngine::DaemonLogger::FATAL, 15],
)
# Backtrace filtering with the level supplied by DaemonLogger.
def test_backtrace_with_serverengine_loglevel(data)
log_level, start = data
backtrace = ["line 1", "line 2", "line 3"]
dl_opts = {}
dl_opts[:log_level] = log_level
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
log = Fluent::Log.new(logger)
log.trace_backtrace(backtrace)
log.debug_backtrace(backtrace)
log.info_backtrace(backtrace)
log.warn_backtrace(backtrace)
log.error_backtrace(backtrace)
log.fatal_backtrace(backtrace)
expected = [
"  #{@timestamp_str} [trace]: line 1\n",
"  #{@timestamp_str} [trace]: line 2\n",
"  #{@timestamp_str} [trace]: line 3\n",
"  #{@timestamp_str} [debug]: line 1\n",
"  #{@timestamp_str} [debug]: line 2\n",
"  #{@timestamp_str} [debug]: line 3\n",
"  #{@timestamp_str} [info]: line 1\n",
"  #{@timestamp_str} [info]: line 2\n",
"  #{@timestamp_str} [info]: line 3\n",
"  #{@timestamp_str} [warn]: line 1\n",
"  #{@timestamp_str} [warn]: line 2\n",
"  #{@timestamp_str} [warn]: line 3\n",
"  #{@timestamp_str} [error]: line 1\n",
"  #{@timestamp_str} [error]: line 2\n",
"  #{@timestamp_str} [error]: line 3\n",
"  #{@timestamp_str} [fatal]: line 1\n",
"  #{@timestamp_str} [fatal]: line 2\n",
"  #{@timestamp_str} [fatal]: line 3\n"
][start..-1]
assert_equal(expected, log.out.logs)
end
end
# suppress_repeated_stacktrace: an identical consecutive backtrace is
# collapsed into a single "suppressed same stacktrace" line.
sub_test_case "suppress repeated backtrace" do
# A different trace in between resets the suppression.
def test_same_log_level
backtrace = ["line 1", "line 2", "line 3"]
dl_opts = {}
dl_opts[:log_level] = ServerEngine::DaemonLogger::TRACE
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
opts = {}
opts[:suppress_repeated_stacktrace] = true
log = Fluent::Log.new(logger, opts)
log.trace_backtrace(backtrace)
log.trace_backtrace(backtrace)
log.trace_backtrace(backtrace + ["line 4"])
log.trace_backtrace(backtrace)
log.trace_backtrace(backtrace)
expected = [
"  #{@timestamp_str} [trace]: line 1\n",
"  #{@timestamp_str} [trace]: line 2\n",
"  #{@timestamp_str} [trace]: line 3\n",
"  #{@timestamp_str} [trace]: suppressed same stacktrace\n",
"  #{@timestamp_str} [trace]: line 1\n",
"  #{@timestamp_str} [trace]: line 2\n",
"  #{@timestamp_str} [trace]: line 3\n",
"  #{@timestamp_str} [trace]: line 4\n",
"  #{@timestamp_str} [trace]: line 1\n",
"  #{@timestamp_str} [trace]: line 2\n",
"  #{@timestamp_str} [trace]: line 3\n",
"  #{@timestamp_str} [trace]: suppressed same stacktrace\n",
]
assert_equal(expected, log.out.logs)
end
# Suppression keys on the trace content only — a repeat is collapsed even
# when logged at a different severity.
def test_different_log_level
backtrace = ["line 1", "line 2", "line 3"]
dl_opts = {}
dl_opts[:log_level] = ServerEngine::DaemonLogger::TRACE
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
opts = {}
opts[:suppress_repeated_stacktrace] = true
log = Fluent::Log.new(logger, opts)
log.trace_backtrace(backtrace)
log.debug_backtrace(backtrace)
log.info_backtrace(backtrace)
log.warn_backtrace(backtrace)
log.error_backtrace(backtrace)
log.fatal_backtrace(backtrace)
expected = [
"  #{@timestamp_str} [trace]: line 1\n",
"  #{@timestamp_str} [trace]: line 2\n",
"  #{@timestamp_str} [trace]: line 3\n",
"  #{@timestamp_str} [debug]: suppressed same stacktrace\n",
"  #{@timestamp_str} [info]: suppressed same stacktrace\n",
"  #{@timestamp_str} [warn]: suppressed same stacktrace\n",
"  #{@timestamp_str} [error]: suppressed same stacktrace\n",
"  #{@timestamp_str} [fatal]: suppressed same stacktrace\n",
]
assert_equal(expected, log.out.logs)
end
end
# force_stacktrace_level: rewrites the severity of every backtrace line to
# a fixed level; filtering against the configured log_level still applies.
sub_test_case "force_stacktrace_level" do
data(
none: [ nil, ["trace", "debug", "info", "warn", "error", "fatal"] ],
trace: [ Fluent::Log::LEVEL_TRACE, ["trace", "trace", "trace", "trace", "trace", "trace"] ],
debug: [ Fluent::Log::LEVEL_DEBUG, ["debug", "debug", "debug", "debug", "debug", "debug"] ],
info: [ Fluent::Log::LEVEL_INFO, ["info", "info", "info", "info", "info", "info"] ],
warn: [ Fluent::Log::LEVEL_WARN, ["warn", "warn", "warn", "warn", "warn", "warn"] ],
error: [ Fluent::Log::LEVEL_ERROR, ["error", "error", "error", "error", "error", "error"] ],
fatal: [ Fluent::Log::LEVEL_FATAL, ["fatal", "fatal", "fatal", "fatal", "fatal", "fatal"] ],
)
# Every *_backtrace call is emitted at the forced level (or its own level
# when no force is set).
test "level should be forced" do |(level, expected)|
backtrace = ["backtrace"]
logger = Fluent::Log.new(
ServerEngine::DaemonLogger.new(
@log_device,
log_level: ServerEngine::DaemonLogger::TRACE,
)
)
logger.force_stacktrace_level(level) unless level.nil?
logger.trace_backtrace(backtrace)
logger.debug_backtrace(backtrace)
logger.info_backtrace(backtrace)
logger.warn_backtrace(backtrace)
logger.error_backtrace(backtrace)
logger.fatal_backtrace(backtrace)
assert do
expected == logger.out.logs.map { |log| log.match(/ \[([a-z]+)\]: backtrace$/)[1] }
end
end
# Calls below the logger's level are dropped before forcing applies:
# trace/debug backtraces never appear even when forced to info.
test "stacktraces that do not meet log_level initially should be discarded" do
logger = Fluent::Log.new(
ServerEngine::DaemonLogger.new(
@log_device,
log_level: ServerEngine::DaemonLogger::INFO,
)
)
logger.force_stacktrace_level(Fluent::Log::LEVEL_INFO)
logger.trace_backtrace(["trace"])
logger.debug_backtrace(["debug"])
logger.info_backtrace(["info"])
logger.warn_backtrace(["warn"])
logger.error_backtrace(["error"])
logger.fatal_backtrace(["fatal"])
assert_equal(
[
"  #{@timestamp_str} [info]: info\n",
"  #{@timestamp_str} [info]: warn\n",
"  #{@timestamp_str} [info]: error\n",
"  #{@timestamp_str} [info]: fatal\n",
],
logger.out.logs,
)
end
# Forcing down to debug on an info-level logger drops everything.
test "stacktraces that do not meet log_level finally should be discarded" do
logger = Fluent::Log.new(
ServerEngine::DaemonLogger.new(
@log_device,
log_level: ServerEngine::DaemonLogger::INFO,
)
)
logger.force_stacktrace_level(Fluent::Log::LEVEL_DEBUG)
logger.trace_backtrace(["trace"])
logger.debug_backtrace(["debug"])
logger.info_backtrace(["info"])
logger.warn_backtrace(["warn"])
logger.error_backtrace(["error"])
logger.fatal_backtrace(["fatal"])
assert_equal([], logger.out.logs)
end
end
# ignore_repeated_log_interval: consecutive identical messages are dropped
# until the interval (seconds) has elapsed since the last *emitted* one.
sub_test_case "ignore_repeated_log_interval" do
def test_same_message
message = "This is test"
logger = ServerEngine::DaemonLogger.new(@log_device, {log_level: ServerEngine::DaemonLogger::INFO})
log = Fluent::Log.new(logger, {ignore_repeated_log_interval: 5})
log.error message
10.times { |i|
Timecop.freeze(@timestamp + i)
log.error message
}
expected = [
"2016-04-21 02:58:41 +0000 [error]: This is test\n",
"2016-04-21 02:58:47 +0000 [error]: This is test\n"
]
assert_equal(expected, log.out.logs)
end
# Only *consecutive* repeats are suppressed: a different message in between
# resets the tracking, so alternating messages all get through.
def test_different_message
message = "This is test"
logger = ServerEngine::DaemonLogger.new(@log_device, {log_level: ServerEngine::DaemonLogger::INFO})
log = Fluent::Log.new(logger, {ignore_repeated_log_interval: 10})
log.error message
3.times { |i|
Timecop.freeze(@timestamp + i)
log.error message
log.error message
log.info "Hello! " + message
}
expected = [
"2016-04-21 02:58:41 +0000 [error]: This is test\n",
"2016-04-21 02:58:41 +0000 [info]: Hello! This is test\n",
"2016-04-21 02:58:42 +0000 [error]: This is test\n",
"2016-04-21 02:58:42 +0000 [info]: Hello! This is test\n",
"2016-04-21 02:58:43 +0000 [error]: This is test\n",
"2016-04-21 02:58:43 +0000 [info]: Hello! This is test\n",
]
assert_equal(expected, log.out.logs)
end
end
# ignore_same_log_interval: unlike ignore_repeated_log_interval, this
# suppresses a repeated message even when OTHER messages appear in between,
# using a per-thread cache (Thread.current[:last_same_log]).
sub_test_case "ignore_same_log_interval" do
teardown do
Thread.current[:last_same_log] = nil
end
def test_same_message
message = "This is test"
logger = ServerEngine::DaemonLogger.new(@log_device, {log_level: ServerEngine::DaemonLogger::INFO})
log = Fluent::Log.new(logger, {ignore_same_log_interval: 5})
log.error message
10.times { |i|
Timecop.freeze(@timestamp + i + 1)
log.error message
}
expected = [
"2016-04-21 02:58:41 +0000 [error]: This is test\n",
"2016-04-21 02:58:47 +0000 [error]: This is test\n"
]
assert_equal(expected, log.out.logs)
end
# Interleaved different messages do NOT reset suppression here: the
# repeated error is emitted only once within the interval.
def test_different_message
message = "This is test"
logger = ServerEngine::DaemonLogger.new(@log_device, {log_level: ServerEngine::DaemonLogger::INFO})
log = Fluent::Log.new(logger, {ignore_same_log_interval: 10})
log.error message
3.times { |i|
Timecop.freeze(@timestamp + i)
log.error message
log.error message
log.info "Hello! " + message
}
expected = [
"2016-04-21 02:58:41 +0000 [error]: This is test\n",
"2016-04-21 02:58:41 +0000 [info]: Hello! This is test\n",
]
assert_equal(expected, log.out.logs)
end
# Cache-size cap: when timestamps advance, overflow evicts entries older
# than the interval instead of clearing everything.
def test_reject_on_max_size
ignore_same_log_interval = 10
logger = Fluent::Log.new(
ServerEngine::DaemonLogger.new(@log_device, log_level: ServerEngine::DaemonLogger::INFO),
ignore_same_log_interval: ignore_same_log_interval,
)
# Output unique log every second.
Fluent::Log::IGNORE_SAME_LOG_MAX_CACHE_SIZE.times do |i|
logger.info "Test #{i}"
Timecop.freeze(@timestamp + i)
end
logger.info "Over max size!"
# The newest cache and the latest caches in `ignore_same_log_interval` should exist.
assert { Thread.current[:last_same_log].size == ignore_same_log_interval + 1 }
end
# When nothing is old enough to evict, the whole cache is cleared instead.
def test_clear_on_max_size
ignore_same_log_interval = 10
logger = Fluent::Log.new(
ServerEngine::DaemonLogger.new(@log_device, log_level: ServerEngine::DaemonLogger::INFO),
ignore_same_log_interval: ignore_same_log_interval,
)
# Output unique log at the same time.
Fluent::Log::IGNORE_SAME_LOG_MAX_CACHE_SIZE.times do |i|
logger.info "Test #{i}"
end
logger.info "Over max size!"
# Can't reject old logs, so all cache should be cleared and only the newest should exist.
assert { Thread.current[:last_same_log].size == 1 }
end
end
def test_dup
dl_opts = {}
dl_opts[:log_level] = ServerEngine::DaemonLogger::TRACE
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
log1 = Fluent::Log.new(logger)
log2 = log1.dup
log1.level = Fluent::Log::LEVEL_DEBUG
assert_equal(Fluent::Log::LEVEL_DEBUG, log1.level)
assert_equal(Fluent::Log::LEVEL_TRACE, log2.level)
end
def test_format_json
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev)
log = Fluent::Log.new(logger)
log.format = :json
log.level = Fluent::Log::LEVEL_TRACE
log.trace "trace log"
log.debug "debug log"
log.info "info log"
log.warn "warn log"
log.error "error log"
log.fatal "fatal log"
expected = [
"#{@timestamp_str} [trace]: trace log\n",
"#{@timestamp_str} [debug]: debug log\n",
"#{@timestamp_str} [info]: info log\n",
"#{@timestamp_str} [warn]: warn log\n",
"#{@timestamp_str} [error]: error log\n",
"#{@timestamp_str} [fatal]: fatal log\n"
]
assert_equal(expected, log.out.logs.map { |l|
r = JSON.parse(l)
"#{r['time']} [#{r['level']}]: #{r['message']}\n"
})
end
def test_time_format
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev)
log = Fluent::Log.new(logger)
log.time_format = "%Y"
log.level = Fluent::Log::LEVEL_TRACE
log.trace "trace log"
log.debug "debug log"
log.info "info log"
log.warn "warn log"
log.error "error log"
log.fatal "fatal log"
timestamp_str = @timestamp.strftime("%Y")
expected = [
"#{timestamp_str} [trace]: trace log\n",
"#{timestamp_str} [debug]: debug log\n",
"#{timestamp_str} [info]: info log\n",
"#{timestamp_str} [warn]: warn log\n",
"#{timestamp_str} [error]: error log\n",
"#{timestamp_str} [fatal]: fatal log\n"
]
assert_equal(expected, log.out.logs)
end
def test_disable_events
dl_opts = {}
dl_opts[:log_level] = ServerEngine::DaemonLogger::TRACE
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
log = Fluent::Log.new(logger)
log.enable_event(true)
engine = log.instance_variable_get(:@engine)
mock(engine).push_log_event(anything, anything, anything).once
log.trace "trace log"
log.disable_events(Thread.current)
log.trace "trace log"
end
def test_level_reload
dl_opts = {}
dl_opts[:log_level] = ServerEngine::DaemonLogger::TRACE
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
log = Fluent::Log.new(logger)
assert_equal(ServerEngine::DaemonLogger::TRACE, logger.level)
assert_equal(Fluent::Log::LEVEL_TRACE, log.level)
# change daemon logger side level
logger.level = ServerEngine::DaemonLogger::DEBUG
assert_equal(ServerEngine::DaemonLogger::DEBUG, logger.level)
# check fluentd log side level is also changed
assert_equal(Fluent::Log::LEVEL_DEBUG, log.level)
end
DAY_SEC = 60 * 60 * 24
data(
rotate_daily_age: ['daily', 100000, DAY_SEC + 1],
rotate_weekly_age: ['weekly', 100000, DAY_SEC * 7 + 1],
rotate_monthly_age: ['monthly', 100000, DAY_SEC * 31 + 1],
rotate_size: [1, 100, 0, '0'],
)
def test_log_with_logdevio(expected)
with_timezone('utc') do
@timestamp = Time.parse("2016-04-21 00:00:00 +0000")
@timestamp_str = @timestamp.strftime("%Y-%m-%d %H:%M:%S %z")
Timecop.freeze(@timestamp)
rotate_age, rotate_size, travel_term = expected
path = "#{@tmp_dir}/log-dev-io-#{rotate_size}-#{rotate_age}"
logdev = Fluent::LogDeviceIO.new(path, shift_age: rotate_age, shift_size: rotate_size)
logger = ServerEngine::DaemonLogger.new(logdev)
log = Fluent::Log.new(logger)
msg = 'a' * 101
log.info msg
assert_match msg, File.read(path)
Timecop.freeze(@timestamp + travel_term)
msg2 = 'b' * 101
log.info msg2
c = File.read(path)
assert_match msg2, c
assert_not_equal msg, c
end
end
def test_log_rotates_specified_size_with_logdevio
with_timezone('utc') do
begin
rotate_age = 2
rotate_size = 100
path = "#{@tmp_dir}/log-dev-io-#{rotate_size}-#{rotate_age}"
path0 = path + '.0'
path1 = path + '.1'
logdev = Fluent::LogDeviceIO.new(path, shift_age: rotate_age, shift_size: rotate_size)
logger = ServerEngine::DaemonLogger.new(logdev)
log = Fluent::Log.new(logger)
msg = 'a' * 101
log.info msg
assert_match msg, File.read(path)
assert_true File.exist?(path)
assert_true !File.exist?(path0)
assert_true !File.exist?(path1)
# create log.0
msg2 = 'b' * 101
log.info msg2
c = File.read(path)
c0 = File.read(path0)
assert_match msg2, c
assert_match msg, c0
assert_true File.exist?(path)
assert_true File.exist?(path0)
assert_true !File.exist?(path1)
# rotate
msg3 = 'c' * 101
log.info msg3
c = File.read(path)
c0 = File.read(path0)
assert_match msg3, c
assert_match msg2, c0
assert_true File.exist?(path)
assert_true File.exist?(path0)
assert_true !File.exist?(path1)
ensure
logdev&.close
end
end
end
def test_reopen
path = Pathname(@tmp_dir) + "fluent.log"
logdev = Fluent::LogDeviceIO.new(path.to_s)
logger = ServerEngine::DaemonLogger.new(logdev)
log = Fluent::Log.new(logger, path: path)
message = "This is test message."
log.info message
log.reopen!
log.info message
assert { path.read.lines.count{ |line| line.include?(message) } == 2 }
# Assert reopening the same file.
# Especially, on Windows, the filepath is fixed for each process with rotate,
# so we need to care about this.
assert { path.parent.entries.size == 3 } # [".", "..", "fluent.log"]
ensure
logdev&.close
end
end
class PluginLoggerTest < Test::Unit::TestCase
def setup
@log_device = Fluent::Test::DummyLogDevice.new
@timestamp = Time.parse("2016-04-21 02:58:41 +0000")
@timestamp_str = @timestamp.strftime("%Y-%m-%d %H:%M:%S %z")
Timecop.freeze(@timestamp)
dl_opts = {}
dl_opts[:log_level] = ServerEngine::DaemonLogger::TRACE
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
@logger = Fluent::Log.new(logger)
end
def teardown
@log_device.reset
Timecop.return
Thread.current[:last_repeated_stacktrace] = nil
end
def test_initialize
log = Fluent::PluginLogger.new(@logger)
logger = log.instance_variable_get(:@logger)
assert_equal(logger, @logger)
end
def test_enable_color
log = Fluent::PluginLogger.new(@logger)
log.enable_color(true)
assert_equal(true, log.enable_color?)
assert_equal(true, @logger.enable_color?)
log.enable_color(false)
assert_equal(false, log.enable_color?)
assert_equal(false, @logger.enable_color?)
log.enable_color
assert_equal(true, log.enable_color?)
assert_equal(true, @logger.enable_color?)
end
def test_log_type_in_default
mock(@logger).caller_line(:default, Time.now, 1, Fluent::Log::LEVEL_TRACE).once
mock(@logger).caller_line(:default, Time.now, 1, Fluent::Log::LEVEL_DEBUG).once
mock(@logger).caller_line(:default, Time.now, 1, Fluent::Log::LEVEL_INFO).once
mock(@logger).caller_line(:default, Time.now, 1, Fluent::Log::LEVEL_WARN).once
mock(@logger).caller_line(:default, Time.now, 1, Fluent::Log::LEVEL_ERROR).once
mock(@logger).caller_line(:default, Time.now, 1, Fluent::Log::LEVEL_FATAL).once
@logger.trace "trace log 1"
@logger.debug "debug log 2"
@logger.info "info log 3"
@logger.warn "warn log 4"
@logger.error "error log 5"
@logger.fatal "fatal log 6"
end
def test_log_types
mock(@logger).caller_line(:default, Time.now, 1, Fluent::Log::LEVEL_TRACE).once
mock(@logger).caller_line(:supervisor, Time.now, 1, Fluent::Log::LEVEL_DEBUG).once
mock(@logger).caller_line(:worker0, Time.now, 1, Fluent::Log::LEVEL_INFO).once
mock(@logger).caller_line(:default, Time.now, 1, Fluent::Log::LEVEL_WARN).once
mock(@logger).caller_line(:supervisor, Time.now, 1, Fluent::Log::LEVEL_ERROR).once
mock(@logger).caller_line(:worker0, Time.now, 1, Fluent::Log::LEVEL_FATAL).once
@logger.trace :default, "trace log 1"
@logger.debug :supervisor, "debug log 2"
@logger.info :worker0, "info log 3"
@logger.warn :default, "warn log 4"
@logger.error :supervisor, "error log 5"
@logger.fatal :worker0, "fatal log 6"
end
sub_test_case "take over the parent logger" do
def test_level
log = Fluent::PluginLogger.new(@logger)
assert_equal(log.level, @logger.level)
log.level = "fatal"
assert_equal(Fluent::Log::LEVEL_FATAL, log.level)
assert_equal(Fluent::Log::LEVEL_TRACE, @logger.level)
end
def test_options
parent_log = Fluent::Log.new(
ServerEngine::DaemonLogger.new(
@log_device,
log_level: ServerEngine::DaemonLogger::INFO,
),
suppress_repeated_stacktrace: true,
ignore_repeated_log_interval: 10,
ignore_same_log_interval: 10,
)
parent_log.force_stacktrace_level(Fluent::Log::LEVEL_INFO)
log = Fluent::PluginLogger.new(parent_log)
assert_equal(
[
true,
Fluent::Log::LEVEL_INFO,
10,
10,
],
[
log.instance_variable_get(:@suppress_repeated_stacktrace),
log.instance_variable_get(:@forced_stacktrace_level),
log.instance_variable_get(:@ignore_repeated_log_interval),
log.instance_variable_get(:@ignore_same_log_interval),
]
)
end
end
sub_test_case "supervisor process type" do
setup do
dl_opts = {}
dl_opts[:log_level] = ServerEngine::DaemonLogger::TRACE
logdev = @log_device
logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
@logger = Fluent::Log.new(logger, process_type: :supervisor)
end
test 'default type logs are shown w/o worker id' do
@logger.info "yaaay"
@logger.info :default, "booo"
assert{ @log_device.logs.include?("#{@timestamp_str} [info]: yaaay\n") }
assert{ @log_device.logs.include?("#{@timestamp_str} [info]: booo\n") }
end
test 'supervisor type logs are shown w/o worker id' do
@logger.info :supervisor, "yaaay"
assert{ @log_device.logs.include?("#{@timestamp_str} [info]: yaaay\n") }
end
test 'worker0 type logs are not shown' do
@logger.info :worker0, "yaaay"
assert{ !@log_device.logs.include?("#{@timestamp_str} [info]: yaaay\n") }
end
end
sub_test_case "worker0 process type" do
setup do
dl_opts = {}
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | true |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_oj_options.rb | test/test_oj_options.rb | require_relative 'helper'
require 'fluent/test'
require 'fluent/oj_options'
class OjOptionsTest < ::Test::Unit::TestCase
begin
require 'oj'
@@oj_is_available = true
rescue LoadError
@@oj_is_available = false
end
setup do
@orig_env = {}
ENV.each do |key, value|
@orig_env[key] = value if key.start_with?("FLUENT_OJ_OPTION_")
end
end
teardown do
ENV.delete_if { |key| key.start_with?("FLUENT_OJ_OPTION_") }
@orig_env.each { |key, value| ENV[key] = value }
end
test "available?" do
assert_equal(@@oj_is_available, Fluent::OjOptions.available?)
end
sub_test_case "set by environment variable" do
test "when no env vars set, returns default options" do
ENV.delete_if { |key| key.start_with?("FLUENT_OJ_OPTION_") }
defaults = Fluent::OjOptions::DEFAULTS
assert_equal(defaults, Fluent::OjOptions.load_env)
assert_equal(defaults, Oj.default_options.slice(*defaults.keys)) if @@oj_is_available
end
test "valid env var passed with valid value, default is overridden" do
ENV["FLUENT_OJ_OPTION_BIGDECIMAL_LOAD"] = ":bigdecimal"
assert_equal(:bigdecimal, Fluent::OjOptions.load_env[:bigdecimal_load])
assert_equal(:bigdecimal, Oj.default_options[:bigdecimal_load]) if @@oj_is_available
end
test "valid env var passed with invalid value, default is not overridden" do
ENV["FLUENT_OJ_OPTION_BIGDECIMAL_LOAD"] = ":conor"
assert_equal(:float, Fluent::OjOptions.load_env[:bigdecimal_load])
assert_equal(:float, Oj.default_options[:bigdecimal_load]) if @@oj_is_available
end
test "invalid env var passed, nothing done with it" do
ENV["FLUENT_OJ_OPTION_CONOR"] = ":conor"
assert_equal(nil, Fluent::OjOptions.load_env[:conor])
assert_equal(nil, Oj.default_options[:conor]) if @@oj_is_available
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
fluent/fluentd | https://github.com/fluent/fluentd/blob/088cb0c98b56feeec0e6da70d1314a25ffd19d0a/test/test_unique_id.rb | test/test_unique_id.rb | require_relative 'helper'
require 'fluent/plugin/base'
require 'fluent/unique_id'
module UniqueIdTestEnv
class Dummy < Fluent::Plugin::Base
include Fluent::UniqueId::Mixin
end
end
class UniqueIdTest < Test::Unit::TestCase
sub_test_case 'module used directly' do
test '.generate generates 128bit length unique id (16bytes)' do
assert_equal 16, Fluent::UniqueId.generate.bytesize
ary = []
100_000.times do
ary << Fluent::UniqueId.generate
end
assert_equal 100_000, ary.uniq.size
end
test '.hex dumps 16bytes id into 32 chars' do
assert_equal 32, Fluent::UniqueId.hex(Fluent::UniqueId.generate).size
assert(Fluent::UniqueId.hex(Fluent::UniqueId.generate) =~ /^[0-9a-z]{32}$/)
end
end
sub_test_case 'mixin' do
setup do
@i = UniqueIdTestEnv::Dummy.new
end
test '#generate_unique_id generates 128bit length id (16bytes)' do
assert_equal 16, @i.generate_unique_id.bytesize
ary = []
100_000.times do
ary << @i.generate_unique_id
end
assert_equal 100_000, ary.uniq.size
end
test '#dump_unique_id_hex dumps 16bytes id into 32 chars' do
assert_equal 32, @i.dump_unique_id_hex(@i.generate_unique_id).size
assert(@i.dump_unique_id_hex(@i.generate_unique_id) =~ /^[0-9a-z]{32}$/)
end
end
end
| ruby | Apache-2.0 | 088cb0c98b56feeec0e6da70d1314a25ffd19d0a | 2026-01-04T15:37:30.958053Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.