repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/params.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/params.rb | require 'bindata/lazy'
module BinData
module AcceptedParametersPlugin
# Mandatory parameters must be present when instantiating a data object.
# Mandatory parameters must be present when instantiating a data object.
# Delegates to this class's memoized AcceptedParameters collector.
def mandatory_parameters(*args)
  accepted_parameters.mandatory(*args)
end

# Optional parameters may be present when instantiating a data object.
def optional_parameters(*args)
  accepted_parameters.optional(*args)
end

# Default parameters can be overridden when instantiating a data object.
def default_parameters(*args)
  accepted_parameters.default(*args)
end

# Mutually exclusive parameters may not all be present when
# instantiating a data object.
def mutually_exclusive_parameters(*args)
  accepted_parameters.mutually_exclusive(*args)
end

# Singular aliases for declaring one parameter at a time.
alias mandatory_parameter mandatory_parameters
alias optional_parameter optional_parameters
alias default_parameter default_parameters
def accepted_parameters # :nodoc:
  # Memoized.  Seeded from the superclass's accepted parameters (when it
  # declares any) so parameter declarations are inherited down the tree.
  @accepted_parameters ||=
    if superclass.respond_to?(:accepted_parameters)
      AcceptedParameters.new(superclass.accepted_parameters)
    else
      AcceptedParameters.new(nil)
    end
end
# BinData objects accept parameters when initializing. AcceptedParameters
# allow a BinData class to declaratively identify accepted parameters as
# mandatory, optional, default or mutually exclusive.
class AcceptedParameters
  # +ancestor_parameters+, when given, seeds this collection with copies
  # of an ancestor class's lists so that subclasses inherit them.
  def initialize(ancestor_parameters = nil)
    if ancestor_parameters
      @mandatory          = ancestor_parameters.mandatory.dup
      @optional           = ancestor_parameters.optional.dup
      @default            = ancestor_parameters.default.dup
      @mutually_exclusive = ancestor_parameters.mutually_exclusive.dup
    else
      @mandatory          = []
      @optional           = []
      @default            = {}
      @mutually_exclusive = []
    end
  end

  # Records +args+ as mandatory parameter names; returns the full list.
  def mandatory(*args)
    accumulate_names(@mandatory, args)
  end

  # Records +args+ as optional parameter names; returns the full list.
  def optional(*args)
    accumulate_names(@optional, args)
  end

  # Merges the +args+ hash of defaults; returns all known defaults.
  def default(args = nil)
    if args
      to_syms(args.keys) # call for side effect of validating names
      args.each_pair do |param, value|
        @default[param.to_sym] = value
      end
    end
    @default
  end

  # Records every unordered pair drawn from +args+ as mutually exclusive;
  # returns the full list of exclusive pairs.
  def mutually_exclusive(*args)
    args.combination(2).each do |first, second|
      @mutually_exclusive.push([first.to_sym, second.to_sym])
      @mutually_exclusive.uniq!
    end
    @mutually_exclusive
  end

  # All known parameter names: mandatory, optional and defaulted.
  def all
    (@mandatory + @optional + @default.keys).uniq
  end

  #---------------
  private

  # Validates +args+, appends them to +list+ and de-duplicates in place.
  def accumulate_names(list, args)
    unless args.empty?
      list.concat(to_syms(args))
      list.uniq!
    end
    list
  end

  # Converts +args+ to symbols, raising NameError for invalid names.
  def to_syms(args)
    args.map(&:to_sym).tap { |syms| ensure_valid_names(syms) }
  end

  # A parameter name may not shadow a method on the lazy evaluator.
  def ensure_valid_names(names)
    invalid_names = self.class.invalid_parameter_names
    names.each do |name|
      next unless invalid_names.include?(name)

      raise NameError.new("Rename parameter '#{name}' " \
                          "as it shadows an existing method.", name)
    end
  end

  class << self
    # Hash keyed by every forbidden parameter name (memoized).  :name and
    # :type are explicitly allowed even though the evaluator defines them.
    def invalid_parameter_names
      @invalid_parameter_names ||=
        begin
          all_names = LazyEvaluator.instance_methods(true)
          (all_names - [:name, :type]).uniq.to_h { |key| [key.to_sym, true] }
        end
    end
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/skip.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/skip.rb | require 'bindata/base_primitive'
require 'bindata/dsl'
module BinData
# Skip will skip over bytes from the input stream. If the stream is not
# seekable, then the bytes are consumed and discarded.
#
# When writing, skip will write the appropriate number of zero bytes.
#
# require 'bindata'
#
# class A < BinData::Record
# skip length: 5
# string :a, read_length: 5
# end
#
# obj = A.read("abcdefghij")
# obj.a #=> "fghij"
#
#
# class B < BinData::Record
# skip do
# string read_length: 2, assert: 'ef'
# end
# string :s, read_length: 5
# end
#
# obj = B.read("abcdefghij")
# obj.s #=> "efghi"
#
#
# == Parameters
#
# Skip objects accept all the params that BinData::BasePrimitive
# does, as well as the following:
#
# <tt>:length</tt>:: The number of bytes to skip.
# <tt>:to_abs_offset</tt>:: Skips to the given absolute offset.
# <tt>:until_valid</tt>:: Skips until a given byte pattern is matched.
# This parameter contains a type that will raise
# a BinData::ValidityError unless an acceptable byte
# sequence is found. The type is represented by a
# Symbol, or if the type is to have params
# passed to it, then it should be provided as
# <tt>[type_symbol, hash_params]</tt>.
#
class Skip < BinData::BasePrimitive
extend DSLMixin

dsl_parser    :skip
arg_processor :skip

optional_parameters :length, :to_abs_offset, :until_valid
mutually_exclusive_parameters :length, :to_abs_offset, :until_valid

# Mixes in exactly one skip strategy, chosen by which parameter was
# supplied (the three parameters are declared mutually exclusive above).
def initialize_shared_instance
  extend SkipLengthPlugin      if has_parameter?(:length)
  extend SkipToAbsOffsetPlugin if has_parameter?(:to_abs_offset)
  extend SkipUntilValidPlugin  if has_parameter?(:until_valid)
  super
end
#---------------
private
# Writing a skip emits zero-filled padding of the skipped length.
# Raises ArgumentError when the computed length is negative (a write
# cannot seek backwards).
def value_to_binary_string(_)
  len = skip_length
  if len.negative?
    raise ArgumentError,
          "#{debug_name} attempted to seek backwards by #{len.abs} bytes"
  end

  # Use the already-computed +len+; skip_length may be backed by a lazily
  # evaluated parameter, so it should not be evaluated twice.
  "\000" * len
end
# Consumes the skipped bytes from +io+ and returns the empty string.
# Raises ArgumentError when the skip distance is negative.
def read_and_return_value(io)
  count = skip_length
  raise ArgumentError, "#{debug_name} attempted to seek backwards by #{count.abs} bytes" if count.negative?

  io.skipbytes(count)
  ""
end
def sensible_default
  ""
end

# Logic for the :length parameter
module SkipLengthPlugin
  # Number of bytes to skip, from the :length parameter.
  def skip_length
    eval_parameter(:length)
  end
end

# Logic for the :to_abs_offset parameter
module SkipToAbsOffsetPlugin
  # Distance from the current absolute offset to the target offset.
  # May be negative when the target lies behind the current position.
  def skip_length
    eval_parameter(:to_abs_offset) - abs_offset
  end
end
# Logic for the :until_valid parameter
module SkipUntilValidPlugin
# The skip distance determined by the last read; 0 before any read.
def skip_length
  @skip_length ||= 0
end

# Scans forward one byte at a time until the :until_valid prototype
# reads without raising ValidityError, then records the distance in
# @skip_length.  When the validator exposes an asserted pattern, a
# substring search (see next_search_index) accelerates the scan.
# Requires a seekable stream (enforced by ReadaheadIO).
def read_and_return_value(io)
  prototype = get_parameter(:until_valid)
  validator = prototype.instantiate(nil, self)
  fs = fast_search_for_obj(validator)

  io.transform(ReadaheadIO.new) do |transformed_io, raw_io|
    pos = 0
    loop do
      seek_to_pos(pos, raw_io)
      validator.clear
      validator.do_read(transformed_io)
      break
    rescue ValidityError
      pos += 1

      if fs
        seek_to_pos(pos, raw_io)
        pos += next_search_index(raw_io, fs)
      end
    end
    seek_to_pos(pos, raw_io)
    @skip_length = pos
  end
end

# Rewinds to the read-ahead mark, then advances +pos+ bytes.
def seek_to_pos(pos, io)
  io.rollback
  io.skip(pos)
end
# A fast search has a pattern string at a specific offset.
FastSearch = ::Struct.new('FastSearch', :pattern, :offset)

# Builds a FastSearch from +obj+ when it exposes an asserted binary
# pattern; returns nil for objects that cannot be fast-searched.
def fast_search_for(obj)
  return nil unless obj.respond_to?(:asserted_binary_s)

  FastSearch.new(obj.asserted_binary_s, obj.rel_offset)
end
# If a search object has an +asserted_value+ field then we
# perform a faster search for a valid object.
# If a search object has an +asserted_value+ field then we
# perform a faster search for a valid object.
#
# For a struct, the first field that yields a FastSearch wins; for a
# bare primitive the object itself is tried.  Returns nil when no fast
# search is possible.
def fast_search_for_obj(obj)
  if BinData::Struct === obj
    obj.each_pair(true) do |_, field|
      fs = fast_search_for(field)
      return fs if fs
    end
  elsif BinData::BasePrimitive === obj
    return fast_search_for(obj)
  end

  nil
end
# Bytes read from the stream per search iteration.
SEARCH_SIZE = 100_000

# Returns how far ahead of the current candidate position the next
# occurrence of +fs.pattern+ starts (compensated by the pattern's field
# offset).  Raises EOFError when the stream ends without a match.
def next_search_index(io, fs)
  buffer = binary_string("")

  # start searching at fast_search offset
  pos = fs.offset
  io.skip(fs.offset)

  loop do
    data = io.read(SEARCH_SIZE)
    raise EOFError, "no match" if data.nil?

    buffer << data
    index = buffer.index(fs.pattern)
    if index
      return pos + index - fs.offset
    end

    # advance buffer, retaining the trailing (pattern size - 1) bytes so
    # a match straddling the chunk boundary is not missed
    searched = buffer.slice!(0..-fs.pattern.size)
    pos += searched.size
  end
end
# IO wrapper that records a mark at the start of the transform so the
# scan loop can repeatedly roll back and rescan from the same point.
class ReadaheadIO < BinData::IO::Transform
  def before_transform
    if !seekable?
      raise IOError, "readahead is not supported on unseekable streams"
    end

    @mark = offset
  end

  # Rewind to the offset recorded when the transform began.
  def rollback
    seek_abs(@mark)
  end
end
end
end
class SkipArgProcessor < BaseArgProcessor
  def sanitize_parameters!(obj_class, params)
    params.merge!(obj_class.dsl_params)

    # At least one skip strategy is required; having more than one is
    # rejected by the mutually_exclusive_parameters declaration on Skip.
    unless params.has_at_least_one_of?(:length, :to_abs_offset, :until_valid)
      raise ArgumentError,
            "#{obj_class} requires :length, :to_abs_offset or :until_valid"
    end

    params.must_be_integer(:to_abs_offset, :length)
    params.sanitize_object_prototype(:until_valid)
  end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/base.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/base.rb | require 'bindata/framework'
require 'bindata/io'
require 'bindata/lazy'
require 'bindata/name'
require 'bindata/params'
require 'bindata/registry'
require 'bindata/sanitize'
module BinData
# This is the abstract base class for all data objects.
class Base
extend AcceptedParametersPlugin
include Framework
include RegisterNamePlugin
class << self
  # Instantiates this class and reads from +io+, returning the newly
  # created data object.  +args+ will be used when instantiating.
  def read(io, *args, &block)
    obj = new(*args)
    obj.read(io, &block)
    obj
  end

  # The arg processor for this class.
  #
  # With +name+, stores the processor class name as a Symbol (e.g. :skip
  # becomes :SkipArgProcessor) for later lazy instantiation.  Without a
  # name, returns the processor: a stored Symbol is resolved and
  # instantiated on first use, and a class with no processor of its own
  # inherits the superclass's processor.
  def arg_processor(name = nil)
    @arg_processor ||= nil

    if name
      @arg_processor = "#{name}_arg_processor".gsub(/(?:^|_)(.)/) { $1.upcase }.to_sym
    elsif @arg_processor.is_a? Symbol
      @arg_processor = BinData.const_get(@arg_processor).new
    elsif @arg_processor.nil?
      @arg_processor = superclass.arg_processor
    else
      @arg_processor
    end
  end

  # The name of this class as used by Records, Arrays etc.
  def bindata_name
    RegisteredClasses.underscore_name(name)
  end

  # Call this method if this class is abstract and not to be used.
  def unregister_self
    RegisteredClasses.unregister(name)
  end

  # Registers all subclasses of this class for use
  def register_subclasses # :nodoc:
    # Replace the inherited hook so each new subclass registers itself
    # and installs the same hook for its own subclasses.
    singleton_class.send(:undef_method, :inherited)
    define_singleton_method(:inherited) do |subclass|
      RegisteredClasses.register(subclass.name, subclass)
      register_subclasses
    end
  end

  private :unregister_self, :register_subclasses
end
# Register all subclasses of this class.
register_subclasses
# Set the initial arg processor.
arg_processor :base
# Creates a new data object.
#
# Args are optional, but if present, must be in the following order.
#
# +value+ is a value that is +assign+ed immediately after initialization.
#
# +parameters+ is a hash containing symbol keys. Some parameters may
# reference callable objects (methods or procs).
#
# +parent+ is the parent data object (e.g. struct, array, choice) this
# object resides under.
#
def initialize(*args)
  # extract_args delegates to this class's arg processor, which returns
  # [value, sanitized_parameters, parent].
  value, @params, @parent = extract_args(args)
  initialize_shared_instance
  initialize_instance
  assign(value) if value
end

attr_accessor :parent
# Only data objects themselves may re-parent an object.
protected :parent=

# Creates a new data object based on this instance.
#
# This implements the prototype design pattern.
#
# All parameters will be duplicated.  Use this method
# when creating multiple objects with the same parameters.
def new(value = nil, parent = nil)
  obj = clone
  obj.parent = parent if parent
  obj.initialize_instance
  obj.assign(value) if value

  obj
end
# Returns the result of evaluating the parameter identified by +key+.
#
# +overrides+ is an optional +parameters+ like hash that allow the
# parameters given at object construction to be overridden.
#
# Returns nil if +key+ does not refer to any parameter.
# Returns the result of evaluating the parameter identified by +key+.
#
# +overrides+ is an optional +parameters+ like hash that allow the
# parameters given at object construction to be overridden.
#
# Returns nil if +key+ does not refer to any parameter.
def eval_parameter(key, overrides = nil)
  value = get_parameter(key)
  # Symbols and callables (anything responding to #arity) are evaluated
  # lazily against this object; plain values are returned as is.
  if value.is_a?(Symbol) || value.respond_to?(:arity)
    lazy_evaluator.lazy_eval(value, overrides)
  else
    value
  end
end

# Returns a lazy evaluator for this object (memoized).
def lazy_evaluator # :nodoc:
  @lazy_evaluator ||= LazyEvaluator.new(self)
end

# Returns the parameter referenced by +key+.
# Use this method if you are sure the parameter is not to be evaluated.
# You most likely want #eval_parameter.
def get_parameter(key)
  @params[key]
end

# Returns whether +key+ exists in the +parameters+ hash.
def has_parameter?(key)
  @params.has_parameter?(key)
end
# Resets the internal state to that of a newly created object.
# Resets the internal state to that of a newly created object.
def clear
  initialize_instance
end

# Reads data into this data object.  +io+ may be a raw stream; it is
# wrapped in a BinData::IO::Read as needed.  Yields self when a block
# is given.
def read(io, &block)
  io = BinData::IO::Read.new(io) unless BinData::IO::Read === io

  start_read do
    clear
    do_read(io)
  end
  block.call(self) if block_given?

  self
end

# Writes the value for this data object to +io+.  Yields self when a
# block is given.
def write(io, &block)
  io = BinData::IO::Write.new(io) unless BinData::IO::Write === io

  do_write(io)
  io.flush
  block.call(self) if block_given?

  self
end

# Returns the number of bytes it will take to write this data object.
# do_num_bytes is rounded up — presumably it can be fractional for
# bit-aligned types; TODO confirm against do_num_bytes implementations.
def num_bytes
  do_num_bytes.ceil
end

# Returns the string representation of this data object.
def to_binary_s(&block)
  io = BinData::IO.create_string_io
  write(io, &block)
  io.string
end

# Returns the hexadecimal string representation of this data object.
def to_hex(&block)
  to_binary_s(&block).unpack1('H*')
end
# Return a human readable representation of this data object.
# Return a human readable representation of this data object.
def inspect
  snapshot.inspect
end

# Return a string representing this data object.
def to_s
  snapshot.to_s
end

# Work with Ruby's pretty-printer library.
def pretty_print(pp) # :nodoc:
  pp.pp(snapshot)
end

# Override and delegate =~ as it is defined in Object.
def =~(other)
  snapshot =~ other
end

# Returns a user friendly name of this object for debugging purposes.
def debug_name
  @parent ? @parent.debug_name_of(self) : 'obj'
end

# Returns the offset (in bytes) of this object with respect to its most
# distant ancestor.
def abs_offset
  @parent ? @parent.abs_offset + @parent.offset_of(self) : 0
end

# Returns the offset (in bytes) of this object with respect to its parent.
def rel_offset
  @parent ? @parent.offset_of(self) : 0
end

def ==(other) # :nodoc:
  # double dispatch
  other == snapshot
end

# A version of +respond_to?+ used by the lazy evaluator.  It doesn't
# reinvoke the evaluator so as to avoid infinite evaluation loops.
def safe_respond_to?(symbol, include_private = false) # :nodoc:
  base_respond_to?(symbol, include_private)
end

alias base_respond_to? respond_to?
#---------------
private
# Delegates argument extraction to this class's arg processor.
def extract_args(args)
  self.class.arg_processor.extract_args(self.class, args)
end

# Flags the object tree as being inside #read for the duration of the
# block; the flag is always cleared afterwards.
def start_read
  top_level_set(:in_read, true)
  yield
ensure
  top_level_set(:in_read, false)
end

# Is this object tree currently being read?  Used by BasePrimitive.
def reading?
  top_level_get(:in_read)
end

# Stores +value+ on the top level object so the flag is shared by the
# whole object tree.
def top_level_set(sym, value)
  top_level.instance_variable_set("@tl_#{sym}", value)
end

def top_level_get(sym)
  tl = top_level
  tl.instance_variable_defined?("@tl_#{sym}") &&
    tl.instance_variable_get("@tl_#{sym}")
end

# Walks the parent chain up to the root object.
def top_level
  if parent.nil?
    tl = self
  else
    tl = parent
    tl = tl.parent while tl.parent
  end

  tl
end
# Returns a fresh copy of +str+ (after #to_s) with BINARY encoding.
def binary_string(str)
  copy = str.to_s.dup
  copy.force_encoding(Encoding::BINARY)
end
end
# ArgProcessors process the arguments passed to BinData::Base.new into
# the form required to initialise the BinData object.
#
# Any passed parameters are sanitized so the BinData object doesn't
# need to perform error checking on the parameters.
class BaseArgProcessor
  # Shared frozen hash used when no parameters were supplied.
  @@empty_hash = Hash.new.freeze

  # Takes the arguments passed to BinData::Base.new and
  # extracts [value, sanitized_parameters, parent].
  def extract_args(obj_class, obj_args)
    value, params, parent = separate_args(obj_class, obj_args)
    [value, SanitizedParameters.sanitize(params, obj_class), parent]
  end

  # Separates the arguments passed to BinData::Base.new into
  # [value, parameters, parent].  Called by #extract_args.
  #
  # The trailing argument is the parent when it is a BinData::Base (and
  # is not the only argument); a trailing Hash is the parameters; any
  # remaining single argument is the initial value.
  def separate_args(_obj_class, obj_args)
    remaining = obj_args.dup

    parent = remaining.pop if remaining.length > 1 && remaining.last.is_a?(BinData::Base)
    parameters = remaining.pop if remaining.last.is_a?(Hash)
    value = remaining.pop unless remaining.empty?

    [value, parameters || @@empty_hash, parent]
  end

  # Performs sanity checks on the given parameters.
  # This method converts the parameters to the form expected
  # by the data object.  Subclasses override this hook.
  def sanitize_parameters!(obj_class, obj_params); end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/name.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/name.rb | module BinData
# == Parameters
#
# Parameters may be provided at initialisation to control the behaviour of
# an object. These parameters are:
#
# <tt>:name</tt>:: The name that this object can be referred to may be
# set explicitly. This is only useful when dynamically
# generating types.
# <code><pre>
# BinData::Struct.new(name: :my_struct, fields: ...)
# array = BinData::Array.new(type: :my_struct)
# </pre></code>
module RegisterNamePlugin
  def self.included(base) # :nodoc:
    # The registered name may be provided explicitly.
    base.optional_parameter :name
  end

  # Registers this prototype under the :name parameter, when given, so
  # other types can refer to it by that name.
  def initialize_shared_instance
    if has_parameter?(:name)
      RegisteredClasses.register(get_parameter(:name), self)
    end
    super
  end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/string.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/string.rb | require 'bindata/base_primitive'
module BinData
# A String is a sequence of bytes. This is the same as strings in Ruby 1.8.
# The issue of character encoding is ignored by this class.
#
# require 'bindata'
#
# data = "abcdefghij"
#
# obj = BinData::String.new(read_length: 5)
# obj.read(data)
# obj #=> "abcde"
#
# obj = BinData::String.new(length: 6)
# obj.read(data)
# obj #=> "abcdef"
# obj.assign("abcdefghij")
# obj #=> "abcdef"
# obj.assign("abcd")
# obj #=> "abcd\000\000"
#
# obj = BinData::String.new(length: 6, trim_padding: true)
# obj.assign("abcd")
# obj #=> "abcd"
# obj.to_binary_s #=> "abcd\000\000"
#
# obj = BinData::String.new(length: 6, pad_byte: 'A')
# obj.assign("abcd")
# obj #=> "abcdAA"
# obj.to_binary_s #=> "abcdAA"
#
# == Parameters
#
# String objects accept all the params that BinData::BasePrimitive
# does, as well as the following:
#
# <tt>:read_length</tt>:: The length in bytes to use when reading a value.
# <tt>:length</tt>:: The fixed length of the string. If a shorter
# string is set, it will be padded to this length.
# <tt>:pad_byte</tt>:: The byte to use when padding a string to a
# set length. Valid values are Integers and
# Strings of length 1. "\0" is the default.
# <tt>:pad_front</tt>:: Signifies that the padding occurs at the front
# of the string rather than the end. Default
# is false.
# <tt>:trim_padding</tt>:: Boolean, default false. If set, #value will
# return the value with all pad_bytes trimmed
# from the end of the string. The value will
# not be trimmed when writing.
class String < BinData::BasePrimitive
arg_processor :string

# :pad_left is accepted as a legacy spelling; the arg processor renames
# it to :pad_front.
optional_parameters :read_length, :length, :trim_padding, :pad_front, :pad_left
default_parameters pad_byte: "\0"
mutually_exclusive_parameters :read_length, :length
mutually_exclusive_parameters :length, :value

# Installs a warning shim when a :value/:asserted_value is given without
# :read_length (reading would consume nothing).
def initialize_shared_instance
  if (has_parameter?(:value) || has_parameter?(:asserted_value)) &&
      !has_parameter?(:read_length)
    extend WarnNoReadLengthPlugin
  end
  super
end
# Coerces assigned values to BINARY-encoded strings.
def assign(val)
  super(binary_string(val))
end

# The value, clamped/padded to :length, with padding trimmed when
# :trim_padding is set.
def snapshot
  # override to trim padding
  snap = super
  snap = clamp_to_length(snap)

  if get_parameter(:trim_padding)
    trim_padding(snap)
  else
    snap
  end
end
#---------------
private
# Forces +str+ to exactly :length bytes (when set): longer strings are
# truncated, shorter ones padded with :pad_byte at the end (or at the
# front when :pad_front is set).  Without :length the string passes
# through unchanged.
def clamp_to_length(str)
  binary = binary_string(str)
  target_len = eval_parameter(:length) || binary.length

  return binary if binary.length == target_len
  return binary.slice(0, target_len) if binary.length > target_len

  padding = (eval_parameter(:pad_byte) * (target_len - binary.length))
  get_parameter(:pad_front) ? padding + binary : binary + padding
end
# Strips runs of :pad_byte from the end of +str+ (or from the front when
# :pad_front is set).
def trim_padding(str)
  # Escape the pad byte so metacharacter pad bytes (e.g. "." or "*") are
  # matched literally instead of being interpreted as regexp syntax.
  pad = Regexp.escape(eval_parameter(:pad_byte))
  if get_parameter(:pad_front)
    str.sub(/\A#{pad}*/, "")
  else
    str.sub(/#{pad}*\z/, "")
  end
end
def value_to_binary_string(val)
  clamp_to_length(val)
end

# Reads :read_length (or :length) bytes; reads nothing when neither
# parameter is set.
def read_and_return_value(io)
  len = eval_parameter(:read_length) || eval_parameter(:length) || 0
  io.readbytes(len)
end

def sensible_default
  ""
end
# Warns when reading if :value && no :read_length
module WarnNoReadLengthPlugin
  # Reads nothing; warns that a :read_length parameter is missing.
  def read_and_return_value(io)
    Kernel.warn "#{debug_name} does not have a :read_length parameter - returning empty string"
    ""
  end
end
end
class StringArgProcessor < BaseArgProcessor
  def sanitize_parameters!(obj_class, params)
    params.warn_replacement_parameter(:initial_length, :read_length)
    params.must_be_integer(:read_length, :length)
    # :pad_left is the legacy spelling of :pad_front.
    params.rename_parameter(:pad_left, :pad_front)
    params.sanitize(:pad_byte) { |byte| sanitized_pad_byte(byte) }
  end

  #-------------
  private

  # Normalizes :pad_byte to a single-byte String; Integers are converted
  # via #chr.  Raises ArgumentError for multi-byte values.
  def sanitized_pad_byte(byte)
    pad_byte = byte.is_a?(Integer) ? byte.chr : byte.to_s
    if pad_byte.bytesize > 1
      raise ArgumentError, ":pad_byte must not contain more than 1 byte"
    end

    pad_byte
  end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/choice.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/choice.rb | require 'bindata/base'
require 'bindata/dsl'
module BinData
# A Choice is a collection of data objects of which only one is active
# at any particular time. Method calls will be delegated to the active
# choice.
#
# require 'bindata'
#
# type1 = [:string, {value: "Type1"}]
# type2 = [:string, {value: "Type2"}]
#
# choices = {5 => type1, 17 => type2}
# a = BinData::Choice.new(choices: choices, selection: 5)
# a # => "Type1"
#
# choices = [ type1, type2 ]
# a = BinData::Choice.new(choices: choices, selection: 1)
# a # => "Type2"
#
# choices = [ nil, nil, nil, type1, nil, type2 ]
# a = BinData::Choice.new(choices: choices, selection: 3)
# a # => "Type1"
#
#
# Chooser = Struct.new(:choice)
# mychoice = Chooser.new
# mychoice.choice = 'big'
#
# choices = {'big' => :uint16be, 'little' => :uint16le}
# a = BinData::Choice.new(choices: choices, copy_on_change: true,
# selection: -> { mychoice.choice })
# a.assign(256)
# a.to_binary_s #=> "\001\000"
#
# mychoice.choice = 'little'
# a.to_binary_s #=> "\000\001"
#
# == Parameters
#
# Parameters may be provided at initialisation to control the behaviour of
# an object. These params are:
#
# <tt>:choices</tt>:: Either an array or a hash specifying the possible
# data objects. The format of the
# array/hash.values is a list of symbols
# representing the data object type. If a choice
# is to have params passed to it, then it should
# be provided as [type_symbol, hash_params]. An
# implementation constraint is that the hash may
# not contain symbols as keys, with the exception
# of :default. :default is to be used when then
# :selection does not exist in the :choices hash.
# <tt>:selection</tt>:: An index/key into the :choices array/hash which
# specifies the currently active choice.
# <tt>:copy_on_change</tt>:: If set to true, copy the value of the previous
# selection to the current selection whenever the
# selection changes. Default is false.
class Choice < BinData::Base
extend DSLMixin

dsl_parser    :choice
arg_processor :choice

mandatory_parameters :choices, :selection
optional_parameter   :copy_on_change

# Mixes in copy-on-change behaviour only when explicitly requested with
# copy_on_change: true.
def initialize_shared_instance
  extend CopyOnChangePlugin if eval_parameter(:copy_on_change) == true
  super
end

# Per-instance state: cache of instantiated choices, keyed by selection,
# and the previously active selection (used by CopyOnChangePlugin).
def initialize_instance
  @choices = {}
  @last_selection = nil
end
# Returns the current selection.  Raises IndexError when the :selection
# parameter evaluates to nil.
def selection
  chosen = eval_parameter(:selection)
  raise IndexError, ":selection returned nil for #{debug_name}" if chosen.nil?

  chosen
end
def respond_to?(symbol, include_all = false) # :nodoc:
  current_choice.respond_to?(symbol, include_all) || super
end

# Delegates any unknown method to the currently selected choice.
def method_missing(symbol, *args, &block) # :nodoc:
  current_choice.__send__(symbol, *args, &block)
end

# Forward the standard data-object operations to the active choice.
%w[clear? assign snapshot do_read do_write do_num_bytes].each do |m|
  module_eval <<-END
    def #{m}(*args)
      current_choice.#{m}(*args)
    end
  END
end
#---------------
private
# Returns the data object for the current selection, instantiating and
# caching it on first use.
def current_choice
  current_selection = selection
  @choices[current_selection] ||= instantiate_choice(current_selection)
end

# Instantiates the prototype registered under +selection+; raises
# IndexError when no such choice exists.
def instantiate_choice(selection)
  prototype = get_parameter(:choices)[selection]
  if prototype.nil?
    msg = "selection '#{selection}' does not exist in :choices for #{debug_name}"
    raise IndexError, msg
  end

  prototype.instantiate(nil, self)
end
end
class ChoiceArgProcessor < BaseArgProcessor
  def sanitize_parameters!(obj_class, params) # :nodoc:
    params.merge!(obj_class.dsl_params)

    params.sanitize_choices(:choices) do |choices|
      as_hash = choices_as_hash(choices)
      ensure_valid_keys(as_hash)
      as_hash
    end
  end

  #-------------
  private

  # Normalizes the :choices parameter to hash form; arrays are keyed by
  # index.
  def choices_as_hash(choices)
    choices.respond_to?(:to_ary) ? key_array_by_index(choices.to_ary) : choices
  end

  # Builds {index => element} from +array+, dropping nil placeholders.
  def key_array_by_index(array)
    keyed = {}
    array.each_with_index do |element, index|
      keyed[index] = element unless element.nil?
    end
    keyed
  end

  # :choices keys may not be nil, and the only Symbol key permitted is
  # :default.
  def ensure_valid_keys(choices)
    raise ArgumentError, ":choices hash may not have nil key" if choices.key?(nil)

    has_symbol_key = choices.keys.any? { |key| key.is_a?(Symbol) && key != :default }
    raise ArgumentError, ":choices hash may not have symbols for keys" if has_symbol_key
  end
end
# Logic for the :copy_on_change parameter
module CopyOnChangePlugin
  def current_choice
    obj = super
    copy_previous_value(obj)
    obj
  end

  # When the selection has changed, seeds the newly active choice with
  # the previous choice's value.
  def copy_previous_value(obj)
    current_selection = selection
    prev = get_previous_choice(current_selection)
    obj.assign(prev) unless prev.nil?
    remember_current_selection(current_selection)
  end

  # The previously active choice object, or nil when the selection is
  # unchanged or this is the first access.
  def get_previous_choice(selection)
    if @last_selection && selection != @last_selection
      @choices[@last_selection]
    end
  end

  def remember_current_selection(selection)
    @last_selection = selection
  end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/sanitize.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/sanitize.rb | require 'bindata/registry'
module BinData
# Subclasses of this are sanitized.  A parameter value that is already a
# SanitizedParameter is skipped when parameters are re-sanitized.
class SanitizedParameter; end
class SanitizedPrototype < SanitizedParameter
  # +obj_type+ is either a BinData::Base instance (used directly as the
  # factory) or a registered type name to look up.  +hints+ carries
  # :endian / :search_prefix context for the registry lookup.
  def initialize(obj_type, obj_params, hints)
    raw_hints = hints.dup
    # Reduce a sanitized endian object to its :big/:little symbol for
    # the registry lookup.
    if raw_hints[:endian].respond_to?(:endian)
      raw_hints[:endian] = raw_hints[:endian].endian
    end
    obj_params ||= {}

    if BinData::Base === obj_type
      obj_class = obj_type
    else
      obj_class = RegisteredClasses.lookup(obj_type, raw_hints)
    end

    if BinData::Base === obj_class
      @factory = obj_class
    else
      @obj_class = obj_class
      @obj_params = SanitizedParameters.new(obj_params, @obj_class, hints)
    end
  end

  def has_parameter?(param)
    if defined? @factory
      @factory.has_parameter?(param)
    else
      @obj_params.has_parameter?(param)
    end
  end

  # Creates a new data object from this prototype; the factory instance
  # is created lazily and memoized.
  def instantiate(value = nil, parent = nil)
    @factory ||= @obj_class.new(@obj_params)

    @factory.new(value, parent)
  end
end
#----------------------------------------------------------------------------
# A single named field: pairs the field's name with its sanitized type
# prototype.
class SanitizedField < SanitizedParameter
  def initialize(name, field_type, field_params, hints)
    @name = name
    @prototype = SanitizedPrototype.new(field_type, field_params, hints)
  end

  attr_reader :prototype, :name

  # The field name as a Symbol, or nil for anonymous fields.
  def name_as_sym
    @name.nil? ? nil : @name.to_sym
  end

  # Delegates to the underlying prototype.
  def has_parameter?(param)
    @prototype.has_parameter?(param)
  end

  # Instantiates a data object from the underlying prototype.
  def instantiate(value = nil, parent = nil)
    @prototype.instantiate(value, parent)
  end
end
#----------------------------------------------------------------------------
# Ordered collection of SanitizedField objects for a struct-like type.
class SanitizedFields < SanitizedParameter
  include Enumerable

  # When +base_fields+ is given, starts with a copy of its field list so
  # derived types inherit their ancestors' fields.
  def initialize(hints, base_fields = nil)
    @hints = hints
    @fields = base_fields.nil? ? [] : base_fields.raw_fields
  end

  # Appends a new sanitized field.  An empty name is stored as nil
  # (anonymous field).
  def add_field(type, name, params)
    name = nil if name == ""

    @fields << SanitizedField.new(name, type, params, @hints)
  end

  # A defensive copy of the field list.
  def raw_fields
    @fields.dup
  end

  def [](idx)
    @fields[idx]
  end

  def empty?
    @fields.empty?
  end

  def length
    @fields.length
  end

  def each(&block)
    @fields.each(&block)
  end

  # Field names as symbols (nil entries for anonymous fields).
  def field_names
    @fields.map(&:name_as_sym)
  end

  # Truthy (the matching field) when a field named +name+ exists.
  def field_name?(name)
    wanted = name.to_sym
    @fields.detect { |field| field.name_as_sym == wanted }
  end

  def all_field_names_blank?
    @fields.all? { |field| field.name.nil? }
  end

  def no_field_names_blank?
    @fields.none? { |field| field.name.nil? }
  end

  def any_field_has_parameter?(parameter)
    @fields.any? { |field| field.has_parameter?(parameter) }
  end
end
#----------------------------------------------------------------------------
class SanitizedChoices < SanitizedParameter
  # +choices+ maps selection keys to prototypes, or to [type, params]
  # values which are sanitized here.  The :default key installs the
  # prototype as the hash default, returned for any unknown selection.
  def initialize(choices, hints)
    @choices = {}
    choices.each_pair do |key, val|
      if SanitizedParameter === val
        prototype = val
      else
        type, param = val
        prototype = SanitizedPrototype.new(type, param, hints)
      end

      if key == :default
        @choices.default = prototype
      else
        @choices[key] = prototype
      end
    end
  end

  def [](key)
    @choices[key]
  end
end
#----------------------------------------------------------------------------
# Sanitized stand-in for endian: :big.
class SanitizedBigEndian < SanitizedParameter
  def endian
    :big
  end
end

# Sanitized stand-in for endian: :little.
class SanitizedLittleEndian < SanitizedParameter
  def endian
    :little
  end
end
#----------------------------------------------------------------------------
# BinData objects are instantiated with parameters to determine their
# behaviour. These parameters must be sanitized to ensure their values
# are valid. When instantiating many objects with identical parameters,
# such as an array of records, there is much duplicated sanitizing.
#
# The purpose of the sanitizing code is to eliminate the duplicated
# validation.
#
# SanitizedParameters is a hash-like collection of parameters. Its purpose
# is to recursively sanitize the parameters of an entire BinData object chain
# at a single time.
class SanitizedParameters < Hash
# Memoized constants
BIG_ENDIAN = SanitizedBigEndian.new
LITTLE_ENDIAN = SanitizedLittleEndian.new

class << self
  # Sanitizes +parameters+ for +the_class+.  Already-sanitized
  # parameters are returned unchanged.
  def sanitize(parameters, the_class)
    if SanitizedParameters === parameters
      parameters
    else
      SanitizedParameters.new(parameters, the_class, {})
    end
  end
end

# Copies +parameters+ in with symbolized keys, applies the :endian and
# :search_prefix hints from the enclosing scope, then runs the full
# sanitizing pipeline (see #sanitize!).
def initialize(parameters, the_class, hints)
  parameters.each_pair { |key, value| self[key.to_sym] = value }

  @the_class = the_class

  if hints[:endian]
    self[:endian] ||= hints[:endian]
  end

  if hints[:search_prefix] && !hints[:search_prefix].empty?
    self[:search_prefix] = Array(self[:search_prefix]).concat(Array(hints[:search_prefix]))
  end

  sanitize!
end
alias has_parameter? key?

# True when at least one of +keys+ is present.
def has_at_least_one_of?(*keys)
  keys.each do |key|
    return true if has_parameter?(key)
  end

  false
end

# Warns when a parameter this class does not use was supplied,
# suggesting the likely intended name.
def warn_replacement_parameter(bad_key, suggested_key)
  if has_parameter?(bad_key)
    Kernel.warn ":#{bad_key} is not used with #{@the_class}. " \
                "You probably want to change this to :#{suggested_key}"
  end
end

#  def warn_renamed_parameter(old_key, new_key)
#    val = delete(old_key)
#    if val
#      self[new_key] = val
#      Kernel.warn ":#{old_key} has been renamed to :#{new_key} in #{@the_class}. " \
#        "Using :#{old_key} is now deprecated and will be removed in the future"
#    end
#  end

# Raises unless each present +keys+ value can evaluate to an integer.
# Symbols and callables pass through — they are evaluated lazily later.
def must_be_integer(*keys)
  keys.each do |key|
    if has_parameter?(key)
      parameter = self[key]
      unless Symbol === parameter ||
             parameter.respond_to?(:arity) ||
             parameter.respond_to?(:to_int)
        raise ArgumentError, "parameter '#{key}' in #{@the_class} must " \
                             "evaluate to an integer, got #{parameter.class}"
      end
    end
  end
end

# Moves the value of +old_key+ (if present) to +new_key+.
def rename_parameter(old_key, new_key)
  if has_parameter?(old_key)
    self[new_key] = delete(old_key)
  end
end
# Replaces self[key] ([type, params]) with a SanitizedPrototype.
def sanitize_object_prototype(key)
  sanitize(key) do |obj_type, obj_params|
    create_sanitized_object_prototype(obj_type, obj_params)
  end
end

# Replaces self[key] with a SanitizedFields collection populated by the
# caller's block.
def sanitize_fields(key, &block)
  sanitize(key) do |fields|
    sanitized_fields = create_sanitized_fields
    yield(fields, sanitized_fields)
    sanitized_fields
  end
end

# Replaces self[key] with SanitizedChoices built from the block's result.
def sanitize_choices(key, &block)
  sanitize(key) do |obj|
    create_sanitized_choices(yield(obj))
  end
end

# Replaces self[key] with a sanitized endian object.
def sanitize_endian(key)
  sanitize(key) { |endian| create_sanitized_endian(endian) }
end

# Generic hook: replaces self[key] with the block's result, but only
# when the current value still needs sanitizing.
def sanitize(key, &block)
  if needs_sanitizing?(key)
    self[key] = yield(self[key])
  end
end

def create_sanitized_params(params, the_class)
  SanitizedParameters.new(params, the_class, hints)
end

# Context propagated to nested types when they are sanitized.
def hints
  { endian: self[:endian], search_prefix: self[:search_prefix] }
end
#---------------
private
def sanitize!
ensure_no_nil_values
merge_default_parameters!
@the_class.arg_processor.sanitize_parameters!(@the_class, self)
ensure_mandatory_parameters_exist
ensure_mutual_exclusion_of_parameters
end
def needs_sanitizing?(key)
has_parameter?(key) && !self[key].is_a?(SanitizedParameter)
end
def ensure_no_nil_values
each do |key, value|
if value.nil?
raise ArgumentError,
"parameter '#{key}' has nil value in #{@the_class}"
end
end
end
def merge_default_parameters!
@the_class.default_parameters.each do |key, value|
self[key] = value unless has_parameter?(key)
end
end
def ensure_mandatory_parameters_exist
@the_class.mandatory_parameters.each do |key|
unless has_parameter?(key)
raise ArgumentError,
"parameter '#{key}' must be specified in #{@the_class}"
end
end
end
def ensure_mutual_exclusion_of_parameters
return if length < 2
@the_class.mutually_exclusive_parameters.each do |key1, key2|
if has_parameter?(key1) && has_parameter?(key2)
raise ArgumentError, "params '#{key1}' and '#{key2}' " \
"are mutually exclusive in #{@the_class}"
end
end
end
def create_sanitized_endian(endian)
if endian == :big
BIG_ENDIAN
elsif endian == :little
LITTLE_ENDIAN
elsif endian == :big_and_little
raise ArgumentError, "endian: :big or endian: :little is required"
else
raise ArgumentError, "unknown value for endian '#{endian}'"
end
end
def create_sanitized_choices(choices)
SanitizedChoices.new(choices, hints)
end
def create_sanitized_fields
SanitizedFields.new(hints)
end
def create_sanitized_object_prototype(obj_type, obj_params)
SanitizedPrototype.new(obj_type, obj_params, hints)
end
end
#----------------------------------------------------------------------------
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/base_primitive.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/base_primitive.rb | require 'bindata/base'
module BinData
# A BinData::BasePrimitive object is a container for a value that has a
# particular binary representation. A value corresponds to a primitive type
# such as as integer, float or string. Only one value can be contained by
# this object. This value can be read from or written to an IO stream.
#
# require 'bindata'
#
# obj = BinData::Uint8.new(initial_value: 42)
# obj #=> 42
# obj.assign(5)
# obj #=> 5
# obj.clear
# obj #=> 42
#
# obj = BinData::Uint8.new(value: 42)
# obj #=> 42
# obj.assign(5)
# obj #=> 42
#
# obj = BinData::Uint8.new(assert: 3)
# obj.read("\005") #=> BinData::ValidityError: value is '5' but expected '3'
#
# obj = BinData::Uint8.new(assert: -> { value < 5 })
# obj.read("\007") #=> BinData::ValidityError: value not as expected
#
# == Parameters
#
# Parameters may be provided at initialisation to control the behaviour of
# an object. These params include those for BinData::Base as well as:
#
# [<tt>:initial_value</tt>] This is the initial value to use before one is
# either #read or explicitly set with #value=.
# [<tt>:value</tt>] The object will always have this value.
# Calls to #value= are ignored when
# using this param. While reading, #value
# will return the value of the data read from the
# IO, not the result of the <tt>:value</tt> param.
# [<tt>:assert</tt>] Raise an error unless the value read or assigned
# meets this criteria. The variable +value+ is
# made available to any lambda assigned to this
# parameter. A boolean return indicates success
# or failure. Any other return is compared to
# the value just read in.
# [<tt>:asserted_value</tt>] Equivalent to <tt>:assert</tt> and <tt>:value</tt>.
#
class BasePrimitive < BinData::Base
unregister_self
optional_parameters :initial_value, :value, :assert, :asserted_value
mutually_exclusive_parameters :initial_value, :value
mutually_exclusive_parameters :asserted_value, :value, :assert
def initialize_shared_instance
extend InitialValuePlugin if has_parameter?(:initial_value)
extend ValuePlugin if has_parameter?(:value)
extend AssertPlugin if has_parameter?(:assert)
extend AssertedValuePlugin if has_parameter?(:asserted_value)
super
end
def initialize_instance
@value = nil
end
def clear? # :nodoc:
@value.nil?
end
def assign(val)
raise ArgumentError, "can't set a nil value for #{debug_name}" if val.nil?
raw_val = val.respond_to?(:snapshot) ? val.snapshot : val
@value = raw_val.dup
end
def snapshot
_value
end
def value
snapshot
end
def value=(val)
assign(val)
end
def respond_to_missing?(symbol, include_all = false) # :nodoc:
child = snapshot
child.respond_to?(symbol, include_all) || super
end
def method_missing(symbol, *args, &block) # :nodoc:
child = snapshot
if child.respond_to?(symbol)
self.class.class_eval <<-END, __FILE__, __LINE__ + 1
def #{symbol}(*args, &block) # def clamp(*args, &block)
snapshot.#{symbol}(*args, &block) # snapshot.clamp(*args, &block)
end # end
END
child.__send__(symbol, *args, &block)
else
super
end
end
def <=>(other)
snapshot <=> other
end
def eql?(other)
# double dispatch
other.eql?(snapshot)
end
def hash
snapshot.hash
end
def do_read(io) # :nodoc:
@value = read_and_return_value(io)
end
def do_write(io) # :nodoc:
io.writebytes(value_to_binary_string(_value))
end
def do_num_bytes # :nodoc:
value_to_binary_string(_value).length
end
#---------------
private
# The unmodified value of this data object. Note that #snapshot calls this
# method. This indirection is so that #snapshot can be overridden in
# subclasses to modify the presentation value.
def _value
@value != nil ? @value : sensible_default
end
# Logic for the :value parameter
module ValuePlugin
def assign(val)
# Ignored
end
def _value
reading? ? @value : eval_parameter(:value)
end
end
# Logic for the :initial_value parameter
module InitialValuePlugin
def _value
@value != nil ? @value : eval_parameter(:initial_value)
end
end
# Logic for the :assert parameter
module AssertPlugin
def assign(val)
super(val)
assert!
end
def do_read(io) # :nodoc:
super(io)
assert!
end
def assert!
current_value = snapshot
expected = eval_parameter(:assert, value: current_value)
msg =
if !expected
"value '#{current_value}' not as expected"
elsif expected != true && current_value != expected
"value is '#{current_value}' but expected '#{expected}'"
else
nil
end
raise ValidityError, "#{msg} for #{debug_name}" if msg
end
end
# Logic for the :asserted_value parameter
module AssertedValuePlugin
def assign(val)
assert_value(val)
super(val)
end
def _value
reading? ? @value : eval_parameter(:asserted_value)
end
# The asserted value as a binary string.
#
# Rationale: while reading, +#to_binary_s+ will use the
# value read in, rather than the +:asserted_value+.
# This feature is used by Skip.
def asserted_binary_s
value_to_binary_string(eval_parameter(:asserted_value))
end
def do_read(io) # :nodoc:
super(io)
assert!
end
def assert!
assert_value(snapshot)
end
def assert_value(current_value)
expected = eval_parameter(:asserted_value, value: current_value)
if current_value != expected
raise ValidityError,
"value is '#{current_value}' but " \
"expected '#{expected}' for #{debug_name}"
end
end
end
###########################################################################
# To be implemented by subclasses
# Return the string representation that +val+ will take when written.
def value_to_binary_string(val)
raise NotImplementedError
end
# Read a number of bytes from +io+ and return the value they represent.
def read_and_return_value(io)
raise NotImplementedError
end
# Return a sensible default for this data.
def sensible_default
raise NotImplementedError
end
# To be implemented by subclasses
###########################################################################
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/virtual.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/virtual.rb | require 'bindata/base'
module BinData
# A virtual field is one that is neither read, written nor occupies space in
# the data stream. It is used to make assertions or as a convenient label
# for determining offsets or storing values.
#
# require 'bindata'
#
# class A < BinData::Record
# string :a, read_length: 5
# string :b, read_length: 5
# virtual :c, assert: -> { a == b }
# end
#
# obj = A.read("abcdeabcde")
# obj.a #=> "abcde"
# obj.c.rel_offset #=> 10
#
# obj = A.read("abcdeABCDE") #=> BinData::ValidityError: assertion failed for obj.c
#
# == Parameters
#
# Parameters may be provided at initialisation to control the behaviour of
# an object. These params include those for BinData::Base as well as:
#
# [<tt>:assert</tt>] Raise an error when reading or assigning if the value
# of this evaluated parameter is false.
# [<tt>:value</tt>] The virtual object will always have this value.
#
class Virtual < BinData::BasePrimitive
def do_read(io); end
def do_write(io); end
def do_num_bytes
0.0
end
def sensible_default
nil
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/primitive.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/primitive.rb | require 'bindata/base_primitive'
require 'bindata/dsl'
require 'bindata/struct'
module BinData
# A Primitive is a declarative way to define a new BinData data type.
# The data type must contain a primitive value only, i.e numbers or strings.
# For new data types that contain multiple values see BinData::Record.
#
# To define a new data type, set fields as if for Record and add a
# #get and #set method to extract / convert the data between the fields
# and the #value of the object.
#
# require 'bindata'
#
# class PascalString < BinData::Primitive
# uint8 :len, value: -> { data.length }
# string :data, read_length: :len
#
# def get
# self.data
# end
#
# def set(v)
# self.data = v
# end
# end
#
# ps = PascalString.new(initial_value: "hello")
# ps.to_binary_s #=> "\005hello"
# ps.read("\003abcde")
# ps #=> "abc"
#
# # Unsigned 24 bit big endian integer
# class Uint24be < BinData::Primitive
# uint8 :byte1
# uint8 :byte2
# uint8 :byte3
#
# def get
# (self.byte1 << 16) | (self.byte2 << 8) | self.byte3
# end
#
# def set(v)
# v = 0 if v < 0
# v = 0xffffff if v > 0xffffff
#
# self.byte1 = (v >> 16) & 0xff
# self.byte2 = (v >> 8) & 0xff
# self.byte3 = v & 0xff
# end
# end
#
# u24 = Uint24be.new
# u24.read("\x12\x34\x56")
# "0x%x" % u24 #=> 0x123456
#
# == Parameters
#
# Primitive objects accept all the parameters that BinData::BasePrimitive do.
#
class Primitive < BasePrimitive
extend DSLMixin
unregister_self
dsl_parser :primitive
arg_processor :primitive
mandatory_parameter :struct_params
def initialize_instance
super
@struct = BinData::Struct.new(get_parameter(:struct_params), self)
end
def respond_to?(symbol, include_private = false) # :nodoc:
@struct.respond_to?(symbol, include_private) || super
end
def method_missing(symbol, *args, &block) # :nodoc:
if @struct.respond_to?(symbol)
@struct.__send__(symbol, *args, &block)
else
super
end
end
def assign(val)
super(val)
set(_value)
@value = get
end
def debug_name_of(child) # :nodoc:
debug_name + "-internal-"
end
def do_write(io)
set(_value)
@struct.do_write(io)
end
def do_num_bytes
set(_value)
@struct.do_num_bytes
end
#---------------
private
def sensible_default
get
end
def read_and_return_value(io)
@struct.do_read(io)
get
end
###########################################################################
# To be implemented by subclasses
# Extracts the value for this data object from the fields of the
# internal struct.
def get
raise NotImplementedError
end
# Sets the fields of the internal struct to represent +v+.
def set(v)
raise NotImplementedError
end
# To be implemented by subclasses
###########################################################################
end
class PrimitiveArgProcessor < BaseArgProcessor
def sanitize_parameters!(obj_class, params)
params[:struct_params] = params.create_sanitized_params(obj_class.dsl_params, BinData::Struct)
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/stringz.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/stringz.rb | require 'bindata/base_primitive'
module BinData
# A BinData::Stringz object is a container for a zero ("\0") terminated
# string.
#
# For convenience, the zero terminator is not necessary when setting the
# value. Likewise, the returned value will not be zero terminated.
#
# require 'bindata'
#
# data = "abcd\x00efgh"
#
# obj = BinData::Stringz.new
# obj.read(data)
# obj.snapshot #=> "abcd"
# obj.num_bytes #=> 5
# obj.to_binary_s #=> "abcd\000"
#
# == Parameters
#
# Stringz objects accept all the params that BinData::BasePrimitive
# does, as well as the following:
#
# <tt>:max_length</tt>:: The maximum length of the string including the zero
# byte.
class Stringz < BinData::BasePrimitive
optional_parameters :max_length
def assign(val)
super(binary_string(val))
end
def snapshot
# override to always remove trailing zero bytes
result = super
trim_and_zero_terminate(result).chomp("\0")
end
#---------------
private
def value_to_binary_string(val)
trim_and_zero_terminate(val)
end
def read_and_return_value(io)
max_length = eval_parameter(:max_length)
str = binary_string("")
i = 0
ch = nil
# read until zero byte or we have read in the max number of bytes
while ch != "\0" && i != max_length
ch = io.readbytes(1)
str << ch
i += 1
end
trim_and_zero_terminate(str)
end
def sensible_default
""
end
def trim_and_zero_terminate(str)
max_length = eval_parameter(:max_length)
if max_length && max_length < 1
msg = "max_length must be >= 1 in #{debug_name} (got #{max_length})"
raise ArgumentError, msg
end
result = binary_string(str)
truncate_after_first_zero_byte!(result)
trim_to!(result, max_length)
append_zero_byte_if_needed!(result)
result
end
def truncate_after_first_zero_byte!(str)
str.sub!(/([^\0]*\0).*/, '\1')
end
def trim_to!(str, max_length = nil)
if max_length
str.slice!(max_length..-1)
str[-1, 1] = "\0" if str.length == max_length
end
end
def append_zero_byte_if_needed!(str)
if str.empty? || str[-1, 1] != "\0"
str << "\0"
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/trace.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/trace.rb | module BinData
# Turn on trace information when reading a BinData object.
# If +block+ is given then the tracing only occurs for that block.
# This is useful for debugging a BinData declaration.
def trace_reading(io = STDERR)
@tracer = Tracer.new(io)
[BasePrimitive, Choice].each(&:turn_on_tracing)
if block_given?
begin
yield
ensure
[BasePrimitive, Choice].each(&:turn_off_tracing)
@tracer = nil
end
end
end
# reference to the current tracer
@tracer ||= nil
class Tracer # :nodoc:
def initialize(io)
@trace_io = io
end
def trace(msg)
@trace_io.puts(msg)
end
def trace_obj(obj_name, val)
if val.length > 30
val = val.slice(0..30) + "..."
end
trace "#{obj_name} => #{val}"
end
end
def trace_message # :nodoc:
yield @tracer
end
module_function :trace_reading, :trace_message
module TraceHook
def turn_on_tracing
if !method_defined? :do_read_without_hook
alias_method :do_read_without_hook, :do_read
alias_method :do_read, :do_read_with_hook
end
end
def turn_off_tracing
if method_defined? :do_read_without_hook
alias_method :do_read, :do_read_without_hook
remove_method :do_read_without_hook
end
end
end
class BasePrimitive < BinData::Base
extend TraceHook
def do_read_with_hook(io)
do_read_without_hook(io)
BinData.trace_message do |tracer|
value_string = _value.inspect
tracer.trace_obj(debug_name, value_string)
end
end
end
class Choice < BinData::Base
extend TraceHook
def do_read_with_hook(io)
BinData.trace_message do |tracer|
selection_string = eval_parameter(:selection).inspect
tracer.trace_obj("#{debug_name}-selection-", selection_string)
end
do_read_without_hook(io)
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/framework.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/framework.rb | module BinData
# Error raised when unexpected results occur when reading data from IO.
class ValidityError < StandardError; end
# All methods provided by the framework are to be implemented or overridden
# by subclasses of BinData::Base.
module Framework
# Initializes the state of the object. All instance variables that
# are used by the object must be initialized here.
def initialize_instance; end
# Initialises state that is shared by objects with the same parameters.
#
# This should only be used when optimising for performance. Instance
# variables set here, and changes to the singleton class will be shared
# between all objects that are initialized with the same parameters.
# This method is called only once for a particular set of parameters.
def initialize_shared_instance; end
# Returns true if the object has not been changed since creation.
def clear?
raise NotImplementedError
end
# Assigns the value of +val+ to this data object. Note that +val+ must
# always be deep copied to ensure no aliasing problems can occur.
def assign(val)
raise NotImplementedError
end
# Returns a snapshot of this data object.
def snapshot
raise NotImplementedError
end
# Returns the debug name of +child+. This only needs to be implemented
# by objects that contain child objects.
def debug_name_of(child) # :nodoc:
debug_name
end
# Returns the offset of +child+. This only needs to be implemented
# by objects that contain child objects.
def offset_of(child) # :nodoc:
0
end
# Is this object aligned on non-byte boundaries?
def bit_aligned?
false
end
# Reads the data for this data object from +io+.
def do_read(io) # :nodoc:
raise NotImplementedError
end
# Writes the value for this data to +io+.
def do_write(io) # :nodoc:
raise NotImplementedError
end
# Returns the number of bytes it will take to write this data.
def do_num_bytes # :nodoc:
raise NotImplementedError
end
# Set visibility requirements of methods to implement
public :clear?, :assign, :snapshot, :debug_name_of, :offset_of
protected :initialize_instance, :initialize_shared_instance
protected :do_read, :do_write, :do_num_bytes
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/buffer.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/buffer.rb | require 'bindata/base'
require 'bindata/dsl'
module BinData
# A Buffer is conceptually a substream within a data stream. It has a
# defined size and it will always read or write the exact number of bytes to
# fill the buffer. Short reads will skip over unused bytes and short writes
# will pad the substream with "\0" bytes.
#
# require 'bindata'
#
# obj = BinData::Buffer.new(length: 5, type: [:string, {value: "abc"}])
# obj.to_binary_s #=> "abc\000\000"
#
#
# class MyBuffer < BinData::Buffer
# default_parameter length: 8
#
# endian :little
#
# uint16 :num1
# uint16 :num2
# # padding occurs here
# end
#
# obj = MyBuffer.read("\001\000\002\000\000\000\000\000")
# obj.num1 #=> 1
# obj.num1 #=> 2
# obj.raw_num_bytes #=> 4
# obj.num_bytes #=> 8
#
#
# class StringTable < BinData::Record
# endian :little
#
# uint16 :table_size_in_bytes
# buffer :strings, length: :table_size_in_bytes do
# array read_until: :eof do
# uint8 :len
# string :str, length: :len
# end
# end
# end
#
#
# == Parameters
#
# Parameters may be provided at initialisation to control the behaviour of
# an object. These params are:
#
# <tt>:length</tt>:: The number of bytes in the buffer.
# <tt>:type</tt>:: The single type inside the buffer. Use a struct if
# multiple fields are required.
class Buffer < BinData::Base
extend DSLMixin
dsl_parser :buffer
arg_processor :buffer
mandatory_parameters :length, :type
def initialize_instance
@type = get_parameter(:type).instantiate(nil, self)
end
# The number of bytes used, ignoring the padding imposed by the buffer.
def raw_num_bytes
@type.num_bytes
end
def clear?
@type.clear?
end
def assign(val)
@type.assign(val)
end
def snapshot
@type.snapshot
end
def respond_to_missing?(symbol, include_all = false) # :nodoc:
@type.respond_to?(symbol, include_all) || super
end
def method_missing(symbol, *args, &block) # :nodoc:
@type.__send__(symbol, *args, &block)
end
def do_read(io) # :nodoc:
buf_len = eval_parameter(:length)
io.transform(BufferIO.new(buf_len)) do |transformed_io, _|
@type.do_read(transformed_io)
end
end
def do_write(io) # :nodoc:
buf_len = eval_parameter(:length)
io.transform(BufferIO.new(buf_len)) do |transformed_io, _|
@type.do_write(transformed_io)
end
end
def do_num_bytes # :nodoc:
eval_parameter(:length)
end
# Transforms the IO stream to restrict access inside
# a buffer of specified length.
class BufferIO < IO::Transform
def initialize(length)
super()
@bytes_remaining = length
end
def before_transform
@buf_start = offset
@buf_end = @buf_start + @bytes_remaining
end
def num_bytes_remaining
[@bytes_remaining, super].min
rescue IOError
@bytes_remaining
end
def skip(n)
nbytes = buffer_limited_n(n)
@bytes_remaining -= nbytes
chain_skip(nbytes)
end
def seek_abs(n)
if n < @buf_start || n >= @buf_end
raise IOError, "can not seek to abs_offset outside of buffer"
end
@bytes_remaining -= (n - offset)
chain_seek_abs(n)
end
def read(n)
nbytes = buffer_limited_n(n)
@bytes_remaining -= nbytes
chain_read(nbytes)
end
def write(data)
nbytes = buffer_limited_n(data.size)
@bytes_remaining -= nbytes
if nbytes < data.size
data = data[0, nbytes]
end
chain_write(data)
end
def after_read_transform
read(nil)
end
def after_write_transform
write("\x00" * @bytes_remaining)
end
def buffer_limited_n(n)
if n.nil?
@bytes_remaining
elsif n.positive?
limit = @bytes_remaining
n > limit ? limit : n
# uncomment if we decide to allow backwards skipping
# elsif n.negative?
# limit = @bytes_remaining + @buf_start - @buf_end
# n < limit ? limit : n
else
0
end
end
end
end
class BufferArgProcessor < BaseArgProcessor
include MultiFieldArgSeparator
def sanitize_parameters!(obj_class, params)
params.merge!(obj_class.dsl_params)
params.must_be_integer(:length)
params.sanitize_object_prototype(:type)
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/lz4.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/lz4.rb | require 'extlz4'
module BinData
module Transform
# Transforms a LZ4 compressed data stream.
#
# gem install extlz4
class LZ4 < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::LZ4::decode(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::LZ4::encode(@write))
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/xor.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/xor.rb | module BinData
module Transform
# Transforms the data stream by xoring each byte.
class Xor < BinData::IO::Transform
def initialize(xor)
super()
@xor = xor
end
def read(n)
chain_read(n).bytes.map { |byte| (byte ^ @xor).chr }.join
end
def write(data)
chain_write(data.bytes.map { |byte| (byte ^ @xor).chr }.join)
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/zstd.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/zstd.rb | require 'zstd-ruby'
module BinData
module Transform
# Transforms a zstd compressed data stream.
#
# gem install zstd-ruby
class Zstd < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::Zstd::decompress(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::Zstd::compress(@write))
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/xz.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/xz.rb | require 'xz'
module BinData
module Transform
# Transforms a xz compressed data stream.
#
# gem install ruby-xz
class XZ < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::XZ::decompress(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::XZ::compress(@write))
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/brotli.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/brotli.rb | require 'brotli'
module BinData
module Transform
# Transforms a brotli compressed data stream.
#
# gem install brotli
class Brotli < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::Brotli::inflate(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::Brotli::deflate(@write))
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/zlib.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/zlib.rb | require 'zlib'
module BinData
module Transform
# Transforms a zlib compressed data stream.
class Zlib < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::Zlib::Inflate.inflate(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::Zlib::Deflate.deflate(@write))
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/lzma.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/bindata-2.5.1/lib/bindata/transform/lzma.rb | require 'xz'
module BinData
module Transform
# Transforms a lzma compressed data stream.
#
# gem install ruby-xz
class Lzma < BinData::IO::Transform
transform_changes_stream_length!
def initialize(read_length)
super()
@length = read_length
end
def read(n)
@read ||= ::XZ::decompress(chain_read(@length))
@read.slice!(0...n)
end
def write(data)
@write ||= create_empty_binary_string
@write << data
end
def after_read_transform
raise IOError, "didn't read all data" unless @read.empty?
end
def after_write_transform
chain_write(::XZ::compress(@write))
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable.rb | # frozen_string_literal: true
require 'addressable/uri'
require 'addressable/template'
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/version.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/version.rb | # frozen_string_literal: true
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
# Used to prevent the class/module from being loaded more than once
# Guard so the version constants are defined only once even if this file
# is loaded multiple times.  (`unless defined?` is the idiomatic form of
# the original `if !defined?`.)
unless defined?(Addressable::VERSION)
  module Addressable
    # Gem version, exposed as numeric parts plus the joined dotted string.
    module VERSION
      MAJOR = 2
      MINOR = 8
      TINY  = 8
      # Frozen so callers cannot mutate the shared version string.
      STRING = [MAJOR, MINOR, TINY].join('.').freeze
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/uri.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/uri.rb | # frozen_string_literal: true
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
require "addressable/version"
require "addressable/idna"
require "public_suffix"
##
# Addressable is a library for processing links and URIs.
module Addressable
##
# This is an implementation of a URI parser based on
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>,
# <a href="http://www.ietf.org/rfc/rfc3987.txt">RFC 3987</a>.
class URI
##
# Raised if something other than a valid URI is supplied.
class InvalidURIError < StandardError
end
##
# Container for the character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# Note: Concatenated and interpolated `String`s are not affected by the
# `frozen_string_literal` directive and must be frozen explicitly.
#
# Interpolated `String`s *were* frozen this way before Ruby 3.0:
# https://bugs.ruby-lang.org/issues/17104
module CharacterClasses
  ALPHA = "a-zA-Z"
  DIGIT = "0-9"
  # gen-delims / sub-delims, backslash-escaped for interpolation into a
  # regexp character class.
  GEN_DELIMS = "\\:\\/\\?\\#\\[\\]\\@"
  SUB_DELIMS = "\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\="
  RESERVED = (GEN_DELIMS + SUB_DELIMS).freeze
  UNRESERVED = (ALPHA + DIGIT + "\\-\\.\\_\\~").freeze
  RESERVED_AND_UNRESERVED = RESERVED + UNRESERVED
  # pchar = unreserved / sub-delims / ":" / "@"
  PCHAR = (UNRESERVED + SUB_DELIMS + "\\:\\@").freeze
  SCHEME = (ALPHA + DIGIT + "\\-\\+\\.").freeze
  # Includes "[" "]" for IP-literals and ":" for the zone/port separator.
  HOST = (UNRESERVED + SUB_DELIMS + "\\[\\:\\]").freeze
  AUTHORITY = (PCHAR + "\\[\\]").freeze
  PATH = (PCHAR + "\\/").freeze
  QUERY = (PCHAR + "\\/\\?").freeze
  FRAGMENT = (PCHAR + "\\/\\?").freeze
end
# Negated character classes (precompiled regexps) used when normalizing a
# component: each matches exactly the characters that must be
# percent-encoded for that component.
module NormalizeCharacterClasses
  HOST = /[^#{CharacterClasses::HOST}]/
  UNRESERVED = /[^#{CharacterClasses::UNRESERVED}]/
  PCHAR = /[^#{CharacterClasses::PCHAR}]/
  SCHEME = /[^#{CharacterClasses::SCHEME}]/
  FRAGMENT = /[^#{CharacterClasses::FRAGMENT}]/
  # Also matches a "%" not followed by "2B"/"2b" (negative lookahead), so
  # an encoded plus sign is preserved while stray percent signs match.
  QUERY = %r{[^a-zA-Z0-9\-\.\_\~\!\$\'\(\)\*\+\,\=\:\@\/\?%]|%(?!2B|2b)}
end
# Precompiled negated regexps for each character class; each matches the
# characters NOT allowed in the corresponding component (i.e. the ones
# that need percent-encoding).
module CharacterClassesRegexps
  AUTHORITY = /[^#{CharacterClasses::AUTHORITY}]/
  FRAGMENT = /[^#{CharacterClasses::FRAGMENT}]/
  HOST = /[^#{CharacterClasses::HOST}]/
  PATH = /[^#{CharacterClasses::PATH}]/
  QUERY = /[^#{CharacterClasses::QUERY}]/
  RESERVED = /[^#{CharacterClasses::RESERVED}]/
  RESERVED_AND_UNRESERVED = /[^#{CharacterClasses::RESERVED_AND_UNRESERVED}]/
  SCHEME = /[^#{CharacterClasses::SCHEME}]/
  UNRESERVED = /[^#{CharacterClasses::UNRESERVED}]/
end
# Reused literal constants (frozen via the frozen_string_literal pragma).
SLASH = '/'
EMPTY_STR = ''
# Component-splitting regexp taken from RFC 3986 (see .parse); every
# group is optional, so it matches any input string.
URIREGEX = /^(([^:\/?#]+):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?$/
# Well-known default port numbers, keyed by scheme.
PORT_MAPPING = {
  "http" => 80,
  "https" => 443,
  "ftp" => 21,
  "tftp" => 69,
  "sftp" => 22,
  "ssh" => 22,
  "svn+ssh" => 22,
  "telnet" => 23,
  "nntp" => 119,
  "gopher" => 70,
  "wais" => 210,
  "ldap" => 389,
  "prospero" => 1525
}.freeze
##
# Returns a URI object based on the parsed string.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI string to parse.
# No parsing is performed if the object is already an
# <code>Addressable::URI</code>.
#
# @return [Addressable::URI] The parsed URI.
def self.parse(uri)
  # If we were given nil, return nil.
  return nil unless uri
  # If a URI object is passed, just return itself.
  return uri.dup if uri.kind_of?(self)

  # If a URI object of the Ruby standard library variety is passed,
  # convert it to a string, then parse the string.
  # We do the check this way because we don't want to accidentally
  # cause a missing constant exception to be thrown.
  if uri.class.name =~ /^URI\b/
    uri = uri.to_s
  end

  # Otherwise, convert to a String
  begin
    uri = uri.to_str
  rescue TypeError, NoMethodError
    raise TypeError, "Can't convert #{uri.class} into String."
  end unless uri.is_a?(String)

  # This Regexp supplied as an example in RFC 3986, and it works great.
  scan = uri.scan(URIREGEX)
  fragments = scan[0]
  # 0-based capture offsets: 1 = scheme, 3 = authority, 4 = path,
  # 6 = query, 8 = fragment.
  scheme = fragments[1]
  authority = fragments[3]
  path = fragments[4]
  query = fragments[6]
  fragment = fragments[8]
  user = nil
  password = nil
  host = nil
  port = nil
  if authority != nil
    # The Regexp above doesn't split apart the authority.
    userinfo = authority[/^([^\[\]]*)@/, 1]
    if userinfo != nil
      # user = everything before the first ":", password = everything after.
      user = userinfo.strip[/^([^:]*):?/, 1]
      password = userinfo.strip[/:(.*)$/, 1]
    end
    # Strip the userinfo prefix and the ":port" suffix, leaving the host.
    host = authority.sub(
      /^([^\[\]]*)@/, EMPTY_STR
    ).sub(
      /:([^:@\[\]]*?)$/, EMPTY_STR
    )
    port = authority[/:([^:@\[\]]*?)$/, 1]
    # An empty port (e.g. "host:") is treated as unspecified.
    port = nil if port == EMPTY_STR
  end
  return new(
    :scheme => scheme,
    :user => user,
    :password => password,
    :host => host,
    :port => port,
    :path => path,
    :query => query,
    :fragment => fragment
  )
end
##
# Converts an input to a URI. The input does not have to be a valid
# URI — the method will use heuristics to guess what URI was intended.
# This is not standards-compliant, merely user-friendly.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI string to parse.
# No parsing is performed if the object is already an
# <code>Addressable::URI</code>.
# @param [Hash] hints
# A <code>Hash</code> of hints to the heuristic parser.
# Defaults to <code>{:scheme => "http"}</code>.
#
# @return [Addressable::URI] The parsed URI.
def self.heuristic_parse(uri, hints={})
  # If we were given nil, return nil.
  return nil unless uri
  # If a URI object is passed, just return itself.
  return uri.dup if uri.kind_of?(self)

  # If a URI object of the Ruby standard library variety is passed,
  # convert it to a string, then parse the string.
  # We do the check this way because we don't want to accidentally
  # cause a missing constant exception to be thrown.
  if uri.class.name =~ /^URI\b/
    uri = uri.to_s
  end

  unless uri.respond_to?(:to_str)
    raise TypeError, "Can't convert #{uri.class} into String."
  end
  # Otherwise, convert to a String
  uri = uri.to_str.dup.strip
  hints = {
    :scheme => "http"
  }.merge(hints)
  # Repair common scheme/slash typos before attempting a real parse.
  case uri
  when /^http:\//i
    uri.sub!(/^http:\/+/i, "http://")
  when /^https:\//i
    uri.sub!(/^https:\/+/i, "https://")
  when /^feed:\/+http:\//i
    uri.sub!(/^feed:\/+http:\/+/i, "feed:http://")
  when /^feed:\//i
    uri.sub!(/^feed:\/+/i, "feed://")
  when %r[^file:/{4}]i
    uri.sub!(%r[^file:/+]i, "file:////")
  when %r[^file://localhost/]i
    uri.sub!(%r[^file://localhost/+]i, "file:///")
  when %r[^file:/+]i
    uri.sub!(%r[^file:/+]i, "file:///")
  when /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/
    # Starts like a dotted-quad address: prepend the hinted scheme.
    uri.sub!(/^/, hints[:scheme] + "://")
  when /\A\d+\..*:\d+\z/
    # Looks like "<digit-leading host>.<...>:<port>".
    uri = "#{hints[:scheme]}://#{uri}"
  end
  match = uri.match(URIREGEX)
  fragments = match.captures
  authority = fragments[3]
  if authority && authority.length > 0
    # Normalize backslashes and spaces inside the authority only.
    new_authority = authority.tr("\\", "/").gsub(" ", "%20")
    # NOTE: We want offset 4, not 3!
    offset = match.offset(4)
    uri = uri.dup
    uri[offset[0]...offset[1]] = new_authority
  end
  parsed = self.parse(uri)
  # A "scheme" containing a dot is almost certainly a bare host name
  # that was mistaken for a scheme (e.g. "example.com:8080/").
  if parsed.scheme =~ /^[^\/?#\.]+\.[^\/?#]+$/
    parsed = self.parse(hints[:scheme] + "://" + uri)
  end
  if parsed.path.include?(".")
    # A word-boundary "@" in the path suggests an email address.
    if parsed.path[/\b@\b/]
      parsed.scheme = "mailto" unless parsed.scheme
    elsif new_host = parsed.path[/^([^\/]+\.[^\/]*)/, 1]
      # A leading "host.tld/..." path gets split into host + remainder.
      parsed.defer_validation do
        new_path = parsed.path.sub(
          Regexp.new("^" + Regexp.escape(new_host)), EMPTY_STR)
        parsed.host = new_host
        parsed.path = new_path
        parsed.scheme = hints[:scheme] unless parsed.scheme
      end
    end
  end
  return parsed
end
##
# Converts a path to a file scheme URI. If the path supplied is
# relative, it will be returned as a relative URI. If the path supplied
# is actually a non-file URI, it will parse the URI as if it had been
# parsed with <code>Addressable::URI.parse</code>. Handles all of the
# various Microsoft-specific formats for specifying paths.
#
# @param [String, Addressable::URI, #to_str] path
# Typically a <code>String</code> path to a file or directory, but
# will return a sensible return value if an absolute URI is supplied
# instead.
#
# @return [Addressable::URI]
# The parsed file scheme URI or the original URI if some other URI
# scheme was provided.
#
# @example
# base = Addressable::URI.convert_path("/absolute/path/")
# uri = Addressable::URI.convert_path("relative/path")
# (base + uri).to_s
# #=> "file:///absolute/path/relative/path"
#
# Addressable::URI.convert_path(
# "c:\\windows\\My Documents 100%20\\foo.txt"
# ).to_s
# #=> "file:///c:/windows/My%20Documents%20100%20/foo.txt"
#
# Addressable::URI.convert_path("http://example.com/").to_s
# #=> "http://example.com/"
def self.convert_path(path)
  # If we were given nil, return nil.
  return nil unless path
  # If a URI object is passed, just return itself.
  return path if path.kind_of?(self)
  unless path.respond_to?(:to_str)
    raise TypeError, "Can't convert #{path.class} into String."
  end

  # Otherwise, convert to a String
  path = path.to_str.strip

  # Drop any existing "file:" prefix so the remainder is a plain path.
  path.sub!(/^file:\/?\/?/, EMPTY_STR) if path =~ /^file:\/?\/?/
  # A Windows drive spec ("c:" or "c|") gets a leading slash.
  path = SLASH + path if path =~ /^([a-zA-Z])[\|:]/
  uri = self.parse(path)

  if uri.scheme == nil
    # Adjust windows-style uris
    uri.path.sub!(/^\/?([a-zA-Z])[\|:][\\\/]/) do
      "/#{$1.downcase}:/"
    end
    uri.path.tr!("\\", SLASH)
    # Give an existing directory exactly one trailing slash.
    if File.exist?(uri.path) &&
        File.stat(uri.path).directory?
      uri.path.chomp!(SLASH)
      uri.path = uri.path + '/'
    end

    # If the path is absolute, set the scheme and host.
    if uri.path.start_with?(SLASH)
      uri.scheme = "file"
      uri.host = EMPTY_STR
    end
    uri.normalize!
  end

  return uri
end
##
# Joins several URIs together.
#
# @param [String, Addressable::URI, #to_str] *uris
# The URIs to join.
#
# @return [Addressable::URI] The joined URI.
#
# @example
# base = "http://example.com/"
# uri = Addressable::URI.parse("relative/path")
# Addressable::URI.join(base, uri)
# #=> #<Addressable::URI:0xcab390 URI:http://example.com/relative/path>
def self.join(*uris)
  # Coerce every argument to an Addressable::URI (raising for anything
  # that cannot become a string).
  uri_objects = uris.map do |u|
    raise TypeError, "Can't convert #{u.class} into String." unless u.respond_to?(:to_str)
    u.kind_of?(self) ? u : parse(u.to_str)
  end
  # Fold the remaining URIs into a mutable copy of the first.
  first, *rest = uri_objects
  joined = first.dup
  rest.each { |u| joined.join!(u) }
  joined
end
##
# Tables used to optimize encoding operations in `self.encode_component`
# and `self.normalize_component`
# Per-byte lookup tables: lowercase hex pairs ("00".."ff") and upcased
# percent escapes ("%00".."%FF"), all frozen.
SEQUENCE_ENCODING_TABLE = Array.new(256) do |byte|
  format("%02x", byte).freeze
end.freeze
SEQUENCE_UPCASED_PERCENT_ENCODING_TABLE = Array.new(256) do |byte|
  format("%%%02X", byte).freeze
end.freeze
##
# Percent encodes a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
# The characters which are not percent encoded. If a <code>String</code>
# is passed, the <code>String</code> must be formatted as a regular
# expression character class. (Do not include the surrounding square
# brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
# everything but the letters 'b' through 'z' and the numbers '0' through
# '9' to be percent encoded. If a <code>Regexp</code> is passed, the
# value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A set of
# useful <code>String</code> values may be found in the
# <code>Addressable::URI::CharacterClasses</code> module. The default
# value is the reserved plus unreserved character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [Regexp] upcase_encoded
# A string of characters that may already be percent encoded, and whose
# encodings should be upcased. This allows normalization of percent
# encodings for characters not included in the
# <code>character_class</code>.
#
# @return [String] The encoded component.
#
# @example
# Addressable::URI.encode_component("simple/example", "b-zB-Z0-9")
# => "simple%2Fex%61mple"
# Addressable::URI.encode_component("simple/example", /[^b-zB-Z0-9]/)
# => "simple%2Fex%61mple"
# Addressable::URI.encode_component(
# "simple/example", Addressable::URI::CharacterClasses::UNRESERVED
# )
# => "simple%2Fexample"
def self.encode_component(component, character_class=CharacterClassesRegexps::RESERVED_AND_UNRESERVED, upcase_encoded='')
  return nil if component.nil?

  # Coerce simple scalars with #to_s; everything else must be #to_str-able.
  begin
    if component.kind_of?(Symbol) ||
        component.kind_of?(Numeric) ||
        component.kind_of?(TrueClass) ||
        component.kind_of?(FalseClass)
      component = component.to_s
    else
      component = component.to_str
    end
  rescue TypeError, NoMethodError
    raise TypeError, "Can't convert #{component.class} into String."
  end if !component.is_a? String

  if ![String, Regexp].include?(character_class.class)
    raise TypeError,
      "Expected String or Regexp, got #{character_class.inspect}"
  end
  # A String character class is the *allowed* set; negate it into a regexp
  # matching the characters to encode.
  if character_class.kind_of?(String)
    character_class = /[^#{character_class}]/
  end
  # We can't perform regexps on invalid UTF sequences, but
  # here we need to, so switch to ASCII.
  component = component.dup
  component.force_encoding(Encoding::ASCII_8BIT)
  # Avoiding gsub! because there are edge cases with frozen strings
  component = component.gsub(character_class) do |char|
    # Each matched byte maps to its precomputed "%XX" form.
    SEQUENCE_UPCASED_PERCENT_ENCODING_TABLE[char.ord]
  end
  if upcase_encoded.length > 0
    # Upcase pre-existing percent escapes of the listed characters
    # (matched case-insensitively via both-case alternation below).
    upcase_encoded_chars = upcase_encoded.bytes.map do |byte|
      SEQUENCE_ENCODING_TABLE[byte]
    end
    component = component.gsub(/%(#{upcase_encoded_chars.join('|')})/,
                               &:upcase)
  end
  return component
end
class << self
  # Alternative name for encode_component.
  alias_method :escape_component, :encode_component
end
##
# Unencodes any percent encoded characters within a URI component.
# This method may be used for unencoding either components or full URIs,
# however, it is recommended to use the <code>unencode_component</code>
# alias when unencoding components.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI or component to unencode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @param [String] leave_encoded
# A string of characters to leave encoded. If a percent encoded character
# in this list is encountered then it will remain percent encoded.
#
# @return [String, Addressable::URI]
# The unencoded component or URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.unencode(uri, return_type=String, leave_encoded='')
  return nil if uri.nil?

  # Coerce to String, translating coercion failures into a TypeError.
  unless uri.is_a? String
    begin
      uri = uri.to_str
    rescue NoMethodError, TypeError
      raise TypeError, "Can't convert #{uri.class} into String."
    end
  end
  unless [String, ::Addressable::URI].include?(return_type)
    raise TypeError,
      "Expected Class (String or Addressable::URI), " +
      "got #{return_type.inspect}"
  end

  # Decode every "%XX" escape except those whose decoded character is
  # listed in leave_encoded (those stay escaped verbatim).
  result = uri.gsub(/%[0-9a-f]{2}/i) do |sequence|
    c = sequence[1..3].to_i(16).chr
    c.force_encoding(sequence.encoding)
    leave_encoded.include?(c) ? sequence : c
  end

  result.force_encoding(Encoding::UTF_8)
  return_type == String ? result : ::Addressable::URI.parse(result)
end
class << self
  # Alternative names for unencode; the "component" variants behave
  # identically and exist for call-site readability.
  alias_method :unescape, :unencode
  alias_method :unencode_component, :unencode
  alias_method :unescape_component, :unencode
end
##
# Normalizes the encoding of a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
# The characters which are not percent encoded. If a <code>String</code>
# is passed, the <code>String</code> must be formatted as a regular
# expression character class. (Do not include the surrounding square
# brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
# everything but the letters 'b' through 'z' and the numbers '0'
# through '9' to be percent encoded. If a <code>Regexp</code> is passed,
# the value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A
# set of useful <code>String</code> values may be found in the
# <code>Addressable::URI::CharacterClasses</code> module. The default
# value is the reserved plus unreserved character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [String] leave_encoded
# When <code>character_class</code> is a <code>String</code> then
# <code>leave_encoded</code> is a string of characters that should remain
# percent encoded while normalizing the component; if they appear percent
# encoded in the original component, then they will be upcased ("%2f"
# normalized to "%2F") but otherwise left alone.
#
# @return [String] The normalized component.
#
# @example
# Addressable::URI.normalize_component("simpl%65/%65xampl%65", "b-zB-Z")
# => "simple%2Fex%61mple"
# Addressable::URI.normalize_component(
# "simpl%65/%65xampl%65", /[^b-zB-Z]/
# )
# => "simple%2Fex%61mple"
# Addressable::URI.normalize_component(
# "simpl%65/%65xampl%65",
# Addressable::URI::CharacterClasses::UNRESERVED
# )
# => "simple%2Fexample"
# Addressable::URI.normalize_component(
# "one%20two%2fthree%26four",
# "0-9a-zA-Z &/",
# "/"
# )
# => "one two%2Fthree&four"
def self.normalize_component(component, character_class=
    CharacterClassesRegexps::RESERVED_AND_UNRESERVED,
    leave_encoded='')
  return nil if component.nil?

  begin
    component = component.to_str
  rescue NoMethodError, TypeError
    raise TypeError, "Can't convert #{component.class} into String."
  end if !component.is_a? String

  if ![String, Regexp].include?(character_class.class)
    raise TypeError,
      "Expected String or Regexp, got #{character_class.inspect}"
  end
  if character_class.kind_of?(String)
    leave_re = if leave_encoded.length > 0
      # Make "%" encodable, except where it begins one of the
      # leave_encoded escape sequences (either letter case).
      character_class = "#{character_class}%" unless character_class.include?('%')

      bytes = leave_encoded.bytes
      leave_encoded_pattern = bytes.map { |b| SEQUENCE_ENCODING_TABLE[b] }.join('|')
      "|%(?!#{leave_encoded_pattern}|#{leave_encoded_pattern.upcase})"
    end

    character_class = if leave_re
      /[^#{character_class}]#{leave_re}/
    else
      /[^#{character_class}]/
    end
  end
  # We can't perform regexps on invalid UTF sequences, but
  # here we need to, so switch to ASCII.
  component = component.dup
  component.force_encoding(Encoding::ASCII_8BIT)
  # Decode everything except leave_encoded, then re-encode uniformly so
  # equivalent encodings normalize to the same string.
  unencoded = self.unencode_component(component, String, leave_encoded)
  begin
    encoded = self.encode_component(
      unencoded.unicode_normalize(:nfc),
      character_class,
      leave_encoded
    )
  rescue ArgumentError
    # Likely malformed UTF-8: skip Unicode normalization.
    encoded = self.encode_component(unencoded)
  end
  encoded.force_encoding(Encoding::UTF_8)
  return encoded
end
##
# Percent encodes any special characters in the URI.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.encode(uri, return_type=String)
  return nil if uri.nil?

  begin
    uri = uri.to_str
  rescue NoMethodError, TypeError
    raise TypeError, "Can't convert #{uri.class} into String."
  end if !uri.is_a? String

  if ![String, ::Addressable::URI].include?(return_type)
    raise TypeError,
      "Expected Class (String or Addressable::URI), " +
      "got #{return_type.inspect}"
  end
  # Parse, then percent-encode each component with its own rule set.
  uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
  encoded_uri = Addressable::URI.new(
    :scheme => self.encode_component(uri_object.scheme,
      Addressable::URI::CharacterClassesRegexps::SCHEME),
    :authority => self.encode_component(uri_object.authority,
      Addressable::URI::CharacterClassesRegexps::AUTHORITY),
    :path => self.encode_component(uri_object.path,
      Addressable::URI::CharacterClassesRegexps::PATH),
    :query => self.encode_component(uri_object.query,
      Addressable::URI::CharacterClassesRegexps::QUERY),
    :fragment => self.encode_component(uri_object.fragment,
      Addressable::URI::CharacterClassesRegexps::FRAGMENT)
  )
  if return_type == String
    return encoded_uri.to_s
  elsif return_type == ::Addressable::URI
    return encoded_uri
  end
end
class << self
  # Alternative name for encode.
  alias_method :escape, :encode
end
##
# Normalizes the encoding of a URI. Characters within a hostname are
# not percent encoded to allow for internationalized domain names.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.normalized_encode(uri, return_type=String)
  # NOTE(review): unlike .encode there is no `return nil if uri.nil?`
  # guard here, so a nil uri raises TypeError below — confirm this
  # asymmetry is intentional.
  begin
    uri = uri.to_str
  rescue NoMethodError, TypeError
    raise TypeError, "Can't convert #{uri.class} into String."
  end if !uri.is_a? String

  if ![String, ::Addressable::URI].include?(return_type)
    raise TypeError,
      "Expected Class (String or Addressable::URI), " +
      "got #{return_type.inspect}"
  end
  uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
  # Fully decode each component first so equivalent encodings collapse
  # to the same result before re-encoding.
  components = {
    :scheme => self.unencode_component(uri_object.scheme),
    :user => self.unencode_component(uri_object.user),
    :password => self.unencode_component(uri_object.password),
    :host => self.unencode_component(uri_object.host),
    :port => (uri_object.port.nil? ? nil : uri_object.port.to_s),
    :path => self.unencode_component(uri_object.path),
    :query => self.unencode_component(uri_object.query),
    :fragment => self.unencode_component(uri_object.fragment)
  }
  components.each do |key, value|
    if value != nil
      begin
        components[key] = value.to_str.unicode_normalize(:nfc)
      rescue ArgumentError
        # Likely a malformed UTF-8 character, skip unicode normalization
        components[key] = value.to_str
      end
    end
  end
  # Host and port pass through unencoded (see the method doc: hostnames
  # are left alone to allow internationalized domain names).
  encoded_uri = Addressable::URI.new(
    :scheme => self.encode_component(components[:scheme],
      Addressable::URI::CharacterClassesRegexps::SCHEME),
    :user => self.encode_component(components[:user],
      Addressable::URI::CharacterClassesRegexps::UNRESERVED),
    :password => self.encode_component(components[:password],
      Addressable::URI::CharacterClassesRegexps::UNRESERVED),
    :host => components[:host],
    :port => components[:port],
    :path => self.encode_component(components[:path],
      Addressable::URI::CharacterClassesRegexps::PATH),
    :query => self.encode_component(components[:query],
      Addressable::URI::CharacterClassesRegexps::QUERY),
    :fragment => self.encode_component(components[:fragment],
      Addressable::URI::CharacterClassesRegexps::FRAGMENT)
  )
  if return_type == String
    return encoded_uri.to_s
  elsif return_type == ::Addressable::URI
    return encoded_uri
  end
end
##
# Encodes a set of key/value pairs according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [#to_hash, #to_ary] form_values
# The form values to encode.
#
# @param [TrueClass, FalseClass] sort
# Sort the key/value pairs prior to encoding.
# Defaults to <code>false</code>.
#
# @return [String]
# The encoded value.
def self.form_encode(form_values, sort=false)
  # Accept anything hash-like or array-like; reject the rest.
  if form_values.respond_to?(:to_hash)
    form_values = form_values.to_hash.to_a
  elsif form_values.respond_to?(:to_ary)
    form_values = form_values.to_ary
  else
    raise TypeError, "Can't convert #{form_values.class} into Array."
  end

  # Flatten array values into repeated key/value pairs, stringifying both.
  pairs = form_values.flat_map do |key, value|
    if value.kind_of?(Array)
      value.map { |v| [key.to_s, v.to_s] }
    else
      [[key.to_s, value.to_s]]
    end
  end

  # Useful for OAuth and optimizing caching systems
  pairs = pairs.sort if sort

  encoded_pairs = pairs.map do |key, value|
    # Line breaks are CRLF pairs
    encoded_key = self.encode_component(
      key.gsub(/(\r\n|\n|\r)/, "\r\n"),
      CharacterClassesRegexps::UNRESERVED
    ).gsub("%20", "+")
    encoded_value = self.encode_component(
      value.gsub(/(\r\n|\n|\r)/, "\r\n"),
      CharacterClassesRegexps::UNRESERVED
    ).gsub("%20", "+")
    "#{encoded_key}=#{encoded_value}"
  end
  encoded_pairs.join("&")
end
##
# Decodes a <code>String</code> according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [String, #to_str] encoded_value
# The form values to decode.
#
# @return [Array]
# The decoded values.
# This is not a <code>Hash</code> because of the possibility for
# duplicate keys.
def self.form_unencode(encoded_value)
  unless encoded_value.respond_to?(:to_str)
    raise TypeError, "Can't convert #{encoded_value.class} into String."
  end

  # Split into pairs first; "=" splits at most once per pair.
  pairs = encoded_value.to_str.split("&").map do |pair|
    pair.split("=", 2)
  end
  # Decode "+" as space, then normalize all line breaks to "\n".
  pairs.map do |key, value|
    decoded_key =
      key && self.unencode_component(key.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n")
    decoded_value =
      value && self.unencode_component(value.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n")
    [decoded_key, decoded_value]
  end
end
##
# Creates a new uri object from component parts.
#
# @option [String, #to_str] scheme The scheme component.
# @option [String, #to_str] user The user component.
# @option [String, #to_str] password The password component.
# @option [String, #to_str] userinfo
# The userinfo component. If this is supplied, the user and password
# components must be omitted.
# @option [String, #to_str] host The host component.
# @option [String, #to_str] port The port component.
# @option [String, #to_str] authority
# The authority component. If this is supplied, the user, password,
# userinfo, host, and port components must be omitted.
# @option [String, #to_str] path The path component.
# @option [String, #to_str] query The query component.
# @option [String, #to_str] fragment The fragment component.
#
# @return [Addressable::URI] The constructed URI object.
def initialize(options={})
  # Composite components are mutually exclusive with their parts.
  if options.has_key?(:authority)
    if (options.keys & [:userinfo, :user, :password, :host, :port]).any?
      raise ArgumentError,
        "Cannot specify both an authority and any of the components " +
        "within the authority."
    end
  end
  if options.has_key?(:userinfo)
    if (options.keys & [:user, :password]).any?
      raise ArgumentError,
        "Cannot specify both a userinfo and either the user or password."
    end
  end

  reset_ivs

  defer_validation do
    # Bunch of crazy logic required because of the composite components
    # like userinfo and authority.  Assignment order matters: parts are
    # set before the composites that would overwrite them.
    self.scheme = options[:scheme] if options[:scheme]
    self.user = options[:user] if options[:user]
    self.password = options[:password] if options[:password]
    self.userinfo = options[:userinfo] if options[:userinfo]
    self.host = options[:host] if options[:host]
    self.port = options[:port] if options[:port]
    self.authority = options[:authority] if options[:authority]
    self.path = options[:path] if options[:path]
    self.query = options[:query] if options[:query]
    self.query_values = options[:query_values] if options[:query_values]
    self.fragment = options[:fragment] if options[:fragment]
  end

  to_s # force path validation
end
##
# Freeze URI, initializing instance variables.
#
# @return [Addressable::URI] The frozen URI object.
def freeze
  # Populate every lazily-memoized component (plus the cached hash) now,
  # since the backing instance variables can no longer be written once
  # the object is frozen.
  %i[
    normalized_scheme normalized_user normalized_password
    normalized_userinfo normalized_host normalized_port
    normalized_authority normalized_site normalized_path
    normalized_query normalized_fragment hash
  ].each { |component| send(component) }
  super
end
##
# The scheme component for this URI.
#
# @return [String, nil] The scheme component (nil when absent).
attr_reader :scheme
##
# The scheme component for this URI, normalized.
#
# @return [String] The scheme component, normalized.
def normalized_scheme
return nil unless self.scheme
if @normalized_scheme == NONE
@normalized_scheme = if self.scheme =~ /^\s*ssh\+svn\s*$/i
"svn+ssh".dup
else
Addressable::URI.normalize_component(
self.scheme.strip.downcase,
Addressable::URI::NormalizeCharacterClasses::SCHEME
)
end
end
# All normalized values should be UTF-8
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | true |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/template.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/template.rb | # frozen_string_literal: true
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
require "addressable/version"
require "addressable/uri"
module Addressable
##
# This is an implementation of a URI template based on
# RFC 6570 (http://tools.ietf.org/html/rfc6570).
class Template
# Constants used throughout the template code.
anything =
  Addressable::URI::CharacterClasses::RESERVED +
  Addressable::URI::CharacterClasses::UNRESERVED
# Characters allowed in a variable name: ALPHA / DIGIT / "_".
variable_char_class =
  Addressable::URI::CharacterClasses::ALPHA +
  Addressable::URI::CharacterClasses::DIGIT + '_'
var_char =
  "(?>(?:[#{variable_char_class}]|%[a-fA-F0-9][a-fA-F0-9])+)"
RESERVED =
  "(?:[#{anything}]|%[a-fA-F0-9][a-fA-F0-9])"
UNRESERVED =
  "(?:[#{
    Addressable::URI::CharacterClasses::UNRESERVED
  }]|%[a-fA-F0-9][a-fA-F0-9])"
# A variable name is "."-separated var_chars; a varspec additionally
# allows a "*" or ":<digits>" modifier (captured as group 2).
variable =
  "(?:#{var_char}(?:\\.?#{var_char})*)"
varspec =
  "(?:(#{variable})(\\*|:\\d+)?)"
VARNAME =
  /^#{variable}$/
VARSPEC =
  /^#{varspec}$/
VARIABLE_LIST =
  /^#{varspec}(?:,#{varspec})*$/
# Operators recognized after "{" in an expression (RFC 6570).
operator =
  "+#./;?&=,!@|"
EXPRESSION =
  /\{([#{operator}])?(#{varspec}(?:,#{varspec})*)\}/
# Literal emitted before an expanded expression, keyed by operator.
LEADERS = {
  '?' => '?',
  '/' => '/',
  '#' => '#',
  '.' => '.',
  ';' => ';',
  '&' => '&'
}
# Separator between expanded values, keyed by operator.
JOINERS = {
  '?' => '&',
  '.' => '.',
  ';' => ';',
  '&' => '&',
  '/' => '/'
}
##
# Raised if an invalid template value is supplied.
class InvalidTemplateValueError < StandardError
end
##
# Raised if an invalid template operator is used in a pattern.
class InvalidTemplateOperatorError < StandardError
end
##
# Raised if processing of a template operator has to be aborted.
class TemplateOperatorAbortedError < StandardError
end
##
# This class represents the data that is extracted when a Template
# is matched against a URI.
class MatchData
  ##
  # Creates a new MatchData object.
  # MatchData objects should never be instantiated directly.
  #
  # @param [Addressable::URI] uri
  #   The URI that the template was matched against.
  # @param [Addressable::Template] template
  #   The Template that produced the match.
  # @param [Hash] mapping
  #   The variable => value mapping extracted by the match.
  def initialize(uri, template, mapping)
    # Frozen copies: a MatchData is an immutable result object.
    @uri = uri.dup.freeze
    @template = template
    @mapping = mapping.dup.freeze
  end

  ##
  # @return [Addressable::URI]
  #   The URI that the Template was matched against.
  attr_reader :uri

  ##
  # @return [Addressable::Template]
  #   The Template used for the match.
  attr_reader :template

  ##
  # @return [Hash]
  #   The mapping that resulted from the match.
  #   Note that this mapping does not include keys or values for
  #   variables that appear in the Template, but are not present
  #   in the URI.
  attr_reader :mapping

  ##
  # @return [Array]
  #   The list of variables that were present in the Template.
  #   Note that this list will include variables which do not appear
  #   in the mapping because they were not present in URI.
  def variables
    self.template.variables
  end
  alias_method :keys, :variables
  alias_method :names, :variables

  ##
  # @return [Array]
  #   The list of values that were captured by the Template.
  #   Note that this list will include nils for any variables which
  #   were in the Template, but did not appear in the URI.
  def values
    # Memoized; ordered to correspond with #variables.
    @values ||= self.variables.inject([]) do |accu, key|
      accu << self.mapping[key]
      accu
    end
  end
  alias_method :captures, :values

  ##
  # Accesses captured values by name or by index.
  #
  # @param [String, Symbol, Integer] key
  #   Capture index or name. Note that when accessing by index
  #   0, the full URI will be returned. The intention is to mimic
  #   the ::MatchData#[] behavior.
  #
  # @param [#to_int, nil] len
  #   If provided, an array of values will be returned with the given
  #   parameter used as length.
  #
  # @return [Array, String, nil]
  #   The captured value corresponding to the index or name. If the
  #   value was not provided or the key is unknown, nil will be
  #   returned.
  #
  #   If the second parameter is provided, an array of that length will
  #   be returned instead.
  def [](key, len = nil)
    if len
      to_a[key, len]
    elsif String === key or Symbol === key
      mapping[key.to_s]
    else
      to_a[key]
    end
  end

  ##
  # @return [Array]
  #   Array with the matched URI as first element followed by the captured
  #   values.
  def to_a
    [to_s, *values]
  end

  ##
  # @return [String]
  #   The matched URI as String.
  def to_s
    uri.to_s
  end
  alias_method :string, :to_s

  # Returns multiple captured values at once.
  #
  # @param [String, Symbol, Integer] *indexes
  #   Indices of the captures to be returned
  #
  # @return [Array]
  #   Values corresponding to given indices.
  #
  # @see Addressable::Template::MatchData#[]
  def values_at(*indexes)
    indexes.map { |i| self[i] }
  end

  ##
  # Returns a <tt>String</tt> representation of the MatchData's state.
  #
  # @return [String] The MatchData's state, as a <tt>String</tt>.
  def inspect
    sprintf("#<%s:%#0x RESULT:%s>",
      self.class.to_s, self.object_id, self.mapping.inspect)
  end

  ##
  # Dummy method for code expecting a ::MatchData instance
  #
  # @return [String] An empty string.
  def pre_match
    ""
  end
  alias_method :post_match, :pre_match
end
##
# Creates a new <tt>Addressable::Template</tt> object.
#
# @param [#to_str] pattern The URI Template pattern.
#
# @return [Addressable::Template] The initialized Template object.
def initialize(pattern)
  unless pattern.respond_to?(:to_str)
    raise TypeError, "Can't convert #{pattern.class} into String."
  end
  # Defensive copy: the pattern is immutable for the Template's lifetime.
  @pattern = pattern.to_str.dup.freeze
end
##
# Freeze URI, initializing instance variables.
#
# @return [Addressable::URI] The frozen URI object.
def freeze
  # Force the lazily-memoized state to be computed up front; otherwise
  # the first accessor call after freezing would fail to memoize.
  variables
  variable_defaults
  named_captures
  super
end
##
# @return [String] The Template object's pattern.
#   The pattern string is duplicated and frozen at initialization time.
attr_reader :pattern
##
# Returns a <tt>String</tt> representation of the Template object's state.
#
# @return [String] The Template object's state, as a <tt>String</tt>.
def inspect
  format(
    "#<%s:%#0x PATTERN:%s>",
    self.class.to_s, object_id, pattern
  )
end
##
# Returns <code>true</code> if the Template objects are equal. This method
# does NOT normalize either Template before doing the comparison.
#
# @param [Object] template The Template to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the Templates are equivalent, <code>false</code>
#   otherwise.
def ==(template)
  # Anything that is not a Template compares unequal; otherwise compare
  # the raw pattern strings verbatim.
  template.kind_of?(Template) && self.pattern == template.pattern
end

##
# Addressable::Template makes no distinction between `==` and `eql?`.
#
# @see #==
alias_method :eql?, :==
##
# Extracts a mapping from the URI using a URI Template pattern.
#
# @param [Addressable::URI, #to_str] uri
#   The URI to extract from.
#
# @param [#restore, #match] processor
#   A template processor object may optionally be supplied.
#
#   The object should respond to either the <tt>restore</tt> or
#   <tt>match</tt> messages or both. The <tt>restore</tt> method should
#   take two parameters: `[String] name` and `[String] value`.
#   The <tt>restore</tt> method should reverse any transformations that
#   have been performed on the value to ensure a valid URI.
#   The <tt>match</tt> method should take a single
#   parameter: `[String] name`. The <tt>match</tt> method should return
#   a <tt>String</tt> containing a regular expression capture group for
#   matching on that particular variable. The default value is `".*?"`.
#   The <tt>match</tt> method has no effect on multivariate operator
#   expansions.
#
# @return [Hash, NilClass]
#   The <tt>Hash</tt> mapping that was extracted from the URI, or
#   <tt>nil</tt> if the URI didn't match the template.
#
# @example
#   class ExampleProcessor
#     def self.restore(name, value)
#       return value.gsub(/\+/, " ") if name == "query"
#       return value
#     end
#
#     def self.match(name)
#       return ".*?" if name == "first"
#       return ".*"
#     end
#   end
#
#   uri = Addressable::URI.parse(
#     "http://example.com/search/an+example+search+query/"
#   )
#   Addressable::Template.new(
#     "http://example.com/search/{query}/"
#   ).extract(uri, ExampleProcessor)
#   #=> {"query" => "an example search query"}
#
#   uri = Addressable::URI.parse("http://example.com/a/b/c/")
#   Addressable::Template.new(
#     "http://example.com/{first}/{second}/"
#   ).extract(uri, ExampleProcessor)
#   #=> {"first" => "a", "second" => "b/c"}
#
#   uri = Addressable::URI.parse("http://example.com/a/b/c/")
#   Addressable::Template.new(
#     "http://example.com/{first}/{-list|/|second}/"
#   ).extract(uri)
#   #=> {"first" => "a", "second" => ["b", "c"]}
def extract(uri, processor=nil)
  # Delegate to #match and unwrap the mapping; nil propagates naturally
  # when the URI did not match the template.
  match_data = match(uri, processor)
  match_data && match_data.mapping
end
##
# Extracts match data from the URI using a URI Template pattern.
#
# @param [Addressable::URI, #to_str] uri
#   The URI to extract from.
#
# @param [#restore, #match] processor
#   A template processor object may optionally be supplied.
#
#   The object should respond to either the <tt>restore</tt> or
#   <tt>match</tt> messages or both. The <tt>restore</tt> method should
#   take two parameters: `[String] name` and `[String] value`.
#   The <tt>restore</tt> method should reverse any transformations that
#   have been performed on the value to ensure a valid URI.
#   The <tt>match</tt> method should take a single
#   parameter: `[String] name`. The <tt>match</tt> method should return
#   a <tt>String</tt> containing a regular expression capture group for
#   matching on that particular variable. The default value is `".*?"`.
#   The <tt>match</tt> method has no effect on multivariate operator
#   expansions.
#
# @return [Addressable::Template::MatchData, NilClass]
#   The <tt>MatchData</tt> object (whose #mapping holds the extracted
#   values), or <tt>nil</tt> if the URI didn't match the template.
#
# @example
#   class ExampleProcessor
#     def self.restore(name, value)
#       return value.gsub(/\+/, " ") if name == "query"
#       return value
#     end
#
#     def self.match(name)
#       return ".*?" if name == "first"
#       return ".*"
#     end
#   end
#
#   uri = Addressable::URI.parse(
#     "http://example.com/search/an+example+search+query/"
#   )
#   match = Addressable::Template.new(
#     "http://example.com/search/{query}/"
#   ).match(uri, ExampleProcessor)
#   match.variables
#   #=> ["query"]
#   match.captures
#   #=> ["an example search query"]
#
#   uri = Addressable::URI.parse("http://example.com/a/b/c/")
#   match = Addressable::Template.new(
#     "http://example.com/{first}/{+second}/"
#   ).match(uri, ExampleProcessor)
#   match.variables
#   #=> ["first", "second"]
#   match.captures
#   #=> ["a", "b/c"]
#
#   uri = Addressable::URI.parse("http://example.com/a/b/c/")
#   match = Addressable::Template.new(
#     "http://example.com/{first}{/second*}/"
#   ).match(uri)
#   match.variables
#   #=> ["first", "second"]
#   match.captures
#   #=> ["a", ["b", "c"]]
def match(uri, processor=nil)
  uri = Addressable::URI.parse(uri) unless uri.is_a?(Addressable::URI)
  mapping = {}
  # First, we need to process the pattern, and extract the values.
  expansions, expansion_regexp =
    parse_template_pattern(pattern, processor)
  return nil unless uri.to_str.match(expansion_regexp)
  # scan yields one capture per varspec in pattern order; flatten gives a
  # flat list that is walked in lockstep (via `index`) with the varspecs.
  unparsed_values = uri.to_str.scan(expansion_regexp).flatten
  if uri.to_str == pattern
    # The URI is byte-identical to the pattern (no expressions matched
    # anything), so there is nothing to capture.
    return Addressable::Template::MatchData.new(uri, self, mapping)
  elsif expansions.size > 0
    index = 0
    expansions.each do |expansion|
      _, operator, varlist = *expansion.match(EXPRESSION)
      varlist.split(',').each do |varspec|
        _, name, modifier = *varspec.match(VARSPEC)
        # Ensure every declared variable has a key, even if nothing was
        # captured for it (value stays nil).
        mapping[name] ||= nil
        case operator
        when nil, '+', '#', '/', '.'
          # Simple/reserved/fragment/path operators: the capture is the
          # raw value; an explode ('*') modifier splits on the operator's
          # joiner character.
          unparsed_value = unparsed_values[index]
          name = varspec[VARSPEC, 1]
          value = unparsed_value
          value = value.split(JOINERS[operator]) if value && modifier == '*'
        when ';', '?', '&'
          # Form-style operators capture "name=value" pairs.
          if modifier == '*'
            # Exploded form: split into pairs and build a Hash; a pair
            # without '=' maps to the empty string.
            if unparsed_values[index]
              value = unparsed_values[index].split(JOINERS[operator])
              value = value.inject({}) do |acc, v|
                key, val = v.split('=')
                val = "" if val.nil?
                acc[key] = val
                acc
              end
            end
          else
            # Non-exploded form: the capture itself is "name=value", so
            # the variable name is taken from the capture, not the spec.
            if (unparsed_values[index])
              name, value = unparsed_values[index].split('=')
              value = "" if value.nil?
            end
          end
        end
        # A processor's restore hook reverses any custom encoding.
        if processor != nil && processor.respond_to?(:restore)
          value = processor.restore(name, value)
        end
        # Without a processor, percent-decode the captured value(s).
        if processor == nil
          if value.is_a?(Hash)
            value = value.inject({}){|acc, (k, v)|
              acc[Addressable::URI.unencode_component(k)] =
                Addressable::URI.unencode_component(v)
              acc
            }
          elsif value.is_a?(Array)
            value = value.map{|v| Addressable::URI.unencode_component(v) }
          else
            value = Addressable::URI.unencode_component(value)
          end
        end
        if !mapping.has_key?(name) || mapping[name].nil?
          # Doesn't exist, set to value (even if value is nil)
          mapping[name] = value
        end
        # One capture group per varspec — advance regardless of outcome.
        index = index + 1
      end
    end
    return Addressable::Template::MatchData.new(uri, self, mapping)
  else
    return nil
  end
end
##
# Expands a URI template into another URI template.
#
# @param [Hash] mapping The mapping that corresponds to the pattern.
# @param [#validate, #transform] processor
#   An optional processor object may be supplied.
# @param [Boolean] normalize_values
#   Optional flag to enable/disable unicode normalization. Default: true
#
# The object should respond to either the <tt>validate</tt> or
# <tt>transform</tt> messages or both. Both the <tt>validate</tt> and
# <tt>transform</tt> methods should take two parameters: <tt>name</tt> and
# <tt>value</tt>. The <tt>validate</tt> method should return <tt>true</tt>
# or <tt>false</tt>; <tt>true</tt> if the value of the variable is valid,
# <tt>false</tt> otherwise. An <tt>InvalidTemplateValueError</tt>
# exception will be raised if the value is invalid. The <tt>transform</tt>
# method should return the transformed variable value as a <tt>String</tt>.
# If a <tt>transform</tt> method is used, the value will not be percent
# encoded automatically. Unicode normalization will be performed both
# before and after sending the value to the transform method.
#
# @return [Addressable::Template] The partially expanded URI template.
#
# @example
#   Addressable::Template.new(
#     "http://example.com/{one}/{two}/"
#   ).partial_expand({"one" => "1"}).pattern
#   #=> "http://example.com/1/{two}/"
#
#   Addressable::Template.new(
#     "http://example.com/{?one,two}/"
#   ).partial_expand({"one" => "1"}).pattern
#   #=> "http://example.com/?one=1{&two}/"
#
#   Addressable::Template.new(
#     "http://example.com/{?one,two,three}/"
#   ).partial_expand({"one" => "1", "three" => 3}).pattern
#   #=> "http://example.com/?one=1{&two}&three=3"
def partial_expand(mapping, processor=nil, normalize_values=true)
  expanded = pattern.dup
  normalized_mapping = normalize_keys(mapping)
  # Each {…} expression is rewritten in place: supplied variables expand,
  # missing ones are re-emitted as (possibly reduced) expressions.
  expanded.gsub!(EXPRESSION) do |capture|
    transform_partial_capture(normalized_mapping, capture, processor,
                              normalize_values)
  end
  Addressable::Template.new(expanded)
end
##
# Expands a URI template into a full URI.
#
# @param [Hash] mapping The mapping that corresponds to the pattern.
# @param [#validate, #transform] processor
#   An optional processor object may be supplied.
# @param [Boolean] normalize_values
#   Optional flag to enable/disable unicode normalization. Default: true
#
# The object should respond to either the <tt>validate</tt> or
# <tt>transform</tt> messages or both. Both the <tt>validate</tt> and
# <tt>transform</tt> methods should take two parameters: <tt>name</tt> and
# <tt>value</tt>. The <tt>validate</tt> method should return <tt>true</tt>
# or <tt>false</tt>; <tt>true</tt> if the value of the variable is valid,
# <tt>false</tt> otherwise. An <tt>InvalidTemplateValueError</tt>
# exception will be raised if the value is invalid. The <tt>transform</tt>
# method should return the transformed variable value as a <tt>String</tt>.
# If a <tt>transform</tt> method is used, the value will not be percent
# encoded automatically. Unicode normalization will be performed both
# before and after sending the value to the transform method.
#
# @return [Addressable::URI] The expanded URI template.
#
# @example
#   class ExampleProcessor
#     def self.validate(name, value)
#       return !!(value =~ /^[\w ]+$/) if name == "query"
#       return true
#     end
#
#     def self.transform(name, value)
#       return value.gsub(/ /, "+") if name == "query"
#       return value
#     end
#   end
#
#   Addressable::Template.new(
#     "http://example.com/search/{query}/"
#   ).expand(
#     {"query" => "an example search query"},
#     ExampleProcessor
#   ).to_str
#   #=> "http://example.com/search/an+example+search+query/"
#
#   Addressable::Template.new(
#     "http://example.com/search/{query}/"
#   ).expand(
#     {"query" => "an example search query"}
#   ).to_str
#   #=> "http://example.com/search/an%20example%20search%20query/"
#
#   Addressable::Template.new(
#     "http://example.com/search/{query}/"
#   ).expand(
#     {"query" => "bogus!"},
#     ExampleProcessor
#   ).to_str
#   #=> Addressable::Template::InvalidTemplateValueError
def expand(mapping, processor=nil, normalize_values=true)
  expanded = pattern.dup
  normalized_mapping = normalize_keys(mapping)
  # Replace every {…} expression with its expanded value.
  expanded.gsub!(EXPRESSION) do |capture|
    transform_capture(normalized_mapping, capture, processor,
                      normalize_values)
  end
  Addressable::URI.parse(expanded)
end
##
# Returns an Array of variables used within the template pattern.
# The variables are listed in the Array in the order they appear within
# the pattern. Multiple occurrences of a variable within a pattern are
# not represented in this Array.
#
# @return [Array] The variables present in the template's pattern.
def variables
@variables ||= ordered_variable_defaults.map { |var, val| var }.uniq
end
alias_method :keys, :variables
alias_method :names, :variables
##
# Returns a mapping of variables to their default values specified
# in the template. Variables without defaults are not returned.
#
# @return [Hash] Mapping of template variables to their defaults
def variable_defaults
  @variable_defaults ||= begin
    with_defaults =
      ordered_variable_defaults.reject { |_var, default| default.nil? }
    Hash[*with_defaults.flatten]
  end
end
##
# Coerces a template into a `Regexp` object. This regular expression will
# behave very similarly to the actual template, and should match the same
# URI values, but it cannot fully handle, for example, values that would
# extract to an `Array`.
#
# @return [Regexp] A regular expression which should match the template.
def to_regexp
  # parse_template_pattern returns [expansions, regexp_source]; only the
  # source string is needed here.
  _expansions, regexp_source = parse_template_pattern(pattern)
  Regexp.new(regexp_source)
end
##
# Returns the source of the coerced `Regexp`.
#
# @return [String] The source of the `Regexp` given by {#to_regexp}.
#
# @api private
def source
  to_regexp.source
end
##
# Returns the named captures of the coerced `Regexp`.
#
# @return [Hash] The named captures of the `Regexp` given by {#to_regexp}.
#
# @api private
def named_captures
  to_regexp.named_captures
end
private
##
# Returns the variable names used in the pattern, in order of appearance,
# with one entry per occurrence (duplicates preserved; callers de-dup).
#
# @return [Array<String>] Variable names, memoized after first call.
def ordered_variable_defaults
  @ordered_variable_defaults ||= begin
    expansions, _regexp_source = parse_template_pattern(pattern)
    expansions.flat_map do |capture|
      _, _operator, varlist = *capture.match(EXPRESSION)
      varlist.split(',').map { |varspec| varspec[VARSPEC, 1] }
    end
  end
end
##
# Loops through each capture and expands any values available in mapping
#
# @param [Hash] mapping
#   Set of keys to expand
# @param [String] capture
#   The expression to expand
# @param [#validate, #transform] processor
#   An optional processor object may be supplied.
# @param [Boolean] normalize_values
#   Optional flag to enable/disable unicode normalization. Default: true
#
# The object should respond to either the <tt>validate</tt> or
# <tt>transform</tt> messages or both. Both the <tt>validate</tt> and
# <tt>transform</tt> methods should take two parameters: <tt>name</tt> and
# <tt>value</tt>. The <tt>validate</tt> method should return <tt>true</tt>
# or <tt>false</tt>; <tt>true</tt> if the value of the variable is valid,
# <tt>false</tt> otherwise. An <tt>InvalidTemplateValueError</tt> exception
# will be raised if the value is invalid. The <tt>transform</tt> method
# should return the transformed variable value as a <tt>String</tt>. If a
# <tt>transform</tt> method is used, the value will not be percent encoded
# automatically. Unicode normalization will be performed both before and
# after sending the value to the transform method.
#
# @return [String] The expanded expression
def transform_partial_capture(mapping, capture, processor = nil,
                              normalize_values = true)
  _, operator, varlist = *capture.match(EXPRESSION)
  vars = varlist.split(",")
  if operator == "?"
    # partial expansion of form style query variables sometimes requires a
    # slight reordering of the variables to produce a valid url.
    # The first variable that actually has a value is moved to the front,
    # so the expanded output starts with "?name=value" rather than with a
    # still-unexpanded "{?...}" expression.
    first_to_expand = vars.find { |varspec|
      _, name, _ = *varspec.match(VARSPEC)
      mapping.key?(name) && !mapping[name].nil?
    }
    vars = [first_to_expand] + vars.reject {|varspec| varspec == first_to_expand} if first_to_expand
  end
  # Expand each varspec independently: known variables expand to their
  # value, unknown ones are re-emitted as a single-variable expression.
  vars.
    inject("".dup) do |acc, varspec|
      _, name, _ = *varspec.match(VARSPEC)
      next_val = if mapping.key? name
                   transform_capture(mapping, "{#{operator}#{varspec}}",
                                     processor, normalize_values)
                 else
                   "{#{operator}#{varspec}}"
                 end
      # If we've already expanded at least one '?' operator with non-empty
      # value, change to '&'
      # (NOTE: this deliberately mutates `operator` for later iterations.)
      operator = "&" if (operator == "?") && (next_val != "")
      acc << next_val
    end
end
##
# Transforms a mapped value so that values can be substituted into the
# template.
#
# @param [Hash] mapping The mapping to replace captures
# @param [String] capture
#   The expression to replace
# @param [#validate, #transform] processor
#   An optional processor object may be supplied.
# @param [Boolean] normalize_values
#   Optional flag to enable/disable unicode normalization. Default: true
#
#
# The object should respond to either the <tt>validate</tt> or
# <tt>transform</tt> messages or both. Both the <tt>validate</tt> and
# <tt>transform</tt> methods should take two parameters: <tt>name</tt> and
# <tt>value</tt>. The <tt>validate</tt> method should return <tt>true</tt>
# or <tt>false</tt>; <tt>true</tt> if the value of the variable is valid,
# <tt>false</tt> otherwise. An <tt>InvalidTemplateValueError</tt> exception
# will be raised if the value is invalid. The <tt>transform</tt> method
# should return the transformed variable value as a <tt>String</tt>. If a
# <tt>transform</tt> method is used, the value will not be percent encoded
# automatically. Unicode normalization will be performed both before and
# after sending the value to the transform method.
#
# @return [String] The expanded expression
def transform_capture(mapping, capture, processor=nil,
                      normalize_values=true)
  _, operator, varlist = *capture.match(EXPRESSION)
  # Build a list of [name, transformed_value] tuples; variables that are
  # nil or an empty Hash are skipped entirely.
  return_value = varlist.split(',').inject([]) do |acc, varspec|
    _, name, modifier = *varspec.match(VARSPEC)
    value = mapping[name]
    unless value == nil || value == {}
      # '+' and '#' operators expand without escaping reserved characters.
      allow_reserved = %w(+ #).include?(operator)
      # Common primitives where the .to_s output is well-defined
      if Numeric === value || Symbol === value ||
          value == true || value == false
        value = value.to_s
      end
      # A ':n' prefix modifier truncates the value to n characters.
      length = modifier.gsub(':', '').to_i if modifier =~ /^:\d+/
      unless (Hash === value) ||
        value.respond_to?(:to_ary) || value.respond_to?(:to_str)
        raise TypeError,
          "Can't convert #{value.class} into String or Array."
      end
      value = normalize_value(value) if normalize_values
      if processor == nil || !processor.respond_to?(:transform)
        # Handle percent escaping
        if allow_reserved
          encode_map =
            Addressable::URI::CharacterClasses::RESERVED +
            Addressable::URI::CharacterClasses::UNRESERVED
        else
          encode_map = Addressable::URI::CharacterClasses::UNRESERVED
        end
        if value.kind_of?(Array)
          # Encode each element; without the explode ('*') modifier the
          # elements are joined into a single comma-separated string.
          transformed_value = value.map do |val|
            if length
              Addressable::URI.encode_component(val[0...length], encode_map)
            else
              Addressable::URI.encode_component(val, encode_map)
            end
          end
          unless modifier == "*"
            transformed_value = transformed_value.join(',')
          end
        elsif value.kind_of?(Hash)
          # Pairs become "k=v" when exploded, "k,v" otherwise.
          transformed_value = value.map do |key, val|
            if modifier == "*"
              "#{
                Addressable::URI.encode_component( key, encode_map)
              }=#{
                Addressable::URI.encode_component( val, encode_map)
              }"
            else
              "#{
                Addressable::URI.encode_component( key, encode_map)
              },#{
                Addressable::URI.encode_component( val, encode_map)
              }"
            end
          end
          unless modifier == "*"
            transformed_value = transformed_value.join(',')
          end
        else
          # Scalar (string-like) value, optionally truncated.
          if length
            transformed_value = Addressable::URI.encode_component(
              value[0...length], encode_map)
          else
            transformed_value = Addressable::URI.encode_component(
              value, encode_map)
          end
        end
      end
      # Process, if we've got a processor
      if processor != nil
        if processor.respond_to?(:validate)
          if !processor.validate(name, value)
            display_value = value.kind_of?(Array) ? value.inspect : value
            raise InvalidTemplateValueError,
              "#{name}=#{display_value} is an invalid template value."
          end
        end
        if processor.respond_to?(:transform)
          # A transform hook replaces the default percent-encoding above.
          transformed_value = processor.transform(name, value)
          if normalize_values
            transformed_value = normalize_value(transformed_value)
          end
        end
      end
      acc << [name, transformed_value]
    end
    acc
  end
  return "" if return_value.empty?
  join_values(operator, return_value)
end
##
# Takes a set of values, and joins them together based on the
# operator.
#
# @param [String, Nil] operator One of the operators from the set
#   (?,&,+,#,;,/,.), or nil if there wasn't one.
# @param [Array] return_value
#   The set of return values (as [variable_name, value] tuples) that will
#   be joined together.
#
# @return [String] The transformed mapped value
def join_values(operator, return_value)
  prefix = LEADERS.fetch(operator, '')
  separator = JOINERS.fetch(operator, ',')
  case operator
  when '&', '?'
    # Form-style: each tuple renders as name=value pairs.
    rendered = return_value.map do |name, value|
      if value.is_a?(Array) && value.first =~ /=/
        # Already name=value pairs (exploded Hash) — join as-is.
        value.join(separator)
      elsif value.is_a?(Array)
        value.map { |item| "#{name}=#{item}" }.join(separator)
      else
        "#{name}=#{value}"
      end
    end
    prefix + rendered.join(separator)
  when ';'
    # Path-style parameters: every entry carries its own ';' prefix, and
    # an empty value renders as ";name" with no '='.
    return_value.map { |name, value|
      if value.is_a?(Array) && value.first =~ /=/
        ';' + value.join(";")
      elsif value.is_a?(Array)
        ';' + value.map { |item| "#{name}=#{item}" }.join(";")
      else
        value && value != '' ? ";#{name}=#{value}" : ";#{name}"
      end
    }.join
  else
    # Simple/reserved/label/path operators: values only, no names.
    prefix + return_value.map { |_name, value| value }.join(separator)
  end
end
##
# Recursively applies Unicode NFC normalization to a value.
#
# @param [Hash, Array, String] value
#   Normalizes unicode keys and values with String#unicode_normalize (NFC)
#
# @return [Hash, Array, String] The normalized values
def normalize_value(value)
  # Handle unicode normalization
  if value.respond_to?(:to_ary)
    # NOTE: map! intentionally normalizes the array in place.
    value.to_ary.map! { |element| normalize_value(element) }
  elsif value.kind_of?(Hash)
    value.each_with_object({}) do |(key, val), normalized|
      normalized[normalize_value(key)] = normalize_value(val)
    end
  else
    text = value.kind_of?(String) ? value : value.to_s
    unless text.encoding == Encoding::UTF_8
      text = text.dup.force_encoding(Encoding::UTF_8)
    end
    text.unicode_normalize(:nfc)
  end
end
##
# Generates a hash with string keys
#
# @param [Hash] mapping A mapping hash to normalize
#
# @return [Hash]
# A hash with stringified keys
def normalize_keys(mapping)
return mapping.inject({}) do |accu, pair|
name, value = pair
if Symbol === name
name = name.to_s
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | true |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/idna.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/idna.rb | # frozen_string_literal: true
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
# Prefer the native (libidn-backed) IDNA implementation when available;
# it is faster and more complete than the pure-Ruby fallback.
begin
  require "addressable/idna/native"
rescue LoadError
  # libidn or the idn gem was not available, fall back on a pure-Ruby
  # implementation...
  require "addressable/idna/pure"
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/idna/pure.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/idna/pure.rb | # frozen_string_literal: true
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
module Addressable
module IDNA
# This module is loosely based on idn_actionmailer by Mick Staugaard,
# the unicode library by Yoshida Masato, and the punycode implementation
# by Kazuhiro Nishiyama. Most of the code was copied verbatim, but
# some reformatting was done, and some translation from C was done.
#
# Without their code to work from as a base, we'd all still be relying
# on the presence of libidn. Which nobody ever seems to have installed.
#
# Original sources:
# http://github.com/staugaard/idn_actionmailer
# http://www.yoshidam.net/Ruby.html#unicode
# http://rubyforge.org/frs/?group_id=2550
# ASCII-Compatible Encoding prefix that marks a Punycode-encoded domain
# label (RFC 3490, section 5).
ACE_PREFIX = "xn--"
# Matches an entire well-formed UTF-8 byte string (RFC 3629). The /n flag
# forces ASCII-8BIT semantics so the byte classes below work as intended,
# and /x allows this commented layout (comments are ignored by the regex).
# FIX: the "planes 4nil5" comments were garbled; the [\xF1-\xF3] branch
# covers planes 4-15.
UTF8_REGEX = /\A(?:
[\x09\x0A\x0D\x20-\x7E] # ASCII
| [\xC2-\xDF][\x80-\xBF] # non-overlong 2-byte
| \xE0[\xA0-\xBF][\x80-\xBF] # excluding overlongs
| [\xE1-\xEC\xEE\xEF][\x80-\xBF]{2} # straight 3-byte
| \xED[\x80-\x9F][\x80-\xBF] # excluding surrogates
| \xF0[\x90-\xBF][\x80-\xBF]{2} # planes 1-3
| [\xF1-\xF3][\x80-\xBF]{3} # planes 4-15
| \xF4[\x80-\x8F][\x80-\xBF]{2} # plane 16
)*\z/mnx
# Matches any single multibyte (non-ASCII) UTF-8 sequence anywhere in the
# string; used to detect whether a label needs Punycode encoding at all.
UTF8_REGEX_MULTIBYTE = /(?:
[\xC2-\xDF][\x80-\xBF] # non-overlong 2-byte
| \xE0[\xA0-\xBF][\x80-\xBF] # excluding overlongs
| [\xE1-\xEC\xEE\xEF][\x80-\xBF]{2} # straight 3-byte
| \xED[\x80-\x9F][\x80-\xBF] # excluding surrogates
| \xF0[\x90-\xBF][\x80-\xBF]{2} # planes 1-3
| [\xF1-\xF3][\x80-\xBF]{3} # planes 4-15
| \xF4[\x80-\x8F][\x80-\xBF]{2} # plane 16
)/mnx
# :startdoc:
# Converts from a Unicode internationalized domain name to an ASCII
# domain name as described in RFC 3490.
def self.to_ascii(input)
  input = input.to_s unless input.is_a?(String)
  input = input.dup.force_encoding(Encoding::UTF_8).unicode_normalize(:nfkc)
  input.force_encoding(Encoding::ASCII_8BIT) if input.respond_to?(:force_encoding)
  # Pure-ASCII names need no encoding; only run the Punycode path when the
  # name is valid UTF-8 AND contains at least one multibyte character.
  unless UTF8_REGEX.match?(input) && UTF8_REGEX_MULTIBYTE.match?(input)
    return input
  end
  labels = unicode_downcase(input).split('.')
  encoded = labels.map do |label|
    label.force_encoding(Encoding::ASCII_8BIT) if label.respond_to?(:force_encoding)
    if UTF8_REGEX.match?(label) && UTF8_REGEX_MULTIBYTE.match?(label)
      ACE_PREFIX + punycode_encode(label)
    else
      label
    end
  end
  encoded.join('.')
end
# Converts from an ASCII domain name to a Unicode internationalized
# domain name as described in RFC 3490.
def self.to_unicode(input)
  input = input.to_s unless input.is_a?(String)
  labels = input.split('.').map do |part|
    # Strip the ACE prefix; nil means this label is not Punycode-encoded.
    encoded = part[/^#{ACE_PREFIX}(.+)/, 1]
    if encoded
      begin
        punycode_decode(encoded)
      rescue Addressable::IDNA::PunycodeBadInput
        # toUnicode is explicitly defined as never-fails by the spec
        part
      end
    else
      part
    end
  end
  output = labels.join('.')
  output.force_encoding(Encoding::UTF_8) if output.respond_to?(:force_encoding)
  output
end
class << self
  # @deprecated Use {String#unicode_normalize(:nfkc)} instead
  def unicode_normalize_kc(value)
    value.to_s.unicode_normalize(:nfkc)
  end
  # Emit a rubygems deprecation warning on every call to the method above;
  # slated for removal after 2023-04.
  extend Gem::Deprecate
  deprecate :unicode_normalize_kc, "String#unicode_normalize(:nfkc)", 2023, 4
end
##
# Unicode aware downcase method.
#
# @api private
# @param [String] input
#   The input string.
# @return [String] The downcased result.
def self.unicode_downcase(input)
  input = input.to_s unless input.is_a?(String)
  # Decode UTF-8 into codepoints, lowercase each one via the UNICODE_DATA
  # table, then re-encode the result as UTF-8.
  unpacked = input.unpack("U*")
  unpacked.map! { |codepoint| lookup_unicode_lowercase(codepoint) }
  return unpacked.pack("U*")
end
private_class_method :unicode_downcase
# Looks up the lowercase mapping for a single codepoint in the bundled
# UNICODE_DATA table.
#
# @api private
# @param codepoint [Integer] a Unicode codepoint.
# @return [Integer] the lowercase codepoint, or the input unchanged when
#   the table has no entry or no lowercase mapping for it.
def self.lookup_unicode_lowercase(codepoint)
  entry = UNICODE_DATA[codepoint]
  return codepoint unless entry
  entry[UNICODE_DATA_LOWERCASE] || codepoint
end
private_class_method :lookup_unicode_lowercase
# Indexes into the per-codepoint arrays stored in UNICODE_DATA below.
# Based on the visible rows: slot 0 holds an Integer, slots 2 and 3 hold
# Strings or nil, and slots 4-6 hold Integer codepoints or nil.
UNICODE_DATA_COMBINING_CLASS = 0 # combining class (e.g. 230/220 for marks)
UNICODE_DATA_EXCLUSION = 1       # composition-exclusion marker
UNICODE_DATA_CANONICAL = 2       # canonical decomposition string, or nil
UNICODE_DATA_COMPATIBILITY = 3   # compatibility decomposition string, or nil
UNICODE_DATA_UPPERCASE = 4       # uppercase mapping codepoint, or nil
UNICODE_DATA_LOWERCASE = 5       # lowercase mapping codepoint, or nil
UNICODE_DATA_TITLECASE = 6       # titlecase mapping codepoint, or nil
UNICODE_DATA = {
65 => [0, 0, nil, nil, nil, 97, nil],
66 => [0, 0, nil, nil, nil, 98, nil],
67 => [0, 0, nil, nil, nil, 99, nil],
68 => [0, 0, nil, nil, nil, 100, nil],
69 => [0, 0, nil, nil, nil, 101, nil],
70 => [0, 0, nil, nil, nil, 102, nil],
71 => [0, 0, nil, nil, nil, 103, nil],
72 => [0, 0, nil, nil, nil, 104, nil],
73 => [0, 0, nil, nil, nil, 105, nil],
74 => [0, 0, nil, nil, nil, 106, nil],
75 => [0, 0, nil, nil, nil, 107, nil],
76 => [0, 0, nil, nil, nil, 108, nil],
77 => [0, 0, nil, nil, nil, 109, nil],
78 => [0, 0, nil, nil, nil, 110, nil],
79 => [0, 0, nil, nil, nil, 111, nil],
80 => [0, 0, nil, nil, nil, 112, nil],
81 => [0, 0, nil, nil, nil, 113, nil],
82 => [0, 0, nil, nil, nil, 114, nil],
83 => [0, 0, nil, nil, nil, 115, nil],
84 => [0, 0, nil, nil, nil, 116, nil],
85 => [0, 0, nil, nil, nil, 117, nil],
86 => [0, 0, nil, nil, nil, 118, nil],
87 => [0, 0, nil, nil, nil, 119, nil],
88 => [0, 0, nil, nil, nil, 120, nil],
89 => [0, 0, nil, nil, nil, 121, nil],
90 => [0, 0, nil, nil, nil, 122, nil],
97 => [0, 0, nil, nil, 65, nil, 65],
98 => [0, 0, nil, nil, 66, nil, 66],
99 => [0, 0, nil, nil, 67, nil, 67],
100 => [0, 0, nil, nil, 68, nil, 68],
101 => [0, 0, nil, nil, 69, nil, 69],
102 => [0, 0, nil, nil, 70, nil, 70],
103 => [0, 0, nil, nil, 71, nil, 71],
104 => [0, 0, nil, nil, 72, nil, 72],
105 => [0, 0, nil, nil, 73, nil, 73],
106 => [0, 0, nil, nil, 74, nil, 74],
107 => [0, 0, nil, nil, 75, nil, 75],
108 => [0, 0, nil, nil, 76, nil, 76],
109 => [0, 0, nil, nil, 77, nil, 77],
110 => [0, 0, nil, nil, 78, nil, 78],
111 => [0, 0, nil, nil, 79, nil, 79],
112 => [0, 0, nil, nil, 80, nil, 80],
113 => [0, 0, nil, nil, 81, nil, 81],
114 => [0, 0, nil, nil, 82, nil, 82],
115 => [0, 0, nil, nil, 83, nil, 83],
116 => [0, 0, nil, nil, 84, nil, 84],
117 => [0, 0, nil, nil, 85, nil, 85],
118 => [0, 0, nil, nil, 86, nil, 86],
119 => [0, 0, nil, nil, 87, nil, 87],
120 => [0, 0, nil, nil, 88, nil, 88],
121 => [0, 0, nil, nil, 89, nil, 89],
122 => [0, 0, nil, nil, 90, nil, 90],
160 => [0, 0, nil, " ", nil, nil, nil],
168 => [0, 0, nil, " ̈", nil, nil, nil],
170 => [0, 0, nil, "a", nil, nil, nil],
175 => [0, 0, nil, " ̄", nil, nil, nil],
178 => [0, 0, nil, "2", nil, nil, nil],
179 => [0, 0, nil, "3", nil, nil, nil],
180 => [0, 0, nil, " ́", nil, nil, nil],
181 => [0, 0, nil, "μ", 924, nil, 924],
184 => [0, 0, nil, " ̧", nil, nil, nil],
185 => [0, 0, nil, "1", nil, nil, nil],
186 => [0, 0, nil, "o", nil, nil, nil],
188 => [0, 0, nil, "1⁄4", nil, nil, nil],
189 => [0, 0, nil, "1⁄2", nil, nil, nil],
190 => [0, 0, nil, "3⁄4", nil, nil, nil],
192 => [0, 0, "À", "À", nil, 224, nil],
193 => [0, 0, "Á", "Á", nil, 225, nil],
194 => [0, 0, "Â", "Â", nil, 226, nil],
195 => [0, 0, "Ã", "Ã", nil, 227, nil],
196 => [0, 0, "Ä", "Ä", nil, 228, nil],
197 => [0, 0, "Å", "Å", nil, 229, nil],
198 => [0, 0, nil, nil, nil, 230, nil],
199 => [0, 0, "Ç", "Ç", nil, 231, nil],
200 => [0, 0, "È", "È", nil, 232, nil],
201 => [0, 0, "É", "É", nil, 233, nil],
202 => [0, 0, "Ê", "Ê", nil, 234, nil],
203 => [0, 0, "Ë", "Ë", nil, 235, nil],
204 => [0, 0, "Ì", "Ì", nil, 236, nil],
205 => [0, 0, "Í", "Í", nil, 237, nil],
206 => [0, 0, "Î", "Î", nil, 238, nil],
207 => [0, 0, "Ï", "Ï", nil, 239, nil],
208 => [0, 0, nil, nil, nil, 240, nil],
209 => [0, 0, "Ñ", "Ñ", nil, 241, nil],
210 => [0, 0, "Ò", "Ò", nil, 242, nil],
211 => [0, 0, "Ó", "Ó", nil, 243, nil],
212 => [0, 0, "Ô", "Ô", nil, 244, nil],
213 => [0, 0, "Õ", "Õ", nil, 245, nil],
214 => [0, 0, "Ö", "Ö", nil, 246, nil],
216 => [0, 0, nil, nil, nil, 248, nil],
217 => [0, 0, "Ù", "Ù", nil, 249, nil],
218 => [0, 0, "Ú", "Ú", nil, 250, nil],
219 => [0, 0, "Û", "Û", nil, 251, nil],
220 => [0, 0, "Ü", "Ü", nil, 252, nil],
221 => [0, 0, "Ý", "Ý", nil, 253, nil],
222 => [0, 0, nil, nil, nil, 254, nil],
224 => [0, 0, "à", "à", 192, nil, 192],
225 => [0, 0, "á", "á", 193, nil, 193],
226 => [0, 0, "â", "â", 194, nil, 194],
227 => [0, 0, "ã", "ã", 195, nil, 195],
228 => [0, 0, "ä", "ä", 196, nil, 196],
229 => [0, 0, "å", "å", 197, nil, 197],
230 => [0, 0, nil, nil, 198, nil, 198],
231 => [0, 0, "ç", "ç", 199, nil, 199],
232 => [0, 0, "è", "è", 200, nil, 200],
233 => [0, 0, "é", "é", 201, nil, 201],
234 => [0, 0, "ê", "ê", 202, nil, 202],
235 => [0, 0, "ë", "ë", 203, nil, 203],
236 => [0, 0, "ì", "ì", 204, nil, 204],
237 => [0, 0, "í", "í", 205, nil, 205],
238 => [0, 0, "î", "î", 206, nil, 206],
239 => [0, 0, "ï", "ï", 207, nil, 207],
240 => [0, 0, nil, nil, 208, nil, 208],
241 => [0, 0, "ñ", "ñ", 209, nil, 209],
242 => [0, 0, "ò", "ò", 210, nil, 210],
243 => [0, 0, "ó", "ó", 211, nil, 211],
244 => [0, 0, "ô", "ô", 212, nil, 212],
245 => [0, 0, "õ", "õ", 213, nil, 213],
246 => [0, 0, "ö", "ö", 214, nil, 214],
248 => [0, 0, nil, nil, 216, nil, 216],
249 => [0, 0, "ù", "ù", 217, nil, 217],
250 => [0, 0, "ú", "ú", 218, nil, 218],
251 => [0, 0, "û", "û", 219, nil, 219],
252 => [0, 0, "ü", "ü", 220, nil, 220],
253 => [0, 0, "ý", "ý", 221, nil, 221],
254 => [0, 0, nil, nil, 222, nil, 222],
255 => [0, 0, "ÿ", "ÿ", 376, nil, 376],
256 => [0, 0, "Ā", "Ā", nil, 257, nil],
257 => [0, 0, "ā", "ā", 256, nil, 256],
258 => [0, 0, "Ă", "Ă", nil, 259, nil],
259 => [0, 0, "ă", "ă", 258, nil, 258],
260 => [0, 0, "Ą", "Ą", nil, 261, nil],
261 => [0, 0, "ą", "ą", 260, nil, 260],
262 => [0, 0, "Ć", "Ć", nil, 263, nil],
263 => [0, 0, "ć", "ć", 262, nil, 262],
264 => [0, 0, "Ĉ", "Ĉ", nil, 265, nil],
265 => [0, 0, "ĉ", "ĉ", 264, nil, 264],
266 => [0, 0, "Ċ", "Ċ", nil, 267, nil],
267 => [0, 0, "ċ", "ċ", 266, nil, 266],
268 => [0, 0, "Č", "Č", nil, 269, nil],
269 => [0, 0, "č", "č", 268, nil, 268],
270 => [0, 0, "Ď", "Ď", nil, 271, nil],
271 => [0, 0, "ď", "ď", 270, nil, 270],
272 => [0, 0, nil, nil, nil, 273, nil],
273 => [0, 0, nil, nil, 272, nil, 272],
274 => [0, 0, "Ē", "Ē", nil, 275, nil],
275 => [0, 0, "ē", "ē", 274, nil, 274],
276 => [0, 0, "Ĕ", "Ĕ", nil, 277, nil],
277 => [0, 0, "ĕ", "ĕ", 276, nil, 276],
278 => [0, 0, "Ė", "Ė", nil, 279, nil],
279 => [0, 0, "ė", "ė", 278, nil, 278],
280 => [0, 0, "Ę", "Ę", nil, 281, nil],
281 => [0, 0, "ę", "ę", 280, nil, 280],
282 => [0, 0, "Ě", "Ě", nil, 283, nil],
283 => [0, 0, "ě", "ě", 282, nil, 282],
284 => [0, 0, "Ĝ", "Ĝ", nil, 285, nil],
285 => [0, 0, "ĝ", "ĝ", 284, nil, 284],
286 => [0, 0, "Ğ", "Ğ", nil, 287, nil],
287 => [0, 0, "ğ", "ğ", 286, nil, 286],
288 => [0, 0, "Ġ", "Ġ", nil, 289, nil],
289 => [0, 0, "ġ", "ġ", 288, nil, 288],
290 => [0, 0, "Ģ", "Ģ", nil, 291, nil],
291 => [0, 0, "ģ", "ģ", 290, nil, 290],
292 => [0, 0, "Ĥ", "Ĥ", nil, 293, nil],
293 => [0, 0, "ĥ", "ĥ", 292, nil, 292],
294 => [0, 0, nil, nil, nil, 295, nil],
295 => [0, 0, nil, nil, 294, nil, 294],
296 => [0, 0, "Ĩ", "Ĩ", nil, 297, nil],
297 => [0, 0, "ĩ", "ĩ", 296, nil, 296],
298 => [0, 0, "Ī", "Ī", nil, 299, nil],
299 => [0, 0, "ī", "ī", 298, nil, 298],
300 => [0, 0, "Ĭ", "Ĭ", nil, 301, nil],
301 => [0, 0, "ĭ", "ĭ", 300, nil, 300],
302 => [0, 0, "Į", "Į", nil, 303, nil],
303 => [0, 0, "į", "į", 302, nil, 302],
304 => [0, 0, "İ", "İ", nil, 105, nil],
305 => [0, 0, nil, nil, 73, nil, 73],
306 => [0, 0, nil, "IJ", nil, 307, nil],
307 => [0, 0, nil, "ij", 306, nil, 306],
308 => [0, 0, "Ĵ", "Ĵ", nil, 309, nil],
309 => [0, 0, "ĵ", "ĵ", 308, nil, 308],
310 => [0, 0, "Ķ", "Ķ", nil, 311, nil],
311 => [0, 0, "ķ", "ķ", 310, nil, 310],
313 => [0, 0, "Ĺ", "Ĺ", nil, 314, nil],
314 => [0, 0, "ĺ", "ĺ", 313, nil, 313],
315 => [0, 0, "Ļ", "Ļ", nil, 316, nil],
316 => [0, 0, "ļ", "ļ", 315, nil, 315],
317 => [0, 0, "Ľ", "Ľ", nil, 318, nil],
318 => [0, 0, "ľ", "ľ", 317, nil, 317],
319 => [0, 0, nil, "L·", nil, 320, nil],
320 => [0, 0, nil, "l·", 319, nil, 319],
321 => [0, 0, nil, nil, nil, 322, nil],
322 => [0, 0, nil, nil, 321, nil, 321],
323 => [0, 0, "Ń", "Ń", nil, 324, nil],
324 => [0, 0, "ń", "ń", 323, nil, 323],
325 => [0, 0, "Ņ", "Ņ", nil, 326, nil],
326 => [0, 0, "ņ", "ņ", 325, nil, 325],
327 => [0, 0, "Ň", "Ň", nil, 328, nil],
328 => [0, 0, "ň", "ň", 327, nil, 327],
329 => [0, 0, nil, "ʼn", nil, nil, nil],
330 => [0, 0, nil, nil, nil, 331, nil],
331 => [0, 0, nil, nil, 330, nil, 330],
332 => [0, 0, "Ō", "Ō", nil, 333, nil],
333 => [0, 0, "ō", "ō", 332, nil, 332],
334 => [0, 0, "Ŏ", "Ŏ", nil, 335, nil],
335 => [0, 0, "ŏ", "ŏ", 334, nil, 334],
336 => [0, 0, "Ő", "Ő", nil, 337, nil],
337 => [0, 0, "ő", "ő", 336, nil, 336],
338 => [0, 0, nil, nil, nil, 339, nil],
339 => [0, 0, nil, nil, 338, nil, 338],
340 => [0, 0, "Ŕ", "Ŕ", nil, 341, nil],
341 => [0, 0, "ŕ", "ŕ", 340, nil, 340],
342 => [0, 0, "Ŗ", "Ŗ", nil, 343, nil],
343 => [0, 0, "ŗ", "ŗ", 342, nil, 342],
344 => [0, 0, "Ř", "Ř", nil, 345, nil],
345 => [0, 0, "ř", "ř", 344, nil, 344],
346 => [0, 0, "Ś", "Ś", nil, 347, nil],
347 => [0, 0, "ś", "ś", 346, nil, 346],
348 => [0, 0, "Ŝ", "Ŝ", nil, 349, nil],
349 => [0, 0, "ŝ", "ŝ", 348, nil, 348],
350 => [0, 0, "Ş", "Ş", nil, 351, nil],
351 => [0, 0, "ş", "ş", 350, nil, 350],
352 => [0, 0, "Š", "Š", nil, 353, nil],
353 => [0, 0, "š", "š", 352, nil, 352],
354 => [0, 0, "Ţ", "Ţ", nil, 355, nil],
355 => [0, 0, "ţ", "ţ", 354, nil, 354],
356 => [0, 0, "Ť", "Ť", nil, 357, nil],
357 => [0, 0, "ť", "ť", 356, nil, 356],
358 => [0, 0, nil, nil, nil, 359, nil],
359 => [0, 0, nil, nil, 358, nil, 358],
360 => [0, 0, "Ũ", "Ũ", nil, 361, nil],
361 => [0, 0, "ũ", "ũ", 360, nil, 360],
362 => [0, 0, "Ū", "Ū", nil, 363, nil],
363 => [0, 0, "ū", "ū", 362, nil, 362],
364 => [0, 0, "Ŭ", "Ŭ", nil, 365, nil],
365 => [0, 0, "ŭ", "ŭ", 364, nil, 364],
366 => [0, 0, "Ů", "Ů", nil, 367, nil],
367 => [0, 0, "ů", "ů", 366, nil, 366],
368 => [0, 0, "Ű", "Ű", nil, 369, nil],
369 => [0, 0, "ű", "ű", 368, nil, 368],
370 => [0, 0, "Ų", "Ų", nil, 371, nil],
371 => [0, 0, "ų", "ų", 370, nil, 370],
372 => [0, 0, "Ŵ", "Ŵ", nil, 373, nil],
373 => [0, 0, "ŵ", "ŵ", 372, nil, 372],
374 => [0, 0, "Ŷ", "Ŷ", nil, 375, nil],
375 => [0, 0, "ŷ", "ŷ", 374, nil, 374],
376 => [0, 0, "Ÿ", "Ÿ", nil, 255, nil],
377 => [0, 0, "Ź", "Ź", nil, 378, nil],
378 => [0, 0, "ź", "ź", 377, nil, 377],
379 => [0, 0, "Ż", "Ż", nil, 380, nil],
380 => [0, 0, "ż", "ż", 379, nil, 379],
381 => [0, 0, "Ž", "Ž", nil, 382, nil],
382 => [0, 0, "ž", "ž", 381, nil, 381],
383 => [0, 0, nil, "s", 83, nil, 83],
385 => [0, 0, nil, nil, nil, 595, nil],
386 => [0, 0, nil, nil, nil, 387, nil],
387 => [0, 0, nil, nil, 386, nil, 386],
388 => [0, 0, nil, nil, nil, 389, nil],
389 => [0, 0, nil, nil, 388, nil, 388],
390 => [0, 0, nil, nil, nil, 596, nil],
391 => [0, 0, nil, nil, nil, 392, nil],
392 => [0, 0, nil, nil, 391, nil, 391],
393 => [0, 0, nil, nil, nil, 598, nil],
394 => [0, 0, nil, nil, nil, 599, nil],
395 => [0, 0, nil, nil, nil, 396, nil],
396 => [0, 0, nil, nil, 395, nil, 395],
398 => [0, 0, nil, nil, nil, 477, nil],
399 => [0, 0, nil, nil, nil, 601, nil],
400 => [0, 0, nil, nil, nil, 603, nil],
401 => [0, 0, nil, nil, nil, 402, nil],
402 => [0, 0, nil, nil, 401, nil, 401],
403 => [0, 0, nil, nil, nil, 608, nil],
404 => [0, 0, nil, nil, nil, 611, nil],
405 => [0, 0, nil, nil, 502, nil, 502],
406 => [0, 0, nil, nil, nil, 617, nil],
407 => [0, 0, nil, nil, nil, 616, nil],
408 => [0, 0, nil, nil, nil, 409, nil],
409 => [0, 0, nil, nil, 408, nil, 408],
412 => [0, 0, nil, nil, nil, 623, nil],
413 => [0, 0, nil, nil, nil, 626, nil],
415 => [0, 0, nil, nil, nil, 629, nil],
416 => [0, 0, "Ơ", "Ơ", nil, 417, nil],
417 => [0, 0, "ơ", "ơ", 416, nil, 416],
418 => [0, 0, nil, nil, nil, 419, nil],
419 => [0, 0, nil, nil, 418, nil, 418],
420 => [0, 0, nil, nil, nil, 421, nil],
421 => [0, 0, nil, nil, 420, nil, 420],
422 => [0, 0, nil, nil, nil, 640, nil],
423 => [0, 0, nil, nil, nil, 424, nil],
424 => [0, 0, nil, nil, 423, nil, 423],
425 => [0, 0, nil, nil, nil, 643, nil],
428 => [0, 0, nil, nil, nil, 429, nil],
429 => [0, 0, nil, nil, 428, nil, 428],
430 => [0, 0, nil, nil, nil, 648, nil],
431 => [0, 0, "Ư", "Ư", nil, 432, nil],
432 => [0, 0, "ư", "ư", 431, nil, 431],
433 => [0, 0, nil, nil, nil, 650, nil],
434 => [0, 0, nil, nil, nil, 651, nil],
435 => [0, 0, nil, nil, nil, 436, nil],
436 => [0, 0, nil, nil, 435, nil, 435],
437 => [0, 0, nil, nil, nil, 438, nil],
438 => [0, 0, nil, nil, 437, nil, 437],
439 => [0, 0, nil, nil, nil, 658, nil],
440 => [0, 0, nil, nil, nil, 441, nil],
441 => [0, 0, nil, nil, 440, nil, 440],
444 => [0, 0, nil, nil, nil, 445, nil],
445 => [0, 0, nil, nil, 444, nil, 444],
447 => [0, 0, nil, nil, 503, nil, 503],
452 => [0, 0, nil, "DŽ", nil, 454, 453],
453 => [0, 0, nil, "Dž", 452, 454, nil],
454 => [0, 0, nil, "dž", 452, nil, 453],
455 => [0, 0, nil, "LJ", nil, 457, 456],
456 => [0, 0, nil, "Lj", 455, 457, nil],
457 => [0, 0, nil, "lj", 455, nil, 456],
458 => [0, 0, nil, "NJ", nil, 460, 459],
459 => [0, 0, nil, "Nj", 458, 460, nil],
460 => [0, 0, nil, "nj", 458, nil, 459],
461 => [0, 0, "Ǎ", "Ǎ", nil, 462, nil],
462 => [0, 0, "ǎ", "ǎ", 461, nil, 461],
463 => [0, 0, "Ǐ", "Ǐ", nil, 464, nil],
464 => [0, 0, "ǐ", "ǐ", 463, nil, 463],
465 => [0, 0, "Ǒ", "Ǒ", nil, 466, nil],
466 => [0, 0, "ǒ", "ǒ", 465, nil, 465],
467 => [0, 0, "Ǔ", "Ǔ", nil, 468, nil],
468 => [0, 0, "ǔ", "ǔ", 467, nil, 467],
469 => [0, 0, "Ǖ", "Ǖ", nil, 470, nil],
470 => [0, 0, "ǖ", "ǖ", 469, nil, 469],
471 => [0, 0, "Ǘ", "Ǘ", nil, 472, nil],
472 => [0, 0, "ǘ", "ǘ", 471, nil, 471],
473 => [0, 0, "Ǚ", "Ǚ", nil, 474, nil],
474 => [0, 0, "ǚ", "ǚ", 473, nil, 473],
475 => [0, 0, "Ǜ", "Ǜ", nil, 476, nil],
476 => [0, 0, "ǜ", "ǜ", 475, nil, 475],
477 => [0, 0, nil, nil, 398, nil, 398],
478 => [0, 0, "Ǟ", "Ǟ", nil, 479, nil],
479 => [0, 0, "ǟ", "ǟ", 478, nil, 478],
480 => [0, 0, "Ǡ", "Ǡ", nil, 481, nil],
481 => [0, 0, "ǡ", "ǡ", 480, nil, 480],
482 => [0, 0, "Ǣ", "Ǣ", nil, 483, nil],
483 => [0, 0, "ǣ", "ǣ", 482, nil, 482],
484 => [0, 0, nil, nil, nil, 485, nil],
485 => [0, 0, nil, nil, 484, nil, 484],
486 => [0, 0, "Ǧ", "Ǧ", nil, 487, nil],
487 => [0, 0, "ǧ", "ǧ", 486, nil, 486],
488 => [0, 0, "Ǩ", "Ǩ", nil, 489, nil],
489 => [0, 0, "ǩ", "ǩ", 488, nil, 488],
490 => [0, 0, "Ǫ", "Ǫ", nil, 491, nil],
491 => [0, 0, "ǫ", "ǫ", 490, nil, 490],
492 => [0, 0, "Ǭ", "Ǭ", nil, 493, nil],
493 => [0, 0, "ǭ", "ǭ", 492, nil, 492],
494 => [0, 0, "Ǯ", "Ǯ", nil, 495, nil],
495 => [0, 0, "ǯ", "ǯ", 494, nil, 494],
496 => [0, 0, "ǰ", "ǰ", nil, nil, nil],
497 => [0, 0, nil, "DZ", nil, 499, 498],
498 => [0, 0, nil, "Dz", 497, 499, nil],
499 => [0, 0, nil, "dz", 497, nil, 498],
500 => [0, 0, "Ǵ", "Ǵ", nil, 501, nil],
501 => [0, 0, "ǵ", "ǵ", 500, nil, 500],
502 => [0, 0, nil, nil, nil, 405, nil],
503 => [0, 0, nil, nil, nil, 447, nil],
504 => [0, 0, "Ǹ", "Ǹ", nil, 505, nil],
505 => [0, 0, "ǹ", "ǹ", 504, nil, 504],
506 => [0, 0, "Ǻ", "Ǻ", nil, 507, nil],
507 => [0, 0, "ǻ", "ǻ", 506, nil, 506],
508 => [0, 0, "Ǽ", "Ǽ", nil, 509, nil],
509 => [0, 0, "ǽ", "ǽ", 508, nil, 508],
510 => [0, 0, "Ǿ", "Ǿ", nil, 511, nil],
511 => [0, 0, "ǿ", "ǿ", 510, nil, 510],
512 => [0, 0, "Ȁ", "Ȁ", nil, 513, nil],
513 => [0, 0, "ȁ", "ȁ", 512, nil, 512],
514 => [0, 0, "Ȃ", "Ȃ", nil, 515, nil],
515 => [0, 0, "ȃ", "ȃ", 514, nil, 514],
516 => [0, 0, "Ȅ", "Ȅ", nil, 517, nil],
517 => [0, 0, "ȅ", "ȅ", 516, nil, 516],
518 => [0, 0, "Ȇ", "Ȇ", nil, 519, nil],
519 => [0, 0, "ȇ", "ȇ", 518, nil, 518],
520 => [0, 0, "Ȉ", "Ȉ", nil, 521, nil],
521 => [0, 0, "ȉ", "ȉ", 520, nil, 520],
522 => [0, 0, "Ȋ", "Ȋ", nil, 523, nil],
523 => [0, 0, "ȋ", "ȋ", 522, nil, 522],
524 => [0, 0, "Ȍ", "Ȍ", nil, 525, nil],
525 => [0, 0, "ȍ", "ȍ", 524, nil, 524],
526 => [0, 0, "Ȏ", "Ȏ", nil, 527, nil],
527 => [0, 0, "ȏ", "ȏ", 526, nil, 526],
528 => [0, 0, "Ȑ", "Ȑ", nil, 529, nil],
529 => [0, 0, "ȑ", "ȑ", 528, nil, 528],
530 => [0, 0, "Ȓ", "Ȓ", nil, 531, nil],
531 => [0, 0, "ȓ", "ȓ", 530, nil, 530],
532 => [0, 0, "Ȕ", "Ȕ", nil, 533, nil],
533 => [0, 0, "ȕ", "ȕ", 532, nil, 532],
534 => [0, 0, "Ȗ", "Ȗ", nil, 535, nil],
535 => [0, 0, "ȗ", "ȗ", 534, nil, 534],
536 => [0, 0, "Ș", "Ș", nil, 537, nil],
537 => [0, 0, "ș", "ș", 536, nil, 536],
538 => [0, 0, "Ț", "Ț", nil, 539, nil],
539 => [0, 0, "ț", "ț", 538, nil, 538],
540 => [0, 0, nil, nil, nil, 541, nil],
541 => [0, 0, nil, nil, 540, nil, 540],
542 => [0, 0, "Ȟ", "Ȟ", nil, 543, nil],
543 => [0, 0, "ȟ", "ȟ", 542, nil, 542],
546 => [0, 0, nil, nil, nil, 547, nil],
547 => [0, 0, nil, nil, 546, nil, 546],
548 => [0, 0, nil, nil, nil, 549, nil],
549 => [0, 0, nil, nil, 548, nil, 548],
550 => [0, 0, "Ȧ", "Ȧ", nil, 551, nil],
551 => [0, 0, "ȧ", "ȧ", 550, nil, 550],
552 => [0, 0, "Ȩ", "Ȩ", nil, 553, nil],
553 => [0, 0, "ȩ", "ȩ", 552, nil, 552],
554 => [0, 0, "Ȫ", "Ȫ", nil, 555, nil],
555 => [0, 0, "ȫ", "ȫ", 554, nil, 554],
556 => [0, 0, "Ȭ", "Ȭ", nil, 557, nil],
557 => [0, 0, "ȭ", "ȭ", 556, nil, 556],
558 => [0, 0, "Ȯ", "Ȯ", nil, 559, nil],
559 => [0, 0, "ȯ", "ȯ", 558, nil, 558],
560 => [0, 0, "Ȱ", "Ȱ", nil, 561, nil],
561 => [0, 0, "ȱ", "ȱ", 560, nil, 560],
562 => [0, 0, "Ȳ", "Ȳ", nil, 563, nil],
563 => [0, 0, "ȳ", "ȳ", 562, nil, 562],
595 => [0, 0, nil, nil, 385, nil, 385],
596 => [0, 0, nil, nil, 390, nil, 390],
598 => [0, 0, nil, nil, 393, nil, 393],
599 => [0, 0, nil, nil, 394, nil, 394],
601 => [0, 0, nil, nil, 399, nil, 399],
603 => [0, 0, nil, nil, 400, nil, 400],
608 => [0, 0, nil, nil, 403, nil, 403],
611 => [0, 0, nil, nil, 404, nil, 404],
616 => [0, 0, nil, nil, 407, nil, 407],
617 => [0, 0, nil, nil, 406, nil, 406],
623 => [0, 0, nil, nil, 412, nil, 412],
626 => [0, 0, nil, nil, 413, nil, 413],
629 => [0, 0, nil, nil, 415, nil, 415],
640 => [0, 0, nil, nil, 422, nil, 422],
643 => [0, 0, nil, nil, 425, nil, 425],
648 => [0, 0, nil, nil, 430, nil, 430],
650 => [0, 0, nil, nil, 433, nil, 433],
651 => [0, 0, nil, nil, 434, nil, 434],
658 => [0, 0, nil, nil, 439, nil, 439],
688 => [0, 0, nil, "h", nil, nil, nil],
689 => [0, 0, nil, "ɦ", nil, nil, nil],
690 => [0, 0, nil, "j", nil, nil, nil],
691 => [0, 0, nil, "r", nil, nil, nil],
692 => [0, 0, nil, "ɹ", nil, nil, nil],
693 => [0, 0, nil, "ɻ", nil, nil, nil],
694 => [0, 0, nil, "ʁ", nil, nil, nil],
695 => [0, 0, nil, "w", nil, nil, nil],
696 => [0, 0, nil, "y", nil, nil, nil],
728 => [0, 0, nil, " ̆", nil, nil, nil],
729 => [0, 0, nil, " ̇", nil, nil, nil],
730 => [0, 0, nil, " ̊", nil, nil, nil],
731 => [0, 0, nil, " ̨", nil, nil, nil],
732 => [0, 0, nil, " ̃", nil, nil, nil],
733 => [0, 0, nil, " ̋", nil, nil, nil],
736 => [0, 0, nil, "ɣ", nil, nil, nil],
737 => [0, 0, nil, "l", nil, nil, nil],
738 => [0, 0, nil, "s", nil, nil, nil],
739 => [0, 0, nil, "x", nil, nil, nil],
740 => [0, 0, nil, "ʕ", nil, nil, nil],
768 => [230, 0, nil, nil, nil, nil, nil],
769 => [230, 0, nil, nil, nil, nil, nil],
770 => [230, 0, nil, nil, nil, nil, nil],
771 => [230, 0, nil, nil, nil, nil, nil],
772 => [230, 0, nil, nil, nil, nil, nil],
773 => [230, 0, nil, nil, nil, nil, nil],
774 => [230, 0, nil, nil, nil, nil, nil],
775 => [230, 0, nil, nil, nil, nil, nil],
776 => [230, 0, nil, nil, nil, nil, nil],
777 => [230, 0, nil, nil, nil, nil, nil],
778 => [230, 0, nil, nil, nil, nil, nil],
779 => [230, 0, nil, nil, nil, nil, nil],
780 => [230, 0, nil, nil, nil, nil, nil],
781 => [230, 0, nil, nil, nil, nil, nil],
782 => [230, 0, nil, nil, nil, nil, nil],
783 => [230, 0, nil, nil, nil, nil, nil],
784 => [230, 0, nil, nil, nil, nil, nil],
785 => [230, 0, nil, nil, nil, nil, nil],
786 => [230, 0, nil, nil, nil, nil, nil],
787 => [230, 0, nil, nil, nil, nil, nil],
788 => [230, 0, nil, nil, nil, nil, nil],
789 => [232, 0, nil, nil, nil, nil, nil],
790 => [220, 0, nil, nil, nil, nil, nil],
791 => [220, 0, nil, nil, nil, nil, nil],
792 => [220, 0, nil, nil, nil, nil, nil],
793 => [220, 0, nil, nil, nil, nil, nil],
794 => [232, 0, nil, nil, nil, nil, nil],
795 => [216, 0, nil, nil, nil, nil, nil],
796 => [220, 0, nil, nil, nil, nil, nil],
797 => [220, 0, nil, nil, nil, nil, nil],
798 => [220, 0, nil, nil, nil, nil, nil],
799 => [220, 0, nil, nil, nil, nil, nil],
800 => [220, 0, nil, nil, nil, nil, nil],
801 => [202, 0, nil, nil, nil, nil, nil],
802 => [202, 0, nil, nil, nil, nil, nil],
803 => [220, 0, nil, nil, nil, nil, nil],
804 => [220, 0, nil, nil, nil, nil, nil],
805 => [220, 0, nil, nil, nil, nil, nil],
806 => [220, 0, nil, nil, nil, nil, nil],
807 => [202, 0, nil, nil, nil, nil, nil],
808 => [202, 0, nil, nil, nil, nil, nil],
809 => [220, 0, nil, nil, nil, nil, nil],
810 => [220, 0, nil, nil, nil, nil, nil],
811 => [220, 0, nil, nil, nil, nil, nil],
812 => [220, 0, nil, nil, nil, nil, nil],
813 => [220, 0, nil, nil, nil, nil, nil],
814 => [220, 0, nil, nil, nil, nil, nil],
815 => [220, 0, nil, nil, nil, nil, nil],
816 => [220, 0, nil, nil, nil, nil, nil],
817 => [220, 0, nil, nil, nil, nil, nil],
818 => [220, 0, nil, nil, nil, nil, nil],
819 => [220, 0, nil, nil, nil, nil, nil],
820 => [1, 0, nil, nil, nil, nil, nil],
821 => [1, 0, nil, nil, nil, nil, nil],
822 => [1, 0, nil, nil, nil, nil, nil],
823 => [1, 0, nil, nil, nil, nil, nil],
824 => [1, 0, nil, nil, nil, nil, nil],
825 => [220, 0, nil, nil, nil, nil, nil],
826 => [220, 0, nil, nil, nil, nil, nil],
827 => [220, 0, nil, nil, nil, nil, nil],
828 => [220, 0, nil, nil, nil, nil, nil],
829 => [230, 0, nil, nil, nil, nil, nil],
830 => [230, 0, nil, nil, nil, nil, nil],
831 => [230, 0, nil, nil, nil, nil, nil],
832 => [230, 2, "̀", "̀", nil, nil, nil],
833 => [230, 2, "́", "́", nil, nil, nil],
834 => [230, 0, nil, nil, nil, nil, nil],
835 => [230, 2, "̓", "̓", nil, nil, nil],
836 => [230, 3, "̈́", "̈́", nil, nil, nil],
837 => [240, 0, nil, nil, 921, nil, 921],
838 => [230, 0, nil, nil, nil, nil, nil],
839 => [220, 0, nil, nil, nil, nil, nil],
840 => [220, 0, nil, nil, nil, nil, nil],
841 => [220, 0, nil, nil, nil, nil, nil],
842 => [230, 0, nil, nil, nil, nil, nil],
843 => [230, 0, nil, nil, nil, nil, nil],
844 => [230, 0, nil, nil, nil, nil, nil],
845 => [220, 0, nil, nil, nil, nil, nil],
846 => [220, 0, nil, nil, nil, nil, nil],
864 => [234, 0, nil, nil, nil, nil, nil],
865 => [234, 0, nil, nil, nil, nil, nil],
866 => [233, 0, nil, nil, nil, nil, nil],
884 => [0, 2, "ʹ", "ʹ", nil, nil, nil],
890 => [0, 0, nil, " ͅ", nil, nil, nil],
894 => [0, 2, ";", ";", nil, nil, nil],
900 => [0, 0, nil, " ́", nil, nil, nil],
901 => [0, 0, "΅", "΅", nil, nil, nil],
902 => [0, 0, "Ά", "Ά", nil, 940, nil],
903 => [0, 2, "·", "·", nil, nil, nil],
904 => [0, 0, "Έ", "Έ", nil, 941, nil],
905 => [0, 0, "Ή", "Ή", nil, 942, nil],
906 => [0, 0, "Ί", "Ί", nil, 943, nil],
908 => [0, 0, "Ό", "Ό", nil, 972, nil],
910 => [0, 0, "Ύ", "Ύ", nil, 973, nil],
911 => [0, 0, "Ώ", "Ώ", nil, 974, nil],
912 => [0, 0, "ΐ", "ΐ", nil, nil, nil],
913 => [0, 0, nil, nil, nil, 945, nil],
914 => [0, 0, nil, nil, nil, 946, nil],
915 => [0, 0, nil, nil, nil, 947, nil],
916 => [0, 0, nil, nil, nil, 948, nil],
917 => [0, 0, nil, nil, nil, 949, nil],
918 => [0, 0, nil, nil, nil, 950, nil],
919 => [0, 0, nil, nil, nil, 951, nil],
920 => [0, 0, nil, nil, nil, 952, nil],
921 => [0, 0, nil, nil, nil, 953, nil],
922 => [0, 0, nil, nil, nil, 954, nil],
923 => [0, 0, nil, nil, nil, 955, nil],
924 => [0, 0, nil, nil, nil, 956, nil],
925 => [0, 0, nil, nil, nil, 957, nil],
926 => [0, 0, nil, nil, nil, 958, nil],
927 => [0, 0, nil, nil, nil, 959, nil],
928 => [0, 0, nil, nil, nil, 960, nil],
929 => [0, 0, nil, nil, nil, 961, nil],
931 => [0, 0, nil, nil, nil, 963, nil],
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | true |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/idna/native.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/addressable-2.8.8/lib/addressable/idna/native.rb | # frozen_string_literal: true
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
require "idn"
module Addressable
  # Native (libidn-backed) implementation of the IDNA helpers. Each
  # method delegates the heavy lifting to the IDN gem.
  module IDNA
    # Punycode-encodes a single label via libidn.
    def self.punycode_encode(value)
      IDN::Punycode.encode(value.to_s)
    end

    # Punycode-decodes a single label via libidn.
    def self.punycode_decode(value)
      IDN::Punycode.decode(value.to_s)
    end

    class << self
      # @deprecated Use {String#unicode_normalize(:nfkc)} instead
      def unicode_normalize_kc(value)
        value.to_s.unicode_normalize(:nfkc)
      end
      extend Gem::Deprecate
      deprecate :unicode_normalize_kc, "String#unicode_normalize(:nfkc)", 2023, 4
    end

    # Converts a Unicode domain name to its ASCII-compatible encoding,
    # label by label. Labels of 64+ octets pass through untouched and
    # empty labels stay empty; the -1 split limit preserves trailing
    # empty labels (e.g. a trailing dot).
    def self.to_ascii(value)
      labels = value.to_s.split('.', -1)
      converted = labels.map do |label|
        case label.size
        when 1..63
          IDN::Idna.toASCII(label, IDN::Idna::ALLOW_UNASSIGNED)
        when 0
          ''
        else
          label
        end
      end
      converted.join('.')
    end

    # Converts an ASCII domain name back to Unicode, label by label,
    # mirroring the size handling in .to_ascii.
    def self.to_unicode(value)
      labels = value.to_s.split('.', -1)
      converted = labels.map do |label|
        case label.size
        when 1..63
          IDN::Idna.toUnicode(label, IDN::Idna::ALLOW_UNASSIGNED)
        when 0
          ''
        else
          label
        end
      end
      converted.join('.')
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent-ruby.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent-ruby.rb | # This file is here so that there is a file with the same name as the gem that
# can be required by Bundler.require. Applications should normally
# require 'concurrent'.
require_relative "concurrent"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent.rb | require 'concurrent/version'
require 'concurrent/constants'
require 'concurrent/errors'
require 'concurrent/configuration'
require 'concurrent/atomics'
require 'concurrent/executors'
require 'concurrent/synchronization'
require 'concurrent/atomic/atomic_markable_reference'
require 'concurrent/atomic/atomic_reference'
require 'concurrent/agent'
require 'concurrent/atom'
require 'concurrent/array'
require 'concurrent/hash'
require 'concurrent/set'
require 'concurrent/map'
require 'concurrent/tuple'
require 'concurrent/async'
require 'concurrent/dataflow'
require 'concurrent/delay'
require 'concurrent/exchanger'
require 'concurrent/future'
require 'concurrent/immutable_struct'
require 'concurrent/ivar'
require 'concurrent/maybe'
require 'concurrent/mutable_struct'
require 'concurrent/mvar'
require 'concurrent/promise'
require 'concurrent/scheduled_task'
require 'concurrent/settable_struct'
require 'concurrent/timer_task'
require 'concurrent/tvar'
require 'concurrent/promises'
require 'concurrent/thread_safe/synchronized_delegator'
require 'concurrent/thread_safe/util'
require 'concurrent/options'
# @!macro internal_implementation_note
#
# @note **Private Implementation:** This abstraction is a private, internal
# implementation detail. It should never be used directly.
# @!macro monotonic_clock_warning
#
# @note Time calculations on all platforms and languages are sensitive to
# changes to the system clock. To alleviate the potential problems
# associated with changing the system clock while an application is running,
# most modern operating systems provide a monotonic clock that operates
# independently of the system clock. A monotonic clock cannot be used to
# determine human-friendly clock times. A monotonic clock is used exclusively
# for calculating time intervals. Not all Ruby platforms provide access to an
# operating system monotonic clock. On these platforms a pure-Ruby monotonic
# clock will be used as a fallback. An operating system monotonic clock is both
# faster and more reliable than the pure-Ruby implementation. The pure-Ruby
# implementation should be fast and reliable enough for most non-realtime
# operations. At this time the common Ruby platforms that provide access to an
# operating system monotonic clock are MRI 2.1 and above and JRuby (all versions).
#
# @see http://linux.die.net/man/3/clock_gettime Linux clock_gettime(3)
# @!macro copy_options
#
# ## Copy Options
#
# Object references in Ruby are mutable. This can lead to serious
# problems when the {#value} of an object is a mutable reference. Which
# is always the case unless the value is a `Fixnum`, `Symbol`, or similar
# "primitive" data type. Each instance can be configured with a few
# options that can help protect the program from potentially dangerous
# operations. Each of these options can be optionally set when the object
# instance is created:
#
# * `:dup_on_deref` When true the object will call the `#dup` method on
# the `value` object every time the `#value` method is called
# (default: false)
# * `:freeze_on_deref` When true the object will call the `#freeze`
# method on the `value` object every time the `#value` method is called
# (default: false)
# * `:copy_on_deref` When given a `Proc` object the `Proc` will be run
# every time the `#value` method is called. The `Proc` will be given
# the current `value` as its only argument and the result returned by
# the block will be the return value of the `#value` call. When `nil`
# this option will be ignored (default: nil)
#
# When multiple deref options are set the order of operations is strictly defined.
# The order of deref operations is:
# * `:copy_on_deref`
# * `:dup_on_deref`
# * `:freeze_on_deref`
#
# Because of this ordering there is no need to `#freeze` an object created by a
# provided `:copy_on_deref` block. Simply set `:freeze_on_deref` to `true`.
# Setting both `:dup_on_deref` to `true` and `:freeze_on_deref` to `true` is
# as close to the behavior of a "pure" functional language (like Erlang, Clojure,
# or Haskell) as we are likely to get in Ruby.
# @!macro deref_options
#
# @option opts [Boolean] :dup_on_deref (false) Call `#dup` before
# returning the data from {#value}
# @option opts [Boolean] :freeze_on_deref (false) Call `#freeze` before
# returning the data from {#value}
# @option opts [Proc] :copy_on_deref (nil) When calling the {#value}
# method, call the given proc passing the internal value as the sole
# argument then return the new value returned from the proc.
# @!macro executor_and_deref_options
#
# @param [Hash] opts the options used to define the behavior at update and deref
# and to specify the executor on which to perform actions
# @option opts [Executor] :executor when set use the given `Executor` instance.
# Three special values are also supported: `:io` returns the global pool for
# long, blocking (IO) tasks, `:fast` returns the global pool for short, fast
# operations, and `:immediate` returns the global `ImmediateExecutor` object.
# @!macro deref_options
# @!macro warn.edge
# @api Edge
# @note **Edge Features** are under active development and may change frequently.
#
# - Deprecations are not added before incompatible changes.
# - Edge version: _major_ is always 0, _minor_ bump means incompatible change,
# _patch_ bump means compatible change.
# - Edge features may also lack tests and documentation.
# - Features developed in `concurrent-ruby-edge` are expected to move
# to `concurrent-ruby` when finalised.
# {include:file:README.md}
#
# Root namespace for the concurrent-ruby gem. The requires at the top of
# this file populate it with the public abstractions (executors, atomics,
# futures, promises, thread-safe collections, etc.).
module Concurrent
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/agent.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/agent.rb | require 'concurrent/configuration'
require 'concurrent/atomic/atomic_reference'
require 'concurrent/atomic/count_down_latch'
require 'concurrent/atomic/thread_local_var'
require 'concurrent/collection/copy_on_write_observer_set'
require 'concurrent/concern/observable'
require 'concurrent/synchronization/lockable_object'
module Concurrent
# `Agent` is inspired by Clojure's [agent](http://clojure.org/agents)
# function. An agent is a shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately. `Agent` is (mostly)
# functionally equivalent to Clojure's agent, except where the runtime
# prevents parity.
#
# Agents are reactive, not autonomous - there is no imperative message loop
# and no blocking receive. The state of an Agent should be itself immutable
# and the `#value` of an Agent is always immediately available for reading by
# any thread without any messages, i.e. observation does not require
# cooperation or coordination.
#
# Agent action dispatches are made using the various `#send` methods. These
# methods always return immediately. At some point later, in another thread,
# the following will happen:
#
# 1. The given `action` will be applied to the state of the Agent and the
# `args`, if any were supplied.
# 2. The return value of `action` will be passed to the validator lambda,
# if one has been set on the Agent.
# 3. If the validator succeeds or if no validator was given, the return value
# of the given `action` will become the new `#value` of the Agent. See
# `#initialize` for details.
# 4. If any observers were added to the Agent, they will be notified. See
# `#add_observer` for details.
# 5. If during the `action` execution any other dispatches are made (directly
# or indirectly), they will be held until after the `#value` of the Agent
# has been changed.
#
# If any exceptions are thrown by an action function, no nested dispatches
# will occur, and the exception will be cached in the Agent itself. When an
# Agent has errors cached, any subsequent interactions will immediately throw
# an exception, until the agent's errors are cleared. Agent errors can be
# examined with `#error` and the agent restarted with `#restart`.
#
# The actions of all Agents get interleaved amongst threads in a thread pool.
# At any point in time, at most one action for each Agent is being executed.
# Actions dispatched to an agent from another single agent or thread will
# occur in the order they were sent, potentially interleaved with actions
# dispatched to the same agent from other sources. The `#send` method should
# be used for actions that are CPU limited, while the `#send_off` method is
# appropriate for actions that may block on IO.
#
# Unlike in Clojure, `Agent` cannot participate in `Concurrent::TVar` transactions.
#
# ## Example
#
# ```
# def next_fibonacci(set = nil)
# return [0, 1] if set.nil?
# set + [set[-2..-1].reduce{|sum,x| sum + x }]
# end
#
# # create an agent with an initial value
# agent = Concurrent::Agent.new(next_fibonacci)
#
# # send a few update requests
# 5.times do
# agent.send{|set| next_fibonacci(set) }
# end
#
# # wait for them to complete
# agent.await
#
# # get the current value
# agent.value #=> [0, 1, 1, 2, 3, 5, 8]
# ```
#
# ## Observation
#
# Agents support observers through the {Concurrent::Observable} mixin module.
# Notification of observers occurs every time an action dispatch returns and
# the new value is successfully validated. Observation will *not* occur if the
# action raises an exception, if validation fails, or when a {#restart} occurs.
#
# When notified the observer will receive three arguments: `time`, `old_value`,
# and `new_value`. The `time` argument is the time at which the value change
# occurred. The `old_value` is the value of the Agent when the action began
# processing. The `new_value` is the value to which the Agent was set when the
# action completed. Note that `old_value` and `new_value` may be the same.
# This is not an error. It simply means that the action returned the same
# value.
#
# ## Nested Actions
#
# It is possible for an Agent action to post further actions back to itself.
# The nested actions will be enqueued normally then processed *after* the
# outer action completes, in the order they were sent, possibly interleaved
# with action dispatches from other threads. Nested actions never deadlock
# with one another and a failure in a nested action will never affect the
# outer action.
#
# Nested actions can be called using the Agent reference from the enclosing
# scope or by passing the reference in as a "send" argument. Nested actions
# cannot be posted using `self` from within the action block/proc/lambda; `self`
# in this context will not reference the Agent. The preferred method for
# dispatching nested actions is to pass the Agent as an argument. This allows
# Ruby to more effectively manage the closing scope.
#
# Prefer this:
#
# ```
# agent = Concurrent::Agent.new(0)
# agent.send(agent) do |value, this|
# this.send {|v| v + 42 }
# 3.14
# end
# agent.value #=> 45.14
# ```
#
# Over this:
#
# ```
# agent = Concurrent::Agent.new(0)
# agent.send do |value|
# agent.send {|v| v + 42 }
# 3.14
# end
# ```
#
# @!macro agent_await_warning
#
# **NOTE** Never, *under any circumstances*, call any of the "await" methods
# ({#await}, {#await_for}, {#await_for!}, and {#wait}) from within an action
# block/proc/lambda. The call will block the Agent and will always fail.
# Calling either {#await} or {#wait} (with a timeout of `nil`) will
# hopelessly deadlock the Agent with no possibility of recovery.
#
# @!macro thread_safe_variable_comparison
#
# @see http://clojure.org/Agents Clojure Agents
# @see http://clojure.org/state Values and Change - Clojure's approach to Identity and State
class Agent < Synchronization::LockableObject
include Concern::Observable
# The error modes recognized by the :error_mode option to #initialize.
ERROR_MODES = [:continue, :fail].freeze
private_constant :ERROR_MODES
# Sentinel returned by await jobs so #execute_next_job can distinguish
# them from normal actions and skip validation/observer notification.
AWAIT_FLAG = ::Object.new
private_constant :AWAIT_FLAG
# The action enqueued on behalf of #wait: trips the caller's latch and
# returns the sentinel so the Agent's value is left untouched.
AWAIT_ACTION = ->(value, latch) { latch.count_down; AWAIT_FLAG }
private_constant :AWAIT_ACTION
# Default no-op handler used when the :error_handler option is not given.
DEFAULT_ERROR_HANDLER = ->(agent, error) { nil }
private_constant :DEFAULT_ERROR_HANDLER
# Default validator; accepts every proposed value.
DEFAULT_VALIDATOR = ->(value) { true }
private_constant :DEFAULT_VALIDATOR
# Internal queue entry: the action to run, its extra arguments, the
# executor to run it on, and the enqueuing thread's identifier (used to
# position await jobs relative to that thread's pending actions).
Job = Struct.new(:action, :args, :executor, :caller)
private_constant :Job
# Raised during action processing or any other time in an Agent's lifecycle.
class Error < StandardError
def initialize(message = nil)
message ||= 'agent must be restarted before jobs can post'
super(message)
end
end
# Raised when a new value obtained during action processing or at `#restart`
# fails validation.
class ValidationError < Error
def initialize(message = nil)
message ||= 'invalid value'
super(message)
end
end
# The error mode this Agent is operating in. See {#initialize} for details.
attr_reader :error_mode
# Create a new `Agent` with the given initial value and options.
#
# The `:validator` option must be `nil` or a side-effect free proc/lambda
# which takes one argument. On any intended value change the validator, if
# provided, will be called. If the new value is invalid the validator should
# return `false` or raise an error.
#
# The `:error_handler` option must be `nil` or a proc/lambda which takes two
# arguments. When an action raises an error or validation fails, either by
# returning false or raising an error, the error handler will be called. The
# arguments to the error handler will be a reference to the agent itself and
# the error object which was raised.
#
# The `:error_mode` may be either `:continue` (the default if an error
# handler is given) or `:fail` (the default if error handler nil or not
# given).
#
# If an action being run by the agent throws an error or doesn't pass
# validation the error handler, if present, will be called. After the
# handler executes if the error mode is `:continue` the Agent will continue
# as if neither the action that caused the error nor the error itself ever
# happened.
#
# If the mode is `:fail` the Agent will become {#failed?} and will stop
# accepting new action dispatches. Any previously queued actions will be
# held until {#restart} is called. The {#value} method will still work,
# returning the value of the Agent before the error.
#
# @param [Object] initial the initial value
# @param [Hash] opts the configuration options
#
# @option opts [Symbol] :error_mode either `:continue` or `:fail`
# @option opts [nil, Proc] :error_handler the (optional) error handler
# @option opts [nil, Proc] :validator the (optional) validation procedure
def initialize(initial, opts = {})
super()
synchronize { ns_initialize(initial, opts) }
end
# The current value (state) of the Agent, irrespective of any pending or
# in-progress actions. The value is always available and is non-blocking.
#
# @return [Object] the current value
def value
@current.value # TODO (pitr 12-Sep-2015): broken unsafe read?
end
alias_method :deref, :value
# When {#failed?} and {#error_mode} is `:fail`, returns the error object
# which caused the failure, else `nil`. When {#error_mode} is `:continue`
# will *always* return `nil`.
#
# @return [nil, Error] the error which caused the failure when {#failed?}
def error
@error.value
end
alias_method :reason, :error
# @!macro agent_send
#
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Action dispatches are only allowed when the Agent
# is not {#failed?}.
#
# The action must be a block/proc/lambda which takes 1 or more arguments.
# The first argument is the current {#value} of the Agent. Any arguments
# passed to the send method via the `args` parameter will be passed to the
# action as the remaining arguments. The action must return the new value
# of the Agent.
#
# * {#send} and {#send!} should be used for actions that are CPU limited
# * {#send_off}, {#send_off!}, and {#<<} are appropriate for actions that
# may block on IO
# * {#send_via} and {#send_via!} are used when a specific executor is to
# be used for the action
#
# @param [Array<Object>] args zero or more arguments to be passed to
# the action
# @param [Proc] action the action dispatch to be enqueued
#
# @yield [agent, value, *args] process the old value and return the new
# @yieldparam [Object] value the current {#value} of the Agent
# @yieldparam [Array<Object>] args zero or more arguments to pass to the
# action
# @yieldreturn [Object] the new value of the Agent
#
# @!macro send_return
# @return [Boolean] true if the action is successfully enqueued, false if
# the Agent is {#failed?}
def send(*args, &action)
enqueue_action_job(action, args, Concurrent.global_fast_executor)
end
# @!macro agent_send
#
# @!macro send_bang_return_and_raise
# @return [Boolean] true if the action is successfully enqueued
# @raise [Concurrent::Agent::Error] if the Agent is {#failed?}
def send!(*args, &action)
raise Error.new unless send(*args, &action)
true
end
# @!macro agent_send
# @!macro send_return
def send_off(*args, &action)
enqueue_action_job(action, args, Concurrent.global_io_executor)
end
alias_method :post, :send_off
# @!macro agent_send
# @!macro send_bang_return_and_raise
def send_off!(*args, &action)
raise Error.new unless send_off(*args, &action)
true
end
# @!macro agent_send
# @!macro send_return
# @param [Concurrent::ExecutorService] executor the executor on which the
# action is to be dispatched
def send_via(executor, *args, &action)
enqueue_action_job(action, args, executor)
end
# @!macro agent_send
# @!macro send_bang_return_and_raise
# @param [Concurrent::ExecutorService] executor the executor on which the
# action is to be dispatched
def send_via!(executor, *args, &action)
raise Error.new unless send_via(executor, *args, &action)
true
end
# Dispatches an action to the Agent and returns immediately. Subsequently,
# in a thread from a thread pool, the {#value} will be set to the return
# value of the action. Appropriate for actions that may block on IO.
#
# @param [Proc] action the action dispatch to be enqueued
# @return [Concurrent::Agent] self
# @see #send_off
def <<(action)
send_off(&action)
self
end
# Blocks the current thread (indefinitely!) until all actions dispatched
# thus far, from this thread or nested by the Agent, have occurred. Will
# block when {#failed?}. Will never return if a failed Agent is {#restart}ed
# with `:clear_actions` true.
#
# Returns a reference to `self` to support method chaining:
#
# ```
# current_value = agent.await.value
# ```
#
# @return [Boolean] self
#
# @!macro agent_await_warning
def await
wait(nil)
self
end
# Blocks the current thread until all actions dispatched thus far, from this
# thread or nested by the Agent, have occurred, or the timeout (in seconds)
# has elapsed.
#
# @param [Float] timeout the maximum number of seconds to wait
# @return [Boolean] true if all actions complete before timeout else false
#
# @!macro agent_await_warning
def await_for(timeout)
wait(timeout.to_f)
end
# Blocks the current thread until all actions dispatched thus far, from this
# thread or nested by the Agent, have occurred, or the timeout (in seconds)
# has elapsed.
#
# @param [Float] timeout the maximum number of seconds to wait
# @return [Boolean] true if all actions complete before timeout
#
# @raise [Concurrent::TimeoutError] when timeout is reached
#
# @!macro agent_await_warning
def await_for!(timeout)
raise Concurrent::TimeoutError unless wait(timeout.to_f)
true
end
# Blocks the current thread until all actions dispatched thus far, from this
# thread or nested by the Agent, have occurred, or the timeout (in seconds)
# has elapsed. Will block indefinitely when timeout is nil or not given.
#
# Provided mainly for consistency with other classes in this library. Prefer
# the various `await` methods instead.
#
# @param [Float] timeout the maximum number of seconds to wait
# @return [Boolean] true if all actions complete before timeout else false
#
# @!macro agent_await_warning
def wait(timeout = nil)
latch = Concurrent::CountDownLatch.new(1)
enqueue_await_job(latch)
latch.wait(timeout)
end
# Is the Agent in a failed state?
#
# @see #restart
def failed?
!@error.value.nil?
end
alias_method :stopped?, :failed?
# When an Agent is {#failed?}, changes the Agent {#value} to `new_value`
# then un-fails the Agent so that action dispatches are allowed again. If
# the `:clear_actions` option is given and true, any actions queued on the
# Agent that were being held while it was failed will be discarded,
# otherwise those held actions will proceed. The `new_value` must pass the
# validator if any, or `restart` will raise an exception and the Agent will
# remain failed with its old {#value} and {#error}. Observers, if any, will
# not be notified of the new state.
#
# @param [Object] new_value the new value for the Agent once restarted
# @param [Hash] opts the configuration options
# @option opts [Symbol] :clear_actions true if all enqueued but unprocessed
# actions should be discarded on restart, else false (default: false)
# @return [Boolean] true
#
# @raise [Concurrent::Agent::Error] when not failed
def restart(new_value, opts = {})
clear_actions = opts.fetch(:clear_actions, false)
synchronize do
raise Error.new('agent is not failed') unless failed?
raise ValidationError unless ns_validate(new_value)
@current.value = new_value
@error.value = nil
@queue.clear if clear_actions
ns_post_next_job unless @queue.empty?
end
true
end
class << self
# Blocks the current thread (indefinitely!) until all actions dispatched
# thus far to all the given Agents, from this thread or nested by the
# given Agents, have occurred. Will block when any of the agents are
# failed. Will never return if a failed Agent is restarted with
# `:clear_actions` true.
#
# @param [Array<Concurrent::Agent>] agents the Agents on which to wait
# @return [Boolean] true
#
# @!macro agent_await_warning
def await(*agents)
agents.each { |agent| agent.await }
true
end
# Blocks the current thread until all actions dispatched thus far to all
# the given Agents, from this thread or nested by the given Agents, have
# occurred, or the timeout (in seconds) has elapsed.
#
# @param [Float] timeout the maximum number of seconds to wait
# @param [Array<Concurrent::Agent>] agents the Agents on which to wait
# @return [Boolean] true if all actions complete before timeout else false
#
# @!macro agent_await_warning
def await_for(timeout, *agents)
end_at = Concurrent.monotonic_time + timeout.to_f
# Integer#times returns the receiver when the loop completes, so `ok` is
# truthy on success and `false` when either `break false` fires.
ok = agents.length.times do |i|
break false if (delay = end_at - Concurrent.monotonic_time) < 0
break false unless agents[i].await_for(delay)
end
!!ok
end
# Blocks the current thread until all actions dispatched thus far to all
# the given Agents, from this thread or nested by the given Agents, have
# occurred, or the timeout (in seconds) has elapsed.
#
# @param [Float] timeout the maximum number of seconds to wait
# @param [Array<Concurrent::Agent>] agents the Agents on which to wait
# @return [Boolean] true if all actions complete before timeout
#
# @raise [Concurrent::TimeoutError] when timeout is reached
# @!macro agent_await_warning
def await_for!(timeout, *agents)
raise Concurrent::TimeoutError unless await_for(timeout, *agents)
true
end
end
private
# Initializes all internal state; must be called while holding the lock
# (see #initialize). Derives the error mode from the options: an explicit
# :error_mode wins, otherwise :continue when an error handler was given
# and :fail when not.
def ns_initialize(initial, opts)
@error_mode = opts[:error_mode]
@error_handler = opts[:error_handler]
if @error_mode && !ERROR_MODES.include?(@error_mode)
raise ArgumentError.new('unrecognized error mode')
elsif @error_mode.nil?
@error_mode = @error_handler ? :continue : :fail
end
@error_handler ||= DEFAULT_ERROR_HANDLER
@validator = opts.fetch(:validator, DEFAULT_VALIDATOR)
@current = Concurrent::AtomicReference.new(initial)
@error = Concurrent::AtomicReference.new(nil)
@caller = Concurrent::ThreadLocalVar.new(nil)
@queue = []
# NOTE(review): uses CopyOnNotifyObserverSet while the file's requires
# reference copy_on_write_observer_set — presumably loaded transitively;
# verify against the gem's require list.
self.observers = Collection::CopyOnNotifyObserverSet.new
end
# Wraps the action in a Job and enqueues it under the lock. The recorded
# caller is the nested-action owner when dispatched from within an action
# (see @caller in #execute_next_job) or this thread's object_id.
#
# @raise [ArgumentError] when no action block was supplied
# @return [Boolean] true when enqueued, false when the Agent is failed
def enqueue_action_job(action, args, executor)
raise ArgumentError.new('no action given') unless action
job = Job.new(action, args, executor, @caller.value || Thread.current.object_id)
synchronize { ns_enqueue_job(job) }
end
# Enqueues the await job used by #wait: it is inserted immediately after
# the last pending job enqueued by this thread, so the latch trips once
# all of this thread's prior dispatches have run. When this thread has
# nothing pending the latch is released immediately.
def enqueue_await_job(latch)
synchronize do
if (index = ns_find_last_job_for_thread)
job = Job.new(AWAIT_ACTION, [latch], Concurrent.global_immediate_executor,
Thread.current.object_id)
ns_enqueue_job(job, index+1)
else
latch.count_down
true
end
end
end
# Inserts the job (at the tail by default) and posts to the executor when
# this is the only queued job. Refuses normal jobs while failed; await
# jobs (non-nil index) are always accepted. Returns true when enqueued.
def ns_enqueue_job(job, index = nil)
# a non-nil index means this is an await job
return false if index.nil? && failed?
index ||= @queue.length
@queue.insert(index, job)
# if this is the only job, post to executor
ns_post_next_job if @queue.length == 1
true
end
# Posts execution of the head job to its executor. Caller must hold the
# lock and guarantee the queue is not empty.
def ns_post_next_job
@queue.first.executor.post { execute_next_job }
end
# Runs on the job's executor. The job is deliberately left at the head of
# the queue while it runs: ns_enqueue_job only posts when the queue length
# becomes 1, so leaving it in place prevents a concurrent double-post.
def execute_next_job
job = synchronize { @queue.first }
old_value = @current.value
@caller.value = job.caller # for nested actions
new_value = job.action.call(old_value, *job.args)
@caller.value = nil
# Await jobs return the sentinel; they never change the value, run the
# validator, or notify observers.
return if new_value == AWAIT_FLAG
if ns_validate(new_value)
@current.value = new_value
observers.notify_observers(Time.now, old_value, new_value)
else
handle_error(ValidationError.new)
end
rescue => error
handle_error(error)
ensure
# Remove the completed job and, unless the Agent has failed, post the
# next pending job (including any nested jobs enqueued by the action).
synchronize do
@queue.shift
unless failed? || @queue.empty?
ns_post_next_job
end
end
end
# Runs the validator against the proposed value; an exception raised by
# the validator counts as a validation failure.
def ns_validate(value)
@validator.call(value)
rescue
false
end
# Records the error (only in :fail mode, which blocks further posts) and
# invokes the error handler. Errors raised by the handler are ignored.
def handle_error(error)
# stop new jobs from posting
@error.value = error if @error_mode == :fail
@error_handler.call(self, error)
rescue
# do nothing
end
# Index of the last queued job enqueued by the current thread, or nil.
def ns_find_last_job_for_thread
@queue.rindex { |job| job.caller == Thread.current.object_id }
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/tuple.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/tuple.rb | require 'concurrent/atomic/atomic_reference'
module Concurrent
# A fixed size array with volatile (synchronized, thread safe) getters/setters.
# Mixes in Ruby's `Enumerable` module for enhanced search, sort, and traversal.
#
# @example
# tuple = Concurrent::Tuple.new(16)
#
# tuple.set(0, :foo) #=> :foo | volatile write
# tuple.get(0) #=> :foo | volatile read
# tuple.compare_and_set(0, :foo, :bar) #=> true | strong CAS
# tuple.cas(0, :foo, :baz) #=> false | strong CAS
# tuple.get(0) #=> :bar | volatile read
#
# @see https://en.wikipedia.org/wiki/Tuple Tuple entry at Wikipedia
# @see http://www.erlang.org/doc/reference_manual/data_types.html#id70396 Erlang Tuple
# @see http://ruby-doc.org/core-2.2.2/Enumerable.html Enumerable
class Tuple
  include Enumerable

  # The (fixed) number of cells in the tuple.
  attr_reader :size

  # Build a tuple of `size` independent atomic cells, each initially nil.
  #
  # @param [Integer] size the number of elements in the tuple
  def initialize(size)
    @size = size
    @tuple = ::Array.new(size) { Concurrent::AtomicReference.new }
  end

  # Volatile read of the element at the given index.
  #
  # @param [Integer] i the index from which to retrieve the value
  # @return [Object] the value at the given index, or nil when the index
  #   is out of bounds
  def get(i)
    return nil unless (0...@size).cover?(i)
    @tuple[i].get
  end
  alias_method :volatile_get, :get

  # Volatile write of the element at the given index.
  #
  # @param [Integer] i the index of the element to set
  # @param [Object] value the value to store at the given index
  # @return [Object] the newly stored value, or nil when the index is out
  #   of bounds
  def set(i, value)
    return nil unless (0...@size).cover?(i)
    @tuple[i].set(value)
  end
  alias_method :volatile_set, :set

  # Strong compare-and-set: store `new_value` at index `i` if and only if
  # the cell currently holds `old_value`.
  #
  # @param [Integer] i the index of the element to set
  # @param [Object] old_value the expected current value
  # @param [Object] new_value the replacement value
  # @return [Boolean] true when the swap succeeded, false on a value
  #   mismatch or an out-of-bounds index
  def compare_and_set(i, old_value, new_value)
    return false unless (0...@size).cover?(i)
    @tuple[i].compare_and_set(old_value, new_value)
  end
  alias_method :cas, :compare_and_set

  # Yield each element's current value in index order.
  #
  # @yieldparam [Object] value the volatile-read value of the cell
  def each
    @tuple.each do |cell|
      yield cell.get
    end
  end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/re_include.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/re_include.rb | module Concurrent
# Methods form module A included to a module B, which is already included into class C,
# will not be visible in the C class. If this module is extended to B then A's methods
# are correctly made visible to C.
#
# @example
# module A
# def a
# :a
# end
# end
#
# module B1
# end
#
# class C1
# include B1
# end
#
# module B2
# extend Concurrent::ReInclude
# end
#
# class C2
# include B2
# end
#
# B1.send :include, A
# B2.send :include, A
#
# C1.new.respond_to? :a # => false
# C2.new.respond_to? :a # => true
#
# @!visibility private
module ReInclude
  # @!visibility private
  # Record that `base` pulled this module in via `include` so that any
  # modules included here later can be forwarded to it.
  def included(base)
    remembered_targets << [:include, base]
    super(base)
  end

  # @!visibility private
  # Record that `base` pulled this module in via `extend` for the same
  # forwarding purpose.
  def extended(base)
    remembered_targets << [:extend, base]
    super(base)
  end

  # @!visibility private
  # Perform the normal include, then replay it onto every class/module
  # that previously included or extended this module.
  def include(*modules)
    result = super(*modules)
    modules.reverse_each do |included_module|
      remembered_targets.each do |how, target|
        target.send(how, included_module)
      end
    end
    result
  end

  private

  # Lazily-created list of [:include | :extend, base] pairs.
  def remembered_targets
    @re_include_to_bases ||= []
  end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/version.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/version.rb | module Concurrent
# The version string of the concurrent-ruby gem. Frozen to prevent
# accidental in-place mutation of the shared constant (this file has no
# frozen_string_literal pragma, so the literal is otherwise mutable).
VERSION = '1.3.6'.freeze
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/errors.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/errors.rb | module Concurrent
# Root of the concurrent-ruby exception hierarchy.
class Error < StandardError; end

# Raised when errors occur during configuration.
class ConfigurationError < Error; end

# Raised when an asynchronous operation is cancelled before execution.
class CancelledOperationError < Error; end

# Raised when a lifecycle method (such as `stop`) is called in an improper
# sequence or when the object is in an inappropriate state.
class LifecycleError < Error; end

# Raised when an attempt is made to violate an immutability guarantee.
class ImmutabilityError < Error; end

# Raised when an operation is attempted which is not legal given the
# receiver's current state
class IllegalOperationError < Error; end

# Raised when an object's methods are called when it has not been
# properly initialized.
class InitializationError < Error; end

# Raised when an object with a start/stop lifecycle has been started an
# excessive number of times. Often used in conjunction with a restart
# policy or strategy.
class MaxRestartFrequencyError < Error; end

# Raised when an attempt is made to modify an immutable object
# (such as an `IVar`) after its final state has been set.
class MultipleAssignmentError < Error
  # Extra diagnostic context captured when the error was created.
  attr_reader :inspection_data

  def initialize(message = nil, inspection_data = nil)
    @inspection_data = inspection_data
    super(message)
  end

  # Append the captured diagnostic data inside the default inspect output.
  def inspect
    "#{super[0..-2]} #{@inspection_data.inspect}>"
  end
end

# Raised by an `Executor` when it is unable to process a given task,
# possibly because of a reject policy or other internal error.
class RejectedExecutionError < Error; end

# Raised when any finite resource, such as a lock counter, exceeds its
# maximum limit/threshold.
class ResourceLimitError < Error; end

# Raised when an operation times out.
class TimeoutError < Error; end

# Aggregates multiple exceptions into a single error whose message lists
# every wrapped error together with its backtrace, when present.
class MultipleErrors < Error
  # @return [Array<Exception>] the wrapped exceptions
  attr_reader :errors

  def initialize(errors, message = "#{errors.size} errors")
    @errors = errors
    lines = [*message]
    errors.each do |error|
      lines << format('%s (%s)', error.message, error.class)
      lines.concat(error.backtrace || [])
    end
    super(lines.join("\n"))
  end
end

# @!macro internal_implementation_note
class ConcurrentUpdateError < ThreadError
  # frozen pre-allocated backtrace to speed ConcurrentUpdateError
  CONC_UP_ERR_BACKTRACE = ['backtrace elided; set verbose to enable'].freeze
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/options.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/options.rb | require 'concurrent/configuration'
module Concurrent
# @!visibility private
module Options
  # Resolve the `Executor` requested by an options hash.
  #
  # @param [Hash] opts the options defining the requested executor
  # @option opts [Executor] :executor when set use the given `Executor` instance.
  #   Three special symbols are also supported: `:fast` (global fast executor),
  #   `:io` (global io executor), and `:immediate` (global immediate executor).
  #
  # @return [Executor, nil] the requested executor, or nil when the option
  #   was not specified
  #
  # @!visibility private
  def self.executor_from_options(opts = {}) # :nodoc:
    identifier = opts.fetch(:executor, nil)
    identifier ? executor(identifier) : nil
  end

  # Map an executor identifier (symbol or executor instance) onto an
  # actual executor object.
  #
  # @raise [ArgumentError] when the identifier is not recognized
  def self.executor(executor_identifier)
    case executor_identifier
    when :fast then Concurrent.global_fast_executor
    when :io then Concurrent.global_io_executor
    when :immediate then Concurrent.global_immediate_executor
    when Concurrent::ExecutorService then executor_identifier
    else
      raise ArgumentError, "executor not recognized by '#{executor_identifier}'"
    end
  end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/maybe.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/maybe.rb | require 'concurrent/synchronization/object'
module Concurrent
# A `Maybe` encapsulates an optional value. A `Maybe` either contains a value
# of (represented as `Just`), or it is empty (represented as `Nothing`). Using
# `Maybe` is a good way to deal with errors or exceptional cases without
# resorting to drastic measures such as exceptions.
#
# `Maybe` is a replacement for the use of `nil` with better type checking.
#
# For compatibility with {Concurrent::Concern::Obligation} the predicate and
# accessor methods are aliased as `fulfilled?`, `rejected?`, `value`, and
# `reason`.
#
# ## Motivation
#
# A common pattern in languages with pattern matching, such as Erlang and
# Haskell, is to return *either* a value *or* an error from a function
# Consider this Erlang code:
#
# ```erlang
# case file:consult("data.dat") of
# {ok, Terms} -> do_something_useful(Terms);
# {error, Reason} -> lager:error(Reason)
# end.
# ```
#
# In this example the standard library function `file:consult` returns a
# [tuple](http://erlang.org/doc/reference_manual/data_types.html#id69044)
# with two elements: an [atom](http://erlang.org/doc/reference_manual/data_types.html#id64134)
# (similar to a ruby symbol) and a variable containing ancillary data. On
# success it returns the atom `ok` and the data from the file. On failure it
# returns `error` and a string with an explanation of the problem. With this
# pattern there is no ambiguity regarding success or failure. If the file is
# empty the return value cannot be misinterpreted as an error. And when an
# error occurs the return value provides useful information.
#
# In Ruby we tend to return `nil` when an error occurs or else we raise an
# exception. Both of these idioms are problematic. Returning `nil` is
# ambiguous because `nil` may also be a valid value. It also lacks
# information pertaining to the nature of the error. Raising an exception
# is both expensive and usurps the normal flow of control. All of these
# problems can be solved with the use of a `Maybe`.
#
# A `Maybe` is unambiguous with regard to whether or not it contains a value.
# When `Just` it contains a value, when `Nothing` it does not. When `Just`
# the value it contains may be `nil`, which is perfectly valid. When
# `Nothing` the reason for the lack of a value is contained as well. The
# previous Erlang example can be duplicated in Ruby in a principled way by
# having functions return `Maybe` objects:
#
# ```ruby
# result = MyFileUtils.consult("data.dat") # returns a Maybe
# if result.just?
# do_something_useful(result.value) # or result.just
# else
# logger.error(result.reason) # or result.nothing
# end
# ```
#
# @example Returning a Maybe from a Function
# module MyFileUtils
# def self.consult(path)
# file = File.open(path, 'r')
# Concurrent::Maybe.just(file.read)
# rescue => ex
# return Concurrent::Maybe.nothing(ex)
# ensure
# file.close if file
# end
# end
#
# maybe = MyFileUtils.consult('bogus.file')
# maybe.just? #=> false
# maybe.nothing? #=> true
# maybe.reason #=> #<Errno::ENOENT: No such file or directory @ rb_sysopen - bogus.file>
#
# maybe = MyFileUtils.consult('README.md')
# maybe.just? #=> true
# maybe.nothing? #=> false
# maybe.value #=> "# Concurrent Ruby\n[![Gem Version..."
#
# @example Using Maybe with a Block
# result = Concurrent::Maybe.from do
# Client.find(10) # Client is an ActiveRecord model
# end
#
# # -- if the record was found
# result.just? #=> true
# result.value #=> #<Client id: 10, first_name: "Ryan">
#
# # -- if the record was not found
# result.just? #=> false
# result.reason #=> ActiveRecord::RecordNotFound
#
# @example Using Maybe with the Null Object Pattern
# # In a Rails controller...
# result = ClientService.new(10).find # returns a Maybe
# render json: result.or(NullClient.new)
#
# @see https://hackage.haskell.org/package/base-4.2.0.1/docs/Data-Maybe.html Haskell Data.Maybe
# @see https://github.com/purescript/purescript-maybe/blob/master/docs/Data.Maybe.md PureScript Data.Maybe
class Maybe < Synchronization::Object
include Comparable
safe_initialization!
# Indicates that the given attribute has not been set.
# When `Just` the {#nothing} getter will return `NONE`.
# When `Nothing` the {#just} getter will return `NONE`.
NONE = ::Object.new.freeze
# The value of a `Maybe` when `Just`. Will be `NONE` when `Nothing`.
attr_reader :just
# The reason for the `Maybe` when `Nothing`. Will be `NONE` when `Just`.
attr_reader :nothing
private_class_method :new
# Create a new `Maybe` using the given block.
#
# Runs the given block passing all function arguments to the block as block
# arguments. If the block runs to completion without raising an exception
# a new `Just` is created with the value set to the return value of the
# block. If the block raises an exception a new `Nothing` is created with
# the reason being set to the raised exception.
#
# @param [Array<Object>] args Zero or more arguments to pass to the block.
# @yield The block from which to create a new `Maybe`.
# @yieldparam [Array<Object>] args Zero or more block arguments passed as
# arguments to the function.
#
# @return [Maybe] The newly created object.
#
# @raise [ArgumentError] when no block given.
def self.from(*args)
raise ArgumentError.new('no block given') unless block_given?
begin
value = yield(*args)
return new(value, NONE)
rescue => ex
return new(NONE, ex)
end
end
# Create a new `Just` with the given value.
#
# @param [Object] value The value to set for the new `Maybe` object.
#
# @return [Maybe] The newly created object.
def self.just(value)
return new(value, NONE)
end
# Create a new `Nothing` with the given (optional) reason.
#
# @param [Exception] error The reason to set for the new `Maybe` object.
# When given a string a new `StandardError` will be created with the
# argument as the message. When no argument is given a new
# `StandardError` with an empty message will be created.
#
# @return [Maybe] The newly created object.
def self.nothing(error = '')
if error.is_a?(Exception)
nothing = error
else
nothing = StandardError.new(error.to_s)
end
return new(NONE, nothing)
end
# Is this `Maybe` a `Just` (successfully fulfilled with a value)?
#
# @return [Boolean] True if `Just` or false if `Nothing`.
def just?
! nothing?
end
alias :fulfilled? :just?
# Is this `Maybe` a `nothing` (rejected with an exception upon fulfillment)?
#
# @return [Boolean] True if `Nothing` or false if `Just`.
def nothing?
@nothing != NONE
end
alias :rejected? :nothing?
alias :value :just
alias :reason :nothing
# Comparison operator.
#
# @return [Integer] 0 if self and other are both `Nothing`;
# -1 if self is `Nothing` and other is `Just`;
# 1 if self is `Just` and other is nothing;
# `self.just <=> other.just` if both self and other are `Just`.
def <=>(other)
if nothing?
other.nothing? ? 0 : -1
else
other.nothing? ? 1 : just <=> other.just
end
end
# Return either the value of self or the given default value.
#
# @return [Object] The value of self when `Just`; else the given default.
def or(other)
just? ? just : other
end
private
# Create a new `Maybe` with the given attributes.
#
# @param [Object] just The value when `Just` else `NONE`.
# @param [Exception, Object] nothing The exception when `Nothing` else `NONE`.
#
# @return [Maybe] The new `Maybe`.
#
# @!visibility private
def initialize(just, nothing)
@just = just
@nothing = nothing
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/future.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/future.rb | require 'thread'
require 'concurrent/constants'
require 'concurrent/errors'
require 'concurrent/ivar'
require 'concurrent/executor/safe_task_executor'
require 'concurrent/options'
# TODO (pitr-ch 14-Mar-2017): deprecate, Future, Promise, etc.
module Concurrent
# {include:file:docs-source/future.md}
#
# @!macro copy_options
#
# @see http://ruby-doc.org/stdlib-2.1.1/libdoc/observer/rdoc/Observable.html Ruby Observable module
# @see http://clojuredocs.org/clojure_core/clojure.core/future Clojure's future function
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html java.util.concurrent.Future
class Future < IVar
# Create a new `Future` in the `:unscheduled` state.
#
# @yield the asynchronous operation to perform
#
# @!macro executor_and_deref_options
#
# @option opts [object, Array] :args zero or more arguments to be passed the task
# block on execution
#
# @raise [ArgumentError] if no block is given
def initialize(opts = {}, &block)
raise ArgumentError.new('no block given') unless block_given?
super(NULL, opts.merge(__task_from_block__: block), &nil)
end
# Execute an `:unscheduled` `Future`. Immediately sets the state to `:pending` and
# passes the block to a new thread/thread pool for eventual execution.
# Does nothing if the `Future` is in any state other than `:unscheduled`.
#
# @return [Future] a reference to `self`
#
# @example Instance and execute in separate steps
# future = Concurrent::Future.new{ sleep(1); 42 }
# future.state #=> :unscheduled
# future.execute
# future.state #=> :pending
#
# @example Instance and execute in one line
# future = Concurrent::Future.new{ sleep(1); 42 }.execute
# future.state #=> :pending
def execute
if compare_and_set_state(:pending, :unscheduled)
@executor.post{ safe_execute(@task, @args) }
self
end
end
# Create a new `Future` object with the given block, execute it, and return the
# `:pending` object.
#
# @yield the asynchronous operation to perform
#
# @!macro executor_and_deref_options
#
# @option opts [object, Array] :args zero or more arguments to be passed the task
# block on execution
#
# @raise [ArgumentError] if no block is given
#
# @return [Future] the newly created `Future` in the `:pending` state
#
# @example
# future = Concurrent::Future.execute{ sleep(1); 42 }
# future.state #=> :pending
def self.execute(opts = {}, &block)
Future.new(opts, &block).execute
end
# @!macro ivar_set_method
def set(value = NULL, &block)
check_for_block_or_value!(block_given?, value)
synchronize do
if @state != :unscheduled
raise MultipleAssignmentError
else
@task = block || Proc.new { value }
end
end
execute
end
# Attempt to cancel the operation if it has not already processed.
# The operation can only be cancelled while still `pending`. It cannot
# be cancelled once it has begun processing or has completed.
#
# @return [Boolean] was the operation successfully cancelled.
def cancel
if compare_and_set_state(:cancelled, :pending)
complete(false, nil, CancelledOperationError.new)
true
else
false
end
end
# Has the operation been successfully cancelled?
#
# @return [Boolean]
def cancelled?
state == :cancelled
end
# Wait the given number of seconds for the operation to complete.
# On timeout attempt to cancel the operation.
#
# @param [Numeric] timeout the maximum time in seconds to wait.
# @return [Boolean] true if the operation completed before the timeout
# else false
def wait_or_cancel(timeout)
wait(timeout)
if complete?
true
else
cancel
false
end
end
protected
def ns_initialize(value, opts)
super
@state = :unscheduled
@task = opts[:__task_from_block__]
@executor = Options.executor_from_options(opts) || Concurrent.global_io_executor
@args = get_arguments_from(opts)
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/array.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/array.rb | require 'concurrent/utility/engine'
require 'concurrent/thread_safe/util'
module Concurrent
# @!macro concurrent_array
#
# A thread-safe subclass of Array. This version locks against the object
# itself for every method call, ensuring only one thread can be reading
# or writing at a time. This includes iteration methods like `#each`.
#
# @note `a += b` is **not** a **thread-safe** operation on
# `Concurrent::Array`. It reads array `a`, then it creates new `Concurrent::Array`
# which is concatenation of `a` and `b`, then it writes the concatenation to `a`.
# The read and write are independent operations they do not form a single atomic
# operation therefore when two `+=` operations are executed concurrently updates
# may be lost. Use `#concat` instead.
#
# @see http://ruby-doc.org/core/Array.html Ruby standard library `Array`
# @!macro internal_implementation_note
ArrayImplementation = case
when Concurrent.on_cruby?
# Array is not fully thread-safe on CRuby, see
# https://github.com/ruby-concurrency/concurrent-ruby/issues/929
# So we will need to add synchronization here
::Array
when Concurrent.on_jruby?
require 'jruby/synchronized'
class JRubyArray < ::Array
include JRuby::Synchronized
end
JRubyArray
when Concurrent.on_truffleruby?
require 'concurrent/thread_safe/util/data_structures'
class TruffleRubyArray < ::Array
end
ThreadSafe::Util.make_synchronized_on_truffleruby TruffleRubyArray
TruffleRubyArray
else
warn 'Possibly unsupported Ruby implementation'
::Array
end
private_constant :ArrayImplementation
# @!macro concurrent_array
class Array < ArrayImplementation
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/set.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/set.rb | require 'concurrent/utility/engine'
require 'concurrent/thread_safe/util'
require 'set'
module Concurrent
# @!macro concurrent_set
#
# A thread-safe subclass of Set. This version locks against the object
# itself for every method call, ensuring only one thread can be reading
# or writing at a time. This includes iteration methods like `#each`.
#
# @note `a += b` is **not** a **thread-safe** operation on
# `Concurrent::Set`. It reads Set `a`, then it creates new `Concurrent::Set`
# which is union of `a` and `b`, then it writes the union to `a`.
# The read and write are independent operations they do not form a single atomic
# operation therefore when two `+=` operations are executed concurrently updates
# may be lost. Use `#merge` instead.
#
# @see http://ruby-doc.org/stdlib-2.4.0/libdoc/set/rdoc/Set.html Ruby standard library `Set`
# @!macro internal_implementation_note
SetImplementation = case
when Concurrent.on_cruby?
# The CRuby implementation of Set is written in Ruby itself and is
# not thread safe for certain methods.
require 'monitor'
require 'concurrent/thread_safe/util/data_structures'
class CRubySet < ::Set
end
ThreadSafe::Util.make_synchronized_on_cruby CRubySet
CRubySet
when Concurrent.on_jruby?
require 'jruby/synchronized'
class JRubySet < ::Set
include JRuby::Synchronized
end
JRubySet
when Concurrent.on_truffleruby?
require 'concurrent/thread_safe/util/data_structures'
class TruffleRubySet < ::Set
end
ThreadSafe::Util.make_synchronized_on_truffleruby TruffleRubySet
TruffleRubySet
else
warn 'Possibly unsupported Ruby implementation'
::Set
end
private_constant :SetImplementation
# @!macro concurrent_set
class Set < SetImplementation
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/constants.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/constants.rb | module Concurrent
# Various classes within allows for +nil+ values to be stored,
# so a special +NULL+ token is required to indicate the "nil-ness".
# @!visibility private
NULL = ::Object.new
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/timer_task.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/timer_task.rb | require 'concurrent/collection/copy_on_notify_observer_set'
require 'concurrent/concern/dereferenceable'
require 'concurrent/concern/observable'
require 'concurrent/atomic/atomic_boolean'
require 'concurrent/atomic/atomic_fixnum'
require 'concurrent/executor/executor_service'
require 'concurrent/executor/ruby_executor_service'
require 'concurrent/executor/safe_task_executor'
require 'concurrent/scheduled_task'
module Concurrent
# A very common concurrency pattern is to run a thread that performs a task at
# regular intervals. The thread that performs the task sleeps for the given
# interval then wakes up and performs the task. Lather, rinse, repeat... This
# pattern causes two problems. First, it is difficult to test the business
# logic of the task because the task itself is tightly coupled with the
# concurrency logic. Second, an exception raised while performing the task can
# cause the entire thread to abend. In a long-running application where the
# task thread is intended to run for days/weeks/years a crashed task thread
# can pose a significant problem. `TimerTask` alleviates both problems.
#
# When a `TimerTask` is launched it starts a thread for monitoring the
# execution interval. The `TimerTask` thread does not perform the task,
# however. Instead, the TimerTask launches the task on a separate thread.
# Should the task experience an unrecoverable crash only the task thread will
# crash. This makes the `TimerTask` very fault tolerant. Additionally, the
# `TimerTask` thread can respond to the success or failure of the task,
# performing logging or ancillary operations.
#
# One other advantage of `TimerTask` is that it forces the business logic to
# be completely decoupled from the concurrency logic. The business logic can
# be tested separately then passed to the `TimerTask` for scheduling and
# running.
#
# A `TimerTask` supports two different types of interval calculations.
# A fixed delay will always wait the same amount of time between the
# completion of one task and the start of the next. A fixed rate will
# attempt to maintain a constant rate of execution regardless of the
# duration of the task. For example, if a fixed rate task is scheduled
# to run every 60 seconds but the task itself takes 10 seconds to
# complete, the next task will be scheduled to run 50 seconds after
# the start of the previous task. If the task takes 70 seconds to
# complete, the next task will be start immediately after the previous
# task completes. Tasks will not be executed concurrently.
#
# In some cases it may be necessary for a `TimerTask` to affect its own
# execution cycle. To facilitate this, a reference to the TimerTask instance
# is passed as an argument to the provided block every time the task is
# executed.
#
# The `TimerTask` class includes the `Dereferenceable` mixin module so the
# result of the last execution is always available via the `#value` method.
# Dereferencing options can be passed to the `TimerTask` during construction or
# at any later time using the `#set_deref_options` method.
#
# `TimerTask` supports notification through the Ruby standard library
# {http://ruby-doc.org/stdlib-2.0/libdoc/observer/rdoc/Observable.html
# Observable} module. On execution the `TimerTask` will notify the observers
# with three arguments: time of execution, the result of the block (or nil on
# failure), and any raised exceptions (or nil on success).
#
# @!macro copy_options
#
# @example Basic usage
# task = Concurrent::TimerTask.new{ puts 'Boom!' }
# task.execute
#
# task.execution_interval #=> 60 (default)
#
# # wait 60 seconds...
# #=> 'Boom!'
#
# task.shutdown #=> true
#
# @example Configuring `:execution_interval`
# task = Concurrent::TimerTask.new(execution_interval: 5) do
# puts 'Boom!'
# end
#
# task.execution_interval #=> 5
#
# @example Immediate execution with `:run_now`
# task = Concurrent::TimerTask.new(run_now: true){ puts 'Boom!' }
# task.execute
#
# #=> 'Boom!'
#
# @example Configuring `:interval_type` with either :fixed_delay or :fixed_rate, default is :fixed_delay
# task = Concurrent::TimerTask.new(execution_interval: 5, interval_type: :fixed_rate) do
# puts 'Boom!'
# end
# task.interval_type #=> :fixed_rate
#
# @example Last `#value` and `Dereferenceable` mixin
# task = Concurrent::TimerTask.new(
# dup_on_deref: true,
# execution_interval: 5
# ){ Time.now }
#
# task.execute
# Time.now #=> 2013-11-07 18:06:50 -0500
# sleep(10)
# task.value #=> 2013-11-07 18:06:55 -0500
#
# @example Controlling execution from within the block
# timer_task = Concurrent::TimerTask.new(execution_interval: 1) do |task|
# task.execution_interval.to_i.times{ print 'Boom! ' }
# print "\n"
# task.execution_interval += 1
# if task.execution_interval > 5
# puts 'Stopping...'
# task.shutdown
# end
# end
#
# timer_task.execute
# #=> Boom!
# #=> Boom! Boom!
# #=> Boom! Boom! Boom!
# #=> Boom! Boom! Boom! Boom!
# #=> Boom! Boom! Boom! Boom! Boom!
# #=> Stopping...
#
# @example Observation
# class TaskObserver
# def update(time, result, ex)
# if result
# print "(#{time}) Execution successfully returned #{result}\n"
# else
# print "(#{time}) Execution failed with error #{ex}\n"
# end
# end
# end
#
# task = Concurrent::TimerTask.new(execution_interval: 1){ 42 }
# task.add_observer(TaskObserver.new)
# task.execute
# sleep 4
#
# #=> (2013-10-13 19:08:58 -0400) Execution successfully returned 42
# #=> (2013-10-13 19:08:59 -0400) Execution successfully returned 42
# #=> (2013-10-13 19:09:00 -0400) Execution successfully returned 42
# task.shutdown
#
# task = Concurrent::TimerTask.new(execution_interval: 1){ sleep }
# task.add_observer(TaskObserver.new)
# task.execute
#
# #=> (2013-10-13 19:07:25 -0400) Execution timed out
# #=> (2013-10-13 19:07:27 -0400) Execution timed out
# #=> (2013-10-13 19:07:29 -0400) Execution timed out
# task.shutdown
#
# task = Concurrent::TimerTask.new(execution_interval: 1){ raise StandardError }
# task.add_observer(TaskObserver.new)
# task.execute
#
# #=> (2013-10-13 19:09:37 -0400) Execution failed with error StandardError
# #=> (2013-10-13 19:09:38 -0400) Execution failed with error StandardError
# #=> (2013-10-13 19:09:39 -0400) Execution failed with error StandardError
# task.shutdown
#
# @see http://ruby-doc.org/stdlib-2.0/libdoc/observer/rdoc/Observable.html
# @see http://docs.oracle.com/javase/7/docs/api/java/util/TimerTask.html
class TimerTask < RubyExecutorService
include Concern::Dereferenceable
include Concern::Observable
# Default `:execution_interval` in seconds.
EXECUTION_INTERVAL = 60
# Maintain the interval between the end of one execution and the start of the next execution.
FIXED_DELAY = :fixed_delay
# Maintain the interval between the start of one execution and the start of the next.
# If execution time exceeds the interval, the next execution will start immediately
# after the previous execution finishes. Executions will not run concurrently.
FIXED_RATE = :fixed_rate
# Default `:interval_type`
DEFAULT_INTERVAL_TYPE = FIXED_DELAY
# Create a new TimerTask with the given task and configuration.
#
# @!macro timer_task_initialize
# @param [Hash] opts the options defining task execution.
# @option opts [Float] :execution_interval number of seconds between
# task executions (default: EXECUTION_INTERVAL)
# @option opts [Boolean] :run_now Whether to run the task immediately
# upon instantiation or to wait until the first # execution_interval
# has passed (default: false)
# @options opts [Symbol] :interval_type method to calculate the interval
# between executions, can be either :fixed_rate or :fixed_delay.
# (default: :fixed_delay)
# @option opts [Executor] executor, default is `global_io_executor`
#
# @!macro deref_options
#
# @raise ArgumentError when no block is given.
#
# @yield to the block after :execution_interval seconds have passed since
# the last yield
# @yieldparam task a reference to the `TimerTask` instance so that the
# block can control its own lifecycle. Necessary since `self` will
# refer to the execution context of the block rather than the running
# `TimerTask`.
#
# @return [TimerTask] the new `TimerTask`
def initialize(opts = {}, &task)
raise ArgumentError.new('no block given') unless block_given?
super
set_deref_options opts
end
# Is the executor running?
#
# @return [Boolean] `true` when running, `false` when shutting down or shutdown
def running?
@running.true?
end
# Execute a previously created `TimerTask`.
#
# @return [TimerTask] a reference to `self`
#
# @example Instance and execute in separate steps
# task = Concurrent::TimerTask.new(execution_interval: 10){ print "Hello World\n" }
# task.running? #=> false
# task.execute
# task.running? #=> true
#
# @example Instance and execute in one line
# task = Concurrent::TimerTask.new(execution_interval: 10){ print "Hello World\n" }.execute
# task.running? #=> true
def execute
synchronize do
if @running.false?
@running.make_true
@age.increment
schedule_next_task(@run_now ? 0 : @execution_interval)
end
end
self
end
# Create and execute a new `TimerTask`.
#
# @!macro timer_task_initialize
#
# @example
# task = Concurrent::TimerTask.execute(execution_interval: 10){ print "Hello World\n" }
# task.running? #=> true
def self.execute(opts = {}, &task)
TimerTask.new(opts, &task).execute
end
# @!attribute [rw] execution_interval
# @return [Fixnum] Number of seconds after the task completes before the
# task is performed again.
def execution_interval
synchronize { @execution_interval }
end
# @!attribute [rw] execution_interval
# @return [Fixnum] Number of seconds after the task completes before the
# task is performed again.
def execution_interval=(value)
if (value = value.to_f) <= 0.0
raise ArgumentError.new('must be greater than zero')
else
synchronize { @execution_interval = value }
end
end
# @!attribute [r] interval_type
# @return [Symbol] method to calculate the interval between executions
attr_reader :interval_type
# @!attribute [rw] timeout_interval
# @return [Fixnum] Number of seconds the task can run before it is
# considered to have failed.
def timeout_interval
warn 'TimerTask timeouts are now ignored as these were not able to be implemented correctly'
end
# @!attribute [rw] timeout_interval
# @return [Fixnum] Number of seconds the task can run before it is
# considered to have failed.
def timeout_interval=(value)
warn 'TimerTask timeouts are now ignored as these were not able to be implemented correctly'
end
private :post, :<<
private
def ns_initialize(opts, &task)
set_deref_options(opts)
self.execution_interval = opts[:execution] || opts[:execution_interval] || EXECUTION_INTERVAL
if opts[:interval_type] && ![FIXED_DELAY, FIXED_RATE].include?(opts[:interval_type])
raise ArgumentError.new('interval_type must be either :fixed_delay or :fixed_rate')
end
if opts[:timeout] || opts[:timeout_interval]
warn 'TimeTask timeouts are now ignored as these were not able to be implemented correctly'
end
@run_now = opts[:now] || opts[:run_now]
@interval_type = opts[:interval_type] || DEFAULT_INTERVAL_TYPE
@task = Concurrent::SafeTaskExecutor.new(task)
@executor = opts[:executor] || Concurrent.global_io_executor
@running = Concurrent::AtomicBoolean.new(false)
@age = Concurrent::AtomicFixnum.new(0)
@value = nil
self.observers = Collection::CopyOnNotifyObserverSet.new
end
# @!visibility private
def ns_shutdown_execution
@running.make_false
super
end
# @!visibility private
def ns_kill_execution
@running.make_false
super
end
# @!visibility private
def schedule_next_task(interval = execution_interval)
ScheduledTask.execute(interval, executor: @executor, args: [Concurrent::Event.new, @age.value], &method(:execute_task))
nil
end
# @!visibility private
def execute_task(completion, age_when_scheduled)
return nil unless @running.true?
return nil unless @age.value == age_when_scheduled
start_time = Concurrent.monotonic_time
_success, value, reason = @task.execute(self)
if completion.try?
self.value = value
schedule_next_task(calculate_next_interval(start_time))
time = Time.now
observers.notify_observers do
[time, self.value, reason]
end
end
nil
end
# @!visibility private
def calculate_next_interval(start_time)
if @interval_type == FIXED_RATE
run_time = Concurrent.monotonic_time - start_time
[execution_interval - run_time, 0].max
else # FIXED_DELAY
execution_interval
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomics.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomics.rb | require 'concurrent/atomic/atomic_reference'
require 'concurrent/atomic/atomic_boolean'
require 'concurrent/atomic/atomic_fixnum'
require 'concurrent/atomic/cyclic_barrier'
require 'concurrent/atomic/count_down_latch'
require 'concurrent/atomic/event'
require 'concurrent/atomic/read_write_lock'
require 'concurrent/atomic/reentrant_read_write_lock'
require 'concurrent/atomic/semaphore'
require 'concurrent/atomic/thread_local_var'
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/delay.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/delay.rb | require 'thread'
require 'concurrent/concern/obligation'
require 'concurrent/executor/immediate_executor'
require 'concurrent/synchronization/lockable_object'
module Concurrent
# This file has circular require issues. It must be autoloaded here.
autoload :Options, 'concurrent/options'
# Lazy evaluation of a block yielding an immutable result. Useful for
# expensive operations that may never be needed. It may be non-blocking,
# supports the `Concern::Obligation` interface, and accepts the injection of
# custom executor upon which to execute the block. Processing of
# block will be deferred until the first time `#value` is called.
# At that time the caller can choose to return immediately and let
# the block execute asynchronously, block indefinitely, or block
# with a timeout.
#
# When a `Delay` is created its state is set to `pending`. The value and
# reason are both `nil`. The first time the `#value` method is called the
# enclosed operation will be run and the calling thread will block. Other
# threads attempting to call `#value` will block as well. Once the operation
# is complete the *value* will be set to the result of the operation or the
# *reason* will be set to the raised exception, as appropriate. All threads
# blocked on `#value` will return. Subsequent calls to `#value` will immediately
# return the cached value. The operation will only be run once. This means that
# any side effects created by the operation will only happen once as well.
#
# `Delay` includes the `Concurrent::Concern::Dereferenceable` mixin to support thread
# safety of the reference returned by `#value`.
#
# @!macro copy_options
#
# @!macro delay_note_regarding_blocking
# @note The default behavior of `Delay` is to block indefinitely when
# calling either `value` or `wait`, executing the delayed operation on
# the current thread. This makes the `timeout` value completely
# irrelevant. To enable non-blocking behavior, use the `executor`
# constructor option. This will cause the delayed operation to be
# execute on the given executor, allowing the call to timeout.
#
# @see Concurrent::Concern::Dereferenceable
class Delay < Synchronization::LockableObject
include Concern::Obligation
# NOTE: Because the global thread pools are lazy-loaded with these objects
# there is a performance hit every time we post a new task to one of these
# thread pools. Subsequently it is critical that `Delay` perform as fast
# as possible post-completion. This class has been highly optimized using
# the benchmark script `examples/lazy_and_delay.rb`. Do NOT attempt to
# DRY-up this class or perform other refactoring with running the
# benchmarks and ensuring that performance is not negatively impacted.
# Create a new `Delay` in the `:pending` state.
#
# @!macro executor_and_deref_options
#
# @yield the delayed operation to perform
#
# @raise [ArgumentError] if no block is given
def initialize(opts = {}, &block)
raise ArgumentError.new('no block given') unless block_given?
super(&nil)
synchronize { ns_initialize(opts, &block) }
end
# Return the value this object represents after applying the options
# specified by the `#set_deref_options` method. If the delayed operation
# raised an exception this method will return nil. The exception object
# can be accessed via the `#reason` method.
#
# @param [Numeric] timeout the maximum number of seconds to wait
# @return [Object] the current value of the object
#
# @!macro delay_note_regarding_blocking
def value(timeout = nil)
if @executor # TODO (pitr 12-Sep-2015): broken unsafe read?
super
else
# this function has been optimized for performance and
# should not be modified without running new benchmarks
synchronize do
execute = @evaluation_started = true unless @evaluation_started
if execute
begin
set_state(true, @task.call, nil)
rescue => ex
set_state(false, nil, ex)
end
elsif incomplete?
raise IllegalOperationError, 'Recursive call to #value during evaluation of the Delay'
end
end
if @do_nothing_on_deref
@value
else
apply_deref_options(@value)
end
end
end
# Return the value this object represents after applying the options
# specified by the `#set_deref_options` method. If the delayed operation
# raised an exception, this method will raise that exception (even when)
# the operation has already been executed).
#
# @param [Numeric] timeout the maximum number of seconds to wait
# @return [Object] the current value of the object
# @raise [Exception] when `#rejected?` raises `#reason`
#
# @!macro delay_note_regarding_blocking
def value!(timeout = nil)
if @executor
super
else
result = value
raise @reason if @reason
result
end
end
# Return the value this object represents after applying the options
# specified by the `#set_deref_options` method.
#
# @param [Integer] timeout (nil) the maximum number of seconds to wait for
# the value to be computed. When `nil` the caller will block indefinitely.
#
# @return [Object] self
#
# @!macro delay_note_regarding_blocking
def wait(timeout = nil)
if @executor
execute_task_once
super(timeout)
else
value
end
self
end
# Reconfigures the block returning the value if still `#incomplete?`
#
# @yield the delayed operation to perform
# @return [true, false] if success
def reconfigure(&block)
synchronize do
raise ArgumentError.new('no block given') unless block_given?
unless @evaluation_started
@task = block
true
else
false
end
end
end
protected
def ns_initialize(opts, &block)
init_obligation
set_deref_options(opts)
@executor = opts[:executor]
@task = block
@state = :pending
@evaluation_started = false
end
private
# @!visibility private
def execute_task_once # :nodoc:
# this function has been optimized for performance and
# should not be modified without running new benchmarks
execute = task = nil
synchronize do
execute = @evaluation_started = true unless @evaluation_started
task = @task
end
if execute
executor = Options.executor_from_options(executor: @executor)
executor.post do
begin
result = task.call
success = true
rescue => ex
reason = ex
end
synchronize do
set_state(success, result, reason)
event.set
end
end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/exchanger.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/exchanger.rb | require 'concurrent/constants'
require 'concurrent/errors'
require 'concurrent/maybe'
require 'concurrent/atomic/atomic_reference'
require 'concurrent/atomic/count_down_latch'
require 'concurrent/utility/engine'
require 'concurrent/utility/monotonic_time'
module Concurrent
# @!macro exchanger
#
# A synchronization point at which threads can pair and swap elements within
# pairs. Each thread presents some object on entry to the exchange method,
# matches with a partner thread, and receives its partner's object on return.
#
# @!macro thread_safe_variable_comparison
#
# This implementation is very simple, using only a single slot for each
# exchanger (unlike more advanced implementations which use an "arena").
# This approach will work perfectly fine when there are only a few threads
# accessing a single `Exchanger`. Beyond a handful of threads the performance
# will degrade rapidly due to contention on the single slot, but the algorithm
# will remain correct.
#
# @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Exchanger.html java.util.concurrent.Exchanger
# @example
#
# exchanger = Concurrent::Exchanger.new
#
# threads = [
# Thread.new { puts "first: " << exchanger.exchange('foo', 1) }, #=> "first: bar"
# Thread.new { puts "second: " << exchanger.exchange('bar', 1) } #=> "second: foo"
# ]
# threads.each {|t| t.join(2) }
# @!visibility private
class AbstractExchanger < Synchronization::Object
# @!visibility private
CANCEL = ::Object.new
private_constant :CANCEL
def initialize
super
end
# @!macro exchanger_method_do_exchange
#
# Waits for another thread to arrive at this exchange point (unless the
# current thread is interrupted), and then transfers the given object to
# it, receiving its object in return. The timeout value indicates the
# approximate number of seconds the method should block while waiting
# for the exchange. When the timeout value is `nil` the method will
# block indefinitely.
#
# @param [Object] value the value to exchange with another thread
# @param [Numeric, nil] timeout in seconds, `nil` blocks indefinitely
#
# @!macro exchanger_method_exchange
#
# In some edge cases when a `timeout` is given a return value of `nil` may be
# ambiguous. Specifically, if `nil` is a valid value in the exchange it will
# be impossible to tell whether `nil` is the actual return value or if it
# signifies timeout. When `nil` is a valid value in the exchange consider
# using {#exchange!} or {#try_exchange} instead.
#
# @return [Object] the value exchanged by the other thread or `nil` on timeout
def exchange(value, timeout = nil)
(value = do_exchange(value, timeout)) == CANCEL ? nil : value
end
# @!macro exchanger_method_do_exchange
# @!macro exchanger_method_exchange_bang
#
# On timeout a {Concurrent::TimeoutError} exception will be raised.
#
# @return [Object] the value exchanged by the other thread
# @raise [Concurrent::TimeoutError] on timeout
def exchange!(value, timeout = nil)
if (value = do_exchange(value, timeout)) == CANCEL
raise Concurrent::TimeoutError
else
value
end
end
# @!macro exchanger_method_do_exchange
# @!macro exchanger_method_try_exchange
#
# The return value will be a {Concurrent::Maybe} set to `Just` on success or
# `Nothing` on timeout.
#
# @return [Concurrent::Maybe] on success a `Just` maybe will be returned with
# the item exchanged by the other thread as `#value`; on timeout a
# `Nothing` maybe will be returned with {Concurrent::TimeoutError} as `#reason`
#
# @example
#
# exchanger = Concurrent::Exchanger.new
#
# result = exchanger.exchange(:foo, 0.5)
#
# if result.just?
# puts result.value #=> :bar
# else
# puts 'timeout'
# end
def try_exchange(value, timeout = nil)
if (value = do_exchange(value, timeout)) == CANCEL
Concurrent::Maybe.nothing(Concurrent::TimeoutError)
else
Concurrent::Maybe.just(value)
end
end
private
# @!macro exchanger_method_do_exchange
#
# @return [Object, CANCEL] the value exchanged by the other thread; {CANCEL} on timeout
def do_exchange(value, timeout)
raise NotImplementedError
end
end
# @!macro internal_implementation_note
# @!visibility private
class RubyExchanger < AbstractExchanger
# A simplified version of java.util.concurrent.Exchanger written by
# Doug Lea, Bill Scherer, and Michael Scott with assistance from members
# of JCP JSR-166 Expert Group and released to the public domain. It does
# not include the arena or the multi-processor spin loops.
# http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/6-b14/java/util/concurrent/Exchanger.java
safe_initialization!
class Node < Concurrent::Synchronization::Object
attr_atomic :value
safe_initialization!
def initialize(item)
super()
@Item = item
@Latch = Concurrent::CountDownLatch.new
self.value = nil
end
def latch
@Latch
end
def item
@Item
end
end
private_constant :Node
def initialize
super
end
private
attr_atomic(:slot)
# @!macro exchanger_method_do_exchange
#
# @return [Object, CANCEL] the value exchanged by the other thread; {CANCEL} on timeout
def do_exchange(value, timeout)
# ALGORITHM
#
# From the original Java version:
#
# > The basic idea is to maintain a "slot", which is a reference to
# > a Node containing both an Item to offer and a "hole" waiting to
# > get filled in. If an incoming "occupying" thread sees that the
# > slot is null, it CAS'es (compareAndSets) a Node there and waits
# > for another to invoke exchange. That second "fulfilling" thread
# > sees that the slot is non-null, and so CASes it back to null,
# > also exchanging items by CASing the hole, plus waking up the
# > occupying thread if it is blocked. In each case CAS'es may
# > fail because a slot at first appears non-null but is null upon
# > CAS, or vice-versa. So threads may need to retry these
# > actions.
#
# This version:
#
# An exchange occurs between an "occupier" thread and a "fulfiller" thread.
# The "slot" is used to setup this interaction. The first thread in the
# exchange puts itself into the slot (occupies) and waits for a fulfiller.
# The second thread removes the occupier from the slot and attempts to
# perform the exchange. Removing the occupier also frees the slot for
# another occupier/fulfiller pair.
#
# Because the occupier and the fulfiller are operating independently and
# because there may be contention with other threads, any failed operation
# indicates contention. Both the occupier and the fulfiller operate within
# spin loops. Any failed actions along the happy path will cause the thread
# to repeat the loop and try again.
#
# When a timeout value is given the thread must be cognizant of time spent
# in the spin loop. The remaining time is checked every loop. When the time
# runs out the thread will exit.
#
# A "node" is the data structure used to perform the exchange. Only the
# occupier's node is necessary. It's the node used for the exchange.
# Each node has an "item," a "hole" (self), and a "latch." The item is the
# node's initial value. It never changes. It's what the fulfiller returns on
# success. The occupier's hole is where the fulfiller put its item. It's the
# item that the occupier returns on success. The latch is used for synchronization.
# Because a thread may act as either an occupier or fulfiller (or possibly
# both in periods of high contention) every thread creates a node when
# the exchange method is first called.
#
# The following steps occur within the spin loop. If any actions fail
# the thread will loop and try again, so long as there is time remaining.
# If time runs out the thread will return CANCEL.
#
# Check the slot for an occupier:
#
# * If the slot is empty try to occupy
# * If the slot is full try to fulfill
#
# Attempt to occupy:
#
# * Attempt to CAS myself into the slot
# * Go to sleep and wait to be woken by a fulfiller
# * If the sleep is successful then the fulfiller completed its happy path
# - Return the value from my hole (the value given by the fulfiller)
# * When the sleep fails (time ran out) attempt to cancel the operation
# - Attempt to CAS myself out of the hole
# - If successful there is no contention
# - Return CANCEL
# - On failure, I am competing with a fulfiller
# - Attempt to CAS my hole to CANCEL
# - On success
# - Let the fulfiller deal with my cancel
# - Return CANCEL
# - On failure the fulfiller has completed its happy path
# - Return th value from my hole (the fulfiller's value)
#
# Attempt to fulfill:
#
# * Attempt to CAS the occupier out of the slot
# - On failure loop again
# * Attempt to CAS my item into the occupier's hole
# - On failure the occupier is trying to cancel
# - Loop again
# - On success we are on the happy path
# - Wake the sleeping occupier
# - Return the occupier's item
value = NULL if value.nil? # The sentinel allows nil to be a valid value
me = Node.new(value) # create my node in case I need to occupy
end_at = Concurrent.monotonic_time + timeout.to_f # The time to give up
result = loop do
other = slot
if other && compare_and_set_slot(other, nil)
# try to fulfill
if other.compare_and_set_value(nil, value)
# happy path
other.latch.count_down
break other.item
end
elsif other.nil? && compare_and_set_slot(nil, me)
# try to occupy
timeout = end_at - Concurrent.monotonic_time if timeout
if me.latch.wait(timeout)
# happy path
break me.value
else
# attempt to remove myself from the slot
if compare_and_set_slot(me, nil)
break CANCEL
elsif !me.compare_and_set_value(nil, CANCEL)
# I've failed to block the fulfiller
break me.value
end
end
end
break CANCEL if timeout && Concurrent.monotonic_time >= end_at
end
result == NULL ? nil : result
end
end
if Concurrent.on_jruby?
require 'concurrent/utility/native_extension_loader'
# @!macro internal_implementation_note
# @!visibility private
class JavaExchanger < AbstractExchanger
def initialize
@exchanger = java.util.concurrent.Exchanger.new
end
private
# @!macro exchanger_method_do_exchange
#
# @return [Object, CANCEL] the value exchanged by the other thread; {CANCEL} on timeout
def do_exchange(value, timeout)
result = nil
if timeout.nil?
Synchronization::JRuby.sleep_interruptibly do
result = @exchanger.exchange(value)
end
else
Synchronization::JRuby.sleep_interruptibly do
result = @exchanger.exchange(value, 1000 * timeout, java.util.concurrent.TimeUnit::MILLISECONDS)
end
end
result
rescue java.util.concurrent.TimeoutException
CANCEL
end
end
end
# @!visibility private
# @!macro internal_implementation_note
ExchangerImplementation = case
when Concurrent.on_jruby?
JavaExchanger
else
RubyExchanger
end
private_constant :ExchangerImplementation
# @!macro exchanger
class Exchanger < ExchangerImplementation
# @!method initialize
# Creates exchanger instance
# @!method exchange(value, timeout = nil)
# @!macro exchanger_method_do_exchange
# @!macro exchanger_method_exchange
# @!method exchange!(value, timeout = nil)
# @!macro exchanger_method_do_exchange
# @!macro exchanger_method_exchange_bang
# @!method try_exchange(value, timeout = nil)
# @!macro exchanger_method_do_exchange
# @!macro exchanger_method_try_exchange
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/tvar.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/tvar.rb | require 'set'
require 'concurrent/synchronization/object'
module Concurrent
# A `TVar` is a transactional variable - a single-element container that
# is used as part of a transaction - see `Concurrent::atomically`.
#
# @!macro thread_safe_variable_comparison
#
# {include:file:docs-source/tvar.md}
class TVar < Synchronization::Object
safe_initialization!
# Create a new `TVar` with an initial value.
def initialize(value)
@value = value
@lock = Mutex.new
end
# Get the value of a `TVar`.
def value
Concurrent::atomically do
Transaction::current.read(self)
end
end
# Set the value of a `TVar`.
def value=(value)
Concurrent::atomically do
Transaction::current.write(self, value)
end
end
# @!visibility private
def unsafe_value # :nodoc:
@value
end
# @!visibility private
def unsafe_value=(value) # :nodoc:
@value = value
end
# @!visibility private
def unsafe_lock # :nodoc:
@lock
end
end
# Run a block that reads and writes `TVar`s as a single atomic transaction.
# With respect to the value of `TVar` objects, the transaction is atomic, in
# that it either happens or it does not, consistent, in that the `TVar`
# objects involved will never enter an illegal state, and isolated, in that
# transactions never interfere with each other. You may recognise these
# properties from database transactions.
#
# There are some very important and unusual semantics that you must be aware of:
#
# * Most importantly, the block that you pass to atomically may be executed
# more than once. In most cases your code should be free of
# side-effects, except for via TVar.
#
# * If an exception escapes an atomically block it will abort the transaction.
#
# * It is undefined behaviour to use callcc or Fiber with atomically.
#
# * If you create a new thread within an atomically, it will not be part of
# the transaction. Creating a thread counts as a side-effect.
#
# Transactions within transactions are flattened to a single transaction.
#
# @example
  #   a = TVar.new(100_000)
  #   b = TVar.new(100)
#
# Concurrent::atomically do
# a.value -= 10
# b.value += 10
# end
def atomically
raise ArgumentError.new('no block given') unless block_given?
# Get the current transaction
transaction = Transaction::current
# Are we not already in a transaction (not nested)?
if transaction.nil?
# New transaction
begin
# Retry loop
loop do
# Create a new transaction
transaction = Transaction.new
Transaction::current = transaction
# Run the block, aborting on exceptions
begin
result = yield
rescue Transaction::AbortError => e
transaction.abort
result = Transaction::ABORTED
rescue Transaction::LeaveError => e
transaction.abort
break result
rescue => e
transaction.abort
raise e
end
# If we can commit, break out of the loop
if result != Transaction::ABORTED
if transaction.commit
break result
end
end
end
ensure
# Clear the current transaction
Transaction::current = nil
end
else
# Nested transaction - flatten it and just run the block
yield
end
end
# Abort a currently running transaction - see `Concurrent::atomically`.
def abort_transaction
raise Transaction::AbortError.new
end
# Leave a transaction without committing or aborting - see `Concurrent::atomically`.
def leave_transaction
raise Transaction::LeaveError.new
end
module_function :atomically, :abort_transaction, :leave_transaction
private
# @!visibility private
class Transaction
ABORTED = ::Object.new
OpenEntry = Struct.new(:value, :modified)
AbortError = Class.new(StandardError)
LeaveError = Class.new(StandardError)
def initialize
@open_tvars = {}
end
def read(tvar)
entry = open(tvar)
entry.value
end
def write(tvar, value)
entry = open(tvar)
entry.modified = true
entry.value = value
end
def open(tvar)
entry = @open_tvars[tvar]
unless entry
unless tvar.unsafe_lock.try_lock
Concurrent::abort_transaction
end
entry = OpenEntry.new(tvar.unsafe_value, false)
@open_tvars[tvar] = entry
end
entry
end
def abort
unlock
end
def commit
@open_tvars.each do |tvar, entry|
if entry.modified
tvar.unsafe_value = entry.value
end
end
unlock
end
def unlock
@open_tvars.each_key do |tvar|
tvar.unsafe_lock.unlock
end
end
def self.current
Thread.current[:current_tvar_transaction]
end
def self.current=(transaction)
Thread.current[:current_tvar_transaction] = transaction
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/settable_struct.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/settable_struct.rb | require 'concurrent/errors'
require 'concurrent/synchronization/abstract_struct'
require 'concurrent/synchronization/lockable_object'
module Concurrent
  # A thread-safe, write-once variation of Ruby's standard `Struct`.
# Each member can have its value set at most once, either at construction
# or any time thereafter. Attempting to assign a value to a member
# that has already been set will result in a `Concurrent::ImmutabilityError`.
#
# @see http://ruby-doc.org/core/Struct.html Ruby standard library `Struct`
# @see http://en.wikipedia.org/wiki/Final_(Java) Java `final` keyword
module SettableStruct
include Synchronization::AbstractStruct
# @!macro struct_values
def values
synchronize { ns_values }
end
alias_method :to_a, :values
# @!macro struct_values_at
def values_at(*indexes)
synchronize { ns_values_at(indexes) }
end
# @!macro struct_inspect
def inspect
synchronize { ns_inspect }
end
alias_method :to_s, :inspect
# @!macro struct_merge
def merge(other, &block)
synchronize { ns_merge(other, &block) }
end
# @!macro struct_to_h
def to_h
synchronize { ns_to_h }
end
# @!macro struct_get
def [](member)
synchronize { ns_get(member) }
end
# @!macro struct_equality
def ==(other)
synchronize { ns_equality(other) }
end
# @!macro struct_each
def each(&block)
return enum_for(:each) unless block_given?
synchronize { ns_each(&block) }
end
# @!macro struct_each_pair
def each_pair(&block)
return enum_for(:each_pair) unless block_given?
synchronize { ns_each_pair(&block) }
end
# @!macro struct_select
def select(&block)
return enum_for(:select) unless block_given?
synchronize { ns_select(&block) }
end
# @!macro struct_set
#
# @raise [Concurrent::ImmutabilityError] if the given member has already been set
def []=(member, value)
if member.is_a? Integer
length = synchronize { @values.length }
if member >= length
raise IndexError.new("offset #{member} too large for struct(size:#{length})")
end
synchronize do
unless @values[member].nil?
raise Concurrent::ImmutabilityError.new('struct member has already been set')
end
@values[member] = value
end
else
send("#{member}=", value)
end
rescue NoMethodError
raise NameError.new("no member '#{member}' in struct")
end
private
# @!visibility private
def initialize_copy(original)
synchronize do
super(original)
ns_initialize_copy
end
end
# @!macro struct_new
def self.new(*args, &block)
clazz_name = nil
if args.length == 0
raise ArgumentError.new('wrong number of arguments (0 for 1+)')
elsif args.length > 0 && args.first.is_a?(String)
clazz_name = args.shift
end
FACTORY.define_struct(clazz_name, args, &block)
end
FACTORY = Class.new(Synchronization::LockableObject) do
def define_struct(name, members, &block)
synchronize do
clazz = Synchronization::AbstractStruct.define_struct_class(SettableStruct, Synchronization::LockableObject, name, members, &block)
members.each_with_index do |member, index|
clazz.send :remove_method, member if clazz.instance_methods.include? member
clazz.send(:define_method, member) do
synchronize { @values[index] }
end
clazz.send(:define_method, "#{member}=") do |value|
synchronize do
unless @values[index].nil?
raise Concurrent::ImmutabilityError.new('struct member has already been set')
end
@values[index] = value
end
end
end
clazz
end
end
end.new
private_constant :FACTORY
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/configuration.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/configuration.rb | require 'thread'
require 'concurrent/delay'
require 'concurrent/errors'
require 'concurrent/concern/deprecation'
require 'concurrent/executor/immediate_executor'
require 'concurrent/executor/fixed_thread_pool'
require 'concurrent/executor/cached_thread_pool'
require 'concurrent/utility/processor_counter'
module Concurrent
  extend Concern::Deprecation
  # Autoloaded to sidestep circular require issues at load time.
  autoload :Options, 'concurrent/options'
  autoload :TimerSet, 'concurrent/executor/timer_set'
  autoload :ThreadPoolExecutor, 'concurrent/executor/thread_pool_executor'
  # Global pools are wrapped in `Delay` so the underlying executors are not
  # created until first use.
  # @!visibility private
  GLOBAL_FAST_EXECUTOR = Delay.new { Concurrent.new_fast_executor }
  private_constant :GLOBAL_FAST_EXECUTOR
  # @!visibility private
  GLOBAL_IO_EXECUTOR = Delay.new { Concurrent.new_io_executor }
  private_constant :GLOBAL_IO_EXECUTOR
  # @!visibility private
  GLOBAL_TIMER_SET = Delay.new { TimerSet.new }
  private_constant :GLOBAL_TIMER_SET
  # @!visibility private
  GLOBAL_IMMEDIATE_EXECUTOR = ImmediateExecutor.new
  private_constant :GLOBAL_IMMEDIATE_EXECUTOR
  # Disables AtExit handlers including pool auto-termination handlers.
  # When disabled it will be the application programmer's responsibility
  # to ensure that the handlers are shutdown properly prior to application
  # exit by calling `AtExit.run` method.
  #
  # @note this option should be needed only because of `at_exit` ordering
  #   issues which may arise when running some of the testing frameworks.
  #   E.g. Minitest's test-suite runs itself in `at_exit` callback which
  #   executes after the pools are already terminated. Then auto termination
  #   needs to be disabled and called manually after test-suite ends.
  # @note This method should *never* be called
  #   from within a gem. It should *only* be used from within the main
  #   application and even then it should be used only when necessary.
  # @deprecated Has no effect since it is no longer needed, see https://github.com/ruby-concurrency/concurrent-ruby/pull/841.
  #
  def self.disable_at_exit_handlers!
    deprecated "Method #disable_at_exit_handlers! has no effect since it is no longer needed, see https://github.com/ruby-concurrency/concurrent-ruby/pull/841."
  end
  # Global thread pool optimized for short, fast *operations*.
  #
  # @return [ThreadPoolExecutor] the thread pool
  def self.global_fast_executor
    GLOBAL_FAST_EXECUTOR.value!
  end
  # Global thread pool optimized for long, blocking (IO) *tasks*.
  #
  # @return [ThreadPoolExecutor] the thread pool
  def self.global_io_executor
    GLOBAL_IO_EXECUTOR.value!
  end
  # Global executor that runs tasks immediately on the caller's thread.
  #
  # @return [ImmediateExecutor] the executor
  def self.global_immediate_executor
    GLOBAL_IMMEDIATE_EXECUTOR
  end
  # Global thread pool used for global *timers*.
  #
  # @return [Concurrent::TimerSet] the thread pool
  def self.global_timer_set
    GLOBAL_TIMER_SET.value!
  end
  # General access point to global executors.
  # @param [Symbol, Executor] executor_identifier symbols:
  #   - :fast - {Concurrent.global_fast_executor}
  #   - :io - {Concurrent.global_io_executor}
  #   - :immediate - {Concurrent.global_immediate_executor}
  # @return [Executor]
  def self.executor(executor_identifier)
    Options.executor(executor_identifier)
  end
  # Build a new fixed pool for short, CPU-bound operations.
  # @param [Hash] opts `:auto_terminate` defaults to true
  def self.new_fast_executor(opts = {})
    FixedThreadPool.new(
      [2, Concurrent.processor_count].max,
      auto_terminate: opts.fetch(:auto_terminate, true),
      idletime: 60, # 1 minute
      max_queue: 0, # unlimited
      fallback_policy: :abort, # shouldn't matter -- 0 max queue
      name: "fast"
    )
  end
  # Build a new cached pool for long, blocking (IO) tasks.
  # @param [Hash] opts `:auto_terminate` defaults to true
  def self.new_io_executor(opts = {})
    CachedThreadPool.new(
      auto_terminate: opts.fetch(:auto_terminate, true),
      fallback_policy: :abort, # shouldn't matter -- 0 max queue
      name: "io"
    )
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/executors.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/executors.rb | require 'concurrent/executor/abstract_executor_service'
require 'concurrent/executor/cached_thread_pool'
require 'concurrent/executor/executor_service'
require 'concurrent/executor/fixed_thread_pool'
require 'concurrent/executor/immediate_executor'
require 'concurrent/executor/indirect_immediate_executor'
require 'concurrent/executor/java_executor_service'
require 'concurrent/executor/java_single_thread_executor'
require 'concurrent/executor/java_thread_pool_executor'
require 'concurrent/executor/ruby_executor_service'
require 'concurrent/executor/ruby_single_thread_executor'
require 'concurrent/executor/ruby_thread_pool_executor'
require 'concurrent/executor/safe_task_executor'
require 'concurrent/executor/serial_executor_service'
require 'concurrent/executor/serialized_execution'
require 'concurrent/executor/serialized_execution_delegator'
require 'concurrent/executor/single_thread_executor'
require 'concurrent/executor/thread_pool_executor'
require 'concurrent/executor/timer_set'
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atom.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atom.rb | require 'concurrent/atomic/atomic_reference'
require 'concurrent/collection/copy_on_notify_observer_set'
require 'concurrent/concern/observable'
require 'concurrent/synchronization/object'
# @!macro thread_safe_variable_comparison
#
# ## Thread-safe Variable Classes
#
# Each of the thread-safe variable classes is designed to solve a different
# problem. In general:
#
# * *{Concurrent::Agent}:* Shared, mutable variable providing independent,
# uncoordinated, *asynchronous* change of individual values. Best used when
# the value will undergo frequent, complex updates. Suitable when the result
# of an update does not need to be known immediately.
# * *{Concurrent::Atom}:* Shared, mutable variable providing independent,
# uncoordinated, *synchronous* change of individual values. Best used when
# the value will undergo frequent reads but only occasional, though complex,
# updates. Suitable when the result of an update must be known immediately.
# * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
# atomically. Updates are synchronous but fast. Best used when updates a
# simple set operations. Not suitable when updates are complex.
# {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
# but optimized for the given data type.
# * *{Concurrent::Exchanger}:* Shared, stateless synchronization point. Used
# when two or more threads need to exchange data. The threads will pair then
# block on each other until the exchange is complete.
# * *{Concurrent::MVar}:* Shared synchronization point. Used when one thread
# must give a value to another, which must take the value. The threads will
# block on each other until the exchange is complete.
# * *{Concurrent::ThreadLocalVar}:* Shared, mutable, isolated variable which
# holds a different value for each thread which has access. Often used as
# an instance variable in objects which must maintain different state
# for different threads.
# * *{Concurrent::TVar}:* Shared, mutable variables which provide
#   *coordinated*, *synchronous*, change of *many* states. Used when multiple
#   values must change together, in an all-or-nothing transaction.
module Concurrent
# Atoms provide a way to manage shared, synchronous, independent state.
#
# An atom is initialized with an initial value and an optional validation
# proc. At any time the value of the atom can be synchronously and safely
# changed. If a validator is given at construction then any new value
# will be checked against the validator and will be rejected if the
# validator returns false or raises an exception.
#
# There are two ways to change the value of an atom: {#compare_and_set} and
# {#swap}. The former will set the new value if and only if it validates and
# the current value matches the new value. The latter will atomically set the
# new value to the result of running the given block if and only if that
# value validates.
#
# ## Example
#
# ```
# def next_fibonacci(set = nil)
# return [0, 1] if set.nil?
# set + [set[-2..-1].reduce{|sum,x| sum + x }]
# end
#
# # create an atom with an initial value
# atom = Concurrent::Atom.new(next_fibonacci)
#
# # send a few update requests
# 5.times do
# atom.swap{|set| next_fibonacci(set) }
# end
#
# # get the current value
# atom.value #=> [0, 1, 1, 2, 3, 5, 8]
# ```
#
# ## Observation
#
# Atoms support observers through the {Concurrent::Observable} mixin module.
# Notification of observers occurs every time the value of the Atom changes.
# When notified the observer will receive three arguments: `time`, `old_value`,
# and `new_value`. The `time` argument is the time at which the value change
# occurred. The `old_value` is the value of the Atom when the change began
# The `new_value` is the value to which the Atom was set when the change
# completed. Note that `old_value` and `new_value` may be the same. This is
# not an error. It simply means that the change operation returned the same
# value.
#
# Unlike in Clojure, `Atom` cannot participate in {Concurrent::TVar} transactions.
#
# @!macro thread_safe_variable_comparison
#
# @see http://clojure.org/atoms Clojure Atoms
# @see http://clojure.org/state Values and Change - Clojure's approach to Identity and State
class Atom < Synchronization::Object
  include Concern::Observable

  safe_initialization!
  # attr_atomic defines an atomic slot plus CAS/swap/update accessors;
  # only the reader is public, all mutation goes through the methods below.
  attr_atomic(:value)
  private :value=, :swap_value, :compare_and_set_value, :update_value
  public :value
  alias_method :deref, :value

  # @!method value
  #   The current value of the atom.
  #
  #   @return [Object] The current value.

  # Create a new atom with the given initial value.
  #
  # @param [Object] value The initial value
  # @param [Hash] opts The options used to configure the atom
  # @option opts [Proc] :validator (nil) Optional proc used to validate new
  #   values. It must accept one and only one argument which will be the
  #   intended new value. The validator will return true if the new value
  #   is acceptable else return false (preferably) or raise an exception.
  #
  # @!macro deref_options
  #
  # @raise [ArgumentError] if the validator is not a `Proc` (when given)
  def initialize(value, opts = {})
    super()
    # NOTE: capitalized ivar — concurrent-ruby convention for a write-once
    # field under safe_initialization!. Default validator accepts everything.
    @Validator     = opts.fetch(:validator, -> v { true })
    self.observers = Collection::CopyOnNotifyObserverSet.new
    self.value     = value
  end

  # Atomically swaps the value of atom using the given block. The current
  # value will be passed to the block, as will any arguments passed as
  # arguments to the function. The new value will be validated against the
  # (optional) validator proc given at construction. If validation fails the
  # value will not be changed.
  #
  # Internally, {#swap} reads the current value, applies the block to it, and
  # attempts to compare-and-set it in. Since another thread may have changed
  # the value in the intervening time, it may have to retry, and does so in a
  # spin loop. The net effect is that the value will always be the result of
  # the application of the supplied block to a current value, atomically.
  # However, because the block might be called multiple times, it must be free
  # of side effects.
  #
  # @note The given block may be called multiple times, and thus should be free
  #   of side effects.
  #
  # @param [Object] args Zero or more arguments passed to the block.
  #
  # @yield [value, args] Calculates a new value for the atom based on the
  #   current value and any supplied arguments.
  # @yieldparam value [Object] The current value of the atom.
  # @yieldparam args [Object] All arguments passed to the function, in order.
  # @yieldreturn [Object] The intended new value of the atom.
  #
  # @return [Object] The final value of the atom after all operations and
  #   validations are complete.
  #
  # @raise [ArgumentError] When no block is given.
  def swap(*args)
    raise ArgumentError.new('no block given') unless block_given?

    # Spin loop: re-read, recompute, and CAS until the CAS succeeds or the
    # new value is rejected. A failed validation (or any error raised while
    # validating/notifying) leaves the atom unchanged and returns old_value.
    loop do
      old_value = value
      new_value = yield(old_value, *args)
      begin
        break old_value unless valid?(new_value)
        break new_value if compare_and_set(old_value, new_value)
      rescue
        break old_value
      end
    end
  end

  # Atomically sets the value of atom to the new value if and only if the
  # current value of the atom is identical to the old value and the new
  # value successfully validates against the (optional) validator given
  # at construction.
  #
  # @param [Object] old_value The expected current value.
  # @param [Object] new_value The intended new value.
  #
  # @return [Boolean] True if the value is changed else false.
  def compare_and_set(old_value, new_value)
    if valid?(new_value) && compare_and_set_value(old_value, new_value)
      # Observers are notified only when the CAS actually took effect.
      observers.notify_observers(Time.now, old_value, new_value)
      true
    else
      false
    end
  end

  # Atomically sets the value of atom to the new value without regard for the
  # current value so long as the new value successfully validates against the
  # (optional) validator given at construction.
  #
  # @param [Object] new_value The intended new value.
  #
  # @return [Object] The final value of the atom after all operations and
  #   validations are complete.
  def reset(new_value)
    old_value = value
    if valid?(new_value)
      self.value = new_value
      observers.notify_observers(Time.now, old_value, new_value)
      new_value
    else
      old_value
    end
  end

  private

  # Is the new value valid?
  #
  # @param [Object] new_value The intended new value.
  # @return [Boolean] false if the validator function returns false or raises
  #   an exception else true
  def valid?(new_value)
    @Validator.call(new_value)
  rescue
    # A raising validator is treated the same as one returning false.
    false
  end
end
end
# ---- vendored: concurrent-ruby-1.3.6 lib/concurrent-ruby/concurrent/promises.rb ----
require 'concurrent/synchronization/object'
require 'concurrent/atomic/atomic_boolean'
require 'concurrent/atomic/atomic_fixnum'
require 'concurrent/collection/lock_free_stack'
require 'concurrent/configuration'
require 'concurrent/errors'
require 'concurrent/re_include'
require 'concurrent/utility/monotonic_time'
module Concurrent
# {include:file:docs-source/promises-main.md}
module Promises
# @!macro promises.param.default_executor
# @param [Executor, :io, :fast] default_executor Instance of an executor or a name of the
# global executor. Default executor propagates to chained futures unless overridden with
# executor parameter or changed with {AbstractEventFuture#with_default_executor}.
#
# @!macro promises.param.executor
# @param [Executor, :io, :fast] executor Instance of an executor or a name of the
# global executor. The task is executed on it, default executor remains unchanged.
#
# @!macro promises.param.args
# @param [Object] args arguments which are passed to the task when it's executed.
# (It might be prepended with other arguments, see the @yield section).
#
# @!macro promises.shortcut.on
# Shortcut of {#$0_on} with default `:io` executor supplied.
# @see #$0_on
#
# @!macro promises.shortcut.using
# Shortcut of {#$0_using} with default `:io` executor supplied.
# @see #$0_using
#
# @!macro promise.param.task-future
# @yieldreturn will become result of the returned Future.
# Its returned value becomes {Future#value} fulfilling it,
# raised exception becomes {Future#reason} rejecting it.
#
# @!macro promise.param.callback
# @yieldreturn is forgotten.
# Container of all {Future}, {Event} factory methods. They are never constructed directly with
# new.
module FactoryMethods
  extend ReInclude
  extend self

  module Configuration
    # @return [Executor, :io, :fast] the executor which is used when none is supplied
    #   to a factory method. The method can be overridden in the receivers of
    #   `include FactoryMethod`
    def default_executor
      :io
    end
  end

  include Configuration

  # @!macro promises.shortcut.on
  # @return [ResolvableEvent]
  def resolvable_event
    resolvable_event_on default_executor
  end

  # Creates a resolvable event, user is responsible for resolving the event once
  # by calling {Promises::ResolvableEvent#resolve}.
  #
  # @!macro promises.param.default_executor
  # @return [ResolvableEvent]
  def resolvable_event_on(default_executor = self.default_executor)
    ResolvableEventPromise.new(default_executor).future
  end

  # @!macro promises.shortcut.on
  # @return [ResolvableFuture]
  def resolvable_future
    resolvable_future_on default_executor
  end

  # Creates resolvable future, user is responsible for resolving the future once by
  # {Promises::ResolvableFuture#resolve}, {Promises::ResolvableFuture#fulfill},
  # or {Promises::ResolvableFuture#reject}
  #
  # @!macro promises.param.default_executor
  # @return [ResolvableFuture]
  def resolvable_future_on(default_executor = self.default_executor)
    ResolvableFuturePromise.new(default_executor).future
  end

  # @!macro promises.shortcut.on
  # @return [Future]
  def future(*args, &task)
    future_on(default_executor, *args, &task)
  end

  # Constructs a new Future which will be resolved after block is evaluated on default executor.
  # Evaluation begins immediately.
  #
  # @!macro promises.param.default_executor
  # @!macro promises.param.args
  # @yield [*args] to the task.
  # @!macro promise.param.task-future
  # @return [Future]
  def future_on(default_executor, *args, &task)
    # An already-resolved event chained with the task schedules it right away.
    ImmediateEventPromise.new(default_executor).future.then(*args, &task)
  end

  # Creates a resolved future with will be either fulfilled with the given value or rejected with
  # the given reason.
  #
  # @param [true, false] fulfilled
  # @param [Object] value
  # @param [Object] reason
  # @!macro promises.param.default_executor
  # @return [Future]
  def resolved_future(fulfilled, value, reason, default_executor = self.default_executor)
    ImmediateFuturePromise.new(default_executor, fulfilled, value, reason).future
  end

  # Creates a resolved future which will be fulfilled with the given value.
  #
  # @!macro promises.param.default_executor
  # @param [Object] value
  # @return [Future]
  def fulfilled_future(value, default_executor = self.default_executor)
    resolved_future true, value, nil, default_executor
  end

  # Creates a resolved future which will be rejected with the given reason.
  #
  # @!macro promises.param.default_executor
  # @param [Object] reason
  # @return [Future]
  def rejected_future(reason, default_executor = self.default_executor)
    resolved_future false, nil, reason, default_executor
  end

  # Creates resolved event.
  #
  # @!macro promises.param.default_executor
  # @return [Event]
  def resolved_event(default_executor = self.default_executor)
    ImmediateEventPromise.new(default_executor).event
  end

  # General constructor. Behaves differently based on the argument's type. It's provided for convenience
  # but it's better to be explicit.
  #
  # @see rejected_future, resolved_event, fulfilled_future
  # @!macro promises.param.default_executor
  # @return [Event, Future]
  #
  # @overload make_future(nil, default_executor = self.default_executor)
  #   @param [nil] nil
  #   @return [Event] resolved event.
  #
  # @overload make_future(a_future, default_executor = self.default_executor)
  #   @param [Future] a_future
  #   @return [Future] a future which will be resolved when a_future is.
  #
  # @overload make_future(an_event, default_executor = self.default_executor)
  #   @param [Event] an_event
  #   @return [Event] an event which will be resolved when an_event is.
  #
  # @overload make_future(exception, default_executor = self.default_executor)
  #   @param [Exception] exception
  #   @return [Future] a rejected future with the exception as its reason.
  #
  # @overload make_future(value, default_executor = self.default_executor)
  #   @param [Object] value when none of the above overloads fits
  #   @return [Future] a fulfilled future with the value.
  def make_future(argument = nil, default_executor = self.default_executor)
    # NOTE: branch order matters — futures/events and exceptions are
    # recognized before the nil/else fallbacks.
    case argument
    when AbstractEventFuture
      # returning wrapper would change nothing
      argument
    when Exception
      rejected_future argument, default_executor
    when nil
      resolved_event default_executor
    else
      fulfilled_future argument, default_executor
    end
  end

  # @!macro promises.shortcut.on
  # @return [Future, Event]
  def delay(*args, &task)
    delay_on default_executor, *args, &task
  end

  # Creates a new event or future which is resolved only after it is touched,
  # see {Concurrent::AbstractEventFuture#touch}.
  #
  # @!macro promises.param.default_executor
  # @overload delay_on(default_executor, *args, &task)
  #   If task is provided it returns a {Future} representing the result of the task.
  #   @!macro promises.param.args
  #   @yield [*args] to the task.
  #   @!macro promise.param.task-future
  #   @return [Future]
  # @overload delay_on(default_executor)
  #   If no task is provided, it returns an {Event}
  #   @return [Event]
  def delay_on(default_executor, *args, &task)
    event = DelayPromise.new(default_executor).event
    task ? event.chain(*args, &task) : event
  end

  # @!macro promises.shortcut.on
  # @return [Future, Event]
  def schedule(intended_time, *args, &task)
    schedule_on default_executor, intended_time, *args, &task
  end

  # Creates a new event or future which is resolved in intended_time.
  #
  # @!macro promises.param.default_executor
  # @!macro promises.param.intended_time
  #   @param [Numeric, Time] intended_time `Numeric` means to run in `intended_time` seconds.
  #     `Time` means to run on `intended_time`.
  # @overload schedule_on(default_executor, intended_time, *args, &task)
  #   If task is provided it returns a {Future} representing the result of the task.
  #   @!macro promises.param.args
  #   @yield [*args] to the task.
  #   @!macro promise.param.task-future
  #   @return [Future]
  # @overload schedule_on(default_executor, intended_time)
  #   If no task is provided, it returns an {Event}
  #   @return [Event]
  def schedule_on(default_executor, intended_time, *args, &task)
    event = ScheduledPromise.new(default_executor, intended_time).event
    task ? event.chain(*args, &task) : event
  end

  # @!macro promises.shortcut.on
  # @return [Future]
  def zip_futures(*futures_and_or_events)
    zip_futures_on default_executor, *futures_and_or_events
  end

  # Creates a new future which is resolved after all futures_and_or_events are resolved.
  # Its value is an array of zipped future values. Its reason is an array of reasons for rejection.
  # If there is an error it rejects.
  # @!macro promises.event-conversion
  #   If event is supplied, which does not have value and can be only resolved, it's
  #   represented as `:fulfilled` with value `nil`.
  #
  # @!macro promises.param.default_executor
  # @param [AbstractEventFuture] futures_and_or_events
  # @return [Future]
  def zip_futures_on(default_executor, *futures_and_or_events)
    ZipFuturesPromise.new_blocked_by(futures_and_or_events, default_executor).future
  end

  alias_method :zip, :zip_futures

  # @!macro promises.shortcut.on
  # @return [Event]
  def zip_events(*futures_and_or_events)
    zip_events_on default_executor, *futures_and_or_events
  end

  # Creates a new event which is resolved after all futures_and_or_events are resolved.
  # (Future is resolved when fulfilled or rejected.)
  #
  # @!macro promises.param.default_executor
  # @param [AbstractEventFuture] futures_and_or_events
  # @return [Event]
  def zip_events_on(default_executor, *futures_and_or_events)
    ZipEventsPromise.new_blocked_by(futures_and_or_events, default_executor).event
  end

  # @!macro promises.shortcut.on
  # @return [Future]
  def any_resolved_future(*futures_and_or_events)
    any_resolved_future_on default_executor, *futures_and_or_events
  end

  alias_method :any, :any_resolved_future

  # Creates a new future which is resolved after the first futures_and_or_events is resolved.
  # Its result equals the result of the first resolved future.
  # @!macro promises.any-touch
  #   If resolved it does not propagate {Concurrent::AbstractEventFuture#touch}, leaving delayed
  #   futures un-executed if they are not required any more.
  # @!macro promises.event-conversion
  #
  # @!macro promises.param.default_executor
  # @param [AbstractEventFuture] futures_and_or_events
  # @return [Future]
  def any_resolved_future_on(default_executor, *futures_and_or_events)
    AnyResolvedFuturePromise.new_blocked_by(futures_and_or_events, default_executor).future
  end

  # @!macro promises.shortcut.on
  # @return [Future]
  def any_fulfilled_future(*futures_and_or_events)
    any_fulfilled_future_on default_executor, *futures_and_or_events
  end

  # Creates a new future which is resolved after the first futures_and_or_events is fulfilled.
  # Its result equals the result of the first resolved future or if all futures_and_or_events reject,
  # it has reason of the last rejected future.
  # @!macro promises.any-touch
  # @!macro promises.event-conversion
  #
  # @!macro promises.param.default_executor
  # @param [AbstractEventFuture] futures_and_or_events
  # @return [Future]
  def any_fulfilled_future_on(default_executor, *futures_and_or_events)
    AnyFulfilledFuturePromise.new_blocked_by(futures_and_or_events, default_executor).future
  end

  # @!macro promises.shortcut.on
  # @return [Event]
  def any_event(*futures_and_or_events)
    any_event_on default_executor, *futures_and_or_events
  end

  # Creates a new event which becomes resolved after the first futures_and_or_events resolves.
  # @!macro promises.any-touch
  #
  # @!macro promises.param.default_executor
  # @param [AbstractEventFuture] futures_and_or_events
  # @return [Event]
  def any_event_on(default_executor, *futures_and_or_events)
    AnyResolvedEventPromise.new_blocked_by(futures_and_or_events, default_executor).event
  end

  # TODO consider adding first(count, *futures)
  # TODO consider adding zip_by(slice, *futures) processing futures in slices
  # TODO or rather a generic aggregator taking a function
end
# Immutable value objects modelling the internal state machine of an
# event/future: pending -> (reserved) -> resolved {fulfilled | rejected}.
# The PENDING/RESERVED/RESOLVED constants are shared singletons.
module InternalStates
  # Abstract base of all states.
  # @!visibility private
  class State
    def resolved?
      raise NotImplementedError
    end

    def to_sym
      raise NotImplementedError
    end
  end

  # Not yet resolved.
  # @!visibility private
  class Pending < State
    def resolved?
      false
    end

    def to_sym
      :pending
    end
  end

  # Pending state claimed by a resolver (used by resolve_with's CAS),
  # behaviourally identical to Pending.
  # @!visibility private
  class Reserved < Pending
  end

  # Abstract base of all resolved states; carries the result triplet.
  # @!visibility private
  class ResolvedWithResult < State
    def resolved?
      true
    end

    def to_sym
      :resolved
    end

    # @return [Array(Boolean, Object, Object)] fulfilled?, value, reason
    def result
      [fulfilled?, value, reason]
    end

    def fulfilled?
      raise NotImplementedError
    end

    def value
      raise NotImplementedError
    end

    def reason
      raise NotImplementedError
    end

    def apply
      raise NotImplementedError
    end
  end

  # Resolved successfully with a value.
  # @!visibility private
  class Fulfilled < ResolvedWithResult
    def initialize(value)
      @Value = value
    end

    def fulfilled?
      true
    end

    # Calls the block with the value prepended to args.
    def apply(args, block)
      block.call value, *args
    end

    def value
      @Value
    end

    def reason
      nil
    end

    def to_sym
      :fulfilled
    end
  end

  # Fulfilled with an Array value that is splatted into callbacks.
  # @!visibility private
  class FulfilledArray < Fulfilled
    def apply(args, block)
      block.call(*value, *args)
    end
  end

  # Resolved unsuccessfully with a reason.
  # @!visibility private
  class Rejected < ResolvedWithResult
    def initialize(reason)
      @Reason = reason
    end

    def fulfilled?
      false
    end

    def value
      nil
    end

    def reason
      @Reason
    end

    def to_sym
      :rejected
    end

    def apply(args, block)
      block.call reason, *args
    end
  end

  # Rejected, but carrying both a value and a reason (used when several
  # blocked-on futures produce mixed results); the reason is splatted
  # into callbacks.
  # @!visibility private
  class PartiallyRejected < ResolvedWithResult
    def initialize(value, reason)
      super()
      @Value  = value
      @Reason = reason
    end

    def fulfilled?
      false
    end

    def to_sym
      :rejected
    end

    def value
      @Value
    end

    def reason
      @Reason
    end

    def apply(args, block)
      block.call(*reason, *args)
    end
  end

  # @!visibility private
  PENDING  = Pending.new
  # @!visibility private
  RESERVED = Reserved.new
  # @!visibility private
  RESOLVED = Fulfilled.new(nil)

  # The shared RESOLVED singleton (used by Events, which have no value)
  # reports :resolved rather than :fulfilled.
  def RESOLVED.to_sym
    :resolved
  end
end
private_constant :InternalStates
# @!macro promises.shortcut.event-future
# @see Event#$0
# @see Future#$0
# @!macro promises.param.timeout
# @param [Numeric] timeout the maximum time in second to wait.
# @!macro promises.warn.blocks
# @note This function potentially blocks current thread until the Future is resolved.
# Be careful it can deadlock. Try to chain instead.
# Common ancestor of {Event} and {Future} classes, many shared methods are defined here.
class AbstractEventFuture < Synchronization::Object
  safe_initialization!
  # Atomic slot holding the current InternalStates::State; mutation only
  # through the private CAS/swap accessors below.
  attr_atomic(:internal_state)
  private :internal_state=, :swap_internal_state, :compare_and_set_internal_state, :update_internal_state

  # @!method internal_state
  #   @!visibility private

  include InternalStates

  def initialize(promise, default_executor)
    super()
    # Lock/Condition are only used to park and wake blocking waiters;
    # resolution itself is lock-free (CAS on internal_state).
    @Lock               = Mutex.new
    @Condition          = ConditionVariable.new
    @Promise            = promise
    @DefaultExecutor    = default_executor
    @Callbacks          = LockFreeStack.new
    # Count of threads currently blocked in #wait_until_resolved; lets
    # resolve_with skip the synchronized broadcast when nobody waits.
    @Waiters            = AtomicFixnum.new 0
    self.internal_state = PENDING
  end

  private :initialize

  # Returns its state.
  # @return [Symbol]
  #
  # @overload an_event.state
  #   @return [:pending, :resolved]
  # @overload a_future.state
  #   Both :fulfilled, :rejected implies :resolved.
  #   @return [:pending, :fulfilled, :rejected]
  def state
    internal_state.to_sym
  end

  # Is it in pending state?
  # @return [Boolean]
  def pending?
    !internal_state.resolved?
  end

  # Is it in resolved state?
  # @return [Boolean]
  def resolved?
    internal_state.resolved?
  end

  # Propagates touch. Requests all the delayed futures, which it depends on, to be
  # executed. This method is called by any other method requiring resolved state, like {#wait}.
  # @return [self]
  def touch
    @Promise.touch
    self
  end

  # @!macro promises.touches
  #   Calls {Concurrent::AbstractEventFuture#touch}.

  # @!macro promises.method.wait
  #   Wait (block the Thread) until receiver is {#resolved?}.
  #   @!macro promises.touches
  #
  #   @!macro promises.warn.blocks
  #   @!macro promises.param.timeout
  #   @return [self, true, false] self implies timeout was not used, true implies timeout was used
  #     and it was resolved, false implies it was not resolved within timeout.
  def wait(timeout = nil)
    result = wait_until_resolved(timeout)
    timeout ? result : self
  end

  # Returns default executor.
  # @return [Executor] default executor
  # @see #with_default_executor
  # @see FactoryMethods#future_on
  # @see FactoryMethods#resolvable_future
  # @see FactoryMethods#any_fulfilled_future_on
  # @see similar
  def default_executor
    @DefaultExecutor
  end

  # @!macro promises.shortcut.on
  # @return [Future]
  def chain(*args, &task)
    chain_on @DefaultExecutor, *args, &task
  end

  # Chains the task to be executed asynchronously on executor after it is resolved.
  #
  # @!macro promises.param.executor
  # @!macro promises.param.args
  # @return [Future]
  # @!macro promise.param.task-future
  #
  # @overload an_event.chain_on(executor, *args, &task)
  #   @yield [*args] to the task.
  # @overload a_future.chain_on(executor, *args, &task)
  #   @yield [fulfilled, value, reason, *args] to the task.
  #   @yieldparam [true, false] fulfilled
  #   @yieldparam [Object] value
  #   @yieldparam [Object] reason
  def chain_on(executor, *args, &task)
    # executor is passed twice: once as the task executor, once as the new
    # future's default executor.
    ChainPromise.new_blocked_by1(self, executor, executor, args, &task).future
  end

  # @return [String] Short string representation.
  def to_s
    format '%s %s>', super[0..-2], state
  end

  alias_method :inspect, :to_s

  # Resolves the resolvable when receiver is resolved.
  #
  # @param [Resolvable] resolvable
  # @return [self]
  def chain_resolvable(resolvable)
    on_resolution! { resolvable.resolve_with internal_state }
  end

  alias_method :tangle, :chain_resolvable

  # @!macro promises.shortcut.using
  # @return [self]
  def on_resolution(*args, &callback)
    on_resolution_using @DefaultExecutor, *args, &callback
  end

  # Stores the callback to be executed synchronously on resolving thread after it is
  # resolved.
  #
  # @!macro promises.param.args
  # @!macro promise.param.callback
  # @return [self]
  #
  # @overload an_event.on_resolution!(*args, &callback)
  #   @yield [*args] to the callback.
  # @overload a_future.on_resolution!(*args, &callback)
  #   @yield [fulfilled, value, reason, *args] to the callback.
  #   @yieldparam [true, false] fulfilled
  #   @yieldparam [Object] value
  #   @yieldparam [Object] reason
  def on_resolution!(*args, &callback)
    add_callback :callback_on_resolution, args, callback
  end

  # Stores the callback to be executed asynchronously on executor after it is resolved.
  #
  # @!macro promises.param.executor
  # @!macro promises.param.args
  # @!macro promise.param.callback
  # @return [self]
  #
  # @overload an_event.on_resolution_using(executor, *args, &callback)
  #   @yield [*args] to the callback.
  # @overload a_future.on_resolution_using(executor, *args, &callback)
  #   @yield [fulfilled, value, reason, *args] to the callback.
  #   @yieldparam [true, false] fulfilled
  #   @yieldparam [Object] value
  #   @yieldparam [Object] reason
  def on_resolution_using(executor, *args, &callback)
    add_callback :async_callback_on_resolution, executor, args, callback
  end

  # @!macro promises.method.with_default_executor
  #   Crates new object with same class with the executor set as its new default executor.
  #   Any futures depending on it will use the new default executor.
  # @!macro promises.shortcut.event-future
  # @abstract
  # @return [AbstractEventFuture]
  def with_default_executor(executor)
    raise NotImplementedError
  end

  # Attempts the PENDING/RESERVED -> resolved CAS; on success wakes any
  # blocked waiters and drains callbacks, on failure delegates to the
  # subclass-specific rejected_resolution.
  # @!visibility private
  def resolve_with(state, raise_on_reassign = true, reserved = false)
    if compare_and_set_internal_state(reserved ? RESERVED : PENDING, state)
      # go to synchronized block only if there were waiting threads
      @Lock.synchronize { @Condition.broadcast } unless @Waiters.value == 0
      call_callbacks state
    else
      return rejected_resolution(raise_on_reassign, state)
    end
    self
  end

  # For inspection.
  # @!visibility private
  # @return [Array<AbstractPromise>]
  def blocks
    @Callbacks.each_with_object([]) do |(method, args), promises|
      promises.push(args[0]) if method == :callback_notify_blocked
    end
  end

  # For inspection.
  # @!visibility private
  def callbacks
    @Callbacks.each.to_a
  end

  # For inspection.
  # @!visibility private
  def promise
    @Promise
  end

  # For inspection.
  # @!visibility private
  def touched?
    promise.touched?
  end

  # For inspection.
  # @!visibility private
  def waiting_threads
    @Waiters.each.to_a
  end

  # @!visibility private
  def add_callback_notify_blocked(promise, index)
    add_callback :callback_notify_blocked, promise, index
  end

  # @!visibility private
  def add_callback_clear_delayed_node(node)
    add_callback(:callback_clear_delayed_node, node)
  end

  # @!visibility private
  def with_hidden_resolvable
    # TODO (pitr-ch 10-Dec-2018): documentation, better name if in edge
    self
  end

  private

  # Runs the callback immediately when already resolved; otherwise pushes
  # it and re-checks state to close the race with a concurrent resolution.
  def add_callback(method, *args)
    state = internal_state
    if state.resolved?
      call_callback method, state, args
    else
      @Callbacks.push [method, args]
      state = internal_state
      # take back if it was resolved in the meanwhile
      call_callbacks state if state.resolved?
    end
    self
  end

  def callback_clear_delayed_node(state, node)
    node.value = nil
  end

  # Parks the current thread until resolved or the timeout elapses.
  # @return [Boolean]
  def wait_until_resolved(timeout)
    return true if resolved?

    touch

    @Lock.synchronize do
      @Waiters.increment
      begin
        if timeout
          start = Concurrent.monotonic_time
          until resolved?
            break if @Condition.wait(@Lock, timeout) == nil # nil means timeout
            # Re-armed by a broadcast that may be spurious w.r.t. this
            # object: shrink the remaining timeout and loop.
            timeout -= (Concurrent.monotonic_time - start)
            break if timeout <= 0
          end
        else
          until resolved?
            @Condition.wait(@Lock, timeout)
          end
        end
      ensure
        # JRuby may raise ConcurrencyError
        @Waiters.decrement
      end
    end
    resolved?
  end

  def call_callback(method, state, args)
    self.send method, state, *args
  end

  # Pops and runs callbacks until the stack is empty; safe to race with
  # add_callback because both sides re-check state.
  def call_callbacks(state)
    method, args = @Callbacks.pop
    while method
      call_callback method, state, args
      method, args = @Callbacks.pop
    end
  end

  def with_async(executor, *args, &block)
    Concurrent.executor(executor).post(*args, &block)
  end

  def async_callback_on_resolution(state, executor, args, callback)
    with_async(executor, state, args, callback) do |st, ar, cb|
      callback_on_resolution st, ar, cb
    end
  end

  def callback_notify_blocked(state, promise, index)
    promise.on_blocker_resolution self, index
  end
end
# Represents an event which will happen in future (will be resolved). The event is either
# pending or resolved. It should be always resolved. Use {Future} to communicate rejections and
# cancellation.
class Event < AbstractEventFuture
  alias_method :then, :chain

  # @!macro promises.method.zip
  #   Creates a new event or a future which will be resolved when receiver and other are.
  #   Returns an event if receiver and other are events, otherwise returns a future.
  #   If just one of the parties is Future then the result
  #   of the returned future is equal to the result of the supplied future. If both are futures
  #   then the result is as described in {FactoryMethods#zip_futures_on}.
  #
  # @return [Future, Event]
  def zip(other)
    if other.is_a?(Future)
      ZipFutureEventPromise.new_blocked_by2(other, self, @DefaultExecutor).future
    else
      ZipEventEventPromise.new_blocked_by2(self, other, @DefaultExecutor).event
    end
  end

  alias_method :&, :zip

  # Creates a new event which will be resolved when the first of receiver, `event_or_future`
  # resolves.
  #
  # @return [Event]
  def any(event_or_future)
    AnyResolvedEventPromise.new_blocked_by2(self, event_or_future, @DefaultExecutor).event
  end

  alias_method :|, :any

  # Creates new event dependent on receiver which will not evaluate until touched, see {#touch}.
  # In other words, it inserts delay into the chain of Futures making rest of it lazy evaluated.
  #
  # @return [Event]
  def delay
    event = DelayPromise.new(@DefaultExecutor).event
    ZipEventEventPromise.new_blocked_by2(self, event, @DefaultExecutor).event
  end

  # @!macro promise.method.schedule
  #   Creates new event dependent on receiver scheduled to execute on/in intended_time.
  #   In time is interpreted from the moment the receiver is resolved, therefore it inserts
  #   delay into the chain.
  #
  #   @!macro promises.param.intended_time
  # @return [Event]
  def schedule(intended_time)
    # The scheduled promise is created only after self resolves, so the
    # delay is measured from resolution; flat_event unwraps the nesting.
    chain do
      event = ScheduledPromise.new(@DefaultExecutor, intended_time).event
      ZipEventEventPromise.new_blocked_by2(self, event, @DefaultExecutor).event
    end.flat_event
  end

  # Converts event to a future. The future is fulfilled when the event is resolved, the future may never fail.
  #
  # @return [Future]
  def to_future
    future = Promises.resolvable_future
  ensure
    # ensure still returns `future`; the wiring simply always runs.
    chain_resolvable(future)
  end

  # Returns self, since this is event
  # @return [Event]
  def to_event
    self
  end

  # @!macro promises.method.with_default_executor
  # @return [Event]
  def with_default_executor(executor)
    EventWrapperPromise.new_blocked_by1(self, executor).event
  end

  private

  # Events are single-assignment: a second resolution is an error (or a
  # silent false when raise_on_reassign is disabled).
  def rejected_resolution(raise_on_reassign, state)
    raise Concurrent::MultipleAssignmentError.new('Event can be resolved only once') if raise_on_reassign
    return false
  end

  # Events carry no result, so callbacks receive only the user args.
  def callback_on_resolution(state, args, callback)
    callback.call(*args)
  end
end
# Represents a value which will become available in future. May reject with a reason instead,
# e.g. when the tasks raises an exception.
class Future < AbstractEventFuture
# Is it in fulfilled state?
# @return [Boolean]
# True only when already resolved with a value (never blocks).
def fulfilled?
  current = internal_state
  current.resolved? ? current.fulfilled? : false
end
# Is it in rejected state?
# @return [Boolean]
# True only when already resolved with a reason (never blocks).
def rejected?
  current = internal_state
  current.resolved? ? !current.fulfilled? : false
end
# @!macro promises.warn.nil
# @note Make sure returned `nil` is not confused with timeout, no value when rejected,
# no reason when fulfilled, etc.
# Use more exact methods if needed, like {#wait}, {#value!}, {#result}, etc.
# @!macro promises.method.value
# Return value of the future.
# @!macro promises.touches
#
# @!macro promises.warn.blocks
# @!macro promises.warn.nil
# @!macro promises.param.timeout
# @!macro promises.param.timeout_value
# @param [Object] timeout_value a value returned by the method when it times out
# @return [Object, nil, timeout_value] the value of the Future when fulfilled,
# timeout_value on timeout,
# nil on rejection.
# Blocks (touching delayed dependencies) until resolved or timeout.
# Returns the fulfillment value, nil on rejection, or timeout_value on timeout.
def value(timeout = nil, timeout_value = nil)
  return timeout_value unless wait_until_resolved(timeout)
  internal_state.value
end
# Returns reason of future's rejection.
# @!macro promises.touches
#
# @!macro promises.warn.blocks
# @!macro promises.warn.nil
# @!macro promises.param.timeout
# @!macro promises.param.timeout_value
# @return [Object, timeout_value] the reason, or timeout_value on timeout, or nil on fulfillment.
def reason(timeout = nil, timeout_value = nil)
if wait_until_resolved timeout
internal_state.reason
else
timeout_value
end
end
# Returns triplet fulfilled?, value, reason.
# @!macro promises.touches
#
# @!macro promises.warn.blocks
# @!macro promises.param.timeout
# @return [Array(Boolean, Object, Object), nil] triplet of fulfilled?, value, reason, or nil
# on timeout.
def result(timeout = nil)
internal_state.result if wait_until_resolved timeout
end
# @!macro promises.method.wait
# @raise [Exception] {#reason} on rejection
def wait!(timeout = nil)
result = wait_until_resolved!(timeout)
timeout ? result : self
end
# @!macro promises.method.value
# @return [Object, nil, timeout_value] the value of the Future when fulfilled,
# or nil on rejection,
# or timeout_value on timeout.
# @raise [Exception] {#reason} on rejection
def value!(timeout = nil, timeout_value = nil)
if wait_until_resolved! timeout
internal_state.value
else
timeout_value
end
end
# Allows rejected Future to be risen with `raise` method.
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | true |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/immutable_struct.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/immutable_struct.rb | require 'concurrent/synchronization/abstract_struct'
require 'concurrent/synchronization/lockable_object'
module Concurrent
# A thread-safe, immutable variation of Ruby's standard `Struct`.
#
# @see http://ruby-doc.org/core/Struct.html Ruby standard library `Struct`
module ImmutableStruct
include Synchronization::AbstractStruct
def self.included(base)
base.safe_initialization!
end
# @!macro struct_values
def values
ns_values
end
alias_method :to_a, :values
# @!macro struct_values_at
def values_at(*indexes)
ns_values_at(indexes)
end
# @!macro struct_inspect
def inspect
ns_inspect
end
alias_method :to_s, :inspect
# @!macro struct_merge
def merge(other, &block)
ns_merge(other, &block)
end
# @!macro struct_to_h
def to_h
ns_to_h
end
# @!macro struct_get
def [](member)
ns_get(member)
end
# @!macro struct_equality
def ==(other)
ns_equality(other)
end
# @!macro struct_each
def each(&block)
return enum_for(:each) unless block_given?
ns_each(&block)
end
# @!macro struct_each_pair
def each_pair(&block)
return enum_for(:each_pair) unless block_given?
ns_each_pair(&block)
end
# @!macro struct_select
def select(&block)
return enum_for(:select) unless block_given?
ns_select(&block)
end
private
# @!visibility private
def initialize_copy(original)
super(original)
ns_initialize_copy
end
# @!macro struct_new
def self.new(*args, &block)
clazz_name = nil
if args.length == 0
raise ArgumentError.new('wrong number of arguments (0 for 1+)')
elsif args.length > 0 && args.first.is_a?(String)
clazz_name = args.shift
end
FACTORY.define_struct(clazz_name, args, &block)
end
FACTORY = Class.new(Synchronization::LockableObject) do
def define_struct(name, members, &block)
synchronize do
Synchronization::AbstractStruct.define_struct_class(ImmutableStruct, Synchronization::Object, name, members, &block)
end
end
end.new
private_constant :FACTORY
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/dataflow.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/dataflow.rb | require 'concurrent/future'
require 'concurrent/atomic/atomic_fixnum'
module Concurrent
# @!visibility private
class DependencyCounter # :nodoc:
def initialize(count, &block)
@counter = AtomicFixnum.new(count)
@block = block
end
def update(time, value, reason)
if @counter.decrement == 0
@block.call
end
end
end
# Dataflow allows you to create a task that will be scheduled when all of its data dependencies are available.
# {include:file:docs-source/dataflow.md}
#
# @param [Future] inputs zero or more `Future` operations that this dataflow depends upon
#
# @yield The operation to perform once all the dependencies are met
# @yieldparam [Future] inputs each of the `Future` inputs to the dataflow
# @yieldreturn [Object] the result of the block operation
#
# @return [Object] the result of all the operations
#
# @raise [ArgumentError] if no block is given
# @raise [ArgumentError] if any of the inputs are not `IVar`s
def dataflow(*inputs, &block)
dataflow_with(Concurrent.global_io_executor, *inputs, &block)
end
module_function :dataflow
def dataflow_with(executor, *inputs, &block)
call_dataflow(:value, executor, *inputs, &block)
end
module_function :dataflow_with
def dataflow!(*inputs, &block)
dataflow_with!(Concurrent.global_io_executor, *inputs, &block)
end
module_function :dataflow!
def dataflow_with!(executor, *inputs, &block)
call_dataflow(:value!, executor, *inputs, &block)
end
module_function :dataflow_with!
private
def call_dataflow(method, executor, *inputs, &block)
raise ArgumentError.new('an executor must be provided') if executor.nil?
raise ArgumentError.new('no block given') unless block_given?
unless inputs.all? { |input| input.is_a? IVar }
raise ArgumentError.new("Not all dependencies are IVars.\nDependencies: #{ inputs.inspect }")
end
result = Future.new(executor: executor) do
values = inputs.map { |input| input.send(method) }
block.call(*values)
end
if inputs.empty?
result.execute
else
counter = DependencyCounter.new(inputs.size) { result.execute }
inputs.each do |input|
input.add_observer counter
end
end
result
end
module_function :call_dataflow
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/ivar.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/ivar.rb | require 'concurrent/constants'
require 'concurrent/errors'
require 'concurrent/collection/copy_on_write_observer_set'
require 'concurrent/concern/obligation'
require 'concurrent/concern/observable'
require 'concurrent/executor/safe_task_executor'
require 'concurrent/synchronization/lockable_object'
module Concurrent
# An `IVar` is like a future that you can assign. As a future is a value that
# is being computed that you can wait on, an `IVar` is a value that is waiting
# to be assigned, that you can wait on. `IVars` are single assignment and
# deterministic.
#
# Then, express futures as an asynchronous computation that assigns an `IVar`.
# The `IVar` becomes the primitive on which [futures](Future) and
# [dataflow](Dataflow) are built.
#
# An `IVar` is a single-element container that is normally created empty, and
# can only be set once. The I in `IVar` stands for immutable. Reading an
# `IVar` normally blocks until it is set. It is safe to set and read an `IVar`
# from different threads.
#
# If you want to have some parallel task set the value in an `IVar`, you want
# a `Future`. If you want to create a graph of parallel tasks all executed
# when the values they depend on are ready you want `dataflow`. `IVar` is
# generally a low-level primitive.
#
# ## Examples
#
# Create, set and get an `IVar`
#
# ```ruby
# ivar = Concurrent::IVar.new
# ivar.set 14
# ivar.value #=> 14
# ivar.set 2 # would now be an error
# ```
#
# ## See Also
#
# 1. For the theory: Arvind, R. Nikhil, and K. Pingali.
# [I-Structures: Data structures for parallel computing](http://dl.acm.org/citation.cfm?id=69562).
# In Proceedings of Workshop on Graph Reduction, 1986.
# 2. For recent application:
# [DataDrivenFuture in Habanero Java from Rice](http://www.cs.rice.edu/~vs3/hjlib/doc/edu/rice/hj/api/HjDataDrivenFuture.html).
class IVar < Synchronization::LockableObject
include Concern::Obligation
include Concern::Observable
# Create a new `IVar` in the `:pending` state with the (optional) initial value.
#
# @param [Object] value the initial value
# @param [Hash] opts the options to create a message with
# @option opts [String] :dup_on_deref (false) call `#dup` before returning
# the data
# @option opts [String] :freeze_on_deref (false) call `#freeze` before
# returning the data
# @option opts [String] :copy_on_deref (nil) call the given `Proc` passing
# the internal value and returning the value returned from the proc
def initialize(value = NULL, opts = {}, &block)
if value != NULL && block_given?
raise ArgumentError.new('provide only a value or a block')
end
super(&nil)
synchronize { ns_initialize(value, opts, &block) }
end
# Add an observer on this object that will receive notification on update.
#
# Upon completion the `IVar` will notify all observers in a thread-safe way.
# The `func` method of the observer will be called with three arguments: the
# `Time` at which the `Future` completed the asynchronous operation, the
# final `value` (or `nil` on rejection), and the final `reason` (or `nil` on
# fulfillment).
#
# @param [Object] observer the object that will be notified of changes
# @param [Symbol] func symbol naming the method to call when this
# `Observable` has changes`
def add_observer(observer = nil, func = :update, &block)
raise ArgumentError.new('cannot provide both an observer and a block') if observer && block
direct_notification = false
if block
observer = block
func = :call
end
synchronize do
if event.set?
direct_notification = true
else
observers.add_observer(observer, func)
end
end
observer.send(func, Time.now, self.value, reason) if direct_notification
observer
end
# @!macro ivar_set_method
# Set the `IVar` to a value and wake or notify all threads waiting on it.
#
# @!macro ivar_set_parameters_and_exceptions
# @param [Object] value the value to store in the `IVar`
# @yield A block operation to use for setting the value
# @raise [ArgumentError] if both a value and a block are given
# @raise [Concurrent::MultipleAssignmentError] if the `IVar` has already
# been set or otherwise completed
#
# @return [IVar] self
def set(value = NULL)
check_for_block_or_value!(block_given?, value)
raise MultipleAssignmentError unless compare_and_set_state(:processing, :pending)
begin
value = yield if block_given?
complete_without_notification(true, value, nil)
rescue => ex
complete_without_notification(false, nil, ex)
end
notify_observers(self.value, reason)
self
end
# @!macro ivar_fail_method
# Set the `IVar` to failed due to some error and wake or notify all threads waiting on it.
#
# @param [Object] reason for the failure
# @raise [Concurrent::MultipleAssignmentError] if the `IVar` has already
# been set or otherwise completed
# @return [IVar] self
def fail(reason = StandardError.new)
complete(false, nil, reason)
end
# Attempt to set the `IVar` with the given value or block. Return a
# boolean indicating the success or failure of the set operation.
#
# @!macro ivar_set_parameters_and_exceptions
#
# @return [Boolean] true if the value was set else false
def try_set(value = NULL, &block)
set(value, &block)
true
rescue MultipleAssignmentError
false
end
protected
# @!visibility private
def ns_initialize(value, opts)
value = yield if block_given?
init_obligation
self.observers = Collection::CopyOnWriteObserverSet.new
set_deref_options(opts)
@state = :pending
if value != NULL
ns_complete_without_notification(true, value, nil)
end
end
# @!visibility private
def safe_execute(task, args = [])
if compare_and_set_state(:processing, :pending)
success, val, reason = SafeTaskExecutor.new(task, rescue_exception: true).execute(*@args)
complete(success, val, reason)
yield(success, val, reason) if block_given?
end
end
# @!visibility private
def complete(success, value, reason)
complete_without_notification(success, value, reason)
notify_observers(self.value, reason)
self
end
# @!visibility private
def complete_without_notification(success, value, reason)
synchronize { ns_complete_without_notification(success, value, reason) }
self
end
# @!visibility private
def notify_observers(value, reason)
observers.notify_and_delete_observers{ [Time.now, value, reason] }
end
# @!visibility private
def ns_complete_without_notification(success, value, reason)
raise MultipleAssignmentError if [:fulfilled, :rejected].include? @state
set_state(success, value, reason)
event.set
end
# @!visibility private
def check_for_block_or_value!(block_given, value) # :nodoc:
if (block_given && value != NULL) || (! block_given && value == NULL)
raise ArgumentError.new('must set with either a value or a block')
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/scheduled_task.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/scheduled_task.rb | require 'concurrent/constants'
require 'concurrent/errors'
require 'concurrent/configuration'
require 'concurrent/ivar'
require 'concurrent/collection/copy_on_notify_observer_set'
require 'concurrent/utility/monotonic_time'
require 'concurrent/options'
module Concurrent
# `ScheduledTask` is a close relative of `Concurrent::Future` but with one
# important difference: A `Future` is set to execute as soon as possible
# whereas a `ScheduledTask` is set to execute after a specified delay. This
# implementation is loosely based on Java's
# [ScheduledExecutorService](http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ScheduledExecutorService.html).
# It is a more feature-rich variant of {Concurrent.timer}.
#
# The *intended* schedule time of task execution is set on object construction
# with the `delay` argument. The delay is a numeric (floating point or integer)
# representing a number of seconds in the future. Any other value or a numeric
# equal to or less than zero will result in an exception. The *actual* schedule
# time of task execution is set when the `execute` method is called.
#
# The constructor can also be given zero or more processing options. Currently
# the only supported options are those recognized by the
# [Dereferenceable](Dereferenceable) module.
#
# The final constructor argument is a block representing the task to be performed.
# If no block is given an `ArgumentError` will be raised.
#
# **States**
#
# `ScheduledTask` mixes in the [Obligation](Obligation) module thus giving it
# "future" behavior. This includes the expected lifecycle states. `ScheduledTask`
# has one additional state, however. While the task (block) is being executed the
# state of the object will be `:processing`. This additional state is necessary
# because it has implications for task cancellation.
#
# **Cancellation**
#
# A `:pending` task can be cancelled using the `#cancel` method. A task in any
# other state, including `:processing`, cannot be cancelled. The `#cancel`
# method returns a boolean indicating the success of the cancellation attempt.
# A cancelled `ScheduledTask` cannot be restarted. It is immutable.
#
# **Obligation and Observation**
#
# The result of a `ScheduledTask` can be obtained either synchronously or
# asynchronously. `ScheduledTask` mixes in both the [Obligation](Obligation)
# module and the
# [Observable](http://ruby-doc.org/stdlib-2.0/libdoc/observer/rdoc/Observable.html)
# module from the Ruby standard library. With one exception `ScheduledTask`
# behaves identically to [Future](Observable) with regard to these modules.
#
# @!macro copy_options
#
# @example Basic usage
#
# require 'concurrent/scheduled_task'
# require 'csv'
# require 'open-uri'
#
# class Ticker
# def get_year_end_closing(symbol, year, api_key)
# uri = "https://www.alphavantage.co/query?function=TIME_SERIES_MONTHLY&symbol=#{symbol}&apikey=#{api_key}&datatype=csv"
# data = []
# csv = URI.parse(uri).read
# if csv.include?('call frequency')
# return :rate_limit_exceeded
# end
# CSV.parse(csv, headers: true) do |row|
# data << row['close'].to_f if row['timestamp'].include?(year.to_s)
# end
# year_end = data.first
# year_end
# rescue => e
# p e
# end
# end
#
# api_key = ENV['ALPHAVANTAGE_KEY']
# abort(error_message) unless api_key
#
# # Future
# price = Concurrent::Future.execute{ Ticker.new.get_year_end_closing('TWTR', 2013, api_key) }
# price.state #=> :pending
# price.pending? #=> true
# price.value(0) #=> nil (does not block)
#
# sleep(1) # do other stuff
#
# price.value #=> 63.65 (after blocking if necessary)
# price.state #=> :fulfilled
# price.fulfilled? #=> true
# price.value #=> 63.65
#
# @example Successful task execution
#
# task = Concurrent::ScheduledTask.new(2){ 'What does the fox say?' }
# task.state #=> :unscheduled
# task.execute
# task.state #=> pending
#
# # wait for it...
# sleep(3)
#
# task.unscheduled? #=> false
# task.pending? #=> false
# task.fulfilled? #=> true
# task.rejected? #=> false
# task.value #=> 'What does the fox say?'
#
# @example One line creation and execution
#
# task = Concurrent::ScheduledTask.new(2){ 'What does the fox say?' }.execute
# task.state #=> pending
#
# task = Concurrent::ScheduledTask.execute(2){ 'What do you get when you multiply 6 by 9?' }
# task.state #=> pending
#
# @example Failed task execution
#
# task = Concurrent::ScheduledTask.execute(2){ raise StandardError.new('Call me maybe?') }
# task.pending? #=> true
#
# # wait for it...
# sleep(3)
#
# task.unscheduled? #=> false
# task.pending? #=> false
# task.fulfilled? #=> false
# task.rejected? #=> true
# task.value #=> nil
# task.reason #=> #<StandardError: Call me maybe?>
#
# @example Task execution with observation
#
# observer = Class.new{
# def update(time, value, reason)
# puts "The task completed at #{time} with value '#{value}'"
# end
# }.new
#
# task = Concurrent::ScheduledTask.new(2){ 'What does the fox say?' }
# task.add_observer(observer)
# task.execute
# task.pending? #=> true
#
# # wait for it...
# sleep(3)
#
# #>> The task completed at 2013-11-07 12:26:09 -0500 with value 'What does the fox say?'
#
# @!macro monotonic_clock_warning
#
# @see Concurrent.timer
class ScheduledTask < IVar
include Comparable
# The executor on which to execute the task.
# @!visibility private
attr_reader :executor
# Schedule a task for execution at a specified future time.
#
# @param [Float] delay the number of seconds to wait for before executing the task
#
# @yield the task to be performed
#
# @!macro executor_and_deref_options
#
# @option opts [object, Array] :args zero or more arguments to be passed the task
# block on execution
#
# @raise [ArgumentError] When no block is given
# @raise [ArgumentError] When given a time that is in the past
def initialize(delay, opts = {}, &task)
raise ArgumentError.new('no block given') unless block_given?
raise ArgumentError.new('seconds must be greater than zero') if delay.to_f < 0.0
super(NULL, opts, &nil)
synchronize do
ns_set_state(:unscheduled)
@parent = opts.fetch(:timer_set, Concurrent.global_timer_set)
@args = get_arguments_from(opts)
@delay = delay.to_f
@task = task
@time = nil
@executor = Options.executor_from_options(opts) || Concurrent.global_io_executor
self.observers = Collection::CopyOnNotifyObserverSet.new
end
end
# The `delay` value given at instantiation.
#
# @return [Float] the initial delay.
def initial_delay
synchronize { @delay }
end
# The monotonic time at which the the task is scheduled to be executed.
#
# @return [Float] the schedule time or nil if `unscheduled`
def schedule_time
synchronize { @time }
end
# Comparator which orders by schedule time.
#
# @!visibility private
def <=>(other)
schedule_time <=> other.schedule_time
end
# Has the task been cancelled?
#
# @return [Boolean] true if the task is in the given state else false
def cancelled?
synchronize { ns_check_state?(:cancelled) }
end
# In the task execution in progress?
#
# @return [Boolean] true if the task is in the given state else false
def processing?
synchronize { ns_check_state?(:processing) }
end
# Cancel this task and prevent it from executing. A task can only be
# cancelled if it is pending or unscheduled.
#
# @return [Boolean] true if successfully cancelled else false
def cancel
if compare_and_set_state(:cancelled, :pending, :unscheduled)
complete(false, nil, CancelledOperationError.new)
# To avoid deadlocks this call must occur outside of #synchronize
# Changing the state above should prevent redundant calls
@parent.send(:remove_task, self)
else
false
end
end
# Reschedule the task using the original delay and the current time.
# A task can only be reset while it is `:pending`.
#
# @return [Boolean] true if successfully rescheduled else false
def reset
synchronize{ ns_reschedule(@delay) }
end
# Reschedule the task using the given delay and the current time.
# A task can only be reset while it is `:pending`.
#
# @param [Float] delay the number of seconds to wait for before executing the task
#
# @return [Boolean] true if successfully rescheduled else false
#
# @raise [ArgumentError] When given a time that is in the past
def reschedule(delay)
delay = delay.to_f
raise ArgumentError.new('seconds must be greater than zero') if delay < 0.0
synchronize{ ns_reschedule(delay) }
end
# Execute an `:unscheduled` `ScheduledTask`. Immediately sets the state to `:pending`
# and starts counting down toward execution. Does nothing if the `ScheduledTask` is
# in any state other than `:unscheduled`.
#
# @return [ScheduledTask] a reference to `self`
def execute
if compare_and_set_state(:pending, :unscheduled)
synchronize{ ns_schedule(@delay) }
end
self
end
# Create a new `ScheduledTask` object with the given block, execute it, and return the
# `:pending` object.
#
# @param [Float] delay the number of seconds to wait for before executing the task
#
# @!macro executor_and_deref_options
#
# @return [ScheduledTask] the newly created `ScheduledTask` in the `:pending` state
#
# @raise [ArgumentError] if no block is given
def self.execute(delay, opts = {}, &task)
new(delay, opts, &task).execute
end
# Execute the task.
#
# @!visibility private
def process_task
safe_execute(@task, @args)
end
protected :set, :try_set, :fail, :complete
protected
# Schedule the task using the given delay and the current time.
#
# @param [Float] delay the number of seconds to wait for before executing the task
#
# @return [Boolean] true if successfully rescheduled else false
#
# @!visibility private
def ns_schedule(delay)
@delay = delay
@time = Concurrent.monotonic_time + @delay
@parent.send(:post_task, self)
end
# Reschedule the task using the given delay and the current time.
# A task can only be reset while it is `:pending`.
#
# @param [Float] delay the number of seconds to wait for before executing the task
#
# @return [Boolean] true if successfully rescheduled else false
#
# @!visibility private
def ns_reschedule(delay)
return false unless ns_check_state?(:pending)
@parent.send(:remove_task, self) && ns_schedule(delay)
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/map.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/map.rb | require 'thread'
require 'concurrent/constants'
require 'concurrent/utility/engine'
module Concurrent
# @!visibility private
module Collection
# @!visibility private
MapImplementation = case
when Concurrent.on_jruby?
require 'concurrent/utility/native_extension_loader'
# noinspection RubyResolve
JRubyMapBackend
when Concurrent.on_cruby?
require 'concurrent/collection/map/mri_map_backend'
MriMapBackend
when Concurrent.on_truffleruby?
if defined?(::TruffleRuby::ConcurrentMap)
require 'concurrent/collection/map/truffleruby_map_backend'
TruffleRubyMapBackend
else
require 'concurrent/collection/map/synchronized_map_backend'
SynchronizedMapBackend
end
else
warn 'Concurrent::Map: unsupported Ruby engine, using a fully synchronized Concurrent::Map implementation'
require 'concurrent/collection/map/synchronized_map_backend'
SynchronizedMapBackend
end
end
# `Concurrent::Map` is a hash-like object and should have much better performance
# characteristics, especially under high concurrency, than `Concurrent::Hash`.
# However, `Concurrent::Map `is not strictly semantically equivalent to a ruby `Hash`
# -- for instance, it does not necessarily retain ordering by insertion time as `Hash`
# does. For most uses it should do fine though, and we recommend you consider
# `Concurrent::Map` instead of `Concurrent::Hash` for your concurrency-safe hash needs.
class Map < Collection::MapImplementation
# @!macro map.atomic_method
# This method is atomic.
# @!macro map.atomic_method_with_block
# This method is atomic.
# @note Atomic methods taking a block do not allow the `self` instance
# to be used within the block. Doing so will cause a deadlock.
# @!method []=(key, value)
# Set a value with key
# @param [Object] key
# @param [Object] value
# @return [Object] the new value
# @!method compute_if_absent(key)
# Compute and store new value for key if the key is absent.
# @param [Object] key
# @yield new value
# @yieldreturn [Object] new value
# @return [Object] new value or current value
# @!macro map.atomic_method_with_block
# @!method compute_if_present(key)
# Compute and store new value for key if the key is present.
# @param [Object] key
# @yield new value
# @yieldparam old_value [Object]
# @yieldreturn [Object, nil] new value, when nil the key is removed
# @return [Object, nil] new value or nil
# @!macro map.atomic_method_with_block
# @!method compute(key)
# Compute and store new value for key.
# @param [Object] key
# @yield compute new value from old one
# @yieldparam old_value [Object, nil] old_value, or nil when key is absent
# @yieldreturn [Object, nil] new value, when nil the key is removed
# @return [Object, nil] new value or nil
# @!macro map.atomic_method_with_block
# @!method merge_pair(key, value)
# If the key is absent, the value is stored, otherwise new value is
# computed with a block.
# @param [Object] key
# @param [Object] value
# @yield compute new value from old one
# @yieldparam old_value [Object] old value
# @yieldreturn [Object, nil] new value, when nil the key is removed
# @return [Object, nil] new value or nil
# @!macro map.atomic_method_with_block
# @!method replace_pair(key, old_value, new_value)
# Replaces old_value with new_value if key exists and current value
# matches old_value
# @param [Object] key
# @param [Object] old_value
# @param [Object] new_value
# @return [true, false] true if replaced
# @!macro map.atomic_method
# @!method replace_if_exists(key, new_value)
# Replaces current value with new_value if key exists
# @param [Object] key
# @param [Object] new_value
# @return [Object, nil] old value or nil
# @!macro map.atomic_method
# @!method get_and_set(key, value)
# Get the current value under key and set new value.
# @param [Object] key
# @param [Object] value
# @return [Object, nil] old value or nil when the key was absent
# @!macro map.atomic_method
# @!method delete(key)
# Delete key and its value.
# @param [Object] key
# @return [Object, nil] old value or nil when the key was absent
# @!macro map.atomic_method
# @!method delete_pair(key, value)
# Delete pair and its value if current value equals the provided value.
# @param [Object] key
# @param [Object] value
# @return [true, false] true if deleted
# @!macro map.atomic_method
# NonConcurrentMapBackend handles default_proc natively
unless defined?(Collection::NonConcurrentMapBackend) and self < Collection::NonConcurrentMapBackend
# @param [Hash, nil] options options to set the :initial_capacity or :load_factor. Ignored on some Rubies.
# @param [Proc] default_proc Optional block to compute the default value if the key is not set, like `Hash#default_proc`
def initialize(options = nil, &default_proc)
if options.kind_of?(::Hash)
validate_options_hash!(options)
else
options = nil
end
super(options)
@default_proc = default_proc
end
# Get a value with key
# @param [Object] key
# @return [Object] the value
def [](key)
if value = super # non-falsy value is an existing mapping, return it right away
value
# re-check is done with get_or_default(key, NULL) instead of a simple !key?(key) in order to avoid a race condition, whereby by the time the current thread gets to the key?(key) call
# a key => value mapping might have already been created by a different thread (key?(key) would then return true, this elsif branch wouldn't be taken and an incorrect +nil+ value
# would be returned)
# note: nil == value check is not technically necessary
elsif @default_proc && nil == value && NULL == (value = get_or_default(key, NULL))
@default_proc.call(self, key)
else
value
end
end
end
alias_method :get, :[]
alias_method :put, :[]=
# Get a value with key, or default_value when key is absent,
# or fail when no default_value is given.
# @param [Object] key
# @param [Object] default_value
# @yield default value for a key
# @yieldparam key [Object]
# @yieldreturn [Object] default value
# @return [Object] the value or default value
# @raise [KeyError] when key is missing and no default_value is provided
# @!macro map_method_not_atomic
# @note The "fetch-then-act" methods of `Map` are not atomic. `Map` is intended
# to be use as a concurrency primitive with strong happens-before
# guarantees. It is not intended to be used as a high-level abstraction
# supporting complex operations. All read and write operations are
# thread safe, but no guarantees are made regarding race conditions
# between the fetch operation and yielding to the block. Additionally,
# this method does not support recursion. This is due to internal
# constraints that are very unlikely to change in the near future.
def fetch(key, default_value = NULL)
if NULL != (value = get_or_default(key, NULL))
value
elsif block_given?
yield key
elsif NULL != default_value
default_value
else
raise_fetch_no_key
end
end
# Fetch value with key, or store default value when key is absent,
# or fail when no default_value is given. This is a two step operation,
# therefore not atomic. The store can overwrite other concurrently
# stored value.
# @param [Object] key
# @param [Object] default_value
# @yield default value for a key
# @yieldparam key [Object]
# @yieldreturn [Object] default value
# @return [Object] the value or default value
def fetch_or_store(key, default_value = NULL)
fetch(key) do
put(key, block_given? ? yield(key) : (NULL == default_value ? raise_fetch_no_key : default_value))
end
end
# Insert value into map with key if key is absent in one atomic step.
# @param [Object] key
# @param [Object] value
# @return [Object, nil] the previous value when key was present or nil when there was no key
def put_if_absent(key, value)
computed = false
result = compute_if_absent(key) do
computed = true
value
end
computed ? nil : result
end unless method_defined?(:put_if_absent)
# Is the value stored in the map. Iterates over all values.
# @param [Object] value
# @return [true, false]
def value?(value)
each_value do |v|
return true if value.equal?(v)
end
false
end
# All keys
# @return [::Array<Object>] keys
def keys
arr = []
each_pair { |k, v| arr << k }
arr
end unless method_defined?(:keys)
# All values
# @return [::Array<Object>] values
def values
arr = []
each_pair { |k, v| arr << v }
arr
end unless method_defined?(:values)
# Iterates over each key.
# @yield for each key in the map
# @yieldparam key [Object]
# @return [self]
# @!macro map.atomic_method_with_block
def each_key
each_pair { |k, v| yield k }
end unless method_defined?(:each_key)
# Iterates over each value.
# @yield for each value in the map
# @yieldparam value [Object]
# @return [self]
# @!macro map.atomic_method_with_block
def each_value
each_pair { |k, v| yield v }
end unless method_defined?(:each_value)
# Iterates over each key value pair.
# @yield for each key value pair in the map
# @yieldparam key [Object]
# @yieldparam value [Object]
# @return [self]
# @!macro map.atomic_method_with_block
def each_pair
return enum_for :each_pair unless block_given?
super
end
alias_method :each, :each_pair unless method_defined?(:each)
# Find key of a value.
# @param [Object] value
# @return [Object, nil] key or nil when not found
def key(value)
each_pair { |k, v| return k if v == value }
nil
end unless method_defined?(:key)
# Is map empty?
# @return [true, false]
def empty?
each_pair { |k, v| return false }
true
end unless method_defined?(:empty?)
# The size of map.
# @return [Integer] size
def size
count = 0
each_pair { |k, v| count += 1 }
count
end unless method_defined?(:size)
# @!visibility private
def marshal_dump
raise TypeError, "can't dump hash with default proc" if @default_proc
h = {}
each_pair { |k, v| h[k] = v }
h
end
# @!visibility private
def marshal_load(hash)
initialize
populate_from(hash)
end
undef :freeze
# @!visibility private
def inspect
format '%s entries=%d default_proc=%s>', to_s[0..-2], size.to_s, @default_proc.inspect
end
private
def raise_fetch_no_key
raise KeyError, 'key not found'
end
def initialize_copy(other)
super
populate_from(other)
end
def populate_from(hash)
hash.each_pair { |k, v| self[k] = v }
self
end
def validate_options_hash!(options)
if (initial_capacity = options[:initial_capacity]) && (!initial_capacity.kind_of?(Integer) || initial_capacity < 0)
raise ArgumentError, ":initial_capacity must be a positive Integer"
end
if (load_factor = options[:load_factor]) && (!load_factor.kind_of?(Numeric) || load_factor <= 0 || load_factor > 1)
raise ArgumentError, ":load_factor must be a number between 0 and 1"
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/promise.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/promise.rb | require 'thread'
require 'concurrent/constants'
require 'concurrent/errors'
require 'concurrent/ivar'
require 'concurrent/executor/safe_task_executor'
require 'concurrent/options'
module Concurrent
PromiseExecutionError = Class.new(StandardError)
# Promises are inspired by the JavaScript [Promises/A](http://wiki.commonjs.org/wiki/Promises/A)
# and [Promises/A+](http://promises-aplus.github.io/promises-spec/) specifications.
#
# > A promise represents the eventual value returned from the single
# > completion of an operation.
#
# Promises are similar to futures and share many of the same behaviours.
# Promises are far more robust, however. Promises can be chained in a tree
# structure where each promise may have zero or more children. Promises are
# chained using the `then` method. The result of a call to `then` is always
# another promise. Promises are resolved asynchronously (with respect to the
# main thread) but in a strict order: parents are guaranteed to be resolved
# before their children, children before their younger siblings. The `then`
# method takes two parameters: an optional block to be executed upon parent
# resolution and an optional callable to be executed upon parent failure. The
# result of each promise is passed to each of its children upon resolution.
# When a promise is rejected all its children will be summarily rejected and
# will receive the reason.
#
# Promises have several possible states: *:unscheduled*, *:pending*,
# *:processing*, *:rejected*, or *:fulfilled*. These are also aggregated as
# `#incomplete?` and `#complete?`. When a Promise is created it is set to
# *:unscheduled*. Once the `#execute` method is called the state becomes
# *:pending*. Once a job is pulled from the thread pool's queue and is given
# to a thread for processing (often immediately upon `#post`) the state
# becomes *:processing*. The future will remain in this state until processing
# is complete. A future that is in the *:unscheduled*, *:pending*, or
# *:processing* is considered `#incomplete?`. A `#complete?` Promise is either
# *:rejected*, indicating that an exception was thrown during processing, or
# *:fulfilled*, indicating success. If a Promise is *:fulfilled* its `#value`
# will be updated to reflect the result of the operation. If *:rejected* the
# `reason` will be updated with a reference to the thrown exception. The
# predicate methods `#unscheduled?`, `#pending?`, `#rejected?`, and
# `#fulfilled?` can be called at any time to obtain the state of the Promise,
# as can the `#state` method, which returns a symbol.
#
# Retrieving the value of a promise is done through the `value` (alias:
# `deref`) method. Obtaining the value of a promise is a potentially blocking
# operation. When a promise is *rejected* a call to `value` will return `nil`
# immediately. When a promise is *fulfilled* a call to `value` will
# immediately return the current value. When a promise is *pending* a call to
# `value` will block until the promise is either *rejected* or *fulfilled*. A
# *timeout* value can be passed to `value` to limit how long the call will
# block. If `nil` the call will block indefinitely. If `0` the call will not
# block. Any other integer or float value will indicate the maximum number of
# seconds to block.
#
# Promises run on the global thread pool.
#
# @!macro copy_options
#
# ### Examples
#
# Start by requiring promises
#
# ```ruby
# require 'concurrent/promise'
# ```
#
# Then create one
#
# ```ruby
# p = Concurrent::Promise.execute do
# # do something
# 42
# end
# ```
#
# Promises can be chained using the `then` method. The `then` method accepts a
# block and an executor, to be executed on fulfillment, and a callable argument to be executed
# on rejection. The result of the each promise is passed as the block argument
# to chained promises.
#
# ```ruby
# p = Concurrent::Promise.new{10}.then{|x| x * 2}.then{|result| result - 10 }.execute
# ```
#
# And so on, and so on, and so on...
#
# ```ruby
# p = Concurrent::Promise.fulfill(20).
# then{|result| result - 10 }.
# then{|result| result * 3 }.
# then(executor: different_executor){|result| result % 5 }.execute
# ```
#
# The initial state of a newly created Promise depends on the state of its parent:
# - if parent is *unscheduled* the child will be *unscheduled*
# - if parent is *pending* the child will be *pending*
# - if parent is *fulfilled* the child will be *pending*
# - if parent is *rejected* the child will be *pending* (but will ultimately be *rejected*)
#
# Promises are executed asynchronously from the main thread. By the time a
# child Promise finishes initialization it may be in a different state than its
# parent (by the time a child is created its parent may have completed
# execution and changed state). Despite being asynchronous, however, the order
# of execution of Promise objects in a chain (or tree) is strictly defined.
#
# There are multiple ways to create and execute a new `Promise`. Both ways
# provide identical behavior:
#
# ```ruby
# # create, operate, then execute
# p1 = Concurrent::Promise.new{ "Hello World!" }
# p1.state #=> :unscheduled
# p1.execute
#
# # create and immediately execute
# p2 = Concurrent::Promise.new{ "Hello World!" }.execute
#
# # execute during creation
# p3 = Concurrent::Promise.execute{ "Hello World!" }
# ```
#
# Once the `execute` method is called a `Promise` becomes `pending`:
#
# ```ruby
# p = Concurrent::Promise.execute{ "Hello, world!" }
# p.state #=> :pending
# p.pending? #=> true
# ```
#
# Wait a little bit, and the promise will resolve and provide a value:
#
# ```ruby
# p = Concurrent::Promise.execute{ "Hello, world!" }
# sleep(0.1)
#
# p.state #=> :fulfilled
# p.fulfilled? #=> true
# p.value #=> "Hello, world!"
# ```
#
# If an exception occurs, the promise will be rejected and will provide
# a reason for the rejection:
#
# ```ruby
# p = Concurrent::Promise.execute{ raise StandardError.new("Here comes the Boom!") }
# sleep(0.1)
#
# p.state #=> :rejected
# p.rejected? #=> true
# p.reason #=> "#<StandardError: Here comes the Boom!>"
# ```
#
# #### Rejection
#
# When a promise is rejected all its children will be rejected and will
# receive the rejection `reason` as the rejection callable parameter:
#
# ```ruby
# p = Concurrent::Promise.execute { Thread.pass; raise StandardError }
#
# c1 = p.then(-> reason { 42 })
# c2 = p.then(-> reason { raise 'Boom!' })
#
# c1.wait.state #=> :fulfilled
# c1.value #=> 42
# c2.wait.state #=> :rejected
# c2.reason #=> #<RuntimeError: Boom!>
# ```
#
# Once a promise is rejected it will continue to accept children that will
# receive immediately rejection (they will be executed asynchronously).
#
# #### Aliases
#
# The `then` method is the most generic alias: it accepts a block to be
# executed upon parent fulfillment and a callable to be executed upon parent
# rejection. At least one of them should be passed. The default block is `{
# |result| result }` that fulfills the child with the parent value. The
# default callable is `{ |reason| raise reason }` that rejects the child with
# the parent reason.
#
# - `on_success { |result| ... }` is the same as `then {|result| ... }`
# - `rescue { |reason| ... }` is the same as `then(Proc.new { |reason| ... } )`
# - `rescue` is aliased by `catch` and `on_error`
class Promise < IVar
# Initialize a new Promise with the provided options.
#
# @!macro executor_and_deref_options
#
# @!macro promise_init_options
#
# @option opts [Promise] :parent the parent `Promise` when building a chain/tree
# @option opts [Proc] :on_fulfill fulfillment handler
# @option opts [Proc] :on_reject rejection handler
# @option opts [object, Array] :args zero or more arguments to be passed
# the task block on execution
#
# @yield The block operation to be performed asynchronously.
#
# @raise [ArgumentError] if no block is given
#
# @see http://wiki.commonjs.org/wiki/Promises/A
# @see http://promises-aplus.github.io/promises-spec/
def initialize(opts = {}, &block)
opts.delete_if { |k, v| v.nil? }
super(NULL, opts.merge(__promise_body_from_block__: block), &nil)
end
# Create a new `Promise` and fulfill it immediately.
#
# @!macro executor_and_deref_options
#
# @!macro promise_init_options
#
# @raise [ArgumentError] if no block is given
#
# @return [Promise] the newly created `Promise`
def self.fulfill(value, opts = {})
Promise.new(opts).tap { |p| p.send(:synchronized_set_state!, true, value, nil) }
end
# Create a new `Promise` and reject it immediately.
#
# @!macro executor_and_deref_options
#
# @!macro promise_init_options
#
# @raise [ArgumentError] if no block is given
#
# @return [Promise] the newly created `Promise`
def self.reject(reason, opts = {})
Promise.new(opts).tap { |p| p.send(:synchronized_set_state!, false, nil, reason) }
end
# Execute an `:unscheduled` `Promise`. Immediately sets the state to `:pending` and
# passes the block to a new thread/thread pool for eventual execution.
# Does nothing if the `Promise` is in any state other than `:unscheduled`.
#
# @return [Promise] a reference to `self`
def execute
if root?
if compare_and_set_state(:pending, :unscheduled)
set_pending
realize(@promise_body)
end
else
compare_and_set_state(:pending, :unscheduled)
@parent.execute
end
self
end
# @!macro ivar_set_method
#
# @raise [Concurrent::PromiseExecutionError] if not the root promise
def set(value = NULL, &block)
raise PromiseExecutionError.new('supported only on root promise') unless root?
check_for_block_or_value!(block_given?, value)
synchronize do
if @state != :unscheduled
raise MultipleAssignmentError
else
@promise_body = block || Proc.new { |result| value }
end
end
execute
end
# @!macro ivar_fail_method
#
# @raise [Concurrent::PromiseExecutionError] if not the root promise
def fail(reason = StandardError.new)
set { raise reason }
end
# Create a new `Promise` object with the given block, execute it, and return the
# `:pending` object.
#
# @!macro executor_and_deref_options
#
# @!macro promise_init_options
#
# @return [Promise] the newly created `Promise` in the `:pending` state
#
# @raise [ArgumentError] if no block is given
#
# @example
# promise = Concurrent::Promise.execute{ sleep(1); 42 }
# promise.state #=> :pending
def self.execute(opts = {}, &block)
new(opts, &block).execute
end
# Chain a new promise off the current promise.
#
# @return [Promise] the new promise
# @yield The block operation to be performed asynchronously.
# @overload then(rescuer, executor, &block)
# @param [Proc] rescuer An optional rescue block to be executed if the
# promise is rejected.
# @param [ThreadPool] executor An optional thread pool executor to be used
# in the new Promise
# @overload then(rescuer, executor: executor, &block)
# @param [Proc] rescuer An optional rescue block to be executed if the
# promise is rejected.
# @param [ThreadPool] executor An optional thread pool executor to be used
# in the new Promise
def then(*args, &block)
if args.last.is_a?(::Hash)
executor = args.pop[:executor]
rescuer = args.first
else
rescuer, executor = args
end
executor ||= @executor
raise ArgumentError.new('rescuers and block are both missing') if rescuer.nil? && !block_given?
block = Proc.new { |result| result } unless block_given?
child = Promise.new(
parent: self,
executor: executor,
on_fulfill: block,
on_reject: rescuer
)
synchronize do
child.state = :pending if @state == :pending
child.on_fulfill(apply_deref_options(@value)) if @state == :fulfilled
child.on_reject(@reason) if @state == :rejected
@children << child
end
child
end
# Chain onto this promise an action to be undertaken on success
# (fulfillment).
#
# @yield The block to execute
#
# @return [Promise] self
def on_success(&block)
raise ArgumentError.new('no block given') unless block_given?
self.then(&block)
end
# Chain onto this promise an action to be undertaken on failure
# (rejection).
#
# @yield The block to execute
#
# @return [Promise] self
def rescue(&block)
self.then(block)
end
alias_method :catch, :rescue
alias_method :on_error, :rescue
# Yield the successful result to the block that returns a promise. If that
# promise is also successful the result is the result of the yielded promise.
# If either part fails the whole also fails.
#
# @example
# Promise.execute { 1 }.flat_map { |v| Promise.execute { v + 2 } }.value! #=> 3
#
# @return [Promise]
def flat_map(&block)
child = Promise.new(
parent: self,
executor: ImmediateExecutor.new,
)
on_error { |e| child.on_reject(e) }
on_success do |result1|
begin
inner = block.call(result1)
inner.execute
inner.on_success { |result2| child.on_fulfill(result2) }
inner.on_error { |e| child.on_reject(e) }
rescue => e
child.on_reject(e)
end
end
child
end
# Builds a promise that produces the result of promises in an Array
# and fails if any of them fails.
#
# @overload zip(*promises)
# @param [Array<Promise>] promises
#
# @overload zip(*promises, opts)
# @param [Array<Promise>] promises
# @param [Hash] opts the configuration options
# @option opts [Executor] :executor (ImmediateExecutor.new) when set use the given `Executor` instance.
# @option opts [Boolean] :execute (true) execute promise before returning
#
# @return [Promise<Array>]
def self.zip(*promises)
opts = promises.last.is_a?(::Hash) ? promises.pop.dup : {}
opts[:executor] ||= ImmediateExecutor.new
zero = if !opts.key?(:execute) || opts.delete(:execute)
fulfill([], opts)
else
Promise.new(opts) { [] }
end
promises.reduce(zero) do |p1, p2|
p1.flat_map do |results|
p2.then do |next_result|
results << next_result
end
end
end
end
# Builds a promise that produces the result of self and others in an Array
# and fails if any of them fails.
#
# @overload zip(*promises)
# @param [Array<Promise>] others
#
# @overload zip(*promises, opts)
# @param [Array<Promise>] others
# @param [Hash] opts the configuration options
# @option opts [Executor] :executor (ImmediateExecutor.new) when set use the given `Executor` instance.
# @option opts [Boolean] :execute (true) execute promise before returning
#
# @return [Promise<Array>]
def zip(*others)
self.class.zip(self, *others)
end
# Aggregates a collection of promises and executes the `then` condition
# if all aggregated promises succeed. Executes the `rescue` handler with
# a `Concurrent::PromiseExecutionError` if any of the aggregated promises
# fail. Upon execution will execute any of the aggregate promises that
# were not already executed.
#
# @!macro promise_self_aggregate
#
# The returned promise will not yet have been executed. Additional `#then`
# and `#rescue` handlers may still be provided. Once the returned promise
# is execute the aggregate promises will be also be executed (if they have
# not been executed already). The results of the aggregate promises will
# be checked upon completion. The necessary `#then` and `#rescue` blocks
# on the aggregating promise will then be executed as appropriate. If the
# `#rescue` handlers are executed the raises exception will be
# `Concurrent::PromiseExecutionError`.
#
# @param [Array] promises Zero or more promises to aggregate
# @return [Promise] an unscheduled (not executed) promise that aggregates
# the promises given as arguments
def self.all?(*promises)
aggregate(:all?, *promises)
end
# Aggregates a collection of promises and executes the `then` condition
# if any aggregated promises succeed. Executes the `rescue` handler with
# a `Concurrent::PromiseExecutionError` if any of the aggregated promises
# fail. Upon execution will execute any of the aggregate promises that
# were not already executed.
#
# @!macro promise_self_aggregate
def self.any?(*promises)
aggregate(:any?, *promises)
end
protected
def ns_initialize(value, opts)
super
@executor = Options.executor_from_options(opts) || Concurrent.global_io_executor
@args = get_arguments_from(opts)
@parent = opts.fetch(:parent) { nil }
@on_fulfill = opts.fetch(:on_fulfill) { Proc.new { |result| result } }
@on_reject = opts.fetch(:on_reject) { Proc.new { |reason| raise reason } }
@promise_body = opts[:__promise_body_from_block__] || Proc.new { |result| result }
@state = :unscheduled
@children = []
end
# Aggregate a collection of zero or more promises under a composite promise,
# execute the aggregated promises and collect them into a standard Ruby array,
# call the given Ruby `Ennnumerable` predicate (such as `any?`, `all?`, `none?`,
# or `one?`) on the collection checking for the success or failure of each,
# then executing the composite's `#then` handlers if the predicate returns
# `true` or executing the composite's `#rescue` handlers if the predicate
# returns false.
#
# @!macro promise_self_aggregate
def self.aggregate(method, *promises)
composite = Promise.new do
completed = promises.collect do |promise|
promise.execute if promise.unscheduled?
promise.wait
promise
end
unless completed.empty? || completed.send(method){|promise| promise.fulfilled? }
raise PromiseExecutionError
end
end
composite
end
# @!visibility private
def set_pending
synchronize do
@state = :pending
@children.each { |c| c.set_pending }
end
end
# @!visibility private
def root? # :nodoc:
@parent.nil?
end
# @!visibility private
def on_fulfill(result)
realize Proc.new { @on_fulfill.call(result) }
nil
end
# @!visibility private
def on_reject(reason)
realize Proc.new { @on_reject.call(reason) }
nil
end
# @!visibility private
def notify_child(child)
if_state(:fulfilled) { child.on_fulfill(apply_deref_options(@value)) }
if_state(:rejected) { child.on_reject(@reason) }
end
# @!visibility private
def complete(success, value, reason)
children_to_notify = synchronize do
set_state!(success, value, reason)
@children.dup
end
children_to_notify.each { |child| notify_child(child) }
observers.notify_and_delete_observers{ [Time.now, self.value, reason] }
end
# @!visibility private
def realize(task)
@executor.post do
success, value, reason = SafeTaskExecutor.new(task, rescue_exception: true).execute(*@args)
complete(success, value, reason)
end
end
# @!visibility private
def set_state!(success, value, reason)
set_state(success, value, reason)
event.set
end
# @!visibility private
def synchronized_set_state!(success, value, reason)
synchronize { set_state!(success, value, reason) }
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/mutable_struct.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/mutable_struct.rb | require 'concurrent/synchronization/abstract_struct'
require 'concurrent/synchronization/lockable_object'
module Concurrent
# An thread-safe variation of Ruby's standard `Struct`. Values can be set at
# construction or safely changed at any time during the object's lifecycle.
#
# @see http://ruby-doc.org/core/Struct.html Ruby standard library `Struct`
module MutableStruct
include Synchronization::AbstractStruct
# @!macro struct_new
#
# Factory for creating new struct classes.
#
# ```
# new([class_name] [, member_name]+>) -> StructClass click to toggle source
# new([class_name] [, member_name]+>) {|StructClass| block } -> StructClass
# new(value, ...) -> obj
# StructClass[value, ...] -> obj
# ```
#
# The first two forms are used to create a new struct subclass `class_name`
# that can contain a value for each member_name . This subclass can be
# used to create instances of the structure like any other Class .
#
# If the `class_name` is omitted an anonymous struct class will be created.
# Otherwise, the name of this struct will appear as a constant in the struct class,
# so it must be unique for all structs under this base class and must start with a
# capital letter. Assigning a struct class to a constant also gives the class
# the name of the constant.
#
# If a block is given it will be evaluated in the context of `StructClass`, passing
# the created class as a parameter. This is the recommended way to customize a struct.
# Subclassing an anonymous struct creates an extra anonymous class that will never be used.
#
# The last two forms create a new instance of a struct subclass. The number of value
# parameters must be less than or equal to the number of attributes defined for the
# struct. Unset parameters default to nil. Passing more parameters than number of attributes
# will raise an `ArgumentError`.
#
# @see http://ruby-doc.org/core/Struct.html#method-c-new Ruby standard library `Struct#new`
# @!macro struct_values
#
# Returns the values for this struct as an Array.
#
# @return [Array] the values for this struct
#
def values
synchronize { ns_values }
end
alias_method :to_a, :values
# @!macro struct_values_at
#
# Returns the struct member values for each selector as an Array.
#
# A selector may be either an Integer offset or a Range of offsets (as in `Array#values_at`).
#
# @param [Fixnum, Range] indexes the index(es) from which to obatin the values (in order)
def values_at(*indexes)
synchronize { ns_values_at(indexes) }
end
# @!macro struct_inspect
#
# Describe the contents of this struct in a string.
#
# @return [String] the contents of this struct in a string
def inspect
synchronize { ns_inspect }
end
alias_method :to_s, :inspect
# @!macro struct_merge
#
# Returns a new struct containing the contents of `other` and the contents
# of `self`. If no block is specified, the value for entries with duplicate
# keys will be that of `other`. Otherwise the value for each duplicate key
# is determined by calling the block with the key, its value in `self` and
# its value in `other`.
#
# @param [Hash] other the hash from which to set the new values
# @yield an options block for resolving duplicate keys
# @yieldparam [String, Symbol] member the name of the member which is duplicated
# @yieldparam [Object] selfvalue the value of the member in `self`
# @yieldparam [Object] othervalue the value of the member in `other`
#
# @return [Synchronization::AbstractStruct] a new struct with the new values
#
# @raise [ArgumentError] of given a member that is not defined in the struct
def merge(other, &block)
synchronize { ns_merge(other, &block) }
end
# @!macro struct_to_h
#
# Returns a hash containing the names and values for the struct’s members.
#
# @return [Hash] the names and values for the struct’s members
def to_h
synchronize { ns_to_h }
end
# @!macro struct_get
#
# Attribute Reference
#
# @param [Symbol, String, Integer] member the string or symbol name of the member
# for which to obtain the value or the member's index
#
# @return [Object] the value of the given struct member or the member at the given index.
#
# @raise [NameError] if the member does not exist
# @raise [IndexError] if the index is out of range.
def [](member)
synchronize { ns_get(member) }
end
# @!macro struct_equality
#
# Equality
#
# @return [Boolean] true if other has the same struct subclass and has
# equal member values (according to `Object#==`)
def ==(other)
synchronize { ns_equality(other) }
end
# @!macro struct_each
#
# Yields the value of each struct member in order. If no block is given
# an enumerator is returned.
#
# @yield the operation to be performed on each struct member
# @yieldparam [Object] value each struct value (in order)
def each(&block)
return enum_for(:each) unless block_given?
synchronize { ns_each(&block) }
end
# @!macro struct_each_pair
#
# Yields the name and value of each struct member in order. If no block is
# given an enumerator is returned.
#
# @yield the operation to be performed on each struct member/value pair
# @yieldparam [Object] member each struct member (in order)
# @yieldparam [Object] value each struct value (in order)
def each_pair(&block)
return enum_for(:each_pair) unless block_given?
synchronize { ns_each_pair(&block) }
end
# @!macro struct_select
#
# Yields each member value from the struct to the block and returns an Array
# containing the member values from the struct for which the given block
# returns a true value (equivalent to `Enumerable#select`).
#
# @yield the operation to be performed on each struct member
# @yieldparam [Object] value each struct value (in order)
#
# @return [Array] an array containing each value for which the block returns true
def select(&block)
return enum_for(:select) unless block_given?
synchronize { ns_select(&block) }
end
# @!macro struct_set
#
# Attribute Assignment
#
# Sets the value of the given struct member or the member at the given index.
#
# @param [Symbol, String, Integer] member the string or symbol name of the member
# for which to obtain the value or the member's index
#
# @return [Object] the value of the given struct member or the member at the given index.
#
# @raise [NameError] if the name does not exist
# @raise [IndexError] if the index is out of range.
def []=(member, value)
if member.is_a? Integer
length = synchronize { @values.length }
if member >= length
raise IndexError.new("offset #{member} too large for struct(size:#{length})")
end
synchronize { @values[member] = value }
else
send("#{member}=", value)
end
rescue NoMethodError
raise NameError.new("no member '#{member}' in struct")
end
private
# @!visibility private
def initialize_copy(original)
synchronize do
super(original)
ns_initialize_copy
end
end
# @!macro struct_new
def self.new(*args, &block)
clazz_name = nil
if args.length == 0
raise ArgumentError.new('wrong number of arguments (0 for 1+)')
elsif args.length > 0 && args.first.is_a?(String)
clazz_name = args.shift
end
FACTORY.define_struct(clazz_name, args, &block)
end
FACTORY = Class.new(Synchronization::LockableObject) do
def define_struct(name, members, &block)
synchronize do
clazz = Synchronization::AbstractStruct.define_struct_class(MutableStruct, Synchronization::LockableObject, name, members, &block)
members.each_with_index do |member, index|
clazz.send :remove_method, member
clazz.send(:define_method, member) do
synchronize { @values[index] }
end
clazz.send(:define_method, "#{member}=") do |value|
synchronize { @values[index] = value }
end
end
clazz
end
end
end.new
private_constant :FACTORY
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/mvar.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/mvar.rb | require 'concurrent/concern/dereferenceable'
require 'concurrent/synchronization/object'
module Concurrent
# An `MVar` is a synchronized single element container. They are empty or
# contain one item. Taking a value from an empty `MVar` blocks, as does
# putting a value into a full one. You can either think of them as blocking
# queue of length one, or a special kind of mutable variable.
#
# On top of the fundamental `#put` and `#take` operations, we also provide a
# `#modify` that is atomic with respect to operations on the same instance.
# These operations all support timeouts.
#
# We also support non-blocking operations `#try_put!` and `#try_take!`, a
# `#set!` that ignores existing values, a `#value` that returns the value
# without removing it or returns `MVar::EMPTY`, and a `#modify!` that yields
# `MVar::EMPTY` if the `MVar` is empty and can be used to set `MVar::EMPTY`.
# You shouldn't use these operations in the first instance.
#
# `MVar` is a [Dereferenceable](Dereferenceable).
#
# `MVar` is related to M-structures in Id, `MVar` in Haskell and `SyncVar` in Scala.
#
# Note that unlike the original Haskell paper, our `#take` is blocking. This is how
# Haskell and Scala do it today.
#
# @!macro copy_options
#
# ## See Also
#
# 1. P. Barth, R. Nikhil, and Arvind. [M-Structures: Extending a parallel, non- strict, functional language with state](http://dl.acm.org/citation.cfm?id=652538). In Proceedings of the 5th
# ACM Conference on Functional Programming Languages and Computer Architecture (FPCA), 1991.
#
# 2. S. Peyton Jones, A. Gordon, and S. Finne. [Concurrent Haskell](http://dl.acm.org/citation.cfm?id=237794).
# In Proceedings of the 23rd Symposium on Principles of Programming Languages
# (PoPL), 1996.
class MVar < Synchronization::Object
  include Concern::Dereferenceable

  safe_initialization!

  # Unique sentinel value that represents that an `MVar` was empty
  EMPTY = ::Object.new

  # Unique sentinel value that represents that an `MVar` timed out before
  # it was able to produce a value.
  TIMEOUT = ::Object.new

  # Create a new `MVar`, either empty or with an initial value.
  #
  # @param [Object] value the initial value, or `EMPTY` (the default) for
  #   an empty `MVar`
  # @param [Hash] opts the options controlling how the future will be processed
  #
  # @!macro deref_options
  def initialize(value = EMPTY, opts = {})
    @value = value
    @mutex = Mutex.new
    # Signalled when the MVar becomes empty; waited on by #put.
    @empty_condition = ConditionVariable.new
    # Signalled when the MVar becomes full; waited on by #take/#borrow/#modify.
    @full_condition = ConditionVariable.new
    set_deref_options(opts)
  end

  # Remove the value from an `MVar`, leaving it empty, and blocking if there
  # isn't a value. A timeout can be set to limit the time spent blocked, in
  # which case it returns `TIMEOUT` if the time is exceeded.
  # @return [Object] the value that was taken, or `TIMEOUT`
  def take(timeout = nil)
    @mutex.synchronize do
      wait_for_full(timeout)

      # If we timed out we'll still be empty
      if unlocked_full?
        value = @value
        @value = EMPTY
        @empty_condition.signal
        apply_deref_options(value)
      else
        TIMEOUT
      end
    end
  end

  # Acquires the lock on the `MVar`, yields the current value to the provided
  # block, and releases the lock afterwards; the value itself is left in
  # place. A timeout can be set to limit the time spent blocked, in which
  # case it returns `TIMEOUT` if the time is exceeded.
  # @return [Object] the value returned by the block, or `TIMEOUT`
  def borrow(timeout = nil)
    @mutex.synchronize do
      wait_for_full(timeout)

      # If we timed out we'll still be empty
      if unlocked_full?
        yield @value
      else
        TIMEOUT
      end
    end
  end

  # Put a value into an `MVar`, blocking if there is already a value until
  # it is empty. A timeout can be set to limit the time spent blocked, in
  # which case it returns `TIMEOUT` if the time is exceeded.
  # @return [Object] the value that was put, or `TIMEOUT`
  def put(value, timeout = nil)
    @mutex.synchronize do
      wait_for_empty(timeout)

      # If we timed out we won't be empty
      if unlocked_empty?
        @value = value
        @full_condition.signal
        apply_deref_options(value)
      else
        TIMEOUT
      end
    end
  end

  # Atomically `take`, yield the value to a block for transformation, and then
  # `put` the transformed value. Returns the pre-transform value. A timeout can
  # be set to limit the time spent blocked, in which case it returns `TIMEOUT`
  # if the time is exceeded.
  # @return [Object] the pre-transform value, or `TIMEOUT`
  def modify(timeout = nil)
    raise ArgumentError.new('no block given') unless block_given?

    @mutex.synchronize do
      wait_for_full(timeout)

      # If we timed out we'll still be empty
      if unlocked_full?
        value = @value
        @value = yield value
        # The MVar stays full (it holds the block's result), so wake a
        # waiter that needs a full MVar rather than an empty one.
        @full_condition.signal
        apply_deref_options(value)
      else
        TIMEOUT
      end
    end
  end

  # Non-blocking version of `take`, that returns `EMPTY` instead of blocking.
  def try_take!
    @mutex.synchronize do
      if unlocked_full?
        value = @value
        @value = EMPTY
        @empty_condition.signal
        apply_deref_options(value)
      else
        EMPTY
      end
    end
  end

  # Non-blocking version of `put`, that returns whether or not it was successful.
  def try_put!(value)
    @mutex.synchronize do
      if unlocked_empty?
        @value = value
        @full_condition.signal
        true
      else
        false
      end
    end
  end

  # Non-blocking version of `put` that will overwrite an existing value.
  # Returns the previous contents (which may be `EMPTY`) after applying
  # the configured dereference options.
  def set!(value)
    @mutex.synchronize do
      old_value = @value
      @value = value
      @full_condition.signal
      apply_deref_options(old_value)
    end
  end

  # Non-blocking version of `modify` that will yield with `EMPTY` if there is no value yet.
  def modify!
    raise ArgumentError.new('no block given') unless block_given?

    @mutex.synchronize do
      value = @value
      @value = yield value
      # The block may have emptied or filled the MVar; signal whichever
      # group of waiters can now make progress.
      if unlocked_empty?
        @empty_condition.signal
      else
        @full_condition.signal
      end
      apply_deref_options(value)
    end
  end

  # Returns if the `MVar` is currently empty.
  def empty?
    @mutex.synchronize { @value == EMPTY }
  end

  # Returns if the `MVar` currently contains a value.
  def full?
    !empty?
  end

  protected

  def synchronize(&block)
    @mutex.synchronize(&block)
  end

  private

  # The `unlocked_*` predicates assume the caller already holds @mutex.
  def unlocked_empty?
    @value == EMPTY
  end

  def unlocked_full?
    ! unlocked_empty?
  end

  def wait_for_full(timeout)
    wait_while(@full_condition, timeout) { unlocked_empty? }
  end

  def wait_for_empty(timeout)
    wait_while(@empty_condition, timeout) { unlocked_full? }
  end

  # Wait on `condition` while the block yields true. The loop re-checks the
  # predicate after every wake-up to guard against spurious wake-ups; with a
  # timeout, the remaining time is recomputed from a monotonic deadline.
  def wait_while(condition, timeout)
    if timeout.nil?
      while yield
        condition.wait(@mutex)
      end
    else
      stop = Concurrent.monotonic_time + timeout
      while yield && timeout > 0.0
        condition.wait(@mutex, timeout)
        timeout = stop - Concurrent.monotonic_time
      end
    end
  end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/hash.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/hash.rb | require 'concurrent/utility/engine'
require 'concurrent/thread_safe/util'
module Concurrent

  # @!macro concurrent_hash
  #
  # A thread-safe subclass of Hash. This version locks against the object
  # itself for every method call, ensuring only one thread can be reading
  # or writing at a time. This includes iteration methods like `#each`,
  # which takes the lock repeatedly when reading an item.
  #
  # @see http://ruby-doc.org/core/Hash.html Ruby standard library `Hash`

  # @!macro internal_implementation_note
  # The base class is chosen once, at load time, per Ruby engine.
  HashImplementation = case
                       when Concurrent.on_cruby?
                         # Hash is not fully thread-safe on CRuby, see
                         # https://bugs.ruby-lang.org/issues/19237
                         # https://github.com/ruby/ruby/commit/ffd52412ab
                         # https://github.com/ruby-concurrency/concurrent-ruby/issues/929
                         # So we will need to add synchronization here (similar to Concurrent::Map).
                         ::Hash

                       when Concurrent.on_jruby?
                         require 'jruby/synchronized'

                         # JRuby::Synchronized wraps every method call in a lock.
                         class JRubyHash < ::Hash
                           include JRuby::Synchronized
                         end
                         JRubyHash

                       when Concurrent.on_truffleruby?
                         require 'concurrent/thread_safe/util/data_structures'

                         class TruffleRubyHash < ::Hash
                         end

                         # Synchronization is patched onto the class after definition.
                         ThreadSafe::Util.make_synchronized_on_truffleruby TruffleRubyHash
                         TruffleRubyHash

                       else
                         warn 'Possibly unsupported Ruby implementation'
                         ::Hash
                       end
  private_constant :HashImplementation

  # @!macro concurrent_hash
  class Hash < HashImplementation
  end

end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization.rb | require 'concurrent/utility/native_extension_loader' # load native parts first
require 'concurrent/synchronization/object'
require 'concurrent/synchronization/lockable_object'
require 'concurrent/synchronization/condition'
require 'concurrent/synchronization/lock'
module Concurrent
  # @!visibility private
  # Namespace module only; the actual implementations live in the files
  # required at the top of this file.
  module Synchronization
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/async.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/async.rb | require 'concurrent/configuration'
require 'concurrent/ivar'
require 'concurrent/synchronization/lockable_object'
module Concurrent
# A mixin module that provides simple asynchronous behavior to a class,
# turning it into a simple actor. Loosely based on Erlang's
# [gen_server](http://www.erlang.org/doc/man/gen_server.html), but without
# supervision or linking.
#
# A more feature-rich {Concurrent::Actor} is also available when the
# capabilities of `Async` are too limited.
#
# ```cucumber
# Feature:
# As a stateful, plain old Ruby class
# I want safe, asynchronous behavior
# So my long-running methods don't block the main thread
# ```
#
# The `Async` module is a way to mix simple yet powerful asynchronous
# capabilities into any plain old Ruby object or class, turning each object
# into a simple Actor. Method calls are processed on a background thread. The
# caller is free to perform other actions while processing occurs in the
# background.
#
# Method calls to the asynchronous object are made via two proxy methods:
# `async` (alias `cast`) and `await` (alias `call`). These proxy methods post
# the method call to the object's background thread and return a "future"
# which will eventually contain the result of the method call.
#
# This behavior is loosely patterned after Erlang's `gen_server` behavior.
# When an Erlang module implements the `gen_server` behavior it becomes
# inherently asynchronous. The `start` or `start_link` function spawns a
# process (similar to a thread but much more lightweight and efficient) and
# returns the ID of the process. Using the process ID, other processes can
# send messages to the `gen_server` via the `cast` and `call` methods. Unlike
# Erlang's `gen_server`, however, `Async` classes do not support linking or
# supervision trees.
#
# ## Basic Usage
#
# When this module is mixed into a class, objects of the class become inherently
# asynchronous. Each object gets its own background thread on which to post
# asynchronous method calls. Asynchronous method calls are executed in the
# background one at a time in the order they are received.
#
# To create an asynchronous class, simply mix in the `Concurrent::Async` module:
#
# ```
# class Hello
# include Concurrent::Async
#
# def hello(name)
# "Hello, #{name}!"
# end
# end
# ```
#
# Mixing this module into a class provides each object two proxy methods:
# `async` and `await`. These methods are thread safe with respect to the
# enclosing object. The former proxy allows methods to be called
# asynchronously by posting to the object's internal thread. The latter proxy
# allows a method to be called synchronously but does so safely with respect
# to any pending asynchronous method calls and ensures proper ordering. Both
# methods return a {Concurrent::IVar} which can be inspected for the result
# of the proxied method call. Calling a method with `async` will return a
# `:pending` `IVar` whereas `await` will return a `:complete` `IVar`.
#
# ```
# class Echo
# include Concurrent::Async
#
# def echo(msg)
# print "#{msg}\n"
# end
# end
#
# horn = Echo.new
# horn.echo('zero') # synchronous, not thread-safe
# # returns the actual return value of the method
#
# horn.async.echo('one') # asynchronous, non-blocking, thread-safe
# # returns an IVar in the :pending state
#
# horn.await.echo('two') # synchronous, blocking, thread-safe
# # returns an IVar in the :complete state
# ```
#
# ## Let It Fail
#
# The `async` and `await` proxy methods have built-in error protection based
# on Erlang's famous "let it fail" philosophy. Instance methods should not be
# programmed defensively. When an exception is raised by a delegated method
# the proxy will rescue the exception, expose it to the caller as the `reason`
# attribute of the returned future, then process the next method call.
#
# ## Calling Methods Internally
#
# External method calls should *always* use the `async` and `await` proxy
# methods. When one method calls another method, the `async` proxy should
# rarely be used and the `await` proxy should *never* be used.
#
# When an object calls one of its own methods using the `await` proxy the
# second call will be enqueued *behind* the currently running method call.
# Any attempt to wait on the result will fail as the second call will never
# run until after the current call completes.
#
# Calling a method using the `await` proxy from within a method that was
# itself called using `async` or `await` will irreversibly deadlock the
# object. Do *not* do this, ever.
#
# ## Instance Variables and Attribute Accessors
#
# Instance variables do not need to be thread-safe so long as they are private.
# Asynchronous method calls are processed in the order they are received and
# are processed one at a time. Therefore private instance variables can only
# be accessed by one thread at a time. This is inherently thread-safe.
#
# When using private instance variables within asynchronous methods, the best
# practice is to read the instance variable into a local variable at the start
# of the method then update the instance variable at the *end* of the method.
# This way, should an exception be raised during method execution the internal
# state of the object will not have been changed.
#
# ### Reader Attributes
#
# The use of `attr_reader` is discouraged. Internal state exposed externally,
# when necessary, should be done through accessor methods. The instance
# variables exposed by these methods *must* be thread-safe, or they must be
# called using the `async` and `await` proxy methods. These two approaches are
# subtly different.
#
# When internal state is accessed via the `async` and `await` proxy methods,
# the returned value represents the object's state *at the time the call is
# processed*, which may *not* be the state of the object at the time the call
# is made.
#
# To get the state *at the current* time, irrespective of an enqueued method
# calls, a reader method must be called directly. This is inherently unsafe
# unless the instance variable is itself thread-safe, preferably using one
# of the thread-safe classes within this library. Because the thread-safe
# classes within this library are internally-locking or non-locking, they can
# be safely used from within asynchronous methods without causing deadlocks.
#
# Generally speaking, the best practice is to *not* expose internal state via
# reader methods. The best practice is to simply use the method's return value.
#
# ### Writer Attributes
#
# Writer attributes should never be used with asynchronous classes. Changing
# the state externally, even when done in the thread-safe way, is not logically
# consistent. Changes to state need to be timed with respect to all asynchronous
# method calls which may be in-process or enqueued. The only safe practice is to
# pass all necessary data to each method as arguments and let the method update
# the internal state as necessary.
#
# ## Class Constants, Variables, and Methods
#
# ### Class Constants
#
# Class constants do not need to be thread-safe. Since they are read-only and
# immutable they may be safely read both externally and from within
# asynchronous methods.
#
# ### Class Variables
#
# Class variables should be avoided. Class variables represent shared state.
# Shared state is anathema to concurrency. Should there be a need to share
# state using class variables they *must* be thread-safe, preferably
# using the thread-safe classes within this library. When updating class
# variables, never assign a new value/object to the variable itself. Assignment
# is not thread-safe in Ruby. Instead, use the thread-safe update functions
# of the variable itself to change the value.
#
# The best practice is to *never* use class variables with `Async` classes.
#
# ### Class Methods
#
# Class methods which are pure functions are safe. Class methods which modify
# class variables should be avoided, for all the reasons listed above.
#
# ## An Important Note About Thread Safe Guarantees
#
# > Thread safe guarantees can only be made when asynchronous method calls
# > are not mixed with direct method calls. Use only direct method calls
# > when the object is used exclusively on a single thread. Use only
# > `async` and `await` when the object is shared between threads. Once you
# > call a method using `async` or `await`, you should no longer call methods
# > directly on the object. Use `async` and `await` exclusively from then on.
#
# @example
#
# class Echo
# include Concurrent::Async
#
# def echo(msg)
# print "#{msg}\n"
# end
# end
#
# horn = Echo.new
# horn.echo('zero') # synchronous, not thread-safe
# # returns the actual return value of the method
#
# horn.async.echo('one') # asynchronous, non-blocking, thread-safe
# # returns an IVar in the :pending state
#
# horn.await.echo('two') # synchronous, blocking, thread-safe
# # returns an IVar in the :complete state
#
# @see Concurrent::Actor
# @see https://en.wikipedia.org/wiki/Actor_model "Actor Model" at Wikipedia
# @see http://www.erlang.org/doc/man/gen_server.html Erlang gen_server
# @see http://c2.com/cgi/wiki?LetItCrash "Let It Crash" at http://c2.com/
module Async
# @!method self.new(*args, &block)
#
# Instantiate a new object and ensure proper initialization of the
# synchronization mechanisms.
#
# @param [Array<Object>] args Zero or more arguments to be passed to the
# object's initializer.
# @param [Proc] block Optional block to pass to the object's initializer.
# @return [Object] A properly initialized object of the asynchronous class.
# Check for the presence of a method on an object and determine if a given
# set of arguments matches the required arity.
#
# @param [Object] obj the object to check against
# @param [Symbol] method the method to check the object for
# @param [Array] args zero or more arguments for the arity check
#
# @raise [NameError] the object does not respond to `method` method
# @raise [ArgumentError] the given `args` do not match the arity of `method`
#
# @note This check is imperfect because of the way Ruby reports the arity of
# methods with a variable number of arguments. It is possible to determine
# if too few arguments are given but impossible to determine if too many
# arguments are given. This check may also fail to recognize dynamic behavior
# of the object, such as methods simulated with `method_missing`.
#
# @see http://www.ruby-doc.org/core-2.1.1/Method.html#method-i-arity Method#arity
# @see http://ruby-doc.org/core-2.1.0/Object.html#method-i-respond_to-3F Object#respond_to?
# @see http://www.ruby-doc.org/core-2.1.0/BasicObject.html#method-i-method_missing BasicObject#method_missing
#
# @!visibility private
def self.validate_argc(obj, method, *args)
  given = args.length
  arity = obj.method(method).arity # raises NameError for unknown methods

  if arity >= 0
    # Fixed arity: the argument count must match exactly.
    unless given == arity
      raise ArgumentError.new("wrong number of arguments (#{given} for #{arity})")
    end
  else
    # Variable arity: a negative arity of -(n+1) means n required arguments.
    required = (arity + 1).abs
    if required > given
      raise ArgumentError.new("wrong number of arguments (#{given} for #{required}..*)")
    end
  end
end
# @!visibility private
# Hook run when `Async` is mixed into a class: keeps the class's original
# `.new` reachable as `original_new`, then installs the wrapping `.new`
# from {ClassMethods} so synchronization is initialized on construction.
def self.included(base)
  base.singleton_class.send(:alias_method, :original_new, :new)
  base.extend(ClassMethods)
  super(base)
end
# @!visibility private
module ClassMethods
  # Construct the object via the preserved `original_new`, then initialize
  # the async delegators before the instance is handed to callers.
  def new(*args, &block)
    obj = original_new(*args, &block)
    obj.send(:init_synchronization)
    obj
  end
  # Forward keyword arguments correctly on Rubies that support it.
  ruby2_keywords :new if respond_to?(:ruby2_keywords, true)
end
private_constant :ClassMethods
# Delegates asynchronous, thread-safe method calls to the wrapped object.
#
# @!visibility private
class AsyncDelegator < Synchronization::LockableObject
  safe_initialization!

  # Create a new delegator object wrapping the given delegate.
  #
  # @param [Object] delegate the object to wrap and delegate method calls to
  def initialize(delegate)
    super()
    @delegate = delegate
    @queue = []
    @executor = Concurrent.global_io_executor
    # Remember the creating PID so work queued before a fork can be discarded.
    @ruby_pid = $$
  end

  # Delegates method calls to the wrapped object by enqueueing them for the
  # background executor.
  #
  # @param [Symbol] method the method being called
  # @param [Array] args zero or more arguments to the method
  #
  # @return [IVar] the result of the method call
  #
  # @raise [NameError] the object does not respond to `method` method
  # @raise [ArgumentError] the given `args` do not match the arity of `method`
  def method_missing(method, *args, &block)
    super unless @delegate.respond_to?(method)
    Async::validate_argc(@delegate, method, *args)

    ivar = Concurrent::IVar.new
    synchronize do
      reset_if_forked
      @queue.push [ivar, method, args, block]
      # Only schedule a run when the queue was empty; an in-flight
      # `perform` drains any further entries itself.
      @executor.post { perform } if @queue.length == 1
    end

    ivar
  end

  # Check whether the method is responsive
  #
  # @param [Symbol] method the method being called
  def respond_to_missing?(method, include_private = false)
    @delegate.respond_to?(method) || super
  end

  # Perform all enqueued tasks.
  #
  # This method must be called from within the executor. It must not be
  # called while already running. It will loop until the queue is empty.
  def perform
    loop do
      ivar, method, args, block = synchronize { @queue.first }
      break unless ivar # queue is empty

      begin
        ivar.set(@delegate.send(method, *args, &block))
      rescue => error
        # "Let it fail": surface the exception via the IVar, keep draining.
        ivar.fail(error)
      end

      # The finished entry is only removed *after* its IVar is resolved, so
      # a concurrent method_missing never posts a second `perform`.
      synchronize do
        @queue.shift
        return if @queue.empty?
      end
    end
  end

  # Discard any jobs that were queued in the parent process after a fork.
  def reset_if_forked
    if $$ != @ruby_pid
      @queue.clear
      @ruby_pid = $$
    end
  end
end
private_constant :AsyncDelegator
# Delegates synchronous, thread-safe method calls to the wrapped object.
#
# @!visibility private
class AwaitDelegator

  # Wrap an asynchronous delegator so every proxied call blocks until its
  # result is available.
  #
  # @param [AsyncDelegator] delegate the object to wrap and delegate method calls to
  def initialize(delegate)
    @delegate = delegate
  end

  # Forward the call to the wrapped delegator, then block on the returned
  # future before handing it back, so callers always receive a completed
  # result.
  #
  # @param [Symbol] method the method being called
  # @param [Array] args zero or more arguments to the method
  #
  # @return [IVar] the completed result of the method call
  #
  # @raise [NameError] the object does not respond to `method` method
  # @raise [ArgumentError] the given `args` do not match the arity of `method`
  def method_missing(method, *args, &block)
    @delegate.send(method, *args, &block).tap(&:wait)
  end

  # Check whether the method is responsive
  #
  # @param [Symbol] method the method being called
  def respond_to_missing?(method, include_private = false)
    super || @delegate.respond_to?(method)
  end
end
private_constant :AwaitDelegator
# Causes the chained method call to be performed asynchronously on the
# object's thread. The delegated method will return a future in the
# `:pending` state and the method call will have been scheduled on the
# object's thread. The final disposition of the method call can be obtained
# by inspecting the returned future.
#
# @!macro async_thread_safety_warning
# @note The method call is guaranteed to be thread safe with respect to
# all other method calls against the same object that are called with
# either `async` or `await`. The mutable nature of Ruby references
# (and object orientation in general) prevent any other thread safety
# guarantees. Do NOT mix direct method calls with delegated method calls.
# Use *only* delegated method calls when sharing the object between threads.
#
# @return [Concurrent::IVar] the pending result of the asynchronous operation
#
# @raise [NameError] the object does not respond to the requested method
# @raise [ArgumentError] the given `args` do not match the arity of
# the requested method
def async
  @__async_delegator__ # built eagerly by #init_synchronization at construction
end
alias_method :cast, :async
# Causes the chained method call to be performed synchronously on the
# current thread. The delegated method will return a future in either the
# `:fulfilled` or `:rejected` state and the delegated method will have
# completed. The final disposition of the delegated method can be obtained
# by inspecting the returned future.
#
# @!macro async_thread_safety_warning
#
# @return [Concurrent::IVar] the completed result of the synchronous operation
#
# @raise [NameError] the object does not respond to the requested method
# @raise [ArgumentError] the given `args` do not match the arity of the
# requested method
def await
  @__await_delegator__ # built eagerly by #init_synchronization at construction
end
alias_method :call, :await
# Initialize the internal serializer and other synchronization mechanisms.
#
# @note This method *must* be called immediately upon object construction.
# This is the only way thread-safe initialization can be guaranteed.
#
# @!visibility private
def init_synchronization
  # Idempotent: bail out if the delegators have already been created.
  return self if defined?(@__async_initialized__) && @__async_initialized__
  @__async_initialized__ = true
  @__async_delegator__ = AsyncDelegator.new(self)
  # The await delegator wraps the async one, so both share a single queue.
  @__await_delegator__ = AwaitDelegator.new(@__async_delegator__)
  self
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/abstract_lockable_object.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/abstract_lockable_object.rb | require 'concurrent/utility/native_extension_loader' # load native parts first
require 'concurrent/utility/monotonic_time'
require 'concurrent/synchronization/object'
module Concurrent
module Synchronization
# @!visibility private
class AbstractLockableObject < Synchronization::Object

  protected

  # @!macro synchronization_object_method_synchronize
  #
  # @yield runs the block synchronized against this object,
  #   equivalent of java's `synchronize(this) {}`
  # @note can be made public in descendants if required by `public :synchronize`
  def synchronize
    raise NotImplementedError
  end

  # @!macro synchronization_object_method_ns_wait_until
  #
  # Wait until condition is met or timeout passes,
  # protects against spurious wake-ups.
  # @param [Numeric, nil] timeout in seconds, `nil` means no timeout
  # @yield condition to be met
  # @yieldreturn [true, false]
  # @return [true, false] if condition met
  # @note only to be used inside synchronized block
  # @note to provide direct access to this method in a descendant add method
  #   ```
  #   def wait_until(timeout = nil, &condition)
  #     synchronize { ns_wait_until(timeout, &condition) }
  #   end
  #   ```
  def ns_wait_until(timeout = nil, &condition)
    if timeout
      wait_until = Concurrent.monotonic_time + timeout
      loop do
        now = Concurrent.monotonic_time
        condition_result = condition.call
        # Re-check the predicate after every wake-up; only the condition or
        # the monotonic deadline decides when to stop looping.
        return condition_result if now >= wait_until || condition_result
        ns_wait wait_until - now
      end
    else
      # No deadline: keep waiting until the condition holds, tolerating
      # spurious wake-ups.
      ns_wait timeout until condition.call
      true
    end
  end

  # @!macro synchronization_object_method_ns_wait
  #
  # Wait until another thread calls #signal or #broadcast,
  # spurious wake-ups can happen.
  #
  # @param [Numeric, nil] timeout in seconds, `nil` means no timeout
  # @return [self]
  # @note only to be used inside synchronized block
  # @note to provide direct access to this method in a descendant add method
  #   ```
  #   def wait(timeout = nil)
  #     synchronize { ns_wait(timeout) }
  #   end
  #   ```
  def ns_wait(timeout = nil)
    raise NotImplementedError
  end

  # @!macro synchronization_object_method_ns_signal
  #
  # Signal one waiting thread.
  # @return [self]
  # @note only to be used inside synchronized block
  # @note to provide direct access to this method in a descendant add method
  #   ```
  #   def signal
  #     synchronize { ns_signal }
  #   end
  #   ```
  def ns_signal
    raise NotImplementedError
  end

  # @!macro synchronization_object_method_ns_broadcast
  #
  # Broadcast to all waiting threads.
  # @return [self]
  # @note only to be used inside synchronized block
  # @note to provide direct access to this method in a descendant add method
  #   ```
  #   def broadcast
  #     synchronize { ns_broadcast }
  #   end
  #   ```
  def ns_broadcast
    raise NotImplementedError
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/volatile.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/volatile.rb | require 'concurrent/utility/native_extension_loader' # load native parts first
require 'concurrent/utility/engine'
require 'concurrent/synchronization/full_memory_barrier'
module Concurrent
module Synchronization
# Volatile adds the attr_volatile class method when included.
#
# @example
# class Foo
# include Concurrent::Synchronization::Volatile
#
# attr_volatile :bar
#
# def initialize
# self.bar = 1
# end
# end
#
# foo = Foo.new
# foo.bar
# => 1
# foo.bar = 2
# => 2
#
# @!visibility private
module Volatile
  def self.included(base)
    base.extend(ClassMethods)
  end

  # Issues a full memory barrier via the platform-specific implementation.
  def full_memory_barrier
    Synchronization.full_memory_barrier
  end

  module ClassMethods
    # The generated accessor bodies differ per engine; each branch returns
    # a flat array of the reader and writer method names.
    if Concurrent.on_cruby?
      # CRuby: plain reader/writer methods are generated; no explicit
      # barrier instructions are emitted here.
      def attr_volatile(*names)
        names.each do |name|
          ivar = :"@volatile_#{name}"
          class_eval <<-RUBY, __FILE__, __LINE__ + 1
            def #{name}
              #{ivar}
            end

            def #{name}=(value)
              #{ivar} = value
            end
          RUBY
        end
        names.map { |n| [n, :"#{n}="] }.flatten
      end

    elsif Concurrent.on_jruby?
      # JRuby: reads and writes go through JRubyAttrVolatile's volatile
      # instance-variable accessors.
      def attr_volatile(*names)
        names.each do |name|
          ivar = :"@volatile_#{name}"

          class_eval <<-RUBY, __FILE__, __LINE__ + 1
            def #{name}
              ::Concurrent::Synchronization::JRubyAttrVolatile.instance_variable_get_volatile(self, :#{ivar})
            end

            def #{name}=(value)
              ::Concurrent::Synchronization::JRubyAttrVolatile.instance_variable_set_volatile(self, :#{ivar}, value)
            end
          RUBY
        end
        names.map { |n| [n, :"#{n}="] }.flatten
      end

    else
      warn 'Possibly unsupported Ruby implementation' unless Concurrent.on_truffleruby?

      # Fallback (TruffleRuby and others): each access is bracketed by a
      # full memory barrier.
      def attr_volatile(*names)
        names.each do |name|
          ivar = :"@volatile_#{name}"
          class_eval <<-RUBY, __FILE__, __LINE__ + 1
            def #{name}
              ::Concurrent::Synchronization.full_memory_barrier
              #{ivar}
            end

            def #{name}=(value)
              #{ivar} = value
              ::Concurrent::Synchronization.full_memory_barrier
            end
          RUBY
        end
        names.map { |n| [n, :"#{n}="] }.flatten
      end
    end
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/lockable_object.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/lockable_object.rb | require 'concurrent/utility/engine'
require 'concurrent/synchronization/abstract_lockable_object'
require 'concurrent/synchronization/mutex_lockable_object'
require 'concurrent/synchronization/jruby_lockable_object'
module Concurrent
module Synchronization
# @!visibility private
# @!macro internal_implementation_note
# Chooses the lockable-object base class once, at load time, based on the
# running Ruby engine.
LockableObjectImplementation = case
                               when Concurrent.on_cruby?
                                 MutexLockableObject
                               when Concurrent.on_jruby?
                                 JRubyLockableObject
                               when Concurrent.on_truffleruby?
                                 MutexLockableObject
                               else
                                 warn 'Possibly unsupported Ruby implementation'
                                 MonitorLockableObject
                               end
private_constant :LockableObjectImplementation
# Safe synchronization under any Ruby implementation.
# It provides methods like {#synchronize}, {#wait}, {#signal} and {#broadcast}.
# Provides a single layer which can improve its implementation over time without changes needed to
# the classes using it. Use {Synchronization::Object} not this abstract class.
#
# @note this object does not support usage together with
# [`Thread#wakeup`](http://ruby-doc.org/core/Thread.html#method-i-wakeup)
# and [`Thread#raise`](http://ruby-doc.org/core/Thread.html#method-i-raise).
# `Thread#sleep` and `Thread#wakeup` will work as expected but mixing `Synchronization::Object#wait` and
# `Thread#wakeup` will not work on all platforms.
#
# @see Event implementation as an example of this class use
#
# @example simple
# class AnClass < Synchronization::Object
# def initialize
# super
# synchronize { @value = 'asd' }
# end
#
# def value
# synchronize { @value }
# end
# end
#
# @!visibility private
class LockableObject < LockableObjectImplementation
  # TODO (pitr 12-Sep-2015): make private for c-r, prohibit subclassing
  # TODO (pitr 12-Sep-2015): we inherit too much ourselves :/

  # All behaviour comes from the engine-specific superclass; the directives
  # below only attach documentation macros to the inherited methods.

  # @!method initialize(*args, &block)
  #   @!macro synchronization_object_method_initialize

  # @!method synchronize
  #   @!macro synchronization_object_method_synchronize

  # @!method wait_until(timeout = nil, &condition)
  #   @!macro synchronization_object_method_ns_wait_until

  # @!method wait(timeout = nil)
  #   @!macro synchronization_object_method_ns_wait

  # @!method signal
  #   @!macro synchronization_object_method_ns_signal

  # @!method broadcast
  #   @!macro synchronization_object_method_ns_broadcast
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/abstract_object.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/abstract_object.rb | module Concurrent
module Synchronization
# @!visibility private
# @!macro internal_implementation_note
class AbstractObject
  # No state to set up; exists so subclasses can call super safely.
  def initialize
  end

  # Abstract: concrete subclasses must supply a full memory barrier.
  #
  # @!visibility private
  # @abstract
  def full_memory_barrier
    raise NotImplementedError
  end

  # Abstract: concrete subclasses must generate volatile accessors.
  def self.attr_volatile(*names)
    raise NotImplementedError
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/abstract_struct.rb | module Concurrent
module Synchronization
# @!visibility private
# @!macro internal_implementation_note
module AbstractStruct
# @!visibility private
# Delegates member assignment to the concrete struct's ns_initialize.
def initialize(*values)
  super()
  ns_initialize(*values)
end
# @!macro struct_length
#
# Returns the number of struct members.
#
# @return [Fixnum] the number of struct members
def length
self.class::MEMBERS.length
end
alias_method :size, :length
# @!macro struct_members
#
# Returns the struct members as an array of symbols.
#
# @return [Array] the struct members as an array of symbols
def members
self.class::MEMBERS.dup
end
protected
# @!macro struct_values
#
# @!visibility private
def ns_values
@values.dup
end
# @!macro struct_values_at
#
# @!visibility private
def ns_values_at(indexes)
@values.values_at(*indexes)
end
# @!macro struct_to_h
#
# @!visibility private
def ns_to_h
length.times.reduce({}){|memo, i| memo[self.class::MEMBERS[i]] = @values[i]; memo}
end
# @!macro struct_get
#
# @!visibility private
def ns_get(member)
if member.is_a? Integer
if member >= @values.length
raise IndexError.new("offset #{member} too large for struct(size:#{@values.length})")
end
@values[member]
else
send(member)
end
rescue NoMethodError
raise NameError.new("no member '#{member}' in struct")
end
# @!macro struct_equality
#
# @!visibility private
def ns_equality(other)
self.class == other.class && self.values == other.values
end
# @!macro struct_each
#
# @!visibility private
def ns_each
values.each{|value| yield value }
end
# @!macro struct_each_pair
#
# @!visibility private
def ns_each_pair
@values.length.times do |index|
yield self.class::MEMBERS[index], @values[index]
end
end
# @!macro struct_select
#
# @!visibility private
def ns_select
values.select{|value| yield value }
end
# @!macro struct_inspect
#
# @!visibility private
def ns_inspect
struct = pr_underscore(self.class.ancestors[1])
clazz = ((self.class.to_s =~ /^#<Class:/) == 0) ? '' : " #{self.class}"
"#<#{struct}#{clazz} #{ns_to_h}>"
end
# @!macro struct_merge
#
# @!visibility private
def ns_merge(other, &block)
self.class.new(*self.to_h.merge(other, &block).values)
end
# @!visibility private
def ns_initialize_copy
@values = @values.map do |val|
begin
val.clone
rescue TypeError
val
end
end
end
# @!visibility private
def pr_underscore(clazz)
word = clazz.to_s.dup # dup string to workaround JRuby 9.2.0.0 bug https://github.com/jruby/jruby/issues/5229
word.gsub!(/::/, '/')
word.gsub!(/([A-Z]+)([A-Z][a-z])/,'\1_\2')
word.gsub!(/([a-z\d])([A-Z])/,'\1_\2')
word.tr!("-", "_")
word.downcase!
word
end
# @!visibility private
def self.define_struct_class(parent, base, name, members, &block)
clazz = Class.new(base || Object) do
include parent
self.const_set(:MEMBERS, members.collect{|member| member.to_s.to_sym}.freeze)
def ns_initialize(*values)
raise ArgumentError.new('struct size differs') if values.length > length
@values = values.fill(nil, values.length..length-1)
end
end
unless name.nil?
begin
parent.send :remove_const, name if parent.const_defined?(name, false)
parent.const_set(name, clazz)
clazz
rescue NameError
raise NameError.new("identifier #{name} needs to be constant")
end
end
members.each_with_index do |member, index|
clazz.send :remove_method, member if clazz.instance_methods(false).include? member
clazz.send(:define_method, member) do
@values[index]
end
end
clazz.class_exec(&block) unless block.nil?
clazz.singleton_class.send :alias_method, :[], :new
clazz
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/mutex_lockable_object.rb | require 'concurrent/synchronization/abstract_lockable_object'
module Concurrent
module Synchronization
# @!visibility private
# @!macro internal_implementation_note
# Mixin providing the non-synchronized signal/broadcast primitives on top
# of the @__Condition__ condition variable set up by the including class.
# Callers must already hold the associated lock ("ns_" prefix convention).
module ConditionSignalling
  protected
  # Wakes one thread waiting on the condition; returns self.
  def ns_signal
    @__Condition__.signal
    self
  end
  # Wakes every thread waiting on the condition; returns self.
  def ns_broadcast
    @__Condition__.broadcast
    self
  end
end
# @!visibility private
# @!macro internal_implementation_note
# Lockable object backed by a ::Mutex and a ::ConditionVariable.
class MutexLockableObject < AbstractLockableObject
  include ConditionSignalling
  safe_initialization!
  def initialize
    super()
    @__Lock__ = ::Mutex.new
    @__Condition__ = ::ConditionVariable.new
  end
  # Copies must not share the lock/condition with the original object.
  def initialize_copy(other)
    super
    @__Lock__ = ::Mutex.new
    @__Condition__ = ::ConditionVariable.new
  end
  protected
  # Runs the block while holding the lock. Mutex is not reentrant, so the
  # owned? check makes nested synchronize calls from the same thread safe.
  def synchronize
    if @__Lock__.owned?
      yield
    else
      @__Lock__.synchronize { yield }
    end
  end
  # Waits on the condition (releasing the lock while blocked); returns self.
  def ns_wait(timeout = nil)
    @__Condition__.wait @__Lock__, timeout
    self
  end
end
# @!visibility private
# @!macro internal_implementation_note
# Lockable object backed by a reentrant ::Monitor and its condition.
class MonitorLockableObject < AbstractLockableObject
  include ConditionSignalling
  safe_initialization!
  def initialize
    super()
    @__Lock__ = ::Monitor.new
    @__Condition__ = @__Lock__.new_cond
  end
  # Copies must not share the monitor/condition with the original object.
  def initialize_copy(other)
    super
    @__Lock__ = ::Monitor.new
    @__Condition__ = @__Lock__.new_cond
  end
  protected
  # Monitor is reentrant, so no owned? check is needed here.
  def synchronize # TODO may be a problem with lock.synchronize { lock.wait }
    @__Lock__.synchronize { yield }
  end
  # Waits on the monitor's condition; returns self.
  def ns_wait(timeout = nil)
    @__Condition__.wait timeout
    self
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/jruby_lockable_object.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/jruby_lockable_object.rb | require 'concurrent/utility/native_extension_loader' # load native parts first
module Concurrent
module Synchronization
if Concurrent.on_jruby?
  # @!visibility private
  # @!macro internal_implementation_note
  #
  # Empty on the Ruby side: the implementation is supplied by the native
  # JRuby extension loaded above.
  class JRubyLockableObject < AbstractLockableObject
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/object.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/object.rb | require 'concurrent/utility/native_extension_loader' # load native parts first
require 'concurrent/synchronization/safe_initialization'
require 'concurrent/synchronization/volatile'
require 'concurrent/atomic/atomic_reference'
module Concurrent
module Synchronization
# Abstract object providing final, volatile, ans CAS extensions to build other concurrent abstractions.
# - final instance variables see {Object.safe_initialization!}
# - volatile instance variables see {Object.attr_volatile}
# - volatile instance variables see {Object.attr_atomic}
# @!visibility private
# Building block for concurrent abstractions: final fields
# (safe_initialization!), volatile fields (attr_volatile from Volatile),
# and atomic CAS fields (attr_atomic).
class Object < AbstractObject
  include Volatile
  # TODO make it a module if possible
  # @!method self.attr_volatile(*names)
  #   Creates methods for reading and writing (as `attr_accessor` does) to a instance variable with
  #   volatile (Java) semantic. The instance variable should be accessed only through generated methods.
  #
  #   @param [::Array<Symbol>] names of the instance variables to be volatile
  #   @return [::Array<Symbol>] names of defined method names
  # Has to be called by children.
  def initialize
    super
    __initialize_atomic_fields__
  end
  # Marks this class (and subclasses) for safe construction; idempotent.
  def self.safe_initialization!
    extend SafeInitialization unless safe_initialization?
  end
  # @return [true, false] whether SafeInitialization is in effect here.
  def self.safe_initialization?
    self.singleton_class < SafeInitialization
  end
  # For testing purposes, quite slow. Injects assert code to new method which will raise if class instance contains
  # any instance variables with CamelCase names and isn't {.safe_initialization?}.
  # @raise when offend found
  # @return [true]
  def self.ensure_safe_initialization_when_final_fields_are_present
    Object.class_eval do
      def self.new(*args, &block)
        object = super(*args, &block)
      ensure
        # CamelCase ivars are the convention for final fields in this library.
        has_final_field = object.instance_variables.any? { |v| v.to_s =~ /^@[A-Z]/ }
        if has_final_field && !safe_initialization?
          raise "there was an instance of #{object.class} with final field but not marked with safe_initialization!"
        end
      end
    end
    true
  end
  # Creates methods for reading and writing to a instance variable with
  # volatile (Java) semantic as {.attr_volatile} does.
  # The instance variable should be accessed only through generated methods.
  # This method generates following methods: `value`, `value=(new_value) #=> new_value`,
  # `swap_value(new_value) #=> old_value`,
  # `compare_and_set_value(expected, value) #=> true || false`, `update_value(&block)`.
  # @param [::Array<Symbol>] names of the instance variables to be volatile with CAS.
  # @return [::Array<Symbol>] names of defined method names.
  # @!macro attr_atomic
  #   @!method $1
  #     @return [Object] The $1.
  #   @!method $1=(new_$1)
  #     Set the $1.
  #     @return [Object] new_$1.
  #   @!method swap_$1(new_$1)
  #     Set the $1 to new_$1 and return the old $1.
  #     @return [Object] old $1
  #   @!method compare_and_set_$1(expected_$1, new_$1)
  #     Sets the $1 to new_$1 if the current $1 is expected_$1
  #     @return [true, false]
  #   @!method update_$1(&block)
  #     Updates the $1 using the block.
  #     @yield [Object] Calculate a new $1 using given (old) $1
  #     @yieldparam [Object] old $1
  #     @return [Object] new $1
  def self.attr_atomic(*names)
    @__atomic_fields__ ||= []
    @__atomic_fields__ += names
    safe_initialization!
    define_initialize_atomic_fields
    names.each do |name|
      # Each field is stored in a CamelCase (final) ivar holding an AtomicReference.
      ivar = :"@Atomic#{name.to_s.gsub(/(?:^|_)(.)/) { $1.upcase }}"
      class_eval <<-RUBY, __FILE__, __LINE__ + 1
        def #{name}
          #{ivar}.get
        end
        def #{name}=(value)
          #{ivar}.set value
        end
        def swap_#{name}(value)
          #{ivar}.swap value
        end
        def compare_and_set_#{name}(expected, value)
          #{ivar}.compare_and_set expected, value
        end
        def update_#{name}(&block)
          #{ivar}.update(&block)
        end
      RUBY
    end
    names.flat_map { |n| [n, :"#{n}=", :"swap_#{n}", :"compare_and_set_#{n}", :"update_#{n}"] }
  end
  # @param [true, false] inherited should inherited volatile with CAS fields be returned?
  # @return [::Array<Symbol>] Returns defined volatile with CAS fields on this class.
  def self.atomic_attributes(inherited = true)
    @__atomic_fields__ ||= []
    ((superclass.atomic_attributes if superclass.respond_to?(:atomic_attributes) && inherited) || []) + @__atomic_fields__
  end
  # @return [true, false] is the attribute with name atomic?
  def self.atomic_attribute?(name)
    atomic_attributes.include? name
  end
  private
  # (Re)defines __initialize_atomic_fields__ so #initialize creates an
  # AtomicReference for every declared atomic field; chains via super so
  # fields declared along the inheritance chain are all initialized.
  def self.define_initialize_atomic_fields
    assignments = @__atomic_fields__.map do |name|
      "@Atomic#{name.to_s.gsub(/(?:^|_)(.)/) { $1.upcase }} = Concurrent::AtomicReference.new(nil)"
    end.join("\n")
    class_eval <<-RUBY, __FILE__, __LINE__ + 1
      def __initialize_atomic_fields__
        super
        #{assignments}
      end
    RUBY
  end
  private_class_method :define_initialize_atomic_fields
  # No-op base case terminating the super chain above.
  def __initialize_atomic_fields__
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/full_memory_barrier.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/full_memory_barrier.rb | require 'concurrent/utility/native_extension_loader' # load native parts first
module Concurrent
module Synchronization
# Selects a Synchronization.full_memory_barrier implementation for the
# current Ruby runtime at load time.
case
when Concurrent.on_cruby?
  # No-op on CRuby: the GVL already guarantees ivar visibility.
  def self.full_memory_barrier
    # relying on undocumented behavior of CRuby, GVL acquire has lock which ensures visibility of ivars
    # https://github.com/ruby/ruby/blob/ruby_2_2/thread_pthread.c#L204-L211
  end
when Concurrent.on_jruby?
  require 'concurrent/utility/native_extension_loader'
  # Delegates to the native JRuby extension's barrier.
  def self.full_memory_barrier
    JRubyAttrVolatile.full_memory_barrier
  end
when Concurrent.on_truffleruby?
  # TruffleRuby exposes a barrier primitive directly.
  def self.full_memory_barrier
    TruffleRuby.full_memory_barrier
  end
else
  # Unknown runtime: warn and fall back to a no-op barrier.
  warn 'Possibly unsupported Ruby implementation'
  def self.full_memory_barrier
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/safe_initialization.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/safe_initialization.rb | require 'concurrent/synchronization/full_memory_barrier'
module Concurrent
module Synchronization
# @!visibility private
# @!macro internal_implementation_note
#
# By extending this module, a class and all its children are marked to be constructed safely. Meaning that
# all writes (ivar initializations) are made visible to all readers of newly constructed object. It ensures
# same behaviour as Java's final fields.
#
# Due to using Kernel#extend, the module is not included again if already present in the ancestors,
# which avoids extra overhead.
#
# @example
# class AClass < Concurrent::Synchronization::Object
# extend Concurrent::Synchronization::SafeInitialization
#
# def initialize
# @AFinalValue = 'value' # published safely, #foo will never return nil
# end
#
# def foo
# @AFinalValue
# end
# end
module SafeInitialization
  # Wraps .new so a full memory barrier is issued after construction,
  # publishing all ivars written in #initialize to other threads (Java
  # final-field semantics). The ensure runs even if #initialize raises.
  def new(*args, &block)
    super(*args, &block)
  ensure
    Concurrent::Synchronization.full_memory_barrier
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/lock.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/lock.rb | require 'concurrent/synchronization/lockable_object'
module Concurrent
module Synchronization
# @!visibility private
# TODO (pitr-ch 04-Dec-2016): should be in edge
# Publicly usable lock: exposes the protected synchronize/wait/signal
# machinery of LockableObject. The public ns_* variants must only be
# called while already holding the lock.
class Lock < LockableObject
  # TODO use JavaReentrantLock on JRuby
  public :synchronize
  # Acquires the lock and waits (releasing it while blocked).
  def wait(timeout = nil)
    synchronize { ns_wait(timeout) }
  end
  public :ns_wait
  # Acquires the lock and waits until the condition block is truthy.
  def wait_until(timeout = nil, &condition)
    synchronize { ns_wait_until(timeout, &condition) }
  end
  public :ns_wait_until
  # Acquires the lock and wakes one waiter.
  def signal
    synchronize { ns_signal }
  end
  public :ns_signal
  # Acquires the lock and wakes all waiters.
  def broadcast
    synchronize { ns_broadcast }
  end
  public :ns_broadcast
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/condition.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/synchronization/condition.rb | require 'concurrent/synchronization/lockable_object'
module Concurrent
module Synchronization
# @!visibility private
# TODO (pitr-ch 04-Dec-2016): should be in edge
# Condition variable bound to an external @Lock; each public operation
# acquires the external lock first, then this object's own lock.
# Construction only via LockableObject#new_condition (new is private).
class Condition < LockableObject
  safe_initialization!
  # TODO (pitr 12-Sep-2015): locks two objects, improve
  # TODO (pitr 26-Sep-2015): study
  # http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/8-b132/java/util/concurrent/locks/AbstractQueuedSynchronizer.java#AbstractQueuedSynchronizer.Node
  singleton_class.send :alias_method, :private_new, :new
  private_class_method :new
  # @param lock [LockableObject] the lock this condition belongs to
  def initialize(lock)
    super()
    @Lock = lock
  end
  # Acquires the owning lock, then waits on this condition.
  def wait(timeout = nil)
    @Lock.synchronize { ns_wait(timeout) }
  end
  def ns_wait(timeout = nil)
    synchronize { super(timeout) }
  end
  # Acquires the owning lock, then waits until the condition block is truthy.
  def wait_until(timeout = nil, &condition)
    @Lock.synchronize { ns_wait_until(timeout, &condition) }
  end
  def ns_wait_until(timeout = nil, &condition)
    synchronize { super(timeout, &condition) }
  end
  # Acquires the owning lock, then wakes one waiter.
  def signal
    @Lock.synchronize { ns_signal }
  end
  def ns_signal
    synchronize { super }
  end
  # Acquires the owning lock, then wakes all waiters.
  def broadcast
    @Lock.synchronize { ns_broadcast }
  end
  def ns_broadcast
    synchronize { super }
  end
end
# Reopened here (after Condition is defined) to add condition construction.
class LockableObject < LockableObjectImplementation
  # @return [Condition] a new condition bound to this object's lock
  def new_condition
    Condition.private_new(self)
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/synchronized_delegator.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/synchronized_delegator.rb | require 'delegate'
require 'monitor'
module Concurrent
# This class provides a trivial way to synchronize all calls to a given object
# by wrapping it with a `Delegator` that performs `Monitor#enter/exit` calls
# around the delegated `#send`. Example:
#
# array = [] # not thread-safe on many impls
# array = SynchronizedDelegator.new([]) # thread-safe
#
# A simple `Monitor` provides a very coarse-grained way to synchronize a given
# object, in that it will cause synchronization for methods that have no need
# for it, but this is a trivial way to get thread-safety where none may exist
# currently on some implementations.
#
# This class is currently being considered for inclusion into stdlib, via
# https://bugs.ruby-lang.org/issues/8556
#
# @!visibility private
# @!visibility private
class SynchronizedDelegator < SimpleDelegator
  # NOTE(review): setup/teardown look like test-suite helpers shipped with
  # the class; kept unchanged for compatibility.
  def setup
    @old_abort = Thread.abort_on_exception
    Thread.abort_on_exception = true
  end

  def teardown
    Thread.abort_on_exception = @old_abort
  end

  # @param obj [Object] the object whose method calls are to be serialized
  def initialize(obj)
    __setobj__(obj)
    @monitor = Monitor.new
  end

  # Serializes every delegated call through the monitor. The monitor is
  # reentrant, so delegated methods may call back into the delegator.
  def method_missing(method, *args, &block)
    monitor = @monitor
    begin
      monitor.enter
      super
    ensure
      monitor.exit
    end
  end
  # Fix: without this flag, keyword arguments are degraded to a positional
  # Hash by the *args splat on Ruby >= 3, breaking delegation to methods
  # with keyword parameters.
  ruby2_keywords :method_missing if respond_to?(:ruby2_keywords, true)
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util.rb | module Concurrent
# @!visibility private
module ThreadSafe
# @!visibility private
# Numeric constants shared by the thread-safe collection internals.
module Util
  # TODO (pitr-ch 15-Oct-2016): migrate to Utility::NativeInteger
  # Bits usable in an immediate Fixnum: machine word minus sign/tag bits.
  FIXNUM_BIT_SIZE = (0.size * 8) - 2
  # Largest immediate Fixnum value.
  MAX_INT = (2 ** FIXNUM_BIT_SIZE) - 1
  # TODO (pitr-ch 15-Oct-2016): migrate to Utility::ProcessorCounter
  # Assumed CPU count, used to cap striping levels.
  CPU_COUNT = 16 # is there a way to determine this?
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/volatile.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/volatile.rb | require 'concurrent/thread_safe/util'
module Concurrent
# @!visibility private
module ThreadSafe
# @!visibility private
module Util
# @!visibility private
# @!visibility private
module Volatile
  # Provides +volatile+ (in the JVM's sense) attribute accessors implemented
  # atop of +Concurrent::AtomicReference+.
  #
  # Usage:
  #   class Foo
  #     extend Concurrent::ThreadSafe::Util::Volatile
  #     attr_volatile :foo, :bar
  #
  #     def initialize(bar)
  #       super() # must super() into parent initializers before using the volatile attribute accessors
  #       self.bar = bar
  #     end
  #
  #     def hello
  #       my_foo = foo # volatile read
  #       self.foo = 1 # volatile write
  #       cas_foo(1, 2) # => true | a strong CAS
  #     end
  #   end
  # For each name, backs @__name with an AtomicReference and defines
  # reader, writer, compare_and_set_/cas_ and lazy_set_ methods, plus
  # initialize/initialize_copy hooks that set up (or copy) the references.
  def attr_volatile(*attr_names)
    return if attr_names.empty?
    include(Module.new do
      atomic_ref_setup = attr_names.map {|attr_name| "@__#{attr_name} = Concurrent::AtomicReference.new"}
      initialize_copy_setup = attr_names.zip(atomic_ref_setup).map do |attr_name, ref_setup|
        "#{ref_setup}(other.instance_variable_get(:@__#{attr_name}).get)"
      end
      class_eval <<-RUBY_EVAL, __FILE__, __LINE__ + 1
        def initialize(*)
          super
          #{atomic_ref_setup.join('; ')}
        end
        def initialize_copy(other)
          super
          #{initialize_copy_setup.join('; ')}
        end
      RUBY_EVAL
      attr_names.each do |attr_name|
        class_eval <<-RUBY_EVAL, __FILE__, __LINE__ + 1
          def #{attr_name}
            @__#{attr_name}.get
          end
          def #{attr_name}=(value)
            @__#{attr_name}.set(value)
          end
          def compare_and_set_#{attr_name}(old_value, new_value)
            @__#{attr_name}.compare_and_set(old_value, new_value)
          end
        RUBY_EVAL
        alias_method :"cas_#{attr_name}", :"compare_and_set_#{attr_name}"
        alias_method :"lazy_set_#{attr_name}", :"#{attr_name}="
      end
    end)
  end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb | require 'concurrent/thread_safe/util'
require 'concurrent/utility/engine'
# Shim for TruffleRuby.synchronized
# Backfills TruffleRuby.synchronized on older TruffleRuby releases that
# only expose Truffle::System.synchronized.
if Concurrent.on_truffleruby? && !TruffleRuby.respond_to?(:synchronized)
  module TruffleRuby
    def self.synchronized(object, &block)
      Truffle::System.synchronized(object, &block)
    end
  end
end
module Concurrent
module ThreadSafe
module Util
  # Wraps every method the superclass of +klass+ defines directly so it
  # runs under a per-instance Monitor, making the subclass thread-safe on
  # CRuby. Copies get their own monitor in initialize_copy.
  def self.make_synchronized_on_cruby(klass)
    klass.class_eval do
      def initialize(*args, &block)
        @_monitor = Monitor.new
        super
      end
      def initialize_copy(other)
        # make sure a copy is not sharing a monitor with the original object!
        @_monitor = Monitor.new
        super
      end
    end
    klass.superclass.instance_methods(false).each do |method|
      klass.class_eval <<-RUBY, __FILE__, __LINE__ + 1
        def #{method}(*args)
          monitor = @_monitor
          monitor or raise("BUG: Internal monitor was not properly initialized. Please report this to the concurrent-ruby developers.")
          monitor.synchronize { super }
        end
      RUBY
    end
  end
  # Same idea for TruffleRuby, using its object-level synchronized
  # primitive instead of a Monitor ivar.
  def self.make_synchronized_on_truffleruby(klass)
    klass.superclass.instance_methods(false).each do |method|
      klass.class_eval <<-RUBY, __FILE__, __LINE__ + 1
        def #{method}(*args, &block)
          TruffleRuby.synchronized(self) { super(*args, &block) }
        end
      RUBY
    end
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/striped64.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/striped64.rb | require 'concurrent/thread_safe/util'
require 'concurrent/thread_safe/util/power_of_two_tuple'
require 'concurrent/thread_safe/util/volatile'
require 'concurrent/thread_safe/util/xor_shift_random'
module Concurrent
# @!visibility private
module ThreadSafe
# @!visibility private
module Util
# A Ruby port of the Doug Lea's jsr166e.Striped64 class version 1.6
# available in public domain.
#
# Original source code available here:
# http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/jsr166e/Striped64.java?revision=1.6
#
# Class holding common representation and mechanics for classes supporting
# dynamic striping on 64bit values.
#
# This class maintains a lazily-initialized table of atomically updated
# variables, plus an extra +base+ field. The table size is a power of two.
# Indexing uses masked per-thread hash codes. Nearly all methods on this
# class are private, accessed directly by subclasses.
#
# Table entries are of class +Cell+; a variant of AtomicLong padded to
# reduce cache contention on most processors. Padding is overkill for most
# Atomics because they are usually irregularly scattered in memory and thus
# don't interfere much with each other. But Atomic objects residing in
# arrays will tend to be placed adjacent to each other, and so will most
# often share cache lines (with a huge negative performance impact) without
# this precaution.
#
# In part because +Cell+s are relatively large, we avoid creating them until
# they are needed. When there is no contention, all updates are made to the
# +base+ field. Upon first contention (a failed CAS on +base+ update), the
# table is initialized to size 2. The table size is doubled upon further
# contention until reaching the nearest power of two greater than or equal
# to the number of CPUS. Table slots remain empty (+nil+) until they are
# needed.
#
# A single spinlock (+busy+) is used for initializing and resizing the
# table, as well as populating slots with new +Cell+s. There is no need for
# a blocking lock: When the lock is not available, threads try other slots
# (or the base). During these retries, there is increased contention and
# reduced locality, which is still better than alternatives.
#
# Per-thread hash codes are initialized to random values. Contention and/or
# table collisions are indicated by failed CASes when performing an update
# operation (see method +retry_update+). Upon a collision, if the table size
# is less than the capacity, it is doubled in size unless some other thread
# holds the lock. If a hashed slot is empty, and lock is available, a new
# +Cell+ is created. Otherwise, if the slot exists, a CAS is tried. Retries
# proceed by "double hashing", using a secondary hash (XorShift) to try to
# find a free slot.
#
# The table size is capped because, when there are more threads than CPUs,
# supposing that each thread were bound to a CPU, there would exist a
# perfect hash function mapping threads to slots that eliminates collisions.
# When we reach capacity, we search for this mapping by randomly varying the
# hash codes of colliding threads. Because search is random, and collisions
# only become known via CAS failures, convergence can be slow, and because
# threads are typically not bound to CPUS forever, may not occur at all.
# However, despite these limitations, observed contention rates are
# typically low in these cases.
#
# It is possible for a +Cell+ to become unused when threads that once hashed
# to it terminate, as well as in the case where doubling the table causes no
# thread to hash to it under expanded mask. We do not try to detect or
# remove such cells, under the assumption that for long-running instances,
# observed contention levels will recur, so the cells will eventually be
# needed again; and for short-lived ones, it does not matter.
#
# @!visibility private
class Striped64
  # Padded variant of AtomicLong supporting only raw accesses plus CAS.
  # The +value+ field is placed between pads, hoping that the JVM doesn't
  # reorder them.
  #
  # Optimisation note: It would be possible to use a release-only
  # form of CAS here, if it were provided.
  #
  # @!visibility private
  class Cell < Concurrent::AtomicReference
    alias_method :cas, :compare_and_set
    # CAS from the current value to the block's result; true on success.
    def cas_computed
      cas(current_value = value, yield(current_value))
    end
    # @!visibility private
    def self.padding
      # TODO: this only adds padding after the :value slot, need to find a way to add padding before the slot
      # TODO (pitr-ch 28-Jul-2018): the padding instance vars may not be created
      # hide from yardoc in a method
      attr_reader :padding_0, :padding_1, :padding_2, :padding_3, :padding_4, :padding_5, :padding_6, :padding_7, :padding_8, :padding_9, :padding_10, :padding_11
    end
    padding
  end
  extend Volatile
  attr_volatile :cells, # Table of cells. When non-null, size is a power of 2.
    :base, # Base value, used mainly when there is no contention, but also as a fallback during table initialization races. Updated via CAS.
    :busy # Spinlock (locked via CAS) used when resizing and/or creating Cells.
  alias_method :busy?, :busy
  def initialize
    super()
    self.busy = false
    self.base = 0
  end
  # Handles cases of updates involving initialization, resizing,
  # creating new Cells, and/or contention. See above for
  # explanation. This method suffers the usual non-modularity
  # problems of optimistic retry code, relying on rechecked sets of
  # reads.
  #
  # Arguments:
  # [+x+]
  #   the value
  # [+hash_code+]
  #   hash code used
  # [+was_uncontended+]
  #   false if CAS failed before call
  def retry_update(x, hash_code, was_uncontended) # :yields: current_value
    hash = hash_code
    collided = false # True if last slot nonempty
    while true
      if current_cells = cells
        if !(cell = current_cells.volatile_get_by_hash(hash))
          if busy?
            collided = false
          else # Try to attach new Cell
            if try_to_install_new_cell(Cell.new(x), hash) # Optimistically create and try to insert new cell
              break
            else
              redo # Slot is now non-empty
            end
          end
        elsif !was_uncontended # CAS already known to fail
          was_uncontended = true # Continue after rehash
        elsif cell.cas_computed {|current_value| yield current_value}
          break
        elsif current_cells.size >= CPU_COUNT || cells != current_cells # At max size or stale
          collided = false
        elsif collided && expand_table_unless_stale(current_cells)
          collided = false
          redo # Retry with expanded table
        else
          collided = true
        end
        hash = XorShiftRandom.xorshift(hash)
      elsif try_initialize_cells(x, hash) || cas_base_computed {|current_base| yield current_base}
        break
      end
    end
    self.hash_code = hash
  end
  private
  # Static per-thread hash code key. Shared across all instances to
  # reduce Thread locals pollution and because adjustments due to
  # collisions in one table are likely to be appropriate for
  # others.
  THREAD_LOCAL_KEY = "#{name}.hash_code".to_sym
  # A thread-local hash code accessor. The code is initially
  # random, but may be set to a different value upon collisions.
  def hash_code
    Thread.current[THREAD_LOCAL_KEY] ||= XorShiftRandom.get
  end
  def hash_code=(hash)
    Thread.current[THREAD_LOCAL_KEY] = hash
  end
  # Sets base and all +cells+ to the given value.
  def internal_reset(initial_value)
    current_cells = cells
    self.base = initial_value
    if current_cells
      current_cells.each do |cell|
        cell.value = initial_value if cell
      end
    end
  end
  # CAS the base from its current value to the block's result.
  def cas_base_computed
    cas_base(current_base = base, yield(current_base))
  end
  # True when the spinlock is not held.
  def free?
    !busy?
  end
  # First contention: create a 2-slot table seeded with a Cell holding x.
  def try_initialize_cells(x, hash)
    if free? && !cells
      try_in_busy do
        unless cells # Recheck under lock
          new_cells = PowerOfTwoTuple.new(2)
          new_cells.volatile_set_by_hash(hash, Cell.new(x))
          self.cells = new_cells
        end
      end
    end
  end
  # Double the table size, copying existing cells, unless another thread
  # already replaced the table.
  def expand_table_unless_stale(current_cells)
    try_in_busy do
      if current_cells == cells # Recheck under lock
        new_cells = current_cells.next_in_size_table
        current_cells.each_with_index {|x, i| new_cells.volatile_set(i, x)}
        self.cells = new_cells
      end
    end
  end
  # Install new_cell into the slot for hash if it is still empty.
  def try_to_install_new_cell(new_cell, hash)
    try_in_busy do
      # Recheck under lock
      if (current_cells = cells) && !current_cells.volatile_get(i = current_cells.hash_to_index(hash))
        current_cells.volatile_set(i, new_cell)
      end
    end
  end
  # Run the block while holding the busy spinlock; no-op (nil) if the
  # lock could not be acquired. Always releases the lock afterwards.
  def try_in_busy
    if cas_busy(false, true)
      begin
        yield
      ensure
        self.busy = false
      end
    end
  end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/xor_shift_random.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/xor_shift_random.rb | require 'concurrent/thread_safe/util'
module Concurrent
# @!visibility private
module ThreadSafe
# @!visibility private
module Util
# A xorshift random number (positive +Fixnum+s) generator, provides
# reasonably cheap way to generate thread local random numbers without
# contending for the global +Kernel.rand+.
#
# Usage:
# x = XorShiftRandom.get # uses Kernel.rand to generate an initial seed
# while true
# if (x = XorShiftRandom.xorshift).odd? # thread-locally generate a next random number
# do_something_at_random
# end
# end
module XorShiftRandom
extend self
MAX_XOR_SHIFTABLE_INT = MAX_INT - 1
# Generates an initial non-zero positive +Fixnum+ via +Kernel.rand+.
def get
Kernel.rand(MAX_XOR_SHIFTABLE_INT) + 1 # 0 can't be xorshifted
end
# xorshift based on: http://www.jstatsoft.org/v08/i14/paper
if 0.size == 4
# using the "yˆ=y>>a; yˆ=y<<b; yˆ=y>>c;" transform with the (a,b,c) tuple with values (3,1,14) to minimise Bignum overflows
def xorshift(x)
x ^= x >> 3
x ^= (x << 1) & MAX_INT # cut-off Bignum overflow
x ^= x >> 14
end
else
# using the "yˆ=y>>a; yˆ=y<<b; yˆ=y>>c;" transform with the (a,b,c) tuple with values (1,1,54) to minimise Bignum overflows
def xorshift(x)
x ^= x >> 1
x ^= (x << 1) & MAX_INT # cut-off Bignum overflow
x ^= x >> 54
end
end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/adder.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/adder.rb | require 'concurrent/thread_safe/util'
require 'concurrent/thread_safe/util/striped64'
module Concurrent
# @!visibility private
module ThreadSafe
# @!visibility private
module Util
# A Ruby port of the Doug Lea's jsr166e.LongAdder class version 1.8
# available in public domain.
#
# Original source code available here:
# http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/jsr166e/LongAdder.java?revision=1.8
#
# One or more variables that together maintain an initially zero
# sum. When updates (method +add+) are contended across threads,
# the set of variables may grow dynamically to reduce contention.
# Method +sum+ returns the current total combined across the
# variables maintaining the sum.
#
# This class is usually preferable to single +Atomic+ reference when
# multiple threads update a common sum that is used for purposes such
# as collecting statistics, not for fine-grained synchronization
# control. Under low update contention, the two classes have similar
# characteristics. But under high contention, expected throughput of
# this class is significantly higher, at the expense of higher space
# consumption.
#
# @!visibility private
class Adder < Striped64
# Adds the given value.
def add(x)
if (current_cells = cells) || !cas_base_computed {|current_base| current_base + x}
was_uncontended = true
hash = hash_code
unless current_cells && (cell = current_cells.volatile_get_by_hash(hash)) && (was_uncontended = cell.cas_computed {|current_value| current_value + x})
retry_update(x, hash, was_uncontended) {|current_value| current_value + x}
end
end
end
def increment
add(1)
end
def decrement
add(-1)
end
# Returns the current sum. The returned value is _NOT_ an
# atomic snapshot: Invocation in the absence of concurrent
# updates returns an accurate result, but concurrent updates that
# occur while the sum is being calculated might not be
# incorporated.
def sum
x = base
if current_cells = cells
current_cells.each do |cell|
x += cell.value if cell
end
end
x
end
def reset
internal_reset(0)
end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/power_of_two_tuple.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/thread_safe/util/power_of_two_tuple.rb | require 'concurrent/thread_safe/util'
require 'concurrent/tuple'
module Concurrent
# @!visibility private
module ThreadSafe
# @!visibility private
module Util
# @!visibility private
class PowerOfTwoTuple < Concurrent::Tuple
def initialize(size)
raise ArgumentError, "size must be a power of 2 (#{size.inspect} provided)" unless size > 0 && size & (size - 1) == 0
super(size)
end
def hash_to_index(hash)
(size - 1) & hash
end
def volatile_get_by_hash(hash)
volatile_get(hash_to_index(hash))
end
def volatile_set_by_hash(hash, value)
volatile_set(hash_to_index(hash), value)
end
def next_in_size_table
self.class.new(size << 1)
end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/ruby_timeout_queue.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/ruby_timeout_queue.rb | module Concurrent
module Collection
# @!visibility private
# @!macro ruby_timeout_queue
class RubyTimeoutQueue < ::Queue
def initialize(*args)
if RUBY_VERSION >= '3.2'
raise "#{self.class.name} is not needed on Ruby 3.2 or later, use ::Queue instead"
end
super(*args)
@mutex = Mutex.new
@cond_var = ConditionVariable.new
end
def push(obj)
@mutex.synchronize do
super(obj)
@cond_var.signal
end
end
alias_method :enq, :push
alias_method :<<, :push
def pop(non_block = false, timeout: nil)
if non_block && timeout
raise ArgumentError, "can't set a timeout if non_block is enabled"
end
if non_block
super(true)
elsif timeout
@mutex.synchronize do
deadline = Concurrent.monotonic_time + timeout
while (now = Concurrent.monotonic_time) < deadline && empty?
@cond_var.wait(@mutex, deadline - now)
end
begin
return super(true)
rescue ThreadError
# still empty
nil
end
end
else
super(false)
end
end
alias_method :deq, :pop
alias_method :shift, :pop
end
private_constant :RubyTimeoutQueue
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/ruby_non_concurrent_priority_queue.rb | module Concurrent
module Collection
# @!macro priority_queue
#
# @!visibility private
# @!macro internal_implementation_note
class RubyNonConcurrentPriorityQueue
# @!macro priority_queue_method_initialize
def initialize(opts = {})
order = opts.fetch(:order, :max)
@comparator = [:min, :low].include?(order) ? -1 : 1
clear
end
# @!macro priority_queue_method_clear
def clear
@queue = [nil]
@length = 0
true
end
# @!macro priority_queue_method_delete
def delete(item)
return false if empty?
original_length = @length
k = 1
while k <= @length
if @queue[k] == item
swap(k, @length)
@length -= 1
sink(k) || swim(k)
@queue.pop
else
k += 1
end
end
@length != original_length
end
# @!macro priority_queue_method_empty
def empty?
size == 0
end
# @!macro priority_queue_method_include
def include?(item)
@queue.include?(item)
end
alias_method :has_priority?, :include?
# @!macro priority_queue_method_length
def length
@length
end
alias_method :size, :length
# @!macro priority_queue_method_peek
def peek
empty? ? nil : @queue[1]
end
# @!macro priority_queue_method_pop
def pop
return nil if empty?
max = @queue[1]
swap(1, @length)
@length -= 1
sink(1)
@queue.pop
max
end
alias_method :deq, :pop
alias_method :shift, :pop
# @!macro priority_queue_method_push
def push(item)
raise ArgumentError.new('cannot enqueue nil') if item.nil?
@length += 1
@queue << item
swim(@length)
true
end
alias_method :<<, :push
alias_method :enq, :push
# @!macro priority_queue_method_from_list
def self.from_list(list, opts = {})
queue = new(opts)
list.each{|item| queue << item }
queue
end
private
# Exchange the values at the given indexes within the internal array.
#
# @param [Integer] x the first index to swap
# @param [Integer] y the second index to swap
#
# @!visibility private
def swap(x, y)
temp = @queue[x]
@queue[x] = @queue[y]
@queue[y] = temp
end
# Are the items at the given indexes ordered based on the priority
# order specified at construction?
#
# @param [Integer] x the first index from which to retrieve a comparable value
# @param [Integer] y the second index from which to retrieve a comparable value
#
# @return [Boolean] true if the two elements are in the correct priority order
# else false
#
# @!visibility private
def ordered?(x, y)
(@queue[x] <=> @queue[y]) == @comparator
end
# Percolate down to maintain heap invariant.
#
# @param [Integer] k the index at which to start the percolation
#
# @!visibility private
def sink(k)
success = false
while (j = (2 * k)) <= @length do
j += 1 if j < @length && ! ordered?(j, j+1)
break if ordered?(k, j)
swap(k, j)
success = true
k = j
end
success
end
# Percolate up to maintain heap invariant.
#
# @param [Integer] k the index at which to start the percolation
#
# @!visibility private
def swim(k)
success = false
while k > 1 && ! ordered?(k/2, k) do
swap(k, k/2)
k = k/2
success = true
end
success
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/copy_on_write_observer_set.rb | require 'concurrent/synchronization/lockable_object'
module Concurrent
module Collection
# A thread safe observer set implemented using copy-on-write approach:
# every time an observer is added or removed the whole internal data structure is
# duplicated and replaced with a new one.
#
# @api private
class CopyOnWriteObserverSet < Synchronization::LockableObject
def initialize
super()
synchronize { ns_initialize }
end
# @!macro observable_add_observer
def add_observer(observer = nil, func = :update, &block)
if observer.nil? && block.nil?
raise ArgumentError, 'should pass observer as a first argument or block'
elsif observer && block
raise ArgumentError.new('cannot provide both an observer and a block')
end
if block
observer = block
func = :call
end
synchronize do
new_observers = @observers.dup
new_observers[observer] = func
@observers = new_observers
observer
end
end
# @!macro observable_delete_observer
def delete_observer(observer)
synchronize do
new_observers = @observers.dup
new_observers.delete(observer)
@observers = new_observers
observer
end
end
# @!macro observable_delete_observers
def delete_observers
self.observers = {}
self
end
# @!macro observable_count_observers
def count_observers
observers.count
end
# Notifies all registered observers with optional args
# @param [Object] args arguments to be passed to each observer
# @return [CopyOnWriteObserverSet] self
def notify_observers(*args, &block)
notify_to(observers, *args, &block)
self
end
# Notifies all registered observers with optional args and deletes them.
#
# @param [Object] args arguments to be passed to each observer
# @return [CopyOnWriteObserverSet] self
def notify_and_delete_observers(*args, &block)
old = clear_observers_and_return_old
notify_to(old, *args, &block)
self
end
protected
def ns_initialize
@observers = {}
end
private
def notify_to(observers, *args)
raise ArgumentError.new('cannot give arguments and a block') if block_given? && !args.empty?
observers.each do |observer, function|
args = yield if block_given?
observer.send(function, *args)
end
end
def observers
synchronize { @observers }
end
def observers=(new_set)
synchronize { @observers = new_set }
end
def clear_observers_and_return_old
synchronize do
old_observers = @observers
@observers = {}
old_observers
end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/timeout_queue.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/timeout_queue.rb | module Concurrent
module Collection
# @!visibility private
# @!macro internal_implementation_note
TimeoutQueueImplementation = if RUBY_VERSION >= '3.2'
::Queue
else
require 'concurrent/collection/ruby_timeout_queue'
RubyTimeoutQueue
end
private_constant :TimeoutQueueImplementation
# @!visibility private
# @!macro timeout_queue
class TimeoutQueue < TimeoutQueueImplementation
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/copy_on_notify_observer_set.rb | require 'concurrent/synchronization/lockable_object'
module Concurrent
module Collection
# A thread safe observer set implemented using copy-on-read approach:
# observers are added and removed from a thread safe collection; every time
# a notification is required the internal data structure is copied to
# prevent concurrency issues
#
# @api private
class CopyOnNotifyObserverSet < Synchronization::LockableObject
def initialize
super()
synchronize { ns_initialize }
end
# @!macro observable_add_observer
def add_observer(observer = nil, func = :update, &block)
if observer.nil? && block.nil?
raise ArgumentError, 'should pass observer as a first argument or block'
elsif observer && block
raise ArgumentError.new('cannot provide both an observer and a block')
end
if block
observer = block
func = :call
end
synchronize do
@observers[observer] = func
observer
end
end
# @!macro observable_delete_observer
def delete_observer(observer)
synchronize do
@observers.delete(observer)
observer
end
end
# @!macro observable_delete_observers
def delete_observers
synchronize do
@observers.clear
self
end
end
# @!macro observable_count_observers
def count_observers
synchronize { @observers.count }
end
# Notifies all registered observers with optional args
# @param [Object] args arguments to be passed to each observer
# @return [CopyOnWriteObserverSet] self
def notify_observers(*args, &block)
observers = duplicate_observers
notify_to(observers, *args, &block)
self
end
# Notifies all registered observers with optional args and deletes them.
#
# @param [Object] args arguments to be passed to each observer
# @return [CopyOnWriteObserverSet] self
def notify_and_delete_observers(*args, &block)
observers = duplicate_and_clear_observers
notify_to(observers, *args, &block)
self
end
protected
def ns_initialize
@observers = {}
end
private
def duplicate_and_clear_observers
synchronize do
observers = @observers.dup
@observers.clear
observers
end
end
def duplicate_observers
synchronize { @observers.dup }
end
def notify_to(observers, *args)
raise ArgumentError.new('cannot give arguments and a block') if block_given? && !args.empty?
observers.each do |observer, function|
args = yield if block_given?
observer.send(function, *args)
end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/java_non_concurrent_priority_queue.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/java_non_concurrent_priority_queue.rb | if Concurrent.on_jruby?
module Concurrent
module Collection
# @!macro priority_queue
#
# @!visibility private
# @!macro internal_implementation_note
class JavaNonConcurrentPriorityQueue
# @!macro priority_queue_method_initialize
def initialize(opts = {})
order = opts.fetch(:order, :max)
if [:min, :low].include?(order)
@queue = java.util.PriorityQueue.new(11) # 11 is the default initial capacity
else
@queue = java.util.PriorityQueue.new(11, java.util.Collections.reverseOrder())
end
end
# @!macro priority_queue_method_clear
def clear
@queue.clear
true
end
# @!macro priority_queue_method_delete
def delete(item)
found = false
while @queue.remove(item) do
found = true
end
found
end
# @!macro priority_queue_method_empty
def empty?
@queue.size == 0
end
# @!macro priority_queue_method_include
def include?(item)
@queue.contains(item)
end
alias_method :has_priority?, :include?
# @!macro priority_queue_method_length
def length
@queue.size
end
alias_method :size, :length
# @!macro priority_queue_method_peek
def peek
@queue.peek
end
# @!macro priority_queue_method_pop
def pop
@queue.poll
end
alias_method :deq, :pop
alias_method :shift, :pop
# @!macro priority_queue_method_push
def push(item)
raise ArgumentError.new('cannot enqueue nil') if item.nil?
@queue.add(item)
end
alias_method :<<, :push
alias_method :enq, :push
# @!macro priority_queue_method_from_list
def self.from_list(list, opts = {})
queue = new(opts)
list.each{|item| queue << item }
queue
end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/lock_free_stack.rb | require 'concurrent/synchronization/object'
module Concurrent
# @!macro warn.edge
class LockFreeStack < Synchronization::Object
safe_initialization!
class Node
# TODO (pitr-ch 20-Dec-2016): Could be unified with Stack class?
# @return [Node]
attr_reader :next_node
# @return [Object]
attr_reader :value
# @!visibility private
# allow to nil-ify to free GC when the entry is no longer relevant, not synchronised
attr_writer :value
def initialize(value, next_node)
@value = value
@next_node = next_node
end
singleton_class.send :alias_method, :[], :new
end
# The singleton for empty node
EMPTY = Node[nil, nil]
def EMPTY.next_node
self
end
attr_atomic(:head)
private :head, :head=, :swap_head, :compare_and_set_head, :update_head
# @!visibility private
def self.of1(value)
new Node[value, EMPTY]
end
# @!visibility private
def self.of2(value1, value2)
new Node[value1, Node[value2, EMPTY]]
end
# @param [Node] head
def initialize(head = EMPTY)
super()
self.head = head
end
# @param [Node] head
# @return [true, false]
def empty?(head = head())
head.equal? EMPTY
end
# @param [Node] head
# @param [Object] value
# @return [true, false]
def compare_and_push(head, value)
compare_and_set_head head, Node[value, head]
end
# @param [Object] value
# @return [self]
def push(value)
while true
current_head = head
return self if compare_and_set_head current_head, Node[value, current_head]
end
end
# @return [Node]
def peek
head
end
# @param [Node] head
# @return [true, false]
def compare_and_pop(head)
compare_and_set_head head, head.next_node
end
# @return [Object]
def pop
while true
current_head = head
return current_head.value if compare_and_set_head current_head, current_head.next_node
end
end
# @param [Node] head
# @return [true, false]
def compare_and_clear(head)
compare_and_set_head head, EMPTY
end
include Enumerable
# @param [Node] head
# @return [self]
def each(head = nil)
return to_enum(:each, head) unless block_given?
it = head || peek
until it.equal?(EMPTY)
yield it.value
it = it.next_node
end
self
end
# @return [true, false]
def clear
while true
current_head = head
return false if current_head == EMPTY
return true if compare_and_set_head current_head, EMPTY
end
end
# @param [Node] head
# @return [true, false]
def clear_if(head)
compare_and_set_head head, EMPTY
end
# @param [Node] head
# @param [Node] new_head
# @return [true, false]
def replace_if(head, new_head)
compare_and_set_head head, new_head
end
# @return [self]
# @yield over the cleared stack
# @yieldparam [Object] value
def clear_each(&block)
while true
current_head = head
return self if current_head == EMPTY
if compare_and_set_head current_head, EMPTY
each current_head, &block
return self
end
end
end
# @return [String] Short string representation.
def to_s
format '%s %s>', super[0..-2], to_a.to_s
end
alias_method :inspect, :to_s
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/non_concurrent_priority_queue.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/non_concurrent_priority_queue.rb | require 'concurrent/utility/engine'
require 'concurrent/collection/java_non_concurrent_priority_queue'
require 'concurrent/collection/ruby_non_concurrent_priority_queue'
module Concurrent
module Collection
# @!visibility private
# @!macro internal_implementation_note
NonConcurrentPriorityQueueImplementation = case
when Concurrent.on_jruby?
JavaNonConcurrentPriorityQueue
else
RubyNonConcurrentPriorityQueue
end
private_constant :NonConcurrentPriorityQueueImplementation
# @!macro priority_queue
#
# A queue collection in which the elements are sorted based on their
# comparison (spaceship) operator `<=>`. Items are added to the queue
# at a position relative to their priority. On removal the element
# with the "highest" priority is removed. By default the sort order is
# from highest to lowest, but a lowest-to-highest sort order can be
# set on construction.
#
# The API is based on the `Queue` class from the Ruby standard library.
#
# The pure Ruby implementation, `RubyNonConcurrentPriorityQueue` uses a heap algorithm
# stored in an array. The algorithm is based on the work of Robert Sedgewick
# and Kevin Wayne.
#
# The JRuby native implementation is a thin wrapper around the standard
# library `java.util.NonConcurrentPriorityQueue`.
#
# When running under JRuby the class `NonConcurrentPriorityQueue` extends `JavaNonConcurrentPriorityQueue`.
# When running under all other interpreters it extends `RubyNonConcurrentPriorityQueue`.
#
# @note This implementation is *not* thread safe.
#
# @see http://en.wikipedia.org/wiki/Priority_queue
# @see http://ruby-doc.org/stdlib-2.0.0/libdoc/thread/rdoc/Queue.html
#
# @see http://algs4.cs.princeton.edu/24pq/index.php#2.6
# @see http://algs4.cs.princeton.edu/24pq/MaxPQ.java.html
#
# @see http://docs.oracle.com/javase/7/docs/api/java/util/PriorityQueue.html
#
# @!visibility private
class NonConcurrentPriorityQueue < NonConcurrentPriorityQueueImplementation
alias_method :has_priority?, :include?
alias_method :size, :length
alias_method :deq, :pop
alias_method :shift, :pop
alias_method :<<, :push
alias_method :enq, :push
# @!method initialize(opts = {})
# @!macro priority_queue_method_initialize
#
# Create a new priority queue with no items.
#
# @param [Hash] opts the options for creating the queue
# @option opts [Symbol] :order (:max) dictates the order in which items are
# stored: from highest to lowest when `:max` or `:high`; from lowest to
# highest when `:min` or `:low`
# @!method clear
# @!macro priority_queue_method_clear
#
# Removes all of the elements from this priority queue.
# @!method delete(item)
# @!macro priority_queue_method_delete
#
# Deletes all items from `self` that are equal to `item`.
#
# @param [Object] item the item to be removed from the queue
# @return [Object] true if the item is found else false
# @!method empty?
# @!macro priority_queue_method_empty
#
# Returns `true` if `self` contains no elements.
#
# @return [Boolean] true if there are no items in the queue else false
# @!method include?(item)
# @!macro priority_queue_method_include
#
# Returns `true` if the given item is present in `self` (that is, if any
# element == `item`), otherwise returns false.
#
# @param [Object] item the item to search for
#
# @return [Boolean] true if the item is found else false
# @!method length
# @!macro priority_queue_method_length
#
# The current length of the queue.
#
# @return [Fixnum] the number of items in the queue
# @!method peek
# @!macro priority_queue_method_peek
#
# Retrieves, but does not remove, the head of this queue, or returns `nil`
# if this queue is empty.
#
# @return [Object] the head of the queue or `nil` when empty
# @!method pop
# @!macro priority_queue_method_pop
#
# Retrieves and removes the head of this queue, or returns `nil` if this
# queue is empty.
#
# @return [Object] the head of the queue or `nil` when empty
# @!method push(item)
# @!macro priority_queue_method_push
#
# Inserts the specified element into this priority queue.
#
# @param [Object] item the item to insert onto the queue
# @!method self.from_list(list, opts = {})
# @!macro priority_queue_method_from_list
#
# Create a new priority queue from the given list.
#
# @param [Enumerable] list the list to build the queue from
# @param [Hash] opts the options for creating the queue
#
# @return [NonConcurrentPriorityQueue] the newly created and populated queue
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/map/non_concurrent_map_backend.rb | require 'concurrent/constants'
module Concurrent
# @!visibility private
module Collection
# @!visibility private
class NonConcurrentMapBackend
# WARNING: all public methods of the class must operate on the @backend
# directly without calling each other. This is important because of the
# SynchronizedMapBackend which uses a non-reentrant mutex for performance
# reasons.
def initialize(options = nil, &default_proc)
validate_options_hash!(options) if options.kind_of?(::Hash)
set_backend(default_proc)
@default_proc = default_proc
end
def [](key)
@backend[key]
end
def []=(key, value)
@backend[key] = value
end
def compute_if_absent(key)
if NULL != (stored_value = @backend.fetch(key, NULL))
stored_value
else
@backend[key] = yield
end
end
def replace_pair(key, old_value, new_value)
if pair?(key, old_value)
@backend[key] = new_value
true
else
false
end
end
def replace_if_exists(key, new_value)
if NULL != (stored_value = @backend.fetch(key, NULL))
@backend[key] = new_value
stored_value
end
end
def compute_if_present(key)
if NULL != (stored_value = @backend.fetch(key, NULL))
store_computed_value(key, yield(stored_value))
end
end
def compute(key)
store_computed_value(key, yield(get_or_default(key, nil)))
end
def merge_pair(key, value)
if NULL == (stored_value = @backend.fetch(key, NULL))
@backend[key] = value
else
store_computed_value(key, yield(stored_value))
end
end
def get_and_set(key, value)
stored_value = get_or_default(key, nil)
@backend[key] = value
stored_value
end
def key?(key)
@backend.key?(key)
end
def delete(key)
@backend.delete(key)
end
def delete_pair(key, value)
if pair?(key, value)
@backend.delete(key)
true
else
false
end
end
def clear
@backend.clear
self
end
def each_pair
dupped_backend.each_pair do |k, v|
yield k, v
end
self
end
def size
@backend.size
end
def get_or_default(key, default_value)
@backend.fetch(key, default_value)
end
private
def set_backend(default_proc)
if default_proc
@backend = ::Hash.new { |_h, key| default_proc.call(self, key) }
else
@backend = {}
end
end
def initialize_copy(other)
super
set_backend(@default_proc)
self
end
def dupped_backend
@backend.dup
end
def pair?(key, expected_value)
NULL != (stored_value = @backend.fetch(key, NULL)) && expected_value.equal?(stored_value)
end
def store_computed_value(key, new_value)
if new_value.nil?
@backend.delete(key)
nil
else
@backend[key] = new_value
end
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb | require 'thread'
require 'concurrent/collection/map/non_concurrent_map_backend'
module Concurrent
# @!visibility private
module Collection
# @!visibility private
class MriMapBackend < NonConcurrentMapBackend
def initialize(options = nil, &default_proc)
super(options, &default_proc)
@write_lock = Mutex.new
end
def []=(key, value)
@write_lock.synchronize { super }
end
def compute_if_absent(key)
if NULL != (stored_value = @backend.fetch(key, NULL)) # fast non-blocking path for the most likely case
stored_value
else
@write_lock.synchronize { super }
end
end
def compute_if_present(key)
@write_lock.synchronize { super }
end
def compute(key)
@write_lock.synchronize { super }
end
def merge_pair(key, value)
@write_lock.synchronize { super }
end
def replace_pair(key, old_value, new_value)
@write_lock.synchronize { super }
end
def replace_if_exists(key, new_value)
@write_lock.synchronize { super }
end
def get_and_set(key, value)
@write_lock.synchronize { super }
end
def delete(key)
@write_lock.synchronize { super }
end
def delete_pair(key, value)
@write_lock.synchronize { super }
end
def clear
@write_lock.synchronize { super }
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/map/synchronized_map_backend.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/map/synchronized_map_backend.rb | require 'concurrent/collection/map/non_concurrent_map_backend'
module Concurrent
# @!visibility private
module Collection
# @!visibility private
class SynchronizedMapBackend < NonConcurrentMapBackend
def initialize(*args, &block)
super
# WARNING: Mutex is a non-reentrant lock, so the synchronized methods are
# not allowed to call each other.
@mutex = Mutex.new
end
def [](key)
@mutex.synchronize { super }
end
def []=(key, value)
@mutex.synchronize { super }
end
def compute_if_absent(key)
@mutex.synchronize { super }
end
def compute_if_present(key)
@mutex.synchronize { super }
end
def compute(key)
@mutex.synchronize { super }
end
def merge_pair(key, value)
@mutex.synchronize { super }
end
def replace_pair(key, old_value, new_value)
@mutex.synchronize { super }
end
def replace_if_exists(key, new_value)
@mutex.synchronize { super }
end
def get_and_set(key, value)
@mutex.synchronize { super }
end
def key?(key)
@mutex.synchronize { super }
end
def delete(key)
@mutex.synchronize { super }
end
def delete_pair(key, value)
@mutex.synchronize { super }
end
def clear
@mutex.synchronize { super }
end
def size
@mutex.synchronize { super }
end
def get_or_default(key, default_value)
@mutex.synchronize { super }
end
private
def dupped_backend
@mutex.synchronize { super }
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/map/truffleruby_map_backend.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/collection/map/truffleruby_map_backend.rb | module Concurrent
# @!visibility private
module Collection
# @!visibility private
class TruffleRubyMapBackend < TruffleRuby::ConcurrentMap
def initialize(options = nil)
options ||= {}
super(initial_capacity: options[:initial_capacity], load_factor: options[:load_factor])
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/java_count_down_latch.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/java_count_down_latch.rb | if Concurrent.on_jruby?
require 'concurrent/utility/native_extension_loader'
module Concurrent
# @!macro count_down_latch
# @!visibility private
# @!macro internal_implementation_note
class JavaCountDownLatch
# @!macro count_down_latch_method_initialize
def initialize(count = 1)
Utility::NativeInteger.ensure_integer_and_bounds(count)
Utility::NativeInteger.ensure_positive(count)
@latch = java.util.concurrent.CountDownLatch.new(count)
end
# @!macro count_down_latch_method_wait
def wait(timeout = nil)
result = nil
if timeout.nil?
Synchronization::JRuby.sleep_interruptibly { @latch.await }
result = true
else
Synchronization::JRuby.sleep_interruptibly do
result = @latch.await(1000 * timeout, java.util.concurrent.TimeUnit::MILLISECONDS)
end
end
result
end
# @!macro count_down_latch_method_count_down
def count_down
@latch.countDown
end
# @!macro count_down_latch_method_count
def count
@latch.getCount
end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/semaphore.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/semaphore.rb | require 'concurrent/atomic/mutex_semaphore'
module Concurrent
###################################################################
# @!macro semaphore_method_initialize
#
# Create a new `Semaphore` with the initial `count`.
#
# @param [Fixnum] count the initial count
#
# @raise [ArgumentError] if `count` is not an integer
# @!macro semaphore_method_acquire
#
# Acquires the given number of permits from this semaphore,
# blocking until all are available. If a block is given,
# yields to it and releases the permits afterwards.
#
# @param [Fixnum] permits Number of permits to acquire
#
# @raise [ArgumentError] if `permits` is not an integer or is less than zero
#
# @return [nil, BasicObject] Without a block, `nil` is returned. If a block
# is given, its return value is returned.
# @!macro semaphore_method_available_permits
#
# Returns the current number of permits available in this semaphore.
#
# @return [Integer]
# @!macro semaphore_method_drain_permits
#
# Acquires and returns all permits that are immediately available.
#
# @return [Integer]
# @!macro semaphore_method_try_acquire
#
# Acquires the given number of permits from this semaphore,
# only if all are available at the time of invocation or within
# `timeout` interval. If a block is given, yields to it if the permits
# were successfully acquired, and releases them afterward, returning the
# block's return value.
#
# @param [Fixnum] permits the number of permits to acquire
#
# @param [Fixnum] timeout the number of seconds to wait for the counter
# or `nil` to return immediately
#
# @raise [ArgumentError] if `permits` is not an integer or is less than zero
#
# @return [true, false, nil, BasicObject] `false` if no permits are
# available, `true` when acquired a permit. If a block is given, the
# block's return value is returned if the permits were acquired; if not,
# `nil` is returned.
# @!macro semaphore_method_release
#
# Releases the given number of permits, returning them to the semaphore.
#
# @param [Fixnum] permits Number of permits to return to the semaphore.
#
# @raise [ArgumentError] if `permits` is not a number or is less than zero
#
# @return [nil]
###################################################################
# @!macro semaphore_public_api
#
# @!method initialize(count)
# @!macro semaphore_method_initialize
#
# @!method acquire(permits = 1)
# @!macro semaphore_method_acquire
#
# @!method available_permits
# @!macro semaphore_method_available_permits
#
# @!method drain_permits
# @!macro semaphore_method_drain_permits
#
# @!method try_acquire(permits = 1, timeout = nil)
# @!macro semaphore_method_try_acquire
#
# @!method release(permits = 1)
# @!macro semaphore_method_release
###################################################################
# @!visibility private
# @!macro internal_implementation_note
SemaphoreImplementation = if Concurrent.on_jruby?
require 'concurrent/utility/native_extension_loader'
JavaSemaphore
else
MutexSemaphore
end
private_constant :SemaphoreImplementation
# @!macro semaphore
#
# A counting semaphore. Conceptually, a semaphore maintains a set of
# permits. Each {#acquire} blocks if necessary until a permit is
# available, and then takes it. Each {#release} adds a permit, potentially
# releasing a blocking acquirer.
# However, no actual permit objects are used; the Semaphore just keeps a
# count of the number available and acts accordingly.
# Alternatively, permits may be acquired within a block, and automatically
# released after the block finishes executing.
#
# @!macro semaphore_public_api
# @example
# semaphore = Concurrent::Semaphore.new(2)
#
# t1 = Thread.new do
# semaphore.acquire
# puts "Thread 1 acquired semaphore"
# end
#
# t2 = Thread.new do
# semaphore.acquire
# puts "Thread 2 acquired semaphore"
# end
#
# t3 = Thread.new do
# semaphore.acquire
# puts "Thread 3 acquired semaphore"
# end
#
# t4 = Thread.new do
# sleep(2)
# puts "Thread 4 releasing semaphore"
# semaphore.release
# end
#
# [t1, t2, t3, t4].each(&:join)
#
# # prints:
# # Thread 3 acquired semaphore
# # Thread 2 acquired semaphore
# # Thread 4 releasing semaphore
# # Thread 1 acquired semaphore
#
# @example
# semaphore = Concurrent::Semaphore.new(1)
#
# puts semaphore.available_permits
# semaphore.acquire do
# puts semaphore.available_permits
# end
# puts semaphore.available_permits
#
# # prints:
# # 1
# # 0
# # 1
class Semaphore < SemaphoreImplementation
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb | require 'concurrent/synchronization/safe_initialization'
module Concurrent
# @!macro atomic_boolean
# @!visibility private
# @!macro internal_implementation_note
class MutexAtomicBoolean
extend Concurrent::Synchronization::SafeInitialization
# @!macro atomic_boolean_method_initialize
def initialize(initial = false)
super()
@Lock = ::Mutex.new
@value = !!initial
end
# @!macro atomic_boolean_method_value_get
def value
synchronize { @value }
end
# @!macro atomic_boolean_method_value_set
def value=(value)
synchronize { @value = !!value }
end
# @!macro atomic_boolean_method_true_question
def true?
synchronize { @value }
end
# @!macro atomic_boolean_method_false_question
def false?
synchronize { !@value }
end
# @!macro atomic_boolean_method_make_true
def make_true
synchronize { ns_make_value(true) }
end
# @!macro atomic_boolean_method_make_false
def make_false
synchronize { ns_make_value(false) }
end
protected
# @!visibility private
def synchronize
if @Lock.owned?
yield
else
@Lock.synchronize { yield }
end
end
private
# @!visibility private
def ns_make_value(value)
old = @value
@value = value
old != @value
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/event.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/event.rb | require 'thread'
require 'concurrent/synchronization/lockable_object'
module Concurrent
# Old school kernel-style event reminiscent of Win32 programming in C++.
#
# When an `Event` is created it is in the `unset` state. Threads can choose to
# `#wait` on the event, blocking until released by another thread. When one
# thread wants to alert all blocking threads it calls the `#set` method which
# will then wake up all listeners. Once an `Event` has been set it remains set.
# New threads calling `#wait` will return immediately. An `Event` may be
# `#reset` at any time once it has been set.
#
# @see http://msdn.microsoft.com/en-us/library/windows/desktop/ms682655.aspx
# @example
# event = Concurrent::Event.new
#
# t1 = Thread.new do
# puts "t1 is waiting"
# event.wait(1)
# puts "event occurred"
# end
#
# t2 = Thread.new do
# puts "t2 calling set"
# event.set
# end
#
# [t1, t2].each(&:join)
#
# # prints:
# # t1 is waiting
# # t2 calling set
# # event occurred
class Event < Synchronization::LockableObject
# Creates a new `Event` in the unset state. Threads calling `#wait` on the
# `Event` will block.
def initialize
super
synchronize { ns_initialize }
end
# Is the object in the set state?
#
# @return [Boolean] indicating whether or not the `Event` has been set
def set?
synchronize { @set }
end
# Trigger the event, setting the state to `set` and releasing all threads
# waiting on the event. Has no effect if the `Event` has already been set.
#
# @return [Boolean] should always return `true`
def set
synchronize { ns_set }
end
def try?
synchronize { @set ? false : ns_set }
end
# Reset a previously set event back to the `unset` state.
# Has no effect if the `Event` has not yet been set.
#
# @return [Boolean] should always return `true`
def reset
synchronize do
if @set
@set = false
@iteration +=1
end
true
end
end
# Wait a given number of seconds for the `Event` to be set by another
# thread. Will wait forever when no `timeout` value is given. Returns
# immediately if the `Event` has already been set.
#
# @return [Boolean] true if the `Event` was set before timeout else false
def wait(timeout = nil)
synchronize do
unless @set
iteration = @iteration
ns_wait_until(timeout) { iteration < @iteration || @set }
else
true
end
end
end
protected
def ns_set
unless @set
@set = true
ns_broadcast
end
true
end
def ns_initialize
@set = false
@iteration = 0
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/mutex_count_down_latch.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/mutex_count_down_latch.rb | require 'concurrent/synchronization/lockable_object'
require 'concurrent/utility/native_integer'
module Concurrent
# @!macro count_down_latch
# @!visibility private
# @!macro internal_implementation_note
class MutexCountDownLatch < Synchronization::LockableObject
# @!macro count_down_latch_method_initialize
def initialize(count = 1)
Utility::NativeInteger.ensure_integer_and_bounds count
Utility::NativeInteger.ensure_positive count
super()
synchronize { ns_initialize count }
end
# @!macro count_down_latch_method_wait
def wait(timeout = nil)
synchronize { ns_wait_until(timeout) { @count == 0 } }
end
# @!macro count_down_latch_method_count_down
def count_down
synchronize do
@count -= 1 if @count > 0
ns_broadcast if @count == 0
end
end
# @!macro count_down_latch_method_count
def count
synchronize { @count }
end
protected
def ns_initialize(count)
@count = count
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb | Library/Homebrew/vendor/bundle/ruby/3.4.0/gems/concurrent-ruby-1.3.6/lib/concurrent-ruby/concurrent/atomic/cyclic_barrier.rb | require 'concurrent/synchronization/lockable_object'
require 'concurrent/utility/native_integer'
module Concurrent
# A synchronization aid that allows a set of threads to all wait for each
# other to reach a common barrier point.
# @example
# barrier = Concurrent::CyclicBarrier.new(3)
# jobs = Array.new(3) { |i| -> { sleep i; p done: i } }
# process = -> (i) do
# # waiting to start at the same time
# barrier.wait
# # execute job
# jobs[i].call
# # wait for others to finish
# barrier.wait
# end
# threads = 2.times.map do |i|
# Thread.new(i, &process)
# end
#
# # use main as well
# process.call 2
#
# # here we can be sure that all jobs are processed
class CyclicBarrier < Synchronization::LockableObject
# @!visibility private
Generation = Struct.new(:status)
private_constant :Generation
# Create a new `CyclicBarrier` that waits for `parties` threads
#
# @param [Fixnum] parties the number of parties
# @yield an optional block that will be executed that will be executed after
# the last thread arrives and before the others are released
#
# @raise [ArgumentError] if `parties` is not an integer or is less than zero
def initialize(parties, &block)
Utility::NativeInteger.ensure_integer_and_bounds parties
Utility::NativeInteger.ensure_positive_and_no_zero parties
super(&nil)
synchronize { ns_initialize parties, &block }
end
# @return [Fixnum] the number of threads needed to pass the barrier
def parties
synchronize { @parties }
end
# @return [Fixnum] the number of threads currently waiting on the barrier
def number_waiting
synchronize { @number_waiting }
end
# Blocks on the barrier until the number of waiting threads is equal to
# `parties` or until `timeout` is reached or `reset` is called
# If a block has been passed to the constructor, it will be executed once by
# the last arrived thread before releasing the others
# @param [Fixnum] timeout the number of seconds to wait for the counter or
# `nil` to block indefinitely
# @return [Boolean] `true` if the `count` reaches zero else false on
# `timeout` or on `reset` or if the barrier is broken
def wait(timeout = nil)
synchronize do
return false unless @generation.status == :waiting
@number_waiting += 1
if @number_waiting == @parties
@action.call if @action
ns_generation_done @generation, :fulfilled
true
else
generation = @generation
if ns_wait_until(timeout) { generation.status != :waiting }
generation.status == :fulfilled
else
ns_generation_done generation, :broken, false
false
end
end
end
end
# resets the barrier to its initial state
# If there is at least one waiting thread, it will be woken up, the `wait`
# method will return false and the barrier will be broken
# If the barrier is broken, this method restores it to the original state
#
# @return [nil]
def reset
synchronize { ns_generation_done @generation, :reset }
end
# A barrier can be broken when:
# - a thread called the `reset` method while at least one other thread was waiting
# - at least one thread timed out on `wait` method
#
# A broken barrier can be restored using `reset` it's safer to create a new one
# @return [Boolean] true if the barrier is broken otherwise false
def broken?
synchronize { @generation.status != :waiting }
end
protected
def ns_generation_done(generation, status, continue = true)
generation.status = status
ns_next_generation if continue
ns_broadcast
end
def ns_next_generation
@generation = Generation.new(:waiting)
@number_waiting = 0
end
def ns_initialize(parties, &block)
@parties = parties
@action = block
ns_next_generation
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.