CombinedText stringlengths 4 3.42M |
|---|
module Lederhosen
  # Gem version, split into semantic-version components.
  module Version
    MAJOR = 1
    MINOR = 2
    PATCH = 1
    # Codename changes for minor versions.
    CODENAME = 'Regenmantel'
    # Dotted semantic-version string, e.g. "1.2.1".
    STRING = "#{MAJOR}.#{MINOR}.#{PATCH}"
  end
end
Version bump to 1.2.2.
module Lederhosen
  # Gem version, split into semantic-version components.
  module Version
    MAJOR = 1
    MINOR = 2
    PATCH = 2
    # Codename changes for minor versions.
    CODENAME = 'Regenmantel'
    # Dotted semantic-version string, e.g. "1.2.2".
    STRING = "#{MAJOR}.#{MINOR}.#{PATCH}"
  end
end
|
# The MIT License (MIT)
#
# Copyright (c) 2016 Sylvain Daubert
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
require_relative 'loggable'
module LetsCert
  # Base class for input/output plugins.
  # @author Sylvain Daubert
  class IOPlugin
    include Loggable

    # Plugin name
    # @return [String]
    attr_reader :name

    # Class-level registry: plugin name => plugin instance.
    @registered = {}

    # Data set with every slot empty.
    # @return [Hash] +{ account_key: nil, key: nil, cert: nil, chain: nil }+
    def self.empty_data
      { account_key: nil, key: nil, cert: nil, chain: nil }
    end

    # Instantiate +klass+ and add it to the plugin registry.
    # @param [Class] klass
    # @param [Array] args arguments forwarded to the +klass+ constructor
    # @return [Class] klass
    def self.register(klass, *args)
      plugin = klass.new(*args)
      bad_name = plugin.name =~ %r{[/\\]} || %w[. ..].include?(plugin.name)
      raise Error, 'plugin name should just be a file name, without path' if bad_name

      @registered[plugin.name] = plugin
      klass
    end

    # Registered plugins.
    # @return [Hash] plugin names mapped to IOPlugin subclass instances.
    def self.registered
      @registered
    end

    # @param [String] name plugin name
    def initialize(name)
      @name = name
    end

    # @abstract Subclasses must override this method.
    def load
      raise NotImplementedError
    end

    # @abstract Subclasses must override this method.
    def save
      raise NotImplementedError
    end
  end
end
require_relative 'io_plugins/file_io_plugin_mixin'
require_relative 'io_plugins/jwk_io_plugin_mixin'
require_relative 'io_plugins/openssl_io_plugin'
require_relative 'io_plugins/account_key'
require_relative 'io_plugins/key_file'
require_relative 'io_plugins/chain_file'
require_relative 'io_plugins/full_chain_file'
require_relative 'io_plugins/cert_file'
Refactor the IOPlugin class to use the eigenclass for its class-level API.
# The MIT License (MIT)
#
# Copyright (c) 2016 Sylvain Daubert
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
require_relative 'loggable'
module LetsCert
  # Base class for input/output plugins.
  # @author Sylvain Daubert
  class IOPlugin
    include Loggable

    # Plugin name
    # @return [String]
    attr_reader :name

    # Class-level registry: plugin name => plugin instance.
    @registered = {}

    class << self
      # Registered plugins.
      # @return [Hash] plugin names mapped to IOPlugin subclass instances.
      attr_reader :registered

      # Data set with every slot empty.
      # @return [Hash] +{ account_key: nil, key: nil, cert: nil, chain: nil }+
      def empty_data
        { account_key: nil, key: nil, cert: nil, chain: nil }
      end

      # Instantiate +klass+ and add it to the plugin registry.
      # @param [Class] klass
      # @param [Array] args arguments forwarded to the +klass+ constructor
      # @return [Class] klass
      def register(klass, *args)
        plugin = klass.new(*args)
        bad_name = plugin.name =~ %r{[/\\]} || %w[. ..].include?(plugin.name)
        raise Error, 'plugin name should just be a file name, without path' if bad_name

        @registered[plugin.name] = plugin
        klass
      end
    end

    # @param [String] name plugin name
    def initialize(name)
      @name = name
    end

    # @abstract Subclasses must override this method.
    def load
      raise NotImplementedError
    end

    # @abstract Subclasses must override this method.
    def save
      raise NotImplementedError
    end
  end
end
require_relative 'io_plugins/file_io_plugin_mixin'
require_relative 'io_plugins/jwk_io_plugin_mixin'
require_relative 'io_plugins/openssl_io_plugin'
require_relative 'io_plugins/account_key'
require_relative 'io_plugins/key_file'
require_relative 'io_plugins/chain_file'
require_relative 'io_plugins/full_chain_file'
require_relative 'io_plugins/cert_file'
|
module Protoable
  module Persistence
    # Hooks the class-level helpers into the including Active Record model.
    def self.included(klass)
      klass.extend Protoable::Persistence::ClassMethods
    end

    module ClassMethods
      # Filters accessible attributes that exist in the given protobuf message's
      # fields or have attribute transformers defined for them.
      #
      # Returns a hash of attribute fields with their respective values.
      #
      def _filter_attribute_fields(proto)
        fields = proto.to_hash
        # Only keep fields that are actually set and are not repeated.
        fields.select! { |key, value| proto.has_field?(key) && !proto.get_field_by_name(key).repeated? }
        attributes = self.new.attributes.keys - protected_attributes.to_a
        attribute_fields = attributes.inject({}) do |hash, column_name|
          symbolized_column = column_name.to_sym
          if fields.has_key?(symbolized_column) ||
            _protobuf_attribute_transformers.has_key?(symbolized_column)
            hash[symbolized_column] = fields[symbolized_column]
          end
          hash
        end
        attribute_fields
      end

      # Creates a hash of attributes from a given protobuf message.
      #
      # It converts and transforms field values using the field converters and
      # attribute transformers, ignoring repeated and nil fields.
      #
      def attributes_from_proto(proto)
        attribute_fields = _filter_attribute_fields(proto)
        attributes = attribute_fields.inject({}) do |hash, (key, value)|
          if _protobuf_attribute_transformers.has_key?(key)
            hash[key] = _protobuf_attribute_transformers[key].call(proto)
          else
            hash[key] = _protobuf_convert_fields_to_columns(key, value)
          end
          hash
        end
        attributes
      end

      # :nodoc:
      def create(attributes, options = {}, &block)
        attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
        super(attributes, options)
      end

      # :nodoc:
      def create!(attributes, options = {}, &block)
        attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
        super(attributes, options)
      end

      # Creates an object from the given protobuf message, if it's valid. The
      # newly created object is returned whether or not it was successfully saved.
      #
      def create_from_proto(proto, options = {})
        attributes = attributes_from_proto(proto)
        yield(attributes) if block_given?
        self.create(attributes, options)
      end
    end

    # :nodoc:
    def assign_attributes(attributes, options = {})
      # Fix: previously called attributes_from_proto(proto) with an undefined
      # local `proto`, raising NameError whenever a Protobuf message was given.
      attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
      super(attributes, options)
    end

    # Calls up to the class version of the method.
    #
    def attributes_from_proto(proto)
      self.class.attributes_from_proto(proto)
    end

    # Destroys the record. Mainly wrapped to provide a consistent API and
    # a convenient way to override protobuf-specific destroy behavior.
    #
    def destroy_from_proto
      destroy
    end

    # :nodoc:
    def update_attributes(attributes, options = {})
      attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
      super(attributes, options)
    end

    # :nodoc:
    def update_attributes!(attributes, options = {})
      attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
      super(attributes, options)
    end

    # Update a record from a proto message. Accepts an optional block.
    # If block is given, yields the attributes that would be updated.
    #
    def update_from_proto(proto, options = {})
      attributes = attributes_from_proto(proto)
      yield(attributes) if block_given?
      update_attributes(attributes, options)
    end
  end
end
Overriding Active Record's initialize method so that it will take a protobuf message and convert it to attributes.
module Protoable
  module Persistence
    # Hooks the class-level helpers into the including Active Record model and
    # overrides its initializer to accept protobuf messages.
    def self.included(klass)
      klass.extend Protoable::Persistence::ClassMethods
      klass.class_eval do
        # Override Active Record's initialize method so it can accept a protobuf
        # message as its attributes. Needs to be done in a class_eval block since
        # initialize is defined in ActiveRecord::Base.
        # :noapi:
        def initialize(*args)
          # Fix: `args.first = ...` is not valid (Array has no #first= writer);
          # replace the first element by index instead.
          args[0] = attributes_from_proto(args[0]) if args[0].is_a?(::Protobuf::Message)
          super(*args)
        end
      end
    end

    module ClassMethods
      # Filters accessible attributes that exist in the given protobuf message's
      # fields or have attribute transformers defined for them.
      #
      # Returns a hash of attribute fields with their respective values.
      #
      def _filter_attribute_fields(proto)
        fields = proto.to_hash
        # Only keep fields that are actually set and are not repeated.
        fields.select! { |key, value| proto.has_field?(key) && !proto.get_field_by_name(key).repeated? }
        attributes = self.new.attributes.keys - protected_attributes.to_a
        attribute_fields = attributes.inject({}) do |hash, column_name|
          symbolized_column = column_name.to_sym
          if fields.has_key?(symbolized_column) ||
            _protobuf_attribute_transformers.has_key?(symbolized_column)
            hash[symbolized_column] = fields[symbolized_column]
          end
          hash
        end
        attribute_fields
      end

      # Creates a hash of attributes from a given protobuf message.
      #
      # It converts and transforms field values using the field converters and
      # attribute transformers, ignoring repeated and nil fields.
      #
      def attributes_from_proto(proto)
        attribute_fields = _filter_attribute_fields(proto)
        attributes = attribute_fields.inject({}) do |hash, (key, value)|
          if _protobuf_attribute_transformers.has_key?(key)
            hash[key] = _protobuf_attribute_transformers[key].call(proto)
          else
            hash[key] = _protobuf_convert_fields_to_columns(key, value)
          end
          hash
        end
        attributes
      end

      # :nodoc:
      def create(attributes, options = {}, &block)
        attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
        super(attributes, options)
      end

      # :nodoc:
      def create!(attributes, options = {}, &block)
        attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
        super(attributes, options)
      end

      # Creates an object from the given protobuf message, if it's valid. The
      # newly created object is returned whether or not it was successfully saved.
      #
      def create_from_proto(proto, options = {})
        attributes = attributes_from_proto(proto)
        yield(attributes) if block_given?
        self.create(attributes, options)
      end
    end

    # :nodoc:
    def assign_attributes(attributes, options = {})
      # Fix: previously called attributes_from_proto(proto) with an undefined
      # local `proto`, raising NameError whenever a Protobuf message was given.
      attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
      super(attributes, options)
    end

    # Calls up to the class version of the method.
    #
    def attributes_from_proto(proto)
      self.class.attributes_from_proto(proto)
    end

    # Destroys the record. Mainly wrapped to provide a consistent API and
    # a convenient way to override protobuf-specific destroy behavior.
    #
    def destroy_from_proto
      destroy
    end

    # :nodoc:
    def update_attributes(attributes, options = {})
      attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
      super(attributes, options)
    end

    # :nodoc:
    def update_attributes!(attributes, options = {})
      attributes = attributes_from_proto(attributes) if attributes.is_a?(::Protobuf::Message)
      super(attributes, options)
    end

    # Update a record from a proto message. Accepts an optional block.
    # If block is given, yields the attributes that would be updated.
    #
    def update_from_proto(proto, options = {})
      attributes = attributes_from_proto(proto)
      yield(attributes) if block_given?
      update_attributes(attributes, options)
    end
  end
end
|
require 'securerandom'
# Puppet parser function: looks up a private Route53 hosted zone named
# +r53_zone+ and walks its record sets for names starting with +base+.
# NOTE(review): this draft is not syntactically valid Ruby as written -- see
# the inline notes below; it cannot be loaded by the Puppet function loader.
module Puppet::Parser::Functions
newfunction(:update_internal_dns) do |args|
# Expected args: zone name, record base name, host name.
# NOTE(review): +hostname+ is never used below, and 'securerandom' is required but unused.
r53_zone, base, hostname = *args
# AZ fact minus its trailing letter gives the region, e.g. "us-east-1a" -> "us-east-1".
region = Facter.value(:ec2_placement_availability_zone).chop
r53 = Aws::Route53::Client.new(region:region)
begin
# Only consider private zones whose name matches r53_zone, with or without the trailing dot.
zones = r53.list_hosted_zones_by_name(dns_name: r53_zone).to_hash[:hosted_zones].select{|zone|
zone[:config][:private_zone] and (
(zone[:name] == r53_zone) or
(zone[:name] == "#{r53_zone}."))
}
if (zones.count == 0)
Puppet.send(:notice, "No zones with the DNS name of #{r53_zone}, taking no action.")
elsif zones.count == 1
zone_id = zones[0][:id]
Puppet.send(:notice, "Located Route53 Zone with DNS name of #{r53_zone} and id #{zone_id}.")
else
Puppet.send(:notice, "More than one zone with the DNS name of #{r53_zone}, taking no action.")
end
if zone_id
begin
zone = r53.get_hosted_zone(id: zone_id).to_hash
# NOTE(review): first_record is never initialized, so the loop starts with a
# nil start_record_name on the first pass.
while not first_record
records = r53.list_resource_record_sets(hosted_zone_id: zone_id, start_record_name: first_record)
matches = records[:resource_record_sets].select{|record| record[:name] =~ /^#{base}/}
if matches.count > 0
first_record = matches.first[:name]
# NOTE(review): `matches` is a plain Array (result of #select), which has no
# is_truncated? -- presumably this was meant to be records.is_truncated?; confirm
# against the AWS SDK response object.
elsif not matches.is_truncated?
break
end
end
if first_record
temp_records = r53.list_resource_record_sets(hosted_zone_id: zone_id, start_record_name: first_record).to_hash
# NOTE(review): full_list is never initialized (`+=` on nil raises NoMethodError),
# and the same page is fetched on every iteration, so this loop cannot progress.
while temp_records[:max_items] == temp_records[:resource_record_sets].select{|record| record[:name] =~ /^#{base}/}.count
full_list += temp_records[:resource_record_sets].select{|record| record[:name] =~ /^#{base}/}
temp_records = r53.list_resource_record_sets(hosted_zone_id: zone_id, start_record_name: first_record).to_hash
end
full_list.each{|record|
# NOTE(review): empty per-VPC block -- the intended work was never written.
zone[:vp_cs].each{|vpc|
}
# NOTE(review): vpc_id is not defined anywhere in this function.
if zone[:vp_cs].select{|vpc| vpc[:vpc_id] == vpc_id}.count == 0
Puppet.send(:notice, "Route53 zone #{r53_zone} not currently associated with vpc #{vpc_id}, associating.")
r53.associate_vpc_with_hosted_zone(hosted_zone_id: zone_id, vpc: {vpc_region: region, vpc_id: vpc_id}, comment: "Associated by puppet-aws on #{Time.now}")
else
Puppet.send(:notice, "Route53 zone #{r53_zone} is already associated with vpc #{vpc_id}.")
end
# NOTE(review): this rescue sits inside the full_list.each brace block -- a syntax
# error -- and it never captures the exception, yet warns with an undefined `e`
# (should be `rescue Aws::Route53::Errors::ServiceError => e`).
rescue Aws::Route53::Errors::ServiceError
Puppet.send(:warn, e)
end
end
end
end
end
Try 2; probably still going to be rewritten.
require 'securerandom'
# Puppet parser function: ensures a CNAME for +base+ exists in the private
# Route53 zone +r53_zone+ and prunes records for terminated instances.
# NOTE(review): this draft is not syntactically valid Ruby as written -- see
# the inline notes below; it cannot be loaded by the Puppet function loader.
module Puppet::Parser::Functions
newfunction(:update_internal_dns) do |args|
# Expected args: zone name, record base name, host name.
# NOTE(review): +hostname+ the parameter is shadowed by the block variable
# below and never used; 'securerandom' is required but unused.
r53_zone, base, hostname = *args
# AZ fact minus its trailing letter gives the region, e.g. "us-east-1a" -> "us-east-1".
region = Facter.value(:ec2_placement_availability_zone).chop
r53 = Aws::Route53::Client.new(region:region)
begin
# Only consider private zones whose name matches r53_zone, with or without the trailing dot.
zones = r53.list_hosted_zones_by_name(dns_name: r53_zone).to_hash[:hosted_zones].select{|zone|
zone[:config][:private_zone] and (
(zone[:name] == r53_zone) or
(zone[:name] == "#{r53_zone}."))
}
if (zones.count == 0)
Puppet.send(:notice, "No zones with the DNS name of #{r53_zone}, taking no action.")
elsif zones.count == 1
zone_id = zones[0][:id]
Puppet.send(:notice, "Located Route53 Zone with DNS name of #{r53_zone} and id #{zone_id}.")
else
Puppet.send(:notice, "More than one zone with the DNS name of #{r53_zone}, taking no action.")
end
if zone_id
begin
zone = r53.get_hosted_zone(id: zone_id).to_hash
# Skeleton of a change_resource_record_sets request.
changes = {}
changes[:hosted_zone_id] = zone_id
# NOTE(review): `Facter.value(aws::bootstrap::fqdn)` is not a valid fact lookup --
# the fact name presumably needs quoting, e.g. Facter.value('aws::bootstrap::fqdn').
changes[:change_batch] = {}
changes[:change_batch][:comment] = "Updated by puppet-aws on #{Facter.value(aws::bootstrap::fqdn)} at #{Time.now()}."
changes[:change_batch][:changes] = []
record = r53.list_resource_record_sets(hosted_zone_id: zone_id, start_record_name: base, start_record_type: "CNAME")[:resource_record_sets].select{|rec|
(rec[:name] =~ /#{base}\.#{r53_zone}/) and
(rec[:type] == "CNAME" )
}
if record.count == 0
Puppet.send(:notice, "No cname exists for #{base}.#{r53_zone}, creating it.")
# NOTE(review): `change = changes` aliases, not copies, the hash, and
# changes[:change_batch][:changes] is an empty Array, so indexing [0] yields nil
# and the [:action]= call below raises NoMethodError -- a hash must be pushed first.
change = changes
change[:change_batch][:changes][0][:action] = "CREATE"
change[:change_batch][:changes][0][:resource_record_set] = {}
change[:change_batch][:changes][0][:resource_record_set][:name] = "#{base}.#{r53_zone}"
change[:change_batch][:changes][0][:resource_record_set][:type] = "CNAME"
change[:change_batch][:changes][0][:resource_record_set][:resource_records] = []
change[:change_batch][:changes][0][:resource_record_set][:resource_records].push({value: Facter.value(aws::bootstrap::fqdn)})
resp = r53.change_resource_record_sets(change)
Puppet.send(:debug, "Response: #{resp[:change_info].to_hash.to_s}")
sleep(5)
record = r53.list_resource_record_sets(hosted_zone_id: zone_id, start_record_name: base, start_record_type: "CNAME")[:resource_record_sets].select{|rec|
(rec[:name] =~ /#{base}\.#{r53_zone}/) and
(rec[:type] == "CNAME" )
}
end
# Cache of per-region EC2 clients.
ec2_conns = {}
# NOTE(review): `record` is an Array (result of #select); Array#[] with a Symbol
# raises TypeError -- presumably record.first[:resource_records] was intended.
record[:resource_records].each{|hostname|
# get r53 A and TXT records for that hostname
records = r53.list_resource_record_sets(hosted_zone_id: zone_id, start_record_name: hostname[:value])[:resource_record_sets].select{|rec| rec[:name] == hostname[:value]}
# NOTE(review): same Array-indexed-with-Symbol problem on the next two lines.
txt_rec = records.select{|rec| rec[:type] == "TXT"}[:resource_records]
a_rec = records.select{|rec| rec[:type] == "A"}[:resource_records]
# split TXT record, check vpc and verify that that instance id is still running.
if txt_rec.count == 1
vpc_id = txt_rec.split(',')[0]
instance_id = txt_rec.split(',')[1]
interface_id = txt_rec.split(',')[2]
else
Puppet.send(:warn, "TXT record for #{hostname[:value]} isn't correct.")
end
# NOTE(review): #select returns an Array, which is always truthy -- this
# condition can never be false; .any? / .empty? was presumably intended.
if zone[:vp_cs].select{|vpc| vpc[:vpc_id] == vpc_id}
# if instance is still running, do nothing.
region = zone[:vp_cs].select{|vpc| vpc[:vpc_id] == vpc_id}[0][:region].to_sym
if not ec2_conns[region]
ec2_conns[region] = Aws::EC2::Client.new(region: region.to_s)
end
begin
instance = Aws::EC2::Instance.new(id: instance_id, client: ec2_conns[region])
if instance.state[:name] == "running"
if a_rec == instance.network_interfaces.last.private_ip_address
Puppet.send(:debug, "#{instance.network_interfaces.last.network_interface_id} on #{instance_id} still valid for #{hostname[:value]}.")
else
Puppet.send(:debug, "#{instance.network_interfaces.last.network_interface_id} on #{instance_id} no longer valid for #{hostname[:value]}, deleting.")
# NOTE(review): a rescue/else cannot appear here inside an open if/else --
# this is a syntax error; the begin/rescue structure was left unfinished.
rescue Aws::EC2::Errors::InvalidInstanceIDNotFound
else
# if instance doesn't exist, delete TXT and A records, remove record set from base tag cname.
}
# Finally, add A, TXT and record set for the new instance (self)
if zone[:vp_cs].select{|vpc| vpc[:vpc_id] == vpc_id}.count == 0
Puppet.send(:notice, "Route53 zone #{r53_zone} not currently associated with vpc #{vpc_id}, associating.")
r53.associate_vpc_with_hosted_zone(hosted_zone_id: zone_id, vpc: {vpc_region: region, vpc_id: vpc_id}, comment: "Associated by puppet-aws on #{Time.now}")
else
Puppet.send(:notice, "Route53 zone #{r53_zone} is already associated with vpc #{vpc_id}.")
end
# NOTE(review): `e` is never captured here (should be `=> e`).
rescue Aws::Route53::Errors::ServiceError
Puppet.send(:warn, e)
end
end
end
end
end
|
module Rack::PerftoolsProfiler
  # Raised for invalid profiling request arguments; mapped to a 400 response
  # by ProfilerMiddleware#call.
  class ProfilerArgumentError < RuntimeError; end

  # Rack middleware that wraps an application with perftools.rb profiling.
  class ProfilerMiddleware
    include Rack::Utils

    # Response Content-Type for each supported printer output format.
    # Fix: the :callgrind value was an unterminated string literal
    # ('text/plain with no closing quote), which broke parsing of this file.
    PRINTER_CONTENT_TYPE = {
      :text => 'text/plain',
      :gif => 'image/gif',
      :pdf => 'application/pdf',
      :callgrind => 'text/plain'
    }

    # Supported printer formats.
    PRINTERS = PRINTER_CONTENT_TYPE.keys

    # @param app [#call] downstream Rack application
    # @param options [Hash] profiler options (cloned before use)
    def initialize(app, options = {})
      @app = app
      @profiler = Profiler.new(@app, options.clone)
    end

    # Rack entry point. Dispatches to the Action matching this request and
    # returns its response; argument errors become 400s, profiling failures 500s.
    def call(env)
      @env = env.clone
      action = Action.for_env(@env, @profiler, self)
      action.act
      action.response
    rescue ProfilerArgumentError => err
      @env['rack.errors'].write(err.message)
      [400, {'Content-Type' => 'text/plain'}, [err.message]]
    rescue ProfilingError => err
      @env['rack.errors'].write(err.message + "\n" + err.stderr)
      [500, {'Content-Type' => 'text/plain'}, [err.message+"\n\n", "Standard error:\n"+err.stderr+"\n"]]
    end

    # Invokes the wrapped application directly (used by profiling actions).
    def call_app(env)
      @app.call(env)
    end

    # Stops any in-progress profiling run.
    def force_stop
      @profiler.stop
    end

    # Builds a Rack response for collected profiling data.
    # @param profiling_data [Array(Symbol, Object)] printer format and body
    def profiler_data_response(profiling_data)
      format, body = profiling_data
      body = Array(body)
      if format == :none
        message = 'No profiling data available. Visit /__stop__ and then visit /__data__'
        [404, {'Content-Type' => 'text/plain'}, [message]]
      else
        [200, headers(format, body), Array(body)]
      end
    end

    private

    # Response headers for the given printer format; PDF output is served as
    # an attachment with a stable filename.
    def headers(printer, body)
      headers = {
        'Content-Type' => PRINTER_CONTENT_TYPE[printer],
        'Content-Length' => content_length(body)
      }
      if printer == :pdf
        filetype = printer
        filename = 'profile_data'
        # Fix: interpolate the local filename (the literal had been corrupted
        # to "#(unknown)", leaving `filename` unused).
        headers['Content-Disposition'] = %(attachment; filename="#{filename}.#{filetype}")
      end
      headers
    end

    # Sum of the byte sizes of all body parts, stringified for the header.
    def content_length(body)
      body.inject(0) { |len, part| len + bytesize(part) }.to_s
    end
  end
end
Fixed typo
module Rack::PerftoolsProfiler
  # Raised for invalid profiling request arguments; mapped to a 400 response
  # by ProfilerMiddleware#call.
  class ProfilerArgumentError < RuntimeError; end

  # Rack middleware that wraps an application with perftools.rb profiling.
  class ProfilerMiddleware
    include Rack::Utils

    # Response Content-Type for each supported printer output format.
    PRINTER_CONTENT_TYPE = {
      :text => 'text/plain',
      :gif => 'image/gif',
      :pdf => 'application/pdf',
      :callgrind => 'text/plain'
    }

    # Supported printer formats.
    PRINTERS = PRINTER_CONTENT_TYPE.keys

    # @param app [#call] downstream Rack application
    # @param options [Hash] profiler options (cloned before use)
    def initialize(app, options = {})
      @app = app
      @profiler = Profiler.new(@app, options.clone)
    end

    # Rack entry point. Dispatches to the Action matching this request and
    # returns its response; argument errors become 400s, profiling failures 500s.
    def call(env)
      @env = env.clone
      action = Action.for_env(@env, @profiler, self)
      action.act
      action.response
    rescue ProfilerArgumentError => err
      @env['rack.errors'].write(err.message)
      [400, {'Content-Type' => 'text/plain'}, [err.message]]
    rescue ProfilingError => err
      @env['rack.errors'].write(err.message + "\n" + err.stderr)
      [500, {'Content-Type' => 'text/plain'}, [err.message+"\n\n", "Standard error:\n"+err.stderr+"\n"]]
    end

    # Invokes the wrapped application directly (used by profiling actions).
    def call_app(env)
      @app.call(env)
    end

    # Stops any in-progress profiling run.
    def force_stop
      @profiler.stop
    end

    # Builds a Rack response for collected profiling data.
    # @param profiling_data [Array(Symbol, Object)] printer format and body
    def profiler_data_response(profiling_data)
      format, body = profiling_data
      body = Array(body)
      if format == :none
        message = 'No profiling data available. Visit /__stop__ and then visit /__data__'
        [404, {'Content-Type' => 'text/plain'}, [message]]
      else
        [200, headers(format, body), Array(body)]
      end
    end

    private

    # Response headers for the given printer format; PDF output is served as
    # an attachment with a stable filename.
    def headers(printer, body)
      headers = {
        'Content-Type' => PRINTER_CONTENT_TYPE[printer],
        'Content-Length' => content_length(body)
      }
      if printer == :pdf
        filetype = printer
        filename = 'profile_data'
        # Fix: interpolate the local filename (the literal had been corrupted
        # to "#(unknown)", leaving `filename` unused).
        headers['Content-Disposition'] = %(attachment; filename="#{filename}.#{filetype}")
      end
      headers
    end

    # Sum of the byte sizes of all body parts, stringified for the header.
    def content_length(body)
      body.inject(0) { |len, part| len + bytesize(part) }.to_s
    end
  end
end
|
module Railsyard::Globalize
  module ActiveRecordExtension
    # Declares translated attributes on the model and wires up nested
    # attributes for the Globalize translation class.
    # @param args translated attribute names (forwarded to +translates+)
    # @yield optional configuration evaluated in the translation class
    def railsyard_translates(*args, &block)
      translates(*args)
      # Fix: guard the instance_eval -- calling it with no block raised
      # ArgumentError whenever no configuration block was supplied.
      translation_class.instance_eval(&block) if block
      translation_class.instance_eval do
        attr_accessible :locale
      end
      attr_accessible :translations_attributes
      accepts_nested_attributes_for :translations, allow_destroy: true
    end
  end
end
Makes attributes attr_accessible
module Railsyard::Globalize
  module ActiveRecordExtension
    # Declares translated attributes on the model, makes them accessible on
    # the translation class, and wires up nested attributes.
    # @param args translated attribute names, optionally followed by an options hash
    # @yield optional configuration evaluated in the translation class
    def railsyard_translates(*args, &block)
      # Fix: `args.dup.extract_options!` returned the trailing *options hash*,
      # not the attribute names, so the translated attributes were never made
      # accessible. Strip the options from a copy and keep the names instead.
      attrs = args.dup
      attrs.extract_options!
      translates(*args)
      translation_class.instance_eval(&block) if block
      translation_class.instance_eval do
        attr_accessible :locale
        attr_accessible(*attrs)
      end
      attr_accessible :translations_attributes
      accepts_nested_attributes_for :translations, allow_destroy: true
    end
  end
end
|
module RSpec::Puppet
# Example group providing `subject`, `scope` and a compiled catalogue for
# testing Puppet parser functions via the `run` matcher.
module FunctionExampleGroup
include RSpec::Puppet::FunctionMatchers
include RSpec::Puppet::ManifestMatchers
include RSpec::Puppet::Support
# Wraps a function loaded through the modern (Puppet 4.x) loader API.
class V4FunctionWrapper
attr_reader :func, :func_name
# @param name [String] function name
# @param func the loaded function object (may be nil if loading failed)
# @param overrides [Hash] Puppet context overrides, including :global_scope
def initialize(name, func, overrides)
@func_name = name
@func = func
@overrides = overrides
end
# This method is used by the `run` matcher to trigger the function execution, and provides a uniform interface across all puppet versions.
def execute(*args, &block)
Puppet.override(@overrides, "rspec-test scope") do
@func.call(@overrides[:global_scope], *freeze_arg(args), &block)
end
end
# compatibility alias for existing tests
def call(scope, *args)
RSpec.deprecate("subject.call", :replacement => "is_expected.to run.with().and_raise_error(), or execute()")
execute(*args)
end
private
# Facts, keywords, single-quoted strings etc. are usually frozen in Puppet manifests, so freeze arguments to ensure functions are tested
# under worst-case conditions.
# Recursively freezes Arrays, Hashes and Strings; other objects pass through unfrozen.
def freeze_arg(arg)
case arg
when Array
arg.each { |a| freeze_arg(a) }
arg.freeze
when Hash
arg.each { |k,v| freeze_arg(k); freeze_arg(v) }
arg.freeze
when String
arg.freeze
end
arg
end
end
# Wraps a legacy (Puppet 3.x Puppet::Parser::Functions) function bound to a scope.
class V3FunctionWrapper
attr_accessor :func_name
# @param name [String] function name
# @param func [Method] the scope-bound function_<name> method
def initialize(name, func)
@func_name = name
@func = func
end
# This method is used by the `run` matcher to trigger the function execution, and provides a uniform interface across all puppet versions.
# NOTE(review): `args` comes from a splat so it is always an Array; the nil?
# branch appears unreachable. The 3.x API receives the arguments as a single
# Array here, whereas #call below splats them.
def execute(*args)
if args.nil?
@func.call
else
@func.call(args)
end
end
# This method was formerly used by the `run` matcher to trigger the function execution, and provides puppet versions dependant interface.
def call(*args)
RSpec.deprecate("subject.call", :replacement => "is_expected.to run.with().and_raise_error(), or execute()")
if args.nil?
@func.call
else
@func.call(*args)
end
end
end
# (at least) rspec 3.5 doesn't seem to memoize `subject` when called from
# a before(:each) hook, so we need to memoize it ourselves.
def subject
@subject ||= find_function
end
# Looks up the function named by the example group description: tries the
# modern (>= 4.0) environment loader first, then falls back to the legacy
# 3.x Puppet::Parser::Functions API. Returns nil if neither finds it.
def find_function
function_name = self.class.top_level_description
with_vardir do
env = adapter.current_environment
if Puppet.version.to_f >= 4.0
context_overrides = compiler.context_overrides
func = nil
Puppet.override(context_overrides, "rspec-test scope") do
loader = Puppet::Pops::Loaders.new(env)
func = V4FunctionWrapper.new(function_name, loader.private_environment_loader.load(:function, function_name), context_overrides)
# Memoize the global scope as a side effect so #scope reuses it.
@scope = context_overrides[:global_scope]
end
return func if func.func
end
if Puppet::Parser::Functions.function(function_name)
V3FunctionWrapper.new(function_name, scope.method("function_#{function_name}".intern))
else
nil
end
end
end
# Evaluation scope for the function under test (memoized; may already be
# set as a side effect of find_function on Puppet >= 4).
def scope
@scope ||= build_scope(compiler, nodename(:function))
end
# Catalog compiled for the stubbed test node.
def catalogue
@catalogue ||= compiler.catalog
end
# Resets memoized state between examples.
def rspec_puppet_cleanup
@subject = nil
@catalogue = nil
@compiler = nil
@scope = nil
end
private
def compiler
@compiler ||= build_compiler
end
# get a compiler with an attached compiled catalog
def build_compiler
node_name = nodename(:function)
fact_values = facts_hash(node_name)
trusted_values = trusted_facts_hash(node_name)
# Allow different Hiera configurations:
HieraPuppet.instance_variable_set('@hiera', nil) if defined? HieraPuppet
# if we specify a pre_condition, we should ensure that we compile that
# code into a catalog that is accessible from the scope where the
# function is called
Puppet[:code] = pre_cond
node_facts = Puppet::Node::Facts.new(node_name, fact_values.dup)
node_options = {
:parameters => fact_values,
:facts => node_facts
}
stub_facts! fact_values
node = build_node(node_name, node_options)
# Provide the trusted facts hash on Puppet >= 4.3.
if Puppet::Util::Package.versioncmp(Puppet.version, '4.3.0') >= 0
Puppet.push_context(
{
:trusted_information => Puppet::Context::TrustedInformation.new('remote', node_name, trusted_values)
},
"Context for spec trusted hash"
)
end
compiler = Puppet::Parser::Compiler.new(node)
compiler.compile
compiler
end
# Builds a scope appropriate to the running Puppet version.
def build_scope(compiler, node_name)
if Puppet.version.to_f >= 4.0
return compiler.context_overrides[:global_scope]
elsif Puppet.version =~ /^2\.[67]/
# loadall should only be necessary prior to 3.x
# Please note, loadall needs to happen first when creating a scope, otherwise
# you might receive undefined method `function_*' errors
Puppet::Parser::Functions.autoloader.loadall
scope = Puppet::Parser::Scope.new(:compiler => compiler)
else
scope = Puppet::Parser::Scope.new(compiler)
end
scope.source = Puppet::Resource::Type.new(:node, node_name)
scope.parent = compiler.topscope
scope
end
# Builds a Puppet::Node in the current environment.
def build_node(name, opts = {})
node_environment = adapter.current_environment
opts.merge!({:environment => node_environment})
Puppet::Node.new(name, opts)
end
end
end
Several fixes to function_example_group for scoping
* The current implementation uses Puppet.override to set global_scope
when a function example is called; but this does not trickle down
into all the functions called by the example function. This results
in strange behaviour as self.closure_scope returns a hash if @closure_scope
is nil (which it always will be), and Puppet.lookup(:global_scope)
returns nil (since it's never set except during the initial call()).
* Ensure that :loaders exists so that functions like
lookup/hiera 5 will function properly.
* Add call_function() wrapper that mimics the same api as the scope
function, that can be used to call another function in the same scope
as the example function; useful for validating that the example
function performed the correct action.
module RSpec::Puppet
module FunctionExampleGroup
include RSpec::Puppet::FunctionMatchers
include RSpec::Puppet::ManifestMatchers
include RSpec::Puppet::Support
class V4FunctionWrapper
attr_reader :func, :func_name
def initialize(name, func, overrides)
@func_name = name
@func = func
@overrides = overrides
end
# This method is used by the `run` matcher to trigger the function execution, and provides a uniform interface across all puppet versions.
def execute(*args, &block)
Puppet.override(@overrides, "rspec-test scope") do
@func.call(@overrides[:global_scope], *freeze_arg(args), &block)
end
end
# compatibility alias for existing tests
def call(scope, *args)
RSpec.deprecate("subject.call", :replacement => "is_expected.to run.with().and_raise_error(), or execute()")
execute(*args)
end
private
# Facts, keywords, single-quoted strings etc. are usually frozen in Puppet manifests, so freeze arguments to ensure functions are tested
# under worst-case conditions.
def freeze_arg(arg)
case arg
when Array
arg.each { |a| freeze_arg(a) }
arg.freeze
when Hash
arg.each { |k,v| freeze_arg(k); freeze_arg(v) }
arg.freeze
when String
arg.freeze
end
arg
end
end
class V3FunctionWrapper
attr_accessor :func_name
def initialize(name, func)
@func_name = name
@func = func
end
# This method is used by the `run` matcher to trigger the function execution, and provides a uniform interface across all puppet versions.
def execute(*args)
if args.nil?
@func.call
else
@func.call(args)
end
end
# This method was formerly used by the `run` matcher to trigger the function execution, and provides puppet versions dependant interface.
def call(*args)
RSpec.deprecate("subject.call", :replacement => "is_expected.to run.with().and_raise_error(), or execute()")
if args.nil?
@func.call
else
@func.call(*args)
end
end
end
# (at least) rspec 3.5 doesn't seem to memoize `subject` when called from
# a before(:each) hook, so we need to memoize it ourselves.
def subject
@subject ||= find_function
end
def find_function(function_name = self.class.top_level_description)
with_vardir do
env = adapter.current_environment
if Puppet.version.to_f >= 4.0
context_overrides = compiler.context_overrides
func = nil
loaders = Puppet.lookup(:loaders)
Puppet.override(context_overrides, "rspec-test scope") do
func = V4FunctionWrapper.new(function_name, loaders.private_environment_loader.load(:function, function_name), context_overrides)
@scope = context_overrides[:global_scope]
end
return func if func.func
end
if Puppet::Parser::Functions.function(function_name)
V3FunctionWrapper.new(function_name, scope.method("function_#{function_name}".intern))
else
nil
end
end
end
def call_function(function_name, *args)
# function = find_function(function_name)
# function.execute(*args)
scope.call_function(function_name, args)
end
def scope
@scope ||= build_scope(compiler, nodename(:function))
end
def catalogue
@catalogue ||= compiler.catalog
end
def rspec_puppet_cleanup
@subject = nil
@catalogue = nil
@compiler = nil
@scope = nil
end
private
def compiler
@compiler ||= build_compiler
end
# get a compiler with an attached compiled catalog
#
# Builds a Puppet::Parser::Compiler for the test node, compiles any
# `pre_condition` code into the catalog, and (on Puppet >= 4) pushes the
# loaders and global scope into the Puppet context for later function
# lookup.
def build_compiler
  node_name = nodename(:function)
  fact_values = facts_hash(node_name)
  trusted_values = trusted_facts_hash(node_name)
  # Allow different Hiera configurations:
  HieraPuppet.instance_variable_set('@hiera', nil) if defined? HieraPuppet
  # if we specify a pre_condition, we should ensure that we compile that
  # code into a catalog that is accessible from the scope where the
  # function is called
  Puppet[:code] = pre_cond
  node_facts = Puppet::Node::Facts.new(node_name, fact_values.dup)
  node_options = {
    :parameters => fact_values,
    :facts => node_facts
  }
  stub_facts! fact_values
  node = build_node(node_name, node_options)
  # Puppet >= 4.3 reads trusted facts from the context, so push them
  # before compiling.
  if Puppet::Util::Package.versioncmp(Puppet.version, '4.3.0') >= 0
    Puppet.push_context(
      {
        :trusted_information => Puppet::Context::TrustedInformation.new('remote', node_name, trusted_values)
      },
      "Context for spec trusted hash"
    )
  end
  compiler = Puppet::Parser::Compiler.new(node)
  compiler.compile
  # Puppet >= 4 needs loaders and the compiled global scope available via
  # the context for function resolution (used by find_function).
  if Puppet::Util::Package.versioncmp(Puppet.version, '4.0.0') >= 0
    loaders = Puppet::Pops::Loaders.new(adapter.current_environment)
    Puppet.push_context(
      {
        :loaders => loaders,
        :global_scope => compiler.context_overrides[:global_scope]
      },
      "set globals")
  end
  compiler
end
# Build an evaluation scope appropriate for the running Puppet version:
# Puppet 4+ reuses the compiler's global scope; 2.6/2.7 must load all
# parser functions before constructing a scope; Puppet 3 takes the
# compiler positionally.
def build_scope(compiler, node_name)
  if Puppet.version.to_f >= 4.0
    return compiler.context_overrides[:global_scope]
  elsif Puppet.version =~ /^2\.[67]/
    # loadall should only be necessary prior to 3.x
    # Please note, loadall needs to happen first when creating a scope, otherwise
    # you might receive undefined method `function_*' errors
    Puppet::Parser::Functions.autoloader.loadall
    scope = Puppet::Parser::Scope.new(:compiler => compiler)
  else
    scope = Puppet::Parser::Scope.new(compiler)
  end
  # Give the scope a source and a parent (the compiler's top scope) before
  # returning it.
  scope.source = Puppet::Resource::Type.new(:node, node_name)
  scope.parent = compiler.topscope
  scope
end
# Build a Puppet::Node for +name+, attaching the current test environment.
#
# @param name [String] node name
# @param opts [Hash] extra options forwarded to Puppet::Node.new
# @return [Puppet::Node]
def build_node(name, opts = {})
  # Merge non-destructively: the previous implementation used `merge!`,
  # which silently mutated the caller's options hash.
  node_opts = opts.merge(:environment => adapter.current_environment)
  Puppet::Node.new(name, node_opts)
end
end
end
|
# Strict date/time coercion: blank strings become nil; anything else that
# reaches the ActiveRecord fallback parsers is rejected loudly instead of
# being silently mis-parsed.
ActiveRecord::ConnectionAdapters::Column.class_eval do
  class InvalidDateFormat < StandardError; end
  class InvalidDateTimeFormat < StandardError; end

  # Blank input -> nil; any other unparsed string raises.
  def self.fallback_string_to_date(string)
    return nil if StringUtils.blank?(string)
    raise InvalidDateFormat, string
  end

  # Blank input -> nil; any other unparsed string raises.
  def self.fallback_string_to_time(string)
    return nil if StringUtils.blank?(string)
    raise InvalidDateTimeFormat, string
  end
end
Allow date only formats for datetime instantiation
# Strict date/time coercion with one exception: a bare ISO date
# (yyyy-mm-dd) may be promoted to a midnight DateTime.
ActiveRecord::ConnectionAdapters::Column.class_eval do
  class InvalidDateFormat < StandardError; end
  class InvalidDateTimeFormat < StandardError; end

  # Blank input -> nil; any other unparsed string raises.
  def self.fallback_string_to_date(string)
    return nil if StringUtils.blank?(string)
    raise InvalidDateFormat, string
  end

  # Allow date-only formats for datetime instantiation; everything else
  # that reaches this fallback raises.
  def self.fallback_string_to_time(string)
    return nil if StringUtils.blank?(string)
    match = DateParser::ISO_DATE.match(string)
    raise InvalidDateTimeFormat, string unless match
    DateTime.new(match[1].to_i, match[2].to_i, match[3].to_i, 0, 0, 0)
  end
end
|
module SmartAnswer::Calculators
  # Calculator backing the "check if you need a UK visa" smart answer.
  #
  # Holds the user's answers (passport country slug, purpose of visit,
  # whether they are passing through UK border control) and exposes the
  # predicate methods the flow uses to pick an outcome. Country-group
  # membership is driven by the COUNTRY_GROUP_* slug lists at the bottom.
  class UkVisaCalculator
    include ActiveModel::Model
    # Passport country slug; write-only — read via the predicates below.
    attr_writer :passport_country
    attr_accessor :purpose_of_visit_answer
    attr_accessor :passing_through_uk_border_control_answer

    # --- Passport-country group membership -------------------------------
    def passport_country_in_eea?
      COUNTRY_GROUP_EEA.include?(@passport_country)
    end
    def passport_country_in_non_visa_national_list?
      COUNTRY_GROUP_NON_VISA_NATIONAL.include?(@passport_country)
    end
    def passport_country_in_visa_national_list?
      COUNTRY_GROUP_VISA_NATIONAL.include?(@passport_country)
    end
    # UKOT: UK overseas territories.
    def passport_country_in_ukot_list?
      COUNTRY_GROUP_UKOT.include?(@passport_country)
    end
    # DATV: direct airside transit visa list.
    def passport_country_in_datv_list?
      COUNTRY_GROUP_DATV.include?(@passport_country)
    end
    def passport_country_in_youth_mobility_scheme_list?
      COUNTRY_GROUP_YOUTH_MOBILITY_SCHEME.include?(@passport_country)
    end
    def passport_country_in_electronic_visa_waiver_list?
      COUNTRY_GROUP_ELECTRONIC_VISA_WAIVER.include?(@passport_country)
    end

    # --- Single-country special cases -------------------------------------
    def passport_country_in_b1_b2_visa_exception_list?
      @passport_country == 'syria'
    end
    def passport_country_is_israel?
      @passport_country == 'israel'
    end
    def passport_country_is_taiwan?
      @passport_country == 'taiwan'
    end
    def passport_country_is_venezuela?
      @passport_country == 'venezuela'
    end
    def passport_country_is_croatia?
      @passport_country == 'croatia'
    end
    def passport_country_is_china?
      @passport_country == 'china'
    end
    def passport_country_is_turkey?
      @passport_country == 'turkey'
    end
    def applicant_is_stateless_or_a_refugee?
      @passport_country == 'stateless-or-refugee'
    end

    # --- Purpose-of-visit predicates --------------------------------------
    def tourism_visit?
      purpose_of_visit_answer == 'tourism'
    end
    def work_visit?
      purpose_of_visit_answer == 'work'
    end
    def study_visit?
      purpose_of_visit_answer == 'study'
    end
    def transit_visit?
      purpose_of_visit_answer == 'transit'
    end
    def family_visit?
      purpose_of_visit_answer == 'family'
    end
    def marriage_visit?
      purpose_of_visit_answer == 'marriage'
    end
    def school_visit?
      purpose_of_visit_answer == 'school'
    end
    def medical_visit?
      purpose_of_visit_answer == 'medical'
    end
    def diplomatic_visit?
      purpose_of_visit_answer == 'diplomatic'
    end

    def passing_through_uk_border_control?
      passing_through_uk_border_control_answer == 'yes'
    end

    # Country slug lists used by the predicates above.
    EXCLUDE_COUNTRIES = %w(american-samoa british-antarctic-territory british-indian-ocean-territory french-guiana french-polynesia gibraltar guadeloupe holy-see martinique mayotte new-caledonia reunion st-pierre-and-miquelon the-occupied-palestinian-territories wallis-and-futuna western-sahara)
    COUNTRY_GROUP_UKOT = %w(anguilla bermuda british-dependent-territories-citizen british-overseas-citizen british-protected-person british-virgin-islands cayman-islands falkland-islands montserrat st-helena-ascension-and-tristan-da-cunha south-georgia-and-south-sandwich-islands turks-and-caicos-islands)
    COUNTRY_GROUP_NON_VISA_NATIONAL = %w(andorra antigua-and-barbuda argentina aruba australia bahamas barbados belize bonaire-st-eustatius-saba botswana brazil british-national-overseas brunei canada chile costa-rica curacao dominica timor-leste el-salvador grenada guatemala honduras hong-kong hong-kong-(british-national-overseas) israel japan kiribati south-korea macao malaysia maldives marshall-islands mauritius mexico micronesia monaco namibia nauru new-zealand nicaragua palau panama papua-new-guinea paraguay pitcairn-island st-kitts-and-nevis st-lucia st-maarten st-vincent-and-the-grenadines samoa san-marino seychelles singapore solomon-islands tonga trinidad-and-tobago tuvalu usa uruguay vanuatu vatican-city)
    COUNTRY_GROUP_VISA_NATIONAL = %w(stateless-or-refugee armenia azerbaijan bahrain benin bhutan bolivia bosnia-and-herzegovina burkina-faso cambodia cape-verde central-african-republic chad colombia comoros cuba djibouti dominican-republic ecuador equatorial-guinea fiji gabon georgia guyana haiti indonesia jordan kazakhstan north-korea kuwait kyrgyzstan laos madagascar mali montenegro mauritania morocco mozambique niger oman peru philippines qatar russia sao-tome-and-principe saudi-arabia suriname tajikistan taiwan thailand togo tunisia turkmenistan ukraine united-arab-emirates uzbekistan zambia)
    COUNTRY_GROUP_DATV = %w(afghanistan albania algeria angola bangladesh belarus burma burundi cameroon china congo cyprus-north democratic-republic-of-congo egypt eritrea ethiopia gambia ghana guinea guinea-bissau india iran iraq israel-provisional-passport cote-d-ivoire jamaica kenya kosovo lebanon lesotho liberia libya macedonia malawi moldova mongolia nepal nigeria palestinian-territories pakistan rwanda senegal serbia sierra-leone somalia south-africa south-sudan sri-lanka sudan swaziland syria tanzania turkey uganda venezuela vietnam yemen zimbabwe)
    COUNTRY_GROUP_EEA = %w(austria belgium bulgaria croatia cyprus czech-republic denmark estonia finland france germany greece hungary iceland ireland italy latvia liechtenstein lithuania luxembourg malta netherlands norway poland portugal romania slovakia slovenia spain sweden switzerland)
    COUNTRY_GROUP_YOUTH_MOBILITY_SCHEME = %w(australia canada japan monaco new-zealand hong-kong south-korea taiwan)
    COUNTRY_GROUP_ELECTRONIC_VISA_WAIVER = %w(oman qatar united-arab-emirates)
  end
end
Make passing_through_uk_border_control_answer write-only
There shouldn't be any reason to access this value directly from outside
this calculator object.
module SmartAnswer::Calculators
  # Calculator backing the "check if you need a UK visa" smart answer.
  #
  # Holds the user's answers (passport country slug, purpose of visit,
  # whether they are passing through UK border control) and exposes the
  # predicate methods the flow uses to pick an outcome. Country-group
  # membership is driven by the COUNTRY_GROUP_* slug lists at the bottom.
  class UkVisaCalculator
    include ActiveModel::Model
    # Answers are write-only where outside readers have no business reading
    # them directly; use the predicates below instead.
    attr_writer :passport_country
    attr_accessor :purpose_of_visit_answer
    attr_writer :passing_through_uk_border_control_answer

    # --- Passport-country group membership -------------------------------
    def passport_country_in_eea?
      COUNTRY_GROUP_EEA.include?(@passport_country)
    end
    def passport_country_in_non_visa_national_list?
      COUNTRY_GROUP_NON_VISA_NATIONAL.include?(@passport_country)
    end
    def passport_country_in_visa_national_list?
      COUNTRY_GROUP_VISA_NATIONAL.include?(@passport_country)
    end
    # UKOT: UK overseas territories.
    def passport_country_in_ukot_list?
      COUNTRY_GROUP_UKOT.include?(@passport_country)
    end
    # DATV: direct airside transit visa list.
    def passport_country_in_datv_list?
      COUNTRY_GROUP_DATV.include?(@passport_country)
    end
    def passport_country_in_youth_mobility_scheme_list?
      COUNTRY_GROUP_YOUTH_MOBILITY_SCHEME.include?(@passport_country)
    end
    def passport_country_in_electronic_visa_waiver_list?
      COUNTRY_GROUP_ELECTRONIC_VISA_WAIVER.include?(@passport_country)
    end

    # --- Single-country special cases -------------------------------------
    def passport_country_in_b1_b2_visa_exception_list?
      @passport_country == 'syria'
    end
    def passport_country_is_israel?
      @passport_country == 'israel'
    end
    def passport_country_is_taiwan?
      @passport_country == 'taiwan'
    end
    def passport_country_is_venezuela?
      @passport_country == 'venezuela'
    end
    def passport_country_is_croatia?
      @passport_country == 'croatia'
    end
    def passport_country_is_china?
      @passport_country == 'china'
    end
    def passport_country_is_turkey?
      @passport_country == 'turkey'
    end
    def applicant_is_stateless_or_a_refugee?
      @passport_country == 'stateless-or-refugee'
    end

    # --- Purpose-of-visit predicates --------------------------------------
    def tourism_visit?
      purpose_of_visit_answer == 'tourism'
    end
    def work_visit?
      purpose_of_visit_answer == 'work'
    end
    def study_visit?
      purpose_of_visit_answer == 'study'
    end
    def transit_visit?
      purpose_of_visit_answer == 'transit'
    end
    def family_visit?
      purpose_of_visit_answer == 'family'
    end
    def marriage_visit?
      purpose_of_visit_answer == 'marriage'
    end
    def school_visit?
      purpose_of_visit_answer == 'school'
    end
    def medical_visit?
      purpose_of_visit_answer == 'medical'
    end
    def diplomatic_visit?
      purpose_of_visit_answer == 'diplomatic'
    end

    # Reads the ivar directly since the answer is write-only.
    def passing_through_uk_border_control?
      @passing_through_uk_border_control_answer == 'yes'
    end

    # Country slug lists used by the predicates above.
    EXCLUDE_COUNTRIES = %w(american-samoa british-antarctic-territory british-indian-ocean-territory french-guiana french-polynesia gibraltar guadeloupe holy-see martinique mayotte new-caledonia reunion st-pierre-and-miquelon the-occupied-palestinian-territories wallis-and-futuna western-sahara)
    COUNTRY_GROUP_UKOT = %w(anguilla bermuda british-dependent-territories-citizen british-overseas-citizen british-protected-person british-virgin-islands cayman-islands falkland-islands montserrat st-helena-ascension-and-tristan-da-cunha south-georgia-and-south-sandwich-islands turks-and-caicos-islands)
    COUNTRY_GROUP_NON_VISA_NATIONAL = %w(andorra antigua-and-barbuda argentina aruba australia bahamas barbados belize bonaire-st-eustatius-saba botswana brazil british-national-overseas brunei canada chile costa-rica curacao dominica timor-leste el-salvador grenada guatemala honduras hong-kong hong-kong-(british-national-overseas) israel japan kiribati south-korea macao malaysia maldives marshall-islands mauritius mexico micronesia monaco namibia nauru new-zealand nicaragua palau panama papua-new-guinea paraguay pitcairn-island st-kitts-and-nevis st-lucia st-maarten st-vincent-and-the-grenadines samoa san-marino seychelles singapore solomon-islands tonga trinidad-and-tobago tuvalu usa uruguay vanuatu vatican-city)
    COUNTRY_GROUP_VISA_NATIONAL = %w(stateless-or-refugee armenia azerbaijan bahrain benin bhutan bolivia bosnia-and-herzegovina burkina-faso cambodia cape-verde central-african-republic chad colombia comoros cuba djibouti dominican-republic ecuador equatorial-guinea fiji gabon georgia guyana haiti indonesia jordan kazakhstan north-korea kuwait kyrgyzstan laos madagascar mali montenegro mauritania morocco mozambique niger oman peru philippines qatar russia sao-tome-and-principe saudi-arabia suriname tajikistan taiwan thailand togo tunisia turkmenistan ukraine united-arab-emirates uzbekistan zambia)
    COUNTRY_GROUP_DATV = %w(afghanistan albania algeria angola bangladesh belarus burma burundi cameroon china congo cyprus-north democratic-republic-of-congo egypt eritrea ethiopia gambia ghana guinea guinea-bissau india iran iraq israel-provisional-passport cote-d-ivoire jamaica kenya kosovo lebanon lesotho liberia libya macedonia malawi moldova mongolia nepal nigeria palestinian-territories pakistan rwanda senegal serbia sierra-leone somalia south-africa south-sudan sri-lanka sudan swaziland syria tanzania turkey uganda venezuela vietnam yemen zimbabwe)
    COUNTRY_GROUP_EEA = %w(austria belgium bulgaria croatia cyprus czech-republic denmark estonia finland france germany greece hungary iceland ireland italy latvia liechtenstein lithuania luxembourg malta netherlands norway poland portugal romania slovakia slovenia spain sweden switzerland)
    COUNTRY_GROUP_YOUTH_MOBILITY_SCHEME = %w(australia canada japan monaco new-zealand hong-kong south-korea taiwan)
    COUNTRY_GROUP_ELECTRONIC_VISA_WAIVER = %w(oman qatar united-arab-emirates)
  end
end
|
require 'libvirt/ffi/connection'
require 'libvirt/domain'
# Thin wrapper over the libvirt C API (via FFI) for a hypervisor connection.
#
# NOTE(review): the original referenced FFI::Connect in some methods,
# FFI::Connection in others, and bare Connect in #close; all calls are
# normalized to FFI::Connection to match the required binding — confirm
# against the module name in libvirt/ffi/connection.
class Connection
  # @param url [String] libvirt connection URI, e.g. "qemu:///system"
  def initialize(url)
    @url = url
    @connection = nil
  end

  # Open the connection.
  # @raise [Libvirt::ConnectionError] when the hypervisor cannot be reached
  def open
    # virConnectOpen returns a virConnectPtr; NULL signals failure. The
    # previous code dereferenced the result with read_pointer and tested it
    # for truthiness (pointers are always truthy), so failures were missed.
    @connection = FFI::Connection.virConnectOpen(@url)
    raise(Libvirt::ConnectionError, "Failed to open #{@url}") if @connection.nil? || @connection.null?
    true
  end

  # True until #open has succeeded (or after #release).
  def closed?
    @connection.nil? || @connection.null?
  end

  # Hypervisor driver name.
  def type
    type = FFI::Connection.virConnectGetType(@connection)
    raise(RetrieveError, "Couldn't retrieve connection type") unless type
    type
  end

  # Hypervisor version as an unsigned long.
  def version
    version_ptr = FFI::MemoryPointer.new(:ulong)
    # Pass the out-pointer we actually read below (previously an undefined
    # `connection_version` local was passed).
    result = FFI::Connection.virConnectGetVersion(@connection, version_ptr)
    raise(RetrieveError, "Couldn't retrieve connection version") if result < 0
    version_ptr.get_ulong(0)
  end

  # Hostname of the hypervisor host.
  def hostname
    hostname = FFI::Connection.virConnectGetHostname(@connection)
    raise(RetrieveError, "Couldn't retrieve connection hostname") unless hostname
    hostname # previously returned `type` by mistake
  end

  # Canonical connection URI.
  def uri
    uri = FFI::Connection.virConnectGetURI(@connection)
    raise(RetrieveError, "Couldn't retrieve connection URI") unless uri
    uri
  end

  # Maximum vCPUs supported for +hypervisor_type+.
  def max_vcpus(hypervisor_type = 'xen')
    # Previously called virConnectGetURI; the C entry point for this is
    # virConnectGetMaxVcpus, which returns -1 on error.
    vcpus = FFI::Connection.virConnectGetMaxVcpus(@connection, hypervisor_type)
    raise(RetrieveError, "Couldn't retrieve connection max vcpus") if vcpus.nil? || vcpus < 0
    vcpus
  end

  # Node (host machine) information struct.
  def get_node_info
    node_info_ptr = FFI::MemoryPointer.new(:char, NodeInfo.size)
    result = FFI::Connection.virNodeGetInfo(@connection, node_info_ptr)
    raise(RetrieveError, "Couldn't retrieve connection node info") if result < 0
    NodeInfo.new(node_info_ptr)
  end

  # XML capabilities document for this hypervisor.
  def capabilities
    connection_capabilities = FFI::Connection.virConnectGetCapabilities(@connection)
    raise(RetrieveError, "Couldn't retrieve connection capabilities") unless connection_capabilities
    connection_capabilities
  end

  # Number of active (running) domains.
  def num_of_domains
    result = FFI::Connection.virConnectNumOfDomains(@connection)
    raise(RetrieveError, "Couldn't retrieve connection num of domains") if result < 0
    result
  end

  # List active domains.
  # NOTE(review): per the libvirt C API virConnectListDomains fills an
  # array of ints (domain IDs), not strings — confirm the binding before
  # relying on get_array_of_string here.
  def list_domains
    domains_count = num_of_domains
    return [] if domains_count == 0
    array_names_ptr = FFI::MemoryPointer.new(:pointer, domains_count)
    result = FFI::Connection.virConnectListDomains(@connection, array_names_ptr, domains_count)
    raise(RetrieveError, "Couldn't retrieve connection list of domain names") if result < 0
    string_ptr = array_names_ptr.read_pointer
    string_ptr.null? ? [] : string_ptr.get_array_of_string(0, domains_count).compact
  end

  # Number of defined-but-inactive domains.
  def num_of_defined_domains
    result = FFI::Connection.virConnectNumOfDefinedDomains(@connection)
    raise(RetrieveError, "Couldn't retrieve connection num of defined domains") if result < 0
    result
  end

  # List names of defined-but-inactive domains.
  def list_defined_domains
    domains_count = num_of_defined_domains
    return [] if domains_count == 0
    array_names_ptr = FFI::MemoryPointer.new(:pointer, domains_count)
    result = FFI::Connection.virConnectListDefinedDomains(@connection, array_names_ptr, domains_count)
    raise(RetrieveError, "Couldn't retrieve connection list of defined domain names") if result < 0
    string_ptr = array_names_ptr.read_pointer
    string_ptr.null? ? [] : string_ptr.get_array_of_string(0, domains_count).compact
  end

  # Launch a new domain from an XML description.
  def create_domain_linux(xml)
    # Previously called virConnectListDefinedDomains and tested an
    # undefined `result` local; virDomainCreateLinux is the creation call.
    domain = FFI::Connection.virDomainCreateLinux(@connection, xml, 0)
    raise(Error, "Couldn't create linux domain") if domain.null?
    Domain.new(domain.read_pointer)
  end

  def get_domain_by_name(name)
    domain = FFI::Connection.virDomainLookupByName(@connection, name)
    raise(RetrieveError, "Can not find domain with name '#{name}'") if domain.null?
    Domain.new(domain.read_pointer)
  end

  def get_domain_by_id(id)
    domain = FFI::Connection.virDomainLookupByID(@connection, id)
    raise(RetrieveError, "Can not find domain with id '#{id}'") if domain.null?
    Domain.new(domain.read_pointer)
  end

  def get_domain_by_uuid(uuid)
    # Previously called virDomainLookupByID with a UUID string; use the
    # UUID-string lookup entry point.
    domain = FFI::Connection.virDomainLookupByUUIDString(@connection, uuid)
    raise(RetrieveError, "Can not find domain with uuid '#{uuid}'") if domain.null?
    Domain.new(domain.read_pointer)
  end

  # Define (but do not start) a domain from XML.
  def define_domain_xml(xml)
    domain = FFI::Connection.virDomainDefineXML(@connection, xml)
    raise(DefinitionError, "Can not define domain with xml:\n#{xml}") if domain.null?
    Domain.new(domain.read_pointer)
  end

  # Close the connection (was calling bare `Connect`, a NameError).
  def close
    result = FFI::Connection.virConnectClose(@connection)
    raise(Libvirt::SystemCallError, "Connection close failed") if result < 0
  end

  # Free the underlying pointer and mark the connection closed.
  def release
    @connection.free if @connection
    @connection = nil
  end
end
Fix bug where the connection couldn't be opened
require 'libvirt/ffi/connection'
require 'libvirt/domain'
# Thin wrapper over the libvirt C API (via FFI) for a hypervisor connection.
#
# NOTE(review): the original referenced FFI::Connect in some methods,
# FFI::Connection in others, and bare Connect in #close; all calls are
# normalized to FFI::Connection to match the required binding — confirm
# against the module name in libvirt/ffi/connection.
class Connection
  # @param url [String] libvirt connection URI, e.g. "qemu:///system"
  def initialize(url)
    @url = url
    # NULL pointer sentinel: #closed? is true until #open succeeds.
    @connection = FFI::Pointer.new(0)
  end

  # Open the connection; a NULL virConnectPtr signals failure.
  # @raise [Libvirt::ConnectionError] when the hypervisor cannot be reached
  def open
    @connection = FFI::Connection.virConnectOpen(@url)
    raise(Libvirt::ConnectionError, "Failed to open #{@url}") if @connection.null?
    true
  end

  def closed?
    @connection.null?
  end

  # Hypervisor driver name.
  def type
    type = FFI::Connection.virConnectGetType(@connection)
    raise(RetrieveError, "Couldn't retrieve connection type") unless type
    type
  end

  # Hypervisor version as an unsigned long.
  def version
    version_ptr = FFI::MemoryPointer.new(:ulong)
    # Pass the out-pointer we actually read below (previously an undefined
    # `connection_version` local was passed).
    result = FFI::Connection.virConnectGetVersion(@connection, version_ptr)
    raise(RetrieveError, "Couldn't retrieve connection version") if result < 0
    version_ptr.get_ulong(0)
  end

  # Hostname of the hypervisor host.
  def hostname
    hostname = FFI::Connection.virConnectGetHostname(@connection)
    raise(RetrieveError, "Couldn't retrieve connection hostname") unless hostname
    hostname # previously returned `type` by mistake
  end

  # Canonical connection URI.
  def uri
    uri = FFI::Connection.virConnectGetURI(@connection)
    raise(RetrieveError, "Couldn't retrieve connection URI") unless uri
    uri
  end

  # Maximum vCPUs supported for +hypervisor_type+.
  def max_vcpus(hypervisor_type = 'xen')
    # Previously called virConnectGetURI; the C entry point for this is
    # virConnectGetMaxVcpus, which returns -1 on error.
    vcpus = FFI::Connection.virConnectGetMaxVcpus(@connection, hypervisor_type)
    raise(RetrieveError, "Couldn't retrieve connection max vcpus") if vcpus.nil? || vcpus < 0
    vcpus
  end

  # Node (host machine) information struct.
  def get_node_info
    node_info_ptr = FFI::MemoryPointer.new(:char, NodeInfo.size)
    result = FFI::Connection.virNodeGetInfo(@connection, node_info_ptr)
    raise(RetrieveError, "Couldn't retrieve connection node info") if result < 0
    NodeInfo.new(node_info_ptr)
  end

  # XML capabilities document for this hypervisor.
  def capabilities
    connection_capabilities = FFI::Connection.virConnectGetCapabilities(@connection)
    raise(RetrieveError, "Couldn't retrieve connection capabilities") unless connection_capabilities
    connection_capabilities
  end

  # Number of active (running) domains.
  def num_of_domains
    result = FFI::Connection.virConnectNumOfDomains(@connection)
    raise(RetrieveError, "Couldn't retrieve connection num of domains") if result < 0
    result
  end

  # List active domains.
  # NOTE(review): per the libvirt C API virConnectListDomains fills an
  # array of ints (domain IDs), not strings — confirm the binding before
  # relying on get_array_of_string here.
  def list_domains
    domains_count = num_of_domains
    return [] if domains_count == 0
    array_names_ptr = FFI::MemoryPointer.new(:pointer, domains_count)
    result = FFI::Connection.virConnectListDomains(@connection, array_names_ptr, domains_count)
    raise(RetrieveError, "Couldn't retrieve connection list of domain names") if result < 0
    string_ptr = array_names_ptr.read_pointer
    string_ptr.null? ? [] : string_ptr.get_array_of_string(0, domains_count).compact
  end

  # Number of defined-but-inactive domains.
  def num_of_defined_domains
    result = FFI::Connection.virConnectNumOfDefinedDomains(@connection)
    raise(RetrieveError, "Couldn't retrieve connection num of defined domains") if result < 0
    result
  end

  # List names of defined-but-inactive domains.
  def list_defined_domains
    domains_count = num_of_defined_domains
    return [] if domains_count == 0
    array_names_ptr = FFI::MemoryPointer.new(:pointer, domains_count)
    result = FFI::Connection.virConnectListDefinedDomains(@connection, array_names_ptr, domains_count)
    raise(RetrieveError, "Couldn't retrieve connection list of defined domain names") if result < 0
    string_ptr = array_names_ptr.read_pointer
    string_ptr.null? ? [] : string_ptr.get_array_of_string(0, domains_count).compact
  end

  # Launch a new domain from an XML description.
  def create_domain_linux(xml)
    # Previously called virConnectListDefinedDomains and tested an
    # undefined `result` local; virDomainCreateLinux is the creation call.
    domain = FFI::Connection.virDomainCreateLinux(@connection, xml, 0)
    raise(Error, "Couldn't create linux domain") if domain.null?
    Domain.new(domain.read_pointer)
  end

  def get_domain_by_name(name)
    domain = FFI::Connection.virDomainLookupByName(@connection, name)
    raise(RetrieveError, "Can not find domain with name '#{name}'") if domain.null?
    Domain.new(domain.read_pointer)
  end

  def get_domain_by_id(id)
    domain = FFI::Connection.virDomainLookupByID(@connection, id)
    raise(RetrieveError, "Can not find domain with id '#{id}'") if domain.null?
    Domain.new(domain.read_pointer)
  end

  def get_domain_by_uuid(uuid)
    # Previously called virDomainLookupByID with a UUID string; use the
    # UUID-string lookup entry point.
    domain = FFI::Connection.virDomainLookupByUUIDString(@connection, uuid)
    raise(RetrieveError, "Can not find domain with uuid '#{uuid}'") if domain.null?
    Domain.new(domain.read_pointer)
  end

  # Define (but do not start) a domain from XML.
  def define_domain_xml(xml)
    domain = FFI::Connection.virDomainDefineXML(@connection, xml)
    raise(DefinitionError, "Can not define domain with xml:\n#{xml}") if domain.null?
    Domain.new(domain.read_pointer)
  end

  # Close the connection (was calling bare `Connect`, a NameError).
  def close
    result = FFI::Connection.virConnectClose(@connection)
    raise(Libvirt::SystemCallError, "Connection close failed") if result < 0
  end
end
|
module Listy
  # View helpers that render collections as simple HTML lists and trees.
  module ViewHelpers
    # Render +collection+ as a nested <ul> tree described by +spec+
    # ({ display_method_name:, children:, child: {...} }).
    # +empty_message+ overrides the default text when non-blank.
    def listy_tree(collection, spec, empty_message)
      if collection.present?
        html = "<div class='listy-tree'>" + create_listy_tree(collection, spec, "") + "</div>"
      else
        html = "There are no entries in this tree."
        # Only override the default when a real message was supplied
        # (previously an empty string blanked out the default).
        html = empty_message unless empty_message.blank?
      end
      raw html
    end

    # Recursive worker for #listy_tree: appends one <ul> level onto +html+
    # and returns the accumulated markup.
    def create_listy_tree(collection, spec, html)
      html += "<ul>"
      collection.each do |element|
        html += "<li>"
        html += "<div class='listy-tree-list-header'>#{element.try(spec[:display_method_name])}</div>"
        # Assign (not append) the recursive result: the recursion receives
        # and returns the accumulated html, so `+=` here duplicated every
        # previously rendered level.
        html = create_listy_tree(element.try(spec[:children]), spec[:child], html) if !spec[:children].nil?
        html += "</li>"
      end
      html += "</ul>"
    end

    # Render +collection+ as a <ul> of links, hiding entries past
    # +show_more_index+ behind a "Show More" toggle.
    def listy_links(collection, display_method_name, css_class, show_more_index = 5, empty_message = "")
      if collection.present?
        html = "<ul class='" + css_class + "'>"
        show_more = false
        collection.each_with_index do |element, index|
          if index > show_more_index && !show_more
            html += "<div class='listy-show-more-list' style='display:none'>"
            show_more = true
          end
          html += "<li>" + link_to(element.try(display_method_name), element) + "</li>"
        end
        if show_more
          html += "</div>"
          html += link_to("Show More", "#", :class => "listy-show-more-link button orange-button")
        end
        html += "</ul>"
      else
        html = "There are no entries in this list."
        html = empty_message unless empty_message.blank?
      end
      raw html
    end

    # Render +collection+ as +number_of_columns+ side-by-side link lists.
    def multi_column_listy_links(collection, display_method_name, css_class, number_of_columns)
      html = ""
      if collection.present?
        number_of_entries_per_column = collection.size / number_of_columns
        percentage_width_of_column = 100 / number_of_columns
        Rails.logger.info(collection.size.to_s + " " + number_of_entries_per_column.to_s)
        (0..number_of_columns - 1).each do |i|
          start_index = i * number_of_entries_per_column
          # Exclusive upper bound so columns don't repeat the first entry of
          # the next column (the old inclusive range overlapped by one); the
          # last column absorbs any remainder from the integer division.
          end_index = i == number_of_columns - 1 ? collection.size : (i + 1) * number_of_entries_per_column
          next if start_index >= end_index
          html += "<div style='float:left;width:" + percentage_width_of_column.to_s + "%'>"
          # Was calling the undefined `list_of_links`; the helper is
          # #listy_links. 1000 effectively disables the "Show More" toggle.
          html += listy_links(collection[start_index...end_index], display_method_name, css_class, 1000, "")
          html += "</div>"
        end
        html += "<div style='clear:both'></div>"
      else
        html = "There are no entries in this list."
      end
      raw html
    end
  end
end
Fixed the recursive call assignment in the tree helper.
module Listy
  # View helpers that render collections as simple HTML lists and trees.
  module ViewHelpers
    # Render +collection+ as a nested <ul> tree described by +spec+
    # ({ display_method_name:, children:, child: {...} }).
    # +empty_message+ overrides the default text when non-blank.
    def listy_tree(collection, spec, empty_message)
      if collection.present?
        html = "<div class='listy-tree'>" + create_listy_tree(collection, spec, "", 0) + "</div>"
      else
        html = "There are no entries in this tree."
        # Only override the default when a real message was supplied
        # (previously an empty string blanked out the default).
        html = empty_message unless empty_message.blank?
      end
      raw html
    end

    # Recursive worker for #listy_tree: appends one <ul> for depth +level+
    # onto +html+ and returns the accumulated markup.
    def create_listy_tree(collection, spec, html, level)
      html += "<ul class='listy-tree-level-#{level}'>"
      collection.each do |element|
        html += "<li>"
        html += "<div class='listy-tree-list-header'>#{element.try(spec[:display_method_name])}</div>"
        # Assign (not append): the recursion receives and returns the
        # accumulated html.
        html = create_listy_tree(element.try(spec[:children]), spec[:child], html, level + 1) if !spec[:children].nil?
        html += "</li>"
      end
      html += "</ul>"
    end

    # Render +collection+ as a <ul> of links, hiding entries past
    # +show_more_index+ behind a "Show More" toggle.
    def listy_links(collection, display_method_name, css_class, show_more_index = 5, empty_message = "")
      if collection.present?
        html = "<ul class='" + css_class + "'>"
        show_more = false
        collection.each_with_index do |element, index|
          if index > show_more_index && !show_more
            html += "<div class='listy-show-more-list' style='display:none'>"
            show_more = true
          end
          html += "<li>" + link_to(element.try(display_method_name), element) + "</li>"
        end
        if show_more
          html += "</div>"
          html += link_to("Show More", "#", :class => "listy-show-more-link button orange-button")
        end
        html += "</ul>"
      else
        html = "There are no entries in this list."
        html = empty_message unless empty_message.blank?
      end
      raw html
    end

    # Render +collection+ as +number_of_columns+ side-by-side link lists.
    def multi_column_listy_links(collection, display_method_name, css_class, number_of_columns)
      html = ""
      if collection.present?
        number_of_entries_per_column = collection.size / number_of_columns
        percentage_width_of_column = 100 / number_of_columns
        Rails.logger.info(collection.size.to_s + " " + number_of_entries_per_column.to_s)
        (0..number_of_columns - 1).each do |i|
          start_index = i * number_of_entries_per_column
          # Exclusive upper bound so columns don't repeat the first entry of
          # the next column (the old inclusive range overlapped by one); the
          # last column absorbs any remainder from the integer division.
          end_index = i == number_of_columns - 1 ? collection.size : (i + 1) * number_of_entries_per_column
          next if start_index >= end_index
          html += "<div style='float:left;width:" + percentage_width_of_column.to_s + "%'>"
          # Was calling the undefined `list_of_links`; the helper is
          # #listy_links. 1000 effectively disables the "Show More" toggle.
          html += listy_links(collection[start_index...end_index], display_method_name, css_class, 1000, "")
          html += "</div>"
        end
        html += "<div style='clear:both'></div>"
      else
        html = "There are no entries in this list."
      end
      raw html
    end
  end
end
require 'uri'
module Lita
  module Handlers
    # Looks up reddit posts for URLs mentioned in chat: ambiently for a
    # configurable list of domains, or on demand via `reddit <url>`.
    class Snoo < Handler
      # Domains whose URLs are looked up without an explicit command.
      config :domains, type: Array, default: ["imgur.com"]

      route(/(#{URI.regexp})/, :url_search, command: false)
      route(/^(?:reddit|snoo)\s+(#{URI.regexp})/i, :url_search, command: true,
        help: {t("help.snoo_url_key") => t("help.snoo_url_value")})
      route(/^\/?r\/(\S+)\s*(.*)/i, :subreddit, command: true,
        # Use the namespaced translation key, consistent with the route
        # above (was t("snoo_sub_value")).
        help: {t("help.snoo_sub_key") => t("help.snoo_sub_value")})

      # Handle both ambient URLs and explicit `reddit <url>` commands.
      def url_search(response)
        domains = /#{config.domains.map { |d| Regexp.escape(d) }.join("|")}/
        # Drop any #fragment so the reddit search matches the canonical URL.
        url = response.matches.first.first.split("#").first
        # Lita::Message#command?
        if response.message.command?
          response.reply api_search(url, true)
        elsif domains =~ url
          post = api_search(url)
          response.reply post if post
        end
      end

      # TODO: subreddit lookup not implemented yet.
      def subreddit(response)
      end

      private

      # Search reddit for posts linking to +url+. Returns the formatted top
      # post, a "not found" message when +command+ is true, otherwise nil.
      def api_search(url, command = false)
        http_response = http.get(
          "https://www.reddit.com/search.json",
          q: "url:'#{url}'",
          sort: "top",
          t: "all"
        )
        posts = MultiJson.load(http_response.body)["data"]["children"]
        if posts.empty?
          if command
            return "No reddit posts found for #{url}"
          else
            return nil
          end
        end
        format_post(posts.first)
      end

      # Render one reddit post hash as a single chat line.
      def format_post(post)
        title = post["data"]["title"]
        author = post["data"]["author"]
        subreddit = post["data"]["subreddit"]
        date = Time.at(post["data"]["created"]).to_datetime.strftime("%F")
        # Thousands-separate the score.
        score = post["data"]["score"].to_s.reverse.gsub(/(\d{3})(?=\d)/, '\\1,').reverse
        ups = post["data"]["ups"].to_f
        downs = post["data"]["downs"].to_f
        # Guard against divide-by-zero (previously rendered "NaN" for posts
        # with no recorded votes).
        total_votes = ups + downs
        percent = total_votes.zero? ? "0" : "%.f" % (ups / total_votes * 100)
        id = post["data"]["id"]
        nsfw = post["data"]["over_18"] ? "[NSFW] " : ""
        "#{nsfw}#{title} - #{author} on /r/#{subreddit}, #{date} (#{score} points, #{percent}% upvoted) http://redd.it/#{id}"
      end
    end

    Lita.register_handler(Snoo)
  end
end
lib: Split #url_search into #ambient_url and #url
require 'uri'
module Lita
  module Handlers
    # Looks up reddit posts for URLs mentioned in chat: ambiently for a
    # configurable list of domains, or on demand via `reddit <url>`.
    class Snoo < Handler
      # Domains whose URLs are looked up without an explicit command.
      config :domains, type: Array, default: ["imgur.com"]

      route(/(#{URI.regexp})/, :ambient_url, command: false)
      route(/^(?:reddit|snoo)\s+(#{URI.regexp})/i, :url, command: true,
        help: {t("help.snoo_url_key") => t("help.snoo_url_value")})
      route(/^\/?r\/(\S+)\s*(.*)/i, :subreddit, command: true,
        # Use the namespaced translation key, consistent with the route
        # above (was t("snoo_sub_value")).
        help: {t("help.snoo_sub_key") => t("help.snoo_sub_value")})

      # Non-command URLs: only reply for configured domains, and stay
      # silent when nothing is found.
      def ambient_url(response)
        domains = /#{config.domains.map { |d| Regexp.escape(d) }.join("|")}/
        url = response.matches.first.first.split("#").first
        if domains =~ url
          post = api_search("url:'#{url}'")
          response.reply post if post
        end
      end

      # Explicit `reddit <url>` command: always reply, even when nothing is
      # found.
      def url(response)
        url = response.matches.first.first.split("#").first
        post = api_search("url:'#{url}'")
        if post
          response.reply post
        else
          response.reply "No reddit posts found for #{url}"
        end
      end

      # TODO: subreddit lookup not implemented yet.
      def subreddit(response)
      end

      private

      # Run a reddit search for +query+ and return the formatted top post,
      # or nil when there are no results.
      def api_search(query)
        http_response = http.get(
          "https://www.reddit.com/search.json",
          q: query,
          sort: "top",
          t: "all"
        )
        posts = MultiJson.load(http_response.body)["data"]["children"]
        return nil if posts.empty?
        format_post(posts.first)
      end

      # TODO: stub for subreddit info lookup.
      def api_subreddit()
      end

      # TODO: stub for searching within a subreddit.
      def api_subreddit_search()
      end

      # Render one reddit post hash as a single chat line.
      def format_post(post)
        title = post["data"]["title"]
        author = post["data"]["author"]
        subreddit = post["data"]["subreddit"]
        date = Time.at(post["data"]["created"]).to_datetime.strftime("%F")
        # Thousands-separate the score.
        score = post["data"]["score"].to_s.reverse.gsub(/(\d{3})(?=\d)/, '\\1,').reverse
        ups = post["data"]["ups"].to_f
        downs = post["data"]["downs"].to_f
        # Guard against divide-by-zero (previously rendered "NaN" for posts
        # with no recorded votes).
        total_votes = ups + downs
        percent = total_votes.zero? ? "0" : "%.f" % (ups / total_votes * 100)
        id = post["data"]["id"]
        nsfw = post["data"]["over_18"] ? "[NSFW] " : ""
        "#{nsfw}#{title} - #{author} on /r/#{subreddit}, #{date} (#{score} points, #{percent}% upvoted) http://redd.it/#{id}"
      end
    end

    Lita.register_handler(Snoo)
  end
end
|
module Locationer
  # Gem version string (semantic versioning).
  VERSION = "0.0.1"
end
Bump the version number
module Locationer
  # Gem version string (semantic versioning).
  VERSION = "0.0.2"
end
|
require 'active_support/core_ext/object/blank'
require 'active_support/version'
require 'logger'
require 'socket'
module Loga
  # Holds all Loga settings and builds the configured logger.
  #
  # Options are merged with increasing precedence:
  # defaults < framework options < environment (LOGA_FORMAT) < user options.
  class Configuration
    # Settings exposed as plain accessors.
    DEFAULT_KEYS = %i(
      device
      filter_parameters
      format
      host
      level
      service_name
      service_version
      sync
    ).freeze
    attr_accessor(*DEFAULT_KEYS)
    attr_reader :logger
    private_constant :DEFAULT_KEYS

    # @param user_options [Hash] highest-precedence options
    # @param framework_options [Hash] defaults supplied by a framework
    #   integration (e.g. Rails/Sinatra)
    # @raise [ConfigurationError] when service_name or device is blank
    def initialize(user_options = {}, framework_options = {})
      options = default_options.merge(framework_options)
                               .merge(environment_options)
                               .merge(user_options)
      DEFAULT_KEYS.each do |attribute|
        public_send("#{attribute}=", options[attribute])
      end
      raise ConfigurationError, 'Service name cannot be blank' if service_name.blank?
      raise ConfigurationError, 'Device cannot be blank' if device.blank?
      # TODO: @service_version = compute_service_version
      initialize_logger
    end

    # Normalize the configured format to a Symbol (e.g. "gelf" -> :gelf).
    def format=(name)
      @format = name.to_s.to_sym
    end

    def service_name=(name)
      @service_name = name.to_s.strip
    end

    def service_version=(name)
      @service_version = name.to_s.strip
    end

    # True when output is structured GELF rather than plain text.
    def structured?
      format == :gelf
    end

    private

    # Baseline settings; lowest precedence in the merge chain.
    def default_options
      {
        device: STDOUT,
        filter_parameters: [],
        format: :simple,
        host: hostname,
        level: :info,
        sync: true,
      }
    end

    # Environment override: honour LOGA_FORMAT when set and non-blank.
    def environment_options
      ENV['LOGA_FORMAT'].blank? ? {} : { format: ENV['LOGA_FORMAT'] }
    end

    # Currently unused (see TODO in #initialize).
    def compute_service_version
      RevisionStrategy.call(service_version)
    end

    # Build the underlying Logger on the configured device and wrap it for
    # tagged logging.
    def initialize_logger
      device.sync = sync
      logger = Logger.new(device)
      logger.formatter = assign_formatter
      logger.level = constantized_log_level
      @logger = TaggedLogging.new(logger)
    end

    # Map the configured level (e.g. :info) onto Logger's integer constant.
    def constantized_log_level
      Logger.const_get(level.to_s.upcase)
    end

    # rubocop:disable Lint/RescueException
    # Deliberately broad rescue: hostname lookup must never break logger
    # construction.
    def hostname
      Socket.gethostname
    rescue Exception
      'unknown.host'
    end
    # rubocop:enable Lint/RescueException

    # GELF gets the structured Loga formatter; anything else falls back to
    # ActiveSupport's simple formatter.
    def assign_formatter
      if format == :gelf
        Formatter.new(
          service_name: service_name,
          service_version: service_version,
          host: host,
        )
      else
        active_support_simple_formatter
      end
    end

    # Require the version-appropriate ActiveSupport simple formatter.
    def active_support_simple_formatter
      case ActiveSupport::VERSION::MAJOR
      when 3
        require 'active_support/core_ext/logger'
        Logger::SimpleFormatter.new
      when 4..5
        require 'active_support/logger'
        ActiveSupport::Logger::SimpleFormatter.new
      else
        raise Loga::ConfigurationError,
              "ActiveSupport #{ActiveSupport::VERSION::MAJOR} is unsupported"
      end
    end
  end
end
Improve environment options method in Loga::Configuration
require 'active_support/core_ext/object/blank'
require 'active_support/version'
require 'logger'
require 'socket'
module Loga
  # Central configuration object: merges defaults, framework options,
  # environment overrides and user options, validates them, and builds
  # the tagged logger used by the rest of the library.
  class Configuration
    DEFAULT_KEYS = %i(
      device
      filter_parameters
      format
      host
      level
      service_name
      service_version
      sync
    ).freeze
    attr_accessor(*DEFAULT_KEYS)
    attr_reader :logger
    private_constant :DEFAULT_KEYS

    # Merge precedence (lowest to highest): defaults < framework <
    # environment < user. Raises ConfigurationError when service_name or
    # device end up blank.
    def initialize(user_options = {}, framework_options = {})
      options = default_options.merge(framework_options)
                .merge(environment_options)
                .merge(user_options)
      DEFAULT_KEYS.each do |attribute|
        public_send("#{attribute}=", options[attribute])
      end
      raise ConfigurationError, 'Service name cannot be blank' if service_name.blank?
      raise ConfigurationError, 'Device cannot be blank' if device.blank?
      # TODO: @service_version = compute_service_version
      initialize_logger
    end

    # Normalizes and stores the log format as a Symbol (e.g. "gelf" -> :gelf).
    def format=(name)
      @format = name.to_s.to_sym
    end

    # Stores the service name as a whitespace-trimmed String.
    def service_name=(name)
      @service_name = name.to_s.strip
    end

    # Stores the service version as a whitespace-trimmed String.
    def service_version=(name)
      @service_version = name.to_s.strip
    end

    # True when logs are emitted as structured GELF rather than plain text.
    def structured?
      format == :gelf
    end

    private

    # Baseline configuration; overridden by framework/environment/user options.
    def default_options
      {
        device: STDOUT,
        filter_parameters: [],
        format: :simple,
        host: hostname,
        level: :info,
        sync: true,
      }
    end

    # LOGA_FORMAT overrides the format when present (non-blank);
    # otherwise contributes nothing to the option merge.
    def environment_options
      { format: ENV['LOGA_FORMAT'].presence }.delete_if { |_, v| v.nil? }
    end

    # Resolves the effective service version via RevisionStrategy.
    # NOTE(review): apparently unused -- see the TODO in #initialize.
    def compute_service_version
      RevisionStrategy.call(service_version)
    end

    # Builds the underlying Logger on the configured device and wraps it
    # in TaggedLogging.
    def initialize_logger
      device.sync = sync
      logger = Logger.new(device)
      logger.formatter = assign_formatter
      logger.level = constantized_log_level
      @logger = TaggedLogging.new(logger)
    end

    # Converts the configured :level (e.g. :info) into Logger::INFO etc.
    def constantized_log_level
      Logger.const_get(level.to_s.upcase)
    end

    # rubocop:disable Lint/RescueException
    # Best-effort hostname lookup; rescues Exception deliberately so that
    # configuration never fails because of a system/resolver error.
    def hostname
      Socket.gethostname
    rescue Exception
      'unknown.host'
    end
    # rubocop:enable Lint/RescueException

    # Structured GELF formatter when format is :gelf, otherwise
    # ActiveSupport's SimpleFormatter.
    def assign_formatter
      if format == :gelf
        Formatter.new(
          service_name: service_name,
          service_version: service_version,
          host: host,
        )
      else
        active_support_simple_formatter
      end
    end

    # Returns a SimpleFormatter appropriate for the installed ActiveSupport
    # major version (3, 4 or 5); raises ConfigurationError for anything else.
    def active_support_simple_formatter
      case ActiveSupport::VERSION::MAJOR
      when 3
        require 'active_support/core_ext/logger'
        Logger::SimpleFormatter.new
      when 4..5
        require 'active_support/logger'
        ActiveSupport::Logger::SimpleFormatter.new
      else
        raise Loga::ConfigurationError,
              "ActiveSupport #{ActiveSupport::VERSION::MAJOR} is unsupported"
      end
    end
  end
end
|
module Lumberg
  module Whm
    # Client for the WHM JSON API: builds authenticated Faraday requests
    # and massages WHM's inconsistent responses into uniform
    # { :success, :message, :params } hashes.
    class Server < Base
      # WHM server hostname
      attr_accessor :host
      # Remote access hash
      attr_accessor :hash
      # Base URL to the WHM API
      attr_accessor :base_url
      # Enable Basic Authentication with API - default false
      attr_accessor :basic_auth
      # API username - :default => root
      attr_accessor :user
      # Raw HTTP response from WHM
      attr_accessor :raw_response
      # WHM parsed response
      attr_reader :response
      # HTTP Params used for API requests
      attr_accessor :params
      # WHM API function name
      attr_reader :function
      # Use ssl?
      attr_accessor :ssl
      # HTTP SSL verify mode
      attr_accessor :ssl_verify
      # Returned params to transform to booleans
      attr_accessor :boolean_params
      # Force response type...ARG!
      attr_accessor :force_response_type
      #
      # ==== Required
      # * <tt>:host</tt> - WHM server hostname
      # * <tt>:hash</tt> - WHM remote access hash (whitespace is stripped)
      #
      # ==== Optional
      # * <tt>:user</tt> - API username, defaults to 'root'
      # * <tt>:ssl</tt> - use HTTPS on port 2087; defaults to true when not given
      # * <tt>:basic_auth</tt>
      def initialize(options)
        Args.new(options) do |c|
          c.requires :host, :hash
          c.optionals :user, :ssl, :basic_auth
        end
        @ssl_verify ||= false
        @ssl = options.delete(:ssl)
        @host = options.delete(:host)
        @hash = format_hash(options.delete(:hash))
        @user = (options.has_key?(:user) ? options.delete(:user) : 'root')
        @basic_auth = options.delete(:basic_auth)
        @base_url = format_url(options)
      end

      # Executes a WHM API function. +options+ become query params; the
      # special :key option selects which top-level response key holds the
      # result. Yields self before the request when a block is given so
      # callers can tweak request state (e.g. boolean_params).
      def perform_request(function, options = {})
        # WHM sometimes uses different keys for the result hash
        @key = options.delete(:key) || 'result'
        @function = function
        @params = format_query(options)
        yield self if block_given?
        do_request(@base_url, function, @params)
      end

      def get_hostname
        perform_request('gethostname', {:key => 'hostname'})
      end

      def version
        perform_request('version', {:key => 'version'})
      end

      # Current load average; success is inferred from the presence of the
      # :one (1-minute) figure in the parsed params.
      def load_average
        @force_response_type = :query
        result = perform_request('loadavg')
        result[:success] = result[:params].has_key?(:one)
        result
      end

      def system_load_average(options = {})
        Args.new(options) do |c|
          c.requires "api.version".to_sym
        end
        perform_request('systemloadavg', options.merge(:key => 'data'))
      end

      def languages
        perform_request('getlanglist', {:key => 'lang'})
      end

      def list_ips
        perform_request('listips', {:key => 'result'})
      end

      def add_ip(options = {})
        Args.new(options) do |c|
          c.requires :ip, :netmask
        end
        perform_request('addip', options.merge(:key => 'addip'))
      end

      def delete_ip(options = {})
        Args.new(options) do |c|
          c.requires :ip
          c.optionals :ethernetdev
          c.booleans :skipifshutdown
        end
        perform_request('delip', options.merge(:key => 'delip'))
      end

      def set_hostname(options = {})
        Args.new(options) do |c|
          c.requires :hostname
        end
        perform_request('sethostname', options.merge(:key => 'sethostname'))
      end

      def set_resolvers(options = {})
        Args.new(options) do |c|
          c.requires :nameserver1
          c.optionals :nameserver2, :nameserver3
        end
        perform_request('setresolvers', options.merge(:key => 'setresolvers'))
      end

      def show_bandwidth(options = {})
        Args.new(options) do |c|
          c.optionals :month, :year, :showres, :search, :searchtype
        end
        perform_request('showbw', options.merge(:key => 'bandwidth'))
      end

      # Sets a non-volatile (persistent) variable on the server.
      def set_nv_var(options = {})
        Args.new(options) do |c|
          c.requires :key
          c.optionals :value
        end
        perform_request('nvset', options.merge(:key => 'nvset'))
      end

      # Reads a non-volatile (persistent) variable from the server.
      def get_nv_var(options = {})
        Args.new(options) do |c|
          c.requires :key
          c.optionals :value
        end
        perform_request('nvget', options.merge(:key => 'nvget'))
      end

      def reboot
        perform_request('reboot', {:key => "reboot"})
      end

      private

      # Performs the GET via Faraday and hands the parsed JSON body to
      # format_response.
      def do_request(uri, function, params)
        @response = Faraday.new(:url => uri, :ssl => ssl_options) do |c|
          c.basic_auth @user, @hash
          c.params = params
          c.request :url_encoded
          c.response :logger, create_logger_instance
          # TODO: c.response :skip_bad_headers
          # TODO: c.response :whm_errors
          c.response :json
          c.adapter :net_http
        end.get(function).body
        # TODO: Move to middleware
        format_response
      end

      # Converts Ruby booleans into the 1/0 flags the WHM API expects.
      def format_query(hash)
        hash.inject({}) do |params, (key, value)|
          value = 1 if value === true
          value = 0 if value === false
          params[key] = value
          params
        end
      end

      # Logs to $stderr when :debug is literally true, otherwise to
      # whatever IO/path is configured under Lumberg.configuration[:debug].
      def create_logger_instance
        Logger.new(Lumberg.configuration[:debug].is_a?(TrueClass) ? $stderr : Lumberg.configuration[:debug])
      end

      # Verifies the WHM certificate against the bundled CA file only when
      # @ssl_verify is enabled; otherwise disables verification entirely.
      def ssl_options
        if @ssl_verify
          {
            :verify_mode => OpenSSL::SSL::VERIFY_PEER,
            :ca_file => File.join(Lumberg::base_path, "cacert.pem")
          }
        else
          {
            :verify_mode => OpenSSL::SSL::VERIFY_NONE
          }
        end
      end

      # TODO: Move to middleware
      # Normalizes the raw WHM response into
      # { :success, :message, :params } based on the detected response type.
      def format_response
        success, message, params = false, nil, {}
        case response_type
        when :action
          success, message, params = format_action_response
        when :query
          success, message, params = format_query_response
        when :error
          message = @response['error']
        when :unknown
          message = "Unknown error occurred #{@response.inspect}"
        end
        params = Whm::to_bool(params, @boolean_params) unless @boolean_params.nil?
        # Reset this for subsequent requests
        @force_response_type = nil
        {:success => success, :message => message, :params => Whm::symbolize_keys(params)}
      end

      # TODO: Move to middleware
      # Extracts success/message/params from an "action" style response,
      # working around the several shapes WHM can return under @key.
      def format_action_response
        # Some API methods ALSO return a 'status' as
        # part of a result. We only use this value if it's
        # part of the results hash
        item = @response[@key]
        unless item.is_a?(Array) || item.is_a?(Hash)
          res = {@key => item}
          success, message = true, ""
        else
          result = nil
          if item.first.is_a?(Hash)
            result = item.first
            res = (item.size > 1 ? item.dup : item.first.dup)
          else
            res = item.dup
            # more hacks for WHM silly API
            if @response.has_key?('result')
              result_node = @response['result']
              node_with_key_status = result_node.is_a?(Hash) && result_node.has_key?('status')
              result = (node_with_key_status ? result_node : result_node.first)
            else
              res.delete('status')
              res.delete('statusmsg')
            end
          end
          unless result.nil?
            success = result['status'].to_i == 1
            message = result['statusmsg']
          end
        end
        return success, message, res
      end

      # TODO: Move to middleware
      # Extracts success/message from a "query" style response; everything
      # else in the body is returned as params.
      def format_query_response
        success = @response['status'].to_i == 1
        message = @response['statusmsg']
        # returns the rest as a params arg
        res = @response.dup
        res.delete('status')
        res.delete('statusmsg')
        return success, message, res
      end

      # Classifies the parsed response so format_response knows how to
      # unpack it; @force_response_type (when set) wins.
      def response_type
        if !@force_response_type.nil?
          @force_response_type
        elsif !@response.respond_to?(:has_key?)
          :unknown
        elsif @response.has_key?('error')
          :error
        elsif @response.has_key?(@key)
          :action
        elsif @response.has_key?('status') && @response.has_key?('statusmsg')
          :query
        else
          :unknown
        end
      end

      # Builds the JSON API base URL; SSL defaults to true (port 2087),
      # plain HTTP uses port 2086.
      # NOTE(review): the options argument is unused here -- confirm before
      # removing, since initialize passes the leftover options through.
      def format_url(options = {})
        @ssl = true if @ssl.nil?
        port = (@ssl ? 2087 : 2086)
        proto = (@ssl ? 'https' : 'http')
        "#{proto}://#{@host}:#{port}/json-api/"
      end

      # Strips whitespace/newlines from the remote access hash; raises
      # unless a String was supplied.
      def format_hash(hash)
        raise Lumberg::WhmArgumentError.new("Missing WHM hash") unless hash.is_a?(String)
        hash.gsub(/\n|\s/, '')
      end

      # Creates WHM::Whatever.new(:server => @server)
      # automagically
      def auto_accessors
        [:account, :dns, :reseller]
      end

      # Lazily builds and memoizes the helper objects listed in
      # auto_accessors.
      # NOTE(review): respond_to_missing? is not overridden, so
      # respond_to?(:account) will report false for these accessors.
      def method_missing(meth, *args, &block)
        if auto_accessors.include?(meth.to_sym)
          ivar = instance_variable_get("@#{meth}")
          if ivar.nil?
            constant = Whm.const_get(meth.to_s.capitalize)
            return instance_variable_set("@#{meth}", constant.new(:server => self))
          else
            return ivar
          end
        else
          super
        end
      end
    end
  end
end
Removed unused attr
module Lumberg
  module Whm
    # Client for the WHM JSON API: builds authenticated Faraday requests
    # and massages WHM's inconsistent responses into uniform
    # { :success, :message, :params } hashes.
    class Server < Base
      # WHM server hostname
      attr_accessor :host
      # Remote access hash
      attr_accessor :hash
      # Base URL to the WHM API
      attr_accessor :base_url
      # Enable Basic Authentication with API - default false
      attr_accessor :basic_auth
      # API username - :default => root
      attr_accessor :user
      # WHM parsed response
      attr_reader :response
      # HTTP Params used for API requests
      attr_accessor :params
      # WHM API function name
      attr_reader :function
      # Use ssl?
      attr_accessor :ssl
      # HTTP SSL verify mode
      attr_accessor :ssl_verify
      # Returned params to transform to booleans
      attr_accessor :boolean_params
      # Force response type...ARG!
      attr_accessor :force_response_type
      #
      # ==== Required
      # * <tt>:host</tt> - WHM server hostname
      # * <tt>:hash</tt> - WHM remote access hash (whitespace is stripped)
      #
      # ==== Optional
      # * <tt>:user</tt> - API username, defaults to 'root'
      # * <tt>:ssl</tt> - use HTTPS on port 2087; defaults to true when not given
      # * <tt>:basic_auth</tt>
      def initialize(options)
        Args.new(options) do |c|
          c.requires :host, :hash
          c.optionals :user, :ssl, :basic_auth
        end
        @ssl_verify ||= false
        @ssl = options.delete(:ssl)
        @host = options.delete(:host)
        @hash = format_hash(options.delete(:hash))
        @user = (options.has_key?(:user) ? options.delete(:user) : 'root')
        @basic_auth = options.delete(:basic_auth)
        @base_url = format_url(options)
      end

      # Executes a WHM API function. +options+ become query params; the
      # special :key option selects which top-level response key holds the
      # result. Yields self before the request when a block is given so
      # callers can tweak request state (e.g. boolean_params).
      def perform_request(function, options = {})
        # WHM sometimes uses different keys for the result hash
        @key = options.delete(:key) || 'result'
        @function = function
        @params = format_query(options)
        yield self if block_given?
        do_request(@base_url, function, @params)
      end

      def get_hostname
        perform_request('gethostname', {:key => 'hostname'})
      end

      def version
        perform_request('version', {:key => 'version'})
      end

      # Current load average; success is inferred from the presence of the
      # :one (1-minute) figure in the parsed params.
      def load_average
        @force_response_type = :query
        result = perform_request('loadavg')
        result[:success] = result[:params].has_key?(:one)
        result
      end

      def system_load_average(options = {})
        Args.new(options) do |c|
          c.requires "api.version".to_sym
        end
        perform_request('systemloadavg', options.merge(:key => 'data'))
      end

      def languages
        perform_request('getlanglist', {:key => 'lang'})
      end

      def list_ips
        perform_request('listips', {:key => 'result'})
      end

      def add_ip(options = {})
        Args.new(options) do |c|
          c.requires :ip, :netmask
        end
        perform_request('addip', options.merge(:key => 'addip'))
      end

      def delete_ip(options = {})
        Args.new(options) do |c|
          c.requires :ip
          c.optionals :ethernetdev
          c.booleans :skipifshutdown
        end
        perform_request('delip', options.merge(:key => 'delip'))
      end

      def set_hostname(options = {})
        Args.new(options) do |c|
          c.requires :hostname
        end
        perform_request('sethostname', options.merge(:key => 'sethostname'))
      end

      def set_resolvers(options = {})
        Args.new(options) do |c|
          c.requires :nameserver1
          c.optionals :nameserver2, :nameserver3
        end
        perform_request('setresolvers', options.merge(:key => 'setresolvers'))
      end

      def show_bandwidth(options = {})
        Args.new(options) do |c|
          c.optionals :month, :year, :showres, :search, :searchtype
        end
        perform_request('showbw', options.merge(:key => 'bandwidth'))
      end

      # Sets a non-volatile (persistent) variable on the server.
      def set_nv_var(options = {})
        Args.new(options) do |c|
          c.requires :key
          c.optionals :value
        end
        perform_request('nvset', options.merge(:key => 'nvset'))
      end

      # Reads a non-volatile (persistent) variable from the server.
      def get_nv_var(options = {})
        Args.new(options) do |c|
          c.requires :key
          c.optionals :value
        end
        perform_request('nvget', options.merge(:key => 'nvget'))
      end

      def reboot
        perform_request('reboot', {:key => "reboot"})
      end

      private

      # Performs the GET via Faraday and hands the parsed JSON body to
      # format_response.
      def do_request(uri, function, params)
        @response = Faraday.new(:url => uri, :ssl => ssl_options) do |c|
          c.basic_auth @user, @hash
          c.params = params
          c.request :url_encoded
          c.response :logger, create_logger_instance
          # TODO: c.response :skip_bad_headers
          # TODO: c.response :whm_errors
          c.response :json
          c.adapter :net_http
        end.get(function).body
        # TODO: Move to middleware
        format_response
      end

      # Converts Ruby booleans into the 1/0 flags the WHM API expects.
      def format_query(hash)
        hash.inject({}) do |params, (key, value)|
          value = 1 if value === true
          value = 0 if value === false
          params[key] = value
          params
        end
      end

      # Logs to $stderr when :debug is literally true, otherwise to
      # whatever IO/path is configured under Lumberg.configuration[:debug].
      def create_logger_instance
        Logger.new(Lumberg.configuration[:debug].is_a?(TrueClass) ? $stderr : Lumberg.configuration[:debug])
      end

      # Verifies the WHM certificate against the bundled CA file only when
      # @ssl_verify is enabled; otherwise disables verification entirely.
      def ssl_options
        if @ssl_verify
          {
            :verify_mode => OpenSSL::SSL::VERIFY_PEER,
            :ca_file => File.join(Lumberg::base_path, "cacert.pem")
          }
        else
          {
            :verify_mode => OpenSSL::SSL::VERIFY_NONE
          }
        end
      end

      # TODO: Move to middleware
      # Normalizes the raw WHM response into
      # { :success, :message, :params } based on the detected response type.
      def format_response
        success, message, params = false, nil, {}
        case response_type
        when :action
          success, message, params = format_action_response
        when :query
          success, message, params = format_query_response
        when :error
          message = @response['error']
        when :unknown
          message = "Unknown error occurred #{@response.inspect}"
        end
        params = Whm::to_bool(params, @boolean_params) unless @boolean_params.nil?
        # Reset this for subsequent requests
        @force_response_type = nil
        {:success => success, :message => message, :params => Whm::symbolize_keys(params)}
      end

      # TODO: Move to middleware
      # Extracts success/message/params from an "action" style response,
      # working around the several shapes WHM can return under @key.
      def format_action_response
        # Some API methods ALSO return a 'status' as
        # part of a result. We only use this value if it's
        # part of the results hash
        item = @response[@key]
        unless item.is_a?(Array) || item.is_a?(Hash)
          res = {@key => item}
          success, message = true, ""
        else
          result = nil
          if item.first.is_a?(Hash)
            result = item.first
            res = (item.size > 1 ? item.dup : item.first.dup)
          else
            res = item.dup
            # more hacks for WHM silly API
            if @response.has_key?('result')
              result_node = @response['result']
              node_with_key_status = result_node.is_a?(Hash) && result_node.has_key?('status')
              result = (node_with_key_status ? result_node : result_node.first)
            else
              res.delete('status')
              res.delete('statusmsg')
            end
          end
          unless result.nil?
            success = result['status'].to_i == 1
            message = result['statusmsg']
          end
        end
        return success, message, res
      end

      # TODO: Move to middleware
      # Extracts success/message from a "query" style response; everything
      # else in the body is returned as params.
      def format_query_response
        success = @response['status'].to_i == 1
        message = @response['statusmsg']
        # returns the rest as a params arg
        res = @response.dup
        res.delete('status')
        res.delete('statusmsg')
        return success, message, res
      end

      # Classifies the parsed response so format_response knows how to
      # unpack it; @force_response_type (when set) wins.
      def response_type
        if !@force_response_type.nil?
          @force_response_type
        elsif !@response.respond_to?(:has_key?)
          :unknown
        elsif @response.has_key?('error')
          :error
        elsif @response.has_key?(@key)
          :action
        elsif @response.has_key?('status') && @response.has_key?('statusmsg')
          :query
        else
          :unknown
        end
      end

      # Builds the JSON API base URL; SSL defaults to true (port 2087),
      # plain HTTP uses port 2086.
      # NOTE(review): the options argument is unused here -- confirm before
      # removing, since initialize passes the leftover options through.
      def format_url(options = {})
        @ssl = true if @ssl.nil?
        port = (@ssl ? 2087 : 2086)
        proto = (@ssl ? 'https' : 'http')
        "#{proto}://#{@host}:#{port}/json-api/"
      end

      # Strips whitespace/newlines from the remote access hash; raises
      # unless a String was supplied.
      def format_hash(hash)
        raise Lumberg::WhmArgumentError.new("Missing WHM hash") unless hash.is_a?(String)
        hash.gsub(/\n|\s/, '')
      end

      # Creates WHM::Whatever.new(:server => @server)
      # automagically
      def auto_accessors
        [:account, :dns, :reseller]
      end

      # Lazily builds and memoizes the helper objects listed in
      # auto_accessors.
      # NOTE(review): respond_to_missing? is not overridden, so
      # respond_to?(:account) will report false for these accessors.
      def method_missing(meth, *args, &block)
        if auto_accessors.include?(meth.to_sym)
          ivar = instance_variable_get("@#{meth}")
          if ivar.nil?
            constant = Whm.const_get(meth.to_s.capitalize)
            return instance_variable_set("@#{meth}", constant.new(:server => self))
          else
            return ivar
          end
        else
          super
        end
      end
    end
  end
end
Pod::Spec.new do |s|
  s.name     = "touchin-rateme"
  # summary and homepage are required by `pod spec lint`.
  s.summary  = "RateMe please table cells, alerts and screens"
  s.homepage = "https://github.com/wayd-labs/touchin-rateme"
  s.version  = "0.6.1"
  s.license  = 'MIT'
  s.author   = { "alarin" => "me@alarin.ru" }
  # Fixed: source previously pointed at an unfilled <GITHUB_USERNAME>
  # placeholder and the wrong repository (touchin-analytics).
  s.source   = { :git => "https://github.com/wayd-labs/touchin-rateme.git", :tag => s.version.to_s }
  s.platform = :ios, '7.0'
  s.requires_arc = true
  s.public_header_files = 'Pod/Classes/*.h'
  s.source_files = 'Pod/Classes'
  # s.resources = 'Pod/Assets'
  s.resource_bundles = {
    'TIRateMe' => ['Pod/Assets/TIRateMeCell.xib', 'Pod/Assets/*.lproj']
  }
  s.frameworks = 'UIKit'
  s.dependency 'touchin-trivia'
  s.dependency 'touchin-analytics/CoreIOS'
end
[+] summary and homepage
Pod::Spec.new do |s|
  s.name     = "touchin-rateme"
  # Fixed typo in user-facing summary ("alers" -> "alerts").
  s.summary  = "RateMe please table cells, alerts and screens"
  # homepage is a web URL, not a git remote, so it should not end in ".git".
  s.homepage = "https://github.com/wayd-labs/touchin-rateme"
  s.version  = "0.6.1"
  s.license  = 'MIT'
  s.author   = { "alarin" => "me@alarin.ru" }
  s.source   = { :git => "https://github.com/wayd-labs/touchin-rateme.git", :tag => s.version.to_s }
  s.platform = :ios, '7.0'
  s.requires_arc = true
  s.public_header_files = 'Pod/Classes/*.h'
  s.source_files = 'Pod/Classes'
  # s.resources = 'Pod/Assets'
  s.resource_bundles = {
    'TIRateMe' => ['Pod/Assets/TIRateMeCell.xib', 'Pod/Assets/*.lproj']
  }
  s.frameworks = 'UIKit'
  s.dependency 'touchin-trivia'
  s.dependency 'touchin-analytics/CoreIOS'
end
|
# Version info for the MagicEnum gem.
module MagicEnum
  module Version
    MAJOR = 1
    MINOR = 0
    PATCH = 0
    BUILD = 'beta1' # pre-release tag; nil on final releases
    # Assemble "MAJOR.MINOR.PATCH[.BUILD]", dropping BUILD when nil.
    parts = [MAJOR, MINOR, PATCH, BUILD].compact
    STRING = parts.join('.')
  end
end
Version bump - 1.0.0 release!
# Version info for the MagicEnum gem.
module MagicEnum
  module Version
    MAJOR = 1
    MINOR = 0
    PATCH = 0
    BUILD = nil # pre-release tag; nil on final releases
    # Assemble "MAJOR.MINOR.PATCH[.BUILD]", dropping BUILD when nil.
    parts = [MAJOR, MINOR, PATCH, BUILD].compact
    STRING = parts.join('.')
  end
end
|
# Current release of the Manifestly gem.
module Manifestly
  VERSION = '2.3.0'
end
2.3.1
# Current release of the Manifestly gem.
module Manifestly
  VERSION = '2.3.1'
end
|
# Current (pre-release) version of the Manifestly gem.
module Manifestly
  VERSION = '2.1.0.beta01'
end
2.2.0
# Current release of the Manifestly gem.
module Manifestly
  VERSION = '2.2.0'
end
|
require 'rubygems'
require 'logger'
require 'rest_client'
require 'rexml/document'
require 'uri'
require 'active_support'
module MediaWiki
class Gateway
attr_reader :log
# Set up a MediaWiki::Gateway for a given MediaWiki installation
#
# [url] Path to API of target MediaWiki (eg. "http://en.wikipedia.org/w/api.php")
# [options] Hash of options
#
# Options:
# [:ignorewarnings] Log API warnings and invalid page titles, instead throwing MediaWiki::APIError
# [:limit] Maximum number of results returned per search (see http://www.mediawiki.org/wiki/API:Query_-_Lists#Limits), defaults to the MediaWiki default of 500.
# [:loglevel] Log level to use, defaults to Logger::WARN. Set to Logger::DEBUG to dump every request and response to the log.
# [:maxlag] Maximum allowed server lag (see http://www.mediawiki.org/wiki/Manual:Maxlag_parameter), defaults to 5 seconds.
# [:retry_count] Number of times to try before giving up if MediaWiki returns 503 Service Unavailable, defaults to 3 (original request plus two retries).
# [:retry_delay] Seconds to wait before retry if MediaWiki returns 503 Service Unavailable, defaults to 10 seconds.
def initialize(url, options={})
  # User-supplied options win over these defaults.
  default_options = {
    :limit => 500,
    :loglevel => Logger::WARN,
    :maxlag => 5,
    :retry_count => 3,
    :retry_delay => 10
  }
  @options = default_options.merge(options)
  @wiki_url = url
  # All logging goes to STDERR at the configured level.
  @log = Logger.new(STDERR)
  @log.level = @options[:loglevel]
  @headers = { "User-Agent" => "MediaWiki::Gateway/#{MediaWiki::VERSION}" }
  @cookies = {}
end
attr_reader :base_url, :cookies
# Login to MediaWiki
#
# [username] Username
# [password] Password
# [domain] Domain for authentication plugin logins (eg. LDAP), optional -- defaults to 'local' if not given
#
# Throws MediaWiki::Unauthorized if login fails
def login(username, password, domain = 'local')
  form_data = {'action' => 'login', 'lgname' => username, 'lgpassword' => password, 'lgdomain' => domain}
  make_api_request(form_data)
  # Credentials are kept on the instance after a successful login.
  # NOTE(review): presumably for session re-establishment; they are
  # stored in plain text -- confirm this is acceptable.
  @password = password
  @username = username
end
# Fetch MediaWiki page in MediaWiki format. Does not follow redirects.
#
# [page_title] Page title to fetch
#
# Returns content of page as string ("" for an existing but empty page),
# nil if the page does not exist.
def get(page_title)
  form_data = {'action' => 'query', 'prop' => 'revisions', 'rvprop' => 'content', 'titles' => page_title}
  page = make_api_request(form_data).first.elements["query/pages/page"]
  if valid_page? page
    page.elements["revisions/rev"].text || ""
  end
end
# Fetch latest revision ID of a MediaWiki page. Does not follow redirects.
#
# [page_title] Page title to fetch
#
# Returns revision ID as a string, nil if the page does not exist.
def revision(page_title)
  # rvlimit=1 restricts the query to the most recent revision only.
  form_data = {'action' => 'query', 'prop' => 'revisions', 'rvprop' => 'ids', 'rvlimit' => 1, 'titles' => page_title}
  page = make_api_request(form_data).first.elements["query/pages/page"]
  if valid_page? page
    page.elements["revisions/rev"].attributes["revid"]
  end
end
# Render a MediaWiki page as HTML
#
# [page_title] Page title to fetch
# [options] Hash of additional options
#
# Options:
# * [:linkbase] supply a String to prefix all internal (relative) links with. '/wiki/' is assumed to be the base of a relative link
# * [:noeditsections] strips all edit-links if set to +true+
# * [:noimages] strips all +img+ tags from the rendered text if set to +true+
#
# Returns rendered page as string, or nil if the page does not exist
def render(page_title, options = {})
  form_data = {'action' => 'parse', 'page' => page_title}
  valid_options = %w(linkbase noeditsections noimages)
  # Check options
  options.keys.each{|opt| raise ArgumentError.new("Unknown option '#{opt}'") unless valid_options.include?(opt.to_s)}
  rendered = nil
  parsed = make_api_request(form_data).first.elements["parse"]
  # revid == '0' indicates a missing page; leave rendered as nil.
  if parsed.attributes["revid"] != '0'
    # Strip HTML comments from the parser output.
    rendered = parsed.elements["text"].text.gsub(/<!--(.|\s)*?-->/, '')
    # OPTIMIZE: unify the keys in +options+ like symbolize_keys! but w/o
    if options["linkbase"] or options[:linkbase]
      linkbase = options["linkbase"] || options[:linkbase]
      rendered = rendered.gsub(/\shref="\/wiki\/([\w\(\)_\-\.%\d:,]*)"/, ' href="' + linkbase + '/wiki/\1"')
    end
    if options["noeditsections"] or options[:noeditsections]
      rendered = rendered.gsub(/<span class="editsection">\[.+\]<\/span>/, '')
    end
    if options["noimages"] or options[:noimages]
      # NOTE(review): /<img.*\/>/ is greedy and can consume everything
      # between the first <img and the last /> on a line -- confirm intent.
      rendered = rendered.gsub(/<img.*\/>/, '')
    end
  end
  rendered
end
# Create a new page, or overwrite an existing one
#
# [title] Page title to create or overwrite, string
# [content] Content for the page, string
# [options] Hash of additional options
#
# Options:
# * [:overwrite] Allow overwriting existing pages
# * [:summary] Edit summary for history, string
# * [:token] Use this existing edit token instead requesting a new one (useful for bulk loads)
def create(title, content, options={})
  form_data = {'action' => 'edit', 'title' => title, 'text' => content, 'summary' => (options[:summary] || ""), 'token' => get_token('edit', title)}
  # Without :overwrite, 'createonly' makes the API fail on existing pages.
  form_data['createonly'] = "" unless options[:overwrite]
  make_api_request(form_data)
end
# Edit page
#
# Same options as create, but defaults :overwrite to true so existing
# pages are updated (and created if they don't exist already). An
# explicit :overwrite in +options+ still takes precedence.
def edit(title, content, options={})
  merged = { :overwrite => true }.merge(options)
  create(title, content, merged)
end
# Move a page to a new title
#
# [from] Old page name
# [to] New page name
# [options] Hash of additional options
#
# Options:
# * [:movesubpages] Move associated subpages
# * [:movetalk] Move associated talkpages
# * [:noredirect] Do not create a redirect page from old name. Requires the 'suppressredirect' user right, otherwise MW will silently ignore the option and create the redirect anyway.
# * [:reason] Reason for move
# * [:watch] Add page and any redirect to watchlist
# * [:unwatch] Remove page and any redirect from watchlist
def move(from, to, options={})
  valid_options = %w(movesubpages movetalk noredirect reason watch unwatch)
  options.keys.each{|opt| raise ArgumentError.new("Unknown option '#{opt}'") unless valid_options.include?(opt.to_s)}
  # The move token is requested against the source page.
  form_data = options.merge({'action' => 'move', 'from' => from, 'to' => to, 'token' => get_token('move', from)})
  make_api_request(form_data)
end
# Delete one page. (MediaWiki API does not support deleting multiple pages at a time.)
#
# [title] Title of page to delete
#
# Requires delete rights; the delete token is fetched per page.
def delete(title)
  form_data = {'action' => 'delete', 'title' => title, 'token' => get_token('delete', title)}
  make_api_request(form_data)
end
# Undelete all revisions of one page.
#
# [title] Title of page to undelete
#
# Returns number of revisions undeleted, or zero if nothing to undelete
def undelete(title)
  # get_undelete_token returns nil when there are no deleted revisions.
  # NOTE(review): get_undelete_token is defined elsewhere in this class.
  token = get_undelete_token(title)
  if token
    form_data = {'action' => 'undelete', 'title' => title, 'token' => token }
    make_api_request(form_data).first.elements["undelete"].attributes["revisions"].to_i
  else
    0 # No revisions to undelete
  end
end
# Get a list of matching page titles in a namespace
#
# [key] Search key, matched as a prefix (^key.*). May contain or equal a namespace, defaults to main (namespace 0) if none given.
# [options] Optional hash of additional options, eg. { 'apfilterredir' => 'nonredirects' }. See http://www.mediawiki.org/wiki/API:Allpages
#
# Returns array of page titles (empty if no matches)
def list(key, options = {})
  titles = []
  apfrom = nil
  # "Namespace:Key" -> key, namespace; a bare key leaves namespace nil.
  key, namespace = key.split(":", 2).reverse
  namespace = namespaces_by_prefix[namespace] || 0
  # Paginate until the API stops returning an apfrom continuation value.
  begin
    form_data = options.merge(
      {'action' => 'query',
      'list' => 'allpages',
      'apfrom' => apfrom,
      'apprefix' => key,
      'aplimit' => @options[:limit],
      'apnamespace' => namespace})
    res, apfrom = make_api_request(form_data, '//query-continue/allpages/@apfrom')
    titles += REXML::XPath.match(res, "//p").map { |x| x.attributes["title"] }
  end while apfrom
  titles
end
# Get a list of pages that link to a target page
#
# [title] Link target page
# [filter] "all" links (default), "redirects" only, or "nonredirects" (plain links only)
#
# Returns array of page titles (empty if no matches)
def backlinks(title, filter = "all")
  titles = []
  blcontinue = nil
  # Paginate until the API stops returning a blcontinue value.
  begin
    form_data =
      {'action' => 'query',
      'list' => 'backlinks',
      'bltitle' => title,
      'blfilterredir' => filter,
      'bllimit' => @options[:limit] }
    form_data['blcontinue'] = blcontinue if blcontinue
    res, blcontinue = make_api_request(form_data, '//query-continue/backlinks/@blcontinue')
    titles += REXML::XPath.match(res, "//bl").map { |x| x.attributes["title"] }
  end while blcontinue
  titles
end
# Get a list of pages with matching content in given namespaces
#
# [key] Search key
# [namespaces] Array of namespace names to search (defaults to main only); a single name may be passed as a bare string. The argument is not modified.
# [limit] Maximum number of hits to ask for (defaults to 500; note that Wikimedia Foundation wikis allow only 50 for normal users)
#
# Returns array of page titles (empty if no matches)
def search(key, namespaces=nil, limit=@options[:limit])
  titles = []
  offset = nil
  form_data = { 'action' => 'query',
    'list' => 'search',
    'srwhat' => 'text',
    'srsearch' => key,
    'srlimit' => limit
  }
  if namespaces
    namespaces = [ namespaces ] unless namespaces.kind_of? Array
    # Use non-destructive map: map! mutated the caller's array in place.
    form_data['srnamespace'] = namespaces.map { |ns| namespaces_by_prefix[ns] }.join('|')
  end
  # Paginate until the API stops returning an sroffset continuation value.
  begin
    form_data['sroffset'] = offset if offset
    res, offset = make_api_request(form_data, '//query-continue/search/@sroffset')
    titles += REXML::XPath.match(res, "//p").map { |x| x.attributes["title"] }
  end while offset
  titles
end
# Upload a file, or get the status of pending uploads. Several
# methods are available:
#
# * Upload file contents directly.
# * Have the MediaWiki server fetch a file from a URL, using the
#   "url" parameter
#
# Requires Mediawiki 1.16+
#
# Arguments:
# * [path] Path to file to upload. Set to nil if uploading from URL.
# * [options] Hash of additional options
#
# Note that queries using session keys must be done in the same login
# session as the query that originally returned the key (i.e. do not
# log out and then log back in).
#
# Options:
# * 'filename'       - Target filename (defaults to local name if not given), options[:target] is alias for this.
# * 'comment'        - Upload comment. Also used as the initial page text for new files if "text" is not specified.
# * 'text'           - Initial page text for new files
# * 'watch'          - Watch the page
# * 'ignorewarnings' - Ignore any warnings
# * 'url'            - Url to fetch the file from. Set path to nil if you want to use this.
#
# Deprecated but still supported options:
# * :description     - Description of this file. Used as 'text'.
# * :target          - Target filename, same as 'filename'.
# * :summary         - Edit summary for history. Used as 'comment'. Also used as 'text' if neither it or :description is specified.
#
# Examples:
#   mw.upload('/path/to/local/file.jpg', 'filename' => "RemoteFile.jpg")
#   mw.upload(nil, 'filename' => "RemoteFile2.jpg", 'url' => 'http://remote.com/server/file.jpg')
#
def upload(path, options={})
  # Map the deprecated symbol options onto their string equivalents.
  if options[:description]
    options['text'] = options[:description]
    options.delete(:description)
  end
  if options[:target]
    options['filename'] = options[:target]
    options.delete(:target)
  end
  if options[:summary]
    options['text'] ||= options[:summary]
    options['comment'] = options[:summary]
    options.delete(:summary)
  end
  options['comment'] ||= "Uploaded by MediaWiki::Gateway"
  # NOTE(review): File.new opens a handle that is never explicitly closed here.
  options['file'] = File.new(path) if path
  full_name = path || options['url']
  options['filename'] ||= File.basename(full_name) if full_name
  raise ArgumentError.new(
    "One of the 'file', 'url' or 'sessionkey' options must be specified!"
  ) unless options['file'] || options['url'] || options['sessionkey']
  # Uploads use an edit token for the target filename.
  form_data = options.merge(
    'action' => 'upload',
    'token' => get_token('edit', options['filename'])
  )
  make_api_request(form_data)
end
# Checks if page is a redirect.
#
# [page_title] Page title to fetch
#
# Returns true if the page is a redirect, false if it is not or the page does not exist.
def redirect?(page_title)
  form_data = {'action' => 'query', 'prop' => 'info', 'titles' => page_title}
  page = make_api_request(form_data).first.elements["query/pages/page"]
  # !! coerces the attribute presence check to a strict boolean.
  !!(valid_page?(page) and page.attributes["redirect"])
end
# Requests image info from MediaWiki. Follows redirects.
#
# _file_name_or_page_id_ should be either:
# * a file name (String) you want info about without File: prefix.
# * or a Fixnum page id you of the file.
#
# _options_ is +Hash+ passed as query arguments. See
# http://www.mediawiki.org/wiki/API:Query_-_Properties#imageinfo_.2F_ii
# for more information.
#
# options['iiprop'] should be either a string of properties joined by
# '|' or an +Array+ (or more precisely something that responds to #join).
#
# +Hash+ like object is returned where keys are image properties.
#
# Example:
#   mw.image_info(
#     "Trooper.jpg", 'iiprop' => ['timestamp', 'user']
#   ).each do |key, value|
#     puts "#{key.inspect} => #{value.inspect}"
#   end
#
# Output:
#   "timestamp" => "2009-10-31T12:59:11Z"
#   "user" => "Valdas"
#
def image_info(file_name_or_page_id, options={})
  options['iiprop'] = options['iiprop'].join('|') \
    if options['iiprop'].respond_to?(:join)
  form_data = options.merge(
    'action' => 'query',
    'prop' => 'imageinfo',
    'redirects' => true
  )
  # NOTE(review): Fixnum is removed in modern Ruby (unified into Integer);
  # update this dispatch if the gem targets Ruby >= 2.4.
  case file_name_or_page_id
  when Fixnum
    form_data['pageids'] = file_name_or_page_id
  else
    form_data['titles'] = "File:#{file_name_or_page_id}"
  end
  xml, dummy = make_api_request(form_data)
  page = xml.elements["query/pages/page"]
  if valid_page? page
    if xml.elements["query/redirects/r"]
      # We're dealing with redirect here.
      # Re-query by page id to resolve the redirect target.
      image_info(page.attributes["pageid"].to_i, options)
    else
      page.elements["imageinfo/ii"].attributes
    end
  else
    nil
  end
end
# Download _file_name_ (without "File:" or "Image:" prefix). Returns file contents. All options are passed to
# #image_info however options['iiprop'] is forced to url. You can still
# set other options to control what file you want to download.
#
# Returns nil when the file does not exist.
def download(file_name, options={})
  options['iiprop'] = 'url'
  attributes = image_info(file_name, options)
  if attributes
    # Fetch the file body directly from the URL reported by the API.
    RestClient.get attributes['url']
  else
    nil
  end
end
# Imports a MediaWiki XML dump
#
# [xmlfile] Path to the XML dump file to import
#
# Returns XML array <api><import><page/><page/>...
# <page revisions="1"> (or more) means successfully imported
# <page revisions="0"> means duplicate, not imported
def import(xmlfile)
  form_data = { "action" => "import",
    "xml" => File.new(xmlfile),
    "token" => get_token('import', 'Main Page'), # NB: dummy page name
    "format" => 'xml' }
  make_api_request(form_data)
end
# Exports a page or set of pages
#
# [page_titles] String or array of page titles to fetch
#
# Returns MediaWiki XML dump
def export(page_titles)
  titles = [page_titles].join('|')
  make_api_request(
    'action' => 'query',
    'titles' => titles,
    'export' => nil,
    'exportnowrap' => nil
  )
end
# Get a list of all known namespaces
#
# Returns Hash mapping canonical namespace prefix (String, '' for main)
# to its numeric namespace id (Integer).
def namespaces_by_prefix
  res = make_api_request(
    'action' => 'query', 'meta' => 'siteinfo', 'siprop' => 'namespaces'
  )
  REXML::XPath.match(res, '//ns').each_with_object({}) do |ns, acc|
    acc[ns.attributes['canonical'] || ''] = ns.attributes['id'].to_i
  end
end
# Get a list of all installed (and registered) extensions
#
# Returns Hash mapping extension name (String) to its version string
# (nil when the wiki does not report one).
def extensions
  res = make_api_request(
    'action' => 'query', 'meta' => 'siteinfo', 'siprop' => 'extensions'
  )
  REXML::XPath.match(res, '//ext').each_with_object({}) do |ext, acc|
    acc[ext.attributes['name'] || ''] = ext.attributes['version']
  end
end
# Execute Semantic Mediawiki query
#
# [query] Semantic Mediawiki query
# [params] Array of additional parameters or options, eg. mainlabel=Foo or ?Place (optional)
#
# Returns result as an HTML string
def semantic_query(query, params = [])
  # Build the full parameter list without mutating the caller's array:
  # the previous `params << "format=list"` appended to the argument on
  # every call, so repeated calls with the same array accumulated
  # duplicate "format=list" entries.
  all_params = params + ['format=list']
  form_data = { 'action' => 'parse', 'prop' => 'text',
                'text' => "{{#ask:#{query}|#{all_params.join('|')}}}" }
  xml, dummy = make_api_request(form_data)
  xml.elements["parse/text"].text
end
# Set groups for a user
#
# [user] Username of user to modify
# [groups_to_add] Groups to add user to, as an array or a string if a single group (optional)
# [groups_to_remove] Groups to remove user from, as an array or a string if a single group (optional)
# [comment] Reason recorded in the user-rights log (optional)
#
# Thin wrapper: fetches a userrights token for +user+ and delegates the
# actual change to the private #userrights helper.
def set_groups(user, groups_to_add = [], groups_to_remove = [], comment = '')
  token = get_userrights_token(user)
  userrights(user, token, groups_to_add, groups_to_remove, comment)
end
# Review current revision of an article (requires FlaggedRevisions extension,
# see http://www.mediawiki.org/wiki/Extension:FlaggedRevs)
#
# [title] Title of article to review
# [flags] Hash of flags and values to set, eg. { "accuracy" => "1", "depth" => "2" }
# [comment] Comment to add to review (optional)
def review(title, flags, comment = "Reviewed by MediaWiki::Gateway")
  revid = revision(title)
  raise APIError.new('missingtitle', "Article #{title} not found") unless revid
  form_data = { 'action' => 'review', 'revid' => revid,
                'token' => get_token('edit', title), 'comment' => comment }
  # Each flag becomes a flag_<name> form field.
  flags.each { |k, v| form_data["flag_#{k}"] = v }
  res, dummy = make_api_request(form_data)
  res
end
private
# Fetch an edit-style token of the given +type+ ('delete', 'edit',
# 'import', 'move') for +page_titles+.
# Raises Unauthorized when the API returns no token.
def get_token(type, page_titles)
  res, dummy = make_api_request(
    'action' => 'query', 'prop' => 'info',
    'intoken' => type, 'titles' => page_titles
  )
  token = res.elements['query/pages/page'].attributes["#{type}token"]
  if token.nil?
    raise Unauthorized.new "User is not permitted to perform this operation: #{type}"
  end
  token
end
# Fetch a token for undeleting +page_titles+.
# Returns nil when the page has no deleted revisions; raises
# Unauthorized when revisions exist but the user may not undelete.
def get_undelete_token(page_titles)
  form_data = {'action' => 'query', 'list' => 'deletedrevs', 'prop' => 'info', 'drprop' => 'token', 'titles' => page_titles}
  res, dummy = make_api_request(form_data)
  if res.elements["query/deletedrevs/page"]
    token = res.elements["query/deletedrevs/page"].attributes["token"]
    # BUG FIX: the message interpolated the undefined local `type`
    # (copy-paste from #get_token), so a missing token raised NameError
    # instead of the intended Unauthorized.
    raise Unauthorized.new "User is not permitted to perform this operation: undelete" if token.nil?
    token
  else
    nil
  end
end
# User rights management (aka group assignment)
# Fetch a userrights token for +user+.
# Raises APIError('invaliduser') when the user does not exist,
# Unauthorized when the logged-in user may not change rights.
def get_userrights_token(user)
  form_data = {'action' => 'query', 'list' => 'users', 'ustoken' => 'userrights', 'ususers' => user}
  res, dummy = make_api_request(form_data)
  token = res.elements["query/users/user"].attributes["userrightstoken"]
  # Consistency: every other method in this class logs via the `log`
  # reader rather than touching @log directly.
  log.debug("RESPONSE: #{res.to_s}")
  if token.nil?
    if res.elements["query/users/user"].attributes["missing"]
      raise APIError.new('invaliduser', "User '#{user}' was not found (get_userrights_token)")
    else
      raise Unauthorized.new "User '#{@username}' is not permitted to perform this operation: get_userrights_token"
    end
  end
  token
end
# Apply a user-rights change. groups_to_add / groups_to_remove may each
# be a String or an Array; arrays are converted to MediaWiki's
# pipe-delimited list format before posting.
def userrights(user, token, groups_to_add, groups_to_remove, reason)
  groups_to_add = groups_to_add.join('|') if groups_to_add.is_a?(Array)
  groups_to_remove = groups_to_remove.join('|') if groups_to_remove.is_a?(Array)
  res, dummy = make_api_request(
    'action' => 'userrights',
    'user'   => user,
    'token'  => token,
    'add'    => groups_to_add,
    'remove' => groups_to_remove,
    'reason' => reason
  )
  res
end
# Make generic request to API
#
# [form_data] hash or string of attributes to post
# [continue_xpath] XPath selector for query continue parameter
# [retry_count] Counter for retries
#
# Returns [XML document root, continue value] — continue is nil when the
# response carries no query-continue element.
#
# NOTE(review): on a 503 the recursive retry's return value is discarded
# and execution falls through to re-validate the original 503 response —
# confirm whether the retry result should be returned here instead.
def make_api_request(form_data, continue_xpath=nil, retry_count=1)
  if form_data.kind_of? Hash
    # Every hash request gets XML output and the configured maxlag.
    form_data['format'] = 'xml'
    form_data['maxlag'] = @options[:maxlag]
  end
  log.debug("REQ: #{form_data.inspect}, #{@cookies.inspect}")
  RestClient.post(@wiki_url, form_data, @headers.merge({:cookies => @cookies})) do |response, &block|
    if response.code == 503 and retry_count < @options[:retry_count]
      log.warn("503 Service Unavailable: #{response.body}. Retry in #{@options[:retry_delay]} seconds.")
      sleep @options[:retry_delay]
      make_api_request(form_data, continue_xpath, retry_count + 1)
    end
    # Check response for errors and return XML
    raise MediaWiki::Exception.new "Bad response: #{response}" unless response.code >= 200 and response.code < 300
    doc = get_response(response.dup)
    if(form_data['action'] == 'login')
      # Logins may require a second round-trip with the token MediaWiki
      # hands back ("NeedToken"); session cookies are captured either way.
      login_result = doc.elements["login"].attributes['result']
      @cookies.merge!(response.cookies)
      case login_result
      when "Success" then # do nothing
      when "NeedToken" then make_api_request(form_data.merge('lgtoken' => doc.elements["login"].attributes["token"]))
      else raise Unauthorized.new "Login failed: " + login_result
      end
    end
    continue = (continue_xpath and doc.elements['query-continue']) ? REXML::XPath.first(doc, continue_xpath).value : nil
    return [doc, continue]
  end
end
# Get API XML response
# If there are errors or warnings, raise APIError
# Otherwise return XML root
def get_response(res)
  begin
    # Force UTF-8: MediaWiki responds in UTF-8 but the body may arrive
    # tagged as ASCII-8BIT on Ruby 1.9+, breaking REXML parsing. This is
    # the same fix applied upstream for issue #12.
    res = res.force_encoding("UTF-8") if res.respond_to?(:force_encoding)
    doc = REXML::Document.new(res).root
  rescue REXML::ParseException
    raise MediaWiki::Exception.new "Response is not XML. Are you sure you are pointing to api.php?"
  end
  log.debug("RES: #{doc}")
  raise MediaWiki::Exception.new "Response does not contain Mediawiki API XML: #{res}" unless [ "api", "mediawiki" ].include? doc.name
  if doc.elements["error"]
    code = doc.elements["error"].attributes["code"]
    info = doc.elements["error"].attributes["info"]
    raise APIError.new(code, info)
  end
  if doc.elements["warnings"]
    warning("API warning: #{doc.elements["warnings"].children.map {|e| e.text}.join(", ")}")
  end
  doc
end
# True when +page+ is a usable <page> element: present, not flagged
# "missing", and not flagged "invalid" (invalid titles are routed
# through #warning, which logs-and-returns-false or raises).
def valid_page?(page)
  return false if page.nil?
  return false if page.attributes["missing"]
  page.attributes["invalid"] ? warning("Invalid title '#{page.attributes["title"]}'") : true
end
# Handle an API warning: when :ignorewarnings is set, log it and return
# false; otherwise escalate it as an APIError.
def warning(msg)
  raise APIError.new('warning', msg) unless @options[:ignorewarnings]
  log.warn(msg)
  false
end
end
end
Force UTF-8 encoding in Ruby 1.9 to fix #12
require 'rubygems'
require 'logger'
require 'rest_client'
require 'rexml/document'
require 'uri'
require 'active_support'
module MediaWiki
class Gateway
attr_reader :log
# Set up a MediaWiki::Gateway for a given MediaWiki installation
#
# [url] Path to API of target MediaWiki (eg. "http://en.wikipedia.org/w/api.php")
# [options] Hash of options
#
# Options:
# [:ignorewarnings] Log API warnings and invalid page titles, instead throwing MediaWiki::APIError
# [:limit] Maximum number of results returned per search (see http://www.mediawiki.org/wiki/API:Query_-_Lists#Limits), defaults to the MediaWiki default of 500.
# [:loglevel] Log level to use, defaults to Logger::WARN. Set to Logger::DEBUG to dump every request and response to the log.
# [:maxlag] Maximum allowed server lag (see http://www.mediawiki.org/wiki/Manual:Maxlag_parameter), defaults to 5 seconds.
# [:retry_count] Number of times to try before giving up if MediaWiki returns 503 Service Unavailable, defaults to 3 (original request plus two retries).
# [:retry_delay] Seconds to wait before retry if MediaWiki returns 503 Service Unavailable, defaults to 10 seconds.
def initialize(url, options={})
default_options = {
:limit => 500,
:loglevel => Logger::WARN,
:maxlag => 5,
:retry_count => 3,
:retry_delay => 10
}
@options = default_options.merge(options)
@wiki_url = url
@log = Logger.new(STDERR)
@log.level = @options[:loglevel]
@headers = { "User-Agent" => "MediaWiki::Gateway/#{MediaWiki::VERSION}" }
@cookies = {}
end
attr_reader :base_url, :cookies
# Login to MediaWiki
#
# [username] Username
# [password] Password
# [domain] Domain for authentication plugin logins (eg. LDAP), optional -- defaults to 'local' if not given
#
# Throws MediaWiki::Unauthorized if login fails
def login(username, password, domain = 'local')
form_data = {'action' => 'login', 'lgname' => username, 'lgpassword' => password, 'lgdomain' => domain}
make_api_request(form_data)
@password = password
@username = username
end
# Fetch MediaWiki page in MediaWiki format. Does not follow redirects.
#
# [page_title] Page title to fetch
#
# Returns content of page as string, nil if the page does not exist.
def get(page_title)
form_data = {'action' => 'query', 'prop' => 'revisions', 'rvprop' => 'content', 'titles' => page_title}
page = make_api_request(form_data).first.elements["query/pages/page"]
if valid_page? page
page.elements["revisions/rev"].text || ""
end
end
# Fetch latest revision ID of a MediaWiki page. Does not follow redirects.
#
# [page_title] Page title to fetch
#
# Returns revision ID as a string, nil if the page does not exist.
def revision(page_title)
form_data = {'action' => 'query', 'prop' => 'revisions', 'rvprop' => 'ids', 'rvlimit' => 1, 'titles' => page_title}
page = make_api_request(form_data).first.elements["query/pages/page"]
if valid_page? page
page.elements["revisions/rev"].attributes["revid"]
end
end
# Render a MediaWiki page as HTML
#
# [page_title] Page title to fetch
# [options] Hash of additional options
#
# Options:
# * [:linkbase] supply a String to prefix all internal (relative) links with. '/wiki/' is assumed to be the base of a relative link
# * [:noeditsections] strips all edit-links if set to +true+
# * [:noimages] strips all +img+ tags from the rendered text if set to +true+
#
# Returns rendered page as string, or nil if the page does not exist
def render(page_title, options = {})
form_data = {'action' => 'parse', 'page' => page_title}
valid_options = %w(linkbase noeditsections noimages)
# Check options
options.keys.each{|opt| raise ArgumentError.new("Unknown option '#{opt}'") unless valid_options.include?(opt.to_s)}
rendered = nil
parsed = make_api_request(form_data).first.elements["parse"]
if parsed.attributes["revid"] != '0'
rendered = parsed.elements["text"].text.gsub(/<!--(.|\s)*?-->/, '')
# OPTIMIZE: unifiy the keys in +options+ like symbolize_keys! but w/o
if options["linkbase"] or options[:linkbase]
linkbase = options["linkbase"] || options[:linkbase]
rendered = rendered.gsub(/\shref="\/wiki\/([\w\(\)_\-\.%\d:,]*)"/, ' href="' + linkbase + '/wiki/\1"')
end
if options["noeditsections"] or options[:noeditsections]
rendered = rendered.gsub(/<span class="editsection">\[.+\]<\/span>/, '')
end
if options["noimages"] or options[:noimages]
rendered = rendered.gsub(/<img.*\/>/, '')
end
end
rendered
end
# Create a new page, or overwrite an existing one
#
# [title] Page title to create or overwrite, string
# [content] Content for the page, string
# [options] Hash of additional options
#
# Options:
# * [:overwrite] Allow overwriting existing pages
# * [:summary] Edit summary for history, string
# * [:token] Use this existing edit token instead requesting a new one (useful for bulk loads)
def create(title, content, options={})
form_data = {'action' => 'edit', 'title' => title, 'text' => content, 'summary' => (options[:summary] || ""), 'token' => get_token('edit', title)}
form_data['createonly'] = "" unless options[:overwrite]
make_api_request(form_data)
end
# Edit page
#
# Same options as create, but always overwrites existing pages (and creates them if they don't exist already).
def edit(title, content, options={})
create(title, content, {:overwrite => true}.merge(options))
end
# Move a page to a new title
#
# [from] Old page name
# [to] New page name
# [options] Hash of additional options
#
# Options:
# * [:movesubpages] Move associated subpages
# * [:movetalk] Move associated talkpages
# * [:noredirect] Do not create a redirect page from old name. Requires the 'suppressredirect' user right, otherwise MW will silently ignore the option and create the redirect anyway.
# * [:reason] Reason for move
# * [:watch] Add page and any redirect to watchlist
# * [:unwatch] Remove page and any redirect from watchlist
def move(from, to, options={})
valid_options = %w(movesubpages movetalk noredirect reason watch unwatch)
options.keys.each{|opt| raise ArgumentError.new("Unknown option '#{opt}'") unless valid_options.include?(opt.to_s)}
form_data = options.merge({'action' => 'move', 'from' => from, 'to' => to, 'token' => get_token('move', from)})
make_api_request(form_data)
end
# Delete one page. (MediaWiki API does not support deleting multiple pages at a time.)
#
# [title] Title of page to delete
def delete(title)
form_data = {'action' => 'delete', 'title' => title, 'token' => get_token('delete', title)}
make_api_request(form_data)
end
# Undelete all revisions of one page.
#
# [title] Title of page to undelete
#
# Returns number of revisions undeleted, or zero if nothing to undelete
def undelete(title)
token = get_undelete_token(title)
if token
form_data = {'action' => 'undelete', 'title' => title, 'token' => token }
make_api_request(form_data).first.elements["undelete"].attributes["revisions"].to_i
else
0 # No revisions to undelete
end
end
# Get a list of matching page titles in a namespace
#
# [key] Search key, matched as a prefix (^key.*). May contain or equal a namespace, defaults to main (namespace 0) if none given.
# [options] Optional hash of additional options, eg. { 'apfilterredir' => 'nonredirects' }. See http://www.mediawiki.org/wiki/API:Allpages
#
# Returns array of page titles (empty if no matches)
def list(key, options = {})
titles = []
apfrom = nil
key, namespace = key.split(":", 2).reverse
namespace = namespaces_by_prefix[namespace] || 0
begin
form_data = options.merge(
{'action' => 'query',
'list' => 'allpages',
'apfrom' => apfrom,
'apprefix' => key,
'aplimit' => @options[:limit],
'apnamespace' => namespace})
res, apfrom = make_api_request(form_data, '//query-continue/allpages/@apfrom')
titles += REXML::XPath.match(res, "//p").map { |x| x.attributes["title"] }
end while apfrom
titles
end
# Get a list of pages that link to a target page
#
# [title] Link target page
# [filter] "all" links (default), "redirects" only, or "nonredirects" (plain links only)
#
# Returns array of page titles (empty if no matches)
def backlinks(title, filter = "all")
titles = []
blcontinue = nil
begin
form_data =
{'action' => 'query',
'list' => 'backlinks',
'bltitle' => title,
'blfilterredir' => filter,
'bllimit' => @options[:limit] }
form_data['blcontinue'] = blcontinue if blcontinue
res, blcontinue = make_api_request(form_data, '//query-continue/backlinks/@blcontinue')
titles += REXML::XPath.match(res, "//bl").map { |x| x.attributes["title"] }
end while blcontinue
titles
end
# Get a list of pages with matching content in given namespaces
#
# [key] Search key
# [namespaces] Array of namespace names to search (defaults to main only)
# [limit] Maximum number of hits to ask for (defaults to 500; note that Wikimedia Foundation wikis allow only 50 for normal users)
#
# Returns array of page titles (empty if no matches)
def search(key, namespaces=nil, limit=@options[:limit])
titles = []
offset = nil
in_progress = true
form_data = { 'action' => 'query',
'list' => 'search',
'srwhat' => 'text',
'srsearch' => key,
'srlimit' => limit
}
if namespaces
namespaces = [ namespaces ] unless namespaces.kind_of? Array
form_data['srnamespace'] = namespaces.map! do |ns| namespaces_by_prefix[ns] end.join('|')
end
begin
form_data['sroffset'] = offset if offset
res, offset = make_api_request(form_data, '//query-continue/search/@sroffset')
titles += REXML::XPath.match(res, "//p").map { |x| x.attributes["title"] }
end while offset
titles
end
# Upload a file, or get the status of pending uploads. Several
# methods are available:
#
# * Upload file contents directly.
# * Have the MediaWiki server fetch a file from a URL, using the
# "url" parameter
#
# Requires Mediawiki 1.16+
#
# Arguments:
# * [path] Path to file to upload. Set to nil if uploading from URL.
# * [options] Hash of additional options
#
# Note that queries using session keys must be done in the same login
# session as the query that originally returned the key (i.e. do not
# log out and then log back in).
#
# Options:
# * 'filename' - Target filename (defaults to local name if not given), options[:target] is alias for this.
# * 'comment' - Upload comment. Also used as the initial page text for new files if "text" is not specified.
# * 'text' - Initial page text for new files
# * 'watch' - Watch the page
# * 'ignorewarnings' - Ignore any warnings
# * 'url' - Url to fetch the file from. Set path to nil if you want to use this.
#
# Deprecated but still supported options:
# * :description - Description of this file. Used as 'text'.
# * :target - Target filename, same as 'filename'.
# * :summary - Edit summary for history. Used as 'comment'. Also used as 'text' if neither it or :description is specified.
#
# Examples:
# mw.upload('/path/to/local/file.jpg', 'filename' => "RemoteFile.jpg")
# mw.upload(nil, 'filename' => "RemoteFile2.jpg", 'url' => 'http://remote.com/server/file.jpg')
#
def upload(path, options={})
if options[:description]
options['text'] = options[:description]
options.delete(:description)
end
if options[:target]
options['filename'] = options[:target]
options.delete(:target)
end
if options[:summary]
options['text'] ||= options[:summary]
options['comment'] = options[:summary]
options.delete(:summary)
end
options['comment'] ||= "Uploaded by MediaWiki::Gateway"
options['file'] = File.new(path) if path
full_name = path || options['url']
options['filename'] ||= File.basename(full_name) if full_name
raise ArgumentError.new(
"One of the 'file', 'url' or 'sessionkey' options must be specified!"
) unless options['file'] || options['url'] || options['sessionkey']
form_data = options.merge(
'action' => 'upload',
'token' => get_token('edit', options['filename'])
)
make_api_request(form_data)
end
# Checks if page is a redirect.
#
# [page_title] Page title to fetch
#
# Returns true if the page is a redirect, false if it is not or the page does not exist.
def redirect?(page_title)
form_data = {'action' => 'query', 'prop' => 'info', 'titles' => page_title}
page = make_api_request(form_data).first.elements["query/pages/page"]
!!(valid_page?(page) and page.attributes["redirect"])
end
# Requests image info from MediaWiki. Follows redirects.
#
# _file_name_or_page_id_ should be either:
# * a file name (String) you want info about without File: prefix.
# * or a Fixnum page id you of the file.
#
# _options_ is +Hash+ passed as query arguments. See
# http://www.mediawiki.org/wiki/API:Query_-_Properties#imageinfo_.2F_ii
# for more information.
#
# options['iiprop'] should be either a string of properties joined by
# '|' or an +Array+ (or more precisely something that responds to #join).
#
# +Hash+ like object is returned where keys are image properties.
#
# Example:
# mw.image_info(
# "Trooper.jpg", 'iiprop' => ['timestamp', 'user']
# ).each do |key, value|
# puts "#{key.inspect} => #{value.inspect}"
# end
#
# Output:
# "timestamp" => "2009-10-31T12:59:11Z"
# "user" => "Valdas"
#
def image_info(file_name_or_page_id, options={})
  # The API expects 'iiprop' as a pipe-joined string; accept an Array too.
  options['iiprop'] = options['iiprop'].join('|') \
    if options['iiprop'].respond_to?(:join)
  form_data = options.merge(
    'action' => 'query',
    'prop' => 'imageinfo',
    'redirects' => true
  )
  case file_name_or_page_id
  when Integer
    # Fixnum was deprecated in Ruby 2.4 and removed in 3.2; Integer is its
    # superclass, so this matches exactly the same values on old Rubies.
    form_data['pageids'] = file_name_or_page_id
  else
    form_data['titles'] = "File:#{file_name_or_page_id}"
  end
  xml, dummy = make_api_request(form_data)
  page = xml.elements["query/pages/page"]
  if valid_page? page
    if xml.elements["query/redirects/r"]
      # The title was a redirect: re-query by the resolved page id so the
      # imageinfo belongs to the redirect target.
      image_info(page.attributes["pageid"].to_i, options)
    else
      page.elements["imageinfo/ii"].attributes
    end
  else
    nil
  end
end
# Download _file_name_ (without "File:" or "Image:" prefix). Returns file contents. All options are passed to
# #image_info however options['iiprop'] is forced to url. You can still
# set other options to control what file you want to download.
def download(file_name, options={})
options['iiprop'] = 'url'
attributes = image_info(file_name, options)
if attributes
RestClient.get attributes['url']
else
nil
end
end
# Imports a MediaWiki XML dump
#
# [xml] String or array of page names to fetch
#
# Returns XML array <api><import><page/><page/>...
# <page revisions="1"> (or more) means successfully imported
# <page revisions="0"> means duplicate, not imported
def import(xmlfile)
form_data = { "action" => "import",
"xml" => File.new(xmlfile),
"token" => get_token('import', 'Main Page'), # NB: dummy page name
"format" => 'xml' }
make_api_request(form_data)
end
# Exports a page or set of pages
#
# [page_titles] String or array of page titles to fetch
#
# Returns MediaWiki XML dump
def export(page_titles)
form_data = {'action' => 'query', 'titles' => [page_titles].join('|'), 'export' => nil, 'exportnowrap' => nil}
return make_api_request(form_data)
end
# Get a list of all known namespaces
#
# Returns array of namespaces (name => id)
def namespaces_by_prefix
form_data = { 'action' => 'query', 'meta' => 'siteinfo', 'siprop' => 'namespaces' }
res = make_api_request(form_data)
REXML::XPath.match(res, "//ns").inject(Hash.new) do |namespaces, namespace|
prefix = namespace.attributes["canonical"] || ""
namespaces[prefix] = namespace.attributes["id"].to_i
namespaces
end
end
# Get a list of all installed (and registered) extensions
#
# Returns array of extensions (name => version)
def extensions
form_data = { 'action' => 'query', 'meta' => 'siteinfo', 'siprop' => 'extensions' }
res = make_api_request(form_data)
REXML::XPath.match(res, "//ext").inject(Hash.new) do |extensions, extension|
name = extension.attributes["name"] || ""
extensions[name] = extension.attributes["version"]
extensions
end
end
# Execute Semantic Mediawiki query
#
# [query] Semantic Mediawiki query
# [params] Array of additional parameters or options, eg. mainlabel=Foo or ?Place (optional)
#
# Returns result as an HTML string
def semantic_query(query, params = [])
  # Build the full parameter list without mutating the caller's array:
  # the previous `params << "format=list"` appended to the argument on
  # every call, so repeated calls with the same array accumulated
  # duplicate "format=list" entries.
  all_params = params + ['format=list']
  form_data = { 'action' => 'parse', 'prop' => 'text',
                'text' => "{{#ask:#{query}|#{all_params.join('|')}}}" }
  xml, dummy = make_api_request(form_data)
  xml.elements["parse/text"].text
end
# Set groups for a user
#
# [user] Username of user to modify
# [groups_to_add] Groups to add user to, as an array or a string if a single group (optional)
# [groups_to_remove] Groups to remove user from, as an array or a string if a single group (optional)
def set_groups(user, groups_to_add = [], groups_to_remove = [], comment = '')
token = get_userrights_token(user)
userrights(user, token, groups_to_add, groups_to_remove, comment)
end
# Review current revision of an article (requires FlaggedRevisions extension, see http://www.mediawiki.org/wiki/Extension:FlaggedRevs)
#
# [title] Title of article to review
# [flags] Hash of flags and values to set, eg. { "accuracy" => "1", "depth" => "2" }
# [comment] Comment to add to review (optional)
def review(title, flags, comment = "Reviewed by MediaWiki::Gateway")
raise APIError.new('missingtitle', "Article #{title} not found") unless revid = revision(title)
form_data = {'action' => 'review', 'revid' => revid, 'token' => get_token('edit', title), 'comment' => comment}
form_data.merge!( Hash[flags.map {|k,v| ["flag_#{k}", v]}] )
res, dummy = make_api_request(form_data)
res
end
private
# Fetch token (type 'delete', 'edit', 'import', 'move')
def get_token(type, page_titles)
form_data = {'action' => 'query', 'prop' => 'info', 'intoken' => type, 'titles' => page_titles}
res, dummy = make_api_request(form_data)
token = res.elements["query/pages/page"].attributes[type + "token"]
raise Unauthorized.new "User is not permitted to perform this operation: #{type}" if token.nil?
token
end
# Fetch a token for undeleting +page_titles+.
# Returns nil when the page has no deleted revisions; raises
# Unauthorized when revisions exist but the user may not undelete.
def get_undelete_token(page_titles)
  form_data = {'action' => 'query', 'list' => 'deletedrevs', 'prop' => 'info', 'drprop' => 'token', 'titles' => page_titles}
  res, dummy = make_api_request(form_data)
  if res.elements["query/deletedrevs/page"]
    token = res.elements["query/deletedrevs/page"].attributes["token"]
    # BUG FIX: the message interpolated the undefined local `type`
    # (copy-paste from #get_token), so a missing token raised NameError
    # instead of the intended Unauthorized.
    raise Unauthorized.new "User is not permitted to perform this operation: undelete" if token.nil?
    token
  else
    nil
  end
end
# User rights management (aka group assignment)
def get_userrights_token(user)
form_data = {'action' => 'query', 'list' => 'users', 'ustoken' => 'userrights', 'ususers' => user}
res, dummy = make_api_request(form_data)
token = res.elements["query/users/user"].attributes["userrightstoken"]
@log.debug("RESPONSE: #{res.to_s}")
if token.nil?
if res.elements["query/users/user"].attributes["missing"]
raise APIError.new('invaliduser', "User '#{user}' was not found (get_userrights_token)")
else
raise Unauthorized.new "User '#{@username}' is not permitted to perform this operation: get_userrights_token"
end
end
token
end
def userrights(user, token, groups_to_add, groups_to_remove, reason)
# groups_to_add and groups_to_remove can be a string or an array. Turn them into MediaWiki's pipe-delimited list format.
if groups_to_add.is_a? Array
groups_to_add = groups_to_add.join('|')
end
if groups_to_remove.is_a? Array
groups_to_remove = groups_to_remove.join('|')
end
form_data = {'action' => 'userrights', 'user' => user, 'token' => token,
'add' => groups_to_add,
'remove' => groups_to_remove,
'reason' => reason
}
res, dummy = make_api_request(form_data)
res
end
# Make generic request to API
#
# [form_data] hash or string of attributes to post
# [continue_xpath] XPath selector for query continue parameter
# [retry_count] Counter for retries
#
# Returns XML document
def make_api_request(form_data, continue_xpath=nil, retry_count=1)
if form_data.kind_of? Hash
form_data['format'] = 'xml'
form_data['maxlag'] = @options[:maxlag]
end
log.debug("REQ: #{form_data.inspect}, #{@cookies.inspect}")
RestClient.post(@wiki_url, form_data, @headers.merge({:cookies => @cookies})) do |response, &block|
if response.code == 503 and retry_count < @options[:retry_count]
log.warn("503 Service Unavailable: #{response.body}. Retry in #{@options[:retry_delay]} seconds.")
sleep @options[:retry_delay]
make_api_request(form_data, continue_xpath, retry_count + 1)
end
# Check response for errors and return XML
raise MediaWiki::Exception.new "Bad response: #{response}" unless response.code >= 200 and response.code < 300
doc = get_response(response.dup)
if(form_data['action'] == 'login')
login_result = doc.elements["login"].attributes['result']
@cookies.merge!(response.cookies)
case login_result
when "Success" then # do nothing
when "NeedToken" then make_api_request(form_data.merge('lgtoken' => doc.elements["login"].attributes["token"]))
else raise Unauthorized.new "Login failed: " + login_result
end
end
continue = (continue_xpath and doc.elements['query-continue']) ? REXML::XPath.first(doc, continue_xpath).value : nil
return [doc, continue]
end
end
# Get API XML response
# If there are errors or warnings, raise APIError
# Otherwise return XML root
def get_response(res)
  begin
    # Force UTF-8: MediaWiki responds in UTF-8 but the body may arrive
    # tagged as ASCII-8BIT on Ruby 1.9+, which breaks REXML parsing.
    res = res.force_encoding("UTF-8") if res.respond_to?(:force_encoding)
    doc = REXML::Document.new(res).root
  rescue REXML::ParseException => e
    raise MediaWiki::Exception.new "Response is not XML. Are you sure you are pointing to api.php?"
  end
  log.debug("RES: #{doc}")
  # Sanity check: the document root must be the MediaWiki API envelope.
  raise MediaWiki::Exception.new "Response does not contain Mediawiki API XML: #{res}" unless [ "api", "mediawiki" ].include? doc.name
  if doc.elements["error"]
    code = doc.elements["error"].attributes["code"]
    info = doc.elements["error"].attributes["info"]
    raise APIError.new(code, info)
  end
  if doc.elements["warnings"]
    # #warning either logs (with :ignorewarnings) or raises APIError.
    warning("API warning: #{doc.elements["warnings"].children.map {|e| e.text}.join(", ")}")
  end
  doc
end
def valid_page?(page)
return false unless page
return false if page.attributes["missing"]
if page.attributes["invalid"]
warning("Invalid title '#{page.attributes["title"]}'")
else
true
end
end
def warning(msg)
if @options[:ignorewarnings]
log.warn(msg)
return false
else
raise APIError.new('warning', msg)
end
end
end
end
|
require 'tempfile'
require 'zlib'
##
# General formatting functions shared throughout MiGA.
module MiGA::Common::Format
##
# Tabulates +values+, an Array of Arrays, each with the same number of
# entries as +header+. Returns an Array of String, one per line. When
# +tabular+ is true, fields are tab-separated with no padding and no
# dashed ruler line (and an all-nil header is omitted entirely).
def tabulate(header, values, tabular = false)
  rows = []
  rows << header.map(&:to_s) unless tabular && header.all?(&:nil?)
  rows << rows.first.map { |h| h.gsub(/\S/, '-') } unless tabular
  values.each { |row| rows << row.map { |cell| cell.nil? ? '?' : cell.to_s } }
  widths =
    if tabular
      Array.new(header.size, 0)
    else
      rows.transpose.map { |col| col.map(&:length).max }
    end
  rows.map do |row|
    widths.each_index.map do |i|
      i.zero? ? row[i].rjust(widths[i]) : row[i].ljust(widths[i])
    end.join(tabular ? "\t" : ' ')
  end
end
##
# Cleans a FastA file in place: sanitizes defline ids, strips invalid
# sequence characters, and re-wraps sequence lines at 80 columns.
# Supports gzipped input (detected by a ".gz" in the file name).
def clean_fasta_file(file)
  tmp_fh = nil
  tmp_path = nil
  begin
    if file =~ /\.gz/
      # Build a fresh *.gz temp path (Tempfile reserves the name, then we
      # unlink it and append .gz) so the final move keeps a .gz name.
      tmp_path = Tempfile.new('MiGA.gz').tap(&:close).path
      File.unlink tmp_path
      tmp_path += '.gz'
      tmp_fh = Zlib::GzipWriter.open(tmp_path, 9)
      fh = Zlib::GzipReader.open(file)
    else
      tmp_fh = Tempfile.new('MiGA')
      tmp_path = tmp_fh.path
      fh = File.open(file, 'r')
    end
    buffer = ''
    fh.each_line do |ln|
      ln.chomp!
      if ln =~ /^>\s*(\S+)(.*)/
        id, df = $1, $2
        # New record: flush the previous record's sequence, wrapped at 80.
        tmp_fh.print buffer.wrap_width(80)
        buffer = ''
        # Restrict the id to [A-Za-z0-9_|.]; description kept verbatim.
        tmp_fh.puts ">#{id.gsub(/[^A-Za-z0-9_\|\.]/, '_')}#{df}"
      else
        # Sequence line: keep only letters, '.' and '-' (gap symbols).
        buffer << ln.gsub(/[^A-Za-z\.\-]/, '')
      end
    end
    # Flush the final record.
    tmp_fh.print buffer.wrap_width(80)
    tmp_fh.close
    fh.close
    # NOTE(review): relies on FileUtils being loaded elsewhere — this
    # file only requires 'tempfile' and 'zlib'; confirm.
    FileUtils.mv(tmp_path, file)
  ensure
    # Best-effort cleanup if the move above did not happen.
    begin
      tmp_fh.close unless tmp_fh.nil?
      File.unlink(tmp_path) unless tmp_path.nil?
    rescue
    end
  end
end
##
# Calculates the average and standard deviation of the sequence lengths in
# a FastA or FastQ file (supports gzipped files). The +format+ must be a
# Symbol, one of +:fasta+ or +:fastq+. Additional estimations can be
# controlled via the +opts+ Hash. Supported options include:
# - +:n50+: If true, it also returns the N50 and the median (in bp)
# - +:gc+: If true, it also returns the G+C content (in %)
# - +:x+: If true, it also returns the undetermined bases content (in %)
#
# Empty inputs are handled gracefully: all statistics are 0 / 0.0 instead
# of nil/NaN (previously `l.inject(:+)` returned nil and the averages
# divided by zero).
def seqs_length(file, format, opts = {})
  fh = file =~ /\.gz/ ? Zlib::GzipReader.open(file) : File.open(file, 'r')
  l = []
  gc = 0
  xn = 0
  i = 0 # <- Zlib::GzipReader doesn't set `$.`
  fh.each_line do |ln|
    i += 1
    if (format == :fasta and ln =~ /^>/) or
       (format == :fastq and (i % 4) == 1)
      l << 0 # start a new sequence record
    elsif format == :fasta or (i % 4) == 2
      l[l.size - 1] += ln.chomp.size
      gc += ln.scan(/[GCgc]/).count if opts[:gc]
      xn += ln.scan(/[XNxn]/).count if opts[:x]
    end
  end
  fh.close
  # `inject(:+)` yields nil on an empty Array; default to 0 so the
  # derived statistics below stay well-defined.
  o = { n: l.size, tot: l.inject(:+) || 0, max: l.max }
  if l.empty?
    o[:avg] = o[:var] = o[:sd] = 0.0
  else
    o[:avg] = o[:tot].to_f / l.size
    o[:var] = l.map { |a| a**2 }.inject(:+).to_f / l.size - o[:avg]**2
    o[:sd] = Math.sqrt o[:var]
  end
  o[:gc] = o[:tot].zero? ? 0.0 : 100.0 * gc / o[:tot] if opts[:gc]
  o[:x] = o[:tot].zero? ? 0.0 : 100.0 * xn / o[:tot] if opts[:x]
  if opts[:n50] && !l.empty?
    l.sort!
    thr = o[:tot] / 2
    pos = 0
    l.each do |a|
      pos += a
      o[:n50] = a
      break if pos >= thr
    end
    # Median: mean of the two middle values for even n.
    o[:med] = o[:n].even? ?
      0.5 * l[o[:n] / 2 - 1, 2].inject(:+) : l[(o[:n] - 1) / 2]
  end
  o
end
end
##
# MiGA extensions to the String class.
class String
  ##
  # Replace every character that is not allowed in a MiGA name with an
  # underscore (_). The result is a MiGA-compliant name EXCEPT for empty
  # strings, which stay empty.
  def miga_name
    tr('^A-Za-z0-9_', '_')
  end

  ##
  # Is the string a MiGA-compliant name?
  def miga_name?
    match?(/^[A-Za-z0-9_]+$/)
  end

  ##
  # Replace underscores by spaces or dots (depending on context).
  def unmiga_name
    gsub(/_(str|sp|subsp|pv)__/) { "_#{Regexp.last_match(1)}._" }.tr('_', ' ')
  end

  ##
  # Hard-wraps the string at a fixed Integer +width+ (existing line
  # breaks are preserved).
  def wrap_width(width)
    gsub(/([^\n\r]{1,#{width}})/) { "#{Regexp.last_match(1)}\n" }
  end

  ##
  # Replace {{variables}} using the +vars+ hash; +self+ is not mutated.
  def miga_variables(vars)
    vars.reduce(String.new(self)) do |out, (key, value)|
      out.gsub("{{#{key}}}", value.to_s)
    end
  end
end
Graceful support for empty sequence stats
require 'tempfile'
require 'zlib'
##
# General formatting functions shared throughout MiGA.
module MiGA::Common::Format
  ##
  # Tabulates an +values+, and Array of Arrays, all with the same number of
  # entries as +header+. Returns an Array of String, one per line.
  # When +tabular+ is true, output is tab-separated with no padding and no
  # separator row; otherwise columns are space-padded and a dashed ruler
  # follows the header.
  def tabulate(header, values, tabular = false)
    fields = []
    # The header row is omitted only in tabular mode with an all-nil header
    fields << header.map(&:to_s) unless tabular && header.all?(&:nil?)
    # Dashed separator mirrors the header's non-space characters
    fields << fields.first.map { |h| h.gsub(/\S/, '-') } unless tabular
    fields += values.map { |r| r.map { |cell| cell.nil? ? '?' : cell.to_s } }
    # Column widths: zeros (no padding) when tabular, else the widest cell
    # of each column
    clen = tabular ? Array.new(header.size, 0) :
      fields.map { |r| r.map(&:length) }.transpose.map(&:max)
    fields.map do |r|
      (0..(clen.size - 1)).map do |col_n|
        # First column right-justified, the rest left-justified
        col_n == 0 ? r[col_n].rjust(clen[col_n]) : r[col_n].ljust(clen[col_n])
      end.join(tabular ? "\t" : ' ')
    end
  end

  ##
  # Cleans a FastA file in place: sanitizes sequence IDs (disallowed
  # characters become '_'), strips non-residue characters from sequence
  # lines, and re-wraps sequences at 80 columns. Files whose name contains
  # '.gz' are read and rewritten gzip-compressed.
  def clean_fasta_file(file)
    tmp_fh = nil
    tmp_path = nil
    begin
      if file =~ /\.gz/
        # Reserve a unique temp path, then reopen it as a gzip stream
        tmp_path = Tempfile.new('MiGA.gz').tap(&:close).path
        File.unlink tmp_path
        tmp_path += '.gz'
        tmp_fh = Zlib::GzipWriter.open(tmp_path, 9)
        fh = Zlib::GzipReader.open(file)
      else
        tmp_fh = Tempfile.new('MiGA')
        tmp_path = tmp_fh.path
        fh = File.open(file, 'r')
      end
      buffer = ''
      fh.each_line do |ln|
        ln.chomp!
        if ln =~ /^>\s*(\S+)(.*)/
          id, df = $1, $2
          # Flush the accumulated previous sequence before the new header
          tmp_fh.print buffer.wrap_width(80)
          buffer = ''
          tmp_fh.puts ">#{id.gsub(/[^A-Za-z0-9_\|\.]/, '_')}#{df}"
        else
          # Keep only residue characters (letters, '.', '-')
          buffer << ln.gsub(/[^A-Za-z\.\-]/, '')
        end
      end
      tmp_fh.print buffer.wrap_width(80)
      tmp_fh.close
      fh.close
      FileUtils.mv(tmp_path, file)
    ensure
      # Best-effort cleanup: after a successful move the unlink fails
      # harmlessly and is swallowed
      begin
        tmp_fh.close unless tmp_fh.nil?
        File.unlink(tmp_path) unless tmp_path.nil?
      rescue
      end
    end
  end

  ##
  # Calculates the average and standard deviation of the sequence lengths in
  # a FastA or FastQ file (supports gzipped files). The +format+ must be a
  # Symbol, one of +:fasta+ or +:fastq+. Additional estimations can be
  # controlled via the +opts+ Hash. Supported options include:
  # - +:n50+: If true, it also returns the N50 and the median (in bp)
  # - +:gc+: If true, it also returns the G+C content (in %)
  # - +:x+: If true, it also returns the undetermined bases content (in %)
  # Returns a Hash; an empty input yields only +{ n: 0, tot: 0, max: nil }+.
  def seqs_length(file, format, opts = {})
    fh = file =~ /\.gz/ ? Zlib::GzipReader.open(file) : File.open(file, 'r')
    l = []
    gc = 0
    xn = 0
    i = 0 # <- Zlib::GzipReader doesn't set `$.`
    fh.each_line do |ln|
      i += 1
      if (format == :fasta and ln =~ /^>/) or
         (format == :fastq and (i % 4) == 1)
        l << 0
      elsif format == :fasta or (i % 4) == 2
        l[l.size - 1] += ln.chomp.size
        gc += ln.scan(/[GCgc]/).count if opts[:gc]
        xn += ln.scan(/[XNxn]/).count if opts[:x]
      end
    end
    fh.close
    # inject(0, :+) keeps tot an Integer (not nil) for empty inputs; the
    # early return avoids NaN / zero-division in the derived statistics
    o = { n: l.size, tot: l.inject(0, :+), max: l.max }
    return o if o[:tot].zero?
    o[:avg] = o[:tot].to_f / l.size
    o[:var] = l.map { |a| a**2 }.inject(:+).to_f / l.size - o[:avg]**2
    o[:sd] = Math.sqrt o[:var]
    o[:gc] = 100.0 * gc / o[:tot] if opts[:gc]
    o[:x] = 100.0 * xn / o[:tot] if opts[:x]
    if opts[:n50]
      l.sort!
      thr = o[:tot] / 2
      pos = 0
      l.each do |a|
        pos += a
        o[:n50] = a
        break if pos >= thr
      end
      # Median: mean of the two central lengths when n is even
      o[:med] = o[:n].even? ?
        0.5 * l[o[:n] / 2 - 1, 2].inject(:+) :
        l[(o[:n] - 1) / 2]
    end
    o
  end
end
##
# MiGA extensions to the String class.
class String
  ##
  # Replace every character that is not allowed in a MiGA name with an
  # underscore (_). The result is a MiGA-compliant name EXCEPT for empty
  # strings, which stay empty.
  def miga_name
    tr('^A-Za-z0-9_', '_')
  end

  ##
  # Is the string a MiGA-compliant name?
  def miga_name?
    match?(/^[A-Za-z0-9_]+$/)
  end

  ##
  # Replace underscores by spaces or dots (depending on context).
  def unmiga_name
    gsub(/_(str|sp|subsp|pv)__/) { "_#{Regexp.last_match(1)}._" }.tr('_', ' ')
  end

  ##
  # Hard-wraps the string at a fixed Integer +width+ (existing line
  # breaks are preserved).
  def wrap_width(width)
    gsub(/([^\n\r]{1,#{width}})/) { "#{Regexp.last_match(1)}\n" }
  end

  ##
  # Replace {{variables}} using the +vars+ hash; +self+ is not mutated.
  def miga_variables(vars)
    vars.reduce(String.new(self)) do |out, (key, value)|
      out.gsub("{{#{key}}}", value.to_s)
    end
  end
end
|
require "minitest/unit"
module MiniTest
  require "minitest/relative_position"
  require "minitest/reporter"
  require "minitest/reporter_runner"
  require "minitest/around_test_hooks"
  require "minitest/test_runner"
  require "minitest/test_recorder"
  require "minitest/extensible_backtrace_filter"

  # Pluggable reporters for MiniTest plus the glue that installs them
  # into MiniTest::Unit across several MiniTest versions.
  module Reporters
    require "minitest/reporters/version"

    # Reporters are autoloaded so only the ones actually used are required
    autoload :DefaultReporter, "minitest/reporters/default_reporter"
    autoload :SpecReporter, "minitest/reporters/spec_reporter"
    autoload :ProgressReporter, "minitest/reporters/progress_reporter"
    autoload :RubyMateReporter, "minitest/reporters/ruby_mate_reporter"
    autoload :RubyMineReporter, "minitest/reporters/rubymine_reporter"
    autoload :GuardReporter, "minitest/reporters/guard_reporter"
    autoload :JUnitReporter, "minitest/reporters/junit_reporter"

    # Entry point: installs the reporter runner, around-test hooks, the
    # backtrace filter, and version-specific compatibility patches.
    def self.use!(console_reporters = ProgressReporter.new, env = ENV, backtrace_filter = ExtensibleBacktraceFilter.default_filter)
      use_runner!(console_reporters, env)
      use_around_test_hooks!
      use_backtrace_filter!(backtrace_filter)
      use_parallel_length_method!
      use_old_activesupport_fix!
    end

    # Replace MiniTest's runner with one that dispatches to our reporters.
    def self.use_runner!(console_reporters, env)
      runner = ReporterRunner.new
      runner.reporters = choose_reporters(console_reporters, env)
      Unit.runner = runner
    end

    # Wrap every test run in AroundTestHooks callbacks via alias chaining.
    def self.use_around_test_hooks!
      Unit::TestCase.class_eval do
        def run_with_hooks(runner)
          AroundTestHooks.before_test(self)
          result = run_without_hooks(runner)
          AroundTestHooks.after_test(self)
          result
        end
        alias_method :run_without_hooks, :run
        alias_method :run, :run_with_hooks
      end
    end

    # Install the backtrace filter; MiniTest < 4.1 lacks the accessor and
    # filter_backtrace hook, so define them first.
    def self.use_backtrace_filter!(backtrace_filter)
      if Unit::VERSION < "4.1.0"
        MiniTest.class_eval do
          class << self
            attr_accessor :backtrace_filter
          end
          def self.filter_backtrace(backtrace)
            backtrace_filter.filter(backtrace)
          end
        end
      end
      MiniTest.backtrace_filter = backtrace_filter
    end

    # Pick reporters based on the environment: TextMate (TM_PID),
    # RubyMine/TeamCity (RM_INFO/TEAMCITY_VERSION), else the console ones.
    def self.choose_reporters(console_reporters, env)
      if env["TM_PID"]
        [RubyMateReporter.new]
      elsif env["RM_INFO"] || env["TEAMCITY_VERSION"]
        [RubyMineReporter.new]
      else
        Array(console_reporters)
      end
    end

    # MiniTest >= 4.2 runs tests through ParallelEach, which has no
    # #length; reporters need it to display totals.
    def self.use_parallel_length_method!
      if Unit::VERSION >= "4.2.0"
        require "minitest/parallel_each"
        ParallelEach.send(:define_method, :length) do
          @queue.length
        end
      end
    end

    # Load a compatibility shim for ActiveSupport < 4.
    def self.use_old_activesupport_fix!
      # Fix: check for ActiveSupport::VERSION, not just ActiveSupport.
      # The top-level constant can exist (e.g. as an autoload stub or a
      # partially loaded namespace) before VERSION does, in which case
      # referencing ActiveSupport::VERSION::MAJOR raises NameError.
      if defined?(ActiveSupport::VERSION) && ActiveSupport::VERSION::MAJOR < 4
        require "minitest/old_activesupport_fix"
      end
    end
  end
end
Check if the VERSION constant is defined.
Fixed #69.
require "minitest/unit"
module MiniTest
  require "minitest/relative_position"
  require "minitest/reporter"
  require "minitest/reporter_runner"
  require "minitest/around_test_hooks"
  require "minitest/test_runner"
  require "minitest/test_recorder"
  require "minitest/extensible_backtrace_filter"

  # Pluggable reporters for MiniTest plus the glue that installs them
  # into MiniTest::Unit across several MiniTest versions.
  module Reporters
    require "minitest/reporters/version"

    # Reporters are autoloaded so only the ones actually used are required
    autoload :DefaultReporter, "minitest/reporters/default_reporter"
    autoload :SpecReporter, "minitest/reporters/spec_reporter"
    autoload :ProgressReporter, "minitest/reporters/progress_reporter"
    autoload :RubyMateReporter, "minitest/reporters/ruby_mate_reporter"
    autoload :RubyMineReporter, "minitest/reporters/rubymine_reporter"
    autoload :GuardReporter, "minitest/reporters/guard_reporter"
    autoload :JUnitReporter, "minitest/reporters/junit_reporter"

    # Entry point: installs the reporter runner, around-test hooks, the
    # backtrace filter, and version-specific compatibility patches.
    def self.use!(console_reporters = ProgressReporter.new, env = ENV, backtrace_filter = ExtensibleBacktraceFilter.default_filter)
      use_runner!(console_reporters, env)
      use_around_test_hooks!
      use_backtrace_filter!(backtrace_filter)
      use_parallel_length_method!
      use_old_activesupport_fix!
    end

    # Replace MiniTest's runner with one that dispatches to our reporters.
    def self.use_runner!(console_reporters, env)
      runner = ReporterRunner.new
      runner.reporters = choose_reporters(console_reporters, env)
      Unit.runner = runner
    end

    # Wrap every test run in AroundTestHooks callbacks via alias chaining.
    def self.use_around_test_hooks!
      Unit::TestCase.class_eval do
        def run_with_hooks(runner)
          AroundTestHooks.before_test(self)
          result = run_without_hooks(runner)
          AroundTestHooks.after_test(self)
          result
        end
        alias_method :run_without_hooks, :run
        alias_method :run, :run_with_hooks
      end
    end

    # Install the backtrace filter; MiniTest < 4.1 lacks the accessor and
    # filter_backtrace hook, so define them first.
    def self.use_backtrace_filter!(backtrace_filter)
      if Unit::VERSION < "4.1.0"
        MiniTest.class_eval do
          class << self
            attr_accessor :backtrace_filter
          end
          def self.filter_backtrace(backtrace)
            backtrace_filter.filter(backtrace)
          end
        end
      end
      MiniTest.backtrace_filter = backtrace_filter
    end

    # Pick reporters based on the environment: TextMate (TM_PID),
    # RubyMine/TeamCity (RM_INFO/TEAMCITY_VERSION), else the console ones.
    def self.choose_reporters(console_reporters, env)
      if env["TM_PID"]
        [RubyMateReporter.new]
      elsif env["RM_INFO"] || env["TEAMCITY_VERSION"]
        [RubyMineReporter.new]
      else
        Array(console_reporters)
      end
    end

    # MiniTest >= 4.2 runs tests through ParallelEach, which has no
    # #length; reporters need it to display totals.
    def self.use_parallel_length_method!
      if Unit::VERSION >= "4.2.0"
        require "minitest/parallel_each"
        ParallelEach.send(:define_method, :length) do
          @queue.length
        end
      end
    end

    # Load a compatibility shim for ActiveSupport < 4. Checking
    # ActiveSupport::VERSION (not just ActiveSupport) avoids a NameError
    # when the constant exists but VERSION is not yet loaded (issue #69).
    def self.use_old_activesupport_fix!
      if defined?(ActiveSupport::VERSION) && ActiveSupport::VERSION::MAJOR < 4
        require "minitest/old_activesupport_fix"
      end
    end
  end
end
|
module BBLib
  # Allows any public setter method to be called during initialization using keyword arguments.
  # Add include BBLib::SimpleInit or prepend BBLib::SimpleInit to classes to add this behavior.
  module SimpleInit
    attr_reader :_init_type

    # Supported initialization modes: :strict raises on unknown keyword
    # arguments, :loose silently ignores them.
    INIT_TYPES = [:strict, :loose].freeze

    def self.included(base)
      base.extend ClassMethods
      base.class_eval do
        # Generated constructor: run the optional simple_setup hook, apply
        # keyword arguments to setters, run the optional simple_init hook,
        # then hand self to the constructor block.
        define_method(:initialize) do |*args, &block|
          send(:simple_setup) if respond_to?(:simple_setup, true)
          _initialize(*args)
          send(:simple_init, *args, &block) if respond_to?(:simple_init, true)
          # Fix: `yield self if block_given?` never fired here because a
          # define_method body does not capture the caller's block via
          # yield/block_given?; call the explicit &block parameter instead.
          block.call(self) if block
        end
      end
    end

    module ClassMethods
      # Without an argument, returns the configured init type (default
      # :strict). With an argument, validates and sets it.
      def init_type(type = nil)
        return @init_type ||= :strict unless type
        raise ArgumentError, "Unknown init type '#{type}'. Must be #{INIT_TYPES.join_terms('or', encapsulate: "'")}." unless INIT_TYPES.include?(type)
        @init_type = type
      end
    end

    protected

    # Applies named args to setters. Raises when required attributes are
    # missing or, in :strict mode, when a keyword has no matching setter.
    def _initialize(*args)
      named = BBLib.named_args(*args)
      if self.class.respond_to?(:_attrs)
        missing = self.class._attrs.map do |method, details|
          details[:options][:required] && !named.include?(method) ? method : nil
        end.compact
        raise ArgumentError, "You are missing the following required #{BBLib.pluralize(missing.size, 'argument')}: #{missing.join_terms}" unless missing.empty?
      end
      named.each do |method, value|
        setter = "#{method}="
        exists = respond_to?(setter)
        raise ArgumentError, "Undefined attribute #{setter} for class #{self.class}." if !exists && self.class.init_type == :strict
        next unless exists
        send(setter, value)
      end
    end
  end
end
Added some minor fixes and added support for cascading init type.
module BBLib
  # Allows any public setter method to be called during initialization using keyword arguments.
  # Add include BBLib::SimpleInit or prepend BBLib::SimpleInit to classes to add this behavior.
  module SimpleInit
    attr_reader :_init_type

    # Supported initialization modes: :strict raises on unknown keyword
    # arguments, :loose silently ignores them.
    INIT_TYPES = [:strict, :loose].freeze

    def self.included(base)
      base.extend ClassMethods
      base.class_eval do
        # Generated constructor: optional hooks simple_setup and
        # simple_preinit run first, keyword args are applied, then
        # simple_init, then the constructor block. The block is invoked via
        # the explicit &block parameter because yield/block_given? do not
        # see the caller's block inside a define_method body.
        define_method(:initialize) do |*args, &block|
          send(:simple_setup) if respond_to?(:simple_setup, true)
          send(:simple_preinit, *args, &block) if respond_to?(:simple_preinit, true)
          _initialize(*args)
          send(:simple_init, *args, &block) if respond_to?(:simple_init, true)
          block.call(self) if block
        end
      end
    end

    module ClassMethods
      # Without an argument, returns the configured init type (inherited
      # from the nearest ancestor, defaulting to :strict). With an
      # argument, validates and sets it.
      def init_type(type = nil)
        return @init_type ||= _super_init_type unless type
        raise ArgumentError, "Unknown init type '#{type}'. Must be #{INIT_TYPES.join_terms('or', encapsulate: "'")}." unless INIT_TYPES.include?(type)
        @init_type = type
      end

      # Cascading init type: walk the ancestor chain and inherit the first
      # ancestor's setting; fall back to :strict.
      def _super_init_type
        ancestors.each do |ancestor|
          next if ancestor == self
          return ancestor.init_type if ancestor.respond_to?(:init_type)
        end
        :strict
      end
    end

    protected

    # Applies named args to setters. Raises when required attributes are
    # missing (and not already set) or, in :strict mode, when a keyword has
    # no matching setter.
    def _initialize(*args)
      named = BBLib.named_args(*args)
      if self.class.respond_to?(:_attrs)
        missing = self.class._attrs.map do |method, details|
          # An attribute that already has a truthy value is not missing
          next if send(method)
          details[:options][:required] && !named.include?(method) ? method : nil
        end.compact
        raise ArgumentError, "You are missing the following required #{BBLib.pluralize(missing.size, 'argument')}: #{missing.join_terms}" unless missing.empty?
      end
      named.each do |method, value|
        setter = "#{method}="
        exists = respond_to?(setter)
        raise ArgumentError, "Undefined attribute #{setter} for class #{self.class}." if !exists && self.class.init_type == :strict
        next unless exists
        send(setter, value)
      end
    end
  end
end
|
module MMS
  # A single MMS alert belonging to a group, renderable as a table row.
  class Resource::Alert < Resource
    attr_accessor :name
    attr_accessor :group
    attr_accessor :type_name
    attr_accessor :event_type_name
    attr_accessor :status
    attr_accessor :acknowledged_until
    attr_accessor :created
    attr_accessor :updated
    attr_accessor :resolved
    attr_accessor :last_notified
    attr_accessor :current_value

    # @param id alert id
    # @param group_id id of the owning group
    # @param data [Hash, nil] optional preloaded alert attributes
    def initialize(id, group_id, data = nil)
      @group = MMS::Resource::Group.new(group_id)
      super id, data
    end

    # Cell values for this alert; order must match .table_header.
    def table_row
      [@group.name, JSON.dump(@current_value), @status, @type_name, @event_type_name, @created, @updated, @resolved, @last_notified]
    end

    # A section is a single row for this resource type.
    def table_section
      [table_row]
    end

    # Column captions matching #table_row.
    def self.table_header
      ['Group', 'Value', 'Status', 'Type', 'Event name', 'Created', 'Updated', 'Resolved', 'Last notified']
    end

    private

    # Fetch the raw alert document from the MMS API.
    def _load(id)
      MMS::Client.instance.get '/groups/' + @group.id + '/alerts/' + id.to_s
    end

    # Populate attributes from an API response Hash; the alert name
    # mirrors its type name.
    def _from_hash(data)
      @type_name = data['typeName']
      @event_type_name = data['eventTypeName']
      @status = data['status']
      @acknowledged_until = data['acknowledgedUntil']
      @created = data['created']
      @updated = data['updated']
      @resolved = data['resolved']
      @last_notified = data['lastNotified']
      @current_value = data['currentValue']
      @name = @type_name
    end
  end
end
Reordered alerts printing
module MMS
  # A single MMS alert belonging to a group, renderable as a table row.
  class Resource::Alert < Resource
    attr_accessor :name
    attr_accessor :group
    attr_accessor :type_name
    attr_accessor :event_type_name
    attr_accessor :status
    attr_accessor :acknowledged_until
    attr_accessor :created
    attr_accessor :updated
    attr_accessor :resolved
    attr_accessor :last_notified
    attr_accessor :current_value

    # @param id alert id
    # @param group_id id of the owning group
    # @param data [Hash, nil] optional preloaded alert attributes
    def initialize(id, group_id, data = nil)
      @group = MMS::Resource::Group.new(group_id)
      super id, data
    end

    # Cell values for this alert; order must match .table_header.
    def table_row
      [@status, @group.name, @type_name, @event_type_name, @created,
       @updated, @resolved, @last_notified, JSON.dump(@current_value)]
    end

    # A section is a single row for this resource type.
    def table_section
      [table_row]
    end

    # Column captions matching #table_row.
    def self.table_header
      ['Status', 'Group', 'Type', 'Event name', 'Created', 'Updated',
       'Resolved', 'Last notified', 'Value']
    end

    private

    # Fetch the raw alert document from the MMS API.
    def _load(id)
      MMS::Client.instance.get '/groups/' + @group.id + '/alerts/' + id.to_s
    end

    # Populate attributes from an API response Hash; the alert name
    # mirrors its type name.
    def _from_hash(data)
      {
        type_name: 'typeName',
        event_type_name: 'eventTypeName',
        status: 'status',
        acknowledged_until: 'acknowledgedUntil',
        created: 'created',
        updated: 'updated',
        resolved: 'resolved',
        last_notified: 'lastNotified',
        current_value: 'currentValue'
      }.each do |attribute, key|
        instance_variable_set(:"@#{attribute}", data[key])
      end
      @name = @type_name
    end
  end
end
|
Mokio Simple Logger - lib
# Mokio simple logger: a thin wrapper around Ruby's Logger that writes to
# log/<env>_mokio.log. The level defaults to Logger::INFO and can be
# overridden via Rails.configuration.mokio_log_level.
class MokioLogger
  # Single shared logger object for all instances.
  # NOTE(review): @@class variables are shared across subclasses; a class
  # instance variable would be safer — kept as-is to preserve behavior.
  @@mokiologger = Logger.new("log/#{Rails.env}_mokio.log")

  # Initialize and set the log level (Rails config value or INFO).
  def initialize
    level = defined?(Rails.configuration.mokio_log_level) ? Rails.configuration.mokio_log_level : Logger::INFO
    @@mokiologger.level = level
  end

  # Log +msg+ at INFO severity.
  def info(msg)
    @@mokiologger.info(msg)
  end

  # Log +msg+ at DEBUG severity.
  # Fix: previously routed to Logger#error by mistake.
  def debug(msg)
    @@mokiologger.debug(msg)
  end

  # Log +msg+ at ERROR severity.
  # Fix: previously routed to Logger#debug by mistake.
  def error(msg)
    @@mokiologger.error(msg)
  end

  # Log +msg+ at WARN severity.
  def warn(msg)
    @@mokiologger.warn(msg)
  end
end
|
module Moose
  module Page
    # Shared browser-interaction helpers for page objects: filling fields,
    # clicking, selecting and waiting on Watir elements.
    module Actions
      include Helpers::Waiter

      # Given an element, set the provided value
      # @param [Watir::Element] locator The element that we want to set a value for
      # @param [String] value The value to set
      def fill_text(locator, value)
        wait_for_element(locator)
        # Text fields respond to #set; otherwise fall back to #send_keys
        meth = locator.respond_to?(:set) ? :set : :send_keys
        locator.send(meth, value)
      end

      # Given a set of date fields (year, month, day) set to a X number of days ago
      # @param [Hash<String->String>] date_fields Hash with :year, :month & :day
      # @param [Integer] days_ago How many days in the past to set this date?
      # @return [Date] The date X days ago
      def fill_date(date_fields, days_ago)
        past_day = Date.today - days_ago
        select_year = past_day.strftime("%Y").to_s
        select(date_fields[:year], select_year)
        select_month = past_day.strftime("%B").to_s
        select(date_fields[:month], select_month)
        # Day is rendered unpadded (e.g. "3", not "03")
        select_day = past_day.strftime("#{past_day.day}").to_s
        select(date_fields[:day], select_day)
        past_day
      end

      # Given a Watir::Element ensure that it is present and click on it
      # @param [Watir::Element] element The element to click_on
      # @return [Boolean] true when successful
      def click_on(element)
        wait_for_element(element)
        # Retried inside wait_until so transient click failures are absorbed
        wait_until do
          element.click
        end
        true
      end

      # Given a locator, click on it and then wait till it disappears
      # @param [Watir::Element] locator The element to click and then wait on
      def click_and_wait(locator)
        click_on(locator)
        wait_while_present(locator)
      end

      # Given a locator, select the provided value
      # @param [Watir::Element] locator The element to select from
      # @param [String] value The value to select
      def select(locator, value)
        locator.select(value)
        # Return the value to show which was selected in cases where it's not
        # clear (ie: select_random or select_last)
        value
      end

      # Given a tab, select it
      # @param [Watir::Element] tab The tab to select
      def select_tab(tab)
        wait_until do
          tab.select!
          tab.selected?
        end
      end

      # Discover the available options in a select element
      # @param [Watir::Element] locator The select that we are examining
      # @param [Boolean] include_blank Should we include the zeroth option in the select?
      # @param [Array<String>] The available selection entries
      # NOTE(review): when include_blank is true the FIRST option is
      # dropped (range 1..-1), which reads as the opposite of the
      # parameter name — confirm intended semantics.
      def options_for_select(locator, include_blank = false)
        range = include_blank ? (1..-1) : (0..-1)
        wait_until { locator.present? }
        if locator.options.length > 0
          locator.options.map(&:text)[range]
        else
          # Fall back to jQuery when Watir reports no options
          jq_cmd = "return $('#{locator.css}').map(function(i, el) { return $(el).text();});"
          browser.execute_script(jq_cmd)[range]
        end
      end

      # Choose randomly in a provided select element
      # @param [Watir::Element] locator The select element to choose from
      def select_random(locator)
        select(locator, options_for_select(locator).sample)
      end

      # Choose the last choice in the provided select selement
      # @param [Watir::Element] locator The select element to choose from
      def select_last(locator)
        select(locator, options_for_select(locator).last)
      end

      # Wait until the element is no longer present
      # @param [Watir::Element] locator The locator to check in on
      def wait_while_present(locator)
        wait_until do
          !locator.present?
        end
      end

      # Wait until the element is present
      # @param [Watir::Element] locator The locator to wait for
      def wait_until_present(locator)
        wait_until do
          locator.present?
        end
      end

      # Wait for the provided element to be present and enabled
      # @param [Watir::Element] locator The locator we want to be present and enabled
      # @param [Integer] attempts How many times we should check to see if present and check to see if enabled
      # @return [Boolean] If the element is present and enabled or not
      def wait_for_element(locator)
        wait_until do
          locator.present? && locator_is_enabled?(locator)
        end
      end

      # Pre-answer the next window.confirm dialog.
      # NOTE(review): interpolates `return ok` / `return cancel` into the
      # JS — bare identifiers, not booleans; presumably `true`/`false`
      # was intended — confirm against the pages under test.
      def murder_dialog(opts)
        return nil unless opts == :ok || opts == :cancel
        browser.execute_script("window.confirm = function() {return #{opts.to_s}}")
      end

      # Click an element inside a modal once its position is stable.
      # NOTE(review): the y coordinate is read twice and compared with
      # itself — this only detects movement between the two reads; confirm
      # this is the intended stability check.
      def click_modal_element(locator)
        wait_until do
          locator.wd.location.y == locator.wd.location.y && locator_is_enabled?(locator)
        end
        click_on(locator)
      end

      private

      # Elements without an #enabled? predicate are treated as enabled.
      def locator_is_enabled?(locator)
        locator.respond_to?(:enabled?) ? locator.enabled? : true
      end

      # Write a message saying that whatever we were looking for wasn't found
      # @param [Watir::Element] locator The element that we failed to find
      def not_found(locator)
        Moose.msg.fail("#{locator} not found")
      end
    end
  end
end
add attach_file
module Moose
  module Page
    # Shared browser-interaction helpers for page objects: filling fields,
    # clicking, selecting, attaching files and waiting on Watir elements.
    module Actions
      include Helpers::Waiter

      # Given an element, set the provided value
      # @param [Watir::Element] locator The element that we want to set a value for
      # @param [String] value The value to set
      def fill_text(locator, value)
        wait_for_element(locator)
        # Text fields respond to #set; otherwise fall back to #send_keys
        meth = locator.respond_to?(:set) ? :set : :send_keys
        locator.send(meth, value)
      end

      # Given a set of date fields (year, month, day) set to a X number of days ago
      # @param [Hash<String->String>] date_fields Hash with :year, :month & :day
      # @param [Integer] days_ago How many days in the past to set this date?
      # @return [Date] The date X days ago
      def fill_date(date_fields, days_ago)
        past_day = Date.today - days_ago
        select_year = past_day.strftime("%Y").to_s
        select(date_fields[:year], select_year)
        select_month = past_day.strftime("%B").to_s
        select(date_fields[:month], select_month)
        # Day is rendered unpadded (e.g. "3", not "03")
        select_day = past_day.strftime("#{past_day.day}").to_s
        select(date_fields[:day], select_day)
        past_day
      end

      # Given a Watir::Element ensure that it is present and click on it
      # @param [Watir::Element] element The element to click_on
      # @return [Boolean] true when successful
      def click_on(element)
        wait_for_element(element)
        # Retried inside wait_until so transient click failures are absorbed
        wait_until do
          element.click
        end
        true
      end

      # Given a locator, click on it and then wait till it disappears
      # @param [Watir::Element] locator The element to click and then wait on
      def click_and_wait(locator)
        click_on(locator)
        wait_while_present(locator)
      end

      # Given a locator, select the provided value
      # @param [Watir::Element] locator The element to select from
      # @param [String] value The value to select
      def select(locator, value)
        locator.select(value)
        # Return the value to show which was selected in cases where it's not
        # clear (ie: select_random or select_last)
        value
      end

      # Given a tab, select it
      # @param [Watir::Element] tab The tab to select
      def select_tab(tab)
        wait_until do
          tab.select!
          tab.selected?
        end
      end

      # Discover the available options in a select element
      # @param [Watir::Element] locator The select that we are examining
      # @param [Boolean] include_blank Should we include the zeroth option in the select?
      # @param [Array<String>] The available selection entries
      # NOTE(review): when include_blank is true the FIRST option is
      # dropped (range 1..-1), which reads as the opposite of the
      # parameter name — confirm intended semantics.
      def options_for_select(locator, include_blank = false)
        range = include_blank ? (1..-1) : (0..-1)
        wait_until { locator.present? }
        if locator.options.length > 0
          locator.options.map(&:text)[range]
        else
          # Fall back to jQuery when Watir reports no options
          jq_cmd = "return $('#{locator.css}').map(function(i, el) { return $(el).text();});"
          browser.execute_script(jq_cmd)[range]
        end
      end

      # Choose randomly in a provided select element
      # @param [Watir::Element] locator The select element to choose from
      def select_random(locator)
        select(locator, options_for_select(locator).sample)
      end

      # Choose the last choice in the provided select selement
      # @param [Watir::Element] locator The select element to choose from
      def select_last(locator)
        select(locator, options_for_select(locator).last)
      end

      # Wait until the element is no longer present
      # @param [Watir::Element] locator The locator to check in on
      def wait_while_present(locator)
        wait_until do
          !locator.present?
        end
      end

      # Wait until the element is present
      # @param [Watir::Element] locator The locator to wait for
      def wait_until_present(locator)
        wait_until do
          locator.present?
        end
      end

      # Wait for the provided element to be present and enabled
      # @param [Watir::Element] locator The locator we want to be present and enabled
      # @param [Integer] attempts How many times we should check to see if present and check to see if enabled
      # @return [Boolean] If the element is present and enabled or not
      def wait_for_element(locator)
        wait_until do
          locator.present? && locator_is_enabled?(locator)
        end
      end

      # Pre-answer the next window.confirm dialog.
      # NOTE(review): interpolates `return ok` / `return cancel` into the
      # JS — bare identifiers, not booleans; presumably `true`/`false`
      # was intended — confirm against the pages under test.
      def murder_dialog(opts)
        return nil unless opts == :ok || opts == :cancel
        browser.execute_script("window.confirm = function() {return #{opts.to_s}}")
      end

      # Click an element inside a modal once its position is stable.
      # NOTE(review): the y coordinate is read twice and compared with
      # itself — this only detects movement between the two reads; confirm
      # this is the intended stability check.
      def click_modal_element(locator)
        wait_until do
          locator.wd.location.y == locator.wd.location.y && locator_is_enabled?(locator)
        end
        click_on(locator)
      end

      # Wait for a file input and set its path.
      # @param [Watir::Element] locator The file input element
      # @param [String] path The local file path to attach
      def attach_file(locator, path)
        wait_for_element(locator)
        locator.set(path)
      end

      # Elements without an #enabled? predicate are treated as enabled.
      # NOTE(review): the `private` section marker present in earlier
      # revisions is gone, so this helper is now public — confirm this is
      # intended.
      def locator_is_enabled?(locator)
        locator.respond_to?(:enabled?) ? locator.enabled? : true
      end
    end
  end
end
|
require 'thor'
require 'yaml'
require 'pp'
module Moritasan
  module Draw
    # Thor CLI for tweeting original text, fixed phrases (phrase.yml) and
    # themed tweets (theme.yml), plus theme management commands.
    class CLI < Thor
      option :tweet, aliases:'-t', required: true, desc:'Tweet TWEET'
      desc 'tweet', 'Tweet original TWEET'
      # Tweet the text given via -t as-is.
      def tweet
        m = Mukuchi.new
        m.d
        m.tweet(options[:tweet])
      end

      option :phraserow, aliases:'-p', desc:'Tweet phrase'
      option :interactive, aliases:'-i', type: :boolean, default: false, desc:'Select phrase by interactive mode'
      desc 'tweetphrase', 'Tweet fixed phrase from phrase.yml'
      # Tweet a fixed phrase, picked interactively or by 1-based row number.
      def tweetphrase
        m = Mukuchi.new
        m.d
        phrase_array = YAML.load_file('phrase.yml')
        if options[:interactive]
          puts 'Please select index you want to tweet'
          phrase_array.each.with_index(1) do |v, i|
            puts "#{i}: #{v}"
          end
          # Keep prompting until an in-range 1-based index is entered
          while true
            selected = STDIN.gets.chomp!.to_i
            if selected <= phrase_array.length
              break
            else
              puts 'Out of range'
            end
          end
          tw = phrase_array[selected - 1]
        else
          row = options[:phraserow].to_i
          if row == 0
            puts "ERROR phrase -p [PHRASEROW] (NOT array index)"
            exit 1
          else
            if row <= phrase_array.length
              tw = phrase_array[row - 1]
            else
              puts 'ERROR Out of range'
              exit 1
            end
          end
        end
        m.tweet(tw)
      end

      option :run, aliases:'-r', default: false, type: :boolean, desc:'Run tweet theme(DEFAULT: dryrun)'
      desc 'tweettheme', 'Tweet themed tweet from theme.yml'
      # Tweet a randomly chosen theme wrapped in the configured
      # prefix/suffix. Without --run it is a dry run that only prints.
      def tweettheme
        m = Mukuchi.new
        m.d
        theme = YAML.load_file('theme.yml')
        max_length = theme['themes'].length
        index = rand(max_length)
        th = theme['themes'][index]['theme']
        tw = theme['words']['prefix'] + th + theme['words']['suffix']
        if options[:run]
          # Record usage count and timestamp back into theme.yml
          count = theme['themes'][index]['count']
          count += 1
          theme['themes'][index]['count'] = count
          last_updated = Time.now.to_i
          theme['themes'][index]['last_updated'] = last_updated
          open('theme.yml', 'w') do |e|
            YAML.dump(theme, e)
          end
          m.tweet(tw)
        else
          puts 'DRYRUN, please add -r or --run option if you want to RUN'
          puts tw
          exit 0
        end
      end

      option :id, aliases:'-i', required: true, desc:'Delete tweet id'
      desc 'tweetdel', 'Delete tweet by id'
      # Delete the tweet with the given id.
      def tweetdel
        m = Mukuchi.new
        m.d
        m.delete_tweet(options[:id])
      end

      option :theme, aliases:'-t', required: true, desc:'Add theme'
      desc 'themeadd', 'Add theme to theme.yml'
      # Add a new theme entry unless it already exists.
      def themeadd
        theme = load_yaml('theme.yml')
        arg = options[:theme]
        # Duplicate is exit
        if duplicate_theme(theme, arg)
          exit 1
        else
          add_theme = { 'theme' => arg, 'count' => 0, 'last_updated' => Time.now.to_i }
          puts "Add theme: #{add_theme}"
          theme['themes'] << add_theme
          open('theme.yml', 'w') do |e|
            YAML.dump(theme, e)
          end
        end
      end

      option :theme, aliases:'-t', required: true, desc:'Delete theme'
      desc 'themedel', 'Delete theme to theme.yml'
      # Remove every theme entry whose values include the given text.
      def themedel
        theme = load_yaml('theme.yml')
        arg = options[:theme]
        # Duplicate is delete
        if duplicate_theme(theme, arg)
          # Fix: Array#delete inside #each mutates the array being
          # iterated and can skip entries; reject! removes all matches
          # safely in one pass.
          theme['themes'].reject! { |t| t.has_value?(arg) }
          open('theme.yml', 'w') do |e|
            YAML.dump(theme, e)
          end
        end
      end

      option :word, aliases:'-w', required: true, desc:'search word'
      desc 'themert', 'Search tweet and retweet'
      # Search for tweets matching the word and retweet them.
      def themert
        m = Mukuchi.new
        m.d
        m.search_and_retweet(options[:word])
      end

      no_tasks do
        # Load a YAML file.
        # Fix: previously ignored +file+ and always read 'theme.yml'.
        def load_yaml(file)
          YAML.load_file(file)
        end

        # Check duplicate: true (with a message) when any theme entry
        # carries +arg+ among its values.
        def duplicate_theme(theme, arg)
          theme['themes'].each do |t|
            if t.has_value?(arg)
              puts "Duplicate theme: #{arg}"
              return true
            end
          end
          return false
        end
      end
    end
  end
end
yaml読み込み部分を共通化
require 'thor'
require 'yaml'
require 'pp'
module Moritasan
  module Draw
    # Thor CLI for tweeting original text, fixed phrases (phrase.yml) and
    # themed tweets (theme.yml), plus theme management commands.
    class CLI < Thor
      # Shared data-file names
      THEME_YML = 'theme.yml'
      PHRASE_YML = 'phrase.yml'

      option :tweet, aliases:'-t', required: true, desc:'Tweet TWEET'
      desc 'tweet', 'Tweet original TWEET'
      # Tweet the text given via -t as-is.
      def tweet
        m = Mukuchi.new
        m.d
        m.tweet(options[:tweet])
      end

      option :phraserow, aliases:'-p', desc:'Tweet phrase'
      option :interactive, aliases:'-i', type: :boolean, default: false, desc:'Select phrase by interactive mode'
      desc 'tweetphrase', 'Tweet fixed phrase from phrase.yml'
      # Tweet a fixed phrase, picked interactively or by 1-based row number.
      def tweetphrase
        m = Mukuchi.new
        m.d
        phrase_array = load_yaml(PHRASE_YML)
        if options[:interactive]
          puts 'Please select index you want to tweet'
          phrase_array.each.with_index(1) do |v, i|
            puts "#{i}: #{v}"
          end
          # Keep prompting until an in-range 1-based index is entered
          while true
            selected = STDIN.gets.chomp!.to_i
            if selected <= phrase_array.length
              break
            else
              puts 'Out of range'
            end
          end
          tw = phrase_array[selected - 1]
        else
          row = options[:phraserow].to_i
          if row == 0
            puts "ERROR phrase -p [PHRASEROW] (NOT array index)"
            exit 1
          else
            if row <= phrase_array.length
              tw = phrase_array[row - 1]
            else
              puts 'ERROR Out of range'
              exit 1
            end
          end
        end
        m.tweet(tw)
      end

      option :run, aliases:'-r', default: false, type: :boolean, desc:'Run tweet theme(DEFAULT: dryrun)'
      desc 'tweettheme', 'Tweet themed tweet from theme.yml'
      # Tweet a randomly chosen theme wrapped in the configured
      # prefix/suffix. Without --run it is a dry run that only prints.
      def tweettheme
        m = Mukuchi.new
        m.d
        theme = load_yaml(THEME_YML)
        # Random number
        max_length = theme['themes'].length
        index = rand(max_length)
        # Concat theme pre + suf
        th = theme['themes'][index]['theme']
        tw = theme['words']['prefix'] + th + theme['words']['suffix']
        if options[:run]
          # Update count and last_update
          count = theme['themes'][index]['count']
          count += 1
          theme['themes'][index]['count'] = count
          last_updated = Time.now.to_i
          theme['themes'][index]['last_updated'] = last_updated
          write_yaml(THEME_YML, theme)
          m.tweet(tw)
        else
          puts 'DRYRUN, please add -r or --run option if you want to RUN'
          puts tw
          exit 0
        end
      end

      option :id, aliases:'-i', required: true, desc:'Delete tweet id'
      desc 'tweetdel', 'Delete tweet by id'
      # Delete the tweet with the given id.
      def tweetdel
        m = Mukuchi.new
        m.d
        m.delete_tweet(options[:id])
      end

      option :theme, aliases:'-t', required: true, desc:'Add theme'
      desc 'themeadd', 'Add theme to theme.yml'
      # Add a new theme entry unless it already exists.
      def themeadd
        theme = load_yaml(THEME_YML)
        arg = options[:theme]
        # Duplicate is exit
        if duplicate_theme(theme, arg)
          exit 1
        else
          add_theme = { 'theme' => arg, 'count' => 0, 'last_updated' => Time.now.to_i }
          puts "Add theme: #{add_theme}"
          theme['themes'] << add_theme
          write_yaml(THEME_YML, theme)
        end
      end

      option :theme, aliases:'-t', required: true, desc:'Delete theme'
      desc 'themedel', 'Delete theme to theme.yml'
      # Remove every theme entry whose values include the given text.
      def themedel
        theme = load_yaml(THEME_YML)
        arg = options[:theme]
        # Duplicate is delete
        if duplicate_theme(theme, arg)
          # Fix: Array#delete inside #each mutates the array being
          # iterated and can skip entries; reject! visits each entry
          # exactly once and removes all matches.
          theme['themes'].reject! do |t|
            next false unless t.has_value?(arg)
            puts "Delete theme: #{t}"
            true
          end
          write_yaml(THEME_YML, theme)
        end
      end

      option :word, aliases:'-w', required: true, desc:'search word'
      desc 'themert', 'Search tweet and retweet'
      # Search for tweets matching the word and retweet them.
      def themert
        m = Mukuchi.new
        m.d
        m.search_and_retweet(options[:word])
      end

      no_tasks do
        # Load a YAML file.
        def load_yaml(file)
          YAML.load_file(file)
        end

        # Serialize +theme+ back to +file+ as YAML.
        def write_yaml(file, theme)
          open(file, 'w') do |e|
            YAML.dump(theme, e)
          end
        end

        # Check duplicate: true (with a message) when any theme entry
        # carries +arg+ among its values.
        def duplicate_theme(theme, arg)
          theme['themes'].each do |t|
            if t.has_value?(arg)
              puts "Duplicate theme: #{arg}"
              return true
            end
          end
          return false
        end
      end
    end
  end
end
|
module PathSeeds
  # Look up a Location by its code name. find_by! raises when no record
  # matches (presumably ActiveRecord::RecordNotFound — Location looks like
  # an ActiveRecord model; confirm).
  def PathSeeds.l(name)
    Location.find_by!(code_name:name)
  end

  # Seed data for walking paths between campus locations.
  # NOTE: the database lookups below run when this file is loaded, so the
  # referenced Location rows must already exist; :photo is optional.
  PATHS = [
    # Real paths
    {
      description: 'Exit SSC and go down stairs',
      start_location_id: PathSeeds.l('SSC_e1').id,
      end_location_id: PathSeeds.l('FellowsGarden_k1').id,
      photo: 'paths/SSC_e1_FellowsGarden_k1.jpg'
    },
    {
      description: 'Exit SSC and go to the road in front of you',
      start_location_id: PathSeeds.l('SSC_e2').id,
      end_location_id: PathSeeds.l('FellowsGarden_k1').id
    },
    {
      description: 'Walk toward the physics building',
      start_location_id: PathSeeds.l('FellowsGarden_k1').id,
      end_location_id: PathSeeds.l('FellowsGarden_k2').id,
      photo: 'paths/FellowsGarden_k1_FellowsGarden_k2.jpg'
    },
    {
      description: 'Walk along the Fellows Garden',
      start_location_id: PathSeeds.l('FellowsGarden_k2').id,
      end_location_id: PathSeeds.l('FellowsGarden_k3').id,
      photo: 'paths/FellowsGarden_k2_FellowsGarden_k3.jpg'
    },
    {
      description: 'Climb up the hill toward Volen',
      start_location_id: PathSeeds.l('FellowsGarden_k3').id,
      end_location_id: PathSeeds.l('Volen_k1').id,
      photo: 'paths/FellowsGarden_k3_Volen_k1.jpg'
    },
    {
      description: 'Volen is on your right. Walk a little closer to Volen',
      start_location_id: PathSeeds.l('Volen_k1').id,
      end_location_id: PathSeeds.l('Volen_k2').id,
      photo: 'paths/Volen_k1_Volen_k2.jpg'
    },
    {
      description: 'Entrance to Volen is right ahead',
      start_location_id: PathSeeds.l('Volen_k2').id,
      end_location_id: PathSeeds.l('Volen_e1').id,
      photo: 'paths/Volen_k2_Volen_e1.jpg'
    }
  ]
end
add seed path volen to ssc
module PathSeeds
  # Look up a Location record by its code name; raises if none exists.
  def PathSeeds.l(name)
    Location.find_by!(code_name:name)
  end

  # Build one seed-path attribute hash from location code names.
  # +photo+ is optional and omitted from the hash when not given.
  def PathSeeds.path(description, from, to, photo = nil)
    attrs = {
      description: description,
      start_location_id: l(from).id,
      end_location_id: l(to).id
    }
    attrs[:photo] = photo if photo
    attrs
  end

  PATHS = [
    # Real paths
    path('Exit SSC and go down stairs',
         'SSC_e1', 'FellowsGarden_k1',
         'paths/SSC_e1_FellowsGarden_k1.jpg'),
    path('Exit SSC and go to the road in front of you',
         'SSC_e2', 'FellowsGarden_k1'),
    path('Walk toward the physics building',
         'FellowsGarden_k1', 'FellowsGarden_k2',
         'paths/FellowsGarden_k1_FellowsGarden_k2.jpg'),
    path('Walk along the Fellows Garden',
         'FellowsGarden_k2', 'FellowsGarden_k3',
         'paths/FellowsGarden_k2_FellowsGarden_k3.jpg'),
    path('Climb up the hill toward Volen',
         'FellowsGarden_k3', 'Volen_k1',
         'paths/FellowsGarden_k3_Volen_k1.jpg'),
    path('Volen is on your right. Walk a little closer to Volen',
         'Volen_k1', 'Volen_k2',
         'paths/Volen_k1_Volen_k2.jpg'),
    path('Entrance to Volen is right ahead',
         'Volen_k2', 'Volen_e1',
         'paths/Volen_k2_Volen_e1.jpg'),

    # Reverse directions (no photos yet)
    path('Climb up the stairs and SSC is right ahead',
         'FellowsGarden_k1', 'SSC_e1'),
    path("Don't go up, entrance to SSC is on the right of the stairs.",
         'FellowsGarden_k1', 'SSC_e2'),
    path('Walk pass the physics building and SSC will be on your left',
         'FellowsGarden_k2', 'FellowsGarden_k1'),
    path('Walk along the Fellows Garden',
         'FellowsGarden_k3', 'FellowsGarden_k2'),
    path('Walk down the hill, facing the blue building',
         'Volen_k1', 'FellowsGarden_k3'),
    path('Walk toward the down path once you have left the building',
         'Volen_k2', 'Volen_k1'),
    path('Exit Volen',
         'Volen_e1', 'Volen_k2')
  ]
end
class Mothership
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = "0.0.1".freeze
end
Bump version to 0.0.2.
class Mothership
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = "0.0.2".freeze
end
|
require 'mspec/runner/mspec'
require 'mspec/runner/actions/tally'
require 'rbconfig'
class SpecGuard
  # Hash of guard description => list of omitted example descriptions,
  # accumulated while running in a reporting mode.
  def self.report
    @report ||= Hash.new { |h,k| h[k] = [] }
  end

  def self.clear
    @report = nil
  end

  # Print the accumulated guard report; registered with MSpec as a
  # :finish action when reporting is active.
  def self.finish
    report.keys.sort.each do |key|
      desc = report[key]
      size = desc.size
      spec = size == 1 ? "spec" : "specs"
      print "\n\n#{size} #{spec} omitted by guard: #{key}:\n"
      desc.each { |description| print "\n", description; }
    end

    print "\n\n"
  end

  # Guard names selected for :report_on mode.
  def self.guards
    @guards ||= []
  end

  def self.clear_guards
    @guards = []
  end

  @@ruby_version_override = nil

  # Override the version used by .ruby_version (useful when testing the
  # guards themselves).
  def self.ruby_version_override=(version)
    @@ruby_version_override = version
  end

  def self.ruby_version_override
    @@ruby_version_override
  end

  # Returns a partial Ruby version string based on +which+. For example,
  # if RUBY_VERSION = 8.2.3 and RUBY_PATCHLEVEL = 71:
  #
  #   :major => "8"
  #   :minor => "8.2"
  #   :tiny  => "8.2.3"
  #   :teeny => "8.2.3"
  #   :full  => "8.2.3.71"
  def self.ruby_version(which = :minor)
    case which
    when :major
      n = 1
    when :minor
      n = 2
    when :tiny, :teeny
      n = 3
    else
      n = 4
    end

    patch = RUBY_PATCHLEVEL.to_i
    patch = 0 if patch < 0
    version = "#{ruby_version_override || RUBY_VERSION}.#{patch}"
    version.split('.')[0,n].join('.')
  end

  attr_accessor :name, :parameters

  def initialize(*args)
    self.parameters = @args = args
  end

  # Decide whether the guarded block should run. Honors :unguarded,
  # :verify and reporting modes; +invert+ flips the match result.
  def yield?(invert=false)
    return true if MSpec.mode? :unguarded

    allow = match? ^ invert

    if not allow and reporting?
      MSpec.guard
      MSpec.register :finish, SpecGuard
      MSpec.register :add, self
      return true
    elsif MSpec.mode? :verify
      return true
    end

    allow
  end

  def ===(other)
    true
  end

  def reporting?
    MSpec.mode?(:report) or
      (MSpec.mode?(:report_on) and SpecGuard.guards.include?(name))
  end

  def report_key
    "#{name} #{parameters.join(", ")}"
  end

  def record(description)
    SpecGuard.report[report_key] << description
  end

  def add(example)
    record example.description
    MSpec.retrieve(:formatter).tally.counter.guards!
  end

  def unregister
    MSpec.unguard
    MSpec.unregister :add, self
  end

  # True when RUBY_NAME matches any of the given implementation symbols.
  def implementation?(*args)
    args.any? do |name|
      !!case name
        when :rubinius
          RUBY_NAME =~ /^rbx/
        when :ruby
          RUBY_NAME =~ /^ruby/
        when :jruby
          RUBY_NAME =~ /^jruby/
        when :ironruby
          RUBY_NAME =~ /^ironruby/
        when :macruby
          RUBY_NAME =~ /^macruby/
        when :maglev
          RUBY_NAME =~ /^maglev/
        else
          false
        end
    end
  end

  def standard?
    implementation? :ruby
  end

  def windows?(sym, key)
    sym == :windows && !key.match(/(mswin|mingw)/).nil?
  end

  def platform?(*args)
    args.any? do |platform|
      if platform != :java && RUBY_PLATFORM.match('java') && os?(platform)
        true
      else
        RUBY_PLATFORM.match(platform.to_s) || windows?(platform, RUBY_PLATFORM)
      end
    end
  end

  # True when the machine word size in bits equals +size+.
  def wordsize?(size)
    size == 8 * 1.size
  end

  def os?(*oses)
    oses.any? do |os|
      # Fix: use non-destructive #downcase. The original #downcase! mutated
      # the shared RbConfig::CONFIG['host_os'] string in place.
      host_os = (RbConfig::CONFIG['host_os'] || RUBY_PLATFORM).downcase
      host_os.match(os.to_s) || windows?(os, host_os)
    end
  end

  def match?
    implementation?(*@args) or platform?(*@args)
  end
end
Test for winapi in platform name.
require 'mspec/runner/mspec'
require 'mspec/runner/actions/tally'
require 'rbconfig'
class SpecGuard
  # Hash of guard description => list of omitted example descriptions,
  # accumulated while running in a reporting mode.
  def self.report
    @report ||= Hash.new { |h,k| h[k] = [] }
  end

  def self.clear
    @report = nil
  end

  # Print the accumulated guard report; registered with MSpec as a
  # :finish action when reporting is active.
  def self.finish
    report.keys.sort.each do |key|
      desc = report[key]
      size = desc.size
      spec = size == 1 ? "spec" : "specs"
      print "\n\n#{size} #{spec} omitted by guard: #{key}:\n"
      desc.each { |description| print "\n", description; }
    end

    print "\n\n"
  end

  # Guard names selected for :report_on mode.
  def self.guards
    @guards ||= []
  end

  def self.clear_guards
    @guards = []
  end

  @@ruby_version_override = nil

  # Override the version used by .ruby_version (useful when testing the
  # guards themselves).
  def self.ruby_version_override=(version)
    @@ruby_version_override = version
  end

  def self.ruby_version_override
    @@ruby_version_override
  end

  # Returns a partial Ruby version string based on +which+. For example,
  # if RUBY_VERSION = 8.2.3 and RUBY_PATCHLEVEL = 71:
  #
  #   :major => "8"
  #   :minor => "8.2"
  #   :tiny  => "8.2.3"
  #   :teeny => "8.2.3"
  #   :full  => "8.2.3.71"
  def self.ruby_version(which = :minor)
    case which
    when :major
      n = 1
    when :minor
      n = 2
    when :tiny, :teeny
      n = 3
    else
      n = 4
    end

    patch = RUBY_PATCHLEVEL.to_i
    patch = 0 if patch < 0
    version = "#{ruby_version_override || RUBY_VERSION}.#{patch}"
    version.split('.')[0,n].join('.')
  end

  attr_accessor :name, :parameters

  def initialize(*args)
    self.parameters = @args = args
  end

  # Decide whether the guarded block should run. Honors :unguarded,
  # :verify and reporting modes; +invert+ flips the match result.
  def yield?(invert=false)
    return true if MSpec.mode? :unguarded

    allow = match? ^ invert

    if not allow and reporting?
      MSpec.guard
      MSpec.register :finish, SpecGuard
      MSpec.register :add, self
      return true
    elsif MSpec.mode? :verify
      return true
    end

    allow
  end

  def ===(other)
    true
  end

  def reporting?
    MSpec.mode?(:report) or
      (MSpec.mode?(:report_on) and SpecGuard.guards.include?(name))
  end

  def report_key
    "#{name} #{parameters.join(", ")}"
  end

  def record(description)
    SpecGuard.report[report_key] << description
  end

  def add(example)
    record example.description
    MSpec.retrieve(:formatter).tally.counter.guards!
  end

  def unregister
    MSpec.unguard
    MSpec.unregister :add, self
  end

  # True when RUBY_NAME matches any of the given implementation symbols.
  def implementation?(*args)
    args.any? do |name|
      !!case name
        when :rubinius
          RUBY_NAME =~ /^rbx/
        when :ruby
          RUBY_NAME =~ /^ruby/
        when :jruby
          RUBY_NAME =~ /^jruby/
        when :ironruby
          RUBY_NAME =~ /^ironruby/
        when :macruby
          RUBY_NAME =~ /^macruby/
        when :maglev
          RUBY_NAME =~ /^maglev/
        else
          false
        end
    end
  end

  def standard?
    implementation? :ruby
  end

  def windows?(sym, key)
    sym == :windows && !key.match(/(mswin|mingw|winapi)/).nil?
  end

  def platform?(*args)
    args.any? do |platform|
      if platform != :java && RUBY_PLATFORM.match('java') && os?(platform)
        true
      else
        RUBY_PLATFORM.match(platform.to_s) || windows?(platform, RUBY_PLATFORM)
      end
    end
  end

  # True when the machine word size in bits equals +size+.
  def wordsize?(size)
    size == 8 * 1.size
  end

  def os?(*oses)
    oses.any? do |os|
      # Fix: use non-destructive #downcase. The original #downcase! mutated
      # the shared RbConfig::CONFIG['host_os'] string in place.
      host_os = (RbConfig::CONFIG['host_os'] || RUBY_PLATFORM).downcase
      host_os.match(os.to_s) || windows?(os, host_os)
    end
  end

  def match?
    implementation?(*@args) or platform?(*@args)
  end
end
|
# frozen_string_literal: true
module MuchMixin
  # Gem version string (frozen via the file's frozen_string_literal magic comment).
  VERSION = "0.0.1"
end
Bump much-mixin version to 0.2.3.
This version matches the latest version of MuchPlugin as this
is equivalent to MuchPlugin, just renamed to MuchMixin.
* rename much-plugin to much-mixin #5
# frozen_string_literal: true
module MuchMixin
  # Gem version string (frozen via the file's frozen_string_literal magic comment).
  VERSION = "0.2.3"
end
|
module MultiMail
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = "0.0.2".freeze
end
Bump version to 0.1.0.
module MultiMail
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = "0.1.0".freeze
end
|
module Munin
  # Guarded so reloading this file never re-assigns the constant.
  unless defined?(::Munin::VERSION)
    VERSION = '0.2.3'.freeze
  end
end
Version bump: 0.2.4
module Munin
  # Guarded so reloading this file never re-assigns the constant.
  unless defined?(::Munin::VERSION)
    VERSION = '0.2.4'.freeze
  end
end
|
require_relative 'myfitnesspal_stats/version'
require_relative 'myfitnesspal_stats/account'
require 'net/http'
require 'open-uri'
require 'yaml'
require 'mechanize'
# Possible gems to use:
# - markaby for generating html
# - RedCloth for generating html
# - hpricot or searching within html documents
# - REXML for parsing XML
module MyfitnesspalStats
# Planned domain classes, kept commented out until implemented.
=begin
class Day
end
class Food
end
class Weight
end
=end
end
Add a new client.rb file and define the scrape method.
require_relative 'myfitnesspal_stats/version'
require_relative 'myfitnesspal_stats/account'
require_relative 'myfitnesspal_stats/client'
require 'mechanize'
# Possible gems to use:
# - markaby for generating html
# - RedCloth for generating html
# - hpricot or searching within html documents
# - REXML for parsing XML
=begin
class Nutrition_data
end
class Weight
end
class Diary
end
=end
module MyfitnesspalStats
  class << self
    # Entry point: delegate scraping to the Client.
    def scrape
      Client.run
    end
  end
end
module Navigatrix
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = "0.1.0".freeze
end
Version bump
module Navigatrix
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = "0.1.1".freeze
end
|
module NoBrainer
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = '0.2'.freeze
end
Version Bump
module NoBrainer
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = '0.3'.freeze
end
|
module Notably
  module Notifiable
    # Mixin backed by a Redis sorted set: members are Marshal-dumped
    # notification objects scored by UNIX timestamp.

    # Notifications newer than the last-read marker, newest first.
    def unread_notifications
      parse_notifications(Notably.config.redis.zrevrangebyscore(notification_key, Time.now.to_i, last_notification_read_at))
    end

    # Like #unread_notifications, but atomically advances the last-read
    # marker to now (GETSET returns the previous value).
    def unread_notifications!
      parse_notifications(Notably.config.redis.zrevrangebyscore(notification_key, Time.now.to_i, Notably.config.redis.getset(last_notification_read_at_key, Time.now.to_i)))
    end

    # All notifications up to now, newest first.
    def notifications
      parse_notifications(Notably.config.redis.zrevrangebyscore(notification_key, Time.now.to_i, 0))
    end

    # Notifications scored at or after +time+, newest first.
    def notifications_since(time)
      parse_notifications(Notably.config.redis.zrevrangebyscore(notification_key, Time.now.to_i, time.to_i))
    end

    # Notifications at or before the last-read marker, newest first.
    def read_notifications
      parse_notifications(Notably.config.redis.zrevrangebyscore(notification_key, last_notification_read_at, 0))
    end

    # Mark everything read by storing the current time.
    # NOTE(review): redis.set returns a status string ("OK"), not a list,
    # so passing its result to parse_notifications looks wrong — confirm
    # the intended return value.
    def read_notifications!
      parse_notifications(Notably.config.redis.set(last_notification_read_at_key, Time.now.to_i))
    end

    # UNIX timestamp of the last-read marker; 0 when the key is missing.
    # NOTE(review): `.to_i` never returns nil, so the `|| 0` is dead code.
    def last_notification_read_at
      Notably.config.redis.get(last_notification_read_at_key).to_i || 0
    end

    # Add a Marshal-dumped notification scored by +time+.
    def push_notification(notification, time)
      Notably.config.redis.zadd(notification_key, time, notification)
    end

    # Remove a notification member from the sorted set.
    def delete_notification(notification)
      Notably.config.redis.zrem(notification_key, notification)
    end

    private

    # Sorted-set key, namespaced by class and record id.
    def notification_key
      "notably:notifications:#{self.class}:#{self.id}"
    end

    # Key of the last-read timestamp marker.
    def last_notification_read_at_key
      "notably:last_read_at:#{self.class}:#{self.id}"
    end

    # Deserialize raw sorted-set members (Marshal dumps) back into objects.
    def parse_notifications(notifications)
      notifications.collect { |n| Marshal.load(n) }
    end
  end
end
Clean up notifiable
module Notably
  module Notifiable
    # Mixin backed by a Redis sorted set: members are Marshal-dumped
    # notification objects scored by UNIX timestamp.

    # All notifications up to now, newest first.
    def notifications
      parse_notifications(Notably.config.redis.zrevrangebyscore(notification_key, Time.now.to_i, 0))
    end

    # Notifications scored at or after +time+, newest first.
    def notifications_since(time)
      parse_notifications(Notably.config.redis.zrevrangebyscore(notification_key, Time.now.to_i, time.to_i))
    end

    # Notifications newer than the last-read marker.
    def unread_notifications
      notifications_since(last_notification_read_at)
    end

    # Like #unread_notifications, but atomically advances the last-read
    # marker to now (GETSET returns the previous value).
    def unread_notifications!
      notifications_since(Notably.config.redis.getset(last_notification_read_at_key, Time.now.to_i))
    end

    # Notifications at or before the last-read marker, newest first.
    def read_notifications
      parse_notifications(Notably.config.redis.zrevrangebyscore(notification_key, last_notification_read_at, 0))
    end

    # Mark everything read by storing the current time.
    # NOTE(review): redis.set returns a status string ("OK"), not a list,
    # so passing its result to parse_notifications looks wrong — confirm
    # the intended return value.
    def read_notifications!
      parse_notifications(Notably.config.redis.set(last_notification_read_at_key, Time.now.to_i))
    end

    # UNIX timestamp of the last-read marker; 0 when the key is missing.
    def last_notification_read_at
      Notably.config.redis.get(last_notification_read_at_key).to_i
    end

    # Sorted-set key, namespaced by class and record id.
    def notification_key
      "notably:notifications:#{self.class}:#{self.id}"
    end

    # Key of the last-read timestamp marker.
    def last_notification_read_at_key
      "notably:last_read_at:#{self.class}:#{self.id}"
    end

    private

    # Deserialize raw sorted-set members (Marshal dumps) back into objects.
    def parse_notifications(notifications)
      notifications.collect { |n| Marshal.load(n) }
    end
  end
end
# Requirements
# =======================================================================
# Stdlib
# -----------------------------------------------------------------------
# Deps
# -----------------------------------------------------------------------
# Project / Package
# -----------------------------------------------------------------------
# Refinements
# =======================================================================
require 'nrser/refinements'
using NRSER
# Definitions
# =======================================================================
module NRSER::Types

  # Tuple type — an array of fixed length where the entry at each index
  # must satisfy the type declared for that index (any of which may be
  # {NRSER::Types::ANY}).
  #
  class TupleType < NRSER::Types::ArrayType

    # Construct a new `TupleType` from one type (or type-makeable value)
    # per tuple slot.
    def initialize *types, **options
      super **options
      @types = types.map { |t| NRSER::Types.make t }
    end # #initialize


    # Membership test: `value` must pass the array test, have exactly as
    # many entries as declared slot types, and each entry must satisfy
    # the type at its index.
    #
    # @param [Object] value
    # @return [Boolean]
    #
    def test value
      return false unless super( value ) && value.length == @types.length

      @types.zip( value ).all? { |type, entry| type.test entry }
    end # #test


    # @return [Boolean]
    #   `true` when *every* slot type can load values from strings, in
    #   which case the tuple itself can.
    #
    def has_from_s?
      @types.all? { |type| type.has_from_s? }
    end # #has_from_s?


    # Load each string split out by {NRSER::Types::ArrayType#from_s}
    # through the `#from_s` of the type at the corresponding index.
    #
    # @param [Array<String>] strings
    # @return [Array]
    #
    def items_from_strings strings
      @types.zip( strings ).map { |type, string| type.from_s string }
    end

  end # class TupleType


  # Construct a {TupleType}.
  #
  # @param [Array] types
  #   One type per tuple slot.
  # @return [TupleType]
  #
  def self.tuple *types, **options
    TupleType.new( *types, **options )
  end # .tuple

end # module NRSER::Types
add default name to TupleType
# Requirements
# =======================================================================
# Stdlib
# -----------------------------------------------------------------------
# Deps
# -----------------------------------------------------------------------
# Project / Package
# -----------------------------------------------------------------------
# Refinements
# =======================================================================
require 'nrser/refinements'
using NRSER
# Definitions
# =======================================================================
module NRSER::Types

  # Tuple type — an array of fixed length where the entry at each index
  # must satisfy the type declared for that index (any of which may be
  # {NRSER::Types::ANY}).
  #
  class TupleType < NRSER::Types::ArrayType

    # Construct a new `TupleType` from one type (or type-makeable value)
    # per tuple slot.
    def initialize *types, **options
      super **options
      @types = types.map { |t| NRSER::Types.make t }
    end # #initialize


    # Default name: slot type names wrapped in square brackets, like
    # `[Integer, String]`.
    def default_name
      "[#{ @types.map( &:name ).join( ', ' ) }]"
    end


    # Membership test: `value` must pass the array test, have exactly as
    # many entries as declared slot types, and each entry must satisfy
    # the type at its index.
    #
    # @param [Object] value
    # @return [Boolean]
    #
    def test value
      return false unless super( value ) && value.length == @types.length

      @types.zip( value ).all? { |type, entry| type.test entry }
    end # #test


    # @return [Boolean]
    #   `true` when *every* slot type can load values from strings, in
    #   which case the tuple itself can.
    #
    def has_from_s?
      @types.all? { |type| type.has_from_s? }
    end # #has_from_s?


    # Load each string split out by {NRSER::Types::ArrayType#from_s}
    # through the `#from_s` of the type at the corresponding index.
    #
    # @param [Array<String>] strings
    # @return [Array]
    #
    def items_from_strings strings
      @types.zip( strings ).map { |type, string| type.from_s string }
    end

  end # class TupleType


  # Construct a {TupleType}.
  #
  # @param [Array] types
  #   One type per tuple slot.
  # @return [TupleType]
  #
  def self.tuple *types, **options
    TupleType.new( *types, **options )
  end # .tuple

end # module NRSER::Types
|
# -*- coding: utf-8 -*-
require 'rspec/core/formatters/base_text_formatter'
class NyanCatFormatter < RSpec::Core::Formatters::BaseTextFormatter
  ESC = "\e["
  NND = "#{ESC}0m"
  PASS = '='
  FAIL = '*'
  ERROR = '!'
  PENDING = '·'

  attr_reader :title, :current, :example_results, :color_index

  def start(example_count)
    super(example_count)
    @current, @color_index = 0,0
    # Initialize the counters so nyan_cat can safely compare them before
    # any pending/failed example has been seen.
    @failure_count = 0
    @pending_count = 0
    @bar_length = 70
    @example_results = []
  end

  def example_passed(example)
    super(example)
    tick PASS
  end

  def example_pending(example)
    super(example)
    # Fix: was `=+1` (assignment of positive 1), which froze the count at 1.
    @pending_count += 1
    tick PENDING
  end

  def example_failed(example)
    super(example)
    # Fix: was `=+1` (assignment of positive 1), which froze the count at 1.
    @failure_count += 1
    tick FAIL
  end

  def start_dump
    @current = @example_count
  end

  def dump_summary(duration, example_count, failure_count, pending_count)
    dump_profile if profile_examples? && failure_count == 0
    summary = "\nNyan Cat flew #{format_seconds(duration)} seconds".split(//).map { |c| rainbowify(c) }
    output.puts summary.join
    output.puts colorise_summary(summary_line(example_count, failure_count, pending_count))
    dump_commands_to_rerun_failed_examples
  end

  def dump_failures
    # noop
  end

  # Increments the example count and displays the current progress
  #
  # Returns nothing
  def tick(mark = PASS)
    @example_results << mark
    @current = (@current > @example_count) ? @example_count : @current + 1
    @title = " #{current}/#{example_count}"
    dump_progress
  end

  # Creates a rainbow trail
  #
  # Returns the sprintf format of the Nyan cat
  def nyan_trail
    width = percentage * @bar_length / 100
    marker = @example_results.map{ |mark| highlight(mark) }.join
    sprintf("%s#{nyan_cat}%s", marker, " " * (@bar_length - width) )
  end

  # Calculates the percentage completed any given point
  #
  # Returns Fixnum of the percentage
  def percentage
    @example_count.zero? ? 100 : @current * 100 / @example_count
  end

  # Ascii Nyan Cat. If tests are complete, Nyan Cat goes to sleep. If
  # there are failing or pending examples, Nyan Cat is concerned.
  #
  # Returns String Nyan Cat
  def nyan_cat
    if @failure_count > 0 || @pending_count > 0
      '~|_(o.o)'
    elsif (@current == @example_count)
      '~|_(-.-)'
    else
      '~|_(^.^)'
    end
  end

  # Displays the current progress in all Nyan Cat glory
  #
  def dump_progress
    max_width = 80
    line = sprintf("%-8s %s", @title[0,(7)] + ":", nyan_trail)
    tail = (@current == @example_count) ? "\n" : "\r"
    if line.length == max_width - 1
      output.print line + tail
      output.flush
    elsif line.length >= max_width
      @bar_length = [@bar_length - (line.length - max_width + 1), 0].max
      @bar_length == 0 ? output.print( rainbowify(line + tail) ) : dump_progress
    else
      @bar_length += max_width - line.length + 1
      dump_progress
    end
  end

  # Colorizes the string with rainbow colors of the rainbow
  #
  def rainbowify(string)
    c = colors[@color_index % colors.size]
    @color_index += 1
    "#{ESC}38;5;#{c}m#{string}#{NND}"
  end

  # Calculates the colors of the rainbow
  #
  def colors
    @colors ||= (0...(6 * 7)).map do |n|
      pi_3 = Math::PI / 3
      n *= 1.0 / 6
      r = (3 * Math.sin(n ) + 3).to_i
      g = (3 * Math.sin(n + 2 * pi_3) + 3).to_i
      b = (3 * Math.sin(n + 4 * pi_3) + 3).to_i
      36 * r + 6 * g + b + 16
    end
  end

  # Determines how to color the example. If pass, it is rainbowified, otherwise
  # we assign red if failed or yellow if an error occurred.
  #
  def highlight(mark = PASS)
    case mark
    when PASS; rainbowify mark
    when FAIL; red mark
    when ERROR; yellow mark
    else mark
    end
  end
end
Compute the title width and align the title to the right.
# -*- coding: utf-8 -*-
require 'rspec/core/formatters/base_text_formatter'
class NyanCatFormatter < RSpec::Core::Formatters::BaseTextFormatter
  ESC = "\e["
  NND = "#{ESC}0m"
  PASS = '='
  FAIL = '*'
  ERROR = '!'
  PENDING = '·'

  attr_reader :title, :current, :example_results, :color_index

  def start(example_count)
    super(example_count)
    @current, @color_index = 0,0
    # Initialize the counters so nyan_cat can safely compare them before
    # any pending/failed example has been seen.
    @failure_count = 0
    @pending_count = 0
    @bar_length = 70
    @example_results = []
  end

  def example_passed(example)
    super(example)
    tick PASS
  end

  def example_pending(example)
    super(example)
    # Fix: was `=+1` (assignment of positive 1), which froze the count at 1.
    @pending_count += 1
    tick PENDING
  end

  def example_failed(example)
    super(example)
    # Fix: was `=+1` (assignment of positive 1), which froze the count at 1.
    @failure_count += 1
    tick FAIL
  end

  def start_dump
    @current = @example_count
  end

  def dump_summary(duration, example_count, failure_count, pending_count)
    dump_profile if profile_examples? && failure_count == 0
    summary = "\nNyan Cat flew #{format_seconds(duration)} seconds".split(//).map { |c| rainbowify(c) }
    output.puts summary.join
    output.puts colorise_summary(summary_line(example_count, failure_count, pending_count))
    dump_commands_to_rerun_failed_examples
  end

  def dump_failures
    # noop
  end

  # Increments the example count and displays the current progress
  #
  # Returns nothing
  def tick(mark = PASS)
    @example_results << mark
    @current = (@current > @example_count) ? @example_count : @current + 1
    @title = " #{current}/#{example_count}"
    dump_progress
  end

  # Creates a rainbow trail
  #
  # Returns the sprintf format of the Nyan cat
  def nyan_trail
    width = percentage * @bar_length / 100
    marker = @example_results.map{ |mark| highlight(mark) }.join
    sprintf("%s#{nyan_cat}%s", marker, " " * (@bar_length - width) )
  end

  # Calculates the percentage completed any given point
  #
  # Returns Fixnum of the percentage
  def percentage
    @example_count.zero? ? 100 : @current * 100 / @example_count
  end

  # Ascii Nyan Cat. If tests are complete, Nyan Cat goes to sleep. If
  # there are failing or pending examples, Nyan Cat is concerned.
  #
  # Returns String Nyan Cat
  def nyan_cat
    if @failure_count > 0 || @pending_count > 0
      '~|_(o.o)'
    elsif (@current == @example_count)
      '~|_(-.-)'
    else
      '~|_(^.^)'
    end
  end

  # Displays the current progress in all Nyan Cat glory
  #
  def dump_progress
    # Width of the right-aligned "current/total" title, sized from the
    # total example count.
    title_width = @example_count.to_s.length * 2 + 4
    max_width = 80
    line = sprintf("%#{title_width}s %s", @title + ":", nyan_trail)
    tail = (@current == @example_count) ? "\n" : "\r"
    if line.length == max_width - 1
      output.print line + tail
      output.flush
    elsif line.length >= max_width
      @bar_length = [@bar_length - (line.length - max_width + 1), 0].max
      @bar_length == 0 ? output.print( rainbowify(line + tail) ) : dump_progress
    else
      @bar_length += max_width - line.length + 1
      dump_progress
    end
  end

  # Colorizes the string with rainbow colors of the rainbow
  #
  def rainbowify(string)
    c = colors[@color_index % colors.size]
    @color_index += 1
    "#{ESC}38;5;#{c}m#{string}#{NND}"
  end

  # Calculates the colors of the rainbow
  #
  def colors
    @colors ||= (0...(6 * 7)).map do |n|
      pi_3 = Math::PI / 3
      n *= 1.0 / 6
      r = (3 * Math.sin(n ) + 3).to_i
      g = (3 * Math.sin(n + 2 * pi_3) + 3).to_i
      b = (3 * Math.sin(n + 4 * pi_3) + 3).to_i
      36 * r + 6 * g + b + 16
    end
  end

  # Determines how to color the example. If pass, it is rainbowified, otherwise
  # we assign red if failed or yellow if an error occurred.
  #
  def highlight(mark = PASS)
    case mark
    when PASS; rainbowify mark
    when FAIL; red mark
    when ERROR; yellow mark
    else mark
    end
  end
end
|
module OnTheMap
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = '0.1.3'.freeze
end
Bump version to 0.1.4.
module OnTheMap
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = '0.1.4'.freeze
end
module Ox
  module Builder
    # Gem version string; frozen so the shared constant cannot be mutated.
    VERSION = "1.0.0".freeze
  end
end
Bump to 1.0.1
module Ox
  module Builder
    # Gem version string; frozen so the shared constant cannot be mutated.
    VERSION = "1.0.1".freeze
  end
end
|
require 'dry/core/deprecations'
require 'rom/initializer'
require 'rom/repository/class_interface'
require 'rom/repository/session'
module ROM
# Abstract repository class to inherit from
#
# A repository provides access to composable relations and commands.
# Its job is to provide application-specific data that is already materialized, so that
# relations don't leak into your application layer.
#
# Typically, you're going to work with Repository::Root that is configured to
# use a single relation as its root, and compose aggregates and use changesets and commands
# against the root relation.
#
# @example
# rom = ROM.container(:sql, 'sqlite::memory') do |conf|
# conf.default.create_table(:users) do
# primary_key :id
# column :name, String
# end
#
# conf.default.create_table(:tasks) do
# primary_key :id
# column :user_id, Integer
# column :title, String
# end
#
# conf.relation(:users) do
# associations do
# has_many :tasks
# end
# end
# end
#
# class UserRepo < ROM::Repository[:users]
# def users_with_tasks
# aggregate(:tasks).to_a
# end
# end
#
# user_repo = UserRepo.new(rom)
# user_repo.users_with_tasks
#
# @see Repository::Root
#
# @api public
class Repository
  extend ClassInterface
  extend Initializer
  extend Dry::Core::ClassAttributes

  # @!method self.auto_struct
  #   Get or set auto_struct setting
  #
  #   When disabled, rom structs won't be created
  #
  #   @overload auto_struct
  #     Return auto_struct setting value
  #     @return [TrueClass,FalseClass]
  #
  #   @overload auto_struct(value)
  #     Set auto_struct value
  #     @return [Class]
  defines :auto_struct

  auto_struct true

  # @!method self.struct_namespace
  #   Get or set struct namespace
  defines :struct_namespace

  # @!method self.relation_reader
  #   Get or set relation reader module
  #   @return [RelationReader]
  defines :relation_reader

  struct_namespace ROM::Struct

  # @!attribute [r] container
  #   @return [ROM::Container] The container used to set up a repo
  param :container, allow: ROM::Container

  # @!attribute [r] struct_namespace
  #   @return [Module,Class] The namespace for auto-generated structs
  option :struct_namespace, default: -> { self.class.struct_namespace }

  # @!attribute [r] auto_struct
  #   @return [Boolean] Whether auto-struct mapping is enabled for this repo
  option :auto_struct, default: -> { self.class.auto_struct }

  # @!attribute [r] relations
  #   @return [RelationRegistry] The relation proxy registry used by a repo
  #   NOTE(review): #initialize assigns a plain Hash — confirm the
  #   documented RelationRegistry type is accurate.
  attr_reader :relations

  # Initializes a new repo by establishing configured relation proxies from
  # the passed container
  #
  # @param container [ROM::Container] The rom container with relations and optional commands
  #
  # @api private
  def initialize(container, options = EMPTY_HASH)
    super
    @relations = {}
  end

  # Open a database transaction
  #
  # @example commited transaction
  #   user = transaction do |t|
  #     create(changeset(name: 'Jane'))
  #   end
  #
  #   user
  #   # => #<ROM::Struct::User id=1 name="Jane">
  #
  # @example with a rollback
  #   user = transaction do |t|
  #     changeset(name: 'Jane').commit
  #     t.rollback!
  #   end
  #
  #   user
  #   # nil
  #
  # @api public
  def transaction(&block)
    container.gateways[:default].transaction(&block)
  end

  # Return a string representation of a repository object
  #
  # @return [String]
  #
  # @api public
  def inspect
    %(#<#{self.class} struct_namespace=#{struct_namespace} auto_struct=#{auto_struct}>)
  end

  # Start a session for multiple changesets
  #
  # TODO: this is partly done, needs tweaks in changesets so that we can gather
  #       command results and return them in a nice way
  #
  # @!visibility private
  #
  # @api public
  def session(&block)
    session = Session.new(self)
    yield(session)
    transaction { session.commit! }
  end
end
end
require 'rom/repository/root'
[repository] add missing requires
require 'dry/core/deprecations'
require 'rom/initializer'
require 'rom/struct'
require 'rom/container'
require 'rom/repository/class_interface'
require 'rom/repository/session'
module ROM
# Abstract repository class to inherit from
#
# A repository provides access to composable relations and commands.
# Its job is to provide application-specific data that is already materialized, so that
# relations don't leak into your application layer.
#
# Typically, you're going to work with Repository::Root that is configured to
# use a single relation as its root, and compose aggregates and use changesets and commands
# against the root relation.
#
# @example
# rom = ROM.container(:sql, 'sqlite::memory') do |conf|
# conf.default.create_table(:users) do
# primary_key :id
# column :name, String
# end
#
# conf.default.create_table(:tasks) do
# primary_key :id
# column :user_id, Integer
# column :title, String
# end
#
# conf.relation(:users) do
# associations do
# has_many :tasks
# end
# end
# end
#
# class UserRepo < ROM::Repository[:users]
# def users_with_tasks
# aggregate(:tasks).to_a
# end
# end
#
# user_repo = UserRepo.new(rom)
# user_repo.users_with_tasks
#
# @see Repository::Root
#
# @api public
class Repository
  extend ClassInterface
  extend Initializer
  extend Dry::Core::ClassAttributes

  # @!method self.auto_struct
  #   Get or set auto_struct setting
  #
  #   When disabled, rom structs won't be created
  #
  #   @overload auto_struct
  #     Return auto_struct setting value
  #     @return [TrueClass,FalseClass]
  #
  #   @overload auto_struct(value)
  #     Set auto_struct value
  #     @return [Class]
  defines :auto_struct

  auto_struct true

  # @!method self.struct_namespace
  #   Get or set struct namespace
  defines :struct_namespace

  # @!method self.relation_reader
  #   Get or set relation reader module
  #   @return [RelationReader]
  defines :relation_reader

  struct_namespace ROM::Struct

  # @!attribute [r] container
  #   @return [ROM::Container] The container used to set up a repo
  param :container, allow: ROM::Container

  # @!attribute [r] struct_namespace
  #   @return [Module,Class] The namespace for auto-generated structs
  option :struct_namespace, default: -> { self.class.struct_namespace }

  # @!attribute [r] auto_struct
  #   @return [Boolean] Whether auto-struct mapping is enabled for this repo
  option :auto_struct, default: -> { self.class.auto_struct }

  # @!attribute [r] relations
  #   @return [RelationRegistry] The relation proxy registry used by a repo
  #   NOTE(review): #initialize assigns a plain Hash — confirm the
  #   documented RelationRegistry type is accurate.
  attr_reader :relations

  # Initializes a new repo by establishing configured relation proxies from
  # the passed container
  #
  # @param container [ROM::Container] The rom container with relations and optional commands
  #
  # @api private
  def initialize(container, options = EMPTY_HASH)
    super
    @relations = {}
  end

  # Open a database transaction
  #
  # @example commited transaction
  #   user = transaction do |t|
  #     create(changeset(name: 'Jane'))
  #   end
  #
  #   user
  #   # => #<ROM::Struct::User id=1 name="Jane">
  #
  # @example with a rollback
  #   user = transaction do |t|
  #     changeset(name: 'Jane').commit
  #     t.rollback!
  #   end
  #
  #   user
  #   # nil
  #
  # @api public
  def transaction(&block)
    container.gateways[:default].transaction(&block)
  end

  # Return a string representation of a repository object
  #
  # @return [String]
  #
  # @api public
  def inspect
    %(#<#{self.class} struct_namespace=#{struct_namespace} auto_struct=#{auto_struct}>)
  end

  # Start a session for multiple changesets
  #
  # TODO: this is partly done, needs tweaks in changesets so that we can gather
  #       command results and return them in a nice way
  #
  # @!visibility private
  #
  # @api public
  def session(&block)
    session = Session.new(self)
    yield(session)
    transaction { session.commit! }
  end
end
end
require 'rom/repository/root'
|
module Paperclip
  # Defines the geometry (width x height, plus an optional ImageMagick
  # modifier such as '#' or '>') of an image.
  class Geometry
    attr_accessor :height, :width, :modifier

    # Gives a Geometry representing the given height and width.
    # A blank or missing dimension falls back to the other one, so
    # Geometry.new(100) describes a 100x100 square.
    def initialize width = nil, height = nil, modifier = nil
      height = nil if height == ""
      width = nil if width == ""
      @height = (height || width).to_f
      @width = (width || height).to_f
      @modifier = modifier
    end

    # Uses ImageMagick to determine the dimensions of a file, passed in as either a
    # File or path.
    #
    # NOTE(review): `file` is interpolated into a shell command; if it can
    # ever come from untrusted input this is a shell-injection risk —
    # consider escaping or a non-shell spawn.
    def self.from_file file
      file = file.path if file.respond_to? "path"
      parse(`#{Paperclip.path_for_command('identify')} "#{file}"`) ||
        raise(NotIdentifiedByImageMagickError.new("#{file} is not recognized by the 'identify' command."))
    end

    # Parses a "WxH" formatted string, where W is the width and H is the height.
    # Returns nil when the string does not contain a geometry.
    def self.parse string
      if match = (string && string.match(/\b(\d*)x(\d*)\b([\>\<\#\@\%^!])?/))
        Geometry.new(*match[1,3])
      end
    end

    # True if the dimensions represent a square
    def square?
      height == width
    end

    # True if the dimensions represent a horizontal rectangle
    def horizontal?
      height < width
    end

    # True if the dimensions represent a vertical rectangle
    def vertical?
      height > width
    end

    # The aspect ratio of the dimensions.
    def aspect
      width / height
    end

    # Returns the larger of the two dimensions
    def larger
      [height, width].max
    end

    # Returns the smaller of the two dimensions
    def smaller
      [height, width].min
    end

    # Returns the width and height in a format suitable to be passed to Geometry.parse
    def to_s
      "%dx%d%s" % [width, height, modifier]
    end

    # Same as to_s
    def inspect
      to_s
    end

    # Returns the scaling and cropping geometries (in string-based ImageMagick format)
    # necessary to transform this Geometry into the Geometry given. If crop is true,
    # then it is assumed the destination Geometry will be the exact final resolution.
    # In this case, the source Geometry is scaled so that an image containing the
    # destination Geometry would be completely filled by the source image, and any
    # overhanging image would be cropped. Useful for square thumbnail images. The cropping
    # is weighted at the center of the Geometry.
    #
    # Returns [scale_geometry, crop_geometry]; crop_geometry is nil when
    # crop is false.
    def transformation_to dst, crop = false
      if crop
        # The ratio is only needed to work out the crop, so only compute
        # it when cropping is requested.
        ratio = Geometry.new( dst.width / self.width, dst.height / self.height )
        scale_geometry, scale = scaling(dst, ratio)
        crop_geometry = cropping(dst, ratio, scale)
      else
        scale_geometry = dst.to_s
      end
      [ scale_geometry, crop_geometry ]
    end

    private

    # Scale along the dominant axis; returns the geometry string and the
    # scale factor used (needed later to center the crop).
    def scaling dst, ratio
      if ratio.horizontal? || ratio.square?
        [ "%dx" % dst.width, ratio.width ]
      else
        [ "x%d" % dst.height, ratio.height ]
      end
    end

    # Center-weighted crop for whichever axis overhangs after scaling.
    def cropping dst, ratio, scale
      if ratio.horizontal? || ratio.square?
        "%dx%d+%d+%d" % [ dst.width, dst.height, 0, (self.height * scale - dst.height) / 2 ]
      else
        "%dx%d+%d+%d" % [ dst.width, dst.height, (self.width * scale - dst.width) / 2, 0 ]
      end
    end
  end
end
Slight refactoring in geometry.rb: only calculate the transformation ratio if the image needs to be cropped (Clemens Kofler)
module Paperclip
  # Defines the geometry of an image.
  class Geometry
    attr_accessor :height, :width, :modifier

    # Gives a Geometry representing the given height and width.
    # A blank or missing dimension falls back to the other one.
    def initialize width = nil, height = nil, modifier = nil
      height = nil if height == ""
      width = nil if width == ""
      @height = (height || width).to_f
      @width = (width || height).to_f
      @modifier = modifier
    end

    # Uses ImageMagick to determine the dimensions of a file, passed in as either a
    # File or path.
    def self.from_file file
      file = file.path if file.respond_to? "path"
      parse(`#{Paperclip.path_for_command('identify')} "#{file}"`) ||
        raise(NotIdentifiedByImageMagickError.new("#{file} is not recognized by the 'identify' command."))
    end

    # Parses a "WxH" formatted string, where W is the width and H is the height.
    def self.parse string
      if match = (string && string.match(/\b(\d*)x(\d*)\b([\>\<\#\@\%^!])?/))
        Geometry.new(*match[1,3])
      end
    end

    # True if the dimensions represent a square
    def square?
      height == width
    end

    # True if the dimensions represent a horizontal rectangle
    def horizontal?
      height < width
    end

    # True if the dimensions represent a vertical rectangle
    def vertical?
      height > width
    end

    # The aspect ratio of the dimensions.
    def aspect
      width / height
    end

    # Returns the larger of the two dimensions
    def larger
      [height, width].max
    end

    # Returns the smaller of the two dimensions
    def smaller
      [height, width].min
    end

    # Returns the width and height in a format suitable to be passed to Geometry.parse
    def to_s
      "%dx%d%s" % [width, height, modifier]
    end

    # Same as to_s
    def inspect
      to_s
    end

    # Returns the scaling and cropping geometries (in string-based ImageMagick format)
    # necessary to transform this Geometry into the Geometry given. If crop is true,
    # then it is assumed the destination Geometry will be the exact final resolution.
    # In this case, the source Geometry is scaled so that an image containing the
    # destination Geometry would be completely filled by the source image, and any
    # overhanging image would be cropped. Useful for square thumbnail images. The cropping
    # is weighted at the center of the Geometry.
    def transformation_to dst, crop = false
      if crop
        # Ratio is only needed for the crop calculation
        ratio = Geometry.new( dst.width / self.width, dst.height / self.height )
        scale_geometry, scale = scaling(dst, ratio)
        crop_geometry = cropping(dst, ratio, scale)
      else
        scale_geometry = dst.to_s
      end
      [ scale_geometry, crop_geometry ]
    end

    private

    # Scale along the dominant axis; also returns the scale factor.
    def scaling dst, ratio
      if ratio.horizontal? || ratio.square?
        [ "%dx" % dst.width, ratio.width ]
      else
        [ "x%d" % dst.height, ratio.height ]
      end
    end

    # Center-weighted crop for the overhanging axis.
    def cropping dst, ratio, scale
      if ratio.horizontal? || ratio.square?
        "%dx%d+%d+%d" % [ dst.width, dst.height, 0, (self.height * scale - dst.height) / 2 ]
      else
        "%dx%d+%d+%d" % [ dst.width, dst.height, (self.width * scale - dst.width) / 2, 0 ]
      end
    end
  end
end
|
module Parametric
  # Gem version (semantic versioning).
  VERSION = "0.1.3"
end
Bump version to 0.2.0 including structs and expanded fields
module Parametric
  # Gem version (semantic versioning).
  VERSION = "0.2.0"
end
|
module Parse
  module Cron
    # Gem version (semantic versioning).
    VERSION = "0.1.1"
  end
end
Bump version to 0.1.1
module Parse
  module Cron
    # Gem version (semantic versioning).
    VERSION = "0.1.2"
  end
end
|
# Copyright 2014 - 2016 Ryan Moore
# Contact: moorer@udel.edu
#
# This file is part of parse_fasta.
#
# parse_fasta is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# parse_fasta is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with parse_fasta. If not, see <http://www.gnu.org/licenses/>.
module ParseFasta
  class Record
    # @!attribute header
    #   @return [String] the full header of the record without the '>'
    #     or '@'
    # @!attribute seq
    #   @return [String] the sequence of the record
    # @!attribute desc
    #   @return [String or Nil] if the record is from a fastA file, it
    #     is nil; else, the description line of the fastQ record
    # @!attribute qual
    #   @return [String or Nil] if the record is from a fastA file, it
    #     is nil; else, the quality string of the fastQ record
    attr_accessor :header, :seq, :desc, :qual

    # The constructor takes keyword args.
    #
    # @example Init a new Record object for a fastA record
    #   Record.new header: "apple", seq: "actg"
    # @example Init a new Record object for a fastQ record
    #   Record.new header: "apple", seq: "actd", desc: "", qual: "IIII"
    #
    # @param header [String] the header of the record
    # @param seq [String] the sequence of the record
    # @param desc [String] the description line of a fastQ record
    # @param qual [String] the quality string of a fastQ record
    #
    # @raise [ParseFasta::Error::SequenceFormatError] if a fastA
    #   sequence has a '>' character in it
    def initialize args = {}
      @header = args.fetch :header
      @desc = args.fetch :desc, nil
      @qual = args.fetch :qual, nil
      # Whitespace is stripped from qual and seq so multi-line records work
      @qual.gsub!(/\s+/, "") if @qual
      seq = args.fetch(:seq).gsub(/\s+/, "")
      if fastq? # is fastQ
        @seq = seq
      else # is fastA
        @seq = check_fasta_seq(seq)
      end
    end

    # Compare attrs of this rec with another
    #
    # @param rec [Record] a Record object to compare with
    #
    # @return [Bool] true or false
    def == rec
      self.header == rec.header && self.seq == rec.seq &&
        self.desc == rec.desc && self.qual == rec.qual
    end

    # Return a fastA or fastQ record ready to print.
    #
    # If the Record is fastQ like then it returns a fastQ record
    # string. If the record is fastA like, then it returns a fastA
    # record string.
    #
    # @return [String] a printable sequence record
    #
    # @example When the record is fastA like
    #   rec = Record.new header: "Apple", seq: "ACTG"
    #   rec.to_s #=> ">Apple\nACTG"
    #
    # @example When the record is fastQ like
    #   rec = Record.new header: "Apple", seq: "ACTG", desc: "Hi", qual: "IIII"
    #   rec.to_s #=> "@Apple\nACTG\n+Hi\nIIII"
    def to_s
      if fastq?
        to_fastq
      else
        to_fasta
      end
    end

    # Returns a fastA record ready to print.
    #
    # If the record is fastQ like, the desc and qual are dropped.
    #
    # @return [String] a printable fastA sequence record
    #
    # @example When the record is fastA like
    #   rec = Record.new header: "Apple", seq: "ACTG"
    #   rec.to_fasta #=> ">Apple\nACTG"
    #
    # @example When the record is fastQ like
    #   rec = Record.new header: "Apple", seq: "ACTG", desc: "Hi", qual: "IIII"
    #   rec.to_fasta #=> ">Apple\nACTG"
    def to_fasta
      ">#{header}\n#{seq}"
    end

    # Returns a fastQ record ready to print.
    #
    # If the record is fastA like, the desc and qual can be specified.
    #
    # @return [String] a printable fastQ sequence record
    #
    # @example When the record is fastA like, no args
    #   rec = Record.new header: "Apple", seq: "ACTG"
    #   rec.to_fastq #=> "@Apple\nACTG\n+\nIIII"
    #
    # @example When the record is fastA like, desc and qual specified
    #   rec = Record.new header: "Apple", seq: "ACTG"
    #   rec.to_fastq desc: "Hi", qual: "A" #=> "@Apple\nACTG\n+Hi\nAAAA"
    #
    # @example When the record is fastA like, can specify fancy qual strings
    #   rec = Record.new header: "Apple", seq: "ACTGACTG"
    #   rec.to_fastq desc: "Hi", qual: "!a2" #=> "@Apple\nACTG\n+Hi\n!a2!a2!a"
    #
    # @example When the record is fastQ like
    #   rec = Record.new header: "Apple", seq: "ACTG", desc: "Hi", qual: "IIII"
    #   rec.to_fastq #=> "@Apple\nACTG\n+Hi\nIIII"
    #
    # @raise [ParseFasta::Error::ArgumentError] if qual is ""
    def to_fastq opts = {}
      if fastq?
        "@#{@header}\n#{@seq}\n+#{@desc}\n#{qual}"
      else
        qual = opts.fetch :qual, "I"
        check_qual qual
        desc = opts.fetch :desc, ""
        # qual string is cycled/truncated to match the sequence length
        qual_str = make_qual_str qual
        "@#{@header}\n#{@seq}\n+#{desc}\n#{qual_str}"
      end
    end

    private

    # Rejects fastA sequences containing the record separator '>'.
    def check_fasta_seq seq
      if seq.match ">"
        raise ParseFasta::Error::SequenceFormatError,
              "A sequence contained a '>' character " +
              "(the fastA file record separator)"
      else
        seq
      end
    end

    # A record is fastQ-like iff a quality string is present
    # (returns nil, i.e. falsy, otherwise).
    def fastq?
      true if @qual
    end

    # Repeats qual until it covers the sequence, then truncates.
    def make_qual_str qual
      (qual * (@seq.length / qual.length.to_f).ceil)[0, @seq.length]
    end

    # An empty qual template would loop forever when cycled; reject it.
    def check_qual qual
      if qual.length.zero?
        raise ParseFasta::Error::ArgumentError,
              ":qual was '#{qual.inspect}', but it can't be empty"
      end
    end
  end
end
Update docs
# Copyright 2014 - 2016 Ryan Moore
# Contact: moorer@udel.edu
#
# This file is part of parse_fasta.
#
# parse_fasta is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# parse_fasta is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with parse_fasta. If not, see <http://www.gnu.org/licenses/>.
module ParseFasta
  class Record
    # @!attribute header
    #   @return [String] the full header of the record without the '>'
    #     or '@'
    # @!attribute seq
    #   @return [String] the sequence of the record
    # @!attribute desc
    #   @return [String or Nil] if the record is from a fastA file, it
    #     is nil; else, the description line of the fastQ record
    # @!attribute qual
    #   @return [String or Nil] if the record is from a fastA file, it
    #     is nil; else, the quality string of the fastQ record
    attr_accessor :header, :seq, :desc, :qual

    # The constructor takes keyword args.
    #
    # @example Init a new Record object for a fastA record
    #   Record.new header: "apple", seq: "actg"
    # @example Init a new Record object for a fastQ record
    #   Record.new header: "apple", seq: "actd", desc: "", qual: "IIII"
    #
    # @param header [String] the header of the record
    # @param seq [String] the sequence of the record
    # @param desc [String] the description line of a fastQ record
    # @param qual [String] the quality string of a fastQ record
    #
    # @raise [ParseFasta::Error::SequenceFormatError] if a fastA sequence has a '>'
    #   character in it
    def initialize args = {}
      @header = args.fetch :header
      @desc = args.fetch :desc, nil
      @qual = args.fetch :qual, nil
      # Whitespace is stripped from qual and seq so multi-line records work
      @qual.gsub!(/\s+/, "") if @qual
      seq = args.fetch(:seq).gsub(/\s+/, "")
      if fastq? # is fastQ
        @seq = seq
      else # is fastA
        @seq = check_fasta_seq(seq)
      end
    end

    # Compare attrs of this rec with another
    #
    # @param rec [Record] a Record object to compare with
    #
    # @return [Bool] true or false
    def == rec
      self.header == rec.header && self.seq == rec.seq &&
        self.desc == rec.desc && self.qual == rec.qual
    end

    # Return a fastA or fastQ record ready to print.
    #
    # If the Record is fastQ like then it returns a fastQ record
    # string. If the record is fastA like, then it returns a fastA
    # record string.
    #
    # @return [String] a printable sequence record
    #
    # @example When the record is fastA like
    #   rec = Record.new header: "Apple", seq: "ACTG"
    #   rec.to_s #=> ">Apple\nACTG"
    #
    # @example When the record is fastQ like
    #   rec = Record.new header: "Apple", seq: "ACTG", desc: "Hi", qual: "IIII"
    #   rec.to_s #=> "@Apple\nACTG\n+Hi\nIIII"
    def to_s
      if fastq?
        to_fastq
      else
        to_fasta
      end
    end

    # Returns a fastA record ready to print.
    #
    # If the record is fastQ like, the desc and qual are dropped.
    #
    # @return [String] a printable fastA sequence record
    #
    # @example When the record is fastA like
    #   rec = Record.new header: "Apple", seq: "ACTG"
    #   rec.to_fasta #=> ">Apple\nACTG"
    #
    # @example When the record is fastQ like
    #   rec = Record.new header: "Apple", seq: "ACTG", desc: "Hi", qual: "IIII"
    #   rec.to_fasta #=> ">Apple\nACTG"
    def to_fasta
      ">#{header}\n#{seq}"
    end

    # Returns a fastQ record ready to print.
    #
    # If the record is fastA like, the desc and qual can be specified.
    #
    # @return [String] a printable fastQ sequence record
    #
    # @example When the record is fastA like, no args
    #   rec = Record.new header: "Apple", seq: "ACTG"
    #   rec.to_fastq #=> "@Apple\nACTG\n+\nIIII"
    #
    # @example When the record is fastA like, desc and qual specified
    #   rec = Record.new header: "Apple", seq: "ACTG"
    #   rec.to_fastq desc: "Hi", qual: "A" #=> "@Apple\nACTG\n+Hi\nAAAA"
    #
    # @example When the record is fastA like, can specify fancy qual strings
    #   rec = Record.new header: "Apple", seq: "ACTGACTG"
    #   rec.to_fastq desc: "Hi", qual: "!a2" #=> "@Apple\nACTG\n+Hi\n!a2!a2!a"
    #
    # @example When the record is fastQ like
    #   rec = Record.new header: "Apple", seq: "ACTG", desc: "Hi", qual: "IIII"
    #   rec.to_fastq #=> "@Apple\nACTG\n+Hi\nIIII"
    #
    # @raise [ParseFasta::Error::ArgumentError] if qual is ""
    def to_fastq opts = {}
      if fastq?
        "@#{@header}\n#{@seq}\n+#{@desc}\n#{qual}"
      else
        qual = opts.fetch :qual, "I"
        check_qual qual
        desc = opts.fetch :desc, ""
        # qual string is cycled/truncated to match the sequence length
        qual_str = make_qual_str qual
        "@#{@header}\n#{@seq}\n+#{desc}\n#{qual_str}"
      end
    end

    private

    # Rejects fastA sequences containing the record separator '>'.
    def check_fasta_seq seq
      if seq.match ">"
        raise ParseFasta::Error::SequenceFormatError,
              "A sequence contained a '>' character " +
              "(the fastA file record separator)"
      else
        seq
      end
    end

    # A record is fastQ-like iff a quality string is present
    # (returns nil, i.e. falsy, otherwise).
    def fastq?
      true if @qual
    end

    # Repeats qual until it covers the sequence, then truncates.
    def make_qual_str qual
      (qual * (@seq.length / qual.length.to_f).ceil)[0, @seq.length]
    end

    # An empty qual template would loop forever when cycled; reject it.
    def check_qual qual
      if qual.length.zero?
        raise ParseFasta::Error::ArgumentError,
              ":qual was '#{qual.inspect}', but it can't be empty"
      end
    end
  end
end
|
Regenerate gemspec for version 0.0.1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = %q{mongoid_commentable}
  s.version = "0.0.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Max Golovnia"]
  s.date = %q{2011-05-02}
  s.description = %q{Mongoid_commentable provides methods to create commentable documents}
  s.email = %q{mgolovnia@gmail.com}
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  # Explicit file list generated by jeweler from the repository contents
  s.files = [
    ".document",
    ".rspec",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "app/controllers/comments_controller.rb",
    "app/models/comment.rb",
    "app/views/comments/_form.html.erb",
    "app/views/comments/edit.html.erb",
    "app/views/comments/index.html.erb",
    "app/views/comments/new.html.erb",
    "app/views/comments/show.html.erb",
    "lib/generators/mongoid_commentable/views_generator.rb",
    "lib/mongoid/commentable.rb",
    "lib/mongoid_commentable.rb",
    "spec/mongoid/commentable_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.homepage = %q{http://github.com/mgolovnia/mongoid_commentable}
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.6.2}
  s.summary = %q{Comments for Mongoid documents}

  # Compatibility shim for old RubyGems: versions < 1.2 do not understand
  # the runtime vs development dependency distinction.
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<mongoid>, ["~> 2.0"])
      s.add_runtime_dependency(%q<bson_ext>, ["~> 1.3"])
      s.add_development_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_development_dependency(%q<database_cleaner>, [">= 0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.0"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
    else
      s.add_dependency(%q<mongoid>, ["~> 2.0"])
      s.add_dependency(%q<bson_ext>, ["~> 1.3"])
      s.add_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_dependency(%q<database_cleaner>, [">= 0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.0"])
      s.add_dependency(%q<rcov>, [">= 0"])
    end
  else
    s.add_dependency(%q<mongoid>, ["~> 2.0"])
    s.add_dependency(%q<bson_ext>, ["~> 1.3"])
    s.add_dependency(%q<rspec>, ["~> 2.3.0"])
    s.add_dependency(%q<database_cleaner>, [">= 0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.0"])
    s.add_dependency(%q<rcov>, [">= 0"])
  end
end
module Passphrase
  # Version numbers are bumped according to {http://semver.org Semantic
  # Versioning}.
  # Current released version of the passphrase gem.
  VERSION = "1.1.0"
end
Bumped version number to 1.2.0
module Passphrase
  # Version numbers are bumped according to {http://semver.org Semantic
  # Versioning}.
  # Current released version of the passphrase gem.
  VERSION = "1.2.0"
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# The version file lives under lib/generators/, not lib/mongoid_model_maker/;
# requiring the old path fails when building the gem.
require 'generators/mongoid_model_maker/version'

Gem::Specification.new do |spec|
  spec.name          = "mongoid_model_maker"
  spec.version       = MongoidModelMaker::VERSION
  spec.authors       = ["David Wilcox"]
  spec.email         = ["dave@davidgwilcox.com"]
  spec.description   = %q{A Rails generator to help produce large quantities of Models, with relationships, from a yaml source file.}
  spec.summary       = spec.description
  spec.homepage      = "https://github.com/Dawil/MongoidModelMaker"
  spec.license       = "MIT"

  # Ship everything tracked by git; executables come from bin/
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
end
fixed require path in gemspec
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Version constant lives under lib/generators/
require 'generators/mongoid_model_maker/version'

Gem::Specification.new do |spec|
  spec.name          = "mongoid_model_maker"
  spec.version       = MongoidModelMaker::VERSION
  spec.authors       = ["David Wilcox"]
  spec.email         = ["dave@davidgwilcox.com"]
  spec.description   = %q{A Rails generator to help produce large quantities of Models, with relationships, from a yaml source file.}
  spec.summary       = spec.description
  spec.homepage      = "https://github.com/Dawil/MongoidModelMaker"
  spec.license       = "MIT"

  # Ship everything tracked by git; executables come from bin/
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
end
|
require "pdg/particle.rb"
module PDG
  # In-memory table of particles parsed from a PDG MC data file.
  #
  # Lines starting with '*' are comments; 'M' lines carry masses and
  # 'W' lines carry widths for one or more particle ids.
  class ParticleTable
    # @param path [String] path to the PDG MC data file
    def initialize(path)
      @particles = Hash.new
      # Block form guarantees the file handle is closed even if parsing
      # raises (the original leaked the handle in that case).
      File.open(path, "r") do |source|
        while (line = source.gets)
          line_type = line[0]
          case line_type
          # Comment lines begin with a *
          when "*"
            next
          when "M"
            parse_line line, :mass
          when "W"
            parse_line line, :width
          end
        end
      end
    end

    # Parses a line from the PDG MC data
    # Passes the information to the add_row method
    #
    # Fixed-width layout: ids in cols 1..32, the mass/width value in
    # cols 34..48, names and comma-separated charges in cols 68..88.
    def parse_line(line, type)
      names_charges = line[68..88].split
      ids = line[1..32].split
      name = names_charges[0]
      charges = names_charges[1].split ","
      type_value = line[34..48].strip
      ids.each_index do |i|
        add_row({
          :id => ids[i],
          :name => name,
          :charge => charges[i],
          type => type_value.to_f
        })
      end
    end

    # Adds a row with the corresponding properties to the table.
    # Creates a new Particle for unseen ids; otherwise fills in the
    # missing mass/width of the existing entry.
    def add_row(particle_properties)
      particle = self[particle_properties[:id]]
      if particle.nil?
        self << Particle.new(particle_properties)
      else
        particle.mass = particle_properties[:mass] unless particle_properties[:mass].nil?
        particle.width = particle_properties[:width] unless particle_properties[:width].nil?
      end
    end

    # Convenience method: lookup by id, coerced with to_i.
    # NOTE(review): << stores under particle.id as-is while [] coerces
    # with to_i — verify Particle#id is an Integer so the keys match.
    def [](id)
      @particles[id.to_i]
    end

    # Appends a new particle to the `particles` hash
    def <<(particle)
      @particles[particle.id] = particle
    end

    # Iterate over the values of the hash
    def each(&block)
      @particles.each_value(&block)
    end

    def to_s
      "<##{self.class}:#{self.object_id.to_s(8)}>"
    end

    # Removed a trailing bare `private` that followed this line in the
    # original: it had no methods after it and was dead code.
    protected :<<, :parse_line, :add_row
  end
end
Have ParticleTable inherit from Hash to save us work.
require "pdg/particle.rb"
module PDG
  # Table of particles parsed from a PDG MC data file, keyed by id.
  # Inherits from Hash so storage and iteration come for free.
  class ParticleTable < Hash
    # @param path [String] path to the PDG MC data file
    def initialize(path)
      # Block form guarantees the file handle is closed even if parsing
      # raises (the original leaked the handle in that case).
      File.open(path, "r") do |source|
        while (line = source.gets)
          line_type = line[0]
          case line_type
          # Comment lines begin with a *
          when "*"
            next
          when "M"
            parse_line line, :mass
          when "W"
            parse_line line, :width
          end
        end
      end
    end

    # Parses a line from the PDG MC data
    # Passes the information to the add_row method
    #
    # Fixed-width layout: ids in cols 1..32, the mass/width value in
    # cols 34..48, names and comma-separated charges in cols 68..88.
    def parse_line(line, type)
      names_charges = line[68..88].split
      ids = line[1..32].split
      name = names_charges[0]
      charges = names_charges[1].split ","
      type_value = line[34..48].strip
      ids.each_index do |i|
        add_row({
          :id => ids[i],
          :name => name,
          :charge => charges[i],
          type => type_value.to_f
        })
      end
    end

    # Adds a row with the corresponding properties to the table.
    # Creates a new Particle for unseen ids; otherwise fills in the
    # missing mass/width of the existing entry.
    def add_row(particle_properties)
      particle = self[particle_properties[:id]]
      if particle.nil?
        self << Particle.new(particle_properties)
      else
        particle.mass = particle_properties[:mass] unless particle_properties[:mass].nil?
        particle.width = particle_properties[:width] unless particle_properties[:width].nil?
      end
    end

    # Lookup by a single id, or by a Range/Array of ids (returns a
    # compacted Array in that case).
    def [](id)
      if id.class == Range or id.class == Array
        self.values_at(*id).compact
      else
        self.values_at(id).first
      end
    end

    # Appends a new particle to the `particles` hash
    def <<(particle)
      self[particle.id] = particle
    end

    def to_s
      "<##{self.class}:#{self.object_id.to_s(8)}>"
    end

    # Fixed: this visibility declaration was accidentally duplicated on
    # two consecutive lines in the original.
    protected :<<, :parse_line, :add_row
  end
end
|
require 'photish/log'
require 'photish/config/app_settings'
require 'photish/gallery/collection'
require 'photish/render/site'
require 'tilt'
module Photish
  # Orchestrates one full site-generation run: resolves configuration,
  # logs the key paths and gallery contents, then renders the site.
  class Generation
    include Photish::Log

    # @param runtime_config [Hash] runtime overrides merged into the
    #   application settings
    def initialize(runtime_config)
      @config = Photish::Config::AppSettings.new(runtime_config)
                                            .config
    end

    # Entry point: log configuration, log albums/photos, render the site.
    def execute
      log_important_config_values
      log_album_and_photo_names
      render_site
    end

    private

    attr_reader :config

    # Echo the three directories that drive generation.
    def log_important_config_values
      log "Photo directory: #{photo_dir}"
      log "Site directory: #{site_dir}"
      log "Output directory: #{output_dir}"
    end

    # List every album and its photo names.
    def log_album_and_photo_names
      collection.albums.each do |album|
        log album.name
        log album.photos.map(&:name)
      end
    end

    # Render every page of the site to the output directory.
    def render_site
      Photish::Render::Site.new(collection, site_dir, output_dir)
                           .all
    end

    def photo_dir
      config.val(:photo_dir)
    end

    def output_dir
      config.val(:output_dir)
    end

    def site_dir
      config.val(:site_dir)
    end

    # Memoized gallery collection built from the photo directory.
    def collection
      @collection ||= Gallery::Collection.new(photo_dir)
    end
  end
end
code cleanup
require 'photish/log'
require 'photish/config/app_settings'
require 'photish/gallery/collection'
require 'photish/render/site'
require 'tilt'
module Photish
  # Orchestrates one full site-generation run: resolves configuration,
  # logs the key paths and gallery contents, then renders the site.
  class Generation
    include Photish::Log

    # @param runtime_config [Hash] runtime overrides merged into the
    #   application settings
    def initialize(runtime_config)
      @config = Photish::Config::AppSettings.new(runtime_config)
                                            .config
    end

    # Entry point: log configuration, log albums/photos, render the site.
    def execute
      log_important_config_values
      log_album_and_photo_names
      render_whole_site
    end

    private

    attr_reader :config

    # Echo the three directories that drive generation.
    def log_important_config_values
      log "Photo directory: #{photo_dir}"
      log "Site directory: #{site_dir}"
      log "Output directory: #{output_dir}"
    end

    # List every album and its photo names.
    def log_album_and_photo_names
      collection.albums.each do |album|
        log album.name
        log album.photos.map(&:name)
      end
    end

    # Render every page of the site to the output directory.
    def render_whole_site
      Photish::Render::Site.new(collection,
                                site_dir,
                                output_dir)
                           .all
    end

    def photo_dir
      config.val(:photo_dir)
    end

    def output_dir
      config.val(:output_dir)
    end

    def site_dir
      config.val(:site_dir)
    end

    # Memoized gallery collection built from the photo directory.
    def collection
      @collection ||= Gallery::Collection.new(photo_dir)
    end
  end
end
|
module Gnuplot
  ##
  # === Overview
  # Plot correspond to simple 2D visualisation
  class Plot
    ##
    # ==== Parameters
    # * *datasets* are either instances of Dataset class or
    #   [data, **dataset_options] arrays
    # * *options* will be considered as 'settable' options of gnuplot
    #   ('set xrange [1:10]' for { xrange: 1..10 }, "set title 'plot'" for { title: 'plot' } etc)
    def initialize(*datasets, **options)
      @datasets = if datasets[0].is_a? Hamster::Vector
                    datasets[0]
                  else
                    Hamster::Vector.new(datasets).map { |ds| ds.is_a?(Dataset) ? ds.clone : Dataset.new(*ds) }
                  end
      @options = Hamster.hash(options)
      @already_plotted = false
      @cmd = 'plot '
      @terminal = Terminal.new
      yield(self) if block_given?
    end

    ##
    # ==== Overview
    # This outputs plot to term (if given) or to last used term (if any)
    # or just builds its own Terminal with plot and options
    # ==== Parameters
    # * *term* - Terminal to plot to
    # * *options* - will be considered as 'settable' options of gnuplot
    #   ('set xrange [1:10]', 'set title 'plot'' etc);
    #   options passed here have priority above already given to ::new
    def plot(term = @terminal, **options)
      opts = @options.merge(options)
      full_command = @cmd + @datasets.map { |dataset| dataset.to_s(term) }.join(' , ')
      plot_command(term, full_command, opts)
      @already_plotted = true
      self
    end

    ##
    # ==== Overview
    # Method which outputs plot to specific terminal (possibly some file).
    # Explicit use should be avoided. This method is called from #method_missing
    # when it handles method names like #to_png(options).
    # ==== Parameters
    # * *terminal* - string corresponding to terminal type (png, html, jpeg etc)
    # * *path* - path to output file, if none given it will output to temp file
    #   and then read it and return binary data with contents of file
    # * *options* - used in 'set term <term type> <options here>'
    # ==== Examples
    #   plot.to_png('./result.png', size: [300, 500])
    #   contents = plot.to_svg(size: [100, 100])
    #   plot.to_dumb('./result.txt', size: [30, 15])
    def to_specific_term(terminal, path = nil, **options)
      if path
        result = plot(term: [terminal, options], output: path)
      else
        # NOTE(review): Dir::Tmpname.make_tmpname is private API that was
        # removed in Ruby 2.5; consider Dir::Tmpname.create or a
        # SecureRandom-based name instead.
        path = Dir::Tmpname.make_tmpname(terminal, 0)
        plot(term: [terminal, options], output: path)
        result = File.binread(path)
        File.delete(path)
      end
      result
    end

    ##
    # ==== Overview
    # In this gem #method_missing is used both to handle
    # options and to handle plotting to specific terminal.
    #
    # ==== Options handling
    # ===== Overview
    # You may set options using #option_name(option_value) method.
    # A new object will be constructed with selected option set.
    # And finally you can get current value of any option using
    # #options_name without arguments.
    # ===== Examples
    #   new_plot = plot.title('Awesome plot')
    #   plot.title # => nil (original plot is unchanged)
    #   new_plot.title # => 'Awesome plot'
    #
    # ==== Plotting to specific term
    # ===== Overview
    # Gnuplot offers possibility to output graphics to many image formats.
    # The easiest way to to so is to use #to_<plot_name> methods.
    # ===== Parameters
    # * *options* - set of options related to terminal (size, font etc).
    # ===== Examples
    #   # options specific for png term
    #   plot.to_png('./result.png', size: [300, 500], font: ['arial', 12])
    #   # options specific for svg term
    #   content = plot.to_svg(size: [100, 100], fname: 'Arial', fsize: 12)
    def method_missing(meth_id, *args)
      meth = meth_id.id2name
      return to_specific_term(meth[3..-1], *args) if meth[0..2] == 'to_'
      if args.empty?
        value = @options[meth.to_sym]
        value = value[0] if value && value.size == 1
        value
      else
        Plot.new(@datasets, @options.merge(meth.to_sym => args))
      end
    end

    ##
    # #method_missing above handles *every* message (option getters and
    # setters as well as #to_<term> output methods), so this object
    # effectively responds to any method name; keep #respond_to?
    # consistent with that. (Previously missing, which made
    # respond_to?(:to_png) etc. report false.)
    def respond_to_missing?(_meth_id, _include_private = false)
      true
    end

    ##
    # ==== Overview
    # Create new Plot object where dataset at *position* will
    # be replaced with the new one created from it by updating.
    # ==== Parameters
    # * *position* - position of dataset which you need to update
    #   (by default first dataset is updated)
    # * *data* - data to update dataset with
    # * *options* - options to update dataset with
    # ==== Example
    #   TODO add examples (and specs!)
    def update_dataset(position = 0, data: nil, **options)
      old_ds = @datasets[position]
      new_ds = old_ds.update(data, options)
      # Dataset#update returns the same object when nothing changed
      new_ds.equal?(old_ds) ? self : replace_dataset(position, new_ds)
    end

    ##
    # ==== Overview
    # Create new Plot object where dataset at *position* will
    # be replaced with the given one.
    # ==== Parameters
    # * *position* - position of dataset which you need to update
    #   (by default first dataset is replaced)
    # * *dataset* - dataset to replace the old one
    # ==== Example
    #   TODO add examples (and specs!)
    def replace_dataset(position = 0, dataset)
      Plot.new(@datasets.set(position, dataset), @options)
    end

    ##
    # ==== Overview
    # Create new Plot object where given dataset will
    # be appended to dataset list.
    # ==== Parameters
    # * *dataset* - dataset to add
    # ==== Example
    #   TODO add examples (and specs!)
    def add_dataset(dataset)
      Plot.new(@datasets.add(dataset.is_a?(Dataset) ? dataset : Dataset.new(*dataset)), @options)
    end

    ##
    # ==== Overview
    # Create new Plot object where dataset at *position*
    # will be removed from dataset list.
    # ==== Parameters
    # * *position* - position of dataset that should be
    #   removed (by default last dataset is removed)
    # ==== Example
    #   TODO add examples (and specs!)
    def remove_dataset(position = -1)
      Plot.new(@datasets.delete_at(position), @options)
    end

    ##
    # ==== Overview
    # Replot self. Usable is cases then Plot contains
    # datasets which store data in files. Replot may be
    # used in this case to update plot after data update.
    # ==== Example
    #   TODO add examples (and specs!)
    def replot
      @already_plotted ? plot_command(@terminal, 'replot', @options) : plot
    end

    ##
    # ==== Overview
    # Create new Plot object where current Plot's
    # options are merged with given. If no options
    # given it will just return existing set of options.
    # ==== Parameters
    # * *options* - options to add
    # ==== Example
    #   sin_graph = Plot.new(['sin(x)', title: 'Sin'], title: 'Sin plot from 0 to 3', xrange: 0..3)
    #   sin_graph.plot
    #   sin_graph_update = sin_graph.options(title: 'Sin plot from -10 to 10', xrange: -10..10)
    #   sin_graph_update.plot
    #   # you may also consider this as
    #   # sin_graph.title(...).xrange(...)
    def options(**options)
      if options.empty?
        @options
      else
        Plot.new(@datasets, @options.merge(options))
      end
    end

    attr_reader :terminal
    attr_reader :datasets

    ##
    # ==== Overview
    # Get a dataset number *position*
    def [](*args)
      @datasets[*args]
    end

    ##
    # Send the assembled 'plot …' command to the terminal, wrapped in
    # set/unset of the given options. When an :output file is expected,
    # wait until it has been written (gnuplot runs asynchronously).
    def plot_command(term, full_command, options)
      File.delete(options[:output]) if options[:output] && File.file?(options[:output])
      term.set(options)
          .puts(full_command)
          .unset(options.keys)
      if options[:output]
        sleep 0.001 until File.file?(options[:output]) && File.size(options[:output]) > 100
      end
    end
  end
end
add spec for Plot#<<
module Gnuplot
  ##
  # === Overview
  # Plot corresponds to a simple 2D visualisation.
  # Plot objects are effectively immutable: dataset- and option-changing
  # methods return a *new* Plot (state lives in persistent Hamster
  # collections), so existing instances are never modified.
  class Plot
    # Terminal this plot outputs to by default
    attr_reader :terminal
    # Hamster::Vector of Dataset objects to be plotted
    attr_reader :datasets
    ##
    # ==== Parameters
    # * *datasets* are either instances of Dataset class or
    #   [data, **dataset_options] arrays
    # * *options* will be considered as 'settable' options of gnuplot
    #   ('set xrange [1:10]' for { xrange: 1..10 }, "set title 'plot'" for { title: 'plot' } etc)
    def initialize(*datasets, **options)
      # A single Hamster::Vector argument is adopted as-is; this is the
      # path used internally when deriving a new Plot from an existing one.
      @datasets = if datasets[0].is_a? Hamster::Vector
                    datasets[0]
                  else
                    Hamster::Vector.new(datasets).map { |ds| convert_to_dataset(ds) }
                  end
      @options = Hamster.hash(options)
      @already_plotted = false
      @cmd = 'plot '
      @terminal = Terminal.new
      yield(self) if block_given?
    end
    ##
    # ==== Overview
    # This outputs plot to term (if given) or to last used term (if any)
    # or just builds its own Terminal with plot and options
    # ==== Parameters
    # * *term* - Terminal to plot to
    # * *options* - will be considered as 'settable' options of gnuplot
    #   ('set xrange [1:10]', 'set title 'plot'' etc);
    #   options passed here have priority above already given to ::new
    def plot(term = @terminal, **options)
      opts = @options.merge(options)
      full_command = @cmd + @datasets.map { |dataset| dataset.to_s(term) }.join(' , ')
      plot_command(term, full_command, opts)
      @already_plotted = true
      self
    end
    ##
    # ==== Overview
    # Method which outputs plot to specific terminal (possibly some file).
    # Explicit use should be avoided. This method is called from #method_missing
    # when it handles method names like #to_png(options).
    # ==== Parameters
    # * *terminal* - string corresponding to terminal type (png, html, jpeg etc)
    # * *path* - path to output file, if none given it will output to temp file
    #   and then read it and return binary data with contents of file
    # * *options* - used in 'set term <term type> <options here>'
    # ==== Examples
    #   plot.to_png('./result.png', size: [300, 500])
    #   contents = plot.to_svg(size: [100, 100])
    #   plot.to_dumb('./result.txt', size: [30, 15])
    #
    # NOTE(review): Dir::Tmpname.make_tmpname was removed from newer Ruby
    # stdlibs — confirm the supported Ruby versions for this code path.
    def to_specific_term(terminal, path = nil, **options)
      if path
        result = plot(term: [terminal, options], output: path)
      else
        # no path given: render to a temp file, slurp it, clean up
        path = Dir::Tmpname.make_tmpname(terminal, 0)
        plot(term: [terminal, options], output: path)
        result = File.binread(path)
        File.delete(path)
      end
      result
    end
    ##
    # ==== Overview
    # In this gem #method_missing is used both to handle
    # options and to handle plotting to specific terminal.
    #
    # ==== Options handling
    # ===== Overview
    # You may set options using #option_name(option_value) method.
    # A new object will be constructed with selected option set.
    # And finally you can get current value of any option using
    # #options_name without arguments.
    # ===== Examples
    #   new_plot = plot.title('Awesome plot')
    #   plot.title # >nil
    #   new_plot.title # >'Awesome plot'
    #   plot.title # >nil (the original plot is never modified)
    #
    # ==== Plotting to specific term
    # ===== Overview
    # Gnuplot offers possibility to output graphics to many image formats.
    # The easiest way to to so is to use #to_<plot_name> methods.
    # ===== Parameters
    # * *options* - set of options related to terminal (size, font etc).
    # ===== Examples
    #   # options specific for png term
    #   plot.to_png('./result.png', size: [300, 500], font: ['arial', 12])
    #   # options specific for svg term
    #   content = plot.to_svg(size: [100, 100], fname: 'Arial', fsize: 12)
    #
    # NOTE(review): respond_to_missing? is not overridden, so
    # plot.respond_to?(:to_png) reports false despite this handler.
    def method_missing(meth_id, *args)
      meth = meth_id.id2name
      return to_specific_term(meth[3..-1], *args) if meth[0..2] == 'to_'
      if args.empty?
        # getter: single-element option values are unwrapped
        value = @options[meth.to_sym]
        value = value[0] if value && value.size == 1
        value
      else
        # setter: returns a new Plot with the option merged in
        Plot.new(@datasets, @options.merge(meth.to_sym => args))
      end
    end
    ##
    # ==== Overview
    # Create new Plot object where dataset at *position* will
    # be replaced with the new one created from it by updating.
    # ==== Parameters
    # * *position* - position of dataset which you need to update
    #   (by default first dataset is updated)
    # * *data* - data to update dataset with
    # * *options* - options to update dataset with
    # ==== Example
    #   TODO add examples (and specs!)
    def update_dataset(position = 0, data: nil, **options)
      old_ds = @datasets[position]
      new_ds = old_ds.update(data, options)
      # Dataset#update returns the same object when nothing changed;
      # in that case no new Plot needs to be built.
      new_ds.equal?(old_ds) ? self : replace_dataset(position, new_ds)
    end
    ##
    # ==== Overview
    # Create new Plot object where dataset at *position* will
    # be replaced with the given one.
    # ==== Parameters
    # * *position* - position of dataset which you need to update
    #   (by default first dataset is replaced)
    # * *dataset* - dataset to replace the old one
    # ==== Example
    #   TODO add examples (and specs!)
    def replace_dataset(position = 0, dataset)
      Plot.new(@datasets.set(position, dataset), @options)
    end
    ##
    # ==== Overview
    # Create new Plot object where given dataset will
    # be appended to dataset list.
    # ==== Parameters
    # * *dataset* - dataset to add
    # ==== Example
    #   TODO add examples (and specs!)
    def add_dataset(dataset)
      Plot.new(@datasets.add(convert_to_dataset(dataset)), @options)
    end
    alias_method :<<, :add_dataset
    ##
    # ==== Overview
    # Create new Plot object where the dataset at *position*
    # is removed from the dataset list.
    # ==== Parameters
    # * *position* - position of dataset that should be
    #   removed (by default last dataset is removed)
    # ==== Example
    #   TODO add examples (and specs!)
    def remove_dataset(position = -1)
      Plot.new(@datasets.delete_at(position), @options)
    end
    ##
    # ==== Overview
    # Replot self. Usable in cases when Plot contains
    # datasets which store data in files. Replot may be
    # used in this case to update plot after data update.
    # ==== Example
    #   TODO add examples (and specs!)
    def replot
      # only 'replot' after a first full plot; otherwise fall back to #plot
      @already_plotted ? plot_command(@terminal, 'replot', @options) : plot
    end
    ##
    # ==== Overview
    # Create new Plot object where current Plot's
    # options are merged with given. If no options
    # given it will just return existing set of options.
    # ==== Parameters
    # * *options* - options to add
    # ==== Example
    #   sin_graph = Plot.new(['sin(x)', title: 'Sin'], title: 'Sin plot from 0 to 3', xrange: 0..3)
    #   sin_graph.plot
    #   sin_graph_update = sin_graph.options(title: 'Sin plot from -10 to 10', xrange: -10..10)
    #   sin_graph_update.plot
    #   # you may also consider this as
    #   # sin_graph.title(...).xrange(...)
    def options(**options)
      if options.empty?
        @options
      else
        Plot.new(@datasets, @options.merge(options))
      end
    end
    ##
    # ==== Overview
    # Get a dataset number *position*
    def [](*args)
      @datasets[*args]
    end
    ##
    # Coerce +source+ into a Dataset: pass Dataset instances through,
    # splat anything else (e.g. [data, **opts] arrays) into Dataset.new.
    def convert_to_dataset(source)
      source.is_a?(Dataset) ? source : Dataset.new(*source)
    end
    ##
    # Send +full_command+ to +term+ wrapped in set/unset of +options+.
    # A stale output file is deleted first; afterwards this busy-waits
    # (1 ms steps) until gnuplot has written the output file.
    # NOTE(review): the `> 100` byte threshold assumes any real output
    # exceeds 100 bytes — confirm for very small terminals/outputs.
    def plot_command(term, full_command, options)
      File.delete(options[:output]) if options[:output] && File.file?(options[:output])
      term.set(options)
          .puts(full_command)
          .unset(options.keys)
      if options[:output]
        sleep 0.001 until File.file?(options[:output]) && File.size(options[:output]) > 100
      end
    end
  end
end
|
module PlayingCards
  # A deck of playing cards split into three piles: the draw pile
  # (+cards+), the discard pile (+discards+) and cards currently held
  # by players (+drawn_cards+). State can be dumped via #dump_state and
  # restored by passing it back through the constructor options.
  class Deck
    # Raised when a draw requests more cards than remain in the draw pile.
    class NotEnoughCardsError < StandardError; end
    # Raised when discarding a card that was never drawn.
    class NotDrawnCardError < StandardError; end
    # Raised when restored state does not form complete deck(s).
    class InvalidDeckStateError < StandardError; end
    attr_reader :cards, :discards, :drawn_cards, :options
    # options:
    #   :num_decks - number of full decks to combine (default 1)
    #   :cards, :discards, :drawn_cards - previously dumped pile states
    #     (arrays of [rank, suit]-style pairs; see #dump_state)
    def initialize(options = {})
      @options = options
      @cards = []
      @discards = []
      @drawn_cards = []
      if options[:cards] || options[:discards] || options[:drawn_cards]
        restore_deck_from_options
      else
        # fresh deck: one Card per combination, repeated per deck count
        (Card.card_combinations * number_of_decks).each do |card_combination|
          @cards << Card.new(card_combination[0], card_combination[1])
        end
      end
    end
    # How many full decks this instance was built from.
    def number_of_decks
      @options.fetch(:num_decks, 1)
    end
    # Cards left in the draw pile.
    def cards_remaining
      cards.size
    end
    # Shuffle the draw pile in place.
    def shuffle!
      cards.shuffle!
    end
    # Cut the draw pile at a random position (bottom part goes on top).
    def cut
      top_cut, bottom_cut = split
      @cards = bottom_cut + top_cut
    end
    # Split the draw pile at a random position; returns [top, bottom].
    def split
      x = rand(cards_remaining)
      top_cut = cards.slice(0..x)
      bottom_cut = cards.slice(x+1..cards.size-1)
      [top_cut, bottom_cut]
    end
    # Draw +num+ cards off the top of the draw pile; the drawn cards are
    # tracked in +drawn_cards+. Raises NotEnoughCardsError when the pile
    # is too small.
    def draw(num = 1)
      raise NotEnoughCardsError if num > cards_remaining
      draws = cards.shift(num)
      @drawn_cards += draws
      draws
    end
    # Move a previously drawn card onto the discard pile; raises
    # NotDrawnCardError when the card was never drawn.
    # NOTE(review): +drawn_card+ is assigned inside the condition but
    # never used — the original +card+ argument is pushed instead.
    def discard(card)
      card_pos = drawn_cards.index(card)
      if card_pos && drawn_card = drawn_cards.delete_at(card_pos)
        @discards << card
      else
        raise NotDrawnCardError
      end
    end
    # Once the draw pile is empty, move the discards back in (shuffled by
    # default). Returns nil (doing nothing) while cards remain.
    def reuse_discards(shuffle_cards = true)
      if cards_remaining == 0
        @cards += discards
        @discards = []
        self.shuffle! if shuffle_cards
        @cards
      end
    end
    # Serializable snapshot: [cards, discards, drawn_cards] as card states.
    def dump_state
      cards_state = cards.collect{|c| c.state}
      discards_state = discards.collect{|c| c.state}
      drawn_cards_state = drawn_cards.collect{|c| c.state}
      [cards_state, discards_state, drawn_cards_state]
    end
    private
    # Rebuild the three piles from dumped state in +options+; raises
    # InvalidDeckStateError when the piles don't add up to complete decks.
    def restore_deck_from_options
      restore_cards = options.delete(:cards) || []
      restore_discards = options.delete(:discards) || []
      restore_drawn_cards = options.delete(:drawn_cards) || []
      raise InvalidDeckStateError unless restored_deck_valid?(restore_cards, restore_discards, restore_drawn_cards)
      unless restore_cards.empty?
        restore_cards.each do |card_combination|
          @cards << Card.new(card_combination[0], card_combination[1])
        end
      end
      unless restore_discards.empty?
        restore_discards.each do |card_combination|
          @discards << Card.new(card_combination[0], card_combination[1])
        end
      end
      unless restore_drawn_cards.empty?
        restore_drawn_cards.each do |card_combination|
          @drawn_cards << Card.new(card_combination[0], card_combination[1])
        end
      end
    end
    # All restored piles together must contain exactly the combinations
    # of number_of_decks full decks.
    def restored_deck_valid?(restore_cards, restore_discards, restore_drawn_cards)
      restore_deck = restore_cards + restore_discards + restore_drawn_cards
      sorted_restore_deck = restore_deck.sort{|a,b| a <=> b}
      sorted_restore_deck == (Card.card_combinations * number_of_decks).sort{|a,b| a <=> b}
    end
  end
end
method cleanup for readability
module PlayingCards
  # A deck of playing cards split into three piles: the draw pile
  # (+cards+), the discard pile (+discards+) and cards currently held
  # by players (+drawn_cards+). State can be dumped via #dump_state and
  # restored by passing it back through the constructor options.
  class Deck
    # Raised when a draw requests more cards than remain in the draw pile.
    class NotEnoughCardsError < StandardError; end
    # Raised when discarding a card that was never drawn.
    class NotDrawnCardError < StandardError; end
    # Raised when restored state does not form complete deck(s).
    class InvalidDeckStateError < StandardError; end

    attr_reader :cards, :discards, :drawn_cards, :options

    # options:
    #   :num_decks - number of full decks to combine (default 1)
    #   :cards, :discards, :drawn_cards - previously dumped pile states
    #     (arrays of [rank, suit]-style pairs; see #dump_state)
    def initialize(options = {})
      @options = options
      @cards = []
      @discards = []
      @drawn_cards = []
      if options[:cards] || options[:discards] || options[:drawn_cards]
        restore_deck_from_options
      else
        # fresh deck: one Card per combination, repeated per deck count
        @cards = build_cards(Card.card_combinations * number_of_decks)
      end
    end

    # How many full decks this instance was built from.
    def number_of_decks
      @options.fetch(:num_decks, 1)
    end

    # Cards left in the draw pile.
    def cards_remaining
      cards.size
    end

    # Shuffle the draw pile in place.
    def shuffle!
      cards.shuffle!
    end

    # Cut the draw pile at a random position (bottom part goes on top).
    def cut
      top_cut, bottom_cut = split
      @cards = bottom_cut + top_cut
    end

    # Split the draw pile at a random position; returns [top, bottom].
    def split
      x = rand(cards_remaining)
      [cards.take(x + 1), cards.drop(x + 1)]
    end

    # Draw +num+ cards off the top of the draw pile; the drawn cards are
    # tracked in +drawn_cards+. Raises NotEnoughCardsError when the pile
    # is too small.
    def draw(num = 1)
      raise NotEnoughCardsError if num > cards_remaining
      draws = cards.shift(num)
      @drawn_cards += draws
      draws
    end

    # Move a previously drawn card onto the discard pile; raises
    # NotDrawnCardError when the card was never drawn.
    # (Fixed: the old assignment-in-condition left an unused local.)
    def discard(card)
      card_pos = drawn_cards.index(card)
      raise NotDrawnCardError unless card_pos
      drawn_cards.delete_at(card_pos)
      @discards << card
    end

    # Once the draw pile is empty, move the discards back in (shuffled by
    # default). Returns nil (doing nothing) while cards remain.
    def reuse_discards(shuffle_cards = true)
      if cards_remaining == 0
        @cards += discards
        @discards = []
        self.shuffle! if shuffle_cards
        @cards
      end
    end

    # Serializable snapshot: [cards, discards, drawn_cards] as card states.
    def dump_state
      [cards, discards, drawn_cards].map { |pile| pile.collect { |c| c.state } }
    end

    private

    # Rebuild the three piles from dumped state in +options+; raises
    # InvalidDeckStateError when the piles don't add up to complete decks.
    def restore_deck_from_options
      restore_cards = options.delete(:cards) || []
      restore_discards = options.delete(:discards) || []
      restore_drawn_cards = options.delete(:drawn_cards) || []
      unless restored_deck_valid?(restore_cards, restore_discards, restore_drawn_cards)
        raise InvalidDeckStateError
      end
      # iterating an empty array is a no-op, so no empty? guards needed
      @cards = build_cards(restore_cards)
      @discards = build_cards(restore_discards)
      @drawn_cards = build_cards(restore_drawn_cards)
    end

    # Turn an array of combination pairs into Card objects.
    def build_cards(combinations)
      combinations.map { |combination| Card.new(combination[0], combination[1]) }
    end

    # All restored piles together must contain exactly the combinations
    # of number_of_decks full decks.
    def restored_deck_valid?(restore_cards, restore_discards, restore_drawn_cards)
      restore_deck = restore_cards + restore_discards + restore_drawn_cards
      restore_deck.sort == (Card.card_combinations * number_of_decks).sort
    end
  end
end
|
module Plezi
  module_function

  # Reviews the Redis connection, sets it up if it's missing and returns
  # the Redis connection (or +false+ when Redis is unavailable or setup
  # fails).
  #
  # A Redis connection will be automatically created if the `ENV['PL_REDIS_URL']` is set.
  # for example:
  #      ENV['PL_REDIS_URL'] = ENV['REDISCLOUD_URL']`
  # or
  #      ENV['PL_REDIS_URL'] = "redis://username:password@my.host:6379"
  #
  # NOTE(review): this setup path is not synchronized — two threads
  # calling it concurrently may each create a connection and subscriber
  # thread. Guard with a Mutex if used from multiple threads.
  def redis_connection
    return @redis if (@redis_sub_thread && @redis_sub_thread.alive?) && @redis
    return false unless defined?(Redis) && ENV['PL_REDIS_URL']
    @redis_uri ||= URI.parse(ENV['PL_REDIS_URL'])
    @redis ||= Redis.new(host: @redis_uri.host, port: @redis_uri.port, password: @redis_uri.password)
    # fixed typo in the error message ("connction" -> "connection")
    raise "Redis connection failed for: #{ENV['PL_REDIS_URL']}" unless @redis
    @redis_sub_thread = Thread.new do
      begin
        # a dedicated connection: a subscribed Redis connection cannot be
        # used for other commands.
        Redis.new(host: @redis_uri.host, port: @redis_uri.port, password: @redis_uri.password).subscribe(Plezi::Settings.redis_channel_name) do |on|
          on.message do |channel, msg|
            begin
              # SECURITY: YAML.load on data read from Redis — anyone able
              # to publish on this channel can trigger arbitrary object
              # deserialization. Prefer YAML.safe_load with an explicit
              # permitted-class list.
              data = YAML.load(msg)
              # ignore messages this process broadcast itself
              next if data[:server] == Plezi::Settings.uuid
              if data[:target]
                GRHttp::Base::WSHandler.unicast data[:target], data
              else
                GRHttp::Base::WSHandler.broadcast data
              end
            rescue => e
              Reactor.error e
            end
          end
        end
      rescue => e
        Reactor.error e
        retry
      end
    end
    @redis
  rescue => e
    Reactor.error e
    false
  end
end
protect redis from multi-threading
module Plezi
  module_function

  # Reviews the Redis connection, sets it up if it's missing and returns
  # the Redis connection (or +false+ when Redis is unavailable or setup
  # fails). Setup is serialized through a Mutex so concurrent callers
  # cannot create duplicate connections or subscriber threads.
  #
  # A Redis connection will be automatically created if the `ENV['PL_REDIS_URL']` is set.
  # for example:
  #      ENV['PL_REDIS_URL'] = ENV['REDISCLOUD_URL']`
  # or
  #      ENV['PL_REDIS_URL'] = "redis://username:password@my.host:6379"
  def redis_connection
    # fast path: connection and subscriber thread already healthy
    return @redis if (@redis_sub_thread && @redis_sub_thread.alive?) && @redis
    return false unless defined?(Redis) && ENV['PL_REDIS_URL']
    @redis_locker ||= Mutex.new
    @redis_locker.synchronize do
      return @redis if (@redis_sub_thread && @redis_sub_thread.alive?) && @redis # repeat the test once syncing is done.
      @redis_uri ||= URI.parse(ENV['PL_REDIS_URL'])
      @redis ||= Redis.new(host: @redis_uri.host, port: @redis_uri.port, password: @redis_uri.password)
      # fixed typo in the error message ("connction" -> "connection")
      raise "Redis connection failed for: #{ENV['PL_REDIS_URL']}" unless @redis
      @redis_sub_thread = Thread.new do
        begin
          # a dedicated connection: a subscribed Redis connection cannot
          # be used for other commands.
          Redis.new(host: @redis_uri.host, port: @redis_uri.port, password: @redis_uri.password).subscribe(Plezi::Settings.redis_channel_name) do |on|
            on.message do |channel, msg|
              begin
                # SECURITY: YAML.load on data read from Redis — anyone
                # able to publish on this channel can trigger arbitrary
                # object deserialization. Prefer YAML.safe_load with an
                # explicit permitted-class list.
                data = YAML.load(msg)
                # ignore messages this process broadcast itself
                next if data[:server] == Plezi::Settings.uuid
                if data[:target]
                  GRHttp::Base::WSHandler.unicast data[:target], data
                else
                  GRHttp::Base::WSHandler.broadcast data
                end
              rescue => e
                Reactor.error e
              end
            end
          end
        rescue => e
          Reactor.error e
          retry
        end
      end
    end
    @redis
  rescue => e
    Reactor.error e
    false
  end
end
|
require 'pdf/reader'

# Expose the character/mediabox state the receiver gathers while a page
# is walked (protected in the stock PageTextReceiver).
class CustomPageTextReceiver < PDF::Reader::PageTextReceiver
  attr_reader :characters, :mediabox
end

# Expose the text runs computed by the layout engine.
class CustomPageLayout < PDF::Reader::PageLayout
  attr_reader :runs
end

# Walk every page of the PDF given on the command line and pick out the
# text runs consisting of exactly "p".
reader = PDF::Reader.new(ARGV[0])
text_receiver = CustomPageTextReceiver.new
reader.pages.each do |page|
  next if page.nil?
  page.walk(text_receiver)
  layout = CustomPageLayout.new(text_receiver.characters, text_receiver.mediabox)
  layout.runs.select { |run| run.text == "p" }
end
Output JSON
require 'pdf/reader'
require 'json'

# Expose the character/mediabox state the receiver gathers while a page
# is walked (protected in the stock PageTextReceiver).
class CustomPageTextReceiver < PDF::Reader::PageTextReceiver
  attr_reader :characters, :mediabox
end

# Expose the text runs computed by the layout engine.
class CustomPageLayout < PDF::Reader::PageLayout
  attr_reader :runs
end

# Collect the coordinates of every text run whose text is exactly "p"
# (one array of {x:, y:} hashes per page) and write them as JSON next to
# the input file.
reader = PDF::Reader.new(ARGV[0])
text_receiver = CustomPageTextReceiver.new
spots = []
reader.pages.each do |page|
  next if page.nil?
  page.walk(text_receiver)
  runs = CustomPageLayout.new(text_receiver.characters, text_receiver.mediabox).runs
  runs = runs.select { |run| run.text == "p" }
  # snake_case local (was camelCase `runCoordinates`)
  run_coordinates = runs.map { |run| { :x => run.x, :y => run.y } }
  spots << run_coordinates
end

File.open(ARGV[0] + ".json", 'w') do |file|
  file.write(JSON.generate(spots))
end
|
Script to create summary of all sprint work
ACCESS_TOKEN = "your github access token"
MILESTONE = "title of end of sprint milestone"

require_relative 'sprint_statistics'
require 'csv' # stdlib CSV: handles quoting/escaping of commas in titles

# Memoized statistics client.
def stats
  @stats ||= SprintStatistics.new(ACCESS_TOKEN)
end

# All ManageIQ org repos plus the externally hosted tower client.
def repos_to_track
  stats.project_names_from_org("ManageIQ").to_a + ["Ansible/ansible_tower_client_ruby"]
end

prs = []
title = ""
repos_to_track.each do |repo|
  milestone = stats.client.milestones(repo, :state => "all").detect { |m| m[:title] == MILESTONE }
  if milestone
    puts "Milestone found for #{repo}, collecting."
    title = milestone.title
    stats.pull_requests(repo, :milestone => milestone[:number], :state => "closed").each { |pr| prs << pr }
  else
    # (redundant `next` removed — it was the last statement of the block)
    puts "Milestone not found for #{repo}, skipping."
  end
end

# Use the CSV library instead of raw string interpolation so fields
# containing commas/quotes (e.g. PR titles) are escaped correctly.
CSV.open("closed_issues_manageiq_repos.csv", "w") do |csv|
  csv << ["Milestone Statistics for: #{title}"]
  csv << ["NUMBER", "TITLE", "AUTHOR", "ASSIGNEE", "LABELS", "CLOSED AT", "CHANGELOGTEXT"]
  prs.each do |i|
    i.changelog = "#{i.title} [(##{i.number})](#{i.pull_request.html_url})"
    csv << [i.number, i.title, i.user.login, i.assignee && i.assignee.login, i.labels.collect(&:name).join(" "), i.closed_at, i.changelog]
  end
end
|
# Wrapper around the Podio "space" (workspace) API resource.
class Podio::Space < ActivePodio::Base
  include ActivePodio::Updatable
  property :space_id, :integer
  property :name, :string
  property :url, :string
  property :url_label, :string
  property :org_id, :integer
  property :contact_count, :integer
  property :members, :integer
  property :role, :string
  property :rights, :array
  property :post_on_new_app, :boolean
  property :post_on_new_member, :boolean
  property :subscribed, :boolean
  property :privacy, :string
  property :auto_join, :boolean
  has_one :created_by, :class => 'ByLine'
  alias_method :id, :space_id
  # Create this space through the API and store the returned url and
  # space_id on the instance.
  # NOTE(review): only :org_id and :name are sent; the privacy/auto_join
  # properties are ignored on create — confirm whether that's intended.
  def create
    response = Space.create(:org_id => org_id, :name => name)
    self.url = response['url']
    self.space_id = response['space_id']
  end
  # Push the editable attributes of this instance to the API.
  def update
    self.class.update(self.space_id, :name => self.name, :post_on_new_app => self.post_on_new_app, :post_on_new_member => self.post_on_new_member, :url_label => self.url_label, :privacy => self.privacy, :auto_join => self.auto_join)
  end
  class << self
    # POST /space/ — returns the raw response body.
    def create(attributes)
      response = Podio.connection.post do |req|
        req.url '/space/'
        req.body = attributes
      end
      response.body
    end
    # PUT /space/:id — returns the HTTP status code.
    def update(space_id, attributes)
      Podio.connection.put("/space/#{space_id}", attributes).status
    end
    # DELETE /space/:id — returns the HTTP status code.
    def delete(id)
      Podio.connection.delete("/space/#{id}").status
    end
    # GET /space/:id — returns a single Space (ActivePodio's +member+).
    def find(id)
      member Podio.connection.get("/space/#{id}").body
    end
    # POST /space/:id/join — join the given space.
    def join(space_id)
      Podio.connection.post("/space/#{space_id}/join").body
    end
    # GET /space/url — look a space up by its full URL.
    # +info+ toggles extended info in the response (sent as 1/0).
    def find_by_url(url, info = false)
      info = info ? 1 : 0
      member Podio.connection.get("/space/url?url=#{ERB::Util.url_encode(url)}&info=#{info}").body
    end
    # GET /org/:org_id/space/ — all spaces of an organization.
    def find_all_for_org(org_id)
      list Podio.connection.get("/org/#{org_id}/space/").body
    end
    # GET /space/org/:org_id/available/ — open spaces the user may join.
    def find_open_for_org(org_id)
      list Podio.connection.get("/space/org/#{org_id}/available/").body
    end
    # POST /space/org/:org_id/url/validate — check a url_label is free.
    def validate_url_label(org_id, url_label)
      Podio.connection.post { |req|
        req.url "/space/org/#{org_id}/url/validate"
        req.body = {:url_label => url_label}
      }.body
    end
  end
end
Add privacy and auto_join options when creating space
# Wrapper around the Podio "space" (workspace) API resource.
class Podio::Space < ActivePodio::Base
  include ActivePodio::Updatable
  property :space_id, :integer
  property :name, :string
  property :url, :string
  property :url_label, :string
  property :org_id, :integer
  property :contact_count, :integer
  property :members, :integer
  property :role, :string
  property :rights, :array
  property :post_on_new_app, :boolean
  property :post_on_new_member, :boolean
  property :subscribed, :boolean
  property :privacy, :string
  property :auto_join, :boolean
  has_one :created_by, :class => 'ByLine'
  alias_method :id, :space_id
  # Create this space through the API (including the privacy/auto_join
  # settings) and store the returned url and space_id on the instance.
  def create
    response = Space.create(:org_id => org_id, :name => name, :privacy => self.privacy, :auto_join => self.auto_join)
    self.url = response['url']
    self.space_id = response['space_id']
  end
  # Push the editable attributes of this instance to the API.
  def update
    self.class.update(self.space_id, :name => self.name, :post_on_new_app => self.post_on_new_app, :post_on_new_member => self.post_on_new_member, :url_label => self.url_label, :privacy => self.privacy, :auto_join => self.auto_join)
  end
  class << self
    # POST /space/ — returns the raw response body.
    def create(attributes)
      response = Podio.connection.post do |req|
        req.url '/space/'
        req.body = attributes
      end
      response.body
    end
    # PUT /space/:id — returns the HTTP status code.
    def update(space_id, attributes)
      Podio.connection.put("/space/#{space_id}", attributes).status
    end
    # DELETE /space/:id — returns the HTTP status code.
    def delete(id)
      Podio.connection.delete("/space/#{id}").status
    end
    # GET /space/:id — returns a single Space (ActivePodio's +member+).
    def find(id)
      member Podio.connection.get("/space/#{id}").body
    end
    # POST /space/:id/join — join the given space.
    def join(space_id)
      Podio.connection.post("/space/#{space_id}/join").body
    end
    # GET /space/url — look a space up by its full URL.
    # +info+ toggles extended info in the response (sent as 1/0).
    def find_by_url(url, info = false)
      info = info ? 1 : 0
      member Podio.connection.get("/space/url?url=#{ERB::Util.url_encode(url)}&info=#{info}").body
    end
    # GET /org/:org_id/space/ — all spaces of an organization.
    def find_all_for_org(org_id)
      list Podio.connection.get("/org/#{org_id}/space/").body
    end
    # GET /space/org/:org_id/available/ — open spaces the user may join.
    def find_open_for_org(org_id)
      list Podio.connection.get("/space/org/#{org_id}/available/").body
    end
    # POST /space/org/:org_id/url/validate — check a url_label is free.
    def validate_url_label(org_id, url_label)
      Podio.connection.post { |req|
        req.url "/space/org/#{org_id}/url/validate"
        req.body = {:url_label => url_label}
      }.body
    end
  end
end
|
module PoolParty
  # Base class for every PoolParty resource. Subclassing Resource
  # registers the subclass (see .inherited), and define_resource_methods
  # later generates the has_<name>/does_not_have_<name> DSL on Base.
  class Resource < Base
    attr_reader :exists
    attr_accessor :meta_notifies, :meta_not_if, :meta_only_if, :meta_subscribes
    attr_accessor :graph_index
    default_options(
      :cloud => nil,
      :name => to_s.top_level_class,
      :ignore_failure => nil,
      :provider => nil
    )
    def initialize(opts={}, extra_opts={}, &block)
      @exists = true
      super
      valid?
    end
    # Dependency resolver methods
    def compile(compiler)
      @compiler ||= PoolParty.module_eval("DependencyResolvers::#{compiler.to_s.capitalize}")
      @compiler.compile(self)
    end
    # print_to_chef
    # When the dependency resolver comes through and resolves
    # this resource, it will come through and check if it resolves
    # to chef by checking it it responds to the
    #   print_to_chef
    # method. The contents of the method are considered an ERB
    # template and will be rendered as an ERB template.
    def print_to_chef
      <<-EOE
# <%= has_method_name %>
      EOE
    end
    # META FUNCTIONS
    # ALL RESOURCES HAVE THESE METHODS AVAILABLE
    def notifies(other_resources_hash, action_to_take=:reload)
      @meta_notifies ||= {}
      other_resources_hash.each do |k,v|
        notifies_array = (@meta_notifies[k] ||= [])
        notifies_array << [v, action_to_take] unless notifies_array.include?([v, action_to_take])
        # Implicitly add a require
        requires(k => v)
      end
    end
    def subscribes(other_resources_hash, action_to_take=:reload, at_time=:delayed)
      @meta_subscribes ||= {}
      other_resources_hash.each do |k,v|
        subscribes_array = (@meta_subscribes[k] ||= [])
        subscribes_array << [v, action_to_take, at_time] unless subscribes_array.include?([v, action_to_take, at_time])
      end
    end
    # Requires
    # Accepts a Hash (type => name) or an Array of such hashes.
    def requires(other_resources_obj)
      case other_resources_obj
      when Hash
        other_resources_obj.each do |k,v|
          dependencies[k] ||= []
          dependencies[k] << v unless dependencies[k].include?(v)
        end
      when Array
        other_resources_obj.each do |obj|
          requires(obj)
        end
      end
    end
    # Not if
    # If a block is given with the not_if, we assume it is
    # a proc object so we grab the proc source code
    # on both not_if and only_if code
    def not_if(code_str=nil, &block)
      @meta_not_if = block ? [block.code, :block] : [code_str, :string]
    end
    # Run only if
    def only_if(code_str=nil, &block)
      @meta_only_if = block ? [block.code, :block] : [code_str, :string]
    end
    # Should this resource exist on the remote systems
    # which is a lookup of the instance variable
    # on the instance of the resource
    # The default is that the resource DOES exist
    alias :exists? :exists
    # The resource exists in the output and should be created
    # on the remote systems.
    def exists!
      @exists = true
    end
    # The resource should be removed or deleted from the remote
    # system
    def does_not_exist!
      @exists = false
      false
    end
    # CALLBACKS
    def before_compile
    end
    def after_compile
    end
    # Singleton methods
    # has_name
    # The has_ and does_not_have methods names
    # are considered, unless otherwise denoted to be
    # the top level class name
    # for instance
    #   class Tengo < Resource
    #   end
    # the has_ method will be
    #   has_tengo
    def self.has_method_name
      to_s.top_level_class
    end
    # has_method_name alias for the singleton method has_method_name
    # so that there is access to the has_method_name on the instance
    def has_method_name
      self.class.has_method_name
    end
    # DSL METHODS
    # Get access to the cloud that contains this resource
    def cloud
      get_parent_of_class(PoolParty::Cloud)
    end
    # Get access to the pool that contains this resource
    def pool
      get_parent_of_class(PoolParty::Pool)
    end
    def case_of(var, &block)
    end
    # Define the resource methods for all the resources sublcassed by Resource
    # this creates the methods:
    #   has_<resource_name>
    #   does_not_have_<resource_name>
    #   <resource_name>
    # on the Base class
    # The has_ method calls exists! on the resource, then places the resource
    # in the ordered_resources array
    def self.define_resource_methods
      defined_resources.each do |res|
        next if res.method_defined?
        ddputs "Defining resource: #{res} as #{res.has_method_name}"
        define_resource(res)
        res.method_defined!
        unless res.defined_resources.empty?
          res.define_resource_methods
        end
      end
    end
    # Define the resource on the base class so it's available across all
    # PoolParty classes that use Base
    def self.define_resource(res)
      Base.class_eval <<-EOE
        def has_#{res.has_method_name}(a={},b={},&block)
          obj = #{res}.new(a,b,&block)
          obj.exists!
          resources << obj
          obj
        end
        def does_not_have_#{res.has_method_name}(a={},b={},&block)
          obj = has_#{res.has_method_name}(a,b,&block)
          obj.does_not_exist!
          obj
        end
        def #{res.has_method_name}s
          all_resources.select {|q| q if q.class.to_s =~ /#{res.to_s.classify}/ }
        end
        alias :#{res.has_method_name} :has_#{res.has_method_name}
        def get_#{res.has_method_name}(nm)
          {:#{res.has_method_name} => nm}
        end
      EOE
    end
    # When a new resource is created, the class gets stored as a defined resource
    # in the defined_resources resources class variable
    def self.inherited(subclass)
      defined_resources << subclass
    end
    # Note that this resource has been defined already
    def self.method_defined!
      @defined = true
    end
    # Query if this resource has been defined yet
    # NOTE(review): this shadows Module#method_defined?(symbol) with
    # different, zero-argument semantics — confirm no caller relies on
    # the core behavior.
    def self.method_defined?
      defined
    end
    def self.defined
      @defined ||= false
    end
    # Storage of defined resources that are stored when
    # the subclass'd resource is subclassed
    def self.defined_resources
      @defined_resources ||= []
    end
    # HELPERS FOR RESOURCES
    # Print objects
    # This helper takes an object and prints them out with as expected
    # Case of:
    #   Number:
    #     Integer of the format \d\d\d => 0644
    #     Else => 79
    #   String
    #     String of the format \d\d\d\d => 0655
    #     String of the format \d\d\d => 0644
    #     Else => "String"
    #   Proc object
    #     Calls the proc object
    #   Array
    #     All => [ "a", "b" ]
    #   Symbol
    #     All => :a
    #   Hash
    #     All => :a => "a", :b => ["b"]
    #   Object
    #     All => object
    def print_variable(obj)
      case obj
      when Fixnum
        # BUGFIX: Regexp#=== never matches an Integer, so the original
        # `case obj when /^\d{3}$/` always fell through and three-digit
        # modes were never zero-padded as documented above. Match the
        # string form instead.
        if obj.to_s =~ /^\d{3}$/
          "0#{obj.to_i}"
        else
          "#{obj.to_i}"
        end
      when String
        case obj
        when /^\d{4}$/
          "#{obj}"
        when /^\d{3}$/
          "0#{obj}"
        else
          "\"#{obj}\""
        end
      when Proc
        obj.call # eh
      when Array
        "[ #{obj.map {|e| print_variable(e) }.reject {|a| a.nil? || a.empty? }.join(", ")} ]"
      when nil
        nil
      when Symbol
        ":#{obj}"
      when Hash
        "#{obj.map {|k,v| ":#{k} => #{print_variable(v)}" unless v == obj }.compact.join(",\n")}"
      else
        "#{obj}"
      end
    end
    private
    # Get parent of class
    # Walks up the parent chain until +klass+ (or a Pool boundary) is hit.
    def get_parent_of_class(klass)
      if parent.is_a? klass
        parent
      elsif parent && !parent.is_a?(PoolParty::Pool)
        parent.cloud
      else
        nil
      end
    end
  end
end
Dir["#{File.dirname(__FILE__)}/resources/*.rb"].each {|lib| require lib }
Removed implicit requires on notifies
module PoolParty
class Resource < Base
attr_reader :exists
attr_accessor :meta_notifies, :meta_not_if, :meta_only_if, :meta_subscribes
attr_accessor :graph_index
default_options(
:cloud => nil,
:name => to_s.top_level_class,
:ignore_failure => nil,
:provider => nil
)
def initialize(opts={}, extra_opts={}, &block)
@exists = true
super
valid?
end
# Dependency resolver methods
def compile(compiler)
@compiler ||= PoolParty.module_eval("DependencyResolvers::#{compiler.to_s.capitalize}")
@compiler.compile(self)
end
# print_to_chef
# When the dependency resolver comes through and resolves
# this resource, it will come through and check if it resolves
# to chef by checking it it responds to the
# print_to_chef
# method. The contents of the method are considered an ERB
# template and will be rendered as an ERB template.
def print_to_chef
<<-EOE
# <%= has_method_name %>
EOE
end
# META FUNCTIONS
# ALL RESOURCES HAVE THESE METHODS AVAILABLE
def notifies(other_resources_hash, action_to_take=:reload)
@meta_notifies ||= {}
other_resources_hash.each do |k,v|
notifies_array = (@meta_notifies[k] ||= [])
notifies_array << [v, action_to_take] unless notifies_array.include?([v, action_to_take])
# Implicitly add a require
# requires(k => v)
end
end
def subscribes(other_resources_hash, action_to_take=:reload, at_time=:delayed)
@meta_subscribes ||= {}
other_resources_hash.each do |k,v|
subscribes_array = (@meta_subscribes[k] ||= [])
subscribes_array << [v, action_to_take, at_time] unless subscribes_array.include?([v, action_to_take, at_time])
end
end
# Requires
def requires(other_resources_obj)
case other_resources_obj
when Hash
other_resources_obj.each do |k,v|
dependencies[k] ||= []
dependencies[k] << v unless dependencies[k].include?(v)
end
when Array
other_resources_obj.each do |obj|
requires(obj)
end
end
end
# Not if
# If a block is given with the not_if, we assume it is
# a proc object so we grab the proc source code
# on both not_if and only_if code
def not_if(code_str=nil, &block)
@meta_not_if = block ? [block.code, :block] : [code_str, :string]
end
# Run only if
def only_if(code_str=nil, &block)
@meta_only_if = block ? [block.code, :block] : [code_str, :string]
end
# Should this resource exist on the remote systems
# which is a lookup of the instance variable
# on the instance of the resource
# The default is that the resource DOES exist
alias :exists? :exists
# The resource exists in the output and should be created
# on the remote systems.
def exists!
@exists = true
end
# The resource should be removed or deleted from the remote
# system
def does_not_exist!
@exists = false
false
end
# CALLBACKS
def before_compile
end
def after_compile
end
# Singleton methods
# has_name
# The has_ and does_not_have methods names
# are considered, unless otherwise denoted to be
# the top level class name
# for instance
# class Tengo < Resource
# end
# the has_ method will be
# has_tengo
def self.has_method_name
to_s.top_level_class
end
# has_method_name alias for the singleton method has_method_name
# so that there is access to the has_method_name on the instance
def has_method_name
self.class.has_method_name
end
# DSL METHODS
# Get access to the cloud that contains this resource
def cloud
get_parent_of_class(PoolParty::Cloud)
end
# Get access to the pool that contains this resource
def pool
get_parent_of_class(PoolParty::Pool)
end
def case_of(var, &block)
end
# Define the resource methods for all the resources sublcassed by Resource
# this creates the methods:
# has_<resource_name>
# does_not_have_<resource_name>
# <resource_name>
# on the Base class
# The has_ method calls exists! on the resource, then places the resource
# in the ordered_resources array
def self.define_resource_methods
defined_resources.each do |res|
next if res.method_defined?
ddputs "Defining resource: #{res} as #{res.has_method_name}"
define_resource(res)
res.method_defined!
unless res.defined_resources.empty?
res.define_resource_methods
end
end
end
# Define the resource on the base class so it's available across all
# PoolParty classes that use Base
def self.define_resource(res)
Base.class_eval <<-EOE
def has_#{res.has_method_name}(a={},b={},&block)
obj = #{res}.new(a,b,&block)
obj.exists!
resources << obj
obj
end
def does_not_have_#{res.has_method_name}(a={},b={},&block)
obj = has_#{res.has_method_name}(a,b,&block)
obj.does_not_exist!
obj
end
def #{res.has_method_name}s
all_resources.select {|q| q if q.class.to_s =~ /#{res.to_s.classify}/ }
end
alias :#{res.has_method_name} :has_#{res.has_method_name}
def get_#{res.has_method_name}(nm)
{:#{res.has_method_name} => nm}
end
EOE
end
# When a new resource is created, the class gets stored as a defined resource
# in the defined_resources resources class variable
def self.inherited(subclass)
defined_resources << subclass
end
# Note that this resource has been defined already
# Mark this resource's DSL methods as having been defined.
def self.method_defined!
  @defined = true
end
# Query if this resource has been defined yet
# True once method_defined! has run for this class.
# NOTE(review): shadows Module#method_defined? with a zero-arg version.
def self.method_defined?
  defined
end
# Backing flag for method_defined?; defaults to false.
def self.defined
  @defined ||= false
end
# Storage of defined resources that are stored when
# the subclass'd resource is subclassed
# Subclasses registered via self.inherited, lazily initialised.
def self.defined_resources
  @defined_resources ||= []
end
# HELPERS FOR RESOURCES
# Print objects
# This helper takes an object and prints them out with as expected
# Case of:
# Number:
# Integer of the format \d\d\d => 0644
# Else => 79
# String
# String of the format \d\d\d\d => 0655
# String of the format \d\d\d => 0644
# Else => "String"
# Proc object
# Calls the proc object
# Array
# All => [ "a", "b" ]
# Symbol
# All => :a
# Hash
# All => :a => "a", :b => ["b"]
# Object
# All => object
# Format a value for emission into generated manifest text.
#   Integer: three digits (e.g. 644) are treated as a file mode and
#            zero-padded ("0644"); anything else prints plainly ("79").
#   String:  four digits pass through ("0655"), three digits are
#            zero-padded ("0644"), anything else is double-quoted.
#   Proc:    called; its result is used verbatim.
#   Array:   elements formatted recursively, joined as "[ a, b ]"
#            (nil/empty results dropped).
#   nil:     nil.
#   Symbol:  ":sym".
#   Hash:    ":k => v" pairs joined by ",\n" (self-referencing values
#            are skipped).
#   Other:   to_s.
def print_variable(obj)
  case obj
  when Integer
    # Fix: the original `case obj; when /^\d{3}$/` matched a Regexp
    # against an Integer, which never succeeds, so the zero-padded
    # file-mode branch was unreachable. Match the digits instead.
    # (Integer also replaces the removed Fixnum constant.)
    obj.to_s =~ /\A\d{3}\z/ ? "0#{obj}" : obj.to_s
  when String
    case obj
    when /^\d{4}$/
      "#{obj}"
    when /^\d{3}$/
      "0#{obj}"
    else
      "\"#{obj}\""
    end
  when Proc
    obj.call # eh
  when Array
    "[ #{obj.map {|e| print_variable(e) }.reject {|a| a.nil? || a.empty? }.join(", ")} ]"
  when nil
    nil
  when Symbol
    ":#{obj}"
  when Hash
    "#{obj.map {|k,v| ":#{k} => #{print_variable(v)}" unless v == obj }.compact.join(",\n")}"
  else
    "#{obj}"
  end
end
private
# Get parent of class
# Walk upward from this resource's parent to find an ancestor context.
# Returns the parent itself when it already is a +klass+, delegates to
# the parent's cloud when the parent is some intermediate container,
# and returns nil when the parent is the Pool (or missing).
def get_parent_of_class(klass)
  return parent if parent.is_a?(klass)
  return parent.cloud if parent && !parent.is_a?(PoolParty::Pool)
  nil
end
end
end
Dir["#{File.dirname(__FILE__)}/resources/*.rb"].each {|lib| require lib } |
#!/usr/bin/env ruby -i
# encoding: utf-8
# In-place source filter: normalises the files named on the command line
# (or stdin) by rewriting quotes and collapsing blank lines.
# helpers
# No-op loop body for "pass while <condition with side effects>".
def pass; end
# main
# Accumulate all input into one string, applying per-line rewrites.
# (The block parameter `buffer` intentionally shadows the outer name.)
buffer = ARGF.inject(String.new) do |buffer, line|
  # line filters
  line.gsub!(/\s*\n$/, "\n")  # strip trailing whitespace
  line.gsub!("'", '"')        # single -> double quotes
  line.gsub!('u"', '"')       # drop Python-style u"" prefixes
  buffer += line
end
# buffer filters
buffer.gsub!(/\n{2,}/m, "\n\n")  # collapse runs of blank lines
# Remove blank lines between a trailing "end" and its enclosing "end".
pass while buffer.gsub!(/(\n( *) end)\n{2,}(\2end)/m, "\\1\n\\3")
# Make sure there's only one \n at the end
pass while buffer.chomp!
buffer += "\n"
puts buffer
Made the u"..." removal a bit safer
#!/usr/bin/env ruby -i
# encoding: utf-8
# In-place source filter: normalises the files named on the command line
# (or stdin) by rewriting quotes and collapsing blank lines.
# helpers
# No-op loop body for "pass while <condition with side effects>".
def pass; end
# main
# Accumulate all input into one string, applying per-line rewrites.
# (The block parameter `buffer` intentionally shadows the outer name.)
buffer = ARGF.inject(String.new) do |buffer, line|
  # line filters
  line.gsub!(/\s*\n$/, "\n")  # strip trailing whitespace
  line.gsub!("'", '"')        # single -> double quotes
  # Only strip u"" prefixes on comment lines that start with "# [",
  # so ordinary code containing u" is left alone.
  line.gsub!('u"', '"') if line =~ /^\s*# \[/
  buffer += line
end
# buffer filters
buffer.gsub!(/\n{2,}/m, "\n\n")  # collapse runs of blank lines
# Remove blank lines between a trailing "end" and its enclosing "end".
pass while buffer.gsub!(/(\n( *) end)\n{2,}(\2end)/m, "\\1\n\\3")
# Make sure there's only one \n at the end
pass while buffer.chomp!
buffer += "\n"
puts buffer
|
# Query object for the motions a (possibly anonymous) viewer may see.
# Wraps an ActiveRecord relation via Delegator so it chains like a scope.
class Queries::VisibleMotions < Delegator
  # NOTE(review): the group_ids keyword argument is immediately discarded
  # by the unconditional reassignment below -- callers passing it get no
  # effect; confirm whether that is intended.
  def initialize(user: nil, groups: nil, group_ids: nil)
    @user = user
    group_ids = []
    if groups.present?
      group_ids = groups.map(&:id)
    end
    # NOTE(review): 'archived_at IS NULL' is unqualified; ambiguous if
    # more than one joined table has an archived_at column -- confirm.
    @relation = Motion.joins(:discussion => :group).where('archived_at IS NULL')
    if @user.present?
      # Join the viewer's motion_readers row so per-viewer read-state
      # columns come back with each motion.
      @relation = @relation.select('motions.*,
                          1 as joined_to_motion_reader,
                          mr.id as motion_reader_id,
                          mr.user_id as motion_reader_user_id,
                          mr.read_votes_count as read_votes_count,
                          mr.read_activity_count as read_activity_count,
                          mr.last_read_at as last_read_at,
                          mr.following as viewer_following').
                  joins("LEFT OUTER JOIN motion_readers mr ON
                         mr.motion_id = motions.id AND mr.user_id = #{@user.id}")
    end
    if @user.present? && !group_ids.empty?
      # Requested groups, limited to ones the user may see: membership,
      # public groups, or subgroups visible to parent-group members.
      @relation = @relation.where("discussions.group_id IN (:group_ids) AND
                  (discussions.group_id IN (:user_group_ids) OR groups.privacy = 'public'
                  OR (groups.viewable_by_parent_members = TRUE AND groups.parent_id IN (:user_group_ids)))",
                  group_ids: group_ids,
                  user_group_ids: @user.group_ids)
    elsif @user.present? && group_ids.empty?
      # No group filter: everything in the user's own groups.
      @relation = @relation.where('discussions.group_id IN (:user_group_ids)', user_group_ids: @user.group_ids)
    elsif @user.blank? && !group_ids.empty?
      # Anonymous viewer: only public groups among those requested.
      @relation = @relation.where("discussions.group_id IN (:group_ids) AND groups.privacy = 'public'",
                                  group_ids: group_ids)
    else
      # Anonymous viewer with no group filter sees nothing.
      @relation = []
    end
    super(@relation)
  end
  # Delegator plumbing: expose/replace the wrapped relation.
  def __getobj__
    @relation
  end
  def __setobj__(obj)
    @relation = obj
  end
  # Chainable: motions with votes since the viewer last read them
  # (or that the viewer has never read).
  def unread
    @relation = @relation.where('(mr.last_read_at < motions.last_vote_at) OR mr.last_read_at IS NULL')
    self
  end
  # Chainable: motions the viewer follows (or has no reader row for).
  def followed
    @relation = @relation.where('mr.following = ? OR mr.following IS NULL', true)
    self
  end
end
Hotfix: qualify the ambiguous archived_at column reference in the motions query
# Query object for the motions a (possibly anonymous) viewer may see,
# optionally restricted to particular groups. Wraps an ActiveRecord
# relation via Delegator so it can be chained like a scope.
class Queries::VisibleMotions < Delegator
  # user      - the viewing User, or nil for logged-out visitors.
  # groups    - optional collection of Group records to restrict to.
  # group_ids - optional list of group ids, used when +groups+ is absent.
  def initialize(user: nil, groups: nil, group_ids: nil)
    @user = user
    # Fix: the group_ids keyword used to be discarded by an unconditional
    # reassignment to []; honour it, with +groups+ taking precedence.
    group_ids = Array(group_ids)
    if groups.present?
      group_ids = groups.map(&:id)
    end
    # Only motions in non-archived groups are ever visible.
    @relation = Motion.joins(:discussion => :group).where('groups.archived_at IS NULL')
    if @user.present?
      # Join the viewer's motion_readers row so per-viewer read-state
      # columns come back with each motion.
      @relation = @relation.select('motions.*,
                          1 as joined_to_motion_reader,
                          mr.id as motion_reader_id,
                          mr.user_id as motion_reader_user_id,
                          mr.read_votes_count as read_votes_count,
                          mr.read_activity_count as read_activity_count,
                          mr.last_read_at as last_read_at,
                          mr.following as viewer_following').
                  joins("LEFT OUTER JOIN motion_readers mr ON
                         mr.motion_id = motions.id AND mr.user_id = #{@user.id}")
    end
    if @user.present? && !group_ids.empty?
      # Requested groups, limited to ones the user may see: membership,
      # public groups, or subgroups visible to parent-group members.
      @relation = @relation.where("discussions.group_id IN (:group_ids) AND
                  (discussions.group_id IN (:user_group_ids) OR groups.privacy = 'public'
                  OR (groups.viewable_by_parent_members = TRUE AND groups.parent_id IN (:user_group_ids)))",
                  group_ids: group_ids,
                  user_group_ids: @user.group_ids)
    elsif @user.present? && group_ids.empty?
      # No group filter: everything in the user's own groups.
      @relation = @relation.where('discussions.group_id IN (:user_group_ids)', user_group_ids: @user.group_ids)
    elsif @user.blank? && !group_ids.empty?
      # Anonymous viewer: only public groups among those requested.
      @relation = @relation.where("discussions.group_id IN (:group_ids) AND groups.privacy = 'public'",
                                  group_ids: group_ids)
    else
      # Anonymous viewer with no group filter sees nothing.
      @relation = []
    end
    super(@relation)
  end
  # Delegator plumbing: expose/replace the wrapped relation.
  def __getobj__
    @relation
  end
  def __setobj__(obj)
    @relation = obj
  end
  # Chainable: motions with votes since the viewer last read them
  # (or that the viewer has never read).
  def unread
    @relation = @relation.where('(mr.last_read_at < motions.last_vote_at) OR mr.last_read_at IS NULL')
    self
  end
  # Chainable: motions the viewer follows (or has no reader row for).
  def followed
    @relation = @relation.where('mr.following = ? OR mr.following IS NULL', true)
    self
  end
end
|
# coding: utf-8
# Gem specification for preparermd: builds and submits Jekyll content
# repositories to a Deconst site.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'preparermd/version'
Gem::Specification.new do |spec|
  spec.name          = "preparermd"
  spec.version       = PreparerMD::VERSION
  spec.authors       = ["Ash Wilson"]
  spec.email         = ["smashwilson@gmail.com"]
  spec.summary       = %q{Build and submit Jekyll content repositories to a Deconst site.}
  # NOTE(review): homepage names "preparer-jekyll" while the gem is
  # "preparermd" -- confirm which is current.
  spec.homepage      = "https://github.com/deconst/preparer-jekyll"
  # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
  # delete this section to allow pushing this gem to any host.
  # NOTE(review): allowed_push_host is still the placeholder TODO value,
  # so `gem push` will be blocked until it is filled in.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
  else
    raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
  end
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 1.9"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_runtime_dependency "jekyll", "2.5.3"
  spec.add_runtime_dependency "faraday", "0.9.1"
  spec.add_runtime_dependency "jekyll-assets", "0.14.0"
end
Depend on a JavaScript runtime.
# coding: utf-8
# Gem specification for preparermd: builds and submits Jekyll content
# repositories to a Deconst site.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'preparermd/version'
Gem::Specification.new do |spec|
  spec.name          = "preparermd"
  spec.version       = PreparerMD::VERSION
  spec.authors       = ["Ash Wilson"]
  spec.email         = ["smashwilson@gmail.com"]
  spec.summary       = %q{Build and submit Jekyll content repositories to a Deconst site.}
  # NOTE(review): homepage names "preparer-jekyll" while the gem is
  # "preparermd" -- confirm which is current.
  spec.homepage      = "https://github.com/deconst/preparer-jekyll"
  # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
  # delete this section to allow pushing this gem to any host.
  # NOTE(review): allowed_push_host is still the placeholder TODO value,
  # so `gem push` will be blocked until it is filled in.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
  else
    raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
  end
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 1.9"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_runtime_dependency "jekyll", "2.5.3"
  spec.add_runtime_dependency "faraday", "0.9.1"
  spec.add_runtime_dependency "jekyll-assets", "0.14.0"
  # JavaScript runtime needed by the asset pipeline.
  spec.add_runtime_dependency "therubyracer", "0.12.2"
end
|
require 'prompt/command_group'
require 'prompt/command'
# NOTE(review): StringScanner is used below but 'strscan' is not required
# in this file -- presumably loaded elsewhere; confirm.
module Prompt
  # Holds the command groups/commands of an interactive prompt session,
  # dispatches input lines to them and supplies completion candidates.
  class Application
    attr :command_groups
    attr_accessor :prompt
    def initialize
      @command_groups = []
      @prompt = "> "
    end
    # Subsequent add_command calls go into the group with this name
    # (created lazily by current_command_group).
    def select_group desc
      @current_command_group_name = desc
    end
    def add_command command
      current_command_group.commands << command
    end
    # Run the first command whose pattern matches +words+; raises
    # CommandNotFound otherwise. Cached parameter values are cleared
    # afterwards either way.
    def exec words
      commands.each do |command|
        args = command.match(words)
        return command.run(args) if args
      end
      raise CommandNotFound.new
    ensure
      clear_cached_values
    end
    # Completion candidates for the word currently being typed.
    # NOTE(review): Console.split is defined outside this file.
    def completions line_starting_with, word_starting_with
      args = Console.split(line_starting_with)
      arg_idx = word_index(line_starting_with)
      all_expansions(args[0,arg_idx], word_starting_with)
    end
    private
    # Index of the word being typed (counts only completed words, i.e.
    # words followed by whitespace).
    def word_index line
      ss = StringScanner.new(line)
      ss.scan(/\s+/)
      idx = 0
      while ss.scan(/[^\s]+\s+/)
        idx += 1
      end
      idx
    end
    def clear_cached_values
      commands.each do |c|
        c.clear_cached_values
      end
    end
    # All commands across every group, in group order.
    def commands
      @command_groups.map(&:commands).flatten(1)
    end
    def all_expansions(args, partial_arg)
      commands.select { |c| c.start_with? args }.map do |c|
        c.expansions(args.length, partial_arg)
      end.flatten(1)
    end
    # Find or lazily create the currently selected CommandGroup.
    def current_command_group
      command_groups.find { |cg| cg.name == @current_command_group_name } || begin
        cg = CommandGroup.new(@current_command_group_name)
        @command_groups << cg
        cg
      end
    end
  end
end
Minor refactoring
require 'prompt/command_group'
require 'prompt/command'
# NOTE(review): StringScanner is used below but 'strscan' is not required
# in this file -- presumably loaded elsewhere; confirm.
module Prompt
  # Holds the command groups/commands of an interactive prompt session,
  # dispatches input lines to them and supplies completion candidates.
  class Application
    attr :command_groups
    attr_accessor :prompt
    def initialize
      @command_groups = []
      @prompt = "> "
    end
    # Subsequent add_command calls go into the group with this name
    # (created lazily by current_command_group).
    def select_group desc
      @current_command_group_name = desc
    end
    def add_command command
      current_command_group.commands << command
    end
    # Run the first command whose pattern matches +words+; raises
    # CommandNotFound otherwise. Cached parameter values are cleared
    # afterwards either way.
    def exec words
      commands.each do |command|
        args = command.match(words)
        return command.run(args) if args
      end
      raise CommandNotFound.new
    ensure
      clear_cached_values
    end
    # Completion candidates for the word currently being typed.
    # NOTE(review): Console.split is defined outside this file.
    def completions line_starting_with, word_starting_with
      args = Console.split(line_starting_with)
      last_idx = index_of_last_word(line_starting_with)
      all_expansions(args[0,last_idx], word_starting_with)
    end
    private
    # Index of the word being typed (counts only completed words, i.e.
    # words followed by whitespace).
    def index_of_last_word line
      ss = StringScanner.new(line)
      ss.scan(/\s+/)
      idx = 0
      idx += 1 while ss.scan(/[^\s]+\s+/)
      idx
    end
    def clear_cached_values
      commands.each do |c|
        c.clear_cached_values
      end
    end
    # All commands across every group, in group order.
    def commands
      @command_groups.map(&:commands).flatten(1)
    end
    def all_expansions(args, partial_arg)
      commands.select { |c| c.start_with? args }.map do |c|
        c.expansions(args.length, partial_arg)
      end.flatten(1)
    end
    # Find or lazily create the currently selected CommandGroup.
    def current_command_group
      command_groups.find { |cg| cg.name == @current_command_group_name } || begin
        cg = CommandGroup.new(@current_command_group_name)
        @command_groups << cg
        cg
      end
    end
  end
end
|
require 'ipaddr'
require 'rubygems'
#require 'Dnsruby'
require 'provision/dns'
# Allocates host IPs out of a configured network range, recording each
# allocation as a DNS PTR record via dynamic updates (Dnsruby).
class Provision::DNS::DDNS < Provision::DNS
  # options:
  #   :network_range  - CIDR string "X.X.X.X/Y" (required).
  #   :min_allocation - host offset of the first allocatable IP (default 10).
  def initialize(options={})
    super()
    range = options[:network_range] || raise("No :network_range supplied")
    parts = range.split('/')
    if parts.size != 2
      raise(":network_range must be of the format X.X.X.X/Y")
    end
    broadcast_mask = (IPAddr::IN4MASK >> parts[1].to_i)
    @network = IPAddr.new(parts[0]).mask(parts[1])
    @broadcast = @network | IPAddr.new(broadcast_mask, Socket::AF_INET)
    # Highest assignable address: one below the broadcast address.
    @max_allocation = IPAddr.new(@broadcast.to_i - 1, Socket::AF_INET)
    min_allocation = options[:min_allocation] || 10
    @min_allocation = IPAddr.new(min_allocation.to_i + @network.to_i, Socket::AF_INET)
  end
  # Reverse zone, hard-coded for the 172.16.16.0 network.
  def reverse_zone
    '16.16.172.in-addr.arpa'
  end
  # Primary nameserver address; currently the same for every zone.
  def get_primary_nameserver_for(zone)
    '172.16.16.5'
  end
  # Send a Dnsruby update message to the zone's primary nameserver.
  # Returns true on success, false on any error.
  # NOTE(review): `rescue Exception` also swallows SignalException and
  # SystemExit; `rescue StandardError` would be safer.
  def send_update(zone, update)
    res = Dnsruby::Resolver.new({:nameserver => get_primary_nameserver_for(zone)})
    ok = true
    begin
      reply = res.send_message(update)
      print "Update succeeded\n"
    rescue Exception => e
      print "Update failed: #{e}\n"
      ok = false
    end
    ok
  end
  # Report (but do not actually delete) the allocation for spec[:fqdn].
  # NOTE(review): @by_name is never assigned in this class -- presumably
  # maintained by the Provision::DNS superclass; confirm.
  def remove_ips_for(spec)
    ip = nil
    hn = spec[:fqdn]
    if @by_name[hn]
      ip = @by_name[hn]
      puts "Removing ip allocation (#{ip}) for #{hn}"
      return true
    else
      puts "No ip allocation found for #{hn}, not removing"
      return false
    end
  end
  # Try to claim the PTR record for ip. The `absent` prerequisite makes
  # the update fail when the record already exists, so a truthy return
  # doubles as an atomic "this IP was free and is now ours" check.
  def try_add_reverse_lookup(ip, fqdn)
    update = Dnsruby::Update.new(reverse_zone)
    ip_rev = ip.to_s.split('.').reverse.join('.')
    update.absent("#{ip_rev}.in-addr.arpa.", 'PTR') # prereq
    update.add("#{ip_rev}.in-addr.arpa.", 'PTR', 86400, "#{fqdn}.")
    send_update(reverse_zone, update)
  end
  # Allocate (or return the existing) IP for spec[:fqdn] by scanning
  # upwards from @min_allocation until a PTR claim succeeds.
  # NOTE(review): lookup_ip_for is not defined in this class -- presumably
  # inherited from Provision::DNS; confirm.
  def allocate_ips_for(spec)
    ip = nil
    hn = spec[:fqdn]
    if lookup_ip_for(spec)
      puts "No new allocation for #{hn}, already allocated to #{@by_name[hn]}"
      return lookup_ip_for(spec)
    else
      max_ip = @max_allocation
      ip = @min_allocation
      while !try_add_reverse_lookup(ip, hn)
        ip = IPAddr.new(ip.to_i + 1, Socket::AF_INET)
        if ip >= max_ip
          raise("Ran out of ips")
        end
      end
    end
    ip
  end
end
Uncomment Dnsruby
require 'ipaddr'
require 'rubygems'
require 'Dnsruby'
require 'provision/dns'
# Allocates host IPs out of a configured network range, recording each
# allocation as a DNS PTR record via dynamic updates (Dnsruby).
class Provision::DNS::DDNS < Provision::DNS
  # options:
  #   :network_range  - CIDR string "X.X.X.X/Y" (required).
  #   :min_allocation - host offset of the first allocatable IP (default 10).
  def initialize(options={})
    super()
    range = options[:network_range] || raise("No :network_range supplied")
    parts = range.split('/')
    if parts.size != 2
      raise(":network_range must be of the format X.X.X.X/Y")
    end
    broadcast_mask = (IPAddr::IN4MASK >> parts[1].to_i)
    @network = IPAddr.new(parts[0]).mask(parts[1])
    @broadcast = @network | IPAddr.new(broadcast_mask, Socket::AF_INET)
    # Highest assignable address: one below the broadcast address.
    @max_allocation = IPAddr.new(@broadcast.to_i - 1, Socket::AF_INET)
    min_allocation = options[:min_allocation] || 10
    @min_allocation = IPAddr.new(min_allocation.to_i + @network.to_i, Socket::AF_INET)
  end
  # Reverse zone, hard-coded for the 172.16.16.0 network.
  def reverse_zone
    '16.16.172.in-addr.arpa'
  end
  # Primary nameserver address; currently the same for every zone.
  def get_primary_nameserver_for(zone)
    '172.16.16.5'
  end
  # Send a Dnsruby update message to the zone's primary nameserver.
  # Returns true on success, false on any error.
  def send_update(zone, update)
    res = Dnsruby::Resolver.new({:nameserver => get_primary_nameserver_for(zone)})
    ok = true
    begin
      reply = res.send_message(update)
      print "Update succeeded\n"
    rescue StandardError => e
      # Fix: was `rescue Exception`, which also swallows SignalException,
      # SystemExit and NoMemoryError; StandardError still covers the
      # Dnsruby error classes raised by send_message.
      print "Update failed: #{e}\n"
      ok = false
    end
    ok
  end
  # Report (but do not actually delete) the allocation for spec[:fqdn].
  # NOTE(review): @by_name is never assigned in this class -- presumably
  # maintained by the Provision::DNS superclass; confirm.
  def remove_ips_for(spec)
    ip = nil
    hn = spec[:fqdn]
    if @by_name[hn]
      ip = @by_name[hn]
      puts "Removing ip allocation (#{ip}) for #{hn}"
      return true
    else
      puts "No ip allocation found for #{hn}, not removing"
      return false
    end
  end
  # Try to claim the PTR record for ip. The `absent` prerequisite makes
  # the update fail when the record already exists, so a truthy return
  # doubles as an atomic "this IP was free and is now ours" check.
  def try_add_reverse_lookup(ip, fqdn)
    update = Dnsruby::Update.new(reverse_zone)
    ip_rev = ip.to_s.split('.').reverse.join('.')
    update.absent("#{ip_rev}.in-addr.arpa.", 'PTR') # prereq
    update.add("#{ip_rev}.in-addr.arpa.", 'PTR', 86400, "#{fqdn}.")
    send_update(reverse_zone, update)
  end
  # Allocate (or return the existing) IP for spec[:fqdn] by scanning
  # upwards from @min_allocation until a PTR claim succeeds.
  # NOTE(review): lookup_ip_for is not defined in this class -- presumably
  # inherited from Provision::DNS; confirm.
  def allocate_ips_for(spec)
    ip = nil
    hn = spec[:fqdn]
    if lookup_ip_for(spec)
      puts "No new allocation for #{hn}, already allocated to #{@by_name[hn]}"
      return lookup_ip_for(spec)
    else
      max_ip = @max_allocation
      ip = @min_allocation
      while !try_add_reverse_lookup(ip, hn)
        ip = IPAddr.new(ip.to_i + 1, Socket::AF_INET)
        if ip >= max_ip
          raise("Ran out of ips")
        end
      end
    end
    ip
  end
end
|
require 'ipaddr'
require 'rubygems'
require 'tempfile'
require 'provision/dns'
require 'resolv'
# Dynamic-DNS backed provisioner: each Network allocates IPs from a CIDR
# range by claiming PTR records through nsupdate(8), then adds matching
# A records.
class Provision::DNS::DDNS < Provision::DNS
  class Network
    # options:
    #   :network_range  - CIDR string "X.X.X.X/Y" (required).
    #   :min_allocation - host offset of first allocatable IP (default 10).
    #   :rndc_key       - shared secret for nsupdate -k (required).
    def initialize(options={})
      range = options[:network_range] || raise("No :network_range supplied")
      parts = range.split('/')
      if parts.size != 2
        raise(":network_range must be of the format X.X.X.X/Y")
      end
      broadcast_mask = (IPAddr::IN4MASK >> parts[1].to_i)
      @network = IPAddr.new(parts[0]).mask(parts[1])
      @broadcast = @network | IPAddr.new(broadcast_mask, Socket::AF_INET)
      # Highest assignable address: one below the broadcast address.
      @max_allocation = IPAddr.new(@broadcast.to_i - 1, Socket::AF_INET)
      min_allocation = options[:min_allocation] || 10
      @min_allocation = IPAddr.new(min_allocation.to_i + @network.to_i, Socket::AF_INET)
      @rndc_key = options[:rndc_key] || raise("No :rndc_key supplied")
    end
    # Write the rndc key to a Tempfile (hmac-md5) for nsupdate -k; caller
    # is responsible for unlinking it (see exec_nsupdate).
    def write_rndc_key
      tmp_file = Tempfile.new('remove_temp')
      tmp_file.puts "key \"rndc-key\" {"
      tmp_file.puts "algorithm hmac-md5;"
      tmp_file.puts "secret \"#{@rndc_key}\";"
      tmp_file.puts "};"
      tmp_file.close
      tmp_file
    end
    # Primary nameserver address; hard-coded to localhost for now.
    # NOTE(review): try_add_reverse_lookup / add_forward_lookup below
    # hard-code "server 127.0.0.1" instead of calling this -- confirm.
    def get_primary_nameserver_for(zone)
      # '172.16.16.5'
      '127.0.0.1'
    end
    # DNS record removal is not implemented; always returns false.
    def remove_ips_for(spec)
      hn = spec[:fqdn]
      puts "Not ability to remove DNS for #{hn}, not removing"
      return false
    end
    # Try to claim the PTR record for ip via nsupdate. The nxdomain
    # prerequisite makes the update fail (YXDOMAIN) when the record
    # exists, so success doubles as an atomic allocation check.
    def try_add_reverse_lookup(ip, fqdn)
      ip_rev = ip.to_s.split('.').reverse.join('.')
      tmp_file = Tempfile.new('remove_temp')
      tmp_file.puts "server 127.0.0.1"
      tmp_file.puts "zone 16.16.172.in-addr.arpa"
      tmp_file.puts "prereq nxdomain #{ip_rev}.in-addr.arpa"
      tmp_file.puts "update add #{ip_rev}.in-addr.arpa. 86400 PTR #{fqdn}."
      tmp_file.puts "send"
      tmp_file.close
      out = exec_nsupdate(tmp_file)
      if out =~ /update failed: YXDOMAIN/
        puts "FAILED TO ADD #{ip_rev}.in-addr.arpa. PTR #{fqdn}. IP already used"
        return false
      else
        puts "ADD OK for reverse of #{ip} to #{fqdn} => #{out}"
        return true
      end
    end
    # Pipe an update script to nsupdate with the rndc key; returns the
    # combined stdout/stderr. Both temp files are unlinked afterwards.
    def exec_nsupdate(update_file)
      rndc_tmp = write_rndc_key
      out = `cat #{update_file.path} | nsupdate -k #{rndc_tmp.path} 2>&1`
      update_file.unlink
      rndc_tmp.unlink
      out
    end
    # Add the forward A record; the zone is the fqdn minus its host part.
    # Raises when nsupdate produces any output (empty output = success).
    def add_forward_lookup(ip, fqdn)
      fqdn_s = fqdn.split "."
      zone_s = fqdn_s.clone
      hn = zone_s.shift
      zone = zone_s.join('.')
      tmp_file = Tempfile.new('remove_temp')
      tmp_file.puts "server 127.0.0.1"
      tmp_file.puts "zone #{zone}"
      tmp_file.puts "update add #{fqdn}. 86400 A #{ip}"
      tmp_file.puts "send"
      tmp_file.close
      out = exec_nsupdate(tmp_file)
      if out =~ /^$/
        return true
      else
        raise("Could not add forward lookup #{fqdn} A #{ip}: #{out}")
      end
    end
    # Resolve hn, returning an IPAddr, or false when it does not resolve.
    # NOTE(review): Resolv::DNS has no #query method, so this raises
    # NoMethodError (not rescued by Resolv::ResolvError) -- likely meant
    # getaddress; confirm.
    def lookup_ip_for(hn)
      res = Resolv::DNS.open({:nameserver=>["127.0.0.1"]})
      begin
        IPAddr.new(res.query(hn))
      rescue Resolv::ResolvError
        puts "Could not find #{hn}"
        false
      end
    end
    # Allocate (or return the existing) IP for spec[:fqdn] by scanning
    # upwards from @min_allocation until a PTR claim succeeds, then add
    # the forward record.
    # NOTE(review): the final IPAddr.new(ip) wraps an IPAddr, which
    # IPAddr.new does not accept -- confirm this path ever ran.
    def allocate_ips_for(spec)
      ip = nil
      hn = spec[:fqdn]
      if lookup_ip_for(hn)
        puts "No new allocation for #{hn}, already allocated"
        return lookup_ip_for(hn)
      else
        max_ip = @max_allocation
        ip = @min_allocation
        while !try_add_reverse_lookup(ip, hn)
          ip = IPAddr.new(ip.to_i + 1, Socket::AF_INET)
          if ip >= max_ip
            raise("Ran out of ips")
          end
        end
        add_forward_lookup(ip, hn)
      end
      IPAddr.new(ip)
    end
  end
  # Register a named network.
  # NOTE(review): net/start are dropped and Network.new() is called with
  # no options, which raises "No :network_range supplied"; @networks is
  # not initialised in this class either -- confirm against superclass.
  def add_network(name, net, start)
    @networks[name] = Network.new()
  end
end
Put the primary nameserver in one place only
require 'ipaddr'
require 'rubygems'
require 'tempfile'
require 'provision/dns'
require 'resolv'
# Dynamic-DNS backed provisioner: each Network allocates IPs from a CIDR
# range by claiming PTR records through nsupdate(8), then adds matching
# A records.
class Provision::DNS::DDNS < Provision::DNS
  class Network
    # options:
    #   :network_range  - CIDR string "X.X.X.X/Y" (required).
    #   :min_allocation - host offset of first allocatable IP (default 10).
    #   :rndc_key       - shared secret for nsupdate -k (required).
    def initialize(options={})
      range = options[:network_range] || raise("No :network_range supplied")
      parts = range.split('/')
      if parts.size != 2
        raise(":network_range must be of the format X.X.X.X/Y")
      end
      broadcast_mask = (IPAddr::IN4MASK >> parts[1].to_i)
      @network = IPAddr.new(parts[0]).mask(parts[1])
      @broadcast = @network | IPAddr.new(broadcast_mask, Socket::AF_INET)
      # Highest assignable address: one below the broadcast address.
      @max_allocation = IPAddr.new(@broadcast.to_i - 1, Socket::AF_INET)
      min_allocation = options[:min_allocation] || 10
      @min_allocation = IPAddr.new(min_allocation.to_i + @network.to_i, Socket::AF_INET)
      @rndc_key = options[:rndc_key] || raise("No :rndc_key supplied")
    end
    # Write the rndc key to a Tempfile (hmac-md5) for nsupdate -k; caller
    # is responsible for unlinking it (see exec_nsupdate).
    def write_rndc_key
      tmp_file = Tempfile.new('remove_temp')
      tmp_file.puts "key \"rndc-key\" {"
      tmp_file.puts "algorithm hmac-md5;"
      tmp_file.puts "secret \"#{@rndc_key}\";"
      tmp_file.puts "};"
      tmp_file.close
      tmp_file
    end
    # Primary nameserver address, used by every update and lookup.
    def get_primary_nameserver
      # '172.16.16.5'
      '127.0.0.1'
    end
    # DNS record removal is not implemented; always returns false.
    def remove_ips_for(spec)
      hn = spec[:fqdn]
      # Fix: log message grammar ("Not ability" -> "No ability").
      puts "No ability to remove DNS for #{hn}, not removing"
      return false
    end
    # Try to claim the PTR record for ip via nsupdate. The nxdomain
    # prerequisite makes the update fail (YXDOMAIN) when the record
    # exists, so success doubles as an atomic allocation check.
    def try_add_reverse_lookup(ip, fqdn)
      ip_rev = ip.to_s.split('.').reverse.join('.')
      tmp_file = Tempfile.new('remove_temp')
      tmp_file.puts "server #{get_primary_nameserver}"
      tmp_file.puts "zone 16.16.172.in-addr.arpa"
      tmp_file.puts "prereq nxdomain #{ip_rev}.in-addr.arpa"
      tmp_file.puts "update add #{ip_rev}.in-addr.arpa. 86400 PTR #{fqdn}."
      tmp_file.puts "send"
      tmp_file.close
      out = exec_nsupdate(tmp_file)
      if out =~ /update failed: YXDOMAIN/
        puts "FAILED TO ADD #{ip_rev}.in-addr.arpa. PTR #{fqdn}. IP already used"
        return false
      else
        puts "ADD OK for reverse of #{ip} to #{fqdn} => #{out}"
        return true
      end
    end
    # Pipe an update script to nsupdate with the rndc key; returns the
    # combined stdout/stderr. Both temp files are unlinked afterwards.
    def exec_nsupdate(update_file)
      rndc_tmp = write_rndc_key
      out = `cat #{update_file.path} | nsupdate -k #{rndc_tmp.path} 2>&1`
      update_file.unlink
      rndc_tmp.unlink
      out
    end
    # Add the forward A record; the zone is the fqdn minus its host part.
    # Raises when nsupdate produces any output (empty output = success).
    def add_forward_lookup(ip, fqdn)
      fqdn_s = fqdn.split "."
      zone_s = fqdn_s.clone
      hn = zone_s.shift
      zone = zone_s.join('.')
      tmp_file = Tempfile.new('remove_temp')
      tmp_file.puts "server #{get_primary_nameserver}"
      tmp_file.puts "zone #{zone}"
      tmp_file.puts "update add #{fqdn}. 86400 A #{ip}"
      tmp_file.puts "send"
      tmp_file.close
      out = exec_nsupdate(tmp_file)
      if out =~ /^$/
        return true
      else
        raise("Could not add forward lookup #{fqdn} A #{ip}: #{out}")
      end
    end
    # Resolve hn, returning an IPAddr, or false when it does not resolve.
    def lookup_ip_for(hn)
      res = Resolv::DNS.open({:nameserver=>[get_primary_nameserver]})
      begin
        # Fix: Resolv::DNS has no #query method (NoMethodError, which the
        # rescue below would not catch). Use #getaddress, which raises
        # Resolv::ResolvError on failure, and stringify its Resolv::IPv4
        # result before wrapping it in IPAddr.
        IPAddr.new(res.getaddress(hn).to_s)
      rescue Resolv::ResolvError
        puts "Could not find #{hn}"
        false
      end
    end
    # Allocate (or return the existing) IP for spec[:fqdn] by scanning
    # upwards from @min_allocation until a PTR claim succeeds, then add
    # the forward record.
    def allocate_ips_for(spec)
      ip = nil
      hn = spec[:fqdn]
      if lookup_ip_for(hn)
        puts "No new allocation for #{hn}, already allocated"
        return lookup_ip_for(hn)
      else
        max_ip = @max_allocation
        ip = @min_allocation
        while !try_add_reverse_lookup(ip, hn)
          ip = IPAddr.new(ip.to_i + 1, Socket::AF_INET)
          if ip >= max_ip
            raise("Ran out of ips")
          end
        end
        add_forward_lookup(ip, hn)
      end
      # Fix: ip is already an IPAddr here; IPAddr.new(ip) raises because
      # IPAddr.new only accepts a String or Integer address.
      ip
    end
  end
  # Register a named network.
  # NOTE(review): net/start are dropped and Network.new() is called with
  # no options, which raises "No :network_range supplied"; @networks is
  # not initialised in this class either -- confirm against superclass.
  def add_network(name, net, start)
    @networks[name] = Network.new()
  end
end
|
# Terminal colour helpers mixed into Pry. Generates one method per
# foreground colour, plus bright_ and _on_<background> variants.
module Pry::Helpers::Colors
  extend self
  # ANSI colour offsets (added to 30 for foreground, 40 for background).
  COLORS =
    {
      "black"   => 0,
      "red"     => 1,
      "green"   => 2,
      "yellow"  => 3,
      "blue"    => 4,
      "purple"  => 5,
      "magenta" => 5,
      "cyan"    => 6,
      "white"   => 7
    }
  # Decide whether colour codes should be emitted.
  # Fix: when an explicit pry instance is given, use its color setting
  # even when that setting is false; previously `(pry and pry.color) or
  # (...)` fell through to the global Pry.color check whenever pry.color
  # was falsy.
  color_enabled = lambda do |pry|
    pry ? pry.color : (defined?(_pry_) ? _pry_.color : Pry.color)
  end
  COLORS.each_pair do |color, value|
    define_method(color) do |text, pry=nil|
      instance_exec(pry, &color_enabled) ? "\033[0;#{30+value}m#{text}\033[0m" : text
    end
    define_method("bright_#{color}") do |text, pry=nil|
      instance_exec(pry, &color_enabled) ? "\033[1;#{30+value}m#{text}\033[0m" : text
    end
    COLORS.each_pair do |bg_color, bg_value|
      define_method "#{color}_on_#{bg_color}" do |text, pry=nil|
        instance_exec(pry, &color_enabled) ? "\033[0;#{30 + value};#{40 + bg_value}m#{text}\033[0m" : text
      end
      define_method "bright_#{color}_on_#{bg_color}" do |text, pry=nil|
        instance_exec(pry, &color_enabled) ? "\033[1;#{30 + value};#{40 + bg_value}m#{text}\033[0m" : text
      end
    end
  end
  #
  # @example
  #
  #   paint "foo", :green
  #   paint "bar", :red
  #   paint "baz", :bold
  #
  # @param [String] str
  #   String to paint.
  #
  # @param [Symbol]
  #   The effect to apply to _str_.
  #
  # @return [String]
  #   Returns a string with _effect_ applied, or _str_ if the effect is unknown.
  #
  def paint(str, effect)
    (Pry::Helpers::Colors.instance_methods(false) - [__method__]).include?(effect) ?
      public_send(effect, str) : str
  end
  # Returns _text_ as bold text for use on a terminal.
  #
  # @param [String, #to_s] text
  # @return [String] _text_
  def bold text, pry=(defined?(_pry_) && _pry_) || Pry
    (pry and pry.color) ? "\e[1m#{text}\e[0m" : text
  end
  # Remove any color codes from _text_.
  #
  # @param [String, #to_s] text
  # @return [String] _text_ stripped of any color codes.
  def strip_color(text)
    text.to_s.gsub(/(\001)?\e\[.*?(\d)+m(\002)?/ , '')
  end
  # Executes the block with `Pry.config.color` set to false.
  # @yield
  # @return [void]
  def no_color pry=(defined?(_pry_) && _pry_) || Pry, &block
    boolean = pry.config.color
    pry.config.color = false
    yield
  ensure
    pry.config.color = boolean
  end
end
If a `pry` instance is passed, return its color setting directly instead of falling through to the global check
# Terminal colour helpers mixed into Pry. Generates one method per
# foreground colour, plus bright_ and _on_<background> variants.
module Pry::Helpers::Colors
  extend self
  # ANSI colour offsets (added to 30 for foreground, 40 for background).
  COLORS =
    {
      "black"   => 0,
      "red"     => 1,
      "green"   => 2,
      "yellow"  => 3,
      "blue"    => 4,
      "purple"  => 5,
      "magenta" => 5,
      "cyan"    => 6,
      "white"   => 7
    }
  # Decide whether colour codes should be emitted. The `return` exits the
  # lambda early with pry.color (even when false) whenever an explicit
  # pry instance is given, so only a nil pry falls through to the
  # _pry_/Pry.color global check.
  color_enabled = lambda do |pry|
    (pry and return pry.color) or (defined?(_pry_) ? _pry_.color : Pry.color)
  end
  COLORS.each_pair do |color, value|
    define_method(color) do |text, pry=nil|
      instance_exec(pry, &color_enabled) ? "\033[0;#{30+value}m#{text}\033[0m" : text
    end
    define_method("bright_#{color}") do |text, pry=nil|
      instance_exec(pry, &color_enabled) ? "\033[1;#{30+value}m#{text}\033[0m" : text
    end
    COLORS.each_pair do |bg_color, bg_value|
      define_method "#{color}_on_#{bg_color}" do |text, pry=nil|
        instance_exec(pry, &color_enabled) ? "\033[0;#{30 + value};#{40 + bg_value}m#{text}\033[0m" : text
      end
      define_method "bright_#{color}_on_#{bg_color}" do |text, pry=nil|
        instance_exec(pry, &color_enabled) ? "\033[1;#{30 + value};#{40 + bg_value}m#{text}\033[0m" : text
      end
    end
  end
  #
  # @example
  #
  #   paint "foo", :green
  #   paint "bar", :red
  #   paint "baz", :bold
  #
  # @param [String] str
  #   String to paint.
  #
  # @param [Symbol]
  #   The effect to apply to _str_.
  #
  # @return [String]
  #   Returns a string with _effect_ applied, or _str_ if the effect is unknown.
  #
  def paint(str, effect)
    (Pry::Helpers::Colors.instance_methods(false) - [__method__]).include?(effect) ?
      public_send(effect, str) : str
  end
  # Returns _text_ as bold text for use on a terminal.
  #
  # @param [String, #to_s] text
  # @return [String] _text_
  def bold text, pry=(defined?(_pry_) && _pry_) || Pry
    (pry and pry.color) ? "\e[1m#{text}\e[0m" : text
  end
  # Remove any color codes from _text_.
  #
  # @param [String, #to_s] text
  # @return [String] _text_ stripped of any color codes.
  def strip_color(text)
    text.to_s.gsub(/(\001)?\e\[.*?(\d)+m(\002)?/ , '')
  end
  # Executes the block with `Pry.config.color` set to false.
  # @yield
  # @return [void]
  def no_color pry=(defined?(_pry_) && _pry_) || Pry, &block
    boolean = pry.config.color
    pry.config.color = false
    yield
  ensure
    pry.config.color = boolean
  end
end
|
# coding: utf-8
# Gem specification for sandthorn_sequel_projection: SQL projections
# built from Sandthorn event streams.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'sandthorn_sequel_projection/version'
Gem::Specification.new do |spec|
  spec.name          = "sandthorn_sequel_projection"
  spec.version       = SandthornSequelProjection::VERSION
  spec.authors       = ["Lars Krantz"]
  spec.email         = ["lars.krantz@alaz.se"]
  spec.summary       = %q{Helps creating sql projections from sandthorn events}
  spec.description   = spec.summary
  spec.homepage      = ""
  spec.license       = "MIT"
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
  spec.required_ruby_version = ">= 2.0"
  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "sandthorn_driver_sequel"
  spec.add_development_dependency "awesome_print"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "sqlite3"
  spec.add_development_dependency "codeclimate-test-reporter"
  # NOTE(review): sandthorn is unpinned here -- any version satisfies it.
  spec.add_runtime_dependency "sandthorn"
  spec.add_runtime_dependency "sandthorn_event_filter", "~> 0.0.4"
  spec.add_runtime_dependency "sequel"
  spec.add_runtime_dependency "simple_migrator"
end
Specify Sandthorn version
# coding: utf-8
# Gem specification for sandthorn_sequel_projection: SQL projections
# built from Sandthorn event streams.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'sandthorn_sequel_projection/version'
Gem::Specification.new do |spec|
  spec.name          = "sandthorn_sequel_projection"
  spec.version       = SandthornSequelProjection::VERSION
  spec.authors       = ["Lars Krantz"]
  spec.email         = ["lars.krantz@alaz.se"]
  spec.summary       = %q{Helps creating sql projections from sandthorn events}
  spec.description   = spec.summary
  spec.homepage      = ""
  spec.license       = "MIT"
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
  spec.required_ruby_version = ">= 2.0"
  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "sandthorn_driver_sequel"
  spec.add_development_dependency "awesome_print"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "sqlite3"
  spec.add_development_dependency "codeclimate-test-reporter"
  # Pinned to the 0.6 series for API compatibility.
  spec.add_runtime_dependency "sandthorn", "~> 0.6"
  spec.add_runtime_dependency "sandthorn_event_filter", "~> 0.0.4"
  spec.add_runtime_dependency "sequel"
  spec.add_runtime_dependency "simple_migrator"
end
|
require 'puppet-lint/checkplugin'
class PuppetLint::Checks
attr_reader :problems
attr_reader :tokens
# Public: Initialise a new PuppetLint::Checks object and prepare the check
# methods.
# Public: Initialise a new PuppetLint::Checks object and prepare the check
# methods.
#
# Each configured check's Proc is bound onto this class as an instance
# method named lint_check_<check>, so #run can dispatch by name.
def initialize
  @problems = []
  # Fallback values merged into every reported problem (see #notify).
  @default_info = {:check => 'unknown', :linenumber => 0, :column => 0}
  PuppetLint.configuration.checks.each do |check|
    method = PuppetLint.configuration.check_method[check]
    self.class.send(:define_method, "lint_check_#{check}", &method)
  end
end
# Public: Add a message to the problems array.
#
# kind - The kind of problem as a Symbol (:warning, :error).
# problem - A Hash containing the attributes of the problem.
# :message - The String message describing the problem.
# :linenumber - The Integer line number of the location of the problem.
# :check - The String name of the check that the problem came from.
# :column - The Integer column number of the location of the problem.
#
# Returns nothing.
# Public: Record a problem, tagging it with its kind and filling in any
# attribute missing from the problem Hash with the defaults for the
# currently running check (@default_info). Existing attributes win.
#
# kind    - The Symbol kind of problem (:warning, :error).
# problem - A Hash with :message, :linenumber, :check, :column keys.
#
# Returns the updated problems Array.
def notify(kind, problem)
  problem[:kind] = kind
  @default_info.each do |key, value|
    problem[key] = value unless problem.key?(key)
  end
  @problems << problem
end
# Public: Tokenise a manifest, converting lexer failures into a reported
# syntax-error problem instead of raising.
#
# fileinfo - Hash of file metadata (see #fullpath).
# data     - The String manifest content.
#
# Returns nothing; populates @tokens (empty on lexer error), @fileinfo
# and @data.
def load_data(fileinfo, data)
  lexer = PuppetLint::Lexer.new
  begin
    @tokens = lexer.tokenise(data)
  rescue PuppetLint::LexerError => e
    notify :error, {
      :message => 'Syntax error (try running `puppet parser validate <file>`)',
      :linenumber => e.line_no,
      :column => e.column,
    }
    @tokens = []
  end
  @fileinfo = fileinfo
  @data = data
end
# Public: Run every enabled check against the given manifest.
#
# fileinfo - Hash of file metadata (see #fullpath).
# data     - The String manifest content to check.
#
# Returns the Array of problem Hashes accumulated by the checks.
def run(fileinfo, data)
  load_data(fileinfo, data)
  enabled_checks.each do |check|
    # Record which check is running so #notify can tag its problems.
    @default_info[:check] = check
    self.send("lint_check_#{check}")
  end
  @problems
end
# Public: Names of all checks that are both defined on this instance
# (as lint_check_* methods) and enabled in the PuppetLint configuration.
# Memoised after the first call.
#
# Returns an Array of String check names.
def enabled_checks
  @enabled_checks ||= public_methods.map(&:to_s).
    select { |name| name.start_with?('lint_check_') }.
    map    { |name| name.sub('lint_check_', '') }.
    select { |name| PuppetLint.configuration.send("#{name}_enabled?") }
end
# Public: The full path of the manifest being checked, taken from the
# fileinfo hash passed to #run / #load_data.
def fullpath
  @fileinfo[:fullpath]
end
# Public: Tokens that represent resource titles in the manifest. A COLON
# marks the end of a title; titles may be single tokens or arrays of
# tokens. Memoised after the first call.
#
# Returns an Array of Token objects.
def title_tokens
  @title_tokens ||= Proc.new do
    result = []
    tokens.each_index do |token_idx|
      if tokens[token_idx].type == :COLON
        # gather a list of tokens that are resource titles
        if tokens[token_idx-1].type == :RBRACK
          # NOTE(review): rindex scans the entire token stream, so this
          # finds the LAST [ in the file rather than the one nearest
          # this title array -- confirm whether that is intended.
          array_start_idx = tokens.rindex { |r|
            r.type == :LBRACK
          }
          title_array_tokens = tokens[(array_start_idx + 1)..(token_idx - 2)]
          result += title_array_tokens.select { |token|
            {:STRING => true, :NAME => true}.include? token.type
          }
        else
          # A colon followed by { is a case/selector, not a resource
          # body; only keep the preceding token for real resources.
          next_token = tokens[token_idx].next_code_token
          if next_token.type != :LBRACE
            result << tokens[token_idx - 1]
          end
        end
      end
    end
    result
  end.call
end
# Public: Calculate the positions of all resource declarations within the
# tokenised manifest. These positions only point to the content of the
# resource declaration, they do not include resource types or
# titles/namevars.
#
# Returns an Array of Hashes, each containing:
# :start - An Integer position in the `tokens` Array pointing to the first
# Token of a resource declaration parameters (type :NAME).
# :end - An Integer position in the `tokens` Array pointing to the last
# Token of a resource declaration parameters (type :RBRACE).
def resource_indexes
  @resource_indexes ||= Proc.new do
    result = []
    tokens.each_index do |token_idx|
      # A COLON ends a resource title; what follows (unless it is a
      # selector's {) is the resource's parameter list.
      if tokens[token_idx].type == :COLON
        next_token = tokens[token_idx].next_code_token
        depth = 1
        if next_token.type != :LBRACE
          # Scan forward, tracking brace depth, until the declaration is
          # closed by its matching RBRACE (or by a SEMIC at top depth,
          # which separates bodies in a multi-body resource).
          tokens[(token_idx + 1)..-1].each_index do |idx|
            real_idx = token_idx + idx + 1
            if tokens[real_idx].type == :LBRACE
              depth += 1
            elsif {:SEMIC => true, :RBRACE => true}.include? tokens[real_idx].type
              unless tokens[real_idx].type == :SEMIC && depth > 1
                depth -= 1
                if depth == 0
                  result << {:start => token_idx + 1, :end => real_idx}
                  break
                end
              end
            end
          end
        end
      end
    end
    result
  end.call
end
# Public: Calculate the positions of all class definitions within the
# tokenised manifest.
#
# Returns an Array of Hashes, each containing:
# :start - An Integer position in the `tokens` Array pointing to the first
# token of a class (type :CLASS).
# :end - An Integer position in the `tokens` Array pointing to the last
# token of a class (type :RBRACE).
def class_indexes
  @class_indexes ||= Proc.new do
    result = []
    tokens.each_index do |token_idx|
      if tokens[token_idx].type == :CLASS
        depth = 0
        # Braces inside the parameter list (...) must not affect the
        # body-depth count, hence the in_params flag.
        in_params = false
        tokens[token_idx+1..-1].each_index do |class_token_idx|
          idx = class_token_idx + token_idx + 1
          if tokens[idx].type == :LPAREN
            in_params = true
          elsif tokens[idx].type == :RPAREN
            in_params = false
          elsif tokens[idx].type == :LBRACE
            depth += 1 unless in_params
          elsif tokens[idx].type == :RBRACE
            depth -= 1 unless in_params
            if depth == 0 && ! in_params
              # CLASS immediately followed by { is a class *reference*
              # (e.g. class { 'foo': }), not a definition -- skip those.
              if tokens[token_idx].next_code_token.type != :LBRACE
                result << {:start => token_idx, :end => idx}
              end
              break
            end
          end
        end
      end
    end
    result
  end.call
end
# Public: Calculate the positions of all defined type definitions within
# the tokenised manifest.
#
# Returns an Array of Hashes, each containing:
# :start - An Integer position in the `tokens` Array pointing to the first
# token of a defined type (type :DEFINE).
# :end - An Integer position in the `tokens` Array pointing to the last
# token of a defined type (type :RBRACE).
def defined_type_indexes
@defined_type_indexes ||= Proc.new do
result = []
tokens.each_index do |token_idx|
if tokens[token_idx].type == :DEFINE
depth = 0
in_params = false
tokens[token_idx+1..-1].each_index do |define_token_idx|
idx = define_token_idx + token_idx + 1
if tokens[idx].type == :LPAREN
in_params = true
elsif tokens[idx].type == :RPAREN
in_params = false
elsif tokens[idx].type == :LBRACE
depth += 1 unless in_params
elsif tokens[idx].type == :RBRACE
depth -= 1 unless in_params
if depth == 0 && ! in_params
result << {:start => token_idx, :end => idx}
break
end
end
end
end
end
result
end.call
end
# Public: Retrieves a list of token types that are considered to be
# formatting tokens (ie whitespace, newlines, etc).
#
# Returns an Array of Symbols.
def formatting_tokens
@formatting_tokens ||= PuppetLint::Lexer::FORMATTING_TOKENS
end
# Public: Access the lines of the manifest that is being checked.
#
# Returns an Array of Strings.
def manifest_lines
@manifest_lines ||= @data.split("\n")
end
end
TomDoc PuppetLint::Checks#load_data
require 'puppet-lint/checkplugin'
class PuppetLint::Checks
attr_reader :problems
attr_reader :tokens
# Public: Initialise a new PuppetLint::Checks object and prepare the check
# methods.
def initialize
@problems = []
@default_info = {:check => 'unknown', :linenumber => 0, :column => 0}
PuppetLint.configuration.checks.each do |check|
method = PuppetLint.configuration.check_method[check]
self.class.send(:define_method, "lint_check_#{check}", &method)
end
end
# Public: Add a message to the problems array.
#
# kind - The kind of problem as a Symbol (:warning, :error).
# problem - A Hash containing the attributes of the problem.
# :message - The String message describing the problem.
# :linenumber - The Integer line number of the location of the problem.
# :check - The String name of the check that the problem came from.
# :column - The Integer column number of the location of the problem.
#
# Returns nothing.
def notify(kind, problem)
problem[:kind] = kind
problem.merge!(@default_info) {|key, v1, v2| v1 }
@problems << problem
end
# Internal: Tokenise the manifest code and prepare it for checking.
#
# fileinfo - A Hash containing the following:
# :fullpath - The expanded path to the file as a String.
# :filename - The name of the file as a String.
# :path - The original path to the file as passed to puppet-lint as
# a String.
# data - The String manifest code to be checked.
#
# Returns nothing.
def load_data(fileinfo, data)
lexer = PuppetLint::Lexer.new
begin
@tokens = lexer.tokenise(data)
rescue PuppetLint::LexerError => e
notify :error, {
:message => 'Syntax error (try running `puppet parser validate <file>`)',
:linenumber => e.line_no,
:column => e.column,
}
@tokens = []
end
@fileinfo = fileinfo
@data = data
end
def run(fileinfo, data)
load_data(fileinfo, data)
enabled_checks.each do |check|
@default_info[:check] = check
self.send("lint_check_#{check}")
end
@problems
end
def enabled_checks
@enabled_checks ||= Proc.new do
self.public_methods.select { |method|
method.to_s.start_with? 'lint_check_'
}.map { |method|
method.to_s[11..-1]
}.select { |name|
PuppetLint.configuration.send("#{name}_enabled?")
}
end.call
end
def fullpath
@fileinfo[:fullpath]
end
def title_tokens
@title_tokens ||= Proc.new do
result = []
tokens.each_index do |token_idx|
if tokens[token_idx].type == :COLON
# gather a list of tokens that are resource titles
if tokens[token_idx-1].type == :RBRACK
array_start_idx = tokens.rindex { |r|
r.type == :LBRACK
}
title_array_tokens = tokens[(array_start_idx + 1)..(token_idx - 2)]
result += title_array_tokens.select { |token|
{:STRING => true, :NAME => true}.include? token.type
}
else
next_token = tokens[token_idx].next_code_token
if next_token.type != :LBRACE
result << tokens[token_idx - 1]
end
end
end
end
result
end.call
end
# Public: Calculate the positions of all resource declarations within the
# tokenised manifest. These positions only point to the content of the
# resource declaration, they do not include resource types or
# titles/namevars.
#
# Returns an Array of Hashes, each containing:
# :start - An Integer position in the `tokens` Array pointing to the first
# Token of a resource declaration parameters (type :NAME).
# :end - An Integer position in the `tokens` Array pointing to the last
# Token of a resource declaration parameters (type :RBRACE).
def resource_indexes
@resource_indexes ||= Proc.new do
result = []
tokens.each_index do |token_idx|
if tokens[token_idx].type == :COLON
next_token = tokens[token_idx].next_code_token
depth = 1
if next_token.type != :LBRACE
tokens[(token_idx + 1)..-1].each_index do |idx|
real_idx = token_idx + idx + 1
if tokens[real_idx].type == :LBRACE
depth += 1
elsif {:SEMIC => true, :RBRACE => true}.include? tokens[real_idx].type
unless tokens[real_idx].type == :SEMIC && depth > 1
depth -= 1
if depth == 0
result << {:start => token_idx + 1, :end => real_idx}
break
end
end
end
end
end
end
end
result
end.call
end
# Public: Calculate the positions of all class definitions within the
# tokenised manifest.
#
# Returns an Array of Hashes, each containing:
# :start - An Integer position in the `tokens` Array pointing to the first
# token of a class (type :CLASS).
# :end - An Integer position in the `tokens` Array pointing to the last
# token of a class (type :RBRACE).
def class_indexes
@class_indexes ||= Proc.new do
result = []
tokens.each_index do |token_idx|
if tokens[token_idx].type == :CLASS
depth = 0
in_params = false
tokens[token_idx+1..-1].each_index do |class_token_idx|
idx = class_token_idx + token_idx + 1
if tokens[idx].type == :LPAREN
in_params = true
elsif tokens[idx].type == :RPAREN
in_params = false
elsif tokens[idx].type == :LBRACE
depth += 1 unless in_params
elsif tokens[idx].type == :RBRACE
depth -= 1 unless in_params
if depth == 0 && ! in_params
if tokens[token_idx].next_code_token.type != :LBRACE
result << {:start => token_idx, :end => idx}
end
break
end
end
end
end
end
result
end.call
end
# Public: Calculate the positions of all defined type definitions within
# the tokenised manifest.
#
# Returns an Array of Hashes, each containing:
# :start - An Integer position in the `tokens` Array pointing to the first
# token of a defined type (type :DEFINE).
# :end - An Integer position in the `tokens` Array pointing to the last
# token of a defined type (type :RBRACE).
def defined_type_indexes
@defined_type_indexes ||= Proc.new do
result = []
tokens.each_index do |token_idx|
if tokens[token_idx].type == :DEFINE
depth = 0
in_params = false
tokens[token_idx+1..-1].each_index do |define_token_idx|
idx = define_token_idx + token_idx + 1
if tokens[idx].type == :LPAREN
in_params = true
elsif tokens[idx].type == :RPAREN
in_params = false
elsif tokens[idx].type == :LBRACE
depth += 1 unless in_params
elsif tokens[idx].type == :RBRACE
depth -= 1 unless in_params
if depth == 0 && ! in_params
result << {:start => token_idx, :end => idx}
break
end
end
end
end
end
result
end.call
end
# Public: Retrieves a list of token types that are considered to be
# formatting tokens (ie whitespace, newlines, etc).
#
# Returns an Array of Symbols.
def formatting_tokens
@formatting_tokens ||= PuppetLint::Lexer::FORMATTING_TOKENS
end
# Public: Access the lines of the manifest that is being checked.
#
# Returns an Array of Strings.
def manifest_lines
@manifest_lines ||= @data.split("\n")
end
end
|
# Version constant for the pushdeploy gem.
module Pushdeploy
  VERSION = "0.0.3"
end
Bump version to 0.0.4
# Version constant for the pushdeploy gem.
module Pushdeploy
  VERSION = "0.0.4"
end
|
# Version constant for the pushr-apns gem.
module PushrApns
  VERSION = '1.0.4'
end
Bump version to 1.0.5
# Version constant for the pushr-apns gem.
module PushrApns
  VERSION = '1.0.5'
end
|
# Acceptance test helper: install and launch a pcp-broker on the master SUT.
step 'Clone pcp-broker to master'
on master, puppet('resource package git ensure=present')
on master, 'git clone https://github.com/puppetlabs/pcp-broker.git'
step 'Install Java'
on master, puppet('resource package java ensure=present')
step 'Download lein bootstrap'
on master, 'cd /usr/bin && '\
   'curl -O https://raw.githubusercontent.com/technomancy/leiningen/stable/bin/lein'
step 'Run lein once so it sets itself up'
# NOTE(review): LEIN_ROOT=ok presumably suppresses leiningen's refusal to run
# as root - confirm against leiningen docs.
on master, 'chmod a+x /usr/bin/lein && export LEIN_ROOT=ok && /usr/bin/lein'
step 'Run pcp-broker in trapperkeeper in background (return immediately)'
# Redirecting stdin/stdout/stderr and backgrounding detaches the broker so
# the 'on' call returns immediately instead of blocking the test run.
on master, 'cd ~/pcp-broker; export LEIN_ROOT=ok; lein tk </dev/null >/dev/null 2>&1 &'
(maint) Improve set-up of pcp-broker in acceptance tests
Add polling of port 8142 to confirm the broker has started up.
Send broker's output to /var/log/pcp-broker.log instead of /dev/null so we have output for troubleshooting.
Before starting the broker, run 'lein deps' as a separate command so that time-consuming downloading of dependencies is separated from start-up time.
# Acceptance test helper: install and launch a pcp-broker on the master SUT,
# then poll until its port is open (or fail the test).
pcp_broker_port = 8142
pcp_broker_minutes_to_start = 2
step 'Clone pcp-broker to master'
on master, puppet('resource package git ensure=present')
on master, 'git clone https://github.com/puppetlabs/pcp-broker.git'
step 'Install Java'
on master, puppet('resource package java ensure=present')
step 'Download lein bootstrap'
on master, 'cd /usr/bin && '\
   'curl -O https://raw.githubusercontent.com/technomancy/leiningen/stable/bin/lein'
step 'Run lein once so it sets itself up'
on master, 'chmod a+x /usr/bin/lein && export LEIN_ROOT=ok && /usr/bin/lein'
step 'Run lein deps to download dependencies'
# 'lein tk' will download dependencies automatically, but downloading them will take
# some time and will eat into the polling period we allow for the broker to start
on master, 'cd ~/pcp-broker; export LEIN_ROOT=ok; lein deps'
# String interpolation calls #to_s implicitly, so the explicit .to_s calls
# were redundant; the produced strings are unchanged.
step "Run pcp-broker in trapperkeeper in background and wait for port #{pcp_broker_port}"
# Output goes to /var/log/pcp-broker.log so start-up failures can be debugged.
on master, 'cd ~/pcp-broker; export LEIN_ROOT=ok; lein tk </dev/null >/var/log/pcp-broker.log 2>&1 &'
assert(port_open_within?(master, pcp_broker_port, 60 * pcp_broker_minutes_to_start),
       "pcp-broker port #{pcp_broker_port} not open within " \
       "#{pcp_broker_minutes_to_start} minutes of starting the broker")
|
module ActionController #:nodoc:
module Filters #:nodoc:
def self.included(base)
base.class_eval do
extend ClassMethods
include ActionController::Filters::InstanceMethods
end
end
class FilterChain < ActiveSupport::Callbacks::CallbackChain #:nodoc:
def append_filter_to_chain(filters, filter_type, &block)
pos = find_filter_append_position(filters, filter_type)
update_filter_chain(filters, filter_type, pos, &block)
end
def prepend_filter_to_chain(filters, filter_type, &block)
pos = find_filter_prepend_position(filters, filter_type)
update_filter_chain(filters, filter_type, pos, &block)
end
def create_filters(filters, filter_type, &block)
filters, conditions = extract_options(filters, &block)
filters.map! { |filter| find_or_create_filter(filter, filter_type, conditions) }
filters
end
def skip_filter_in_chain(*filters, &test)
filters, conditions = extract_options(filters)
filters.each do |filter|
if callback = find(filter) then delete(callback) end
end if conditions.empty?
update_filter_in_chain(filters, :skip => conditions, &test)
end
private
def update_filter_chain(filters, filter_type, pos, &block)
new_filters = create_filters(filters, filter_type, &block)
insert(pos, new_filters).flatten!
end
def find_filter_append_position(filters, filter_type)
# appending an after filter puts it at the end of the call chain
# before and around filters go before the first after filter in the chain
unless filter_type == :after
each_with_index do |f,i|
return i if f.after?
end
end
return -1
end
def find_filter_prepend_position(filters, filter_type)
# prepending a before or around filter puts it at the front of the call chain
# after filters go before the first after filter in the chain
if filter_type == :after
each_with_index do |f,i|
return i if f.after?
end
return -1
end
return 0
end
def find_or_create_filter(filter, filter_type, options = {})
update_filter_in_chain([filter], options)
if found_filter = find(filter) { |f| f.type == filter_type }
found_filter
else
filter_kind = case
when filter.respond_to?(:before) && filter_type == :before
:before
when filter.respond_to?(:after) && filter_type == :after
:after
else
:filter
end
case filter_type
when :before
BeforeFilter.new(filter_kind, filter, options)
when :after
AfterFilter.new(filter_kind, filter, options)
else
AroundFilter.new(filter_kind, filter, options)
end
end
end
def update_filter_in_chain(filters, options, &test)
filters.map! { |f| block_given? ? find(f, &test) : find(f) }
filters.compact!
map! do |filter|
if filters.include?(filter)
new_filter = filter.dup
new_filter.update_options!(options)
new_filter
else
filter
end
end
end
end
class Filter < ActiveSupport::Callbacks::Callback #:nodoc:
def initialize(kind, method, options = {})
super
update_options! options
end
def before?
self.class == BeforeFilter
end
def after?
self.class == AfterFilter
end
def around?
self.class == AroundFilter
end
# Make sets of strings from :only/:except options
def update_options!(other)
if other
convert_only_and_except_options_to_sets_of_strings(other)
if other[:skip]
convert_only_and_except_options_to_sets_of_strings(other[:skip])
end
end
options.update(other)
end
private
def should_not_skip?(controller)
if options[:skip]
!included_in_action?(controller, options[:skip])
else
true
end
end
def included_in_action?(controller, options)
if options[:only]
options[:only].include?(controller.action_name)
elsif options[:except]
!options[:except].include?(controller.action_name)
else
true
end
end
def should_run_callback?(controller)
should_not_skip?(controller) && included_in_action?(controller, options) && super
end
def convert_only_and_except_options_to_sets_of_strings(opts)
[:only, :except].each do |key|
if values = opts[key]
opts[key] = Array(values).map(&:to_s).to_set
end
end
end
end
class AroundFilter < Filter #:nodoc:
def type
:around
end
def call(controller, &block)
if should_run_callback?(controller)
method = filter_responds_to_before_and_after? ? around_proc : self.method
# For around_filter do |controller, action|
if method.is_a?(Proc) && method.arity == 2
evaluate_method(method, controller, block)
else
evaluate_method(method, controller, &block)
end
else
block.call
end
end
private
def filter_responds_to_before_and_after?
method.respond_to?(:before) && method.respond_to?(:after)
end
def around_proc
Proc.new do |controller, action|
method.before(controller)
if controller.send!(:performed?)
controller.send!(:halt_filter_chain, method, :rendered_or_redirected)
else
begin
action.call
ensure
method.after(controller)
end
end
end
end
end
class BeforeFilter < Filter #:nodoc:
def type
:before
end
def call(controller, &block)
super
if controller.send!(:performed?)
controller.send!(:halt_filter_chain, method, :rendered_or_redirected)
end
end
end
class AfterFilter < Filter #:nodoc:
def type
:after
end
end
# Filters enable controllers to run shared pre- and post-processing code for its actions. These filters can be used to do
# authentication, caching, or auditing before the intended action is performed. Or to do localization or output
# compression after the action has been performed. Filters have access to the request, response, and all the instance
# variables set by other filters in the chain or by the action (in the case of after filters).
#
# == Filter inheritance
#
# Controller inheritance hierarchies share filters downwards, but subclasses can also add or skip filters without
# affecting the superclass. For example:
#
# class BankController < ActionController::Base
# before_filter :audit
#
# private
# def audit
# # record the action and parameters in an audit log
# end
# end
#
# class VaultController < BankController
# before_filter :verify_credentials
#
# private
# def verify_credentials
# # make sure the user is allowed into the vault
# end
# end
#
# Now any actions performed on the BankController will have the audit method called before. On the VaultController,
# first the audit method is called, then the verify_credentials method. If the audit method renders or redirects, then
# verify_credentials and the intended action are never called.
#
# == Filter types
#
# A filter can take one of three forms: method reference (symbol), external class, or inline method (proc). The first
# is the most common and works by referencing a protected or private method somewhere in the inheritance hierarchy of
# the controller by use of a symbol. In the bank example above, both BankController and VaultController use this form.
#
# Using an external class makes for more easily reused generic filters, such as output compression. External filter classes
# are implemented by having a static +filter+ method on any class and then passing this class to the filter method. Example:
#
# class OutputCompressionFilter
# def self.filter(controller)
# controller.response.body = compress(controller.response.body)
# end
# end
#
# class NewspaperController < ActionController::Base
# after_filter OutputCompressionFilter
# end
#
# The filter method is passed the controller instance and is hence granted access to all aspects of the controller and can
# manipulate them as it sees fit.
#
# The inline method (using a proc) can be used to quickly do something small that doesn't require a lot of explanation.
# Or just as a quick test. It works like this:
#
# class WeblogController < ActionController::Base
# before_filter { |controller| head(400) if controller.params["stop_action"] }
# end
#
# As you can see, the block expects to be passed the controller after it has assigned the request to the internal variables.
# This means that the block has access to both the request and response objects complete with convenience methods for params,
# session, template, and assigns. Note: The inline method doesn't strictly have to be a block; any object that responds to call
  # and returns 1 or -1 on arity will do (such as a Proc or a Method object).
#
# Please note that around_filters function a little differently than the normal before and after filters with regard to filter
# types. Please see the section dedicated to around_filters below.
#
# == Filter chain ordering
#
# Using <tt>before_filter</tt> and <tt>after_filter</tt> appends the specified filters to the existing chain. That's usually
# just fine, but some times you care more about the order in which the filters are executed. When that's the case, you
# can use <tt>prepend_before_filter</tt> and <tt>prepend_after_filter</tt>. Filters added by these methods will be put at the
# beginning of their respective chain and executed before the rest. For example:
#
# class ShoppingController < ActionController::Base
# before_filter :verify_open_shop
#
# class CheckoutController < ShoppingController
# prepend_before_filter :ensure_items_in_cart, :ensure_items_in_stock
#
# The filter chain for the CheckoutController is now <tt>:ensure_items_in_cart, :ensure_items_in_stock,</tt>
# <tt>:verify_open_shop</tt>. So if either of the ensure filters renders or redirects, we'll never get around to see if the shop
# is open or not.
#
# You may pass multiple filter arguments of each type as well as a filter block.
# If a block is given, it is treated as the last argument.
#
# == Around filters
#
# Around filters wrap an action, executing code both before and after.
# They may be declared as method references, blocks, or objects responding
# to +filter+ or to both +before+ and +after+.
#
# To use a method as an +around_filter+, pass a symbol naming the Ruby method.
# Yield (or <tt>block.call</tt>) within the method to run the action.
#
# around_filter :catch_exceptions
#
# private
# def catch_exceptions
# yield
# rescue => exception
# logger.debug "Caught exception! #{exception}"
# raise
# end
#
# To use a block as an +around_filter+, pass a block taking as args both
# the controller and the action block. You can't call yield directly from
# an +around_filter+ block; explicitly call the action block instead:
#
# around_filter do |controller, action|
# logger.debug "before #{controller.action_name}"
# action.call
# logger.debug "after #{controller.action_name}"
# end
#
# To use a filter object with +around_filter+, pass an object responding
# to <tt>:filter</tt> or both <tt>:before</tt> and <tt>:after</tt>. With a
# filter method, yield to the block as above:
#
# around_filter BenchmarkingFilter
#
# class BenchmarkingFilter
# def self.filter(controller, &block)
# Benchmark.measure(&block)
# end
# end
#
# With +before+ and +after+ methods:
#
# around_filter Authorizer.new
#
# class Authorizer
# # This will run before the action. Redirecting aborts the action.
# def before(controller)
# unless user.authorized?
# redirect_to(login_url)
# end
# end
#
# # This will run after the action if and only if before did not render or redirect.
# def after(controller)
# end
# end
#
# If the filter has +before+ and +after+ methods, the +before+ method will be
# called before the action. If +before+ renders or redirects, the filter chain is
# halted and +after+ will not be run. See Filter Chain Halting below for
# an example.
#
# == Filter chain skipping
#
# Declaring a filter on a base class conveniently applies to its subclasses,
# but sometimes a subclass should skip some of its superclass' filters:
#
# class ApplicationController < ActionController::Base
# before_filter :authenticate
# around_filter :catch_exceptions
# end
#
# class WeblogController < ApplicationController
# # Will run the :authenticate and :catch_exceptions filters.
# end
#
# class SignupController < ApplicationController
# # Skip :authenticate, run :catch_exceptions.
# skip_before_filter :authenticate
# end
#
# class ProjectsController < ApplicationController
# # Skip :catch_exceptions, run :authenticate.
# skip_filter :catch_exceptions
# end
#
# class ClientsController < ApplicationController
# # Skip :catch_exceptions and :authenticate unless action is index.
# skip_filter :catch_exceptions, :authenticate, :except => :index
# end
#
# == Filter conditions
#
# Filters may be limited to specific actions by declaring the actions to
# include or exclude. Both options accept single actions
# (<tt>:only => :index</tt>) or arrays of actions
# (<tt>:except => [:foo, :bar]</tt>).
#
# class Journal < ActionController::Base
# # Require authentication for edit and delete.
# before_filter :authorize, :only => [:edit, :delete]
#
# # Passing options to a filter with a block.
# around_filter(:except => :index) do |controller, action_block|
# results = Profiler.run(&action_block)
# controller.response.sub! "</body>", "#{results}</body>"
# end
#
# private
# def authorize
# # Redirect to login unless authenticated.
# end
# end
#
# == Filter Chain Halting
#
# <tt>before_filter</tt> and <tt>around_filter</tt> may halt the request
# before a controller action is run. This is useful, for example, to deny
# access to unauthenticated users or to redirect from HTTP to HTTPS.
# Simply call render or redirect. After filters will not be executed if the filter
# chain is halted.
#
# Around filters halt the request unless the action block is called.
# Given these filters
# after_filter :after
# around_filter :around
# before_filter :before
#
# The filter chain will look like:
#
# ...
# . \
# . #around (code before yield)
# . . \
# . . #before (actual filter code is run)
# . . . \
# . . . execute controller action
# . . . /
# . . ...
# . . /
# . #around (code after yield)
# . /
# #after (actual filter code is run, unless the around filter does not yield)
#
# If +around+ returns before yielding, +after+ will still not be run. The +before+
# filter and controller action will not be run. If +before+ renders or redirects,
  # the second half of +around+ will still run but +after+ and the
# action will not. If +around+ fails to yield, +after+ will not be run.
module ClassMethods
# The passed <tt>filters</tt> will be appended to the filter_chain and
# will execute before the action on this controller is performed.
def append_before_filter(*filters, &block)
filter_chain.append_filter_to_chain(filters, :before, &block)
end
# The passed <tt>filters</tt> will be prepended to the filter_chain and
# will execute before the action on this controller is performed.
def prepend_before_filter(*filters, &block)
filter_chain.prepend_filter_to_chain(filters, :before, &block)
end
# Shorthand for append_before_filter since it's the most common.
alias :before_filter :append_before_filter
# The passed <tt>filters</tt> will be appended to the array of filters
# that run _after_ actions on this controller are performed.
def append_after_filter(*filters, &block)
filter_chain.append_filter_to_chain(filters, :after, &block)
end
# The passed <tt>filters</tt> will be prepended to the array of filters
# that run _after_ actions on this controller are performed.
def prepend_after_filter(*filters, &block)
filter_chain.prepend_filter_to_chain(filters, :after, &block)
end
# Shorthand for append_after_filter since it's the most common.
alias :after_filter :append_after_filter
# If you <tt>append_around_filter A.new, B.new</tt>, the filter chain looks like
#
# B#before
# A#before
# # run the action
# A#after
# B#after
#
# With around filters which yield to the action block, +before+ and +after+
# are the code before and after the yield.
def append_around_filter(*filters, &block)
filter_chain.append_filter_to_chain(filters, :around, &block)
end
# If you <tt>prepend_around_filter A.new, B.new</tt>, the filter chain looks like:
#
# A#before
# B#before
# # run the action
# B#after
# A#after
#
# With around filters which yield to the action block, +before+ and +after+
# are the code before and after the yield.
def prepend_around_filter(*filters, &block)
filter_chain.prepend_filter_to_chain(filters, :around, &block)
end
# Shorthand for +append_around_filter+ since it's the most common.
alias :around_filter :append_around_filter
# Removes the specified filters from the +before+ filter chain. Note that this only works for skipping method-reference
# filters, not procs. This is especially useful for managing the chain in inheritance hierarchies where only one out
# of many sub-controllers need a different hierarchy.
#
# You can control the actions to skip the filter for with the <tt>:only</tt> and <tt>:except</tt> options,
# just like when you apply the filters.
def skip_before_filter(*filters)
filter_chain.skip_filter_in_chain(*filters, &:before?)
end
# Removes the specified filters from the +after+ filter chain. Note that this only works for skipping method-reference
# filters, not procs. This is especially useful for managing the chain in inheritance hierarchies where only one out
# of many sub-controllers need a different hierarchy.
#
# You can control the actions to skip the filter for with the <tt>:only</tt> and <tt>:except</tt> options,
# just like when you apply the filters.
def skip_after_filter(*filters)
filter_chain.skip_filter_in_chain(*filters, &:after?)
end
# Removes the specified filters from the filter chain. This only works for method reference (symbol)
# filters, not procs. This method is different from skip_after_filter and skip_before_filter in that
# it will match any before, after or yielding around filter.
#
# You can control the actions to skip the filter for with the <tt>:only</tt> and <tt>:except</tt> options,
# just like when you apply the filters.
def skip_filter(*filters)
filter_chain.skip_filter_in_chain(*filters)
end
# Returns an array of Filter objects for this controller.
def filter_chain
if chain = read_inheritable_attribute('filter_chain')
return chain
else
write_inheritable_attribute('filter_chain', FilterChain.new)
return filter_chain
end
end
# Returns all the before filters for this class and all its ancestors.
# This method returns the actual filter that was assigned in the controller to maintain existing functionality.
def before_filters #:nodoc:
filters = []
filter_chain.each do |filter|
filters << filter.method if filter.before?
end
filters
end
# Returns all the after filters for this class and all its ancestors.
# This method returns the actual filter that was assigned in the controller to maintain existing functionality.
def after_filters #:nodoc:
filters = []
filter_chain.each do |filter|
filters << filter.method if filter.after?
end
filters
end
end
module InstanceMethods # :nodoc:
def self.included(base)
base.class_eval do
alias_method_chain :perform_action, :filters
alias_method_chain :process, :filters
end
end
protected
def process_with_filters(request, response, method = :perform_action, *arguments) #:nodoc:
@before_filter_chain_aborted = false
process_without_filters(request, response, method, *arguments)
end
def perform_action_with_filters
call_filters(self.class.filter_chain, 0, 0)
end
private
def call_filters(chain, index, nesting)
index = run_before_filters(chain, index, nesting)
aborted = @before_filter_chain_aborted
perform_action_without_filters unless performed? || aborted
return index if nesting != 0 || aborted
run_after_filters(chain, index)
end
def run_before_filters(chain, index, nesting)
while chain[index]
filter, index = chain[index], index
break unless filter # end of call chain reached
case filter
when BeforeFilter
filter.call(self) # invoke before filter
index = index.next
break if @before_filter_chain_aborted
when AroundFilter
yielded = false
filter.call(self) do
yielded = true
# all remaining before and around filters will be run in this call
index = call_filters(chain, index.next, nesting.next)
end
halt_filter_chain(filter, :did_not_yield) unless yielded
break
else
break # no before or around filters left
end
end
index
end
def run_after_filters(chain, index)
seen_after_filter = false
while chain[index]
filter, index = chain[index], index
break unless filter # end of call chain reached
case filter
when AfterFilter
seen_after_filter = true
filter.call(self) # invoke after filter
else
# implementation error or someone has mucked with the filter chain
raise ActionControllerError, "filter #{filter.inspect} was in the wrong place!" if seen_after_filter
end
index = index.next
end
index.next
end
def halt_filter_chain(filter, reason)
@before_filter_chain_aborted = true
logger.info "Filter chain halted as [#{filter.inspect}] #{reason}." if logger
end
end
end
end
Restore the more readable before_ and after_filters methods since they aren't called frequently
module ActionController #:nodoc:
module Filters #:nodoc:
def self.included(base)
base.class_eval do
extend ClassMethods
include ActionController::Filters::InstanceMethods
end
end
# Ordered collection of Filter callbacks for a controller class. Knows how
# to append, prepend and skip filters while keeping after filters grouped
# at the tail of the chain.
class FilterChain < ActiveSupport::Callbacks::CallbackChain #:nodoc:
  # Append +filters+ of +filter_type+ (:before, :after or :around).
  # Before/around filters are inserted ahead of the first after filter.
  def append_filter_to_chain(filters, filter_type, &block)
    pos = find_filter_append_position(filters, filter_type)
    update_filter_chain(filters, filter_type, pos, &block)
  end

  # Prepend +filters+ of +filter_type+ to the front of their section.
  def prepend_filter_to_chain(filters, filter_type, &block)
    pos = find_filter_prepend_position(filters, filter_type)
    update_filter_chain(filters, filter_type, pos, &block)
  end

  # Normalize the raw +filters+ arguments (symbols, procs, objects, plus an
  # optional trailing options hash and block) into Filter instances.
  def create_filters(filters, filter_type, &block)
    filters, conditions = extract_options(filters, &block)
    filters.map! { |filter| find_or_create_filter(filter, filter_type, conditions) }
    filters
  end

  # Remove +filters+ from the chain (optionally only those matching +test+).
  # With :only/:except conditions the filter stays in the chain but is
  # tagged via :skip options instead of being deleted outright.
  def skip_filter_in_chain(*filters, &test)
    filters, conditions = extract_options(filters)
    filters.each do |filter|
      if callback = find(filter) then delete(callback) end
    end if conditions.empty?
    update_filter_in_chain(filters, :skip => conditions, &test)
  end

  private
    def update_filter_chain(filters, filter_type, pos, &block)
      new_filters = create_filters(filters, filter_type, &block)
      insert(pos, new_filters).flatten!
    end

    def find_filter_append_position(filters, filter_type)
      # appending an after filter puts it at the end of the call chain
      # before and around filters go before the first after filter in the chain
      unless filter_type == :after
        each_with_index do |f,i|
          return i if f.after?
        end
      end
      return -1
    end

    def find_filter_prepend_position(filters, filter_type)
      # prepending a before or around filter puts it at the front of the call chain
      # after filters go before the first after filter in the chain
      if filter_type == :after
        each_with_index do |f,i|
          return i if f.after?
        end
        return -1
      end
      return 0
    end

    # Reuse an existing filter of the same type for +filter+ (merging in the
    # new +options+), or build the appropriate Filter subclass for it.
    def find_or_create_filter(filter, filter_type, options = {})
      update_filter_in_chain([filter], options)
      if found_filter = find(filter) { |f| f.type == filter_type }
        found_filter
      else
        # A filter object may supply #before or #after; record which kind so
        # the callback machinery drives it correctly.
        filter_kind = case
        when filter.respond_to?(:before) && filter_type == :before
          :before
        when filter.respond_to?(:after) && filter_type == :after
          :after
        else
          :filter
        end
        case filter_type
        when :before
          BeforeFilter.new(filter_kind, filter, options)
        when :after
          AfterFilter.new(filter_kind, filter, options)
        else
          AroundFilter.new(filter_kind, filter, options)
        end
      end
    end

    # Replace matching filters with dup'ed copies carrying merged +options+,
    # leaving the originals (possibly shared with a superclass) untouched.
    def update_filter_in_chain(filters, options, &test)
      filters.map! { |f| block_given? ? find(f, &test) : find(f) }
      filters.compact!
      map! do |filter|
        if filters.include?(filter)
          new_filter = filter.dup
          new_filter.update_options!(options)
          new_filter
        else
          filter
        end
      end
    end
end
# Base class for a single callback in the chain; wraps the filter method,
# proc or object together with its :only/:except/:skip conditions.
class Filter < ActiveSupport::Callbacks::Callback #:nodoc:
  def initialize(kind, method, options = {})
    super
    update_options! options
  end

  def before?
    self.class == BeforeFilter
  end

  def after?
    self.class == AfterFilter
  end

  def around?
    self.class == AroundFilter
  end

  # Merge +other+ (and its nested :skip hash) into this filter's options.
  # Make sets of strings from :only/:except options
  def update_options!(other)
    if other
      convert_only_and_except_options_to_sets_of_strings(other)
      if other[:skip]
        convert_only_and_except_options_to_sets_of_strings(other[:skip])
      end
    end
    options.update(other)
  end

  private
    # True unless a :skip condition excludes the current action.
    def should_not_skip?(controller)
      if options[:skip]
        !included_in_action?(controller, options[:skip])
      else
        true
      end
    end

    # Check the current action name against :only/:except in +options+.
    def included_in_action?(controller, options)
      if options[:only]
        options[:only].include?(controller.action_name)
      elsif options[:except]
        !options[:except].include?(controller.action_name)
      else
        true
      end
    end

    def should_run_callback?(controller)
      should_not_skip?(controller) && included_in_action?(controller, options) && super
    end

    # Normalize :only/:except values to Sets of action-name strings for
    # cheap membership checks regardless of how they were declared.
    def convert_only_and_except_options_to_sets_of_strings(opts)
      [:only, :except].each do |key|
        if values = opts[key]
          opts[key] = Array(values).map(&:to_s).to_set
        end
      end
    end
end
# Wraps the action (and the rest of the chain): code before the yield runs
# before the action, code after the yield runs after it.
class AroundFilter < Filter #:nodoc:
  def type
    :around
  end

  def call(controller, &block)
    if should_run_callback?(controller)
      # Objects responding to both #before and #after are driven through a
      # synthesized proc; everything else is invoked directly.
      method = filter_responds_to_before_and_after? ? around_proc : self.method
      # For around_filter do |controller, action|
      if method.is_a?(Proc) && method.arity == 2
        evaluate_method(method, controller, block)
      else
        evaluate_method(method, controller, &block)
      end
    else
      # Conditions exclude this filter for the current action; keep the
      # rest of the chain running.
      block.call
    end
  end

  private
    def filter_responds_to_before_and_after?
      method.respond_to?(:before) && method.respond_to?(:after)
    end

    # Adapter proc for before/after-style filter objects. If #before renders
    # or redirects, the chain halts and the action is skipped; otherwise
    # #after runs in an ensure so it fires even when the action raises.
    def around_proc
      Proc.new do |controller, action|
        method.before(controller)
        if controller.send!(:performed?)
          controller.send!(:halt_filter_chain, method, :rendered_or_redirected)
        else
          begin
            action.call
          ensure
            method.after(controller)
          end
        end
      end
    end
end
# A filter that runs ahead of the action; rendering or redirecting from it
# halts the remainder of the chain.
class BeforeFilter < Filter #:nodoc:
  def type
    :before
  end

  def call(controller, &block)
    super
    return unless controller.send!(:performed?)
    controller.send!(:halt_filter_chain, method, :rendered_or_redirected)
  end
end
# A filter that runs once the action (and any around filters) completed.
class AfterFilter < Filter #:nodoc:
  def type
    :after
  end
end
# Filters enable controllers to run shared pre- and post-processing code for its actions. These filters can be used to do
# authentication, caching, or auditing before the intended action is performed. Or to do localization or output
# compression after the action has been performed. Filters have access to the request, response, and all the instance
# variables set by other filters in the chain or by the action (in the case of after filters).
#
# == Filter inheritance
#
# Controller inheritance hierarchies share filters downwards, but subclasses can also add or skip filters without
# affecting the superclass. For example:
#
# class BankController < ActionController::Base
# before_filter :audit
#
# private
# def audit
# # record the action and parameters in an audit log
# end
# end
#
# class VaultController < BankController
# before_filter :verify_credentials
#
# private
# def verify_credentials
# # make sure the user is allowed into the vault
# end
# end
#
# Now any actions performed on the BankController will have the audit method called before. On the VaultController,
# first the audit method is called, then the verify_credentials method. If the audit method renders or redirects, then
# verify_credentials and the intended action are never called.
#
# == Filter types
#
# A filter can take one of three forms: method reference (symbol), external class, or inline method (proc). The first
# is the most common and works by referencing a protected or private method somewhere in the inheritance hierarchy of
# the controller by use of a symbol. In the bank example above, both BankController and VaultController use this form.
#
# Using an external class makes for more easily reused generic filters, such as output compression. External filter classes
# are implemented by having a static +filter+ method on any class and then passing this class to the filter method. Example:
#
# class OutputCompressionFilter
# def self.filter(controller)
# controller.response.body = compress(controller.response.body)
# end
# end
#
# class NewspaperController < ActionController::Base
# after_filter OutputCompressionFilter
# end
#
# The filter method is passed the controller instance and is hence granted access to all aspects of the controller and can
# manipulate them as it sees fit.
#
# The inline method (using a proc) can be used to quickly do something small that doesn't require a lot of explanation.
# Or just as a quick test. It works like this:
#
# class WeblogController < ActionController::Base
# before_filter { |controller| head(400) if controller.params["stop_action"] }
# end
#
# As you can see, the block expects to be passed the controller after it has assigned the request to the internal variables.
# This means that the block has access to both the request and response objects complete with convenience methods for params,
# session, template, and assigns. Note: The inline method doesn't strictly have to be a block; any object that responds to call
# and whose arity is 1 or -1 will do (such as a Proc or a Method object).
#
# Please note that around_filters function a little differently than the normal before and after filters with regard to filter
# types. Please see the section dedicated to around_filters below.
#
# == Filter chain ordering
#
# Using <tt>before_filter</tt> and <tt>after_filter</tt> appends the specified filters to the existing chain. That's usually
# just fine, but some times you care more about the order in which the filters are executed. When that's the case, you
# can use <tt>prepend_before_filter</tt> and <tt>prepend_after_filter</tt>. Filters added by these methods will be put at the
# beginning of their respective chain and executed before the rest. For example:
#
# class ShoppingController < ActionController::Base
# before_filter :verify_open_shop
#
# class CheckoutController < ShoppingController
# prepend_before_filter :ensure_items_in_cart, :ensure_items_in_stock
#
# The filter chain for the CheckoutController is now <tt>:ensure_items_in_cart, :ensure_items_in_stock,</tt>
# <tt>:verify_open_shop</tt>. So if either of the ensure filters renders or redirects, we'll never get around to see if the shop
# is open or not.
#
# You may pass multiple filter arguments of each type as well as a filter block.
# If a block is given, it is treated as the last argument.
#
# == Around filters
#
# Around filters wrap an action, executing code both before and after.
# They may be declared as method references, blocks, or objects responding
# to +filter+ or to both +before+ and +after+.
#
# To use a method as an +around_filter+, pass a symbol naming the Ruby method.
# Yield (or <tt>block.call</tt>) within the method to run the action.
#
# around_filter :catch_exceptions
#
# private
# def catch_exceptions
# yield
# rescue => exception
# logger.debug "Caught exception! #{exception}"
# raise
# end
#
# To use a block as an +around_filter+, pass a block taking as args both
# the controller and the action block. You can't call yield directly from
# an +around_filter+ block; explicitly call the action block instead:
#
# around_filter do |controller, action|
# logger.debug "before #{controller.action_name}"
# action.call
# logger.debug "after #{controller.action_name}"
# end
#
# To use a filter object with +around_filter+, pass an object responding
# to <tt>:filter</tt> or both <tt>:before</tt> and <tt>:after</tt>. With a
# filter method, yield to the block as above:
#
# around_filter BenchmarkingFilter
#
# class BenchmarkingFilter
# def self.filter(controller, &block)
# Benchmark.measure(&block)
# end
# end
#
# With +before+ and +after+ methods:
#
# around_filter Authorizer.new
#
# class Authorizer
# # This will run before the action. Redirecting aborts the action.
# def before(controller)
# unless user.authorized?
# redirect_to(login_url)
# end
# end
#
# # This will run after the action if and only if before did not render or redirect.
# def after(controller)
# end
# end
#
# If the filter has +before+ and +after+ methods, the +before+ method will be
# called before the action. If +before+ renders or redirects, the filter chain is
# halted and +after+ will not be run. See Filter Chain Halting below for
# an example.
#
# == Filter chain skipping
#
# Declaring a filter on a base class conveniently applies to its subclasses,
# but sometimes a subclass should skip some of its superclass' filters:
#
# class ApplicationController < ActionController::Base
# before_filter :authenticate
# around_filter :catch_exceptions
# end
#
# class WeblogController < ApplicationController
# # Will run the :authenticate and :catch_exceptions filters.
# end
#
# class SignupController < ApplicationController
# # Skip :authenticate, run :catch_exceptions.
# skip_before_filter :authenticate
# end
#
# class ProjectsController < ApplicationController
# # Skip :catch_exceptions, run :authenticate.
# skip_filter :catch_exceptions
# end
#
# class ClientsController < ApplicationController
# # Skip :catch_exceptions and :authenticate unless action is index.
# skip_filter :catch_exceptions, :authenticate, :except => :index
# end
#
# == Filter conditions
#
# Filters may be limited to specific actions by declaring the actions to
# include or exclude. Both options accept single actions
# (<tt>:only => :index</tt>) or arrays of actions
# (<tt>:except => [:foo, :bar]</tt>).
#
# class Journal < ActionController::Base
# # Require authentication for edit and delete.
# before_filter :authorize, :only => [:edit, :delete]
#
# # Passing options to a filter with a block.
# around_filter(:except => :index) do |controller, action_block|
# results = Profiler.run(&action_block)
# controller.response.sub! "</body>", "#{results}</body>"
# end
#
# private
# def authorize
# # Redirect to login unless authenticated.
# end
# end
#
# == Filter Chain Halting
#
# <tt>before_filter</tt> and <tt>around_filter</tt> may halt the request
# before a controller action is run. This is useful, for example, to deny
# access to unauthenticated users or to redirect from HTTP to HTTPS.
# Simply call render or redirect. After filters will not be executed if the filter
# chain is halted.
#
# Around filters halt the request unless the action block is called.
# Given these filters
# after_filter :after
# around_filter :around
# before_filter :before
#
# The filter chain will look like:
#
# ...
# . \
# . #around (code before yield)
# . . \
# . . #before (actual filter code is run)
# . . . \
# . . . execute controller action
# . . . /
# . . ...
# . . /
# . #around (code after yield)
# . /
# #after (actual filter code is run, unless the around filter does not yield)
#
# If +around+ returns before yielding, the +before+ filter and controller
# action will not be run. If +before+ renders or redirects, the second half
# of +around+ will still run, but +after+ and the action will not. If
# +around+ fails to yield, +after+ will not be run.
# Class-level declaration macros (before_filter, around_filter, skip_filter,
# ...) that manipulate the per-class FilterChain.
module ClassMethods
  # The passed <tt>filters</tt> will be appended to the filter_chain and
  # will execute before the action on this controller is performed.
  def append_before_filter(*filters, &block)
    filter_chain.append_filter_to_chain(filters, :before, &block)
  end

  # The passed <tt>filters</tt> will be prepended to the filter_chain and
  # will execute before the action on this controller is performed.
  def prepend_before_filter(*filters, &block)
    filter_chain.prepend_filter_to_chain(filters, :before, &block)
  end

  # Shorthand for append_before_filter since it's the most common.
  alias :before_filter :append_before_filter

  # The passed <tt>filters</tt> will be appended to the array of filters
  # that run _after_ actions on this controller are performed.
  def append_after_filter(*filters, &block)
    filter_chain.append_filter_to_chain(filters, :after, &block)
  end

  # The passed <tt>filters</tt> will be prepended to the array of filters
  # that run _after_ actions on this controller are performed.
  def prepend_after_filter(*filters, &block)
    filter_chain.prepend_filter_to_chain(filters, :after, &block)
  end

  # Shorthand for append_after_filter since it's the most common.
  alias :after_filter :append_after_filter

  # If you <tt>append_around_filter A.new, B.new</tt>, the filter chain looks like
  #
  #   B#before
  #     A#before
  #       # run the action
  #     A#after
  #   B#after
  #
  # With around filters which yield to the action block, +before+ and +after+
  # are the code before and after the yield.
  def append_around_filter(*filters, &block)
    filter_chain.append_filter_to_chain(filters, :around, &block)
  end

  # If you <tt>prepend_around_filter A.new, B.new</tt>, the filter chain looks like:
  #
  #   A#before
  #     B#before
  #       # run the action
  #     B#after
  #   A#after
  #
  # With around filters which yield to the action block, +before+ and +after+
  # are the code before and after the yield.
  def prepend_around_filter(*filters, &block)
    filter_chain.prepend_filter_to_chain(filters, :around, &block)
  end

  # Shorthand for +append_around_filter+ since it's the most common.
  alias :around_filter :append_around_filter

  # Removes the specified filters from the +before+ filter chain. Note that this only works for skipping method-reference
  # filters, not procs. This is especially useful for managing the chain in inheritance hierarchies where only one out
  # of many sub-controllers need a different hierarchy.
  #
  # You can control the actions to skip the filter for with the <tt>:only</tt> and <tt>:except</tt> options,
  # just like when you apply the filters.
  def skip_before_filter(*filters)
    filter_chain.skip_filter_in_chain(*filters, &:before?)
  end

  # Removes the specified filters from the +after+ filter chain. Note that this only works for skipping method-reference
  # filters, not procs. This is especially useful for managing the chain in inheritance hierarchies where only one out
  # of many sub-controllers need a different hierarchy.
  #
  # You can control the actions to skip the filter for with the <tt>:only</tt> and <tt>:except</tt> options,
  # just like when you apply the filters.
  def skip_after_filter(*filters)
    filter_chain.skip_filter_in_chain(*filters, &:after?)
  end

  # Removes the specified filters from the filter chain. This only works for method reference (symbol)
  # filters, not procs. This method is different from skip_after_filter and skip_before_filter in that
  # it will match any before, after or yielding around filter.
  #
  # You can control the actions to skip the filter for with the <tt>:only</tt> and <tt>:except</tt> options,
  # just like when you apply the filters.
  def skip_filter(*filters)
    filter_chain.skip_filter_in_chain(*filters)
  end

  # Returns an array of Filter objects for this controller.
  # Lazily creates (and stores as an inheritable attribute) the chain on
  # first access, then recurses once to return the stored instance.
  def filter_chain
    if chain = read_inheritable_attribute('filter_chain')
      return chain
    else
      write_inheritable_attribute('filter_chain', FilterChain.new)
      return filter_chain
    end
  end

  # Returns all the before filters for this class and all its ancestors.
  # This method returns the actual filter that was assigned in the controller to maintain existing functionality.
  def before_filters #:nodoc:
    filter_chain.select(&:before?).map(&:method)
  end

  # Returns all the after filters for this class and all its ancestors.
  # This method returns the actual filter that was assigned in the controller to maintain existing functionality.
  def after_filters #:nodoc:
    filter_chain.select(&:after?).map(&:method)
  end
end
# Runs the filter chain around the actual action dispatch.
module InstanceMethods # :nodoc:
  def self.included(base)
    base.class_eval do
      alias_method_chain :perform_action, :filters
      alias_method_chain :process, :filters
    end
  end

  protected
    # Entry point from #process: reset the halt flag for this request.
    def process_with_filters(request, response, method = :perform_action, *arguments) #:nodoc:
      @before_filter_chain_aborted = false
      process_without_filters(request, response, method, *arguments)
    end

    # Run the whole filter chain around the real action.
    def perform_action_with_filters
      call_filters(self.class.filter_chain, 0, 0)
    end

  private
    # Run before/around filters from +index+, then the action, then (only at
    # the outermost nesting level and if nothing aborted) the after filters.
    def call_filters(chain, index, nesting)
      index = run_before_filters(chain, index, nesting)
      aborted = @before_filter_chain_aborted
      perform_action_without_filters unless performed? || aborted
      # Inner (around-filter) invocations return so the around filter's
      # post-yield code runs before the after filters do.
      return index if nesting != 0 || aborted
      run_after_filters(chain, index)
    end

    # Walk the chain invoking before filters; around filters recurse via
    # call_filters inside their yield. Returns the index where the before
    # section ended (i.e. where after filters begin).
    def run_before_filters(chain, index, nesting)
      while chain[index]
        filter, index = chain[index], index
        break unless filter # end of call chain reached
        case filter
        when BeforeFilter
          filter.call(self) # invoke before filter
          index = index.next
          break if @before_filter_chain_aborted
        when AroundFilter
          yielded = false
          filter.call(self) do
            yielded = true
            # all remaining before and around filters will be run in this call
            index = call_filters(chain, index.next, nesting.next)
          end
          halt_filter_chain(filter, :did_not_yield) unless yielded
          break
        else
          break # no before or around filters left
        end
      end
      index
    end

    # Invoke every AfterFilter from +index+ to the end of the chain.
    def run_after_filters(chain, index)
      seen_after_filter = false
      while chain[index]
        filter, index = chain[index], index
        break unless filter # end of call chain reached
        case filter
        when AfterFilter
          seen_after_filter = true
          filter.call(self) # invoke after filter
        else
          # implementation error or someone has mucked with the filter chain
          raise ActionControllerError, "filter #{filter.inspect} was in the wrong place!" if seen_after_filter
        end
        index = index.next
      end
      index.next
    end

    # Abort the chain: no further before filters or the action will run.
    def halt_filter_chain(filter, reason)
      @before_filter_chain_aborted = true
      logger.info "Filter chain halted as [#{filter.inspect}] #{reason}." if logger
    end
end
end
end
|
# Spinach step definitions for the project settings/show feature:
# renaming, toggling issues, changing the path and default branch, and
# checking sidebar contents.
class ProjectFeature < Spinach::FeatureSteps
  include SharedAuthentication
  include SharedProject
  include SharedPaths

  step 'change project settings' do
    fill_in 'project_name', with: 'NewName'
    uncheck 'project_issues_enabled'
  end

  step 'I save project' do
    click_button 'Save changes'
  end

  step 'I should see project with new settings' do
    find_field('project_name').value.should == 'NewName'
  end

  step 'change project path settings' do
    fill_in "project_path", with: "new-path"
    click_button "Rename"
  end

  step 'I should see project with new path settings' do
    # NOTE(review): `project` presumably comes from SharedProject and
    # reflects the renamed record — confirm against the shared module.
    project.path.should == "new-path"
  end

  step 'I should see project "Shop" README link' do
    within '.project-side' do
      page.should have_content "README.md"
    end
  end

  step 'I should see project "Shop" version' do
    within '.project-side' do
      page.should have_content "Version: 2.2.0"
    end
  end

  step 'change project default branch' do
    select 'stable', from: 'project_default_branch'
  end

  step 'I should see project default branch changed' do
    # TODO: Uncomment this when we can do real gitlab-shell calls
    # from spinach tests. Right now gitlab-shell calls are stubbed so this test
    # will not pass
    # find(:css, 'select#project_default_branch').value.should == 'stable'
  end
end
Uncomment change branch test
# Spinach step definitions for the project settings/show feature (updated
# version: default-branch change is saved and verified for real).
class ProjectFeature < Spinach::FeatureSteps
  include SharedAuthentication
  include SharedProject
  include SharedPaths

  step 'change project settings' do
    fill_in 'project_name', with: 'NewName'
    uncheck 'project_issues_enabled'
  end

  step 'I save project' do
    click_button 'Save changes'
  end

  step 'I should see project with new settings' do
    find_field('project_name').value.should == 'NewName'
  end

  step 'change project path settings' do
    fill_in "project_path", with: "new-path"
    click_button "Rename"
  end

  step 'I should see project with new path settings' do
    # NOTE(review): `project` presumably comes from SharedProject and
    # reflects the renamed record — confirm against the shared module.
    project.path.should == "new-path"
  end

  step 'I should see project "Shop" README link' do
    within '.project-side' do
      page.should have_content "README.md"
    end
  end

  step 'I should see project "Shop" version' do
    within '.project-side' do
      page.should have_content "Version: 6.7.0.pre"
    end
  end

  step 'change project default branch' do
    select 'fix', from: 'project_default_branch'
    click_button 'Save changes'
  end

  step 'I should see project default branch changed' do
    find(:css, 'select#project_default_branch').value.should == 'fix'
  end
end
|
module ActiveRecord
# Raised by a migration's +down+ method when the transformation cannot be
# reversed (e.g. destroyed data).
class IrreversibleMigration < ActiveRecordError#:nodoc:
end
# Raised when two migration files carry the same version number.
class DuplicateMigrationVersionError < ActiveRecordError#:nodoc:
  def initialize(version)
    super("Multiple migrations have the version number #{version}")
  end
end
# Migrations can manage the evolution of a schema used by several physical databases. It's a solution
# to the common problem of adding a field to make a new feature work in your local database, but being unsure of how to
# push that change to other developers and to the production server. With migrations, you can describe the transformations
# in self-contained classes that can be checked into version control systems and executed against another database that
# might be one, two, or five versions behind.
#
# Example of a simple migration:
#
# class AddSsl < ActiveRecord::Migration
# def self.up
# add_column :accounts, :ssl_enabled, :boolean, :default => 1
# end
#
# def self.down
# remove_column :accounts, :ssl_enabled
# end
# end
#
# This migration will add a boolean flag to the accounts table and remove it again, if you're backing out of the migration.
# It shows how all migrations have two class methods +up+ and +down+ that describes the transformations required to implement
# or remove the migration. These methods can consist of both the migration specific methods, like add_column and remove_column,
# but may also contain regular Ruby code for generating data needed for the transformations.
#
# Example of a more complex migration that also needs to initialize data:
#
# class AddSystemSettings < ActiveRecord::Migration
# def self.up
# create_table :system_settings do |t|
# t.column :name, :string
# t.column :label, :string
# t.column :value, :text
# t.column :type, :string
# t.column :position, :integer
# end
#
# SystemSetting.create :name => "notice", :label => "Use notice?", :value => 1
# end
#
# def self.down
# drop_table :system_settings
# end
# end
#
# This migration first adds the system_settings table, then creates the very first row in it using the Active Record model
# that relies on the table. It also uses the more advanced create_table syntax where you can specify a complete table schema
# in one block call.
#
# == Available transformations
#
# * <tt>create_table(name, options)</tt> Creates a table called +name+ and makes the table object available to a block
# that can then add columns to it, following the same format as add_column. See example above. The options hash is for
# fragments like "DEFAULT CHARSET=UTF-8" that are appended to the create table definition.
# * <tt>drop_table(name)</tt>: Drops the table called +name+.
# * <tt>rename_table(old_name, new_name)</tt>: Renames the table called +old_name+ to +new_name+.
# * <tt>add_column(table_name, column_name, type, options)</tt>: Adds a new column to the table called +table_name+
# named +column_name+ specified to be one of the following types:
# :string, :text, :integer, :float, :decimal, :datetime, :timestamp, :time,
# :date, :binary, :boolean. A default value can be specified by passing an
# +options+ hash like { :default => 11 }. Other options include :limit and :null (e.g. { :limit => 50, :null => false })
# -- see ActiveRecord::ConnectionAdapters::TableDefinition#column for details.
# * <tt>rename_column(table_name, column_name, new_column_name)</tt>: Renames a column but keeps the type and content.
# * <tt>change_column(table_name, column_name, type, options)</tt>: Changes the column to a different type using the same
# parameters as add_column.
# * <tt>remove_column(table_name, column_name)</tt>: Removes the column named +column_name+ from the table called +table_name+.
# * <tt>add_index(table_name, column_names, index_type, index_name)</tt>: Add a new index with the name of the column, or +index_name+ (if specified) on the column(s). Specify an optional +index_type+ (e.g. UNIQUE).
# * <tt>remove_index(table_name, index_name)</tt>: Remove the index specified by +index_name+.
#
# == Irreversible transformations
#
# Some transformations are destructive in a manner that cannot be reversed. Migrations of that kind should raise
# an <tt>IrreversibleMigration</tt> exception in their +down+ method.
#
# == Running migrations from within Rails
#
# The Rails package has several tools to help create and apply migrations.
#
# To generate a new migration, use <tt>script/generate migration MyNewMigration</tt>
# where MyNewMigration is the name of your migration. The generator will
# create a file <tt>nnn_my_new_migration.rb</tt> in the <tt>db/migrate/</tt>
# directory, where <tt>nnn</tt> is the next largest migration number.
# You may then edit the <tt>self.up</tt> and <tt>self.down</tt> methods of
# in MyNewMigration.
#
# To run migrations against the currently configured database, use
# <tt>rake migrate</tt>. This will update the database by running all of the
# pending migrations, creating the <tt>schema_info</tt> table if missing.
#
# To roll the database back to a previous migration version, use
# <tt>rake migrate VERSION=X</tt> where <tt>X</tt> is the version to which
# you wish to downgrade. If any of the migrations throw an
# <tt>IrreversibleMigration</tt> exception, that step will fail and you'll
# have some manual work to do.
#
# == Database support
#
# Migrations are currently supported in MySQL, PostgreSQL, SQLite,
# SQL Server, Sybase, and Oracle (all supported databases except DB2).
#
# == More examples
#
# Not all migrations change the schema. Some just fix the data:
#
# class RemoveEmptyTags < ActiveRecord::Migration
# def self.up
# Tag.find(:all).each { |tag| tag.destroy if tag.pages.empty? }
# end
#
# def self.down
# # not much we can do to restore deleted data
# raise IrreversibleMigration
# end
# end
#
# Others remove columns when they migrate up instead of down:
#
# class RemoveUnnecessaryItemAttributes < ActiveRecord::Migration
# def self.up
# remove_column :items, :incomplete_items_count
# remove_column :items, :completed_items_count
# end
#
# def self.down
# add_column :items, :incomplete_items_count
# add_column :items, :completed_items_count
# end
# end
#
# And sometimes you need to do something in SQL not abstracted directly by migrations:
#
# class MakeJoinUnique < ActiveRecord::Migration
# def self.up
# execute "ALTER TABLE `pages_linked_pages` ADD UNIQUE `page_id_linked_page_id` (`page_id`,`linked_page_id`)"
# end
#
# def self.down
# execute "ALTER TABLE `pages_linked_pages` DROP INDEX `page_id_linked_page_id`"
# end
# end
#
# == Using a model after changing its table
#
# Sometimes you'll want to add a column in a migration and populate it immediately after. In that case, you'll need
# to make a call to Base#reset_column_information in order to ensure that the model has the latest column data from
# after the new column was added. Example:
#
# class AddPeopleSalary < ActiveRecord::Migration
# def self.up
# add_column :people, :salary, :integer
# Person.reset_column_information
# Person.find(:all).each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# end
#
# == Controlling verbosity
#
# By default, migrations will describe the actions they are taking, writing
# them to the console as they happen, along with benchmarks describing how
# long each step took.
#
# You can quiet them down by setting ActiveRecord::Migration.verbose = false.
#
# You can also insert your own messages and benchmarks by using the #say_with_time
# method:
#
# def self.up
# ...
# say_with_time "Updating salaries..." do
# Person.find(:all).each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# ...
# end
#
# The phrase "Updating salaries..." would then be printed, along with the
# benchmark for the block when the block completes.
class Migration
  @@verbose = true
  cattr_accessor :verbose

  class << self
    def up_with_benchmarks #:nodoc:
      migrate(:up)
    end

    def down_with_benchmarks #:nodoc:
      migrate(:down)
    end

    # Execute this migration in the named direction
    def migrate(direction)
      return unless respond_to?(direction)
      case direction
      when :up then announce "migrating"
      when :down then announce "reverting"
      end
      result = nil
      time = Benchmark.measure { result = send("#{direction}_without_benchmarks") }
      case direction
      when :up then announce "migrated (%.4fs)" % time.real; write
      when :down then announce "reverted (%.4fs)" % time.real; write
      end
      result
    end

    # Because the method added may do an alias_method, it can be invoked
    # recursively. We use @ignore_new_methods as a guard to indicate whether
    # it is safe for the call to proceed.
    def singleton_method_added(sym) #:nodoc:
      return if @ignore_new_methods
      begin
        @ignore_new_methods = true
        case sym
        when :up, :down
          # Wrap the freshly defined up/down in the benchmarking versions.
          klass = (class << self; self; end)
          klass.send(:alias_method_chain, sym, "benchmarks")
        end
      ensure
        @ignore_new_methods = false
      end
    end

    # Print +text+ unless verbosity is switched off.
    def write(text="")
      puts(text) if verbose
    end

    # Print a "== version Name: message ==..." banner padded to 75 columns.
    def announce(message)
      text = "#{@version} #{name}: #{message}"
      length = [0, 75 - text.length].max
      write "== %s %s" % [text, "=" * length]
    end

    def say(message, subitem=false)
      write "#{subitem ? " ->" : "--"} #{message}"
    end

    # Print +message+, run the block, then print how long it took.
    def say_with_time(message)
      say(message)
      result = nil
      time = Benchmark.measure { result = yield }
      say "%.4fs" % time.real, :subitem
      result
    end

    # Silence migration output for the duration of the block.
    def suppress_messages
      save, self.verbose = verbose, false
      yield
    ensure
      self.verbose = save
    end

    # Forward schema statements (add_column, create_table, ...) to the
    # connection, logging each call with its timing. Table-name arguments
    # get the configured prefix/suffix applied (except for raw #execute).
    def method_missing(method, *arguments, &block)
      arg_list = arguments.map(&:inspect) * ', '
      say_with_time "#{method}(#{arg_list})" do
        unless arguments.empty? || method == :execute
          arguments[0] = Migrator.proper_table_name(arguments.first)
        end
        ActiveRecord::Base.connection.send(method, *arguments, &block)
      end
    end
  end
end
class Migrator#:nodoc:
class << self
  # Migrate up or down to +target_version+ (nil means all the way up).
  def migrate(migrations_path, target_version = nil)
    Base.connection.initialize_schema_information
    case
    when target_version.nil?, current_version < target_version
      up(migrations_path, target_version)
    when current_version > target_version
      down(migrations_path, target_version)
    when current_version == target_version
      return # You're on the right version
    end
  end

  def up(migrations_path, target_version = nil)
    self.new(:up, migrations_path, target_version).migrate
  end

  def down(migrations_path, target_version = nil)
    self.new(:down, migrations_path, target_version).migrate
  end

  # Name of the bookkeeping table, honoring any table name prefix/suffix.
  def schema_info_table_name
    Base.table_name_prefix + "schema_info" + Base.table_name_suffix
  end

  # Version currently recorded in the schema_info table (0 when absent).
  def current_version
    Base.connection.select_value("SELECT version FROM #{schema_info_table_name}").to_i
  end

  def proper_table_name(name)
    # Use the ActiveRecord objects own table_name, or pre/suffix from ActiveRecord::Base if name is a symbol/string
    name.table_name rescue "#{ActiveRecord::Base.table_name_prefix}#{name}#{ActiveRecord::Base.table_name_suffix}"
  end
end
def initialize(direction, migrations_path, target_version = nil)
raise StandardError.new("This database does not yet support migrations") unless Base.connection.supports_migrations?
@direction, @migrations_path, @target_version = direction, migrations_path, target_version
Base.connection.initialize_schema_information
end
def current_version
self.class.current_version
end
def migrate
migration_classes.each do |migration_class|
Base.logger.info("Reached target version: #{@target_version}") and break if reached_target_version?(migration_class.version)
next if irrelevant_migration?(migration_class.version)
Base.logger.info "Migrating to #{migration_class} (#{migration_class.version})"
migration_class.migrate(@direction)
set_schema_version(migration_class.version)
end
end
private
def migration_classes
migrations = migration_files.inject([]) do |migrations, migration_file|
load(migration_file)
version, name = migration_version_and_name(migration_file)
assert_unique_migration_version(migrations, version.to_i)
migrations << migration_class(name, version.to_i)
end
sorted = migrations.sort_by { |m| m.version }
down? ? sorted.reverse : sorted
end
def assert_unique_migration_version(migrations, version)
if !migrations.empty? && migrations.find { |m| m.version == version }
raise DuplicateMigrationVersionError.new(version)
end
end
def migration_files
files = Dir["#{@migrations_path}/[0-9]*_*.rb"].sort_by do |f|
migration_version_and_name(f).first.to_i
end
down? ? files.reverse : files
end
def migration_class(migration_name, version)
klass = migration_name.camelize.constantize
class << klass; attr_accessor :version end
klass.version = version
klass
end
def migration_version_and_name(migration_file)
return *migration_file.scan(/([0-9]+)_([_a-z0-9]*).rb/).first
end
def set_schema_version(version)
Base.connection.update("UPDATE #{self.class.schema_info_table_name} SET version = #{down? ? version.to_i - 1 : version.to_i}")
end
def up?
@direction == :up
end
def down?
@direction == :down
end
def reached_target_version?(version)
return false if @target_version == nil
(up? && version.to_i - 1 >= @target_version) || (down? && version.to_i <= @target_version)
end
def irrelevant_migration?(version)
(up? && version.to_i <= current_version) || (down? && version.to_i > current_version)
end
end
end
Migrations: report the number of rows affected when calling insert, update, delete, etc.
git-svn-id: afc9fed30c1a09d8801d1e4fbe6e01c29c67d11f@6945 5ecf4fe2-1ee6-0310-87b1-e25e094e27de
module ActiveRecord
# Raised by a migration whose +down+ cannot undo what +up+ did.
class IrreversibleMigration < ActiveRecordError#:nodoc:
end
# Raised when two migration files carry the same version number.
class DuplicateMigrationVersionError < ActiveRecordError#:nodoc:
  def initialize(version)
    message = "Multiple migrations have the version number #{version}"
    super(message)
  end
end
# Migrations can manage the evolution of a schema used by several physical databases. It's a solution
# to the common problem of adding a field to make a new feature work in your local database, but being unsure of how to
# push that change to other developers and to the production server. With migrations, you can describe the transformations
# in self-contained classes that can be checked into version control systems and executed against another database that
# might be one, two, or five versions behind.
#
# Example of a simple migration:
#
# class AddSsl < ActiveRecord::Migration
# def self.up
# add_column :accounts, :ssl_enabled, :boolean, :default => 1
# end
#
# def self.down
# remove_column :accounts, :ssl_enabled
# end
# end
#
# This migration will add a boolean flag to the accounts table and remove it again, if you're backing out of the migration.
# It shows how all migrations have two class methods +up+ and +down+ that describes the transformations required to implement
# or remove the migration. These methods can consist of both the migration specific methods, like add_column and remove_column,
# but may also contain regular Ruby code for generating data needed for the transformations.
#
# Example of a more complex migration that also needs to initialize data:
#
# class AddSystemSettings < ActiveRecord::Migration
# def self.up
# create_table :system_settings do |t|
# t.column :name, :string
# t.column :label, :string
# t.column :value, :text
# t.column :type, :string
# t.column :position, :integer
# end
#
# SystemSetting.create :name => "notice", :label => "Use notice?", :value => 1
# end
#
# def self.down
# drop_table :system_settings
# end
# end
#
# This migration first adds the system_settings table, then creates the very first row in it using the Active Record model
# that relies on the table. It also uses the more advanced create_table syntax where you can specify a complete table schema
# in one block call.
#
# == Available transformations
#
# * <tt>create_table(name, options)</tt> Creates a table called +name+ and makes the table object available to a block
# that can then add columns to it, following the same format as add_column. See example above. The options hash is for
# fragments like "DEFAULT CHARSET=UTF-8" that are appended to the create table definition.
# * <tt>drop_table(name)</tt>: Drops the table called +name+.
# * <tt>rename_table(old_name, new_name)</tt>: Renames the table called +old_name+ to +new_name+.
# * <tt>add_column(table_name, column_name, type, options)</tt>: Adds a new column to the table called +table_name+
# named +column_name+ specified to be one of the following types:
# :string, :text, :integer, :float, :decimal, :datetime, :timestamp, :time,
# :date, :binary, :boolean. A default value can be specified by passing an
# +options+ hash like { :default => 11 }. Other options include :limit and :null (e.g. { :limit => 50, :null => false })
# -- see ActiveRecord::ConnectionAdapters::TableDefinition#column for details.
# * <tt>rename_column(table_name, column_name, new_column_name)</tt>: Renames a column but keeps the type and content.
# * <tt>change_column(table_name, column_name, type, options)</tt>: Changes the column to a different type using the same
# parameters as add_column.
# * <tt>remove_column(table_name, column_name)</tt>: Removes the column named +column_name+ from the table called +table_name+.
# * <tt>add_index(table_name, column_names, index_type, index_name)</tt>: Add a new index with the name of the column, or +index_name+ (if specified) on the column(s). Specify an optional +index_type+ (e.g. UNIQUE).
# * <tt>remove_index(table_name, index_name)</tt>: Remove the index specified by +index_name+.
#
# == Irreversible transformations
#
# Some transformations are destructive in a manner that cannot be reversed. Migrations of that kind should raise
# an <tt>IrreversibleMigration</tt> exception in their +down+ method.
#
# == Running migrations from within Rails
#
# The Rails package has several tools to help create and apply migrations.
#
# To generate a new migration, use <tt>script/generate migration MyNewMigration</tt>
# where MyNewMigration is the name of your migration. The generator will
# create a file <tt>nnn_my_new_migration.rb</tt> in the <tt>db/migrate/</tt>
# directory, where <tt>nnn</tt> is the next largest migration number.
# You may then edit the <tt>self.up</tt> and <tt>self.down</tt> methods of
# MyNewMigration.
#
# To run migrations against the currently configured database, use
# <tt>rake migrate</tt>. This will update the database by running all of the
# pending migrations, creating the <tt>schema_info</tt> table if missing.
#
# To roll the database back to a previous migration version, use
# <tt>rake migrate VERSION=X</tt> where <tt>X</tt> is the version to which
# you wish to downgrade. If any of the migrations throw an
# <tt>IrreversibleMigration</tt> exception, that step will fail and you'll
# have some manual work to do.
#
# == Database support
#
# Migrations are currently supported in MySQL, PostgreSQL, SQLite,
# SQL Server, Sybase, and Oracle (all supported databases except DB2).
#
# == More examples
#
# Not all migrations change the schema. Some just fix the data:
#
# class RemoveEmptyTags < ActiveRecord::Migration
# def self.up
# Tag.find(:all).each { |tag| tag.destroy if tag.pages.empty? }
# end
#
# def self.down
# # not much we can do to restore deleted data
# raise IrreversibleMigration
# end
# end
#
# Others remove columns when they migrate up instead of down:
#
# class RemoveUnnecessaryItemAttributes < ActiveRecord::Migration
# def self.up
# remove_column :items, :incomplete_items_count
# remove_column :items, :completed_items_count
# end
#
# def self.down
# add_column :items, :incomplete_items_count
# add_column :items, :completed_items_count
# end
# end
#
# And sometimes you need to do something in SQL not abstracted directly by migrations:
#
# class MakeJoinUnique < ActiveRecord::Migration
# def self.up
# execute "ALTER TABLE `pages_linked_pages` ADD UNIQUE `page_id_linked_page_id` (`page_id`,`linked_page_id`)"
# end
#
# def self.down
# execute "ALTER TABLE `pages_linked_pages` DROP INDEX `page_id_linked_page_id`"
# end
# end
#
# == Using a model after changing its table
#
# Sometimes you'll want to add a column in a migration and populate it immediately after. In that case, you'll need
# to make a call to Base#reset_column_information in order to ensure that the model has the latest column data from
# after the new column was added. Example:
#
# class AddPeopleSalary < ActiveRecord::Migration
# def self.up
# add_column :people, :salary, :integer
# Person.reset_column_information
# Person.find(:all).each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# end
#
# == Controlling verbosity
#
# By default, migrations will describe the actions they are taking, writing
# them to the console as they happen, along with benchmarks describing how
# long each step took.
#
# You can quiet them down by setting ActiveRecord::Migration.verbose = false.
#
# You can also insert your own messages and benchmarks by using the #say_with_time
# method:
#
# def self.up
# ...
# say_with_time "Updating salaries..." do
# Person.find(:all).each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# ...
# end
#
# The phrase "Updating salaries..." would then be printed, along with the
# benchmark for the block when the block completes.
# Base class for migrations. Subclasses define class methods +up+ and
# +down+; the singleton_method_added hook below wraps them (via
# alias_method_chain) so each run is announced and benchmarked.
class Migration
@@verbose = true
cattr_accessor :verbose
class << self
# Benchmarked wrapper installed over the user's +up+.
def up_with_benchmarks #:nodoc:
migrate(:up)
end
# Benchmarked wrapper installed over the user's +down+.
def down_with_benchmarks #:nodoc:
migrate(:down)
end
# Execute this migration in the named direction
def migrate(direction)
return unless respond_to?(direction)
case direction
when :up then announce "migrating"
when :down then announce "reverting"
end
result = nil
# call the original (pre-alias) up/down and time it
time = Benchmark.measure { result = send("#{direction}_without_benchmarks") }
case direction
when :up then announce "migrated (%.4fs)" % time.real; write
when :down then announce "reverted (%.4fs)" % time.real; write
end
result
end
# Because the method added may do an alias_method, it can be invoked
# recursively. We use @ignore_new_methods as a guard to indicate whether
# it is safe for the call to proceed.
def singleton_method_added(sym) #:nodoc:
return if @ignore_new_methods
begin
@ignore_new_methods = true
case sym
when :up, :down
klass = (class << self; self; end)
klass.send(:alias_method_chain, sym, "benchmarks")
end
ensure
@ignore_new_methods = false
end
end
# Print +text+ when verbose output is enabled.
def write(text="")
puts(text) if verbose
end
# Write a "== version Name: message ===…" banner padded to ~75 columns.
def announce(message)
text = "#{@version} #{name}: #{message}"
length = [0, 75 - text.length].max
write "== %s %s" % [text, "=" * length]
end
# Write a progress line; sub-items are indented with " ->".
def say(message, subitem=false)
write "#{subitem ? " ->" : "--"} #{message}"
end
# Announce +message+, yield, report elapsed time, and — when the block
# returns an Integer (rows affected) — report that count too.
# Returns the block's result.
def say_with_time(message)
say(message)
result = nil
time = Benchmark.measure { result = yield }
say "%.4fs" % time.real, :subitem
say("#{result} rows", :subitem) if result.is_a?(Integer)
result
end
# Run the block with verbose disabled, restoring it afterwards.
def suppress_messages
save, self.verbose = verbose, false
yield
ensure
self.verbose = save
end
# Forward schema statements (add_column etc.) to the connection,
# logging each call; the first argument is prefixed/suffixed as a
# table name except for raw +execute+ statements.
def method_missing(method, *arguments, &block)
arg_list = arguments.map(&:inspect) * ', '
say_with_time "#{method}(#{arg_list})" do
unless arguments.empty? || method == :execute
arguments[0] = Migrator.proper_table_name(arguments.first)
end
ActiveRecord::Base.connection.send(method, *arguments, &block)
end
end
end
end
# Runs migration classes found in a directory against the database,
# tracking the current schema version in the single-row schema_info table.
class Migrator#:nodoc:
class << self
# Migrate up or down to +target_version+ (all the way up when nil),
# choosing the direction by comparing with the current version.
def migrate(migrations_path, target_version = nil)
Base.connection.initialize_schema_information
case
# the nil check comes first, so the comparison below never sees nil
when target_version.nil?, current_version < target_version
up(migrations_path, target_version)
when current_version > target_version
down(migrations_path, target_version)
when current_version == target_version
return # You're on the right version
end
end
# Apply pending migrations up to +target_version+ (or all of them).
def up(migrations_path, target_version = nil)
self.new(:up, migrations_path, target_version).migrate
end
# Revert migrations down to +target_version+.
def down(migrations_path, target_version = nil)
self.new(:down, migrations_path, target_version).migrate
end
# Name of the version-tracking table, honoring any configured
# table name prefix/suffix.
def schema_info_table_name
Base.table_name_prefix + "schema_info" + Base.table_name_suffix
end
# Version currently recorded in schema_info (0 when none/NULL).
def current_version
Base.connection.select_value("SELECT version FROM #{schema_info_table_name}").to_i
end
def proper_table_name(name)
# Use the ActiveRecord objects own table_name, or pre/suffix from ActiveRecord::Base if name is a symbol/string
name.table_name rescue "#{ActiveRecord::Base.table_name_prefix}#{name}#{ActiveRecord::Base.table_name_suffix}"
end
end
# Prepare a single migration run in +direction+ (:up or :down).
def initialize(direction, migrations_path, target_version = nil)
raise StandardError.new("This database does not yet support migrations") unless Base.connection.supports_migrations?
@direction, @migrations_path, @target_version = direction, migrations_path, target_version
Base.connection.initialize_schema_information
end
def current_version
self.class.current_version
end
# Run each relevant migration class in order, updating schema_info
# after every step; stops once the target version is reached.
def migrate
migration_classes.each do |migration_class|
Base.logger.info("Reached target version: #{@target_version}") and break if reached_target_version?(migration_class.version)
next if irrelevant_migration?(migration_class.version)
Base.logger.info "Migrating to #{migration_class} (#{migration_class.version})"
migration_class.migrate(@direction)
set_schema_version(migration_class.version)
end
end
private
# Load every migration file and return the classes sorted by version
# (reversed when migrating down).
def migration_classes
migrations = migration_files.inject([]) do |migrations, migration_file|
load(migration_file)
version, name = migration_version_and_name(migration_file)
assert_unique_migration_version(migrations, version.to_i)
migrations << migration_class(name, version.to_i)
end
sorted = migrations.sort_by { |m| m.version }
down? ? sorted.reverse : sorted
end
# Raise if +version+ was already seen among the loaded migrations.
def assert_unique_migration_version(migrations, version)
if !migrations.empty? && migrations.find { |m| m.version == version }
raise DuplicateMigrationVersionError.new(version)
end
end
# All "NNN_name.rb" files under the migrations path, version-sorted
# (reversed when migrating down).
def migration_files
files = Dir["#{@migrations_path}/[0-9]*_*.rb"].sort_by do |f|
migration_version_and_name(f).first.to_i
end
down? ? files.reverse : files
end
# Constantize the migration class and tag it with its version number.
def migration_class(migration_name, version)
klass = migration_name.camelize.constantize
class << klass; attr_accessor :version end
klass.version = version
klass
end
# Extract [version_string, name] from a migration filename.
def migration_version_and_name(migration_file)
return *migration_file.scan(/([0-9]+)_([_a-z0-9]*).rb/).first
end
# Record the new current version; migrating down records version - 1,
# since a down migration undoes its own version.
def set_schema_version(version)
Base.connection.update("UPDATE #{self.class.schema_info_table_name} SET version = #{down? ? version.to_i - 1 : version.to_i}")
end
def up?
@direction == :up
end
def down?
@direction == :down
end
# True once the loop has passed the requested target version.
def reached_target_version?(version)
return false if @target_version == nil
(up? && version.to_i - 1 >= @target_version) || (down? && version.to_i <= @target_version)
end
# Skip migrations already applied (up) or never applied (down).
def irrelevant_migration?(version)
(up? && version.to_i <= current_version) || (down? && version.to_i > current_version)
end
end
end
|
require "active_support/core_ext/module/delegation"
require "active_support/core_ext/class/attribute_accessors"
require 'active_support/deprecation'
require 'active_record/schema_migration'
module ActiveRecord
# Exception that can be raised to stop migrations from going backwards,
# i.e. raised in +down+ when a destructive change cannot be undone.
class IrreversibleMigration < ActiveRecordError
end
# Raised when two migration files carry the same version number.
class DuplicateMigrationVersionError < ActiveRecordError#:nodoc:
  def initialize(version)
    message = "Multiple migrations have the version number #{version}"
    super(message)
  end
end
# Raised when two migration files define a class with the same name.
class DuplicateMigrationNameError < ActiveRecordError#:nodoc:
  def initialize(name)
    message = "Multiple migrations have the name #{name}"
    super(message)
  end
end
# Raised when asked to run to a version no migration file defines.
class UnknownMigrationVersionError < ActiveRecordError #:nodoc:
  def initialize(version)
    message = "No migration with version number #{version}"
    super(message)
  end
end
# Raised when a migration filename contains characters outside the
# allowed set (lower case letters, digits and underscores).
class IllegalMigrationNameError < ActiveRecordError#:nodoc:
  def initialize(name)
    message = "Illegal name for migration file: #{name}\n\t(only lower case letters, numbers, and '_' allowed)"
    super(message)
  end
end
# = Active Record Migrations
#
# Migrations can manage the evolution of a schema used by several physical
# databases. It's a solution to the common problem of adding a field to make
# a new feature work in your local database, but being unsure of how to
# push that change to other developers and to the production server. With
# migrations, you can describe the transformations in self-contained classes
# that can be checked into version control systems and executed against
# another database that might be one, two, or five versions behind.
#
# Example of a simple migration:
#
# class AddSsl < ActiveRecord::Migration
# def up
# add_column :accounts, :ssl_enabled, :boolean, :default => 1
# end
#
# def down
# remove_column :accounts, :ssl_enabled
# end
# end
#
# This migration will add a boolean flag to the accounts table and remove it
# if you're backing out of the migration. It shows how all migrations have
# two methods +up+ and +down+ that describes the transformations
# required to implement or remove the migration. These methods can consist
# of both the migration specific methods like add_column and remove_column,
# but may also contain regular Ruby code for generating data needed for the
# transformations.
#
# Example of a more complex migration that also needs to initialize data:
#
# class AddSystemSettings < ActiveRecord::Migration
# def up
# create_table :system_settings do |t|
# t.string :name
# t.string :label
# t.text :value
# t.string :type
# t.integer :position
# end
#
# SystemSetting.create :name => "notice",
# :label => "Use notice?",
# :value => 1
# end
#
# def down
# drop_table :system_settings
# end
# end
#
# This migration first adds the system_settings table, then creates the very
# first row in it using the Active Record model that relies on the table. It
# also uses the more advanced create_table syntax where you can specify a
# complete table schema in one block call.
#
# == Available transformations
#
# * <tt>create_table(name, options)</tt> Creates a table called +name+ and
# makes the table object available to a block that can then add columns to it,
# following the same format as add_column. See example above. The options hash
# is for fragments like "DEFAULT CHARSET=UTF-8" that are appended to the create
# table definition.
# * <tt>drop_table(name)</tt>: Drops the table called +name+.
# * <tt>rename_table(old_name, new_name)</tt>: Renames the table called +old_name+
# to +new_name+.
# * <tt>add_column(table_name, column_name, type, options)</tt>: Adds a new column
# to the table called +table_name+
# named +column_name+ specified to be one of the following types:
# <tt>:string</tt>, <tt>:text</tt>, <tt>:integer</tt>, <tt>:float</tt>,
# <tt>:decimal</tt>, <tt>:datetime</tt>, <tt>:timestamp</tt>, <tt>:time</tt>,
# <tt>:date</tt>, <tt>:binary</tt>, <tt>:boolean</tt>. A default value can be
# specified by passing an +options+ hash like <tt>{ :default => 11 }</tt>.
# Other options include <tt>:limit</tt> and <tt>:null</tt> (e.g.
# <tt>{ :limit => 50, :null => false }</tt>) -- see
# ActiveRecord::ConnectionAdapters::TableDefinition#column for details.
# * <tt>rename_column(table_name, column_name, new_column_name)</tt>: Renames
# a column but keeps the type and content.
# * <tt>change_column(table_name, column_name, type, options)</tt>: Changes
# the column to a different type using the same parameters as add_column.
# * <tt>remove_column(table_name, column_names)</tt>: Removes the column listed in
# +column_names+ from the table called +table_name+.
# * <tt>add_index(table_name, column_names, options)</tt>: Adds a new index
# with the name of the column. Other options include
# <tt>:name</tt>, <tt>:unique</tt> (e.g.
# <tt>{ :name => "users_name_index", :unique => true }</tt>) and <tt>:order</tt>
# (e.g. <tt>{ :order => {:name => :desc} }</tt>).
# * <tt>remove_index(table_name, :column => column_name)</tt>: Removes the index
# specified by +column_name+.
# * <tt>remove_index(table_name, :name => index_name)</tt>: Removes the index
# specified by +index_name+.
#
# == Irreversible transformations
#
# Some transformations are destructive in a manner that cannot be reversed.
# Migrations of that kind should raise an <tt>ActiveRecord::IrreversibleMigration</tt>
# exception in their +down+ method.
#
# == Running migrations from within Rails
#
# The Rails package has several tools to help create and apply migrations.
#
# To generate a new migration, you can use
# rails generate migration MyNewMigration
#
# where MyNewMigration is the name of your migration. The generator will
# create an empty migration file <tt>timestamp_my_new_migration.rb</tt>
# in the <tt>db/migrate/</tt> directory where <tt>timestamp</tt> is the
# UTC formatted date and time that the migration was generated.
#
# You may then edit the <tt>up</tt> and <tt>down</tt> methods of
# MyNewMigration.
#
# There is a special syntactic shortcut to generate migrations that add fields to a table.
#
# rails generate migration add_fieldname_to_tablename fieldname:string
#
# This will generate the file <tt>timestamp_add_fieldname_to_tablename</tt>, which will look like this:
# class AddFieldnameToTablename < ActiveRecord::Migration
# def up
# add_column :tablenames, :fieldname, :string
# end
#
# def down
# remove_column :tablenames, :fieldname
# end
# end
#
# To run migrations against the currently configured database, use
# <tt>rake db:migrate</tt>. This will update the database by running all of the
# pending migrations, creating the <tt>schema_migrations</tt> table
# (see "About the schema_migrations table" section below) if missing. It will also
# invoke the db:schema:dump task, which will update your db/schema.rb file
# to match the structure of your database.
#
# To roll the database back to a previous migration version, use
# <tt>rake db:migrate VERSION=X</tt> where <tt>X</tt> is the version to which
# you wish to downgrade. If any of the migrations throw an
# <tt>ActiveRecord::IrreversibleMigration</tt> exception, that step will fail and you'll
# have some manual work to do.
#
# == Database support
#
# Migrations are currently supported in MySQL, PostgreSQL, SQLite,
# SQL Server, Sybase, and Oracle (all supported databases except DB2).
#
# == More examples
#
# Not all migrations change the schema. Some just fix the data:
#
# class RemoveEmptyTags < ActiveRecord::Migration
# def up
# Tag.all.each { |tag| tag.destroy if tag.pages.empty? }
# end
#
# def down
# # not much we can do to restore deleted data
# raise ActiveRecord::IrreversibleMigration, "Can't recover the deleted tags"
# end
# end
#
# Others remove columns when they migrate up instead of down:
#
# class RemoveUnnecessaryItemAttributes < ActiveRecord::Migration
# def up
# remove_column :items, :incomplete_items_count
# remove_column :items, :completed_items_count
# end
#
# def down
# add_column :items, :incomplete_items_count
# add_column :items, :completed_items_count
# end
# end
#
# And sometimes you need to do something in SQL not abstracted directly by migrations:
#
# class MakeJoinUnique < ActiveRecord::Migration
# def up
# execute "ALTER TABLE `pages_linked_pages` ADD UNIQUE `page_id_linked_page_id` (`page_id`,`linked_page_id`)"
# end
#
# def down
# execute "ALTER TABLE `pages_linked_pages` DROP INDEX `page_id_linked_page_id`"
# end
# end
#
# == Using a model after changing its table
#
# Sometimes you'll want to add a column in a migration and populate it
# immediately after. In that case, you'll need to make a call to
# <tt>Base#reset_column_information</tt> in order to ensure that the model has the
# latest column data from after the new column was added. Example:
#
# class AddPeopleSalary < ActiveRecord::Migration
# def up
# add_column :people, :salary, :integer
# Person.reset_column_information
# Person.all.each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# end
#
# == Controlling verbosity
#
# By default, migrations will describe the actions they are taking, writing
# them to the console as they happen, along with benchmarks describing how
# long each step took.
#
# You can quiet them down by setting ActiveRecord::Migration.verbose = false.
#
# You can also insert your own messages and benchmarks by using the +say_with_time+
# method:
#
# def up
# ...
# say_with_time "Updating salaries..." do
# Person.all.each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# ...
# end
#
# The phrase "Updating salaries..." would then be printed, along with the
# benchmark for the block when the block completes.
#
# == About the schema_migrations table
#
# Rails versions 2.0 and prior used to create a table called
# <tt>schema_info</tt> when using migrations. This table contained the
# version of the schema as of the last applied migration.
#
# Starting with Rails 2.1, the <tt>schema_info</tt> table is
# (automatically) replaced by the <tt>schema_migrations</tt> table, which
# contains the version numbers of all the migrations applied.
#
# As a result, it is now possible to add migration files that are numbered
# lower than the current schema version: when migrating up, those
# never-applied "interleaved" migrations will be automatically applied, and
# when migrating down, never-applied "interleaved" migrations will be skipped.
#
# == Timestamped Migrations
#
# By default, Rails generates migrations that look like:
#
# 20080717013526_your_migration_name.rb
#
# The prefix is a generation timestamp (in UTC).
#
# If you'd prefer to use numeric prefixes, you can turn timestamped migrations
# off by setting:
#
# config.active_record.timestamped_migrations = false
#
# In application.rb.
#
# == Reversible Migrations
#
# Starting with Rails 3.1, you will be able to define reversible migrations.
# Reversible migrations are migrations that know how to go +down+ for you.
# You simply supply the +up+ logic, and the Migration system will figure out
# how to execute the down commands for you.
#
# To define a reversible migration, define the +change+ method in your
# migration like this:
#
# class TenderloveMigration < ActiveRecord::Migration
# def change
# create_table(:horses) do |t|
# t.column :content, :text
# t.column :remind_at, :datetime
# end
# end
# end
#
# This migration will create the horses table for you on the way up, and
# automatically figure out how to drop the table on the way down.
#
# Some commands like +remove_column+ cannot be reversed. If you care to
# define how to move up and down in these cases, you should define the +up+
# and +down+ methods as before.
#
# If a command cannot be reversed, an
# <tt>ActiveRecord::IrreversibleMigration</tt> exception will be raised when
# the migration is moving down.
#
# For a list of commands that are reversible, please see
# <tt>ActiveRecord::Migration::CommandRecorder</tt>.
class Migration
autoload :CommandRecorder, 'active_record/migration/command_recorder'
class << self
attr_accessor :delegate # :nodoc:
end
# Class-level calls (add_column etc. written in the migration body)
# are forwarded to the delegate, i.e. the instance currently running.
def self.method_missing(name, *args, &block) # :nodoc:
(delegate || superclass.delegate).send(name, *args, &block)
end
def self.migrate(direction)
new.migrate direction
end
cattr_accessor :verbose
attr_accessor :name, :version
def initialize(name = self.class.name, version = nil)
@name = name
@version = version
@connection = nil
@reverting = false
end
# instantiate the delegate object after initialize is defined
self.verbose = true
self.delegate = new
# Run the block in "reverting" mode; method_missing uses this flag to
# skip table-name rewriting while replaying inverse commands.
def revert
@reverting = true
yield
ensure
@reverting = false
end
def reverting?
@reverting
end
def up
self.class.delegate = self
return unless self.class.respond_to?(:up)
self.class.up
end
def down
self.class.delegate = self
return unless self.class.respond_to?(:down)
self.class.down
end
# Execute this migration in the named direction
def migrate(direction)
return unless respond_to?(direction)
case direction
when :up then announce "migrating"
when :down then announce "reverting"
end
time = nil
ActiveRecord::Base.connection_pool.with_connection do |conn|
@connection = conn
if respond_to?(:change)
if direction == :down
# Reversible migration going down: record the commands +change+
# would run, then replay their inverses against the real connection.
recorder = CommandRecorder.new(@connection)
suppress_messages do
@connection = recorder
change
end
@connection = conn
time = Benchmark.measure {
self.revert {
recorder.inverse.each do |cmd, args|
send(cmd, *args)
end
}
}
else
time = Benchmark.measure { change }
end
else
time = Benchmark.measure { send(direction) }
end
@connection = nil
end
case direction
when :up then announce "migrated (%.4fs)" % time.real; write
when :down then announce "reverted (%.4fs)" % time.real; write
end
end
# Print +text+ when verbose output is enabled.
def write(text="")
puts(text) if verbose
end
# Write a "== version Name: message ===…" banner padded to ~75 columns.
def announce(message)
text = "#{version} #{name}: #{message}"
length = [0, 75 - text.length].max
write "== %s %s" % [text, "=" * length]
end
# Write a progress line; sub-items are indented with " ->".
def say(message, subitem=false)
write "#{subitem ? " ->" : "--"} #{message}"
end
# Announce +message+, yield, report elapsed time, and — when the block
# returns an Integer (rows affected) — report that count too.
def say_with_time(message)
say(message)
result = nil
time = Benchmark.measure { result = yield }
say "%.4fs" % time.real, :subitem
say("#{result} rows", :subitem) if result.is_a?(Integer)
result
end
# Run the block with verbose disabled, restoring it afterwards.
def suppress_messages
save, self.verbose = verbose, false
yield
ensure
self.verbose = save
end
# The connection captured for the current run (possibly a
# CommandRecorder), falling back to the global connection.
def connection
@connection || ActiveRecord::Base.connection
end
# Forward schema statements to the connection, logging each call.
# Table-name prefix/suffix rewriting is skipped when reverting and
# for raw +execute+ statements; rename_table rewrites both names.
def method_missing(method, *arguments, &block)
arg_list = arguments.map{ |a| a.inspect } * ', '
say_with_time "#{method}(#{arg_list})" do
unless reverting?
unless arguments.empty? || method == :execute
arguments[0] = Migrator.proper_table_name(arguments.first)
arguments[1] = Migrator.proper_table_name(arguments.second) if method == :rename_table
end
end
return super unless connection.respond_to?(method)
connection.send(method, *arguments, &block)
end
end
# Copy migrations from engine/plugin +sources+ (scope => path pairs)
# into +destination+, renumbering each and annotating its origin.
# Skips migrations already present by name; returns the copied proxies.
def copy(destination, sources, options = {})
copied = []
# NOTE(review): File.exists? is deprecated in newer Rubies (File.exist?)
FileUtils.mkdir_p(destination) unless File.exists?(destination)
destination_migrations = ActiveRecord::Migrator.migrations(destination)
last = destination_migrations.last
sources.each do |scope, path|
source_migrations = ActiveRecord::Migrator.migrations(path)
source_migrations.each do |migration|
source = File.read(migration.filename)
source = "# This migration comes from #{scope} (originally #{migration.version})\n#{source}"
if duplicate = destination_migrations.detect { |m| m.name == migration.name }
if options[:on_skip] && duplicate.scope != scope.to_s
options[:on_skip].call(scope, migration)
end
next
end
# renumber past the highest version already in the destination
migration.version = next_migration_number(last ? last.version + 1 : 0).to_i
new_path = File.join(destination, "#{migration.version}_#{migration.name.underscore}.#{scope}.rb")
old_path, migration.filename = migration.filename, new_path
last = migration
File.open(migration.filename, "w") { |f| f.write source }
copied << migration
options[:on_copy].call(scope, migration, old_path) if options[:on_copy]
destination_migrations << migration
end
end
copied
end
# Next migration number: a UTC timestamp (but never less than +number+,
# zero-padded to 14 digits) when timestamped migrations are on,
# otherwise a 3-digit sequential number.
def next_migration_number(number)
if ActiveRecord::Base.timestamped_migrations
[Time.now.utc.strftime("%Y%m%d%H%M%S"), "%.14d" % number].max
else
"%.3d" % number
end
end
end
# Lightweight stand-in for a migration: carries name/version/filename/scope
# and only requires the real migration file (and instantiates its class)
# the first time migrate/announce/write is invoked.
class MigrationProxy < Struct.new(:name, :version, :filename, :scope)
  def initialize(name, version, filename, scope)
    super
    @migration = nil
  end

  def basename
    File.basename(filename)
  end

  delegate :migrate, :announce, :write, :to => :migration

  private

  # Memoized real migration instance.
  def migration
    @migration = load_migration if @migration.nil?
    @migration
  end

  # Require the migration file and build an instance of its class.
  def load_migration
    require(File.expand_path(filename))
    name.constantize.new
  end
end
# Drives the execution of a set of migrations in a given direction,
# recording applied versions in the schema_migrations table.
class Migrator#:nodoc:
  class << self
    attr_writer :migrations_paths
    alias :migrations_path= :migrations_paths=

    # Migrates to +target_version+ (all pending migrations when nil),
    # choosing the direction by comparing against the current version.
    def migrate(migrations_paths, target_version = nil, &block)
      case
      when target_version.nil?
        up(migrations_paths, target_version, &block)
      when current_version == 0 && target_version == 0
        []
      when current_version > target_version
        down(migrations_paths, target_version, &block)
      else
        up(migrations_paths, target_version, &block)
      end
    end

    # Reverts the last +steps+ applied migrations.
    def rollback(migrations_paths, steps=1)
      move(:down, migrations_paths, steps)
    end

    # Applies the next +steps+ migrations after the current one.
    def forward(migrations_paths, steps=1)
      move(:up, migrations_paths, steps)
    end

    def up(migrations_paths, target_version = nil, &block)
      self.new(:up, migrations(migrations_paths), target_version).migrate(&block)
    end

    def down(migrations_paths, target_version = nil, &block)
      self.new(:down, migrations(migrations_paths), target_version).migrate(&block)
    end

    # Runs exactly one migration (the one matching +target_version+) in
    # the given direction, without touching any others.
    def run(direction, migrations_paths, target_version)
      self.new(direction, migrations(migrations_paths), target_version).run
    end

    # Returns a Migrator ready for inspection without migrating anything.
    def open(migrations_paths)
      self.new(:up, migrations(migrations_paths), nil)
    end

    def schema_migrations_table_name
      SchemaMigration.table_name
    end

    # All applied versions, as sorted integers.
    def get_all_versions
      SchemaMigration.all.map { |x| x.version.to_i }.sort
    end

    # Highest applied version, or 0 when the schema_migrations table
    # does not exist yet (fresh database).
    def current_version
      sm_table = schema_migrations_table_name
      if Base.connection.table_exists?(sm_table)
        get_all_versions.max || 0
      else
        0
      end
    end

    def proper_table_name(name)
      # Use the Active Record objects own table_name, or pre/suffix from ActiveRecord::Base if name is a symbol/string
      name.table_name rescue "#{ActiveRecord::Base.table_name_prefix}#{name}#{ActiveRecord::Base.table_name_suffix}"
    end

    def migrations_paths
      @migrations_paths ||= ['db/migrate']
      # just to not break things if someone uses: migration_path = some_string
      Array(@migrations_paths)
    end

    def migrations_path
      migrations_paths.first
    end

    # Builds sorted MigrationProxy objects for every migration file found
    # under +paths+. File names must match 123_some_name.rb, optionally
    # with a ".scope" segment before ".rb".
    def migrations(paths)
      paths = Array(paths)

      files = Dir[*paths.map { |p| "#{p}/**/[0-9]*_*.rb" }]

      migrations = files.map do |file|
        version, name, scope = file.scan(/([0-9]+)_([_a-z0-9]*)\.?([_a-z0-9]*)?.rb/).first
        raise IllegalMigrationNameError.new(file) unless version
        version = version.to_i
        name = name.camelize

        MigrationProxy.new(name, version, file, scope)
      end

      migrations.sort_by(&:version)
    end

    private

    # Shared implementation of rollback/forward: locate the current
    # migration and migrate to the one +steps+ away from it.
    def move(direction, migrations_paths, steps)
      migrator = self.new(direction, migrations(migrations_paths))
      start_index = migrator.migrations.index(migrator.current_migration)

      if start_index
        finish = migrator.migrations[start_index + steps]
        version = finish ? finish.version : 0
        send(direction, migrations_paths, version)
      end
    end
  end

  # +migrations+ should be a list of MigrationProxy objects; passing a
  # list of path strings is deprecated but still supported.
  def initialize(direction, migrations, target_version = nil)
    raise StandardError.new("This database does not yet support migrations") unless Base.connection.supports_migrations?

    @direction = direction
    @target_version = target_version

    if Array(migrations).grep(String).empty?
      @migrations = migrations
    else
      ActiveSupport::Deprecation.warn "instantiate this class with a list of migrations"
      @migrations = self.class.migrations(migrations)
    end

    validate(@migrations)

    Base.connection.initialize_schema_migrations_table
  end

  # Highest version recorded as migrated (0 on a fresh database).
  def current_version
    migrated.last || 0
  end

  def current_migration
    migrations.detect { |m| m.version == current_version }
  end

  # Runs only the target migration, skipping it when its recorded state
  # already matches the requested direction.
  def run
    target = migrations.detect { |m| m.version == @target_version }
    raise UnknownMigrationVersionError.new(@target_version) if target.nil?
    unless (up? && migrated.include?(target.version.to_i)) || (down? && !migrated.include?(target.version.to_i))
      target.migrate(@direction)
      record_version_state_after_migrating(target.version)
    end
  end

  # Runs every runnable migration between the current position and the
  # target, yielding each to +block+ (when given) as a filter.
  # Returns the list of migrations that actually ran.
  def migrate(&block)
    current = migrations.detect { |m| m.version == current_version }
    target = migrations.detect { |m| m.version == @target_version }

    if target.nil? && @target_version && @target_version > 0
      raise UnknownMigrationVersionError.new(@target_version)
    end

    start = up? ? 0 : (migrations.index(current) || 0)
    finish = migrations.index(target) || migrations.size - 1
    runnable = migrations[start..finish]

    # skip the last migration if we're headed down, but not ALL the way down
    runnable.pop if down? && target

    ran = []
    runnable.each do |migration|
      if block && !block.call(migration)
        next
      end

      Base.logger.info "Migrating to #{migration.name} (#{migration.version})" if Base.logger

      seen = migrated.include?(migration.version.to_i)

      # On our way up, we skip migrating the ones we've already migrated
      next if up? && seen

      # On our way down, we skip reverting the ones we've never migrated
      if down? && !seen
        migration.announce 'never migrated, skipping'; migration.write
        next
      end

      begin
        # Each migration runs (and is recorded) inside one DDL
        # transaction when the adapter supports it.
        ddl_transaction do
          migration.migrate(@direction)
          record_version_state_after_migrating(migration.version)
        end
        ran << migration
      rescue => e
        canceled_msg = Base.connection.supports_ddl_transactions? ? "this and " : ""
        raise StandardError, "An error has occurred, #{canceled_msg}all later migrations canceled:\n\n#{e}", e.backtrace
      end
    end
    ran
  end

  # Migrations in execution order (reversed when migrating down).
  def migrations
    down? ? @migrations.reverse : @migrations
  end

  def pending_migrations
    already_migrated = migrated
    migrations.reject { |m| already_migrated.include?(m.version.to_i) }
  end

  # Memoized list of applied versions.
  def migrated
    @migrated_versions ||= self.class.get_all_versions
  end

  private

  # Rejects duplicate migration names or versions up front.
  def validate(migrations)
    name ,= migrations.group_by(&:name).find { |_,v| v.length > 1 }
    raise DuplicateMigrationNameError.new(name) if name

    version ,= migrations.group_by(&:version).find { |_,v| v.length > 1 }
    raise DuplicateMigrationVersionError.new(version) if version
  end

  # Inserts or deletes the schema_migrations row for +version+ via Arel,
  # keeping the in-memory @migrated_versions cache in sync.
  def record_version_state_after_migrating(version)
    table = Arel::Table.new(self.class.schema_migrations_table_name)

    @migrated_versions ||= []
    if down?
      @migrated_versions.delete(version)
      stmt = table.where(table["version"].eq(version.to_s)).compile_delete
      Base.connection.delete stmt
    else
      @migrated_versions.push(version).sort!
      stmt = table.compile_insert table["version"] => version.to_s
      Base.connection.insert stmt
    end
  end

  def up?
    @direction == :up
  end

  def down?
    @direction == :down
  end

  # Wrap the migration in a transaction only if supported by the adapter.
  def ddl_transaction(&block)
    if Base.connection.supports_ddl_transactions?
      Base.transaction { block.call }
    else
      block.call
    end
  end
end
end
Use the schema migration model to create a new record.
require "active_support/core_ext/module/delegation"
require "active_support/core_ext/class/attribute_accessors"
require 'active_support/deprecation'
require 'active_record/schema_migration'
module ActiveRecord
# Exception that can be raised to stop migrations from going backwards.
# Raise it from a migration's +down+ method when the transformation
# cannot be undone.
class IrreversibleMigration < ActiveRecordError
end
# Raised when two migration files declare the same version number.
class DuplicateMigrationVersionError < ActiveRecordError#:nodoc:
  def initialize(version)
    message = "Multiple migrations have the version number #{version}"
    super(message)
  end
end
# Raised when two migration files declare the same class name.
class DuplicateMigrationNameError < ActiveRecordError#:nodoc:
  def initialize(name)
    message = "Multiple migrations have the name #{name}"
    super(message)
  end
end
# Raised when asked to migrate to a version no migration file defines.
class UnknownMigrationVersionError < ActiveRecordError #:nodoc:
  def initialize(version)
    message = "No migration with version number #{version}"
    super(message)
  end
end
# Raised when a migration file name does not parse into version + name.
class IllegalMigrationNameError < ActiveRecordError#:nodoc:
  def initialize(name)
    message = "Illegal name for migration file: #{name}\n\t(only lower case letters, numbers, and '_' allowed)"
    super(message)
  end
end
# = Active Record Migrations
#
# Migrations can manage the evolution of a schema used by several physical
# databases. It's a solution to the common problem of adding a field to make
# a new feature work in your local database, but being unsure of how to
# push that change to other developers and to the production server. With
# migrations, you can describe the transformations in self-contained classes
# that can be checked into version control systems and executed against
# another database that might be one, two, or five versions behind.
#
# Example of a simple migration:
#
# class AddSsl < ActiveRecord::Migration
# def up
# add_column :accounts, :ssl_enabled, :boolean, :default => 1
# end
#
# def down
# remove_column :accounts, :ssl_enabled
# end
# end
#
# This migration will add a boolean flag to the accounts table and remove it
# if you're backing out of the migration. It shows how all migrations have
# two methods +up+ and +down+ that describes the transformations
# required to implement or remove the migration. These methods can consist
# of both the migration specific methods like add_column and remove_column,
# but may also contain regular Ruby code for generating data needed for the
# transformations.
#
# Example of a more complex migration that also needs to initialize data:
#
# class AddSystemSettings < ActiveRecord::Migration
# def up
# create_table :system_settings do |t|
# t.string :name
# t.string :label
# t.text :value
# t.string :type
# t.integer :position
# end
#
# SystemSetting.create :name => "notice",
# :label => "Use notice?",
# :value => 1
# end
#
# def down
# drop_table :system_settings
# end
# end
#
# This migration first adds the system_settings table, then creates the very
# first row in it using the Active Record model that relies on the table. It
# also uses the more advanced create_table syntax where you can specify a
# complete table schema in one block call.
#
# == Available transformations
#
# * <tt>create_table(name, options)</tt> Creates a table called +name+ and
# makes the table object available to a block that can then add columns to it,
# following the same format as add_column. See example above. The options hash
# is for fragments like "DEFAULT CHARSET=UTF-8" that are appended to the create
# table definition.
# * <tt>drop_table(name)</tt>: Drops the table called +name+.
# * <tt>rename_table(old_name, new_name)</tt>: Renames the table called +old_name+
# to +new_name+.
# * <tt>add_column(table_name, column_name, type, options)</tt>: Adds a new column
# to the table called +table_name+
# named +column_name+ specified to be one of the following types:
# <tt>:string</tt>, <tt>:text</tt>, <tt>:integer</tt>, <tt>:float</tt>,
# <tt>:decimal</tt>, <tt>:datetime</tt>, <tt>:timestamp</tt>, <tt>:time</tt>,
# <tt>:date</tt>, <tt>:binary</tt>, <tt>:boolean</tt>. A default value can be
# specified by passing an +options+ hash like <tt>{ :default => 11 }</tt>.
# Other options include <tt>:limit</tt> and <tt>:null</tt> (e.g.
# <tt>{ :limit => 50, :null => false }</tt>) -- see
# ActiveRecord::ConnectionAdapters::TableDefinition#column for details.
# * <tt>rename_column(table_name, column_name, new_column_name)</tt>: Renames
# a column but keeps the type and content.
# * <tt>change_column(table_name, column_name, type, options)</tt>: Changes
# the column to a different type using the same parameters as add_column.
# * <tt>remove_column(table_name, column_names)</tt>: Removes the column listed in
# +column_names+ from the table called +table_name+.
# * <tt>add_index(table_name, column_names, options)</tt>: Adds a new index
# with the name of the column. Other options include
# <tt>:name</tt>, <tt>:unique</tt> (e.g.
# <tt>{ :name => "users_name_index", :unique => true }</tt>) and <tt>:order</tt>
# (e.g. { :order => {:name => :desc} }</tt>).
# * <tt>remove_index(table_name, :column => column_name)</tt>: Removes the index
# specified by +column_name+.
# * <tt>remove_index(table_name, :name => index_name)</tt>: Removes the index
# specified by +index_name+.
#
# == Irreversible transformations
#
# Some transformations are destructive in a manner that cannot be reversed.
# Migrations of that kind should raise an <tt>ActiveRecord::IrreversibleMigration</tt>
# exception in their +down+ method.
#
# == Running migrations from within Rails
#
# The Rails package has several tools to help create and apply migrations.
#
# To generate a new migration, you can use
# rails generate migration MyNewMigration
#
# where MyNewMigration is the name of your migration. The generator will
# create an empty migration file <tt>timestamp_my_new_migration.rb</tt>
# in the <tt>db/migrate/</tt> directory where <tt>timestamp</tt> is the
# UTC formatted date and time that the migration was generated.
#
# You may then edit the <tt>up</tt> and <tt>down</tt> methods of
# MyNewMigration.
#
# There is a special syntactic shortcut to generate migrations that add fields to a table.
#
# rails generate migration add_fieldname_to_tablename fieldname:string
#
# This will generate the file <tt>timestamp_add_fieldname_to_tablename</tt>, which will look like this:
# class AddFieldnameToTablename < ActiveRecord::Migration
# def up
# add_column :tablenames, :fieldname, :string
# end
#
# def down
# remove_column :tablenames, :fieldname
# end
# end
#
# To run migrations against the currently configured database, use
# <tt>rake db:migrate</tt>. This will update the database by running all of the
# pending migrations, creating the <tt>schema_migrations</tt> table
# (see "About the schema_migrations table" section below) if missing. It will also
# invoke the db:schema:dump task, which will update your db/schema.rb file
# to match the structure of your database.
#
# To roll the database back to a previous migration version, use
# <tt>rake db:migrate VERSION=X</tt> where <tt>X</tt> is the version to which
# you wish to downgrade. If any of the migrations throw an
# <tt>ActiveRecord::IrreversibleMigration</tt> exception, that step will fail and you'll
# have some manual work to do.
#
# == Database support
#
# Migrations are currently supported in MySQL, PostgreSQL, SQLite,
# SQL Server, Sybase, and Oracle (all supported databases except DB2).
#
# == More examples
#
# Not all migrations change the schema. Some just fix the data:
#
# class RemoveEmptyTags < ActiveRecord::Migration
# def up
# Tag.all.each { |tag| tag.destroy if tag.pages.empty? }
# end
#
# def down
# # not much we can do to restore deleted data
# raise ActiveRecord::IrreversibleMigration, "Can't recover the deleted tags"
# end
# end
#
# Others remove columns when they migrate up instead of down:
#
# class RemoveUnnecessaryItemAttributes < ActiveRecord::Migration
# def up
# remove_column :items, :incomplete_items_count
# remove_column :items, :completed_items_count
# end
#
# def down
# add_column :items, :incomplete_items_count
# add_column :items, :completed_items_count
# end
# end
#
# And sometimes you need to do something in SQL not abstracted directly by migrations:
#
# class MakeJoinUnique < ActiveRecord::Migration
# def up
# execute "ALTER TABLE `pages_linked_pages` ADD UNIQUE `page_id_linked_page_id` (`page_id`,`linked_page_id`)"
# end
#
# def down
# execute "ALTER TABLE `pages_linked_pages` DROP INDEX `page_id_linked_page_id`"
# end
# end
#
# == Using a model after changing its table
#
# Sometimes you'll want to add a column in a migration and populate it
# immediately after. In that case, you'll need to make a call to
# <tt>Base#reset_column_information</tt> in order to ensure that the model has the
# latest column data from after the new column was added. Example:
#
# class AddPeopleSalary < ActiveRecord::Migration
# def up
# add_column :people, :salary, :integer
# Person.reset_column_information
# Person.all.each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# end
#
# == Controlling verbosity
#
# By default, migrations will describe the actions they are taking, writing
# them to the console as they happen, along with benchmarks describing how
# long each step took.
#
# You can quiet them down by setting ActiveRecord::Migration.verbose = false.
#
# You can also insert your own messages and benchmarks by using the +say_with_time+
# method:
#
# def up
# ...
# say_with_time "Updating salaries..." do
# Person.all.each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# ...
# end
#
# The phrase "Updating salaries..." would then be printed, along with the
# benchmark for the block when the block completes.
#
# == About the schema_migrations table
#
# Rails versions 2.0 and prior used to create a table called
# <tt>schema_info</tt> when using migrations. This table contained the
# version of the schema as of the last applied migration.
#
# Starting with Rails 2.1, the <tt>schema_info</tt> table is
# (automatically) replaced by the <tt>schema_migrations</tt> table, which
# contains the version numbers of all the migrations applied.
#
# As a result, it is now possible to add migration files that are numbered
# lower than the current schema version: when migrating up, those
# never-applied "interleaved" migrations will be automatically applied, and
# when migrating down, never-applied "interleaved" migrations will be skipped.
#
# == Timestamped Migrations
#
# By default, Rails generates migrations that look like:
#
# 20080717013526_your_migration_name.rb
#
# The prefix is a generation timestamp (in UTC).
#
# If you'd prefer to use numeric prefixes, you can turn timestamped migrations
# off by setting:
#
# config.active_record.timestamped_migrations = false
#
# In application.rb.
#
# == Reversible Migrations
#
# Starting with Rails 3.1, you will be able to define reversible migrations.
# Reversible migrations are migrations that know how to go +down+ for you.
# You simply supply the +up+ logic, and the Migration system will figure out
# how to execute the down commands for you.
#
# To define a reversible migration, define the +change+ method in your
# migration like this:
#
# class TenderloveMigration < ActiveRecord::Migration
# def change
# create_table(:horses) do |t|
# t.column :content, :text
# t.column :remind_at, :datetime
# end
# end
# end
#
# This migration will create the horses table for you on the way up, and
# automatically figure out how to drop the table on the way down.
#
# Some commands like +remove_column+ cannot be reversed. If you care to
# define how to move up and down in these cases, you should define the +up+
# and +down+ methods as before.
#
# If a command cannot be reversed, an
# <tt>ActiveRecord::IrreversibleMigration</tt> exception will be raised when
# the migration is moving down.
#
# For a list of commands that are reversible, please see
# <tt>ActiveRecord::Migration::CommandRecorder</tt>.
# Base class for all migrations; see the long usage discussion above.
class Migration
  autoload :CommandRecorder, 'active_record/migration/command_recorder'

  class << self
    attr_accessor :delegate # :nodoc:
  end

  # Class-level schema calls (add_column etc. inside class-method style
  # up/down) are forwarded to the currently-executing instance.
  def self.method_missing(name, *args, &block) # :nodoc:
    (delegate || superclass.delegate).send(name, *args, &block)
  end

  def self.migrate(direction)
    new.migrate direction
  end

  cattr_accessor :verbose

  attr_accessor :name, :version

  def initialize(name = self.class.name, version = nil)
    @name = name
    @version = version
    @connection = nil
    @reverting = false
  end

  # instantiate the delegate object after initialize is defined
  self.verbose = true
  self.delegate = new

  # Runs the block with @reverting set, so recorded commands replayed by
  # a +change+ migration going down skip table-name rewriting.
  def revert
    @reverting = true
    yield
  ensure
    @reverting = false
  end

  def reverting?
    @reverting
  end

  def up
    self.class.delegate = self
    return unless self.class.respond_to?(:up)
    self.class.up
  end

  def down
    self.class.delegate = self
    return unless self.class.respond_to?(:down)
    self.class.down
  end

  # Execute this migration in the named direction
  def migrate(direction)
    return unless respond_to?(direction)

    case direction
    when :up then announce "migrating"
    when :down then announce "reverting"
    end

    time = nil
    ActiveRecord::Base.connection_pool.with_connection do |conn|
      @connection = conn
      if respond_to?(:change)
        if direction == :down
          # Replay +change+ against a CommandRecorder first, then run
          # the inverse of each recorded command to go down.
          recorder = CommandRecorder.new(@connection)
          suppress_messages do
            @connection = recorder
            change
          end
          @connection = conn
          time = Benchmark.measure {
            self.revert {
              recorder.inverse.each do |cmd, args|
                send(cmd, *args)
              end
            }
          }
        else
          time = Benchmark.measure { change }
        end
      else
        time = Benchmark.measure { send(direction) }
      end
      @connection = nil
    end

    case direction
    when :up then announce "migrated (%.4fs)" % time.real; write
    when :down then announce "reverted (%.4fs)" % time.real; write
    end
  end

  def write(text="")
    puts(text) if verbose
  end

  # Prints a "== version name: message ==..." banner padded to 75 chars.
  def announce(message)
    text = "#{version} #{name}: #{message}"
    length = [0, 75 - text.length].max
    write "== %s %s" % [text, "=" * length]
  end

  def say(message, subitem=false)
    write "#{subitem ? " ->" : "--"} #{message}"
  end

  # Prints +message+, runs the block, then reports its duration (and a
  # row count when the block returns an Integer). Returns the result.
  def say_with_time(message)
    say(message)
    result = nil
    time = Benchmark.measure { result = yield }
    say "%.4fs" % time.real, :subitem
    say("#{result} rows", :subitem) if result.is_a?(Integer)
    result
  end

  # Temporarily silences migration output for the duration of the block.
  def suppress_messages
    save, self.verbose = verbose, false
    yield
  ensure
    self.verbose = save
  end

  def connection
    @connection || ActiveRecord::Base.connection
  end

  # Forwards schema-statement calls to the connection, first rewriting
  # table-name arguments with the configured prefix/suffix (except when
  # reverting or executing raw SQL), timing each call via say_with_time.
  def method_missing(method, *arguments, &block)
    arg_list = arguments.map{ |a| a.inspect } * ', '

    say_with_time "#{method}(#{arg_list})" do
      unless reverting?
        unless arguments.empty? || method == :execute
          arguments[0] = Migrator.proper_table_name(arguments.first)
          arguments[1] = Migrator.proper_table_name(arguments.second) if method == :rename_table
        end
      end
      return super unless connection.respond_to?(method)
      connection.send(method, *arguments, &block)
    end
  end

  # Copies migrations from +sources+ (scope => path) into +destination+,
  # renumbering them past the newest one already there and tagging each
  # copied file with its originating scope. Returns the copied proxies.
  def copy(destination, sources, options = {})
    copied = []

    # NOTE(review): File.exists? is a deprecated alias of File.exist?.
    FileUtils.mkdir_p(destination) unless File.exists?(destination)

    destination_migrations = ActiveRecord::Migrator.migrations(destination)
    last = destination_migrations.last
    sources.each do |scope, path|
      source_migrations = ActiveRecord::Migrator.migrations(path)

      source_migrations.each do |migration|
        source = File.read(migration.filename)
        # Record provenance at the top of the copied file.
        source = "# This migration comes from #{scope} (originally #{migration.version})\n#{source}"

        if duplicate = destination_migrations.detect { |m| m.name == migration.name }
          # Same class name already present: report, but never overwrite.
          if options[:on_skip] && duplicate.scope != scope.to_s
            options[:on_skip].call(scope, migration)
          end
          next
        end

        # Renumber so the copy sorts after everything in destination.
        migration.version = next_migration_number(last ? last.version + 1 : 0).to_i
        new_path = File.join(destination, "#{migration.version}_#{migration.name.underscore}.#{scope}.rb")
        old_path, migration.filename = migration.filename, new_path
        last = migration

        File.open(migration.filename, "w") { |f| f.write source }
        copied << migration
        options[:on_copy].call(scope, migration, old_path) if options[:on_copy]
        destination_migrations << migration
      end
    end

    copied
  end

  # Next file-name prefix: a UTC timestamp when timestamped migrations
  # are enabled, otherwise a zero-padded sequence number.
  def next_migration_number(number)
    if ActiveRecord::Base.timestamped_migrations
      [Time.now.utc.strftime("%Y%m%d%H%M%S"), "%.14d" % number].max
    else
      "%.3d" % number
    end
  end
end
# MigrationProxy is used to defer loading of the actual migration classes
# until they are needed
class MigrationProxy < Struct.new(:name, :version, :filename, :scope)
  # Struct#initialize stores name/version/filename/scope; the real
  # migration object is loaded lazily on first delegated call.
  def initialize(name, version, filename, scope)
    super
    @migration = nil
  end

  # File name of the migration file, without the directory part.
  def basename
    File.basename(filename)
  end

  # Forward the interesting calls to the lazily-loaded migration instance.
  delegate :migrate, :announce, :write, :to => :migration

  private

  # Memoizes the loaded migration instance.
  def migration
    @migration ||= load_migration
  end

  # Requires the migration file and instantiates the class named by +name+.
  def load_migration
    require(File.expand_path(filename))
    name.constantize.new
  end
end
# Drives the execution of a set of migrations in a given direction,
# recording applied versions through the SchemaMigration model.
class Migrator#:nodoc:
  class << self
    attr_writer :migrations_paths
    alias :migrations_path= :migrations_paths=

    # Migrates to +target_version+ (all pending migrations when nil),
    # choosing the direction by comparing against the current version.
    def migrate(migrations_paths, target_version = nil, &block)
      case
      when target_version.nil?
        up(migrations_paths, target_version, &block)
      when current_version == 0 && target_version == 0
        []
      when current_version > target_version
        down(migrations_paths, target_version, &block)
      else
        up(migrations_paths, target_version, &block)
      end
    end

    # Reverts the last +steps+ applied migrations.
    def rollback(migrations_paths, steps=1)
      move(:down, migrations_paths, steps)
    end

    # Applies the next +steps+ migrations after the current one.
    def forward(migrations_paths, steps=1)
      move(:up, migrations_paths, steps)
    end

    def up(migrations_paths, target_version = nil, &block)
      self.new(:up, migrations(migrations_paths), target_version).migrate(&block)
    end

    def down(migrations_paths, target_version = nil, &block)
      self.new(:down, migrations(migrations_paths), target_version).migrate(&block)
    end

    # Runs exactly one migration (the one matching +target_version+) in
    # the given direction, without touching any others.
    def run(direction, migrations_paths, target_version)
      self.new(direction, migrations(migrations_paths), target_version).run
    end

    # Returns a Migrator ready for inspection without migrating anything.
    def open(migrations_paths)
      self.new(:up, migrations(migrations_paths), nil)
    end

    def schema_migrations_table_name
      SchemaMigration.table_name
    end

    # All applied versions, as sorted integers.
    def get_all_versions
      SchemaMigration.all.map { |x| x.version.to_i }.sort
    end

    # Highest applied version, or 0 when the schema_migrations table
    # does not exist yet (fresh database).
    def current_version
      sm_table = schema_migrations_table_name
      if Base.connection.table_exists?(sm_table)
        get_all_versions.max || 0
      else
        0
      end
    end

    def proper_table_name(name)
      # Use the Active Record objects own table_name, or pre/suffix from ActiveRecord::Base if name is a symbol/string
      name.table_name rescue "#{ActiveRecord::Base.table_name_prefix}#{name}#{ActiveRecord::Base.table_name_suffix}"
    end

    def migrations_paths
      @migrations_paths ||= ['db/migrate']
      # just to not break things if someone uses: migration_path = some_string
      Array(@migrations_paths)
    end

    def migrations_path
      migrations_paths.first
    end

    # Builds sorted MigrationProxy objects for every migration file found
    # under +paths+. File names must match 123_some_name.rb, optionally
    # with a ".scope" segment before ".rb".
    def migrations(paths)
      paths = Array(paths)

      files = Dir[*paths.map { |p| "#{p}/**/[0-9]*_*.rb" }]

      migrations = files.map do |file|
        version, name, scope = file.scan(/([0-9]+)_([_a-z0-9]*)\.?([_a-z0-9]*)?.rb/).first
        raise IllegalMigrationNameError.new(file) unless version
        version = version.to_i
        name = name.camelize

        MigrationProxy.new(name, version, file, scope)
      end

      migrations.sort_by(&:version)
    end

    private

    # Shared implementation of rollback/forward: locate the current
    # migration and migrate to the one +steps+ away from it.
    def move(direction, migrations_paths, steps)
      migrator = self.new(direction, migrations(migrations_paths))
      start_index = migrator.migrations.index(migrator.current_migration)

      if start_index
        finish = migrator.migrations[start_index + steps]
        version = finish ? finish.version : 0
        send(direction, migrations_paths, version)
      end
    end
  end

  # +migrations+ should be a list of MigrationProxy objects; passing a
  # list of path strings is deprecated but still supported.
  def initialize(direction, migrations, target_version = nil)
    raise StandardError.new("This database does not yet support migrations") unless Base.connection.supports_migrations?

    @direction = direction
    @target_version = target_version

    if Array(migrations).grep(String).empty?
      @migrations = migrations
    else
      ActiveSupport::Deprecation.warn "instantiate this class with a list of migrations"
      @migrations = self.class.migrations(migrations)
    end

    validate(@migrations)

    Base.connection.initialize_schema_migrations_table
  end

  # Highest version recorded as migrated (0 on a fresh database).
  def current_version
    migrated.last || 0
  end

  def current_migration
    migrations.detect { |m| m.version == current_version }
  end

  # Runs only the target migration, skipping it when its recorded state
  # already matches the requested direction.
  def run
    target = migrations.detect { |m| m.version == @target_version }
    raise UnknownMigrationVersionError.new(@target_version) if target.nil?
    unless (up? && migrated.include?(target.version.to_i)) || (down? && !migrated.include?(target.version.to_i))
      target.migrate(@direction)
      record_version_state_after_migrating(target.version)
    end
  end

  # Runs every runnable migration between the current position and the
  # target, yielding each to +block+ (when given) as a filter.
  # Returns the list of migrations that actually ran.
  def migrate(&block)
    current = migrations.detect { |m| m.version == current_version }
    target = migrations.detect { |m| m.version == @target_version }

    if target.nil? && @target_version && @target_version > 0
      raise UnknownMigrationVersionError.new(@target_version)
    end

    start = up? ? 0 : (migrations.index(current) || 0)
    finish = migrations.index(target) || migrations.size - 1
    runnable = migrations[start..finish]

    # skip the last migration if we're headed down, but not ALL the way down
    runnable.pop if down? && target

    ran = []
    runnable.each do |migration|
      if block && !block.call(migration)
        next
      end

      Base.logger.info "Migrating to #{migration.name} (#{migration.version})" if Base.logger

      seen = migrated.include?(migration.version.to_i)

      # On our way up, we skip migrating the ones we've already migrated
      next if up? && seen

      # On our way down, we skip reverting the ones we've never migrated
      if down? && !seen
        migration.announce 'never migrated, skipping'; migration.write
        next
      end

      begin
        # Each migration runs (and is recorded) inside one DDL
        # transaction when the adapter supports it.
        ddl_transaction do
          migration.migrate(@direction)
          record_version_state_after_migrating(migration.version)
        end
        ran << migration
      rescue => e
        canceled_msg = Base.connection.supports_ddl_transactions? ? "this and " : ""
        raise StandardError, "An error has occurred, #{canceled_msg}all later migrations canceled:\n\n#{e}", e.backtrace
      end
    end
    ran
  end

  # Migrations in execution order (reversed when migrating down).
  def migrations
    down? ? @migrations.reverse : @migrations
  end

  def pending_migrations
    already_migrated = migrated
    migrations.reject { |m| already_migrated.include?(m.version.to_i) }
  end

  # Memoized list of applied versions.
  def migrated
    @migrated_versions ||= self.class.get_all_versions
  end

  private

  # Rejects duplicate migration names or versions up front.
  def validate(migrations)
    name ,= migrations.group_by(&:name).find { |_,v| v.length > 1 }
    raise DuplicateMigrationNameError.new(name) if name

    version ,= migrations.group_by(&:version).find { |_,v| v.length > 1 }
    raise DuplicateMigrationVersionError.new(version) if version
  end

  # Records or removes the schema_migrations row for +version+, keeping
  # the in-memory @migrated_versions cache in sync. Down deletes via
  # Arel; up creates the row through the SchemaMigration model.
  def record_version_state_after_migrating(version)
    table = Arel::Table.new(self.class.schema_migrations_table_name)

    @migrated_versions ||= []
    if down?
      @migrated_versions.delete(version)
      stmt = table.where(table["version"].eq(version.to_s)).compile_delete
      Base.connection.delete stmt
    else
      @migrated_versions.push(version).sort!
      ActiveRecord::SchemaMigration.create!(:version => version.to_s)
    end
  end

  def up?
    @direction == :up
  end

  def down?
    @direction == :down
  end

  # Wrap the migration in a transaction only if supported by the adapter.
  def ddl_transaction(&block)
    if Base.connection.supports_ddl_transactions?
      Base.transaction { block.call }
    else
      block.call
    end
  end
end
end
|
# frozen_string_literal: true
require "set"
require "zlib"
require "active_support/core_ext/module/attribute_accessors"
module ActiveRecord
# Base class for migration-related errors; pads the message with blank
# lines so it stands out in console output.
class MigrationError < ActiveRecordError#:nodoc:
  def initialize(message = nil)
    padded = message && "\n\n#{message}\n\n"
    super(padded)
  end
end
# Exception that can be raised to stop migrations from being rolled back.
# For example the following migration is not reversible.
# Rolling back this migration will raise an ActiveRecord::IrreversibleMigration error.
#
# class IrreversibleMigrationExample < ActiveRecord::Migration[5.0]
# def change
# create_table :distributors do |t|
# t.string :zipcode
# end
#
# execute <<-SQL
# ALTER TABLE distributors
# ADD CONSTRAINT zipchk
# CHECK (char_length(zipcode) = 5) NO INHERIT;
# SQL
# end
# end
#
# There are two ways to mitigate this problem.
#
# 1. Define <tt>#up</tt> and <tt>#down</tt> methods instead of <tt>#change</tt>:
#
# class ReversibleMigrationExample < ActiveRecord::Migration[5.0]
# def up
# create_table :distributors do |t|
# t.string :zipcode
# end
#
# execute <<-SQL
# ALTER TABLE distributors
# ADD CONSTRAINT zipchk
# CHECK (char_length(zipcode) = 5) NO INHERIT;
# SQL
# end
#
# def down
# execute <<-SQL
# ALTER TABLE distributors
# DROP CONSTRAINT zipchk
# SQL
#
# drop_table :distributors
# end
# end
#
# 2. Use the #reversible method in <tt>#change</tt> method:
#
# class ReversibleMigrationExample < ActiveRecord::Migration[5.0]
# def change
# create_table :distributors do |t|
# t.string :zipcode
# end
#
# reversible do |dir|
# dir.up do
# execute <<-SQL
# ALTER TABLE distributors
# ADD CONSTRAINT zipchk
# CHECK (char_length(zipcode) = 5) NO INHERIT;
# SQL
# end
#
# dir.down do
# execute <<-SQL
# ALTER TABLE distributors
# DROP CONSTRAINT zipchk
# SQL
# end
# end
# end
# end
# Raised when a migration (or a reverted +change+) cannot be rolled
# back; see the reversible-migration discussion above.
class IrreversibleMigration < MigrationError
end
# Raised when two migration files declare the same version number.
class DuplicateMigrationVersionError < MigrationError#:nodoc:
  # Accepts the offending version; falls back to a generic message.
  def initialize(version = nil)
    message = version ? "Multiple migrations have the version number #{version}." : "Duplicate migration version error."
    super(message)
  end
end
# Raised when two migration files declare the same class name.
class DuplicateMigrationNameError < MigrationError#:nodoc:
  # Accepts the offending name; falls back to a generic message.
  def initialize(name = nil)
    message = name ? "Multiple migrations have the name #{name}." : "Duplicate migration name."
    super(message)
  end
end
# Raised when asked to migrate to a version no migration file defines.
class UnknownMigrationVersionError < MigrationError #:nodoc:
  # Accepts the requested version; falls back to a generic message.
  def initialize(version = nil)
    message = version ? "No migration with version number #{version}." : "Unknown migration version."
    super(message)
  end
end
# Raised when a migration file name does not parse into version + name.
class IllegalMigrationNameError < MigrationError#:nodoc:
  # Accepts the offending file name; falls back to a generic message.
  def initialize(name = nil)
    message = name ? "Illegal name for migration file: #{name}\n\t(only lower case letters, numbers, and '_' allowed)." : "Illegal name for migration."
    super(message)
  end
end
# Raised when the schema has migrations that have not been run yet.
class PendingMigrationError < MigrationError#:nodoc:
  def initialize(message = nil)
    if message
      super(message)
    elsif defined?(Rails.env)
      # Include the environment so the suggested command is copy-pasteable.
      super("Migrations are pending. To resolve this issue, run:\n\n bin/rails db:migrate RAILS_ENV=#{::Rails.env}")
    else
      super("Migrations are pending. To resolve this issue, run:\n\n bin/rails db:migrate")
    end
  end
end
# Raised when another migration process already holds the migration lock.
class ConcurrentMigrationError < MigrationError #:nodoc:
  DEFAULT_MESSAGE = "Cannot run migrations because another migration process is currently running.".freeze

  def initialize(message = DEFAULT_MESSAGE)
    super(message)
  end
end
class NoEnvironmentInSchemaError < MigrationError #:nodoc:
def initialize
msg = "Environment data not found in the schema. To resolve this issue, run: \n\n bin/rails db:environment:set"
if defined?(Rails.env)
super("#{msg} RAILS_ENV=#{::Rails.env}")
else
super(msg)
end
end
end
class ProtectedEnvironmentError < ActiveRecordError #:nodoc:
def initialize(env = "production")
msg = "You are attempting to run a destructive action against your '#{env}' database.\n".dup
msg << "If you are sure you want to continue, run the same command with the environment variable:\n"
msg << "DISABLE_DATABASE_ENVIRONMENT_CHECK=1"
super(msg)
end
end
class EnvironmentMismatchError < ActiveRecordError
def initialize(current: nil, stored: nil)
msg = "You are attempting to modify a database that was last run in `#{ stored }` environment.\n".dup
msg << "You are running in `#{ current }` environment. "
msg << "If you are sure you want to continue, first set the environment using:\n\n"
msg << " bin/rails db:environment:set"
if defined?(Rails.env)
super("#{msg} RAILS_ENV=#{::Rails.env}\n\n")
else
super("#{msg}\n\n")
end
end
end
# = Active Record Migrations
#
# Migrations can manage the evolution of a schema used by several physical
# databases. It's a solution to the common problem of adding a field to make
# a new feature work in your local database, but being unsure of how to
# push that change to other developers and to the production server. With
# migrations, you can describe the transformations in self-contained classes
# that can be checked into version control systems and executed against
# another database that might be one, two, or five versions behind.
#
# Example of a simple migration:
#
# class AddSsl < ActiveRecord::Migration[5.0]
# def up
# add_column :accounts, :ssl_enabled, :boolean, default: true
# end
#
# def down
# remove_column :accounts, :ssl_enabled
# end
# end
#
# This migration will add a boolean flag to the accounts table and remove it
# if you're backing out of the migration. It shows how all migrations have
  # two methods +up+ and +down+ that describe the transformations
# required to implement or remove the migration. These methods can consist
# of both the migration specific methods like +add_column+ and +remove_column+,
# but may also contain regular Ruby code for generating data needed for the
# transformations.
#
# Example of a more complex migration that also needs to initialize data:
#
# class AddSystemSettings < ActiveRecord::Migration[5.0]
# def up
# create_table :system_settings do |t|
# t.string :name
# t.string :label
# t.text :value
# t.string :type
# t.integer :position
# end
#
# SystemSetting.create name: 'notice',
# label: 'Use notice?',
# value: 1
# end
#
# def down
# drop_table :system_settings
# end
# end
#
# This migration first adds the +system_settings+ table, then creates the very
# first row in it using the Active Record model that relies on the table. It
# also uses the more advanced +create_table+ syntax where you can specify a
# complete table schema in one block call.
#
# == Available transformations
#
# === Creation
#
# * <tt>create_join_table(table_1, table_2, options)</tt>: Creates a join
# table having its name as the lexical order of the first two
# arguments. See
# ActiveRecord::ConnectionAdapters::SchemaStatements#create_join_table for
# details.
# * <tt>create_table(name, options)</tt>: Creates a table called +name+ and
# makes the table object available to a block that can then add columns to it,
# following the same format as +add_column+. See example above. The options hash
# is for fragments like "DEFAULT CHARSET=UTF-8" that are appended to the create
# table definition.
# * <tt>add_column(table_name, column_name, type, options)</tt>: Adds a new column
# to the table called +table_name+
# named +column_name+ specified to be one of the following types:
# <tt>:string</tt>, <tt>:text</tt>, <tt>:integer</tt>, <tt>:float</tt>,
# <tt>:decimal</tt>, <tt>:datetime</tt>, <tt>:timestamp</tt>, <tt>:time</tt>,
# <tt>:date</tt>, <tt>:binary</tt>, <tt>:boolean</tt>. A default value can be
# specified by passing an +options+ hash like <tt>{ default: 11 }</tt>.
# Other options include <tt>:limit</tt> and <tt>:null</tt> (e.g.
# <tt>{ limit: 50, null: false }</tt>) -- see
# ActiveRecord::ConnectionAdapters::TableDefinition#column for details.
# * <tt>add_foreign_key(from_table, to_table, options)</tt>: Adds a new
# foreign key. +from_table+ is the table with the key column, +to_table+ contains
# the referenced primary key.
# * <tt>add_index(table_name, column_names, options)</tt>: Adds a new index
# with the name of the column. Other options include
# <tt>:name</tt>, <tt>:unique</tt> (e.g.
# <tt>{ name: 'users_name_index', unique: true }</tt>) and <tt>:order</tt>
# (e.g. <tt>{ order: { name: :desc } }</tt>).
# * <tt>add_reference(:table_name, :reference_name)</tt>: Adds a new column
# +reference_name_id+ by default an integer. See
# ActiveRecord::ConnectionAdapters::SchemaStatements#add_reference for details.
# * <tt>add_timestamps(table_name, options)</tt>: Adds timestamps (+created_at+
# and +updated_at+) columns to +table_name+.
#
# === Modification
#
# * <tt>change_column(table_name, column_name, type, options)</tt>: Changes
# the column to a different type using the same parameters as add_column.
# * <tt>change_column_default(table_name, column_name, default_or_changes)</tt>:
# Sets a default value for +column_name+ defined by +default_or_changes+ on
# +table_name+. Passing a hash containing <tt>:from</tt> and <tt>:to</tt>
# as +default_or_changes+ will make this change reversible in the migration.
# * <tt>change_column_null(table_name, column_name, null, default = nil)</tt>:
# Sets or removes a +NOT NULL+ constraint on +column_name+. The +null+ flag
# indicates whether the value can be +NULL+. See
# ActiveRecord::ConnectionAdapters::SchemaStatements#change_column_null for
# details.
  # * <tt>change_table(name, options)</tt>: Allows you to make column alterations to
# the table called +name+. It makes the table object available to a block that
# can then add/remove columns, indexes or foreign keys to it.
# * <tt>rename_column(table_name, column_name, new_column_name)</tt>: Renames
# a column but keeps the type and content.
# * <tt>rename_index(table_name, old_name, new_name)</tt>: Renames an index.
# * <tt>rename_table(old_name, new_name)</tt>: Renames the table called +old_name+
# to +new_name+.
#
# === Deletion
#
# * <tt>drop_table(name)</tt>: Drops the table called +name+.
# * <tt>drop_join_table(table_1, table_2, options)</tt>: Drops the join table
# specified by the given arguments.
# * <tt>remove_column(table_name, column_name, type, options)</tt>: Removes the column
# named +column_name+ from the table called +table_name+.
# * <tt>remove_columns(table_name, *column_names)</tt>: Removes the given
# columns from the table definition.
# * <tt>remove_foreign_key(from_table, options_or_to_table)</tt>: Removes the
# given foreign key from the table called +table_name+.
# * <tt>remove_index(table_name, column: column_names)</tt>: Removes the index
# specified by +column_names+.
# * <tt>remove_index(table_name, name: index_name)</tt>: Removes the index
# specified by +index_name+.
# * <tt>remove_reference(table_name, ref_name, options)</tt>: Removes the
# reference(s) on +table_name+ specified by +ref_name+.
# * <tt>remove_timestamps(table_name, options)</tt>: Removes the timestamp
# columns (+created_at+ and +updated_at+) from the table definition.
#
# == Irreversible transformations
#
# Some transformations are destructive in a manner that cannot be reversed.
# Migrations of that kind should raise an <tt>ActiveRecord::IrreversibleMigration</tt>
# exception in their +down+ method.
#
# == Running migrations from within Rails
#
# The Rails package has several tools to help create and apply migrations.
#
# To generate a new migration, you can use
# rails generate migration MyNewMigration
#
# where MyNewMigration is the name of your migration. The generator will
# create an empty migration file <tt>timestamp_my_new_migration.rb</tt>
# in the <tt>db/migrate/</tt> directory where <tt>timestamp</tt> is the
# UTC formatted date and time that the migration was generated.
#
# There is a special syntactic shortcut to generate migrations that add fields to a table.
#
# rails generate migration add_fieldname_to_tablename fieldname:string
#
# This will generate the file <tt>timestamp_add_fieldname_to_tablename.rb</tt>, which will look like this:
# class AddFieldnameToTablename < ActiveRecord::Migration[5.0]
# def change
# add_column :tablenames, :fieldname, :string
# end
# end
#
# To run migrations against the currently configured database, use
# <tt>rails db:migrate</tt>. This will update the database by running all of the
# pending migrations, creating the <tt>schema_migrations</tt> table
# (see "About the schema_migrations table" section below) if missing. It will also
# invoke the db:schema:dump task, which will update your db/schema.rb file
# to match the structure of your database.
#
# To roll the database back to a previous migration version, use
# <tt>rails db:migrate VERSION=X</tt> where <tt>X</tt> is the version to which
# you wish to downgrade. Alternatively, you can also use the STEP option if you
# wish to rollback last few migrations. <tt>rails db:migrate STEP=2</tt> will rollback
# the latest two migrations.
#
# If any of the migrations throw an <tt>ActiveRecord::IrreversibleMigration</tt> exception,
# that step will fail and you'll have some manual work to do.
#
# == Database support
#
# Migrations are currently supported in MySQL, PostgreSQL, SQLite,
# SQL Server, and Oracle (all supported databases except DB2).
#
# == More examples
#
# Not all migrations change the schema. Some just fix the data:
#
# class RemoveEmptyTags < ActiveRecord::Migration[5.0]
# def up
# Tag.all.each { |tag| tag.destroy if tag.pages.empty? }
# end
#
# def down
# # not much we can do to restore deleted data
# raise ActiveRecord::IrreversibleMigration, "Can't recover the deleted tags"
# end
# end
#
# Others remove columns when they migrate up instead of down:
#
# class RemoveUnnecessaryItemAttributes < ActiveRecord::Migration[5.0]
# def up
# remove_column :items, :incomplete_items_count
# remove_column :items, :completed_items_count
# end
#
# def down
# add_column :items, :incomplete_items_count
# add_column :items, :completed_items_count
# end
# end
#
# And sometimes you need to do something in SQL not abstracted directly by migrations:
#
# class MakeJoinUnique < ActiveRecord::Migration[5.0]
# def up
# execute "ALTER TABLE `pages_linked_pages` ADD UNIQUE `page_id_linked_page_id` (`page_id`,`linked_page_id`)"
# end
#
# def down
# execute "ALTER TABLE `pages_linked_pages` DROP INDEX `page_id_linked_page_id`"
# end
# end
#
# == Using a model after changing its table
#
# Sometimes you'll want to add a column in a migration and populate it
# immediately after. In that case, you'll need to make a call to
# <tt>Base#reset_column_information</tt> in order to ensure that the model has the
# latest column data from after the new column was added. Example:
#
# class AddPeopleSalary < ActiveRecord::Migration[5.0]
# def up
# add_column :people, :salary, :integer
# Person.reset_column_information
# Person.all.each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# end
#
# == Controlling verbosity
#
# By default, migrations will describe the actions they are taking, writing
# them to the console as they happen, along with benchmarks describing how
# long each step took.
#
# You can quiet them down by setting ActiveRecord::Migration.verbose = false.
#
# You can also insert your own messages and benchmarks by using the +say_with_time+
# method:
#
# def up
# ...
# say_with_time "Updating salaries..." do
# Person.all.each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# ...
# end
#
# The phrase "Updating salaries..." would then be printed, along with the
# benchmark for the block when the block completes.
#
# == Timestamped Migrations
#
# By default, Rails generates migrations that look like:
#
# 20080717013526_your_migration_name.rb
#
# The prefix is a generation timestamp (in UTC).
#
# If you'd prefer to use numeric prefixes, you can turn timestamped migrations
# off by setting:
#
# config.active_record.timestamped_migrations = false
#
# In application.rb.
#
# == Reversible Migrations
#
# Reversible migrations are migrations that know how to go +down+ for you.
# You simply supply the +up+ logic, and the Migration system figures out
# how to execute the down commands for you.
#
# To define a reversible migration, define the +change+ method in your
# migration like this:
#
# class TenderloveMigration < ActiveRecord::Migration[5.0]
# def change
# create_table(:horses) do |t|
# t.column :content, :text
# t.column :remind_at, :datetime
# end
# end
# end
#
# This migration will create the horses table for you on the way up, and
# automatically figure out how to drop the table on the way down.
#
# Some commands like +remove_column+ cannot be reversed. If you care to
# define how to move up and down in these cases, you should define the +up+
# and +down+ methods as before.
#
# If a command cannot be reversed, an
# <tt>ActiveRecord::IrreversibleMigration</tt> exception will be raised when
# the migration is moving down.
#
# For a list of commands that are reversible, please see
# <tt>ActiveRecord::Migration::CommandRecorder</tt>.
#
# == Transactional Migrations
#
# If the database adapter supports DDL transactions, all migrations will
# automatically be wrapped in a transaction. There are queries that you
# can't execute inside a transaction though, and for these situations
# you can turn the automatic transactions off.
#
# class ChangeEnum < ActiveRecord::Migration[5.0]
# disable_ddl_transaction!
#
# def up
# execute "ALTER TYPE model_size ADD VALUE 'new_value'"
# end
# end
#
# Remember that you can still open your own transactions, even if you
# are in a Migration with <tt>self.disable_ddl_transaction!</tt>.
class Migration
autoload :CommandRecorder, "active_record/migration/command_recorder"
autoload :Compatibility, "active_record/migration/compatibility"
# This must be defined before the inherited hook, below
class Current < Migration # :nodoc:
end
def self.inherited(subclass) # :nodoc:
super
if subclass.superclass == Migration
raise StandardError, "Directly inheriting from ActiveRecord::Migration is not supported. " \
"Please specify the Rails release the migration was written for:\n" \
"\n" \
" class #{subclass} < ActiveRecord::Migration[4.2]"
end
end
def self.[](version)
Compatibility.find(version)
end
def self.current_version
ActiveRecord::VERSION::STRING.to_f
end
MigrationFilenameRegexp = /\A([0-9]+)_([_a-z0-9]*)\.?([_a-z0-9]*)?\.rb\z/ # :nodoc:
# This class is used to verify that all migrations have been run before
# loading a web page if <tt>config.active_record.migration_error</tt> is set to :page_load
class CheckPending
def initialize(app)
@app = app
@last_check = 0
end
def call(env)
mtime = ActiveRecord::Migrator.last_migration.mtime.to_i
if @last_check < mtime
ActiveRecord::Migration.check_pending!(connection)
@last_check = mtime
end
@app.call(env)
end
private
def connection
ActiveRecord::Base.connection
end
end
class << self
attr_accessor :delegate # :nodoc:
attr_accessor :disable_ddl_transaction # :nodoc:
def nearest_delegate # :nodoc:
delegate || superclass.nearest_delegate
end
# Raises <tt>ActiveRecord::PendingMigrationError</tt> error if any migrations are pending.
def check_pending!(connection = Base.connection)
raise ActiveRecord::PendingMigrationError if ActiveRecord::Migrator.needs_migration?(connection)
end
def load_schema_if_pending!
if ActiveRecord::Migrator.needs_migration? || !ActiveRecord::Migrator.any_migrations?
# Roundtrip to Rake to allow plugins to hook into database initialization.
FileUtils.cd Rails.root do
current_config = Base.connection_config
Base.clear_all_connections!
system("bin/rails db:test:prepare")
# Establish a new connection, the old database may be gone (db:test:prepare uses purge)
Base.establish_connection(current_config)
end
check_pending!
end
end
def maintain_test_schema! # :nodoc:
if ActiveRecord::Base.maintain_test_schema
suppress_messages { load_schema_if_pending! }
end
end
def method_missing(name, *args, &block) # :nodoc:
nearest_delegate.send(name, *args, &block)
end
def migrate(direction)
new.migrate direction
end
# Disable the transaction wrapping this migration.
# You can still create your own transactions even after calling #disable_ddl_transaction!
#
# For more details read the {"Transactional Migrations" section above}[rdoc-ref:Migration].
def disable_ddl_transaction!
@disable_ddl_transaction = true
end
end
def disable_ddl_transaction # :nodoc:
self.class.disable_ddl_transaction
end
cattr_accessor :verbose
attr_accessor :name, :version
def initialize(name = self.class.name, version = nil)
@name = name
@version = version
@connection = nil
end
self.verbose = true
# instantiate the delegate object after initialize is defined
self.delegate = new
# Reverses the migration commands for the given block and
# the given migrations.
#
# The following migration will remove the table 'horses'
# and create the table 'apples' on the way up, and the reverse
# on the way down.
#
# class FixTLMigration < ActiveRecord::Migration[5.0]
# def change
# revert do
# create_table(:horses) do |t|
# t.text :content
# t.datetime :remind_at
# end
# end
# create_table(:apples) do |t|
# t.string :variety
# end
# end
# end
#
# Or equivalently, if +TenderloveMigration+ is defined as in the
# documentation for Migration:
#
# require_relative '20121212123456_tenderlove_migration'
#
# class FixupTLMigration < ActiveRecord::Migration[5.0]
# def change
# revert TenderloveMigration
#
# create_table(:apples) do |t|
# t.string :variety
# end
# end
# end
#
# This command can be nested.
def revert(*migration_classes)
run(*migration_classes.reverse, revert: true) unless migration_classes.empty?
if block_given?
if connection.respond_to? :revert
connection.revert { yield }
else
recorder = CommandRecorder.new(connection)
@connection = recorder
suppress_messages do
connection.revert { yield }
end
@connection = recorder.delegate
recorder.commands.each do |cmd, args, block|
send(cmd, *args, &block)
end
end
end
end
def reverting?
connection.respond_to?(:reverting) && connection.reverting
end
ReversibleBlockHelper = Struct.new(:reverting) do # :nodoc:
def up
yield unless reverting
end
def down
yield if reverting
end
end
# Used to specify an operation that can be run in one direction or another.
# Call the methods +up+ and +down+ of the yielded object to run a block
# only in one given direction.
# The whole block will be called in the right order within the migration.
#
# In the following example, the looping on users will always be done
# when the three columns 'first_name', 'last_name' and 'full_name' exist,
# even when migrating down:
#
# class SplitNameMigration < ActiveRecord::Migration[5.0]
# def change
# add_column :users, :first_name, :string
# add_column :users, :last_name, :string
#
# reversible do |dir|
# User.reset_column_information
# User.all.each do |u|
# dir.up { u.first_name, u.last_name = u.full_name.split(' ') }
# dir.down { u.full_name = "#{u.first_name} #{u.last_name}" }
# u.save
# end
# end
#
# revert { add_column :users, :full_name, :string }
# end
# end
def reversible
helper = ReversibleBlockHelper.new(reverting?)
execute_block { yield helper }
end
# Runs the given migration classes.
# Last argument can specify options:
# - :direction (default is :up)
# - :revert (default is false)
def run(*migration_classes)
opts = migration_classes.extract_options!
dir = opts[:direction] || :up
dir = (dir == :down ? :up : :down) if opts[:revert]
if reverting?
# If in revert and going :up, say, we want to execute :down without reverting, so
revert { run(*migration_classes, direction: dir, revert: true) }
else
migration_classes.each do |migration_class|
migration_class.new.exec_migration(connection, dir)
end
end
end
def up
self.class.delegate = self
return unless self.class.respond_to?(:up)
self.class.up
end
def down
self.class.delegate = self
return unless self.class.respond_to?(:down)
self.class.down
end
# Execute this migration in the named direction
def migrate(direction)
return unless respond_to?(direction)
case direction
when :up then announce "migrating"
when :down then announce "reverting"
end
time = nil
ActiveRecord::Base.connection_pool.with_connection do |conn|
time = Benchmark.measure do
exec_migration(conn, direction)
end
end
case direction
when :up then announce "migrated (%.4fs)" % time.real; write
when :down then announce "reverted (%.4fs)" % time.real; write
end
end
def exec_migration(conn, direction)
@connection = conn
if respond_to?(:change)
if direction == :down
revert { change }
else
change
end
else
send(direction)
end
ensure
@connection = nil
end
def write(text = "")
puts(text) if verbose
end
def announce(message)
text = "#{version} #{name}: #{message}"
length = [0, 75 - text.length].max
write "== %s %s" % [text, "=" * length]
end
def say(message, subitem = false)
write "#{subitem ? " ->" : "--"} #{message}"
end
def say_with_time(message)
say(message)
result = nil
time = Benchmark.measure { result = yield }
say "%.4fs" % time.real, :subitem
say("#{result} rows", :subitem) if result.is_a?(Integer)
result
end
def suppress_messages
save, self.verbose = verbose, false
yield
ensure
self.verbose = save
end
def connection
@connection || ActiveRecord::Base.connection
end
def method_missing(method, *arguments, &block)
arg_list = arguments.map(&:inspect) * ", "
say_with_time "#{method}(#{arg_list})" do
unless connection.respond_to? :revert
unless arguments.empty? || [:execute, :enable_extension, :disable_extension].include?(method)
arguments[0] = proper_table_name(arguments.first, table_name_options)
if [:rename_table, :add_foreign_key].include?(method) ||
(method == :remove_foreign_key && !arguments.second.is_a?(Hash))
arguments[1] = proper_table_name(arguments.second, table_name_options)
end
end
end
return super unless connection.respond_to?(method)
connection.send(method, *arguments, &block)
end
end
def copy(destination, sources, options = {})
copied = []
FileUtils.mkdir_p(destination) unless File.exist?(destination)
destination_migrations = ActiveRecord::Migrator.migrations(destination)
last = destination_migrations.last
sources.each do |scope, path|
source_migrations = ActiveRecord::Migrator.migrations(path)
source_migrations.each do |migration|
source = File.binread(migration.filename)
inserted_comment = "# This migration comes from #{scope} (originally #{migration.version})\n"
magic_comments = "".dup
loop do
# If we have a magic comment in the original migration,
# insert our comment after the first newline(end of the magic comment line)
# so the magic keep working.
# Note that magic comments must be at the first line(except sh-bang).
source.sub!(/\A(?:#.*\b(?:en)?coding:\s*\S+|#\s*frozen_string_literal:\s*(?:true|false)).*\n/) do |magic_comment|
magic_comments << magic_comment; ""
end || break
end
source = "#{magic_comments}#{inserted_comment}#{source}"
if duplicate = destination_migrations.detect { |m| m.name == migration.name }
if options[:on_skip] && duplicate.scope != scope.to_s
options[:on_skip].call(scope, migration)
end
next
end
migration.version = next_migration_number(last ? last.version + 1 : 0).to_i
new_path = File.join(destination, "#{migration.version}_#{migration.name.underscore}.#{scope}.rb")
old_path, migration.filename = migration.filename, new_path
last = migration
File.binwrite(migration.filename, source)
copied << migration
options[:on_copy].call(scope, migration, old_path) if options[:on_copy]
destination_migrations << migration
end
end
copied
end
# Finds the correct table name given an Active Record object.
# Uses the Active Record object's own table_name, or pre/suffix from the
# options passed in.
def proper_table_name(name, options = {})
if name.respond_to? :table_name
name.table_name
else
"#{options[:table_name_prefix]}#{name}#{options[:table_name_suffix]}"
end
end
# Determines the version number of the next migration.
def next_migration_number(number)
if ActiveRecord::Base.timestamped_migrations
[Time.now.utc.strftime("%Y%m%d%H%M%S"), "%.14d" % number].max
else
SchemaMigration.normalize_migration_number(number)
end
end
# Builds a hash for use in ActiveRecord::Migration#proper_table_name using
# the Active Record object's table_name prefix and suffix
def table_name_options(config = ActiveRecord::Base) #:nodoc:
{
table_name_prefix: config.table_name_prefix,
table_name_suffix: config.table_name_suffix
}
end
private
def execute_block
if connection.respond_to? :execute_block
super # use normal delegation to record the block
else
yield
end
end
end
# MigrationProxy is used to defer loading of the actual migration classes
# until they are needed
MigrationProxy = Struct.new(:name, :version, :filename, :scope) do
def initialize(name, version, filename, scope)
super
@migration = nil
end
def basename
File.basename(filename)
end
def mtime
File.mtime filename
end
delegate :migrate, :announce, :write, :disable_ddl_transaction, to: :migration
private
def migration
@migration ||= load_migration
end
def load_migration
require(File.expand_path(filename))
name.constantize.new(name, version)
end
end
class NullMigration < MigrationProxy #:nodoc:
def initialize
super(nil, 0, nil, nil)
end
def mtime
0
end
end
class Migrator#:nodoc:
class << self
attr_writer :migrations_paths
alias :migrations_path= :migrations_paths=
def migrate(migrations_paths, target_version = nil, &block)
case
when target_version.nil?
up(migrations_paths, target_version, &block)
when current_version == 0 && target_version == 0
[]
when current_version > target_version
down(migrations_paths, target_version, &block)
else
up(migrations_paths, target_version, &block)
end
end
def rollback(migrations_paths, steps = 1)
move(:down, migrations_paths, steps)
end
def forward(migrations_paths, steps = 1)
move(:up, migrations_paths, steps)
end
def up(migrations_paths, target_version = nil)
migrations = migrations(migrations_paths)
migrations.select! { |m| yield m } if block_given?
new(:up, migrations, target_version).migrate
end
def down(migrations_paths, target_version = nil)
migrations = migrations(migrations_paths)
migrations.select! { |m| yield m } if block_given?
new(:down, migrations, target_version).migrate
end
def run(direction, migrations_paths, target_version)
new(direction, migrations(migrations_paths), target_version).run
end
def open(migrations_paths)
new(:up, migrations(migrations_paths), nil)
end
def schema_migrations_table_name
SchemaMigration.table_name
end
deprecate :schema_migrations_table_name
def get_all_versions(connection = Base.connection)
if SchemaMigration.table_exists?
SchemaMigration.all_versions.map(&:to_i)
else
[]
end
end
def current_version(connection = Base.connection)
get_all_versions(connection).max || 0
end
def needs_migration?(connection = Base.connection)
(migrations(migrations_paths).collect(&:version) - get_all_versions(connection)).size > 0
end
def any_migrations?
migrations(migrations_paths).any?
end
def last_migration #:nodoc:
migrations(migrations_paths).last || NullMigration.new
end
def migrations_paths
@migrations_paths ||= ["db/migrate"]
# just to not break things if someone uses: migrations_path = some_string
Array(@migrations_paths)
end
def parse_migration_filename(filename) # :nodoc:
File.basename(filename).scan(Migration::MigrationFilenameRegexp).first
end
def migrations(paths)
paths = Array(paths)
migrations = migration_files(paths).map do |file|
version, name, scope = parse_migration_filename(file)
raise IllegalMigrationNameError.new(file) unless version
version = version.to_i
name = name.camelize
MigrationProxy.new(name, version, file, scope)
end
migrations.sort_by(&:version)
end
def migrations_status(paths)
paths = Array(paths)
db_list = ActiveRecord::SchemaMigration.normalized_versions
file_list = migration_files(paths).map do |file|
version, name, scope = parse_migration_filename(file)
raise IllegalMigrationNameError.new(file) unless version
version = ActiveRecord::SchemaMigration.normalize_migration_number(version)
status = db_list.delete(version) ? "up" : "down"
[status, version, (name + scope).humanize]
end.compact
db_list.map! do |version|
["up", version, "********** NO FILE **********"]
end
(db_list + file_list).sort_by { |_, version, _| version }
end
def migration_files(paths)
Dir[*paths.flat_map { |path| "#{path}/**/[0-9]*_*.rb" }]
end
private
def move(direction, migrations_paths, steps)
migrator = new(direction, migrations(migrations_paths))
if current_version != 0 && !migrator.current_migration
raise UnknownMigrationVersionError.new(current_version)
end
start_index =
if current_version == 0
0
else
migrator.migrations.index(migrator.current_migration)
end
finish = migrator.migrations[start_index + steps]
version = finish ? finish.version : 0
send(direction, migrations_paths, version)
end
end
def initialize(direction, migrations, target_version = nil)
@direction = direction
@target_version = target_version
@migrated_versions = nil
@migrations = migrations
validate(@migrations)
ActiveRecord::SchemaMigration.create_table
ActiveRecord::InternalMetadata.create_table
end
def current_version
migrated.max || 0
end
def current_migration
migrations.detect { |m| m.version == current_version }
end
alias :current :current_migration
def run
if use_advisory_lock?
with_advisory_lock { run_without_lock }
else
run_without_lock
end
end
def migrate
if use_advisory_lock?
with_advisory_lock { migrate_without_lock }
else
migrate_without_lock
end
end
def runnable
runnable = migrations[start..finish]
if up?
runnable.reject { |m| ran?(m) }
else
# skip the last migration if we're headed down, but not ALL the way down
runnable.pop if target
runnable.find_all { |m| ran?(m) }
end
end
def migrations
down? ? @migrations.reverse : @migrations.sort_by(&:version)
end
def pending_migrations
already_migrated = migrated
migrations.reject { |m| already_migrated.include?(m.version) }
end
def migrated
@migrated_versions || load_migrated
end
def load_migrated
@migrated_versions = Set.new(self.class.get_all_versions)
end
private
# Used for running a specific migration.
def run_without_lock
migration = migrations.detect { |m| m.version == @target_version }
raise UnknownMigrationVersionError.new(@target_version) if migration.nil?
result = execute_migration_in_transaction(migration, @direction)
record_environment
result
end
# Used for running multiple migrations up to or down to a certain value.
def migrate_without_lock
if invalid_target?
raise UnknownMigrationVersionError.new(@target_version)
end
result = runnable.each do |migration|
execute_migration_in_transaction(migration, @direction)
end
record_environment
result
end
# Stores the current environment in the database.
def record_environment
return if down?
ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
end
def ran?(migration)
migrated.include?(migration.version.to_i)
end
# Return true if a valid version is not provided.
def invalid_target?
!target && @target_version && @target_version > 0
end
def execute_migration_in_transaction(migration, direction)
return if down? && !migrated.include?(migration.version.to_i)
return if up? && migrated.include?(migration.version.to_i)
Base.logger.info "Migrating to #{migration.name} (#{migration.version})" if Base.logger
ddl_transaction(migration) do
migration.migrate(direction)
record_version_state_after_migrating(migration.version)
end
rescue => e
msg = "An error has occurred, ".dup
msg << "this and " if use_transaction?(migration)
msg << "all later migrations canceled:\n\n#{e}"
raise StandardError, msg, e.backtrace
end
def target
migrations.detect { |m| m.version == @target_version }
end
# Index of the target migration within +migrations+, or the last index
# when the target is not present.
def finish
  index = migrations.index(target)
  index.nil? ? migrations.size - 1 : index
end
# Index at which to start: 0 when migrating up, otherwise the index of
# the current migration (falling back to 0 when it is unknown).
def start
  return 0 if up?
  migrations.index(current) || 0
end
# Ensures no two migrations share a name or a version; raises the
# corresponding duplicate error when they do.
def validate(migrations)
  dup_name, = migrations.group_by(&:name).find { |_, group| group.length > 1 }
  raise DuplicateMigrationNameError.new(dup_name) if dup_name
  dup_version, = migrations.group_by(&:version).find { |_, group| group.length > 1 }
  raise DuplicateMigrationVersionError.new(dup_version) if dup_version
end
# Syncs the in-memory migrated-version collection and the
# schema_migrations table after a migration ran: removes the version
# on :down, adds it on :up.
def record_version_state_after_migrating(version)
  if down?
    migrated.delete(version)
    ActiveRecord::SchemaMigration.where(version: version.to_s).delete_all
  else
    migrated << version
    ActiveRecord::SchemaMigration.create!(version: version.to_s)
  end
end
# Environment recorded in the database's internal metadata.
# Returns nil for a fresh database (no migrations run yet); raises
# NoEnvironmentInSchemaError when the metadata table or the stored
# value is missing.
def self.last_stored_environment
  return nil if current_version == 0
  raise NoEnvironmentInSchemaError unless ActiveRecord::InternalMetadata.table_exists?
  environment = ActiveRecord::InternalMetadata[:environment]
  raise NoEnvironmentInSchemaError unless environment
  environment
end
# Name of the environment the process is currently running in.
def self.current_environment
  ActiveRecord::ConnectionHandling::DEFAULT_ENV.call
end
# Whether the environment stored in the database is one of the
# protected environments. Returns nil when no environment is stored.
def self.protected_environment?
  ActiveRecord::Base.protected_environments.include?(last_stored_environment) if last_stored_environment
end
# True when this migrator is running migrations forward.
def up?
  :up == @direction
end
# True when this migrator is rolling migrations back.
def down?
  :down == @direction
end
# Wrap the migration in a transaction only if supported by the adapter
# and not disabled by the migration itself; otherwise yields directly.
def ddl_transaction(migration)
  if use_transaction?(migration)
    Base.transaction { yield }
  else
    yield
  end
end
# A migration runs inside a transaction unless it opted out via
# disable_ddl_transaction! or the adapter lacks DDL transactions.
def use_transaction?(migration)
  !migration.disable_ddl_transaction && Base.connection.supports_ddl_transactions?
end
# Whether the connection's adapter supports advisory locks.
def use_advisory_lock?
  Base.connection.supports_advisory_locks?
end
# Runs the block while holding a database advisory lock to guard
# against concurrent migrators. Raises ConcurrentMigrationError when
# the lock cannot be acquired; always releases a lock it obtained.
def with_advisory_lock
  lock_id = generate_migrator_advisory_lock_id
  got_lock = Base.connection.get_advisory_lock(lock_id)
  raise ConcurrentMigrationError unless got_lock
  load_migrated # reload schema_migrations to be sure it wasn't changed by another process before we got the lock
  yield
ensure
  Base.connection.release_advisory_lock(lock_id) if got_lock
end
# Fixed salt mixed into the advisory lock id so the id is specific to
# the migrator.
MIGRATOR_SALT = 2053462845
# Derives a per-database advisory lock id from a CRC32 of the current
# database name multiplied by the salt.
def generate_migrator_advisory_lock_id
  db_name_hash = Zlib.crc32(Base.connection.current_database)
  MIGRATOR_SALT * db_name_hash
end
end
end
Fix docs describing rollback [ci skip]
* `rails db:migrate STEP=2` will not rollback the migrations; instead
  `rails db:rollback STEP=2` will do the rollback.
* Also, rewritten `rails db:migrate VERSION` => `rails db:rollback VERSION`
for consistency.
# frozen_string_literal: true
require "set"
require "zlib"
require "active_support/core_ext/module/attribute_accessors"
module ActiveRecord
class MigrationError < ActiveRecordError # :nodoc:
  # Pads any supplied message with blank lines so it stands out.
  def initialize(message = nil)
    if message
      super("\n\n#{message}\n\n")
    else
      super
    end
  end
end
# Exception that can be raised to stop migrations from being rolled back.
# For example the following migration is not reversible.
# Rolling back this migration will raise an ActiveRecord::IrreversibleMigration error.
#
# class IrreversibleMigrationExample < ActiveRecord::Migration[5.0]
# def change
# create_table :distributors do |t|
# t.string :zipcode
# end
#
# execute <<-SQL
# ALTER TABLE distributors
# ADD CONSTRAINT zipchk
# CHECK (char_length(zipcode) = 5) NO INHERIT;
# SQL
# end
# end
#
# There are two ways to mitigate this problem.
#
# 1. Define <tt>#up</tt> and <tt>#down</tt> methods instead of <tt>#change</tt>:
#
# class ReversibleMigrationExample < ActiveRecord::Migration[5.0]
# def up
# create_table :distributors do |t|
# t.string :zipcode
# end
#
# execute <<-SQL
# ALTER TABLE distributors
# ADD CONSTRAINT zipchk
# CHECK (char_length(zipcode) = 5) NO INHERIT;
# SQL
# end
#
# def down
# execute <<-SQL
# ALTER TABLE distributors
# DROP CONSTRAINT zipchk
# SQL
#
# drop_table :distributors
# end
# end
#
# 2. Use the #reversible method in <tt>#change</tt> method:
#
# class ReversibleMigrationExample < ActiveRecord::Migration[5.0]
# def change
# create_table :distributors do |t|
# t.string :zipcode
# end
#
# reversible do |dir|
# dir.up do
# execute <<-SQL
# ALTER TABLE distributors
# ADD CONSTRAINT zipchk
# CHECK (char_length(zipcode) = 5) NO INHERIT;
# SQL
# end
#
# dir.down do
# execute <<-SQL
# ALTER TABLE distributors
# DROP CONSTRAINT zipchk
# SQL
# end
# end
# end
# end
# See the discussion above for ways to write migrations that avoid
# raising this error when rolling back.
class IrreversibleMigration < MigrationError
end
class DuplicateMigrationVersionError < MigrationError # :nodoc:
  def initialize(version = nil)
    message =
      if version
        "Multiple migrations have the version number #{version}."
      else
        "Duplicate migration version error."
      end
    super(message)
  end
end
class DuplicateMigrationNameError < MigrationError # :nodoc:
  def initialize(name = nil)
    super(name ? "Multiple migrations have the name #{name}." : "Duplicate migration name.")
  end
end
class UnknownMigrationVersionError < MigrationError # :nodoc:
  def initialize(version = nil)
    super(version ? "No migration with version number #{version}." : "Unknown migration version.")
  end
end
class IllegalMigrationNameError < MigrationError # :nodoc:
  def initialize(name = nil)
    message =
      if name
        "Illegal name for migration file: #{name}\n\t(only lower case letters, numbers, and '_' allowed)."
      else
        "Illegal name for migration."
      end
    super(message)
  end
end
class PendingMigrationError < MigrationError # :nodoc:
  def initialize(message = nil)
    if message
      super(message)
    elsif defined?(Rails.env)
      super("Migrations are pending. To resolve this issue, run:\n\n        bin/rails db:migrate RAILS_ENV=#{::Rails.env}")
    else
      super("Migrations are pending. To resolve this issue, run:\n\n        bin/rails db:migrate")
    end
  end
end
class ConcurrentMigrationError < MigrationError #:nodoc:
  # Shown when the migration advisory lock cannot be acquired.
  DEFAULT_MESSAGE = "Cannot run migrations because another migration process is currently running.".freeze
  def initialize(message = DEFAULT_MESSAGE)
    super
  end
end
class NoEnvironmentInSchemaError < MigrationError # :nodoc:
  def initialize
    msg = "Environment data not found in the schema. To resolve this issue, run: \n\n        bin/rails db:environment:set"
    msg += " RAILS_ENV=#{::Rails.env}" if defined?(Rails.env)
    super(msg)
  end
end
class ProtectedEnvironmentError < ActiveRecordError # :nodoc:
  def initialize(env = "production")
    super("You are attempting to run a destructive action against your '#{env}' database.\n" \
      "If you are sure you want to continue, run the same command with the environment variable:\n" \
      "DISABLE_DATABASE_ENVIRONMENT_CHECK=1")
  end
end
class EnvironmentMismatchError < ActiveRecordError
  def initialize(current: nil, stored: nil)
    msg = "You are attempting to modify a database that was last run in `#{ stored }` environment.\n" \
          "You are running in `#{ current }` environment. " \
          "If you are sure you want to continue, first set the environment using:\n\n" \
          "        bin/rails db:environment:set"
    suffix = defined?(Rails.env) ? " RAILS_ENV=#{::Rails.env}\n\n" : "\n\n"
    super("#{msg}#{suffix}")
  end
end
# = Active Record Migrations
#
# Migrations can manage the evolution of a schema used by several physical
# databases. It's a solution to the common problem of adding a field to make
# a new feature work in your local database, but being unsure of how to
# push that change to other developers and to the production server. With
# migrations, you can describe the transformations in self-contained classes
# that can be checked into version control systems and executed against
# another database that might be one, two, or five versions behind.
#
# Example of a simple migration:
#
# class AddSsl < ActiveRecord::Migration[5.0]
# def up
# add_column :accounts, :ssl_enabled, :boolean, default: true
# end
#
# def down
# remove_column :accounts, :ssl_enabled
# end
# end
#
# This migration will add a boolean flag to the accounts table and remove it
# if you're backing out of the migration. It shows how all migrations have
# two methods +up+ and +down+ that describes the transformations
# required to implement or remove the migration. These methods can consist
# of both the migration specific methods like +add_column+ and +remove_column+,
# but may also contain regular Ruby code for generating data needed for the
# transformations.
#
# Example of a more complex migration that also needs to initialize data:
#
# class AddSystemSettings < ActiveRecord::Migration[5.0]
# def up
# create_table :system_settings do |t|
# t.string :name
# t.string :label
# t.text :value
# t.string :type
# t.integer :position
# end
#
# SystemSetting.create name: 'notice',
# label: 'Use notice?',
# value: 1
# end
#
# def down
# drop_table :system_settings
# end
# end
#
# This migration first adds the +system_settings+ table, then creates the very
# first row in it using the Active Record model that relies on the table. It
# also uses the more advanced +create_table+ syntax where you can specify a
# complete table schema in one block call.
#
# == Available transformations
#
# === Creation
#
# * <tt>create_join_table(table_1, table_2, options)</tt>: Creates a join
# table having its name as the lexical order of the first two
# arguments. See
# ActiveRecord::ConnectionAdapters::SchemaStatements#create_join_table for
# details.
# * <tt>create_table(name, options)</tt>: Creates a table called +name+ and
# makes the table object available to a block that can then add columns to it,
# following the same format as +add_column+. See example above. The options hash
# is for fragments like "DEFAULT CHARSET=UTF-8" that are appended to the create
# table definition.
# * <tt>add_column(table_name, column_name, type, options)</tt>: Adds a new column
# to the table called +table_name+
# named +column_name+ specified to be one of the following types:
# <tt>:string</tt>, <tt>:text</tt>, <tt>:integer</tt>, <tt>:float</tt>,
# <tt>:decimal</tt>, <tt>:datetime</tt>, <tt>:timestamp</tt>, <tt>:time</tt>,
# <tt>:date</tt>, <tt>:binary</tt>, <tt>:boolean</tt>. A default value can be
# specified by passing an +options+ hash like <tt>{ default: 11 }</tt>.
# Other options include <tt>:limit</tt> and <tt>:null</tt> (e.g.
# <tt>{ limit: 50, null: false }</tt>) -- see
# ActiveRecord::ConnectionAdapters::TableDefinition#column for details.
# * <tt>add_foreign_key(from_table, to_table, options)</tt>: Adds a new
# foreign key. +from_table+ is the table with the key column, +to_table+ contains
# the referenced primary key.
# * <tt>add_index(table_name, column_names, options)</tt>: Adds a new index
# with the name of the column. Other options include
# <tt>:name</tt>, <tt>:unique</tt> (e.g.
# <tt>{ name: 'users_name_index', unique: true }</tt>) and <tt>:order</tt>
# (e.g. <tt>{ order: { name: :desc } }</tt>).
# * <tt>add_reference(:table_name, :reference_name)</tt>: Adds a new column
# +reference_name_id+ by default an integer. See
# ActiveRecord::ConnectionAdapters::SchemaStatements#add_reference for details.
# * <tt>add_timestamps(table_name, options)</tt>: Adds timestamps (+created_at+
# and +updated_at+) columns to +table_name+.
#
# === Modification
#
# * <tt>change_column(table_name, column_name, type, options)</tt>: Changes
# the column to a different type using the same parameters as add_column.
# * <tt>change_column_default(table_name, column_name, default_or_changes)</tt>:
# Sets a default value for +column_name+ defined by +default_or_changes+ on
# +table_name+. Passing a hash containing <tt>:from</tt> and <tt>:to</tt>
# as +default_or_changes+ will make this change reversible in the migration.
# * <tt>change_column_null(table_name, column_name, null, default = nil)</tt>:
# Sets or removes a +NOT NULL+ constraint on +column_name+. The +null+ flag
# indicates whether the value can be +NULL+. See
# ActiveRecord::ConnectionAdapters::SchemaStatements#change_column_null for
# details.
# * <tt>change_table(name, options)</tt>: Allows to make column alterations to
# the table called +name+. It makes the table object available to a block that
# can then add/remove columns, indexes or foreign keys to it.
# * <tt>rename_column(table_name, column_name, new_column_name)</tt>: Renames
# a column but keeps the type and content.
# * <tt>rename_index(table_name, old_name, new_name)</tt>: Renames an index.
# * <tt>rename_table(old_name, new_name)</tt>: Renames the table called +old_name+
# to +new_name+.
#
# === Deletion
#
# * <tt>drop_table(name)</tt>: Drops the table called +name+.
# * <tt>drop_join_table(table_1, table_2, options)</tt>: Drops the join table
# specified by the given arguments.
# * <tt>remove_column(table_name, column_name, type, options)</tt>: Removes the column
# named +column_name+ from the table called +table_name+.
# * <tt>remove_columns(table_name, *column_names)</tt>: Removes the given
# columns from the table definition.
# * <tt>remove_foreign_key(from_table, options_or_to_table)</tt>: Removes the
# given foreign key from the table called +table_name+.
# * <tt>remove_index(table_name, column: column_names)</tt>: Removes the index
# specified by +column_names+.
# * <tt>remove_index(table_name, name: index_name)</tt>: Removes the index
# specified by +index_name+.
# * <tt>remove_reference(table_name, ref_name, options)</tt>: Removes the
# reference(s) on +table_name+ specified by +ref_name+.
# * <tt>remove_timestamps(table_name, options)</tt>: Removes the timestamp
# columns (+created_at+ and +updated_at+) from the table definition.
#
# == Irreversible transformations
#
# Some transformations are destructive in a manner that cannot be reversed.
# Migrations of that kind should raise an <tt>ActiveRecord::IrreversibleMigration</tt>
# exception in their +down+ method.
#
# == Running migrations from within Rails
#
# The Rails package has several tools to help create and apply migrations.
#
# To generate a new migration, you can use
# rails generate migration MyNewMigration
#
# where MyNewMigration is the name of your migration. The generator will
# create an empty migration file <tt>timestamp_my_new_migration.rb</tt>
# in the <tt>db/migrate/</tt> directory where <tt>timestamp</tt> is the
# UTC formatted date and time that the migration was generated.
#
# There is a special syntactic shortcut to generate migrations that add fields to a table.
#
# rails generate migration add_fieldname_to_tablename fieldname:string
#
# This will generate the file <tt>timestamp_add_fieldname_to_tablename.rb</tt>, which will look like this:
# class AddFieldnameToTablename < ActiveRecord::Migration[5.0]
# def change
# add_column :tablenames, :fieldname, :string
# end
# end
#
# To run migrations against the currently configured database, use
# <tt>rails db:migrate</tt>. This will update the database by running all of the
# pending migrations, creating the <tt>schema_migrations</tt> table
# (see "About the schema_migrations table" section below) if missing. It will also
# invoke the db:schema:dump task, which will update your db/schema.rb file
# to match the structure of your database.
#
# To roll the database back to a previous migration version, use
# <tt>rails db:rollback VERSION=X</tt> where <tt>X</tt> is the version to which
# you wish to downgrade. Alternatively, you can also use the STEP option if you
# wish to rollback last few migrations. <tt>rails db:rollback STEP=2</tt> will rollback
# the latest two migrations.
#
# If any of the migrations throw an <tt>ActiveRecord::IrreversibleMigration</tt> exception,
# that step will fail and you'll have some manual work to do.
#
# == Database support
#
# Migrations are currently supported in MySQL, PostgreSQL, SQLite,
# SQL Server, and Oracle (all supported databases except DB2).
#
# == More examples
#
# Not all migrations change the schema. Some just fix the data:
#
# class RemoveEmptyTags < ActiveRecord::Migration[5.0]
# def up
# Tag.all.each { |tag| tag.destroy if tag.pages.empty? }
# end
#
# def down
# # not much we can do to restore deleted data
# raise ActiveRecord::IrreversibleMigration, "Can't recover the deleted tags"
# end
# end
#
# Others remove columns when they migrate up instead of down:
#
# class RemoveUnnecessaryItemAttributes < ActiveRecord::Migration[5.0]
# def up
# remove_column :items, :incomplete_items_count
# remove_column :items, :completed_items_count
# end
#
# def down
# add_column :items, :incomplete_items_count
# add_column :items, :completed_items_count
# end
# end
#
# And sometimes you need to do something in SQL not abstracted directly by migrations:
#
# class MakeJoinUnique < ActiveRecord::Migration[5.0]
# def up
# execute "ALTER TABLE `pages_linked_pages` ADD UNIQUE `page_id_linked_page_id` (`page_id`,`linked_page_id`)"
# end
#
# def down
# execute "ALTER TABLE `pages_linked_pages` DROP INDEX `page_id_linked_page_id`"
# end
# end
#
# == Using a model after changing its table
#
# Sometimes you'll want to add a column in a migration and populate it
# immediately after. In that case, you'll need to make a call to
# <tt>Base#reset_column_information</tt> in order to ensure that the model has the
# latest column data from after the new column was added. Example:
#
# class AddPeopleSalary < ActiveRecord::Migration[5.0]
# def up
# add_column :people, :salary, :integer
# Person.reset_column_information
# Person.all.each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# end
#
# == Controlling verbosity
#
# By default, migrations will describe the actions they are taking, writing
# them to the console as they happen, along with benchmarks describing how
# long each step took.
#
# You can quiet them down by setting ActiveRecord::Migration.verbose = false.
#
# You can also insert your own messages and benchmarks by using the +say_with_time+
# method:
#
# def up
# ...
# say_with_time "Updating salaries..." do
# Person.all.each do |p|
# p.update_attribute :salary, SalaryCalculator.compute(p)
# end
# end
# ...
# end
#
# The phrase "Updating salaries..." would then be printed, along with the
# benchmark for the block when the block completes.
#
# == Timestamped Migrations
#
# By default, Rails generates migrations that look like:
#
# 20080717013526_your_migration_name.rb
#
# The prefix is a generation timestamp (in UTC).
#
# If you'd prefer to use numeric prefixes, you can turn timestamped migrations
# off by setting:
#
# config.active_record.timestamped_migrations = false
#
# In application.rb.
#
# == Reversible Migrations
#
# Reversible migrations are migrations that know how to go +down+ for you.
# You simply supply the +up+ logic, and the Migration system figures out
# how to execute the down commands for you.
#
# To define a reversible migration, define the +change+ method in your
# migration like this:
#
# class TenderloveMigration < ActiveRecord::Migration[5.0]
# def change
# create_table(:horses) do |t|
# t.column :content, :text
# t.column :remind_at, :datetime
# end
# end
# end
#
# This migration will create the horses table for you on the way up, and
# automatically figure out how to drop the table on the way down.
#
# Some commands like +remove_column+ cannot be reversed. If you care to
# define how to move up and down in these cases, you should define the +up+
# and +down+ methods as before.
#
# If a command cannot be reversed, an
# <tt>ActiveRecord::IrreversibleMigration</tt> exception will be raised when
# the migration is moving down.
#
# For a list of commands that are reversible, please see
# <tt>ActiveRecord::Migration::CommandRecorder</tt>.
#
# == Transactional Migrations
#
# If the database adapter supports DDL transactions, all migrations will
# automatically be wrapped in a transaction. There are queries that you
# can't execute inside a transaction though, and for these situations
# you can turn the automatic transactions off.
#
# class ChangeEnum < ActiveRecord::Migration[5.0]
# disable_ddl_transaction!
#
# def up
# execute "ALTER TYPE model_size ADD VALUE 'new_value'"
# end
# end
#
# Remember that you can still open your own transactions, even if you
# are in a Migration with <tt>self.disable_ddl_transaction!</tt>.
class Migration
  autoload :CommandRecorder, "active_record/migration/command_recorder"
  autoload :Compatibility, "active_record/migration/compatibility"
  # This must be defined before the inherited hook, below
  class Current < Migration # :nodoc:
  end
  # Rejects direct subclassing: migrations must pin the Rails release
  # they were written for via ActiveRecord::Migration[version].
  def self.inherited(subclass) # :nodoc:
    super
    if subclass.superclass == Migration
      raise StandardError, "Directly inheriting from ActiveRecord::Migration is not supported. " \
        "Please specify the Rails release the migration was written for:\n" \
        "\n" \
        "  class #{subclass} < ActiveRecord::Migration[4.2]"
    end
  end
  # Returns the compatibility base class for the given Rails version,
  # e.g. ActiveRecord::Migration[5.0].
  def self.[](version)
    Compatibility.find(version)
  end
  # The running Active Record version as a float.
  def self.current_version
    ActiveRecord::VERSION::STRING.to_f
  end
  MigrationFilenameRegexp = /\A([0-9]+)_([_a-z0-9]*)\.?([_a-z0-9]*)?\.rb\z/ # :nodoc:
  # This class is used to verify that all migrations have been run before
  # loading a web page if <tt>config.active_record.migration_error</tt> is set to :page_load
  class CheckPending
    def initialize(app)
      @app = app
      @last_check = 0
    end
    # Rack entry point: re-checks for pending migrations whenever the
    # newest migration file is newer than the last check.
    def call(env)
      mtime = ActiveRecord::Migrator.last_migration.mtime.to_i
      if @last_check < mtime
        ActiveRecord::Migration.check_pending!(connection)
        @last_check = mtime
      end
      @app.call(env)
    end
    private
      def connection
        ActiveRecord::Base.connection
      end
  end
  class << self
    attr_accessor :delegate # :nodoc:
    attr_accessor :disable_ddl_transaction # :nodoc:
    # The delegate set on this class or the nearest ancestor that has one.
    def nearest_delegate # :nodoc:
      delegate || superclass.nearest_delegate
    end
    # Raises <tt>ActiveRecord::PendingMigrationError</tt> error if any migrations are pending.
    def check_pending!(connection = Base.connection)
      raise ActiveRecord::PendingMigrationError if ActiveRecord::Migrator.needs_migration?(connection)
    end
    # Rebuilds the test database via `bin/rails db:test:prepare` when
    # migrations are pending or none exist, then re-checks.
    def load_schema_if_pending!
      if ActiveRecord::Migrator.needs_migration? || !ActiveRecord::Migrator.any_migrations?
        # Roundtrip to Rake to allow plugins to hook into database initialization.
        FileUtils.cd Rails.root do
          current_config = Base.connection_config
          Base.clear_all_connections!
          system("bin/rails db:test:prepare")
          # Establish a new connection, the old database may be gone (db:test:prepare uses purge)
          Base.establish_connection(current_config)
        end
        check_pending!
      end
    end
    def maintain_test_schema! # :nodoc:
      if ActiveRecord::Base.maintain_test_schema
        suppress_messages { load_schema_if_pending! }
      end
    end
    # Forwards unknown class-level calls to the nearest delegate instance.
    def method_missing(name, *args, &block) # :nodoc:
      nearest_delegate.send(name, *args, &block)
    end
    def migrate(direction)
      new.migrate direction
    end
    # Disable the transaction wrapping this migration.
    # You can still create your own transactions even after calling #disable_ddl_transaction!
    #
    # For more details read the {"Transactional Migrations" section above}[rdoc-ref:Migration].
    def disable_ddl_transaction!
      @disable_ddl_transaction = true
    end
  end
  def disable_ddl_transaction # :nodoc:
    self.class.disable_ddl_transaction
  end
  cattr_accessor :verbose
  attr_accessor :name, :version
  def initialize(name = self.class.name, version = nil)
    @name = name
    @version = version
    @connection = nil
  end
  self.verbose = true
  # instantiate the delegate object after initialize is defined
  self.delegate = new
  # Reverses the migration commands for the given block and
  # the given migrations.
  #
  # The following migration will remove the table 'horses'
  # and create the table 'apples' on the way up, and the reverse
  # on the way down.
  #
  #   class FixTLMigration < ActiveRecord::Migration[5.0]
  #     def change
  #       revert do
  #         create_table(:horses) do |t|
  #           t.text :content
  #           t.datetime :remind_at
  #         end
  #       end
  #       create_table(:apples) do |t|
  #         t.string :variety
  #       end
  #     end
  #   end
  #
  # Or equivalently, if +TenderloveMigration+ is defined as in the
  # documentation for Migration:
  #
  #   require_relative '20121212123456_tenderlove_migration'
  #
  #   class FixupTLMigration < ActiveRecord::Migration[5.0]
  #     def change
  #       revert TenderloveMigration
  #
  #       create_table(:apples) do |t|
  #         t.string :variety
  #       end
  #     end
  #   end
  #
  # This command can be nested.
  def revert(*migration_classes)
    run(*migration_classes.reverse, revert: true) unless migration_classes.empty?
    if block_given?
      if connection.respond_to? :revert
        connection.revert { yield }
      else
        recorder = CommandRecorder.new(connection)
        @connection = recorder
        suppress_messages do
          connection.revert { yield }
        end
        @connection = recorder.delegate
        recorder.commands.each do |cmd, args, block|
          send(cmd, *args, &block)
        end
      end
    end
  end
  # Whether the current connection is recording inverse commands.
  def reverting?
    connection.respond_to?(:reverting) && connection.reverting
  end
  # Yields the block only in the matching direction; used by #reversible.
  ReversibleBlockHelper = Struct.new(:reverting) do # :nodoc:
    def up
      yield unless reverting
    end
    def down
      yield if reverting
    end
  end
  # Used to specify an operation that can be run in one direction or another.
  # Call the methods +up+ and +down+ of the yielded object to run a block
  # only in one given direction.
  # The whole block will be called in the right order within the migration.
  #
  # In the following example, the looping on users will always be done
  # when the three columns 'first_name', 'last_name' and 'full_name' exist,
  # even when migrating down:
  #
  #   class SplitNameMigration < ActiveRecord::Migration[5.0]
  #     def change
  #       add_column :users, :first_name, :string
  #       add_column :users, :last_name, :string
  #
  #       reversible do |dir|
  #         User.reset_column_information
  #         User.all.each do |u|
  #           dir.up   { u.first_name, u.last_name = u.full_name.split(' ') }
  #           dir.down { u.full_name = "#{u.first_name} #{u.last_name}" }
  #           u.save
  #         end
  #       end
  #
  #       revert { add_column :users, :full_name, :string }
  #     end
  #   end
  def reversible
    helper = ReversibleBlockHelper.new(reverting?)
    execute_block { yield helper }
  end
  # Runs the given migration classes.
  # Last argument can specify options:
  # - :direction (default is :up)
  # - :revert (default is false)
  def run(*migration_classes)
    opts = migration_classes.extract_options!
    dir = opts[:direction] || :up
    dir = (dir == :down ? :up : :down) if opts[:revert]
    if reverting?
      # If in revert and going :up, say, we want to execute :down without reverting, so
      revert { run(*migration_classes, direction: dir, revert: true) }
    else
      migration_classes.each do |migration_class|
        migration_class.new.exec_migration(connection, dir)
      end
    end
  end
  # Runs the class-level +up+ definition, if any, with this instance as delegate.
  def up
    self.class.delegate = self
    return unless self.class.respond_to?(:up)
    self.class.up
  end
  # Runs the class-level +down+ definition, if any, with this instance as delegate.
  def down
    self.class.delegate = self
    return unless self.class.respond_to?(:down)
    self.class.down
  end
  # Execute this migration in the named direction
  def migrate(direction)
    return unless respond_to?(direction)
    case direction
    when :up   then announce "migrating"
    when :down then announce "reverting"
    end
    time = nil
    ActiveRecord::Base.connection_pool.with_connection do |conn|
      time = Benchmark.measure do
        exec_migration(conn, direction)
      end
    end
    case direction
    when :up   then announce "migrated (%.4fs)" % time.real; write
    when :down then announce "reverted (%.4fs)" % time.real; write
    end
  end
  # Runs #change (reverted when going down) or the direction-specific
  # method against +conn+; always clears @connection afterwards.
  def exec_migration(conn, direction)
    @connection = conn
    if respond_to?(:change)
      if direction == :down
        revert { change }
      else
        change
      end
    else
      send(direction)
    end
  ensure
    @connection = nil
  end
  # Prints +text+ unless verbosity is disabled.
  def write(text = "")
    puts(text) if verbose
  end
  # Writes a banner line such as "== 42 AddFoo: migrating ====".
  def announce(message)
    text = "#{version} #{name}: #{message}"
    length = [0, 75 - text.length].max
    write "== %s %s" % [text, "=" * length]
  end
  # Writes a bullet line; +subitem+ indents it as a sub-entry.
  def say(message, subitem = false)
    write "#{subitem ? "   ->" : "--"} #{message}"
  end
  # Writes +message+, runs the block, then reports elapsed time (and
  # row count when the block returns an Integer). Returns the block's result.
  def say_with_time(message)
    say(message)
    result = nil
    time = Benchmark.measure { result = yield }
    say "%.4fs" % time.real, :subitem
    say("#{result} rows", :subitem) if result.is_a?(Integer)
    result
  end
  # Temporarily disables verbose output for the duration of the block.
  def suppress_messages
    save, self.verbose = verbose, false
    yield
  ensure
    self.verbose = save
  end
  def connection
    @connection || ActiveRecord::Base.connection
  end
  # Forwards schema statements to the connection, logging each call and
  # applying table-name prefix/suffix to the table-name arguments.
  def method_missing(method, *arguments, &block)
    arg_list = arguments.map(&:inspect) * ", "
    say_with_time "#{method}(#{arg_list})" do
      unless connection.respond_to? :revert
        unless arguments.empty? || [:execute, :enable_extension, :disable_extension].include?(method)
          arguments[0] = proper_table_name(arguments.first, table_name_options)
          if [:rename_table, :add_foreign_key].include?(method) ||
            (method == :remove_foreign_key && !arguments.second.is_a?(Hash))
            arguments[1] = proper_table_name(arguments.second, table_name_options)
          end
        end
      end
      return super unless connection.respond_to?(method)
      connection.send(method, *arguments, &block)
    end
  end
  # Copies migration files from +sources+ (a hash of scope => path) into
  # +destination+, tagging each with its originating scope and assigning
  # fresh version numbers. Returns the copied migration proxies.
  def copy(destination, sources, options = {})
    copied = []
    FileUtils.mkdir_p(destination) unless File.exist?(destination)
    destination_migrations = ActiveRecord::Migrator.migrations(destination)
    last = destination_migrations.last
    sources.each do |scope, path|
      source_migrations = ActiveRecord::Migrator.migrations(path)
      source_migrations.each do |migration|
        source = File.binread(migration.filename)
        inserted_comment = "# This migration comes from #{scope} (originally #{migration.version})\n"
        magic_comments = "".dup
        loop do
          # If we have a magic comment in the original migration,
          # insert our comment after the first newline(end of the magic comment line)
          # so the magic keep working.
          # Note that magic comments must be at the first line(except sh-bang).
          source.sub!(/\A(?:#.*\b(?:en)?coding:\s*\S+|#\s*frozen_string_literal:\s*(?:true|false)).*\n/) do |magic_comment|
            magic_comments << magic_comment; ""
          end || break
        end
        source = "#{magic_comments}#{inserted_comment}#{source}"
        if duplicate = destination_migrations.detect { |m| m.name == migration.name }
          if options[:on_skip] && duplicate.scope != scope.to_s
            options[:on_skip].call(scope, migration)
          end
          next
        end
        migration.version = next_migration_number(last ? last.version + 1 : 0).to_i
        new_path = File.join(destination, "#{migration.version}_#{migration.name.underscore}.#{scope}.rb")
        old_path, migration.filename = migration.filename, new_path
        last = migration
        File.binwrite(migration.filename, source)
        copied << migration
        options[:on_copy].call(scope, migration, old_path) if options[:on_copy]
        destination_migrations << migration
      end
    end
    copied
  end
  # Finds the correct table name given an Active Record object.
  # Uses the Active Record object's own table_name, or pre/suffix from the
  # options passed in.
  def proper_table_name(name, options = {})
    if name.respond_to? :table_name
      name.table_name
    else
      "#{options[:table_name_prefix]}#{name}#{options[:table_name_suffix]}"
    end
  end
  # Determines the version number of the next migration.
  def next_migration_number(number)
    if ActiveRecord::Base.timestamped_migrations
      [Time.now.utc.strftime("%Y%m%d%H%M%S"), "%.14d" % number].max
    else
      SchemaMigration.normalize_migration_number(number)
    end
  end
  # Builds a hash for use in ActiveRecord::Migration#proper_table_name using
  # the Active Record object's table_name prefix and suffix
  def table_name_options(config = ActiveRecord::Base) #:nodoc:
    {
      table_name_prefix: config.table_name_prefix,
      table_name_suffix: config.table_name_suffix
    }
  end
  private
    def execute_block
      if connection.respond_to? :execute_block
        super # use normal delegation to record the block
      else
        yield
      end
    end
end
# MigrationProxy is used to defer loading of the actual migration classes
# until they are needed
MigrationProxy = Struct.new(:name, :version, :filename, :scope) do
  def initialize(name, version, filename, scope)
    super
    @migration = nil
  end
  # Filename without directory components.
  def basename
    File.basename(filename)
  end
  # Modification time of the underlying migration file.
  def mtime
    File.mtime filename
  end
  delegate :migrate, :announce, :write, :disable_ddl_transaction, to: :migration
  private
    # Memoized real migration instance; the file is only required when a
    # delegated method is first called.
    def migration
      @migration ||= load_migration
    end
    def load_migration
      require(File.expand_path(filename))
      name.constantize.new(name, version)
    end
end
class NullMigration < MigrationProxy #:nodoc:
  # Placeholder used when no migrations exist; reports version 0.
  def initialize
    super(nil, 0, nil, nil)
  end
  # Returns 0 since there is no backing file.
  def mtime
    0
  end
end
class Migrator#:nodoc:
class << self
attr_writer :migrations_paths
alias :migrations_path= :migrations_paths=
# Migrates up or down to +target_version+ (all the way up when nil;
# no-op when both the current and target versions are 0).
def migrate(migrations_paths, target_version = nil, &block)
  if target_version.nil?
    up(migrations_paths, target_version, &block)
  elsif current_version == 0 && target_version == 0
    []
  elsif current_version > target_version
    down(migrations_paths, target_version, &block)
  else
    up(migrations_paths, target_version, &block)
  end
end
# Rolls back the given number of migrations (default: one).
def rollback(migrations_paths, steps = 1)
  move(:down, migrations_paths, steps)
end
# Re-applies the given number of migrations going forward (default: one).
def forward(migrations_paths, steps = 1)
  move(:up, migrations_paths, steps)
end
# Runs migrations upward to +target_version+; an optional block filters
# which migration proxies are run.
def up(migrations_paths, target_version = nil)
  selected = migrations(migrations_paths)
  selected.select! { |migration| yield migration } if block_given?
  new(:up, selected, target_version).migrate
end
def down(migrations_paths, target_version = nil)
migrations = migrations(migrations_paths)
migrations.select! { |m| yield m } if block_given?
new(:down, migrations, target_version).migrate
end
def run(direction, migrations_paths, target_version)
new(direction, migrations(migrations_paths), target_version).run
end
def open(migrations_paths)
new(:up, migrations(migrations_paths), nil)
end
def schema_migrations_table_name
SchemaMigration.table_name
end
deprecate :schema_migrations_table_name
def get_all_versions(connection = Base.connection)
if SchemaMigration.table_exists?
SchemaMigration.all_versions.map(&:to_i)
else
[]
end
end
def current_version(connection = Base.connection)
get_all_versions(connection).max || 0
end
def needs_migration?(connection = Base.connection)
(migrations(migrations_paths).collect(&:version) - get_all_versions(connection)).size > 0
end
def any_migrations?
migrations(migrations_paths).any?
end
def last_migration #:nodoc:
migrations(migrations_paths).last || NullMigration.new
end
def migrations_paths
@migrations_paths ||= ["db/migrate"]
# just to not break things if someone uses: migrations_path = some_string
Array(@migrations_paths)
end
def parse_migration_filename(filename) # :nodoc:
File.basename(filename).scan(Migration::MigrationFilenameRegexp).first
end
def migrations(paths)
paths = Array(paths)
migrations = migration_files(paths).map do |file|
version, name, scope = parse_migration_filename(file)
raise IllegalMigrationNameError.new(file) unless version
version = version.to_i
name = name.camelize
MigrationProxy.new(name, version, file, scope)
end
migrations.sort_by(&:version)
end
def migrations_status(paths)
paths = Array(paths)
db_list = ActiveRecord::SchemaMigration.normalized_versions
file_list = migration_files(paths).map do |file|
version, name, scope = parse_migration_filename(file)
raise IllegalMigrationNameError.new(file) unless version
version = ActiveRecord::SchemaMigration.normalize_migration_number(version)
status = db_list.delete(version) ? "up" : "down"
[status, version, (name + scope).humanize]
end.compact
db_list.map! do |version|
["up", version, "********** NO FILE **********"]
end
(db_list + file_list).sort_by { |_, version, _| version }
end
def migration_files(paths)
Dir[*paths.flat_map { |path| "#{path}/**/[0-9]*_*.rb" }]
end
private
def move(direction, migrations_paths, steps)
migrator = new(direction, migrations(migrations_paths))
if current_version != 0 && !migrator.current_migration
raise UnknownMigrationVersionError.new(current_version)
end
start_index =
if current_version == 0
0
else
migrator.migrations.index(migrator.current_migration)
end
finish = migrator.migrations[start_index + steps]
version = finish ? finish.version : 0
send(direction, migrations_paths, version)
end
end
def initialize(direction, migrations, target_version = nil)
@direction = direction
@target_version = target_version
@migrated_versions = nil
@migrations = migrations
validate(@migrations)
ActiveRecord::SchemaMigration.create_table
ActiveRecord::InternalMetadata.create_table
end
def current_version
migrated.max || 0
end
def current_migration
migrations.detect { |m| m.version == current_version }
end
alias :current :current_migration
def run
if use_advisory_lock?
with_advisory_lock { run_without_lock }
else
run_without_lock
end
end
def migrate
if use_advisory_lock?
with_advisory_lock { migrate_without_lock }
else
migrate_without_lock
end
end
def runnable
runnable = migrations[start..finish]
if up?
runnable.reject { |m| ran?(m) }
else
# skip the last migration if we're headed down, but not ALL the way down
runnable.pop if target
runnable.find_all { |m| ran?(m) }
end
end
def migrations
down? ? @migrations.reverse : @migrations.sort_by(&:version)
end
def pending_migrations
already_migrated = migrated
migrations.reject { |m| already_migrated.include?(m.version) }
end
def migrated
@migrated_versions || load_migrated
end
def load_migrated
@migrated_versions = Set.new(self.class.get_all_versions)
end
private
# Used for running a specific migration.
def run_without_lock
migration = migrations.detect { |m| m.version == @target_version }
raise UnknownMigrationVersionError.new(@target_version) if migration.nil?
result = execute_migration_in_transaction(migration, @direction)
record_environment
result
end
# Used for running multiple migrations up to or down to a certain value.
def migrate_without_lock
if invalid_target?
raise UnknownMigrationVersionError.new(@target_version)
end
result = runnable.each do |migration|
execute_migration_in_transaction(migration, @direction)
end
record_environment
result
end
# Stores the current environment in the database.
def record_environment
return if down?
ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
end
def ran?(migration)
migrated.include?(migration.version.to_i)
end
# Return true if a valid version is not provided.
def invalid_target?
!target && @target_version && @target_version > 0
end
def execute_migration_in_transaction(migration, direction)
return if down? && !migrated.include?(migration.version.to_i)
return if up? && migrated.include?(migration.version.to_i)
Base.logger.info "Migrating to #{migration.name} (#{migration.version})" if Base.logger
ddl_transaction(migration) do
migration.migrate(direction)
record_version_state_after_migrating(migration.version)
end
rescue => e
msg = "An error has occurred, ".dup
msg << "this and " if use_transaction?(migration)
msg << "all later migrations canceled:\n\n#{e}"
raise StandardError, msg, e.backtrace
end
def target
migrations.detect { |m| m.version == @target_version }
end
def finish
migrations.index(target) || migrations.size - 1
end
def start
up? ? 0 : (migrations.index(current) || 0)
end
def validate(migrations)
name , = migrations.group_by(&:name).find { |_, v| v.length > 1 }
raise DuplicateMigrationNameError.new(name) if name
version , = migrations.group_by(&:version).find { |_, v| v.length > 1 }
raise DuplicateMigrationVersionError.new(version) if version
end
def record_version_state_after_migrating(version)
if down?
migrated.delete(version)
ActiveRecord::SchemaMigration.where(version: version.to_s).delete_all
else
migrated << version
ActiveRecord::SchemaMigration.create!(version: version.to_s)
end
end
def self.last_stored_environment
return nil if current_version == 0
raise NoEnvironmentInSchemaError unless ActiveRecord::InternalMetadata.table_exists?
environment = ActiveRecord::InternalMetadata[:environment]
raise NoEnvironmentInSchemaError unless environment
environment
end
def self.current_environment
ActiveRecord::ConnectionHandling::DEFAULT_ENV.call
end
def self.protected_environment?
ActiveRecord::Base.protected_environments.include?(last_stored_environment) if last_stored_environment
end
def up?
@direction == :up
end
def down?
@direction == :down
end
# Wrap the migration in a transaction only if supported by the adapter.
def ddl_transaction(migration)
if use_transaction?(migration)
Base.transaction { yield }
else
yield
end
end
def use_transaction?(migration)
!migration.disable_ddl_transaction && Base.connection.supports_ddl_transactions?
end
def use_advisory_lock?
Base.connection.supports_advisory_locks?
end
def with_advisory_lock
lock_id = generate_migrator_advisory_lock_id
got_lock = Base.connection.get_advisory_lock(lock_id)
raise ConcurrentMigrationError unless got_lock
load_migrated # reload schema_migrations to be sure it wasn't changed by another process before we got the lock
yield
ensure
Base.connection.release_advisory_lock(lock_id) if got_lock
end
MIGRATOR_SALT = 2053462845
def generate_migrator_advisory_lock_id
db_name_hash = Zlib.crc32(Base.connection.current_database)
MIGRATOR_SALT * db_name_hash
end
end
end
|
module AsyncEndpoint
  # Sidekiq worker that executes a persisted AsyncRequest in the background.
  class AsyncEndpointWorker
    include Sidekiq::Worker

    # Looks up the AsyncRequest by primary key and executes it.
    #
    # @param async_request_id [Integer] id of the AsyncRequest record
    def perform(async_request_id)
      AsyncRequest.find(async_request_id).execute
    end
  end
end
Disable Sidekiq retries (retry: false) for AsyncEndpointWorker
module AsyncEndpoint
  # Sidekiq worker that executes a persisted AsyncRequest in the background.
  # Retries are disabled: a failed request is not automatically re-enqueued.
  class AsyncEndpointWorker
    include Sidekiq::Worker
    sidekiq_options retry: false

    # Looks up the AsyncRequest by primary key and executes it.
    #
    # @param async_request_id [Integer] id of the AsyncRequest record
    def perform(async_request_id)
      AsyncRequest.find(async_request_id).execute
    end
  end
end
|
# Custom Puppet type for managing the membership of a MongoDB replica set.
Puppet::Type.newtype(:mongodb_replset) do
  @doc = 'Manage a MongoDB replicaSet'

  ensurable

  newparam(:name) do
    desc 'The name of the replicaSet'
  end

  newproperty(:members, :array_matching => :all) do
    desc 'Hostnames of members'

    # Membership is compared order-insensitively: the resource is in sync
    # as long as the same set of hostnames is present.
    def insync?(is)
      is.sort == should.sort
    end
  end

  newparam(:arbiter) do
    desc 'Hostname of arbiter'
    defaultto nil
  end

  # Make sure the mongodb package and service are applied before this
  # resource when they are in the catalog.
  autorequire(:package) do
    'mongodb'
  end

  autorequire(:service) do
    'mongodb'
  end
end
Make sure the members list does not contain the arbiter
# Custom Puppet type for managing the membership of a MongoDB replica set.
Puppet::Type.newtype(:mongodb_replset) do
  @doc = 'Manage a MongoDB replicaSet'

  ensurable

  newparam(:name) do
    desc 'The name of the replicaSet'
  end

  newproperty(:members, :array_matching => :all) do
    desc 'Hostnames of members'

    # Reject a member that is also declared as the arbiter: an arbiter
    # must not appear in the regular member list.
    validate do |value|
      # Fix: the parameter instance exposes the enclosing resource as
      # @resource (singular); @resources is undefined here, so the old
      # code raised NoMethodError on nil instead of validating.
      arbiter = @resource[:arbiter]
      if !arbiter.nil? and value.include?(arbiter)
        raise Puppet::Error, "Members shouldn't contain arbiter"
      end
    end

    # Membership is compared order-insensitively: the resource is in sync
    # as long as the same set of hostnames is present.
    def insync?(is)
      is.sort == should.sort
    end
  end

  newparam(:arbiter) do
    desc 'Hostname of arbiter'
    defaultto nil
  end

  # Make sure the mongodb package and service are applied before this
  # resource when they are in the catalog.
  autorequire(:package) do
    'mongodb'
  end

  autorequire(:service) do
    'mongodb'
  end
end
|
require 'cases/helper'
require 'models/company'
require 'models/developer'
require 'models/car'
require 'models/bulb'
# Integration-level tests for #to_param and #cache_key behaviour.
class IntegrationTest < ActiveRecord::TestCase
  fixtures :companies, :developers

  def test_to_param_should_return_string
    assert_kind_of String, Client.first.to_param
  end

  def test_to_param_returns_nil_if_not_persisted
    client = Client.new
    # assert_nil instead of assert_equal(nil, ...): clearer intent and
    # avoids the deprecated nil-equality assertion form.
    assert_nil client.to_param
  end

  def test_to_param_returns_id_if_not_persisted_but_id_is_set
    client = Client.new
    client.id = 1
    assert_equal '1', client.to_param
  end

  def test_cache_key_for_existing_record_is_not_timezone_dependent
    # Save and restore global state so this test cannot leak into later
    # tests and make the suite order-dependent.
    old_tz_aware = ActiveRecord::Base.time_zone_aware_attributes
    ActiveRecord::Base.time_zone_aware_attributes = true
    Time.zone = 'UTC'
    utc_key = Developer.first.cache_key
    Time.zone = 'EST'
    est_key = Developer.first.cache_key
    assert_equal utc_key, est_key
  ensure
    ActiveRecord::Base.time_zone_aware_attributes = old_tz_aware
    Time.zone = 'UTC'
  end

  def test_cache_key_format_for_existing_record_with_updated_at
    dev = Developer.first
    assert_equal "developers/#{dev.id}-#{dev.updated_at.utc.to_s(:nsec)}", dev.cache_key
  end

  def test_cache_key_format_for_existing_record_with_updated_at_and_custom_cache_timestamp_format
    dev = CachedDeveloper.first
    assert_equal "cached_developers/#{dev.id}-#{dev.updated_at.utc.to_s(:number)}", dev.cache_key
  end

  def test_cache_key_changes_when_child_touched
    car = Car.create
    Bulb.create(car: car)
    key = car.cache_key
    car.bulb.touch
    car.reload
    assert_not_equal key, car.cache_key
  end

  def test_cache_key_format_for_existing_record_with_nil_updated_timestamps
    dev = Developer.first
    dev.update_columns(updated_at: nil, updated_on: nil)
    # With no timestamps the cache key falls back to "model/id" only.
    assert_match(/\/#{dev.id}$/, dev.cache_key)
  end

  def test_cache_key_for_updated_on
    dev = Developer.first
    dev.updated_at = nil
    assert_equal "developers/#{dev.id}-#{dev.updated_on.utc.to_s(:nsec)}", dev.cache_key
  end

  def test_cache_key_for_newer_updated_at
    dev = Developer.first
    dev.updated_at += 3600
    assert_equal "developers/#{dev.id}-#{dev.updated_at.utc.to_s(:nsec)}", dev.cache_key
  end

  def test_cache_key_for_newer_updated_on
    dev = Developer.first
    dev.updated_on += 3600
    assert_equal "developers/#{dev.id}-#{dev.updated_on.utc.to_s(:nsec)}", dev.cache_key
  end

  def test_cache_key_format_is_precise_enough
    dev = Developer.first
    key = dev.cache_key
    dev.touch
    assert_not_equal key, dev.cache_key
  end
end
Make tests order-independent by resetting the timezone
require 'cases/helper'
require 'models/company'
require 'models/developer'
require 'models/car'
require 'models/bulb'
# Integration-level tests for #to_param and #cache_key behaviour.
class IntegrationTest < ActiveRecord::TestCase
  fixtures :companies, :developers

  def test_to_param_should_return_string
    assert_kind_of String, Client.first.to_param
  end

  def test_to_param_returns_nil_if_not_persisted
    client = Client.new
    # NOTE(review): assert_nil would be preferred; assert_equal with nil is
    # deprecated under newer minitest versions.
    assert_equal nil, client.to_param
  end

  def test_to_param_returns_id_if_not_persisted_but_id_is_set
    client = Client.new
    client.id = 1
    assert_equal '1', client.to_param
  end

  def test_cache_key_for_existing_record_is_not_timezone_dependent
    # NOTE(review): time_zone_aware_attributes is also mutated here but not
    # restored — only Time.zone is reset below; confirm no other test
    # depends on the original value.
    ActiveRecord::Base.time_zone_aware_attributes = true
    Time.zone = 'UTC'
    utc_key = Developer.first.cache_key
    Time.zone = 'EST'
    est_key = Developer.first.cache_key
    assert_equal utc_key, est_key
  ensure
    # Reset the zone so later tests are order-independent.
    Time.zone = 'UTC'
  end

  def test_cache_key_format_for_existing_record_with_updated_at
    dev = Developer.first
    assert_equal "developers/#{dev.id}-#{dev.updated_at.utc.to_s(:nsec)}", dev.cache_key
  end

  def test_cache_key_format_for_existing_record_with_updated_at_and_custom_cache_timestamp_format
    dev = CachedDeveloper.first
    assert_equal "cached_developers/#{dev.id}-#{dev.updated_at.utc.to_s(:number)}", dev.cache_key
  end

  def test_cache_key_changes_when_child_touched
    car = Car.create
    Bulb.create(car: car)
    key = car.cache_key
    car.bulb.touch
    car.reload
    assert_not_equal key, car.cache_key
  end

  def test_cache_key_format_for_existing_record_with_nil_updated_timestamps
    dev = Developer.first
    dev.update_columns(updated_at: nil, updated_on: nil)
    # With no timestamps the cache key falls back to "model/id" only.
    assert_match(/\/#{dev.id}$/, dev.cache_key)
  end

  def test_cache_key_for_updated_on
    dev = Developer.first
    dev.updated_at = nil
    assert_equal "developers/#{dev.id}-#{dev.updated_on.utc.to_s(:nsec)}", dev.cache_key
  end

  def test_cache_key_for_newer_updated_at
    dev = Developer.first
    dev.updated_at += 3600
    assert_equal "developers/#{dev.id}-#{dev.updated_at.utc.to_s(:nsec)}", dev.cache_key
  end

  def test_cache_key_for_newer_updated_on
    dev = Developer.first
    dev.updated_on += 3600
    assert_equal "developers/#{dev.id}-#{dev.updated_on.utc.to_s(:nsec)}", dev.cache_key
  end

  def test_cache_key_format_is_precise_enough
    dev = Developer.first
    key = dev.cache_key
    dev.touch
    assert_not_equal key, dev.cache_key
  end
end
|
# -*- coding: utf-8 -*-
require 'pedant/rspec/common'
describe "users", :users do
# True when Pedant is configured to target the legacy Ruby users endpoint
# (class-level so it can gate example definitions).
def self.ruby?
  Pedant::Config.ruby_users_endpoint?
end
# Hard-coded: org-association endpoints are assumed to be served by the
# legacy Ruby implementation (drives the 404-vs-405 expectations in the
# examples in this file).
def ruby_org_assoc?
  true
end
let(:public_key_regex) do
# Because of a difference in the OpenSSL library between ruby 1.8.7
# (actually 1.9.2) and 1.9.3, we have to accept multiple patterns here:
/^-----BEGIN (RSA )?PUBLIC KEY-----/
end
let(:private_key_regex) do
/^-----BEGIN (RSA )?PRIVATE KEY-----/
end
# Pedant has configurable test users.
# Selects Pedant users that are marked as associated
let(:default_pedant_user_names) { platform.users.select(&:associate).map(&:name).sort }
let(:default_users_body) { default_pedant_user_names.map { |user| {"user" => {"username" => user} } } }
context "/organizations/<org>/users endpoint" do
let(:request_url) { api_url("users") }
context "GET /organizations/<org>/users" do
let(:users_body) { default_users_body }
context "admin user" do
it "can get org users", :smoke do
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body_exact => users_body
})
end
end
context "default normal user" do
it "can get org users", :smoke do
get(request_url, platform.non_admin_user).should look_like({
:status => 200,
:body_exact => users_body
})
end
end
context "default client" do
it "returns 403" do
get(request_url, platform.non_admin_client).should look_like({
:status => 403
})
end
end
context "outside user" do
it "returns 403" do
get(request_url, outside_user).should look_like({
:status => 403
})
end
end
context "invalid user" do
it "returns 401" do
get(request_url, invalid_user).should look_like({
:status => 401
})
end
end
end # context GET /organizations/<org>/users
context "PUT /organizations/<org>/users" do
context "admin user" do
it "returns 404[ruby]/405[erlang]" do
put(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context PUT /organizations/<org>/users
context "POST /organizations/<org>/users" do
context "admin user" do
# A 405 here would be fine (and is no doubt coming with erlang)
it "returns 404[ruby]/405[erlang]" do
post(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context POST /organizations/<org>/users
context "DELETE /organizations/<org>/users" do
context "admin user" do
# A 405 here would be fine (and is no doubt coming with erlang)
it "returns 404[ruby]/405[erlang]" do
delete(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context DELETE /organizations/<org>/users
end # context /organizations/<org>/users endpoint
context "/organizations/<org>/users/<name>" do
let(:username) { platform.non_admin_user.name }
let(:request_url) { api_url("users/#{username}") }
context "GET /organizations/<org>/users/<name>" do
let(:user_body) do
{
"first_name" => username,
"last_name" => username,
"display_name" => username,
"email" => "#{username}@opscode.com",
"username" => username,
"public_key" => public_key_regex
}
end
context "superuser" do
it "can get user" do
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => user_body
})
end
end
context "admin user" do
it "can get user", :smoke do
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body_exact => user_body
})
end
end
context "default normal user" do
it "can get self", :smoke do
get(request_url, platform.non_admin_user).should look_like({
:status => 200,
:body_exact => user_body
})
end
end
context "default client" do
it "returns 403" do
get(request_url, platform.non_admin_client).should look_like({
:status => 403
})
end
end
context "outside user" do
it "returns 403" do
get(request_url, outside_user).should look_like({
:status => 403
})
end
end
context "invalid user" do
it "returns 401" do
get(request_url, invalid_user).should look_like({
:status => 401
})
end
end
context "when requesting user that doesn't exist" do
let(:username) { "bogus" }
it "returns 404" do
get(request_url, platform.admin_user).should look_like({
:status => 404
})
end
end
end # context GET /organizations/<org>/users/<name>
context "PUT /organizations/<org>/users/<name>" do
context "admin user" do
# A 405 here would be fine (and is no doubt coming with erlang)
it "returns 404[ruby]/405[erlang]" do
put(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context PUT /organizations/<org>/users/<name>
context "POST /organizations/<org>/users/<name>" do
context "admin user" do
# A 405 here would be fine (and is no doubt coming with erlang)
it "returns 404[ruby]/405[erlang]" do
post(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context POST /organizations/<org>/users/<name>
context "DELETE /organizations/<org>/users/<name>" do
let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
let(:test_user) { platform.create_user(username) }
before :each do
platform.associate_user_with_org(org, test_user)
platform.add_user_to_group(org, test_user, "users")
end
after :each do
delete("#{platform.server}/users/#{username}", platform.superuser)
end
context "admin user" do
it "can delete user", :smoke do
delete(request_url, platform.admin_user).should look_like({
:status => 200
})
get(api_url("users"), platform.admin_user).should look_like({
:status => 200,
:body_exact => default_users_body })
end
end
context "non-admin user" do
it "returns 403" do
pending "actually returns 400" do # Wut?
delete(request_url, platform.non_admin_user).should look_like({
:status => 403
})
get(api_url("users"), platform.admin_user).should look_like({
:status => 200,
:body_exact => [
{"user" => {"username" => platform.admin_user.name}},
{"user" => {"username" => platform.non_admin_user.name}},
{"user" => {"username" => username}}
]})
end
end
end
context "default client" do
it "returns 403" do
pending "actually returns 400" do # Wut?
delete(request_url, platform.non_admin_client).should look_like({
:status => 403
})
get(api_url("users"), platform.admin_user).should look_like({
:status => 200,
:body_exact => [
{"user" => {"username" => platform.admin_user.name}},
{"user" => {"username" => platform.non_admin_user.name}},
{"user" => {"username" => username}}
]})
end
end
end
context "when user doesn't exist" do
let(:request_url) { api_url("users/bogus") }
it "returns 404" do
delete(request_url, platform.non_admin_client).should look_like({
:status => 404
})
get(api_url("users"), platform.admin_user).should look_like({
:status => 200,
:body_exact => default_users_body + [ {"user" => {"username" => username}} ]})
end
end
end # context DELETE /organizations/<org>/users/<name>
end # context /organizations/<org>/users/<name>
context "/users endpoint" do
let(:request_url) { "#{platform.server}/users" }
context "GET /users" do
let(:users_body) do
{
# There are other users, but these are ours, so they should always be
# somewhere in the userspace soup.
"pivotal" => "#{request_url}/pivotal",
platform.bad_user.name => "#{request_url}/#{platform.bad_user.name}",
platform.admin_user.name => "#{request_url}/#{platform.admin_user.name}",
platform.non_admin_user.name => "#{request_url}/#{platform.non_admin_user.name}",
}
end
let(:empty_users_body) do
{}
end
let(:filtered_users_body) do
{
platform.non_admin_user.name => "#{request_url}/#{platform.non_admin_user.name}"
}
end
context "superuser" do
it "can get all users", :smoke do
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_body
})
end
it "returns no users when filtering by non-existing email", :smoke do
get("#{request_url}?email=somenonexistingemail@somewhere.com", platform.superuser).should look_like({
:status => 200,
:body_exact => empty_users_body
})
end
it "returns a single user when filtering by that user's email address", :smoke do
# Let's get a known user and mail address.
response = get("#{request_url}/#{platform.non_admin_user.name}", platform.superuser)
email = JSON.parse(response)["email"]
get("#{request_url}?email=#{email}", platform.superuser).should look_like({
:status => 200,
:body_exact => filtered_users_body
})
end
it "returns a verbose list of users upon request" do
body = JSON.parse(get("#{request_url}?verbose=true", platform.superuser))
[ platform.non_admin_user.name, platform.admin_user.name, platform.superuser.name ].each do |name|
data = body[name]
data.should_not be nil
data.key?("first_name").should be true
data.key?("last_name").should be true
data.key?("email").should be true
end
end
end
context "admin user" do
it "returns 403", :smoke do
get(request_url, platform.admin_user).should look_like({
:status => 403
})
end
end
context "default normal user" do
it "returns 403" do
get(request_url, platform.non_admin_user).should look_like({
:status => 403
})
end
end
context "default client" do
it "returns 401" do
get(request_url, platform.non_admin_client).should look_like({
:status => 401
})
end
end
context "outside user" do
it "returns 403" do
get(request_url, outside_user).should look_like({
:status => 403
})
end
end
context "invalid user" do
it "returns 401" do
get(request_url, invalid_user).should look_like({
:status => 401
})
end
end
end # context GET /users
context "PUT /users" do
context "admin user" do
# A 405 here would be fine (and is no doubt coming with erlang)
it "returns 404[ruby]/405[erlang]" do
put(request_url, platform.admin_user).should look_like({
:status => ruby? ? 404 : 405
})
end
end
end # context PUT /users
context "POST /users" do
let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
let(:user_url) { "#{request_url}/#{username}" }
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
let(:response_body) do
{
"uri" => "#{platform.server}/users/#{username}",
"private_key" => private_key_regex
}
end
let(:users_with_new_user) do
{
# There are other users, but these are ours, so they should always be
# somewhere in the userspace soup:
"pivotal" => "#{request_url}/pivotal",
platform.bad_user.name => "#{request_url}/#{platform.bad_user.name}",
platform.admin_user.name => "#{request_url}/#{platform.admin_user.name}",
platform.non_admin_user.name => "#{request_url}/#{platform.non_admin_user.name}",
# As should our test user:
username => user_url
}
end
after :each do
# For the test with a space: we can't create it -- but also can't delete it,
# ne? No naked spaces allowed in URLs.
if (username !~ / /)
delete("#{platform.server}/users/#{username}", platform.superuser)
end
end
context "superuser" do
it "can create new user", :smoke do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
get(user_url, platform.superuser).should look_like({
:status => 200
})
end
end
context "admin user" do
it "returns 403", :smoke do
post(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 403
})
get(user_url, platform.superuser).should look_like({
:status => 404
})
end
end
context "creating users" do
context "without password" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with external auth enabled" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"external_authentication_uid" => username
}
end
it "returns 201 when password is not provided" do
pending "work in flight on users endpoint" do
post(request_url, platform.superuser, :payload => request_body).should look_like({
:status => 201
})
end
end
it "returns 201 when password is provided" do
final_body = request_body.merge( { "password" => "foo bar"} )
post(request_url, platform.superuser, :payload => final_body).should look_like({
:status => 201
})
end
end
context "without display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "without first and last name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"display_name" => username,
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "without email" do
let(:request_body) do
{
"username" => username,
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "without email but with external auth enabled" do
let(:request_body) do
{
"username" => username,
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger",
"external_authentication_uid" => username
}
end
it "returns 201" do
pending "work in flight on users endpoint" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201
})
end
end
end
context "without username" do
let(:request_body) do
{
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with invalid email" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@foo @ bar ahhh it's eating my eyes",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with spaces in names" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Yi Ling",
"last_name" => "van Dijk",
"display_name" => username,
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with bogus field" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger",
"bogus" => "look at me"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with space in display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "some user",
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with UTF-8 in display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "超人",
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with UTF-8 in first/last name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Guðrún",
"last_name" => "Guðmundsdóttir",
"display_name" => username,
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with capitalized username" do
let(:username) { "Test-#{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with space in username" do
let(:username) { "test #{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "when user already exists" do
  let(:request_body) do
    { "username"     => username,
      "email"        => "#{username}@opscode.com",
      "first_name"   => username,
      "last_name"    => username,
      "display_name" => username,
      "password"     => "badger badger" }
  end

  it "returns 409" do
    # The first creation succeeds and the user shows up in the index...
    post(request_url, platform.superuser, :payload => request_body).
      should look_like({ :status => 201, :body_exact => response_body })
    get(request_url, platform.superuser).
      should look_like({ :status => 200, :body => users_with_new_user })
    # ...an identical second creation conflicts.
    post(request_url, platform.superuser, :payload => request_body).
      should look_like({ :status => 409 })
  end
end
end # context creating users
end # context POST /users
context "DELETE /users" do
  context "admin user" do
    # Deleting the whole collection is unsupported; the ruby server
    # answered 404 where the erlang server answers 405.
    it "returns 404[ruby]/405[erlang]" do
      expected = ruby? ? 404 : 405
      delete(request_url, platform.admin_user).should look_like({ :status => expected })
    end
  end
end # context DELETE /users
end # context /users endpoint
context "/users/<name> endpoint" do
let(:username) { platform.non_admin_user.name }
let(:request_url) { "#{platform.server}/users/#{username}" }
context "GET /users/<name>" do
# Exact body expected when fetching the default non-admin user.
let(:user_body) do
{
"first_name" => username,
"last_name" => username,
"display_name" => username,
"email" => "#{username}@opscode.com",
"username" => username,
"public_key" => public_key_regex
}
end
# The superuser can read any user.
context "superuser" do
it "can get user" do
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => user_body
})
end
end
# Org admins can read users as well.
context "admin user" do
it "can get user" do
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body_exact => user_body
})
end
end
# A user can always read their own record.
context "default normal user" do
it "can get self" do
get(request_url, platform.non_admin_user).should look_like({
:status => 200,
:body_exact => user_body
})
end
end
# API clients cannot authenticate against the global users endpoint.
context "default client" do
it "returns 401" do
get(request_url, platform.non_admin_client).should look_like({
:status => 401
})
end
end
# A valid user outside the org is authenticated but not authorized.
context "outside user" do
it "returns 403" do
get(request_url, outside_user).should look_like({
:status => 403
})
end
end
# Bad credentials fail authentication outright.
context "invalid user" do
it "returns 401" do
get(request_url, invalid_user).should look_like({
:status => 401
})
end
end
context "when user doesn't exist" do
let(:username) { "bogus" }
it "returns 404" do
get(request_url, platform.superuser).should look_like({
:status => 404
})
end
end
end # context GET /users/<name>
context "PUT /users/<name> when user created w/ external auth enabled" do
# The whole context is pending until the users-endpoint rework lands.
pending "work in flight on users endpoint" do
let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"external_authentication_uid" => username
}
end
# Create an externally-authenticated user: note no email and no
# password in the creation payload.
before :each do
response = post("#{platform.server}/users", platform.superuser,
:payload => {
"username" => username,
"first_name" => username,
"last_name" => username,
"display_name" => username,
"external_authentication_uid" => username
})
response.should look_like({ :status => 201 })
end
after :each do
delete("#{platform.server}/users/#{username}", platform.superuser)
end
# Updating such a user without email and without re-sending the
# external auth uid should still be accepted.
context "without email and without specifying external auth uid" do
let(:request_body) do
{
"username" => username,
"display_name" => username
}
end
it "returns 200" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
end
end
end
end
context "PUT /users/<name>" do
let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
# Baseline update payload (changes display_name, keeps the password).
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger"
}
end
# Same payload plus an external authentication uid (privileged field).
let(:request_body_with_ext_id) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"external_authentication_uid" => "bob"
}
end
# Same payload plus recovery authentication (privileged field).
let(:request_body_with_recovery) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"recovery_authentication_enabled" => true
}
end
# Expected GET body after a successful baseline update.
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"public_key" => public_key_regex
}
end
# A fixed, well-formed RSA public key used by the "client supplies a
# public key" scenarios below.
let(:input_public_key) do
<<EOF
-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+h5g/r/qaFH6OdYOG0OO
2/WpLb9qik7SPFmcOvujqZzLO2yv4kXwuvncx/ADHdkobaoFn3FE84uzIVCoSeaj
xTMeuTcPr5y+wsVqCYMkwIJpPezbwcrErt14BvD9BPN0UDyOJZW43ZN4iIw5xW8y
lQKuZtTNsm7FoznG+WsmRryTM3OjOrtDYjN/JHwDfrZZtVu7pT8FYnnz0O8j2zEf
9NALhpS7oDCf+VSo6UUk/w5m4/LpouDxT2dKBwQOuA8pzXd5jHP6rYdbHkroOUqx
Iy391UeSCiPVHcAN82sYV7R2MnUYj6b9Fev+62FKrQ6v9QYZcyljh6hldmcbmABy
EQIDAQAB
-----END PUBLIC KEY-----
EOF
end
# Create the target user fresh for every example and remember the
# server-generated private key for comparison in the key tests.
before :each do
response = post("#{platform.server}/users", platform.superuser,
:payload => {
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
})
# Verify required preconditions are in place
response.should look_like({
:status => 201,
:body => { "private_key" => private_key_regex }})
@original_private_key = JSON.parse(response.body)["private_key"]
end
after :each do
delete("#{platform.server}/users/#{username}", platform.superuser)
@original_private_key = nil
end
context "superuser" do
  # The superuser may modify any user, including the privileged
  # recovery and external-authentication fields.
  it "can modify user", :smoke do
    put(request_url, platform.superuser, :payload => request_body).
      should look_like({ :status => 200 })
    get(request_url, platform.superuser).
      should look_like({ :status => 200, :body_exact => modified_user })
  end

  it "can enable recovery" do
    put(request_url, platform.superuser, :payload => request_body_with_recovery).
      should look_like({ :status => 200 })
  end

  it "can set external id" do
    put(request_url, platform.superuser, :payload => request_body_with_ext_id).
      should look_like({ :status => 200 })
  end
end
context "admin user" do
  # Org admins have no rights over global user records.
  it "returns 403", :smoke do
    put(request_url, platform.admin_user, :payload => request_body).
      should look_like({ :status => 403 })
  end

  it "cannot enable recovery" do
    put(request_url, platform.admin_user, :payload => request_body_with_recovery).
      should look_like({ :status => 403 })
  end

  it "cannot set external id" do
    put(request_url, platform.admin_user, :payload => request_body_with_ext_id).
      should look_like({ :status => 403 })
  end
end
context "owning user" do
  # NOTE(review): within this PUT context `username` names a freshly
  # created test user (see the enclosing before :each), so
  # platform.non_admin_user is NOT the owner of the account being
  # modified — every request below is a cross-user update and is
  # rejected with 403. The original example description
  # "can modify its own account" contradicted the asserted 403 status;
  # it is renamed here to match the assertion.
  it "cannot modify the account (returns 403)" do
    put(request_url, platform.non_admin_user,
        :payload => request_body).should look_like({
      :status => 403
    })
  end
  it "cannot enable recovery" do
    put(request_url, platform.non_admin_user,
        :payload => request_body_with_recovery).should look_like({
      :status => 403
    })
  end
  it "cannot set external id" do
    put(request_url, platform.non_admin_user,
        :payload => request_body_with_ext_id).should look_like({
      :status => 403
    })
  end
end
context "default client" do
  # API clients cannot authenticate against the global users endpoint.
  it "returns 401" do
    put(request_url, platform.non_admin_client, :payload => request_body).
      should look_like({ :status => 401 })
  end
end
context "when modifying non-existent user" do
  let(:request_url) { "#{platform.server}/users/bogus" }

  it "returns 404" do
    put(request_url, platform.superuser, :payload => request_body).
      should look_like({ :status => 404 })
  end
end
context "modifying users" do
# Password is optional on update; remaining fields are still applied.
context "without password" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name"
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => modified_user
})
end
end
# Unknown fields are silently ignored rather than rejected.
context "with bogus field" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"bogus" => "not a badger"
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => modified_user
})
end
end
# display_name is required on update.
context "without display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
# first/last name are optional; note :body (subset) rather than
# :body_exact is used here.
context "without first and last name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"display_name" => "new name",
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"display_name" => "new name",
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
# email is required for password-authenticated users.
context "without email" do
let(:request_body) do
{
"username" => username,
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
# ...but an externally-authenticated user may omit email (pending).
context "without email but with external auth enabled" do
let(:request_body) do
{
"username" => username,
"display_name" => username,
"external_authentication_uid" => username
}
end
it "returns 200" do
pending "work in flight on users endpoint" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
end
end
end
# username is required in the payload.
context "without username" do
let(:request_body) do
{
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
# Malformed email addresses are rejected.
context "with invalid email" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@foo @ bar no go",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
# Spaces are legal in first/last names (unlike usernames).
context "with spaces in names" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Ren Kai",
"last_name" => "de Boers",
"display_name" => username,
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Ren Kai",
"last_name" => "de Boers",
"display_name" => username,
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
# Spaces are legal in display_name too.
context "with space in display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "some user",
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "some user",
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
# UTF-8 display_name round-trips unchanged.
context "with UTF-8 in display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "ギリギリ",
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "ギリギリ",
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
# UTF-8 first/last names round-trip unchanged.
context "with UTF-8 in first/last name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Eliška",
"last_name" => "Horáčková",
"display_name" => username,
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Eliška",
"last_name" => "Horáčková",
"display_name" => username,
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
# A password change takes effect: the new password verifies via the
# internal /verify_password endpoint.
context "with new password provided" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "bidgerbidger"
}
end
it "changes the password" do
put_response = put(request_url, platform.superuser, :payload => request_body)
put_response.should look_like({ :status => 200 })
response = post("#{platform.server}/verify_password", platform.superuser,
:payload => { 'user_id_to_verify' => username, 'password' => 'bidgerbidger' })
JSON.parse(response.body)["password_is_correct"].should eq(true)
end
end
# A caller-supplied public key is stored and returned on later GETs.
# (Pending on the ruby implementation.)
context "with public key provided" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"public_key" => input_public_key
}
end
it "accepts the public key and subsequently responds with it" do
pending("Pending in ruby", :if => ruby?) do
put_response = put(request_url, platform.superuser, :payload => request_body)
put_response.should look_like({
:status => 200,
:body=> {
"uri" => request_url
},
})
get_response = get(request_url, platform.superuser)
new_public_key = JSON.parse(get_response.body)["public_key"]
new_public_key.should eq(input_public_key)
end
end
end
# private_key => true makes the server regenerate the keypair and
# return the fresh private key in the PUT response.
context "with private_key = true" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"private_key" => true
}
end
it "returns a new private key, changes the public key" do
original_response = get(request_url, platform.superuser)
original_public_key = JSON.parse(original_response.body)["public_key"]
put_response = put(request_url, platform.superuser, :payload => request_body)
put_response.should look_like({
:status => 200,
:body_exact => {
"uri" => request_url,
"private_key" => private_key_regex
},
})
new_private_key = JSON.parse(put_response.body)["private_key"]
new_private_key.should_not eq(@original_private_key)
new_response = get(request_url, platform.superuser)
new_public_key = JSON.parse(new_response.body)["public_key"]
new_public_key.should_not eq(original_public_key)
end
end
# When both private_key => true and a public_key are supplied, key
# generation wins: the stored public key matches neither the supplied
# key nor the previous one.
context "with private_key = true and a public_key" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"private_key" => true,
"public_key" => input_public_key
}
end
it "returns a new private key, changes the public key" do
original_response = get(request_url, platform.superuser)
original_public_key = JSON.parse(original_response.body)["public_key"]
put_response = put(request_url, platform.superuser, :payload => request_body)
put_response.should look_like({
:status => 200,
:body_exact => {
"uri" => request_url,
"private_key" => private_key_regex
},
})
new_private_key = JSON.parse(put_response.body)["private_key"]
new_private_key.should_not eq(@original_private_key)
new_response = get(request_url, platform.superuser)
new_public_key = JSON.parse(new_response.body)["public_key"]
new_public_key.should_not eq(input_public_key)
new_public_key.should_not eq(original_public_key)
end
end
end # context modifying users
context "renaming users" do
let(:new_name) { "test2-#{Time.now.to_i}-#{Process.pid}" }
let(:new_request_url) { "#{platform.server}/users/#{new_name}" }
# A valid rename answers 201 with a Location header pointing at the
# new URI; the old URI then 404s and the new one serves the user.
context "changing username" do
let(:request_body) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"public_key" => public_key_regex
}
end
after :each do
delete("#{platform.server}/users/#{new_name}", platform.superuser)
end
context "and the username is valid" do
# Ideally these would be discrete tests: can we put it and get the correct response?
# But the top-level PUT /users/:id context causes us some problems with its before :each
# behavior of recreating users.
it "updates the user to the new name and provides a new uri" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => { "uri" => new_request_url },
:headers => [ "Location" => new_request_url ]
})
# it "makes the user unavailable at the old URI"
get(request_url, platform.superuser).should look_like({
:status => 404
})
# it "makes the user available at the new URI"
get(new_request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => modified_user
})
end
end
end
# Invalid new names are rejected with 400 and the user is untouched.
context "changing username with UTF-8" do
let(:new_name) { "テスト-#{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
# it "does not process any change to username" do
get(request_url, platform.superuser).should look_like({
:status => 200
})
end
end
context "changing username with spaces" do
let(:new_name) { "test #{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser, :payload => request_body).should look_like({
:status => 400
})
# it "does not process any change to username" do
get(request_url, platform.superuser).should look_like({
:status => 200
})
end
end
context "changing username with capital letters" do
let(:new_name) { "Test-#{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
# it "does not process any change to username" do
get(request_url, platform.superuser).should look_like({
:status => 200
})
end
end
# Renaming onto an existing username should conflict (409); the ruby
# implementation answered 403, hence the pending wrapper.
context "new name already exists" do
let(:request_body) do
{
"username" => platform.non_admin_user.name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
let(:unmodified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"public_key" => public_key_regex
}
end
it "returns 409" do
pending("actually returns 403", :if => ruby?) do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 409
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => unmodified_user
})
end
end
end
end # context renaming users
end # context PUT /users/<name>
context "POST /users/<name>" do
  context "admin user" do
    # POSTing to a named user is unsupported; a 405 would also be fine
    # (and is no doubt coming with erlang).
    it "returns 404[ruby]/405[erlang]" do
      expected = ruby? ? 404 : 405
      post(request_url, platform.admin_user).should look_like({ :status => expected })
    end
  end
end # context POST /users/<name>
context "DELETE /users/<name>" do
let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
# Create a disposable target user before each example.
before :each do
post("#{platform.server}/users", platform.superuser,
:payload => {
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}).should look_like({
:status => 201,
:body_exact => {
"uri" => "#{platform.server}/users/#{username}",
"private_key" => private_key_regex
}})
end
# Idempotent cleanup in case the example did not delete the user.
after :each do
delete("#{platform.server}/users/#{username}", platform.superuser)
end
context "superuser" do
it "can delete user" do
delete(request_url, platform.superuser).should look_like({
:status => 200
})
# Similar to rename, the existing before :each interferes with making this into a separate test
# because it recreates the user.
# it "did delete the user"
get(request_url, platform.superuser).should look_like({
:status => 404
})
end
end
# Org admins may not delete global users.
context "admin user" do
it "returns 403" do
delete(request_url, platform.admin_user).should look_like({
:status => 403
})
# it "did not delete user" do
get("#{platform.server}/users/#{username}",
platform.superuser).should look_like({
:status => 200
})
end
end
# API clients cannot authenticate here at all.
context "default client" do
it "returns 401" do
delete(request_url, platform.non_admin_client).should look_like({
:status => 401
})
# it "did not delete user" do
get("#{platform.server}/users/#{username}",
platform.superuser).should look_like({
:status => 200
})
end
end
context "when deleting a non-existent user" do
let(:request_url) { "#{platform.server}/users/bogus" }
it "returns 404" do
delete(request_url, platform.superuser).should look_like({
:status => 404
})
end
end
end # context DELETE /users/<name>
end # context /users/<name> endpoint
context "POST /verify_password" do
  let(:request_url) { "#{platform.server}/verify_password" }

  context "when the webui superuser is specified as the user" do
    let(:requestor) { superuser }
    let(:request_body) do
      {
        user_id_to_verify: superuser.name,
        password: "DOES_NOT_MATTER_FOR_TEST",
      }
    end

    # Password authentication as the superuser is refused outright,
    # regardless of the password supplied.
    it "should return Forbidden" do
      response = post(request_url, superuser, :payload => request_body)
      response.should look_like(
        :status => 403,
        :body => {
          "error" => "Password authentication as the superuser is prohibited."
        }
      )
    end
  end # context when the webui superuser is specified as the user
end # context POST /verify_password
end # describe users
Ensure that when a PUT modifies the password, the new password works.
Conversely, ensure that when a PUT does not modify the password,
the old one continues to work. Also removed the password field from
update payloads where the test did not need it.
Finally, ensure that when a password is updated, the original password
produces the expected 403 response.
# -*- coding: utf-8 -*-
require 'pedant/rspec/common'
describe "users", :users do
# True when testing against the legacy Ruby implementation of the
# /users endpoint; drives the 404-vs-405 expectations in this spec.
def self.ruby?
Pedant::Config.ruby_users_endpoint?
end
def ruby_org_assoc?
# Hard-coded true: org-association endpoints are (still) served by the
# Ruby implementation. NOTE(review): update or remove once they move
# to the Erlang server.
true
end
# Loose match on PEM headers only; see note below about OpenSSL output
# differences across Ruby versions.
let(:public_key_regex) do
# Because of a difference in the OpenSSL library between ruby 1.8.7
# (actually 1.9.2) and 1.9.3, we have to accept multiple patterns here:
/^-----BEGIN (RSA )?PUBLIC KEY-----/
end
let(:private_key_regex) do
/^-----BEGIN (RSA )?PRIVATE KEY-----/
end
# Pedant has configurable test users.
# Selects Pedant users that are marked as associated
let(:default_pedant_user_names) { platform.users.select(&:associate).map(&:name).sort }
# Expected org-user index body: one {"user" => {"username" => ...}} per user.
let(:default_users_body) { default_pedant_user_names.map { |user| {"user" => {"username" => user} } } }
context "/organizations/<org>/users endpoint" do
let(:request_url) { api_url("users") }
# Listing org users: any org member (admin or not) may read the list;
# clients and non-members may not.
context "GET /organizations/<org>/users" do
let(:users_body) { default_users_body }
context "admin user" do
it "can get org users", :smoke do
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body_exact => users_body
})
end
end
context "default normal user" do
it "can get org users", :smoke do
get(request_url, platform.non_admin_user).should look_like({
:status => 200,
:body_exact => users_body
})
end
end
context "default client" do
it "returns 403" do
get(request_url, platform.non_admin_client).should look_like({
:status => 403
})
end
end
context "outside user" do
it "returns 403" do
get(request_url, outside_user).should look_like({
:status => 403
})
end
end
context "invalid user" do
it "returns 401" do
get(request_url, invalid_user).should look_like({
:status => 401
})
end
end
end # context GET /organizations/<org>/users
# PUT/POST/DELETE on the collection are unsupported.
context "PUT /organizations/<org>/users" do
context "admin user" do
it "returns 404[ruby]/405[erlang]" do
put(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context PUT /organizations/<org>/users
context "POST /organizations/<org>/users" do
context "admin user" do
# A 405 here would be fine (and is no doubt coming with erlang)
it "returns 404[ruby]/405[erlang]" do
post(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context POST /organizations/<org>/users
context "DELETE /organizations/<org>/users" do
context "admin user" do
# A 405 here would be fine (and is no doubt coming with erlang)
it "returns 404[ruby]/405[erlang]" do
delete(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context DELETE /organizations/<org>/users
end # context /organizations/<org>/users endpoint
context "/organizations/<org>/users/<name>" do
let(:username) { platform.non_admin_user.name }
let(:request_url) { api_url("users/#{username}") }
# Reading a single org user: org members may; clients/outsiders may not.
context "GET /organizations/<org>/users/<name>" do
let(:user_body) do
{
"first_name" => username,
"last_name" => username,
"display_name" => username,
"email" => "#{username}@opscode.com",
"username" => username,
"public_key" => public_key_regex
}
end
context "superuser" do
it "can get user" do
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => user_body
})
end
end
context "admin user" do
it "can get user", :smoke do
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body_exact => user_body
})
end
end
context "default normal user" do
it "can get self", :smoke do
get(request_url, platform.non_admin_user).should look_like({
:status => 200,
:body_exact => user_body
})
end
end
context "default client" do
it "returns 403" do
get(request_url, platform.non_admin_client).should look_like({
:status => 403
})
end
end
context "outside user" do
it "returns 403" do
get(request_url, outside_user).should look_like({
:status => 403
})
end
end
context "invalid user" do
it "returns 401" do
get(request_url, invalid_user).should look_like({
:status => 401
})
end
end
context "when requesting user that doesn't exist" do
let(:username) { "bogus" }
it "returns 404" do
get(request_url, platform.admin_user).should look_like({
:status => 404
})
end
end
end # context GET /organizations/<org>/users/<name>
# PUT/POST on a named org user are unsupported.
context "PUT /organizations/<org>/users/<name>" do
context "admin user" do
# A 405 here would be fine (and is no doubt coming with erlang)
it "returns 404[ruby]/405[erlang]" do
put(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context PUT /organizations/<org>/users/<name>
context "POST /organizations/<org>/users/<name>" do
context "admin user" do
# A 405 here would be fine (and is no doubt coming with erlang)
it "returns 404[ruby]/405[erlang]" do
post(request_url, platform.admin_user).should look_like({
:status => ruby_org_assoc? ? 404 : 405
})
end
end
end # context POST /organizations/<org>/users/<name>
# DELETE dissociates the user from the org (the global user record is
# cleaned up separately in after :each).
context "DELETE /organizations/<org>/users/<name>" do
let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
let(:test_user) { platform.create_user(username) }
before :each do
platform.associate_user_with_org(org, test_user)
platform.add_user_to_group(org, test_user, "users")
end
after :each do
delete("#{platform.server}/users/#{username}", platform.superuser)
end
context "admin user" do
it "can delete user", :smoke do
delete(request_url, platform.admin_user).should look_like({
:status => 200
})
get(api_url("users"), platform.admin_user).should look_like({
:status => 200,
:body_exact => default_users_body })
end
end
# Expected 403 for non-admin/client; server actually answers 400,
# hence the pending wrappers.
context "non-admin user" do
it "returns 403" do
pending "actually returns 400" do # Wut?
delete(request_url, platform.non_admin_user).should look_like({
:status => 403
})
get(api_url("users"), platform.admin_user).should look_like({
:status => 200,
:body_exact => [
{"user" => {"username" => platform.admin_user.name}},
{"user" => {"username" => platform.non_admin_user.name}},
{"user" => {"username" => username}}
]})
end
end
end
context "default client" do
it "returns 403" do
pending "actually returns 400" do # Wut?
delete(request_url, platform.non_admin_client).should look_like({
:status => 403
})
get(api_url("users"), platform.admin_user).should look_like({
:status => 200,
:body_exact => [
{"user" => {"username" => platform.admin_user.name}},
{"user" => {"username" => platform.non_admin_user.name}},
{"user" => {"username" => username}}
]})
end
end
end
context "when user doesn't exist" do
let(:request_url) { api_url("users/bogus") }
it "returns 404" do
delete(request_url, platform.non_admin_client).should look_like({
:status => 404
})
get(api_url("users"), platform.admin_user).should look_like({
:status => 200,
:body_exact => default_users_body + [ {"user" => {"username" => username}} ]})
end
end
end # context DELETE /organizations/<org>/users/<name>
end # context /organizations/<org>/users/<name>
context "/users endpoint" do
let(:request_url) { "#{platform.server}/users" }
context "GET /users" do
let(:users_body) do
{
# There are other users, but these are ours, so they should always be
# somewhere in the userspace soup.
"pivotal" => "#{request_url}/pivotal",
platform.bad_user.name => "#{request_url}/#{platform.bad_user.name}",
platform.admin_user.name => "#{request_url}/#{platform.admin_user.name}",
platform.non_admin_user.name => "#{request_url}/#{platform.non_admin_user.name}",
}
end
let(:empty_users_body) do
{}
end
let(:filtered_users_body) do
{
platform.non_admin_user.name => "#{request_url}/#{platform.non_admin_user.name}"
}
end
# Only the superuser may list all users; filtering and verbose output
# are superuser-only features as well.
context "superuser" do
it "can get all users", :smoke do
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_body
})
end
it "returns no users when filtering by non-existing email", :smoke do
get("#{request_url}?email=somenonexistingemail@somewhere.com", platform.superuser).should look_like({
:status => 200,
:body_exact => empty_users_body
})
end
it "returns a single user when filtering by that user's email address", :smoke do
# Let's get a known user and mail address.
# NOTE(review): parses the response object directly rather than
# response.body (as used elsewhere in this file) — relies on the HTTP
# client's response being string-like; confirm against the client lib.
response = get("#{request_url}/#{platform.non_admin_user.name}", platform.superuser)
email = JSON.parse(response)["email"]
get("#{request_url}?email=#{email}", platform.superuser).should look_like({
:status => 200,
:body_exact => filtered_users_body
})
end
it "returns a verbose list of users upon request" do
body = JSON.parse(get("#{request_url}?verbose=true", platform.superuser))
[ platform.non_admin_user.name, platform.admin_user.name, platform.superuser.name ].each do |name|
data = body[name]
data.should_not be nil
data.key?("first_name").should be true
data.key?("last_name").should be true
data.key?("email").should be true
end
end
end
context "admin user" do
it "returns 403", :smoke do
get(request_url, platform.admin_user).should look_like({
:status => 403
})
end
end
context "default normal user" do
it "returns 403" do
get(request_url, platform.non_admin_user).should look_like({
:status => 403
})
end
end
context "default client" do
it "returns 401" do
get(request_url, platform.non_admin_client).should look_like({
:status => 401
})
end
end
context "outside user" do
it "returns 403" do
get(request_url, outside_user).should look_like({
:status => 403
})
end
end
context "invalid user" do
it "returns 401" do
get(request_url, invalid_user).should look_like({
:status => 401
})
end
end
end # context GET /users
# PUT on the /users collection is not a supported operation.
context "PUT /users" do
  context "admin user" do
    # A 405 here would be fine (and is no doubt coming with erlang)
    it "returns 404[ruby]/405[erlang]" do
      put(request_url, platform.admin_user).should look_like({
          :status => ruby? ? 404 : 405
        })
    end
  end
end # context PUT /users
context "POST /users" do
let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
let(:user_url) { "#{request_url}/#{username}" }
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
let(:response_body) do
{
"uri" => "#{platform.server}/users/#{username}",
"private_key" => private_key_regex
}
end
let(:users_with_new_user) do
{
# There are other users, but these are ours, so they should always be
# somewhere in the userspace soup:
"pivotal" => "#{request_url}/pivotal",
platform.bad_user.name => "#{request_url}/#{platform.bad_user.name}",
platform.admin_user.name => "#{request_url}/#{platform.admin_user.name}",
platform.non_admin_user.name => "#{request_url}/#{platform.non_admin_user.name}",
# As should our test user:
username => user_url
}
end
after :each do
# For the test with a space: we can't create it -- but also can't delete it,
# ne? No naked spaces allowed in URLs.
if (username !~ / /)
delete("#{platform.server}/users/#{username}", platform.superuser)
end
end
context "superuser" do
it "can create new user", :smoke do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
get(user_url, platform.superuser).should look_like({
:status => 200
})
end
end
context "admin user" do
it "returns 403", :smoke do
post(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 403
})
get(user_url, platform.superuser).should look_like({
:status => 404
})
end
end
context "creating users" do
context "without password" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with external auth enabled" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"external_authentication_uid" => username
}
end
it "returns 201 when password is not provided" do
pending "work in flight on users endpoint" do
post(request_url, platform.superuser, :payload => request_body).should look_like({
:status => 201
})
end
end
it "returns 201 when password is provided" do
final_body = request_body.merge( { "password" => "foo bar"} )
post(request_url, platform.superuser, :payload => final_body).should look_like({
:status => 201
})
end
end
context "without display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "without first and last name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"display_name" => username,
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "without email" do
let(:request_body) do
{
"username" => username,
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "without email but with external auth enabled" do
let(:request_body) do
{
"username" => username,
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger",
"external_authentication_uid" => username
}
end
it "returns 201" do
pending "work in flight on users endpoint" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201
})
end
end
end
context "without username" do
let(:request_body) do
{
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with invalid email" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@foo @ bar ahhh it's eating my eyes",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with spaces in names" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Yi Ling",
"last_name" => "van Dijk",
"display_name" => username,
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with bogus field" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger",
"bogus" => "look at me"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with space in display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "some user",
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with UTF-8 in display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "超人",
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with UTF-8 in first/last name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Guðrún",
"last_name" => "Guðmundsdóttir",
"display_name" => username,
"password" => "badger badger"
}
end
it "can create new user" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
end
end
context "with capitalized username" do
let(:username) { "Test-#{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with space in username" do
let(:username) { "test #{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "when user already exists" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 409" do
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => response_body
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => users_with_new_user
})
post(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 409
})
end
end
end # context creating users
end # context POST /users
# DELETE on the /users collection is not a supported operation.
context "DELETE /users" do
  context "admin user" do
    # Ruby server 404s on the unrouted verb; Erlang returns a proper 405.
    it "returns 404[ruby]/405[erlang]" do
      delete(request_url, platform.admin_user).should look_like({
          :status => ruby? ? 404 : 405
        })
    end
  end
end # context DELETE /users
end # context /users endpoint
context "/users/<name> endpoint" do
let(:username) { platform.non_admin_user.name }
let(:request_url) { "#{platform.server}/users/#{username}" }
# Fetching a single user record: readable by the superuser, org admins,
# and the user themselves; clients and invalid credentials get 401,
# users outside the org get 403.
context "GET /users/<name>" do
  # Exact expected representation of the user record.
  let(:user_body) do
    {
      "first_name" => username,
      "last_name" => username,
      "display_name" => username,
      "email" => "#{username}@opscode.com",
      "username" => username,
      "public_key" => public_key_regex
    }
  end
  context "superuser" do
    it "can get user" do
      get(request_url, platform.superuser).should look_like({
          :status => 200,
          :body_exact => user_body
        })
    end
  end
  context "admin user" do
    it "can get user" do
      get(request_url, platform.admin_user).should look_like({
          :status => 200,
          :body_exact => user_body
        })
    end
  end
  context "default normal user" do
    # request_url points at this same user, so this is self-access.
    it "can get self" do
      get(request_url, platform.non_admin_user).should look_like({
          :status => 200,
          :body_exact => user_body
        })
    end
  end
  context "default client" do
    it "returns 401" do
      get(request_url, platform.non_admin_client).should look_like({
          :status => 401
        })
    end
  end
  context "outside user" do
    it "returns 403" do
      get(request_url, outside_user).should look_like({
          :status => 403
        })
    end
  end
  context "invalid user" do
    it "returns 401" do
      get(request_url, invalid_user).should look_like({
          :status => 401
        })
    end
  end
  context "when user doesn't exist" do
    let(:username) { "bogus" }
    it "returns 404" do
      get(request_url, platform.superuser).should look_like({
          :status => 404
        })
    end
  end
end # context GET /users/<name>
# PUT behavior for users created via external authentication (no
# email/password; identified by external_authentication_uid).
context "PUT /users/<name> when user created w/ external auth enabled" do
  # NOTE(review): the entire group body -- including the `let`, `before`
  # and `after` hooks -- is wrapped in `pending`; none of it runs until
  # the users-endpoint work lands. Confirm the hooks still register as
  # intended when the `pending` wrapper is eventually removed.
  pending "work in flight on users endpoint" do
    let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
    let(:request_body) do
      {
        "username" => username,
        "email" => "#{username}@opscode.com",
        "first_name" => username,
        "last_name" => username,
        "display_name" => "new name",
        "external_authentication_uid" => username
      }
    end
    before :each do
      # Create the externally-authenticated user (no email, no password).
      response = post("#{platform.server}/users", platform.superuser,
        :payload => {
          "username" => username,
          "first_name" => username,
          "last_name" => username,
          "display_name" => username,
          "external_authentication_uid" => username
        })
      response.should look_like({ :status => 201 })
    end
    after :each do
      delete("#{platform.server}/users/#{username}", platform.superuser)
    end
    context "without email and without specifying external auth uid" do
      let(:request_body) do
        {
          "username" => username,
          "display_name" => username
        }
      end
      it "returns 200" do
        put(request_url, platform.superuser,
          :payload => request_body).should look_like({
            :status => 200
          })
      end
    end
  end
end
context "PUT /users/<name>" do
let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
let(:password) { "badger badger" }
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => password
}
end
let(:request_body_with_ext_id) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"external_authentication_uid" => "bob"
}
end
let(:request_body_with_recovery) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"recovery_authentication_enabled" => true
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"public_key" => public_key_regex
}
end
let(:input_public_key) do
<<EOF
-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+h5g/r/qaFH6OdYOG0OO
2/WpLb9qik7SPFmcOvujqZzLO2yv4kXwuvncx/ADHdkobaoFn3FE84uzIVCoSeaj
xTMeuTcPr5y+wsVqCYMkwIJpPezbwcrErt14BvD9BPN0UDyOJZW43ZN4iIw5xW8y
lQKuZtTNsm7FoznG+WsmRryTM3OjOrtDYjN/JHwDfrZZtVu7pT8FYnnz0O8j2zEf
9NALhpS7oDCf+VSo6UUk/w5m4/LpouDxT2dKBwQOuA8pzXd5jHP6rYdbHkroOUqx
Iy391UeSCiPVHcAN82sYV7R2MnUYj6b9Fev+62FKrQ6v9QYZcyljh6hldmcbmABy
EQIDAQAB
-----END PUBLIC KEY-----
EOF
end
before :each do
response = post("#{platform.server}/users", platform.superuser,
:payload => {
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
})
# Verify required preconditions are in place
response.should look_like({
:status => 201,
:body => { "private_key" => private_key_regex }})
@original_private_key = JSON.parse(response.body)["private_key"]
end
after :each do
delete("#{platform.server}/users/#{username}", platform.superuser)
@original_private_key = nil
end
context "superuser" do
it "can modify user", :smoke do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => modified_user
})
end
# After a PUT to the user, verify /authenticate_user agrees with the
# (possibly updated) password.
# NOTE(review): `superuser` (bare) is presumably a pedant helper
# equivalent to platform.superuser -- confirm; other examples in this
# file use platform.superuser.
context "authenticating after updates" do
  let(:auth_url) { "#{platform.server}/authenticate_user" }
  context "when password is unchanged" do
    it "can authenticate as the modified user when password has not been changed.", :smoke do
      put(request_url, platform.superuser, :payload => request_body).should look_like({ :status => 200 })
      post(auth_url, superuser, :payload => { 'username' => username,
                                              'password' => password }).should look_like({
          :status => 200
        })
    end
  end
  context "when password is updated" do
    let(:password) { "bidger bidger"}
    it "can authenticate as the modified user when password has been changed" do
      put(request_url, platform.superuser, :payload => request_body).should look_like({ :status => 200 })
      post(auth_url, superuser, :payload => { 'username' => username,
                                              'password' => password }).should look_like({
          :status => 200
        })
    end
    it "fails to authenticate as the modified user using the old password" do
      put(request_url, platform.superuser, :payload => request_body).should look_like({ :status => 200 })
      # BUG FIX: this example exists to prove the OLD password no longer
      # works after the update, but it previously asserted :status => 200
      # (i.e. that stale credentials still authenticate). Expect 401.
      post(auth_url, superuser, :payload => { 'username' => username,
                                              'password' => 'badger badger' }).should look_like({
          :status => 401
        })
    end
  end
end
it "can enable recovery" do
put(request_url, platform.superuser,
:payload => request_body_with_recovery).should look_like({
:status => 200
})
end
it "can set external id" do
put(request_url, platform.superuser,
:payload => request_body_with_ext_id).should look_like({
:status => 200
})
end
end
context "admin user" do
it "returns 403", :smoke do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 403
})
end
it "cannot enable recovery" do
put(request_url, platform.admin_user,
:payload => request_body_with_recovery).should look_like({
:status => 403
})
end
it "cannot set external id" do
put(request_url, platform.admin_user,
:payload => request_body_with_ext_id).should look_like({
:status => 403
})
end
end
# Only the superuser may PUT /users/<name>; even the account's owner is
# rejected with 403, as are recovery/external-id changes.
context "owning user" do
  # FIX: the example was titled "can modify its own account" while the
  # assertion (consistent with the admin-user sibling context) expects a
  # 403 rejection; the description now matches the asserted behavior.
  it "returns 403 when modifying its own account" do
    put(request_url, platform.non_admin_user,
      :payload => request_body).should look_like({
        :status => 403
      })
  end
  it "cannot enable recovery" do
    put(request_url, platform.non_admin_user,
      :payload => request_body_with_recovery).should look_like({
        :status => 403
      })
  end
  it "cannot set external id" do
    put(request_url, platform.non_admin_user,
      :payload => request_body_with_ext_id).should look_like({
        :status => 403
      })
  end
end
context "default client" do
it "returns 401" do
put(request_url, platform.non_admin_client,
:payload => request_body).should look_like({
:status => 401
})
end
end
context "when modifying non-existent user" do
let(:request_url) { "#{platform.server}/users/bogus" }
it "returns 404" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 404
})
end
end
context "modifying users" do
context "without password" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name"
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => modified_user
})
end
end
context "with bogus field" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"bogus" => "not a badger"
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => modified_user
})
end
end
context "without display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "without first and last name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"display_name" => "new name",
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"display_name" => "new name",
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
context "without email" do
let(:request_body) do
{
"username" => username,
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "without email but with external auth enabled" do
let(:request_body) do
{
"username" => username,
"display_name" => username,
"external_authentication_uid" => username
}
end
it "returns 200" do
pending "work in flight on users endpoint" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
end
end
end
context "without username" do
let(:request_body) do
{
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with invalid email" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@foo @ bar no go",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
end
end
context "with spaces in names" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Ren Kai",
"last_name" => "de Boers",
"display_name" => username,
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Ren Kai",
"last_name" => "de Boers",
"display_name" => username,
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
context "with space in display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "some user",
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "some user",
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
context "with UTF-8 in display_name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "ギリギリ",
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "ギリギリ",
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
context "with UTF-8 in first/last name" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Eliška",
"last_name" => "Horáčková",
"display_name" => username,
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => "Eliška",
"last_name" => "Horáčková",
"display_name" => username,
"public_key" => public_key_regex
}
end
it "can modify user" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 200
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => modified_user
})
end
end
context "with new password provided" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "bidgerbidger"
}
end
it "changes the password" do
put_response = put(request_url, platform.superuser, :payload => request_body)
put_response.should look_like({ :status => 200 })
response = post("#{platform.server}/verify_password", platform.superuser,
:payload => { 'user_id_to_verify' => username, 'password' => 'bidgerbidger' })
JSON.parse(response.body)["password_is_correct"].should eq(true)
end
end
context "with public key provided" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"public_key" => input_public_key
}
end
it "accepts the public key and subsequently responds with it" do
pending("Pending in ruby", :if => ruby?) do
put_response = put(request_url, platform.superuser, :payload => request_body)
put_response.should look_like({
:status => 200,
:body=> {
"uri" => request_url
},
})
get_response = get(request_url, platform.superuser)
new_public_key = JSON.parse(get_response.body)["public_key"]
new_public_key.should eq(input_public_key)
end
end
end
context "with private_key = true" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"private_key" => true
}
end
it "returns a new private key, changes the public key" do
original_response = get(request_url, platform.superuser)
original_public_key = JSON.parse(original_response.body)["public_key"]
put_response = put(request_url, platform.superuser, :payload => request_body)
put_response.should look_like({
:status => 200,
:body_exact => {
"uri" => request_url,
"private_key" => private_key_regex
},
})
new_private_key = JSON.parse(put_response.body)["private_key"]
new_private_key.should_not eq(@original_private_key)
new_response = get(request_url, platform.superuser)
new_public_key = JSON.parse(new_response.body)["public_key"]
new_public_key.should_not eq(original_public_key)
end
end
context "with private_key = true and a public_key" do
let(:request_body) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => "new name",
"password" => "badger badger",
"private_key" => true,
"public_key" => input_public_key
}
end
it "returns a new private key, changes the public key" do
original_response = get(request_url, platform.superuser)
original_public_key = JSON.parse(original_response.body)["public_key"]
put_response = put(request_url, platform.superuser, :payload => request_body)
put_response.should look_like({
:status => 200,
:body_exact => {
"uri" => request_url,
"private_key" => private_key_regex
},
})
new_private_key = JSON.parse(put_response.body)["private_key"]
new_private_key.should_not eq(@original_private_key)
new_response = get(request_url, platform.superuser)
new_public_key = JSON.parse(new_response.body)["public_key"]
new_public_key.should_not eq(input_public_key)
new_public_key.should_not eq(original_public_key)
end
end
end # context modifying users
context "renaming users" do
let(:new_name) { "test2-#{Time.now.to_i}-#{Process.pid}" }
let(:new_request_url) { "#{platform.server}/users/#{new_name}" }
context "changing username" do
let(:request_body) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
let(:modified_user) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"public_key" => public_key_regex
}
end
after :each do
delete("#{platform.server}/users/#{new_name}", platform.superuser)
end
context "and the username is valid" do
# Ideally these would be discrete tests: can we put it and get the correct response?
# But the top-level PUT /users/:id context causes us some problems with it's before :each
# behavior of recreating users.
it "updates the user to the new name and provides a new uri" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 201,
:body_exact => { "uri" => new_request_url },
:headers => [ "Location" => new_request_url ]
})
# it "makes the user unavailable at the old URI"
get(request_url, platform.superuser).should look_like({
:status => 404
})
# it "makes the user available at the new URI"
get(new_request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => modified_user
})
end
end
end
context "changing username with UTF-8" do
let(:new_name) { "テスト-#{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
# it "does not process any change to username" do
get(request_url, platform.superuser).should look_like({
:status => 200
})
end
end
context "changing username with spaces" do
let(:new_name) { "test #{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser, :payload => request_body).should look_like({
:status => 400
})
# it "does not process any change to username" do
get(request_url, platform.superuser).should look_like({
:status => 200
})
end
end
context "changing username with capital letters" do
let(:new_name) { "Test-#{Time.now.to_i}-#{Process.pid}" }
let(:request_body) do
{
"username" => new_name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
it "returns 400" do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 400
})
# it "does not process any change to username" do
get(request_url, platform.superuser).should look_like({
:status => 200
})
end
end
context "new name already exists" do
let(:request_body) do
{
"username" => platform.non_admin_user.name,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "badger badger"
}
end
let(:unmodified_user) do
{
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"public_key" => public_key_regex
}
end
it "returns 409" do
pending("actually returns 403", :if => ruby?) do
put(request_url, platform.superuser,
:payload => request_body).should look_like({
:status => 409
})
get(request_url, platform.superuser).should look_like({
:status => 200,
:body_exact => unmodified_user
})
end
end
end
end # context renaming users
end # context PUT /users/<name>
# POST to an individual user URI is not a supported operation.
context "POST /users/<name>" do
  context "admin user" do
    # A 405 here would be fine (and is no doubt coming with erlang)
    it "returns 404[ruby]/405[erlang]" do
      post(request_url, platform.admin_user).should look_like({
          :status => ruby? ? 404 : 405
        })
    end
  end
end # context POST /users/<name>
# Deleting users: superuser only; admins get 403, clients 401.
context "DELETE /users/<name>" do
  # Each example gets a freshly-created disposable user to delete.
  let(:username) { "test-#{Time.now.to_i}-#{Process.pid}" }
  before :each do
    post("#{platform.server}/users", platform.superuser,
      :payload => {
        "username" => username,
        "email" => "#{username}@opscode.com",
        "first_name" => username,
        "last_name" => username,
        "display_name" => username,
        "password" => "badger badger"
      }).should look_like({
        :status => 201,
        :body_exact => {
          "uri" => "#{platform.server}/users/#{username}",
          "private_key" => private_key_regex
        }})
  end
  after :each do
    # Cleanup for the examples that did NOT delete the user themselves.
    delete("#{platform.server}/users/#{username}", platform.superuser)
  end
  context "superuser" do
    it "can delete user" do
      delete(request_url, platform.superuser).should look_like({
          :status => 200
        })
      # Similar to rename, the existing before :each interferes with making this into a separate test
      # because it recreates the user.
      # it "did delete the user"
      get(request_url, platform.superuser).should look_like({
          :status => 404
        })
    end
  end
  context "admin user" do
    it "returns 403" do
      delete(request_url, platform.admin_user).should look_like({
          :status => 403
        })
      # it "did not delete user" do
      get("#{platform.server}/users/#{username}",
        platform.superuser).should look_like({
          :status => 200
        })
    end
  end
  context "default client" do
    it "returns 401" do
      delete(request_url, platform.non_admin_client).should look_like({
          :status => 401
        })
      # it "did not delete user" do
      get("#{platform.server}/users/#{username}",
        platform.superuser).should look_like({
          :status => 200
        })
    end
  end
  context "when deleting a non-existent user" do
    let(:request_url) { "#{platform.server}/users/bogus" }
    it "returns 404" do
      delete(request_url, platform.superuser).should look_like({
          :status => 404
        })
    end
  end
end # context DELETE /users/<name>
end # context /users/<name> endpoint
# Password verification is explicitly forbidden for the webui superuser
# account, regardless of whether the supplied password is correct.
context "POST /verify_password" do
  let(:request_url) { "#{platform.server}/verify_password" }
  context "when the webui superuser is specified as the user" do
    let(:requestor) { superuser }
    let(:request_body) do
      {
        user_id_to_verify: superuser.name,
        # The endpoint must reject before ever checking the password.
        password: "DOES_NOT_MATTER_FOR_TEST",
      }
    end
    it "should return Forbidden" do
      post(request_url, superuser, :payload => request_body).should look_like(
        :body => {
          "error" => "Password authentication as the superuser is prohibited."
        },
        :status => 403
      )
    end
  end # context when the webui superuser is specified as the user
end # context POST /verify_password
end # describe users
|
require 'rake'
require 'parallel'
require 'ciika'
module Quanto
  class Records
    # Download and query the monthly NCBI SRA metadata dump.
    class SRA
      class << self
        include RakeFileUtils

        # Set the number of threads used by Parallel.map calls below.
        def set_number_of_parallels(nop)
          @@num_of_parallels = nop
        end

        # Download the metadata tarball into dest_dir and arrange it under
        # dest_dir/sra_metadata. No-op when the destination already exists.
        def download_sra_metadata(dest_dir)
          tarball_downloaded = File.join(dest_dir, sra_metadata_tarball_fname)
          # directory name produced by unpacking the tarball
          unpacked_metadata = tarball_downloaded.sub(/\.tar\.gz\z/, "")
          metadata_dest_path = File.join(dest_dir, "sra_metadata")
          if !File.exist?(metadata_dest_path)
            download_metadata_via_ftp(dest_dir)
            extract_metadata(dest_dir, tarball_downloaded)
          end
          if File.exist?(unpacked_metadata)
            fix_sra_metadata_directory(unpacked_metadata)
            # BUG FIX: the original interpolated a stray comma into the mv
            # command ("mv src, dest"), which made mv fail.
            sh "mv #{unpacked_metadata} #{metadata_dest_path}"
            # BUG FIX: remove the downloaded tarball, not the (already moved)
            # unpacked directory.
            sh "rm -f #{tarball_downloaded}"
          end
        end

        # Fetch the tarball over FTP with lftp (8 parallel segments).
        def download_metadata_via_ftp(dest_dir)
          sh "lftp -c \"open #{sra_ftp_base_url} && pget -n 8 -O #{dest_dir} #{sra_metadata_tarball_fname}\""
        end

        # Unpack the tarball inside dest_dir.
        # BUG FIX: referenced undefined `dest_file`; use the tarball path.
        def extract_metadata(dest_dir, tarball_downloaded)
          sh "cd #{dest_dir} && tar zxf #{tarball_downloaded}"
        end

        # FTP location of the monthly metadata dumps.
        def sra_ftp_base_url
          "ftp.ncbi.nlm.nih.gov/sra/reports/Metadata"
        end

        # Tarball file name for the current month (published on the 1st).
        def sra_metadata_tarball_fname
          ym = Time.now.strftime("%Y%m")
          "NCBI_SRA_Metadata_Full_#{ym}01.tar.gz"
        end

        # Group accession directories (e.g. SRA123456) into parent buckets
        # named by the accession id with its last three digits dropped.
        def fix_sra_metadata_directory(metadata_parent_dir)
          cd metadata_parent_dir
          pdir = get_accession_directories(metadata_parent_dir)
          pdir.group_by{|id| id.sub(/...$/,"") }.each_pair do |pid, ids|
            # BUG FIX: `sra_metadata` was undefined here; the buckets belong
            # under the directory being fixed.
            moveto = File.join(metadata_parent_dir, pid)
            mkdir moveto
            mv ids, moveto
          end
        end

        # Entries matching ?RA\d{6,7} (SRA/ERA/DRA submission accessions).
        def get_accession_directories(metadata_parent_dir)
          Dir.entries(metadata_parent_dir).select{|f| f =~ /^.RA\d{6,7}$/ }
        end
      end

      # sra_metadata_dir - path to the unpacked "sra_metadata" directory.
      def initialize(sra_metadata_dir)
        @sra_metadata_dir = sra_metadata_dir
      end

      # Get a list of public/accessible SRA entries with the read layout
      # appended ("UNDEFINED" when the experiment has no layout record).
      def available
        layout_hash = read_layout
        Parallel.map(public_idsets, :in_threads => @@num_of_parallels) do |idset|
          exp_id = idset[2]
          # BUG FIX: the fallback value was computed but the raw (possibly
          # nil) layout was appended; append the resolved value instead.
          idset << (layout_hash[exp_id] || "UNDEFINED")
        end
      end

      def sra_accessions_path
        "#{@sra_metadata_dir}/SRA_Accessions"
      end

      # awk predicate selecting live, public run records (?RR accessions).
      def awk_public_run_pattern
        '$1 ~ /^.RR/ && $3 == "live" && $9 == "public"'
      end

      def public_idsets
        # run id, submission id, experiment id, published date
        list_public('$1 "\t" $2 "\t" $11 "\t" $5')
      end

      # Submission ids of public runs, flattened to plain strings so they can
      # be used directly as path components.
      def public_accid
        list_public('$2').flatten # submission id
      end

      # Extract `fields` from the SRA_Accessions table for public live runs.
      # Returns an array of rows, each split on tabs.
      def list_public(fields)
        cat = "cat #{sra_accessions_path}"
        awk = "awk -F '\t' '#{awk_public_run_pattern} {print #{fields} }'"
        out = `#{cat} | #{awk}`.split("\n")
        # BUG FIX: the original mapped over public_accid (which itself calls
        # list_public, recursing forever) and discarded `out`.
        Parallel.map(out, :in_threads => @@num_of_parallels){|l| l.split("\t") }
      end

      # create hash for read layout reference: experiment id => layout
      def read_layout
        hash = {}
        # BUG FIX: called undefined `list_exp_with_read_layout`; use
        # public_exp_with_read_layout (one [id, layout] list per XML file).
        public_exp_with_read_layout.flatten(1).each do |id_layout|
          id = id_layout[0]
          layout = id_layout[1]
          hash[id] = layout
        end
        hash
      end

      def public_exp_with_read_layout
        Parallel.map(public_xml, :in_threads => @@num_of_parallels) do |xml|
          extract_layout(xml)
        end
      end

      # Paths of experiment XML files that exist for public submissions.
      def public_xml
        list_public_xml = Parallel.map(public_accid, :in_threads => @@num_of_parallels) do |acc_id|
          exp_xml_path(acc_id)
        end
        list_public_xml.compact
      end

      # Path of the experiment XML for a submission id, or nil when absent.
      def exp_xml_path(acc_id)
        xml = File.join(@sra_metadata_dir, acc_id.sub(/...$/,""), acc_id, acc_id + ".experiment.xml")
        xml if File.exist?(xml)
      end

      # Parse an experiment XML into [accession, library_layout] pairs.
      def extract_layout(xml)
        Ciika::SRA::Experiment.new(xml).parse.map do |a|
          [a[:accession], a[:library_description][:library_layout]]
        end
      end
    end
  end
end
fix metadata download so it can start over
require 'rake'
require 'parallel'
require 'ciika'
module Quanto
  class Records
    # Download and query the monthly NCBI SRA metadata dump. The download is
    # restartable: each stage (download, unpack, fix/move) is skipped when its
    # output already exists.
    class SRA
      class << self
        include RakeFileUtils

        # Set the number of threads used by Parallel.map calls below.
        def set_number_of_parallels(nop)
          @@num_of_parallels = nop
        end

        # Download metadata reference tables into dest_dir/sra_metadata.
        def download_sra_metadata(dest_dir)
          tarball_downloaded = File.join(dest_dir, sra_metadata_tarball_fname)
          # directory name produced by unpacking the tarball
          unpacked_metadata = tarball_downloaded.sub(/\.tar\.gz\z/, "")
          metadata_dest_path = File.join(dest_dir, "sra_metadata")
          if !File.exist?(metadata_dest_path) # not yet done
            if !File.exist?(unpacked_metadata) # not yet downloaded nor unpacked
              if !File.exist?(tarball_downloaded) # download unless already done
                download_metadata_via_ftp(dest_dir)
              end
              # extract
              extract_metadata(dest_dir, tarball_downloaded)
            end
            # fix and move
            fix_sra_metadata_directory(unpacked_metadata)
            # BUG FIX: the original interpolated a stray comma into the mv
            # command ("mv src, dest"), which made mv fail.
            sh "mv #{unpacked_metadata} #{metadata_dest_path} && rm -f #{tarball_downloaded}"
          end
        end

        # Fetch the tarball over FTP with lftp (8 parallel segments).
        def download_metadata_via_ftp(dest_dir)
          sh "lftp -c \"open #{sra_ftp_base_url} && pget -n 8 -O #{dest_dir} #{sra_metadata_tarball_fname}\""
        end

        # Unpack the tarball inside dest_dir.
        def extract_metadata(dest_dir, tarball_downloaded)
          sh "cd #{dest_dir} && tar zxf #{tarball_downloaded}"
        end

        # FTP location of the monthly metadata dumps.
        def sra_ftp_base_url
          "ftp.ncbi.nlm.nih.gov/sra/reports/Metadata"
        end

        # Tarball file name for the current month (published on the 1st).
        def sra_metadata_tarball_fname
          ym = Time.now.strftime("%Y%m")
          "NCBI_SRA_Metadata_Full_#{ym}01.tar.gz"
        end

        # Group accession directories (e.g. SRA123456) into parent buckets
        # named by the accession id with its last three digits dropped.
        def fix_sra_metadata_directory(metadata_parent_dir)
          cd metadata_parent_dir
          pdir = get_accession_directories(metadata_parent_dir)
          pdir.group_by{|id| id.sub(/...$/,"") }.each_pair do |pid, ids|
            # BUG FIX: `sra_metadata` was undefined here; the buckets belong
            # under the directory being fixed.
            moveto = File.join(metadata_parent_dir, pid)
            mkdir moveto
            mv ids, moveto
          end
        end

        # Entries matching ?RA\d{6,7} (SRA/ERA/DRA submission accessions).
        def get_accession_directories(metadata_parent_dir)
          Dir.entries(metadata_parent_dir).select{|f| f =~ /^.RA\d{6,7}$/ }
        end
      end

      # sra_metadata_dir - path to the unpacked "sra_metadata" directory.
      def initialize(sra_metadata_dir)
        @sra_metadata_dir = sra_metadata_dir
      end

      # Get a list of public/accessible SRA entries with the read layout
      # appended ("UNDEFINED" when the experiment has no layout record).
      def available
        layout_hash = read_layout
        Parallel.map(public_idsets, :in_threads => @@num_of_parallels) do |idset|
          exp_id = idset[2]
          # BUG FIX: the fallback value was computed but the raw (possibly
          # nil) layout was appended; append the resolved value instead.
          idset << (layout_hash[exp_id] || "UNDEFINED")
        end
      end

      def sra_accessions_path
        "#{@sra_metadata_dir}/SRA_Accessions"
      end

      # awk predicate selecting live, public run records (?RR accessions).
      def awk_public_run_pattern
        '$1 ~ /^.RR/ && $3 == "live" && $9 == "public"'
      end

      def public_idsets
        # run id, submission id, experiment id, published date
        list_public('$1 "\t" $2 "\t" $11 "\t" $5')
      end

      # Submission ids of public runs, flattened to plain strings so they can
      # be used directly as path components.
      def public_accid
        list_public('$2').flatten # submission id
      end

      # Extract `fields` from the SRA_Accessions table for public live runs.
      # Returns an array of rows, each split on tabs.
      def list_public(fields)
        cat = "cat #{sra_accessions_path}"
        awk = "awk -F '\t' '#{awk_public_run_pattern} {print #{fields} }'"
        out = `#{cat} | #{awk}`.split("\n")
        # BUG FIX: the original mapped over public_accid (which itself calls
        # list_public, recursing forever) and discarded `out`.
        Parallel.map(out, :in_threads => @@num_of_parallels){|l| l.split("\t") }
      end

      # create hash for read layout reference: experiment id => layout
      def read_layout
        hash = {}
        # BUG FIX: called undefined `list_exp_with_read_layout`; use
        # public_exp_with_read_layout (one [id, layout] list per XML file).
        public_exp_with_read_layout.flatten(1).each do |id_layout|
          id = id_layout[0]
          layout = id_layout[1]
          hash[id] = layout
        end
        hash
      end

      def public_exp_with_read_layout
        Parallel.map(public_xml, :in_threads => @@num_of_parallels) do |xml|
          extract_layout(xml)
        end
      end

      # Paths of experiment XML files that exist for public submissions.
      def public_xml
        list_public_xml = Parallel.map(public_accid, :in_threads => @@num_of_parallels) do |acc_id|
          exp_xml_path(acc_id)
        end
        list_public_xml.compact
      end

      # Path of the experiment XML for a submission id, or nil when absent.
      def exp_xml_path(acc_id)
        xml = File.join(@sra_metadata_dir, acc_id.sub(/...$/,""), acc_id, acc_id + ".experiment.xml")
        xml if File.exist?(xml)
      end

      # Parse an experiment XML into [accession, library_layout] pairs.
      def extract_layout(xml)
        Ciika::SRA::Experiment.new(xml).parse.map do |a|
          [a[:accession], a[:library_description][:library_layout]]
        end
      end
    end
  end
end
|
# Request specs for the v1 users API, covering the default (full) and
# compact representations of the User resource. User.all / User.find are
# stubbed so no database is required.
describe 'User Endpoint', :type => :request do
  context :v1 do
    let(:first_user) { User.new name: 'Allam Britto', age: 24, birthday: Date.parse('15-06-1990'), phone: '555-5555', address: 'Fake st. 1-23' }
    let(:second_user) { User.new name: 'Elva Lasso', age: 25, birthday: Date.parse('15-06-1989'), phone: '777-5555', address: 'Fake st. 1-25' }
    context 'GET' do
      # FIX: this group was mislabeled 'default transformation' -- it covers
      # the collection endpoints (both default and compact representations).
      describe 'All Users' do
        before(:each){ allow(::User).to receive(:all) { [first_user, second_user] } }
        describe '/' do
          it 'gets all users with default transformation' do
            expected_response = "[{\"name\":\"Allam Britto\",\"age\":24,\"birthday\":\"1990-06-15T00:00:00.000+00:00\",\"phone\":\"555-5555\",\"address\":\"Fake st. 1-23\"},{\"name\":\"Elva Lasso\",\"age\":25,\"birthday\":\"1989-06-15T00:00:00.000+00:00\",\"phone\":\"777-5555\",\"address\":\"Fake st. 1-25\"}]"
            get '/api/v1/users'
            expect(response.status).to eq 200
            expect(response.body).to match expected_response
            #RSpec::Mocks.space.proxy_for(::User).reset # It is not necessary
          end
        end
        describe '/compact' do
          it 'gets all users with compact transformation' do
            expected_response = "[{\"name\":\"Allam Britto\",\"phone\":\"555-5555\"},{\"name\":\"Elva Lasso\",\"phone\":\"777-5555\"}]"
            get '/api/v1/users/compact'
            expect(response.status).to eq 200
            expect(response.body).to match expected_response
          end
        end
      end
      # FIX: this group was mislabeled 'compact transformation' -- it stubs
      # User.find and covers the by-id endpoints.
      describe 'User by ID' do
        before(:each){ allow(::User).to receive(:find) { first_user } }
        describe '/:id' do
          it 'gets specific user with default transformation' do
            expected_response = "{\"name\":\"Allam Britto\",\"age\":24,\"birthday\":\"1990-06-15T00:00:00.000+00:00\",\"phone\":\"555-5555\",\"address\":\"Fake st. 1-23\"}"
            get '/api/v1/users/0'
            expect(response.status).to eq 200
            expect(response.body).to match expected_response
          end
        end
        describe 'compact/:id' do
          it 'gets specific user with default transformation' do
            expected_response = "{\"name\":\"Allam Britto\",\"phone\":\"555-5555\"}"
            get '/api/v1/users/compact/0'
            expect(response.status).to eq 200
            expect(response.body).to match expected_response
          end
        end
      end
    end
  end
end
fix typo in user spec
# Request specs for the v1 users API, covering the default (full) and
# compact representations of the User resource. User.all / User.find are
# stubbed so no database is required.
describe 'User Endpoint', :type => :request do
context :v1 do
let(:first_user) { User.new name: 'Allam Britto', age: 24, birthday: Date.parse('15-06-1990'), phone: '555-5555', address: 'Fake st. 1-23' }
let(:second_user) { User.new name: 'Elva Lasso', age: 25, birthday: Date.parse('15-06-1989'), phone: '777-5555', address: 'Fake st. 1-25' }
context 'GET' do
# Collection endpoints: User.all is stubbed to return both fixtures.
describe 'All Users' do
before(:each){ allow(::User).to receive(:all) { [first_user, second_user] } }
describe '/' do
it 'gets all users with default transformation' do
expected_response = "[{\"name\":\"Allam Britto\",\"age\":24,\"birthday\":\"1990-06-15T00:00:00.000+00:00\",\"phone\":\"555-5555\",\"address\":\"Fake st. 1-23\"},{\"name\":\"Elva Lasso\",\"age\":25,\"birthday\":\"1989-06-15T00:00:00.000+00:00\",\"phone\":\"777-5555\",\"address\":\"Fake st. 1-25\"}]"
get '/api/v1/users'
expect(response.status).to eq 200
expect(response.body).to match expected_response
#RSpec::Mocks.space.proxy_for(::User).reset # It is not necessary
end
end
describe '/compact' do
it 'gets all users with compact transformation' do
expected_response = "[{\"name\":\"Allam Britto\",\"phone\":\"555-5555\"},{\"name\":\"Elva Lasso\",\"phone\":\"777-5555\"}]"
get '/api/v1/users/compact'
expect(response.status).to eq 200
expect(response.body).to match expected_response
end
end
end
# By-id endpoints: User.find is stubbed to always return the first fixture.
describe 'User by ID' do
before(:each){ allow(::User).to receive(:find) { first_user } }
describe '/:id' do
it 'gets specific user with default transformation' do
expected_response = "{\"name\":\"Allam Britto\",\"age\":24,\"birthday\":\"1990-06-15T00:00:00.000+00:00\",\"phone\":\"555-5555\",\"address\":\"Fake st. 1-23\"}"
get '/api/v1/users/0'
expect(response.status).to eq 200
expect(response.body).to match expected_response
end
end
describe 'compact/:id' do
# NOTE(review): description says "default transformation" but this hits
# the compact route -- likely a copy-paste leftover.
it 'gets specific user with default transformation' do
expected_response = "{\"name\":\"Allam Britto\",\"phone\":\"555-5555\"}"
get '/api/v1/users/compact/0'
expect(response.status).to eq 200
expect(response.body).to match expected_response
end
end
end
end
end
end
|
# Rails integration: mixes QuickTable::ActiveRecord into ActiveRecord::Base
# once ActiveRecord has been loaded.
# NOTE(review): a Railtie does not register app/assets with the asset
# pipeline; if this gem ships assets, Rails::Engine is required instead --
# confirm.
module QuickTable
class Railtie < Rails::Railtie
initializer "quick_table" do
ActiveSupport.on_load(:active_record) do
include QuickTable::ActiveRecord
end
end
end
end
app/assets were not being loaded -> fixed (use Rails::Engine instead of Railtie)
# Rails integration: an Engine (rather than a Railtie) so the gem's
# app/assets directory is added to the asset pipeline automatically.
# Mixes QuickTable::ActiveRecord into ActiveRecord::Base once loaded.
module QuickTable
class Engine < Rails::Engine
initializer "quick_table" do
ActiveSupport.on_load(:active_record) do
include QuickTable::ActiveRecord
end
end
end
end
|
require 'securerandom'
require 'rack-rabbit'
require 'rack-rabbit/adapter'
require 'rack-rabbit/message'
require 'rack-rabbit/response'
# HTTP-style client for RackRabbit services over RabbitMQ. The request
# method and path travel in message headers; #request is a synchronous
# RPC (reply queue + correlation id), #enqueue/#publish are fire-and-forget.
module RackRabbit
class Client
#--------------------------------------------------------------------------
attr_reader :rabbit
# options - adapter options merged over DEFAULT_RABBIT; connects eagerly.
def initialize(options = nil)
@rabbit = Adapter.load(DEFAULT_RABBIT.merge(options || {}))
connect
end
#--------------------------------------------------------------------------
def connect
rabbit.connect
end
def disconnect
rabbit.disconnect
end
#--------------------------------------------------------------------------
# HTTP-verb conveniences; all delegate to the synchronous #request.
def get(queue, path, options = {})
request(queue, path, "", options.merge(:method => :GET))
end
def post(queue, path, body, options = {})
request(queue, path, body, options.merge(:method => :POST))
end
def put(queue, path, body, options = {})
request(queue, path, body, options.merge(:method => :PUT))
end
def delete(queue, path, options = {})
request(queue, path, "", options.merge(:method => :DELETE))
end
#--------------------------------------------------------------------------
# Synchronous request/response: publishes to `queue` with a private reply
# queue and blocks the caller until the correlated reply arrives.
# NOTE(review): ConditionVariable#wait has no timeout here, so a lost
# reply blocks the caller forever -- consider adding a timeout.
def request(queue, path, body, options = {})
id = options[:id] || SecureRandom.uuid # allow dependency injection for test purposes
lock = Mutex.new
condition = ConditionVariable.new
method = options[:method] || :GET
headers = options[:headers] || {}
response = nil
rabbit.with_reply_queue do |reply_queue|
# Reply consumer: only the message matching our correlation id counts.
rabbit.subscribe(:queue => reply_queue) do |message|
if message.correlation_id == id
lock.synchronize do
response = Response.new(message.status, message.headers, message.body)
reply_queue.delete(if_empty: true)
condition.signal
end
end
end
rabbit.publish(body,
:correlation_id => id,
:routing_key => queue,
:reply_to => reply_queue.name,
:priority => options[:priority],
:content_type => options[:content_type] || default_content_type,
:content_encoding => options[:content_encoding] || default_content_encoding,
:timestamp => options[:timestamp] || default_timestamp,
:headers => headers.merge({
RackRabbit::HEADER::METHOD => method.to_s.upcase,
RackRabbit::HEADER::PATH => path
})
)
end
# Block until the subscriber thread signals (unless it already responded).
lock.synchronize do
condition.wait(lock) unless response
end
response
end
#--------------------------------------------------------------------------
# Fire-and-forget: push a message onto a work queue; no reply is awaited.
def enqueue(queue, path, body, options = {})
method = options[:method] || :POST
headers = options[:headers] || {}
rabbit.publish(body,
:routing_key => queue,
:priority => options[:priority],
:content_type => options[:content_type] || default_content_type,
:content_encoding => options[:content_encoding] || default_content_encoding,
:timestamp => options[:timestamp] || default_timestamp,
:headers => headers.merge({
RackRabbit::HEADER::METHOD => method.to_s.upcase,
RackRabbit::HEADER::PATH => path
})
)
true
end
#--------------------------------------------------------------------------
# Fire-and-forget broadcast to an exchange (fanout by default).
def publish(exchange, path, body, options = {})
method = options[:method] || :POST
headers = options[:headers] || {}
rabbit.publish(body,
:exchange => exchange,
:exchange_type => options[:exchange_type] || options[:type] || :fanout,
:routing_key => options[:routing_key] || options[:route],
:priority => options[:priority],
:content_type => options[:content_type] || default_content_type,
:content_encoding => options[:content_encoding] || default_content_encoding,
:timestamp => options[:timestamp] || default_timestamp,
:headers => headers.merge({
RackRabbit::HEADER::METHOD => method.to_s.upcase,
RackRabbit::HEADER::PATH => path
})
)
true
end
#--------------------------------------------------------------------------
# Defaults applied when the caller does not override message properties.
def default_content_type
'text/plain'
end
def default_content_encoding
'utf-8'
end
def default_timestamp
Time.now.to_i
end
#--------------------------------------------------------------------------
# One-shot class-level variants: build a client, perform the call,
# disconnect, and return the result.
def self.define_class_method_for(method_name)
define_singleton_method(method_name) do |*params|
options = params.last.is_a?(Hash) ? params.pop : {}
client = Client.new(options.delete(:rabbit))
response = client.send(method_name, *params, options)
client.disconnect
response
end
end
define_class_method_for :get
define_class_method_for :post
define_class_method_for :put
define_class_method_for :delete
define_class_method_for :request
define_class_method_for :enqueue
define_class_method_for :publish
#--------------------------------------------------------------------------
end
end
RR = RackRabbit::Client # much less typing for client applications
added client timeout on synchronous request/response (default 1 second)
require 'securerandom'
require 'rack-rabbit'
require 'rack-rabbit/adapter'
require 'rack-rabbit/message'
require 'rack-rabbit/response'
# HTTP-style client for RackRabbit services over RabbitMQ. The request
# method and path travel in message headers; #request is a synchronous
# RPC (reply queue + correlation id) with a timeout, #enqueue/#publish
# are fire-and-forget.
module RackRabbit
class Client
#--------------------------------------------------------------------------
attr_reader :rabbit
# options - adapter options merged over DEFAULT_RABBIT; connects eagerly.
def initialize(options = nil)
@rabbit = Adapter.load(DEFAULT_RABBIT.merge(options || {}))
connect
end
#--------------------------------------------------------------------------
def connect
rabbit.connect
end
def disconnect
rabbit.disconnect
end
#--------------------------------------------------------------------------
# HTTP-verb conveniences; all delegate to the synchronous #request.
def get(queue, path, options = {})
request(queue, path, "", options.merge(:method => :GET))
end
def post(queue, path, body, options = {})
request(queue, path, body, options.merge(:method => :POST))
end
def put(queue, path, body, options = {})
request(queue, path, body, options.merge(:method => :PUT))
end
def delete(queue, path, options = {})
request(queue, path, "", options.merge(:method => :DELETE))
end
#--------------------------------------------------------------------------
# Synchronous request/response: publishes to `queue` with a private reply
# queue and blocks until the correlated reply arrives or the timeout
# (options[:timeout], default 1 second) elapses; returns nil on timeout.
# NOTE(review): the wait is not re-checked in a loop, so a spurious wakeup
# could also return nil early -- confirm that callers treat nil as retryable.
def request(queue, path, body, options = {})
id = options[:id] || SecureRandom.uuid # allow dependency injection for test purposes
lock = Mutex.new
condition = ConditionVariable.new
method = options[:method] || :GET
headers = options[:headers] || {}
response = nil
timeout = options[:timeout] || 1
rabbit.with_reply_queue do |reply_queue|
# Reply consumer: only the message matching our correlation id counts.
rabbit.subscribe(:queue => reply_queue) do |message|
if message.correlation_id == id
lock.synchronize do
response = Response.new(message.status, message.headers, message.body)
reply_queue.delete(if_empty: true)
condition.signal
end
end
end
rabbit.publish(body,
:correlation_id => id,
:routing_key => queue,
:reply_to => reply_queue.name,
:priority => options[:priority],
:content_type => options[:content_type] || default_content_type,
:content_encoding => options[:content_encoding] || default_content_encoding,
:timestamp => options[:timestamp] || default_timestamp,
:headers => headers.merge({
RackRabbit::HEADER::METHOD => method.to_s.upcase,
RackRabbit::HEADER::PATH => path
})
)
end
# Block until the subscriber signals or the timeout expires.
lock.synchronize do
condition.wait(lock, timeout) unless response
end
response
end
#--------------------------------------------------------------------------
# Fire-and-forget: push a message onto a work queue; no reply is awaited.
def enqueue(queue, path, body, options = {})
method = options[:method] || :POST
headers = options[:headers] || {}
rabbit.publish(body,
:routing_key => queue,
:priority => options[:priority],
:content_type => options[:content_type] || default_content_type,
:content_encoding => options[:content_encoding] || default_content_encoding,
:timestamp => options[:timestamp] || default_timestamp,
:headers => headers.merge({
RackRabbit::HEADER::METHOD => method.to_s.upcase,
RackRabbit::HEADER::PATH => path
})
)
true
end
#--------------------------------------------------------------------------
# Fire-and-forget broadcast to an exchange (fanout by default).
def publish(exchange, path, body, options = {})
method = options[:method] || :POST
headers = options[:headers] || {}
rabbit.publish(body,
:exchange => exchange,
:exchange_type => options[:exchange_type] || options[:type] || :fanout,
:routing_key => options[:routing_key] || options[:route],
:priority => options[:priority],
:content_type => options[:content_type] || default_content_type,
:content_encoding => options[:content_encoding] || default_content_encoding,
:timestamp => options[:timestamp] || default_timestamp,
:headers => headers.merge({
RackRabbit::HEADER::METHOD => method.to_s.upcase,
RackRabbit::HEADER::PATH => path
})
)
true
end
#--------------------------------------------------------------------------
# Defaults applied when the caller does not override message properties.
def default_content_type
'text/plain'
end
def default_content_encoding
'utf-8'
end
def default_timestamp
Time.now.to_i
end
#--------------------------------------------------------------------------
# One-shot class-level variants: build a client, perform the call,
# disconnect, and return the result.
def self.define_class_method_for(method_name)
define_singleton_method(method_name) do |*params|
options = params.last.is_a?(Hash) ? params.pop : {}
client = Client.new(options.delete(:rabbit))
response = client.send(method_name, *params, options)
client.disconnect
response
end
end
define_class_method_for :get
define_class_method_for :post
define_class_method_for :put
define_class_method_for :delete
define_class_method_for :request
define_class_method_for :enqueue
define_class_method_for :publish
#--------------------------------------------------------------------------
end
end
RR = RackRabbit::Client # much less typing for client applications
|
# OAuth 2.0 client: builds authorization URIs, holds the current grant,
# and exchanges it for an access token at the token endpoint.
module Rack
module OAuth2
class Client
include AttrRequired, AttrOptional
attr_required :identifier
attr_optional :secret, :redirect_uri, :scheme, :host, :port, :authorization_endpoint, :token_endpoint
# attributes - client registration values (identifier is required).
# Starts with a client_credentials grant; setters below replace it.
def initialize(attributes = {})
(required_attributes + optional_attributes).each do |key|
self.send :"#{key}=", attributes[key]
end
@grant = Grant::ClientCredentials.new
@authorization_endpoint ||= '/oauth2/authorize'
@token_endpoint ||= '/oauth2/token'
attr_missing!
end
# Absolute authorization-endpoint URI with client_id/redirect_uri and
# space-joined response_type/scope in the query string.
def authorization_uri(params = {})
params[:response_type] ||= :code
params[:response_type] = Array(params[:response_type]).join(' ')
params[:scope] = Array(params[:scope]).join(' ')
Util.redirect_uri absolute_uri_for(authorization_endpoint), :query, params.merge(
client_id: self.identifier,
redirect_uri: self.redirect_uri
)
end
# Each setter below swaps in the corresponding grant for the next
# access_token! call.
def authorization_code=(code)
@grant = Grant::AuthorizationCode.new(
code: code,
redirect_uri: self.redirect_uri
)
end
def resource_owner_credentials=(credentials)
@grant = Grant::Password.new(
username: credentials.first,
password: credentials.last
)
end
def refresh_token=(token)
@grant = Grant::RefreshToken.new(
refresh_token: token
)
end
def jwt_bearer=(assertion)
@grant = Grant::JWTBearer.new(
assertion: assertion
)
end
def saml2_bearer=(assertion)
@grant = Grant::SAML2Bearer.new(
assertion: assertion
)
end
# NOTE(review): assignment syntax can only pass one argument, so the
# second parameter always takes its default when called as `c.subject_token = x`.
def subject_token=(subject_token, subject_token_type = URN::TokenType::JWT)
@grant = Grant::TokenExchange.new(
subject_token: subject_token,
subject_token_type: subject_token_type
)
end
# Override token_type detection in handle_success_response.
def force_token_type!(token_type)
@forced_token_type = token_type.to_s
end
# POST the current grant to the token endpoint and return an AccessToken.
# client_auth_method: :basic (default), :jwt_bearer, :saml2_bearer, or
# anything else for client_id/client_secret in the body.
def access_token!(*args)
headers, params = {}, @grant.as_json
# NOTE:
# Using Array#extract_options! for backward compatibility.
# Until v1.0.5, the first argument was 'client_auth_method' in scalar.
options = args.extract_options!
client_auth_method = args.first || options.delete(:client_auth_method) || :basic
params[:scope] = Array(options.delete(:scope)).join(' ') if options[:scope].present?
params.merge! options
case client_auth_method
when :basic
# Base64 of "id:secret" without line breaks (pack('m') adds them).
cred = ["#{identifier}:#{secret}"].pack('m').tr("\n", '')
headers.merge!(
'Authorization' => "Basic #{cred}"
)
when :jwt_bearer
params.merge!(
client_assertion_type: URN::ClientAssertionType::JWT_BEARER
)
when :saml2_bearer
params.merge!(
client_assertion_type: URN::ClientAssertionType::SAML2_BEARER
)
else
params.merge!(
client_id: identifier,
client_secret: secret
)
end
handle_response do
Rack::OAuth2.http_client.post(
absolute_uri_for(token_endpoint),
Util.compact_hash(params),
headers
)
end
end
private
# Expand a relative endpoint using the client's scheme/host/port
# (https by default); raises when no host can be determined.
def absolute_uri_for(endpoint)
_endpoint_ = Util.parse_uri endpoint
_endpoint_.scheme ||= self.scheme || 'https'
_endpoint_.host ||= self.host
_endpoint_.port ||= self.port
raise 'No Host Info' unless _endpoint_.host
_endpoint_.to_s
end
def handle_response
response = yield
case response.status
when 200..201
handle_success_response response
else
handle_error_response response
end
end
# Build the AccessToken subclass matching token_type; a non-JSON body is
# treated as a legacy (Facebook-style) form-encoded response.
def handle_success_response(response)
token_hash = JSON.parse(response.body).with_indifferent_access
case (@forced_token_type || token_hash[:token_type]).try(:downcase)
when 'bearer'
AccessToken::Bearer.new(token_hash)
when 'mac'
AccessToken::MAC.new(token_hash)
when nil
AccessToken::Legacy.new(token_hash)
else
raise 'Unknown Token Type'
end
rescue JSON::ParserError
# NOTE: Facebook support (They don't use JSON as token response)
AccessToken::Legacy.new Rack::Utils.parse_nested_query(response.body).with_indifferent_access
end
# Wrap an error response in Rack::OAuth2::Client::Error.
def handle_error_response(response)
error = JSON.parse(response.body).with_indifferent_access
raise Error.new(response.status, error)
rescue JSON::ParserError
raise Error.new(response.status, error: 'Unknown', error_description: response.body)
end
end
end
end
require 'rack/oauth2/client/error'
require 'rack/oauth2/client/grant'
optionally generate JWT bearer assertion for client authn
and add "private_key" as client attributes
# OAuth 2.0 client: builds authorization URIs, holds the current grant,
# and exchanges it for an access token. This revision adds a private_key
# attribute and can auto-generate the JWT client assertion for
# :jwt_bearer client authentication.
module Rack
module OAuth2
class Client
include AttrRequired, AttrOptional
attr_required :identifier
attr_optional :secret, :private_key, :redirect_uri, :scheme, :host, :port, :authorization_endpoint, :token_endpoint
# attributes - client registration values (identifier is required).
# Starts with a client_credentials grant; setters below replace it.
def initialize(attributes = {})
(required_attributes + optional_attributes).each do |key|
self.send :"#{key}=", attributes[key]
end
@grant = Grant::ClientCredentials.new
@authorization_endpoint ||= '/oauth2/authorize'
@token_endpoint ||= '/oauth2/token'
attr_missing!
end
# Absolute authorization-endpoint URI with client_id/redirect_uri and
# space-joined response_type/scope in the query string.
def authorization_uri(params = {})
params[:response_type] ||= :code
params[:response_type] = Array(params[:response_type]).join(' ')
params[:scope] = Array(params[:scope]).join(' ')
Util.redirect_uri absolute_uri_for(authorization_endpoint), :query, params.merge(
client_id: self.identifier,
redirect_uri: self.redirect_uri
)
end
# Each setter below swaps in the corresponding grant for the next
# access_token! call.
def authorization_code=(code)
@grant = Grant::AuthorizationCode.new(
code: code,
redirect_uri: self.redirect_uri
)
end
def resource_owner_credentials=(credentials)
@grant = Grant::Password.new(
username: credentials.first,
password: credentials.last
)
end
def refresh_token=(token)
@grant = Grant::RefreshToken.new(
refresh_token: token
)
end
def jwt_bearer=(assertion)
@grant = Grant::JWTBearer.new(
assertion: assertion
)
end
def saml2_bearer=(assertion)
@grant = Grant::SAML2Bearer.new(
assertion: assertion
)
end
# NOTE(review): assignment syntax can only pass one argument, so the
# second parameter always takes its default when called as `c.subject_token = x`.
def subject_token=(subject_token, subject_token_type = URN::TokenType::JWT)
@grant = Grant::TokenExchange.new(
subject_token: subject_token,
subject_token_type: subject_token_type
)
end
# Override token_type detection in handle_success_response.
def force_token_type!(token_type)
@forced_token_type = token_type.to_s
end
# POST the current grant to the token endpoint and return an AccessToken.
# client_auth_method: :basic (default), :jwt_bearer (auto-generates the
# client assertion if none given), :saml2_bearer, or anything else for
# client_id/client_secret in the body.
def access_token!(*args)
headers, params = {}, @grant.as_json
# NOTE:
# Using Array#extract_options! for backward compatibility.
# Until v1.0.5, the first argument was 'client_auth_method' in scalar.
options = args.extract_options!
client_auth_method = args.first || options.delete(:client_auth_method) || :basic
params[:scope] = Array(options.delete(:scope)).join(' ') if options[:scope].present?
params.merge! options
case client_auth_method
when :basic
# Base64 of "id:secret" without line breaks (pack('m') adds them).
cred = ["#{identifier}:#{secret}"].pack('m').tr("\n", '')
headers.merge!(
'Authorization' => "Basic #{cred}"
)
when :jwt_bearer
params.merge!(
client_assertion_type: URN::ClientAssertionType::JWT_BEARER
)
# NOTE: optionally auto-generate client_assertion.
# Signs with private_key when set (asymmetric), else the shared secret.
# NOTE(review): relies on SecureRandom and ActiveSupport's 3.minutes
# being loaded by the surrounding gem -- confirm.
if params[:client_assertion].blank?
require 'json/jwt'
params[:client_assertion] = JSON::JWT.new(
iss: identifier,
sub: identifier,
aud: absolute_uri_for(token_endpoint),
jti: SecureRandom.hex(16),
iat: Time.now,
exp: 3.minutes.from_now
).sign(private_key || secret).to_s
end
when :saml2_bearer
params.merge!(
client_assertion_type: URN::ClientAssertionType::SAML2_BEARER
)
else
params.merge!(
client_id: identifier,
client_secret: secret
)
end
handle_response do
Rack::OAuth2.http_client.post(
absolute_uri_for(token_endpoint),
Util.compact_hash(params),
headers
)
end
end
private
# Expand a relative endpoint using the client's scheme/host/port
# (https by default); raises when no host can be determined.
def absolute_uri_for(endpoint)
_endpoint_ = Util.parse_uri endpoint
_endpoint_.scheme ||= self.scheme || 'https'
_endpoint_.host ||= self.host
_endpoint_.port ||= self.port
raise 'No Host Info' unless _endpoint_.host
_endpoint_.to_s
end
def handle_response
response = yield
case response.status
when 200..201
handle_success_response response
else
handle_error_response response
end
end
# Build the AccessToken subclass matching token_type; a non-JSON body is
# treated as a legacy (Facebook-style) form-encoded response.
def handle_success_response(response)
token_hash = JSON.parse(response.body).with_indifferent_access
case (@forced_token_type || token_hash[:token_type]).try(:downcase)
when 'bearer'
AccessToken::Bearer.new(token_hash)
when 'mac'
AccessToken::MAC.new(token_hash)
when nil
AccessToken::Legacy.new(token_hash)
else
raise 'Unknown Token Type'
end
rescue JSON::ParserError
# NOTE: Facebook support (They don't use JSON as token response)
AccessToken::Legacy.new Rack::Utils.parse_nested_query(response.body).with_indifferent_access
end
# Wrap an error response in Rack::OAuth2::Client::Error.
def handle_error_response(response)
error = JSON.parse(response.body).with_indifferent_access
raise Error.new(response.status, error)
rescue JSON::ParserError
raise Error.new(response.status, error: 'Unknown', error_description: response.body)
end
end
end
end
require 'rack/oauth2/client/error'
require 'rack/oauth2/client/grant'
|
module Rack
module OAuth2
class Client
include AttrRequired, AttrOptional
attr_required :identifier
attr_optional :secret, :private_key, :certificate, :redirect_uri, :scheme, :host, :port, :authorization_endpoint, :token_endpoint, :revocation_endpoint
def initialize(attributes = {})
(required_attributes + optional_attributes).each do |key|
self.send :"#{key}=", attributes[key]
end
@grant = Grant::ClientCredentials.new
@authorization_endpoint ||= '/oauth2/authorize'
@token_endpoint ||= '/oauth2/token'
attr_missing!
end
def authorization_uri(params = {})
params[:redirect_uri] ||= self.redirect_uri
params[:response_type] ||= :code
params[:response_type] = Array(params[:response_type]).join(' ')
params[:scope] = Array(params[:scope]).join(' ')
Util.redirect_uri absolute_uri_for(authorization_endpoint), :query, params.merge(
client_id: self.identifier
)
end
def authorization_code=(code)
@grant = Grant::AuthorizationCode.new(
code: code,
redirect_uri: self.redirect_uri
)
end
def resource_owner_credentials=(credentials)
@grant = Grant::Password.new(
username: credentials.first,
password: credentials.last
)
end
def refresh_token=(token)
@grant = Grant::RefreshToken.new(
refresh_token: token
)
end
def jwt_bearer=(assertion)
@grant = Grant::JWTBearer.new(
assertion: assertion
)
end
def saml2_bearer=(assertion)
@grant = Grant::SAML2Bearer.new(
assertion: assertion
)
end
def subject_token=(subject_token, subject_token_type = URN::TokenType::JWT)
@grant = Grant::TokenExchange.new(
subject_token: subject_token,
subject_token_type: subject_token_type
)
end
def force_token_type!(token_type)
@forced_token_type = token_type.to_s
end
def access_token!(*args, &local_http_config)
headers, params, http_client, options = authenticated_context_from(*args, &local_http_config)
params[:scope] = Array(options.delete(:scope)).join(' ') if options[:scope].present?
params.merge! @grant.as_json
params.merge! options
handle_response do
http_client.post(
absolute_uri_for(token_endpoint),
Util.compact_hash(params),
headers
)
end
end
def revoke!(*args, &local_http_config)
headers, params, http_client, options = authenticated_context_from(*args, &local_http_config)
params.merge! case
when access_token = options.delete(:access_token)
{
token: access_token,
token_type_hint: :access_token
}
when refresh_token = options.delete(:refresh_token)
{
token: refresh_token,
token_type_hint: :refresh_token
}
when @grant.is_a?(Grant::RefreshToken)
{
token: @grant.refresh_token,
token_type_hint: :refresh_token
}
when options[:token].blank?
raise ArgumentError, 'One of "token", "access_token" and "refresh_token" is required'
end
params.merge! options
handle_revocation_response do
http_client.post(
absolute_uri_for(revocation_endpoint),
Util.compact_hash(params),
headers
)
end
end
private
def absolute_uri_for(endpoint)
_endpoint_ = Util.parse_uri endpoint
_endpoint_.scheme ||= self.scheme || 'https'
_endpoint_.host ||= self.host
_endpoint_.port ||= self.port
raise 'No Host Info' unless _endpoint_.host
_endpoint_.to_s
end
# Assemble the authenticated request context for a token/revocation call:
# returns [headers, params, http_client, leftover_options]. The client
# authentication method defaults to :basic (RFC 6749 section 2.3.1).
def authenticated_context_from(*args, &local_http_config)
headers, params = {}, {}
http_client = Rack::OAuth2.http_client(&local_http_config)
# NOTE:
# Using Array#extract_options! for backward compatibility.
# Until v1.0.5, the first argument was 'client_auth_method' in scalar.
options = args.extract_options!
client_auth_method = args.first || options.delete(:client_auth_method)&.to_sym || :basic
case client_auth_method
when :basic
# HTTP Basic with form-urlencoded credentials, per RFC 6749 section 2.3.1.
cred = Base64.strict_encode64 [
Util.www_form_url_encode(identifier),
Util.www_form_url_encode(secret)
].join(':')
headers.merge!(
'Authorization' => "Basic #{cred}"
)
when :basic_without_www_form_urlencode
# Some providers expect raw (unencoded) credentials in the Basic header.
cred = ["#{identifier}:#{secret}"].pack('m').tr("\n", '')
headers.merge!(
'Authorization' => "Basic #{cred}"
)
when :jwt_bearer
# RFC 7523 client assertion, signed with private_key (or secret as HMAC key).
params.merge!(
client_assertion_type: URN::ClientAssertionType::JWT_BEARER
)
# NOTE: optionally auto-generate client_assertion.
params[:client_assertion] = if options[:client_assertion].present?
options.delete(:client_assertion)
else
require 'json/jwt'
JSON::JWT.new(
iss: identifier,
sub: identifier,
aud: absolute_uri_for(token_endpoint),
jti: SecureRandom.hex(16),
iat: Time.now,
exp: 3.minutes.from_now
).sign(private_key || secret).to_s
end
when :saml2_bearer
# Caller must supply the SAML assertion via the grant / options.
params.merge!(
client_assertion_type: URN::ClientAssertionType::SAML2_BEARER
)
when :mtls
# Mutual-TLS client authentication: credentials ride on the TLS layer.
params.merge!(
client_id: identifier
)
http_client.ssl.client_key = private_key
http_client.ssl.client_cert = certificate
else
# Fallback: client credentials sent in the request body.
params.merge!(
client_id: identifier,
client_secret: secret
)
end
[headers, params, http_client, options]
end
# Run the request block and dispatch on HTTP status: 200/201 are parsed
# as token responses, anything else raises via handle_error_response.
def handle_response
  response = yield
  if (200..201).cover?(response.status)
    handle_success_response response
  else
    handle_error_response response
  end
end
# Run the revocation request block; RFC 7009 only promises a 200-range
# status on success, so return :success rather than parsing a body.
def handle_revocation_response
  response = yield
  return :success if (200..201).cover?(response.status)
  handle_error_response response
end
# Convert a successful token response into an AccessToken object. The
# token_type used for dispatch can be overridden via #force_token_type!.
def handle_success_response(response)
token_hash = JSON.parse(response.body).with_indifferent_access
case (@forced_token_type || token_hash[:token_type])&.downcase
when 'bearer'
AccessToken::Bearer.new(token_hash)
when nil
# No token_type in the response: treat it as a pre-RFC6749 "legacy" token.
AccessToken::Legacy.new(token_hash)
else
raise 'Unknown Token Type'
end
rescue JSON::ParserError
# NOTE: Facebook support (They don't use JSON as token response)
AccessToken::Legacy.new Rack::Utils.parse_nested_query(response.body).with_indifferent_access
end
# Raise a Client::Error built from the error response body; non-JSON
# bodies are reported as error 'Unknown' with the raw body as description.
def handle_error_response(response)
error = JSON.parse(response.body).with_indifferent_access
raise Error.new(response.status, error)
rescue JSON::ParserError
raise Error.new(response.status, error: 'Unknown', error_description: response.body)
end
end
end
end
require 'rack/oauth2/client/error'
require 'rack/oauth2/client/grant'
No need for `local_http_config`; use Faraday's per-request configuration feature instead.
module Rack
  module OAuth2
    # OAuth 2.0 client (RFC 6749): builds authorization URIs, exchanges the
    # configured grant for tokens at the token endpoint, and revokes tokens
    # at the revocation endpoint (RFC 7009).
    class Client
      include AttrRequired, AttrOptional
      attr_required :identifier
      attr_optional :secret, :private_key, :certificate, :redirect_uri, :scheme, :host, :port, :authorization_endpoint, :token_endpoint, :revocation_endpoint

      # Accepts the attributes declared above; endpoints default to the
      # conventional '/oauth2/*' paths. attr_missing! raises when the
      # required :identifier is absent.
      def initialize(attributes = {})
        (required_attributes + optional_attributes).each do |key|
          self.send :"#{key}=", attributes[key]
        end
        @grant = Grant::ClientCredentials.new # default grant until a setter replaces it
        @authorization_endpoint ||= '/oauth2/authorize'
        @token_endpoint ||= '/oauth2/token'
        attr_missing!
      end

      # Build the end-user authorization URI; response_type defaults to :code.
      def authorization_uri(params = {})
        params[:redirect_uri] ||= self.redirect_uri
        params[:response_type] ||= :code
        params[:response_type] = Array(params[:response_type]).join(' ')
        params[:scope] = Array(params[:scope]).join(' ')
        Util.redirect_uri absolute_uri_for(authorization_endpoint), :query, params.merge(
          client_id: self.identifier
        )
      end

      # Switch to the Authorization Code grant (RFC 6749 section 4.1).
      def authorization_code=(code)
        @grant = Grant::AuthorizationCode.new(
          code: code,
          redirect_uri: self.redirect_uri
        )
      end

      # Switch to the Resource Owner Password Credentials grant (section 4.3).
      # +credentials+ responds to #first (username) and #last (password).
      def resource_owner_credentials=(credentials)
        @grant = Grant::Password.new(
          username: credentials.first,
          password: credentials.last
        )
      end

      # Switch to the Refresh Token grant (section 6).
      def refresh_token=(token)
        @grant = Grant::RefreshToken.new(
          refresh_token: token
        )
      end

      # Switch to the JWT Bearer assertion grant (RFC 7523).
      def jwt_bearer=(assertion)
        @grant = Grant::JWTBearer.new(
          assertion: assertion
        )
      end

      # Switch to the SAML 2.0 Bearer assertion grant (RFC 7522).
      def saml2_bearer=(assertion)
        @grant = Grant::SAML2Bearer.new(
          assertion: assertion
        )
      end

      # Switch to the Token Exchange grant (RFC 8693).
      # NOTE: assignment syntax always passes one argument, so a non-default
      # +subject_token_type+ is only reachable via #send.
      def subject_token=(subject_token, subject_token_type = URN::TokenType::JWT)
        @grant = Grant::TokenExchange.new(
          subject_token: subject_token,
          subject_token_type: subject_token_type
        )
      end

      # Force the token type used when parsing token-endpoint responses.
      def force_token_type!(token_type)
        @forced_token_type = token_type.to_s
      end

      # POST the configured grant to the token endpoint and return an
      # AccessToken object. An optional block receives the outgoing HTTP
      # request for per-call customization.
      def access_token!(*args)
        headers, params, http_client, options = authenticated_context_from(*args)
        params[:scope] = Array(options.delete(:scope)).join(' ') if options[:scope].present?
        params.merge! @grant.as_json
        params.merge! options
        handle_response do
          http_client.post(
            absolute_uri_for(token_endpoint),
            Util.compact_hash(params),
            headers
          ) do |req|
            yield req if block_given?
          end
        end
      end

      # Revoke a token at the revocation endpoint (RFC 7009). The token is
      # taken from the :access_token / :refresh_token options, the current
      # RefreshToken grant, or a raw :token option — in that order.
      def revoke!(*args)
        headers, params, http_client, options = authenticated_context_from(*args)
        params.merge!(
          if (access_token = options.delete(:access_token))
            { token: access_token, token_type_hint: :access_token }
          elsif (refresh_token = options.delete(:refresh_token))
            { token: refresh_token, token_type_hint: :refresh_token }
          elsif @grant.is_a?(Grant::RefreshToken)
            { token: @grant.refresh_token, token_type_hint: :refresh_token }
          elsif options[:token].blank?
            raise ArgumentError, 'One of "token", "access_token" and "refresh_token" is required'
          else
            # A raw token was supplied via options[:token]; merged in below.
            # (Previously this branch fell out of the `case` with nil, so
            # `params.merge! nil` raised TypeError instead of revoking.)
            {}
          end
        )
        params.merge! options
        handle_revocation_response do
          http_client.post(
            absolute_uri_for(revocation_endpoint),
            Util.compact_hash(params),
            headers
          ) do |req|
            yield req if block_given?
          end
        end
      end

      private

      # Expand +endpoint+ (path or URI) against this client's scheme/host/port.
      def absolute_uri_for(endpoint)
        _endpoint_ = Util.parse_uri endpoint
        _endpoint_.scheme ||= self.scheme || 'https'
        _endpoint_.host ||= self.host
        _endpoint_.port ||= self.port
        raise 'No Host Info' unless _endpoint_.host
        _endpoint_.to_s
      end

      # Assemble [headers, params, http_client, leftover_options] with client
      # authentication applied per the requested method (:basic default; also
      # :basic_without_www_form_urlencode, :jwt_bearer, :saml2_bearer, :mtls,
      # or body credentials as a fallback).
      def authenticated_context_from(*args)
        headers, params = {}, {}
        http_client = Rack::OAuth2.http_client
        # NOTE:
        # Using Array#extract_options! for backward compatibility.
        # Until v1.0.5, the first argument was 'client_auth_method' in scalar.
        options = args.extract_options!
        client_auth_method = args.first || options.delete(:client_auth_method)&.to_sym || :basic
        case client_auth_method
        when :basic
          # HTTP Basic with form-urlencoded credentials (RFC 6749 section 2.3.1).
          cred = Base64.strict_encode64 [
            Util.www_form_url_encode(identifier),
            Util.www_form_url_encode(secret)
          ].join(':')
          headers.merge!(
            'Authorization' => "Basic #{cred}"
          )
        when :basic_without_www_form_urlencode
          # Some providers expect raw (unencoded) credentials in the header.
          cred = ["#{identifier}:#{secret}"].pack('m').tr("\n", '')
          headers.merge!(
            'Authorization' => "Basic #{cred}"
          )
        when :jwt_bearer
          params.merge!(
            client_assertion_type: URN::ClientAssertionType::JWT_BEARER
          )
          # NOTE: optionally auto-generate client_assertion.
          params[:client_assertion] = if options[:client_assertion].present?
            options.delete(:client_assertion)
          else
            require 'json/jwt'
            JSON::JWT.new(
              iss: identifier,
              sub: identifier,
              aud: absolute_uri_for(token_endpoint),
              jti: SecureRandom.hex(16),
              iat: Time.now,
              exp: 3.minutes.from_now
            ).sign(private_key || secret).to_s
          end
        when :saml2_bearer
          params.merge!(
            client_assertion_type: URN::ClientAssertionType::SAML2_BEARER
          )
        when :mtls
          # Mutual-TLS client authentication: credentials ride on TLS.
          params.merge!(
            client_id: identifier
          )
          http_client.ssl.client_key = private_key
          http_client.ssl.client_cert = certificate
        else
          # Fallback: client credentials in the request body.
          params.merge!(
            client_id: identifier,
            client_secret: secret
          )
        end
        [headers, params, http_client, options]
      end

      # Dispatch on HTTP status: 200/201 parse a token, anything else raises.
      def handle_response
        response = yield
        case response.status
        when 200..201
          handle_success_response response
        else
          handle_error_response response
        end
      end

      # RFC 7009: a 200-range response means the token is revoked.
      def handle_revocation_response
        response = yield
        case response.status
        when 200..201
          :success
        else
          handle_error_response response
        end
      end

      # Parse a successful token response into an AccessToken object; the
      # token_type used for dispatch can be overridden via #force_token_type!.
      def handle_success_response(response)
        token_hash = JSON.parse(response.body).with_indifferent_access
        case (@forced_token_type || token_hash[:token_type])&.downcase
        when 'bearer'
          AccessToken::Bearer.new(token_hash)
        when nil
          # No token_type in the response: treat as a "legacy" token.
          AccessToken::Legacy.new(token_hash)
        else
          raise 'Unknown Token Type'
        end
      rescue JSON::ParserError
        # NOTE: Facebook support (They don't use JSON as token response)
        AccessToken::Legacy.new Rack::Utils.parse_nested_query(response.body).with_indifferent_access
      end

      # Raise a Client::Error from the error response; non-JSON bodies are
      # reported as 'Unknown' with the raw body as description.
      def handle_error_response(response)
        error = JSON.parse(response.body).with_indifferent_access
        raise Error.new(response.status, error)
      rescue JSON::ParserError
        raise Error.new(response.status, error: 'Unknown', error_description: response.body)
      end
    end
  end
end
require 'rack/oauth2/client/error'
require 'rack/oauth2/client/grant'
|
require "mongo"
require "openssl"
require "rack/oauth2/server/errors"
require "rack/oauth2/server/utils"
module Rack
  module OAuth2
    class Server
      class << self
        # A Mongo::DB object. Use the writer to configure; the reader below
        # fails fast when it was never set.
        attr_accessor :database

        # Guarded reader: raise a clear configuration error instead of the
        # NoMethodError-on-nil callers would otherwise hit deep in a query.
        def database
          unless @database
            raise 'No database configured. Set one with Rack::OAuth2::Server.database = Mongo::Connection.new.db(db_name)'
          end
          @database
        end

        # Create new instance of the klass and populate its attributes.
        # Returns nil when +fields+ is falsy.
        def new_instance(klass, fields)
          return unless fields
          instance = klass.new
          fields.each do |name, value|
            instance.instance_variable_set :"@#{name}", value
          end
          instance
        end

        # Long, random and hexy: 64 hex characters (32 secure random bytes).
        def secure_random
          OpenSSL::Random.random_bytes(32).unpack("H*")[0]
        end

        # @private
        # With a block: queue an index-creation callback. Without a block:
        # run all queued callbacks once, then clear the queue.
        def create_indexes(&block)
          if block
            @create_indexes ||= []
            @create_indexes << block
          elsif @create_indexes
            @create_indexes.each do |block|
              block.call
            end
            @create_indexes = nil
          end
        end
      end
    end
  end
end
require "rack/oauth2/models/client"
require "rack/oauth2/models/auth_request"
require "rack/oauth2/models/access_grant"
require "rack/oauth2/models/access_token"
Added a check for when the database is not set up properly.
require "mongo"
require "openssl"
require "rack/oauth2/server/errors"
require "rack/oauth2/server/utils"
module Rack
  module OAuth2
    class Server
      class << self
        # Backing store handle (a Mongo::DB object); the reader defined
        # below guards against use before configuration.
        attr_accessor :database

        # Build an instance of +klass+ and copy +fields+ into its instance
        # variables. Returns nil when +fields+ is falsy.
        def new_instance(klass, fields)
          return unless fields
          klass.new.tap do |record|
            fields.each { |attr, val| record.instance_variable_set(:"@#{attr}", val) }
          end
        end

        # 64 hex characters of cryptographically secure randomness.
        def secure_random
          OpenSSL::Random.random_bytes(32).unpack("H*").first
        end

        # @private
        # With a block: queue an index-creation callback. Without a block:
        # run all queued callbacks once, then clear the queue.
        def create_indexes(&block)
          if block
            (@create_indexes ||= []) << block
          elsif @create_indexes
            @create_indexes.each(&:call)
            @create_indexes = nil
          end
        end

        # Fail fast when the database was never configured.
        def database
          unless @database
            raise 'No database Configured. You must configure it using Server.database = MongoDB::Connection.new()[db_name] '
          end
          @database
        end
      end
    end
  end
end
require "rack/oauth2/models/client"
require "rack/oauth2/models/auth_request"
require "rack/oauth2/models/access_grant"
require "rack/oauth2/models/access_token"
|
require 'rack'
require 'sparql'
module Rack; module SPARQL
  ##
  # Rack middleware for SPARQL content negotiation.
  #
  # Uses HTTP Content Negotiation to find an appropriate RDF
  # format to serialize any result with a body being `RDF::Enumerable`.
  #
  # Override content negotiation by setting the :format option to
  # {Rack::SPARQL#initialize}.
  #
  # This endpoint also serves the function of Rack::LinkedData, as it will
  # serialize SPARQL results, which may be RDF Graphs.
  class ContentNegotiation
    VARY = {'Vary' => 'Accept'}.freeze

    # @return [#call]
    attr_reader :app

    # @return [Hash{Symbol => Object}]
    attr_reader :options

    ##
    # @param [#call] app
    # @param [Hash{Symbol => Object}] options
    #   Other options passed to writer.
    # @option options [RDF::Format, #to_sym] :format Specific RDF writer format to use
    def initialize(app, options = {})
      @app, @options = app, options
    end

    ##
    # Handles a Rack protocol request.
    #
    # If result is `RDF::Literal::Boolean`, `RDF::Query::Results`, or `RDF::Enumerable`
    # The result is serialized using {SPARQL::Results}
    #
    # @param [Hash{String => String}] env
    # @return [Array(Integer, Hash, #each)]
    # @see http://rack.rubyforge.org/doc/SPEC.html
    def call(env)
      response = app.call(env)
      # The body may be wrapped by intermediate middleware (e.g. in a
      # Rack::BodyProxy); unwrap it so the SPARQL result underneath is seen.
      body = response[2].respond_to?(:body) ? response[2].body : response[2]
      case body
      when RDF::Enumerable, RDF::Query::Solutions, RDF::Literal::Boolean
        response[2] = body # put the unwrapped result back before serializing
        serialize(env, *response)
      else response
      end
    end

    ##
    # Serializes a SPARQL query result into a Rack protocol
    # response using HTTP content negotiation rules or a specified Content-Type.
    #
    # @param [Hash{String => String}] env
    # @param [Integer] status
    # @param [Hash{String => Object}] headers
    # @param [RDF::Enumerable] body
    # @return [Array(Integer, Hash, #each)]
    # @raise [RDF::WriterError] when no results are generated
    def serialize(env, status, headers, body)
      begin
        serialize_options = {}
        serialize_options[:content_types] = parse_accept_header(env['HTTP_ACCEPT']) if env.has_key?('HTTP_ACCEPT')
        serialize_options.merge!(@options)
        results = ::SPARQL.serialize_results(body, serialize_options)
        raise RDF::WriterError, "can't serialize results" unless results
        headers = headers.merge(VARY).merge('Content-Type' => results.content_type) # FIXME: don't overwrite existing Vary headers
        [status, headers, [results]]
      rescue RDF::WriterError => e
        not_acceptable(e.message)
      end
    end

    protected

    ##
    # Parses an HTTP `Accept` header, returning an array of MIME content
    # types ordered by the precedence rules defined in HTTP/1.1 Section 14.1.
    #
    # @param [String, #to_s] header
    # @return [Array<String>]
    # @see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
    def parse_accept_header(header)
      entries = header.to_s.split(',')
      entries.map { |e| accept_entry(e) }.sort_by(&:last).map(&:first)
    end

    # Decompose one Accept entry into [type, sort_key] where the key orders
    # by quality (inverted, smallest-first), wildcard count, and param count.
    def accept_entry(entry)
      type, *options = entry.delete(' ').split(';')
      quality = 0 # we sort smallest first
      options.delete_if { |e| quality = 1 - e[2..-1].to_f if e.start_with? 'q=' }
      [type, [quality, type.count('*'), 1 - options.size]]
    end

    ##
    # Outputs an HTTP `406 Not Acceptable` response.
    #
    # @param [String, #to_s] message
    # @return [Array(Integer, Hash, #each)]
    def not_acceptable(message = nil)
      http_error(406, message, VARY)
    end

    ##
    # Outputs an HTTP `4xx` or `5xx` response.
    #
    # @param [Integer, #to_i] code
    # @param [String, #to_s] message
    # @param [Hash{String => String}] headers
    # @return [Array(Integer, Hash, #each)]
    def http_error(code, message = nil, headers = {})
      message = http_status(code) + (message.nil? ? "\n" : " (#{message})\n")
      [code, {'Content-Type' => 'text/plain; charset=utf-8'}.merge(headers), [message]]
    end

    ##
    # Returns the standard HTTP status message for the given status `code`.
    #
    # @param [Integer, #to_i] code
    # @return [String]
    def http_status(code)
      [code, Rack::Utils::HTTP_STATUS_CODES[code]].join(' ')
    end
  end # class ContentNegotiation
end; end # module Rack::SPARQL
Modify Rack content negotiation to allow for the body to be a Rack::BodyProxy
require 'rack'
require 'sparql'
module Rack; module SPARQL
  ##
  # Rack middleware for SPARQL content negotiation.
  #
  # Picks an RDF serialization for any response whose body is an
  # `RDF::Enumerable`, `RDF::Query::Solutions` or `RDF::Literal::Boolean`,
  # driven by the request's Accept header.
  #
  # Content negotiation may be bypassed by passing a :format option to
  # {Rack::SPARQL#initialize}.
  #
  # This endpoint also plays the role of Rack::LinkedData, since serialized
  # SPARQL results may be RDF graphs.
  class ContentNegotiation
    VARY = {'Vary' => 'Accept'}.freeze

    # @return [#call]
    attr_reader :app

    # @return [Hash{Symbol => Object}]
    attr_reader :options

    ##
    # @param [#call] app
    # @param [Hash{Symbol => Object}] options options forwarded to the writer
    # @option options [RDF::Format, #to_sym] :format specific RDF writer format
    def initialize(app, options = {})
      @app = app
      @options = options
    end

    ##
    # Rack entry point: serialize SPARQL-ish bodies, pass everything else on.
    #
    # @param [Hash{String => String}] env
    # @return [Array(Integer, Hash, #each)]
    # @see http://rack.rubyforge.org/doc/SPEC.html
    def call(env)
      triple = app.call(env)
      # The body may arrive wrapped (e.g. in a Rack::BodyProxy); look inside.
      inner = triple[2].respond_to?(:body) ? triple[2].body : triple[2]
      case inner
      when RDF::Enumerable, RDF::Query::Solutions, RDF::Literal::Boolean
        triple[2] = inner # swap any proxy for the bare result before serializing
        serialize(env, *triple)
      else
        triple
      end
    end

    ##
    # Serialize a SPARQL query result into a Rack response using HTTP
    # content negotiation rules or a configured Content-Type.
    #
    # @param [Hash{String => String}] env
    # @param [Integer] status
    # @param [Hash{String => Object}] headers
    # @param [RDF::Enumerable] body
    # @return [Array(Integer, Hash, #each)]
    # @raise [RDF::WriterError] when no results are generated
    def serialize(env, status, headers, body)
      writer_options = {}
      writer_options[:content_types] = parse_accept_header(env['HTTP_ACCEPT']) if env.has_key?('HTTP_ACCEPT')
      writer_options.update(@options)
      results = ::SPARQL.serialize_results(body, writer_options)
      raise RDF::WriterError, "can't serialize results" unless results
      # FIXME: don't overwrite existing Vary headers
      negotiated = headers.merge(VARY).merge('Content-Type' => results.content_type)
      [status, negotiated, [results]]
    rescue RDF::WriterError => e
      not_acceptable(e.message)
    end

    protected

    ##
    # Parse an HTTP `Accept` header into an array of MIME content types
    # ordered by the HTTP/1.1 Section 14.1 precedence rules.
    #
    # @param [String, #to_s] header
    # @return [Array<String>]
    # @see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
    def parse_accept_header(header)
      header.to_s.split(',').map { |part| accept_entry(part) }.sort_by(&:last).map(&:first)
    end

    # Decompose one Accept entry into [type, sort_key].
    def accept_entry(entry)
      media_type, *params = entry.delete(' ').split(';')
      quality = 0 # we sort smallest first
      params.delete_if { |param| quality = 1 - param[2..-1].to_f if param.start_with? 'q=' }
      [media_type, [quality, media_type.count('*'), 1 - params.size]]
    end

    ##
    # Build an HTTP `406 Not Acceptable` response.
    #
    # @param [String, #to_s] message
    # @return [Array(Integer, Hash, #each)]
    def not_acceptable(message = nil)
      http_error(406, message, VARY)
    end

    ##
    # Build an HTTP `4xx` or `5xx` plain-text response.
    #
    # @param [Integer, #to_i] code
    # @param [String, #to_s] message
    # @param [Hash{String => String}] headers
    # @return [Array(Integer, Hash, #each)]
    def http_error(code, message = nil, headers = {})
      text = http_status(code) + (message.nil? ? "\n" : " (#{message})\n")
      [code, {'Content-Type' => 'text/plain; charset=utf-8'}.merge(headers), [text]]
    end

    ##
    # Standard HTTP status line for the given `code`.
    #
    # @param [Integer, #to_i] code
    # @return [String]
    def http_status(code)
      "#{code} #{Rack::Utils::HTTP_STATUS_CODES[code]}"
    end
  end # class ContentNegotiation
end; end # module Rack::SPARQL
|
# Grid/table configuration literal: a hidden :id field plus the two
# visible columns (:display_name linking to the display view, :enabled).
{
  tr_class: 'tr-dl',
  hidden_fields: [
    { id: {
      required: true,
      type: 'hidden',
    } }
  ],
  field_list: [
    { display_name: {
      type: 'text',
      width: '50%',
      help: '',
      objLink: true,
      objLinkView: 'display',
    } },
    { enabled: {
      type: 'text',
      help: '',
    } },
  ]
}
Putting in component DSL `module_import` support.
|
# Version constant for the ransack_ui gem.
module RansackUI
  VERSION = '1.3.0'
end
Bump version.
# Version constant for the ransack_ui gem.
module RansackUI
  VERSION = '1.3.1'
end
|
module React
# Helpers that assemble the JSON-ready hashes backing the React search UI.
module DataHelpers
# This module expects to be included into a controller, so that view_context resolves
# to something that provides all the various view helpers.
private
# Payload for the search results page: one section per material type
# (investigations, activities), each with a localized header, hit count,
# serialized materials and pagination metadata built from the paginated
# collections in @investigations / @activities (will_paginate-style API
# — current_page/total_pages/total_entries — TODO confirm).
def search_results_data
return {
results: [
{
type: 'investigations',
header: view_context.t(:investigation).pluralize.titleize,
count: @investigations_count,
materials: materials_data(@investigations),
pagination: {
current_page: @investigations.current_page,
total_pages: @investigations.total_pages,
start_item: @investigations.offset + 1,
end_item: @investigations.offset + @investigations.length,
total_items: @investigations.total_entries,
params: {
type: 'inv'
}
}
},
{
type: 'activities',
header: view_context.t(:activity).pluralize.titleize,
count: @activities_count,
materials: materials_data(@activities),
pagination: {
current_page: @activities.current_page,
total_pages: @activities.total_pages,
start_item: @activities.offset + 1,
end_item: @activities.offset + @activities.length,
total_items: @activities.total_entries,
params: {
type: 'act'
}
}
}
]
}
end
# Serialize each material record into the hash shape consumed by the
# React material-list components.
def materials_data(materials)
data = []
materials.each do |material|
parent_data = nil
material_count = material.offerings_count
if material.parent
# A parent's assignments are counted together with the material's own.
material_count = material_count + material.parent.offerings_count
parent_data = {
id: material.parent.id,
type: view_context.t(:investigation),
name: material.parent.name
}
end
has_activities = material.respond_to?(:activities) && !material.activities.nil?
has_pretest = material.respond_to?(:has_pretest) && material.has_pretest
user_data = nil
if material.user && (!material.user.name.nil?)
user_data = {
id: material.user.id,
name: material.user.name
}
end
mat_data = {
id: material.id,
name: material.name,
# Teachers see the teacher-specific description when one is present.
description: (current_visitor.portal_teacher && material.description_for_teacher.present? ? view_context.sanitize(material.description_for_teacher) : view_context.sanitize(material.description)),
class_name_underscored: material.class.name.underscore,
icon: {
url: (material.respond_to?(:icon_image) ? material.icon_image : nil),
},
java_requirements: material.java_requirements,
is_official: material.is_official,
links: links_for_material(material),
assigned_classes: assigned_clazz_names(material),
class_count: material_count,
sensors: view_context.probe_types(material).map { |p| p.name },
has_activities: has_activities,
has_pretest: has_pretest,
activities: has_activities ? material.activities.map{|a| {id: a.id, name: a.name} } : [],
parent: parent_data,
user: user_data
}
data.push mat_data
end
return data
end
# Build the action links (browse/preview/edit/copy/assign/...) shown for a
# material, gated by the current visitor's roles and the material type.
def links_for_material(material)
external = false
if material.is_a? Investigation
browse_url = browse_investigation_url(material)
elsif material.is_a? Activity
browse_url = browse_activity_url(material)
elsif material.is_a? ExternalActivity
browse_url = browse_external_activity_url(material)
external = true
end
links = {
browse: {
url: browse_url
}
}
if current_visitor.anonymous? or external
links[:preview] = {
url: view_context.run_url_for(material,{}),
text: 'Preview',
target: '_blank'
}
else
if material.teacher_only?
links[:preview] = {
url: view_context.run_url_for(material,{:teacher_mode => true}),
text: 'Preview',
target: '_blank'
}
else
# Logged-in users get a dropdown to preview as teacher or as student.
links[:preview] = {
type: 'dropdown',
text: 'Preview ▼',
expandedText: 'Preview ▲',
url: 'javascript:void(0)',
className: 'button preview_Button Expand_Collapse_Link',
options: [
{
text: 'As Teacher',
url: view_context.run_url_for(material, {:teacher_mode => true}),
target: '_blank',
className: ''
},
{
text: 'As Student',
url: view_context.run_url_for(material, {}),
target: '_blank',
className: ''
}
]
}
end
end
# External-activity authoring links, limited to admins/managers/authors.
if external && material.launch_url
if current_visitor.has_role?('admin','manager') || (material.author_email == current_visitor.email)
links[:external_edit] = {
url: matedit_external_activity_url(material, iFrame: false),
text: "Edit",
target: '_blank'
}
end
if current_visitor.has_role?('admin','manager') || (!material.is_locked && current_visitor.has_role?('author')) || material.author_email == current_visitor.email
links[:external_copy] = {
url: copy_external_activity_url(material),
text: "Copy",
target: '_blank'
}
end
if current_visitor.has_role?('admin')
links[:external_edit_iframe] = {
url: matedit_external_activity_url(material, iFrame: true),
text: "(edit in iframe)",
target: '_blank'
}
end
end
if material.respond_to?(:teacher_guide_url) && !material.teacher_guide_url.blank?
if current_visitor.portal_teacher || current_visitor.has_role?('admin','manager')
links[:teacher_guide] = {
text: "Teacher Guide",
url: material.teacher_guide_url
}
end
end
if current_visitor.portal_teacher
links[:assign_material] = {
text: "Assign to a Class",
url: "javascript:void(0)",
onclick: "get_Assign_To_Class_Popup(#{material.id},'#{material.class.to_s}')"
}
end
if current_visitor.has_role?('admin','manager')
links[:edit] = {
text: "(portal settings)",
url: edit_polymorphic_url(material)
}
end
if current_visitor.has_role?('admin')
links[:assign_collection] = {
text: "Add to Collection",
url: "javascript:void(0)",
onclick: "get_Assign_To_Collection_Popup(#{material.id},'#{material.class.to_s}')"
}
end
return links
end
# Names of the current teacher's classes this material is assigned to,
# ordered by class position. Empty for visitors who are not teachers.
def assigned_clazz_names(material)
return [] unless current_visitor.portal_teacher
offerings = current_visitor.portal_teacher.offerings.select{|o| o.runnable == material }
offering_clazz_names = offerings.sort{|a,b| a.clazz.position <=> b.clazz.position}.map{|o| o.clazz.name}
return offering_clazz_names
end
end
end
Fix the `requires_java` flag in search results.
module React
# Helpers that assemble the JSON-ready hashes backing the React search UI.
module DataHelpers
# This module expects to be included into a controller, so that view_context resolves
# to something that provides all the various view helpers.
private
# Payload for the search results page: one section per material type
# (investigations, activities), each with a localized header, hit count,
# serialized materials and pagination metadata built from the paginated
# collections in @investigations / @activities (will_paginate-style API
# — current_page/total_pages/total_entries — TODO confirm).
def search_results_data
return {
results: [
{
type: 'investigations',
header: view_context.t(:investigation).pluralize.titleize,
count: @investigations_count,
materials: materials_data(@investigations),
pagination: {
current_page: @investigations.current_page,
total_pages: @investigations.total_pages,
start_item: @investigations.offset + 1,
end_item: @investigations.offset + @investigations.length,
total_items: @investigations.total_entries,
params: {
type: 'inv'
}
}
},
{
type: 'activities',
header: view_context.t(:activity).pluralize.titleize,
count: @activities_count,
materials: materials_data(@activities),
pagination: {
current_page: @activities.current_page,
total_pages: @activities.total_pages,
start_item: @activities.offset + 1,
end_item: @activities.offset + @activities.length,
total_items: @activities.total_entries,
params: {
type: 'act'
}
}
}
]
}
end
# Serialize each material record into the hash shape consumed by the
# React material-list components.
def materials_data(materials)
data = []
materials.each do |material|
parent_data = nil
material_count = material.offerings_count
if material.parent
# A parent's assignments are counted together with the material's own.
material_count = material_count + material.parent.offerings_count
parent_data = {
id: material.parent.id,
type: view_context.t(:investigation),
name: material.parent.name
}
end
has_activities = material.respond_to?(:activities) && !material.activities.nil?
has_pretest = material.respond_to?(:has_pretest) && material.has_pretest
user_data = nil
if material.user && (!material.user.name.nil?)
user_data = {
id: material.user.id,
name: material.user.name
}
end
mat_data = {
id: material.id,
name: material.name,
# Teachers see the teacher-specific description when one is present.
description: (current_visitor.portal_teacher && material.description_for_teacher.present? ? view_context.sanitize(material.description_for_teacher) : view_context.sanitize(material.description)),
class_name_underscored: material.class.name.underscore,
icon: {
url: (material.respond_to?(:icon_image) ? material.icon_image : nil),
},
# Boolean: true only when the material needs a JNLP-launched Java runtime.
requires_java: material.java_requirements == SearchModelInterface::JNLPJavaRequirement,
is_official: material.is_official,
links: links_for_material(material),
assigned_classes: assigned_clazz_names(material),
class_count: material_count,
sensors: view_context.probe_types(material).map { |p| p.name },
has_activities: has_activities,
has_pretest: has_pretest,
activities: has_activities ? material.activities.map{|a| {id: a.id, name: a.name} } : [],
parent: parent_data,
user: user_data
}
data.push mat_data
end
return data
end
# Build the action links (browse/preview/edit/copy/assign/...) shown for a
# material, gated by the current visitor's roles and the material type.
def links_for_material(material)
external = false
if material.is_a? Investigation
browse_url = browse_investigation_url(material)
elsif material.is_a? Activity
browse_url = browse_activity_url(material)
elsif material.is_a? ExternalActivity
browse_url = browse_external_activity_url(material)
external = true
end
links = {
browse: {
url: browse_url
}
}
if current_visitor.anonymous? or external
links[:preview] = {
url: view_context.run_url_for(material,{}),
text: 'Preview',
target: '_blank'
}
else
if material.teacher_only?
links[:preview] = {
url: view_context.run_url_for(material,{:teacher_mode => true}),
text: 'Preview',
target: '_blank'
}
else
# Logged-in users get a dropdown to preview as teacher or as student.
links[:preview] = {
type: 'dropdown',
text: 'Preview ▼',
expandedText: 'Preview ▲',
url: 'javascript:void(0)',
className: 'button preview_Button Expand_Collapse_Link',
options: [
{
text: 'As Teacher',
url: view_context.run_url_for(material, {:teacher_mode => true}),
target: '_blank',
className: ''
},
{
text: 'As Student',
url: view_context.run_url_for(material, {}),
target: '_blank',
className: ''
}
]
}
end
end
# External-activity authoring links, limited to admins/managers/authors.
if external && material.launch_url
if current_visitor.has_role?('admin','manager') || (material.author_email == current_visitor.email)
links[:external_edit] = {
url: matedit_external_activity_url(material, iFrame: false),
text: "Edit",
target: '_blank'
}
end
if current_visitor.has_role?('admin','manager') || (!material.is_locked && current_visitor.has_role?('author')) || material.author_email == current_visitor.email
links[:external_copy] = {
url: copy_external_activity_url(material),
text: "Copy",
target: '_blank'
}
end
if current_visitor.has_role?('admin')
links[:external_edit_iframe] = {
url: matedit_external_activity_url(material, iFrame: true),
text: "(edit in iframe)",
target: '_blank'
}
end
end
if material.respond_to?(:teacher_guide_url) && !material.teacher_guide_url.blank?
if current_visitor.portal_teacher || current_visitor.has_role?('admin','manager')
links[:teacher_guide] = {
text: "Teacher Guide",
url: material.teacher_guide_url
}
end
end
if current_visitor.portal_teacher
links[:assign_material] = {
text: "Assign to a Class",
url: "javascript:void(0)",
onclick: "get_Assign_To_Class_Popup(#{material.id},'#{material.class.to_s}')"
}
end
if current_visitor.has_role?('admin','manager')
links[:edit] = {
text: "(portal settings)",
url: edit_polymorphic_url(material)
}
end
if current_visitor.has_role?('admin')
links[:assign_collection] = {
text: "Add to Collection",
url: "javascript:void(0)",
onclick: "get_Assign_To_Collection_Popup(#{material.id},'#{material.class.to_s}')"
}
end
return links
end
# Names of the current teacher's classes this material is assigned to,
# ordered by class position. Empty for visitors who are not teachers.
def assigned_clazz_names(material)
return [] unless current_visitor.portal_teacher
offerings = current_visitor.portal_teacher.offerings.select{|o| o.runnable == material }
offering_clazz_names = offerings.sort{|a,b| a.clazz.position <=> b.clazz.position}.map{|o| o.clazz.name}
return offering_clazz_names
end
end
end
|
require 'refinerycms-core'
require 'refinerycms-settings'
require 'filters_spam'
module Refinery
  # Load the generator lazily, only when the constant is first referenced.
  autoload :InquiriesGenerator, 'generators/refinery/inquiries/inquiries_generator'

  module Inquiries
    require 'refinery/inquiries/engine'
    require 'refinery/inquiries/configuration'

    autoload :Version, 'refinery/inquiries/version'

    class << self
      attr_writer :root

      # Root path of the extension (three directories above this file), memoized.
      def root
        @root ||= Pathname.new(File.expand_path('../../../', __FILE__))
      end

      # Factory directories exposed to host applications' test suites, memoized.
      def factory_paths
        @factory_paths ||= [root.join('spec/factories').to_s]
      end
    end
  end
end
Add missing require "acts_as_indexed".
require 'refinerycms-core'
require 'refinerycms-settings'
require 'filters_spam'
require 'acts_as_indexed'
module Refinery
# Load the generator lazily, only when the constant is first referenced.
autoload :InquiriesGenerator, 'generators/refinery/inquiries/inquiries_generator'
module Inquiries
require 'refinery/inquiries/engine'
require 'refinery/inquiries/configuration'
autoload :Version, 'refinery/inquiries/version'
class << self
attr_writer :root
# Root path of the extension (three directories above this file), memoized.
def root
@root ||= Pathname.new(File.expand_path('../../../', __FILE__))
end
# Factory directories exposed to host applications' test suites, memoized.
def factory_paths
@factory_paths ||= [ root.join("spec/factories").to_s ]
end
end
end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'pretty_bytes/version'
Gem::Specification.new do |spec|
spec.name = "pretty_bytes"
spec.version = PrettyBytes::VERSION
spec.authors = ["Gabriel Montalvo"]
spec.email = ["gmontalvo.riv@gmail.com"]
spec.summary = %q{Byte converter}
spec.description = %q{Convert bytes to a human readable string: 1337 → 1.34 kB}
spec.homepage = "https://github.com/gmontalvoriv/pretty-bytes-rb"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
# delete this section to allow pushing this gem to any host.
if spec.respond_to?(:metadata)
spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
else
raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.11"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
update
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'pretty_bytes/version'
Gem::Specification.new do |spec|
spec.name = "pretty_bytes"
spec.version = PrettyBytes::VERSION
spec.authors = ["Gabriel Montalvo"]
spec.email = ["gmontalvo.riv@gmail.com"]
spec.summary = %q{Byte converter}
spec.description = %q{Convert bytes to a human readable string: 1337 → 1.34 kB}
spec.homepage = "https://github.com/gmontalvoriv/pretty-bytes-rb"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
# delete this section to allow pushing this gem to any host.
if spec.respond_to?(:metadata)
spec.metadata['allowed_push_host'] = "https://rubygems.org"
else
raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.11"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end |
require 'spec_helper'
describe Pond, "#checkout" do
it "should yield objects specified in the block" do
pond = Pond.new { 1 }
pond.checkout { |i| i.should == 1 }
end
it "should return the value returned by the block" do
pond = Pond.new { 1 }
value = pond.checkout { |i| 'value' }
value.should == 'value'
end
it "removes the object from the pool if the detach_if block returns true" do
int = 0
pond = Pond.new(detach_if: lambda { |obj| obj < 2 }) { int += 1 }
pond.available.should == []
# allocate 1, should not check back in
pond.checkout {|i| i.should == 1}
pond.available.should == []
# allocate 2, should be nothing else in the pond
pond.checkout do |i|
i.should == 2
pond.available.should == []
end
# 2 should still be in the pond
pond.available.should == [2]
end
it "should instantiate objects when needed" do
int = 0
pond = Pond.new { int += 1 }
pond.size.should == 0
pond.checkout do |i|
pond.available.should == []
pond.allocated.should == {Thread.current => 1}
i.should == 1
end
pond.available.should == [1]
pond.allocated.should == {}
pond.size.should == 1
pond.checkout do |i|
pond.available.should == []
pond.allocated.should == {Thread.current => 1}
i.should == 1
end
pond.available.should == [1]
pond.allocated.should == {}
pond.size.should == 1
end
it "should not instantiate objects in excess of the specified maximum_size" do
object = nil
pond = Pond.new(:maximum_size => 1) { object = Object.new }
object_ids = []
threads = 20.times.map do
pond.checkout do |obj|
object_ids << obj.object_id
end
end
object_ids.uniq.should == [object.object_id]
end
it "should give different objects to different threads" do
int = 0
pond = Pond.new { int += 1 }
q1, q2 = Queue.new, Queue.new
t = Thread.new do
pond.checkout do |i|
i.should == 1
q1.push nil
q2.pop
end
end
q1.pop
pond.size.should == 1
pond.allocated.should == {t => 1}
pond.available.should == []
pond.checkout { |i| i.should == 2 }
pond.size.should == 2
pond.allocated.should == {t => 1}
pond.available.should == [2]
q2.push nil
t.join
pond.allocated.should == {}
pond.available.should == [2, 1]
end
it "should be re-entrant" do
pond = Pond.new { Object.new }
pond.checkout do |obj1|
pond.checkout do |obj2|
obj1.should == obj2
end
end
end
it "should support a thread checking out objects from distinct Pond instances" do
pond1 = Pond.new { [] }
pond2 = Pond.new { {} }
pond1.checkout do |one|
pond2.checkout do |two|
one.should == []
two.should == {}
end
end
end
it "should yield an object to only one thread when many are waiting" do
pond = Pond.new(:maximum_size => 1) { 2 }
q1, q2, q3 = Queue.new, Queue.new, Queue.new
threads = 4.times.map do
Thread.new do
Thread.current[:value] = 0
q1.push nil
pond.checkout do |o|
Thread.current[:value] = o
q2.push nil
q3.pop
end
end
end
4.times { q1.pop }
q2.pop
threads.map{|t| t[:value]}.sort.should == [0, 0, 0, 2]
4.times { q3.push nil }
threads.each &:join
end
it "should treat the collection of objects as a queue by default" do
int = 0
pond = Pond.new { int += 1 }
results = []
q = Queue.new
m = Mutex.new
cv = ConditionVariable.new
4.times do
threads = 4.times.map do
Thread.new do
m.synchronize do
pond.checkout do |i|
results << i
q.push nil
cv.wait(m)
end
cv.signal
end
end
end
4.times { q.pop }
cv.signal
threads.each(&:join)
end
pond.size.should == 4
results.should == (1..4).cycle(4).to_a
end
it "should treat the collection of objects as a stack if configured that way" do
int = 0
pond = Pond.new(:collection => :stack) { int += 1 }
results = []
q = Queue.new
m = Mutex.new
cv = ConditionVariable.new
4.times do
threads = 4.times.map do
Thread.new do
m.synchronize do
pond.checkout do |i|
results << i
q.push nil
cv.wait(m)
end
cv.signal
end
end
end
4.times { q.pop }
cv.signal
threads.each(&:join)
end
pond.size.should == 4
results.should == [1, 2, 3, 4, 4, 3, 2, 1, 1, 2, 3, 4, 4, 3, 2, 1]
end
it "should raise a timeout error if it takes too long to return an object" do
pond = Pond.new(:timeout => 0.01, :maximum_size => 1){1}
q1, q2 = Queue.new, Queue.new
t = Thread.new do
pond.checkout do
q1.push nil
q2.pop
end
end
q1.pop
proc{pond.checkout{}}.should raise_error Pond::Timeout
q2.push nil
t.join
end
it "with a block that raises an error should check the object back in and propagate the error" do
pond = Pond.new { 1 }
proc do
pond.checkout do
raise "Blah!"
end
end.should raise_error RuntimeError, "Blah!"
pond.allocated.should == {}
pond.available.should == [1]
end
it "should not block other threads if the object instantiation takes a long time" do
t = nil
q1, q2, q3 = Queue.new, Queue.new, Queue.new
pond = Pond.new do
q1.push nil
q2.pop
end
q2.push 1
pond.checkout do |i|
q1.pop
i.should == 1
t = Thread.new do
pond.checkout do |i|
i.should == 2
end
end
q1.pop
end
pond.checkout { |i| i.should == 1 }
q2.push 2
t.join
end
it "should not leave the Pond in a bad state if object instantiation fails" do
int = 0
error = false
pond = Pond.new do
raise "Instantiation Error!" if error
int += 1
end
pond.checkout { |i| i.should == 1 }
pond.size.should == 1
pond.allocated.should == {}
pond.available.should == [1]
error = true
pond.checkout do |i|
i.should == 1
t = Thread.new do
pond.checkout{}
end
proc { t.join }.should raise_error RuntimeError, "Instantiation Error!"
end
pond.size.should == 1
pond.allocated.should == {}
pond.available.should == [1]
error = false
pond.checkout do |i|
i.should == 1
t = Thread.new do
pond.checkout { |j| j.should == 2 }
end
t.join
end
pond.size.should == 2
pond.allocated.should == {}
pond.available.should == [2, 1]
end
end
Reorganize specs.
require 'spec_helper'
describe Pond, "#checkout" do
it "should yield objects specified in the block" do
pond = Pond.new { 1 }
pond.checkout { |i| i.should == 1 }
end
it "should return the value returned by the block" do
pond = Pond.new { 1 }
value = pond.checkout { |i| 'value' }
value.should == 'value'
end
it "should instantiate objects when needed" do
int = 0
pond = Pond.new { int += 1 }
pond.size.should == 0
pond.checkout do |i|
pond.available.should == []
pond.allocated.should == {Thread.current => 1}
i.should == 1
end
pond.available.should == [1]
pond.allocated.should == {}
pond.size.should == 1
pond.checkout do |i|
pond.available.should == []
pond.allocated.should == {Thread.current => 1}
i.should == 1
end
pond.available.should == [1]
pond.allocated.should == {}
pond.size.should == 1
end
it "should not instantiate objects in excess of the specified maximum_size" do
object = nil
pond = Pond.new(:maximum_size => 1) { object = Object.new }
object_ids = []
threads = 20.times.map do
pond.checkout do |obj|
object_ids << obj.object_id
end
end
object_ids.uniq.should == [object.object_id]
end
it "should give different objects to different threads" do
int = 0
pond = Pond.new { int += 1 }
q1, q2 = Queue.new, Queue.new
t = Thread.new do
pond.checkout do |i|
i.should == 1
q1.push nil
q2.pop
end
end
q1.pop
pond.size.should == 1
pond.allocated.should == {t => 1}
pond.available.should == []
pond.checkout { |i| i.should == 2 }
pond.size.should == 2
pond.allocated.should == {t => 1}
pond.available.should == [2]
q2.push nil
t.join
pond.allocated.should == {}
pond.available.should == [2, 1]
end
it "should be re-entrant" do
pond = Pond.new { Object.new }
pond.checkout do |obj1|
pond.checkout do |obj2|
obj1.should == obj2
end
end
end
it "should support a thread checking out objects from distinct Pond instances" do
pond1 = Pond.new { [] }
pond2 = Pond.new { {} }
pond1.checkout do |one|
pond2.checkout do |two|
one.should == []
two.should == {}
end
end
end
it "should yield an object to only one thread when many are waiting" do
pond = Pond.new(:maximum_size => 1) { 2 }
q1, q2, q3 = Queue.new, Queue.new, Queue.new
threads = 4.times.map do
Thread.new do
Thread.current[:value] = 0
q1.push nil
pond.checkout do |o|
Thread.current[:value] = o
q2.push nil
q3.pop
end
end
end
4.times { q1.pop }
q2.pop
threads.map{|t| t[:value]}.sort.should == [0, 0, 0, 2]
4.times { q3.push nil }
threads.each &:join
end
it "should treat the collection of objects as a queue by default" do
int = 0
pond = Pond.new { int += 1 }
results = []
q = Queue.new
m = Mutex.new
cv = ConditionVariable.new
4.times do
threads = 4.times.map do
Thread.new do
m.synchronize do
pond.checkout do |i|
results << i
q.push nil
cv.wait(m)
end
cv.signal
end
end
end
4.times { q.pop }
cv.signal
threads.each(&:join)
end
pond.size.should == 4
results.should == (1..4).cycle(4).to_a
end
it "should treat the collection of objects as a stack if configured that way" do
int = 0
pond = Pond.new(:collection => :stack) { int += 1 }
results = []
q = Queue.new
m = Mutex.new
cv = ConditionVariable.new
4.times do
threads = 4.times.map do
Thread.new do
m.synchronize do
pond.checkout do |i|
results << i
q.push nil
cv.wait(m)
end
cv.signal
end
end
end
4.times { q.pop }
cv.signal
threads.each(&:join)
end
pond.size.should == 4
results.should == [1, 2, 3, 4, 4, 3, 2, 1, 1, 2, 3, 4, 4, 3, 2, 1]
end
it "should raise a timeout error if it takes too long to return an object" do
pond = Pond.new(:timeout => 0.01, :maximum_size => 1){1}
q1, q2 = Queue.new, Queue.new
t = Thread.new do
pond.checkout do
q1.push nil
q2.pop
end
end
q1.pop
proc{pond.checkout{}}.should raise_error Pond::Timeout
q2.push nil
t.join
end
it "with a block that raises an error should check the object back in and propagate the error" do
pond = Pond.new { 1 }
proc do
pond.checkout do
raise "Blah!"
end
end.should raise_error RuntimeError, "Blah!"
pond.allocated.should == {}
pond.available.should == [1]
end
it "should not block other threads if the object instantiation takes a long time" do
t = nil
q1, q2, q3 = Queue.new, Queue.new, Queue.new
pond = Pond.new do
q1.push nil
q2.pop
end
q2.push 1
pond.checkout do |i|
q1.pop
i.should == 1
t = Thread.new do
pond.checkout do |i|
i.should == 2
end
end
q1.pop
end
pond.checkout { |i| i.should == 1 }
q2.push 2
t.join
end
it "should not leave the Pond in a bad state if object instantiation fails" do
int = 0
error = false
pond = Pond.new do
raise "Instantiation Error!" if error
int += 1
end
pond.checkout { |i| i.should == 1 }
pond.size.should == 1
pond.allocated.should == {}
pond.available.should == [1]
error = true
pond.checkout do |i|
i.should == 1
t = Thread.new do
pond.checkout{}
end
proc { t.join }.should raise_error RuntimeError, "Instantiation Error!"
end
pond.size.should == 1
pond.allocated.should == {}
pond.available.should == [1]
error = false
pond.checkout do |i|
i.should == 1
t = Thread.new do
pond.checkout { |j| j.should == 2 }
end
t.join
end
pond.size.should == 2
pond.allocated.should == {}
pond.available.should == [2, 1]
end
it "removes the object from the pool if the detach_if block returns true" do
int = 0
pond = Pond.new(detach_if: lambda { |obj| obj < 2 }) { int += 1 }
pond.available.should == []
# allocate 1, should not check back in
pond.checkout {|i| i.should == 1}
pond.available.should == []
# allocate 2, should be nothing else in the pond
pond.checkout do |i|
i.should == 2
pond.available.should == []
end
# 2 should still be in the pond
pond.available.should == [2]
end
end
|
require 'librarian'
require 'librarian/chef'
module Librarian
module Chef
describe Dsl do
context "a simple specfile - a single source, a single dependency, no transitive dependencies" do
it "should run with a source given as hash options on a dependency" do
deps = Dsl.run do
cookbook 'apt',
:git => 'https://github.com/opscode/cookbooks.git'
end.dependencies
deps.should_not be_empty
deps.first.name.should == 'apt'
deps.first.source.uri.should =~ /opscode\/cookbooks/
end
it "should run with a hash block source" do
deps = Dsl.run do
source :git => 'https://github.com/opscode/cookbooks.git' do
cookbook 'apt'
end
end.dependencies
deps.should_not be_empty
deps.first.name.should == 'apt'
deps.first.source.uri.should =~ /opscode\/cookbooks/
end
it "should run with a named block source" do
deps = Dsl.run do
git 'https://github.com/opscode/cookbooks.git' do
cookbook 'apt'
end
end.dependencies
deps.should_not be_empty
deps.first.name.should == 'apt'
deps.first.source.uri.should =~ /opscode\/cookbooks/
end
it "should run with a default source" do
deps = Dsl.run do
git 'https://github.com/opscode/cookbooks.git'
cookbook 'apt'
end.dependencies
deps.should_not be_empty
deps.first.name.should == 'apt'
deps.first.source.uri.should =~ /opscode\/cookbooks/
end
end
end
end
end
The dsl spec should specify the spec sources.
require 'librarian'
require 'librarian/chef'
module Librarian
module Chef
describe Dsl do
context "a simple specfile - a single source, a single dependency, no transitive dependencies" do
it "should run with a hash source" do
spec = Dsl.run do
cookbook 'apt',
:git => 'https://github.com/opscode/cookbooks.git'
end
spec.dependencies.should_not be_empty
spec.dependencies.first.name.should == 'apt'
spec.dependencies.first.source.uri.should =~ /opscode\/cookbooks/
spec.sources.should be_empty
end
it "should run with a block hash source" do
spec = Dsl.run do
source :git => 'https://github.com/opscode/cookbooks.git' do
cookbook 'apt'
end
end
spec.dependencies.should_not be_empty
spec.dependencies.first.name.should == 'apt'
spec.dependencies.first.source.uri.should =~ /opscode\/cookbooks/
spec.sources.should be_empty
end
it "should run with a block named source" do
spec = Dsl.run do
git 'https://github.com/opscode/cookbooks.git' do
cookbook 'apt'
end
end
spec.dependencies.should_not be_empty
spec.dependencies.first.name.should == 'apt'
spec.dependencies.first.source.uri.should =~ /opscode\/cookbooks/
spec.sources.should be_empty
end
it "should run with a default hash source" do
spec = Dsl.run do
source :git => 'https://github.com/opscode/cookbooks.git'
cookbook 'apt'
end
spec.dependencies.should_not be_empty
spec.dependencies.first.name.should == 'apt'
spec.dependencies.first.source.uri.should =~ /opscode\/cookbooks/
spec.sources.should_not be_empty
spec.sources.size.should == 1
spec.dependencies.first.source.should == spec.sources.first
end
it "should run with a default named source" do
spec = Dsl.run do
git 'https://github.com/opscode/cookbooks.git'
cookbook 'apt'
end
spec.dependencies.should_not be_empty
spec.dependencies.first.name.should == 'apt'
spec.dependencies.first.source.uri.should =~ /opscode\/cookbooks/
spec.sources.should_not be_empty
spec.sources.size.should == 1
spec.dependencies.first.source.should == spec.sources.first
end
end
end
end
end |
require File.expand_path("spec_helper", File.dirname(__FILE__))
require File.expand_path("../lib/nesta/commands", File.dirname(__FILE__))
describe "nesta" do
before(:each) do
create_temp_directory
@project_path = temp_path('mysite.com')
end
after(:each) do
remove_temp_directory
end
def project_path(path)
File.join(@project_path, path)
end
def should_exist(file)
File.exist?(project_path(file)).should be_true
end
def create_config_yaml(text)
File.open(Nesta::Config.yaml_path, 'w') { |f| f.puts(text) }
end
describe "new" do
def gemfile_source
File.read(project_path('Gemfile'))
end
def rakefile_source
File.read(project_path('Rakefile'))
end
describe "without options" do
before(:each) do
Nesta::Commands::New.new(@project_path).execute
end
it "should create the content directories" do
should_exist('content/attachments')
should_exist('content/pages')
end
it "should create the home page" do
should_exist('content/pages/index.haml')
end
it "should create the rackup file" do
should_exist('config.ru')
end
it "should create the config.yml file" do
should_exist('config/config.yml')
end
it "should add a Gemfile" do
should_exist('Gemfile')
gemfile_source.should match(/gem 'nesta', '#{Nesta::VERSION}'/)
end
end
describe "--git" do
before(:each) do
@command = Nesta::Commands::New.new(@project_path, 'git' => '')
@command.stub(:system)
end
it "should create a .gitignore file" do
@command.execute
File.read(project_path('.gitignore')).should match(/\.bundle/)
end
it "should create a git repo" do
@command.should_receive(:system).with('git', 'init')
@command.execute
end
it "should commit the blank project" do
@command.should_receive(:system).with('git', 'add', '.')
@command.should_receive(:system).with(
'git', 'commit', '-m', 'Initial commit')
@command.execute
end
end
describe "--vlad" do
before(:each) do
Nesta::Commands::New.new(@project_path, 'vlad' => '').execute
end
it "should add vlad to Gemfile" do
gemfile_source.should match(/gem 'vlad', '2.1.0'/)
gemfile_source.should match(/gem 'vlad-git', '2.2.0'/)
end
it "should configure the vlad rake tasks" do
should_exist('Rakefile')
rakefile_source.should match(/require 'vlad'/)
end
it "should create deploy.rb" do
should_exist('config/deploy.rb')
deploy_source = File.read(project_path('config/deploy.rb'))
deploy_source.should match(/set :application, 'mysite.com'/)
end
end
end
describe "demo:content" do
before(:each) do
@config_path = project_path('config/config.yml')
FileUtils.mkdir_p(File.dirname(@config_path))
Nesta::Config.stub(:yaml_path).and_return(@config_path)
create_config_yaml('content: path/to/content')
Nesta::App.stub(:root).and_return(@project_path)
@repo_url = 'git://github.com/gma/nesta-demo-content.git'
@demo_path = project_path('content-demo')
@command = Nesta::Commands::Demo::Content.new
@command.stub(:system)
end
it "should clone the repository" do
@command.should_receive(:system).with(
'git', 'clone', @repo_url, @demo_path)
@command.execute
end
it "should configure the content directory" do
@command.execute
File.read(@config_path).should match(/^content: content-demo/)
end
describe "when repository already exists" do
before(:each) do
FileUtils.mkdir_p(@demo_path)
end
it "should update the repository" do
@command.should_receive(:system).with('git', 'pull', 'origin', 'master')
@command.execute
end
end
describe "when site versioned with git" do
before(:each) do
@exclude_path = project_path('.git/info/exclude')
FileUtils.mkdir_p(File.dirname(@exclude_path))
File.open(@exclude_path, 'w') { |file| file.puts '# Excludes' }
end
it "should tell git to ignore content-demo" do
@command.execute
File.read(@exclude_path).should match(/content-demo/)
end
describe "and content-demo already ignored" do
before(:each) do
File.open(@exclude_path, 'w') { |file| file.puts 'content-demo' }
end
it "shouldn't tell git to ignore it twice" do
@command.execute
File.read(@exclude_path).scan('content-demo').size.should == 1
end
end
end
end
describe "edit" do
before(:each) do
Nesta::Config.stub(:content_path).and_return('content')
@page_path = 'path/to/page.mdown'
@command = Nesta::Commands::Edit.new(@page_path)
@command.stub(:system)
end
it "should launch the editor" do
ENV['EDITOR'] = 'vi'
full_path = File.join('content/pages', @page_path)
@command.should_receive(:system).with(ENV['EDITOR'], full_path)
@command.execute
end
it "should not try and launch an editor if environment not setup" do
ENV.delete('EDITOR')
@command.should_not_receive(:system)
$stderr.stub(:puts)
@command.execute
end
end
describe "plugin:create" do
before(:each) do
@name = 'my-feature'
@gem_name = "nesta-plugin-#{@name}"
@plugins_path = temp_path('plugins')
@working_dir = Dir.pwd
Dir.mkdir(@plugins_path)
Dir.chdir(@plugins_path)
@command = Nesta::Commands::Plugin::Create.new(@name)
@command.stub(:system)
end
after(:each) do
Dir.chdir(@working_dir)
FileUtils.rm_r(@plugins_path)
end
it "should create a new gem prefixed with nesta-plugin" do
@command.should_receive(:system).with('bundle', 'gem', @gem_name)
begin
@command.execute
rescue Errno::ENOENT
# This test is only concerned with running bundle gem; ENOENT
# errors are raised because we didn't create a real gem.
end
end
describe "after gem created" do
def create_gem_file(*components)
path = File.join(@plugins_path, @gem_name, *components)
FileUtils.makedirs(File.dirname(path))
File.open(path, 'w') { |f| yield f if block_given? }
path
end
before(:each) do
@required_file = create_gem_file('lib', "#{@gem_name}.rb")
@init_file = create_gem_file('lib', @gem_name, 'init.rb')
@gem_spec = create_gem_file("#{@gem_name}.gemspec") do |file|
file.puts " # specify any dependencies here; for example:"
file.puts "end"
end
end
after(:each) do
FileUtils.rm(@required_file)
FileUtils.rm(@init_file)
end
it "should create the ruby file loaded on require" do
@command.execute
File.read(@required_file).should include('Plugin.register(__FILE__)')
end
it "should create a default init.rb file" do
@command.execute
init = File.read(@init_file)
boilerplate = <<-EOF
module My::Feature
module Helpers
EOF
init.should include(boilerplate)
init.should include('helpers Nesta::Plugin::My::Feature::Helpers')
end
it "should specify plugin gem's dependencies" do
@command.execute
text = File.read(@gem_spec)
text.should include('gem.add_dependency("nesta", ">= 0.9.11")')
text.should include('gem.add_development_dependency("rake")')
end
end
end
describe "theme:install" do
before(:each) do
@repo_url = 'git://github.com/gma/nesta-theme-mine.git'
@theme_dir = 'themes/mine'
FileUtils.mkdir_p(File.join(@theme_dir, '.git'))
@command = Nesta::Commands::Theme::Install.new(@repo_url)
@command.stub(:enable)
@command.stub(:system)
end
after(:each) do
FileUtils.rm_r(@theme_dir)
end
it "should clone the repository" do
@command.should_receive(:system).with(
'git', 'clone', @repo_url, @theme_dir)
@command.execute
end
it "should remove the theme's .git directory" do
@command.execute
File.exist?(@theme_dir).should be_true
File.exist?(File.join(@theme_dir, '.git')).should be_false
end
it "should enable the freshly installed theme" do
@command.should_receive(:enable)
@command.execute
end
describe "when theme URL doesn't match recommended pattern" do
before(:each) do
@repo_url = 'git://foobar.com/path/to/mytheme.git'
@other_theme_dir = 'themes/mytheme'
FileUtils.mkdir_p(File.join(@other_theme_dir, '.git'))
@command = Nesta::Commands::Theme::Install.new(@repo_url)
@command.stub(:enable)
end
after(:each) do
FileUtils.rm_r(@other_theme_dir)
end
it "should use the basename as theme dir" do
@command.should_receive(:system).with(
'git', 'clone', @repo_url, @other_theme_dir)
@command.execute
end
end
end
describe "theme:enable" do
before(:each) do
config = temp_path('config.yml')
Nesta::Config.stub(:yaml_path).and_return(config)
@name = 'mytheme'
@command = Nesta::Commands::Theme::Enable.new(@name)
end
shared_examples_for "command that configures the theme" do
it "should enable the theme" do
@command.execute
File.read(Nesta::Config.yaml_path).should match(/^theme: #{@name}/)
end
end
describe "when theme config is commented out" do
before(:each) do
create_config_yaml(' # theme: blah')
end
it_should_behave_like "command that configures the theme"
end
describe "when another theme is configured" do
before(:each) do
create_config_yaml('theme: another')
end
it_should_behave_like "command that configures the theme"
end
describe "when no theme config exists" do
before(:each) do
create_config_yaml('# I have no theme config')
end
it_should_behave_like "command that configures the theme"
end
end
describe "theme:create" do
def should_exist(file)
File.exist?(Nesta::Path.themes(@name, file)).should be_true
end
before(:each) do
Nesta::App.stub(:root).and_return(TempFileHelper::TEMP_DIR)
@name = 'my-new-theme'
Nesta::Commands::Theme::Create.new(@name).execute
end
it "should create the theme directory" do
File.directory?(Nesta::Path.themes(@name)).should be_true
end
it "should create a dummy README file" do
should_exist('README.md')
text = File.read(Nesta::Path.themes(@name, 'README.md'))
text.should match(/#{@name} is a theme/)
end
it "should create a default app.rb file" do
should_exist('app.rb')
end
it "should create public and views directories" do
should_exist("public/#{@name}")
should_exist('views')
end
it "should copy the default view templates into views" do
%w(layout.haml page.haml master.sass).each do |file|
should_exist("views/#{file}")
end
end
end
end
Fix test
require File.expand_path("spec_helper", File.dirname(__FILE__))
require File.expand_path("../lib/nesta/commands", File.dirname(__FILE__))
describe "nesta" do
before(:each) do
create_temp_directory
@project_path = temp_path('mysite.com')
end
after(:each) do
remove_temp_directory
end
def project_path(path)
File.join(@project_path, path)
end
def should_exist(file)
File.exist?(project_path(file)).should be_true
end
def create_config_yaml(text)
File.open(Nesta::Config.yaml_path, 'w') { |f| f.puts(text) }
end
describe "new" do
def gemfile_source
File.read(project_path('Gemfile'))
end
def rakefile_source
File.read(project_path('Rakefile'))
end
describe "without options" do
before(:each) do
Nesta::Commands::New.new(@project_path).execute
end
it "should create the content directories" do
should_exist('content/attachments')
should_exist('content/pages')
end
it "should create the home page" do
should_exist('content/pages/index.haml')
end
it "should create the rackup file" do
should_exist('config.ru')
end
it "should create the config.yml file" do
should_exist('config/config.yml')
end
it "should add a Gemfile" do
should_exist('Gemfile')
gemfile_source.should match(/gem 'nesta'/)
end
end
describe "--git" do
before(:each) do
@command = Nesta::Commands::New.new(@project_path, 'git' => '')
@command.stub(:system)
end
it "should create a .gitignore file" do
@command.execute
File.read(project_path('.gitignore')).should match(/\.bundle/)
end
it "should create a git repo" do
@command.should_receive(:system).with('git', 'init')
@command.execute
end
it "should commit the blank project" do
@command.should_receive(:system).with('git', 'add', '.')
@command.should_receive(:system).with(
'git', 'commit', '-m', 'Initial commit')
@command.execute
end
end
describe "--vlad" do
before(:each) do
Nesta::Commands::New.new(@project_path, 'vlad' => '').execute
end
it "should add vlad to Gemfile" do
gemfile_source.should match(/gem 'vlad', '2.1.0'/)
gemfile_source.should match(/gem 'vlad-git', '2.2.0'/)
end
it "should configure the vlad rake tasks" do
should_exist('Rakefile')
rakefile_source.should match(/require 'vlad'/)
end
it "should create deploy.rb" do
should_exist('config/deploy.rb')
deploy_source = File.read(project_path('config/deploy.rb'))
deploy_source.should match(/set :application, 'mysite.com'/)
end
end
end
describe "demo:content" do
before(:each) do
@config_path = project_path('config/config.yml')
FileUtils.mkdir_p(File.dirname(@config_path))
Nesta::Config.stub(:yaml_path).and_return(@config_path)
create_config_yaml('content: path/to/content')
Nesta::App.stub(:root).and_return(@project_path)
@repo_url = 'git://github.com/gma/nesta-demo-content.git'
@demo_path = project_path('content-demo')
@command = Nesta::Commands::Demo::Content.new
@command.stub(:system)
end
it "should clone the repository" do
@command.should_receive(:system).with(
'git', 'clone', @repo_url, @demo_path)
@command.execute
end
it "should configure the content directory" do
@command.execute
File.read(@config_path).should match(/^content: content-demo/)
end
describe "when repository already exists" do
before(:each) do
FileUtils.mkdir_p(@demo_path)
end
it "should update the repository" do
@command.should_receive(:system).with('git', 'pull', 'origin', 'master')
@command.execute
end
end
describe "when site versioned with git" do
before(:each) do
@exclude_path = project_path('.git/info/exclude')
FileUtils.mkdir_p(File.dirname(@exclude_path))
File.open(@exclude_path, 'w') { |file| file.puts '# Excludes' }
end
it "should tell git to ignore content-demo" do
@command.execute
File.read(@exclude_path).should match(/content-demo/)
end
describe "and content-demo already ignored" do
before(:each) do
File.open(@exclude_path, 'w') { |file| file.puts 'content-demo' }
end
it "shouldn't tell git to ignore it twice" do
@command.execute
File.read(@exclude_path).scan('content-demo').size.should == 1
end
end
end
end
describe "edit" do
before(:each) do
Nesta::Config.stub(:content_path).and_return('content')
@page_path = 'path/to/page.mdown'
@command = Nesta::Commands::Edit.new(@page_path)
@command.stub(:system)
end
it "should launch the editor" do
ENV['EDITOR'] = 'vi'
full_path = File.join('content/pages', @page_path)
@command.should_receive(:system).with(ENV['EDITOR'], full_path)
@command.execute
end
it "should not try and launch an editor if environment not setup" do
ENV.delete('EDITOR')
@command.should_not_receive(:system)
$stderr.stub(:puts)
@command.execute
end
end
describe "plugin:create" do
before(:each) do
@name = 'my-feature'
@gem_name = "nesta-plugin-#{@name}"
@plugins_path = temp_path('plugins')
@working_dir = Dir.pwd
Dir.mkdir(@plugins_path)
Dir.chdir(@plugins_path)
@command = Nesta::Commands::Plugin::Create.new(@name)
@command.stub(:system)
end
after(:each) do
Dir.chdir(@working_dir)
FileUtils.rm_r(@plugins_path)
end
it "should create a new gem prefixed with nesta-plugin" do
@command.should_receive(:system).with('bundle', 'gem', @gem_name)
begin
@command.execute
rescue Errno::ENOENT
# This test is only concerned with running bundle gem; ENOENT
# errors are raised because we didn't create a real gem.
end
end
describe "after gem created" do
def create_gem_file(*components)
path = File.join(@plugins_path, @gem_name, *components)
FileUtils.makedirs(File.dirname(path))
File.open(path, 'w') { |f| yield f if block_given? }
path
end
before(:each) do
@required_file = create_gem_file('lib', "#{@gem_name}.rb")
@init_file = create_gem_file('lib', @gem_name, 'init.rb')
@gem_spec = create_gem_file("#{@gem_name}.gemspec") do |file|
file.puts " # specify any dependencies here; for example:"
file.puts "end"
end
end
after(:each) do
FileUtils.rm(@required_file)
FileUtils.rm(@init_file)
end
it "should create the ruby file loaded on require" do
@command.execute
File.read(@required_file).should include('Plugin.register(__FILE__)')
end
it "should create a default init.rb file" do
@command.execute
init = File.read(@init_file)
boilerplate = <<-EOF
module My::Feature
module Helpers
EOF
init.should include(boilerplate)
init.should include('helpers Nesta::Plugin::My::Feature::Helpers')
end
it "should specify plugin gem's dependencies" do
@command.execute
text = File.read(@gem_spec)
text.should include('gem.add_dependency("nesta", ">= 0.9.11")')
text.should include('gem.add_development_dependency("rake")')
end
end
end
describe "theme:install" do
before(:each) do
@repo_url = 'git://github.com/gma/nesta-theme-mine.git'
@theme_dir = 'themes/mine'
FileUtils.mkdir_p(File.join(@theme_dir, '.git'))
@command = Nesta::Commands::Theme::Install.new(@repo_url)
@command.stub(:enable)
@command.stub(:system)
end
after(:each) do
FileUtils.rm_r(@theme_dir)
end
it "should clone the repository" do
@command.should_receive(:system).with(
'git', 'clone', @repo_url, @theme_dir)
@command.execute
end
it "should remove the theme's .git directory" do
@command.execute
File.exist?(@theme_dir).should be_true
File.exist?(File.join(@theme_dir, '.git')).should be_false
end
it "should enable the freshly installed theme" do
@command.should_receive(:enable)
@command.execute
end
describe "when theme URL doesn't match recommended pattern" do
before(:each) do
@repo_url = 'git://foobar.com/path/to/mytheme.git'
@other_theme_dir = 'themes/mytheme'
FileUtils.mkdir_p(File.join(@other_theme_dir, '.git'))
@command = Nesta::Commands::Theme::Install.new(@repo_url)
@command.stub(:enable)
end
after(:each) do
FileUtils.rm_r(@other_theme_dir)
end
it "should use the basename as theme dir" do
@command.should_receive(:system).with(
'git', 'clone', @repo_url, @other_theme_dir)
@command.execute
end
end
end
describe "theme:enable" do
before(:each) do
config = temp_path('config.yml')
Nesta::Config.stub(:yaml_path).and_return(config)
@name = 'mytheme'
@command = Nesta::Commands::Theme::Enable.new(@name)
end
shared_examples_for "command that configures the theme" do
it "should enable the theme" do
@command.execute
File.read(Nesta::Config.yaml_path).should match(/^theme: #{@name}/)
end
end
describe "when theme config is commented out" do
before(:each) do
create_config_yaml(' # theme: blah')
end
it_should_behave_like "command that configures the theme"
end
describe "when another theme is configured" do
before(:each) do
create_config_yaml('theme: another')
end
it_should_behave_like "command that configures the theme"
end
describe "when no theme config exists" do
before(:each) do
create_config_yaml('# I have no theme config')
end
it_should_behave_like "command that configures the theme"
end
end
describe "theme:create" do
def should_exist(file)
File.exist?(Nesta::Path.themes(@name, file)).should be_true
end
before(:each) do
Nesta::App.stub(:root).and_return(TempFileHelper::TEMP_DIR)
@name = 'my-new-theme'
Nesta::Commands::Theme::Create.new(@name).execute
end
it "should create the theme directory" do
File.directory?(Nesta::Path.themes(@name)).should be_true
end
it "should create a dummy README file" do
should_exist('README.md')
text = File.read(Nesta::Path.themes(@name, 'README.md'))
text.should match(/#{@name} is a theme/)
end
it "should create a default app.rb file" do
should_exist('app.rb')
end
it "should create public and views directories" do
should_exist("public/#{@name}")
should_exist('views')
end
it "should copy the default view templates into views" do
%w(layout.haml page.haml master.sass).each do |file|
should_exist("views/#{file}")
end
end
end
end
|
# encoding: utf-8
require "spec_helper"
describe Money::Currency do
FOO = '{ "priority": 1, "iso_code": "FOO", "iso_numeric": "840", "name": "United States Dollar", "symbol": "$", "subunit": "Cent", "subunit_to_unit": 450, "symbol_first": true, "html_entity": "$", "decimal_mark": ".", "thousands_separator": "," }'
describe ".find" do
it "returns currency matching given id" do
Money::Currency.register(JSON.parse(FOO, :symbolize_names => true))
expected = Money::Currency.new(:foo)
Money::Currency.find(:foo).should == expected
Money::Currency.find(:FOO).should == expected
Money::Currency.find("foo").should == expected
Money::Currency.find("FOO").should == expected
end
it "returns nil unless currency matching given id" do
Money::Currency.find("ZZZ").should be_nil
end
end
describe ".find_by_iso_numeric" do
it "returns currency matching given numeric code" do
Money::Currency.find_by_iso_numeric(978).should == Money::Currency.new(:eur)
Money::Currency.find_by_iso_numeric(208).should_not == Money::Currency.new(:eur)
Money::Currency.find_by_iso_numeric('840').should == Money::Currency.new(:usd)
class Mock
def to_s
'208'
end
end
Money::Currency.find_by_iso_numeric(Mock.new).should == Money::Currency.new(:dkk)
Money::Currency.find_by_iso_numeric(Mock.new).should_not == Money::Currency.new(:usd)
end
it "returns nil if no currency has the given numeric code" do
Money::Currency.find_by_iso_numeric('non iso 4217 numeric code').should == nil
Money::Currency.find_by_iso_numeric(0).should == nil
end
end
describe ".wrap" do
it "returns nil if object is nil" do
Money::Currency.wrap(nil).should == nil
Money::Currency.wrap(Money::Currency.new(:usd)).should == Money::Currency.new(:usd)
Money::Currency.wrap(:usd).should == Money::Currency.new(:usd)
end
end
describe ".all" do
  it "returns an array of currencies" do
    Money::Currency.all.should include Money::Currency.new(:usd)
  end

  it "includes registered currencies" do
    Money::Currency.register(JSON.parse(FOO, :symbolize_names => true))
    Money::Currency.all.should include Money::Currency.new(:foo)
  end

  it 'is sorted by priority' do
    # Assert on the priority value rather than on a specific currency:
    # several currencies (e.g. :usd and the registered :foo) share
    # priority 1, so expecting :foo to be first is order-dependent.
    Money::Currency.all.first.priority.should == 1
  end
end
describe "#initialize" do
  it "lookups data from loaded config" do
    currency = Money::Currency.new("USD")
    currency.id.should == :usd
    currency.priority.should == 1
    currency.iso_code.should == "USD"
    currency.iso_numeric.should == "840"
    currency.name.should == "United States Dollar"
    currency.decimal_mark.should == "."
    currency.separator.should == "."
    currency.thousands_separator.should == ","
    currency.delimiter.should == ","
  end

  # Description fixed: the constant is Money::Currency::UnknownCurrency,
  # not "UnknownMoney::Currency" as the old garbled description said.
  it "raises Money::Currency::UnknownCurrency with unknown currency" do
    expect { Money::Currency.new("xxx") }.to raise_error(Money::Currency::UnknownCurrency, /xxx/)
  end
end
describe "#<=>" do
it "compares objects by priority" do
Money::Currency.new(:cad).should > Money::Currency.new(:usd)
Money::Currency.new(:usd).should < Money::Currency.new(:eur)
end
end
describe "#==" do
it "returns true if self === other" do
currency = Money::Currency.new(:eur)
currency.should == currency
end
it "returns true if the id is equal" do
Money::Currency.new(:eur).should == Money::Currency.new(:eur)
Money::Currency.new(:eur).should_not == Money::Currency.new(:usd)
end
end
describe "#eql?" do
it "returns true if #== returns true" do
Money::Currency.new(:eur).eql?(Money::Currency.new(:eur)).should be true
Money::Currency.new(:eur).eql?(Money::Currency.new(:usd)).should be false
end
end
describe "#hash" do
it "returns the same value for equal objects" do
Money::Currency.new(:eur).hash.should == Money::Currency.new(:eur).hash
Money::Currency.new(:eur).hash.should_not == Money::Currency.new(:usd).hash
end
it "can be used to return the intersection of Currency object arrays" do
intersection = [Money::Currency.new(:eur), Money::Currency.new(:usd)] & [Money::Currency.new(:eur)]
intersection.should == [Money::Currency.new(:eur)]
end
end
describe "#inspect" do
it "works as documented" do
Money::Currency.new(:usd).inspect.should ==
%Q{#<Money::Currency id: usd, priority: 1, symbol_first: true, thousands_separator: ,, html_entity: $, decimal_mark: ., name: United States Dollar, symbol: $, subunit_to_unit: 100, exponent: 2.0, iso_code: USD, iso_numeric: 840, subunit: Cent>}
end
end
describe "#to_s" do
it "works as documented" do
Money::Currency.new(:usd).to_s.should == "USD"
Money::Currency.new(:eur).to_s.should == "EUR"
end
end
describe "#to_currency" do
it "works as documented" do
usd = Money::Currency.new(:usd)
usd.to_currency.should == usd
end
it "doesn't create new symbols indefinitely" do
expect { Money::Currency.new("bogus") }.to raise_exception(Money::Currency::UnknownCurrency)
Symbol.all_symbols.map{|s| s.to_s}.should_not include("bogus")
end
end
describe "#code" do
it "works as documented" do
Money::Currency.new(:usd).code.should == "$"
Money::Currency.new(:azn).code.should == "AZN"
end
end
describe "#exponent" do
  it "conforms to iso 4217" do
    # The previous bare `x.exponent == n` expressions evaluated and
    # discarded their result, so this example could never fail.
    # Use real expectations (JPY: 0, USD: 2, IQD: 3 per ISO 4217).
    Money::Currency.new(:jpy).exponent.should == 0
    Money::Currency.new(:usd).exponent.should == 2
    Money::Currency.new(:iqd).exponent.should == 3
  end
end
describe "#decimal_places" do
  it "proper places for known currency" do
    # Previous bare `==` expressions asserted nothing; use `should`.
    Money::Currency.new(:mro).decimal_places.should == 1
    Money::Currency.new(:usd).decimal_places.should == 2
  end

  it "proper places for custom currency" do
    # FOO declares subunit_to_unit: 450, which needs 3 decimal places.
    Money::Currency.register(JSON.parse(FOO, :symbolize_names => true))
    Money::Currency.new(:foo).decimal_places.should == 3
  end
end
end
Test the priority value rather than a specific currency, since a different currency might share the same priority. Oops ;)
# encoding: utf-8
require "spec_helper"
describe Money::Currency do
FOO = '{ "priority": 1, "iso_code": "FOO", "iso_numeric": "840", "name": "United States Dollar", "symbol": "$", "subunit": "Cent", "subunit_to_unit": 450, "symbol_first": true, "html_entity": "$", "decimal_mark": ".", "thousands_separator": "," }'
describe ".find" do
it "returns currency matching given id" do
Money::Currency.register(JSON.parse(FOO, :symbolize_names => true))
expected = Money::Currency.new(:foo)
Money::Currency.find(:foo).should == expected
Money::Currency.find(:FOO).should == expected
Money::Currency.find("foo").should == expected
Money::Currency.find("FOO").should == expected
end
it "returns nil unless currency matching given id" do
Money::Currency.find("ZZZ").should be_nil
end
end
describe ".find_by_iso_numeric" do
it "returns currency matching given numeric code" do
Money::Currency.find_by_iso_numeric(978).should == Money::Currency.new(:eur)
Money::Currency.find_by_iso_numeric(208).should_not == Money::Currency.new(:eur)
Money::Currency.find_by_iso_numeric('840').should == Money::Currency.new(:usd)
class Mock
def to_s
'208'
end
end
Money::Currency.find_by_iso_numeric(Mock.new).should == Money::Currency.new(:dkk)
Money::Currency.find_by_iso_numeric(Mock.new).should_not == Money::Currency.new(:usd)
end
it "returns nil if no currency has the given numeric code" do
Money::Currency.find_by_iso_numeric('non iso 4217 numeric code').should == nil
Money::Currency.find_by_iso_numeric(0).should == nil
end
end
describe ".wrap" do
it "returns nil if object is nil" do
Money::Currency.wrap(nil).should == nil
Money::Currency.wrap(Money::Currency.new(:usd)).should == Money::Currency.new(:usd)
Money::Currency.wrap(:usd).should == Money::Currency.new(:usd)
end
end
describe ".all" do
it "returns an array of currencies" do
Money::Currency.all.should include Money::Currency.new(:usd)
end
it "includes registered currencies" do
Money::Currency.register(JSON.parse(FOO, :symbolize_names => true))
Money::Currency.all.should include Money::Currency.new(:foo)
end
it 'is sorted by priority' do
Money::Currency.all.first.priority.should == 1
end
end
describe "#initialize" do
  it "lookups data from loaded config" do
    currency = Money::Currency.new("USD")
    currency.id.should == :usd
    currency.priority.should == 1
    currency.iso_code.should == "USD"
    currency.iso_numeric.should == "840"
    currency.name.should == "United States Dollar"
    currency.decimal_mark.should == "."
    currency.separator.should == "."
    currency.thousands_separator.should == ","
    currency.delimiter.should == ","
  end

  # Description fixed: the constant is Money::Currency::UnknownCurrency,
  # not "UnknownMoney::Currency" as the old garbled description said.
  it "raises Money::Currency::UnknownCurrency with unknown currency" do
    expect { Money::Currency.new("xxx") }.to raise_error(Money::Currency::UnknownCurrency, /xxx/)
  end
end
describe "#<=>" do
it "compares objects by priority" do
Money::Currency.new(:cad).should > Money::Currency.new(:usd)
Money::Currency.new(:usd).should < Money::Currency.new(:eur)
end
end
describe "#==" do
it "returns true if self === other" do
currency = Money::Currency.new(:eur)
currency.should == currency
end
it "returns true if the id is equal" do
Money::Currency.new(:eur).should == Money::Currency.new(:eur)
Money::Currency.new(:eur).should_not == Money::Currency.new(:usd)
end
end
describe "#eql?" do
it "returns true if #== returns true" do
Money::Currency.new(:eur).eql?(Money::Currency.new(:eur)).should be true
Money::Currency.new(:eur).eql?(Money::Currency.new(:usd)).should be false
end
end
describe "#hash" do
it "returns the same value for equal objects" do
Money::Currency.new(:eur).hash.should == Money::Currency.new(:eur).hash
Money::Currency.new(:eur).hash.should_not == Money::Currency.new(:usd).hash
end
it "can be used to return the intersection of Currency object arrays" do
intersection = [Money::Currency.new(:eur), Money::Currency.new(:usd)] & [Money::Currency.new(:eur)]
intersection.should == [Money::Currency.new(:eur)]
end
end
describe "#inspect" do
it "works as documented" do
Money::Currency.new(:usd).inspect.should ==
%Q{#<Money::Currency id: usd, priority: 1, symbol_first: true, thousands_separator: ,, html_entity: $, decimal_mark: ., name: United States Dollar, symbol: $, subunit_to_unit: 100, exponent: 2.0, iso_code: USD, iso_numeric: 840, subunit: Cent>}
end
end
describe "#to_s" do
it "works as documented" do
Money::Currency.new(:usd).to_s.should == "USD"
Money::Currency.new(:eur).to_s.should == "EUR"
end
end
describe "#to_currency" do
it "works as documented" do
usd = Money::Currency.new(:usd)
usd.to_currency.should == usd
end
it "doesn't create new symbols indefinitely" do
expect { Money::Currency.new("bogus") }.to raise_exception(Money::Currency::UnknownCurrency)
Symbol.all_symbols.map{|s| s.to_s}.should_not include("bogus")
end
end
describe "#code" do
it "works as documented" do
Money::Currency.new(:usd).code.should == "$"
Money::Currency.new(:azn).code.should == "AZN"
end
end
describe "#exponent" do
  it "conforms to iso 4217" do
    # The previous bare `x.exponent == n` expressions evaluated and
    # discarded their result, so this example could never fail.
    # Use real expectations (JPY: 0, USD: 2, IQD: 3 per ISO 4217).
    Money::Currency.new(:jpy).exponent.should == 0
    Money::Currency.new(:usd).exponent.should == 2
    Money::Currency.new(:iqd).exponent.should == 3
  end
end
describe "#decimal_places" do
  it "proper places for known currency" do
    # Previous bare `==` expressions asserted nothing; use `should`.
    Money::Currency.new(:mro).decimal_places.should == 1
    Money::Currency.new(:usd).decimal_places.should == 2
  end

  it "proper places for custom currency" do
    # FOO declares subunit_to_unit: 450, which needs 3 decimal places.
    Money::Currency.register(JSON.parse(FOO, :symbolize_names => true))
    Money::Currency.new(:foo).decimal_places.should == 3
  end
end
end
|
# frozen_string_literal: true
RSpec.describe Graphiti::Debugger do
  context "when disabled" do
    around do |example|
      # Flip the flag off for the duration of each example, then restore it.
      previous = described_class.enabled
      described_class.enabled = false
      example.run
      described_class.enabled = previous
    end

    describe "#on_render" do
      it "does not add data to chunks Array" do
        expect {
          described_class.on_render("foo", 0, 100, :foo, {})
        }.not_to change(described_class.chunks, :count)
      end
    end

    describe "#on_data" do
      let(:payload) do
        {resource: :foo, parent: nil, params: {}, results: []}
      end

      it "does not add data to chunks Array" do
        expect {
          described_class.on_data("test", 0, 100, :foo, payload)
        }.not_to change(described_class.chunks, :count)
      end
    end
  end
end
Fix rubocop violations
# frozen_string_literal: true
RSpec.describe Graphiti::Debugger do
context "when disabled" do
around do |example|
old_value = described_class.enabled
described_class.enabled = false
example.run
described_class.enabled = old_value
end
describe "#on_render" do
it "does not add data to chunks Array" do
expect { described_class.on_render("foo", 0, 100, :foo, {}) }.not_to change(described_class.chunks, :count)
end
end
describe "#on_data" do
let(:payload) do
{
resource: :foo,
parent: nil,
params: {},
results: []
}
end
it "does not add data to chunks Array" do
expect { described_class.on_data("test", 0, 100, :foo, payload) }.not_to change(described_class.chunks, :count)
end
end
end
end
|
describe Neo4j::ActiveNode do
before(:each) do
delete_schema
delete_db
create_index :StoredFile, :type, type: :exact
create_index :StoredFile, :size, type: :exact
create_index :StoredFile, :flag, type: :exact
stub_active_node_class('StoredFile') do
enum type: [:unknown, :image, :video], _default: :unknown
enum size: {big: 100, medium: 7, small: 2}, _prefix: :dimension
enum flag: [:clean, :dangerous], _suffix: true
enum type_format: [:Mpeg, :Png], _case_sensitive: true, _index: false
has_one :in, :uploader, rel_class: :UploaderRel
end
stub_active_node_class('User') do
has_many :out, :files, rel_class: :UploaderRel
end
stub_active_rel_class('UploaderRel') do
from_class :User
to_class :StoredFile
type 'uploaded'
enum origin: [:disk, :web]
end
end
describe 'ClassMethods' do
it 'lists all types and sizes' do
expect(StoredFile.types).to eq(unknown: 0, image: 1, video: 2)
expect(StoredFile.sizes).to eq(big: 100, medium: 7, small: 2)
expect(StoredFile.flags).to eq(clean: 0, dangerous: 1)
expect(StoredFile.type_formats).to eq(Mpeg: 0, Png: 1)
expect { StoredFile.enum something: [:value1, :Value1] }.to raise_error(ArgumentError)
expect(UploaderRel.origins).to eq(disk: 0, web: 1)
end
it 'respects _index = false option' do
expect { StoredFile.as(:f).pluck('f.type_format') }.to_not raise_error
end
end
describe 'getters and setters' do
it 'returns nil by default' do
file = StoredFile.new
expect(file.flag).to be_nil
end
it 'returns the default value' do
file = StoredFile.new
expect(file.type).to eq(:unknown)
end
it 'assigns using types' do
file = StoredFile.new
file.type = :video
expect(file.type).to eq(:video)
end
it 'gets serialized correctly as integer' do
file = StoredFile.new
file.type = :video
file.save!
expect(StoredFile.as(:f).pluck('f.type')).to eq([2])
expect(file.reload.type).to eq(:video)
end
it 'accepts nil as value' do
file = StoredFile.new
file.flag = nil
file.save!
expect(StoredFile.as(:f).where(id: file.id).pluck('f.flag')).to eq([nil])
expect(file.reload.flag).to eq(nil)
end
it 'respects local _case_sensitive option' do
file = StoredFile.new
file.type_format = :png
file.save!
expect(StoredFile.as(:f).pluck('f.type_format')).to eq([0])
expect(file.reload.type_format).to eq(:Mpeg)
file.type_format = :Png
file.save!
expect(StoredFile.as(:f).pluck('f.type_format')).to eq([1])
expect(file.reload.type_format).to eq(:Png)
end
it 'respects global _case_sensitive = false default' do
file = StoredFile.new
file.type = :VIdeO
file.save!
expect(StoredFile.as(:f).pluck('f.type')).to eq([2])
expect(file.reload.type).to eq(:video)
end
end
describe 'scopes' do
it 'finds elements by enum key' do
file1 = StoredFile.create!(type: :unknown)
file2 = StoredFile.create!(type: :video)
ids = StoredFile.video.map(&:id)
expect(ids).not_to include(file1.id)
expect(ids).to include(file2.id)
end
end
describe '#where' do
it '(type: :video) finds elements by enum key' do
file1 = StoredFile.create!(type: :unknown)
file2 = StoredFile.create!(type: :video)
ids = StoredFile.where(type: :video).pluck(:id)
expect(ids).not_to include(file1.id)
expect(ids).to include(file2.id)
end
it '(type: [:unknown, :video]) finds elements matching the provided enum keys' do
file1 = StoredFile.create!(type: :unknown)
file2 = StoredFile.create!(type: :video)
file3 = StoredFile.create!(type: :image)
ids = StoredFile.where(type: [:unknown, :video]).pluck(:id)
expect(ids).to include(file1.id)
expect(ids).to include(file2.id)
expect(ids).to_not include(file3.id)
end
end
describe '#rel_where' do
it 'finds relations matching given enum key' do
user = User.create!
file = StoredFile.create!
file2 = StoredFile.create!
UploaderRel.create!(from_node: user, to_node: file, origin: :web)
UploaderRel.create!(from_node: user, to_node: file2, origin: :disk)
expect(user.files(:f).rel_where(origin: :web).pluck(:id)).to contain_exactly(file.id)
end
end
# Predicate (`?`) method specs. Descriptions fixed: two examples said
# "returns true" while expecting `not_to`, and the `flag` enum examples
# said "(with prefix)" although `flag` is declared with `_suffix: true`.
describe '? methods' do
  it 'returns false when accessing a nil value' do
    file = StoredFile.new
    expect(file).not_to be_clean_flag
    expect(file).not_to be_dangerous_flag
  end

  it 'returns true when the enum is in the current state' do
    file = StoredFile.new
    file.type = :video
    expect(file).to be_video
  end

  it 'returns false when the enum is not in the current state' do
    file = StoredFile.new
    file.type = :image
    expect(file).not_to be_video
  end

  it 'returns true when the enum is in the current state (with prefix)' do
    file = StoredFile.new
    file.size = :big
    expect(file).to be_dimension_big
  end

  it 'returns false when the enum is not in the current state (with prefix)' do
    file = StoredFile.new
    file.size = :small
    expect(file).not_to be_dimension_big
  end

  it 'returns true when the enum is in the current state (with suffix)' do
    file = StoredFile.new
    file.flag = :dangerous
    expect(file).to be_dangerous_flag
  end

  it 'returns false when the enum is not in the current state (with suffix)' do
    file = StoredFile.new
    file.flag = :dangerous
    expect(file).not_to be_clean_flag
  end
end
describe '! methods' do
it 'sets to a state' do
file = StoredFile.new
file.video!
expect(file.type).to eq(:video)
end
it 'sets to a state (with prefix)' do
file = StoredFile.new
file.dimension_big!
expect(file.size).to eq(:big)
end
it 'sets to a state (with suffix)' do
file = StoredFile.new
file.dangerous_flag!
expect(file.flag).to eq(:dangerous)
end
end
describe 'conflicts' do
it 'raises an error when two enums are conflicting' do
create_index :ConflictingModel, :enum1, type: :exact
create_index :ConflictingModel, :enum2, type: :exact
expect do
stub_active_node_class('ConflictingModel') do
enum enum1: [:a, :b, :c]
enum enum2: [:c, :d]
end
end.to raise_error(Neo4j::Shared::Enum::ConflictingEnumMethodError)
end
end
context 'when using `ActionController::Parameters`' do
let(:params) { action_controller_params('type' => 'image').permit! }
it 'assigns enums correctly when instancing a new class' do
using_action_controller do
file = StoredFile.new(params)
expect(file.type).to eq('image')
end
end
it 'assigns enums correctly when assigning to `attributes`' do
using_action_controller do
file = StoredFile.new
file.attributes = params
expect(file.type).to eq('image')
end
end
end
describe 'required index behavior' do
before do
create_index(:Incomplete, :foo, type: :exact)
stub_active_node_class('Incomplete') do
enum foo: [:a, :b]
enum bar: [:c, :d]
end
end
it_behaves_like 'raises schema error not including', :index, :Incomplete, :foo
it_behaves_like 'raises schema error including', :index, :Incomplete, :bar
context 'second enum index created' do
before { create_index(:Incomplete, :bar, type: :exact) }
it_behaves_like 'does not raise schema error', :Incomplete
it_behaves_like 'does not log schema option warning', :index, :Incomplete
end
end
end
Add: enum value & key validation specs
describe Neo4j::ActiveNode do
before(:each) do
delete_schema
delete_db
create_index :StoredFile, :type, type: :exact
create_index :StoredFile, :size, type: :exact
create_index :StoredFile, :flag, type: :exact
stub_active_node_class('StoredFile') do
enum type: [:unknown, :image, :video], _default: :unknown
enum size: {big: 100, medium: 7, small: 2}, _prefix: :dimension
enum flag: [:clean, :dangerous], _suffix: true
enum type_format: [:Mpeg, :Png], _case_sensitive: true, _index: false
has_one :in, :uploader, rel_class: :UploaderRel
end
stub_active_node_class('User') do
has_many :out, :files, rel_class: :UploaderRel
end
stub_active_rel_class('UploaderRel') do
from_class :User
to_class :StoredFile
type 'uploaded'
enum origin: [:disk, :web]
end
end
describe 'ClassMethods' do
it 'lists all types and sizes' do
expect(StoredFile.types).to eq(unknown: 0, image: 1, video: 2)
expect(StoredFile.sizes).to eq(big: 100, medium: 7, small: 2)
expect(StoredFile.flags).to eq(clean: 0, dangerous: 1)
expect(StoredFile.type_formats).to eq(Mpeg: 0, Png: 1)
expect(UploaderRel.origins).to eq(disk: 0, web: 1)
end
it 'respects _index = false option' do
expect { StoredFile.as(:f).pluck('f.type_format') }.to_not raise_error
end
it 'raises error if keys are invalid' do
expect { StoredFile.enum something: [:value1, :Value1] }.to raise_error(ArgumentError)
end
it "raises error if _default option doesn't match key" do
expect { StoredFile.enum something: [:value1, :value2], _default: :value3 }.to raise_error(ArgumentError)
end
end
describe 'getters and setters' do
it 'returns nil by default' do
file = StoredFile.new
expect(file.flag).to be_nil
end
it 'returns the default value' do
file = StoredFile.new
expect(file.type).to eq(:unknown)
end
it 'assigns using types' do
file = StoredFile.new
file.type = :video
expect(file.type).to eq(:video)
end
it 'gets serialized correctly as integer' do
file = StoredFile.new
file.type = :video
file.save!
expect(StoredFile.as(:f).pluck('f.type')).to eq([2])
expect(file.reload.type).to eq(:video)
end
it 'accepts nil as value' do
file = StoredFile.new
file.flag = nil
file.save!
expect(StoredFile.as(:f).where(id: file.id).pluck('f.flag')).to eq([nil])
expect(file.reload.flag).to eq(nil)
end
it "raises error if value doesn't match an enum key" do
file = StoredFile.new
file.type = :audio
expect { file.save! }.to raise_error Neo4j::Shared::Enum::InvalidEnumValueError
end
it 'respects local _case_sensitive option' do
file = StoredFile.new
file.type_format = :png
expect { file.save! }.to raise_error(Neo4j::Shared::Enum::InvalidEnumValueError)
file.type_format = :Png
file.save!
expect(StoredFile.as(:f).pluck('f.type_format')).to eq([1])
expect(file.reload.type_format).to eq(:Png)
end
it 'respects global _case_sensitive = false default' do
file = StoredFile.new
file.type = :VIdeO
file.save!
expect(StoredFile.as(:f).pluck('f.type')).to eq([2])
expect(file.reload.type).to eq(:video)
end
end
describe 'scopes' do
it 'finds elements by enum key' do
file1 = StoredFile.create!(type: :unknown)
file2 = StoredFile.create!(type: :video)
ids = StoredFile.video.map(&:id)
expect(ids).not_to include(file1.id)
expect(ids).to include(file2.id)
end
end
describe '#where' do
it '(type: :video) finds elements by enum key' do
file1 = StoredFile.create!(type: :unknown)
file2 = StoredFile.create!(type: :video)
ids = StoredFile.where(type: :video).pluck(:id)
expect(ids).not_to include(file1.id)
expect(ids).to include(file2.id)
end
it '(type: [:unknown, :video]) finds elements matching the provided enum keys' do
file1 = StoredFile.create!(type: :unknown)
file2 = StoredFile.create!(type: :video)
file3 = StoredFile.create!(type: :image)
ids = StoredFile.where(type: [:unknown, :video]).pluck(:id)
expect(ids).to include(file1.id)
expect(ids).to include(file2.id)
expect(ids).to_not include(file3.id)
end
end
describe '#rel_where' do
it 'finds relations matching given enum key' do
user = User.create!
file = StoredFile.create!
file2 = StoredFile.create!
UploaderRel.create!(from_node: user, to_node: file, origin: :web)
UploaderRel.create!(from_node: user, to_node: file2, origin: :disk)
expect(user.files(:f).rel_where(origin: :web).pluck(:id)).to contain_exactly(file.id)
end
end
# Predicate (`?`) method specs. Descriptions fixed: two examples said
# "returns true" while expecting `not_to`, and the `flag` enum examples
# said "(with prefix)" although `flag` is declared with `_suffix: true`.
describe '? methods' do
  it 'returns false when accessing a nil value' do
    file = StoredFile.new
    expect(file).not_to be_clean_flag
    expect(file).not_to be_dangerous_flag
  end

  it 'returns true when the enum is in the current state' do
    file = StoredFile.new
    file.type = :video
    expect(file).to be_video
  end

  it 'returns false when the enum is not in the current state' do
    file = StoredFile.new
    file.type = :image
    expect(file).not_to be_video
  end

  it 'returns true when the enum is in the current state (with prefix)' do
    file = StoredFile.new
    file.size = :big
    expect(file).to be_dimension_big
  end

  it 'returns false when the enum is not in the current state (with prefix)' do
    file = StoredFile.new
    file.size = :small
    expect(file).not_to be_dimension_big
  end

  it 'returns true when the enum is in the current state (with suffix)' do
    file = StoredFile.new
    file.flag = :dangerous
    expect(file).to be_dangerous_flag
  end

  it 'returns false when the enum is not in the current state (with suffix)' do
    file = StoredFile.new
    file.flag = :dangerous
    expect(file).not_to be_clean_flag
  end
end
describe '! methods' do
it 'sets to a state' do
file = StoredFile.new
file.video!
expect(file.type).to eq(:video)
end
it 'sets to a state (with prefix)' do
file = StoredFile.new
file.dimension_big!
expect(file.size).to eq(:big)
end
it 'sets to a state (with suffix)' do
file = StoredFile.new
file.dangerous_flag!
expect(file.flag).to eq(:dangerous)
end
end
describe 'conflicts' do
it 'raises an error when two enums are conflicting' do
create_index :ConflictingModel, :enum1, type: :exact
create_index :ConflictingModel, :enum2, type: :exact
expect do
stub_active_node_class('ConflictingModel') do
enum enum1: [:a, :b, :c]
enum enum2: [:c, :d]
end
end.to raise_error(Neo4j::Shared::Enum::ConflictingEnumMethodError)
end
end
context 'when using `ActionController::Parameters`' do
let(:params) { action_controller_params('type' => 'image').permit! }
it 'assigns enums correctly when instancing a new class' do
using_action_controller do
file = StoredFile.new(params)
expect(file.type).to eq('image')
end
end
it 'assigns enums correctly when assigning to `attributes`' do
using_action_controller do
file = StoredFile.new
file.attributes = params
expect(file.type).to eq('image')
end
end
end
describe 'required index behavior' do
before do
create_index(:Incomplete, :foo, type: :exact)
stub_active_node_class('Incomplete') do
enum foo: [:a, :b]
enum bar: [:c, :d]
end
end
it_behaves_like 'raises schema error not including', :index, :Incomplete, :foo
it_behaves_like 'raises schema error including', :index, :Incomplete, :bar
context 'second enum index created' do
before { create_index(:Incomplete, :bar, type: :exact) }
it_behaves_like 'does not raise schema error', :Incomplete
it_behaves_like 'does not log schema option warning', :index, :Incomplete
end
end
end
|
# Numbers to Commas Solo Challenge
# I spent [] hours on this challenge.
# Complete each step below according to the challenge directions and
# include it in this file. Also make sure everything that isn't code
# is commented in the file.
# 0. Pseudocode
# What is the input? --> The Input is an integer, but will be taken as a string. It's going to look funny if you put in words.
# What is the output? (i.e. What should the code return?) --> it should return a number with properly formatted commas, aka 1,000 for 1000
# What are the steps needed to solve the problem?
=begin
Define a Method that Takes an Integer- main Method
create an empty container
put the input into the container as separate digits
count the digits in the container
Do integer division of the number of digits and 3, to find out how many commas are needed. Assign to variable x
Take the modulus of the number of digits and 3, to find out how many digits before first comma. assign to variable y
IF the modulus returned 0 and the number of digits isn't 0
print print the first three numbers in the container. then print a comma and next three numbers in containter, repeated by one less than the number of digits divided by three.
ELSEIF the modulus returned 1
Print the first number in the containter.print a comma and next three numbers in containter, repeated by the number of digits divided by three.
ELSEIF
Print the first two numbers in the container. print a comma and next three numbers in containter, repeated by the number of digits divided by three.
ELSE
return a statement that there was no input
END IF
=end
# 1. Initial Solution
# Formats a number with commas as thousands separators, e.g. 1000 -> "1,000".
# The original body was not valid Ruby (`array.new`, `if ... do`, raw
# pseudocode, missing `end`); this is a working implementation of the
# pseudocode above, keeping the original method name.
#
# integer - an Integer or a String of digits.
# Returns a String with a comma before every group of three digits,
# or a complaint when there is no input.
def seperate_comma(integer)
  digits = integer.to_s.chars
  return "There is no number!" if digits.empty?

  # Group from the right so the leftmost group may hold 1-3 digits,
  # then stitch the groups back together with commas and un-reverse.
  groups = digits.reverse.each_slice(3).map { |slice| slice.join }
  groups.join(',').reverse
end
# 2. Refactored Solution
#notes to self from chap 10 WGR: check out [.each_char, .join, .split]
# 3. Reflection
Complete solo challenge 5
# Numbers to Commas Solo Challenge
# I spent [] hours on this challenge.
# Complete each step below according to the challenge directions and
# include it in this file. Also make sure everything that isn't code
# is commented in the file.
# 0. Pseudocode
# What is the input? --> The Input is an integer, but will be taken as a string. It's going to look funny if you put in words.
# What is the output? (i.e. What should the code return?) --> it should return a number with properly formatted commas, aka 1,000 for 1000
# What are the steps needed to solve the problem?
=begin
Define a Method that Takes an Integer- main Method
create an empty container
put the input into the container as separate digits
count the digits in the container
Do integer division of the number of digits and 3, to find out how many commas are needed. Assign to variable x
Take the modulus of the number of digits and 3, to find out how many digits before first comma. assign to variable y
IF the modulus returned 0 and the number of digits isn't 0
print the first three numbers in the container, then print a comma and the next three numbers in the container, repeated one less time than the number of digits divided by three.
ELSEIF the modulus returned 1
Print the first number in the containter.print a comma and next three numbers in containter, repeated by the number of digits divided by three.
ELSEIF
Print the first two numbers in the container. print a comma and next three numbers in containter, repeated by the number of digits divided by three.
ELSE
return a statement that there was no input
END IF
=end
# 1. Initial Solution
# Formats an integer with thousands separators ("1000" -> "1,000"), prints
# the result with Kernel#p, and returns the formatted String (p returns its
# argument, matching the original's print-and-return behavior).
#
# integer - the value to format; negative numbers keep their sign ungrouped.
def separate_comma(integer)
  str = integer.to_s
  if str.empty? # only possible for inputs like nil whose #to_s is ""
    puts "There is no number!"
    return p "" # the original fell through and printed/returned the empty join
  end

  # Fix: the old digit-walk counted a leading "-" as a digit, so -100 came
  # out as "-,100". Strip the sign, group the digits alone, re-attach it.
  sign = str.start_with?("-") ? "-" : ""
  # Reverse so groups of up to three digits form from the right-hand end,
  # then restore the original order after joining with commas.
  grouped = str.delete("-").reverse.scan(/\d{1,3}/).join(",").reverse
  p sign + grouped
end
# 2. Refactored Solution
#notes to self from chap 10 WGR: check out [.each_char, .join, .split]
# 3. Reflection |
Updating plunit url
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.