CombinedText stringlengths 4 3.42M |
|---|
# CocoaPods spec for PhyreKit 0.1.4 — shared iOS utilities split into
# Foundation / UIKit / CoreLocation / Vendor subspecs.
Pod::Spec.new do |s|
  s.name = "PhyreKit"
  s.version = "0.1.4"
  s.summary = "Common utilities for Phyre Inc."
  s.homepage = "http://rallyapp.io"
  s.license = 'MIT'
  s.author = { "Matt Ricketson" => "matt@phyreup.com" }
  # The git tag is derived from the version, so s.version must be set first.
  s.source = { :git => "git@bitbucket.org:phyre/phyrekit.git", :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/phyreup'
  s.platform = :ios, '7.0'
  s.requires_arc = true
  # Top-level umbrella sources; subspecs add their own directories below.
  s.source_files = 'Classes/*.{h,m}'
  s.subspec 'Foundation' do |ss|
    ss.source_files = 'Classes/Foundation/*.{h,m}'
    ss.requires_arc = true
  end
  s.subspec 'UIKit' do |ss|
    ss.source_files = 'Classes/UIKit/*.{h,m}'
    ss.requires_arc = true
    # UIKit helpers build on the Foundation subspec.
    ss.dependency 'PhyreKit/Foundation'
  end
  s.subspec 'CoreLocation' do |ss|
    ss.source_files = 'Classes/CoreLocation/*.{h,m}'
    ss.requires_arc = true
    ss.frameworks = 'CoreLocation'
  end
  s.subspec 'Vendor' do |ss|
    ss.source_files = 'Classes/Vendor/*.{h,m}'
    ss.requires_arc = true
  end
end
Bumps podspec version to 0.2.0
# CocoaPods spec for PhyreKit 0.2.0 (version bump; layout unchanged from
# 0.1.4) — shared iOS utilities split into subspecs.
Pod::Spec.new do |s|
  s.name = "PhyreKit"
  s.version = "0.2.0"
  s.summary = "Common utilities for Phyre Inc."
  s.homepage = "http://rallyapp.io"
  s.license = 'MIT'
  s.author = { "Matt Ricketson" => "matt@phyreup.com" }
  # The git tag is derived from the version, so s.version must be set first.
  s.source = { :git => "git@bitbucket.org:phyre/phyrekit.git", :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/phyreup'
  s.platform = :ios, '7.0'
  s.requires_arc = true
  s.source_files = 'Classes/*.{h,m}'
  s.subspec 'Foundation' do |ss|
    ss.source_files = 'Classes/Foundation/*.{h,m}'
    ss.requires_arc = true
  end
  s.subspec 'UIKit' do |ss|
    ss.source_files = 'Classes/UIKit/*.{h,m}'
    ss.requires_arc = true
    # UIKit helpers build on the Foundation subspec.
    ss.dependency 'PhyreKit/Foundation'
  end
  s.subspec 'CoreLocation' do |ss|
    ss.source_files = 'Classes/CoreLocation/*.{h,m}'
    ss.requires_arc = true
    ss.frameworks = 'CoreLocation'
  end
  s.subspec 'Vendor' do |ss|
    ss.source_files = 'Classes/Vendor/*.{h,m}'
    ss.requires_arc = true
  end
end
|
require "order_approval/version"

module OrderApproval
  # Declares the permissions this engine contributes to the host
  # application's feature system.
  class OrderApprovalFeatureDefinition
    include FeatureSystem::Provides

    # Permission descriptors. Each :callback_name refers to a method meant
    # to live in Authorization::Permissions below (currently disabled).
    def permissions
      [
        {
          can: true,
          callback_name: 'can_manage_order_holds',
          name: 'Can Manage Order Approvals'
        },
        {
          can: true,
          callback_name: 'can_approve_sub_order_holds',
          name: 'Can Manage Order Approvals in Sub Roles'
        }
      ]
    end
  end

  # Ability callbacks referenced by the descriptors above; left disabled.
  module Authorization
    module Permissions
      ## TODO - Not able to get this working
      #def can_manage_order_holds
      # can :manage, OrderHold
      #end
      #def can_approve_sub_order_holds
      # UserEditContext.call(@user, @site)
      # sub_claim_ids = @user.full_claims.map do |claim|
      # claim.descendants
      # end.flatten.map(&:id)
      # can :view, OrderHold, claim_id: sub_claim_ids
      # can :read, OrderHold, claim_id: sub_claim_ids
      # can :manage, OrderHold, claim_id: sub_claim_ids
      # can :approve_sub_orders, OrderHold, claim_id: sub_claim_ids
      #end
    end
  end
end

require 'order_approval/railtie' if defined?(Rails)
Add permission methods back
require "order_approval/version"

module OrderApproval
  # Declares the permissions this engine contributes to the host
  # application's feature system.
  class OrderApprovalFeatureDefinition
    include FeatureSystem::Provides

    # Permission descriptors: each :callback_name matches a method in
    # Authorization::Permissions below.
    def permissions
      [
        {
          can: true,
          callback_name: 'can_manage_order_holds',
          name: 'Can Manage Order Approvals'
        },
        {
          can: true,
          callback_name: 'can_approve_sub_order_holds',
          name: 'Can Manage Order Approvals in Sub Roles'
        }
      ]
    end
  end

  # CanCan-style ability callbacks referenced by the descriptors above.
  # NOTE(review): assumes `can`, `@user` and `@site` are provided by the
  # including ability class — confirm against the host app.
  module Authorization
    module Permissions
      # Full control over order holds.
      def can_manage_order_holds
        can :manage, OrderHold
      end

      # Hold management scoped to the claims beneath the user's own claims.
      def can_approve_sub_order_holds
        UserEditContext.call(@user, @site)
        sub_claim_ids = @user.full_claims.map do |claim|
          claim.descendants
        end.flatten.map(&:id)
        can :view, OrderHold, claim_id: sub_claim_ids
        can :read, OrderHold, claim_id: sub_claim_ids
        can :manage, OrderHold, claim_id: sub_claim_ids
        can :approve_sub_orders, OrderHold, claim_id: sub_claim_ids
      end
    end
  end
end

require 'order_approval/railtie' if defined?(Rails)
|
module Osrcry
  class License
    # Writes LICENSE.md containing the MIT license, with a copyright year
    # range derived from the repository's git history.
    def self.execute
      # `git log` lists commits newest-first, so the FIRST timestamp is the
      # most recent commit and the LAST is the initial commit. The previous
      # code had these swapped, producing ranges like "2015-2012".
      unix_times = `git log --pretty=format:"%at"`.split("\n")
      latest_year = Time.at(unix_times.first.to_i).utc.year
      earliest_year = Time.at(unix_times.last.to_i).utc.year
      year_range = if earliest_year == latest_year
        latest_year.to_s
      else
        "#{earliest_year}-#{latest_year}"
      end
      license = <<-LICENSE
The MIT License (MIT)
Copyright (c) #{year_range} [CONTRIBUTORS.md](https://github.com/geemus/oscrcy/blob/master/CONTRIBUTORS.md)
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
      LICENSE
      File.open('LICENSE.md', 'w') do |file|
        file.write(license)
      end
    end
  end
end
fix order of years in license
module Osrcry
  class License
    # Writes LICENSE.md containing the MIT license, with a copyright year
    # range derived from the repository's git history.
    def self.execute
      # `git log` lists commits newest-first: first timestamp = latest
      # commit, last timestamp = initial commit.
      unix_times = `git log --pretty=format:"%at"`.split("\n")
      latest_year, earliest_year = Time.at(unix_times.first.to_i).utc.year, Time.at(unix_times.last.to_i).utc.year
      # Single year if all commits fall in one year, else "first-last".
      year_range = if latest_year == earliest_year
        latest_year.to_s
      else
        "#{earliest_year}-#{latest_year}"
      end
      license = <<-LICENSE
The MIT License (MIT)
Copyright (c) #{year_range} [CONTRIBUTORS.md](https://github.com/geemus/oscrcy/blob/master/CONTRIBUTORS.md)
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
      LICENSE
      File.open('LICENSE.md', 'w') do |file|
        file.write(license)
      end
    end
  end
end
|
require 'directed_edge'
require 'active_support'
require 'active_record'
require 'will_paginate'
module DirectedEdge
# Mixin wiring ActiveRecord models to a Directed Edge recommendation
# database: acts_as_edgy declares routes, edgy_related/edgy_recommended
# query them, and export/clear manage the remote database.
module Edgy
  class << self
    # database: shared DirectedEdge::Database handle set by configure.
    # models: Set of classes that have called acts_as_edgy.
    attr_accessor :database, :models
  end

  # Yields the Configuration singleton; user and password are mandatory.
  def self.configure(&block)
    config = Configuration.instance
    block.call(config)
    raise "user= and password= must be set in config block" unless config.user && config.password
    Edgy.database = DirectedEdge::Database.new(config.user, config.password)
  end

  # Included into ActiveRecord::Base at file bottom; wraps #save via
  # alias_method_chain (Rails < 5 ActiveSupport API).
  def self.included(base)
    base.send :include, Utilities
    base.send :extend, ClassMethods
    base.send :alias_method, :pre_edgy_save, :save
    base.alias_method_chain :save, :edgy
    base.allow_concurrency = true
  end

  # Exports every registered model to XML and imports it into the remote
  # database, replacing its previous contents.
  def self.export
    # Was `throw`, which has no matching `catch` and would abort with
    # UncaughtThrowError; `raise` is the intended error signal.
    raise "No acts_as_edgy models in use." if @models.blank?
    raise "Database not set." unless Edgy.database
    file = "#{Rails.root}/tmp/edgy_export.xml"
    exporter = DirectedEdge::Exporter.new(file)
    @models.each { |m| m.edgy_export(exporter) }
    exporter.finish
    self.clear
    Edgy.database.import(file)
  end

  # Wipes the remote database by importing an empty export file.
  def self.clear
    empty = "#{Rails.root}/tmp/edgy_empty.xml"
    # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
    DirectedEdge::Exporter.new(empty).finish unless File.exist? empty
    Edgy.database.import(empty)
  end

  # Aliased over #save by included(); adds no extra behavior (yet).
  def save_with_edgy(*args)
    save_without_edgy(*args)
  end

  # Items related to this record, resolved lazily on a background thread.
  def edgy_related(options = {})
    Future.new do
      item_type = self.class.name.underscore
      tags = options.delete(:tags) || Set.new([ item_type ])
      item = DirectedEdge::Item.new(Edgy.database, "#{item_type}_#{id}")
      edgy_records(item.related(tags, options))
    end
  end

  # Personalized recommendations; defaults tags to every target class
  # reachable through this model's declared routes.
  def edgy_recommended(options = {})
    Future.new do
      item_type = self.class.name.underscore
      tags = options.delete(:tags)
      unless tags
        tags = Set.new
        self.class.edgy_routes.each { |name, c| tags.add(c.to_class.name.underscore) }
      end
      item = DirectedEdge::Item.new(Edgy.database, "#{item_type}_#{id}")
      edgy_records(item.recommended(tags, options))
    end
  end

  private

  # Maps Directed Edge item ids (e.g. "user_42") back to AR records,
  # using a single find when all ids belong to one model.
  def edgy_records(ids)
    return [] if ids.empty?
    same_names = true
    first_name = edgy_name(ids.first)
    record_ids = ids.map { |i| same_names = false if edgy_name(i) != first_name ; edgy_id(i) }
    if same_names
      first_name.classify.constantize.find(record_ids)
    else
      ids.map { |i| edgy_record(i) }
    end
  end

  # Resolves one "model_id" item id to its AR record.
  def edgy_record(item_id)
    edgy_name(item_id).classify.constantize.find(edgy_id(item_id))
  end

  # Credentials captured by Edgy.configure.
  class Configuration
    include Singleton
    attr_accessor :user, :password
  end

  # Helpers for splitting "model_42" style item ids.
  module Utilities
    private
    # Model-name portion of an item id.
    def edgy_name(item_id)
      item_id.sub(/_.*/, '')
    end
    # Record-id portion of an item id.
    def edgy_id(item_id)
      item_id.sub(/.*_/, '')
    end
  end

  module ClassMethods
    include Utilities
    attr_reader :edgy_routes

    # Declares a named route from this model to a target model, either via
    # explicit Bridge objects (optionally followed by the target class) or
    # via a plain list of classes with conventional foreign keys.
    def acts_as_edgy(name, *bridges)
      @edgy_routes ||= {}
      Edgy.models ||= Set.new
      Edgy.models.add(self)
      if bridges.first.is_a? Bridge
        to_class =
          unless bridges.last.is_a? Bridge
            bridges.pop
          else
            edgy_name(bridges.last.to_column.to_s).classify.constantize
          end
        @edgy_routes[name] = Connection.new(self, to_class, *bridges)
      else
        @edgy_routes[name] = edgy_build_connection(self, *bridges)
      end
    end

    # Streams this model's links for every route into the exporter.
    def edgy_export(exporter)
      raise "Model not initialized with acts_as_edgy" unless @edgy_routes
      @edgy_routes.each do |name, connection|
        from_type = connection.from_class.name.underscore
        to_type = connection.to_class.name.underscore
        from_id = nil
        link_ids = Set.new
        to_ids = Set.new
        # Flushes the links accumulated for the current from_id.
        export = lambda do
          item = DirectedEdge::Item.new(exporter.database, "#{from_type}_#{from_id}")
          item.add_tag(from_type)
          link_ids.each { |link| item.link_to("#{to_type}_#{link}", 0, name) }
          exporter.export(item)
          link_ids.clear
        end
        # Rows arrive ordered by from_id; emit one item per source record.
        edgy_paginated_sql_each(connection.sql_for_export) do |record|
          export.call unless from_id == record.from_id || link_ids.empty?
          from_id = record.from_id
          link_ids.add(record.to_id)
          to_ids.add(record.to_id)
        end
        export.call unless link_ids.empty?
        # Also emit every target item so it exists with its tag.
        to_ids.each do |id|
          item = DirectedEdge::Item.new(exporter.database, "#{to_type}_#{id}")
          item.add_tag(to_type)
          exporter.export(item)
        end
      end
      exporter
    end

    # Yields each row of the query page by page (via will_paginate).
    def edgy_paginated_sql_each(query, &block)
      page = 1
      begin
        results = paginate_by_sql(query, :page => page)
        results.each { |r| block.call(r) }
        page += 1
      end while !results.empty?
    end

    private

    # Join column from in_class to referring_to: its conventional foreign
    # key if the table has one, otherwise the primary key.
    def edgy_find_method(in_class, referring_to)
      if in_class.column_names.include? referring_to.name.foreign_key
        referring_to.name.foreign_key
      else
        'id'
      end
    end

    # Builds a Connection from a path of classes by inferring bridge
    # columns from conventional foreign keys.
    def edgy_build_connection(*classes)
      raise "There must be at least three classes in an edgy path." if classes.size < 3
      bridges = []
      first = previous = classes.shift
      while classes.size > 1
        current = classes.shift
        bridges.push(Bridge.new(current,
                                edgy_find_method(current, previous),
                                edgy_find_method(current, classes.first)))
        previous = current
      end
      Connection.new(first, classes.last, *bridges)
    end
  end
end
# By default strings of classes can be used and bridges will be built
# automatically between them based on the standard foreign keys. However, in
# cases where non-standard foreign keys are used, a Bridge may be explicitly
# created.
# Immutable description of one join step in an edgy path: the bridging
# model class plus the two foreign-key columns used to enter and leave
# its table.
class Bridge
  attr_reader :klass, :from_column, :to_column

  def initialize(klass, from_column, to_column)
    @klass, @from_column, @to_column = klass, from_column, to_column
  end
end
private
# A route between two model classes across one or more Bridge tables,
# rendered as plain SQL strings.
class Connection
  attr_accessor :from_class, :to_class

  def initialize(from_class, to_class, *bridges)
    @from_class = from_class
    @to_class = to_class
    @bridges = bridges
  end

  # SELECT returning the ids of @to_class records reachable from a single
  # source id, joining across every bridge table in order.
  def sql_for_single(from_id)
    target = @to_class.table_name
    tables = [target] + @bridges.map { |b| b.klass.table_name }
    predicate = from_id.to_s
    @bridges.each do |b|
      t = b.klass.table_name
      predicate += " = #{t}.#{b.from_column}"
      predicate += " and #{t}.#{b.to_column}"
    end
    predicate += " = #{target}.id"
    "select #{target}.id from #{tables.join(', ')} where #{predicate}"
  end

  # SELECT yielding every (from_id, to_id) pair across the whole path,
  # ordered by from_id so the exporter can batch per source record.
  def sql_for_export
    head = @bridges.first
    tail = @bridges.last
    src = "#{head.klass.table_name}.#{head.from_column}"
    dst = "#{tail.klass.table_name}.#{tail.to_column}"
    tables = @bridges.map { |b| b.klass.table_name }.join(', ')
    clause = "#{src} is not null and #{dst} is not null and "
    @bridges.each do |b|
      t = b.klass.table_name
      clause += " = #{t}.#{b.from_column}" unless b.equal?(head)
      clause += " and " unless b.equal?(head) || b.equal?(tail)
      clause += "#{t}.#{b.to_column}" unless b.equal?(tail)
    end
    "select #{src} as from_id, #{dst} as to_id from #{tables} where #{clause} order by from_id"
  end
end
# Evaluates a block on a background thread; any method call joins the
# thread (once) and is forwarded to the cached result.
class Future
  def initialize(&finalize)
    @future = Thread.new(&finalize)
  end

  # Forward every unknown message to the computed value.
  def method_missing(method, *args, &block)
    data.send(method, *args, &block)
  end

  # Companion to method_missing so respond_to? reflects the delegated
  # interface (was missing, breaking respond_to?-based duck typing).
  # Note: forces evaluation of the future.
  def respond_to_missing?(method, include_private = false)
    data.respond_to?(method, include_private) || super
  end

  def to_s
    data.to_s
  end

  private

  # Join the worker thread once and memoize its value.
  def data
    @data ||= @future.value
  end
end
end
# Mix Edgy into every ActiveRecord model so acts_as_edgy is available.
ActiveRecord::Base.send :include, DirectedEdge::Edgy
Trigger updates when the model is saved
require 'directed_edge'
require 'active_support'
require 'active_record'
require 'will_paginate'
module DirectedEdge
# Mixin wiring ActiveRecord models to a Directed Edge recommendation
# database. This revision also registers save triggers: saving a model at
# the head of a route kicks off a background lookup of affected items.
module Edgy
  class << self
    # database: shared DirectedEdge::Database handle set by configure.
    # models: Set of classes that have called acts_as_edgy.
    attr_accessor :database, :models
  end

  # Yields the Configuration singleton; user and password are mandatory.
  def self.configure(&block)
    config = Configuration.instance
    block.call(config)
    raise "user= and password= must be set in config block" unless config.user && config.password
    Edgy.database = DirectedEdge::Database.new(config.user, config.password)
  end

  # Included into ActiveRecord::Base at file bottom; wraps #save via
  # alias_method_chain (Rails < 5 ActiveSupport API).
  def self.included(base)
    base.send :include, Utilities
    base.send :extend, ClassMethods
    base.send :alias_method, :pre_edgy_save, :save
    base.alias_method_chain :save, :edgy
    base.allow_concurrency = true
  end

  # Exports every registered model to XML and imports it into the remote
  # database, replacing its previous contents.
  def self.export
    # Was `throw`, which has no matching `catch` and would abort with
    # UncaughtThrowError; `raise` is the intended error signal.
    raise "No acts_as_edgy models in use." if @models.blank?
    raise "Database not set." unless Edgy.database
    file = "#{Rails.root}/tmp/edgy_export.xml"
    exporter = DirectedEdge::Exporter.new(file)
    @models.each { |m| m.edgy_export(exporter) }
    exporter.finish
    self.clear
    Edgy.database.import(file)
  end

  # Wipes the remote database by importing an empty export file.
  def self.clear
    empty = "#{Rails.root}/tmp/edgy_empty.xml"
    # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
    DirectedEdge::Exporter.new(empty).finish unless File.exist? empty
    Edgy.database.import(empty)
  end

  # Wrapped over #save: collects (on a background thread) the item ids
  # reachable through each model registered as a trigger for this class.
  # NOTE(review): the collected ids are currently unused — this looks like
  # a partial implementation of incremental updates; confirm intent.
  def save_with_edgy(*args)
    Future.new do
      self.class.edgy_triggers.each do |trigger|
        ### TODO: This should use the ID from the bridge rather than just
        ### assuming foreign_key is the right one.
        trigger_id = send(trigger.name.foreign_key)
        trigger.edgy_routes.each do |name, connection|
          ids = Set.new
          self.class.edgy_paginated_sql_each(connection.sql_for_single(trigger_id)) do |record|
            ids.add(record.id)
          end
        end if trigger_id
      end if self.class.edgy_triggers
    end
    save_without_edgy(*args)
  end

  # Items related to this record, resolved lazily on a background thread.
  def edgy_related(options = {})
    Future.new do
      item_type = self.class.name.underscore
      tags = options.delete(:tags) || Set.new([ item_type ])
      item = DirectedEdge::Item.new(Edgy.database, "#{item_type}_#{id}")
      edgy_records(item.related(tags, options))
    end
  end

  # Personalized recommendations; defaults tags to every target class
  # reachable through this model's declared routes.
  def edgy_recommended(options = {})
    Future.new do
      item_type = self.class.name.underscore
      tags = options.delete(:tags)
      unless tags
        tags = Set.new
        self.class.edgy_routes.each { |name, c| tags.add(c.to_class.name.underscore) }
      end
      item = DirectedEdge::Item.new(Edgy.database, "#{item_type}_#{id}")
      edgy_records(item.recommended(tags, options))
    end
  end

  private

  # Maps Directed Edge item ids (e.g. "user_42") back to AR records,
  # using a single find when all ids belong to one model.
  def edgy_records(ids)
    return [] if ids.empty?
    same_names = true
    first_name = edgy_name(ids.first)
    record_ids = ids.map { |i| same_names = false if edgy_name(i) != first_name ; edgy_id(i) }
    if same_names
      first_name.classify.constantize.find(record_ids)
    else
      ids.map { |i| edgy_record(i) }
    end
  end

  # Resolves one "model_id" item id to its AR record.
  def edgy_record(item_id)
    edgy_name(item_id).classify.constantize.find(edgy_id(item_id))
  end

  # Credentials captured by Edgy.configure.
  class Configuration
    include Singleton
    attr_accessor :user, :password
  end

  # Helpers for splitting "model_42" style item ids.
  module Utilities
    private
    # Model-name portion of an item id.
    def edgy_name(item_id)
      item_id.sub(/_.*/, '')
    end
    # Record-id portion of an item id.
    def edgy_id(item_id)
      item_id.sub(/.*_/, '')
    end
  end

  module ClassMethods
    include Utilities
    attr_reader :edgy_routes
    # Models whose saves should refresh this model's recommendations.
    attr_accessor :edgy_triggers

    # Declares a named route from this model to a target model, and
    # registers the first class in the path as a save trigger.
    def acts_as_edgy(name, *bridges)
      Edgy.models ||= Set.new
      Edgy.models.add(self)
      # Saving the class at the head of the path should refresh us.
      trigger_from = bridges.first.is_a?(Bridge) ? bridges.first.klass : bridges.first
      trigger_from.edgy_triggers ||= Set.new
      trigger_from.edgy_triggers.add(self)
      @edgy_routes ||= {}
      if bridges.first.is_a? Bridge
        to_class =
          unless bridges.last.is_a? Bridge
            bridges.pop
          else
            edgy_name(bridges.last.to_column.to_s).classify.constantize
          end
        @edgy_routes[name] = Connection.new(self, to_class, *bridges)
      else
        @edgy_routes[name] = edgy_build_connection(self, *bridges)
      end
    end

    # Streams this model's links for every route into the exporter.
    def edgy_export(exporter)
      raise "Model not initialized with acts_as_edgy" unless @edgy_routes
      @edgy_routes.each do |name, connection|
        from_type = connection.from_class.name.underscore
        to_type = connection.to_class.name.underscore
        from_id = nil
        link_ids = Set.new
        to_ids = Set.new
        # Flushes the links accumulated for the current from_id.
        export = lambda do
          item = DirectedEdge::Item.new(exporter.database, "#{from_type}_#{from_id}")
          item.add_tag(from_type)
          link_ids.each { |link| item.link_to("#{to_type}_#{link}", 0, name) }
          exporter.export(item)
          link_ids.clear
        end
        # Rows arrive ordered by from_id; emit one item per source record.
        edgy_paginated_sql_each(connection.sql_for_export) do |record|
          export.call unless from_id == record.from_id || link_ids.empty?
          from_id = record.from_id
          link_ids.add(record.to_id)
          to_ids.add(record.to_id)
        end
        export.call unless link_ids.empty?
        # Also emit every target item so it exists with its tag.
        to_ids.each do |id|
          item = DirectedEdge::Item.new(exporter.database, "#{to_type}_#{id}")
          item.add_tag(to_type)
          exporter.export(item)
        end
      end
      exporter
    end

    # Yields each row of the query page by page (via will_paginate).
    def edgy_paginated_sql_each(query, &block)
      page = 1
      begin
        results = paginate_by_sql(query, :page => page)
        results.each { |r| block.call(r) }
        page += 1
      end while !results.empty?
    end

    private

    # Join column from in_class to referring_to: its conventional foreign
    # key if the table has one, otherwise the primary key.
    def edgy_find_method(in_class, referring_to)
      if in_class.column_names.include? referring_to.name.foreign_key
        referring_to.name.foreign_key
      else
        'id'
      end
    end

    # Builds a Connection from a path of classes by inferring bridge
    # columns from conventional foreign keys.
    def edgy_build_connection(*classes)
      raise "There must be at least three classes in an edgy path." if classes.size < 3
      bridges = []
      first = previous = classes.shift
      while classes.size > 1
        current = classes.shift
        bridges.push(Bridge.new(current,
                                edgy_find_method(current, previous),
                                edgy_find_method(current, classes.first)))
        previous = current
      end
      Connection.new(first, classes.last, *bridges)
    end
  end
end
# By default strings of classes can be used and bridges will be built
# automatically between them based on the standard foreign keys. However, in
# cases where non-standard foreign keys are used, a Bridge may be explicitly
# created.
# One explicit join step in an edgy path: a bridging model class and the
# two foreign-key columns used to enter and leave its table.
class Bridge
  attr_reader :klass, :from_column, :to_column

  def initialize(klass, from_column, to_column)
    @klass, @from_column, @to_column = klass, from_column, to_column
  end
end
private
# A route between two model classes across one or more Bridge tables,
# rendered as plain SQL strings.
class Connection
  attr_accessor :from_class, :to_class

  def initialize(from_class, to_class, *bridges)
    @from_class = from_class
    @to_class = to_class
    @bridges = bridges
  end

  # SELECT returning the ids of @to_class records reachable from a single
  # source id, joining across every bridge table in order.
  def sql_for_single(from_id)
    target = @to_class.table_name
    tables = [target] + @bridges.map { |b| b.klass.table_name }
    predicate = from_id.to_s
    @bridges.each do |b|
      t = b.klass.table_name
      predicate += " = #{t}.#{b.from_column}"
      predicate += " and #{t}.#{b.to_column}"
    end
    predicate += " = #{target}.id"
    "select #{target}.id from #{tables.join(', ')} where #{predicate}"
  end

  # SELECT yielding every (from_id, to_id) pair across the whole path,
  # ordered by from_id so the exporter can batch per source record.
  def sql_for_export
    head = @bridges.first
    tail = @bridges.last
    src = "#{head.klass.table_name}.#{head.from_column}"
    dst = "#{tail.klass.table_name}.#{tail.to_column}"
    tables = @bridges.map { |b| b.klass.table_name }.join(', ')
    clause = "#{src} is not null and #{dst} is not null and "
    @bridges.each do |b|
      t = b.klass.table_name
      clause += " = #{t}.#{b.from_column}" unless b.equal?(head)
      clause += " and " unless b.equal?(head) || b.equal?(tail)
      clause += "#{t}.#{b.to_column}" unless b.equal?(tail)
    end
    "select #{src} as from_id, #{dst} as to_id from #{tables} where #{clause} order by from_id"
  end
end
# Runs a block on a background thread and proxies method calls to the
# block's (cached) result.
class Future
  def initialize(&finalize)
    @worker = Thread.new(&finalize)
  end

  # Forward every unknown message to the computed value.
  def method_missing(message, *args, &blk)
    result.send(message, *args, &blk)
  end

  def to_s
    result.to_s
  end

  private

  # Blocks until the thread finishes; memoizes the value.
  def result
    @result ||= @worker.value
  end
end
end
# Mix Edgy into every ActiveRecord model so acts_as_edgy is available.
ActiveRecord::Base.send :include, DirectedEdge::Edgy
|
# Rake task: pull remote config files into the local config directory.
namespace :config do
  desc "Synchronize config files from remote folder"
  task :synchronize do
    # Lazy-require so the gems only load when the task actually runs.
    require 'pansophy/config_synchronizer'
    require 'dotenv'
    # Load .env before pulling — presumably the synchronizer reads its
    # credentials from ENV; verify against ConfigSynchronizer.
    Dotenv.load
    Pansophy::ConfigSynchronizer.new.pull
  end
end
Apply RuboCop style fixes (prefer single-quoted strings)
# Rake task: pull remote config files into the local config directory.
namespace :config do
  desc 'Synchronize config files from remote folder'
  task :synchronize do
    # Lazy-require so the gems only load when the task actually runs.
    require 'pansophy/config_synchronizer'
    require 'dotenv'
    # Load .env before pulling — presumably the synchronizer reads its
    # credentials from ENV; verify against ConfigSynchronizer.
    Dotenv.load
    Pansophy::ConfigSynchronizer.new.pull
  end
end
|
module Pantry
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.0.1".freeze
end
Set initial version so pantry-chef can build
module Pantry
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.1.0".freeze
end
|
require "parametric"

module Parametric
  # Class-level DSL for declaring and inheriting Parametric schemas.
  module DSL
    # Example
    #   class Foo
    #     include Parametric::DSL
    #
    #     schema do
    #       field(:title).type(:string).present
    #       field(:age).type(:integer).default(20)
    #     end
    #
    #     attr_reader :params
    #
    #     def initialize(input)
    #       @params = self.class.schema.resolve(input)
    #     end
    #   end
    #
    #   foo = Foo.new(title: "A title", nope: "hello")
    #
    #   foo.params # => {title: "A title", age: 20}
    #
    DEFAULT_SCHEMA_NAME = :schema

    def self.included(base)
      base.extend(ClassMethods)
      base.schemas = {DEFAULT_SCHEMA_NAME => Parametric::Schema.new}
    end

    module ClassMethods
      # Replace the default schema wholesale.
      def schema=(sc)
        @schemas[DEFAULT_SCHEMA_NAME] = sc
      end

      def schemas=(sc)
        @schemas = sc
      end

      # Subclasses start from merged copies of the parent's schemas so
      # later definitions don't mutate the parent.
      def inherited(subclass)
        subclass.schemas = @schemas.each_with_object({}) do |(key, sc), hash|
          hash[key] = sc.merge(Parametric::Schema.new)
        end
      end

      # Reader and writer in one: with no options and no block, returns
      # the named schema; otherwise merges a new schema into it.
      def schema(*args, &block)
        options = args.last.is_a?(Hash) ? args.last : {}
        key = args.first.is_a?(Symbol) ? args.first : DEFAULT_SCHEMA_NAME
        # Fall back to a fresh schema for unknown keys instead of nil,
        # which would break both the reader branch and merging below.
        current_schema = @schemas.fetch(key) { Parametric::Schema.new }
        return current_schema unless options.any? || block_given?
        new_schema = Parametric::Schema.new(options, &block)
        @schemas[key] = current_schema ? current_schema.merge(new_schema) : new_schema
        after_define_schema(@schemas[key])
      end

      # Hook for including classes; invoked after a schema is (re)defined.
      def after_define_schema(sc)
        # noop hook
      end
    end
  end
end
Return new schema if none exists
require "parametric"

module Parametric
  # Class-level DSL for declaring and inheriting Parametric schemas.
  module DSL
    # Example
    #   class Foo
    #     include Parametric::DSL
    #
    #     schema do
    #       field(:title).type(:string).present
    #       field(:age).type(:integer).default(20)
    #     end
    #
    #     attr_reader :params
    #
    #     def initialize(input)
    #       @params = self.class.schema.resolve(input)
    #     end
    #   end
    #
    #   foo = Foo.new(title: "A title", nope: "hello")
    #
    #   foo.params # => {title: "A title", age: 20}
    #
    DEFAULT_SCHEMA_NAME = :schema

    def self.included(base)
      base.extend(ClassMethods)
      base.schemas = {DEFAULT_SCHEMA_NAME => Parametric::Schema.new}
    end

    module ClassMethods
      # Replace the default schema wholesale.
      def schema=(sc)
        @schemas[DEFAULT_SCHEMA_NAME] = sc
      end

      def schemas=(sc)
        @schemas = sc
      end

      # Subclasses start from merged copies of the parent's schemas so
      # later definitions don't mutate the parent.
      def inherited(subclass)
        subclass.schemas = @schemas.each_with_object({}) do |(key, sc), hash|
          hash[key] = sc.merge(Parametric::Schema.new)
        end
      end

      # Reader and writer in one: with no options and no block, returns
      # the named schema; otherwise merges a new schema into it.
      def schema(*args, &block)
        options = args.last.is_a?(Hash) ? args.last : {}
        key = args.first.is_a?(Symbol) ? args.first : DEFAULT_SCHEMA_NAME
        # Unknown keys fall back to a fresh schema instead of nil.
        current_schema = @schemas.fetch(key) { Parametric::Schema.new }
        return current_schema unless options.any? || block_given?
        new_schema = Parametric::Schema.new(options, &block)
        @schemas[key] = current_schema ? current_schema.merge(new_schema) : new_schema
        after_define_schema(@schemas[key])
      end

      # Hook for including classes; invoked after a schema is (re)defined.
      def after_define_schema(sc)
        # noop hook
      end
    end
  end
end
|
# Homebrew formula for mothur, built from the 1.39.5 source tarball.
class Mothur < Formula
  desc "16s analysis software"
  homepage "https://www.mothur.org/"
  url "https://github.com/mothur/mothur/archive/v1.39.5.tar.gz"
  sha256 "9f1cd691e9631a2ab7647b19eb59cd21ea643f29b22cde73d7f343372dfee342"
  head "https://github.com/mothur/mothur.git"
  # tag "bioinformatics"
  # doi "10.1128/AEM.01541-09"

  # Pre-built binary bottles, keyed by platform.
  bottle do
    sha256 "62d736a1aa21e1d15f7ced134c7672e12f527fd228aa73c4b9ff0fc045607e37" => :sierra
    sha256 "5af142d0c836d13e80218fec004764f64659896cd5a8792b0d885305d86cfdd6" => :el_capitan
    sha256 "a06890a543f796d9faf9b66fce1a191dbd59dabc068401a358b5d6313fa884bf" => :yosemite
    sha256 "8f339e1bb9ee729da2fb98746309a7c95ed7dbe76ee15e271106760d876c7f76" => :x86_64_linux
  end

  depends_on "boost"
  depends_on "readline" unless OS.mac?

  def install
    boost = Formula["boost"]
    # Point the Makefile's boost placeholders at Homebrew's boost.
    inreplace "Makefile", '"\"Enter_your_boost_library_path_here\""', boost.opt_lib
    inreplace "Makefile", '"\"Enter_your_boost_include_path_here\""', boost.opt_include
    system "make"
    bin.install "mothur", "uchime"
  end

  test do
    # Smoke-test both installed binaries.
    system "#{bin}/mothur", "-h"
    system "#{bin}/uchime", "--help"
  end
end
mothur: update 1.39.5 bottle.
# Homebrew formula for mothur 1.39.5 (rebuilt bottles; Linux bottle dropped).
class Mothur < Formula
  desc "16s analysis software"
  homepage "https://www.mothur.org/"
  url "https://github.com/mothur/mothur/archive/v1.39.5.tar.gz"
  sha256 "9f1cd691e9631a2ab7647b19eb59cd21ea643f29b22cde73d7f343372dfee342"
  head "https://github.com/mothur/mothur.git"
  # tag "bioinformatics"
  # doi "10.1128/AEM.01541-09"

  # Pre-built binary bottles, keyed by platform.
  bottle do
    sha256 "e36995d4192047ec7426e0e5c4861065f7c0d1a3bc5219c4f57b2d501f866ac7" => :sierra
    sha256 "366829a37a3bab6c743b96c4453dc10c72cd0824bab47f9f118e138eb56dc5b8" => :el_capitan
    sha256 "b0c33d37d6e78767afd43f4b847695d0d17cb58c8752300a01efbfc8da3bc595" => :yosemite
  end

  depends_on "boost"
  depends_on "readline" unless OS.mac?

  def install
    boost = Formula["boost"]
    # Point the Makefile's boost placeholders at Homebrew's boost.
    inreplace "Makefile", '"\"Enter_your_boost_library_path_here\""', boost.opt_lib
    inreplace "Makefile", '"\"Enter_your_boost_include_path_here\""', boost.opt_include
    system "make"
    bin.install "mothur", "uchime"
  end

  test do
    # Smoke-test both installed binaries.
    system "#{bin}/mothur", "-h"
    system "#{bin}/uchime", "--help"
  end
end
|
module PassiveRecord
  # Minimal ActiveRecord-like base class for plain (non-persisted) models.
  class Base
    # Assign values to each of the attributes passed in the params hash.
    def initialize(params = {})
      params.each { |k, v| send("#{k}=", v) }
    end

    # Date accessors to be consistent with ActiveRecord.
    attr_accessor :created_at, :updated_at

    # fields and associations are declared per class but must also be
    # visible to subclasses, so both use class_inheritable_accessor
    # (ActiveSupport, Rails 2.x/3.x). Previously associations lived in a
    # plain class-level @ivar, which subclasses could not see.
    class_inheritable_accessor :fields, :associations

    # Return a hash of the class's attribute names and values.
    # @name.attributes => {:first_name=>"Dima", :last_name=>"Dozen"}
    def attributes
      @attributes = {}
      self.class.fields.flatten.each {|att| @attributes[att] = self.send(att) }
      @attributes
    end

    # Compare this object with another object of the same class.
    # Returns true if the attributes and values are identical.
    # @name === @name #=> true
    # @name === @name2 #=> false
    def ===(other)
      self.attributes == other.attributes
    end

    class << self
      # has_many :names, :addresses — records the association list and
      # defines an accessor for each association.
      def has_many(*associations)
        self.associations = associations
        associations.each {|association| attr_accessor association}
      end

      # define_fields [:street, :city] — records the field list and
      # defines an accessor for each field.
      def define_fields(attrs)
        self.fields = attrs
        attrs.each {|att| attr_accessor att}
      end

      # .fields and .associations readers are generated by
      # class_inheritable_accessor above:
      #   Name.fields        #=> [:id, :first_name, :last_name]
      #   Model.associations #=> [:names, :addresses]
    end
  end
end
Made associations class_inheritable_accessor.
module PassiveRecord
  # Minimal ActiveRecord-like base class for plain (non-persisted) models.
  class Base
    # Assign values to each of the attributes passed in the params hash.
    def initialize(params = {})
      params.each { |k, v| send("#{k}=", v) }
    end

    # Date accessors to be consistent with ActiveRecord.
    attr_accessor :created_at, :updated_at

    # Return a hash of the class's attribute names and values.
    # @name.attributes => {:first_name=>"Dima", :last_name=>"Dozen"}
    def attributes
      @attributes = {}
      self.class.fields.flatten.each {|att| @attributes[att] = self.send(att) }
      @attributes
    end

    # Compare this object with another object of the same class.
    # Returns true if the attributes and values are identical.
    # @name === @name #=> true
    # @name === @name2 #=> false
    def ===(other)
      self.attributes == other.attributes
    end

    # fields and associations are inherited by subclasses
    # (class_inheritable_accessor comes from ActiveSupport, Rails 2.x/3.x).
    class_inheritable_accessor :fields, :associations

    class << self
      # has_many :names, :addresses — records the association list and
      # defines an accessor for each association.
      def has_many(*associations)
        self.associations = associations
        associations.each {|association| attr_accessor association}
      end

      # define_fields [:street, :city] — records the field list and
      # defines an accessor for each field.
      def define_fields(attrs)
        self.fields = attrs
        attrs.each {|att| attr_accessor att}
      end

      # .fields and .associations readers are generated by
      # class_inheritable_accessor above:
      #   Name.fields        #=> [:id, :first_name, :last_name]
      #   Model.associations #=> [:names, :addresses]
    end
  end
end
require "ffi"
require "ostruct" # OpenStruct is used for match results but was never required

# Ruby FFI wrapper around a C Aho-Corasick multi-pattern matcher.
module AhoCorasick
  # Raw bindings to the bundled ahocorasick shared library.
  module C
    extend FFI::Library
    ffi_lib File.dirname(__FILE__) + "/ahocorasick.so"

    class MatchStruct < FFI::Struct
      layout :pattern, :pointer,
             :position, :long,
             :number, :uint
    end

    class PatternStruct < FFI::Struct
      layout :string, :pointer,
             :length, :uint,
             :representative, :pointer
    end

    class TextStruct < FFI::Struct
      layout :string, :pointer,
             :length, :uint
    end

    callback :match_callback, [:pointer, :pointer], :int
    attach_function :ac_automata_init, [:match_callback], :pointer
    attach_function :ac_automata_add, [:pointer, :pointer], :int
    attach_function :ac_automata_finalize, [:pointer], :void
    attach_function :ac_automata_search, [:pointer, :pointer, :pointer], :int
    attach_function :ac_automata_reset, [:pointer], :void
    attach_function :ac_automata_release, [:pointer], :void
    attach_function :ac_automata_display, [:pointer, :char], :void
  end

  # Builds an automaton over a dictionary and invokes the supplied block
  # for every hit while searching.
  class Search
    include C

    attr_reader :trie

    # C-side callback: wraps the raw match structs in an OpenStruct and
    # hands it to the user-supplied block.
    # NOTE(review): @@callback is a class variable, so concurrent Search
    # instances overwrite each other's callback — confirm single use.
    CALLBACK = lambda {|match_pointer, void_pointer|
      match_struct = MatchStruct.new(match_pointer)
      pattern_struct = PatternStruct.new(match_struct[:pattern])
      match = OpenStruct.new representative: pattern_struct[:representative].read_string.dup,
                             matched: pattern_struct[:string].read_string.dup,
                             position: match_struct[:position] - pattern_struct[:length]
      @@callback.call match
      return 0
    }

    # dictionary: { representative => [pattern strings] }
    def initialize(dictionary, &callback)
      @@callback = callback
      @trie = ac_automata_init(CALLBACK)
      dictionary.each do |key, values|
        values.each {|value| add(key, value) }
      end
      finalize
    end

    # Searches string, firing the callback for each dictionary hit.
    # Dropped the previous GC.disable busy-loop workaround.
    # NOTE(review): the MemoryPointer from string_pointer is not retained
    # here; confirm it cannot be collected during the C search call.
    def match(string)
      reset
      text = TextStruct.new
      text[:string] = string_pointer(string)
      text[:length] = string.length
      ac_automata_search(trie, text.pointer, nil)
    end

    private

    # Registers one pattern string under its representative key.
    def add(representative, string)
      pattern = PatternStruct.new
      pattern[:string] = string_pointer(string)
      pattern[:length] = string.length
      pattern[:representative] = string_pointer(representative)
      ac_automata_add(trie, pattern.pointer)
    end

    # Compiles the automaton; no patterns may be added afterwards.
    def finalize
      ac_automata_finalize(trie)
    end

    def reset
      ac_automata_reset(trie)
    end

    def release
      ac_automata_release(trie)
    end

    # Debug dump of the automaton ('s' selects string display mode).
    def display
      ac_automata_display(trie, ?s.ord)
    end

    # Copies a Ruby string into native memory for the C side.
    def string_pointer(string)
      FFI::MemoryPointer.from_string(string.to_s.dup)
    end
  end
end
Remove the GC disable/enable workaround from Search#match
require "ffi"
require "ostruct" # OpenStruct is used for match results but was never required

# Ruby FFI wrapper around a C Aho-Corasick multi-pattern matcher.
module AhoCorasick
  # Raw bindings to the bundled ahocorasick shared library.
  module C
    extend FFI::Library
    ffi_lib File.dirname(__FILE__) + "/ahocorasick.so"

    class MatchStruct < FFI::Struct
      layout :pattern, :pointer,
             :position, :long,
             :number, :uint
    end

    class PatternStruct < FFI::Struct
      layout :string, :pointer,
             :length, :uint,
             :representative, :pointer
    end

    class TextStruct < FFI::Struct
      layout :string, :pointer,
             :length, :uint
    end

    callback :match_callback, [:pointer, :pointer], :int
    attach_function :ac_automata_init, [:match_callback], :pointer
    attach_function :ac_automata_add, [:pointer, :pointer], :int
    attach_function :ac_automata_finalize, [:pointer], :void
    attach_function :ac_automata_search, [:pointer, :pointer, :pointer], :int
    attach_function :ac_automata_reset, [:pointer], :void
    attach_function :ac_automata_release, [:pointer], :void
    attach_function :ac_automata_display, [:pointer, :char], :void
  end

  # Builds an automaton over a dictionary and invokes the supplied block
  # for every hit while searching.
  class Search
    include C

    attr_reader :trie

    # C-side callback: wraps the raw match structs in an OpenStruct and
    # hands it to the user-supplied block.
    # NOTE(review): @@callback is a class variable, so concurrent Search
    # instances overwrite each other's callback — confirm single use.
    CALLBACK = lambda {|match_pointer, void_pointer|
      match_struct = MatchStruct.new(match_pointer)
      pattern_struct = PatternStruct.new(match_struct[:pattern])
      match = OpenStruct.new representative: pattern_struct[:representative].read_string.dup,
                             matched: pattern_struct[:string].read_string.dup,
                             position: match_struct[:position] - pattern_struct[:length]
      @@callback.call match
      return 0
    }

    # dictionary: { representative => [pattern strings] }
    def initialize(dictionary, &callback)
      @@callback = callback
      @trie = ac_automata_init(CALLBACK)
      dictionary.each do |key, values|
        values.each {|value| add(key, value) }
      end
      finalize
    end

    # Searches string, firing the callback for each dictionary hit.
    def match(string)
      reset
      text = TextStruct.new
      text[:string] = string_pointer(string)
      text[:length] = string.length
      ac_automata_search(trie, text.pointer, nil)
    end

    private

    # Registers one pattern string under its representative key.
    def add(representative, string)
      pattern = PatternStruct.new
      pattern[:string] = string_pointer(string)
      pattern[:length] = string.length
      pattern[:representative] = string_pointer(representative)
      ac_automata_add(trie, pattern.pointer)
    end

    # Compiles the automaton; no patterns may be added afterwards.
    def finalize
      ac_automata_finalize(trie)
    end

    def reset
      ac_automata_reset(trie)
    end

    def release
      ac_automata_release(trie)
    end

    # Debug dump of the automaton ('s' selects string display mode).
    def display
      ac_automata_display(trie, ?s.ord)
    end

    # Copies a Ruby string into native memory for the C side.
    def string_pointer(string)
      FFI::MemoryPointer.from_string(string.to_s.dup)
    end
  end
end
|
# Owns the process-wide Bunny (RabbitMQ) connection plus per-thread
# channels, exchanges and queues bridging the numbers service and Rails.
module AmqpManager
class << self
# Channel for the numbers side, cached per thread.
def numbers_channel
Thread.current[:numbers_channel] ||= @connection.create_channel
end
# 'voice.numbers' topic exchange (not auto-deleted), cached per thread.
def numbers_xchange
Thread.current[:numbers_xchange] ||= numbers_channel.topic('voice.numbers', auto_delete: false)
end
# 'voice.numbers' queue, cached per thread.
def numbers_queue
Thread.current[:numbers_queue] ||= numbers_channel.queue('voice.numbers', auto_delete: false)
end
# Channel for the Rails side, cached per thread.
def rails_channel
Thread.current[:rails_channel] ||= @connection.create_channel
end
# 'voice.rails' topic exchange, cached per thread.
def rails_xchange
Thread.current[:rails_xchange] ||= rails_channel.topic('voice.rails', auto_delete: false)
end
# 'voice.rails' queue, cached per thread.
def rails_queue
Thread.current[:rails_queue] ||= rails_channel.queue('voice.rails', auto_delete: false)
end
# Publishes a raw payload to the Rails exchange.
def rails_publish(payload)
rails_xchange.publish(payload, routing_key: 'voice.rails')
end
# Closes the shared connection (and with it all channels).
def shutdown
@connection.close
end
# Opens the shared Bunny connection from Numbers configuration.
# NOTE(review): raises (e.g. Bunny::TCPConnectionFailed) when the broker
# is not up yet — callers at boot may want a retry loop.
def establish_connection
@connection = Bunny.new(
host: Numbers.number_conf['rabbit_host'],
user: Numbers.number_conf['rabbit_user'],
password: Numbers.number_conf['rabbit_pass']
).tap { |c| c.start }
end
# Boot entry point: connect, bind both queues and consume the numbers
# queue, relaying every payload to Rails and the AMI event log.
def start
establish_connection
numbers_queue.bind(numbers_xchange, routing_key: 'voice.numbers')
numbers_queue.subscribe { |delivery_info, metadata, payload|
rails_publish(payload)
AmiEvent.log(payload)
}
rails_channel.queue('voice.rails', auto_delete: false)
.bind(rails_xchange, routing_key: 'voice.rails')
end
end
end
retry amqp connection at boot time
# Bridges AMQP traffic between the numbers service and the Rails app over
# one shared Bunny connection; channels, exchanges and queues are cached
# per thread in Thread.current.
module AmqpManager
  # Thread-cached channel for the numbers side.
  def self.numbers_channel
    Thread.current[:numbers_channel] ||= @connection.create_channel
  end

  # Thread-cached 'voice.numbers' topic exchange.
  def self.numbers_xchange
    Thread.current[:numbers_xchange] ||= numbers_channel.topic('voice.numbers', auto_delete: false)
  end

  # Thread-cached 'voice.numbers' queue.
  def self.numbers_queue
    Thread.current[:numbers_queue] ||= numbers_channel.queue('voice.numbers', auto_delete: false)
  end

  # Thread-cached channel for the Rails side.
  def self.rails_channel
    Thread.current[:rails_channel] ||= @connection.create_channel
  end

  # Thread-cached 'voice.rails' topic exchange.
  def self.rails_xchange
    Thread.current[:rails_xchange] ||= rails_channel.topic('voice.rails', auto_delete: false)
  end

  # Thread-cached 'voice.rails' queue.
  def self.rails_queue
    Thread.current[:rails_queue] ||= rails_channel.queue('voice.rails', auto_delete: false)
  end

  # Publishes a raw payload to the Rails exchange.
  def self.rails_publish(payload)
    rails_xchange.publish(payload, routing_key: 'voice.rails')
  end

  # Closes the shared connection (and with it all channels).
  def self.shutdown
    @connection.close
  end

  # Opens the shared Bunny connection using the Numbers configuration,
  # retrying once a second until the broker accepts the TCP connection
  # (lets the service boot before RabbitMQ is up).
  def self.establish_connection
    candidate = Bunny.new(
      host:     Numbers.number_conf['rabbit_host'],
      user:     Numbers.number_conf['rabbit_user'],
      password: Numbers.number_conf['rabbit_pass']
    )
    candidate.start
    @connection = candidate
  rescue Bunny::TCPConnectionFailed
    sleep 1
    retry
  end

  # Boot entry point: connect, bind both queues and consume the numbers
  # queue, relaying every payload to Rails and the AMI event log.
  def self.start
    establish_connection
    numbers_queue.bind(numbers_xchange, routing_key: 'voice.numbers')
    numbers_queue.subscribe do |delivery_info, metadata, payload|
      rails_publish(payload)
      AmiEvent.log(payload)
    end
    rails_channel.queue('voice.rails', auto_delete: false).bind(rails_xchange, routing_key: 'voice.rails')
  end
end
|
require 'anemone/http'
require 'hpricot'
module Anemone
# A fetched web page: response metadata plus the same-domain links
# extracted from its body.
class Page
# The URL of the page
attr_reader :url
# Array of distinct A tag HREFs from the page
attr_reader :links
#Body of the HTTP response
attr_reader :body
#Content-type of the HTTP response
attr_reader :content_type
# Integer response code of the page
attr_accessor :code
# Array of redirect-aliases for the page
attr_accessor :aliases
# Boolean indicating whether or not this page has been visited in PageHash#shortest_paths!
attr_accessor :visited
# Used by PageHash#shortest_paths! to store depth of the page
attr_accessor :depth
#
# Create a new Page from the response of an HTTP request to *url*
# Any fetch error yields a bare Page carrying only the URL (no body/code).
#
def self.fetch(url)
begin
url = URI(url) if url.is_a?(String)
response, code, location = Anemone::HTTP.get(url)
# Record the final location as an alias when we were redirected.
aka = nil
if !url.eql?(location)
aka = location
end
return Page.new(url, response.body, code, response['Content-Type'], aka)
rescue
return Page.new(url)
end
end
#
# Create a new page
#
# NOTE(review): +aka+ is only appended to @aliases here; no @aka ivar is
# ever assigned, so alias_clone's `@aka` check below is always nil.
#
def initialize(url, body = nil, code = nil, content_type = nil, aka = nil)
@url = url
@body = body unless Anemone.options.discard_page_bodies
@code = code
@content_type = content_type
@links = []
@aliases = []
@aliases << aka if !aka.nil?
#get a list of distinct links on the page, in absolute url form
if body
Hpricot(body).search('a').each do |a|
u = a['href']
next if u.nil?
begin
u = URI(u)
rescue
next
end
abs = to_absolute(u)
@links << abs if in_domain?(abs)
end
@links.uniq!
end
end
#
# Return a new page with the same *response* and *url*, but
# with a 200 response code
#
# NOTE(review): @aka is never assigned anywhere in this class, so the
# add_alias! branch below never fires — confirm whether the redirect
# alias was meant to be stored in initialize.
#
def alias_clone(url)
p = clone
p.add_alias!(@aka) if !@aka.nil?
p.code = 200
p
end
#
# Add a redirect-alias String *aka* to the list of the page's aliases
#
# Returns *self*
#
def add_alias!(aka)
@aliases << aka if !@aliases.include?(aka)
self
end
#
# Returns an Array of all links from this page, and all the
# redirect-aliases of those pages, as String objects.
#
# *page_hash* is a PageHash object with the results of the current crawl.
#
def links_and_their_aliases(page_hash)
@links.inject([]) do |results, link|
results.concat([link].concat(page_hash[link].aliases))
end
end
#
# Returns +true+ if the page is a HTML document, returns +false+
# otherwise.
#
def html?
(@content_type =~ /text\/html/) == 0
end
#
# Returns +true+ if the page is a HTTP redirect, returns +false+
# otherwise.
#
def redirect?
(300..399).include?(@code)
end
#
# Returns +true+ if the page was not found (returned 404 code),
# returns +false+ otherwise.
#
def not_found?
404 == @code
end
#
# Converts relative URL *link* into an absolute URL based on the
# location of the page
#
# NOTE(review): URI.encode is obsolete (removed in Ruby 3) — fine for the
# era of this code, but flag for any upgrade.
#
def to_absolute(link)
# remove anchor
link = URI.encode(link.to_s.gsub(/#[a-zA-Z0-9_-]*$/,''))
relative = URI(link)
absolute = @url.merge(relative)
absolute.path = '/' if absolute.path.empty?
return absolute
end
#
# Returns +true+ if *uri* is in the same domain as the page, returns
# +false+ otherwise
#
def in_domain?(uri)
uri.host == @url.host
end
end
end
added title, h1, h2, description fields to page
require 'anemone/http'
require 'hpricot'
module Anemone
  # A fetched web page: response metadata, extracted document fields
  # (title/h1/h2/meta-description) and the same-domain links from its body.
  class Page
    # The URL of the page
    attr_reader :url
    # Array of distinct A tag HREFs from the page
    attr_reader :links
    # Body of the HTTP response
    attr_reader :body
    # Content-type of the HTTP response
    attr_reader :content_type
    # Title of the page if it is an HTML document
    attr_reader :title
    # First h1 on the page, if present
    attr_reader :h1
    # First h2 on the page, if present
    attr_reader :h2
    # Meta-description of the page, if present
    attr_reader :description
    # Integer response code of the page
    attr_accessor :code
    # Array of redirect-aliases for the page
    attr_accessor :aliases
    # Boolean indicating whether or not this page has been visited in PageHash#shortest_paths!
    attr_accessor :visited
    # Used by PageHash#shortest_paths! to store depth of the page
    attr_accessor :depth

    #
    # Create a new Page from the response of an HTTP request to *url*.
    # Any fetch error yields a bare Page carrying only the URL (no body).
    #
    def self.fetch(url)
      begin
        url = URI(url) if url.is_a?(String)
        response, code, location = Anemone::HTTP.get(url)
        # Record the final location as an alias when we were redirected.
        aka = nil
        if !url.eql?(location)
          aka = location
        end
        return Page.new(url, response.body, code, response['Content-Type'], aka)
      rescue
        return Page.new(url)
      end
    end

    #
    # Create a new page
    #
    def initialize(url, body = nil, code = nil, content_type = nil, aka = nil)
      @url = url
      @body = body unless Anemone.options.discard_page_bodies
      @code = code
      @content_type = content_type
      @links = []
      @aliases = []
      # Fix: remember the redirect alias so alias_clone can propagate it
      # (it previously read an @aka that was never assigned).
      @aka = aka
      @aliases << aka if !aka.nil?
      # Fix: parse only when a body is present. Page.fetch's rescue path
      # builds a body-less Page, and Hpricot was previously invoked
      # unconditionally, raising on nil.
      if body
        h = Hpricot(body)
        # save page title
        title_elem = h.at('title')
        @title = title_elem.inner_html if !title_elem.nil?
        # save page h1
        h1_elem = h.at('h1')
        @h1 = h1_elem.inner_html if !h1_elem.nil?
        # save page h2
        h2_elem = h.at('h2')
        @h2 = h2_elem.inner_html if !h2_elem.nil?
        # save page meta-description
        description_elem = h.at('meta[@name=description]')
        @description = description_elem['content'] if !description_elem.nil?
        # get a list of distinct links on the page, in absolute url form
        h.search('a').each do |a|
          u = a['href']
          next if u.nil?
          begin
            u = URI(u)
          rescue
            next # skip unparseable hrefs
          end
          abs = to_absolute(u)
          @links << abs if in_domain?(abs)
        end
        @links.uniq!
      end
    end

    #
    # Return a new page with the same *response* and *url*, but
    # with a 200 response code
    #
    def alias_clone(url)
      p = clone
      p.add_alias!(@aka) if !@aka.nil?
      p.code = 200
      p
    end

    #
    # Add a redirect-alias String *aka* to the list of the page's aliases
    #
    # Returns *self*
    #
    def add_alias!(aka)
      @aliases << aka if !@aliases.include?(aka)
      self
    end

    #
    # Returns an Array of all links from this page, and all the
    # redirect-aliases of those pages, as String objects.
    #
    # *page_hash* is a PageHash object with the results of the current crawl.
    #
    def links_and_their_aliases(page_hash)
      @links.inject([]) do |results, link|
        results.concat([link].concat(page_hash[link].aliases))
      end
    end

    #
    # Returns +true+ if the page is a HTML document, returns +false+
    # otherwise.
    #
    def html?
      (@content_type =~ /text\/html/) == 0
    end

    #
    # Returns +true+ if the page is a HTTP redirect, returns +false+
    # otherwise.
    #
    def redirect?
      (300..399).include?(@code)
    end

    #
    # Returns +true+ if the page was not found (returned 404 code),
    # returns +false+ otherwise.
    #
    def not_found?
      404 == @code
    end

    #
    # Converts relative URL *link* into an absolute URL based on the
    # location of the page
    #
    def to_absolute(link)
      # remove anchor
      link = URI.encode(link.to_s.gsub(/#[a-zA-Z0-9_-]*$/, ''))
      relative = URI(link)
      absolute = @url.merge(relative)
      absolute.path = '/' if absolute.path.empty?
      return absolute
    end

    #
    # Returns +true+ if *uri* is in the same domain as the page, returns
    # +false+ otherwise
    #
    def in_domain?(uri)
      uri.host == @url.host
    end
  end
end
# Copyright (C) 2007, 2008, The Collaborative Software Foundation
#
# This file is part of TriSano.
#
# TriSano is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# TriSano is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with TriSano. If not, see http://www.gnu.org/licenses/agpl-3.0.txt.
require 'ftools'
require 'fileutils'
gem 'mechanize', "< 0.8.0"
require 'mechanize'
require 'rexml/document'
require 'rest-open-uri'
require 'logger'
namespace :trisano do
  # Supported rake task arguments:
  #   RAILS_ENV - controls what database config to use
  #   basicauth - whether or not to use HTTP Basic Auth from within the .war file (default = true)
  #   min - sets the minimum number of Rails instances in the pool (default is 4)
  #   max - sets the maximum number of Rails instances in the pool (default is 10)
  # Example: jruby -S rake trisano:deploy:buildwar RAILS_ENV=production basicauth=false
  namespace :deploy do
    WAR_FILE_NAME = 'trisano.war'
    # Override with env variable if you have a different Tomcat home - just export it
    TOMCAT_HOME = ENV['TOMCAT_HOME'] ||= '/opt/tomcat/apache-tomcat-6.0.14'
    TOMCAT_BIN = TOMCAT_HOME + '/bin'
    TOMCAT_DEPLOY_DIR_NAME = TOMCAT_HOME + '/webapps'
    TOMCAT_DEPLOYED_EXPLODED_WAR_DIR = TOMCAT_DEPLOY_DIR_NAME + '/' + 'trisano'
    TOMCAT_DEPLOYED_WAR_NAME = TOMCAT_DEPLOY_DIR_NAME + '/' + WAR_FILE_NAME
    # Override with env variable if you are running locally http://localhost:8080
    TRISANO_URL = ENV['TRISANO_URL'] ||= 'http://ut-nedss-dev.csinitiative.com'
    TRISANO_SVN_ROOT = ENV['TRISANO_SVN_ROOT'] ||= '~/projects/trisano'
    TRISANO_DIST_DIR = ENV['TRISANO_DIST_DIR'] ||= '~/trisano-dist'

    # Copies the checkout into a timestamped dist directory, strips VCS and
    # build cruft, and tars it up as trisano-release-<timestamp>.tar.gz.
    # delete_war - when true, also removes ./distro/trisano.war from the copy.
    def core_release_tasks(delete_war = true)
      tformated = Time.now.strftime("%m-%d-%Y-%I%M%p")
      filename = "trisano-release-" + tformated + ".tar.gz"
      dist_dirname = TRISANO_DIST_DIR + "/" + tformated
      sh "cp -R #{TRISANO_SVN_ROOT} #{dist_dirname}"
      p "removing .git directory"
      sh "rm -rf #{dist_dirname}/.git"
      # tried to get tar --exclude to work, but had no luck - bailing to a simpler approach
      p "removing tmp directories from #{dist_dirname}"
      cd dist_dirname
      trisano_war_file = "trisano.war"
      if File.file? "./webapp/#{trisano_war_file}"
        File.delete("./webapp/#{trisano_war_file}")
        puts "deleted ./webapp/#{trisano_war_file}"
      end
      if File.file?("./distro/#{trisano_war_file}") and delete_war
        File.delete("./distro/#{trisano_war_file}")
        puts "deleted ./distro/#{trisano_war_file}"
      end
      sh "rm -f ./webapp/log/*.*"
      sh "rm -rf ./webapp/nbproject"
      sh "rm -rf ./distro/dump"
      sh "rm -rf ./webapp/tmp"
      sh "rm -rf ./distro/*.txt"
      sh "rm -rf ./webapp/vendor/plugins/safe_record"
      sh "rm -rf ./webapp/vendor/plugins/safe_erb"
      cd TRISANO_DIST_DIR
      # Fix: the tar target was a broken "#(unknown)" interpolation while the
      # computed release filename above went unused.
      sh "tar czfh #{filename} ./#{tformated}"
    end

    desc "delete trisano war file and exploded directory from Tomcat"
    task :deletewar do
      puts "attempting to delete war file from Tomcat"
      if File.file? TOMCAT_DEPLOYED_WAR_NAME
        File.delete(TOMCAT_DEPLOYED_WAR_NAME)
        puts "deleted deployed war file"
      else
        puts "war file not found - did not delete"
      end
      puts "attempting to delete deployed exploded war directory #{TOMCAT_DEPLOYED_EXPLODED_WAR_DIR}"
      if File.directory? TOMCAT_DEPLOYED_EXPLODED_WAR_DIR
        FileUtils.remove_dir(TOMCAT_DEPLOYED_EXPLODED_WAR_DIR)
        puts "deleted deployed exploded war directory"
      else
        puts "deployed exploded war directory not found - did not delete"
      end
    end

    desc "build war file"
    task :buildwar do
      puts "running warble clean"
      ruby "-S warble war:clean"
      puts "running warble war"
      ruby "-S warble war"
    end

    desc "copy trisano war file to Tomcat"
    task :copywar do
      puts "attempting to copy #{WAR_FILE_NAME} war file to Tomcat #{TOMCAT_DEPLOY_DIR_NAME}"
      if files_exist
        File.copy(WAR_FILE_NAME, TOMCAT_DEPLOY_DIR_NAME, true)
      else
        which_files_exist
      end
    end

    # True only when both the war file and the Tomcat webapps dir exist.
    # Fix: the File.file? result was previously discarded because only the
    # method's last expression was returned; combine both checks with &&.
    def files_exist
      File.file?(WAR_FILE_NAME) && File.directory?(TOMCAT_DEPLOY_DIR_NAME)
    end

    # Diagnostic output for the copywar failure path.
    def which_files_exist
      puts "#{WAR_FILE_NAME} exists? #{File.file? WAR_FILE_NAME} #{TOMCAT_DEPLOY_DIR_NAME} exists? #{File.directory? TOMCAT_DEPLOY_DIR_NAME}"
    end

    desc "stop Tomcat"
    task :stoptomcat do
      puts "attempting to stop Tomcat"
      sh TOMCAT_BIN + "/shutdown.sh"
      sleep 10 # give Tomcat time to wind down before deleting the war
    end

    desc "start Tomcat"
    task :starttomcat do
      puts "attempting to start Tomcat"
      sh TOMCAT_BIN + "/startup.sh"
    end

    desc "smoke test that ensures trisano was deployed"
    task :smoke do
      retries = 5
      begin
        sleep 10 # allow the freshly started Tomcat to finish deploying
        puts "executing smoke test"
        Hpricot.buffer_size = 65536
        #agent = WWW::Mechanize.new {|a| a.log = Logger.new(STDERR) }
        agent = WWW::Mechanize.new
        agent.read_timeout = 300
        #agent.set_proxy("localhost", "8118")
        puts "GET / to #{TRISANO_URL}/trisano/"
        url = TRISANO_URL + '/trisano'
        page = agent.get(url)
        raise "GET content invalid" unless (page.search("//#errorExplanation")).empty?
        puts "smoke test success"
      rescue => error
        puts error
        puts "smoke test retry attempts remaining: #{retries - 1}"
        retry if (retries -= 1) > 0
        raise # exhausted retries - fail the task
      end
    end

    desc "redeploy Tomcat"
    task :redeploytomcat => [:stoptomcat, :deletewar, :copywar, :starttomcat, :smoke] do
      puts "redeploy Tomcat success"
    end

    desc "redeploy Tomcat"
    task :redeploytomcat_no_smoke => [:stoptomcat, :deletewar, :copywar, :starttomcat] do
      puts "redeploy Tomcat success"
    end

    desc "build war and redeploy Tomcat"
    task :buildandredeploy => [:buildwar, :redeploytomcat] do
      puts "build and redeploy success"
    end

    desc "build and redeploy full: alias for build and redeploy"
    task :buildandredeployfull => [:buildandredeploy] do
      puts "build and redeploy"
    end

    desc "Create database configuration file for a production install"
    task :create_db_config do
      ruby "-S rake trisano:dev:release_db_rebuild_full RAILS_ENV=development"
      sh "pg_dump -x -O trisano_development > ../distro/database/trisano_schema.sql"
    end

    desc "Create database configuration file for a test or demo install"
    task :create_demo_db_config do
      ruby "-S rake trisano:dev:db_rebuild_full RAILS_ENV=development"
      sh "pg_dump -x -O trisano_development > ../distro/database/trisano_schema.sql"
    end

    desc "package production .war file, include database dump, scripts, and configuration files in a .tar"
    task :prod_release do
      puts "!!WARNING!!: using following TRISANO_SVN_ROOT: #{TRISANO_SVN_ROOT}. Please ensure it is correct."
      ruby "-S rake trisano:deploy:create_db_config"
      ruby "-S rake -f ../webapp/Rakefile trisano:distro:package_app"
      core_release_tasks(false)
    end

    desc "package production .war file with demo/testing data, include database dump, scripts, and configuration files in a .tar"
    task :test_release do
      puts "!!WARNING!!: using following TRISANO_SVN_ROOT: #{TRISANO_SVN_ROOT}. Please ensure it is correct."
      puts "==================== This release will include test/demo data. ===================="
      puts "==================== It is not intended to be used for a clean system install ====="
      ruby "-S rake trisano:deploy:create_demo_db_config"
      core_release_tasks
    end
  end
end
Put the release task back where it belongs
# Copyright (C) 2007, 2008, The Collaborative Software Foundation
#
# This file is part of TriSano.
#
# TriSano is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# TriSano is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with TriSano. If not, see http://www.gnu.org/licenses/agpl-3.0.txt.
require 'ftools'
require 'fileutils'
gem 'mechanize', "< 0.8.0"
require 'mechanize'
require 'rexml/document'
require 'rest-open-uri'
require 'logger'
namespace :trisano do
  # Supported rake task arguments:
  #   RAILS_ENV - controls what database config to use
  #   basicauth - whether or not to use HTTP Basic Auth from within the .war file (default = true)
  #   min - sets the minimum number of Rails instances in the pool (default is 4)
  #   max - sets the maximum number of Rails instances in the pool (default is 10)
  # Example: jruby -S rake trisano:deploy:buildwar RAILS_ENV=production basicauth=false
  namespace :deploy do
    WAR_FILE_NAME = 'trisano.war'
    # Override with env variable if you have a different Tomcat home - just export it
    TOMCAT_HOME = ENV['TOMCAT_HOME'] ||= '/opt/tomcat/apache-tomcat-6.0.14'
    TOMCAT_BIN = TOMCAT_HOME + '/bin'
    TOMCAT_DEPLOY_DIR_NAME = TOMCAT_HOME + '/webapps'
    TOMCAT_DEPLOYED_EXPLODED_WAR_DIR = TOMCAT_DEPLOY_DIR_NAME + '/' + 'trisano'
    TOMCAT_DEPLOYED_WAR_NAME = TOMCAT_DEPLOY_DIR_NAME + '/' + WAR_FILE_NAME
    # Override with env variable if you are running locally http://localhost:8080
    TRISANO_URL = ENV['TRISANO_URL'] ||= 'http://ut-nedss-dev.csinitiative.com'
    TRISANO_SVN_ROOT = ENV['TRISANO_SVN_ROOT'] ||= '~/projects/trisano'
    TRISANO_DIST_DIR = ENV['TRISANO_DIST_DIR'] ||= '~/trisano-dist'

    # Copies the checkout into a timestamped dist directory, strips VCS and
    # build cruft, and tars it up as trisano-release-<timestamp>.tar.gz.
    # delete_war - when true, also removes ./distro/trisano.war from the copy.
    def core_release_tasks(delete_war = true)
      tformated = Time.now.strftime("%m-%d-%Y-%I%M%p")
      filename = "trisano-release-" + tformated + ".tar.gz"
      dist_dirname = TRISANO_DIST_DIR + "/" + tformated
      sh "cp -R #{TRISANO_SVN_ROOT} #{dist_dirname}"
      p "removing .git directory"
      sh "rm -rf #{dist_dirname}/.git"
      # tried to get tar --exclude to work, but had no luck - bailing to a simpler approach
      p "removing tmp directories from #{dist_dirname}"
      cd dist_dirname
      trisano_war_file = "trisano.war"
      if File.file? "./webapp/#{trisano_war_file}"
        File.delete("./webapp/#{trisano_war_file}")
        puts "deleted ./webapp/#{trisano_war_file}"
      end
      if File.file?("./distro/#{trisano_war_file}") and delete_war
        File.delete("./distro/#{trisano_war_file}")
        puts "deleted ./distro/#{trisano_war_file}"
      end
      sh "rm -f ./webapp/log/*.*"
      sh "rm -rf ./webapp/nbproject"
      sh "rm -rf ./distro/dump"
      sh "rm -rf ./webapp/tmp"
      sh "rm -rf ./distro/*.txt"
      sh "rm -rf ./webapp/vendor/plugins/safe_record"
      sh "rm -rf ./webapp/vendor/plugins/safe_erb"
      cd TRISANO_DIST_DIR
      # Fix: the tar target was a broken "#(unknown)" interpolation while the
      # computed release filename above went unused.
      sh "tar czfh #{filename} ./#{tformated}"
    end

    desc "delete trisano war file and exploded directory from Tomcat"
    task :deletewar do
      puts "attempting to delete war file from Tomcat"
      if File.file? TOMCAT_DEPLOYED_WAR_NAME
        File.delete(TOMCAT_DEPLOYED_WAR_NAME)
        puts "deleted deployed war file"
      else
        puts "war file not found - did not delete"
      end
      puts "attempting to delete deployed exploded war directory #{TOMCAT_DEPLOYED_EXPLODED_WAR_DIR}"
      if File.directory? TOMCAT_DEPLOYED_EXPLODED_WAR_DIR
        FileUtils.remove_dir(TOMCAT_DEPLOYED_EXPLODED_WAR_DIR)
        puts "deleted deployed exploded war directory"
      else
        puts "deployed exploded war directory not found - did not delete"
      end
    end

    desc "build war file"
    task :buildwar do
      puts "running warble clean"
      ruby "-S warble war:clean"
      puts "running warble war"
      ruby "-S warble war"
    end

    desc "copy trisano war file to Tomcat"
    task :copywar do
      puts "attempting to copy #{WAR_FILE_NAME} war file to Tomcat #{TOMCAT_DEPLOY_DIR_NAME}"
      if files_exist
        File.copy(WAR_FILE_NAME, TOMCAT_DEPLOY_DIR_NAME, true)
      else
        which_files_exist
      end
    end

    # True only when both the war file and the Tomcat webapps dir exist.
    # Fix: the File.file? result was previously discarded because only the
    # method's last expression was returned; combine both checks with &&.
    def files_exist
      File.file?(WAR_FILE_NAME) && File.directory?(TOMCAT_DEPLOY_DIR_NAME)
    end

    # Diagnostic output for the copywar failure path.
    def which_files_exist
      puts "#{WAR_FILE_NAME} exists? #{File.file? WAR_FILE_NAME} #{TOMCAT_DEPLOY_DIR_NAME} exists? #{File.directory? TOMCAT_DEPLOY_DIR_NAME}"
    end

    desc "stop Tomcat"
    task :stoptomcat do
      puts "attempting to stop Tomcat"
      sh TOMCAT_BIN + "/shutdown.sh"
      sleep 10 # give Tomcat time to wind down before deleting the war
    end

    desc "start Tomcat"
    task :starttomcat do
      puts "attempting to start Tomcat"
      sh TOMCAT_BIN + "/startup.sh"
    end

    desc "smoke test that ensures trisano was deployed"
    task :smoke do
      retries = 5
      begin
        sleep 10 # allow the freshly started Tomcat to finish deploying
        puts "executing smoke test"
        Hpricot.buffer_size = 65536
        #agent = WWW::Mechanize.new {|a| a.log = Logger.new(STDERR) }
        agent = WWW::Mechanize.new
        agent.read_timeout = 300
        #agent.set_proxy("localhost", "8118")
        puts "GET / to #{TRISANO_URL}/trisano/"
        url = TRISANO_URL + '/trisano'
        page = agent.get(url)
        raise "GET content invalid" unless (page.search("//#errorExplanation")).empty?
        puts "smoke test success"
      rescue => error
        puts error
        puts "smoke test retry attempts remaining: #{retries - 1}"
        retry if (retries -= 1) > 0
        raise # exhausted retries - fail the task
      end
    end

    desc "redeploy Tomcat"
    task :redeploytomcat => [:stoptomcat, :deletewar, :copywar, :starttomcat, :smoke] do
      puts "redeploy Tomcat success"
    end

    desc "redeploy Tomcat"
    task :redeploytomcat_no_smoke => [:stoptomcat, :deletewar, :copywar, :starttomcat] do
      puts "redeploy Tomcat success"
    end

    desc "build war and redeploy Tomcat"
    task :buildandredeploy => [:buildwar, :redeploytomcat] do
      puts "build and redeploy success"
    end

    desc "build and redeploy full: alias for build and redeploy"
    task :buildandredeployfull => [:buildandredeploy] do
      puts "build and redeploy"
    end

    desc "Create database configuration file for a production install"
    task :create_db_config do
      ruby "-S rake trisano:dev:release_db_rebuild_full RAILS_ENV=development"
      sh "pg_dump -x -O trisano_development > ../distro/database/trisano_schema.sql"
    end

    desc "Create database configuration file for a test or demo install"
    task :create_demo_db_config do
      ruby "-S rake trisano:dev:db_rebuild_full RAILS_ENV=development"
      sh "pg_dump -x -O trisano_development > ../distro/database/trisano_schema.sql"
    end

    desc "package production .war file, include database dump, scripts, and configuration files in a .tar"
    task :release do
      puts "!!WARNING!!: using following TRISANO_SVN_ROOT: #{TRISANO_SVN_ROOT}. Please ensure it is correct."
      ruby "-S rake trisano:deploy:create_db_config"
      core_release_tasks
    end

    desc "package production .war file, include database dump, scripts, and configuration files in a .tar"
    task :prod_release do
      puts "!!WARNING!!: using following TRISANO_SVN_ROOT: #{TRISANO_SVN_ROOT}. Please ensure it is correct."
      ruby "-S rake trisano:deploy:create_db_config"
      ruby "-S rake -f ../webapp/Rakefile trisano:distro:package_app"
      core_release_tasks(false)
    end

    desc "package production .war file with demo/testing data, include database dump, scripts, and configuration files in a .tar"
    task :test_release do
      puts "!!WARNING!!: using following TRISANO_SVN_ROOT: #{TRISANO_SVN_ROOT}. Please ensure it is correct."
      puts "==================== This release will include test/demo data. ===================="
      puts "==================== It is not intended to be used for a clean system install ====="
      ruby "-S rake trisano:deploy:create_demo_db_config"
      core_release_tasks
    end
  end
end
|
module Phonelib
  # Parsed phone number with basic validation helpers.
  class Phone
    # original        - the raw input string
    # sanitized       - digits-only form of the input
    # national_number - digits remaining after the detected country code
    attr_reader :original, :sanitized, :national_number

    # class initialization method
    #
    # ==== Attributes
    #
    # * +phone+ - Phone number for parsing
    # * +country_data+ - Hash of data for parsing
    #
    def initialize(phone, country_data)
      @original = phone
      @sanitized = sanitize_phone(@original)
      @analyzed_data = {}
      analyze_phone(country_data) unless @sanitized.empty?
    end

    # Returns all phone types that matched valid patterns
    def types
      @analyzed_data.flat_map {|iso2, data| data[:valid]}.uniq
    end

    # Returns first phone type that matched
    def type
      types.first
    end

    # Returns all countries that matched valid patterns
    def countries
      @analyzed_data.map {|iso2, data| iso2}
    end

    # Returns first country that matched valid patterns
    def country
      countries.first
    end

    # Returns whether a current parsed phone number is valid
    def valid?
      @analyzed_data.select {|iso2, data| data[:valid].any? }.any?
    end

    # Returns whether a current parsed phone number is invalid
    def invalid?
      !valid?
    end

    # Returns whether a current parsed phone number is possible
    def possible?
      @analyzed_data.select {|iso2, data| data[:possible].any? }.any?
    end

    # Returns whether a current parsed phone number is impossible
    def impossible?
      !possible?
    end

    # Formats the number internationally: "+CC (AAA) rest" using the format
    # data matched during analysis.
    def international
      format = @analyzed_data[country][:format]
      country_code = @analyzed_data[country][:countryCode]
      regexp = Regexp.new(format[:regex])
      # Insert a space between a digit and a directly following "$n".
      format_string = format[:format].gsub(/(\d)\$/, "\\1 $")
      md = regexp.match(@national_number)
      # Replace each "$n" placeholder with the n-th capture group.
      national_part = format_string.gsub(/\$(\d)/) { md[$1.to_i] }
      "+" + country_code + " " +
        national_part.gsub(/^(\d{3,})\s+/, "(\\1) ")
    end

    # Returns whether a current parsed phone number is valid for specified
    # country
    #
    # ==== Attributes
    #
    # * +country+ - ISO code of country (2 letters) like 'US' for United States
    #
    def valid_for_country?(country)
      @analyzed_data.select {|iso2, data| country == iso2 &&
          data[:valid].any? }.any?
    end

    # Returns whether a current parsed phone number is invalid for specified
    # country
    #
    # ==== Attributes
    #
    # * +country+ - ISO code of country (2 letters) like 'US' for United States
    #
    def invalid_for_country?(country)
      @analyzed_data.select {|iso2, data| country == iso2 &&
          data[:valid].any? }.empty?
    end

    private

    # Analyze current phone with provided data hash
    def analyze_phone(country_data)
      possible_countries = country_data.select do |data|
        @sanitized.start_with?(data[:countryCode])
      end
      if possible_countries.size > 1
        # Narrow an ambiguous prefix using each country's general pattern.
        # Fix: was select! — select! returns nil when nothing is rejected,
        # which made the .each below crash on nil.
        possible_countries = possible_countries.select do |data|
          country_code = data[:countryCode]
          general_description = data[:types][Core::GENERAL]
          if general_description
            pattern = general_description[:nationalNumberPattern]
            re = Regexp.new(country_code + pattern)
            re === @sanitized
          else
            false
          end
        end
      end
      possible_countries.each do |country_data|
        next if country_data[:types].empty?
        prefix_length = country_data[:countryCode].length
        @national_number = @sanitized[prefix_length..@sanitized.length]
        @analyzed_data[country_data[:id]] =
            get_all_number_types(@national_number, country_data[:types])
        @analyzed_data[country_data[:id]][:countryCode] = country_data[:countryCode]
        @analyzed_data[country_data[:id]][:format] =
            get_number_format(@national_number, country_data[:formats])
      end
    end

    # Returns all valid and possible phone number types for currently parsed
    # phone for provided data hash.
    def get_all_number_types(number, data)
      response = {valid: [], possible: []}
      return response if data[Core::GENERAL].empty?
      possible, national = get_patterns(data[Core::GENERAL])
      return response unless number_valid_and_possible?(number,
                                                        possible, national)
      same_fixed_and_mobile, additional_check =
          check_same_types(data[Core::FIXED_LINE], data[Core::MOBILE])
      (Core::TYPES.keys - Core::NOT_FOR_CHECK + additional_check).each do |type|
        next if data[type].nil? || data[type].empty?
        patterns = data[type]
        # Fixed and mobile share one pattern: report the merged pseudo-type.
        if same_fixed_and_mobile && additional_check.include?(type)
          type = Core::FIXED_OR_MOBILE
        end
        possible, national = get_patterns(patterns)
        if number_possible?(number, possible)
          response[:possible] << type
          response[:valid] << type if number_valid_and_possible?(number,
                                                                 possible,
                                                                 national)
        end
      end
      response
    end

    # Finds the formatting rule matching the national number, or a generic
    # fallback when no format data is available.
    def get_number_format(number, format_data)
      if format_data
        format_data.find { |f|
          Regexp.new("^" + f[:regex] + "$") === @national_number
        }
      else
        {
          # Fix: fallback format printed group 3 twice and dropped group 4.
          :regex => "(\\d+)(\\d{3})(\\d\\d)(\\d\\d)",
          :format => "$1 $2-$3-$4"
        }
      end
    end

    # Checks if fixed line pattern and mobile pattern are the same
    def check_same_types(fixed, mobile)
      if fixed == mobile
        [ true, [ Core::FIXED_LINE ] ]
      else
        [ false, [ Core::FIXED_LINE, Core::MOBILE ] ]
      end
    end

    # Returns [possible_pattern, national_pattern] for a type's pattern hash.
    # Fix: callers destructure as "possible, national = ..."; the previous
    # [national, possible] return order handed each caller the wrong pattern.
    def get_patterns(patterns)
      national_pattern = patterns[:nationalNumberPattern]
      possible_pattern = patterns[:possibleNumberPattern] || national_pattern
      [possible_pattern, national_pattern]
    end

    # Checks if passed number matches both valid and possible patterns
    def number_valid_and_possible?(number, possible_pattern, national_pattern)
      national_match = number.match(/^(?:#{national_pattern})$/)
      possible_match = number.match(/^(?:#{possible_pattern})$/)
      national_match && possible_match &&
          national_match.to_s.length == number.length &&
          possible_match.to_s.length == number.length
    end

    # Checks if passed number matches possible pattern
    def number_possible?(number, possible_pattern)
      possible_match = number.match(/^(?:#{possible_pattern})$/)
      possible_match && possible_match.to_s.length == number.length
    end

    # Sanitizes passed phone number. Returns only digits from passed string.
    def sanitize_phone(phone)
      phone && phone.gsub(/[^0-9]+/, '') || ''
    end
  end
end
Fix accidental select! in Phone#analyze_phone: select! returns nil when nothing is removed, so use select instead
module Phonelib
# class for parsed phone number, includes basic validation methods
class Phone
# defining reader methods for class variables
attr_reader :original, :sanitized, :national_number
# class initialization method
#
# ==== Attributes
#
# * +phone+ - Phone number for parsing
# * +country_data+ - Hash of data for parsing
#
# Stores the raw input, its digits-only form, and runs pattern analysis
# against +country_data+ (skipped entirely when no digits remain).
def initialize(phone, country_data)
@original = phone
@sanitized = sanitize_phone(@original)
@analyzed_data = {}
analyze_phone(country_data) unless @sanitized.empty?
end
# Returns all phone types that matched valid patterns
# Distinct phone types that matched a valid pattern, across all countries.
def types
  @analyzed_data.values.flat_map { |data| data[:valid] }.uniq
end
# Returns first phone type that matched
# First matched phone type, or nil when nothing matched.
def type
types.first
end
# Returns all countries that matched valid patterns
# ISO2 codes of every country whose patterns were analyzed for this number.
def countries
  @analyzed_data.keys
end
# Returns first country that matched valid patterns
# First matched country, or nil when nothing matched.
def country
countries.first
end
# Returns whether a current parsed phone number is valid
# True when at least one country recorded a valid pattern match.
def valid?
  @analyzed_data.any? { |_iso2, data| data[:valid].any? }
end
# Returns whether a current parsed phone number is invalid
def invalid?
!valid?
end
# Returns whether a current parsed phone number is possible
def possible?
@analyzed_data.select {|iso2, data| data[:possible].any? }.any?
end
# Returns whether a current parsed phone number is impossible
def impossible?
!possible?
end
def international
format = @analyzed_data[country][:format]
country_code = @analyzed_data[country][:countryCode]
regexp = Regexp.new(format[:regex])
format_string = format[:format].gsub(/(\d)\$/, "\\1 $")
md = regexp.match(@national_number)
national_part = format_string.gsub(/\$(\d)/){||md[$~[1].to_i]}
"+" + country_code + " " +
national_part.gsub(/^(\d{3,})\s+/, "(\\1) ")
end
# Returns whether a current parsed phone number is valid for specified
# country
#
# ==== Attributes
#
# * +country+ - ISO code of country (2 letters) like 'US' for United States
#
def valid_for_country?(country)
@analyzed_data.select {|iso2, data| country == iso2 &&
data[:valid].any? }.any?
end
# Returns whether a current parsed phone number is invalid for specified
# country
#
# ==== Attributes
#
# * +country+ - ISO code of country (2 letters) like 'US' for United States
#
def invalid_for_country?(country)
@analyzed_data.select {|iso2, data| country == iso2 &&
data[:valid].any? }.empty?
end
private
# Analyze current phone with provided data hash
def analyze_phone(country_data)
possible_countries = country_data.select do |data|
@sanitized.start_with?(data[:countryCode])
end
if possible_countries.size > 1
possible_countries = possible_countries.select do |data|
country_code = data[:countryCode]
general_description = data[:types][Core::GENERAL]
if general_description
pattern = general_description[:nationalNumberPattern]
re = Regexp.new(country_code + pattern)
re === @sanitized
else
false
end
end
end
possible_countries.each do |country_data|
next if country_data[:types].empty?
prefix_length = country_data[:countryCode].length
@national_number = @sanitized[prefix_length..@sanitized.length]
@analyzed_data[country_data[:id]] =
get_all_number_types(@national_number, country_data[:types])
@analyzed_data[country_data[:id]][:countryCode] = country_data[:countryCode]
@analyzed_data[country_data[:id]][:format] =
get_number_format(@national_number, country_data[:formats])
end
end
# Returns all valid and possible phone number types for currently parsed
# phone for provided data hash.
def get_all_number_types(number, data)
response = {valid: [], possible: []}
return response if data[Core::GENERAL].empty?
possible, national = get_patterns(data[Core::GENERAL])
return response unless number_valid_and_possible?(number,
possible, national)
same_fixed_and_mobile, additional_check =
check_same_types(data[Core::FIXED_LINE], data[Core::MOBILE])
(Core::TYPES.keys - Core::NOT_FOR_CHECK + additional_check).each do |type|
next if data[type].nil? || data[type].empty?
patterns = data[type]
if same_fixed_and_mobile && additional_check.include?(type)
type = Core::FIXED_OR_MOBILE
end
possible, national = get_patterns(patterns)
if number_possible?(number, possible)
response[:possible] << type
response[:valid] << type if number_valid_and_possible?(number,
possible,
national)
end
end
response
end
def get_number_format(number, format_data)
if format_data
format_data.find { |f|
Regexp.new("^" + f[:regex] + "$") === @national_number
}
else
{
:regex => "(\\d+)(\\d{3})(\\d\\d)(\\d\\d)",
:format => "$1 $2-$3-$3"
}
end
end
# Checks if fixed line pattern and mobile pattern are the same
def check_same_types(fixed, mobile)
if fixed == mobile
[ true, [ Core::FIXED_LINE ] ]
else
[ false, [ Core::FIXED_LINE, Core::MOBILE ] ]
end
end
def get_patterns(patterns)
national_pattern = patterns[:nationalNumberPattern]
possible_pattern = patterns[:possibleNumberPattern] || national_pattern
[national_pattern, possible_pattern]
end
# Checks if passed number matches both valid and possible patterns
def number_valid_and_possible?(number, possible_pattern, national_pattern)
national_match = number.match(/^(?:#{national_pattern})$/)
possible_match = number.match(/^(?:#{possible_pattern})$/)
national_match && possible_match &&
national_match.to_s.length == number.length &&
possible_match.to_s.length == number.length
end
# Checks if passed number matches possible pattern
def number_possible?(number, possible_pattern)
possible_match = number.match(/^(?:#{possible_pattern})$/)
possible_match && possible_match.to_s.length == number.length
end
# Sanitizes passed phone number. Returns only digits from passed string.
def sanitize_phone(phone)
phone && phone.gsub(/[^0-9]+/, '') || ''
end
end
end
|
require 'arena/configurable'
require 'httparty'
require 'json'

module Arena
  # Thin HTTP client for the Are.na API. Configuration (base domain, API
  # version, ...) comes from Arena::Configurable, with per-instance
  # overrides via the options hash.
  class Client
    include HTTParty
    include Arena::Configurable

    # Copies each configurable key from +options+, falling back to the
    # module-level value stored on Arena itself.
    def initialize(options={})
      Arena::Configurable.keys.each do |key|
        instance_variable_set(:"@#{key}", options[key] || Arena.instance_variable_get(:"@#{key}"))
      end
    end

    # Lists channels.
    def channels(options={})
      get_json "/channels", options
    end

    # Fetches a single channel by id or slug.
    def channel(id, options={})
      get_json "/channels/#{id}", options
    end

    # Fetches a single block by id.
    def block(id, options={})
      get_json "/blocks/#{id}", options
    end

    # Fetches a single user by id.
    def user(id, options={})
      get_json "/users/#{id}", options
    end

    # NOTE(review): hits the same endpoint as #user; likely intended to be
    # "/users/#{id}/channels" — confirm against the Are.na API docs before
    # changing.
    def user_channels(id, options={})
      get_json "/users/#{id}", options
    end

    # Searches for the given query.
    # BUGFIX: the parameter was named +option+ while the body referenced
    # +options+, so every call raised NameError.
    def search(query, options={})
      # NOTE(review): get_json already prefixes "/api/#{@api_version}", so
      # this absolute path looks doubled in the final URL — verify.
      get_json "/api/v2/search?q=#{query}", options
    end

    private

    # Performs a GET against the configured host and parses the JSON body.
    def get_json(path, opts)
      options = { :query => opts }
      JSON.parse(
        (self.class.get "http://#{@base_domain}/api/#{@api_version}#{path}", options).body
      )
    end
  end
end
Remove the root element from parsed JSON API responses
require 'arena/configurable'
require 'httparty'
require 'json'

module Arena
  # Thin HTTP client for the Are.na API. Responses are parsed as JSON and
  # unwrapped from their single root element before being returned.
  class Client
    include HTTParty
    include Arena::Configurable

    # Copies each configurable key from +options+, falling back to the
    # module-level value stored on Arena itself.
    def initialize(options={})
      Arena::Configurable.keys.each do |key|
        instance_variable_set(:"@#{key}", options[key] || Arena.instance_variable_get(:"@#{key}"))
      end
    end

    # Lists channels.
    def channels(options={})
      get_json "/channels", options
    end

    # Fetches a single channel by id or slug.
    def channel(id, options={})
      get_json "/channels/#{id}", options
    end

    # Fetches a single block by id.
    def block(id, options={})
      get_json "/blocks/#{id}", options
    end

    # Fetches a single user by id.
    def user(id, options={})
      get_json "/users/#{id}", options
    end

    # NOTE(review): hits the same endpoint as #user; likely intended to be
    # "/users/#{id}/channels" — confirm against the Are.na API docs before
    # changing.
    def user_channels(id, options={})
      get_json "/users/#{id}", options
    end

    # Searches for the given query.
    # BUGFIX: the parameter was named +option+ while the body referenced
    # +options+, so every call raised NameError.
    def search(query, options={})
      # NOTE(review): get_json already prefixes "/api/#{@api_version}", so
      # this absolute path looks doubled in the final URL — verify.
      get_json "/api/v2/search?q=#{query}", options
    end

    private

    # Performs a GET against the configured host, parses the JSON body and
    # strips the single root element.
    def get_json(path, opts)
      options = { :query => opts }
      remove_root(
        JSON.parse(
          (self.class.get "http://#{@base_domain}/api/#{@api_version}#{path}", options).body
        )
      )
    end

    # Returns the value of the first (assumed only) top-level key.
    # NOTE(review): assumes a non-empty Hash response — raises on [] or {}.
    def remove_root(object)
      object.first[1]
    end
  end
end
module Armg
  # Gem version string.
  VERSION = '0.4.4'
end
Bump up version [ci skip]
module Armg
  # Gem version string.
  VERSION = '0.5.0'
end
|
require 'optparse'

class Pickler
  # Command-line front end for pickler. Each subcommand is a subclass of
  # Runner::Base generated via the `command` class method below.
  class Runner
    # Shared behavior for all subcommands: option parsing, arity checking,
    # colorized summaries and pager-backed output.
    class Base
      attr_reader :argv

      def initialize(argv)
        @argv = argv
        @tty = $stdout.tty?
        @opts = OptionParser.new
        @opts.version = "0.0"
        @opts.banner = "Usage: pickler #{self.class.command_name} #{self.class.banner_arguments}"
        # NOTE(review): reaches into OptionParser internals to install a
        # custom --help switch that appends the command description.
        @opts.base.long["help"] = OptionParser::Switch::NoArgument.new do
          help = @opts.help.chomp.chomp + "\n"
          help += "\n#{self.class.description}" if self.class.description
          puts help
          @exit = 0
        end
        @opts.separator("")
      end

      # Accumulated option-argument arrays declared with `on`.
      def self.options
        @options ||= []
      end

      # Declares a command-line option; the handler block becomes an
      # instance method keyed by the argument array's object_id.
      def self.on(*args, &block)
        options << args
        define_method("option_#{args.object_id}", &block)
      end

      # Getter/setter: with a value, sets the usage banner's argument text;
      # without, returns it (defaulting on the process block's arity).
      def self.banner_arguments(value = nil)
        if value
          @banner_arguments = value
        else
          @banner_arguments || (arity.zero? ? "" : "...")
        end
      end

      # Getter/setter for the one-line command summary.
      def self.summary(value = nil)
        if value
          @summary = value
        else
          @summary
        end
      end

      # Getter/setter for the long description (defaults to the summary).
      def self.description(value = nil)
        if value
          @description = value
        else
          @description || "#@summary."
        end
      end

      # CamelCase class name => dashed command name.
      def self.command_name
        name.split('::').last.gsub(/(.)([A-Z])/) {"#$1-#$2"}.downcase
      end

      def self.method_name
        command_name.gsub('-','_')
      end

      # Defines the command's body as the #process instance method.
      def self.process(&block)
        define_method(:process, &block)
      end

      def self.arity
        instance_method(:process).arity
      end

      def arity
        self.class.arity
      end

      # Lazily constructed Pickler instance rooted at the current directory.
      def pickler
        @pickler ||= Pickler.new(Dir.getwd)
      end

      def abort(message)
        raise Error, message
      end

      def too_many
        abort "too many arguments"
      end

      # Parses options, validates the argument count against the process
      # block's arity, then dispatches to #process.
      def run
        self.class.options.each do |arguments|
          @opts.on(*arguments, &method("option_#{arguments.object_id}"))
        end
        begin
          @opts.parse!(@argv)
        rescue OptionParser::InvalidOption
          abort $!.message
        end
        return @exit if @exit
        # Negative arity means optional trailing args; derive the minimum.
        minimum = arity < 0 ? -1 - arity : arity
        if arity >= 0 && arity < @argv.size
          too_many
        elsif minimum > @argv.size
          abort "not enough arguments"
        end
        process(*@argv)
      end

      # Default behavior: forward to the same-named method on Pickler.
      def process(*argv)
        pickler.send(self.class.method_name,*argv)
      end

      # Honors the "color" config ("always"/"never"), else colors on TTYs
      # outside Windows.
      def color?
        case pickler.config["color"]
        when "always" then true
        when "never" then false
        else
          @tty && RUBY_PLATFORM !~ /mswin|mingw/
        end
      end

      # Wraps +string+ in the given ANSI escape code when color is enabled.
      def colorize(code, string)
        if color?
          "\e[#{code}m#{string}\e[00m"
        else
          string
        end
      end

      # Prints a one-line story summary: id, state symbol, type symbol, name.
      def puts_summary(story)
        summary = "%6d " % story.id
        type = story.estimate || TYPE_SYMBOLS[story.story_type]
        state = STATE_SYMBOLS[story.current_state]
        summary << colorize("3#{STATE_COLORS[story.current_state]}", state) << ' '
        summary << colorize("01;3#{TYPE_COLORS[story.story_type]}", type) << ' '
        summary << story.name
        puts summary
      end

      # Routes the block's output through the configured pager on a TTY,
      # always restoring $stdout afterwards.
      def paginated_output
        stdout = $stdout
        if @tty && pager = pickler.config["pager"]
          # Modeled after git
          ENV["LESS"] ||= "FRSX"
          IO.popen(pager,"w") do |io|
            $stdout = io
            yield
          end
        else
          yield
        end
      ensure
        $stdout = stdout
      end
    end

    # Looks up the command class for a dashed command name, or nil.
    def self.[](command)
      klass_name = command.to_s.capitalize.gsub(/[-_](.)/) { $1.upcase }
      if klass_name =~ /^[A-Z]\w*$/ && const_defined?(klass_name)
        klass = const_get(klass_name)
        if Class === klass && klass < Base
          return klass
        end
      end
    end

    # All defined command classes, sorted by command name.
    def self.commands
      constants.map {|c| Runner.const_get(c)}.select {|c| Class === c && c < Runner::Base}.sort_by {|r| r.command_name}.uniq
    end

    # DSL: defines a new subcommand class from the given block.
    def self.command(name, &block)
      const_set(name.to_s.capitalize.gsub(/[-_](.)/) { $1.upcase },Class.new(Base,&block))
    end

    command :show do
      banner_arguments "<story>"
      summary "Show details for a story"
      process do |*args|
        case args.size
        when 0
          puts "#{pickler.project_id} #{pickler.project.name}"
        when 1
          story = pickler.story(args.first)
          paginated_output do
            puts story
          end
        else
          too_many
        end
      end
    end

    command :search do
      banner_arguments "[query]"
      summary "List all stories matching a query"

      # Query modifiers collected from the filter options below.
      def modifications
        @modifications ||= {}
      end

      [:label, :type, :state].each do |o|
        on "--#{o} #{o.to_s.upcase}" do |value|
          modifications[o] = value
        end
      end

      [:requester, :owner, :mywork].each do |o|
        on "--#{o} USERNAME" do |value|
          modifications[o] = value
        end
      end

      on "--[no-]includedone", "include accepted stories" do |value|
        modifications[:includedone] = value
      end

      attr_writer :current
      on "-c", "--current", "filter results to current iteration" do |b|
        self.current = b
      end

      process do |*argv|
        argv << modifications unless modifications.empty?
        if argv == [{:includedone => true}]
          # Bypass the 200 search results limitation
          stories = pickler.project.stories
        else
          stories = pickler.project.stories(*argv)
        end
        stories.reject! {|s| !s.current?} if argv.empty? || @current
        paginated_output do
          stories.each do |story|
            puts_summary story
          end
        end
      end
    end

    command :push do
      banner_arguments "[story] ..."
      summary "Upload stories"
      description <<-EOF
Upload the given story or all features with a tracker url in a comment on the
first line.
      EOF
      process do |*args|
        args.replace(pickler.local_features) if args.empty?
        args.each do |arg|
          pickler.feature(arg).push
        end
      end
    end

    command :pull do
      banner_arguments "[story] ..."
      summary "Download stories"
      description <<-EOF
Download the given story or all well formed stories to the features/ directory.
Previously unseen stories will be given a numeric filename that you are
encouraged to change.
      EOF
      process do |*args|
        args.replace(pickler.scenario_features) if args.empty?
        args.each do |arg|
          pickler.feature(arg).pull
        end
      end
    end

    command :start do
      banner_arguments "<story> [basename]"
      summary "Pull a story and mark it started"
      description <<-EOF
Pull a given story and change its state to started.  If basename is given
and no local file exists, features/basename.feature will be created in lieu
of features/id.feature.
      EOF
      process do |story, *args|
        pickler.feature(story).start(args.first)
      end
    end

    command :finish do
      banner_arguments "<story>"
      summary "Push a story and mark it finished"
      process do |story|
        pickler.feature(story).finish
      end
    end

    command :deliver do
      banner_arguments "[story] ..."
      summary "Mark stories delivered"
      on "--all-finished", "deliver all finished stories" do
        @all = true
      end
      process do |*args|
        if @all
          pickler.deliver_all_finished_stories
        end
        args.each do |arg|
          pickler.story(arg).transition!('delivered')
        end
      end
    end

    command :browse do
      banner_arguments "[story]"
      summary "Open a story in the web browser"
      description <<-EOF
Open project or a story in the web browser.
Requires launchy (gem install launchy).
      EOF
      on "--dashboard" do
        @special = "dashboard"
      end
      on "--faq" do
        @special = "help"
      end
      on "--profile", "get your API Token here" do
        @special = "profile"
      end
      on "--time", "not publicly available" do
        @special = "time_shifts?project=#{pickler.project_id}"
      end
      process do |*args|
        too_many if args.size > 1 || @special && args.first
        if args.first
          url = pickler.story(args.first).url
        elsif @special
          url = "http://www.pivotaltracker.com/#@special"
        else
          url = "http://www.pivotaltracker.com/projects/#{pickler.project_id}/stories"
        end
        # Loaded lazily so launchy is only required when browsing.
        require 'launchy'
        Launchy.open(url)
      end
    end

    def initialize(argv)
      @argv = argv
    end

    # ANSI color indexes used by the STATE/TYPE tables below.
    COLORS = {
      :black => 0,
      :red => 1,
      :green => 2,
      :yellow => 3,
      :blue => 4,
      :magenta => 5,
      :cyan => 6,
      :white => 7
    }

    STATE_COLORS = {
      nil => COLORS[:black],
      "rejected" => COLORS[:red],
      "accepted" => COLORS[:green],
      "delivered" => COLORS[:yellow],
      "unscheduled" => COLORS[:white],
      "started" => COLORS[:magenta],
      "finished" => COLORS[:cyan],
      "unstarted" => COLORS[:blue]
    }

    # Emoticon per story state shown in summaries.
    STATE_SYMBOLS = {
      "unscheduled" => "  ",
      "unstarted" => ":|",
      "started" => ":/",
      "finished" => ":)",
      "delivered" => ";)",
      "rejected" => ":(",
      "accepted" => ":D"
    }

    TYPE_COLORS = {
      'chore' => COLORS[:blue],
      'feature' => COLORS[:magenta],
      'bug' => COLORS[:red],
      'release' => COLORS[:cyan]
    }

    TYPE_SYMBOLS = {
      "feature" => "*",
      "chore" => "%",
      "release" => "!",
      "bug" => "/"
    }

    # Entry point: dispatches the first CLI argument to a command class,
    # printing usage when no (or a help) command is given.
    def run
      command = @argv.shift
      if klass = self.class[command]
        result = klass.new(@argv).run
        exit result.respond_to?(:to_int) ? result.to_int : 0
      elsif ['help', '--help', '-h', '', nil].include?(command)
        puts "usage: pickler <command> [options] [arguments]"
        puts
        puts "Commands:"
        self.class.commands.each do |command|
          puts "  %-19s %s" % [command.command_name, command.summary]
        end
        puts
        puts "Run pickler <command> --help for help with a given command"
      else
        raise Error, "Unknown pickler command #{command}"
      end
    rescue Pickler::Error
      $stderr.puts "#$!"
      exit 1
    rescue Interrupt
      $stderr.puts "Interrupted!"
      exit 130
    end
  end
end
Add `pickler unstart` and `pickler unschedule` commands
require 'optparse'

class Pickler
  # Command-line front end for pickler. Each subcommand is a subclass of
  # Runner::Base generated via the `command` class method below.
  class Runner
    # Shared behavior for all subcommands: option parsing, arity checking,
    # colorized summaries and pager-backed output.
    class Base
      attr_reader :argv

      def initialize(argv)
        @argv = argv
        @tty = $stdout.tty?
        @opts = OptionParser.new
        @opts.version = "0.0"
        @opts.banner = "Usage: pickler #{self.class.command_name} #{self.class.banner_arguments}"
        # NOTE(review): reaches into OptionParser internals to install a
        # custom --help switch that appends the command description.
        @opts.base.long["help"] = OptionParser::Switch::NoArgument.new do
          help = @opts.help.chomp.chomp + "\n"
          help += "\n#{self.class.description}" if self.class.description
          puts help
          @exit = 0
        end
        @opts.separator("")
      end

      # Accumulated option-argument arrays declared with `on`.
      def self.options
        @options ||= []
      end

      # Declares a command-line option; the handler block becomes an
      # instance method keyed by the argument array's object_id.
      def self.on(*args, &block)
        options << args
        define_method("option_#{args.object_id}", &block)
      end

      # Getter/setter: with a value, sets the usage banner's argument text;
      # without, returns it (defaulting on the process block's arity).
      def self.banner_arguments(value = nil)
        if value
          @banner_arguments = value
        else
          @banner_arguments || (arity.zero? ? "" : "...")
        end
      end

      # Getter/setter for the one-line command summary.
      def self.summary(value = nil)
        if value
          @summary = value
        else
          @summary
        end
      end

      # Getter/setter for the long description (defaults to the summary).
      def self.description(value = nil)
        if value
          @description = value
        else
          @description || "#@summary."
        end
      end

      # CamelCase class name => dashed command name.
      def self.command_name
        name.split('::').last.gsub(/(.)([A-Z])/) {"#$1-#$2"}.downcase
      end

      def self.method_name
        command_name.gsub('-','_')
      end

      # Defines the command's body as the #process instance method.
      def self.process(&block)
        define_method(:process, &block)
      end

      def self.arity
        instance_method(:process).arity
      end

      def arity
        self.class.arity
      end

      # Lazily constructed Pickler instance rooted at the current directory.
      def pickler
        @pickler ||= Pickler.new(Dir.getwd)
      end

      def abort(message)
        raise Error, message
      end

      def too_many
        abort "too many arguments"
      end

      # Parses options, validates the argument count against the process
      # block's arity, then dispatches to #process.
      def run
        self.class.options.each do |arguments|
          @opts.on(*arguments, &method("option_#{arguments.object_id}"))
        end
        begin
          @opts.parse!(@argv)
        rescue OptionParser::InvalidOption
          abort $!.message
        end
        return @exit if @exit
        # Negative arity means optional trailing args; derive the minimum.
        minimum = arity < 0 ? -1 - arity : arity
        if arity >= 0 && arity < @argv.size
          too_many
        elsif minimum > @argv.size
          abort "not enough arguments"
        end
        process(*@argv)
      end

      # Default behavior: forward to the same-named method on Pickler.
      def process(*argv)
        pickler.send(self.class.method_name,*argv)
      end

      # Honors the "color" config ("always"/"never"), else colors on TTYs
      # outside Windows.
      def color?
        case pickler.config["color"]
        when "always" then true
        when "never" then false
        else
          @tty && RUBY_PLATFORM !~ /mswin|mingw/
        end
      end

      # Wraps +string+ in the given ANSI escape code when color is enabled.
      def colorize(code, string)
        if color?
          "\e[#{code}m#{string}\e[00m"
        else
          string
        end
      end

      # Prints a one-line story summary: id, state symbol, type symbol, name.
      def puts_summary(story)
        summary = "%6d " % story.id
        type = story.estimate || TYPE_SYMBOLS[story.story_type]
        state = STATE_SYMBOLS[story.current_state]
        summary << colorize("3#{STATE_COLORS[story.current_state]}", state) << ' '
        summary << colorize("01;3#{TYPE_COLORS[story.story_type]}", type) << ' '
        summary << story.name
        puts summary
      end

      # Routes the block's output through the configured pager on a TTY,
      # always restoring $stdout afterwards.
      def paginated_output
        stdout = $stdout
        if @tty && pager = pickler.config["pager"]
          # Modeled after git
          ENV["LESS"] ||= "FRSX"
          IO.popen(pager,"w") do |io|
            $stdout = io
            yield
          end
        else
          yield
        end
      ensure
        $stdout = stdout
      end
    end

    # Looks up the command class for a dashed command name, or nil.
    def self.[](command)
      klass_name = command.to_s.capitalize.gsub(/[-_](.)/) { $1.upcase }
      if klass_name =~ /^[A-Z]\w*$/ && const_defined?(klass_name)
        klass = const_get(klass_name)
        if Class === klass && klass < Base
          return klass
        end
      end
    end

    # All defined command classes, sorted by command name.
    def self.commands
      constants.map {|c| Runner.const_get(c)}.select {|c| Class === c && c < Runner::Base}.sort_by {|r| r.command_name}.uniq
    end

    # DSL: defines a new subcommand class from the given block.
    def self.command(name, &block)
      const_set(name.to_s.capitalize.gsub(/[-_](.)/) { $1.upcase },Class.new(Base,&block))
    end

    command :show do
      banner_arguments "<story>"
      summary "Show details for a story"
      process do |*args|
        case args.size
        when 0
          puts "#{pickler.project_id} #{pickler.project.name}"
        when 1
          story = pickler.story(args.first)
          paginated_output do
            puts story
          end
        else
          too_many
        end
      end
    end

    command :search do
      banner_arguments "[query]"
      summary "List all stories matching a query"

      # Query modifiers collected from the filter options below.
      def modifications
        @modifications ||= {}
      end

      [:label, :type, :state].each do |o|
        on "--#{o} #{o.to_s.upcase}" do |value|
          modifications[o] = value
        end
      end

      [:requester, :owner, :mywork].each do |o|
        on "--#{o} USERNAME" do |value|
          modifications[o] = value
        end
      end

      on "--[no-]includedone", "include accepted stories" do |value|
        modifications[:includedone] = value
      end

      attr_writer :current
      on "-c", "--current", "filter results to current iteration" do |b|
        self.current = b
      end

      process do |*argv|
        argv << modifications unless modifications.empty?
        if argv == [{:includedone => true}]
          # Bypass the 200 search results limitation
          stories = pickler.project.stories
        else
          stories = pickler.project.stories(*argv)
        end
        stories.reject! {|s| !s.current?} if argv.empty? || @current
        paginated_output do
          stories.each do |story|
            puts_summary story
          end
        end
      end
    end

    command :push do
      banner_arguments "[story] ..."
      summary "Upload stories"
      description <<-EOF
Upload the given story or all features with a tracker url in a comment on the
first line.
      EOF
      process do |*args|
        args.replace(pickler.local_features) if args.empty?
        args.each do |arg|
          pickler.feature(arg).push
        end
      end
    end

    command :pull do
      banner_arguments "[story] ..."
      summary "Download stories"
      description <<-EOF
Download the given story or all well formed stories to the features/ directory.
Previously unseen stories will be given a numeric filename that you are
encouraged to change.
      EOF
      process do |*args|
        args.replace(pickler.scenario_features) if args.empty?
        args.each do |arg|
          pickler.feature(arg).pull
        end
      end
    end

    command :start do
      banner_arguments "<story> [basename]"
      summary "Pull a story and mark it started"
      description <<-EOF
Pull a given story and change its state to started.  If basename is given
and no local file exists, features/basename.feature will be created in lieu
of features/id.feature.
      EOF
      process do |story, *args|
        pickler.feature(story).start(args.first)
      end
    end

    command :finish do
      banner_arguments "<story>"
      summary "Push a story and mark it finished"
      process do |story|
        pickler.feature(story).finish
      end
    end

    command :deliver do
      banner_arguments "[story] ..."
      summary "Mark stories delivered"
      on "--all-finished", "deliver all finished stories" do
        @all = true
      end
      process do |*args|
        if @all
          pickler.deliver_all_finished_stories
        end
        args.each do |arg|
          pickler.story(arg).transition!('delivered')
        end
      end
    end

    command :unstart do
      banner_arguments "[story] ..."
      summary "Mark stories unstarted"
      on "--all-started", "unstart all started stories" do
        @all = true
      end
      process do |*args|
        if @all
          pickler.project.stories(:state => "started").each do |story|
            story.transition!('unstarted')
          end
        end
        args.each do |arg|
          pickler.story(arg).transition!('unstarted')
        end
      end
    end

    command :unschedule do
      banner_arguments "[story] ..."
      summary "Move stories to icebox"
      process do |*args|
        args.each do |arg|
          pickler.story(arg).transition!('unscheduled')
        end
      end
    end

    command :browse do
      banner_arguments "[story]"
      summary "Open a story in the web browser"
      description <<-EOF
Open project or a story in the web browser.
Requires launchy (gem install launchy).
      EOF
      on "--dashboard" do
        @special = "dashboard"
      end
      on "--faq" do
        @special = "help"
      end
      on "--profile", "get your API Token here" do
        @special = "profile"
      end
      on "--time", "not publicly available" do
        @special = "time_shifts?project=#{pickler.project_id}"
      end
      process do |*args|
        too_many if args.size > 1 || @special && args.first
        if args.first
          url = pickler.story(args.first).url
        elsif @special
          url = "http://www.pivotaltracker.com/#@special"
        else
          url = "http://www.pivotaltracker.com/projects/#{pickler.project_id}/stories"
        end
        # Loaded lazily so launchy is only required when browsing.
        require 'launchy'
        Launchy.open(url)
      end
    end

    def initialize(argv)
      @argv = argv
    end

    # ANSI color indexes used by the STATE/TYPE tables below.
    COLORS = {
      :black => 0,
      :red => 1,
      :green => 2,
      :yellow => 3,
      :blue => 4,
      :magenta => 5,
      :cyan => 6,
      :white => 7
    }

    STATE_COLORS = {
      nil => COLORS[:black],
      "rejected" => COLORS[:red],
      "accepted" => COLORS[:green],
      "delivered" => COLORS[:yellow],
      "unscheduled" => COLORS[:white],
      "started" => COLORS[:magenta],
      "finished" => COLORS[:cyan],
      "unstarted" => COLORS[:blue]
    }

    # Emoticon per story state shown in summaries.
    STATE_SYMBOLS = {
      "unscheduled" => "  ",
      "unstarted" => ":|",
      "started" => ":/",
      "finished" => ":)",
      "delivered" => ";)",
      "rejected" => ":(",
      "accepted" => ":D"
    }

    TYPE_COLORS = {
      'chore' => COLORS[:blue],
      'feature' => COLORS[:magenta],
      'bug' => COLORS[:red],
      'release' => COLORS[:cyan]
    }

    TYPE_SYMBOLS = {
      "feature" => "*",
      "chore" => "%",
      "release" => "!",
      "bug" => "/"
    }

    # Entry point: dispatches the first CLI argument to a command class,
    # printing usage when no (or a help) command is given.
    def run
      command = @argv.shift
      if klass = self.class[command]
        result = klass.new(@argv).run
        exit result.respond_to?(:to_int) ? result.to_int : 0
      elsif ['help', '--help', '-h', '', nil].include?(command)
        puts "usage: pickler <command> [options] [arguments]"
        puts
        puts "Commands:"
        self.class.commands.each do |command|
          puts "  %-19s %s" % [command.command_name, command.summary]
        end
        puts
        puts "Run pickler <command> --help for help with a given command"
      else
        raise Error, "Unknown pickler command #{command}"
      end
    rescue Pickler::Error
      $stderr.puts "#$!"
      exit 1
    rescue Interrupt
      $stderr.puts "Interrupted!"
      exit 130
    end
  end
end
|
module Piculet
  # Applies/export EC2 security-group definitions described by the Piculet
  # DSL, reconciling them against the live AWS state.
  class Client
    include Logger::ClientHelper

    def initialize(options = {})
      @options = OpenStruct.new(options)
      @options_hash = options
      @options.ec2 = AWS::EC2.new
    end

    # Applies the DSL in +file+ to AWS. Returns whether anything changed.
    def apply(file)
      # Force credential/owner resolution before memoizing API calls.
      @options.ec2.owner_id
      AWS.memoize { walk(file) }
    end

    # Exports the live security groups; yields the raw export and a
    # converter proc, or returns the (optionally converted) export.
    def export(options = {})
      exported = AWS.memoize do
        Exporter.export(@options.ec2, @options_hash.merge(options))
      end
      converter = proc do |src|
        if options[:without_convert]
          exported
        else
          DSL.convert(src, @options.ec2.owner_id)
        end
      end
      if block_given?
        yield(exported, converter)
      else
        converter.call(exported)
      end
    end

    private

    # Loads DSL source from a path or an IO-like object.
    # NOTE(review): Kernel#open executes a subcommand for paths starting
    # with "|" — consider File.open if paths can be untrusted.
    def load_file(file)
      if file.kind_of?(String)
        open(file) do |f|
          load_by_format(f.read, file)
        end
      elsif file.respond_to?(:read)
        load_by_format(file.read, file.path)
      else
        raise TypeError, "can't convert #{file} into File"
      end
    end

    # Converts JSON input to DSL source when requested, then evaluates it.
    def load_by_format(src, path)
      if @options.format == :json
        src = load_json(src, path)
      end
      DSL.define(src, path).result
    end

    # Parses JSON and converts it to DSL source. The empty-string key maps
    # EC2-Classic (nil vpc) groups.
    def load_json(json, path)
      json = JSON.parse(json, :symbolize_names => true)
      if json.has_key?(:'')
        json[nil] = json.delete(:'')
      end
      DSL.convert(json, @options.ec2.owner_id)
    end

    # Walks the DSL, reconciling each EC2 (classic or VPC) context.
    def walk(file)
      dsl = load_file(file)
      dsl_ec2s = dsl.ec2s
      ec2 = EC2Wrapper.new(@options.ec2, @options)
      # Live groups bucketed by vpc_id (nil for EC2-Classic).
      aws_ec2s = collect_to_hash(ec2.security_groups, :has_many => true) do |item|
        item.vpc? ? item.vpc_id : nil
      end
      dsl_ec2s.each do |vpc, ec2_dsl|
        if @options.ec2s
          next unless @options.ec2s.any? {|i| (i == 'classic' and vpc.nil?) or i == vpc }
        end
        ec2_aws = aws_ec2s[vpc]
        if ec2_aws
          walk_ec2(vpc, ec2_dsl, ec2_aws, ec2.security_groups)
        else
          log(:warn, "EC2 `#{vpc || :classic}` is not found", :yellow)
        end
      end
      ec2.updated?
    end

    # Reconciles one EC2 context: creates missing groups, updates existing
    # ones, then strips permissions and deletes groups absent from the DSL.
    def walk_ec2(vpc, ec2_dsl, ec2_aws, collection_api)
      sg_list_dsl = collect_to_hash(ec2_dsl.security_groups, :name)
      sg_list_aws = collect_to_hash(ec2_aws, :name)

      # Pass 1: create groups defined in the DSL but missing from AWS.
      sg_list_dsl.each do |key, sg_dsl|
        name = key[0]
        if @options.sg_names
          next unless @options.sg_names.include?(name)
        end
        if @options.exclude_sgs
          next if @options.exclude_sgs.any? {|regex| name =~ regex}
        end
        sg_aws = sg_list_aws[key]
        unless sg_aws
          sg_aws = collection_api.create(name, :vpc => vpc, :description => sg_dsl.description)
          if vpc and sg_dsl.egress.empty?
            log(:warn, '`egress any 0.0.0.0/0` is implicitly defined', :yellow)
          end
          sg_list_aws[key] = sg_aws
        end
      end

      # Pass 2: reconcile every DSL-defined group (removing it from the
      # AWS list so only orphans remain afterwards).
      sg_list_dsl.each do |key, sg_dsl|
        name = key[0]
        if @options.sg_names
          next unless @options.sg_names.include?(name)
        end
        if @options.exclude_sgs
          next if @options.exclude_sgs.any? {|regex| name =~ regex}
        end
        sg_aws = sg_list_aws.delete(key)
        walk_security_group(sg_dsl, sg_aws)
      end

      # Pass 3: strip permissions from orphaned groups first (cross-group
      # references would otherwise block deletion)...
      sg_list_aws.each do |key, sg_aws|
        name = key[0]
        if @options.sg_names
          next unless @options.sg_names.include?(name)
        end
        if @options.exclude_sgs
          next if @options.exclude_sgs.any? {|regex| name =~ regex}
        end
        sg_aws.ingress_ip_permissions.each {|i| i.delete }
        sg_aws.egress_ip_permissions.each {|i| i.delete } if vpc
      end

      # ...then delete the orphaned groups themselves.
      sg_list_aws.each do |key, sg_aws|
        name = key[0]
        if @options.sg_names
          next unless @options.sg_names.include?(name)
        end
        if @options.exclude_sgs
          next if @options.exclude_sgs.any? {|regex| name =~ regex}
        end
        sg_aws.delete
      end
    end

    # Updates a single group's attributes, ingress and (for VPC) egress.
    def walk_security_group(security_group_dsl, security_group_aws)
      unless security_group_aws.eql?(security_group_dsl)
        security_group_aws.update(security_group_dsl)
      end
      walk_permissions(
        security_group_dsl.ingress,
        security_group_aws.ingress_ip_permissions)
      if security_group_aws.vpc?
        walk_permissions(
          security_group_dsl.egress,
          security_group_aws.egress_ip_permissions)
      end
    end

    # Reconciles one direction of permissions keyed by [protocol, port_range].
    def walk_permissions(permissions_dsl, permissions_aws)
      perm_list_dsl = collect_to_hash(permissions_dsl, :protocol, :port_range)
      perm_list_aws = collect_to_hash(permissions_aws, :protocol, :port_range)

      perm_list_aws.each do |key, perm_aws|
        perm_dsl = perm_list_dsl.delete(key)
        if perm_dsl
          unless perm_aws.eql?(perm_dsl)
            perm_aws.update(perm_dsl)
          end
        else
          perm_aws.delete
        end
      end

      # BUGFIX: destructure the key of *this* loop. Previously `protocol`
      # and `port_range` leaked from the last iteration of the AWS loop
      # above, so new permissions were created under the wrong key.
      perm_list_dsl.each do |key, perm_dsl|
        protocol, port_range = key
        permissions_aws.create(protocol, port_range, perm_dsl)
      end
    end

    # Indexes +collection+ by the given attributes (or the block's result);
    # with :has_many, values are arrays of items sharing a key.
    def collect_to_hash(collection, *key_attrs)
      options = key_attrs.last.kind_of?(Hash) ? key_attrs.pop : {}
      hash = {}
      collection.each do |item|
        key = block_given? ? yield(item) : key_attrs.map {|k| item.send(k) }
        if options[:has_many]
          hash[key] ||= []
          hash[key] << item
        else
          hash[key] = item
        end
      end
      return hash
    end
  end # Client
end # Piculet
Fix create permission
module Piculet
  # Applies/export EC2 security-group definitions described by the Piculet
  # DSL, reconciling them against the live AWS state.
  class Client
    include Logger::ClientHelper

    def initialize(options = {})
      @options = OpenStruct.new(options)
      @options_hash = options
      @options.ec2 = AWS::EC2.new
    end

    # Applies the DSL in +file+ to AWS. Returns whether anything changed.
    def apply(file)
      # Force credential/owner resolution before memoizing API calls.
      @options.ec2.owner_id
      AWS.memoize { walk(file) }
    end

    # Exports the live security groups; yields the raw export and a
    # converter proc, or returns the (optionally converted) export.
    def export(options = {})
      exported = AWS.memoize do
        Exporter.export(@options.ec2, @options_hash.merge(options))
      end
      converter = proc do |src|
        if options[:without_convert]
          exported
        else
          DSL.convert(src, @options.ec2.owner_id)
        end
      end
      if block_given?
        yield(exported, converter)
      else
        converter.call(exported)
      end
    end

    private

    # Loads DSL source from a path or an IO-like object.
    # NOTE(review): Kernel#open executes a subcommand for paths starting
    # with "|" — consider File.open if paths can be untrusted.
    def load_file(file)
      if file.kind_of?(String)
        open(file) do |f|
          load_by_format(f.read, file)
        end
      elsif file.respond_to?(:read)
        load_by_format(file.read, file.path)
      else
        raise TypeError, "can't convert #{file} into File"
      end
    end

    # Converts JSON input to DSL source when requested, then evaluates it.
    def load_by_format(src, path)
      if @options.format == :json
        src = load_json(src, path)
      end
      DSL.define(src, path).result
    end

    # Parses JSON and converts it to DSL source. The empty-string key maps
    # EC2-Classic (nil vpc) groups.
    def load_json(json, path)
      json = JSON.parse(json, :symbolize_names => true)
      if json.has_key?(:'')
        json[nil] = json.delete(:'')
      end
      DSL.convert(json, @options.ec2.owner_id)
    end

    # Walks the DSL, reconciling each EC2 (classic or VPC) context.
    def walk(file)
      dsl = load_file(file)
      dsl_ec2s = dsl.ec2s
      ec2 = EC2Wrapper.new(@options.ec2, @options)
      # Live groups bucketed by vpc_id (nil for EC2-Classic).
      aws_ec2s = collect_to_hash(ec2.security_groups, :has_many => true) do |item|
        item.vpc? ? item.vpc_id : nil
      end
      dsl_ec2s.each do |vpc, ec2_dsl|
        if @options.ec2s
          next unless @options.ec2s.any? {|i| (i == 'classic' and vpc.nil?) or i == vpc }
        end
        ec2_aws = aws_ec2s[vpc]
        if ec2_aws
          walk_ec2(vpc, ec2_dsl, ec2_aws, ec2.security_groups)
        else
          log(:warn, "EC2 `#{vpc || :classic}` is not found", :yellow)
        end
      end
      ec2.updated?
    end

    # Reconciles one EC2 context: creates missing groups, updates existing
    # ones, then strips permissions and deletes groups absent from the DSL.
    def walk_ec2(vpc, ec2_dsl, ec2_aws, collection_api)
      sg_list_dsl = collect_to_hash(ec2_dsl.security_groups, :name)
      sg_list_aws = collect_to_hash(ec2_aws, :name)

      # Pass 1: create groups defined in the DSL but missing from AWS.
      sg_list_dsl.each do |key, sg_dsl|
        name = key[0]
        if @options.sg_names
          next unless @options.sg_names.include?(name)
        end
        if @options.exclude_sgs
          next if @options.exclude_sgs.any? {|regex| name =~ regex}
        end
        sg_aws = sg_list_aws[key]
        unless sg_aws
          sg_aws = collection_api.create(name, :vpc => vpc, :description => sg_dsl.description)
          if vpc and sg_dsl.egress.empty?
            log(:warn, '`egress any 0.0.0.0/0` is implicitly defined', :yellow)
          end
          sg_list_aws[key] = sg_aws
        end
      end

      # Pass 2: reconcile every DSL-defined group (removing it from the
      # AWS list so only orphans remain afterwards).
      sg_list_dsl.each do |key, sg_dsl|
        name = key[0]
        if @options.sg_names
          next unless @options.sg_names.include?(name)
        end
        if @options.exclude_sgs
          next if @options.exclude_sgs.any? {|regex| name =~ regex}
        end
        sg_aws = sg_list_aws.delete(key)
        walk_security_group(sg_dsl, sg_aws)
      end

      # Pass 3: strip permissions from orphaned groups first (cross-group
      # references would otherwise block deletion)...
      sg_list_aws.each do |key, sg_aws|
        name = key[0]
        if @options.sg_names
          next unless @options.sg_names.include?(name)
        end
        if @options.exclude_sgs
          next if @options.exclude_sgs.any? {|regex| name =~ regex}
        end
        sg_aws.ingress_ip_permissions.each {|i| i.delete }
        sg_aws.egress_ip_permissions.each {|i| i.delete } if vpc
      end

      # ...then delete the orphaned groups themselves.
      sg_list_aws.each do |key, sg_aws|
        name = key[0]
        if @options.sg_names
          next unless @options.sg_names.include?(name)
        end
        if @options.exclude_sgs
          next if @options.exclude_sgs.any? {|regex| name =~ regex}
        end
        sg_aws.delete
      end
    end

    # Updates a single group's attributes, ingress and (for VPC) egress.
    def walk_security_group(security_group_dsl, security_group_aws)
      unless security_group_aws.eql?(security_group_dsl)
        security_group_aws.update(security_group_dsl)
      end
      walk_permissions(
        security_group_dsl.ingress,
        security_group_aws.ingress_ip_permissions)
      if security_group_aws.vpc?
        walk_permissions(
          security_group_dsl.egress,
          security_group_aws.egress_ip_permissions)
      end
    end

    # Reconciles one direction of permissions keyed by [protocol, port_range]:
    # updates or deletes existing AWS permissions, then creates the rest.
    def walk_permissions(permissions_dsl, permissions_aws)
      perm_list_dsl = collect_to_hash(permissions_dsl, :protocol, :port_range)
      perm_list_aws = collect_to_hash(permissions_aws, :protocol, :port_range)
      perm_list_aws.each do |key, perm_aws|
        perm_dsl = perm_list_dsl.delete(key)
        if perm_dsl
          unless perm_aws.eql?(perm_dsl)
            perm_aws.update(perm_dsl)
          end
        else
          perm_aws.delete
        end
      end
      perm_list_dsl.each do |key, perm_dsl|
        protocol, port_range = key
        permissions_aws.create(protocol, port_range, perm_dsl)
      end
    end

    # Indexes +collection+ by the given attributes (or the block's result);
    # with :has_many, values are arrays of items sharing a key.
    def collect_to_hash(collection, *key_attrs)
      options = key_attrs.last.kind_of?(Hash) ? key_attrs.pop : {}
      hash = {}
      collection.each do |item|
        key = block_given? ? yield(item) : key_attrs.map {|k| item.send(k) }
        if options[:has_many]
          hash[key] ||= []
          hash[key] << item
        else
          hash[key] = item
        end
      end
      return hash
    end
  end # Client
end # Piculet
|
# encoding: UTF-8
module Plucky
  # Gem version string.
  Version = '0.6.4'
end
Release 0.6.5.
# encoding: UTF-8
module Plucky
  # Gem version string.
  Version = '0.6.5'
end
|
Add a migration that adds a `viewer_url` column (the Umaka Viewer link URL) to the `endpoints` table
# Migration: adds a string `viewer_url` column to the `endpoints` table.
# Reversible via `change`, so rolling back removes the column again.
class AddViewerUrlColumnToEndpoint < ActiveRecord::Migration
  def change
    add_column(:endpoints, :viewer_url, :string)
  end
end
|
Whoops — add missing module lib
# Monkey-patch: lets Atomy stash its own method-visibility mode
# (e.g. :module / :private_module) on Rubinius static-scope objects.
module Rubinius
  class StaticScope
    attr_accessor :atomy_visibility
  end
end
module Atomy
  # An Atomy compilation module: owns macro definitions, the list of
  # "used" (macro-providing) modules, and the node-expansion machinery
  # that rewrites AST nodes until no macro applies.
  class Module < ::Module
    # Source file this module was loaded from; used by #to_node.
    attr_accessor :file

    # Convert an AST node to a message-send node.
    def make_send(node)
      node.to_send
    end

    # Define a macro for +pattern+ with +body+: builds a Define AST (a
    # method named after the pattern's macro name, or :_expand) and
    # evaluates it in a binding whose static scope chains Atomy::AST
    # onto this module.
    def define_macro(pattern, body, file)
      name = pattern.macro_name || :_expand

      Atomy::AST::Define.new(
        0,
        Atomy::AST::Compose.new(
          0,
          Atomy::AST::Block.new(
            0,
            [Atomy::AST::Literal.new(0, self)],
            []
          ),
          Atomy::AST::Call.new(
            0,
            Atomy::AST::Word.new(0, name),
            [Atomy::AST::Compose.new(
              0,
              Atomy::AST::Word.new(0, :node),
              Atomy::AST::Block.new(
                0,
                [Atomy::AST::QuasiQuote.new(0, pattern)],
                []
              )
            )]
          )
        ),
        Atomy::AST::Send.new(
          body.line,
          body,
          [],
          :to_node
        )
      ).evaluate(
        Binding.setup(
          TOPLEVEL_BINDING.variables,
          TOPLEVEL_BINDING.code,
          Rubinius::StaticScope.new(Atomy::AST, Rubinius::StaticScope.new(self))
        ), file.to_s, pattern.line
      )
    end

    # Try this module's macro methods on +node+: the node-specific macro
    # name first, then the generic :_expand.  Returns nil when no macro
    # applies; a pattern-match failure for the tried method is swallowed,
    # any other MethodFail is re-raised.
    def execute_macro(node)
      [node.macro_name, :_expand].each do |meth|
        next unless meth and respond_to?(meth)

        begin
          return send(meth, node)
        rescue Atomy::MethodFail => e
          # TODO: make sure this is never a false-positive
          raise unless e.method_name == meth
        end
      end

      nil
    end

    # Expand +node+ via the delegate module (if any), then via each used
    # module in order; nil when none of them handled it.
    def expand_using(node)
      if delegating_expansion?
        if res = @delegate_expansion.expand_node(node)
          return res
        end
      end

      using.each do |u|
        expanded = u.execute_macro(node)
        return expanded if expanded
      end

      nil
    end

    def delegating_expansion?
      !!@delegate_expansion
    end

    # One expansion step: this module's own macros first, then used modules.
    def expand_node(node)
      execute_macro(node) || expand_using(node)
    end

    # Invoke +what+ on the node's own context module when it has one that
    # differs from this module, falling back to this module.
    def with_context(what, node)
      node.context && node.context != self &&
        node.context.send(what, node) ||
        send(what, node)
    end

    # Fully expand +node+, recursing until no macro rewrites it.  On any
    # error, report which node was being expanded before re-raising.
    def expand(node)
      if direct = with_context(:execute_macro, node)
        expand(direct)
      elsif using = with_context(:expand_using, node)
        expand(using)
      else
        node
      end
    rescue
      if node.respond_to?(:show)
        begin
          $stderr.puts "while expanding #{node.show}"
        rescue
          # #show itself may fail; fall back to the raw sexp.
          $stderr.puts "while expanding #{node.to_sexp.inspect}"
        end
      else
        $stderr.puts "while expanding #{node.to_sexp.inspect}"
      end

      raise
    end

    def delegate_expansion_to(mod)
      @delegate_expansion = mod
    end

    # A node that references this module at runtime via
    # Atomy::CodeLoader::LOADED[@file]; defers to super when the module
    # was not loaded from a file.
    def to_node
      return super unless @file

      Atomy::AST::Send.new(
        0,
        Atomy::AST::ScopedConstant.new(
          0,
          Atomy::AST::ScopedConstant.new(
            0,
            Atomy::AST::ToplevelConstant.new(
              0,
              :Atomy
            ),
            :CodeLoader
          ),
          :LOADED
        ),
        [@file.to_node],
        :[]
      )
    end

    # Require +path+ and mix the resulting module into this one, also
    # prepending it to the list of used modules consulted by expansion.
    def use(path)
      x = require(path)
      extend(x)
      include(x)
      using.unshift x
      x
    rescue
      $stderr.puts "while using #{path}..."
      raise
    end

    # Modules whose macros this module consults during expansion.
    def using
      @using ||= []
    end

    # Export methods: with a block, temporarily switches the sender
    # scope's Atomy visibility to :module; with no names, switches it
    # permanently; otherwise makes the named singleton methods public.
    def export(*names)
      if block_given?
        scope = Rubinius::StaticScope.of_sender
        old = scope.atomy_visibility
        scope.atomy_visibility = :module
        begin
          yield
        ensure
          scope.atomy_visibility = old
        end
      elsif names.empty?
        Rubinius::StaticScope.of_sender.atomy_visibility = :module
      else
        names.each do |meth|
          singleton_class.set_visibility(meth, :public)
        end
      end

      self
    end

    # Like Module#module_function but private: copies the named methods
    # into the singleton class as private methods, or (with no names)
    # flips the sender scope's visibility to :private_module.
    def private_module_function(*args)
      if args.empty?
        Rubinius::StaticScope.of_sender.atomy_visibility = :private_module
      else
        sc = Rubinius::Type.object_singleton_class(self)
        args.each do |meth|
          method_name = Rubinius::Type.coerce_to_symbol meth
          mod, method = lookup_method(method_name)
          sc.method_table.store method_name, method.method, :private
          Rubinius::VM.reset_method_cache method_name
          set_visibility method_name, :private
        end

        return self
      end
    end
  end
end
|
module Pomato
  # Gem version string, referenced by the gemspec.
  VERSION = '0.0.1'
end
prepare for push
module Pomato
  # Gem version string, referenced by the gemspec.
  VERSION = '0.0.2'
end
|
module AVM2
  # Gem version string, referenced by the gemspec.
  VERSION = "0.0.2"
end
Bump version.
module AVM2
  # Gem version string, referenced by the gemspec.
  VERSION = "0.0.3"
end
|
module Awful
  # Thor CLI subcommand for inspecting AWS Lambda functions.
  class Lambda < Cli
    no_commands do
      # Memoized AWS Lambda API client.
      def lambda
        @lambda ||= Aws::Lambda::Client.new
      end
    end

    desc 'ls NAME', 'list lambda functions matching NAME pattern'
    method_option :long, aliases: '-l', default: false, desc: 'Long listing'
    method_option :arns, aliases: '-a', default: false, desc: 'List ARNs for functions'
    def ls(name = /./)
      lambda.list_functions.functions.select do |function|
        function.function_name.match(name)
      end.tap do |functions|
        if options[:long]
          print_table functions.map { |f| [f.function_name, f.description, f.last_modified] }.sort
        elsif options[:arns]
          puts functions.map(&:function_arn).sort
        else
          puts functions.map(&:function_name).sort
        end
      end
    end

    desc 'dump NAME', 'get configuration of lambda function NAME'
    def dump(name)
      function = lambda.get_function(function_name: name)
      hash = function.configuration.to_hash
      # Include the code location details alongside the configuration.
      hash[:code] = function.code.to_hash
      hash.tap do |h|
        puts YAML.dump(stringify_keys(h))
      end
    end
  end
end
add create task
module Awful
  # Thor CLI subcommand for inspecting and creating AWS Lambda functions.
  class Lambda < Cli
    no_commands do
      # Memoized AWS Lambda API client.
      def lambda
        @lambda ||= Aws::Lambda::Client.new
      end

      ## return zip file contents, make it if necessary
      # NOTE(review): +thing+ is interpolated into a shell command below;
      # an untrusted path could inject shell commands — confirm callers
      # only pass trusted local paths.
      def zip_thing(thing)
        if File.directory?(thing)
          Dir.chdir(thing) do
            %x[zip -q -r - .] # zip dir contents
          end
        elsif thing.match(/\.zip$/i)
          File.read(thing) # raw zipfile contents
        elsif File.file?(thing)
          %x[zip -q -j - #{thing}] # zip a single file
        else
          nil
        end
      end
    end

    desc 'ls NAME', 'list lambda functions matching NAME pattern'
    method_option :long, aliases: '-l', default: false, desc: 'Long listing'
    method_option :arns, aliases: '-a', default: false, desc: 'List ARNs for functions'
    def ls(name = /./)
      lambda.list_functions.functions.select do |function|
        function.function_name.match(name)
      end.tap do |functions|
        if options[:long]
          print_table functions.map { |f| [f.function_name, f.description, f.last_modified] }.sort
        elsif options[:arns]
          puts functions.map(&:function_arn).sort
        else
          puts functions.map(&:function_name).sort
        end
      end
    end

    desc 'dump NAME', 'get configuration of lambda function NAME'
    def dump(name)
      function = lambda.get_function(function_name: name)
      hash = function.configuration.to_hash
      # Include the code location details alongside the configuration.
      hash[:code] = function.code.to_hash
      hash.tap do |h|
        puts YAML.dump(stringify_keys(h))
      end
    end

    desc 'create', 'create a new lambda function'
    # Creates a function from the loaded config; NAME (when given)
    # overrides the configured function name, and the configured
    # code.zip_file path is replaced by the actual zip bytes.
    def create(name = nil)
      opt = load_cfg
      opt[:function_name] = name unless name.nil?
      opt[:code][:zip_file] = zip_thing(opt[:code][:zip_file])
      # Only pass parameters the create_function API accepts.
      whitelist = %i[function_name runtime role handler description timeout memory_size code]
      lambda.create_function(only_keys_matching(opt, whitelist)).tap do |response|
        puts YAML.dump(stringify_keys(response.to_hash))
      end
    end
  end
end
|
module AWS #:nodoc:
  # AWS::SES is a Ruby library for Amazon's Simple Email Service's REST API (http://aws.amazon.com/ses).
  #
  # == Getting started
  #
  # To get started you need to require 'aws/ses':
  #
  #   % irb -rubygems
  #   irb(main):001:0> require 'aws/ses'
  #   # => true
  #
  # Before you can do anything, you must establish a connection using Base.new.  A basic connection would look something like this:
  #
  #   ses = AWS::SES::Base.new(
  #     :access_key_id     => 'abc',
  #     :secret_access_key => '123'
  #   )
  #
  # The minimum connection options that you must specify are your access key id and your secret access key.
  module SES
    API_VERSION = '2010-12-01'
    DEFAULT_HOST = 'email.us-east-1.amazonaws.com'
    USER_AGENT = 'github-aws-ses-ruby-gem'

    # Encodes the given string with the secret_access_key by taking the
    # HMAC-SHA256 sum, and then base64 encoding it.  Optionally, it will
    # also url encode the result of that to protect the string if it's
    # going to be used as a query string parameter.
    #
    # @param [String] secret_access_key the user's secret access key for signing.
    # @param [String] str the string to be hashed and encoded.
    # @param [Boolean] urlencode whether or not to url encode the result, true or false.
    # @return [String] the signed and encoded string.
    def SES.encode(secret_access_key, str, urlencode=true)
      # NOTE(review): OpenSSL::Digest::Digest is deprecated in modern
      # Ruby/OpenSSL; OpenSSL::Digest.new('sha256') is the current form.
      digest = OpenSSL::Digest::Digest.new('sha256')
      b64_hmac =
        Base64.encode64(
          OpenSSL::HMAC.digest(digest, secret_access_key, str)).gsub("\n","")

      if urlencode
        return CGI::escape(b64_hmac)
      else
        return b64_hmac
      end
    end

    # Generates the HTTP Header String that Amazon looks for
    #
    # @param [String] key the AWS Access Key ID
    # @param [String] alg the algorithm used for the signature
    # @param [String] sig the signature itself
    def SES.authorization_header(key, alg, sig)
      "AWS3-HTTPS AWSAccessKeyId=#{key}, Algorithm=#{alg}, Signature=#{sig}"
    end

    # AWS::SES::Base is the abstract super class of all classes who make requests against SES
    class Base
      include SendEmail
      include Info

      attr_reader :use_ssl, :server, :proxy_server, :port

      # @option options [String] :access_key_id ("") The user's AWS Access Key ID
      # @option options [String] :secret_access_key ("") The user's AWS Secret Access Key
      # @option options [Boolean] :use_ssl (true) Connect using SSL?
      # @option options [String] :server ("email.us-east-1.amazonaws.com") The server API endpoint host
      # @option options [String] :proxy_server (nil) An HTTP proxy server FQDN
      # @option options [String] :user_agent ("github-aws-ses-ruby-gem") The HTTP User-Agent header value
      # @return [Object] the object.
      def initialize( options = {} )
        options = { :access_key_id => "",
                    :secret_access_key => "",
                    :use_ssl => true,
                    :server => DEFAULT_HOST,
                    :path => "/",
                    :user_agent => USER_AGENT,
                    :proxy_server => nil
                  }.merge(options)

        @server = options[:server]
        @proxy_server = options[:proxy_server]
        @use_ssl = options[:use_ssl]
        @path = options[:path]
        @user_agent = options[:user_agent]

        raise ArgumentError, "No :access_key_id provided" if options[:access_key_id].nil? || options[:access_key_id].empty?
        raise ArgumentError, "No :secret_access_key provided" if options[:secret_access_key].nil? || options[:secret_access_key].empty?
        raise ArgumentError, "No :use_ssl value provided" if options[:use_ssl].nil?
        raise ArgumentError, "Invalid :use_ssl value provided, only 'true' or 'false' allowed" unless options[:use_ssl] == true || options[:use_ssl] == false
        raise ArgumentError, "No :server provided" if options[:server].nil? || options[:server].empty?

        if options[:port]
          # user-specified port
          @port = options[:port]
        elsif @use_ssl
          # https
          @port = 443
        else
          # http
          @port = 80
        end

        @access_key_id = options[:access_key_id]
        @secret_access_key = options[:secret_access_key]

        # Use proxy server if defined
        # Based on patch by Mathias Dalheimer.  20070217
        proxy = @proxy_server ? URI.parse(@proxy_server) : OpenStruct.new
        @http = Net::HTTP::Proxy( proxy.host,
                                  proxy.port,
                                  proxy.user,
                                  proxy.password).new(options[:server], @port)

        @http.use_ssl = @use_ssl

        # Don't verify the SSL certificates.  Avoids SSL Cert warning in log on every GET.
        # NOTE(review): VERIFY_NONE disables certificate validation and
        # permits man-in-the-middle attacks; consider VERIFY_PEER.
        @http.verify_mode = OpenSSL::SSL::VERIFY_NONE
      end

      attr_accessor :settings

      # The underlying Net::HTTP connection.
      def connection
        @http
      end

      # Make the connection to AWS passing in our request.
      # allow us to have a one line call in each method which will do all of the work
      # in making the actual request to AWS.
      def request(action, params = {})
        # Use a copy so that we don't modify the caller's Hash, remove any keys that have nil or empty values
        params = params.reject { |key, value| value.nil? or value.empty?}

        timestamp = Time.now.getutc

        params.merge!( {"Action" => action,
                        "SignatureVersion" => "2",
                        "SignatureMethod" => 'HmacSHA256',
                        "AWSAccessKeyId" => @access_key_id,
                        "Version" => API_VERSION,
                        "Timestamp" => timestamp.iso8601 } )

        # Canonical query string: sorted, url-encoded key=value pairs.
        query = params.sort.collect do |param|
          CGI::escape(param[0]) + "=" + CGI::escape(param[1])
        end.join("&")

        req = {}

        req['X-Amzn-Authorization'] = get_aws_auth_param(timestamp.httpdate, @secret_access_key)
        req['Date'] = timestamp.httpdate
        req['User-Agent'] = @user_agent

        response = connection.post(@path, query, req)

        # Dispatch to the action-specific response class, e.g. "SendEmailResponse".
        response_class = AWS::SES.const_get( "#{action}Response" )
        result = response_class.new(action, response)

        if result.error?
          raise ResponseError.new(result)
        end

        result
      end

      # Set the Authorization header using AWS signed header authentication
      def get_aws_auth_param(timestamp, secret_access_key)
        encoded_canonical = SES.encode(secret_access_key, timestamp, false)
        SES.authorization_header(@access_key_id, 'HmacSHA256', encoded_canonical)
      end
    end # class Base
  end # Module SES
end # Module AWS
initialize settings for SES Base for response object, fixes #23
module AWS #:nodoc:
  # AWS::SES is a Ruby library for Amazon's Simple Email Service's REST API (http://aws.amazon.com/ses).
  #
  # == Getting started
  #
  # To get started you need to require 'aws/ses':
  #
  #   % irb -rubygems
  #   irb(main):001:0> require 'aws/ses'
  #   # => true
  #
  # Before you can do anything, you must establish a connection using Base.new.  A basic connection would look something like this:
  #
  #   ses = AWS::SES::Base.new(
  #     :access_key_id     => 'abc',
  #     :secret_access_key => '123'
  #   )
  #
  # The minimum connection options that you must specify are your access key id and your secret access key.
  module SES
    API_VERSION = '2010-12-01'
    DEFAULT_HOST = 'email.us-east-1.amazonaws.com'
    USER_AGENT = 'github-aws-ses-ruby-gem'

    # Encodes the given string with the secret_access_key by taking the
    # HMAC-SHA256 sum, and then base64 encoding it.  Optionally, it will
    # also url encode the result of that to protect the string if it's
    # going to be used as a query string parameter.
    #
    # @param [String] secret_access_key the user's secret access key for signing.
    # @param [String] str the string to be hashed and encoded.
    # @param [Boolean] urlencode whether or not to url encode the result, true or false.
    # @return [String] the signed and encoded string.
    def SES.encode(secret_access_key, str, urlencode=true)
      # NOTE(review): OpenSSL::Digest::Digest is deprecated in modern
      # Ruby/OpenSSL; OpenSSL::Digest.new('sha256') is the current form.
      digest = OpenSSL::Digest::Digest.new('sha256')
      b64_hmac =
        Base64.encode64(
          OpenSSL::HMAC.digest(digest, secret_access_key, str)).gsub("\n","")

      if urlencode
        return CGI::escape(b64_hmac)
      else
        return b64_hmac
      end
    end

    # Generates the HTTP Header String that Amazon looks for
    #
    # @param [String] key the AWS Access Key ID
    # @param [String] alg the algorithm used for the signature
    # @param [String] sig the signature itself
    def SES.authorization_header(key, alg, sig)
      "AWS3-HTTPS AWSAccessKeyId=#{key}, Algorithm=#{alg}, Signature=#{sig}"
    end

    # AWS::SES::Base is the abstract super class of all classes who make requests against SES
    class Base
      include SendEmail
      include Info

      attr_reader :use_ssl, :server, :proxy_server, :port
      # Per-connection settings hash, initialized empty in #initialize
      # so response handling always has a hash to work with (issue #23).
      attr_accessor :settings

      # @option options [String] :access_key_id ("") The user's AWS Access Key ID
      # @option options [String] :secret_access_key ("") The user's AWS Secret Access Key
      # @option options [Boolean] :use_ssl (true) Connect using SSL?
      # @option options [String] :server ("email.us-east-1.amazonaws.com") The server API endpoint host
      # @option options [String] :proxy_server (nil) An HTTP proxy server FQDN
      # @option options [String] :user_agent ("github-aws-ses-ruby-gem") The HTTP User-Agent header value
      # @return [Object] the object.
      def initialize( options = {} )
        options = { :access_key_id => "",
                    :secret_access_key => "",
                    :use_ssl => true,
                    :server => DEFAULT_HOST,
                    :path => "/",
                    :user_agent => USER_AGENT,
                    :proxy_server => nil
                  }.merge(options)

        @server = options[:server]
        @proxy_server = options[:proxy_server]
        @use_ssl = options[:use_ssl]
        @path = options[:path]
        @user_agent = options[:user_agent]
        @settings = {}

        raise ArgumentError, "No :access_key_id provided" if options[:access_key_id].nil? || options[:access_key_id].empty?
        raise ArgumentError, "No :secret_access_key provided" if options[:secret_access_key].nil? || options[:secret_access_key].empty?
        raise ArgumentError, "No :use_ssl value provided" if options[:use_ssl].nil?
        raise ArgumentError, "Invalid :use_ssl value provided, only 'true' or 'false' allowed" unless options[:use_ssl] == true || options[:use_ssl] == false
        raise ArgumentError, "No :server provided" if options[:server].nil? || options[:server].empty?

        if options[:port]
          # user-specified port
          @port = options[:port]
        elsif @use_ssl
          # https
          @port = 443
        else
          # http
          @port = 80
        end

        @access_key_id = options[:access_key_id]
        @secret_access_key = options[:secret_access_key]

        # Use proxy server if defined
        # Based on patch by Mathias Dalheimer.  20070217
        proxy = @proxy_server ? URI.parse(@proxy_server) : OpenStruct.new
        @http = Net::HTTP::Proxy( proxy.host,
                                  proxy.port,
                                  proxy.user,
                                  proxy.password).new(options[:server], @port)

        @http.use_ssl = @use_ssl

        # Don't verify the SSL certificates.  Avoids SSL Cert warning in log on every GET.
        # NOTE(review): VERIFY_NONE disables certificate validation and
        # permits man-in-the-middle attacks; consider VERIFY_PEER.
        @http.verify_mode = OpenSSL::SSL::VERIFY_NONE
      end

      # The underlying Net::HTTP connection.
      def connection
        @http
      end

      # Make the connection to AWS passing in our request.
      # allow us to have a one line call in each method which will do all of the work
      # in making the actual request to AWS.
      def request(action, params = {})
        # Use a copy so that we don't modify the caller's Hash, remove any keys that have nil or empty values
        params = params.reject { |key, value| value.nil? or value.empty?}

        timestamp = Time.now.getutc

        params.merge!( {"Action" => action,
                        "SignatureVersion" => "2",
                        "SignatureMethod" => 'HmacSHA256',
                        "AWSAccessKeyId" => @access_key_id,
                        "Version" => API_VERSION,
                        "Timestamp" => timestamp.iso8601 } )

        # Canonical query string: sorted, url-encoded key=value pairs.
        query = params.sort.collect do |param|
          CGI::escape(param[0]) + "=" + CGI::escape(param[1])
        end.join("&")

        req = {}

        req['X-Amzn-Authorization'] = get_aws_auth_param(timestamp.httpdate, @secret_access_key)
        req['Date'] = timestamp.httpdate
        req['User-Agent'] = @user_agent

        response = connection.post(@path, query, req)

        # Dispatch to the action-specific response class, e.g. "SendEmailResponse".
        response_class = AWS::SES.const_get( "#{action}Response" )
        result = response_class.new(action, response)

        if result.error?
          raise ResponseError.new(result)
        end

        result
      end

      # Set the Authorization header using AWS signed header authentication
      def get_aws_auth_param(timestamp, secret_access_key)
        encoded_canonical = SES.encode(secret_access_key, timestamp, false)
        SES.authorization_header(@access_key_id, 'HmacSHA256', encoded_canonical)
      end
    end # class Base
  end # Module SES
end # Module AWS
|
# Copyright (c) 2015, Groupon, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of GROUPON nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
require 'grape'
require 'backbeat/web/middleware/log'
require 'backbeat/web/middleware/health'
require 'backbeat/web/middleware/heartbeat'
require 'backbeat/web/middleware/sidekiq_stats'
require 'backbeat/web/workflows_api'
require 'backbeat/web/activities_api'
require 'backbeat/web/debug_api'
require 'backbeat/web/users_api'
module Backbeat
  module Web
    # Top-level Grape API: wires shared helpers, parameter normalization,
    # centralized error handling, and mounts the versioned sub-APIs.
    class API < Grape::API
      format :json
      helpers CurrentUserHelper

      before do
        # Normalize incoming param keys to snake_case, leaving
        # client-owned payloads (client_data, metadata) untouched.
        @params = Util.underscore(params, { ignore: [:client_data, :metadata] })
      end

      # Catch-all: unexpected errors are logged with a backtrace and
      # surfaced as 500s.
      rescue_from :all do |e|
        Logger.error({ error_type: e.class, error: e.message, backtrace: e.backtrace })
        error!(ErrorPresenter.present(e), 500)
      end

      rescue_from ActiveRecord::RecordNotFound do |e|
        Logger.info(e)
        error!(ErrorPresenter.present(e), 404)
      end

      # Client-caused errors that map to a 400 response.
      RESCUED_ERRORS = [
        WorkflowComplete,
        Grape::Exceptions::Validation,
        Grape::Exceptions::ValidationErrors,
        ActiveRecord::StatementInvalid
      ]

      rescue_from *RESCUED_ERRORS do |e|
        Logger.info(e)
        error!(ErrorPresenter.present(e), 400)
      end

      rescue_from InvalidServerStatusChange do |e|
        Logger.info(e)
        error!(ErrorPresenter.present(e), 500)
      end

      # Client status conflicts map to 409 Conflict.
      rescue_from InvalidClientStatusChange do |e|
        Logger.info(e)
        error!(ErrorPresenter.present(e), 409)
      end

      mount WorkflowsAPI.versioned('/')
      mount ActivitiesAPI.versioned('/activities')
      mount UsersAPI

      # Deprecated V2 API
      mount WorkflowsAPI.versioned('/v2')
      mount ActivitiesAPI.versioned('/v2/events')
      mount DebugAPI.versioned('/v2')
    end

    # Rack stack wrapping the API with logging, heartbeat/health
    # endpoints, AR connection management, and Sidekiq stats.
    App = Rack::Builder.new do
      use Middleware::Log
      use Middleware::Heartbeat
      use ActiveRecord::ConnectionAdapters::ConnectionManagement
      use Middleware::Health
      use Middleware::SidekiqStats
      run API
    end
  end
end
Log error messages rather than entire error objects in rescue handlers
# Copyright (c) 2015, Groupon, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of GROUPON nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
require 'grape'
require 'backbeat/web/middleware/log'
require 'backbeat/web/middleware/health'
require 'backbeat/web/middleware/heartbeat'
require 'backbeat/web/middleware/sidekiq_stats'
require 'backbeat/web/workflows_api'
require 'backbeat/web/activities_api'
require 'backbeat/web/debug_api'
require 'backbeat/web/users_api'
module Backbeat
  module Web
    # Top-level Grape API: wires shared helpers, parameter normalization,
    # centralized error handling, and mounts the versioned sub-APIs.
    class API < Grape::API
      format :json
      helpers CurrentUserHelper

      before do
        # Normalize incoming param keys to snake_case, leaving
        # client-owned payloads (client_data, metadata) untouched.
        @params = Util.underscore(params, { ignore: [:client_data, :metadata] })
      end

      # Catch-all: unexpected errors are logged with a backtrace and
      # surfaced as 500s.
      rescue_from :all do |e|
        Logger.error({ error_type: e.class, error: e.message, backtrace: e.backtrace })
        error!(ErrorPresenter.present(e), 500)
      end

      # Expected errors log only the message, not the whole exception.
      rescue_from ActiveRecord::RecordNotFound do |e|
        Logger.info(e.message)
        error!(ErrorPresenter.present(e), 404)
      end

      # Client-caused errors that map to a 400 response.
      RESCUED_ERRORS = [
        WorkflowComplete,
        Grape::Exceptions::Validation,
        Grape::Exceptions::ValidationErrors,
        ActiveRecord::StatementInvalid
      ]

      rescue_from *RESCUED_ERRORS do |e|
        Logger.info(e.message)
        error!(ErrorPresenter.present(e), 400)
      end

      rescue_from InvalidServerStatusChange do |e|
        Logger.info(e.message)
        error!(ErrorPresenter.present(e), 500)
      end

      # Client status conflicts map to 409 Conflict.
      rescue_from InvalidClientStatusChange do |e|
        Logger.info(e.message)
        error!(ErrorPresenter.present(e), 409)
      end

      mount WorkflowsAPI.versioned('/')
      mount ActivitiesAPI.versioned('/activities')
      mount UsersAPI

      # Deprecated V2 API
      mount WorkflowsAPI.versioned('/v2')
      mount ActivitiesAPI.versioned('/v2/events')
      mount DebugAPI.versioned('/v2')
    end

    # Rack stack wrapping the API with logging, heartbeat/health
    # endpoints, AR connection management, and Sidekiq stats.
    App = Rack::Builder.new do
      use Middleware::Log
      use Middleware::Heartbeat
      use ActiveRecord::ConnectionAdapters::ConnectionManagement
      use Middleware::Health
      use Middleware::SidekiqStats
      run API
    end
  end
end
|
require 'activesupport'
module Backgrounded
  mattr_accessor :handler

  # Global handler used to dispatch backgrounded requests; defaults to
  # the synchronous in-process handler.
  def self.handler
    @@handler ||= Backgrounded::Handler::InprocessHandler.new
  end

  module Handler
    #simple handler to process synchronously and not actually in the background
    #useful for testing
    class InprocessHandler
      def request(object, method)
        object.send method
      end
    end

    # passes the job to bj by serializing the options and invoking the object's method through script/runner
    # see http://github.com/github/bj/tree/master
    class BackgroundJobHandler
      require 'bj'
      def request(object, method)
        # BUGFIX: quote the runner expression so the shell does not split
        # "Klass.find(1).method" into separate script/runner arguments.
        Bj.submit "./script/runner \"#{object.class}.find(#{object.id}).#{method}\""
      end
    end

    # use amqp client (bunny) to publish requests
    # see http://github.com/celldee/bunny/tree/master
    class BunnyQueueHandler
      require 'bunny'
      def initialize(queue)
        @queue = queue
      end

      def request(object, method)
        hash = {:object => object.class, :id => object.id, :method => method}
        @queue.publish(YAML::dump(hash), :persistent => true)
      end

      # poll for new requests on the queue; returns nil when empty
      def poll
        raw = @queue.pop
        return nil if raw == :queue_empty
        # BUGFIX: the original parsed the YAML but discarded the result,
        # then indexed the raw payload string, which could never work.
        value = YAML::load(raw)
        value[:object].constantize.find(value[:id]).send(value[:method]) if value
      end
    end
  end

  module Model
    def self.included(base)
      base.extend(ClassMethods)
    end

    module ClassMethods
      # For each method name, defines "<name>_in_background" which routes
      # the call through the configured Backgrounded.handler.
      def backgrounded(*methods)
        methods.each do |method|
          define_method "#{method.to_s}_in_background" do
            Backgrounded.handler.request(self, method)
          end
        end
        include Backgrounded::Model::InstanceMethods
        extend Backgrounded::Model::SingletonMethods
      end
    end

    module SingletonMethods
    end

    module InstanceMethods
    end
  end
end
Object.send(:include, Backgrounded::Model)
fix running of bj jobs
require 'activesupport'
module Backgrounded
  mattr_accessor :handler

  # Global handler used to dispatch backgrounded requests; defaults to
  # the synchronous in-process handler.
  def self.handler
    @@handler ||= Backgrounded::Handler::InprocessHandler.new
  end

  module Handler
    #simple handler to process synchronously and not actually in the background
    #useful for testing
    class InprocessHandler
      def request(object, method)
        object.send method
      end
    end

    # passes the job to bj by serializing the options and invoking the object's method through script/runner
    # see http://github.com/github/bj/tree/master
    class BackgroundJobHandler
      require 'bj'
      def request(object, method)
        Bj.submit "./script/runner \"#{object.class}.find(#{object.id}).#{method}\""
      end
    end

    # use amqp client (bunny) to publish requests
    # see http://github.com/celldee/bunny/tree/master
    class BunnyQueueHandler
      require 'bunny'
      def initialize(queue)
        @queue = queue
      end

      def request(object, method)
        hash = {:object => object.class, :id => object.id, :method => method}
        @queue.publish(YAML::dump(hash), :persistent => true)
      end

      # poll for new requests on the queue; returns nil when empty
      def poll
        raw = @queue.pop
        return nil if raw == :queue_empty
        # BUGFIX: the original parsed the YAML but discarded the result,
        # then indexed the raw payload string, which could never work.
        value = YAML::load(raw)
        value[:object].constantize.find(value[:id]).send(value[:method]) if value
      end
    end
  end

  module Model
    def self.included(base)
      base.extend(ClassMethods)
    end

    module ClassMethods
      # For each method name, defines "<name>_in_background" which routes
      # the call through the configured Backgrounded.handler.
      def backgrounded(*methods)
        methods.each do |method|
          define_method "#{method.to_s}_in_background" do
            Backgrounded.handler.request(self, method)
          end
        end
        include Backgrounded::Model::InstanceMethods
        extend Backgrounded::Model::SingletonMethods
      end
    end

    module SingletonMethods
    end

    module InstanceMethods
    end
  end
end
Object.send(:include, Backgrounded::Model)
|
module Banken
  # Base error class for all Banken failures.
  class Error < StandardError; end

  # Raised when authorization fails.  Accepts either a plain message
  # String or an options Hash with :controller, :action, :policy and an
  # optional :message override.
  class NotAuthorizedError < Error
    attr_reader :controller, :action, :policy

    def initialize(options = {})
      message =
        if options.is_a?(String)
          options
        else
          @controller = options[:controller]
          @action     = options[:action]
          @policy     = options[:policy]
          options.fetch(:message) { "not allowed to #{action} of #{controller} by #{policy.inspect}" }
        end
      super(message)
    end
  end

  class NotDefinedError < Error; end
  class AuthorizationNotPerformedError < Error; end
  class PolicyScopingNotPerformedError < AuthorizationNotPerformedError; end
end
Remove unnecessary space
module Banken
  # Base error class for all Banken failures.
  class Error < StandardError; end

  # Raised when authorization fails.  Accepts either a plain message
  # String or an options Hash with :controller, :action, :policy and an
  # optional :message override.
  class NotAuthorizedError < Error
    attr_reader :controller, :action, :policy

    def initialize(options={})
      message =
        if options.is_a?(String)
          options
        else
          @controller = options[:controller]
          @action     = options[:action]
          @policy     = options[:policy]
          options.fetch(:message) { "not allowed to #{action} of #{controller} by #{policy.inspect}" }
        end
      super(message)
    end
  end

  class NotDefinedError < Error; end
  class AuthorizationNotPerformedError < Error; end
  class PolicyScopingNotPerformedError < AuthorizationNotPerformedError; end
end
|
puts __FILE__ if defined?(DEBUG)
require 'json'
require 'rake'
require_relative('../apps/svn.rb')
require_relative('dir.rb')
require_relative('environment.rb')
require_relative('string.rb')
class Project < Hash
attr_accessor :filename,:env
# Builds a project description.  +value+ may be a remote URL String or
# an attribute Hash; +fullname+ (when non-empty) overrides the derived
# "host/owner/repo" name.
def initialize value='',fullname=''
  @filename=''
  @env=Environment.new
  # Defaults derived from the checkout in the current Rake directory.
  self[:url]=Project.get_url
  self[:fullname]=Project.get_fullname_from_url self[:url] if self[:url].length > 0
  self[:timeout]=60*5
  if value.is_a?(String)
    # NOTE(review): the is_a?(String) re-check is redundant inside this branch.
    self[:url] = value if value.is_a?(String) && value.length > 0
    self[:fullname] = Project.get_fullname_from_url self[:url]
  elsif(value.is_a?(Hash))
    # Bulk-assign attributes, symbolizing keys.
    value.each{|k,v|self[k.to_sym]=v}
  else
    self[:fullname]=Project.get_fullname_from_url self[:url] if self[:url].length > 0
  end
  self[:fullname] = fullname if fullname.length > 0
end
# Sets self[:timeout] (seconds) from a Numeric, or from a String such
# as "5m" (minutes) or "90s" (seconds).
def set_timeout value
  if value.is_a? Numeric
    # BUGFIX: stop here -- the original fell through and called
    # String#include? on the Numeric, raising NoMethodError.
    self[:timeout] = value
  elsif value.include?('m')
    self[:timeout] = value.gsub('m','').strip.to_f * 60
  elsif value.include?('s')
    # BUGFIX: seconds are already seconds; the original multiplied by 60.
    self[:timeout] = value.gsub('s','').strip.to_f
  end
end
# Remote URL of the checkout in +directory+: the git origin URL for a
# .git checkout, the svn URL for a .svn checkout, '' for neither.
def self.get_url directory=Rake.application.original_dir
  url=''
  Dir.chdir(directory) do#Rake.application.original_dir) do
    url=`git config --get remote.origin.url`.strip if(File.exists?('.git'))
    url= Svn.url.strip if(File.exists?('.svn'))
  end
  url
end
# Fullname of the project checked out at +directory+, computed by
# stripping the configured work-dir prefix.
def self.get_fullname directory
  # BUGFIX: this is a class method, so the instance variable @env was
  # always nil here and @env.wrk_dir raised NoMethodError.
  directory.gsub(Environment.new.wrk_dir,'')
end
# Derives "host/owner/repo" from a git/svn remote URL by stripping the
# scheme, collapsing the ".com/" separator, and dropping ".git".
def self.get_fullname_from_url url
  url.gsub('http://', '')
     .gsub('https://', '')
     .gsub('.com/', '/')
     .gsub('.git', '')
end
# Convenience readers over the underlying hash entries.
def url; self[:url]; end
def fullname; self[:fullname]; end

# Last path segment of the fullname (the bare repository name).
def name
  parts=fullname.split('/')
  parts[parts.length-1]
end

# Absolute working-copy directory for this project.
def wrk_dir; "#{@env.wrk_dir}/#{self.fullname}"; end
# Build directory for this project, optionally suffixed with "-tag".
def make_dir tag=''
  # BUGFIX: the original computed the untagged path but discarded it,
  # so an empty tag produced a path with a trailing "-".
  return "#{@env.make_dir}/#{self.fullname}" if tag.length==0
  "#{@env.make_dir}/#{self.fullname}-#{tag}"
end
# Runs "git pull" in the working copy, but only when it exists and is a
# git checkout.
def pull
  if(File.exists?(wrk_dir) && File.exists?("#{wrk_dir}/.git"))
    Dir.chdir(wrk_dir) do
      puts "git pull (#{wrk_dir})"
      puts `git pull`
    end
  end
end
# Clones the remote into wrk_dir when the URL looks like git and no
# working copy exists yet.
# NOTE(review): this shadows Object#clone on Project instances.
def clone
  if(!File.exists?(wrk_dir) && self[:url].include?('.git'))
    cmd=Command.new({ :input => "git clone #{self[:url]} #{self.wrk_dir}", :quiet => true,:ignore_failure => true})
    cmd.execute
    @env.out cmd.summary
  end
end
# Checks the remote out into wrk_dir when the URL looks like svn and no
# working copy exists yet.
def checkout
  if(!File.exists?(wrk_dir) && self[:url].include?('svn'))
    #puts "checkout #{self.url} to #{self.wrk_dir}"
    #puts `svn checkout #{self.url} #{self.wrk_dir}`
    cmd=Command.new({ :input => "svn checkout #{self.url} #{self.wrk_dir}", :quiet => true,:ignore_failure => true})
    cmd.execute
    @env.out cmd.summary
  end
end
# Ensures a working copy exists (via clone or checkout), then runs rake
# in it and reports the command summary.
def rake
  if(!File.exists?(self.wrk_dir))
    clone
    checkout
  end
  if(File.exists?(self.wrk_dir))
    Dir.chdir(self.wrk_dir) do
      rake = Command.new({ :input => 'rake', :timeout => 300, :ignore_failure => true })
      rake.execute
      @env.out rake.summary
    end
  end
end
# Latest git tag of the build mirror under make_dir.  When +update+ is
# true the mirror is pulled (or cloned if absent) first.  Returns ''
# when the tag cannot be determined.
def latest_tag update=false
  makedir="#{@env.make_dir}/#{self.fullname}"
  FileUtils.mkdir_p(File.dirname(makedir)) if !File.exists?(File.dirname(makedir))
  if(File.exists?(makedir))
    Dir.chdir(makedir) do
      Command.exit_code('git pull') if update
    end
  else
    if(update)
      clone=Command.new("git clone #{self.url} #{makedir}")
      clone[:quiet]=true
      clone[:ignore_failure]=true
      clone.execute
    end
  end
  if(File.exists?(makedir))
    Dir.chdir(makedir) do
      begin
        return Git.latest_tag
      rescue
        # Best-effort: swallow failures and fall through to ''.
      end
    end
  end
  ''
end
# Absolute paths of this project's JSON command logs, optionally
# filtered to files whose names contain every entry in +tags+.
def log_filenames tags=nil
  wanted = tags || []
  results = []
  Dir.chdir(@env.log_dir) do
    pattern = "#{fullname.gsub('/', '.')}*.json"
    Dir.glob(pattern).each do |file|
      # An empty tag list matches everything (all? is vacuously true).
      next unless wanted.all? { |tag| file.include?(tag) }
      results << "#{@env.log_dir}/#{file}"
    end
  end
  results
end
# Rehydrates Command objects from this project's JSON log files,
# optionally filtered by +tags+ (see #log_filenames).
def command_history tags=nil
  log_filenames(tags).map do |logfile|
    Command.new(JSON.parse(IO.read(logfile)))
  end
end
# True when the up-to-date marker file is newer than every file in the
# current directory, i.e. nothing changed since the last recorded work run.
# Only meaningful when rake was invoked from this project's wrk_dir.
def work_up_to_date?
if wrk_dir == Rake.application.original_dir
logfile=get_logfile ['work','up2date']
if File.exists? logfile
last_work_time=File.mtime(logfile)
# Dir.get_latest_mtime is a project extension (see dir.rb).
last_file_changed=Dir.get_latest_mtime Rake.application.original_dir
if last_work_time > last_file_changed
# Register the marker with rake's CLEAN list so `rake clean` removes it.
CLEAN.include logfile
return true
else
# Stale marker: a source file changed after it was written.
File.delete(logfile)
end
end
else
puts "wrk_dir does not match Rake.application.original_dir" if @env.debug?
end
false
end
# Touch the up-to-date marker file so work_up_to_date? can short-circuit later.
def mark_work_up_to_date
  if wrk_dir == Rake.application.original_dir
    File.open(get_logfile(['work', 'up2date']), 'w') { |f| f.write(' ') }
  else
    puts "wrk_dir does not match Rake.application.original_dir" if @env.debug?
  end
end
# Build the log-file path for this project; +tags+ may be a String or an
# Array of tags joined with '.'. Slashes in the name part become dots.
def get_logfile tags
  tagstring = case tags
              when String then tags
              when Array  then tags.join('.')
              else ''
              end
  name = "#{self.fullname}.#{tagstring}.json".gsub('/', '.')
  "#{@env.log_dir}/#{name}"
end
# Print a one-line status for the project: "? name" when no history exists,
# "  name" when all recorded commands succeeded, "X name" (red when colour
# output is enabled) when any recorded command failed.
def list
history=command_history
if(history.length==0)
@env.out "? #{fullname}"
else
status=0
history.each{|c|
status=c.exit_code if c.exit_code != 0
}
if(status==0)
@env.out "  #{fullname}"
else
if(@env.colorize?)
require 'ansi/code'
@env.out ANSI.red + ANSI.bright + "X #{fullname}" + ANSI.reset
else
@env.out "X #{fullname}"
end
end
end
end
# Print +message+ wrapped in brackets, blue/bright when colour output is on.
def out_brackets message
  unless @env.colorize?
    @env.out "[#{message}]"
    return
  end
  require 'ansi/code'
  @env.out '[' + ANSI.blue + ANSI.bright + message + ANSI.reset + ']'
end
# Print +message+, in bright cyan when colour output is enabled.
def out_cyan message
  unless @env.colorize?
    @env.out "#{message}"
    return
  end
  require 'ansi/code'
  @env.out ANSI.cyan + ANSI.bright + message + ANSI.reset
end
# Print a "name: value" pair; the value is bright white when colourized.
# Note the colourized path strips the value while the plain path does not —
# behaviour preserved from the original.
def out_property name,value
  unless @env.colorize?
    @env.out "#{name}: #{value}"
    return
  end
  require 'ansi/code'
  @env.out "#{name}: " + ANSI.white + ANSI.bold + value.to_s.strip + ANSI.reset
end
#def info
# @env.out "Project #{name}"
# @env.out "#{'fullname'.fix(13)}: #{self.fullname}"
# @env.out "#{'url'.fix(13)}: #{self[:url]}"
# @env.out "#{'version'.fix(13)}: #{VERSION}" if defined? VERSION
#end
# Print a property sheet for the project (fullname, url, work/make status
# with details) and return a zero-exit 'info' Command.
# String#fix is a project extension (string.rb); presumably it pads the
# label to the given width — confirm in string.rb.
def info
infoCmd=Command.new({ :input => 'info', :exit_code => 0 })
#out_cyan '========================================================='
#out_cyan fullname
out_property "fullname".fix(15), fullname
out_property "url".fix(15), url
wrk_history=command_history ['work']
out_property "work status".fix(15), "?" if wrk_history.length == 0
out_property "work status".fix(15), wrk_history[0].summary if wrk_history.length > 0
if(wrk_history.length > 0)
@env.out wrk_history[0].info
end
make_history=command_history ['make', latest_tag]
out_property "make status".fix(15),"?" if make_history.length == 0
out_property "make status".fix(15), make_history[0].summary if make_history.length > 0
if(make_history.length >0)
@env.out make_history[0].info
end
infoCmd
end
# Delete the project's work and make directories; returns a zero-exit
# 'clobber' Command. Dir.remove is a project extension (dir.rb);
# presumably the second argument forces recursive removal — confirm there.
def clobber
clobberCmd=Command.new('clobber')
clobberCmd[:exit_code]=0
if(File.exists?(wrk_dir))
Dir.remove wrk_dir,true
@env.out "removed #{wrk_dir}"
end
# NOTE(review): make_dir is called without a tag; with the current make_dir
# implementation that returns a '-'-suffixed path — confirm intended.
if(File.exists?(make_dir))
Dir.remove make_dir,true
@env.out "removed #{make_dir}"
end
clobberCmd
end
# Run `rake default` in the work directory when any file changed since the
# last recorded run; otherwise replay the saved result from the log file.
# Returns the Command (fresh or loaded), or nil when wrk_dir is missing.
def work
clone
checkout
logfile=get_logfile ['work']
if(File.exists?(wrk_dir))
rake_default=Command.new({:input =>'rake default',:quiet => true,:ignore_failure => true})
# Re-run only when no previous run is recorded or the tree changed since.
if(last_work_mtime.nil? || last_work_mtime < Environment.get_latest_mtime(wrk_dir))
Dir.chdir(wrk_dir) do
@env.out fullname
if(!File.exists?'rakefile.rb')
# Synthesize a failed Command when there is nothing to run.
rake_default[:exit_code]=1
rake_default[:error]="rakefile.rb not found."
rake_default[:start_time]=Time.now
rake_default[:end_time]=Time.now
else
#rake_default[:timeout] = self[:timeout]
rake_default.execute
end
rake_default.save logfile
update_status
@env.out rake_default.summary true
return rake_default
end
else
# Up to date: reuse the stored result; print only failures unless
# show_success is enabled.
if(File.exists?(logfile))
rake_default.open logfile
@env.out rake_default.summary true if(rake_default[:exit_code] != 0 || @env.show_success?)
end
end
rake_default
end
end
# Build a tagged snapshot: clone into a throwaway make directory, check out
# +tag+, run `rake default`, record the result in a per-tag log file and
# remove the snapshot again. A cached log short-circuits the whole build.
def make tag=''
tag=latest_tag true if tag.length==0
#return if tag.length==0
raise 'no tag specified' if tag.length==0
rake_default=Command.new({:input => 'rake default',:quiet => true,:ignore_failure => true})
logfile=get_logfile ['make',tag]
if(File.exists?(logfile))
# A log for this tag exists: replay it instead of rebuilding.
rake_default.open logfile
@env.out rake_default.summary true if(rake_default[:exit_code] != 0) || @env.show_success?
rake_default
else
makedir=make_dir tag
FileUtils.mkdir_p(File.dirname(makedir)) if !File.exists? File.dirname(makedir)
if(self[:url].include?('.git'))
if(!File.exists?(makedir))
clone=Command.new({:input=>"git clone #{self[:url]} #{makedir}",:quiet=>true})
clone.execute
end
end
if(File.exists?(makedir))
Dir.chdir(makedir) do
checkout=Command.new({:input=>"git checkout #{tag}",:quiet=>true})
checkout.execute
# Drop the git metadata so the snapshot is a plain source tree.
FileUtils.rm_r '.git'
if(!File.exists?'rakefile.rb')
rake_default[:exit_code]=1
rake_default[:error]="rakefile.rb not found."
rake_default[:start_time]=Time.now
rake_default[:end_time]=Time.now
else
#rake_default[:timeout] = self[:timeout]
rake_default.execute
end
rake_default.save logfile
update_status
@env.out rake_default.summary true
rake_default
end
else
puts "Project make make_dir #{makedir} does not exist." if @env.debug?
end
# Best-effort cleanup of the snapshot directory.
begin
FileUtils.rm_r makedir
rescue
end
rake_default
end
end
# Modification time of the 'work' log file, or nil when it does not exist.
def last_work_mtime
  logfile = get_logfile ['work']
  File.exists?(logfile) ? File.mtime(logfile) : nil
end
# Recompute and write <user>@<machine>.status.json for this project.
# Status values: '0' = last run succeeded, 'X' = last run failed,
# '?' = no make log recorded.
def update_status
  status_logfile="#{@env.root_dir}/log/#{self.fullname}/#{@env.user}@#{@env.machine}.status.json"
  # Literal hash: the old Hash.new({'status'=>'?'}) only installed a shared
  # *default object*, which never appears in to_json output; seed the key
  # explicitly instead.
  status={'status'=>'?'}
  wrk_logfile="#{@env.root_dir}/log/#{self.fullname}/#{@env.user}@#{@env.machine}.json"
  if(File.exists?(wrk_logfile))
    rake_default=Command.new(JSON.parse(IO.read(wrk_logfile)))
    status[:work_logfile]=wrk_logfile
    status['status']='0'
    status['status']='X' if rake_default[:exit_code] != 0
  end
  make_logfile="#{@env.root_dir}/log/#{self.fullname}/#{latest_tag}/#{@env.user}@#{@env.machine}.json"
  if(File.exists?(make_logfile))
    rake_default=Command.new(JSON.parse(IO.read(make_logfile)))
    status[:make_logfile]=make_logfile
    status['status']='0'
    status['status']='X' if rake_default[:exit_code] != 0
  else
    # No make log: reset to unknown even if the work log set a status.
    status['status']='?'
  end
  FileUtils.mkdir_p(File.dirname(status_logfile)) if !File.exists?(File.dirname(status_logfile))
  File.open(status_logfile,'w'){|f|f.write(status.to_json)}
end
# Read the cached status character for this project ('0', 'X' or '?'),
# regenerating the status file first when it is missing.
def status
  status_logfile="#{@env.root_dir}/log/#{self.fullname}/#{@env.user}@#{@env.machine}.status.json"
  update_status unless File.exists? status_logfile
  if File.exists?(status_logfile)
    parsed = JSON.parse(IO.read(status_logfile))
    return parsed['status'] if parsed.has_key?('status')
  end
  '?'
end
# Placeholder: reporting is not implemented yet.
def report
end
# Bring the work directory up to date: clone/checkout when missing, then run
# `git pull` (git checkout) or `svn update` (svn checkout) inside it.
# Returns the executed Command, or a failed Command (exit_code 1) when the
# work directory is missing or uses neither git nor svn.
def update
  clone
  checkout
  if(File.exists?(wrk_dir))
    Dir.chdir(wrk_dir) do
      if(File.exists?('.git'))
        pull=Command.execute(Command.new({:input => 'git pull', :quiet => true, :ignore_failure => true}))
        @env.out pull.summary
        return pull
      end
      # Was File.exists?('svn'): subversion working copies are marked by a
      # '.svn' directory (as Project.get_url checks), so this branch could
      # never fire and svn projects were never updated.
      if(File.exists?('.svn'))
        updateCmd=Command.execute(Command.new({:input => 'svn update', :quiet => true, :ignore_failure => true}))
        @env.out updateCmd.summary
        return updateCmd
      end
    end
  end
  return Command.new({:exit_code => 1})
end
# List the project's git tags, cloning the repository first when needed.
def tags
  tags=Array.new
  if !File.exists? wrk_dir
    # Double quotes: the old single-quoted string never interpolated, so the
    # clone command was literally "git clone #{self[:url]} #{wrk_dir}".
    clone=Command.new({:input=>"git clone #{self[:url]} #{wrk_dir}",:quiet=>true})
    clone.execute
  end
  Dir.chdir(wrk_dir) do
    # Split on a real newline; the old single-quoted '\n' split on a literal
    # backslash-n and returned the whole output as one element.
    Command.output('git tag').split("\n").each{|line|
      tag=line.strip
      # Keep non-empty tags; the old `tag.length < 0` could never be true,
      # so the method always returned an empty array.
      tags << tag if tag.length > 0
    }
  end
  tags
end
end
'all'
puts __FILE__ if defined?(DEBUG)
require 'json'
require 'rake'
require_relative('../apps/svn.rb')
require_relative('dir.rb')
require_relative('environment.rb')
require_relative('string.rb')
class Project < Hash
attr_accessor :filename,:env
# Build a Project. +value+ may be a url String, a Hash of attributes, or
# empty; +fullname+ (when non-empty) overrides the name derived from the
# url. Defaults are detected from the git/svn checkout in the current rake
# directory via Project.get_url.
def initialize value='',fullname=''
@filename=''
@env=Environment.new
self[:url]=Project.get_url
self[:fullname]=Project.get_fullname_from_url self[:url] if self[:url].length > 0
# Default timeout: five minutes (seconds).
self[:timeout]=60*5
if value.is_a?(String)
self[:url] = value if value.is_a?(String) && value.length > 0
self[:fullname] = Project.get_fullname_from_url self[:url]
elsif(value.is_a?(Hash))
# Hash input: copy every entry, normalising keys to symbols.
value.each{|k,v|self[k.to_sym]=v}
else
self[:fullname]=Project.get_fullname_from_url self[:url] if self[:url].length > 0
end
# Explicit fullname argument wins over everything derived above.
self[:fullname] = fullname if fullname.length > 0
end
# Set self[:timeout] in seconds. Accepts a Numeric (seconds) or a String
# such as '5m' (minutes) or '30s' (seconds); other strings leave the
# timeout unchanged.
def set_timeout value
  if value.is_a? Numeric
    self[:timeout] = value
  elsif value.is_a? String
    if value.include?('m')
      self[:timeout] = value.gsub('m','').strip.to_f * 60
    elsif value.include?('s')
      # The old code multiplied seconds by 60 (treating '30s' as 30 minutes)
      # and also crashed on Numeric input because Numeric has no #include?.
      self[:timeout] = value.gsub('s','').strip.to_f
    end
  end
end
# Detect the repository url of +directory+ (defaults to the rake invocation
# directory): git remote origin url for git checkouts, the svn url (via the
# project's Svn helper) for svn checkouts, '' otherwise.
def self.get_url directory=Rake.application.original_dir
url=''
Dir.chdir(directory) do#Rake.application.original_dir) do
url=`git config --get remote.origin.url`.strip if(File.exists?('.git'))
url= Svn.url.strip if(File.exists?('.svn'))
end
url
end
# Derive a project fullname from an absolute work +directory+ by stripping
# the configured work root.
def self.get_fullname directory
  # @env is an *instance* variable and is nil in class-method scope; the old
  # @env.wrk_dir raised NoMethodError whenever this method was called.
  directory.gsub(Environment.new.wrk_dir,'')
end
# Reduce a repository url to a path-like fullname, e.g.
# 'https://github.com/foo/bar.git' -> 'github/foo/bar'.
def self.get_fullname_from_url url
  name = ['http://', 'https://'].reduce(url) { |acc, prefix| acc.gsub(prefix, '') }
  name.gsub('.com/', '/').gsub('.git', '')
end
# Convenience accessors over the underlying hash and environment.
def url; self[:url]; end
def fullname; self[:fullname]; end
# Last path component of the fullname, e.g. 'bar' for 'github/foo/bar'.
def name
parts=fullname.split('/')
parts[parts.length-1]
end
# Absolute path of the project's working checkout.
def wrk_dir; "#{@env.wrk_dir}/#{self.fullname}"; end
# Path of the project's build directory for +tag+; without a tag the plain
# make_dir/<fullname> path is returned.
def make_dir tag=''
  # The old body computed the untagged path but discarded it (no return), so
  # every untagged caller got "<path>-" with a trailing dash instead.
  return "#{@env.make_dir}/#{self.fullname}" if tag.length==0
  "#{@env.make_dir}/#{self.fullname}-#{tag}"
end
# Run `git pull` inside the project's work directory, if it is a git checkout.
def pull
  return unless File.exists?(wrk_dir) && File.exists?("#{wrk_dir}/.git")
  Dir.chdir(wrk_dir) do
    puts "git pull (#{wrk_dir})"
    puts `git pull`
  end
end
# Clone the project's git repository into wrk_dir when it is not checked out yet.
def clone
  return if File.exists?(wrk_dir)
  return unless self[:url].include?('.git')
  cmd = Command.new({ :input => "git clone #{self[:url]} #{self.wrk_dir}", :quiet => true, :ignore_failure => true })
  cmd.execute
  @env.out cmd.summary
end
# Check the project out of subversion into wrk_dir when it is not present yet.
def checkout
  return if File.exists?(wrk_dir)
  return unless self[:url].include?('svn')
  cmd = Command.new({ :input => "svn checkout #{self.url} #{self.wrk_dir}", :quiet => true, :ignore_failure => true })
  cmd.execute
  @env.out cmd.summary
end
# Fetch the project if needed, then run `rake` (5 minute timeout) in wrk_dir.
def rake
  unless File.exists?(self.wrk_dir)
    clone
    checkout
  end
  return unless File.exists?(self.wrk_dir)
  Dir.chdir(self.wrk_dir) do
    cmd = Command.new({ :input => 'rake', :timeout => 300, :ignore_failure => true })
    cmd.execute
    @env.out cmd.summary
  end
end
# Return the project's most recent git tag, read from a cached clone under
# @env.make_dir. When +update+ is true the cache is refreshed first
# (git pull, or a fresh clone when missing). Returns '' on any failure.
def latest_tag update=false
makedir="#{@env.make_dir}/#{self.fullname}"
FileUtils.mkdir_p(File.dirname(makedir)) if !File.exists?(File.dirname(makedir))
if(File.exists?(makedir))
Dir.chdir(makedir) do
Command.exit_code('git pull') if update
end
else
if(update)
clone=Command.new("git clone #{self.url} #{makedir}")
clone[:quiet]=true
clone[:ignore_failure]=true
clone.execute
end
end
if(File.exists?(makedir))
Dir.chdir(makedir) do
# Git.latest_tag may raise (e.g. a repository without tags); swallow the
# error and fall through to the '' default below.
begin
return Git.latest_tag
rescue
end
end
end
''
end
# Collect the JSON log files recorded for this project under @env.log_dir.
# When +tags+ is given, only filenames containing every tag are returned.
def log_filenames tags=nil
  tags ||= []
  names = []
  Dir.chdir(@env.log_dir) do
    dotname = fullname.gsub('/', '.')
    Dir.glob("#{dotname}*.json").each do |f|
      keep = tags.length == 0 || tags.all? { |tag| f.include?(tag) }
      names << "#{@env.log_dir}/#{f}" if keep
    end
  end
  names
end
# Load every recorded Command for this project (optionally filtered by +tags+)
# from its JSON log file.
def command_history tags=nil
  log_filenames(tags).map { |logfile| Command.new(JSON.parse(IO.read(logfile))) }
end
# True when the up-to-date marker file is newer than every file in the
# current directory, i.e. nothing changed since the last recorded work run.
# Only meaningful when rake was invoked from this project's wrk_dir.
def work_up_to_date?
if wrk_dir == Rake.application.original_dir
logfile=get_logfile ['up2date']
if File.exists? logfile
last_work_time=File.mtime(logfile)
# Dir.get_latest_mtime is a project extension (see dir.rb).
last_file_changed=Dir.get_latest_mtime Rake.application.original_dir
if last_work_time > last_file_changed
# Register the marker with rake's CLEAN list so `rake clean` removes it.
CLEAN.include logfile
return true
else
# Stale marker: a source file changed after it was written.
File.delete(logfile)
end
end
else
puts "wrk_dir does not match Rake.application.original_dir" if @env.debug?
end
false
end
# Touch the up-to-date marker file so work_up_to_date? can short-circuit later.
def mark_work_up_to_date
  if wrk_dir == Rake.application.original_dir
    File.open(get_logfile(['up2date']), 'w') { |f| f.write(' ') }
  else
    puts "wrk_dir does not match Rake.application.original_dir" if @env.debug?
  end
end
# Build the log-file path for this project; +tags+ may be a String or an
# Array of tags joined with '.'. Slashes in the name part become dots.
def get_logfile tags
  tagstring = case tags
              when String then tags
              when Array  then tags.join('.')
              else ''
              end
  name = "#{self.fullname}.#{tagstring}.json".gsub('/', '.')
  "#{@env.log_dir}/#{name}"
end
# Print a one-line status for the project: "? name" when no history exists,
# "  name" when all recorded commands succeeded, "X name" (red when colour
# output is enabled) when any recorded command failed.
def list
history=command_history
if(history.length==0)
@env.out "? #{fullname}"
else
status=0
history.each{|c|
status=c.exit_code if c.exit_code != 0
}
if(status==0)
@env.out "  #{fullname}"
else
if(@env.colorize?)
require 'ansi/code'
@env.out ANSI.red + ANSI.bright + "X #{fullname}" + ANSI.reset
else
@env.out "X #{fullname}"
end
end
end
end
# Print +message+ wrapped in brackets, blue/bright when colour output is on.
def out_brackets message
  unless @env.colorize?
    @env.out "[#{message}]"
    return
  end
  require 'ansi/code'
  @env.out '[' + ANSI.blue + ANSI.bright + message + ANSI.reset + ']'
end
# Print +message+, in bright cyan when colour output is enabled.
def out_cyan message
  unless @env.colorize?
    @env.out "#{message}"
    return
  end
  require 'ansi/code'
  @env.out ANSI.cyan + ANSI.bright + message + ANSI.reset
end
# Print a "name: value" pair; the value is bright white when colourized.
# Note the colourized path strips the value while the plain path does not —
# behaviour preserved from the original.
def out_property name,value
  unless @env.colorize?
    @env.out "#{name}: #{value}"
    return
  end
  require 'ansi/code'
  @env.out "#{name}: " + ANSI.white + ANSI.bold + value.to_s.strip + ANSI.reset
end
#def info
# @env.out "Project #{name}"
# @env.out "#{'fullname'.fix(13)}: #{self.fullname}"
# @env.out "#{'url'.fix(13)}: #{self[:url]}"
# @env.out "#{'version'.fix(13)}: #{VERSION}" if defined? VERSION
#end
# Print a property sheet for the project (fullname, url, work/make status
# with details) and return a zero-exit 'info' Command.
# String#fix is a project extension (string.rb); presumably it pads the
# label to the given width — confirm in string.rb.
def info
infoCmd=Command.new({ :input => 'info', :exit_code => 0 })
#out_cyan '========================================================='
#out_cyan fullname
out_property "fullname".fix(15), fullname
out_property "url".fix(15), url
wrk_history=command_history ['work']
out_property "work status".fix(15), "?" if wrk_history.length == 0
out_property "work status".fix(15), wrk_history[0].summary if wrk_history.length > 0
if(wrk_history.length > 0)
@env.out wrk_history[0].info
end
make_history=command_history ['make', latest_tag]
out_property "make status".fix(15),"?" if make_history.length == 0
out_property "make status".fix(15), make_history[0].summary if make_history.length > 0
if(make_history.length >0)
@env.out make_history[0].info
end
infoCmd
end
# Delete the project's work and make directories; returns a zero-exit
# 'clobber' Command. Dir.remove is a project extension (dir.rb);
# presumably the second argument forces recursive removal — confirm there.
def clobber
clobberCmd=Command.new('clobber')
clobberCmd[:exit_code]=0
if(File.exists?(wrk_dir))
Dir.remove wrk_dir,true
@env.out "removed #{wrk_dir}"
end
if(File.exists?(make_dir))
Dir.remove make_dir,true
@env.out "removed #{make_dir}"
end
clobberCmd
end
# Run `rake default` in the work directory when any file changed since the
# last recorded run; otherwise replay the saved result from the log file.
# Returns the Command (fresh or loaded), or nil when wrk_dir is missing.
def work
clone
checkout
logfile=get_logfile ['work']
if(File.exists?(wrk_dir))
rake_default=Command.new({:input =>'rake default',:quiet => true,:ignore_failure => true})
# Re-run only when no previous run is recorded or the tree changed since.
if(last_work_mtime.nil? || last_work_mtime < Environment.get_latest_mtime(wrk_dir))
Dir.chdir(wrk_dir) do
@env.out fullname
if(!File.exists?'rakefile.rb')
# Synthesize a failed Command when there is nothing to run.
rake_default[:exit_code]=1
rake_default[:error]="rakefile.rb not found."
rake_default[:start_time]=Time.now
rake_default[:end_time]=Time.now
else
#rake_default[:timeout] = self[:timeout]
rake_default.execute
end
rake_default.save logfile
update_status
@env.out rake_default.summary true
return rake_default
end
else
# Up to date: reuse the stored result; print only failures unless
# show_success is enabled.
if(File.exists?(logfile))
rake_default.open logfile
@env.out rake_default.summary true if(rake_default[:exit_code] != 0 || @env.show_success?)
end
end
rake_default
end
end
# Build a tagged snapshot: clone into a throwaway make directory, check out
# +tag+, run `rake default`, record the result in a per-tag log file and
# remove the snapshot again. A cached log short-circuits the whole build.
def make tag=''
tag=latest_tag true if tag.length==0
#return if tag.length==0
raise 'no tag specified' if tag.length==0
rake_default=Command.new({:input => 'rake default',:quiet => true,:ignore_failure => true})
logfile=get_logfile ['make',tag]
if(File.exists?(logfile))
# A log for this tag exists: replay it instead of rebuilding.
rake_default.open logfile
@env.out rake_default.summary true if(rake_default[:exit_code] != 0) || @env.show_success?
rake_default
else
makedir=make_dir tag
FileUtils.mkdir_p(File.dirname(makedir)) if !File.exists? File.dirname(makedir)
if(self[:url].include?('.git'))
if(!File.exists?(makedir))
clone=Command.new({:input=>"git clone #{self[:url]} #{makedir}",:quiet=>true})
clone.execute
end
end
if(File.exists?(makedir))
Dir.chdir(makedir) do
checkout=Command.new({:input=>"git checkout #{tag}",:quiet=>true})
checkout.execute
# Drop the git metadata so the snapshot is a plain source tree.
FileUtils.rm_r '.git'
if(!File.exists?'rakefile.rb')
rake_default[:exit_code]=1
rake_default[:error]="rakefile.rb not found."
rake_default[:start_time]=Time.now
rake_default[:end_time]=Time.now
else
#rake_default[:timeout] = self[:timeout]
rake_default.execute
end
rake_default.save logfile
update_status
@env.out rake_default.summary true
rake_default
end
else
puts "Project make make_dir #{makedir} does not exist." if @env.debug?
end
# Best-effort cleanup of the snapshot directory.
begin
FileUtils.rm_r makedir
rescue
end
rake_default
end
end
# Modification time of the 'work' log file, or nil when it does not exist.
def last_work_mtime
  logfile = get_logfile ['work']
  File.exists?(logfile) ? File.mtime(logfile) : nil
end
# Recompute and write <user>@<machine>.status.json for this project.
# Status values: '0' = last run succeeded, 'X' = last run failed,
# '?' = no make log recorded.
def update_status
  status_logfile="#{@env.root_dir}/log/#{self.fullname}/#{@env.user}@#{@env.machine}.status.json"
  # Literal hash: the old Hash.new({'status'=>'?'}) only installed a shared
  # *default object*, which never appears in to_json output; seed the key
  # explicitly instead.
  status={'status'=>'?'}
  wrk_logfile="#{@env.root_dir}/log/#{self.fullname}/#{@env.user}@#{@env.machine}.json"
  if(File.exists?(wrk_logfile))
    rake_default=Command.new(JSON.parse(IO.read(wrk_logfile)))
    status[:work_logfile]=wrk_logfile
    status['status']='0'
    status['status']='X' if rake_default[:exit_code] != 0
  end
  make_logfile="#{@env.root_dir}/log/#{self.fullname}/#{latest_tag}/#{@env.user}@#{@env.machine}.json"
  if(File.exists?(make_logfile))
    rake_default=Command.new(JSON.parse(IO.read(make_logfile)))
    status[:make_logfile]=make_logfile
    status['status']='0'
    status['status']='X' if rake_default[:exit_code] != 0
  else
    # No make log: reset to unknown even if the work log set a status.
    status['status']='?'
  end
  FileUtils.mkdir_p(File.dirname(status_logfile)) if !File.exists?(File.dirname(status_logfile))
  File.open(status_logfile,'w'){|f|f.write(status.to_json)}
end
# Read the cached status character for this project ('0', 'X' or '?'),
# regenerating the status file first when it is missing.
def status
  status_logfile="#{@env.root_dir}/log/#{self.fullname}/#{@env.user}@#{@env.machine}.status.json"
  update_status unless File.exists? status_logfile
  if File.exists?(status_logfile)
    parsed = JSON.parse(IO.read(status_logfile))
    return parsed['status'] if parsed.has_key?('status')
  end
  '?'
end
# Placeholder: reporting is not implemented yet.
def report
end
# Bring the work directory up to date: clone/checkout when missing, then run
# `git pull` (git checkout) or `svn update` (svn checkout) inside it.
# Returns the executed Command, or a failed Command (exit_code 1) when the
# work directory is missing or uses neither git nor svn.
def update
  clone
  checkout
  if(File.exists?(wrk_dir))
    Dir.chdir(wrk_dir) do
      if(File.exists?('.git'))
        pull=Command.execute(Command.new({:input => 'git pull', :quiet => true, :ignore_failure => true}))
        @env.out pull.summary
        return pull
      end
      # Was File.exists?('svn'): subversion working copies are marked by a
      # '.svn' directory (as Project.get_url checks), so this branch could
      # never fire and svn projects were never updated.
      if(File.exists?('.svn'))
        updateCmd=Command.execute(Command.new({:input => 'svn update', :quiet => true, :ignore_failure => true}))
        @env.out updateCmd.summary
        return updateCmd
      end
    end
  end
  return Command.new({:exit_code => 1})
end
# List the project's git tags, cloning the repository first when needed.
def tags
  tags=Array.new
  if !File.exists? wrk_dir
    # Double quotes: the old single-quoted string never interpolated, so the
    # clone command was literally "git clone #{self[:url]} #{wrk_dir}".
    clone=Command.new({:input=>"git clone #{self[:url]} #{wrk_dir}",:quiet=>true})
    clone.execute
  end
  Dir.chdir(wrk_dir) do
    # Split on a real newline; the old single-quoted '\n' split on a literal
    # backslash-n and returned the whole output as one element.
    Command.output('git tag').split("\n").each{|line|
      tag=line.strip
      # Keep non-empty tags; the old `tag.length < 0` could never be true,
      # so the method always returned an empty array.
      tags << tag if tag.length > 0
    }
  end
  tags
end
end
|
require 'rest-client'
require 'json'
module Bcnd
  # Minimal client for the quay.io v1 REST API: poll automated builds and
  # move docker tags.
  class QuayIo
    # Thin wrapper around RestClient that adds the base url, bearer token
    # and JSON handling.
    class Connection
      BASE_URL = 'https://quay.io/api/v1'
      attr_accessor :token
      def initialize(token)
        @token = token
      end
      # Perform a request and parse the JSON response body.
      # NOTE: RestClient treats the special :params key inside headers as
      # query-string parameters, so query_params rides along in headers.
      def request(method: :get, path:, body: {}, query_params: {})
        response = RestClient::Request.execute(
          method: method,
          url: "#{BASE_URL}#{path}",
          payload: body.empty? ? nil : body.to_json,
          headers: {
            "Authorization" => "Bearer #{token}",
            "Content-Type" => "application/json",
            params: query_params
          }
        )
        JSON.load(response.to_s)
      end
      def get(path:, body: {}, query_params: {})
        request(method: :get, path: path, body: body, query_params: query_params)
      end
      def put(path:, body: {}, query_params: {})
        request(method: :put, path: path, body: body, query_params: query_params)
      end
      def post(path:, body: {}, query_params: {})
        request(method: :post, path: path, body: body, query_params: query_params)
      end
      def delete(path:, body: {}, query_params: {})
        request(method: :delete, path: path, body: body, query_params: query_params)
      end
    end
    attr_accessor :conn
    def initialize(token)
      @conn = Connection.new(token)
    end
    # All automated builds triggered by the given commit sha.
    def automated_builds_for(repo:, git_sha:)
      builds = conn.get(path: "/repository/#{repo}/build/")["builds"]
      builds.select do |b|
        b["trigger_metadata"]["commit"] == git_sha.downcase
      end
    end
    # :failed / :finished / :building for the builds of +git_sha+.
    def automated_build_status(repo:, git_sha:)
      builds = automated_builds_for(repo: repo, git_sha: git_sha)
      phases = builds.map { |b| b["phase"] }
      if !phases.include?("complete") && phases.include?("error")
        return :failed
      end
      if phases.include?("complete")
        return :finished
      else
        return :building
      end
    end
    # Poll until the build for +git_sha+ completes. Raises when the build
    # fails or when +timeout+ seconds elapse. (The old code accepted the
    # timeout keyword but never used it, so a stuck build polled forever.)
    def wait_for_automated_build(repo:, git_sha:, timeout: 3600)
      deadline = Time.now + timeout
      loop do
        status = automated_build_status(repo: repo, git_sha: git_sha)
        case status
        when :failed
          raise "The docker build failed"
        when :finished
          puts ""
          return
        when :building
          raise "Timed out waiting for the docker build" if Time.now > deadline
          print '.'
          sleep 5
        end
      end
    end
    # Docker image id currently carrying +tag+ (the live tag entry is the
    # one without an end_ts).
    def docker_image_id_for_tag(repo:, tag:)
      resp = conn.get(
        path: "/repository/#{repo}/tag/",
        query_params: {
          "specificTag" => tag
        }
      )
      # Distinct block variable: the original shadowed the +tag+ parameter.
      resp["tags"].find { |t| t["end_ts"].nil? }["docker_image_id"]
    end
    # Point +tag+ at +image_id+ in the given repository.
    def put_tag(repo:, image_id:, tag:)
      conn.put(
        path: "/repository/#{repo}/tag/#{tag}",
        body: {
          image: image_id
        }
      )
    end
  end
end
Increase build list limit from default (5) to 20
require 'rest-client'
require 'json'
module Bcnd
  # Minimal client for the quay.io v1 REST API: poll automated builds and
  # move docker tags.
  class QuayIo
    # Thin wrapper around RestClient that adds the base url, bearer token
    # and JSON handling.
    class Connection
      BASE_URL = 'https://quay.io/api/v1'
      attr_accessor :token
      def initialize(token)
        @token = token
      end
      # Perform a request and parse the JSON response body.
      # NOTE: RestClient treats the special :params key inside headers as
      # query-string parameters, so query_params rides along in headers.
      def request(method: :get, path:, body: {}, query_params: {})
        response = RestClient::Request.execute(
          method: method,
          url: "#{BASE_URL}#{path}",
          payload: body.empty? ? nil : body.to_json,
          headers: {
            "Authorization" => "Bearer #{token}",
            "Content-Type" => "application/json",
            params: query_params
          }
        )
        JSON.load(response.to_s)
      end
      def get(path:, body: {}, query_params: {})
        request(method: :get, path: path, body: body, query_params: query_params)
      end
      def put(path:, body: {}, query_params: {})
        request(method: :put, path: path, body: body, query_params: query_params)
      end
      def post(path:, body: {}, query_params: {})
        request(method: :post, path: path, body: body, query_params: query_params)
      end
      def delete(path:, body: {}, query_params: {})
        request(method: :delete, path: path, body: body, query_params: query_params)
      end
    end
    attr_accessor :conn
    def initialize(token)
      @conn = Connection.new(token)
    end
    # All automated builds triggered by the given commit sha. The limit is
    # raised from the API default (5) to 20 so recent builds are not missed.
    def automated_builds_for(repo:, git_sha:)
      builds = conn.get(path: "/repository/#{repo}/build/?limit=20")["builds"]
      builds.select do |b|
        b["trigger_metadata"]["commit"] == git_sha.downcase
      end
    end
    # :failed / :finished / :building for the builds of +git_sha+.
    def automated_build_status(repo:, git_sha:)
      builds = automated_builds_for(repo: repo, git_sha: git_sha)
      phases = builds.map { |b| b["phase"] }
      if !phases.include?("complete") && phases.include?("error")
        return :failed
      end
      if phases.include?("complete")
        return :finished
      else
        return :building
      end
    end
    # Poll until the build for +git_sha+ completes. Raises when the build
    # fails or when +timeout+ seconds elapse. (The old code accepted the
    # timeout keyword but never used it, so a stuck build polled forever.)
    def wait_for_automated_build(repo:, git_sha:, timeout: 3600)
      deadline = Time.now + timeout
      loop do
        status = automated_build_status(repo: repo, git_sha: git_sha)
        case status
        when :failed
          raise "The docker build failed"
        when :finished
          puts ""
          return
        when :building
          raise "Timed out waiting for the docker build" if Time.now > deadline
          print '.'
          sleep 5
        end
      end
    end
    # Docker image id currently carrying +tag+ (the live tag entry is the
    # one without an end_ts).
    def docker_image_id_for_tag(repo:, tag:)
      resp = conn.get(
        path: "/repository/#{repo}/tag/",
        query_params: {
          "specificTag" => tag
        }
      )
      # Distinct block variable: the original shadowed the +tag+ parameter.
      resp["tags"].find { |t| t["end_ts"].nil? }["docker_image_id"]
    end
    # Point +tag+ at +image_id+ in the given repository.
    def put_tag(repo:, image_id:, tag:)
      conn.put(
        path: "/repository/#{repo}/tag/#{tag}",
        body: {
          image: image_id
        }
      )
    end
  end
end
|
# taken from irb
require "readline"
class Pry
# Implements tab completion for Readline in Pry
module InputCompleter
if Readline.respond_to?("basic_word_break_characters=")
Readline.basic_word_break_characters= " \t\n\"\\'`><=;|&{("
end
Readline.completion_append_character = nil
ReservedWords = [
"BEGIN", "END",
"alias", "and",
"begin", "break",
"case", "class",
"def", "defined", "do",
"else", "elsif", "end", "ensure",
"false", "for",
"if", "in",
"module",
"next", "nil", "not",
"or",
"redo", "rescue", "retry", "return",
"self", "super",
"then", "true",
"undef", "unless", "until",
"when", "while",
"yield" ]
Operators = [
"%", "&", "*", "**", "+", "-", "/",
"<", "<<", "<=", "<=>", "==", "===", "=~", ">", ">=", ">>",
"[]", "[]=", "^", "!", "!=", "!~"
]
# Return a new completion proc for use by Readline.
# @param [Binding] target The current binding context.
# @param [Array<String>] commands The array of Pry commands.
# Return a new completion proc for use by Readline. The proc dispatches on
# the syntactic shape of +input+ (regexp/array/hash literal, symbol,
# constant, numeric, global, dotted expression) and evaluates in +target+
# to collect candidate method and constant names.
# NOTE(review): several branches interpolate user text into /^#{...}/
# without Regexp.quote; inputs containing regexp metacharacters could raise
# RegexpError — confirm upstream behaviour before relying on this.
def self.build_completion_proc(target, commands=[""])
proc do |input|
bind = target
case input
when /^(\/[^\/]*\/)\.([^.]*)$/
# Regexp
receiver = $1
message = Regexp.quote($2)
candidates = Regexp.instance_methods.collect{|m| m.to_s}
select_message(receiver, message, candidates)
when /^([^\]]*\])\.([^.]*)$/
# Array
receiver = $1
message = Regexp.quote($2)
candidates = Array.instance_methods.collect{|m| m.to_s}
select_message(receiver, message, candidates)
when /^([^\}]*\})\.([^.]*)$/
# Proc or Hash
receiver = $1
message = Regexp.quote($2)
candidates = Proc.instance_methods.collect{|m| m.to_s}
candidates |= Hash.instance_methods.collect{|m| m.to_s}
select_message(receiver, message, candidates)
when /^(:[^:.]*)$/
# Symbol
if Symbol.respond_to?(:all_symbols)
sym = $1
candidates = Symbol.all_symbols.collect{|s| ":" + s.id2name}
candidates.grep(/^#{sym}/)
else
[]
end
when /^::([A-Z][^:\.\(]*)$/
# Absolute Constant or class methods
receiver = $1
candidates = Object.constants.collect{|m| m.to_s}
candidates.grep(/^#{receiver}/).collect{|e| "::" + e}
when /^([A-Z].*)::([^:.]*)$/
# Constant or class methods
receiver = $1
message = Regexp.quote($2)
begin
candidates = eval("#{receiver}.constants.collect{|m| m.to_s}", bind)
candidates |= eval("#{receiver}.methods.collect{|m| m.to_s}", bind)
rescue RescuableException
candidates = []
end
candidates.grep(/^#{message}/).collect{|e| receiver + "::" + e}
when /^(:[^:.]+)\.([^.]*)$/
# Symbol
receiver = $1
message = Regexp.quote($2)
candidates = Symbol.instance_methods.collect{|m| m.to_s}
select_message(receiver, message, candidates)
when /^(-?(0[dbo])?[0-9_]+(\.[0-9_]+)?([eE]-?[0-9]+)?)\.([^.]*)$/
# Numeric
receiver = $1
message = Regexp.quote($5)
begin
candidates = eval(receiver, bind).methods.collect{|m| m.to_s}
rescue RescuableException
candidates = []
end
select_message(receiver, message, candidates)
when /^(-?0x[0-9a-fA-F_]+)\.([^.]*)$/
# Numeric(0xFFFF)
receiver = $1
message = Regexp.quote($2)
begin
candidates = eval(receiver, bind).methods.collect{|m| m.to_s}
rescue RescuableException
candidates = []
end
select_message(receiver, message, candidates)
when /^(\$[^.]*)$/
# Global variable
regmessage = Regexp.new(Regexp.quote($1))
candidates = global_variables.collect{|m| m.to_s}.grep(regmessage)
when /^([^."].*)\.([^.]*)$/
# variable
receiver = $1
message = Regexp.quote($2)
gv = eval("global_variables", bind).collect{|m| m.to_s}
lv = eval("local_variables", bind).collect{|m| m.to_s}
cv = eval("self.class.constants", bind).collect{|m| m.to_s}
if (gv | lv | cv).include?(receiver) or /^[A-Z]/ =~ receiver && /\./ !~ receiver
# foo.func and foo is local var. OR
# Foo::Bar.func
begin
candidates = eval("#{receiver}.methods", bind).collect{|m| m.to_s}
rescue RescuableException
candidates = []
end
else
# func1.func2: receiver is unknown, so offer instance methods of every
# loaded module except irb's own internals.
candidates = []
ObjectSpace.each_object(Module){|m|
begin
name = m.name.to_s
rescue RescuableException
name = ""
end
next if name != "IRB::Context" and
/^(IRB|SLex|RubyLex|RubyToken)/ =~ name
candidates.concat m.instance_methods(false).collect{|x| x.to_s}
}
candidates.sort!
candidates.uniq!
end
select_message(receiver, message, candidates)
when /^\.([^.]*)$/
# unknown(maybe String)
receiver = ""
message = Regexp.quote($1)
candidates = String.instance_methods(true).collect{|m| m.to_s}
select_message(receiver, message, candidates)
else
# Bare word: complete from everything visible in the binding plus
# reserved words and Pry commands.
candidates = eval(
"methods | private_methods | local_variables | " \
"self.class.constants | instance_variables",
bind
).collect{|m| m.to_s}
if eval("respond_to?(:class_variables)", bind)
candidates += eval("class_variables", bind).collect { |m| m.to_s }
end
(candidates|ReservedWords|commands).grep(/^#{Regexp.quote(input)}/)
end
end
end
# Build "receiver.method" completion strings for every candidate matching
# +message+. Digit-leading and operator candidates deliberately map to nil
# (they cannot be completed with dot syntax), as in the irb original.
def self.select_message(receiver, message, candidates)
  candidates.grep(/^#{message}/).collect do |e|
    if e =~ /^[a-zA-Z_]/
      receiver + "." + e
    elsif e =~ /^[0-9]/ || Operators.include?(e)
      # suppressed: would need "receiver op arg" form
      nil
    end
  end
end
end
end
Completion fix for JRuby.
JRuby doesn't always provide the method `#instance_methods()` on objects. By
checking that this method exists before using it, the completion system
no longer breaks when used on JRuby.
Signed-off-by: Yorick Peterse <82349cb6397bb932b4bf3561b4ea2fad50571f50@gmail.com>
# taken from irb
require "readline"
class Pry
# Implements tab completion for Readline in Pry
module InputCompleter
if Readline.respond_to?("basic_word_break_characters=")
Readline.basic_word_break_characters= " \t\n\"\\'`><=;|&{("
end
Readline.completion_append_character = nil
ReservedWords = [
"BEGIN", "END",
"alias", "and",
"begin", "break",
"case", "class",
"def", "defined", "do",
"else", "elsif", "end", "ensure",
"false", "for",
"if", "in",
"module",
"next", "nil", "not",
"or",
"redo", "rescue", "retry", "return",
"self", "super",
"then", "true",
"undef", "unless", "until",
"when", "while",
"yield" ]
Operators = [
"%", "&", "*", "**", "+", "-", "/",
"<", "<<", "<=", "<=>", "==", "===", "=~", ">", ">=", ">>",
"[]", "[]=", "^", "!", "!=", "!~"
]
# Return a new completion proc for use by Readline.
# @param [Binding] target The current binding context.
# @param [Array<String>] commands The array of Pry commands.
# Return a new completion proc for use by Readline. The proc dispatches on
# the syntactic shape of +input+ (regexp/array/hash literal, symbol,
# constant, numeric, global, dotted expression) and evaluates in +target+
# to collect candidate method and constant names.
# NOTE(review): several branches interpolate user text into /^#{...}/
# without Regexp.quote; inputs containing regexp metacharacters could raise
# RegexpError — confirm upstream behaviour before relying on this.
def self.build_completion_proc(target, commands=[""])
proc do |input|
bind = target
case input
when /^(\/[^\/]*\/)\.([^.]*)$/
# Regexp
receiver = $1
message = Regexp.quote($2)
candidates = Regexp.instance_methods.collect{|m| m.to_s}
select_message(receiver, message, candidates)
when /^([^\]]*\])\.([^.]*)$/
# Array
receiver = $1
message = Regexp.quote($2)
candidates = Array.instance_methods.collect{|m| m.to_s}
select_message(receiver, message, candidates)
when /^([^\}]*\})\.([^.]*)$/
# Proc or Hash
receiver = $1
message = Regexp.quote($2)
candidates = Proc.instance_methods.collect{|m| m.to_s}
candidates |= Hash.instance_methods.collect{|m| m.to_s}
select_message(receiver, message, candidates)
when /^(:[^:.]*)$/
# Symbol
if Symbol.respond_to?(:all_symbols)
sym = $1
candidates = Symbol.all_symbols.collect{|s| ":" + s.id2name}
candidates.grep(/^#{sym}/)
else
[]
end
when /^::([A-Z][^:\.\(]*)$/
# Absolute Constant or class methods
receiver = $1
candidates = Object.constants.collect{|m| m.to_s}
candidates.grep(/^#{receiver}/).collect{|e| "::" + e}
when /^([A-Z].*)::([^:.]*)$/
# Constant or class methods
receiver = $1
message = Regexp.quote($2)
begin
candidates = eval("#{receiver}.constants.collect{|m| m.to_s}", bind)
candidates |= eval("#{receiver}.methods.collect{|m| m.to_s}", bind)
rescue RescuableException
candidates = []
end
candidates.grep(/^#{message}/).collect{|e| receiver + "::" + e}
when /^(:[^:.]+)\.([^.]*)$/
# Symbol
receiver = $1
message = Regexp.quote($2)
candidates = Symbol.instance_methods.collect{|m| m.to_s}
select_message(receiver, message, candidates)
when /^(-?(0[dbo])?[0-9_]+(\.[0-9_]+)?([eE]-?[0-9]+)?)\.([^.]*)$/
# Numeric
receiver = $1
message = Regexp.quote($5)
begin
candidates = eval(receiver, bind).methods.collect{|m| m.to_s}
rescue RescuableException
candidates = []
end
select_message(receiver, message, candidates)
when /^(-?0x[0-9a-fA-F_]+)\.([^.]*)$/
# Numeric(0xFFFF)
receiver = $1
message = Regexp.quote($2)
begin
candidates = eval(receiver, bind).methods.collect{|m| m.to_s}
rescue RescuableException
candidates = []
end
select_message(receiver, message, candidates)
when /^(\$[^.]*)$/
# Global variable
regmessage = Regexp.new(Regexp.quote($1))
candidates = global_variables.collect{|m| m.to_s}.grep(regmessage)
when /^([^."].*)\.([^.]*)$/
# variable
receiver = $1
message = Regexp.quote($2)
gv = eval("global_variables", bind).collect{|m| m.to_s}
lv = eval("local_variables", bind).collect{|m| m.to_s}
cv = eval("self.class.constants", bind).collect{|m| m.to_s}
if (gv | lv | cv).include?(receiver) or /^[A-Z]/ =~ receiver && /\./ !~ receiver
# foo.func and foo is local var. OR
# Foo::Bar.func
begin
candidates = eval("#{receiver}.methods", bind).collect{|m| m.to_s}
rescue RescuableException
candidates = []
end
else
# func1.func2: receiver is unknown, so offer instance methods of every
# loaded module except irb's own internals.
candidates = []
ObjectSpace.each_object(Module){|m|
begin
name = m.name.to_s
rescue RescuableException
name = ""
end
next if name != "IRB::Context" and
/^(IRB|SLex|RubyLex|RubyToken)/ =~ name
# jruby doesn't always provide #instance_methods() on each
# object.
if m.respond_to?(:instance_methods)
candidates.concat m.instance_methods(false).collect{|x| x.to_s}
end
}
candidates.sort!
candidates.uniq!
end
select_message(receiver, message, candidates)
when /^\.([^.]*)$/
# unknown(maybe String)
receiver = ""
message = Regexp.quote($1)
candidates = String.instance_methods(true).collect{|m| m.to_s}
select_message(receiver, message, candidates)
else
# Bare word: complete from everything visible in the binding plus
# reserved words and Pry commands.
candidates = eval(
"methods | private_methods | local_variables | " \
"self.class.constants | instance_variables",
bind
).collect{|m| m.to_s}
if eval("respond_to?(:class_variables)", bind)
candidates += eval("class_variables", bind).collect { |m| m.to_s }
end
(candidates|ReservedWords|commands).grep(/^#{Regexp.quote(input)}/)
end
end
end
def self.select_message(receiver, message, candidates)
candidates.grep(/^#{message}/).collect do |e|
case e
when /^[a-zA-Z_]/
receiver + "." + e
when /^[0-9]/
when *Operators
#receiver + " " + e
end
end
end
end
end
|
require "puppet/moddeps/version"
require 'rubygems'
require 'json'
module Puppet
  module Moddeps
    # Default location where Puppet modules are installed.
    @@default_module_path = '/etc/puppet/modules'

    # Install every dependency declared in a module's metadata.json.
    #
    # Reads <module path>/<puppet_module>/metadata.json and runs
    # `puppet module install <dep>` for each listed dependency.
    #
    # @param puppet_module [String] name of an already-installed module
    def self.installModuleDependencies(puppet_module)
      @puppet_module = puppet_module
      @metadata = File.read("#{@@default_module_path}/#{@puppet_module}/metadata.json")
      @data = JSON.parse(@metadata)
      @data['dependencies'].each do |dep|
        @note = 'Installing dependency'
        # Forge dependencies are named "author/name"; the CLI wants "author-name".
        @depname = dep["name"].sub '/', '-'
        @cmd = "puppet module install #{@depname}"
        puts @cmd
        # Use system, not exec: exec replaces the current process, so only
        # the first dependency would ever be installed and the loop (and
        # the caller) would never continue.
        system(@cmd)
      end
    end
  end
end
Replaced double quotes with singles and added a blank line at the end of the file
require 'puppet/moddeps/version'
require 'rubygems'
require 'json'
module Puppet
  module Moddeps
    # Default location where Puppet modules are installed.
    @@default_module_path = '/etc/puppet/modules'

    # Install every dependency declared in a module's metadata.json.
    #
    # Reads <module path>/<puppet_module>/metadata.json and runs
    # `puppet module install <dep>` for each listed dependency.
    #
    # @param puppet_module [String] name of an already-installed module
    def self.installModuleDependencies(puppet_module)
      @puppet_module = puppet_module
      @metadata = File.read("#{@@default_module_path}/#{@puppet_module}/metadata.json")
      @data = JSON.parse(@metadata)
      @data['dependencies'].each do |dep|
        @note = 'Installing dependency'
        # Forge dependencies are named "author/name"; the CLI wants "author-name".
        @depname = dep["name"].sub '/', '-'
        @cmd = "puppet module install #{@depname}"
        puts @cmd
        # Use system, not exec: exec replaces the current process, so only
        # the first dependency would ever be installed and the loop (and
        # the caller) would never continue.
        system(@cmd)
      end
    end
  end
end
|
#--
# Copyright (c) 2011 SUSE LINUX Products GmbH
#
# Author: Duncan Mac-Vicar P. <dmacvicar@suse.de>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require 'inifile'
require 'uri'
require 'xmlrpc/client'
require 'bicho/bug'
require 'bicho/query'
require 'bicho/logging'
# Helper IO device that forwards to the logger, we use it
# to debug XMLRPC by monkey patching it
#
# @private
class Bicho::LoggerIODevice
  # IO-like append operator: every chunk written to this device is routed
  # to the debug level of the shared Bicho logger.
  def <<(message)
    Bicho::Logging.logger.debug(message)
  end
end
# monkey patch XMLRPC
#
# @private
class XMLRPC::Client
  # Redirect Net::HTTP's wire-level debug output into the Bicho logger.
  def set_debug
    device = Bicho::LoggerIODevice.new
    @http.set_debug_output(device)
  end
end
module Bicho
  # Namespace where plugin classes register themselves.
  module Plugins
  end

  # Client to query bugzilla
  class Client
    include Bicho::Logging

    # @return [URI] the unmodified (by plugins) site url
    # @return [String] userid, available after login
    attr_reader :url, :userid

    # @param url [String, URI] base URL of the Bugzilla installation
    def initialize(url)
      # Work on a private copy so a caller-supplied URI object is never
      # mutated (setting .path below would otherwise leak back to the
      # caller and break e.g. dm-bugzilla-adapter).
      url = url.is_a?(URI) ? url.clone : URI.parse(url)
      # save the unmodified (by plugins) url
      @url = url.clone
      url.path = '/xmlrpc.cgi'
      # Scan plugins
      plugin_glob = File.join(File.dirname(__FILE__), 'plugins', '*.rb')
      Dir.glob(plugin_glob).each do |plugin|
        logger.debug("Loading file: #{plugin}")
        load plugin
      end
      # instantiate plugins
      ::Bicho::Plugins.constants.each do |cnt|
        pl_class = ::Bicho::Plugins.const_get(cnt)
        pl_instance = pl_class.new
        logger.debug("Loaded: #{pl_instance}")
        pl_instance.initialize_hook(url, logger)
      end
      @client = XMLRPC::Client.new_from_uri(url.to_s, nil, 900)
      @client.set_debug
      # User.login sets the credentials cookie for subsequent calls
      ret = @client.call("User.login", { 'login' => @client.user, 'password' => @client.password, 'remember' => 0 } )
      handle_faults(ret)
      @userid = ret['id']
    end

    # @return [String] the session cookie of the underlying XML-RPC client
    def cookie
      @client.cookie
    end

    # Log every fault entry reported in an XML-RPC response hash.
    def handle_faults(ret)
      if ret.has_key?('faults')
        ret['faults'].each do |fault|
          logger.error fault
        end
      end
    end

    # Search for a bug
    #
    # +query+ has to be either a +Query+ object or
    # a +String+ that will be searched in the summary
    # of the bugs.
    #
    def search_bugs(query)
      # allow plain strings to be passed, interpretting them
      query = Query.new.summary(query) if query.is_a?(String)
      ret = @client.call("Bug.search", query.query_map)
      handle_faults(ret)
      bugs = []
      ret['bugs'].each do |bug_data|
        bugs << Bug.new(self, bug_data)
      end
      bugs
    end

    # Retrieves one or more bugs by id
    # @return [Array<Bug>]
    def get_bugs(*ids)
      params = Hash.new
      params[:ids] = ids.collect(&:to_s)
      bugs = []
      ret = @client.call("Bug.get", params)
      handle_faults(ret)
      ret['bugs'].each do |bug_data|
        bugs << Bug.new(self, bug_data)
      end
      bugs
    end
  end
end
Don't modify the input url parameter (I broke dm-bugzilla-adapter)
#--
# Copyright (c) 2011 SUSE LINUX Products GmbH
#
# Author: Duncan Mac-Vicar P. <dmacvicar@suse.de>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require 'inifile'
require 'uri'
require 'xmlrpc/client'
require 'bicho/bug'
require 'bicho/query'
require 'bicho/logging'
# Helper IO device that forwards to the logger, we use it
# to debug XMLRPC by monkey patching it
#
# @private
class Bicho::LoggerIODevice
def <<(msg)
Bicho::Logging.logger.debug(msg)
end
end
# monkey patch XMLRPC
#
# @private
class XMLRPC::Client
def set_debug
@http.set_debug_output(Bicho::LoggerIODevice.new);
end
end
module Bicho
  # Namespace where plugin classes register themselves.
  module Plugins
  end

  # Client to query bugzilla
  class Client
    include Bicho::Logging

    # @return [URI] the unmodified (by plugins) site url
    # @return [String] userid, available after login
    attr_reader :url, :userid

    # @param url [String, URI] base URL of the Bugzilla installation
    def initialize(url)
      # Don't modify the original url
      url = url.is_a?(URI) ? url.clone : URI.parse(url)
      # save the unmodified (by plugins) url in @url so that the
      # attr_reader :url above actually returns it (assigning a
      # differently named ivar would leave #url returning nil)
      @url = url.clone
      url.path = '/xmlrpc.cgi'
      # Scan plugins
      plugin_glob = File.join(File.dirname(__FILE__), 'plugins', '*.rb')
      Dir.glob(plugin_glob).each do |plugin|
        logger.debug("Loading file: #{plugin}")
        load plugin
      end
      # instantiate plugins
      ::Bicho::Plugins.constants.each do |cnt|
        pl_class = ::Bicho::Plugins.const_get(cnt)
        pl_instance = pl_class.new
        logger.debug("Loaded: #{pl_instance}")
        pl_instance.initialize_hook(url, logger)
      end
      @client = XMLRPC::Client.new_from_uri(url.to_s, nil, 900)
      @client.set_debug
      # User.login sets the credentials cookie for subsequent calls
      ret = @client.call("User.login", { 'login' => @client.user, 'password' => @client.password, 'remember' => 0 } )
      handle_faults(ret)
      @userid = ret['id']
    end

    # @return [String] the session cookie of the underlying XML-RPC client
    def cookie
      @client.cookie
    end

    # Log every fault entry reported in an XML-RPC response hash.
    def handle_faults(ret)
      if ret.has_key?('faults')
        ret['faults'].each do |fault|
          logger.error fault
        end
      end
    end

    # Search for a bug
    #
    # +query+ has to be either a +Query+ object or
    # a +String+ that will be searched in the summary
    # of the bugs.
    #
    def search_bugs(query)
      # allow plain strings to be passed, interpretting them
      query = Query.new.summary(query) if query.is_a?(String)
      ret = @client.call("Bug.search", query.query_map)
      handle_faults(ret)
      bugs = []
      ret['bugs'].each do |bug_data|
        bugs << Bug.new(self, bug_data)
      end
      bugs
    end

    # Retrieves one or more bugs by id
    # @return [Array<Bug>]
    def get_bugs(*ids)
      params = Hash.new
      params[:ids] = ids.collect(&:to_s)
      bugs = []
      ret = @client.call("Bug.get", params)
      handle_faults(ret)
      ret['bugs'].each do |bug_data|
        bugs << Bug.new(self, bug_data)
      end
      bugs
    end
  end
end
|
# Shared examples for Time.gm / Time.utc (@method selects which).
# NOTE(review): the expected #inspect strings use the Ruby 1.8 format
# ("Sat Jan 01 20:15:01 UTC 2000"); Ruby 1.9 changed Time#inspect —
# confirm which Ruby version this spec targets.
describe :time_gm, :shared => true do
  it "creates a time based on given values, interpreted as UTC (GMT)" do
    Time.send(@method, 2000,"jan",1,20,15,1).inspect.should == "Sat Jan 01 20:15:01 UTC 2000"
  end
  it "creates a time based on given C-style gmtime arguments, interpreted as UTC (GMT)" do
    time = Time.send(@method, 1, 15, 20, 1, 1, 2000, :ignored, :ignored, :ignored, :ignored)
    time.inspect.should == "Sat Jan 01 20:15:01 UTC 2000"
  end
end
Time.gm|.utc: Update for 1.9 change in return val.
# Shared examples for Time.gm / Time.utc (@method selects which).
describe :time_gm, :shared => true do
  # Ruby 1.8: Time#inspect uses the "Sat Jan 01 20:15:01 UTC 2000" form.
  ruby_version_is ""..."1.9" do
    it "creates a time based on given values, interpreted as UTC (GMT)" do
      Time.send(@method, 2000,"jan",1,20,15,1).inspect.should == "Sat Jan 01 20:15:01 UTC 2000"
    end
    it "creates a time based on given C-style gmtime arguments, interpreted as UTC (GMT)" do
      time = Time.send(@method, 1, 15, 20, 1, 1, 2000, :ignored, :ignored, :ignored, :ignored)
      time.inspect.should == "Sat Jan 01 20:15:01 UTC 2000"
    end
  end
  # Ruby 1.9 changed Time#inspect to the ISO-like "2000-01-01 ... UTC" form.
  ruby_version_is "1.9" do
    it "creates a time based on given values, interpreted as UTC (GMT)" do
      Time.send(@method, 2000,"jan",1,20,15,1).inspect.should == "2000-01-01 20:15:01 UTC"
    end
    it "creates a time based on given C-style gmtime arguments, interpreted as UTC (GMT)" do
      time = Time.send(@method, 1, 15, 20, 1, 1, 2000, :ignored, :ignored, :ignored, :ignored)
      time.inspect.should == "2000-01-01 20:15:01 UTC"
    end
  end
end
|
module Pushbullet
  # Thin wrappers over the Pushbullet v2 REST endpoints.
  module API
    # List the devices attached to the account.
    def devices
      get('/v2/devices')
    end

    # Push a plain note (title + body) to a device.
    def push_note(device_iden, title, body)
      push :note, device_iden, title: title, body: body
    end

    # Push a link with a title to a device.
    def push_link(device_iden, title, url)
      push :link, device_iden, title: title, url: url
    end

    # Push a street address to a device.
    def push_address(device_iden, title, address)
      push :address, device_iden, title: title, address: address
    end

    # Push a checklist to a device.
    def push_list(device_iden, title, items)
      push :list, device_iden, title: title, items: items
    end

    # Upload and push a file; the MIME type is guessed from the path.
    def push_file(device_iden, file_path)
      mime_type = MIME::Types.type_for(file_path).first.to_s
      io = Faraday::UploadIO.new(file_path, mime_type)
      push :file, device_iden, file: io
    end

    private

    # The v2 pushes endpoint targets devices via the +device_iden+ key;
    # the old v1 +device_id+ key is not honored for targeting.
    def push(type, device_iden, payload)
      post '/v2/pushes', payload.merge(device_iden: device_iden, type: type)
    end
  end
end
Use the v2 device_iden param for targeting particular devices.
module Pushbullet
  # Thin wrappers over the Pushbullet v2 REST endpoints.
  module API
    # List the devices attached to the account.
    def devices
      get('/v2/devices')
    end
    # Push a plain note (title + body) to a device.
    def push_note(device_iden, title, body)
      push :note, device_iden, title: title, body: body
    end
    # Push a link with a title to a device.
    def push_link(device_iden, title, url)
      push :link, device_iden, title: title, url: url
    end
    # Push a street address to a device.
    def push_address(device_iden, title, address)
      push :address, device_iden, title: title, address: address
    end
    # Push a checklist to a device.
    def push_list(device_iden, title, items)
      push :list, device_iden, title: title, items: items
    end
    # Upload and push a file; the MIME type is guessed from the path.
    # NOTE(review): relies on the mime-types and faraday gems being loaded
    # elsewhere in the library — confirm.
    def push_file(device_iden, file_path)
      mime_type = MIME::Types.type_for(file_path).first.to_s
      io = Faraday::UploadIO.new(file_path, mime_type)
      push :file, device_iden, file: io
    end
    private
    # All push_* helpers funnel through here; the v2 API targets devices
    # via the device_iden key.
    def push(type, device_iden, payload)
      post '/v2/pushes', payload.merge(device_iden: device_iden, type: type)
    end
  end
end
|
#--
# Copyright (c) 2011 SUSE LINUX Products GmbH
#
# Author: Duncan Mac-Vicar P. <dmacvicar@suse.de>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require 'inifile'
require 'uri'
require 'xmlrpc/client'
require 'nokogiri'
require 'net/https'
require 'cgi'
require 'bicho/attachment'
require 'bicho/bug'
require 'bicho/history'
require 'bicho/query'
require 'bicho/logging'
# Helper IO device that forwards to the logger, we use it
# to debug XMLRPC by monkey patching it
#
# @private
class Bicho::LoggerIODevice
def <<(msg)
Bicho::Logging.logger.debug(msg)
end
end
# monkey patch XMLRPC
#
# @private
class XMLRPC::Client
def set_debug
@http.set_debug_output(Bicho::LoggerIODevice.new)
end
end
module Bicho
module Plugins
end
# Client to query bugzilla
class Client
include Bicho::Logging
# @return [URI] XML-RPC API end-point
#
# This URL is automatically inferred from the
# Client#site_url
#
# Plugins can modify the inferred value by providing
# a transform_api_url_hook(url, logger) method returning
# the modified value.
#
attr_reader :api_url
# @return [URI] Bugzilla installation website
#
# This value is provided at construction time
attr_reader :site_url
# @return [String] user id, available after login
attr_reader :userid
# @visibility private
# Implemented only to warn users about the replacement
# APIs
def url
warn 'url is deprecated. Use site_url or api_url'
raise NoMethodError
end
# @param [String] site_url Bugzilla installation site url
def initialize(site_url)
@plugins = []
load_plugins!
instantiate_plugins!
if site_url.nil?
@plugins.each do |pl_instance|
if pl_instance.respond_to?(:default_site_url_hook)
default = pl_instance.default_site_url_hook(logger)
site_url = default unless default.nil?
end
end
end
# If the default url is still null, we can't continue
raise ArgumentError, 'missing bugzilla site' if site_url.nil?
@plugins.each do |pl_instance|
if pl_instance.respond_to?(:transform_site_url_hook)
site_url = pl_instance.transform_site_url_hook(site_url, logger)
end
end
# Don't modify the original url
@site_url = site_url.is_a?(URI) ? site_url.clone : URI.parse(site_url)
api_url = @site_url.clone
api_url.path = '/xmlrpc.cgi'
@plugins.each do |pl_instance|
# Modify API url
if pl_instance.respond_to?(:transform_api_url_hook)
api_url = pl_instance.transform_api_url_hook(api_url, logger)
end
end
@api_url = api_url.is_a?(URI) ? api_url.clone : URI.parse(api_url)
@client = XMLRPC::Client.new_from_uri(@api_url.to_s, nil, 900)
@client.set_debug
# User.login sets the credentials cookie for subsequent calls
if @client.user && @client.password
ret = @client.call('User.login', 'login' => @client.user, 'password' => @client.password, 'remember' => 0)
handle_faults(ret)
@userid = ret['id']
end
end
# ruby-load all the files in the plugins directory
def load_plugins!
# Scan plugins
plugin_glob = File.join(File.dirname(__FILE__), 'plugins', '*.rb')
Dir.glob(plugin_glob).each do |plugin|
logger.debug("Loading file: #{plugin}")
load plugin
end
end
# instantiate all plugin classes in the Bicho::Plugins
# module and add them to the list of known plugins
def instantiate_plugins!
# instantiate plugins
::Bicho::Plugins.constants.each do |cnt|
pl_class = ::Bicho::Plugins.const_get(cnt)
pl_instance = pl_class.new
logger.debug("Loaded: #{pl_instance}")
@plugins << pl_instance
end
end
def cookie
@client.cookie
end
def handle_faults(ret)
if ret.key?('faults')
ret['faults'].each do |fault|
logger.error fault
end
end
end
# Return Bugzilla API version
def version
ret = @client.call('Bugzilla.version')
handle_faults(ret)
ret['version']
end
# Search for a bug
#
# +query+ has to be either a +Query+ object or
# a +String+ that will be searched in the summary
# of the bugs.
#
def search_bugs(query)
# allow plain strings to be passed, interpretting them
query = Query.new.summary(query) if query.is_a?(String)
ret = @client.call('Bug.search', query.query_map)
handle_faults(ret)
bugs = []
ret['bugs'].each do |bug_data|
bugs << Bug.new(self, bug_data)
end
bugs
end
# Given a named query's name, runs it
# on the server
# @returns [Array<String>] list of bugs
def expand_named_query(what)
url = @api_url.clone
url.path = '/buglist.cgi'
url.query = "cmdtype=runnamed&namedcmd=#{URI.escape(what)}&ctype=atom"
logger.info("Expanding named query: '#{what}' to #{url.request_uri}")
fetch_named_query_url(url, 5)
end
# Fetches a named query by its full url
# @private
# @returns [Array<String>] list of bugs
def fetch_named_query_url(url, redirects_left)
unless @userid
raise 'You need to be authenticated to use named queries'
end
http = Net::HTTP.new(@api_url.host, @api_url.port)
http.set_debug_output(Bicho::LoggerIODevice.new)
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
http.use_ssl = (@api_url.scheme == 'https')
# request = Net::HTTP::Get.new(url.request_uri, {'Cookie' => self.cookie})
request = Net::HTTP::Get.new(url.request_uri)
request.basic_auth @api_url.user, @api_url.password
response = http.request(request)
case response
when Net::HTTPSuccess
bugs = []
begin
xml = Nokogiri::XML.parse(response.body)
xml.root.xpath('//xmlns:entry/xmlns:link/@href', xml.root.namespace).each do |attr|
uri = URI.parse attr.value
bugs << uri.query.split('=')[1]
end
return bugs
rescue Nokogiri::XML::XPath::SyntaxError
raise "Named query '#{url.request_uri}' not found"
end
when Net::HTTPRedirection
location = response['location']
if redirects_left == 0
raise "Maximum redirects exceeded (redirected to #{location})"
end
new_location_uri = URI.parse(location)
logger.debug("Moved to #{new_location_uri}")
fetch_named_query_url(new_location_uri, redirects_left - 1)
else
raise "Error when expanding named query '#{url.request_uri}': #{response}"
end
end
# Gets a single bug
# @return [Bug] a single bug by id
def get_bug(id)
get_bugs(id).first
end
# Retrieves one or more bugs by id
# @return [Array<Bug>] a list of bugs
def get_bugs(*ids)
params = {}
params[:ids] = ids.collect(&:to_s).map do |what|
if what =~ /^[0-9]+$/
next what.to_i
else
next expand_named_query(what)
end
end.flatten
bugs = []
ret = @client.call('Bug.get', params)
handle_faults(ret)
ret['bugs'].each do |bug_data|
bugs << Bug.new(self, bug_data)
end
bugs
end
# @return [Array<History>] the history of the given bugs
def get_history(*ids)
params = {}
params[:ids] = ids.collect(&:to_s).map do |what|
if what =~ /^[0-9]+$/
next what.to_i
else
next expand_named_query(what)
end
end.flatten
histories = []
ret = @client.call('Bug.history', params)
handle_faults(ret)
ret['bugs'].each do |history_data|
histories << History.new(self, history_data)
end
histories
end
# @return [Array<Attachment>] a list of attachments for the
# given bugs.
#
# Payload is lazy-loaded
def get_attachments(*ids)
params = {}
params[:ids] = ids.collect(&:to_s).map do |what|
if what =~ /^[0-9]+$/
next what.to_i
else
next expand_named_query(what)
end
end.flatten
attachments = []
ret = @client.call('Bug.attachments',
params.merge(exclude_fields: ['data']))
handle_faults(ret)
ret['bugs'].map do |_, attachments_data|
attachments_data.map do |attachment_data|
Attachment.new(self, @client, attachment_data)
end
end.flatten
end
end
end
Do not redefine methods by loading plugins multiple times
#--
# Copyright (c) 2011 SUSE LINUX Products GmbH
#
# Author: Duncan Mac-Vicar P. <dmacvicar@suse.de>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require 'inifile'
require 'uri'
require 'xmlrpc/client'
require 'nokogiri'
require 'net/https'
require 'cgi'
require 'bicho/attachment'
require 'bicho/bug'
require 'bicho/history'
require 'bicho/query'
require 'bicho/logging'
# Helper IO device that forwards to the logger, we use it
# to debug XMLRPC by monkey patching it
#
# @private
class Bicho::LoggerIODevice
def <<(msg)
Bicho::Logging.logger.debug(msg)
end
end
# monkey patch XMLRPC
#
# @private
class XMLRPC::Client
def set_debug
@http.set_debug_output(Bicho::LoggerIODevice.new)
end
end
module Bicho
module Plugins
end
# Client to query bugzilla
class Client
include Bicho::Logging
# @return [URI] XML-RPC API end-point
#
# This URL is automatically inferred from the
# Client#site_url
#
# Plugins can modify the inferred value by providing
# a transform_api_url_hook(url, logger) method returning
# the modified value.
#
attr_reader :api_url
# @return [URI] Bugzilla installation website
#
# This value is provided at construction time
attr_reader :site_url
# @return [String] user id, available after login
attr_reader :userid
# @visibility private
# Implemented only to warn users about the replacement
# APIs
def url
warn 'url is deprecated. Use site_url or api_url'
raise NoMethodError
end
# @param [String] site_url Bugzilla installation site url
def initialize(site_url)
@plugins = []
load_plugins!
instantiate_plugins!
if site_url.nil?
@plugins.each do |pl_instance|
if pl_instance.respond_to?(:default_site_url_hook)
default = pl_instance.default_site_url_hook(logger)
site_url = default unless default.nil?
end
end
end
# If the default url is still null, we can't continue
raise ArgumentError, 'missing bugzilla site' if site_url.nil?
@plugins.each do |pl_instance|
if pl_instance.respond_to?(:transform_site_url_hook)
site_url = pl_instance.transform_site_url_hook(site_url, logger)
end
end
# Don't modify the original url
@site_url = site_url.is_a?(URI) ? site_url.clone : URI.parse(site_url)
api_url = @site_url.clone
api_url.path = '/xmlrpc.cgi'
@plugins.each do |pl_instance|
# Modify API url
if pl_instance.respond_to?(:transform_api_url_hook)
api_url = pl_instance.transform_api_url_hook(api_url, logger)
end
end
@api_url = api_url.is_a?(URI) ? api_url.clone : URI.parse(api_url)
@client = XMLRPC::Client.new_from_uri(@api_url.to_s, nil, 900)
@client.set_debug
# User.login sets the credentials cookie for subsequent calls
if @client.user && @client.password
ret = @client.call('User.login', 'login' => @client.user, 'password' => @client.password, 'remember' => 0)
handle_faults(ret)
@userid = ret['id']
end
end
# ruby-load all the files in the plugins directory
def load_plugins!
# Scan plugins
plugin_glob = File.join(File.dirname(__FILE__), 'plugins', '*.rb')
Dir.glob(plugin_glob).each do |plugin|
logger.debug("Loading file: #{plugin}")
require plugin
end
end
# instantiate all plugin classes in the Bicho::Plugins
# module and add them to the list of known plugins
def instantiate_plugins!
# instantiate plugins
::Bicho::Plugins.constants.each do |cnt|
pl_class = ::Bicho::Plugins.const_get(cnt)
pl_instance = pl_class.new
logger.debug("Loaded: #{pl_instance}")
@plugins << pl_instance
end
end
def cookie
@client.cookie
end
def handle_faults(ret)
if ret.key?('faults')
ret['faults'].each do |fault|
logger.error fault
end
end
end
# Return Bugzilla API version
def version
ret = @client.call('Bugzilla.version')
handle_faults(ret)
ret['version']
end
# Search for a bug
#
# +query+ has to be either a +Query+ object or
# a +String+ that will be searched in the summary
# of the bugs.
#
def search_bugs(query)
# allow plain strings to be passed, interpretting them
query = Query.new.summary(query) if query.is_a?(String)
ret = @client.call('Bug.search', query.query_map)
handle_faults(ret)
bugs = []
ret['bugs'].each do |bug_data|
bugs << Bug.new(self, bug_data)
end
bugs
end
# Given a named query's name, runs it
# on the server
# @returns [Array<String>] list of bugs
def expand_named_query(what)
url = @api_url.clone
url.path = '/buglist.cgi'
url.query = "cmdtype=runnamed&namedcmd=#{URI.escape(what)}&ctype=atom"
logger.info("Expanding named query: '#{what}' to #{url.request_uri}")
fetch_named_query_url(url, 5)
end
# Fetches a named query by its full url
# @private
# @returns [Array<String>] list of bugs
def fetch_named_query_url(url, redirects_left)
unless @userid
raise 'You need to be authenticated to use named queries'
end
http = Net::HTTP.new(@api_url.host, @api_url.port)
http.set_debug_output(Bicho::LoggerIODevice.new)
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
http.use_ssl = (@api_url.scheme == 'https')
# request = Net::HTTP::Get.new(url.request_uri, {'Cookie' => self.cookie})
request = Net::HTTP::Get.new(url.request_uri)
request.basic_auth @api_url.user, @api_url.password
response = http.request(request)
case response
when Net::HTTPSuccess
bugs = []
begin
xml = Nokogiri::XML.parse(response.body)
xml.root.xpath('//xmlns:entry/xmlns:link/@href', xml.root.namespace).each do |attr|
uri = URI.parse attr.value
bugs << uri.query.split('=')[1]
end
return bugs
rescue Nokogiri::XML::XPath::SyntaxError
raise "Named query '#{url.request_uri}' not found"
end
when Net::HTTPRedirection
location = response['location']
if redirects_left == 0
raise "Maximum redirects exceeded (redirected to #{location})"
end
new_location_uri = URI.parse(location)
logger.debug("Moved to #{new_location_uri}")
fetch_named_query_url(new_location_uri, redirects_left - 1)
else
raise "Error when expanding named query '#{url.request_uri}': #{response}"
end
end
# Gets a single bug
# @return [Bug] a single bug by id
def get_bug(id)
get_bugs(id).first
end
# Retrieves one or more bugs by id
# @return [Array<Bug>] a list of bugs
def get_bugs(*ids)
params = {}
params[:ids] = ids.collect(&:to_s).map do |what|
if what =~ /^[0-9]+$/
next what.to_i
else
next expand_named_query(what)
end
end.flatten
bugs = []
ret = @client.call('Bug.get', params)
handle_faults(ret)
ret['bugs'].each do |bug_data|
bugs << Bug.new(self, bug_data)
end
bugs
end
# @return [Array<History>] the history of the given bugs
def get_history(*ids)
params = {}
params[:ids] = ids.collect(&:to_s).map do |what|
if what =~ /^[0-9]+$/
next what.to_i
else
next expand_named_query(what)
end
end.flatten
histories = []
ret = @client.call('Bug.history', params)
handle_faults(ret)
ret['bugs'].each do |history_data|
histories << History.new(self, history_data)
end
histories
end
# @return [Array<Attachment>] a list of attachments for the
# given bugs.
#
# Payload is lazy-loaded
def get_attachments(*ids)
params = {}
params[:ids] = ids.collect(&:to_s).map do |what|
if what =~ /^[0-9]+$/
next what.to_i
else
next expand_named_query(what)
end
end.flatten
attachments = []
ret = @client.call('Bug.attachments',
params.merge(exclude_fields: ['data']))
handle_faults(ret)
ret['bugs'].map do |_, attachments_data|
attachments_data.map do |attachment_data|
Attachment.new(self, @client, attachment_data)
end
end.flatten
end
end
end
|
#--
# Copyright (c) 2011 SUSE LINUX Products GmbH
#
# Author: Duncan Mac-Vicar P. <dmacvicar@suse.de>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require 'inifile'
require 'uri'
require 'xmlrpc/client'
require 'nokogiri'
require 'net/https'
require 'cgi'
require 'bicho/bug'
require 'bicho/query'
require 'bicho/logging'
# Helper IO device that forwards to the logger, we use it
# to debug XMLRPC by monkey patching it
#
# @private
class Bicho::LoggerIODevice
def <<(msg)
Bicho::Logging.logger.debug(msg)
end
end
# monkey patch XMLRPC
#
# @private
class XMLRPC::Client
def set_debug
@http.set_debug_output(Bicho::LoggerIODevice.new);
end
end
module Bicho
module Plugins
end
# Client to query bugzilla
class Client
include Bicho::Logging
# @return [URI] XML-RPC API end-point
#
# This URL is automatically inferred from the
# Client#site_url
#
# Plugins can modify the inferred value by providing
# a transform_api_url_hook(url, logger) method returning
# the modified value.
#
attr_reader :api_url
# @return [URI] Bugzilla installation website
#
# This value is provided at construction time
attr_reader :site_url
# @return [String] user id, available after login
attr_reader :userid
# @visibility private
# Implemented only to warn users about the replacement
# APIs
def url
warn "url is deprecated. Use site_url or api_url"
raise NoMethodError
end
# @param [String] site_url Bugzilla installation site url
def initialize(site_url)
# Don't modify the original url
@site_url = site_url.is_a?(URI) ? site_url.clone : URI.parse(site_url)
@api_url = @site_url.clone
@api_url.path = '/xmlrpc.cgi'
# Scan plugins
plugin_glob = File.join(File.dirname(__FILE__), 'plugins', '*.rb')
Dir.glob(plugin_glob).each do |plugin|
logger.debug("Loading file: #{plugin}")
load plugin
end
#instantiate plugins
::Bicho::Plugins.constants.each do |cnt|
pl_class = ::Bicho::Plugins.const_get(cnt)
pl_instance = pl_class.new
logger.debug("Loaded: #{pl_instance}")
# Modify API url
if pl_instance.respond_to?(:transform_api_url_hook)
@api_url = pl_instance.transform_api_url_hook(@api_url, logger)
end
end
@client = XMLRPC::Client.new_from_uri(@api_url.to_s, nil, 900)
@client.set_debug
# User.login sets the credentials cookie for subsequent calls
if @client.user && @client.password
ret = @client.call("User.login", { 'login' => @client.user, 'password' => @client.password, 'remember' => 0 } )
handle_faults(ret)
@userid = ret['id']
end
end
def cookie
@client.cookie
end
def handle_faults(ret)
if ret.has_key?('faults')
ret['faults'].each do |fault|
logger.error fault
end
end
end
# Search for a bug
#
# +query+ has to be either a +Query+ object or
# a +String+ that will be searched in the summary
# of the bugs.
#
def search_bugs(query)
# allow plain strings to be passed, interpretting them
query = Query.new.summary(query) if query.is_a?(String)
ret = @client.call("Bug.search", query.query_map)
handle_faults(ret)
bugs = []
ret['bugs'].each do |bug_data|
bugs << Bug.new(self, bug_data)
end
bugs
end
# Given a named query's name, runs it on the server.
#
# Requires an authenticated session (see #initialize).
#
# @param [String] what name of the saved query on the server
# @return [Array<String>] list of bug ids
def expand_named_query(what)
  if not @userid
    raise "You need to be authenticated to use named queries"
  end
  logger.info("Expanding named query: '#{what}' with '#{cookie}'")
  # Fixed: use @api_url. @url is never assigned anywhere in this
  # class (the deprecated `url` accessor only raises), so the old
  # `@url.host` always crashed with NoMethodError on nil.
  http = Net::HTTP.new(@api_url.host, @api_url.port)
  http.use_ssl = true
  http.verify_mode = OpenSSL::SSL::VERIFY_NONE
  http.set_debug_output(Bicho::LoggerIODevice.new)
  request = Net::HTTP::Get.new("/buglist.cgi?cmdtype=runnamed&namedcmd=#{CGI.escape(what)}&ctype=atom", {"Cookie" => self.cookie} )
  response = http.request(request)
  case response
  when Net::HTTPSuccess
    bugs = []
    xml = Nokogiri::XML.parse(response.body)
    xml.root.xpath("//xmlns:entry/xmlns:link/@href", xml.root.namespace).each do |attr|
      uri = URI.parse attr.value
      bugs << uri.query.split("=")[1]
    end
    return bugs
  when Net::HTTPRedirect
    raise "HTTP redirect not supported in named_query"
  else
    raise "Error when expanding named query '#{what}': #{response}"
  end
end
# Retrieves one or more bugs by id
#
# Purely numeric arguments are taken as bug ids; anything else is
# treated as a named query and expanded server-side first.
#
# @param [Array<Integer,String>] ids bug ids and/or named queries
# @return [Array<Bug>]
def get_bugs(*ids)
  params = Hash.new
  params[:ids] = ids.collect(&:to_s).map do |what|
    if what =~ /^[0-9]+$/
      next what.to_i
    else
      next expand_named_query(what)
    end
  end.flatten
  bugs = []
  ret = @client.call("Bug.get", params)
  handle_faults(ret)
  ret['bugs'].each do |bug_data|
    bugs << Bug.new(self, bug_data)
  end
  bugs
end
end
end
Remove occurrence of the deprecated @url
#--
# Copyright (c) 2011 SUSE LINUX Products GmbH
#
# Author: Duncan Mac-Vicar P. <dmacvicar@suse.de>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require 'inifile'
require 'uri'
require 'xmlrpc/client'
require 'nokogiri'
require 'net/https'
require 'cgi'
require 'bicho/bug'
require 'bicho/query'
require 'bicho/logging'
# Helper IO device that forwards to the logger, we use it
# to debug XMLRPC by monkey patching it
#
# @private
class Bicho::LoggerIODevice
  # IO-like sink: every chunk written is forwarded to the
  # debug level of Bicho's logger.
  def <<(msg)
    Bicho::Logging.logger.debug(msg)
  end
end
# monkey patch XMLRPC
#
# @private
class XMLRPC::Client
  # Route the underlying Net::HTTP debug output through Bicho's
  # logger instead of the default (no) destination.
  def set_debug
    # Dropped the stray trailing semicolon — not idiomatic Ruby.
    @http.set_debug_output(Bicho::LoggerIODevice.new)
  end
end
module Bicho
  # Namespace that plugin classes register themselves into; scanned
  # and instantiated by Client#initialize.
  module Plugins
  end
  # Client to query bugzilla
  class Client
    include Bicho::Logging
    # @return [URI] XML-RPC API end-point
    #
    # This URL is automatically inferred from the
    # Client#site_url
    #
    # Plugins can modify the inferred value by providing
    # a transform_api_url_hook(url, logger) method returning
    # the modified value.
    #
    attr_reader :api_url
    # @return [URI] Bugzilla installation website
    #
    # This value is provided at construction time
    attr_reader :site_url
    # @return [String] user id, available after login
    attr_reader :userid
    # @visibility private
    # Implemented only to warn users about the replacement
    # APIs
    # @raise [NoMethodError] always
    def url
      warn "url is deprecated. Use site_url or api_url"
      raise NoMethodError
    end
    # @param [String, URI] site_url Bugzilla installation site url
    #
    # Derives the XML-RPC endpoint, loads plugins, and logs in when
    # the XML-RPC client carries credentials.
    def initialize(site_url)
      # Don't modify the original url
      @site_url = site_url.is_a?(URI) ? site_url.clone : URI.parse(site_url)
      @api_url = @site_url.clone
      @api_url.path = '/xmlrpc.cgi'
      # Scan plugins
      plugin_glob = File.join(File.dirname(__FILE__), 'plugins', '*.rb')
      Dir.glob(plugin_glob).each do |plugin|
        logger.debug("Loading file: #{plugin}")
        load plugin
      end
      # Instantiate plugins; each may rewrite the API endpoint via
      # its transform_api_url_hook.
      ::Bicho::Plugins.constants.each do |cnt|
        pl_class = ::Bicho::Plugins.const_get(cnt)
        pl_instance = pl_class.new
        logger.debug("Loaded: #{pl_instance}")
        # Modify API url
        if pl_instance.respond_to?(:transform_api_url_hook)
          @api_url = pl_instance.transform_api_url_hook(@api_url, logger)
        end
      end
      # 900 is the call timeout passed to XMLRPC::Client.new_from_uri.
      @client = XMLRPC::Client.new_from_uri(@api_url.to_s, nil, 900)
      @client.set_debug
      # User.login sets the credentials cookie for subsequent calls
      if @client.user && @client.password
        ret = @client.call("User.login", { 'login' => @client.user, 'password' => @client.password, 'remember' => 0 } )
        handle_faults(ret)
        @userid = ret['id']
      end
    end
    # Session cookie held by the underlying XML-RPC client
    # (set by User.login during #initialize when credentials exist).
    def cookie
      @client.cookie
    end
    # Logs every fault reported in an XML-RPC response hash.
    # No-op when the response carries no 'faults' key.
    def handle_faults(ret)
      if ret.has_key?('faults')
        ret['faults'].each do |fault|
          logger.error fault
        end
      end
    end
    # Search for a bug
    #
    # +query+ has to be either a +Query+ object or
    # a +String+ that will be searched in the summary
    # of the bugs.
    #
    # @return [Array<Bug>] bugs matching the query
    def search_bugs(query)
      # allow plain strings to be passed, interpretting them
      # as a summary search
      query = Query.new.summary(query) if query.is_a?(String)
      ret = @client.call("Bug.search", query.query_map)
      handle_faults(ret)
      bugs = []
      ret['bugs'].each do |bug_data|
        bugs << Bug.new(self, bug_data)
      end
      bugs
    end
    # Given a named query's name, runs it
    # on the server. Requires an authenticated session.
    # @return [Array<String>] list of bug ids
    def expand_named_query(what)
      if not @userid
        raise "You need to be authenticated to use named queries"
      end
      logger.info("Expanding named query: '#{what}' with '#{cookie}'")
      http = Net::HTTP.new(@api_url.host, @api_url.port)
      http.use_ssl = true
      # NOTE(review): certificate verification is disabled — confirm
      # this is intentional for the targeted Bugzilla installations.
      http.verify_mode = OpenSSL::SSL::VERIFY_NONE
      http.set_debug_output(Bicho::LoggerIODevice.new)
      request = Net::HTTP::Get.new("/buglist.cgi?cmdtype=runnamed&namedcmd=#{CGI.escape(what)}&ctype=atom", {"Cookie" => self.cookie} )
      response = http.request(request)
      case response
      when Net::HTTPSuccess
        bugs = []
        xml = Nokogiri::XML.parse(response.body)
        # Each Atom entry link looks like buglist.cgi?id=NNN; keep NNN.
        xml.root.xpath("//xmlns:entry/xmlns:link/@href", xml.root.namespace).each do |attr|
          uri = URI.parse attr.value
          bugs << uri.query.split("=")[1]
        end
        return bugs
      when Net::HTTPRedirect
        raise "HTTP redirect not supported in named_query"
      else
        raise "Error when expanding named query '#{what}': #{response}"
      end
    end
    # Retrieves one or more bugs by id; non-numeric arguments are
    # expanded as named queries first.
    # @return [Array<Bug>]
    def get_bugs(*ids)
      params = Hash.new
      params[:ids] = ids.collect(&:to_s).map do |what|
        if what =~ /^[0-9]+$/
          next what.to_i
        else
          next expand_named_query(what)
        end
      end.flatten
      bugs = []
      ret = @client.call("Bug.get", params)
      handle_faults(ret)
      ret['bugs'].each do |bug_data|
        bugs << Bug.new(self, bug_data)
      end
      bugs
    end
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "ffmpeg-video-info"
  s.version = "0.1.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Pavel Tatarsky"]
  s.date = "2013-11-30"
  # Replaced the jeweler "TODO:" placeholders — shipping placeholder
  # text in description/summary is a release defect.
  s.description = "It only provides gathering info about media files"
  s.email = "fazzzenda@mail.ru"
  s.extensions = ["ext/ffmpeg_video_info/extconf.rb"]
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "ext/ffmpeg_video_info/extconf.rb",
    "ext/ffmpeg_video_info/ffmpeg_video_info.c",
    "ffmpeg-video-info.gemspec",
    "spec/fixtures/test.mp4",
    "spec/spec_helper.rb"
  ]
  s.homepage = "http://github.com/vintikzzz/ffmpeg-video-info"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "2.0.5"
  s.summary = "Ruby binding for FFmpeg library"
  if s.respond_to? :specification_version then
    s.specification_version = 4
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.7"])
      s.add_development_dependency(%q<rspec>, [">= 0"])
      s.add_development_dependency(%q<rake-compiler>, [">= 0"])
    else
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<bundler>, ["~> 1.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
      s.add_dependency(%q<rspec>, [">= 0"])
      s.add_dependency(%q<rake-compiler>, [">= 0"])
    end
  else
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<bundler>, ["~> 1.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
    s.add_dependency(%q<rspec>, [">= 0"])
    s.add_dependency(%q<rake-compiler>, [">= 0"])
  end
end
Regenerate gemspec for version 0.1.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# NOTE(review): regenerate with 'rake gemspec' after changing the
# Rakefile — hand edits here are overwritten on the next run.
Gem::Specification.new do |s|
  s.name = "ffmpeg-video-info"
  s.version = "0.1.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Pavel Tatarsky"]
  s.date = "2013-11-30"
  s.description = "It only provides gathering info about media files"
  s.email = "fazzzenda@mail.ru"
  s.extensions = ["ext/ffmpeg_video_info/extconf.rb"]
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "ext/ffmpeg_video_info/extconf.rb",
    "ext/ffmpeg_video_info/ffmpeg_video_info.c",
    "ffmpeg-video-info.gemspec",
    "spec/fixtures/test.mp4",
    "spec/spec_helper.rb"
  ]
  s.homepage = "http://github.com/vintikzzz/ffmpeg-video-info"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "2.0.5"
  s.summary = "Ruby binding for FFmpeg library"
  if s.respond_to? :specification_version then
    s.specification_version = 4
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.7"])
      s.add_development_dependency(%q<rspec>, [">= 0"])
      s.add_development_dependency(%q<rake-compiler>, [">= 0"])
    else
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<bundler>, ["~> 1.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
      s.add_dependency(%q<rspec>, [">= 0"])
      s.add_dependency(%q<rake-compiler>, [">= 0"])
    end
  else
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<bundler>, ["~> 1.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
    s.add_dependency(%q<rspec>, [">= 0"])
    s.add_dependency(%q<rake-compiler>, [">= 0"])
  end
end
|
# ## Qiita::Response
# Wraps a Faraday response from the Qiita API, exposing body,
# normalized headers, the status code, and a curl-like dump.
#
module Qiita
  class Response
    def initialize(faraday_response)
      @raw_body = faraday_response.body
      @raw_headers = faraday_response.headers
      @raw_status = faraday_response.status
    end

    # ### Qiita::Response#body
    # Response body from the API — a `Hash` or an `Array` of `Hash`.
    #
    # ```rb
    # response.body #=> { ... }
    # ```
    #
    def body
      @raw_body
    end

    # ### Qiita::Response#headers
    # Response headers as a `Hash`; each name is normalized into
    # Capitalized-Dash form (e.g. "content-type" => "Content-Type").
    #
    # ```rb
    # response.headers #=> { "Content-Type" => "application/json" }
    # ```
    #
    def headers
      @headers ||= @raw_headers.each_with_object({}) do |(key, value), result|
        normalized = key.split("-").map(&:capitalize).join("-")
        result[normalized] = value
      end
    end

    # ### Qiita::Response#status
    # HTTP status code as an Integer.
    #
    # ```rb
    # response.status #=> 200
    # ```
    #
    def status
      @raw_status
    end

    # ### Qiita::Response#to_s
    # Curl-like dump: a status line, the sorted headers, then a blank
    # line and the body (the body section is omitted when it is empty).
    # Memoized after the first call.
    def to_s
      @to_s ||= begin
        header_lines = headers.sort.map { |key, value| "#{key}: #{value}" }
        dump = "HTTP/1.1 #{status}\n"
        dump << header_lines.join("\n") << "\n"
        dump << "\n#{body}\n" unless body.empty?
        dump
      end
    end
  end
end
Add missing status message to Qiita::Response#to_s
# ## Qiita::Response
# A class for response data returned from API.
#
# NOTE(review): #to_s relies on Rack::Utils::HTTP_STATUS_CODES, but
# this file never requires 'rack' — confirm the host application
# always loads Rack before #to_s is called.
#
module Qiita
  class Response
    def initialize(faraday_response)
      @raw_body = faraday_response.body
      @raw_headers = faraday_response.headers
      @raw_status = faraday_response.status
    end
    # ### Qiita::Response#body
    # Returns response body returned from API as a `Hash` or an `Array` of `Hash`.
    #
    # ```rb
    # response.body #=> { ... }
    # ```
    #
    def body
      @raw_body
    end
    # ### Qiita::Response#headers
    # Returns response headers returned from API as a `Hash`, with each
    # header name normalized into Capitalized-Dash form.
    #
    # ```rb
    # response.headers #=> { "Content-Type" => "application/json" }
    # ```
    #
    def headers
      @headers ||= @raw_headers.inject({}) do |result, (key, value)|
        result.merge(key.split("-").map(&:capitalize).join("-") => value)
      end
    end
    # ### Qiita::Response#status
    # Returns response status code returned from API as a `Fixnum`.
    #
    # ```rb
    # response.status #=> 200
    # ```
    #
    def status
      @raw_status
    end
    # ### Qiita::Response#to_s
    # Returns a String containing a curl-like response dump:
    # status line (with reason phrase), sorted headers, blank line,
    # then the body unless it is empty. Memoized after first call.
    #
    # ```rb
    # puts client.get("/api/v2/user")
    # ```
    #
    # ```
    # HTTP/1.1 200 OK
    # Content-Type: application/json; charset=utf-8
    # ...
    #
    # {"description"=>"", ...}
    # ```
    #
    def to_s
      @to_s ||= begin
        string = "HTTP/1.1 #{status} #{status_message}\n"
        string << headers.sort.map {|key, value| "#{key}: #{value}" }.join("\n") << "\n"
        string << "\n#{body}\n" unless body.empty?
        string
      end
    end
    private
    # Reason phrase for the current status code, e.g. 200 => "OK".
    def status_message
      Rack::Utils::HTTP_STATUS_CODES[status]
    end
  end
end
|
#- Ruby source code
#- defs-sqlite.rb ~~
# ~~ (c) SRW, 16 Jul 2014
# ~~ last updated 31 Jan 2015
require 'json'
require 'sqlite3'
module QM
  # SQLite-backed storage driver for QMachine "avars": each avar is a
  # body stored under a (box, key) pair with an expiration timestamp
  # that drives garbage collection.
  #
  # NOTE(review): SQL statements are built by string interpolation, so
  # box/key/status values containing a single quote break the query —
  # an injection vector if those values are untrusted.
  class SqliteApiStore
    def close()
      # This method isn't meaningful for the current implementation because
      # the `execute` method opens and closes the SQLite database file for
      # each and every request. If you want high performance, do *NOT* use
      # SQLite as storage for QMachine :-P
      return
    end
    def collect_garbage()
      # Purge every avar whose expiration date has already passed.
      execute("DELETE FROM avars WHERE (exp_date < #{now})")
      return
    end
    def connect(opts = {})
      # Remember the database filename, create the schema if missing,
      # and purge expired rows. Returns the filename (nil when :sqlite
      # was not provided).
      if opts.has_key?(:sqlite) then
        @filename ||= opts[:sqlite]
        execute <<-sql
          CREATE TABLE IF NOT EXISTS avars (
            body TEXT NOT NULL,
            box TEXT NOT NULL,
            exp_date INTEGER NOT NULL,
            key TEXT NOT NULL,
            status TEXT,
            PRIMARY KEY (box, key)
          );
        sql
        collect_garbage
      end
      return @filename
    end
    def execute(query)
      # This helper method helps DRY out the code for database queries.
      # It retries indefinitely while the database reports "busy".
      done = false
      until (done == true) do
        begin
          db = SQLite3::Database.open(@filename)
          x = db.execute(query)
          done = true
        rescue SQLite3::Exception => err
          if (err.is_a?(SQLite3::BusyException) == false) then
            STDERR.puts "Exception occurred: '#{err}':\n#{query}"
          end
        ensure
          db.close if db
        end
      end
      return x
    end
    def exp_date()
      # Expiration timestamp for a new/updated avar: now + TTL seconds.
      # Fixed: `to_i` takes no radix here — Integer#to_i and Time#to_i
      # accept no arguments, so `to_i(10)` raised ArgumentError when
      # avar_ttl was already numeric.
      return now + @settings.avar_ttl.to_i
    end
    def get_avar(params)
      # Fetch the body stored under (box, key); '{}' when missing or
      # expired. A hit also refreshes the row's expiration date.
      box, key = params[0], params[1]
      x = execute <<-sql
        SELECT body FROM avars
        WHERE box = '#{box}' AND exp_date > #{now} AND key = '#{key}'
      sql
      if x.length == 0 then
        y = '{}'
      else
        # If a row was found, update its expiration date.
        y = x[0][0]
        x = execute <<-sql
          UPDATE avars SET exp_date = #{exp_date}
          WHERE box = '#{box}' and key = '#{key}'
        sql
      end
      return y
    end
    def get_list(params)
      # JSON array of the non-expired keys in a box with the given status.
      b, s = params[0], params[1]
      x = execute <<-sql
        SELECT key FROM avars
        WHERE box = '#{b}' AND exp_date > #{now} AND status = '#{s}'
      sql
      return (x.length == 0) ? '[]' : (x.map {|row| row[0]}).to_json
    end
    def initialize(opts = {})
      # Store the settings object; the database connection itself is
      # established lazily by #connect / #execute.
      @settings = opts
    end
    def now()
      # Current Unix time in seconds. Fixed: Time#to_i takes no
      # arguments — the previous `to_i(10)` raised ArgumentError.
      return Time.now.to_i
    end
    def set_avar(params)
      # Upsert an avar; the final param is the body, an optional third
      # param (4-arg form) is the status. Triggers garbage collection.
      body, box, key = params.last, params[0], params[1]
      status = (params.length == 4) ? "'#{params[2]}'" : 'NULL'
      execute <<-sql
        INSERT OR REPLACE INTO avars (body, box, exp_date, key, status)
        VALUES ('#{body}', '#{box}', #{exp_date}, '#{key}', #{status})
      sql
      collect_garbage
      return
    end
    private :collect_garbage, :execute, :exp_date, :now
  end
  # NOTE: There is no `SqliteLogStore` class.
end
#- vim:set syntax=ruby:
Moved private `SqliteApiStore` methods to bottom of class definition
#- Ruby source code
#- defs-sqlite.rb ~~
# ~~ (c) SRW, 16 Jul 2014
# ~~ last updated 31 Jan 2015
require 'json'
require 'sqlite3'
module QM
  # SQLite-backed storage driver for QMachine "avars": each avar is a
  # body stored under a (box, key) pair with an expiration timestamp
  # that drives garbage collection.
  #
  # NOTE(review): SQL statements are built by string interpolation, so
  # box/key/status values containing a single quote break the query —
  # an injection vector if those values are untrusted.
  class SqliteApiStore
    def close()
      # This method isn't meaningful for the current implementation because
      # the `execute` method opens and closes the SQLite database file for
      # each and every request. If you want high performance, do *NOT* use
      # SQLite as storage for QMachine :-P
      return
    end
    def connect(opts = {})
      # Remember the database filename, create the schema if missing,
      # and purge expired rows. Returns the filename (nil when :sqlite
      # was not provided).
      if opts.has_key?(:sqlite) then
        @filename ||= opts[:sqlite]
        execute <<-sql
          CREATE TABLE IF NOT EXISTS avars (
            body TEXT NOT NULL,
            box TEXT NOT NULL,
            exp_date INTEGER NOT NULL,
            key TEXT NOT NULL,
            status TEXT,
            PRIMARY KEY (box, key)
          );
        sql
        collect_garbage
      end
      return @filename
    end
    def get_avar(params)
      # Fetch the body stored under (box, key); '{}' when missing or
      # expired. A hit also refreshes the row's expiration date.
      box, key = params[0], params[1]
      x = execute <<-sql
        SELECT body FROM avars
        WHERE box = '#{box}' AND exp_date > #{now} AND key = '#{key}'
      sql
      if x.length == 0 then
        y = '{}'
      else
        # If a row was found, update its expiration date.
        y = x[0][0]
        x = execute <<-sql
          UPDATE avars SET exp_date = #{exp_date}
          WHERE box = '#{box}' and key = '#{key}'
        sql
      end
      return y
    end
    def get_list(params)
      # JSON array of the non-expired keys in a box with the given status.
      b, s = params[0], params[1]
      x = execute <<-sql
        SELECT key FROM avars
        WHERE box = '#{b}' AND exp_date > #{now} AND status = '#{s}'
      sql
      return (x.length == 0) ? '[]' : (x.map {|row| row[0]}).to_json
    end
    def initialize(opts = {})
      # Store the settings object; the database connection itself is
      # established lazily by #connect / #execute.
      @settings = opts
    end
    def set_avar(params)
      # Upsert an avar; the final param is the body, an optional third
      # param (4-arg form) is the status. Triggers garbage collection.
      body, box, key = params.last, params[0], params[1]
      status = (params.length == 4) ? "'#{params[2]}'" : 'NULL'
      execute <<-sql
        INSERT OR REPLACE INTO avars (body, box, exp_date, key, status)
        VALUES ('#{body}', '#{box}', #{exp_date}, '#{key}', #{status})
      sql
      collect_garbage
      return
    end
    private
    def collect_garbage()
      # Purge every avar whose expiration date has already passed.
      execute("DELETE FROM avars WHERE (exp_date < #{now})")
      return
    end
    def execute(query)
      # This helper method helps DRY out the code for database queries.
      # It retries indefinitely while the database reports "busy".
      done = false
      until (done == true) do
        begin
          db = SQLite3::Database.open(@filename)
          x = db.execute(query)
          done = true
        rescue SQLite3::Exception => err
          if (err.is_a?(SQLite3::BusyException) == false) then
            STDERR.puts "Exception occurred: '#{err}':\n#{query}"
          end
        ensure
          db.close if db
        end
      end
      return x
    end
    def exp_date()
      # Expiration timestamp for a new/updated avar: now + TTL seconds.
      return now + @settings.avar_ttl.to_i
    end
    def now()
      # Current Unix time in seconds.
      return Time.now.to_i
    end
  end
  # NOTE: There is no `SqliteLogStore` class.
end
#- vim:set syntax=ruby:
|
require 'rack'
require 'sinatra/base'
require 'sequel'
module Rack
  # Sinatra app serving Newsstand issue data as an Atom feed or plist,
  # backed by a Sequel/Postgres `issues` table.
  class Newsstand < Sinatra::Base
    autoload :Issue, 'rack/newsstand/models/issue'
    disable :raise_errors, :show_exceptions
    configure do
      Sequel.extension :core_extensions, :migration, :pg_array, :pg_hstore, :pg_hstore_ops
      if ENV['DATABASE_URL']
        DB = Sequel.connect(ENV['DATABASE_URL'])
        DB.extend Sequel::Postgres::PGArray::DatabaseMethods
        DB.extend Sequel::Postgres::HStore::DatabaseMethods
        Sequel::Migrator.run(DB, ::File.join(::File.dirname(__FILE__), "newsstand/migrations"), table: 'newsstand_schema_info')
      end
    end
    # List all issues, content-negotiated: Atom/XML, plist, or JSON
    # (JSON falls through to a later route); 406 otherwise.
    get '/issues' do
      @issues = Issue.order(:published_at).all
      request.accept.each do |type|
        case type.to_s
        when 'application/atom+xml', 'application/xml', 'text/xml'
          # NOTE(review): this branch answers with a plist content type
          # while rendering the Atom template — confirm whether
          # 'application/atom+xml' was intended here.
          content_type 'application/x-plist'
          return builder :atom
        when 'application/x-plist'
          content_type 'application/x-plist'
          return @issues.to_plist
        when 'application/json'
          pass
        end
      end
      halt 406
    end
    # Fetch one issue by name as a plist.
    # Removed the stray `param :name, String, empty: false` call: the
    # sinatra-param helper is never loaded by this file, so the call
    # raised NoMethodError at request time.
    get '/issues/:name' do
      pass unless request.accept? 'application/x-plist'
      content_type 'application/x-plist'
      Issue.find(name: params[:name]).to_plist
    end
    template :atom do
      <<-EOF
      xml.instruct! :xml, :version => '1.1'
      xml.feed "xmlns" => "http://www.w3.org/2005/Atom",
      "xmlns:news" => "http://itunes.apple.com/2011/Newsstand" do
      xml.updated { @issues.first.updated_at rescue Time.now }
      @issues.each do |issue|
      xml.entry do
      xml.id issue.name
      xml.summary issue.summary
      xml.updated issue.updated_at
      xml.published issue.published_at
      xml.tag!("news:end_date"){ issue.expires_at } if issue.expires_at
      xml.tag!("news:cover_art_icons") do
      issue.cover_urls.each do |size, url|
      xml.tag!("news:cover_art_icon", size: size, src: url)
      end
      end
      end
      end
      end
      EOF
    end
  end
end
Removing stray call to param helper
require 'rack'
require 'sinatra/base'
require 'sequel'
module Rack
  # Sinatra app serving Newsstand issue data as an Atom feed or plist,
  # backed by a Sequel/Postgres `issues` table.
  class Newsstand < Sinatra::Base
    autoload :Issue, 'rack/newsstand/models/issue'
    disable :raise_errors, :show_exceptions
    configure do
      Sequel.extension :core_extensions, :migration, :pg_array, :pg_hstore, :pg_hstore_ops
      if ENV['DATABASE_URL']
        DB = Sequel.connect(ENV['DATABASE_URL'])
        DB.extend Sequel::Postgres::PGArray::DatabaseMethods
        DB.extend Sequel::Postgres::HStore::DatabaseMethods
        Sequel::Migrator.run(DB, ::File.join(::File.dirname(__FILE__), "newsstand/migrations"), table: 'newsstand_schema_info')
      end
    end
    # List all issues, content-negotiated: Atom/XML, plist, or JSON
    # (JSON falls through to a later route); 406 otherwise.
    get '/issues' do
      @issues = Issue.order(:published_at).all
      request.accept.each do |type|
        case type.to_s
        when 'application/atom+xml', 'application/xml', 'text/xml'
          # NOTE(review): this branch answers with a plist content type
          # while rendering the Atom template — confirm whether
          # 'application/atom+xml' was intended here.
          content_type 'application/x-plist'
          return builder :atom
        when 'application/x-plist'
          content_type 'application/x-plist'
          return @issues.to_plist
        when 'application/json'
          pass
        end
      end
      halt 406
    end
    # Fetch one issue by name as a plist.
    get '/issues/:name' do
      pass unless request.accept? 'application/x-plist'
      content_type 'application/x-plist'
      Issue.find(name: params[:name]).to_plist
    end
    template :atom do
      <<-EOF
      xml.instruct! :xml, :version => '1.1'
      xml.feed "xmlns" => "http://www.w3.org/2005/Atom",
      "xmlns:news" => "http://itunes.apple.com/2011/Newsstand" do
      xml.updated { @issues.first.updated_at rescue Time.now }
      @issues.each do |issue|
      xml.entry do
      xml.id issue.name
      xml.summary issue.summary
      xml.updated issue.updated_at
      xml.published issue.published_at
      xml.tag!("news:end_date"){ issue.expires_at } if issue.expires_at
      xml.tag!("news:cover_art_icons") do
      issue.cover_urls.each do |size, url|
      xml.tag!("news:cover_art_icon", size: size, src: url)
      end
      end
      end
      end
      end
      EOF
    end
  end
end
|
module Ragoon
  # Gem version string, bumped on each release.
  VERSION = '1.0.0'
end
:point_up: Bump to version 1.1.0
module Ragoon
  # Gem version string, bumped on each release.
  VERSION = '1.1.0'
end
|
module Rakali
  # Gem version string, bumped on each release.
  VERSION = "0.0.3"
end
Bump to 0.0.4
module Rakali
  # Gem version string, bumped on each release.
  VERSION = "0.0.4"
end
|
# frozen_string_literal: true
require "rake/task"
require "rake/early_time"
module Rake
  # A FileTask is a task that includes time based dependencies. If any of a
  # FileTask's prerequisites have a timestamp that is later than the file
  # represented by this task, then the file must be rebuilt (using the
  # supplied actions).
  #
  class FileTask < Task
    # Is this file task needed? Yes if it doesn't exist, if its time stamp
    # is out of date, or if a full rebuild was requested via the
    # application's build_all option.
    def needed?
      # Style-guide fix: no space between `!` and its operand.
      !File.exist?(name) || out_of_date?(timestamp) || @application.options.build_all
    end
    # Time stamp for file task; falls back to Rake::LATE when the
    # file does not exist yet.
    def timestamp
      if File.exist?(name)
        File.mtime(name.to_s)
      else
        Rake::LATE
      end
    end
    private
    # Are there any prerequisites with a later time than the given time stamp?
    # File-based prerequisites additionally honor the build_all option.
    def out_of_date?(stamp)
      all_prerequisite_tasks.any? { |prereq|
        prereq_task = application[prereq, @scope]
        if prereq_task.instance_of?(Rake::FileTask)
          prereq_task.timestamp > stamp || @application.options.build_all
        else
          prereq_task.timestamp > stamp
        end
      }
    end
    # ----------------------------------------------------------------
    # Task class methods.
    #
    class << self
      # Apply the scope to the task name according to the rules for this kind
      # of task. File based tasks ignore the scope when creating the name.
      def scope_name(scope, task_name)
        Rake.from_pathname(task_name)
      end
    end
  end
end
remove trailing extension name in require
* remove trailing extension name
* remove space after `!` operator
# frozen_string_literal: true
require "rake/task"
require "rake/early_time"
module Rake
  # A FileTask is a task that includes time based dependencies. If any of a
  # FileTask's prerequisites have a timestamp that is later than the file
  # represented by this task, then the file must be rebuilt (using the
  # supplied actions).
  #
  class FileTask < Task
    # Is this file task needed? Yes if it doesn't exist, if its time stamp
    # is out of date, or if a full rebuild was requested via the
    # application's build_all option.
    def needed?
      !File.exist?(name) || out_of_date?(timestamp) || @application.options.build_all
    end
    # Time stamp for file task; falls back to Rake::LATE when the
    # file does not exist yet.
    def timestamp
      if File.exist?(name)
        File.mtime(name.to_s)
      else
        Rake::LATE
      end
    end
    private
    # Are there any prerequisites with a later time than the given time stamp?
    # File-based prerequisites additionally honor the build_all option.
    def out_of_date?(stamp)
      all_prerequisite_tasks.any? { |prereq|
        prereq_task = application[prereq, @scope]
        if prereq_task.instance_of?(Rake::FileTask)
          prereq_task.timestamp > stamp || @application.options.build_all
        else
          prereq_task.timestamp > stamp
        end
      }
    end
    # ----------------------------------------------------------------
    # Task class methods.
    #
    class << self
      # Apply the scope to the task name according to the rules for this kind
      # of task. File based tasks ignore the scope when creating the name.
      def scope_name(scope, task_name)
        Rake.from_pathname(task_name)
      end
    end
  end
end
|
class Rant
class << self
def singleton
@singleton ||= Rant.new
@singleton
end
def gen
self.singleton
end
end
class GuardFailure < RuntimeError
end
class TooManyTries < RuntimeError
def initialize(limit,nfailed)
@limit = limit
@nfailed = nfailed
end
def tries
@nfailed
end
def to_s
"Exceed gen limit(#{@limit}: #{@nfailed} failed guards)"
end
end
# limit attempts to 10 times of how many things we want to generate
def each(n,limit=10,&block)
generate(n,limit,block)
end
def map(n,limit=10,&block)
acc = []
generate(n,limit,block) do |val|
acc << val
end
acc
end
def value(limit=10,&block)
generate(1,limit,block) do |val|
return val
end
end
def generate(n,limit,gen_block,&handler)
limit = n * 10
nfailed = 0
nsuccess = 0
while nsuccess < n
raise TooManyTries.new(limit,nfailed) if limit < 0
begin
val = self.instance_eval(&gen_block)
rescue GuardFailure
nfailed += 1
limit -= 1
next
end
nsuccess += 1
limit -= 1
handler.call(val) if handler
end
end
attr_accessor :classifiers
def initialize
reset
end
def reset
@size = nil
@classifiers = Hash.new(0)
end
def classify(classifier)
@classifiers[classifier] += 1
end
def guard(test)
raise GuardFailure.new unless test
end
def size
raise "size not set" unless @size
@size
end
def sized(n,&block)
raise "size needs to be greater than zero" if n < 0
old_size = @size
@size = n
r = self.instance_eval(&block)
@size = old_size
return r
end
# wanna avoid going into Bignum when calling range with these.
INTEGER_MAX = (2**(0.size * 8 -2) -1) / 2
INTEGER_MIN = -(INTEGER_MAX)
def integer(n=nil)
if n
raise "n should be greater than zero" if n < 0
hi, lo = n, -n
else
hi, lo = INTEGER_MAX, INTEGER_MIN
end
range(lo,hi)
end
def float
rand
end
def range(lo,hi)
rand(hi+1-lo) + lo
end
def call(gen,*args)
case gen
when Symbol
return self.send(gen,*args)
when Array
raise "empty array" if gen.empty?
return self.send(gen[0],*gen[1..-1])
when Proc
return self.instance_eval(&gen)
else
raise "don't know how to call type: #{gen}"
end
end
def branch(*gens)
self.call(choose(gens))
end
def choose(*vals)
vals[range(0,vals.length-1)]
end
def literal(value)
value
end
def bool
range(0,1) == 0 ? true : false
end
def freq(*pairs)
pairs = pairs.map do |pair|
case pair
when Symbol, String, Proc
[1,pair]
when Array
unless pair.first.is_a?(Integer)
[1] + pair
else
pair
end
end
end
total = pairs.inject(0) { |sum,p| sum + p.first }
raise(RuntimeError, "Illegal frequency:#{xs.inspect}") if total == 0
pos = range(1,total)
pairs.each do |p|
weight, gen, *args = p
if pos <= p[0]
return self.call(gen,*args)
else
pos -= weight
end
end
end
def array(*freq_pairs)
acc = []
self.size.times { acc << freq(*freq_pairs) }
acc
end
module Chars
class << self
ASCII = ""
(0..127).to_a.each do |i|
ASCII << i
end
def of(regexp)
ASCII.scan(regexp).to_a.map! { |char| char[0] }
end
end
ALNUM = Chars.of /[[:alnum:]]/
ALPHA = Chars.of /[[:alpha:]]/
BLANK = Chars.of /[[:blank:]]/
CNTRL = Chars.of /[[:cntrl:]]/
DIGIT = Chars.of /[[:digit:]]/
GRAPH = Chars.of /[[:graph:]]/
LOWER = Chars.of /[[:lower:]]/
PRINT = Chars.of /[[:print:]]/
PUNCT = Chars.of /[[:punct:]]/
SPACE = Chars.of /[[:space:]]/
UPPER = Chars.of /[[:upper:]]/
XDIGIT = Chars.of /[[:xdigit:]]/
ASCII = Chars.of /./
CLASSES = {
:alnum => ALNUM,
:alpha => ALPHA,
:blank => BLANK,
:cntrl => CNTRL,
:digit => DIGIT,
:graph => GRAPH,
:lower => LOWER,
:print => PRINT,
:punct => PUNCT,
:space => SPACE,
:upper => UPPER,
:xdigit => XDIGIT,
:ascii => ASCII,
}
end
def string(char_class=:print)
chars = case char_class
when Regexp
Chars.of(char_class)
when Symbol
Chars::CLASSES[char_class]
end
raise "bad arg" unless chars
str = ""
size.times do
str << choose(*chars)
end
str
end
end
Fix two bugs: `branch` now splats its generators into `choose` (previously it always returned the whole array), and `freq`'s zero-total error message references `pairs` instead of the undefined variable `xs`.
# Rant: a small random-data generator for property-style testing.
#
# A Rant instance produces random integers, floats, booleans, strings and
# arrays. Generator blocks are evaluated against the instance, may reject
# unsuitable values with #guard, and generation retries until enough
# values pass (subject to a retry budget).
class Rant
  class << self
    # Returns the shared Rant instance, creating it on first use.
    def singleton
      @singleton ||= Rant.new
    end

    # Shorthand for .singleton.
    def gen
      self.singleton
    end
  end

  # Raised internally by #guard when a generated value is rejected.
  class GuardFailure < RuntimeError
  end

  # Raised when generation exhausts its retry budget before producing the
  # requested number of values.
  class TooManyTries < RuntimeError
    def initialize(limit, nfailed)
      @limit = limit
      @nfailed = nfailed
    end

    # Number of generation attempts that failed a guard.
    def tries
      @nfailed
    end

    def to_s
      # BUG FIX: message previously had mismatched parentheses
      # ("Exceed gen limit(#{@limit}: ... )").
      "Exceeded gen limit (#{@limit}): #{@nfailed} failed guards"
    end
  end

  # Yields n generated values. The total attempt budget is n * limit,
  # i.e. `limit` retries are allowed per requested value on average.
  def each(n, limit = 10, &block)
    generate(n, limit, block)
  end

  # Returns an array of n generated values.
  def map(n, limit = 10, &block)
    acc = []
    generate(n, limit, block) do |val|
      acc << val
    end
    acc
  end

  # Returns a single generated value.
  def value(limit = 10, &block)
    generate(1, limit, block) do |val|
      return val
    end
  end

  # Core generation loop: evaluates gen_block against self until n values
  # have been produced, retrying on GuardFailure. Exceeding the attempt
  # budget (n * limit) raises TooManyTries.
  def generate(n, limit, gen_block, &handler)
    # BUG FIX: the caller-supplied limit was ignored (hard-coded as
    # n * 10). Honor it as a per-value retry multiplier; the default of
    # 10 preserves the old behavior.
    limit = n * limit
    nfailed = 0
    nsuccess = 0
    while nsuccess < n
      raise TooManyTries.new(limit, nfailed) if limit < 0
      begin
        val = self.instance_eval(&gen_block)
      rescue GuardFailure
        nfailed += 1
        limit -= 1
        next
      end
      nsuccess += 1
      limit -= 1
      handler.call(val) if handler
    end
  end

  # Histogram of classifications recorded via #classify.
  attr_accessor :classifiers

  def initialize
    reset
  end

  # Clears the size parameter and classification counts.
  def reset
    @size = nil
    @classifiers = Hash.new(0)
  end

  # Records that the current test case falls into +classifier+.
  def classify(classifier)
    @classifiers[classifier] += 1
  end

  # Rejects the current generation attempt unless +test+ is truthy.
  def guard(test)
    raise GuardFailure.new unless test
  end

  # Current size parameter; must be set via #sized before use.
  def size
    raise "size not set" unless @size
    @size
  end

  # Evaluates the block with the size parameter set to n, restoring the
  # previous size afterwards.
  def sized(n, &block)
    # Message fixed to match the check: n == 0 is allowed.
    raise "size must not be negative" if n < 0
    old_size = @size
    @size = n
    begin
      # BUG FIX: restore @size even when the block raises.
      self.instance_eval(&block)
    ensure
      @size = old_size
    end
  end

  # Bounds chosen to stay within the machine-word (Fixnum) integer range
  # when #range computes hi + 1 - lo.
  INTEGER_MAX = (2**(0.size * 8 - 2) - 1) / 2
  INTEGER_MIN = -(INTEGER_MAX)

  # Random integer. With +n+, uniform in -n..n; otherwise uniform over
  # the full word-sized range.
  def integer(n = nil)
    if n
      # Message fixed to match the check: n == 0 is allowed.
      raise "n must not be negative" if n < 0
      hi, lo = n, -n
    else
      hi, lo = INTEGER_MAX, INTEGER_MIN
    end
    range(lo, hi)
  end

  # Random float in [0, 1).
  def float
    rand
  end

  # Uniform random integer in lo..hi (inclusive).
  def range(lo, hi)
    rand(hi + 1 - lo) + lo
  end

  # Invokes a generator described by +gen+: a Symbol (method name), an
  # Array of [method, *args], or a Proc evaluated against self.
  def call(gen, *args)
    case gen
    when Symbol
      self.send(gen, *args)
    when Array
      raise "empty array" if gen.empty?
      self.send(gen[0], *gen[1..-1])
    when Proc
      self.instance_eval(&gen)
    else
      raise "don't know how to call type: #{gen}"
    end
  end

  # Randomly picks one of the given generators and invokes it.
  def branch(*gens)
    self.call(choose(*gens))
  end

  # Randomly picks one of the given values.
  def choose(*vals)
    vals[range(0, vals.length - 1)]
  end

  # Returns the value unchanged (useful as a generator in freq tables).
  def literal(value)
    value
  end

  # Random boolean with equal probability.
  def bool
    range(0, 1) == 0
  end

  # Weighted choice. Each pair is [weight, gen, *args]; bare generators
  # get an implicit weight of 1. Picks a generator with probability
  # proportional to its weight and invokes it.
  def freq(*pairs)
    pairs = pairs.map do |pair|
      case pair
      when Symbol, String, Proc
        [1, pair]
      when Array
        if pair.first.is_a?(Integer)
          pair
        else
          [1] + pair
        end
      end
    end
    total = pairs.inject(0) { |sum, p| sum + p.first }
    raise(RuntimeError, "Illegal frequency:#{pairs.inspect}") if total == 0
    pos = range(1, total)
    pairs.each do |p|
      weight, gen, *args = p
      if pos <= weight
        return self.call(gen, *args)
      else
        pos -= weight
      end
    end
  end

  # Random array of length `size` whose elements are drawn via #freq.
  def array(*freq_pairs)
    Array.new(self.size) { freq(*freq_pairs) }
  end

  # Character-class tables derived from the 7-bit ASCII character set.
  module Chars
    class << self
      # All 128 ASCII characters as a single string.
      ASCII = ""
      (0..127).each do |i|
        ASCII << i
      end

      # Characters (as one-character strings) matching +regexp+.
      def of(regexp)
        ASCII.scan(regexp).to_a.map! { |char| char[0] }
      end
    end

    ALNUM  = Chars.of(/[[:alnum:]]/)
    ALPHA  = Chars.of(/[[:alpha:]]/)
    BLANK  = Chars.of(/[[:blank:]]/)
    CNTRL  = Chars.of(/[[:cntrl:]]/)
    DIGIT  = Chars.of(/[[:digit:]]/)
    GRAPH  = Chars.of(/[[:graph:]]/)
    LOWER  = Chars.of(/[[:lower:]]/)
    PRINT  = Chars.of(/[[:print:]]/)
    PUNCT  = Chars.of(/[[:punct:]]/)
    SPACE  = Chars.of(/[[:space:]]/)
    UPPER  = Chars.of(/[[:upper:]]/)
    XDIGIT = Chars.of(/[[:xdigit:]]/)
    ASCII  = Chars.of(/./)

    # Lookup table from POSIX class name (Symbol) to its character list.
    CLASSES = {
      :alnum  => ALNUM,
      :alpha  => ALPHA,
      :blank  => BLANK,
      :cntrl  => CNTRL,
      :digit  => DIGIT,
      :graph  => GRAPH,
      :lower  => LOWER,
      :print  => PRINT,
      :punct  => PUNCT,
      :space  => SPACE,
      :upper  => UPPER,
      :xdigit => XDIGIT,
      :ascii  => ASCII,
    }
  end

  # Random string of length `size` drawn from the given character class
  # (a Symbol naming a POSIX class, or a Regexp).
  def string(char_class = :print)
    chars = case char_class
            when Regexp then Chars.of(char_class)
            when Symbol then Chars::CLASSES[char_class]
            end
    raise "bad arg" unless chars
    str = ""
    size.times { str << choose(*chars) }
    str
  end
end
|
module RDF
##
# An RDF repository.
#
# @example Creating a transient in-memory repository
# repository = RDF::Repository.new
#
# @example Checking whether a repository is readable/writable
# repository.readable?
# repository.writable?
#
# @example Checking whether a repository is persistent or transient
# repository.persistent?
# repository.transient?
#
# @example Checking whether a repository is empty
# repository.empty?
#
# @example Checking how many statements a repository contains
# repository.count
#
# @example Checking whether a repository contains a specific statement
# repository.has_statement?(statement)
#
# @example Enumerating statements in a repository
# repository.each_statement { |statement| statement.inspect! }
#
# @example Inserting statements into a repository
# repository.insert(*statements)
# repository.insert(statement)
# repository.insert([subject, predicate, object])
# repository << statement
# repository << [subject, predicate, object]
#
# @example Deleting statements from a repository
# repository.delete(*statements)
# repository.delete(statement)
# repository.delete([subject, predicate, object])
#
# @example Deleting all statements from a repository
# repository.clear!
#
class Repository < Dataset
include RDF::Durable
include RDF::Mutable
##
# Returns the options passed to this repository when it was constructed.
#
# @return [Hash{Symbol => Object}]
attr_reader :options
##
# Returns the {URI} of this repository.
#
# @return [URI]
attr_reader :uri
alias_method :url, :uri
##
# Returns the title of this repository.
#
# @return [String]
attr_reader :title
##
# Loads one or more RDF files into a new transient in-memory repository.
#
# @param [String, Array<String>] urls
# @param [Hash{Symbol => Object}] options
# Options from {RDF::Repository#initialize} and {RDF::Mutable#load}
# @yield [repository]
# @yieldparam [Repository]
# @return [void]
def self.load(urls, options = {}, &block)
self.new(options) do |repository|
Array(urls).each do |url|
repository.load(url, options)
end
if block_given?
case block.arity
when 1 then block.call(repository)
else repository.instance_eval(&block)
end
end
end
end
##
# Initializes this repository instance.
#
# @param [URI, #to_s] uri (nil)
# @param [String, #to_s] title (nil)
# @param [Hash{Symbol => Object}] options
# @option options [Boolean] :with_graph_name (true)
# Indicates that the repository supports named graphs, otherwise,
# only the default graph is supported.
# @option options [Boolean] :with_validity (true)
# Indicates that the repository supports named validation.
# @yield [repository]
# @yieldparam [Repository] repository
def initialize(uri: nil, title: nil, **options, &block)
@options = {with_graph_name: true, with_validity: true}.merge(options)
@uri = uri
@title = title
# Transaction class is pluggable via the :transaction_class option.
@tx_class = @options.delete(:transaction_class) { RDF::Transaction }
# Provide a default in-memory implementation:
send(:extend, Implementation) if self.class.equal?(RDF::Repository)
if block_given?
case block.arity
when 1 then block.call(self)
else instance_eval(&block)
end
end
end
##
# @private
# @see RDF::Enumerable#project_graph
def project_graph(graph_name, &block)
RDF::Graph.new(graph_name: graph_name, data: self).
project_graph(graph_name, &block)
end
##
# Executes the given block in a transaction.
#
# @example
# repository.transaction do |tx|
# tx.insert [RDF::URI("http://rubygems.org/gems/rdf"), RDF::RDFS.label, "RDF.rb"]
# end
#
# @param mutable [Boolean]
# allows changes to the transaction, otherwise it is a read-only snapshot of the underlying repository.
# @yield [tx]
# @yieldparam [RDF::Transaction] tx
# @yieldreturn [void] ignored
# @return [self]
# @see RDF::Transaction
# @since 0.3.0
def transaction(mutable: false, &block)
tx = begin_transaction(mutable: mutable)
begin
case block.arity
when 1 then block.call(tx)
else tx.instance_eval(&block)
end
rescue => error
# Any error from the block rolls the transaction back and is re-raised.
rollback_transaction(tx)
raise error
end
commit_transaction(tx)
self
end
alias_method :transact, :transaction
protected
##
# Begins a new transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to begin a transaction against the
# underlying storage.
#
# @param mutable [Boolean] Create a mutable or immutable transaction.
# @return [RDF::Transaction]
# @since 0.3.0
def begin_transaction(mutable: false)
@tx_class.new(self, mutable: mutable)
end
##
# Rolls back the given transaction.
#
# @param [RDF::Transaction] tx
# @return [void] ignored
# @since 0.3.0
def rollback_transaction(tx)
tx.rollback
end
##
# Commits the given transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to commit the given transaction to
# the underlying storage.
#
# @param [RDF::Transaction] tx
# @return [void] ignored
# @since 0.3.0
def commit_transaction(tx)
# NOTE(review): assumes @tx_class#execute accepts the repository as an
# argument — confirm against the transaction class in use.
tx.execute(self)
end
##
# @see RDF::Repository
module Implementation
require 'hamster'
# `false` marks the default (unnamed) graph in @data; unlike nil it is
# an unambiguous Hash key that cannot collide with a real graph name.
DEFAULT_GRAPH = false
##
# @private
def self.extend_object(obj)
obj.instance_variable_set(:@data, obj.options.delete(:data) || Hamster::Hash.new)
super
end
##
# @private
# @see RDF::Enumerable#supports?
def supports?(feature)
case feature.to_sym
when :graph_name then @options[:with_graph_name]
when :inference then false # forward-chaining inference
when :validity then @options.fetch(:with_validity, true)
when :snapshots then true
else false
end
end
##
# @private
# @see RDF::Countable#count
def count
# Sum object-set sizes across graph -> subject -> predicate -> objects.
count = 0
@data.each do |_, ss|
ss.each do |_, ps|
ps.each { |_, os| count += os.size }
end
end
count
end
##
# @private
# @see RDF::Enumerable#has_graph?
def has_graph?(graph)
@data.has_key?(graph)
end
##
# @private
# @see RDF::Enumerable#each_graph
def graph_names(options = nil, &block)
@data.keys.reject { |g| g == DEFAULT_GRAPH }.to_a
end
##
# @private
# @see RDF::Enumerable#each_graph
def each_graph(&block)
if block_given?
@data.each_key do |gn|
yield RDF::Graph.new(graph_name: (gn == DEFAULT_GRAPH ? nil : gn), data: self)
end
end
enum_graph
end
##
# @private
# @see RDF::Enumerable#has_statement?
def has_statement?(statement)
has_statement_in?(@data, statement)
end
##
# @private
# @see RDF::Enumerable#each_statement
def each_statement(&block)
if block_given?
@data.each do |g, ss|
ss.each do |s, ps|
ps.each do |p, os|
os.each do |o|
yield RDF::Statement.new(s, p, o, graph_name: g.equal?(DEFAULT_GRAPH) ? nil : g)
end
end
end
end
end
enum_statement
end
alias_method :each, :each_statement
##
# @see Mutable#apply_changeset
def apply_changeset(changeset)
# Build the new state off-line against the immutable structures, then
# publish it with a single assignment.
data = @data
changeset.deletes.each { |del| data = delete_from(data, del) }
changeset.inserts.each { |ins| data = insert_to(data, ins) }
@data = data
end
##
# A queryable snapshot of the repository for isolated reads.
#
# @return [Dataset] an immutable Dataset containing a current snapshot of
# the Repository contents.
def snapshot
self.class.new(data: @data).freeze
end
protected
##
# Match elements with `eql?`, not `==`
#
# `graph_name` of `false` matches default graph. Unbound variable matches
# non-false graph name
#
# @private
# @see RDF::Queryable#query_pattern
def query_pattern(pattern, options = {}, &block)
# Capture the current immutable state so iteration is isolated from
# concurrent mutation.
snapshot = @data
if block_given?
graph_name = pattern.graph_name
subject = pattern.subject
predicate = pattern.predicate
object = pattern.object
# Narrow each nesting level to the bound term when possible; an empty
# Array short-circuits iteration when the bound term is absent.
cs = snapshot.has_key?(graph_name) ? { graph_name => snapshot[graph_name] } : snapshot
cs.each do |c, ss|
next unless graph_name.nil? ||
graph_name == false && !c ||
graph_name.eql?(c)
ss = if subject.nil? || subject.is_a?(RDF::Query::Variable)
ss
elsif ss.has_key?(subject)
{ subject => ss[subject] }
else
[]
end
ss.each do |s, ps|
ps = if predicate.nil? || predicate.is_a?(RDF::Query::Variable)
ps
elsif ps.has_key?(predicate)
{ predicate => ps[predicate] }
else
[]
end
ps.each do |p, os|
os.each do |o|
next unless object.nil? || object.eql?(o)
yield RDF::Statement.new(s, p, o, graph_name: c.equal?(DEFAULT_GRAPH) ? nil : c)
end
end
end
end
else
enum_for(:query_pattern, pattern, options)
end
end
##
# @private
# @see RDF::Mutable#insert
def insert_statement(statement)
@data = insert_to(@data, statement)
end
##
# @private
# @see RDF::Mutable#delete
def delete_statement(statement)
@data = delete_from(@data, statement)
end
##
# @private
# @see RDF::Mutable#clear
def clear_statements
@data = @data.clear
end
private
##
# @private
# @see #has_statement
def has_statement_in?(data, statement)
s, p, o, g = statement.to_quad
g ||= DEFAULT_GRAPH
data.has_key?(g) &&
data[g].has_key?(s) &&
data[g][s].has_key?(p) &&
data[g][s][p].include?(o)
end
##
# @private
# @return [Hamster::Hash] a new, updated hamster hash
def insert_to(data, statement)
raise ArgumentError, "Statement #{statement.inspect} is incomplete" if statement.incomplete?
unless has_statement_in?(data, statement)
s, p, o, c = statement.to_quad
c ||= DEFAULT_GRAPH
# Nested Hamster::Hash#put block form rebuilds only the path
# graph -> subject -> predicate -> object-set that changed.
return data.put(c) do |subs|
subs = (subs || Hamster::Hash.new).put(s) do |preds|
preds = (preds || Hamster::Hash.new).put(p) do |objs|
(objs || Hamster::Set.new).add(o)
end
end
end
end
data
end
##
# @private
# @return [Hamster::Hash] a new, updated hamster hash
def delete_from(data, statement)
if has_statement_in?(data, statement)
s, p, o, g = statement.to_quad
# Without named-graph support every statement lives in the default graph.
g = DEFAULT_GRAPH unless supports?(:graph_name)
g ||= DEFAULT_GRAPH
# Prune now-empty predicate/subject/graph levels bottom-up.
os = data[g][s][p].delete(o)
ps = os.empty? ? data[g][s].delete(p) : data[g][s].put(p, os)
ss = ps.empty? ? data[g].delete(s) : data[g].put(s, ps)
return ss.empty? ? data.delete(g) : data.put(g, ss)
end
data
end
end # Implementation
end # Repository
end # RDF
Use a transaction for Repository#delete_insert
module RDF
##
# An RDF repository.
#
# @example Creating a transient in-memory repository
# repository = RDF::Repository.new
#
# @example Checking whether a repository is readable/writable
# repository.readable?
# repository.writable?
#
# @example Checking whether a repository is persistent or transient
# repository.persistent?
# repository.transient?
#
# @example Checking whether a repository is empty
# repository.empty?
#
# @example Checking how many statements a repository contains
# repository.count
#
# @example Checking whether a repository contains a specific statement
# repository.has_statement?(statement)
#
# @example Enumerating statements in a repository
# repository.each_statement { |statement| statement.inspect! }
#
# @example Inserting statements into a repository
# repository.insert(*statements)
# repository.insert(statement)
# repository.insert([subject, predicate, object])
# repository << statement
# repository << [subject, predicate, object]
#
# @example Deleting statements from a repository
# repository.delete(*statements)
# repository.delete(statement)
# repository.delete([subject, predicate, object])
#
# @example Deleting all statements from a repository
# repository.clear!
#
class Repository < Dataset
include RDF::Durable
include RDF::Mutable
##
# Returns the options passed to this repository when it was constructed.
#
# @return [Hash{Symbol => Object}]
attr_reader :options
##
# Returns the {URI} of this repository.
#
# @return [URI]
attr_reader :uri
alias_method :url, :uri
##
# Returns the title of this repository.
#
# @return [String]
attr_reader :title
##
# Loads one or more RDF files into a new transient in-memory repository.
#
# @param [String, Array<String>] urls
# @param [Hash{Symbol => Object}] options
# Options from {RDF::Repository#initialize} and {RDF::Mutable#load}
# @yield [repository]
# @yieldparam [Repository]
# @return [void]
def self.load(urls, options = {}, &block)
self.new(options) do |repository|
Array(urls).each do |url|
repository.load(url, options)
end
if block_given?
case block.arity
when 1 then block.call(repository)
else repository.instance_eval(&block)
end
end
end
end
##
# Initializes this repository instance.
#
# @param [URI, #to_s] uri (nil)
# @param [String, #to_s] title (nil)
# @param [Hash{Symbol => Object}] options
# @option options [Boolean] :with_graph_name (true)
# Indicates that the repository supports named graphs, otherwise,
# only the default graph is supported.
# @option options [Boolean] :with_validity (true)
# Indicates that the repository supports named validation.
# @yield [repository]
# @yieldparam [Repository] repository
def initialize(uri: nil, title: nil, **options, &block)
@options = {with_graph_name: true, with_validity: true}.merge(options)
@uri = uri
@title = title
# Transaction class is pluggable via the :transaction_class option.
@tx_class = @options.delete(:transaction_class) { RDF::Transaction }
# Provide a default in-memory implementation:
send(:extend, Implementation) if self.class.equal?(RDF::Repository)
if block_given?
case block.arity
when 1 then block.call(self)
else instance_eval(&block)
end
end
end
##
# Performs a set of deletes and inserts as a combined operation within a
# transaction. The Repository's transaction semantics apply to updates made
# through this method.
#
# @see RDF::Mutable#delete_insert
def delete_insert(deletes, inserts)
transaction(mutable: true) do
# Enumerable-like arguments are passed whole; plain collections are
# splatted into individual statements.
deletes.respond_to?(:each_statement) ? delete(deletes) : delete(*deletes)
inserts.respond_to?(:each_statement) ? insert(inserts) : insert(*inserts)
end
end
##
# @private
# @see RDF::Enumerable#project_graph
def project_graph(graph_name, &block)
RDF::Graph.new(graph_name: graph_name, data: self).
project_graph(graph_name, &block)
end
##
# Executes the given block in a transaction.
#
# @example
# repository.transaction do |tx|
# tx.insert [RDF::URI("http://rubygems.org/gems/rdf"), RDF::RDFS.label, "RDF.rb"]
# end
#
# @param mutable [Boolean]
# allows changes to the transaction, otherwise it is a read-only snapshot of the underlying repository.
# @yield [tx]
# @yieldparam [RDF::Transaction] tx
# @yieldreturn [void] ignored
# @return [self]
# @see RDF::Transaction
# @since 0.3.0
def transaction(mutable: false, &block)
tx = begin_transaction(mutable: mutable)
begin
case block.arity
when 1 then block.call(tx)
else tx.instance_eval(&block)
end
rescue => error
# Any error from the block rolls the transaction back and is re-raised.
rollback_transaction(tx)
raise error
end
commit_transaction(tx)
self
end
alias_method :transact, :transaction
protected
##
# Begins a new transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to begin a transaction against the
# underlying storage.
#
# @param mutable [Boolean] Create a mutable or immutable transaction.
# @return [RDF::Transaction]
# @since 0.3.0
def begin_transaction(mutable: false)
@tx_class.new(self, mutable: mutable)
end
##
# Rolls back the given transaction.
#
# @param [RDF::Transaction] tx
# @return [void] ignored
# @since 0.3.0
def rollback_transaction(tx)
tx.rollback
end
##
# Commits the given transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to commit the given transaction to
# the underlying storage.
#
# @param [RDF::Transaction] tx
# @return [void] ignored
# @since 0.3.0
def commit_transaction(tx)
# NOTE(review): assumes @tx_class#execute takes no arguments and applies
# the transaction itself — confirm against the transaction class in use.
tx.execute
end
##
# @see RDF::Repository
module Implementation
require 'hamster'
# `false` marks the default (unnamed) graph in @data; unlike nil it is
# an unambiguous Hash key that cannot collide with a real graph name.
DEFAULT_GRAPH = false
##
# @private
def self.extend_object(obj)
obj.instance_variable_set(:@data, obj.options.delete(:data) || Hamster::Hash.new)
super
end
##
# @private
# @see RDF::Enumerable#supports?
def supports?(feature)
case feature.to_sym
when :graph_name then @options[:with_graph_name]
when :inference then false # forward-chaining inference
when :validity then @options.fetch(:with_validity, true)
when :snapshots then true
else false
end
end
##
# @private
# @see RDF::Countable#count
def count
# Sum object-set sizes across graph -> subject -> predicate -> objects.
count = 0
@data.each do |_, ss|
ss.each do |_, ps|
ps.each { |_, os| count += os.size }
end
end
count
end
##
# @private
# @see RDF::Enumerable#has_graph?
def has_graph?(graph)
@data.has_key?(graph)
end
##
# @private
# @see RDF::Enumerable#each_graph
def graph_names(options = nil, &block)
@data.keys.reject { |g| g == DEFAULT_GRAPH }.to_a
end
##
# @private
# @see RDF::Enumerable#each_graph
def each_graph(&block)
if block_given?
@data.each_key do |gn|
yield RDF::Graph.new(graph_name: (gn == DEFAULT_GRAPH ? nil : gn), data: self)
end
end
enum_graph
end
##
# @private
# @see RDF::Enumerable#has_statement?
def has_statement?(statement)
has_statement_in?(@data, statement)
end
##
# @private
# @see RDF::Enumerable#each_statement
def each_statement(&block)
if block_given?
@data.each do |g, ss|
ss.each do |s, ps|
ps.each do |p, os|
os.each do |o|
yield RDF::Statement.new(s, p, o, graph_name: g.equal?(DEFAULT_GRAPH) ? nil : g)
end
end
end
end
end
enum_statement
end
alias_method :each, :each_statement
##
# @see Mutable#apply_changeset
def apply_changeset(changeset)
# Build the new state off-line against the immutable structures, then
# publish it with a single assignment.
data = @data
changeset.deletes.each { |del| data = delete_from(data, del) }
changeset.inserts.each { |ins| data = insert_to(data, ins) }
@data = data
end
##
# A queryable snapshot of the repository for isolated reads.
#
# @return [Dataset] an immutable Dataset containing a current snapshot of
# the Repository contents.
def snapshot
self.class.new(data: @data).freeze
end
protected
##
# Match elements with `eql?`, not `==`
#
# `graph_name` of `false` matches default graph. Unbound variable matches
# non-false graph name
#
# @private
# @see RDF::Queryable#query_pattern
def query_pattern(pattern, options = {}, &block)
# Capture the current immutable state so iteration is isolated from
# concurrent mutation.
snapshot = @data
if block_given?
graph_name = pattern.graph_name
subject = pattern.subject
predicate = pattern.predicate
object = pattern.object
# Narrow each nesting level to the bound term when possible; an empty
# Array short-circuits iteration when the bound term is absent.
cs = snapshot.has_key?(graph_name) ? { graph_name => snapshot[graph_name] } : snapshot
cs.each do |c, ss|
next unless graph_name.nil? ||
graph_name == false && !c ||
graph_name.eql?(c)
ss = if subject.nil? || subject.is_a?(RDF::Query::Variable)
ss
elsif ss.has_key?(subject)
{ subject => ss[subject] }
else
[]
end
ss.each do |s, ps|
ps = if predicate.nil? || predicate.is_a?(RDF::Query::Variable)
ps
elsif ps.has_key?(predicate)
{ predicate => ps[predicate] }
else
[]
end
ps.each do |p, os|
os.each do |o|
next unless object.nil? || object.eql?(o)
yield RDF::Statement.new(s, p, o, graph_name: c.equal?(DEFAULT_GRAPH) ? nil : c)
end
end
end
end
else
enum_for(:query_pattern, pattern, options)
end
end
##
# @private
# @see RDF::Mutable#insert
def insert_statement(statement)
@data = insert_to(@data, statement)
end
##
# @private
# @see RDF::Mutable#delete
def delete_statement(statement)
@data = delete_from(@data, statement)
end
##
# @private
# @see RDF::Mutable#clear
def clear_statements
@data = @data.clear
end
private
##
# @private
# @see #has_statement
def has_statement_in?(data, statement)
s, p, o, g = statement.to_quad
g ||= DEFAULT_GRAPH
data.has_key?(g) &&
data[g].has_key?(s) &&
data[g][s].has_key?(p) &&
data[g][s][p].include?(o)
end
##
# @private
# @return [Hamster::Hash] a new, updated hamster hash
def insert_to(data, statement)
raise ArgumentError, "Statement #{statement.inspect} is incomplete" if statement.incomplete?
unless has_statement_in?(data, statement)
s, p, o, c = statement.to_quad
c ||= DEFAULT_GRAPH
# Nested Hamster::Hash#put block form rebuilds only the path
# graph -> subject -> predicate -> object-set that changed.
return data.put(c) do |subs|
subs = (subs || Hamster::Hash.new).put(s) do |preds|
preds = (preds || Hamster::Hash.new).put(p) do |objs|
(objs || Hamster::Set.new).add(o)
end
end
end
end
data
end
##
# @private
# @return [Hamster::Hash] a new, updated hamster hash
def delete_from(data, statement)
if has_statement_in?(data, statement)
s, p, o, g = statement.to_quad
# Without named-graph support every statement lives in the default graph.
g = DEFAULT_GRAPH unless supports?(:graph_name)
g ||= DEFAULT_GRAPH
# Prune now-empty predicate/subject/graph levels bottom-up.
os = data[g][s][p].delete(o)
ps = os.empty? ? data[g][s].delete(p) : data[g][s].put(p, os)
ss = ps.empty? ? data[g].delete(s) : data[g].put(s, ps)
return ss.empty? ? data.delete(g) : data.put(g, ss)
end
data
end
end # Implementation
end # Repository
end # RDF
|
module RDF
##
# An RDF repository.
#
# @example Creating a transient in-memory repository
# repository = RDF::Repository.new
#
# @example Checking whether a repository is readable/writable
# repository.readable?
# repository.writable?
#
# @example Checking whether a repository is persistent or transient
# repository.persistent?
# repository.transient?
#
# @example Checking whether a repository is empty
# repository.empty?
#
# @example Checking how many statements a repository contains
# repository.count
#
# @example Checking whether a repository contains a specific statement
# repository.has_statement?(statement)
#
# @example Enumerating statements in a repository
# repository.each_statement { |statement| statement.inspect! }
#
# @example Inserting statements into a repository
# repository.insert(*statements)
# repository.insert(statement)
# repository.insert([subject, predicate, object])
# repository << statement
# repository << [subject, predicate, object]
#
# @example Deleting statements from a repository
# repository.delete(*statements)
# repository.delete(statement)
# repository.delete([subject, predicate, object])
#
# @example Deleting all statements from a repository
# repository.clear!
#
class Repository
include RDF::Countable
include RDF::Enumerable
include RDF::Queryable
include RDF::Mutable
include RDF::Durable
##
# Returns the options passed to this repository when it was constructed.
#
# @return [Hash{Symbol => Object}]
attr_reader :options
##
# Returns the {URI} of this repository.
#
# @return [URI]
attr_reader :uri
alias_method :url, :uri
##
# Returns the title of this repository.
#
# @return [String]
attr_reader :title
##
# Loads one or more RDF files into a new transient in-memory repository.
#
# @param [String, Array<String>] filenames
# @param [Hash{Symbol => Object}] options
# Options from {RDF::Reader#initialize}, {RDF::Format.for} and {RDF::Repository#initialize}
# @yield [repository]
# @yieldparam [Repository]
# @return [void]
def self.load(filenames, options = {}, &block)
self.new(options) do |repository|
[filenames].flatten.each do |filename|
repository.load(filename, options)
end
if block_given?
case block.arity
when 1 then block.call(repository)
else repository.instance_eval(&block)
end
end
end
end
##
# Initializes this repository instance.
#
# @param [Hash{Symbol => Object}] options
# @option options [URI, #to_s] :uri (nil)
# @option options [String, #to_s] :title (nil)
# @yield [repository]
# @yieldparam [Repository] repository
def initialize(options = {}, &block)
@options = options.dup
@uri = @options.delete(:uri)
@title = @options.delete(:title)
# Provide a default in-memory implementation:
send(:extend, Implementation) if self.class.equal?(RDF::Repository)
if block_given?
case block.arity
when 1 then block.call(self)
else instance_eval(&block)
end
end
end
##
# Returns a developer-friendly representation of this object.
#
# @return [String]
def inspect
sprintf("#<%s:%#0x(%s)>", self.class.name, __id__, uri.to_s)
end
##
# Outputs a developer-friendly representation of this object to
# `stderr`.
#
# @return [void]
def inspect!
each_statement { |statement| statement.inspect! }
nil
end
##
# Executes the given block in a transaction.
#
# @example
# repository.transaction do |tx|
# tx.insert [RDF::URI("http://rdf.rubyforge.org/"), RDF::DC.title, "RDF.rb"]
# end
#
# @param [RDF::Resource] context
# @yield [tx]
# @yieldparam [RDF::Transaction] tx
# @yieldreturn [void] ignored
# @return [void] `self`
# @see RDF::Transaction
# @since 0.3.0
def transaction(context = nil, &block)
tx = begin_transaction(context)
begin
case block.arity
when 1 then block.call(tx)
else tx.instance_eval(&block)
end
rescue => error
# Any error from the block rolls the transaction back and is re-raised.
rollback_transaction(tx)
raise error
end
commit_transaction(tx)
self
end
alias_method :transact, :transaction
protected
##
# Begins a new transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to begin a transaction against the
# underlying storage.
#
# @param [RDF::Resource] context
# @return [RDF::Transaction]
# @since 0.3.0
def begin_transaction(context)
RDF::Transaction.new(:context => context)
end
##
# Rolls back the given transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to roll back the given transaction in
# the underlying storage.
#
# @param [RDF::Transaction] tx
# @return [void] ignored
# @since 0.3.0
def rollback_transaction(tx)
# nothing to do
end
##
# Commits the given transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to commit the given transaction to
# the underlying storage.
#
# @param [RDF::Transaction] tx
# @return [void] ignored
# @since 0.3.0
def commit_transaction(tx)
tx.execute(self)
end
##
# @see RDF::Repository
module Implementation
# `false` marks the default (unnamed) context in @data; unlike nil it
# is an unambiguous Hash key that cannot collide with a real context.
DEFAULT_CONTEXT = false
##
# @private
def self.extend_object(obj)
obj.instance_variable_set(:@data, obj.options.delete(:data) || {})
super
end
##
# @private
# @see RDF::Readable#supports?
def supports?(feature)
case feature.to_sym
when :context then true # statement contexts / named graphs
when :inference then false # forward-chaining inference
else false
end
end
##
# @private
# @see RDF::Durable#durable?
def durable?
false
end
##
# @private
# @see RDF::Countable#empty?
def empty?
@data.empty?
end
##
# @private
# @see RDF::Countable#count
def count
# Sum object-list sizes across context -> subject -> predicate -> objects.
count = 0
@data.each do |c, ss|
ss.each do |s, ps|
ps.each do |p, os|
count += os.size
end
end
end
count
end
##
# @private
# @see RDF::Enumerable#has_statement?
def has_statement?(statement)
s, p, o, c = statement.to_quad
c ||= DEFAULT_CONTEXT
@data.has_key?(c) &&
@data[c].has_key?(s) &&
@data[c][s].has_key?(p) &&
@data[c][s][p].include?(o)
end
##
# @private
# @see RDF::Enumerable#each_statement
def each_statement(&block)
if block_given?
# Note that to iterate in a more consistent fashion despite
# possible concurrent mutations to `@data`, we use `#dup` to make
# shallow copies of the nested hashes before beginning the
# iteration over their keys and values.
@data.dup.each do |c, ss|
ss.dup.each do |s, ps|
ps.dup.each do |p, os|
os.dup.each do |o|
block.call(RDF::Statement.new(s, p, o, :context => c.equal?(DEFAULT_CONTEXT) ? nil : c))
end
end
end
end
end
enum_statement
end
alias_method :each, :each_statement
##
# @private
# @see RDF::Enumerable#has_context?
def has_context?(value)
@data.keys.include?(value)
end
##
# @private
# @see RDF::Enumerable#each_context
def each_context(&block)
if block_given?
contexts = @data.keys
# The default-graph sentinel is not a real context.
contexts.delete(DEFAULT_CONTEXT)
contexts.each(&block)
end
enum_context
end
protected
##
# Match elements with eql?, not ==
# Context of `false` matches default context. Unbound variable matches non-false context
# @private
# @see RDF::Queryable#query
def query_pattern(pattern, &block)
context = pattern.context
subject = pattern.subject
predicate = pattern.predicate
object = pattern.object
# Narrow each nesting level to the bound term when possible; otherwise
# iterate a shallow copy to tolerate concurrent mutation.
cs = @data.has_key?(context) ? {context => @data[context]} : @data.dup
cs.each do |c, ss|
next unless context.nil? || context == false && !c || context.eql?(c)
ss = ss.has_key?(subject) ? {subject => ss[subject]} : ss.dup
ss.each do |s, ps|
next unless subject.nil? || subject.eql?(s)
ps = ps.has_key?(predicate) ? {predicate => ps[predicate]} : ps.dup
ps.each do |p, os|
next unless predicate.nil? || predicate.eql?(p)
os = os.dup # TODO: is this really needed?
os.each do |o|
next unless object.nil? || object.eql?(o)
block.call(RDF::Statement.new(s, p, o, :context => c.equal?(DEFAULT_CONTEXT) ? nil : c))
end
end
end
end
end
##
# @private
# @see RDF::Mutable#insert
def insert_statement(statement)
unless has_statement?(statement)
s, p, o, c = statement.to_quad
c ||= DEFAULT_CONTEXT
# Autovivify the nested context -> subject -> predicate -> objects
# structure; the has_statement? guard above keeps the object Array
# duplicate-free.
@data[c] ||= {}
@data[c][s] ||= {}
@data[c][s][p] ||= []
@data[c][s][p] << o
end
end
##
# @private
# @see RDF::Mutable#delete
def delete_statement(statement)
if has_statement?(statement)
s, p, o, c = statement.to_quad
c ||= DEFAULT_CONTEXT
# Prune now-empty predicate/subject/context levels bottom-up.
@data[c][s][p].delete(o)
@data[c][s].delete(p) if @data[c][s][p].empty?
@data[c].delete(s) if @data[c][s].empty?
@data.delete(c) if @data[c].empty?
end
end
##
# @private
# @see RDF::Mutable#clear
def clear_statements
@data.clear
end
protected :query_pattern
protected :insert_statement
protected :delete_statement
protected :clear_statements
end # Implementation
end # Repository
end # RDF
For RDF::Repository, add :with_context option, defaulting to true. If false, the repo does not support contexts (named graphs); this also affects the #supports?(:context) result.
Added RDF::Dataset as a class-alias for RDF::Repository.
module RDF
##
# An RDF repository.
#
# @example Creating a transient in-memory repository
# repository = RDF::Repository.new
#
# @example Checking whether a repository is readable/writable
# repository.readable?
# repository.writable?
#
# @example Checking whether a repository is persistent or transient
# repository.persistent?
# repository.transient?
#
# @example Checking whether a repository is empty
# repository.empty?
#
# @example Checking how many statements a repository contains
# repository.count
#
# @example Checking whether a repository contains a specific statement
# repository.has_statement?(statement)
#
# @example Enumerating statements in a repository
# repository.each_statement { |statement| statement.inspect! }
#
# @example Inserting statements into a repository
# repository.insert(*statements)
# repository.insert(statement)
# repository.insert([subject, predicate, object])
# repository << statement
# repository << [subject, predicate, object]
#
# @example Deleting statements from a repository
# repository.delete(*statements)
# repository.delete(statement)
# repository.delete([subject, predicate, object])
#
# @example Deleting all statements from a repository
# repository.clear!
#
class Repository
include RDF::Countable
include RDF::Enumerable
include RDF::Queryable
include RDF::Mutable
include RDF::Durable
##
# Returns the options passed to this repository when it was constructed.
#
# @return [Hash{Symbol => Object}]
attr_reader :options
##
# Returns the {URI} of this repository.
#
# @return [URI]
attr_reader :uri
alias_method :url, :uri
##
# Returns the title of this repository.
#
# @return [String]
attr_reader :title
##
# Loads one or more RDF files into a new transient in-memory repository.
#
# @param [String, Array<String>] filenames
# @param [Hash{Symbol => Object}] options
# Options from {RDF::Reader#initialize}, {RDF::Format.for} and {RDF::Repository#initialize}
# @yield [repository]
# @yieldparam [Repository]
# @return [void]
def self.load(filenames, options = {}, &block)
  self.new(options) do |repository|
    # Accept either a single filename or an array of filenames.
    [filenames].flatten.each do |filename|
      repository.load(filename, options)
    end
    if block_given?
      # Arity-1 blocks receive the repository; any other block is
      # instance_eval'd in the repository's own context (DSL style).
      case block.arity
      when 1 then block.call(repository)
      else repository.instance_eval(&block)
      end
    end
  end
end
##
# Initializes this repository instance.
#
# @param [Hash{Symbol => Object}] options
# @option options [URI, #to_s] :uri (nil)
# @option options [String, #to_s] :title (nil)
# @option options [Boolean] :with_context (true)
# Indicates that the repository supports named graphs, otherwise,
# only the default graph is supported.
# @yield [repository]
# @yieldparam [Repository] repository
def initialize(options = {}, &block)
  # Named-graph (context) support defaults to on unless the caller
  # passes :with_context => false.
  @options = {:with_context => true}.merge(options)
  @uri = @options.delete(:uri)
  @title = @options.delete(:title)
  # Provide a default in-memory implementation:
  # only when RDF::Repository itself is instantiated, so subclasses
  # can plug in their own storage backends.
  send(:extend, Implementation) if self.class.equal?(RDF::Repository)
  if block_given?
    # Arity-1 blocks receive self; other blocks run via instance_eval.
    case block.arity
    when 1 then block.call(self)
    else instance_eval(&block)
    end
  end
end
##
# Returns a developer-friendly representation of this object.
#
# @return [String]
def inspect
sprintf("#<%s:%#0x(%s)>", self.class.name, __id__, uri.to_s)
end
##
# Dumps every statement in this repository to `stderr` by invoking
# `Statement#inspect!` on each one.
#
# @return [void] always nil
def inspect!
  each_statement(&:inspect!)
  nil
end
##
# Executes the given block in a transaction.
#
# @example
# repository.transaction do |tx|
# tx.insert [RDF::URI("http://rdf.rubyforge.org/"), RDF::DC.title, "RDF.rb"]
# end
#
# @param [RDF::Resource] context
# Context on which to run the transaction, use `false` for the default
# context and `nil` the entire Repository
# @yield [tx]
# @yieldparam [RDF::Transaction] tx
# @yieldreturn [void] ignored
# @return [void] `self`
# @see RDF::Transaction
# @since 0.3.0
def transaction(context = nil, &block)
  tx = begin_transaction(context)
  begin
    # Arity-1 blocks receive the transaction object; other blocks are
    # instance_eval'd in the transaction's context.
    case block.arity
    when 1 then block.call(tx)
    else tx.instance_eval(&block)
    end
  rescue => error
    # Any StandardError aborts the transaction and is re-raised.
    rollback_transaction(tx)
    raise error
  end
  # Reached only when the block completed without raising.
  commit_transaction(tx)
  self
end
alias_method :transact, :transaction
protected
##
# Begins a new transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to begin a transaction against the
# underlying storage.
#
# @param [RDF::Resource] context
# @return [RDF::Transaction]
# @since 0.3.0
def begin_transaction(context)
  # Builds a fresh transaction scoped to `context` (nil = whole repository).
  RDF::Transaction.new(:graph => context)
end
##
# Rolls back the given transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to roll back the given transaction in
# the underlying storage.
#
# @param [RDF::Transaction] tx
# @return [void] ignored
# @since 0.3.0
def rollback_transaction(tx)
  # No-op: changes are only applied at commit (see commit_transaction),
  # so an uncommitted transaction needs no undo here.
  # nothing to do
end
##
# Commits the given transaction.
#
# Subclasses implementing transaction-capable storage adapters may wish
# to override this method in order to commit the given transaction to
# the underlying storage.
#
# @param [RDF::Transaction] tx
# @return [void] ignored
# @since 0.3.0
def commit_transaction(tx)
  # Presumably RDF::Transaction#execute applies the buffered changes
  # against this repository — confirm in RDF::Transaction.
  tx.execute(self)
end
##
# @see RDF::Repository
module Implementation
DEFAULT_CONTEXT = false
##
# @private
def self.extend_object(obj)
  # Seed the nested statement index from the :data option (if given)
  # before Implementation's methods become available on obj.
  obj.instance_variable_set(:@data, obj.options.delete(:data) || {})
  super
end
##
# @private
# @see RDF::Enumerable#supports?
def supports?(feature)
  feature = feature.to_sym
  # Named-graph (context) support is governed by the :with_context
  # option; nothing else — including :inference — is supported.
  return @options[:with_context] if feature == :context
  false
end
##
# @private
# @see RDF::Durable#durable?
def durable?
  # The default in-memory store never persists across processes.
  false
end
##
# @private
# @see RDF::Countable#empty?
def empty?
  # True when the statement index holds no contexts at all (empty
  # branches are pruned on delete, so this suffices).
  @data.empty?
end
##
# @private
# @see RDF::Countable#count
def count
  # Total = sum of object-list sizes across the whole
  # context => subject => predicate => [objects] index.
  @data.inject(0) do |total, (_context, subjects)|
    subjects.inject(total) do |subtotal, (_subject, predicates)|
      predicates.inject(subtotal) { |acc, (_predicate, objects)| acc + objects.size }
    end
  end
end
##
# @private
# @see RDF::Enumerable#has_statement?
def has_statement?(statement)
  subject, predicate, object, context = statement.to_quad
  # A nil context means the default graph, stored under DEFAULT_CONTEXT.
  context ||= DEFAULT_CONTEXT
  # Walk the nested index one level at a time, bailing out early.
  return false unless @data.has_key?(context)
  graph = @data[context]
  return false unless graph.has_key?(subject)
  predicates = graph[subject]
  return false unless predicates.has_key?(predicate)
  predicates[predicate].include?(object)
end
##
# @private
# @see RDF::Enumerable#each_statement
def each_statement(&block)
  if block_given?
    # Note that to iterate in a more consistent fashion despite
    # possible concurrent mutations to `@data`, we use `#dup` to make
    # shallow copies of the nested hashes before beginning the
    # iteration over their keys and values.
    @data.dup.each do |c, ss|
      ss.dup.each do |s, ps|
        ps.dup.each do |p, os|
          os.dup.each do |o|
            # DEFAULT_CONTEXT (false) marks the default graph; expose
            # it to callers as a nil context on the statement.
            block.call(RDF::Statement.new(s, p, o, :context => c.equal?(DEFAULT_CONTEXT) ? nil : c))
          end
        end
      end
    end
  end
  # Always return an enumerator, per RDF::Enumerable's contract.
  enum_statement
end
alias_method :each, :each_statement
##
# @private
# @see RDF::Enumerable#has_context?
def has_context?(value)
  # NOTE(review): this scans all context keys with == (O(n));
  # @data.has_key?(value) would be O(1) but compares via eql?/hash,
  # which may differ for some RDF terms — confirm before changing.
  @data.keys.include?(value)
end
##
# @private
# @see RDF::Enumerable#each_context
def each_context(&block)
  if block_given?
    # Hash#keys returns a fresh array, so removing DEFAULT_CONTEXT here
    # does not mutate @data; the default graph has no named context.
    contexts = @data.keys
    contexts.delete(DEFAULT_CONTEXT)
    contexts.each(&block)
  end
  enum_context
end
protected
##
# Match elements with eql?, not ==
# Context of `false` matches default context. Unbound variable matches non-false context
# @private
# @see RDF::Queryable#query
def query_pattern(pattern, &block)
  context = pattern.context
  subject = pattern.subject
  predicate = pattern.predicate
  object = pattern.object
  # For each term: when it is bound and present, narrow the scan to
  # that single index entry; otherwise scan a shallow copy of all.
  cs = @data.has_key?(context) ? {context => @data[context]} : @data.dup
  cs.each do |c, ss|
    # nil context matches every graph; false matches only the default
    # graph (falsy stored key); other terms must match by eql?.
    next unless context.nil? || context == false && !c || context.eql?(c)
    ss = ss.has_key?(subject) ? {subject => ss[subject]} : ss.dup
    ss.each do |s, ps|
      next unless subject.nil? || subject.eql?(s)
      ps = ps.has_key?(predicate) ? {predicate => ps[predicate]} : ps.dup
      ps.each do |p, os|
        next unless predicate.nil? || predicate.eql?(p)
        os = os.dup # TODO: is this really needed?
        os.each do |o|
          next unless object.nil? || object.eql?(o)
          # Surface DEFAULT_CONTEXT (false) as nil on yielded statements.
          block.call(RDF::Statement.new(s, p, o, :context => c.equal?(DEFAULT_CONTEXT) ? nil : c))
        end
      end
    end
  end
end
##
# @private
# @see RDF::Mutable#insert
def insert_statement(statement)
  unless has_statement?(statement)
    s, p, o, c = statement.to_quad
    # Collapse into the default graph when contexts are disabled;
    # a nil context likewise means the default graph.
    c = DEFAULT_CONTEXT unless supports?(:context)
    c ||= DEFAULT_CONTEXT
    # Index layout: context => subject => predicate => [objects].
    @data[c] ||= {}
    @data[c][s] ||= {}
    @data[c][s][p] ||= []
    @data[c][s][p] << o
  end
end
##
# @private
# @see RDF::Mutable#delete
def delete_statement(statement)
  if has_statement?(statement)
    s, p, o, c = statement.to_quad
    # Normalize the context exactly as insert_statement does, so the
    # lookup hits the slot the statement was stored under.
    c = DEFAULT_CONTEXT unless supports?(:context)
    c ||= DEFAULT_CONTEXT
    @data[c][s][p].delete(o)
    # Prune now-empty branches so the index does not accumulate
    # empty hashes/arrays (empty? elsewhere relies on this).
    @data[c][s].delete(p) if @data[c][s][p].empty?
    @data[c].delete(s) if @data[c][s].empty?
    @data.delete(c) if @data[c].empty?
  end
end
##
# @private
# @see RDF::Mutable#clear
def clear_statements
  # Clear in place (rather than reassigning @data) so external holders
  # of a :data hash passed at construction observe the removal.
  @data.clear
end
protected :query_pattern
protected :insert_statement
protected :delete_statement
protected :clear_statements
end # Implementation
end # Repository
# RDF::Dataset is a synonym for RDF::Repository
Dataset = Repository
end # RDF
|
module RDF
##
# Hypertext Transfer Protocol (HTTP) vocabulary.
#
# @see http://www.w3.org/2006/http
class HTTP < Vocabulary("http://www.w3.org/2006/http#")
  # TODO: declare the vocabulary's property terms (request, response,
  # header, etc.) from http://www.w3.org/2006/http#
end
end
Imported Hypertext Transfer Protocol (HTTP) properties.
module RDF
##
# Hypertext Transfer Protocol (HTTP) vocabulary.
#
# @see http://www.w3.org/2006/http
class HTTP < Vocabulary("http://www.w3.org/2006/http#")
  # Property terms of the W3C HTTP-in-RDF vocabulary; each `property`
  # declaration defines a term in the http://www.w3.org/2006/http#
  # namespace, covering request/response messages, URIs, headers and
  # header parameters.
  property :abs_path
  property :absoluteURI
  property :authority
  property :body
  property :connectionAuthority
  property :elementName
  property :elementValue
  property :fieldName
  property :fieldValue
  property :header
  property :param
  property :paramName
  property :paramValue
  property :request
  property :requestURI
  property :response
  property :responseCode
  property :version
end
end
|
module ReOrg
  # Library version string.
  VERSION = '0.0.1'
end
Bump version
module ReOrg
  # Library version string.
  VERSION = '0.0.2'
end
|
require 'readthis/entity'
require 'readthis/expanders'
require 'readthis/notifications'
require 'readthis/passthrough'
require 'redis'
require 'connection_pool'
module Readthis
class Cache
attr_reader :entity, :expires_in, :namespace, :pool
# Provide a class level lookup of the proper notifications module.
# Instrumentation is expected to occur within applications that have
# ActiveSupport::Notifications available, but needs to work even when it
# isn't.
#
# Uses `defined?` rather than `const_defined?`: passing a namespaced
# string such as 'ActiveSupport::Notifications' to `const_defined?`
# raises NameError on some Rubies (https://bugs.ruby-lang.org/issues/7414).
def self.notifications
  if defined?(ActiveSupport::Notifications)
    ActiveSupport::Notifications
  else
    Readthis::Notifications
  end
end
# Creates a new Readthis::Cache object with the given redis URL. The URL
# is parsed by the redis client directly.
#
# @param [String] A redis compliant url with necessary connection details
# @option [Boolean] :compress (false) Enable or disable automatic compression
# @option [Number] :compression_threshold (8k) The size a string must be for compression
# @option [Number] :expires_in The number of seconds until an entry expires
# @option [Module] :marshal (Marshal) Any module that responds to `dump` and `load`
# @option [String] :namespace Prefix used to namespace entries
# @option [Symbol] :driver (:hiredis) Specify a driver to be used for Redis connections
# @option [Number] :pool_size (5) The number of threads in the pool
# @option [Number] :pool_timeout (5) How long before a thread times out
#
# @example Create a new cache instance
# Readthis::Cache.new('redis://localhost:6379/0', namespace: 'cache')
#
# @example Create a compressed cache instance
# Readthis::Cache.new('redis://localhost:6379/0', compress: true, compression_threshold: 2048)
#
def initialize(url, options = {})
@expires_in = options.fetch(:expires_in, nil)
@namespace = options.fetch(:namespace, nil)
@entity = Readthis::Entity.new(
marshal: options.fetch(:marshal, Marshal),
compress: options.fetch(:compress, false),
threshold: options.fetch(:compression_threshold, 1024)
)
@pool = ConnectionPool.new(pool_options(options)) do
Redis.new(url: url, driver: options.fetch(:driver, :hiredis))
end
end
# Fetches data from the cache, using the given key. If there is data in
# the cache with the given key, then that data is returned. Otherwise, nil
# is returned.
#
# @param [String] Key for lookup
# @param [Hash] Optional overrides
#
# @example
#
# cache.read('missing') # => nil
# cache.read('matched') # => 'some value'
#
def read(key, options = {})
invoke(:read, key) do |store|
value = store.get(namespaced_key(key, merged_options(options)))
entity.load(value)
end
end
# Writes data to the cache using the given key. Will overwrite whatever
# value is already stored at that key.
#
# @param [String] Key for lookup
# @param [Hash] Optional overrides
#
# @example
#
# cache.write('some-key', 'a bunch of text') # => 'OK'
# cache.write('some-key', 'short lived', expires_in: 60) # => 'OK'
# cache.write('some-key', 'lives elsehwere', namespace: 'cache') # => 'OK'
#
def write(key, value, options = {})
options = merged_options(options)
invoke(:write, key) do |store|
write_entity(key, value, store, options)
end
end
# Delete the value stored at the specified key. Returns `true` if
# anything was deleted, `false` otherwise.
#
# @params [String] The key for lookup
# @params [Hash] Optional overrides
#
# @example
#
# cache.delete('existing-key') # => true
# cache.delete('random-key') # => false
def delete(key, options = {})
namespaced = namespaced_key(key, merged_options(options))
invoke(:delete, key) do |store|
store.del(namespaced) > 0
end
end
# Fetches data from the cache, using the given key. If there is data in the
# cache with the given key, then that data is returned.
#
# If there is no such data in the cache (a cache miss), then `nil` will be
# returned. However, if a block has been passed, that block will be passed
# the key and executed in the event of a cache miss. The return value of
# the block will be written to the cache under the given cache key, and
# that return value will be returned.
#
# @param [String] Key for lookup
# @param [Block] Optional block for generating the value when missing
# @param options [Hash] Optional overrides
# @option options [Boolean] :force Force a cache miss
#
# @example Typical
#
# cache.write('today', 'Monday')
# cache.fetch('today') # => "Monday"
# cache.fetch('city') # => nil
#
# @example With a block
#
# cache.fetch('city') do
# 'Duckburgh'
# end
# cache.fetch('city') # => "Duckburgh"
#
# @example Cache Miss
#
# cache.write('today', 'Monday')
# cache.fetch('today', force: true) # => nil
#
def fetch(key, options = {})
value = read(key, options) unless options[:force]
if value.nil? && block_given?
value = yield(key)
write(key, value, options)
end
value
end
# Increment a key in the store.
#
# If the key doesn't exist it will be initialized at 0. If the key exists
# but it isn't a Fixnum it will be initialized at 0.
#
# @param [String] Key for lookup
# @param [Fixnum] Value to increment by
# @param [Hash] Optional overrides
#
# @example
#
# cache.increment('counter') # => 0
# cache.increment('counter') # => 1
# cache.increment('counter', 2) # => 3
#
def increment(key, amount = 1, options = {})
  # Fix: the operation name was misspelled as :incremenet, which
  # produced a malformed "cache_incremenet.active_support" event name.
  invoke(:increment, key) do |store|
    alter(key, amount, options)
  end
end
# Decrement a key in the store.
#
# If the key doesn't exist it will be initialized at 0. If the key exists
# but it isn't a Fixnum it will be initialized at 0.
#
# @param [String] Key for lookup
# @param [Fixnum] Value to decrement by
# @param [Hash] Optional overrides
#
# @example
#
# cache.write('counter', 20) # => 20
# cache.decrement('counter') # => 19
# cache.decrement('counter', 2) # => 17
#
def decrement(key, amount = 1, options = {})
invoke(:decrement, key) do |store|
alter(key, amount * -1, options)
end
end
# Read multiple values at once from the cache. Options can be passed in the
# last argument.
#
# @overload read_multi(keys)
# Return all values for the given keys.
# @param [String] One or more keys to fetch
#
# @return [Hash] A hash mapping keys to the values found.
#
# @example
#
# cache.write('a', 1)
# cache.read_multi('a', 'b') # => { 'a' => 1, 'b' => nil }
#
def read_multi(*keys)
options = merged_options(extract_options!(keys))
mapping = keys.map { |key| namespaced_key(key, options) }
return {} if keys.empty?
invoke(:read_multi, keys) do |store|
values = store.mget(mapping).map { |value| entity.load(value) }
keys.zip(values).to_h
end
end
# Write multiple key value pairs simultaneously. This is an atomic
# operation that will always succeed and will overwrite existing
# values.
#
# This is a non-standard, but useful, cache method.
#
# @param [Hash] Key value hash to write
# @param [Hash] Optional overrides
#
# @example
#
# cache.write_multi({ 'a' => 1, 'b' => 2 }) # => true
#
def write_multi(hash, options = {})
options = merged_options(options)
invoke(:write_multi, hash.keys) do |store|
store.multi do
hash.each { |key, value| write_entity(key, value, store, options) }
end
end
end
# Fetches multiple keys from the cache using a single call to the server
# and filling in any cache misses. All read and write operations are
# executed atomically.
#
# @overload fetch_multi(keys)
# Return all values for the given keys, applying the block to the key
# when a value is missing.
# @param [String] One or more keys to fetch
#
# @example
#
# cache.fetch_multi('alpha', 'beta') do |key|
# "#{key}-was-missing"
# end
#
# cache.fetch_multi('a', 'b', expires_in: 60) do |key|
# key * 2
# end
#
def fetch_multi(*keys)
results = read_multi(*keys)
extracted = extract_options!(keys)
missing = {}
invoke(:fetch_multi, keys) do |store|
results.each do |key, value|
if value.nil?
value = yield(key)
missing[key] = value
results[key] = value
end
end
end
write_multi(missing, extracted) if missing.any?
results
end
# Returns `true` if the cache contains an entry for the given key.
#
# @param [String] Key for lookup
# @param [Hash] Optional overrides
#
# @example
#
# cache.exist?('some-key') # => false
# cache.exist?('some-key', namespace: 'cache') # => true
#
def exist?(key, options = {})
invoke(:exist?, key) do |store|
store.exists(namespaced_key(key, merged_options(options)))
end
end
# Clear the entire cache. This flushes the current database, no
# globbing is applied.
#
# @param [Hash] Options, only present for compatibility.
#
# @example
#
# cache.clear #=> 'OK'
def clear(options = {})
invoke(:clear, '*', &:flushdb)
end
protected
def write_entity(key, value, store, options)
namespaced = namespaced_key(key, options)
if expiration = options[:expires_in]
store.setex(namespaced, expiration.to_i, entity.dump(value))
else
store.set(namespaced, entity.dump(value))
end
end
private
def alter(key, amount, options)
number = read(key, options)
delta = number.to_i + amount
write(key, delta, options)
delta
end
def instrument(operation, key)
  name = "cache_#{operation}.active_support"
  payload = { key: key }
  # Fix: pass the payload hash to the notifier — previously the bare
  # key was passed, leaving `payload` unused and handing subscribers
  # an unstructured payload instead of the documented { key: ... }.
  self.class.notifications.instrument(name, payload) { yield(payload) }
end
def invoke(operation, key, &block)
instrument(operation, key) do
pool.with(&block)
end
end
def extract_options!(array)
array.last.is_a?(Hash) ? array.pop : {}
end
def merged_options(options)
options = options || {}
options[:namespace] ||= namespace
options[:expires_in] ||= expires_in
options
end
def pool_options(options)
{ size: options.fetch(:pool_size, 5),
timeout: options.fetch(:pool_timeout, 5) }
end
def namespaced_key(key, options)
Readthis::Expanders.namespace_key(key, options[:namespace])
end
end
end
Use defined? rather than const_defined?
Fixes the constant check for ActiveSupport::Notifications, which failed
because of this Ruby bug: https://bugs.ruby-lang.org/issues/7414.
Closes #8
require 'readthis/entity'
require 'readthis/expanders'
require 'readthis/notifications'
require 'readthis/passthrough'
require 'redis'
require 'connection_pool'
module Readthis
class Cache
attr_reader :entity, :expires_in, :namespace, :pool
# Resolve which notifications module instrumentation goes through.
# Applications with ActiveSupport loaded get
# ActiveSupport::Notifications; everything else falls back to the
# no-op Readthis::Notifications shim. `defined?` is used because
# `const_defined?` with a namespaced string is broken on some Rubies
# (https://bugs.ruby-lang.org/issues/7414).
def self.notifications
  defined?(ActiveSupport::Notifications) ? ActiveSupport::Notifications : Readthis::Notifications
end
# Creates a new Readthis::Cache object with the given redis URL. The URL
# is parsed by the redis client directly.
#
# @param [String] A redis compliant url with necessary connection details
# @option [Boolean] :compress (false) Enable or disable automatic compression
# @option [Number] :compression_threshold (8k) The size a string must be for compression
# @option [Number] :expires_in The number of seconds until an entry expires
# @option [Module] :marshal (Marshal) Any module that responds to `dump` and `load`
# @option [String] :namespace Prefix used to namespace entries
# @option [Symbol] :driver (:hiredis) Specify a driver to be used for Redis connections
# @option [Number] :pool_size (5) The number of threads in the pool
# @option [Number] :pool_timeout (5) How long before a thread times out
#
# @example Create a new cache instance
# Readthis::Cache.new('redis://localhost:6379/0', namespace: 'cache')
#
# @example Create a compressed cache instance
# Readthis::Cache.new('redis://localhost:6379/0', compress: true, compression_threshold: 2048)
#
def initialize(url, options = {})
  @expires_in = options.fetch(:expires_in, nil)
  @namespace = options.fetch(:namespace, nil)
  # Entity handles marshaling and optional compression of stored values.
  @entity = Readthis::Entity.new(
    marshal: options.fetch(:marshal, Marshal),
    compress: options.fetch(:compress, false),
    threshold: options.fetch(:compression_threshold, 1024)
  )
  # Redis connections are shared through a bounded connection pool;
  # each connection uses the configured driver (hiredis by default).
  @pool = ConnectionPool.new(pool_options(options)) do
    Redis.new(url: url, driver: options.fetch(:driver, :hiredis))
  end
end
# Fetch the value stored under `key`, or nil when absent.
#
# @param [String] Key for lookup
# @param [Hash] Optional overrides
#
# @example
#
#   cache.read('missing') # => nil
#   cache.read('matched') # => 'some value'
#
def read(key, options = {})
  invoke(:read, key) do |store|
    namespaced = namespaced_key(key, merged_options(options))
    entity.load(store.get(namespaced))
  end
end
# Store `value` under `key`, overwriting any existing entry.
#
# @param [String] Key for lookup
# @param [Hash] Optional overrides
#
# @example
#
#   cache.write('some-key', 'a bunch of text') # => 'OK'
#   cache.write('some-key', 'short lived', expires_in: 60) # => 'OK'
#   cache.write('some-key', 'lives elsewhere', namespace: 'cache') # => 'OK'
#
def write(key, value, options = {})
  merged = merged_options(options)
  invoke(:write, key) do |store|
    write_entity(key, value, store, merged)
  end
end
# Remove the entry stored under `key`. Returns `true` when something
# was actually deleted, `false` otherwise.
#
# @params [String] The key for lookup
# @params [Hash] Optional overrides
#
# @example
#
#   cache.delete('existing-key') # => true
#   cache.delete('random-key')   # => false
def delete(key, options = {})
  namespaced = namespaced_key(key, merged_options(options))
  invoke(:delete, key) do |store|
    deleted_count = store.del(namespaced)
    deleted_count > 0
  end
end
# Read the value under `key`, computing and caching it on a miss.
#
# On a hit the cached value is returned. On a miss (or when `:force`
# is set) the optional block is called with the key, its result is
# written to the cache, and that result is returned; without a block
# a miss simply returns nil.
#
# @param [String] Key for lookup
# @param [Block] Optional block for generating the value when missing
# @param options [Hash] Optional overrides
# @option options [Boolean] :force Force a cache miss
#
# @example Typical
#
#   cache.write('today', 'Monday')
#   cache.fetch('today') # => "Monday"
#   cache.fetch('city')  # => nil
#
# @example With a block
#
#   cache.fetch('city') { 'Duckburgh' }
#   cache.fetch('city') # => "Duckburgh"
#
# @example Cache Miss
#
#   cache.write('today', 'Monday')
#   cache.fetch('today', force: true) # => nil
#
def fetch(key, options = {})
  cached = options[:force] ? nil : read(key, options)
  return cached unless cached.nil? && block_given?
  generated = yield(key)
  write(key, generated, options)
  generated
end
# Increment a key in the store.
#
# If the key doesn't exist it will be initialized at 0. If the key exists
# but it isn't a Fixnum it will be initialized at 0.
#
# @param [String] Key for lookup
# @param [Fixnum] Value to increment by
# @param [Hash] Optional overrides
#
# @example
#
#   cache.increment('counter') # => 0
#   cache.increment('counter') # => 1
#   cache.increment('counter', 2) # => 3
#
def increment(key, amount = 1, options = {})
  # Fix: the operation name was misspelled as :incremenet, which
  # produced a malformed "cache_incremenet.active_support" event name.
  invoke(:increment, key) do |store|
    alter(key, amount, options)
  end
end
# Decrement a key in the store.
#
# If the key doesn't exist it will be initialized at 0. If the key exists
# but it isn't a Fixnum it will be initialized at 0.
#
# @param [String] Key for lookup
# @param [Fixnum] Value to decrement by
# @param [Hash] Optional overrides
#
# @example
#
#   cache.write('counter', 20) # => 20
#   cache.decrement('counter') # => 19
#   cache.decrement('counter', 2) # => 17
#
def decrement(key, amount = 1, options = {})
  invoke(:decrement, key) do |store|
    # Decrementing is just incrementing by the negated amount.
    alter(key, -amount, options)
  end
end
# Read several keys with a single MGET round trip. Options may be
# passed as a trailing hash.
#
# @overload read_multi(keys)
#   Return all values for the given keys.
#   @param [String] One or more keys to fetch
#
# @return [Hash] A hash mapping keys to the values found (nil on miss).
#
# @example
#
#   cache.write('a', 1)
#   cache.read_multi('a', 'b') # => { 'a' => 1, 'b' => nil }
#
def read_multi(*keys)
  options = merged_options(extract_options!(keys))
  return {} if keys.empty?
  namespaced = keys.map { |key| namespaced_key(key, options) }
  invoke(:read_multi, keys) do |store|
    values = store.mget(namespaced).map { |value| entity.load(value) }
    keys.zip(values).to_h
  end
end
# Write multiple key value pairs simultaneously. This is an atomic
# operation that will always succeed and will overwrite existing
# values.
#
# This is a non-standard, but useful, cache method.
#
# @param [Hash] Key value hash to write
# @param [Hash] Optional overrides
#
# @example
#
# cache.write_multi({ 'a' => 1, 'b' => 2 }) # => true
#
def write_multi(hash, options = {})
  options = merged_options(options)
  invoke(:write_multi, hash.keys) do |store|
    # MULTI queues all of the writes into one atomic Redis transaction.
    store.multi do
      hash.each { |key, value| write_entity(key, value, store, options) }
    end
  end
end
# Fetches multiple keys from the cache using a single call to the server
# and filling in any cache misses. All read and write operations are
# executed atomically.
#
# @overload fetch_multi(keys)
# Return all values for the given keys, applying the block to the key
# when a value is missing.
# @param [String] One or more keys to fetch
#
# @example
#
# cache.fetch_multi('alpha', 'beta') do |key|
# "#{key}-was-missing"
# end
#
# cache.fetch_multi('a', 'b', expires_in: 60) do |key|
# key * 2
# end
#
def fetch_multi(*keys)
  results = read_multi(*keys)
  extracted = extract_options!(keys)
  missing = {}
  invoke(:fetch_multi, keys) do |store|
    # NOTE(review): `store` is unused here — the backfill writes go
    # through write_multi below, outside this connection checkout.
    results.each do |key, value|
      if value.nil?
        # NOTE(review): `yield` has no block_given? guard; calling
        # fetch_multi without a block raises LocalJumpError on a miss.
        value = yield(key)
        missing[key] = value
        results[key] = value
      end
    end
  end
  # Persist all generated values in one atomic write.
  write_multi(missing, extracted) if missing.any?
  results
end
# Check whether the cache contains an entry for `key`.
#
# @param [String] Key for lookup
# @param [Hash] Optional overrides
#
# @example
#
#   cache.exist?('some-key') # => false
#   cache.exist?('some-key', namespace: 'cache') # => true
#
def exist?(key, options = {})
  namespaced = namespaced_key(key, merged_options(options))
  invoke(:exist?, key) do |store|
    store.exists(namespaced)
  end
end
# Clear the entire cache. This flushes the current database, no
# globbing is applied.
#
# @param [Hash] Options, only present for compatibility.
#
# @example
#
# cache.clear #=> 'OK'
def clear(options = {})
  # FLUSHDB wipes the entire current Redis database — not just keys in
  # this cache's namespace; `options` exists only for API compatibility.
  invoke(:clear, '*', &:flushdb)
end
protected
def write_entity(key, value, store, options)
  # SETEX when a TTL was requested, plain SET otherwise.
  namespaced = namespaced_key(key, options)
  dumped = entity.dump(value)
  expiration = options[:expires_in]
  if expiration
    store.setex(namespaced, expiration.to_i, dumped)
  else
    store.set(namespaced, dumped)
  end
end
private
def alter(key, amount, options)
  # Read-modify-write (not an atomic INCR); missing or non-numeric
  # values coerce to 0 via to_i.
  current = read(key, options)
  updated = current.to_i + amount
  write(key, updated, options)
  updated
end
def instrument(operation, key)
  name = "cache_#{operation}.active_support"
  payload = { key: key }
  # Fix: pass the payload hash to the notifier — previously the bare
  # key was passed, leaving `payload` unused and handing subscribers
  # an unstructured payload instead of the documented { key: ... }.
  self.class.notifications.instrument(name, payload) { yield(payload) }
end
def invoke(operation, key, &block)
  # Run `block` against a pooled Redis connection, wrapped in
  # instrumentation for `operation`.
  instrument(operation, key) do
    pool.with(&block)
  end
end
def extract_options!(array)
  # Pop a trailing options hash off the argument list, if present.
  if array.last.is_a?(Hash)
    array.pop
  else
    {}
  end
end
def merged_options(options)
  # Fill in instance-level defaults without clobbering caller-supplied
  # values. (Mutates and returns the caller's hash, as before.)
  merged = options || {}
  merged[:namespace] ||= namespace
  merged[:expires_in] ||= expires_in
  merged
end
def pool_options(options)
  # Connection-pool sizing: five threads / five-second timeout default.
  {
    size: options.fetch(:pool_size, 5),
    timeout: options.fetch(:pool_timeout, 5)
  }
end
def namespaced_key(key, options)
  # Delegates prefixing to Expanders; presumably a nil namespace yields
  # the bare key — confirm in Readthis::Expanders.
  Readthis::Expanders.namespace_key(key, options[:namespace])
end
end
end
|
Script for bulk-loading data into Redis.
#!/usr/bin/env ruby
require 'time'
require 'optparse'
ENV['TZ'] = 'UTC'
# Serialize a command as a RESP (REdis Serialization Protocol) array:
# "*<argc>\r\n" followed by "$<bytelen>\r\n<arg>\r\n" per argument.
def gen_redis_proto(*cmd)
  proto = "*#{cmd.length}\r\n"
  cmd.each do |arg|
    arg = arg.to_s
    proto << "$#{arg.bytesize}\r\n#{arg}\r\n"
  end
  proto
end
# Convert a timestamp string into a fixed-width fractional epoch value
# ("%10.6f") suitable as a ZADD score.
def get_score(time)
  "%10.6f" % Time.parse(time).to_f
end
# Parse command-line flags; --data selects which Redis key structure
# (public_id hash buckets, or created_at/published sorted sets) to emit.
opts = {}
OptionParser.new do |options|
  options.set_banner "Usage: redis_protocol.rb [options] [files]\n" \
    "Generate redis protocol from the output of:\n" \
    "COPY entries (id, feed_id, public_id, created_at, published) TO '/tmp/redis_data';"
  options.separator ""
  options.separator "Options:"
  options.on("--data DATA", "Specify which data you want. Options: public_id, created_at, published") do |data|
    opts[:data] = data
  end
  options.on_tail("-h", "--help") do
    $stderr.puts options
    exit 1
  end
end.parse!
while input = ARGF.gets
  input.each_line do |line|
    # Each line is a tab-separated row from the Postgres COPY export:
    # id, feed_id, public_id, created_at, published.
    line = line.chop
    entry_id, feed_id, public_id, created_at, published = line.split("\t")
    begin
      case opts[:data]
      when "public_id"
        # Bucket public ids into hashes keyed by their first five chars.
        $stdout.write(gen_redis_proto("HSET", "entry:public_ids:#{public_id[0..4]}", public_id, 1))
      when "created_at"
        score = get_score(created_at)
        $stdout.write(gen_redis_proto("ZADD", "feed:#{feed_id}:entry_ids:created_at", score, entry_id))
      when "published"
        score = get_score(published)
        $stdout.write(gen_redis_proto("ZADD", "feed:#{feed_id}:entry_ids:published", score, entry_id))
      else
        $stderr.puts "--data needs to be specified"
        exit 1
      end
    rescue Errno::EPIPE
      # Downstream consumer (e.g. `redis-cli --pipe`) closed the pipe;
      # 74 is EX_IOERR from sysexits.
      exit(74)
    end
  end
end
|
module Reform
  # Library version string.
  VERSION = '2.2.0'
end
Release 2.2.0.rc1.
module Reform
  # Library version string.
  VERSION = '2.2.0.rc1'
end
|
require 'json'
require 'builder'
require 'base64'
# Backport of ActiveSupport-style Hash#except / #except!.
# NOTE: this monkey-patches the core Hash class.
class Hash
  # Non-destructive: returns a copy with the given keys removed.
  def except(*keys)
    dup.except!(*keys)
  end

  # Destructive: removes the given keys from self and returns self.
  def except!(*keys)
    keys.each do |key|
      delete(key)
    end
    self
  end
end
# Main report builder class
class ReportBuilder
# report_builder:
#
# ReportBuilder.configure do |config|
# config.json_path = 'cucumber_sample/logs'
# config.report_path = 'my_test_report'
# config.report_types = [:json, :html]
# config.report_tabs = [:overview, :features, :scenarios, :errors]
# config.report_title = 'My Test Results'
# config.compress_images = false
# config.additional_info = {browser: 'Chrome', environment: 'Stage 5'}
# end
#
# ReportBuilder.build_report
#
# colors corresponding to status: keys match cucumber/report statuses,
# values are hex fills used by the charts and status styling.
# NOTE(review): :incomplete (#e7a35c) and :pending (#f7a35c) differ by
# a single digit — confirm that is intentional.
COLOR = {
  passed: '#90ed7d',
  working: '#90ed7d',
  failed: '#f45b5b',
  broken: '#f45b5b',
  undefined: '#e4d354',
  incomplete: '#e7a35c',
  pending: '#f7a35c',
  skipped: '#7cb5ec',
  output: '#007fff'
}
#
# Ex: ReportBuilder.configure do |config|
# config.json_path = 'cucumber_sample/logs'
# config.report_path = 'my_test_report'
# config.report_types = [:JSON, :HTML]
# config.report_tabs = [:Overview, :Features, :Scenarios, :Errors]
# config.report_title = 'My Test Results'
# config.compress_images = true
# config.additional_info = {Browser: 'Chrome', Environment: 'Stage 5'}
# end
#
def self.configure
  # Fix: OpenStruct comes from the stdlib 'ostruct' library, which this
  # file never requires; load it here so configure works standalone
  # instead of relying on another gem having required it.
  require 'ostruct'
  default_options = OpenStruct.new(
    json_path: nil, # [String] / [Array] Input json file, array of json files/path or json files path, (Default current directory)
    report_path: 'test_report', # [String] Output file path with name
    report_types: [:html], # [Array] Output file types to build, [:json, :html] or ['html', 'json']
    report_tabs: [:overview, :features], # [Array] Tabs to build, [:overview, :features, :scenarios, :errors] or ['overview', 'features', 'scenarios', 'errors']
    report_title: 'Test Results', # [String] Report and html title
    compress_images: false, # [Boolean] Set true to reducing the size of HTML report, Note: If true, takes more time to build report
    additional_info: {} # [Hash] Additional info for report summary
  )
  yield default_options if block_given?
  # Expose the settings as a plain Hash for build_report and later merges.
  @options = default_options.marshal_dump
end
#
# @param [Hash] options override the default and configured options.
#
# Ex: options = {
# json_path: 'cucumber_sample/logs',
# report_path: 'my_test_report',
# report_types: ['json', 'html'],
# report_tabs: [ 'overview', 'features', 'scenarios', 'errors'],
# report_title: 'My Test Results',
# compress_images: false,
# additional_info: {'browser' => 'Chrome', 'environment' => 'Stage 5'}
# }
#
# ReportBuilder.build_report options
#
# Builds the configured reports from cucumber JSON output files.
#
# Reads every input JSON file (see ::files), normalises it (::features),
# then writes "<report_path>.json" and/or "<report_path>.html" depending
# on @options[:report_types].
#
# @param options [Hash, nil] per-call overrides merged into @options
# @return [Array] [total_time, feature_data, scenario_data, step_data]
# @raise [RuntimeError] for invalid report_types/report_tabs, missing
#   input files, or unparsable JSON ('ReportBuilderParsingError')
def self.build_report(options = nil)
configure unless @options
@options.merge! options if options.is_a? Hash
raise 'Error: Invalid report_types Use: [:json, :html]' unless @options[:report_types].is_a? Array
raise 'Error: Invalid report_tabs Use: [:overview, :features, :scenarios, :errors]' unless @options[:report_tabs].is_a? Array
# Normalise user input: types compared as 'JSON'/'HTML', tabs as lowercase.
@options[:report_types].map!(&:to_s).map!(&:upcase)
@options[:report_tabs].map!(&:to_s).map!(&:downcase)
input = files @options[:json_path]
all_features = features input rescue (raise 'ReportBuilderParsingError')
# Optional raw dump of the merged, normalised feature data.
File.open(@options[:report_path] + '.json', 'w') do |file|
file.write JSON.pretty_generate all_features
puts "JSON test report generated: '#{@options[:report_path]}.json'"
end if @options[:report_types].include? 'JSON'
# Derived collections and per-status counts feeding the summary and charts.
all_scenarios = scenarios all_features
all_steps = steps all_scenarios
all_tags = tags all_scenarios
total_time = total_time all_features
feature_data = data all_features
scenario_data = data all_scenarios
step_data = data all_steps
# The HTML report is one self-contained page: CSS/JS assets are inlined,
# followed by a summary bar, jQuery-UI tabs and Highcharts graphs.
File.open(@options[:report_path] + '.html', 'w:UTF-8') do |file|
@builder = Builder::XmlMarkup.new(target: file, indent: 0)
@builder.declare!(:DOCTYPE, :html)
@builder << '<html>'
@builder.head do
@builder.meta(charset: 'UTF-8')
@builder.title @options[:report_title]
@builder.style(type: 'text/css') do
@builder << File.read(File.dirname(__FILE__) + '/../vendor/assets/stylesheets/jquery-ui.min.css')
# One CSS class per status, coloured from COLOR.
COLOR.each do |color|
@builder << ".#{color[0].to_s}{background:#{color[1]};color:#434348;padding:2px}"
end
@builder << '.summary{margin-bottom:4px;border: 1px solid #c5c5c5;border-radius:4px;background:#f1f1f1;color:#434348;padding:4px;overflow:hidden;vertical-align:bottom;}'
@builder << '.summary .results{text-align:right;float:right;}'
@builder << '.summary .info{text-align:left;float:left;}'
@builder << '.data_table{border-collapse: collapse;} .data_table td{padding: 5px; border: 1px solid #ddd;}'
@builder << '.ui-tooltip{background: black; color: white; font-size: 12px; padding: 2px 4px; border-radius: 20px; box-shadow: 0 0 7px black;}'
end
@builder.script(type: 'text/javascript') do
%w(jquery-min jquery-ui.min highcharts highcharts-3d).each do |js|
@builder << File.read(File.dirname(__FILE__) + '/../vendor/assets/javascripts/' + js + '.js')
end
@builder << '$(function(){$("#results").tabs();});'
@builder << "$(function(){$('#features').accordion({collapsible: true, heightStyle: 'content', active: false, icons: false});});"
# One accordion init per feature container (#feature0..).
# NOTE(review): the range is inclusive of all_features.size, so one
# extra (nonexistent) selector is initialised — harmless, but
# (0...all_features.size) would be exact.
(0..all_features.size).each do |n|
@builder << "$(function(){$('#feature#{n}').accordion({collapsible: true, heightStyle: 'content', active: false, icons: false});});"
end
@builder << "$(function(){$('#status').accordion({collapsible: true, heightStyle: 'content', active: false, icons: false});});"
scenario_data.each do |data|
@builder << "$(function(){$('##{data[:name]}').accordion({collapsible: true, heightStyle: 'content', active: false, icons: false});});"
end
@builder << '$(function() {$(document).tooltip({track: true});});'
end
end
@builder << '<body>'
# Summary bar: title + additional_info on the left, counts on the right.
@builder.div(class: 'summary') do
@builder.span(class: 'info') do
info = @options[:additional_info].empty?
@builder << '<br/> ' if info
@builder.span(style: "font-size:#{info ? 36 : 18 }px;font-weight: bold;") do
@builder << @options[:report_title]
end
@options[:additional_info].each do |l|
@builder << '<br/>' + l[0].to_s.capitalize + ' : ' + l[1].to_s
end
end if @options[:additional_info].is_a? Hash
@builder.span(class: 'results') do
s = all_features.size
@builder << s.to_s + " feature#{'s' if s > 1} ("
feature_data.each do |data|
@builder << ' ' + data[:count].to_s + ' ' + data[:name]
end
s = all_scenarios.size
@builder << ')<br/>' + s.to_s + " scenario#{'s' if s > 1} ("
scenario_data.each do |data|
@builder << ' ' + data[:count].to_s + ' ' + data[:name]
end
s = all_steps.size
@builder << ')<br/>' + s.to_s + " step#{'s' if s > 1} ("
step_data.each do |data|
@builder << ' ' + data[:count].to_s + ' ' + data[:name]
end
@builder << ')<br/>🕐 ' + duration(total_time).to_s
end
end
# Tab containers — only those listed in report_tabs are rendered.
@builder.div(id: 'results') do
build_menu @options[:report_tabs]
@builder.div(id: 'overviewTab') do
@builder << "<div id='featurePieChart' style=\"float:left;width:33%\"></div>"
@builder << "<div id='scenarioPieChart' style=\"display:inline-block;width:33%\"></div>"
@builder << "<div id='stepPieChart' style=\"float:right;width:33%\"></div>"
end if @options[:report_tabs].include? 'overview'
@builder.div(id: 'featuresTab') do
build_tags_drop_down(all_tags)
@builder.div(id: 'features') do
all_features.each_with_index do |feature, n|
@builder.h3(style: "background:#{COLOR[feature['status'].to_sym]}") do
@builder.span(class: feature['status']) do
@builder << "<strong>#{feature['keyword']}</strong> #{feature['name']} (#{duration(feature['duration'])})"
end
end
@builder.div do
@builder.div(id: "feature#{n}") do
feature['elements'].each{|scenario| build_scenario scenario}
end
end
end
end
@builder << "<div id='featureTabPieChart'></div>"
end if @options[:report_tabs].include? 'features'
@builder.div(id: 'scenariosTab') do
build_tags_drop_down(all_tags)
@builder.div(id: 'status') do
# Scenarios are grouped into one accordion section per status.
all_scenarios.group_by{|scenario| scenario['status']}.each do |data|
@builder.h3(style: "background:#{COLOR[data[0].to_sym]}") do
@builder.span(class: data[0]) do
@builder << "<strong>#{data[0].capitalize} scenarios (Count: <span id='count'>#{data[1].size}</span>)</strong>"
end
end
@builder.div do
@builder.div(id: data[0]) do
data[1].sort_by{|scenario| scenario['name']}.each{|scenario| build_scenario scenario}
end
end
end
end
@builder << "<div id='scenarioTabPieChart'></div>"
end if @options[:report_tabs].include? 'scenarios'
@builder.div(id: 'errorsTab') do
@builder.ol do
all_scenarios.each{|scenario| build_error_list scenario}
end
end if @options[:report_tabs].include? 'errors'
end
# Chart initialisers and the tag-filter change handlers.
@builder.script(type: 'text/javascript') do
@builder << pie_chart_js('featurePieChart', 'Features', feature_data) if @options[:report_tabs].include? 'overview'
@builder << donut_js('featureTabPieChart', 'Features', feature_data) if @options[:report_tabs].include? 'features'
@builder << pie_chart_js('scenarioPieChart', 'Scenarios', scenario_data) if @options[:report_tabs].include? 'overview'
@builder << donut_js('scenarioTabPieChart', 'Scenarios', scenario_data) if @options[:report_tabs].include? 'scenarios'
@builder << pie_chart_js('stepPieChart', 'Steps', step_data) if @options[:report_tabs].include? 'overview'
unless all_tags.empty?
@builder << '$("#featuresTab .select-tags").change(function(){
$("#featuresTab .scenario-all").hide().next().hide().parent().hide().parent().hide().prev().hide();
$("#featuresTab ." + $(this).val()).show().parent().show().parent().prev().show();});' if @options[:report_tabs].include? 'features'
@builder << '$("#scenariosTab .select-tags").change(function(){var val = $(this).val();$("#scenariosTab .scenario-all").hide().next().hide();
$("#scenariosTab ." + val).show();$("#scenariosTab #count").each(function(){status = $(this).parent().parent().prop("className");
count = $("#scenariosTab #" + status + " ." + val).length;countElement = $("#scenariosTab ." + status + " #count");
countElement.parent().parent().parent().show();if(count==0){countElement.parent().parent().parent().hide().next().hide();}
countElement.html(count);});});' if @options[:report_tabs].include? 'scenarios'
end
end
@builder << '</body>'
@builder << '</html>'
puts "HTML test report generated: '#{@options[:report_path]}.html'"
end if @options[:report_types].include? 'HTML'
[total_time, feature_data, scenario_data, step_data]
end
# Renders the jQuery-UI tab menu: one <li><a> entry per configured tab,
# each linking to the "#<tab>Tab" anchor of its tab container.
def self.build_menu(tabs)
  @builder.ul do
    tabs.each do |tab_name|
      @builder.li do
        anchor = "##{tab_name}Tab"
        @builder.a(href: anchor) do
          @builder << tab_name.capitalize
        end
      end
    end
  end
end
# Renders one scenario as an accordion section: an <h3> header (tag names
# in the tooltip, 'tag-*' CSS classes for the tag filter) followed by a
# <div> with before-hook errors, the steps, and after-hook output/errors.
def self.build_scenario(scenario)
# '@tag' becomes CSS class 'tag-tag' so the tag drop-down can filter.
tags = (scenario['tags'] ? scenario['tags'].map{|tag| tag['name']}.join(' ') : '')
@builder.h3(style: "background:#{COLOR[scenario['status'].to_sym]}", title: tags, class: 'scenario-all ' + tags.gsub('@','tag-')) do
@builder.span(class: scenario['status']) do
@builder << "<strong>#{scenario['keyword']}</strong> #{scenario['name']} (#{duration(scenario['duration'])})"
end
end
@builder.div do
scenario['before'].each do |before|
build_hook_error before
end
# NOTE(review): assumes scenario['steps'] is always present; a scenario
# without steps would raise here — confirm ::features normalises it.
scenario['steps'].each do |step|
build_step step, scenario['keyword']
end
scenario['after'].each do |after|
build_output after['output']
build_hook_error after
build_embedding after['embeddings']
end
end
end
# Renders a single step: a status-coloured line, then its data table,
# captured output, error details and embeddings, and finally the same
# output/error/embeddings for each of its AfterStep hooks.
# scenario_keyword is forwarded so hook error slicing can distinguish
# 'Scenario Outline' backtraces (see ::build_step_hook_error).
def self.build_step(step, scenario_keyword)
@builder.div(class: step['status']) do
@builder << "<strong>#{step['keyword']}</strong> #{step['name']} (#{duration(step['duration'])})"
end
build_data_table step['rows']
build_output step['output']
build_step_error step
build_embedding step['embeddings']
# AfterStep hooks are optional in the cucumber JSON.
step['after'].each do |after|
build_output after['output']
build_step_hook_error after, scenario_keyword
build_embedding after['embeddings']
end if step['after']
end
# Renders a step's data table (step['rows']) as an HTML table.
# Silently does nothing unless rows is an Array.
def self.build_data_table(rows)
  return unless rows.is_a?(Array)
  @builder.table(class: 'data_table', style: 'margin: 10px') do
    rows.each do |table_row|
      @builder.tr do
        table_row['cells'].each do |cell_value|
          @builder << "<td> #{cell_value} </td>"
        end
      end
    end
  end
end
# Writes each captured output line in the 'output' colour, converting
# newlines and tabs for HTML display. No-op unless outputs is an Array.
# NOTE(review): the trailing gsub(' ', ' ') looks like a mangled
# gsub(' ', '&nbsp;') — verify against the upstream source.
def self.build_output(outputs)
outputs.each do |output|
@builder << "<span style='color:#{COLOR[:output]}'>#{output.gsub("\n",'</br>').gsub("\t",' ').gsub(' ',' ')}</span><br/>"
end if outputs.is_a?(Array)
end
# Renders the tag filter <select>: an 'All' option plus one option per
# tag ('@name' mapped to the 'tag-name' CSS class used for filtering).
# No-op unless tags is an Array.
def self.build_tags_drop_down(tags)
  return unless tags.is_a?(Array)
  @builder.div(style: 'text-align:center;padding:5px;') do
    @builder << '<strong>Tag: </strong>'
    @builder.select(class: 'select-tags') do
      @builder.option(value: 'scenario-all') do
        @builder << 'All'
      end
      tags.each do |tag_name|
        css_class = tag_name.gsub('@', 'tag-')
        @builder.option(value: css_class) do
          @builder << tag_name
        end
      end
    end
  end
end
# Renders a failed step's error message. The message is split into lines:
# everything except the last two lines is the error text, the penultimate
# line is labelled 'SD:' and the last line 'FF:'.
# NOTE(review): 'SD'/'FF' presumably mean step-definition / feature-file
# locations in the cucumber backtrace — confirm with formatter output.
def self.build_step_error(step)
if step['status'] == 'failed' && step['result']['error_message']
@builder << "<strong style=color:#{COLOR[:failed]}>Error: </strong>"
error = step['result']['error_message'].split("\n")
@builder.span(style: "color:#{COLOR[:failed]}") do
error[0..-3].each do |line|
@builder << line + '<br/>'
end
end
@builder << "<strong>SD: </strong>#{error[-2]} <br/>"
@builder << "<strong>FF: </strong>#{error[-1]}<br/>"
end
end
# Renders a failed Before/After hook's error: all lines except the last
# as the error body, the last line labelled as the hook location.
def self.build_hook_error(hook)
if hook['status'] == 'failed'
@builder << "<strong style=color:#{COLOR[:failed]}>Error: </strong>"
error = hook['result']['error_message'].split("\n")
@builder.span(style: "color:#{COLOR[:failed]}") do
error[0..-2].each do |line|
@builder << line + '<br/>'
end
end
@builder << "<strong>Hook: </strong>#{error[-1]}<br/>"
end
end
# Renders an AfterStep hook failure. The backtrace tail differs between
# plain scenarios and 'Scenario Outline' runs, hence the two slice
# offsets (-8/-7 vs -5/-4) when extracting the hook and 'FF:' lines.
# NOTE(review): the offsets encode a specific cucumber backtrace shape —
# verify against the cucumber version producing the JSON.
def self.build_step_hook_error(hook, scenario_keyword)
if hook['result']['error_message']
@builder << "<strong style=color:#{COLOR[:failed]}>Error: </strong>"
error = hook['result']['error_message'].split("\n")
@builder.span(style: "color:#{COLOR[:failed]}") do
(scenario_keyword == 'Scenario Outline' ? error[0..-8] : error[0..-5]).each do |line|
@builder << line + '<br/>'
end
end
@builder << "<strong>Hook: </strong>#{scenario_keyword == 'Scenario Outline' ? error[-7] : error[-4]} <br/>"
@builder << "<strong>FF: </strong>#{error[-2]}<br/>"
end
end
# Renders step/hook embeddings. Images become a 'Screenshot' toggle link
# plus a hidden <img> (deduplicated when compress_images is on); plain
# text is treated as a link target. Other mime types are ignored.
# @embedding_count gives every embedding a unique DOM id.
def self.build_embedding(embeddings)
@embedding_count ||= 0
embeddings.each do |embedding|
id = "embedding_#{@embedding_count}"
if embedding['mime_type'] =~ /^image\/(png|gif|jpg|jpeg)/
@builder.span(class: 'image') do
# Clicking the link toggles the matching <img>'s visibility.
@builder.a(href: '', style: 'text-decoration: none;', onclick: "img=document.getElementById('#{id}');img.style.display = (img.style.display == 'none' ? 'block' : 'none');return false") do
@builder.span(style: "color: #{COLOR[:output]}; font-weight: bold; border-bottom: 1px solid #{COLOR[:output]};") do
@builder << 'Screenshot'
end
end
@builder << '<br/>'
# Best-effort: a broken image payload must not abort the report.
@options[:compress_images] ? build_unique_image(embedding, id) : build_image(embedding,id) rescue puts 'Image embedding failed!'
end
elsif embedding['mime_type'] =~ /^text\/plain/
@builder.span(class: 'link') do
src = Base64.decode64(embedding['data'])
@builder.a(id: id, style: 'text-decoration: none;', href: src, title: 'Link') do
@builder.span(style: "color: #{COLOR[:output]}; font-weight: bold; border-bottom: 1px solid #{COLOR[:output]};") do
@builder << src
end
end
@builder << '<br/>'
end rescue puts('Link embedding skipped!')
end
@embedding_count += 1
end if embeddings.is_a?(Array)
end
# compress_images mode: identical image payloads share a single CSS class
# whose 'content: url(...)' carries the data, so each unique screenshot
# is stored once no matter how many steps embed it.
def self.build_unique_image(image, id)
@images ||= []
index = @images.find_index image
if index
klass = "image_#{index}"
else
@images << image
klass = "image_#{@images.size - 1}"
@builder.style(type: 'text/css') do
begin
# Payload may be base64 or already a data: URI; try decoding first.
# NOTE(review): prepending ';base64,' to the *decoded* bytes looks
# suspect — confirm which payload shapes this is meant to handle.
src = Base64.decode64(image['data'])
src = 'data:' + image['mime_type'] + ';base64,' + src unless src =~ /^data:image\/(png|gif|jpg|jpeg);base64,/
@builder << "img.#{klass} {content: url(#{src});}"
rescue
# Fall back to using the raw payload untouched.
src = image['data']
src = 'data:' + image['mime_type'] + ';base64,' + src unless src =~ /^data:image\/(png|gif|jpg|jpeg);base64,/
@builder << "img.#{klass} {content: url(#{src});}"
end
end
end
@builder << %{<img id='#{id}' class='#{klass}' style='display: none; border: 1px solid #{COLOR[:output]};' />}
end
# Default (non-compressed) image embedding: emits a hidden <img> whose
# src carries the image inline as a data: URI.
# NOTE(review): as in ::build_unique_image, the ';base64,' prefix is
# applied to the *decoded* bytes — confirm intended payload format.
def self.build_image(image, id)
begin
src = Base64.decode64(image['data'])
src = 'data:' + image['mime_type'] + ';base64,' + src unless src =~ /^data:image\/(png|gif|jpg|jpeg);base64,/
@builder << %{<img id='#{id}' style='display: none; border: 1px solid #{COLOR[:output]};' src='#{src}'/>}
rescue
# Decoding failed — embed the raw payload as-is.
src = image['data']
src = 'data:' + image['mime_type'] + ';base64,' + src unless src =~ /^data:image\/(png|gif|jpg|jpeg);base64,/
@builder << %{<img id='#{id}' style='display: none; border: 1px solid #{COLOR[:output]};' src='#{src}'/>}
end
end
# Adds one <li> per failure in the scenario to the Errors tab list:
# failed Before hooks, failed steps (and their AfterStep hooks), and
# failed After hooks, each with the sliced error text and location lines
# (same slicing conventions as build_step_error/build_hook_error).
def self.build_error_list(scenario)
# Failed Before hooks: last line is the hook location.
scenario['before'].each do |before|
next unless before['status'] == 'failed'
@builder.li do
error = before['result']['error_message'].split("\n")
@builder.span(style: "color:#{COLOR[:failed]}") do
error[0..-2].each do |line|
@builder << line + '<br/>'
end
end
@builder << "<strong>Hook: </strong>#{error[-1]} <br/>"
@builder << "<strong>Scenario: </strong>#{scenario['name']} <br/><hr/>"
end
end
# Failed steps and their AfterStep hooks; Scenario Outline backtraces
# have a longer tail, hence the alternate slice offsets.
scenario['steps'].each do |step|
step['after'].each do |after|
next unless after['status'] == 'failed'
@builder.li do
error = after['result']['error_message'].split("\n")
@builder.span(style: "color:#{COLOR[:failed]}") do
(scenario['keyword'] == 'Scenario Outline' ? error[0..-8] : error[0..-5]).each do |line|
@builder << line + '<br/>'
end
end
@builder << "<strong>Hook: </strong>#{scenario['keyword'] == 'Scenario Outline' ? error[-7] : error[-4]} <br/>"
@builder << "<strong>FF: </strong>#{error[-2]} <br/><hr/>"
end
end if step['after']
next unless step['status'] == 'failed' && step['result']['error_message']
@builder.li do
error = step['result']['error_message'].split("\n")
@builder.span(style: "color:#{COLOR[:failed]}") do
error[0..-3].each do |line|
@builder << line + '<br/>'
end
end
@builder << "<strong>SD: </strong>#{error[-2]} <br/>"
@builder << "<strong>FF: </strong>#{error[-1]} <br/><hr/>"
end
end
# Failed After hooks: same shape as Before hooks.
scenario['after'].each do |after|
next unless after['status'] == 'failed'
@builder.li do
error = after['result']['error_message'].split("\n")
@builder.span(style: "color:#{COLOR[:failed]}") do
error[0..-2].each do |line|
@builder << line + '<br/>'
end
end
@builder << "<strong>Hook: </strong>#{error[-1]} <br/>"
@builder << "<strong>Scenario: </strong>#{scenario['name']} <br/><hr/>"
end
end
end
# Reads all input JSON files, merges re-run copies of the same feature,
# folds background steps into the scenario that follows them, and
# annotates every hook/step/scenario/feature with computed 'status' and
# 'duration' keys.
#
# Fixes over the previous version: nil-safe grouping key (missing
# 'uri'/'id'), features with no 'elements' no longer crash, and a
# scenario without steps (e.g. an empty outline) is normalised to [].
#
# @param files [Array<String>] json report file paths (see ::files)
# @return [Array<Hash>] normalised feature hashes sorted by name
def self.features(files)
  files.each_with_object([]) { |file, features|
    data = File.read(file)
    next if data.empty?
    features << JSON.parse(data)
  }.flatten.group_by { |feature|
    # Re-runs of one feature share uri+id+line; to_s keeps nil parts safe.
    feature['uri'].to_s + feature['id'].to_s + feature['line'].to_s
  }.values.each_with_object([]) { |group, features|
    # Collapse each group into one feature carrying every run's scenarios.
    # compact drops nil entries from features that had no 'elements'.
    features << group.first.except('elements').merge('elements' => group.map { |feature| feature['elements'] }.flatten.compact)
  }.sort_by! { |feature| feature['name'] }.each { |feature|
    # Backgrounds alternate with scenarios; prepend each background's
    # steps (labelled with its keyword) to the scenario that follows.
    if feature['elements'][0] && feature['elements'][0]['type'] == 'background'
      (0..feature['elements'].size - 1).step(2) do |i|
        feature['elements'][i]['steps'] ||= []
        feature['elements'][i]['steps'].each { |step| step['name'] += (' (' + feature['elements'][i]['keyword'] + ')') }
        feature['elements'][i + 1]['steps'] = feature['elements'][i]['steps'] + (feature['elements'][i + 1]['steps'] || [])
        feature['elements'][i + 1]['before'] = feature['elements'][i]['before'] if feature['elements'][i]['before']
      end
      feature['elements'].reject! { |element| element['type'] == 'background' }
    end
    feature['elements'].each { |scenario|
      scenario['before'] ||= []
      scenario['before'].each { |before|
        before['result']['duration'] ||= 0
        before.merge! 'status' => before['result']['status'], 'duration' => before['result']['duration']
      }
      scenario['steps'] ||= [] # a scenario may legitimately have no steps
      scenario['steps'].each { |step|
        step['result']['duration'] ||= 0
        duration = step['result']['duration']
        status = step['result']['status']
        # AfterStep hook time/failure counts against the step itself.
        step['after'].each { |after|
          after['result']['duration'] ||= 0
          duration += after['result']['duration']
          status = 'failed' if after['result']['status'] == 'failed'
          after.merge! 'status' => after['result']['status'], 'duration' => after['result']['duration']
        } if step['after']
        step.merge! 'status' => status, 'duration' => duration
      }
      scenario['after'] ||= []
      scenario['after'].each { |after|
        after['result']['duration'] ||= 0
        after.merge! 'status' => after['result']['status'], 'duration' => after['result']['duration']
      }
      scenario.merge! 'status' => scenario_status(scenario), 'duration' => total_time(scenario['before']) + total_time(scenario['steps']) + total_time(scenario['after'])
    }
    feature.merge! 'status' => feature_status(feature), 'duration' => total_time(feature['elements'])
  }
end
# Rolls scenario statuses up to a feature status: any failed scenario
# makes the feature 'broken'; any undefined/pending scenario makes it
# 'incomplete'; otherwise it is 'working'.
def self.feature_status(feature)
  result = 'working'
  feature['elements'].each do |scenario|
    case scenario['status']
    when 'failed'
      return 'broken'
    when 'undefined', 'pending'
      result = 'incomplete'
    end
  end
  result
end
# Collects every scenario ('elements' entry) across all features.
def self.scenarios(features)
  features.flat_map { |feature| feature['elements'] }
end
# A scenario's status is the status of its first non-passed hook or
# step (in before → steps → after order), or 'passed' if all passed.
def self.scenario_status(scenario)
  all_parts = scenario['before'] + scenario['steps'] + scenario['after']
  offender = all_parts.find { |part| part['status'] != 'passed' }
  offender ? offender['status'] : 'passed'
end
# Collects every step across all scenarios.
def self.steps(scenarios)
  scenarios.flat_map { |scenario| scenario['steps'] }
end
# Returns the unique tag names ('@foo') used by any scenario, in first
# appearance order. Scenarios without a 'tags' key contribute nothing.
def self.tags(scenarios)
  scenarios.flat_map { |scenario|
    (scenario['tags'] || []).map { |tag| tag['name'] }
  }.uniq
end
# Resolves the json_path option to a unique list of .json files:
# a '.json' String is used as-is, any other String is globbed as a
# directory, nil globs the current directory, and an Array is resolved
# element-wise. Raises 'InvalidInput' for anything else and
# 'InvalidOrNoInputFile' when nothing matches.
def self.files(path)
  resolved = case path
             when String
               path =~ /\.json$/ ? [path] : Dir.glob("#{path}/*.json")
             when nil
               Dir.glob('*.json')
             when Array
               path.flat_map { |entry| entry =~ /\.json$/ ? entry : Dir.glob("#{entry}/*.json") }
             else
               raise 'InvalidInput'
             end
  raise 'InvalidOrNoInputFile' if resolved.empty?
  resolved.uniq
end
# Tallies features/scenarios/steps by status for the summary counters
# and charts: one {name:, count:, color:} hash per status present.
def self.data(all_data)
  all_data.group_by { |item| item['status'] }.map do |status, items|
    { name: status, count: items.size, color: COLOR[status.to_sym] }
  end
end
# Sums the 'duration' values (nanoseconds) of the given items.
def self.total_time(data)
  data.sum { |item| item['duration'] }
end
# Formats a nanosecond duration as "Xm Y.YYYs".
def self.duration(seconds)
  total_seconds = seconds.to_f / 1_000_000_000
  minutes, secs = total_seconds.divmod(60)
  format('%dm %.3fs', minutes, secs)
end
# Builds the Highcharts init script for a 3D pie chart: each status hash
# becomes one series point literal (name/y/sliced/selected/color).
def self.pie_chart_js(id, title, data)
  series = data.map { |slice|
    fragment = "{name: '#{slice[:name].capitalize}'"
    fragment << ",y: #{slice[:count]}"
    fragment << ',sliced: true' if slice[:sliced]
    fragment << ',selected: true' if slice[:selected]
    fragment << ",color: '#{slice[:color]}'" if slice[:color]
    fragment << '}'
  }.join(',')
  "$(function (){$('##{id}').highcharts({credits: {enabled: false}, chart: {type: 'pie',
options3d: {enabled: true, alpha: 45, beta: 0}}, title: {text: '#{title}'},
tooltip: {pointFormat: 'Count: <b>{point.y}</b><br/>Percentage: <b>{point.percentage:.1f}%</b>'},
plotOptions: {pie: {allowPointSelect: true, cursor: 'pointer', depth: 35, dataLabels: {enabled: true,
format: '{point.name}'}}}, series: [{type: 'pie', name: 'Results', data: [#{series}]}]});});"
end
# Builds the Highcharts init script for a half-donut chart (used at the
# bottom of the Features/Scenarios tabs); point literals share the same
# shape as ::pie_chart_js.
def self.donut_js(id, title, data)
  series = data.map { |slice|
    fragment = "{name: '#{slice[:name].capitalize}'"
    fragment << ",y: #{slice[:count]}"
    fragment << ',sliced: true' if slice[:sliced]
    fragment << ',selected: true' if slice[:selected]
    fragment << ",color: '#{slice[:color]}'" if slice[:color]
    fragment << '}'
  }.join(',')
  "$(function (){$('##{id}').highcharts({credits: {enabled: false},
chart: {plotBackgroundColor: null, plotBorderWidth: 0, plotShadow: false, width: $(document).width()-80},
title: {text: '#{title}', align: 'center', verticalAlign: 'middle', y: 40},
tooltip: {pointFormat: 'Count: <b>{point.y}</b><br/>Percentage: <b>{point.percentage:.1f}%</b>'},
plotOptions: {pie: {dataLabels: {enabled: true, distance: -50,
style: {fontWeight: 'bold', color: 'white', textShadow: '0px 1px 2px black'}},
startAngle: -90, endAngle: 90, center: ['50%', '75%']}},
series: [{type: 'pie', innerSize: '50%', name: 'Results', data: [#{series}]}]});});"
end
# Everything below is an implementation detail of ::build_report; only
# ::configure and ::build_report form the public API of this class.
private_class_method :donut_js, :pie_chart_js, :files,
:features, :feature_status,
:scenarios, :scenario_status, :steps,
:data, :duration, :total_time,
:build_scenario, :build_step,
:build_menu, :build_output, :build_embedding,
:build_error_list, :build_step_error,
:build_hook_error, :build_step_hook_error,
:build_unique_image, :build_image,
:build_data_table, :tags, :build_tags_drop_down
end
# scenario without steps
require 'base64'
require 'builder'
require 'json'
require 'ostruct'
# Add except method to Hash
# Polyfill for Hash#except / Hash#except! (built into Ruby >= 3.0).
# #except returns a copy without the given keys; #except! removes them
# from the receiver in place and returns self.
class Hash
  def except(*keys)
    dup.except!(*keys)
  end

  def except!(*keys)
    keys.each { |unwanted| delete(unwanted) }
    self
  end
end
# Main report builder class
class ReportBuilder
# Typical usage — configure once, then build:
#
# ReportBuilder.configure do |config|
# config.json_path = 'cucumber_sample/logs'
# config.report_path = 'my_test_report'
# config.report_types = [:json, :html]
# config.report_tabs = [:overview, :features, :scenarios, :errors]
# config.report_title = 'My Test Results'
# config.compress_images = false
# config.additional_info = {browser: 'Chrome', environment: 'Stage 5'}
# end
#
# ReportBuilder.build_report
#
# colors corresponding to status
# Status → colour mapping used for section backgrounds, chart slices and
# the per-status CSS classes. Frozen to prevent accidental mutation.
COLOR = {
  passed: '#90ed7d',
  working: '#90ed7d',
  failed: '#f45b5b',
  broken: '#f45b5b',
  undefined: '#e4d354',
  incomplete: '#e7a35c',
  pending: '#f7a35c',
  skipped: '#7cb5ec',
  output: '#007fff'
}.freeze
#
# Ex: ReportBuilder.configure do |config|
# config.json_path = 'cucumber_sample/logs'
# config.report_path = 'my_test_report'
# config.report_types = [:JSON, :HTML]
# config.report_tabs = [:Overview, :Features, :Scenarios, :Errors]
# config.report_title = 'My Test Results'
# config.compress_images = true
# config.additional_info = {Browser: 'Chrome', Environment: 'Stage 5'}
# end
#
# Yields an OpenStruct of default options for the caller to mutate, then
# stores the result as the option hash used by ::build_report.
#
# @yieldparam config [OpenStruct] the defaults, ready for mutation
# @return [Hash] the resolved options (symbol-keyed)
def self.configure
  defaults = OpenStruct.new(
    json_path: nil,                      # String/Array: input json file(s) or dir(s); nil = current dir
    report_path: 'test_report',          # String: output path + basename (no extension)
    report_types: [:html],               # Array: any of :json, :html
    report_tabs: [:overview, :features], # Array: any of :overview, :features, :scenarios, :errors
    report_title: 'Test Results',        # String: report and html <title>
    compress_images: false,              # Boolean: true shrinks the HTML but slows the build
    additional_info: {}                  # Hash: extra key/value pairs for the summary bar
  )
  yield defaults if block_given?
  @options = defaults.marshal_dump
end
#
# @param [Hash] options override the default and configured options.
#
# Ex: options = {
# json_path: 'cucumber_sample/logs',
# report_path: 'my_test_report',
# report_types: ['json', 'html'],
# report_tabs: [ 'overview', 'features', 'scenarios', 'errors'],
# report_title: 'My Test Results',
# compress_images: false,
# additional_info: {'browser' => 'Chrome', 'environment' => 'Stage 5'}
# }
#
# ReportBuilder.build_report options
#
# Builds the configured reports (JSON and/or HTML) from cucumber JSON
# files: resolve inputs (::files), normalise (::features), then emit
# "<report_path>.json" and/or a self-contained "<report_path>.html".
#
# @param options [Hash, nil] per-call overrides merged into @options
# @return [Array] [total_time, feature_data, scenario_data, step_data]
# @raise [RuntimeError] on invalid options or unparsable input JSON
def self.build_report(options = nil)
configure unless @options
@options.merge! options if options.is_a? Hash
raise 'Error: Invalid report_types Use: [:json, :html]' unless @options[:report_types].is_a? Array
raise 'Error: Invalid report_tabs Use: [:overview, :features, :scenarios, :errors]' unless @options[:report_tabs].is_a? Array
# Normalise user input: types compared as 'JSON'/'HTML', tabs lowercase.
@options[:report_types].map!(&:to_s).map!(&:upcase)
@options[:report_tabs].map!(&:to_s).map!(&:downcase)
input = files @options[:json_path]
all_features = features input rescue (raise 'ReportBuilderParsingError')
File.open(@options[:report_path] + '.json', 'w') do |file|
file.write JSON.pretty_generate all_features
puts "JSON test report generated: '#{@options[:report_path]}.json'"
end if @options[:report_types].include? 'JSON'
# Derived collections and per-status counts for the summary and charts.
all_scenarios = scenarios all_features
all_steps = steps all_scenarios
all_tags = tags all_scenarios
total_time = total_time all_features
feature_data = data all_features
scenario_data = data all_scenarios
step_data = data all_steps
# Single self-contained HTML page: inlined assets, summary, tabs, charts.
File.open(@options[:report_path] + '.html', 'w:UTF-8') do |file|
@builder = Builder::XmlMarkup.new(target: file, indent: 0)
@builder.declare!(:DOCTYPE, :html)
@builder << '<html>'
@builder.head do
@builder.meta(charset: 'UTF-8')
@builder.title @options[:report_title]
@builder.style(type: 'text/css') do
@builder << File.read(File.dirname(__FILE__) + '/../vendor/assets/stylesheets/jquery-ui.min.css')
COLOR.each do |color|
@builder << ".#{color[0].to_s}{background:#{color[1]};color:#434348;padding:2px}"
end
@builder << '.summary{margin-bottom:4px;border: 1px solid #c5c5c5;border-radius:4px;background:#f1f1f1;color:#434348;padding:4px;overflow:hidden;vertical-align:bottom;}'
@builder << '.summary .results{text-align:right;float:right;}'
@builder << '.summary .info{text-align:left;float:left;}'
@builder << '.data_table{border-collapse: collapse;} .data_table td{padding: 5px; border: 1px solid #ddd;}'
@builder << '.ui-tooltip{background: black; color: white; font-size: 12px; padding: 2px 4px; border-radius: 20px; box-shadow: 0 0 7px black;}'
end
@builder.script(type: 'text/javascript') do
%w(jquery-min jquery-ui.min highcharts highcharts-3d).each do |js|
@builder << File.read(File.dirname(__FILE__) + '/../vendor/assets/javascripts/' + js + '.js')
end
@builder << '$(function(){$("#results").tabs();});'
@builder << "$(function(){$('#features').accordion({collapsible: true, heightStyle: 'content', active: false, icons: false});});"
# NOTE(review): inclusive range inits one extra (nonexistent) selector.
(0..all_features.size).each do |n|
@builder << "$(function(){$('#feature#{n}').accordion({collapsible: true, heightStyle: 'content', active: false, icons: false});});"
end
@builder << "$(function(){$('#status').accordion({collapsible: true, heightStyle: 'content', active: false, icons: false});});"
scenario_data.each do |data|
@builder << "$(function(){$('##{data[:name]}').accordion({collapsible: true, heightStyle: 'content', active: false, icons: false});});"
end
@builder << '$(function() {$(document).tooltip({track: true});});'
end
end
@builder << '<body>'
# Summary bar: title/additional_info left, status counts right.
@builder.div(class: 'summary') do
@builder.span(class: 'info') do
info = @options[:additional_info].empty?
@builder << '<br/> ' if info
@builder.span(style: "font-size:#{info ? 36 : 18 }px;font-weight: bold;") do
@builder << @options[:report_title]
end
@options[:additional_info].each do |l|
@builder << '<br/>' + l[0].to_s.capitalize + ' : ' + l[1].to_s
end
end if @options[:additional_info].is_a? Hash
@builder.span(class: 'results') do
s = all_features.size
@builder << s.to_s + " feature#{'s' if s > 1} ("
feature_data.each do |data|
@builder << ' ' + data[:count].to_s + ' ' + data[:name]
end
s = all_scenarios.size
@builder << ')<br/>' + s.to_s + " scenario#{'s' if s > 1} ("
scenario_data.each do |data|
@builder << ' ' + data[:count].to_s + ' ' + data[:name]
end
s = all_steps.size
@builder << ')<br/>' + s.to_s + " step#{'s' if s > 1} ("
step_data.each do |data|
@builder << ' ' + data[:count].to_s + ' ' + data[:name]
end
@builder << ')<br/>🕐 ' + duration(total_time).to_s
end
end
# Tab containers — only those listed in report_tabs are rendered.
@builder.div(id: 'results') do
build_menu @options[:report_tabs]
@builder.div(id: 'overviewTab') do
@builder << "<div id='featurePieChart' style=\"float:left;width:33%\"></div>"
@builder << "<div id='scenarioPieChart' style=\"display:inline-block;width:33%\"></div>"
@builder << "<div id='stepPieChart' style=\"float:right;width:33%\"></div>"
end if @options[:report_tabs].include? 'overview'
@builder.div(id: 'featuresTab') do
build_tags_drop_down(all_tags)
@builder.div(id: 'features') do
all_features.each_with_index do |feature, n|
@builder.h3(style: "background:#{COLOR[feature['status'].to_sym]}") do
@builder.span(class: feature['status']) do
@builder << "<strong>#{feature['keyword']}</strong> #{feature['name']} (#{duration(feature['duration'])})"
end
end
@builder.div do
@builder.div(id: "feature#{n}") do
feature['elements'].each{|scenario| build_scenario scenario}
end
end
end
end
@builder << "<div id='featureTabPieChart'></div>"
end if @options[:report_tabs].include? 'features'
@builder.div(id: 'scenariosTab') do
build_tags_drop_down(all_tags)
@builder.div(id: 'status') do
# One accordion section per scenario status.
all_scenarios.group_by{|scenario| scenario['status']}.each do |data|
@builder.h3(style: "background:#{COLOR[data[0].to_sym]}") do
@builder.span(class: data[0]) do
@builder << "<strong>#{data[0].capitalize} scenarios (Count: <span id='count'>#{data[1].size}</span>)</strong>"
end
end
@builder.div do
@builder.div(id: data[0]) do
data[1].sort_by{|scenario| scenario['name']}.each{|scenario| build_scenario scenario}
end
end
end
end
@builder << "<div id='scenarioTabPieChart'></div>"
end if @options[:report_tabs].include? 'scenarios'
@builder.div(id: 'errorsTab') do
@builder.ol do
all_scenarios.each{|scenario| build_error_list scenario}
end
end if @options[:report_tabs].include? 'errors'
end
# Chart initialisers and tag-filter change handlers.
@builder.script(type: 'text/javascript') do
@builder << pie_chart_js('featurePieChart', 'Features', feature_data) if @options[:report_tabs].include? 'overview'
@builder << donut_js('featureTabPieChart', 'Features', feature_data) if @options[:report_tabs].include? 'features'
@builder << pie_chart_js('scenarioPieChart', 'Scenarios', scenario_data) if @options[:report_tabs].include? 'overview'
@builder << donut_js('scenarioTabPieChart', 'Scenarios', scenario_data) if @options[:report_tabs].include? 'scenarios'
@builder << pie_chart_js('stepPieChart', 'Steps', step_data) if @options[:report_tabs].include? 'overview'
unless all_tags.empty?
@builder << '$("#featuresTab .select-tags").change(function(){
$("#featuresTab .scenario-all").hide().next().hide().parent().hide().parent().hide().prev().hide();
$("#featuresTab ." + $(this).val()).show().parent().show().parent().prev().show();});' if @options[:report_tabs].include? 'features'
@builder << '$("#scenariosTab .select-tags").change(function(){var val = $(this).val();$("#scenariosTab .scenario-all").hide().next().hide();
$("#scenariosTab ." + val).show();$("#scenariosTab #count").each(function(){status = $(this).parent().parent().prop("className");
count = $("#scenariosTab #" + status + " ." + val).length;countElement = $("#scenariosTab ." + status + " #count");
countElement.parent().parent().parent().show();if(count==0){countElement.parent().parent().parent().hide().next().hide();}
countElement.html(count);});});' if @options[:report_tabs].include? 'scenarios'
end
end
@builder << '</body>'
@builder << '</html>'
puts "HTML test report generated: '#{@options[:report_path]}.html'"
end if @options[:report_types].include? 'HTML'
[total_time, feature_data, scenario_data, step_data]
end
# Emits the jQuery-UI tab list: a <ul> of <li><a href="#<tab>Tab"> links,
# one per configured report tab.
def self.build_menu(tabs)
  @builder.ul do
    tabs.each do |tab|
      @builder.li do
        @builder.a(href: '#' + tab + 'Tab') do
          @builder << tab.capitalize
        end
      end
    end
  end
end
# Renders one scenario as an accordion section: status-coloured <h3>
# header (tags as tooltip + 'tag-*' filter classes) and a body with
# before-hook errors, steps, and after-hook output/errors/embeddings.
def self.build_scenario(scenario)
# '@tag' -> 'tag-tag' so the tag drop-down can show/hide by CSS class.
tags = (scenario['tags'] ? scenario['tags'].map{|tag| tag['name']}.join(' ') : '')
@builder.h3(style: "background:#{COLOR[scenario['status'].to_sym]}", title: tags, class: 'scenario-all ' + tags.gsub('@','tag-')) do
@builder.span(class: scenario['status']) do
@builder << "<strong>#{scenario['keyword']}</strong> #{scenario['name']} (#{duration(scenario['duration'])})"
end
end
@builder.div do
scenario['before'].each do |before|
build_hook_error before
end
# NOTE(review): assumes 'steps' is present — confirm ::features
# normalises scenarios without steps before this runs.
scenario['steps'].each do |step|
build_step step, scenario['keyword']
end
scenario['after'].each do |after|
build_output after['output']
build_hook_error after
build_embedding after['embeddings']
end
end
end
# Renders one step: status-coloured line, then data table, output,
# error, embeddings, and the same trio for each AfterStep hook.
# scenario_keyword lets hook error slicing handle 'Scenario Outline'.
def self.build_step(step, scenario_keyword)
@builder.div(class: step['status']) do
@builder << "<strong>#{step['keyword']}</strong> #{step['name']} (#{duration(step['duration'])})"
end
build_data_table step['rows']
build_output step['output']
build_step_error step
build_embedding step['embeddings']
# AfterStep hooks are optional in the cucumber JSON.
step['after'].each do |after|
build_output after['output']
build_step_hook_error after, scenario_keyword
build_embedding after['embeddings']
end if step['after']
end
# Emits a step's data table rows as an HTML <table>; ignores anything
# that is not an Array.
def self.build_data_table(rows)
  return unless rows.is_a?(Array)
  @builder.table(class: 'data_table', style: 'margin: 10px') do
    rows.each do |row|
      @builder.tr do
        cells = row['cells']
        cells.each { |cell| @builder << "<td> #{cell} </td>" }
      end
    end
  end
end
# Writes each captured output line in the 'output' colour, converting
# newlines/tabs for HTML. No-op unless outputs is an Array.
# NOTE(review): the trailing gsub(' ', ' ') looks like a mangled
# gsub(' ', '&nbsp;') — verify against the upstream source.
def self.build_output(outputs)
outputs.each do |output|
@builder << "<span style='color:#{COLOR[:output]}'>#{output.gsub("\n",'</br>').gsub("\t",' ').gsub(' ',' ')}</span><br/>"
end if outputs.is_a?(Array)
end
# Emits the tag filter <select>: 'All' first, then one option per tag
# with '@name' mapped to its 'tag-name' CSS class value.
# Ignores anything that is not an Array.
def self.build_tags_drop_down(tags)
  return unless tags.is_a?(Array)
  @builder.div(style: 'text-align:center;padding:5px;') do
    @builder << '<strong>Tag: </strong>'
    @builder.select(class: 'select-tags') do
      @builder.option(value: 'scenario-all') { @builder << 'All' }
      tags.each do |tag|
        @builder.option(value: tag.gsub('@', 'tag-')) { @builder << tag }
      end
    end
  end
end
# Renders a failed step's error. The message is split into lines: everything
# up to the last two lines is the error body; the final two appear to be the
# step-definition (SD) and feature-file (FF) locations — slice offsets are
# formatter-specific, confirm against the JSON producer.
def self.build_step_error(step)
  if step['status'] == 'failed' && step['result']['error_message']
    @builder << "<strong style=color:#{COLOR[:failed]}>Error: </strong>"
    error = step['result']['error_message'].split("\n")
    @builder.span(style: "color:#{COLOR[:failed]}") do
      error[0..-3].each do |line|
        @builder << line + '<br/>'
      end
    end
    @builder << "<strong>SD: </strong>#{error[-2]} <br/>"
    @builder << "<strong>FF: </strong>#{error[-1]}<br/>"
  end
end
# Renders a failed before/after-hook's error: all but the last message line
# as the body, the last line labelled as the hook location.
def self.build_hook_error(hook)
  if hook['status'] == 'failed'
    @builder << "<strong style=color:#{COLOR[:failed]}>Error: </strong>"
    error = hook['result']['error_message'].split("\n")
    @builder.span(style: "color:#{COLOR[:failed]}") do
      error[0..-2].each do |line|
        @builder << line + '<br/>'
      end
    end
    @builder << "<strong>Hook: </strong>#{error[-1]}<br/>"
  end
end
# Renders an after-step hook failure. Scenario Outlines carry extra
# backtrace lines in the message, so the slice points differ: the hook
# location and feature-file (FF) line sit deeper for outlines. Offsets are
# formatter-specific — confirm against the JSON producer before changing.
def self.build_step_hook_error(hook, scenario_keyword)
  if hook['result']['error_message']
    @builder << "<strong style=color:#{COLOR[:failed]}>Error: </strong>"
    error = hook['result']['error_message'].split("\n")
    @builder.span(style: "color:#{COLOR[:failed]}") do
      (scenario_keyword == 'Scenario Outline' ? error[0..-8] : error[0..-5]).each do |line|
        @builder << line + '<br/>'
      end
    end
    @builder << "<strong>Hook: </strong>#{scenario_keyword == 'Scenario Outline' ? error[-7] : error[-4]} <br/>"
    @builder << "<strong>FF: </strong>#{error[-2]}<br/>"
  end
end
# Renders step/hook embeddings. Image embeddings get a show/hide
# 'Screenshot' toggle (payload de-duplicated when :compress_images is set);
# text/plain embeddings become links whose href is the base64-decoded
# payload. Failures are swallowed with a console message so a single bad
# embedding cannot break the whole report.
def self.build_embedding(embeddings)
  @embedding_count ||= 0  # DOM ids must stay unique across the report
  embeddings.each do |embedding|
    id = "embedding_#{@embedding_count}"
    if embedding['mime_type'] =~ /^image\/(png|gif|jpg|jpeg)/
      @builder.span(class: 'image') do
        # Inline JS toggles the image's display on click.
        @builder.a(href: '', style: 'text-decoration: none;', onclick: "img=document.getElementById('#{id}');img.style.display = (img.style.display == 'none' ? 'block' : 'none');return false") do
          @builder.span(style: "color: #{COLOR[:output]}; font-weight: bold; border-bottom: 1px solid #{COLOR[:output]};") do
            @builder << 'Screenshot'
          end
        end
        @builder << '<br/>'
        @options[:compress_images] ? build_unique_image(embedding, id) : build_image(embedding,id) rescue puts 'Image embedding failed!'
      end
    elsif embedding['mime_type'] =~ /^text\/plain/
      @builder.span(class: 'link') do
        src = Base64.decode64(embedding['data'])
        @builder.a(id: id, style: 'text-decoration: none;', href: src, title: 'Link') do
          @builder.span(style: "color: #{COLOR[:output]}; font-weight: bold; border-bottom: 1px solid #{COLOR[:output]};") do
            @builder << src
          end
        end
        @builder << '<br/>'
      end rescue puts('Link embedding skipped!')
    end
    @embedding_count += 1
  end if embeddings.is_a?(Array)
end
# Emits an <img> for +image+, de-duplicating repeated screenshots: identical
# embeddings share one CSS class whose `content: url(...)` carries the
# (possibly base64) payload, so the data is written into the report only
# once. +id+ is the DOM id used by the show/hide toggle link.
def self.build_unique_image(image, id)
  @images ||= []
  index = @images.find_index image
  if index
    klass = "image_#{index}"
  else
    @images << image
    klass = "image_#{@images.size - 1}"
    @builder.style(type: 'text/css') do
      # Some formatters embed raw base64, others a complete data URI; fall
      # back to the undecoded payload if decoding raises. (This replaces two
      # previously duplicated decode/prefix/append branches.)
      src = begin
        Base64.decode64(image['data'])
      rescue
        image['data']
      end
      src = 'data:' + image['mime_type'] + ';base64,' + src unless src =~ /^data:image\/(png|gif|jpg|jpeg);base64,/
      @builder << "img.#{klass} {content: url(#{src});}"
    end
  end
  @builder << %{<img id='#{id}' class='#{klass}' style='display: none; border: 1px solid #{COLOR[:output]};' />}
end
# Emits a hidden <img> whose src is the embedding payload, decoding base64
# and prefixing a data URI when the payload is not already one. +id+ is the
# DOM id used by the show/hide toggle link.
def self.build_image(image, id)
  # Fall back to the raw payload when decoding raises (replaces the previous
  # duplicated decode/prefix/append branches).
  src = begin
    Base64.decode64(image['data'])
  rescue
    image['data']
  end
  src = 'data:' + image['mime_type'] + ';base64,' + src unless src =~ /^data:image\/(png|gif|jpg|jpeg);base64,/
  @builder << %{<img id='#{id}' style='display: none; border: 1px solid #{COLOR[:output]};' src='#{src}'/>}
end
# Appends one <li> per failure in the scenario to the report's error list:
# failed before-hooks, failed after-step hooks, failed steps, and failed
# after-hooks, each with its sliced error message and location lines.
# The slice offsets mirror build_step_error / build_hook_error /
# build_step_hook_error and are formatter-specific.
def self.build_error_list(scenario)
  # Failed before-hooks.
  scenario['before'].each do |before|
    next unless before['status'] == 'failed'
    @builder.li do
      error = before['result']['error_message'].split("\n")
      @builder.span(style: "color:#{COLOR[:failed]}") do
        error[0..-2].each do |line|
          @builder << line + '<br/>'
        end
      end
      @builder << "<strong>Hook: </strong>#{error[-1]} <br/>"
      @builder << "<strong>Scenario: </strong>#{scenario['name']} <br/><hr/>"
    end
  end
  # Failed after-step hooks, then failed steps themselves.
  scenario['steps'].each do |step|
    step['after'].each do |after|
      next unless after['status'] == 'failed'
      @builder.li do
        error = after['result']['error_message'].split("\n")
        @builder.span(style: "color:#{COLOR[:failed]}") do
          (scenario['keyword'] == 'Scenario Outline' ? error[0..-8] : error[0..-5]).each do |line|
            @builder << line + '<br/>'
          end
        end
        @builder << "<strong>Hook: </strong>#{scenario['keyword'] == 'Scenario Outline' ? error[-7] : error[-4]} <br/>"
        @builder << "<strong>FF: </strong>#{error[-2]} <br/><hr/>"
      end
    end if step['after']
    next unless step['status'] == 'failed' && step['result']['error_message']
    @builder.li do
      error = step['result']['error_message'].split("\n")
      @builder.span(style: "color:#{COLOR[:failed]}") do
        error[0..-3].each do |line|
          @builder << line + '<br/>'
        end
      end
      @builder << "<strong>SD: </strong>#{error[-2]} <br/>"
      @builder << "<strong>FF: </strong>#{error[-1]} <br/><hr/>"
    end
  end
  # Failed after-hooks.
  scenario['after'].each do |after|
    next unless after['status'] == 'failed'
    @builder.li do
      error = after['result']['error_message'].split("\n")
      @builder.span(style: "color:#{COLOR[:failed]}") do
        error[0..-2].each do |line|
          @builder << line + '<br/>'
        end
      end
      @builder << "<strong>Hook: </strong>#{error[-1]} <br/>"
      @builder << "<strong>Scenario: </strong>#{scenario['name']} <br/><hr/>"
    end
  end
end
# Loads every JSON report file and normalises it into the structure the rest
# of the builder expects:
#   * parses all non-empty files, groups duplicate features (same
#     uri+id+line, e.g. from parallel runs) and merges their elements
#   * folds each background's steps into the scenario that follows it
#   * defaults missing before/steps/after arrays and durations to empties/0
#   * rolls status and duration up from hooks/steps to scenarios to features
# NOTE(review): Hash#except needs ActiveSupport or Ruby >= 3.0 — confirm
# which this project relies on.
def self.features(files)
  files.each_with_object([]) { |file, features|
    data = File.read(file)
    next if data.empty?
    features << JSON.parse(data)
  }.flatten.group_by { |feature|
    feature['uri']+feature['id']+feature['line'].to_s
  }.values.each_with_object([]) { |group, features|
    features << group.first.except('elements').merge('elements' => group.map{|feature| feature['elements']}.flatten)
  }.sort_by!{|feature| feature['name']}.each{|feature|
    # Backgrounds alternate with scenarios; merge each background's steps
    # (renamed with its keyword) into the scenario that follows it.
    if feature['elements'][0]['type'] == 'background'
      (0..feature['elements'].size-1).step(2) do |i|
        feature['elements'][i]['steps'].each{|step| step['name']+=(' ('+feature['elements'][i]['keyword']+')')}
        feature['elements'][i+1]['steps'] = feature['elements'][i]['steps'] + feature['elements'][i+1]['steps']
        feature['elements'][i+1]['before'] = feature['elements'][i]['before'] if feature['elements'][i]['before']
      end
      feature['elements'].reject!{|element| element['type'] == 'background'}
    end
    feature['elements'].each { |scenario|
      scenario['before'] ||= []
      scenario['before'].each { |before|
        before['result']['duration'] ||= 0
        before.merge! 'status' => before['result']['status'], 'duration' => before['result']['duration']
      }
      scenario['steps'] ||= []
      scenario['steps'].each { |step|
        step['result']['duration'] ||= 0
        duration = step['result']['duration']
        status = step['result']['status']
        # After-step hooks fold into the step's own duration/status.
        step['after'].each { |after|
          after['result']['duration'] ||= 0
          duration += after['result']['duration']
          status = 'failed' if after['result']['status'] == 'failed'
          after.merge! 'status' => after['result']['status'], 'duration' => after['result']['duration']
        } if step['after']
        step.merge! 'status' => status, 'duration' => duration
      }
      scenario['after'] ||= []
      scenario['after'].each { |after|
        after['result']['duration'] ||= 0
        after.merge! 'status' => after['result']['status'], 'duration' => after['result']['duration']
      }
      scenario.merge! 'status' => scenario_status(scenario), 'duration' => total_time(scenario['before']) + total_time(scenario['steps']) + total_time(scenario['after'])
    }
    feature.merge! 'status' => feature_status(feature), 'duration' => total_time(feature['elements'])
  }
end
# Rolls scenario statuses up to a feature status: any failed scenario makes
# the feature 'broken'; otherwise any undefined/pending scenario makes it
# 'incomplete'; otherwise it is 'working'.
def self.feature_status(feature)
  statuses = feature['elements'].map { |scenario| scenario['status'] }
  return 'broken' if statuses.include?('failed')
  incomplete = statuses.any? { |status| %w(undefined pending).include?(status) }
  incomplete ? 'incomplete' : 'working'
end
# Collects every feature's scenarios ('elements') into one flat Array.
def self.scenarios(features)
  features.flat_map { |feature| feature['elements'] }
end
# Derives a scenario's overall status: the status of the first before-hook,
# step, or after-hook that did not pass; 'passed' when everything passed.
def self.scenario_status(scenario)
  parts = scenario['before'] + scenario['steps'] + scenario['after']
  first_problem = parts.find { |part| part['status'] != 'passed' }
  first_problem ? first_problem['status'] : 'passed'
end
# Collects every scenario's steps into one flat Array.
def self.steps(scenarios)
  scenarios.flat_map { |scenario| scenario['steps'] }
end
# Collects the unique tag names across all scenarios; scenarios without a
# 'tags' key contribute nothing.
def self.tags(scenarios)
  scenarios.flat_map { |scenario|
    scenario['tags'] ? scenario['tags'].map { |tag| tag['name'] } : []
  }.uniq
end
# Resolves +path+ to the unique list of JSON report files:
#   String -> the file itself (if *.json) or every *.json in the directory
#   nil    -> every *.json in the current directory
#   Array  -> each entry resolved as a String above
# Raises 'InvalidInput' for any other type and 'InvalidOrNoInputFile' when
# nothing was found.
def self.files(path)
  found =
    case path
    when nil
      Dir.glob('*.json')
    when String
      (path =~ /\.json$/) ? [path] : Dir.glob("#{path}/*.json")
    when Array
      path.flat_map { |entry| (entry =~ /\.json$/) ? entry : Dir.glob("#{entry}/*.json") }
    else
      raise 'InvalidInput'
    end
  raise 'InvalidOrNoInputFile' if found.empty?
  found.uniq
end
# Groups records by 'status' and returns one {name:, count:, color:} summary
# hash per status, for feeding the Highcharts series builders.
def self.data(all_data)
  all_data.group_by { |record| record['status'] }.map do |status, records|
    { name: status,
      count: records.size,
      color: COLOR[status.to_sym] }
  end
end
# Sums the 'duration' value of every entry in +data+; 0 for an empty Array.
def self.total_time(data)
  data.reduce(0) { |sum, item| sum + item['duration'] }
end
# Formats a duration given in nanoseconds as "<m>m <s.sss>s".
def self.duration(seconds)
  elapsed = seconds.to_f / 1_000_000_000
  minutes, secs = elapsed.divmod(60)
  "#{minutes}m #{format('%.3f', secs)}s"
end
# Builds the Highcharts script for a 3D pie chart bound to DOM id +id+ with
# the given +title+. +data+ is an Array of hashes with :name, :count and
# optional :color/:sliced/:selected keys. Returns the JS source as a String.
def self.pie_chart_js(id, title, data)
  # Serialise each datum as a JS object literal; chop drops the last comma.
  data = data.each_with_object('') do |h, s|
    s << "{name: '#{h[:name].capitalize}'"
    s << ",y: #{h[:count]}"
    s << ',sliced: true' if h[:sliced]
    s << ',selected: true' if h[:selected]
    s << ",color: '#{h[:color]}'" if h[:color]
    s << '},'
  end.chop
  "$(function (){$('##{id}').highcharts({credits: {enabled: false}, chart: {type: 'pie',
options3d: {enabled: true, alpha: 45, beta: 0}}, title: {text: '#{title}'},
tooltip: {pointFormat: 'Count: <b>{point.y}</b><br/>Percentage: <b>{point.percentage:.1f}%</b>'},
plotOptions: {pie: {allowPointSelect: true, cursor: 'pointer', depth: 35, dataLabels: {enabled: true,
format: '{point.name}'}}}, series: [{type: 'pie', name: 'Results', data: [#{data}]}]});});"
end
# Builds the Highcharts script for a half-donut chart bound to DOM id +id+
# with the given +title+. +data+ has the same shape as for pie_chart_js.
# Returns the JS source as a String.
def self.donut_js(id, title, data)
  # Serialise each datum as a JS object literal; chop drops the last comma.
  data = data.each_with_object('') do |h, s|
    s << "{name: '#{h[:name].capitalize}'"
    s << ",y: #{h[:count]}"
    s << ',sliced: true' if h[:sliced]
    s << ',selected: true' if h[:selected]
    s << ",color: '#{h[:color]}'" if h[:color]
    s << '},'
  end.chop
  "$(function (){$('##{id}').highcharts({credits: {enabled: false},
chart: {plotBackgroundColor: null, plotBorderWidth: 0, plotShadow: false, width: $(document).width()-80},
title: {text: '#{title}', align: 'center', verticalAlign: 'middle', y: 40},
tooltip: {pointFormat: 'Count: <b>{point.y}</b><br/>Percentage: <b>{point.percentage:.1f}%</b>'},
plotOptions: {pie: {dataLabels: {enabled: true, distance: -50,
style: {fontWeight: 'bold', color: 'white', textShadow: '0px 1px 2px black'}},
startAngle: -90, endAngle: 90, center: ['50%', '75%']}},
series: [{type: 'pie', innerSize: '50%', name: 'Results', data: [#{data}]}]});});"
end
# Everything below is an implementation detail of the report builder; only
# the public entry points remain callable from outside.
private_class_method :donut_js, :pie_chart_js, :files,
                     :features, :feature_status,
                     :scenarios, :scenario_status, :steps,
                     :data, :duration, :total_time,
                     :build_scenario, :build_step,
                     :build_menu, :build_output, :build_embedding,
                     :build_error_list, :build_step_error,
                     :build_hook_error, :build_step_hook_error,
                     :build_unique_image, :build_image,
                     :build_data_table, :tags, :build_tags_drop_down
end
|
# Put lib/ on the load path so coveralls/version can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'coveralls/version'

# Gem specification for the coveralls gem (Coveralls API client).
Gem::Specification.new do |gem|
  gem.authors = ["Nick Merwin", "Wil Gieseler"]
  gem.email = ["nick@lemurheavy.com", "supapuerco@gmail.com"]
  gem.description = "A Ruby implementation of the Coveralls API."
  gem.summary = "A Ruby implementation of the Coveralls API."
  gem.homepage = "https://coveralls.io"
  gem.license = "MIT"
  # Package everything tracked by git; executables and test files are
  # derived from the same list.
  gem.files = `git ls-files`.split($\)
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = "coveralls"
  gem.require_paths = ["lib"]
  gem.version = Coveralls::VERSION
  gem.required_ruby_version = '>= 1.8.7'
  gem.add_dependency 'json', '~> 1.8'
  gem.add_dependency 'rest-client', '>= 1.6.8', '< 2'
  gem.add_dependency 'simplecov', '~> 0.10.0'
  gem.add_dependency 'term-ansicolor', '~> 1.3'
  gem.add_dependency 'thor', '~> 0.19.1'
  gem.add_development_dependency 'bundler', '~> 1.7'
end
pegged term-ansicolor at 1.3 for Ruby <2 compatibility
# Put lib/ on the load path so coveralls/version can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'coveralls/version'

# Gem specification for the coveralls gem (Coveralls API client).
Gem::Specification.new do |gem|
  gem.authors = ["Nick Merwin", "Wil Gieseler"]
  gem.email = ["nick@lemurheavy.com", "supapuerco@gmail.com"]
  gem.description = "A Ruby implementation of the Coveralls API."
  gem.summary = "A Ruby implementation of the Coveralls API."
  gem.homepage = "https://coveralls.io"
  gem.license = "MIT"
  # Package everything tracked by git; executables and test files are
  # derived from the same list.
  gem.files = `git ls-files`.split($\)
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = "coveralls"
  gem.require_paths = ["lib"]
  gem.version = Coveralls::VERSION
  gem.required_ruby_version = '>= 1.8.7'
  gem.add_dependency 'json', '~> 1.8'
  gem.add_dependency 'rest-client', '>= 1.6.8', '< 2'
  gem.add_dependency 'simplecov', '~> 0.10.0'
  # Pinned to 1.3 exactly: later term-ansicolor releases drop Ruby < 2
  # support (see commit note).
  gem.add_dependency 'term-ansicolor', '1.3'
  gem.add_dependency 'thor', '~> 0.19.1'
  gem.add_development_dependency 'bundler', '~> 1.7'
end
|
require "acceptance/webdriver_test_case"

# :stopdoc:
# Browser acceptance test for the promoter role: edit races on a series and
# on one of its child events, export the scoring sheet, add a discount code,
# and propagate the series' races to its children. Step order matters —
# each click depends on the page state left by the previous one.
class PromotersTest < WebDriverTestCase
  def test_browse
    # Seed a series with one child event, both promoted by Brad Ross.
    year = RacingAssociation.current.effective_year
    series = Series.create!(:name => "Cross Crusade", :promoter => Person.find_by_name("Brad Ross"), :date => Date.new(year, 10))
    event = SingleDayEvent.create!(:name => "Cross Crusade: Alpenrose", :promoter => Person.find_by_name("Brad Ross"), :date => Date.new(year, 10))
    series.children << event
    login_as :promoter
    # Create and rename a race on the series via the inline editor.
    click "events_tab"
    click :link_text => "Cross Crusade"
    click "save"
    click "create_race"
    wait_for_element :css => "td.race"
    type "Senior Women", :css => "form.editor_field input"
    type :return, { :css => "form.editor_field input" }, false
    wait_for_no_element :css => "form.editor_field input"
    wait_for_page_source "Senior Women"
    race = series.races(true).first
    assert_equal "Senior Women", race.category_name, "Should update category name"
    click "edit_race_#{race.id}"
    click "save"
    # Same flow on the child event.
    click "events_tab"
    click :link_text => "Cross Crusade: Alpenrose"
    click "create_race"
    wait_for_element :css => "td.race"
    type "Masters Women 40+", :css => ".race form.editor_field input"
    type :return, { :css => ".race form.editor_field input" }, false
    wait_for_no_element :css => ".race form.editor_field input"
    wait_for_page_source "Masters Women 40+"
    race = event.races(true).first
    assert_equal "Masters Women 40+", race.category_name, "Should update category name"
    click "edit_race_#{race.id}"
    click "save"
    # Export the scoring sheet and wait for the download to land.
    click "people_tab"
    remove_download "scoring_sheet.xls"
    click "export_link"
    wait_for_not_current_url(/\/admin\/people.xls\?excel_layout=scoring_sheet&include=members_only/)
    wait_for_download "scoring_sheet.xls"
    assert_no_errors
    # Add a discount code for the child event.
    click "discount_codes_tab"
    select_option event.to_param, "discount_code_event_id"
    click "add_discount_code"
    # Propagate the series' races down to its children (confirm dialog).
    click "events_tab"
    click :link_text => "Cross Crusade"
    click_ok_on_alert_dialog
    click "propagate_races"
  end
end
Remove discount codes
require "acceptance/webdriver_test_case"

# :stopdoc:
# Browser acceptance test for the promoter role: edit races on a series and
# on one of its child events, export the scoring sheet, and propagate the
# series' races to its children. Step order matters — each click depends on
# the page state left by the previous one.
class PromotersTest < WebDriverTestCase
  def test_browse
    # Seed a series with one child event, both promoted by Brad Ross.
    year = RacingAssociation.current.effective_year
    series = Series.create!(:name => "Cross Crusade", :promoter => Person.find_by_name("Brad Ross"), :date => Date.new(year, 10))
    event = SingleDayEvent.create!(:name => "Cross Crusade: Alpenrose", :promoter => Person.find_by_name("Brad Ross"), :date => Date.new(year, 10))
    series.children << event
    login_as :promoter
    # Create and rename a race on the series via the inline editor.
    click "events_tab"
    click :link_text => "Cross Crusade"
    click "save"
    click "create_race"
    wait_for_element :css => "td.race"
    type "Senior Women", :css => "form.editor_field input"
    type :return, { :css => "form.editor_field input" }, false
    wait_for_no_element :css => "form.editor_field input"
    wait_for_page_source "Senior Women"
    race = series.races(true).first
    assert_equal "Senior Women", race.category_name, "Should update category name"
    click "edit_race_#{race.id}"
    click "save"
    # Same flow on the child event.
    click "events_tab"
    click :link_text => "Cross Crusade: Alpenrose"
    click "create_race"
    wait_for_element :css => "td.race"
    type "Masters Women 40+", :css => ".race form.editor_field input"
    type :return, { :css => ".race form.editor_field input" }, false
    wait_for_no_element :css => ".race form.editor_field input"
    wait_for_page_source "Masters Women 40+"
    race = event.races(true).first
    assert_equal "Masters Women 40+", race.category_name, "Should update category name"
    click "edit_race_#{race.id}"
    click "save"
    # Export the scoring sheet and wait for the download to land.
    click "people_tab"
    remove_download "scoring_sheet.xls"
    click "export_link"
    wait_for_not_current_url(/\/admin\/people.xls\?excel_layout=scoring_sheet&include=members_only/)
    wait_for_download "scoring_sheet.xls"
    assert_no_errors
    # Propagate the series' races down to its children (confirm dialog).
    click "events_tab"
    click :link_text => "Cross Crusade"
    click_ok_on_alert_dialog
    click "propagate_races"
  end
end
|
# Command is the main point of entry for boom commands; shell arguments are
# passed through to Command, which then filters and parses through individual
# commands and reroutes them to constituent object classes.
#
# Command also keeps track of one connection to Storage, which is how new data
# changes are persisted to disk. It takes care of any data changes by calling
# #persist!.
#
module Boom
  class Command
    class << self
      attr_accessor :storage

      # Public: executes a command.
      #
      # storage - The Storage instance off which to run commands. This is
      #           likely just Boom::Storage.new, since Boom::Storage should
      #           pick up the appropriate JSON file paths on its own.
      # args    - The actual commands to operate on. Can be as few as zero
      #           arguments or as many as three.
      def execute(storage,*args)
        @storage = storage
        command = args[0]
        major = args[1]
        minor = args[2]
        return overview unless command
        delegate(command, major, minor)
      end

      # Public: prints any given string.
      #
      # s = String output
      #
      # Prints to STDOUT and returns. This method exists to standardize output
      # and for easy mocking or overriding.
      def output(s)
        puts(s)
      end

      # Public: prints a tidy overview of your Lists in descending order of
      # number of Items.
      #
      # Returns nothing.
      def overview
        storage.lists.each do |list|
          output "  #{list.name} (#{list.items.size})"
        end
      end

      # Public: prints the detailed view of all your Lists and all their
      # Items.
      #
      # Returns nothing.
      def all
        storage.lists.each do |list|
          output "  #{list.name}"
          list.items.each do |item|
            output "    #{item.name}: #{item.value}"
          end
        end
      end

      # Public: allows main access to most commands.
      #
      # Returns output based on method calls.
      def delegate(command, major, minor)
        return all if command == 'all'
        # if we're operating on a List
        if storage.list_exists?(command)
          return list_delete(command) if major == 'delete'
          return list_detail(command) unless major
          unless minor == 'delete'
            return add_item(command,major,minor) if minor
            return search_list_for_item(command, major)
          end
        end
        return search_items(command) if storage.item_exists?(command)
        if minor == 'delete' and storage.item_exists?(major)
          return item_delete(major)
        end
        return list_create(command)
      end

      # Public: prints all Items over a List.
      #
      # list - the List object to iterate over
      #
      # Returns nothing.
      def list_detail(list_name)
        # NOTE(review): Array#first ignores its block, so this always returns
        # the first list regardless of list_name — find/detect is presumably
        # what was intended; confirm before relying on this.
        list = storage.lists.first { |list| list.name == list_name }
        list.items.sort{ |x,y| x.name <=> y.name }.each do |item|
          output "    #{item.name}: #{item.value}"
        end
      end

      # Public: add a new List.
      #
      # name - the String name of the List.
      #
      # Example
      #
      #   Commands.list_create("snippets")
      #
      # Returns the newly created List.
      def list_create(name)
        lists = (storage.lists << List.new(name))
        storage.lists = lists
        output "Boom! Created a new list called \"#{name}\"."
        save!
      end

      # Public: remove a named List.
      #
      # name - the String name of the List.
      #
      # Example
      #
      #   Commands.delete_list("snippets")
      #
      # Returns nothing.
      def list_delete(name)
        lists = storage.lists.reverse.reject { |list| list.name == name }
        output "You sure you want to delete everything in \"#{name}\"? (y/n):"
        # NOTE(review): gets returns the line including its trailing newline,
        # so this comparison can never equal 'y' — chomp is needed.
        if gets == 'y'
          storage.lists = lists
          output "Boom! Deleted all your #{name}."
          save!
        else
          output "Just kidding then."
        end
      end

      # Public: add a new Item to a list.
      #
      # list  - the String name of the List to associate with this Item
      # name  - the String name of the Item
      # value - the String value of the Item
      #
      # Example
      #
      #   Commands.add_item("snippets","sig","- @holman")
      #
      # Returns the newly created Item.
      def add_item(list,name,value)
        list = storage.lists.find{|storage_list| storage_list.name == list}
        list.add_item(Item.new(name,value))
        output "Boom! \"#{name}\" in \"#{list.name}\" is \"#{value}\". Got it."
        save!
      end

      # Public: remove a named Item.
      #
      # name - the String name of the Item.
      #
      # Example
      #
      #   Commands.delete_item("an-item-name")
      #
      # Returns nothing.
      def item_delete(name)
        storage.lists = storage.lists.each do |list|
          list.items.reject! { |item| item.name == name }
        end
        output "Boom! \"#{name}\" is gone forever."
        save!
      end

      # Public: search for an Item in all lists by name. Drops the
      # corresponding entry into your clipboard.
      #
      # name - the String term to search for in all Item names
      #
      # Returns the matching Item.
      def search_items(name)
        item = storage.items.detect do |item|
          item.name == name
        end
        output Clipboard.copy(item)
      end

      # Public: search for an Item in a particular list by name. Drops the
      # corresponding entry into your clipboard.
      #
      # list_name - the String name of the List in which to scope the search
      # item_name - the String term to search for in all Item names
      #
      # Returns the matching Item.
      def search_list_for_item(list_name, item_name)
        # NOTE(review): same Array#first-with-block issue as list_detail —
        # both lookups ignore their blocks and return the first element.
        list = storage.lists.first { |list| list.name == list_name }
        item = list.items.first { |item| item.name == item_name }
        output Clipboard.copy(item)
      end

      # Public: save in-memory data to disk.
      #
      # Returns whether or not data was saved.
      def save!
        storage.save!
      end
    end
  end
end
this was straight-up wrong
# Command is the main point of entry for boom commands; shell arguments are
# passed through to Command, which then filters and parses through individual
# commands and reroutes them to constituent object classes.
#
# Command also keeps track of one connection to Storage, which is how new data
# changes are persisted to disk. It takes care of any data changes by calling
# Boom::Command#save!.
#
module Boom
  class Command
    class << self
      attr_accessor :storage

      # Public: executes a command.
      #
      # storage - The Storage instance off which to run commands. This is
      #           likely just Boom::Storage.new, since Boom::Storage should
      #           pick up the appropriate JSON file paths on its own.
      # args    - The actual commands to operate on. Can be as few as zero
      #           arguments or as many as three.
      def execute(storage,*args)
        @storage = storage
        command = args[0]
        major = args[1]
        minor = args[2]
        return overview unless command
        delegate(command, major, minor)
      end

      # Public: prints any given string.
      #
      # s = String output
      #
      # Prints to STDOUT and returns. This method exists to standardize output
      # and for easy mocking or overriding.
      def output(s)
        puts(s)
      end

      # Public: prints a tidy overview of your Lists in descending order of
      # number of Items.
      #
      # Returns nothing.
      def overview
        storage.lists.each do |list|
          output "  #{list.name} (#{list.items.size})"
        end
      end

      # Public: prints the detailed view of all your Lists and all their
      # Items.
      #
      # Returns nothing.
      def all
        storage.lists.each do |list|
          output "  #{list.name}"
          list.items.each do |item|
            output "    #{item.name}: #{item.value}"
          end
        end
      end

      # Public: allows main access to most commands.
      #
      # Returns output based on method calls.
      def delegate(command, major, minor)
        return all if command == 'all'
        # if we're operating on a List
        if storage.list_exists?(command)
          return list_delete(command) if major == 'delete'
          return list_detail(command) unless major
          unless minor == 'delete'
            return add_item(command,major,minor) if minor
            return search_list_for_item(command, major)
          end
        end
        return search_items(command) if storage.item_exists?(command)
        if minor == 'delete' and storage.item_exists?(major)
          return item_delete(major)
        end
        return list_create(command)
      end

      # Public: prints all Items over a List.
      #
      # list_name - the String name of the List to print
      #
      # Returns nothing.
      def list_detail(list_name)
        # Array#first ignores a block, so find is required to actually
        # match by name.
        list = storage.lists.find { |storage_list| storage_list.name == list_name }
        list.items.sort{ |x,y| x.name <=> y.name }.each do |item|
          output "    #{item.name}: #{item.value}"
        end
      end

      # Public: add a new List.
      #
      # name - the String name of the List.
      #
      # Example
      #
      #   Commands.list_create("snippets")
      #
      # Returns the newly created List.
      def list_create(name)
        lists = (storage.lists << List.new(name))
        storage.lists = lists
        output "Boom! Created a new list called \"#{name}\"."
        save!
      end

      # Public: remove a named List, after asking for confirmation on stdin.
      #
      # name - the String name of the List.
      #
      # Example
      #
      #   Commands.delete_list("snippets")
      #
      # Returns nothing.
      def list_delete(name)
        lists = storage.lists.reverse.reject { |list| list.name == name }
        output "You sure you want to delete everything in \"#{name}\"? (y/n):"
        # gets includes the trailing newline, so chomp before comparing —
        # otherwise the confirmation could never match 'y'.
        if gets.chomp == 'y'
          storage.lists = lists
          output "Boom! Deleted all your #{name}."
          save!
        else
          output "Just kidding then."
        end
      end

      # Public: add a new Item to a list.
      #
      # list  - the String name of the List to associate with this Item
      # name  - the String name of the Item
      # value - the String value of the Item
      #
      # Example
      #
      #   Commands.add_item("snippets","sig","- @holman")
      #
      # Returns the newly created Item.
      def add_item(list,name,value)
        list = storage.lists.find{|storage_list| storage_list.name == list}
        list.add_item(Item.new(name,value))
        output "Boom! \"#{name}\" in \"#{list.name}\" is \"#{value}\". Got it."
        save!
      end

      # Public: remove a named Item from every list.
      #
      # name - the String name of the Item.
      #
      # Example
      #
      #   Commands.delete_item("an-item-name")
      #
      # Returns nothing.
      def item_delete(name)
        storage.lists = storage.lists.each do |list|
          list.items.reject! { |item| item.name == name }
        end
        output "Boom! \"#{name}\" is gone forever."
        save!
      end

      # Public: search for an Item in all lists by name. Drops the
      # corresponding entry into your clipboard.
      #
      # name - the String term to search for in all Item names
      #
      # Returns the matching Item.
      def search_items(name)
        item = storage.items.detect do |item|
          item.name == name
        end
        output Clipboard.copy(item)
      end

      # Public: search for an Item in a particular list by name. Drops the
      # corresponding entry into your clipboard.
      #
      # list_name - the String name of the List in which to scope the search
      # item_name - the String term to search for in all Item names
      #
      # Returns the matching Item.
      def search_list_for_item(list_name, item_name)
        # Array#first ignores a block; find is required to match by name.
        list = storage.lists.find { |storage_list| storage_list.name == list_name }
        item = list.items.find { |list_item| list_item.name == item_name }
        output Clipboard.copy(item)
      end

      # Public: save in-memory data to disk.
      #
      # Returns whether or not data was saved.
      def save!
        storage.save!
      end
    end
  end
end
|
class Bora
  # Gem version string (Semantic Versioning); frozen to prevent mutation.
  VERSION = '1.7.3'.freeze
end
Version 1.7.4
class Bora
  # Gem version string (Semantic Versioning); frozen to prevent mutation.
  VERSION = '1.7.4'.freeze
end
|
require "boxen/keychain"
require "boxen/project"
require "fileutils"
require "json"
require "octokit"
require "shellwords"
module Boxen
# All configuration for Boxen, whether it's loaded from command-line
# args, environment variables, config files, or the keychain.
class Config
def self.load(&block)
new do |config|
file = "#{config.homedir}/config/boxen/defaults.json"
if File.file? file
attrs = JSON.parse File.read file
attrs.each do |key, value|
if !value.nil? && config.respond_to?(selector = "#{key}=")
config.send selector, value
end
end
end
if Boxen::Util.osx?
keychain = Boxen::Keychain.new config.user
config.token = keychain.token
else
config.token = ''
end
if config.enterprise?
# configure to talk to GitHub Enterprise
Octokit.configure do |c|
c.api_endpoint = "#{config.ghurl}/api/v3"
c.web_endpoint = config.ghurl
end
end
yield config if block_given?
end
end
# Save `config`. Returns `config`. Note that this only saves data,
# not flags. For example, `login` will be saved, but `stealth?`
# won't.
def self.save(config)
attrs = {
:email => config.email,
:fde => config.fde?,
:homedir => config.homedir,
:login => config.login,
:name => config.name,
:puppetdir => config.puppetdir,
:repodir => config.repodir,
:reponame => config.reponame,
:ghurl => config.ghurl,
:srcdir => config.srcdir,
:user => config.user,
:repotemplate => config.repotemplate,
:s3host => config.s3host,
:s3bucket => config.s3bucket
}
file = "#{config.homedir}/config/boxen/defaults.json"
FileUtils.mkdir_p File.dirname file
File.open file, "wb" do |f|
f.write JSON.generate Hash[attrs.reject { |k, v| v.nil? }]
end
if Boxen::Util.osx?
keychain = Boxen::Keychain.new config.user
keychain.token = config.token
end
config
end
# Create a new instance. Yields `self` if `block` is given.
def initialize(&block)
@fde = true
@pull = true
yield self if block_given?
end
# Create an API instance using the current user creds. A new
# instance is created any time `token` changes.
def api
@api ||= Octokit::Client.new :login => token, :password => 'x-oauth-basic'
end
# Spew a bunch of debug logging? Default is `false`.
def debug?
!!@debug
end
attr_writer :debug
# A GitHub user's public email.
attr_accessor :email
# The shell script that loads Boxen's environment.
def envfile
"#{homedir}/env.sh"
end
# Is full disk encryption required? Default is `true`. Respects
# the `BOXEN_NO_FDE` environment variable.
def fde?
!ENV["BOXEN_NO_FDE"] && @fde
end
attr_writer :fde
# Boxen's home directory. Default is `"/opt/boxen"`. Respects the
# `BOXEN_HOME` environment variable.
def homedir
@homedir || ENV["BOXEN_HOME"] || "/opt/boxen"
end
attr_writer :homedir
# Boxen's log file. Default is `"#{repodir}/log/boxen.log"`.
# Respects the `BOXEN_LOG_FILE` environment variable. The log is
# overwritten on every run.
def logfile
@logfile || ENV["BOXEN_LOG_FILE"] || "#{repodir}/log/boxen.log"
end
attr_writer :logfile
# A GitHub user login. Default is `nil`.
attr_accessor :login
# A GitHub user's profile name.
attr_accessor :name
# Just go through the motions? Default is `false`.
def pretend?
!!@pretend
end
attr_writer :pretend
# Run a profiler on Puppet? Default is `false`.
def profile?
!!@profile
end
attr_writer :profile
# Enable the Puppet future parser? Default is `false`.
def future_parser?
!!@future_parser
end
attr_writer :future_parser
# Enable puppet reports ? Default is `false`.
def report?
!!@report
end
attr_writer :report
# Enable generation of dependency graphs.
def graph?
!!@graph
end
attr_writer :graph
# An Array of Boxen::Project entries, one for each project Boxen
# knows how to manage.
#
# FIX: Revisit this once we restructure template projects. It's
# broken for several reasons: It assumes paths that won't be
# right, and it assumes projects live in the same repo as this
# file.
def projects
files = Dir["#{repodir}/modules/projects/manifests/*.pp"]
names = files.map { |m| File.basename m, ".pp" }.sort
names.map do |name|
Boxen::Project.new "#{srcdir}/#{name}"
end
end
# The directory where Puppet expects configuration (which we don't
# use) and runtime information (which we generally don't care
# about). Default is `/tmp/boxen/puppet`. Respects the
# `BOXEN_PUPPET_DIR` environment variable.
def puppetdir
@puppetdir || ENV["BOXEN_PUPPET_DIR"] || "/tmp/boxen/puppet"
end
attr_writer :puppetdir
# The directory of the custom Boxen repo for an org. Default is
# `Dir.pwd`. Respects the `BOXEN_REPO_DIR` environment variable.
def repodir
@repodir || ENV["BOXEN_REPO_DIR"] || Dir.pwd
end
attr_writer :repodir
# The repo on GitHub to use for error reports and automatic
# updates, in `owner/repo` format. Default is the `origin` of a
# Git repo in `repodir`, if it exists and points at GitHub.
# Respects the `BOXEN_REPO_NAME` environment variable.
#
# Returns nil when no override is set and the origin can't be
# parsed (or `repodir` isn't a directory).
def reponame
  override = @reponame || ENV["BOXEN_REPO_NAME"]
  return override unless override.nil?
  return nil unless File.directory? repodir
  ghuri = URI(ghurl)
  url = Dir.chdir(repodir) { `git config remote.origin.url`.strip }
  # find the path and strip off the .git suffix; parenthesize the
  # regexp argument to `sub` (the bare literal form triggers Ruby's
  # ambiguous-argument warning) and use MatchData instead of $1
  repo_exp = Regexp.new(Regexp.escape(ghuri.host) + "[/:]([^/]+/[^/]+)")
  match = repo_exp.match(url)
  if $?.success? && match
    @reponame = match[1].sub(/\.git$/, "")
  end
end
attr_writer :reponame
# GitHub location (public or GitHub Enterprise)
def ghurl
  return @ghurl if @ghurl
  ENV.fetch("BOXEN_GITHUB_ENTERPRISE_URL", "https://github.com")
end
attr_writer :ghurl
# Repository URL template (required for GitHub Enterprise)
def repotemplate
  return @repotemplate if @repotemplate
  ENV.fetch("BOXEN_REPO_URL_TEMPLATE", 'https://github.com/%s')
end
attr_writer :repotemplate
# Does this Boxen use a GitHub Enterprise instance?
# True whenever `ghurl` differs from the public github.com URL.
def enterprise?
  !ghurl.eql?("https://github.com")
end
# The directory where repos live. Default is
# `"/Users/#{user}/src"`.
def srcdir
  return @srcdir if @srcdir
  # block form keeps the default lazy, so `user` is only consulted
  # when neither the ivar nor the env var is set
  ENV.fetch("BOXEN_SRC_DIR") { "/Users/#{user}/src" }
end
attr_writer :srcdir
# Don't auto-create issues on failure? Default is `false`.
# Respects the `BOXEN_NO_ISSUE` environment variable.
#
# Coerce the whole expression: the old `!!ENV[...] || @stealth`
# bound `!!` only to the env lookup, so the method leaked the raw
# ivar (possibly nil) whenever the env var was unset.
def stealth?
  !!(ENV["BOXEN_NO_ISSUE"] || @stealth)
end
attr_writer :stealth
# A GitHub OAuth token. Default is `nil`.
attr_reader :token
# Set the OAuth token and drop the memoized API client so the next
# `api` call builds a client with the new credentials.
def token=(token)
  @token = token
  @api = nil
end
# A local user login. Default is the `USER` environment variable.
def user
  return @user if @user
  ENV["USER"]
end
attr_writer :user
# NOTE: unlike the other predicates this returns the raw ivar
# (possibly nil), not a coerced boolean — callers rely on truthiness.
def color?
  instance_variable_get(:@color)
end
attr_writer :color
# The S3 host name. Default is `"s3.amazonaws.com"`.
# Respects the `BOXEN_S3_HOST` environment variable.
def s3host
  return @s3host if @s3host
  ENV.fetch("BOXEN_S3_HOST", "s3.amazonaws.com")
end
attr_writer :s3host
# The S3 bucket name. Default is `"boxen-downloads"`.
# Respects the `BOXEN_S3_BUCKET` environment variable.
def s3bucket
  return @s3bucket if @s3bucket
  ENV.fetch("BOXEN_S3_BUCKET", "boxen-downloads")
end
attr_writer :s3bucket
end
end
Include boxen/util before using it
require "boxen/keychain"
require "boxen/project"
require "boxen/util"
require "fileutils"
require "json"
require "octokit"
require "shellwords"
module Boxen
# All configuration for Boxen, whether it's loaded from command-line
# args, environment variables, config files, or the keychain.
class Config
# Build a Config: apply saved JSON defaults from
# homedir/config/boxen/defaults.json, pull the OAuth token from the
# OS X keychain (blank elsewhere), and point Octokit at GitHub
# Enterprise when configured. Yields the config if a block is given.
def self.load(&block)
  new do |config|
    file = "#{config.homedir}/config/boxen/defaults.json"
    if File.file? file
      attrs = JSON.parse File.read file
      attrs.each do |key, value|
        # only restore keys that map onto an existing writer method
        if !value.nil? && config.respond_to?(selector = "#{key}=")
          config.send selector, value
        end
      end
    end
    if Boxen::Util.osx?
      keychain = Boxen::Keychain.new config.user
      config.token = keychain.token
    else
      # no keychain available off OS X; leave the token blank
      config.token = ''
    end
    if config.enterprise?
      # configure to talk to GitHub Enterprise
      Octokit.configure do |c|
        c.api_endpoint = "#{config.ghurl}/api/v3"
        c.web_endpoint = config.ghurl
      end
    end
    yield config if block_given?
  end
end
# Save `config`. Returns `config`. Note that this only saves data,
# not flags. For example, `login` will be saved, but `stealth?`
# won't.
def self.save(config)
  attrs = {
    :email => config.email,
    :fde => config.fde?,
    :homedir => config.homedir,
    :login => config.login,
    :name => config.name,
    :puppetdir => config.puppetdir,
    :repodir => config.repodir,
    :reponame => config.reponame,
    :ghurl => config.ghurl,
    :srcdir => config.srcdir,
    :user => config.user,
    :repotemplate => config.repotemplate,
    :s3host => config.s3host,
    :s3bucket => config.s3bucket
  }
  file = "#{config.homedir}/config/boxen/defaults.json"
  FileUtils.mkdir_p File.dirname file
  # nil values are dropped so the JSON only carries explicit settings
  File.open file, "wb" do |f|
    f.write JSON.generate Hash[attrs.reject { |k, v| v.nil? }]
  end
  # the token never goes into the JSON file; on OS X it is stored in
  # the keychain instead (and simply not persisted elsewhere)
  if Boxen::Util.osx?
    keychain = Boxen::Keychain.new config.user
    keychain.token = config.token
  end
  config
end
# Create a new instance. Yields `self` if `block` is given.
#
# Only `fde` and `pull` get explicit defaults here; every other
# setting falls back lazily inside its reader method.
def initialize(&block)
  @fde = true
  @pull = true
  yield self if block_given?
end
# Create an API instance using the current user creds. A new
# instance is created any time `token` changes.
#
# The token is passed as the login with the fixed password
# 'x-oauth-basic' (presumably GitHub's token-over-basic-auth
# convention — see Octokit docs).
def api
  @api ||= Octokit::Client.new :login => token, :password => 'x-oauth-basic'
end
# Spew a bunch of debug logging? Default is `false`.
def debug?
!!@debug
end
attr_writer :debug
# A GitHub user's public email.
attr_accessor :email
# The shell script that loads Boxen's environment.
def envfile
"#{homedir}/env.sh"
end
# Is full disk encryption required? Default is `true`. Respects
# the `BOXEN_NO_FDE` environment variable.
def fde?
!ENV["BOXEN_NO_FDE"] && @fde
end
attr_writer :fde
# Boxen's home directory. Default is `"/opt/boxen"`. Respects the
# `BOXEN_HOME` environment variable.
def homedir
@homedir || ENV["BOXEN_HOME"] || "/opt/boxen"
end
attr_writer :homedir
# Boxen's log file. Default is `"#{repodir}/log/boxen.log"`.
# Respects the `BOXEN_LOG_FILE` environment variable. The log is
# overwritten on every run.
def logfile
@logfile || ENV["BOXEN_LOG_FILE"] || "#{repodir}/log/boxen.log"
end
attr_writer :logfile
# A GitHub user login. Default is `nil`.
attr_accessor :login
# A GitHub user's profile name.
attr_accessor :name
# Just go through the motions? Default is `false`.
def pretend?
!!@pretend
end
attr_writer :pretend
# Run a profiler on Puppet? Default is `false`.
def profile?
!!@profile
end
attr_writer :profile
# Enable the Puppet future parser? Default is `false`.
def future_parser?
!!@future_parser
end
attr_writer :future_parser
# Enable puppet reports ? Default is `false`.
def report?
!!@report
end
attr_writer :report
# Enable generation of dependency graphs.
def graph?
!!@graph
end
attr_writer :graph
# An Array of Boxen::Project entries, one for each project Boxen
# knows how to manage.
#
# FIX: Revisit this once we restructure template projects. It's
# broken for several reasons: It assumes paths that won't be
# right, and it assumes projects live in the same repo as this
# file.
def projects
files = Dir["#{repodir}/modules/projects/manifests/*.pp"]
names = files.map { |m| File.basename m, ".pp" }.sort
names.map do |name|
Boxen::Project.new "#{srcdir}/#{name}"
end
end
# The directory where Puppet expects configuration (which we don't
# use) and runtime information (which we generally don't care
# about). Default is `/tmp/boxen/puppet`. Respects the
# `BOXEN_PUPPET_DIR` environment variable.
def puppetdir
@puppetdir || ENV["BOXEN_PUPPET_DIR"] || "/tmp/boxen/puppet"
end
attr_writer :puppetdir
# The directory of the custom Boxen repo for an org. Default is
# `Dir.pwd`. Respects the `BOXEN_REPO_DIR` environment variable.
def repodir
@repodir || ENV["BOXEN_REPO_DIR"] || Dir.pwd
end
attr_writer :repodir
# The repo on GitHub to use for error reports and automatic
# updates, in `owner/repo` format. Default is the `origin` of a
# Git repo in `repodir`, if it exists and points at GitHub.
# Respects the `BOXEN_REPO_NAME` environment variable.
#
# Returns nil when no override is set and the origin can't be
# parsed (or `repodir` isn't a directory).
def reponame
  override = @reponame || ENV["BOXEN_REPO_NAME"]
  return override unless override.nil?
  return nil unless File.directory? repodir
  ghuri = URI(ghurl)
  url = Dir.chdir(repodir) { `git config remote.origin.url`.strip }
  # find the path and strip off the .git suffix; parenthesize the
  # regexp argument to `sub` (the bare literal form triggers Ruby's
  # ambiguous-argument warning) and use MatchData instead of $1
  repo_exp = Regexp.new(Regexp.escape(ghuri.host) + "[/:]([^/]+/[^/]+)")
  match = repo_exp.match(url)
  if $?.success? && match
    @reponame = match[1].sub(/\.git$/, "")
  end
end
attr_writer :reponame
# GitHub location (public or GitHub Enterprise)
def ghurl
@ghurl || ENV["BOXEN_GITHUB_ENTERPRISE_URL"] || "https://github.com"
end
attr_writer :ghurl
# Repository URL template (required for GitHub Enterprise)
def repotemplate
default = 'https://github.com/%s'
@repotemplate || ENV["BOXEN_REPO_URL_TEMPLATE"] || default
end
attr_writer :repotemplate
# Does this Boxen use a GitHub Enterprise instance?
def enterprise?
ghurl != "https://github.com"
end
# The directory where repos live. Default is
# `"/Users/#{user}/src"`.
def srcdir
@srcdir || ENV["BOXEN_SRC_DIR"] || "/Users/#{user}/src"
end
attr_writer :srcdir
# Don't auto-create issues on failure? Default is `false`.
# Respects the `BOXEN_NO_ISSUE` environment variable.
#
# Coerce the whole expression: the old `!!ENV[...] || @stealth`
# bound `!!` only to the env lookup, so the method leaked the raw
# ivar (possibly nil) whenever the env var was unset.
def stealth?
  !!(ENV["BOXEN_NO_ISSUE"] || @stealth)
end
attr_writer :stealth
# A GitHub OAuth token. Default is `nil`.
attr_reader :token
def token=(token)
@token = token
@api = nil
end
# A local user login. Default is the `USER` environment variable.
def user
@user || ENV["USER"]
end
attr_writer :user
def color?
@color
end
attr_writer :color
# The S3 host name. Default is `"s3.amazonaws.com"`.
# Respects the `BOXEN_S3_HOST` environment variable.
def s3host
@s3host || ENV["BOXEN_S3_HOST"] || "s3.amazonaws.com"
end
attr_writer :s3host
# The S3 bucket name. Default is `"boxen-downloads"`.
# Respects the `BOXEN_S3_BUCKET` environment variable.
def s3bucket
@s3bucket || ENV["BOXEN_S3_BUCKET"] || "boxen-downloads"
end
attr_writer :s3bucket
end
end
|
require 'rule_tagger'
module Brill
class Tagger
#
# will use the brown corpus as the default
#
# Pass explicit file paths to override the bundled Brown-corpus
# LEXICON, LEXICALRULEFILE and CONTEXTUALRULEFILE. All three are
# loaded into the native BrillTagger instance up front.
def initialize( lexicon = nil, lexical_rules = nil, contextual_rules = nil)
  @tagger = ::Tagger::BrillTagger.new
  lexicon ||= File.join(File.dirname(__FILE__),"brown","LEXICON")
  lexical_rules ||= File.join(File.dirname(__FILE__),"brown","LEXICALRULEFILE")
  contextual_rules ||= File.join(File.dirname(__FILE__),"brown","CONTEXTUALRULEFILE")
  Brill::Tagger.load_lexicon(@tagger, lexicon )
  Brill::Tagger.load_lexical_rules(@tagger, lexical_rules )
  Brill::Tagger.load_contextual_rules(@tagger, contextual_rules )
end
# given a body of text return a list of [token, tag] pairs whose
# tag is exactly 'JJ' (adjective)
def adjectives( text )
  tag(text).select { |_word, pos| pos == 'JJ' }
end
# given a body of text return a list of [token, tag] pairs whose
# tag contains 'NN' (noun variants: NN, NNS, NNP, ...)
def nouns( text )
  tag(text).select { |_word, pos| pos.match(/NN/) }
end
# returns similar results as tag, but further reduced by only selecting nouns
#
# Pipeline: (1) join consecutive NNP tokens into multi-word names,
# remembering each component word in `mappings`; (2) expand any
# remaining single NNP back to its joined form; (3) keep nouns longer
# than 3 chars; (4) if more than `max` remain, score duplicates
# (NNP=3, NNS=2, else 1) and iteratively raise a score threshold
# until at most `max` survive.
def suggest( text, max = 10 )
  tags = tag(text)
  #puts tags.inspect
  ptag = [nil,nil]
  # join NNP's together for names
  reduced_tags = []
  mappings = {} # keep a mapping of the joined words to expand
  tags.each{|tag|
    if ptag.last == 'NNP' and tag.last == 'NNP' and !ptag.first.match(/\.$/)
      ptag[0] += " " + tag.first
      # before combining these two create a mapping for each word to each word
      words = ptag.first.split(/\s/)
      i = 0
      #puts words.inspect
      until (i + 1) == words.size
        mappings[words[i]] = ptag.first
        mappings[words[i+1]] = ptag.first
        i += 1
      end
      #puts mappings.inspect
    elsif tag.last == 'NNP'
      ptag = tag
    elsif tag.last != 'NNP' and ptag.first != nil
      # a non-NNP ends the current name run; flush it
      reduced_tags << ptag
      reduced_tags << tag if tag.last.match( /^\w+$/ ) and tag.first.match(/^\w+$/)
      ptag = [nil,nil]
    elsif tag.last.match( /^\w+$/ ) and tag.first.match(/^\w+$/)
      reduced_tags << tag
    end
  }
  # now expand any NNP that appear
  tags = reduced_tags.map{|tag|
    if tag.last == 'NNP'
      #puts "#{tag.first} => #{mappings[tag.first]}"
      tag[0] = mappings[tag.first] if mappings.key?(tag.first)
    end
    tag
  }
  results = tags.select{|tag| tag.last.match(/NN/) and tag.first.size > 3 }
  if results.size > max
    # score repeated words so frequent/proper nouns win
    counts = {}
    tags = []
    results.each {|tag| counts[tag.first] = 0 }
    results.each do |tag|
      tags << tag if counts[tag.first] == 0
      counts[tag.first] += tag.last == 'NNP' ? 3 : (tag.last == 'NNS' ? 2 : 1)
    end
    tags.map!{|tag| [tag.first, tag.last,counts[tag.first]]}
    t = 1
    until tags.size <= max
      tags = tags.sort_by{|tag| tag.last}.select{|tag| tag.last > t }
      t += 1
      if t == 5
        # give up thresholding; just take the top-scored slice
        tags = tags.reverse[0..max]
        break
      end
    end
    tags
  else
    results
  end
end
# Tag a body of text
# returns an array like [[token,tag],[token,tag]...[token,tag]]
#
def tag( text )
  # XXX: the list of contractions is much larger then this... find'em
  text = text.gsub(/dont/,"don't").gsub(/Dont/,"Don't")
  text = text.gsub(/youre/,"you're")
  tokens = Brill::Tagger.tokenize( text )
  # seed each token with an initial guess, then let the native
  # tagger refine via lexical and contextual rules
  tags = Brill::Tagger.tag_start( tokens )
  @tagger.apply_lexical_rules( tokens, tags, [], 0 )
  @tagger.default_tag_finish( tokens, tags )
  # Brill uses these fake "STAART" tags to delimit the start & end of sentence.
  # (two on each side of both arrays; stripped again below)
  tokens << "STAART"
  tokens << "STAART"
  tokens.unshift "STAART"
  tokens.unshift "STAART"
  tags << "STAART"
  tags << "STAART"
  tags.unshift "STAART"
  tags.unshift "STAART"
  @tagger.apply_contextual_rules( tokens, tags, 1 )
  tags.shift
  tags.shift
  tokens.shift
  tokens.shift
  tags.pop
  tags.pop
  tokens.pop
  tokens.pop
  # zip tokens back up with their final tags
  pairs = []
  tokens.each_with_index do|t,i|
    pairs << [t,tags[i]]
  end
  pairs
end
private
# Read `file` and return its lines, newline terminators preserved.
def self.lines( file )
  File.open(file, 'r') { |f| f.readlines }
end
# load LEXICON
#
# Each line is "word tag1 tag2 ...": the first tag is registered as
# the word's primary tag, and every tag is registered as a known
# "word tag" combination. (Idiomatic `each` replaces the old manual
# while/index loop; behavior is unchanged.)
def self.load_lexicon(tagger,lexicon)
  Brill::Tagger.lines(lexicon).each do |line|
    parts = line.split(/\s/)
    word = parts.first
    tags = parts[1..-1]
    tagger.add_to_lexicon(word,tags.first)
    tags.each do |tag|
      tagger.add_to_lexicon_tags("#{word} #{tag}")
    end
  end
end
# load LEXICALRULEFILE
#
# Ported from perl: each non-blank line is a lexical rule, and rules
# whose second/third column is goodright/fgoodright (resp. goodleft/
# fgoodleft) additionally register a context word.
#
# BUG FIX: the old `while i < count ... next unless line.size > 0`
# form skipped the `i += 1` on blank lines, looping forever.
# Iterating with `each` removes the manual index entirely.
def self.load_lexical_rules(tagger,rules)
  self.lines(rules).each do |raw|
    line = raw.chomp
    next unless line.size > 0
    cols = line.split(/\s/)
    tagger.add_lexical_rule(line)
    if cols[1] == 'goodright'
      tagger.add_goodright(cols[0])
    elsif cols[2] == 'fgoodright'
      tagger.add_goodright(cols[1])
    elsif cols[1] == 'goodleft'
      tagger.add_goodleft(cols[0])
    elsif cols[2] == 'fgoodleft'
      tagger.add_goodleft(cols[1])
    end
  end
end
# load CONTEXTUALRULEFILE
#
# Every non-blank line is a contextual rule.
# BUG FIX: the old while-loop `next`ed before `i += 1` on blank
# lines, which looped forever; `each` needs no index.
def self.load_contextual_rules(tagger,rules)
  self.lines(rules).each do |raw|
    line = raw.chomp
    next unless line.size > 0
    tagger.add_contextual_rule(line)
  end
end
# Seed tags: tokens starting with a capital letter begin as proper
# nouns (NNP), everything else as common nouns (NN).
def self.tag_start(tokens)
  tokens.map { |token| /^[A-Z]/.match(token) ? 'NNP' : 'NN' }
end
# this tokenize code is a port from perl/sed (the Penn Treebank
# tokenizer); the commented s/.../.../ lines are the original rules.
def self.tokenize(text)
  # Normalize all whitespace
  text = text.gsub(/\s+/,' ')
  # Translate common "smart quote" characters to ASCII quote marks.
  # FIX: use the literal UTF-8 characters; the old 145.chr..148.chr
  # bytes (plus force_encoding to ASCII-8BIT) corrupted UTF-8 input.
  text.gsub!(/‘/,'`')
  text.gsub!(/’/,"'")
  text.gsub!(/“/,"``")
  text.gsub!(/”/,"''")
  # Attempt to get correct directional quotes
  # s{\"\b} { `` }g;
  text.gsub!(/\"\b/,' `` ')
  # s{\b\"} { '' }g;
  text.gsub!(/\b\"/," '' ")
  #s{\"(?=\s)} { '' }g;
  text.gsub!(/\"(?=\s)/," '' ")
  #s{\"} { `` }g;
  # FIX: any double quote still left becomes an opening quote; the
  # old port repeated the (?=\s) lookahead here, so this rule could
  # never match anything.
  text.gsub!(/\"/," `` ")
  # Isolate ellipses
  # s{\.\.\.} { ... }g;
  text.gsub!(/\.\.\./,' ... ')
  # Isolate any embedded punctuation chars
  # s{([,;:\@\#\$\%&])} { $1 }g;
  text.gsub!(/([,;:\@\#\$\%&])/, ' \1 ')
  # Assume sentence tokenization has been done first, so split FINAL
  # periods only.
  # s/ ([^.]) \. ([\]\)\}\>\"\']*) [ \t]* $ /$1 .$2 /gx;
  text.gsub!(/ ([^.]) \. ([\]\)\}\>\"\']*) [ \t]* $ /x, '\1 .\2 ')
  # however, we may as well split ALL question marks and exclamation points,
  # since they shouldn't have the abbrev.-marker ambiguity problem
  #s{([?!])} { $1 }g;
  text.gsub!(/([?!])/, ' \1 ')
  # parentheses, brackets, etc.
  #s{([\]\[\(\)\{\}\<\>])} { $1 }g;
  text.gsub!(/([\]\[\(\)\{\}\<\>])/,' \1 ')
  #s/(-{2,})/ $1 /g;
  text.gsub!(/(-{2,})/,' \1 ')
  # Add a space to the beginning and end of each line, to reduce
  # necessary number of regexps below.
  #s/$/ /;
  text.gsub!(/$/," ")
  #s/^/ /;
  text.gsub!(/^/," ")
  # possessive or close-single-quote
  # sed: s=\([^']\)' =\1 ' =g  — \(...\) is a *group* in sed, so the
  # parens must be unescaped in Ruby; the old port matched literal
  # parentheses and never split possessives like "dogs' ".
  text.gsub!(/([^\'])\' /,%q(\1 ' ))
  # as in it's, I'm, we'd
  #s/\'([smd]) / \'$1 /ig;
  text.gsub!(/\'([smd]) /i,%q( '\1 ))
  #s/\'(ll|re|ve) / \'$1 /ig;
  text.gsub!(/\'(ll|re|ve) /i,%q( '\1 ))
  #s/n\'t / n\'t /ig;
  text.gsub!(/n\'t /i," n't ")
  #s/ (can)(not) / $1 $2 /ig;
  text.gsub!(/ (can)(not) /i,' \1 \2 ')
  #s/ (d\')(ye) / $1 $2 /ig;
  text.gsub!(/ (d\')(ye) /i,' \1 \2 ')
  #s/ (gim)(me) / $1 $2 /ig;
  text.gsub!(/ (gim)(me) /i,' \1 \2 ')
  #s/ (gon)(na) / $1 $2 /ig;
  text.gsub!(/ (gon)(na) /i,' \1 \2 ')
  #s/ (got)(ta) / $1 $2 /ig;
  text.gsub!(/ (got)(ta) /i,' \1 \2 ')
  #s/ (lem)(me) / $1 $2 /ig;
  text.gsub!(/ (lem)(me) /i,' \1 \2 ')
  #s/ (more)(\'n) / $1 $2 /ig;
  text.gsub!(/ (more)(\'n) /i,' \1 \2 ')
  #s/ (\'t)(is|was) / $1 $2 /ig;
  text.gsub!(/ (\'t)(is|was) /i,' \1 \2 ')
  #s/ (wan)(na) / $1 $2 /ig;
  text.gsub!(/ (wan)(na) /i,' \1 \2 ')
  text.split(/\s/)
end
end
end
Fix Unicode handling issue in the tokenizer
require 'rule_tagger'
module Brill
class Tagger
#
# will use the brown corpus as the default
#
def initialize( lexicon = nil, lexical_rules = nil, contextual_rules = nil)
@tagger = ::Tagger::BrillTagger.new
lexicon ||= File.join(File.dirname(__FILE__),"brown","LEXICON")
lexical_rules ||= File.join(File.dirname(__FILE__),"brown","LEXICALRULEFILE")
contextual_rules ||= File.join(File.dirname(__FILE__),"brown","CONTEXTUALRULEFILE")
Brill::Tagger.load_lexicon(@tagger, lexicon )
Brill::Tagger.load_lexical_rules(@tagger, lexical_rules )
Brill::Tagger.load_contextual_rules(@tagger, contextual_rules )
end
# given a body of text return a list of adjectives
def adjectives( text )
tag(text).select{|t| t.last == 'JJ' }
end
# given a body of text return a list of nouns
def nouns( text )
tag(text).select{|t| t.last.match(/NN/) }
end
# returns similar results as tag, but further reduced by only selecting nouns
def suggest( text, max = 10 )
tags = tag(text)
#puts tags.inspect
ptag = [nil,nil]
# join NNP's together for names
reduced_tags = []
mappings = {} # keep a mapping of the joined words to expand
tags.each{|tag|
if ptag.last == 'NNP' and tag.last == 'NNP' and !ptag.first.match(/\.$/)
ptag[0] += " " + tag.first
# before combining these two create a mapping for each word to each word
words = ptag.first.split(/\s/)
i = 0
#puts words.inspect
until (i + 1) == words.size
mappings[words[i]] = ptag.first
mappings[words[i+1]] = ptag.first
i += 1
end
#puts mappings.inspect
elsif tag.last == 'NNP'
ptag = tag
elsif tag.last != 'NNP' and ptag.first != nil
reduced_tags << ptag
reduced_tags << tag if tag.last.match( /^\w+$/ ) and tag.first.match(/^\w+$/)
ptag = [nil,nil]
elsif tag.last.match( /^\w+$/ ) and tag.first.match(/^\w+$/)
reduced_tags << tag
end
}
# now expand any NNP that appear
tags = reduced_tags.map{|tag|
if tag.last == 'NNP'
#puts "#{tag.first} => #{mappings[tag.first]}"
tag[0] = mappings[tag.first] if mappings.key?(tag.first)
end
tag
}
results = tags.select{|tag| tag.last.match(/NN/) and tag.first.size > 3 }
if results.size > max
counts = {}
tags = []
results.each {|tag| counts[tag.first] = 0 }
results.each do |tag|
tags << tag if counts[tag.first] == 0
counts[tag.first] += tag.last == 'NNP' ? 3 : (tag.last == 'NNS' ? 2 : 1)
end
tags.map!{|tag| [tag.first, tag.last,counts[tag.first]]}
t = 1
until tags.size <= max
tags = tags.sort_by{|tag| tag.last}.select{|tag| tag.last > t }
t += 1
if t == 5
tags = tags.reverse[0..max]
break
end
end
tags
else
results
end
end
# Tag a body of text
# returns an array like [[token,tag],[token,tag]...[token,tag]]
#
def tag( text )
# XXX: the list of contractions is much larger then this... find'em
text = text.gsub(/dont/,"don't").gsub(/Dont/,"Don't")
text = text.gsub(/youre/,"you're")
tokens = Brill::Tagger.tokenize( text )
tags = Brill::Tagger.tag_start( tokens )
@tagger.apply_lexical_rules( tokens, tags, [], 0 )
@tagger.default_tag_finish( tokens, tags )
# Brill uses these fake "STAART" tags to delimit the start & end of sentence.
tokens << "STAART"
tokens << "STAART"
tokens.unshift "STAART"
tokens.unshift "STAART"
tags << "STAART"
tags << "STAART"
tags.unshift "STAART"
tags.unshift "STAART"
@tagger.apply_contextual_rules( tokens, tags, 1 )
tags.shift
tags.shift
tokens.shift
tokens.shift
tags.pop
tags.pop
tokens.pop
tokens.pop
pairs = []
tokens.each_with_index do|t,i|
pairs << [t,tags[i]]
end
pairs
end
private
def self.lines( file )
lines = []
File.open(file,'r') do|f|
lines = f.readlines
end
lines
end
# load LEXICON
def self.load_lexicon(tagger,lexicon)
lines = Brill::Tagger.lines(lexicon)
i = 0
count = lines.size
while i < count
line = lines[i]
#puts "line: #{line.inspect}:#{i.inspect}"
parts = line.split(/\s/)
#puts "word: #{word.inspect}, tags: #{tags.inspect}"
word = parts.first
tags = parts[1..-1]
tagger.add_to_lexicon(word,tags.first)
#puts "#{word} => #{tags.inspect}"
tags.each do|tag|
tagger.add_to_lexicon_tags("#{word} #{tag}")
end
i += 1
end
end
# load LEXICALRULEFILE
#
# Ported from perl: each non-blank line is a lexical rule, and rules
# whose second/third column is goodright/fgoodright (resp. goodleft/
# fgoodleft) additionally register a context word.
#
# BUG FIX: the old `while i < count ... next unless line.size > 0`
# form skipped the `i += 1` on blank lines, looping forever.
# Iterating with `each` removes the manual index entirely.
def self.load_lexical_rules(tagger,rules)
  self.lines(rules).each do |raw|
    line = raw.chomp
    next unless line.size > 0
    cols = line.split(/\s/)
    tagger.add_lexical_rule(line)
    if cols[1] == 'goodright'
      tagger.add_goodright(cols[0])
    elsif cols[2] == 'fgoodright'
      tagger.add_goodright(cols[1])
    elsif cols[1] == 'goodleft'
      tagger.add_goodleft(cols[0])
    elsif cols[2] == 'fgoodleft'
      tagger.add_goodleft(cols[1])
    end
  end
end
# load CONTEXTUALRULEFILE
#
# Every non-blank line is a contextual rule.
# BUG FIX: the old while-loop `next`ed before `i += 1` on blank
# lines, which looped forever; `each` needs no index.
def self.load_contextual_rules(tagger,rules)
  self.lines(rules).each do |raw|
    line = raw.chomp
    next unless line.size > 0
    tagger.add_contextual_rule(line)
  end
end
def self.tag_start(tokens)
tokens.map{|token| token.match(/^[A-Z]/) ? 'NNP' : 'NN' }
end
# this tokenize code is a port from perl
def self.tokenize(text)
# Normalize all whitespace
text = text.gsub(/\s+/,' ')
# translate some common extended ascii characters to quotes
text.gsub!(/‘/,'`')
text.gsub!(/’/,"'")
text.gsub!(/“/,"``")
text.gsub!(/”/,"''")
# Attempt to get correct directional quotes
# s{\"\b} { `` }g;
text.gsub!(/\"\b/,' `` ')
# s{\b\"} { '' }g;
text.gsub!(/\b\"/," '' ")
#s{\"(?=\s)} { '' }g;
text.gsub!(/\"(?=\s)/," '' ")
#s{\"} { `` }g;
text.gsub!(/\"(?=\s)/," `` ")
# Isolate ellipses
# s{\.\.\.} { ... }g;
text.gsub!(/\.\.\./,' ... ')
# Isolate any embedded punctuation chars
# s{([,;:\@\#\$\%&])} { $1 }g;
text.gsub!(/([,;:\@\#\$\%&])/, ' \1 ')
# Assume sentence tokenization has been done first, so split FINAL
# periods only.
# s/ ([^.]) \. ([\]\)\}\>\"\']*) [ \t]* $ /$1 .$2 /gx;
text.gsub!(/ ([^.]) \. ([\]\)\}\>\"\']*) [ \t]* $ /x, '\1 .\2 ')
# however, we may as well split ALL question marks and exclamation points,
# since they shouldn't have the abbrev.-marker ambiguity problem
#s{([?!])} { $1 }g;
text.gsub!(/([?!])/, ' \1 ')
# parentheses, brackets, etc.
#s{([\]\[\(\)\{\}\<\>])} { $1 }g;
text.gsub!(/([\]\[\(\)\{\}\<\>])/,' \1 ')
#s/(-{2,})/ $1 /g;
text.gsub!(/(-{2,})/,' \1 ')
# Add a space to the beginning and end of each line, to reduce
# necessary number of regexps below.
#s/$/ /;
text.gsub!(/$/," ")
#s/^/ /;
text.gsub!(/^/," ")
# possessive or close-single-quote
#s/\([^\']\)\' /$1 \' /g;
text.gsub!(/\([^\']\)\' /,%q(\1 ' ))
# as in it's, I'm, we'd
#s/\'([smd]) / \'$1 /ig;
text.gsub!(/\'([smd]) /i,%q( '\1 ))
#s/\'(ll|re|ve) / \'$1 /ig;
text.gsub!(/\'(ll|re|ve) /i,%q( '\1 ))
#s/n\'t / n\'t /ig;
text.gsub!(/n\'t /i," n't ")
#s/ (can)(not) / $1 $2 /ig;
text.gsub!(/ (can)(not) /i,' \1 \2 ')
#s/ (d\')(ye) / $1 $2 /ig;
text.gsub!(/ (d\')(ye) /i,' \1 \2 ')
#s/ (gim)(me) / $1 $2 /ig;
text.gsub!(/ (gim)(me) /i,' \1 \2 ')
#s/ (gon)(na) / $1 $2 /ig;
text.gsub!(/ (gon)(na) /i,' \1 \2 ')
#s/ (got)(ta) / $1 $2 /ig;
text.gsub!(/ (got)(ta) /i,' \1 \2 ')
#s/ (lem)(me) / $1 $2 /ig;
text.gsub!(/ (lem)(me) /i,' \1 \2 ')
#s/ (more)(\'n) / $1 $2 /ig;
text.gsub!(/ (more)(\'n) /i,' \1 \2 ')
#s/ (\'t)(is|was) / $1 $2 /ig;
text.gsub!(/ (\'t)(is|was) /i,' \1 \2 ')
#s/ (wan)(na) / $1 $2 /ig;
text.gsub!(/ (wan)(na) /i,' \1 \2 ')
text.split(/\s/)
end
end
end
|
require 'rubygems'
require 'bud/depanalysis'
require 'bud/provenance'
require 'bud/rewrite'
require 'bud/sane_r2r'
require 'parse_tree'
class Bud
attr_reader :shredded_rules, :provides, :demands, :strat_state
# Rewrite the program into stratified form: shred all rules out of
# the class hierarchy, stratify them, and regroup each rule's source
# text by stratum. Returns the array of per-stratum code strings.
def meta_rewrite
  # N.B. -- parse_tree will not be supported in ruby 1.9.
  # however, we can still pass the "string" code of bud modules
  # to ruby_parse (but not the "live" class)
  @defns = []
  @shredded_rules = shred_rules
  @strat_state = stratify(@shredded_rules)
  smap = binaryrel2map(@strat_state.stratum)
  done = {}
  @rewritten_strata = []
  # sort by operator precedence so strata are assembled deterministically
  @shredded_rules.sort{|a, b| oporder(a[2]) <=> oporder(b[2])}.each do |d|
    belongs_in = smap[d[1]]
    # rules over tables absent from the stratum map go to stratum 0
    belongs_in = 0 if belongs_in.nil?
    if @rewritten_strata[belongs_in].nil?
      @rewritten_strata[belongs_in] = ""
    end
    # d[0] is the rule id; only append each rule's source (d[5]) once
    unless done[d[0]]
      @rewritten_strata[belongs_in] = @rewritten_strata[belongs_in] + "\n"+ d[5]
    end
    done[d[0]] = true
  end
  visualize(@strat_state, "#{self.class}_gvoutput", @shredded_rules) if @options['visualize']
  dump_rewrite if @options['dump']
  return @rewritten_strata
end
# Convert a binary relation (an enumerable of [key, value] tuples)
# into a Hash mapping first column to second.
def binaryrel2map(rel)
  rel.each_with_object({}) do |pair, acc|
    acc[pair[0]] = pair[1]
  end
end
# Dump the extracted declarations and the per-stratum rewritten
# source to "<ClassName>_rewritten.txt" for debugging.
def dump_rewrite
  fout = File.new(self.class.to_s + "_rewritten.txt", "w")
  fout.puts "Declarations:"
  @defns.each do |d|
    fout.puts d
  end
  @rewritten_strata.each_with_index do |r, i|
    fout.puts "R[#{i}] :\n #{r}"
  end
  fout.close
end
# Yield each ancestor of self's class that appears after Bud in
# reversed ancestor order (i.e. the user's own classes/modules),
# skipping Bud itself and everything beneath it.
def each_relevant_ancestor
  relevant = self.class.ancestors.reverse.drop_while { |anc| anc != Bud }
  relevant.drop(1).each { |anc| yield anc }
end
# Run the Rewriter over the parse tree `pt` of one declaration
# method, numbering rules starting at `seed` and resolving table
# names through `tab_map`.
#
# Returns the Rewriter (enumerable over shredded rules), or nil when
# `pt` is empty. (Removed the dead, unused `rules = []` local.)
def rewrite(pt, tab_map, seed)
  unless pt[0].nil?
    rewriter = Rewriter.new(seed, tab_map, @options['provenance'])
    rewriter.process(pt)
  end
  return rewriter
end
# Build glue rules bridging module-scoped tables to their public
# interface names, as the source of a synthetic method ("foobar"),
# then parse and shred it via rewrite. Returns the shredded rules.
def write_postamble(tabs, seed)
  # rationale for the postamble:
  # for any module M, any table T declared within is internally named m_t.
  # if T is an input interface, we need to add a rule m_t <- t.
  # if T is an output interface, we need a rule t <- m_t.
  postamble = "def foobar\n"
  tabs.each_pair do |k, v|
    # the most recent declaration of table k wins
    last = v[v.length-1]
    if last[1] == "input"
      postamble = postamble + "#{last[0]} <= #{k}.map{|t| puts \"INPUT POSTAMBLE\" or t }\n\n"
    elsif last[1] == "output"
      postamble = postamble + "#{k} <= #{last[0]}.map{|t| puts \"OUTPUT POSTAMBLE\" or t }\n\n"
    else
      # no declared direction: wire the tables both ways
      left = "#{k} <= #{last[0]}"
      right = "#{last[0]} <= #{k}"
      postamble = postamble + "#{left}.map{|t| puts \"VISIBILITy POSTAMBLE #{left} :: \" + t.inspect or t }\n\n"
      postamble = postamble + "#{right}.map{|t| puts \"VISIBILITy POSTAMBLE #{right} :: \" + t.inspect or t }\n\n"
    end
  end
  postamble = postamble + "\nend\n"
  return rewrite(ParseTree.translate(postamble), {}, seed)
end
# Extract the `state` method of ancestor `anc`, evaluate the
# regenerated declarations, and merge the discovered tables into
# `tabs` (name => list of declarations). Returns `tabs`.
def shred_state(anc, tabs)
  stp = ParseTree.translate(anc, "state")
  return tabs if stp[0].nil?
  state_reader = StateExtractor.new(anc.to_s)
  res = state_reader.process(stp)
  # create the state
  @defns << res
  # NOTE(review): eval of generated declaration code — safe only
  # because `res` comes from our own parse tree, never user input
  eval(res)
  state_reader.tabs.each_pair do |k, v|
    # idiomatic default-init (was: `unless tabs[k] ... end`)
    tabs[k] ||= []
    tabs[k] << v
  end
  return tabs
end
# Collect every rule from every relevant ancestor's declaration
# methods, plus the scoping postamble glue when enabled. Returns a
# flat array of shredded rules.
def shred_rules
  # to completely characterize the rules of a bud class we must extract
  # from all parent classes/modules
  # after making this pass, we no longer care about the names of methods.
  # we are shredding down to the granularity of rule heads.
  rules = []
  seed = 0
  rulebag = {}
  tabs = {}
  each_relevant_ancestor do |anc|
    tabs = shred_state(anc, tabs) if @options['scoping']
    @declarations.each do |d|
      rw = rewrite(ParseTree.translate(anc, d), tabs, seed)
      unless rw.nil?
        # continue rule numbering where the last rewriter left off
        seed = rw.rule_indx
        rulebag[d] = []
        rw.each{ |r| rulebag[d] << r }
      end
    end
  end
  rulebag.each_pair do |k, v|
    v.each do |val|
      #puts "RULEBAG #{k.inspect} = #{val.inspect}"
      rules << val
    end
  end
  if @options['scoping']
    # leave numbering headroom for the generated glue rules
    res = write_postamble(tabs, seed + 100)
    res.each {|p| rules << p }
  end
  return rules
end
# Feed table metadata and rule dependencies into a Stratification
# bud instance and tick it twice (once empty, once loaded).
# Returns the ticked Stratification.
def stratify(depends)
  strat = Stratification.new("localhost", 12345)
  strat.tick
  @tables.each do |t|
    strat.tab_info << [t[0].to_s, t[1].class, t[1].schema.length]
  end
  depends.each do |d|
    # refine the generic '<' operator into <-, <~ or <+ by sniffing
    # the rule's source text (d[5])
    if d[2] == '<'
      if d[5] =~ /-@/
        realop = "<-"
      elsif d[5] =~ /\~ \)/
        # hackerly
        realop = "<~"
      else
        realop = "<+"
      end
    else
      realop = d[2]
    end
    # seriously, consider named notation for d.
    strat.depends << [ d[0], d[1], realop, d[3], d[4] ]
  end
  strat.tick
  return strat
end
# Sort key used to order rules by operator before building strata:
# '=' first, then '<<', then '<=', and everything else last.
def oporder(op)
  { '=' => 0, '<<' => 1, '<=' => 2 }.fetch(op, 3)
end
# When visualization is enabled, log and collect each table's
# cardinality for this tick, then emit the SVG and HTML views.
def do_cards
  return unless options['visualize']
  cards = {}
  @tables.each do |t|
    puts "#{@budtime}, #{t[0]}, #{t[1].length}"
    cards[t[0].to_s] = t[1].length
  end
  write_svgs(cards)
  write_html
end
# Render the dependency graph (annotated with cardinalities `c`)
# for the current tick into time_pics/<Class>_tm_<budtime>.
def write_svgs(c)
  # NOTE(review): shells out to mkdir; failures (e.g. dir already
  # exists) are silently ignored — FileUtils.mkdir_p would be cleaner
  `mkdir time_pics`
  return if @strat_state.nil?
  puts "construct viz with cards = #{c.class}"
  gv = Viz.new(@strat_state.stratum, @tables, @strat_state.cycle, nil, c)
  gv.process(@strat_state.depends)
  gv.finish("time_pics/#{self.class}_tm_#{@budtime}")
end
# Write an HTML page embedding this tick's SVG, with last/next
# navigation links to the neighboring ticks' pages.
def write_html
  nm = "#{self.class}_tm_#{@budtime}"
  prev = "#{self.class}_tm_#{@budtime-1}"
  nxt = "#{self.class}_tm_#{@budtime+1}"
  fout = File.new("time_pics/#{nm}.html", "w")
  fout.puts "<center><h1>#{self.class} @ #{@budtime}</h1><center>"
  #fout.puts "<img src=\"#{ENV['PWD']}/time_pics/#{nm}.svg\">"
  fout.puts "<embed src=\"#{ENV['PWD']}/time_pics/#{nm}.svg\" width=\"100%\" height=\"75%\" type=\"image/svg+xml\" pluginspage=\"http://www.adobe.com/svg/viewer/install/\" />"
  #fout.puts "<embed src=\"#{ENV['PWD']}/time_pics/#{nm}.svg\" type=\"image/svg+xml\" pluginspage=\"http://www.adobe.com/svg/viewer/install/\" />"
  fout.puts "<hr><h2><a href=\"#{ENV['PWD']}/time_pics/#{prev}.html\">last</a>"
  fout.puts "<a href=\"#{ENV['PWD']}/time_pics/#{nxt}.html\">next</a>"
  fout.close
end
# Draw the full stratified dependency graph (including the rule
# dump) to the output file(s) named `name`.
def visualize(strat, name, rules, depa=nil)
  #@tables.each do |t|
  #  @table_meta << [t[0], t[1].class]
  #end
  #gv = Viz.new(strat.stratum, @table_meta, strat.cycle, depa)
  puts "VIZZ"
  gv = Viz.new(strat.stratum, @tables, strat.cycle, depa)
  gv.process(strat.depends)
  gv.dump(rules)
  gv.finish(name)
end
end
Code cleanup for meta stuff.
require 'rubygems'
require 'bud/depanalysis'
require 'bud/provenance'
require 'bud/rewrite'
require 'bud/sane_r2r'
require 'parse_tree'
class Bud
attr_reader :shredded_rules, :provides, :demands, :strat_state
def meta_rewrite
# N.B. -- parse_tree will not be supported in ruby 1.9.
# however, we can still pass the "string" code of bud modules
# to ruby_parse (but not the "live" class)
@defns = []
@shredded_rules = shred_rules
@strat_state = stratify(@shredded_rules)
smap = binaryrel2map(@strat_state.stratum)
done = {}
@rewritten_strata = []
@shredded_rules.sort{|a, b| oporder(a[2]) <=> oporder(b[2])}.each do |d|
belongs_in = smap[d[1]]
belongs_in = 0 if belongs_in.nil?
if @rewritten_strata[belongs_in].nil?
@rewritten_strata[belongs_in] = ""
end
unless done[d[0]]
@rewritten_strata[belongs_in] = @rewritten_strata[belongs_in] + "\n"+ d[5]
end
done[d[0]] = true
end
visualize(@strat_state, "#{self.class}_gvoutput", @shredded_rules) if @options['visualize']
dump_rewrite if @options['dump']
return @rewritten_strata
end
def binaryrel2map(rel)
smap = {}
rel.each do |s|
smap[s[0]] = s[1]
end
return smap
end
def dump_rewrite
fout = File.new(self.class.to_s + "_rewritten.txt", "w")
fout.puts "Declarations:"
@defns.each do |d|
fout.puts d
end
@rewritten_strata.each_with_index do |r, i|
fout.puts "R[#{i}] :\n #{r}"
end
fout.close
end
def each_relevant_ancestor
on = false
self.class.ancestors.reverse.each do |anc|
if on
yield anc
elsif anc == Bud
on = true
end
end
end
def rewrite(pt, tab_map, seed)
unless pt[0].nil?
rewriter = Rewriter.new(seed, tab_map, @options['provenance'])
rewriter.process(pt)
end
return rewriter
end
def write_postamble(tabs, seed)
# rationale for the postamble:
# for any module M, any table T declared within is internally named m_t.
# if T is an input interface, we need to add a rule m_t <- t.
# if T is an output interface, we need a rule t <- m_t.
postamble = "def foobar\n"
tabs.each_pair do |k, v|
last = v[v.length-1]
if last[1] == "input"
postamble = postamble + "#{last[0]} <= #{k}.map{|t| puts \"INPUT POSTAMBLE\" or t }\n\n"
elsif last[1] == "output"
postamble = postamble + "#{k} <= #{last[0]}.map{|t| puts \"OUTPUT POSTAMBLE\" or t }\n\n"
else
left = "#{k} <= #{last[0]}"
right = "#{last[0]} <= #{k}"
postamble = postamble + "#{left}.map{|t| puts \"VISIBILITy POSTAMBLE #{left} :: \" + t.inspect or t }\n\n"
postamble = postamble + "#{right}.map{|t| puts \"VISIBILITy POSTAMBLE #{right} :: \" + t.inspect or t }\n\n"
end
end
postamble = postamble + "\nend\n"
return rewrite(ParseTree.translate(postamble), {}, seed)
end
# Extract table declarations from ancestor `anc`'s "state" method and
# merge them into `tabs` (table name => list of declaration tuples).
# Returns `tabs` (possibly unchanged when `anc` has no state sexp).
def shred_state(anc, tabs)
stp = ParseTree.translate(anc, "state")
return tabs if stp[0].nil?
state_reader = StateExtractor.new(anc.to_s)
res = state_reader.process(stp)
# create the state
#puts "DEFN : #{res}"
@defns << res
# Executes the generated declaration code in this instance's context.
# NOTE(review): eval of generated code — acceptable only because
# `state` blocks come from the program's own class hierarchy.
eval(res)
state_reader.tabs.each_pair do |k, v|
#puts "tab KEYPAIR #{k.inspect} = #{v.inspect}"
tabs[k] ||= []
tabs[k] << v
end
return tabs
end
def shred_rules
# to completely characterize the rules of a bud class we must extract
# from all parent classes/modules
# after making this pass, we no longer care about the names of methods.
# we are shredding down to the granularity of rule heads.
rules = []
seed = 0
rulebag = {}
tabs = {}
each_relevant_ancestor do |anc|
# Gather this ancestor's table declarations first so its rules can
# be rewritten against the scoping map.
tabs = shred_state(anc, tabs) if @options['scoping']
@declarations.each do |d|
rw = rewrite(ParseTree.translate(anc, d), tabs, seed)
unless rw.nil?
# Carry the rule index forward so rule numbers stay unique
# across declarations and ancestors.
seed = rw.rule_indx
rulebag[d] = []
rw.each{ |r| rulebag[d] << r }
end
end
end
# Flatten the per-declaration buckets into a single rule list.
rulebag.each_pair do |k, v|
v.each do |val|
#puts "RULEBAG #{k.inspect} = #{val.inspect}"
rules << val
end
end
if @options['scoping']
# +100 leaves headroom so postamble rule indexes don't collide with
# the ones above. NOTE(review): assumes < 100 rules remain — confirm.
res = write_postamble(tabs, seed + 100)
res.each {|p| rules << p }
end
return rules
end
# Build a Stratification program, feed it table metadata and the
# dependency edges, tick it, and return the ticked instance.
def stratify(depends)
strat = Stratification.new("localhost", 12345)
strat.tick
# Table metadata: name, class, and arity (schema length).
@tables.each do |t|
strat.tab_info << [t[0].to_s, t[1].class, t[1].schema.length]
end
depends.each do |d|
if d[2] == '<'
# The parser collapses merge operators to '<'; recover the real
# operator by pattern-matching the rule text in d[5].
if d[5] =~ /-@/
realop = "<-"
elsif d[5] =~ /\~ \)/
# hackerly
realop = "<~"
else
realop = "<+"
end
else
realop = d[2]
end
# seriously, consider named notation for d.
strat.depends << [ d[0], d[1], realop, d[3], d[4] ]
end
strat.tick
return strat
end
# Relative sort weight for rule operators: '=' sorts first, then '<<',
# then '<='; every other operator sorts last.
def oporder(op)
  { '=' => 0, '<<' => 1, '<=' => 2 }.fetch(op, 3)
end
# When visualization is enabled, print per-table cardinalities for the
# current timestep and emit the corresponding SVG and HTML snapshot.
def do_cards
return unless options['visualize']
cards = {}
@tables.each do |t|
puts "#{@budtime}, #{t[0]}, #{t[1].length}"
cards[t[0].to_s] = t[1].length
end
write_svgs(cards)
write_html
end
# Render the dependency graph for the current timestep into
# time_pics/<Class>_tm_<budtime>, annotated with table cardinalities.
def write_svgs(c)
# NOTE(review): prints a harmless shell error once the directory
# exists; FileUtils.mkdir_p would be quieter.
`mkdir time_pics`
return if @strat_state.nil?
puts "construct viz with cards = #{c.class}"
gv = Viz.new(@strat_state.stratum, @tables, @strat_state.cycle, nil, c)
gv.process(@strat_state.depends)
gv.finish("time_pics/#{self.class}_tm_#{@budtime}")
end
# Write an HTML page for the current timestep embedding the SVG plus
# last/next navigation links to the neighboring timestep pages.
def write_html
nm = "#{self.class}_tm_#{@budtime}"
prev = "#{self.class}_tm_#{@budtime-1}"
nxt = "#{self.class}_tm_#{@budtime+1}"
fout = File.new("time_pics/#{nm}.html", "w")
fout.puts "<center><h1>#{self.class} @ #{@budtime}</h1><center>"
#fout.puts "<img src=\"#{ENV['PWD']}/time_pics/#{nm}.svg\">"
fout.puts "<embed src=\"#{ENV['PWD']}/time_pics/#{nm}.svg\" width=\"100%\" height=\"75%\" type=\"image/svg+xml\" pluginspage=\"http://www.adobe.com/svg/viewer/install/\" />"
#fout.puts "<embed src=\"#{ENV['PWD']}/time_pics/#{nm}.svg\" type=\"image/svg+xml\" pluginspage=\"http://www.adobe.com/svg/viewer/install/\" />"
fout.puts "<hr><h2><a href=\"#{ENV['PWD']}/time_pics/#{prev}.html\">last</a>"
fout.puts "<a href=\"#{ENV['PWD']}/time_pics/#{nxt}.html\">next</a>"
fout.close
end
# Render the full stratified program graph to files named <name> and
# dump the rule text alongside it.
def visualize(strat, name, rules, depa=nil)
#@tables.each do |t|
# @table_meta << [t[0], t[1].class]
#end
#gv = Viz.new(strat.stratum, @table_meta, strat.cycle, depa)
puts "VIZZ"
gv = Viz.new(strat.stratum, @tables, strat.cycle, depa)
gv.process(strat.depends)
gv.dump(rules)
gv.finish(name)
end
end
|
#!/usr/bin/env ruby
require 'ftools'
require 'benchmark'
require 'net/ssh'
require 'memoize'
require 'wopen3'
require 'buffet/master'
require 'buffet/campfire'
require 'buffet/status_message'
include Memoize
module Buffet
# Orchestrates preparing a distributed test run: cloning/updating the
# repository under test, syncing Buffet to worker hosts, and setting up
# databases locally or over SSH.
class Setup
# Initialize contains stuff that can be done preliminarily; that is, it's not
# imperative that we have to be running a test in order to run this function.
# Right now, it just sets instance variables and clones the working directory
# if it doesn't already exist.
def initialize working_dir, hosts, status, repo
@status = status
# Root of the Buffet checkout itself (two directories above this file).
@buffet_dir = File.expand_path(File.dirname(__FILE__) + "/../..")
@working_dir = working_dir
@hosts = hosts
@progress = 0
@repo = repo
clone_repo
end
# Clone the repository into the working directory, if necessary. Will happen
# if the dir is nonexistent or a clone of the wrong repository.
def clone_repo
# TODO: This is a sloppy way to get the remote. Move towards using a ruby
# git wrapper.
remote = `cd #{Settings.working_dir} && git remote -v | grep "(fetch)" | head -1 | cut -f2 | cut -d" " -f1`.chomp
return if remote == Settings.get["repository"]
# NOTE(review): wipes the entire working directory without asking —
# destructive; consider prompting for confirmation first.
puts "DELETING EVERYTHING."
FileUtils.rm_rf Settings.working_dir if File.directory? Settings.working_dir
@status.set "Cloning #{@repo}. This will only happen once.\n"
`git clone #{@repo} #{Settings.working_dir}`
end
# Synchronize this directory (the buffet directory) to all hosts.
# Runs rsync plus a remote `bundle check`/`bundle install` on every
# host in parallel, one thread per host, and waits for all of them.
def sync_hosts hosts
threads = []
@status.set "Updating #{hosts.join(", ")}"
hosts.each do |host|
threads << Thread.new do
# Sync all of Buffet.
`rsync -aqz --delete --exclude=tmp --exclude=.bundle --exclude=log --exclude=doc --exclude=.git #{Settings.root_dir} -e "ssh " buffet@#{host}:~/`
# Run bundle install if necessary.
`ssh buffet@#{host} 'cd ~/#{Settings.root_dir_name}/working-directory && bundle check > /dev/null; if (($? != 0)); then bundle install --without production --path ~/buffet-gems; fi'`
end
end
threads.each do |thread|
thread.join
end
end
# Run db_setup in the working directory — locally when this machine is
# one of the configured hosts, otherwise over SSH on the first host.
def setup_db
Dir.chdir(@working_dir) do
@status.set "Running db_setup\n"
if Settings.get['hosts'].include? Settings.hostname
@status.increase_progress /^== [\d]+ /, 1120, Settings.root_dir + "/db_setup " + @hosts.join(" ")
else
# We don't want to execute db_setup on current machine, since it's not in the hosts.
# Copy db_setup to an arbitrary host we're allowed to use.
#
# This is primarily useful for developing Buffet, since we want to be
# able to run Buffet from the same computer we run tests on, but we
# don't want to have conflicts on the database.
new_setup_host = "buffet@#{@hosts.first}"
new_setup_location = "~/#{Settings.root_dir_name}/working-directory"
`scp #{Settings.root_dir}/db_setup #{new_setup_host}:#{new_setup_location}/db_setup`
command = "ssh #{new_setup_host} \"cd #{new_setup_location}; ./db_setup " + @hosts.join(" ") + "\""
puts command
Net::SSH.start(@hosts.first, 'buffet') do |ssh|
channel = ssh.open_channel do |ch|
ch.exec "cd #{new_setup_location}; ./db_setup " + @hosts.join(" ") do |ch, success|
# Stream remote stdout back to our stdout.
ch.on_data do |c, data|
puts data
end
# can also capture on_extended_data for stderr
end
end
channel.wait
end
end
expect_success("Failed to db_setup on local machine.")
end
end
# Check out `branch` of `remote` (falling back to treating `branch` as
# a raw SHA), hard-reset the tree and submodules, and reinstall gems.
def update_local_dir remote, branch
Dir.chdir(@working_dir) do
`git fetch #{remote}`
rev = `git rev-parse #{remote}/#{branch}`.chomp # Get hash
if $?.exitstatus != 0
# probably got passed a SHA-1 hash instead of a branch name
rev = `git rev-parse #{branch}`.chomp
end
expect_success('Rev-parse failed')
@status.set "Updating local repository.\n"
@status.increase_progress(/a/, 30,
"git checkout #{rev} &&
git reset --hard #{rev} &&
git clean -f &&
git submodule update --init &&
git submodule foreach git reset --hard HEAD &&
git submodule foreach git clean -f".gsub(/\n/, ''))
expect_success("Failed to clone local repository.")
ENV['RAILS_ENV'] = 'test'
@status.set "Updating local gems.\n"
`bundle install --without production --path ~/buffet-gems`
expect_success("Failed to bundle install on local machine.")
end
end
# Run the tests. There's lots of setup required before we can actually run
# them, including grabbing the latest version, installing gems, etc.
def run(dont_run_migrations, branch="master")
remote = 'origin'
update_local_dir remote, branch
@status.set "Copying Buffet to hosts."
sync_hosts @hosts
@status.set "Running bundle install on hosts."
setup_db unless dont_run_migrations or (not File.exists?(Settings.root_dir + "/setup_db"))
end
def get_failures
if @master
@master.get_failures_list
else
[]
end
end
private
#TODO: Take diagnostic output also.
# Abort the whole process when the last child command (backticks,
# system) exited non-zero.
def expect_success(failure_msg)
if $?.exitstatus != 0
puts failure_msg
exit 1
end
end
end
end
Be a little more cautious about deleting the entire repository.
#!/usr/bin/env ruby
require 'ftools'
require 'benchmark'
require 'net/ssh'
require 'memoize'
require 'wopen3'
require 'buffet/master'
require 'buffet/campfire'
require 'buffet/status_message'
include Memoize
module Buffet
# Orchestrates preparing a distributed test run: cloning/updating the
# repository under test, syncing Buffet to worker hosts, and setting up
# databases locally or over SSH.
class Setup
# Initialize contains stuff that can be done preliminarily; that is, it's not
# imperative that we have to be running a test in order to run this function.
# Right now, it just sets instance variables and clones the working directory
# if it doesn't already exist.
def initialize working_dir, hosts, status, repo
@status = status
# Root of the Buffet checkout itself (two directories above this file).
@buffet_dir = File.expand_path(File.dirname(__FILE__) + "/../..")
@working_dir = working_dir
@hosts = hosts
@progress = 0
@repo = repo
clone_repo
end
# Clone the repository into the working directory, if necessary. Will happen
# if the dir is nonexistent or a clone of the wrong repository.
def clone_repo
# TODO: This is a sloppy way to get the remote. Move towards using a ruby
# git wrapper.
remote = `cd #{Settings.working_dir} && git remote -v | grep "(fetch)" | head -1 | cut -f2 | cut -d" " -f1`.chomp
return if remote == Settings.get["repository"]
# Require interactive confirmation on stdin before wiping the working
# directory (so this cannot be run unattended when a wipe is needed).
puts "About to delete everything. Continue? (y/n)"
exit 0 unless gets.chomp == "y"
FileUtils.rm_rf Settings.working_dir if File.directory? Settings.working_dir
@status.set "Cloning #{@repo}. This will only happen once.\n"
`git clone #{@repo} #{Settings.working_dir}`
end
# Synchronize this directory (the buffet directory) to all hosts.
# Runs rsync plus a remote `bundle check`/`bundle install` on every
# host in parallel, one thread per host, and waits for all of them.
def sync_hosts hosts
threads = []
@status.set "Updating #{hosts.join(", ")}"
hosts.each do |host|
threads << Thread.new do
# Sync all of Buffet.
`rsync -aqz --delete --exclude=tmp --exclude=.bundle --exclude=log --exclude=doc --exclude=.git #{Settings.root_dir} -e "ssh " buffet@#{host}:~/`
# Run bundle install if necessary.
`ssh buffet@#{host} 'cd ~/#{Settings.root_dir_name}/working-directory && bundle check > /dev/null; if (($? != 0)); then bundle install --without production --path ~/buffet-gems; fi'`
end
end
threads.each do |thread|
thread.join
end
end
# Run db_setup in the working directory — locally when this machine is
# one of the configured hosts, otherwise over SSH on the first host.
def setup_db
Dir.chdir(@working_dir) do
@status.set "Running db_setup\n"
if Settings.get['hosts'].include? Settings.hostname
@status.increase_progress /^== [\d]+ /, 1120, Settings.root_dir + "/db_setup " + @hosts.join(" ")
else
# We don't want to execute db_setup on current machine, since it's not in the hosts.
# Copy db_setup to an arbitrary host we're allowed to use.
#
# This is primarily useful for developing Buffet, since we want to be
# able to run Buffet from the same computer we run tests on, but we
# don't want to have conflicts on the database.
new_setup_host = "buffet@#{@hosts.first}"
new_setup_location = "~/#{Settings.root_dir_name}/working-directory"
`scp #{Settings.root_dir}/db_setup #{new_setup_host}:#{new_setup_location}/db_setup`
command = "ssh #{new_setup_host} \"cd #{new_setup_location}; ./db_setup " + @hosts.join(" ") + "\""
puts command
Net::SSH.start(@hosts.first, 'buffet') do |ssh|
channel = ssh.open_channel do |ch|
ch.exec "cd #{new_setup_location}; ./db_setup " + @hosts.join(" ") do |ch, success|
# Stream remote stdout back to our stdout.
ch.on_data do |c, data|
puts data
end
# can also capture on_extended_data for stderr
end
end
channel.wait
end
end
expect_success("Failed to db_setup on local machine.")
end
end
# Check out `branch` of `remote` (falling back to treating `branch` as
# a raw SHA), hard-reset the tree and submodules, and reinstall gems.
def update_local_dir remote, branch
Dir.chdir(@working_dir) do
`git fetch #{remote}`
rev = `git rev-parse #{remote}/#{branch}`.chomp # Get hash
if $?.exitstatus != 0
# probably got passed a SHA-1 hash instead of a branch name
rev = `git rev-parse #{branch}`.chomp
end
expect_success('Rev-parse failed')
@status.set "Updating local repository.\n"
@status.increase_progress(/a/, 30,
"git checkout #{rev} &&
git reset --hard #{rev} &&
git clean -f &&
git submodule update --init &&
git submodule foreach git reset --hard HEAD &&
git submodule foreach git clean -f".gsub(/\n/, ''))
expect_success("Failed to clone local repository.")
ENV['RAILS_ENV'] = 'test'
@status.set "Updating local gems.\n"
`bundle install --without production --path ~/buffet-gems`
expect_success("Failed to bundle install on local machine.")
end
end
# Run the tests. There's lots of setup required before we can actually run
# them, including grabbing the latest version, installing gems, etc.
def run(dont_run_migrations, branch="master")
remote = 'origin'
update_local_dir remote, branch
@status.set "Copying Buffet to hosts."
sync_hosts @hosts
@status.set "Running bundle install on hosts."
setup_db unless dont_run_migrations or (not File.exists?(Settings.root_dir + "/setup_db"))
end
def get_failures
if @master
@master.get_failures_list
else
[]
end
end
private
#TODO: Take diagnostic output also.
# Abort the whole process when the last child command (backticks,
# system) exited non-zero.
def expect_success(failure_msg)
if $?.exitstatus != 0
puts failure_msg
exit 1
end
end
end
end
|
require "bugsnag/middleware/rack_request"
require "bugsnag/middleware/warden_user"
require "bugsnag/middleware/callbacks"
module Bugsnag
  # Rack middleware that configures Bugsnag defaults at boot and reports
  # any exception raised (or stashed in env["rack.exception"]) while
  # handling a request.
  class Rack
    def initialize(app)
      @app = app

      # Configure bugsnag rack defaults
      Bugsnag.configure do |config|
        # Try to set the release_stage automatically if it hasn't already been set
        config.release_stage ||= ENV["RACK_ENV"] if ENV["RACK_ENV"]

        # Try to set the project_root if it hasn't already been set, or show a warning if we can't
        unless config.project_root && !config.project_root.empty?
          if defined?(settings)
            config.project_root = settings.root
          else
            Bugsnag.warn("You should set your app's project_root (see https://bugsnag.com/docs/notifiers/ruby#project_root).")
          end
        end

        # Hook up rack-based notification middlewares
        config.middleware.use Bugsnag::Middleware::RackRequest if defined?(::Rack)
        config.middleware.use Bugsnag::Middleware::WardenUser if defined?(Warden)
      end
    end

    def call(env)
      # Set the request data for bugsnag middleware to use
      Bugsnag.set_request_data(:rack_env, env)

      begin
        response = @app.call(env)
      rescue Exception => raised
        # Notify bugsnag of rack exceptions
        Bugsnag.auto_notify(raised)

        # Re-raise the exception.
        # BUG FIX: the previous code evaluated `raised` (the exception
        # object) as the last expression instead of calling `raise`,
        # which swallowed the error and let the method fall through with
        # a nil response. A bare `raise` inside a rescue clause
        # re-raises the current exception.
        raise
      end

      # Notify bugsnag of exceptions stored in the env by other frameworks
      if env["rack.exception"]
        Bugsnag.auto_notify(env["rack.exception"])
      end

      response
    ensure
      # Clear per-request data after processing each request
      Bugsnag.clear_request_data
    end
  end
end
Fix typo: re-raise the rescued exception with a bare `raise`. Thanks, Travis!
require "bugsnag/middleware/rack_request"
require "bugsnag/middleware/warden_user"
require "bugsnag/middleware/callbacks"
module Bugsnag
# Rack middleware that configures Bugsnag defaults at boot and reports
# any exception raised (or stashed in env["rack.exception"]) while
# handling a request.
class Rack
def initialize(app)
@app = app
# Configure bugsnag rack defaults
Bugsnag.configure do |config|
# Try to set the release_stage automatically if it hasn't already been set
config.release_stage ||= ENV["RACK_ENV"] if ENV["RACK_ENV"]
# Try to set the project_root if it hasn't already been set, or show a warning if we can't
unless config.project_root && !config.project_root.empty?
# NOTE(review): `settings` presumably comes from Sinatra-style apps
# when this file is loaded inside one — confirm against callers.
if defined?(settings)
config.project_root = settings.root
else
Bugsnag.warn("You should set your app's project_root (see https://bugsnag.com/docs/notifiers/ruby#project_root).")
end
end
# Hook up rack-based notification middlewares
config.middleware.use Bugsnag::Middleware::RackRequest if defined?(::Rack)
config.middleware.use Bugsnag::Middleware::WardenUser if defined?(Warden)
end
end
def call(env)
# Set the request data for bugsnag middleware to use
Bugsnag.set_request_data(:rack_env, env)
begin
response = @app.call(env)
rescue Exception => raised
# Notify bugsnag of rack exceptions
Bugsnag.auto_notify(raised)
# Re-raise the exception (bare raise re-raises the current exception)
raise
end
# Notify bugsnag of rack exceptions
if env["rack.exception"]
Bugsnag.auto_notify(env["rack.exception"])
end
response
ensure
# Clear per-request data after processing each request
Bugsnag.clear_request_data
end
end
end
module Bundleup
# Command-line entry point: performs the upgrade, reports what changed,
# and asks whether to keep the new Gemfile.lock.
class CLI
include Console
# Run the interactive flow; the lockfile is restored in the ensure
# block unless the user confirmed the changes.
def run
puts \
"Please wait a moment while I upgrade your Gemfile.lock..."
committed = false
review_upgrades
review_pins
# Only prompt when at least one gem actually changed.
committed = upgrades.any? && confirm_commit
puts "Done!" if committed
ensure
# Also runs when an exception escapes the steps above.
restore_lockfile unless committed
end
private
# Print the table of pending upgrades, or a "nothing to do" notice.
def review_upgrades
if upgrades.any?
puts "\nThe following gem(s) will be updated:\n\n"
print_upgrades_table
else
ok("Nothing to update.")
end
end
# Print gems held back by a Gemfile pin, if any.
def review_pins
return if pins.empty?
puts "\nNote that the following gem(s) are being held back:\n\n"
print_pins_table
end
def confirm_commit
confirm("\nDo you want to apply these changes?")
end
# Undo the upgrade if one ran and actually modified Gemfile.lock.
def restore_lockfile
# @upgrade only exists once `upgrade` has been called at least once.
return unless defined?(@upgrade)
return unless upgrade.lockfile_changed?
upgrade.undo
puts "Your original Gemfile.lock has been restored."
end
# Lazily build the Upgrade, passing through command-line arguments.
def upgrade
@upgrade ||= Upgrade.new(ARGV)
end
def upgrades
upgrade.upgrades
end
def pins
upgrade.pins
end
def print_upgrades_table
rows = tableize(upgrades) do |g|
[g.name, g.old_version || "(new)", "→", g.new_version || "(removed)"]
end
# Colorize each row according to the kind of change.
upgrades.zip(rows).each do |g, row|
color(g.color, row)
end
end
def print_pins_table
rows = tableize(pins) do |g|
pin_operator, pin_version = g.pin.split(" ", 2)
reason = [":", "pinned at", pin_operator, pin_version]
[g.name, g.new_version, "→", g.newest_version, *reason]
end
puts rows.join("\n")
end
end
end
Tweak alignment
module Bundleup
# Command-line entry point: performs the upgrade, reports what changed,
# and asks whether to keep the new Gemfile.lock.
class CLI
include Console
# Run the interactive flow; the lockfile is restored in the ensure
# block unless the user confirmed the changes.
def run
puts \
"Please wait a moment while I upgrade your Gemfile.lock..."
committed = false
review_upgrades
review_pins
# Only prompt when at least one gem actually changed.
committed = upgrades.any? && confirm_commit
puts "Done!" if committed
ensure
# Also runs when an exception escapes the steps above.
restore_lockfile unless committed
end
private
# Print the table of pending upgrades, or a "nothing to do" notice.
def review_upgrades
if upgrades.any?
puts "\nThe following gem(s) will be updated:\n\n"
print_upgrades_table
else
ok("Nothing to update.")
end
end
# Print gems held back by a Gemfile pin, if any.
def review_pins
return if pins.empty?
puts "\nNote that the following gem(s) are being held back:\n\n"
print_pins_table
end
def confirm_commit
confirm("\nDo you want to apply these changes?")
end
# Undo the upgrade if one ran and actually modified Gemfile.lock.
def restore_lockfile
# @upgrade only exists once `upgrade` has been called at least once.
return unless defined?(@upgrade)
return unless upgrade.lockfile_changed?
upgrade.undo
puts "Your original Gemfile.lock has been restored."
end
# Lazily build the Upgrade, passing through command-line arguments.
def upgrade
@upgrade ||= Upgrade.new(ARGV)
end
def upgrades
upgrade.upgrades
end
def pins
upgrade.pins
end
def print_upgrades_table
rows = tableize(upgrades) do |g|
[g.name, g.old_version || "(new)", "→", g.new_version || "(removed)"]
end
# Colorize each row according to the kind of change.
upgrades.zip(rows).each do |g, row|
color(g.color, row)
end
end
def print_pins_table
rows = tableize(pins) do |g|
pin_operator, pin_version = g.pin.split(" ", 2)
# rjust(2) aligns one-character operators (<, >) with two-character
# ones (~>, >=) in the printed table.
reason = [":", "pinned at", pin_operator.rjust(2), pin_version]
[g.name, g.new_version, "→", g.newest_version, *reason]
end
puts rows.join("\n")
end
end
end
|
# coding: utf-8
# Thor-based command-line interface for the Retter static diary tool.
class Retter::Command < Thor
include Retter::Stationery
# Single-letter switches for the most common tasks.
map '-v' => :version,
'-e' => :edit,
'-p' => :preview,
'-o' => :open,
'-r' => :rebind,
'-b' => :bind
desc 'edit', 'Open $EDITOR. Write an article with Markdown.'
method_options date: :string, key: :string, silent: :boolean
# Open the entry matched by --date/--key (defaulting per
# detect_by_string) in the configured $EDITOR.
def edit(identifier = options[:date] || options[:key])
entry = entries.detect_by_string(identifier)
system "#{config.editor} #{entry.path}"
invoke_after :edit unless silent?
end
default_task :edit
desc 'preview', 'Preview the draft article (browser will open).'
method_options date: :string, key: :string
def preview(identifier = options[:date] || options[:key])
entry = entries.detect_by_string(identifier)
preprint.print entry
Launchy.open preprint.path
end
desc 'open', 'Open your (static) site top page (browser will open).'
def open
Launchy.open pages.index.path
end
desc 'rebind', 'Bind the draft article, re-generate all html pages.'
method_options silent: :boolean
def rebind
entries.commit_wip_entry!
pages.bind!
unless silent?
invoke_after :bind
invoke_after :rebind
end
end
desc 'bind', 'Re-bind the draft article, re-generate all html pages.'
method_options silent: :boolean
# `bind` behaves exactly like `rebind`.
alias_method :bind, :rebind
desc 'commit', "cd $RETTER_HOME && git add . && git commit -m 'Retter commit'"
method_options silent: :boolean
def commit
repository.open do |git|
say git.add(config.retter_home), :green
say git.commit_all('Retter commit'), :green
end
invoke_after :commit unless silent?
end
desc 'list', 'List retters'
def list
entries.each_with_index do |entry, n|
say "[e#{n}] #{entry.date}"
say " #{entry.articles.map(&:title).join(', ')}"
say
end
end
desc 'home', 'Open a new shell in $RETTER_HOME'
def home
Dir.chdir config.retter_home.to_s
system %(PS1="(retter) " #{config.shell})
say 'bye', :green
end
desc 'callback', 'Call a callback process only'
method_options after: :string
def callback
invoke_after options[:after].intern
end
desc 'new', 'Create a new site'
# NOTE(review): empty bodies — presumably these tasks are implemented
# elsewhere (e.g. a Thor hook) and declared here for help output; confirm.
def new; end
desc 'gen', 'Generate initial files'
def gen; end
desc 'usage', 'Show usage.'
def usage
say Retter::Command.usage, :green
end
desc 'version', 'Show version.'
def version
say "Retter version #{Retter::VERSION}"
end
private
# True when --silent was given.
def silent?
!options[:silent].nil?
end
# Run the configured after-hook for +name+: a Proc is instance_eval'd
# in this command's context, a Symbol invokes another Thor task.
def invoke_after(name)
callback = config.after(name)
return unless callback
case callback
when Proc
instance_eval &callback
when Symbol
invoke callback
else
raise ArgumentError
end
end
# Usage text printed by the `usage` task.
def self.usage
<<-EOM
Usage:
# Startup
cd /path/to/dir
retter new my_sweet_diary
echo "export EDITOR=vim" >> ~/.zshenv # retter requires $EDITOR.
echo "export RETTER_HOME=/path/to/my_sweet_diary" >> ~/.zshenv
. ~/.zshenv
# Write a article
retter # $EDITOR will open. Write an article with Markdown.
retter preview # Preview the draft article (browser will open).
# Publish
retter bind # bind the draft article, re-generate all html pages.
retter commit # shortcut of "cd $RETTER_HOME; git add .; git commit -m 'Retter commit'"
cd $RETTER_HOME
git push [remote] [branch] # or sftp, rsync, etc...
# Specific date
retter edit --date=20110101
retter preview --date=20110101
# Browse offline.
retter open # Open your (static) site top page (browser will open).
See also: https://github.com/hibariya/retter
EOM
end
end
Expand the usage documentation with numbered steps, --key examples, and sample `retter list` output
# coding: utf-8
# Thor-based command-line interface for the Retter static diary tool.
class Retter::Command < Thor
include Retter::Stationery
# Single-letter switches for the most common tasks.
map '-v' => :version,
'-e' => :edit,
'-p' => :preview,
'-o' => :open,
'-r' => :rebind,
'-b' => :bind
desc 'edit', 'Open $EDITOR. Write an article with Markdown.'
method_options date: :string, key: :string, silent: :boolean
# Open the entry matched by --date/--key (defaulting per
# detect_by_string) in the configured $EDITOR.
def edit(identifier = options[:date] || options[:key])
entry = entries.detect_by_string(identifier)
system "#{config.editor} #{entry.path}"
invoke_after :edit unless silent?
end
default_task :edit
desc 'preview', 'Preview the draft article (browser will open).'
method_options date: :string, key: :string
def preview(identifier = options[:date] || options[:key])
entry = entries.detect_by_string(identifier)
preprint.print entry
Launchy.open preprint.path
end
desc 'open', 'Open your (static) site top page (browser will open).'
def open
Launchy.open pages.index.path
end
desc 'rebind', 'Bind the draft article, re-generate all html pages.'
method_options silent: :boolean
def rebind
entries.commit_wip_entry!
pages.bind!
unless silent?
invoke_after :bind
invoke_after :rebind
end
end
desc 'bind', 'Alias of rebind'
method_options silent: :boolean
alias_method :bind, :rebind
desc 'commit', "cd $RETTER_HOME && git add . && git commit -m 'Retter commit'"
method_options silent: :boolean
def commit
repository.open do |git|
say git.add(config.retter_home), :green
say git.commit_all('Retter commit'), :green
end
invoke_after :commit unless silent?
end
desc 'list', 'List retters'
def list
entries.each_with_index do |entry, n|
say "[e#{n}] #{entry.date}"
say " #{entry.articles.map(&:title).join(', ')}"
say
end
end
desc 'home', 'Open a new shell in $RETTER_HOME'
def home
Dir.chdir config.retter_home.to_s
system %(PS1="(retter) " #{config.shell})
say 'bye', :green
end
desc 'callback', 'Call a callback process only'
method_options after: :string
def callback
invoke_after options[:after].intern
end
desc 'new', 'Create a new site'
# NOTE(review): empty bodies — presumably these tasks are implemented
# elsewhere (e.g. a Thor hook) and declared here for help output; confirm.
def new; end
desc 'gen', 'Generate initial files'
def gen; end
desc 'usage', 'Show usage.'
def usage
say Retter::Command.usage, :green
end
desc 'version', 'Show version.'
def version
say "Retter version #{Retter::VERSION}"
end
private
# True when --silent was given.
def silent?
!options[:silent].nil?
end
# Run the configured after-hook for +name+: a Proc is instance_eval'd
# in this command's context, a Symbol invokes another Thor task.
def invoke_after(name)
callback = config.after(name)
return unless callback
case callback
when Proc
# Parenthesized block-pass avoids the ambiguous-&-argument warning.
instance_eval(&callback)
when Symbol
invoke callback
else
raise ArgumentError
end
end
# Usage text printed by the `usage` task.
# FIX: corrected the user-facing typo "Write a article" -> "Write an article".
def self.usage
<<-EOM
Usage:
# 1. Startup
cd /path/to/dir
retter new my_sweet_diary
echo "export EDITOR=vim" >> ~/.zshenv # retter requires $EDITOR.
echo "export RETTER_HOME=/path/to/my_sweet_diary" >> ~/.zshenv
. ~/.zshenv
# 2. Write an article
retter # $EDITOR will open. Write an article with Markdown.
retter preview # Preview the draft article (browser will open).
# 3. Publish
retter bind # bind the draft article, re-generate all html pages.
retter commit # shortcut of "cd $RETTER_HOME; git add .; git commit -m 'Retter commit'"
cd $RETTER_HOME
git push [remote] [branch] # or sftp, rsync, etc...
# Specific date
retter edit --date=20110101
retter preview --date=20110101
# Specific file
retter edit --key=today.md
retter edit --key=20110101.md
retter preview --key=20110101.md
# Browse entry list.
retter list
output examples:
[e0] 2011-11-07
entry3 title
[e1] 2011-10-25
entry2 title
[e2] 2011-10-22
entry1 title
to edit by keyword. run following command:
retter edit e1
# Browse offline.
retter open # Open your (static) site top page (browser will open).
See also: https://github.com/hibariya/retter
EOM
end
end
|
# Functional-style helpers for hashes and arrays.
module Rize
module_function

# Map over the keys and values of a hash.
#
# @param hsh [Hash] The hash to be mapped over.
# @yield [key, value] A block which returns a [key, value] pair.
#
# @return [Hash] A new hash built from the pairs the block returns.
# @example Map over a hash
#   Rize.hmap({a: 1, b: 2}) { |k,v| [k.to_s, v + 1] }
#   { "a" => 2, "b" => 3 }
def hmap(hsh)
  hsh.map { |key, val| yield(key, val) }.to_h
end

# Map over the keys of a hash.
#
# @param hsh [Hash] The hash to be mapped over.
# @yield [key] A block that transforms each key.
#
# @return [Hash] A new hash with updated keys, and unchanged values.
# @example Map over a hash's keys.
#   Rize.hkeymap({a: 1, b: 2}, &:to_s)
#   { "a" => 1, "b" => 2 }
def hkeymap(hsh)
  hsh.map { |key, val| [yield(key), val] }.to_h
end

# Map over the values of a hash.
#
# @param hsh [Hash] The hash to be mapped over.
# @yield [value] A block that transforms each value.
#
# @return [Hash] A new hash with updated values, and unchanged keys.
# @example Map over a hash's values.
#   Rize.hvalmap({a: 1, b: 2}, &:to_s)
#   { a: "1", b: "2" }
def hvalmap(hsh)
  hsh.map { |key, val| [key, yield(val)] }.to_h
end

# Returns the first element of an array, or nil if the array is empty.
#
# @param arr [Array] The array from which we want the head.
#
# @return elem The first element of the array.
# @example
#   Rize.hd [1, 2, 3]  #=> 1
#   Rize.hd []         #=> nil
def hd(arr)
  arr.first
end

# Returns all but the first element of the array.
#
# @param arr [Array] The array from which we want the tail.
#
# @return tail [Array] All but the first element of the input.
# @example
#   Rize.tl [1, 2, 3]  #=> [2, 3]
#   Rize.tl []         #=> []
def tl(arr)
  arr[1..-1] || []
end

# The same as doing [block(a1, b1, c1), block(a2, b2, c2)]
# for arrays [a1, b1, c1] and [a2, b2, c2].
#
# Raises an ArgumentError if arrays are of unequal length.
#
# @param *arrs [Array] A variable-length number of arrays.
# @yield [*args] A block applied to the elements sharing an index.
#
# @return [Array] The block's results, one per index.
# @example Sum all the elements at the same position across arrays.
#   Rize.map_n([1, 2, 3], [4, 5, 6], [7, 8, 9]) { |*args| args.reduce(:+) }
#   [12, 15, 18]
# @example Arrays of unequal length raise.
#   Rize.map_n([1, 2], [1, 2, 3]) { |*args| args.reduce(:+) }
#   ArgumentError: Expected all inputs to be of length 2
def map_n(*arrs)
  required = arrs.first.length
  if arrs.any? { |arr| arr.length != required }
    raise ArgumentError, "Expected all inputs to be of length #{required}"
  end
  heads, *rest = arrs
  heads.zip(*rest).map { |tuple| yield(*tuple) }
end
end
Documentation improvements
# Functional-style helpers for hashes and arrays.
module Rize
module_function

# Map over the keys and values of a hash.
#
# @param hsh [Hash] The hash to be mapped over.
# @yield [key, value] A block which returns a [key, value] pair.
#
# @return [Hash] A new hash assembled from the pairs the block returns.
# @example Map over a hash
#   Rize.hmap({a: 1, b: 2}) { |k,v| [k.to_s, v + 1] }
#   { "a" => 2, "b" => 3 }
def hmap(hsh)
  pairs = hsh.map { |k, v| yield(k, v) }
  pairs.to_h
end

# Map over the keys of a hash.
#
# @param hsh [Hash] The hash to be mapped over.
# @yield [key] A block that transforms each key.
#
# @return [Hash] A new hash with updated keys, and unchanged values.
# @example Map over a hash's keys.
#   Rize.hkeymap({a: 1, b: 2}, &:to_s)
#   { "a" => 1, "b" => 2 }
def hkeymap(hsh)
  pairs = hsh.map { |k, v| [yield(k), v] }
  pairs.to_h
end

# Map over the values of a hash.
#
# @param hsh [Hash] The hash to be mapped over.
# @yield [value] A block that transforms each value.
#
# @return [Hash] A new hash with updated values, and unchanged keys.
# @example Map over a hash's values.
#   Rize.hvalmap({a: 1, b: 2}, &:to_s)
#   { a: "1", b: "2" }
def hvalmap(hsh)
  pairs = hsh.map { |k, v| [k, yield(v)] }
  pairs.to_h
end

# Returns the first element of an array, or nil if the array is empty.
#
# @param arr [Array] The array from which we want the head.
#
# @return elem The first element of the array.
# @example
#   Rize.hd [1, 2, 3]  #=> 1
#   Rize.hd []         #=> nil
def hd(arr)
  arr.first
end

# Returns all but the first element of the array.
#
# @param arr [Array] The array from which we want the tail.
#
# @return tail [Array] All but the first element of the input.
# @example
#   Rize.tl [1, 2, 3]  #=> [2, 3]
#   Rize.tl []         #=> []
def tl(arr)
  arr[1..-1] || []
end

# Map over multiple arrays at the same time.
#
# The same as doing [block(a1, b1, c1), block(a2, b2, c2)]
# for arrays [a1, b1, c1] and [a2, b2, c2].
#
# Raises an ArgumentError if arrays are of unequal length.
#
# @param *arrs [Array] A variable-length number of arrays.
# @yield [*args] A block applied to the elements sharing an index.
#
# @return [Array] The block's results, one per index.
# @example Sum all the elements at the same position across arrays.
#   Rize.map_n([1, 2, 3], [4, 5, 6], [7, 8, 9]) { |*args| args.reduce(:+) }
#   [12, 15, 18]
# @example Arrays of unequal length raise.
#   Rize.map_n([1, 2], [1, 2, 3]) { |*args| args.reduce(:+) }
#   ArgumentError: Expected all inputs to be of length 2
def map_n(*arrs)
  width = arrs[0].length
  unless arrs.all? { |arr| arr.length == width }
    raise ArgumentError, "Expected all inputs to be of length #{width}"
  end
  first, *others = arrs
  first.zip(*others).map { |row| yield(*row) }
end
end
|
require "active_support/core_ext/string"
require "cache_rocket/key"
require "cache_rocket/fragment"
require "cache_rocket/version"
module CacheRocket
include Key
ERROR_MISSING_KEY_OR_BLOCK = "You must either pass a `replace` key or a block to render_cached."
# Supports 5 options:
#
# 1. Single partial to replace.
# "inner" is the key name and "_inner.*" is the partial file name.
#
# render_cached "container", replace: "inner"
#
# 2. Array of partials to replace
#
# render_cached "container", replace: ["inner"]
#
# 3. Map of keys to replace with values
#
# render_cached "container", replace: {key_name: a_helper_method(object)}
#
# 4. Yield to a hash of keys to replace with values
#
# render_cached "container" do
# {key_name: a_helper_method(object)}
# end
#
# 5. Render a collection with Procs for replace values.
#
# render_cached "partial", collection: objects, replace: { key_name: ->(object){a_method(object)} }
#
def render_cached(partial, options = {})
# Pull our own options out so the remainder is forwarded to #render.
replace_hash = options.delete(:replace)
collection = options.delete(:collection)
fragment = Fragment.new(render(partial, options))
case replace_hash
when Hash
# Options 3 and 5: explicit key => value (or Proc) map.
fragment.replace replace_hash, collection
when NilClass
# Option 4: replacement values come from the block.
raise(ArgumentError, ERROR_MISSING_KEY_OR_BLOCK) unless block_given?
fragment.replace yield, collection
else
# Options 1 and 2: a partial name or list of names; the splat
# normalizes a single String into an Array.
key_array = *replace_hash
key_array.each do |key|
fragment.gsub! cache_replace_key(key), render(key, options)
end
end
fragment.to_s.html_safe
end
end
Remove local variable
require "active_support/core_ext/string"
require "cache_rocket/key"
require "cache_rocket/fragment"
require "cache_rocket/version"
module CacheRocket
  include Key

  ERROR_MISSING_KEY_OR_BLOCK = "You must either pass a `replace` key or a block to render_cached."

  # Renders a cached outer partial and substitutes its cache-replace
  # placeholders. Five usage forms are supported:
  #
  #   render_cached "container", replace: "inner"            # one partial
  #   render_cached "container", replace: ["inner"]          # several partials
  #   render_cached "container", replace: {key: value}       # literal values
  #   render_cached("container") { {key: value} }            # values from block
  #   render_cached "partial", collection: objects,
  #                 replace: { key: ->(o) { a_method(o) } }  # per-object Procs
  #
  def render_cached(partial, options = {})
    replacement = options.delete(:replace)
    collection  = options.delete(:collection)
    fragment    = Fragment.new(render(partial, options))

    if replacement.is_a?(Hash)
      # Keys mapped to literal values (or Procs when a collection is given).
      fragment.replace replacement, collection
    elsif replacement.nil?
      # Nothing passed via :replace — require the block form.
      raise(ArgumentError, ERROR_MISSING_KEY_OR_BLOCK) unless block_given?
      fragment.replace yield, collection
    else
      # A single partial name or an array of them.
      [*replacement].each do |key|
        fragment.gsub! cache_replace_key(key), render(key, options)
      end
    end

    fragment.to_s.html_safe
  end
end
|
require 'rgen/ecore/ecore'
require 'rgen/ecore/ecore_ext'
require 'rgen/serializer/opposite_reference_filter'
require 'rgen/serializer/qualified_name_provider'
module RText
class Language
# Creates an RText language description for the metamodel described by +root_epackage+
# Valid options include:
#
# :feature_provider
# a Proc which receives an EClass and should return a subset of this EClass's features
# this can be used to filter and/or reorder the features
# note that in most cases, this Proc will have to filter opposite references
# default: all features filtered using OppositeReferenceFilter
#
# :unlabled_arguments
# a Proc which receives an EClass and should return this EClass's feature names which are
# to be serialized without lables in the given order and before all labled arguments
# the features must also occur in :feature_provider if :feature_provider is provided
# if unlabled arguments are not part of the current class's features, they will be ignored
# default: no unlabled arguments
#
# :unquoted_arguments
# a Proc which receives an EClass and should return this EClass's string typed attribute
# names which are to be serialized without quotes. input data my still be quoted.
# the serializer will take care to insert quotes if the data is not a valid identifier
# the features must also occur in :feature_provider if :feature_provider is provided
# default: no unquoted arguments
#
# :labeled_containments
# a Proc which receives an EClass and should return this EClass's containment references
# which are to be serialized with a lable
# default: use lables when references can't be uniquely derived from contained element
#
# :argument_format_provider
# a Proc which receives an EAttribute and should return a format specification string
# (in sprintf syntax) which will be used by the serializer for integers and floats.
# default: if not present or the proc returns nil, then #to_s is used
#
# :reference_regexp
# a Regexp which is used by the tokenizer for identifying references
# it must only match at the beginning of a string, i.e. it should start with \A
# it must be built in a way that does not match other language constructs
# in particular it must not match identifiers (word characters not starting with a digit)
# identifiers can always be used where references are expected
# default: word characters separated by at least one slash (/)
#
# :identifier_provider
# a Proc which receives an element, its containing element, the feature through which the
# element is referenced and the index position of the reference within the feature's values.
# the latter 3 argumnets may be nil. it should return the element's identifier as a string.
# the identifier must be unique for the element unless "per_type_identifier" is set to true,
# in which case they must be unique for each element of the same type.
# identifiers may be relative to the given containing element, depending on the given
# feature and index position. in this case a globally unique
# identifier must be resonstructed by the proc specified using the :reference_qualifier option.
# if the containing element is nil, the identifier returned must be globally unique.
# default: identifiers calculated by QualifiedNameProvider
# in this case options to QualifiedNameProvider may be provided and will be passed through
#
# :per_type_identifier
# if set to true, identifiers may be reused for elements of different type
# default: false
#
# :reference_qualifier
# a Proc which receives RGen unresolved references and either a FragmentedModel or a ModelFragment.
# it should modify the unresolved references' targetIdentifiers to make them globally unique.
# the Proc is called for each fragment after it as been loaded and for the overall model.
# this can be used to qualify context specific references returned by the identifier provider.
# default: no reference qualifier
#
# :root_classes
# an Array of EClass objects representing classes which can be used on root level
# default: all classes which can't be contained by any class
#
# :line_number_attribute
# the name of the attribute which will be used to associate the line number with a model element
# default: no line number
#
# :file_name_attribute
# the name of the attribute which will be used to associate the file name with a model element
# default: no file name
#
# :fragment_ref_attribute
# the name of the attribute which will be used to associate a model fragment with a model element
#
# :comment_handler
# a Proc which will be invoked when a new element has been instantiated. receives
# the comment as a string, the comment kind (one of [:above, :eol, :unassociated]), the
# element and the environment to which the element has been added to.
# the environment may be nil. it should add the comment to the element and
# return true. if the element can take no comment, it should return false.
# default: no handling of comments
#
# :comment_provider
# a Proc which receives an element and should return this element's comment as a string or nil
# the Proc may also modify the element to remove information already part of the comment
# default: no comments
#
# :annotation_handler
# a Proc which will be invoked when a new element has been instantiated. receives
# the annotation as a string, the element and the environment to which the element has been added to.
# the environment may be nil. it may change the model or otherwise use the annotated information.
# if the element can take no annotation, it should return false, otherwise true.
# default: no handling of annotations
#
# :annotation_provider
# a Proc which receives an element and should return this element's annotation as a string or nil.
# the Proc may also modify the element to remove information already part of the annotation.
# default: no annotations
#
# :indent_string
# the string representing one indent, could be a tab or spaces
# default: 2 spaces
#
# :command_name_provider
# a Proc which receives an EClass object and should return an RText command name
# default: class name
#
# :backward_ref_attribute
# a Proc which receives an EClass object and should return the name of this class's
# feature which is used to represent backward references (i.e. for following the backward
# reference, the user must click on a value of this attribute)
# a value of nil means that the command name is used to follow the backward reference
# default: nil (command name)
#
# :enable_generics
# if set to true, generics (<value>) are allowed, otherwise forbidden
# default: false
#
def initialize(root_epackage, options={})
@root_epackage = root_epackage
@feature_provider = options[:feature_provider] ||
proc { |c| RGen::Serializer::OppositeReferenceFilter.call(c.eAllStructuralFeatures).
reject{|f| f.derived} }
@unlabled_arguments = options[:unlabled_arguments]
@unquoted_arguments = options[:unquoted_arguments]
@labeled_containments = options[:labeled_containments]
@argument_format_provider = options[:argument_format_provider]
@root_classes = options[:root_classes] || default_root_classes(root_epackage)
command_name_provider = options[:command_name_provider] || proc{|c| c.name}
setup_commands(root_epackage, command_name_provider)
@reference_regexp = options[:reference_regexp] || /\A\w*(\/\w*)+/
@identifier_provider = options[:identifier_provider] ||
proc { |element, context, feature, index|
@qualified_name_provider ||= RGen::Serializer::QualifiedNameProvider.new(options)
name_attribute = options[:attribute_name] || "name"
if element.is_a?(RGen::MetamodelBuilder::MMProxy) || element.respond_to?(name_attribute)
@qualified_name_provider.identifier(element)
else
nil
end
}
@reference_qualifier = options[:reference_qualifier] || proc{|urefs, model_or_fragment| }
@line_number_attribute = options[:line_number_attribute]
@file_name_attribute = options[:file_name_attribute]
@fragment_ref_attribute = options[:fragment_ref_attribute]
@comment_handler = options[:comment_handler]
@comment_provider = options[:comment_provider]
@annotation_handler = options[:annotation_handler]
@annotation_provider = options[:annotation_provider]
@indent_string = options[:indent_string] || " "
@per_type_identifier = options[:per_type_identifier]
@backward_ref_attribute = options[:backward_ref_attribute] || proc{|c| nil}
@generics_enabled = options[:enable_generics]
end
attr_reader :root_epackage
attr_reader :root_classes
attr_reader :reference_regexp
attr_reader :identifier_provider
attr_reader :reference_qualifier
attr_reader :line_number_attribute
attr_reader :file_name_attribute
attr_reader :fragment_ref_attribute
attr_reader :comment_handler
attr_reader :comment_provider
attr_reader :annotation_handler
attr_reader :annotation_provider
attr_reader :indent_string
attr_reader :per_type_identifier
attr_reader :backward_ref_attribute
attr_reader :generics_enabled
def class_by_command(command, context_class)
map = @class_by_command[context_class]
map && map[command]
end
def has_command(command)
@has_command[command]
end
def command_by_class(clazz)
@command_by_class[clazz]
end
def containments(clazz)
features(clazz).select{|f| f.is_a?(RGen::ECore::EReference) && f.containment}
end
def non_containments(clazz)
features(clazz).reject{|f| f.is_a?(RGen::ECore::EReference) && f.containment}
end
def labled_arguments(clazz)
non_containments(clazz) - unlabled_arguments(clazz)
end
def unlabled_arguments(clazz)
return [] unless @unlabled_arguments
uargs = @unlabled_arguments.call(clazz) || []
uargs.collect{|a| non_containments(clazz).find{|f| f.name == a}}.compact
end
def unquoted?(feature)
return false unless @unquoted_arguments
@unquoted_arguments.call(feature.eContainingClass).include?(feature.name)
end
def labeled_containment?(clazz, feature)
return false unless @labeled_containments
@labeled_containments.call(clazz).include?(feature.name)
end
def argument_format(feature)
@argument_format_provider && @argument_format_provider.call(feature)
end
def concrete_types(clazz)
([clazz] + clazz.eAllSubTypes).select{|c| !c.abstract}
end
# Returns the containment references of +clazz+ which can hold an element
# of +type+, considering every concrete subtype of each reference's target.
def containments_by_target_type(clazz, type)
  map = {}
  # index each containment reference under every concrete type it can hold
  containments(clazz).each do |r|
    concrete_types(r.eType).each {|t| (map[t] ||= []) << r}
  end
  # collect references registered for +type+ or any of its supertypes
  ([type]+type.eAllSuperTypes).inject([]){|m,t| m + (map[t] || []) }.uniq
end
def feature_by_name(clazz, name)
clazz.eAllStructuralFeatures.find{|f| f.name == name}
end
def file_name(element)
@file_name_attribute && element.respond_to?(@file_name_attribute) && element.send(@file_name_attribute)
end
def line_number(element)
@line_number_attribute && element.respond_to?(@line_number_attribute) && element.send(@line_number_attribute)
end
def fragment_ref(element)
@fragment_ref_attribute && element.respond_to?(@fragment_ref_attribute) && element.send(@fragment_ref_attribute)
end
private
# Builds the command lookup tables used by the parser/serializer:
#   @command_by_class  : instance class -> command name
#   @has_command       : command name -> true (fast existence check)
#   @class_by_command  : context instance class (or nil for root level) ->
#                        { command name -> instance class it instantiates }
# Raises if two child classes of the same context map to the same command name.
def setup_commands(root_epackage, command_name_provider)
  @class_by_command = {}
  @command_by_class = {}
  @has_command = {}
  root_epackage.eAllClasses.each do |c|
    next if c.abstract
    cmd = command_name_provider.call(c)
    @command_by_class[c.instanceClass] = cmd
    @has_command[cmd] = true
    clazz = c.instanceClass
    @class_by_command[clazz] ||= {}
    # every concrete type reachable via a containment reference of +c+
    # becomes a valid child command within the context of +c+
    containments(c).collect{|r|
      [r.eType] + r.eType.eAllSubTypes}.flatten.uniq.each do |t|
      next if t.abstract
      cmw = command_name_provider.call(t)
      raise "ambiguous command name #{cmw}" if @class_by_command[clazz][cmw]
      @class_by_command[clazz][cmw] = t.instanceClass
    end
  end
  # nil context: commands permitted at the root level of a document
  @class_by_command[nil] = {}
  @root_classes.each do |c|
    next if c.abstract
    cmw = command_name_provider.call(c)
    raise "ambiguous command name #{cmw}" if @class_by_command[nil][cmw]
    @class_by_command[nil][cmw] = c.instanceClass
  end
end
# Default root-level classes: every non-abstract class which no other class
# contains (i.e. with no opposite containment reference).
#
# Fix: the +root_package+ parameter was accepted but ignored — the body
# silently used the +root_epackage+ attr_reader instead. Use the parameter,
# which callers pass the same object for, making the method self-contained.
def default_root_classes(root_package)
  root_package.eAllClasses.select{|c| !c.abstract &&
    !c.eAllReferences.any?{|r| r.eOpposite && r.eOpposite.containment}}
end
def features(clazz)
@feature_provider.call(clazz)
end
# caching
[ :features,
:containments,
:non_containments,
:unlabled_arguments,
:labled_arguments,
:unquoted?,
:labeled_containment?,
:argument_format,
:concrete_types,
:containments_by_target_type,
:feature_by_name
].each do |m|
ms = m.to_s.sub('?','_')
module_eval <<-END
alias #{ms}_orig #{m}
def #{m}(*args)
@#{ms}_cache ||= {}
return @#{ms}_cache[args] if @#{ms}_cache.has_key?(args)
@#{ms}_cache[args] = #{ms}_orig(*args)
end
END
end
end
end
Add comment about unnecessary code
require 'rgen/ecore/ecore'
require 'rgen/ecore/ecore_ext'
require 'rgen/serializer/opposite_reference_filter'
require 'rgen/serializer/qualified_name_provider'
module RText
class Language
# Creates an RText language description for the metamodel described by +root_epackage+
# Valid options include:
#
# :feature_provider
# a Proc which receives an EClass and should return a subset of this EClass's features
# this can be used to filter and/or reorder the features
# note that in most cases, this Proc will have to filter opposite references
# default: all features filtered using OppositeReferenceFilter
#
# :unlabled_arguments
# a Proc which receives an EClass and should return this EClass's feature names which are
# to be serialized without lables in the given order and before all labled arguments
# the features must also occur in :feature_provider if :feature_provider is provided
# if unlabled arguments are not part of the current class's features, they will be ignored
# default: no unlabled arguments
#
# :unquoted_arguments
# a Proc which receives an EClass and should return this EClass's string typed attribute
# names which are to be serialized without quotes. input data my still be quoted.
# the serializer will take care to insert quotes if the data is not a valid identifier
# the features must also occur in :feature_provider if :feature_provider is provided
# default: no unquoted arguments
#
# :labeled_containments
# a Proc which receives an EClass and should return this EClass's containment references
# which are to be serialized with a lable
# default: use lables when references can't be uniquely derived from contained element
#
# :argument_format_provider
# a Proc which receives an EAttribute and should return a format specification string
# (in sprintf syntax) which will be used by the serializer for integers and floats.
# default: if not present or the proc returns nil, then #to_s is used
#
# :reference_regexp
# a Regexp which is used by the tokenizer for identifying references
# it must only match at the beginning of a string, i.e. it should start with \A
# it must be built in a way that does not match other language constructs
# in particular it must not match identifiers (word characters not starting with a digit)
# identifiers can always be used where references are expected
# default: word characters separated by at least one slash (/)
#
# :identifier_provider
# a Proc which receives an element, its containing element, the feature through which the
# element is referenced and the index position of the reference within the feature's values.
# the latter 3 argumnets may be nil. it should return the element's identifier as a string.
# the identifier must be unique for the element unless "per_type_identifier" is set to true,
# in which case they must be unique for each element of the same type.
# identifiers may be relative to the given containing element, depending on the given
# feature and index position. in this case a globally unique
# identifier must be resonstructed by the proc specified using the :reference_qualifier option.
# if the containing element is nil, the identifier returned must be globally unique.
# default: identifiers calculated by QualifiedNameProvider
# in this case options to QualifiedNameProvider may be provided and will be passed through
#
# :per_type_identifier
# if set to true, identifiers may be reused for elements of different type
# default: false
#
# :reference_qualifier
# a Proc which receives RGen unresolved references and either a FragmentedModel or a ModelFragment.
# it should modify the unresolved references' targetIdentifiers to make them globally unique.
# the Proc is called for each fragment after it as been loaded and for the overall model.
# this can be used to qualify context specific references returned by the identifier provider.
# default: no reference qualifier
#
# :root_classes
# an Array of EClass objects representing classes which can be used on root level
# default: all classes which can't be contained by any class
#
# :line_number_attribute
# the name of the attribute which will be used to associate the line number with a model element
# default: no line number
#
# :file_name_attribute
# the name of the attribute which will be used to associate the file name with a model element
# default: no file name
#
# :fragment_ref_attribute
# the name of the attribute which will be used to associate a model fragment with a model element
#
# :comment_handler
# a Proc which will be invoked when a new element has been instantiated. receives
# the comment as a string, the comment kind (one of [:above, :eol, :unassociated]), the
# element and the environment to which the element has been added to.
# the environment may be nil. it should add the comment to the element and
# return true. if the element can take no comment, it should return false.
# default: no handling of comments
#
# :comment_provider
# a Proc which receives an element and should return this element's comment as a string or nil
# the Proc may also modify the element to remove information already part of the comment
# default: no comments
#
# :annotation_handler
# a Proc which will be invoked when a new element has been instantiated. receives
# the annotation as a string, the element and the environment to which the element has been added to.
# the environment may be nil. it may change the model or otherwise use the annotated information.
# if the element can take no annotation, it should return false, otherwise true.
# default: no handling of annotations
#
# :annotation_provider
# a Proc which receives an element and should return this element's annotation as a string or nil.
# the Proc may also modify the element to remove information already part of the annotation.
# default: no annotations
#
# :indent_string
# the string representing one indent, could be a tab or spaces
# default: 2 spaces
#
# :command_name_provider
# a Proc which receives an EClass object and should return an RText command name
# default: class name
#
# :backward_ref_attribute
# a Proc which receives an EClass object and should return the name of this class's
# feature which is used to represent backward references (i.e. for following the backward
# reference, the user must click on a value of this attribute)
# a value of nil means that the command name is used to follow the backward reference
# default: nil (command name)
#
# :enable_generics
# if set to true, generics (<value>) are allowed, otherwise forbidden
# default: false
#
def initialize(root_epackage, options={})
@root_epackage = root_epackage
@feature_provider = options[:feature_provider] ||
proc { |c| RGen::Serializer::OppositeReferenceFilter.call(c.eAllStructuralFeatures).
reject{|f| f.derived} }
@unlabled_arguments = options[:unlabled_arguments]
@unquoted_arguments = options[:unquoted_arguments]
@labeled_containments = options[:labeled_containments]
@argument_format_provider = options[:argument_format_provider]
@root_classes = options[:root_classes] || default_root_classes(root_epackage)
command_name_provider = options[:command_name_provider] || proc{|c| c.name}
setup_commands(root_epackage, command_name_provider)
@reference_regexp = options[:reference_regexp] || /\A\w*(\/\w*)+/
@identifier_provider = options[:identifier_provider] ||
proc { |element, context, feature, index|
@qualified_name_provider ||= RGen::Serializer::QualifiedNameProvider.new(options)
name_attribute = options[:attribute_name] || "name"
if element.is_a?(RGen::MetamodelBuilder::MMProxy) || element.respond_to?(name_attribute)
@qualified_name_provider.identifier(element)
else
nil
end
}
@reference_qualifier = options[:reference_qualifier] || proc{|urefs, model_or_fragment| }
@line_number_attribute = options[:line_number_attribute]
@file_name_attribute = options[:file_name_attribute]
@fragment_ref_attribute = options[:fragment_ref_attribute]
@comment_handler = options[:comment_handler]
@comment_provider = options[:comment_provider]
@annotation_handler = options[:annotation_handler]
@annotation_provider = options[:annotation_provider]
@indent_string = options[:indent_string] || " "
@per_type_identifier = options[:per_type_identifier]
@backward_ref_attribute = options[:backward_ref_attribute] || proc{|c| nil}
@generics_enabled = options[:enable_generics]
end
attr_reader :root_epackage
attr_reader :root_classes
attr_reader :reference_regexp
attr_reader :identifier_provider
attr_reader :reference_qualifier
attr_reader :line_number_attribute
attr_reader :file_name_attribute
attr_reader :fragment_ref_attribute
attr_reader :comment_handler
attr_reader :comment_provider
attr_reader :annotation_handler
attr_reader :annotation_provider
attr_reader :indent_string
attr_reader :per_type_identifier
attr_reader :backward_ref_attribute
attr_reader :generics_enabled
def class_by_command(command, context_class)
map = @class_by_command[context_class]
map && map[command]
end
def has_command(command)
@has_command[command]
end
def command_by_class(clazz)
@command_by_class[clazz]
end
def containments(clazz)
features(clazz).select{|f| f.is_a?(RGen::ECore::EReference) && f.containment}
end
def non_containments(clazz)
features(clazz).reject{|f| f.is_a?(RGen::ECore::EReference) && f.containment}
end
def labled_arguments(clazz)
non_containments(clazz) - unlabled_arguments(clazz)
end
def unlabled_arguments(clazz)
return [] unless @unlabled_arguments
uargs = @unlabled_arguments.call(clazz) || []
uargs.collect{|a| non_containments(clazz).find{|f| f.name == a}}.compact
end
def unquoted?(feature)
return false unless @unquoted_arguments
@unquoted_arguments.call(feature.eContainingClass).include?(feature.name)
end
def labeled_containment?(clazz, feature)
return false unless @labeled_containments
@labeled_containments.call(clazz).include?(feature.name)
end
def argument_format(feature)
@argument_format_provider && @argument_format_provider.call(feature)
end
def concrete_types(clazz)
([clazz] + clazz.eAllSubTypes).select{|c| !c.abstract}
end
# Returns the containment references of +clazz+ which can hold an element
# of +type+, considering every concrete subtype of each reference's target.
def containments_by_target_type(clazz, type)
  map = {}
  # index each containment reference under every concrete type it can hold
  containments(clazz).each do |r|
    concrete_types(r.eType).each {|t| (map[t] ||= []) << r}
  end
  # the following line should be unnecessary with exception of "uniq"
  # (the map is keyed by concrete types, so supertype lookups normally miss)
  ([type]+type.eAllSuperTypes).inject([]){|m,t| m + (map[t] || []) }.uniq
end
def feature_by_name(clazz, name)
clazz.eAllStructuralFeatures.find{|f| f.name == name}
end
def file_name(element)
@file_name_attribute && element.respond_to?(@file_name_attribute) && element.send(@file_name_attribute)
end
def line_number(element)
@line_number_attribute && element.respond_to?(@line_number_attribute) && element.send(@line_number_attribute)
end
def fragment_ref(element)
@fragment_ref_attribute && element.respond_to?(@fragment_ref_attribute) && element.send(@fragment_ref_attribute)
end
private
# Builds the command lookup tables used by the parser/serializer:
#   @command_by_class  : instance class -> command name
#   @has_command       : command name -> true (fast existence check)
#   @class_by_command  : context instance class (or nil for root level) ->
#                        { command name -> instance class it instantiates }
# Raises if two child classes of the same context map to the same command name.
def setup_commands(root_epackage, command_name_provider)
  @class_by_command = {}
  @command_by_class = {}
  @has_command = {}
  root_epackage.eAllClasses.each do |c|
    next if c.abstract
    cmd = command_name_provider.call(c)
    @command_by_class[c.instanceClass] = cmd
    @has_command[cmd] = true
    clazz = c.instanceClass
    @class_by_command[clazz] ||= {}
    # every concrete type reachable via a containment reference of +c+
    # becomes a valid child command within the context of +c+
    containments(c).collect{|r|
      [r.eType] + r.eType.eAllSubTypes}.flatten.uniq.each do |t|
      next if t.abstract
      cmw = command_name_provider.call(t)
      raise "ambiguous command name #{cmw}" if @class_by_command[clazz][cmw]
      @class_by_command[clazz][cmw] = t.instanceClass
    end
  end
  # nil context: commands permitted at the root level of a document
  @class_by_command[nil] = {}
  @root_classes.each do |c|
    next if c.abstract
    cmw = command_name_provider.call(c)
    raise "ambiguous command name #{cmw}" if @class_by_command[nil][cmw]
    @class_by_command[nil][cmw] = c.instanceClass
  end
end
# Default root-level classes: every non-abstract class which no other class
# contains (i.e. with no opposite containment reference).
#
# Fix: the +root_package+ parameter was accepted but ignored — the body
# silently used the +root_epackage+ attr_reader instead. Use the parameter,
# which callers pass the same object for, making the method self-contained.
def default_root_classes(root_package)
  root_package.eAllClasses.select{|c| !c.abstract &&
    !c.eAllReferences.any?{|r| r.eOpposite && r.eOpposite.containment}}
end
def features(clazz)
@feature_provider.call(clazz)
end
# caching
[ :features,
:containments,
:non_containments,
:unlabled_arguments,
:labled_arguments,
:unquoted?,
:labeled_containment?,
:argument_format,
:concrete_types,
:containments_by_target_type,
:feature_by_name
].each do |m|
ms = m.to_s.sub('?','_')
module_eval <<-END
alias #{ms}_orig #{m}
def #{m}(*args)
@#{ms}_cache ||= {}
return @#{ms}_cache[args] if @#{ms}_cache.has_key?(args)
@#{ms}_cache[args] = #{ms}_orig(*args)
end
END
end
end
end
|
class Tmdb
  require 'net/http'
  require 'uri'
  require 'cgi'
  require 'yaml'
  # Merge conflict resolved: the 'development' side's require is kept,
  # because data_to_object below depends on DeepOpenStruct.
  require 'deepopenstruct'

  @@api_key = ""
  @@api_response = {}

  # The TMDb API key used for all requests. Must be set before any API call.
  def self.api_key
    @@api_key
  end

  def self.api_key=(key)
    @@api_key = key
  end

  # Base URL of the TMDb 2.1 API.
  def self.base_api_url
    "http://api.themoviedb.org/2.1/"
  end

  # Calls the given TMDb API +method+ with +data+, requesting YAML output.
  # Returns the parsed YAML body, or nil on non-200 responses or when the
  # API reports "Nothing found". Raises ArgumentError without an API key.
  def self.api_call(method, data, language = "en")
    raise ArgumentError, "Tmdb.api_key must be set before using the API" if(Tmdb.api_key.nil? || Tmdb.api_key.empty?)
    url = Tmdb.base_api_url + method + '/' + language + '/yaml/' + Tmdb.api_key + '/' + CGI::escape(data.to_s)
    response = Tmdb.get_url(url)
    if(response.code.to_i != 200)
      return nil
    end
    body = YAML::load(response.body)
    if( body.first.include?("Nothing found"))
      return nil
    else
      return body
    end
  end

  # Get a URL and return a response object, follow upto 'limit' re-directs on the way.
  # Fix: redirect exhaustion used to return +false+, which crashed callers
  # that invoke #code on the result; return a 404 response instead, matching
  # the other failure paths.
  def self.get_url(uri_str, limit = 10)
    return Net::HTTPBadRequest.new( '404', 404, "Not Found" ) if limit == 0
    begin
      response = Net::HTTP.get_response(URI.parse(uri_str))
    rescue SocketError, Errno::ENETDOWN
      # Network unreachable / DNS failure: synthesize a 404 rather than raise.
      response = Net::HTTPBadRequest.new( '404', 404, "Not Found" )
      return response
    end
    case response
    when Net::HTTPSuccess then response
    when Net::HTTPRedirection then get_url(response['location'], limit - 1)
    else
      Net::HTTPBadRequest.new( '404', 404, "Not Found" )
    end
  end

  # Wraps a raw API response in a DeepOpenStruct and decorates image and
  # cast entries with lazy fetch helpers (#data downloads the image body,
  # #bio loads the full cast record).
  def self.data_to_object(data)
    object = DeepOpenStruct.load(data)
    object.raw_data = data
    ["posters", "backdrops", "profile"].each do |image_array_name|
      if(object.respond_to?(image_array_name))
        image_array = object.send(image_array_name)
        image_array.each_index do |x|
          # unwrap the {image: {...}} nesting, then add a lazy #data reader
          image_array[x] = image_array[x].image
          image_array[x].instance_eval <<-EOD
            def self.data
              return Tmdb.get_url(self.url).body
            end
          EOD
        end
      end
      if(object.profile)
        object.profiles = object.profile
      end
    end
    unless(object.cast.nil?)
      object.cast.each_index do |x|
        object.cast[x].instance_eval <<-EOD
          def self.bio
            return TmdbCast.find(:id => self.id, :limit => 1)
          end
        EOD
      end
    end
    return object
  end
end
Fixed merge issue in master
class Tmdb
require 'net/http'
require 'uri'
require 'cgi'
require 'yaml'
require 'deepopenstruct'
@@api_key = ""
@@api_response = {}
# The TMDb API key used for all requests. Must be set before any API call.
def self.api_key
  @@api_key
end
# Sets the TMDb API key used for all subsequent requests.
def self.api_key=(key)
  @@api_key = key
end
# Base URL of the TMDb 2.1 API; all request URLs are built on top of this.
def self.base_api_url
  'http://api.themoviedb.org/2.1/'
end
# Calls the given TMDb API +method+ with +data+, requesting YAML output.
# Returns the parsed YAML body, or nil on non-200 responses or when the
# API reports "Nothing found". Raises ArgumentError without an API key.
def self.api_call(method, data, language = "en")
  if Tmdb.api_key.nil? || Tmdb.api_key.empty?
    raise ArgumentError, "Tmdb.api_key must be set before using the API"
  end
  url = "#{Tmdb.base_api_url}#{method}/#{language}/yaml/#{Tmdb.api_key}/#{CGI::escape(data.to_s)}"
  response = Tmdb.get_url(url)
  return nil unless response.code.to_i == 200
  parsed = YAML::load(response.body)
  parsed.first.include?("Nothing found") ? nil : parsed
end
# Get a URL and return a response object, follow upto 'limit' re-directs on the way.
#
# Fix: redirect exhaustion used to return +false+, which crashed callers
# (e.g. api_call) that invoke #code on the result; return a 404 response
# instead, matching the other failure paths.
def self.get_url(uri_str, limit = 10)
  return Net::HTTPBadRequest.new( '404', 404, "Not Found" ) if limit == 0
  begin
    response = Net::HTTP.get_response(URI.parse(uri_str))
  rescue SocketError, Errno::ENETDOWN
    # Network unreachable / DNS failure: synthesize a 404 rather than raise.
    response = Net::HTTPBadRequest.new( '404', 404, "Not Found" )
    return response
  end
  case response
  when Net::HTTPSuccess then response
  when Net::HTTPRedirection then get_url(response['location'], limit - 1)
  else
    Net::HTTPBadRequest.new( '404', 404, "Not Found" )
  end
end
# Wraps a raw API response in a DeepOpenStruct and decorates image and
# cast entries with lazy fetch helpers (#data downloads the image body,
# #bio loads the full cast record via TmdbCast).
def self.data_to_object(data)
  object = DeepOpenStruct.load(data)
  object.raw_data = data
  ["posters", "backdrops", "profile"].each do |image_array_name|
    if(object.respond_to?(image_array_name))
      image_array = object.send(image_array_name)
      image_array.each_index do |x|
        # unwrap the {image: {...}} nesting, then add a lazy #data reader
        image_array[x] = image_array[x].image
        image_array[x].instance_eval <<-EOD
          def self.data
            return Tmdb.get_url(self.url).body
          end
        EOD
      end
    end
    # expose the API's singular "profile" under the plural name too
    if(object.profile)
      object.profiles = object.profile
    end
  end
  unless(object.cast.nil?)
    object.cast.each_index do |x|
      object.cast[x].instance_eval <<-EOD
        def self.bio
          return TmdbCast.find(:id => self.id, :limit => 1)
        end
      EOD
    end
  end
  return object
end
end |
# Ruby2D::VERSION
# Gem version constant for Ruby2D.
module Ruby2D
  VERSION = "0.6.1"
end
Bump version to 0.7.0
# Ruby2D::VERSION
# Gem version constant for Ruby2D.
module Ruby2D
  VERSION = "0.7.0"
end
|
# Gem version constant for RubySL.
module RubySL
  VERSION = '1.0.1'
end
Bumped version to 1.0.2.
# Gem version constant for RubySL.
module RubySL
  VERSION = '1.0.2'
end
|
# Gem version constant for Cap2.
# NOTE(review): the constant is named +Version+, not the conventional
# SCREAMING_SNAKE +VERSION+; renaming would break external references,
# so it is kept as-is.
module Cap2
  Version = "0.2.1"
end
Bump version to 0.2.2
# Gem version constant for Cap2.
# NOTE(review): the constant is named +Version+, not the conventional
# SCREAMING_SNAKE +VERSION+; renaming would break external references,
# so it is kept as-is.
module Cap2
  Version = "0.2.2"
end
|
#!/bin/env ruby
# encoding: utf-8
#
# If not already created, make a CensorRule that hides personal information.
#
# Fix 1: the magic comment read "utf8", which Ruby does not accept as an
# encoding name — it must be "utf-8" for the Czech characters below to parse.
# Fix 2: removed a stray bare `CensorRule` expression that evaluated the
# constant and discarded the result (dead code).
regexp = '={67}\s*\n(?:.*?#.*?: ?.*\n){3}.*={67}'
rule = CensorRule.find_by_text(regexp)
if rule.nil?
  Rails.logger.info("Creating new censor rule: /#{regexp}/")
  CensorRule.create!(:text => regexp,
                     :allow_global => true,
                     :replacement => '[redacted]',
                     :regexp => true,
                     :last_edit_editor => 'system',
                     :last_edit_comment => 'Added automatically by ipvtheme')
end

# Body-specific rules: redact the requester's address in responses from MPO.
rules_data = [{:text => 'Vaše evidenční údaje / Your name and address: .*$',
               :replacement => 'Vaše evidenční údaje / Your name and address: [Adresa]',
               :regexp => true,
               :public_body => PublicBody.find_by_url_name('ministerstvo_prmyslu_a_obchodu'),
               :last_edit_editor => 'system',
               :last_edit_comment => 'Added automatically by ipvtheme'}]
rules_data.each do |d|
  rule = CensorRule.find_by_text(d[:text])
  if rule.nil?
    new_rule = CensorRule.new(d)
    # only persist rules attached to an info request or a public body
    if new_rule.info_request || new_rule.public_body
      new_rule.save!
    end
  end
end
Set encoding in files with Czech chars
#!/bin/env ruby
# encoding: utf-8
#
# If not already created, make a CensorRule that hides personal information.
#
# Fix: removed a stray bare `CensorRule` expression that evaluated the
# constant and discarded the result (dead code).
regexp = '={67}\s*\n(?:.*?#.*?: ?.*\n){3}.*={67}'
rule = CensorRule.find_by_text(regexp)
if rule.nil?
  Rails.logger.info("Creating new censor rule: /#{regexp}/")
  CensorRule.create!(:text => regexp,
                     :allow_global => true,
                     :replacement => '[redacted]',
                     :regexp => true,
                     :last_edit_editor => 'system',
                     :last_edit_comment => 'Added automatically by ipvtheme')
end

# Body-specific rules: redact the requester's address in responses from MPO.
rules_data = [{:text => 'Vaše evidenční údaje / Your name and address: .*$',
               :replacement => 'Vaše evidenční údaje / Your name and address: [Adresa]',
               :regexp => true,
               :public_body => PublicBody.find_by_url_name('ministerstvo_prmyslu_a_obchodu'),
               :last_edit_editor => 'system',
               :last_edit_comment => 'Added automatically by ipvtheme'}]
rules_data.each do |d|
  rule = CensorRule.find_by_text(d[:text])
  if rule.nil?
    new_rule = CensorRule.new(d)
    # only persist rules attached to an info request or a public body
    if new_rule.info_request || new_rule.public_body
      new_rule.save!
    end
  end
end
|
require 'chewy/query'
module Chewy
  # Adds a chainable search DSL to including classes (indexes and types)
  # by delegating query-building methods to a per-class Query subclass.
  module Search
    extend ActiveSupport::Concern

    included do
      # Expose every Chewy::Query scope method directly on the class;
      # each call starts from (or continues) the current scope via `all`.
      # NOTE(review): :preference added for parity with the later
      # revision of this concern — confirm Chewy::Query defines it.
      singleton_class.delegate :explain, :query_mode, :filter_mode, :post_filter_mode,
        :timeout, :limit, :offset, :highlight, :min_score, :rescore, :facets, :script_score,
        :boost_factor, :weight, :random_score, :field_value_factor, :decay, :aggregations,
        :suggest, :none, :strategy, :query, :filter, :post_filter, :boost_mode,
        :score_mode, :order, :reorder, :only, :types, :delete_all, :find, :total,
        :total_count, :total_entries, :unlimited, :script_fields, :track_scores, :preference,
        to: :all
    end

    module ClassMethods
      # Returns the innermost active scope, or a fresh query over self.
      def all
        query_class.scopes.last || query_class.new(self)
      end

      # Runs a raw query-string search over all indexes/types of the
      # current scope; `options` are passed through to the ES client.
      def search_string(query, options = {})
        options = options.merge(
          index: all._indexes.map(&:index_name),
          type: all._types.map(&:type_name),
          q: query
        )
        Chewy.client.search(options)
      end

      private

      # Lazily builds (and memoizes in the Query constant) a
      # Chewy::Query subclass carrying all relevant scope methods.
      def query_class
        @query_class ||= begin
          query_class = Class.new(Chewy::Query)
          if self < Chewy::Type
            # Types also inherit their index's scopes (minus their own).
            index_scopes = index.scopes - scopes
            delegate_scoped index, query_class, index_scopes
            delegate_scoped index, self, index_scopes
          end
          delegate_scoped self, query_class, scopes
          const_set('Query', query_class)
        end
      end

      # Defines each named scope on `destination`, forwarding to
      # `source` inside a `scoping` block so chained calls accumulate.
      def delegate_scoped(source, destination, methods)
        methods.each do |method|
          destination.class_eval do
            define_method method do |*args, &block|
              scoping { source.public_send(method, *args, &block) }
            end
          end
        end
      end
    end
  end
end
Add preference to search delegate
require 'chewy/query'
module Chewy
# Mixin giving indexes/types a chainable search DSL by delegating
# scope methods to a lazily-built Chewy::Query subclass.
module Search
extend ActiveSupport::Concern
included do
# Expose every Chewy::Query scope method directly on the class; each
# call starts from (or continues) the current scope via `all`.
singleton_class.delegate :explain, :query_mode, :filter_mode, :post_filter_mode,
:timeout, :limit, :offset, :highlight, :min_score, :rescore, :facets, :script_score,
:boost_factor, :weight, :random_score, :field_value_factor, :decay, :aggregations,
:suggest, :none, :strategy, :query, :filter, :post_filter, :boost_mode,
:score_mode, :order, :reorder, :only, :types, :delete_all, :find, :total,
:total_count, :total_entries, :unlimited, :script_fields, :track_scores, :preference,
to: :all
end
module ClassMethods
# Returns the innermost active scope, or a fresh query over self.
def all
query_class.scopes.last || query_class.new(self)
end
# Runs a raw query-string search over all indexes/types of the
# current scope; `options` are passed through to the ES client.
def search_string(query, options = {})
options = options.merge(
index: all._indexes.map(&:index_name),
type: all._types.map(&:type_name),
q: query
)
Chewy.client.search(options)
end
private
# Lazily builds (and memoizes in the Query constant) a Chewy::Query
# subclass carrying all relevant scope methods.
def query_class
@query_class ||= begin
query_class = Class.new(Chewy::Query)
if self < Chewy::Type
# Types also inherit their index's scopes (minus their own).
index_scopes = index.scopes - scopes
delegate_scoped index, query_class, index_scopes
delegate_scoped index, self, index_scopes
end
delegate_scoped self, query_class, scopes
const_set('Query', query_class)
end
end
# Defines each named scope on `destination`, forwarding to `source`
# inside a `scoping` block so chained calls accumulate.
def delegate_scoped(source, destination, methods)
methods.each do |method|
destination.class_eval do
define_method method do |*args, &block|
scoping { source.public_send(method, *args, &block) }
end
end
end
end
end
end
end
|
require "chh_moretext/version"
require "json"
require "open-uri"
module ChhMoretext
  class Base
    class << self
      # Fetches random sentences from the more.handlino.com JSON API.
      #
      # number - how many sentences to request (the API's `n` param).
      # limit  - per-sentence length constraint: a Range (min,max),
      #          an Integer (single limit), or anything else to omit it.
      #
      # Returns the "sentences" Array parsed from the JSON response.
      # NOTE: performs a network request via open-uri's Kernel#open.
      def fetch_moretext(number, limit)
        number = "n=#{number}"
        # case/when is clearer than the original if/elsif/else chain;
        # non-Range/Integer values fall through to nil (no limit sent).
        limit =
          case limit
          when Range   then "limit=#{limit.min},#{limit.max}"
          when Integer then "limit=#{limit}"
          end
        condition = limit.nil? ? "?#{number}" : "?#{number}&#{limit}"
        JSON(open("http://more.handlino.com/sentences.json#{condition}").read)["sentences"]
      end
    end
  end
end
require "chh_moretext/moretext"
refactor by using ternary operator
require "chh_moretext/version"
require "json"
require "open-uri"
module ChhMoretext
  class Base
    class << self
      # Retrieve random sentences from the more.handlino.com endpoint.
      # `number` maps to the API's `n` parameter; `limit` may be a
      # Range (min,max), an Integer, or anything else (no limit sent).
      # Returns the "sentences" Array from the parsed JSON payload.
      def fetch_moretext(number, limit)
        params = ["n=#{number}"]
        if limit.is_a?(Range)
          params << "limit=#{limit.min},#{limit.max}"
        elsif limit.is_a?(Integer)
          params << "limit=#{limit}"
        end
        condition = "?#{params.join('&')}"
        JSON(open("http://more.handlino.com/sentences.json#{condition}").read)["sentences"]
      end
    end
  end
end
require "chh_moretext/moretext"
|
#
# Author:: Dreamcat4 (<dreamcat4@gmail.com>)
# Copyright:: Copyright (c) 2009 OpsCode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path(File.join(File.dirname(__FILE__), "..", "..", "..", "spec_helper"))
# Specs for Provider::Group::Dscl#dscl — the thin wrapper that shells
# out to the OS X `dscl` utility via popen4.
describe Chef::Provider::Group::Dscl, "dscl" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@status = mock("Process::Status", :null_object => true, :exitstatus => 0)
@pid = mock("PID", :null_object => true)
@stdin = mock("STDIN", :null_object => true)
@stdout = mock("STDOUT", :null_object => true)
@stderr = mock("STDERR", :null_object => true)
# Simulate a successful run: stdout yields one newline, stderr empty.
@stdout.stub!(:each).and_yield("\n")
@stderr.stub!(:each).and_yield("")
@provider.stub!(:popen4).and_yield(@pid,@stdin,@stdout,@stderr).and_return(@status)
end
it "should run popen4 with the supplied array of arguments appended to the dscl command" do
@provider.should_receive(:popen4).with("dscl . -cmd /Path arg1 arg2")
@provider.dscl("cmd", "/Path", "arg1", "arg2")
end
it "should return an array of four elements - cmd, status, stdout, stderr" do
dscl_retval = @provider.dscl("cmd /Path args")
dscl_retval.should be_a_kind_of(Array)
dscl_retval.should == ["dscl . -cmd /Path args",@status,"\n",""]
end
end
describe Chef::Provider::Group::Dscl, "safe_dscl" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@status = mock("Process::Status", :null_object => true, :exitstatus => 0)
@provider.stub!(:dscl).and_return(["cmd", @status, "stdout", "stderr"])
end
it "should run dscl with the supplied cmd /Path args" do
@provider.should_receive(:dscl).with("cmd /Path args")
@provider.safe_dscl("cmd /Path args")
end
describe "with the dscl command returning a non zero exit status for a delete" do
before do
@status = mock("Process::Status", :null_object => true, :exitstatus => 1)
@provider.stub!(:dscl).and_return(["cmd", @status, "stdout", "stderr"])
end
it "should return an empty string of standard output for a delete" do
safe_dscl_retval = @provider.safe_dscl("delete /Path args")
safe_dscl_retval.should be_a_kind_of(String)
safe_dscl_retval.should == ""
end
it "should raise an exception for any other command" do
lambda { @provider.safe_dscl("cmd /Path arguments") }.should raise_error(Chef::Exceptions::Group)
end
end
describe "with the dscl command returning no such key" do
before do
# @status = mock("Process::Status", :null_object => true, :exitstatus => 0)
@provider.stub!(:dscl).and_return(["cmd", @status, "No such key: ", "stderr"])
end
it "should raise an exception" do
lambda { @provider.safe_dscl("cmd /Path arguments") }.should raise_error(Chef::Exceptions::Group)
end
end
describe "with the dscl command returning a zero exit status" do
it "should return the third array element, the string of standard output" do
safe_dscl_retval = @provider.safe_dscl("cmd /Path args")
safe_dscl_retval.should be_a_kind_of(String)
safe_dscl_retval.should == "stdout"
end
end
end
# Specs for get_free_gid — scanning `dscl list /Groups gid` output for
# the first unused gid at or above 200.
describe Chef::Provider::Group::Dscl, "get_free_gid" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
# Canned dscl output: gids 200 and 201 are taken, so 202 is free.
@provider.stub!(:safe_dscl).and_return("\naj 200\njt 201\n")
end
it "should run safe_dscl with list /Groups gid" do
@provider.should_receive(:safe_dscl).with("list /Groups gid")
@provider.get_free_gid
end
it "should return the first unused gid number on or above 200" do
@provider.get_free_gid.should equal(202)
end
it "should raise an exception when the search limit is exhausted" do
search_limit = 1
lambda { @provider.get_free_gid(search_limit) }.should raise_error(RuntimeError)
end
end
# Specs for gid_used? — membership check against `dscl list /Groups gid`.
describe Chef::Provider::Group::Dscl, "gid_used?" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
# Canned dscl output: only gid 500 is in use.
@provider.stub!(:safe_dscl).and_return("\naj 500\n")
end
it "should run safe_dscl with list /Groups gid" do
@provider.should_receive(:safe_dscl).with("list /Groups gid")
@provider.gid_used?(500)
end
it "should return true for a used gid number" do
@provider.gid_used?(500).should be_true
end
it "should return false for an unused gid number" do
@provider.gid_used?(501).should be_false
end
it "should return false if not given any valid gid number" do
@provider.gid_used?(nil).should be_false
end
end
describe Chef::Provider::Group::Dscl, "set_gid" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => 50,
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:get_free_gid).and_return(501)
@provider.stub!(:gid_used?).and_return(false)
@provider.stub!(:safe_dscl).and_return(true)
end
describe "with the new resource and a gid number which is already in use" do
before do
@provider.stub!(:gid_used?).and_return(true)
end
it "should raise an exception if the new resources gid is already in use" do
lambda { @provider.set_gid }.should raise_error(Chef::Exceptions::Group)
end
end
describe "with no gid number for the new resources" do
before do
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => nil,
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:get_free_gid).and_return(501)
@provider.stub!(:gid_used?).and_return(false)
@provider.stub!(:safe_dscl).and_return(true)
end
it "should run get_free_gid and return a valid, unused gid number" do
@provider.should_receive(:get_free_gid).and_return(501)
@provider.set_gid
end
end
describe "with blank gid number for the new resources" do
before do
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => "",
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:get_free_gid).and_return(501)
@provider.stub!(:gid_used?).and_return(false)
@provider.stub!(:safe_dscl).and_return(true)
end
it "should run get_free_gid and return a valid, unused gid number" do
@provider.should_receive(:get_free_gid).and_return(501)
@provider.set_gid
end
end
describe "with a valid gid number which is not already in use" do
it "should run safe_dscl with create /Groups/group PrimaryGroupID gid" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj PrimaryGroupID 50").and_return(true)
@provider.set_gid
end
end
end
# Specs for set_members — synchronising the OS X group's membership
# lists (GroupMembers GUIDs / GroupMembership usernames) via safe_dscl.
# Adds previously-missing coverage for the empty-members case.
describe Chef::Provider::Group::Dscl, "set_members" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:members => [ "all", "your", "base" ]
)
@current_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:members => [ "all", "your", "base" ]
)
@new_resource.stub!(:to_s).and_return("group[aj]")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.current_resource = @current_resource
@provider.stub!(:safe_dscl).and_return(true)
end
describe "with existing members in the current resource and append set to false in the new resource" do
before do
@new_resource.stub!(:members).and_return([])
@new_resource.stub!(:append).and_return(false)
@current_resource.stub!(:members).and_return(["all", "your", "base"])
end
it "should log an appropriate message" do
Chef::Log.should_receive(:debug).with("group[aj]: removing group members all your base")
@provider.set_members
end
it "should run safe_dscl with create /Groups/group GroupMembers to clear the Group's GUID list" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj GroupMembers ''").and_return(true)
@provider.set_members
end
it "should run safe_dscl with create /Groups/group GroupMembership to clear the Group's UID list" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj GroupMembership ''").and_return(true)
@provider.set_members
end
end
describe "with supplied members in the new resource" do
before do
@new_resource.stub!(:members).and_return(["all", "your", "base"])
@current_resource.stub!(:members).and_return([])
end
it "should log an appropriate debug message" do
Chef::Log.should_receive(:debug).with("group[aj]: setting group members all, your, base")
@provider.set_members
end
it "should run safe_dscl with append /Groups/group GroupMembership and group members all, your, base" do
@provider.should_receive(:safe_dscl).with("append /Groups/aj GroupMembership all your base").and_return(true)
@provider.set_members
end
end
# Edge case: nothing to add means no shell-out at all.
describe "with no members in the new resource" do
before do
@new_resource.stub!(:members).and_return([])
end
it "should not call safe_dscl" do
@provider.should_not_receive(:safe_dscl)
@provider.set_members
end
end
end
# Specs for load_current_resource — requires the /usr/bin/dscl binary.
describe Chef::Provider::Group::Dscl, "load_current_resource" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
# Default: pretend the dscl binary is absent.
File.stub!(:exists?).and_return(false)
end
it "should raise an error if the required binary /usr/bin/dscl doesn't exist" do
File.should_receive(:exists?).with("/usr/bin/dscl").and_return(false)
lambda { @provider.load_current_resource }.should raise_error(Chef::Exceptions::Group)
end
it "shouldn't raise an error if /usr/bin/dscl exists" do
File.stub!(:exists?).and_return(true)
lambda { @provider.load_current_resource }.should_not raise_error(Chef::Exceptions::Group)
end
end
# Specs for create_group — it should simply delegate to
# manage_group(false) so every attribute is (re)created.
describe Chef::Provider::Group::Dscl, "create_group" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:manage_group).and_return(true)
end
it "should run manage_group with manage=false to create all the group attributes" do
@provider.should_receive(:manage_group).with(false).and_return(true)
@provider.create_group
end
end
describe Chef::Provider::Group::Dscl, "manage_group" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => 50,
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@current_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:members => [ "all", "your", "base" ]
)
@provider.current_resource = @current_resource
@provider.stub!(:safe_dscl).and_return(true)
@provider.stub!(:set_gid).and_return(true)
@provider.stub!(:set_members).and_return(true)
end
fields = [:group_name,:gid,:members]
fields.each do |field|
it "should check for differences in #{field.to_s} between the current and new resources" do
@new_resource.should_receive(field)
@current_resource.should_receive(field)
@provider.manage_group
end
it "should manage the #{field} if it changed and the new resources #{field} is not null" do
@current_resource.stub!(field).and_return("oldval")
@new_resource.stub!(field).and_return("newval")
@current_resource.should_receive(field).once
@new_resource.should_receive(field).twice
@provider.manage_group
end
end
describe "with manage set to false" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => 50,
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@current_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:members => [ "all", "your", "base" ]
)
@provider.current_resource = @current_resource
@provider.stub!(:gid_used?).and_return(false)
@provider.stub!(:safe_dscl).and_return(true)
@provider.stub!(:set_gid).and_return(true)
@provider.stub!(:set_members).and_return(true)
@provider.stub!(:get_free_gid).and_return(501)
end
it "should run safe_dscl with create /Groups/group and with the new resources group name" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj").and_return(true)
@provider.manage_group(false)
end
it "should run safe_dscl with create /Groups/group Password * to set the groups password field" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj").and_return(true)
@provider.manage_group(false)
end
it "should run set_gid to set the gid number" do
@provider.should_receive(:set_gid).and_return(true)
@provider.manage_group(false)
end
it "should run set_members to set any group memberships" do
@provider.should_receive(:set_members).and_return(true)
@provider.manage_group(false)
end
end
end
# Specs for remove_group — deletes the group record via safe_dscl.
describe Chef::Provider::Group::Dscl, "remove_group" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj"
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:safe_dscl).and_return(true)
end
it "should run safe_dscl with delete /Groups/group and with the new resources group name" do
@provider.should_receive(:safe_dscl).with("delete /Groups/aj").and_return(true)
@provider.remove_group
end
end
Added test for dscl group provider with no members case
#
# Author:: Dreamcat4 (<dreamcat4@gmail.com>)
# Copyright:: Copyright (c) 2009 OpsCode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path(File.join(File.dirname(__FILE__), "..", "..", "..", "spec_helper"))
describe Chef::Provider::Group::Dscl, "dscl" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@status = mock("Process::Status", :null_object => true, :exitstatus => 0)
@pid = mock("PID", :null_object => true)
@stdin = mock("STDIN", :null_object => true)
@stdout = mock("STDOUT", :null_object => true)
@stderr = mock("STDERR", :null_object => true)
@stdout.stub!(:each).and_yield("\n")
@stderr.stub!(:each).and_yield("")
@provider.stub!(:popen4).and_yield(@pid,@stdin,@stdout,@stderr).and_return(@status)
end
it "should run popen4 with the supplied array of arguments appended to the dscl command" do
@provider.should_receive(:popen4).with("dscl . -cmd /Path arg1 arg2")
@provider.dscl("cmd", "/Path", "arg1", "arg2")
end
it "should return an array of four elements - cmd, status, stdout, stderr" do
dscl_retval = @provider.dscl("cmd /Path args")
dscl_retval.should be_a_kind_of(Array)
dscl_retval.should == ["dscl . -cmd /Path args",@status,"\n",""]
end
end
describe Chef::Provider::Group::Dscl, "safe_dscl" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@status = mock("Process::Status", :null_object => true, :exitstatus => 0)
@provider.stub!(:dscl).and_return(["cmd", @status, "stdout", "stderr"])
end
it "should run dscl with the supplied cmd /Path args" do
@provider.should_receive(:dscl).with("cmd /Path args")
@provider.safe_dscl("cmd /Path args")
end
describe "with the dscl command returning a non zero exit status for a delete" do
before do
@status = mock("Process::Status", :null_object => true, :exitstatus => 1)
@provider.stub!(:dscl).and_return(["cmd", @status, "stdout", "stderr"])
end
it "should return an empty string of standard output for a delete" do
safe_dscl_retval = @provider.safe_dscl("delete /Path args")
safe_dscl_retval.should be_a_kind_of(String)
safe_dscl_retval.should == ""
end
it "should raise an exception for any other command" do
lambda { @provider.safe_dscl("cmd /Path arguments") }.should raise_error(Chef::Exceptions::Group)
end
end
describe "with the dscl command returning no such key" do
before do
# @status = mock("Process::Status", :null_object => true, :exitstatus => 0)
@provider.stub!(:dscl).and_return(["cmd", @status, "No such key: ", "stderr"])
end
it "should raise an exception" do
lambda { @provider.safe_dscl("cmd /Path arguments") }.should raise_error(Chef::Exceptions::Group)
end
end
describe "with the dscl command returning a zero exit status" do
it "should return the third array element, the string of standard output" do
safe_dscl_retval = @provider.safe_dscl("cmd /Path args")
safe_dscl_retval.should be_a_kind_of(String)
safe_dscl_retval.should == "stdout"
end
end
end
describe Chef::Provider::Group::Dscl, "get_free_gid" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:safe_dscl).and_return("\naj 200\njt 201\n")
end
it "should run safe_dscl with list /Groups gid" do
@provider.should_receive(:safe_dscl).with("list /Groups gid")
@provider.get_free_gid
end
it "should return the first unused gid number on or above 200" do
@provider.get_free_gid.should equal(202)
end
it "should raise an exception when the search limit is exhausted" do
search_limit = 1
lambda { @provider.get_free_gid(search_limit) }.should raise_error(RuntimeError)
end
end
describe Chef::Provider::Group::Dscl, "gid_used?" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:safe_dscl).and_return("\naj 500\n")
end
it "should run safe_dscl with list /Groups gid" do
@provider.should_receive(:safe_dscl).with("list /Groups gid")
@provider.gid_used?(500)
end
it "should return true for a used gid number" do
@provider.gid_used?(500).should be_true
end
it "should return false for an unused gid number" do
@provider.gid_used?(501).should be_false
end
it "should return false if not given any valid gid number" do
@provider.gid_used?(nil).should be_false
end
end
describe Chef::Provider::Group::Dscl, "set_gid" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => 50,
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:get_free_gid).and_return(501)
@provider.stub!(:gid_used?).and_return(false)
@provider.stub!(:safe_dscl).and_return(true)
end
describe "with the new resource and a gid number which is already in use" do
before do
@provider.stub!(:gid_used?).and_return(true)
end
it "should raise an exception if the new resources gid is already in use" do
lambda { @provider.set_gid }.should raise_error(Chef::Exceptions::Group)
end
end
describe "with no gid number for the new resources" do
before do
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => nil,
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:get_free_gid).and_return(501)
@provider.stub!(:gid_used?).and_return(false)
@provider.stub!(:safe_dscl).and_return(true)
end
it "should run get_free_gid and return a valid, unused gid number" do
@provider.should_receive(:get_free_gid).and_return(501)
@provider.set_gid
end
end
describe "with blank gid number for the new resources" do
before do
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => "",
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:get_free_gid).and_return(501)
@provider.stub!(:gid_used?).and_return(false)
@provider.stub!(:safe_dscl).and_return(true)
end
it "should run get_free_gid and return a valid, unused gid number" do
@provider.should_receive(:get_free_gid).and_return(501)
@provider.set_gid
end
end
describe "with a valid gid number which is not already in use" do
it "should run safe_dscl with create /Groups/group PrimaryGroupID gid" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj PrimaryGroupID 50").and_return(true)
@provider.set_gid
end
end
end
describe Chef::Provider::Group::Dscl, "set_members" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:members => [ "all", "your", "base" ]
)
@current_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:members => [ "all", "your", "base" ]
)
@new_resource.stub!(:to_s).and_return("group[aj]")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.current_resource = @current_resource
@provider.stub!(:safe_dscl).and_return(true)
end
describe "with existing members in the current resource and append set to false in the new resource" do
before do
@new_resource.stub!(:members).and_return([])
@new_resource.stub!(:append).and_return(false)
@current_resource.stub!(:members).and_return(["all", "your", "base"])
end
it "should log an appropriate message" do
Chef::Log.should_receive(:debug).with("group[aj]: removing group members all your base")
@provider.set_members
end
it "should run safe_dscl with create /Groups/group GroupMembers to clear the Group's GUID list" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj GroupMembers ''").and_return(true)
@provider.set_members
end
it "should run safe_dscl with create /Groups/group GroupMembership to clear the Group's UID list" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj GroupMembership ''").and_return(true)
@provider.set_members
end
end
describe "with supplied members in the new resource" do
before do
@new_resource.stub!(:members).and_return(["all", "your", "base"])
@current_resource.stub!(:members).and_return([])
end
it "should log an appropriate debug message" do
Chef::Log.should_receive(:debug).with("group[aj]: setting group members all, your, base")
@provider.set_members
end
it "should run safe_dscl with append /Groups/group GroupMembership and group members all, your, base" do
@provider.should_receive(:safe_dscl).with("append /Groups/aj GroupMembership all your base").and_return(true)
@provider.set_members
end
end
# Edge case: an empty members list means nothing to add, so the
# provider must not shell out via safe_dscl at all.
describe "with no members in the new resource" do
before do
@new_resource.stub!(:members).and_return([])
end
it "should not call safe_dscl" do
@provider.should_not_receive(:safe_dscl)
@provider.set_members
end
end
end
describe Chef::Provider::Group::Dscl, "load_current_resource" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true, :group_name => "aj")
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
File.stub!(:exists?).and_return(false)
end
it "should raise an error if the required binary /usr/bin/dscl doesn't exist" do
File.should_receive(:exists?).with("/usr/bin/dscl").and_return(false)
lambda { @provider.load_current_resource }.should raise_error(Chef::Exceptions::Group)
end
it "shouldn't raise an error if /usr/bin/dscl exists" do
File.stub!(:exists?).and_return(true)
lambda { @provider.load_current_resource }.should_not raise_error(Chef::Exceptions::Group)
end
end
describe Chef::Provider::Group::Dscl, "create_group" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group", :null_object => true)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@provider.stub!(:manage_group).and_return(true)
end
it "should run manage_group with manage=false to create all the group attributes" do
@provider.should_receive(:manage_group).with(false).and_return(true)
@provider.create_group
end
end
describe Chef::Provider::Group::Dscl, "manage_group" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => 50,
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@current_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:members => [ "all", "your", "base" ]
)
@provider.current_resource = @current_resource
@provider.stub!(:safe_dscl).and_return(true)
@provider.stub!(:set_gid).and_return(true)
@provider.stub!(:set_members).and_return(true)
end
fields = [:group_name,:gid,:members]
fields.each do |field|
it "should check for differences in #{field.to_s} between the current and new resources" do
@new_resource.should_receive(field)
@current_resource.should_receive(field)
@provider.manage_group
end
it "should manage the #{field} if it changed and the new resources #{field} is not null" do
@current_resource.stub!(field).and_return("oldval")
@new_resource.stub!(field).and_return("newval")
@current_resource.should_receive(field).once
@new_resource.should_receive(field).twice
@provider.manage_group
end
end
describe "with manage set to false" do
before do
@node = mock("Chef::Node", :null_object => true)
@new_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:gid => 50,
:members => [ "root", "aj"]
)
@provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
@current_resource = mock("Chef::Resource::Group",
:null_object => true,
:group_name => "aj",
:members => [ "all", "your", "base" ]
)
@provider.current_resource = @current_resource
@provider.stub!(:gid_used?).and_return(false)
@provider.stub!(:safe_dscl).and_return(true)
@provider.stub!(:set_gid).and_return(true)
@provider.stub!(:set_members).and_return(true)
@provider.stub!(:get_free_gid).and_return(501)
end
it "should run safe_dscl with create /Groups/group and with the new resources group name" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj").and_return(true)
@provider.manage_group(false)
end
it "should run safe_dscl with create /Groups/group Password * to set the groups password field" do
@provider.should_receive(:safe_dscl).with("create /Groups/aj").and_return(true)
@provider.manage_group(false)
end
it "should run set_gid to set the gid number" do
@provider.should_receive(:set_gid).and_return(true)
@provider.manage_group(false)
end
it "should run set_members to set any group memberships" do
@provider.should_receive(:set_members).and_return(true)
@provider.manage_group(false)
end
end
end
describe Chef::Provider::Group::Dscl, "remove_group" do
  # Minimal fixture: only group_name matters for removal; safe_dscl is stubbed
  # so no real dscl command runs.
  before do
    @node = mock("Chef::Node", :null_object => true)
    @new_resource = mock("Chef::Resource::Group",
      :null_object => true,
      :group_name => "aj"
    )
    @provider = Chef::Provider::Group::Dscl.new(@node, @new_resource)
    @provider.stub!(:safe_dscl).and_return(true)
  end

  it "should run safe_dscl with delete /Groups/group and with the new resources group name" do
    @provider.should_receive(:safe_dscl).with("delete /Groups/aj").and_return(true)
    @provider.remove_group
  end
end
|
module Rworkflow
  # Redis-backed workflow engine: objects move through the states of a
  # Lifecycle, each state being a Redis list. Handles counters, transition
  # logging, and terminal-state cleanup/archival.
  class Flow
    STATE_SUCCESSFUL = :successful
    STATE_FAILED = :failed
    STATES_TERMINAL = [STATE_FAILED, STATE_SUCCESSFUL]
    STATES_FAILED = [STATE_FAILED]
    REDIS_NS = 'flow'
    WORKFLOW_REGISTRY = "#{REDIS_NS}:__registry"

    attr_accessor :id
    attr_reader :lifecycle

    # id: unique workflow identifier (see .generate_id). Wires up the Redis
    # hashes backing this flow and restores a persisted lifecycle, if any.
    def initialize(id)
      @id = id
      @redis_key = "#{REDIS_NS}:#{id}"
      @storage = RedisRds::Hash.new(@redis_key)
      @flow_data = RedisRds::Hash.new("#{@redis_key}__data")
      @processing = RedisRds::Hash.new("#{@redis_key}__processing")
      load_lifecycle
    end

    # Restores @lifecycle from its YAML-serialized form in @storage.
    def load_lifecycle
      serialized = @storage.get(:lifecycle)
      if serialized.present?
        structure = YAML.load(serialized)
        @lifecycle = Rworkflow::Lifecycle.unserialize(structure) if structure.present?
      end
    end
    private :load_lifecycle

    # Assigns and persists the lifecycle (YAML-serialized) in one step.
    def lifecycle=(new_lifecycle)
      @lifecycle = new_lifecycle
      @storage.set(:lifecycle, @lifecycle.serialize.to_yaml)
    end

    # A started flow is finished when no objects remain outside terminal states.
    def finished?
      return false unless self.started?
      total = get_counters.reduce(0) do |sum, pair|
        self.class.terminal?(pair[0]) ? sum : (sum + pair[1].to_i)
      end
      return total == 0
    end

    def status
      status = 'Running'
      status = (successful?) ? 'Finished' : 'Failed' if finished?
      return status
    end

    def created_at
      return @created_at ||= begin Time.at(self.get(:created_at, 0)) end
    end

    def started?
      return self.get(:start_time).present?
    end

    # Human-readable name; falls back to the id when none was set.
    def name
      return self.get(:name, @id)
    end

    def name=(name)
      return self.set(:name, name)
    end

    def start_time
      return Time.at(self.get(:start_time, 0))
    end

    def finish_time
      return Time.at(self.get(:finish_time, 0))
    end

    # Subclasses may override with a real estimate.
    def expected_duration
      return Float::INFINITY
    end

    def valid?
      return @lifecycle.present?
    end

    # Number of objects currently queued in the given state's list.
    def count(state)
      return get_list(state).size
    end

    # Returns cached counters (persisted at termination for public flows),
    # otherwise computes a fresh snapshot via #get_counters!.
    def get_counters
      counters = @storage.get(:counters)
      if counters.present?
        counters = begin
          YAML.load(counters)
        rescue => e # fix: was `rescue Exception`, which also swallowed signals/SystemExit
          Rails.logger.error("Error loading stored flow counters: #{e.message}")
          nil
        end
      end
      return counters || get_counters!
    end

    # fetches counters atomically
    def get_counters!
      counters = { processing: 0 }
      names = @lifecycle.states.keys
      # All reads happen inside one Redis MULTI so the snapshot is consistent.
      results = RedisRds::Object.connection.multi do
        self.class::STATES_TERMINAL.each { |name| get_list(name).size }
        names.each { |name| get_list(name).size }
        @processing.getall
      end
      (self.class::STATES_TERMINAL + names).each do |name|
        counters[name] = results.shift.to_i
      end
      counters[:processing] = results.shift.reduce(0) { |sum, pair| sum + pair.last.to_i }
      return counters
    end
    private :get_counters!

    # Pops up to the state's cardinality of objects from state_name's list,
    # yields the deserialized batch, and marks this fetcher as "processing"
    # for the duration. Unparseable payloads are routed to STATE_FAILED.
    def fetch(fetcher_id, state_name, &block)
      @processing.set(fetcher_id, 1)
      list = get_state_list(state_name)
      if list.present?
        failed = []
        cardinality = @lifecycle.states[state_name].cardinality
        cardinality = get(:start_count).to_i if cardinality == Rworkflow::Lifecycle::CARDINALITY_ALL_STARTED
        force_list_complete = @lifecycle.states[state_name].policy == Rworkflow::State::STATE_POLICY_WAIT
        raw_objects = list.lpop(cardinality, force_list_complete)
        if !raw_objects.empty?
          objects = raw_objects.map do |raw_object|
            begin
              YAML.load(raw_object)
            rescue StandardError => _
              failed << raw_object
              nil
            end
          end.compact
          @processing.set(fetcher_id, objects.size)
          if failed.present?
            push(failed, STATE_FAILED)
            Rails.logger.error("Failed to parse #{failed.size} in workflow #{self.id} for fetcher id #{fetcher_id} at state #{state_name}")
          end
          yield(objects) if block_given?
        end
      end
    ensure
      @processing.remove(fetcher_id)
      terminate if finished?
    end

    def list_objects(state_name, limit = -1)
      list = get_list(state_name)
      return list.get(0, limit).map {|object| YAML.load(object)}
    end

    # The Redis list behind state_name, or nil (logged) when the state is not
    # part of the lifecycle.
    def get_state_list(state_name)
      list = nil
      state = @lifecycle.states[state_name]
      if state.present?
        list = get_list(state_name)
      else
        Rails.logger.error("Tried accessing invalid state #{state_name} for workflow #{id}")
      end
      return list
    end
    private :get_state_list

    # Finalizes the flow exactly once (guarded by a Redis mutex): records the
    # finish time, runs post-processing, then either archives counters
    # (public flows) or deletes every backing key.
    def terminate
      mutex = RedisRds::Mutex.new(self.id)
      mutex.synchronize do
        if !self.cleaned_up?
          set(:finish_time, Time.now.to_i)
          post_process
          if self.public?
            counters = get_counters!
            counters[:processing] = 0 # Some worker might have increased the processing flag at that time even if there is no more jobs to be done
            @storage.setnx(:counters, counters.to_yaml)
            states_cleanup
          else
            self.cleanup
          end
        end
      end
    end

    # Hook for subclasses; runs during #terminate before cleanup.
    def post_process
    end
    protected :post_process

    def metadata_string
      return "Rworkflow: #{self.name}"
    end

    def cleaned_up?
      return states_list.all? { |name| !get_list(name).exists? }
    end

    def states_list
      states = self.class::STATES_TERMINAL
      states += @lifecycle.states.keys if valid?
      return states
    end

    # Moves objects from from_state through transition `name`; invalid
    # transitions are logged and the objects are dropped.
    def transition(from_state, name, objects)
      objects = Array.wrap(objects)
      to_state = begin
        lifecycle.transition(from_state, name)
      rescue Rworkflow::StateError => e
        Rails.logger.error("Error transitioning: #{e}")
        nil
      end
      if to_state.present?
        push(objects, to_state)
        log(from_state, name, objects.size)
      end
    end

    def logging?
      return get(:logging, false)
    end

    def log(from_state, transition, num_objects)
      logger.incrby("#{from_state}__#{transition}", num_objects.to_i) if logging?
    end

    def logger
      return @logger ||= begin
        RedisRds::Hash.new("#{@redis_key}__logger")
      end
    end

    # Transition counters as {state => {transition => count}}.
    def logs
      logs = {}
      if valid? && logging?
        state_transition_counters = logger.getall
        state_transition_counters.each do |state_transition, counter|
          state, transition = state_transition.split('__')
          logs[state] = {} unless logs.key?(state)
          logs[state][transition] = counter.to_i
        end
      end
      return logs
    end

    def get_state_cardinality(state_name)
      cardinality = @lifecycle.states[state_name].cardinality
      cardinality = self.get(:start_count).to_i if cardinality == Rworkflow::Lifecycle::CARDINALITY_ALL_STARTED
      return cardinality
    end

    # Flow-level key/value storage (values YAML-encoded).
    def set(key, value)
      @flow_data.set(key, value.to_yaml)
    end

    def get(key, default = nil)
      value = @flow_data.get(key)
      value = if value.nil? then default else YAML.load(value) end
      return value
    end

    def incr(key, value = 1)
      return @flow_data.incrby(key, value)
    end

    # Appends objects (YAML-encoded) to the given state's list; returns the
    # number of objects pushed.
    def push(objects, state)
      objects = Array.wrap(objects)
      return 0 if objects.empty?
      list = get_list(state)
      list.rpush(objects.map(&:to_yaml))
      return objects.size
    end
    private :push

    def get_list(name)
      return RedisRds::List.new("#{@redis_key}:lists:#{name}")
    end
    private :get_list

    # Deletes every Redis key backing this flow and unregisters it.
    def cleanup
      return if Rails.env.test?
      states_cleanup
      @processing.delete
      @storage.delete
      @flow_data.delete
      logger.delete if logging?
      self.class.unregister(self)
    end

    def states_cleanup
      return if Rails.env.test?
      states_list.each { |name| get_list(name).delete }
    end
    protected :states_cleanup

    # Seeds the flow with its initial objects and records the start markers.
    def start(objects)
      objects = Array.wrap(objects)
      self.set(:start_time, Time.now.to_i)
      self.set(:start_count, objects.size)
      push(objects, lifecycle.initial)
      log(lifecycle.initial, 'initial', objects.size)
    end

    def total_objects_processed(counters = nil)
      return (counters || get_counters).reduce(0) do |sum, pair|
        if self.class.terminal?(pair[0])
          sum + pair[1]
        else
          sum
        end
      end
    end

    def total_objects(counters = nil)
      return (counters || get_counters).reduce(0) { |sum, pair| sum + pair[1] }
    end

    def total_objects_failed(counters = nil)
      return (counters || get_counters).reduce(0) do |sum, pair|
        if self.class.failure?(pair[0])
          sum + pair[1]
        else
          sum
        end
      end
    end

    def successful?
      return false if !finished?
      return !failed?
    end

    def failed?
      return false if !finished?
      return total_objects_failed > 0
    end

    def public?
      return @public ||= begin self.get(:public, false) end
    end

    class << self
      # Creates, persists and registers a new flow with the given lifecycle.
      def create(lifecycle, name = '', options = {})
        id = generate_id(name)
        workflow = self.new(id)
        workflow.name = name
        workflow.lifecycle = lifecycle
        workflow.set(:created_at, Time.now.to_i)
        workflow.set(:public, options.fetch(:public, false))
        workflow.set(:logging, options.fetch(:logging, true))
        self.register(workflow)
        return workflow
      end

      # Ids embed the concrete class name so .load can reconstruct the type.
      def generate_id(workflow_name)
        now = Time.now.to_f
        random = Random.new(now)
        return "#{self.name}__#{workflow_name}__#{(Time.now.to_f * 1000).to_i}__#{random.rand(now).to_i}"
      end
      private :generate_id

      def cleanup(id)
        workflow = self.new(id)
        workflow.cleanup
      end

      def get_public_workflows
        return all.select { |flow| flow.public? }
      end

      # All registered flows whose class is this class or a subclass of it.
      def all
        return registry.all.select do |id|
          klass = read_flow_class(id)
          klass.present? && klass <= self
        end.map { |id| load(id) }
      end

      def load(id, klass = nil)
        workflow = nil
        klass = read_flow_class(id) if klass.nil?
        workflow = klass.new(id) if klass.respond_to?(:new)
        return workflow
      end

      # Recovers the flow class from the id's first '__'-delimited segment.
      def read_flow_class(id)
        klass = nil
        raw_class = id.split('__').first
        klass = begin
          raw_class.constantize
        rescue NameError => _
          Rails.logger.warn("Unknown flow class for workflow id #{id}")
          nil
        end if raw_class.present?
        return klass
      end
      private :read_flow_class

      def registered?(id)
        return registry.include?(id)
      end

      def register(workflow)
        registry.add(workflow.id)
      end

      def unregister(workflow)
        registry.remove(workflow.id)
      end

      def terminal?(state)
        return self::STATES_TERMINAL.include?(state)
      end

      def failure?(state)
        return self::STATES_FAILED.include?(state)
      end

      def registry
        RedisRds::Set.new(WORKFLOW_REGISTRY)
      end
    end
  end
end
Prevent rescuing non-standard exceptions
module Rworkflow
  # Redis-backed workflow engine: objects move through the states of a
  # Lifecycle, each state being a Redis list. Handles counters, transition
  # logging, and terminal-state cleanup/archival.
  class Flow
    STATE_SUCCESSFUL = :successful
    STATE_FAILED = :failed
    STATES_TERMINAL = [STATE_FAILED, STATE_SUCCESSFUL]
    STATES_FAILED = [STATE_FAILED]
    REDIS_NS = 'flow'
    WORKFLOW_REGISTRY = "#{REDIS_NS}:__registry"

    attr_accessor :id
    attr_reader :lifecycle

    # id: unique workflow identifier (see .generate_id). Wires up the Redis
    # hashes backing this flow and restores a persisted lifecycle, if any.
    def initialize(id)
      @id = id
      @redis_key = "#{REDIS_NS}:#{id}"
      @storage = RedisRds::Hash.new(@redis_key)
      @flow_data = RedisRds::Hash.new("#{@redis_key}__data")
      @processing = RedisRds::Hash.new("#{@redis_key}__processing")
      load_lifecycle
    end

    # Restores @lifecycle from its YAML-serialized form in @storage.
    def load_lifecycle
      serialized = @storage.get(:lifecycle)
      if serialized.present?
        structure = YAML.load(serialized)
        @lifecycle = Rworkflow::Lifecycle.unserialize(structure) if structure.present?
      end
    end
    private :load_lifecycle

    # Assigns and persists the lifecycle (YAML-serialized) in one step.
    def lifecycle=(new_lifecycle)
      @lifecycle = new_lifecycle
      @storage.set(:lifecycle, @lifecycle.serialize.to_yaml)
    end

    # A started flow is finished when no objects remain outside terminal states.
    def finished?
      return false unless self.started?
      total = get_counters.reduce(0) do |sum, pair|
        self.class.terminal?(pair[0]) ? sum : (sum + pair[1].to_i)
      end
      return total == 0
    end

    def status
      status = 'Running'
      status = (successful?) ? 'Finished' : 'Failed' if finished?
      return status
    end

    def created_at
      return @created_at ||= begin Time.at(self.get(:created_at, 0)) end
    end

    def started?
      return self.get(:start_time).present?
    end

    # Human-readable name; falls back to the id when none was set.
    def name
      return self.get(:name, @id)
    end

    def name=(name)
      return self.set(:name, name)
    end

    def start_time
      return Time.at(self.get(:start_time, 0))
    end

    def finish_time
      return Time.at(self.get(:finish_time, 0))
    end

    # Subclasses may override with a real estimate.
    def expected_duration
      return Float::INFINITY
    end

    def valid?
      return @lifecycle.present?
    end

    # Number of objects currently queued in the given state's list.
    def count(state)
      return get_list(state).size
    end

    # Returns cached counters (persisted at termination for public flows),
    # otherwise computes a fresh snapshot via #get_counters!.
    def get_counters
      counters = @storage.get(:counters)
      if counters.present?
        counters = begin
          YAML.load(counters)
        rescue => e
          Rails.logger.error("Error loading stored flow counters: #{e.message}")
          nil
        end
      end
      return counters || get_counters!
    end

    # fetches counters atomically
    def get_counters!
      counters = { processing: 0 }
      names = @lifecycle.states.keys
      # All reads happen inside one Redis MULTI so the snapshot is consistent.
      results = RedisRds::Object.connection.multi do
        self.class::STATES_TERMINAL.each { |name| get_list(name).size }
        names.each { |name| get_list(name).size }
        @processing.getall
      end
      (self.class::STATES_TERMINAL + names).each do |name|
        counters[name] = results.shift.to_i
      end
      counters[:processing] = results.shift.reduce(0) { |sum, pair| sum + pair.last.to_i }
      return counters
    end
    private :get_counters!

    # Pops up to the state's cardinality of objects from state_name's list,
    # yields the deserialized batch, and marks this fetcher as "processing"
    # for the duration. Unparseable payloads are routed to STATE_FAILED.
    def fetch(fetcher_id, state_name, &block)
      @processing.set(fetcher_id, 1)
      list = get_state_list(state_name)
      if list.present?
        failed = []
        cardinality = @lifecycle.states[state_name].cardinality
        cardinality = get(:start_count).to_i if cardinality == Rworkflow::Lifecycle::CARDINALITY_ALL_STARTED
        force_list_complete = @lifecycle.states[state_name].policy == Rworkflow::State::STATE_POLICY_WAIT
        raw_objects = list.lpop(cardinality, force_list_complete)
        if !raw_objects.empty?
          objects = raw_objects.map do |raw_object|
            begin
              YAML.load(raw_object)
            rescue StandardError => _
              failed << raw_object
              nil
            end
          end.compact
          @processing.set(fetcher_id, objects.size)
          if failed.present?
            push(failed, STATE_FAILED)
            Rails.logger.error("Failed to parse #{failed.size} in workflow #{self.id} for fetcher id #{fetcher_id} at state #{state_name}")
          end
          yield(objects) if block_given?
        end
      end
    ensure
      @processing.remove(fetcher_id)
      terminate if finished?
    end

    def list_objects(state_name, limit = -1)
      list = get_list(state_name)
      return list.get(0, limit).map {|object| YAML.load(object)}
    end

    # The Redis list behind state_name, or nil (logged) when the state is not
    # part of the lifecycle.
    def get_state_list(state_name)
      list = nil
      state = @lifecycle.states[state_name]
      if state.present?
        list = get_list(state_name)
      else
        Rails.logger.error("Tried accessing invalid state #{state_name} for workflow #{id}")
      end
      return list
    end
    private :get_state_list

    # Finalizes the flow exactly once (guarded by a Redis mutex): records the
    # finish time, runs post-processing, then either archives counters
    # (public flows) or deletes every backing key.
    def terminate
      mutex = RedisRds::Mutex.new(self.id)
      mutex.synchronize do
        if !self.cleaned_up?
          set(:finish_time, Time.now.to_i)
          post_process
          if self.public?
            counters = get_counters!
            counters[:processing] = 0 # Some worker might have increased the processing flag at that time even if there is no more jobs to be done
            @storage.setnx(:counters, counters.to_yaml)
            states_cleanup
          else
            self.cleanup
          end
        end
      end
    end

    # Hook for subclasses; runs during #terminate before cleanup.
    def post_process
    end
    protected :post_process

    def metadata_string
      return "Rworkflow: #{self.name}"
    end

    def cleaned_up?
      return states_list.all? { |name| !get_list(name).exists? }
    end

    def states_list
      states = self.class::STATES_TERMINAL
      states += @lifecycle.states.keys if valid?
      return states
    end

    # Moves objects from from_state through transition `name`; invalid
    # transitions are logged and the objects are dropped.
    def transition(from_state, name, objects)
      objects = Array.wrap(objects)
      to_state = begin
        lifecycle.transition(from_state, name)
      rescue Rworkflow::StateError => e
        Rails.logger.error("Error transitioning: #{e}")
        nil
      end
      if to_state.present?
        push(objects, to_state)
        log(from_state, name, objects.size)
      end
    end

    def logging?
      return get(:logging, false)
    end

    def log(from_state, transition, num_objects)
      logger.incrby("#{from_state}__#{transition}", num_objects.to_i) if logging?
    end

    def logger
      return @logger ||= begin
        RedisRds::Hash.new("#{@redis_key}__logger")
      end
    end

    # Transition counters as {state => {transition => count}}.
    def logs
      logs = {}
      if valid? && logging?
        state_transition_counters = logger.getall
        state_transition_counters.each do |state_transition, counter|
          state, transition = state_transition.split('__')
          logs[state] = {} unless logs.key?(state)
          logs[state][transition] = counter.to_i
        end
      end
      return logs
    end

    def get_state_cardinality(state_name)
      cardinality = @lifecycle.states[state_name].cardinality
      cardinality = self.get(:start_count).to_i if cardinality == Rworkflow::Lifecycle::CARDINALITY_ALL_STARTED
      return cardinality
    end

    # Flow-level key/value storage (values YAML-encoded).
    def set(key, value)
      @flow_data.set(key, value.to_yaml)
    end

    def get(key, default = nil)
      value = @flow_data.get(key)
      value = if value.nil? then default else YAML.load(value) end
      return value
    end

    def incr(key, value = 1)
      return @flow_data.incrby(key, value)
    end

    # Appends objects (YAML-encoded) to the given state's list; returns the
    # number of objects pushed.
    def push(objects, state)
      objects = Array.wrap(objects)
      return 0 if objects.empty?
      list = get_list(state)
      list.rpush(objects.map(&:to_yaml))
      return objects.size
    end
    private :push

    def get_list(name)
      return RedisRds::List.new("#{@redis_key}:lists:#{name}")
    end
    private :get_list

    # Deletes every Redis key backing this flow and unregisters it.
    def cleanup
      return if Rails.env.test?
      states_cleanup
      @processing.delete
      @storage.delete
      @flow_data.delete
      logger.delete if logging?
      self.class.unregister(self)
    end

    def states_cleanup
      return if Rails.env.test?
      states_list.each { |name| get_list(name).delete }
    end
    protected :states_cleanup

    # Seeds the flow with its initial objects and records the start markers.
    def start(objects)
      objects = Array.wrap(objects)
      self.set(:start_time, Time.now.to_i)
      self.set(:start_count, objects.size)
      push(objects, lifecycle.initial)
      log(lifecycle.initial, 'initial', objects.size)
    end

    def total_objects_processed(counters = nil)
      return (counters || get_counters).reduce(0) do |sum, pair|
        if self.class.terminal?(pair[0])
          sum + pair[1]
        else
          sum
        end
      end
    end

    def total_objects(counters = nil)
      return (counters || get_counters).reduce(0) { |sum, pair| sum + pair[1] }
    end

    def total_objects_failed(counters = nil)
      return (counters || get_counters).reduce(0) do |sum, pair|
        if self.class.failure?(pair[0])
          sum + pair[1]
        else
          sum
        end
      end
    end

    def successful?
      return false if !finished?
      return !failed?
    end

    def failed?
      return false if !finished?
      return total_objects_failed > 0
    end

    def public?
      return @public ||= begin self.get(:public, false) end
    end

    class << self
      # Creates, persists and registers a new flow with the given lifecycle.
      def create(lifecycle, name = '', options = {})
        id = generate_id(name)
        workflow = self.new(id)
        workflow.name = name
        workflow.lifecycle = lifecycle
        workflow.set(:created_at, Time.now.to_i)
        workflow.set(:public, options.fetch(:public, false))
        workflow.set(:logging, options.fetch(:logging, true))
        self.register(workflow)
        return workflow
      end

      # Ids embed the concrete class name so .load can reconstruct the type.
      def generate_id(workflow_name)
        now = Time.now.to_f
        random = Random.new(now)
        return "#{self.name}__#{workflow_name}__#{(Time.now.to_f * 1000).to_i}__#{random.rand(now).to_i}"
      end
      private :generate_id

      def cleanup(id)
        workflow = self.new(id)
        workflow.cleanup
      end

      def get_public_workflows
        return all.select { |flow| flow.public? }
      end

      # All registered flows whose class is this class or a subclass of it.
      def all
        return registry.all.select do |id|
          klass = read_flow_class(id)
          klass.present? && klass <= self
        end.map { |id| load(id) }
      end

      def load(id, klass = nil)
        workflow = nil
        klass = read_flow_class(id) if klass.nil?
        workflow = klass.new(id) if klass.respond_to?(:new)
        return workflow
      end

      # Recovers the flow class from the id's first '__'-delimited segment.
      def read_flow_class(id)
        klass = nil
        raw_class = id.split('__').first
        klass = begin
          raw_class.constantize
        rescue NameError => _
          Rails.logger.warn("Unknown flow class for workflow id #{id}")
          nil
        end if raw_class.present?
        return klass
      end
      private :read_flow_class

      def registered?(id)
        return registry.include?(id)
      end

      def register(workflow)
        registry.add(workflow.id)
      end

      def unregister(workflow)
        registry.remove(workflow.id)
      end

      def terminal?(state)
        return self::STATES_TERMINAL.include?(state)
      end

      def failure?(state)
        return self::STATES_FAILED.include?(state)
      end

      def registry
        RedisRds::Set.new(WORKFLOW_REGISTRY)
      end
    end
  end
end
|
# -*- coding: utf-8 -*-
module StarChat
  # A chat channel stored in Redis. Channel names are normalized (stripped,
  # control characters removed, capped at 32 chars). A channel keeps ordered
  # lists of message ids and topic ids.
  class Channel
    # Returns the channel when its Redis hash exists, otherwise nil.
    def self.find(name)
      key = ['channels', name]
      if RedisDB.exec(:exists, key)
        # values = RedisDB.exec(:hmget)
        # params = {}
        return new(name)
      end
      nil
    end

    def self.all
      RedisDB.exec(:smembers, ['channels']).map do |name|
        Channel.find(name)
      end
    end

    # Reader generated here; the writer below normalizes the value.
    # (fix: removed a hand-written `def name` that merely duplicated this)
    attr_reader :name

    # Normalizes the name: strip, drop control chars, truncate to 32 chars.
    def name=(name)
      @name = name.strip.gsub(/[[:cntrl:]]/, '')[0, 32]
    end

    # Id of the most recently set topic, or nil when none exists.
    def last_topic_id
      topic_id = RedisDB.exec(:lindex, ['channels', name, 'topics'], -1)
      topic_id ? topic_id.to_i : nil
    end

    def initialize(name, options = {})
      options = {
      }.merge(options)
      self.name = name
    end

    # Lazily enumerates a channel's messages in batches of 100.
    class ChannelMessages
      include Enumerable

      def initialize(channel_name)
        @key = ['channels', channel_name, 'messages']
      end

      def each
        idx = 0
        loop do
          # TODO: Lock!
          messages = Message.find_by_list(@key, idx, 100)
          break if messages.size == 0
          messages.each do |message|
            yield message
          end
          idx += 100
        end
      end
    end

    # With idx/len returns that slice; otherwise returns a lazy enumerable
    # over all of the channel's messages.
    def messages(idx = nil, len = nil)
      if idx and len
        Message.find_by_list(['channels', name, 'messages'], idx, len)
      else
        ChannelMessages.new(name)
      end
    end

    # Binary-searches the (time-ordered) message list for the span's bounds
    # and returns the messages between them.
    def messages_by_time_span(start_time, end_time)
      redis_key = ['channels', name, 'messages']
      len = RedisDB.exec(:llen, redis_key)
      idx1 = BinarySearch.search(start_time, 0, len) do |i|
        Message.find_by_list(redis_key, i, 1)[0].created_at
      end
      idx2 = BinarySearch.search(end_time, 0, len) do |i|
        Message.find_by_list(redis_key, i, 1)[0].created_at
      end
      Message.find_by_list(redis_key, idx1, idx2 - idx1)
    end

    # Persists a message and appends its id to the channel's message list.
    def post_message(user, body, created_at = Time.now.to_i)
      # TODO: lock?
      message = Message.new(user.name,
                            body,
                            created_at: created_at,
                            channel_name: name).save
      RedisDB.exec(:rpush,
                   ['channels', name, 'messages'],
                   message.id)
      message
    end

    # Persists a topic and appends its id to the channel's topic list.
    def update_topic(user, body, created_at = Time.now.to_i)
      # TODO: lock?
      topic = Topic.new(user.name,
                        self.name,
                        body,
                        created_at: created_at).save
      RedisDB.exec(:rpush,
                   ['channels', name, 'topics'],
                   topic.id)
      topic
    end

    # Users subscribed to this channel. (Block param renamed so it no longer
    # shadows the #name reader used in the Redis key.)
    def users
      RedisDB.exec(:smembers, ['channels', name, 'users']).map do |member_name|
        User.find(member_name)
      end
    end

    # Serializes as {name:, topic:}; topic is included only when one exists.
    def to_json(*args)
      hash = {
        name: name
      }
      if last_topic_id
        topic = Topic.find(last_topic_id)
        hash[:topic] = topic.to_h
      end
      hash.to_json(*args)
    end

    # Registers the channel in the global 'channels' set and creates its hash.
    def save
      raise 'The name should not be empty' if name.empty?
      RedisDB.multi do
        RedisDB.exec(:sadd, ['channels'], name)
        RedisDB.exec(:hmset, ['channels', name], 'dummy', 'dummy')
      end
      self
    end
  end
end
Added a comment
# -*- coding: utf-8 -*-
module StarChat
  # A chat channel stored in Redis. Channel names are normalized (stripped,
  # control characters removed, capped at 32 chars). A channel keeps ordered
  # lists of message ids and topic ids.
  class Channel
    # Returns the channel when its Redis hash exists, otherwise nil.
    def self.find(name)
      key = ['channels', name]
      if RedisDB.exec(:exists, key)
        # values = RedisDB.exec(:hmget)
        # params = {}
        return new(name)
      end
      nil
    end

    def self.all
      RedisDB.exec(:smembers, ['channels']).map do |name|
        Channel.find(name)
      end
    end

    # Reader generated here; the writer below normalizes the value.
    # (fix: removed a hand-written `def name` that merely duplicated this)
    attr_reader :name

    # Normalizes the name: strip, drop control chars, truncate to 32 chars.
    def name=(name)
      @name = name.strip.gsub(/[[:cntrl:]]/, '')[0, 32]
    end

    # TODO: Rename 'current_topic_id'
    # Id of the most recently set topic, or nil when none exists.
    def last_topic_id
      topic_id = RedisDB.exec(:lindex, ['channels', name, 'topics'], -1)
      topic_id ? topic_id.to_i : nil
    end

    def initialize(name, options = {})
      options = {
      }.merge(options)
      self.name = name
    end

    # Lazily enumerates a channel's messages in batches of 100.
    class ChannelMessages
      include Enumerable

      def initialize(channel_name)
        @key = ['channels', channel_name, 'messages']
      end

      def each
        idx = 0
        loop do
          # TODO: Lock!
          messages = Message.find_by_list(@key, idx, 100)
          break if messages.size == 0
          messages.each do |message|
            yield message
          end
          idx += 100
        end
      end
    end

    # With idx/len returns that slice; otherwise returns a lazy enumerable
    # over all of the channel's messages.
    def messages(idx = nil, len = nil)
      if idx and len
        Message.find_by_list(['channels', name, 'messages'], idx, len)
      else
        ChannelMessages.new(name)
      end
    end

    # Binary-searches the (time-ordered) message list for the span's bounds
    # and returns the messages between them.
    def messages_by_time_span(start_time, end_time)
      redis_key = ['channels', name, 'messages']
      len = RedisDB.exec(:llen, redis_key)
      idx1 = BinarySearch.search(start_time, 0, len) do |i|
        Message.find_by_list(redis_key, i, 1)[0].created_at
      end
      idx2 = BinarySearch.search(end_time, 0, len) do |i|
        Message.find_by_list(redis_key, i, 1)[0].created_at
      end
      Message.find_by_list(redis_key, idx1, idx2 - idx1)
    end

    # Persists a message and appends its id to the channel's message list.
    def post_message(user, body, created_at = Time.now.to_i)
      # TODO: lock?
      message = Message.new(user.name,
                            body,
                            created_at: created_at,
                            channel_name: name).save
      RedisDB.exec(:rpush,
                   ['channels', name, 'messages'],
                   message.id)
      message
    end

    # Persists a topic and appends its id to the channel's topic list.
    def update_topic(user, body, created_at = Time.now.to_i)
      # TODO: lock?
      topic = Topic.new(user.name,
                        self.name,
                        body,
                        created_at: created_at).save
      RedisDB.exec(:rpush,
                   ['channels', name, 'topics'],
                   topic.id)
      topic
    end

    # Users subscribed to this channel. (Block param renamed so it no longer
    # shadows the #name reader used in the Redis key.)
    def users
      RedisDB.exec(:smembers, ['channels', name, 'users']).map do |member_name|
        User.find(member_name)
      end
    end

    # Serializes as {name:, topic:}; topic is included only when one exists.
    def to_json(*args)
      hash = {
        name: name
      }
      if last_topic_id
        topic = Topic.find(last_topic_id)
        hash[:topic] = topic.to_h
      end
      hash.to_json(*args)
    end

    # Registers the channel in the global 'channels' set and creates its hash.
    def save
      raise 'The name should not be empty' if name.empty?
      RedisDB.multi do
        RedisDB.exec(:sadd, ['channels'], name)
        RedisDB.exec(:hmset, ['channels', name], 'dummy', 'dummy')
      end
      self
    end
  end
end
|
added engine to auto-insert asset folders into pipeline
module ChosenRails
  # Rails engine wrapper: declaring an Engine lets the host application pick
  # up this gem's asset folders through the asset pipeline.
  class Engine < ::Rails::Engine
    # fix: removed stray `puts "ChosenRails loaded"` debug output that ran at
    # load time and polluted stdout of every host process.
  end
end
# Default options for a client. Override whatever you need to for
# your specific implementation
module Saddle::Options
  # Bundle every class-level default into the options hash consumed by the
  # client plumbing.
  def default_options
    {
      host: host,
      port: port,
      use_ssl: use_ssl,
      request_style: request_style,
      num_retries: num_retries,
      timeout: timeout,
      additional_middlewares: additional_middlewares,
      stubs: stubs,
    }
  end

  # Default host this client talks to.
  def host
    'localhost'
  end

  # Default port this client talks to.
  def port
    80
  end

  # Whether requests default to SSL.
  def use_ssl
    false
  end

  # POST/PUT body encoding; one of :json or :urlencoded.
  def request_style
    :json
  end

  # How many times a failed request is retried.
  def num_retries
    3
  end

  # Per-request timeout, in seconds.
  def timeout
    30
  end

  # Override to push extra middleware onto the request stack, e.g.:
  #
  #   require 'my_middleware'
  #   def self.default_middleware
  #     [MyMiddleware]
  #   end
  #
  def additional_middlewares
    []
  end

  # Stubs handed to the Typhoeus adapter when testing.
  def stubs
    nil
  end
end
additional middleware can be added one at a time
# Default options for a client. Override whatever you need to for
# your specific implementation
module Saddle::Options
  # Construct our default options, based upon the class methods
  def default_options
    {
      :host => host,
      :port => port,
      :use_ssl => use_ssl,
      :request_style => request_style,
      :num_retries => num_retries,
      :timeout => timeout,
      :additional_middlewares => @@additional_middlewares,
      :stubs => stubs,
    }
  end

  # The default host for this client
  def host
    'localhost'
  end

  # The default port for this client
  def port
    80
  end

  # Should this client use SSL by default?
  def use_ssl
    false
  end

  # The POST/PUT style for this client
  # options are [:json, :urlencoded]
  def request_style
    :json
  end

  # Default number of retries per request
  def num_retries
    3
  end

  # Default timeout per request (in seconds)
  def timeout
    30
  end

  # Use this to add additional middleware to the request stack
  # ex:
  #   add_middleware({
  #     :klass => MyMiddleware,
  #     :args => [arg1, arg2],
  #   })
  # end
  #
  ###
  # NOTE(review): @@additional_middlewares is a class variable on this shared
  # module, so middleware registered by any includer is visible to every
  # client that includes Saddle::Options — confirm this global registry is
  # intended rather than per-client state.
  @@additional_middlewares = []
  def add_middleware m
    @@additional_middlewares << m
  end

  # If the Typhoeus adapter is being used, pass stubs to it for testing.
  def stubs
    nil
  end
end
|
require "safer_bus_api/version"
require 'safer_bus_api/configuration'
module SaferBusApi
  BASE_URL = 'https://mobile.fmcsa.dot.gov/saferbus/resource/v1/'

  # Configuration DSL: SaferBusApi.configure { |c| c.api_token = '...' }
  # (fix: was a plain instance method, which made it uncallable as
  # SaferBusApi.configure)
  def self.configure
    yield SaferBusApi::Configuration
  end

  # One FMCSA SaferBus lookup keyed by DOT number.
  class Request
    def initialize(opts={})
      @dot_number = opts[:dot_number]
      @response = nil
    end

    # Queries the API for the configured DOT number and caches the parsed
    # response. NOTE(review): #blank? comes from ActiveSupport — assumed
    # loaded by the host application; confirm.
    def perform
      raise 'Cannot perform SaferBusApi api-request with empty dot_number' if @dot_number.blank?
      # fix: the query helpers are class methods of Request, not of the
      # SaferBusApi module — the old `SaferBusApi.query_by_dot_number` call
      # raised NoMethodError.
      @response = self.class.query_by_dot_number(@dot_number)
    end

    # Parsed response of the last #perform, or nil before it ran.
    def response
      @response
    end

    def self.query_by_company_name(company_name)
      fetch_data("carriers/#{company_name}")
    end

    def self.query_by_dot_number(dot_number)
      fetch_data("carrier/#{dot_number}")
    end

    # NOTE(review): hits the same "carrier/" endpoint as query_by_dot_number;
    # confirm MC-number lookups don't need a distinct path.
    def self.query_by_mc_number(mc_number)
      fetch_data("carrier/#{mc_number}")
    end

    # Performs the GET against BASE_URL and parses the JSON body.
    def self.fetch_data(url_suffix)
      url = "#{BASE_URL}#{url_suffix}.json?start=1&size=10&webKey=#{SaferBusApi::Configuration.api_token}"
      response = Typhoeus.get(url)
      return JSON.parse(response.body)
    end
  end
end
Added: Response class
require "safer_bus_api/version"
require 'safer_bus_api/configuration'
module SaferBusApi
  BASE_URL = 'https://mobile.fmcsa.dot.gov/saferbus/resource/v1/'

  # Configuration DSL: SaferBusApi.configure { |c| c.api_token = '...' }
  # (fix: was a plain instance method, which made it uncallable as
  # SaferBusApi.configure)
  def self.configure
    yield SaferBusApi::Configuration
  end

  # Wraps the raw parsed API payload.
  class Response
    def initialize(response)
      # fix: was `@response_data = response_data`, which raised NameError —
      # the constructor parameter is `response`.
      @response_data = response
    end
  end

  # One FMCSA SaferBus lookup keyed by DOT number, company name or MC number.
  class Request
    def initialize(opts={})
      @dot_number = opts[:dot_number]
      @company_name = opts[:company_name]
      @mc_number = opts[:mc_number]
      @response = nil
    end

    # Runs whichever lookup matches the first identifier present (dot_number,
    # then company_name, then mc_number) and wraps the result in a Response.
    # NOTE(review): #present? comes from ActiveSupport — assumed loaded.
    def perform
      # fix: the query helpers are class methods of Request, not of the
      # SaferBusApi module — the old `SaferBusApi.query_by_*` calls raised
      # NoMethodError.
      if @dot_number.present?
        @response = self.class.query_by_dot_number(@dot_number)
      elsif @company_name.present?
        @response = self.class.query_by_company_name(@company_name)
      elsif @mc_number.present?
        @response = self.class.query_by_mc_number(@mc_number)
      else
        raise 'SaferBusApi Error: Need to set either a dot_number, company_name or mc_number.'
      end
      Response.new(@response)
    end

    # Raw parsed response of the last #perform, or nil before it ran.
    def response
      @response
    end

    def self.query_by_company_name(company_name)
      fetch_data("carriers/#{company_name}")
    end

    def self.query_by_dot_number(dot_number)
      fetch_data("carrier/#{dot_number}")
    end

    # NOTE(review): hits the same "carrier/" endpoint as query_by_dot_number;
    # confirm MC-number lookups don't need a distinct path.
    def self.query_by_mc_number(mc_number)
      fetch_data("carrier/#{mc_number}")
    end

    # Performs the GET against BASE_URL and parses the JSON body.
    def self.fetch_data(url_suffix)
      url = "#{BASE_URL}#{url_suffix}.json?start=1&size=10&webKey=#{SaferBusApi::Configuration.api_token}"
      response = Typhoeus.get(url)
      return JSON.parse(response.body)
    end
  end
end
require "active_support/configurable"
require "samfundet_auth/engine"
module SamfundetAuth
  include ActiveSupport::Configurable

  class << self
    # Configure via SamfundetAuth.setup { |c| ... }, then point the auth
    # models at the databases/tables named in the configuration
    # (read from the host app's config/database.yml).
    def setup
      yield config
      database_path = "#{Rails.root}/config/database.yml"
      # fix: File.exist? — File.exists? is a deprecated alias
      if File.exist? database_path
        database_config = YAML.load_file database_path
        if config.domain_database
          [Role, MembersRole].each do |model|
            model.establish_connection database_config[config.domain_database.to_s]
          end
        end
        if config.member_database
          Member.establish_connection database_config[config.member_database.to_s]
        end
        if config.member_table
          # fix: table_name= — set_table_name was removed from ActiveRecord
          Member.table_name = config.member_table.to_s
        end
      end
    end
  end
end
Removed deprecated call to set_table_name.
require "active_support/configurable"
require "samfundet_auth/engine"
module SamfundetAuth
  include ActiveSupport::Configurable

  class << self
    # Configure via SamfundetAuth.setup { |c| ... }, then point the auth
    # models at the databases/tables named in the configuration
    # (read from the host app's config/database.yml).
    def setup
      yield config
      database_path = "#{Rails.root}/config/database.yml"
      # fix: File.exist? — File.exists? is a deprecated alias
      if File.exist? database_path
        database_config = YAML.load_file database_path
        if config.domain_database
          [Role, MembersRole].each do |model|
            model.establish_connection database_config[config.domain_database.to_s]
          end
        end
        if config.member_database
          Member.establish_connection database_config[config.member_database.to_s]
        end
        if config.member_table
          Member.table_name = config.member_table.to_s
        end
      end
    end
  end
end
|
module Satori
  # Gem version; frozen so the constant cannot be mutated in place.
  VERSION = "0.0.36".freeze
end
version change
# Gem version constant for Satori.
module Satori
  # Frozen to prevent accidental runtime mutation of the version string.
  VERSION = "0.0.37".freeze
end
|
Bump the podspec iOS min to 9.0
Xcode 12+ issues a warning when targeting older iOS versions, so this
avoids the warning. #7980 already updated the Xcode projects.
|
# CocoaPods specification for Pulsator (release 0.1.0), a pulse-animation
# library for iOS.
Pod::Spec.new do |s|
s.name = "Pulsator"
s.version = "0.1.0"
s.summary = "Pulse animation for iOS."
s.homepage = "https://github.com/shu223/Pulsator"
s.screenshots = "https://github.com/shu223/Pulsator/blob/master/demo.gif?raw=true"
s.license = 'MIT'
s.author = { "shu223" => "shuichi0526@gmail.com" }
# NOTE(review): the tag duplicates s.version as a literal; s.version.to_s
# would keep them in lockstep on future bumps.
s.source = { :git => "https://github.com/shu223/Pulsator.git", :tag => "0.1.0" }
s.social_media_url = 'https://twitter.com/shu223'
s.platform = :ios, '8.0'
s.requires_arc = true
s.source_files = 'Pulsator/Pulsator.swift'
s.frameworks = 'UIKit', 'QuartzCore'
end
Updated the versions in Podspec
# CocoaPods specification for Pulsator (release 0.1.1), a pulse-animation
# library for iOS.
Pod::Spec.new do |s|
s.name = "Pulsator"
s.version = "0.1.1"
s.summary = "Pulse animation for iOS."
s.homepage = "https://github.com/shu223/Pulsator"
s.screenshots = "https://github.com/shu223/Pulsator/blob/master/demo.gif?raw=true"
s.license = 'MIT'
s.author = { "shu223" => "shuichi0526@gmail.com" }
# NOTE(review): the tag duplicates s.version as a literal; s.version.to_s
# would keep them in lockstep on future bumps.
s.source = { :git => "https://github.com/shu223/Pulsator.git", :tag => "0.1.1" }
s.social_media_url = 'https://twitter.com/shu223'
s.platform = :ios, '8.0'
s.requires_arc = true
s.source_files = 'Pulsator/Pulsator.swift'
s.frameworks = 'UIKit', 'QuartzCore'
end
|
# CocoaPods specification for QqcShare (release 1.0.61).
Pod::Spec.new do |s|
  s.license = "MIT"
  s.author = { "qqc" => "20599378@qq.com" }
  s.platform = :ios, "8.0"
  s.requires_arc = true
  s.name = "QqcShare"
  s.version = "1.0.61"
  s.summary = "QqcShare"
  s.homepage = "https://github.com/xukiki/QqcShare"
  s.source = { :git => "https://github.com/xukiki/QqcShare.git", :tag => "#{s.version}" }
  s.source_files = ["QqcShare/*.{h,m}"]
  s.resource = 'QqcShare/QqcShare.bundle'
  s.subspec 'AuthLoginProcess' do |sp|
    # FIX: was `sp.dependency '1.0.61/QqcShare'` — the version string is not
    # a pod name. Depend on the QqcShare pod itself, matching the 1.0.62
    # revision of this spec. NOTE(review): verify with `pod lib lint` that a
    # root-pod dependency from its own subspec is accepted here.
    sp.dependency 'QqcShare'
    sp.source_files = 'QqcShare/AuthLoginProcess/*.{h,m}'
  end
  s.subspec 'QRCodeGenerator' do |sp|
    sp.source_files = 'QqcShare/QRCodeGenerator/*.{h,m,c}'
  end
  s.subspec 'ShareProcess' do |sp|
    # FIX: same bogus '1.0.61/QqcShare' dependency as above.
    sp.dependency 'QqcShare'
    sp.source_files = 'QqcShare/ShareProcess/*.{h,m}'
  end
  s.subspec 'Views' do |sp|
    sp.source_files = 'QqcShare/Views/*.{h,m}'
    # FIX: same bogus '1.0.61/QqcShare' dependency as above.
    sp.dependency 'QqcShare'
    sp.subspec 'Panel' do |ssp|
      ssp.source_files = 'QqcShare/Views/Panel/*.{h,m}'
    end
  end
  s.dependency 'ShareSDK3'
  s.dependency 'MOBFoundation'
  s.dependency 'ShareSDK3/ShareSDKPlatforms/QQ'
  s.dependency 'ShareSDK3/ShareSDKPlatforms/SinaWeibo'
  s.dependency 'ShareSDK3/ShareSDKPlatforms/WeChat'
  s.dependency 'QqcProgressHUD'
  s.dependency 'QqcUtilityUI'
end
1.0.62
# CocoaPods specification for QqcShare (release 1.0.62).
Pod::Spec.new do |s|
s.license = "MIT"
s.author = { "qqc" => "20599378@qq.com" }
s.platform = :ios, "8.0"
s.requires_arc = true
s.name = "QqcShare"
s.version = "1.0.62"
s.summary = "QqcShare"
s.homepage = "https://github.com/xukiki/QqcShare"
s.source = { :git => "https://github.com/xukiki/QqcShare.git", :tag => "#{s.version}" }
s.source_files = ["QqcShare/*.{h,m}"]
s.resource = 'QqcShare/QqcShare.bundle'
# NOTE(review): several subspecs declare a dependency on the root pod
# 'QqcShare'; verify with `pod lib lint` that this is accepted.
s.subspec 'AuthLoginProcess' do |sp|
sp.dependency 'QqcShare'
sp.source_files = 'QqcShare/AuthLoginProcess/*.{h,m}'
end
s.subspec 'QRCodeGenerator' do |sp|
sp.source_files = 'QqcShare/QRCodeGenerator/*.{h,m,c}'
end
s.subspec 'ShareProcess' do |sp|
sp.dependency 'QqcShare'
sp.source_files = 'QqcShare/ShareProcess/*.{h,m}'
end
s.subspec 'Views' do |sp|
sp.source_files = 'QqcShare/Views/*.{h,m}'
sp.dependency 'QqcShare'
sp.subspec 'Panel' do |ssp|
ssp.source_files = 'QqcShare/Views/Panel/*.{h,m}'
end
end
s.dependency 'ShareSDK3'
s.dependency 'MOBFoundation'
s.dependency 'ShareSDK3/ShareSDKPlatforms/QQ'
s.dependency 'ShareSDK3/ShareSDKPlatforms/SinaWeibo'
s.dependency 'ShareSDK3/ShareSDKPlatforms/WeChat'
s.dependency 'QqcProgressHUD'
s.dependency 'QqcUtilityUI'
end
|
require './lib/brewery_db/version'
# Gem specification for brewery_db, a client for the BreweryDB API.
Gem::Specification.new do |gem|
  gem.name = 'brewery_db'
  gem.version = BreweryDB::VERSION
  gem.summary = 'A Ruby library for using the BreweryDB API.'
  gem.homepage = 'http://github.com/tylerhunt/brewery_db'
  gem.authors = ['Tyler Hunt', 'Steven Harman']
  # FIX: declare the licence so `gem build` stops warning and tooling
  # (rubygems.org, licence scanners) can pick it up.
  gem.license = 'MIT'
  gem.required_ruby_version = '>= 1.9'
  gem.add_dependency 'faraday', '~> 0.8.0'
  gem.add_dependency 'faraday_middleware', '~> 0.8'
  gem.add_dependency 'hashie', '>= 1.1', '< 3'
  gem.add_development_dependency 'pry'
  gem.add_development_dependency 'rspec', '~> 2.14'
  gem.add_development_dependency 'vcr', '~> 2.0'
  # NOTE(review): $\ (output record separator) defaults to nil, so this splits
  # on whitespace — confirm no tracked path contains spaces.
  gem.files = `git ls-files`.split($\)
  gem.executables = gem.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ['lib']
end
Add License to Gemspec
Closes Issue #24
[ci skip]
require './lib/brewery_db/version'
# Gem specification for brewery_db, a client for the BreweryDB API.
Gem::Specification.new do |gem|
gem.name = 'brewery_db'
gem.version = BreweryDB::VERSION
gem.summary = 'A Ruby library for using the BreweryDB API.'
gem.homepage = 'http://github.com/tylerhunt/brewery_db'
gem.authors = ['Tyler Hunt', 'Steven Harman']
gem.license = 'MIT'
gem.required_ruby_version = '>= 1.9'
gem.add_dependency 'faraday', '~> 0.8.0'
gem.add_dependency 'faraday_middleware', '~> 0.8'
gem.add_dependency 'hashie', '>= 1.1', '< 3'
gem.add_development_dependency 'pry'
gem.add_development_dependency 'rspec', '~> 2.14'
gem.add_development_dependency 'vcr', '~> 2.0'
# NOTE(review): $\ (output record separator) defaults to nil, so this splits
# on whitespace — confirm no tracked path contains spaces.
gem.files = `git ls-files`.split($\)
gem.executables = gem.files.grep(%r{^bin/}).map { |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ['lib']
end
|
require 'spec_helper_acceptance'
describe 'cassandra::java' do
install_java_pp = <<-EOS
if $::osfamily == 'RedHat' {
include 'cassandra::java'
} else {
if $::lsbdistid == 'Ubuntu' {
class { 'cassandra::java':
aptkey => {
'openjdk-r' => {
id => 'DA1A4A13543B466853BAF164EB9B1D8886F44E2A',
server => 'keyserver.ubuntu.com',
},
},
aptsource => {
'openjdk-r' => {
location => 'http://ppa.launchpad.net/openjdk-r/ppa/ubuntu',
comment => 'OpenJDK builds (all archs)',
release => $::lsbdistcodename,
repos => 'main',
},
},
package_name => 'openjdk-8-jdk',
}
} else {
class { 'cassandra::java':
aptkey => {
'ZuluJDK' => {
id => '27BC0C8CB3D81623F59BDADCB1998361219BD9C9',
server => 'keyserver.ubuntu.com',
},
},
aptsource => {
'ZuluJDK' => {
location => 'http://repos.azulsystems.com/debian',
comment => 'Zulu OpenJDK 8 for Debian',
release => 'stable',
repos => 'main',
},
},
package_name => 'zulu-8',
}
}
}
EOS
describe '########### Java installation.' do
it 'should work with no errors' do
apply_manifest(install_java_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(install_java_pp,
catch_failures: true).exit_code).to be_zero
end
end
end
# Acceptance test: applying cassandra::datastax_repo must converge cleanly
# and be idempotent (a second apply makes no changes — exit code zero).
describe 'cassandra::datastax_repo' do
install_datastax_repo_pp = <<-EOS
class { 'cassandra::datastax_repo': }
EOS
describe '########### DataStax Repository installation.' do
it 'should work with no errors' do
apply_manifest(install_datastax_repo_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(install_datastax_repo_pp,
catch_failures: true).exit_code).to be_zero
end
end
end
describe 'cassandra' do
nodeset = ENV['BEAKER_set']
opsys = nodeset.split('_')[1]
# Ubuntu 16 only works with Cassandra 3.X
cassandra_version = if opsys == 'ubuntu16'
['3.0.3']
else
['2.2.7', '3.0.3']
end
ruby_lt_190 = case opsys
when 'centos6' then true
else false
end
cassandra_version.each do |version|
cassandra_install_pp = <<-EOS
if $::osfamily == 'RedHat' {
$version = '#{version}-1'
if $version == '2.2.7-1' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
} else {
$cassandra_optutils_package = 'cassandra30-tools'
$cassandra_package = 'cassandra30'
}
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '#{version}'
if $::lsbdistid == 'Ubuntu' {
# Workarounds for amonst other things CASSANDRA-11850
Exec {
environment => [ 'CQLSH_NO_BUNDLED=TRUE' ]
}
if $::operatingsystemmajrelease >= 16 {
exec { '/usr/bin/wget http://launchpadlibrarian.net/109052632/python-support_1.0.15_all.deb':
cwd => '/var/tmp',
creates => '/var/tmp/python-support_1.0.15_all.deb',
} ~>
exec { '/usr/bin/dpkg -i /var/tmp/python-support_1.0.15_all.deb':
refreshonly => true,
} ->
package { 'cassandra-driver':
provider => 'pip',
before => Class['cassandra']
}
}
}
exec { '/bin/chown root:root /etc/apt/sources.list.d/datastax.list':
unless => '/usr/bin/test -O /etc/apt/sources.list.d/datastax.list',
require => Class['cassandra::datastax_agent']
}
}
$initial_settings = {
'authenticator' => 'PasswordAuthenticator',
'cluster_name' => 'MyCassandraCluster',
'commitlog_directory' => '/var/lib/cassandra/commitlog',
'commitlog_sync' => 'periodic',
'commitlog_sync_period_in_ms' => 10000,
'data_file_directories' => ['/var/lib/cassandra/data'],
'endpoint_snitch' => 'GossipingPropertyFileSnitch',
'listen_address' => $::ipaddress,
'partitioner' => 'org.apache.cassandra.dht.Murmur3Partitioner',
'saved_caches_directory' => '/var/lib/cassandra/saved_caches',
'seed_provider' => [
{
'class_name' => 'org.apache.cassandra.locator.SimpleSeedProvider',
'parameters' => [
{
'seeds' => $::ipaddress,
},
],
},
],
'start_native_transport' => true,
}
if $version =~ /^2/ {
$settings = $initial_settings
} else {
$settings = merge($initial_settings, { 'hints_directory' => '/var/lib/cassandra/hints' })
}
if versioncmp($::rubyversion, '1.9.0') < 0 {
$service_refresh = false
} else {
$service_refresh = true
}
class { 'cassandra':
cassandra_9822 => true,
dc => 'LON',
package_ensure => $version,
package_name => $cassandra_package,
rack => 'R101',
service_ensure => running,
service_refresh => $service_refresh,
settings => $settings,
}
class { 'cassandra::optutils':
package_ensure => $version,
package_name => $cassandra_optutils_package,
require => Class['cassandra']
}
class { 'cassandra::datastax_agent':
require => Class['cassandra']
}
# This really sucks but Docker, CentOS 6 and iptables don't play nicely
# together. Therefore we can't test the firewall on this platform :-(
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
include '::cassandra::firewall_ports'
}
EOS
describe "########### Cassandra #{version} installation (#{opsys})." do
  it 'should work with no errors' do
    apply_manifest(cassandra_install_pp, catch_failures: true)
  end

  it 'Give Cassandra a minute to fully come alive.' do
    sleep 60
  end

  if ruby_lt_190
    it 'should work with no errors (subsequent run)' do
      apply_manifest(cassandra_install_pp, catch_failures: true)
    end
  else
    it 'check code is idempotent' do
      # BUG FIX: the original declared another `it` block *inside* this one.
      # RSpec forbids declaring an example within an example, so this raised
      # at runtime instead of applying the permissions cludge. Apply the
      # cludge inline, then re-apply the install manifest and require a
      # zero (no-change) exit code.
      datastax_agent_cludge_pp = <<-EOS
        Exec {
          path => [ '/usr/bin', '/bin'],
        }
        exec { 'chmod 0640 /var/lib/datastax-agent/conf/address.yaml': }
      EOS
      apply_manifest(datastax_agent_cludge_pp, catch_failures: true)
      expect(apply_manifest(cassandra_install_pp,
                            catch_failures: true).exit_code).to be_zero
    end
  end
end
describe service('cassandra') do
it do
is_expected.to be_running
is_expected.to be_enabled
end
end
describe service('datastax-agent') do
it do
is_expected.to be_running
is_expected.to be_enabled
end
end
schema_testing_create_pp = <<-EOS
#{cassandra_install_pp}
$cql_types = {
'fullname' => {
'keyspace' => 'mykeyspace',
'fields' => {
'fname' => 'text',
'lname' => 'text',
},
},
}
$keyspaces = {
'mykeyspace' => {
ensure => present,
replication_map => {
keyspace_class => 'SimpleStrategy',
replication_factor => 1,
},
durable_writes => false,
},
}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cql_types => $cql_types,
cqlsh_host => $::ipaddress,
cqlsh_password => 'cassandra',
cqlsh_user => 'cassandra',
indexes => {
'users_lname_idx' => {
keyspace => 'mykeyspace',
table => 'users',
keys => 'lname',
},
},
keyspaces => $keyspaces,
tables => {
'users' => {
'keyspace' => 'mykeyspace',
'columns' => {
'userid' => 'int',
'fname' => 'text',
'lname' => 'text',
'PRIMARY KEY' => '(userid)',
},
},
},
users => {
'spillman' => {
password => 'Niner27',
},
'akers' => {
password => 'Niner2',
superuser => true,
},
'boone' => {
password => 'Niner75',
},
},
}
}
EOS
describe '########### Schema create.' do
it 'should work with no errors' do
apply_manifest(schema_testing_create_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_create_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_create_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_type_pp = <<-EOS
#{cassandra_install_pp}
$cql_types = {
'fullname' => {
'keyspace' => 'mykeyspace',
'ensure' => 'absent'
}
}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cql_types => $cql_types,
cqlsh_host => $::ipaddress,
cqlsh_user => 'akers',
cqlsh_password => 'Niner2',
}
}
EOS
describe '########### Schema drop type.' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_type_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_type_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_type_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_user_pp = <<-EOS
#{cassandra_install_pp}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cqlsh_password => 'Niner2',
cqlsh_host => $::ipaddress,
cqlsh_user => 'akers',
cqlsh_client_config => '/root/.puppetcqlshrc',
users => {
'boone' => {
ensure => absent,
},
},
}
}
EOS
describe '########### Drop the boone user.' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_user_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_user_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_user_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_index_pp = <<-EOS
#{cassandra_install_pp}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cqlsh_host => $::ipaddress,
cqlsh_user => 'akers',
cqlsh_password => 'Niner2',
indexes => {
'users_lname_idx' => {
ensure => absent,
keyspace => 'mykeyspace',
table => 'users',
},
},
}
}
EOS
describe '########### Schema drop index.' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_index_pp, catch_failures: true)
end
if ruby_lt_190
it 'should run with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_index_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_index_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_pp = <<-EOS
#{cassandra_install_pp}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cqlsh_host => $ipaddress,
cqlsh_password => 'Niner2',
cqlsh_user => 'akers',
tables => {
'users' => {
ensure => absent,
keyspace => 'mykeyspace',
},
},
}
}
EOS
describe '########### Schema drop (table).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_pp = <<-EOS
#{cassandra_install_pp}
$keyspaces = {
'mykeyspace' => {
ensure => absent,
}
}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cqlsh_host => $::ipaddress,
cqlsh_password => 'Niner2',
cqlsh_user => 'akers',
keyspaces => $keyspaces,
}
}
EOS
describe '########### Schema drop (Keyspaces).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_pp, catch_failures: true).exit_code).to be_zero
end
end
end
describe '########### Gather service information (when in debug mode).' do
it 'Show the cassandra system log.' do
shell("grep -v -e '^INFO' -e '^\s*INFO' /var/log/cassandra/system.log")
end
end
next unless version != cassandra_version.last
cassandra_uninstall_pp = <<-EOS
Exec {
path => [
'/usr/local/bin',
'/opt/local/bin',
'/usr/bin',
'/usr/sbin',
'/bin',
'/sbin'],
logoutput => true,
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
}
service { 'cassandra':
ensure => stopped,
} ->
package { $cassandra_optutils_package:
ensure => absent
} ->
package { $cassandra_package:
ensure => absent
} ->
exec { 'rm -rf /var/lib/cassandra/*/* /var/log/cassandra/*': }
EOS
describe '########### Uninstall Cassandra 2.2.' do
it 'should work with no errors' do
apply_manifest(cassandra_uninstall_pp, catch_failures: true)
end
end
end
end
Clarified the code a bit (#281).
require 'spec_helper_acceptance'
describe 'cassandra::java' do
install_java_pp = <<-EOS
if $::osfamily == 'RedHat' {
include 'cassandra::java'
} else {
if $::lsbdistid == 'Ubuntu' {
class { 'cassandra::java':
aptkey => {
'openjdk-r' => {
id => 'DA1A4A13543B466853BAF164EB9B1D8886F44E2A',
server => 'keyserver.ubuntu.com',
},
},
aptsource => {
'openjdk-r' => {
location => 'http://ppa.launchpad.net/openjdk-r/ppa/ubuntu',
comment => 'OpenJDK builds (all archs)',
release => $::lsbdistcodename,
repos => 'main',
},
},
package_name => 'openjdk-8-jdk',
}
} else {
class { 'cassandra::java':
aptkey => {
'ZuluJDK' => {
id => '27BC0C8CB3D81623F59BDADCB1998361219BD9C9',
server => 'keyserver.ubuntu.com',
},
},
aptsource => {
'ZuluJDK' => {
location => 'http://repos.azulsystems.com/debian',
comment => 'Zulu OpenJDK 8 for Debian',
release => 'stable',
repos => 'main',
},
},
package_name => 'zulu-8',
}
}
}
EOS
describe '########### Java installation.' do
it 'should work with no errors' do
apply_manifest(install_java_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(install_java_pp,
catch_failures: true).exit_code).to be_zero
end
end
end
describe 'cassandra::datastax_repo' do
install_datastax_repo_pp = <<-EOS
class { 'cassandra::datastax_repo': }
EOS
describe '########### DataStax Repository installation.' do
it 'should work with no errors' do
apply_manifest(install_datastax_repo_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(install_datastax_repo_pp,
catch_failures: true).exit_code).to be_zero
end
end
end
describe 'cassandra' do
nodeset = ENV['BEAKER_set']
opsys = nodeset.split('_')[1]
# Ubuntu 16 only works with Cassandra 3.X
cassandra_version = if opsys == 'ubuntu16'
['3.0.3']
else
['2.2.7', '3.0.3']
end
ruby_lt_190 = case opsys
when 'centos6' then true
else false
end
cassandra_version.each do |version|
cassandra_install_pp = <<-EOS
if $::osfamily == 'RedHat' {
$version = '#{version}-1'
if $version == '2.2.7-1' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
} else {
$cassandra_optutils_package = 'cassandra30-tools'
$cassandra_package = 'cassandra30'
}
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '#{version}'
if $::lsbdistid == 'Ubuntu' {
# Workarounds for amonst other things CASSANDRA-11850
Exec {
environment => [ 'CQLSH_NO_BUNDLED=TRUE' ]
}
if $::operatingsystemmajrelease >= 16 {
exec { '/usr/bin/wget http://launchpadlibrarian.net/109052632/python-support_1.0.15_all.deb':
cwd => '/var/tmp',
creates => '/var/tmp/python-support_1.0.15_all.deb',
} ~>
exec { '/usr/bin/dpkg -i /var/tmp/python-support_1.0.15_all.deb':
refreshonly => true,
} ->
package { 'cassandra-driver':
provider => 'pip',
before => Class['cassandra']
}
}
}
exec { '/bin/chown root:root /etc/apt/sources.list.d/datastax.list':
unless => '/usr/bin/test -O /etc/apt/sources.list.d/datastax.list',
require => Class['cassandra::datastax_agent']
}
}
$initial_settings = {
'authenticator' => 'PasswordAuthenticator',
'cluster_name' => 'MyCassandraCluster',
'commitlog_directory' => '/var/lib/cassandra/commitlog',
'commitlog_sync' => 'periodic',
'commitlog_sync_period_in_ms' => 10000,
'data_file_directories' => ['/var/lib/cassandra/data'],
'endpoint_snitch' => 'GossipingPropertyFileSnitch',
'listen_address' => $::ipaddress,
'partitioner' => 'org.apache.cassandra.dht.Murmur3Partitioner',
'saved_caches_directory' => '/var/lib/cassandra/saved_caches',
'seed_provider' => [
{
'class_name' => 'org.apache.cassandra.locator.SimpleSeedProvider',
'parameters' => [
{
'seeds' => $::ipaddress,
},
],
},
],
'start_native_transport' => true,
}
if $version =~ /^2/ {
$settings = $initial_settings
} else {
$settings = merge($initial_settings, { 'hints_directory' => '/var/lib/cassandra/hints' })
}
if versioncmp($::rubyversion, '1.9.0') < 0 {
$service_refresh = false
} else {
$service_refresh = true
}
class { 'cassandra':
cassandra_9822 => true,
dc => 'LON',
package_ensure => $version,
package_name => $cassandra_package,
rack => 'R101',
service_ensure => running,
service_refresh => $service_refresh,
settings => $settings,
}
class { 'cassandra::optutils':
package_ensure => $version,
package_name => $cassandra_optutils_package,
require => Class['cassandra']
}
class { 'cassandra::datastax_agent':
require => Class['cassandra']
}
# This really sucks but Docker, CentOS 6 and iptables don't play nicely
# together. Therefore we can't test the firewall on this platform :-(
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
include '::cassandra::firewall_ports'
}
EOS
datastax_agent_cludge_pp = <<-EOS
Exec {
path => [ '/usr/bin', '/bin'],
}
exec { 'chmod 0640 /var/lib/datastax-agent/conf/address.yaml': }
EOS
describe "########### Cassandra #{version} installation (#{opsys})." do
it 'should work with no errors' do
apply_manifest(cassandra_install_pp, catch_failures: true)
end
it 'Give Cassandra a minute to fully come alive.' do
sleep 60
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(cassandra_install_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
apply_manifest(datastax_agent_cludge_pp, catch_failures: true)
expect(apply_manifest(cassandra_install_pp,
catch_failures: true).exit_code).to be_zero
end
end
end
describe service('cassandra') do
it do
is_expected.to be_running
is_expected.to be_enabled
end
end
describe service('datastax-agent') do
it do
is_expected.to be_running
is_expected.to be_enabled
end
end
schema_testing_create_pp = <<-EOS
#{cassandra_install_pp}
$cql_types = {
'fullname' => {
'keyspace' => 'mykeyspace',
'fields' => {
'fname' => 'text',
'lname' => 'text',
},
},
}
$keyspaces = {
'mykeyspace' => {
ensure => present,
replication_map => {
keyspace_class => 'SimpleStrategy',
replication_factor => 1,
},
durable_writes => false,
},
}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cql_types => $cql_types,
cqlsh_host => $::ipaddress,
cqlsh_password => 'cassandra',
cqlsh_user => 'cassandra',
indexes => {
'users_lname_idx' => {
keyspace => 'mykeyspace',
table => 'users',
keys => 'lname',
},
},
keyspaces => $keyspaces,
tables => {
'users' => {
'keyspace' => 'mykeyspace',
'columns' => {
'userid' => 'int',
'fname' => 'text',
'lname' => 'text',
'PRIMARY KEY' => '(userid)',
},
},
},
users => {
'spillman' => {
password => 'Niner27',
},
'akers' => {
password => 'Niner2',
superuser => true,
},
'boone' => {
password => 'Niner75',
},
},
}
}
EOS
describe '########### Schema create.' do
it 'should work with no errors' do
apply_manifest(schema_testing_create_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_create_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_create_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_type_pp = <<-EOS
#{cassandra_install_pp}
$cql_types = {
'fullname' => {
'keyspace' => 'mykeyspace',
'ensure' => 'absent'
}
}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cql_types => $cql_types,
cqlsh_host => $::ipaddress,
cqlsh_user => 'akers',
cqlsh_password => 'Niner2',
}
}
EOS
describe '########### Schema drop type.' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_type_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_type_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_type_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_user_pp = <<-EOS
#{cassandra_install_pp}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cqlsh_password => 'Niner2',
cqlsh_host => $::ipaddress,
cqlsh_user => 'akers',
cqlsh_client_config => '/root/.puppetcqlshrc',
users => {
'boone' => {
ensure => absent,
},
},
}
}
EOS
describe '########### Drop the boone user.' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_user_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_user_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_user_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_index_pp = <<-EOS
#{cassandra_install_pp}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cqlsh_host => $::ipaddress,
cqlsh_user => 'akers',
cqlsh_password => 'Niner2',
indexes => {
'users_lname_idx' => {
ensure => absent,
keyspace => 'mykeyspace',
table => 'users',
},
},
}
}
EOS
describe '########### Schema drop index.' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_index_pp, catch_failures: true)
end
if ruby_lt_190
it 'should run with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_index_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_index_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_pp = <<-EOS
#{cassandra_install_pp}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cqlsh_host => $ipaddress,
cqlsh_password => 'Niner2',
cqlsh_user => 'akers',
tables => {
'users' => {
ensure => absent,
keyspace => 'mykeyspace',
},
},
}
}
EOS
describe '########### Schema drop (table).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_pp, catch_failures: true).exit_code).to be_zero
end
end
end
schema_testing_drop_pp = <<-EOS
#{cassandra_install_pp}
$keyspaces = {
'mykeyspace' => {
ensure => absent,
}
}
if $::operatingsystem != CentOS {
$os_ok = true
} else {
if $::operatingsystemmajrelease != 6 {
$os_ok = true
} else {
$os_ok = false
}
}
if $os_ok {
class { 'cassandra::schema':
cqlsh_host => $::ipaddress,
cqlsh_password => 'Niner2',
cqlsh_user => 'akers',
keyspaces => $keyspaces,
}
}
EOS
describe '########### Schema drop (Keyspaces).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
if ruby_lt_190
it 'should work with no errors (subsequent run)' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
else
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_pp, catch_failures: true).exit_code).to be_zero
end
end
end
describe '########### Gather service information (when in debug mode).' do
it 'Show the cassandra system log.' do
shell("grep -v -e '^INFO' -e '^\s*INFO' /var/log/cassandra/system.log")
end
end
next unless version != cassandra_version.last
cassandra_uninstall_pp = <<-EOS
Exec {
path => [
'/usr/local/bin',
'/opt/local/bin',
'/usr/bin',
'/usr/sbin',
'/bin',
'/sbin'],
logoutput => true,
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
}
service { 'cassandra':
ensure => stopped,
} ->
package { $cassandra_optutils_package:
ensure => absent
} ->
package { $cassandra_package:
ensure => absent
} ->
exec { 'rm -rf /var/lib/cassandra/*/* /var/log/cassandra/*': }
EOS
describe '########### Uninstall Cassandra 2.2.' do
it 'should work with no errors' do
apply_manifest(cassandra_uninstall_pp, catch_failures: true)
end
end
end
end
|
require 'spec_helper'
# Every supported input form, crossed below with every supported output
# form, to exercise the wrapper across all combinations.
inputs = %i[path hash file tempfile stringio]
outputs = %i[path file tempfile stringio nil]
# Builds a spec input of the requested form, backed by a PDF fixture
# (default 'fields.pdf').
#
# input_type - one of :path, :hash, :file, :tempfile, :stringio.
# file_name  - optional fixture name overriding the default.
#
# Returns the input object, or nil for an unrecognised input_type.
def get_input(input_type, file_name = nil)
  # DRY fix: `file_name || 'fields.pdf'` was repeated in every branch.
  fixture = path_to_pdf(file_name || 'fields.pdf')
  case input_type
  when :path
    fixture
  when :hash
    {fixture => nil}
  when :file
    File.new(fixture)
  when :tempfile
    t = Tempfile.new('specs')
    t.write(File.read(fixture))
    t # NOTE: not rewound here; the specs rewind inputs before use.
  when :stringio
    StringIO.new(File.read(fixture))
  end
end
# Builds a writable output target of the requested form; :nil yields nil so
# the wrapper can fall back to its default (a StringIO). Unrecognised types
# also yield nil.
def get_output(output_type)
  case output_type
  when :path     then path_to_pdf('output.spec')
  when :file     then File.new(path_to_pdf('output.spec'), 'w+')
  when :tempfile then Tempfile.new('specs2')
  when :stringio then StringIO.new
  when :nil      then nil
  end
end
# Maps the requested output form to the class the wrapper is expected to
# return. Both :stringio and :nil map to StringIO (the wrapper's default);
# unrecognised forms map to nil.
def map_output_type(output_specified)
  return StringIO if output_specified == :stringio || output_specified == :nil

  { path: String, file: File, tempfile: Tempfile }[output_specified]
end
# Deletes the artefact produced by a spec run: a String is treated as a
# path and unlinked directly; a File is unlinked via its #path. Anything
# else (StringIO, Tempfile, nil) is left alone.
def remove_output(output)
  case output
  when String then File.unlink(output)
  when File   then File.unlink(output.path)
  end
end
# Exercises ActivePdftk::Wrapper across the full input/output matrix built
# from the +inputs+ and +outputs+ arrays above, then spot-checks the
# #burst / #cat / #shuffle / #unpack_files operations directly.
describe ActivePdftk::Wrapper do
  before(:all) { @pdftk = ActivePdftk::Wrapper.new }

  context "new" do
    it "should instantiate the object." do
      @pdftk.should be_an_instance_of(ActivePdftk::Wrapper)
    end

    it "should pass the defaults statements to the call instance." do
      path = ActivePdftk::Call.new.locate_pdftk
      @pdftk_opt = ActivePdftk::Wrapper.new(:path => path, :operation => {:fill_form => 'a.fdf'}, :options => { :flatten => false, :owner_pw => 'bar', :user_pw => 'baz', :encrypt => :'40bit'})
      @pdftk_opt.default_statements.should == {:path => path, :operation => {:fill_form => 'a.fdf'}, :options => { :flatten => false, :owner_pw => 'bar', :user_pw => 'baz', :encrypt => :'40bit'}}
    end
  end

  # Host examples must set @call_output and @example_expect; compares the
  # produced bytes verbatim against the fixture.
  shared_examples "a working command" do
    it "should return a #{@output.nil? ? StringIO : @output.class}" do
      @call_output.should be_kind_of(@output.nil? ? StringIO : @output.class)
    end

    it "should return expected data" do
      if @call_output.is_a? String
        File.new(@call_output).read.should == @example_expect
      else
        @call_output.rewind
        @call_output.read.should == @example_expect
      end
    end

    after(:each) { remove_output(@call_output) }
  end

  # Like "a working command" but strips volatile PDF data (creation/mod
  # date strings and document ID pairs) from both sides before comparing.
  shared_examples "a combination command" do
    it "should return a #{@output.nil? ? StringIO : @output.class}" do
      @call_output.should be_kind_of(@output.nil? ? StringIO : @output.class)
    end

    it "should return expected data" do
      @example_expect.gsub!(/\(D\:.*\)/, '')
      @example_expect.gsub!(/\[<[a-z0-9]*><[a-z0-9]*>\]/, '')
      if @call_output.is_a?(String)
        text = File.read(@call_output)
        text.gsub!(/\(D\:.*\)/, '')
        text.gsub!(/\[<[a-z0-9]*><[a-z0-9]*>\]/, '')
      else
        @call_output.rewind
        text = @call_output.read
        text.gsub!(/\(D\:.*\)/, '')
        text.gsub!(/\[<[a-z0-9]*><[a-z0-9]*>\]/, '')
      end
      text.should == @example_expect
    end

    after(:each) { remove_output(@call_output) }
  end

  # Run every wrapper operation once per (input flavor, output flavor) pair.
  inputs.each do |input_type|
    outputs.each do |output_type|
      context "(Input:#{input_type}|Output:#{output_type})" do
        before :each do
          @input = get_input(input_type)
          @input.rewind rescue nil # rewind if possible.
          @output = get_output(output_type)
        end

        describe "#dump_data_fields" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.data_fields')).read }
            before(:each) { @call_output = @pdftk.dump_data_fields(@input, :output => @output) }
          end
        end

        describe "#fill_form" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.fill_form.pdf')).read }
            before(:each) { @call_output = @pdftk.fill_form(@input, path_to_pdf('fields.fdf.spec'), :output => @output) }
          end
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.fill_form.pdf')).read }
            before(:each) { @call_output = @pdftk.fill_form(@input, path_to_pdf('fields.xfdf.spec'), :output => @output) }
          end
        end

        describe "#generate_fdf" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.fdf')).read }
            before(:each) { @call_output = @pdftk.generate_fdf(@input,:output => @output) }
          end
        end

        describe "#dump_data" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.data')).read }
            before(:each) { @call_output = @pdftk.dump_data(@input,:output => @output) }
          end
        end

        describe "#update_info" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.update_info.pdf')).read }
            before(:each) { @call_output = @pdftk.update_info(@input, path_to_pdf('fields.data.spec'), :output => @output) }
          end
        end

        describe "#attach_files" do
          before(:all) { @attachment_size = File.size(path_to_pdf('attached_file.txt')) }
          before(:each) { @call_output = @pdftk.attach_files(@input, [path_to_pdf('attached_file.txt')], :output => @output) }

          # Size check only: the output must be at least input + attachment.
          it "should bind the file ine the pdf" do
            if @call_output.is_a?(String)
              output_size = File.size(@call_output)
            else
              @call_output.rewind
              t = Tempfile.new('attachment_output')
              t.write(@call_output.read)
              output_size = File.size(t.path)
              t.close
            end
            if @input.is_a?(String)
              input_size = File.size(@input)
            elsif @input.is_a?(Hash)
              input_size = 0
              @input.each do |file_path, name|
                input_size += File.size(file_path)
              end
            else
              @input.rewind
              t = Tempfile.new('attachment_input')
              t.write(@input.read)
              input_size = File.size(t.path)
              t.close
            end
            total_size = input_size + @attachment_size
            output_size.should >= total_size
          end

          it "should output the correct type" do
            @call_output.should be_kind_of(map_output_type(output_type))
          end
        end

        describe "#unpack_files to path", :if => output_type == :path do
          before(:each) do
            @input = get_input(input_type, 'fields.unpack_files.pdf')
            @input.rewind rescue nil # rewind if possible.
            @output = path_to_pdf('')
            @call_output = @pdftk.unpack_files(@input, @output)
          end
          it "should unpack the files" do
            @call_output.should == @output
            File.unlink(path_to_pdf('unpacked_file.txt')).should == 1
          end
        end

        describe "#unpack_files to tmp dir", :if => output_type == :nil do
          before(:each) do
            @input = get_input(input_type, 'fields.unpack_files.pdf')
            @input.rewind rescue nil # rewind if possible.
            @call_output = @pdftk.unpack_files(@input, @output)
          end
          it "should unpack the files" do
            @call_output.should == Dir.tmpdir
            File.unlink(File.join(Dir.tmpdir, 'unpacked_file.txt')).should == 1
          end
        end

        describe "#background" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.background.pdf')).read }
            before(:each) { @call_output = @pdftk.background(@input, path_to_pdf('a.pdf'), :output => @output) }
          end
          pending "spec multibackground also"
        end

        describe "#stamp" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.stamp.pdf')).read }
            before(:each) { @call_output = @pdftk.stamp(@input, path_to_pdf('a.pdf'), :output => @output) }
          end
          pending "check if the output is really a stamp & spec multistamp also"
        end

        describe "#cat" do
          it_behaves_like "a combination command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.cat.pdf')).read }
            before(:each) { @call_output = @pdftk.cat([{:pdf => path_to_pdf('a.pdf')}, {:pdf => path_to_pdf('b.pdf'), :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}], :output => @output) }
          end
        end

        describe "#shuffle" do
          it_behaves_like "a combination command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.shuffle.pdf')).read }
            before(:each) { @call_output = @pdftk.shuffle([{:pdf => path_to_pdf('a.pdf')}, {:pdf => path_to_pdf('b.pdf'), :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}], :output => @output) }
          end
        end

        describe "#burst", :if => output_type == :path do
          before(:each) do
            @input = get_input(input_type, 'a.pdf')
            @input.rewind rescue nil # rewind if possible.
            @output = path_to_pdf('pg_%04d.pdf')
            @call_output = @pdftk.burst(@input, :output => @output)
          end
          it "should file into single pages" do
            @call_output.should == @output
            File.unlink(path_to_pdf('pg_0001.pdf')).should == 1
            File.unlink(path_to_pdf('pg_0002.pdf')).should == 1
            File.unlink(path_to_pdf('pg_0003.pdf')).should == 1
          end
        end

        describe "#burst to tmp dir", :if => output_type == :nil do
          before(:each) do
            @input = get_input(input_type, 'a.pdf')
            @input.rewind rescue nil # rewind if possible.
            @call_output = @pdftk.burst(@input, :output => @output)
          end
          it "should file into single pages" do
            @call_output.should == Dir.tmpdir
            File.unlink(File.join(Dir.tmpdir, 'pg_0001.pdf')).should == 1
            File.unlink(File.join(Dir.tmpdir, 'pg_0002.pdf')).should == 1
            File.unlink(File.join(Dir.tmpdir, 'pg_0003.pdf')).should == 1
          end
        end
      end
    end # each outputs
  end # each inputs

  context "burst" do
    it "should call #pdtk on @call" do
      ActivePdftk::Call.any_instance.should_receive(:pdftk).with({:input => path_to_pdf('fields.pdf'), :operation => :burst})
      @pdftk.burst(path_to_pdf('fields.pdf'))
      @pdftk = ActivePdftk::Wrapper.new
      ActivePdftk::Call.any_instance.should_receive(:pdftk).with({:input => path_to_pdf('fields.pdf'), :operation => :burst, :options => {:encrypt => :'40bit'}})
      @pdftk.burst(path_to_pdf('fields.pdf'), :options => {:encrypt => :'40bit'})
    end
    it "should put a file in the system tmpdir when no output location given" do
      @pdftk = ActivePdftk::Wrapper.new
      @pdftk.burst(path_to_pdf('fields.pdf'))
      File.unlink(File.join(Dir.tmpdir, 'pg_0001.pdf')).should == 1
    end
    it "should put a file in the system tmpdir when no output location given but a page name format given" do
      @pdftk = ActivePdftk::Wrapper.new
      @pdftk.burst(path_to_pdf('fields.pdf'), :output => 'page_%02d.pdf')
      File.unlink(File.join(Dir.tmpdir, 'page_01.pdf')).should == 1
    end
    it "should put a file in the specified path" do
      @pdftk = ActivePdftk::Wrapper.new
      @pdftk.burst(path_to_pdf('fields.pdf'), :output => path_to_pdf('page_%02d.pdf').to_s)
      File.unlink(path_to_pdf('page_01.pdf')).should == 1
    end
  end

  context "cat" do
    it "should call #pdftk on @call" do
      ActivePdftk::Call.any_instance.should_receive(:pdftk).with({:input => {'a.pdf' => 'foo', 'b.pdf' => nil}, :operation => {:cat => [{:pdf => 'a.pdf'}, {:pdf => 'b.pdf', :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}]}})
      @pdftk.cat([{:pdf => 'a.pdf', :pass => 'foo'}, {:pdf => 'b.pdf', :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}])
    end
    it "should output the generated pdf" do
      @pdftk = ActivePdftk::Wrapper.new
      @pdftk.cat([{:pdf => path_to_pdf('a.pdf'), :pass => 'foo'}, {:pdf => path_to_pdf('b.pdf'), :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}], :output => path_to_pdf('cat.pdf'))
      File.unlink(path_to_pdf('cat.pdf')).should == 1
    end
  end

  context "shuffle" do
    it "should call #pdftk on @call" do
      ActivePdftk::Call.any_instance.should_receive(:pdftk).with({:input => {'a.pdf' => 'foo', 'b.pdf' => nil}, :operation => {:shuffle => [{:pdf => 'a.pdf'}, {:pdf => 'b.pdf', :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}]}})
      @pdftk.shuffle([{:pdf => 'a.pdf', :pass => 'foo'}, {:pdf => 'b.pdf', :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}])
    end
    it "should output the generated pdf" do
      @pdftk = ActivePdftk::Wrapper.new
      @pdftk.shuffle([{:pdf => path_to_pdf('a.pdf'), :pass => 'foo'}, {:pdf => path_to_pdf('b.pdf'), :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}], :output => path_to_pdf('shuffle.pdf'))
      File.unlink(path_to_pdf('shuffle.pdf')).should == 1
    end
  end

  context "unpack_files" do
    it "should return Dir.tmpdir" do
      @pdftk = ActivePdftk::Wrapper.new
      @pdftk.attach_files(path_to_pdf('fields.pdf'), [path_to_pdf('attached_file.txt')], :output => path_to_pdf('attached.pdf'))
      @pdftk.unpack_files(path_to_pdf('attached.pdf')).should == Dir.tmpdir
      File.unlink(path_to_pdf('attached.pdf')).should == 1
    end
    it "should return the specified output directory" do
      @pdftk = ActivePdftk::Wrapper.new
      @pdftk.attach_files(path_to_pdf('fields.pdf'), [path_to_pdf('attached_file.txt')], :output => path_to_pdf('attached.pdf'))
      @pdftk.unpack_files(path_to_pdf('attached.pdf'), path_to_pdf(nil)).should == path_to_pdf(nil)
      File.unlink(path_to_pdf('attached.pdf')).should == 1
    end
  end
end # Wrapper
Wrapper:
Fix Ruby 1.9.2 string encoding issue
Remove duplicate tests (from the end of the file)
Improve #burst test coverage
require 'spec_helper'
# Input flavors accepted by the wrapper and output flavors it can produce;
# the spec below crosses the two arrays to exercise every combination.
inputs = [:path, :hash, :file, :tempfile, :stringio]
outputs = [:path, :file, :tempfile, :stringio, :nil]
# Build a spec input of the requested flavor for +file_name+ (defaults to
# the 'fields.pdf' fixture). Relies on the +path_to_pdf+ spec helper.
def get_input(input_type, file_name = 'fields.pdf')
  source = path_to_pdf(file_name)
  case input_type
  when :path
    source
  when :hash
    { source => nil }
  when :file
    File.new(source)
  when :tempfile
    spool = Tempfile.new('input.spec')
    spool.write(File.read(source))
    spool
  when :stringio
    StringIO.new(File.read(source))
  end
end
# Create a fresh output target of the requested flavor for a wrapper call.
# :nil yields nil so the wrapper falls back to its default output; any
# unrecognized flavor also yields nil.
def get_output(output_type)
  return nil if output_type == :nil
  case output_type
  when :path     then path_to_pdf('output.spec')
  when :file     then File.new(path_to_pdf('output.spec'), 'w+')
  when :tempfile then Tempfile.new('output.spec')
  when :stringio then StringIO.new
  end
end
# Map an output flavor symbol to the class the wrapper call is expected
# to return. :nil maps to StringIO because the wrapper defaults to an
# in-memory buffer when no output is given; unknown flavors map to nil.
def map_output_type(output_specified)
  { :path     => String,
    :file     => File,
    :tempfile => Tempfile,
    :stringio => StringIO,
    :nil      => StringIO }[output_specified]
end
# Delete the artifact produced by a wrapper call: a String is treated as
# a file path, a File is unlinked via its #path. Other targets (StringIO,
# Tempfile, nil) are left alone for the GC / Tempfile finalizer.
def remove_output(output)
  case output
  when String then File.unlink(output)
  when File   then File.unlink(output.path)
  end
end
# Read the full content of +target+: a String is treated as a file path;
# IO-like objects are rewound first (when they support it) and read to
# the end, so a previously consumed stream still yields its whole body.
def open_or_rewind(target)
  return File.new(target).read if target.is_a?(String)
  target.rewind if target.respond_to?(:rewind)
  target.read
end
# Normalize PDF content for comparison: force a binary encoding on Ruby
# 1.9+ and strip volatile data (creation/mod date strings and document ID
# pairs). Mutates +text+ in place and returns it -- callers depend on the
# in-place mutation.
def cleanup_file_content(text)
  text.force_encoding('ASCII-8BIT') if text.respond_to?(:force_encoding)
  [/\(D\:.*\)/, /\[<[a-z0-9]*><[a-z0-9]*>\]/].each do |volatile|
    text.gsub!(volatile, '')
  end
  text
end
# Exercises ActivePdftk::Wrapper across the full input/output matrix built
# from the +inputs+ and +outputs+ arrays above, using the open_or_rewind /
# cleanup_file_content helpers for fixture comparison.
describe ActivePdftk::Wrapper do
  before(:all) { @pdftk = ActivePdftk::Wrapper.new }

  context "new" do
    it "should instantiate the object." do
      @pdftk.should be_an_instance_of(ActivePdftk::Wrapper)
    end

    it "should pass the defaults statements to the call instance." do
      path = ActivePdftk::Call.new.locate_pdftk
      @pdftk_opt = ActivePdftk::Wrapper.new(:path => path, :operation => {:fill_form => 'a.fdf'}, :options => { :flatten => false, :owner_pw => 'bar', :user_pw => 'baz', :encrypt => :'40bit'})
      @pdftk_opt.default_statements.should == {:path => path, :operation => {:fill_form => 'a.fdf'}, :options => { :flatten => false, :owner_pw => 'bar', :user_pw => 'baz', :encrypt => :'40bit'}}
    end
  end

  # Host examples must set @call_output and @example_expect; compares the
  # produced bytes verbatim against the fixture.
  shared_examples "a working command" do
    it "should return a #{@output.nil? ? StringIO : @output.class}" do
      @call_output.should be_kind_of(@output.nil? ? StringIO : @output.class)
    end

    it "should return expected data" do
      open_or_rewind(@call_output).should == @example_expect
    end

    after(:each) { remove_output(@call_output) }
  end

  # Like "a working command" but strips volatile PDF data from both sides
  # before comparing (cleanup_file_content mutates its argument in place).
  shared_examples "a combination command" do
    it "should return a #{@output.nil? ? StringIO : @output.class}" do
      @call_output.should be_kind_of(@output.nil? ? StringIO : @output.class)
    end

    it "should return expected data" do
      cleanup_file_content(@example_expect)
      text = open_or_rewind(@call_output)
      cleanup_file_content(text)
      text.should == @example_expect
    end

    after(:each) { remove_output(@call_output) }
  end

  # Run every wrapper operation once per (input flavor, output flavor) pair.
  inputs.each do |input_type|
    outputs.each do |output_type|
      context "(Input:#{input_type}|Output:#{output_type})" do
        before :each do
          @input = get_input(input_type)
          @input.rewind rescue nil # rewind if possible.
          @output = get_output(output_type)
        end

        describe "#dump_data_fields" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.data_fields')).read }
            before(:each) { @call_output = @pdftk.dump_data_fields(@input, :output => @output) }
          end
        end

        describe "#fill_form" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.fill_form.pdf')).read }
            before(:each) { @call_output = @pdftk.fill_form(@input, path_to_pdf('fields.fdf.spec'), :output => @output) }
          end
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.fill_form.pdf')).read }
            before(:each) { @call_output = @pdftk.fill_form(@input, path_to_pdf('fields.xfdf.spec'), :output => @output) }
          end
        end

        describe "#generate_fdf" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.fdf')).read }
            before(:each) { @call_output = @pdftk.generate_fdf(@input,:output => @output) }
          end
        end

        describe "#dump_data" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.data')).read }
            before(:each) { @call_output = @pdftk.dump_data(@input,:output => @output) }
          end
        end

        describe "#update_info" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.update_info.pdf')).read }
            before(:each) { @call_output = @pdftk.update_info(@input, path_to_pdf('fields.data.spec'), :output => @output) }
          end
        end

        describe "#attach_files" do
          before(:all) { @attachment_size = File.size(path_to_pdf('attached_file.txt')) }
          before(:each) { @call_output = @pdftk.attach_files(@input, [path_to_pdf('attached_file.txt')], :output => @output) }

          # Size check only: the output must be at least input + attachment.
          it "should bind the file ine the pdf" do
            if @call_output.is_a?(String)
              output_size = File.size(@call_output)
            else
              @call_output.rewind
              t = Tempfile.new('attachment_output')
              t.write(@call_output.read)
              output_size = File.size(t.path)
              t.close
            end
            if @input.is_a?(String)
              input_size = File.size(@input)
            elsif @input.is_a?(Hash)
              input_size = 0
              @input.each do |file_path, name|
                input_size += File.size(file_path)
              end
            else
              @input.rewind
              t = Tempfile.new('attachment_input')
              t.write(@input.read)
              input_size = File.size(t.path)
              t.close
            end
            total_size = input_size + @attachment_size
            output_size.should >= total_size
          end

          it "should output the correct type" do
            @call_output.should be_kind_of(map_output_type(output_type))
          end
        end

        describe "#unpack_files to path", :if => output_type == :path do
          before(:each) do
            @input = get_input(input_type, 'fields.unpack_files.pdf')
            @input.rewind rescue nil # rewind if possible.
            @output = path_to_pdf('')
            @call_output = @pdftk.unpack_files(@input, @output)
          end
          it "should unpack the files" do
            @call_output.should == @output
            File.unlink(path_to_pdf('unpacked_file.txt')).should == 1
          end
        end

        describe "#unpack_files to tmp dir", :if => output_type == :nil do
          before(:each) do
            @input = get_input(input_type, 'fields.unpack_files.pdf')
            @input.rewind rescue nil # rewind if possible.
            @call_output = @pdftk.unpack_files(@input, @output)
          end
          it "should unpack the files" do
            @call_output.should == Dir.tmpdir
            File.unlink(File.join(Dir.tmpdir, 'unpacked_file.txt')).should == 1
          end
        end

        describe "#background" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.background.pdf')).read }
            before(:each) { @call_output = @pdftk.background(@input, path_to_pdf('a.pdf'), :output => @output) }
          end
          pending "spec multibackground also"
        end

        describe "#stamp" do
          it_behaves_like "a working command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.stamp.pdf')).read }
            before(:each) { @call_output = @pdftk.stamp(@input, path_to_pdf('a.pdf'), :output => @output) }
          end
          pending "check if the output is really a stamp & spec multistamp also"
        end

        describe "#cat" do
          it_behaves_like "a combination command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.cat.pdf')).read }
            before(:each) { @call_output = @pdftk.cat([{:pdf => path_to_pdf('a.pdf')}, {:pdf => path_to_pdf('b.pdf'), :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}], :output => @output) }
          end
        end

        describe "#shuffle" do
          it_behaves_like "a combination command" do
            before(:all) { @example_expect = File.new(path_to_pdf('fields.shuffle.pdf')).read }
            before(:each) { @call_output = @pdftk.shuffle([{:pdf => path_to_pdf('a.pdf')}, {:pdf => path_to_pdf('b.pdf'), :start => 1, :end => 'end', :orientation => 'N', :pages => 'even'}], :output => @output) }
          end
        end

        describe "#burst", :if => output_type == :path do
          before(:each) do
            @input = get_input(input_type, 'a.pdf')
            @input.rewind rescue nil # rewind if possible.
          end
          it "should file into single pages" do
            output = path_to_pdf('pg_%04d.pdf')
            @pdftk.burst(@input, :output => output).should == output
            File.unlink(path_to_pdf('pg_0001.pdf')).should == 1
            File.unlink(path_to_pdf('pg_0002.pdf')).should == 1
            File.unlink(path_to_pdf('pg_0003.pdf')).should == 1
          end
        end

        describe "#burst to tmp dir", :if => output_type == :nil do
          before(:each) do
            @input = get_input(input_type, 'a.pdf')
            @input.rewind rescue nil # rewind if possible.
          end
          it "should file into single pages" do
            @pdftk.burst(@input).should == Dir.tmpdir
            File.unlink(File.join(Dir.tmpdir, 'pg_0001.pdf')).should == 1
            File.unlink(File.join(Dir.tmpdir, 'pg_0002.pdf')).should == 1
            File.unlink(File.join(Dir.tmpdir, 'pg_0003.pdf')).should == 1
          end
          it "should put a file in the system tmpdir when no output location given but a page name format given" do
            @pdftk.burst(@input, :output => 'page_%02d.pdf').should == 'page_%02d.pdf'
            File.unlink(File.join(Dir.tmpdir, 'page_01.pdf')).should == 1
            File.unlink(File.join(Dir.tmpdir, 'page_02.pdf')).should == 1
            File.unlink(File.join(Dir.tmpdir, 'page_03.pdf')).should == 1
          end
        end
      end
    end # each outputs
  end # each inputs

  context "burst" do
    it "should call #pdtk on @call" do
      pending "integration of Call receiver tests in looping strategy for all operations."
      #ActivePdftk::Call.any_instance.should_receive(:pdftk).with({:input => path_to_pdf('fields.pdf'), :operation => :burst})
      #@pdftk.burst(path_to_pdf('fields.pdf'))
      #@pdftk = ActivePdftk::Wrapper.new
      #ActivePdftk::Call.any_instance.should_receive(:pdftk).with({:input => path_to_pdf('fields.pdf'), :operation => :burst, :options => {:encrypt => :'40bit'}})
      #@pdftk.burst(path_to_pdf('fields.pdf'), :options => {:encrypt => :'40bit'})
    end
  end
end # Wrapper
# Rails generators are not loaded automatically outside a full Rails boot,
# so pull them in explicitly before subclassing Rails::Generators::Base.
require "rails/generators"

module ErrSupply
  module Generators
    # Installs the plugin's javascript assets into the host application.
    class InstallGenerator < Rails::Generators::Base
      source_root File.expand_path('../templates', __FILE__)

      # Copies the bundled qTip plugin and the err_supply driver into
      # public/javascripts so views can include them.
      def add_javascript_files
        copy_file "jquery.qtip-1.0.0-rc3.min.js", "public/javascripts/jquery.qtip-1.0.0-rc3.min.js"
        copy_file "err_supply.js", "public/javascripts/err_supply.js"
      end
    end
  end
end
Requiring rails generators.
require "rails/generators"
module ErrSupply
  module Generators
    # Installs the plugin's javascript assets into the host application.
    class InstallGenerator < Rails::Generators::Base
      source_root File.expand_path('../templates', __FILE__)

      # Copies each bundled script into public/javascripts under its
      # original file name (qTip plugin first, then the err_supply driver).
      def add_javascript_files
        %w[jquery.qtip-1.0.0-rc3.min.js err_supply.js].each do |script|
          copy_file script, "public/javascripts/#{script}"
        end
      end
    end
  end
end
require 'helper'
# Verifies Slim's evaluation of Ruby code embedded in attributes and tag
# content. Slim source snippets are kept at column 0 inside %q{} literals
# because Slim is whitespace-sensitive.
class TestSlimCodeEvaluation < TestSlim
  def test_render_with_call_to_set_attributes
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world
}
    assert_html '<p class="hello world" id="notice">Hello World from @env</p>', source
  end

  def test_render_with_call_to_set_custom_attributes
    source = %q{
p data-id="#{id_helper}" data-class="hello world"
= hello_world
}
    assert_html '<p data-class="hello world" data-id="notice">Hello World from @env</p>', source
  end

  def test_render_with_call_to_set_attributes_and_call_to_set_content
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world
}
    assert_html '<p class="hello world" id="notice">Hello World from @env</p>', source
  end

  def test_render_with_parameterized_call_to_set_attributes_and_call_to_set_content
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world("Hello Ruby!")
}
    assert_html '<p class="hello world" id="notice">Hello Ruby!</p>', source
  end

  def test_render_with_spaced_parameterized_call_to_set_attributes_and_call_to_set_content
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world "Hello Ruby!"
}
    assert_html '<p class="hello world" id="notice">Hello Ruby!</p>', source
  end

  def test_render_with_spaced_parameterized_call_to_set_attributes_and_call_to_set_content_2
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world "Hello Ruby!", dummy: "value"
}
    assert_html '<p class="hello world" id="notice">Hello Ruby!dummy value</p>', source
  end

  def test_hash_call_in_attribute
    source = %q{
p id="#{hash[:a]}" Test it
}
    assert_html '<p id="The letter a">Test it</p>', source
  end

  def test_instance_variable_in_attribute_without_quotes
    source = %q{
p id=@var
}
    assert_html '<p id="instance"></p>', source
  end

  def test_method_call_in_attribute_without_quotes
    source = %q{
form action=action_path(:page, :save) method='post'
}
    assert_html '<form action="/action-page-save" method="post"></form>', source
  end

  def test_ruby_attribute_with_unbalanced_delimiters
    source = %q{
div crazy=action_path('[') id="crazy_delimiters"
}
    assert_html '<div crazy="/action-[" id="crazy_delimiters"></div>', source
  end

  def test_method_call_in_delimited_attribute_without_quotes
    source = %q{
form(action=action_path(:page, :save) method='post')
}
    assert_html '<form action="/action-page-save" method="post"></form>', source
  end

  def test_method_call_in_delimited_attribute_without_quotes2
    source = %q{
form(method='post' action=action_path(:page, :save))
}
    assert_html '<form action="/action-page-save" method="post"></form>', source
  end

  def test_hash_call_in_attribute_without_quotes
    source = %q{
p id=hash[:a] Test it
}
    assert_html '<p id="The letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute
    source = %q{
p(id=hash[:a]) Test it
}
    assert_html '<p id="The letter a">Test it</p>', source
  end

  def test_hash_call_in_attribute_with_ruby_evaluation
    source = %q{
p id=(hash[:a] + hash[:a]) Test it
}
    assert_html '<p id="The letter aThe letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute_with_ruby_evaluation
    source = %q{
p(id=(hash[:a] + hash[:a])) Test it
}
    assert_html '<p id="The letter aThe letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute_with_ruby_evaluation_2
    source = %q{
p[id=(hash[:a] + hash[:a])] Test it
}
    assert_html '<p id="The letter aThe letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute_with_ruby_evaluation_3
    source = %q{
p(id=(hash[:a] + hash[:a]) class=hash[:a]) Test it
}
    assert_html '<p class="The letter a" id="The letter aThe letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute_with_ruby_evaluation_4_
    source = %q{
p(id=hash[:a] class=hash[:a]) Test it
}
    assert_html '<p class="The letter a" id="The letter a">Test it</p>', source
  end

  def test_computation_in_attribute
    source = %q{
p id=(1 + 1)*5 Test it
}
    assert_html '<p id="10">Test it</p>', source
  end

  def test_code_attribute_does_not_modify_argument
    # OpenStruct is not required by the test helper; load it here so this
    # test does not depend on another file having pulled in 'ostruct'.
    require 'ostruct'
    template = 'span class=attribute'
    model = OpenStruct.new(attribute: [:a, :b, [:c, :d]])
    output = Slim::Template.new { template }.render(model)
    assert_equal('<span class="a b c d"></span>', output)
    # Rendering must not flatten the model's array in place.
    assert_equal([:a, :b, [:c, :d]], model.attribute)
  end

  def test_number_type_interpolation
    source = %q{
p = output_number
}
    assert_html '<p>1337</p>', source
  end
end
Add missing require 'ostruct' in a test (#893)
require 'helper'
# Verifies Slim's evaluation of Ruby code embedded in attributes and tag
# content. Slim source snippets are kept at column 0 inside %q{} literals
# because Slim is whitespace-sensitive.
class TestSlimCodeEvaluation < TestSlim
  def test_render_with_call_to_set_attributes
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world
}
    assert_html '<p class="hello world" id="notice">Hello World from @env</p>', source
  end

  def test_render_with_call_to_set_custom_attributes
    source = %q{
p data-id="#{id_helper}" data-class="hello world"
= hello_world
}
    assert_html '<p data-class="hello world" data-id="notice">Hello World from @env</p>', source
  end

  def test_render_with_call_to_set_attributes_and_call_to_set_content
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world
}
    assert_html '<p class="hello world" id="notice">Hello World from @env</p>', source
  end

  def test_render_with_parameterized_call_to_set_attributes_and_call_to_set_content
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world("Hello Ruby!")
}
    assert_html '<p class="hello world" id="notice">Hello Ruby!</p>', source
  end

  def test_render_with_spaced_parameterized_call_to_set_attributes_and_call_to_set_content
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world "Hello Ruby!"
}
    assert_html '<p class="hello world" id="notice">Hello Ruby!</p>', source
  end

  def test_render_with_spaced_parameterized_call_to_set_attributes_and_call_to_set_content_2
    source = %q{
p id="#{id_helper}" class="hello world" = hello_world "Hello Ruby!", dummy: "value"
}
    assert_html '<p class="hello world" id="notice">Hello Ruby!dummy value</p>', source
  end

  def test_hash_call_in_attribute
    source = %q{
p id="#{hash[:a]}" Test it
}
    assert_html '<p id="The letter a">Test it</p>', source
  end

  def test_instance_variable_in_attribute_without_quotes
    source = %q{
p id=@var
}
    assert_html '<p id="instance"></p>', source
  end

  def test_method_call_in_attribute_without_quotes
    source = %q{
form action=action_path(:page, :save) method='post'
}
    assert_html '<form action="/action-page-save" method="post"></form>', source
  end

  def test_ruby_attribute_with_unbalanced_delimiters
    source = %q{
div crazy=action_path('[') id="crazy_delimiters"
}
    assert_html '<div crazy="/action-[" id="crazy_delimiters"></div>', source
  end

  def test_method_call_in_delimited_attribute_without_quotes
    source = %q{
form(action=action_path(:page, :save) method='post')
}
    assert_html '<form action="/action-page-save" method="post"></form>', source
  end

  def test_method_call_in_delimited_attribute_without_quotes2
    source = %q{
form(method='post' action=action_path(:page, :save))
}
    assert_html '<form action="/action-page-save" method="post"></form>', source
  end

  def test_hash_call_in_attribute_without_quotes
    source = %q{
p id=hash[:a] Test it
}
    assert_html '<p id="The letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute
    source = %q{
p(id=hash[:a]) Test it
}
    assert_html '<p id="The letter a">Test it</p>', source
  end

  def test_hash_call_in_attribute_with_ruby_evaluation
    source = %q{
p id=(hash[:a] + hash[:a]) Test it
}
    assert_html '<p id="The letter aThe letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute_with_ruby_evaluation
    source = %q{
p(id=(hash[:a] + hash[:a])) Test it
}
    assert_html '<p id="The letter aThe letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute_with_ruby_evaluation_2
    source = %q{
p[id=(hash[:a] + hash[:a])] Test it
}
    assert_html '<p id="The letter aThe letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute_with_ruby_evaluation_3
    source = %q{
p(id=(hash[:a] + hash[:a]) class=hash[:a]) Test it
}
    assert_html '<p class="The letter a" id="The letter aThe letter a">Test it</p>', source
  end

  def test_hash_call_in_delimited_attribute_with_ruby_evaluation_4_
    source = %q{
p(id=hash[:a] class=hash[:a]) Test it
}
    assert_html '<p class="The letter a" id="The letter a">Test it</p>', source
  end

  def test_computation_in_attribute
    source = %q{
p id=(1 + 1)*5 Test it
}
    assert_html '<p id="10">Test it</p>', source
  end

  def test_code_attribute_does_not_modify_argument
    # Loaded locally so this test does not depend on another file having
    # pulled in 'ostruct' (see upstream issue #893).
    require 'ostruct'
    template = 'span class=attribute'
    model = OpenStruct.new(attribute: [:a, :b, [:c, :d]])
    output = Slim::Template.new { template }.render(model)
    assert_equal('<span class="a b c d"></span>', output)
    # Rendering must not flatten the model's array in place.
    assert_equal([:a, :b, [:c, :d]], model.attribute)
  end

  def test_number_type_interpolation
    source = %q{
p = output_number
}
    assert_html '<p>1337</p>', source
  end
end
|
$:.unshift 'lib'
require 'snuggie/version'
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = 'snuggie'
s.version = Snuggie::Version
s.date = Time.now.strftime('%Y-%m-%d')
s.summary = 'Snuggie wraps the Softaculous API in a warm, loving embrace'
s.homepage = 'https://github.com/site5/snuggie'
s.authors = ['Joshua Priddle']
s.email = 'jpriddle@nevercraft.net'
s.files = %w[ Rakefile README.markdown ]
s.files += Dir['lib/**/*']
s.files += Dir['test/**/*']
s.extra_rdoc_files = ['README.markdown']
s.rdoc_options = ["--charset=UTF-8"]
s.add_dependency 'php-serialize', '~> 1.1.0'
s.add_development_dependency 'rake', '~> 0.8.7'
s.add_development_dependency 'fakeweb', '~> 1.3.0'
s.description = <<-DESC
Snuggie wraps the Softaculous API in a warm, loving embrace.
DESC
end
Fix email in gemspec
$:.unshift 'lib'
require 'snuggie/version'
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = 'snuggie'
s.version = Snuggie::Version
s.date = Time.now.strftime('%Y-%m-%d')
s.summary = 'Snuggie wraps the Softaculous API in a warm, loving embrace'
s.homepage = 'https://github.com/site5/snuggie'
s.authors = ['Joshua Priddle']
s.email = 'jpriddle@site5.com'
s.files = %w[ Rakefile README.markdown ]
s.files += Dir['lib/**/*']
s.files += Dir['test/**/*']
s.extra_rdoc_files = ['README.markdown']
s.rdoc_options = ["--charset=UTF-8"]
s.add_dependency 'php-serialize', '~> 1.1.0'
s.add_development_dependency 'rake', '~> 0.8.7'
s.add_development_dependency 'fakeweb', '~> 1.3.0'
s.description = <<-DESC
Snuggie wraps the Softaculous API in a warm, loving embrace.
DESC
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "format_validators"
s.version = "0.0.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jeremiah Hemphill"]
s.date = "2012-03-21"
s.description = "Complex format validators"
s.email = "jeremiah@cloudspace.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
".rspec",
"CHANGELOG",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"app/validators/florida_counties_validator.rb",
"app/validators/ssn_format_validator.rb",
"format_validators.gemspec",
"lib/format_validators.rb",
"lib/format_validators/engine.rb",
"lib/format_validators/railtie.rb",
"spec/dummy/Rakefile",
"spec/dummy/app/controllers/application_controller.rb",
"spec/dummy/app/helpers/application_helper.rb",
"spec/dummy/app/models/building.rb",
"spec/dummy/app/views/layouts/application.html.erb",
"spec/dummy/config.ru",
"spec/dummy/config/application.rb",
"spec/dummy/config/boot.rb",
"spec/dummy/config/database.yml",
"spec/dummy/config/environment.rb",
"spec/dummy/config/environments/development.rb",
"spec/dummy/config/environments/production.rb",
"spec/dummy/config/environments/test.rb",
"spec/dummy/config/initializers/backtrace_silencers.rb",
"spec/dummy/config/initializers/inflections.rb",
"spec/dummy/config/initializers/mime_types.rb",
"spec/dummy/config/initializers/secret_token.rb",
"spec/dummy/config/initializers/session_store.rb",
"spec/dummy/config/locales/en.yml",
"spec/dummy/config/routes.rb",
"spec/dummy/db/development.sqlite3",
"spec/dummy/db/migrate/20120321134932_create_buildings.rb",
"spec/dummy/db/schema.rb",
"spec/dummy/db/test.sqlite3",
"spec/dummy/public/404.html",
"spec/dummy/public/422.html",
"spec/dummy/public/500.html",
"spec/dummy/public/favicon.ico",
"spec/dummy/public/javascripts/application.js",
"spec/dummy/public/javascripts/controls.js",
"spec/dummy/public/javascripts/dragdrop.js",
"spec/dummy/public/javascripts/effects.js",
"spec/dummy/public/javascripts/prototype.js",
"spec/dummy/public/javascripts/rails.js",
"spec/dummy/public/stylesheets/.gitkeep",
"spec/dummy/script/rails",
"spec/format_validators_spec.rb",
"spec/spec_helper.rb",
"spec/support/basic_record.rb",
"spec/validators/florida_counties_integration_spec.rb",
"spec/validators/florida_counties_spec.rb",
"spec/validators/ssn_format_validator_spec.rb"
]
s.homepage = "http://github.com/jeremiahishere/format_validators"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.15"
s.summary = "Complex format validators"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rails>, ["~> 3.0.7"])
s.add_development_dependency(%q<capybara>, [">= 0.4.0"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
s.add_development_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_development_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_development_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_development_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_development_dependency(%q<ZenTest>, [">= 0"])
s.add_development_dependency(%q<autotest-rails>, [">= 0"])
else
s.add_dependency(%q<rails>, ["~> 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
end
else
s.add_dependency(%q<rails>, ["~> 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
end
end
Regenerate gemspec for version 0.0.4
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "format_validators"
s.version = "0.0.4"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jeremiah Hemphill"]
s.date = "2012-03-21"
s.description = "Complex format validators"
s.email = "jeremiah@cloudspace.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
".rspec",
"CHANGELOG",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"app/validators/currency_format_validator.rb",
"app/validators/florida_counties_validator.rb",
"app/validators/ssn_format_validator.rb",
"format_validators.gemspec",
"lib/format_validators.rb",
"lib/format_validators/engine.rb",
"lib/format_validators/railtie.rb",
"spec/dummy/Rakefile",
"spec/dummy/app/controllers/application_controller.rb",
"spec/dummy/app/helpers/application_helper.rb",
"spec/dummy/app/models/building.rb",
"spec/dummy/app/views/layouts/application.html.erb",
"spec/dummy/config.ru",
"spec/dummy/config/application.rb",
"spec/dummy/config/boot.rb",
"spec/dummy/config/database.yml",
"spec/dummy/config/environment.rb",
"spec/dummy/config/environments/development.rb",
"spec/dummy/config/environments/production.rb",
"spec/dummy/config/environments/test.rb",
"spec/dummy/config/initializers/backtrace_silencers.rb",
"spec/dummy/config/initializers/inflections.rb",
"spec/dummy/config/initializers/mime_types.rb",
"spec/dummy/config/initializers/secret_token.rb",
"spec/dummy/config/initializers/session_store.rb",
"spec/dummy/config/locales/en.yml",
"spec/dummy/config/routes.rb",
"spec/dummy/db/development.sqlite3",
"spec/dummy/db/migrate/20120321134932_create_buildings.rb",
"spec/dummy/db/schema.rb",
"spec/dummy/db/test.sqlite3",
"spec/dummy/public/404.html",
"spec/dummy/public/422.html",
"spec/dummy/public/500.html",
"spec/dummy/public/favicon.ico",
"spec/dummy/public/javascripts/application.js",
"spec/dummy/public/javascripts/controls.js",
"spec/dummy/public/javascripts/dragdrop.js",
"spec/dummy/public/javascripts/effects.js",
"spec/dummy/public/javascripts/prototype.js",
"spec/dummy/public/javascripts/rails.js",
"spec/dummy/public/stylesheets/.gitkeep",
"spec/dummy/script/rails",
"spec/format_validators_spec.rb",
"spec/spec_helper.rb",
"spec/support/basic_record.rb",
"spec/validators/currency_format_validator_spec.rb",
"spec/validators/florida_counties_integration_spec.rb",
"spec/validators/florida_counties_spec.rb",
"spec/validators/ssn_format_validator_spec.rb"
]
s.homepage = "http://github.com/jeremiahishere/format_validators"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.15"
s.summary = "Complex format validators"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rails>, ["~> 3.0.7"])
s.add_development_dependency(%q<capybara>, [">= 0.4.0"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
s.add_development_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_development_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_development_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_development_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_development_dependency(%q<ZenTest>, [">= 0"])
s.add_development_dependency(%q<autotest-rails>, [">= 0"])
else
s.add_dependency(%q<rails>, ["~> 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
end
else
s.add_dependency(%q<rails>, ["~> 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
end
end
|
require 'coderay'
path = File.join File.expand_path(File.dirname(__FILE__))
require File.join(path, "coderay/scanners/bash.rb")
require File.join(path, "coderay/scanners/erb_bash.rb")
Register sh filetype
require 'coderay'
path = File.join File.expand_path(File.dirname(__FILE__))
require File.join(path, "coderay/scanners/bash.rb")
require File.join(path, "coderay/scanners/erb_bash.rb")
# Register file types
::CodeRay::FileType::TypeFromExt['sh'] = :bash
|
module CommentTags
include Radiant::Taggable
desc "Provides tags and behaviors to support comments in Radiant."
desc %{
Renders the contained elements if comments are enabled on the page.
}
tag "if_enable_comments" do |tag|
tag.expand if (tag.locals.page.enable_comments?)
end
# makes more sense to me
tag "if_comments_enabled" do |tag|
tag.expand if (tag.locals.page.enable_comments?)
end
desc %{
Renders the contained elements unless comments are enabled on the page.
}
tag "unless_enable_comments" do |tag|
tag.expand unless (tag.locals.page.enable_comments?)
end
# makes more sense to me
tag "unless_comments_enabled" do |tag|
tag.expand unless (tag.locals.page.enable_comments?)
end
desc %{
Renders the contained elements if the page has comments.
}
tag "if_comments" do |tag|
tag.expand if tag.locals.page.has_visible_comments?
end
desc %{
Renders the contained elements unless the page has comments.
}
tag "unless_comments" do |tag|
tag.expand unless tag.locals.page.has_visible_comments?
end
desc %{
Renders the contained elements if the page has comments _or_ comment is enabled on it.
}
tag "if_comments_or_enable_comments" do |tag|
tag.expand if(tag.locals.page.has_visible_comments? || tag.locals.page.enable_comments?)
end
desc %{
Gives access to comment-related tags
}
tag "comments" do |tag|
comments = tag.locals.page.approved_comments
tag.expand
end
desc %{
Cycles through each comment and renders the enclosed tags for each.
}
tag "comments:each" do |tag|
page = tag.locals.page
comments = page.approved_comments.to_a
comments << page.selected_comment if page.selected_comment && page.selected_comment.unapproved?
result = []
comments.each_with_index do |comment, index|
tag.locals.comment = comment
tag.locals.index = index
result << tag.expand
end
result
end
desc %{
Gives access to the particular fields for each comment.
}
tag "comments:field" do |tag|
tag.expand
end
desc %{
Renders the index number for this comment.
}
tag 'comments:field:index' do |tag|
tag.locals.index + 1
end
%w(id author author_email author_url content content_html filter_id rating).each do |field|
desc %{ Print the value of the #{field} field for this comment. }
tag "comments:field:#{field}" do |tag|
options = tag.attr.dup
#options.inspect
value = tag.locals.comment.send(field)
return value[7..-1] if field == 'author_url' && value[0,7]=='http://'
value
end
end
desc %{
Renders the date a comment was created.
*Usage:*
<pre><code><r:date [format="%A, %B %d, %Y"] /></code></pre>
}
tag 'comments:field:date' do |tag|
comment = tag.locals.comment
format = (tag.attr['format'] || '%A, %B %d, %Y')
date = comment.created_at
date.strftime(format)
end
desc %{
Renders a link if there's an author_url, otherwise just the author's name.
}
tag "comments:field:author_link" do |tag|
if tag.locals.comment.author_url.blank?
tag.locals.comment.author
else
%(<a href="http://#{tag.locals.comment.author_url}">#{tag.locals.comment.author}</a>)
end
end
desc %{
Renders the contained elements if the comment has an author_url specified.
}
tag "comments:field:if_author_url" do |tag|
tag.expand unless tag.locals.comment.author_url.blank?
end
desc %{
Renders the contained elements if the comment is selected - that is, if it is a comment
the user has just posted
}
tag "comments:field:if_selected" do |tag|
tag.expand if tag.locals.comment == tag.locals.page.selected_comment
end
desc %{
Renders the contained elements if the comment has been approved
}
tag "comments:field:if_approved" do |tag|
tag.expand if tag.locals.comment.approved?
end
desc %{
Renders the containing markup for each score of the rating
Example:
<r:comments:field:rating_empty_star>*</r:comments:field:rating_empty_star>
}
tag 'comments:field:rating_empty_star' do |tag|
@empty_star = tag.expand
''
end
desc %{
Renders the containing markup for each score of the rating
Example:
<r:comments:field:rating_full_star>*</r:comments:field:rating_full_star>
}
tag 'comments:field:rating_full_star' do |tag|
rating = tag.locals.comment.rating || Comment::MIN_RATING
markup = ''
rating.times { markup << tag.expand }
(Comment::MAX_RATING - rating).times { markup << @empty_star.to_s }
markup
end
desc %{
Renders the contained elements if the comment has not been approved
}
tag "comments:field:unless_approved" do |tag|
tag.expand unless tag.locals.comment.approved?
end
desc %{
Renders a Gravatar URL for the author of the comment.
}
tag "comments:field:gravatar_url" do |tag|
email = tag.locals.comment.author_email
size = tag.attr['size']
format = tag.attr['format']
rating = tag.attr['rating']
default = tag.attr['default']
md5 = Digest::MD5.hexdigest(email)
returning "http://www.gravatar.com/avatar/#{md5}" do |url|
url << ".#{format.downcase}" if format
if size || rating || default
attrs = []
attrs << "s=#{size}" if size
attrs << "d=#{default}" if default
attrs << "r=#{rating.downcase}" if rating
url << "?#{attrs.join('&')}"
end
end
end
desc %{
Renders a comment form.
*Usage:*
<r:comment:form [class="comments" id="comment_form"]>...</r:comment:form>
}
tag "comments:form" do |tag|
attrs = tag.attr.symbolize_keys
html_class, html_id = attrs[:class], attrs[:id]
r = %Q{ <form action="#{tag.locals.page.url}comments}
r << %Q{##{html_id}} unless html_id.blank?
r << %{" method="post" } #comlpete the quotes for the action
r << %{ id="#{html_id}" } unless html_id.blank?
r << %{ class="#{html_class}" } unless html_class.blank?
r << '>' #close the form element
r << tag.expand
r << %{</form>}
r
end
tag 'comments:error' do |tag|
if comment = tag.locals.page.last_comment
if on = tag.attr['on']
if error = comment.errors.on(on)
tag.locals.error_message = error
tag.expand
end
else
tag.expand if !comment.valid?
end
end
end
tag 'comments:error:message' do |tag|
tag.locals.error_message
end
%w(text password hidden).each do |type|
desc %{Builds a #{type} form field for comments.}
tag "comments:#{type}_field_tag" do |tag|
attrs = tag.attr.symbolize_keys
r = %{<input type="#{type}"}
r << %{ id="comment_#{attrs[:name]}"}
r << %{ name="comment[#{attrs[:name]}]"}
r << %{ class="#{attrs[:class]}"} if attrs[:class]
if value = (tag.locals.page.last_comment ? tag.locals.page.last_comment.send(attrs[:name]) : attrs[:value])
r << %{ value="#{value}" }
end
r << %{ />}
end
end
%w(submit reset).each do |type|
desc %{Builds a #{type} form button for comments.}
tag "comments:#{type}_tag" do |tag|
attrs = tag.attr.symbolize_keys
r = %{<input type="#{type}"}
r << %{ id="#{attrs[:name]}"}
r << %{ name="#{attrs[:name]}"}
r << %{ class="#{attrs[:class]}"} if attrs[:class]
r << %{ value="#{attrs[:value]}" } if attrs[:value]
r << %{ />}
end
end
desc %{Builds a text_area form field for comments.}
tag "comments:text_area_tag" do |tag|
attrs = tag.attr.symbolize_keys
r = %{<textarea}
r << %{ id="comment_#{attrs[:name]}"}
r << %{ name="comment[#{attrs[:name]}]"}
r << %{ class="#{attrs[:class]}"} if attrs[:class]
r << %{ rows="#{attrs[:rows]}"} if attrs[:rows]
r << %{ cols="#{attrs[:cols]}"} if attrs[:cols]
r << %{>}
if content = (tag.locals.page.last_comment ? tag.locals.page.last_comment.send(attrs[:name]) : attrs[:content])
r << content
end
r << %{</textarea>}
end
desc %{Build a drop_box form field for the filters avaiable.}
tag "comments:filter_box_tag" do |tag|
attrs = tag.attr.symbolize_keys
value = attrs.delete(:value)
name = attrs.delete(:name)
r = %{<select name="comment[#{name}]"}
unless attrs.empty?
r << " "
r << attrs.map {|k,v| %Q(#{k}="#{v}") }.join(" ")
end
r << %{>}
TextFilter.descendants.each do |filter|
r << %{<option value="#{filter.filter_name}"}
r << %{ selected="selected"} if value == filter.filter_name
r << %{>#{filter.filter_name}</option>}
end
r << %{</select>}
end
desc %{Builds a series of input tags to input a rating
*Usage:*
<pre><code><r:comments:ratings_tag [class="myclass"] [disabled="disabled"] /></code></pre>
}
tag 'comments:ratings_tag' do |tag|
module TagCreator
# Hack, simply including modules in CommentTags didn't work
extend ActionView::Helpers::TagHelper
extend ActionView::Helpers::FormTagHelper
end
returning '' do |markup|
(Comment::MIN_RATING...Comment::MAX_RATING).each do |rating|
markup << TagCreator.radio_button_tag('comment[rating]', rating+1, false, tag.attr)
end
end
end
desc %{Prints the number of comments. }
tag "comments:count" do |tag|
tag.locals.page.approved_comments.count
end
tag "recent_comments" do |tag|
tag.expand
end
desc %{Returns the last [limit] comments throughout the site.
*Usage:*
<pre><code><r:recent_comments:each [limit="10"]>...</r:recent_comments:each></code></pre>
}
tag "recent_comments:each" do |tag|
limit = tag.attr['limit'] || 10
comments = Comment.find(:all, :conditions => "comments.approved_at IS NOT NULL", :order => "created_at DESC", :limit => limit)
result = []
comments.each_with_index do |comment, index|
tag.locals.comment = comment
tag.locals.index = index
tag.locals.page = comment.page
result << tag.expand
end
result
end
desc %{
Use this to prevent spam bots from filling your site with spam.
*Usage:*
<pre><code>What day comes after Monday? <r:comments:spam_answer_tag answer="Tuesday" /></code></pre>
}
tag "comments:spam_answer_tag" do |tag|
attrs = tag.attr.symbolize_keys
valid_spam_answer = attrs[:answer] || 'hemidemisemiquaver'
r = %{<input type="text" id="comment_spam_answer" name="comment[spam_answer]"}
r << %{ class="#{attrs[:class]}"} if attrs[:class]
if value = (tag.locals.page.last_comment ? tag.locals.page.last_comment.send(:spam_answer) : '')
r << %{ value="#{value}" }
end
r << %{ />}
r << %{<input type="hidden" name="comment[valid_spam_answer]" value="#{valid_spam_answer}" />}
end
end
don't include selected comment in current comments
module CommentTags
include Radiant::Taggable
desc "Provides tags and behaviors to support comments in Radiant."
desc %{
Renders the contained elements if comments are enabled on the page.
}
tag "if_enable_comments" do |tag|
tag.expand if (tag.locals.page.enable_comments?)
end
# makes more sense to me
tag "if_comments_enabled" do |tag|
tag.expand if (tag.locals.page.enable_comments?)
end
desc %{
Renders the contained elements unless comments are enabled on the page.
}
tag "unless_enable_comments" do |tag|
tag.expand unless (tag.locals.page.enable_comments?)
end
# makes more sense to me
tag "unless_comments_enabled" do |tag|
tag.expand unless (tag.locals.page.enable_comments?)
end
desc %{
Renders the contained elements if the page has comments.
}
tag "if_comments" do |tag|
tag.expand if tag.locals.page.has_visible_comments?
end
desc %{
Renders the contained elements unless the page has comments.
}
tag "unless_comments" do |tag|
tag.expand unless tag.locals.page.has_visible_comments?
end
desc %{
Renders the contained elements if the page has comments _or_ comment is enabled on it.
}
tag "if_comments_or_enable_comments" do |tag|
tag.expand if(tag.locals.page.has_visible_comments? || tag.locals.page.enable_comments?)
end
desc %{
Gives access to comment-related tags
}
tag "comments" do |tag|
comments = tag.locals.page.approved_comments
tag.expand
end
desc %{
Cycles through each comment and renders the enclosed tags for each.
}
tag "comments:each" do |tag|
page = tag.locals.page
comments = page.approved_comments.to_a
result = []
comments.each_with_index do |comment, index|
tag.locals.comment = comment
tag.locals.index = index
result << tag.expand
end
result
end
desc %{
Gives access to the particular fields for each comment.
}
tag "comments:field" do |tag|
tag.expand
end
desc %{
Renders the index number for this comment.
}
tag 'comments:field:index' do |tag|
tag.locals.index + 1
end
%w(id author author_email author_url content content_html filter_id rating).each do |field|
desc %{ Print the value of the #{field} field for this comment. }
tag "comments:field:#{field}" do |tag|
options = tag.attr.dup
#options.inspect
value = tag.locals.comment.send(field)
return value[7..-1] if field == 'author_url' && value[0,7]=='http://'
value
end
end
desc %{
Renders the date a comment was created.
*Usage:*
<pre><code><r:date [format="%A, %B %d, %Y"] /></code></pre>
}
tag 'comments:field:date' do |tag|
comment = tag.locals.comment
format = (tag.attr['format'] || '%A, %B %d, %Y')
date = comment.created_at
date.strftime(format)
end
desc %{
Renders a link if there's an author_url, otherwise just the author's name.
}
tag "comments:field:author_link" do |tag|
if tag.locals.comment.author_url.blank?
tag.locals.comment.author
else
%(<a href="http://#{tag.locals.comment.author_url}">#{tag.locals.comment.author}</a>)
end
end
desc %{
Renders the contained elements if the comment has an author_url specified.
}
tag "comments:field:if_author_url" do |tag|
tag.expand unless tag.locals.comment.author_url.blank?
end
desc %{
Renders the contained elements if the comment is selected - that is, if it is a comment
the user has just posted
}
tag "comments:field:if_selected" do |tag|
tag.expand if tag.locals.comment == tag.locals.page.selected_comment
end
desc %{
Renders the contained elements if the comment has been approved
}
tag "comments:field:if_approved" do |tag|
tag.expand if tag.locals.comment.approved?
end
desc %{
Renders the containing markup for each score of the rating
Example:
<r:comments:field:rating_empty_star>*</r:comments:field:rating_empty_star>
}
tag 'comments:field:rating_empty_star' do |tag|
@empty_star = tag.expand
''
end
desc %{
Renders the containing markup for each score of the rating
Example:
<r:comments:field:rating_full_star>*</r:comments:field:rating_full_star>
}
tag 'comments:field:rating_full_star' do |tag|
rating = tag.locals.comment.rating || Comment::MIN_RATING
markup = ''
rating.times { markup << tag.expand }
(Comment::MAX_RATING - rating).times { markup << @empty_star.to_s }
markup
end
desc %{
Renders the contained elements if the comment has not been approved
}
tag "comments:field:unless_approved" do |tag|
tag.expand unless tag.locals.comment.approved?
end
desc %{
Renders a Gravatar URL for the author of the comment.
}
tag "comments:field:gravatar_url" do |tag|
email = tag.locals.comment.author_email
size = tag.attr['size']
format = tag.attr['format']
rating = tag.attr['rating']
default = tag.attr['default']
md5 = Digest::MD5.hexdigest(email)
returning "http://www.gravatar.com/avatar/#{md5}" do |url|
url << ".#{format.downcase}" if format
if size || rating || default
attrs = []
attrs << "s=#{size}" if size
attrs << "d=#{default}" if default
attrs << "r=#{rating.downcase}" if rating
url << "?#{attrs.join('&')}"
end
end
end
desc %{
Renders a comment form.
*Usage:*
<r:comment:form [class="comments" id="comment_form"]>...</r:comment:form>
}
tag "comments:form" do |tag|
attrs = tag.attr.symbolize_keys
html_class, html_id = attrs[:class], attrs[:id]
r = %Q{ <form action="#{tag.locals.page.url}comments}
r << %Q{##{html_id}} unless html_id.blank?
r << %{" method="post" } #comlpete the quotes for the action
r << %{ id="#{html_id}" } unless html_id.blank?
r << %{ class="#{html_class}" } unless html_class.blank?
r << '>' #close the form element
r << tag.expand
r << %{</form>}
r
end
tag 'comments:error' do |tag|
if comment = tag.locals.page.last_comment
if on = tag.attr['on']
if error = comment.errors.on(on)
tag.locals.error_message = error
tag.expand
end
else
tag.expand if !comment.valid?
end
end
end
tag 'comments:error:message' do |tag|
tag.locals.error_message
end
%w(text password hidden).each do |type|
desc %{Builds a #{type} form field for comments.}
tag "comments:#{type}_field_tag" do |tag|
attrs = tag.attr.symbolize_keys
r = %{<input type="#{type}"}
r << %{ id="comment_#{attrs[:name]}"}
r << %{ name="comment[#{attrs[:name]}]"}
r << %{ class="#{attrs[:class]}"} if attrs[:class]
if value = (tag.locals.page.last_comment ? tag.locals.page.last_comment.send(attrs[:name]) : attrs[:value])
r << %{ value="#{value}" }
end
r << %{ />}
end
end
%w(submit reset).each do |type|
desc %{Builds a #{type} form button for comments.}
tag "comments:#{type}_tag" do |tag|
attrs = tag.attr.symbolize_keys
r = %{<input type="#{type}"}
r << %{ id="#{attrs[:name]}"}
r << %{ name="#{attrs[:name]}"}
r << %{ class="#{attrs[:class]}"} if attrs[:class]
r << %{ value="#{attrs[:value]}" } if attrs[:value]
r << %{ />}
end
end
desc %{Builds a text_area form field for comments.}
tag "comments:text_area_tag" do |tag|
attrs = tag.attr.symbolize_keys
r = %{<textarea}
r << %{ id="comment_#{attrs[:name]}"}
r << %{ name="comment[#{attrs[:name]}]"}
r << %{ class="#{attrs[:class]}"} if attrs[:class]
r << %{ rows="#{attrs[:rows]}"} if attrs[:rows]
r << %{ cols="#{attrs[:cols]}"} if attrs[:cols]
r << %{>}
if content = (tag.locals.page.last_comment ? tag.locals.page.last_comment.send(attrs[:name]) : attrs[:content])
r << content
end
r << %{</textarea>}
end
desc %{Build a drop_box form field for the filters avaiable.}
tag "comments:filter_box_tag" do |tag|
attrs = tag.attr.symbolize_keys
value = attrs.delete(:value)
name = attrs.delete(:name)
r = %{<select name="comment[#{name}]"}
unless attrs.empty?
r << " "
r << attrs.map {|k,v| %Q(#{k}="#{v}") }.join(" ")
end
r << %{>}
TextFilter.descendants.each do |filter|
r << %{<option value="#{filter.filter_name}"}
r << %{ selected="selected"} if value == filter.filter_name
r << %{>#{filter.filter_name}</option>}
end
r << %{</select>}
end
desc %{Builds a series of input tags to input a rating
*Usage:*
<pre><code><r:comments:ratings_tag [class="myclass"] [disabled="disabled"] /></code></pre>
}
tag 'comments:ratings_tag' do |tag|
module TagCreator
# Hack, simply including modules in CommentTags didn't work
extend ActionView::Helpers::TagHelper
extend ActionView::Helpers::FormTagHelper
end
returning '' do |markup|
(Comment::MIN_RATING...Comment::MAX_RATING).each do |rating|
markup << TagCreator.radio_button_tag('comment[rating]', rating+1, false, tag.attr)
end
end
end
desc %{Prints the number of comments. }
tag "comments:count" do |tag|
tag.locals.page.approved_comments.count
end
tag "recent_comments" do |tag|
tag.expand
end
desc %{Returns the last [limit] comments throughout the site.
*Usage:*
<pre><code><r:recent_comments:each [limit="10"]>...</r:recent_comments:each></code></pre>
}
tag "recent_comments:each" do |tag|
limit = tag.attr['limit'] || 10
comments = Comment.find(:all, :conditions => "comments.approved_at IS NOT NULL", :order => "created_at DESC", :limit => limit)
result = []
comments.each_with_index do |comment, index|
tag.locals.comment = comment
tag.locals.index = index
tag.locals.page = comment.page
result << tag.expand
end
result
end
desc %{
Use this to prevent spam bots from filling your site with spam.
*Usage:*
<pre><code>What day comes after Monday? <r:comments:spam_answer_tag answer="Tuesday" /></code></pre>
}
tag "comments:spam_answer_tag" do |tag|
attrs = tag.attr.symbolize_keys
valid_spam_answer = attrs[:answer] || 'hemidemisemiquaver'
r = %{<input type="text" id="comment_spam_answer" name="comment[spam_answer]"}
r << %{ class="#{attrs[:class]}"} if attrs[:class]
if value = (tag.locals.page.last_comment ? tag.locals.page.last_comment.send(:spam_answer) : '')
r << %{ value="#{value}" }
end
r << %{ />}
r << %{<input type="hidden" name="comment[valid_spam_answer]" value="#{valid_spam_answer}" />}
end
end
|
require_relative '../spec_helper'
ruby_version_is "2.7" do
describe "Pattern matching" do
# TODO: Remove excessive eval calls when support of previous version
# Ruby 2.6 will be dropped
before :each do
ScratchPad.record []
end
ruby_version_is "3.0" do
it "can be standalone assoc operator that deconstructs value" do
suppress_warning do
eval(<<-RUBY).should == [0, 1]
[0, 1] => [a, b]
[a, b]
RUBY
end
end
describe "find pattern" do
it "captures preceding elements to the pattern" do
eval(<<~RUBY).should == [0, 1]
case [0, 1, 2, 3]
in [*pre, 2, 3]
pre
else
false
end
RUBY
end
it "captures following elements to the pattern" do
eval(<<~RUBY).should == [2, 3]
case [0, 1, 2, 3]
in [0, 1, *post]
post
else
false
end
RUBY
end
it "captures both preceding and following elements to the pattern" do
eval(<<~RUBY).should == [[0, 1], [3, 4]]
case [0, 1, 2, 3, 4]
in [*pre, 2, *post]
[pre, post]
else
false
end
RUBY
end
it "can capture the entirety of the pattern" do
eval(<<~RUBY).should == [0, 1, 2, 3, 4]
case [0, 1, 2, 3, 4]
in [*everything]
everything
else
false
end
RUBY
end
it "will match an empty Array-like structure" do
eval(<<~RUBY).should == []
case []
in [*everything]
everything
else
false
end
RUBY
end
end
end
it "extends case expression with case/in construction" do
eval(<<~RUBY).should == :bar
case [0, 1]
in [0]
:foo
in [0, 1]
:bar
end
RUBY
end
it "allows using then operator" do
eval(<<~RUBY).should == :bar
case [0, 1]
in [0] then :foo
in [0, 1] then :bar
end
RUBY
end
describe "warning" do
before :each do
@experimental, Warning[:experimental] = Warning[:experimental], true
end
after :each do
Warning[:experimental] = @experimental
end
context 'when regular form' do
before :each do
@src = 'case [0, 1]; in [a, b]; end'
end
ruby_version_is ""..."3.0" do
it "warns about pattern matching is experimental feature" do
-> { eval @src }.should complain(/pattern matching is experimental, and the behavior may change in future versions of Ruby!/i)
end
end
ruby_version_is "3.0" do
it "does not warn about pattern matching is experimental feature" do
-> { eval @src }.should_not complain
end
end
end
context 'when one-line form' do
ruby_version_is '3.0' do
before :each do
@src = '[0, 1] => [a, b]'
end
ruby_version_is ""..."3.1" do
it "warns about pattern matching is experimental feature" do
-> { eval @src }.should complain(/pattern matching is experimental, and the behavior may change in future versions of Ruby!/i)
end
end
ruby_version_is "3.1" do
it "does not warn about pattern matching is experimental feature" do
-> { eval @src }.should_not complain
end
end
end
end
end
it "binds variables" do
eval(<<~RUBY).should == 1
case [0, 1]
in [0, a]
a
end
RUBY
end
it "cannot mix in and when operators" do
-> {
eval <<~RUBY
case []
when 1 == 1
in []
end
RUBY
}.should raise_error(SyntaxError, /syntax error, unexpected `in'/)
-> {
eval <<~RUBY
case []
in []
when 1 == 1
end
RUBY
}.should raise_error(SyntaxError, /syntax error, unexpected `when'/)
end
it "checks patterns until the first matching" do
eval(<<~RUBY).should == :bar
case [0, 1]
in [0]
:foo
in [0, 1]
:bar
in [0, 1]
:baz
end
RUBY
end
it "executes else clause if no pattern matches" do
eval(<<~RUBY).should == false
case [0, 1]
in [0]
true
else
false
end
RUBY
end
it "raises NoMatchingPatternError if no pattern matches and no else clause" do
-> {
eval <<~RUBY
case [0, 1]
in [0]
end
RUBY
}.should raise_error(NoMatchingPatternError, /\[0, 1\]/)
end
it "does not allow calculation or method calls in a pattern" do
-> {
eval <<~RUBY
case 0
in 1 + 1
true
end
RUBY
}.should raise_error(SyntaxError, /unexpected/)
end
it "evaluates the case expression once for multiple patterns, caching the result" do
eval(<<~RUBY).should == true
case (ScratchPad << :foo; 1)
in 0
false
in 1
true
end
RUBY
ScratchPad.recorded.should == [:foo]
end
describe "guards" do
it "supports if guard" do
eval(<<~RUBY).should == false
case 0
in 0 if false
true
else
false
end
RUBY
eval(<<~RUBY).should == true
case 0
in 0 if true
true
else
false
end
RUBY
end
it "supports unless guard" do
eval(<<~RUBY).should == false
case 0
in 0 unless true
true
else
false
end
RUBY
eval(<<~RUBY).should == true
case 0
in 0 unless false
true
else
false
end
RUBY
end
it "makes bound variables visible in guard" do
eval(<<~RUBY).should == true
case [0, 1]
in [a, 1] if a >= 0
true
end
RUBY
end
it "does not evaluate guard if pattern does not match" do
eval <<~RUBY
case 0
in 1 if (ScratchPad << :foo) || true
else
end
RUBY
ScratchPad.recorded.should == []
end
it "takes guards into account when there are several matching patterns" do
eval(<<~RUBY).should == :bar
case 0
in 0 if false
:foo
in 0 if true
:bar
end
RUBY
end
it "executes else clause if no guarded pattern matches" do
eval(<<~RUBY).should == false
case 0
in 0 if false
true
else
false
end
RUBY
end
it "raises NoMatchingPatternError if no guarded pattern matches and no else clause" do
-> {
eval <<~RUBY
case [0, 1]
in [0, 1] if false
end
RUBY
}.should raise_error(NoMatchingPatternError, /\[0, 1\]/)
end
end
describe "value pattern" do
it "matches an object such that pattern === object" do
eval(<<~RUBY).should == true
case 0
in 0
true
end
RUBY
eval(<<~RUBY).should == true
case 0
in (-1..1)
true
end
RUBY
eval(<<~RUBY).should == true
case 0
in Integer
true
end
RUBY
eval(<<~RUBY).should == true
case "0"
in /0/
true
end
RUBY
eval(<<~RUBY).should == true
case "0"
in ->(s) { s == "0" }
true
end
RUBY
end
it "allows string literal with interpolation" do
x = "x"
eval(<<~RUBY).should == true
case "x"
in "#{x + ""}"
true
end
RUBY
end
end
describe "variable pattern" do
it "matches a value and binds variable name to this value" do
eval(<<~RUBY).should == 0
case 0
in a
a
end
RUBY
end
it "makes bounded variable visible outside a case statement scope" do
eval(<<~RUBY).should == 0
case 0
in a
end
a
RUBY
end
it "create local variables even if a pattern doesn't match" do
eval(<<~RUBY).should == [0, nil, nil]
case 0
in a
in b
in c
end
[a, b, c]
RUBY
end
it "allow using _ name to drop values" do
eval(<<~RUBY).should == 0
case [0, 1]
in [a, _]
a
end
RUBY
end
it "supports using _ in a pattern several times" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in [0, _, _]
true
end
RUBY
end
it "supports using any name with _ at the beginning in a pattern several times" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in [0, _x, _x]
true
end
RUBY
eval(<<~RUBY).should == true
case {a: 0, b: 1, c: 2}
in {a: 0, b: _x, c: _x}
true
end
RUBY
end
it "does not support using variable name (except _) several times" do
-> {
eval <<~RUBY
case [0]
in [a, a]
end
RUBY
}.should raise_error(SyntaxError, /duplicated variable name/)
end
it "supports existing variables in a pattern specified with ^ operator" do
a = 0
eval(<<~RUBY).should == true
case 0
in ^a
true
end
RUBY
end
it "allows applying ^ operator to bound variables" do
eval(<<~RUBY).should == 1
case [1, 1]
in [n, ^n]
n
end
RUBY
eval(<<~RUBY).should == false
case [1, 2]
in [n, ^n]
true
else
false
end
RUBY
end
it "requires bound variable to be specified in a pattern before ^ operator when it relies on a bound variable" do
-> {
eval <<~RUBY
case [1, 2]
in [^n, n]
true
else
false
end
RUBY
}.should raise_error(SyntaxError, /n: no such local variable/)
end
end
describe "alternative pattern" do
it "matches if any of patterns matches" do
eval(<<~RUBY).should == true
case 0
in 0 | 1 | 2
true
end
RUBY
end
it "does not support variable binding" do
-> {
eval <<~RUBY
case [0, 1]
in [0, 0] | [0, a]
end
RUBY
}.should raise_error(SyntaxError, /illegal variable in alternative pattern/)
end
it "support underscore prefixed variables in alternation" do
eval(<<~RUBY).should == true
case [0, 1]
in [1, _]
false
in [0, 0] | [0, _a]
true
end
RUBY
end
end
describe "AS pattern" do
it "binds a variable to a value if pattern matches" do
eval(<<~RUBY).should == 0
case 0
in Integer => n
n
end
RUBY
end
it "can be used as a nested pattern" do
eval(<<~RUBY).should == [2, 3]
case [1, [2, 3]]
in [1, Array => ary]
ary
end
RUBY
end
end
describe "Array pattern" do
it "supports form Constant(pat, pat, ...)" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in Array(0, 1, 2)
true
end
RUBY
end
it "supports form Constant[pat, pat, ...]" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in Array[0, 1, 2]
true
end
RUBY
end
it "supports form [pat, pat, ...]" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in [0, 1, 2]
true
end
RUBY
end
it "supports form pat, pat, ..." do
eval(<<~RUBY).should == true
case [0, 1, 2]
in 0, 1, 2
true
end
RUBY
eval(<<~RUBY).should == 1
case [0, 1, 2]
in 0, a, 2
a
end
RUBY
eval(<<~RUBY).should == [1, 2]
case [0, 1, 2]
in 0, *rest
rest
end
RUBY
end
it "matches an object with #deconstruct method which returns an array and each element in array matches element in pattern" do
obj = Object.new
def obj.deconstruct; [0, 1] end
eval(<<~RUBY).should == true
case obj
in [Integer, Integer]
true
end
RUBY
end
ruby_version_is "3.0" do
it "calls #deconstruct once for multiple patterns, caching the result" do
obj = Object.new
def obj.deconstruct
ScratchPad << :deconstruct
[0, 1]
end
eval(<<~RUBY).should == true
case obj
in [1, 2]
false
in [0, 1]
true
end
RUBY
ScratchPad.recorded.should == [:deconstruct]
end
end
it "calls #deconstruct even on objects that are already an array" do
obj = [1, 2]
def obj.deconstruct
ScratchPad << :deconstruct
[3, 4]
end
eval(<<~RUBY).should == true
case obj
in [3, 4]
true
else
false
end
RUBY
ScratchPad.recorded.should == [:deconstruct]
end
it "does not match object if Constant === object returns false" do
eval(<<~RUBY).should == false
case [0, 1, 2]
in String[0, 1, 2]
true
else
false
end
RUBY
end
it "does not match object without #deconstruct method" do
obj = Object.new
obj.should_receive(:respond_to?).with(:deconstruct)
eval(<<~RUBY).should == false
case obj
in Object[]
true
else
false
end
RUBY
end
it "raises TypeError if #deconstruct method does not return array" do
obj = Object.new
def obj.deconstruct; "" end
-> {
eval <<~RUBY
case obj
in Object[]
else
end
RUBY
}.should raise_error(TypeError, /deconstruct must return Array/)
end
it "accepts a subclass of Array from #deconstruct" do
obj = Object.new
def obj.deconstruct
subarray = Class.new(Array).new(2)
def subarray.[](n)
n
end
subarray
end
eval(<<~RUBY).should == true
case obj
in [1, 2]
false
in [0, 1]
true
end
RUBY
end
it "does not match object if elements of array returned by #deconstruct method does not match elements in pattern" do
obj = Object.new
def obj.deconstruct; [1] end
eval(<<~RUBY).should == false
case obj
in Object[0]
true
else
false
end
RUBY
end
it "binds variables" do
eval(<<~RUBY).should == [0, 1, 2]
case [0, 1, 2]
in [a, b, c]
[a, b, c]
end
RUBY
end
it "supports splat operator *rest" do
eval(<<~RUBY).should == [1, 2]
case [0, 1, 2]
in [0, *rest]
rest
end
RUBY
end
it "does not match partially by default" do
eval(<<~RUBY).should == false
case [0, 1, 2, 3]
in [1, 2]
true
else
false
end
RUBY
end
it "does match partially from the array beginning if list + , syntax used" do
eval(<<~RUBY).should == true
case [0, 1, 2, 3]
in [0, 1,]
true
end
RUBY
eval(<<~RUBY).should == true
case [0, 1, 2, 3]
in 0, 1,;
true
end
RUBY
end
it "matches [] with []" do
eval(<<~RUBY).should == true
case []
in []
true
end
RUBY
end
it "matches anything with *" do
eval(<<~RUBY).should == true
case [0, 1]
in *;
true
end
RUBY
end
end
describe "Hash pattern" do
it "supports form Constant(id: pat, id: pat, ...)" do
eval(<<~RUBY).should == true
case {a: 0, b: 1}
in Hash(a: 0, b: 1)
true
end
RUBY
end
it "supports form Constant[id: pat, id: pat, ...]" do
eval(<<~RUBY).should == true
case {a: 0, b: 1}
in Hash[a: 0, b: 1]
true
end
RUBY
end
it "supports form {id: pat, id: pat, ...}" do
eval(<<~RUBY).should == true
case {a: 0, b: 1}
in {a: 0, b: 1}
true
end
RUBY
end
it "supports form id: pat, id: pat, ..." do
eval(<<~RUBY).should == true
case {a: 0, b: 1}
in a: 0, b: 1
true
end
RUBY
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in a: a, b: b
[a, b]
end
RUBY
eval(<<~RUBY).should == { b: 1, c: 2 }
case {a: 0, b: 1, c: 2}
in a: 0, **rest
rest
end
RUBY
end
it "supports a: which means a: a" do
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in Hash(a:, b:)
[a, b]
end
RUBY
a = b = nil
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in Hash[a:, b:]
[a, b]
end
RUBY
a = b = nil
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in {a:, b:}
[a, b]
end
RUBY
a = nil
eval(<<~RUBY).should == [0, {b: 1, c: 2}]
case {a: 0, b: 1, c: 2}
in {a:, **rest}
[a, rest]
end
RUBY
a = b = nil
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in a:, b:
[a, b]
end
RUBY
end
it "can mix key (a:) and key-value (a: b) declarations" do
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in Hash(a:, b: x)
[a, x]
end
RUBY
end
it "supports 'string': key literal" do
eval(<<~RUBY).should == true
case {a: 0}
in {"a": 0}
true
end
RUBY
end
it "does not support non-symbol keys" do
-> {
eval <<~RUBY
case {a: 1}
in {"a" => 1}
end
RUBY
}.should raise_error(SyntaxError, /unexpected/)
end
it "does not support string interpolation in keys" do
x = "a"
-> {
eval <<~'RUBY'
case {a: 1}
in {"#{x}": 1}
end
RUBY
}.should raise_error(SyntaxError, /symbol literal with interpolation is not allowed/)
end
it "raise SyntaxError when keys duplicate in pattern" do
-> {
eval <<~RUBY
case {a: 1}
in {a: 1, b: 2, a: 3}
end
RUBY
}.should raise_error(SyntaxError, /duplicated key name/)
end
it "matches an object with #deconstruct_keys method which returns a Hash with equal keys and each value in Hash matches value in pattern" do
obj = Object.new
def obj.deconstruct_keys(*); {a: 1} end
eval(<<~RUBY).should == true
case obj
in {a: 1}
true
end
RUBY
end
it "calls #deconstruct_keys per pattern" do
obj = Object.new
def obj.deconstruct_keys(*)
ScratchPad << :deconstruct_keys
{a: 1}
end
eval(<<~RUBY).should == true
case obj
in {b: 1}
false
in {a: 1}
true
end
RUBY
ScratchPad.recorded.should == [:deconstruct_keys, :deconstruct_keys]
end
it "does not match object if Constant === object returns false" do
eval(<<~RUBY).should == false
case {a: 1}
in String[a: 1]
true
else
false
end
RUBY
end
it "does not match object without #deconstruct_keys method" do
obj = Object.new
obj.should_receive(:respond_to?).with(:deconstruct_keys)
eval(<<~RUBY).should == false
case obj
in Object[a: 1]
true
else
false
end
RUBY
end
it "does not match object if #deconstruct_keys method does not return Hash" do
obj = Object.new
def obj.deconstruct_keys(*); "" end
-> {
eval <<~RUBY
case obj
in Object[a: 1]
end
RUBY
}.should raise_error(TypeError, /deconstruct_keys must return Hash/)
end
it "does not match object if #deconstruct_keys method returns Hash with non-symbol keys" do
obj = Object.new
def obj.deconstruct_keys(*); {"a" => 1} end
eval(<<~RUBY).should == false
case obj
in Object[a: 1]
true
else
false
end
RUBY
end
it "does not match object if elements of Hash returned by #deconstruct_keys method does not match values in pattern" do
obj = Object.new
def obj.deconstruct_keys(*); {a: 1} end
eval(<<~RUBY).should == false
case obj
in Object[a: 2]
true
else
false
end
RUBY
end
it "passes keys specified in pattern as arguments to #deconstruct_keys method" do
obj = Object.new
def obj.deconstruct_keys(*args)
ScratchPad << args
{a: 1, b: 2, c: 3}
end
eval <<~RUBY
case obj
in Object[a: 1, b: 2, c: 3]
end
RUBY
ScratchPad.recorded.sort.should == [[[:a, :b, :c]]]
end
it "passes keys specified in pattern to #deconstruct_keys method if pattern contains double splat operator **" do
obj = Object.new
def obj.deconstruct_keys(*args)
ScratchPad << args
{a: 1, b: 2, c: 3}
end
eval <<~RUBY
case obj
in Object[a: 1, b: 2, **]
end
RUBY
ScratchPad.recorded.sort.should == [[[:a, :b]]]
end
it "passes nil to #deconstruct_keys method if pattern contains double splat operator **rest" do
obj = Object.new
def obj.deconstruct_keys(*args)
ScratchPad << args
{a: 1, b: 2}
end
eval <<~RUBY
case obj
in Object[a: 1, **rest]
end
RUBY
ScratchPad.recorded.should == [[nil]]
end
it "binds variables" do
eval(<<~RUBY).should == [0, 1, 2]
case {a: 0, b: 1, c: 2}
in {a: x, b: y, c: z}
[x, y, z]
end
RUBY
end
it "supports double splat operator **rest" do
eval(<<~RUBY).should == {b: 1, c: 2}
case {a: 0, b: 1, c: 2}
in {a: 0, **rest}
rest
end
RUBY
end
it "treats **nil like there should not be any other keys in a matched Hash" do
eval(<<~RUBY).should == true
case {a: 1, b: 2}
in {a: 1, b: 2, **nil}
true
end
RUBY
eval(<<~RUBY).should == false
case {a: 1, b: 2}
in {a: 1, **nil}
true
else
false
end
RUBY
end
it "can match partially" do
eval(<<~RUBY).should == true
case {a: 1, b: 2}
in {a: 1}
true
end
RUBY
end
it "matches {} with {}" do
eval(<<~RUBY).should == true
case {}
in {}
true
end
RUBY
end
it "matches anything with **" do
eval(<<~RUBY).should == true
case {a: 1}
in **;
true
end
RUBY
end
end
describe "refinements" do
it "are used for #deconstruct" do
refinery = Module.new do
refine Array do
def deconstruct
[0]
end
end
end
result = nil
Module.new do
using refinery
result = eval(<<~RUBY)
case []
in [0]
true
end
RUBY
end
result.should == true
end
it "are used for #deconstruct_keys" do
refinery = Module.new do
refine Hash do
def deconstruct_keys(_)
{a: 0}
end
end
end
result = nil
Module.new do
using refinery
result = eval(<<~RUBY)
case {}
in a: 0
true
end
RUBY
end
result.should == true
end
it "are used for #=== in constant pattern" do
refinery = Module.new do
refine Array.singleton_class do
def ===(obj)
obj.is_a?(Hash)
end
end
end
result = nil
Module.new do
using refinery
result = eval(<<~RUBY)
case {}
in Array
true
end
RUBY
end
result.should == true
end
end
ruby_version_is "3.1" do
it "can omit parentheses in one line pattern matching" do
eval(<<~RUBY).should == [1, 2]
[1, 2] => a, b
[a, b]
RUBY
eval(<<~RUBY).should == 1
{a: 1} => a:
a
RUBY
end
end
end
end
Add nested pattern specs
require_relative '../spec_helper'
ruby_version_is "2.7" do
describe "Pattern matching" do
# TODO: Remove excessive eval calls when support of previous version
# Ruby 2.6 will be dropped
before :each do
ScratchPad.record []
end
ruby_version_is "3.0" do
it "can be standalone assoc operator that deconstructs value" do
suppress_warning do
eval(<<-RUBY).should == [0, 1]
[0, 1] => [a, b]
[a, b]
RUBY
end
end
describe "find pattern" do
it "captures preceding elements to the pattern" do
eval(<<~RUBY).should == [0, 1]
case [0, 1, 2, 3]
in [*pre, 2, 3]
pre
else
false
end
RUBY
end
it "captures following elements to the pattern" do
eval(<<~RUBY).should == [2, 3]
case [0, 1, 2, 3]
in [0, 1, *post]
post
else
false
end
RUBY
end
it "captures both preceding and following elements to the pattern" do
eval(<<~RUBY).should == [[0, 1], [3, 4]]
case [0, 1, 2, 3, 4]
in [*pre, 2, *post]
[pre, post]
else
false
end
RUBY
end
it "can capture the entirety of the pattern" do
eval(<<~RUBY).should == [0, 1, 2, 3, 4]
case [0, 1, 2, 3, 4]
in [*everything]
everything
else
false
end
RUBY
end
it "will match an empty Array-like structure" do
eval(<<~RUBY).should == []
case []
in [*everything]
everything
else
false
end
RUBY
end
end
end
it "extends case expression with case/in construction" do
eval(<<~RUBY).should == :bar
case [0, 1]
in [0]
:foo
in [0, 1]
:bar
end
RUBY
end
it "allows using then operator" do
eval(<<~RUBY).should == :bar
case [0, 1]
in [0] then :foo
in [0, 1] then :bar
end
RUBY
end
describe "warning" do
before :each do
@experimental, Warning[:experimental] = Warning[:experimental], true
end
after :each do
Warning[:experimental] = @experimental
end
context 'when regular form' do
before :each do
@src = 'case [0, 1]; in [a, b]; end'
end
ruby_version_is ""..."3.0" do
it "warns about pattern matching is experimental feature" do
-> { eval @src }.should complain(/pattern matching is experimental, and the behavior may change in future versions of Ruby!/i)
end
end
ruby_version_is "3.0" do
it "does not warn about pattern matching is experimental feature" do
-> { eval @src }.should_not complain
end
end
end
context 'when one-line form' do
ruby_version_is '3.0' do
before :each do
@src = '[0, 1] => [a, b]'
end
ruby_version_is ""..."3.1" do
it "warns about pattern matching is experimental feature" do
-> { eval @src }.should complain(/pattern matching is experimental, and the behavior may change in future versions of Ruby!/i)
end
end
ruby_version_is "3.1" do
it "does not warn about pattern matching is experimental feature" do
-> { eval @src }.should_not complain
end
end
end
end
end
it "binds variables" do
eval(<<~RUBY).should == 1
case [0, 1]
in [0, a]
a
end
RUBY
end
it "cannot mix in and when operators" do
-> {
eval <<~RUBY
case []
when 1 == 1
in []
end
RUBY
}.should raise_error(SyntaxError, /syntax error, unexpected `in'/)
-> {
eval <<~RUBY
case []
in []
when 1 == 1
end
RUBY
}.should raise_error(SyntaxError, /syntax error, unexpected `when'/)
end
it "checks patterns until the first matching" do
eval(<<~RUBY).should == :bar
case [0, 1]
in [0]
:foo
in [0, 1]
:bar
in [0, 1]
:baz
end
RUBY
end
it "executes else clause if no pattern matches" do
eval(<<~RUBY).should == false
case [0, 1]
in [0]
true
else
false
end
RUBY
end
it "raises NoMatchingPatternError if no pattern matches and no else clause" do
-> {
eval <<~RUBY
case [0, 1]
in [0]
end
RUBY
}.should raise_error(NoMatchingPatternError, /\[0, 1\]/)
end
it "does not allow calculation or method calls in a pattern" do
-> {
eval <<~RUBY
case 0
in 1 + 1
true
end
RUBY
}.should raise_error(SyntaxError, /unexpected/)
end
it "evaluates the case expression once for multiple patterns, caching the result" do
eval(<<~RUBY).should == true
case (ScratchPad << :foo; 1)
in 0
false
in 1
true
end
RUBY
ScratchPad.recorded.should == [:foo]
end
describe "guards" do
it "supports if guard" do
eval(<<~RUBY).should == false
case 0
in 0 if false
true
else
false
end
RUBY
eval(<<~RUBY).should == true
case 0
in 0 if true
true
else
false
end
RUBY
end
it "supports unless guard" do
eval(<<~RUBY).should == false
case 0
in 0 unless true
true
else
false
end
RUBY
eval(<<~RUBY).should == true
case 0
in 0 unless false
true
else
false
end
RUBY
end
it "makes bound variables visible in guard" do
eval(<<~RUBY).should == true
case [0, 1]
in [a, 1] if a >= 0
true
end
RUBY
end
it "does not evaluate guard if pattern does not match" do
eval <<~RUBY
case 0
in 1 if (ScratchPad << :foo) || true
else
end
RUBY
ScratchPad.recorded.should == []
end
it "takes guards into account when there are several matching patterns" do
eval(<<~RUBY).should == :bar
case 0
in 0 if false
:foo
in 0 if true
:bar
end
RUBY
end
it "executes else clause if no guarded pattern matches" do
eval(<<~RUBY).should == false
case 0
in 0 if false
true
else
false
end
RUBY
end
it "raises NoMatchingPatternError if no guarded pattern matches and no else clause" do
-> {
eval <<~RUBY
case [0, 1]
in [0, 1] if false
end
RUBY
}.should raise_error(NoMatchingPatternError, /\[0, 1\]/)
end
end
describe "value pattern" do
it "matches an object such that pattern === object" do
eval(<<~RUBY).should == true
case 0
in 0
true
end
RUBY
eval(<<~RUBY).should == true
case 0
in (-1..1)
true
end
RUBY
eval(<<~RUBY).should == true
case 0
in Integer
true
end
RUBY
eval(<<~RUBY).should == true
case "0"
in /0/
true
end
RUBY
eval(<<~RUBY).should == true
case "0"
in ->(s) { s == "0" }
true
end
RUBY
end
it "allows string literal with interpolation" do
x = "x"
eval(<<~RUBY).should == true
case "x"
in "#{x + ""}"
true
end
RUBY
end
end
describe "variable pattern" do
it "matches a value and binds variable name to this value" do
eval(<<~RUBY).should == 0
case 0
in a
a
end
RUBY
end
it "makes bounded variable visible outside a case statement scope" do
eval(<<~RUBY).should == 0
case 0
in a
end
a
RUBY
end
it "create local variables even if a pattern doesn't match" do
eval(<<~RUBY).should == [0, nil, nil]
case 0
in a
in b
in c
end
[a, b, c]
RUBY
end
it "allow using _ name to drop values" do
eval(<<~RUBY).should == 0
case [0, 1]
in [a, _]
a
end
RUBY
end
it "supports using _ in a pattern several times" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in [0, _, _]
true
end
RUBY
end
it "supports using any name with _ at the beginning in a pattern several times" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in [0, _x, _x]
true
end
RUBY
eval(<<~RUBY).should == true
case {a: 0, b: 1, c: 2}
in {a: 0, b: _x, c: _x}
true
end
RUBY
end
it "does not support using variable name (except _) several times" do
-> {
eval <<~RUBY
case [0]
in [a, a]
end
RUBY
}.should raise_error(SyntaxError, /duplicated variable name/)
end
it "supports existing variables in a pattern specified with ^ operator" do
a = 0
eval(<<~RUBY).should == true
case 0
in ^a
true
end
RUBY
end
it "allows applying ^ operator to bound variables" do
eval(<<~RUBY).should == 1
case [1, 1]
in [n, ^n]
n
end
RUBY
eval(<<~RUBY).should == false
case [1, 2]
in [n, ^n]
true
else
false
end
RUBY
end
it "requires bound variable to be specified in a pattern before ^ operator when it relies on a bound variable" do
-> {
eval <<~RUBY
case [1, 2]
in [^n, n]
true
else
false
end
RUBY
}.should raise_error(SyntaxError, /n: no such local variable/)
end
end
describe "alternative pattern" do
it "matches if any of patterns matches" do
eval(<<~RUBY).should == true
case 0
in 0 | 1 | 2
true
end
RUBY
end
it "does not support variable binding" do
-> {
eval <<~RUBY
case [0, 1]
in [0, 0] | [0, a]
end
RUBY
}.should raise_error(SyntaxError, /illegal variable in alternative pattern/)
end
it "support underscore prefixed variables in alternation" do
eval(<<~RUBY).should == true
case [0, 1]
in [1, _]
false
in [0, 0] | [0, _a]
true
end
RUBY
end
it "can be used as a nested pattern" do
eval(<<~RUBY).should == true
case [[1], ["2"]]
in [[0] | nil, _]
false
in [[1], [1]]
false
in [[1], [2 | "2"]]
true
end
RUBY
eval(<<~RUBY).should == true
case [1, 2]
in [0, _] | {a: 0}
false
in {a: 1, b: 2} | [1, 2]
true
end
RUBY
end
end
describe "AS pattern" do
it "binds a variable to a value if pattern matches" do
eval(<<~RUBY).should == 0
case 0
in Integer => n
n
end
RUBY
end
it "can be used as a nested pattern" do
eval(<<~RUBY).should == [2, 3]
case [1, [2, 3]]
in [1, Array => ary]
ary
end
RUBY
end
end
describe "Array pattern" do
it "supports form Constant(pat, pat, ...)" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in Array(0, 1, 2)
true
end
RUBY
end
it "supports form Constant[pat, pat, ...]" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in Array[0, 1, 2]
true
end
RUBY
end
it "supports form [pat, pat, ...]" do
eval(<<~RUBY).should == true
case [0, 1, 2]
in [0, 1, 2]
true
end
RUBY
end
it "supports form pat, pat, ..." do
eval(<<~RUBY).should == true
case [0, 1, 2]
in 0, 1, 2
true
end
RUBY
eval(<<~RUBY).should == 1
case [0, 1, 2]
in 0, a, 2
a
end
RUBY
eval(<<~RUBY).should == [1, 2]
case [0, 1, 2]
in 0, *rest
rest
end
RUBY
end
it "matches an object with #deconstruct method which returns an array and each element in array matches element in pattern" do
obj = Object.new
def obj.deconstruct; [0, 1] end
eval(<<~RUBY).should == true
case obj
in [Integer, Integer]
true
end
RUBY
end
ruby_version_is "3.0" do
it "calls #deconstruct once for multiple patterns, caching the result" do
obj = Object.new
def obj.deconstruct
ScratchPad << :deconstruct
[0, 1]
end
eval(<<~RUBY).should == true
case obj
in [1, 2]
false
in [0, 1]
true
end
RUBY
ScratchPad.recorded.should == [:deconstruct]
end
end
it "calls #deconstruct even on objects that are already an array" do
obj = [1, 2]
def obj.deconstruct
ScratchPad << :deconstruct
[3, 4]
end
eval(<<~RUBY).should == true
case obj
in [3, 4]
true
else
false
end
RUBY
ScratchPad.recorded.should == [:deconstruct]
end
it "does not match object if Constant === object returns false" do
eval(<<~RUBY).should == false
case [0, 1, 2]
in String[0, 1, 2]
true
else
false
end
RUBY
end
it "does not match object without #deconstruct method" do
obj = Object.new
obj.should_receive(:respond_to?).with(:deconstruct)
eval(<<~RUBY).should == false
case obj
in Object[]
true
else
false
end
RUBY
end
it "raises TypeError if #deconstruct method does not return array" do
obj = Object.new
def obj.deconstruct; "" end
-> {
eval <<~RUBY
case obj
in Object[]
else
end
RUBY
}.should raise_error(TypeError, /deconstruct must return Array/)
end
it "accepts a subclass of Array from #deconstruct" do
obj = Object.new
def obj.deconstruct
subarray = Class.new(Array).new(2)
def subarray.[](n)
n
end
subarray
end
eval(<<~RUBY).should == true
case obj
in [1, 2]
false
in [0, 1]
true
end
RUBY
end
it "does not match object if elements of array returned by #deconstruct method does not match elements in pattern" do
obj = Object.new
def obj.deconstruct; [1] end
eval(<<~RUBY).should == false
case obj
in Object[0]
true
else
false
end
RUBY
end
it "binds variables" do
eval(<<~RUBY).should == [0, 1, 2]
case [0, 1, 2]
in [a, b, c]
[a, b, c]
end
RUBY
end
it "supports splat operator *rest" do
eval(<<~RUBY).should == [1, 2]
case [0, 1, 2]
in [0, *rest]
rest
end
RUBY
end
it "does not match partially by default" do
eval(<<~RUBY).should == false
case [0, 1, 2, 3]
in [1, 2]
true
else
false
end
RUBY
end
it "does match partially from the array beginning if list + , syntax used" do
eval(<<~RUBY).should == true
case [0, 1, 2, 3]
in [0, 1,]
true
end
RUBY
eval(<<~RUBY).should == true
case [0, 1, 2, 3]
in 0, 1,;
true
end
RUBY
end
it "matches [] with []" do
eval(<<~RUBY).should == true
case []
in []
true
end
RUBY
end
it "matches anything with *" do
eval(<<~RUBY).should == true
case [0, 1]
in *;
true
end
RUBY
end
it "can be used as a nested pattern" do
eval(<<~RUBY).should == true
case [[1], ["2"]]
in [[0] | nil, _]
false
in [[1], [1]]
false
in [[1], [2 | "2"]]
true
end
RUBY
eval(<<~RUBY).should == true
case [1, 2]
in [0, _] | {a: 0}
false
in {a: 1, b: 2} | [1, 2]
true
end
RUBY
end
end
describe "Hash pattern" do
it "supports form Constant(id: pat, id: pat, ...)" do
eval(<<~RUBY).should == true
case {a: 0, b: 1}
in Hash(a: 0, b: 1)
true
end
RUBY
end
it "supports form Constant[id: pat, id: pat, ...]" do
eval(<<~RUBY).should == true
case {a: 0, b: 1}
in Hash[a: 0, b: 1]
true
end
RUBY
end
it "supports form {id: pat, id: pat, ...}" do
eval(<<~RUBY).should == true
case {a: 0, b: 1}
in {a: 0, b: 1}
true
end
RUBY
end
it "supports form id: pat, id: pat, ..." do
eval(<<~RUBY).should == true
case {a: 0, b: 1}
in a: 0, b: 1
true
end
RUBY
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in a: a, b: b
[a, b]
end
RUBY
eval(<<~RUBY).should == { b: 1, c: 2 }
case {a: 0, b: 1, c: 2}
in a: 0, **rest
rest
end
RUBY
end
it "supports a: which means a: a" do
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in Hash(a:, b:)
[a, b]
end
RUBY
a = b = nil
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in Hash[a:, b:]
[a, b]
end
RUBY
a = b = nil
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in {a:, b:}
[a, b]
end
RUBY
a = nil
eval(<<~RUBY).should == [0, {b: 1, c: 2}]
case {a: 0, b: 1, c: 2}
in {a:, **rest}
[a, rest]
end
RUBY
a = b = nil
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in a:, b:
[a, b]
end
RUBY
end
it "can mix key (a:) and key-value (a: b) declarations" do
eval(<<~RUBY).should == [0, 1]
case {a: 0, b: 1}
in Hash(a:, b: x)
[a, x]
end
RUBY
end
it "supports 'string': key literal" do
eval(<<~RUBY).should == true
case {a: 0}
in {"a": 0}
true
end
RUBY
end
it "does not support non-symbol keys" do
-> {
eval <<~RUBY
case {a: 1}
in {"a" => 1}
end
RUBY
}.should raise_error(SyntaxError, /unexpected/)
end
it "does not support string interpolation in keys" do
x = "a"
-> {
eval <<~'RUBY'
case {a: 1}
in {"#{x}": 1}
end
RUBY
}.should raise_error(SyntaxError, /symbol literal with interpolation is not allowed/)
end
it "raise SyntaxError when keys duplicate in pattern" do
-> {
eval <<~RUBY
case {a: 1}
in {a: 1, b: 2, a: 3}
end
RUBY
}.should raise_error(SyntaxError, /duplicated key name/)
end
# Specs for Hash patterns matching objects that implement #deconstruct_keys.
# Each example evals the `case/in` expression because that syntax is only
# valid on Ruby versions that support pattern matching.
it "matches an object with #deconstruct_keys method which returns a Hash with equal keys and each value in Hash matches value in pattern" do
  obj = Object.new
  def obj.deconstruct_keys(*); {a: 1} end

  eval(<<~RUBY).should == true
    case obj
    in {a: 1}
      true
    end
  RUBY
end

it "calls #deconstruct_keys per pattern" do
  obj = Object.new
  def obj.deconstruct_keys(*)
    ScratchPad << :deconstruct_keys
    {a: 1}
  end

  # Two `in` branches are tried, so #deconstruct_keys is invoked twice.
  eval(<<~RUBY).should == true
    case obj
    in {b: 1}
      false
    in {a: 1}
      true
    end
  RUBY

  ScratchPad.recorded.should == [:deconstruct_keys, :deconstruct_keys]
end

it "does not match object if Constant === object returns false" do
  eval(<<~RUBY).should == false
    case {a: 1}
    in String[a: 1]
      true
    else
      false
    end
  RUBY
end

it "does not match object without #deconstruct_keys method" do
  obj = Object.new
  # NOTE(review): the mocked respond_to? has no explicit return value, so it
  # returns nil (falsy) — which is what makes the pattern not match here.
  obj.should_receive(:respond_to?).with(:deconstruct_keys)

  eval(<<~RUBY).should == false
    case obj
    in Object[a: 1]
      true
    else
      false
    end
  RUBY
end

it "does not match object if #deconstruct_keys method does not return Hash" do
  obj = Object.new
  def obj.deconstruct_keys(*); "" end

  -> {
    eval <<~RUBY
      case obj
      in Object[a: 1]
      end
    RUBY
  }.should raise_error(TypeError, /deconstruct_keys must return Hash/)
end

it "does not match object if #deconstruct_keys method returns Hash with non-symbol keys" do
  obj = Object.new
  def obj.deconstruct_keys(*); {"a" => 1} end

  eval(<<~RUBY).should == false
    case obj
    in Object[a: 1]
      true
    else
      false
    end
  RUBY
end

it "does not match object if elements of Hash returned by #deconstruct_keys method does not match values in pattern" do
  obj = Object.new
  def obj.deconstruct_keys(*); {a: 1} end

  eval(<<~RUBY).should == false
    case obj
    in Object[a: 2]
      true
    else
      false
    end
  RUBY
end

it "passes keys specified in pattern as arguments to #deconstruct_keys method" do
  obj = Object.new
  def obj.deconstruct_keys(*args)
    ScratchPad << args
    {a: 1, b: 2, c: 3}
  end

  eval <<~RUBY
    case obj
    in Object[a: 1, b: 2, c: 3]
    end
  RUBY

  # The full key list is passed as a single array argument.
  ScratchPad.recorded.sort.should == [[[:a, :b, :c]]]
end

it "passes keys specified in pattern to #deconstruct_keys method if pattern contains double splat operator **" do
  obj = Object.new
  def obj.deconstruct_keys(*args)
    ScratchPad << args
    {a: 1, b: 2, c: 3}
  end

  eval <<~RUBY
    case obj
    in Object[a: 1, b: 2, **]
    end
  RUBY

  # Only the explicitly named keys are requested; `**` adds nothing.
  ScratchPad.recorded.sort.should == [[[:a, :b]]]
end

it "passes nil to #deconstruct_keys method if pattern contains double splat operator **rest" do
  obj = Object.new
  def obj.deconstruct_keys(*args)
    ScratchPad << args
    {a: 1, b: 2}
  end

  eval <<~RUBY
    case obj
    in Object[a: 1, **rest]
    end
  RUBY

  # `**rest` needs every key, so nil is passed to request the whole Hash.
  ScratchPad.recorded.should == [[nil]]
end

it "binds variables" do
  eval(<<~RUBY).should == [0, 1, 2]
    case {a: 0, b: 1, c: 2}
    in {a: x, b: y, c: z}
      [x, y, z]
    end
  RUBY
end

it "supports double splat operator **rest" do
  eval(<<~RUBY).should == {b: 1, c: 2}
    case {a: 0, b: 1, c: 2}
    in {a: 0, **rest}
      rest
    end
  RUBY
end

it "treats **nil like there should not be any other keys in a matched Hash" do
  eval(<<~RUBY).should == true
    case {a: 1, b: 2}
    in {a: 1, b: 2, **nil}
      true
    end
  RUBY

  eval(<<~RUBY).should == false
    case {a: 1, b: 2}
    in {a: 1, **nil}
      true
    else
      false
    end
  RUBY
end

it "can match partially" do
  eval(<<~RUBY).should == true
    case {a: 1, b: 2}
    in {a: 1}
      true
    end
  RUBY
end

it "matches {} with {}" do
  eval(<<~RUBY).should == true
    case {}
    in {}
      true
    end
  RUBY
end

it "matches anything with **" do
  eval(<<~RUBY).should == true
    case {a: 1}
    in **;
      true
    end
  RUBY
end

it "can be used as a nested pattern" do
  eval(<<~RUBY).should == true
    case {a: {a: 1, b: 1}, b: {a: 1, b: 2}}
    in {a: {a: 0}}
      false
    in {a: {a: 1}, b: {b: 1}}
      false
    in {a: {a: 1}, b: {b: 2}}
      true
    end
  RUBY

  eval(<<~RUBY).should == true
    case [{a: 1, b: [1]}, {a: 1, c: ["2"]}]
    in [{a:, c:},]
      false
    in [{a: 1, b:}, {a: 1, c: [Integer]}]
      false
    in [_, {a: 1, c: [String]}]
      true
    end
  RUBY
end
end
# Refinements active in the scope where the `case/in` is evaluated are
# honored by pattern matching (for #deconstruct, #deconstruct_keys and #===).
describe "refinements" do
  it "are used for #deconstruct" do
    refinery = Module.new do
      refine Array do
        def deconstruct
          [0]
        end
      end
    end

    result = nil
    Module.new do
      using refinery

      result = eval(<<~RUBY)
        case []
        in [0]
          true
        end
      RUBY
    end

    result.should == true
  end

  it "are used for #deconstruct_keys" do
    refinery = Module.new do
      refine Hash do
        def deconstruct_keys(_)
          {a: 0}
        end
      end
    end

    result = nil
    Module.new do
      using refinery

      result = eval(<<~RUBY)
        case {}
        in a: 0
          true
        end
      RUBY
    end

    result.should == true
  end

  it "are used for #=== in constant pattern" do
    refinery = Module.new do
      refine Array.singleton_class do
        def ===(obj)
          obj.is_a?(Hash)
        end
      end
    end

    result = nil
    Module.new do
      using refinery

      result = eval(<<~RUBY)
        case {}
        in Array
          true
        end
      RUBY
    end

    result.should == true
  end
end
# Ruby 3.1 allows the rightward-assignment form (`expr => pattern`) without
# parentheses/brackets around the pattern.
ruby_version_is "3.1" do
  it "can omit parentheses in one line pattern matching" do
    eval(<<~RUBY).should == [1, 2]
      [1, 2] => a, b
      [a, b]
    RUBY

    eval(<<~RUBY).should == 1
      {a: 1} => a:
      a
    RUBY
  end
end
end
end
|
require_relative 'ui_skeleton_editor'
# Qt dialog for viewing a sprite's skeleton: poses and animations are drawn
# into a QGraphicsScene, with optional overlays for the bone structure,
# hitboxes and attachment points, plus tweened playback on a timer.
class SkeletonEditorDialog < Qt::Dialog
  # Pens for the debug overlays drawn on top of the sprite frames.
  GREY_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(128, 128, 128)), 2)
  RED_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(224, 16, 16)), 2)
  GREEN_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(16, 224, 16)), 2)
  BLUE_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(16, 16, 224)), 2)
  WHITE_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(255, 255, 255)), 2)

  attr_reader :game, :fs

  # Qt slot declarations; each is connected to a UI signal in #initialize.
  slots "pose_changed_no_tween(int)"
  slots "toggle_show_skeleton(int)"
  slots "toggle_show_hitboxes(int)"
  slots "toggle_show_points(int)"
  slots "animation_changed(int)"
  slots "animation_keyframe_changed_no_tween(int)"
  slots "toggle_animation_paused()"
  slots "advance_tweenframe()"
  slots "button_box_clicked(QAbstractButton*)"

  # parent      - parent Qt widget.
  # sprite_info - sprite whose skeleton file will be loaded.
  # fs          - filesystem object used to read the skeleton data.
  # renderer    - renderer used to rasterize the sprite's frames.
  def initialize(parent, sprite_info, fs, renderer)
    super(parent, Qt::WindowTitleHint | Qt::WindowSystemMenuHint)

    @sprite_info = sprite_info
    @fs = fs
    @renderer = renderer

    @ui = Ui_SkeletonEditor.new
    @ui.setup_ui(self)

    @skeleton_graphics_scene = Qt::GraphicsScene.new
    @ui.skeleton_graphics_view.setScene(@skeleton_graphics_scene)

    # Single-shot timer, restarted manually after every frame, drives playback.
    @animation_timer = Qt::Timer.new()
    @animation_timer.setSingleShot(true)
    connect(@animation_timer, SIGNAL("timeout()"), self, SLOT("advance_tweenframe()"))

    set_animation_paused(true)

    connect(@ui.pose_index, SIGNAL("activated(int)"), self, SLOT("pose_changed_no_tween(int)"))
    connect(@ui.show_skeleton, SIGNAL("stateChanged(int)"), self, SLOT("toggle_show_skeleton(int)"))
    connect(@ui.show_hitboxes, SIGNAL("stateChanged(int)"), self, SLOT("toggle_show_hitboxes(int)"))
    connect(@ui.show_points, SIGNAL("stateChanged(int)"), self, SLOT("toggle_show_points(int)"))
    connect(@ui.animation_index, SIGNAL("activated(int)"), self, SLOT("animation_changed(int)"))
    connect(@ui.seek_slider, SIGNAL("valueChanged(int)"), self, SLOT("animation_keyframe_changed_no_tween(int)"))
    connect(@ui.toggle_paused_button, SIGNAL("clicked()"), self, SLOT("toggle_animation_paused()"))
    connect(@ui.buttonBox, SIGNAL("clicked(QAbstractButton*)"), self, SLOT("button_box_clicked(QAbstractButton*)"))

    self.show()
    self.load_skeleton()
  end

  # Loads the skeleton file, rasterizes the sprite's frames into pixmaps, and
  # populates the pose/animation combo boxes, selecting index 0 of each.
  def load_skeleton
    @skeleton = SpriteSkeleton.new(@sprite_info.skeleton_file, @fs)

    chunky_frames, @min_x, @min_y, _, _, _, _, _ = @renderer.render_sprite(@sprite_info)
    @pixmap_frames = chunky_frames.map do |chunky_image|
      pixmap = Qt::Pixmap.new
      blob = chunky_image.to_blob
      pixmap.loadFromData(blob, blob.length)
      pixmap
    end

    @ui.skeleton_file_name.text = @skeleton.skeleton_file

    @ui.pose_index.clear()
    @skeleton.poses.each_index do |i|
      @ui.pose_index.addItem("%02X" % i)
    end
    pose_changed(0)

    @ui.animation_index.clear()
    @skeleton.animations.each_with_index do |animation, i|
      @ui.animation_index.addItem("%02X" % i)
    end
    animation_changed(0)
  end

  # Jumps straight to pose +i+, discarding the previous pose so no tweening
  # happens on the next redraw.
  def pose_changed_no_tween(i)
    @current_pose_index = i
    pose = @skeleton.poses[@current_pose_index]
    @current_pose_joint_states = initialize_joint_states(pose)
    @tweening_progress = 0.0
    @previous_pose = nil

    update_drawn_joints()

    @ui.pose_index.setCurrentIndex(i)
  end

  # Switches to pose +i+, remembering the previous pose (when one exists) so
  # playback can tween between the two.
  def pose_changed(i)
    if @current_pose_joint_states
      @previous_pose = @skeleton.poses[@current_pose_index]
      @previous_pose_joint_state = @current_pose_joint_states
      @current_pose_index = i
      pose = @skeleton.poses[@current_pose_index]
      @current_pose_joint_states = initialize_joint_states(pose)
      @tweening_progress = 0.0
    else
      # First pose ever shown: previous and current are the same pose.
      @current_pose_index = i
      pose = @skeleton.poses[@current_pose_index]
      @current_pose_joint_states = initialize_joint_states(pose)
      @previous_pose_joint_state = @current_pose_joint_states
      @previous_pose = @skeleton.poses[@current_pose_index]
      @tweening_progress = 0.0
    end

    update_drawn_joints()

    @ui.pose_index.setCurrentIndex(i)
  end

  # Selects animation +i+, resetting the seek slider; disables playback
  # controls when the animation doesn't exist or has no keyframes.
  def animation_changed(i)
    @current_animation_index = i
    @animation_timer.stop()
    @ui.seek_slider.value = 0
    @current_animation_keyframe_index = 0

    @current_animation = @skeleton.animations[@current_animation_index]

    if @current_animation.nil?
      @ui.seek_slider.enabled = false
      @ui.toggle_paused_button.enabled = false
      return
    end

    @ui.seek_slider.enabled = true
    @ui.seek_slider.minimum = 0
    @ui.seek_slider.maximum = @current_animation.keyframes.length-1
    @ui.toggle_paused_button.enabled = true

    if @current_animation.keyframes.length > 0
      animation_keyframe_changed(0)
      start_animation()
    else
      # Animation with no keyframes
    end
  end

  # Computes the absolute position and inherited rotation of every joint for
  # +pose+, walking the joint list in index order. NOTE(review): this assumes
  # a joint's parent always appears earlier in the list than the joint itself;
  # a child listed before its parent would read an uninitialized parent state.
  # Rotations appear to be stored in 65536ths of a circle (values are divided
  # by 182.0 to get degrees) — TODO confirm against the file format docs.
  def initialize_joint_states(pose)
    joint_states_for_pose = []
    @skeleton.joints.each_with_index do |joint, joint_index|
      joint_change = pose.joint_changes[joint_index]

      joint_state = JointState.new
      joint_states_for_pose << joint_state

      if joint.parent_id == 0xFF
        # Root joint: anchored at the origin with no inherited rotation.
        joint_state.x_pos = 0
        joint_state.y_pos = 0
        joint_state.inherited_rotation = 0
        next
      end

      parent_joint = @skeleton.joints[joint.parent_id]
      parent_joint_change = pose.joint_changes[joint.parent_id]
      parent_joint_state = joint_states_for_pose[joint.parent_id]

      joint_state.x_pos = parent_joint_state.x_pos
      joint_state.y_pos = parent_joint_state.y_pos

      joint_state.inherited_rotation = parent_joint_change.rotation
      if parent_joint.copy_parent_visual_rotation && parent_joint.parent_id != 0xFF
        joint_state.inherited_rotation += parent_joint_state.inherited_rotation
      end

      connected_rotation_in_degrees = joint_state.inherited_rotation / 182.0
      offset_angle = connected_rotation_in_degrees
      offset_angle += 90 * joint.positional_rotation
      offset_angle_in_radians = offset_angle * Math::PI / 180

      # Offset the joint from its parent along the computed angle.
      joint_state.x_pos += joint_change.distance*Math.cos(offset_angle_in_radians)
      joint_state.y_pos += joint_change.distance*Math.sin(offset_angle_in_radians)
    end

    joint_states_for_pose
  end

  # Builds an interpolated pose between +previous_pose+ and +next_pose+ at
  # +tweening_progress+ (0.0..1.0) and returns [joint_states, tweened_pose].
  def tween_poses(previous_pose, next_pose, tweening_progress)
    tweened_pose = Pose.new

    previous_pose.joint_changes.each_with_index do |prev_joint_change, joint_index|
      next_joint_change = next_pose.joint_changes[joint_index]

      prev_multiplier = 1.0 - tweening_progress
      next_multiplier = tweening_progress

      tweened_rotation = merge_two_angles(prev_joint_change.rotation, next_joint_change.rotation, prev_multiplier, next_multiplier)
      tweened_distance = prev_joint_change.distance*prev_multiplier + next_joint_change.distance*next_multiplier

      # Re-pack as raw joint change data: uint16 LE rotation, int8 distance,
      # uint8 frame id (frame id is taken from the previous keyframe).
      tweened_joint_change_data = [tweened_rotation, tweened_distance, prev_joint_change.new_frame_id].pack("vcC")
      tweened_joint_change = JointChange.new(tweened_joint_change_data)

      tweened_pose.joint_changes << tweened_joint_change
    end

    tweened_states = initialize_joint_states(tweened_pose)

    return [tweened_states, tweened_pose]
  end

  # Weighted average of two 16-bit angle values, shifting one by a full turn
  # (0x10000) first when they are more than half a turn apart, so the
  # interpolation takes the shorter way around.
  def merge_two_angles(a, b, a_multiplier, b_multiplier)
    if b - a >= 0x8000
      b -= 0x10000
    elsif a - b >= 0x8000
      a -= 0x10000
    end

    a*a_multiplier + b*b_multiplier
  end

  # Seeks to keyframe +i+ (tweening from the current pose) and restarts the
  # frame timer.
  def animation_keyframe_changed(i)
    @current_animation_keyframe_index = i
    @current_animation_tweenframe_index = 0

    @current_keyframe = @current_animation.keyframes[@current_animation_keyframe_index]

    pose_changed(@current_keyframe.pose_id)
    animation_tweenframe_changed(0)

    @ui.seek_slider.value = @current_animation_keyframe_index

    # ~60fps tick.
    millisecond_delay = (1 / 60.0 * 1000).round
    @animation_timer.start(millisecond_delay)
  end

  # Seeks to keyframe +i+ without tweening (used by the seek slider). Bails
  # out when the slider already reflects keyframe +i+, unless +force+ is set.
  def animation_keyframe_changed_no_tween(i, force=false)
    return if i == @current_animation_keyframe_index && @ui.seek_slider.value == @current_animation_keyframe_index && !force

    @current_animation_keyframe_index = i
    @current_animation_tweenframe_index = 0

    @current_keyframe = @current_animation.keyframes[@current_animation_keyframe_index]

    pose_changed_no_tween(@current_keyframe.pose_id)
    animation_tweenframe_changed(0)

    @ui.seek_slider.value = @current_animation_keyframe_index

    millisecond_delay = (1 / 60.0 * 1000).round
    @animation_timer.start(millisecond_delay)
  end

  # Moves to tween frame +i+ within the current keyframe and redraws.
  def animation_tweenframe_changed(i)
    @current_animation_tweenframe_index = i
    @tweening_progress = @current_animation_tweenframe_index.to_f / @current_keyframe.length_in_frames
    #@ui.frame_delay.text = "%04X" % frame_delay.delay
    update_drawn_joints()
  end

  # Timer callback: advances one tween frame (or the next keyframe when the
  # current one is exhausted) and re-arms the timer.
  def advance_tweenframe
    if @current_animation && !@animation_paused
      if @current_animation_tweenframe_index >= @current_keyframe.length_in_frames
        advance_keyframe()
      else
        animation_tweenframe_changed(@current_animation_tweenframe_index+1)
      end

      millisecond_delay = (1 / 60.0 * 1000).round
      @animation_timer.start(millisecond_delay)
    end
  end

  # Sets the paused state, updates the play/pause button label, and kicks the
  # timer off again when unpausing.
  def set_animation_paused(paused)
    @animation_paused = paused
    if @animation_paused
      @ui.toggle_paused_button.text = "Play"
    else
      @ui.toggle_paused_button.text = "Pause"
      start_animation()
    end
  end

  # Arms the playback timer for one ~60fps tick.
  def start_animation
    millisecond_delay = (1 / 60.0 * 1000).round
    @animation_timer.start(millisecond_delay)
  end

  def toggle_animation_paused
    set_animation_paused(!@animation_paused)
  end

  # Advances to the next keyframe, wrapping to 0 at the end; pauses at the
  # wrap unless the loop checkbox is checked.
  def advance_keyframe
    if @current_animation && !@animation_paused
      if @current_animation_keyframe_index >= @current_animation.keyframes.length-1
        animation_keyframe_changed(0)
        unless @ui.loop_animation.checked
          set_animation_paused(true)
        end
      else
        animation_keyframe_changed(@current_animation_keyframe_index+1)
      end
    end
  end

  # Clears the scene and redraws the sprite frames for the current (possibly
  # tweened) pose, plus any enabled overlays (skeleton, hitboxes, points).
  def update_drawn_joints
    @skeleton_graphics_scene.items.each do |item|
      @skeleton_graphics_scene.removeItem(item)
    end

    next_pose = @skeleton.poses[@current_pose_index]
    if @previous_pose
      @current_tweened_joint_states, pose = tween_poses(@previous_pose, next_pose, @tweening_progress)
    else
      @current_tweened_joint_states = @current_pose_joint_states
      pose = next_pose
    end

    # Draw the sprite frames attached to each joint, in draw order.
    @skeleton.joint_indexes_by_draw_order.each do |joint_index|
      joint = @skeleton.joints[joint_index]
      joint_change = pose.joint_changes[joint_index]
      joint_state = @current_tweened_joint_states[joint_index]

      # 0xFF means the joint has no frame to draw.
      next if joint.frame_id == 0xFF

      rotation = joint_change.rotation
      if joint.parent_id != 0xFF && joint.copy_parent_visual_rotation
        rotation += joint_state.inherited_rotation
      end
      rotation_in_degrees = rotation/182.0

      # A joint change can override the joint's default frame.
      if joint_change.new_frame_id == 0xFF
        frame_id = joint.frame_id
      else
        frame_id = joint_change.new_frame_id
      end
      pixmap = @pixmap_frames[frame_id]
      graphics_item = Qt::GraphicsPixmapItem.new(pixmap)
      graphics_item.setOffset(@min_x, @min_y)
      graphics_item.setPos(joint_state.x_pos, joint_state.y_pos)
      graphics_item.setRotation(rotation_in_degrees)
      if joint.horizontal_flip && joint.vertical_flip
        graphics_item.scale(-1, -1)
      elsif joint.horizontal_flip
        graphics_item.scale(-1, 1)
      elsif joint.vertical_flip
        graphics_item.scale(1, -1)
      end
      @skeleton_graphics_scene.addItem(graphics_item)
    end

    # Overlay: joint markers and bones connecting each joint to its parent.
    if @ui.show_skeleton.checked
      @skeleton.joints.each_index do |joint_index|
        joint = @skeleton.joints[joint_index]
        joint_change = pose.joint_changes[joint_index]
        joint_state = @current_tweened_joint_states[joint_index]

        ellipse = @skeleton_graphics_scene.addEllipse(joint_state.x_pos-1, joint_state.y_pos-1, 3, 3, GREY_PEN)
        ellipse.setZValue(1)

        if joint.parent_id != 0xFF
          parent_joint = @skeleton.joints[joint.parent_id]
          parent_joint_state = @current_tweened_joint_states[joint.parent_id]
          line = @skeleton_graphics_scene.addLine(joint_state.x_pos, joint_state.y_pos, parent_joint_state.x_pos, parent_joint_state.y_pos, GREY_PEN)
          line.setZValue(1)
        end
      end
    end

    # Overlay: hitbox rectangles, color-coded by damage direction.
    if @ui.show_hitboxes.checked
      @skeleton.hitboxes.each do |hitbox|
        joint = @skeleton.joints[hitbox.parent_joint_id]
        joint_change = pose.joint_changes[hitbox.parent_joint_id]
        joint_state = @current_tweened_joint_states[hitbox.parent_joint_id]

        x_pos = joint_state.x_pos
        y_pos = joint_state.y_pos

        offset_angle = hitbox.rotation + joint_change.rotation
        if joint.copy_parent_visual_rotation
          offset_angle += joint_state.inherited_rotation
        end
        offset_angle_in_degrees = offset_angle / 182.0
        offset_angle_in_radians = offset_angle_in_degrees * Math::PI / 180
        x_pos += hitbox.distance*Math.cos(offset_angle_in_radians)
        y_pos += hitbox.distance*Math.sin(offset_angle_in_radians)

        hitbox_item = Qt::GraphicsRectItem.new
        # Red: both damages and takes damage; blue: damages player only;
        # green: takes damage only; white: neither.
        if hitbox.can_damage_player && hitbox.can_take_damage
          hitbox_item.setPen(RED_PEN)
        elsif hitbox.can_damage_player
          hitbox_item.setPen(BLUE_PEN)
        elsif hitbox.can_take_damage
          hitbox_item.setPen(GREEN_PEN)
        else
          hitbox_item.setPen(WHITE_PEN)
        end
        hitbox_item.setRect(x_pos-hitbox.width/2, y_pos-hitbox.height/2, hitbox.width, hitbox.height)
        hitbox_item.setTransformOriginPoint(hitbox_item.rect.center)
        rotation_in_degrees = hitbox.rotation / 182.0
        hitbox_item.setRotation(rotation_in_degrees)
        hitbox_item.setZValue(1)
        @skeleton_graphics_scene.addItem(hitbox_item)
      end
    end

    # Overlay: attachment points as red dots.
    if @ui.show_points.checked?
      @skeleton.points.each do |point|
        joint = @skeleton.joints[point.parent_joint_id]
        joint_change = pose.joint_changes[point.parent_joint_id]
        joint_state = @current_tweened_joint_states[point.parent_joint_id]

        x_pos = joint_state.x_pos
        y_pos = joint_state.y_pos

        offset_angle = point.rotation + joint_change.rotation
        if joint.copy_parent_visual_rotation
          offset_angle += joint_state.inherited_rotation
        end
        offset_angle_in_degrees = offset_angle / 182.0
        offset_angle_in_radians = offset_angle_in_degrees * Math::PI / 180
        x_pos += point.distance*Math.cos(offset_angle_in_radians)
        y_pos += point.distance*Math.sin(offset_angle_in_radians)

        ellipse = @skeleton_graphics_scene.addEllipse(x_pos, y_pos, 3, 3, RED_PEN)
        ellipse.setZValue(1)
      end
    end
  end

  # Checkbox slots: the checkbox state is read inside update_drawn_joints,
  # so these only trigger a redraw.
  def toggle_show_skeleton(checked)
    update_drawn_joints()
  end

  def toggle_show_hitboxes(checked)
    update_drawn_joints()
  end

  def toggle_show_points(checked)
    update_drawn_joints()
  end

  # NOTE(review): the Apply branch is currently empty — saving is not
  # implemented in this viewer.
  def button_box_clicked(button)
    if @ui.buttonBox.standardButton(button) == Qt::DialogButtonBox::Apply
    end
  end

  # Avoid dumping the huge Qt object graph in debug output.
  def inspect; to_s; end
end
# Mutable record of a joint's computed on-screen state: absolute x/y
# position and the rotation inherited from its parent chain.
class JointState
  attr_accessor :x_pos, :y_pos, :inherited_rotation
end
Minor tweak to how skeleton viewer is coded
This makes the joint state initialization code more robust in order to
help support showing skeletons in DoS/PoR, in case that ever gets
figured out.
require_relative 'ui_skeleton_editor'
# Qt dialog for viewing a sprite's skeleton: poses and animations are drawn
# into a QGraphicsScene, with optional overlays for the bone structure,
# hitboxes and attachment points, plus tweened playback on a timer.
class SkeletonEditorDialog < Qt::Dialog
  # Pens for the debug overlays drawn on top of the sprite frames.
  GREY_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(128, 128, 128)), 2)
  RED_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(224, 16, 16)), 2)
  GREEN_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(16, 224, 16)), 2)
  BLUE_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(16, 16, 224)), 2)
  WHITE_PEN = Qt::Pen.new(Qt::Brush.new(Qt::Color.new(255, 255, 255)), 2)

  attr_reader :game, :fs

  # Qt slot declarations; each is connected to a UI signal in #initialize.
  slots "pose_changed_no_tween(int)"
  slots "toggle_show_skeleton(int)"
  slots "toggle_show_hitboxes(int)"
  slots "toggle_show_points(int)"
  slots "animation_changed(int)"
  slots "animation_keyframe_changed_no_tween(int)"
  slots "toggle_animation_paused()"
  slots "advance_tweenframe()"
  slots "button_box_clicked(QAbstractButton*)"

  # parent      - parent Qt widget.
  # sprite_info - sprite whose skeleton file will be loaded.
  # fs          - filesystem object used to read the skeleton data.
  # renderer    - renderer used to rasterize the sprite's frames.
  def initialize(parent, sprite_info, fs, renderer)
    super(parent, Qt::WindowTitleHint | Qt::WindowSystemMenuHint)

    @sprite_info = sprite_info
    @fs = fs
    @renderer = renderer

    @ui = Ui_SkeletonEditor.new
    @ui.setup_ui(self)

    @skeleton_graphics_scene = Qt::GraphicsScene.new
    @ui.skeleton_graphics_view.setScene(@skeleton_graphics_scene)

    # Single-shot timer, restarted manually after every frame, drives playback.
    @animation_timer = Qt::Timer.new()
    @animation_timer.setSingleShot(true)
    connect(@animation_timer, SIGNAL("timeout()"), self, SLOT("advance_tweenframe()"))

    set_animation_paused(true)

    connect(@ui.pose_index, SIGNAL("activated(int)"), self, SLOT("pose_changed_no_tween(int)"))
    connect(@ui.show_skeleton, SIGNAL("stateChanged(int)"), self, SLOT("toggle_show_skeleton(int)"))
    connect(@ui.show_hitboxes, SIGNAL("stateChanged(int)"), self, SLOT("toggle_show_hitboxes(int)"))
    connect(@ui.show_points, SIGNAL("stateChanged(int)"), self, SLOT("toggle_show_points(int)"))
    connect(@ui.animation_index, SIGNAL("activated(int)"), self, SLOT("animation_changed(int)"))
    connect(@ui.seek_slider, SIGNAL("valueChanged(int)"), self, SLOT("animation_keyframe_changed_no_tween(int)"))
    connect(@ui.toggle_paused_button, SIGNAL("clicked()"), self, SLOT("toggle_animation_paused()"))
    connect(@ui.buttonBox, SIGNAL("clicked(QAbstractButton*)"), self, SLOT("button_box_clicked(QAbstractButton*)"))

    self.show()
    self.load_skeleton()
  end

  # Loads the skeleton file, rasterizes the sprite's frames into pixmaps, and
  # populates the pose/animation combo boxes, selecting index 0 of each.
  def load_skeleton
    @skeleton = SpriteSkeleton.new(@sprite_info.skeleton_file, @fs)

    chunky_frames, @min_x, @min_y, _, _, _, _, _ = @renderer.render_sprite(@sprite_info)
    @pixmap_frames = chunky_frames.map do |chunky_image|
      pixmap = Qt::Pixmap.new
      blob = chunky_image.to_blob
      pixmap.loadFromData(blob, blob.length)
      pixmap
    end

    @ui.skeleton_file_name.text = @skeleton.skeleton_file

    @ui.pose_index.clear()
    @skeleton.poses.each_index do |i|
      @ui.pose_index.addItem("%02X" % i)
    end
    pose_changed(0)

    @ui.animation_index.clear()
    @skeleton.animations.each_with_index do |animation, i|
      @ui.animation_index.addItem("%02X" % i)
    end
    animation_changed(0)
  end

  # Jumps straight to pose +i+, discarding the previous pose so no tweening
  # happens on the next redraw.
  def pose_changed_no_tween(i)
    @current_pose_index = i
    pose = @skeleton.poses[@current_pose_index]
    @current_pose_joint_states = initialize_joint_states(pose)
    @tweening_progress = 0.0
    @previous_pose = nil

    update_drawn_joints()

    @ui.pose_index.setCurrentIndex(i)
  end

  # Switches to pose +i+, remembering the previous pose (when one exists) so
  # playback can tween between the two.
  def pose_changed(i)
    if @current_pose_joint_states
      @previous_pose = @skeleton.poses[@current_pose_index]
      @previous_pose_joint_state = @current_pose_joint_states
      @current_pose_index = i
      pose = @skeleton.poses[@current_pose_index]
      @current_pose_joint_states = initialize_joint_states(pose)
      @tweening_progress = 0.0
    else
      # First pose ever shown: previous and current are the same pose.
      @current_pose_index = i
      pose = @skeleton.poses[@current_pose_index]
      @current_pose_joint_states = initialize_joint_states(pose)
      @previous_pose_joint_state = @current_pose_joint_states
      @previous_pose = @skeleton.poses[@current_pose_index]
      @tweening_progress = 0.0
    end

    update_drawn_joints()

    @ui.pose_index.setCurrentIndex(i)
  end

  # Selects animation +i+, resetting the seek slider; disables playback
  # controls when the animation doesn't exist or has no keyframes.
  def animation_changed(i)
    @current_animation_index = i
    @animation_timer.stop()
    @ui.seek_slider.value = 0
    @current_animation_keyframe_index = 0

    @current_animation = @skeleton.animations[@current_animation_index]

    if @current_animation.nil?
      @ui.seek_slider.enabled = false
      @ui.toggle_paused_button.enabled = false
      return
    end

    @ui.seek_slider.enabled = true
    @ui.seek_slider.minimum = 0
    @ui.seek_slider.maximum = @current_animation.keyframes.length-1
    @ui.toggle_paused_button.enabled = true

    if @current_animation.keyframes.length > 0
      animation_keyframe_changed(0)
      start_animation()
    else
      # Animation with no keyframes
    end
  end

  # Computes the absolute position and inherited rotation of every joint for
  # +pose+. Uses a worklist seeded with the root joints (parent_id == 0xFF)
  # and then queues each joint's children once their parent is done, so the
  # joint list does NOT need parents to precede children (unlike the simple
  # index-order pass this replaced). Rotations appear to be stored in
  # 65536ths of a circle (divided by 182.0 to get degrees) — TODO confirm.
  # NOTE(review): the identity lookups via Array#index make this O(n^2) in
  # joint count, and the block params below shadow the outer `joint` local.
  def initialize_joint_states(pose)
    joint_states_for_pose = []
    joint_states_to_initialize = []
    @skeleton.joints.each_with_index do |joint, joint_index|
      joint_state = JointState.new
      joint_states_for_pose << joint_state
      if joint.parent_id == 0xFF
        joint_states_to_initialize << joint_state
      end
    end

    while joint_states_to_initialize.any?
      joint_state = joint_states_to_initialize.shift()
      joint_index = joint_states_for_pose.index(joint_state)
      joint_change = pose.joint_changes[joint_index]
      joint = @skeleton.joints[joint_index]

      if joint.parent_id == 0xFF
        # Root joint: anchored at the origin with no inherited rotation.
        joint_state.x_pos = 0
        joint_state.y_pos = 0
        joint_state.inherited_rotation = 0
      else
        parent_joint = @skeleton.joints[joint.parent_id]
        parent_joint_change = pose.joint_changes[joint.parent_id]
        parent_joint_state = joint_states_for_pose[joint.parent_id]

        joint_state.x_pos = parent_joint_state.x_pos
        joint_state.y_pos = parent_joint_state.y_pos

        joint_state.inherited_rotation = parent_joint_change.rotation
        if parent_joint.copy_parent_visual_rotation && parent_joint.parent_id != 0xFF
          joint_state.inherited_rotation += parent_joint_state.inherited_rotation
        end

        connected_rotation_in_degrees = joint_state.inherited_rotation / 182.0
        offset_angle = connected_rotation_in_degrees
        offset_angle += 90 * joint.positional_rotation
        offset_angle_in_radians = offset_angle * Math::PI / 180

        # Offset the joint from its parent along the computed angle.
        joint_state.x_pos += joint_change.distance*Math.cos(offset_angle_in_radians)
        joint_state.y_pos += joint_change.distance*Math.sin(offset_angle_in_radians)
      end

      # Queue this joint's children now that its own state is final.
      child_joints = @skeleton.joints.select{|joint| joint.parent_id == joint_index}
      child_joint_indexes = child_joints.map{|joint| @skeleton.joints.index(joint)}
      joint_states_to_initialize += child_joint_indexes.map{|joint_index| joint_states_for_pose[joint_index]}
    end

    joint_states_for_pose
  end

  # Builds an interpolated pose between +previous_pose+ and +next_pose+ at
  # +tweening_progress+ (0.0..1.0) and returns [joint_states, tweened_pose].
  def tween_poses(previous_pose, next_pose, tweening_progress)
    tweened_pose = Pose.new

    previous_pose.joint_changes.each_with_index do |prev_joint_change, joint_index|
      next_joint_change = next_pose.joint_changes[joint_index]

      prev_multiplier = 1.0 - tweening_progress
      next_multiplier = tweening_progress

      tweened_rotation = merge_two_angles(prev_joint_change.rotation, next_joint_change.rotation, prev_multiplier, next_multiplier)
      tweened_distance = prev_joint_change.distance*prev_multiplier + next_joint_change.distance*next_multiplier

      # Re-pack as raw joint change data: uint16 LE rotation, int8 distance,
      # uint8 frame id (frame id is taken from the previous keyframe).
      tweened_joint_change_data = [tweened_rotation, tweened_distance, prev_joint_change.new_frame_id].pack("vcC")
      tweened_joint_change = JointChange.new(tweened_joint_change_data)

      tweened_pose.joint_changes << tweened_joint_change
    end

    tweened_states = initialize_joint_states(tweened_pose)

    return [tweened_states, tweened_pose]
  end

  # Weighted average of two 16-bit angle values, shifting one by a full turn
  # (0x10000) first when they are more than half a turn apart, so the
  # interpolation takes the shorter way around.
  def merge_two_angles(a, b, a_multiplier, b_multiplier)
    if b - a >= 0x8000
      b -= 0x10000
    elsif a - b >= 0x8000
      a -= 0x10000
    end

    a*a_multiplier + b*b_multiplier
  end

  # Seeks to keyframe +i+ (tweening from the current pose) and restarts the
  # frame timer.
  def animation_keyframe_changed(i)
    @current_animation_keyframe_index = i
    @current_animation_tweenframe_index = 0

    @current_keyframe = @current_animation.keyframes[@current_animation_keyframe_index]

    pose_changed(@current_keyframe.pose_id)
    animation_tweenframe_changed(0)

    @ui.seek_slider.value = @current_animation_keyframe_index

    # ~60fps tick.
    millisecond_delay = (1 / 60.0 * 1000).round
    @animation_timer.start(millisecond_delay)
  end

  # Seeks to keyframe +i+ without tweening (used by the seek slider). Bails
  # out when the slider already reflects keyframe +i+, unless +force+ is set.
  def animation_keyframe_changed_no_tween(i, force=false)
    return if i == @current_animation_keyframe_index && @ui.seek_slider.value == @current_animation_keyframe_index && !force

    @current_animation_keyframe_index = i
    @current_animation_tweenframe_index = 0

    @current_keyframe = @current_animation.keyframes[@current_animation_keyframe_index]

    pose_changed_no_tween(@current_keyframe.pose_id)
    animation_tweenframe_changed(0)

    @ui.seek_slider.value = @current_animation_keyframe_index

    millisecond_delay = (1 / 60.0 * 1000).round
    @animation_timer.start(millisecond_delay)
  end

  # Moves to tween frame +i+ within the current keyframe and redraws.
  def animation_tweenframe_changed(i)
    @current_animation_tweenframe_index = i
    @tweening_progress = @current_animation_tweenframe_index.to_f / @current_keyframe.length_in_frames
    #@ui.frame_delay.text = "%04X" % frame_delay.delay
    update_drawn_joints()
  end

  # Timer callback: advances one tween frame (or the next keyframe when the
  # current one is exhausted) and re-arms the timer.
  def advance_tweenframe
    if @current_animation && !@animation_paused
      if @current_animation_tweenframe_index >= @current_keyframe.length_in_frames
        advance_keyframe()
      else
        animation_tweenframe_changed(@current_animation_tweenframe_index+1)
      end

      millisecond_delay = (1 / 60.0 * 1000).round
      @animation_timer.start(millisecond_delay)
    end
  end

  # Sets the paused state, updates the play/pause button label, and kicks the
  # timer off again when unpausing.
  def set_animation_paused(paused)
    @animation_paused = paused
    if @animation_paused
      @ui.toggle_paused_button.text = "Play"
    else
      @ui.toggle_paused_button.text = "Pause"
      start_animation()
    end
  end

  # Arms the playback timer for one ~60fps tick.
  def start_animation
    millisecond_delay = (1 / 60.0 * 1000).round
    @animation_timer.start(millisecond_delay)
  end

  def toggle_animation_paused
    set_animation_paused(!@animation_paused)
  end

  # Advances to the next keyframe, wrapping to 0 at the end; pauses at the
  # wrap unless the loop checkbox is checked.
  def advance_keyframe
    if @current_animation && !@animation_paused
      if @current_animation_keyframe_index >= @current_animation.keyframes.length-1
        animation_keyframe_changed(0)
        unless @ui.loop_animation.checked
          set_animation_paused(true)
        end
      else
        animation_keyframe_changed(@current_animation_keyframe_index+1)
      end
    end
  end

  # Clears the scene and redraws the sprite frames for the current (possibly
  # tweened) pose, plus any enabled overlays (skeleton, hitboxes, points).
  def update_drawn_joints
    @skeleton_graphics_scene.items.each do |item|
      @skeleton_graphics_scene.removeItem(item)
    end

    next_pose = @skeleton.poses[@current_pose_index]
    if @previous_pose
      @current_tweened_joint_states, pose = tween_poses(@previous_pose, next_pose, @tweening_progress)
    else
      @current_tweened_joint_states = @current_pose_joint_states
      pose = next_pose
    end

    # Draw the sprite frames attached to each joint, in draw order.
    @skeleton.joint_indexes_by_draw_order.each do |joint_index|
      joint = @skeleton.joints[joint_index]
      joint_change = pose.joint_changes[joint_index]
      joint_state = @current_tweened_joint_states[joint_index]

      # 0xFF means the joint has no frame to draw.
      next if joint.frame_id == 0xFF

      rotation = joint_change.rotation
      if joint.parent_id != 0xFF && joint.copy_parent_visual_rotation
        rotation += joint_state.inherited_rotation
      end
      rotation_in_degrees = rotation/182.0

      # A joint change can override the joint's default frame.
      if joint_change.new_frame_id == 0xFF
        frame_id = joint.frame_id
      else
        frame_id = joint_change.new_frame_id
      end
      pixmap = @pixmap_frames[frame_id]
      graphics_item = Qt::GraphicsPixmapItem.new(pixmap)
      graphics_item.setOffset(@min_x, @min_y)
      graphics_item.setPos(joint_state.x_pos, joint_state.y_pos)
      graphics_item.setRotation(rotation_in_degrees)
      if joint.horizontal_flip && joint.vertical_flip
        graphics_item.scale(-1, -1)
      elsif joint.horizontal_flip
        graphics_item.scale(-1, 1)
      elsif joint.vertical_flip
        graphics_item.scale(1, -1)
      end
      @skeleton_graphics_scene.addItem(graphics_item)
    end

    # Overlay: joint markers and bones connecting each joint to its parent.
    if @ui.show_skeleton.checked
      @skeleton.joints.each_index do |joint_index|
        joint = @skeleton.joints[joint_index]
        joint_change = pose.joint_changes[joint_index]
        joint_state = @current_tweened_joint_states[joint_index]

        ellipse = @skeleton_graphics_scene.addEllipse(joint_state.x_pos-1, joint_state.y_pos-1, 3, 3, GREY_PEN)
        ellipse.setZValue(1)

        if joint.parent_id != 0xFF
          parent_joint = @skeleton.joints[joint.parent_id]
          parent_joint_state = @current_tweened_joint_states[joint.parent_id]
          line = @skeleton_graphics_scene.addLine(joint_state.x_pos, joint_state.y_pos, parent_joint_state.x_pos, parent_joint_state.y_pos, GREY_PEN)
          line.setZValue(1)
        end
      end
    end

    # Overlay: hitbox rectangles, color-coded by damage direction.
    if @ui.show_hitboxes.checked
      @skeleton.hitboxes.each do |hitbox|
        joint = @skeleton.joints[hitbox.parent_joint_id]
        joint_change = pose.joint_changes[hitbox.parent_joint_id]
        joint_state = @current_tweened_joint_states[hitbox.parent_joint_id]

        x_pos = joint_state.x_pos
        y_pos = joint_state.y_pos

        offset_angle = hitbox.rotation + joint_change.rotation
        if joint.copy_parent_visual_rotation
          offset_angle += joint_state.inherited_rotation
        end
        offset_angle_in_degrees = offset_angle / 182.0
        offset_angle_in_radians = offset_angle_in_degrees * Math::PI / 180
        x_pos += hitbox.distance*Math.cos(offset_angle_in_radians)
        y_pos += hitbox.distance*Math.sin(offset_angle_in_radians)

        hitbox_item = Qt::GraphicsRectItem.new
        # Red: both damages and takes damage; blue: damages player only;
        # green: takes damage only; white: neither.
        if hitbox.can_damage_player && hitbox.can_take_damage
          hitbox_item.setPen(RED_PEN)
        elsif hitbox.can_damage_player
          hitbox_item.setPen(BLUE_PEN)
        elsif hitbox.can_take_damage
          hitbox_item.setPen(GREEN_PEN)
        else
          hitbox_item.setPen(WHITE_PEN)
        end
        hitbox_item.setRect(x_pos-hitbox.width/2, y_pos-hitbox.height/2, hitbox.width, hitbox.height)
        hitbox_item.setTransformOriginPoint(hitbox_item.rect.center)
        rotation_in_degrees = hitbox.rotation / 182.0
        hitbox_item.setRotation(rotation_in_degrees)
        hitbox_item.setZValue(1)
        @skeleton_graphics_scene.addItem(hitbox_item)
      end
    end

    # Overlay: attachment points as red dots.
    if @ui.show_points.checked?
      @skeleton.points.each do |point|
        joint = @skeleton.joints[point.parent_joint_id]
        joint_change = pose.joint_changes[point.parent_joint_id]
        joint_state = @current_tweened_joint_states[point.parent_joint_id]

        x_pos = joint_state.x_pos
        y_pos = joint_state.y_pos

        offset_angle = point.rotation + joint_change.rotation
        if joint.copy_parent_visual_rotation
          offset_angle += joint_state.inherited_rotation
        end
        offset_angle_in_degrees = offset_angle / 182.0
        offset_angle_in_radians = offset_angle_in_degrees * Math::PI / 180
        x_pos += point.distance*Math.cos(offset_angle_in_radians)
        y_pos += point.distance*Math.sin(offset_angle_in_radians)

        ellipse = @skeleton_graphics_scene.addEllipse(x_pos, y_pos, 3, 3, RED_PEN)
        ellipse.setZValue(1)
      end
    end
  end

  # Checkbox slots: the checkbox state is read inside update_drawn_joints,
  # so these only trigger a redraw.
  def toggle_show_skeleton(checked)
    update_drawn_joints()
  end

  def toggle_show_hitboxes(checked)
    update_drawn_joints()
  end

  def toggle_show_points(checked)
    update_drawn_joints()
  end

  # NOTE(review): the Apply branch is currently empty — saving is not
  # implemented in this viewer.
  def button_box_clicked(button)
    if @ui.buttonBox.standardButton(button) == Qt::DialogButtonBox::Apply
    end
  end

  # Avoid dumping the huge Qt object graph in debug output.
  def inspect; to_s; end
end
# Mutable record of a joint's computed on-screen state: absolute x/y
# position and the rotation inherited from its parent chain.
class JointState
  attr_accessor :x_pos, :y_pos, :inherited_rotation
end
|
# Series generation command: produces arithmetic-series points and serves
# them over a simple line-oriented stdin/stdout protocol.
module Series
  module Command
    module_function

    # Default series parameters: 8 points, starting at 0, stepping by 1.
    DEFAULT_OPTIONS = {
      :n_steps => 8,
      :offset => 0,
      :step_size => 1,
    }

    # Configuration hook; currently a no-op.
    def setup(config)
    end

    # Returns DEFAULT_OPTIONS merged with +overrides+.
    def options(overrides = {})
      DEFAULT_OPTIONS.merge(overrides)
    end

    # Builds an Enumerator over the arithmetic series described by +options+:
    # starts at :offset and advances by :step_size. When :n_steps is nil the
    # series is infinite; otherwise exactly :n_steps values are produced.
    def driver(options = {})
      n_steps = options.fetch(:n_steps)
      start_index = options.fetch(:offset)
      step_size = options.fetch(:step_size)

      if n_steps.nil?
        Enumerator.new do |y|
          current = start_index
          while true
            y << current
            current += step_size
          end
        end
      else
        Enumerator.new do |y|
          current = start_index
          n_steps.times do
            y << current
            current += step_size
          end
        end
      end
    end

    # Reads integer x values from +stdin+ (one per line, blank lines are
    # skipped), yields each to the block, and writes the block's result to
    # +stdout+. Returns self.
    # Raises ArgumentError if a non-empty line is not a valid integer.
    def loop(stdin = $stdin, stdout = $stdout)
      while line = stdin.gets
        line.strip!
        next if line.empty?
        x = Integer(line)
        y = yield(x)
        stdout.puts y
      end
      self
    end

    # Serves values from +enum+ keyed by the x values read in #loop:
    # advances the enumerator by (x - last_x) steps for each request,
    # rewinding and re-seeking when x moves backwards.
    # Fix: a finite enumerator previously raised an uncaught StopIteration
    # once exhausted; that is now rescued and treated as a normal end of
    # stream. Returns self.
    def loop_enum(enum, stdin = $stdin, stdout = $stdout)
      enum_value, last_x = enum.next, 0
      self.loop(stdin, stdout) do |x|
        n_steps = x - last_x
        # reset if not going forward
        if n_steps < 0
          enum.rewind
          enum_value, last_x, n_steps = enum.next, 0, x
        end
        n_steps.times { enum_value = enum.next }
        last_x = x
        enum_value
      end
      self
    rescue StopIteration
      # Enumerator ran out of values: stop emitting points and exit cleanly.
      self
    end
  end
end
Handle StopIteration in loop_enum
This stops the generation of series points once the enum finishes, instead of raising an uncaught StopIteration.
# Series generation command: produces arithmetic-series points and serves
# them over a simple line-oriented stdin/stdout protocol.
module Series
  module Command
    # Fix: `module_function` was declared twice (before DEFAULT_OPTIONS and
    # again before #setup); once is sufficient and the duplicate is removed.
    module_function

    # Default series parameters: 8 points, starting at 0, stepping by 1.
    DEFAULT_OPTIONS = {
      :n_steps => 8,
      :offset => 0,
      :step_size => 1,
    }

    # Configuration hook; currently a no-op.
    def setup(config)
    end

    # Returns DEFAULT_OPTIONS merged with +overrides+.
    def options(overrides = {})
      DEFAULT_OPTIONS.merge(overrides)
    end

    # Builds an Enumerator over the arithmetic series described by +options+:
    # starts at :offset and advances by :step_size. When :n_steps is nil the
    # series is infinite; otherwise exactly :n_steps values are produced.
    def driver(options = {})
      n_steps = options.fetch(:n_steps)
      start_index = options.fetch(:offset)
      step_size = options.fetch(:step_size)

      if n_steps.nil?
        Enumerator.new do |y|
          current = start_index
          while true
            y << current
            current += step_size
          end
        end
      else
        Enumerator.new do |y|
          current = start_index
          n_steps.times do
            y << current
            current += step_size
          end
        end
      end
    end

    # Reads integer x values from +stdin+ (one per line, blank lines are
    # skipped), yields each to the block, and writes the block's result to
    # +stdout+. Returns self.
    # Raises ArgumentError if a non-empty line is not a valid integer.
    def loop(stdin = $stdin, stdout = $stdout)
      while line = stdin.gets
        line.strip!
        next if line.empty?
        x = Integer(line)
        y = yield(x)
        stdout.puts y
      end
      self
    end

    # Serves values from +enum+ keyed by the x values read in #loop:
    # advances the enumerator by (x - last_x) steps for each request,
    # rewinding and re-seeking when x moves backwards. A finite enumerator
    # running out of values (StopIteration) is treated as a normal end of
    # stream. Returns self.
    def loop_enum(enum, stdin = $stdin, stdout = $stdout)
      enum_value, last_x = enum.next, 0
      self.loop(stdin, stdout) do |x|
        n_steps = x - last_x
        # reset if not going forward
        if n_steps < 0
          enum.rewind
          enum_value, last_x, n_steps = enum.next, 0, x
        end
        n_steps.times { enum_value = enum.next }
        last_x = x
        enum_value
      end
      self
    rescue StopIteration
      # normal exit
      self
    end
  end
end
|
# -*- encoding: utf-8 -*-
# Gemspec for easy_globalize3_accessors: accessor helpers for
# globalize3-translated attributes.
# VERSION is maintained in lib/easy_globalize3_accessors/version.rb.
require File.expand_path("../lib/easy_globalize3_accessors/version", __FILE__)
Gem::Specification.new do |s|
s.name = "easy_globalize3_accessors"
s.version = EasyGlobalize3Accessors::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Tomasz Stachewicz", "Wojciech Pietrzak", "Steve Verlinden", "Robert Pankowecki"]
s.email = ["tomekrs@o2.pl", "steve.verlinden@gmail.com", "robert.pankowecki@gmail.com", "rpa@gavdi.com"]
s.homepage = "http://rubygems.org/gems/easy_globalize3_accessors"
s.summary = "Define methods for accessing translated attributes"
s.description = "Define methods for accessing translated attributes"
s.required_rubygems_version = ">= 1.3.6"
# NOTE(review): rubyforge_project is deprecated in newer RubyGems versions.
s.rubyforge_project = "easy_globalize3_accessors"
# Runtime dependency.
s.add_dependency "globalize3", "~> 0.3.0"
# Development-only dependencies.
s.add_development_dependency "bundler", "~> 1.0.15"
s.add_development_dependency "rake", "~> 0.9.2"
s.add_development_dependency "sqlite3"
# Package every git-tracked file; executables are whatever lives under bin/.
s.files = `git ls-files`.split("\n")
s.executables = `git ls-files`.split("\n").map{|f| f =~ /^bin\/(.*)/ ? $1 : nil}.compact
s.require_path = 'lib'
end
Update bundler development dependency from ~> 1.0.15 to ~> 1.3.5
# -*- encoding: utf-8 -*-
# Gemspec for easy_globalize3_accessors: accessor helpers for
# globalize3-translated attributes.
# VERSION is maintained in lib/easy_globalize3_accessors/version.rb.
require File.expand_path("../lib/easy_globalize3_accessors/version", __FILE__)
Gem::Specification.new do |s|
s.name = "easy_globalize3_accessors"
s.version = EasyGlobalize3Accessors::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Tomasz Stachewicz", "Wojciech Pietrzak", "Steve Verlinden", "Robert Pankowecki"]
s.email = ["tomekrs@o2.pl", "steve.verlinden@gmail.com", "robert.pankowecki@gmail.com", "rpa@gavdi.com"]
s.homepage = "http://rubygems.org/gems/easy_globalize3_accessors"
s.summary = "Define methods for accessing translated attributes"
s.description = "Define methods for accessing translated attributes"
s.required_rubygems_version = ">= 1.3.6"
# NOTE(review): rubyforge_project is deprecated in newer RubyGems versions.
s.rubyforge_project = "easy_globalize3_accessors"
# Runtime dependency.
s.add_dependency "globalize3", "~> 0.3.0"
# Development-only dependencies.
s.add_development_dependency "bundler", "~> 1.3.5"
s.add_development_dependency "rake", "~> 0.9.2"
s.add_development_dependency "sqlite3"
# Package every git-tracked file; executables are whatever lives under bin/.
s.files = `git ls-files`.split("\n")
s.executables = `git ls-files`.split("\n").map{|f| f =~ /^bin\/(.*)/ ? $1 : nil}.compact
s.require_path = 'lib'
end
|
require 'action_logger/version'
require 'action_logger/engine'
# Thin façade that forwards log entries to ActionLog.
module ActionLogger
  # Record +action+ originating from +source+.
  #
  # attrs:: optional hash whose :sender, :ip, :data and :admin_id entries
  #         are forwarded positionally; missing keys pass through as nil.
  def self.log action, source, attrs = {}
    # FIX: was attrs[:semder] — a typo that always forwarded nil as sender.
    ActionLog.log action, source, attrs[:sender], attrs[:ip], attrs[:data], attrs[:admin_id]
  end
end
Fixes typo in ActionLogger.log (attrs[:semder] → attrs[:sender])
require 'action_logger/version'
require 'action_logger/engine'
# Convenience wrapper around ActionLog.
module ActionLogger
  # Forward +action+ and +source+ to ActionLog.log together with the
  # optional :sender, :ip, :data and :admin_id entries from +attrs+
  # (nil for any key not supplied).
  def self.log action, source, attrs = {}
    sender, ip, data, admin_id = attrs.values_at(:sender, :ip, :data, :admin_id)
    ActionLog.log action, source, sender, ip, data, admin_id
  end
end
|
# Copyright:: Copyright (c) 2015.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition: builds the UnifiedPush server WAR plus the
# initdb artifact and stages both under the embedded apps directory.
name "unifiedpush-server"
default_version "master"
dependency "ruby"
dependency "bundler"
dependency "rsync"
dependency "postgresql"
dependency "cassandra-unit"
source git: "https://github.com/aerobase/unifiedpush-server.git"
relative_path "unifiedpush-server"
build_dir = "#{project_dir}"
build do
  # Install the parent POM first so the standalone initdb build below can
  # resolve it from the local repository.
  command "mvn --non-recursive clean install"
  command "mvn clean install -DskipTests"
  # FIX: the initdb module was never built, so the tar.gz copied below did
  # not exist; build it explicitly.
  command "mvn clean install -DskipTests -f databases/initdb/pom.xml"
  command "mkdir -p #{install_dir}/embedded/apps/unifiedpush-server/"
  # Copy packages to installation dir.
  copy "#{project_dir}/servers/target/unifiedpush-server.war", "#{install_dir}/embedded/apps/unifiedpush-server/unifiedpush-server.war"
  copy "#{project_dir}/databases/initdb/target/unifiedpush-initdb.tar.gz", "#{install_dir}/embedded/apps/unifiedpush-server/unifiedpush-initdb.tar.gz"
  # Record the built version for runtime introspection.
  erb source: "version.yml.erb",
      dest: "#{install_dir}/embedded/apps/unifiedpush-server/version.yml",
      mode: 0644,
      vars: { default_version: default_version }
end
# extract initdb project to allow JPA based schema creation.
build do
  command "tar xzf #{install_dir}/embedded/apps/unifiedpush-server/unifiedpush-initdb.tar.gz -C #{install_dir}/embedded/apps/unifiedpush-server/"
end
Compile the missing initdb module so its artifact exists before being copied
# Copyright:: Copyright (c) 2015.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition: builds the UnifiedPush server WAR plus the
# initdb artifact and stages both under the embedded apps directory.
name "unifiedpush-server"
default_version "master"
dependency "ruby"
dependency "bundler"
dependency "rsync"
dependency "postgresql"
dependency "cassandra-unit"
source git: "https://github.com/aerobase/unifiedpush-server.git"
relative_path "unifiedpush-server"
build_dir = "#{project_dir}"
build do
# Install the parent POM first; the standalone initdb build below needs it
# in the local repository.
command "mvn --non-recursive clean install"
command "mvn clean install -DskipTests"
# Build the initdb module explicitly — it is not covered by the build
# above, and its tar.gz is copied below.
command "mvn clean install -DskipTests -f databases/initdb/pom.xml"
command "mkdir -p #{install_dir}/embedded/apps/unifiedpush-server/"
# Copy packages to installation dir.
copy "#{project_dir}/servers/target/unifiedpush-server.war", "#{install_dir}/embedded/apps/unifiedpush-server/unifiedpush-server.war"
copy "#{project_dir}/databases/initdb/target/unifiedpush-initdb.tar.gz", "#{install_dir}/embedded/apps/unifiedpush-server/unifiedpush-initdb.tar.gz"
# Record the built version for runtime introspection.
erb source: "version.yml.erb",
dest: "#{install_dir}/embedded/apps/unifiedpush-server/version.yml",
mode: 0644,
vars: { default_version: default_version }
end
# extract initdb project to allow JPA based schema creation.
build do
command "tar xzf #{install_dir}/embedded/apps/unifiedpush-server/unifiedpush-initdb.tar.gz -C #{install_dir}/embedded/apps/unifiedpush-server/"
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.