CombinedText stringlengths 4 3.42M |
|---|
# Policy models the sharing settings of an asset: a coarse sharing_scope
# (private / all registered users / everyone) combined with an access_type
# level, refined by per-contributor Permission records (people, favourite
# groups, work groups, projects, institutions).
class Policy < ActiveRecord::Base
  has_many :assets,
    :dependent => :nullify,
    :order => "resource_type ASC"
  has_many :permissions,
    :dependent => :destroy,
    :order => "created_at ASC",
    :autosave => true,
    :after_add => proc {|policy, perm| perm.policy = policy}

  validates_presence_of :sharing_scope, :access_type
  validates_numericality_of :sharing_scope, :access_type

  alias_attribute :title, :name

  before_save :update_timestamp_if_permissions_change

  # Touches updated_at/updated_on just before save whenever this policy (or
  # any of its autosaved permissions) has pending changes, so that
  # timestamp-based caches are invalidated.
  # FIX: changed_for_autosave? is defined on the record instance by
  # ActiveRecord::AutosaveAssociation and already takes pending changes on
  # nested autosave associations into account; the original called it on the
  # `permissions` collection, which does not respond to it (NoMethodError).
  def update_timestamp_if_permissions_change
    if changed_for_autosave?
      current_time = current_time_from_proper_timezone
      write_attribute('updated_at', current_time) if respond_to?(:updated_at)
      write_attribute('updated_on', current_time) if respond_to?(:updated_on)
    end
  end

  # *****************************************************************************
  # This section defines constants for "sharing_scope" and "access_type" values
  # NB! It is critical to all algorithms using these constants, that the latter
  # have their integer values increased along with the access they provide
  # (so, for example, "editing" should have greater value than "viewing")
  # In other words, this means that for both(!) sharing_scope and access_type
  # constants it is crucial that order of these (imposed by their integer values)
  # is preserved

  # sharing_scope
  PRIVATE = 0
  ALL_SYSMO_USERS = 2
  EVERYONE = 4

  # access_type
  DETERMINED_BY_GROUP = -1 # used for whitelist/blacklist (meaning that it doesn't matter what value this field has)
  NO_ACCESS = 0            # i.e. only for anyone; only owner has access
  VISIBLE = 1              # visible only
  ACCESSIBLE = 2           # accessible and visible
  EDITING = 3              # accessible, visible and editing
  MANAGING = 4             # any actions that owner of the asset can perform (including "destroy"ing)
  PUBLISHING = 5           # publish the item

  # "true" value for flag-type fields
  TRUE_VALUE = 1
  FALSE_VALUE = 0
  # *****************************************************************************

  # Makes a copy of the policy, and its associated permissions.
  # The copy is unsaved; permissions are cloned onto the new policy.
  def deep_copy
    copy = self.clone
    self.permissions.each {|p| copy.permissions << p.clone}
    return copy
  end

  # Checks that there are permissions for the provided contributor, for the
  # access_type (or higher). Returns the first matching Permission, or nil.
  def permission_granted?(contributor, access_type)
    permissions.detect {|p| p.contributor == contributor && p.access_type >= access_type}
  end

  # Builds (without saving) a private policy on +resource+ that grants the
  # +recipient+ person ACCESSIBLE rights; used by the upload tool.
  def self.new_for_upload_tool(resource, recipient)
    policy = resource.build_policy(:name => 'auto',
                                   :sharing_scope => Policy::PRIVATE,
                                   :access_type => Policy::NO_ACCESS)
    policy.permissions.build :contributor_type => "Person", :contributor_id => recipient, :access_type => Policy::ACCESSIBLE
    return policy
  end

  # Applies the +sharing+ hash (as submitted by the sharing form) to this
  # policy: sets scope/access/whitelist flags and synchronises the
  # permissions collection against the submitted permission data.
  # Returns self. When +sharing+ is nil the policy is left untouched.
  def set_attributes_with_sharing sharing, projects
    # if no data about sharing is given, it should be some user (not the owner!)
    # who is editing the asset - no need to do anything with policy / permissions: return success
    self.tap do |policy|
      if sharing
        # obtain parameters from sharing hash
        policy.sharing_scope = sharing[:sharing_scope]
        policy.access_type = sharing["access_type_#{sharing_scope}"]
        policy.use_whitelist = sharing[:use_whitelist]
        policy.use_blacklist = sharing[:use_blacklist]

        # NOW PROCESS THE PERMISSIONS
        # read the permission data from sharing (JSON-encoded by the form)
        unless sharing[:permissions].blank?
          contributor_types = ActiveSupport::JSON.decode(sharing[:permissions][:contributor_types])
          new_permission_data = ActiveSupport::JSON.decode(sharing[:permissions][:values])
        else
          contributor_types = []
          new_permission_data = {}
        end

        #if share with your project is chosen
        if (sharing[:sharing_scope].to_i == Policy::ALL_SYSMO_USERS) and !projects.blank?
          #add Project to contributor_type
          contributor_types << "Project" if !contributor_types.include? "Project"
          #add one hash {project.id => {"access_type" => sharing[:your_proj_access_type].to_i}} to new_permission_data
          new_permission_data["Project"] = {} unless new_permission_data["Project"]
          projects.each {|project| new_permission_data["Project"][project.id] = {"access_type" => sharing[:your_proj_access_type].to_i}}
        end

        # --- Synchronise All Permissions for the Policy ---
        # first delete or update any old memberships; entries consumed here
        # are removed (via Hash#delete) so only genuinely new ones remain below
        policy.permissions.each do |p|
          if permission_access = (new_permission_data[p.contributor_type.to_s].try :delete, p.contributor_id)
            p.access_type = permission_access["access_type"]
          else
            p.mark_for_destruction
          end
        end

        # now add any remaining new memberships
        contributor_types.try :each do |contributor_type|
          new_permission_data[contributor_type.to_s].try :each do |p|
            if policy.new_record? or !Permission.find :first, :conditions => {:contributor_type => contributor_type, :contributor_id => p[0], :policy_id => policy.id}
              p = policy.permissions.build :contributor_type => contributor_type, :contributor_id => p[0], :access_type => p[1]["access_type"]
            end
          end
        end
      end
    end
  end

  # returns a default policy for a project
  # (all the related permissions will still be linked to the returned policy)
  def self.project_default(project)
    # if the default project policy isn't set, NIL will be returned - and the caller
    # has to perform further actions in such case
    return project.default_policy
  end

  # An unsaved fully-private policy (owner-only access).
  def self.private_policy
    Policy.new(:name => "default private",
               :sharing_scope => PRIVATE,
               :access_type => NO_ACCESS,
               :use_whitelist => false,
               :use_blacklist => false)
  end

  # An unsaved policy granting all registered users ACCESSIBLE rights.
  def self.registered_users_accessible_policy
    Policy.new(:name => "default accessible",
               :sharing_scope => ALL_SYSMO_USERS,
               :access_type => ACCESSIBLE,
               :use_whitelist => false,
               :use_blacklist => false)
  end

  #The default policy to use when creating authorized items if no other policy is specified
  def self.default
    registered_users_accessible_policy
  end

  # translates access type codes into human-readable form
  def self.get_access_type_wording(access_type, resource=nil)
    case access_type
    when Policy::DETERMINED_BY_GROUP
      return "Individual access rights for each member"
    when Policy::NO_ACCESS
      return "No access"
    when Policy::VISIBLE
      return resource.try(:is_downloadable?) ? "View summary only" : "View summary"
    when Policy::ACCESSIBLE
      return resource.try(:is_downloadable?) ? "View summary and get contents" : "View summary"
    when Policy::EDITING
      return resource.try(:is_downloadable?) ? "View and edit summary and contents" : "View and edit summary"
    when Policy::MANAGING
      return "Manage"
    else
      return "Invalid access type"
    end
  end

  # extracts the "settings" of the policy, discarding other information
  # (e.g. contributor, creation time, etc.)
  def get_settings
    settings = {}
    settings['sharing_scope'] = self.sharing_scope
    settings['access_type'] = self.access_type
    settings['use_whitelist'] = self.use_whitelist
    settings['use_blacklist'] = self.use_blacklist
    return settings
  end

  # extract the "settings" from all permissions associated to the policy;
  # creates array containing 2-item arrays per each policy in the form:
  # [ ... , [ permission_id, {"contributor_id" => id, "contributor_type" => type, "access_type" => access} ] , ... ]
  def get_permission_settings
    p_settings = []
    self.permissions.each do |p|
      # standard parameters for all contributor types
      params_hash = {}
      params_hash["contributor_id"] = p.contributor_id
      params_hash["contributor_type"] = p.contributor_type
      params_hash["access_type"] = p.access_type
      params_hash["contributor_name"] = (p.contributor_type == "Person" ? (p.contributor.first_name + " " + p.contributor.last_name) : p.contributor.name)
      # some of the contributor types will have special additional parameters
      case p.contributor_type
      when "FavouriteGroup"
        params_hash["whitelist_or_blacklist"] = [FavouriteGroup::WHITELIST_NAME, FavouriteGroup::BLACKLIST_NAME].include?(p.contributor.name)
      end
      p_settings << [ p.id, params_hash ]
    end
    return p_settings
  end

  # A policy is private when it is scoped PRIVATE and grants no individual permissions.
  def private?
    sharing_scope == Policy::PRIVATE and permissions.empty?
  end

  def public?
    sharing_scope == Policy::EVERYONE
  end

  #return the hash: key is access_type, value is the array of people
  # (each person represented as [id, name, access_type])
  def summarize_permissions creators=[User.current_user.person], contributor=User.current_user.person
    #build the hash containing contributor_type as key and the people in these groups as value,exception:'Public' holds the access_type as the value
    people_in_group = {'Person' => [], 'FavouriteGroup' => [], 'WorkGroup' => [], 'Project' => [], 'Institution' => [], 'WhiteList' => [], 'BlackList' => [],'Network' => [], 'Public' => 0}
    #the result return: a hash contain the access_type as key, and array of people as value
    grouped_people_by_access_type = {}

    policy_to_people_group people_in_group, contributor
    permissions_to_people_group permissions, people_in_group, contributor

    #Now make the people in group unique by choosing the highest access_type
    people_in_group['FavouriteGroup'] = remove_duplicate(people_in_group['FavouriteGroup'])
    people_in_group['WorkGroup'] = remove_duplicate(people_in_group['WorkGroup'])
    people_in_group['Project'] = remove_duplicate(people_in_group['Project'])
    people_in_group['Institution'] = remove_duplicate(people_in_group['Institution'])

    #Now process precedence with the order [network, institution, project, wg, fg, person]
    filtered_people = people_in_group['Network']
    filtered_people = precedence(filtered_people, people_in_group['Institution'])
    filtered_people = precedence(filtered_people, people_in_group['Project'])
    filtered_people = precedence(filtered_people, people_in_group['WorkGroup'])
    filtered_people = precedence(filtered_people, people_in_group['FavouriteGroup'])
    filtered_people = precedence(filtered_people, people_in_group['Person'])

    #add people in white list
    filtered_people = add_people_in_whitelist(filtered_people, people_in_group['WhiteList'])
    #remove people in blacklist
    filtered_people = remove_people_in_blacklist(filtered_people, people_in_group['BlackList'])

    #add creators and assign them the Policy::EDITING right
    creators.collect!{|c| [c.id, "#{c.first_name} #{c.last_name}", Policy::EDITING] unless c.blank?}
    # FIX: the collect! above leaves nil entries for blank creators; those
    # would crash remove_duplicate (nil[0]) further down - drop them.
    creators.compact!
    filtered_people = add_people_in_whitelist(filtered_people, creators)
    #add contributor
    filtered_people = add_people_in_whitelist(filtered_people, [[contributor.id, "#{contributor.first_name} #{contributor.last_name}", Policy::MANAGING]]) unless contributor.blank?

    #sort people by name
    filtered_people = filtered_people.sort{|a,b| a[1] <=> b[1]}
    #group people by access_type
    grouped_people_by_access_type.merge!(filtered_people.group_by{|person| person[2]})
    #add publishing if access_type for public > 0
    grouped_people_by_access_type[Policy::PUBLISHING] = people_in_group['Public'] if people_in_group['Public'] > 0

    #only store (people in backlist) + (people in people_in_group['Person'] with no access) to the group of access_type=Policy::NO_ACCESS
    people_with_no_access = []
    people_with_no_access.concat(people_in_group['BlackList']) unless people_in_group['BlackList'].blank?
    people_with_no_access.concat(people_in_group['Person'].group_by{|person| person[2]}[Policy::NO_ACCESS]) unless people_in_group['Person'].group_by{|person| person[2]}[Policy::NO_ACCESS].blank?
    people_with_no_access.uniq!
    unless people_with_no_access.blank?
      grouped_people_by_access_type[Policy::NO_ACCESS] = people_with_no_access.sort{|a,b| a[1] <=> b[1]}
    end

    #sort by key of the hash
    grouped_people_by_access_type = Hash[grouped_people_by_access_type.sort]
    return grouped_people_by_access_type
  end

  # Fills +people_in_group+ from the policy-level settings (scope, white/black list).
  # Returns the mutated people_in_group hash.
  def policy_to_people_group people_in_group, contributor=User.current_user.person
    if sharing_scope == Policy::ALL_SYSMO_USERS
      people_in_network = get_people_in_network access_type
      people_in_group['Network'] |= people_in_network unless people_in_network.blank?
    elsif sharing_scope == Policy::EVERYONE
      people_in_group['Public'] = access_type
    end
    #if blacklist/whitelist is used
    if use_whitelist
      people_in_whitelist = get_people_in_FG(contributor, nil, true, nil)
      people_in_group['WhiteList'] |= people_in_whitelist unless people_in_whitelist.blank?
    end
    #if blacklist/whitelist is used
    if use_blacklist
      people_in_blacklist = get_people_in_FG(contributor, nil, nil, true)
      people_in_group['BlackList'] |= people_in_blacklist unless people_in_blacklist.blank?
    end
    people_in_group
  end

  # Fills +people_in_group+ from the individual Permission records, dispatching
  # on contributor_type. Returns the mutated people_in_group hash.
  def permissions_to_people_group permissions, people_in_group, contributor=User.current_user.person
    permissions.each do |permission|
      contributor_id = permission.contributor_id
      access_type = permission.access_type
      case permission.contributor_type
      when 'Person'
        person = get_person contributor_id, access_type
        people_in_group['Person'] << person unless person.blank?
      when 'FavouriteGroup'
        people_in_FG = get_people_in_FG contributor, contributor_id
        people_in_group['FavouriteGroup'] |= people_in_FG unless people_in_FG.blank?
      when 'WorkGroup'
        people_in_WG = get_people_in_WG contributor_id, access_type
        people_in_group['WorkGroup'] |= people_in_WG unless people_in_WG.blank?
      when 'Project'
        people_in_project = get_people_in_project contributor_id, access_type
        people_in_group['Project'] |= people_in_project unless people_in_project.blank?
      when 'Institution'
        people_in_institution = get_people_in_institution contributor_id, access_type
        people_in_group['Institution'] |= people_in_institution unless people_in_institution.blank?
      end
    end
    people_in_group
  end

  # Returns [id, name, access_type] for the person, or nil when not found.
  def get_person person_id, access_type
    person = Person.find(person_id)
    if person
      return [person.id, "#{person.first_name} #{person.last_name}", access_type]
    end
  end

  #review people WG
  def get_people_in_WG wg_id, access_type
    w_group = WorkGroup.find(wg_id)
    if w_group
      people_in_wg = [] #id, name, access_type
      w_group.group_memberships.each do |gm|
        people_in_wg.push [gm.person.id, "#{gm.person.first_name} #{gm.person.last_name}", access_type ]
      end
    end
    return people_in_wg
  end

  #review people in black list, white list and normal workgroup
  # NOTE: the white/black list favourite groups are looked up by the magic
  # names "__whitelist__" / "__blacklist__" belonging to the contributor's user
  def get_people_in_FG contributor, fg_id=nil, is_white_list=nil, is_black_list=nil
    if is_white_list
      f_group = FavouriteGroup.find(:all, :conditions => ["name = ? AND user_id = ?", "__whitelist__", contributor.user.id]).first
    elsif is_black_list
      f_group = FavouriteGroup.find(:all, :conditions => ["name = ? AND user_id = ?", "__blacklist__", contributor.user.id]).first
    else
      f_group = FavouriteGroup.find_by_id(fg_id)
    end
    if f_group
      people_in_FG = [] #id, name, access_type
      f_group.favourite_group_memberships.each do |fgm|
        people_in_FG.push [fgm.person.id, "#{fgm.person.first_name} #{fgm.person.last_name}", fgm.access_type]
      end
      return people_in_FG
    end
  end

  #review people in project
  def get_people_in_project project_id, access_type
    project = Project.find(project_id)
    if project
      people_in_project = [] #id, name, access_type
      project.people.each do |person|
        people_in_project.push [person.id, "#{person.first_name} #{person.last_name}", access_type]
      end
      return people_in_project
    end
  end

  #review people in institution
  def get_people_in_institution institution_id, access_type
    institution = Institution.find(institution_id)
    if institution
      people_in_institution = [] #id, name, access_type
      institution.people.each do |person|
        people_in_institution.push [person.id, "#{person.first_name} #{person.last_name}", access_type]
      end
      return people_in_institution
    end
  end

  #review people in network
  # (i.e. every person in every project, each given +access_type+)
  def get_people_in_network access_type
    people_in_network = [] #id, name, access_type
    projects = Project.find(:all)
    projects.each do |project|
      project.people.each do |person|
        person_identification = [person.id, "#{person.first_name} #{person.last_name}"]
        people_in_network.push person_identification if (!people_in_network.include? person_identification)
      end
    end
    people_in_network.collect!{|person| person.push access_type}
    return people_in_network
  end

  #remove duplicate by taking the one with the highest access_type
  def remove_duplicate people_list
    result = []
    #first replace each person in the people list with the highest access_type of this person
    people_list.each do |person|
      result.push(get_max_access_type_element(people_list, person))
    end
    #remove the duplication
    result = result.inject([]) { |result,i| result << i unless result.include?(i); result }
    result
  end

  # Returns the entry of +array+ that shares element's id (index 0) and has
  # the highest access_type (index 2); +element+ itself if none is higher.
  def get_max_access_type_element(array, element)
    array.each do |a|
      if (element[0] == a[0] && element[2] < a[2])
        element = a;
      end
    end
    return element
  end

  #array2 has precedence: keeps all of array2, plus entries of array1 whose
  #person id does not already appear in array2
  def precedence array1, array2
    result = []
    result |= array2
    array1.each do |a1|
      check = false
      array2.each do |a2|
        if (a1[0] == a2[0])
          check = true
          break
        end
      end
      if !check
        result.push(a1)
      end
    end
    return result
  end

  #remove people which are in blacklist from the people list
  def remove_people_in_blacklist(people_list, blacklist)
    result = []
    result |= people_list
    people_list.each do |person|
      check = false
      blacklist.each do |person_in_bl|
        if (person[0] == person_in_bl[0])
          check = true
          break
        end
      end
      if check
        result.delete person
      end
    end
    return result
  end

  #add people which are in whitelist to the people list
  #(duplicates resolved in favour of the highest access_type)
  def add_people_in_whitelist(people_list, whitelist)
    result = []
    result |= people_list
    result |= whitelist
    return remove_duplicate(result)
  end
end
Fix: `changed_for_autosave?` must be called on the Policy instance itself (where ActiveRecord's autosave support defines it), not on the `permissions` association, which does not respond to it.
class Policy < ActiveRecord::Base
has_many :assets,
:dependent => :nullify,
:order => "resource_type ASC"
has_many :permissions,
:dependent => :destroy,
:order => "created_at ASC",
:autosave => true,
:after_add => proc {|policy, perm| perm.policy = policy}
validates_presence_of :sharing_scope, :access_type
validates_numericality_of :sharing_scope, :access_type
alias_attribute :title, :name
before_save :update_timestamp_if_permissions_change
def update_timestamp_if_permissions_change
if changed_for_autosave?
current_time = current_time_from_proper_timezone
write_attribute('updated_at', current_time) if respond_to?(:updated_at)
write_attribute('updated_on', current_time) if respond_to?(:updated_on)
end
end
# *****************************************************************************
# This section defines constants for "sharing_scope" and "access_type" values
# NB! It is critical to all algorithms using these constants, that the latter
# have their integer values increased along with the access they provide
# (so, for example, "editing" should have greated value than "viewing")
# In other words, this means that for both(!) sharing_scope and access_type
# constants it is crucial that order of these (imposed by their integer values)
# is preserved
# sharing_scope
PRIVATE = 0
ALL_SYSMO_USERS = 2
EVERYONE = 4
# access_type
DETERMINED_BY_GROUP = -1 # used for whitelist/blacklist (meaning that it doesn't matter what value this field has)
NO_ACCESS = 0 # i.e. only for anyone; only owner has access
VISIBLE = 1 # visible only
ACCESSIBLE = 2 # accessible and visible
EDITING = 3 # accessible, visible and editing
MANAGING = 4 # any actions that owner of the asset can perform (including "destroy"ing)
PUBLISHING = 5 # publish the item
# "true" value for flag-type fields
TRUE_VALUE = 1
FALSE_VALUE = 0
# *****************************************************************************
#makes a copy of the policy, and its associated permissions.
def deep_copy
copy=self.clone
self.permissions.each {|p| copy.permissions << p.clone}
return copy
end
#checks that there are permissions for the provided contributor, for the access_type (or higher)
def permission_granted?(contributor,access_type)
permissions.detect{|p| p.contributor==contributor && p.access_type >= access_type}
end
def self.new_for_upload_tool(resource, recipient)
policy = resource.build_policy(:name => 'auto',
:sharing_scope => Policy::PRIVATE,
:access_type => Policy::NO_ACCESS)
policy.permissions.build :contributor_type => "Person", :contributor_id => recipient, :access_type => Policy::ACCESSIBLE
return policy
end
def set_attributes_with_sharing sharing, projects
# if no data about sharing is given, it should be some user (not the owner!)
# who is editing the asset - no need to do anything with policy / permissions: return success
self.tap do |policy|
if sharing
# obtain parameters from sharing hash
policy.sharing_scope = sharing[:sharing_scope]
policy.access_type = sharing["access_type_#{sharing_scope}"]
policy.use_whitelist = sharing[:use_whitelist]
policy.use_blacklist = sharing[:use_blacklist]
# NOW PROCESS THE PERMISSIONS
# read the permission data from sharing
unless sharing[:permissions].blank?
contributor_types = ActiveSupport::JSON.decode(sharing[:permissions][:contributor_types])
new_permission_data = ActiveSupport::JSON.decode(sharing[:permissions][:values])
else
contributor_types = []
new_permission_data = {}
end
#if share with your project is chosen
if (sharing[:sharing_scope].to_i == Policy::ALL_SYSMO_USERS) and !projects.blank?
#add Project to contributor_type
contributor_types << "Project" if !contributor_types.include? "Project"
#add one hash {project.id => {"access_type" => sharing[:your_proj_access_type].to_i}} to new_permission_data
new_permission_data["Project"] = {} unless new_permission_data["Project"]
projects.each {|project| new_permission_data["Project"][project.id] = {"access_type" => sharing[:your_proj_access_type].to_i}}
end
# --- Synchronise All Permissions for the Policy ---
# first delete or update any old memberships
policy.permissions.each do |p|
if permission_access = (new_permission_data[p.contributor_type.to_s].try :delete, p.contributor_id)
p.access_type = permission_access["access_type"]
else
p.mark_for_destruction
end
end
# now add any remaining new memberships
contributor_types.try :each do |contributor_type|
new_permission_data[contributor_type.to_s].try :each do |p|
if policy.new_record? or !Permission.find :first, :conditions => {:contributor_type => contributor_type, :contributor_id => p[0], :policy_id => policy.id}
p = policy.permissions.build :contributor_type => contributor_type, :contributor_id => p[0], :access_type => p[1]["access_type"]
end
end
end
end
end
end
# returns a default policy for a project
# (all the related permissions will still be linked to the returned policy)
def self.project_default(project)
# if the default project policy isn't set, NIL will be returned - and the caller
# has to perform further actions in such case
return project.default_policy
end
def self.private_policy
Policy.new(:name => "default private",
:sharing_scope => PRIVATE,
:access_type => NO_ACCESS,
:use_whitelist => false,
:use_blacklist => false)
end
def self.registered_users_accessible_policy
Policy.new(:name => "default accessible",
:sharing_scope => ALL_SYSMO_USERS,
:access_type => ACCESSIBLE,
:use_whitelist => false,
:use_blacklist => false)
end
#The default policy to use when creating authorized items if no other policy is specified
def self.default
registered_users_accessible_policy
end
# translates access type codes into human-readable form
def self.get_access_type_wording(access_type, resource=nil)
case access_type
when Policy::DETERMINED_BY_GROUP
return "Individual access rights for each member"
when Policy::NO_ACCESS
return "No access"
when Policy::VISIBLE
return resource.try(:is_downloadable?) ? "View summary only" : "View summary"
when Policy::ACCESSIBLE
return resource.try(:is_downloadable?) ? "View summary and get contents" : "View summary"
when Policy::EDITING
return resource.try(:is_downloadable?) ? "View and edit summary and contents" : "View and edit summary"
when Policy::MANAGING
return "Manage"
else
return "Invalid access type"
end
end
# extracts the "settings" of the policy, discarding other information
# (e.g. contributor, creation time, etc.)
def get_settings
settings = {}
settings['sharing_scope'] = self.sharing_scope
settings['access_type'] = self.access_type
settings['use_whitelist'] = self.use_whitelist
settings['use_blacklist'] = self.use_blacklist
return settings
end
# extract the "settings" from all permissions associated to the policy;
# creates array containing 2-item arrays per each policy in the form:
# [ ... , [ permission_id, {"contributor_id" => id, "contributor_type" => type, "access_type" => access} ] , ... ]
def get_permission_settings
p_settings = []
self.permissions.each do |p|
# standard parameters for all contributor types
params_hash = {}
params_hash["contributor_id"] = p.contributor_id
params_hash["contributor_type"] = p.contributor_type
params_hash["access_type"] = p.access_type
params_hash["contributor_name"] = (p.contributor_type == "Person" ? (p.contributor.first_name + " " + p.contributor.last_name) : p.contributor.name)
# some of the contributor types will have special additional parameters
case p.contributor_type
when "FavouriteGroup"
params_hash["whitelist_or_blacklist"] = [FavouriteGroup::WHITELIST_NAME, FavouriteGroup::BLACKLIST_NAME].include?(p.contributor.name)
end
p_settings << [ p.id, params_hash ]
end
return p_settings
end
def private?
sharing_scope == Policy::PRIVATE and permissions.empty?
end
def public?
sharing_scope == Policy::EVERYONE
end
#return the hash: key is access_type, value is the array of people
def summarize_permissions creators=[User.current_user.person], contributor=User.current_user.person
#build the hash containing contributor_type as key and the people in these groups as value,exception:'Public' holds the access_type as the value
people_in_group = {'Person' => [], 'FavouriteGroup' => [], 'WorkGroup' => [], 'Project' => [], 'Institution' => [], 'WhiteList' => [], 'BlackList' => [],'Network' => [], 'Public' => 0}
#the result return: a hash contain the access_type as key, and array of people as value
grouped_people_by_access_type = {}
policy_to_people_group people_in_group, contributor
permissions_to_people_group permissions, people_in_group, contributor
#Now make the people in group unique by choosing the highest access_type
people_in_group['FavouriteGroup'] = remove_duplicate(people_in_group['FavouriteGroup'])
people_in_group['WorkGroup'] = remove_duplicate(people_in_group['WorkGroup'])
people_in_group['Project'] = remove_duplicate(people_in_group['Project'])
people_in_group['Institution'] = remove_duplicate(people_in_group['Institution'])
#Now process precedence with the order [network, institution, project, wg, fg, person]
filtered_people = people_in_group['Network']
filtered_people = precedence(filtered_people, people_in_group['Institution'])
filtered_people = precedence(filtered_people, people_in_group['Project'])
filtered_people = precedence(filtered_people, people_in_group['WorkGroup'])
filtered_people = precedence(filtered_people, people_in_group['FavouriteGroup'])
filtered_people = precedence(filtered_people, people_in_group['Person'])
#add people in white list
filtered_people = add_people_in_whitelist(filtered_people, people_in_group['WhiteList'])
#remove people in blacklist
filtered_people = remove_people_in_blacklist(filtered_people, people_in_group['BlackList'])
#add creators and assign them the Policy::EDITING right
creators.collect!{|c| [c.id, "#{c.first_name} #{c.last_name}", Policy::EDITING] unless c.blank?}
filtered_people = add_people_in_whitelist(filtered_people, creators)
#add contributor
filtered_people = add_people_in_whitelist(filtered_people, [[contributor.id, "#{contributor.first_name} #{contributor.last_name}", Policy::MANAGING]]) unless contributor.blank?
#sort people by name
filtered_people = filtered_people.sort{|a,b| a[1] <=> b[1]}
#group people by access_type
grouped_people_by_access_type.merge!(filtered_people.group_by{|person| person[2]})
#add publishing if access_type for public > 0
grouped_people_by_access_type[Policy::PUBLISHING] = people_in_group['Public'] if people_in_group['Public'] > 0
#only store (people in backlist) + (people in people_in_group['Person'] with no access) to the group of access_type=Policy::NO_ACCESS
people_with_no_access = []
people_with_no_access.concat(people_in_group['BlackList']) unless people_in_group['BlackList'].blank?
people_with_no_access.concat(people_in_group['Person'].group_by{|person| person[2]}[Policy::NO_ACCESS]) unless people_in_group['Person'].group_by{|person| person[2]}[Policy::NO_ACCESS].blank?
people_with_no_access.uniq!
unless people_with_no_access.blank?
grouped_people_by_access_type[Policy::NO_ACCESS] = people_with_no_access.sort{|a,b| a[1] <=> b[1]}
end
#sort by key of the hash
grouped_people_by_access_type = Hash[grouped_people_by_access_type.sort]
return grouped_people_by_access_type
end
def policy_to_people_group people_in_group, contributor=User.current_user.person
if sharing_scope == Policy::ALL_SYSMO_USERS
people_in_network = get_people_in_network access_type
people_in_group['Network'] |= people_in_network unless people_in_network.blank?
elsif sharing_scope == Policy::EVERYONE
people_in_group['Public'] = access_type
end
#if blacklist/whitelist is used
if use_whitelist
people_in_whitelist = get_people_in_FG(contributor, nil, true, nil)
people_in_group['WhiteList'] |= people_in_whitelist unless people_in_whitelist.blank?
end
#if blacklist/whitelist is used
if use_blacklist
people_in_blacklist = get_people_in_FG(contributor, nil, nil, true)
people_in_group['BlackList'] |= people_in_blacklist unless people_in_blacklist.blank?
end
people_in_group
end
def permissions_to_people_group permissions, people_in_group, contributor=User.current_user.person
permissions.each do |permission|
contributor_id = permission.contributor_id
access_type = permission.access_type
case permission.contributor_type
when 'Person'
person = get_person contributor_id, access_type
people_in_group['Person'] << person unless person.blank?
when 'FavouriteGroup'
people_in_FG = get_people_in_FG contributor, contributor_id
people_in_group['FavouriteGroup'] |= people_in_FG unless people_in_FG.blank?
when 'WorkGroup'
people_in_WG = get_people_in_WG contributor_id, access_type
people_in_group['WorkGroup'] |= people_in_WG unless people_in_WG.blank?
when 'Project'
people_in_project = get_people_in_project contributor_id, access_type
people_in_group['Project'] |= people_in_project unless people_in_project.blank?
when 'Institution'
people_in_institution = get_people_in_institution contributor_id, access_type
people_in_group['Institution'] |= people_in_institution unless people_in_institution.blank?
end
end
people_in_group
end
def get_person person_id, access_type
person = Person.find(person_id)
if person
return [person.id, "#{person.first_name} #{person.last_name}", access_type]
end
end
#review people WG
def get_people_in_WG wg_id, access_type
w_group = WorkGroup.find(wg_id)
if w_group
people_in_wg = [] #id, name, access_type
w_group.group_memberships.each do |gm|
people_in_wg.push [gm.person.id, "#{gm.person.first_name} #{gm.person.last_name}", access_type ]
end
end
return people_in_wg
end
#review people in black list, white list and normal workgroup
def get_people_in_FG contributor, fg_id=nil, is_white_list=nil, is_black_list=nil
if is_white_list
f_group = FavouriteGroup.find(:all, :conditions => ["name = ? AND user_id = ?", "__whitelist__", contributor.user.id]).first
elsif is_black_list
f_group = FavouriteGroup.find(:all, :conditions => ["name = ? AND user_id = ?", "__blacklist__", contributor.user.id]).first
else
f_group = FavouriteGroup.find_by_id(fg_id)
end
if f_group
people_in_FG = [] #id, name, access_type
f_group.favourite_group_memberships.each do |fgm|
people_in_FG.push [fgm.person.id, "#{fgm.person.first_name} #{fgm.person.last_name}", fgm.access_type]
end
return people_in_FG
end
end
#review people in project
def get_people_in_project project_id, access_type
project = Project.find(project_id)
if project
people_in_project = [] #id, name, access_type
project.people.each do |person|
people_in_project.push [person.id, "#{person.first_name} #{person.last_name}", access_type]
end
return people_in_project
end
end
#review people in institution
def get_people_in_institution institution_id, access_type
institution = Institution.find(institution_id)
if institution
people_in_institution = [] #id, name, access_type
institution.people.each do |person|
people_in_institution.push [person.id, "#{person.first_name} #{person.last_name}", access_type]
end
return people_in_institution
end
end
#review people in network
def get_people_in_network access_type
people_in_network = [] #id, name, access_type
projects = Project.find(:all)
projects.each do |project|
project.people.each do |person|
person_identification = [person.id, "#{person.first_name} #{person.last_name}"]
people_in_network.push person_identification if (!people_in_network.include? person_identification)
end
end
people_in_network.collect!{|person| person.push access_type}
return people_in_network
end
#remove duplicate by taking the one with the highest access_type
# For each person keep the entry carrying their highest access_type, then drop
# repeated entries (first occurrence wins, exactly as Array#uniq does).
def remove_duplicate people_list
  people_list.map { |person| get_max_access_type_element(people_list, person) }.uniq
end
# Among `element` and every entry in `array` sharing its person id, return the
# entry with the greatest access_type; on a tie, the earliest candidate wins
# (the original `element` itself beating entries from `array`).
def get_max_access_type_element(array, element)
  same_person = array.select { |entry| entry[0] == element[0] }
  ([element] + same_person).max_by { |entry| entry[2] }
end
#array2 has precedence
# Merge keyed on person id: array2 entries (de-duplicated) come first, followed
# by array1 entries whose id does not already appear in array2.
def precedence array1, array2
  taken_ids = array2.map { |entry| entry[0] }
  array2.uniq + array1.reject { |entry| taken_ids.include?(entry[0]) }
end
#remove people which are in blacklist from the people list
# De-duplicates people_list, then drops every entry whose person id occurs in
# the blacklist.
def remove_people_in_blacklist(people_list, blacklist)
  blacklisted_ids = blacklist.map { |entry| entry[0] }
  people_list.uniq.reject { |person| blacklisted_ids.include?(person[0]) }
end
#add people which are in whitelist to the people list
# Union of people_list and whitelist, with id clashes resolved in favour of
# the entry holding the highest access_type.
def add_people_in_whitelist(people_list, whitelist)
  remove_duplicate(people_list | whitelist)
end
end
|
class Policy < ActiveRecord::Base
# Permissions are owned by the policy, saved with it, and back-reference it as
# soon as they are added (so unsaved permissions already know their policy).
has_many :permissions,
:dependent => :destroy,
:order => "created_at ASC",
:autosave => true,
:after_add => proc {|policy, perm| perm.policy = policy}
#basically the same as validates_numericality_of :sharing_scope, :access_type
#but with a more generic error message because our users don't know what
#sharing_scope and access_type are.
validates_each(:sharing_scope, :access_type) do |record, attr, value|
# validate the raw (pre-typecast) value so "abc" is rejected rather than
# silently cast to 0
raw_value = record.send("#{attr}_before_type_cast") || value
begin
Kernel.Float(raw_value)
rescue ArgumentError, TypeError
record.errors[:base] << "Sharing policy is invalid" unless value.is_a? Integer
end
end
alias_attribute :title, :name
after_commit :queue_update_auth_table
before_save :update_timestamp_if_permissions_change
def update_timestamp_if_permissions_change
# Touch the timestamp before save whenever this record or its autosaved
# permissions have pending changes, so the after_commit hook (which inspects
# previous_changes) notices the save and rebuilds the auth lookup table.
update_timestamp if changed_for_autosave?
end
def queue_update_auth_table
# Queue an authorization-table refresh for all governed assets, but only when
# something other than the bare timestamp actually changed.
unless (previous_changes.keys - ["updated_at"]).empty?
AuthLookupUpdateJob.add_items_to_queue(assets) unless assets.empty?
end
end
# Every asset (across all authorized types) whose policy_id points at this
# policy. NOTE(review): issues one query per authorized type.
def assets
Seek::Util.authorized_types.collect do |type|
type.where(:policy_id=>id)
end.flatten.uniq
end
# *****************************************************************************
# This section defines constants for "sharing_scope" and "access_type" values
# NB! It is critical to all algorithms using these constants, that the latter
# have their integer values increased along with the access they provide
# (so, for example, "editing" should have greater value than "viewing")
# In other words, this means that for both(!) sharing_scope and access_type
# constants it is crucial that order of these (imposed by their integer values)
# is preserved
# sharing_scope
PRIVATE = 0
ALL_SYSMO_USERS = 2
EVERYONE = 4
# access_type
DETERMINED_BY_GROUP = -1 # used for whitelist/blacklist (meaning that it doesn't matter what value this field has)
NO_ACCESS = 0 # i.e. only for anyone; only owner has access
VISIBLE = 1 # visible only
ACCESSIBLE = 2 # accessible and visible
EDITING = 3 # accessible, visible and editing
MANAGING = 4 # any actions that owner of the asset can perform (including "destroy"ing)
PUBLISHING = 5 # publish the item
# "true" value for flag-type fields
TRUE_VALUE = 1
FALSE_VALUE = 0
# *****************************************************************************
#makes a copy of the policy, and its associated permissions.
# Returns an unsaved duplicate of this policy together with unsaved duplicates
# of each of its permissions.
def deep_copy
  duplicate = dup
  permissions.each { |permission| duplicate.permissions << permission.dup }
  duplicate
end
#checks that there are permissions for the provided contributor, for the access_type (or higher)
# Returns the first matching Permission (truthy) or nil — callers use it as a
# boolean, but the matching record is what comes back.
def permission_granted?(contributor,access_type)
  permissions.find { |perm| perm.contributor == contributor && perm.access_type >= access_type }
end
# Builds (unsaved) the policy used by the upload tool: private with no public
# access, plus one ACCESSIBLE permission for the recipient person id.
def self.new_for_upload_tool(resource, recipient)
policy = resource.build_policy(:name => 'auto',
:sharing_scope => Policy::PRIVATE,
:access_type => Policy::NO_ACCESS)
policy.permissions.build :contributor_type => "Person", :contributor_id => recipient, :access_type => Policy::ACCESSIBLE
return policy
end
# Builds (unsaved) a private policy for a resource submitted by email:
# EDITING permissions for each recipient person id, ACCESSIBLE permissions for
# each accessor person id. Either list may be nil.
def self.new_from_email(resource, recipients, accessors)
  policy = resource.build_policy(:name => 'auto',
                                 :sharing_scope => Policy::PRIVATE,
                                 :access_type => Policy::NO_ACCESS)
  (recipients || []).each do |person_id|
    policy.permissions.build :contributor_type => "Person", :contributor_id => person_id, :access_type => Policy::EDITING
  end
  (accessors || []).each do |person_id|
    policy.permissions.build :contributor_type => "Person", :contributor_id => person_id, :access_type => Policy::ACCESSIBLE
  end
  policy
end
# Applies the "sharing" form hash (scope, access type, per-contributor
# permissions) to this policy and synchronises its permissions collection.
# Returns self; when +sharing+ is nil nothing is changed (a non-owner edit).
def set_attributes_with_sharing sharing, projects
  self.tap do |policy|
    if sharing
      # obtain parameters from sharing hash
      policy.sharing_scope = sharing[:sharing_scope]
      policy.access_type = sharing["access_type_#{sharing_scope}"].blank? ? 0 : sharing["access_type_#{sharing_scope}"]
      # read the permission data from sharing (JSON-decoded, so keys are strings)
      unless sharing[:permissions].blank? or sharing[:permissions][:contributor_types].blank?
        contributor_types = ActiveSupport::JSON.decode(sharing[:permissions][:contributor_types]) || []
        new_permission_data = ActiveSupport::JSON.decode(sharing[:permissions][:values]) || {}
      else
        contributor_types = []
        new_permission_data = {}
      end
      #if share with your project is chosen
      if (sharing[:sharing_scope].to_i == Policy::ALL_SYSMO_USERS) and !projects.map(&:id).compact.blank?
        contributor_types << "Project" unless contributor_types.include? "Project"
        new_permission_data["Project"] ||= {}
        # BUGFIX: key on the *string* form of the project id. The JSON-decoded
        # data above uses string keys, and the sync loop below deletes by
        # contributor_id.to_s — integer keys were silently missed, so existing
        # project permissions got marked for destruction and the policy fell
        # back to its default.
        projects.each {|project| new_permission_data["Project"][project.id.to_s] = {"access_type" => sharing[:your_proj_access_type].to_i}}
      end
      # --- Synchronise All Permissions for the Policy ---
      # first update memberships that are still present; destroy the rest
      policy.permissions.each do |p|
        data_for_type = new_permission_data[p.contributor_type.to_s]
        # tolerate both string (JSON-decoded) and integer keys
        permission_access = data_for_type && (data_for_type.delete(p.contributor_id.to_s) || data_for_type.delete(p.contributor_id))
        if permission_access
          p.access_type = permission_access["access_type"]
        else
          p.mark_for_destruction
        end
      end
      # now add any remaining new memberships
      contributor_types.try :each do |contributor_type|
        new_permission_data[contributor_type.to_s].try :each do |p|
          if policy.new_record? or !Permission.where(:contributor_type => contributor_type, :contributor_id => p[0], :policy_id => policy.id).first
            p = policy.permissions.build :contributor_type => contributor_type, :contributor_id => p[0], :access_type => p[1]["access_type"]
          end
        end
      end
    end
  end
end
# returns a default policy for a project
# (all the related permissions will still be linked to the returned policy)
# Kept as a named constructor for call-site clarity; it is pure delegation.
def self.project_default(project)
# if the default project policy isn't set, NIL will be returned - and the caller
# has to perform further actions in such case
return project.default_policy
end
# Factory for an unsaved fully-private policy: no sharing, no access, and
# neither whitelist nor blacklist consulted.
def self.private_policy
Policy.new(:name => "default private",
:sharing_scope => PRIVATE,
:access_type => NO_ACCESS,
:use_whitelist => false,
:use_blacklist => false)
end
# Factory for an unsaved policy giving all registered users ACCESSIBLE rights;
# white/blacklists are not consulted.
def self.registered_users_accessible_policy
Policy.new(:name => "default accessible",
:sharing_scope => ALL_SYSMO_USERS,
:access_type => ACCESSIBLE,
:use_whitelist => false,
:use_blacklist => false)
end
# Factory for an unsaved policy giving everyone (including anonymous users)
# ACCESSIBLE rights.
def self.public_policy
Policy.new(:name => "default public",
:sharing_scope => EVERYONE,
:access_type => ACCESSIBLE
)
end
# Factory for an unsaved policy: registered users may view, and each given
# project additionally gets an ACCESSIBLE permission.
def self.sysmo_and_projects_policy projects=[]
policy = Policy.new(:name => "default sysmo and projects policy",
:sharing_scope => ALL_SYSMO_USERS,
:access_type => VISIBLE
)
projects.each do |project|
policy.permissions << Permission.new(:contributor => project, :access_type => ACCESSIBLE)
end
return policy
end
#The default policy to use when creating authorized items if no other policy is specified
#FIXME: - would like to revisit this, remove is_virtualiver, and make the default policy itself a configuration
def self.default resource=nil
  # `unless ... else ...` reads backwards; use a positive `if` instead.
  if Seek::Config.is_virtualliver
    Policy.new(:name => "default accessible", :use_whitelist => false, :use_blacklist => false)
  else
    private_policy
  end
end
# translates access type codes into human-readable form
# +downloadable+ picks the "...and download" flavour of the wording where one
# exists; unknown codes yield "Invalid access type".
def self.get_access_type_wording(access_type, downloadable=false)
  case access_type
  when Policy::DETERMINED_BY_GROUP
    I18n.t('access.determined_by_group')
  when Policy::NO_ACCESS
    I18n.t("access.no_access")
  when Policy::VISIBLE
    downloadable ? I18n.t('access.visible_downloadable') : I18n.t('access.visible')
  when Policy::ACCESSIBLE
    downloadable ? I18n.t('access.accessible_downloadable') : I18n.t('access.accessible')
  when Policy::EDITING
    downloadable ? I18n.t('access.editing_downloadable') : I18n.t('access.editing')
  when Policy::MANAGING
    I18n.t('access.managing')
  else
    "Invalid access type"
  end
end
# extracts the "settings" of the policy, discarding other information
# (e.g. contributor, creation time, etc.)
def get_settings
  {
    'sharing_scope' => sharing_scope,
    'access_type' => access_type,
    'use_whitelist' => use_whitelist,
    'use_blacklist' => use_blacklist
  }
end
# extract the "settings" from all permissions associated to the policy;
# creates array containing 2-item arrays per each policy in the form:
# [ ... , [ permission_id, {"contributor_id" => id, "contributor_type" => type, "access_type" => access} ] , ... ]
def get_permission_settings
p_settings = []
self.permissions.each do |p|
# standard parameters for all contributor types
params_hash = {}
params_hash["contributor_id"] = p.contributor_id
params_hash["contributor_type"] = p.contributor_type
params_hash["access_type"] = p.access_type
# NOTE(review): assumes Person contributors respond to first_name/last_name
# and every other contributor type responds to #name — confirm for new types
params_hash["contributor_name"] = (p.contributor_type == "Person" ? (p.contributor.first_name + " " + p.contributor.last_name) : p.contributor.name)
# some of the contributor types will have special additional parameters
case p.contributor_type
when "FavouriteGroup"
# flags whether this favourite group is the special white/blacklist group
params_hash["whitelist_or_blacklist"] = [FavouriteGroup::WHITELIST_NAME, FavouriteGroup::BLACKLIST_NAME].include?(p.contributor.name)
end
p_settings << [ p.id, params_hash ]
end
return p_settings
end
# True when nothing is shared at all: scope is PRIVATE and no per-contributor
# permissions exist.
def private?
  permissions.empty? && sharing_scope == Policy::PRIVATE
end
# True when the policy is shared with everyone, registered or not.
def public?
  Policy::EVERYONE == sharing_scope
end
#return the hash: key is access_type, value is the array of people
# Builds a summary of who effectively gets which access from this policy.
# Each person entry is [id, name, access_type]; precedence between groups is
# resolved below (network < institution < project < workgroup < favourite
# group < explicit person), then white/blacklists, creators and the
# contributor are layered on top.
def summarize_permissions creators=[User.current_user.try(:person)], asset_managers = [], contributor=User.current_user.try(:person)
#build the hash containing contributor_type as key and the people in these groups as value,exception:'Public' holds the access_type as the value
people_in_group = {'Person' => [], 'FavouriteGroup' => [], 'WorkGroup' => [], 'Project' => [], 'Institution' => [], 'WhiteList' => [], 'BlackList' => [],'Network' => [], 'Public' => 0}
#the result return: a hash contain the access_type as key, and array of people as value
grouped_people_by_access_type = {}
policy_to_people_group people_in_group, contributor
permissions_to_people_group permissions, people_in_group
#Now make the people in group unique by choosing the highest access_type
people_in_group['FavouriteGroup'] = remove_duplicate(people_in_group['FavouriteGroup'])
people_in_group['WorkGroup'] = remove_duplicate(people_in_group['WorkGroup'])
people_in_group['Project'] = remove_duplicate(people_in_group['Project'])
people_in_group['Institution'] = remove_duplicate(people_in_group['Institution'])
#Now process precedence with the order [network, institution, project, wg, fg, person]
filtered_people = people_in_group['Network']
filtered_people = precedence(filtered_people, people_in_group['Institution'])
filtered_people = precedence(filtered_people, people_in_group['Project'])
filtered_people = precedence(filtered_people, people_in_group['WorkGroup'])
filtered_people = precedence(filtered_people, people_in_group['FavouriteGroup'])
filtered_people = precedence(filtered_people, people_in_group['Person'])
#add people in white list
filtered_people = add_people_in_whitelist(filtered_people, people_in_group['WhiteList'])
#add people in blacklist
filtered_people = precedence(filtered_people, people_in_group['BlackList'])
#add creators and assign them the Policy::EDITING right
creator_array = creators.collect{|c| [c.id, "#{c.name}", Policy::EDITING] unless c.blank?}
filtered_people = add_people_in_whitelist(filtered_people, creator_array)
#add contributor
filtered_people = add_people_in_whitelist(filtered_people, [[contributor.id, "#{contributor.name}", Policy::MANAGING]]) unless contributor.blank?
#sort people by name
filtered_people = filtered_people.sort{|a,b| a[1] <=> b[1]}
#group people by access_type
grouped_people_by_access_type.merge!(filtered_people.group_by{|person| person[2]})
# asset managers always appear in the MANAGING bucket, without duplicates
asset_manager_array = asset_managers.collect { |am| [am.id, "#{am.name}", Policy::MANAGING] unless am.blank? }
if grouped_people_by_access_type[Policy::MANAGING].blank?
grouped_people_by_access_type[Policy::MANAGING] = asset_manager_array
else
grouped_people_by_access_type[Policy::MANAGING] |= asset_manager_array
end
#concat the roles to a person name
concat_roles_to_name grouped_people_by_access_type, creators, asset_managers
#use Policy::DETERMINED_BY_GROUP to store public group if access_type for public > 0
grouped_people_by_access_type[Policy::DETERMINED_BY_GROUP] = people_in_group['Public'] if people_in_group['Public'] > 0
#sort by key of the hash
grouped_people_by_access_type = Hash[grouped_people_by_access_type.sort]
return grouped_people_by_access_type
end
# Fills people_in_group from the policy-wide settings: the 'Network' bucket
# for ALL_SYSMO_USERS scope, the 'Public' bucket (an access_type integer, not
# a people list) for EVERYONE scope, plus the contributor's white/blacklist
# favourite groups when those flags are set.
def policy_to_people_group people_in_group, contributor=User.current_user.person
if sharing_scope == Policy::ALL_SYSMO_USERS
people_in_network = get_people_in_network access_type
people_in_group['Network'] |= people_in_network unless people_in_network.blank?
elsif sharing_scope == Policy::EVERYONE
people_in_group['Public'] = access_type
end
#if blacklist/whitelist is used
if use_whitelist
people_in_whitelist = get_people_in_FG(contributor, nil, true, nil)
people_in_group['WhiteList'] |= people_in_whitelist unless people_in_whitelist.blank?
end
#if blacklist/whitelist is used
if use_blacklist
people_in_blacklist = get_people_in_FG(contributor, nil, nil, true)
people_in_group['BlackList'] |= people_in_blacklist unless people_in_blacklist.blank?
end
people_in_group
end
# Expands each explicit permission into concrete people and unions them into
# the bucket matching the permission's contributor_type.
def permissions_to_people_group permissions, people_in_group
permissions.each do |permission|
contributor_id = permission.contributor_id
access_type = permission.access_type
case permission.contributor_type
when 'Person'
person = get_person contributor_id, access_type
people_in_group['Person'] << person unless person.blank?
when 'FavouriteGroup'
people_in_FG = get_people_in_FG nil, contributor_id
people_in_group['FavouriteGroup'] |= people_in_FG unless people_in_FG.blank?
when 'WorkGroup'
people_in_WG = get_people_in_WG contributor_id, access_type
people_in_group['WorkGroup'] |= people_in_WG unless people_in_WG.blank?
when 'Project'
people_in_project = get_people_in_project contributor_id, access_type
people_in_group['Project'] |= people_in_project unless people_in_project.blank?
when 'Institution'
people_in_institution = get_people_in_institution contributor_id, access_type
people_in_group['Institution'] |= people_in_institution unless people_in_institution.blank?
end
end
people_in_group
end
# [id, name, access_type] triple for the given person id, or nil when no such
# person exists.
def get_person person_id, access_type
  begin
    person = Person.find(person_id)
    [person.id, "#{person.name}", access_type]
  rescue ActiveRecord::RecordNotFound
    nil
  end
end
#review people WG
# [id, name, access_type] triples for every (non-blank) person in the work
# group; nil when the group cannot be resolved — matching the shape of the
# sibling get_people_in_project / get_people_in_institution helpers.
def get_people_in_WG wg_id, access_type
  w_group = WorkGroup.find(wg_id)
  if w_group
    people_in_wg = [] #id, name, access_type
    w_group.people.each do |person|
      people_in_wg.push [person.id, "#{person.name}", access_type ] unless person.blank?
    end
    # FIX: return inside the guard. The old `return people_in_wg` sat outside
    # the `if` and only avoided a NameError because Ruby pre-declares locals
    # assigned in unexecuted branches.
    return people_in_wg
  end
end
#review people in black list, white list and normal workgroup
# Resolves a favourite group to [person_id, name, access_type] triples.
# With is_white_list/is_black_list the special "__whitelist__"/"__blacklist__"
# group owned by the contributor's user is used (contributor must have a
# user); otherwise the group is looked up by fg_id. Returns nil if no group
# is found.
def get_people_in_FG contributor, fg_id=nil, is_white_list=nil, is_black_list=nil
if is_white_list
f_group = FavouriteGroup.where(["name = ? AND user_id = ?", "__whitelist__", contributor.user.id]).first
elsif is_black_list
f_group = FavouriteGroup.where(["name = ? AND user_id = ?", "__blacklist__", contributor.user.id]).first
else
f_group = FavouriteGroup.find_by_id(fg_id)
end
if f_group
people_in_FG = [] #id, name, access_type
f_group.favourite_group_memberships.each do |fgm|
people_in_FG.push [fgm.person.id, "#{fgm.person.name}", fgm.access_type] if !fgm.blank? and !fgm.person.blank?
end
return people_in_FG
end
end
#review people in project
# [id, name, access_type] triples for every (non-blank) member of the project.
def get_people_in_project project_id, access_type
  project = Project.find(project_id)
  return unless project
  project.people.reject(&:blank?).map do |member|
    [member.id, "#{member.name}", access_type]
  end
end
#review people in institution
# [id, name, access_type] triples for every (non-blank) member of the
# institution.
def get_people_in_institution institution_id, access_type
  institution = Institution.find(institution_id)
  return unless institution
  institution.people.reject(&:blank?).map do |member|
    [member.id, "#{member.name}", access_type]
  end
end
#review people in network
# Everyone belonging to any project, once each, as [id, name, access_type].
def get_people_in_network access_type
  identifications = []
  Project.all.each do |project|
    project.people.each do |person|
      next if person.blank?
      identification = [person.id, "#{person.name}"]
      identifications << identification unless identifications.include?(identification)
    end
  end
  identifications.map { |identification| identification + [access_type] }
end
#remove duplicate by taking the one with the highest access_type
# For each person keep the entry carrying their highest access_type, then drop
# repeated entries (first occurrence wins, exactly as Array#uniq does).
def remove_duplicate people_list
  people_list.map { |person| get_max_access_type_element(people_list, person) }.uniq
end
# Among `element` and every entry in `array` sharing its person id, return the
# entry with the greatest access_type; on a tie, the earliest candidate wins
# (the original `element` itself beating entries from `array`).
def get_max_access_type_element(array, element)
  same_person = array.select { |entry| entry[0] == element[0] }
  ([element] + same_person).max_by { |entry| entry[2] }
end
#array2 has precedence
# Merge keyed on person id: array2 entries (de-duplicated) come first, followed
# by array1 entries whose id does not already appear in array2.
def precedence array1, array2
  taken_ids = array2.map { |entry| entry[0] }
  array2.uniq + array1.reject { |entry| taken_ids.include?(entry[0]) }
end
#add people which are in whitelist to the people list
# Union of people_list and whitelist, with id clashes resolved in favour of
# the entry holding the highest access_type.
def add_people_in_whitelist(people_list, whitelist)
  remove_duplicate(people_list | whitelist)
end
# True when nobody but the contributor gains any access: the base access_type
# grants nothing, and every summarised entry (apart from the NO_ACCESS bucket
# and the public DETERMINED_BY_GROUP pseudo-bucket) is the contributor.
def is_entirely_private? grouped_people_by_access_type, contributor
  return false if access_type > Policy::NO_ACCESS
  grouped_people_by_access_type.each do |key, people|
    next if key == Policy::NO_ACCESS || key == Policy::DETERMINED_BY_GROUP
    people.each do |person|
      return false if person[0] != contributor.try(:id)
    end
  end
  true
end
# Appends " (creator)" / " (asset manager)" to the display name of matching
# people. The name strings are mutated in place: `reject` builds a new hash
# but shares the underlying person arrays, so the concat is visible to the
# caller. The local re-assignment of grouped_people_by_access_type is
# therefore cosmetic.
def concat_roles_to_name grouped_people_by_access_type, creators, asset_managers
creator_id_array = creators.collect{|c| c.id unless c.blank?}
asset_manage_id_array = asset_managers.collect{|am| am.id unless am.blank?}
grouped_people_by_access_type = grouped_people_by_access_type.reject{|key,value| key == Policy::DETERMINED_BY_GROUP}.each_value do |value|
value.each do |person|
person[1].concat(' (creator)') if creator_id_array.include?(person[0])
person[1].concat(' (asset manager)') if asset_manage_id_array.include?(person[0])
end
end
grouped_people_by_access_type
end
end
Fix for a sharing bug: when a policy had already been defined for project members, any further change threw the system back to the default policy (view summary only).
In policy.rb, looking up the permission with ".to_s" when deleting it from the temporary hash of new permission data did not find the entry.
Removing the ".to_s" and working with the integer value fixes it here, even though the FAIRDOMHub version works with the stringified version.
class Policy < ActiveRecord::Base
# Permissions are owned by the policy, saved with it, and back-reference it as
# soon as they are added (so unsaved permissions already know their policy).
has_many :permissions,
:dependent => :destroy,
:order => "created_at ASC",
:autosave => true,
:after_add => proc {|policy, perm| perm.policy = policy}
#basically the same as validates_numericality_of :sharing_scope, :access_type
#but with a more generic error message because our users don't know what
#sharing_scope and access_type are.
validates_each(:sharing_scope, :access_type) do |record, attr, value|
# validate the raw (pre-typecast) value so "abc" is rejected rather than
# silently cast to 0
raw_value = record.send("#{attr}_before_type_cast") || value
begin
Kernel.Float(raw_value)
rescue ArgumentError, TypeError
record.errors[:base] << "Sharing policy is invalid" unless value.is_a? Integer
end
end
alias_attribute :title, :name
after_commit :queue_update_auth_table
before_save :update_timestamp_if_permissions_change
def update_timestamp_if_permissions_change
# Touch the timestamp before save whenever this record or its autosaved
# permissions have pending changes, so the after_commit hook (which inspects
# previous_changes) notices the save and rebuilds the auth lookup table.
update_timestamp if changed_for_autosave?
end
def queue_update_auth_table
# Queue an authorization-table refresh for all governed assets, but only when
# something other than the bare timestamp actually changed.
unless (previous_changes.keys - ["updated_at"]).empty?
AuthLookupUpdateJob.add_items_to_queue(assets) unless assets.empty?
end
end
# Every asset (across all authorized types) whose policy_id points at this
# policy. NOTE(review): issues one query per authorized type.
def assets
Seek::Util.authorized_types.collect do |type|
type.where(:policy_id=>id)
end.flatten.uniq
end
# *****************************************************************************
# This section defines constants for "sharing_scope" and "access_type" values
# NB! It is critical to all algorithms using these constants, that the latter
# have their integer values increased along with the access they provide
# (so, for example, "editing" should have greater value than "viewing")
# In other words, this means that for both(!) sharing_scope and access_type
# constants it is crucial that order of these (imposed by their integer values)
# is preserved
# sharing_scope
PRIVATE = 0
ALL_SYSMO_USERS = 2
EVERYONE = 4
# access_type
DETERMINED_BY_GROUP = -1 # used for whitelist/blacklist (meaning that it doesn't matter what value this field has)
NO_ACCESS = 0 # i.e. only for anyone; only owner has access
VISIBLE = 1 # visible only
ACCESSIBLE = 2 # accessible and visible
EDITING = 3 # accessible, visible and editing
MANAGING = 4 # any actions that owner of the asset can perform (including "destroy"ing)
PUBLISHING = 5 # publish the item
# "true" value for flag-type fields
TRUE_VALUE = 1
FALSE_VALUE = 0
# *****************************************************************************
#makes a copy of the policy, and its associated permissions.
# Returns an unsaved duplicate of this policy together with unsaved duplicates
# of each of its permissions.
def deep_copy
  duplicate = dup
  permissions.each { |permission| duplicate.permissions << permission.dup }
  duplicate
end
#checks that there are permissions for the provided contributor, for the access_type (or higher)
# Returns the first matching Permission (truthy) or nil — callers use it as a
# boolean, but the matching record is what comes back.
def permission_granted?(contributor,access_type)
  permissions.find { |perm| perm.contributor == contributor && perm.access_type >= access_type }
end
# Builds (unsaved) the policy used by the upload tool: private with no public
# access, plus one ACCESSIBLE permission for the recipient person id.
def self.new_for_upload_tool(resource, recipient)
policy = resource.build_policy(:name => 'auto',
:sharing_scope => Policy::PRIVATE,
:access_type => Policy::NO_ACCESS)
policy.permissions.build :contributor_type => "Person", :contributor_id => recipient, :access_type => Policy::ACCESSIBLE
return policy
end
# Builds (unsaved) a private policy for a resource submitted by email:
# EDITING permissions for each recipient person id, ACCESSIBLE permissions for
# each accessor person id. Either list may be nil.
def self.new_from_email(resource, recipients, accessors)
  policy = resource.build_policy(:name => 'auto',
                                 :sharing_scope => Policy::PRIVATE,
                                 :access_type => Policy::NO_ACCESS)
  (recipients || []).each do |person_id|
    policy.permissions.build :contributor_type => "Person", :contributor_id => person_id, :access_type => Policy::EDITING
  end
  (accessors || []).each do |person_id|
    policy.permissions.build :contributor_type => "Person", :contributor_id => person_id, :access_type => Policy::ACCESSIBLE
  end
  policy
end
# Applies the "sharing" form hash (scope, access type, per-contributor
# permissions) to this policy and synchronises its permissions collection.
# Returns self; when +sharing+ is nil nothing is changed (a non-owner edit).
def set_attributes_with_sharing sharing, projects
  self.tap do |policy|
    if sharing
      # obtain parameters from sharing hash
      policy.sharing_scope = sharing[:sharing_scope]
      policy.access_type = sharing["access_type_#{sharing_scope}"].blank? ? 0 : sharing["access_type_#{sharing_scope}"]
      # read the permission data from sharing (JSON-decoded, so keys are strings)
      unless sharing[:permissions].blank? or sharing[:permissions][:contributor_types].blank?
        contributor_types = ActiveSupport::JSON.decode(sharing[:permissions][:contributor_types]) || []
        new_permission_data = ActiveSupport::JSON.decode(sharing[:permissions][:values]) || {}
      else
        contributor_types = []
        new_permission_data = {}
      end
      #if share with your project is chosen
      if (sharing[:sharing_scope].to_i == Policy::ALL_SYSMO_USERS) and !projects.map(&:id).compact.blank?
        contributor_types << "Project" unless contributor_types.include? "Project"
        new_permission_data["Project"] ||= {}
        # Key on the *string* form of the project id so these injected entries
        # match the JSON-decoded data (string keys throughout).
        projects.each {|project| new_permission_data["Project"][project.id.to_s] = {"access_type" => sharing[:your_proj_access_type].to_i}}
      end
      # --- Synchronise All Permissions for the Policy ---
      # first update memberships that are still present; destroy the rest.
      # This answers the earlier question about ".to_s": JSON-decoded values
      # carry string keys while locally-injected project entries carried
      # integer keys — deleting by both forms works in either branch/fork.
      policy.permissions.each do |p|
        data_for_type = new_permission_data[p.contributor_type.to_s]
        permission_access = data_for_type && (data_for_type.delete(p.contributor_id.to_s) || data_for_type.delete(p.contributor_id))
        if permission_access
          p.access_type = permission_access["access_type"]
        else
          p.mark_for_destruction
        end
      end
      # now add any remaining new memberships
      contributor_types.try :each do |contributor_type|
        new_permission_data[contributor_type.to_s].try :each do |p|
          if policy.new_record? or !Permission.where(:contributor_type => contributor_type, :contributor_id => p[0], :policy_id => policy.id).first
            p = policy.permissions.build :contributor_type => contributor_type, :contributor_id => p[0], :access_type => p[1]["access_type"]
          end
        end
      end
    end
  end
end
# returns a default policy for a project
# (all the related permissions will still be linked to the returned policy)
# Kept as a named constructor for call-site clarity; it is pure delegation.
def self.project_default(project)
# if the default project policy isn't set, NIL will be returned - and the caller
# has to perform further actions in such case
return project.default_policy
end
# Factory for an unsaved fully-private policy: no sharing, no access, and
# neither whitelist nor blacklist consulted.
def self.private_policy
Policy.new(:name => "default private",
:sharing_scope => PRIVATE,
:access_type => NO_ACCESS,
:use_whitelist => false,
:use_blacklist => false)
end
# Factory for an unsaved policy giving all registered users ACCESSIBLE rights;
# white/blacklists are not consulted.
def self.registered_users_accessible_policy
Policy.new(:name => "default accessible",
:sharing_scope => ALL_SYSMO_USERS,
:access_type => ACCESSIBLE,
:use_whitelist => false,
:use_blacklist => false)
end
# Factory for an unsaved policy giving everyone (including anonymous users)
# ACCESSIBLE rights.
def self.public_policy
Policy.new(:name => "default public",
:sharing_scope => EVERYONE,
:access_type => ACCESSIBLE
)
end
# Factory for an unsaved policy: registered users may view, and each given
# project additionally gets an ACCESSIBLE permission.
def self.sysmo_and_projects_policy projects=[]
policy = Policy.new(:name => "default sysmo and projects policy",
:sharing_scope => ALL_SYSMO_USERS,
:access_type => VISIBLE
)
projects.each do |project|
policy.permissions << Permission.new(:contributor => project, :access_type => ACCESSIBLE)
end
return policy
end
#The default policy to use when creating authorized items if no other policy is specified
#FIXME: - would like to revisit this, remove is_virtualiver, and make the default policy itself a configuration
def self.default resource=nil
  # `unless ... else ...` reads backwards; use a positive `if` instead.
  if Seek::Config.is_virtualliver
    Policy.new(:name => "default accessible", :use_whitelist => false, :use_blacklist => false)
  else
    private_policy
  end
end
# translates access type codes into human-readable form
# +downloadable+ picks the "...and download" flavour of the wording where one
# exists; unknown codes yield "Invalid access type".
def self.get_access_type_wording(access_type, downloadable=false)
  case access_type
  when Policy::DETERMINED_BY_GROUP
    I18n.t('access.determined_by_group')
  when Policy::NO_ACCESS
    I18n.t("access.no_access")
  when Policy::VISIBLE
    downloadable ? I18n.t('access.visible_downloadable') : I18n.t('access.visible')
  when Policy::ACCESSIBLE
    downloadable ? I18n.t('access.accessible_downloadable') : I18n.t('access.accessible')
  when Policy::EDITING
    downloadable ? I18n.t('access.editing_downloadable') : I18n.t('access.editing')
  when Policy::MANAGING
    I18n.t('access.managing')
  else
    "Invalid access type"
  end
end
# extracts the "settings" of the policy, discarding other information
# (e.g. contributor, creation time, etc.)
def get_settings
  {
    'sharing_scope' => sharing_scope,
    'access_type' => access_type,
    'use_whitelist' => use_whitelist,
    'use_blacklist' => use_blacklist
  }
end
# extract the "settings" from all permissions associated to the policy;
# creates array containing 2-item arrays per each policy in the form:
# [ ... , [ permission_id, {"contributor_id" => id, "contributor_type" => type, "access_type" => access} ] , ... ]
def get_permission_settings
p_settings = []
self.permissions.each do |p|
# standard parameters for all contributor types
params_hash = {}
params_hash["contributor_id"] = p.contributor_id
params_hash["contributor_type"] = p.contributor_type
params_hash["access_type"] = p.access_type
# NOTE(review): assumes Person contributors respond to first_name/last_name
# and every other contributor type responds to #name — confirm for new types
params_hash["contributor_name"] = (p.contributor_type == "Person" ? (p.contributor.first_name + " " + p.contributor.last_name) : p.contributor.name)
# some of the contributor types will have special additional parameters
case p.contributor_type
when "FavouriteGroup"
# flags whether this favourite group is the special white/blacklist group
params_hash["whitelist_or_blacklist"] = [FavouriteGroup::WHITELIST_NAME, FavouriteGroup::BLACKLIST_NAME].include?(p.contributor.name)
end
p_settings << [ p.id, params_hash ]
end
return p_settings
end
# True when nothing is shared at all: scope is PRIVATE and no per-contributor
# permissions exist.
def private?
  permissions.empty? && sharing_scope == Policy::PRIVATE
end
# True when the policy is shared with everyone, registered or not.
def public?
  Policy::EVERYONE == sharing_scope
end
#return the hash: key is access_type, value is the array of people
# Builds a summary of who effectively gets which access from this policy.
# Each person entry is [id, name, access_type]; precedence between groups is
# resolved below (network < institution < project < workgroup < favourite
# group < explicit person), then white/blacklists, creators and the
# contributor are layered on top.
def summarize_permissions creators=[User.current_user.try(:person)], asset_managers = [], contributor=User.current_user.try(:person)
#build the hash containing contributor_type as key and the people in these groups as value,exception:'Public' holds the access_type as the value
people_in_group = {'Person' => [], 'FavouriteGroup' => [], 'WorkGroup' => [], 'Project' => [], 'Institution' => [], 'WhiteList' => [], 'BlackList' => [],'Network' => [], 'Public' => 0}
#the result return: a hash contain the access_type as key, and array of people as value
grouped_people_by_access_type = {}
policy_to_people_group people_in_group, contributor
permissions_to_people_group permissions, people_in_group
#Now make the people in group unique by choosing the highest access_type
people_in_group['FavouriteGroup'] = remove_duplicate(people_in_group['FavouriteGroup'])
people_in_group['WorkGroup'] = remove_duplicate(people_in_group['WorkGroup'])
people_in_group['Project'] = remove_duplicate(people_in_group['Project'])
people_in_group['Institution'] = remove_duplicate(people_in_group['Institution'])
#Now process precedence with the order [network, institution, project, wg, fg, person]
filtered_people = people_in_group['Network']
filtered_people = precedence(filtered_people, people_in_group['Institution'])
filtered_people = precedence(filtered_people, people_in_group['Project'])
filtered_people = precedence(filtered_people, people_in_group['WorkGroup'])
filtered_people = precedence(filtered_people, people_in_group['FavouriteGroup'])
filtered_people = precedence(filtered_people, people_in_group['Person'])
#add people in white list
filtered_people = add_people_in_whitelist(filtered_people, people_in_group['WhiteList'])
#add people in blacklist
filtered_people = precedence(filtered_people, people_in_group['BlackList'])
#add creators and assign them the Policy::EDITING right
creator_array = creators.collect{|c| [c.id, "#{c.name}", Policy::EDITING] unless c.blank?}
filtered_people = add_people_in_whitelist(filtered_people, creator_array)
#add contributor
filtered_people = add_people_in_whitelist(filtered_people, [[contributor.id, "#{contributor.name}", Policy::MANAGING]]) unless contributor.blank?
#sort people by name
filtered_people = filtered_people.sort{|a,b| a[1] <=> b[1]}
#group people by access_type
grouped_people_by_access_type.merge!(filtered_people.group_by{|person| person[2]})
# asset managers always appear in the MANAGING bucket, without duplicates
asset_manager_array = asset_managers.collect { |am| [am.id, "#{am.name}", Policy::MANAGING] unless am.blank? }
if grouped_people_by_access_type[Policy::MANAGING].blank?
grouped_people_by_access_type[Policy::MANAGING] = asset_manager_array
else
grouped_people_by_access_type[Policy::MANAGING] |= asset_manager_array
end
#concat the roles to a person name
concat_roles_to_name grouped_people_by_access_type, creators, asset_managers
#use Policy::DETERMINED_BY_GROUP to store public group if access_type for public > 0
grouped_people_by_access_type[Policy::DETERMINED_BY_GROUP] = people_in_group['Public'] if people_in_group['Public'] > 0
#sort by key of the hash
grouped_people_by_access_type = Hash[grouped_people_by_access_type.sort]
return grouped_people_by_access_type
end
# Fold this policy's own sharing settings (scope, white/blacklist flags)
# into the people_in_group hash. Returns people_in_group.
def policy_to_people_group people_in_group, contributor=User.current_user.person
  case sharing_scope
  when Policy::ALL_SYSMO_USERS
    network_members = get_people_in_network(access_type)
    people_in_group['Network'] |= network_members unless network_members.blank?
  when Policy::EVERYONE
    # 'Public' holds the raw access_type, not a people list
    people_in_group['Public'] = access_type
  end
  if use_whitelist
    whitelisted = get_people_in_FG(contributor, nil, true, nil)
    people_in_group['WhiteList'] |= whitelisted unless whitelisted.blank?
  end
  if use_blacklist
    blacklisted = get_people_in_FG(contributor, nil, nil, true)
    people_in_group['BlackList'] |= blacklisted unless blacklisted.blank?
  end
  people_in_group
end
# Expand each explicit permission row into its people and merge them into
# the matching bucket of people_in_group. Returns people_in_group.
def permissions_to_people_group permissions, people_in_group
  permissions.each do |permission|
    contributor_key = permission.contributor_id
    level = permission.access_type
    case permission.contributor_type
    when 'Person'
      entry = get_person(contributor_key, level)
      people_in_group['Person'] << entry unless entry.blank?
    when 'FavouriteGroup'
      members = get_people_in_FG(nil, contributor_key)
      people_in_group['FavouriteGroup'] |= members unless members.blank?
    when 'WorkGroup'
      members = get_people_in_WG(contributor_key, level)
      people_in_group['WorkGroup'] |= members unless members.blank?
    when 'Project'
      members = get_people_in_project(contributor_key, level)
      people_in_group['Project'] |= members unless members.blank?
    when 'Institution'
      members = get_people_in_institution(contributor_key, level)
      people_in_group['Institution'] |= members unless members.blank?
    end
  end
  people_in_group
end
# Resolve a single person permission into an [id, name, access_type] triple.
# Returns nil when the person no longer exists.
def get_person person_id, access_type
  person = Person.find_by_id(person_id)
  return nil unless person
  [person.id, "#{person.name}", access_type]
end
#review people WG
# Expand a workgroup permission into [id, name, access_type] triples.
# FIX: uses find_by_id so a stale workgroup id yields nil instead of raising
# ActiveRecord::RecordNotFound (consistent with get_person), and returns
# explicitly instead of leaning on Ruby's nil-local quirk.
def get_people_in_WG wg_id, access_type
  w_group = WorkGroup.find_by_id(wg_id)
  return nil unless w_group
  w_group.people.reject(&:blank?).collect do |person|
    [person.id, "#{person.name}", access_type]
  end
end
#review people in black list, white list and normal workgroup
# Resolve a favourite group into [id, name, access_type] triples:
# * is_white_list -> the contributor's reserved "__whitelist__" group
# * is_black_list -> the contributor's reserved "__blacklist__" group
# * otherwise     -> the explicit favourite group fg_id
# Returns nil when no matching group exists.
def get_people_in_FG contributor, fg_id=nil, is_white_list=nil, is_black_list=nil
  if is_white_list
    f_group = FavouriteGroup.where(["name = ? AND user_id = ?", "__whitelist__", contributor.user.id]).first
  elsif is_black_list
    f_group = FavouriteGroup.where(["name = ? AND user_id = ?", "__blacklist__", contributor.user.id]).first
  else
    f_group = FavouriteGroup.find_by_id(fg_id)
  end
  if f_group
    people_in_FG = [] #id, name, access_type
    # access_type comes from each membership row here, unlike the project /
    # institution helpers where one access_type applies to the whole group
    f_group.favourite_group_memberships.each do |fgm|
      people_in_FG.push [fgm.person.id, "#{fgm.person.name}", fgm.access_type] if !fgm.blank? and !fgm.person.blank?
    end
    return people_in_FG
  end
end
#review people in project
# Expand a project-level permission into [id, name, access_type] triples.
def get_people_in_project project_id, access_type
  project = Project.find(project_id)
  return unless project
  project.people.reject(&:blank?).map do |member|
    [member.id, "#{member.name}", access_type]
  end
end
#review people in institution
# Expand an institution-level permission into [id, name, access_type] triples.
def get_people_in_institution institution_id, access_type
  institution = Institution.find(institution_id)
  return unless institution
  institution.people.inject([]) do |rows, person|
    person.blank? ? rows : rows << [person.id, "#{person.name}", access_type]
  end
end
#review people in network
# Everyone belonging to any project, each as [id, name, access_type].
# FIX: de-duplicates with uniq instead of the original repeated
# include? scan, which was accidentally O(n^2).
def get_people_in_network access_type
  identifications = Project.all.collect do |project|
    project.people.reject(&:blank?).collect { |person| [person.id, "#{person.name}"] }
  end.flatten(1).uniq
  identifications.collect { |identification| identification.push(access_type) }
end
#remove duplicate by taking the one with the highest access_type
# For entries sharing a person id (element 0), keep the first entry holding
# the highest access_type (element 2); first-seen order is preserved.
# FIX: single pass with a hash replaces the original O(n^2) scan per element
# followed by an O(n^2) inject-based dedup.
def remove_duplicate people_list
  best = {}
  people_list.each do |person|
    current = best[person[0]]
    best[person[0]] = person if current.nil? || current[2] < person[2]
  end
  people_list.collect { |person| best[person[0]] }.uniq
end
# Return the entry in +array+ with the same person id as +element+ but the
# highest access_type; ties keep the earlier entry (including +element+).
def get_max_access_type_element(array, element)
  array.reduce(element) do |best, candidate|
    (best[0] == candidate[0] && best[2] < candidate[2]) ? candidate : best
  end
end
#array2 has precedence
# Merge the two lists: all (de-duplicated) entries of array2, followed by
# entries of array1 whose person id (element 0) does not appear in array2.
# FIX: membership is tested via a hash instead of the original nested loop,
# turning the O(n*m) scan into O(n+m).
def precedence array1, array2
  taken = {}
  array2.each { |entry| taken[entry[0]] = true }
  ([] | array2) + array1.reject { |entry| taken[entry[0]] }
end
#add people which are in whitelist to the people list
# Union of the two lists, collapsed to one entry per person
# (the one with the highest access_type wins).
def add_people_in_whitelist(people_list, whitelist)
  remove_duplicate(people_list | whitelist)
end
# True when nobody except the contributor gains any access from this policy:
# no public access, and every summarized entry above NO_ACCESS belongs to
# the contributor themselves.
def is_entirely_private? grouped_people_by_access_type, contributor
  return false if access_type > Policy::NO_ACCESS
  visible = grouped_people_by_access_type.reject do |level, _|
    level == Policy::NO_ACCESS || level == Policy::DETERMINED_BY_GROUP
  end
  visible.each_value do |entries|
    entries.each do |entry|
      return false if entry[0] != contributor.try(:id)
    end
  end
  true
end
# Append " (creator)" / " (asset manager)" labels to the display names of
# matching people (mutates the name strings in place). Returns the hash
# without the DETERMINED_BY_GROUP entry, as before.
# FIX: blanks are rejected before collecting ids — the old
# `collect {|c| c.id unless c.blank?}` form left nils in the id arrays.
def concat_roles_to_name grouped_people_by_access_type, creators, asset_managers
  creator_ids = creators.reject(&:blank?).collect(&:id)
  manager_ids = asset_managers.reject(&:blank?).collect(&:id)
  filtered = grouped_people_by_access_type.reject { |key, _| key == Policy::DETERMINED_BY_GROUP }
  filtered.each_value do |entries|
    entries.each do |entry|
      entry[1].concat(' (creator)') if creator_ids.include?(entry[0])
      entry[1].concat(' (asset manager)') if manager_ids.include?(entry[0])
    end
  end
  filtered
end
end
|
#Copyright (c) 2014 Stelligent Systems LLC
#
#MIT LICENSE
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
depends 'magic_shell'
Store dependency for ideal library ordering
#Copyright (c) 2014 Stelligent Systems LLC
#
#MIT LICENSE
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
depends 'magic_shell'
depends 'jenkins', '> 2.0' |
# Reason joined many-to-many with posts; percentages summarize feedback flags.
# NOTE(review): all three percentages compute 0.0/0.0 == Float NaN when the
# reason has no posts — confirm callers handle that.
class Reason < ApplicationRecord
  has_and_belongs_to_many :posts
  has_many :feedbacks, :through => :posts
  # Fraction of this reason's posts flagged true-positive only.
  def tp_percentage
    # I don't like the .count.count, but it does get the job done
    count = self.posts.where(:is_tp => true, :is_fp => false).count
    return (count.to_f / self.posts.count.to_f).to_f
  end
  # Fraction of this reason's posts flagged false-positive only.
  def fp_percentage
    count = self.posts.where(:is_fp => true, :is_tp => false).count
    return (count.to_f / self.posts.count.to_f).to_f
  end
  # Fraction flagged both ways, plus unflagged posts that have any feedback.
  def both_percentage
    count = self.posts.where(:is_fp => true, :is_tp => true).count + self.posts.includes(:feedbacks).where(:is_tp => false, :is_fp => false).where.not( :feedbacks => { :post_id => nil }).count
    return (count.to_f / self.posts.count.to_f).to_f
  end
end
Save a query (or a few hundred) on the dashboard
# Reason joined many-to-many with posts; percentages summarize feedback flags.
class Reason < ApplicationRecord
  has_and_belongs_to_many :posts
  has_many :feedbacks, :through => :posts

  # Fraction of this reason's posts flagged true-positive only.
  # Returns 0.0 (instead of NaN) when the reason has no posts.
  def tp_percentage
    ratio(self.posts.where(:is_tp => true, :is_fp => false).count)
  end

  # Fraction of this reason's posts flagged false-positive only.
  def fp_percentage
    ratio(self.posts.where(:is_fp => true, :is_tp => false).count)
  end

  # Fraction flagged both ways, plus unflagged posts that have any feedback.
  def both_percentage
    count = self.posts.where(:is_fp => true, :is_tp => true).count + self.posts.includes(:feedbacks).where(:is_tp => false, :is_fp => false).where.not( :feedbacks => { :post_id => nil }).count
    ratio(count)
  end

  # Attempt to use cached post_count if it's available (included in the dashboard/index query)
  def fast_post_count
    self.post_count || self.posts.count
  end

  private

  # count / total as a Float, guarding the empty-reason division by zero
  # (which previously produced NaN).
  def ratio(count)
    total = fast_post_count
    return 0.0 if total.zero?
    count.to_f / total.to_f
  end
end
|
class Recipe < ActiveRecord::Base
  belongs_to :user

  # FIX: destroy nested rows together with the recipe; without :dependent,
  # deleting a recipe leaves ingredients/directions behind with a dangling
  # recipe_id.
  has_many :ingredients, dependent: :destroy
  has_many :directions, dependent: :destroy

  # Skip nested rows whose identifying field is blank; allow_destroy lets the
  # form remove existing rows via _destroy.
  accepts_nested_attributes_for :ingredients,
    reject_if: proc { |attributes| attributes['name'].blank? },
    allow_destroy: true
  accepts_nested_attributes_for :directions,
    reject_if: proc { |attributes| attributes['step'].blank? },
    allow_destroy: true

  # Paperclip attachment: single :medium style, images only.
  has_attached_file :image, styles: { medium: "400x400#" }
  validates_attachment_content_type :image, content_type: /\Aimage\/.*\Z/
end
Fix deleting a recipe so its dependent ingredients and directions are destroyed as well
class Recipe < ActiveRecord::Base
  belongs_to :user
  # dependent: :destroy removes child rows when the recipe is deleted
  has_many :ingredients, dependent: :destroy
  has_many :directions, dependent: :destroy
  # Skip nested rows whose identifying field is blank; allow_destroy lets the
  # form remove existing rows via _destroy.
  accepts_nested_attributes_for :ingredients,
    reject_if: proc { |attributes| attributes['name'].blank? },
    allow_destroy: true
  accepts_nested_attributes_for :directions,
    reject_if: proc { |attributes| attributes['step'].blank? },
    allow_destroy: true
  # Paperclip attachment: single :medium style, images only
  has_attached_file :image, styles: { medium: "400x400#" }
  validates_attachment_content_type :image, content_type: /\Aimage\/.*\Z/
end
|
# Association extension for competition scores: create the new score only
# when it is the first — or strictly better than the existing — score for
# the same race and racer.
module CreateIfBestResultForRaceExtension
  def create_if_best_result_for_race(attributes)
    source_result = attributes[:source_result]
    # FIX: iterate over a snapshot — the original `for` loop deleted from the
    # very collection it was traversing, which can skip elements.
    @owner.scores.to_a.each do |score|
      same_race = (score.source_result.race == source_result.race)
      same_racer = (score.source_result.racer == source_result.racer)
      next unless same_race && score.source_result.racer && same_racer
      if attributes[:points] > score.points
        # the new score is better: drop the old one, fall through to create
        @owner.scores.delete(score)
      else
        # an equal-or-better score already exists: create nothing
        return nil
      end
    end
    create(attributes)
  end
end
# Race result
#
# Race is the only required attribute -- even +racer+ and +place+ can be blank
#
# Result keeps its own copy of +number+ and +team+, even though each Racer has
# a +team+ atribute and many RaceNumbers. Result's number is just a String, not
# a RaceNumber
#
# Doesn't support multiple hotspot points, though it should
class Result < ActiveRecord::Base
include Dirty
# FIXME Make sure names are coerced correctly
# TODO Add number (race_number) and license
before_validation :find_associated_records
before_save :save_racer
after_save :update_racer_number
after_save {|result| result.race.standings.after_result_save}
after_destroy {|result| result.race.standings.after_result_destroy}
has_many :scores, :foreign_key => 'competition_result_id', :dependent => :destroy, :extend => CreateIfBestResultForRaceExtension
has_many :dependent_scores, :class_name => 'Score', :foreign_key => 'source_result_id', :dependent => :destroy
belongs_to :category
belongs_to :race
belongs_to :racer
belongs_to :team
validates_presence_of :race_id
# All Results for +racer+ (a Racer instance or a bare id), eager-loading
# team, racer, scores, category and the race/standings/event chain.
# Uses the legacy Rails find(:all, ...) API.
def Result.find_all_for(racer)
  if racer.is_a? Racer
    racer_id = racer.id
  else
    racer_id = racer
  end
  Result.find(
    :all,
    :include => [:team, :racer, :scores, :category, {:race => [{:standings => :event}, :category]}],
    :conditions => ['racers.id = ?', racer_id]
  )
end
# Route the name-ish pseudo-attributes through their custom writers (which
# keep the associated racer/team in sync) before handing the rest to
# ActiveRecord; also coerce a String category into a Category instance.
def attributes=(attributes)
  unless attributes.nil?
    [:first_name, :last_name, :team_name].each do |key|
      send("#{key}=", attributes[key]) if attributes[key]
    end
    if attributes[:category].is_a?(String)
      attributes[:category] = Category.new(:name => attributes[:category])
    end
  end
  super(attributes)
end
# Replace any new +category+, +racer+, or +team+ with one that already exists if name matches
def find_associated_records
  # Category: swap a new/dirty category for the persisted one with the same
  # name; a blank name clears the association entirely.
  if category and (category.new_record? or category.dirty?)
    if category.name.blank?
      self.category = nil
    else
      existing_category = Category.find_by_name(category.name)
      self.category = existing_category if existing_category
    end
  end
  # Racer: only adopt an existing racer when the match is unambiguous
  # (exactly one candidate returned by find_racers).
  _racer = self.racer
  if _racer and (_racer.new_record? or _racer.dirty?)
    if _racer.name.blank?
      self.racer = nil
    else
      existing_racers = find_racers
      self.racer = existing_racers.to_a.first if existing_racers.size == 1
    end
  end
  # Brand-new racers riding with a real (non-rental) number are treated as
  # members from the race date onwards.
  if !self.racer.nil? &&
      self.racer.new_record? &&
      self.racer[:member_from].blank? &&
      !RaceNumber.rental?(number, Discipline[event.discipline])
    self.racer.member_from = race.date
  end
  # Team: same name-based de-duplication as category (aliases considered).
  if self.team and (team.new_record? or team.dirty?)
    if team.name.blank?
      self.team = nil
    else
      existing_team = Team.find_by_name_or_alias(team.name)
      self.team = existing_team if existing_team
    end
  end
end
# Use +first_name+, +last_name+, +race_number+, +team+ to figure out if +racer+ already exists.
# Returns an Array of Racers if there is more than one potential match
#
# TODO refactor into methods or split responsibilities with Racer?
# Need Event to match on race number. Event will not be set before result is saved to database
def find_racers(_event = event)
  matches = Set.new
  # name: start with everyone matching by name or alias
  matches = matches + Racer.find_all_by_name_or_alias(first_name, last_name)
  return matches if matches.size == 1
  # number: used to disambiguate multiple name matches, or as the only
  # signal when no name was given at all
  if matches.size > 1 or (matches.empty? and first_name.blank? and last_name.blank?)
    race_numbers = RaceNumber.find_all_by_value_and_event(number, _event)
    race_numbers.each do |race_number|
      if matches.include?(race_number.racer)
        # a race number confirming an existing name match settles it
        return [race_number.racer]
      else
        matches << race_number.racer
      end
    end
    return matches if matches.size == 1
  end
  # team: final tie-breaker — drop candidates on a different team.
  # NOTE(review): Set#reject! returns nil when nothing changes, but the
  # return value is unused here, so that quirk is harmless.
  unless team_name.blank?
    team = Team.find_by_name_or_alias(team_name)
    matches.reject! do |match|
      match.team != team
    end
  end
  matches
end
# Set +racer#number+ to +number+ if this isn't a rental number
def update_racer_number
  discipline = Discipline[event.discipline]
  if self.racer and !number.blank? and !RaceNumber.rental?(number, discipline)
    # NOTE(review): this line only reads event.number_issuer and discards the
    # result — it looks like it was meant to ensure the issuer is loaded (or
    # assigned) before use; confirm the intent.
    event.number_issuer unless event.number_issuer
    self.racer.add_number(number, discipline, event.number_issuer, event.date.year)
  end
end
# Persist pending edits on the associated racer (tracked via the Dirty mixin)
# before this result is saved.
def save_racer
  racer.save! if racer && racer.dirty?
end
# Set points from +scores+
# Only applies to competition results that actually have scores.
def calculate_points
  return if scores.empty? || !competition_result?
  self.points = scores.inject(0) { |total, score| total + score.points }
end
# Category name via the association; empty string when none.
def category_name
  category ? category.name : ''
end
# Assign a fresh (unsaved) Category by name; a blank name clears the
# association. find_associated_records swaps in a persisted category on save.
def category_name=(name)
  self.category = name.blank? ? nil : Category.new(:name => name)
end
# True when this result belongs to a Competition event (a calculated result
# rather than a raw race result).
def competition_result?
  self.race.standings.event.is_a?(Competition)
end
# Race date; association(true) forces a reload (legacy Rails API) when the
# cached association is nil.
def date
  if race || race(true)
    race.date
  end
end
# Walk race -> standings -> event, force-reloading each step if needed.
def event
  if (race || race(true)) && (race.standings || race.standings(true)) && (race.standings.event || race.standings.event(true))
    race.standings.event
  end
end
# Raw place string; never nil.
def place
  self[:place] || ''
end
# Stored points as a Float; a missing value counts as zero.
def points
  (self[:points] || 0.0).to_f
end
# Hot spots
# Blank or nil bonus/penalty values are stored as zero.
def points_bonus_penalty=(value)
  value = 0 if value.nil? || value == ""
  write_attribute(:points_bonus_penalty, value)
end
# Points from placing at finish, not from hot spots
def points_from_place=(value)
  value = 0 if value.nil? || value == ""
  write_attribute(:points_from_place, value)
end
# First name delegated to the racer; "" when absent or blank.
def first_name
  (racer && !racer.first_name.blank?) ? racer.first_name : ""
end
# Update the racer's first name (marking it dirty), or start a new racer.
def first_name=(value)
  if racer
    racer.first_name = value
    racer.dirty
  else
    self.racer = Racer.new(:first_name => value)
  end
end
# Last name delegated to the racer; "" when absent or blank.
def last_name
  (racer && !racer.last_name.blank?) ? racer.last_name : ""
end
# Update the racer's last name (marking it dirty), or start a new racer.
def last_name=(value)
  if racer
    racer.last_name = value
    racer.dirty
  else
    self.racer = Racer.new(:last_name => value)
  end
end
# racer.name
# Full racer name; "" when no racer is attached.
def name
  racer ? racer.name : ""
end
# Alias-style reader for +name+.
def racer_name
  name
end
# racer.name
# Update the racer's full name (marking it dirty), or start a new racer.
def name=(value)
  if racer
    racer.name = value
    racer.dirty
  else
    new_racer = Racer.new
    new_racer.name = value
    self.racer = new_racer
  end
end
# Alias-style writer for +name+.
# FIX: the original body was `name = value`, which only created a local
# variable and silently discarded the assignment; it must call the writer.
def racer_name=(value)
  self.name = value
end
# Team name via the association; '' when no team or an unnamed team.
def team_name
  return '' if team.nil?
  team.name || ''
end
# Always builds a fresh (unsaved) Team when the name differs; relies on
# find_associated_records to swap in the persisted team before save. Also
# keeps the racer's team in sync with the result's team.
def team_name=(value)
  if self.team.nil? || self.team.name != value
    self.team = Team.new(:name => value)
    self.team.dirty
  end
  if self.racer && self.racer.team_name != value
    self.racer.team = self.team
    self.racer.dirty
  end
end
# Formatted time accessors, all in hh:mm:ss.00 format (e.g. 1:20:59.75).
# Each `<attr>_s` reader formats the stored float seconds via time_to_s;
# each `<attr>_s=` writer parses a formatted string back via s_to_time.
[:time, :time_total, :time_bonus_penalty, :time_gap_to_leader].each do |attribute|
  define_method("#{attribute}_s") do
    time_to_s(send(attribute))
  end
  define_method("#{attribute}_s=") do |formatted|
    send("#{attribute}=", s_to_time(formatted))
  end
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
# This method doesn't handle some typical edge cases very well
def time_to_s(time)
  # zero or blank time renders as an empty string
  return '' if time == 0.0 or time.blank?
  hours = (time / 3600).to_i
  minutes = ((time - (hours * 3600)) / 60).floor
  seconds = (time - (hours * 3600).floor - (minutes * 60).floor)
  # TODO Use sprintf better
  seconds = sprintf('%0.2f', seconds)
  if hours > 0
    # the hours segment is omitted entirely when zero (e.g. "20:59.75")
    hour_prefix = "#{hours.to_s.rjust(2, '0')}:"
  end
  "#{hour_prefix}#{minutes.to_s.rjust(2, '0')}:#{seconds.rjust(5, '0')}"
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
# Parse an "hh:mm:ss.00" string (comma accepted as the decimal separator)
# into float seconds; blank input yields 0.0.
# FIX: the original called gsub! on the caller's argument, mutating it in
# place and raising NoMethodError for non-String input — work on a copy.
def s_to_time(string)
  text = string.to_s.tr(',', '.')
  return 0.0 if text.strip.empty?
  # rightmost segment is seconds, each segment leftwards is worth 60x more
  text.split(':').reverse.each_with_index.inject(0.0) do |total, (part, index)|
    total + part.to_f * (60.0 ** index)
  end
end
# Fix common formatting mistakes and inconsistencies
def cleanup
  cleanup_place
  cleanup_number
  %w[first_name last_name team_name].each do |field|
    send("#{field}=", cleanup_name(send(field)))
  end
end
# Drops the 'st' from 1st, among other things
# NOTE(review): the place reader returns '' (never nil/false) when unset, so
# the else branch below appears unreachable — confirm.
def cleanup_place
  if place
    # NOTE(review): String#to_s returns the same object, so the bang methods
    # below mutate the stored attribute string directly — confirm acceptable.
    normalized_place = place.to_s
    normalized_place.upcase!
    # strip ordinal suffixes (comparison happens on the upcased string)
    normalized_place.gsub!("ST", "")
    normalized_place.gsub!("ND", "")
    normalized_place.gsub!("RD", "")
    normalized_place.gsub!("TH", "")
    normalized_place.gsub!(")", "")
    # collapse spreadsheet artifacts like "3.0" to "3"
    normalized_place = normalized_place.to_i.to_s if normalized_place[/^\d+\.0$/]
    normalized_place.strip!
    normalized_place.gsub!(".", "")
    self.place = normalized_place
  else
    self.place = ""
  end
end
# Normalize the race number to a string, collapsing spreadsheet artifacts
# like "12.0" down to "12".
def cleanup_number
  normalized = self.number.to_s
  normalized = normalized.to_i.to_s if normalized =~ /^\d+\.0$/
  self.number = normalized
end
# Mostly removes unfortunate punctuation typos
# nil passes through untouched; placeholder junk collapses to "".
def cleanup_name(name)
  return name if name.nil?
  return '' if ['0.0', '0', '.'].include?(name) || name.include?('N/A')
  name.gsub(';', "'").gsub(/ *\/ */, '/')
end
# Highest points first. Break ties by highest placing
def compare_by_points(other, break_ties = true)
  diff = other.points <=> points
  return diff if diff != 0 || !break_ties
  # tie: compare placings best-to-worst across both score lists
  scores_by_place = scores.sort do |x, y|
    x.source_result <=> y.source_result
  end
  other_scores_by_place = other.scores.sort do |x, y|
    x.source_result <=> y.source_result
  end
  max_results = max(scores_by_place.size, other_scores_by_place.size)
  return 0 if max_results == 0
  for index in 0..(max_results - 1)
    # running out of scores first means fewer placings -> sorts after
    if scores_by_place.size == index
      return 1
    elsif other_scores_by_place.size == index
      return -1
    else
      diff = scores_by_place[index].source_result.place <=> other_scores_by_place[index].source_result.place
      return diff if diff != 0
    end
  end
  0
end
# Larger of the two values (kept for compare_by_points; ties return an
# equal value either way). Uses Array#max instead of the hand-rolled branch.
def max(x, y)
  [x, y].max
end
# All numbered places first, followed by DNF, DQ, and DNS
# Ties within the same status fall back to comparing ids (0 when unsaved).
def <=>(other)
  begin
    # treat blank places as 0 so they sort after every numbered place
    if place.blank?
      place_as_int = 0
    else
      place_as_int = place.to_i
    end
    if other.place.blank?
      other_place_as_int = 0
    else
      other_place_as_int = other.place.to_i
    end
    if place_as_int > 0
      # a numbered place beats any non-numbered status
      if other_place_as_int == 0
        return -1
      elsif place_as_int != other_place_as_int
        return place_as_int <=> other_place_as_int
      end
    elsif place == 'DNF'
      if other_place_as_int > 0
        return 1
      elsif other.place == 'DNF'
        if id.nil?
          return 0
        else
          return id <=> other.id
        end
      elsif other.place == 'DQ'
        return -1
      else
        return -1
      end
    elsif place == 'DQ'
      if other_place_as_int > 0
        return 1
      elsif other.place == 'DNF'
        return 1
      elsif other.place == 'DQ'
        if id.nil?
          return 0
        else
          return id <=> other.id
        end
      else
        return -1
      end
    elsif place == 'DNS'
      if other_place_as_int > 0
        return 1
      elsif other.place == 'DNF'
        return 1
      elsif other.place == 'DQ'
        return 1
      elsif other.place == 'DNS'
        if id.nil?
          return 0
        else
          return id <=> other.id
        end
      else
        return -1
      end
    elsif place.blank?
      if other_place_as_int > 0
        return 1
      elsif other.place == 'DNF'
        return 1
      elsif other.place == 'DNS'
        return 1
      else
        if id.nil?
          return 0
        else
          return id <=> other.id
        end
      end
    end
    # equal numbered places fall through to here: break the tie by id
    if id.nil?
      return 0
    else
      return id <=> other.id
    end
  rescue ArgumentError => error
    RAILS_DEFAULT_LOGGER.error("Error in Result.<=> #{error} comparing #{self} with #{other}")
    # FIX: was `throw error` — Kernel#throw expects a catch tag and would
    # raise an unrelated UncaughtThrowError here, masking the original
    # ArgumentError. Kernel#raise re-raises it properly.
    raise error
  end
end
# Add +race+ and +race#standings+ name, and points to default to_s
# (the time column is only included when a raw time value is present)
def to_long_s
  "#<Result #{id}\t#{place}\t#{race.standings.name}\t#{race.name} (#{race.id})\t#{name}\t#{team_name}\t#{points}\t#{time_s if self[:time]}>"
end
# Compact debug representation using raw foreign keys (no association loads
# beyond points/place)
def to_s
  "#<Result #{id} place #{place} race #{race_id} racer #{racer_id} team #{team_id} pts #{points}>"
end
end
Changed logger
# Association extension for competition scores: create the new score only
# when it is the first — or strictly better than the existing — score for
# the same race and racer.
module CreateIfBestResultForRaceExtension
  def create_if_best_result_for_race(attributes)
    source_result = attributes[:source_result]
    # NOTE(review): this deletes from @owner.scores while iterating over it,
    # which can skip elements — confirm, and iterate over a copy if so.
    for score in @owner.scores
      same_race = (score.source_result.race == source_result.race)
      same_racer = (score.source_result.racer == source_result.racer)
      if same_race && score.source_result.racer && same_racer
        if attributes[:points] > score.points
          # the new score is better: drop the old one, fall through to create
          @owner.scores.delete(score)
        else
          # an equal-or-better score already exists: create nothing
          return nil
        end
      end
    end
    create(attributes)
  end
end
# Race result
#
# Race is the only required attribute -- even +racer+ and +place+ can be blank
#
# Result keeps its own copy of +number+ and +team+, even though each Racer has
# a +team+ atribute and many RaceNumbers. Result's number is just a String, not
# a RaceNumber
#
# Doesn't support multiple hotspot points, though it should
class Result < ActiveRecord::Base
include Dirty
# FIXME Make sure names are coerced correctly
# TODO Add number (race_number) and license
before_validation :find_associated_records
before_save :save_racer
after_save :update_racer_number
after_save {|result| result.race.standings.after_result_save}
after_destroy {|result| result.race.standings.after_result_destroy}
has_many :scores, :foreign_key => 'competition_result_id', :dependent => :destroy, :extend => CreateIfBestResultForRaceExtension
has_many :dependent_scores, :class_name => 'Score', :foreign_key => 'source_result_id', :dependent => :destroy
belongs_to :category
belongs_to :race
belongs_to :racer
belongs_to :team
validates_presence_of :race_id
def Result.find_all_for(racer)
if racer.is_a? Racer
racer_id = racer.id
else
racer_id = racer
end
Result.find(
:all,
:include => [:team, :racer, :scores, :category, {:race => [{:standings => :event}, :category]}],
:conditions => ['racers.id = ?', racer_id]
)
end
def attributes=(attributes)
unless attributes.nil?
if attributes[:first_name]
self.first_name = attributes[:first_name]
end
if attributes[:last_name]
self.last_name = attributes[:last_name]
end
if attributes[:team_name]
self.team_name = attributes[:team_name]
end
if attributes[:category] and attributes[:category].is_a?(String)
attributes[:category] = Category.new(:name => attributes[:category])
end
end
super(attributes)
end
# Replace any new +category+, +racer+, or +team+ with one that already exists if name matches
def find_associated_records
if category and (category.new_record? or category.dirty?)
if category.name.blank?
self.category = nil
else
existing_category = Category.find_by_name(category.name)
self.category = existing_category if existing_category
end
end
_racer = self.racer
if _racer and (_racer.new_record? or _racer.dirty?)
if _racer.name.blank?
self.racer = nil
else
existing_racers = find_racers
self.racer = existing_racers.to_a.first if existing_racers.size == 1
end
end
if !self.racer.nil? &&
self.racer.new_record? &&
self.racer[:member_from].blank? &&
!RaceNumber.rental?(number, Discipline[event.discipline])
self.racer.member_from = race.date
end
if self.team and (team.new_record? or team.dirty?)
if team.name.blank?
self.team = nil
else
existing_team = Team.find_by_name_or_alias(team.name)
self.team = existing_team if existing_team
end
end
end
# Use +first_name+, +last_name+, +race_number+, +team+ to figure out if +racer+ already exists.
# Returns an Array of Racers if there is more than one potential match
#
# TODO refactor into methods or split responsibilities with Racer?
# Need Event to match on race number. Event will not be set before result is saved to database
def find_racers(_event = event)
matches = Set.new
# name
matches = matches + Racer.find_all_by_name_or_alias(first_name, last_name)
return matches if matches.size == 1
# number
if matches.size > 1 or (matches.empty? and first_name.blank? and last_name.blank?)
race_numbers = RaceNumber.find_all_by_value_and_event(number, _event)
race_numbers.each do |race_number|
if matches.include?(race_number.racer)
return [race_number.racer]
else
matches << race_number.racer
end
end
return matches if matches.size == 1
end
# team
unless team_name.blank?
team = Team.find_by_name_or_alias(team_name)
matches.reject! do |match|
match.team != team
end
end
matches
end
# Set +racer#number+ to +number+ if this isn't a rental number
def update_racer_number
discipline = Discipline[event.discipline]
if self.racer and !number.blank? and !RaceNumber.rental?(number, discipline)
event.number_issuer unless event.number_issuer
self.racer.add_number(number, discipline, event.number_issuer, event.date.year)
end
end
def save_racer
if self.racer and self.racer.dirty?
self.racer.save!
end
end
# Set points from +scores+
def calculate_points
if !scores.empty? and competition_result?
pts = 0
for score in scores
pts = pts + score.points
end
self.points = pts
end
end
def category_name
if self.category
self.category.name
else
''
end
end
def category_name=(name)
if name.blank?
self.category = nil
else
self.category = Category.new(:name => name)
end
end
def competition_result?
self.race.standings.event.is_a?(Competition)
end
def date
if race || race(true)
race.date
end
end
def event
if (race || race(true)) && (race.standings || race.standings(true)) && (race.standings.event || race.standings.event(true))
race.standings.event
end
end
def place
self[:place] || ''
end
def points
if self[:points]
self[:points].to_f
else
0.0
end
end
# Hot spots
def points_bonus_penalty=(value)
if value == nil || value == ""
value = 0
end
write_attribute(:points_bonus_penalty, value)
end
# Points from placing at finish, not from hot spots
def points_from_place=(value)
if value == nil || value == ""
value = 0
end
write_attribute(:points_from_place, value)
end
def first_name
if racer and !racer.first_name.blank?
racer.first_name
else
""
end
end
def first_name=(value)
if self.racer
self.racer.first_name = value
self.racer.dirty
else
self.racer = Racer.new(:first_name => value)
end
end
def last_name
if (racer and !racer.last_name.blank?)
racer.last_name
else
""
end
end
def last_name=(value)
if self.racer
self.racer.last_name = value
self.racer.dirty
else
self.racer = Racer.new(:last_name => value)
end
end
# racer.name
def name
if racer == nil
""
else
racer.name
end
end
def racer_name
name
end
# racer.name
def name=(value)
if self.racer
self.racer.name = value
self.racer.dirty
else
self.racer = Racer.new
self.racer.name = value
end
end
def racer_name=(value)
name = value
end
def team_name
return '' if team.nil?
team.name || ''
end
def team_name=(value)
if self.team.nil? || self.team.name != value
self.team = Team.new(:name => value)
self.team.dirty
end
if self.racer && self.racer.team_name != value
self.racer.team = self.team
self.racer.dirty
end
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
def time_s
time_to_s(self.time)
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
def time_s=(time)
self.time = s_to_time(time)
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
def time_total_s
time_to_s(self.time_total)
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
def time_total_s=(time_total)
self.time_total = s_to_time(time_total)
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
def time_bonus_penalty_s
time_to_s(self.time_bonus_penalty)
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
def time_bonus_penalty_s=(time_bonus_penalty)
self.time_bonus_penalty = s_to_time(time_bonus_penalty)
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
def time_gap_to_leader_s
time_to_s(self.time_gap_to_leader)
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
def time_gap_to_leader_s=(time_gap_to_leader_s)
self.time_gap_to_leader = s_to_time(time_gap_to_leader_s)
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
# This method doesn't handle some typical edge cases very well
# Formats +time+ (seconds, Numeric) as "MM:SS.ss", prefixed with "HH:" only
# when there is at least one whole hour. Zero/blank times yield "".
def time_to_s(time)
  return '' if time == 0.0 || time.blank?
  hours = (time / 3600).to_i
  minutes = ((time - (hours * 3600)) / 60).floor
  seconds = sprintf('%0.2f', time - (hours * 3600).floor - (minutes * 60).floor)
  parts = []
  parts << hours.to_s.rjust(2, '0') if hours > 0
  parts << minutes.to_s.rjust(2, '0')
  "#{parts.join(':')}:#{seconds.rjust(5, '0')}"
end
# Time in hh:mm:ss.00 format. E.g., 1:20:59.75
# This method doesn't handle some typical edge cases very well
# Parse an "hh:mm:ss.00"-style string into a float number of seconds; ","
# is accepted as a decimal separator. Blank input returns 0.0.
# BUGFIX: the previous version called `string.gsub!(',', '.')`, which
# mutated the caller's string in place (and raised NoMethodError for
# non-String input). Work on a normalized copy instead.
def s_to_time(string)
  return 0.0 if string.to_s.blank?
  normalized = string.to_s.tr(',', '.')
  normalized.split(':').reverse.each_with_index.inject(0.0) do |total, (part, index)|
    total + part.to_f * (60.0 ** index)
  end
end
# Fix common formatting mistakes and inconsistencies
# Normalizes place and number, then runs each name attribute through
# #cleanup_name via its own reader/writer pair.
def cleanup
  cleanup_place
  cleanup_number
  %w[first_name last_name team_name].each do |attribute|
    send("#{attribute}=", cleanup_name(send(attribute)))
  end
end
# Drops the 'st' from 1st, among other things
# Normalizes the +place+ attribute: strips ordinal suffixes (1st/2nd/3rd/
# 4th), stray ')' and '.', and collapses spreadsheet artifacts like "12.0"
# to "12". A nil place becomes "". The order of these in-place edits
# matters (upcase! must precede the suffix gsub! calls).
def cleanup_place
if place
normalized_place = place.to_s
normalized_place.upcase!
# Upcasing above makes these suffix removals case-insensitive.
normalized_place.gsub!("ST", "")
normalized_place.gsub!("ND", "")
normalized_place.gsub!("RD", "")
normalized_place.gsub!("TH", "")
normalized_place.gsub!(")", "")
# "12.0" (spreadsheet export artifact) -> "12"
normalized_place = normalized_place.to_i.to_s if normalized_place[/^\d+\.0$/]
normalized_place.strip!
normalized_place.gsub!(".", "")
self.place = normalized_place
else
self.place = ""
end
end
# Normalize +number+ to a string, collapsing spreadsheet-style "12.0" to "12".
def cleanup_number
  self.number = number.to_s
  self.number = number.to_i.to_s if number[/^\d+\.0$/]
end
# Mostly removes unfortunate punctuation typos
# Returns nil for nil and '' for obvious junk values; otherwise a copy with
# ';' turned into an apostrophe and slashes tightened ("a / b" -> "a/b").
def cleanup_name(name)
  return name if name.nil?
  return '' if ['0.0', '0', '.'].include?(name) || name.include?('N/A')
  name.gsub(';', "'").gsub(/ *\/ */, '/')
end
# Highest points first. Break ties by highest placing
# Returns -1/0/1 like <=>: negative means self ranks ahead of +other+.
# When points tie (and +break_ties+), walks both racers' source results
# best-place-first; a racer whose results run out first sorts after the one
# with results remaining. (Idiom: replaced the `for` loop, which leaks its
# loop variable into method scope, with a Range#each block.)
def compare_by_points(other, break_ties = true)
  diff = other.points <=> points
  return diff if diff != 0 || !break_ties
  scores_by_place = scores.sort do |x, y|
    x.source_result <=> y.source_result
  end
  other_scores_by_place = other.scores.sort do |x, y|
    x.source_result <=> y.source_result
  end
  max_results = max(scores_by_place.size, other_scores_by_place.size)
  return 0 if max_results == 0
  (0...max_results).each do |index|
    if scores_by_place.size == index
      return 1
    elsif other_scores_by_place.size == index
      return -1
    else
      diff = scores_by_place[index].source_result.place <=> other_scores_by_place[index].source_result.place
      return diff if diff != 0
    end
  end
  0
end
# Return the larger of +x+ and +y+ (any Comparable pair).
# Idiom: delegate to Array#max instead of hand-rolled branching.
def max(x, y)
  [x, y].max
end
# All numbered places first, followed by DNF, DQ, and DNS
# Sort order: numeric places ascending, then DNF, then DQ, then DNS, then
# blank places; within a category, falls back to comparing ids (0 when
# either record is unsaved).
# BUGFIX: the rescue clause used `throw error` — `throw` belongs to Ruby's
# catch/throw control flow and would raise UncaughtThrowError here; the
# intent was to re-raise, so use `raise error`.
def <=>(other)
  begin
    if place.blank?
      place_as_int = 0
    else
      place_as_int = place.to_i
    end
    if other.place.blank?
      other_place_as_int = 0
    else
      other_place_as_int = other.place.to_i
    end
    if place_as_int > 0
      # Numbered places beat every non-numeric place; equal numbers fall
      # through to the id comparison at the bottom.
      if other_place_as_int == 0
        return -1
      elsif place_as_int != other_place_as_int
        return place_as_int <=> other_place_as_int
      end
    elsif place == 'DNF'
      if other_place_as_int > 0
        return 1
      elsif other.place == 'DNF'
        if id.nil?
          return 0
        else
          return id <=> other.id
        end
      elsif other.place == 'DQ'
        return -1
      else
        return -1
      end
    elsif place == 'DQ'
      if other_place_as_int > 0
        return 1
      elsif other.place == 'DNF'
        return 1
      elsif other.place == 'DQ'
        if id.nil?
          return 0
        else
          return id <=> other.id
        end
      else
        return -1
      end
    elsif place == 'DNS'
      if other_place_as_int > 0
        return 1
      elsif other.place == 'DNF'
        return 1
      elsif other.place == 'DQ'
        return 1
      elsif other.place == 'DNS'
        if id.nil?
          return 0
        else
          return id <=> other.id
        end
      else
        return -1
      end
    elsif place.blank?
      # NOTE(review): no explicit branch for other.place == 'DQ' here, so a
      # blank place compares by id against DQ rather than sorting after it —
      # confirm whether that asymmetry is intended.
      if other_place_as_int > 0
        return 1
      elsif other.place == 'DNF'
        return 1
      elsif other.place == 'DNS'
        return 1
      else
        if id.nil?
          return 0
        else
          return id <=> other.id
        end
      end
    end
    # Same category (e.g. equal numeric places): order by id when saved.
    if id.nil?
      return 0
    else
      return id <=> other.id
    end
  rescue ArgumentError => error
    logger.error("Error in Result.<=> #{error} comparing #{self} with #{other}")
    raise error
  end
end
# Add +race+ and +race#standings+ name, and points to default to_s
# Verbose, tab-separated debug representation; includes the formatted time
# only when the raw time attribute is present.
def to_long_s
"#<Result #{id}\t#{place}\t#{race.standings.name}\t#{race.name} (#{race.id})\t#{name}\t#{team_name}\t#{points}\t#{time_s if self[:time]}>"
end
# Compact debug representation using foreign keys only (no association loads).
def to_s
"#<Result #{id} place #{place} race #{race_id} racer #{racer_id} team #{team_id} pts #{points}>"
end
end |
# frozen_string_literal: true
# Handle on a remote execution container provided by an external runner
# management service. Pairs a user with an execution environment and
# delegates all container interaction to a pluggable strategy object.
class Runner < ApplicationRecord
  belongs_to :execution_environment
  belongs_to :user, polymorphic: true

  # Request a runner_id from the management before validation (see #request_id).
  before_validation :request_id

  validates :execution_environment, :user, :runner_id, presence: true

  # Runtime-only strategy instance; not persisted with the record.
  attr_accessor :strategy

  # Resolve (memoized per process) the strategy class configured under
  # runner_management.strategy; falls back to the Null strategy when runner
  # management is disabled.
  def self.strategy_class
    @strategy_class ||= if Runner.management_active?
                          strategy_name = CodeOcean::Config.new(:code_ocean).read[:runner_management][:strategy]
                          "runner/strategy/#{strategy_name}".camelize.constantize
                        else
                          Runner::Strategy::Null
                        end
  end

  # Whether a runner management is configured and enabled.
  # NOTE(review): `||=` never caches a false result, so the config file is
  # re-read on every call while management is disabled — confirm if intended.
  def self.management_active?
    @management_active ||= begin
      runner_management = CodeOcean::Config.new(:code_ocean).read[:runner_management]
      if runner_management
        runner_management[:enabled]
      else
        false
      end
    end
  end

  # Find or create the runner for the given user/environment pair.
  # Raises Runner::Error::Unknown when a fresh runner cannot be persisted.
  def self.for(user, execution_environment)
    runner = find_by(user: user, execution_environment: execution_environment)
    if runner.nil?
      runner = Runner.create(user: user, execution_environment: execution_environment)
      # The `strategy` is added through the before_validation hook `:request_id`.
      raise Runner::Error::Unknown.new("Runner could not be saved: #{runner.errors.inspect}") unless runner.persisted?
    else
      # This information is required but not persisted in the runner model.
      runner.strategy = strategy_class.new(runner.runner_id, runner.execution_environment)
    end
    runner
  end

  # Copy the given files into the container, transparently requesting a
  # fresh runner once when the management no longer knows the current one.
  def copy_files(files)
    @strategy.copy_files(files)
  rescue Runner::Error::RunnerNotFound
    request_new_id
    save
    @strategy.copy_files(files)
  end

  # Execute +command+ in the container, yielding the WebSocket to +block+.
  # Returns the execution duration in seconds; on Runner::Error, stores the
  # duration on the exception and re-raises it.
  def attach_to_execution(command, &block)
    starting_time = Time.zone.now
    begin
      # As the EventMachine reactor is probably shared with other threads, we cannot use EventMachine.run with
      # stop_event_loop to wait for the WebSocket connection to terminate. Instead we use a self built event
      # loop for that: Runner::EventLoop. The attach_to_execution method of the strategy is responsible for
      # initializing its Runner::Connection with the given event loop. The Runner::Connection class ensures that
      # this event loop is stopped after the socket was closed.
      event_loop = Runner::EventLoop.new
      socket = @strategy.attach_to_execution(command, event_loop, &block)
      event_loop.wait
      raise socket.error if socket.error.present?
    rescue Runner::Error => e
      e.execution_duration = Time.zone.now - starting_time
      raise
    end
    Time.zone.now - starting_time # execution duration
  end

  # Run +command+, capturing stdout/stderr and exit status into a hash.
  # Retries once with a fresh runner when the current one is unknown to the
  # management; when +raise_exception+ is true, the last error is re-raised
  # from the ensure block.
  def execute_command(command, raise_exception: true)
    output = {}
    stdout = +''
    stderr = +''
    try = 0
    begin
      exit_code = 1 # default to error
      execution_time = attach_to_execution(command) do |socket|
        socket.on :stderr do |data|
          stderr << data
        end
        socket.on :stdout do |data|
          stdout << data
        end
        socket.on :exit do |received_exit_code|
          exit_code = received_exit_code
        end
      end
      output.merge!(container_execution_time: execution_time, status: exit_code.zero? ? :ok : :failed)
    rescue Runner::Error::ExecutionTimeout => e
      Rails.logger.debug { "Running command `#{command}` timed out: #{e.message}" }
      output.merge!(status: :timeout, container_execution_time: e.execution_duration)
    rescue Runner::Error::RunnerNotFound => e
      Rails.logger.debug { "Running command `#{command}` failed for the first time: #{e.message}" }
      try += 1
      request_new_id
      save
      if try == 1
        # BUGFIX: clear the rescued exception before retrying. `e` stays
        # bound across `retry`, so without this reset the `ensure` block
        # below re-raised the stale RunnerNotFound error even after the
        # second attempt succeeded.
        e = nil
        retry
      end
      Rails.logger.debug { "Running command `#{command}` failed for the second time: #{e.message}" }
      output.merge!(status: :failed, container_execution_time: e.execution_duration)
    rescue Runner::Error => e
      Rails.logger.debug { "Running command `#{command}` failed: #{e.message}" }
      output.merge!(status: :failed, container_execution_time: e.execution_duration)
    ensure
      # We forward the exception if requested
      raise e if raise_exception && defined?(e) && e.present?
      output.merge!(stdout: stdout, stderr: stderr)
    end
  end

  # Ask the management to destroy the remote container.
  def destroy_at_management
    @strategy.destroy_at_management
  end

  private

  # before_validation hook: only fetch an id when none is present yet.
  def request_id
    request_new_id if runner_id.blank?
  end

  # Fetch a fresh runner id from the management and rebuild the strategy.
  # On EnvironmentNotFound, tries to sync the environment and raises a more
  # specific EnvironmentNotFound either way.
  def request_new_id
    strategy_class = self.class.strategy_class
    begin
      self.runner_id = strategy_class.request_from_management(execution_environment)
      @strategy = strategy_class.new(runner_id, execution_environment)
    rescue Runner::Error::EnvironmentNotFound
      # Whenever the environment could not be found by the runner management, we
      # try to synchronize it and then forward a more specific error to our callee.
      if strategy_class.sync_environment(execution_environment)
        raise Runner::Error::EnvironmentNotFound.new(
          "The execution environment with id #{execution_environment.id} was not found yet by the runner management. "\
          'It has been successfully synced now so that the next request should be successful.'
        )
      else
        raise Runner::Error::EnvironmentNotFound.new(
          "The execution environment with id #{execution_environment.id} was not found by the runner management."\
          'In addition, it could not be synced so that this probably indicates a permanent error.'
        )
      end
    end
  end
end
Reset previous exception if retrying command execution
# frozen_string_literal: true
# Handle on a remote execution container provided by an external runner
# management service. Pairs a user with an execution environment and
# delegates all container interaction to a pluggable strategy object.
class Runner < ApplicationRecord
belongs_to :execution_environment
belongs_to :user, polymorphic: true
# Request a runner_id from the management before validation (see #request_id).
before_validation :request_id
validates :execution_environment, :user, :runner_id, presence: true
# Runtime-only strategy instance; not persisted with the record.
attr_accessor :strategy
# Resolve (memoized per process) the strategy class configured under
# runner_management.strategy; Null strategy when management is disabled.
def self.strategy_class
@strategy_class ||= if Runner.management_active?
strategy_name = CodeOcean::Config.new(:code_ocean).read[:runner_management][:strategy]
"runner/strategy/#{strategy_name}".camelize.constantize
else
Runner::Strategy::Null
end
end
# Whether a runner management is configured and enabled.
# NOTE(review): `||=` never caches a false result, so the config file is
# re-read on every call while management is disabled — confirm if intended.
def self.management_active?
@management_active ||= begin
runner_management = CodeOcean::Config.new(:code_ocean).read[:runner_management]
if runner_management
runner_management[:enabled]
else
false
end
end
end
# Find or create the runner for the given user/environment pair.
# Raises Runner::Error::Unknown when a fresh runner cannot be persisted.
def self.for(user, execution_environment)
runner = find_by(user: user, execution_environment: execution_environment)
if runner.nil?
runner = Runner.create(user: user, execution_environment: execution_environment)
# The `strategy` is added through the before_validation hook `:request_id`.
raise Runner::Error::Unknown.new("Runner could not be saved: #{runner.errors.inspect}") unless runner.persisted?
else
# This information is required but not persisted in the runner model.
runner.strategy = strategy_class.new(runner.runner_id, runner.execution_environment)
end
runner
end
# Copy the given files into the container, transparently requesting a
# fresh runner once when the management no longer knows the current one.
def copy_files(files)
@strategy.copy_files(files)
rescue Runner::Error::RunnerNotFound
request_new_id
save
@strategy.copy_files(files)
end
# Execute +command+ in the container, yielding the WebSocket to +block+.
# Returns the execution duration in seconds; on Runner::Error, stores the
# duration on the exception and re-raises it.
def attach_to_execution(command, &block)
starting_time = Time.zone.now
begin
# As the EventMachine reactor is probably shared with other threads, we cannot use EventMachine.run with
# stop_event_loop to wait for the WebSocket connection to terminate. Instead we use a self built event
# loop for that: Runner::EventLoop. The attach_to_execution method of the strategy is responsible for
# initializing its Runner::Connection with the given event loop. The Runner::Connection class ensures that
# this event loop is stopped after the socket was closed.
event_loop = Runner::EventLoop.new
socket = @strategy.attach_to_execution(command, event_loop, &block)
event_loop.wait
raise socket.error if socket.error.present?
rescue Runner::Error => e
e.execution_duration = Time.zone.now - starting_time
raise
end
Time.zone.now - starting_time # execution duration
end
# Run +command+, capturing stdout/stderr and exit status into a hash.
# Retries once with a fresh runner when the current one is unknown to the
# management; when +raise_exception+ is true, the last error is re-raised
# from the ensure block.
def execute_command(command, raise_exception: true)
output = {}
stdout = +''
stderr = +''
try = 0
begin
exit_code = 1 # default to error
execution_time = attach_to_execution(command) do |socket|
socket.on :stderr do |data|
stderr << data
end
socket.on :stdout do |data|
stdout << data
end
socket.on :exit do |received_exit_code|
exit_code = received_exit_code
end
end
output.merge!(container_execution_time: execution_time, status: exit_code.zero? ? :ok : :failed)
rescue Runner::Error::ExecutionTimeout => e
Rails.logger.debug { "Running command `#{command}` timed out: #{e.message}" }
output.merge!(status: :timeout, container_execution_time: e.execution_duration)
rescue Runner::Error::RunnerNotFound => e
Rails.logger.debug { "Running command `#{command}` failed for the first time: #{e.message}" }
try += 1
request_new_id
save
if try == 1
# Reset the variable. This is required to prevent raising an outdated exception after a successful second try
e = nil
retry
end
Rails.logger.debug { "Running command `#{command}` failed for the second time: #{e.message}" }
output.merge!(status: :failed, container_execution_time: e.execution_duration)
rescue Runner::Error => e
Rails.logger.debug { "Running command `#{command}` failed: #{e.message}" }
output.merge!(status: :failed, container_execution_time: e.execution_duration)
ensure
# We forward the exception if requested
raise e if raise_exception && defined?(e) && e.present?
output.merge!(stdout: stdout, stderr: stderr)
end
end
# Ask the management to destroy the remote container.
def destroy_at_management
@strategy.destroy_at_management
end
private
# before_validation hook: only fetch an id when none is present yet.
def request_id
request_new_id if runner_id.blank?
end
# Fetch a fresh runner id from the management and rebuild the strategy.
# On EnvironmentNotFound, tries to sync the environment and raises a more
# specific EnvironmentNotFound either way.
def request_new_id
strategy_class = self.class.strategy_class
begin
self.runner_id = strategy_class.request_from_management(execution_environment)
@strategy = strategy_class.new(runner_id, execution_environment)
rescue Runner::Error::EnvironmentNotFound
# Whenever the environment could not be found by the runner management, we
# try to synchronize it and then forward a more specific error to our callee.
if strategy_class.sync_environment(execution_environment)
raise Runner::Error::EnvironmentNotFound.new(
"The execution environment with id #{execution_environment.id} was not found yet by the runner management. "\
'It has been successfully synced now so that the next request should be successful.'
)
else
raise Runner::Error::EnvironmentNotFound.new(
"The execution environment with id #{execution_environment.id} was not found by the runner management."\
'In addition, it could not be synced so that this probably indicates a permanent error.'
)
end
end
end
end
|
# == Schema Information
#
# Table name: schools
#
# active :boolean default(FALSE)
# address :text
# calendar_area_id :bigint(8)
# calendar_id :bigint(8)
# created_at :datetime not null
# floor_area :decimal(, )
# id :bigint(8) not null, primary key
# level :integer default(0)
# met_office_area_id :bigint(8)
# name :string
# number_of_pupils :integer
# postcode :string
# sash_id :bigint(8)
# school_group_id :bigint(8)
# school_type :integer
# slug :string
# solar_irradiance_area_id :bigint(8)
# solar_pv_tuos_area_id :bigint(8)
# temperature_area_id :bigint(8)
# updated_at :datetime not null
# urn :integer not null
# weather_underground_area_id :bigint(8)
# website :string
#
# Indexes
#
# index_schools_on_calendar_id (calendar_id)
# index_schools_on_sash_id (sash_id)
# index_schools_on_school_group_id (school_group_id)
# index_schools_on_urn (urn) UNIQUE
#
# Foreign Keys
#
# fk_rails_... (calendar_id => calendars.id)
# fk_rails_... (school_group_id => school_groups.id)
#
# A school with energy meters, readings, activities and Merit gamification
# state; slugged via FriendlyId for URLs. See the schema comment above.
class School < ApplicationRecord
  include AmrUsage
  extend FriendlyId
  friendly_id :slug_candidates, use: [:finders, :slugged, :history]
  delegate :holiday_approaching?, to: :calendar

  include Merit::UsageCalculations
  has_merit

  has_and_belongs_to_many :key_stages, join_table: :school_key_stages
  has_many :users, dependent: :destroy
  has_many :meters, inverse_of: :school, dependent: :destroy
  has_many :school_times, inverse_of: :school, dependent: :destroy
  has_many :activities, inverse_of: :school, dependent: :destroy
  has_many :contacts, inverse_of: :school, dependent: :destroy
  has_many :alert_subscriptions, inverse_of: :school, dependent: :destroy
  has_many :alerts, inverse_of: :school, dependent: :destroy
  has_many :simulations, inverse_of: :school, dependent: :destroy
  has_many :amr_data_feed_readings, through: :meters
  has_many :amr_validated_readings, through: :meters
  has_many :alert_subscription_events, through: :alert_subscriptions

  belongs_to :calendar
  belongs_to :calendar_area
  belongs_to :weather_underground_area
  belongs_to :solar_pv_tuos_area
  belongs_to :school_group
  has_one :school_onboarding

  enum school_type: [:primary, :secondary, :special, :infant, :junior, :middle]

  scope :active, -> { where(active: true) }
  scope :inactive, -> { where(active: false) }
  scope :without_group, -> { where(school_group_id: nil) }

  validates_presence_of :urn, :name, :address, :postcode, :website
  validates_uniqueness_of :urn
  validates :floor_area, :number_of_pupils, numericality: { greater_than: 0, allow_blank: true }
  validates_associated :school_times, on: :school_time_update
  accepts_nested_attributes_for :school_times

  after_create :create_sash_relation
  auto_strip_attributes :name, :website, :postcode, squish: true

  # Regenerate the slug when it is missing or its source fields changed.
  def should_generate_new_friendly_id?
    slug.blank? || name_changed? || postcode_changed?
  end

  # Prevent the generated urls from becoming too long
  def normalize_friendly_id(string)
    super[0..59]
  end

  # Try building a slug based on the following fields in increasing order of specificity.
  def slug_candidates
    [
      :name,
      [:postcode, :name],
      [:urn, :name]
    ]
  end

  # Group name, or nil for schools without a group.
  # BUGFIX: previously raised NoMethodError for group-less schools, which
  # the scope :without_group shows can exist.
  def area_name
    school_group.name if school_group
  end

  def active_meters
    meters.where(active: true)
  end

  def meters_for_supply(supply)
    meters.where(meter_type: supply)
  end

  def meters?(supply = nil)
    meters_for_supply(supply).any?
  end

  # Meters (optionally limited to a supply) that have raw feed readings.
  def meters_with_readings(supply = Meter.meter_types.keys)
    meters.includes(:amr_data_feed_readings).where(meter_type: supply).where.not(amr_data_feed_readings: { meter_id: nil })
  end

  # Meters (optionally limited to a supply) that have validated readings.
  def meters_with_validated_readings(supply = Meter.meter_types.keys)
    meters.includes(:amr_validated_readings).where(meter_type: supply).where.not(amr_validated_readings: { meter_id: nil })
  end

  # Meters of +supply+ with more than +threshold+ validated readings.
  def meters_with_enough_validated_readings_for_analysis(supply, threshold = AmrValidatedMeterCollection::NUMBER_OF_READINGS_REQUIRED_FOR_ANALYTICS)
    meters.where(meter_type: supply).joins(:amr_validated_readings).group('amr_validated_readings.meter_id, meters.id').having('count(amr_validated_readings.meter_id) > ?', threshold)
  end

  def both_supplies?
    meters_with_readings(:electricity).any? && meters_with_readings(:gas).any?
  end

  def has_enough_readings_for_meter_types?(supply, threshold = AmrValidatedMeterCollection::NUMBER_OF_READINGS_REQUIRED_FOR_ANALYTICS)
    meters_with_enough_validated_readings_for_analysis(supply, threshold).any?
  end

  # :electric_and_gas, :electric_only, :gas_only or :none, based on which
  # supplies have any readings at all.
  def fuel_types
    if both_supplies?
      :electric_and_gas
    elsif meters_with_readings(:electricity).any?
      :electric_only
    elsif meters_with_readings(:gas).any?
      :gas_only
    else
      :none
    end
  end

  # Like #fuel_types, but only counts supplies with enough validated readings.
  def fuel_types_for_analysis(threshold = AmrValidatedMeterCollection::NUMBER_OF_READINGS_REQUIRED_FOR_ANALYTICS)
    if has_enough_readings_for_meter_types?(:gas, threshold) && has_enough_readings_for_meter_types?(:electricity, threshold)
      :electric_and_gas
    elsif has_enough_readings_for_meter_types?(:electricity, threshold)
      :electric_only
    elsif has_enough_readings_for_meter_types?(:gas, threshold)
      :gas_only
    else
      :none
    end
  end

  def has_badge?(id)
    sash.badge_ids.include?(id)
  end

  def alert_subscriptions?
    alert_subscriptions.any?
  end

  # Term containing today (PostgreSQL NOW()).
  def current_term
    calendar.terms.find_by('NOW()::DATE BETWEEN start_date AND end_date')
  end

  def last_term
    calendar.terms.find_by('end_date <= ?', current_term.start_date)
  end

  def number_of_active_meters
    meters.where(active: true).count
  end

  # One validated reading per active meter per day.
  def expected_readings_for_a_week
    7 * number_of_active_meters
  end

  # True when every active meter has a validated reading for each day of the
  # most recent full Friday-to-Friday week.
  def has_last_full_week_of_readings?
    previous_friday = Time.zone.today.prev_occurring(:friday)
    start_of_window = previous_friday - 1.week
    end_of_window = previous_friday
    actual_readings = amr_validated_readings.where('reading_date > ? and reading_date <= ?', start_of_window, end_of_window).count
    actual_readings == expected_readings_for_a_week
  end

  def badges_by_date(order: :desc, limit: nil)
    sash.badges_sashes.order(created_at: order)
        .limit(limit)
        .map(&:badge)
  end

  # Merit points earned since +since+.
  # Uses a bound parameter rather than interpolating the timestamp into SQL.
  def points_since(since = 1.month.ago)
    self.score_points.where('created_at > ?', since).sum(:num_points)
  end

  def school_admin
    users.where(role: :school_admin)
  end

  def scoreboard
    school_group.scoreboard if school_group
  end

  # 1-based position on the group scoreboard.
  # NOTE(review): raises when #scoreboard is nil (no school group) — confirm
  # callers guard for that before adding a nil check here.
  def scoreboard_position
    scoreboard.position(self) + 1
  end

  private

  # Create Merit::Sash relation
  # Having the sash relation makes life easier elsewhere
  def create_sash_relation
    badges
  end
end
Add nil check for school group in School#area_name so group-less schools return nil instead of raising
# == Schema Information
#
# Table name: schools
#
# active :boolean default(FALSE)
# address :text
# calendar_area_id :bigint(8)
# calendar_id :bigint(8)
# created_at :datetime not null
# floor_area :decimal(, )
# id :bigint(8) not null, primary key
# level :integer default(0)
# met_office_area_id :bigint(8)
# name :string
# number_of_pupils :integer
# postcode :string
# sash_id :bigint(8)
# school_group_id :bigint(8)
# school_type :integer
# slug :string
# solar_irradiance_area_id :bigint(8)
# solar_pv_tuos_area_id :bigint(8)
# temperature_area_id :bigint(8)
# updated_at :datetime not null
# urn :integer not null
# weather_underground_area_id :bigint(8)
# website :string
#
# Indexes
#
# index_schools_on_calendar_id (calendar_id)
# index_schools_on_sash_id (sash_id)
# index_schools_on_school_group_id (school_group_id)
# index_schools_on_urn (urn) UNIQUE
#
# Foreign Keys
#
# fk_rails_... (calendar_id => calendars.id)
# fk_rails_... (school_group_id => school_groups.id)
#
# A school with energy meters, readings, activities and Merit gamification
# state; slugged via FriendlyId for URLs. See the schema comment above.
class School < ApplicationRecord
include AmrUsage
extend FriendlyId
friendly_id :slug_candidates, use: [:finders, :slugged, :history]
delegate :holiday_approaching?, to: :calendar
include Merit::UsageCalculations
has_merit
has_and_belongs_to_many :key_stages, join_table: :school_key_stages
has_many :users, dependent: :destroy
has_many :meters, inverse_of: :school, dependent: :destroy
has_many :school_times, inverse_of: :school, dependent: :destroy
has_many :activities, inverse_of: :school, dependent: :destroy
has_many :contacts, inverse_of: :school, dependent: :destroy
has_many :alert_subscriptions, inverse_of: :school, dependent: :destroy
has_many :alerts, inverse_of: :school, dependent: :destroy
has_many :simulations, inverse_of: :school, dependent: :destroy
has_many :amr_data_feed_readings, through: :meters
has_many :amr_validated_readings, through: :meters
has_many :alert_subscription_events, through: :alert_subscriptions
belongs_to :calendar
belongs_to :calendar_area
belongs_to :weather_underground_area
belongs_to :solar_pv_tuos_area
belongs_to :school_group
has_one :school_onboarding
enum school_type: [:primary, :secondary, :special, :infant, :junior, :middle]
scope :active, -> { where(active: true) }
scope :inactive, -> { where(active: false) }
scope :without_group, -> { where(school_group_id: nil) }
validates_presence_of :urn, :name, :address, :postcode, :website
validates_uniqueness_of :urn
validates :floor_area, :number_of_pupils, numericality: { greater_than: 0, allow_blank: true }
validates_associated :school_times, on: :school_time_update
accepts_nested_attributes_for :school_times
after_create :create_sash_relation
auto_strip_attributes :name, :website, :postcode, squish: true
# Regenerate the slug when it is missing or its source fields changed.
def should_generate_new_friendly_id?
slug.blank? || name_changed? || postcode_changed?
end
# Prevent the generated urls from becoming too long
def normalize_friendly_id(string)
super[0..59]
end
# Try building a slug based on the following fields in increasing order of specificity.
def slug_candidates
[
:name,
[:postcode, :name],
[:urn, :name]
]
end
# Group name, or nil for schools without a group (see scope :without_group).
def area_name
school_group.name if school_group
end
def active_meters
meters.where(active: true)
end
def meters_for_supply(supply)
meters.where(meter_type: supply)
end
def meters?(supply = nil)
meters_for_supply(supply).any?
end
# Meters (optionally limited to a supply) that have raw feed readings.
def meters_with_readings(supply = Meter.meter_types.keys)
meters.includes(:amr_data_feed_readings).where(meter_type: supply).where.not(amr_data_feed_readings: { meter_id: nil })
end
# Meters (optionally limited to a supply) that have validated readings.
def meters_with_validated_readings(supply = Meter.meter_types.keys)
meters.includes(:amr_validated_readings).where(meter_type: supply).where.not(amr_validated_readings: { meter_id: nil })
end
# Meters of +supply+ with more than +threshold+ validated readings.
def meters_with_enough_validated_readings_for_analysis(supply, threshold = AmrValidatedMeterCollection::NUMBER_OF_READINGS_REQUIRED_FOR_ANALYTICS)
meters.where(meter_type: supply).joins(:amr_validated_readings).group('amr_validated_readings.meter_id, meters.id').having('count(amr_validated_readings.meter_id) > ?', threshold)
end
def both_supplies?
meters_with_readings(:electricity).any? && meters_with_readings(:gas).any?
end
def has_enough_readings_for_meter_types?(supply, threshold = AmrValidatedMeterCollection::NUMBER_OF_READINGS_REQUIRED_FOR_ANALYTICS)
meters_with_enough_validated_readings_for_analysis(supply, threshold).any?
end
# :electric_and_gas, :electric_only, :gas_only or :none, based on which
# supplies have any readings at all.
def fuel_types
if both_supplies?
:electric_and_gas
elsif meters_with_readings(:electricity).any?
:electric_only
elsif meters_with_readings(:gas).any?
:gas_only
else
:none
end
end
# Like #fuel_types, but only counts supplies with enough validated readings.
def fuel_types_for_analysis(threshold = AmrValidatedMeterCollection::NUMBER_OF_READINGS_REQUIRED_FOR_ANALYTICS)
if has_enough_readings_for_meter_types?(:gas, threshold) && has_enough_readings_for_meter_types?(:electricity, threshold)
:electric_and_gas
elsif has_enough_readings_for_meter_types?(:electricity, threshold)
:electric_only
elsif has_enough_readings_for_meter_types?(:gas, threshold)
:gas_only
else
:none
end
end
def has_badge?(id)
sash.badge_ids.include?(id)
end
def alert_subscriptions?
alert_subscriptions.any?
end
# Term containing today (PostgreSQL NOW()).
def current_term
calendar.terms.find_by('NOW()::DATE BETWEEN start_date AND end_date')
end
def last_term
calendar.terms.find_by('end_date <= ?', current_term.start_date)
end
def number_of_active_meters
meters.where(active: true).count
end
# One validated reading per active meter per day.
def expected_readings_for_a_week
7 * number_of_active_meters
end
# True when every active meter has a validated reading for each day of the
# most recent full Friday-to-Friday week.
def has_last_full_week_of_readings?
previous_friday = Time.zone.today.prev_occurring(:friday)
start_of_window = previous_friday - 1.week
end_of_window = previous_friday
actual_readings = amr_validated_readings.where('reading_date > ? and reading_date <= ?', start_of_window, end_of_window).count
actual_readings == expected_readings_for_a_week
end
def badges_by_date(order: :desc, limit: nil)
sash.badges_sashes.order(created_at: order)
.limit(limit)
.map(&:badge)
end
# Merit points earned since +since+.
# NOTE(review): interpolates the timestamp straight into SQL; prefer a
# bound parameter (where('created_at > ?', since)).
def points_since(since = 1.month.ago)
self.score_points.where("created_at > '#{since}'").sum(:num_points)
end
def school_admin
users.where(role: :school_admin)
end
def scoreboard
school_group.scoreboard if school_group
end
# 1-based position on the group scoreboard.
# NOTE(review): raises when #scoreboard is nil (no school group) — confirm
# callers guard for that.
def scoreboard_position
scoreboard.position(self) + 1
end
private
# Create Merit::Sash relation
# Having the sash relation makes life easier elsewhere
def create_sash_relation
badges
end
end
|
# Code based on http://developer.yahoo.com/boss/search/boss_api_guide/codeexamples.html#oauth_ruby
require_relative './oauth_util.rb'
require 'net/http'
require 'uri'
require 'json'
# Thin client for the Yahoo BOSS "limitedweb" search API, OAuth-signed via
# OauthUtil and restricted to the configured Site names.
class Search
  def initialize
    @format = "json"
    @count = "20"
    @search_bucket = "limitedweb"
    @search_url = "http://yboss.yahooapis.com/ysearch/#{@search_bucket}?"
  end

  # Search all Site names for +search_phrase+ and return the parsed BOSS
  # results array.
  # NOTE(review): URI.encode is obsolete (removed in Ruby 3); consider
  # URI.encode_www_form_component here.
  def query(search_phrase)
    site_names = Site.all.map { |s| s.name }
    sites_to_search = URI.encode(site_names.join(","))
    search_params = { "format" => @format, "count" => @count,
      "sites" => sites_to_search, "q" => URI.encode(search_phrase) }
    JSON.parse(search_against_boss(search_params), symbolize_names: true)[:bossresponse][:limitedweb][:results]
  end

  # Build the signed BOSS request URL from +args+ and return the raw
  # response body.
  # BUGFIX: the previous hand-rolled loop used `++arg_count`, which in Ruby
  # is a double unary plus (a no-op), so the counter never advanced and the
  # trailing "&" was never stripped from the URL. Join the pairs instead.
  def search_against_boss(args)
    query_string = args.map { |key, value| "#{key}=#{value}" }.join("&")
    parsed_url = URI.parse(@search_url + query_string)
    o = OauthUtil.new
    o.consumer_key = ENV['BOSS_CONSUMER_KEY']
    o.consumer_secret = ENV['BOSS_CONSUMER_SECRET']
    Net::HTTP.start(parsed_url.host) do |http|
      req = Net::HTTP::Get.new "#{ parsed_url.path }?#{ o.sign(parsed_url).query_string }"
      response = http.request(req)
      return response.read_body
    end
  end
end
Debugging: log the raw BOSS search response before JSON parsing
# Code based on http://developer.yahoo.com/boss/search/boss_api_guide/codeexamples.html#oauth_ruby
require_relative './oauth_util.rb'
require 'net/http'
require 'uri'
require 'json'
# Thin client for the Yahoo BOSS "limitedweb" search API, OAuth-signed via
# OauthUtil and restricted to the configured Site names.
class Search
  def initialize
    @format = "json"
    @count = "20"
    @search_bucket = "limitedweb"
    @search_url = "http://yboss.yahooapis.com/ysearch/#{@search_bucket}?"
  end

  # Search all Site names for +search_phrase+ and return the parsed BOSS
  # results array. Logs the raw response (debugging aid) before parsing.
  # NOTE(review): URI.encode is obsolete (removed in Ruby 3); consider
  # URI.encode_www_form_component here.
  def query(search_phrase)
    site_names = Site.all.map { |s| s.name }
    sites_to_search = URI.encode(site_names.join(","))
    search_params = { "format" => @format, "count" => @count,
      "sites" => sites_to_search, "q" => URI.encode(search_phrase) }
    response = search_against_boss(search_params)
    Rails.logger.error response.inspect
    JSON.parse(response, symbolize_names: true)[:bossresponse][:limitedweb][:results]
  end

  # Build the signed BOSS request URL from +args+ and return the raw
  # response body.
  # BUGFIX: the previous hand-rolled loop used `++arg_count`, which in Ruby
  # is a double unary plus (a no-op), so the counter never advanced and the
  # trailing "&" was never stripped from the URL. Join the pairs instead.
  def search_against_boss(args)
    query_string = args.map { |key, value| "#{key}=#{value}" }.join("&")
    parsed_url = URI.parse(@search_url + query_string)
    o = OauthUtil.new
    o.consumer_key = ENV['BOSS_CONSUMER_KEY']
    o.consumer_secret = ENV['BOSS_CONSUMER_SECRET']
    Net::HTTP.start(parsed_url.host) do |http|
      req = Net::HTTP::Get.new "#{ parsed_url.path }?#{ o.sign(parsed_url).query_string }"
      response = http.request(req)
      return response.read_body
    end
  end
end
|
class Search
FIELD_GIST_KEY = %r{^field_\d+_gist$}
RESULTS_COUNT_MIN = 55
SUBSTRING_MIN = 3
def column_all_gather_ids(params)
columns = columns_w_values(params)
return if columns.empty?
column_all_agree(columns, params)
end
def custom_all_gather_ids(params)
hsh = params_custom_w_values(params)
return if hsh.empty?
custom_all_agree(hsh)
end
def all_agree_ids_for_find(params)
column_ids = column_all_gather_ids(params)
return [] if column_ids.try(:empty?)
custom_ids = custom_all_gather_ids(params)
return [] if custom_ids.try(:empty?)
if custom_ids
column_ids ? column_ids & custom_ids : custom_ids
else
column_ids ? column_ids : []
end
end
def column_any_gather_ids(params)
columns = columns_w_values(params)
return [] if columns.empty?
column_any_agree(columns, params)
end
def custom_any_gather_ids(params)
hsh = params_custom_w_values(params)
return [] if hsh.empty?
custom_any_agree(hsh)
end
def column_substring_gather_ids(params)
columns = columns_w_substring_values(params)
return [] if columns.empty?
column_substring_agree(columns, params)
end
def custom_substring_gather_ids(params)
[] # stub
end
# return a hash of {parent_id: agree_frequency} pairs
def parent_distribution(ids)
return {} if ids.empty?
ids.inject(Hash.new 0) do |hsh, i|
hsh[i] += 1
hsh
end
end
def any_agree_ids(params, all_agree_ids)
column_any_gather_ids(params) + custom_any_gather_ids(params) - all_agree_ids
end
def substring_agree_ids(params, all_agree_ids, any_agree_ids)
column_substring_gather_ids(params) + custom_substring_gather_ids(params) - all_agree_ids - any_agree_ids
end
# return an array of [agree_frequency, [parent_ids]] pairs, ordered by frequency descending
def ids_grouped_by_agree_frequency(parent_distribution_hsh)
return [] if parent_distribution_hsh.empty?
hsh = {}
parent_distribution_hsh.each { |k, v| hsh[v] ? (hsh[v] << k) : hsh[v] = [k] }
hsh.sort.reverse
end
def grouped_result_ids(params)
all_ids = all_agree_ids_for_find(params)
return [all_ids] unless all_agree_ids_few?(all_ids)
any_agree_hsh = parent_distribution(any_agree_ids params, all_ids)
grouped_ids = [all_ids, ids_grouped_by_agree_frequency(any_agree_hsh)]
any_ids = any_agree_hsh.keys
return grouped_ids unless any_agree_ids_few?(all_ids, any_ids)
substring_agree_hsh = parent_distribution(substring_agree_ids params, all_ids, any_ids)
grouped_ids + ids_grouped_by_agree_frequency(substring_agree_hsh)
end
def results_united(params)
grouped_ids = grouped_result_ids(params)
all_agree_locations(grouped_ids[0]) + any_agree_locations(grouped_ids[1])
end
private
def columns_w_values(params)
columns_searchable.delete_if { |sym| params[sym.id2name].blank? }
end
def columns_w_substring_values(params)
columns_searchable.delete_if do |sym|
v = params[sym.id2name]
v.blank? || v.length < SUBSTRING_MIN
end
end
def params_custom_w_values(params)
params.select { |k, v| k =~ FIELD_GIST_KEY unless v.blank? }
end
def custom_all_agree(hsh)
ids = nil
hsh.each do |k, v|
o = CustomField.find(k.split('_')[1])
value_ids = o.parents_find_by_gist(v)
ids = ids ? value_ids & ids : value_ids
return [] if ids.empty?
end
ids
end
# Concatenate parent ids over every custom-field criterion. Duplicates are
# kept on purpose so agreement frequency can be counted later.
def custom_any_agree(hsh)
  hsh.inject([]) do |ids, (field_key, value)|
    ids + CustomField.find(field_key.split('_')[1]).parents_find_by_gist(value)
  end
end
# True while the fully-agreeing result set is still below the target size.
def all_agree_ids_few?(all_agree_ids)
  all_agree_ids.size < RESULTS_COUNT_MIN
end
# True while exact plus partial matches together are below the target size.
def any_agree_ids_few?(all_agree_ids, any_agree_ids)
  all_agree_ids.size + any_agree_ids.size < RESULTS_COUNT_MIN
end
end
Search remove explicit returns
# Multi-tier search over "parent" records: exact matches (all criteria
# agree), partial matches (some criteria agree) and substring matches,
# widened tier by tier until RESULTS_COUNT_MIN ids are collected.
#
# NOTE(review): columns_searchable, column_*_agree and *_agree_locations
# are not defined in this class — presumably provided by the host code.
class Search
  # Matches params keys addressing a custom field, e.g. "field_12_gist".
  FIELD_GIST_KEY = %r{^field_\d+_gist$}
  # Stop widening the search once this many ids have been gathered.
  RESULTS_COUNT_MIN = 55
  # Minimum search-term length for substring matching.
  SUBSTRING_MIN = 3

  # Ids agreeing with ALL column criteria; nil when no column has a value.
  def column_all_gather_ids(params)
    columns = columns_w_values(params)
    column_all_agree(columns, params) unless columns.empty?
  end

  # Ids agreeing with ALL custom-field criteria; nil when none are given.
  def custom_all_gather_ids(params)
    hsh = params_custom_w_values(params)
    custom_all_agree(hsh) unless hsh.empty?
  end

  # Intersection of column and custom-field exact matches. nil from either
  # gatherer means "no criteria of that kind"; an empty array means
  # "criteria given but nothing matched", which forces an empty result.
  def all_agree_ids_for_find(params)
    column_ids = column_all_gather_ids(params)
    return [] if column_ids.try(:empty?)
    custom_ids = custom_all_gather_ids(params)
    return [] if custom_ids.try(:empty?)
    if custom_ids
      column_ids ? column_ids & custom_ids : custom_ids
    else
      column_ids ? column_ids : []
    end
  end

  # Ids matching at least one column criterion (duplicates preserved).
  def column_any_gather_ids(params)
    columns = columns_w_values(params)
    columns.empty? ? [] : column_any_agree(columns, params)
  end

  # Ids matching at least one custom-field criterion (duplicates preserved).
  def custom_any_gather_ids(params)
    hsh = params_custom_w_values(params)
    hsh.empty? ? [] : custom_any_agree(hsh)
  end

  # Ids matching column values by substring comparison.
  def column_substring_gather_ids(params)
    columns = columns_w_substring_values(params)
    return [] if columns.empty?
    column_substring_agree(columns, params)
  end

  # Custom-field substring search is not implemented yet.
  def custom_substring_gather_ids(params)
    [] # stub
  end

  # return a hash of {parent_id: agree_frequency} pairs
  def parent_distribution(ids)
    return {} if ids.empty?
    ids.inject(Hash.new 0) do |hsh, i|
      hsh[i] += 1
      hsh
    end
  end

  # Partial matches, minus ids that already matched every criterion.
  def any_agree_ids(params, all_agree_ids)
    column_any_gather_ids(params) + custom_any_gather_ids(params) - all_agree_ids
  end

  # Substring matches, minus ids already found by the stricter passes.
  def substring_agree_ids(params, all_agree_ids, any_agree_ids)
    column_substring_gather_ids(params) + custom_substring_gather_ids(params) - all_agree_ids - any_agree_ids
  end

  # return an array of [agree_frequency, [parent_ids]] pairs, ordered by frequency descending
  def ids_grouped_by_agree_frequency(parent_distribution_hsh)
    return [] if parent_distribution_hsh.empty?
    hsh = {}
    parent_distribution_hsh.each { |k, v| hsh[v] ? (hsh[v] << k) : hsh[v] = [k] }
    hsh.sort.reverse
  end

  # Collect ids tier by tier (exact, partial, substring); each further tier
  # is only computed while fewer than RESULTS_COUNT_MIN ids were found.
  # NOTE(review): the return shape varies ([all_ids] vs [all_ids, groups]
  # vs [all_ids, groups, *substring_groups]) — see results_united.
  def grouped_result_ids(params)
    all_ids = all_agree_ids_for_find(params)
    return [all_ids] unless all_agree_ids_few?(all_ids)
    any_agree_hsh = parent_distribution(any_agree_ids params, all_ids)
    grouped_ids = [all_ids, ids_grouped_by_agree_frequency(any_agree_hsh)]
    any_ids = any_agree_hsh.keys
    return grouped_ids unless any_agree_ids_few?(all_ids, any_ids)
    substring_agree_hsh = parent_distribution(substring_agree_ids params, all_ids, any_ids)
    grouped_ids + ids_grouped_by_agree_frequency(substring_agree_hsh)
  end

  # Exact-match locations first, then partial-match locations.
  # NOTE(review): grouped_ids[1] is nil when only the exact tier ran, and
  # substring tiers at index 2+ are ignored — confirm this is intended.
  def results_united(params)
    grouped_ids = grouped_result_ids(params)
    all_agree_locations(grouped_ids[0]) + any_agree_locations(grouped_ids[1])
  end

  private

  # Searchable columns that received a non-blank value.
  # NOTE(review): delete_if mutates the array returned by columns_searchable.
  def columns_w_values(params)
    columns_searchable.delete_if { |sym| params[sym.id2name].blank? }
  end

  # Searchable columns whose value is long enough for substring search.
  def columns_w_substring_values(params)
    columns_searchable.delete_if do |sym|
      v = params[sym.id2name]
      v.blank? || v.length < SUBSTRING_MIN
    end
  end

  # Params entries addressing a custom field with a non-blank value.
  def params_custom_w_values(params)
    params.select { |k, v| k =~ FIELD_GIST_KEY unless v.blank? }
  end

  # Intersect parent ids over every custom-field criterion; bail out with
  # [] as soon as the running intersection becomes empty.
  def custom_all_agree(hsh)
    ids = nil
    hsh.each do |k, v|
      o = CustomField.find(k.split('_')[1])
      value_ids = o.parents_find_by_gist(v)
      ids = ids ? value_ids & ids : value_ids
      return [] if ids.empty?
    end
    ids
  end

  # Concatenate parent ids over every custom-field criterion (duplicates
  # kept so agreement frequency can be counted later).
  def custom_any_agree(hsh)
    ids = []
    hsh.each do |k, v|
      o = CustomField.find(k.split('_')[1])
      ids = ids + o.parents_find_by_gist(v)
    end
    ids
  end

  # True while the exact-match tier is below the target result count.
  def all_agree_ids_few?(all_agree_ids)
    all_agree_ids.length < RESULTS_COUNT_MIN
  end

  # True while exact plus partial tiers together are below the target count.
  def any_agree_ids_few?(all_agree_ids, any_agree_ids)
    all_agree_ids.length + any_agree_ids.length < RESULTS_COUNT_MIN
  end
end
|
# A young job seeker account (Devise-authenticatable by +login+). Seekers
# are mirrored to the external "jugendinfo" service on create/update/destroy
# and receive chat messages on registration and activation.
class Seeker < ActiveRecord::Base
  # NOTE(review): require inside the class body works but is unconventional;
  # gem requires normally live at the top of the file.
  require 'rest-client'

  devise :database_authenticatable, :registerable, authentication_keys: [:login]
  # include ConfirmToggle

  enum status: {inactive: 1, active: 2, completed: 3}

  has_and_belongs_to_many :work_categories
  has_and_belongs_to_many :certificates
  has_many :allocations, dependent: :destroy
  has_many :assignments, dependent: :destroy
  has_many :access_tokens, as: :userable, dependent: :destroy
  # has_many :notes
  has_many :jobs, through: :allocations
  has_many :todos, dependent: :destroy
  belongs_to :place, inverse_of: :seekers
  belongs_to :organization

  # Form-only attributes: a note typed by a broker and that broker's id,
  # persisted via the add_new_note callback after save.
  attr_accessor :new_note
  attr_accessor :current_broker_id

  validates :login, presence: true, uniqueness: true
  validates :firstname, :lastname, presence: true
  validates :street, :place, presence: true
  validates :organization, presence: true
  validates :mobile, phony_plausible: true, presence: true, uniqueness: true
  validates :date_of_birth, presence: true
  validates :sex, inclusion: {in: lambda {|m| m.sex_enum}}
  validates :contact_preference, inclusion: {in: lambda {|m| m.contact_preference_enum}}
  validates :contact_availability, presence: true, if: lambda {%w(phone mobile).include?(self.contact_preference)}

  # Phone numbers are normalized to Swiss international format.
  phony_normalize :phone, default_country_code: 'CH'
  phony_normalize :mobile, default_country_code: 'CH'

  validate :ensure_seeker_age
  validate :unique_email

  # External sync + housekeeping callbacks.
  # NOTE(review): update_last_message and update_messages_count call the
  # external messaging service on every save.
  after_create :send_create_to_jugendinfo
  after_update :send_update_to_jugendinfo
  after_destroy :send_delete_to_jugendinfo
  after_destroy :delete_access_tokens
  after_save :adjust_todo
  after_create :send_welcome_message
  before_save :send_activation_message, if: proc {|s| s.status_changed? && s.active?}
  after_save :add_new_note
  before_save :update_last_message
  before_save :update_messages_count
  before_save :generate_agreement_id

  # NOTE(review): DEV and LIVE are identical and both point at the staging
  # host — production pointing at staging looks unintentional; confirm.
  DEV = 'https://admin.staging.jugendarbeit.digital/api/ji/jobboard/ping/user'
  LIVE = 'https://admin.staging.jugendarbeit.digital/api/ji/jobboard/ping/user'
  CURRENT_LINK = Rails.env.production? ? LIVE : DEV

  # Adds new note to the database if it's present
  #
  def add_new_note
    return unless new_note.present?
    Note.create!(seeker_id: id, broker_id: current_broker_id, message: new_note)
  end

  # Creates new todos based on todotypes after saving seeker.
  # Rebuilds from scratch: destroys existing seeker todos, then creates one
  # per Todotype whose condition matches this record.
  # NOTE(review): todotype.where is interpolated into a SQL condition string
  # — confirm it is trusted content, not user input.
  def adjust_todo
    logger.info "Removing existing todos"
    Todo.where(record_type: :seeker, record_id: id).find_each &:destroy!
    logger.info "Creating new todos"
    Todotype.seeker.find_each do |todotype|
      begin
        result = Seeker.find_by(todotype.where + " AND id = #{id}")
        logger.info "Result: #{result}"
        unless result.nil?
          todo = Todo.create(record_id: id, record_type: :seeker, todotype: todotype, seeker_id: id)
          logger.info "Todo: #{todo}"
        end
      rescue
        # Best effort per todotype: a bad condition string must not abort the save.
        logger.info "Error creating todo: #{$!}"
      end
    end
  end

  # Updates last message for seeker.
  # 'datetime' is a Unix-epoch string; it is rendered in the Warsaw zone.
  def update_last_message
    logger.info "App user id is #{self.app_user_id}"
    return if self.app_user_id.nil?
    message = MessagingHelper.get_last_message(self.app_user_id)
    logger.info "Last message is #{message}"
    return if message.nil?
    self.last_message_date = DateTime.strptime(message['datetime'], '%s').in_time_zone('Warsaw')
    logger.info "last_message_date is #{self.last_message_date}"
    self.last_message_sent_from_seeker = message['from_ji_user_id'] == self.app_user_id.to_s
    logger.info "last_message_sent_from_seeker is #{self.last_message_sent_from_seeker}"
    self.last_message_seen = message['seen'] == '1'
    logger.info "last_message_seen is #{self.last_message_seen}"
  end

  # Updates count of the messages
  #
  def update_messages_count
    return if self.app_user_id.nil?
    self.messages_count = MessagingHelper.get_messages_count(self.app_user_id)
  end

  # Check if there is no provider or broker with the same email.
  # NOTE(review): `email.nil?` is redundant — blank? already covers nil.
  def unique_email
    return if email.blank? || email.nil?
    provider = Provider.find_by(email: email)
    broker = Broker.find_by(email: email)
    if !provider.nil? || !broker.nil?
      errors.add(:email, :email_not_unique)
    end
  end

  # validate :ensure_work_category
  # validates_acceptance_of :terms, allow_nil: false, on: :create
  # after_save :send_agreement_email, if: proc { |s| s.confirmed_at_changed? && s.confirmed_at_was.nil? }
  # after_save :send_registration_email, if: proc { |s| s.confirmed_at_changed? && s.confirmed_at_was.nil? }

  # Returns the display name
  #
  # @return [String] the name
  #
  def name
    "#{ firstname } #{ lastname }"
  end

  # Available options for the sex
  #
  # @return [Array<String>] the possible seeker sex
  #
  def sex_enum
    %w(male female other)
  end

  # Available options for the contact preference
  #
  # @return [Array<String>] the possible seeker contact preferences
  #
  def contact_preference_enum
    %w(email phone mobile postal whatsapp)
  end

  # @!group Devise

  # Check if the user can log in
  #
  # @return [Boolean] the status
  #
  def active_for_authentication?
    super && active?
  end

  # Return the I18n message key when authentication fails
  #
  # @return [Symbol] the i18n key
  #
  def unauthenticated_message
    # confirmed? ? :inactive : :unconfirmed
    :inactive
  end

  # @!endgroup

  private

  # NOTE(review): also covered by `dependent: :destroy` on the association.
  def delete_access_tokens
    access_tokens.destroy_all
  end

  # Validate the job seeker age (must be between 13 and 26 years old).
  #
  # @return [Boolean] validation status
  #
  def ensure_seeker_age
    if date_of_birth.present? && !date_of_birth.between?(26.years.ago, 13.years.ago)
      errors.add(:date_of_birth, :invalid_seeker_age)
    end
  end

  # Ensure a seeker has at least one work category selected
  #
  # @return [Boolean] validation status
  #
  def ensure_work_category
    if work_categories.empty?
      errors.add(:work_categories, :invalid_work_category)
    end
  end

  # Send the seeker a welcome email
  # with the agreement pdf to sign and return.
  #
  def send_agreement_email
    Notifier.send_agreement_for_seeker(self).deliver
  end

  # Send the broker an email about the new seeker.
  #
  def send_registration_email
    Notifier.new_seeker_signup_for_broker(self).deliver
  end

  # Serialized representation sent to the jugendinfo API.
  def jugendinfo_data
    ApiHelper::seeker_to_json(self)
  end

  # Make post request to jugendinfo API.
  # NOTE(review): the bearer token is hard-coded in source — move it to
  # credentials/ENV. The bare rescue makes the sync best-effort.
  def send_to_jugendinfo(method)
    begin
      logger.info "Sending changes to jugendinfo"
      logger.info "Sending: #{jugendinfo_data}"
      response = RestClient.post CURRENT_LINK, {operation: method, data: jugendinfo_data}, {Authorization: "Bearer ob7jwke6axsaaygrcin54er1n7xoou6e3n1xduwm"}
      logger.info "Response from jugendinfo: #{response}"
    rescue
      logger.info "Failed sending changes to jugendinfo"
      nil
    end
  end

  # Make post request to jugendinfo API
  #
  def send_update_to_jugendinfo
    send_to_jugendinfo("UPDATE")
  end

  # Make post request to jugendinfo API
  #
  def send_create_to_jugendinfo
    send_to_jugendinfo("CREATE")
  end

  # Make post request to jugendinfo API
  #
  def send_delete_to_jugendinfo
    send_to_jugendinfo("DELETE")
  end

  # Sends welcome message through chat to new seeker.
  # NOTE(review): assumes the organization has at least one region and one
  # broker — raises inside an after_create callback otherwise; confirm.
  def send_welcome_message
    title = 'Willkommen'
    host = "#{self.organization.regions.first.subdomain}.smalljobs.ch"
    seeker_agreement_link = "#{(Rails.application.routes.url_helpers.root_url(subdomain: self.organization.regions.first.subdomain, host: host, protocol: 'https'))}/broker/seekers/#{self.id}/agreement"
    message = Mustache.render(self.organization.welcome_chat_register_msg || '', organization_name: self.organization.name, organization_street: self.organization.street, organization_zip: self.organization.place.zip, organization_place: self.organization.place.name, organization_phone: self.organization.phone, organization_email: self.organization.email, seeker_first_name: self.firstname, seeker_last_name: self.lastname, broker_first_name: self.organization.brokers.first.firstname, broker_last_name: self.organization.brokers.first.lastname, seeker_link_to_agreement: "<a file type='application/pdf' title='Elterneinverstรคndnis herunterladen' href='#{seeker_agreement_link}'>#{seeker_agreement_link}</a>", link_to_jobboard_list: (Rails.application.routes.url_helpers.root_url(subdomain: self.organization.regions.first.subdomain, host: host)))
    logger.info "Welcome message: #{message}"
    MessagingHelper::send_message(title, message, self.app_user_id, self.organization.email)
  end

  # Sends activation message through chat after seeker is activated
  #
  def send_activation_message
    title = 'Willkommen'
    host = "#{self.organization.regions.first.subdomain}.smalljobs.ch"
    seeker_agreement_link = "#{(Rails.application.routes.url_helpers.root_url(subdomain: self.organization.regions.first.subdomain, host: host, protocol: 'https'))}/broker/seekers/#{self.id}/agreement"
    message = Mustache.render(self.organization.activation_msg || '', organization_name: self.organization.name, organization_street: self.organization.street, organization_zip: self.organization.place.zip, organization_place: self.organization.place.name, organization_phone: self.organization.phone, organization_email: self.organization.email, seeker_first_name: self.firstname, seeker_last_name: self.lastname, broker_first_name: self.organization.brokers.first.firstname, broker_last_name: self.organization.brokers.first.lastname, seeker_link_to_agreement: "<a file type='application/pdf' title='Elterneinverstรคndnis herunterladen' href='#{seeker_agreement_link}'>#{seeker_agreement_link}</a>", link_to_jobboard_list: (Rails.application.routes.url_helpers.root_url(subdomain: self.organization.regions.first.subdomain, host: host)))
    MessagingHelper::send_message(title, message, self.app_user_id, self.organization.email)
  end

  public

  # Status label for statistics: "finished" for completed seekers,
  # "active" otherwise (note: inactive seekers also report "active").
  def stat_name
    if completed?
      return "finished"
    end
    return "active"
  end

  # before_save hook: assign a one-time UUID for the agreement document.
  def generate_agreement_id
    self.agreement_id = SecureRandom.uuid if self.agreement_id.nil?
  end
end
return to production version
# A young job seeker account (Devise-authenticatable by +login+). Every
# save is mirrored to the external "jugendinfo" service; seekers receive
# chat messages on registration and activation.
#
# Fixes in this revision:
# * send_to_jugendinfo is registered directly as an after_save callback,
#   which invokes it with NO arguments, but the method required one —
#   every save raised ArgumentError. The parameter now defaults to nil
#   (the legacy CREATE/UPDATE/DELETE wrappers still pass it).
# * The POST used the undefined method `current_link` instead of the
#   CURRENT_LINK constant, so the sync always raised NameError that was
#   silently swallowed by the rescue.
class Seeker < ActiveRecord::Base
  # NOTE(review): require inside the class body works but is unconventional;
  # gem requires normally live at the top of the file.
  require 'rest-client'

  devise :database_authenticatable, :registerable, authentication_keys: [:login]
  # include ConfirmToggle

  enum status: {inactive: 1, active: 2, completed: 3}

  has_and_belongs_to_many :work_categories
  has_and_belongs_to_many :certificates
  has_many :allocations, dependent: :destroy
  has_many :assignments, dependent: :destroy
  has_many :access_tokens, as: :userable, dependent: :destroy
  # has_many :notes
  has_many :jobs, through: :allocations
  has_many :todos, dependent: :destroy
  belongs_to :place, inverse_of: :seekers
  belongs_to :organization

  # Form-only attributes: a note typed by a broker and that broker's id,
  # persisted via the add_new_note callback after save.
  attr_accessor :new_note
  attr_accessor :current_broker_id

  validates :login, presence: true, uniqueness: true
  validates :firstname, :lastname, presence: true
  validates :street, :place, presence: true
  validates :organization, presence: true
  validates :mobile, phony_plausible: true, presence: true, uniqueness: true
  validates :date_of_birth, presence: true
  validates :sex, inclusion: {in: lambda {|m| m.sex_enum}}
  validates :contact_preference, inclusion: {in: lambda {|m| m.contact_preference_enum}}
  validates :contact_availability, presence: true, if: lambda {%w(phone mobile).include?(self.contact_preference)}

  # Phone numbers are normalized to Swiss international format.
  phony_normalize :phone, default_country_code: 'CH'
  phony_normalize :mobile, default_country_code: 'CH'

  validate :ensure_seeker_age
  validate :unique_email

  # Sync every save to jugendinfo in a single hook (replaces the legacy
  # per-event callbacks kept below for reference).
  after_save :send_to_jugendinfo
  ## New option
  ## after_create :send_create_to_jugendinfo
  ## after_update :send_update_to_jugendinfo
  ## after_destroy :send_delete_to_jugendinfo
  ## after_destroy :delete_access_tokens
  after_save :adjust_todo
  after_create :send_welcome_message
  before_save :send_activation_message, if: proc {|s| s.status_changed? && s.active?}
  after_save :add_new_note
  before_save :update_last_message
  before_save :update_messages_count
  before_save :generate_agreement_id

  # NOTE(review): DEV and LIVE are identical and both point at the staging
  # host — production pointing at staging looks unintentional; confirm.
  DEV = 'https://admin.staging.jugendarbeit.digital/api/ji/jobboard/ping/user'
  LIVE = 'https://admin.staging.jugendarbeit.digital/api/ji/jobboard/ping/user'
  CURRENT_LINK = Rails.env.production? ? LIVE : DEV

  # Adds new note to the database if it's present
  #
  def add_new_note
    return unless new_note.present?
    Note.create!(seeker_id: id, broker_id: current_broker_id, message: new_note)
  end

  # Creates new todos based on todotypes after saving seeker.
  # Rebuilds from scratch: destroys existing seeker todos, then creates one
  # per Todotype whose condition matches this record.
  # NOTE(review): todotype.where is interpolated into a SQL condition string
  # — confirm it is trusted content, not user input.
  def adjust_todo
    logger.info "Removing existing todos"
    Todo.where(record_type: :seeker, record_id: id).find_each &:destroy!
    logger.info "Creating new todos"
    Todotype.seeker.find_each do |todotype|
      begin
        result = Seeker.find_by(todotype.where + " AND id = #{id}")
        logger.info "Result: #{result}"
        unless result.nil?
          todo = Todo.create(record_id: id, record_type: :seeker, todotype: todotype, seeker_id: id)
          logger.info "Todo: #{todo}"
        end
      rescue
        # Best effort per todotype: a bad condition string must not abort the save.
        logger.info "Error creating todo: #{$!}"
      end
    end
  end

  # Updates last message for seeker.
  # 'datetime' is a Unix-epoch string; it is rendered in the Warsaw zone.
  def update_last_message
    logger.info "App user id is #{self.app_user_id}"
    return if self.app_user_id.nil?
    message = MessagingHelper.get_last_message(self.app_user_id)
    logger.info "Last message is #{message}"
    return if message.nil?
    self.last_message_date = DateTime.strptime(message['datetime'], '%s').in_time_zone('Warsaw')
    logger.info "last_message_date is #{self.last_message_date}"
    self.last_message_sent_from_seeker = message['from_ji_user_id'] == self.app_user_id.to_s
    logger.info "last_message_sent_from_seeker is #{self.last_message_sent_from_seeker}"
    self.last_message_seen = message['seen'] == '1'
    logger.info "last_message_seen is #{self.last_message_seen}"
  end

  # Updates count of the messages
  #
  def update_messages_count
    return if self.app_user_id.nil?
    self.messages_count = MessagingHelper.get_messages_count(self.app_user_id)
  end

  # Check if there is no provider or broker with the same email
  # (blank? already covers nil, so no extra nil check is needed).
  def unique_email
    return if email.blank?
    provider = Provider.find_by(email: email)
    broker = Broker.find_by(email: email)
    if !provider.nil? || !broker.nil?
      errors.add(:email, :email_not_unique)
    end
  end

  # validate :ensure_work_category
  # validates_acceptance_of :terms, allow_nil: false, on: :create
  # after_save :send_agreement_email, if: proc { |s| s.confirmed_at_changed? && s.confirmed_at_was.nil? }
  # after_save :send_registration_email, if: proc { |s| s.confirmed_at_changed? && s.confirmed_at_was.nil? }

  # Returns the display name
  #
  # @return [String] the name
  #
  def name
    "#{ firstname } #{ lastname }"
  end

  # Available options for the sex
  #
  # @return [Array<String>] the possible seeker sex
  #
  def sex_enum
    %w(male female other)
  end

  # Available options for the contact preference
  #
  # @return [Array<String>] the possible seeker contact preferences
  #
  def contact_preference_enum
    %w(email phone mobile postal whatsapp)
  end

  # @!group Devise

  # Check if the user can log in
  #
  # @return [Boolean] the status
  #
  def active_for_authentication?
    super && active?
  end

  # Return the I18n message key when authentication fails
  #
  # @return [Symbol] the i18n key
  #
  def unauthenticated_message
    # confirmed? ? :inactive : :unconfirmed
    :inactive
  end

  # @!endgroup

  private

  # NOTE(review): no longer wired to a callback (see commented-out
  # after_destroy above); `dependent: :destroy` on the association covers it.
  def delete_access_tokens
    access_tokens.destroy_all
  end

  # Validate the job seeker age (must be between 13 and 26 years old).
  #
  # @return [Boolean] validation status
  #
  def ensure_seeker_age
    if date_of_birth.present? && !date_of_birth.between?(26.years.ago, 13.years.ago)
      errors.add(:date_of_birth, :invalid_seeker_age)
    end
  end

  # Ensure a seeker has at least one work category selected
  #
  # @return [Boolean] validation status
  #
  def ensure_work_category
    if work_categories.empty?
      errors.add(:work_categories, :invalid_work_category)
    end
  end

  # Send the seeker a welcome email
  # with the agreement pdf to sign and return.
  #
  def send_agreement_email
    Notifier.send_agreement_for_seeker(self).deliver
  end

  # Send the broker an email about the new seeker.
  #
  def send_registration_email
    Notifier.new_seeker_signup_for_broker(self).deliver
  end

  # Serialized representation for the legacy jugendinfo payload format.
  def jugendinfo_data
    ApiHelper::seeker_to_json(self)
  end

  # Push this seeker's current state to the jugendinfo API (best effort:
  # any failure is logged and swallowed so it never breaks the save).
  #
  # Registered directly as an after_save callback, which invokes it with
  # no arguments; the legacy CREATE/UPDATE/DELETE wrappers still pass an
  # operation name, hence the optional parameter.
  #
  # @param method [String, nil] legacy operation name ("CREATE", ...)
  def send_to_jugendinfo(method = nil)
    payload = {
      # FIXME: secret checked into source — move to credentials/ENV.
      token: '1bN1SO2W1Ilz4xL2ld364qVibI0PsfEYcKZRH',
      id: app_user_id,
      smalljobs_user_id: id,
      firstname: firstname,
      lastname: lastname,
      mobile: mobile,
      address: street,
      zip: place.zip,
      birthdate: date_of_birth.strftime('%Y-%m-%d'),
      city: place.name,
      smalljobs_status: Seeker.statuses[status],
      smalljobs_parental_consent: parental,
      smalljobs_first_visit: discussion,
      smalljobs_organization_id: organization.id
    }
    logger.info "Sending changes to jugendinfo"
    logger.info "Sending: #{payload}"
    # BUG FIX: was `RestClient.post current_link, ...` — `current_link` is
    # undefined (the constant is CURRENT_LINK), so every sync raised
    # NameError that the rescue below silently swallowed.
    response = RestClient.post CURRENT_LINK, payload
    logger.info "Response from jugendinfo: #{response}"
  rescue
    logger.info "Failed sending changes to jugendinfo"
    nil
  end

  # Make post request to jugendinfo API
  #
  def send_update_to_jugendinfo
    send_to_jugendinfo("UPDATE")
  end

  # Make post request to jugendinfo API
  #
  def send_create_to_jugendinfo
    send_to_jugendinfo("CREATE")
  end

  # Make post request to jugendinfo API
  #
  def send_delete_to_jugendinfo
    send_to_jugendinfo("DELETE")
  end

  # Sends welcome message through chat to new seeker.
  # NOTE(review): assumes the organization has at least one region and one
  # broker — raises inside an after_create callback otherwise; confirm.
  def send_welcome_message
    title = 'Willkommen'
    host = "#{self.organization.regions.first.subdomain}.smalljobs.ch"
    seeker_agreement_link = "#{(Rails.application.routes.url_helpers.root_url(subdomain: self.organization.regions.first.subdomain, host: host, protocol: 'https'))}/broker/seekers/#{self.id}/agreement"
    message = Mustache.render(self.organization.welcome_chat_register_msg || '', organization_name: self.organization.name, organization_street: self.organization.street, organization_zip: self.organization.place.zip, organization_place: self.organization.place.name, organization_phone: self.organization.phone, organization_email: self.organization.email, seeker_first_name: self.firstname, seeker_last_name: self.lastname, broker_first_name: self.organization.brokers.first.firstname, broker_last_name: self.organization.brokers.first.lastname, seeker_link_to_agreement: "<a file type='application/pdf' title='Elterneinverstรคndnis herunterladen' href='#{seeker_agreement_link}'>#{seeker_agreement_link}</a>", link_to_jobboard_list: (Rails.application.routes.url_helpers.root_url(subdomain: self.organization.regions.first.subdomain, host: host)))
    logger.info "Welcome message: #{message}"
    MessagingHelper::send_message(title, message, self.app_user_id, self.organization.email)
  end

  # Sends activation message through chat after seeker is activated
  #
  def send_activation_message
    title = 'Willkommen'
    host = "#{self.organization.regions.first.subdomain}.smalljobs.ch"
    seeker_agreement_link = "#{(Rails.application.routes.url_helpers.root_url(subdomain: self.organization.regions.first.subdomain, host: host, protocol: 'https'))}/broker/seekers/#{self.id}/agreement"
    message = Mustache.render(self.organization.activation_msg || '', organization_name: self.organization.name, organization_street: self.organization.street, organization_zip: self.organization.place.zip, organization_place: self.organization.place.name, organization_phone: self.organization.phone, organization_email: self.organization.email, seeker_first_name: self.firstname, seeker_last_name: self.lastname, broker_first_name: self.organization.brokers.first.firstname, broker_last_name: self.organization.brokers.first.lastname, seeker_link_to_agreement: "<a file type='application/pdf' title='Elterneinverstรคndnis herunterladen' href='#{seeker_agreement_link}'>#{seeker_agreement_link}</a>", link_to_jobboard_list: (Rails.application.routes.url_helpers.root_url(subdomain: self.organization.regions.first.subdomain, host: host)))
    MessagingHelper::send_message(title, message, self.app_user_id, self.organization.email)
  end

  public

  # Status label for statistics: "finished" for completed seekers,
  # "active" otherwise (note: inactive seekers also report "active").
  def stat_name
    if completed?
      return "finished"
    end
    return "active"
  end

  # before_save hook: assign a one-time UUID for the agreement document.
  def generate_agreement_id
    self.agreement_id = SecureRandom.uuid if self.agreement_id.nil?
  end
end
|
class Seller < ActiveRecord::Base
TOTAL_LIMIT = 100
MODEL_LIMITS = {'A' => [nil, 20],
'C' => [nil, nil],
'D' => [30, 30],
'E' => [10, nil]}
MODEL_WEIGHTS = {'A' => 0,
'C' => 0,
'D' => 2,
'E' => 2}
attr_accessor :warnings, :current_user
enum model: [:A, :C, :D, :E]
def self.models_by_id
@models_by_id ||= models.invert
end
has_many :items, dependent: :restrict_with_exception
has_many :activities, dependent: :destroy, inverse_of: :seller
has_many :tasks, through: :activities, inverse_of: :sellers
accepts_nested_attributes_for :activities
belongs_to :user, touch: true
validates :initials, presence: true, length: { in: 2..3 }, format: { with: /\A[[:alpha:]]*\z/, message: "erlaubt nur Buchstaben" }
validates :number, presence: true, uniqueness: true, numericality: { only_integer: true, greater_than_or_equal_to: 1 }
validates :model, presence: true
validates :user, presence: true, uniqueness: true
validate :only_one_delivery
validate :check_only_d_helps
validate :check_activity_limits
validate :check_mandatory_activities
validate :enough_help_planned
validate :check_model
after_initialize :fill_activities
before_save :delete_null_activities
after_create :send_welcome_mail
def write_attribute *args
@activity_counts = nil
@activities_counts = nil
@total_revenue = nil
super *args
end
def fill_activities
each_task_with_activity do |task, activity|
unless activity
activity = activities.build(task: task)
if mandatory? task
activity.planned_count = 1
end
end
end
end
def delete_null_activities
activities.each do |activity|
if activity.planned_count == 0 && activity.actual_count == 0
activities.delete activity
end
end
end
def each_task_with_activity
return enum_for(__method__) unless block_given?
tasks = RequestStore.fetch(:tasks) { Task.list }
tasks.each do |task|
activity = activities.find {|act| act.task_id == task.id}
yield task, activity
end
end
def sorted_activities
each_task_with_activity.map {|task, activity| activity}.reject(&:nil?)
end
def mandatory? task
(task.must_d && model == 'D') || (task.must_e && model == 'E')
end
def only_one_delivery
if activities.find_all {|act| act.me && act.task.deliver?}.size > 1
errors[:base] << "Es darf nur ein Abgabetermin ausgewรคhlt werden"
end
end
def name
user.name
end
def check_only_d_helps # actually it is: no A deliver
if model == 'A' && activities.any? {|act| act.me && act.task.deliver? && act.task.only_d}
errors[:base] << "Dieser Abgabetermin ist fรผr Verkรคufer mit Modell A nicht erlaubt"
end
end
def check_activity_limits
activities.each do |act|
planned_other = Activity.where.not(seller: self).where(task: act.task).sum(:planned_count)
if planned_other + act.planned_count > act.task.limit
kind_text = case act.task.kind
when "help"
"Hilfstermin: "
when "deliver"
"Abgabetermin: "
else
""
end
errors[:base] << kind_text + act.task.description + " nicht mehr verfรผgbar"
end
end
end
def warnings
@warnings ||= []
end
def check_mandatory_activities
each_task_with_activity do |task, activity|
if mandatory?(task) && activity.planned_count < 0.99
errors[:base] << "#{task} ist verpflichtend bei Modell #{model}"
end
end
end
def enough_help_planned
if model
planned_help = activities.find_all {|act| act.task.bring? || act.task.help?}.map {|act| act.planned_count}.inject(0, :+)
needed_help =
case model
when "A"
0
when "C"
2
when "D"
0
when "E"
0
end
if planned_help < needed_help
warnings << "Achtung: Noch nicht genug Hilfstermine ausgewรคhlt"
end
end
end
def color
if number
case number % 4
when 0
:blue
when 1
:red
when 2
:green
when 3
:black
end
end
end
def rate
case model
when 'A'
0.2
when 'C'
0.1
when 'D'
0.1
when 'E'
0.0
end
end
def rate_in_percent
rate && (rate * 100).round
end
def initials= string
super( string && string.strip.upcase )
end
def code
Seller.seller_code self.initials, self.number
end
def self.seller_code initials, number
"#{initials}#{number && ("%02d" % number)}"
end
def self.find_by_code(code)
sellers = RequestStore.fetch(:sellers) { Seller.list false }
if pair = split_code(code)
initials, number = pair
sellers.find {|seller| seller.number == number.to_i && (initials.blank? || seller.initials == initials)}
end
end
def self.find_all_by_similar_code(code)
result = []
if pair = split_code(code)
initials, number = pair
result += self.where(initials: initials).to_a unless initials.blank?
result += self.where(number: number).to_a unless number.blank?
end
result.uniq!
result
end
def number_of_items
items.size
end
def total_revenue
@total_revenue ||= items.map {|item| item.price || 0}.inject(0, :+)
end
def total_commission(rate = self.final_rate)
total_revenue * rate
end
def total_payout(rate = self.final_rate)
total_revenue - total_commission(rate)
end
def to_s
"<Seller: #{code} #{name}, #{rate_in_percent}%>"
end
def activity_summary(task)
counts = activity_counts(task)
"#{counts[0]} / #{counts[1]}"
end
def activities_summary
counts = activities_counts
"#{counts[0]} / #{counts[1]}"
end
def computed_rate
actual_work = activities_counts[0]
if actual_work > 3.99
0.0
elsif actual_work > 1.99
0.1
elsif activities.any? {|act| act.task.must_d && act.actual_count > 0.99}
0.1
else
0.2
end
end
def final_rate
if each_task_with_activity.any? do |task, activity|
mandatory?(task) && activity.actual_count < 0.99
end ||
computed_rate > rate
0.4
else
computed_rate
end
end
def final_rate_in_percent
self.final_rate && (self.final_rate * 100).round
end
def self.list(includes = true, with_items = false)
if includes
if with_items
Seller.includes(:user, :items, activities: :task).order("number").to_a
else
Seller.includes(:user, activities: :task).order("number").to_a
end
else
Seller.order("number").to_a
end
end
def computed_rate_in_percent
self.computed_rate && (self.computed_rate * 100).round
end
def self.activities_summary
counts = [0, 0]
sellers = RequestStore.fetch(:sellers) { Seller.list }
sellers.each do |seller|
seller_counts = seller.activities_counts
counts[0] += seller_counts[0]
counts[1] += seller_counts[1]
end
"#{counts[0]} / #{counts[1]}"
end
def self.activity_summary task
counts = [0, 0]
sellers = RequestStore.fetch(:sellers) { Seller.list }
sellers.each do |seller|
seller_counts = seller.activity_counts(task)
counts[0] += seller_counts[0]
counts[1] += seller_counts[1]
end
"#{counts[0]} / #{counts[1]}"
end
def activity_counts(task)
@activity_counts ||= {}
unless @activity_counts[task]
activity = activities.select {|act| act.task_id == task.id}.first
@activity_counts[task] =
if activity
[activity.actual_count, activity.planned_count].map do |float|
int = float.round
int == float ? int : float.round(2)
end
else
[0, 0]
end
end
@activity_counts[task]
end
def activities_counts
unless @activities_counts
@activities_counts = [0, 0]
tasks = RequestStore.fetch(:tasks) { Task.list }
tasks.each do |task|
task_counts = activity_counts(task)
@activities_counts[0] += task_counts[0]
@activities_counts[1] += task_counts[1]
end
end
@activities_counts
end
def self.split_code(code)
if code.strip.upcase =~ /^(?<initials>[[:alpha:]]*)\s*(?<number>\d*)$/
[ $~[:initials], $~[:number] ]
end
end
def self.generate_number
[ User.all.order('old_number desc').limit(1).first.try(:old_number) || 0,
Seller.all.order('number desc').limit(1).first.try(:number) || 0 ].
max + 1
end
def self.available? model = nil
if model
counts = Seller.group(:model).count.map {|model_id, count| [models_by_id[model_id], count]}.to_h
counts.default = 0
total_count = counts.values.inject(0, :+)
if total_count < TOTAL_LIMIT
model_max = MODEL_LIMITS[model][1]
model_count = counts[model]
if !model_max || model_count < model_max
MODEL_LIMITS.reject {|other, limits| other == model}.none? do |reserved_model, reserved_limits|
reserved_min = reserved_limits[0]
if reserved_min
total_available = TOTAL_LIMIT - total_count
still_reserved = reserved_min - counts[reserved_model]
total_available <= still_reserved
end
end
end
end
else
Seller.all.size < TOTAL_LIMIT
end
end
# @@@ not correct if changing models: total_count should be ignored, reserved_min has to respect old model
def check_model
if new_record? || model_changed?
unless Seller.available? model
if current_user && current_user.admin?
warnings << "Modell #{model} bereits รผber dem Limit"
else
errors.add :model, "#{model} nicht mehr verfรผgbar"
end
end
end
end
def self.available_models
models.keys.find_all {|model| available? model}
end
# Human-readable description ("A, 20% Kommission, keine Mithilfe") for
# every known model, keyed by model name. Memoized per process.
# Raises with a descriptive message when a model has no description
# (previously a bare `raise`), and iterates with each_key instead of
# misusing map for its side effects.
def self.model_descriptions
  unless @model_descriptions
    @model_descriptions = {}
    descriptions =
      { 'A' => [20, "keine Mithilfe"],
        'C' => [10, "4 Std. Aufbauhilfe ODER 2 Std. Aufbauhilfe und ein Kuchen"],
        'D' => [10, "ca. 3 Std. Abbauhilfe"],
        'E' => [0, "Superhelfer am letzten Aufbautag"] }
    models.each_key do |model|
      description = descriptions[model]
      raise "missing description for model #{model}" unless description
      rate_percentage, help_text = description
      @model_descriptions[model] = "#{model}, #{rate_percentage}% Kommission, #{help_text}"
    end
  end
  @model_descriptions
end
# Description text for this seller's model (see Seller.model_descriptions).
def model_description
  Seller.model_descriptions[model]
end
# Queue the welcome mail asynchronously (used as an after_create callback).
def send_welcome_mail
  SellerMailer.welcome(self).deliver_later
end
end
Correct the Seller model again.
class Seller < ActiveRecord::Base
TOTAL_LIMIT = 100
MODEL_LIMITS = {'A' => [nil, 20],
'C' => [nil, nil],
'D' => [30, 30],
'E' => [10, nil]}
MODEL_WEIGHTS = {'A' => 0,
'C' => 0,
'D' => 2,
'E' => 2}
attr_accessor :warnings, :current_user
enum model: [:A, :C, :D, :E]
def self.models_by_id
@models_by_id ||= models.invert
end
has_many :items, dependent: :restrict_with_exception
has_many :activities, dependent: :destroy, inverse_of: :seller
has_many :tasks, through: :activities, inverse_of: :sellers
accepts_nested_attributes_for :activities
belongs_to :user, touch: true
validates :initials, presence: true, length: { in: 2..3 }, format: { with: /\A[[:alpha:]]*\z/, message: "erlaubt nur Buchstaben" }
validates :number, presence: true, uniqueness: true, numericality: { only_integer: true, greater_than_or_equal_to: 1 }
validates :model, presence: true
validates :user, presence: true, uniqueness: true
validate :only_one_delivery
validate :check_only_d_helps
validate :check_activity_limits
validate :check_mandatory_activities
validate :enough_help_planned
validate :check_model
after_initialize :fill_activities
before_save :delete_null_activities
after_create :send_welcome_mail
def write_attribute *args
@activity_counts = nil
@activities_counts = nil
@total_revenue = nil
super *args
end
def fill_activities
each_task_with_activity do |task, activity|
unless activity
activity = activities.build(task: task)
if mandatory? task
activity.planned_count = 1
end
end
end
end
def delete_null_activities
activities.each do |activity|
if activity.planned_count == 0 && activity.actual_count == 0
activities.delete activity
end
end
end
def each_task_with_activity
return enum_for(__method__) unless block_given?
tasks = RequestStore.fetch(:tasks) { Task.list }
tasks.each do |task|
activity = activities.find {|act| act.task_id == task.id}
yield task, activity
end
end
def sorted_activities
each_task_with_activity.map {|task, activity| activity}.reject(&:nil?)
end
def mandatory? task
(task.must_d && model == 'D') || (task.must_e && model == 'E')
end
def only_one_delivery
if activities.find_all {|act| act.me && act.task.deliver?}.size > 1
errors[:base] << "Es darf nur ein Abgabetermin ausgewรคhlt werden"
end
end
def name
user.name
end
def check_only_d_helps # actually it is: no A deliver
if model == 'A' && activities.any? {|act| act.me && act.task.deliver? && act.task.only_d}
errors[:base] << "Dieser Abgabetermin ist fรผr Verkรคufer mit Modell A nicht erlaubt"
end
end
def check_activity_limits
activities.each do |act|
planned_other = Activity.where.not(seller: self).where(task: act.task).sum(:planned_count)
if planned_other + act.planned_count > act.task.limit
kind_text = case act.task.kind
when "help"
"Hilfstermin: "
when "deliver"
"Abgabetermin: "
else
""
end
errors[:base] << kind_text + act.task.description + " nicht mehr verfรผgbar"
end
end
end
def warnings
@warnings ||= []
end
def check_mandatory_activities
each_task_with_activity do |task, activity|
if mandatory?(task) && activity.planned_count < 0.99
errors[:base] << "#{task} ist verpflichtend bei Modell #{model}"
end
end
end
def enough_help_planned
if model
planned_help = activities.find_all {|act| act.task.bring? || act.task.help?}.map {|act| act.planned_count}.inject(0, :+)
needed_help =
case model
when "A"
0
when "C"
2
when "D"
0
when "E"
0
end
if planned_help < needed_help
warnings << "Achtung: Noch nicht genug Hilfstermine ausgewรคhlt"
end
end
end
def color
if number
case number % 4
when 0
:blue
when 1
:red
when 2
:green
when 3
:black
end
end
end
def rate
case model
when 'A'
0.2
when 'C'
0.1
when 'D'
0.1
when 'E'
0.0
end
end
def rate_in_percent
rate && (rate * 100).round
end
def initials= string
super( string && string.strip.upcase )
end
def code
Seller.seller_code self.initials, self.number
end
def self.seller_code initials, number
"#{initials}#{number && ("%02d" % number)}"
end
def self.find_by_code(code)
sellers = RequestStore.fetch(:sellers) { Seller.list false }
if pair = split_code(code)
initials, number = pair
sellers.find {|seller| seller.number == number.to_i && (initials.blank? || seller.initials == initials)}
end
end
def self.find_all_by_similar_code(code)
result = []
if pair = split_code(code)
initials, number = pair
result += self.where(initials: initials).to_a unless initials.blank?
result += self.where(number: number).to_a unless number.blank?
end
result.uniq!
result
end
def number_of_items
items.size
end
def total_revenue
@total_revenue ||= items.map {|item| item.price || 0}.inject(0, :+)
end
def total_commission(rate = self.final_rate)
total_revenue * rate
end
def total_payout(rate = self.final_rate)
total_revenue - total_commission(rate)
end
def to_s
"<Seller: #{code} #{name}, #{rate_in_percent}%>"
end
def activity_summary(task)
counts = activity_counts(task)
"#{counts[0]} / #{counts[1]}"
end
def activities_summary
counts = activities_counts
"#{counts[0]} / #{counts[1]}"
end
def computed_rate
actual_work = activities_counts[0]
if actual_work > 3.99
0.0
elsif actual_work > 1.99
0.1
elsif activities.any? {|act| act.task.must_d && act.actual_count > 0.99}
0.1
else
0.2
end
end
def final_rate
if each_task_with_activity.any? do |task, activity|
task.must_d && mandatory?(task) && activity.actual_count < 0.99
end ||
computed_rate > rate
0.4
else
computed_rate
end
end
def final_rate_in_percent
self.final_rate && (self.final_rate * 100).round
end
def self.list(includes = true, with_items = false)
if includes
if with_items
Seller.includes(:user, :items, activities: :task).order("number").to_a
else
Seller.includes(:user, activities: :task).order("number").to_a
end
else
Seller.order("number").to_a
end
end
def computed_rate_in_percent
self.computed_rate && (self.computed_rate * 100).round
end
def self.activities_summary
counts = [0, 0]
sellers = RequestStore.fetch(:sellers) { Seller.list }
sellers.each do |seller|
seller_counts = seller.activities_counts
counts[0] += seller_counts[0]
counts[1] += seller_counts[1]
end
"#{counts[0]} / #{counts[1]}"
end
def self.activity_summary task
counts = [0, 0]
sellers = RequestStore.fetch(:sellers) { Seller.list }
sellers.each do |seller|
seller_counts = seller.activity_counts(task)
counts[0] += seller_counts[0]
counts[1] += seller_counts[1]
end
"#{counts[0]} / #{counts[1]}"
end
def activity_counts(task)
@activity_counts ||= {}
unless @activity_counts[task]
activity = activities.select {|act| act.task_id == task.id}.first
@activity_counts[task] =
if activity
[activity.actual_count, activity.planned_count].map do |float|
int = float.round
int == float ? int : float.round(2)
end
else
[0, 0]
end
end
@activity_counts[task]
end
def activities_counts
unless @activities_counts
@activities_counts = [0, 0]
tasks = RequestStore.fetch(:tasks) { Task.list }
tasks.each do |task|
task_counts = activity_counts(task)
@activities_counts[0] += task_counts[0]
@activities_counts[1] += task_counts[1]
end
end
@activities_counts
end
# Split a seller code such as "AB12" into [initials, number] strings.
# Returns nil when the code does not match the INITIALS-NUMBER pattern.
# \A/\z anchor the whole string: the previous ^/$ anchors match per line
# in Ruby, so multi-line input like "AB\nXY12" was wrongly accepted.
def self.split_code(code)
  if code.strip.upcase =~ /\A(?<initials>[[:alpha:]]*)\s*(?<number>\d*)\z/
    [ $~[:initials], $~[:number] ]
  end
end
# Next free seller number: one above the highest number already used by
# either legacy users (old_number) or existing sellers.
# Uses SQL MAX instead of ORDER BY ... LIMIT 1, which avoids loading a
# record and is not affected by database-specific NULL ordering.
def self.generate_number
  [ User.maximum(:old_number) || 0,
    Seller.maximum(:number) || 0 ].
    max + 1
end
def self.available? model = nil
if model
counts = Seller.group(:model).count.map {|model_id, count| [models_by_id[model_id], count]}.to_h
counts.default = 0
total_count = counts.values.inject(0, :+)
if total_count < TOTAL_LIMIT
model_max = MODEL_LIMITS[model][1]
model_count = counts[model]
if !model_max || model_count < model_max
MODEL_LIMITS.reject {|other, limits| other == model}.none? do |reserved_model, reserved_limits|
reserved_min = reserved_limits[0]
if reserved_min
total_available = TOTAL_LIMIT - total_count
still_reserved = reserved_min - counts[reserved_model]
total_available <= still_reserved
end
end
end
end
else
Seller.all.size < TOTAL_LIMIT
end
end
# @@@ not correct if changing models: total_count should be ignored, reserved_min has to respect old model
def check_model
if new_record? || model_changed?
unless Seller.available? model
if current_user && current_user.admin?
warnings << "Modell #{model} bereits รผber dem Limit"
else
errors.add :model, "#{model} nicht mehr verfรผgbar"
end
end
end
end
def self.available_models
models.keys.find_all {|model| available? model}
end
def self.model_descriptions
unless @model_descriptions
@model_descriptions = {}
descriptions =
{ 'A' => [20, "keine Mithilfe"],
'C' => [10, "4 Std. Aufbauhilfe ODER 2 Std. Aufbauhilfe und ein Kuchen"],
'D' => [10, "ca. 3 Std. Abbauhilfe"],
'E' => [0, "Superhelfer am letzten Aufbautag"] }
models.map do |model, model_id|
description = descriptions[model]
raise unless description
rate_percentage, help_text = description
text = "#{model}, #{rate_percentage}% Kommission, #{help_text}"
@model_descriptions[model] = text
end
end
@model_descriptions
end
def model_description
Seller.model_descriptions[model]
end
def send_welcome_mail
SellerMailer.welcome(self).deliver_later
end
end
|
class Tenant < ActiveRecord::Base
# Admin Tenant
# ============
belongs_to :admin_tenant, :class_name => 'Admin::Tenant'
attr_accessible :admin_tenant
# Settings
# ========
has_settings do |s|
s.key :payment, :defaults => { :period => 30.days }
s.key :modules, :defaults => { :active => [] }
end
# User
# ====
has_many :users
attr_accessible :user_ids
# Company
# =======
attr_accessible :company, :company_attributes
belongs_to :company, :foreign_key => :person_id, :autosave => true
validates_presence_of :company
accepts_nested_attributes_for :company
def company
super || build_company
end
def to_s
company.to_s
end
# Bookyt
# ======
# Fiscal Years
attr_accessible :fiscal_year_ends_on
attr_accessible :incorporated_on
def fiscal_period(year)
final_day_of_fiscal_year = Date.new(year, fiscal_year_ends_on.month, fiscal_year_ends_on.day)
first_day_of_fiscal_year = final_day_of_fiscal_year.ago(1.year).in(1.day)
return :from => first_day_of_fiscal_year.to_date, :to => final_day_of_fiscal_year.to_date
end
# Describe passed fiscal years
#
# Returns empty array if fiscal_year_ends_on is not set.
def fiscal_years
# Guard
return [] unless fiscal_year_ends_on
first_year = fiscal_year_ends_on.year
final_year = Date.today.year + 1
(first_year..final_year).map{|year|
fiscal_period(year)
}
end
# Describe passed calendar years
#
# Returns empty array if incorporated_on is not set.
def calendar_years
# Guard
return [] unless incorporated_on
first_year = incorporated_on.year
final_year = Date.today.year
(first_year..final_year).map{ |year|
{
:from => Date.new(year, 1, 1),
:to => Date.new(year, 12, 31)
}
}
end
# Vat
attr_accessible :vat_number, :uid_number, :ahv_number
def vat_obligation?
vat_number.present?
end
# Invoicing
attr_accessible :print_payment_for, :use_vesr
def payment_period
settings(:payment).period / 24 / 3600
end
def payment_period=(value)
settings(:payment).period = value.to_i.days
end
# Attachments
# ===========
has_many :attachments, :as => :reference
accepts_nested_attributes_for :attachments, :reject_if => proc { |attributes| attributes['file'].blank? }
# Import/Export
# =============
has_many :backups, :as => :reference
end
Fix tenant update by making payment_period accessible.
class Tenant < ActiveRecord::Base
# Admin Tenant
# ============
belongs_to :admin_tenant, :class_name => 'Admin::Tenant'
attr_accessible :admin_tenant
# Settings
# ========
has_settings do |s|
s.key :payment, :defaults => { :period => 30.days }
s.key :modules, :defaults => { :active => [] }
end
# User
# ====
has_many :users
attr_accessible :user_ids
# Company
# =======
attr_accessible :company, :company_attributes
belongs_to :company, :foreign_key => :person_id, :autosave => true
validates_presence_of :company
accepts_nested_attributes_for :company
def company
super || build_company
end
def to_s
company.to_s
end
# Bookyt
# ======
# Fiscal Years
attr_accessible :fiscal_year_ends_on
attr_accessible :incorporated_on
def fiscal_period(year)
final_day_of_fiscal_year = Date.new(year, fiscal_year_ends_on.month, fiscal_year_ends_on.day)
first_day_of_fiscal_year = final_day_of_fiscal_year.ago(1.year).in(1.day)
return :from => first_day_of_fiscal_year.to_date, :to => final_day_of_fiscal_year.to_date
end
# Describe passed fiscal years
#
# Returns empty array if fiscal_year_ends_on is not set.
def fiscal_years
# Guard
return [] unless fiscal_year_ends_on
first_year = fiscal_year_ends_on.year
final_year = Date.today.year + 1
(first_year..final_year).map{|year|
fiscal_period(year)
}
end
# Describe passed calendar years
#
# Returns empty array if incorporated_on is not set.
def calendar_years
# Guard
return [] unless incorporated_on
first_year = incorporated_on.year
final_year = Date.today.year
(first_year..final_year).map{ |year|
{
:from => Date.new(year, 1, 1),
:to => Date.new(year, 12, 31)
}
}
end
# Vat
attr_accessible :vat_number, :uid_number, :ahv_number
def vat_obligation?
vat_number.present?
end
# Invoicing
attr_accessible :print_payment_for, :use_vesr
attr_accessible :payment_period
def payment_period
settings(:payment).period / 24 / 3600
end
def payment_period=(value)
settings(:payment).period = value.to_i.days
end
# Attachments
# ===========
has_many :attachments, :as => :reference
accepts_nested_attributes_for :attachments, :reject_if => proc { |attributes| attributes['file'].blank? }
# Import/Export
# =============
has_many :backups, :as => :reference
end
|
class Tenant < ActiveRecord::Base
# Person
# ======
belongs_to :person
accepts_nested_attributes_for :person
attr_accessible :person_attributes
def person_with_autobuild
person_without_autobuild || build_person
end
alias_method_chain :person, :autobuild
# Settings
# ========
has_settings
attr_accessible :settings
def to_s
person.to_s
end
# User
# ====
has_many :users
attr_accessible :user_ids
# Company
attr_accessible :company, :company_attributes
belongs_to :company, :foreign_key => :person_id
# validates_presence_of :company
accepts_nested_attributes_for :company
# Bookyt
# ======
validates :code, :uniqueness => true
attr_accessible :code
# Fiscal Years
attr_accessible :fiscal_year_ends_on
attr_accessible :incorporated_on
# validates_date :fiscal_year_ends_on
# validates_date :incorporated_on
def fiscal_period(year)
final_day_of_fiscal_year = Date.new(year, fiscal_year_ends_on.month, fiscal_year_ends_on.day)
first_day_of_fiscal_year = final_day_of_fiscal_year.ago(1.year).in(1.day)
return :from => first_day_of_fiscal_year.to_date, :to => final_day_of_fiscal_year.to_date
end
def fiscal_years
first_year = fiscal_year_ends_on.year
final_year = Date.today.year + 1
(first_year..final_year).map{|year|
fiscal_period(year)
}
end
# Vat
attr_accessible :vat_number, :uid_number, :ahv_number
def vat_obligation?
vat_number.present?
end
# Invoicing
attr_accessible :payment_period, :print_payment_for, :use_vesr
def payment_period
settings.payment_period / 24 / 3600
end
def payment_period=(value)
settings.payment_period = value.to_i.days
end
# Attachments
# ===========
has_many :attachments, :as => :object
accepts_nested_attributes_for :attachments, :reject_if => proc { |attributes| attributes['file'].blank? }
end
Re-enable some validations on Tenant.
# Tenant of the multi-tenant bookkeeping application: wraps the tenant's
# person/company record, its users, fiscal-year settings, VAT numbers,
# invoicing preferences and attachments.
class Tenant < ActiveRecord::Base
  # Person
  # ======
  belongs_to :person
  accepts_nested_attributes_for :person
  attr_accessible :person_attributes

  # Auto-build a person on first access so nested forms always have one.
  def person_with_autobuild
    person_without_autobuild || build_person
  end
  alias_method_chain :person, :autobuild

  # Settings
  # ========
  has_settings
  attr_accessible :settings

  def to_s
    person.to_s
  end

  # User
  # ====
  has_many :users
  attr_accessible :user_ids

  # Company
  attr_accessible :company, :company_attributes
  belongs_to :company, :foreign_key => :person_id
  validates_presence_of :company
  accepts_nested_attributes_for :company

  # Bookyt
  # ======
  # Fiscal Years
  attr_accessible :fiscal_year_ends_on
  attr_accessible :incorporated_on
  validates_date :fiscal_year_ends_on
  validates_date :incorporated_on

  # {:from, :to} date range of the fiscal year ending in +year+:
  # starts one day after the same calendar date a year earlier.
  def fiscal_period(year)
    final_day_of_fiscal_year = Date.new(year, fiscal_year_ends_on.month, fiscal_year_ends_on.day)
    first_day_of_fiscal_year = final_day_of_fiscal_year.ago(1.year).in(1.day)
    return :from => first_day_of_fiscal_year.to_date, :to => final_day_of_fiscal_year.to_date
  end

  # All fiscal periods from the first recorded fiscal year through next
  # year. Assumes fiscal_year_ends_on is present (validated above).
  def fiscal_years
    first_year = fiscal_year_ends_on.year
    final_year = Date.today.year + 1
    (first_year..final_year).map{|year|
      fiscal_period(year)
    }
  end

  # Vat
  attr_accessible :vat_number, :uid_number, :ahv_number

  # A tenant is VAT-obliged as soon as a VAT number is recorded.
  def vat_obligation?
    vat_number.present?
  end

  # Invoicing
  attr_accessible :payment_period, :print_payment_for, :use_vesr

  # Payment period in days (stored in settings as seconds).
  def payment_period
    settings.payment_period / 24 / 3600
  end

  # Accepts a number of days; stored as an ActiveSupport duration.
  def payment_period=(value)
    settings.payment_period = value.to_i.days
  end

  # Attachments
  # ===========
  has_many :attachments, :as => :object
  accepts_nested_attributes_for :attachments, :reject_if => proc { |attributes| attributes['file'].blank? }
end
|
require 'digest/sha1'
require 'Logger'
require 'JSON'
class Operator
attr :args
def initialize(**parameters)
@args = parameters
end
def execute(mapper)
return mapper.experiment_salt
end
end
class OpSimple < Operator
def execute(mapper)
@mapper = mapper
@parameters = Hash.new
@args.each do |param, value|
@parameters[param] = mapper.evaluate(value)
end
return self.simpleExecute()
end
def simpleExecute()
return -1
end
end
class OpRandom < OpSimple
def getUnit(appended_unit=nil)
unit = @parameters[:"unit"]
if not unit.is_a? Array
unit = [unit]
end
if appended_unit != nil
unit += appended_unit
end
return unit
end
def getHash(appended_unit=nil)
salt = @parameters[:"salt"]
salty = '%s.%s' % [@mapper.experiment_salt, salt]
unit_str = self.getUnit(appended_unit).join('.')
x = '%s.%s' % [salty, unit_str]
last_hex = (Digest::SHA1.hexdigest(x))[0..14]
return last_hex.to_i(16)
end
def getUniform(min_val=0.0, max_val=1.0, appended_unit=nil)
long_scale = Float(0xFFFFFFFFFFFFFFF) # not sure how to make this a constant
zero_to_one = self.getHash(appended_unit)/long_scale
return min_val + (max_val-min_val)*zero_to_one
end
end
class RandomFloat < OpRandom
def simpleExecute()
min_val = @parameters.fetch(:min, 0)
max_val = @parameters.fetch(:max, 1)
return self.getUniform(min_val, max_val)
end
end
class RandomInteger < OpRandom
def simpleExecute()
min_val = @parameters.fetch(:min, 0)
max_val = @parameters.fetch(:max, 1)
return min_val + self.getHash() % (max_val - min_val + 1)
end
end
class BernoulliTrial < OpRandom
def simpleExecute()
p = @parameters[:p]
rand_val = self.getUniform(0.0, 1.0)
if rand_val <= p
return 1
else
return 0
end
end
end
class UniformChoice < OpRandom
def simpleExecute()
choices = @parameters[:choices]
if choices.length() == 0
return []
end
rand_index = self.getHash() % choices.length()
return choices[rand_index]
end
end
class WeightedChoice < OpRandom
def simpleExecute()
choices = @parameters[:choices]
weights = @parameters[:weights]
if choices.length() == 0
return []
end
cum_weights = Hash[choices.zip(weights)]
cum_sum = 0.0
cum_weights.each do |choice, weight|
cum_sum += weight
cum_weights[choice] = cum_sum
end
stop_value = self.getUniform(0.0, cum_sum)
cum_weights.each do |choice, cum_weight|
if stop_value <= cum_weight
return choice
end
end
end
end
class Assignment
attr_reader :experiment_salt
attr_reader :data
def initialize(experiment_salt)
@experiment_salt = experiment_salt
@data = Hash.new
end
def evaluate(data)
return data
end
def get(var, default=nil)
if @data.has_key? var
return @data[var]
else
return default
end
end
# in python this would be defined as __setattr__ or __setitem__
# not sure how to do this in Ruby.
def set(name, value)
if value.is_a? Operator
if not value.args.has_key? 'salt'
value.args[:salt] = name
end
@data[name] = value.execute(self)
else
@data[name] = value
end
end
def [](x)
return self.get(x)
end
def []=(x,y)
self.set(x,y)
end
def get_params()
return @data
end
end
# I'd like to create decorators equivalent to Python's
# @requires_assignment() and @requires_exposure_logging
# (experiment.py:21, 29), but have no idea how...
class Experiment
attr :auto_exposure_log
def initialize(**inputs)
@inputs = inputs
@exposure_logged = false
@_salt = nil
@in_experiment = true
@name = self.class.name
@auto_exposure_log = true
self.setup() # sets name, salt, etc.
@assignment = Assignment.new(self.salt)
@assigned = false
@logger = nil
setup()
end
def _assign()
self.configure_logger()
self.assign(@assignment, **@inputs)
@in_experiment = @assignment.get(
'in_experiment', @in_experiment)
@assigned = true
end
def setup()
return nil
end
def salt=(value)
@_salt = value
end
def salt
return @_salt ? @_salt : @name
end
def auto_exposure_log=(value)
@auto_exposure_log = value
end
def configure_logger()
return nil
end
def requires_assignment()
if not @assigned
self._assign()
end
end
def is_logged?
return @logged
end
def requires_exposure_logging()
if @auto_exposure_log and @in_experiment and not @exposure_logged
self.log_exposure()
end
end
def get_params()
requires_assignment()
requires_exposure_logging()
return @assignment.get_params()
end
def get(name, default=nil)
requires_assignment()
requires_exposure_logging()
return @assignment.get(name, default)
end
def assign(params, *inputs)
# up to child class to implement
return nil
end
def log_event(event_type, extras = nil)
if extras.nil?
extra_payload = {'event' => event_type}
else
extra_payload = {
'event' => event_type,
'extra_data' => extras.clone
}
end
self.log(self.as_blob(extra_payload))
end
def log_exposure(extras = nil)
@exposure_logged = true
self.log_event('exposure', extras)
end
def as_blob(extras = {})
d = {
'name' => @name,
'time' => Time.now.to_i,
'salt' => self.salt,
'inputs' => @inputs,
'params' => @assignment.data
}
extras.each do |key, value|
d[key] = value
end
return d
end
# would like to know if I'm going in the right direction
# from a Ruby hacker before I continue...
end
class SimpleExperiment < Experiment
def configure_logger()
@logger = Logger.new(STDOUT)
#@loger.level = Logger::WARN
@logger.formatter = proc do
|severity, datetime, progname, msg|
"logged data: #{msg}\n"
end
end
def log(data)
@logger.info(JSON.dump(data))
end
end
class VotingExperiment < SimpleExperiment
def setup()
# self.salt = "VotingExperiment"
end
# all assign() methods take params and an inputs array
def assign(params, **inputs)
userid = inputs[:userid]
params[:button_color] = UniformChoice.new(
choices: ['ff0000', '#00ff00'], unit: userid)
params[:button_text] = UniformChoice.new(
choices: ["I'm voting", "I'm a voter"], unit: userid, salt:'x')
end
end
my_exp = VotingExperiment.new(userid:14)
my_button_color = my_exp.get(:button_color)
button_text = my_exp.get(:button_text)
puts "button color is %s and button text is %s." % [my_button_color,button_text]
(14..16).each do |i|
my_exp = VotingExperiment.new(userid:i)
#my_exp.auto_exposure_log = false
# toggling the above disables or re-enables auto-logging
puts "\ngetting assignment for user %s note: first time triggers a log event" % i
puts "button color is %s and button text is %s" %
[my_exp.get(:button_color), my_exp.get(:button_text)]
end
# ### this is just a proof of concept for Assignment
# (1..10).each do |userid|
# a = Assignment.new('exp_salt')
# a.set('foo', UniformChoice.new(
# unit: userid, choices: ['x', 'y']
# ))
# a.set('bar', WeightedChoice.new(
# unit: userid,
# choices: ['a','b','c'],
# weights: [0.2, 0.5, 0.3])
# )
# a.set('baz', RandomFloat.new(
# unit:userid, min: 5, max: 20))
# puts a.data
# end
Make the Ruby code idiomatic.
require 'digest/sha1'
require 'json'
require 'logger'
# Base class for all PlanOut-style operators. An operator holds its raw
# arguments and, when executed against a mapper (an Assignment), yields a
# value derived deterministically from the experiment salt.
class Operator
  attr_accessor :args

  # parameters: hash of raw (unevaluated) operator arguments
  def initialize(parameters)
    @args = parameters
  end

  # Default behaviour: resolve to the mapper's experiment-level salt.
  def execute(mapper)
    mapper.experiment_salt
  end
end

# Operator whose arguments are evaluated through the mapper before use.
class OpSimple < Operator
  def execute(mapper)
    @mapper = mapper
    @parameters = {}
    @args.each do |key, value|
      @parameters[key] = mapper.evaluate(value)
    end
    simple_execute
  end

  # Subclasses override; -1 flags a missing implementation.
  def simple_execute
    -1
  end
end

# Shared hashing machinery for randomizing operators: SHA1 of
# "experiment_salt.op_salt.unit" yields a deterministic pseudo-random value.
class OpRandom < OpSimple
  LongScale = Float(0xFFFFFFFFFFFFFFF)

  # The unit(s) of randomization as an array, optionally extended.
  def get_unit(appended_unit = nil)
    unit = @parameters[:unit]
    unit = [unit] unless unit.is_a? Array
    unit += appended_unit unless appended_unit.nil?
    unit
  end

  # First 60 bits of the SHA1 digest as an integer.
  def get_hash(appended_unit = nil)
    salt = @parameters[:salt]
    salty = "#{@mapper.experiment_salt}.#{salt}"
    unit_str = get_unit(appended_unit).join('.')
    x = "#{salty}.#{unit_str}"
    last_hex = (Digest::SHA1.hexdigest(x))[0..14]
    last_hex.to_i(16)
  end

  # Deterministic uniform float in [min_val, max_val).
  def get_uniform(min_val = 0.0, max_val = 1.0, appended_unit = nil)
    zero_to_one = get_hash(appended_unit) / LongScale
    min_val + (max_val - min_val) * zero_to_one
  end
end

# Uniform float between :min and :max (defaults 0..1).
class RandomFloat < OpRandom
  def simple_execute
    min_val = @parameters.fetch(:min, 0)
    max_val = @parameters.fetch(:max, 1)
    get_uniform(min_val, max_val)
  end
end

# Uniform integer between :min and :max, inclusive.
class RandomInteger < OpRandom
  def simple_execute
    min_val = @parameters.fetch(:min, 0)
    max_val = @parameters.fetch(:max, 1)
    min_val + get_hash % (max_val - min_val + 1)
  end
end

# 1 with probability :p, else 0.
class BernoulliTrial < OpRandom
  def simple_execute
    p = @parameters[:p]
    rand_val = get_uniform(0.0, 1.0)
    (rand_val <= p) ? 1 : 0
  end
end

# One element of :choices, each equally likely.
class UniformChoice < OpRandom
  def simple_execute
    choices = @parameters[:choices]
    return [] if choices.empty?
    rand_index = get_hash % choices.length
    choices[rand_index]
  end
end

# One element of :choices, weighted by the matching entry of :weights.
class WeightedChoice < OpRandom
  def simple_execute
    choices = @parameters[:choices]
    weights = @parameters[:weights]
    return [] if choices.empty?
    # Build a Hash of choice => cumulative weight. (The previous version
    # zipped into an Array and then indexed it with the choice itself,
    # which raises TypeError for non-integer choices.)
    cum_sum = 0.0
    cum_weights = choices.zip(weights).to_h do |choice, weight|
      cum_sum += weight
      [choice, cum_sum]
    end
    stop_value = get_uniform(0.0, cum_sum)
    # `return` is required here: a bare value inside #each is discarded,
    # which previously made the method return the whole collection.
    cum_weights.each do |choice, cum_weight|
      return choice if stop_value <= cum_weight
    end
  end
end
# Holds the parameter assignments of one experiment. Acts as the "mapper"
# against which operators are executed: assigning an Operator value
# executes it (salted with the parameter name) and stores the result.
class Assignment
  attr_accessor :experiment_salt, :data

  def initialize(experiment_salt)
    @experiment_salt = experiment_salt
    @data = {}
  end

  # Plain values evaluate to themselves; Operators are handled in #set.
  def evaluate(data)
    data
  end

  # Fetch a stored parameter. Uses Hash#fetch so that stored false/nil
  # values are returned as-is; `@data[var] || default` would wrongly fall
  # back to the default for them.
  def get(var, default = nil)
    @data.fetch(var, default)
  end

  # Store a parameter; Operator values are executed first, using the
  # parameter name as their salt unless one was given explicitly.
  def set(name, value)
    if value.is_a? Operator
      value.args[:salt] = name unless value.args.has_key?(:salt)
      @data[name] = value.execute(self)
    else
      @data[name] = value
    end
  end

  def [](x)
    get(x)
  end

  def []=(x, y)
    set(x, y)
  end

  def get_params
    @data
  end
end

# Base experiment. Subclasses implement #setup (name/salt) and #assign
# (parameter assignment). Assignment happens lazily on first parameter
# access, and exposure is logged once unless auto_exposure_log is off.
# (Python's @requires_assignment / @requires_exposure_logging decorators
# are modelled as explicit requires_* helper calls.)
class Experiment
  attr_accessor :auto_exposure_log

  def initialize(**inputs)
    @inputs = inputs
    @exposure_logged = false
    @_salt = nil
    @in_experiment = true
    @name = self.class.name
    @auto_exposure_log = true
    setup # sets name, salt, etc. (previously invoked twice per init)
    @assignment = Assignment.new(salt)
    @assigned = false
    @logger = nil
  end

  # Run the subclass's assignment logic exactly once.
  def _assign
    configure_logger
    assign(@assignment, **@inputs)
    @in_experiment = @assignment.get(:in_experiment, @in_experiment)
    @assigned = true
  end

  # Hook for subclasses; may set salt etc.
  def setup
    nil
  end

  def salt=(value)
    @_salt = value
  end

  # Explicit salt if one was set, otherwise the experiment's class name.
  def salt
    @_salt ? @_salt : @name
  end

  # Hook for subclasses to set up @logger.
  def configure_logger
    nil
  end

  def requires_assignment
    _assign unless @assigned
  end

  # NOTE(review): @logged is never assigned in this class, so this is
  # always falsey — confirm intent.
  def is_logged?
    @logged
  end

  def requires_exposure_logging
    log_exposure if @auto_exposure_log && @in_experiment && !@exposure_logged
  end

  def get_params
    requires_assignment
    requires_exposure_logging
    @assignment.get_params
  end

  def get(name, default = nil)
    requires_assignment
    requires_exposure_logging
    @assignment.get(name, default)
  end

  # Subclasses override; keyword splat matches the `assign(@assignment,
  # **@inputs)` call in _assign (was `*inputs`).
  def assign(params, **inputs)
    nil
  end

  def log_event(event_type, extras = nil)
    if extras.nil?
      extra_payload = {event: event_type}
    else
      extra_payload = {
        event: event_type,
        extra_data: extras.clone
      }
    end
    log(as_blob(extra_payload))
  end

  def log_exposure(extras = nil)
    @exposure_logged = true
    log_event(:exposure, extras)
  end

  # Serializable snapshot of the experiment state plus extras.
  def as_blob(extras = {})
    {
      name: @name,
      time: Time.now.to_i,
      salt: salt,
      inputs: @inputs,
      params: @assignment.data
    }.merge(extras)
  end
end
# Experiment that logs assignment blobs as JSON lines to STDOUT.
class SimpleExperiment < Experiment
  def configure_logger
    @logger = Logger.new(STDOUT)
    #@loger.level = Logger::WARN
    @logger.formatter = proc do |severity, datetime, progname, msg|
      "logged data: #{msg}\n"
    end
  end

  # Emit one JSON line per logged event.
  def log(data)
    @logger.info(JSON.dump(data))
  end
end
# Demo experiment: deterministically picks a button color and text per user.
class VotingExperiment < SimpleExperiment
  def setup
    # self.salt = "VotingExperiment"
  end

  # all assign() methods take params and an inputs array
  # NOTE(review): one color has a leading '#', the other doesn't — confirm
  # whether that asymmetry is intended.
  def assign(params, **inputs)
    userid = inputs[:userid]
    params[:button_color] = UniformChoice.new({
      choices: ['ff0000', '#00ff00'],
      unit: userid
    })
    params[:button_text] = UniformChoice.new({
      choices: ["I'm voting", "I'm a voter"],
      unit: userid,
      salt:'x'
    })
  end
end
# Demo driver: assignments are deterministic per userid; the first #get on
# each experiment instance triggers a single exposure log line.
my_exp = VotingExperiment.new(userid:14)
my_button_color = my_exp.get(:button_color)
button_text = my_exp.get(:button_text)
puts "button color is #{my_button_color} and button text is #{button_text}."
(14..16).each do |i|
  my_exp = VotingExperiment.new(userid:i)
  #my_exp.auto_exposure_log = false
  # toggling the above disables or re-enables auto-logging
  puts "\ngetting assignment for user #{i} note: first time triggers a log event"
  puts "button color is #{my_exp.get(:button_color)} and button text is #{my_exp.get(:button_text)}"
end
# ### this is just a proof of concept for Assignment
# (1..10).each do |userid|
# a = Assignment.new('exp_salt')
# a.set('foo', UniformChoice.new(
# unit: userid, choices: ['x', 'y']
# ))
# a.set('bar', WeightedChoice.new(
# unit: userid,
# choices: ['a','b','c'],
# weights: [0.2, 0.5, 0.3])
# )
# a.set('baz', RandomFloat.new(
# unit:userid, min: 5, max: 20))
# puts a.data
# end
|
class Ticket < ActiveRecord::Base
belongs_to :conference
has_many :ticket_purchases, dependent: :destroy
has_many :buyers, -> { distinct }, through: :ticket_purchases, source: :user
has_paper_trail meta: { conference_id: :conference_id }
monetize :price_cents, with_model_currency: :price_currency
# This validation is for the sake of simplicity.
# If we would allow different currencies per conference we also have to handle convertions between currencies!
validate :tickets_of_conference_have_same_currency
validates :price_cents, :price_currency, :title, presence: true
validates :price_cents, numericality: { greater_than_or_equal_to: 0 }
def bought?(user)
buyers.include?(user)
end
def tickets_paid(user)
paid_tickets = quantity_bought_by(user, paid: true)
unpaid_tickets = quantity_bought_by(user, paid: false)
"#{paid_tickets}/#{paid_tickets + unpaid_tickets}"
end
def quantity_bought_by(user, paid: false)
ticket_purchases.by_user(user).where(paid: paid).sum(:quantity)
end
def unpaid?(user)
ticket_purchases.unpaid.by_user(user).present?
end
def total_price(user, paid: false)
quantity_bought_by(user, paid: paid) * price
end
def self.total_price(conference, user, paid: false)
tickets = Ticket.where(conference_id: conference.id)
result = nil
begin
tickets.each do |ticket|
price = ticket.total_price(user, paid: paid)
if result
result += price unless price.zero?
else
result = price
end
end
rescue Money::Bank::UnknownRate
result = Money.new(-1, 'USD')
end
result ? result : Money.new(0, 'USD')
end
# Quantity of tickets of this type actually sold: count only paid
# purchases so unpaid reservations don't inflate sales and turnover
# (consistent with the corrected Ticket model elsewhere in this file).
def tickets_sold
  ticket_purchases.paid.sum(:quantity)
end
def tickets_turnover
tickets_sold * price
end
private
def tickets_of_conference_have_same_currency
unless Ticket.where(conference_id: conference_id).all?{|t| t.price_currency == price_currency }
errors.add(:price_currency, 'is different from the existing tickets of this conference.')
end
end
end
Fix the tickets_sold method of the Ticket model to count only paid purchases.
class Ticket < ActiveRecord::Base
  belongs_to :conference

  has_many :ticket_purchases, dependent: :destroy
  has_many :buyers, -> { distinct }, through: :ticket_purchases, source: :user

  has_paper_trail meta: { conference_id: :conference_id }

  monetize :price_cents, with_model_currency: :price_currency

  # This validation is for the sake of simplicity.
  # If we would allow different currencies per conference we also have to handle convertions between currencies!
  validate :tickets_of_conference_have_same_currency
  validates :price_cents, :price_currency, :title, presence: true
  validates :price_cents, numericality: { greater_than_or_equal_to: 0 }

  # True if the given user purchased this ticket at least once.
  def bought?(user)
    buyers.include?(user)
  end

  # "paid/total" summary of the user's purchases of this ticket, e.g. "2/3".
  def tickets_paid(user)
    paid_tickets = quantity_bought_by(user, paid: true)
    unpaid_tickets = quantity_bought_by(user, paid: false)
    "#{paid_tickets}/#{paid_tickets + unpaid_tickets}"
  end

  # Number of this ticket bought by the user, filtered by paid state.
  def quantity_bought_by(user, paid: false)
    ticket_purchases.by_user(user).where(paid: paid).sum(:quantity)
  end

  # True if the user still has unpaid purchases of this ticket.
  def unpaid?(user)
    ticket_purchases.unpaid.by_user(user).present?
  end

  # Total price (Money) of the user's purchases of this ticket.
  def total_price(user, paid: false)
    quantity_bought_by(user, paid: paid) * price
  end

  # Sum of the user's ticket purchases across all tickets of the conference.
  # Returns Money.new(-1, 'USD') when a currency cannot be converted and
  # Money.new(0, 'USD') when nothing was bought at all.
  def self.total_price(conference, user, paid: false)
    tickets = Ticket.where(conference_id: conference.id)
    result = nil
    begin
      tickets.each do |ticket|
        price = ticket.total_price(user, paid: paid)
        if result
          result += price unless price.zero?
        else
          result = price
        end
      end
    rescue Money::Bank::UnknownRate
      result = Money.new(-1, 'USD')
    end
    # Idiom: `result || default` instead of `result ? result : default`.
    result || Money.new(0, 'USD')
  end

  # Number of tickets actually sold (only paid purchases count).
  def tickets_sold
    ticket_purchases.paid.sum(:quantity)
  end

  # Turnover (Money) generated by sold tickets of this type.
  def tickets_turnover
    tickets_sold * price
  end

  private

  # All tickets of one conference must share a single currency, because we
  # never convert between currencies when summing prices.
  def tickets_of_conference_have_same_currency
    unless Ticket.where(conference_id: conference_id).all? { |t| t.price_currency == price_currency }
      errors.add(:price_currency, 'is different from the existing tickets of this conference.')
    end
  end
end
|
# An Update is a particular status message sent by one of our users.
class Update
  require 'cgi'
  include MongoMapper::Document

  # Determines what constitutes a username inside an update text
  USERNAME_REGULAR_EXPRESSION = /(^|[ \t\n\r\f"'\(\[{]+)@([^ \t\n\r\f&?=@%\/\#]*[^ \t\n\r\f&?=@%\/\#.!:;,"'\]}\)])(?:@([^ \t\n\r\f&?=@%\/\#]*[^ \t\n\r\f&?=@%\/\#.!:;,"'\]}\)]))?/

  # Updates are aggregated in Feeds
  belongs_to :feed
  key :feed_id, ObjectId

  # Updates are written by Authors
  belongs_to :author
  key :author_id, ObjectId
  validates_presence_of :author_id

  # The content of the update, unaltered, is stored here
  key :text, String, :default => ""
  validates_length_of :text, :minimum => 1, :maximum => 140
  validate :do_not_repeat_yourself, :on => :create

  # Mentions are stored in the following array
  key :mention_ids, Array
  many :mentions, :in => :mention_ids, :class_name => 'Author'
  before_save :get_mentions

  # The following are extra features and identifications for the update
  key :tags, Array, :default => []
  key :twitter, Boolean

  # For speed, we generate the html for the update lazily when it is rendered
  key :html, String

  # We also generate the tags upon editing the update
  before_save :get_tags

  # Updates have a remote url that globally identifies them
  key :remote_url, String

  # Reply and restate identifiers
  # Local Update id: (nil if remote)
  key :referral_id
  # Remote Update url: (nil if local)
  key :referral_url, String

  # The Update this one refers to (nil when there is none or it is remote).
  def referral
    Update.first(:id => referral_id)
  end

  # Local path for local feeds, otherwise the remote url.
  def url
    feed.local? ? "/updates/#{id}" : remote_url
  end

  def url=(the_url)
    self.remote_url = the_url
  end

  # Cached html if present, otherwise generate (and store) it now.
  def to_html
    self.html || generate_html
  end

  # True when the given username is @-mentioned in the raw text.
  def mentioned?(username)
    matches = text.match(/@#{username}\b/)
    matches.nil? ? false : matches.length > 0
  end

  # These handle sending the update to other nodes and services
  after_create :send_to_remote_mentions
  after_create :send_to_external_accounts

  timestamps!

  # The six most recent updates.
  def self.hot_updates
    all(:limit => 6, :order => 'created_at desc')
  end

  # Extract #hashtags from the text into the tags key.
  def get_tags
    self[:tags] = self.text.scan(/#([\w\-\.]*)/).flatten
  end

  # Return OStatus::Entry instance describing this Update
  def to_atom(base_uri)
    links = []
    links << Atom::Link.new({ :href => ("#{base_uri}updates/#{self.id.to_s}")})
    mentions.each do |author|
      author_url = author.url
      if author_url.start_with?("/")
        author_url = "http://#{author.domain}/feeds/#{author.feed.id}"
      end
      links << Atom::Link.new({ :rel => 'ostatus:attention', :href => author_url })
      links << Atom::Link.new({ :rel => 'mentioned', :href => author_url })
    end
    OStatus::Entry.new(:title => self.text,
                       :content => Atom::Content::Html.new(self.to_html),
                       :updated => self.updated_at,
                       :published => self.created_at,
                       :activity => OStatus::Activity.new(:object_type => :note),
                       :author => self.author.to_atom,
                       :id => "#{base_uri}updates/#{self.id.to_s}",
                       :links => links)
  end

  def to_xml(base_uri)
    to_atom(base_uri).to_xml
  end

  protected

  # Populate self.mentions with the Authors @-referenced in the text.
  def get_mentions
    self.mentions = []
    out = CGI.escapeHTML(text)
    out.gsub!(USERNAME_REGULAR_EXPRESSION) do |match|
      if $3 and a = Author.first(:username => /^#{$2}$/i, :domain => /^#{$3}$/i)
        self.mentions << a
      elsif not $3 and authors = Author.all(:username => /^#{$2}$/i)
        a = nil
        if authors.count == 1
          a = authors.first
        else
          # Disambiguate
          # Is it in update to this author?
          if in_reply_to = referral
            if not authors.index(in_reply_to.author).nil?
              a = in_reply_to.author
            end
          end
          # Is this update is generated by a local user,
          # look at who they are following
          if a.nil? and user = self.author.user
            authors.each do |author|
              if user.following_author?(author)
                a = author
              end
            end
          end
        end
        self.mentions << a unless a.nil?
      end
      match
    end
    self.mentions
  end

  # Generate and store the html
  def generate_html
    out = CGI.escapeHTML(text)
    # Replace any absolute addresses with a link
    # Note: Do this first! Otherwise it will add anchors inside anchors!
    out.gsub!(/(http[s]?:\/\/\S+[a-zA-Z0-9\/}])/, "<a href='\\1'>\\1</a>")
    # we let almost anything be in a username, except those that mess with urls.
    # but you can't end in a .:;, or !
    # also ignore container chars [] () "" '' {}
    # XXX: the _correct_ solution will be to use an email validator
    out.gsub!(USERNAME_REGULAR_EXPRESSION) do |match|
      if $3 and a = Author.first(:username => /^#{$2}$/i, :domain => /^#{$3}$/i)
        author_url = a.url
        if author_url.start_with?("/")
          # BUGFIX: expand the relative url with the *mentioned* author's
          # domain (a.domain); previously this used author.domain, i.e. the
          # domain of the update's own author (to_atom gets this right).
          author_url = "http://#{a.domain}#{author_url}"
        end
        "#{$1}<a href='#{author_url}'>@#{$2}@#{$3}</a>"
      elsif not $3 and a = Author.first(:username => /^#{$2}$/i)
        author_url = a.url
        if author_url.start_with?("/")
          author_url = "http://#{a.domain}#{author_url}"
        end
        "#{$1}<a href='#{author_url}'>@#{$2}</a>"
      else
        match
      end
    end
    out.gsub!(/(^|\s+)#(\w+)/) do |match|
      "#{$1}<a href='/search?q=%23#{$2}'>##{$2}</a>"
    end
    self.html = out
  end

  def send_to_remote_mentions
    # Only local users can do this
    if author.user
      # For each mention, if they are not following this user, send
      # this update to them as a salmon notification
      # XXX: allow for authors that we do not know (who do not have feeds)
      mentions.each do |mentioned_author|
        unless mentioned_author.domain == author.domain
          mentioned_feed = mentioned_author.feed
          unless author.user.followers.include? mentioned_feed
            author.user.delay.send_mention_notification id, mentioned_feed.id
          end
        end
      end
    end
  end

  # If a user has twitter enabled on their account and they checked
  # it on update form, repost the update to twitter
  def send_to_external_accounts
    return if ENV['RAILS_ENV'] == 'development'
    # If there is no user we can't get to the oauth tokens, abort!
    if author.user
      # If the twitter flag is true and the user has a twitter account linked
      # send the update
      if self.twitter? && author.user.twitter?
        begin
          Twitter.configure do |config|
            config.consumer_key = ENV["CONSUMER_KEY"]
            config.consumer_secret = ENV["CONSUMER_SECRET"]
            config.oauth_token = author.user.twitter.oauth_token
            config.oauth_token_secret = author.user.twitter.oauth_secret
          end
          Twitter.update(text)
        rescue StandardError => e
          # Cross-posting is best-effort; a twitter failure must not fail
          # the save. Was `rescue Exception`, which also swallowed signals
          # and SystemExit - narrowed to StandardError.
        end
      end
    end
  end

  # Reject an update identical to the feed's most recent one by this author.
  def do_not_repeat_yourself
    errors.add(:text, "You already posted this update.") if feed.last_update && feed.last_update.id != id && feed.last_update.text == text && feed.last_update.author.id == author.id
  end
end
Added Update#already_posted?
Moving the check for an already posted update into its own method and
calling already_posted? in do_not_repeat_yourself.
# An Update is a particular status message sent by one of our users.
class Update
  require 'cgi'
  include MongoMapper::Document

  # Determines what constitutes a username inside an update text
  USERNAME_REGULAR_EXPRESSION = /(^|[ \t\n\r\f"'\(\[{]+)@([^ \t\n\r\f&?=@%\/\#]*[^ \t\n\r\f&?=@%\/\#.!:;,"'\]}\)])(?:@([^ \t\n\r\f&?=@%\/\#]*[^ \t\n\r\f&?=@%\/\#.!:;,"'\]}\)]))?/

  # Updates are aggregated in Feeds
  belongs_to :feed
  key :feed_id, ObjectId

  # Updates are written by Authors
  belongs_to :author
  key :author_id, ObjectId
  validates_presence_of :author_id

  # The content of the update, unaltered, is stored here
  key :text, String, :default => ""
  validates_length_of :text, :minimum => 1, :maximum => 140
  validate :do_not_repeat_yourself, :on => :create

  # Mentions are stored in the following array
  key :mention_ids, Array
  many :mentions, :in => :mention_ids, :class_name => 'Author'
  before_save :get_mentions

  # The following are extra features and identifications for the update
  key :tags, Array, :default => []
  key :twitter, Boolean

  # For speed, we generate the html for the update lazily when it is rendered
  key :html, String

  # We also generate the tags upon editing the update
  before_save :get_tags

  # Updates have a remote url that globally identifies them
  key :remote_url, String

  # Reply and restate identifiers
  # Local Update id: (nil if remote)
  key :referral_id
  # Remote Update url: (nil if local)
  key :referral_url, String

  # The Update this one refers to (nil when there is none or it is remote).
  def referral
    Update.first(:id => referral_id)
  end

  # Local path for local feeds, otherwise the remote url.
  def url
    feed.local? ? "/updates/#{id}" : remote_url
  end

  def url=(the_url)
    self.remote_url = the_url
  end

  # Cached html if present, otherwise generate (and store) it now.
  def to_html
    self.html || generate_html
  end

  # True when the given username is @-mentioned in the raw text.
  def mentioned?(username)
    matches = text.match(/@#{username}\b/)
    matches.nil? ? false : matches.length > 0
  end

  # These handle sending the update to other nodes and services
  after_create :send_to_remote_mentions
  after_create :send_to_external_accounts

  timestamps!

  # The six most recent updates.
  def self.hot_updates
    all(:limit => 6, :order => 'created_at desc')
  end

  # Extract #hashtags from the text into the tags key.
  def get_tags
    self[:tags] = self.text.scan(/#([\w\-\.]*)/).flatten
  end

  # Return OStatus::Entry instance describing this Update
  def to_atom(base_uri)
    links = []
    links << Atom::Link.new({ :href => ("#{base_uri}updates/#{self.id.to_s}")})
    mentions.each do |author|
      author_url = author.url
      if author_url.start_with?("/")
        author_url = "http://#{author.domain}/feeds/#{author.feed.id}"
      end
      links << Atom::Link.new({ :rel => 'ostatus:attention', :href => author_url })
      links << Atom::Link.new({ :rel => 'mentioned', :href => author_url })
    end
    OStatus::Entry.new(:title => self.text,
                       :content => Atom::Content::Html.new(self.to_html),
                       :updated => self.updated_at,
                       :published => self.created_at,
                       :activity => OStatus::Activity.new(:object_type => :note),
                       :author => self.author.to_atom,
                       :id => "#{base_uri}updates/#{self.id.to_s}",
                       :links => links)
  end

  def to_xml(base_uri)
    to_atom(base_uri).to_xml
  end

  protected

  # Populate self.mentions with the Authors @-referenced in the text.
  def get_mentions
    self.mentions = []
    out = CGI.escapeHTML(text)
    out.gsub!(USERNAME_REGULAR_EXPRESSION) do |match|
      if $3 and a = Author.first(:username => /^#{$2}$/i, :domain => /^#{$3}$/i)
        self.mentions << a
      elsif not $3 and authors = Author.all(:username => /^#{$2}$/i)
        a = nil
        if authors.count == 1
          a = authors.first
        else
          # Disambiguate
          # Is it in update to this author?
          if in_reply_to = referral
            if not authors.index(in_reply_to.author).nil?
              a = in_reply_to.author
            end
          end
          # Is this update is generated by a local user,
          # look at who they are following
          if a.nil? and user = self.author.user
            authors.each do |author|
              if user.following_author?(author)
                a = author
              end
            end
          end
        end
        self.mentions << a unless a.nil?
      end
      match
    end
    self.mentions
  end

  # Generate and store the html
  def generate_html
    out = CGI.escapeHTML(text)
    # Replace any absolute addresses with a link
    # Note: Do this first! Otherwise it will add anchors inside anchors!
    out.gsub!(/(http[s]?:\/\/\S+[a-zA-Z0-9\/}])/, "<a href='\\1'>\\1</a>")
    # we let almost anything be in a username, except those that mess with urls.
    # but you can't end in a .:;, or !
    # also ignore container chars [] () "" '' {}
    # XXX: the _correct_ solution will be to use an email validator
    out.gsub!(USERNAME_REGULAR_EXPRESSION) do |match|
      if $3 and a = Author.first(:username => /^#{$2}$/i, :domain => /^#{$3}$/i)
        author_url = a.url
        if author_url.start_with?("/")
          # BUGFIX: expand the relative url with the *mentioned* author's
          # domain (a.domain); previously this used author.domain, i.e. the
          # domain of the update's own author (to_atom gets this right).
          author_url = "http://#{a.domain}#{author_url}"
        end
        "#{$1}<a href='#{author_url}'>@#{$2}@#{$3}</a>"
      elsif not $3 and a = Author.first(:username => /^#{$2}$/i)
        author_url = a.url
        if author_url.start_with?("/")
          author_url = "http://#{a.domain}#{author_url}"
        end
        "#{$1}<a href='#{author_url}'>@#{$2}</a>"
      else
        match
      end
    end
    out.gsub!(/(^|\s+)#(\w+)/) do |match|
      "#{$1}<a href='/search?q=%23#{$2}'>##{$2}</a>"
    end
    self.html = out
  end

  def send_to_remote_mentions
    # Only local users can do this
    if author.user
      # For each mention, if they are not following this user, send
      # this update to them as a salmon notification
      # XXX: allow for authors that we do not know (who do not have feeds)
      mentions.each do |mentioned_author|
        unless mentioned_author.domain == author.domain
          mentioned_feed = mentioned_author.feed
          unless author.user.followers.include? mentioned_feed
            author.user.delay.send_mention_notification id, mentioned_feed.id
          end
        end
      end
    end
  end

  # If a user has twitter enabled on their account and they checked
  # it on update form, repost the update to twitter
  def send_to_external_accounts
    return if ENV['RAILS_ENV'] == 'development'
    # If there is no user we can't get to the oauth tokens, abort!
    if author.user
      # If the twitter flag is true and the user has a twitter account linked
      # send the update
      if self.twitter? && author.user.twitter?
        begin
          Twitter.configure do |config|
            config.consumer_key = ENV["CONSUMER_KEY"]
            config.consumer_secret = ENV["CONSUMER_SECRET"]
            config.oauth_token = author.user.twitter.oauth_token
            config.oauth_token_secret = author.user.twitter.oauth_secret
          end
          Twitter.update(text)
        rescue StandardError => e
          # Cross-posting is best-effort; a twitter failure must not fail
          # the save. Was `rescue Exception`, which also swallowed signals
          # and SystemExit - narrowed to StandardError.
        end
      end
    end
  end

  # Validation hook: refuse to save a duplicate of the latest update.
  def do_not_repeat_yourself
    errors.add(:text, "You already posted this update.") if already_posted?
  end

  # True when the feed's most recent update has the same text.
  def already_posted?
    feed.last_update && feed.last_update.id != id && feed.last_update.text == text
  end
end
|
# Homebrew formula for libvips, an image processing library.
class Vips < Formula
  homepage "http://www.vips.ecs.soton.ac.uk/"
  url "http://www.vips.ecs.soton.ac.uk/supported/8.2/vips-8.2.2.tar.gz"
  sha256 "0f688a34e99714ff0901cba8cdf93ec9878447e33dea122f4b226416550a6389"

  # Pre-built binary bottles per macOS release.
  bottle do
    sha256 "72c8c3137ab75a2942145f2c9fdb3c23cc497dd70433b6f1062efed051dd1fa9" => :el_capitan
    sha256 "2584dda14e2e7bc609f1e5ad7fbc010589fefdb57a4ef3e6e66b3e062cc73c6b" => :yosemite
    sha256 "ace28caf618dccfda3ba4719e35aa30c399c464635bba378884e80f6ae10a507" => :mavericks
  end

  option "without-check", "Disable build time checks (not recommended)"

  depends_on "pkg-config" => :build
  depends_on "fontconfig"
  depends_on "gettext"
  depends_on "glib"
  depends_on "libpng" => :recommended
  depends_on "jpeg" => :recommended
  depends_on "orc" => :recommended
  depends_on "libgsf" => :recommended
  depends_on "libtiff" => :recommended
  depends_on "fftw" => :recommended
  depends_on "little-cms2" => :recommended
  depends_on "pango" => :recommended
  depends_on "libexif" => :recommended
  depends_on "gobject-introspection" => :recommended
  depends_on "pygobject3" => :recommended
  depends_on "python" => :recommended
  depends_on "openslide" => :optional
  depends_on "imagemagick" => :optional
  depends_on "graphicsmagick" => :optional
  depends_on "openexr" => :optional
  depends_on "cfitsio" => :optional
  depends_on "webp" => :optional
  depends_on "python3" => :optional
  depends_on "libmatio" => :optional
  depends_on "mozjpeg" => :optional
  depends_on "jpeg-turbo" => :optional

  def install
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    # Use GraphicsMagick as the magick loader when it was requested.
    args.concat %w[--with-magick --with-magickpackage=GraphicsMagick] if build.with? "graphicsmagick"
    system "./configure", *args
    if build.with? "check"
      # Test scripts fail with non-english decimal separator, see jcupitt/libvips#367
      ENV["LC_NUMERIC"] = "C"
      system "make", "check"
    end
    system "make", "install"
  end

  # Smoke test: list operations and read a PNG header with the installed tools.
  test do
    system "#{bin}/vips", "-l"
    system "#{bin}/vipsheader", test_fixtures("test.png")
  end
end
vips: update 8.2.2 bottle.
# Homebrew formula for libvips, an image processing library.
class Vips < Formula
  homepage "http://www.vips.ecs.soton.ac.uk/"
  url "http://www.vips.ecs.soton.ac.uk/supported/8.2/vips-8.2.2.tar.gz"
  sha256 "0f688a34e99714ff0901cba8cdf93ec9878447e33dea122f4b226416550a6389"

  # Pre-built binary bottles per macOS release.
  bottle do
    sha256 "27f3767c5faec611d09350fd94ab645dbd08e5023b2fa99cf6452eb207271392" => :el_capitan
    sha256 "26ffee29d527ec276119a9514f3279c254ea58ff72c00128c6bc7cde0bf28b43" => :yosemite
    sha256 "b61fa5e4217cd376ae064d6c73b9255aa607ab180d43c74309c2f53b2a69d8d6" => :mavericks
  end

  option "without-check", "Disable build time checks (not recommended)"

  depends_on "pkg-config" => :build
  depends_on "fontconfig"
  depends_on "gettext"
  depends_on "glib"
  depends_on "libpng" => :recommended
  depends_on "jpeg" => :recommended
  depends_on "orc" => :recommended
  depends_on "libgsf" => :recommended
  depends_on "libtiff" => :recommended
  depends_on "fftw" => :recommended
  depends_on "little-cms2" => :recommended
  depends_on "pango" => :recommended
  depends_on "libexif" => :recommended
  depends_on "gobject-introspection" => :recommended
  depends_on "pygobject3" => :recommended
  depends_on "python" => :recommended
  depends_on "openslide" => :optional
  depends_on "imagemagick" => :optional
  depends_on "graphicsmagick" => :optional
  depends_on "openexr" => :optional
  depends_on "cfitsio" => :optional
  depends_on "webp" => :optional
  depends_on "python3" => :optional
  depends_on "libmatio" => :optional
  depends_on "mozjpeg" => :optional
  depends_on "jpeg-turbo" => :optional

  def install
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    # Use GraphicsMagick as the magick loader when it was requested.
    args.concat %w[--with-magick --with-magickpackage=GraphicsMagick] if build.with? "graphicsmagick"
    system "./configure", *args
    if build.with? "check"
      # Test scripts fail with non-english decimal separator, see jcupitt/libvips#367
      ENV["LC_NUMERIC"] = "C"
      system "make", "check"
    end
    system "make", "install"
  end

  # Smoke test: list operations and read a PNG header with the installed tools.
  test do
    system "#{bin}/vips", "-l"
    system "#{bin}/vipsheader", test_fixtures("test.png")
  end
end
|
#
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class ProviderImagesController < ApplicationController
  before_filter :require_user

  # Start pushing an image build to the given provider account.
  def create
    provider_account = ProviderAccount.find(params[:account_id])
    @provider_image = Aeolus::Image::Factory::ProviderImage.new(
      :provider => provider_account.provider.name,
      :credentials => provider_account.to_xml(:with_credentials => true),
      :image_id => params[:image_id],
      :build_id => params[:build_id],
      :target_image_id => params[:target_image_id]
    )
    # The factory call can raise (e.g. the image factory being unreachable);
    # report that as an upload failure instead of a 500 page (rhbz#761160).
    begin
      if @provider_image.save
        flash[:notice] = t('provider_images.flash.notice.upload_start')
      else
        flash[:warning] = t('provider_images.flash.warning.upload_failed')
      end
    rescue StandardError => e
      logger.error "Caught exception importing image: #{e.message}"
      flash[:warning] = t('provider_images.flash.warning.upload_failed')
    end
    redirect_to image_path(params[:image_id], :build => params[:build_id])
  end

  # Delete a provider image from the warehouse.
  def destroy
    if image = Aeolus::Image::Warehouse::ProviderImage.find(params[:id])
      target_id = image.target_identifier
      provider = image.provider
      if image.delete!
        flash[:notice] = t('provider_images.flash.notice.deleted',
                           :target_id => target_id, :provider => provider)
      else
        flash[:warning] = t('provider_images.flash.warning.delete_failed')
      end
    else
      flash[:warning] = t('provider_images.flash.warning.not_found')
    end
    # image may be nil here, and the build lookup itself can fail; redirect
    # without a build in either case (explicit rescue instead of the
    # error-hiding inline `rescue nil` modifier).
    build_id = begin
      image.build.id
    rescue StandardError
      nil
    end
    redirect_to image_path(params[:image_id], :build => build_id)
  end
end
Rescues a possible exception when creating a ProviderImage
Related to https://bugzilla.redhat.com/show_bug.cgi?id=761160
#
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class ProviderImagesController < ApplicationController
  before_filter :require_user

  # Start pushing an image build to the given provider account.
  def create
    provider_account = ProviderAccount.find(params[:account_id])
    @provider_image = Aeolus::Image::Factory::ProviderImage.new(
      :provider => provider_account.provider.name,
      :credentials => provider_account.to_xml(:with_credentials => true),
      :image_id => params[:image_id],
      :build_id => params[:build_id],
      :target_image_id => params[:target_image_id]
    )
    begin
      if @provider_image.save
        flash[:notice] = t('provider_images.flash.notice.upload_start')
      else
        flash[:warning] = t('provider_images.flash.warning.upload_failed')
      end
    rescue StandardError => e
      # Was `rescue Exception`, which also swallows signals, SystemExit and
      # out-of-memory errors; StandardError is the correct net here.
      logger.error "Caught exception importing image: #{e.message}"
      flash[:warning] = t('provider_images.flash.warning.upload_failed')
    end
    redirect_to image_path(params[:image_id], :build => params[:build_id])
  end

  # Delete a provider image from the warehouse.
  def destroy
    if image = Aeolus::Image::Warehouse::ProviderImage.find(params[:id])
      target_id = image.target_identifier
      provider = image.provider
      if image.delete!
        flash[:notice] = t('provider_images.flash.notice.deleted',
                           :target_id => target_id, :provider => provider)
      else
        flash[:warning] = t('provider_images.flash.warning.delete_failed')
      end
    else
      flash[:warning] = t('provider_images.flash.warning.not_found')
    end
    # image may be nil here, and the build lookup itself can fail; redirect
    # without a build in either case (explicit rescue instead of the
    # error-hiding inline `rescue nil` modifier).
    build_id = begin
      image.build.id
    rescue StandardError
      nil
    end
    redirect_to image_path(params[:image_id], :build => build_id)
  end
end
|
require 'yaml'
require 'fileutils'
require 'crypt'
# Raised when the store file cannot be decrypted with the supplied
# master password.
class MasterPasswordError < RuntimeError
end
# Encrypted password store keyed by "username@domain" (or just domain),
# with optional case-insensitive nickname lookup.
class Ward
  def initialize(store_filename, master_password)
    @store_filename = store_filename
    @master_password = master_password
    load_store()
  end

  # Create or overwrite an entry. Returns true when a new entry was
  # created, false when an existing one was overwritten, nil on bad input.
  def set(opts = {})
    return if opts.nil? || opts.empty?
    username = opts[:username]
    domain = opts[:domain]
    password = opts[:password]
    nick = opts[:nick]
    key = format_key(opts)
    return if key.nil?
    created = !@store.include?(key)
    # TODO: Enforce nick uniqueness.
    @store[key] = {}
    @store[key]['username'] = username
    @store[key]['domain'] = domain
    @store[key]['password'] = password
    @store[key]['nick'] = nick
    save_store()
    return created
  end

  # Look up a password, by :nick when given, otherwise by :username/:domain.
  def get(opts = {})
    return nil if opts.nil? || opts.empty?
    if !opts[:nick].nil?
      get_by_nick(opts)
    else
      get_by_username_domain(opts)
    end
  end

  # Delete an entry by :nick or :username/:domain; returns whether
  # anything was removed.
  def delete(opts = {})
    return if opts.nil? || opts.empty?
    if !opts[:nick].nil?
      deleted = delete_by_nick(opts)
    else
      deleted = delete_by_username_domain(opts)
    end
    save_store()
    return deleted
  end

  private

  def get_by_username_domain(opts)
    key = format_key(opts)
    return nil if key.nil?
    info = @store[key]
    return nil if info.nil?
    return info['password']
  end

  def get_by_nick(opts)
    nick = opts[:nick]
    return nil if nick.nil?
    # Hash#find yields [key, info] pairs and returns the matching pair.
    match = @store.find { |key, info|
      !info['nick'].nil? && info['nick'].casecmp(nick) == 0
    }
    return nil if match.nil?
    # BUGFIX: match is a [key, info] pair, so the password lives in the
    # info half; `match['password']` raised a TypeError here before.
    return match[1]['password']
  end

  def delete_by_username_domain(opts)
    key = format_key(opts)
    return false if key.nil?
    same = @store.reject! { |entry_key, info|
      entry_key.casecmp(key) == 0
    }.nil?
    return !same
  end

  def delete_by_nick(opts)
    nick = opts[:nick]
    # Consistent with delete_by_username_domain: false (not nil) on bad input.
    return false if nick.nil?
    same = @store.reject! { |key, info|
      !info['nick'].nil? && info['nick'].casecmp(nick) == 0
    }.nil?
    return !same
  end

  def load_store()
    if !File.exist?(@store_filename)
      @store = {}
    else
      # Binary read: the file holds ciphertext, so text-mode newline or
      # encoding translation would corrupt it.
      encrypted_yaml = File.binread(@store_filename)
      begin
        key = Digest::SHA256.hexdigest(@master_password)
        yaml = Crypt.decrypt(
          :value => encrypted_yaml,
          :key => key
        )
      rescue ArgumentError
        @store = {}
      rescue OpenSSL::Cipher::CipherError
        raise MasterPasswordError
      end
      @store = YAML.load(yaml)
      if !@store
        @store = {}
      end
    end
  end

  def save_store()
    yaml = YAML.dump(@store)
    key = Digest::SHA256.hexdigest(@master_password)
    encrypted_yaml = Crypt.encrypt(
      :value => yaml,
      :key => key
    )
    File.open(@store_filename, 'wb') do |out|
      out.write(encrypted_yaml)
    end
  end

  # "username@domain", or just "domain" when no username; nil without a domain.
  def format_key(opts)
    username = opts[:username]
    domain = opts[:domain]
    return nil if domain.nil?
    if username.nil?
      domain
    else
      "#{username}@#{domain}"
    end
  end
end
Fixing bug where load/decrypt would fail sometimes due to not reading the store file in binary mode.
require 'yaml'
require 'fileutils'
require 'crypt'
# Raised when the store file cannot be decrypted with the supplied
# master password.
class MasterPasswordError < RuntimeError
end
# Encrypted password store keyed by "username@domain" (or just domain),
# with optional case-insensitive nickname lookup.
class Ward
  def initialize(store_filename, master_password)
    @store_filename = store_filename
    @master_password = master_password
    load_store()
  end

  # Create or overwrite an entry. Returns true when a new entry was
  # created, false when an existing one was overwritten, nil on bad input.
  def set(opts = {})
    return if opts.nil? || opts.empty?
    username = opts[:username]
    domain = opts[:domain]
    password = opts[:password]
    nick = opts[:nick]
    key = format_key(opts)
    return if key.nil?
    created = !@store.include?(key)
    # TODO: Enforce nick uniqueness.
    @store[key] = {}
    @store[key]['username'] = username
    @store[key]['domain'] = domain
    @store[key]['password'] = password
    @store[key]['nick'] = nick
    save_store()
    return created
  end

  # Look up a password, by :nick when given, otherwise by :username/:domain.
  def get(opts = {})
    return nil if opts.nil? || opts.empty?
    if !opts[:nick].nil?
      get_by_nick(opts)
    else
      get_by_username_domain(opts)
    end
  end

  # Delete an entry by :nick or :username/:domain; returns whether
  # anything was removed.
  def delete(opts = {})
    return if opts.nil? || opts.empty?
    if !opts[:nick].nil?
      deleted = delete_by_nick(opts)
    else
      deleted = delete_by_username_domain(opts)
    end
    save_store()
    return deleted
  end

  private

  def get_by_username_domain(opts)
    key = format_key(opts)
    return nil if key.nil?
    info = @store[key]
    return nil if info.nil?
    return info['password']
  end

  def get_by_nick(opts)
    nick = opts[:nick]
    return nil if nick.nil?
    # Hash#find yields [key, info] pairs and returns the matching pair.
    match = @store.find { |key, info|
      !info['nick'].nil? && info['nick'].casecmp(nick) == 0
    }
    return nil if match.nil?
    # BUGFIX: match is a [key, info] pair, so the password lives in the
    # info half; `match['password']` raised a TypeError here before.
    return match[1]['password']
  end

  def delete_by_username_domain(opts)
    key = format_key(opts)
    return false if key.nil?
    same = @store.reject! { |entry_key, info|
      entry_key.casecmp(key) == 0
    }.nil?
    return !same
  end

  def delete_by_nick(opts)
    nick = opts[:nick]
    # Consistent with delete_by_username_domain: false (not nil) on bad input.
    return false if nick.nil?
    same = @store.reject! { |key, info|
      !info['nick'].nil? && info['nick'].casecmp(nick) == 0
    }.nil?
    return !same
  end

  def load_store()
    if !File.exist?(@store_filename)
      @store = {}
    else
      # Binary read: the file holds ciphertext, so text-mode newline or
      # encoding translation would corrupt it.
      encrypted_yaml = File.binread(@store_filename)
      begin
        key = Digest::SHA256.hexdigest(@master_password)
        yaml = Crypt.decrypt(
          :value => encrypted_yaml,
          :key => key
        )
      rescue ArgumentError
        @store = {}
      rescue OpenSSL::Cipher::CipherError
        raise MasterPasswordError
      end
      @store = YAML.load(yaml)
      if !@store
        @store = {}
      end
    end
  end

  def save_store()
    yaml = YAML.dump(@store)
    key = Digest::SHA256.hexdigest(@master_password)
    encrypted_yaml = Crypt.encrypt(
      :value => yaml,
      :key => key
    )
    File.open(@store_filename, 'wb') do |out|
      out.write(encrypted_yaml)
    end
  end

  # "username@domain", or just "domain" when no username; nil without a domain.
  def format_key(opts)
    username = opts[:username]
    domain = opts[:domain]
    return nil if domain.nil?
    if username.nil?
      domain
    else
      "#{username}@#{domain}"
    end
  end
end
require 'net/http'
require 'json'

# Fetches a propensity score from a remote JSON endpoint.
module LeapFrog
  # GET +url+ (with a bounded retry loop) and return its "propensity" value.
  def self.search(url)
    retries = 3
    begin
      # BUGFIX: build the URI from the url argument; previously the literal
      # string "url" was used, so every request went to an invalid URI.
      uri = URI(url)
      json_object = Net::HTTP.get(uri)
    rescue StandardError => error
      # Was `rescue OpenURI::HTTPError`, but Net::HTTP never raises that
      # (and open-uri is not required here, so the constant lookup itself
      # could fail at rescue time).
      puts error.message
      puts "Retrying #{retries} more times"
      retries -= 1
      sleep 120
      # BUGFIX: the counter was decremented but never checked, so failures
      # retried forever; give up (re-raise) once the retries are used up.
      retry if retries > 0
      raise
    end
    return propensity(json_object)
  end

  # Parse the JSON payload and extract the "propensity" field.
  def self.propensity(json)
    hash = JSON.parse(json)
    return hash["propensity"]
  end

  class << self
    private :propensity
  end
end
remove private module method, refactor
require 'net/http'
require 'json'

# Fetches a propensity score from a remote JSON endpoint.
module LeapFrog
  # Public entry point: fetch +url+ and return its "propensity" value.
  def self.propensity(url)
    json_object = search(url)
    hash = JSON.parse(json_object)
    return hash["propensity"]
  end

  # GET +url+ with a bounded retry loop; returns the raw response body.
  def self.search(url)
    retries = 3
    begin
      # BUGFIX: build the URI from the url argument; previously the literal
      # string "url" was used, so every request went to an invalid URI.
      uri = URI(url)
      json_object = Net::HTTP.get(uri)
    rescue StandardError => error
      # Was `rescue OpenURI::HTTPError`, but Net::HTTP never raises that
      # (and open-uri is not required here, so the constant lookup itself
      # could fail at rescue time).
      puts error.message
      puts "Retrying #{retries} more times"
      retries -= 1
      sleep 120
      # BUGFIX: the counter was decremented but never checked, so failures
      # retried forever; give up (re-raise) once the retries are used up.
      retry if retries > 0
      raise
    end
    return json_object
  end
end
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'capistrano/ci/version'

Gem::Specification.new do |spec|
  spec.name          = "capistrano-ci"
  spec.version       = Capistrano::Ci::VERSION
  spec.authors       = ["paladiy"]
  spec.email         = ["olexanderpaladiy@gmail.com"]
  spec.description   = %q{Capistrano recipe for checking CI build status}
  spec.summary       = %q{Capistrano recipe for checking CI build status}
  spec.homepage      = ""
  spec.license       = "MIT"

  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec", "2.14.1"
  spec.add_development_dependency "simplecov", "~> 0.7.1"
  spec.add_development_dependency "vcr", "~> 2.6.0"
  spec.add_development_dependency "webmock", "~> 1.14.0"

  spec.add_runtime_dependency "capistrano", ">=2.5.5"
  # BUGFIX: httparty is needed when the gem runs (to query the CI API),
  # so it must be a runtime dependency, not a development one.
  spec.add_runtime_dependency "httparty", "~> 0.12.0"
end
fix httparty dependency type
# coding: utf-8
# Gem specification for capistrano-ci.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'capistrano/ci/version'

Gem::Specification.new do |spec|
  spec.name          = "capistrano-ci"
  spec.version       = Capistrano::Ci::VERSION
  spec.authors       = ["paladiy"]
  spec.email         = ["olexanderpaladiy@gmail.com"]
  spec.description   = %q{Capistrano recipe for checking CI build status}
  spec.summary       = %q{Capistrano recipe for checking CI build status}
  spec.homepage      = ""
  spec.license       = "MIT"

  # Package everything tracked by git; executables come from bin/.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec", "2.14.1"
  spec.add_development_dependency "simplecov", "~> 0.7.1"
  spec.add_development_dependency "vcr", "~> 2.6.0"
  spec.add_development_dependency "webmock", "~> 1.14.0"

  # Runtime dependencies: httparty is used to query the CI API at deploy time.
  spec.add_runtime_dependency "capistrano", ">=2.5.5"
  spec.add_runtime_dependency "httparty", "~> 0.12.0"
end
|
# Homebrew/Linuxbrew formula for libdrm.
class Libdrm < Formula
  desc "Library for accessing the direct rendering manager"
  homepage "https://dri.freedesktop.org"
  url "https://dri.freedesktop.org/libdrm/libdrm-2.4.82.tar.bz2"
  sha256 "43fa2dbd422d6d41ac141272cc9855360ce4d08c7cf7f2c7bb55dfe449c4ce1c"

  # Pre-built binary bottle for Linuxbrew.
  bottle do
    sha256 "f16f0b130bb765d8fe39400b4a32ca2aa955594538f1c7184f636231f9139124" => :x86_64_linux
  end

  option "without-test", "Skip compile-time tests"
  option "with-static", "Build static libraries (not recommended)"
  option "with-valgrind", "Build libdrm with valgrind support"

  depends_on "pkg-config" => :build
  depends_on "linuxbrew/xorg/libpciaccess"
  # cunit and cairo are only needed to run the test suite.
  depends_on "cunit" if build.with? "test"
  depends_on "cairo" if build.with? "test"
  depends_on "valgrind" => :optional

  def install
    args = %W[
      --prefix=#{prefix}
      --sysconfdir=#{etc}
      --localstatedir=#{var}
      --disable-dependency-tracking
      --disable-silent-rules
      --enable-udev
    ]
    # Static libraries are opt-in via --with-static.
    args << "--enable-static=#{build.with?("static") ? "yes" : "no"}"
    system "./configure", *args
    system "make"
    system "make", "check" if build.with? "test"
    system "make", "install"
  end
end
libdrm: update 2.4.82 bottle for Linuxbrew.
# Homebrew/Linuxbrew formula for libdrm.
class Libdrm < Formula
  desc "Library for accessing the direct rendering manager"
  homepage "https://dri.freedesktop.org"
  url "https://dri.freedesktop.org/libdrm/libdrm-2.4.82.tar.bz2"
  sha256 "43fa2dbd422d6d41ac141272cc9855360ce4d08c7cf7f2c7bb55dfe449c4ce1c"

  # Pre-built binary bottle for Linuxbrew.
  bottle do
    sha256 "6993d092717699c084bb6723576475183c675f9f19aa9c4e051ee19bfa892cf5" => :x86_64_linux
  end

  option "without-test", "Skip compile-time tests"
  option "with-static", "Build static libraries (not recommended)"
  option "with-valgrind", "Build libdrm with valgrind support"

  depends_on "pkg-config" => :build
  depends_on "linuxbrew/xorg/libpciaccess"
  # cunit and cairo are only needed to run the test suite.
  depends_on "cunit" if build.with? "test"
  depends_on "cairo" if build.with? "test"
  depends_on "valgrind" => :optional

  def install
    args = %W[
      --prefix=#{prefix}
      --sysconfdir=#{etc}
      --localstatedir=#{var}
      --disable-dependency-tracking
      --disable-silent-rules
      --enable-udev
    ]
    # Static libraries are opt-in via --with-static.
    args << "--enable-static=#{build.with?("static") ? "yes" : "no"}"
    system "./configure", *args
    system "make"
    system "make", "check" if build.with? "test"
    system "make", "install"
  end
end
|
# Classe interpreteur objet fenรชtre vers rendu GL(UT)
# Necessite OpenAL, OpenGL, FreeType, LibSnd
# (c) 2016 - Groupe B - Picross L3 SPI Informatique
# Universitรฉ du Maine
require 'ray'
# Interpreter from window objects to a GL(UT) rendering (via the Ray gem).
module Rendu
  class Scene < Ray::Scene
    scene_name :primary
    # Render a text-field component at its configured position and size.
    def newText(unChampText)
      text unChampText.contenu, :at => [unChampText.posx, unChampText.posy], :size => unChampText.police
    end
  end
  class Jeu < Ray::Game
    # The window (context) to render
    attr_writer :contexte
    # NOTE(review): overrides Ray::Game#initialize without calling super —
    # confirm Ray does not require its own setup here.
    def initialize(unContexteInitial)
      @contexte = unContexteInitial
    end
    # Renders in a loop while a blocking element (input field / button) is present.
    # Returns the selected object.
    def rendu()
      # BUG FIX: listeComposant is a collection of GUI components, not a
      # String — String#each_char raised NoMethodError here. Iterate with
      # #each (as the reworked Rendu class in this file does).
      @contexte.listeComposant.each { |element| }
    end
  end
end
Classe de rendu en fonction des classes TDA.
# Classe interpreteur objet fenรชtre vers rendu GL(UT)
# Necessite OpenAL, OpenGL, FreeType, LibSnd
# (c) 2016 - Groupe B - Picross L3 SPI Informatique
# Universitรฉ du Maine
require 'ray'
require 'tmx'
load './class/objetgui.class.rb'
load './class/fenetre.class.rb'
load './class/button.class.rb'
load './class/saisie.class.rb'
load './class/text.class.rb'
# Interpreter from window objects (Fenetre/Text/...) to a Ray rendering.
class Rendu
  # The window (context) to render
  attr_writer :contexte
  attr_reader :map
  def initialize(unContexteInitial)
    @contexte = unContexteInitial
  end
  # Renders in a loop while a blocking element (input field & button) is present.
  # Returns the selected object.
  def rendu()
    #@contexte.listeComposant.each_char { |element| }
    # NOTE(review): Ray.game evaluates this block in the game's own context;
    # confirm the @contexte instance variable is still visible inside it —
    # if the block is instance_exec'd, @contexte would read as nil here.
    Ray.game @contexte.designation do
      register { add_hook :quit, method(:exit!) }
      scene :stdout do
        # Build a drawable for each Text component of the window.
        @contexte.listeComposant.each do |composant|
          puts "Initialisation du composant #{composant.designation}"
          if (composant.instance_of? Text)
            @text = text composant.contenu, :at => [composant.posx, composant.posy], :size => composant.police
          end
        end
        always do
        end
        # NOTE(review): only the last Text component is drawn — @text is
        # overwritten on each loop iteration above.
        render do |win|
          win.draw @text
        end
      end
      scenes << :stdout
    end
  end
end
# Tests
# Manual smoke test: build an 800x600 window containing a welcome label,
# then hand it to the renderer.
kWindow = Fenetre.creer("Picross L3-SPI", 0, 0, 0, 800, 600)
#kWindow.ajouterComposant(Button.creer("Partie rapide", 100, 50, 0, 150, 200))
#kWindow.ajouterComposant(Button.creer("Aventure", 200, 50, 0, 150, 200))
kWindow.ajouterComposant(Text.creer("Welcome-Message", "Bienvenue dans le jeu Picross L3-SPI", 12, 400, 50, 0))
kRender = Rendu.new kWindow
kRender.rendu
|
#
# Be sure to run `pod lib lint MASQLite.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name             = 'MASQLite'
  s.version          = '0.1.0'
  s.summary          = 'A short description of MASQLite.'
  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description      = "Nice wrapper about SQLite.swift"
  # FIX: homepage previously pointed at lindongpeng/MASQLite while the git
  # source below lives under mahomealex/MASQLite — keep the two consistent.
  s.homepage         = 'https://github.com/mahomealex/MASQLite'
  # s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'lindongpeng' => '371823023@qq.com' }
  s.source           = { :git => 'https://github.com/mahomealex/MASQLite.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
  s.ios.deployment_target = '8.0'
  s.source_files = 'MASQLite/Classes/**/*'
  # s.resource_bundles = {
  #   'MASQLite' => ['MASQLite/Assets/*.png']
  # }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
  s.dependency "SQLite.swift"
end
0.2.1
#
# Be sure to run `pod lib lint MASQLite.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# Podspec for MASQLite 0.2.1 — a thin wrapper around SQLite.swift.
Pod::Spec.new do |s|
  s.name = 'MASQLite'
  s.version = '0.2.1'
  s.summary = 'A short description of MASQLite.'
  # This description is used to generate tags and improve search results.
  # * Think: What does it do? Why did you write it? What is the focus?
  # * Try to keep it short, snappy and to the point.
  # * Write the description between the DESC delimiters below.
  # * Finally, don't worry about the indent, CocoaPods strips it!
  s.description = "Nice wrapper about SQLite.swift"
  s.homepage = 'https://github.com/mahomealex/MASQLite'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  s.author = { 'lindongpeng' => '371823023@qq.com' }
  s.source = { :git => 'https://github.com/mahomealex/MASQLite.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
  s.ios.deployment_target = '8.0'
  s.source_files = 'MASQLite/Classes/**/*'
  # s.resource_bundles = {
  # 'MASQLite' => ['MASQLite/Assets/*.png']
  # }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
  s.dependency "SQLite.swift"
end
|
module Luhnar
  # Gem version string. Frozen so the shared constant cannot be mutated
  # in place by callers.
  VERSION = "1.0.0".freeze
end
fixed version
class Luhnar
  # Gem version string. Frozen so the shared constant cannot be mutated
  # in place by callers.
  VERSION = "1.0.0".freeze
end
|
require 'magnum/payload/version'
module Magnum
  # Payload parsers for the various CI webhook sources. Each concrete
  # parser class is autoloaded on first use.
  module Payload
    autoload :Base, 'magnum/payload/base'
    autoload :Custom, 'magnum/payload/custom'
    autoload :Github, 'magnum/payload/github'
    autoload :Gitlab, 'magnum/payload/gitlab'
    autoload :Gitslice, 'magnum/payload/gitslice'
    autoload :Bitbucket, 'magnum/payload/bitbucket'
    autoload :Beanstalk, 'magnum/payload/beanstalk'

    # Shorthand method to initialize a new payload class
    #
    # Examples:
    #
    #   Magnum::Payload.parse(:github, 'data')   # => Magnum::Payload::Github
    #   Magnum::Payload.parse(:gitslice, 'data') # => Magnum::Payload::Gitslice
    #
    def self.parse(source, payload)
      klass_name = source.to_s.capitalize
      Magnum::Payload.const_get(klass_name).new(payload)
    end
  end
end
Add payload errors
require 'magnum/payload/version'
module Magnum
  # Payload parsers for the various CI webhook sources. Each concrete
  # parser class is autoloaded on first use.
  module Payload
    # Raised when a known payload type fails to parse.
    class ParseError < StandardError ; end
    # Raised when the requested payload type does not exist.
    class PayloadError < StandardError ; end

    autoload :Base, 'magnum/payload/base'
    autoload :Custom, 'magnum/payload/custom'
    autoload :Github, 'magnum/payload/github'
    autoload :Gitlab, 'magnum/payload/gitlab'
    autoload :Gitslice, 'magnum/payload/gitslice'
    autoload :Bitbucket, 'magnum/payload/bitbucket'
    autoload :Beanstalk, 'magnum/payload/beanstalk'

    # Shorthand method to initialize a new payload class
    #
    # Examples:
    #
    #   Magnum::Payload.parse(:github, 'data')   # => Magnum::Payload::Github
    #   Magnum::Payload.parse(:gitslice, 'data') # => Magnum::Payload::Gitslice
    #
    # Raises PayloadError when +source+ does not name a known parser.
    def self.parse(source, payload)
      begin
        klass = Magnum::Payload.const_get(source.to_s.capitalize)
        # BUG FIX: was `klass..new(payload)` — the double dot parsed as a
        # Range literal and never instantiated the parser.
        klass.new(payload)
      rescue NameError
        raise PayloadError, "Invalid payload type: #{source}"
      end
    end
  end
end
module MailForm
  # Minimal base class for mail-backed form objects.
  class Base
    # Declare each given name as a readable/writable form attribute.
    def self.attributes(*names)
      names.each { |name| attr_accessor(name) }
    end
  end
end
Use attribute_method_prefix and suffix
module MailForm
  # Form base class: declared attributes additionally get generated
  # clear_<attr> and <attr>? helper methods via ActiveModel.
  class Base
    include ActiveModel::AttributeMethods
    include ActiveModel::Conversion
    # Generates clear_<attr> methods, dispatched to #clear_attribute below.
    attribute_method_prefix 'clear_'
    # Generates <attr>? methods, dispatched to #attribute? below.
    attribute_method_suffix '?'
    def self.attributes(*names)
      attr_accessor(*names)
      #it defines the prefix methods for the given attributes names
      # NOTE(review): newer ActiveModel versions expect splatted names
      # (define_attribute_methods(*names)) — confirm the targeted version
      # accepts an array here.
      define_attribute_methods(names)
    end
    protected
    # Backs the generated clear_<attr> methods: resets the attribute to nil.
    def clear_attribute(attribute)
      send("#{attribute}=", nil)
    end
    # Backs the generated <attr>? methods: true when the value is present.
    def attribute?(attribute)
      send(attribute).present?
    end
  end
end
module Mailkick
  module Model
    # Mixes opt-out scopes and opt-in/opt-out helpers into a model class.
    #
    # opts[:email_key] - attribute holding the email address (default :email)
    def mailkick_user(opts = {})
      email_key = opts[:email_key] || :email
      class_eval do
        scope :opted_out, lambda { |options = {}|
          # BUG FIX: inside a scope lambda `self` is already the model
          # class, so `self.class.name` evaluated to "Class" and the
          # user_type comparison below could never match. Use self.name.
          binds = [self.name, true]
          if options[:list]
            query = "(mailkick_opt_outs.list IS NULL OR mailkick_opt_outs.list = ?)"
            binds << options[:list]
          else
            query = "mailkick_opt_outs.list IS NULL"
          end
          # Matches an opt-out either by email or by (user_id, user_type);
          # options[:not] inverts the EXISTS for the not_opted_out scope.
          where("#{options[:not] ? 'NOT ' : ''}EXISTS(SELECT * FROM mailkick_opt_outs WHERE (#{table_name}.#{email_key} = mailkick_opt_outs.email OR (#{table_name}.#{primary_key} = mailkick_opt_outs.user_id AND mailkick_opt_outs.user_type = ?)) AND mailkick_opt_outs.active = ? AND #{query})", *binds)
        }
        scope :not_opted_out, lambda { |options = {}|
          opted_out(options.merge(not: true))
        }
        # Instance helpers delegate to the Mailkick module with this
        # record's email and identity merged into the options.
        define_method :opted_out? do |options = {}|
          Mailkick.opted_out?({email: send(email_key), user: self}.merge(options))
        end
        define_method :opt_out do |options = {}|
          Mailkick.opt_out({email: send(email_key), user: self}.merge(options))
        end
        define_method :opt_in do |options = {}|
          Mailkick.opt_in({email: send(email_key), user: self}.merge(options))
        end
      end
    end
  end
end
Changed self.class.name to self.name for correct opted_out result
module Mailkick
  module Model
    # Mixes opt-out scopes and opt-in/opt-out helpers into a model class.
    #
    # opts[:email_key] - attribute holding the email address (default :email)
    def mailkick_user(opts = {})
      email_key = opts[:email_key] || :email
      class_eval do
        scope :opted_out, lambda { |options = {}|
          # Inside a scope lambda `self` is the model class, so self.name
          # is the value stored in mailkick_opt_outs.user_type.
          binds = [self.name, true]
          if options[:list]
            query = "(mailkick_opt_outs.list IS NULL OR mailkick_opt_outs.list = ?)"
            binds << options[:list]
          else
            query = "mailkick_opt_outs.list IS NULL"
          end
          # Matches an opt-out either by email or by (user_id, user_type);
          # options[:not] inverts the EXISTS for the not_opted_out scope.
          where("#{options[:not] ? 'NOT ' : ''}EXISTS(SELECT * FROM mailkick_opt_outs WHERE (#{table_name}.#{email_key} = mailkick_opt_outs.email OR (#{table_name}.#{primary_key} = mailkick_opt_outs.user_id AND mailkick_opt_outs.user_type = ?)) AND mailkick_opt_outs.active = ? AND #{query})", *binds)
        }
        scope :not_opted_out, lambda { |options = {}|
          opted_out(options.merge(not: true))
        }
        # Instance helpers delegate to the Mailkick module with this
        # record's email and identity merged into the options.
        define_method :opted_out? do |options = {}|
          Mailkick.opted_out?({email: send(email_key), user: self}.merge(options))
        end
        define_method :opt_out do |options = {}|
          Mailkick.opt_out({email: send(email_key), user: self}.merge(options))
        end
        define_method :opt_in do |options = {}|
          Mailkick.opt_in({email: send(email_key), user: self}.merge(options))
        end
      end
    end
  end
end
|
# Podspec for AOTToolkit 0.1.0 — iOS helper/utility subspecs.
Pod::Spec.new do |s|
  s.name = "AOTToolkit"
  s.version = "0.1.0"
  s.summary = "A collection of helper/utility classes to aid iOS development."
  s.description = <<-DESC
This toolkit contains various helper classes and categories to aid in iOS development.
It currently encompasses the following modules:
* Crypto: HMAC utility
* UI: Convenient categories on UIView, UIViewController, UINavigationController and the like
* UICollectionView: Commonly used layouts for UICollectionView
* Util: General utilities (e.g. NSString, NSObject)
DESC
  s.homepage = "https://bitbucket.org/manaral/aottoolkit/"
  s.license = 'MIT'
  s.author = { "Alex Manarpies" => "alex@manarpies.com" }
  s.platform = :ios
  s.source = { :git => "git@bitbucket.org:manaral/aottoolkit.git", :tag => "0.1.0" }
  s.exclude_files = 'AOTToolkitDemo'
  # NOTE(review): linking XCTest in a distributed pod forces the test
  # framework on all consumers — confirm this is intended.
  s.frameworks = 'XCTest', 'Foundation'
  s.requires_arc = true
  # One subspec per module so consumers can depend on a subset.
  s.subspec 'UI' do |ui|
    ui.source_files = 'UI/**/*.{h,m}'
  end
  s.subspec 'Crypto' do |crypto|
    crypto.source_files = 'Crypto/**/*.{h,m}'
  end
  s.subspec 'UICollectionView' do |collectionView|
    collectionView.source_files = 'UICollectionView/**/*.{h,m}'
  end
  s.subspec 'Util' do |util|
    util.source_files = 'Util/**/*.{h,m}'
  end
end
Update podspec
# Podspec for AOTToolkit 0.1.0 (revised): drops the XCTest linkage and
# roots the subspec source globs under the AOTToolkit/ directory.
Pod::Spec.new do |s|
  s.name = "AOTToolkit"
  s.version = "0.1.0"
  s.summary = "A collection of helper/utility classes to aid iOS development."
  s.description = <<-DESC
This toolkit contains various helper classes and categories to aid in iOS development.
It currently encompasses the following modules:
* Crypto: HMAC utility
* UI: Convenient categories on UIView, UIViewController, UINavigationController and the like
* UICollectionView: Commonly used layouts for UICollectionView
* Util: General utilities (e.g. NSString, NSObject)
DESC
  s.homepage = "https://bitbucket.org/manaral/aottoolkit/"
  s.license = 'MIT'
  s.author = { "Alex Manarpies" => "alex@manarpies.com" }
  s.platform = :ios
  s.source = { :git => "git@bitbucket.org:manaral/aottoolkit.git", :tag => "0.1.0" }
  s.frameworks = 'Foundation'
  s.requires_arc = true
  # One subspec per module so consumers can depend on a subset.
  s.subspec 'UI' do |ui|
    ui.source_files = 'AOTToolkit/UI/**/*.{h,m}'
  end
  s.subspec 'Crypto' do |crypto|
    crypto.source_files = 'AOTToolkit/Crypto/**/*.{h,m}'
  end
  s.subspec 'UICollectionView' do |collectionView|
    collectionView.source_files = 'AOTToolkit/UICollectionView/**/*.{h,m}'
  end
  s.subspec 'Util' do |util|
    util.source_files = 'AOTToolkit/Util/**/*.{h,m}'
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for sextant 0.1.0 (machine-generated — make changes via the
# Jeweler configuration in the Rakefile, not here).
Gem::Specification.new do |s|
  s.name = "sextant"
  s.version = "0.1.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["schneems"]
  s.date = "2012-05-22"
  s.description = "Sextant is a Rails engine that quickly shows the routes available"
  s.email = "richard.schneeman@gmail.com"
  s.extra_rdoc_files = [
    "README.md"
  ]
  s.files = [
    ".travis.yml",
    "CHANGELOG.md",
    "Gemfile",
    "Gemfile.lock",
    "MIT-LICENSE",
    "README.md",
    "Rakefile",
    "VERSION",
    "app/controllers/rails/routes_controller.rb",
    "app/views/layouts/rails/routes.html.erb",
    "app/views/rails/routes/index.html.erb",
    "config/routes.rb",
    "lib/sextant.rb",
    "lib/sextant/engine.rb",
    "test/dummy/Rakefile",
    "test/dummy/app/controllers/application_controller.rb",
    "test/dummy/app/helpers/application_helper.rb",
    "test/dummy/app/views/layouts/application.html.erb",
    "test/dummy/config.ru",
    "test/dummy/config/application.rb",
    "test/dummy/config/boot.rb",
    "test/dummy/config/database.yml",
    "test/dummy/config/environment.rb",
    "test/dummy/config/environments/development.rb",
    "test/dummy/config/environments/production.rb",
    "test/dummy/config/environments/test.rb",
    "test/dummy/config/initializers/backtrace_silencers.rb",
    "test/dummy/config/initializers/inflections.rb",
    "test/dummy/config/initializers/mime_types.rb",
    "test/dummy/config/initializers/secret_token.rb",
    "test/dummy/config/initializers/session_store.rb",
    "test/dummy/config/locales/en.yml",
    "test/dummy/config/routes.rb",
    "test/dummy/public/404.html",
    "test/dummy/public/422.html",
    "test/dummy/public/500.html",
    "test/dummy/public/favicon.ico",
    "test/dummy/public/index.html",
    "test/dummy/public/javascripts/application.js",
    "test/dummy/public/javascripts/controls.js",
    "test/dummy/public/javascripts/dragdrop.js",
    "test/dummy/public/javascripts/effects.js",
    "test/dummy/public/javascripts/prototype.js",
    "test/dummy/public/javascripts/rails.js",
    "test/dummy/public/stylesheets/.gitkeep",
    "test/dummy/script/rails",
    "test/integration/sextant_test.rb",
    "test/route_inspector_test.rb",
    "test/support/integration_case.rb",
    "test/test_helper.rb"
  ]
  s.homepage = "http://github.com/schneems/sextant"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.10"
  s.summary = "Use Sextant to show you the route"
  # Jeweler emits three dependency branches to cope with old RubyGems
  # versions that lack specification_version / runtime-vs-development split.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activesupport>, [">= 3.1"])
      s.add_runtime_dependency(%q<rails>, [">= 3.1"])
      s.add_development_dependency(%q<rake>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_development_dependency(%q<capybara>, [">= 0.4.0"])
      s.add_development_dependency(%q<sqlite3>, [">= 0"])
      s.add_development_dependency(%q<launchy>, ["~> 2.1.0"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<simplecov>, [">= 0"])
    else
      s.add_dependency(%q<activesupport>, [">= 3.1"])
      s.add_dependency(%q<rails>, [">= 3.1"])
      s.add_dependency(%q<rake>, [">= 0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_dependency(%q<capybara>, [">= 0.4.0"])
      s.add_dependency(%q<sqlite3>, [">= 0"])
      s.add_dependency(%q<launchy>, ["~> 2.1.0"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<simplecov>, [">= 0"])
    end
  else
    s.add_dependency(%q<activesupport>, [">= 3.1"])
    s.add_dependency(%q<rails>, [">= 3.1"])
    s.add_dependency(%q<rake>, [">= 0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
    s.add_dependency(%q<capybara>, [">= 0.4.0"])
    s.add_dependency(%q<sqlite3>, [">= 0"])
    s.add_dependency(%q<launchy>, ["~> 2.1.0"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<simplecov>, [">= 0"])
  end
end
Regenerate gemspec for version 0.1.1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for sextant 0.1.1: bumps Rails/ActiveSupport floors to 3.2 and
# adds the gemspec itself to the packaged file list. Machine-generated —
# make changes via the Jeweler configuration in the Rakefile, not here.
Gem::Specification.new do |s|
  s.name = "sextant"
  s.version = "0.1.1"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["schneems"]
  s.date = "2012-05-23"
  s.description = "Sextant is a Rails engine that quickly shows the routes available"
  s.email = "richard.schneeman@gmail.com"
  s.extra_rdoc_files = [
    "README.md"
  ]
  s.files = [
    ".travis.yml",
    "CHANGELOG.md",
    "Gemfile",
    "Gemfile.lock",
    "MIT-LICENSE",
    "README.md",
    "Rakefile",
    "VERSION",
    "app/controllers/rails/routes_controller.rb",
    "app/views/layouts/rails/routes.html.erb",
    "app/views/rails/routes/index.html.erb",
    "config/routes.rb",
    "lib/sextant.rb",
    "lib/sextant/engine.rb",
    "sextant.gemspec",
    "test/dummy/Rakefile",
    "test/dummy/app/controllers/application_controller.rb",
    "test/dummy/app/helpers/application_helper.rb",
    "test/dummy/app/views/layouts/application.html.erb",
    "test/dummy/config.ru",
    "test/dummy/config/application.rb",
    "test/dummy/config/boot.rb",
    "test/dummy/config/database.yml",
    "test/dummy/config/environment.rb",
    "test/dummy/config/environments/development.rb",
    "test/dummy/config/environments/production.rb",
    "test/dummy/config/environments/test.rb",
    "test/dummy/config/initializers/backtrace_silencers.rb",
    "test/dummy/config/initializers/inflections.rb",
    "test/dummy/config/initializers/mime_types.rb",
    "test/dummy/config/initializers/secret_token.rb",
    "test/dummy/config/initializers/session_store.rb",
    "test/dummy/config/locales/en.yml",
    "test/dummy/config/routes.rb",
    "test/dummy/public/404.html",
    "test/dummy/public/422.html",
    "test/dummy/public/500.html",
    "test/dummy/public/favicon.ico",
    "test/dummy/public/index.html",
    "test/dummy/public/javascripts/application.js",
    "test/dummy/public/javascripts/controls.js",
    "test/dummy/public/javascripts/dragdrop.js",
    "test/dummy/public/javascripts/effects.js",
    "test/dummy/public/javascripts/prototype.js",
    "test/dummy/public/javascripts/rails.js",
    "test/dummy/public/stylesheets/.gitkeep",
    "test/dummy/script/rails",
    "test/integration/sextant_test.rb",
    "test/route_inspector_test.rb",
    "test/support/integration_case.rb",
    "test/test_helper.rb"
  ]
  s.homepage = "http://github.com/schneems/sextant"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.10"
  s.summary = "Use Sextant to show you the route"
  # Jeweler emits three dependency branches to cope with old RubyGems
  # versions that lack specification_version / runtime-vs-development split.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activesupport>, [">= 3.2"])
      s.add_runtime_dependency(%q<rails>, [">= 3.2"])
      s.add_development_dependency(%q<rake>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_development_dependency(%q<capybara>, [">= 0.4.0"])
      s.add_development_dependency(%q<sqlite3>, [">= 0"])
      s.add_development_dependency(%q<launchy>, ["~> 2.1.0"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<simplecov>, [">= 0"])
    else
      s.add_dependency(%q<activesupport>, [">= 3.2"])
      s.add_dependency(%q<rails>, [">= 3.2"])
      s.add_dependency(%q<rake>, [">= 0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_dependency(%q<capybara>, [">= 0.4.0"])
      s.add_dependency(%q<sqlite3>, [">= 0"])
      s.add_dependency(%q<launchy>, ["~> 2.1.0"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<simplecov>, [">= 0"])
    end
  else
    s.add_dependency(%q<activesupport>, [">= 3.2"])
    s.add_dependency(%q<rails>, [">= 3.2"])
    s.add_dependency(%q<rake>, [">= 0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
    s.add_dependency(%q<capybara>, [">= 0.4.0"])
    s.add_dependency(%q<sqlite3>, [">= 0"])
    s.add_dependency(%q<launchy>, ["~> 2.1.0"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<simplecov>, [">= 0"])
  end
end
|
Regenerate gemspec for version 0.1.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for sextant 0.1.0 (machine-generated — make changes via the
# Jeweler configuration in the Rakefile, not here).
Gem::Specification.new do |s|
  s.name = "sextant"
  s.version = "0.1.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["schneems"]
  s.date = "2012-05-22"
  s.description = "Sextant is a Rails engine that quickly shows the routes available"
  s.email = "richard.schneeman@gmail.com"
  s.extra_rdoc_files = [
    "README.md"
  ]
  s.files = [
    ".travis.yml",
    "CHANGELOG.md",
    "Gemfile",
    "Gemfile.lock",
    "MIT-LICENSE",
    "README.md",
    "Rakefile",
    "VERSION",
    "app/controllers/rails/routes_controller.rb",
    "app/views/layouts/rails/routes.html.erb",
    "app/views/rails/routes/index.html.erb",
    "config/routes.rb",
    "lib/sextant.rb",
    "lib/sextant/engine.rb",
    "test/dummy/Rakefile",
    "test/dummy/app/controllers/application_controller.rb",
    "test/dummy/app/helpers/application_helper.rb",
    "test/dummy/app/views/layouts/application.html.erb",
    "test/dummy/config.ru",
    "test/dummy/config/application.rb",
    "test/dummy/config/boot.rb",
    "test/dummy/config/database.yml",
    "test/dummy/config/environment.rb",
    "test/dummy/config/environments/development.rb",
    "test/dummy/config/environments/production.rb",
    "test/dummy/config/environments/test.rb",
    "test/dummy/config/initializers/backtrace_silencers.rb",
    "test/dummy/config/initializers/inflections.rb",
    "test/dummy/config/initializers/mime_types.rb",
    "test/dummy/config/initializers/secret_token.rb",
    "test/dummy/config/initializers/session_store.rb",
    "test/dummy/config/locales/en.yml",
    "test/dummy/config/routes.rb",
    "test/dummy/public/404.html",
    "test/dummy/public/422.html",
    "test/dummy/public/500.html",
    "test/dummy/public/favicon.ico",
    "test/dummy/public/index.html",
    "test/dummy/public/javascripts/application.js",
    "test/dummy/public/javascripts/controls.js",
    "test/dummy/public/javascripts/dragdrop.js",
    "test/dummy/public/javascripts/effects.js",
    "test/dummy/public/javascripts/prototype.js",
    "test/dummy/public/javascripts/rails.js",
    "test/dummy/public/stylesheets/.gitkeep",
    "test/dummy/script/rails",
    "test/integration/sextant_test.rb",
    "test/route_inspector_test.rb",
    "test/support/integration_case.rb",
    "test/test_helper.rb"
  ]
  s.homepage = "http://github.com/schneems/sextant"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.10"
  s.summary = "Use Sextant to show you the route"
  # Jeweler emits three dependency branches to cope with old RubyGems
  # versions that lack specification_version / runtime-vs-development split.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activesupport>, [">= 3.1"])
      s.add_runtime_dependency(%q<rails>, [">= 3.1"])
      s.add_development_dependency(%q<rake>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_development_dependency(%q<capybara>, [">= 0.4.0"])
      s.add_development_dependency(%q<sqlite3>, [">= 0"])
      s.add_development_dependency(%q<launchy>, ["~> 2.1.0"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<simplecov>, [">= 0"])
    else
      s.add_dependency(%q<activesupport>, [">= 3.1"])
      s.add_dependency(%q<rails>, [">= 3.1"])
      s.add_dependency(%q<rake>, [">= 0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_dependency(%q<capybara>, [">= 0.4.0"])
      s.add_dependency(%q<sqlite3>, [">= 0"])
      s.add_dependency(%q<launchy>, ["~> 2.1.0"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<simplecov>, [">= 0"])
    end
  else
    s.add_dependency(%q<activesupport>, [">= 3.1"])
    s.add_dependency(%q<rails>, [">= 3.1"])
    s.add_dependency(%q<rake>, [">= 0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
    s.add_dependency(%q<capybara>, [">= 0.4.0"])
    s.add_dependency(%q<sqlite3>, [">= 0"])
    s.add_dependency(%q<launchy>, ["~> 2.1.0"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<simplecov>, [">= 0"])
  end
end
|
#!/usr/bin/env ruby
#
# Copyright (C) 2017 Denver Gingerich <denver@ossguy.com>
#
# This file is part of sgx-catapult.
#
# sgx-catapult is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# sgx-catapult is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with sgx-catapult. If not, see <http://www.gnu.org/licenses/>.
require 'blather/client/dsl'
require 'json'
require 'net/http'
require 'redis/connection/hiredis'
require 'uri'
require 'goliath/api'
require 'goliath/server'
require 'log4r'
# Validate the command line before connecting anywhere.
# FIX: a usage error is a failure — exit non-zero (was `exit 0`) so callers
# and init scripts can detect the misconfiguration.
if ARGV.size != 8 then
	puts "Usage: sgx-catapult.rb <component_jid> <component_password> " +
		"<server_hostname> <server_port> " +
		"<http_port> " +
		"<redis_hostname> <redis_port> <delivery_receipt_url>"
	exit 1
end
# XMPP component that gateways chat messages to SMS via the Bandwidth
# Catapult REST API, with Redis-backed per-user credential storage.
#
# ARGV layout (see the usage string above):
#   0 component_jid  1 component_password  2 server_hostname  3 server_port
#   4 http_port      5 redis_hostname      6 redis_port       7 delivery_receipt_url
module SGXcatapult
	extend Blather::DSL

	# Start the component client (runs inside the surrounding EM loop).
	def self.run
		client.run
	end

	# Exposed so code outside the DSL (i.e. the HTTP webhook handler)
	# can push stanzas through the component's stream.
	def self.write(stanza)
		client.write(stanza)
	end

	# Build an XMPP error reply: re-attach the query payload, mark the
	# stanza type='error' and append the given defined-condition element
	# (urn:ietf:params:xml:ns:xmpp-stanzas).
	def self.error_msg(orig, query_node, type, name, text = nil)
		orig.add_child(query_node)
		orig.type = :error

		error = Nokogiri::XML::Node.new 'error', orig.document
		error['type'] = type
		orig.add_child(error)

		suberr = Nokogiri::XML::Node.new name, orig.document
		suberr['xmlns'] = 'urn:ietf:params:xml:ns:xmpp-stanzas'
		error.add_child(suberr)

		# TODO: add some explanatory xml:lang='en' text (see text param)
		puts "RESPONSE3: #{orig.inspect}"
		return orig
	end

	# component_jid, component_password, server_hostname, server_port
	setup ARGV[0], ARGV[1], ARGV[2], ARGV[3]

	# Inbound XMPP chat message -> outbound SMS through the Catapult API.
	message :chat?, :body do |m|
		num_dest = m.to.to_s.split('@', 2)[0]
		if num_dest[0] != '+'
			# TODO: add text re number not (yet) supported/implmnted
			write_to_stream error_msg(m.reply, m.body, :modify,
				'policy-violation')
			next
		end

		bare_jid = m.from.to_s.split('/', 2)[0]
		cred_key = "catapult_cred-" + bare_jid

		conn = Hiredis::Connection.new
		conn.connect(ARGV[5], ARGV[6].to_i)  # redis_hostname, redis_port
		conn.write ["EXISTS", cred_key]
		if conn.read == 0
			conn.disconnect
			# TODO: add text re credentials not being registered
			write_to_stream error_msg(m.reply, m.body, :auth,
				'registration-required')
			next
		end

		# creds: [user_id, api_token, api_secret, phone_num]
		conn.write ["LRANGE", cred_key, 0, 3]
		creds = conn.read
		conn.disconnect

		uri = URI.parse('https://api.catapult.inetwork.com')
		http = Net::HTTP.new(uri.host, uri.port)
		http.use_ssl = true

		request = Net::HTTP::Post.new('/v1/users/' + creds[0] +
			'/messages')
		request.basic_auth creds[1], creds[2]
		request.add_field('Content-Type', 'application/json')
		request.body = JSON.dump({
			'from' => creds[3],
			'to' => num_dest,
			'text' => m.body,
			'tag' => m.id, # TODO: message has it?
			'receiptRequested' => 'all',
			# FIX: the delivery receipt URL is ARGV[7]; ARGV[6]
			# is the Redis port (see the usage string above)
			'callbackUrl' => ARGV[7]
		})

		response = http.request(request)
		puts 'API response to send: ' + response.to_s + ' with code ' +
			response.code + ', body "' + response.body + '"'

		if response.code != '201'
			# TODO: add text re unexpected code; mention code number
			write_to_stream error_msg(m.reply, m.body, :cancel,
				'internal-server-error')
			next
		end

		# TODO: don't echo message; leave in until we rcv msgs properly
		begin
			puts "#{m.from.to_s} -> #{m.to.to_s} #{m.body}"
			msg = Blather::Stanza::Message.new(m.from, 'thx for "' +
				m.body + '"')
			msg.from = m.to
			write_to_stream msg
		rescue => e
			# TODO: do something better with this info
			say m.from, e.inspect
		end
	end

	# Service discovery: no child items to advertise.
	iq '/iq/ns:query', :ns =>
		'http://jabber.org/protocol/disco#items' do |i, xpath_result|
		write_to_stream i.reply
	end

	# Service discovery: advertise the gateway identity and features.
	iq '/iq/ns:query', :ns =>
		'http://jabber.org/protocol/disco#info' do |i, xpath_result|
		msg = i.reply
		msg.identities = [{:name =>
			'Soprani.ca Gateway to XMPP - Catapult',
			:type => 'sms-ctplt', :category => 'gateway'}]
		msg.features = ["jabber:iq:register",
			"jabber:iq:gateway", "jabber:iq:private",
			"http://jabber.org/protocol/disco#info",
			"http://jabber.org/protocol/commands",
			"http://jabber.org/protocol/muc"]
		write_to_stream msg
	end

	# In-band registration: :set stores Catapult credentials (after
	# validating them against the API), :get returns the signup form.
	iq '/iq/ns:query', :ns => 'jabber:iq:register' do |i, qn|
		puts "IQ: #{i.inspect}"

		if i.type == :set
			xn = qn.children.find { |v| v.element_name == "x" }

			user_id = ''
			api_token = ''
			api_secret = ''
			phone_num = ''
			if xn.nil?
				# Legacy bare jabber:iq:register fields.
				# NOTE(review): these assign the Nodes, not
				# their .text, unlike the data-form branch —
				# confirm downstream string use is intended.
				user_id = qn.children.find {
					|v| v.element_name == "nick" }
				api_token = qn.children.find {
					|v| v.element_name == "username" }
				api_secret = qn.children.find {
					|v| v.element_name == "password" }
				phone_num = qn.children.find {
					|v| v.element_name == "phone" }
			else
				# jabber:x:data form submission.
				for field in xn.children
					if field.element_name == "field"
						val = field.children.find { |v|
							v.element_name == "value" }
						case field['var']
						when 'nick'
							user_id = val.text
						when 'username'
							api_token = val.text
						when 'password'
							api_secret = val.text
						when 'phone'
							phone_num = val.text
						else
							# TODO: error
							puts "?: " + field['var']
						end
					end
				end
			end

			if phone_num[0] != '+'
				# TODO: add text re number not (yet) supported
				write_to_stream error_msg(i.reply, qn, :modify,
					'policy-violation')
				next
			end

			# Verify the supplied credentials own the number.
			uri = URI.parse('https://api.catapult.inetwork.com')
			http = Net::HTTP.new(uri.host, uri.port)
			http.use_ssl = true

			request = Net::HTTP::Get.new('/v1/users/' + user_id +
				'/phoneNumbers/' + phone_num)
			request.basic_auth api_token, api_secret
			response = http.request(request)

			puts 'API response: ' + response.to_s + ' with code ' +
				response.code + ', body "' + response.body + '"'

			if response.code == '200'
				params = JSON.parse response.body
				if params['numberState'] == 'enabled'
					num_key = "catapult_num-" + phone_num
					bare_jid = i.from.to_s.split('/', 2)[0]
					cred_key = "catapult_cred-" + bare_jid

					# TODO: pre-validate ARGV[6] is integer
					conn = Hiredis::Connection.new
					# FIX: connect to Redis via ARGV[5]/
					# ARGV[6] (was ARGV[4]/ARGV[5], i.e.
					# the HTTP port and Redis host); now
					# matches the chat handler above
					conn.connect(ARGV[5], ARGV[6].to_i)

					conn.write ["EXISTS", num_key]
					if conn.read == 1
						conn.disconnect
						# TODO: add txt re num exists
						write_to_stream error_msg(
							i.reply, qn, :cancel,
							'conflict')
						next
					end

					conn.write ["EXISTS", cred_key]
					if conn.read == 1
						conn.disconnect
						# TODO: add txt re already exist
						write_to_stream error_msg(
							i.reply, qn, :cancel,
							'conflict')
						next
					end

					conn.write ["RPUSH",num_key,bare_jid]
					if conn.read != 1
						conn.disconnect
						# TODO: catch/relay RuntimeError
						# TODO: add txt re push failure
						write_to_stream error_msg(
							i.reply, qn, :cancel,
							'internal-server-error')
						next
					end

					conn.write ["RPUSH",cred_key,user_id]
					conn.write ["RPUSH",cred_key,api_token]
					conn.write ["RPUSH",cred_key,api_secret]
					conn.write ["RPUSH",cred_key,phone_num]

					# FIX: `next` inside the old `for` loop
					# only skipped an iteration, so a failed
					# push still fell through to the success
					# reply below (after already writing an
					# error); track failure and bail out
					# once, after the loop.
					push_failed = false
					(1..4).each do |n|
						# TODO: catch/relay RuntimeError
						if conn.read != n
							push_failed = true
							break
						end
					end
					conn.disconnect
					if push_failed
						write_to_stream error_msg(
							i.reply, qn, :cancel,
							'internal-server-error')
						next
					end

					write_to_stream i.reply
				else
					# TODO: add text re number disabled
					write_to_stream error_msg(i.reply, qn,
						:modify, 'not-acceptable')
				end
			elsif response.code == '401'
				# TODO: add text re bad credentials
				write_to_stream error_msg(i.reply, qn, :auth,
					'not-authorized')
			elsif response.code == '404'
				# TODO: add text re number not found or disabled
				write_to_stream error_msg(i.reply, qn, :cancel,
					'item-not-found')
			else
				# TODO: add text re misc error, and mention code
				write_to_stream error_msg(i.reply, qn, :modify,
					'not-acceptable')
			end
		elsif i.type == :get
			orig = i.reply

			msg = Nokogiri::XML::Node.new 'query',orig.document
			msg['xmlns'] = 'jabber:iq:register'

			# Plain-text instructions plus legacy field elements for
			# clients that do not support data forms.
			n1 = Nokogiri::XML::Node.new 'instructions',msg.document
			n1.content= "Enter the information from your Account " +
				"page as well as the Phone Number\nin your " +
				"account you want to use (ie. '+12345678901')" +
				".\nUser Id is nick, API Token is username, " +
				"API Secret is password, Phone Number is phone"+
				".\n\nThe source code for this gateway is at " +
				"https://github.com/ossguy/sgx-catapult ." +
				"\nCopyright (C) 2017 Denver Gingerich, " +
				"licensed under AGPLv3+."
			n2 = Nokogiri::XML::Node.new 'nick',msg.document
			n3 = Nokogiri::XML::Node.new 'username',msg.document
			n4 = Nokogiri::XML::Node.new 'password',msg.document
			n5 = Nokogiri::XML::Node.new 'phone',msg.document
			msg.add_child(n1)
			msg.add_child(n2)
			msg.add_child(n3)
			msg.add_child(n4)
			msg.add_child(n5)

			# Data form (jabber:x:data) for capable clients.
			x = Blather::Stanza::X.new :form, [
				{:required => true, :type => :"text-single",
					:label => 'User Id', :var => 'nick'},
				{:required => true, :type => :"text-single",
					:label => 'API Token', :var => 'username'},
				{:required => true, :type => :"text-private",
					:label => 'API Secret', :var => 'password'},
				{:required => true, :type => :"text-single",
					:label => 'Phone Number', :var => 'phone'}
			]
			x.title= 'Register for ' +
				'Soprani.ca Gateway to XMPP - Catapult'
			x.instructions= "Enter the details from your Account " +
				"page as well as the Phone Number\nin your " +
				"account you want to use (ie. '+12345678901')" +
				".\n\nThe source code for this gateway is at " +
				"https://github.com/ossguy/sgx-catapult ." +
				"\nCopyright (C) 2017 Denver Gingerich, " +
				"licensed under AGPLv3+."
			msg.add_child(x)
			orig.add_child(msg)

			puts "RESPONSE2: #{orig.inspect}"
			write_to_stream orig
			puts "SENT"
		end
	end

	subscription(:request?) do |s|
		# TODO: are these the best to return? really need '!' here?
		#write_to_stream s.approve!
		#write_to_stream s.request!
	end
end
# Install signal handlers so INT/TERM shut the gateway down cleanly
# before stopping the EventMachine reactor.
%i[INT TERM].each do |signal|
  trap(signal) do
    puts 'Shutting down gateway...'
    SGXcatapult.shutdown
    puts 'Gateway has terminated.'
    EM.stop
  end
end
# HTTP endpoint for Catapult delivery-receipt webhooks; currently it
# just pushes a canned test stanza through the XMPP stream and ACKs.
class WebhookHandler < Goliath::API
  def response(env)
    stanza = Blather::Stanza::Message.new('test@localhost', 'hi')
    SGXcatapult.write(stanza)
    [200, {}, "OK"]
  end
end
# Boot everything inside one EventMachine reactor: the XMPP component
# plus a local HTTP server that receives Catapult webhooks.
EM.run do
  SGXcatapult.run
  # NOTE(review): this copy reads the HTTP listen port from ARGV[4],
  # which is also used as the Redis hostname elsewhere in this copy's
  # handlers; the later revision below moved the port to ARGV[7].
  # Confirm which argument layout this version actually expects.
  server = Goliath::Server.new('127.0.0.1', ARGV[4].to_i)
  server.api = WebhookHandler.new
  # Wire the API instance through Goliath's Rack builder.
  server.app = Goliath::Rack::Builder.build(server.api.class, server.api)
  server.logger = Log4r::Logger.new('goliath')
  server.logger.add(Log4r::StdoutOutputter.new('console'))
  server.logger.level = Log4r::INFO
  server.start
end
some minor updates to the recently-merged 26d6c4f
* include new copyright line, as the file has an additional author now
* split up the `require` lines a bit as we're not alphabetical anymore
* move/rename the HTTP listen port in the command-line parameters
* revert the ARGV change, since other spots were missed in 26d6c4f; we are reordering the arguments anyway
* add comment to #write so I remember why it was added to begin with
#!/usr/bin/env ruby
#
# Copyright (C) 2017 Denver Gingerich <denver@ossguy.com>
# Copyright (C) 2017 Stephen Paul Weber <singpolyma@singpolyma.net>
#
# This file is part of sgx-catapult.
#
# sgx-catapult is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# sgx-catapult is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with sgx-catapult. If not, see <http://www.gnu.org/licenses/>.
require 'blather/client/dsl'
require 'json'
require 'net/http'
require 'redis/connection/hiredis'
require 'uri'
require 'goliath/api'
require 'goliath/server'
require 'log4r'
# Validate the command line before doing anything else: the gateway
# needs exactly 8 positional arguments (XMPP component details, Redis
# location, delivery-receipt URL, and the HTTP listen port).
if ARGV.size != 8
  # Fixed: usage errors belong on stderr and are failures, so exit
  # non-zero (the original printed to stdout and exited 0).
  warn "Usage: sgx-catapult.rb <component_jid> <component_password> " +
       "<server_hostname> <server_port> " +
       "<redis_hostname> <redis_port> <delivery_receipt_url> " +
       "<http_listen_port>"
  exit 1
end
module SGXcatapult
extend Blather::DSL
# Start the Blather XMPP client; runs inside the EventMachine loop.
def self.run
  client.run
end
# so classes outside this module can write messages, too
# (e.g. WebhookHandler pushes stanzas through the component stream).
def self.write(stanza)
  client.write(stanza)
end
# Build an XMPP error reply in place on a reply stanza.
#
# orig       - the reply stanza to mutate (also returned)
# query_node - the original query/body node, re-attached so the sender
#              sees what was rejected
# type       - error type attribute (:modify, :cancel, :auth, ...)
# name       - defined-condition element name from RFC 6120
# text       - currently unused; see TODO below
def self.error_msg(orig, query_node, type, name, text = nil)
  orig.add_child(query_node)
  orig.type = :error
  error = Nokogiri::XML::Node.new 'error', orig.document
  error['type'] = type
  orig.add_child(error)
  suberr = Nokogiri::XML::Node.new name, orig.document
  suberr['xmlns'] = 'urn:ietf:params:xml:ns:xmpp-stanzas'
  error.add_child(suberr)
  # TODO: add some explanatory xml:lang='en' text (see text param)
  puts "RESPONSE3: #{orig.inspect}"
  return orig
end
# Connect the component: jid, password, server host, server port.
setup ARGV[0], ARGV[1], ARGV[2], ARGV[3]

# Outgoing SMS path: a chat message addressed to <number>@component is
# relayed to the Catapult REST API using the sender's stored credentials.
message :chat?, :body do |m|
  # The localpart of the destination JID is the E.164 phone number.
  num_dest = m.to.to_s.split('@', 2)[0]
  if num_dest[0] != '+'
    # TODO: add text re number not (yet) supported/implmnted
    write_to_stream error_msg(m.reply, m.body, :modify,
      'policy-violation')
    next
  end

  # Credentials are keyed by the sender's bare JID in Redis.
  bare_jid = m.from.to_s.split('/', 2)[0]
  cred_key = "catapult_cred-" + bare_jid

  conn = Hiredis::Connection.new
  conn.connect(ARGV[4], ARGV[5].to_i)
  conn.write ["EXISTS", cred_key]
  if conn.read == 0
    conn.disconnect
    # TODO: add text re credentials not being registered
    write_to_stream error_msg(m.reply, m.body, :auth,
      'registration-required')
    next
  end

  # creds list layout: [user_id, api_token, api_secret, phone_number]
  conn.write ["LRANGE", cred_key, 0, 3]
  creds = conn.read
  conn.disconnect

  uri = URI.parse('https://api.catapult.inetwork.com')
  http = Net::HTTP.new(uri.host, uri.port)
  http.use_ssl = true
  request = Net::HTTP::Post.new('/v1/users/' + creds[0] +
    '/messages')
  request.basic_auth creds[1], creds[2]
  request.add_field('Content-Type', 'application/json')
  request.body = JSON.dump({
    'from' => creds[3],
    'to' => num_dest,
    'text' => m.body,
    'tag' => m.id, # TODO: message has it?
    'receiptRequested' => 'all',
    'callbackUrl' => ARGV[6]
  })
  response = http.request(request)
  puts 'API response to send: ' + response.to_s + ' with code ' +
    response.code + ', body "' + response.body + '"'
  # Catapult returns 201 Created on a successful send.
  if response.code != '201'
    # TODO: add text re unexpected code; mention code number
    write_to_stream error_msg(m.reply, m.body, :cancel,
      'internal-server-error')
    next
  end

  # TODO: don't echo message; leave in until we rcv msgs properly
  begin
    puts "#{m.from.to_s} -> #{m.to.to_s} #{m.body}"
    msg = Blather::Stanza::Message.new(m.from, 'thx for "' +
      m.body + '"')
    msg.from = m.to
    write_to_stream msg
  rescue => e
    # TODO: do something better with this info
    say m.from, e.inspect
  end
end
# Service discovery (disco#items): we expose no child items, so an
# empty reply is the correct response.
iq '/iq/ns:query', :ns =>
  'http://jabber.org/protocol/disco#items' do |i, xpath_result|
  write_to_stream i.reply
end
# Service discovery (disco#info): advertise this component as an SMS
# gateway and list the protocol features it claims to support.
iq '/iq/ns:query', :ns =>
  'http://jabber.org/protocol/disco#info' do |i, xpath_result|
  msg = i.reply
  msg.identities = [{:name =>
    'Soprani.ca Gateway to XMPP - Catapult',
    :type => 'sms-ctplt', :category => 'gateway'}]
  msg.features = ["jabber:iq:register",
    "jabber:iq:gateway", "jabber:iq:private",
    "http://jabber.org/protocol/disco#info",
    "http://jabber.org/protocol/commands",
    "http://jabber.org/protocol/muc"]
  write_to_stream msg
end
# In-band registration (XEP-0077): a :set stores the user's Catapult
# credentials in Redis after validating them against the API; a :get
# returns the registration form (both legacy fields and a data form).
iq '/iq/ns:query', :ns => 'jabber:iq:register' do |i, qn|
  puts "IQ: #{i.inspect}"
  if i.type == :set
    # Prefer the x-data form if present; fall back to bare fields.
    xn = qn.children.find { |v| v.element_name == "x" }

    user_id = ''
    api_token = ''
    api_secret = ''
    phone_num = ''
    if xn.nil?
      # NOTE(review): these assignments store Nokogiri element nodes
      # (or nil), not their text, yet they are later concatenated into
      # the request path and compared as strings -- confirm whether
      # this legacy-field path actually works or needs `.text`.
      user_id = qn.children.find {
        |v| v.element_name == "nick" }
      api_token = qn.children.find {
        |v| v.element_name == "username" }
      api_secret = qn.children.find {
        |v| v.element_name == "password" }
      phone_num = qn.children.find {
        |v| v.element_name == "phone" }
    else
      # Pull each <field var=...><value> pair out of the data form.
      for field in xn.children
        if field.element_name == "field"
          val = field.children.find { |v|
            v.element_name == "value" }

          case field['var']
          when 'nick'
            user_id = val.text
          when 'username'
            api_token = val.text
          when 'password'
            api_secret = val.text
          when 'phone'
            phone_num = val.text
          else
            # TODO: error
            puts "?: " +field['var']
          end
        end
      end
    end

    # Only E.164-style numbers are accepted.
    if phone_num[0] != '+'
      # TODO: add text re number not (yet) supported
      write_to_stream error_msg(i.reply, qn, :modify,
        'policy-violation')
      next
    end

    # Verify the credentials really own this number before saving.
    uri = URI.parse('https://api.catapult.inetwork.com')
    http = Net::HTTP.new(uri.host, uri.port)
    http.use_ssl = true
    request = Net::HTTP::Get.new('/v1/users/' + user_id +
      '/phoneNumbers/' + phone_num)
    request.basic_auth api_token, api_secret
    response = http.request(request)
    puts 'API response: ' + response.to_s + ' with code ' +
      response.code + ', body "' + response.body + '"'

    if response.code == '200'
      params = JSON.parse response.body
      if params['numberState'] == 'enabled'
        num_key = "catapult_num-" + phone_num
        bare_jid = i.from.to_s.split('/', 2)[0]
        cred_key = "catapult_cred-" + bare_jid

        # TODO: pre-validate ARGV[5] is integer
        conn = Hiredis::Connection.new
        conn.connect(ARGV[4], ARGV[5].to_i)

        # Refuse to register a number or JID twice.
        conn.write ["EXISTS", num_key]
        if conn.read == 1
          conn.disconnect
          # TODO: add txt re num exists
          write_to_stream error_msg(
            i.reply, qn, :cancel,
            'conflict')
          next
        end

        conn.write ["EXISTS", cred_key]
        if conn.read == 1
          conn.disconnect
          # TODO: add txt re already exist
          write_to_stream error_msg(
            i.reply, qn, :cancel,
            'conflict')
          next
        end

        conn.write ["RPUSH",num_key,bare_jid]
        if conn.read != 1
          conn.disconnect
          # TODO: catch/relay RuntimeError
          # TODO: add txt re push failure
          write_to_stream error_msg(
            i.reply, qn, :cancel,
            'internal-server-error')
          next
        end

        # Store creds as a 4-element list; each RPUSH should
        # return the new length (1..4).
        conn.write ["RPUSH",cred_key,user_id]
        conn.write ["RPUSH",cred_key,api_token]
        conn.write ["RPUSH",cred_key,api_secret]
        conn.write ["RPUSH",cred_key,phone_num]

        for n in 1..4 do
          # TODO: catch/relay RuntimeError
          result = conn.read
          if result != n
            conn.disconnect
            write_to_stream(
              error_msg(
                i.reply, qn, :cancel,
                'internal-server-error')
            )
            # NOTE(review): this `next` only skips one
            # iteration of the for-loop, so after the
            # disconnect the loop keeps calling conn.read;
            # it probably intends to abort the handler.
            next
          end
        end

        conn.disconnect
        # Empty result = registration succeeded.
        write_to_stream i.reply
      else
        # TODO: add text re number disabled
        write_to_stream error_msg(i.reply, qn,
          :modify, 'not-acceptable')
      end
    elsif response.code == '401'
      # TODO: add text re bad credentials
      write_to_stream error_msg(i.reply, qn, :auth,
        'not-authorized')
    elsif response.code == '404'
      # TODO: add text re number not found or disabled
      write_to_stream error_msg(i.reply, qn, :cancel,
        'item-not-found')
    else
      # TODO: add text re misc error, and mention code
      write_to_stream error_msg(i.reply, qn, :modify,
        'not-acceptable')
    end
  elsif i.type == :get
    # Registration form: legacy jabber:iq:register fields plus an
    # equivalent XEP-0004 data form for modern clients.
    orig = i.reply
    msg = Nokogiri::XML::Node.new 'query',orig.document
    msg['xmlns'] = 'jabber:iq:register'

    n1 = Nokogiri::XML::Node.new 'instructions',msg.document
    n1.content= "Enter the information from your Account " +
      "page as well as the Phone Number\nin your " +
      "account you want to use (ie. '+12345678901')" +
      ".\nUser Id is nick, API Token is username, " +
      "API Secret is password, Phone Number is phone"+
      ".\n\nThe source code for this gateway is at " +
      "https://github.com/ossguy/sgx-catapult ." +
      "\nCopyright (C) 2017 Denver Gingerich, " +
      "licensed under AGPLv3+."
    n2 = Nokogiri::XML::Node.new 'nick',msg.document
    n3 = Nokogiri::XML::Node.new 'username',msg.document
    n4 = Nokogiri::XML::Node.new 'password',msg.document
    n5 = Nokogiri::XML::Node.new 'phone',msg.document
    msg.add_child(n1)
    msg.add_child(n2)
    msg.add_child(n3)
    msg.add_child(n4)
    msg.add_child(n5)

    x = Blather::Stanza::X.new :form, [
      {:required => true, :type => :"text-single",
        :label => 'User Id', :var => 'nick'},
      {:required => true, :type => :"text-single",
        :label => 'API Token', :var => 'username'},
      {:required => true, :type => :"text-private",
        :label => 'API Secret', :var => 'password'},
      {:required => true, :type => :"text-single",
        :label => 'Phone Number', :var => 'phone'}
    ]
    x.title= 'Register for ' +
      'Soprani.ca Gateway to XMPP - Catapult'
    x.instructions= "Enter the details from your Account " +
      "page as well as the Phone Number\nin your " +
      "account you want to use (ie. '+12345678901')" +
      ".\n\nThe source code for this gateway is at " +
      "https://github.com/ossguy/sgx-catapult ." +
      "\nCopyright (C) 2017 Denver Gingerich, " +
      "licensed under AGPLv3+."
    msg.add_child(x)

    orig.add_child(msg)
    puts "RESPONSE2: #{orig.inspect}"
    write_to_stream orig
    puts "SENT"
  end
end
# Presence subscription requests are currently ignored (both replies
# are commented out while the right behavior is decided).
subscription(:request?) do |s|
  # TODO: are these the best to return? really need '!' here?
  #write_to_stream s.approve!
  #write_to_stream s.request!
end
end
# Install signal handlers so INT/TERM shut the gateway down cleanly
# before stopping the EventMachine reactor.
%i[INT TERM].each do |signal|
  trap(signal) do
    puts 'Shutting down gateway...'
    SGXcatapult.shutdown
    puts 'Gateway has terminated.'
    EM.stop
  end
end
# HTTP endpoint for Catapult delivery-receipt webhooks; currently it
# just pushes a canned test stanza through the XMPP stream and ACKs.
class WebhookHandler < Goliath::API
  def response(env)
    stanza = Blather::Stanza::Message.new('test@localhost', 'hi')
    SGXcatapult.write(stanza)
    [200, {}, "OK"]
  end
end
# Boot everything inside one EventMachine reactor: the XMPP component
# plus a local HTTP server that receives Catapult webhooks.
EM.run do
  SGXcatapult.run

  # <http_listen_port> is ARGV[7], matching the usage string above.
  server = Goliath::Server.new('127.0.0.1', ARGV[7].to_i)
  server.api = WebhookHandler.new
  # Wire the API instance through Goliath's Rack builder.
  server.app = Goliath::Rack::Builder.build(server.api.class, server.api)
  server.logger = Log4r::Logger.new('goliath')
  server.logger.add(Log4r::StdoutOutputter.new('console'))
  server.logger.level = Log4r::INFO
  server.start
end
# ---------------------------------------------------------------------
# Array with values
# One-dimensional BASIC array whose cells live in a hash keyed by
# coordinate objects (built via AbstractElement.make_coord).
class BASICArray
  # Build the backing hash for a 1-D array of size dims[0], with every
  # cell set to init_value. The index base (0 or 1) comes from $options.
  def self.make_array(dims, init_value)
    values = {}
    base = $options['base'].value
    (base..dims[0].to_i).each do |col|
      coords = AbstractElement.make_coord(col)
      values[coords] = init_value
    end
    values
  end

  # All-zero values for a 1-D dimension list; 2-D is rejected here.
  def self.zero_values(dimensions)
    case dimensions.size
    when 1
      BASICArray.make_array(dimensions, NumericConstant.new(0))
    when 2
      raise BASICSyntaxError, 'Too many dimensions in array'
    end
  end

  # All-one values for a 1-D dimension list; 2-D is rejected here.
  def self.one_values(dimensions)
    case dimensions.size
    when 1
      BASICArray.make_array(dimensions, NumericConstant.new(1))
    when 2
      raise BASICSyntaxError, 'Too many dimensions in array'
    end
  end

  attr_reader :dimensions

  def initialize(dimensions, values)
    @dimensions = dimensions
    @values = values
  end

  # Deep-ish copy of this array.
  # Fixed: the original called Array.new(@dimensions.clone, ...), which
  # raises TypeError; a BASICArray clone must be a BASICArray, mirroring
  # Matrix#clone.
  def clone
    BASICArray.new(@dimensions.clone, @values.clone)
  end

  def numeric_constant?
    value = get_value(0)
    value.numeric_constant?
  end

  def text_constant?
    value = get_value(0)
    value.text_constant?
  end

  def scalar?
    false
  end

  def array?
    true
  end

  def matrix?
    false
  end

  # Snapshot every cell (filling holes with zero via get_value), using
  # the interpreter's current index base.
  def values(interpreter)
    values = {}
    base = interpreter.base
    (base..@dimensions[0].to_i).each do |col|
      value = get_value(col)
      coords = AbstractElement.make_coord(col)
      values[coords] = value
    end
    values
  end

  # Cell accessor; unset cells read as numeric zero.
  def get_value(col)
    coords = AbstractElement.make_coord(col)
    return @values[coords] if @values.key?(coords)
    NumericConstant.new(0)
  end

  def to_s
    'ARRAY: ' + @values.to_s
  end

  # PRINT the array (optionally under a format list).
  def print(printer, interpreter, formats)
    case @dimensions.size
    when 0
      raise BASICSyntaxError, 'Need dimension in array'
    when 1
      print_1(printer, interpreter, formats)
    else
      raise BASICSyntaxError, 'Too many dimensions in array'
    end
  end

  # WRITE the array (machine-readable, comma separated).
  def write(printer, interpreter)
    case @dimensions.size
    when 0
      raise BASICSyntaxError, 'Need dimension in array'
    when 1
      write_1(printer, interpreter)
    else
      raise BASICSyntaxError, 'Too many dimensions in array'
    end
  end

  # Pack cells into a text constant: cell 0 holds the count, cells 1..n
  # hold character codes.
  def pack
    count = get_value(0).to_i
    result = ''
    (1..count).each do |index|
      value = get_value(index)
      result += value.to_i.chr unless value.nil?
    end
    quoted = '"' + result + '"'
    token = TextConstantToken.new(quoted)
    TextConstant.new(token)
  end

  private

  # Print each cell, honoring the configured field separator and any
  # format items that consume values.
  def print_1(printer, interpreter, formats)
    n_cols = @dimensions[0].to_i
    fs_carriage = CarriageControl.new($options['field_sep'].value)
    base = interpreter.base
    (base..n_cols).each do |col|
      if formats.nil?
        value = get_value(col)
        value.print(printer)
      else
        # apply using formats
        formats.each do |format|
          value = nil
          value = get_value(col) if format.wants_item
          text = format.format(value)
          text.print(printer)
        end
      end
      fs_carriage.print(printer, interpreter) if col < n_cols
    end
  end

  # Write each cell separated by commas.
  def write_1(printer, interpreter)
    n_cols = @dimensions[0].to_i
    fs_carriage = CarriageControl.new(',')
    base = interpreter.base
    (base..n_cols).each do |col|
      value = get_value(col)
      value.write(printer)
      fs_carriage.write(printer, interpreter) if col < n_cols
    end
  end
end
# Matrix with values
# 1-D or 2-D BASIC matrix backed by a coordinate-keyed hash; supports
# printing, transpose, determinant, and Gauss-Jordan inversion.
class Matrix
  # Backing hash for a 1-D matrix of size dims[0], all cells init_value.
  def self.make_array(dims, init_value)
    values = {}
    base = $options['base'].value
    (base..dims[0].to_i).each do |col|
      coords = AbstractElement.make_coord(col)
      values[coords] = init_value
    end
    values
  end

  # Backing hash for a 2-D matrix dims[0] x dims[1], all cells init_value.
  def self.make_matrix(dims, init_value)
    values = {}
    base = $options['base'].value
    (base..dims[0].to_i).each do |row|
      (base..dims[1].to_i).each do |col|
        coords = AbstractElement.make_coords(row, col)
        values[coords] = init_value
      end
    end
    values
  end

  def self.zero_values(dimensions)
    case dimensions.size
    when 1
      make_array(dimensions, NumericConstant.new(0))
    when 2
      make_matrix(dimensions, NumericConstant.new(0))
    end
  end

  def self.one_values(dimensions)
    case dimensions.size
    when 1
      make_array(dimensions, NumericConstant.new(1))
    when 2
      make_matrix(dimensions, NumericConstant.new(1))
    end
  end

  # Identity matrix values: zeros with ones on the diagonal.
  def self.identity_values(dimensions)
    new_values = make_matrix(dimensions, NumericConstant.new(0))
    one = NumericConstant.new(1)
    base = $options['base'].value
    (base..dimensions[0].to_i).each do |row|
      coords = AbstractElement.make_coords(row, row)
      new_values[coords] = one
    end
    new_values
  end

  attr_reader :dimensions

  def initialize(dimensions, values)
    @dimensions = dimensions
    @values = values
  end

  def clone
    Matrix.new(@dimensions.clone, @values.clone)
  end

  def numeric_constant?
    value = get_value_2(0, 0)
    value.numeric_constant?
  end

  def text_constant?
    value = get_value_2(0, 0)
    value.text_constant?
  end

  def scalar?
    false
  end

  def array?
    false
  end

  def matrix?
    true
  end

  # Snapshot of all cells for a 1-D matrix.
  def values_1
    values = {}
    base = $options['base'].value
    (base..@dimensions[0].to_i).each do |col|
      value = get_value_1(col)
      coords = AbstractElement.make_coord(col)
      values[coords] = value
    end
    values
  end

  # Snapshot of all cells for a 2-D matrix.
  def values_2
    values = {}
    base = $options['base'].value
    (base..@dimensions[0].to_i).each do |row|
      (base..@dimensions[1].to_i).each do |col|
        value = get_value_2(row, col)
        coords = AbstractElement.make_coords(row, col)
        values[coords] = value
      end
    end
    values
  end

  # 1-D cell accessor; unset cells read as numeric zero.
  def get_value_1(col)
    coords = AbstractElement.make_coord(col)
    return @values[coords] if @values.key?(coords)
    NumericConstant.new(0)
  end

  # 2-D cell accessor; unset cells read as numeric zero.
  def get_value_2(row, col)
    coords = AbstractElement.make_coords(row, col)
    return @values[coords] if @values.key?(coords)
    NumericConstant.new(0)
  end

  def to_s
    'MATRIX: ' + @values.to_s
  end

  # PRINT the matrix, dispatching on its rank.
  def print(printer, interpreter, formats)
    case @dimensions.size
    when 0
      raise BASICSyntaxError, 'Need dimensions in matrix'
    when 1
      print_1(printer, interpreter, formats)
    when 2
      print_2(printer, interpreter, formats)
    else
      raise BASICSyntaxError, 'Too many dimensions in matrix'
    end
  end

  # WRITE the matrix, dispatching on its rank.
  def write(printer, interpreter)
    case @dimensions.size
    when 0
      raise BASICSyntaxError, 'Need dimensions in matrix'
    when 1
      write_1(printer, interpreter)
    when 2
      write_2(printer, interpreter)
    else
      raise BASICSyntaxError, 'Too many dimensions in matrix'
    end
  end

  # Values of the transpose (2-D only).
  def transpose_values
    raise(BASICExpressionError, 'TRN requires matrix') unless
      @dimensions.size == 2
    new_values = {}
    base = $options['base'].value
    (base..@dimensions[0].to_i).each do |row|
      (base..@dimensions[1].to_i).each do |col|
        value = get_value_2(row, col)
        coords = AbstractElement.make_coords(col, row)
        new_values[coords] = value
      end
    end
    new_values
  end

  # Determinant of a square 2-D matrix.
  # NOTE(review): the 1x1 case reads cell (1,1) regardless of the
  # configured index base -- confirm behavior under base 0.
  def determinant
    raise(BASICSyntaxError, 'DET requires matrix') unless @dimensions.size == 2
    raise BASICRuntimeError.new(:te_mat_no_sq, 'DET') if
      @dimensions[1] != @dimensions[0]
    case @dimensions[0].to_i
    when 1
      get_value_2(1, 1)
    when 2
      determinant_2
    else
      determinant_n
    end
  end

  # Inverse via Gauss-Jordan elimination against an identity matrix.
  def inverse_values
    # set all values
    values = values_2

    # create identity matrix
    inv_values = Matrix.identity_values(@dimensions)

    n_rows = @dimensions[0].to_i
    n_cols = @dimensions[1].to_i

    # convert to upper triangular form
    upper_triangle(n_cols, n_rows, values, inv_values)
    # convert to lower triangular form
    lower_triangle(n_cols, values, inv_values)
    # normalize to ones
    unitize(n_cols, n_rows, values, inv_values)

    inv_values
  end

  # Print a 1-D matrix followed by two newlines.
  def print_1(printer, interpreter, formats)
    n_cols = @dimensions[0].to_i
    base = $options['base'].value
    fs_carriage = CarriageControl.new($options['field_sep'].value)
    # gs_carriage = CarriageControl.new('NL')
    # rs_carriage = CarriageControl.new('NL')
    (base..n_cols).each do |col|
      if formats.nil?
        value = get_value_1(col)
        value.print(printer)
      else
        # apply using formats
        # Fixed: the original iterated `format.each` (an undefined
        # name) and printed `value` before assigning it; this now
        # matches BASICArray#print_1 and Matrix#print_2.
        formats.each do |format|
          value = nil
          value = get_value_1(col) if format.wants_item
          text = format.format(value)
          text.print(printer)
        end
      end
      fs_carriage.print(printer, interpreter) if col < n_cols
    end
    printer.newline
    printer.newline
  end

  # Print a 2-D matrix row by row with group/record separators.
  def print_2(printer, interpreter, formats)
    n_rows = @dimensions[0].to_i
    n_cols = @dimensions[1].to_i
    base = $options['base'].value
    fs_carriage = CarriageControl.new($options['field_sep'].value)
    gs_carriage = CarriageControl.new('NL')
    rs_carriage = CarriageControl.new('NL')
    (base..n_rows).each do |row|
      (base..n_cols).each do |col|
        if formats.nil?
          value = get_value_2(row, col)
          value.print(printer)
        else
          # apply using formats
          formats.each do |format|
            value = nil
            value = get_value_2(row, col) if format.wants_item
            text = format.format(value)
            text.print(printer)
          end
        end
        fs_carriage.print(printer, interpreter) if col < n_cols
      end
      gs_carriage.print(printer, interpreter) if row < n_rows
    end
    rs_carriage.print(printer, interpreter)
  end

  # Write a 1-D matrix, comma separated, ending with a newline.
  def write_1(printer, interpreter)
    n_cols = @dimensions[0].to_i
    base = $options['base'].value
    fs_carriage = CarriageControl.new(',')
    # gs_carriage = CarriageControl.new(';')
    rs_carriage = CarriageControl.new('NL')
    (base..n_cols).each do |col|
      value = get_value_1(col)
      value.write(printer)
      fs_carriage.write(printer, interpreter) if col < n_cols
    end
    rs_carriage.write(printer, interpreter)
  end

  # Write a 2-D matrix: commas within rows, semicolons between rows.
  def write_2(printer, interpreter)
    n_rows = @dimensions[0].to_i
    n_cols = @dimensions[1].to_i
    base = $options['base'].value
    fs_carriage = CarriageControl.new(',')
    gs_carriage = CarriageControl.new(';')
    rs_carriage = CarriageControl.new('NL')
    (base..n_rows).each do |row|
      (base..n_cols).each do |col|
        value = get_value_2(row, col)
        value.write(printer)
        fs_carriage.write(printer, interpreter) if col < n_cols
      end
      gs_carriage.write(printer, interpreter) if row < n_rows
    end
    rs_carriage.write(printer, interpreter)
  end

  # 2x2 determinant: ad - bc.
  def determinant_2
    a = get_value_2(1, 1)
    b = get_value_2(1, 2)
    c = get_value_2(2, 1)
    d = get_value_2(2, 2)
    a.multiply(d) - b.multiply(c)
  end

  # NxN determinant by cofactor expansion along the first row.
  def determinant_n
    minus_one = NumericConstant.new(-1)
    sign = NumericConstant.new(1)
    det = NumericConstant.new(0)
    base = $options['base'].value
    # for each element in first row
    (base..@dimensions[1].to_i).each do |col|
      v = get_value_2(1, col)
      # create submatrix
      subm = submatrix(1, col)
      d = v.multiply(subm.determinant).multiply(sign)
      det += d
      sign = sign.multiply(minus_one)
    end
    det
  end

  # Matrix with one row and one column removed (for cofactors).
  def submatrix(exclude_row, exclude_col)
    one = NumericConstant.new(1)
    new_dims = [@dimensions[0] - one, @dimensions[1] - one]
    new_values = submatrix_values(exclude_row, exclude_col)
    Matrix.new(new_dims, new_values)
  end

  # Values hash for the submatrix, re-indexed from 1.
  def submatrix_values(exclude_row, exclude_col)
    new_values = {}
    new_row = 1
    base = $options['base'].value
    (base..@dimensions[0].to_i).each do |row|
      new_col = 1
      next if row == exclude_row
      (base..@dimensions[1].to_i).each do |col|
        next if col == exclude_col
        coords = AbstractElement.make_coords(new_row, new_col)
        new_values[coords] = get_value_2(row, col)
        new_col += 1
      end
      new_row += 1
    end
    new_values
  end

  # Elimination factor: values[row][col] / values[col][col] (the pivot).
  def calc_factor(values, row, col)
    denom_coords = AbstractElement.make_coords(col, col)
    denominator = values[denom_coords]
    numer_coords = AbstractElement.make_coords(row, col)
    numerator = values[numer_coords]
    numerator.divide(denominator)
  end

  # Row operation: values[row][wcol] -= values[col][wcol] * factor.
  def adjust_matrix_entry(values, row, col, wcol, factor)
    value_coords = AbstractElement.make_coords(row, wcol)
    minuend_coords = AbstractElement.make_coords(col, wcol)
    subtrahend = values[value_coords]
    minuend = values[minuend_coords]
    new_value = subtrahend - minuend.multiply(factor)
    values[value_coords] = new_value
  end

  # Forward elimination, applied to the matrix and its inverse-in-progress.
  def upper_triangle(n_cols, n_rows, values, inv_values)
    base = $options['base'].value
    (base..n_cols - 1).each do |col|
      (col + 1..n_rows).each do |row|
        # adjust values for this row
        factor = calc_factor(values, row, col)
        (base..n_cols).each do |wcol|
          adjust_matrix_entry(values, row, col, wcol, factor)
          adjust_matrix_entry(inv_values, row, col, wcol, factor)
        end
      end
    end
  end

  # Back substitution, applied to the matrix and its inverse-in-progress.
  def lower_triangle(n_cols, values, inv_values)
    base = $options['base'].value
    n_cols.downto(base + 1) do |col|
      (col - 1).downto(base).each do |row|
        # adjust values for this row
        factor = calc_factor(values, row, col)
        (base..n_cols).each do |wcol|
          adjust_matrix_entry(values, row, col, wcol, factor)
          adjust_matrix_entry(inv_values, row, col, wcol, factor)
        end
      end
    end
  end

  # Divide each row by its diagonal element to leave ones on the diagonal.
  def unitize(n_cols, n_rows, values, inv_values)
    base = $options['base'].value
    (base..n_rows).each do |row|
      denom_coords = AbstractElement.make_coords(row, row)
      denominator = values[denom_coords]
      (base..n_cols).each do |col|
        unitize_matrix_entry(values, row, col, denominator)
        unitize_matrix_entry(inv_values, row, col, denominator)
      end
    end
  end

  def unitize_matrix_entry(values, row, col, denominator)
    coords = AbstractElement.make_coords(row, col)
    numerator = values[coords]
    new_value = numerator.divide(denominator)
    values[coords] = new_value
  end
end
# Entry for cross-reference list
# One (variable, signature, is_ref) tuple; totally ordered so entries
# can be sorted and used as Hash keys.
class XrefEntry
  attr_reader :variable
  attr_reader :signature
  attr_reader :is_ref

  # Build a signature (list of sigil characters) from argument content
  # types; nil arguments yield a nil signature.
  # NOTE(review): content types outside the map fall through to a nil
  # sigil -- confirm that is intended.
  def self.make_signature(arguments)
    return nil if arguments.nil?

    sigil_chars = {
      numeric: '_',
      integer: '%',
      string: '$',
      boolean: '?'
    }

    sigils = []
    arguments.each do |arg|
      content_type = :empty
      if arg.class.to_s == 'Array'
        # an array is a parsed expression
        unless arg.empty?
          a0 = arg[-1]
          content_type = a0.content_type
        end
      else
        content_type = arg.content_type
      end
      sigils << sigil_chars[content_type]
    end

    return sigils
  end

  def initialize(variable, signature, is_ref)
    @variable = variable
    @signature = signature
    @is_ref = is_ref
  end

  # eql?/hash are defined together so entries work as Hash keys.
  def eql?(other)
    @variable == other.variable &&
      @signature == other.signature &&
      @is_ref == other.is_ref
  end

  def hash
    @variable.hash + @signature.hash + @is_ref.hash
  end

  # Signature size for ordering; nil sorts before any real signature.
  def asize(x)
    return -1 if x.nil?
    x.size
  end

  def <=>(other)
    return -1 if self < other
    return 1 if self > other
    0
  end

  def ==(other)
    @variable == other.variable &&
      @signature == other.signature &&
      @is_ref == other.is_ref
  end

  # Strict order: by variable, then signature size, then references
  # sort before non-references.
  def >(other)
    return true if @variable > other.variable
    return false if @variable < other.variable
    return true if asize(@signature) > asize(other.signature)
    return false if asize(@signature) < asize(other.signature)
    !@is_ref && other.is_ref
  end

  # Fixed: the original body duplicated #>, so `x >= x` was false.
  def >=(other)
    !(self < other)
  end

  def <(other)
    return true if @variable < other.variable
    return false if @variable > other.variable
    return true if asize(@signature) < asize(other.signature)
    return false if asize(@signature) > asize(other.signature)
    @is_ref && !other.is_ref
  end

  # Fixed: the original body duplicated #<, so `x <= x` was false.
  def <=(other)
    !(self > other)
  end

  # NOTE(review): raises NoMethodError when @signature is nil.
  def n_dims
    @signature.size
  end

  def to_s
    dims = ''
    dims = '(' + @signature.join(',') + ')' unless @signature.nil?
    ref = ''
    ref = '=' if @is_ref
    @variable.to_s + dims + ref
  end

  def to_text
    dims = ''
    dims = '(' + @signature.join(',') + ')' unless @signature.nil?
    @variable.to_s + dims
  end
end
# Expression parser
# Shunting-yard style parser: elements arrive one at a time via #parse
# and are rearranged into postfix "parsed expressions". Parentheses may
# open argument/subscript lists (for functions and variables), tracked
# with parallel stacks of operators, expressions, and shapes.
class Parser
  def initialize(default_shape)
    @parsed_expressions = []
    @operator_stack = []
    @expression_stack = []
    @current_expression = []
    @parens_stack = []

    @shape_stack = [default_shape]
    @parens_group = []
    @previous_element = InitialOperator.new
  end

  # Feed one element; dispatches on its syntactic role. Operands go
  # straight to the output; everything else manipulates the stacks.
  def parse(element)
    if element.group_separator?
      group_separator(element)
    elsif element.operator?
      operator_higher(element)
    elsif element.function_variable?
      function_variable(element)
    else
      # the element is an operand, append it to the output list
      @current_expression << element
    end
    # remembered so '(' can tell a function call from plain grouping
    @previous_element = element
  end

  # Finalize and return the postfix expressions; raises if operators
  # remain (unbalanced input).
  def expressions
    raise(BASICExpressionError, 'Too many operators') unless
      @operator_stack.empty?

    @parsed_expressions.concat @parens_group unless @parens_group.empty?
    @parsed_expressions << @current_expression unless @current_expression.empty?
    @parsed_expressions
  end

  private

  # Drain operators into the output until a group starter is on top.
  def stack_to_expression(stack, expression)
    until stack.empty? || stack[-1].starter?
      op = stack.pop
      expression << op
    end
  end

  # Drain operators with precedence >= the incoming element's.
  def stack_to_precedence(stack, expression, element)
    while !stack.empty? &&
          !stack[-1].starter? &&
          stack[-1].precedence >= element.precedence
      op = stack.pop
      expression << op
    end
  end

  # '(' / ',' / ')' handling.
  def group_separator(element)
    if element.group_start?
      start_group(element)
    elsif element.separator?
      pop_to_group_start
    elsif element.group_end?
      end_group(element)
    end
  end

  # A '(' after a function or variable starts an argument/subscript
  # list; otherwise it is ordinary grouping.
  def start_group(element)
    if @previous_element.function?
      start_associated_group(element, @previous_element.default_shape)
    elsif @previous_element.variable?
      start_associated_group(element, :scalar)
    else
      start_simple_group(element)
    end
  end

  # a group associated with a function or variable
  # (arguments or subscripts)
  def start_associated_group(element, shape)
    @expression_stack.push(@current_expression)
    @current_expression = []
    @operator_stack.push(ParamStart.new(element))
    @parens_stack << @parens_group
    @parens_group = []
    @shape_stack.push(shape)
  end

  def start_simple_group(element)
    @operator_stack.push(element)
    @parens_stack << @parens_group
    @parens_group = []
    @shape_stack.push(:scalar)
  end

  # pop the operator stack until the corresponding left paren is found
  # Append each operator to the end of the output list
  def pop_to_group_start
    stack_to_expression(@operator_stack, @current_expression)
    @parens_group << @current_expression
    @current_expression = []
  end

  # pop the operator stack until the corresponding left paren is removed
  # Append each operator to the end of the output list
  def end_group(group_end_element)
    stack_to_expression(@operator_stack, @current_expression)
    @parens_group << @current_expression
    raise(BASICExpressionError, 'Too few operators') if @operator_stack.empty?

    # remove the '(' or '[' starter
    start_op = @operator_stack.pop
    error = 'Bracket/parenthesis mismatch, found ' + group_end_element.to_s +
            ' to match ' + start_op.to_s
    raise(BASICExpressionError, error) unless group_end_element.match?(start_op)

    # an argument list becomes a List operand; restore the suspended
    # outer expression
    if start_op.param_start?
      list = List.new(@parens_group)
      @operator_stack.push(list)
      @current_expression = @expression_stack.pop
    end

    @parens_group = @parens_stack.pop
    @shape_stack.pop
  end

  # remove operators already on the stack that have higher
  # or equal precedence
  # append them to the output list
  def operator_higher(element)
    stack_to_precedence(@operator_stack, @current_expression, element)

    # push the operator onto the operator stack
    @operator_stack.push(element) unless element.terminal?
  end

  def function_variable(element)
    if element.user_function?
      start_user_function(element)
    elsif element.function?
      start_function(element)
    elsif element.variable?
      start_variable(element)
    end
  end

  # remove operators already on the stack that have higher
  # or equal precedence
  # append them to the output list
  def start_user_function(element)
    stack_to_precedence(@operator_stack, @current_expression, element)

    # push the variable onto the operator stack
    variable = UserFunction.new(element)
    @operator_stack.push(variable)
  end

  # remove operators already on the stack that have higher
  # or equal precedence
  # append them to the output list
  def start_function(element)
    stack_to_precedence(@operator_stack, @current_expression, element)

    # push the function onto the operator stack
    @operator_stack.push(element)
  end

  # remove operators already on the stack that have higher
  # or equal precedence
  # append them to the output list
  def start_variable(element)
    stack_to_precedence(@operator_stack, @current_expression, element)

    # push the variable onto the operator stack
    # current shape decides whether this is a declaration or a use
    if @shape_stack[-1] == :declaration
      variable = Declaration.new(element)
    else
      variable = Variable.new(element, @shape_stack[-1], [])
    end
    @operator_stack.push(variable)
  end
end
# base class for expressions
class AbstractExpression
attr_reader :parsed_expressions
attr_reader :comprehension_effort
# Parse a token list into one or more postfix expressions and compute
# a rough "comprehension effort" complexity score.
#
# tokens        - lexer tokens for the expression text
# default_shape - shape (:scalar, :declaration, ...) seeded onto the
#                 parser's shape stack for bare variables
def initialize(tokens, default_shape)
  @unparsed_expression = tokens.map(&:to_s).join
  # single-token constants are flagged for quick checks later
  @numeric_constant = tokens.size == 1 && tokens[0].numeric_constant?
  @text_constant = tokens.size == 1 && tokens[0].text_constant?
  @target = false
  @carriage = false
  elements = tokens_to_elements(tokens)
  parser = Parser.new(default_shape)
  elements.each { |element| parser.parse(element) }
  @parsed_expressions = parser.expressions
  set_arguments_1(@parsed_expressions)
  # Effort score: +1 per operator, +1 extra for adjacent operators,
  # +1 per function (plus one more for user-defined functions).
  @comprehension_effort = 1
  @parsed_expressions.each do |parsed_expression|
    prev = nil
    parsed_expression.each do |element|
      @comprehension_effort += 1 if element.operator?
      @comprehension_effort += 1 if element.operator? && !prev.nil? && prev.operator?
      @comprehension_effort += 1 if element.function?
      # function? includes user-defined funcs,
      # so the next line makes comprehension effort 2
      @comprehension_effort += 1 if element.user_function?
      prev = element
    end
  end
end
def to_s
@unparsed_expression
end
def dump
lines = []
@parsed_expressions.each do |expression|
x = expression.map(&:dump)
if x.class.to_s == 'Array'
lines += x.flatten
else
lines << x
end
end
lines
end
def carriage_control?
@carriage
end
def count
@parsed_expressions.length
end
def numeric_constant?
@numeric_constant
end
def text_constant?
@text_constant
end
def target?
@target
end
# returns an Array of values
def evaluate(interpreter)
interpreter.evaluate(@parsed_expressions)
end
def numerics
parsed_expressions_numerics(@parsed_expressions)
end
def num_symbols
parsed_expressions_num_symbols(@parsed_expressions)
end
def strings
parsed_expressions_strings(@parsed_expressions)
end
def booleans
parsed_expressions_booleans(@parsed_expressions)
end
def text_symbols
parsed_expressions_text_symbols(@parsed_expressions)
end
def variables
parsed_expressions_variables(@parsed_expressions)
end
def operators
parsed_expressions_operators(@parsed_expressions)
end
def functions
parsed_expressions_functions(@parsed_expressions)
end
def userfuncs
parsed_expressions_userfuncs(@parsed_expressions)
end
private
def set_arguments_1(parsed_expressions)
parsed_expressions.each { |expression| set_arguments_2(expression) }
end
# Walk one parsed expression (in the element order produced by Parser),
# simulating evaluation on a type stack so each operator learns the
# content types of its arguments.
def set_arguments_2(parsed_expression)
  content_type_stack = []
  parsed_expression.each do |item|
    if item.list?
      # nested argument/subscript list: resolve its expressions recursively
      set_arguments_1(item.list)
    elsif item.operator?
      # operator consumes argument types, then pushes its own result
      item.set_arguments(content_type_stack)
      content_type_stack.push(item)
    else
      # operand adjusts the stack (e.g. consumes its subscript list)
      # and pushes itself
      item.pop_stack(content_type_stack)
      content_type_stack.push(item)
    end
  end
  # a well-formed expression reduces to at most one result
  raise(BASICExpressionError, 'Bad expression') if
    content_type_stack.size > 1
end
# Collect unnamed numeric constants, recursing into nested lists.
# A constant immediately preceded (in evaluation order) by a unary minus
# is collected as its negated value.
def parsed_expressions_numerics(parsed_expressions)
  vars = []
  parsed_expressions.each do |expression|
    previous = nil
    # backwards so the unary operator (if any) is seen first
    expression.reverse_each do |thing|
      if thing.list?
        # recurse into expressions in list
        sublist = thing.list
        vars += parsed_expressions_numerics(sublist)
      elsif thing.numeric_constant? && !thing.symbol
        # fold a preceding unary '-' into the constant itself
        if !previous.nil? &&
           previous.operator? &&
           previous.unary? &&
           previous.to_s == '-'
          vars << thing.negate
        else
          vars << thing
        end
      end
      previous = thing
    end
  end
  vars
end
# Collect numeric constants that carry a symbol, recursing into
# nested argument/subscript lists.
def parsed_expressions_num_symbols(parsed_expressions)
  result = []
  parsed_expressions.each do |parsed_expression|
    # reverse order, matching the scan used for plain numerics
    parsed_expression.reverse_each do |element|
      if element.list?
        result.concat(parsed_expressions_num_symbols(element.list))
      elsif element.numeric_constant? && element.symbol
        result << element
      end
    end
  end
  result
end
# Collect unnamed text constants (string literals), recursing into
# nested argument/subscript lists.
def parsed_expressions_strings(parsed_expressions)
  found = []
  parsed_expressions.each do |parsed_expression|
    parsed_expression.each do |element|
      if element.list?
        found.concat(parsed_expressions_strings(element.list))
      elsif element.text_constant? && !element.symbol
        found << element
      end
    end
  end
  found
end
# Collect boolean constants, recursing into nested lists.
def parsed_expressions_booleans(parsed_expressions)
  found = []
  parsed_expressions.each do |parsed_expression|
    parsed_expression.each do |element|
      if element.list?
        found.concat(parsed_expressions_booleans(element.list))
      elsif element.boolean_constant?
        found << element
      end
    end
  end
  found
end
# Collect text constants that carry a symbol, recursing into nested lists.
def parsed_expressions_text_symbols(parsed_expressions)
  found = []
  parsed_expressions.each do |parsed_expression|
    parsed_expression.each do |element|
      if element.list?
        found.concat(parsed_expressions_text_symbols(element.list))
      elsif element.text_constant? && element.symbol
        found << element
      end
    end
  end
  found
end
# Build XrefEntry records for every variable, recursing into nested lists.
# Arrays get a one-zero placeholder signature and matrices a two-zero
# placeholder; when the element just before the variable is a subscript
# List, that actual list replaces the placeholder.
def parsed_expressions_variables(parsed_expressions)
  vars = []
  parsed_expressions.each do |expression|
    previous = nil
    expression.each do |thing|
      if thing.list?
        # recurse into expressions in list
        sublist = thing.list
        vars += parsed_expressions_variables(sublist)
      elsif thing.variable?
        arguments = nil
        # placeholder subscript for an array: one numeric zero
        if thing.array?
          token = NumericConstantToken.new('0')
          constant = NumericConstant.new(token)
          arguments = [constant]
        end
        # placeholder subscripts for a matrix: two numeric zeroes
        if thing.matrix?
          token = NumericConstantToken.new('0')
          constant = NumericConstant.new(token)
          arguments = [constant, constant]
        end
        # real subscripts, when present, win over the placeholder
        arguments = previous.list if !previous.nil? && previous.list?
        is_ref = thing.reference?
        signature = XrefEntry.make_signature(arguments)
        vars << XrefEntry.new(thing.to_s, signature, is_ref)
      end
      previous = thing
    end
  end
  vars
end
# Build XrefEntry records for every operator, recursing into nested lists.
# (The 'previous' element tracking present in the sibling collectors was
# never read here, so it has been removed.)
def parsed_expressions_operators(parsed_expressions)
  opers = []
  parsed_expressions.each do |expression|
    expression.each do |thing|
      if thing.list?
        # recurse into expressions in list
        opers += parsed_expressions_operators(thing.list)
      elsif thing.operator?
        # operators are never references
        signature = XrefEntry.make_signature(thing.arguments)
        opers << XrefEntry.new(thing.to_s, signature, false)
      end
    end
  end
  opers
end
# Build XrefEntry records for built-in functions (user-defined functions
# are handled by parsed_expressions_userfuncs), recursing into lists.
# The element just before the function, when it is a List, supplies the
# argument list for the signature.
def parsed_expressions_functions(parsed_expressions)
  vars = []
  parsed_expressions.each do |expression|
    previous = nil
    expression.each do |thing|
      if thing.list?
        # recurse into expressions in list
        sublist = thing.list
        vars += parsed_expressions_functions(sublist)
      elsif thing.function? && !thing.user_function?
        arguments = nil
        arguments = previous.list if !previous.nil? && previous.list?
        is_ref = thing.reference?
        signature = XrefEntry.make_signature(arguments)
        vars << XrefEntry.new(thing.to_s, signature, is_ref)
      end
      previous = thing
    end
  end
  vars
end
# Build XrefEntry records for user-defined functions, recursing into
# nested lists. A preceding List element supplies the argument signature.
def parsed_expressions_userfuncs(parsed_expressions)
  entries = []
  parsed_expressions.each do |parsed_expression|
    preceding = nil
    parsed_expression.each do |element|
      if element.list?
        entries.concat(parsed_expressions_userfuncs(element.list))
      elsif element.user_function?
        args = nil
        args = preceding.list if !preceding.nil? && preceding.list?
        entries << XrefEntry.new(element.to_s,
                                 XrefEntry.make_signature(args),
                                 element.reference?)
      end
      preceding = element
    end
  end
  entries
end
# Convert tokens into expression elements; each conversion depends on
# whether the previous element was an operand (binary vs unary context).
# A terminal operator is appended to flush the parser's operator stack.
def tokens_to_elements(tokens)
  elements = tokens.each_with_object([]) do |token, converted|
    follows_operand = !converted.empty? && converted.last.operand?
    converted << token_to_element(token, follows_operand)
  end
  elements << TerminalOperator.new
end
# Convert one token into an expression element. Function names are
# resolved first via FunctionFactory; otherwise the token is matched
# against an ordered list of element classes, chosen by context
# (binary_classes after an operand, unary_classes elsewhere).
def token_to_element(token, follows_operand)
  return FunctionFactory.make(token.to_s) if
    FunctionFactory.valid?(token.to_s)

  element = nil
  (follows_operand ? binary_classes : unary_classes).each do |c|
    # first accepting class wins
    element = c.new(token) if element.nil? && c.accept?(token)
  end

  if element.nil?
    raise(BASICExpressionError,
          "Token '#{token.class}:#{token}' is not a value or operator")
  end

  element
end
# Element classes tried for a token that follows an operand
# (binary-operator context).
def binary_classes
  # first match is used; select order with care
  # UserFunction before VariableName
  [
    GroupStart,
    GroupEnd,
    ParamSeparator,
    BinaryOperatorPlus,
    BinaryOperatorMinus,
    BinaryOperatorMultiply,
    BinaryOperatorDivide,
    BinaryOperatorPower,
    BinaryOperatorEqual,
    BinaryOperatorNotEqual,
    BinaryOperatorLess,
    BinaryOperatorLessEqual,
    BinaryOperatorGreater,
    BinaryOperatorGreaterEqual,
    BinaryOperatorAnd,
    BinaryOperatorOr,
    BinaryOperatorMax,
    BinaryOperatorMin,
    BooleanConstant,
    IntegerConstant,
    NumericConstant,
    UserFunctionName,
    VariableName,
    TextConstant
  ]
end
# Element classes tried for a token that does not follow an operand
# (unary-operator context, e.g. leading '-' or 'NOT').
def unary_classes
  # first match is used; select order with care
  # UserFunction before VariableName
  [
    GroupStart,
    GroupEnd,
    ParamSeparator,
    UnaryOperatorPlus,
    UnaryOperatorMinus,
    UnaryOperatorHash,
    UnaryOperatorColon,
    UnaryOperatorNot,
    BooleanConstant,
    IntegerConstant,
    NumericConstant,
    UserFunctionName,
    VariableName,
    TextConstant
  ]
end
end
# Value expression (an R-value)
class ValueExpression < AbstractExpression
  def initialize(_, shape)
    super
    @shape = shape
  end

  # value expressions may appear in output lists
  def printable?
    true
  end

  def keyword?
    false
  end

  def scalar?
    :scalar == @shape
  end

  # content type of the final element of the first parsed expression
  def content_type
    @parsed_expressions[0][-1].content_type
  end

  # true when the expression denotes a file handle (trailing '#' operator)
  def filehandle?
    return false if @parsed_expressions.empty?

    final = @parsed_expressions[0][-1]
    final.operator? && final.pound?
  end

  # evaluate and print the first result, if any
  def print(printer, interpreter)
    results = evaluate(interpreter)
    results[0].print(printer) unless results.empty?
  end

  # evaluate and write the first result, if any
  def write(printer, interpreter)
    results = evaluate(interpreter)
    results[0].write(printer) unless results.empty?
  end

  # evaluate and print the first compound result with formats, if any
  def compound_print(printer, interpreter, formats)
    results = evaluate(interpreter)
    results[0].print(printer, interpreter, formats) unless results.empty?
  end

  # evaluate and write the first compound result, if any
  def compound_write(printer, interpreter)
    results = evaluate(interpreter)
    results[0].write(printer, interpreter) unless results.empty?
  end
end
# Declaration expression
class DeclarationExpression < AbstractExpression
  def initialize(tokens)
    super(tokens, :declaration)
    check_length
    check_all_lengths
    check_resolve_types
  end

  private

  # at least one expression must be present
  def check_length
    raise(BASICSyntaxError, 'Value list is empty (length 0)') if
      @parsed_expressions.empty?
  end

  # no expression may be empty
  def check_all_lengths
    @parsed_expressions.each do |parsed_expression|
      raise(BASICSyntaxError, 'Value is not declaration (length 0)') if
        parsed_expression.empty?
    end
  end

  # every expression must resolve to a Declaration element
  def check_resolve_types
    @parsed_expressions.each do |parsed_expression|
      next if parsed_expression[-1].class.to_s == 'Declaration'

      raise(BASICSyntaxError,
            "Value is not declaration (type #{parsed_expression[-1].class})")
    end
  end
end
# Target expression
class TargetExpression < AbstractExpression
  def initialize(tokens, shape)
    super
    check_length
    check_all_lengths
    check_resolve_types
    @target = true
    # mark the final element of each expression as a reference (L-value)
    @parsed_expressions.each do |parsed_expression|
      parsed_expression[-1].valref = :reference
    end
  end

  # a target is never a file handle
  def filehandle?
    false
  end

  private

  # at least one expression must be present
  def check_length
    raise(BASICSyntaxError, 'Value list is empty (length 0)') if
      @parsed_expressions.empty?
  end

  # no expression may be empty
  def check_all_lengths
    @parsed_expressions.each do |parsed_expression|
      raise(BASICSyntaxError, 'Value is not assignable (length 0)') if
        parsed_expression.empty?
    end
  end

  # every expression must resolve to a Variable or UserFunction
  def check_resolve_types
    @parsed_expressions.each do |parsed_expression|
      final = parsed_expression[-1]
      next if %w[Variable UserFunction].include?(final.class.to_s)

      raise(BASICSyntaxError,
            "Value is not assignable (type #{final.class})")
    end
  end
end
# User function definition
# Define the user function name, arguments, and expression
class UserFunctionDefinition
  attr_reader :name
  attr_reader :arguments
  attr_reader :sig
  attr_reader :expression
  attr_reader :numerics
  attr_reader :num_symbols
  attr_reader :strings
  attr_reader :booleans
  attr_reader :text_symbols
  attr_reader :variables
  attr_reader :operators
  attr_reader :functions
  attr_reader :userfuncs
  attr_reader :comprehension_effort

  # Parse tokens as "name(params) = expression" (3 parts) or a bare
  # prototype "name(params)" (1 part; the body is defined elsewhere).
  def initialize(tokens)
    line_text = tokens.map(&:to_s).join
    parts = split_tokens(tokens)
    raise(BASICExpressionError, "'#{line_text}' is not a valid assignment") if
      parts.size != 3 && parts.size != 1

    user_function_prototype = UserFunctionPrototype.new(parts[0])
    @name = user_function_prototype.name
    @arguments = user_function_prototype.arguments
    @sig = XrefEntry.make_signature(@arguments)
    @expression = nil
    @expression = ValueExpression.new(parts[2], :scalar) if parts.size == 3

    # the definition itself counts as a reference to the function;
    # reuse @sig instead of recomputing the signature (it was previously
    # recomputed identically in both branches below)
    xref = XrefEntry.new(@name.to_s, @sig, true)

    if @expression.nil?
      @numerics = []
      @num_symbols = []
      @strings = []
      @booleans = []
      @text_symbols = []
      @variables = []
      @operators = []
      @functions = []
      @userfuncs = [xref]
      @comprehension_effort = 0
    else
      @numerics = @expression.numerics
      @num_symbols = @expression.num_symbols
      @strings = @expression.strings
      @booleans = @expression.booleans
      @text_symbols = @expression.text_symbols
      @variables = @expression.variables
      @operators = @expression.operators
      @functions = @expression.functions
      @userfuncs = [xref] + @expression.userfuncs
      @comprehension_effort = @expression.comprehension_effort
    end

    # add parameters to function as references
    @arguments.each do |argument|
      @variables << XrefEntry.new(argument.to_s, nil, true)
    end
  end

  # true for a prototype-only definition (no inline expression)
  def multidef?
    @expression.nil?
  end

  def dump
    lines = []
    lines << @name.dump
    @arguments.each { |arg| lines << arg.dump }
    lines += @expression.dump unless @expression.nil?
    lines
  end

  def to_s
    vnames = @arguments.map(&:to_s).join(',')
    s = @name.to_s + '(' + vnames + ')'
    s += '=' + @expression.to_s unless @expression.nil?
    s
  end

  # Per-parameter type/shape specifications derived from content types.
  def signature
    numeric_spec = { 'type' => :numeric, 'shape' => :scalar }
    text_spec = { 'type' => :string, 'shape' => :scalar }
    integer_spec = { 'type' => :integer, 'shape' => :scalar }
    sig = []
    @arguments.each do |arg|
      sig << numeric_spec if arg.content_type == :numeric
      sig << text_spec if arg.content_type == :string
      sig << integer_spec if arg.content_type == :integer
    end
    sig
  end

  private

  # Split tokens on the first '=' into [lhs, '=', rhs]; any later '='
  # tokens stay inside the right-hand side.
  def split_tokens(tokens)
    results = []
    nonkeywords = []
    eq_count = 0
    tokens.each do |token|
      if token.operator? && token.equals? && eq_count < 1
        results << nonkeywords unless nonkeywords.empty?
        nonkeywords = []
        results << token
        eq_count += 1
      else
        nonkeywords << token
      end
    end
    results << nonkeywords unless nonkeywords.empty?
    results
  end
end
# User function prototype
# Define the user function name and arguments
class UserFunctionPrototype
  attr_reader :name
  attr_reader :arguments

  def initialize(tokens)
    check_tokens(tokens)
    @name = UserFunctionName.new(tokens[0])
    # tokens between the parentheses: names at even offsets, commas at odd
    @arguments = variable_names(tokens[2..-2])
    # arguments must be unique
    names = @arguments.map(&:to_s)
    raise(BASICExpressionError, 'Duplicate parameters') unless
      names.uniq.size == names.size
  end

  def to_s
    @name
  end

  private

  # verify tokens are UserFunction, open, close
  def check_tokens(tokens)
    raise(BASICSyntaxError, 'Invalid function specification') unless
      tokens.size >= 3 && tokens[0].user_function? &&
      tokens[1].groupstart? && tokens[-1].groupend?
  end

  # split params into names (even indices) and separators (odd indices);
  # every odd-index token must be a list separator
  def variable_names(params)
    name_tokens, separator_tokens =
      params.partition.with_index { |_, index| index.even? }
    variable_names = name_tokens.map { |token| VariableName.new(token) }
    separator_tokens.each do |separator|
      raise(BASICSyntaxError, 'Invalid list separator') unless
        separator.separator?
    end
    variable_names
  end
end
# Assignment
class Assignment
  attr_reader :target
  attr_reader :numerics
  attr_reader :num_symbols
  attr_reader :strings
  attr_reader :booleans
  attr_reader :text_symbols
  attr_reader :variables
  attr_reader :operators
  attr_reader :functions
  attr_reader :userfuncs
  attr_reader :comprehension_effort

  def initialize(tokens, shape)
    # parse into variable, '=', expression
    # NOTE(review): unlike UserFunctionDefinition#split_tokens, this split
    # breaks on EVERY '=' token, so an '=' inside the right-hand expression
    # yields more than 3 parts and is rejected here — confirm intended.
    @token_lists = split_tokens(tokens)
    line_text = tokens.map(&:to_s).join
    raise(BASICExpressionError, "'#{line_text}' is not a valid assignment") if
      @token_lists.size != 3 ||
      !(@token_lists[1].operator? && @token_lists[1].equals?)

    @numerics = []
    @num_symbols = []
    @strings = []
    @booleans = []
    @text_symbols = []
    @variables = []
    @operators = []
    @functions = []
    @userfuncs = []
    @target = TargetExpression.new(@token_lists[0], shape)
    @expression = ValueExpression.new(@token_lists[2], shape)
    make_references
    @comprehension_effort = @target.comprehension_effort + @expression.comprehension_effort
  end

  # element descriptions for target, expression, and the operator marker
  def dump
    lines = []
    lines += @target.dump
    lines += @expression.dump
    lines << 'AssignmentOperator:='
  end

  private

  # split tokens on '=' tokens into alternating token lists and operators
  def split_tokens(tokens)
    results = []
    nonkeywords = []
    tokens.each do |token|
      if token.operator? && token.equals?
        results << nonkeywords unless nonkeywords.empty?
        nonkeywords = []
        results << token
      else
        nonkeywords << token
      end
    end
    results << nonkeywords unless nonkeywords.empty?
    results
  end

  # merge cross-reference collections from both sides of the assignment
  def make_references
    @numerics = @target.numerics + @expression.numerics
    @num_symbols = @target.num_symbols + @expression.num_symbols
    @strings = @target.strings + @expression.strings
    @booleans = @target.booleans + @expression.booleans
    @text_symbols = @target.text_symbols + @expression.text_symbols
    @variables = @target.variables + @expression.variables
    @operators = @target.operators + @expression.operators
    @functions = @target.functions + @expression.functions
    @userfuncs = @target.userfuncs + @expression.userfuncs
  end

  public

  def count_target
    @target.count
  end

  def count_value
    @expression.count
  end

  def eval_value(interpreter)
    @expression.evaluate(interpreter)
  end

  def eval_target(interpreter)
    @target.evaluate(interpreter)
  end

  def to_s
    @target.to_s + ' = ' + @expression.to_s
  end
end
# Simplify Parser class.
# Array with values
class BASICArray
  # Build a hash of coordinate => init_value for a one-dimensional array.
  def self.make_array(dims, init_value)
    values = {}
    base = $options['base'].value
    (base..dims[0].to_i).each do |col|
      coords = AbstractElement.make_coord(col)
      values[coords] = init_value
    end
    values
  end

  # Values initialized to zero; two dimensions is an error for an array.
  def self.zero_values(dimensions)
    case dimensions.size
    when 1
      BASICArray.make_array(dimensions, NumericConstant.new(0))
    when 2
      raise BASICSyntaxError, 'Too many dimensions in array'
    end
  end

  # Values initialized to one; two dimensions is an error for an array.
  def self.one_values(dimensions)
    case dimensions.size
    when 1
      BASICArray.make_array(dimensions, NumericConstant.new(1))
    when 2
      raise BASICSyntaxError, 'Too many dimensions in array'
    end
  end

  attr_reader :dimensions

  def initialize(dimensions, values)
    @dimensions = dimensions
    @values = values
  end

  # BUG FIX: previously built Array.new(@dimensions.clone, @values.clone),
  # which raises TypeError (Array.new wants an Integer size) and would
  # return the wrong class; a clone must be a BASICArray.
  def clone
    BASICArray.new(@dimensions.clone, @values.clone)
  end

  def numeric_constant?
    value = get_value(0)
    value.numeric_constant?
  end

  def text_constant?
    value = get_value(0)
    value.text_constant?
  end

  def scalar?
    false
  end

  def array?
    true
  end

  def matrix?
    false
  end

  # hash of coordinate => value for every cell, filling defaults
  def values(interpreter)
    values = {}
    base = interpreter.base
    (base..@dimensions[0].to_i).each do |col|
      value = get_value(col)
      coords = AbstractElement.make_coord(col)
      values[coords] = value
    end
    values
  end

  # value at a column; unset cells read as numeric zero
  def get_value(col)
    coords = AbstractElement.make_coord(col)
    return @values[coords] if @values.key?(coords)

    NumericConstant.new(0)
  end

  def to_s
    'ARRAY: ' + @values.to_s
  end

  def print(printer, interpreter, formats)
    case @dimensions.size
    when 0
      raise BASICSyntaxError, 'Need dimension in array'
    when 1
      print_1(printer, interpreter, formats)
    else
      raise BASICSyntaxError, 'Too many dimensions in array'
    end
  end

  def write(printer, interpreter)
    case @dimensions.size
    when 0
      raise BASICSyntaxError, 'Need dimension in array'
    when 1
      write_1(printer, interpreter)
    else
      raise BASICSyntaxError, 'Too many dimensions in array'
    end
  end

  # Pack values into a text constant: cell 0 holds the count, cells 1..count
  # hold character codes.
  def pack
    count = get_value(0).to_i
    result = ''
    (1..count).each do |index|
      value = get_value(index)
      result += value.to_i.chr unless value.nil?
    end
    quoted = '"' + result + '"'
    token = TextConstantToken.new(quoted)
    TextConstant.new(token)
  end

  private

  # print all cells, field-separated, optionally through format items
  def print_1(printer, interpreter, formats)
    n_cols = @dimensions[0].to_i
    fs_carriage = CarriageControl.new($options['field_sep'].value)
    base = interpreter.base
    (base..n_cols).each do |col|
      if formats.nil?
        value = get_value(col)
        value.print(printer)
      else
        # apply using formats
        formats.each do |format|
          value = nil
          value = get_value(col) if format.wants_item
          text = format.format(value)
          text.print(printer)
        end
      end
      fs_carriage.print(printer, interpreter) if col < n_cols
    end
  end

  # write all cells, comma-separated
  def write_1(printer, interpreter)
    n_cols = @dimensions[0].to_i
    fs_carriage = CarriageControl.new(',')
    base = interpreter.base
    (base..n_cols).each do |col|
      value = get_value(col)
      value.write(printer)
      fs_carriage.write(printer, interpreter) if col < n_cols
    end
  end
end
# Matrix with values
class Matrix
  # Build a hash of coordinate => init_value for one dimension.
  def self.make_array(dims, init_value)
    values = {}
    base = $options['base'].value
    (base..dims[0].to_i).each do |col|
      coords = AbstractElement.make_coord(col)
      values[coords] = init_value
    end
    values
  end

  # Build a hash of coordinate => init_value for two dimensions.
  def self.make_matrix(dims, init_value)
    values = {}
    base = $options['base'].value
    (base..dims[0].to_i).each do |row|
      (base..dims[1].to_i).each do |col|
        coords = AbstractElement.make_coords(row, col)
        values[coords] = init_value
      end
    end
    values
  end

  def self.zero_values(dimensions)
    case dimensions.size
    when 1
      make_array(dimensions, NumericConstant.new(0))
    when 2
      make_matrix(dimensions, NumericConstant.new(0))
    end
  end

  def self.one_values(dimensions)
    case dimensions.size
    when 1
      make_array(dimensions, NumericConstant.new(1))
    when 2
      make_matrix(dimensions, NumericConstant.new(1))
    end
  end

  # Square identity matrix values (ones on the diagonal).
  def self.identity_values(dimensions)
    new_values = make_matrix(dimensions, NumericConstant.new(0))
    one = NumericConstant.new(1)
    base = $options['base'].value
    (base..dimensions[0].to_i).each do |row|
      coords = AbstractElement.make_coords(row, row)
      new_values[coords] = one
    end
    new_values
  end

  attr_reader :dimensions

  def initialize(dimensions, values)
    @dimensions = dimensions
    @values = values
  end

  def clone
    Matrix.new(@dimensions.clone, @values.clone)
  end

  def numeric_constant?
    value = get_value_2(0, 0)
    value.numeric_constant?
  end

  def text_constant?
    value = get_value_2(0, 0)
    value.text_constant?
  end

  def scalar?
    false
  end

  def array?
    false
  end

  def matrix?
    true
  end

  # coordinate => value hash when treated as one-dimensional
  def values_1
    values = {}
    base = $options['base'].value
    (base..@dimensions[0].to_i).each do |col|
      value = get_value_1(col)
      coords = AbstractElement.make_coord(col)
      values[coords] = value
    end
    values
  end

  # coordinate => value hash when treated as two-dimensional
  def values_2
    values = {}
    base = $options['base'].value
    (base..@dimensions[0].to_i).each do |row|
      (base..@dimensions[1].to_i).each do |col|
        value = get_value_2(row, col)
        coords = AbstractElement.make_coords(row, col)
        values[coords] = value
      end
    end
    values
  end

  # value at a column; unset cells read as numeric zero
  def get_value_1(col)
    coords = AbstractElement.make_coord(col)
    return @values[coords] if @values.key?(coords)

    NumericConstant.new(0)
  end

  # value at row/column; unset cells read as numeric zero
  def get_value_2(row, col)
    coords = AbstractElement.make_coords(row, col)
    return @values[coords] if @values.key?(coords)

    NumericConstant.new(0)
  end

  def to_s
    'MATRIX: ' + @values.to_s
  end

  def print(printer, interpreter, formats)
    case @dimensions.size
    when 0
      raise BASICSyntaxError, 'Need dimensions in matrix'
    when 1
      print_1(printer, interpreter, formats)
    when 2
      print_2(printer, interpreter, formats)
    else
      raise BASICSyntaxError, 'Too many dimensions in matrix'
    end
  end

  def write(printer, interpreter)
    case @dimensions.size
    when 0
      raise BASICSyntaxError, 'Need dimensions in matrix'
    when 1
      write_1(printer, interpreter)
    when 2
      write_2(printer, interpreter)
    else
      raise BASICSyntaxError, 'Too many dimensions in matrix'
    end
  end

  # values of the transposed matrix (rows and columns swapped)
  def transpose_values
    raise(BASICExpressionError, 'TRN requires matrix') unless
      @dimensions.size == 2

    new_values = {}
    base = $options['base'].value
    (base..@dimensions[0].to_i).each do |row|
      (base..@dimensions[1].to_i).each do |col|
        value = get_value_2(row, col)
        coords = AbstractElement.make_coords(col, row)
        new_values[coords] = value
      end
    end
    new_values
  end

  # determinant of a square matrix (cofactor expansion for n > 2)
  def determinant
    raise(BASICSyntaxError, 'DET requires matrix') unless @dimensions.size == 2
    raise BASICRuntimeError.new(:te_mat_no_sq, 'DET') if
      @dimensions[1] != @dimensions[0]

    case @dimensions[0].to_i
    when 1
      get_value_2(1, 1)
    when 2
      determinant_2
    else
      determinant_n
    end
  end

  # inverse via Gauss-Jordan elimination against an identity matrix
  def inverse_values
    # set all values
    values = values_2
    # create identity matrix
    inv_values = Matrix.identity_values(@dimensions)
    n_rows = @dimensions[0].to_i
    n_cols = @dimensions[1].to_i
    # convert to upper triangular form
    upper_triangle(n_cols, n_rows, values, inv_values)
    # convert to lower triangular form
    lower_triangle(n_cols, values, inv_values)
    # normalize to ones
    unitize(n_cols, n_rows, values, inv_values)
    inv_values
  end

  # BUG FIX: the formats branch previously iterated 'format.each' (an
  # undefined name) and printed 'value' before assigning it; it now
  # mirrors print_2 and BASICArray#print_1.
  def print_1(printer, interpreter, formats)
    n_cols = @dimensions[0].to_i
    base = $options['base'].value
    fs_carriage = CarriageControl.new($options['field_sep'].value)
    # gs_carriage = CarriageControl.new('NL')
    # rs_carriage = CarriageControl.new('NL')
    (base..n_cols).each do |col|
      if formats.nil?
        value = get_value_1(col)
        value.print(printer)
      else
        # apply using formats
        formats.each do |format|
          value = nil
          value = get_value_1(col) if format.wants_item
          text = format.format(value)
          text.print(printer)
        end
      end
      fs_carriage.print(printer, interpreter) if col < n_cols
    end
    printer.newline
    printer.newline
  end

  def print_2(printer, interpreter, formats)
    n_rows = @dimensions[0].to_i
    n_cols = @dimensions[1].to_i
    base = $options['base'].value
    fs_carriage = CarriageControl.new($options['field_sep'].value)
    gs_carriage = CarriageControl.new('NL')
    rs_carriage = CarriageControl.new('NL')
    (base..n_rows).each do |row|
      (base..n_cols).each do |col|
        if formats.nil?
          value = get_value_2(row, col)
          value.print(printer)
        else
          # apply using formats
          formats.each do |format|
            value = nil
            value = get_value_2(row, col) if format.wants_item
            text = format.format(value)
            text.print(printer)
          end
        end
        fs_carriage.print(printer, interpreter) if col < n_cols
      end
      gs_carriage.print(printer, interpreter) if row < n_rows
    end
    rs_carriage.print(printer, interpreter)
  end

  def write_1(printer, interpreter)
    n_cols = @dimensions[0].to_i
    base = $options['base'].value
    fs_carriage = CarriageControl.new(',')
    # gs_carriage = CarriageControl.new(';')
    rs_carriage = CarriageControl.new('NL')
    (base..n_cols).each do |col|
      value = get_value_1(col)
      value.write(printer)
      fs_carriage.write(printer, interpreter) if col < n_cols
    end
    rs_carriage.write(printer, interpreter)
  end

  def write_2(printer, interpreter)
    n_rows = @dimensions[0].to_i
    n_cols = @dimensions[1].to_i
    base = $options['base'].value
    fs_carriage = CarriageControl.new(',')
    gs_carriage = CarriageControl.new(';')
    rs_carriage = CarriageControl.new('NL')
    (base..n_rows).each do |row|
      (base..n_cols).each do |col|
        value = get_value_2(row, col)
        value.write(printer)
        fs_carriage.write(printer, interpreter) if col < n_cols
      end
      gs_carriage.write(printer, interpreter) if row < n_rows
    end
    rs_carriage.write(printer, interpreter)
  end

  # 2x2 determinant: ad - bc
  def determinant_2
    a = get_value_2(1, 1)
    b = get_value_2(1, 2)
    c = get_value_2(2, 1)
    d = get_value_2(2, 2)
    a.multiply(d) - b.multiply(c)
  end

  # cofactor expansion along the first row
  def determinant_n
    minus_one = NumericConstant.new(-1)
    sign = NumericConstant.new(1)
    det = NumericConstant.new(0)
    base = $options['base'].value
    # for each element in first row
    (base..@dimensions[1].to_i).each do |col|
      v = get_value_2(1, col)
      # create submatrix
      subm = submatrix(1, col)
      d = v.multiply(subm.determinant).multiply(sign)
      det += d
      sign = sign.multiply(minus_one)
    end
    det
  end

  # matrix with one row and one column removed
  def submatrix(exclude_row, exclude_col)
    one = NumericConstant.new(1)
    new_dims = [@dimensions[0] - one, @dimensions[1] - one]
    new_values = submatrix_values(exclude_row, exclude_col)
    Matrix.new(new_dims, new_values)
  end

  def submatrix_values(exclude_row, exclude_col)
    new_values = {}
    new_row = 1
    base = $options['base'].value
    (base..@dimensions[0].to_i).each do |row|
      new_col = 1
      next if row == exclude_row

      (base..@dimensions[1].to_i).each do |col|
        next if col == exclude_col

        coords = AbstractElement.make_coords(new_row, new_col)
        new_values[coords] = get_value_2(row, col)
        new_col += 1
      end
      new_row += 1
    end
    new_values
  end

  # elimination factor: values[row,col] / values[col,col]
  def calc_factor(values, row, col)
    denom_coords = AbstractElement.make_coords(col, col)
    denominator = values[denom_coords]
    numer_coords = AbstractElement.make_coords(row, col)
    numerator = values[numer_coords]
    numerator.divide(denominator)
  end

  # values[row,wcol] -= values[col,wcol] * factor
  def adjust_matrix_entry(values, row, col, wcol, factor)
    value_coords = AbstractElement.make_coords(row, wcol)
    minuend_coords = AbstractElement.make_coords(col, wcol)
    subtrahend = values[value_coords]
    minuend = values[minuend_coords]
    new_value = subtrahend - minuend.multiply(factor)
    values[value_coords] = new_value
  end

  def upper_triangle(n_cols, n_rows, values, inv_values)
    base = $options['base'].value
    (base..n_cols - 1).each do |col|
      (col + 1..n_rows).each do |row|
        # adjust values for this row
        factor = calc_factor(values, row, col)
        (base..n_cols).each do |wcol|
          adjust_matrix_entry(values, row, col, wcol, factor)
          adjust_matrix_entry(inv_values, row, col, wcol, factor)
        end
      end
    end
  end

  def lower_triangle(n_cols, values, inv_values)
    base = $options['base'].value
    n_cols.downto(base + 1) do |col|
      (col - 1).downto(base).each do |row|
        # adjust values for this row
        factor = calc_factor(values, row, col)
        (base..n_cols).each do |wcol|
          adjust_matrix_entry(values, row, col, wcol, factor)
          adjust_matrix_entry(inv_values, row, col, wcol, factor)
        end
      end
    end
  end

  # divide each row by its diagonal element
  def unitize(n_cols, n_rows, values, inv_values)
    base = $options['base'].value
    (base..n_rows).each do |row|
      denom_coords = AbstractElement.make_coords(row, row)
      denominator = values[denom_coords]
      (base..n_cols).each do |col|
        unitize_matrix_entry(values, row, col, denominator)
        unitize_matrix_entry(inv_values, row, col, denominator)
      end
    end
  end

  def unitize_matrix_entry(values, row, col, denominator)
    coords = AbstractElement.make_coords(row, col)
    numerator = values[coords]
    new_value = numerator.divide(denominator)
    values[coords] = new_value
  end
end
# Entry for cross-reference list
class XrefEntry
  attr_reader :variable
  attr_reader :signature
  attr_reader :is_ref

  # Build a signature (array of sigil characters) from argument content
  # types; returns nil when arguments is nil (no signature known).
  def self.make_signature(arguments)
    return nil if arguments.nil?

    sigil_chars = {
      numeric: '_',
      integer: '%',
      string: '$',
      boolean: '?'
    }

    sigils = []
    arguments.each do |arg|
      content_type = :empty
      if arg.class.to_s == 'Array'
        # an array is a parsed expression; its last element carries the type
        unless arg.empty?
          a0 = arg[-1]
          content_type = a0.content_type
        end
      else
        content_type = arg.content_type
      end
      sigils << sigil_chars[content_type]
    end

    sigils
  end

  def initialize(variable, signature, is_ref)
    @variable = variable
    @signature = signature
    @is_ref = is_ref
  end

  def eql?(other)
    @variable == other.variable &&
      @signature == other.signature &&
      @is_ref == other.is_ref
  end

  def hash
    @variable.hash + @signature.hash + @is_ref.hash
  end

  # signature size for ordering; nil sorts before any real signature
  def asize(x)
    return -1 if x.nil?

    x.size
  end

  def <=>(other)
    return -1 if self < other
    return 1 if self > other

    0
  end

  def ==(other)
    @variable == other.variable &&
      @signature == other.signature &&
      @is_ref == other.is_ref
  end

  # order by variable name, then signature size, then value-before-reference
  def >(other)
    return true if @variable > other.variable
    return false if @variable < other.variable
    return true if asize(@signature) > asize(other.signature)
    return false if asize(@signature) < asize(other.signature)

    !@is_ref && other.is_ref
  end

  # BUG FIX: was a duplicate of '>', so 'x >= x' returned false
  def >=(other)
    self > other || self == other
  end

  def <(other)
    return true if @variable < other.variable
    return false if @variable > other.variable
    return true if asize(@signature) < asize(other.signature)
    return false if asize(@signature) > asize(other.signature)

    @is_ref && !other.is_ref
  end

  # BUG FIX: was a duplicate of '<', so 'x <= x' returned false
  def <=(other)
    self < other || self == other
  end

  def n_dims
    @signature.size
  end

  def to_s
    dims = ''
    dims = '(' + @signature.join(',') + ')' unless @signature.nil?
    ref = ''
    ref = '=' if @is_ref
    @variable.to_s + dims + ref
  end

  def to_text
    dims = ''
    dims = '(' + @signature.join(',') + ')' unless @signature.nil?
    @variable.to_s + dims
  end
end
# Expression parser
class Parser
# Set up empty parser state; default_shape seeds the shape stack, which
# controls how variables are built (e.g. :declaration vs value shapes).
def initialize(default_shape)
  @operator_stack = []
  @expression_stack = []
  @current_expression = []
  @parens_stack = []
  @shape_stack = [default_shape]
  @parens_group = []
  # sentinel so the first real element sees an operator as its predecessor
  @previous_element = InitialOperator.new
end
# Process one element, dispatching on its kind; plain operands are
# appended directly to the output expression.
def parse(element)
  case
  when element.group_separator? then group_separator(element)
  when element.operator? then operator_higher(element)
  when element.function_variable? then function_variable(element)
  else
    # the element is an operand, append it to the output list
    @current_expression << element
  end
  @previous_element = element
end
# Return the completed parsed expressions: any collected parenthesized
# groups followed by the current expression (empty parts omitted).
# Raises when operators remain on the stack (malformed input).
def expressions
  raise(BASICExpressionError, 'Too many operators') unless
    @operator_stack.empty?

  parsed_expressions = []
  parsed_expressions.concat @parens_group unless @parens_group.empty?
  parsed_expressions << @current_expression unless @current_expression.empty?
  parsed_expressions
end
private
# Pop operators into the expression until a group starter (or the
# bottom of the stack) is reached; the starter stays on the stack.
def stack_to_expression(stack, expression)
  expression << stack.pop until stack.empty? || stack[-1].starter?
end
# Pop operators whose precedence is >= the incoming element's into the
# expression, stopping at a group starter or a lower-precedence operator.
def stack_to_precedence(stack, expression, element)
  loop do
    top = stack[-1]
    break if top.nil? || top.starter? || top.precedence < element.precedence

    expression << stack.pop
  end
end
# Dispatch group punctuation: start token opens a group, a separator
# flushes the current sub-expression, an end token closes the group.
def group_separator(element)
  return start_group(element) if element.group_start?
  return pop_to_group_start if element.separator?

  end_group(element) if element.group_end?
end
# Begin a parenthesized group. A group following a function or a
# variable is an argument/subscript list; otherwise plain grouping.
def start_group(element)
  if @previous_element.function?
    start_associated_group(element, @previous_element.default_shape)
  elsif @previous_element.variable?
    # variable subscripts are scalar expressions
    start_associated_group(element, :scalar)
  else
    start_simple_group(element)
  end
end
# a group associated with a function or variable
# (arguments or subscripts)
# Saves the in-progress expression and paren group, then opens a new
# context marked by a ParamStart so end_group can build a List.
def start_associated_group(element, shape)
  @expression_stack.push(@current_expression)
  @current_expression = []
  @operator_stack.push(ParamStart.new(element))
  @parens_stack << @parens_group
  @parens_group = []
  @shape_stack.push(shape)
end
# Open a plain (non-argument) parenthesized group; contents are scalar.
def start_simple_group(element)
  @operator_stack.push(element)
  @parens_stack << @parens_group
  @parens_group = []
  @shape_stack.push(:scalar)
end
# pop the operator stack until the corresponding left paren is found
# Append each operator to the end of the output list; the finished
# sub-expression joins the current paren group and a new one begins.
def pop_to_group_start
  stack_to_expression(@operator_stack, @current_expression)
  @parens_group << @current_expression
  @current_expression = []
end
# pop the operator stack until the corresponding left paren is removed
# Append each operator to the end of the output list. If the group was
# an argument/subscript list (ParamStart), wrap it in a List element and
# restore the suspended expression; then restore the outer paren group
# and shape.
def end_group(group_end_element)
  stack_to_expression(@operator_stack, @current_expression)
  @parens_group << @current_expression
  raise(BASICExpressionError, 'Too few operators') if @operator_stack.empty?

  # remove the '(' or '[' starter
  start_op = @operator_stack.pop
  error = 'Bracket/parenthesis mismatch, found ' + group_end_element.to_s +
          ' to match ' + start_op.to_s
  raise(BASICExpressionError, error) unless group_end_element.match?(start_op)

  if start_op.param_start?
    list = List.new(@parens_group)
    @operator_stack.push(list)
    @current_expression = @expression_stack.pop
  end
  @parens_group = @parens_stack.pop
  @shape_stack.pop
end
# Handle an operator token: operators of higher or equal precedence are
# flushed to the output first, then this one is stacked (terminal
# markers are never stacked).
def operator_higher(element)
  stack_to_precedence(@operator_stack, @current_expression, element)
  return if element.terminal?
  @operator_stack.push(element)
end
# Dispatch an operand token naming a user function, built-in function,
# or variable (checked in that order).
def function_variable(element)
  return start_user_function(element) if element.user_function?
  return start_function(element) if element.function?
  start_variable(element) if element.variable?
end
# Handle a user-defined function name: flush by precedence, then stack
# the name wrapped as a UserFunction.
def start_user_function(element)
  stack_to_precedence(@operator_stack, @current_expression, element)
  @operator_stack.push(UserFunction.new(element))
end
# Handle a built-in function name: flush by precedence, then stack the
# function element itself.
def start_function(element)
  stack_to_precedence(@operator_stack, @current_expression, element)
  @operator_stack.push(element)
end
# Handle a variable name: flush by precedence, then stack either a
# Declaration (inside a declaration context) or a Variable carrying the
# current shape.
def start_variable(element)
  stack_to_precedence(@operator_stack, @current_expression, element)
  shape = @shape_stack[-1]
  wrapped =
    if shape == :declaration
      Declaration.new(element)
    else
      Variable.new(element, shape, [])
    end
  @operator_stack.push(wrapped)
end
end
# base class for expressions
#
# Parses a token list into one or more "parsed expressions" (element
# lists in the order produced by Parser) and provides cross-reference
# walkers (numerics, strings, variables, ...) over the parsed form.
class AbstractExpression
# parsed element lists, one per sub-expression
attr_reader :parsed_expressions
# simple complexity score computed in initialize
attr_reader :comprehension_effort
# Convert tokens to elements, run them through Parser, resolve operator
# argument lists, and compute the comprehension-effort score.
def initialize(tokens, default_shape)
@unparsed_expression = tokens.map(&:to_s).join
# single-token constants are flagged so callers can special-case them
@numeric_constant = tokens.size == 1 && tokens[0].numeric_constant?
@text_constant = tokens.size == 1 && tokens[0].text_constant?
@target = false
@carriage = false
elements = tokens_to_elements(tokens)
parser = Parser.new(default_shape)
elements.each { |element| parser.parse(element) }
@parsed_expressions = parser.expressions
set_arguments_1(@parsed_expressions)
# effort starts at 1; each operator and function adds to it
@comprehension_effort = 1
@parsed_expressions.each do |parsed_expression|
prev = nil
parsed_expression.each do |element|
@comprehension_effort += 1 if element.operator?
# adjacent operators (e.g. unary following binary) count extra
@comprehension_effort += 1 if element.operator? && !prev.nil? && prev.operator?
@comprehension_effort += 1 if element.function?
# function? includes user-defined funcs,
# so the next line makes comprehension effort 2
@comprehension_effort += 1 if element.user_function?
prev = element
end
end
end
# the original, unparsed source text
def to_s
@unparsed_expression
end
# flat list of dump strings for all parsed elements
def dump
lines = []
@parsed_expressions.each do |expression|
x = expression.map(&:dump)
# NOTE(review): Array#map always returns an Array, so the else
# branch below appears to be unreachable
if x.class.to_s == 'Array'
lines += x.flatten
else
lines << x
end
end
lines
end
def carriage_control?
@carriage
end
# number of parsed sub-expressions
def count
@parsed_expressions.length
end
# true when the source was a single numeric-constant token
def numeric_constant?
@numeric_constant
end
# true when the source was a single text-constant token
def text_constant?
@text_constant
end
# true for assignable (target) expressions; set by subclasses
def target?
@target
end
# returns an Array of values
def evaluate(interpreter)
interpreter.evaluate(@parsed_expressions)
end
# plain numeric constants referenced by this expression
def numerics
parsed_expressions_numerics(@parsed_expressions)
end
# symbolic numeric constants referenced by this expression
def num_symbols
parsed_expressions_num_symbols(@parsed_expressions)
end
# plain text constants referenced by this expression
def strings
parsed_expressions_strings(@parsed_expressions)
end
# boolean constants referenced by this expression
def booleans
parsed_expressions_booleans(@parsed_expressions)
end
# symbolic text constants referenced by this expression
def text_symbols
parsed_expressions_text_symbols(@parsed_expressions)
end
# XrefEntry records for variables referenced by this expression
def variables
parsed_expressions_variables(@parsed_expressions)
end
# XrefEntry records for operators used by this expression
def operators
parsed_expressions_operators(@parsed_expressions)
end
# XrefEntry records for built-in functions used by this expression
def functions
parsed_expressions_functions(@parsed_expressions)
end
# XrefEntry records for user-defined functions used by this expression
def userfuncs
parsed_expressions_userfuncs(@parsed_expressions)
end
private
# resolve operator argument lists for every parsed expression
def set_arguments_1(parsed_expressions)
parsed_expressions.each { |expression| set_arguments_2(expression) }
end
# walk one parsed expression, letting operators consume operands from a
# content-type stack; a well-formed expression leaves at most one entry
def set_arguments_2(parsed_expression)
content_type_stack = []
parsed_expression.each do |item|
if item.list?
set_arguments_1(item.list)
elsif item.operator?
item.set_arguments(content_type_stack)
content_type_stack.push(item)
else
item.pop_stack(content_type_stack)
content_type_stack.push(item)
end
end
raise(BASICExpressionError, 'Bad expression') if
content_type_stack.size > 1
end
# collect plain numeric constants, folding a preceding unary minus
# into the constant's value
def parsed_expressions_numerics(parsed_expressions)
vars = []
parsed_expressions.each do |expression|
previous = nil
# backwards so the unary operator (if any) is seen first
expression.reverse_each do |thing|
if thing.list?
# recurse into expressions in list
sublist = thing.list
vars += parsed_expressions_numerics(sublist)
elsif thing.numeric_constant? && !thing.symbol
if !previous.nil? &&
previous.operator? &&
previous.unary? &&
previous.to_s == '-'
vars << thing.negate
else
vars << thing
end
end
previous = thing
end
end
vars
end
# collect symbolic numeric constants
def parsed_expressions_num_symbols(parsed_expressions)
vars = []
parsed_expressions.each do |expression|
# backwards so the unary operator (if any) is seen first
expression.reverse_each do |thing|
if thing.list?
# recurse into expressions in list
sublist = thing.list
vars += parsed_expressions_num_symbols(sublist)
elsif thing.numeric_constant? && thing.symbol
vars << thing
end
end
end
vars
end
# collect plain text constants
def parsed_expressions_strings(parsed_expressions)
strs = []
parsed_expressions.each do |expression|
expression.each do |thing|
if thing.list?
# recurse into expressions in list
sublist = thing.list
strs += parsed_expressions_strings(sublist)
elsif thing.text_constant? && !thing.symbol
strs << thing
end
end
end
strs
end
# collect boolean constants
def parsed_expressions_booleans(parsed_expressions)
bools = []
parsed_expressions.each do |expression|
expression.each do |thing|
if thing.list?
# recurse into expressions in list
sublist = thing.list
bools += parsed_expressions_booleans(sublist)
elsif thing.boolean_constant?
bools << thing
end
end
end
bools
end
# collect symbolic text constants
def parsed_expressions_text_symbols(parsed_expressions)
strs = []
parsed_expressions.each do |expression|
expression.each do |thing|
if thing.list?
# recurse into expressions in list
sublist = thing.list
strs += parsed_expressions_text_symbols(sublist)
elsif thing.text_constant? && thing.symbol
strs << thing
end
end
end
strs
end
# build XrefEntry records for variables; arrays/matrices get dummy
# zero subscripts unless the preceding element supplies a real list
def parsed_expressions_variables(parsed_expressions)
vars = []
parsed_expressions.each do |expression|
previous = nil
expression.each do |thing|
if thing.list?
# recurse into expressions in list
sublist = thing.list
vars += parsed_expressions_variables(sublist)
elsif thing.variable?
arguments = nil
if thing.array?
token = NumericConstantToken.new('0')
constant = NumericConstant.new(token)
arguments = [constant]
end
if thing.matrix?
token = NumericConstantToken.new('0')
constant = NumericConstant.new(token)
arguments = [constant, constant]
end
arguments = previous.list if !previous.nil? && previous.list?
is_ref = thing.reference?
signature = XrefEntry.make_signature(arguments)
vars << XrefEntry.new(thing.to_s, signature, is_ref)
end
previous = thing
end
end
vars
end
# build XrefEntry records for operators
def parsed_expressions_operators(parsed_expressions)
opers = []
parsed_expressions.each do |expression|
previous = nil
expression.each do |thing|
if thing.list?
# recurse into expressions in list
sublist = thing.list
opers += parsed_expressions_operators(sublist)
elsif thing.operator?
arguments = thing.arguments
is_ref = false
signature = XrefEntry.make_signature(arguments)
opers << XrefEntry.new(thing.to_s, signature, is_ref)
end
previous = thing
end
end
opers
end
# build XrefEntry records for built-in (non-user) functions
def parsed_expressions_functions(parsed_expressions)
vars = []
parsed_expressions.each do |expression|
previous = nil
expression.each do |thing|
if thing.list?
# recurse into expressions in list
sublist = thing.list
vars += parsed_expressions_functions(sublist)
elsif thing.function? && !thing.user_function?
arguments = nil
arguments = previous.list if !previous.nil? && previous.list?
is_ref = thing.reference?
signature = XrefEntry.make_signature(arguments)
vars << XrefEntry.new(thing.to_s, signature, is_ref)
end
previous = thing
end
end
vars
end
# build XrefEntry records for user-defined functions
def parsed_expressions_userfuncs(parsed_expressions)
vars = []
parsed_expressions.each do |expression|
previous = nil
expression.each do |thing|
if thing.list?
# recurse into expressions in list
sublist = thing.list
vars += parsed_expressions_userfuncs(sublist)
elsif thing.user_function?
arguments = nil
arguments = previous.list if !previous.nil? && previous.list?
is_ref = thing.reference?
signature = XrefEntry.make_signature(arguments)
vars << XrefEntry.new(thing.to_s, signature, is_ref)
end
previous = thing
end
end
vars
end
# convert raw tokens to parse elements; whether a token follows an
# operand decides unary vs binary interpretation
def tokens_to_elements(tokens)
elements = []
tokens.each do |token|
follows_operand = !elements.empty? && elements[-1].operand?
elements << token_to_element(token, follows_operand)
end
# '<<' returns the array, so the terminated list is the return value
elements << TerminalOperator.new
end
# map one token to an element; functions first, then the first
# accepting class from the binary or unary candidate list
def token_to_element(token, follows_operand)
return FunctionFactory.make(token.to_s) if
FunctionFactory.valid?(token.to_s)
element = nil
(follows_operand ? binary_classes : unary_classes).each do |c|
element = c.new(token) if element.nil? && c.accept?(token)
end
if element.nil?
raise(BASICExpressionError,
"Token '#{token.class}:#{token}' is not a value or operator")
end
element
end
# candidate element classes when the token follows an operand
def binary_classes
# first match is used; select order with care
# UserFunction before VariableName
[
GroupStart,
GroupEnd,
ParamSeparator,
BinaryOperatorPlus,
BinaryOperatorMinus,
BinaryOperatorMultiply,
BinaryOperatorDivide,
BinaryOperatorPower,
BinaryOperatorEqual,
BinaryOperatorNotEqual,
BinaryOperatorLess,
BinaryOperatorLessEqual,
BinaryOperatorGreater,
BinaryOperatorGreaterEqual,
BinaryOperatorAnd,
BinaryOperatorOr,
BinaryOperatorMax,
BinaryOperatorMin,
BooleanConstant,
IntegerConstant,
NumericConstant,
UserFunctionName,
VariableName,
TextConstant
]
end
# candidate element classes when the token does not follow an operand
def unary_classes
# first match is used; select order with care
# UserFunction before VariableName
[
GroupStart,
GroupEnd,
ParamSeparator,
UnaryOperatorPlus,
UnaryOperatorMinus,
UnaryOperatorHash,
UnaryOperatorColon,
UnaryOperatorNot,
BooleanConstant,
IntegerConstant,
NumericConstant,
UserFunctionName,
VariableName,
TextConstant
]
end
end
# Value expression (an R-value): an expression evaluated for its value.
class ValueExpression < AbstractExpression
  # Parse the tokens via the superclass and remember the shape.
  def initialize(_, shape)
    super
    @shape = shape
  end

  # R-values can always be printed.
  def printable?
    true
  end

  def keyword?
    false
  end

  def scalar?
    @shape == :scalar
  end

  # Content type of the root (last) element of the first expression.
  def content_type
    @parsed_expressions[0][-1].content_type
  end

  # True when the expression denotes a file handle (root is the '#'
  # unary operator).
  def filehandle?
    return false if @parsed_expressions.empty?
    root = @parsed_expressions[0][-1]
    root.operator? && root.pound?
  end

  # Evaluate and print the first result, if any.
  def print(printer, interpreter)
    results = evaluate(interpreter)
    results[0].print(printer) unless results.empty?
  end

  # Evaluate and write the first result, if any.
  def write(printer, interpreter)
    results = evaluate(interpreter)
    results[0].write(printer) unless results.empty?
  end

  # Evaluate and print the first compound (array/matrix) result.
  def compound_print(printer, interpreter, formats)
    results = evaluate(interpreter)
    results[0].print(printer, interpreter, formats) unless results.empty?
  end

  # Evaluate and write the first compound (array/matrix) result.
  def compound_write(printer, interpreter)
    results = evaluate(interpreter)
    results[0].write(printer, interpreter) unless results.empty?
  end
end
# Declaration expression: the variable list of a DIM-style statement.
class DeclarationExpression < AbstractExpression
  def initialize(tokens)
    super(tokens, :declaration)
    check_length
    check_all_lengths
    check_resolve_types
  end

  private

  # at least one declaration is required
  def check_length
    raise(BASICSyntaxError, 'Value list is empty (length 0)') if
      @parsed_expressions.empty?
  end

  # no declaration may be empty
  def check_all_lengths
    @parsed_expressions.each do |parsed_expression|
      raise(BASICSyntaxError, 'Value is not declaration (length 0)') if
        parsed_expression.empty?
    end
  end

  # every expression must resolve to a Declaration element
  def check_resolve_types
    @parsed_expressions.each do |parsed_expression|
      next if parsed_expression[-1].class.to_s == 'Declaration'
      raise(BASICSyntaxError,
            "Value is not declaration (type #{parsed_expression[-1].class})")
    end
  end
end
# Target expression: the assignable left-hand side of an assignment.
class TargetExpression < AbstractExpression
  def initialize(tokens, shape)
    super
    check_length
    check_all_lengths
    check_resolve_types
    @target = true
    # mark each root element as a reference (write) rather than a value
    @parsed_expressions.each do |parsed_expression|
      parsed_expression[-1].valref = :reference
    end
  end

  # a target is never a file handle
  def filehandle?
    false
  end

  private

  # at least one target is required
  def check_length
    raise(BASICSyntaxError, 'Value list is empty (length 0)') if
      @parsed_expressions.empty?
  end

  # no target may be empty
  def check_all_lengths
    @parsed_expressions.each do |parsed_expression|
      raise(BASICSyntaxError, 'Value is not assignable (length 0)') if
        parsed_expression.empty?
    end
  end

  # only variables and user functions can be assigned to
  def check_resolve_types
    @parsed_expressions.each do |parsed_expression|
      type_name = parsed_expression[-1].class.to_s
      next if type_name == 'Variable' || type_name == 'UserFunction'
      raise(BASICSyntaxError,
            "Value is not assignable (type #{parsed_expression[-1].class})")
    end
  end
end
# User function definition
# Define the user function name, arguments, and expression
class UserFunctionDefinition
  attr_reader :name
  attr_reader :arguments
  attr_reader :sig
  attr_reader :expression
  attr_reader :numerics
  attr_reader :num_symbols
  attr_reader :strings
  attr_reader :booleans
  attr_reader :text_symbols
  attr_reader :variables
  attr_reader :operators
  attr_reader :functions
  attr_reader :userfuncs
  attr_reader :comprehension_effort

  # Parse "name(args) = expression" tokens. A bare prototype (one part,
  # no '=') declares a multi-line function definition.
  #
  # Fix: the original recomputed XrefEntry.make_signature(@arguments) in
  # both branches although @sig already holds exactly that value, and
  # duplicated the XrefEntry construction; both are now done once.
  def initialize(tokens)
    # parse into name '=' expression
    line_text = tokens.map(&:to_s).join
    parts = split_tokens(tokens)
    raise(BASICExpressionError, "'#{line_text}' is not a valid assignment") if
      parts.size != 3 && parts.size != 1
    user_function_prototype = UserFunctionPrototype.new(parts[0])
    @name = user_function_prototype.name
    @arguments = user_function_prototype.arguments
    @sig = XrefEntry.make_signature(@arguments)
    @expression = nil
    @expression = ValueExpression.new(parts[2], :scalar) if parts.size == 3
    # the function name itself is cross-referenced as a reference
    xr = XrefEntry.new(@name.to_s, @sig, true)
    if @expression.nil?
      # multi-line definition: nothing to cross-reference yet
      @numerics = []
      @num_symbols = []
      @strings = []
      @booleans = []
      @text_symbols = []
      @variables = []
      @operators = []
      @functions = []
      @userfuncs = [xr]
      @comprehension_effort = 0
    else
      # single-line definition: inherit references from the expression
      @numerics = @expression.numerics
      @num_symbols = @expression.num_symbols
      @strings = @expression.strings
      @booleans = @expression.booleans
      @text_symbols = @expression.text_symbols
      @variables = @expression.variables
      @operators = @expression.operators
      @functions = @expression.functions
      @userfuncs = [xr] + @expression.userfuncs
      @comprehension_effort = @expression.comprehension_effort
    end
    # add parameters to function as references
    @arguments.each do |argument|
      @variables << XrefEntry.new(argument.to_s, nil, true)
    end
  end

  # true for a multi-line definition (prototype only, no expression)
  def multidef?
    @expression.nil?
  end

  # dump lines for name, arguments, and (if present) the expression
  def dump
    lines = []
    lines << @name.dump
    @arguments.each { |arg| lines << arg.dump }
    lines += @expression.dump unless @expression.nil?
    lines
  end

  def to_s
    vnames = @arguments.map(&:to_s).join(',')
    s = @name.to_s + '(' + vnames + ')'
    s += '=' + @expression.to_s unless @expression.nil?
    s
  end

  # type/shape specs for the declared parameters, in order
  def signature
    numeric_spec = { 'type' => :numeric, 'shape' => :scalar }
    text_spec = { 'type' => :string, 'shape' => :scalar }
    integer_spec = { 'type' => :integer, 'shape' => :scalar }
    sig = []
    @arguments.each do |arg|
      sig << numeric_spec if arg.content_type == :numeric
      sig << text_spec if arg.content_type == :string
      sig << integer_spec if arg.content_type == :integer
    end
    sig
  end

  private

  # split the token list at the first '=' operator only; later '='
  # tokens (comparisons) stay inside the expression part
  def split_tokens(tokens)
    results = []
    nonkeywords = []
    eq_count = 0
    tokens.each do |token|
      if token.operator? && token.equals? && eq_count < 1
        results << nonkeywords unless nonkeywords.empty?
        nonkeywords = []
        results << token
        eq_count += 1
      else
        nonkeywords << token
      end
    end
    results << nonkeywords unless nonkeywords.empty?
    results
  end
end
# User function prototype
# Define the user function name and arguments
class UserFunctionPrototype
  attr_reader :name
  attr_reader :arguments

  # tokens: function name, '(', parameter list, ')'
  def initialize(tokens)
    check_tokens(tokens)
    @name = UserFunctionName.new(tokens[0])
    @arguments = variable_names(tokens[2..-2])
    # arguments must be unique
    names = @arguments.map(&:to_s)
    raise(BASICExpressionError, 'Duplicate parameters') unless
      names.uniq.size == names.size
  end

  # NOTE(review): returns the UserFunctionName object, not a String;
  # callers appear to rely on implicit #to_s
  def to_s
    @name
  end

  private

  # verify tokens are UserFunction, open, close
  def check_tokens(tokens)
    ok = tokens.size >= 3 && tokens[0].user_function? &&
         tokens[1].groupstart? && tokens[-1].groupend?
    raise(BASICSyntaxError, 'Invalid function specification') unless ok
  end

  # verify tokens are alternating variable names and separators; all
  # names are converted first, then the separators are validated
  def variable_names(params)
    names = []
    seps = []
    params.each_with_index do |token, index|
      (index.even? ? names : seps) << token
    end
    converted = names.map { |token| VariableName.new(token) }
    seps.each do |sep|
      raise(BASICSyntaxError, 'Invalid list separator') unless
        sep.separator?
    end
    converted
  end
end
# Assignment: target expression(s) '=' value expression
class Assignment
  attr_reader :target
  attr_reader :numerics
  attr_reader :num_symbols
  attr_reader :strings
  attr_reader :booleans
  attr_reader :text_symbols
  attr_reader :variables
  attr_reader :operators
  attr_reader :functions
  attr_reader :userfuncs
  attr_reader :comprehension_effort

  # Split tokens at '=' into target / operator / expression and parse
  # both sides.
  def initialize(tokens, shape)
    # parse into variable, '=', expression
    @token_lists = split_tokens(tokens)
    line_text = tokens.map(&:to_s).join
    raise(BASICExpressionError, "'#{line_text}' is not a valid assignment") if
      @token_lists.size != 3 ||
      !(@token_lists[1].operator? && @token_lists[1].equals?)
    @numerics = []
    @num_symbols = []
    @strings = []
    @booleans = []
    @text_symbols = []
    @variables = []
    @operators = []
    @functions = []
    @userfuncs = []
    @target = TargetExpression.new(@token_lists[0], shape)
    @expression = ValueExpression.new(@token_lists[2], shape)
    make_references
    @comprehension_effort =
      @target.comprehension_effort + @expression.comprehension_effort
  end

  # dump lines for both sides plus the operator marker
  def dump
    lines = []
    lines.concat(@target.dump)
    lines.concat(@expression.dump)
    lines << 'AssignmentOperator:='
  end

  # number of target sub-expressions
  def count_target
    @target.count
  end

  # number of value sub-expressions
  def count_value
    @expression.count
  end

  def eval_value(interpreter)
    @expression.evaluate(interpreter)
  end

  def eval_target(interpreter)
    @target.evaluate(interpreter)
  end

  def to_s
    @target.to_s + ' = ' + @expression.to_s
  end

  private

  # split the token list on '=' operators
  def split_tokens(tokens)
    groups = []
    pending = []
    tokens.each do |token|
      if token.operator? && token.equals?
        groups << pending unless pending.empty?
        pending = []
        groups << token
      else
        pending << token
      end
    end
    groups << pending unless pending.empty?
    groups
  end

  # combine cross-reference lists from both sides
  def make_references
    @numerics = @target.numerics + @expression.numerics
    @num_symbols = @target.num_symbols + @expression.num_symbols
    @strings = @target.strings + @expression.strings
    @booleans = @target.booleans + @expression.booleans
    @text_symbols = @target.text_symbols + @expression.text_symbols
    @variables = @target.variables + @expression.variables
    @operators = @target.operators + @expression.operators
    @functions = @target.functions + @expression.functions
    @userfuncs = @target.userfuncs + @expression.userfuncs
  end
end
|
#!/usr/bin/env ruby
# Launch a Grappa job (or boost unit test) under SLURM's srun.
# Arguments after '--' are passed through to the launched program.
require 'optparse'
require 'ostruct'
# split our own options from pass-through arguments at '--'
if ARGV.index('--')
myargs = ARGV[0...ARGV.index('--')]
remain = ARGV[ARGV.index('--')+1..ARGV.size]
else
myargs = ARGV
remain = []
end
DIR = File.expand_path(File.dirname(__FILE__))
puts "DIR = #{DIR}"
opt = OpenStruct.new
# opt.nnode = 2
# opt.ppn = 1
opt.time = '15:00'
opt.freeze_on_error = false
OptionParser.new do |p|
p.banner = "Usage: #{__FILE__} [options]"
p.on('-n', '--nnode NODES', Integer, "Number of nodes to run the Grappa job with"){|n| opt.nnode = n }
p.on('-p', '--ppn CORES', Integer, "Number of cores/processes per node"){|c| opt.ppn = c }
p.on('-t', '--time TIME', 'Job time to pass to srun'){|t| opt.time = t }
p.on('-e', '--test TEST', 'Run boost unit test program with given name (e.g. Aggregator_tests)'){|t| opt.test = t }
p.on('-f', '--freeze-on-error', "Freeze all the jobs when there's an error"){ opt.freeze_on_error = true }
end.parse!(myargs)
# base srun flags plus per-task prolog/epilog scripts next to this file
srun_flags = %w[ --cpu_bind=verbose,rank --label --kill-on-bad-exit ] \
<< "--task-prolog=#{DIR}/grappa_srun_prolog.rb" \
<< "--task-epilog=#{DIR}/grappa_srun_epilog.sh"
# "source" prolog
require_relative "grappa_srun_prolog.rb"
setarch = ""
# choose SLURM partition based on the cluster we are running on
# (hostnames like "pal" or "nodeNN" indicate the PAL cluster)
case `hostname`
when /pal|node\d+/
srun_flags << "--partition=pal" << "--account=pal"
# disable address randomization (doesn't seem to actually fix pprof multi-node problems)
# setarch = "setarch x86_64 -RL "
else
srun_flags << "--partition=grappa" << "--resv-ports"
end
srun_flags << "--nodes=#{opt.nnode}" if opt.nnode
srun_flags << "--ntasks-per-node=#{opt.ppn}" if opt.ppn
srun_flags << "--time=#{opt.time}" if opt.time
ENV["GASNET_FREEZE_ON_ERROR"] = opt.freeze_on_error ? "1" : "0"
# boost unit test invocation when -e/--test is given
test = "#{opt.test}.test --log_level=test_suite --report_level=confirm --run_test=#{opt.test}" if opt.test
# jacob's preferred test options
#test = "#{opt.test}.test --log_level=nothing --report_level=no --run_test=#{opt.test}" if opt.test
# build and replace this process with the final srun command line
s = "srun #{srun_flags.join(' ')} -- #{test} #{setarch}#{remain.join(' ')}"
puts s
$stdout.flush
exec s
Add flag '--[no-]verbose' to grappa_srun for verbose tests
#!/usr/bin/env ruby
# Launch a Grappa job (or boost unit test) under SLURM's srun.
# Later revision of the launcher: adds '--[no-]verbose' and makes
# '--freeze-on-error' a toggle. Arguments after '--' pass through.
require 'optparse'
require 'ostruct'
# split our own options from pass-through arguments at '--'
if ARGV.index('--')
myargs = ARGV[0...ARGV.index('--')]
remain = ARGV[ARGV.index('--')+1..ARGV.size]
else
myargs = ARGV
remain = []
end
DIR = File.expand_path(File.dirname(__FILE__))
puts "DIR = #{DIR}"
opt = OpenStruct.new
# opt.nnode = 2
# opt.ppn = 1
opt.time = '15:00'
opt.freeze_on_error = false
opt.verbose = true
OptionParser.new do |p|
p.banner = "Usage: #{__FILE__} [options]"
p.on('-n', '--nnode NODES', Integer, "Number of nodes to run the Grappa job with"){|n| opt.nnode = n }
p.on('-p', '--ppn CORES', Integer, "Number of cores/processes per node"){|c| opt.ppn = c }
p.on('-t', '--time TIME', 'Job time to pass to srun'){|t| opt.time = t }
p.on('-e', '--test TEST', 'Run boost unit test program with given name (e.g. Aggregator_tests)'){|t| opt.test = t }
p.on('-f', '--[no-]freeze-on-error', "Freeze all the jobs when there's an error"){|f| opt.freeze_on_error = f }
p.on('-v', '--[no-]verbose', "Verbose tests"){|v| opt.verbose = v }
end.parse!(myargs)
# base srun flags plus per-task prolog/epilog scripts next to this file
srun_flags = %w[ --cpu_bind=verbose,rank --label --kill-on-bad-exit ] \
<< "--task-prolog=#{DIR}/grappa_srun_prolog.rb" \
<< "--task-epilog=#{DIR}/grappa_srun_epilog.sh"
# "source" prolog
require_relative "grappa_srun_prolog.rb"
setarch = ""
# choose SLURM partition based on the cluster we are running on
# (hostnames like "pal" or "nodeNN" indicate the PAL cluster)
case `hostname`
when /pal|node\d+/
srun_flags << "--partition=pal" << "--account=pal"
# disable address randomization (doesn't seem to actually fix pprof multi-node problems)
# setarch = "setarch x86_64 -RL "
else
srun_flags << "--partition=grappa" << "--resv-ports"
end
srun_flags << "--nodes=#{opt.nnode}" if opt.nnode
srun_flags << "--ntasks-per-node=#{opt.ppn}" if opt.ppn
srun_flags << "--time=#{opt.time}" if opt.time
ENV["GASNET_FREEZE_ON_ERROR"] = opt.freeze_on_error ? "1" : "0"
# boost unit test log/report flags, controlled by --[no-]verbose
if opt.verbose
verbose_test = '--log_level=test_suite --report_level=confirm'
else
verbose_test = '--log_level=nothing --report_level=no'
end
test = "#{opt.test}.test #{verbose_test} --run_test=#{opt.test}" if opt.test
# jacob's preferred test options
#test = "#{opt.test}.test --log_level=nothing --report_level=no --run_test=#{opt.test}" if opt.test
# build and replace this process with the final srun command line
s = "srun #{srun_flags.join(' ')} -- #{test} #{setarch}#{remain.join(' ')}"
puts s
$stdout.flush
exec s
|
# Spec file for Ruby's `def` keyword: method definition, redefinition,
# visibility of special methods, splat/default arguments, and singleton
# method definitions. The exact method names and top-level `def`s are
# the subject under test — do not rename or restructure.
require File.expand_path('../../spec_helper', __FILE__)
require File.expand_path('../fixtures/def', __FILE__)
# Language-level method behaviour
describe "Redefining a method" do
it "replaces the original method" do
def barfoo; 100; end
barfoo.should == 100
def barfoo; 200; end
barfoo.should == 200
end
end
# the methods below are defined in the fixtures/def file at top level
describe "Defining a method at the top-level" do
it "defines it on Object with private visibility by default" do
Object.should have_private_instance_method(:some_toplevel_method, false)
end
it "defines it on Object with public visibility after calling public" do
Object.should have_public_instance_method(:public_toplevel_method, false)
end
end
describe "Defining an 'initialize' method" do
it "sets the method's visibility to private" do
class DefInitializeSpec
def initialize
end
end
DefInitializeSpec.should have_private_instance_method(:initialize, false)
end
end
describe "Defining an 'initialize_copy' method" do
it "sets the method's visibility to private" do
class DefInitializeCopySpec
def initialize_copy
end
end
DefInitializeCopySpec.should have_private_instance_method(:initialize_copy, false)
end
end
describe "Defining an 'initialize_dup' method" do
it "sets the method's visibility to private" do
class DefInitializeDupSpec
def initialize_dup
end
end
DefInitializeDupSpec.should have_private_instance_method(:initialize_dup, false)
end
end
describe "Defining an 'initialize_clone' method" do
it "sets the method's visibility to private" do
class DefInitializeCloneSpec
def initialize_clone
end
end
DefInitializeCloneSpec.should have_private_instance_method(:initialize_clone, false)
end
end
describe "Defining a 'respond_to_missing?' method" do
it "sets the method's visibility to private" do
class DefRespondToMissingPSpec
def respond_to_missing?
end
end
DefRespondToMissingPSpec.should have_private_instance_method(:respond_to_missing?, false)
end
end
describe "Defining a method" do
it "returns a symbol of the method name" do
method_name = def some_method; end
method_name.should == :some_method
end
end
describe "An instance method definition with a splat" do
it "accepts an unnamed '*' argument" do
def foo(*); end;
foo.should == nil
foo(1, 2).should == nil
foo(1, 2, 3, 4, :a, :b, 'c', 'd').should == nil
end
it "accepts a named * argument" do
def foo(*a); a; end;
foo.should == []
foo(1, 2).should == [1, 2]
foo([:a]).should == [[:a]]
end
it "accepts non-* arguments before the * argument" do
def foo(a, b, c, d, e, *f); [a, b, c, d, e, f]; end
foo(1, 2, 3, 4, 5, 6, 7, 8).should == [1, 2, 3, 4, 5, [6, 7, 8]]
end
it "allows only a single * argument" do
lambda { eval 'def foo(a, *b, *c); end' }.should raise_error(SyntaxError)
end
it "requires the presence of any arguments that precede the *" do
def foo(a, b, *c); end
lambda { foo 1 }.should raise_error(ArgumentError)
end
end
describe "An instance method with a default argument" do
it "evaluates the default when no arguments are passed" do
def foo(a = 1)
a
end
foo.should == 1
foo(2).should == 2
end
it "evaluates the default empty expression when no arguments are passed" do
def foo(a = ())
a
end
foo.should == nil
foo(2).should == 2
end
it "assigns an empty Array to an unused splat argument" do
def foo(a = 1, *b)
[a,b]
end
foo.should == [1, []]
foo(2).should == [2, []]
end
it "evaluates the default when required arguments precede it" do
def foo(a, b = 2)
[a,b]
end
lambda { foo }.should raise_error(ArgumentError)
foo(1).should == [1, 2]
end
it "prefers to assign to a default argument before a splat argument" do
def foo(a, b = 2, *c)
[a,b,c]
end
lambda { foo }.should raise_error(ArgumentError)
foo(1).should == [1,2,[]]
end
it "prefers to assign to a default argument when there are no required arguments" do
def foo(a = 1, *args)
[a,args]
end
foo(2,2).should == [2,[2]]
end
it "does not evaluate the default when passed a value and a * argument" do
def foo(a, b = 2, *args)
[a,b,args]
end
foo(2,3,3).should == [2,3,[3]]
end
it "shadows an existing method with the same name as the local" do
def bar
1
end
-> {
eval "def foo(bar = bar)
bar
end"
}.should complain(/circular argument reference/)
foo.should == nil
foo(2).should == 2
end
it "calls a method with the same name as the local when explicitly using ()" do
def bar
1
end
def foo(bar = bar())
bar
end
foo.should == 1
foo(2).should == 2
end
end
describe "A singleton method definition" do
it "can be declared for a local variable" do
a = Object.new
def a.foo
5
end
a.foo.should == 5
end
it "can be declared for an instance variable" do
@a = Object.new
def @a.foo
6
end
@a.foo.should == 6
end
it "can be declared for a global variable" do
$__a__ = "hi"
def $__a__.foo
7
end
$__a__.foo.should == 7
end
it "can be declared with an empty method body" do
class DefSpec
def self.foo;end
end
DefSpec.foo.should == nil
end
it "can be redefined" do
obj = Object.new
def obj.==(other)
1
end
(obj==1).should == 1
def obj.==(other)
2
end
(obj==2).should == 2
end
it "raises #{frozen_error_class} if frozen" do
obj = Object.new
obj.freeze
lambda { def obj.foo; end }.should raise_error(frozen_error_class)
end
end
describe "Redefining a singleton method" do
it "does not inherit a previously set visibility" do
o = Object.new
class << o; private; def foo; end; end;
class << o; should have_private_instance_method(:foo); end
class << o; def foo; end; end;
class << o; should_not have_private_instance_method(:foo); end
class << o; should have_instance_method(:foo); end
end
end
# NOTE(review): the describe block below is an exact duplicate of the
# one immediately above — candidate for removal
describe "Redefining a singleton method" do
it "does not inherit a previously set visibility" do
o = Object.new
class << o; private; def foo; end; end;
class << o; should have_private_instance_method(:foo); end
class << o; def foo; end; end;
class << o; should_not have_private_instance_method(:foo); end
class << o; should have_instance_method(:foo); end
end
end
describe "A method defined with extreme default arguments" do
it "can redefine itself when the default is evaluated" do
class DefSpecs
def foo(x = (def foo; "hello"; end;1));x;end
end
d = DefSpecs.new
d.foo(42).should == 42
d.foo.should == 1
d.foo.should == 'hello'
end
it "may use an fcall as a default" do
def bar
1
end
def foo(x = bar())
x
end
foo.should == 1
foo(2).should == 2
end
it "evaluates the defaults in the method's scope" do
def foo(x = ($foo_self = self; nil)); end
foo
$foo_self.should == self
end
it "may use preceding arguments as defaults" do
def foo(obj, width=obj.length)
width
end
foo('abcde').should == 5
end
it "may use a lambda as a default" do
def foo(output = 'a', prc = lambda {|n| output * n})
prc.call(5)
end
foo.should == 'aaaaa'
end
end
describe "A singleton method defined with extreme default arguments" do
it "may use a method definition as a default" do
$__a = Object.new
def $__a.foo(x = (def $__a.foo; "hello"; end;1));x;end
$__a.foo(42).should == 42
$__a.foo.should == 1
$__a.foo.should == 'hello'
end
it "may use an fcall as a default" do
a = Object.new
def a.bar
1
end
def a.foo(x = bar())
x
end
a.foo.should == 1
a.foo(2).should == 2
end
it "evaluates the defaults in the singleton scope" do
a = Object.new
def a.foo(x = ($foo_self = self; nil)); 5 ;end
a.foo
$foo_self.should == a
end
it "may use preceding arguments as defaults" do
a = Object.new
def a.foo(obj, width=obj.length)
width
end
a.foo('abcde').should == 5
end
it "may use a lambda as a default" do
a = Object.new
def a.foo(output = 'a', prc = lambda {|n| output * n})
prc.call(5)
end
a.foo.should == 'aaaaa'
end
end
describe "A method definition inside a metaclass scope" do
it "can create a class method" do
class DefSpecSingleton
class << self
def a_class_method;self;end
end
end
DefSpecSingleton.a_class_method.should == DefSpecSingleton
lambda { Object.a_class_method }.should raise_error(NoMethodError)
end
it "can create a singleton method" do
obj = Object.new
class << obj
def a_singleton_method;self;end
end
obj.a_singleton_method.should == obj
lambda { Object.new.a_singleton_method }.should raise_error(NoMethodError)
end
it "raises #{frozen_error_class} if frozen" do
obj = Object.new
obj.freeze
class << obj
lambda { def foo; end }.should raise_error(frozen_error_class)
end
end
end
describe "A nested method definition" do
it "creates an instance method when evaluated in an instance method" do
class DefSpecNested
def create_instance_method
def an_instance_method;self;end
an_instance_method
end
end
obj = DefSpecNested.new
obj.create_instance_method.should == obj
obj.an_instance_method.should == obj
other = DefSpecNested.new
other.an_instance_method.should == other
DefSpecNested.should have_instance_method(:an_instance_method)
end
it "creates a class method when evaluated in a class method" do
class DefSpecNested
class << self
# cleanup
remove_method :a_class_method if method_defined? :a_class_method
def create_class_method
def a_class_method;self;end
a_class_method
end
end
end
lambda { DefSpecNested.a_class_method }.should raise_error(NoMethodError)
DefSpecNested.create_class_method.should == DefSpecNested
DefSpecNested.a_class_method.should == DefSpecNested
lambda { Object.a_class_method }.should raise_error(NoMethodError)
lambda { DefSpecNested.new.a_class_method }.should raise_error(NoMethodError)
end
it "creates a singleton method when evaluated in the metaclass of an instance" do
class DefSpecNested
def create_singleton_method
class << self
def a_singleton_method;self;end
end
a_singleton_method
end
end
obj = DefSpecNested.new
obj.create_singleton_method.should == obj
obj.a_singleton_method.should == obj
other = DefSpecNested.new
lambda { other.a_singleton_method }.should raise_error(NoMethodError)
end
it "creates a method in the surrounding context when evaluated in a def expr.method" do
class DefSpecNested
TARGET = Object.new
def TARGET.defs_method
def inherited_method;self;end
end
end
DefSpecNested::TARGET.defs_method
DefSpecNested.should have_instance_method :inherited_method
DefSpecNested::TARGET.should_not have_method :inherited_method
obj = DefSpecNested.new
obj.inherited_method.should == obj
end
# See http://yugui.jp/articles/846#label-3
it "inside an instance_eval creates a singleton method" do
class DefSpecNested
OBJ = Object.new
OBJ.instance_eval do
def create_method_in_instance_eval(a = (def arg_method; end))
def body_method; end
end
end
end
obj = DefSpecNested::OBJ
obj.create_method_in_instance_eval
obj.should have_method :arg_method
obj.should have_method :body_method
DefSpecNested.should_not have_instance_method :arg_method
DefSpecNested.should_not have_instance_method :body_method
end
it "defines methods as public by default" do
cls = Class.new do
def do_def
def new_def
1
end
end
end
obj = cls.new
obj.do_def
obj.new_def.should == 1
end
end
describe "A method definition inside an instance_eval" do
it "creates a singleton method" do
obj = Object.new
obj.instance_eval do
def an_instance_eval_method;self;end
end
obj.an_instance_eval_method.should == obj
other = Object.new
lambda { other.an_instance_eval_method }.should raise_error(NoMethodError)
end
it "creates a singleton method when evaluated inside a metaclass" do
obj = Object.new
obj.instance_eval do
class << self
def a_metaclass_eval_method;self;end
end
end
obj.a_metaclass_eval_method.should == obj
other = Object.new
lambda { other.a_metaclass_eval_method }.should raise_error(NoMethodError)
end
it "creates a class method when the receiver is a class" do
DefSpecNested.instance_eval do
def an_instance_eval_class_method;self;end
end
DefSpecNested.an_instance_eval_class_method.should == DefSpecNested
lambda { Object.an_instance_eval_class_method }.should raise_error(NoMethodError)
end
it "creates a class method when the receiver is an anonymous class" do
m = Class.new
m.instance_eval do
def klass_method
:test
end
end
m.klass_method.should == :test
lambda { Object.klass_method }.should raise_error(NoMethodError)
end
it "creates a class method when instance_eval is within class" do
m = Class.new do
instance_eval do
def klass_method
:test
end
end
end
m.klass_method.should == :test
lambda { Object.klass_method }.should raise_error(NoMethodError)
end
end
describe "A method definition inside an instance_exec" do
it "creates a class method when the receiver is a class" do
DefSpecNested.instance_exec(1) do |param|
@stuff = param
def an_instance_exec_class_method; @stuff; end
end
DefSpecNested.an_instance_exec_class_method.should == 1
lambda { Object.an_instance_exec_class_method }.should raise_error(NoMethodError)
end
it "creates a class method when the receiver is an anonymous class" do
m = Class.new
m.instance_exec(1) do |param|
@stuff = param
def klass_method
@stuff
end
end
m.klass_method.should == 1
lambda { Object.klass_method }.should raise_error(NoMethodError)
end
it "creates a class method when instance_exec is within class" do
m = Class.new do
instance_exec(2) do |param|
@stuff = param
def klass_method
@stuff
end
end
end
m.klass_method.should == 2
lambda { Object.klass_method }.should raise_error(NoMethodError)
end
end
describe "A method definition in an eval" do
it "creates an instance method" do
class DefSpecNested
def eval_instance_method
eval "def an_eval_instance_method;self;end", binding
an_eval_instance_method
end
end
obj = DefSpecNested.new
obj.eval_instance_method.should == obj
obj.an_eval_instance_method.should == obj
other = DefSpecNested.new
other.an_eval_instance_method.should == other
lambda { Object.new.an_eval_instance_method }.should raise_error(NoMethodError)
end
it "creates a class method" do
class DefSpecNestedB
class << self
def eval_class_method
eval "def an_eval_class_method;self;end" #, binding
an_eval_class_method
end
end
end
DefSpecNestedB.eval_class_method.should == DefSpecNestedB
DefSpecNestedB.an_eval_class_method.should == DefSpecNestedB
lambda { Object.an_eval_class_method }.should raise_error(NoMethodError)
lambda { DefSpecNestedB.new.an_eval_class_method}.should raise_error(NoMethodError)
end
it "creates a singleton method" do
class DefSpecNested
def eval_singleton_method
class << self
eval "def an_eval_singleton_method;self;end", binding
end
an_eval_singleton_method
end
end
obj = DefSpecNested.new
obj.eval_singleton_method.should == obj
obj.an_eval_singleton_method.should == obj
other = DefSpecNested.new
lambda { other.an_eval_singleton_method }.should raise_error(NoMethodError)
end
end
describe "a method definition that sets more than one default parameter all to the same value" do
def foo(a=b=c={})
[a,b,c]
end
it "assigns them all the same object by default" do
foo.should == [{},{},{}]
a, b, c = foo
a.should eql(b)
a.should eql(c)
end
it "allows the first argument to be given, and sets the rest to null" do
foo(1).should == [1,nil,nil]
end
it "assigns the parameters different objects across different default calls" do
a, _b, _c = foo
d, _e, _f = foo
a.should_not equal(d)
end
it "only allows overriding the default value of the first such parameter in each set" do
lambda { foo(1,2) }.should raise_error(ArgumentError)
end
def bar(a=b=c=1,d=2)
[a,b,c,d]
end
it "treats the argument after the multi-parameter normally" do
bar.should == [1,1,1,2]
bar(3).should == [3,nil,nil,2]
bar(3,4).should == [3,nil,nil,4]
lambda { bar(3,4,5) }.should raise_error(ArgumentError)
end
end
describe "The def keyword" do
describe "within a closure" do
it "looks outside the closure for the visibility" do
module DefSpecsLambdaVisibility
private
lambda {
def some_method; end
}.call
end
DefSpecsLambdaVisibility.should have_private_instance_method("some_method")
end
end
end
Make sure the method is not defined on Object in nested def spec
require File.expand_path('../../spec_helper', __FILE__)
require File.expand_path('../fixtures/def', __FILE__)
# Language-level method behaviour
# NOTE(review): mspec specs for the `def` keyword — the literal code shapes
# below are the subject under test; do not reformat or "simplify" them.
describe "Redefining a method" do
it "replaces the original method" do
def barfoo; 100; end
barfoo.should == 100
def barfoo; 200; end
barfoo.should == 200
end
end
# The probed methods are defined in fixtures/def (loaded by the file header).
describe "Defining a method at the top-level" do
it "defines it on Object with private visibility by default" do
Object.should have_private_instance_method(:some_toplevel_method, false)
end
it "defines it on Object with public visibility after calling public" do
Object.should have_public_instance_method(:public_toplevel_method, false)
end
end
describe "Defining an 'initialize' method" do
it "sets the method's visibility to private" do
class DefInitializeSpec
def initialize
end
end
DefInitializeSpec.should have_private_instance_method(:initialize, false)
end
end
describe "Defining an 'initialize_copy' method" do
it "sets the method's visibility to private" do
class DefInitializeCopySpec
def initialize_copy
end
end
DefInitializeCopySpec.should have_private_instance_method(:initialize_copy, false)
end
end
describe "Defining an 'initialize_dup' method" do
it "sets the method's visibility to private" do
class DefInitializeDupSpec
def initialize_dup
end
end
DefInitializeDupSpec.should have_private_instance_method(:initialize_dup, false)
end
end
describe "Defining an 'initialize_clone' method" do
it "sets the method's visibility to private" do
class DefInitializeCloneSpec
def initialize_clone
end
end
DefInitializeCloneSpec.should have_private_instance_method(:initialize_clone, false)
end
end
describe "Defining a 'respond_to_missing?' method" do
it "sets the method's visibility to private" do
class DefRespondToMissingPSpec
def respond_to_missing?
end
end
DefRespondToMissingPSpec.should have_private_instance_method(:respond_to_missing?, false)
end
end
describe "Defining a method" do
it "returns a symbol of the method name" do
method_name = def some_method; end
method_name.should == :some_method
end
end
describe "An instance method definition with a splat" do
it "accepts an unnamed '*' argument" do
def foo(*); end;
foo.should == nil
foo(1, 2).should == nil
foo(1, 2, 3, 4, :a, :b, 'c', 'd').should == nil
end
it "accepts a named * argument" do
def foo(*a); a; end;
foo.should == []
foo(1, 2).should == [1, 2]
foo([:a]).should == [[:a]]
end
it "accepts non-* arguments before the * argument" do
def foo(a, b, c, d, e, *f); [a, b, c, d, e, f]; end
foo(1, 2, 3, 4, 5, 6, 7, 8).should == [1, 2, 3, 4, 5, [6, 7, 8]]
end
it "allows only a single * argument" do
lambda { eval 'def foo(a, *b, *c); end' }.should raise_error(SyntaxError)
end
it "requires the presence of any arguments that precede the *" do
def foo(a, b, *c); end
lambda { foo 1 }.should raise_error(ArgumentError)
end
end
describe "An instance method with a default argument" do
it "evaluates the default when no arguments are passed" do
def foo(a = 1)
a
end
foo.should == 1
foo(2).should == 2
end
it "evaluates the default empty expression when no arguments are passed" do
def foo(a = ())
a
end
foo.should == nil
foo(2).should == 2
end
it "assigns an empty Array to an unused splat argument" do
def foo(a = 1, *b)
[a,b]
end
foo.should == [1, []]
foo(2).should == [2, []]
end
it "evaluates the default when required arguments precede it" do
def foo(a, b = 2)
[a,b]
end
lambda { foo }.should raise_error(ArgumentError)
foo(1).should == [1, 2]
end
it "prefers to assign to a default argument before a splat argument" do
def foo(a, b = 2, *c)
[a,b,c]
end
lambda { foo }.should raise_error(ArgumentError)
foo(1).should == [1,2,[]]
end
it "prefers to assign to a default argument when there are no required arguments" do
def foo(a = 1, *args)
[a,args]
end
foo(2,2).should == [2,[2]]
end
it "does not evaluate the default when passed a value and a * argument" do
def foo(a, b = 2, *args)
[a,b,args]
end
foo(2,3,3).should == [2,3,[3]]
end
# `bar = bar` in a default is a circular argument reference (warned), and
# the parameter ends up nil — the method of the same name is NOT called.
it "shadows an existing method with the same name as the local" do
def bar
1
end
-> {
eval "def foo(bar = bar)
bar
end"
}.should complain(/circular argument reference/)
foo.should == nil
foo(2).should == 2
end
it "calls a method with the same name as the local when explicitly using ()" do
def bar
1
end
def foo(bar = bar())
bar
end
foo.should == 1
foo(2).should == 2
end
end
describe "A singleton method definition" do
it "can be declared for a local variable" do
a = Object.new
def a.foo
5
end
a.foo.should == 5
end
it "can be declared for an instance variable" do
@a = Object.new
def @a.foo
6
end
@a.foo.should == 6
end
it "can be declared for a global variable" do
$__a__ = "hi"
def $__a__.foo
7
end
$__a__.foo.should == 7
end
it "can be declared with an empty method body" do
class DefSpec
def self.foo;end
end
DefSpec.foo.should == nil
end
it "can be redefined" do
obj = Object.new
def obj.==(other)
1
end
(obj==1).should == 1
def obj.==(other)
2
end
(obj==2).should == 2
end
it "raises #{frozen_error_class} if frozen" do
obj = Object.new
obj.freeze
lambda { def obj.foo; end }.should raise_error(frozen_error_class)
end
end
describe "Redefining a singleton method" do
it "does not inherit a previously set visibility" do
o = Object.new
class << o; private; def foo; end; end;
class << o; should have_private_instance_method(:foo); end
class << o; def foo; end; end;
class << o; should_not have_private_instance_method(:foo); end
class << o; should have_instance_method(:foo); end
end
end
# NOTE(review): this describe is an exact duplicate of the one above —
# looks like a merge artifact; confirm and drop one copy upstream.
describe "Redefining a singleton method" do
it "does not inherit a previously set visibility" do
o = Object.new
class << o; private; def foo; end; end;
class << o; should have_private_instance_method(:foo); end
class << o; def foo; end; end;
class << o; should_not have_private_instance_method(:foo); end
class << o; should have_instance_method(:foo); end
end
end
describe "A method defined with extreme default arguments" do
it "can redefine itself when the default is evaluated" do
class DefSpecs
def foo(x = (def foo; "hello"; end;1));x;end
end
d = DefSpecs.new
d.foo(42).should == 42
d.foo.should == 1
# the zero-arg call above installed the replacement; this call hits it
d.foo.should == 'hello'
end
it "may use an fcall as a default" do
def bar
1
end
def foo(x = bar())
x
end
foo.should == 1
foo(2).should == 2
end
it "evaluates the defaults in the method's scope" do
def foo(x = ($foo_self = self; nil)); end
foo
$foo_self.should == self
end
it "may use preceding arguments as defaults" do
def foo(obj, width=obj.length)
width
end
foo('abcde').should == 5
end
it "may use a lambda as a default" do
def foo(output = 'a', prc = lambda {|n| output * n})
prc.call(5)
end
foo.should == 'aaaaa'
end
end
describe "A singleton method defined with extreme default arguments" do
it "may use a method definition as a default" do
$__a = Object.new
def $__a.foo(x = (def $__a.foo; "hello"; end;1));x;end
$__a.foo(42).should == 42
$__a.foo.should == 1
$__a.foo.should == 'hello'
end
it "may use an fcall as a default" do
a = Object.new
def a.bar
1
end
def a.foo(x = bar())
x
end
a.foo.should == 1
a.foo(2).should == 2
end
it "evaluates the defaults in the singleton scope" do
a = Object.new
def a.foo(x = ($foo_self = self; nil)); 5 ;end
a.foo
$foo_self.should == a
end
it "may use preceding arguments as defaults" do
a = Object.new
def a.foo(obj, width=obj.length)
width
end
a.foo('abcde').should == 5
end
it "may use a lambda as a default" do
a = Object.new
def a.foo(output = 'a', prc = lambda {|n| output * n})
prc.call(5)
end
a.foo.should == 'aaaaa'
end
end
describe "A method definition inside a metaclass scope" do
it "can create a class method" do
class DefSpecSingleton
class << self
def a_class_method;self;end
end
end
DefSpecSingleton.a_class_method.should == DefSpecSingleton
lambda { Object.a_class_method }.should raise_error(NoMethodError)
end
it "can create a singleton method" do
obj = Object.new
class << obj
def a_singleton_method;self;end
end
obj.a_singleton_method.should == obj
lambda { Object.new.a_singleton_method }.should raise_error(NoMethodError)
end
it "raises #{frozen_error_class} if frozen" do
obj = Object.new
obj.freeze
class << obj
lambda { def foo; end }.should raise_error(frozen_error_class)
end
end
end
# A nested `def` defines on the current default definee at call time.
describe "A nested method definition" do
it "creates an instance method when evaluated in an instance method" do
class DefSpecNested
def create_instance_method
def an_instance_method;self;end
an_instance_method
end
end
obj = DefSpecNested.new
obj.create_instance_method.should == obj
obj.an_instance_method.should == obj
other = DefSpecNested.new
other.an_instance_method.should == other
DefSpecNested.should have_instance_method(:an_instance_method)
end
it "creates a class method when evaluated in a class method" do
class DefSpecNested
class << self
# cleanup
remove_method :a_class_method if method_defined? :a_class_method
def create_class_method
def a_class_method;self;end
a_class_method
end
end
end
lambda { DefSpecNested.a_class_method }.should raise_error(NoMethodError)
DefSpecNested.create_class_method.should == DefSpecNested
DefSpecNested.a_class_method.should == DefSpecNested
lambda { Object.a_class_method }.should raise_error(NoMethodError)
lambda { DefSpecNested.new.a_class_method }.should raise_error(NoMethodError)
end
it "creates a singleton method when evaluated in the metaclass of an instance" do
class DefSpecNested
def create_singleton_method
class << self
def a_singleton_method;self;end
end
a_singleton_method
end
end
obj = DefSpecNested.new
obj.create_singleton_method.should == obj
obj.a_singleton_method.should == obj
other = DefSpecNested.new
lambda { other.a_singleton_method }.should raise_error(NoMethodError)
end
it "creates a method in the surrounding context when evaluated in a def expr.method" do
class DefSpecNested
TARGET = Object.new
def TARGET.defs_method
def inherited_method;self;end
end
end
DefSpecNested::TARGET.defs_method
DefSpecNested.should have_instance_method :inherited_method
DefSpecNested::TARGET.should_not have_method :inherited_method
obj = DefSpecNested.new
obj.inherited_method.should == obj
end
# See http://yugui.jp/articles/846#label-3
it "inside an instance_eval creates a singleton method" do
class DefSpecNested
OBJ = Object.new
OBJ.instance_eval do
def create_method_in_instance_eval(a = (def arg_method; end))
def body_method; end
end
end
end
obj = DefSpecNested::OBJ
obj.create_method_in_instance_eval
obj.should have_method :arg_method
obj.should have_method :body_method
DefSpecNested.should_not have_instance_method :arg_method
DefSpecNested.should_not have_instance_method :body_method
end
it "creates an instance method inside Class.new" do
cls = Class.new do
def do_def
def new_def
1
end
end
end
obj = cls.new
obj.do_def
obj.new_def.should == 1
cls.new.new_def.should == 1
-> { Object.new.new_def }.should raise_error(NoMethodError)
end
end
describe "A method definition inside an instance_eval" do
it "creates a singleton method" do
obj = Object.new
obj.instance_eval do
def an_instance_eval_method;self;end
end
obj.an_instance_eval_method.should == obj
other = Object.new
lambda { other.an_instance_eval_method }.should raise_error(NoMethodError)
end
it "creates a singleton method when evaluated inside a metaclass" do
obj = Object.new
obj.instance_eval do
class << self
def a_metaclass_eval_method;self;end
end
end
obj.a_metaclass_eval_method.should == obj
other = Object.new
lambda { other.a_metaclass_eval_method }.should raise_error(NoMethodError)
end
it "creates a class method when the receiver is a class" do
DefSpecNested.instance_eval do
def an_instance_eval_class_method;self;end
end
DefSpecNested.an_instance_eval_class_method.should == DefSpecNested
lambda { Object.an_instance_eval_class_method }.should raise_error(NoMethodError)
end
it "creates a class method when the receiver is an anonymous class" do
m = Class.new
m.instance_eval do
def klass_method
:test
end
end
m.klass_method.should == :test
lambda { Object.klass_method }.should raise_error(NoMethodError)
end
it "creates a class method when instance_eval is within class" do
m = Class.new do
instance_eval do
def klass_method
:test
end
end
end
m.klass_method.should == :test
lambda { Object.klass_method }.should raise_error(NoMethodError)
end
end
describe "A method definition inside an instance_exec" do
it "creates a class method when the receiver is a class" do
DefSpecNested.instance_exec(1) do |param|
@stuff = param
def an_instance_exec_class_method; @stuff; end
end
DefSpecNested.an_instance_exec_class_method.should == 1
lambda { Object.an_instance_exec_class_method }.should raise_error(NoMethodError)
end
it "creates a class method when the receiver is an anonymous class" do
m = Class.new
m.instance_exec(1) do |param|
@stuff = param
def klass_method
@stuff
end
end
m.klass_method.should == 1
lambda { Object.klass_method }.should raise_error(NoMethodError)
end
it "creates a class method when instance_exec is within class" do
m = Class.new do
instance_exec(2) do |param|
@stuff = param
def klass_method
@stuff
end
end
end
m.klass_method.should == 2
lambda { Object.klass_method }.should raise_error(NoMethodError)
end
end
describe "A method definition in an eval" do
it "creates an instance method" do
class DefSpecNested
def eval_instance_method
eval "def an_eval_instance_method;self;end", binding
an_eval_instance_method
end
end
obj = DefSpecNested.new
obj.eval_instance_method.should == obj
obj.an_eval_instance_method.should == obj
other = DefSpecNested.new
other.an_eval_instance_method.should == other
lambda { Object.new.an_eval_instance_method }.should raise_error(NoMethodError)
end
it "creates a class method" do
class DefSpecNestedB
class << self
def eval_class_method
eval "def an_eval_class_method;self;end" #, binding
an_eval_class_method
end
end
end
DefSpecNestedB.eval_class_method.should == DefSpecNestedB
DefSpecNestedB.an_eval_class_method.should == DefSpecNestedB
lambda { Object.an_eval_class_method }.should raise_error(NoMethodError)
lambda { DefSpecNestedB.new.an_eval_class_method}.should raise_error(NoMethodError)
end
it "creates a singleton method" do
class DefSpecNested
def eval_singleton_method
class << self
eval "def an_eval_singleton_method;self;end", binding
end
an_eval_singleton_method
end
end
obj = DefSpecNested.new
obj.eval_singleton_method.should == obj
obj.an_eval_singleton_method.should == obj
other = DefSpecNested.new
lambda { other.an_eval_singleton_method }.should raise_error(NoMethodError)
end
end
# `a=b=c={}` — only `a` is a true parameter; b and c are locals assigned
# only when a's default expression is evaluated.
describe "a method definition that sets more than one default parameter all to the same value" do
def foo(a=b=c={})
[a,b,c]
end
it "assigns them all the same object by default" do
foo.should == [{},{},{}]
a, b, c = foo
a.should eql(b)
a.should eql(c)
end
it "allows the first argument to be given, and sets the rest to null" do
foo(1).should == [1,nil,nil]
end
it "assigns the parameters different objects across different default calls" do
a, _b, _c = foo
d, _e, _f = foo
a.should_not equal(d)
end
it "only allows overriding the default value of the first such parameter in each set" do
lambda { foo(1,2) }.should raise_error(ArgumentError)
end
def bar(a=b=c=1,d=2)
[a,b,c,d]
end
it "treats the argument after the multi-parameter normally" do
bar.should == [1,1,1,2]
bar(3).should == [3,nil,nil,2]
bar(3,4).should == [3,nil,nil,4]
lambda { bar(3,4,5) }.should raise_error(ArgumentError)
end
end
describe "The def keyword" do
describe "within a closure" do
it "looks outside the closure for the visibility" do
module DefSpecsLambdaVisibility
private
lambda {
def some_method; end
}.call
end
DefSpecsLambdaVisibility.should have_private_instance_method("some_method")
end
end
end
|
#!/usr/bin/env ruby
# Twitter bot that stitches together aphorisms of the form "<X> is <Y>" from
# two live-searched tweets: a subject tweet matching "... is the meaning of
# life" and a predicate tweet matching "the meaning of life is ...".
module MeaningBot
require 'rubygems'
require 'chatterbot/dsl'
module_function
# remove this to send out tweets
#debug_mode
# remove this to update the db
#no_update
# remove this to get less output when running
#verbose
###
# Helpers
###
SUBJECT_SIGNATURE = ' is the meaning of life'
PREDICATE_SIGNATURE = 'the meaning of life is '
# Reject tweets containing links, mentions, or the word "meaning" itself.
UNDESIRABLE_CHARS = /http|@|meaning/
# Build the Twitter search query: the quoted phrase, standard exclusions,
# plus caller-specific exclusion modifiers.
def search_term(base, modifiers)
"\"#{base}\" " + '-? -42 -Christ' + modifiers
end
# True roughly once every n calls; used to throttle how often the bot speaks.
def one_nth_of_the_time(n)
rand(n) == 0
end
# Run a chatterbot search and collect the yielded tweets into an Array.
# since_id(0) resets the chatterbot cursor so the next search is not narrowed.
def get_search_tweets(query)
tweets = []
search query do |tweet|
tweets << tweet
end
since_id(0)
tweets
end
def subject_tweets
get_search_tweets search_term(SUBJECT_SIGNATURE, '-what')
end
def predicate_tweets
get_search_tweets search_term(PREDICATE_SIGNATURE, '-give')
end
# Downcased concatenation of the bot's recent timeline, used to avoid
# repeating a snippet that was already tweeted.
def recently_tweeted_text
text = client.user_timeline(
:screen_name => 'meaningbot',
:count => 200,
:trim_user => true
).map(&:text).join.downcase
since_id(0)
text
end
# Pick a random acceptable subject/predicate pair. Either element may be nil
# when no candidate passes the filters. The `+ 4` accounts for the literal
# " is " joining the two snippets against the 140-char tweet limit.
def pair_of_tweets
recents = recently_tweeted_text
subject_tweet = subject_tweets.map do |tweet|
{
:tweet => tweet,
:snippet => tweet.text.sub(/#{SUBJECT_SIGNATURE}.*/i, '').strip.delete('\""')
}
end.shuffle.find do |tweet|
!(tweet[:snippet] =~ UNDESIRABLE_CHARS) &&
!(recents.index(tweet[:snippet].downcase))
end
predicate_tweet = predicate_tweets.map do |tweet|
{
:tweet => tweet,
:snippet => tweet.text.sub(/.*#{PREDICATE_SIGNATURE}/i, '').strip.delete('\""')
}
end.shuffle.find do |tweet|
(tweet[:snippet].length + subject_tweet[:snippet].length + 4) < 140 &&
!(tweet[:snippet] =~ UNDESIRABLE_CHARS) &&
!(recents.index(tweet[:snippet].downcase))
end
[subject_tweet, predicate_tweet]
end
# Main entry point. opts[:force] skips the 1-in-10 throttle;
# opts[:testing] prints instead of tweeting.
# (opts now defaults to {} so run can be called with no arguments.)
def run(opts = {})
if one_nth_of_the_time(10) || opts[:force]
subject_tweet, predicate_tweet = pair_of_tweets
if subject_tweet && predicate_tweet
aphorism = subject_tweet[:snippet] + ' is ' + predicate_tweet[:snippet]
puts "*"*10
if opts[:testing]
puts "TESTING MODE. NOT TWEETING."
else
puts "TWEETING!"
tweet(aphorism)
client.favorite(subject_tweet[:tweet])
client.favorite(predicate_tweet[:tweet])
end
puts "SUBJECT FULL TEXT: " + subject_tweet[:tweet].text
puts "PREDICATE FULL TEXT: " + predicate_tweet[:tweet].text
puts "TWEET TEXT: " + aphorism
puts "*"*10
else
puts "Not enough material."
end
else
puts "Staying silent this time."
end
end
end
# File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
CREDS = if File.exist?('bin/meaning_bot.yml')
YAML.load_file('bin/meaning_bot.yml')
else
# NOTE(review): ENV.symbolize_keys! is an ActiveSupport Hash method; plain
# Ruby's ENV does not respond to it — confirm ActiveSupport is loaded here.
ENV.symbolize_keys!
end
consumer_key CREDS[:consumer_key]
consumer_secret CREDS[:consumer_secret]
secret CREDS[:secret]
token CREDS[:token]
MeaningBot.run(:testing => true, :force => true)
Match against 'purpose' and 'point' as well as 'meaning'.
#!/usr/bin/env ruby
# Twitter bot that stitches together aphorisms of the form "<X> is <Y>" from
# two live-searched tweets, matching any of the nouns in MEANING_NOUNS
# ("... is the <noun> of life" / "the <noun> of life is ...").
module MeaningBot
require 'rubygems'
require 'chatterbot/dsl'
module_function
# remove this to send out tweets
#debug_mode
# remove this to update the db
#no_update
# remove this to get less output when running
#verbose
###
# Helpers
###
MEANING_NOUNS = %w{meaning purpose point}
SUBJECT_QUERIES = MEANING_NOUNS.map{|n| " is the #{n} of life"}
PREDICATE_QUERIES = MEANING_NOUNS.map{|n| "the #{n} of life is "}
SEARCH_EXCLUSIONS = '-? -42 -Christ'
# Reject tweets containing links, mentions, or any of the meaning-nouns.
UNDESIRABLE_STRINGS = /http|@|#{MEANING_NOUNS.join('|')}/
# Build the Twitter search query: each phrase quoted and OR-joined, then the
# standard exclusions plus caller-specific exclusion modifiers.
def search_term(queries, modifiers)
[
queries.map{|q| "\"#{q}\""}.join(' OR '),
SEARCH_EXCLUSIONS,
modifiers
].join(' ')
end
# True roughly once every n calls; used to throttle how often the bot speaks.
def one_nth_of_the_time(n)
rand(n) == 0
end
# Run a chatterbot search and collect the yielded tweets into an Array.
# since_id(0) resets the chatterbot cursor so the next search is not narrowed.
def get_search_tweets(query)
tweets = []
search query do |tweet|
tweets << tweet
end
since_id(0)
tweets
end
def subject_tweets
get_search_tweets search_term(SUBJECT_QUERIES, '-what')
end
def predicate_tweets
get_search_tweets search_term(PREDICATE_QUERIES, '-give')
end
# Downcased concatenation of the bot's recent timeline, used to avoid
# repeating a snippet that was already tweeted.
def recently_tweeted_text
text = client.user_timeline(
:screen_name => 'meaningbot',
:count => 200,
:trim_user => true
).map(&:text).join.downcase
since_id(0)
text
end
# Strip the matched query phrase (and everything before/after it, depending
# on whether it was a subject or predicate match) from the tweet text.
def strip_queries_from_tweet(tweet_text, queries, query_type)
query_matchers = queries.map do |q|
matcher = ''
matcher += '.*' if query_type == :predicate
matcher += q
matcher += '.*' if query_type == :subject
matcher
end
tweet_text.sub(/#{query_matchers.join('|')}/i, '').strip.delete('\""')
end
# Pick a random acceptable subject/predicate pair. Either element may be nil
# when no candidate passes the filters. The `+ 4` accounts for the literal
# " is " joining the two snippets against the 140-char tweet limit.
def pair_of_tweets
recents = recently_tweeted_text
subject_tweet = subject_tweets.map do |tweet|
{
:tweet => tweet,
:snippet => strip_queries_from_tweet(tweet.text, SUBJECT_QUERIES, :subject)
}
end.shuffle.find do |tweet|
!(tweet[:snippet] =~ UNDESIRABLE_STRINGS) &&
!(recents.index(tweet[:snippet].downcase))
end
predicate_tweet = predicate_tweets.map do |tweet|
{
:tweet => tweet,
:snippet => strip_queries_from_tweet(tweet.text, PREDICATE_QUERIES, :predicate)
}
end.shuffle.find do |tweet|
(tweet[:snippet].length + subject_tweet[:snippet].length + 4) < 140 &&
!(tweet[:snippet] =~ UNDESIRABLE_STRINGS) &&
!(recents.index(tweet[:snippet].downcase))
end
[subject_tweet, predicate_tweet]
end
# Main entry point. opts[:force] skips the 1-in-10 throttle;
# opts[:testing] prints instead of tweeting.
def run(opts={})
if one_nth_of_the_time(10) || opts[:force]
subject_tweet, predicate_tweet = pair_of_tweets
if subject_tweet && predicate_tweet
aphorism = subject_tweet[:snippet] + ' is ' + predicate_tweet[:snippet]
puts "*"*10
if opts[:testing]
puts "TESTING MODE. NOT TWEETING."
else
puts "TWEETING!"
tweet(aphorism)
client.favorite(subject_tweet[:tweet])
client.favorite(predicate_tweet[:tweet])
end
puts "SUBJECT FULL TEXT: " + subject_tweet[:tweet].text
puts "PREDICATE FULL TEXT: " + predicate_tweet[:tweet].text
puts "TWEET TEXT: " + aphorism
puts "*"*10
else
puts "Not enough material."
end
else
puts "Staying silent this time."
end
end
end
# File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
CREDS = if File.exist?('bin/meaning_bot.yml')
YAML.load_file('bin/meaning_bot.yml')
else
# NOTE(review): ENV.symbolize_keys! is an ActiveSupport Hash method; plain
# Ruby's ENV does not respond to it — confirm ActiveSupport is loaded here.
ENV.symbolize_keys!
end
consumer_key CREDS[:consumer_key]
consumer_secret CREDS[:consumer_secret]
secret CREDS[:secret]
token CREDS[:token]
MeaningBot.run(:testing => true, :force => true)
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{simple_model_translations}
s.version = "0.1.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Pavel Forkert"]
s.date = %q{2010-10-14}
s.description = %q{Simple ActiveRecord translations for Rails 3}
s.email = %q{fxposter@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
# Fix: spec/auto_generated_translation_classes_spec.rb was listed in
# s.test_files but missing from s.files, so it was never packaged into the
# gem. Every test file must also appear in s.files.
s.files = [
".document",
"Gemfile",
"Gemfile.lock",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"lib/simple_model_translations.rb",
"lib/simple_model_translations/attributes.rb",
"lib/simple_model_translations/base.rb",
"lib/simple_model_translations/class_methods.rb",
"lib/simple_model_translations/instance_methods.rb",
"lib/simple_model_translations/validations.rb",
"simple_model_translations.gemspec",
"spec/attributes_spec.rb",
"spec/auto_generated_translation_classes_spec.rb",
"spec/class_methods_spec.rb",
"spec/data/models.rb",
"spec/data/schema.rb",
"spec/simple_model_translations_spec.rb",
"spec/spec_helper.rb",
"spec/validations_spec.rb"
]
s.homepage = %q{http://github.com/fxposter/simple_model_translations}
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{Simple ActiveRecord translations for Rails 3}
s.test_files = [
"spec/attributes_spec.rb",
"spec/auto_generated_translation_classes_spec.rb",
"spec/class_methods_spec.rb",
"spec/data/models.rb",
"spec/data/schema.rb",
"spec/simple_model_translations_spec.rb",
"spec/spec_helper.rb",
"spec/validations_spec.rb"
]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activerecord>, [">= 3.0.0"])
s.add_development_dependency(%q<database_cleaner>, [">= 0.5.2"])
s.add_development_dependency(%q<sqlite3-ruby>, [">= 1.3.1"])
s.add_development_dependency(%q<shoulda>, [">= 2.11.3"])
s.add_development_dependency(%q<rspec>, [">= 2.0.0"])
s.add_development_dependency(%q<jeweler>, [">= 1.5.0.pre5"])
else
s.add_dependency(%q<activerecord>, [">= 3.0.0"])
s.add_dependency(%q<database_cleaner>, [">= 0.5.2"])
s.add_dependency(%q<sqlite3-ruby>, [">= 1.3.1"])
s.add_dependency(%q<shoulda>, [">= 2.11.3"])
s.add_dependency(%q<rspec>, [">= 2.0.0"])
s.add_dependency(%q<jeweler>, [">= 1.5.0.pre5"])
end
else
s.add_dependency(%q<activerecord>, [">= 3.0.0"])
s.add_dependency(%q<database_cleaner>, [">= 0.5.2"])
s.add_dependency(%q<sqlite3-ruby>, [">= 1.3.1"])
s.add_dependency(%q<shoulda>, [">= 2.11.3"])
s.add_dependency(%q<rspec>, [">= 2.0.0"])
s.add_dependency(%q<jeweler>, [">= 1.5.0.pre5"])
end
end
Gemspec fixes: add spec/auto_generated_translation_classes_spec.rb to s.files (it was listed in s.test_files only) and bump the version to 0.1.4.
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Jeweler-generated gemspec for simple_model_translations 0.1.4 — regenerate
# via the Rakefile rather than editing (see header comment above).
Gem::Specification.new do |s|
s.name = %q{simple_model_translations}
s.version = "0.1.4"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Pavel Forkert"]
s.date = %q{2010-10-14}
s.description = %q{Simple ActiveRecord translations for Rails 3}
s.email = %q{fxposter@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
# Packaged files; every entry in s.test_files below is also listed here so
# the specs ship with the gem.
s.files = [
".document",
"Gemfile",
"Gemfile.lock",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"lib/simple_model_translations.rb",
"lib/simple_model_translations/attributes.rb",
"lib/simple_model_translations/base.rb",
"lib/simple_model_translations/class_methods.rb",
"lib/simple_model_translations/instance_methods.rb",
"lib/simple_model_translations/validations.rb",
"simple_model_translations.gemspec",
"spec/attributes_spec.rb",
"spec/auto_generated_translation_classes_spec.rb",
"spec/class_methods_spec.rb",
"spec/data/models.rb",
"spec/data/schema.rb",
"spec/simple_model_translations_spec.rb",
"spec/spec_helper.rb",
"spec/validations_spec.rb"
]
s.homepage = %q{http://github.com/fxposter/simple_model_translations}
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{Simple ActiveRecord translations for Rails 3}
s.test_files = [
"spec/attributes_spec.rb",
"spec/auto_generated_translation_classes_spec.rb",
"spec/class_methods_spec.rb",
"spec/data/models.rb",
"spec/data/schema.rb",
"spec/simple_model_translations_spec.rb",
"spec/spec_helper.rb",
"spec/validations_spec.rb"
]
# Dependency declarations, branched on the installed RubyGems version
# (jeweler boilerplate for pre-1.2.0 RubyGems compatibility).
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activerecord>, [">= 3.0.0"])
s.add_development_dependency(%q<database_cleaner>, [">= 0.5.2"])
s.add_development_dependency(%q<sqlite3-ruby>, [">= 1.3.1"])
s.add_development_dependency(%q<shoulda>, [">= 2.11.3"])
s.add_development_dependency(%q<rspec>, [">= 2.0.0"])
s.add_development_dependency(%q<jeweler>, [">= 1.5.0.pre5"])
else
s.add_dependency(%q<activerecord>, [">= 3.0.0"])
s.add_dependency(%q<database_cleaner>, [">= 0.5.2"])
s.add_dependency(%q<sqlite3-ruby>, [">= 1.3.1"])
s.add_dependency(%q<shoulda>, [">= 2.11.3"])
s.add_dependency(%q<rspec>, [">= 2.0.0"])
s.add_dependency(%q<jeweler>, [">= 1.5.0.pre5"])
end
else
s.add_dependency(%q<activerecord>, [">= 3.0.0"])
s.add_dependency(%q<database_cleaner>, [">= 0.5.2"])
s.add_dependency(%q<sqlite3-ruby>, [">= 1.3.1"])
s.add_dependency(%q<shoulda>, [">= 2.11.3"])
s.add_dependency(%q<rspec>, [">= 2.0.0"])
s.add_dependency(%q<jeweler>, [">= 1.5.0.pre5"])
end
end
|
require 'set'
# Controller for adding, editing and searching bookmarks.
class BookmarkController < Ramaze::Controller
  map '/uri'
  helper :stack, :user
  layout '/page' => [ :add, :edit, :search ]

  # GET renders the add form (pre-filled from the query string);
  # POST creates/updates the bookmark plus its tags and redirects to #edit.
  def add
    if ! logged_in?
      call R( MainController, :login )
    end
    uri = h( request[ 'uri' ] )
    if request.post?
      bm = Bookmark.find_or_create( :uri => uri )
      user.bookmark_add(
        bm,
        h( request[ 'title' ] ),
        h( request[ 'notes' ] )
      )
      # requested_tags already HTML-escapes each tag name, so the names are
      # stored exactly as #search will look them up (no double escaping).
      requested_tags.each do |tag|
        t = Tag.find_or_create( :name => tag )
        bm.tag_add t, user
      end
      redirect Rs( :edit, bm.id )
    else
      @bookmark = BookmarkStruct.new
      @bookmark.uri = uri
      @bookmark.uri_editable = true
      @bookmark.title = h( request[ 'title' ] )
    end
  end

  # Show the edit form for an existing bookmark; bounce to #add if the id
  # does not resolve.
  def edit( bookmark_id )
    if ! logged_in?
      call R( MainController, :login )
    end
    bm = Bookmark[ bookmark_id.to_i ]
    if bm.nil?
      redirect Rs( :add )
    end
    @bookmark = bm.to_struct( user )
  end

  # Collect every bookmark carrying at least one of the requested tags.
  def search
    @bookmarks = Set.new
    @tags = Set.new
    requested_tags.each do |tagname|
      tag = Tag[ :name => tagname ]
      if tag
        @tags << tag
        tag.bookmarks.each do |bm|
          @bookmarks << bm
        end
      end
    end
  end

  # Parse the "tags" request parameter (whitespace-, comma- or plus-separated)
  # into an array of HTML-escaped tag names. Escaping here — rather than at
  # each call site — prevents stored XSS via tag names and keeps #add and
  # #search consistent with one another.
  def requested_tags
    tags_in = request[ 'tags' ]
    # NOTE: was `tags_in.any?`, which raises NoMethodError on a String in
    # Ruby >= 1.9; `empty?` works for both String and Array.
    if tags_in && !tags_in.empty?
      tags_in.split( /[\s,+]+/ ).collect { |tag| h( tag ) }
    else
      []
    end
  end
  private :requested_tags
end
Escape tags in requested_tags.
require 'set'
# Controller for adding, editing and searching bookmarks.
class BookmarkController < Ramaze::Controller
  map '/uri'
  helper :stack, :user
  layout '/page' => [ :add, :edit, :search ]

  # GET renders the add form (pre-filled from the query string);
  # POST creates/updates the bookmark plus its tags and redirects to #edit.
  def add
    if ! logged_in?
      call R( MainController, :login )
    end
    uri = h( request[ 'uri' ] )
    if request.post?
      bm = Bookmark.find_or_create( :uri => uri )
      user.bookmark_add(
        bm,
        h( request[ 'title' ] ),
        h( request[ 'notes' ] )
      )
      # FIX: requested_tags already HTML-escapes each tag name; calling
      # h() again here double-escaped stored tag names (e.g. "&amp;amp;").
      requested_tags.each do |tag|
        t = Tag.find_or_create( :name => tag )
        bm.tag_add t, user
      end
      redirect Rs( :edit, bm.id )
    else
      @bookmark = BookmarkStruct.new
      @bookmark.uri = uri
      @bookmark.uri_editable = true
      @bookmark.title = h( request[ 'title' ] )
    end
  end

  # Show the edit form for an existing bookmark; bounce to #add if the id
  # does not resolve.
  def edit( bookmark_id )
    if ! logged_in?
      call R( MainController, :login )
    end
    bm = Bookmark[ bookmark_id.to_i ]
    if bm.nil?
      redirect Rs( :add )
    end
    @bookmark = bm.to_struct( user )
  end

  # Collect every bookmark carrying at least one of the requested tags.
  def search
    @bookmarks = Set.new
    @tags = Set.new
    requested_tags.each do |tagname|
      tag = Tag[ :name => tagname ]
      if tag
        @tags << tag
        tag.bookmarks.each do |bm|
          @bookmarks << bm
        end
      end
    end
  end

  # Parse the "tags" request parameter (whitespace-, comma- or plus-separated)
  # into an array of HTML-escaped tag names. Escaping is centralized here so
  # all callers see identical, safely-escaped names.
  def requested_tags
    tags_in = request[ 'tags' ]
    # NOTE: was `tags_in.any?`, which raises NoMethodError on a String in
    # Ruby >= 1.9; `empty?` works for both String and Array.
    if tags_in && !tags_in.empty?
      tags_in.split( /[\s,+]+/ ).collect { |tag| h( tag ) }
    else
      []
    end
  end
  private :requested_tags
end
require 'test_helper'
# Unit tests for DeployCommands: the git fetch/clone/checkout commands and
# the deploy/dependency shell commands built for a deploy of a stack.
class DeployCommandsTest < ActiveSupport::TestCase
  def setup
    @stack = stacks(:shipit)
    @deploy = deploys(:shipit_pending)
    @commands = DeployCommands.new(@deploy)
    @deploy_spec = stub(
      dependencies_steps: ['bundle install --some-args'],
      deploy_steps: ['bundle exec cap $ENVIRONMENT deploy'],
    )
    @commands.stubs(:deploy_spec).returns(@deploy_spec)
    # A recent git is stubbed by default, so clone is expected to use
    # --single-branch; the "outdated git" test below overrides this.
    StackCommands.stubs(git_version: Gem::Version.new('1.8.4.3'))
  end

  test "#fetch call git fetch if repository cache already exist" do
    Dir.expects(:exists?).with(@stack.git_path).returns(true)
    command = @commands.fetch
    assert_equal %w(git fetch origin master), command.args
  end

  test "#fetch call git fetch in git_path directory if repository cache already exist" do
    Dir.expects(:exists?).with(@stack.git_path).returns(true)
    command = @commands.fetch
    assert_equal @stack.git_path, command.chdir
  end

  test "#fetch call git clone if repository cache do not exist" do
    Dir.expects(:exists?).with(@stack.git_path).returns(false)
    command = @commands.fetch
    # FIX: setup stubs git 1.8.4.3, which supports --single-branch, so the
    # expected clone arguments must include it (the old expectation without
    # the flag contradicted the "outdated git" test below).
    assert_equal ['git', 'clone', '--single-branch', '--branch', 'master', @stack.repo_git_url, @stack.git_path], command.args
  end

  test "#fetch do not use --single-branch if git is outdated" do
    Dir.expects(:exists?).with(@stack.git_path).returns(false)
    StackCommands.stubs(git_version: Gem::Version.new('1.7.2.30'))
    command = @commands.fetch
    assert_equal ['git', 'clone', '--branch', 'master', @stack.repo_git_url, @stack.git_path], command.args
  end

  test "#fetch call git fetch in base_path directory if repository cache do not exist" do
    Dir.expects(:exists?).with(@stack.git_path).returns(false)
    command = @commands.fetch
    assert_equal @stack.deploys_path, command.chdir
  end

  test "#fetch merges Settings.env in ENVIRONMENT" do
    Settings.stubs(:[]).with('env').returns("SPECIFIC_CONFIG" => 5)
    command = @commands.fetch
    assert_equal 5, command.env["SPECIFIC_CONFIG"]
  end

  test "#clone clone the repository cache into the working directory" do
    command = @commands.clone
    assert_equal ['git', 'clone', '--local', @stack.git_path, @deploy.working_directory], command.args
  end

  test "#clone clone the repository cache from the deploys_path" do
    command = @commands.clone
    assert_equal @stack.deploys_path, command.chdir
  end

  test "#checkout checkout the deployed commit" do
    command = @commands.checkout(@deploy.until_commit)
    assert_equal ['git', 'checkout', '-q', @deploy.until_commit.sha], command.args
  end

  test "#checkout checkout the deployed commit from the working directory" do
    command = @commands.checkout(@deploy.until_commit)
    assert_equal @deploy.working_directory, command.chdir
  end

  test "#deploy call cap $environment deploy" do
    commands = @commands.deploy(@deploy.until_commit)
    assert_equal 1, commands.length
    command = commands.first
    assert_equal ['bundle exec cap $ENVIRONMENT deploy'], command.args
  end

  test "#deploy call cap $environment deploy from the working_directory" do
    commands = @commands.deploy(@deploy.until_commit)
    assert_equal 1, commands.length
    command = commands.first
    assert_equal @deploy.working_directory, command.chdir
  end

  test "#deploy call cap $environment deploy with the SHA in the environment" do
    commands = @commands.deploy(@deploy.until_commit)
    assert_equal 1, commands.length
    command = commands.first
    assert_equal @deploy.until_commit.sha, command.env['SHA']
  end

  test "#deploy call cap $environment deploy with the ENVIRONMENT in the environment" do
    commands = @commands.deploy(@deploy.until_commit)
    assert_equal 1, commands.length
    command = commands.first
    assert_equal @stack.environment, command.env['ENVIRONMENT']
  end

  test "#deploy merges Settings.env in ENVIRONMENT" do
    Settings.stubs(:[]).with('env').returns("SPECIFIC_CONFIG" => 5)
    command = @commands.deploy(@deploy.until_commit).first
    assert_equal 5, command.env["SPECIFIC_CONFIG"]
  end

  test "#install_dependencies call bundle install" do
    commands = @commands.install_dependencies
    assert_equal 1, commands.length
    assert_equal ['bundle install --some-args'], commands.first.args
  end

  test "#install_dependencies merges Settings.env in ENVIRONMENT" do
    Settings.stubs(:[]).with('env').returns("SPECIFIC_CONFIG" => 5)
    command = @commands.install_dependencies.first
    assert_equal 5, command.env["SPECIFIC_CONFIG"]
  end
end
Fix broken test
require 'test_helper'
# Unit tests for DeployCommands: the git fetch/clone/checkout commands and
# the deploy/dependency shell commands built for a deploy of a stack.
# Uses Mocha stubs/expectations; assertions pin the exact argv, chdir and
# env of each generated command.
class DeployCommandsTest < ActiveSupport::TestCase
  def setup
    @stack = stacks(:shipit)
    @deploy = deploys(:shipit_pending)
    @commands = DeployCommands.new(@deploy)
    @deploy_spec = stub(
      dependencies_steps: ['bundle install --some-args'],
      deploy_steps: ['bundle exec cap $ENVIRONMENT deploy'],
    )
    @commands.stubs(:deploy_spec).returns(@deploy_spec)
    # A recent git is stubbed by default, so clone is expected to use
    # --single-branch; the "outdated git" test below overrides this stub.
    StackCommands.stubs(git_version: Gem::Version.new('1.8.4.3'))
  end

  test "#fetch call git fetch if repository cache already exist" do
    Dir.expects(:exists?).with(@stack.git_path).returns(true)
    command = @commands.fetch
    assert_equal %w(git fetch origin master), command.args
  end

  test "#fetch call git fetch in git_path directory if repository cache already exist" do
    Dir.expects(:exists?).with(@stack.git_path).returns(true)
    command = @commands.fetch
    assert_equal @stack.git_path, command.chdir
  end

  test "#fetch call git clone if repository cache do not exist" do
    Dir.expects(:exists?).with(@stack.git_path).returns(false)
    command = @commands.fetch
    # git 1.8.4.3 (stubbed in setup) supports --single-branch.
    assert_equal ['git', 'clone', '--single-branch', '--branch', 'master', @stack.repo_git_url, @stack.git_path], command.args
  end

  test "#fetch do not use --single-branch if git is outdated" do
    Dir.expects(:exists?).with(@stack.git_path).returns(false)
    # Pre-1.7.10 git has no --single-branch flag.
    StackCommands.stubs(git_version: Gem::Version.new('1.7.2.30'))
    command = @commands.fetch
    assert_equal ['git', 'clone', '--branch', 'master', @stack.repo_git_url, @stack.git_path], command.args
  end

  test "#fetch call git fetch in base_path directory if repository cache do not exist" do
    Dir.expects(:exists?).with(@stack.git_path).returns(false)
    command = @commands.fetch
    assert_equal @stack.deploys_path, command.chdir
  end

  test "#fetch merges Settings.env in ENVIRONMENT" do
    Settings.stubs(:[]).with('env').returns("SPECIFIC_CONFIG" => 5)
    command = @commands.fetch
    assert_equal 5, command.env["SPECIFIC_CONFIG"]
  end

  test "#clone clone the repository cache into the working directory" do
    command = @commands.clone
    assert_equal ['git', 'clone', '--local', @stack.git_path, @deploy.working_directory], command.args
  end

  test "#clone clone the repository cache from the deploys_path" do
    command = @commands.clone
    assert_equal @stack.deploys_path, command.chdir
  end

  test "#checkout checkout the deployed commit" do
    command = @commands.checkout(@deploy.until_commit)
    assert_equal ['git', 'checkout', '-q', @deploy.until_commit.sha], command.args
  end

  test "#checkout checkout the deployed commit from the working directory" do
    command = @commands.checkout(@deploy.until_commit)
    assert_equal @deploy.working_directory, command.chdir
  end

  test "#deploy call cap $environment deploy" do
    commands = @commands.deploy(@deploy.until_commit)
    assert_equal 1, commands.length
    command = commands.first
    # Deploy steps come verbatim from the stubbed deploy_spec.
    assert_equal ['bundle exec cap $ENVIRONMENT deploy'], command.args
  end

  test "#deploy call cap $environment deploy from the working_directory" do
    commands = @commands.deploy(@deploy.until_commit)
    assert_equal 1, commands.length
    command = commands.first
    assert_equal @deploy.working_directory, command.chdir
  end

  test "#deploy call cap $environment deploy with the SHA in the environment" do
    commands = @commands.deploy(@deploy.until_commit)
    assert_equal 1, commands.length
    command = commands.first
    assert_equal @deploy.until_commit.sha, command.env['SHA']
  end

  test "#deploy call cap $environment deploy with the ENVIRONMENT in the environment" do
    commands = @commands.deploy(@deploy.until_commit)
    assert_equal 1, commands.length
    command = commands.first
    assert_equal @stack.environment, command.env['ENVIRONMENT']
  end

  test "#deploy merges Settings.env in ENVIRONMENT" do
    Settings.stubs(:[]).with('env').returns("SPECIFIC_CONFIG" => 5)
    command = @commands.deploy(@deploy.until_commit).first
    assert_equal 5, command.env["SPECIFIC_CONFIG"]
  end

  test "#install_dependencies call bundle install" do
    commands = @commands.install_dependencies
    assert_equal 1, commands.length
    # Dependency steps come verbatim from the stubbed deploy_spec.
    assert_equal ['bundle install --some-args'], commands.first.args
  end

  test "#install_dependencies merges Settings.env in ENVIRONMENT" do
    Settings.stubs(:[]).with('env').returns("SPECIFIC_CONFIG" => 5)
    command = @commands.install_dependencies.first
    assert_equal 5, command.env["SPECIFIC_CONFIG"]
  end
end
|
Regenerate gemspec for version 0.2.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Generated by jeweler from Jeweler::Tasks in the Rakefile — regenerate with
# `rake gemspec` rather than editing this block by hand.
Gem::Specification.new do |s|
  s.name = %q{shortly}
  s.version = "0.2.0"

  # Guarded because very old RubyGems lacks this writer.
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Bagwan Pankaj"]
  s.date = %q{2011-01-14}
  s.description = %q{Ruby Wrapper for different Url Shortner Services Ruby Wrapper}
  s.email = %q{bagwanpankaj@gmail.com}
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.textile"
  ]
  s.files = [
    "lib/shortly.rb",
    "lib/shortly/client.rb",
    "lib/shortly/clients/bitly.rb",
    "lib/shortly/clients/googl.rb",
    "lib/shortly/clients/isgd.rb",
    "lib/shortly/clients/rubyurl.rb",
    "lib/shortly/errors.rb",
    "lib/shortly/helper.rb"
  ]
  s.homepage = %q{http://github.com/bagwanpankaj/shortly}
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{Url Shortner Services Ruby Wrapper}
  s.test_files = [
    "spec/shortly_spec.rb",
    "spec/spec_helper.rb"
  ]

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    # RubyGems >= 1.2 distinguishes runtime from development dependencies;
    # older versions fall back to plain add_dependency for everything.
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<httparty>, [">= 0"])
      s.add_development_dependency(%q<rspec>, ["~> 2.1.0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.5.1"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
    else
      s.add_dependency(%q<httparty>, [">= 0"])
      s.add_dependency(%q<rspec>, ["~> 2.1.0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.5.1"])
      s.add_dependency(%q<rcov>, [">= 0"])
    end
  else
    s.add_dependency(%q<httparty>, [">= 0"])
    s.add_dependency(%q<rspec>, ["~> 2.1.0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.5.1"])
    s.add_dependency(%q<rcov>, [">= 0"])
  end
end
|
# Load all flask files, in the correct order.
# FIX: the list started with a duplicated "modules"; the first file to be
# required must be "load".
files = %w(load modules responder inventory room hallway player adventure)
files.each do |file|
  require File.dirname(__FILE__) + '/flask/' + file
end
Fixed flask.rb to require the "load" file.
# Require every flask component, in dependency order ("load" must be first).
flask_dir = File.join(File.dirname(__FILE__), 'flask')
%w(load modules responder inventory room hallway player adventure).each do |component|
  require File.join(flask_dir, component)
end
#
# Be sure to run `pod spec lint TestPod.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# Podspec for TestPod. Template boilerplate removed, as the template itself
# instructs before submission.
Pod::Spec.new do |s|
  # ── Spec Metadata ──
  s.name         = "TestPod"
  s.version      = "0.0.1"
  s.summary      = "add pod testpod"
  s.description  = <<-DESC
                   test add pod and add github
                   DESC
  s.homepage     = "https://github.com/j364960953/TestPod"

  # ── Spec License ──
  # FIX: removed the dead `s.license = "MIT (example)"` assignment that was
  # immediately overwritten by the hash form below.
  s.license      = { :type => "MIT", :file => "LICENSE" }

  # ── Author Metadata ──
  s.author       = { "j364960953" => "364960953@163.com" }

  # ── Platform Specifics ──
  s.platform     = :ios, "8.0"

  # ── Source Location ──
  # Tag is derived from s.version so bumping the version updates the tag
  # automatically (was hard-coded to "0.0.1", same value today).
  s.source       = { :git => "https://github.com/j364960953/TestPod.git", :tag => s.version }

  # ── Source Code ──
  s.source_files  = "TestPod", "TestPod/TestPod/TestPod/**/*.swift"
  s.exclude_files = "TestPod/Exclude"

  # ── Project Linking ──
  s.frameworks = "UIKit", "Foundation"

  # ── Project Settings ──
  s.requires_arc = true
end
add tag 0.0.2
#
# Be sure to run `pod spec lint TestPod.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# Podspec for TestPodJ. Template boilerplate removed, as the template itself
# instructs before submission.
Pod::Spec.new do |s|
  # ── Spec Metadata ──
  s.name         = "TestPodJ"
  s.version      = "0.0.2"
  s.summary      = "add pod testpod"
  s.description  = <<-DESC
                   test add pod and add github
                   DESC
  s.homepage     = "https://github.com/j364960953/TestPod"

  # ── Spec License ──
  # FIX: removed the dead `s.license = "MIT (example)"` assignment that was
  # immediately overwritten by the hash form below.
  s.license      = { :type => "MIT", :file => "LICENSE" }

  # ── Author Metadata ──
  s.author       = { "j364960953" => "364960953@163.com" }

  # ── Platform Specifics ──
  s.platform     = :ios, "8.0"

  # ── Source Location ──
  # Tag is derived from s.version so bumping the version updates the tag
  # automatically.
  s.source       = { :git => "https://github.com/j364960953/TestPod.git", :tag => s.version }

  # ── Source Code ──
  s.source_files  = "TestPod", "TestPod/TestPod/TestPod/**/*.swift"
  s.exclude_files = "TestPod/Exclude"

  # ── Project Linking ──
  s.frameworks = "UIKit", "Foundation"

  # ── Project Settings ──
  s.requires_arc = true
end
|
# Screens text against a fixed list of banned words.
class MessageFilter
  # words - one or more substrings to filter for.
  def initialize(*words)
    @words = words
  end

  # Returns true if +text+ contains any of the configured words.
  # Idiom: Enumerable#any? replaces the manual each/return-true loop.
  def detect?(text)
    @words.any? { |word| text.include?(word) }
  end
end
Refactored code
# Screens text against a fixed list of banned words.
class MessageFilter
  # words - one or more substrings to filter for.
  def initialize(*words)
    @words = words
  end

  # Returns true if +text+ contains at least one configured word.
  def detect?(text)
    first_match = @words.find { |word| text.include?(word) }
    !first_match.nil?
  end
end
|
#logic for Conways game of life.
=begin
The universe of the Game of Life is an infinite two-dimensional orthogonal grid of square cells, each of which is in one of two possible states, live or dead. Every cell interacts with its eight neighbors, which are the cells that are directly horizontally, vertically, or diagonally adjacent. At each step in time, the following transitions occur:
1. Any live cell with fewer than two live neighbours dies, as if caused by underpopulation.
2. Any live cell with more than three live neighbours dies, as if by overcrowding.
3. Any live cell with two or three live neighbours lives on to the next generation.
4. Any dead cell with exactly three live neighbours becomes a live cell.
The initial pattern constitutes the seed of the system. The first generation is created by applying the above rules simultaneously to every cell in the seedโbirths and deaths happen simultaneously, and the discrete moment at which this happens is sometimes called a tick (in other words, each generation is a pure function of the one before). The rules continue to be applied repeatedly to create further generations.
=end
# Engine for Conway's Game of Life on a bounded grid: a 2D array of booleans
# (true = live) whose outermost rows/columns form a permanently-dead border.
class ConwayEngine
  attr_accessor :cells

  # Create a new ConwayEngine with a width x height world.
  # random:      true  -> seed each interior cell randomly from random_from;
  #              false -> start with every interior cell alive (overcrowded).
  # random_from: the pool of states sampled during random seeding.
  def initialize(width = 30, height = 30, random = true, random_from = [true, false].to_a)
    @cells = Array.new
    border = false
    # Build 'height' rows of 'width' cells each.
    height.times do
      if random
        # Random choice between the values in random_from for each cell.
        new_element = (0...width).map { random_from[rand(random_from.size)] }
        # First and last columns are always dead border cells.
        new_element[0] = border
        new_element[-1] = border
      else
        new_element = Array.new(width, border)
        new_element[1...-1] = Array.new(new_element.size - 2, true)
      end
      @cells.push new_element
    end
    # Top and bottom rows are entirely dead border cells.
    @cells[0] = Array.new(width, border)
    @cells[-1] = Array.new(width, border)
  end

  # Print the grid: "o" for a live cell, "." for a dead one.
  def printCells(array)
    array.each do |row|
      row.each do |element|
        print(element ? "o" : ".")
      end
      print "\n"
    end
  end

  # Compute generation n+1. Every cell is updated simultaneously from the
  # previous generation; the border is never updated (stays dead).
  def iterate
    new_cells = Array.new(@cells.size, false).map! { Array.new(@cells.first.size, false) }
    @cells.each_with_index do |row, y|
      next unless y > 0 && y < @cells.size - 1 # leave border out
      row.each_with_index do |entry, x|
        next unless x > 0 && x < row.size - 1 # leave border out
        neighbor_count = count_neighbors(y, x, @cells)
        new_cells[y][x] = evolve(entry, neighbor_count)
      end
    end
    # FIX: no clone needed — new_cells is already a fresh array.
    @cells = new_cells
  end

  # Count the live Moore (N_8) neighbors of the cell at (row, col).
  # FIX: parameters renamed — they were called (x, y) but used as (row, col),
  # the opposite of the call sites, which obscured the indexing.
  # Callers must pass interior coordinates so the +/-1 lookups stay in bounds.
  def count_neighbors(row, col, cells)
    counter = 0
    counter += 1 if cells[row + 1][col - 1]
    counter += 1 if cells[row + 1][col]
    counter += 1 if cells[row + 1][col + 1]
    counter += 1 if cells[row][col + 1]
    counter += 1 if cells[row - 1][col + 1]
    counter += 1 if cells[row - 1][col]
    counter += 1 if cells[row - 1][col - 1]
    counter += 1 if cells[row][col - 1]
    counter
  end

  # Next state of a single cell from its live-neighbor count (cells are
  # strictly true/false in this engine):
  #   live:  survives with exactly 2 or 3 neighbors (rules 1-3)
  #   dead:  born with exactly 3 neighbors (rule 4)
  def evolve(cell, neighbors)
    if cell
      neighbors == 2 || neighbors == 3
    else
      neighbors == 3
    end
  end
end
if __FILE__ == $0
  # Interactive demo: seed a random 120x50 world, then print one generation
  # per keypress until the user quits.
  # FIX: removed the large unused fixture arrays (ten_cell_row and
  # ten_cell_row_next) and the commented-out debug seeding code; replaced
  # `while true` with the idiomatic `loop do`, dropping the unused local.
  engine = ConwayEngine.new(120, 50)
  loop do
    engine.printCells(engine.cells)
    puts "Press 'Q' to quit, return to continue"
    break if gets == "Q\n"
    engine.iterate
    system("clear")
  end
end
nightly build
#logic for Conways game of life.
=begin
The universe of the Game of Life is an infinite two-dimensional orthogonal grid of square cells, each of which is in one of two possible states, live or dead. Every cell interacts with its eight neighbors, which are the cells that are directly horizontally, vertically, or diagonally adjacent. At each step in time, the following transitions occur:
1. Any live cell with fewer than two live neighbours dies, as if caused by underpopulation.
2. Any live cell with more than three live neighbours dies, as if by overcrowding.
3. Any live cell with two or three live neighbours lives on to the next generation.
4. Any dead cell with exactly three live neighbours becomes a live cell.
The initial pattern constitutes the seed of the system. The first generation is created by applying the above rules simultaneously to every cell in the seed — births and deaths happen simultaneously, and the discrete moment at which this happens is sometimes called a tick (in other words, each generation is a pure function of the one before). The rules continue to be applied repeatedly to create further generations.
=end
# Engine for Conway's Game of Life on a bounded grid of booleans.
# The outermost ring of cells is a permanently dead border, so the
# neighbour lookups never need to wrap or bounds-check.
class ConwayEngine
  # The world: an Array of row Arrays of true (live) / false (dead).
  attr_accessor :cells

  # Build a new world.
  # width, height - grid dimensions, border included
  # random        - true: seed the interior with values sampled from
  #                 random_from; false: start with every interior cell
  #                 alive (an "overcrowded" start)
  # random_from   - pool of values sampled from when random is true
  #                 (the previous default had a redundant .to_a on an
  #                 Array literal; behaviour is unchanged)
  def initialize(width=30, height=30, random=true, random_from=[true, false])
    @cells = []
    border = false
    height.times do
      if random
        # random choice from random_from for every cell of the row
        row = (0...width).map { random_from[rand(random_from.size)] }
        # keep the left/right border dead
        row[0] = border
        row[-1] = border
      else
        # interior all alive, border dead
        row = Array.new(width, border)
        row[1...-1] = Array.new(row.size - 2, true)
      end
      @cells.push row
    end
    # top and bottom rows form the dead border
    @cells[0] = Array.new(width, border)
    @cells[-1] = Array.new(width, border)
  end

  # Print the given grid: "o" for a live cell, "." for a dead one.
  def printCells(array)
    array.each do |row|
      row.each do |element|
        print "o" if element == true
        print "." if element == false
      end
      print "\n"
    end
  end

  # Advance the world one generation. Every cell is evolved from a
  # snapshot of the current generation (simultaneous update, as the
  # rules require); border cells always stay dead.
  def iterate()
    new_cells = Array.new(@cells.size) { Array.new(@cells.first.size, false) }
    @cells.each_with_index do |row, y|
      next unless y > 0 && y < @cells.size - 1 # skip border rows
      row.each_with_index do |entry, x|
        next unless x > 0 && x < row.size - 1 # skip border columns
        new_cells[y][x] = evolve(entry, count_neighbors(y, x, @cells))
      end
    end
    @cells = new_cells
  end

  # Count the live Moore (N_8) neighbours of the cell at cells[row][col].
  # NOTE: callers pass the row (y) index first; the previous parameter
  # names (x, y) were swapped relative to every call site, which this
  # rename fixes without changing behaviour.
  def count_neighbors(row, col, cells)
    count = 0
    [-1, 0, 1].each do |dr|
      [-1, 0, 1].each do |dc|
        next if dr == 0 && dc == 0
        count += 1 if cells[row + dr][col + dc]
      end
    end
    count
  end

  # Next state of a single cell given its live-neighbour count:
  # rules 1-3: a live cell survives with exactly 2 or 3 neighbours,
  # rule 4:    a dead cell with exactly 3 neighbours becomes alive.
  def evolve(cell, neighbors)
    if cell
      neighbors == 2 || neighbors == 3
    else
      neighbors == 3
    end
  end
end
if __FILE__ == $0
  # Fixture data: an 11x40 grid seeded with a horizontal bar of ten live
  # cells, plus its expected next generation. Built programmatically here
  # (equal values to the old literal arrays); currently unused by the demo
  # below, which runs on a fresh random world instead.
  blank = proc { Array.new(40, false) }
  bar = proc do |start, length|
    row = Array.new(40, false)
    row[start, length] = Array.new(length, true)
    row
  end
  ten_cell_row = (0..10).map { |i| i == 5 ? bar.call(10, 10) : blank.call }
  ten_cell_row_next = (0..10).map { |i| (4..6).cover?(i) ? bar.call(11, 8) : blank.call }
  freshConwayEngine = ConwayEngine.new(120, 50)
  #freshConwayEngine.cells = ten_cell_row
  # Fast-forward the world 500 generations before going interactive.
  runs = 500
  runs.times { freshConwayEngine.iterate() }
  # Interactive display loop: show the world, advance one tick per key
  # press, quit when the user enters "Q".
  loop do
    freshConwayEngine.printCells(freshConwayEngine.cells)
    puts "Tick #{runs} \nPress 'Q' to quit, return to continue"
    input = gets
    break if "Q\n" == input
    freshConwayEngine.iterate()
    system("clear")
    runs += 1
  end
end
|
# Gem manifest for social_stream-attachments 0.0.2: adds file-upload
# activities to the Social Stream engine.
Gem::Specification.new do |s|
s.name = "social_stream-attachments"
s.version = "0.0.2"
s.authors = ["Vรญctor Sรกnchez Belmar"]
s.summary = "Provides capabilities to upload files as another social stream activity"
s.description = "This gem allow you upload almost any kind of file as new social stream activity."
s.email = "v.sanchezbelmar@gmail.com"
s.homepage = "http://github.com/ging/social_stream-attachments"
# Ship every file tracked by git (shells out to `git ls-files` at build time).
s.files = `git ls-files`.split("\n")
# Gem dependencies
s.add_runtime_dependency('social_stream-base','~> 0.5.1')
# Development Gem dependencies
s.add_development_dependency('rails', '~> 3.0.7')
s.add_development_dependency('sqlite3-ruby')
# ruby-debug only works on pre-1.9 rubies
if RUBY_VERSION < '1.9'
s.add_development_dependency('ruby-debug', '~> 0.10.3')
end
s.add_development_dependency('rspec-rails', '~> 2.5.0')
s.add_development_dependency('factory_girl', '~> 1.3.2')
s.add_development_dependency('forgery', '~> 0.3.6')
s.add_development_dependency('capybara', '~> 0.3.9')
end
Update gemspec
# Gem manifest for social_stream-attachments 0.0.3: adds file-upload
# activities to the Social Stream engine. Differs from 0.0.2 by the added
# organisational author, rewritten summary/description, and a pinned
# Rails 3.1 release-candidate dev dependency.
Gem::Specification.new do |s|
s.name = "social_stream-attachments"
s.version = "0.0.3"
s.authors = ["Vรญctor Sรกnchez Belmar", "GING - DIT - UPM"]
s.summary = "File capabilities for Social Stream, the core for building social network websites"
s.description = "Social Stream is a Ruby on Rails engine providing your application with social networking features and activity streams.\n\nThis gem allow you upload almost any kind of file as new social stream activity."
s.email = "v.sanchezbelmar@gmail.com"
s.homepage = "http://github.com/ging/social_stream-attachments"
# Ship every file tracked by git (shells out to `git ls-files` at build time).
s.files = `git ls-files`.split("\n")
# Gem dependencies
s.add_runtime_dependency('social_stream-base','~> 0.5.1')
# Development Gem dependencies
s.add_development_dependency('rails', '3.1.0.rc4')
s.add_development_dependency('sqlite3-ruby')
# ruby-debug only works on pre-1.9 rubies
if RUBY_VERSION < '1.9'
s.add_development_dependency('ruby-debug', '~> 0.10.3')
end
s.add_development_dependency('rspec-rails', '~> 2.5.0')
s.add_development_dependency('factory_girl', '~> 1.3.2')
s.add_development_dependency('forgery', '~> 0.3.6')
s.add_development_dependency('capybara', '~> 0.3.9')
end
|
#
# Be sure to run `pod spec lint MusouKit.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# Podspec for MusouKit 0.1.1 — an iOS development kit.
# Fix: the plain-string `s.license = "MIT"` assignment that used to sit
# before the hash form was dead code (immediately overwritten), so only
# the hash form is kept.
Pod::Spec.new do |s|
  s.name    = "MusouKit"
  s.version = "0.1.1"
  s.summary = "A development kit for iOS"
  s.description = <<-DESC
A development kit for iOS by Danal Luo.
  DESC
  s.homepage = "https://github.com/idanal/musouKit"
  # s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
  s.license = { :type => "MIT", :file => "LICENSE" }
  s.author = { "Danal.Luo" => "idanal.mail@qq.com" }
  # s.social_media_url = "http://twitter.com/yunlin.luo"

  # --- Platform specifics ---
  s.platform = :ios, "7.0"
  s.source = { :git => "https://github.com/idanal/musouKit.git", :tag => "#{s.version}" }
  s.source_files = "MusouKit/**/*.{h,m,mm}"
  # MSGPSManager is excluded from the pod (kept out of compilation).
  s.exclude_files = "MusouKit/**/MSGPSManager.*"
  s.public_header_files = "MusouKit/**/*.h"

  # --- Project linking ---
  s.framework = "UIKit"
  # s.frameworks = "SomeFramework", "AnotherFramework"
  # s.library = "iconv"
  # s.libraries = "iconv", "xml2"

  # --- Project settings ---
  s.requires_arc = true
  # s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
  # s.dependency "SDWebImage", "~> 4.0.0"
end
0.1.2
#
# Be sure to run `pod spec lint MusouKit.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# Podspec for MusouKit 0.1.2 — an iOS development kit.
# Fix: the plain-string `s.license = "MIT"` assignment that used to sit
# before the hash form was dead code (immediately overwritten), so only
# the hash form is kept.
Pod::Spec.new do |s|
  s.name    = "MusouKit"
  s.version = "0.1.2"
  s.summary = "A development kit for iOS"
  s.description = <<-DESC
A development kit for iOS by Danal Luo.
  DESC
  s.homepage = "https://github.com/idanal/musouKit"
  # s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
  s.license = { :type => "MIT", :file => "LICENSE" }
  s.author = { "Danal.Luo" => "idanal.mail@qq.com" }
  # s.social_media_url = "http://twitter.com/yunlin.luo"

  # --- Platform specifics ---
  s.platform = :ios, "7.0"
  s.source = { :git => "https://github.com/idanal/musouKit.git", :tag => "#{s.version}" }
  s.source_files = "MusouKit/**/*.{h,m,mm}"
  # MSGPSManager is excluded from the pod (kept out of compilation).
  s.exclude_files = "MusouKit/**/MSGPSManager.*"
  s.public_header_files = "MusouKit/**/*.h"

  # --- Project linking ---
  s.framework = "UIKit"
  # s.frameworks = "SomeFramework", "AnotherFramework"
  # s.library = "iconv"
  # s.libraries = "iconv", "xml2"

  # --- Project settings ---
  s.requires_arc = true
  # s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
  # s.dependency "SDWebImage", "~> 4.0.0"
end
|
# === actions of insite messages ===

# GET /user/messages — paginated inbox listing for the signed-in user.
# Query params: 'slice' = page size (default 20) and 'page' = page number
# (default 1); missing or non-positive values fall back to the defaults.
get '/user/messages' do
user = login_filter
if !(@slice = params['slice']) || (@slice.to_i <= 0)
@slice = 20
else
@slice = @slice.to_i
end
if !(@page = params['page']) || (@page.to_i <= 0)
@page = 1
else
@page = @page.to_i
end
@messages = user.inbox_messages
.order(sent_at: :desc)
.limit(@slice)
.offset(@slice * (@page - 1))
# NOTE(review): @messages already has limit/offset applied. If this is an
# ActiveRecord relation, .count here may count only the current page,
# making @total_page wrong — confirm, and if so count the unpaginated
# scope before applying limit/offset.
total_messages = @messages.count
@total_page = total_messages / @slice
@total_page += 1 if total_messages % @slice != 0
@title = "ๆถไปถ็ฎฑ"
@breadcrumb = [
{ name: "้ฆ้กต", url: '/' },
{ name: "ๆถไปถ็ฎฑ", active: true }
]
erb :user_messages
end

# ajax invoke for number of messages
# POST /user/message_amount — returns the inbox message count as JSON.
post '/user/message_amount' do
user = login_filter
json ret: "success", msg: user.inbox_messages.length
end

# mark all as read
# POST /user/mark_messages — flags every unread inbox message as read and
# resets the cached unread counter kept in the session.
post '/user/mark_messages' do
user = login_filter
begin
user.inbox_messages.where(isread: false).each { |m| m.isread = 1; m.save }
session[:message_amount] = 0
json ret: "success"
# bare rescue catches StandardError; model errors are reported as JSON
rescue
json ret: "error", msg: user.errors.messages.inspect
end
end
allow newbie to view messages
# === actions of insite messages ===

# GET /user/messages — paginated inbox listing for the signed-in user.
# login_filter is called with required_status including NEWBIE —
# presumably so newly-registered users can also view their inbox; the
# exact semantics of required_status are defined elsewhere (verify).
# Query params: 'slice' = page size (default 20) and 'page' = page number
# (default 1); missing or non-positive values fall back to the defaults.
get '/user/messages' do
user = login_filter required_status: [ User::Status::NEWBIE ]
if !(@slice = params['slice']) || (@slice.to_i <= 0)
@slice = 20
else
@slice = @slice.to_i
end
if !(@page = params['page']) || (@page.to_i <= 0)
@page = 1
else
@page = @page.to_i
end
@messages = user.inbox_messages
.order(sent_at: :desc)
.limit(@slice)
.offset(@slice * (@page - 1))
# NOTE(review): @messages already has limit/offset applied. If this is an
# ActiveRecord relation, .count here may count only the current page,
# making @total_page wrong — confirm, and if so count the unpaginated
# scope before applying limit/offset.
total_messages = @messages.count
@total_page = total_messages / @slice
@total_page += 1 if total_messages % @slice != 0
@title = "ๆถไปถ็ฎฑ"
@breadcrumb = [
{ name: "้ฆ้กต", url: '/' },
{ name: "ๆถไปถ็ฎฑ", active: true }
]
erb :user_messages
end

# ajax invoke for number of messages
# POST /user/message_amount — returns the inbox message count as JSON
# (NEWBIE-status users admitted, as above).
post '/user/message_amount' do
user = login_filter required_status: [ User::Status::NEWBIE ]
json ret: "success", msg: user.inbox_messages.length
end

# mark all as read
# POST /user/mark_messages — flags every unread inbox message as read and
# resets the cached unread counter kept in the session (NEWBIE-status
# users admitted, as above).
post '/user/mark_messages' do
user = login_filter required_status: [ User::Status::NEWBIE ]
begin
user.inbox_messages.where(isread: false).each { |m| m.isread = 1; m.save }
session[:message_amount] = 0
json ret: "success"
# bare rescue catches StandardError; model errors are reported as JSON
rescue
json ret: "error", msg: user.errors.messages.inspect
end
end
|
#!/usr/bin/env ruby
require 'aws'
require 'erubis'
require 'uri'
require 'bitly'
module OCTechPreview
# Renders the Confluence wiki page for a tech preview from the
# confluence_page.erb template in the working directory.
class WikiPage
  # preview_info - object answering #as_hash and #days_valid
  def initialize(preview_info)
    @preview_info = preview_info
  end

  # Render the template, exposing the package info hash and the number of
  # days the links stay valid.
  def text
    eruby = Erubis::Eruby.new(template)
    eruby.result(:info => @preview_info.as_hash, :days_valid => @preview_info.days_valid)
  end

  # Raw template source, read from the current working directory.
  def template
    File.read("confluence_page.erb")
  end
end
# Turns a package name into a time-limited presigned S3 download link,
# shortened through bit.ly.
class URLGenerator
# days_valid - number of days the presigned links remain valid
attr_reader :days_valid
# id / secret   - AWS credentials
# bucket        - S3 bucket holding the packages
# days_valid    - link lifetime in days
# bitlyuser / bitlyapi - bit.ly credentials
# Side effects: mutates the global AWS configuration (region us-west-2)
# and the global Bitly configuration.
def initialize(id, secret, bucket, days_valid, bitlyuser, bitlyapi)
@id, @secret, @bucket, @days_valid, @bitlyuser, @bitlyapi = id, secret, bucket, days_valid, bitlyuser, bitlyapi
@config = AWS.config(:access_key_id => @id, :secret_access_key => @secret, :region => 'us-west-2')
@s3 = AWS::S3.new
# Bitly library makes us declare that we're using api version 3 for now
Bitly.configure do |config|
config.api_version = 3
config.login = @bitlyuser
config.api_key = @bitlyapi
end
@bitly = Bitly.client
end
# Presigned GET URL for the given S3 object key, valid for days_valid
# days, then shortened via the bit.ly API (network call).
def url_for(package)
o = @s3.buckets[@bucket].objects[package]
url = o.url_for(:get, :expires => (60 * 60 * 24 * @days_valid))
@bitly.shorten(url).short_url
end
end
# Aggregates download links for a list of package filenames, grouped by
# product symbol.
class PreviewInfo
  # packages      - Array of package filename Strings
  # url_generator - object answering #url_for(package) and #days_valid
  def initialize(packages, url_generator)
    @packages = packages
    @url_generator = url_generator
  end

  # Link lifetime in days, delegated to the URL generator.
  def days_valid
    @url_generator.days_valid
  end

  # Memoized mapping of product symbol => [[label, short_url], ...],
  # preserving the order packages were listed in.
  def as_hash
    @hash ||= @packages.each_with_object({}) do |package, grouped|
      info = PackageInfo.new(package)
      pair = [info.label, @url_generator.url_for(package)]
      (grouped[info.product] ||= []) << pair
    end
  end
end
# Derives human-readable platform/architecture labels and a product
# symbol from an Omnibus/gem package filename.
# Fixes: "Centos 6" normalized to "CentOS 6" (matching "CentOS 5"), and
# the raise messages gained their missing closing quote around the
# filename.
class PackageInfo
  attr_reader :package

  # package - the package filename (String)
  def initialize(package)
    @package = package
  end

  # Display label, e.g. "CentOS 5 (64-bit)"; the architecture suffix is
  # omitted when none can be detected.
  def label
    l = platform
    l << " (#{architecture})" if architecture
    l
  end

  # Map the filename to a platform name.
  # Raises RuntimeError for an unrecognized filename.
  def platform
    case @package
    when /\.el5\./
      "CentOS 5"
    when /\.el6\./
      "CentOS 6"
    when /ubuntu\.10\.04/
      "Ubuntu 10.04"
    when /ubuntu\.11\.04/
      "Ubuntu 11.04"
    when /windows\.msi/
      "Windows"
    when /\.gem/
      "All Platforms"
    else
      raise "Unrecognized package format for '#{@package}'; could not determine platform!"
    end
  end

  # @return [String, nil] nil if no architecture is detected (this
  #   happens with e.g. Windows and Gem packages, and is not an error)
  def architecture
    case @package
    when /x86_64/, /amd64/
      "64-bit"
    when /i686/, /i386/
      "32-bit"
    end
  end

  # Map the filename to a product identifier symbol.
  # Raises RuntimeError for an unrecognized filename.
  def product
    case @package
    when /private-chef/
      :private_chef
    when /opscode-reporting/
      :reporting
    when /knife-reporting/
      :knife_reporting
    when /opscode-push-jobs-server/
      :push_jobs_server
    when /opscode-push-jobs-client/
      :push_jobs_client
    when /knife-pushy/
      :knife_push_jobs
    when /opscode-webui/
      :webui
    else
      raise "Unrecognized package format for '#{@package}'; could not determine product!"
    end
  end
end
end
# Script entry point: builds time-limited, bit.ly-shortened S3 download
# links for the hard-coded tech-preview package list below and prints the
# rendered Confluence wiki page to stdout.
# Requires AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BITLY_USER and
# BITLY_APIKEY in the environment; aborts with an error otherwise.
if __FILE__ == $0
BUCKET_NAME = "opc11-tech-preview"
DAYS_VALID = 7
id = ENV['AWS_ACCESS_KEY_ID'] || raise("No AWS_ACCESS_KEY_ID environment variable set!")
secret = ENV['AWS_SECRET_ACCESS_KEY'] || raise("No AWS_SECRET_ACCESS_KEY environment variable set!")
bitlyuser = ENV['BITLY_USER'] || raise("No BITLY_USER environment variable set!")
bitlyapi = ENV['BITLY_APIKEY'] || raise("No BITLY_APIKEY environment variable set!")
# Package object keys expected to exist in the S3 bucket above.
packages = [
# Private Chef
"private-chef-11.0.0_tech.preview.2+20130624205025-1.el5.x86_64.rpm",
"private-chef-11.0.0_tech.preview.2+20130624205025-1.el6.x86_64.rpm",
"private-chef_11.0.0-tech.preview.2+20130624205025-1.ubuntu.10.04_amd64.deb",
"private-chef_11.0.0-tech.preview.2+20130624205025-1.ubuntu.11.04_amd64.deb",
# Reporting
"opscode-reporting-0.2.0_tech.preview.2-1.el5.x86_64.rpm",
"opscode-reporting-0.2.0_tech.preview.2-1.el6.x86_64.rpm",
"opscode-reporting_0.2.0-tech.preview.2-1.ubuntu.10.04_amd64.deb",
"opscode-reporting_0.2.0-tech.preview.2-1.ubuntu.11.04_amd64.deb",
"knife-reporting-0.1.0.gem",
# Pushy
"opscode-push-jobs-server-0.9.0_tech.preview.1-1.el5.x86_64.rpm",
"opscode-push-jobs-server-0.9.0_tech.preview.1-1.el6.x86_64.rpm",
"opscode-push-jobs-server_0.9.0-tech.preview.1-1.ubuntu.10.04_amd64.deb",
"opscode-push-jobs-server_0.9.0-tech.preview.1-1.ubuntu.11.04_amd64.deb",
"opscode-push-jobs-client-0.0.1+20130307153525.git.98.c04f587-1.windows.msi",
"opscode-push-jobs-client-0.9.0_tech.preview.1+20130621231152.git.1.b4a8992-1.el5.i686.rpm",
"opscode-push-jobs-client-0.9.0_tech.preview.1+20130621231152.git.1.b4a8992-1.el5.x86_64.rpm",
"opscode-push-jobs-client-0.9.0_tech.preview.1+20130621231152.git.1.b4a8992-1.el6.i686.rpm",
"opscode-push-jobs-client-0.9.0_tech.preview.1+20130621231152.git.1.b4a8992-1.el6.x86_64.rpm",
"opscode-push-jobs-client_0.9.0-tech.preview.1+20130621231152.git.1.b4a8992-1.ubuntu.10.04_i386.deb",
"opscode-push-jobs-client_0.9.0-tech.preview.1+20130621231152.git.1.b4a8992-1.ubuntu.10.04_amd64.deb",
"opscode-push-jobs-client_0.9.0-tech.preview.1+20130621231152.git.1.b4a8992-1.ubuntu.11.04_i386.deb",
"opscode-push-jobs-client_0.9.0-tech.preview.1+20130621231152.git.1.b4a8992-1.ubuntu.11.04_amd64.deb",
"knife-pushy-0.1.gem",
# Web UI
"opscode-webui-2.4.0_tech.preview.1-1.el5.x86_64.rpm",
"opscode-webui-2.4.0_tech.preview.1-1.el6.x86_64.rpm",
"opscode-webui_2.4.0-tech.preview.1-1.ubuntu.10.04_amd64.deb",
"opscode-webui_2.4.0-tech.preview.1-1.ubuntu.11.04_amd64.deb"
]
# Wire the pipeline: packages -> presigned short URLs -> wiki page text.
url_generator = OCTechPreview::URLGenerator.new(id, secret, BUCKET_NAME, DAYS_VALID, bitlyuser, bitlyapi)
preview_info = OCTechPreview::PreviewInfo.new(packages, url_generator)
wiki_page = OCTechPreview::WikiPage.new(preview_info)
puts wiki_page.text
end
Update knife-pushy plugin version to 0.3
#!/usr/bin/env ruby
require 'aws'
require 'erubis'
require 'uri'
require 'bitly'
module OCTechPreview
# Renders the Confluence wiki page for a tech preview from the
# confluence_page.erb template in the working directory.
class WikiPage
  # preview_info - object answering #as_hash and #days_valid
  def initialize(preview_info)
    @preview_info = preview_info
  end

  # Render the template, exposing the package info hash and the number of
  # days the links stay valid.
  def text
    eruby = Erubis::Eruby.new(template)
    eruby.result(:info => @preview_info.as_hash, :days_valid => @preview_info.days_valid)
  end

  # Raw template source, read from the current working directory.
  def template
    File.read("confluence_page.erb")
  end
end
# Turns a package name into a time-limited presigned S3 download link,
# shortened through bit.ly.
class URLGenerator
# days_valid - number of days the presigned links remain valid
attr_reader :days_valid
# id / secret   - AWS credentials
# bucket        - S3 bucket holding the packages
# days_valid    - link lifetime in days
# bitlyuser / bitlyapi - bit.ly credentials
# Side effects: mutates the global AWS configuration (region us-west-2)
# and the global Bitly configuration.
def initialize(id, secret, bucket, days_valid, bitlyuser, bitlyapi)
@id, @secret, @bucket, @days_valid, @bitlyuser, @bitlyapi = id, secret, bucket, days_valid, bitlyuser, bitlyapi
@config = AWS.config(:access_key_id => @id, :secret_access_key => @secret, :region => 'us-west-2')
@s3 = AWS::S3.new
# Bitly library makes us declare that we're using api version 3 for now
Bitly.configure do |config|
config.api_version = 3
config.login = @bitlyuser
config.api_key = @bitlyapi
end
@bitly = Bitly.client
end
# Presigned GET URL for the given S3 object key, valid for days_valid
# days, then shortened via the bit.ly API (network call).
def url_for(package)
o = @s3.buckets[@bucket].objects[package]
url = o.url_for(:get, :expires => (60 * 60 * 24 * @days_valid))
@bitly.shorten(url).short_url
end
end
# Aggregates download links for a list of package filenames, grouped by
# product symbol.
class PreviewInfo
  # packages      - Array of package filename Strings
  # url_generator - object answering #url_for(package) and #days_valid
  def initialize(packages, url_generator)
    @packages = packages
    @url_generator = url_generator
  end

  # Link lifetime in days, delegated to the URL generator.
  def days_valid
    @url_generator.days_valid
  end

  # Memoized mapping of product symbol => [[label, short_url], ...],
  # preserving the order packages were listed in.
  def as_hash
    @hash ||= @packages.each_with_object({}) do |package, grouped|
      info = PackageInfo.new(package)
      pair = [info.label, @url_generator.url_for(package)]
      (grouped[info.product] ||= []) << pair
    end
  end
end
# Derives human-readable platform/architecture labels and a product
# symbol from an Omnibus/gem package filename.
# Fixes: "Centos 6" normalized to "CentOS 6" (matching "CentOS 5"), and
# the raise messages gained their missing closing quote around the
# filename.
class PackageInfo
  attr_reader :package

  # package - the package filename (String)
  def initialize(package)
    @package = package
  end

  # Display label, e.g. "CentOS 5 (64-bit)"; the architecture suffix is
  # omitted when none can be detected.
  def label
    l = platform
    l << " (#{architecture})" if architecture
    l
  end

  # Map the filename to a platform name.
  # Raises RuntimeError for an unrecognized filename.
  def platform
    case @package
    when /\.el5\./
      "CentOS 5"
    when /\.el6\./
      "CentOS 6"
    when /ubuntu\.10\.04/
      "Ubuntu 10.04"
    when /ubuntu\.11\.04/
      "Ubuntu 11.04"
    when /windows\.msi/
      "Windows"
    when /\.gem/
      "All Platforms"
    else
      raise "Unrecognized package format for '#{@package}'; could not determine platform!"
    end
  end

  # @return [String, nil] nil if no architecture is detected (this
  #   happens with e.g. Windows and Gem packages, and is not an error)
  def architecture
    case @package
    when /x86_64/, /amd64/
      "64-bit"
    when /i686/, /i386/
      "32-bit"
    end
  end

  # Map the filename to a product identifier symbol.
  # Raises RuntimeError for an unrecognized filename.
  def product
    case @package
    when /private-chef/
      :private_chef
    when /opscode-reporting/
      :reporting
    when /knife-reporting/
      :knife_reporting
    when /opscode-push-jobs-server/
      :push_jobs_server
    when /opscode-push-jobs-client/
      :push_jobs_client
    when /knife-pushy/
      :knife_push_jobs
    when /opscode-webui/
      :webui
    else
      raise "Unrecognized package format for '#{@package}'; could not determine product!"
    end
  end
end
end
# Script entry point: builds time-limited, bit.ly-shortened S3 download
# links for the hard-coded tech-preview package list below and prints the
# rendered Confluence wiki page to stdout.
# Requires AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BITLY_USER and
# BITLY_APIKEY in the environment; aborts with an error otherwise.
if __FILE__ == $0
BUCKET_NAME = "opc11-tech-preview"
DAYS_VALID = 7
id = ENV['AWS_ACCESS_KEY_ID'] || raise("No AWS_ACCESS_KEY_ID environment variable set!")
secret = ENV['AWS_SECRET_ACCESS_KEY'] || raise("No AWS_SECRET_ACCESS_KEY environment variable set!")
bitlyuser = ENV['BITLY_USER'] || raise("No BITLY_USER environment variable set!")
bitlyapi = ENV['BITLY_APIKEY'] || raise("No BITLY_APIKEY environment variable set!")
# Package object keys expected to exist in the S3 bucket above
# (knife-pushy updated to 0.3 in this revision).
packages = [
# Private Chef
"private-chef-11.0.0_tech.preview.2+20130624205025-1.el5.x86_64.rpm",
"private-chef-11.0.0_tech.preview.2+20130624205025-1.el6.x86_64.rpm",
"private-chef_11.0.0-tech.preview.2+20130624205025-1.ubuntu.10.04_amd64.deb",
"private-chef_11.0.0-tech.preview.2+20130624205025-1.ubuntu.11.04_amd64.deb",
# Reporting
"opscode-reporting-0.2.0_tech.preview.2-1.el5.x86_64.rpm",
"opscode-reporting-0.2.0_tech.preview.2-1.el6.x86_64.rpm",
"opscode-reporting_0.2.0-tech.preview.2-1.ubuntu.10.04_amd64.deb",
"opscode-reporting_0.2.0-tech.preview.2-1.ubuntu.11.04_amd64.deb",
"knife-reporting-0.1.0.gem",
# Pushy
"opscode-push-jobs-server-0.9.0_tech.preview.1-1.el5.x86_64.rpm",
"opscode-push-jobs-server-0.9.0_tech.preview.1-1.el6.x86_64.rpm",
"opscode-push-jobs-server_0.9.0-tech.preview.1-1.ubuntu.10.04_amd64.deb",
"opscode-push-jobs-server_0.9.0-tech.preview.1-1.ubuntu.11.04_amd64.deb",
"opscode-push-jobs-client-0.0.1+20130307153525.git.98.c04f587-1.windows.msi",
"opscode-push-jobs-client-0.9.0_tech.preview.1+20130621231152.git.1.b4a8992-1.el5.i686.rpm",
"opscode-push-jobs-client-0.9.0_tech.preview.1+20130621231152.git.1.b4a8992-1.el5.x86_64.rpm",
"opscode-push-jobs-client-0.9.0_tech.preview.1+20130621231152.git.1.b4a8992-1.el6.i686.rpm",
"opscode-push-jobs-client-0.9.0_tech.preview.1+20130621231152.git.1.b4a8992-1.el6.x86_64.rpm",
"opscode-push-jobs-client_0.9.0-tech.preview.1+20130621231152.git.1.b4a8992-1.ubuntu.10.04_i386.deb",
"opscode-push-jobs-client_0.9.0-tech.preview.1+20130621231152.git.1.b4a8992-1.ubuntu.10.04_amd64.deb",
"opscode-push-jobs-client_0.9.0-tech.preview.1+20130621231152.git.1.b4a8992-1.ubuntu.11.04_i386.deb",
"opscode-push-jobs-client_0.9.0-tech.preview.1+20130621231152.git.1.b4a8992-1.ubuntu.11.04_amd64.deb",
"knife-pushy-0.3.gem",
# Web UI
"opscode-webui-2.4.0_tech.preview.1-1.el5.x86_64.rpm",
"opscode-webui-2.4.0_tech.preview.1-1.el6.x86_64.rpm",
"opscode-webui_2.4.0-tech.preview.1-1.ubuntu.10.04_amd64.deb",
"opscode-webui_2.4.0-tech.preview.1-1.ubuntu.11.04_amd64.deb"
]
# Wire the pipeline: packages -> presigned short URLs -> wiki page text.
url_generator = OCTechPreview::URLGenerator.new(id, secret, BUCKET_NAME, DAYS_VALID, bitlyuser, bitlyapi)
preview_info = OCTechPreview::PreviewInfo.new(packages, url_generator)
wiki_page = OCTechPreview::WikiPage.new(preview_info)
puts wiki_page.text
end
|
require 'test_helper'
require 'seek/upload_handling/data_upload'
require 'seek/upload_handling/examine_url'
class UploadHandingTest < ActiveSupport::TestCase
include Seek::UploadHandling::DataUpload
include Seek::UploadHandling::ExamineUrl
# VALID_SCHEMES must be exactly http/https/ftp; everything else —
# including nil, the empty string and non-URL text — is rejected.
test 'valid scheme?' do
assert_equal %w(http https ftp).sort, Seek::UploadHandling::ContentInspection::VALID_SCHEMES.sort
assert valid_scheme?('http://bbc.co.uk')
assert valid_scheme?('https://bbc.co.uk')
assert valid_scheme?('ftp://bbc.co.uk')
refute valid_scheme?('ssh://bbc.co.uk')
# also without a normal url
refute valid_scheme?('bob')
refute valid_scheme?('')
refute valid_scheme?(nil)
end
# content_blob_params extracts the content_blobs array from @params.
test 'content_blob_params' do
@params = { content_blobs: [{ fish: 1, soup: 2 }], data_file: { title: 'george' } }
assert_equal([{ fish: 1, soup: 2 }], content_blob_params)
end
# default_to_http_if_missing prefixes scheme-less URLs with http://,
# leaves nil and already-schemed URLs untouched, and also leaves
# non-URL-shaped strings untouched.
test 'default to http if missing' do
params = { data_url: 'fish.com/path?query=yes' }
default_to_http_if_missing(params)
assert_equal('http://fish.com/path?query=yes', params[:data_url])
params[:data_url] = 'https://fish.com/path?query=yes'
default_to_http_if_missing(params)
assert_equal('https://fish.com/path?query=yes', params[:data_url])
params[:data_url] = nil
default_to_http_if_missing(params)
assert_nil(params[:data_url])
params[:data_url] = 'sdfhksdlfsdkfh'
default_to_http_if_missing(params)
assert_equal('sdfhksdlfsdkfh', params[:data_url])
end
# asset_params returns the params hash keyed by the controller's model —
# data_file by default, sop when @controller_name is 'sops'.
test 'asset params' do
@params = { content_blob: { fish: 1, soup: 2 }, data_file: { title: 'george' }, sop: { title: 'mary' } }
assert_equal({ title: 'george' }, asset_params)
@controller_name = 'sops'
assert_equal({ title: 'mary' }, asset_params)
end
# check_url_response_code returns the final HTTP status of a HEAD
# request, following 301/302 redirects to their targets.
test 'check url response code' do
stub_request(:head, 'http://bbc.co.uk/').to_return(status: 200, body: '', headers: { content_type: 'text/html', content_length: '555' })
stub_request(:head, 'http://not-there.com').to_return(status: 404, body: '', headers: {})
stub_request(:head, 'http://server-error.com').to_return(status: 500, body: '', headers: {})
stub_request(:head, 'http://forbidden.com').to_return(status: 403, body: '', headers: {})
stub_request(:head, 'http://unauthorized.com').to_return(status: 401, body: '', headers: {})
stub_request(:head, 'http://methodnotallowed.com').to_return(status: 405, body: '', headers: {})
assert_equal 200, check_url_response_code('http://bbc.co.uk')
assert_equal 404, check_url_response_code('http://not-there.com')
assert_equal 500, check_url_response_code('http://server-error.com')
assert_equal 403, check_url_response_code('http://forbidden.com')
assert_equal 401, check_url_response_code('http://unauthorized.com')
assert_equal 405, check_url_response_code('http://methodnotallowed.com')
# redirection will be followed
stub_request(:head, 'http://moved.com').to_return(status: 301, body: '', headers: { location: 'http://bbc.co.uk' })
stub_request(:head, 'http://moved2.com').to_return(status: 302, body: '', headers: { location: 'http://forbidden.com' })
assert_equal 200, check_url_response_code('http://moved.com')
assert_equal 403, check_url_response_code('http://moved2.com')
end
test 'fetch url headers' do
stub_request(:head, 'http://bbc.co.uk/').to_return(status: 200,
body: '',
headers: { content_type: 'text/html', content_length: '555' })
headers = fetch_url_headers('http://bbc.co.uk')
assert_equal 'text/html', headers[:content_type]
assert_equal '555', headers[:content_length]
stub_request(:head, 'http://somewhere.org/excel.xls').to_return(status: 200,
body: '',
headers: { content_type: 'application/vnd.ms-excel', content_length: '1111' })
headers = fetch_url_headers('http://somewhere.org/excel.xls')
assert_equal 'application/vnd.ms-excel', headers[:content_type]
assert_equal '1111', headers[:content_length]
stub_request(:head, 'http://not-there.com').to_return(status: 404, body: '', headers: {})
assert_raise RestClient::ResourceNotFound do
fetch_url_headers('http://not-there.com')
end
# follows redirection
stub_request(:head, 'http://moved.com').to_return(status: 301, body: '', headers: { location: 'http://bbc.co.uk' })
headers = fetch_url_headers('http://moved.com')
assert_equal 'text/html', headers[:content_type]
assert_equal '555', headers[:content_length]
end
test 'content type from filename' do
assert_equal 'text/html', content_type_from_filename(nil)
# FIXME: , MERGENOTE - .xml gives an incorrect mime type of sbml+xml due to the ordering
checks = [
{ f: 'test.jpg', t: 'image/jpeg' },
{ f: 'test.JPG', t: 'image/jpeg' },
{ f: 'test.png', t: 'image/png' },
{ f: 'test.PNG', t: 'image/png' },
{ f: 'test.jpeg', t: 'image/jpeg' },
{ f: 'test.JPEG', t: 'image/jpeg' },
{ f: 'test.xls', t: 'application/excel' },
{ f: 'test.doc', t: 'application/msword' },
{ f: 'test.xlsx', t: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },
{ f: 'test.docx', t: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' },
{ f: 'test.XLs', t: 'application/excel' },
{ f: 'test.Doc', t: 'application/msword' },
{ f: 'test.XLSX', t: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },
{ f: 'test.dOCx', t: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' },
{ f: 'unknown.xxx', t: 'application/octet-stream' },
{ f: nil, t: 'text/html' }
]
checks.each do |check|
assert_equal check[:t], content_type_from_filename(check[:f]), "Expected #{check[:t]} for #{check[:f]}"
end
end
test 'content is webpage?' do
assert content_is_webpage?('text/html')
assert content_is_webpage?('text/html; charset=UTF-8')
refute content_is_webpage?('application/zip')
refute content_is_webpage?(nil)
end
test 'valid uri?' do
assert valid_uri?('http://fish.com')
assert valid_uri?('http://fish.com')
assert valid_uri?('http://fish.com ')
assert valid_uri?('http://fish.com/fish.txt')
assert valid_uri?('http://fish.com/fish.txt ')
refute valid_uri?('x dd s')
refute valid_uri?(nil)
end
test 'determine_filename_from_disposition' do
assert_equal '_form.html.erb', determine_filename_from_disposition('inline; filename="_form.html.erb"')
assert_equal '_form.html.erb', determine_filename_from_disposition('inline; filename=_form.html.erb')
assert_equal '_form.html.erb', determine_filename_from_disposition('attachment; filename="_form.html.erb"')
assert_nil determine_filename_from_disposition(nil)
assert_nil determine_filename_from_disposition('')
end
test 'determine filename from url' do
assert_equal 'fred.txt', determine_filename_from_url('http://place.com/fred.txt')
assert_equal 'fred.txt', determine_filename_from_url('http://place.com/fred.txt ')
assert_equal 'jenny.txt', determine_filename_from_url('http://place.com/here/he%20/jenny.txt')
assert_nil determine_filename_from_url('http://place.com')
assert_nil determine_filename_from_url('http://place.com/')
assert_nil determine_filename_from_url('')
assert_nil determine_filename_from_url('sdfsdf')
assert_nil determine_filename_from_url(nil)
end
test 'check for data or url' do
refute check_for_data_or_url(data: '', data_url: '')
assert check_for_data_or_url(data: 'hhhh')
assert check_for_data_or_url(data_url: 'hhhh')
refute check_for_data_or_url(data: [], data_url: [])
assert check_for_data_or_url(data: ['hhhh'])
assert check_for_data_or_url(data_url: ['hhhh'])
end
test 'retained content blob ids' do
@params = { retained_content_blob_ids: [1, 2] }
assert_equal [1, 2], retained_content_blob_ids
@params = {}
assert_equal [], retained_content_blob_ids
@params = { content_blobs: nil }
assert_equal [], retained_content_blob_ids
@params = { retained_content_blob_ids: [1, 2, 3] }
assert_equal [1, 2, 3], retained_content_blob_ids
end
test 'model image present?' do
file_with_content = ActionDispatch::Http::UploadedFile.new(
filename: 'file',
content_type: 'text/plain',
tempfile: StringIO.new('fish')
)
@params = { model_image: { image_file: file_with_content }, content_blob: {}, model: { title: 'fish' } }
assert model_image_present?
@params = { model_image: {}, content_blob: {}, model: { title: 'fish' } }
refute model_image_present?
@params = { content_blob: {}, model: { title: 'fish' } }
refute model_image_present?
end
test 'check for data if present' do
file_with_content = ActionDispatch::Http::UploadedFile.new(
filename: 'file',
content_type: 'text/plain',
tempfile: StringIO.new('fish')
)
empty_content = ActionDispatch::Http::UploadedFile.new(
filename: 'file',
content_type: 'text/plain',
tempfile: StringIO.new('')
)
assert check_for_empty_data_if_present(data: '', data_url: 'http://fish')
assert check_for_empty_data_if_present(data: file_with_content, data_url: '')
assert check_for_empty_data_if_present(data: file_with_content, data_url: [])
refute check_for_empty_data_if_present(data: empty_content, data_url: '')
refute check_for_empty_data_if_present(data: empty_content, data_url: [])
refute check_for_empty_data_if_present(data: empty_content)
assert check_for_empty_data_if_present(data: [], data_url: 'http://fish')
assert check_for_empty_data_if_present(data: [file_with_content], data_url: '')
assert check_for_empty_data_if_present(data: [file_with_content], data_url: [])
refute check_for_empty_data_if_present(data: [empty_content], data_url: '')
refute check_for_empty_data_if_present(data: [empty_content], data_url: [])
refute check_for_empty_data_if_present(data: [empty_content])
refute check_for_empty_data_if_present(data: [empty_content, file_with_content])
end
# allows some methods to be tested the rely on flash.now[:error]
def flash
ActionDispatch::Flash::FlashHash.new
end
# mock out the params method, set @params for the desired params for the test
attr_reader :params
# mocks out the controller name, defaults to data_files, but can be changed by setting @controller_name
def controller_name
@controller_name || 'data_files'
end
end
OPSK-855 - Test fix
require 'test_helper'
require 'seek/upload_handling/data_upload'
require 'seek/upload_handling/examine_url'
# Tests for the Seek upload-handling mixins (DataUpload / ExamineUrl),
# revised variant (see the adjacent OPSK-855 note): scheme validation is now
# blacklist-based — INVALID_SCHEMES lists what is refused — so ssh:// passes
# and only file:// is rejected. Controller collaborators (`params`, `flash`,
# `controller_name`) are stubbed at the bottom; HTTP is faked with WebMock.
# NOTE(review): "UploadHanding" looks like a typo for "UploadHandling".
class UploadHandingTest < ActiveSupport::TestCase
  include Seek::UploadHandling::DataUpload
  include Seek::UploadHandling::ExamineUrl

  # Any scheme is allowed except those on the blacklist (currently file:).
  test 'valid scheme?' do
    assert_equal %w(file).sort, Seek::UploadHandling::ContentInspection::INVALID_SCHEMES.sort
    assert valid_scheme?('http://bbc.co.uk')
    assert valid_scheme?('https://bbc.co.uk')
    assert valid_scheme?('ftp://bbc.co.uk')
    assert valid_scheme?('ssh://bbc.co.uk')
    refute valid_scheme?('file:///secret/documents.txt')
  end

  # content_blob_params extracts the :content_blobs entry from params.
  test 'content_blob_params' do
    @params = { content_blobs: [{ fish: 1, soup: 2 }], data_file: { title: 'george' } }
    assert_equal([{ fish: 1, soup: 2 }], content_blob_params)
  end

  # A bare host/path gains an http:// prefix; existing schemes, nil and
  # non-URL strings are left untouched.
  test 'default to http if missing' do
    params = { data_url: 'fish.com/path?query=yes' }
    default_to_http_if_missing(params)
    assert_equal('http://fish.com/path?query=yes', params[:data_url])
    params[:data_url] = 'https://fish.com/path?query=yes'
    default_to_http_if_missing(params)
    assert_equal('https://fish.com/path?query=yes', params[:data_url])
    params[:data_url] = nil
    default_to_http_if_missing(params)
    assert_nil(params[:data_url])
    params[:data_url] = 'sdfhksdlfsdkfh'
    default_to_http_if_missing(params)
    assert_equal('sdfhksdlfsdkfh', params[:data_url])
  end

  # asset_params is keyed off the (stubbed) controller_name.
  test 'asset params' do
    @params = { content_blob: { fish: 1, soup: 2 }, data_file: { title: 'george' }, sop: { title: 'mary' } }
    assert_equal({ title: 'george' }, asset_params)
    @controller_name = 'sops'
    assert_equal({ title: 'mary' }, asset_params)
  end

  # HEAD requests report the HTTP status; 3xx redirects are followed and
  # the final status is returned.
  test 'check url response code' do
    stub_request(:head, 'http://bbc.co.uk/').to_return(status: 200, body: '', headers: { content_type: 'text/html', content_length: '555' })
    stub_request(:head, 'http://not-there.com').to_return(status: 404, body: '', headers: {})
    stub_request(:head, 'http://server-error.com').to_return(status: 500, body: '', headers: {})
    stub_request(:head, 'http://forbidden.com').to_return(status: 403, body: '', headers: {})
    stub_request(:head, 'http://unauthorized.com').to_return(status: 401, body: '', headers: {})
    stub_request(:head, 'http://methodnotallowed.com').to_return(status: 405, body: '', headers: {})
    assert_equal 200, check_url_response_code('http://bbc.co.uk')
    assert_equal 404, check_url_response_code('http://not-there.com')
    assert_equal 500, check_url_response_code('http://server-error.com')
    assert_equal 403, check_url_response_code('http://forbidden.com')
    assert_equal 401, check_url_response_code('http://unauthorized.com')
    assert_equal 405, check_url_response_code('http://methodnotallowed.com')
    # redirection will be followed
    stub_request(:head, 'http://moved.com').to_return(status: 301, body: '', headers: { location: 'http://bbc.co.uk' })
    stub_request(:head, 'http://moved2.com').to_return(status: 302, body: '', headers: { location: 'http://forbidden.com' })
    assert_equal 200, check_url_response_code('http://moved.com')
    assert_equal 403, check_url_response_code('http://moved2.com')
  end

  # Headers come back with symbol keys; a 404 raises
  # RestClient::ResourceNotFound, and redirects are followed.
  test 'fetch url headers' do
    stub_request(:head, 'http://bbc.co.uk/').to_return(status: 200,
                                                       body: '',
                                                       headers: { content_type: 'text/html', content_length: '555' })
    headers = fetch_url_headers('http://bbc.co.uk')
    assert_equal 'text/html', headers[:content_type]
    assert_equal '555', headers[:content_length]
    stub_request(:head, 'http://somewhere.org/excel.xls').to_return(status: 200,
                                                                    body: '',
                                                                    headers: { content_type: 'application/vnd.ms-excel', content_length: '1111' })
    headers = fetch_url_headers('http://somewhere.org/excel.xls')
    assert_equal 'application/vnd.ms-excel', headers[:content_type]
    assert_equal '1111', headers[:content_length]
    stub_request(:head, 'http://not-there.com').to_return(status: 404, body: '', headers: {})
    assert_raise RestClient::ResourceNotFound do
      fetch_url_headers('http://not-there.com')
    end
    # follows redirection
    stub_request(:head, 'http://moved.com').to_return(status: 301, body: '', headers: { location: 'http://bbc.co.uk' })
    headers = fetch_url_headers('http://moved.com')
    assert_equal 'text/html', headers[:content_type]
    assert_equal '555', headers[:content_length]
  end

  # Extension-based MIME lookup is case-insensitive; unknown extensions map
  # to application/octet-stream and a nil filename to text/html.
  test 'content type from filename' do
    assert_equal 'text/html', content_type_from_filename(nil)
    # FIXME: , MERGENOTE - .xml gives an incorrect mime type of sbml+xml due to the ordering
    checks = [
      { f: 'test.jpg', t: 'image/jpeg' },
      { f: 'test.JPG', t: 'image/jpeg' },
      { f: 'test.png', t: 'image/png' },
      { f: 'test.PNG', t: 'image/png' },
      { f: 'test.jpeg', t: 'image/jpeg' },
      { f: 'test.JPEG', t: 'image/jpeg' },
      { f: 'test.xls', t: 'application/excel' },
      { f: 'test.doc', t: 'application/msword' },
      { f: 'test.xlsx', t: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },
      { f: 'test.docx', t: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' },
      { f: 'test.XLs', t: 'application/excel' },
      { f: 'test.Doc', t: 'application/msword' },
      { f: 'test.XLSX', t: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },
      { f: 'test.dOCx', t: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' },
      { f: 'unknown.xxx', t: 'application/octet-stream' },
      { f: nil, t: 'text/html' }
    ]
    checks.each do |check|
      assert_equal check[:t], content_type_from_filename(check[:f]), "Expected #{check[:t]} for #{check[:f]}"
    end
  end

  # Webpage detection matches text/html with or without charset parameters.
  test 'content is webpage?' do
    assert content_is_webpage?('text/html')
    assert content_is_webpage?('text/html; charset=UTF-8')
    refute content_is_webpage?('application/zip')
    refute content_is_webpage?(nil)
  end

  # Trailing whitespace is tolerated; embedded spaces and nil are not.
  test 'valid uri?' do
    assert valid_uri?('http://fish.com')
    assert valid_uri?('http://fish.com') # NOTE(review): duplicate of the previous assertion
    assert valid_uri?('http://fish.com ')
    assert valid_uri?('http://fish.com/fish.txt')
    assert valid_uri?('http://fish.com/fish.txt ')
    refute valid_uri?('x dd s')
    refute valid_uri?(nil)
  end

  # Filename extraction from a Content-Disposition header; quotes around the
  # filename are optional, blank/nil dispositions yield nil.
  test 'determine_filename_from_disposition' do
    assert_equal '_form.html.erb', determine_filename_from_disposition('inline; filename="_form.html.erb"')
    assert_equal '_form.html.erb', determine_filename_from_disposition('inline; filename=_form.html.erb')
    assert_equal '_form.html.erb', determine_filename_from_disposition('attachment; filename="_form.html.erb"')
    assert_nil determine_filename_from_disposition(nil)
    assert_nil determine_filename_from_disposition('')
  end

  # Last path segment becomes the filename; URLs without one yield nil.
  test 'determine filename from url' do
    assert_equal 'fred.txt', determine_filename_from_url('http://place.com/fred.txt')
    assert_equal 'fred.txt', determine_filename_from_url('http://place.com/fred.txt ')
    assert_equal 'jenny.txt', determine_filename_from_url('http://place.com/here/he%20/jenny.txt')
    assert_nil determine_filename_from_url('http://place.com')
    assert_nil determine_filename_from_url('http://place.com/')
    assert_nil determine_filename_from_url('')
    assert_nil determine_filename_from_url('sdfsdf')
    assert_nil determine_filename_from_url(nil)
  end

  # At least one of :data / :data_url must be present and non-empty.
  test 'check for data or url' do
    refute check_for_data_or_url(data: '', data_url: '')
    assert check_for_data_or_url(data: 'hhhh')
    assert check_for_data_or_url(data_url: 'hhhh')
    refute check_for_data_or_url(data: [], data_url: [])
    assert check_for_data_or_url(data: ['hhhh'])
    assert check_for_data_or_url(data_url: ['hhhh'])
  end

  # Missing :retained_content_blob_ids falls back to an empty array.
  test 'retained content blob ids' do
    @params = { retained_content_blob_ids: [1, 2] }
    assert_equal [1, 2], retained_content_blob_ids
    @params = {}
    assert_equal [], retained_content_blob_ids
    @params = { content_blobs: nil }
    assert_equal [], retained_content_blob_ids
    @params = { retained_content_blob_ids: [1, 2, 3] }
    assert_equal [1, 2, 3], retained_content_blob_ids
  end

  # True only when params contain a model_image with an actual image_file.
  test 'model image present?' do
    file_with_content = ActionDispatch::Http::UploadedFile.new(
      filename: 'file',
      content_type: 'text/plain',
      tempfile: StringIO.new('fish')
    )
    @params = { model_image: { image_file: file_with_content }, content_blob: {}, model: { title: 'fish' } }
    assert model_image_present?
    @params = { model_image: {}, content_blob: {}, model: { title: 'fish' } }
    refute model_image_present?
    @params = { content_blob: {}, model: { title: 'fish' } }
    refute model_image_present?
  end

  # Zero-byte uploads are rejected (false), whether supplied singly or in
  # arrays; a URL-only submission or non-empty upload passes.
  test 'check for data if present' do
    file_with_content = ActionDispatch::Http::UploadedFile.new(
      filename: 'file',
      content_type: 'text/plain',
      tempfile: StringIO.new('fish')
    )
    empty_content = ActionDispatch::Http::UploadedFile.new(
      filename: 'file',
      content_type: 'text/plain',
      tempfile: StringIO.new('')
    )
    assert check_for_empty_data_if_present(data: '', data_url: 'http://fish')
    assert check_for_empty_data_if_present(data: file_with_content, data_url: '')
    assert check_for_empty_data_if_present(data: file_with_content, data_url: [])
    refute check_for_empty_data_if_present(data: empty_content, data_url: '')
    refute check_for_empty_data_if_present(data: empty_content, data_url: [])
    refute check_for_empty_data_if_present(data: empty_content)
    assert check_for_empty_data_if_present(data: [], data_url: 'http://fish')
    assert check_for_empty_data_if_present(data: [file_with_content], data_url: '')
    assert check_for_empty_data_if_present(data: [file_with_content], data_url: [])
    refute check_for_empty_data_if_present(data: [empty_content], data_url: '')
    refute check_for_empty_data_if_present(data: [empty_content], data_url: [])
    refute check_for_empty_data_if_present(data: [empty_content])
    refute check_for_empty_data_if_present(data: [empty_content, file_with_content])
  end

  # allows some methods to be tested that rely on flash.now[:error]
  def flash
    ActionDispatch::Flash::FlashHash.new
  end

  # mock out the params method, set @params for the desired params for the test
  attr_reader :params

  # mocks out the controller name, defaults to data_files, but can be changed by setting @controller_name
  def controller_name
    @controller_name || 'data_files'
  end
end
|
require 'helper'
require 'date'
module ArelExtensions
module WthAr
class ListTest < Minitest::Test
require 'minitest/pride'
# Connects ActiveRecord to the database selected by ENV['DB'], falling back
# to sqlite (or jdbc-sqlite under JRuby) when unset or unsupported, then
# installs the helper SQL functions the tests rely on.
# Side effects: sets @env_db and @cnx, and the globals $sqlite /
# $load_extension_disabled that individual tests consult when deciding
# whether to skip.
def connect_db
  ActiveRecord::Base.configurations = YAML.load_file('test/database.yml')
  # Oracle cannot be driven from Rubinius or JRuby; skip the whole test.
  if ENV['DB'] == 'oracle' && ((defined?(RUBY_ENGINE) && RUBY_ENGINE == "rbx") || (RUBY_PLATFORM == 'java')) # not supported
    @env_db = (RUBY_PLATFORM == 'java' ? "jdbc-sqlite" : 'sqlite')
    skip "Platform not supported"
  else
    @env_db = ENV['DB']
  end
  ActiveRecord::Base.establish_connection(@env_db.try(:to_sym) || (RUBY_PLATFORM == 'java' ? :"jdbc-sqlite" : :sqlite))
  ActiveRecord::Base.default_timezone = :utc
  @cnx = ActiveRecord::Base.connection
  # $sqlite is a MatchData/nil truthy flag, not a strict boolean.
  $sqlite = @cnx.adapter_name =~ /sqlite/i
  $load_extension_disabled ||= false
  csf = CommonSqlFunctions.new(@cnx)
  csf.add_sql_functions(@env_db)
end
# (Re)creates the two scratch tables backing the User and Product test
# models; pre-existing tables are dropped first (`rescue nil` swallows the
# error on a clean database).
def setup_db
  @cnx.drop_table(:user_tests) rescue nil
  @cnx.create_table :user_tests do |t|
    t.column :age, :integer
    t.column :name, :string
    t.column :comments, :text
    t.column :created_at, :date
    t.column :updated_at, :datetime
    t.column :score, :decimal, :precision => 20, :scale => 10
  end
  @cnx.drop_table(:product_tests) rescue nil
  @cnx.create_table :product_tests do |t|
    t.column :price, :decimal, :precision => 20, :scale => 10
  end
end
# Minimal ActiveRecord models bound to the scratch tables built in setup_db.
class User < ActiveRecord::Base
  self.table_name = 'user_tests'
end

class Product < ActiveRecord::Base
  self.table_name = 'product_tests'
end
# Builds the per-test fixture set: eight users with known ages/scores (one
# nil age, one negative age with a CSV-style comments field, one
# whitespace-only comments field) plus single-row relations (@lucas etc.)
# and Arel attribute shortcuts (@age, @name, ...) used by every assertion.
def setup
  d = Date.new(2016, 5, 23)
  connect_db
  setup_db
  u = User.create :age => 5, :name => "Lucas", :created_at => d, :score => 20.16, :updated_at => Time.utc(2014, 3, 3, 12, 42, 0)
  @lucas = User.where(:id => u.id)
  u = User.create :age => 15, :name => "Sophie", :created_at => d, :score => 20.16
  @sophie = User.where(:id => u.id)
  u = User.create :age => 20, :name => "Camille", :created_at => d, :score => -20.16
  @camille = User.where(:id => u.id)
  u = User.create :age => 21, :name => "Arthur", :created_at => d, :score => 65.62
  @arthur = User.where(:id => u.id)
  # Myung's comments field is a single space — used by the blank/trim tests.
  u = User.create :age => 23, :name => "Myung", :created_at => d, :score => 20.16, :comments => ' '
  @myung = User.where(:id => u.id)
  u = User.create :age => 25, :name => "Laure", :created_at => d, :score => 20.16
  @laure = User.where(:id => u.id)
  u = User.create :age => nil, :name => "Test", :created_at => d, :score => 1.62
  @test = User.where(:id => u.id)
  # Negative age plus a comma-separated comments list for FIND_IN_SET tests.
  u = User.create :age => -42, :name => "Negatif", :comments => '1,22,3,42,2', :created_at => d, :updated_at => d.to_time, :score => 0.17
  @neg = User.where(:id => u.id)
  @age = User.arel_table[:age]
  @name = User.arel_table[:name]
  @score = User.arel_table[:score]
  @created_at = User.arel_table[:created_at]
  @updated_at = User.arel_table[:updated_at]
  @comments = User.arel_table[:comments]
  @price = Product.arel_table[:price]
  # Deliberately references a column that does not exist in the table.
  @not_in_table = User.arel_table[:not_in_table]
  @ut = User.arel_table
  @pt = Product.arel_table
end
# Drops the scratch tables after each test so every run starts clean.
def teardown
  @cnx.drop_table(:user_tests)
  @cnx.drop_table(:product_tests)
end
# Evaluates +node+ as a SELECT expression aliased 'res' on +scope+ and
# returns its value from the first result row.
def t(scope, node)
  first_row = scope.select(node.as('res')).first
  first_row.res
end
# Math Functions
def test_classical_arel
  assert_in_epsilon 42.16, t(@laure, @score + 22), 0.01
end

# abs works on negative integers, negative decimals and computed expressions.
def test_abs
  assert_equal 42, t(@neg, @age.abs)
  assert_equal 20.16, t(@camille, @score.abs)
  assert_equal 14, t(@laure, (@age - 39).abs)
  assert_equal 28, t(@laure, (@age - 39).abs + (@age - 39).abs)
end

# ceil rounds toward positive infinity, including for negative values.
def test_ceil
  # skip "Sqlite version can't load extension for ceil" if $sqlite && $load_extension_disabled
  assert_equal 2, t(@test, @score.ceil) # 1.62
  assert_equal(-20, t(@camille, @score.ceil)) # -20.16
  assert_equal(-20, t(@camille, (@score - 0.5).ceil)) # -20.16
  assert_equal 63, t(@arthur, @age.ceil + 42)
end

# floor rounds toward negative infinity.
def test_floor
  # skip "Sqlite version can't load extension for floor" if $sqlite && $load_extension_disabled
  assert_equal 0, t(@neg, @score.floor)
  assert_equal 1, t(@test, @score.floor) # 1.62
  assert_equal(-9, t(@test, (@score - 10).floor)) # 1.62
  assert_equal 42, t(@arthur, @score.floor - 23)
end

# Arel.rand produces a value and is usable in ORDER BY (8 fixture rows).
def test_rand
  assert 42 != User.select(Arel.rand.as('res')).first.res
  assert 0 <= User.select(Arel.rand.abs.as('res')).first.res
  assert_equal 8, User.order(Arel.rand).limit(50).count
end

# round with explicit precision, including negative precision (tens place).
def test_round
  assert_equal 1, User.where(@age.round(0).eq(5.0)).count
  assert_equal 0, User.where(@age.round(-1).eq(6.0)).count
  assert_equal 66, t(@arthur, @score.round)
  assert_in_epsilon 67.6, t(@arthur, @score.round(1) + 2), 0.01
end

# Aggregate SUM combined with arithmetic; SQL Server needs a reorder
# workaround, hence the separate (currently skipped) branch.
def test_sum
  if @env_db == 'mssql'
    skip "SQL Server forces order?" # TODO
    assert_equal 68, User.select((@age.sum + 1).as("res"), User.arel_table[:id].sum).take(50).reorder(@age).first.res
    assert_equal 134, User.reorder(nil).select((@age.sum + @age.sum).as("res"), User.arel_table[:id].sum).take(50).first.res
    assert_equal 201, User.reorder(nil).select(((@age * 3).sum).as("res"), User.arel_table[:id].sum).take(50).first.res
    assert_equal 4009, User.reorder(nil).select(((@age * @age).sum).as("res"), User.arel_table[:id].sum).take(50).first.res
  else
    assert_equal 68, User.select((@age.sum + 1).as("res")).take(50).first.res
    assert_equal 134, User.select((@age.sum + @age.sum).as("res")).take(50).first.res
    assert_equal 201, User.select(((@age * 3).sum).as("res")).take(50).first.res
    assert_equal 4009, User.select(((@age * @age).sum).as("res")).take(50).first.res
  end
end
# String Functions

# String concatenation via `+`, plus group_concat/listagg; PostgreSQL needs
# an ordered subquery, Oracle's default separator is empty, and SQL Server
# has no portable equivalent (skipped).
def test_concat
  assert_equal 'Camille Camille', t(@camille, @name + ' ' + @name)
  assert_equal 'Laure 2', t(@laure, @name + ' ' + 2)
  assert_equal 'Test Laure', t(@laure, Arel::Nodes.build_quoted('Test ') + @name)
  skip "TODO: find a way... to do group_concat/listagg in SQL Server" if @env_db == 'mssql'
  if @env_db == 'postgresql'
    assert_equal "Lucas Sophie", t(User.reorder(nil).from(User.select(:name).where(:name => ['Lucas', 'Sophie']).reorder(:name).as('user_tests')), @name.group_concat(' '))
    assert_equal "Lucas,Sophie", t(User.reorder(nil).from(User.select(:name).where(:name => ['Lucas', 'Sophie']).reorder(:name).as('user_tests')), @name.group_concat(','))
    assert_equal "Lucas Sophie", t(User.reorder(nil).from(User.select(:name).where(:name => ['Lucas', 'Sophie']).reorder(:name).as('user_tests')), @name.group_concat)
  else
    assert_equal "Lucas Sophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat(' '))
    assert_equal "Lucas,Sophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat(','))
    if @env_db == 'oracle'
      assert_equal "LucasSophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat)
    else
      assert_equal "Lucas,Sophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat)
    end
  end
end

def test_length
  assert_equal 7, t(@camille, @name.length)
  assert_equal 7, t(@camille, @name.length.round.abs)
  assert_equal 42, t(@laure, @name.length + 37)
end

# locate is 1-based; 0 means "not found".
def test_locate
  skip "Sqlite version can't load extension for locate" if $sqlite && $load_extension_disabled
  assert_equal 1, t(@camille, @name.locate("C"))
  assert_equal 0, t(@lucas, @name.locate("z"))
  assert_equal 5, t(@lucas, @name.locate("s"))
end

# substring is 1-based; the [] alias is 0-based and accepts ranges. Oracle
# returns NULL (not '') for out-of-range starts.
def test_substring
  assert_equal 'C', t(@camille, @name.substring(1, 1))
  if @env_db == 'oracle'
    assert_nil(t(@lucas, @name.substring(42)))
  else
    assert_equal('', t(@lucas, @name.substring(42)))
  end
  assert_equal 'Lu', t(@lucas, @name.substring(1,2))
  assert_equal 'C', t(@camille, @name[0, 1])
  assert_equal 'C', t(@camille, @name[0])
  if @env_db == 'oracle'
    assert_nil(t(@lucas, @name[42]))
  else
    assert_equal('', t(@lucas, @name[42]))
  end
  assert_equal 'Lu', t(@lucas, @name[0,2])
  assert_equal 'Lu', t(@lucas, @name[0..1])
end

# The & operator maps to FIND_IN_SET against the CSV comments fixture.
def test_find_in_set
  skip "Sqlite version can't load extension for find_in_set" if $sqlite && $load_extension_disabled
  skip "SQL Server does not know about FIND_IN_SET" if @env_db == 'mssql'
  assert_equal 5, t(@neg, @comments & 2)
  assert_equal 0, t(@neg, @comments & 6) # not found
end

# Lexicographic >= / <= on strings; result may be 1 or a real boolean
# depending on adapter/AR version.
def test_string_comparators
  skip "Oracle can't use math operators to compare strings" if @env_db == 'oracle' # use GREATEST ?
  skip "SQL Server can't use math operators to compare strings" if @env_db == 'mssql' # use GREATEST ?
  if @env_db == 'postgresql' # may return real boolean
    assert t(@neg, @name >= 'Mest') == true || t(@neg, @name >= 'Mest') == 't' # depends of ar version
    assert t(@neg, @name <= (@name + 'Z')) == true || t(@neg, @name <= (@name + 'Z')) == 't'
  else
    assert_equal 1, t(@neg, @name >= 'Mest')
    assert_equal 1, t(@neg, @name <= (@name + 'Z'))
  end
end

# =~ / !~ accept both string patterns and Ruby Regexp literals.
def test_regexp_not_regexp
  skip "Sqlite version can't load extension for regexp" if $sqlite && $load_extension_disabled
  skip "SQL Server does not know about REGEXP without extensions" if @env_db == 'mssql'
  assert_equal 1, User.where(@name =~ '^M').count
  assert_equal 6, User.where(@name !~ '^L').count
  assert_equal 1, User.where(@name =~ /^M/).count
  assert_equal 6, User.where(@name !~ /^L/).count
end

# Case-insensitive LIKE variants.
def test_imatches
  assert_equal 1, User.where(@name.imatches('m%')).count
  assert_equal 4, User.where(@name.imatches_any(['L%', '%e'])).count
  assert_equal 6, User.where(@name.idoes_not_match('L%')).count
end

def test_replace
  assert_equal "LucaX", t(@lucas, @name.replace("s", "X"))
  assert_equal "replace", t(@lucas, @name.replace(@name, "replace"))
end

# Single-occurrence replace built from locate/substring — not implemented.
def test_replace_once
  skip "TODO"
  # skip "Sqlite version can't load extension for locate" if $sqlite && $load_extension_disabled
  assert_equal "LuCas", t(@lucas, @name.substring(1, @name.locate('c') - 1) + 'C' + @name.substring(@name.locate('c') + 1, @name.length))
end

def test_soundex
  skip "Sqlite version can't load extension for soundex" if $sqlite && $load_extension_disabled
  skip "PostgreSql version can't load extension for soundex" if @env_db == 'postgresql'
  assert_equal "C540", t(@camille, @name.soundex)
  assert_equal 8, User.where(@name.soundex.eq(@name.soundex)).count
end

def test_change_case
  assert_equal "myung", t(@myung, @name.downcase)
  assert_equal "MYUNG", t(@myung, @name.upcase)
  assert_equal "myung", t(@myung, @name.upcase.downcase)
end

# trim/ltrim/rtrim with default whitespace and custom characters; Oracle
# refuses multi-character trim sets.
def test_trim
  assert_equal "Myung", t(@myung, @name.trim)
  assert_equal "Myung", t(@myung, @name.trim.ltrim.rtrim)
  assert_equal "Myun", t(@myung, @name.rtrim("g"))
  assert_equal "yung", t(@myung, @name.ltrim("M"))
  assert_equal "yung", t(@myung, (@name + "M").trim("M"))
  skip "Oracle does not accept multi char trim" if @env_db == 'oracle'
  assert_equal "", t(@myung, @name.rtrim(@name))
end

# blank/not_blank predicates; a whitespace-only comments field (Myung)
# counts as blank, and `.then` converts the predicate into a value.
def test_blank
  if @env_db == 'postgresql'
    assert_includes [false, 'f'], t(@myung, @name.blank) # depends of adapter
    assert_includes [true, 't'], t(@myung, @name.not_blank) # depends of adapter
    assert_includes [true, 't'], t(@myung, @comments.blank)
    assert_includes [false, 'f'], t(@myung, @comments.not_blank)
  end
  assert_equal 0, @myung.where(@name.blank).count
  assert_equal 1, @myung.where(@name.not_blank).count
  assert_equal 1, @myung.where(@comments.blank).count
  assert_equal 0, @neg.where(@comments.blank).count
  assert_equal 1, @neg.where(@comments.not_blank).count
  assert_equal 0, @myung.where(@comments.not_blank).count
  assert_equal 'false', t(@myung, @name.blank.then('true', 'false'))
  assert_equal 'true', t(@myung, @name.not_blank.then('true', 'false'))
  assert_equal 'true', t(@myung, @comments.blank.then('true', 'false'))
  assert_equal 'false', t(@myung, @comments.not_blank.then('true', 'false'))
  assert_equal 'false', t(@neg, @comments.blank.then('true', 'false'))
  assert_equal 'true', t(@neg, @comments.not_blank.then('true', 'false'))
end

# strftime-style date/datetime formatting in SQL.
def test_format
  assert_equal '2016-05-23', t(@lucas, @created_at.format('%Y-%m-%d'))
  skip "SQL Server does not accept any format" if @env_db == 'mssql'
  assert_equal '2014/03/03 12:42:00', t(@lucas, @updated_at.format('%Y/%m/%d %H:%M:%S'))
end

# COALESCE over strings and integers; Oracle treats '' as NULL, and
# PostgreSQL is stricter about mixing types in the argument list.
def test_coalesce
  assert_equal 'Camille concat', t(@camille, @name.coalesce(nil, "default") + ' concat')
  assert_equal ' ', t(@myung, @comments.coalesce("Myung").coalesce('ignored'))
  assert_equal 'Laure', t(@laure, @comments.coalesce("Laure"))
  if @env_db == 'oracle'
    assert_nil t(@laure, @comments.coalesce(""))
  else
    assert_equal('', t(@laure, @comments.coalesce("")))
  end
  if @env_db == 'postgresql'
    assert_equal 100, t(@test, @age.coalesce(100))
    assert_equal "Camille", t(@camille, @name.coalesce(nil, "default"))
    assert_equal 20, t(@test, @age.coalesce(nil, 20))
  else
    assert_equal "Camille", t(@camille, @name.coalesce(nil, '20'))
    assert_equal 20, t(@test, @age.coalesce(nil, 20))
  end
end
# Comparators
def test_number_comparator
assert_equal 2, User.where(@age < 6).count
assert_equal 2, User.where(@age <= 10).count
assert_equal 3, User.where(@age > 20).count
assert_equal 4, User.where(@age >= 20).count
assert_equal 1, User.where(@age > 5).where(@age < 20).count
end
def test_date_comparator
d = Date.new(2016, 5, 23)
assert_equal 0, User.where(@created_at < d).count
assert_equal 8, User.where(@created_at >= d).count
end
def test_date_duration
#Year
assert_equal 2016, t(@lucas, @created_at.year).to_i
assert_equal 0, User.where(@created_at.year.eq("2012")).count
#Month
assert_equal 5, t(@camille, @created_at.month).to_i
assert_equal 8, User.where(@created_at.month.eq("05")).count
#Week
assert_equal(@env_db == 'mssql' ? 22 : 21, t(@arthur, @created_at.week).to_i)
assert_equal 8, User.where(@created_at.month.eq("05")).count
#Day
assert_equal 23, t(@laure, @created_at.day).to_i
assert_equal 0, User.where(@created_at.day.eq("05")).count
skip "manage DATE" if @env_db == 'oracle'
#Hour
assert_equal 0, t(@laure, @created_at.hour).to_i
assert_equal 12, t(@lucas, @updated_at.hour).to_i
#Minute
assert_equal 0, t(@laure, @created_at.minute).to_i
assert_equal 42, t(@lucas, @updated_at.minute).to_i
#Second
assert_equal 0, t(@laure, @created_at.second).to_i
assert_equal 0, t(@lucas, @updated_at.second).to_i
end
# Datetime subtraction (interval arithmetic). @lucas.updated_at is fixed at
# 2014-03-03 12:42:00 UTC in setup. Oracle under Arel >= 7 returns day-based
# fractions scaled by 86400; mssql/oracle cannot project bare booleans, so
# the comparison is pushed into a WHERE clause there. Duration addition on
# dates is only asserted for mysql.
def test_datetime_diff
assert_equal 0, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 42)).to_i
if @env_db == 'oracle' && Arel::VERSION.to_i > 6 # in rails 5, result is multiplied by 24*60*60 = 86400...
assert_equal 42 * 86400, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 41, 18)).to_i
assert_equal(-3600 * 86400, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 13, 42)).to_i)
else
assert_equal 42, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 41, 18)).to_i
assert_equal(-3600, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 13, 42)).to_i)
if @env_db == 'mssql' || @env_db == 'oracle' # can't select booleans
assert_equal 0, @lucas.where((@updated_at - Time.utc(2014, 3, 3, 12, 41, 18)) < -1).count
else
assert_includes [nil, 0, 'f', false], t(@lucas, (@updated_at - Time.utc(2014, 3, 3, 12, 41, 18)) < -1)
end
if @env_db == 'mysql'
date1 = Date.new(2016, 5, 23)
durPos = 10.years
durNeg = -10.years
date2 = date1 + durPos
date3 = date1 - durPos
# Pull Request #5 tests
assert_includes [date2,"2026-05-23"], t(@test,(@created_at + durPos))
assert_includes [date3,"2006-05-23"], t(@test,(@created_at + durNeg))
# we test with the ruby object or the string because some adapters don't return an object Date
end
end
end
# TODO; cast types
# Placeholder: casting a decimal column for regex matching is not
# implemented yet, so the test is unconditionally skipped.
def test_cast_types
skip "not implemented yet"
assert_equal true, t(@arthur, @score =~ /22/)
end
# IS NULL predicate: only the "Test" fixture has a nil age.
def test_is_null
assert_equal "Test", User.where(@age.is_null).select(@name).first.name
end
# Overloaded + operator: string concatenation when the left operand is a
# text column (numbers/dates are stringified), numeric addition on integer
# columns, and date offsetting (integer days or ActiveSupport durations).
def test_math_plus
d = Date.new(1997, 6, 15)
#Concat String
assert_equal "SophiePhan", t(@sophie, @name + "Phan")
assert_equal "Sophie2", t(@sophie, @name + 2)
assert_equal "Sophie1997-06-15", t(@sophie, @name + d)
assert_equal "Sophie15", t(@sophie, @name + @age)
assert_equal "SophieSophie", t(@sophie, @name + @name)
#FIXME: should work as expected in Oracle
assert_equal "Sophie2016-05-23", t(@sophie, @name + @created_at) unless @env_db == 'oracle'
#concat Integer
assert_equal 1, User.where((@age + 10).eq(33)).count
assert_equal 1, User.where((@age + "1").eq(6)).count
assert_equal 1, User.where((@age + @age).eq(10)).count
#concat Date
# puts((User.arel_table[:created_at] + 1).as("res").to_sql.inspect)
assert_equal "2016-05-24", t(@myung, @created_at + 1).to_date.to_s
assert_equal "2016-05-25", t(@myung, @created_at + 2.day).to_date.to_s
end
# Overloaded - operator: date difference (in days) between date columns/
# literals, and numeric subtraction with integer, string-encoded integer
# and string-encoded float right-hand sides.
def test_math_minus
d = Date.new(2016, 5, 20)
#Datediff
assert_equal 8, User.where((@created_at - @created_at).eq(0)).count
assert_equal 3, @laure.select((@created_at - d).as("res")).first.res.abs.to_i
#Substraction
assert_equal 0, User.where((@age - 10).eq(50)).count
assert_equal 0, User.where((@age - "10").eq(50)).count
# assert_equal 0, User.where((@age - 9.5).eq(50.5)).count # should work: TODO
assert_equal 0, User.where((@age - "9.5").eq(50.5)).count
end
# Day-of-week extraction; oracle/mssql number days from 1 (Sunday-based),
# others from 0, hence the conditional expectation for Monday 2016-05-23.
def test_wday
d = Date.new(2016, 6, 26)
assert_equal(@env_db == 'oracle' || @env_db == 'mssql' ? 2 : 1, t(@myung, @created_at.wday).to_i) # monday
# NOTE(review): `.as("res")` is called on the relation rather than on the
# projected node (compare the t() helper) — confirm this is intentional.
assert_equal 0, User.select(d.wday).as("res").first.to_i
end
# Boolean functions
# Chained boolean conjunction of three round() predicates on @laure's score.
# NOTE(review): the 'โ' method name looks like a mis-encoded Unicode
# operator from arel_extensions (likely '⋀', logical AND) — confirm the
# file's encoding before editing this line.
def test_boolean_functions
assert_equal 1, @laure.where(
(@score.round > 19).โ(@score.round < 21).โ(@score.round(1) >= 20.1)
).count
end
# Union operator
# `+` on Arel select managers builds UNION (deduplicating) while
# union_all keeps duplicates; both are exercised via raw find_by_sql and
# as a FROM subquery aliased 'my_union'.
def test_union_operator
assert_equal 3, User.find_by_sql((@ut.project(@age).where(@age.gt(22)) + @ut.project(@age).where(@age.lt(0))).to_sql).length
assert_equal 2, User.find_by_sql((@ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(21))).to_sql).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.gt(22)) + @ut.project(@age).where(@age.lt(0))).as('my_union')).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(23)) + @ut.project(@age).where(@age.eq(21))).as('my_union')).length
assert_equal 2, User.select('*').from((@ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(21))).as('my_union')).length
assert_equal 3, User.find_by_sql((@ut.project(@age).where(@age.gt(22)).union_all(@ut.project(@age).where(@age.lt(0)))).to_sql).length
assert_equal 3, User.find_by_sql((@ut.project(@age).where(@age.eq(20)).union_all(@ut.project(@age).where(@age.eq(20))).union_all(@ut.project(@age).where(@age.eq(21)))).to_sql).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.gt(22)).union_all(@ut.project(@age).where(@age.lt(0)))).as('my_union')).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.eq(20)).union_all(@ut.project(@age).where(@age.eq(23))).union_all(@ut.project(@age).where(@age.eq(21)))).as('my_union')).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.eq(20)).union_all(@ut.project(@age).where(@age.eq(20))).union_all(@ut.project(@age).where(@age.eq(21)))).as('my_union')).length
end
# Case clause
# Ensures CASE WHEN score = 20.16 THEN 1 ELSE 0 END builds valid SQL: four
# fixture rows carry score 20.16, so the per-row flags sum to 4.
# Fix: removed a leftover debug `puts ...to_sql` that printed the generated
# SQL to stdout on every run (the later revision of this file also drops it).
def test_case
assert_equal 4, User.find_by_sql(@ut.project(@score.when(20.16).then(1).else(0).as('score_sum')).to_sql).sum(&:score_sum)
end
end
end
end
Adding case clause.
require 'helper'
require 'date'
module ArelExtensions
module WthAr
# Functional test suite for arel_extensions run against a live database.
# The adapter is selected via ENV['DB'] (sqlite by default, jdbc-sqlite
# under JRuby); many tests branch on @env_db because backends disagree on
# SQL semantics. NOTE(review): the original indentation of this file was
# lost; code is kept byte-identical.
class ListTest < Minitest::Test
require 'minitest/pride'
# Loads test/database.yml, picks the adapter from ENV['DB'] (skipping
# unsupported oracle-on-rbx/JRuby combinations), connects ActiveRecord,
# records the connection in @cnx, and installs helper SQL functions for
# the chosen backend via CommonSqlFunctions.
def connect_db
ActiveRecord::Base.configurations = YAML.load_file('test/database.yml')
if ENV['DB'] == 'oracle' && ((defined?(RUBY_ENGINE) && RUBY_ENGINE == "rbx") || (RUBY_PLATFORM == 'java')) # not supported
@env_db = (RUBY_PLATFORM == 'java' ? "jdbc-sqlite" : 'sqlite')
skip "Platform not supported"
else
@env_db = ENV['DB']
end
ActiveRecord::Base.establish_connection(@env_db.try(:to_sym) || (RUBY_PLATFORM == 'java' ? :"jdbc-sqlite" : :sqlite))
ActiveRecord::Base.default_timezone = :utc
@cnx = ActiveRecord::Base.connection
$sqlite = @cnx.adapter_name =~ /sqlite/i
$load_extension_disabled ||= false
csf = CommonSqlFunctions.new(@cnx)
csf.add_sql_functions(@env_db)
end
# (Re)creates the scratch tables backing the User and Product models.
def setup_db
@cnx.drop_table(:user_tests) rescue nil
@cnx.create_table :user_tests do |t|
t.column :age, :integer
t.column :name, :string
t.column :comments, :text
t.column :created_at, :date
t.column :updated_at, :datetime
t.column :score, :decimal, :precision => 20, :scale => 10
end
@cnx.drop_table(:product_tests) rescue nil
@cnx.create_table :product_tests do |t|
t.column :price, :decimal, :precision => 20, :scale => 10
end
end
# Minimal AR models bound to the scratch tables above.
class User < ActiveRecord::Base
self.table_name = 'user_tests'
end
class Product < ActiveRecord::Base
self.table_name = 'product_tests'
end
# Creates eight User fixtures (all dated 2016-05-23) and caches one-row
# scopes (@lucas, @sophie, ...) plus Arel attribute nodes (@age, @name,
# ...) used throughout the assertions.
def setup
d = Date.new(2016, 5, 23)
connect_db
setup_db
u = User.create :age => 5, :name => "Lucas", :created_at => d, :score => 20.16, :updated_at => Time.utc(2014, 3, 3, 12, 42, 0)
@lucas = User.where(:id => u.id)
u = User.create :age => 15, :name => "Sophie", :created_at => d, :score => 20.16
@sophie = User.where(:id => u.id)
u = User.create :age => 20, :name => "Camille", :created_at => d, :score => -20.16
@camille = User.where(:id => u.id)
u = User.create :age => 21, :name => "Arthur", :created_at => d, :score => 65.62
@arthur = User.where(:id => u.id)
u = User.create :age => 23, :name => "Myung", :created_at => d, :score => 20.16, :comments => ' '
@myung = User.where(:id => u.id)
u = User.create :age => 25, :name => "Laure", :created_at => d, :score => 20.16
@laure = User.where(:id => u.id)
u = User.create :age => nil, :name => "Test", :created_at => d, :score => 1.62
@test = User.where(:id => u.id)
u = User.create :age => -42, :name => "Negatif", :comments => '1,22,3,42,2', :created_at => d, :updated_at => d.to_time, :score => 0.17
@neg = User.where(:id => u.id)
@age = User.arel_table[:age]
@name = User.arel_table[:name]
@score = User.arel_table[:score]
@created_at = User.arel_table[:created_at]
@updated_at = User.arel_table[:updated_at]
@comments = User.arel_table[:comments]
@price = Product.arel_table[:price]
@not_in_table = User.arel_table[:not_in_table]
@ut = User.arel_table
@pt = Product.arel_table
end
# Drops the scratch tables after each test.
def teardown
@cnx.drop_table(:user_tests)
@cnx.drop_table(:product_tests)
end
# Projects +node+ AS "res" over +scope+ and returns the value from the
# first row — the workhorse helper for single-value assertions below.
def t(scope, node)
scope.select(node.as('res')).first.res
end
# Math Functions
def test_classical_arel
assert_in_epsilon 42.16, t(@laure, @score + 22), 0.01
end
def test_abs
assert_equal 42, t(@neg, @age.abs)
assert_equal 20.16, t(@camille, @score.abs)
assert_equal 14, t(@laure, (@age - 39).abs)
assert_equal 28, t(@laure, (@age - 39).abs + (@age - 39).abs)
end
def test_ceil
# skip "Sqlite version can't load extension for ceil" if $sqlite && $load_extension_disabled
assert_equal 2, t(@test, @score.ceil) # 1.62
assert_equal(-20, t(@camille, @score.ceil)) # -20.16
assert_equal(-20, t(@camille, (@score - 0.5).ceil)) # -20.16
assert_equal 63, t(@arthur, @age.ceil + 42)
end
def test_floor
# skip "Sqlite version can't load extension for floor" if $sqlite && $load_extension_disabled
assert_equal 0, t(@neg, @score.floor)
assert_equal 1, t(@test, @score.floor) # 1.62
assert_equal(-9, t(@test, (@score - 10).floor)) # 1.62
assert_equal 42, t(@arthur, @score.floor - 23)
end
def test_rand
assert 42 != User.select(Arel.rand.as('res')).first.res
assert 0 <= User.select(Arel.rand.abs.as('res')).first.res
assert_equal 8, User.order(Arel.rand).limit(50).count
end
def test_round
assert_equal 1, User.where(@age.round(0).eq(5.0)).count
assert_equal 0, User.where(@age.round(-1).eq(6.0)).count
assert_equal 66, t(@arthur, @score.round)
assert_in_epsilon 67.6, t(@arthur, @score.round(1) + 2), 0.01
end
# SQL Server implicitly orders aggregated projections, hence the reorder
# gymnastics in the mssql branch (currently skipped — see TODO).
def test_sum
if @env_db == 'mssql'
skip "SQL Server forces order?" # TODO
assert_equal 68, User.select((@age.sum + 1).as("res"), User.arel_table[:id].sum).take(50).reorder(@age).first.res
assert_equal 134, User.reorder(nil).select((@age.sum + @age.sum).as("res"), User.arel_table[:id].sum).take(50).first.res
assert_equal 201, User.reorder(nil).select(((@age * 3).sum).as("res"), User.arel_table[:id].sum).take(50).first.res
assert_equal 4009, User.reorder(nil).select(((@age * @age).sum).as("res"), User.arel_table[:id].sum).take(50).first.res
else
assert_equal 68, User.select((@age.sum + 1).as("res")).take(50).first.res
assert_equal 134, User.select((@age.sum + @age.sum).as("res")).take(50).first.res
assert_equal 201, User.select(((@age * 3).sum).as("res")).take(50).first.res
assert_equal 4009, User.select(((@age * @age).sum).as("res")).take(50).first.res
end
end
# String Functions
def test_concat
assert_equal 'Camille Camille', t(@camille, @name + ' ' + @name)
assert_equal 'Laure 2', t(@laure, @name + ' ' + 2)
assert_equal 'Test Laure', t(@laure, Arel::Nodes.build_quoted('Test ') + @name)
skip "TODO: find a way... to do group_concat/listagg in SQL Server" if @env_db == 'mssql'
if @env_db == 'postgresql'
assert_equal "Lucas Sophie", t(User.reorder(nil).from(User.select(:name).where(:name => ['Lucas', 'Sophie']).reorder(:name).as('user_tests')), @name.group_concat(' '))
assert_equal "Lucas,Sophie", t(User.reorder(nil).from(User.select(:name).where(:name => ['Lucas', 'Sophie']).reorder(:name).as('user_tests')), @name.group_concat(','))
assert_equal "Lucas Sophie", t(User.reorder(nil).from(User.select(:name).where(:name => ['Lucas', 'Sophie']).reorder(:name).as('user_tests')), @name.group_concat)
else
assert_equal "Lucas Sophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat(' '))
assert_equal "Lucas,Sophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat(','))
if @env_db == 'oracle'
assert_equal "LucasSophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat)
else
assert_equal "Lucas,Sophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat)
end
end
end
def test_length
assert_equal 7, t(@camille, @name.length)
assert_equal 7, t(@camille, @name.length.round.abs)
assert_equal 42, t(@laure, @name.length + 37)
end
# locate returns 1-based position, 0 when absent (MySQL LOCATE semantics).
def test_locate
skip "Sqlite version can't load extension for locate" if $sqlite && $load_extension_disabled
assert_equal 1, t(@camille, @name.locate("C"))
assert_equal 0, t(@lucas, @name.locate("z"))
assert_equal 5, t(@lucas, @name.locate("s"))
end
# substring / [] with 1-based SQL offsets; oracle yields NULL (not '')
# for out-of-range substrings.
def test_substring
assert_equal 'C', t(@camille, @name.substring(1, 1))
if @env_db == 'oracle'
assert_nil(t(@lucas, @name.substring(42)))
else
assert_equal('', t(@lucas, @name.substring(42)))
end
assert_equal 'Lu', t(@lucas, @name.substring(1,2))
assert_equal 'C', t(@camille, @name[0, 1])
assert_equal 'C', t(@camille, @name[0])
if @env_db == 'oracle'
assert_nil(t(@lucas, @name[42]))
else
assert_equal('', t(@lucas, @name[42]))
end
assert_equal 'Lu', t(@lucas, @name[0,2])
assert_equal 'Lu', t(@lucas, @name[0..1])
end
# & on a text column maps to FIND_IN_SET against @neg's '1,22,3,42,2'.
def test_find_in_set
skip "Sqlite version can't load extension for find_in_set" if $sqlite && $load_extension_disabled
skip "SQL Server does not know about FIND_IN_SET" if @env_db == 'mssql'
assert_equal 5, t(@neg, @comments & 2)
assert_equal 0, t(@neg, @comments & 6) # not found
end
def test_string_comparators
skip "Oracle can't use math operators to compare strings" if @env_db == 'oracle' # use GREATEST ?
skip "SQL Server can't use math operators to compare strings" if @env_db == 'mssql' # use GREATEST ?
if @env_db == 'postgresql' # may return real boolean
assert t(@neg, @name >= 'Mest') == true || t(@neg, @name >= 'Mest') == 't' # depends of ar version
assert t(@neg, @name <= (@name + 'Z')) == true || t(@neg, @name <= (@name + 'Z')) == 't'
else
assert_equal 1, t(@neg, @name >= 'Mest')
assert_equal 1, t(@neg, @name <= (@name + 'Z'))
end
end
# =~ / !~ accept both string patterns and Ruby Regexp literals.
def test_regexp_not_regexp
skip "Sqlite version can't load extension for regexp" if $sqlite && $load_extension_disabled
skip "SQL Server does not know about REGEXP without extensions" if @env_db == 'mssql'
assert_equal 1, User.where(@name =~ '^M').count
assert_equal 6, User.where(@name !~ '^L').count
assert_equal 1, User.where(@name =~ /^M/).count
assert_equal 6, User.where(@name !~ /^L/).count
end
def test_imatches
assert_equal 1, User.where(@name.imatches('m%')).count
assert_equal 4, User.where(@name.imatches_any(['L%', '%e'])).count
assert_equal 6, User.where(@name.idoes_not_match('L%')).count
end
def test_replace
assert_equal "LucaX", t(@lucas, @name.replace("s", "X"))
assert_equal "replace", t(@lucas, @name.replace(@name, "replace"))
end
def test_replace_once
skip "TODO"
# skip "Sqlite version can't load extension for locate" if $sqlite && $load_extension_disabled
assert_equal "LuCas", t(@lucas, @name.substring(1, @name.locate('c') - 1) + 'C' + @name.substring(@name.locate('c') + 1, @name.length))
end
def test_soundex
skip "Sqlite version can't load extension for soundex" if $sqlite && $load_extension_disabled
skip "PostgreSql version can't load extension for soundex" if @env_db == 'postgresql'
assert_equal "C540", t(@camille, @name.soundex)
assert_equal 8, User.where(@name.soundex.eq(@name.soundex)).count
end
def test_change_case
assert_equal "myung", t(@myung, @name.downcase)
assert_equal "MYUNG", t(@myung, @name.upcase)
assert_equal "myung", t(@myung, @name.upcase.downcase)
end
def test_trim
assert_equal "Myung", t(@myung, @name.trim)
assert_equal "Myung", t(@myung, @name.trim.ltrim.rtrim)
assert_equal "Myun", t(@myung, @name.rtrim("g"))
assert_equal "yung", t(@myung, @name.ltrim("M"))
assert_equal "yung", t(@myung, (@name + "M").trim("M"))
skip "Oracle does not accept multi char trim" if @env_db == 'oracle'
assert_equal "", t(@myung, @name.rtrim(@name))
end
# blank/not_blank: @myung's comments column holds a single space, @neg's a
# CSV string; adapters differ on how booleans come back, hence the
# assert_includes lists for postgresql.
def test_blank
if @env_db == 'postgresql'
assert_includes [false, 'f'], t(@myung, @name.blank) # depends of adapter
assert_includes [true, 't'], t(@myung, @name.not_blank) # depends of adapter
assert_includes [true, 't'], t(@myung, @comments.blank)
assert_includes [false, 'f'], t(@myung, @comments.not_blank)
end
assert_equal 0, @myung.where(@name.blank).count
assert_equal 1, @myung.where(@name.not_blank).count
assert_equal 1, @myung.where(@comments.blank).count
assert_equal 0, @neg.where(@comments.blank).count
assert_equal 1, @neg.where(@comments.not_blank).count
assert_equal 0, @myung.where(@comments.not_blank).count
assert_equal 'false', t(@myung, @name.blank.then('true', 'false'))
assert_equal 'true', t(@myung, @name.not_blank.then('true', 'false'))
assert_equal 'true', t(@myung, @comments.blank.then('true', 'false'))
assert_equal 'false', t(@myung, @comments.not_blank.then('true', 'false'))
assert_equal 'false', t(@neg, @comments.blank.then('true', 'false'))
assert_equal 'true', t(@neg, @comments.not_blank.then('true', 'false'))
end
def test_format
assert_equal '2016-05-23', t(@lucas, @created_at.format('%Y-%m-%d'))
skip "SQL Server does not accept any format" if @env_db == 'mssql'
assert_equal '2014/03/03 12:42:00', t(@lucas, @updated_at.format('%Y/%m/%d %H:%M:%S'))
end
# COALESCE returns the first non-NULL argument; note oracle treats '' as
# NULL, hence the assert_nil branch.
def test_coalesce
assert_equal 'Camille concat', t(@camille, @name.coalesce(nil, "default") + ' concat')
assert_equal ' ', t(@myung, @comments.coalesce("Myung").coalesce('ignored'))
assert_equal 'Laure', t(@laure, @comments.coalesce("Laure"))
if @env_db == 'oracle'
assert_nil t(@laure, @comments.coalesce(""))
else
assert_equal('', t(@laure, @comments.coalesce("")))
end
if @env_db == 'postgresql'
assert_equal 100, t(@test, @age.coalesce(100))
assert_equal "Camille", t(@camille, @name.coalesce(nil, "default"))
assert_equal 20, t(@test, @age.coalesce(nil, 20))
else
assert_equal "Camille", t(@camille, @name.coalesce(nil, '20'))
assert_equal 20, t(@test, @age.coalesce(nil, 20))
end
end
# Comparators
def test_number_comparator
assert_equal 2, User.where(@age < 6).count
assert_equal 2, User.where(@age <= 10).count
assert_equal 3, User.where(@age > 20).count
assert_equal 4, User.where(@age >= 20).count
assert_equal 1, User.where(@age > 5).where(@age < 20).count
end
def test_date_comparator
d = Date.new(2016, 5, 23)
assert_equal 0, User.where(@created_at < d).count
assert_equal 8, User.where(@created_at >= d).count
end
def test_date_duration
#Year
assert_equal 2016, t(@lucas, @created_at.year).to_i
assert_equal 0, User.where(@created_at.year.eq("2012")).count
#Month
assert_equal 5, t(@camille, @created_at.month).to_i
assert_equal 8, User.where(@created_at.month.eq("05")).count
#Week
assert_equal(@env_db == 'mssql' ? 22 : 21, t(@arthur, @created_at.week).to_i)
assert_equal 8, User.where(@created_at.month.eq("05")).count
#Day
assert_equal 23, t(@laure, @created_at.day).to_i
assert_equal 0, User.where(@created_at.day.eq("05")).count
skip "manage DATE" if @env_db == 'oracle'
#Hour
assert_equal 0, t(@laure, @created_at.hour).to_i
assert_equal 12, t(@lucas, @updated_at.hour).to_i
#Minute
assert_equal 0, t(@laure, @created_at.minute).to_i
assert_equal 42, t(@lucas, @updated_at.minute).to_i
#Second
assert_equal 0, t(@laure, @created_at.second).to_i
assert_equal 0, t(@lucas, @updated_at.second).to_i
end
def test_datetime_diff
assert_equal 0, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 42)).to_i
if @env_db == 'oracle' && Arel::VERSION.to_i > 6 # in rails 5, result is multiplied by 24*60*60 = 86400...
assert_equal 42 * 86400, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 41, 18)).to_i
assert_equal(-3600 * 86400, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 13, 42)).to_i)
else
assert_equal 42, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 41, 18)).to_i
assert_equal(-3600, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 13, 42)).to_i)
if @env_db == 'mssql' || @env_db == 'oracle' # can't select booleans
assert_equal 0, @lucas.where((@updated_at - Time.utc(2014, 3, 3, 12, 41, 18)) < -1).count
else
assert_includes [nil, 0, 'f', false], t(@lucas, (@updated_at - Time.utc(2014, 3, 3, 12, 41, 18)) < -1)
end
if @env_db == 'mysql'
date1 = Date.new(2016, 5, 23)
durPos = 10.years
durNeg = -10.years
date2 = date1 + durPos
date3 = date1 - durPos
# Pull Request #5 tests
assert_includes [date2,"2026-05-23"], t(@test,(@created_at + durPos))
assert_includes [date3,"2006-05-23"], t(@test,(@created_at + durNeg))
# we test with the ruby object or the string because some adapters don't return an object Date
end
end
end
# TODO; cast types
def test_cast_types
skip "not implemented yet"
assert_equal true, t(@arthur, @score =~ /22/)
end
def test_is_null
assert_equal "Test", User.where(@age.is_null).select(@name).first.name
end
def test_math_plus
d = Date.new(1997, 6, 15)
#Concat String
assert_equal "SophiePhan", t(@sophie, @name + "Phan")
assert_equal "Sophie2", t(@sophie, @name + 2)
assert_equal "Sophie1997-06-15", t(@sophie, @name + d)
assert_equal "Sophie15", t(@sophie, @name + @age)
assert_equal "SophieSophie", t(@sophie, @name + @name)
#FIXME: should work as expected in Oracle
assert_equal "Sophie2016-05-23", t(@sophie, @name + @created_at) unless @env_db == 'oracle'
#concat Integer
assert_equal 1, User.where((@age + 10).eq(33)).count
assert_equal 1, User.where((@age + "1").eq(6)).count
assert_equal 1, User.where((@age + @age).eq(10)).count
#concat Date
# puts((User.arel_table[:created_at] + 1).as("res").to_sql.inspect)
assert_equal "2016-05-24", t(@myung, @created_at + 1).to_date.to_s
assert_equal "2016-05-25", t(@myung, @created_at + 2.day).to_date.to_s
end
def test_math_minus
d = Date.new(2016, 5, 20)
#Datediff
assert_equal 8, User.where((@created_at - @created_at).eq(0)).count
assert_equal 3, @laure.select((@created_at - d).as("res")).first.res.abs.to_i
#Substraction
assert_equal 0, User.where((@age - 10).eq(50)).count
assert_equal 0, User.where((@age - "10").eq(50)).count
# assert_equal 0, User.where((@age - 9.5).eq(50.5)).count # should work: TODO
assert_equal 0, User.where((@age - "9.5").eq(50.5)).count
end
def test_wday
d = Date.new(2016, 6, 26)
assert_equal(@env_db == 'oracle' || @env_db == 'mssql' ? 2 : 1, t(@myung, @created_at.wday).to_i) # monday
assert_equal 0, User.select(d.wday).as("res").first.to_i
end
# Boolean functions
# NOTE(review): 'โ' below looks like a mis-encoded Unicode operator from
# arel_extensions (likely '⋀', logical AND) — confirm the file encoding.
def test_boolean_functions
assert_equal 1, @laure.where(
(@score.round > 19).โ(@score.round < 21).โ(@score.round(1) >= 20.1)
).count
end
# Union operator
def test_union_operator
assert_equal 3, User.find_by_sql((@ut.project(@age).where(@age.gt(22)) + @ut.project(@age).where(@age.lt(0))).to_sql).length
assert_equal 2, User.find_by_sql((@ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(21))).to_sql).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.gt(22)) + @ut.project(@age).where(@age.lt(0))).as('my_union')).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(23)) + @ut.project(@age).where(@age.eq(21))).as('my_union')).length
assert_equal 2, User.select('*').from((@ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(20)) + @ut.project(@age).where(@age.eq(21))).as('my_union')).length
assert_equal 3, User.find_by_sql((@ut.project(@age).where(@age.gt(22)).union_all(@ut.project(@age).where(@age.lt(0)))).to_sql).length
assert_equal 3, User.find_by_sql((@ut.project(@age).where(@age.eq(20)).union_all(@ut.project(@age).where(@age.eq(20))).union_all(@ut.project(@age).where(@age.eq(21)))).to_sql).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.gt(22)).union_all(@ut.project(@age).where(@age.lt(0)))).as('my_union')).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.eq(20)).union_all(@ut.project(@age).where(@age.eq(23))).union_all(@ut.project(@age).where(@age.eq(21)))).as('my_union')).length
assert_equal 3, User.select('*').from((@ut.project(@age).where(@age.eq(20)).union_all(@ut.project(@age).where(@age.eq(20))).union_all(@ut.project(@age).where(@age.eq(21)))).as('my_union')).length
end
# Case clause
def test_case
assert_equal 4, User.find_by_sql(@ut.project(@score.when(20.16).then(1).else(0).as('score_sum')).to_sql).sum(&:score_sum)
end
end
end
end
|
tail added
require 'file-tail'
# Parses one Chef client log line. Any "ERROR: ..." text is printed and
# tallied in the global $chef accumulator, an array of
# {"error" => message, "times" => count} hashes. Errors mentioning 'retry'
# are truncated at the first comma so retries of the same failure collapse
# into a single entry.
#
# Fixes vs. the original:
# * The nil guard checked `line.match(...).to_s.nil?`, which can never be
#   true (`to_s` never returns nil); the MatchData itself is tested now.
# * On a repeated error the counter was reset (`m["times"] = 1`) instead of
#   incremented, so "times" could never exceed 1.
def chef_parser(line)
  return if line.nil?

  match = line.match(/ERROR:(.*)/)
  return if match.nil?

  matched = match.to_s
  error = matched.include?('retry') ? matched.split(',')[0] : matched
  return if error.nil? || error.empty?

  puts error
  entry = $chef.find { |m| m["error"] == error }
  if entry
    entry["times"] += 1
  else
    $chef << {
      "error" => error,
      "times" => 1
    }
  end
end
# Follows +filename+ like `tail -f`, feeding every new line to chef_parser.
# Blocks indefinitely: File::Tail#tail (from the file-tail gem) loops
# forever waiting for new output.
def log_handler(filename)
File.open(filename, 'r') do |log|
log.extend(File::Tail)
# Start one line from the end of the file so only fresh output is parsed.
log.backward(1)
log.tail { |line| chef_parser(line) }
end
end
|
=begin
Copyright (c) 2007 Pattern Park
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
TODO: Investigate jruby support, especially:
http://livedocs.adobe.com/flex/201/html/wwhelp/wwhimpl/common/html/wwhelp.htm?context=LiveDocs_Book_Parts&file=compilers_123_09.html
=end
module Sprout
# Raised when the MXMLC compiler reports a failure.
class MXMLCError < StandardError #:nodoc:
end
# Raised when executing an external tool/process fails.
class ExecutionError < StandardError #:nodoc:
end
# The MXMLC task provides a rake front end to the Flex MXMLC command line compiler.
# This task is integrated with the LibraryTask so that if any dependencies are
# library tasks, they will be automatically added to the library_path or source_path
# depending on whether they provide a swc or sources.
#
# The entire MXMLC advanced interface has been provided here. All parameter names should be
# identical to what is available on the regular compiler except dashes have been replaced
# by underscores.
#
# The following example can be pasted in a file named 'rakefile.rb' which should be placed in
# the same folder as an ActionScript 3.0 class named 'SomeProject.as' that extends
# flash.display.Sprite.
#
# # Create a remote library dependency on the corelib swc.
# library :corelib
#
# # Alias the compilation task with one that is easier to type
# task :compile => 'SomeProject.swf'
#
# # Create an MXMLCTask named for the output file that it creates. This task depends on the
# # corelib library and will automatically add the corelib.swc to its library_path
# mxmlc 'SomeProject.swf' => :corelib do |t|
# t.input = 'SomeProject.as'
# t.default_size = '800 600'
# t.default_background_color = "#FFFFFF"
# end
#
# Note: Be sure to check out the features of the ToolTask to learn more about gem_version and preprocessor
#
class MXMLCTask < ToolTask
# Use a running instance of the FCSH command shell to speed up compilation.
# You need to run 'rake fcsh:start' in another terminal before turning on
# this flag and compiling.
attr_accessor :use_fcsh
# Interface and descriptions found here:
# http://livedocs.adobe.com/flex/2/docs/wwhelp/wwhimpl/common/html/wwhelp.htm?context=LiveDocs_Parts&file=00001481.html
def initialize_task
# Declares the complete mxmlc command-line interface for this rake task.
# Each add_param call registers one compiler option; option names mirror
# the mxmlc flags with dashes replaced by underscores, and the heredoc
# bodies are the user-facing help text (kept verbatim from the Adobe docs).
# Resolve the compiler binary from the Flex 3 SDK tool gem by default.
@default_gem_name = 'sprout-flex3sdk-tool'
@default_gem_path = 'bin/mxmlc'
add_param(:accessible, :boolean) do |p|
p.hidden_value = true
p.description = "Enables accessibility features when compiling the Flex application or SWC file. The default value is false."
end
add_param(:actionscript_file_encoding, :string) do |p|
p.description = "Sets the file encoding for ActionScript files. For more information see: http://livedocs.adobe.com/flex/2/docs/00001503.html#149244"
end
add_param(:allow_source_path_overlap, :boolean) do |p|
p.hidden_value = true
p.description = "Checks if a source-path entry is a subdirectory of another source-path entry. It helps make the package names of MXML components unambiguous. This is an advanced option."
end
# multiline strings won't work in stupid aptana!
add_param(:as3, :boolean) do |p|
p.value = true
p.show_on_false = true
p.description =<<EOF
Use the ActionScript 3.0 class-based object model for greater performance and better error reporting. In the class-based object model, most built-in functions are implemented as fixed methods of classes.
The default value is true. If you set this value to false, you must set the es option to true.
This is an advanced option.
EOF
end
add_param(:benchmark, :boolean) do |p|
p.value = true
p.show_on_false = true
p.description = "Prints detailed compile times to the standard output. The default value is true."
end
add_param(:context_root, :path) do |p|
p.description =<<EOF
Sets the value of the {context.root} token in channel definitions in the flex-services.xml file. If you do not specify the value of this option, Flex uses an empty string.
For more information on using the {context.root} token, see http://livedocs.adobe.com/flex/2/docs/00001446.html#205687.
EOF
end
add_param(:contributor, :string) do |p|
p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
end
add_param(:creator, :string) do |p|
p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files. (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)"
end
add_param(:date, :string) do |p|
p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files. (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)"
end
add_param(:debug, :boolean) do |p|
p.hidden_value = true
p.description =<<EOF
Generates a debug SWF file. This file includes line numbers and filenames of all the source files. When a run-time error occurs, the stacktrace shows these line numbers and filenames. This information is also used by the command-line debugger and the Flex Builder debugger. Enabling the debug option generates larger SWF files.
For the mxmlc compiler, the default value is false. For the compc compiler, the default value is true.
For SWC files generated with the compc compiler, set this value to true, unless the target SWC file is an RSL. In that case, set the debug option to false.
For information about the command-line debugger, see Using the Command-Line Debugger (http://livedocs.adobe.com/flex/2/docs/00001540.html#181846).
Flex also uses the verbose-stacktraces setting to determine whether line numbers are added to the stacktrace.
EOF
end
add_param(:debug_password, :string) do |p|
p.description = "Lets you engage in remote debugging sessions with the Flash IDE. This is an advanced option."
end
add_param(:default_background_color, :string) do |p|
p.description =<<EOF
Sets the application's background color. You use the 0x notation to set the color, as the following example shows:
-default-background-color=0xCCCCFF
The default value is null. The default background of a Flex application is an image of a gray gradient. You must override this image for the value of the default-background-color option to be visible. For more information, see Editing application settings (http://livedocs.adobe.com/flex/2/docs/00001504.html#138781).
This is an advanced option.
EOF
end
add_param(:default_frame_rate, :number) do |p|
p.description = "Sets the application's frame rate. The default value is 24. This is an advanced option."
end
add_param(:default_script_limits, :string) do |p|
p.description =<<EOF
Defines the application's script execution limits.
The max-recursion-depth value specifies the maximum depth of Adobe Flash Player call stack before Flash Player stops. This is essentially the stack overflow limit. The default value is 1000.
The max-execution-time value specifies the maximum duration, in seconds, that an ActionScript event handler can execute before Flash Player assumes that it is hung, and aborts it. The default value is 60 seconds. You cannot set this value above 60 seconds.
Example:
# 900 is new max-recursion-depth
# 20 is new max-execution-time
t.default_script_limits = '900 20'
You can override these settings in the application.
This is an advanced option.
EOF
end
add_param(:default_size, :string) do |p|
p.delimiter = ' '
p.description = "Defines the default application size, in pixels for example: default_size = '950 550'. This is an advanced option."
end
add_param(:default_css_url, :url) do |p|
p.description =<<EOF
Defines the location of the default style sheet. Setting this option overrides the implicit use of the defaults.css style sheet in the framework.swc file.
For more information on the defaults.css file, see Using Styles and Themes (http://livedocs.adobe.com/flex/2/docs/00000751.html#241755) in Flex 2 Developer's Guide.
This is an advanced option.
EOF
end
add_param(:define, :string) do |p|
p.description =<<EOF
Define a global AS3 conditional compilation definition, e.g. -define=CONFIG::debugging,true or -define+=CONFIG::debugging,true (to append to existing definitions in flex-config.xml) (advanced, repeatable)
EOF
end
add_param(:description, :string) do |p|
p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
end
add_param(:dump_config, :file) do |p|
p.description =<<EOF
Outputs the compiler options in the flex-config.xml file to the target path; for example:
mxmlc -dump-config myapp-config.xml
This is an advanced option.
EOF
end
# TODO: Add parameter interactions to ensure
# valid state?
add_param(:es, :boolean) do |p|
p.description =<<EOF
Use the ECMAScript edition 3 prototype-based object model to allow dynamic overriding of prototype properties. In the prototype-based object model, built-in functions are implemented as dynamic properties of prototype objects.
You can set the strict option to true when you use this model, but it might result in compiler errors for references to dynamic properties.
The default value is false. If you set this option to true, you must set the es3 option to false.
This is an advanced option.
EOF
end
add_param(:externs, :symbols) do |p|
p.description =<<EOF
Sets a list of symbols to exclude from linking when compiling a SWF file.
This option provides compile-time link checking for external references that are dynamically linked.
For more information about dynamic linking, see About linking (http://livedocs.adobe.com/flex/2/docs/00001521.html#205852).
This is an advanced option.
EOF
end
add_param(:external_library_path, :files) do |p|
p.description =<<EOF
Specifies a list of SWC files or directories to exclude from linking when compiling a SWF file. This option provides compile-time link checking for external components that are dynamically linked.
For more information about dynamic linking, see About linking (http://livedocs.adobe.com/flex/2/docs/00001521.html#205852).
You can use the += operator to append the new SWC file to the list of external libraries.
This parameter has been aliased as +el+.
EOF
end
add_param_alias(:el, :external_library_path)
add_param(:file_specs, :files) do |p|
p.description = "Specifies source files to compile. This is the default option for the mxmlc compiler."
end
add_param(:fonts_languages_language_range, :string) do |p|
p.shell_name = '-compiler.fonts.languages.language-range'
p.description =<<EOF
Specifies the range of Unicode settings for that language. For more information, see Using Styles and Themes (http://livedocs.adobe.com/flex/2/docs/00000751.html#241755) in Flex 2 Developer's Guide.
This is an advanced option.
EOF
end
add_param(:fonts_managers, :symbols) do |p|
p.description =<<EOF
Defines the font manager. The default is flash.fonts.JREFontManager. You can also use the flash.fonts.BatikFontManager. For more information, see Using Styles and Themes in Flex 2 Developer's Guide (http://livedocs.adobe.com/flex/2/docs/00000751.html#241755).
This is an advanced option.
EOF
end
add_param(:fonts_max_cached_fonts, :number) do |p|
p.description =<<EOF
Sets the maximum number of fonts to keep in the server cache. For more information, see Caching fonts and glyphs (http://livedocs.adobe.com/flex/2/docs/00001469.html#208457).
This is an advanced option.
EOF
end
add_param(:fonts_max_glyphs_per_face, :number) do |p|
p.description =<<EOF
Sets the maximum number of character glyph-outlines to keep in the server cache for each font face. For more information, see Caching fonts and glyphs (http://livedocs.adobe.com/flex/2/docs/00001469.html#208457).
This is an advanced option.
EOF
end
add_param(:frames_frame, :string) do |p|
p.shell_name = '-frames.frame'
p.description =<<EOF
Specifies a SWF file frame label with a sequence of class names that are linked onto the frame.
For example: frames_frame = 'someLabel MyProject OtherProject FoodProject'
This is an advanced option.
EOF
end
add_param(:generate_frame_loader, :boolean) do |p|
p.description =<<EOF
Toggles the generation of an IFlexBootstrap-derived loader class.
This is an advanced option.
EOF
end
add_param(:headless_server, :boolean) do |p|
p.description =<<EOF
Enables the headless implementation of the Flex compiler. This sets the following:
System.setProperty('java.awt.headless', 'true')
The headless setting (java.awt.headless=true) is required to use fonts and SVG on UNIX systems without X Windows.
This is an advanced option.
EOF
end
add_param(:include_libraries, :files) do |p|
p.description =<<EOF
Links all classes inside a SWC file to the resulting application SWF file, regardless of whether or not they are used.
Contrast this option with the library-path option that includes only those classes that are referenced at compile time.
To link one or more classes whether or not they are used and not an entire SWC file, use the includes option.
This option is commonly used to specify resource bundles.
EOF
end
add_param(:includes, :symbols) do |p|
p.description =<<EOF
Links one or more classes to the resulting application SWF file, whether or not those classes are required at compile time.
To link an entire SWC file rather than individual classes, use the include-libraries option.
EOF
end
add_param(:incremental, :boolean) do |p|
p.description =<<EOF
Enables incremental compilation. For more information, see About incremental compilation (http://livedocs.adobe.com/flex/2/docs/00001506.html#153980).
This option is true by default for the Flex Builder application compiler. For the command-line compiler, the default is false. The web-tier compiler does not support incremental compilation.
EOF
end
add_param(:keep_generated_actionscript, :boolean) do |p|
p.description =<<EOF
Determines whether to keep the generated ActionScript class files.
The generated class files include stubs and classes that are generated by the compiler and used to build the SWF file.
The default location of the files is the /generated subdirectory, which is directly below the target MXML file. If the /generated directory does not exist, the compiler creates one.
The default names of the primary generated class files are filename-generated.as and filename-interface.as.
The default value is false.\nThis is an advanced option.
EOF
end
add_param(:language, :string) do |p|
p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
end
add_param(:lazy_init, :boolean) do |p|
p.description =<<EOF
Enables ABC bytecode lazy initialization.
The default value is false.
This is an advanced option.
EOF
end
add_param(:license, :string) do |p|
p.description =<<EOF
<product> <serial-number>
Specifies a product and a serial number. (repeatable)
This is an advanced option.
EOF
end
add_param(:library_path, :files) do |p|
p.description =<<EOF
Links SWC files to the resulting application SWF file. The compiler only links in those classes for the SWC file that are required.
The default value of the library-path option includes all SWC files in the libs directory and the current locale. These are required.
To point to individual classes or packages rather than entire SWC files, use the source-path option.
If you set the value of the library-path as an option of the command-line compiler, you must also explicitly add the framework.swc and locale SWC files. Your new entry is not appended to the library-path but replaces it.
You can use the += operator to append the new argument to the list of existing SWC files.
In a configuration file, you can set the append attribute of the library-path tag to true to indicate that the values should be appended to the library path rather than replace it.
EOF
end
add_param_alias(:l, :library_path)
add_param(:link_report, :file) do |p|
p.description =<<EOF
Prints linking information to the specified output file. This file is an XML file that contains <def>, <pre>, and <ext> symbols showing linker dependencies in the final SWF file.
The file format output by this command can be used to write a file for input to the load-externs option.
For more information on the report, see Examining linker dependencies (http://livedocs.adobe.com/flex/2/docs/00001394.html#211202).
This is an advanced option.
EOF
end
add_param(:load_config, :file) do |p|
p.description =<<EOF
Specifies the location of the configuration file that defines compiler options.
If you specify a configuration file, you can override individual options by setting them on the command line.
All relative paths in the configuration file are relative to the location of the configuration file itself.
Use the += operator to chain this configuration file to other configuration files.
For more information on using configuration files to provide options to the command-line compilers, see About configuration files (http://livedocs.adobe.com/flex/2/docs/00001490.html#138195).
EOF
end
add_param(:load_externs, :file) do |p|
p.description =<<EOF
Specifies the location of an XML file that contains <def>, <pre>, and <ext> symbols to omit from linking when compiling a SWF file. The XML file uses the same syntax as the one produced by the link-report option. For more information on the report, see Examining linker dependencies (http://livedocs.adobe.com/flex/2/docs/00001394.html#211202).
This option provides compile-time link checking for external components that are dynamically linked.
For more information about dynamic linking, see About linking (http://livedocs.adobe.com/flex/2/docs/00001521.html#205852).
This is an advanced option.
EOF
end
add_param(:locale, :string) do |p|
p.description =<<EOF
Specifies the locale that should be packaged in the SWF file (for example, en_EN). You run the mxmlc compiler multiple times to create SWF files for more than one locale, with only the locale option changing.
You must also include the parent directory of the individual locale directories, plus the token {locale}, in the source-path; for example:
mxmlc -locale en_EN -source-path locale/{locale} -file-specs MainApp.mxml
For more information, see Localizing Flex Applicationsin (http://livedocs.adobe.com/flex/2/docs/00000898.html#129288) Flex 2 Developer's Guide.
EOF
end
add_param(:localized_description, :string) do |p|
p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
end
add_param(:localized_title, :string) do |p|
p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
end
add_param(:namespaces_namespace, :string) do |p|
p.description =<<EOF
Specifies a namespace for the MXML file. You must include a URI and the location of the manifest file that defines the contents of this namespace. This path is relative to the MXML file.
For more information about manifest files, see About manifest files (http://livedocs.adobe.com/flex/2/docs/00001519.html#134676).
EOF
end
add_param(:optimize, :boolean) do |p|
p.description =<<EOF
Enables the ActionScript optimizer. This optimizer reduces file size and increases performance by optimizing the SWF file's bytecode.
The default value is false.
EOF
end
add_param(:output, :file) do |p|
p.description =<<EOF
Specifies the output path and filename for the resulting file. If you omit this option, the compiler saves the SWF file to the directory where the target file is located.
The default SWF filename matches the target filename, but with a SWF file extension.
If you use a relative path to define the filename, it is always relative to the current working directory, not the target MXML application root.
The compiler creates extra directories based on the specified filename if those directories are not present.
When using this option with the component compiler, the output is a SWC file rather than a SWF file.
EOF
end
add_param(:publisher, :string) do |p|
p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
end
add_param(:raw_metadata, :string) do |p|
p.description =<<EOF
XML text to store in the SWF metadata (overrides metadata.* configuration) (advanced)
EOF
end
add_param(:resource_bundle_list, :file) do |p|
p.description =<<EOF
Prints a list of resource bundles to input to the compc compiler to create a resource bundle SWC file. The filename argument is the name of the file that contains the list of bundles.
For more information, see Localizing Flex Applications (http://livedocs.adobe.com/flex/2/docs/00000898.html#129288) in Flex 2 Developer's Guide.
EOF
end
add_param(:runtime_shared_libraries, :urls) do |p|
p.description =<<EOF
Specifies a list of run-time shared libraries (RSLs) to use for this application. RSLs are dynamically-linked at run time.
You specify the location of the SWF file relative to the deployment location of the application. For example, if you store a file named library.swf file in the web_root/libraries directory on the web server, and the application in the web root, you specify libraries/library.swf.
For more information about RSLs, see Using Runtime Shared Libraries. (http://livedocs.adobe.com/flex/2/docs/00001520.html#168690)
EOF
end
add_param_alias(:rsl, :runtime_shared_libraries)
add_param(:runtime_shared_library_path, :string) do |p|
p.description =<<EOF
[path-element] [rsl-url] [policy-file-url] [rsl-url] [policy-file-url]
alias -rslp
(repeatable)
EOF
end
add_param_alias(:rslp, :runtime_shared_library_path)
add_param(:services, :file) do |p|
p.description = "Specifies the location of the services-config.xml file. This file is used by Flex Data Services."
end
add_param(:show_actionscript_warnings, :boolean) do |p|
p.value = true
p.show_on_false = true
p.description =<<EOF
Shows warnings for ActionScript classes.
The default value is true.
For more information about viewing warnings and errors, see Viewing warnings and errors (http://livedocs.adobe.com/flex/2/docs/00001517.html#182413).
EOF
end
add_param(:show_binding_warnings, :boolean) do |p|
p.value = true
p.show_on_false = true
p.description =<<EOF
Shows a warning when Flash Player cannot detect changes to a bound property.
The default value is true.
For more information about compiler warnings, see Using SWC files (http://livedocs.adobe.com/flex/2/docs/00001505.html#158337).
EOF
end
add_param(:show_deprecation_warnings, :boolean) do |p|
p.value = true
p.show_on_false = true
p.description =<<EOF
Shows deprecation warnings for Flex components. To see warnings for ActionScript classes, use the show-actionscript-warnings option.
The default value is true.
For more information about viewing warnings and errors, see Viewing warnings and errors.
EOF
end
add_param(:source_path, :paths) do |p|
p.preprocessable = true
p.description =<<EOF
Adds directories or files to the source path. The Flex compiler searches directories in the source path for MXML or AS source files that are used in your Flex applications and includes those that are required at compile time.
You can use wildcards to include all files and subdirectories of a directory.
To link an entire library SWC file and not individual classes or directories, use the library-path option.
The source path is also used as the search path for the component compiler's include-classes and include-resource-bundles options.
You can also use the += operator to append the new argument to the list of existing source path entries.
EOF
end
add_param_alias(:sp, :source_path)
add_param(:static_link_runtime_shared_libraries, :boolean) do |p|
p.description =<<EOF
Statically link the libraries specified by the -runtime-shared-libraries-path option.
alias -static-rsls
EOF
end
add_param_alias(:static_rsls, :static_link_runtime_shared_libraries)
add_param(:strict, :boolean) do |p|
p.value = true
p.show_on_false = true
p.description =<<EOF
Prints undefined property and function calls; also performs compile-time type checking on assignments and options supplied to method calls.
The default value is true.
For more information about viewing warnings and errors, see Viewing warnings and errors (http://livedocs.adobe.com/flex/2/docs/00001517.html#182413).
EOF
end
add_param(:target_player, :string) do |p|
p.description =<<EOF
Specifies the version of the player the application is targeting.
Features requiring a later version will not be compiled into the application. The minimum value supported is "9.0.0".
EOF
end
add_param(:theme, :files) do |p|
p.description =<<EOF
Specifies a list of theme files to use with this application. Theme files can be SWC files with CSS files inside them or CSS files.
For information on compiling a SWC theme file, see Using Styles and Themes (http://livedocs.adobe.com/flex/2/docs/00000751.html#241755) in Flex 2 Developer's Guide.
EOF
end
add_param(:title, :string) do |p|
p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
end
add_param(:use_network, :boolean) do |p|
p.value = true
p.show_on_false = true
p.description =<<EOF
Specifies that the current application uses network services.
The default value is true.
When the use-network property is set to false, the application can access the local filesystem (for example, use the XML.load() method with file: URLs) but not network services. In most circumstances, the value of this property should be true.
For more information about the use-network property, see Applying Flex Security (http://livedocs.adobe.com/flex/2/docs/00001328.html#137544).
EOF
end
add_param(:verbose_stacktraces, :boolean) do |p|
p.description =<<EOF
Generates source code that includes line numbers. When a run-time error occurs, the stacktrace shows these line numbers.
Enabling this option generates larger SWF files.\nThe default value is false.
EOF
end
add_param(:verify_digests, :boolean) do |p|
p.description = "Verifies the libraries loaded at runtime are the correct ones."
end
add_param(:warn_warning_type, :boolean) do |p|
p.description = "Enables specified warnings. For more information, see Viewing warnings and errors (http://livedocs.adobe.com/flex/2/docs/00001517.html#182413)."
end
add_param(:warnings, :boolean) do |p|
p.description =<<EOF
Enables all warnings. Set to false to disable all warnings. This option overrides the warn-warning_type options.
The default value is true.
EOF
end
# This must be the last item in this list
# NOTE(review): input uses hidden_name, so it appears to be emitted as a
# bare trailing argument on the shell line — presumably why it must stay
# last; confirm against ToolTask's to_shell behavior.
add_param(:input, :file) do |p|
p.preprocessable = true
p.hidden_name = true
p.description = "Main source file to send compiler"
end
end
# Finalizes the task after user configuration: defaults the output name,
# wires the input's directory into the source path, de-duplicates and
# de-nests source paths, and registers generated artifacts with CLEAN.
# Returns self.
def define # :nodoc:
  super
  # Default the output file to the task name when it names a SWF/SWC
  # target. The patterns are anchored and the dot escaped: the previous
  # /.swf/ and /swc/ treated the dot as a wildcard and matched anywhere,
  # so names like 'myswcproject' were wrongly treated as compile targets.
  if(!output)
    if(name.match(/\.swf$/) || name.match(/\.swc$/))
      self.output = name
    end
  end
  # Add the input's directory to the source path (CSS inputs excluded).
  # File.exist? replaces the deprecated File.exists? alias.
  if(input && !input.match(/\.css$/) && File.exist?(input))
    source_path << File.dirname(input)
  end
  # Link reports are generated artifacts; let 'rake clean' remove them.
  if(link_report)
    CLEAN.add(link_report)
  end
  source_path.uniq!
  param_hash['source_path'].value = clean_nested_source_paths(source_path)
  CLEAN.add(output)
  if(incremental)
    CLEAN.add(FileList['**/**/*.cache'])
  end
  self
end
protected
# Returns a copy of +paths+ with redundant nested entries removed; the
# check_nested_source_path helper both vets each candidate and prunes
# already-collected entries that the candidate subsumes.
def clean_nested_source_paths(paths)
  paths.inject([]) do |kept, candidate|
    kept << candidate if(check_nested_source_path(kept, candidate))
    kept
  end
end
# Decides whether +path+ should be added to +array+ of source paths.
# Removes from +array+ any entry that is nested under +path+ (prefix
# match), and returns false when +path+ is itself nested under an
# existing entry; returns true otherwise.
# Fix: paths are now passed through Regexp.escape before interpolation,
# so regex metacharacters in a path (dots, '+', brackets) no longer
# cause false nesting matches or invalid patterns.
def check_nested_source_path(array, path)
  escaped_path = Regexp.escape(path)
  array.each_index do |index|
    item = array[index]
    if(item =~ /^#{escaped_path}/)
      # NOTE(review): slicing while iterating each_index can skip the
      # element that shifts into this slot — preserved from the original.
      array.slice!(index, 1)
    elsif(path =~ /^#{Regexp.escape(item)}/)
      return false
    end
  end
  return true
end
# Use the swc path if possible
# Otherwise add to source
# Routes a library dependency: compiled SWC archives go on the
# library_path, anything else is treated as sources on the source_path.
def resolve_library(library_task)
  #TODO: Add support for libraries that don't get
  # copied into the project
  path = library_task.project_path
  # Anchor on a literal '.swc' extension; the previous /.swc$/ treated
  # the dot as a wildcard, so paths like 'myswc' also matched.
  if(path.match(/\.swc$/))
    library_path << path
  else
    source_path << path
  end
end
# Sends the compile command to a running FCSH shell instead of spawning
# a fresh compiler process. Raises MXMLCError when the shell cannot be
# reached (FCSH-protocol errors are re-raised untouched).
def execute_with_fcsh(command)
  begin
    display_preprocess_message
    puts FCSHSocket.execute("mxmlc #{command}")
  rescue FCSHError => fcsh_error
    raise fcsh_error
  rescue StandardError
    # Fix: the original called StandardError("...") like a method, which
    # itself raised NoMethodError; raise a proper MXMLCError instead.
    raise MXMLCError.new("[ERROR] There was a problem connecting to the Flex Compiler SHell, run 'rake fcsh:start' in another terminal.")
  end
end
# Runs the compiler, either through the FCSH shell (when use_fcsh is on)
# or via the normal ToolTask execution path. ExecutionErrors whose
# message contains 'Warning:' are logged as warnings rather than raised.
def execute(*args)
  return execute_with_fcsh(to_shell) if(@use_fcsh)
  super
rescue ExecutionError => e
  raise e unless e.message.index('Warning:')
  # MXMLC sends warnings to stderr....
  Log.puts(e.message.gsub('[ERROR]', '[WARNING]'))
end
end
end
# Helper method for defining and accessing MXMLCTask instances in a rakefile
def mxmlc(args, &block)
# Delegates to the task definition; +args+ follows the usual rake task
# syntax (a name, or a name => prerequisites hash), and the block
# receives the MXMLCTask instance for configuration.
Sprout::MXMLCTask.define_task(args, &block)
end
Fixed the missing StandardError constructor call and made MXMLCTask raise an MXMLCError when use_fcsh is true but a running FCSH instance cannot be found.
=begin
Copyright (c) 2007 Pattern Park
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
TODO: Investigate jruby support, especially:
http://livedocs.adobe.com/flex/201/html/wwhelp/wwhimpl/common/html/wwhelp.htm?context=LiveDocs_Book_Parts&file=compilers_123_09.html
=end
module Sprout
# Error type reserved for MXMLC-task-specific failures.
class MXMLCError < StandardError #:nodoc:
end
# Raised/rescued when the underlying compiler execution fails.
class ExecutionError < StandardError #:nodoc:
end
# The MXMLC task provides a rake front end to the Flex MXMLC command line compiler.
# This task is integrated with the LibraryTask so that if any dependencies are
# library tasks, they will be automatically added to the library_path or source_path
# depending on whether they provide a swc or sources.
#
# The entire MXMLC advanced interface has been provided here. All parameter names should be
# identical to what is available on the regular compiler except dashes have been replaced
# by underscores.
#
# The following example can be pasted in a file named 'rakefile.rb' which should be placed in
# the same folder as an ActionScript 3.0 class named 'SomeProject.as' that extends
# flash.display.Sprite.
#
# # Create a remote library dependency on the corelib swc.
# library :corelib
#
# # Alias the compilation task with one that is easier to type
# task :compile => 'SomeProject.swf'
#
# # Create an MXMLCTask named for the output file that it creates. This task depends on the
# # corelib library and will automatically add the corelib.swc to it's library_path
# mxmlc 'SomeProject.swf' => :corelib do |t|
# t.input = 'SomeProject.as'
# t.default_size = '800 600'
# t.default_background_color = "#FFFFFF"
# end
#
# Note: Be sure to check out the features of the ToolTask to learn more about gem_version and preprocessor
#
class MXMLCTask < ToolTask
# Use a running instance of the FCSH command shell to speed up compilation.
# You need to run 'rake fcsh:start' in another terminal before turning on
# this flag and compiling.
attr_accessor :use_fcsh
# Declares the complete mxmlc command-line interface as ToolTask parameters.
# Parameter names mirror the Flex compiler options with dashes replaced by
# underscores; each add_param block carries the flag's shell type and the
# help text copied from Adobe's documentation.
# Interface and descriptions found here:
# http://livedocs.adobe.com/flex/2/docs/wwhelp/wwhimpl/common/html/wwhelp.htm?context=LiveDocs_Parts&file=00001481.html
def initialize_task
  # Gem providing the Flex 3 SDK, and the compiler binary inside it.
  @default_gem_name = 'sprout-flex3sdk-tool'
  @default_gem_path = 'bin/mxmlc'
  add_param(:accessible, :boolean) do |p|
    p.hidden_value = true
    p.description = "Enables accessibility features when compiling the Flex application or SWC file. The default value is false."
  end
  add_param(:actionscript_file_encoding, :string) do |p|
    p.description = "Sets the file encoding for ActionScript files. For more information see: http://livedocs.adobe.com/flex/2/docs/00001503.html#149244"
  end
  add_param(:allow_source_path_overlap, :boolean) do |p|
    p.hidden_value = true
    p.description = "Checks if a source-path entry is a subdirectory of another source-path entry. It helps make the package names of MXML components unambiguous. This is an advanced option."
  end
  # multiline strings won't work in stupid aptana!
  add_param(:as3, :boolean) do |p|
    p.value = true
    p.show_on_false = true
    p.description =<<EOF
Use the ActionScript 3.0 class-based object model for greater performance and better error reporting. In the class-based object model, most built-in functions are implemented as fixed methods of classes.
The default value is true. If you set this value to false, you must set the es option to true.
This is an advanced option.
EOF
  end
  add_param(:benchmark, :boolean) do |p|
    p.value = true
    p.show_on_false = true
    p.description = "Prints detailed compile times to the standard output. The default value is true."
  end
  add_param(:context_root, :path) do |p|
    p.description =<<EOF
Sets the value of the {context.root} token in channel definitions in the flex-services.xml file. If you do not specify the value of this option, Flex uses an empty string.
For more information on using the {context.root} token, see http://livedocs.adobe.com/flex/2/docs/00001446.html#205687.
EOF
  end
  # SWF metadata options (contributor/creator/date/description/...).
  add_param(:contributor, :string) do |p|
    p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
  end
  add_param(:creator, :string) do |p|
    p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files. (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)"
  end
  add_param(:date, :string) do |p|
    p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files. (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)"
  end
  add_param(:debug, :boolean) do |p|
    p.hidden_value = true
    p.description =<<EOF
Generates a debug SWF file. This file includes line numbers and filenames of all the source files. When a run-time error occurs, the stacktrace shows these line numbers and filenames. This information is also used by the command-line debugger and the Flex Builder debugger. Enabling the debug option generates larger SWF files.
For the mxmlc compiler, the default value is false. For the compc compiler, the default value is true.
For SWC files generated with the compc compiler, set this value to true, unless the target SWC file is an RSL. In that case, set the debug option to false.
For information about the command-line debugger, see Using the Command-Line Debugger (http://livedocs.adobe.com/flex/2/docs/00001540.html#181846).
Flex also uses the verbose-stacktraces setting to determine whether line numbers are added to the stacktrace.
EOF
  end
  add_param(:debug_password, :string) do |p|
    p.description = "Lets you engage in remote debugging sessions with the Flash IDE. This is an advanced option."
  end
  add_param(:default_background_color, :string) do |p|
    p.description =<<EOF
Sets the application's background color. You use the 0x notation to set the color, as the following example shows:
-default-background-color=0xCCCCFF
The default value is null. The default background of a Flex application is an image of a gray gradient. You must override this image for the value of the default-background-color option to be visible. For more information, see Editing application settings (http://livedocs.adobe.com/flex/2/docs/00001504.html#138781).
This is an advanced option.
EOF
  end
  add_param(:default_frame_rate, :number) do |p|
    p.description = "Sets the application's frame rate. The default value is 24. This is an advanced option."
  end
  add_param(:default_script_limits, :string) do |p|
    p.description =<<EOF
Defines the application's script execution limits.
The max-recursion-depth value specifies the maximum depth of Adobe Flash Player call stack before Flash Player stops. This is essentially the stack overflow limit. The default value is 1000.
The max-execution-time value specifies the maximum duration, in seconds, that an ActionScript event handler can execute before Flash Player assumes that it is hung, and aborts it. The default value is 60 seconds. You cannot set this value above 60 seconds.
Example:
# 900 is new max-recursion-depth
# 20 is new max-execution-time
t.default_script_limits = '900 20'
You can override these settings in the application.
This is an advanced option.
EOF
  end
  add_param(:default_size, :string) do |p|
    # Space-delimited pair, e.g. '950 550'.
    p.delimiter = ' '
    p.description = "Defines the default application size, in pixels for example: default_size = '950 550'. This is an advanced option."
  end
  add_param(:default_css_url, :url) do |p|
    p.description =<<EOF
Defines the location of the default style sheet. Setting this option overrides the implicit use of the defaults.css style sheet in the framework.swc file.
For more information on the defaults.css file, see Using Styles and Themes (http://livedocs.adobe.com/flex/2/docs/00000751.html#241755) in Flex 2 Developer's Guide.
This is an advanced option.
EOF
  end
  add_param(:define, :string) do |p|
    p.description =<<EOF
Define a global AS3 conditional compilation definition, e.g. -define=CONFIG::debugging,true or -define+=CONFIG::debugging,true (to append to existing definitions in flex-config.xml) (advanced, repeatable)
EOF
  end
  add_param(:description, :string) do |p|
    p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
  end
  add_param(:dump_config, :file) do |p|
    p.description =<<EOF
Outputs the compiler options in the flex-config.xml file to the target path; for example:
mxmlc -dump-config myapp-config.xml
This is an advanced option.
EOF
  end
  # TODO: Add parameter interactions to ensure
  # valid state?
  # NOTE(review): Adobe's docs for the 'es' option state that the *as3* option
  # must be set to false; the "es3" in the help text below looks like a typo in
  # the copied description — confirm before editing the runtime string.
  add_param(:es, :boolean) do |p|
    p.description =<<EOF
Use the ECMAScript edition 3 prototype-based object model to allow dynamic overriding of prototype properties. In the prototype-based object model, built-in functions are implemented as dynamic properties of prototype objects.
You can set the strict option to true when you use this model, but it might result in compiler errors for references to dynamic properties.
The default value is false. If you set this option to true, you must set the es3 option to false.
This is an advanced option.
EOF
  end
  add_param(:externs, :symbols) do |p|
    p.description =<<EOF
Sets a list of symbols to exclude from linking when compiling a SWF file.
This option provides compile-time link checking for external references that are dynamically linked.
For more information about dynamic linking, see About linking (http://livedocs.adobe.com/flex/2/docs/00001521.html#205852).
This is an advanced option.
EOF
  end
  add_param(:external_library_path, :files) do |p|
    p.description =<<EOF
Specifies a list of SWC files or directories to exclude from linking when compiling a SWF file. This option provides compile-time link checking for external components that are dynamically linked.
For more information about dynamic linking, see About linking (http://livedocs.adobe.com/flex/2/docs/00001521.html#205852).
You can use the += operator to append the new SWC file to the list of external libraries.
This parameter has been aliased as +el+.
EOF
  end
  add_param_alias(:el, :external_library_path)
  add_param(:file_specs, :files) do |p|
    p.description = "Specifies source files to compile. This is the default option for the mxmlc compiler."
  end
  # Font-handling options; shell_name overrides the default dash translation.
  add_param(:fonts_languages_language_range, :string) do |p|
    p.shell_name = '-compiler.fonts.languages.language-range'
    p.description =<<EOF
Specifies the range of Unicode settings for that language. For more information, see Using Styles and Themes (http://livedocs.adobe.com/flex/2/docs/00000751.html#241755) in Flex 2 Developer's Guide.
This is an advanced option.
EOF
  end
  add_param(:fonts_managers, :symbols) do |p|
    p.description =<<EOF
Defines the font manager. The default is flash.fonts.JREFontManager. You can also use the flash.fonts.BatikFontManager. For more information, see Using Styles and Themes in Flex 2 Developer's Guide (http://livedocs.adobe.com/flex/2/docs/00000751.html#241755).
This is an advanced option.
EOF
  end
  add_param(:fonts_max_cached_fonts, :number) do |p|
    p.description =<<EOF
Sets the maximum number of fonts to keep in the server cache. For more information, see Caching fonts and glyphs (http://livedocs.adobe.com/flex/2/docs/00001469.html#208457).
This is an advanced option.
EOF
  end
  add_param(:fonts_max_glyphs_per_face, :number) do |p|
    p.description =<<EOF
Sets the maximum number of character glyph-outlines to keep in the server cache for each font face. For more information, see Caching fonts and glyphs (http://livedocs.adobe.com/flex/2/docs/00001469.html#208457).
This is an advanced option.
EOF
  end
  add_param(:frames_frame, :string) do |p|
    p.shell_name = '-frames.frame'
    p.description =<<EOF
Specifies a SWF file frame label with a sequence of class names that are linked onto the frame.
For example: frames_frame = 'someLabel MyProject OtherProject FoodProject'
This is an advanced option.
EOF
  end
  add_param(:generate_frame_loader, :boolean) do |p|
    p.description =<<EOF
Toggles the generation of an IFlexBootstrap-derived loader class.
This is an advanced option.
EOF
  end
  add_param(:headless_server, :boolean) do |p|
    p.description =<<EOF
Enables the headless implementation of the Flex compiler. This sets the following:
System.setProperty('java.awt.headless', 'true')
The headless setting (java.awt.headless=true) is required to use fonts and SVG on UNIX systems without X Windows.
This is an advanced option.
EOF
  end
  # Linking options.
  add_param(:include_libraries, :files) do |p|
    p.description =<<EOF
Links all classes inside a SWC file to the resulting application SWF file, regardless of whether or not they are used.
Contrast this option with the library-path option that includes only those classes that are referenced at compile time.
To link one or more classes whether or not they are used and not an entire SWC file, use the includes option.
This option is commonly used to specify resource bundles.
EOF
  end
  add_param(:includes, :symbols) do |p|
    p.description =<<EOF
Links one or more classes to the resulting application SWF file, whether or not those classes are required at compile time.
To link an entire SWC file rather than individual classes, use the include-libraries option.
EOF
  end
  add_param(:incremental, :boolean) do |p|
    p.description =<<EOF
Enables incremental compilation. For more information, see About incremental compilation (http://livedocs.adobe.com/flex/2/docs/00001506.html#153980).
This option is true by default for the Flex Builder application compiler. For the command-line compiler, the default is false. The web-tier compiler does not support incremental compilation.
EOF
  end
  add_param(:keep_generated_actionscript, :boolean) do |p|
    p.description =<<EOF
Determines whether to keep the generated ActionScript class files.
The generated class files include stubs and classes that are generated by the compiler and used to build the SWF file.
The default location of the files is the /generated subdirectory, which is directly below the target MXML file. If the /generated directory does not exist, the compiler creates one.
The default names of the primary generated class files are filename-generated.as and filename-interface.as.
The default value is false.\nThis is an advanced option.
EOF
  end
  add_param(:language, :string) do |p|
    p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
  end
  add_param(:lazy_init, :boolean) do |p|
    p.description =<<EOF
Enables ABC bytecode lazy initialization.
The default value is false.
This is an advanced option.
EOF
  end
  add_param(:license, :string) do |p|
    p.description =<<EOF
<product> <serial-number>
Specifies a product and a serial number. (repeatable)
This is an advanced option.
EOF
  end
  add_param(:library_path, :files) do |p|
    p.description =<<EOF
Links SWC files to the resulting application SWF file. The compiler only links in those classes for the SWC file that are required.
The default value of the library-path option includes all SWC files in the libs directory and the current locale. These are required.
To point to individual classes or packages rather than entire SWC files, use the source-path option.
If you set the value of the library-path as an option of the command-line compiler, you must also explicitly add the framework.swc and locale SWC files. Your new entry is not appended to the library-path but replaces it.
You can use the += operator to append the new argument to the list of existing SWC files.
In a configuration file, you can set the append attribute of the library-path tag to true to indicate that the values should be appended to the library path rather than replace it.
EOF
  end
  add_param_alias(:l, :library_path)
  add_param(:link_report, :file) do |p|
    p.description =<<EOF
Prints linking information to the specified output file. This file is an XML file that contains <def>, <pre>, and <ext> symbols showing linker dependencies in the final SWF file.
The file format output by this command can be used to write a file for input to the load-externs option.
For more information on the report, see Examining linker dependencies (http://livedocs.adobe.com/flex/2/docs/00001394.html#211202).
This is an advanced option.
EOF
  end
  add_param(:load_config, :file) do |p|
    p.description =<<EOF
Specifies the location of the configuration file that defines compiler options.
If you specify a configuration file, you can override individual options by setting them on the command line.
All relative paths in the configuration file are relative to the location of the configuration file itself.
Use the += operator to chain this configuration file to other configuration files.
For more information on using configuration files to provide options to the command-line compilers, see About configuration files (http://livedocs.adobe.com/flex/2/docs/00001490.html#138195).
EOF
  end
  add_param(:load_externs, :file) do |p|
    p.description =<<EOF
Specifies the location of an XML file that contains <def>, <pre>, and <ext> symbols to omit from linking when compiling a SWF file. The XML file uses the same syntax as the one produced by the link-report option. For more information on the report, see Examining linker dependencies (http://livedocs.adobe.com/flex/2/docs/00001394.html#211202).
This option provides compile-time link checking for external components that are dynamically linked.
For more information about dynamic linking, see About linking (http://livedocs.adobe.com/flex/2/docs/00001521.html#205852).
This is an advanced option.
EOF
  end
  add_param(:locale, :string) do |p|
    p.description =<<EOF
Specifies the locale that should be packaged in the SWF file (for example, en_EN). You run the mxmlc compiler multiple times to create SWF files for more than one locale, with only the locale option changing.
You must also include the parent directory of the individual locale directories, plus the token {locale}, in the source-path; for example:
mxmlc -locale en_EN -source-path locale/{locale} -file-specs MainApp.mxml
For more information, see Localizing Flex Applicationsin (http://livedocs.adobe.com/flex/2/docs/00000898.html#129288) Flex 2 Developer's Guide.
EOF
  end
  add_param(:localized_description, :string) do |p|
    p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
  end
  add_param(:localized_title, :string) do |p|
    p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
  end
  add_param(:namespaces_namespace, :string) do |p|
    p.description =<<EOF
Specifies a namespace for the MXML file. You must include a URI and the location of the manifest file that defines the contents of this namespace. This path is relative to the MXML file.
For more information about manifest files, see About manifest files (http://livedocs.adobe.com/flex/2/docs/00001519.html#134676).
EOF
  end
  add_param(:optimize, :boolean) do |p|
    p.description =<<EOF
Enables the ActionScript optimizer. This optimizer reduces file size and increases performance by optimizing the SWF file's bytecode.
The default value is false.
EOF
  end
  add_param(:output, :file) do |p|
    p.description =<<EOF
Specifies the output path and filename for the resulting file. If you omit this option, the compiler saves the SWF file to the directory where the target file is located.
The default SWF filename matches the target filename, but with a SWF file extension.
If you use a relative path to define the filename, it is always relative to the current working directory, not the target MXML application root.
The compiler creates extra directories based on the specified filename if those directories are not present.
When using this option with the component compiler, the output is a SWC file rather than a SWF file.
EOF
  end
  add_param(:publisher, :string) do |p|
    p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
  end
  add_param(:raw_metadata, :string) do |p|
    p.description =<<EOF
XML text to store in the SWF metadata (overrides metadata.* configuration) (advanced)
EOF
  end
  add_param(:resource_bundle_list, :file) do |p|
    p.description =<<EOF
Prints a list of resource bundles to input to the compc compiler to create a resource bundle SWC file. The filename argument is the name of the file that contains the list of bundles.
For more information, see Localizing Flex Applications (http://livedocs.adobe.com/flex/2/docs/00000898.html#129288) in Flex 2 Developer's Guide.
EOF
  end
  add_param(:runtime_shared_libraries, :urls) do |p|
    p.description =<<EOF
Specifies a list of run-time shared libraries (RSLs) to use for this application. RSLs are dynamically-linked at run time.
You specify the location of the SWF file relative to the deployment location of the application. For example, if you store a file named library.swf file in the web_root/libraries directory on the web server, and the application in the web root, you specify libraries/library.swf.
For more information about RSLs, see Using Runtime Shared Libraries. (http://livedocs.adobe.com/flex/2/docs/00001520.html#168690)
EOF
  end
  add_param_alias(:rsl, :runtime_shared_libraries)
  add_param(:runtime_shared_library_path, :string) do |p|
    p.description =<<EOF
[path-element] [rsl-url] [policy-file-url] [rsl-url] [policy-file-url]
alias -rslp
(repeatable)
EOF
  end
  add_param_alias(:rslp, :runtime_shared_library_path)
  add_param(:services, :file) do |p|
    p.description = "Specifies the location of the services-config.xml file. This file is used by Flex Data Services."
  end
  # Warning/strictness toggles default to true; show_on_false makes the flag
  # appear on the command line only when disabled.
  add_param(:show_actionscript_warnings, :boolean) do |p|
    p.value = true
    p.show_on_false = true
    p.description =<<EOF
Shows warnings for ActionScript classes.
The default value is true.
For more information about viewing warnings and errors, see Viewing warnings and errors (http://livedocs.adobe.com/flex/2/docs/00001517.html#182413).
EOF
  end
  add_param(:show_binding_warnings, :boolean) do |p|
    p.value = true
    p.show_on_false = true
    p.description =<<EOF
Shows a warning when Flash Player cannot detect changes to a bound property.
The default value is true.
For more information about compiler warnings, see Using SWC files (http://livedocs.adobe.com/flex/2/docs/00001505.html#158337).
EOF
  end
  add_param(:show_deprecation_warnings, :boolean) do |p|
    p.value = true
    p.show_on_false = true
    p.description =<<EOF
Shows deprecation warnings for Flex components. To see warnings for ActionScript classes, use the show-actionscript-warnings option.
The default value is true.
For more information about viewing warnings and errors, see Viewing warnings and errors.
EOF
  end
  add_param(:source_path, :paths) do |p|
    p.preprocessable = true
    p.description =<<EOF
Adds directories or files to the source path. The Flex compiler searches directories in the source path for MXML or AS source files that are used in your Flex applications and includes those that are required at compile time.
You can use wildcards to include all files and subdirectories of a directory.
To link an entire library SWC file and not individual classes or directories, use the library-path option.
The source path is also used as the search path for the component compiler's include-classes and include-resource-bundles options.
You can also use the += operator to append the new argument to the list of existing source path entries.
EOF
  end
  add_param_alias(:sp, :source_path)
  add_param(:static_link_runtime_shared_libraries, :boolean) do |p|
    p.description =<<EOF
Statically link the libraries specified by the -runtime-shared-libraries-path option.
alias -static-rsls
EOF
  end
  add_param_alias(:static_rsls, :static_link_runtime_shared_libraries)
  add_param(:strict, :boolean) do |p|
    p.value = true
    p.show_on_false = true
    p.description =<<EOF
Prints undefined property and function calls; also performs compile-time type checking on assignments and options supplied to method calls.
The default value is true.
For more information about viewing warnings and errors, see Viewing warnings and errors (http://livedocs.adobe.com/flex/2/docs/00001517.html#182413).
EOF
  end
  add_param(:target_player, :string) do |p|
    p.description =<<EOF
Specifies the version of the player the application is targeting.
Features requiring a later version will not be compiled into the application. The minimum value supported is "9.0.0".
EOF
  end
  add_param(:theme, :files) do |p|
    p.description =<<EOF
Specifies a list of theme files to use with this application. Theme files can be SWC files with CSS files inside them or CSS files.
For information on compiling a SWC theme file, see Using Styles and Themes (http://livedocs.adobe.com/flex/2/docs/00000751.html#241755) in Flex 2 Developer's Guide.
EOF
  end
  add_param(:title, :string) do |p|
    p.description = "Sets metadata in the resulting SWF file. For more information, see Adding metadata to SWF files (http://livedocs.adobe.com/flex/2/docs/00001502.html#145380)."
  end
  add_param(:use_network, :boolean) do |p|
    p.value = true
    p.show_on_false = true
    p.description =<<EOF
Specifies that the current application uses network services.
The default value is true.
When the use-network property is set to false, the application can access the local filesystem (for example, use the XML.load() method with file: URLs) but not network services. In most circumstances, the value of this property should be true.
For more information about the use-network property, see Applying Flex Security (http://livedocs.adobe.com/flex/2/docs/00001328.html#137544).
EOF
  end
  add_param(:verbose_stacktraces, :boolean) do |p|
    p.description =<<EOF
Generates source code that includes line numbers. When a run-time error occurs, the stacktrace shows these line numbers.
Enabling this option generates larger SWF files.\nThe default value is false.
EOF
  end
  add_param(:verify_digests, :boolean) do |p|
    p.description = "Verifies the libraries loaded at runtime are the correct ones."
  end
  add_param(:warn_warning_type, :boolean) do |p|
    p.description = "Enables specified warnings. For more information, see Viewing warnings and errors (http://livedocs.adobe.com/flex/2/docs/00001517.html#182413)."
  end
  add_param(:warnings, :boolean) do |p|
    p.description =<<EOF
Enables all warnings. Set to false to disable all warnings. This option overrides the warn-warning_type options.
The default value is true.
EOF
  end
  # This must be the last item in this list
  add_param(:input, :file) do |p|
    p.preprocessable = true
    p.hidden_name = true
    p.description = "Main source file to send compiler"
  end
end
# Finalizes the task after configuration: derives the default output file
# from the task name, adds the input's directory to the source path, and
# registers generated files with the CLEAN list. Returns self. :nodoc:
def define
  super
  # Default the output to the task name when it looks like a SWF/SWC target.
  # Patterns are escaped and anchored; the originals (/.swf/ and /swc/) also
  # matched names like 'xswfy' because the dot was a wildcard and nothing was
  # anchored to the end of the name.
  if(!output)
    if(name.match(/\.swf$/) || name.match(/\.swc$/))
      self.output = name
    end
  end
  # Non-CSS inputs contribute their directory to the source path.
  # File.exist? replaces File.exists?, which was removed in Ruby 3.2.
  if(input && !input.match(/\.css$/) && File.exist?(input))
    source_path << File.dirname(input)
  end
  if(link_report)
    CLEAN.add(link_report)
  end
  # Collapse duplicates and nested entries before handing the list to mxmlc.
  source_path.uniq!
  param_hash['source_path'].value = clean_nested_source_paths(source_path)
  CLEAN.add(output)
  if(incremental)
    CLEAN.add(FileList['**/**/*.cache'])
  end
  self
end
protected
# Filters +paths+ down to a list in which no entry is nested inside another.
# The pairwise decision (and pruning of entries superseded by a broader path)
# is delegated to #check_nested_source_path.
def clean_nested_source_paths(paths)
  paths.each_with_object([]) do |candidate, kept|
    kept << candidate if check_nested_source_path(kept, candidate)
  end
end
# Decides whether +path+ belongs in +array+ (a list of source paths that
# should contain no nested entries). Entries of +array+ that live inside
# +path+ are removed in place; returns false when +path+ itself lies inside
# an existing entry (the caller must not append it), true otherwise.
#
# Fixes two defects of the previous version: it mutated the array while
# iterating with each_index/slice! (so the element following a removed one
# was silently skipped), and it interpolated raw paths into regexes (so
# metacharacters such as '.' could match unrelated paths).
def check_nested_source_path(array, path)
  return false if array.any? { |item| path.start_with?(item) }
  array.reject! { |item| item.start_with?(path) }
  true
end
# Wires a LibraryTask dependency into this compilation: a compiled SWC goes
# on the library_path, anything else contributes its sources to source_path.
# TODO: Add support for libraries that don't get copied into the project.
def resolve_library(library_task)
  path = library_task.project_path
  # Escaped dot: the previous /.swc$/ also matched names such as 'fooswc'.
  if(path.match(/\.swc$/))
    library_path << path
  else
    source_path << path
  end
end
# Sends the compile command to an already-running FCSH shell instead of
# spawning a fresh compiler process. FCSH protocol errors propagate
# untouched; any other failure is reported as a hint that the shell is
# probably not running.
def execute_with_fcsh(command)
  display_preprocess_message
  puts FCSHSocket.execute("mxmlc #{command}")
rescue FCSHError
  raise
rescue StandardError
  raise MXMLCError.new("[ERROR] There was a problem connecting to the Flex Compiler SHell, run 'rake fcsh:start' in another terminal.")
end
# Runs the compiler — through FCSH when @use_fcsh is set, otherwise via the
# regular ToolTask mechanism. MXMLC writes warnings to stderr, so an
# ExecutionError whose message contains 'Warning:' is downgraded to a logged
# warning instead of failing the task.
def execute(*args)
  if @use_fcsh
    execute_with_fcsh(to_shell)
  else
    super
  end
rescue ExecutionError => e
  raise e unless e.message.index('Warning:')
  Log.puts(e.message.gsub('[ERROR]', '[WARNING]'))
end
end
end
# Helper method for defining and accessing MXMLCTask instances in a rakefile.
# +args+ follows standard rake task conventions (a name, or name => prereqs);
# the optional block receives the task instance for configuration.
def mxmlc(args, &block)
  Sprout::MXMLCTask.define_task(args, &block)
end
|
removed scraper.rb
# Scrapes temperature readings from a weather page that is fetched and parsed
# once, at construction time.
class Scraper
  # NOTE(review): @forecast is never assigned anywhere in this class, so this
  # reader always returns nil — confirm whether it is vestigial.
  attr_reader :forecast
  # +scrape_target+ is passed to Kernel#open and then Nokogiri; using a URL
  # here requires 'open-uri' to be loaded by the caller — TODO confirm.
  def initialize(scrape_target)
    @scraped_content = Nokogiri::HTML(open(scrape_target))
  end
  # First two characters of the 'p.wx-temp' text, as an Integer.
  # NOTE(review): identical to #get_temp_tomorrow — one of the two selectors
  # is probably wrong; the [0..1] slice also breaks for one- or three-digit
  # temperatures.
  def get_temp_yesterday
    @scraped_content.css('p.wx-temp').text.strip[0..1].to_i
  end
  # NOTE(review): unlike the other getters this returns the raw Nokogiri
  # NodeSet rather than an Integer — confirm callers expect that.
  def get_temp_now
    @scraped_content.css('span.temperature-fahrenheit')
  end
  # Same selector and slicing as #get_temp_yesterday — see the NOTE there.
  def get_temp_tomorrow
    @scraped_content.css('p.wx-temp').text.strip[0..1].to_i
  end
end
# -*- encoding : utf-8 -*-
module Linael
# IRC module that sings the banana song on !banana, with an authenticated
# add/del command pair for managing who may trigger it.
class Modules::Banana < ModuleIRC
  Name="banana"
  # Song lines, emitted verbatim to the channel.
  Lyrics=[
    "http://www.youtube.com/watch?v=vNie6hVM8ZI",
    " ",
    "ba-ba-ba-ba-ba-nana (2x)",
    " ",
    "banana-ah-ah (ba-ba-ba-ba-ba-nana)",
    "potato-na-ah-ah (ba-ba-ba-ba-ba-nana)",
    "banana-ah-ah (ba-ba-ba-ba-ba-nana)",
    "togari noh pocato-li kani malo mani kano",
    "chi ka-baba, ba-ba-nana",
    " ",
    "yoh plano boo la planonoh too",
    "ma bana-na la-ka moobi talamoo",
    "ba-na-na",
    " ",
    "ba-ba (ba-ba-ba-ba-banana)",
    "PO-TAE-TOH-OH-OH (ba-ba-ba-ba-banana)",
    "togari noh pocato li kani malo mani kano",
    "chi ka-ba-ba, ba-ba-naNAAAHHHH!!!!"
  ]
  Help=[
    "Module: Banana",
    " ",
    "=====Fonctions=====",
    "!banana => sing the banana song",
    "!banana -[add|del] username => add/del a user for this module"
  ]
  # Registers the public song command and the auth-only management commands.
  def startMod
    add_module({cmd: [:song],
                cmdAuth: [:addUser,
                          :delUser]})
  end
  # Sings the whole song, one line every half second, when the caller passes
  # the module? gate.
  def song privMsg
    return unless module? privMsg
    Lyrics.each{|line| answer(privMsg,line);sleep(0.5)}
  end
  # !banana -add <user>: whitelist <user> for the song command.
  # The pattern was /!banana.*-add\s*(\S*)/ — now aligned with delUser's
  # stricter /!banana\s*-del\s*(\S*)/ form.
  def addUser privMsg
    if (privMsg.message =~ /!banana\s*-add\s*(\S*)/)
      answer(privMsg,"Oki doki! #{$~[1]} can now banana :)")
      @user << $~[1].downcase
    end
  end
  # !banana -del <user>: remove <user> from the whitelist.
  def delUser privMsg
    if (privMsg.message =~ /!banana\s*-del\s*(\S*)/)
      answer(privMsg,"Oki doki! #{$~[1]} won't banana anymore :(")
      @user.delete $~[1].downcase
    end
  end
  # True when the message is a bare !banana command (optionally followed by
  # non-letter characters) AND the sender is whitelisted or speaking in a
  # private message. [^A-Za-z] fixes the classic [^A-z] class, whose range
  # also covered the characters [ \ ] ^ _ ` and so rejected messages such as
  # "!banana_".
  def module? privMsg
    (privMsg.message.encode.downcase =~ /^!banana[^A-Za-z]*$/) && ((@user.detect {|user| privMsg.who.downcase.match(user)}) || (privMsg.private_message?))
  end
  # Seeds the whitelist before the IRC runner hooks everything up.
  def initialize(runner)
    @user=["zaratan"]
    super runner
  end
end
end
rewrite banana
# -*- encoding : utf-8 -*-
# Sing the banana song :)
#
# DSL rewrite of the former Modules::Banana < ModuleIRC class: commands are
# declared with `on`, initialization with `on_init`.
linael :banana,require_auth: true do
  # Song lines, emitted verbatim to the channel.
  # NOTE(review): assigning a constant inside a block defines it on the
  # enclosing scope and warns if the file is re-evaluated — confirm intended.
  Lyrics=[
    "http://www.youtube.com/watch?v=vNie6hVM8ZI",
    " ",
    "ba-ba-ba-ba-ba-nana (2x)",
    " ",
    "banana-ah-ah (ba-ba-ba-ba-ba-nana)",
    "potato-na-ah-ah (ba-ba-ba-ba-ba-nana)",
    "banana-ah-ah (ba-ba-ba-ba-ba-nana)",
    "togari noh pocato-li kani malo mani kano",
    "chi ka-baba, ba-ba-nana",
    " ",
    "yoh plano boo la planonoh too",
    "ma bana-na la-ka moobi talamoo",
    "ba-na-na",
    " ",
    "ba-ba (ba-ba-ba-ba-banana)",
    "PO-TAE-TOH-OH-OH (ba-ba-ba-ba-banana)",
    "togari noh pocato li kani malo mani kano",
    "chi ka-ba-ba, ba-ba-naNAAAHHHH!!!!"
  ]
  help [
    "Sing the banana song",
    " ",
    "=====Fonctions=====",
    "!banana => sing the banana song",
    "!banana -[add|del] username => add/del a user for this module"
  ]
  # Seed the whitelist of nicks allowed to trigger the song.
  on_init do
    @user=["zaratan"]
  end
  #sing
  on :cmd, :song, /^!banana[^A-z]*$/ do |msg,options|
    # NOTE(review): presumably `before` registers a guard that must pass
    # (whitelisted sender, or a private message) before the body runs —
    # confirm against the linael DSL. The inner block param shadows msg.
    before(msg) do |msg|
      ((@user.detect {|user| msg.who.downcase.match(user)}) || (msg.private_message?))
    end
    Lyrics.each{|line| answer(msg,line);sleep(0.5)}
  end
  #add a user
  on :cmdAuth, :add_user, /!banana\s-add\s/ do |msg,options|
    answer(msg,"Oki doki! #{options.who} can now banana :)")
    @user << options.who.downcase
  end
  #del a user
  on :cmdAuth, :del_user, /!banana\s-del\s/ do |msg,options|
    answer(msg,"Oki doki! #{options.who} won't banana anymore :(")
    @user.delete options.who.downcase
  end
end
|
require "multi_currency/version"
require "multi_currency/converter"
# Mixin that adds per-column currency-conversion helpers to ActiveRecord
# models via .multi_currency_for.
module MultiCurrency
  extend ActiveSupport::Concern
  module ClassMethods
    # Columns declared multi-currency on this class.
    def multi_currency_columns
      # Class-level instance variable. The previous @@multi_currency_columns
      # class variable lived on the ClassMethods module itself and was
      # therefore shared by every model that included MultiCurrency.
      @multi_currency_columns ||= []
    end
    # Validating writer used by .multi_currency_for.
    # Raises when +columns+ is not an Array.
    def multi_currency_columns=(columns)
      raise "Multi currency columns should be an array" unless columns.is_a? Array
      @multi_currency_columns = columns
    end
    # Declares +columns+ as multi-currency amounts. For each column defines:
    #   .sum_<column>(currency) — SQL SUM with per-row conversion into
    #                             +currency+ via the exchange_rates table
    #   #<column>_in(code)      — the instance's amount converted into +code+
    def multi_currency_for(columns)
      # `self.` is essential: the previous version wrote
      # `multi_currency_columns = columns`, which created a local variable
      # and never invoked the writer, so the configuration was never stored
      # on the class.
      self.multi_currency_columns = columns
      multi_currency_columns.each do |column|
        define_singleton_method "sum_#{column}" do |currency|
          self.sum("
CASE #{column}_currency
WHEN '#{currency.downcase}' THEN #{column}
ELSE #{column} * (SELECT exchange_rates.rate FROM exchange_rates WHERE (exchange_rates.from_code = #{column}_currency AND to_code = '#{currency.downcase}' AND date = #{column}_rate_date) )
END")
        end
        define_method "#{column}_in" do |currency_code|
          # Best-effort fallbacks: records lacking the *_currency /
          # *_rate_date accessors use the app default currency and today.
          default_currency = self.send("#{column}_currency") rescue Money.default_currency.id
          date = self.send("#{column}_rate_date") rescue Date.today
          rate = MultiCurrency::Converter.get_rate_and_cache(default_currency, currency_code, date)
          self.send(column) * rate
        end
      end
    end
  end
end
ActiveRecord::Base.include(MultiCurrency)
do_currency_exchange method
require "multi_currency/version"
require "multi_currency/converter"
# Mixin that adds per-column currency-conversion helpers to ActiveRecord
# models via .multi_currency_for.
module MultiCurrency
  extend ActiveSupport::Concern
  module ClassMethods
    # Columns declared multi-currency on this class.
    def multi_currency_columns
      # Class-level instance variable. The previous @@multi_currency_columns
      # class variable lived on the ClassMethods module itself and was
      # therefore shared by every model that included MultiCurrency.
      @multi_currency_columns ||= []
    end
    # Validating writer used by .multi_currency_for.
    # Raises when +columns+ is not an Array.
    def multi_currency_columns=(columns)
      raise "Multi currency columns should be an array" unless columns.is_a? Array
      @multi_currency_columns = columns
    end
    # Declares +columns+ as multi-currency amounts. For each column defines:
    #   .sum_<column>(currency) — SQL SUM with per-row conversion into
    #                             +currency+ via the exchange_rates table
    #   #<column>_in(code)      — the instance's amount converted into +code+
    # and one #do_currency_exchange that converts the *_source_* fields of
    # every configured column.
    def multi_currency_for(columns)
      # `self.` is essential: the previous version wrote
      # `multi_currency_columns = columns`, which created a local variable
      # and never invoked the writer, so the configuration was never stored
      # on the class.
      self.multi_currency_columns = columns
      multi_currency_columns.each do |column|
        define_singleton_method "sum_#{column}" do |currency|
          self.sum("
CASE #{column}_currency
WHEN '#{currency.downcase}' THEN #{column}
ELSE #{column} * (SELECT exchange_rates.rate FROM exchange_rates WHERE (exchange_rates.from_code = #{column}_currency AND to_code = '#{currency.downcase}' AND date = #{column}_rate_date) )
END")
        end
        define_method "#{column}_in" do |currency_code|
          default_currency = self.send("#{column}_currency") rescue Money.default_currency.id # to do: should get this from multi_currency config
          date = self.send("#{column}_rate_date") rescue Date.today
          rate = MultiCurrency::Converter.get_rate_and_cache(default_currency, currency_code, date)
          self.send(column) * rate
        end
      end
      # Converts each configured column's *_source_amount into the target
      # currency, stamping *_currency and *_rate_date on the record.
      define_method "do_currency_exchange" do
        # `columns` is captured from the enclosing multi_currency_for call.
        columns.each do |column|
          # send-based writers replace the string evals of the previous
          # version (eval("self.#{column}_currency = 'usd'") etc.).
          self.send("#{column}_currency=", 'usd') # to do: should get this from multi_currency config
          date = self.send("#{column}_rate_date") || Date.today
          self.send("#{column}_rate_date=", date)
          rate = MultiCurrency::Converter.get_rate_and_cache(self.send("#{column}_source_currency"), self.send("#{column}_currency"), date)
          self.send("#{column}=", self.send("#{column}_source_amount").to_f * rate)
        end
      end
    end
  end
end
ActiveRecord::Base.include(MultiCurrency) |
module Muster
  # Semantic version string of the Muster gem.
  VERSION = '0.0.1'
end
Updated version for release
module Muster
  # Semantic version string of the Muster gem.
  VERSION = '0.0.2'
end
|
module Mutton
  # Semantic version string of the Mutton gem.
  VERSION = "0.0.11"
end
bump up gem version
module Mutton
  # Semantic version string of the Mutton gem.
  VERSION = "0.0.12"
end
|
module Mysql2
  # Semantic version string of the mysql2 gem.
  VERSION = '0.3.15'
end
Bump version to 0.3.16
module Mysql2
  # Semantic version string of the mysql2 gem.
  VERSION = '0.3.16'
end
|
# Copyright 2016-2021 The NATS Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative 'parser'
require_relative 'version'
require_relative 'errors'
require_relative 'msg'
require_relative 'subscription'
require_relative 'js'
require 'nats/nuid'
require 'thread'
require 'socket'
require 'json'
require 'monitor'
require 'uri'
require 'securerandom'
begin
require "openssl"
rescue LoadError
end
module NATS
class << self
  # NATS.connect creates a connection to the NATS Server.
  #
  # @param uri [String] URL endpoint of the NATS Server or cluster.
  # @param opts [Hash] Options to customize the NATS connection.
  # @return [NATS::Client] a connected client.
  #
  # @example
  #   require 'nats'
  #   nc = NATS.connect("demo.nats.io")
  #   nc.publish("hello", "world")
  #   nc.close
  #
  def connect(uri=nil, opts={})
    client = NATS::Client.new
    client.connect(uri, opts)
    client
  end
end
# Status represents the different states of a NATS connection.
# A client starts from the DISCONNECTED state, moves to CONNECTING during
# the initial connect, then CONNECTED. If the connection is reset
# then it goes from DISCONNECTED to RECONNECTING until it is back to
# the CONNECTED state. In case the client gives up reconnecting or
# the connection is manually closed then it will reach the CLOSED
# connection state, after which it will not reconnect again.
module Status
  # When the client is not actively connected.
  DISCONNECTED = 0

  # When the client is connected.
  CONNECTED = 1

  # When the client will no longer attempt to connect to a NATS Server.
  CLOSED = 2

  # When the client has disconnected and is attempting to reconnect.
  RECONNECTING = 3

  # When the client is attempting to connect to a NATS Server for the first time.
  CONNECTING = 4
end
# Client creates a connection to the NATS Server.
class Client
include MonitorMixin
include Status
# NOTE(review): :connected_server is also defined as a method below, which
# overrides this generated reader.
attr_reader :status, :server_info, :server_pool, :options, :connected_server, :stats, :uri

# Default server endpoint when none is supplied to #connect.
DEFAULT_PORT = 4222
DEFAULT_URI = ("nats://localhost:#{DEFAULT_PORT}".freeze)

# Wire protocol framing and command fragments.
CR_LF = ("\r\n".freeze)
CR_LF_SIZE = (CR_LF.bytesize)

PING_REQUEST = ("PING#{CR_LF}".freeze)
PONG_RESPONSE = ("PONG#{CR_LF}".freeze)

# Header-mode (HMSG) constants: version line and well-known header keys.
NATS_HDR_LINE = ("NATS/1.0#{CR_LF}".freeze)
STATUS_MSG_LEN = 3
STATUS_HDR = ("Status".freeze)
DESC_HDR = ("Description".freeze)
NATS_HDR_LINE_SIZE = (NATS_HDR_LINE.bytesize)

SUB_OP = ('SUB'.freeze)
EMPTY_MSG = (''.freeze)
# Sets up all connection state. No I/O happens here; the socket is only
# opened by #connect.
def initialize
  super # required to initialize monitor

  @options = nil

  # Read/Write IO
  @io = nil

  # Queues for coalescing writes of commands we need to send to server.
  @flush_queue = nil
  @pending_queue = nil

  # Parser with state
  @parser = NATS::Protocol::Parser.new(self)

  # Threads for both reading and flushing command
  @flusher_thread = nil
  @read_loop_thread = nil
  @ping_interval_thread = nil

  # Info that we get from the server
  @server_info = { }

  # URI from server to which we are currently connected
  @uri = nil
  @server_pool = []

  @status = DISCONNECTED

  # Subscriptions, keyed by client-assigned subscription id (sid)
  @subs = { }
  @ssid = 0

  # Ping interval bookkeeping for stale-connection detection
  @pings_outstanding = 0
  @pongs_received = 0
  @pongs = []
  @pongs.extend(MonitorMixin)

  # Accounting
  @pending_size = 0
  @stats = {
    in_msgs: 0,
    out_msgs: 0,
    in_bytes: 0,
    out_bytes: 0,
    reconnects: 0
  }

  # Sticky error
  @last_err = nil

  # Async callbacks, no ops by default.
  @err_cb = proc { }
  @close_cb = proc { }
  @disconnect_cb = proc { }
  @reconnect_cb = proc { }

  # Secure TLS options
  @tls = nil

  # Hostname of current server; used for when TLS host
  # verification is enabled.
  @hostname = nil
  @single_url_connect_used = false

  # Track whether connect has been already been called.
  @connect_called = false

  # New style request/response implementation (single mux subscription).
  @resp_sub = nil
  @resp_map = nil
  @resp_sub_prefix = nil
  @nuid = NATS::NUID.new

  # NKEYS
  @user_credentials = nil
  @nkeys_seed = nil
  @user_nkey_cb = nil
  @user_jwt_cb = nil
  @signature_cb = nil
end
# Establishes a connection to NATS.
#
# @param uri [String, URI, Hash] server URL(s), or an options Hash.
# @param opts [Hash] connection options (reconnect, TLS, timeouts, ...);
#   several of them can also be overridden via NATS_* environment variables.
# @return [NATS::Client] self, connected and with worker threads running.
def connect(uri=nil, opts={})
  synchronize do
    # In case it has been connected already, then do not need to call this again.
    return if @connect_called
    @connect_called = true
  end

  # Convert URI to string if needed.
  uri = uri.to_s if uri.is_a?(URI)

  case uri
  when String
    # Initialize TLS defaults in case any url is using it.
    srvs = opts[:servers] = process_uri(uri)
    if srvs.any? {|u| u.scheme == 'tls'} and !opts[:tls]
      tls_context = OpenSSL::SSL::SSLContext.new
      tls_context.set_params
      opts[:tls] = {
        context: tls_context
      }
    end
    @single_url_connect_used = true if srvs.size == 1
  when Hash
    opts = uri
  end

  # Fill in library defaults for anything not set explicitly.
  opts[:verbose] = false if opts[:verbose].nil?
  opts[:pedantic] = false if opts[:pedantic].nil?
  opts[:reconnect] = true if opts[:reconnect].nil?
  opts[:old_style_request] = false if opts[:old_style_request].nil?
  opts[:reconnect_time_wait] = NATS::IO::RECONNECT_TIME_WAIT if opts[:reconnect_time_wait].nil?
  opts[:max_reconnect_attempts] = NATS::IO::MAX_RECONNECT_ATTEMPTS if opts[:max_reconnect_attempts].nil?
  opts[:ping_interval] = NATS::IO::DEFAULT_PING_INTERVAL if opts[:ping_interval].nil?
  opts[:max_outstanding_pings] = NATS::IO::DEFAULT_PING_MAX if opts[:max_outstanding_pings].nil?

  # Override with ENV
  opts[:verbose] = ENV['NATS_VERBOSE'].downcase == 'true' unless ENV['NATS_VERBOSE'].nil?
  opts[:pedantic] = ENV['NATS_PEDANTIC'].downcase == 'true' unless ENV['NATS_PEDANTIC'].nil?
  opts[:reconnect] = ENV['NATS_RECONNECT'].downcase == 'true' unless ENV['NATS_RECONNECT'].nil?
  opts[:reconnect_time_wait] = ENV['NATS_RECONNECT_TIME_WAIT'].to_i unless ENV['NATS_RECONNECT_TIME_WAIT'].nil?
  opts[:max_reconnect_attempts] = ENV['NATS_MAX_RECONNECT_ATTEMPTS'].to_i unless ENV['NATS_MAX_RECONNECT_ATTEMPTS'].nil?
  opts[:ping_interval] = ENV['NATS_PING_INTERVAL'].to_i unless ENV['NATS_PING_INTERVAL'].nil?
  opts[:max_outstanding_pings] = ENV['NATS_MAX_OUTSTANDING_PINGS'].to_i unless ENV['NATS_MAX_OUTSTANDING_PINGS'].nil?
  opts[:connect_timeout] ||= NATS::IO::DEFAULT_CONNECT_TIMEOUT
  @options = opts

  # Process servers in the NATS cluster and pick one to connect
  uris = opts[:servers] || [DEFAULT_URI]
  uris.shuffle! unless @options[:dont_randomize_servers]
  uris.each do |u|
    nats_uri = case u
               when URI
                 u.dup
               else
                 URI.parse(u)
               end
    @server_pool << {
      :uri => nats_uri,
      :hostname => nats_uri.host
    }
  end

  if @options[:old_style_request]
    # Replace for this instance the implementation
    # of request to use the old_request style.
    class << self; alias_method :request, :old_request; end
  end

  # NKEYS
  @user_credentials ||= opts[:user_credentials]
  @nkeys_seed ||= opts[:nkeys_seed]
  setup_nkeys_connect if @user_credentials or @nkeys_seed

  # Check for TLS usage
  @tls = @options[:tls]

  srv = nil
  begin
    srv = select_next_server

    # Create TCP socket connection to NATS
    @io = create_socket
    @io.connect

    # Capture state that we have had a TCP connection established against
    # this server and could potentially be used for reconnecting.
    srv[:was_connected] = true

    # Connection established and now in process of sending CONNECT to NATS
    @status = CONNECTING

    # Use the hostname from the server for TLS hostname verification.
    if client_using_secure_connection? and single_url_connect_used?
      # Always reuse the original hostname used to connect.
      @hostname ||= srv[:hostname]
    else
      @hostname = srv[:hostname]
    end

    # Established TCP connection successfully so can start connect
    process_connect_init

    # Reset reconnection attempts if connection is valid
    srv[:reconnect_attempts] = 0
    srv[:auth_required] ||= true if @server_info[:auth_required]

    # Add back to rotation since successfully connected
    server_pool << srv
  rescue NATS::IO::NoServersError => e
    @disconnect_cb.call(e) if @disconnect_cb
    raise @last_err || e
  rescue => e
    # Capture sticky error
    synchronize do
      @last_err = e
      srv[:auth_required] ||= true if @server_info[:auth_required]
      server_pool << srv if can_reuse_server?(srv)
    end

    err_cb_call(self, e, nil) if @err_cb

    if should_not_reconnect?
      @disconnect_cb.call(e) if @disconnect_cb
      raise e
    end

    # Clean up any connecting state and close connection without
    # triggering the disconnection/closed callbacks.
    close_connection(DISCONNECTED, false)

    # always sleep here to safe guard against errors before current[:was_connected]
    # is set for the first time
    sleep @options[:reconnect_time_wait] if @options[:reconnect_time_wait]

    # Continue retrying until there are no options left in the server pool
    retry
  end

  # Initialize queues and loops for message dispatching and processing engine
  @flush_queue = SizedQueue.new(NATS::IO::MAX_FLUSH_KICK_SIZE)
  @pending_queue = SizedQueue.new(NATS::IO::MAX_PENDING_SIZE)
  @pings_outstanding = 0
  @pongs_received = 0
  @pending_size = 0

  # Server roundtrip went ok so consider to be connected at this point
  @status = CONNECTED

  # Connected to NATS so Ready to start parser loop, flusher and ping interval
  start_threads!

  self
end
# Publishes +msg+ to +subject+, optionally carrying a reply subject.
#
# @param subject [String] destination subject (must be non-empty).
# @param msg [String] payload bytes.
# @param opt_reply [String, nil] reply-to subject for request patterns.
# @raise [NATS::IO::BadSubject] when subject is nil or empty.
def publish(subject, msg=EMPTY_MSG, opt_reply=nil, &blk)
  raise NATS::IO::BadSubject if !subject or subject.empty?

  payload_size = msg.bytesize

  # Track outbound accounting before handing the command to the flusher.
  @stats[:out_msgs] += 1
  @stats[:out_bytes] += payload_size

  send_command("PUB #{subject} #{opt_reply} #{payload_size}\r\n#{msg}\r\n")
  @flush_queue << :pub if @flush_queue.empty?
end
# Publishes a NATS::Msg that may include headers.
# Uses HPUB framing when headers are present, plain PUB otherwise.
#
# @param msg [NATS::Msg]
# @raise [TypeError] when msg is not a NATS::Msg.
# @raise [NATS::IO::BadSubject] when the message subject is nil or empty.
def publish_msg(msg)
  raise TypeError, "nats: expected NATS::Msg, got #{msg.class.name}" unless msg.is_a?(Msg)
  raise NATS::IO::BadSubject if !msg.subject or msg.subject.empty?

  msg.reply ||= ''
  msg.data ||= ''
  body_size = msg.data.bytesize

  # Accounting
  @stats[:out_msgs] += 1
  @stats[:out_bytes] += body_size

  if msg.header
    # Serialize headers: NATS/1.0 version line, "Key: Value" pairs, blank line.
    buf = String.new
    buf << NATS_HDR_LINE
    msg.header.each { |k, v| buf << "#{k}: #{v}#{CR_LF}" }
    buf << CR_LF

    hdr_len = buf.bytesize
    send_command("HPUB #{msg.subject} #{msg.reply} #{hdr_len} #{hdr_len + body_size}\r\n#{buf}#{msg.data}\r\n")
  else
    send_command("PUB #{msg.subject} #{msg.reply} #{body_size}\r\n#{msg.data}\r\n")
  end
  @flush_queue << :pub if @flush_queue.empty?
end
# Creates a subscription whose messages are dispatched asynchronously
# to a callback (or consumed synchronously when no callback is given).
#
# @param subject [String] subject to subscribe to.
# @param opts [Hash] :queue, :max, :pending_msgs_limit, :pending_bytes_limit.
# @return [Subscription]
def subscribe(subject, opts={}, &callback)
  sid = nil
  sub = nil
  synchronize do
    sid = (@ssid += 1)
    sub = @subs[sid] = Subscription.new
    sub.nc = self
    sub.sid = sid
  end
  opts[:pending_msgs_limit] ||= NATS::IO::DEFAULT_SUB_PENDING_MSGS_LIMIT
  opts[:pending_bytes_limit] ||= NATS::IO::DEFAULT_SUB_PENDING_BYTES_LIMIT

  sub.subject = subject
  sub.callback = callback
  sub.received = 0
  sub.queue = opts[:queue] if opts[:queue]
  sub.max = opts[:max] if opts[:max]
  sub.pending_msgs_limit = opts[:pending_msgs_limit]
  sub.pending_bytes_limit = opts[:pending_bytes_limit]
  sub.pending_queue = SizedQueue.new(sub.pending_msgs_limit)

  send_command("SUB #{subject} #{opts[:queue]} #{sid}#{CR_LF}")
  @flush_queue << :sub

  # Setup server support for auto-unsubscribe when receiving enough messages
  sub.unsubscribe(opts[:max]) if opts[:max]

  # Without a callback the subscription is consumed synchronously via a
  # condition variable (see sync-fetch paths elsewhere in this class).
  unless callback
    cond = sub.new_cond
    sub.wait_for_msgs_cond = cond
  end

  # Async subscriptions each own a single thread for the
  # delivery of messages.
  # FIXME: Support shared thread pool with configurable limits
  # to better support case of having a lot of subscriptions.
  sub.wait_for_msgs_t = Thread.new do
    loop do
      msg = sub.pending_queue.pop

      cb = nil
      sub.synchronize do
        # Decrease pending size since consumed already
        sub.pending_size -= msg.data.size
        cb = sub.callback
      end

      begin
        # Note: Keep some of the alternative arity versions to slightly
        # improve backwards compatibility. Eventually fine to deprecate
        # since recommended version would be arity of 1 to get a NATS::Msg.
        case cb.arity
        when 0 then cb.call
        when 1 then cb.call(msg)
        when 2 then cb.call(msg.data, msg.reply)
        when 3 then cb.call(msg.data, msg.reply, msg.subject)
        else cb.call(msg.data, msg.reply, msg.subject, msg.header)
        end
      rescue => e
        # Callback errors are reported via the async error callback so the
        # delivery thread keeps running.
        synchronize do
          err_cb_call(self, e, sub) if @err_cb
        end
      end
    end
  end if callback

  sub
end
# Sends a request expecting a single response, using a
# single mux subscription per connection for receiving the responses.
# It times out in case the response is not received within the
# specified deadline.
# If given a callback, then the request happens asynchronously.
#
# @param subject [String] request subject.
# @param payload [String] request payload.
# @param opts [Hash] :timeout (seconds, default 0.5), :old_style.
# @return [NATS::Msg, nil] the response message.
# @raise [NATS::Timeout] when the deadline is exceeded.
# @raise [NATS::IO::NoRespondersError] when the server reports status 503.
def request(subject, payload="", opts={}, &blk)
  raise NATS::IO::BadSubject if !subject or subject.empty?

  # If a block was given then fallback to method using auto unsubscribe.
  return old_request(subject, payload, opts, &blk) if blk
  return old_request(subject, payload, opts) if opts[:old_style]

  token = nil
  inbox = nil
  future = nil
  response = nil
  timeout = opts[:timeout] ||= 0.5
  synchronize do
    start_resp_mux_sub! unless @resp_sub_prefix

    # Create token for this request.
    token = @nuid.next
    inbox = "#{@resp_sub_prefix}.#{token}"

    # Create the a future for the request that will
    # get signaled when it receives the request.
    future = @resp_sub.new_cond
    @resp_map[token][:future] = future
  end

  # Publish request and wait for reply.
  publish(subject, payload, inbox)
  begin
    MonotonicTime::with_nats_timeout(timeout) do
      @resp_sub.synchronize do
        future.wait(timeout)
      end
    end
  rescue NATS::Timeout => e
    # Drop the pending entry so late responses do not leak map entries.
    synchronize { @resp_map.delete(token) }
    raise e
  end

  # Check if there is a response already.
  synchronize do
    result = @resp_map[token]
    response = result[:response]
    @resp_map.delete(token)
  end

  if response and response.header
    status = response.header[STATUS_HDR]
    raise NATS::IO::NoRespondersError if status == "503"
  end

  response
end
# request_msg makes a NATS request using a NATS::Msg that may include headers.
#
# @param msg [NATS::Msg] request message; its reply subject is overwritten
#   with the per-request mux inbox.
# @param opts [Hash] :timeout (seconds, default 0.5).
# @return [NATS::Msg, nil] the response message.
# @raise [TypeError] when msg is not a NATS::Msg.
# @raise [NATS::Timeout] when the deadline is exceeded.
# @raise [NATS::IO::NoRespondersError] when the server reports status 503.
def request_msg(msg, opts={})
  raise TypeError, "nats: expected NATS::Msg, got #{msg.class.name}" unless msg.is_a?(Msg)
  raise NATS::IO::BadSubject if !msg.subject or msg.subject.empty?

  token = nil
  inbox = nil
  future = nil
  response = nil
  timeout = opts[:timeout] ||= 0.5
  synchronize do
    start_resp_mux_sub! unless @resp_sub_prefix

    # Create token for this request.
    token = @nuid.next
    inbox = "#{@resp_sub_prefix}.#{token}"

    # Create the future for the request that will
    # get signaled when it receives the response.
    future = @resp_sub.new_cond
    @resp_map[token][:future] = future
  end
  msg.reply = inbox
  msg.data ||= ''
  # (fix) removed a dead local: msg_size was computed here but never used —
  # publish_msg does its own size accounting.

  # Publish request and wait for reply.
  publish_msg(msg)
  begin
    MonotonicTime::with_nats_timeout(timeout) do
      @resp_sub.synchronize do
        future.wait(timeout)
      end
    end
  rescue NATS::Timeout => e
    # Drop the pending entry so late responses do not leak map entries.
    synchronize { @resp_map.delete(token) }
    raise e
  end

  # Check if there is a response already.
  synchronize do
    result = @resp_map[token]
    response = result[:response]
    @resp_map.delete(token)
  end

  if response and response.header
    status = response.header[STATUS_HDR]
    raise NATS::IO::NoRespondersError if status == "503"
  end

  response
end
# Sends a request creating an ephemeral subscription for the request,
# expecting a single response or raising a timeout in case the response
# is not received within the specified deadline.
# If given a callback, then the request happens asynchronously.
#
# @param subject [String] request subject.
# @param payload [String] request payload.
# @param opts [Hash] :timeout (seconds, default 0.5), :max.
# @return [Subscription, NATS::Msg, nil] the sid when async, else the response.
def old_request(subject, payload, opts={}, &blk)
  return unless subject
  inbox = new_inbox

  # If a callback was passed, then have it process
  # the messages asynchronously and return the sid.
  if blk
    opts[:max] ||= 1
    s = subscribe(inbox, opts) do |msg|
      case blk.arity
      when 0 then blk.call
      when 1 then blk.call(msg)
      when 2 then blk.call(msg.data, msg.reply)
      when 3 then blk.call(msg.data, msg.reply, msg.subject)
      else blk.call(msg.data, msg.reply, msg.subject, msg.header)
      end
    end
    publish(subject, payload, inbox)

    return s
  end

  # In case block was not given, handle synchronously
  # with a timeout and only allow a single response.
  timeout = opts[:timeout] ||= 0.5
  opts[:max] = 1

  sub = Subscription.new
  sub.subject = inbox
  sub.received = 0
  future = sub.new_cond
  sub.future = future
  sub.nc = self

  sid = nil
  synchronize do
    sid = (@ssid += 1)
    sub.sid = sid
    @subs[sid] = sub
  end

  send_command("SUB #{inbox} #{sid}#{CR_LF}")
  @flush_queue << :sub
  # Server-side auto-unsubscribe after a single response.
  unsubscribe(sub, 1)

  sub.synchronize do
    # Publish the request and then wait for the response...
    publish(subject, payload, inbox)
    MonotonicTime::with_nats_timeout(timeout) do
      future.wait(timeout)
    end
  end
  response = sub.response

  if response and response.header
    status = response.header[STATUS_HDR]
    raise NATS::IO::NoRespondersError if status == "503"
  end

  response
end
# Sends a PING and blocks until the matching PONG is received,
# raising NATS::Timeout when the deadline passes.
#
# @param timeout [Numeric] seconds to wait for the PONG (default 60).
def flush(timeout=60)
  waiter = @pongs.new_cond
  @pongs.synchronize do
    @pongs << waiter
    # Only kick the flusher once the pong waiter has been registered.
    @pending_queue << PING_REQUEST
    @flush_queue << :ping
    MonotonicTime::with_nats_timeout(timeout) do
      waiter.wait(timeout)
    end
  end
end
alias :servers :server_pool
# discovered_servers returns the NATS Servers that have been discovered
# via INFO protocol updates (as opposed to explicitly configured ones).
def discovered_servers
  servers.select { |srv| srv[:discovered] }
end
# Closes the connection to NATS, flushing first in case the connection is
# alive and there are any pending messages. Should not be called while
# holding the lock.
def close
  close_connection(CLOSED, true)
end
# new_inbox returns a unique inbox subject used for request/reply
# subscriptions (backed by the connection's NUID generator).
# @return [String]
def new_inbox
  "_INBOX.#{@nuid.next}"
end
# URI of the server this client is currently connected to, or nil when
# not in the CONNECTED state.
def connected_server
  return nil unless connected?

  @uri
end
# True once the handshake has completed and the client is usable.
def connected?
  @status == CONNECTED
end
# True while the initial connect handshake is in progress.
def connecting?
  @status == CONNECTING
end
# True while the client is attempting to re-establish a lost connection.
def reconnecting?
  @status == RECONNECTING
end
# True once the connection has been closed; no further reconnects happen.
def closed?
  @status == CLOSED
end
# Registers the callback invoked on asynchronous errors.
def on_error(&callback)
  @err_cb = callback
end
# Registers the callback invoked when the connection is lost.
def on_disconnect(&callback)
  @disconnect_cb = callback
end
# Registers the callback invoked after a successful reconnect.
def on_reconnect(&callback)
  @reconnect_cb = callback
end
# Registers the callback invoked when the connection is fully closed.
def on_close(&callback)
  @close_cb = callback
end
# Returns the most recent sticky error, if any (thread-safe read).
def last_error
  synchronize { @last_err }
end
# Create a JetStream context.
#
# @param opts [Hash] options forwarded to NATS::JetStream.
# @return [NATS::JetStream]
def jetstream(opts={})
  ::NATS::JetStream.new(self, opts)
end
alias_method :JetStream, :jetstream
alias_method :jsm, :jetstream
private
# Processes an INFO line from the server: refreshes @server_info and
# merges any newly announced cluster servers into the connection pool.
#
# @param line [String] the JSON payload of the INFO protocol line.
# @return [Hash] the updated, symbol-keyed server info.
def process_info(line)
  parsed_info = JSON.parse(line)

  # INFO can be received asynchronously too,
  # so has to be done under the lock.
  synchronize do
    # Symbolize keys from parsed info line
    @server_info = parsed_info.reduce({}) do |info, (k,v)|
      info[k.to_sym] = v
      info
    end

    # Detect any announced server that we might not be aware of...
    connect_urls = @server_info[:connect_urls]
    if connect_urls
      srvs = []
      connect_urls.each do |url|
        scheme = client_using_secure_connection? ? "tls" : "nats"
        u = URI.parse("#{scheme}://#{url}")

        # Skip in case it is the current server which we already know
        next if @uri.host == u.host && @uri.port == u.port

        present = server_pool.detect do |srv|
          srv[:uri].host == u.host && srv[:uri].port == u.port
        end
        if not present
          # Let explicit user and pass options set the credentials.
          u.user = options[:user] if options[:user]
          u.password = options[:pass] if options[:pass]

          # Use creds from the current server if not set explicitly.
          if @uri
            u.user ||= @uri.user if @uri.user
            u.password ||= @uri.password if @uri.password
          end

          # NOTE: Auto discovery won't work here when TLS host verification is enabled.
          srv = { :uri => u, :reconnect_attempts => 0, :discovered => true, :hostname => u.host }
          srvs << srv
        end
      end
      srvs.shuffle! unless @options[:dont_randomize_servers]

      # Include in server pool but keep current one as the first one.
      server_pool.push(*srvs)
    end
  end

  @server_info
end
# Parses a raw NATS message header block into a Hash.
#
# Handles the inline status form ("NATS/1.0 503" with an optional
# description) as well as regular "Key: Value" lines.
#
# @param header [String, nil] raw header section, including the NATS/1.0 line.
# @return [Hash, nil] parsed headers, or nil when no header was present.
def process_hdr(header)
  hdr = nil
  if header
    hdr = {}
    lines = header.lines

    # Check if it is an inline status and description.
    if lines.count <= 2
      status_hdr = lines.first.rstrip
      hdr[STATUS_HDR] = status_hdr.slice(NATS_HDR_LINE_SIZE-1, STATUS_MSG_LEN)

      if NATS_HDR_LINE_SIZE+2 < status_hdr.bytesize
        desc = status_hdr.slice(NATS_HDR_LINE_SIZE+STATUS_MSG_LEN, status_hdr.bytesize)
        hdr[DESC_HDR] = desc unless desc.empty?
      end
    end
    begin
      # Skip the NATS/1.0 version line and parse the remaining key/value pairs.
      lines.slice(1, header.size).each do |line|
        line.rstrip!
        next if line.empty?
        key, value = line.strip.split(/\s*:\s*/, 2)
        hdr[key] = value
      end
    rescue StandardError
      # (fix) Header parsing is best effort: the original captured the
      # exception into an unused local ("err = e"), effectively discarding
      # it. Keep the best-effort behavior but drop the dead assignment and
      # make the intent explicit.
    end
  end
  hdr
end
# Methods only used by the parser
# Handles a PONG from the server: wakes up the oldest pending flush
# waiter (if any) and updates the outstanding-ping counters.
def process_pong
  # Take first pong wait and signal any flush in case there was one
  @pongs.synchronize do
    pong = @pongs.pop
    pong.signal unless pong.nil?
  end
  # NOTE(review): counters are updated outside the @pongs lock; presumably
  # only the parser thread calls this — confirm.
  @pings_outstanding -= 1
  @pongs_received += 1
end
# Received a ping so respond back with a pong
def process_ping
  @pending_queue << PONG_RESPONSE
  @flush_queue << :ping
  # NOTE(review): a pong waiter is also registered here — verify this is
  # intentional for server-initiated PINGs (it mirrors the #flush path).
  pong = @pongs.new_cond
  @pongs.synchronize { @pongs << pong }
end
# Handles protocol errors being sent by the server.
# Maps the raw -ERR payload to a typed sticky error, then defers to the
# reconnect/close logic in process_op_error.
def process_err(err)
  # In case of permissions violation then dispatch the error callback
  # while holding the lock.
  e = synchronize do
    current = server_pool.first
    case
    when err =~ /'Stale Connection'/
      @last_err = NATS::IO::StaleConnectionError.new(err)
    when current && current[:auth_required]
      # We cannot recover from auth errors so mark it to avoid
      # retrying unnecessarily next time.
      current[:error_received] = true
      @last_err = NATS::IO::AuthError.new(err)
    else
      @last_err = NATS::IO::ServerError.new(err)
    end
  end
  process_op_error(e)
end
# Dispatches an incoming MSG/HMSG from the parser to its subscription,
# handling auto-unsubscribe limits, request futures and slow consumers.
#
# @param subject [String] delivered subject.
# @param sid [Integer] subscription id assigned by this client.
# @param reply [String, nil] reply subject, when present.
# @param data [String] message payload.
# @param header [String, nil] raw header section, when present.
def process_msg(subject, sid, reply, data, header)
  @stats[:in_msgs] += 1
  @stats[:in_bytes] += data.size

  # Throw away in case we no longer manage the subscription
  sub = nil
  synchronize { sub = @subs[sid] }
  return unless sub

  err = nil
  sub.synchronize do
    sub.received += 1

    # Check for auto_unsubscribe
    if sub.max
      case
      when sub.received > sub.max
        # Client side support in case server did not receive unsubscribe
        unsubscribe(sid)
        return
      when sub.received == sub.max
        # Cleanup here if we have hit the max..
        synchronize { @subs.delete(sid) }
      end
    end

    # In case of a request which requires a future
    # do so here already while holding the lock and return
    if sub.future
      future = sub.future
      hdr = process_hdr(header)
      sub.response = Msg.new(subject: subject, reply: reply, data: data, header: hdr, nc: self, sub: sub)
      future.signal

      return
    elsif sub.pending_queue
      # Async subscribers use a sized queue for processing
      # and should be able to consume messages in parallel.
      if sub.pending_queue.size >= sub.pending_msgs_limit \
        or sub.pending_size >= sub.pending_bytes_limit then
        # Drop the message rather than block the parser's read loop.
        err = NATS::IO::SlowConsumer.new("nats: slow consumer, messages dropped")
      else
        hdr = process_hdr(header)

        # Only dispatch message when sure that it would not block
        # the main read loop from the parser.
        msg = Msg.new(subject: subject, reply: reply, data: data, header: hdr, nc: self, sub: sub)
        sub.pending_queue << msg

        # For sync subscribers, signal that there is a new message.
        sub.wait_for_msgs_cond.signal if sub.wait_for_msgs_cond

        sub.pending_size += data.size
      end
    end
  end

  synchronize do
    @last_err = err
    err_cb_call(self, err, sub) if @err_cb
  end if err
end
# Pops the next candidate from the head of the server pool, tracking
# reconnect attempts and applying backoff when appropriate.
#
# @return [Hash] the selected server entry (also sets @uri).
# @raise [NATS::IO::NoServersError] when the pool is exhausted.
def select_next_server
  raise NATS::IO::NoServersError.new("nats: No servers available") if server_pool.empty?

  # Pick next from head of the list
  srv = server_pool.shift

  # Track connection attempts to this server
  srv[:reconnect_attempts] ||= 0
  srv[:reconnect_attempts] += 1

  # Back off in case we are reconnecting to it and have been connected
  sleep @options[:reconnect_time_wait] if should_delay_connect?(srv)

  # Set url of the server to which we would be connected
  @uri = srv[:uri]
  @uri.user = @options[:user] if @options[:user]
  @uri.password = @options[:pass] if @options[:pass]

  srv
end
# Whether the server announced (via INFO) that TLS/SSL is required.
def server_using_secure_connection?
  @server_info[:ssl_required] || @server_info[:tls_required]
end
# Whether this client was configured for TLS, either via the tls:// URL
# scheme or the :tls option.
def client_using_secure_connection?
  @uri.scheme == "tls" || @tls
end
# True when connect was given a single URL (affects TLS hostname reuse
# during reconnects).
def single_url_connect_used?
  @single_url_connect_used
end
# Queues a raw protocol command to be written by the flusher thread,
# tracking the number of pending bytes.
def send_command(command)
  @pending_size += command.bytesize
  @pending_queue << command

  # TODO: kick flusher here in case pending_size growing large
end
# Auto unsubscribes the server by sending UNSUB command and throws away
# subscription in case already present and has received enough messages.
#
# @param sub [Subscription] subscription to remove.
# @param opt_max [Integer, nil] when given, the server auto-unsubscribes
#   after this many messages instead of immediately.
# @raise [NATS::IO::BadSubscription] when the subscription is already closed.
def unsubscribe(sub, opt_max=nil)
  sid = nil
  closed = nil
  sub.synchronize do
    sid = sub.sid
    closed = sub.closed
  end
  raise NATS::IO::BadSubscription.new("nats: invalid subscription") if closed

  opt_max_str = " #{opt_max}" unless opt_max.nil?
  send_command("UNSUB #{sid}#{opt_max_str}#{CR_LF}")
  @flush_queue << :unsub

  # Re-fetch by sid: the registry entry may already be gone.
  synchronize { sub = @subs[sid] }
  return unless sub
  synchronize do
    sub.max = opt_max
    # Keep the entry only while a max is set and not yet reached.
    @subs.delete(sid) unless (sub.max && (sub.received < sub.max))

    # Stop messages delivery thread for async subscribers
    if sub.wait_for_msgs_t && sub.wait_for_msgs_t.alive?
      sub.wait_for_msgs_t.exit
      sub.pending_queue.clear
    end
  end

  sub.synchronize do
    sub.closed = true
  end
end
# Kicks the flusher loop with the given signal symbol.
def send_flush_queue(s)
  @flush_queue << s
end
# Removes a subscription id from the registry (used by the parser).
def delete_sid(sid)
  @subs.delete(sid)
end
# Invokes the user error callback, passing as many arguments as the
# callback's declared arity accepts.
def err_cb_call(nc, e, sub)
  return unless @err_cb

  handler = @err_cb
  args =
    case handler.arity
    when 0 then []
    when 1 then [e]
    when 2 then [e, sub]
    else [nc, e, sub]
    end
  handler.call(*args)
end
# Whether the current server URI carries credentials (user or token).
def auth_connection?
  !@uri.user.nil?
end
# Builds the CONNECT protocol line from the current options, credentials
# (user/pass, token, JWT or NKEY) and the server's announced capabilities.
#
# @return [String] "CONNECT {json}\r\n"
def connect_command
  cs = {
    :verbose => @options[:verbose],
    :pedantic => @options[:pedantic],
    :lang => NATS::IO::LANG,
    :version => NATS::IO::VERSION,
    :protocol => NATS::IO::PROTOCOL
  }
  cs[:name] = @options[:name] if @options[:name]

  case
  when auth_connection?
    # Plain user/pass vs token auth, derived from the server URI.
    if @uri.password
      cs[:user] = @uri.user
      cs[:pass] = @uri.password
    else
      cs[:auth_token] = @uri.user
    end
  when @user_credentials
    # JWT + signed nonce (decentralized auth).
    nonce = @server_info[:nonce]
    cs[:jwt] = @user_jwt_cb.call
    cs[:sig] = @signature_cb.call(nonce)
  when @nkeys_seed
    # Bare NKEY + signed nonce.
    nonce = @server_info[:nonce]
    cs[:nkey] = @user_nkey_cb.call
    cs[:sig] = @signature_cb.call(nonce)
  end

  if @server_info[:headers]
    cs[:headers] = @server_info[:headers]
    cs[:no_responders] = if @options[:no_responders] == false
                           @options[:no_responders]
                         else
                           @server_info[:headers]
                         end
  end

  "CONNECT #{cs.to_json}#{CR_LF}"
end
# Handles errors from reading, parsing the protocol or stale connection.
# The lock should not be held entering this function.
# Either kicks off reconnection in a separate thread or closes the
# connection for good.
def process_op_error(e)
  should_bail = synchronize do
    connecting? || closed? || reconnecting?
  end
  return if should_bail

  synchronize do
    @last_err = e
    err_cb_call(self, e, nil) if @err_cb

    # If we were connected and configured to reconnect,
    # then trigger disconnect and start reconnection logic
    if connected? and should_reconnect?
      @status = RECONNECTING
      @io.close if @io
      @io = nil

      # TODO: Reconnecting pending buffer?

      # Do reconnect under a different thread than the one
      # in which we got the error.
      Thread.new do
        begin
          # Abort currently running reads in case they're around
          # FIXME: There might be more graceful way here...
          @read_loop_thread.exit if @read_loop_thread.alive?
          @flusher_thread.exit if @flusher_thread.alive?
          @ping_interval_thread.exit if @ping_interval_thread.alive?

          attempt_reconnect
        rescue NATS::IO::NoServersError => e
          @last_err = e
          close
        end
      end

      # Terminate the thread that hit the error; the new thread owns recovery.
      Thread.exit
      return
    end

    # Otherwise, stop trying to reconnect and close the connection
    @status = DISCONNECTED
  end

  # Otherwise close the connection to NATS
  close
end
# Gathers data from the socket and sends it to the parser.
# Runs in its own thread; exits when the connection is closed or
# reconnecting, and routes I/O errors to process_op_error.
def read_loop
  loop do
    begin
      should_bail = synchronize do
        # FIXME: In case of reconnect as well?
        @status == CLOSED or @status == RECONNECTING
      end
      if !@io or @io.closed? or should_bail
        return
      end

      # TODO: Remove timeout and just wait to be ready
      data = @io.read(NATS::IO::MAX_SOCKET_READ_BYTES)
      @parser.parse(data) if data
    rescue Errno::ETIMEDOUT
      # FIXME: We do not really need a timeout here...
      retry
    rescue => e
      # In case of reading/parser errors, trigger
      # reconnection logic in case desired.
      process_op_error(e)
    end
  end
end
# Waits for clients to notify the flusher that they queued a command,
# then batches everything pending into a single socket write.
# Runs in its own thread.
def flusher_loop
  loop do
    # Blocks waiting for the flusher to be kicked...
    @flush_queue.pop

    should_bail = synchronize do
      # NOTE(review): the second clause looks unreachable — a CONNECTING
      # status already satisfies "@status != CONNECTED".
      @status != CONNECTED || @status == CONNECTING
    end
    return if should_bail

    # Skip in case nothing remains pending already.
    next if @pending_queue.empty?

    # FIXME: should limit how many commands to take at once
    # since producers could be adding as many as possible
    # until reaching the max pending queue size.
    cmds = []
    cmds << @pending_queue.pop until @pending_queue.empty?
    begin
      @io.write(cmds.join) unless cmds.empty?
    rescue => e
      synchronize do
        @last_err = e
        err_cb_call(self, e, nil) if @err_cb
      end

      process_op_error(e)
      return
    end if @io

    synchronize do
      @pending_size = 0
    end
  end
end
# Periodically sends a PING to the server; when too many PINGs remain
# unanswered the connection is treated as stale. Runs in its own thread.
def ping_interval_loop
  loop do
    sleep @options[:ping_interval]

    # Skip ping interval until connected
    next if !connected?

    if @pings_outstanding >= @options[:max_outstanding_pings]
      process_op_error(NATS::IO::StaleConnectionError.new("nats: stale connection"))
      return
    end

    @pings_outstanding += 1

    send_command(PING_REQUEST)
    @flush_queue << :ping
  end
rescue => e
  process_op_error(e)
end
# Performs the initial protocol handshake: reads INFO, optionally upgrades
# the socket to TLS, then sends CONNECT + PING and waits for PONG
# (preceded by +OK when running in verbose mode).
#
# @raise [NATS::IO::ConnectError] on protocol or TLS negotiation mismatches.
# @raise [NATS::IO::AuthError, NATS::IO::ServerError] on -ERR responses.
def process_connect_init
  line = @io.read_line(options[:connect_timeout])
  if !line or line.empty?
    raise NATS::IO::ConnectError.new("nats: protocol exception, INFO not received")
  end

  if match = line.match(NATS::Protocol::INFO)
    info_json = match.captures.first
    process_info(info_json)
  else
    raise NATS::IO::ConnectError.new("nats: protocol exception, INFO not valid")
  end

  case
  when (server_using_secure_connection? and client_using_secure_connection?)
    tls_context = nil

    if @tls
      # Allow prepared context and customizations via :tls opts
      tls_context = @tls[:context] if @tls[:context]
    else
      # Defaults
      tls_context = OpenSSL::SSL::SSLContext.new

      # Use the default verification options from Ruby:
      # https://github.com/ruby/ruby/blob/96db72ce38b27799dd8e80ca00696e41234db6ba/ext/openssl/lib/openssl/ssl.rb#L19-L29
      #
      # Insecure TLS versions not supported already:
      # https://github.com/ruby/openssl/commit/3e5a009966bd7f806f7180d82cf830a04be28986
      #
      tls_context.set_params
    end

    # Setup TLS connection by rewrapping the socket
    tls_socket = OpenSSL::SSL::SSLSocket.new(@io.socket, tls_context)

    # Close TCP socket after closing TLS socket as well.
    tls_socket.sync_close = true

    # Required to enable hostname verification if Ruby runtime supports it (>= 2.4):
    # https://github.com/ruby/openssl/commit/028e495734e9e6aa5dba1a2e130b08f66cf31a21
    tls_socket.hostname = @hostname
    tls_socket.connect
    @io.socket = tls_socket
  when (server_using_secure_connection? and !client_using_secure_connection?)
    raise NATS::IO::ConnectError.new('TLS/SSL required by server')
  when (client_using_secure_connection? and !server_using_secure_connection?)
    raise NATS::IO::ConnectError.new('TLS/SSL not supported by server')
  else
    # Otherwise, use a regular connection.
  end

  # Send connect and process synchronously. If using TLS,
  # it should have handled upgrading at this point.
  @io.write(connect_command)

  # Send ping/pong after connect
  @io.write(PING_REQUEST)

  next_op = @io.read_line(options[:connect_timeout])
  if @options[:verbose]
    # Need to get another command here if verbose
    raise NATS::IO::ConnectError.new("expected to receive +OK") unless next_op =~ NATS::Protocol::OK
    next_op = @io.read_line(options[:connect_timeout])
  end

  case next_op
  when NATS::Protocol::PONG
  when NATS::Protocol::ERR
    if @server_info[:auth_required]
      raise NATS::IO::AuthError.new($1)
    else
      raise NATS::IO::ServerError.new($1)
    end
  else
    raise NATS::IO::ConnectError.new("expected PONG, got #{next_op}")
  end
end
# Reconnect logic, this is done while holding the lock.
# Cycles through the server pool until a handshake succeeds, then replays
# all subscriptions, flushes anything pending and restarts the worker
# threads before firing the reconnect callback.
def attempt_reconnect
  @disconnect_cb.call(@last_err) if @disconnect_cb

  # Clear sticky error
  @last_err = nil

  # Do reconnect
  srv = nil
  begin
    srv = select_next_server

    # Establish TCP connection with new server
    @io = create_socket
    @io.connect
    @stats[:reconnects] += 1

    # Set hostname to use for TLS hostname verification
    if client_using_secure_connection? and single_url_connect_used?
      # Reuse original hostname name in case of using TLS.
      @hostname ||= srv[:hostname]
    else
      @hostname = srv[:hostname]
    end

    # Established TCP connection successfully so can start connect
    process_connect_init

    # Reset reconnection attempts if connection is valid
    srv[:reconnect_attempts] = 0
    srv[:auth_required] ||= true if @server_info[:auth_required]

    # Add back to rotation since successfully connected
    server_pool << srv
  rescue NATS::IO::NoServersError => e
    raise e
  rescue => e
    # In case there was an error from the server check
    # to see whether need to take it out from rotation
    srv[:auth_required] ||= true if @server_info[:auth_required]
    server_pool << srv if can_reuse_server?(srv)

    @last_err = e

    # Trigger async error handler
    err_cb_call(self, e, nil) if @err_cb

    # Continue retrying until there are no options left in the server pool
    retry
  end

  # Clear pending flush calls and reset state before restarting loops
  @flush_queue.clear
  @pings_outstanding = 0
  @pongs_received = 0

  # Replay all subscriptions
  @subs.each_pair do |sid, sub|
    @io.write("SUB #{sub.subject} #{sub.queue} #{sid}#{CR_LF}")
  end

  # Flush anything which was left pending, in case of errors during flush
  # then we should raise error then retry the reconnect logic
  cmds = []
  cmds << @pending_queue.pop until @pending_queue.empty?
  @io.write(cmds.join) unless cmds.empty?
  @status = CONNECTED
  @pending_size = 0

  # Reset parser state here to avoid unknown protocol errors
  # on reconnect...
  @parser.reset!

  # Now connected to NATS, and we can restart parser loop, flusher
  # and ping interval
  start_threads!

  # Dispatch the reconnected callback while holding lock
  # which we should have already
  @reconnect_cb.call if @reconnect_cb
end
# Transitions the client to +conn_status+, stopping the helper threads,
# flushing pending commands on a best-effort basis and closing the socket.
# Fires the disconnect/close callbacks only when +do_cbs+ is true, so the
# reconnect path can reuse this without double-notifying users.
def close_connection(conn_status, do_cbs=true)
synchronize do
if @status == CLOSED
@status = conn_status
return
end
end
# Kick the flusher so it bails due to closed state
@flush_queue << :fallout if @flush_queue
Thread.pass
# FIXME: More graceful way of handling the following?
# Ensure ping interval and flusher are not running anymore
if @ping_interval_thread and @ping_interval_thread.alive?
@ping_interval_thread.exit
end
if @flusher_thread and @flusher_thread.alive?
@flusher_thread.exit
end
if @read_loop_thread and @read_loop_thread.alive?
@read_loop_thread.exit
end
# TODO: Delete any other state which we are not using here too.
synchronize do
# Wake up any callers blocked in #flush waiting for a PONG.
@pongs.synchronize do
@pongs.each do |pong|
pong.signal
end
@pongs.clear
end
# Try to write any pending flushes in case
# we have a connection then close it.
should_flush = (@pending_queue && @io && @io.socket && !@io.closed?)
begin
cmds = []
cmds << @pending_queue.pop until @pending_queue.empty?
# FIXME: Fails when empty on TLS connection?
@io.write(cmds.join) unless cmds.empty?
rescue => e
@last_err = e
err_cb_call(self, e, nil) if @err_cb
end if should_flush
# Destroy any remaining subscriptions.
@subs.each do |_, sub|
if sub.wait_for_msgs_t && sub.wait_for_msgs_t.alive?
sub.wait_for_msgs_t.exit
sub.pending_queue.clear
end
end
@subs.clear
if do_cbs
@disconnect_cb.call(@last_err) if @disconnect_cb
@close_cb.call if @close_cb
end
@status = conn_status
# Close the established connection in case
# we still have it.
if @io
@io.close if @io.socket
@io = nil
end
end
end
# Spawns the three long-running helper threads of the client:
# the socket reader, the command flusher and the ping interval timer.
# Each thread aborts the process on unexpected exceptions so that
# failures are not silently swallowed.
def start_threads!
  # Reading loop for gathering data from the socket.
  @read_loop_thread = Thread.new { read_loop }.tap { |t| t.abort_on_exception = true }
  # Flusher loop for sending coalesced commands to the server.
  @flusher_thread = Thread.new { flusher_loop }.tap { |t| t.abort_on_exception = true }
  # Keep-alive PING handling.
  @ping_interval_thread = Thread.new { ping_interval_loop }.tap { |t| t.abort_on_exception = true }
end
# Prepares requests subscription that handles the responses
# for the new style request response.
#
# Creates a single wildcard "_INBOX.<nuid>.*" subscription shared by all
# requests; each request is identified by the token in the last subject
# segment, and its waiting future is looked up in @resp_map.
def start_resp_mux_sub!
@resp_sub_prefix = "_INBOX.#{@nuid.next}"
@resp_map = Hash.new { |h,k| h[k] = { }}
@resp_sub = Subscription.new
@resp_sub.subject = "#{@resp_sub_prefix}.*"
@resp_sub.received = 0
@resp_sub.nc = self
# FIXME: Allow setting pending limits for responses mux subscription.
@resp_sub.pending_msgs_limit = NATS::IO::DEFAULT_SUB_PENDING_MSGS_LIMIT
@resp_sub.pending_bytes_limit = NATS::IO::DEFAULT_SUB_PENDING_BYTES_LIMIT
@resp_sub.pending_queue = SizedQueue.new(@resp_sub.pending_msgs_limit)
@resp_sub.wait_for_msgs_t = Thread.new do
loop do
msg = @resp_sub.pending_queue.pop
@resp_sub.pending_size -= msg.data.size
# Pick the token and signal the request under the mutex
# from the subscription itself.
token = msg.subject.split('.').last
future = nil
synchronize do
future = @resp_map[token][:future]
@resp_map[token][:response] = msg
end
# Signal back that the response has arrived
# in case the future has not been yet delete.
@resp_sub.synchronize do
future.signal if future
end
end
end
sid = (@ssid += 1)
@subs[sid] = @resp_sub
send_command("SUB #{@resp_sub.subject} #{sid}#{CR_LF}")
@flush_queue << :sub
end
# Whether a server entry should go back into the rotation pool after a
# failed or successful connection attempt.
def can_reuse_server?(srv)
  return false if srv.nil?
  limit = @options[:max_reconnect_attempts]
  # A negative limit means retry forever, so always reusable.
  return true if limit < 0
  # Hard errors (e.g. authorization) mean we will never be able to
  # connect to this server, so drop it for good.
  !srv[:error_received] && srv[:reconnect_attempts] <= limit
end
# Only delay the connect attempt for servers that we managed to
# establish a TCP connection against in the past.
def should_delay_connect?(srv)
  srv[:was_connected] && srv[:reconnect_attempts] >= 0
end
# Negation of #should_reconnect?, kept for readability at call sites.
def should_not_reconnect?
  !should_reconnect?
end
# Whether the client was configured to attempt reconnects
# (the :reconnect option, true by default).
def should_reconnect?
  @options.fetch(:reconnect, nil)
end
# Builds a fresh (not yet connected) TCP socket wrapper for the current
# @uri. TLS upgrading, if any, happens later during connect init.
def create_socket
  NATS::IO::Socket.new(uri: @uri,
                       connect_timeout: NATS::IO::DEFAULT_CONNECT_TIMEOUT)
end
# Configures the NKEYS/JWT callbacks used during the CONNECT handshake.
#
# Two modes are supported:
# * +@nkeys_seed+: a file holding just the user nkey seed; sets
#   +@user_nkey_cb+ and +@signature_cb+.
# * +@user_credentials+: a single decorated ".creds" file holding both
#   the user JWT and the nkey seed; sets +@user_jwt_cb+ and
#   +@signature_cb+.
#
# The callbacks re-read the file on every invocation and wipe the key
# material from memory right after use.
#
# @raise [Error] when the nkeys gem is not installed or when the
#   expected sections are missing from the credentials file.
def setup_nkeys_connect
  begin
    require 'nkeys'
    require 'base64'
  rescue LoadError
    raise(Error, "nkeys is not installed")
  end
  case
  when @nkeys_seed
    @user_nkey_cb = proc {
      seed = File.read(@nkeys_seed).chomp
      kp = NKEYS::from_seed(seed)
      # Take a copy since original will be gone with the wipe.
      pub_key = kp.public_key.dup
      kp.wipe!
      pub_key
    }
    @signature_cb = proc { |nonce|
      seed = File.read(@nkeys_seed).chomp
      kp = NKEYS::from_seed(seed)
      raw_signed = kp.sign(nonce)
      kp.wipe!
      encoded = Base64.urlsafe_encode64(raw_signed)
      # Remove padding
      encoded.gsub('=', '')
    }
  when @user_credentials
    # When the credentials are within a single decorated file.
    @user_jwt_cb = proc {
      jwt_start = "BEGIN NATS USER JWT".freeze
      found = false
      jwt = nil
      File.readlines(@user_credentials).each do |line|
        case
        when found
          jwt = line.chomp
          break
        when line.include?(jwt_start)
          found = true
        end
      end
      raise(Error, "No JWT found in #{@user_credentials}") unless found
      jwt
    }
    @signature_cb = proc { |nonce|
      seed_start = "BEGIN USER NKEY SEED".freeze
      found = false
      seed = nil
      File.readlines(@user_credentials).each do |line|
        case
        when found
          seed = line.chomp
          break
        when line.include?(seed_start)
          found = true
        end
      end
      raise(Error, "No nkey user seed found in #{@user_credentials}") unless found
      kp = NKEYS::from_seed(seed)
      raw_signed = kp.sign(nonce)
      # seed is a reference so also cleared when doing wipe,
      # which can be done since Ruby strings are mutable.
      # FIX: was `kp.wipe` — the nkeys KeyPair API exposes `wipe!`
      # (used consistently above); plain `wipe` would raise NoMethodError
      # the first time a creds-file signature was requested.
      kp.wipe!
      encoded = Base64.urlsafe_encode64(raw_signed)
      # Remove padding
      encoded.gsub('=', '')
    }
  end
end
# Expands a comma separated list of server addresses into URI objects,
# filling in the default scheme ('nats'), host ('localhost') and port
# when omitted. Userinfo ("user:pass@host") is preserved when present.
def process_uri(uris)
  uris.split(',').map do |raw|
    components = {}
    # Scheme
    if raw.include?("://")
      scheme, raw = raw.split("://")
      components[:scheme] = scheme
    else
      components[:scheme] = 'nats'
    end
    # UserInfo
    if raw.include?("@")
      userinfo, endpoint = raw.split("@")
      host, port = endpoint.split(":")
      components[:userinfo] = userinfo
    else
      host, port = raw.split(":")
    end
    # Host and Port
    components[:host] = host || "localhost"
    components[:port] = port || DEFAULT_PORT
    URI::Generic.build(components)
  end
end
end
# NATS::IO holds the transport-level pieces of the client: tuning
# constants shared across the code base and the non-blocking TCP
# socket wrapper used for talking to the server.
module IO
include Status
# Client creates a connection to the NATS Server.
Client = ::NATS::Client
MAX_RECONNECT_ATTEMPTS = 10
RECONNECT_TIME_WAIT = 2
# Maximum accumulated pending commands bytesize before forcing a flush.
MAX_PENDING_SIZE = 32768
# Maximum number of flush kicks that can be queued up before we block.
MAX_FLUSH_KICK_SIZE = 1024
# Maximum number of bytes which we will be gathering on a single read.
# TODO: Make dynamic?
MAX_SOCKET_READ_BYTES = 32768
# Ping intervals
DEFAULT_PING_INTERVAL = 120
DEFAULT_PING_MAX = 2
# Default IO timeouts
DEFAULT_CONNECT_TIMEOUT = 2
DEFAULT_READ_WRITE_TIMEOUT = 2
# Default Pending Limits
DEFAULT_SUB_PENDING_MSGS_LIMIT = 65536
DEFAULT_SUB_PENDING_BYTES_LIMIT = 65536 * 1024
# Implementation adapted from https://github.com/redis/redis-rb
class Socket
attr_accessor :socket
# @param options [Hash] accepts :uri, :connect_timeout, :write_timeout
#   and :read_timeout; the socket itself is created lazily in #connect.
def initialize(options={})
@uri = options[:uri]
@connect_timeout = options[:connect_timeout]
@write_timeout = options[:write_timeout]
@read_timeout = options[:read_timeout]
@socket = nil
end
# Resolves the host and connects to the first address that accepts the
# connection, enabling TCP_NODELAY on the resulting socket.
def connect
addrinfo = ::Socket.getaddrinfo(@uri.host, nil, ::Socket::AF_UNSPEC, ::Socket::SOCK_STREAM)
addrinfo.each_with_index do |ai, i|
begin
@socket = connect_addrinfo(ai, @uri.port, @connect_timeout)
break
rescue SystemCallError => e
# Give up if no more available
raise e if addrinfo.length == i+1
end
end
# Set TCP no delay by default
@socket.setsockopt(::Socket::IPPROTO_TCP, ::Socket::TCP_NODELAY, 1)
end
# Reads a single protocol line (used during the INFO/CONNECT handshake),
# raising SocketTimeoutError when nothing is readable before +deadline+.
def read_line(deadline=nil)
# FIXME: Should accumulate and read in a non blocking way instead
unless ::IO.select([@socket], nil, nil, deadline)
raise NATS::IO::SocketTimeoutError
end
@socket.gets
end
# Non-blocking read of at most +max_bytes+, waiting (via IO.select) up
# to +deadline+ seconds whenever the socket is not ready.
def read(max_bytes, deadline=nil)
begin
return @socket.read_nonblock(max_bytes)
rescue ::IO::WaitReadable
if ::IO.select([@socket], nil, nil, deadline)
retry
else
raise NATS::IO::SocketTimeoutError
end
rescue ::IO::WaitWritable
# Can happen during a TLS handshake renegotiation.
if ::IO.select(nil, [@socket], nil, deadline)
retry
else
raise NATS::IO::SocketTimeoutError
end
end
rescue EOFError => e
if RUBY_ENGINE == 'jruby' and e.message == 'No message available'
# FIXME: <EOFError: No message available> can happen in jruby
# even though seems it is temporary and eventually possible
# to read from socket.
return nil
end
raise Errno::ECONNRESET
end
# Non-blocking write of the whole +data+ buffer, retrying on partial
# writes and raising SocketTimeoutError past +deadline+.
def write(data, deadline=nil)
length = data.bytesize
total_written = 0
loop do
begin
written = @socket.write_nonblock(data)
total_written += written
break total_written if total_written >= length
data = data.byteslice(written..-1)
rescue ::IO::WaitWritable
if ::IO.select(nil, [@socket], nil, deadline)
retry
else
raise NATS::IO::SocketTimeoutError
end
rescue ::IO::WaitReadable
if ::IO.select([@socket], nil, nil, deadline)
retry
else
raise NATS::IO::SocketTimeoutError
end
end
end
rescue EOFError
raise Errno::ECONNRESET
end
def close
@socket.close
end
def closed?
@socket.closed?
end
private
# Opens a non-blocking TCP connection for one getaddrinfo entry,
# waiting up to @connect_timeout for the handshake to complete.
def connect_addrinfo(ai, port, timeout)
sock = ::Socket.new(::Socket.const_get(ai[0]), ::Socket::SOCK_STREAM, 0)
sockaddr = ::Socket.pack_sockaddr_in(port, ai[3])
begin
sock.connect_nonblock(sockaddr)
rescue Errno::EINPROGRESS, Errno::EALREADY, ::IO::WaitWritable
unless ::IO.select(nil, [sock], nil, @connect_timeout)
raise NATS::IO::SocketTimeoutError
end
# Confirm that connection was established
begin
sock.connect_nonblock(sockaddr)
rescue Errno::EISCONN
# Connection was established without issues.
end
end
sock
end
end
end
# MonotonicTime provides a clock that is not affected by wall-clock
# jumps, used for measuring timeouts and elapsed durations.
class MonotonicTime
# Implementation of MonotonicTime adapted from
# https://github.com/ruby-concurrency/concurrent-ruby/
class << self
# Pick the best available monotonic source at load time.
case
when defined?(Process::CLOCK_MONOTONIC)
def now
Process.clock_gettime(Process::CLOCK_MONOTONIC)
end
when RUBY_ENGINE == 'jruby'
def now
java.lang.System.nanoTime() / 1_000_000_000.0
end
else
def now
# Fallback to regular time behavior
::Time.now.to_f
end
end
# Runs the block and raises NATS::Timeout if it took longer than
# +timeout+ seconds. NOTE(review): the check happens only after the
# block returns; it does not interrupt a block that is still running.
def with_nats_timeout(timeout)
start_time = now
yield
end_time = now
duration = end_time - start_time
if duration > timeout
raise NATS::Timeout.new("nats: timeout")
end
end
# Seconds elapsed since +t0+ (a value previously returned by +now+).
def since(t0)
now - t0
end
end
end
end
Update the `nc.jetstream` documentation
Signed-off-by: Waldemar Quevedo <2d029dbd3aa3d2941d38f053a387cc1ce6eef7a1@synadia.com>
# Copyright 2016-2021 The NATS Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative 'parser'
require_relative 'version'
require_relative 'errors'
require_relative 'msg'
require_relative 'subscription'
require_relative 'js'
require 'nats/nuid'
require 'thread'
require 'socket'
require 'json'
require 'monitor'
require 'uri'
require 'securerandom'
begin
require "openssl"
rescue LoadError
end
module NATS
class << self
  # NATS.connect creates a connection to the NATS Server.
  # @param uri [String] URL endpoint of the NATS Server or cluster.
  # @param opts [Hash] Options to customize the NATS connection.
  # @return [NATS::Client]
  #
  # @example
  #   require 'nats'
  #   nc = NATS.connect("demo.nats.io")
  #   nc.publish("hello", "world")
  #   nc.close
  #
  def connect(uri=nil, opts={})
    NATS::Client.new.tap { |client| client.connect(uri, opts) }
  end
end
# Status represents the different states from a NATS connection.
# A client starts from the DISCONNECTED state to CONNECTING during
# the initial connect, then CONNECTED. If the connection is reset
# then it goes from DISCONNECTED to RECONNECTING until it is back to
# the CONNECTED state. In case the client gives up reconnecting or
# the connection is manually closed then it will reach the CLOSED
# connection state after which it will not reconnect again.
#
# These integer values are part of the public API (exposed via
# Client#status) and must stay stable.
module Status
# When the client is not actively connected.
DISCONNECTED = 0
# When the client is connected.
CONNECTED = 1
# When the client will no longer attempt to connect to a NATS Server.
CLOSED = 2
# When the client has disconnected and is attempting to reconnect.
RECONNECTING = 3
# When the client is attempting to connect to a NATS Server for the first time.
CONNECTING = 4
end
# Client creates a connection to the NATS Server.
class Client
include MonitorMixin
include Status
attr_reader :status, :server_info, :server_pool, :options, :connected_server, :stats, :uri
DEFAULT_PORT = 4222
DEFAULT_URI = ("nats://localhost:#{DEFAULT_PORT}".freeze)
CR_LF = ("\r\n".freeze)
CR_LF_SIZE = (CR_LF.bytesize)
PING_REQUEST = ("PING#{CR_LF}".freeze)
PONG_RESPONSE = ("PONG#{CR_LF}".freeze)
NATS_HDR_LINE = ("NATS/1.0#{CR_LF}".freeze)
STATUS_MSG_LEN = 3
STATUS_HDR = ("Status".freeze)
DESC_HDR = ("Description".freeze)
NATS_HDR_LINE_SIZE = (NATS_HDR_LINE.bytesize)
SUB_OP = ('SUB'.freeze)
EMPTY_MSG = (''.freeze)
# Sets up the client's initial state: IO handle, command queues, the
# protocol parser, helper thread slots, subscription bookkeeping, stats
# and no-op default callbacks. No connection is made until #connect.
def initialize
super # required to initialize monitor
@options = nil
# Read/Write IO
@io = nil
# Queues for coalescing writes of commands we need to send to server.
@flush_queue = nil
@pending_queue = nil
# Parser with state
@parser = NATS::Protocol::Parser.new(self)
# Threads for both reading and flushing command
@flusher_thread = nil
@read_loop_thread = nil
@ping_interval_thread = nil
# Info that we get from the server
@server_info = { }
# URI from server to which we are currently connected
@uri = nil
@server_pool = []
@status = DISCONNECTED
# Subscriptions
@subs = { }
@ssid = 0
# Ping interval
@pings_outstanding = 0
@pongs_received = 0
@pongs = []
@pongs.extend(MonitorMixin)
# Accounting
@pending_size = 0
@stats = {
in_msgs: 0,
out_msgs: 0,
in_bytes: 0,
out_bytes: 0,
reconnects: 0
}
# Sticky error
@last_err = nil
# Async callbacks, no ops by default.
@err_cb = proc { }
@close_cb = proc { }
@disconnect_cb = proc { }
@reconnect_cb = proc { }
# Secure TLS options
@tls = nil
# Hostname of current server; used for when TLS host
# verification is enabled.
@hostname = nil
@single_url_connect_used = false
# Track whether connect has been already been called.
@connect_called = false
# New style request/response implementation.
@resp_sub = nil
@resp_map = nil
@resp_sub_prefix = nil
@nuid = NATS::NUID.new
# NKEYS
@user_credentials = nil
@nkeys_seed = nil
@user_nkey_cb = nil
@user_jwt_cb = nil
@signature_cb = nil
end
# Establishes a connection to NATS.
#
# Accepts either a String of comma separated server URLs or a Hash of
# options. Option defaults can be overridden via NATS_* environment
# variables. Retries servers from the pool until one accepts, then
# starts the reader/flusher/ping threads. Idempotent: subsequent calls
# are no-ops once connect has been invoked.
def connect(uri=nil, opts={})
synchronize do
# In case it has been connected already, then do not need to call this again.
return if @connect_called
@connect_called = true
end
# Convert URI to string if needed.
uri = uri.to_s if uri.is_a?(URI)
case uri
when String
# Initialize TLS defaults in case any url is using it.
srvs = opts[:servers] = process_uri(uri)
if srvs.any? {|u| u.scheme == 'tls'} and !opts[:tls]
tls_context = OpenSSL::SSL::SSLContext.new
tls_context.set_params
opts[:tls] = {
context: tls_context
}
end
@single_url_connect_used = true if srvs.size == 1
when Hash
opts = uri
end
opts[:verbose] = false if opts[:verbose].nil?
opts[:pedantic] = false if opts[:pedantic].nil?
opts[:reconnect] = true if opts[:reconnect].nil?
opts[:old_style_request] = false if opts[:old_style_request].nil?
opts[:reconnect_time_wait] = NATS::IO::RECONNECT_TIME_WAIT if opts[:reconnect_time_wait].nil?
opts[:max_reconnect_attempts] = NATS::IO::MAX_RECONNECT_ATTEMPTS if opts[:max_reconnect_attempts].nil?
opts[:ping_interval] = NATS::IO::DEFAULT_PING_INTERVAL if opts[:ping_interval].nil?
opts[:max_outstanding_pings] = NATS::IO::DEFAULT_PING_MAX if opts[:max_outstanding_pings].nil?
# Override with ENV
opts[:verbose] = ENV['NATS_VERBOSE'].downcase == 'true' unless ENV['NATS_VERBOSE'].nil?
opts[:pedantic] = ENV['NATS_PEDANTIC'].downcase == 'true' unless ENV['NATS_PEDANTIC'].nil?
opts[:reconnect] = ENV['NATS_RECONNECT'].downcase == 'true' unless ENV['NATS_RECONNECT'].nil?
opts[:reconnect_time_wait] = ENV['NATS_RECONNECT_TIME_WAIT'].to_i unless ENV['NATS_RECONNECT_TIME_WAIT'].nil?
opts[:max_reconnect_attempts] = ENV['NATS_MAX_RECONNECT_ATTEMPTS'].to_i unless ENV['NATS_MAX_RECONNECT_ATTEMPTS'].nil?
opts[:ping_interval] = ENV['NATS_PING_INTERVAL'].to_i unless ENV['NATS_PING_INTERVAL'].nil?
opts[:max_outstanding_pings] = ENV['NATS_MAX_OUTSTANDING_PINGS'].to_i unless ENV['NATS_MAX_OUTSTANDING_PINGS'].nil?
opts[:connect_timeout] ||= NATS::IO::DEFAULT_CONNECT_TIMEOUT
@options = opts
# Process servers in the NATS cluster and pick one to connect
uris = opts[:servers] || [DEFAULT_URI]
uris.shuffle! unless @options[:dont_randomize_servers]
uris.each do |u|
nats_uri = case u
when URI
u.dup
else
URI.parse(u)
end
@server_pool << {
:uri => nats_uri,
:hostname => nats_uri.host
}
end
if @options[:old_style_request]
# Replace for this instance the implementation
# of request to use the old_request style.
class << self; alias_method :request, :old_request; end
end
# NKEYS
@user_credentials ||= opts[:user_credentials]
@nkeys_seed ||= opts[:nkeys_seed]
setup_nkeys_connect if @user_credentials or @nkeys_seed
# Check for TLS usage
@tls = @options[:tls]
srv = nil
begin
srv = select_next_server
# Create TCP socket connection to NATS
@io = create_socket
@io.connect
# Capture state that we have had a TCP connection established against
# this server and could potentially be used for reconnecting.
srv[:was_connected] = true
# Connection established and now in process of sending CONNECT to NATS
@status = CONNECTING
# Use the hostname from the server for TLS hostname verification.
if client_using_secure_connection? and single_url_connect_used?
# Always reuse the original hostname used to connect.
@hostname ||= srv[:hostname]
else
@hostname = srv[:hostname]
end
# Established TCP connection successfully so can start connect
process_connect_init
# Reset reconnection attempts if connection is valid
srv[:reconnect_attempts] = 0
srv[:auth_required] ||= true if @server_info[:auth_required]
# Add back to rotation since successfully connected
server_pool << srv
rescue NATS::IO::NoServersError => e
@disconnect_cb.call(e) if @disconnect_cb
raise @last_err || e
rescue => e
# Capture sticky error
synchronize do
@last_err = e
srv[:auth_required] ||= true if @server_info[:auth_required]
server_pool << srv if can_reuse_server?(srv)
end
err_cb_call(self, e, nil) if @err_cb
if should_not_reconnect?
@disconnect_cb.call(e) if @disconnect_cb
raise e
end
# Clean up any connecting state and close connection without
# triggering the disconnection/closed callbacks.
close_connection(DISCONNECTED, false)
# always sleep here to safe guard against errors before current[:was_connected]
# is set for the first time
sleep @options[:reconnect_time_wait] if @options[:reconnect_time_wait]
# Continue retrying until there are no options left in the server pool
retry
end
# Initialize queues and loops for message dispatching and processing engine
@flush_queue = SizedQueue.new(NATS::IO::MAX_FLUSH_KICK_SIZE)
@pending_queue = SizedQueue.new(NATS::IO::MAX_PENDING_SIZE)
@pings_outstanding = 0
@pongs_received = 0
@pending_size = 0
# Server roundtrip went ok so consider to be connected at this point
@status = CONNECTED
# Connected to NATS so Ready to start parser loop, flusher and ping interval
start_threads!
self
end
# Publishes +msg+ to +subject+, optionally tagging the message with a
# reply subject that responders can answer on.
# @raise [NATS::IO::BadSubject] when the subject is missing or empty.
def publish(subject, msg=EMPTY_MSG, opt_reply=nil, &blk)
  raise NATS::IO::BadSubject if !subject || subject.empty?
  nbytes = msg.bytesize
  # Accounting of outgoing traffic.
  @stats[:out_msgs] += 1
  @stats[:out_bytes] += nbytes
  send_command("PUB #{subject} #{opt_reply} #{nbytes}\r\n#{msg}\r\n")
  # Kick the flusher only when it is not already scheduled to run.
  @flush_queue << :pub if @flush_queue.empty?
end
# Publishes a NATS::Msg that may include headers.
# Uses HPUB (headers + payload) when msg.header is set, otherwise PUB.
# @raise [TypeError] when msg is not a NATS::Msg.
# @raise [NATS::IO::BadSubject] when the message has no subject.
def publish_msg(msg)
raise TypeError, "nats: expected NATS::Msg, got #{msg.class.name}" unless msg.is_a?(Msg)
raise NATS::IO::BadSubject if !msg.subject or msg.subject.empty?
msg.reply ||= ''
msg.data ||= ''
msg_size = msg.data.bytesize
# Accounting
@stats[:out_msgs] += 1
@stats[:out_bytes] += msg_size
if msg.header
# HPUB carries both the header length and total (header + payload) size.
hdr = ''
hdr << NATS_HDR_LINE
msg.header.each do |k, v|
hdr << "#{k}: #{v}#{CR_LF}"
end
hdr << CR_LF
hdr_len = hdr.bytesize
total_size = msg_size + hdr_len
send_command("HPUB #{msg.subject} #{msg.reply} #{hdr_len} #{total_size}\r\n#{hdr}#{msg.data}\r\n")
else
send_command("PUB #{msg.subject} #{msg.reply} #{msg_size}\r\n#{msg.data}\r\n")
end
@flush_queue << :pub if @flush_queue.empty?
end
# Create subscription which is dispatched asynchronously
# messages to a callback.
#
# Without a callback the subscription is synchronous and messages are
# retrieved via the subscription's condition variable; with a callback a
# dedicated delivery thread dispatches each message to it.
def subscribe(subject, opts={}, &callback)
sid = nil
sub = nil
synchronize do
sid = (@ssid += 1)
sub = @subs[sid] = Subscription.new
sub.nc = self
sub.sid = sid
end
opts[:pending_msgs_limit] ||= NATS::IO::DEFAULT_SUB_PENDING_MSGS_LIMIT
opts[:pending_bytes_limit] ||= NATS::IO::DEFAULT_SUB_PENDING_BYTES_LIMIT
sub.subject = subject
sub.callback = callback
sub.received = 0
sub.queue = opts[:queue] if opts[:queue]
sub.max = opts[:max] if opts[:max]
sub.pending_msgs_limit = opts[:pending_msgs_limit]
sub.pending_bytes_limit = opts[:pending_bytes_limit]
sub.pending_queue = SizedQueue.new(sub.pending_msgs_limit)
send_command("SUB #{subject} #{opts[:queue]} #{sid}#{CR_LF}")
@flush_queue << :sub
# Setup server support for auto-unsubscribe when receiving enough messages
sub.unsubscribe(opts[:max]) if opts[:max]
unless callback
cond = sub.new_cond
sub.wait_for_msgs_cond = cond
end
# Async subscriptions each own a single thread for the
# delivery of messages.
# FIXME: Support shared thread pool with configurable limits
# to better support case of having a lot of subscriptions.
sub.wait_for_msgs_t = Thread.new do
loop do
msg = sub.pending_queue.pop
cb = nil
sub.synchronize do
# Decrease pending size since consumed already
sub.pending_size -= msg.data.size
cb = sub.callback
end
begin
# Note: Keep some of the alternative arity versions to slightly
# improve backwards compatibility. Eventually fine to deprecate
# since recommended version would be arity of 1 to get a NATS::Msg.
case cb.arity
when 0 then cb.call
when 1 then cb.call(msg)
when 2 then cb.call(msg.data, msg.reply)
when 3 then cb.call(msg.data, msg.reply, msg.subject)
else cb.call(msg.data, msg.reply, msg.subject, msg.header)
end
rescue => e
synchronize do
err_cb_call(self, e, sub) if @err_cb
end
end
end
end if callback
sub
end
# Sends a request using expecting a single response using a
# single subscription per connection for receiving the responses.
# It times out in case the request is not retrieved within the
# specified deadline.
# If given a callback, then the request happens asynchronously.
#
# @raise [NATS::Timeout] when no response arrives within opts[:timeout]
#   (default 0.5s).
# @raise [NATS::IO::NoRespondersError] when the server reports a 503
#   status (no responders available).
def request(subject, payload="", opts={}, &blk)
raise NATS::IO::BadSubject if !subject or subject.empty?
# If a block was given then fallback to method using auto unsubscribe.
return old_request(subject, payload, opts, &blk) if blk
return old_request(subject, payload, opts) if opts[:old_style]
token = nil
inbox = nil
future = nil
response = nil
timeout = opts[:timeout] ||= 0.5
synchronize do
start_resp_mux_sub! unless @resp_sub_prefix
# Create token for this request.
token = @nuid.next
inbox = "#{@resp_sub_prefix}.#{token}"
# Create the a future for the request that will
# get signaled when it receives the request.
future = @resp_sub.new_cond
@resp_map[token][:future] = future
end
# Publish request and wait for reply.
publish(subject, payload, inbox)
begin
MonotonicTime::with_nats_timeout(timeout) do
@resp_sub.synchronize do
future.wait(timeout)
end
end
rescue NATS::Timeout => e
synchronize { @resp_map.delete(token) }
raise e
end
# Check if there is a response already.
synchronize do
result = @resp_map[token]
response = result[:response]
@resp_map.delete(token)
end
if response and response.header
status = response.header[STATUS_HDR]
raise NATS::IO::NoRespondersError if status == "503"
end
response
end
# request_msg makes a NATS request using a NATS::Msg that may include headers.
#
# Mirrors #request but takes a full Msg so headers can be sent;
# the msg's reply field is overwritten with the mux inbox.
# @raise [TypeError] when msg is not a NATS::Msg.
# @raise [NATS::Timeout] when no response arrives within opts[:timeout].
# @raise [NATS::IO::NoRespondersError] on a 503 status response.
def request_msg(msg, opts={})
raise TypeError, "nats: expected NATS::Msg, got #{msg.class.name}" unless msg.is_a?(Msg)
raise NATS::IO::BadSubject if !msg.subject or msg.subject.empty?
token = nil
inbox = nil
future = nil
response = nil
timeout = opts[:timeout] ||= 0.5
synchronize do
start_resp_mux_sub! unless @resp_sub_prefix
# Create token for this request.
token = @nuid.next
inbox = "#{@resp_sub_prefix}.#{token}"
# Create the a future for the request that will
# get signaled when it receives the request.
future = @resp_sub.new_cond
@resp_map[token][:future] = future
end
msg.reply = inbox
msg.data ||= ''
msg_size = msg.data.bytesize
# Publish request and wait for reply.
publish_msg(msg)
begin
MonotonicTime::with_nats_timeout(timeout) do
@resp_sub.synchronize do
future.wait(timeout)
end
end
rescue NATS::Timeout => e
synchronize { @resp_map.delete(token) }
raise e
end
# Check if there is a response already.
synchronize do
result = @resp_map[token]
response = result[:response]
@resp_map.delete(token)
end
if response and response.header
status = response.header[STATUS_HDR]
raise NATS::IO::NoRespondersError if status == "503"
end
response
end
# Sends a request creating an ephemeral subscription for the request,
# expecting a single response or raising a timeout in case the request
# is not retrieved within the specified deadline.
# If given a callback, then the request happens asynchronously.
#
# Unlike #request this creates one inbox subscription per request
# (old wire style) with an auto-unsubscribe after one message.
def old_request(subject, payload, opts={}, &blk)
return unless subject
inbox = new_inbox
# If a callback was passed, then have it process
# the messages asynchronously and return the sid.
if blk
opts[:max] ||= 1
s = subscribe(inbox, opts) do |msg|
case blk.arity
when 0 then blk.call
when 1 then blk.call(msg)
when 2 then blk.call(msg.data, msg.reply)
when 3 then blk.call(msg.data, msg.reply, msg.subject)
else blk.call(msg.data, msg.reply, msg.subject, msg.header)
end
end
publish(subject, payload, inbox)
return s
end
# In case block was not given, handle synchronously
# with a timeout and only allow a single response.
timeout = opts[:timeout] ||= 0.5
opts[:max] = 1
sub = Subscription.new
sub.subject = inbox
sub.received = 0
future = sub.new_cond
sub.future = future
sub.nc = self
sid = nil
synchronize do
sid = (@ssid += 1)
sub.sid = sid
@subs[sid] = sub
end
send_command("SUB #{inbox} #{sid}#{CR_LF}")
@flush_queue << :sub
unsubscribe(sub, 1)
sub.synchronize do
# Publish the request and then wait for the response...
publish(subject, payload, inbox)
MonotonicTime::with_nats_timeout(timeout) do
future.wait(timeout)
end
end
response = sub.response
if response and response.header
status = response.header[STATUS_HDR]
raise NATS::IO::NoRespondersError if status == "503"
end
response
end
# Send a ping and wait for a pong back within a timeout.
# @param timeout [Numeric] seconds to wait for the PONG round trip.
# @raise [NATS::Timeout] when no PONG is received in time.
def flush(timeout=60)
# Schedule sending a PING, and block until we receive PONG back,
# or raise a timeout in case the response is past the deadline.
pong = @pongs.new_cond
@pongs.synchronize do
@pongs << pong
# Flush once pong future has been prepared
@pending_queue << PING_REQUEST
@flush_queue << :ping
MonotonicTime::with_nats_timeout(timeout) do
pong.wait(timeout)
end
end
end
alias :servers :server_pool
# Servers that were learned dynamically from INFO protocol updates
# (connect_urls) rather than configured explicitly by the user.
def discovered_servers
  servers.reject { |srv| !srv[:discovered] }
end
# Closes the connection to NATS, flushing pending messages first when
# the connection is still alive, and fires the disconnect/close
# callbacks. Must not be called while holding the client lock.
def close
  close_connection(CLOSED, true)
end
# Returns a unique inbox subject for subscriptions, backed by the
# client's NUID generator.
# @return [String]
def new_inbox
  ['_INBOX', @nuid.next].join('.')
end
# URI of the server we are currently connected to, or nil when not
# connected.
def connected_server
  return @uri if connected?
  nil
end
# True when a healthy connection to a server is currently established.
def connected?
  CONNECTED == @status
end
# True while the initial CONNECT handshake is in progress.
def connecting?
  CONNECTING == @status
end
# True while the client is trying to re-establish a lost connection.
def reconnecting?
  RECONNECTING == @status
end
# True once the client has been closed and will not reconnect again.
def closed?
  CLOSED == @status
end
# Registers the async error handler invoked on protocol/server errors.
def on_error(&handler)
  @err_cb = handler
end
# Registers the handler invoked whenever the connection is lost.
def on_disconnect(&handler)
  @disconnect_cb = handler
end
# Registers the handler invoked after a successful reconnect.
def on_reconnect(&handler)
  @reconnect_cb = handler
end
# Registers the handler invoked once the connection is fully closed.
def on_close(&handler)
  @close_cb = handler
end
# Returns the most recent (sticky) error seen by the client, read
# under the connection lock.
def last_error
  synchronize { @last_err }
end
# Create a JetStream context.
# @param opts [Hash] Options to customize the JetStream context.
# @option opts [String] :prefix JetStream API prefix to use for the requests.
# @option opts [String] :domain JetStream Domain to use for the requests.
# @option opts [Float] :timeout Default timeout to use for JS requests.
# @return [NATS::JetStream]
def jetstream(opts={})
::NATS::JetStream.new(self, opts)
end
alias_method :JetStream, :jetstream
alias_method :jsm, :jetstream
private
# Parses an INFO protocol line from the server, updating @server_info
# and merging any announced cluster peers (connect_urls) into the
# server pool. Returns the symbolized server info hash.
def process_info(line)
parsed_info = JSON.parse(line)
# INFO can be received asynchronously too,
# so has to be done under the lock.
synchronize do
# Symbolize keys from parsed info line
@server_info = parsed_info.reduce({}) do |info, (k,v)|
info[k.to_sym] = v
info
end
# Detect any announced server that we might not be aware of...
connect_urls = @server_info[:connect_urls]
if connect_urls
srvs = []
connect_urls.each do |url|
scheme = client_using_secure_connection? ? "tls" : "nats"
u = URI.parse("#{scheme}://#{url}")
# Skip in case it is the current server which we already know
next if @uri.host == u.host && @uri.port == u.port
present = server_pool.detect do |srv|
srv[:uri].host == u.host && srv[:uri].port == u.port
end
if not present
# Let explicit user and pass options set the credentials.
u.user = options[:user] if options[:user]
u.password = options[:pass] if options[:pass]
# Use creds from the current server if not set explicitly.
if @uri
u.user ||= @uri.user if @uri.user
u.password ||= @uri.password if @uri.password
end
# NOTE: Auto discovery won't work here when TLS host verification is enabled.
srv = { :uri => u, :reconnect_attempts => 0, :discovered => true, :hostname => u.host }
srvs << srv
end
end
srvs.shuffle! unless @options[:dont_randomize_servers]
# Include in server pool but keep current one as the first one.
server_pool.push(*srvs)
end
end
@server_info
end
# Parses a raw NATS header blob into a Hash, or returns nil when no
# header is present. For one/two line headers the inline status code
# (and optional description) from the "NATS/1.0 NNN ..." line is
# extracted; any remaining lines are parsed as "Key: Value" pairs.
def process_hdr(header)
  return nil unless header
  parsed = {}
  all_lines = header.lines
  # Check if it is an inline status and description.
  if all_lines.count <= 2
    first_line = all_lines.first.rstrip
    parsed[STATUS_HDR] = first_line.slice(NATS_HDR_LINE_SIZE - 1, STATUS_MSG_LEN)
    if NATS_HDR_LINE_SIZE + 2 < first_line.bytesize
      desc = first_line.slice(NATS_HDR_LINE_SIZE + STATUS_MSG_LEN, first_line.bytesize)
      parsed[DESC_HDR] = desc unless desc.empty?
    end
  end
  begin
    all_lines.slice(1, header.size).each do |raw_line|
      raw_line.rstrip!
      next if raw_line.empty?
      key, value = raw_line.strip.split(/\s*:\s*/, 2)
      parsed[key] = value
    end
  rescue => e
    # Malformed trailing header lines are ignored on purpose;
    # whatever was parsed so far is still returned.
    e
  end
  parsed
end
# Methods only used by the parser
# Parser callback: the server answered one of our PINGs with a PONG.
# Wakes up the most recently registered flush waiter, if any.
def process_pong
  @pongs.synchronize do
    waiter = @pongs.pop
    waiter.signal unless waiter.nil?
  end
  @pings_outstanding -= 1
  @pongs_received += 1
end
# Received a ping so respond back with a pong, and register a pong
# waiter so a subsequent PONG from the server can be matched up.
def process_ping
  @pending_queue << PONG_RESPONSE
  @flush_queue << :ping
  waiter = @pongs.new_cond
  @pongs.synchronize { @pongs << waiter }
end
# Handles protocol errors being sent by the server.
# Classifies -ERR payloads into StaleConnectionError, AuthError (marking
# the server so it is never retried) or a generic ServerError, stores it
# as the sticky error and forwards it to process_op_error.
def process_err(err)
# In case of permissions violation then dispatch the error callback
# while holding the lock.
e = synchronize do
current = server_pool.first
case
when err =~ /'Stale Connection'/
@last_err = NATS::IO::StaleConnectionError.new(err)
when current && current[:auth_required]
# We cannot recover from auth errors so mark it to avoid
# retrying to unecessarily next time.
current[:error_received] = true
@last_err = NATS::IO::AuthError.new(err)
else
@last_err = NATS::IO::ServerError.new(err)
end
end
process_op_error(e)
end
# Dispatches an incoming MSG/HMSG from the parser to the owning
# subscription: updates stats, enforces auto-unsubscribe limits,
# fulfills request futures, or enqueues onto the subscription's
# pending queue for async delivery.
def process_msg(subject, sid, reply, data, header)
  @stats[:in_msgs] += 1
  @stats[:in_bytes] += data.size
  # Throw away in case we no longer manage the subscription
  sub = nil
  synchronize { sub = @subs[sid] }
  return unless sub
  err = nil
  sub.synchronize do
    sub.received += 1
    # Check for auto_unsubscribe
    if sub.max
      case
      when sub.received > sub.max
        # Client side support in case server did not receive unsubscribe
        # NOTE(review): unsubscribe is invoked with the raw sid here —
        # verify that unsubscribe accepts a sid as well as a Subscription.
        unsubscribe(sid)
        return
      when sub.received == sub.max
        # Cleanup here if we have hit the max..
        synchronize { @subs.delete(sid) }
      end
    end
    # In case of a request which requires a future
    # do so here already while holding the lock and return
    if sub.future
      future = sub.future
      hdr = process_hdr(header)
      sub.response = Msg.new(subject: subject, reply: reply, data: data, header: hdr, nc: self, sub: sub)
      future.signal
      return
    elsif sub.pending_queue
      # Async subscribers use a sized queue for processing
      # and should be able to consume messages in parallel.
      if sub.pending_queue.size >= sub.pending_msgs_limit \
        or sub.pending_size >= sub.pending_bytes_limit then
        # Drop the message instead of blocking the parser read loop.
        err = NATS::IO::SlowConsumer.new("nats: slow consumer, messages dropped")
      else
        hdr = process_hdr(header)
        # Only dispatch message when sure that it would not block
        # the main read loop from the parser.
        msg = Msg.new(subject: subject, reply: reply, data: data, header: hdr, nc: self, sub: sub)
        sub.pending_queue << msg
        # For sync subscribers, signal that there is a new message.
        sub.wait_for_msgs_cond.signal if sub.wait_for_msgs_cond
        sub.pending_size += data.size
      end
    end
  end
  # Surface a slow-consumer error (if any) outside of the sub's lock.
  synchronize do
    @last_err = err
    err_cb_call(self, err, sub) if @err_cb
  end if err
end
# Pops the next candidate from the server pool, bumps its reconnect
# attempt counter, applies the configured backoff when warranted, and
# makes its URI (with any explicit credentials) the current target.
# Raises NoServersError when the pool is exhausted.
def select_next_server
  raise NATS::IO::NoServersError.new("nats: No servers available") if server_pool.empty?
  # Pick next from head of the list
  srv = server_pool.shift
  # Track connection attempts to this server
  srv[:reconnect_attempts] ||= 0
  srv[:reconnect_attempts] += 1
  # Back off in case we are reconnecting to it and have been connected
  sleep @options[:reconnect_time_wait] if should_delay_connect?(srv)
  # Set url of the server to which we would be connected
  @uri = srv[:uri]
  @uri.user = @options[:user] if @options[:user]
  @uri.password = @options[:pass] if @options[:pass]
  srv
end
# Whether the server's INFO advertised that TLS/SSL must be used
# (either the legacy ssl_required flag or tls_required).
def server_using_secure_connection?
  [:ssl_required, :tls_required].any? { |flag| @server_info[flag] }
end
# Whether this client was configured for TLS, either via a tls:// scheme
# in the URI or by passing :tls options at connect time.
def client_using_secure_connection?
  @uri.scheme == "tls" || @tls
end
# True when the client was created from a single URL string, in which
# case the original hostname is reused for TLS verification on reconnect.
def single_url_connect_used?
  @single_url_connect_used
end
# Queues a raw protocol command for the flusher thread to write and
# tracks the accumulated pending byte count.
def send_command(command)
  @pending_size += command.bytesize
  @pending_queue << command
  # TODO: kick flusher here in case pending_size growing large
end
# Auto unsubscribes the server by sending UNSUB command and throws away
# subscription in case already present and has received enough messages.
#
# Accepts either a Subscription object or a raw sid (Integer): internal
# callers such as process_msg only have the sid at hand, so the Integer
# form is resolved against @subs first. Unknown sids are a no-op.
#
# @param sub [Subscription, Integer] subscription (or its sid) to drop.
# @param opt_max [Integer, nil] optional max messages before the server
#   should consider the subscription removed.
# @raise [NATS::IO::BadSubscription] when the subscription is closed.
def unsubscribe(sub, opt_max=nil)
  sub = synchronize { @subs[sub] } if sub.is_a?(Integer)
  # Nothing to do when the subscription is already gone.
  return if sub.nil?
  sid = nil
  closed = nil
  sub.synchronize do
    sid = sub.sid
    closed = sub.closed
  end
  raise NATS::IO::BadSubscription.new("nats: invalid subscription") if closed
  opt_max_str = " #{opt_max}" unless opt_max.nil?
  send_command("UNSUB #{sid}#{opt_max_str}#{CR_LF}")
  @flush_queue << :unsub
  synchronize { sub = @subs[sid] }
  return unless sub
  synchronize do
    sub.max = opt_max
    # Drop immediately unless the server still owes us messages up to max.
    @subs.delete(sid) unless (sub.max && (sub.received < sub.max))
    # Stop messages delivery thread for async subscribers
    if sub.wait_for_msgs_t && sub.wait_for_msgs_t.alive?
      sub.wait_for_msgs_t.exit
      sub.pending_queue.clear
    end
  end
  sub.synchronize do
    sub.closed = true
  end
end
# Enqueues a flush kick so the flusher thread wakes up and writes out
# whatever commands are pending.
def send_flush_queue(s)
  @flush_queue.push(s)
end
# Removes a subscription from the sid => Subscription registry.
def delete_sid(sid)
  @subs.delete(sid)
end
# Invokes the user supplied error callback, trimming the argument list
# to the callback's arity: 0 => no args, 1 => error, 2 => error and
# subscription, anything else => connection, error and subscription.
def err_cb_call(nc, e, sub)
  cb = @err_cb
  return unless cb
  if cb.arity == 0
    cb.call
  elsif cb.arity == 1
    cb.call(e)
  elsif cb.arity == 2
    cb.call(e, sub)
  else
    cb.call(nc, e, sub)
  end
end
# A connection is considered authenticated when the current URI carries
# userinfo (user/pass pair or a bare token).
def auth_connection?
  @uri.user ? true : false
end
# Builds the CONNECT protocol line sent right after INFO, advertising
# client metadata and the relevant credentials: user/pass or auth token
# from the URI, or a JWT/NKEY signature over the server-provided nonce.
def connect_command
  cs = {
    :verbose => @options[:verbose],
    :pedantic => @options[:pedantic],
    :lang => NATS::IO::LANG,
    :version => NATS::IO::VERSION,
    :protocol => NATS::IO::PROTOCOL
  }
  cs[:name] = @options[:name] if @options[:name]
  case
  when auth_connection?
    if @uri.password
      cs[:user] = @uri.user
      cs[:pass] = @uri.password
    else
      # A lone userinfo component acts as an auth token.
      cs[:auth_token] = @uri.user
    end
  when @user_credentials
    # Decorated credentials file: send the JWT plus a signed nonce.
    nonce = @server_info[:nonce]
    cs[:jwt] = @user_jwt_cb.call
    cs[:sig] = @signature_cb.call(nonce)
  when @nkeys_seed
    # Bare nkey seed: send the public nkey plus a signed nonce.
    nonce = @server_info[:nonce]
    cs[:nkey] = @user_nkey_cb.call
    cs[:sig] = @signature_cb.call(nonce)
  end
  if @server_info[:headers]
    cs[:headers] = @server_info[:headers]
    # no_responders needs header support; an explicit false opts out.
    cs[:no_responders] = if @options[:no_responders] == false
                           @options[:no_responders]
                         else
                           @server_info[:headers]
                         end
  end
  "CONNECT #{cs.to_json}#{CR_LF}"
end
# Handles errors from reading, parsing the protocol or stale connection.
# The lock should not be held entering this function. Depending on state
# and options this either starts reconnection on a fresh thread or
# closes the connection for good.
def process_op_error(e)
  should_bail = synchronize do
    connecting? || closed? || reconnecting?
  end
  return if should_bail
  synchronize do
    @last_err = e
    err_cb_call(self, e, nil) if @err_cb
    # If we were connected and configured to reconnect,
    # then trigger disconnect and start reconnection logic
    if connected? and should_reconnect?
      @status = RECONNECTING
      @io.close if @io
      @io = nil
      # TODO: Reconnecting pending buffer?
      # Do reconnect under a different thread than the one
      # in which we got the error.
      Thread.new do
        begin
          # Abort currently running reads in case they're around
          # FIXME: There might be more graceful way here...
          @read_loop_thread.exit if @read_loop_thread.alive?
          @flusher_thread.exit if @flusher_thread.alive?
          @ping_interval_thread.exit if @ping_interval_thread.alive?
          attempt_reconnect
        rescue NATS::IO::NoServersError => e
          @last_err = e
          close
        end
      end
      # Terminate the thread on which the error was detected.
      Thread.exit
      return
    end
    # Otherwise, stop trying to reconnect and close the connection
    @status = DISCONNECTED
  end
  # Otherwise close the connection to NATS
  close
end
# Gathers data from the socket and sends it to the parser.
# Runs on its own thread; returns when the connection is closed or a
# reconnect takes over, and funnels read/parse failures into
# process_op_error.
def read_loop
  loop do
    begin
      should_bail = synchronize do
        # FIXME: In case of reconnect as well?
        @status == CLOSED or @status == RECONNECTING
      end
      if !@io or @io.closed? or should_bail
        return
      end
      # TODO: Remove timeout and just wait to be ready
      data = @io.read(NATS::IO::MAX_SOCKET_READ_BYTES)
      @parser.parse(data) if data
    rescue Errno::ETIMEDOUT
      # FIXME: We do not really need a timeout here...
      retry
    rescue => e
      # In case of reading/parser errors, trigger
      # reconnection logic in case desired.
      process_op_error(e)
    end
  end
end
# Waits for a flush kick from producers, then writes all accumulated
# pending commands to the socket in a single batch. Runs on its own
# thread and returns once the client is no longer connected.
def flusher_loop
  loop do
    # Blocks waiting for the flusher to be kicked...
    @flush_queue.pop
    should_bail = synchronize do
      # NOTE(review): the second clause is redundant — CONNECTING already
      # satisfies @status != CONNECTED.
      @status != CONNECTED || @status == CONNECTING
    end
    return if should_bail
    # Skip in case nothing remains pending already.
    next if @pending_queue.empty?
    # FIXME: should limit how many commands to take at once
    # since producers could be adding as many as possible
    # until reaching the max pending queue size.
    cmds = []
    cmds << @pending_queue.pop until @pending_queue.empty?
    begin
      @io.write(cmds.join) unless cmds.empty?
    rescue => e
      synchronize do
        @last_err = e
        err_cb_call(self, e, nil) if @err_cb
      end
      process_op_error(e)
      return
    end if @io
    synchronize do
      @pending_size = 0
    end
  end
end
# Periodically sends PING to the server; when too many pings go
# unanswered the connection is treated as stale. Runs on its own thread.
def ping_interval_loop
  loop do
    sleep @options[:ping_interval]
    # Skip ping interval until connected
    next if !connected?
    if @pings_outstanding >= @options[:max_outstanding_pings]
      process_op_error(NATS::IO::StaleConnectionError.new("nats: stale connection"))
      return
    end
    @pings_outstanding += 1
    send_command(PING_REQUEST)
    @flush_queue << :ping
  end
rescue => e
  process_op_error(e)
end
# Performs the initial protocol handshake on a fresh TCP connection:
# reads INFO, upgrades the socket to TLS when both sides require it,
# sends CONNECT followed by PING and validates the server's reply.
# Raises ConnectError/AuthError/ServerError on any handshake failure.
def process_connect_init
  line = @io.read_line(options[:connect_timeout])
  if !line or line.empty?
    raise NATS::IO::ConnectError.new("nats: protocol exception, INFO not received")
  end
  if match = line.match(NATS::Protocol::INFO)
    info_json = match.captures.first
    process_info(info_json)
  else
    raise NATS::IO::ConnectError.new("nats: protocol exception, INFO not valid")
  end
  case
  when (server_using_secure_connection? and client_using_secure_connection?)
    tls_context = nil
    if @tls
      # Allow prepared context and customizations via :tls opts
      tls_context = @tls[:context] if @tls[:context]
    else
      # Defaults
      tls_context = OpenSSL::SSL::SSLContext.new
      # Use the default verification options from Ruby:
      # https://github.com/ruby/ruby/blob/96db72ce38b27799dd8e80ca00696e41234db6ba/ext/openssl/lib/openssl/ssl.rb#L19-L29
      #
      # Insecure TLS versions not supported already:
      # https://github.com/ruby/openssl/commit/3e5a009966bd7f806f7180d82cf830a04be28986
      #
      tls_context.set_params
    end
    # Setup TLS connection by rewrapping the socket
    tls_socket = OpenSSL::SSL::SSLSocket.new(@io.socket, tls_context)
    # Close TCP socket after closing TLS socket as well.
    tls_socket.sync_close = true
    # Required to enable hostname verification if Ruby runtime supports it (>= 2.4):
    # https://github.com/ruby/openssl/commit/028e495734e9e6aa5dba1a2e130b08f66cf31a21
    tls_socket.hostname = @hostname
    tls_socket.connect
    @io.socket = tls_socket
  when (server_using_secure_connection? and !client_using_secure_connection?)
    raise NATS::IO::ConnectError.new('TLS/SSL required by server')
  when (client_using_secure_connection? and !server_using_secure_connection?)
    raise NATS::IO::ConnectError.new('TLS/SSL not supported by server')
  else
    # Otherwise, use a regular connection.
  end
  # Send connect and process synchronously. If using TLS,
  # it should have handled upgrading at this point.
  @io.write(connect_command)
  # Send ping/pong after connect
  @io.write(PING_REQUEST)
  next_op = @io.read_line(options[:connect_timeout])
  if @options[:verbose]
    # Need to get another command here if verbose
    raise NATS::IO::ConnectError.new("expected to receive +OK") unless next_op =~ NATS::Protocol::OK
    next_op = @io.read_line(options[:connect_timeout])
  end
  case next_op
  when NATS::Protocol::PONG
  when NATS::Protocol::ERR
    if @server_info[:auth_required]
      raise NATS::IO::AuthError.new($1)
    else
      raise NATS::IO::ServerError.new($1)
    end
  else
    raise NATS::IO::ConnectError.new("expected PONG, got #{next_op}")
  end
end
# Reconnect logic, this is done while holding the lock.
# Cycles through the server pool until a connection succeeds, then
# replays subscriptions, flushes anything left pending, and restarts
# the worker threads before firing the reconnect callback.
def attempt_reconnect
  @disconnect_cb.call(@last_err) if @disconnect_cb
  # Clear sticky error
  @last_err = nil
  # Do reconnect
  srv = nil
  begin
    srv = select_next_server
    # Establish TCP connection with new server
    @io = create_socket
    @io.connect
    @stats[:reconnects] += 1
    # Set hostname to use for TLS hostname verification
    if client_using_secure_connection? and single_url_connect_used?
      # Reuse original hostname name in case of using TLS.
      @hostname ||= srv[:hostname]
    else
      @hostname = srv[:hostname]
    end
    # Established TCP connection successfully so can start connect
    process_connect_init
    # Reset reconnection attempts if connection is valid
    srv[:reconnect_attempts] = 0
    srv[:auth_required] ||= true if @server_info[:auth_required]
    # Add back to rotation since successfully connected
    server_pool << srv
  rescue NATS::IO::NoServersError => e
    raise e
  rescue => e
    # In case there was an error from the server check
    # to see whether need to take it out from rotation
    srv[:auth_required] ||= true if @server_info[:auth_required]
    server_pool << srv if can_reuse_server?(srv)
    @last_err = e
    # Trigger async error handler
    err_cb_call(self, e, nil) if @err_cb
    # Continue retrying until there are no options left in the server pool
    retry
  end
  # Clear pending flush calls and reset state before restarting loops
  @flush_queue.clear
  @pings_outstanding = 0
  @pongs_received = 0
  # Replay all subscriptions
  @subs.each_pair do |sid, sub|
    @io.write("SUB #{sub.subject} #{sub.queue} #{sid}#{CR_LF}")
  end
  # Flush anything which was left pending, in case of errors during flush
  # then we should raise error then retry the reconnect logic
  cmds = []
  cmds << @pending_queue.pop until @pending_queue.empty?
  @io.write(cmds.join) unless cmds.empty?
  @status = CONNECTED
  @pending_size = 0
  # Reset parser state here to avoid unknown protocol errors
  # on reconnect...
  @parser.reset!
  # Now connected to NATS, and we can restart parser loop, flusher
  # and ping interval
  start_threads!
  # Dispatch the reconnected callback while holding lock
  # which we should have already
  @reconnect_cb.call if @reconnect_cb
end
# Tears down the connection: stops the worker threads, wakes pending
# flush waiters, drains what can still be written, destroys
# subscriptions and closes the socket, leaving @status at conn_status.
# Callbacks (disconnect/close) are dispatched unless do_cbs is false.
def close_connection(conn_status, do_cbs=true)
  synchronize do
    if @status == CLOSED
      @status = conn_status
      return
    end
  end
  # Kick the flusher so it bails due to closed state
  @flush_queue << :fallout if @flush_queue
  Thread.pass
  # FIXME: More graceful way of handling the following?
  # Ensure ping interval and flusher are not running anymore
  if @ping_interval_thread and @ping_interval_thread.alive?
    @ping_interval_thread.exit
  end
  if @flusher_thread and @flusher_thread.alive?
    @flusher_thread.exit
  end
  if @read_loop_thread and @read_loop_thread.alive?
    @read_loop_thread.exit
  end
  # TODO: Delete any other state which we are not using here too.
  synchronize do
    # Release any callers blocked waiting on a pong.
    @pongs.synchronize do
      @pongs.each do |pong|
        pong.signal
      end
      @pongs.clear
    end
    # Try to write any pending flushes in case
    # we have a connection then close it.
    should_flush = (@pending_queue && @io && @io.socket && !@io.closed?)
    begin
      cmds = []
      cmds << @pending_queue.pop until @pending_queue.empty?
      # FIXME: Fails when empty on TLS connection?
      @io.write(cmds.join) unless cmds.empty?
    rescue => e
      @last_err = e
      err_cb_call(self, e, nil) if @err_cb
    end if should_flush
    # Destroy any remaining subscriptions.
    @subs.each do |_, sub|
      if sub.wait_for_msgs_t && sub.wait_for_msgs_t.alive?
        sub.wait_for_msgs_t.exit
        sub.pending_queue.clear
      end
    end
    @subs.clear
    if do_cbs
      @disconnect_cb.call(@last_err) if @disconnect_cb
      @close_cb.call if @close_cb
    end
    @status = conn_status
    # Close the established connection in case
    # we still have it.
    if @io
      @io.close if @io.socket
      @io = nil
    end
  end
end
# Spawns the three worker threads (reader, flusher, ping interval).
# abort_on_exception makes unexpected thread crashes fatal rather than
# silently swallowed.
def start_threads!
  # Reading loop for gathering data
  @read_loop_thread = Thread.new { read_loop }
  @read_loop_thread.abort_on_exception = true
  # Flusher loop for sending commands
  @flusher_thread = Thread.new { flusher_loop }
  @flusher_thread.abort_on_exception = true
  # Ping interval handling for keeping alive the connection
  @ping_interval_thread = Thread.new { ping_interval_loop }
  @ping_interval_thread.abort_on_exception = true
end
# Prepares requests subscription that handles the responses
# for the new style request response: a single wildcard inbox
# subscription whose delivery thread routes each response to the
# request future registered under the inbox token.
def start_resp_mux_sub!
  @resp_sub_prefix = "_INBOX.#{@nuid.next}"
  @resp_map = Hash.new { |h,k| h[k] = { }}
  @resp_sub = Subscription.new
  @resp_sub.subject = "#{@resp_sub_prefix}.*"
  @resp_sub.received = 0
  @resp_sub.nc = self
  # FIXME: Allow setting pending limits for responses mux subscription.
  @resp_sub.pending_msgs_limit = NATS::IO::DEFAULT_SUB_PENDING_MSGS_LIMIT
  @resp_sub.pending_bytes_limit = NATS::IO::DEFAULT_SUB_PENDING_BYTES_LIMIT
  @resp_sub.pending_queue = SizedQueue.new(@resp_sub.pending_msgs_limit)
  @resp_sub.wait_for_msgs_t = Thread.new do
    loop do
      msg = @resp_sub.pending_queue.pop
      @resp_sub.pending_size -= msg.data.size
      # Pick the token and signal the request under the mutex
      # from the subscription itself.
      token = msg.subject.split('.').last
      future = nil
      synchronize do
        future = @resp_map[token][:future]
        @resp_map[token][:response] = msg
      end
      # Signal back that the response has arrived
      # in case the future has not been yet delete.
      @resp_sub.synchronize do
        future.signal if future
      end
    end
  end
  sid = (@ssid += 1)
  @subs[sid] = @resp_sub
  send_command("SUB #{@resp_sub.subject} #{sid}#{CR_LF}")
  @flush_queue << :sub
end
# Decides whether a failed server entry should go back into the pool.
# Infinite reconnects (max_reconnect_attempts < 0) always allow reuse;
# hard failures (e.g. auth errors) never do; otherwise the per-server
# attempt counter is checked against the configured maximum.
def can_reuse_server?(server)
  return false unless server
  return true if @options[:max_reconnect_attempts] < 0
  return false if server[:error_received]
  server[:reconnect_attempts] <= @options[:max_reconnect_attempts]
end
# Whether to back off before dialing this server again: only when we had
# previously been connected to it and it has attempt tracking set up.
def should_delay_connect?(server)
  server[:was_connected] && server[:reconnect_attempts] >= 0
end
# Negation of the :reconnect option.
def should_not_reconnect?
  !@options[:reconnect]
end
# Whether the client was configured to reconnect on failures.
def should_reconnect?
  @options[:reconnect]
end
# Builds a fresh TCP socket wrapper pointed at the currently selected
# server URI, using the library default connect timeout.
def create_socket
  socket_opts = {
    uri: @uri,
    connect_timeout: NATS::IO::DEFAULT_CONNECT_TIMEOUT
  }
  NATS::IO::Socket.new(socket_opts)
end
# Wires up the callbacks used during CONNECT for NKEYS-based auth:
# either a bare nkey seed file (@nkeys_seed) or a decorated credentials
# file (@user_credentials) holding both a user JWT and an nkey seed.
def setup_nkeys_connect
  begin
    require 'nkeys'
    require 'base64'
  rescue LoadError
    raise(Error, "nkeys is not installed")
  end
  case
  when @nkeys_seed
    # Public key callback: re-derive from the seed file on each call.
    @user_nkey_cb = proc {
      seed = File.read(@nkeys_seed).chomp
      kp = NKEYS::from_seed(seed)
      # Take a copy since original will be gone with the wipe.
      pub_key = kp.public_key.dup
      kp.wipe!
      pub_key
    }
    # Nonce signing callback: url-safe base64 without '=' padding.
    @signature_cb = proc { |nonce|
      seed = File.read(@nkeys_seed).chomp
      kp = NKEYS::from_seed(seed)
      raw_signed = kp.sign(nonce)
      kp.wipe!
      encoded = Base64.urlsafe_encode64(raw_signed)
      encoded.gsub('=', '')
    }
  when @user_credentials
    # When the credentials are within a single decorated file.
    # The JWT is the line following the "BEGIN NATS USER JWT" marker.
    @user_jwt_cb = proc {
      jwt_start = "BEGIN NATS USER JWT".freeze
      found = false
      jwt = nil
      File.readlines(@user_credentials).each do |line|
        case
        when found
          jwt = line.chomp
          break
        when line.include?(jwt_start)
          found = true
        end
      end
      raise(Error, "No JWT found in #{@user_credentials}") if not found
      jwt
    }
    # The seed is the line following the "BEGIN USER NKEY SEED" marker.
    @signature_cb = proc { |nonce|
      seed_start = "BEGIN USER NKEY SEED".freeze
      found = false
      seed = nil
      File.readlines(@user_credentials).each do |line|
        case
        when found
          seed = line.chomp
          break
        when line.include?(seed_start)
          found = true
        end
      end
      raise(Error, "No nkey user seed found in #{@user_credentials}") if not found
      kp = NKEYS::from_seed(seed)
      raw_signed = kp.sign(nonce)
      # seed is a reference so also cleared when doing wipe,
      # which can be done since Ruby strings are mutable.
      # NOTE(review): this branch calls kp.wipe while the branches above
      # use kp.wipe! — confirm which method the nkeys gem provides.
      kp.wipe
      encoded = Base64.urlsafe_encode64(raw_signed)
      # Remove padding
      encoded.gsub('=', '')
    }
  end
end
# Expands a comma separated list of NATS endpoints into URI objects,
# defaulting the scheme to 'nats', the host to 'localhost' and the port
# to DEFAULT_PORT when a piece is not given explicitly.
def process_uri(uris)
  uris.split(',').map do |single|
    parts = {}
    # Scheme (default nats://)
    if single.include?("://")
      scheme, single = single.split("://")
      parts[:scheme] = scheme
    else
      parts[:scheme] = 'nats'
    end
    # Optional userinfo, then host and port
    if single.include?("@")
      userinfo, endpoint = single.split("@")
      parts[:userinfo] = userinfo
      host, port = endpoint.split(":")
    else
      host, port = single.split(":")
    end
    parts[:host] = host || "localhost"
    parts[:port] = port || DEFAULT_PORT
    URI::Generic.build(parts)
  end
end
end
# Namespace holding the client's tunables and the low-level socket
# wrapper used for non-blocking reads/writes with deadlines.
module IO
  include Status

  # Client creates a connection to the NATS Server.
  Client = ::NATS::Client

  MAX_RECONNECT_ATTEMPTS = 10
  RECONNECT_TIME_WAIT = 2

  # Maximum accumulated pending commands bytesize before forcing a flush.
  MAX_PENDING_SIZE = 32768

  # Maximum number of flush kicks that can be queued up before we block.
  MAX_FLUSH_KICK_SIZE = 1024

  # Maximum number of bytes which we will be gathering on a single read.
  # TODO: Make dynamic?
  MAX_SOCKET_READ_BYTES = 32768

  # Ping intervals (seconds / count)
  DEFAULT_PING_INTERVAL = 120
  DEFAULT_PING_MAX = 2

  # Default IO timeouts (seconds)
  DEFAULT_CONNECT_TIMEOUT = 2
  DEFAULT_READ_WRITE_TIMEOUT = 2

  # Default Pending Limits for subscriptions
  DEFAULT_SUB_PENDING_MSGS_LIMIT = 65536
  DEFAULT_SUB_PENDING_BYTES_LIMIT = 65536 * 1024

  # Implementation adapted from https://github.com/redis/redis-rb
  class Socket
    # Underlying ::Socket (or SSLSocket after a TLS upgrade).
    attr_accessor :socket

    def initialize(options={})
      @uri = options[:uri]
      @connect_timeout = options[:connect_timeout]
      @write_timeout = options[:write_timeout]
      @read_timeout = options[:read_timeout]
      @socket = nil
    end

    # Resolves the host and connects to the first address that accepts,
    # enabling TCP_NODELAY on the resulting socket.
    def connect
      addrinfo = ::Socket.getaddrinfo(@uri.host, nil, ::Socket::AF_UNSPEC, ::Socket::SOCK_STREAM)
      addrinfo.each_with_index do |ai, i|
        begin
          @socket = connect_addrinfo(ai, @uri.port, @connect_timeout)
          break
        rescue SystemCallError => e
          # Give up if no more available
          raise e if addrinfo.length == i+1
        end
      end
      # Set TCP no delay by default
      @socket.setsockopt(::Socket::IPPROTO_TCP, ::Socket::TCP_NODELAY, 1)
    end

    # Reads a single line, raising SocketTimeoutError when nothing is
    # readable before the deadline (seconds, nil blocks indefinitely).
    def read_line(deadline=nil)
      # FIXME: Should accumulate and read in a non blocking way instead
      unless ::IO.select([@socket], nil, nil, deadline)
        raise NATS::IO::SocketTimeoutError
      end
      @socket.gets
    end

    # Non-blocking read of up to max_bytes, waiting on the socket with
    # select until the deadline; EOF is surfaced as Errno::ECONNRESET.
    def read(max_bytes, deadline=nil)
      begin
        return @socket.read_nonblock(max_bytes)
      rescue ::IO::WaitReadable
        if ::IO.select([@socket], nil, nil, deadline)
          retry
        else
          raise NATS::IO::SocketTimeoutError
        end
      rescue ::IO::WaitWritable
        if ::IO.select(nil, [@socket], nil, deadline)
          retry
        else
          raise NATS::IO::SocketTimeoutError
        end
      end
    rescue EOFError => e
      if RUBY_ENGINE == 'jruby' and e.message == 'No message available'
        # FIXME: <EOFError: No message available> can happen in jruby
        # even though seems it is temporary and eventually possible
        # to read from socket.
        return nil
      end
      raise Errno::ECONNRESET
    end

    # Non-blocking write loop that keeps going until all bytes are out,
    # honoring the deadline; EOF is surfaced as Errno::ECONNRESET.
    def write(data, deadline=nil)
      length = data.bytesize
      total_written = 0
      loop do
        begin
          written = @socket.write_nonblock(data)
          total_written += written
          break total_written if total_written >= length
          # Retry with only the unwritten remainder.
          data = data.byteslice(written..-1)
        rescue ::IO::WaitWritable
          if ::IO.select(nil, [@socket], nil, deadline)
            retry
          else
            raise NATS::IO::SocketTimeoutError
          end
        rescue ::IO::WaitReadable
          if ::IO.select([@socket], nil, nil, deadline)
            retry
          else
            raise NATS::IO::SocketTimeoutError
          end
        end
      end
    rescue EOFError
      raise Errno::ECONNRESET
    end

    def close
      @socket.close
    end

    def closed?
      @socket.closed?
    end

    private

    # Non-blocking connect to a single resolved address, waiting up to
    # @connect_timeout for the connection to be established.
    def connect_addrinfo(ai, port, timeout)
      sock = ::Socket.new(::Socket.const_get(ai[0]), ::Socket::SOCK_STREAM, 0)
      sockaddr = ::Socket.pack_sockaddr_in(port, ai[3])
      begin
        sock.connect_nonblock(sockaddr)
      rescue Errno::EINPROGRESS, Errno::EALREADY, ::IO::WaitWritable
        unless ::IO.select(nil, [sock], nil, @connect_timeout)
          raise NATS::IO::SocketTimeoutError
        end
        # Confirm that connection was established
        begin
          sock.connect_nonblock(sockaddr)
        rescue Errno::EISCONN
          # Connection was established without issues.
        end
      end
      sock
    end
  end
end
# Monotonic clock helper used for timeouts and duration measurement.
class MonotonicTime
  # Implementation of MonotonicTime adapted from
  # https://github.com/ruby-concurrency/concurrent-ruby/
  class << self
    case
    when defined?(Process::CLOCK_MONOTONIC)
      # Preferred: OS monotonic clock, immune to wall-clock jumps.
      def now
        Process.clock_gettime(Process::CLOCK_MONOTONIC)
      end
    when RUBY_ENGINE == 'jruby'
      # JRuby: nanoTime converted to fractional seconds.
      def now
        java.lang.System.nanoTime() / 1_000_000_000.0
      end
    else
      def now
        # Fallback to regular time behavior
        ::Time.now.to_f
      end
    end

    # Runs the block and raises NATS::Timeout when it took longer than
    # `timeout` seconds. NOTE(review): the block is not interrupted —
    # the timeout is only checked after the block completes.
    def with_nats_timeout(timeout)
      start_time = now
      yield
      end_time = now
      duration = end_time - start_time
      if duration > timeout
        raise NATS::Timeout.new("nats: timeout")
      end
    end

    # Seconds elapsed since the monotonic instant t0.
    def since(t0)
      now - t0
    end
  end
end
end
# ---- end of combined source segment ----
module Nerpin
  # Gem version string.
  VERSION = "0.0.1"
end
# version bump to 0.0.2
module Nerpin
  # Gem version string.
  VERSION = "0.0.2"
end
# ---- end of combined source segment ----
module Nexpose
# Object that represents a connection to a Nexpose Security Console.
class Connection
  # Perform an asset filter search that will locate assets matching the
  # provided conditions.
  #
  # For example, the following call will return assets with Java installed:
  #   nsc.filter(Search::Field::SOFTWARE, Search::Operator::CONTAINS, 'java')
  #
  # @param [String] field Constant from Search::Field
  # @param [String] operator Constant from Search::Operator
  # @param [String] value Search term or constant from Search::Value
  # @return [Array[FilteredAsset]] List of matching assets.
  #
  def filter(field, operator, value = '')
    # Convenience wrapper around #search for a single criterion.
    criterion = Criterion.new(field, operator, value)
    criteria = Criteria.new(criterion)
    search(criteria)
  end

  # Perform a search that will match the criteria provided.
  #
  # For example, the following call will return assets with Java and .NET:
  #   java_criterion = Criterion.new(Search::Field::SOFTWARE,
  #                                  Search::Operator::CONTAINS,
  #                                  'java')
  #   dot_net_criterion = Criterion.new(Search::Field::SOFTWARE,
  #                                     Search::Operator::CONTAINS,
  #                                     '.net')
  #   criteria = Criteria.new([java_criterion, dot_net_criterion])
  #   results = nsc.search(criteria)
  #
  # @param [Criteria] criteria Criteria search object.
  # @return [Array[FilteredAsset]] List of matching assets.
  #
  def search(criteria)
    # POSTs the serialized criteria to the console's filterAssets table.
    results = DataTable._get_json_table(self,
                                        '/data/asset/filterAssets',
                                        criteria._to_payload)
    results.map { |a| FilteredAsset.new(a) }
  end
end
# Constants for performing Asset Filter searches and generating Dynamic Asset
# Groups.
#
module Search
  module_function

  # Search constants

  # Only these values are accepted for a field value.
  #
  module Field
    # Search for an Asset by name.
    # Valid Operators: IS, IS_NOT, STARTS_WITH, ENDS_WITH, CONTAINS,
    # NOT_CONTAINS
    ASSET = 'ASSET'

    # Search for an Asset by CVE ID
    # Valid Operators: IS, IS_NOT, CONTAINS, NOT_CONTAINS
    CVE_ID = 'CVE_ID'

    # Valid Operators: IS, IS_NOT
    # Valid Values (See Value::AccessComplexity): LOW, MEDIUM, HIGH
    CVSS_ACCESS_COMPLEXITY = 'CVSS_ACCESS_COMPLEXITY'

    # Valid Operators: IS, IS_NOT
    # Valid Values (See Value::AccessVector): LOCAL, ADJACENT, NETWORK
    CVSS_ACCESS_VECTOR = 'CVSS_ACCESS_VECTOR'

    # Valid Operators: IS, IS_NOT
    # Valid Values (See Value::AuthenticationRequired): NONE, SINGLE, MULTIPLE
    CVSS_AUTHENTICATION_REQUIRED = 'CVSS_AUTHENTICATION_REQUIRED'

    # Valid Operators: IS, IS_NOT
    # Valid Values (See Value::CVSSImpact): NONE, PARTIAL, COMPLETE
    CVSS_AVAILABILITY_IMPACT = 'CVSS_AVAILABILITY_IMPACT'

    # Valid Operators: IS, IS_NOT
    # Valid Values (See Value::CVSSImpact): NONE, PARTIAL, COMPLETE
    CVSS_CONFIDENTIALITY_IMPACT = 'CVSS_CONFIDENTIALITY_IMPACT'

    # Valid Operators: IS, IS_NOT
    # Valid Values (See Value::CVSSImpact): NONE, PARTIAL, COMPLETE
    CVSS_INTEGRITY_IMPACT = 'CVSS_INTEGRITY_IMPACT'

    # Valid Operators: IS, IS_NOT, IN_RANGE, GREATER_THAN, LESS_THAN
    # Valid Values: Floats from 0.0 to 10.0
    CVSS_SCORE = 'CVSS_SCORE'

    # Valid Operators: IN, NOT_IN
    # Valid Values (See Value::HostType): UNKNOWN, VIRTUAL, HYPERVISOR,
    # BARE_METAL
    HOST_TYPE = 'HOST_TYPE'

    # Valid Operators: IN, NOT_IN
    # Valid Values (See Value::IPType): IPv4, IPv6
    IP_ADDRESS_TYPE = 'IP_ADDRESS_TYPE'

    # Valid Operators: IN
    # Valid Values (See Value::IPType): IPv4, IPv6
    IP_ALT_ADDRESS_TYPE = 'IP_ALT_ADDRESS_TYPE'

    # Valid Operators: IN, NOT_IN
    IP_RANGE = 'IP_RANGE'

    # Valid Operators: IS, IS_NOT, IN_RANGE
    # Valid Values: Integers from 1 to 65535
    OPEN_PORT = 'OPEN_PORT'

    # Valid Operators: CONTAINS, NOT_CONTAINS, IS_EMPTY, IS_NOT_EMPTY
    OS = 'OS'

    # Valid Operators: IS
    # Valid Values (See Value::PCICompliance): PASS, FAIL
    PCI_COMPLIANCE_STATUS = 'PCI_COMPLIANCE_STATUS'

    # Valid Operators: IS, IS_NOT, IN_RANGE, GREATER_THAN, LESS_THAN
    RISK_SCORE = 'RISK_SCORE'

    # Search based on the last scan date of an asset.
    # Valid Operators: ON_OR_BEFORE, ON_OR_AFTER, BETWEEN, EARLIER_THAN,
    # WITHIN_THE_LAST
    # Valid Values: Use Value::ScanDate::FORMAT for date arguments.
    # Use FixNum for day arguments.
    SCAN_DATE = 'SCAN_DATE'

    # Valid Operators: CONTAINS, NOT_CONTAINS
    SERVICE = 'SERVICE'

    # Search based on the Site ID of an asset.
    # (Note that underlying search used Site ID, despite 'site name' value.)
    # Valid Operators: IN, NOT_IN
    # Valid Values: FixNum Site ID of the site.
    SITE_ID = 'SITE_NAME'

    # Valid Operators: CONTAINS, NOT_CONTAINS
    SOFTWARE = 'SOFTWARE'

    # Valid Operators: IS, IS_NOT, GREATER_THAN, LESS_THAN, IS_APPLIED,
    # IS_NOT_APPLIED
    # Valid Values: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW
    USER_ADDED_CRITICALITY_LEVEL = 'TAG_CRITICALITY'

    # Valid Operators: IS, IS_NOT, STARTS_WITH, ENDS_WITH, IS_APPLIED,
    # IS_NOT_APPLIED, CONTAINS, NOT_CONTAINS
    USER_ADDED_CUSTOM_TAG = 'TAG'

    # Valid Operators: IS, IS_NOT, STARTS_WITH, ENDS_WITH, IS_APPLIED,
    # IS_NOT_APPLIED, CONTAINS, NOT_CONTAINS
    USER_ADDED_TAG_LOCATION = 'TAG_LOCATION'

    # Valid Operators: IS, IS_NOT, STARTS_WITH, ENDS_WITH, IS_APPLIED,
    # IS_NOT_APPLIED, CONTAINS, NOT_CONTAINS
    USER_ADDED_TAG_OWNER = 'TAG_OWNER'

    # Valid Operators: ARE
    # Valid Values: PRESENT, NOT_PRESENT
    VALIDATED_VULNERABILITIES = 'VULNERABILITY_VALIDATED_STATUS'

    # Search against vulnerability titles that an asset contains.
    # Valid Operators: CONTAINS, NOT_CONTAINS
    VULNERABILITY = 'VULNERABILITY'

    # Valid Operators: INCLUDE, DO_NOT_INCLUDE
    # Valid Values (See Value::VulnerabilityExposure): MALWARE, METASPLOIT,
    # DATABASE
    VULNERABILITY_EXPOSURES = 'VULNERABILITY_EXPOSURES'

    # Search by VULNERABILITY CATEGORY
    # Valid Operators: IS, IS_NOT, CONTAINS, NOT_CONTAINS, STARTS_WITH,
    # ENDS_WITH
    VULN_CATEGORY = 'VULN_CATEGORY'
  end

  # List of acceptable operators. Not all fields accept all operators.
  #
  module Operator
    CONTAINS = 'CONTAINS'
    NOT_CONTAINS = 'NOT_CONTAINS'
    IS = 'IS'
    IS_NOT = 'IS_NOT'
    ARE = 'ARE'
    IN = 'IN'
    NOT_IN = 'NOT_IN'
    IN_RANGE = 'IN_RANGE'
    STARTS_WITH = 'STARTS_WITH'
    ENDS_WITH = 'ENDS_WITH'
    ON_OR_BEFORE = 'ON_OR_BEFORE'
    ON_OR_AFTER = 'ON_OR_AFTER'
    BETWEEN = 'BETWEEN'
    EARLIER_THAN = 'EARLIER_THAN'
    WITHIN_THE_LAST = 'WITHIN_THE_LAST'
    GREATER_THAN = 'GREATER_THAN'
    LESS_THAN = 'LESS_THAN'
    IS_EMPTY = 'IS_EMPTY'
    IS_NOT_EMPTY = 'IS_NOT_EMPTY'
    INCLUDE = 'INCLUDE'
    DO_NOT_INCLUDE = 'DO_NOT_INCLUDE'
    IS_APPLIED = 'IS_APPLIED'
    IS_NOT_APPLIED = 'IS_NOT_APPLIED'
  end

  # Specialized values used by certain search fields
  #
  module Value
    # Constants for filtering on access complexity.
    module AccessComplexity
      LOW = 'L'
      MEDIUM = 'M'
      HIGH = 'H'
    end

    # Constants for filtering on access vector.
    module AccessVector
      LOCAL = 'L'
      ADJACENT = 'A'
      NETWORK = 'N'
    end

    # Constants for filtering on whether authentication is required.
    module AuthenticationRequired
      NONE = 'N'
      SINGLE = 'S'
      MULTIPLE = 'M'
    end

    # Constants for filtering on CVSS impact.
    module CVSSImpact
      NONE = 'N'
      PARTIAL = 'P'
      COMPLETE = 'C'
    end

    # Constants for filtering on host type.
    module HostType
      UNKNOWN = '0'
      VIRTUAL = '1'
      HYPERVISOR = '2'
      BARE_METAL = '3'
    end

    # Constants for filtering on IP type.
    module IPType
      IPv4 = '0'
      IPv6 = '1'
    end

    # Constants for filtering on PCI compliance.
    module PCICompliance
      PASS = '1'
      FAIL = '0'
    end

    # Constants for filtering on scan date.
    module ScanDate
      # Pass this format to #strftime() to get expected format for requests.
      FORMAT = '%m/%d/%Y'
    end

    # Constants for filtering on vulnerability validations.
    # NOTE(review): NOT_PRESENT=1 / PRESENT=0 looks inverted relative to
    # the names — confirm against the console API before changing.
    module ValidatedVulnerability
      NOT_PRESENT = 1
      PRESENT = 0
    end

    # Constants for filtering on vulnerability exposure.
    module VulnerabilityExposure
      MALWARE = 'type:"malware_type", name:"malwarekit"'
      # TODO: A problem in Nexpose causes these values to not be constant.
      METASPLOIT = 'type:"exploit_source_type", name:"2"'
      DATABASE = 'type:"exploit_source_type", name:"1"'
    end
  end
end
# Individual search criterion.
#
class Criterion
  # Search field. One of Nexpose::Search::Field
  # @see Nexpose::Search::Field for any restrictions on the other attibutes.
  attr_accessor :field
  # Search operator. One of Nexpose::Search::Operator
  attr_accessor :operator
  # Search value. A search string or one of Nexpose::Search::Value
  attr_accessor :value

  def initialize(field, operator, value = '')
    @field = field.upcase
    @operator = operator.upcase
    # Values are always stored as strings (or an array of strings).
    @value = value.is_a?(Array) ? value.map(&:to_s) : value.to_s
  end

  # Convert this object into the map format expected by Nexpose.
  #
  def to_h
    {
      'metadata' => { 'fieldName' => field },
      'operator' => operator,
      'values' => Array(value)
    }
  end

  # Build a criterion back from its JSON (hash) representation.
  def self.parse(json)
    new(json['metadata']['fieldName'], json['operator'], json['values'])
  end
end
# Join search criteria for an asset filter search or dynamic asset group.
#
class Criteria
# Whether to match any or all filters. One of 'OR' or 'AND'.
attr_accessor :match
# Array of criteria to match against.
attr_accessor :criteria
def initialize(criteria = [], match = 'AND')
@criteria = Array(criteria)
@match = match.upcase
end
def to_h
{ 'operator' => @match,
'criteria' => @criteria.map(&:to_h) }
end
# Convert this object into the format expected by Nexpose.
#
def to_json
JSON.generate(to_h)
end
# Generate the payload needed for a POST request for Asset Filter.
#
def _to_payload
{ 'dir' => -1,
'results' => -1,
'sort' => 'assetIP',
'startIndex' => -1,
'table-id' => 'assetfilter',
'searchCriteria' => to_json }
end
def <<(criterion)
criteria << criterion
end
def self.parse(json)
# The call returns empty JSON, so default to 'AND' if not present.
operator = json['operator'] || 'AND'
ret = Criteria.new([], operator)
json['criteria'].each do |c|
ret.criteria << Criterion.parse(c)
end
ret
end
end
# Asset data as returned by an Asset Filter search.
#
class FilteredAsset
  # Unique identifier of this asset. Also known as device ID.
  attr_reader :id
  attr_reader :ip, :name, :os
  attr_reader :exploit_count, :malware_count, :vuln_count, :risk_score
  # The first Site ID returned for this asset.
  # Not recommended if Asset Linking feature is enabled.
  attr_reader :site_id
  # Array of Site IDs for the asset. Use when Asset Linking is enabled.
  attr_reader :site_ids
  attr_reader :last_scan

  # @param [Hash] json A single asset row from the filterAssets response.
  def initialize(json)
    @id = json['assetID']
    @ip = json['assetIP']
    @name = json['assetName']
    @os = json['assetOSName']
    @exploit_count = json['exploitCount'].to_i
    @malware_count = json['malwareCount'].to_i
    @vuln_count = json['vulnCount'].to_i
    @risk_score = json['riskScore'].to_f
    @site_ids = json['sitePermissions'].map { |perm| perm['siteID'] }
    @site_id = @site_ids.first
    # lastScanDate is epoch milliseconds; convert to seconds for Time.at.
    @last_scan = Time.at(json['lastScanDate'].to_i / 1000)
  end
end
end
New filter and updated operators
module Nexpose
# Object that represents a connection to a Nexpose Security Console.
class Connection
  # Perform an asset filter search that will locate assets matching the
  # provided conditions.
  #
  # For example, the following call will return assets with Java installed:
  #   nsc.filter(Search::Field::SOFTWARE, Search::Operator::CONTAINS, 'java')
  #
  # @param [String] field Constant from Search::Field
  # @param [String] operator Constant from Search::Operator
  # @param [String] value Search term or constant from Search::Value
  # @return [Array[FilteredAsset]] List of matching assets.
  #
  def filter(field, operator, value = '')
    search(Criteria.new(Criterion.new(field, operator, value)))
  end

  # Perform a search that will match the criteria provided.
  #
  # For example, the following call will return assets with Java and .NET:
  #   java_criterion = Criterion.new(Search::Field::SOFTWARE,
  #                                  Search::Operator::CONTAINS,
  #                                  'java')
  #   dot_net_criterion = Criterion.new(Search::Field::SOFTWARE,
  #                                     Search::Operator::CONTAINS,
  #                                     '.net')
  #   results = nsc.search(Criteria.new([java_criterion, dot_net_criterion]))
  #
  # @param [Criteria] criteria Criteria search object.
  # @return [Array[FilteredAsset]] List of matching assets.
  #
  def search(criteria)
    rows = DataTable._get_json_table(self,
                                     '/data/asset/filterAssets',
                                     criteria._to_payload)
    rows.map { |row| FilteredAsset.new(row) }
  end
end
# Constants for performing Asset Filter searches and generating Dynamic Asset
# Groups.
#
module Search
module_function
# Search constants
# Only these values are accepted for a field value.
#
module Field
  # Field-name constants, passed as the first argument to Criterion#initialize.
  # The comments on each constant list the operators (and, where restricted,
  # the values) the console accepts for that field.

  # Search for an Asset by name.
  # Valid Operators: IS, IS_NOT, STARTS_WITH, ENDS_WITH, CONTAINS,
  # NOT_CONTAINS
  ASSET = 'ASSET'
  # Search for an Asset by CVE ID
  # Valid Operators: IS, IS_NOT, CONTAINS, NOT_CONTAINS
  CVE_ID = 'CVE_ID'
  # Valid Operators: IS, IS_NOT
  # Valid Values (See Value::AccessComplexity): LOW, MEDIUM, HIGH
  CVSS_ACCESS_COMPLEXITY = 'CVSS_ACCESS_COMPLEXITY'
  # Valid Operators: IS, IS_NOT
  # Valid Values (See Value::AccessVector): LOCAL, ADJACENT, NETWORK
  CVSS_ACCESS_VECTOR = 'CVSS_ACCESS_VECTOR'
  # Valid Operators: IS, IS_NOT
  # Valid Values (See Value::AuthenticationRequired): NONE, SINGLE, MULTIPLE
  CVSS_AUTHENTICATION_REQUIRED = 'CVSS_AUTHENTICATION_REQUIRED'
  # Valid Operators: IS, IS_NOT
  # Valid Values (See Value::CVSSImpact): NONE, PARTIAL, COMPLETE
  CVSS_AVAILABILITY_IMPACT = 'CVSS_AVAILABILITY_IMPACT'
  # Valid Operators: IS, IS_NOT
  # Valid Values (See Value::CVSSImpact): NONE, PARTIAL, COMPLETE
  CVSS_CONFIDENTIALITY_IMPACT = 'CVSS_CONFIDENTIALITY_IMPACT'
  # Valid Operators: IS, IS_NOT
  # Valid Values (See Value::CVSSImpact): NONE, PARTIAL, COMPLETE
  CVSS_INTEGRITY_IMPACT = 'CVSS_INTEGRITY_IMPACT'
  # Valid Operators: IS, IS_NOT, IN_RANGE, GREATER_THAN, LESS_THAN
  # Valid Values: Floats from 0.0 to 10.0
  CVSS_SCORE = 'CVSS_SCORE'
  # Valid Operators: IN, NOT_IN
  # Valid Values (See Value::HostType): UNKNOWN, VIRTUAL, HYPERVISOR,
  # BARE_METAL
  HOST_TYPE = 'HOST_TYPE'
  # Valid Operators: IN, NOT_IN
  # Valid Values (See Value::IPType): IPv4, IPv6
  IP_ADDRESS_TYPE = 'IP_ADDRESS_TYPE'
  # Valid Operators: IN
  # Valid Values (See Value::IPType): IPv4, IPv6
  IP_ALT_ADDRESS_TYPE = 'IP_ALT_ADDRESS_TYPE'
  # Valid Operators: IS, IS_NOT, IN_RANGE, NOT_IN_RANGE
  IP_ADDRESS = 'IP_ADDRESS'
  # Valid Operators: IS, IS_NOT, IN_RANGE
  # Valid Values: Integers from 1 to 65535
  OPEN_PORT = 'OPEN_PORT'
  # Valid Operators: CONTAINS, NOT_CONTAINS, IS_EMPTY, IS_NOT_EMPTY
  OS = 'OS'
  # Valid Operators: IS
  # Valid Values (See Value::PCICompliance): PASS, FAIL
  PCI_COMPLIANCE_STATUS = 'PCI_COMPLIANCE_STATUS'
  # Valid Operators: IS, IS_NOT, IN_RANGE, GREATER_THAN, LESS_THAN
  RISK_SCORE = 'RISK_SCORE'
  # Search based on the last scan date of an asset.
  # Valid Operators: ON_OR_BEFORE, ON_OR_AFTER, BETWEEN, EARLIER_THAN,
  # WITHIN_THE_LAST
  # Valid Values: Use Value::ScanDate::FORMAT for date arguments.
  # Use FixNum for day arguments.
  SCAN_DATE = 'SCAN_DATE'
  # Valid Operators: CONTAINS, NOT_CONTAINS
  SERVICE = 'SERVICE'
  # Search based on the Site ID of an asset.
  # (Note that underlying search used Site ID, despite 'site name' value.)
  # Valid Operators: IN, NOT_IN
  # Valid Values: FixNum Site ID of the site.
  SITE_ID = 'SITE_NAME'
  # Valid Operators: CONTAINS, NOT_CONTAINS
  SOFTWARE = 'SOFTWARE'
  # Valid Operators: IS, IS_NOT, GREATER_THAN, LESS_THAN, IS_APPLIED,
  # IS_NOT_APPLIED
  # Valid Values: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW
  USER_ADDED_CRITICALITY_LEVEL = 'TAG_CRITICALITY'
  # Valid Operators: IS, IS_NOT, STARTS_WITH, ENDS_WITH, IS_APPLIED,
  # IS_NOT_APPLIED, CONTAINS, NOT_CONTAINS
  USER_ADDED_CUSTOM_TAG = 'TAG'
  # Valid Operators: IS, IS_NOT, STARTS_WITH, ENDS_WITH, IS_APPLIED,
  # IS_NOT_APPLIED, CONTAINS, NOT_CONTAINS
  USER_ADDED_TAG_LOCATION = 'TAG_LOCATION'
  # Valid Operators: IS, IS_NOT, STARTS_WITH, ENDS_WITH, IS_APPLIED,
  # IS_NOT_APPLIED, CONTAINS, NOT_CONTAINS
  USER_ADDED_TAG_OWNER = 'TAG_OWNER'
  # Valid Operators: ARE
  # Valid Values: PRESENT, NOT_PRESENT
  VALIDATED_VULNERABILITIES = 'VULNERABILITY_VALIDATED_STATUS'
  # Search against vulnerability titles that an asset contains.
  # Valid Operators: CONTAINS, NOT_CONTAINS
  VULNERABILITY = 'VULNERABILITY'
  # Valid Operators: INCLUDE, DO_NOT_INCLUDE
  # Valid Values (See Value::VulnerabilityExposure): MALWARE, METASPLOIT,
  # DATABASE
  VULNERABILITY_EXPOSURES = 'VULNERABILITY_EXPOSURES'
  # Search by VULNERABILITY CATEGORY
  # Valid Operators: IS, IS_NOT, CONTAINS, NOT_CONTAINS, STARTS_WITH,
  # ENDS_WITH
  VULN_CATEGORY = 'VULN_CATEGORY'
end
# List of acceptable operators. Not all fields accept all operators.
#
module Operator
  # Operator constants, passed as the second argument to
  # Criterion#initialize. See the comments on each Search::Field constant
  # for which operators a given field accepts.

  # Substring matching.
  CONTAINS = 'CONTAINS'
  NOT_CONTAINS = 'NOT_CONTAINS'
  # Exact matching.
  IS = 'IS'
  IS_NOT = 'IS_NOT'
  ARE = 'ARE'
  # Set membership.
  IN = 'IN'
  NOT_IN = 'NOT_IN'
  # Range matching (numeric or IP-address fields).
  IN_RANGE = 'IN_RANGE'
  NOT_IN_RANGE = 'NOT_IN_RANGE'
  STARTS_WITH = 'STARTS_WITH'
  ENDS_WITH = 'ENDS_WITH'
  # Date comparisons (see Field::SCAN_DATE).
  ON_OR_BEFORE = 'ON_OR_BEFORE'
  ON_OR_AFTER = 'ON_OR_AFTER'
  BETWEEN = 'BETWEEN'
  EARLIER_THAN = 'EARLIER_THAN'
  WITHIN_THE_LAST = 'WITHIN_THE_LAST'
  # Numeric comparisons.
  GREATER_THAN = 'GREATER_THAN'
  LESS_THAN = 'LESS_THAN'
  IS_EMPTY = 'IS_EMPTY'
  IS_NOT_EMPTY = 'IS_NOT_EMPTY'
  INCLUDE = 'INCLUDE'
  DO_NOT_INCLUDE = 'DO_NOT_INCLUDE'
  # Tag application (see the Field::USER_ADDED_* constants).
  IS_APPLIED = 'IS_APPLIED'
  IS_NOT_APPLIED = 'IS_NOT_APPLIED'
end
# Specialized values used by certain search fields
#
module Value
  # Value constants for fields whose values are restricted. See the matching
  # Search::Field comments for which field uses which module.

  # Constants for filtering on access complexity.
  module AccessComplexity
    LOW = 'L'
    MEDIUM = 'M'
    HIGH = 'H'
  end
  # Constants for filtering on access vector.
  module AccessVector
    LOCAL = 'L'
    ADJACENT = 'A'
    NETWORK = 'N'
  end
  # Constants for filtering on whether authentication is required.
  module AuthenticationRequired
    NONE = 'N'
    SINGLE = 'S'
    MULTIPLE = 'M'
  end
  # Constants for filtering on CVSS impact.
  module CVSSImpact
    NONE = 'N'
    PARTIAL = 'P'
    COMPLETE = 'C'
  end
  # Constants for filtering on host type.
  module HostType
    UNKNOWN = '0'
    VIRTUAL = '1'
    HYPERVISOR = '2'
    BARE_METAL = '3'
  end
  # Constants for filtering on IP type.
  module IPType
    IPv4 = '0'
    IPv6 = '1'
  end
  # Constants for filtering on PCI compliance.
  module PCICompliance
    PASS = '1'
    FAIL = '0'
  end
  # Constants for filtering on scan date.
  module ScanDate
    # Pass this format to #strftime() to get expected format for requests.
    FORMAT = '%m/%d/%Y'
  end
  # Constants for filtering on vulnerability validations.
  module ValidatedVulnerability
    # NOTE(review): the integer values look inverted relative to their names
    # (NOT_PRESENT = 1, PRESENT = 0) — confirm against the console API
    # before relying on them.
    NOT_PRESENT = 1
    PRESENT = 0
  end
  # Constants for filtering on vulnerability exposure.
  module VulnerabilityExposure
    MALWARE = 'type:"malware_type", name:"malwarekit"'
    # TODO: A problem in Nexpose causes these values to not be constant.
    METASPLOIT = 'type:"exploit_source_type", name:"2"'
    DATABASE = 'type:"exploit_source_type", name:"1"'
  end
end
end
# Individual search criterion.
#
class Criterion
  # Search field (Nexpose::Search::Field), operator (Nexpose::Search::Operator)
  # and value (a string, array of strings, or Nexpose::Search::Value constant).
  # @see Nexpose::Search::Field for any restrictions on the other attibutes.
  attr_accessor :field, :operator, :value

  def initialize(field, operator, value = '')
    @field = field.upcase
    @operator = operator.upcase
    # Values are always held as strings (or an array of strings).
    @value = value.is_a?(Array) ? value.map(&:to_s) : value.to_s
  end

  # Convert this object into the map format expected by Nexpose.
  #
  def to_h
    {
      'metadata' => { 'fieldName' => field },
      'operator' => operator,
      'values' => Array(value)
    }
  end

  # Build a Criterion from the map format returned by Nexpose.
  def self.parse(json)
    meta = json['metadata']
    new(meta['fieldName'], json['operator'], json['values'])
  end
end
# Join search criteria for an asset filter search or dynamic asset group.
#
class Criteria
  # Whether to match any or all filters. One of 'OR' or 'AND'.
  attr_accessor :match
  # Array of criteria to match against.
  attr_accessor :criteria

  # @param [Array[Criterion]|Criterion] criteria One or more criteria.
  # @param [String] match 'AND' or 'OR' (case-insensitive).
  def initialize(criteria = [], match = 'AND')
    @criteria = Array(criteria)
    @match = match.upcase
  end

  def to_h
    { 'operator' => @match,
      'criteria' => @criteria.map(&:to_h) }
  end

  # Convert this object into the format expected by Nexpose.
  #
  def to_json
    JSON.generate(to_h)
  end

  # Generate the payload needed for a POST request for Asset Filter.
  #
  def _to_payload
    { 'dir' => -1,
      'results' => -1,
      'sort' => 'assetIP',
      'startIndex' => -1,
      'table-id' => 'assetfilter',
      'searchCriteria' => to_json }
  end

  def <<(criterion)
    criteria << criterion
  end

  def self.parse(json)
    # The call can return empty JSON, so default to 'AND' and no criteria
    # when the respective keys are absent. (Previously a missing 'criteria'
    # key raised NoMethodError on nil.)
    operator = json['operator'] || 'AND'
    ret = Criteria.new([], operator)
    (json['criteria'] || []).each do |c|
      ret.criteria << Criterion.parse(c)
    end
    ret
  end
end
# Asset data as returned by an Asset Filter search.
#
class FilteredAsset
  # Unique identifier of this asset. Also known as device ID.
  attr_reader :id
  attr_reader :ip, :name, :os
  attr_reader :exploit_count, :malware_count, :vuln_count, :risk_score
  # The first Site ID returned for this asset.
  # Not recommended if Asset Linking feature is enabled.
  attr_reader :site_id
  # Array of Site IDs for the asset. Use when Asset Linking is enabled.
  attr_reader :site_ids
  attr_reader :last_scan

  # @param [Hash] json A single asset row from the filterAssets response.
  def initialize(json)
    @id = json['assetID']
    @ip = json['assetIP']
    @name = json['assetName']
    @os = json['assetOSName']
    @exploit_count = json['exploitCount'].to_i
    @malware_count = json['malwareCount'].to_i
    @vuln_count = json['vulnCount'].to_i
    @risk_score = json['riskScore'].to_f
    @site_ids = json['sitePermissions'].map { |perm| perm['siteID'] }
    @site_id = @site_ids.first
    # lastScanDate is epoch milliseconds; convert to seconds for Time.at.
    @last_scan = Time.at(json['lastScanDate'].to_i / 1000)
  end
end
end
|
require "json"
module Nexus
  # Simple value object wrapping a response body and a success flag.
  class Response
    attr_accessor :body
    attr_accessor :success

    def initialize(body: "", success: true)
      @body = body
      @success = success
    end

    # Serialize to a JSON object with 'body' and 'success' keys.
    def serialize
      { body: body, success: success }.to_json
    end

    # Rebuild a Response from JSON produced by #serialize.
    # Bug fix: the 'success' flag was previously dropped on deserialization,
    # so a failed response round-tripped as successful. Missing keys still
    # default to success = true for backward compatibility.
    def self.from_json(json)
      data = JSON.parse(json)
      Response.new(body: data['body'], success: data.fetch('success', true))
    end
  end
end
adding active support to response
require "json"
module Nexus
  # Simple value object wrapping a response body and a success flag.
  class Response
    attr_accessor :body
    attr_accessor :success

    def initialize(body: "", success: true)
      @body = body
      @success = success
    end

    # Serialize to a JSON object with 'body' and 'success' keys.
    def serialize
      { body: body, success: success }.to_json
    end

    # Rebuild a Response from JSON produced by #serialize.
    # The body is wrapped in HashWithIndifferentAccess so callers may use
    # string or symbol keys (assumes 'body' deserializes to a Hash — TODO
    # confirm against callers).
    # Bug fix: the 'success' flag was previously dropped on deserialization,
    # so a failed response round-tripped as successful. Missing keys still
    # default to success = true for backward compatibility.
    def self.from_json(json)
      data = ActiveSupport::HashWithIndifferentAccess.new(JSON.parse(json))
      data_body = ActiveSupport::HashWithIndifferentAccess.new(data[:body])
      Response.new(body: data_body, success: data.fetch(:success, true))
    end
  end
end
|
require 'win32ole'
require 'json'

# Facter fact 'firewall_rules': a JSON map of enabled Windows Firewall rules
# to their attributes (protocol, ports, addresses, direction, action).
Facter.add('firewall_rules') do
  confine :operatingsystem => 'windows'
  setcode do
    system_rules = WIN32OLE.new("HNetCfg.FwPolicy2").rules
    rule_hash = {}
    # Maps COM property names to the key names used in the fact output.
    attr_names = {
      'protocol'        => 'protocol',
      'localports'      => 'local_ports',
      'remoteports'     => 'remote_ports',
      'localaddresses'  => 'local_addresses',
      'remoteaddresses' => 'remote_addresses',
      'direction'       => 'direction',
      'action'          => 'action'
    }
    system_rules.select('enabled', true).each do |rule|
      attr_hash = {}
      attr_names.each do |com_name, fact_name|
        value = rule.invoke(com_name)
        # Skip empty attributes to keep the fact output compact.
        attr_hash[fact_name] = value if value != ''
      end
      rule_hash[rule.name] = attr_hash
    end
    # Bug fix: `return` inside this block raised LocalJumpError when the fact
    # was resolved; the block's final expression is the fact value instead.
    # Also added the previously missing win32ole/json requires.
    rule_hash.to_json
  end
end
firewall_rules fact in ruby
require 'win32ole'
require 'json'

# Facter fact 'firewall_rules': a JSON map of enabled Windows Firewall rules
# to their attributes (protocol, ports, addresses, direction, action).
Facter.add('firewall_rules') do
  confine :operatingsystem => 'windows'
  setcode do
    system_rules = WIN32OLE.new("HNetCfg.FwPolicy2").rules
    rule_hash = {}
    # Maps COM property names to the key names used in the fact output.
    attr_names = {
      'protocol'        => 'protocol',
      'localports'      => 'local_ports',
      'remoteports'     => 'remote_ports',
      'localaddresses'  => 'local_addresses',
      'remoteaddresses' => 'remote_addresses',
      'direction'       => 'direction',
      'action'          => 'action'
    }
    system_rules.select('enabled', true).each do |rule|
      attr_hash = {}
      attr_names.each do |com_name, fact_name|
        value = rule.invoke(com_name)
        # Skip empty attributes to keep the fact output compact.
        attr_hash[fact_name] = value if value != ''
      end
      rule_hash[rule.name] = attr_hash
    end
    # Bug fix: `return` inside this block raised LocalJumpError when the fact
    # was resolved; the block's final expression is the fact value instead.
    rule_hash.to_json
  end
end
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for bio-faster 0.2.1 (jeweler-generated; edit Rakefile).
Gem::Specification.new do |s|
  s.name = "bio-faster"
  s.version = "0.2.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Francesco Strozzi"]
  s.date = "2012-01-04"
  s.description = "A fast parser for Fasta and FastQ files"
  s.email = "francesco.strozzi@gmail.com"
  s.extensions = ["ext/extconf.rb"]
  s.extra_rdoc_files = %w[LICENSE.txt README.rdoc]
  s.files = %w[
    .document
    Gemfile
    Gemfile.lock
    LICENSE.txt
    README.rdoc
    Rakefile
    VERSION
    bio-faster.gemspec
    ext/extconf.rb
    ext/faster.c
    ext/kseq.h
    lib/bio-faster.rb
    spec/helper.rb
    spec/parser_spec.rb
    test/data/sample.fasta
    test/data/sample.fastq
    test/data/sample.fastq.gz
    test/data/sff_sample.fastq
  ]
  s.homepage = "http://github.com/fstrozzi/bioruby-faster"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.required_ruby_version = Gem::Requirement.new(">= 1.9")
  s.rubygems_version = "1.8.12"
  s.summary = "A fast parser for Fasta and FastQ files"

  # Shared dependency table, registered as development or plain dependencies
  # depending on RubyGems capabilities — mirrors jeweler's generated output.
  deps = [
    ["shoulda", [">= 0"]],
    ["bundler", ["~> 1.0.0"]],
    ["jeweler", ["~> 1.6.4"]],
    ["rcov", [">= 0"]],
    ["bio", [">= 1.4.2"]],
    ["rspec", [">= 0"]]
  ]
  if s.respond_to? :specification_version
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0')
      deps.each { |name, reqs| s.add_development_dependency(name, reqs) }
    else
      deps.each { |name, reqs| s.add_dependency(name, reqs) }
    end
  else
    deps.each { |name, reqs| s.add_dependency(name, reqs) }
  end
end
Regenerate gemspec for version 0.2.2
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for bio-faster 0.2.2 (jeweler-generated; edit Rakefile).
Gem::Specification.new do |s|
  s.name = "bio-faster"
  s.version = "0.2.2"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Francesco Strozzi"]
  s.date = "2012-04-02"
  s.description = "A fast parser for Fasta and FastQ files"
  s.email = "francesco.strozzi@gmail.com"
  s.extensions = ["ext/extconf.rb"]
  s.extra_rdoc_files = %w[LICENSE.txt README.md]
  s.files = %w[
    .document
    .travis.yml
    Gemfile
    Gemfile.lock
    LICENSE.txt
    README.md
    Rakefile
    VERSION
    bio-faster.gemspec
    ext/extconf.rb
    ext/faster.c
    ext/kseq.h
    lib/bio-faster.rb
    spec/helper.rb
    spec/parser_spec.rb
    test/data/sample.fasta
    test/data/sample.fastq
    test/data/sample.fastq.gz
    test/data/sff_sample.fastq
  ]
  s.homepage = "http://github.com/fstrozzi/bioruby-faster"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.required_ruby_version = Gem::Requirement.new(">= 1.9")
  s.rubygems_version = "1.8.15"
  s.summary = "A fast parser for Fasta and FastQ files"

  # Shared dependency table, registered as development or plain dependencies
  # depending on RubyGems capabilities — mirrors jeweler's generated output.
  deps = [
    ["shoulda", [">= 0"]],
    ["bundler", ["~> 1.0.0"]],
    ["jeweler", ["~> 1.6.4"]],
    ["rcov", [">= 0"]],
    ["bio", [">= 1.4.2"]],
    ["rspec", [">= 0"]]
  ]
  if s.respond_to? :specification_version
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0')
      deps.each { |name, reqs| s.add_development_dependency(name, reqs) }
    else
      deps.each { |name, reqs| s.add_dependency(name, reqs) }
    end
  else
    deps.each { |name, reqs| s.add_dependency(name, reqs) }
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for bio-faster 0.0.0 (initial jeweler scaffold).
Gem::Specification.new do |s|
  s.name = "bio-faster"
  s.version = "0.0.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Francesco Strozzi"]
  s.date = "2011-12-22"
  s.description = "TODO: longer description of your gem"
  s.email = "francesco.strozzi@gmail.com"
  s.extra_rdoc_files = %w[LICENSE.txt README.rdoc]
  s.files = %w[
    .document
    Gemfile
    LICENSE.txt
    README.rdoc
    Rakefile
    VERSION
    lib/bio-faster.rb
    test/helper.rb
    test/test_bio-faster.rb
  ]
  s.homepage = "http://github.com/fstrozzi/bioruby-faster"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.12"
  s.summary = "TODO: one-line summary of your gem"

  # Shared dependency table, registered as development or plain dependencies
  # depending on RubyGems capabilities — mirrors jeweler's generated output.
  deps = [
    ["shoulda", [">= 0"]],
    ["bundler", ["~> 1.0.0"]],
    ["jeweler", ["~> 1.6.4"]],
    ["rcov", [">= 0"]],
    ["bio", [">= 1.4.2"]]
  ]
  if s.respond_to? :specification_version
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0')
      deps.each { |name, reqs| s.add_development_dependency(name, reqs) }
    else
      deps.each { |name, reqs| s.add_dependency(name, reqs) }
    end
  else
    deps.each { |name, reqs| s.add_dependency(name, reqs) }
  end
end
regenerated gemspec
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for bio-faster 0.0.0 (jeweler-regenerated; edit Rakefile).
Gem::Specification.new do |s|
  s.name = "bio-faster"
  s.version = "0.0.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Francesco Strozzi"]
  s.date = "2012-01-04"
  s.description = "A fast parser for Fasta and FastQ files"
  s.email = "francesco.strozzi@gmail.com"
  s.extensions = ["ext/extconf.rb"]
  s.extra_rdoc_files = %w[LICENSE.txt README.rdoc]
  s.files = %w[
    .document
    Gemfile
    Gemfile.lock
    LICENSE.txt
    README.rdoc
    Rakefile
    VERSION
    bio-faster.gemspec
    ext/extconf.rb
    ext/faster.c
    ext/kseq.h
    lib/bio-faster.rb
    spec/helper.rb
    spec/parser_spec.rb
    test/data/sample.fasta
    test/data/sample.fastq
    test/data/sample.fastq.gz
    test/data/sff_sample.fastq
  ]
  s.homepage = "http://github.com/fstrozzi/bioruby-faster"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.required_ruby_version = Gem::Requirement.new(">= 1.9")
  s.rubygems_version = "1.8.12"
  s.summary = "A fast parser for Fasta and FastQ files"

  # Shared dependency table, registered as development or plain dependencies
  # depending on RubyGems capabilities — mirrors jeweler's generated output.
  deps = [
    ["shoulda", [">= 0"]],
    ["bundler", ["~> 1.0.0"]],
    ["jeweler", ["~> 1.6.4"]],
    ["rcov", [">= 0"]],
    ["bio", [">= 1.4.2"]],
    ["rspec", [">= 0"]]
  ]
  if s.respond_to? :specification_version
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0')
      deps.each { |name, reqs| s.add_development_dependency(name, reqs) }
    else
      deps.each { |name, reqs| s.add_dependency(name, reqs) }
    end
  else
    deps.each { |name, reqs| s.add_dependency(name, reqs) }
  end
end
|
added gemspec
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for sinagen (jeweler-generated; edit Rakefile).
Gem::Specification.new do |s|
  s.name = "sinagen"
  s.version = "0.1.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Sidney Burks"]
  s.date = "2011-03-11"
  s.default_executable = "sinagen"
  s.description = "Thor-based Sinatra app generator"
  s.email = "sid137@gmail.com"
  s.executables = ["sinagen"]
  s.extra_rdoc_files = %w[LICENSE.txt README.md]
  s.files = %w[
    .document
    .rspec
    Gemfile
    Gemfile.lock
    LICENSE.txt
    README.md
    Rakefile
    VERSION
    bin/sinagen
    lib/sinagen.rb
    spec/sinagen_spec.rb
    spec/spec_helper.rb
    tags
    templates/.gitignore
    templates/.rspec
    templates/Gemfile
    templates/README.md
    templates/application.rb
    templates/config.ru
    templates/spec/setup_spec.rb.tt
    templates/spec/spec_helper.rb.tt
    templates/views/stylesheets/main.sass
  ]
  s.homepage = "http://github.com/sid137/sinagen"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.3.7"
  s.summary = "Sinatra app skeleton generator based on Thor"
  s.test_files = %w[spec/sinagen_spec.rb spec/spec_helper.rb]

  # Runtime and development dependency tables; registered per RubyGems
  # capabilities exactly as jeweler generated them.
  runtime_deps = [
    ["activesupport", [">= 0"]],
    ["i18n", [">= 0"]],
    ["thor", [">= 0"]],
    ["compass", [">= 0"]],
    ["rspec", [">= 0"]],
    ["sinatra", [">= 0"]],
    ["rack-test", [">= 0"]],
    ["heroku", [">= 0"]]
  ]
  dev_deps = [
    ["yard", ["~> 0.6.0"]],
    ["bundler", ["~> 1.0.0"]],
    ["jeweler", ["~> 1.5.2"]],
    ["rcov", [">= 0"]]
  ]
  if s.respond_to? :specification_version
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0')
      runtime_deps.each { |name, reqs| s.add_runtime_dependency(name, reqs) }
      dev_deps.each { |name, reqs| s.add_development_dependency(name, reqs) }
    else
      (runtime_deps + dev_deps).each { |name, reqs| s.add_dependency(name, reqs) }
    end
  else
    (runtime_deps + dev_deps).each { |name, reqs| s.add_dependency(name, reqs) }
  end
end
|
$LOAD_PATH.unshift File.expand_path('../lib', __FILE__)
require 'sinatra/version'

# Gem specification for the Sinatra web DSL.
Gem::Specification.new 'sinatra', Sinatra::VERSION do |s|
  s.description = "Sinatra is a DSL for quickly creating web applications in Ruby with minimal effort."
  s.summary     = "Classy web-development dressed in a DSL"
  s.authors     = ["Blake Mizerany", "Ryan Tomayko", "Simon Rozet", "Konstantin Haase"]
  s.email       = "sinatrarb@googlegroups.com"
  s.homepage    = "http://www.sinatrarb.com/"
  s.license     = 'MIT'

  # Static top-level files plus per-language READMEs and all library sources.
  s.files = %w(.yardopts AUTHORS.md CHANGELOG.md CONTRIBUTING.md LICENSE MAINTENANCE.md SECURITY.md sinatra.gemspec) +
            Dir['README.*.md', 'lib/**/*.rb']
  s.test_files       = s.files.grep(/^test\/.*_test.rb/)
  s.extra_rdoc_files = s.files.grep(/^README/) << 'LICENSE'
  s.rdoc_options     = %w[--line-numbers --inline-source --title Sinatra --main README.rdoc --encoding=UTF-8]

  s.required_ruby_version = '>= 2.2.0'

  s.add_dependency 'rack', '= 2.0.0.alpha'
  s.add_dependency 'tilt', '~> 2.0'
  s.add_dependency 'rack-protection', '~> 1.5'
end
Ensure examples, all lib files, Gemfile and Rakefile are included in package
$LOAD_PATH.unshift File.expand_path('../lib', __FILE__)
require 'sinatra/version'

# Gem specification for the Sinatra web DSL.
Gem::Specification.new 'sinatra', Sinatra::VERSION do |s|
  s.description = "Sinatra is a DSL for quickly creating web applications in Ruby with minimal effort."
  s.summary     = "Classy web-development dressed in a DSL"
  s.authors     = ["Blake Mizerany", "Ryan Tomayko", "Simon Rozet", "Konstantin Haase"]
  s.email       = "sinatrarb@googlegroups.com"
  s.homepage    = "http://www.sinatrarb.com/"
  s.license     = 'MIT'

  # Static top-level files (including Gemfile and Rakefile) plus READMEs,
  # all library files and the bundled examples.
  s.files = %w(.yardopts AUTHORS.md CHANGELOG.md CONTRIBUTING.md Gemfile LICENSE MAINTENANCE.md Rakefile SECURITY.md sinatra.gemspec) +
            Dir['README*.md', 'lib/**/*', 'examples/*']
  s.test_files       = s.files.grep(/^test\/.*_test.rb/)
  s.extra_rdoc_files = s.files.grep(/^README/) << 'LICENSE'
  s.rdoc_options     = %w[--line-numbers --inline-source --title Sinatra --main README.rdoc --encoding=UTF-8]

  s.required_ruby_version = '>= 2.2.0'

  s.add_dependency 'rack', '= 2.0.0.alpha'
  s.add_dependency 'tilt', '~> 2.0'
  s.add_dependency 'rack-protection', '~> 1.5'
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'sisimai/version'

# Gem specification for Sisimai, a bounce-mail analysis library.
Gem::Specification.new do |spec|
  spec.name        = 'sisimai'
  spec.version     = Sisimai::VERSION
  spec.authors     = ['azumakuniyuki']
  spec.email       = ['azumakuniyuki+rubygems.org@gmail.com']
  spec.summary     = 'Mail Analyzing Interface'
  spec.description = 'Sisimai is a Ruby library for analyzing RFC5322 bounce emails and generating structured data from parsed results.'
  spec.homepage    = 'http://libsisimai.org/'
  spec.license     = 'BSD-2-Clause'

  # Ship everything tracked by git except test/spec/feature files.
  spec.files         = `git ls-files -z`.split("\x0").grep_v(%r{^(test|spec|features)/})
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

  spec.required_ruby_version = '>= 2.1.0'

  { 'bundler' => '~> 1.8', 'rake' => '~> 10.0', 'rspec' => '~> 0' }.each do |dev_gem, requirement|
    spec.add_development_dependency dev_gem, requirement
  end
  spec.add_runtime_dependency 'oj', '~> 2.14', '>= 2.14.4'
end
REQUIRE Oj 3.0.0 OR LATER. DO NOT USE Oj 2.18.*
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'sisimai/version'

# Gem specification for Sisimai, a bounce-mail analysis library.
Gem::Specification.new do |spec|
  spec.name        = 'sisimai'
  spec.version     = Sisimai::VERSION
  spec.authors     = ['azumakuniyuki']
  spec.email       = ['azumakuniyuki+rubygems.org@gmail.com']
  spec.summary     = 'Mail Analyzing Interface'
  spec.description = 'Sisimai is a Ruby library for analyzing RFC5322 bounce emails and generating structured data from parsed results.'
  spec.homepage    = 'http://libsisimai.org/'
  spec.license     = 'BSD-2-Clause'

  # Ship everything tracked by git except test/spec/feature files.
  spec.files         = `git ls-files -z`.split("\x0").grep_v(%r{^(test|spec|features)/})
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

  spec.required_ruby_version = '>= 2.1.0'

  { 'bundler' => '~> 1.8', 'rake' => '~> 10.0', 'rspec' => '~> 0' }.each do |dev_gem, requirement|
    spec.add_development_dependency dev_gem, requirement
  end
  # NOTE(review): '~> 3.0.0' pins Oj to the 3.0.x series even though the
  # intent ("3.0.0 or later") suggests '~> 3.0' — confirm before loosening.
  spec.add_runtime_dependency 'oj', '~> 3.0.0', '>= 3.0.0'
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Gem specification for lexical_uuid 0.1.1 (jeweler-generated; edit Rakefile).
Gem::Specification.new do |s|
  s.name = "lexical_uuid"
  s.version = "0.1.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["James Golick"]
  s.date = "2010-08-19"
  s.description = "UUIDs that are byte-ordered lamport clocks (timestamp, worker_id). Much simpler than type-1 UUID's crappy, weirdo layout."
  s.email = "jamesgolick@gmail.com"
  s.extra_rdoc_files = %w[LICENSE README.rdoc]
  s.files = %w[
    .document
    .gitignore
    LICENSE
    README.rdoc
    Rakefile
    VERSION
    lexical_uuid.gemspec
    lib/lexical_uuid.rb
    spec/lexical_uuid_spec.rb
    spec/spec.opts
    spec/spec_helper.rb
  ]
  s.homepage = "http://github.com/jamesgolick/lexical_uuid"
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.3.7"
  s.summary = "UUIDs that are byte-ordered lamport clocks (timestamp, worker_id). Much simpler than type-1 UUID's crappy, weirdo layout."
  s.test_files = %w[spec/lexical_uuid_spec.rb spec/spec_helper.rb]

  # Dependencies registered per RubyGems capabilities, as jeweler emits them.
  if s.respond_to? :specification_version
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0')
      s.add_development_dependency("rspec", [">= 1.2.9"])
      s.add_runtime_dependency("RubyInline", ["= 3.8.4"])
    else
      s.add_dependency("rspec", [">= 1.2.9"])
      s.add_dependency("RubyInline", ["= 3.8.4"])
    end
  else
    s.add_dependency("rspec", [">= 1.2.9"])
    s.add_dependency("RubyInline", ["= 3.8.4"])
  end
end
Regenerated gemspec for version 0.1.2
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Gem specification for lexical_uuid 0.1.2 (jeweler-generated; edit Rakefile).
Gem::Specification.new do |s|
  s.name = "lexical_uuid"
  s.version = "0.1.2"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["James Golick"]
  s.date = "2010-10-04"
  s.description = "UUIDs that are byte-ordered lamport clocks (timestamp, worker_id). Much simpler than type-1 UUID's crappy, weirdo layout."
  s.email = "jamesgolick@gmail.com"
  s.extra_rdoc_files = %w[LICENSE README.rdoc]
  s.files = %w[
    .document
    .gitignore
    LICENSE
    README.rdoc
    Rakefile
    VERSION
    lexical_uuid.gemspec
    lib/lexical_uuid.rb
    spec/lexical_uuid_spec.rb
    spec/spec.opts
    spec/spec_helper.rb
  ]
  s.homepage = "http://github.com/jamesgolick/lexical_uuid"
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.3.7"
  s.summary = "UUIDs that are byte-ordered lamport clocks (timestamp, worker_id). Much simpler than type-1 UUID's crappy, weirdo layout."
  s.test_files = %w[spec/lexical_uuid_spec.rb spec/spec_helper.rb]

  # Dependencies registered per RubyGems capabilities, as jeweler emits them.
  if s.respond_to? :specification_version
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0')
      s.add_development_dependency("rspec", [">= 1.2.9"])
      s.add_runtime_dependency("RubyInline", ["= 3.8.4"])
    else
      s.add_dependency("rspec", [">= 1.2.9"])
      s.add_dependency("RubyInline", ["= 3.8.4"])
    end
  else
    s.add_dependency("rspec", [">= 1.2.9"])
    s.add_dependency("RubyInline", ["= 3.8.4"])
  end
end
|
# CocoaPods spec for Typhoon 2.0.0, a dependency injection container for Objective-C.
Pod::Spec.new do |spec|
  spec.name = 'Typhoon'
  spec.version = '2.0.0'
  spec.license = 'Apache2.0'
  spec.summary = 'A dependency injection container for Objective-C. Light-weight, yet flexible and full-featured.'
  spec.homepage = 'http://www.typhoonframework.org'
  # FIX: author string was mojibake ("Rodrรญguez"); restored the UTF-8 "Rodríguez".
  spec.author = {'Jasper Blues, Robert Gilliam, Daniel Rodríguez, Erik Sundin, Aleksey Garbarev & Contributors' => 'info@typhoonframework.org'}
  spec.source = {:git => 'https://github.com/typhoon-framework/Typhoon.git', :tag => spec.version.to_s, :submodules => true}
  spec.ios.deployment_target = '5.0'
  spec.osx.deployment_target = '10.7'
  spec.source_files = 'Source/**/*.{h,m}'
  # This file uses manual retain/release and must be compiled without ARC.
  non_arc_files = 'Source/Factory/Internal/NSInvocation+TCFInstanceBuilder.{h,m}'
  # Exclude the other platform's sources plus the MRR file from the main (ARC) target.
  spec.ios.exclude_files = 'Source/osx', non_arc_files
  spec.osx.exclude_files = 'Source/ios', non_arc_files
  spec.requires_arc = true
  # Compile the MRR file in a dedicated non-ARC subspec.
  spec.subspec 'no-arc' do |sna|
    sna.requires_arc = false
    sna.source_files = non_arc_files
  end
  spec.documentation_url = 'http://www.typhoonframework.org/docs/latest/api/'
end
Update podspec.
# CocoaPods spec for Typhoon 2.0.1, a dependency injection container for Objective-C.
Pod::Spec.new do |spec|
  spec.name = 'Typhoon'
  spec.version = '2.0.1'
  spec.license = 'Apache2.0'
  spec.summary = 'A dependency injection container for Objective-C. Light-weight, yet flexible and full-featured.'
  spec.homepage = 'http://www.typhoonframework.org'
  # FIX: author string was mojibake ("Rodrรญguez"); restored the UTF-8 "Rodríguez".
  spec.author = {'Jasper Blues, Robert Gilliam, Daniel Rodríguez, Erik Sundin, Aleksey Garbarev & Contributors' => 'info@typhoonframework.org'}
  spec.source = {:git => 'https://github.com/typhoon-framework/Typhoon.git', :tag => spec.version.to_s, :submodules => true}
  spec.ios.deployment_target = '5.0'
  spec.osx.deployment_target = '10.7'
  spec.source_files = 'Source/**/*.{h,m}'
  # This file uses manual retain/release and must be compiled without ARC.
  non_arc_files = 'Source/Factory/Internal/NSInvocation+TCFInstanceBuilder.{h,m}'
  # Exclude the other platform's sources plus the MRR file from the main (ARC) target.
  spec.ios.exclude_files = 'Source/osx', non_arc_files
  spec.osx.exclude_files = 'Source/ios', non_arc_files
  spec.requires_arc = true
  # Compile the MRR file in a dedicated non-ARC subspec.
  spec.subspec 'no-arc' do |sna|
    sna.requires_arc = false
    sna.source_files = non_arc_files
  end
  spec.documentation_url = 'http://www.typhoonframework.org/docs/latest/api/'
end
|
# CocoaPods spec for Typhoon 3.2.1, dependency injection for Objective-C and Swift.
Pod::Spec.new do |spec|
  spec.name = 'Typhoon'
  spec.version = '3.2.1'
  spec.license = 'Apache2.0'
  spec.summary = 'Dependency injection for Objective-C and Swift. Light-weight, yet flexible and full-featured.'
  spec.homepage = 'http://www.typhoonframework.org'
  # FIX: author string was mojibake ("Rodrรญguez"); restored the UTF-8 "Rodríguez".
  spec.author = {'Jasper Blues, Aleksey Garbarev, Robert Gilliam, Daniel Rodríguez, Erik Sundin & Contributors' => 'info@typhoonframework.org'}
  spec.source = {:git => 'https://github.com/appsquickly/Typhoon.git', :tag => spec.version.to_s, :submodules => true}
  spec.ios.deployment_target = '5.0'
  spec.osx.deployment_target = '10.7'
  spec.source_files = 'Source/**/*.{h,m}'
  # This file uses manual retain/release and must be compiled without ARC.
  non_arc_files = 'Source/Factory/Internal/NSInvocation+TCFInstanceBuilder.{h,m}'
  # Exclude the other platform's sources plus the MRR file from the main (ARC) target.
  spec.ios.exclude_files = 'Source/osx', non_arc_files
  spec.osx.exclude_files = 'Source/ios', non_arc_files
  spec.requires_arc = true
  # Compile the MRR file in a dedicated non-ARC subspec.
  spec.subspec 'no-arc' do |sna|
    sna.requires_arc = false
    sna.source_files = non_arc_files
  end
  spec.documentation_url = 'http://www.typhoonframework.org/docs/latest/api/'
end
Update podspec.
# CocoaPods spec for Typhoon 3.2.2, dependency injection for Objective-C and Swift.
Pod::Spec.new do |spec|
  spec.name = 'Typhoon'
  spec.version = '3.2.2'
  spec.license = 'Apache2.0'
  spec.summary = 'Dependency injection for Objective-C and Swift. Light-weight, yet flexible and full-featured.'
  spec.homepage = 'http://www.typhoonframework.org'
  # FIX: author string was mojibake ("Rodrรญguez"); restored the UTF-8 "Rodríguez".
  spec.author = {'Jasper Blues, Aleksey Garbarev, Robert Gilliam, Daniel Rodríguez, Erik Sundin & Contributors' => 'info@typhoonframework.org'}
  spec.source = {:git => 'https://github.com/appsquickly/Typhoon.git', :tag => spec.version.to_s, :submodules => true}
  spec.ios.deployment_target = '5.0'
  spec.osx.deployment_target = '10.7'
  spec.source_files = 'Source/**/*.{h,m}'
  # This file uses manual retain/release and must be compiled without ARC.
  non_arc_files = 'Source/Factory/Internal/NSInvocation+TCFInstanceBuilder.{h,m}'
  # Exclude the other platform's sources plus the MRR file from the main (ARC) target.
  spec.ios.exclude_files = 'Source/osx', non_arc_files
  spec.osx.exclude_files = 'Source/ios', non_arc_files
  spec.requires_arc = true
  # Compile the MRR file in a dedicated non-ARC subspec.
  spec.subspec 'no-arc' do |sna|
    sna.requires_arc = false
    sna.source_files = non_arc_files
  end
  spec.documentation_url = 'http://www.typhoonframework.org/docs/latest/api/'
end
|
# CocoaPods spec for Typhoon 1.1.9, a dependency injection container for Objective-C.
Pod::Spec.new do |s|
  # Identity and provenance.
  s.name = 'Typhoon'
  s.version = '1.1.9'
  s.license = 'Apache2.0'
  s.summary = 'A dependency injection container for Objective-C. Light-weight, yet flexible and full-featured.'
  s.homepage = 'http://www.typhoonframework.org'
  s.author = { 'Jasper Blues' => 'jasper@appsquick.ly' }
  s.source = { :git => 'https://github.com/jasperblues/Typhoon.git', :tag => '1.1.9' }
  # Build settings: everything under Source/, linked against zlib and libxml2.
  s.source_files = 'Source/**/*.{h,m}'
  s.libraries = 'z', 'xml2'
  s.xcconfig = { 'HEADER_SEARCH_PATHS' => '$(SDKROOT)/usr/include/libxml2' }
  s.requires_arc = true
end
Update podspec.
# CocoaPods spec for Typhoon 1.1.10, a dependency injection container for Objective-C.
Pod::Spec.new do |s|
  # Identity and provenance.
  s.name = 'Typhoon'
  s.version = '1.1.10'
  s.license = 'Apache2.0'
  s.summary = 'A dependency injection container for Objective-C. Light-weight, yet flexible and full-featured.'
  s.homepage = 'http://www.typhoonframework.org'
  s.author = { 'Jasper Blues' => 'jasper@appsquick.ly' }
  s.source = { :git => 'https://github.com/jasperblues/Typhoon.git', :tag => '1.1.10' }
  # Build settings: everything under Source/, linked against zlib and libxml2.
  s.source_files = 'Source/**/*.{h,m}'
  s.libraries = 'z', 'xml2'
  s.xcconfig = { 'HEADER_SEARCH_PATHS' => '$(SDKROOT)/usr/include/libxml2' }
  s.requires_arc = true
end
|
# Make the shared musikbot library (one directory up) requirable.
$LOAD_PATH << '..'
require 'musikbot'
# Minimum number of live, non-redirect articles required to appear in the report.
MIN_ARTICLE_COUNT = 25
# Tracking categories indicating an article carries maintenance tags.
MAINTENANCE_CATEGORIES = [
'All_articles_lacking_sources',
'All_articles_needing_additional_references',
'All_unreferenced_BLPs',
'All_BLP_articles_lacking_sources',
'All_articles_lacking_reliable_references',
'All_articles_with_a_promotional_tone',
'All_articles_with_topics_of_unclear_notability'
]
# Destination wiki page for the generated report.
REPORT_PAGE = 'Wikipedia:Database reports/Editors eligible for Autopatrol privilege'
# Bot task: find prolific article creators who may qualify for the
# autopatrolled user right and publish a report to REPORT_PAGE.
module AutopatrolledCandidates
  # Entry point: gather recent page creators, filter to apparently-eligible
  # users, and write the report.
  def self.run
    @mb = MusikBot::Session.new(inspect)
    users = {}
    page_creators.each_with_index do |user, i|
      username = user['user_name']
      # not sure how this happens
      next if username.nil?
      articles = articles_created(username)
      # Skip if they don't meet article count prerequisite or recently had a PERM request declined
      next if articles.length < MIN_ARTICLE_COUNT || perm_request_declined?(username)
      user_data = {
        created: articles.length,
        edits: user['user_editcount'],
        deleted: deleted_counts(username),
        blocks: block_count(username),
        tagged: maintenance_count(articles),
        perm_revoked: autopatrolled_revoked?(username),
        copyvios: scan_talk_page(username)
      }
      users[username] = user_data
      # Progress output for the operator's console.
      puts "#{i} of #{page_creators.length}: #{username} = #{articles.length}"
    end
    generate_report(users)
  end

  # Generate markup for the report and write it to REPORT_PAGE
  def self.generate_report(users)
    cat_str = MAINTENANCE_CATEGORIES.collect { |c| "[[:Category:#{c}|#{c.descore}]]" }.join(', ')
    markup = <<~END
      <div style='font-size:24px'>Users eligible to be autopatrolled as of #{I18n.l(@mb.today, format: :heading)}</div>
      {{FORMATNUM:#{users.length}}} users who have created an article in the past month, and may be eligible for the autopatrolled privilege but don't have it yet.
      Users who had a [[WP:PERM/A|request for autopatrolled]] declined in the past 90 days are not listed.
      Prepared by ~~~ <onlyinclude>~~~~~</onlyinclude>
      == Key ==
      * '''Articles''': Number of live, non-redirect articles
      * '''Tagged''': Number of articles with maintenance tags<ref name=tags />
      * '''Deleted''': Number of deleted articles in the past year (may include redirects)<ref name='deleted' />
      * '''Edit count''': Raw edit count of the user
      * '''Blocks''': Number of blocks in the past year
      * '''Copyvios''': Number of ''possible'' user talk notifications about copyright concerns in the past year<ref name='copyvios' />
      * '''Revoked?''': Whether or not the autopatrolled permission was previously revoked
      {{pb}}
      ;Notes
      {{reflist|refs="
      <ref name='tags'>Supported maintenance categories include: #{cat_str}</ref>
      <ref name='deleted'>[[WP:G6|G6]] (technical) and [[WP:G7|G7]] (user-requested) speedy deletions are not included. The number of speedy, (BLP)PROD and AfD deletions are shown if detected via the deletion summary.</ref>
      <ref name='copyvios'>This works by scanning the edit summaries for "copyvios" or "copyright". Links are provided to the diffs, which may include removal of notices.</ref>
      }}
      == Report ==
      {| class='wikitable sortable'
      ! Username
      ! Articles
      ! Tagged
      ! Deleted
      ! Edit count
      ! Blocks
      ! Copyvios
      ! Revoked?
      ! class="unsortable" | Links
      |-
    END
    # Sort by number of articles created
    users = users.sort_by { |username, data| -data[:created] }.to_h
    users.each do |username, data|
      user_rights_log = "https://en.wikipedia.org/w/index.php?title=Special:Log&page=User:#{username.score}&type=rights"
      block_log = "https://en.wikipedia.org/w/index.php?title=Special:Log&action=view&page=#{username.score}&type=block"
      # NOTE(review): URI.escape/URI.encode are obsolete (removed in Ruby 3) —
      # confirm the runtime pins an older Ruby before upgrading.
      xtools_link = "[https://tools.wmflabs.org/xtools/pages/?user=#{URI.escape(username)}" \
        "&project=en.wikipedia.org&namespace=0&redirects=noredirects {{FORMATNUM:#{data[:created]}}}]"
      deleted_str = '0'
      # Generate string that lists the different types of deletions that were detected
      if data[:deleted][:total] > 0
        deleted_str = "#{data[:deleted][:total]} total"
        deletion_stats = []
        [:Speedy, :PROD, :AfD].each do |type|
          if data[:deleted][type] > 0
            deletion_stats << "#{type}: {{FORMATNUM:#{data[:deleted][type]}}}"
          end
        end
        deleted_str += " (#{deletion_stats.join(', ')})".chomp('()')
      end
      block_str = data[:blocks] > 0 ? "[#{block_log} {{FORMATNUM:#{data[:blocks]}}}]" : '0'
      copyvios_str = 0
      if data[:copyvios].any?
        # Footnote linking each possibly copyvio-related talk page diff.
        copyvios_str = "#{data[:copyvios].length}<ref>"
        data[:copyvios].each do |rev_id|
          copyvios_str += "[https://en.wikipedia.org/wiki/Special:Diff/#{rev_id}]"
        end
        copyvios_str += "</ref>"
      end
      revoked_str = data[:perm_revoked] ? "[#{user_rights_log} Yes]" : 'No'
      links = [
        "[[Special:UserRights/#{username}|User rights]]",
        "[https://tools.wmflabs.org/xtools-ec/?user=#{URI.encode(username)}&project=en.wikipedia.org EC]",
        "[https://tools.wmflabs.org/musikanimal/blp_edits?username=#{URI.encode(username)}&offset=0&contribs=on BLP edits]"
      ].join(' · ')
      markup += <<~END
        | {{User0|#{username}}}
        | #{xtools_link}
        | {{FORMATNUM:#{data[:tagged]}}}
        | data-sort-value=#{data[:deleted][:total]} | #{deleted_str}
        | [[Special:Contributions/#{username}|{{FORMATNUM:#{data[:edits]}}}]]
        | #{block_str}
        | data-sort-value=#{data[:copyvios].length} | #{copyvios_str}
        | #{revoked_str}
        | #{links}
        |-
      END
    end
    # Drop the trailing row separator and close the table.
    markup = markup.chomp("\n|-") + <<~END
      |}
      ;Links
      {{reflist}}
    END
    @mb.edit(REPORT_PAGE,
      content: markup,
      summary: "Reporting #{users.length} users eligible for autopatrolled"
    )
  end

  # Scan talk page history for messages that are potentially about copyvios
  def self.scan_talk_page(username)
    sql = %{
      SELECT rev_id
      FROM revision_userindex
      WHERE rev_page = (
        SELECT page_id
        FROM page
        WHERE page_title = ?
        AND page_namespace = 3
      )
      AND rev_timestamp > #{@mb.db_date(@mb.today - 365)}
      AND rev_comment REGEXP "[Cc]opy(right|vio)"
    }
    @mb.repl_query(sql, username.score).to_a.collect { |r| r['rev_id'] }
  end

  # Get data about pages the user created that were deleted
  # Returns:
  #   {
  #     total: total number of articles deleted
  #     Speedy: total number of articles deleted under [[WP:CSD]]
  #     PROD: total number of articles deleted under [[WP:PROD]] or [[WP:BLPPROD]]
  #     AfD: total number of articles deleted under [[WP:AfD]]
  #   }
  def self.deleted_counts(username)
    sql = %{
      SELECT log_comment
      FROM logging_logindex
      LEFT JOIN archive_userindex ON ar_page_id = log_page
      WHERE log_type = 'delete'
      AND ar_user_text = ?
      AND ar_namespace = 0
      AND ar_parent_id = 0
      AND ar_timestamp > #{@mb.db_date(@mb.now - 365)}
    }
    counts = {
      total: 0,
      Speedy: 0,
      PROD: 0,
      AfD: 0
    }
    @mb.repl_query(sql, username.score).to_a.each do |data|
      # don't count technical or user-requested deletions
      next if data['log_comment'] =~ /\[\[WP:CSD#G(6|7)\|/
      counts[:total] += 1
      # Classify by the deletion rationale linked in the log summary.
      case data['log_comment']
      when /\[\[WP:CSD#/
        counts[:Speedy] += 1
      when /\[\[WP:(BLP)?PROD/
        counts[:PROD] += 1
      when /\[\[(Wikipedia|WP):Articles for deletion\//
        counts[:AfD] += 1
      end
    end
    counts
  end

  # Get the number of blocks the user had in the past year
  def self.block_count(username)
    sql = %{
      SELECT COUNT(*) AS count
      FROM logging_logindex
      WHERE log_type = 'block'
      AND log_title = ?
      AND log_timestamp > #{@mb.db_date(@mb.now - 365)}
    }
    @mb.repl_query(sql, username.score).to_a.first['count']
  end

  # Check if the user has had the autopatrolled permission revoked in the past
  def self.autopatrolled_revoked?(username)
    sql = %{
      SELECT COUNT(*) AS count
      FROM logging_logindex
      WHERE log_type = 'rights'
      AND log_title = ?
      AND log_params REGEXP "oldgroups.*?autoreviewer.*?newgroups(?!.*?autoreviewer)"
    }
    @mb.repl_query(sql, username.score).to_a.first['count'] > 0
  end

  # Get the page title, ID and creation date of articles created by the given user
  def self.articles_created(username)
    sql = %{
      SELECT page_title, page_id, rev_timestamp
      FROM revision_userindex
      LEFT JOIN page ON page_id = rev_page
      WHERE page_namespace = 0
      AND rev_parent_id = 0
      AND rev_user_text = ?
      AND rev_deleted = 0
      AND page_is_redirect = 0
    }
    @mb.repl_query(sql, username.score).to_a
  end

  # Get the number of articles created by the user that are in maintenance categories
  def self.maintenance_count(articles)
    # Create list of categories to be used in the `cl_to IN ()` clause
    categories_sql = MAINTENANCE_CATEGORIES.collect { |c| "'#{c}'" }.join(',')
    # Create list of article IDs to be used in the `cl_from IN ()` clause
    article_ids = articles
      .select { |a| @mb.parse_date(a['rev_timestamp']) > @mb.now - 365 }
      .collect { |a| a['page_id'] }
    sql = %{
      SELECT COUNT(DISTINCT(cl_from)) AS count
      FROM categorylinks
      WHERE cl_from IN (#{article_ids.join(',')})
      AND cl_to IN (#{categories_sql})
      AND cl_type = 'page'
    }
    @mb.repl.query(sql).to_a.first['count']
  end

  # Check whether the user had a WP:PERM request for autopatrolled declined
  # within the last 90 days, by scanning the monthly "Denied" archive pages.
  def self.perm_request_declined?(username)
    target_date = @mb.today - 90
    # FIX: removed unused local `links = []` (never read or returned).
    dates_to_fetch = (target_date..@mb.today).select { |d| d.day == target_date.day || d.day == @mb.today.day }.uniq(&:month)
    dates_to_fetch.each do |date|
      key = "#{Date::MONTHNAMES[date.month]} #{date.year}"
      page = @mb.get("Wikipedia:Requests for permissions/Denied/#{key}")
      next unless page
      # Regexp.escape may break with other encodings
      username = username.force_encoding('utf-8')
      decline_days = page.split(/==\s*\w+\s+/i)
      decline_days.each do |decline_day|
        day_number = decline_day.scan(/^(\d+)\s*==/).flatten[0].to_i
        next if day_number == 0
        decline_day_date = @mb.parse_date("#{date.year}-#{date.month}-#{day_number}")
        matches = decline_day.scan(/\{\{Usercheck.*\|#{Regexp.escape(username).descore}}}.*Autopatrolled\]\]/i)[0]
        return true if matches && decline_day_date >= target_date
      end
    end
    false
  end

  # Get the usernames and edit counts of users who have created a page in the past month
  def self.page_creators
    # Cache so this can be re-called without repeating the query
    return @page_creators if @page_creators
    sql = %{
      SELECT DISTINCT(user_name), user_editcount
      FROM recentchanges
      LEFT JOIN user
      ON rc_user = user_id
      LEFT JOIN page
      ON rc_cur_id = page_id
      WHERE
      rc_timestamp > #{@mb.db_date(@mb.now - 3)} AND
      rc_namespace = 0 AND
      rc_bot = 0 AND
      rc_new = 1 AND
      page_is_redirect = 0 AND
      NOT EXISTS
      (
        SELECT 1
        FROM user_groups
        WHERE ug_user = user_id
        AND ( ug_group = 'autoreviewer' OR ug_group = 'sysop' )
      )
    }
    @page_creators = @mb.repl_query(sql).to_a
  end
end
# Script entry point.
AutopatrolledCandidates.run
AutopatrolledCandidates: use new comment table for log/edit summaries
# Make the shared musikbot library (one directory up) requirable.
$LOAD_PATH << '..'
require 'musikbot'
# Minimum number of live, non-redirect articles required to appear in the report.
MIN_ARTICLE_COUNT = 25
# Tracking categories indicating an article carries maintenance tags.
MAINTENANCE_CATEGORIES = [
'All_articles_lacking_sources',
'All_articles_needing_additional_references',
'All_unreferenced_BLPs',
'All_BLP_articles_lacking_sources',
'All_articles_lacking_reliable_references',
'All_articles_with_a_promotional_tone',
'All_articles_with_topics_of_unclear_notability'
]
# Destination wiki page for the generated report.
REPORT_PAGE = 'Wikipedia:Database reports/Editors eligible for Autopatrol privilege'
# Bot task: find prolific article creators who may qualify for the
# autopatrolled user right and publish a report to REPORT_PAGE.
# This revision reads edit/log summaries from the MediaWiki `comment` table.
module AutopatrolledCandidates
  # Entry point: gather recent page creators, filter to apparently-eligible
  # users, and write the report.
  def self.run
    @mb = MusikBot::Session.new(inspect)
    users = {}
    page_creators.each_with_index do |user, i|
      username = user['user_name']
      # not sure how this happens
      next if username.nil?
      articles = articles_created(username)
      # Skip if they don't meet article count prerequisite or recently had a PERM request declined
      next if articles.length < MIN_ARTICLE_COUNT || perm_request_declined?(username)
      user_data = {
        created: articles.length,
        edits: user['user_editcount'],
        deleted: deleted_counts(username),
        blocks: block_count(username),
        tagged: maintenance_count(articles),
        perm_revoked: autopatrolled_revoked?(username),
        copyvios: scan_talk_page(username)
      }
      users[username] = user_data
      # Progress output for the operator's console.
      puts "#{i} of #{page_creators.length}: #{username} = #{articles.length}"
    end
    generate_report(users)
  end

  # Generate markup for the report and write it to REPORT_PAGE
  def self.generate_report(users)
    cat_str = MAINTENANCE_CATEGORIES.collect { |c| "[[:Category:#{c}|#{c.descore}]]" }.join(', ')
    markup = <<~END
      <div style='font-size:24px'>Users eligible to be autopatrolled as of #{I18n.l(@mb.today, format: :heading)}</div>
      {{FORMATNUM:#{users.length}}} users who have created an article in the past month, and may be eligible for the autopatrolled privilege but don't have it yet.
      Users who had a [[WP:PERM/A|request for autopatrolled]] declined in the past 90 days are not listed.
      Prepared by ~~~ <onlyinclude>~~~~~</onlyinclude>
      == Key ==
      * '''Articles''': Number of live, non-redirect articles
      * '''Tagged''': Number of articles with maintenance tags<ref name=tags />
      * '''Deleted''': Number of deleted articles in the past year (may include redirects)<ref name='deleted' />
      * '''Edit count''': Raw edit count of the user
      * '''Blocks''': Number of blocks in the past year
      * '''Copyvios''': Number of ''possible'' user talk notifications about copyright concerns in the past year<ref name='copyvios' />
      * '''Revoked?''': Whether or not the autopatrolled permission was previously revoked
      {{pb}}
      ;Notes
      {{reflist|refs="
      <ref name='tags'>Supported maintenance categories include: #{cat_str}</ref>
      <ref name='deleted'>[[WP:G6|G6]] (technical) and [[WP:G7|G7]] (user-requested) speedy deletions are not included. The number of speedy, (BLP)PROD and AfD deletions are shown if detected via the deletion summary.</ref>
      <ref name='copyvios'>This works by scanning the edit summaries for "copyvios" or "copyright". Links are provided to the diffs, which may include removal of notices.</ref>
      }}
      == Report ==
      {| class='wikitable sortable'
      ! Username
      ! Articles
      ! Tagged
      ! Deleted
      ! Edit count
      ! Blocks
      ! Copyvios
      ! Revoked?
      ! class="unsortable" | Links
      |-
    END
    # Sort by number of articles created
    users = users.sort_by { |username, data| -data[:created] }.to_h
    users.each do |username, data|
      user_rights_log = "https://en.wikipedia.org/w/index.php?title=Special:Log&page=User:#{username.score}&type=rights"
      block_log = "https://en.wikipedia.org/w/index.php?title=Special:Log&action=view&page=#{username.score}&type=block"
      # NOTE(review): URI.escape/URI.encode are obsolete (removed in Ruby 3) —
      # confirm the runtime pins an older Ruby before upgrading.
      xtools_link = "[https://tools.wmflabs.org/xtools/pages/?user=#{URI.escape(username)}" \
        "&project=en.wikipedia.org&namespace=0&redirects=noredirects {{FORMATNUM:#{data[:created]}}}]"
      deleted_str = '0'
      # Generate string that lists the different types of deletions that were detected
      if data[:deleted][:total] > 0
        deleted_str = "#{data[:deleted][:total]} total"
        deletion_stats = []
        [:Speedy, :PROD, :AfD].each do |type|
          if data[:deleted][type] > 0
            deletion_stats << "#{type}: {{FORMATNUM:#{data[:deleted][type]}}}"
          end
        end
        deleted_str += " (#{deletion_stats.join(', ')})".chomp('()')
      end
      block_str = data[:blocks] > 0 ? "[#{block_log} {{FORMATNUM:#{data[:blocks]}}}]" : '0'
      copyvios_str = 0
      if data[:copyvios].any?
        # Footnote linking each possibly copyvio-related talk page diff.
        copyvios_str = "#{data[:copyvios].length}<ref>"
        data[:copyvios].each do |rev_id|
          copyvios_str += "[https://en.wikipedia.org/wiki/Special:Diff/#{rev_id}]"
        end
        copyvios_str += "</ref>"
      end
      revoked_str = data[:perm_revoked] ? "[#{user_rights_log} Yes]" : 'No'
      links = [
        "[[Special:UserRights/#{username}|User rights]]",
        "[https://tools.wmflabs.org/xtools-ec/?user=#{URI.encode(username)}&project=en.wikipedia.org EC]",
        "[https://tools.wmflabs.org/musikanimal/blp_edits?username=#{URI.encode(username)}&offset=0&contribs=on BLP edits]"
      ].join(' · ')
      markup += <<~END
        | {{User0|#{username}}}
        | #{xtools_link}
        | {{FORMATNUM:#{data[:tagged]}}}
        | data-sort-value=#{data[:deleted][:total]} | #{deleted_str}
        | [[Special:Contributions/#{username}|{{FORMATNUM:#{data[:edits]}}}]]
        | #{block_str}
        | data-sort-value=#{data[:copyvios].length} | #{copyvios_str}
        | #{revoked_str}
        | #{links}
        |-
      END
    end
    # Drop the trailing row separator and close the table.
    markup = markup.chomp("\n|-") + <<~END
      |}
      ;Links
      {{reflist}}
    END
    @mb.edit(REPORT_PAGE,
      content: markup,
      summary: "Reporting #{users.length} users eligible for autopatrolled"
    )
  end

  # Scan talk page history for messages that are potentially about copyvios
  def self.scan_talk_page(username)
    sql = %{
      SELECT rev_id
      FROM revision_userindex
      LEFT OUTER JOIN comment ON rev_comment_id = comment_id
      WHERE rev_page = (
        SELECT page_id
        FROM page
        WHERE page_title = ?
        AND page_namespace = 3
      )
      AND rev_timestamp > #{@mb.db_date(@mb.today - 365)}
      AND comment_text REGEXP "[Cc]opy(right|vio)"
    }
    @mb.repl_query(sql, username.score).to_a.collect { |r| r['rev_id'] }
  end

  # Get data about pages the user created that were deleted
  # Returns:
  #   {
  #     total: total number of articles deleted
  #     Speedy: total number of articles deleted under [[WP:CSD]]
  #     PROD: total number of articles deleted under [[WP:PROD]] or [[WP:BLPPROD]]
  #     AfD: total number of articles deleted under [[WP:AfD]]
  #   }
  def self.deleted_counts(username)
    sql = %{
      SELECT comment_text
      FROM logging_logindex
      LEFT OUTER JOIN comment ON log_comment_id = comment_id
      LEFT JOIN archive_userindex ON ar_page_id = log_page
      WHERE log_type = 'delete'
      AND ar_user_text = ?
      AND ar_namespace = 0
      AND ar_parent_id = 0
      AND ar_timestamp > #{@mb.db_date(@mb.now - 365)}
    }
    counts = {
      total: 0,
      Speedy: 0,
      PROD: 0,
      AfD: 0
    }
    @mb.repl_query(sql, username.score).to_a.each do |data|
      # don't count technical or user-requested deletions
      next if data['comment_text'] =~ /\[\[WP:CSD#G(6|7)\|/
      counts[:total] += 1
      # Classify by the deletion rationale linked in the log summary.
      case data['comment_text']
      when /\[\[WP:CSD#/
        counts[:Speedy] += 1
      when /\[\[WP:(BLP)?PROD/
        counts[:PROD] += 1
      when /\[\[(Wikipedia|WP):Articles for deletion\//
        counts[:AfD] += 1
      end
    end
    counts
  end

  # Get the number of blocks the user had in the past year
  def self.block_count(username)
    sql = %{
      SELECT COUNT(*) AS count
      FROM logging_logindex
      WHERE log_type = 'block'
      AND log_title = ?
      AND log_timestamp > #{@mb.db_date(@mb.now - 365)}
    }
    @mb.repl_query(sql, username.score).to_a.first['count']
  end

  # Check if the user has had the autopatrolled permission revoked in the past
  def self.autopatrolled_revoked?(username)
    sql = %{
      SELECT COUNT(*) AS count
      FROM logging_logindex
      WHERE log_type = 'rights'
      AND log_title = ?
      AND log_params REGEXP "oldgroups.*?autoreviewer.*?newgroups(?!.*?autoreviewer)"
    }
    @mb.repl_query(sql, username.score).to_a.first['count'] > 0
  end

  # Get the page title, ID and creation date of articles created by the given user
  def self.articles_created(username)
    sql = %{
      SELECT page_title, page_id, rev_timestamp
      FROM revision_userindex
      LEFT JOIN page ON page_id = rev_page
      WHERE page_namespace = 0
      AND rev_parent_id = 0
      AND rev_user_text = ?
      AND rev_deleted = 0
      AND page_is_redirect = 0
    }
    @mb.repl_query(sql, username.score).to_a
  end

  # Get the number of articles created by the user that are in maintenance categories
  def self.maintenance_count(articles)
    # Create list of categories to be used in the `cl_to IN ()` clause
    categories_sql = MAINTENANCE_CATEGORIES.collect { |c| "'#{c}'" }.join(',')
    # Create list of article IDs to be used in the `cl_from IN ()` clause
    article_ids = articles
      .select { |a| @mb.parse_date(a['rev_timestamp']) > @mb.now - 365 }
      .collect { |a| a['page_id'] }
    sql = %{
      SELECT COUNT(DISTINCT(cl_from)) AS count
      FROM categorylinks
      WHERE cl_from IN (#{article_ids.join(',')})
      AND cl_to IN (#{categories_sql})
      AND cl_type = 'page'
    }
    @mb.repl.query(sql).to_a.first['count']
  end

  # Check whether the user had a WP:PERM request for autopatrolled declined
  # within the last 90 days, by scanning the monthly "Denied" archive pages.
  def self.perm_request_declined?(username)
    target_date = @mb.today - 90
    # FIX: removed unused local `links = []` (never read or returned).
    dates_to_fetch = (target_date..@mb.today).select { |d| d.day == target_date.day || d.day == @mb.today.day }.uniq(&:month)
    dates_to_fetch.each do |date|
      key = "#{Date::MONTHNAMES[date.month]} #{date.year}"
      page = @mb.get("Wikipedia:Requests for permissions/Denied/#{key}")
      next unless page
      # Regexp.escape may break with other encodings
      username = username.force_encoding('utf-8')
      decline_days = page.split(/==\s*\w+\s+/i)
      decline_days.each do |decline_day|
        day_number = decline_day.scan(/^(\d+)\s*==/).flatten[0].to_i
        next if day_number == 0
        decline_day_date = @mb.parse_date("#{date.year}-#{date.month}-#{day_number}")
        matches = decline_day.scan(/\{\{Usercheck.*\|#{Regexp.escape(username).descore}}}.*Autopatrolled\]\]/i)[0]
        return true if matches && decline_day_date >= target_date
      end
    end
    false
  end

  # Get the usernames and edit counts of users who have created a page in the past month
  def self.page_creators
    # Cache so this can be re-called without repeating the query
    return @page_creators if @page_creators
    sql = %{
      SELECT DISTINCT(user_name), user_editcount
      FROM recentchanges
      LEFT JOIN user
      ON rc_user = user_id
      LEFT JOIN page
      ON rc_cur_id = page_id
      WHERE
      rc_timestamp > #{@mb.db_date(@mb.now - 3)} AND
      rc_namespace = 0 AND
      rc_bot = 0 AND
      rc_new = 1 AND
      page_is_redirect = 0 AND
      NOT EXISTS
      (
        SELECT 1
        FROM user_groups
        WHERE ug_user = user_id
        AND ( ug_group = 'autoreviewer' OR ug_group = 'sysop' )
      )
    }
    @page_creators = @mb.repl_query(sql).to_a
  end
end
# Script entry point.
AutopatrolledCandidates.run
|
Added formula for mmkcmd
require 'formula'

# Homebrew formula for mmkcmd (HEAD-only build from git, via CMake).
class Mmkcmd < Formula
  homepage 'https://github.com/fernandotcl/mmkcmd'
  head 'https://github.com/fernandotcl/mmkcmd.git'

  depends_on 'cmake' => :build

  def install
    system "cmake", ".", *std_cmake_args
    # FIX: pass "install" as a separate argument so the command is exec'd
    # directly rather than through a shell (Homebrew convention).
    system "make", "install"
  end
end
|
require "cli/parser"
require "utils/github"
module Homebrew
  module_function

  # CLI definition for `brew request-bottle`.
  def request_bottle_args
    Homebrew::CLI::Parser.new do
      usage_banner <<~EOS
        `request-bottle` <formula>
        Build a bottle for this formula with GitHub Actions.
      EOS
      switch "--ignore-errors",
             description: "Instruct the workflow action to ignore e.g., audit errors and upload bottles if they exist."
      max_named 1
    end
  end

  # True when HEAD is itself the most recent merge commit.
  def head_is_merge_commit?
    newest_merge = Utils.popen_read(Utils.git_path, "log", "--merges", "-1", "--format=%H").chomp
    head_sha = Utils.popen_read(Utils.git_path, "rev-parse", "HEAD").chomp
    newest_merge == head_sha
  end

  # Resolve the name to attribute the bottle request to: merge author on CI,
  # otherwise env overrides, falling back to the local git config.
  def git_user
    return Utils.popen_read(Utils.git_path, "log", "-1", "--pretty=%an") if ENV["CI"] && head_is_merge_commit?
    ENV["HOMEBREW_GIT_NAME"] || ENV["GIT_AUTHOR_NAME"] || ENV["GIT_COMMITTER_NAME"] ||
      Utils.popen_read(Utils.git_path, "config", "--get", "user.name")
  end

  # Resolve the email to attribute the bottle request to (same precedence as git_user).
  def git_email
    return Utils.popen_read(Utils.git_path, "log", "-1", "--pretty=%ae") if ENV["CI"] && head_is_merge_commit?
    ENV["HOMEBREW_GIT_EMAIL"] || ENV["GIT_AUTHOR_EMAIL"] || ENV["GIT_COMMITTER_EMAIL"] ||
      Utils.popen_read(Utils.git_path, "config", "--get", "user.email")
  end

  # Fire a repository_dispatch event asking CI to build a bottle for the formula.
  def request_bottle
    request_bottle_args.parse
    raise FormulaUnspecifiedError if Homebrew.args.named.empty?
    user = git_user.strip
    email = git_email.strip
    odie "User not specified" if user.empty?
    odie "Email not specified" if email.empty?
    formula = Homebrew.args.resolved_formulae.last.full_name
    payload = {
      formula: formula,
      name: user,
      email: email,
      ignore_errors: Homebrew.args.ignore_errors?
    }
    GitHub.open_api(
      "https://api.github.com/repos/Homebrew/linuxbrew-core/dispatches",
      data: { event_type: "bottling", client_payload: payload },
      request_method: :POST,
      scopes: ["repo"]
    )
  end
end
request-bottle: allow multiple formulae (#158)
* request-bottle: allow multiple formulae
Co-authored-by: Jonathan Chang <0bd51b8a224feeddb634965eebae512adcd49a8d@gmail.com>
require "cli/parser"
require "utils/github"
module Homebrew
  module_function

  # CLI definition for `brew request-bottle` (accepts one or more formulae).
  def request_bottle_args
    Homebrew::CLI::Parser.new do
      usage_banner <<~EOS
        `request-bottle` <formula> [<formula> ...]
        Build bottles for these formulae with GitHub Actions.
      EOS
      switch "--ignore-errors",
             description: "Instruct the workflow action to ignore e.g., audit errors and upload bottles if they exist."
    end
  end

  # True when HEAD is itself the most recent merge commit.
  def head_is_merge_commit?
    newest_merge = Utils.popen_read(Utils.git_path, "log", "--merges", "-1", "--format=%H").chomp
    head_sha = Utils.popen_read(Utils.git_path, "rev-parse", "HEAD").chomp
    newest_merge == head_sha
  end

  # Resolve the name to attribute the bottle request to: merge author on CI,
  # otherwise env overrides, falling back to the local git config.
  def git_user
    return Utils.popen_read(Utils.git_path, "log", "-1", "--pretty=%an") if ENV["CI"] && head_is_merge_commit?
    ENV["HOMEBREW_GIT_NAME"] || ENV["GIT_AUTHOR_NAME"] || ENV["GIT_COMMITTER_NAME"] ||
      Utils.popen_read(Utils.git_path, "config", "--get", "user.name")
  end

  # Resolve the email to attribute the bottle request to (same precedence as git_user).
  def git_email
    return Utils.popen_read(Utils.git_path, "log", "-1", "--pretty=%ae") if ENV["CI"] && head_is_merge_commit?
    ENV["HOMEBREW_GIT_EMAIL"] || ENV["GIT_AUTHOR_EMAIL"] || ENV["GIT_COMMITTER_EMAIL"] ||
      Utils.popen_read(Utils.git_path, "config", "--get", "user.email")
  end

  # Fire one repository_dispatch event per requested formula.
  def request_bottle
    request_bottle_args.parse
    raise FormulaUnspecifiedError if Homebrew.args.named.empty?
    user = git_user.strip
    email = git_email.strip
    odie "User not specified" if user.empty?
    odie "Email not specified" if email.empty?
    Homebrew.args.resolved_formulae.each do |formula|
      payload = {
        formula: formula.name,
        name: user,
        email: email,
        ignore_errors: Homebrew.args.ignore_errors?
      }
      GitHub.open_api(
        "https://api.github.com/repos/Homebrew/linuxbrew-core/dispatches",
        data: { event_type: "bottling", client_payload: payload },
        request_method: :POST,
        scopes: ["repo"]
      )
    end
  end
end
|
module DoxyHaml
  # A Doxygen "compound" (class, namespace, file, ...) whose data lives in
  # the XML document named after its Doxygen id.
  class Compound < Node
    def initialize id, parent
      super id, parent
      parse_xml
    end

    # The compound's name, wrapped in a link to its own HTML page.
    def html_name
      link_to_self name
    end

    private

    # Load this compound's Doxygen XML document into @xml.
    def parse_xml
      File.open(id_xml_filepath) { |file| @xml = Nokogiri::XML file }
    end

    # Filesystem path of the compound's XML source file.
    def id_xml_filepath
      File.join(Parser.xml_folder, "#{@id}.xml")
    end

    # Name of the HTML page generated for this compound.
    def html_filename
      "#{@id}.html"
    end

    # Build a link whose target is this compound's own page.
    def link_to_self link_name
      link_to link_name, html_filename
    end
  end
end
Renaming html_filename to filename in Compound
module DoxyHaml
  # A Doxygen "compound" (class, namespace, file, ...) whose data lives in
  # the XML document named after its Doxygen id.
  class Compound < Node
    def initialize id, parent
      super id, parent
      parse_xml
    end

    # The compound's name, wrapped in a link to its own HTML page.
    def html_name
      link_to_self name
    end

    # Name of the HTML page generated for this compound.
    def filename
      "#{@id}.html"
    end

    private

    # Load this compound's Doxygen XML document into @xml.
    def parse_xml
      File.open(id_xml_filepath) { |file| @xml = Nokogiri::XML file }
    end

    # Filesystem path of the compound's XML source file.
    def id_xml_filepath
      File.join(Parser.xml_folder, "#{@id}.xml")
    end

    # Build a link whose target is this compound's own page.
    def link_to_self link_name
      link_to link_name, filename
    end
  end
end
module Noodall
  # Keyword-based full-text search for MongoMapper-style models, using a
  # stemmed keyword index and a map/reduce relevance query.
  module Search
    # Common English words excluded from the keyword index and from queries.
    STOPWORDS = ["all","also","and","any","are","been","but","can", "cannot", "cant","else","etc","for","from","get", "give","had","has","hasnt","have","inc","into","its","not","put","see","this","too","via","was","were","when","with"]

    # Wire search behaviour into +model+: adds the indexed _keywords key,
    # a transient relevance accessor, and a before_save hook to refresh keywords.
    def self.configure(model)
      require 'lingua/stemmer'
      model.class_eval do
        key :_keywords, Array, :index => true
        attr_accessor :relevance
        before_save :_update_keywords
      end
    end

    module ClassMethods
      # Declare (and accumulate across calls) the keys whose values are indexed.
      def searchable_keys(*keys)
        @searchable_keys ||= Set.new
        @searchable_keys += keys
        @searchable_keys
      end

      # Stemming language; fixed to the first value supplied (defaults to 'en').
      def language(lang = 'en')
        @language ||= lang
      end

      # Memoized stemmer for this model's language.
      def stemmer
        @stemmer ||= Lingua::Stemmer.new(:language => language)
      end

      # Full-text search ordered by relevance; supports :page/:per_page pagination.
      def search(query, options = {})
        # FIX: second operand previously re-tested :per_page, so supplying only
        # :page left both options in the query and skipped pagination.
        if options[:per_page] || options[:page]
          per_page = options.delete(:per_page)
          page = options.delete(:page)
        end
        plucky_query = query(options.reverse_merge(
          :order => 'relevance DESC'
        ))
        criteria = plucky_query.criteria.to_hash
        options = plucky_query.options.to_hash
        # Extract words from the query and clean up
        words = query.downcase.split(/\W/) - STOPWORDS
        words.reject!{|w| w.length < 3}
        # add stemmed words to the array of words
        words = stem(words) | words
        criteria.merge!( :_keywords => { :$in => words } )
        # The Search result
        search_result = collection.map_reduce(search_map(words), search_reduce, :query => criteria, :out => "#{self.collection_name}_search")
        # Add value to sort options because model is stored in the value key
        options[:sort].map! do |s,v|
          ["value.#{s}",v]
        end
        search_query = Plucky::Query.new(search_result, options)
        if per_page
          results = search_query.paginate(:per_page => per_page, :page => page)
        else
          results = search_query.all
        end
        # return results mapped to objects
        results.tap do |docs|
          docs.map! { |hash| load(hash['value']) }
        end
      end

      # Stem each word with the model's stemmer.
      def stem(words)
        words.map { |word| stemmer.stem(word) }
      end

      # JS map function: relevance = number of indexed keywords matching the query.
      def search_map(words)
        #convert words into Regex OR
        q = words.map do |k|
          Regexp.escape(k)
        end.join("|")
        "function(){" +
          "this.relevance = this._keywords.filter(" +
          "function(z){" +
          "return String(z).match(/(#{q})/i);" +
          "}).length;" +
          "emit(this._id, this);" +
        "}"
      end

      # JS reduce function: keep the first value per key.
      def search_reduce
        "function( key , values ){return values[0];}"
      end

      # JS finalize function (unused by #search, kept for compatibility).
      def search_finalize
        "function( key , values ){return values.model;}"
      end
    end

    module InstanceMethods
      protected

      # Rebuild the document's keyword index from all searchable keys.
      def _update_keywords
        self._keywords = []
        self.class.searchable_keys.each do |search_key|
          self._keywords += keywords_for_value(send(search_key)).compact
        end
      end

      private

      # Keywords for a single value: strings are stripped of markup, filtered
      # and stemmed; arrays are processed element-wise; other values pass through.
      def keywords_for_value(val)
        if val.kind_of?(String)
          words = val.gsub(/<\/?[^>]*>/, "").downcase.split(/\W/) - STOPWORDS
          words.reject!{|w| w.length < 3}
          words.map do |word|
            stem = self.class.stemmer.stem(word)
            if stem != word
              [stem, word]
            else
              word
            end
          end.flatten
        elsif val.kind_of?(Array)
          # FIX: the recursive call passed a spurious `stemmer` argument to a
          # one-parameter method, raising ArgumentError for Array values.
          val.map { |e| keywords_for_value(e) }.flatten
        else
          [val]
        end
      end
    end
  end
end
convert queries to string - fixes nil query bug
module Noodall
  # Map/reduce-backed keyword search for Noodall MongoMapper models.
  module Search
    # Common English words excluded from both indexing and querying.
    STOPWORDS = ["all","also","and","any","are","been","but","can", "cannot", "cant","else","etc","for","from","get", "give","had","has","hasnt","have","inc","into","its","not","put","see","this","too","via","was","were","when","with"]

    # Installs the keyword index key, the relevance accessor and the
    # before_save hook that keeps _keywords up to date on +model+.
    def self.configure(model)
      require 'lingua/stemmer'
      model.class_eval do
        key :_keywords, Array, :index => true
        attr_accessor :relevance
        before_save :_update_keywords
      end
    end

    module ClassMethods
      # Declares which keys are indexed for search; returns the full set.
      def searchable_keys(*keys)
        @searchable_keys ||= Set.new
        @searchable_keys += keys
        @searchable_keys
      end

      # Stemming language (ISO code); fixed on first call.
      def language(lang = 'en')
        @language ||= lang
      end

      # Memoized stemmer for #language.
      def stemmer
        @stemmer ||= Lingua::Stemmer.new(:language => language)
      end

      # Runs a keyword search for +query+, returning model instances ordered
      # by relevance. Accepts :per_page/:page for pagination plus any other
      # Plucky query options.
      def search(query, options = {})
        # BUG FIX: the second operand used to repeat :per_page, so passing
        # :page alone never triggered pagination.
        if options[:per_page] || options[:page]
          per_page = options.delete(:per_page)
          page = options.delete(:page)
        end
        plucky_query = query(options.reverse_merge(
          :order => 'relevance DESC'
        ))
        criteria = plucky_query.criteria.to_hash
        options = plucky_query.options.to_hash
        # Extract words from the query and clean up; to_s guards against a
        # nil query.
        words = query.to_s.downcase.split(/\W/) - STOPWORDS
        words.reject!{|w| w.length < 3}
        # add stemmed words to the array of words
        words = stem(words) | words
        criteria.merge!( :_keywords => { :$in => words } )
        # The search result: one emitted doc per matching model, scored in
        # the map function below.
        search_result = collection.map_reduce(search_map(words), search_reduce, :query => criteria, :out => "#{self.collection_name}_search")
        # Add value to sort options because model is stored in the value key
        options[:sort].map! do |s,v|
          ["value.#{s}",v]
        end
        search_query = Plucky::Query.new(search_result, options)
        if per_page
          results = search_query.paginate(:per_page => per_page, :page => page)
        else
          results = search_query.all
        end
        # return results mapped to objects
        results.tap do |docs|
          docs.map! { |hash| load(hash['value']) }
        end
      end

      # Stemmed form of each word.
      def stem(words)
        words.map { |word| stemmer.stem(word) }
      end

      # JavaScript map function: scores each document by how many of its
      # keywords match the query words, then emits it keyed by _id.
      def search_map(words)
        # convert words into a regex alternation
        q = words.map do |k|
          Regexp.escape(k)
        end.join("|")
        "function(){" +
          "this.relevance = this._keywords.filter(" +
            "function(z){" +
              "return String(z).match(/(#{q})/i);" +
            "}).length;" +
          "emit(this._id, this);" +
        "}"
      end

      # JavaScript reduce function: keys are unique per document, so the
      # first value is kept as-is.
      def search_reduce
        "function( key , values ){return values[0];}"
      end

      # JavaScript finalize function (not currently passed to map_reduce).
      def search_finalize
        "function( key , values ){return values.model;}"
      end
    end

    module InstanceMethods
      protected

      # Rebuilds the _keywords array from all searchable keys before save.
      def _update_keywords
        self._keywords = []
        self.class.searchable_keys.each do |search_key|
          self._keywords += keywords_for_value(send(search_key)).compact
        end
      end

      private

      # Keywords for a single value: strings are stripped of markup, split,
      # stopword/length-filtered and stemmed; arrays are processed
      # recursively; anything else is indexed as-is.
      def keywords_for_value(val)
        if val.kind_of?(String)
          words = val.gsub(/<\/?[^>]*>/, "").downcase.split(/\W/) - STOPWORDS
          words.reject!{|w| w.length < 3}
          words.map do |word|
            stem = self.class.stemmer.stem(word)
            if stem != word
              [stem, word]
            else
              word
            end
          end.flatten
        elsif val.kind_of?(Array)
          # BUG FIX: the recursive call used to pass a stray `stemmer`
          # argument (undefined here, and wrong arity) — it raised for any
          # Array-valued searchable key.
          val.map { |e| keywords_for_value(e) }.flatten
        else
          [val]
        end
      end
    end
  end
end
|
# View helpers exposing KingForm's custom form builders to templates.
module KingForm
  module Helper
    # renders a form with the # KingForm::Builder::DefinitionList
    # It allows to devide the form into sections(fieldsets) where each contains
    # a definition list with dl/dd blocks for each label/field
    #
    # Read on to find out more about the available tags/fieldtypes
    #
    # === Example haml
    #  -dl_form_for(:client, :url => object_url, :html => { :method => :put }) do |f|
    #    - f.section 'Client Details' do
    #      = f.text :number
    #      - f.bundle 'Gender/Title' do
    #        = f.selection :gender
    #        = f.text :title, :class => 'medium'
    #      = f.text :position
    #      = f.text :last_name
    #      = f.date :birthday
    #  # =><form .. method..> <fieldset>
    #         <legend>Client Details</legend>
    #         <dl>
    #           <dt>Number</dt>
    #           <dd><input name=client[number] type=text></dd>
    #           ....
    #         </dl>
    #       </fieldset></form>
    #
    def dl_form_for(record_or_name_or_array, *args, &proc)
      options = args.last.is_a?(Hash) ? args.pop : {}
      options[:builder] = KingForm::Builder::DefinitionList
      form_for(record_or_name_or_array, *(args << options), &proc)
    end

    # Like Rails' +fields_for+, but rendering with the DefinitionList builder.
    def dl_fields_for(record_or_name_or_array, *args, &block)
      options = args.last.is_a?(Hash) ? args.pop : {}
      options[:builder] = KingForm::Builder::DefinitionList
      fields_for(record_or_name_or_array, *(args << options), &block)
    end

    # renders a form with the KingForm::Builder::Labeled
    # It allows to devide the form into sections(fieldsets) where each contains
    # a definition list with dl/dd blocks for each label/field
    #
    # Read on to find out more about the avalable tags/fieldtypes
    #
    # === Example haml
    #  -labeled_form_for(:client, :url => object_url, :html => { :method => :put }) do |f|
    #    - f.section 'Client Details' do
    #      = f.text :number
    #      - f.bundle 'Gender/Title' do
    #        = f.text :gender
    #        = f.text :title, :class => 'medium'
    #  # =><form ...>
    #       <fieldset>
    #         <legend>Client Details</legend>
    #         <div>
    #           <label>Number </label>
    #           <input name=client[number] type=text>
    #         </div>
    #         <div>
    #           <label>Gender/Title</label>
    #           <input type='text' name='client[gender]' value='male'/>
    #           <input type='text' name='client[title]' value='Prof.'/>
    #         </div>
    #       </fieldset>
    #     </form>
    #
    def labeled_form_for(record_or_name_or_array, *args, &proc)
      options = args.last.is_a?(Hash) ? args.pop : {}
      options[:builder] = KingForm::Builder::Labeled
      # A String/Symbol name is passed via :as, the object itself is the
      # next positional argument.
      case record_or_name_or_array
      when String, Symbol
        options[:as] = record_or_name_or_array
        form_for(args.shift, *(args << options), &proc)
      else
        form_for(record_or_name_or_array, *(args << options), &proc)
      end
    end

    # Like Rails' +fields_for+, but rendering with the Labeled builder.
    def labeled_fields_for(record_or_name_or_array, *args, &block)
      options = args.last.is_a?(Hash) ? args.pop : {}
      options[:builder] = KingForm::Builder::Labeled
      fields_for(record_or_name_or_array, *(args << options), &block)
    end

    # Returns an array for a given settings which has comma-seperated values.
    # In the view those are used for select boxes
    # Accepts an optional block to change the array elements
    def make_select(values, &block)
      return nil unless values
      raise ArgumentError unless values.class == String
      result = []
      values.split(',').each do |s|
        s.strip!
        s = yield(s) if block_given?
        result.push(s)
      end
      result
    end
  end
end
Adapt to the new method signature of `fields_for`; otherwise the options aren't picked up anymore, resulting in NoMethodErrors.
# View helpers exposing KingForm's custom form builders to templates.
module KingForm
  module Helper
    # renders a form with the # KingForm::Builder::DefinitionList
    # It allows to devide the form into sections(fieldsets) where each contains
    # a definition list with dl/dd blocks for each label/field
    #
    # Read on to find out more about the available tags/fieldtypes
    #
    # === Example haml
    #  -dl_form_for(:client, :url => object_url, :html => { :method => :put }) do |f|
    #    - f.section 'Client Details' do
    #      = f.text :number
    #      - f.bundle 'Gender/Title' do
    #        = f.selection :gender
    #        = f.text :title, :class => 'medium'
    #      = f.text :position
    #      = f.text :last_name
    #      = f.date :birthday
    #  # =><form .. method..> <fieldset>
    #         <legend>Client Details</legend>
    #         <dl>
    #           <dt>Number</dt>
    #           <dd><input name=client[number] type=text></dd>
    #           ....
    #         </dl>
    #       </fieldset></form>
    #
    def dl_form_for(record_or_name_or_array, *args, &proc)
      options = args.last.is_a?(Hash) ? args.pop : {}
      options[:builder] = KingForm::Builder::DefinitionList
      form_for(record_or_name_or_array, *(args << options), &proc)
    end

    # Like Rails' +fields_for+, but rendering with the DefinitionList
    # builder. Explicitly splits name/object to match the new fields_for
    # signature so the options hash is picked up.
    def dl_fields_for(record_or_name_or_array, *args, &block)
      options = args.last.is_a?(Hash) ? args.pop : {}
      options[:builder] = KingForm::Builder::DefinitionList
      case record_or_name_or_array
      when String, Symbol
        object_name = record_or_name_or_array
        object = args.first
      else
        object = record_or_name_or_array
        object_name = ActiveModel::Naming.singular(object)
      end
      fields_for(object_name, object, options, &block)
    end

    # renders a form with the KingForm::Builder::Labeled
    # It allows to devide the form into sections(fieldsets) where each contains
    # a definition list with dl/dd blocks for each label/field
    #
    # Read on to find out more about the avalable tags/fieldtypes
    #
    # === Example haml
    #  -labeled_form_for(:client, :url => object_url, :html => { :method => :put }) do |f|
    #    - f.section 'Client Details' do
    #      = f.text :number
    #      - f.bundle 'Gender/Title' do
    #        = f.text :gender
    #        = f.text :title, :class => 'medium'
    #  # =><form ...>
    #       <fieldset>
    #         <legend>Client Details</legend>
    #         <div>
    #           <label>Number </label>
    #           <input name=client[number] type=text>
    #         </div>
    #         <div>
    #           <label>Gender/Title</label>
    #           <input type='text' name='client[gender]' value='male'/>
    #           <input type='text' name='client[title]' value='Prof.'/>
    #         </div>
    #       </fieldset>
    #     </form>
    #
    def labeled_form_for(record_or_name_or_array, *args, &proc)
      options = args.last.is_a?(Hash) ? args.pop : {}
      options[:builder] = KingForm::Builder::Labeled
      case record_or_name_or_array
      when String, Symbol
        options[:as] = record_or_name_or_array
        form_for(args.shift, *(args << options), &proc)
      else
        form_for(record_or_name_or_array, *(args << options), &proc)
      end
    end

    # Like Rails' +fields_for+, but rendering with the Labeled builder.
    # BUG FIX: adapted to the new fields_for(name, object, options)
    # signature exactly as dl_fields_for was — the old splat call style left
    # the options (and thus :builder) unused, causing NoMethodErrors.
    def labeled_fields_for(record_or_name_or_array, *args, &block)
      options = args.last.is_a?(Hash) ? args.pop : {}
      options[:builder] = KingForm::Builder::Labeled
      case record_or_name_or_array
      when String, Symbol
        object_name = record_or_name_or_array
        object = args.first
      else
        object = record_or_name_or_array
        object_name = ActiveModel::Naming.singular(object)
      end
      fields_for(object_name, object, options, &block)
    end

    # Returns an array for a given settings which has comma-seperated values.
    # In the view those are used for select boxes
    # Accepts an optional block to change the array elements
    def make_select(values, &block)
      return nil unless values
      # NOTE(review): `values.class == String` rejects String subclasses
      # (e.g. ActiveSupport::SafeBuffer) — presumably intentional; confirm
      # before relaxing to is_a?(String).
      raise ArgumentError unless values.class == String
      result = []
      values.split(',').each do |s|
        s.strip!
        s = yield(s) if block_given?
        result.push(s)
      end
      result
    end
  end
end
|
require 'rubygems'
require 'sinatra'
# GET / : builds a random cocktail recipe — one liquor, one citrus and one
# sweetener, measured in a 1:1:2 ratio of the default portion — and renders
# the :show template with @cocktail set to the ingredient lines.
get '/' do
  liquors = ["tequila", "vodka"]
  citrus = ["lime juice", "lemon juice"]
  sweets = ["agave syrup", "simple syrup"]
  default_portion = 0.75
  ratio = [1,1,2]
  # ounces for each ingredient slot
  amounts = ratio.map {|r| r*default_portion}
  # pick one ingredient of each kind at random
  base = [liquors.sample, citrus.sample, sweets.sample]
  ingreds = amounts.zip(base)
  # e.g. "0.75 oz vodka"
  @cocktail = ingreds.map{|i,j| "%s oz %s" % [i.to_s ,j]}
  erb :show
end
Fixed indentation.
require 'rubygems'
require 'sinatra'
# GET / : assembles a random cocktail — one liquor, one citrus and one
# sweetener in a 1:1:2 ratio of the base portion — and renders :show with
# @cocktail holding the measured ingredient lines.
get '/' do
  spirits = ["tequila", "vodka"]
  sours = ["lime juice", "lemon juice"]
  syrups = ["agave syrup", "simple syrup"]
  portion = 0.75
  parts = [1,1,2]
  # ounces for each ingredient slot
  measures = parts.map { |part| part * portion }
  # one random pick of each kind, paired with its measure
  picks = [spirits.sample, sours.sample, syrups.sample]
  @cocktail = measures.zip(picks).map { |oz, name| "%s oz %s" % [oz.to_s, name] }
  erb :show
end
|
Revert "replace mini_racer with therubyracer"
This reverts commit c0cf7343116be35b1ab257f97b3f8c9cab31c550.
|
#CLI controller as in the music controller :)
class BringFido::CLI
  # Entry point: greets the user and lists the available options.
  def call
    puts "Don't you forget to bring your doggo"
    list_options
  end
  # Placeholder for the scraped 3x8 option grid.
  def list_options
    puts "the 3x8 grid of what I want to scrape"
  end
end
added functionality to CLI.rb
#CLI controller as in the music controller :)
class BringFido::CLI
  # Entry point: greet the user, list the options, then prompt for a choice.
  def call
    puts "Don't you forget to bring your doggo"
    list_options
    menu
  end

  # Placeholder for the scraped 3x8 option grid.
  def list_options
    puts "the 3x8 grid of what I want to scrape"
  end

  # Prompts for a destination number and prints details for the choice;
  # any other input falls through silently.
  def menu
    puts "Where do you want to bring Fido today? Pick a number"
    choice = gets.strip
    if choice == "1"
      puts "info on 1st 3x8"
    elsif choice == "2"
      puts "info on 2 3x8"
    end
  end
end
|
# frozen_string_literal: true
require 'rails/all'
require 'acts_as_list'
require 'awesome_nested_set'
require 'cancan'
require 'friendly_id'
require 'kaminari/activerecord'
require 'mail'
require 'monetize'
require 'paperclip'
require 'paranoia'
require 'ransack'
require 'state_machines-activerecord'
require 'spree/deprecation'
require 'spree/paranoia_deprecations'
# This is required because ActiveModel::Validations#invalid? conflicts with the
# invalid state of a Payment. In the future this should be removed.
# NOTE(review): presumably state_machines would otherwise refuse to define
# the conflicting helper method — confirm against the gem's docs.
StateMachines::Machine.ignore_method_conflicts = true
# Top-level namespace for the Spree/Solidus engine.
module Spree
  # Name of the store's user model; assigned as a String or Symbol (not a
  # Class) so the constant can be re-resolved on each call.
  mattr_accessor :user_class
  # Resolves the configured user class; raises if a Class was assigned
  # directly, returns nil when nothing was configured.
  def self.user_class
    if @@user_class.is_a?(Class)
      raise "Spree.user_class MUST be a String or Symbol object, not a Class object."
    elsif @@user_class.is_a?(String) || @@user_class.is_a?(Symbol)
      @@user_class.to_s.constantize
    end
  end
  # Used to configure Spree.
  #
  # Example:
  #
  #   Spree.config do |config|
  #     config.track_inventory_levels = false
  #   end
  #
  # This method is defined within the core gem on purpose.
  # Some people may only wish to use the Core part of Spree.
  def self.config(&_block)
    yield(Spree::Config)
  end
  module Core
    # Raised by payment gateways on processing failures.
    class GatewayError < RuntimeError; end
    include ActiveSupport::Deprecation::DeprecatedConstantAccessor
    # Old constant kept as a deprecated alias of the ActiveRecord error.
    deprecate_constant 'DestroyWithOrdersError', ActiveRecord::DeleteRestrictionError, deprecator: Spree::Deprecation
  end
end
require 'spree/core/version'
require 'spree/core/active_merchant_dependencies'
require 'spree/core/class_constantizer'
require 'spree/core/environment_extension'
require 'spree/core/environment/calculators'
require 'spree/core/environment/promotions'
require 'spree/core/environment'
require 'spree/promo/environment'
require 'spree/migrations'
require 'spree/migration_helpers'
require 'spree/event'
require 'spree/core/engine'
require 'spree/i18n'
require 'spree/localized_number'
require 'spree/money'
require 'spree/permitted_attributes'
require 'spree/core/importer'
require 'spree/core/permalinks'
require 'spree/core/product_duplicator'
require 'spree/core/current_store'
require 'spree/core/controller_helpers/auth'
require 'spree/core/controller_helpers/common'
require 'spree/core/controller_helpers/order'
require 'spree/core/controller_helpers/payment_parameters'
require 'spree/core/controller_helpers/pricing'
require 'spree/core/controller_helpers/search'
require 'spree/core/controller_helpers/store'
require 'spree/core/controller_helpers/strong_parameters'
require 'spree/core/role_configuration'
require 'spree/core/stock_configuration'
require 'spree/core/validators/email'
require 'spree/permission_sets'
require 'spree/preferences/store'
require 'spree/preferences/static_model_preferences'
require 'spree/preferences/scoped_store'
Add a warning for those who are still using Rails 5.1
Rails 5.1 will not be supported by Solidus starting from the next
version. It is in its EOL phase and will not receive security patches
anymore.
# frozen_string_literal: true
require 'rails/all'
require 'acts_as_list'
require 'awesome_nested_set'
require 'cancan'
require 'friendly_id'
require 'kaminari/activerecord'
require 'mail'
require 'monetize'
require 'paperclip'
require 'paranoia'
require 'ransack'
require 'state_machines-activerecord'
require 'spree/deprecation'
require 'spree/paranoia_deprecations'
# This is required because ActiveModel::Validations#invalid? conflicts with the
# invalid state of a Payment. In the future this should be removed.
# NOTE(review): presumably state_machines would otherwise refuse to define
# the conflicting helper method — confirm against the gem's docs.
StateMachines::Machine.ignore_method_conflicts = true
# Top-level namespace for the Spree/Solidus engine.
module Spree
  # Name of the store's user model; assigned as a String or Symbol (not a
  # Class) so the constant can be re-resolved on each call.
  mattr_accessor :user_class
  # Resolves the configured user class; raises if a Class was assigned
  # directly, returns nil when nothing was configured.
  def self.user_class
    if @@user_class.is_a?(Class)
      raise "Spree.user_class MUST be a String or Symbol object, not a Class object."
    elsif @@user_class.is_a?(String) || @@user_class.is_a?(Symbol)
      @@user_class.to_s.constantize
    end
  end
  # Used to configure Spree.
  #
  # Example:
  #
  #   Spree.config do |config|
  #     config.track_inventory_levels = false
  #   end
  #
  # This method is defined within the core gem on purpose.
  # Some people may only wish to use the Core part of Spree.
  def self.config(&_block)
    yield(Spree::Config)
  end
  module Core
    # Raised by payment gateways on processing failures.
    class GatewayError < RuntimeError; end
    include ActiveSupport::Deprecation::DeprecatedConstantAccessor
    # Old constant kept as a deprecated alias of the ActiveRecord error.
    deprecate_constant 'DestroyWithOrdersError', ActiveRecord::DeleteRestrictionError, deprecator: Spree::Deprecation
  end
end
# Solidus drops Rails 5.1 (EOL) support next version; warn anyone still on it.
minimum_rails = Gem::Version.new('5.2')
if Gem::Version.new(Rails.version) < minimum_rails
  warn <<~HEREDOC
    Rails 5.1 (EOL) is deprecated and will not be supported anymore from the next Solidus version.
    Please, upgrade to a more recent Rails version.
    Read more on upgrading from Rails 5.1 to Rails 5.2 here:
    https://guides.rubyonrails.org/upgrading_ruby_on_rails.html#upgrading-from-rails-5-1-to-rails-5-2
  HEREDOC
end
require 'spree/core/version'
require 'spree/core/active_merchant_dependencies'
require 'spree/core/class_constantizer'
require 'spree/core/environment_extension'
require 'spree/core/environment/calculators'
require 'spree/core/environment/promotions'
require 'spree/core/environment'
require 'spree/promo/environment'
require 'spree/migrations'
require 'spree/migration_helpers'
require 'spree/event'
require 'spree/core/engine'
require 'spree/i18n'
require 'spree/localized_number'
require 'spree/money'
require 'spree/permitted_attributes'
require 'spree/core/importer'
require 'spree/core/permalinks'
require 'spree/core/product_duplicator'
require 'spree/core/current_store'
require 'spree/core/controller_helpers/auth'
require 'spree/core/controller_helpers/common'
require 'spree/core/controller_helpers/order'
require 'spree/core/controller_helpers/payment_parameters'
require 'spree/core/controller_helpers/pricing'
require 'spree/core/controller_helpers/search'
require 'spree/core/controller_helpers/store'
require 'spree/core/controller_helpers/strong_parameters'
require 'spree/core/role_configuration'
require 'spree/core/stock_configuration'
require 'spree/core/validators/email'
require 'spree/permission_sets'
require 'spree/preferences/store'
require 'spree/preferences/static_model_preferences'
require 'spree/preferences/scoped_store'
|
#++
# Copyright (c) 2007-2011, Rails Dog LLC and other contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the Rails Dog LLC nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#--
require 'rails/all'
require 'rails/generators'
require 'state_machine'
require 'paperclip'
require 'stringex'
require 'kaminari'
require 'nested_set'
require 'acts_as_list'
require 'active_merchant'
require 'meta_search'
require 'find_by_param'
require 'jquery-rails'
# Root namespace for the Spree engine and its Core component.
module Spree
  module Core
  end
end
require 'spree/core/ext/active_record'
require 'spree/core/ext/hash'
require 'spree/core/delegate_belongs_to'
require 'spree/core/theme_support'
require 'spree/core/responder'
require 'spree/core/respond_with'
require 'spree/core/ssl_requirement'
require 'spree/core/preferences/model_hooks'
require 'spree/core/preferences/preference_definition'
# BUG FIX: the helper lives under the namespaced path; the bare
# 'store_helpers' require is not on the load path.
require 'spree/core/store_helpers'
require 'spree/file_utilz'
require 'spree/calculated_adjustments'
require 'spree/current_order'
require 'spree/preference_access'
require 'spree/config'
require 'spree/mail_settings'
require 'spree/mail_interceptor'
require 'redirect_legacy_product_url'
require 'middleware/seo_assist'
require 'spree_base' # added 11-3 JBD
# The hack redefines existing methods; silence the redefinition warnings.
silence_warnings do
  require 'spree/core/authorize_net_cim_hack'
end
require 'spree/core/version'
require 'spree/core/engine'
require 'generators/spree/site/site_generator'
require 'generators/spree/dummy/dummy_generator'
require 'generators/spree/sandbox/sandbox_generator'
# Mix calculated-adjustment and nested-set behaviour into every AR model.
ActiveRecord::Base.class_eval do
  include Spree::CalculatedAdjustments
  include CollectiveIdea::Acts::NestedSet
end
# Only load the nested-set view helper when ActionView is present
# (e.g. not in console-only or background processes).
if defined?(ActionView)
  require 'nested_set/helper'
  ActionView::Base.class_eval do
    include CollectiveIdea::Acts::NestedSet::Helper
  end
end
# Make StoreHelpers available in all views once ActionView loads.
ActiveSupport.on_load(:action_view) do
  include StoreHelpers
end
# Legacy namespace, kept for backwards compatibility.
module SpreeCore
end
Fix require to spree/core/store_helpers
#++
# Copyright (c) 2007-2011, Rails Dog LLC and other contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the Rails Dog LLC nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#--
require 'rails/all'
require 'rails/generators'
require 'state_machine'
require 'paperclip'
require 'stringex'
require 'kaminari'
require 'nested_set'
require 'acts_as_list'
require 'active_merchant'
require 'meta_search'
require 'find_by_param'
require 'jquery-rails'
# Root namespace for the Spree engine and its Core component.
module Spree
  module Core
  end
end
require 'spree/core/ext/active_record'
require 'spree/core/ext/hash'
require 'spree/core/delegate_belongs_to'
require 'spree/core/theme_support'
require 'spree/core/responder'
require 'spree/core/respond_with'
require 'spree/core/ssl_requirement'
require 'spree/core/preferences/model_hooks'
require 'spree/core/preferences/preference_definition'
require 'spree/core/store_helpers'
require 'spree/file_utilz'
require 'spree/calculated_adjustments'
require 'spree/current_order'
require 'spree/preference_access'
require 'spree/config'
require 'spree/mail_settings'
require 'spree/mail_interceptor'
require 'redirect_legacy_product_url'
require 'middleware/seo_assist'
require 'spree_base' # added 11-3 JBD
# The hack redefines existing methods; silence the redefinition warnings.
silence_warnings do
  require 'spree/core/authorize_net_cim_hack'
end
require 'spree/core/version'
require 'spree/core/engine'
require 'generators/spree/site/site_generator'
require 'generators/spree/dummy/dummy_generator'
require 'generators/spree/sandbox/sandbox_generator'
# Mix calculated-adjustment and nested-set behaviour into every AR model.
ActiveRecord::Base.class_eval do
  include Spree::CalculatedAdjustments
  include CollectiveIdea::Acts::NestedSet
end
# Only load the nested-set view helper when ActionView is present
# (e.g. not in console-only or background processes).
if defined?(ActionView)
  require 'nested_set/helper'
  ActionView::Base.class_eval do
    include CollectiveIdea::Acts::NestedSet::Helper
  end
end
# Make StoreHelpers available in all views once ActionView loads.
ActiveSupport.on_load(:action_view) do
  include StoreHelpers
end
# Legacy namespace, kept for backwards compatibility.
module SpreeCore
end
|
# CocoaPods specification for YAUIKit.
Pod::Spec.new do |spec|
  spec.name         = 'YAUIKit'
  spec.version      = '2.5.3'
  spec.summary      = 'YAUIKit'
  spec.homepage     = 'https://github.com/candyan/YAUIKit'
  spec.license      = 'MIT'
  spec.author       = { 'Candyan' => 'liuyanhp@gmail.com' }
  spec.platform     = :ios, '6.0'
  spec.requires_arc = true
  # Fetch the source at the git tag matching this version.
  spec.source = {
    :git => 'https://github.com/candyan/YAUIKit.git',
    :tag => spec.version.to_s
  }
  spec.source_files = 'Source/**/*.{c,h,m}'
  spec.dependency 'Masonry'
end
add 2.5.4 version
# CocoaPods specification for YAUIKit.
Pod::Spec.new do |spec|
  spec.name         = 'YAUIKit'
  spec.version      = '2.5.4'
  spec.summary      = 'YAUIKit'
  spec.homepage     = 'https://github.com/candyan/YAUIKit'
  spec.license      = 'MIT'
  spec.author       = { 'Candyan' => 'liuyanhp@gmail.com' }
  spec.platform     = :ios, '6.0'
  spec.requires_arc = true
  # Fetch the source at the git tag matching this version.
  spec.source = {
    :git => 'https://github.com/candyan/YAUIKit.git',
    :tag => spec.version.to_s
  }
  spec.source_files = 'Source/**/*.{c,h,m}'
  spec.dependency 'Masonry'
end
|
module Nugget
  # Loads test definitions, runs them (once or on an interval) and reports
  # results to file and metric backends.
  class Service
    # Runs the full suite forever, sleeping Config.interval between passes.
    def self.run_daemon(test_name = nil)
      Nugget::Log.info("Starting up Nugget in daemon mode ...")
      loop do
        Nugget::Log.debug("Running tests ...")
        run(test_name)
        # chill
        Nugget::Log.debug("Sleeping for #{Nugget::Config.interval.to_i} ...")
        sleep(Nugget::Config.interval.to_i)
      end
    end

    # Runs the suite a single time.
    def self.run_once(test_name = nil)
      Nugget::Log.info("Starting up Nugget in run-once mode ...")
      run(test_name)
    end

    # Loads the JSON config and runs the named test, or all tests (each in
    # its own thread), then writes the collected results out.
    def self.run(test_name = nil)
      # BUG FIX: File.open (block form) instead of Kernel#open — avoids the
      # pipe-command behaviour of open("|...") and closes the handle.
      config = File.open(Nugget::Config.config) do |config_file|
        Yajl::Parser.new(:symbolize_keys => true).parse(config_file)
      end
      results = Hash.new
      threadlist = Array.new
      if test_name
        if definition = config[test_name.to_s.to_sym]
          run_test(results, test_name, definition)
        else
          raise "No test name #{test_name.inspect} found."
        end
      else
        config.each do |test, definition|
          threadlist << Thread.new { run_test(results, test, definition) }
        end
      end
      threadlist.each { |x|
        x.join
      }
      Nugget::Service.write_results(results)
    end

    # Executes one test definition with a hard timeout, records its outcome
    # in +results+ and forwards metrics.
    def self.run_test(results, test, definition)
      result = nil
      response = nil
      begin
        request_definition = config_converter(definition)
        response_definition = definition[:response]
        status = Timeout::timeout(TIMEOUT) {
          Nugget::Log.debug("Asserting turd definitions ...")
          response = Turd.run(request_definition, response_definition)
        }
        result = "PASS"
      rescue Timeout::Error => e
        Nugget::Log.error("#{definition[:type]} test #{test} took too long to run (#{TIMEOUT}s)!")
        Nugget::Log.error(e)
        result = "FAIL"
        response = "timeout"
      rescue StandardError => e
        # BUG FIX: rescue StandardError (not Exception) so signals and
        # SystemExit still propagate, and guard e.response since not every
        # error carries one (previously a NoMethodError inside the rescue).
        failure = e.respond_to?(:response) ? e.response : nil
        detail = failure.is_a?(Hash) ? failure[:failed] : e.message
        Nugget::Log.error("#{definition[:type]} test #{test} failed due to #{detail}!")
        Nugget::Log.error(e)
        result = "FAIL"
        response = failure || e.message
      end
      Nugget::Log.info("Test #{test} complete with status #{result}")
      results.store(test, {
        :config => request_definition,
        :result => result,
        :response => response,
        :timestamp => Time.now.to_i
      })
      send_metrics(test, result, response)
    end

    # Converts a config entry into the request hash Turd expects:
    # { :url, :type, :options } with :method symbolized and :url hoisted out
    # of the options.
    def self.config_converter(definition)
      options = definition[:request]
      if options[:method]
        sym_method = options[:method].to_sym
        options.store(:method, sym_method)
      end
      if options[:url]
        url = options[:url]
        options.delete(:url)
      end
      {
        :url => url,
        :type => definition[:type],
        :options => options
      }
    end

    # Writes results as JSON to Config.resultsfile (when configured);
    # failures are logged, never raised.
    def self.write_results(results)
      begin
        if Nugget::Config.resultsfile
          Nugget::Log.debug("Writing results to #{Nugget::Config.resultsfile} ...")
          # BUG FIX: block form guarantees the file is closed even when the
          # write raises (the handle used to leak in that case).
          File.open(Nugget::Config.resultsfile, "w") do |file|
            file.puts(results.to_json)
          end
        end
      rescue StandardError => e
        Nugget::Log.error("Something went wrong with writing out the results file!")
        Nugget::Log.error(e)
      end
    end

    # Forwards one test outcome to every configured metrics backend.
    def self.send_metrics(test, result, response)
      if Nugget::Config.backstop_url
        Nugget::Backstop.send_metrics(test, result, response)
      end
      if Nugget::Config.statsd_host
        Nugget::NStatsd.send_metrics(test, result, response)
      end
    end
  end
end
more logging
module Nugget
  # Loads test definitions, runs them (once or on an interval) and reports
  # results to file and metric backends.
  class Service
    # Runs the full suite forever, sleeping Config.interval between passes.
    def self.run_daemon(test_name = nil)
      Nugget::Log.info("Starting up Nugget in daemon mode ...")
      loop do
        Nugget::Log.debug("Running tests ...")
        run(test_name)
        # chill
        Nugget::Log.debug("Sleeping for #{Nugget::Config.interval.to_i} ...")
        sleep(Nugget::Config.interval.to_i)
      end
    end

    # Runs the suite a single time.
    def self.run_once(test_name = nil)
      Nugget::Log.info("Starting up Nugget in run-once mode ...")
      run(test_name)
    end

    # Loads the JSON config and runs the named test, or all tests (each in
    # its own thread), then writes the collected results out.
    def self.run(test_name = nil)
      # BUG FIX: File.open (block form) instead of Kernel#open — avoids the
      # pipe-command behaviour of open("|...") and closes the handle.
      config = File.open(Nugget::Config.config) do |config_file|
        Yajl::Parser.new(:symbolize_keys => true).parse(config_file)
      end
      results = Hash.new
      threadlist = Array.new
      if test_name
        if definition = config[test_name.to_s.to_sym]
          run_test(results, test_name, definition)
        else
          raise "No test name #{test_name.inspect} found."
        end
      else
        config.each do |test, definition|
          threadlist << Thread.new { run_test(results, test, definition) }
        end
      end
      threadlist.each { |x|
        x.join
      }
      Nugget::Service.write_results(results)
    end

    # Executes one test definition with a hard timeout, records its outcome
    # in +results+ and forwards metrics.
    def self.run_test(results, test, definition)
      result = nil
      response = nil
      begin
        request_definition = config_converter(definition)
        response_definition = definition[:response]
        status = Timeout::timeout(TIMEOUT) {
          Nugget::Log.debug("Asserting turd definitions for #{test}...")
          response = Turd.run(request_definition, response_definition)
        }
        result = "PASS"
      rescue Timeout::Error => e
        Nugget::Log.error("#{definition[:type]} test #{test} took too long to run (#{TIMEOUT}s)!")
        Nugget::Log.error(e)
        result = "FAIL"
        response = "timeout"
      rescue StandardError => e
        # BUG FIX: rescue StandardError (not Exception) so signals and
        # SystemExit still propagate, and guard e.response since not every
        # error carries one (previously a NoMethodError inside the rescue).
        failure = e.respond_to?(:response) ? e.response : nil
        detail = failure.is_a?(Hash) ? failure[:failed] : e.message
        Nugget::Log.error("#{definition[:type]} test #{test} failed due to #{detail}!")
        Nugget::Log.error("return code: #{failure[:return_code]}") if failure.is_a?(Hash)
        Nugget::Log.error(e)
        result = "FAIL"
        response = failure || e.message
      end
      Nugget::Log.info("Test #{test} complete with status #{result}")
      results.store(test, {
        :config => request_definition,
        :result => result,
        :response => response,
        :timestamp => Time.now.to_i
      })
      send_metrics(test, result, response)
    end

    # Converts a config entry into the request hash Turd expects:
    # { :url, :type, :options } with :method symbolized and :url hoisted out
    # of the options.
    def self.config_converter(definition)
      options = definition[:request]
      if options[:method]
        sym_method = options[:method].to_sym
        options.store(:method, sym_method)
      end
      if options[:url]
        url = options[:url]
        options.delete(:url)
      end
      {
        :url => url,
        :type => definition[:type],
        :options => options
      }
    end

    # Writes results as JSON to Config.resultsfile (when configured);
    # failures are logged, never raised.
    def self.write_results(results)
      begin
        if Nugget::Config.resultsfile
          Nugget::Log.debug("Writing results to #{Nugget::Config.resultsfile} ...")
          # BUG FIX: block form guarantees the file is closed even when the
          # write raises (the handle used to leak in that case).
          File.open(Nugget::Config.resultsfile, "w") do |file|
            file.puts(results.to_json)
          end
        end
      rescue StandardError => e
        Nugget::Log.error("Something went wrong with writing out the results file!")
        Nugget::Log.error(e)
      end
    end

    # Forwards one test outcome to every configured metrics backend.
    def self.send_metrics(test, result, response)
      if Nugget::Config.backstop_url
        Nugget::Backstop.send_metrics(test, result, response)
      end
      if Nugget::Config.statsd_host
        Nugget::NStatsd.send_metrics(test, result, response)
      end
    end
  end
end
|
require 'net/http'
require 'net/https'
require 'oauth/oauth'
require 'oauth/client/net_http'
require 'oauth/errors'
require 'cgi'
module OAuth
class Consumer
# determine the certificate authority path to verify SSL certs
CA_FILES = %w(/etc/ssl/certs/ca-certificates.crt /usr/share/curl/curl-ca-bundle.crt)
CA_FILES.each do |ca_file|
if File.exists?(ca_file)
CA_FILE = ca_file
break
end
end
CA_FILE = nil unless defined?(CA_FILE)
@@default_options = {
# Signature method used by server. Defaults to HMAC-SHA1
:signature_method => 'HMAC-SHA1',
# default paths on site. These are the same as the defaults set up by the generators
:request_token_path => '/oauth/request_token',
:authorize_path => '/oauth/authorize',
:access_token_path => '/oauth/access_token',
:proxy => nil,
# How do we send the oauth values to the server see
# http://oauth.net/core/1.0/#consumer_req_param for more info
#
# Possible values:
#
# :header - via the Authorize header (Default) ( option 1. in spec)
# :body - url form encoded in body of POST request ( option 2. in spec)
# :query_string - via the query part of the url ( option 3. in spec)
:scheme => :header,
# Default http method used for OAuth Token Requests (defaults to :post)
:http_method => :post,
# Add a custom ca_file for consumer
# :ca_file => '/etc/certs.pem'
:oauth_version => "1.0"
}
attr_accessor :options, :key, :secret
attr_writer :site, :http
# Create a new consumer instance by passing it a configuration hash:
#
# @consumer = OAuth::Consumer.new(key, secret, {
# :site => "http://term.ie",
# :scheme => :header,
# :http_method => :post,
# :request_token_path => "/oauth/example/request_token.php",
# :access_token_path => "/oauth/example/access_token.php",
# :authorize_path => "/oauth/example/authorize.php"
# })
#
# Start the process by requesting a token
#
# @request_token = @consumer.get_request_token
# session[:request_token] = @request_token
# redirect_to @request_token.authorize_url
#
# When user returns create an access_token
#
# @access_token = @request_token.get_access_token
# @photos=@access_token.get('/photos.xml')
#
def initialize(consumer_key, consumer_secret, options = {})
@key = consumer_key
@secret = consumer_secret
# ensure that keys are symbols
@options = @@default_options.merge(options.inject({}) do |opts, (key, value)|
opts[key.to_sym] = value
opts
end)
end
# The default http method
def http_method
@http_method ||= @options[:http_method] || :post
end
# The HTTP object for the site. The HTTP Object is what you get when you do Net::HTTP.new
def http
@http ||= create_http
end
# Contains the root URI for this site
def uri(custom_uri = nil)
if custom_uri
@uri = custom_uri
@http = create_http # yike, oh well. less intrusive this way
else # if no custom passed, we use existing, which, if unset, is set to site uri
@uri ||= URI.parse(site)
end
end
def get_access_token(request_token, request_options = {}, *arguments, &block)
response = token_request(http_method, (access_token_url? ? access_token_url : access_token_path), request_token, request_options, *arguments, &block)
OAuth::AccessToken.from_hash(self, response)
end
# Makes a request to the service for a new OAuth::RequestToken
#
# @request_token = @consumer.get_request_token
#
# To include OAuth parameters:
#
# @request_token = @consumer.get_request_token \
# :oauth_callback => "http://example.com/cb"
#
# To include application-specific parameters:
#
# @request_token = @consumer.get_request_token({}, :foo => "bar")
#
# TODO oauth_callback should be a mandatory parameter
def get_request_token(request_options = {}, *arguments, &block)
# if oauth_callback wasn't provided, it is assumed that oauth_verifiers
# will be exchanged out of band
request_options[:oauth_callback] ||= OAuth::OUT_OF_BAND unless request_options[:exclude_callback]
if block_given?
response = token_request(http_method,
(request_token_url? ? request_token_url : request_token_path),
nil,
request_options,
*arguments, &block)
else
response = token_request(http_method, (request_token_url? ? request_token_url : request_token_path), nil, request_options, *arguments)
end
OAuth::RequestToken.from_hash(self, response)
end
# Creates, signs and performs an http request.
# It's recommended to use the OAuth::Token classes to set this up correctly.
# request_options take precedence over consumer-wide options when signing
# a request.
# arguments are POST and PUT bodies (a Hash, string-encoded parameters, or
# absent), followed by additional HTTP headers.
#
# @consumer.request(:get, '/people', @token, { :scheme => :query_string })
# @consumer.request(:post, '/people', @token, {}, @person.to_xml, { 'Content-Type' => 'application/xml' })
#
def request(http_method, path, token = nil, request_options = {}, *arguments)
if path !~ /^\//
@http = create_http(path)
_uri = URI.parse(path)
path = "#{_uri.path}#{_uri.query ? "?#{_uri.query}" : ""}"
end
# override the request with your own, this is useful for file uploads which Net::HTTP does not do
req = create_signed_request(http_method, path, token, request_options, *arguments)
return nil if block_given? and yield(req) == :done
rsp = http.request(req)
# check for an error reported by the Problem Reporting extension
# (http://wiki.oauth.net/ProblemReporting)
# note: a 200 may actually be an error; check for an oauth_problem key to be sure
if !(headers = rsp.to_hash["www-authenticate"]).nil? &&
(h = headers.select { |hdr| hdr =~ /^OAuth / }).any? &&
h.first =~ /oauth_problem/
# puts "Header: #{h.first}"
# TODO doesn't handle broken responses from api.login.yahoo.com
# remove debug code when done
params = OAuth::Helper.parse_header(h.first)
# puts "Params: #{params.inspect}"
# puts "Body: #{rsp.body}"
raise OAuth::Problem.new(params.delete("oauth_problem"), rsp, params)
end
rsp
end
# Creates and signs an http request.
# It's recommended to use the Token classes to set this up correctly
def create_signed_request(http_method, path, token = nil, request_options = {}, *arguments)
request = create_http_request(http_method, path, *arguments)
sign!(request, token, request_options)
request
end
# Creates a request and parses the result as url_encoded. This is used internally for the RequestToken and AccessToken requests.
# Creates a request and parses the result as url_encoded. This is used
# internally for the RequestToken and AccessToken requests.
#
# 2xx  -> yields the raw body to a block, or returns a hash of the
#         url-decoded body with both symbol and string keys.
# 3xx  -> follows the redirect once per hop (errors on self-redirects).
# 4xx  -> raises OAuth::Unauthorized.
# else -> raises via Net::HTTPResponse#error!.
def token_request(http_method, path, token = nil, request_options = {}, *arguments)
  response = request(http_method, path, token, request_options, *arguments)
  case response.code.to_i
  when (200..299)
    if block_given?
      yield response.body
    else
      # Symbolize keys; note only the FIRST value of multi-valued keys is kept.
      CGI.parse(response.body).inject({}) do |h, (k, v)|
        h[k.strip.to_sym] = v.first
        h[k.strip] = v.first
        h
      end
    end
  when (300..399)
    # this is a redirect
    uri = URI.parse(response.header['location'])
    response.error! if uri.path == path # careful of those infinite redirects
    # BUG FIX: forward the extra arguments with a splat. Passing the bare
    # `arguments` array nested it inside another array on the recursive
    # call, corrupting bodies/headers after a redirect.
    token_request(http_method, uri.path, token, request_options, *arguments)
  when (400..499)
    raise OAuth::Unauthorized, response
  else
    response.error!
  end
end
# Sign the Request object. Use this if you have an externally generated http request object you want to sign.
def sign!(request, token = nil, request_options = {})
request.oauth!(http, self, token, options.merge(request_options))
end
# Return the signature_base_string
def signature_base_string(request, token = nil, request_options = {})
request.signature_base_string(http, self, token, options.merge(request_options))
end
def site
@options[:site].to_s
end
def request_endpoint
return nil if @options[:request_endpoint].nil?
@options[:request_endpoint].to_s
end
def scheme
@options[:scheme]
end
def request_token_path
@options[:request_token_path]
end
def authorize_path
@options[:authorize_path]
end
def access_token_path
@options[:access_token_path]
end
# TODO this is ugly, rewrite
def request_token_url
@options[:request_token_url] || site + request_token_path
end
def request_token_url?
@options.has_key?(:request_token_url)
end
def authorize_url
@options[:authorize_url] || site + authorize_path
end
def authorize_url?
@options.has_key?(:authorize_url)
end
def access_token_url
@options[:access_token_url] || site + access_token_path
end
def access_token_url?
@options.has_key?(:access_token_url)
end
def proxy
@options[:proxy]
end
protected
# Instantiates the http object
# Instantiates the http object
#
# Builds the Net::HTTP instance for +_url+ (falling back to the consumer
# site), honoring the :request_endpoint and :proxy options. SSL peer
# verification is enabled only when a CA bundle is available (the
# :ca_file option or the autodetected CA_FILE constant); otherwise the
# connection is made with VERIFY_NONE.
def create_http(_url = nil)
  # A configured request endpoint overrides whatever URL was passed in.
  if !request_endpoint.nil?
    _url = request_endpoint
  end
  # NOTE(review): `_url[0] =~ /^\//` inspects only the first character;
  # on Ruby 1.8 String#[] returns an Integer so this match is always nil —
  # confirm which Ruby versions this fork must support.
  if _url.nil? || _url[0] =~ /^\//
    our_uri = URI.parse(site)
  else
    our_uri = URI.parse(_url)
  end
  if proxy.nil?
    http_object = Net::HTTP.new(our_uri.host, our_uri.port)
  else
    # Proxy may be given as a URI or a string; credentials are forwarded.
    proxy_uri = proxy.is_a?(URI) ? proxy : URI.parse(proxy)
    http_object = Net::HTTP.new(our_uri.host, our_uri.port, proxy_uri.host, proxy_uri.port, proxy_uri.user, proxy_uri.password)
  end
  http_object.use_ssl = (our_uri.scheme == 'https')
  if @options[:ca_file] || CA_FILE
    http_object.ca_file = @options[:ca_file] || CA_FILE
    http_object.verify_mode = OpenSSL::SSL::VERIFY_PEER
    http_object.verify_depth = 5
  else
    # No CA bundle found anywhere: fall back to unverified SSL.
    http_object.verify_mode = OpenSSL::SSL::VERIFY_NONE
  end
  http_object
end
# create the http request object for a given http_method and path
def create_http_request(http_method, path, *arguments)
http_method = http_method.to_sym
if [:post, :put].include?(http_method)
data = arguments.shift
end
# if the base site contains a path, add it now
uri = URI.parse(site)
path = uri.path + path if uri.path && uri.path != '/'
headers = arguments.first.is_a?(Hash) ? arguments.shift : {}
case http_method
when :post
request = Net::HTTP::Post.new(path,headers)
request["Content-Length"] = '0' # Default to 0
when :put
request = Net::HTTP::Put.new(path,headers)
request["Content-Length"] = '0' # Default to 0
when :get
request = Net::HTTP::Get.new(path,headers)
when :delete
request = Net::HTTP::Delete.new(path,headers)
when :head
request = Net::HTTP::Head.new(path,headers)
else
raise ArgumentError, "Don't know how to handle http_method: :#{http_method.to_s}"
end
if data.is_a?(Hash)
form_data = {}
data.each {|k,v| form_data[k.to_s] = v if !v.nil?}
request.set_form_data(form_data)
elsif data
if data.respond_to?(:read)
request.body_stream = data
if data.respond_to?(:length)
request["Content-Length"] = data.length.to_s
elsif data.respond_to?(:stat) && data.stat.respond_to?(:size)
request["Content-Length"] = data.stat.size.to_s
else
raise ArgumentError, "Don't know how to send a body_stream that doesn't respond to .length or .stat.size"
end
else
request.body = data.to_s
request["Content-Length"] = request.body.length.to_s
end
end
request
end
def marshal_dump(*args)
{:key => @key, :secret => @secret, :options => @options}
end
def marshal_load(data)
initialize(data[:key], data[:secret], data[:options])
end
end
end
Fixes strange behavior in my setup
require 'net/http'
require 'net/https'
require 'oauth/oauth'
require 'oauth/client/net_http'
require 'oauth/errors'
require 'cgi'
module OAuth
class Consumer
# determine the certificate authority path to verify SSL certs
# The first candidate path that exists wins; CA_FILE stays nil when no
# system CA bundle is found (create_http then disables SSL verification).
CA_FILES = %w(/etc/ssl/certs/ca-certificates.crt /usr/share/curl/curl-ca-bundle.crt)
CA_FILES.each do |ca_file|
  if File.exist?(ca_file) # File.exists? is deprecated (removed in Ruby 3.2)
    CA_FILE = ca_file
    break
  end
end
CA_FILE = nil unless defined?(CA_FILE)
@@default_options = {
# Signature method used by server. Defaults to HMAC-SHA1
:signature_method => 'HMAC-SHA1',
# default paths on site. These are the same as the defaults set up by the generators
:request_token_path => '/oauth/request_token',
:authorize_path => '/oauth/authorize',
:access_token_path => '/oauth/access_token',
:proxy => nil,
# How do we send the oauth values to the server see
# http://oauth.net/core/1.0/#consumer_req_param for more info
#
# Possible values:
#
# :header - via the Authorize header (Default) ( option 1. in spec)
# :body - url form encoded in body of POST request ( option 2. in spec)
# :query_string - via the query part of the url ( option 3. in spec)
:scheme => :header,
# Default http method used for OAuth Token Requests (defaults to :post)
:http_method => :post,
# Add a custom ca_file for consumer
# :ca_file => '/etc/certs.pem'
:oauth_version => "1.0"
}
attr_accessor :options, :key, :secret
attr_writer :site, :http
# Create a new consumer instance by passing it a configuration hash:
#
# @consumer = OAuth::Consumer.new(key, secret, {
# :site => "http://term.ie",
# :scheme => :header,
# :http_method => :post,
# :request_token_path => "/oauth/example/request_token.php",
# :access_token_path => "/oauth/example/access_token.php",
# :authorize_path => "/oauth/example/authorize.php"
# })
#
# Start the process by requesting a token
#
# @request_token = @consumer.get_request_token
# session[:request_token] = @request_token
# redirect_to @request_token.authorize_url
#
# When user returns create an access_token
#
# @access_token = @request_token.get_access_token
# @photos=@access_token.get('/photos.xml')
#
def initialize(consumer_key, consumer_secret, options = {})
@key = consumer_key
@secret = consumer_secret
# ensure that keys are symbols
@options = @@default_options.merge(options.inject({}) do |opts, (key, value)|
opts[key.to_sym] = value
opts
end)
end
# The default http method
def http_method
@http_method ||= @options[:http_method] || :post
end
# The HTTP object for the site. The HTTP Object is what you get when you do Net::HTTP.new
def http
@http ||= create_http
end
# Contains the root URI for this site
def uri(custom_uri = nil)
if custom_uri
@uri = custom_uri
@http = create_http # yike, oh well. less intrusive this way
else # if no custom passed, we use existing, which, if unset, is set to site uri
@uri ||= URI.parse(site)
end
end
def get_access_token(request_token, request_options = {}, *arguments, &block)
response = token_request(http_method, (access_token_url? ? access_token_url : access_token_path), request_token, request_options, *arguments, &block)
OAuth::AccessToken.from_hash(self, response)
end
# Makes a request to the service for a new OAuth::RequestToken
#
# @request_token = @consumer.get_request_token
#
# To include OAuth parameters:
#
# @request_token = @consumer.get_request_token \
# :oauth_callback => "http://example.com/cb"
#
# To include application-specific parameters:
#
# @request_token = @consumer.get_request_token({}, :foo => "bar")
#
# TODO oauth_callback should be a mandatory parameter
def get_request_token(request_options = {}, *arguments, &block)
# if oauth_callback wasn't provided, it is assumed that oauth_verifiers
# will be exchanged out of band
request_options[:oauth_callback] ||= OAuth::OUT_OF_BAND unless request_options[:exclude_callback]
if block_given?
response = token_request(http_method,
(request_token_url? ? request_token_url : request_token_path),
nil,
request_options,
*arguments, &block)
else
response = token_request(http_method, (request_token_url? ? request_token_url : request_token_path), nil, request_options, *arguments)
end
OAuth::RequestToken.from_hash(self, response)
end
# Creates, signs and performs an http request.
# It's recommended to use the OAuth::Token classes to set this up correctly.
# request_options take precedence over consumer-wide options when signing
# a request.
# arguments are POST and PUT bodies (a Hash, string-encoded parameters, or
# absent), followed by additional HTTP headers.
#
# @consumer.request(:get, '/people', @token, { :scheme => :query_string })
# @consumer.request(:post, '/people', @token, {}, @person.to_xml, { 'Content-Type' => 'application/xml' })
#
def request(http_method, path, token = nil, request_options = {}, *arguments)
if path !~ /^\//
@http = create_http(path)
_uri = URI.parse(path)
path = "#{_uri.path}#{_uri.query ? "?#{_uri.query}" : ""}"
end
# override the request with your own, this is useful for file uploads which Net::HTTP does not do
req = create_signed_request(http_method, path, token, request_options, *arguments)
return nil if block_given? and yield(req) == :done
rsp = http.request(req)
# check for an error reported by the Problem Reporting extension
# (http://wiki.oauth.net/ProblemReporting)
# note: a 200 may actually be an error; check for an oauth_problem key to be sure
if !(headers = rsp.to_hash["www-authenticate"]).nil? &&
(h = headers.select { |hdr| hdr =~ /^OAuth / }).any? &&
h.first =~ /oauth_problem/
# puts "Header: #{h.first}"
# TODO doesn't handle broken responses from api.login.yahoo.com
# remove debug code when done
params = OAuth::Helper.parse_header(h.first)
# puts "Params: #{params.inspect}"
# puts "Body: #{rsp.body}"
raise OAuth::Problem.new(params.delete("oauth_problem"), rsp, params)
end
rsp
end
# Creates and signs an http request.
# It's recommended to use the Token classes to set this up correctly
def create_signed_request(http_method, path, token = nil, request_options = {}, *arguments)
request = create_http_request(http_method, path, *arguments)
sign!(request, token, request_options)
request
end
# Creates a request and parses the result as url_encoded. This is used internally for the RequestToken and AccessToken requests.
# Creates a request and parses the result as url_encoded. This is used
# internally for the RequestToken and AccessToken requests.
#
# 2xx  -> yields the raw body to a block, or returns a hash of the
#         url-decoded body with both symbol and string keys.
# 3xx  -> follows the redirect once per hop (errors on self-redirects).
# 4xx  -> raises OAuth::Unauthorized.
# else -> raises via Net::HTTPResponse#error!.
def token_request(http_method, path, token = nil, request_options = {}, *arguments)
  response = request(http_method, path, token, request_options, *arguments)
  case response.code.to_i
  when (200..299)
    if block_given?
      yield response.body
    else
      # Symbolize keys; note only the FIRST value of multi-valued keys is kept.
      CGI.parse(response.body).inject({}) do |h, (k, v)|
        h[k.strip.to_sym] = v.first
        h[k.strip] = v.first
        h
      end
    end
  when (300..399)
    # this is a redirect
    uri = URI.parse(response.header['location'])
    response.error! if uri.path == path # careful of those infinite redirects
    # BUG FIX: forward the extra arguments with a splat. Passing the bare
    # `arguments` array nested it inside another array on the recursive
    # call, corrupting bodies/headers after a redirect.
    token_request(http_method, uri.path, token, request_options, *arguments)
  when (400..499)
    raise OAuth::Unauthorized, response
  else
    response.error!
  end
end
# Sign the Request object. Use this if you have an externally generated http request object you want to sign.
def sign!(request, token = nil, request_options = {})
request.oauth!(http, self, token, options.merge(request_options))
end
# Return the signature_base_string
def signature_base_string(request, token = nil, request_options = {})
request.signature_base_string(http, self, token, options.merge(request_options))
end
def site
@options[:site].to_s
end
def request_endpoint
return nil if @options[:request_endpoint].nil?
@options[:request_endpoint].to_s
end
def scheme
@options[:scheme]
end
def request_token_path
@options[:request_token_path]
end
def authorize_path
@options[:authorize_path]
end
def access_token_path
@options[:access_token_path]
end
# TODO this is ugly, rewrite
def request_token_url
@options[:request_token_url] || site + request_token_path
end
def request_token_url?
@options.has_key?(:request_token_url)
end
def authorize_url
@options[:authorize_url] || site + authorize_path
end
def authorize_url?
@options.has_key?(:authorize_url)
end
def access_token_url
@options[:access_token_url] || site + access_token_path
end
def access_token_url?
@options.has_key?(:access_token_url)
end
def proxy
@options[:proxy]
end
protected
# Instantiates the http object
def create_http(_url = nil)
if !request_endpoint.nil?
_url = request_endpoint
end
if _url.nil? || _url[0] =~ /^\//
our_uri = URI.parse(site)
else
our_uri = URI.parse(_url)
end
if proxy.nil?
http_object = Net::HTTP.new(our_uri.host, our_uri.port)
else
proxy_uri = proxy.is_a?(URI) ? proxy : URI.parse(proxy)
http_object = Net::HTTP.new(our_uri.host, our_uri.port, proxy_uri.host, proxy_uri.port, proxy_uri.user, proxy_uri.password)
end
http_object.use_ssl = (our_uri.scheme == 'https')
if @options[:ca_file] || CA_FILE
http_object.ca_file = @options[:ca_file] || CA_FILE
http_object.verify_mode = OpenSSL::SSL::VERIFY_PEER
http_object.verify_depth = 5
else
http_object.verify_mode = OpenSSL::SSL::VERIFY_NONE
end
http_object
end
# create the http request object for a given http_method and path
#
# For :post/:put the first extra argument is the request body:
#   Hash                    -> sent as form data (nil values dropped)
#   IO-like (responds :read)-> streamed via body_stream with Content-Length
#   anything else           -> #to_s is used as the raw body
# An optional trailing Hash is treated as extra HTTP headers.
def create_http_request(http_method, path, *arguments)
  http_method = http_method.to_sym
  if [:post, :put].include?(http_method)
    data = arguments.shift
  end
  # if the base site contains a path, add it now
  uri = URI.parse(site)
  path = uri.path + path if uri.path && uri.path != '/'
  headers = arguments.first.is_a?(Hash) ? arguments.shift : {}
  case http_method
  when :post
    request = Net::HTTP::Post.new(path,headers)
    # NOTE(review): this fork deliberately does NOT default Content-Length
    # to 0 for POST requests (upstream oauth gem does) — this is the only
    # diff from the previous copy of this file ("Fixes strange behavior").
    # request["Content-Length"] = '0' # Default to 0
  when :put
    request = Net::HTTP::Put.new(path,headers)
    request["Content-Length"] = '0' # Default to 0
  when :get
    request = Net::HTTP::Get.new(path,headers)
  when :delete
    request = Net::HTTP::Delete.new(path,headers)
  when :head
    request = Net::HTTP::Head.new(path,headers)
  else
    raise ArgumentError, "Don't know how to handle http_method: :#{http_method.to_s}"
  end
  if data.is_a?(Hash)
    form_data = {}
    data.each {|k,v| form_data[k.to_s] = v if !v.nil?}
    request.set_form_data(form_data)
  elsif data
    if data.respond_to?(:read)
      request.body_stream = data
      if data.respond_to?(:length)
        request["Content-Length"] = data.length.to_s
      elsif data.respond_to?(:stat) && data.stat.respond_to?(:size)
        request["Content-Length"] = data.stat.size.to_s
      else
        raise ArgumentError, "Don't know how to send a body_stream that doesn't respond to .length or .stat.size"
      end
    else
      request.body = data.to_s
      request["Content-Length"] = request.body.length.to_s
    end
  end
  request
end
def marshal_dump(*args)
{:key => @key, :secret => @secret, :options => @options}
end
def marshal_load(data)
initialize(data[:key], data[:secret], data[:options])
end
end
end
|
# CocoaPods spec for the OpenSans font bundle (v1.0.1).
# Ships the TTF files as a resource bundle plus a UIFont category.
Pod::Spec.new do |spec|
  spec.name = 'OpenSans'
  spec.version = '1.0.1'
  spec.summary = 'A podspec encapsulating OpenSans font for iOS'
  spec.description = "Open Sans is a humanist sans serif typeface designed by Steve Matteson, Type Director of Ascender Corp. This version contains the complete 897 character set, which includes the standard ISO Latin 1, Latin CE, Greek and Cyrillic character sets. Open Sans was designed with an upright stress, open forms and a neutral, yet friendly appearance. It was optimized for print, web, and mobile interfaces, and has excellent legibility characteristics in its letterforms."
  spec.license = { :type => 'Apache License, Version 2.0', :file => 'LICENSE.txt' }
  spec.authors = { 'Kyle Fuller' => 'inbox@kylefuller.co.uk' }
  spec.homepage = 'https://github.com/kylef/OpenSans-pod'
  spec.screenshot = 'http://f.cl.ly/items/2t2F032e3W0h2T1i0j1n/opensans-ios7-iphone5.png'
  spec.social_media_url = 'https://twitter.com/kylefuller'
  spec.platform = :ios
  # Tag in git matches the spec version, so bumping version re-points the source.
  spec.source = { :git => 'https://github.com/kylef/OpenSans-pod.git', :tag => spec.version.to_s }
  spec.source_files = 'UIFont+OpenSans.{h,m}'
  spec.resource_bundle = { 'OpenSans' => 'Fonts/*.ttf' }
  spec.frameworks = 'UIKit', 'CoreText'
  spec.requires_arc = true
end
Bump to 1.0.2
# CocoaPods spec for the OpenSans font bundle (v1.0.2 — version bump only).
# Ships the TTF files as a resource bundle plus a UIFont category.
Pod::Spec.new do |spec|
  spec.name = 'OpenSans'
  spec.version = '1.0.2'
  spec.summary = 'A podspec encapsulating OpenSans font for iOS'
  spec.description = "Open Sans is a humanist sans serif typeface designed by Steve Matteson, Type Director of Ascender Corp. This version contains the complete 897 character set, which includes the standard ISO Latin 1, Latin CE, Greek and Cyrillic character sets. Open Sans was designed with an upright stress, open forms and a neutral, yet friendly appearance. It was optimized for print, web, and mobile interfaces, and has excellent legibility characteristics in its letterforms."
  spec.license = { :type => 'Apache License, Version 2.0', :file => 'LICENSE.txt' }
  spec.authors = { 'Kyle Fuller' => 'inbox@kylefuller.co.uk' }
  spec.homepage = 'https://github.com/kylef/OpenSans-pod'
  spec.screenshot = 'http://f.cl.ly/items/2t2F032e3W0h2T1i0j1n/opensans-ios7-iphone5.png'
  spec.social_media_url = 'https://twitter.com/kylefuller'
  spec.platform = :ios
  # Tag in git matches the spec version, so bumping version re-points the source.
  spec.source = { :git => 'https://github.com/kylef/OpenSans-pod.git', :tag => spec.version.to_s }
  spec.source_files = 'UIFont+OpenSans.{h,m}'
  spec.resource_bundle = { 'OpenSans' => 'Fonts/*.ttf' }
  spec.frameworks = 'UIKit', 'CoreText'
  spec.requires_arc = true
end
|
# CocoaPods spec for Overcoat 2.0 (development branch).
# Split into subspecs so consumers can pick their networking stack
# (NSURLConnection vs. NSURLSession) and optional PromiseKit support.
Pod::Spec.new do |s|
  s.name     = 'Overcoat'
  s.version  = '2.0'
  s.license  = 'MIT'
  s.summary  = 'Overcoat is an AFNetworking extension that makes it extremely simple for developers to use Mantle model objects with a REST client.'
  s.homepage = 'https://github.com/gonzalezreal/Overcoat'
  s.authors  = { 'Guillermo Gonzalez' => 'gonzalezreal@icloud.com' }
  s.social_media_url = 'https://twitter.com/gonzalezreal'
  # NOTE(review): sources from a branch, not a tag — pre-release spec.
  s.source = { :git => 'https://github.com/gonzalezreal/Overcoat.git', :branch => '2.0-development' }
  s.requires_arc = true
  s.ios.deployment_target = '6.0'
  s.osx.deployment_target = '10.8'
  s.public_header_files = 'Overcoat/*.h'
  s.source_files = 'Overcoat/Overcoat.h'
  # Shared model/serialization layer required by every other subspec.
  s.subspec 'Core' do |ss|
    ss.dependency 'AFNetworking', '~> 2.0'
    ss.dependency 'Mantle', '~> 1.3'
    ss.source_files = 'Overcoat/OVCResponse.{h,m}', 'Overcoat/OVCURLMatcher.{h,m}', 'Overcoat/OVC{ModelResponse,SocialRequest}Serializer.{h,m}', 'Overcoat/OVCManagedStore.{h,m}'
    ss.frameworks = 'Foundation', 'Accounts', 'Social', 'CoreData'
  end
  # NSURLConnection-based manager (AFHTTPRequestOperationManager).
  s.subspec 'NSURLConnection' do |ss|
    ss.dependency 'Overcoat/Core'
    ss.source_files = 'Overcoat/OVCHTTPRequestOperationManager.{h,m}'
  end
  # NSURLSession-based manager (AFHTTPSessionManager).
  s.subspec 'NSURLSession' do |ss|
    ss.dependency 'Overcoat/NSURLConnection'
    ss.source_files = 'Overcoat/OVCHTTPSessionManager.{h,m}'
  end
  # Optional promise-style API on top of both managers.
  s.subspec 'PromiseKit' do |ss|
    ss.dependency 'Overcoat/NSURLConnection'
    ss.dependency 'Overcoat/NSURLSession'
    ss.dependency 'PromiseKit'
    ss.public_header_files = 'PromiseKit+Overcoat/*.h'
    ss.source_files = 'PromiseKit+Overcoat'
  end
end
Add RAC subspec
# CocoaPods spec for Overcoat 2.0 (development branch) — adds the
# ReactiveCocoa subspec alongside the existing PromiseKit one.
Pod::Spec.new do |s|
  s.name     = 'Overcoat'
  s.version  = '2.0'
  s.license  = 'MIT'
  s.summary  = 'Overcoat is an AFNetworking extension that makes it extremely simple for developers to use Mantle model objects with a REST client.'
  s.homepage = 'https://github.com/gonzalezreal/Overcoat'
  s.authors  = { 'Guillermo Gonzalez' => 'gonzalezreal@icloud.com' }
  s.social_media_url = 'https://twitter.com/gonzalezreal'
  # NOTE(review): sources from a branch, not a tag — pre-release spec.
  s.source = { :git => 'https://github.com/gonzalezreal/Overcoat.git', :branch => '2.0-development' }
  s.requires_arc = true
  s.ios.deployment_target = '6.0'
  s.osx.deployment_target = '10.8'
  s.public_header_files = 'Overcoat/*.h'
  s.source_files = 'Overcoat/Overcoat.h'
  # Shared model/serialization layer required by every other subspec.
  s.subspec 'Core' do |ss|
    ss.dependency 'AFNetworking', '~> 2.0'
    ss.dependency 'Mantle', '~> 1.3'
    ss.source_files = 'Overcoat/OVCResponse.{h,m}', 'Overcoat/OVCURLMatcher.{h,m}', 'Overcoat/OVC{ModelResponse,SocialRequest}Serializer.{h,m}', 'Overcoat/OVCManagedStore.{h,m}'
    ss.frameworks = 'Foundation', 'Accounts', 'Social', 'CoreData'
  end
  # NSURLConnection-based manager (AFHTTPRequestOperationManager).
  s.subspec 'NSURLConnection' do |ss|
    ss.dependency 'Overcoat/Core'
    ss.source_files = 'Overcoat/OVCHTTPRequestOperationManager.{h,m}'
  end
  # NSURLSession-based manager (AFHTTPSessionManager).
  s.subspec 'NSURLSession' do |ss|
    ss.dependency 'Overcoat/NSURLConnection'
    ss.source_files = 'Overcoat/OVCHTTPSessionManager.{h,m}'
  end
  # Optional promise-style API on top of both managers.
  s.subspec 'PromiseKit' do |ss|
    ss.dependency 'Overcoat/NSURLConnection'
    ss.dependency 'Overcoat/NSURLSession'
    ss.dependency 'PromiseKit'
    ss.public_header_files = 'PromiseKit+Overcoat/*.h'
    ss.source_files = 'PromiseKit+Overcoat'
  end
  # Optional signal-style API (ReactiveCocoa) on top of both managers.
  s.subspec 'ReactiveCocoa' do |ss|
    ss.dependency 'Overcoat/NSURLConnection'
    ss.dependency 'Overcoat/NSURLSession'
    ss.dependency 'ReactiveCocoa'
    ss.public_header_files = 'ReactiveCocoa+Overcoat/*.h'
    ss.source_files = 'ReactiveCocoa+Overcoat'
  end
end
|
# CocoaPods spec for PageMenu 1.2.9 (iOS 8+, Swift sources in Classes/).
Pod::Spec.new do |s|
  s.name         = "PageMenu"
  s.version      = "1.2.9"
  s.summary      = "A paging menu controller built from other view controllers allowing the user to switch between any kind of view controller."
  s.homepage     = "https://github.com/uacaps/PageMenu"
  s.license      = { :type => 'UA', :file => 'LICENSE' }
  s.author       = { "uacaps" => "nfahl@cs.ua.edu" }
  # NOTE(review): tag is hard-coded rather than derived from s.version —
  # both must be bumped together on release.
  s.source       = { :git => "https://github.com/uacaps/PageMenu.git", :tag => '1.2.9' }
  s.platform     = :ios, '8.0'
  s.source_files = 'Classes/*'
  s.requires_arc = true
end
- readied spec for 2.0.0 Swift 3 release
# CocoaPods spec for PageMenu 2.0.0 (Swift 3 release).
# Cleaned of the `pod spec create` template boilerplate.
Pod::Spec.new do |s|
  s.name        = "PageMenu"
  s.version     = "2.0.0"
  s.summary     = "Highly customizable paging controller"
  s.description = 'A paging menu controller built from other view controllers allowing the user to switch between any kind of view controller.'
  s.homepage    = "https://github.com/uacaps/PageMenu"

  # FIX: the license was commented out ("#s.license = \"AERON\""), which
  # makes `pod spec lint` fail. Restored from the previous (1.2.9) release
  # of this spec — confirm 'AERON' was not the intended license type.
  s.license     = { :type => 'UA', :file => 'LICENSE' }

  s.author      = { "Niklas Fahl" => "niklas.fahl@ua.edu", "Matthew York" => "matt.york@ua.edu" }

  # Multi-platform deployment targets.
  s.ios.deployment_target     = "8.1"
  s.osx.deployment_target     = "10.7"
  s.watchos.deployment_target = "2.0"
  s.tvos.deployment_target    = "9.0"

  # Tag is derived from the spec version so they cannot drift apart.
  s.source       = { :git => "https://github.com/uacaps/PageMenu.git", :tag => "#{s.version}" }
  s.source_files = "Classes/**/*.*"

  s.dependency "ObjectMapper"
end
|
require 'uri'
require 'json'
module OctoExtractor
class << self
# List all the Octokit client modules ruby source filepaths for the latest
# installed version.
#
# Shells out to `gem content octokit`, so the octokit gem must be installed
# and the `gem` executable must be on PATH; returns [] otherwise.
#
# @return [Array<String>] List of Octokit source ruby filepaths
def filepaths
  `gem content octokit`.split("\n").
    select { |fp| fp[-3..-1] == ".rb" && fp.include?('lib/octokit/client') }
end
# Extract GitHub API documentation links and the methods associated with it
# from an Octokit source code ruby file.
#
# Example:
#
# Octokit meta.rb source file:
#
# module Octokit
# class Client
#
# # Methods for the Meta API
# #
# # @see http://developer.github.com/v3/meta/
# module Meta
#
# # Get meta information about GitHub.com, the service.
# # @see http://developer.github.com/v3/meta/#meta
# # @return [Sawyer::Resource] Hash with meta information.
# # @example Get GitHub meta information
# # @client.github_meta
# def meta(options = {})
# get "meta", options
# end
# alias :github_meta :meta
#
# end
# end
# end
#
# Output
#
# => [{
# selectors: ["#meta"],
# method_name: "meta"
# }]
#
# @param filepath [String] Path to an Octokit ruby source file.
# @return [Array<Hash>] list of relations with :selectors and :method_name.
def process(filepath)
data = []
urls = []
method_name = nil
File.read(filepath).each_line do |line|
url = line.split(" ")
.select { |string| string =~ /developer\.github/ }
.first
urls.push url if !url.nil? && !url.empty? && url.include?("#")
if line.include? "def "
line_words = line.split(" ")
method_name = line_words[line_words.index("def") + 1]
if method_name.include?("(")
method_name = method_name.split("(")[0]
end
if !urls.empty?
data.push({
selectors: urls.map { |url| "##{url.split('#')[1]}" },
method_name: method_name,
octokit_doc_url: octokit_doc_path(filepath, method_name),
doc_paths: urls.map { |url| URI.parse(url).path }
})
end
urls = []
method_name = nil
end
end
data
end
# Extract all the method names and urls and package it up nicely into some
# json that our chrome extension can use nicely.
#
# We are going to combine all the data for specific paths into their own
# array so the extension doesn't have to parse through all the data
# unnessarily only for the methods for a single page. Here is what the
# output will look like
#
# data = {
# "/v3/repos/": [{
# selectors: ['#list-statuses-for-a-specific-ref'],
# method_name: 'statuses',
# octokit_doc_url: "Octokit/Client/Statuses.html#statuses-instance_method"
# }]
# }
#
# @return data [Hash] API Docs paths, method names and urls
def build_extension_data
data = {}
OctoExtractor.filepaths.each do |filepath|
OctoExtractor.process(filepath).each do |entity|
paths = entity.delete(:doc_paths)
paths.each do |path|
data[path] ||= []
data[path].push entity
end
end
end
data
end
# Scrap together a url for the octokit docs
#
# Input:
# filepath: '....octokit-2.6.3/lib/octokit/client/users.rb'
# method_name: 'user_authenticated?'
#
# Output:
# Octokit/Authentication.html#user_authenticated?-instance_method
#
# @param filepath [String] source file path for the method we are linking
# @param method_name [String] method name we are linking
# @return [String] relative path to method in octokit yard docs
def octokit_doc_path(filepath, method_name)
base = filepath[filepath.index('lib/octokit')+4..-1].gsub(/\.rb/, '')
.split('/')
.map { |name| name.split("_").each(&:capitalize!).join }
.join('/')
"#{base}.html##{method_name}-instance_method"
end
end
end
Include data from additional octokit clients such as enterprise
require 'uri'
require 'json'
module OctoExtractor
  class << self
    # List the ruby source filepaths of every installed Octokit client module
    # (including the enterprise clients -- the filter matches any 'client'
    # path component, not just lib/octokit/client).
    #
    # NOTE: fixed from `gem content octokit` -- the documented RubyGems
    # command is `gem contents`; the singular form is rejected and produced
    # empty output.
    #
    # @return [Array<String>] Octokit source ruby filepaths
    def filepaths
      `gem contents octokit`.split("\n")
        .select { |fp| fp[-3..-1] == ".rb" && fp.include?('client') }
    end

    # Extract GitHub API documentation links and the methods associated with
    # them from an Octokit source code ruby file.
    #
    # Example: for an Octokit meta.rb containing
    #
    #   # @see http://developer.github.com/v3/meta/#meta
    #   def meta(options = {})
    #     get "meta", options
    #   end
    #
    # the output is
    #
    #   [{ selectors: ["#meta"],
    #      method_name: "meta",
    #      octokit_doc_url: "Octokit/Client/Meta.html#meta-instance_method",
    #      doc_paths: ["/v3/meta/"] }]
    #
    # @param filepath [String] path to an Octokit ruby source file
    # @return [Array<Hash>] entries with :selectors, :method_name,
    #   :octokit_doc_url and :doc_paths
    def process(filepath)
      data = []
      urls = []
      File.read(filepath).each_line do |line|
        # Keep any developer.github.com URL that has a fragment; the fragment
        # is reused as a CSS selector on the API docs page.
        doc_url = line.split(' ').find { |word| word =~ /developer\.github/ }
        urls.push(doc_url) if doc_url && !doc_url.empty? && doc_url.include?('#')
        next unless line.include?('def ')
        words = line.split(' ')
        # NOTE(review): assumes 'def' appears as its own word on this line.
        method_name = words[words.index('def') + 1].split('(').first
        unless urls.empty?
          data.push(
            selectors: urls.map { |u| "##{u.split('#')[1]}" },
            method_name: method_name,
            octokit_doc_url: octokit_doc_path(filepath, method_name),
            doc_paths: urls.map { |u| URI.parse(u).path }
          )
        end
        # Reset so docs above one method are not attributed to the next.
        urls = []
      end
      data
    end

    # Group all extracted entries by API-docs path so the chrome extension
    # only inspects the entries for the page it is currently on:
    #
    #   { "/v3/repos/" => [{ selectors: [...], method_name: "...",
    #                        octokit_doc_url: "..." }] }
    #
    # @return [Hash{String => Array<Hash>}] docs paths mapped to method entries
    def build_extension_data
      data = {}
      filepaths.each do |filepath|
        process(filepath).each do |entity|
          entity.delete(:doc_paths).each do |path|
            (data[path] ||= []).push(entity)
          end
        end
      end
      data
    end

    # Build the relative URL of a method's page in the Octokit YARD docs.
    #
    # Example:
    #   octokit_doc_path('.../octokit-2.6.3/lib/octokit/client/users.rb',
    #                    'user_authenticated?')
    #   # => "Octokit/Client/Users.html#user_authenticated?-instance_method"
    #
    # @param filepath [String] source file path for the method we are linking
    # @param method_name [String] method name we are linking
    # @return [String] relative path to the method in the octokit yard docs
    def octokit_doc_path(filepath, method_name)
      relative = filepath[filepath.index('lib/octokit') + 4..-1]
      # Anchor at end-of-string; the previous gsub(/\.rb/, '') removed ".rb"
      # anywhere in the path, not just the file extension.
      base = relative.sub(/\.rb\z/, '')
                     .split('/')
                     .map { |segment| segment.split('_').map(&:capitalize).join }
                     .join('/')
      "#{base}.html##{method_name}-instance_method"
    end
  end
end
|
module Octospy
  # Polls GitHub repository events on a background thread and forwards each
  # parsed event message to the block supplied at construction time.
  class Worker
    attr_reader :thread

    # @param repositories [Array] repositories to watch (each responds to #to_s)
    # @param block [Proc] called with every notification message string
    def initialize(repositories, &block)
      @repositories = repositories
      @block = block
      @last_event_id = nil
      thread_loop
    end

    # Seconds between polling rounds: one API-request interval per watched
    # repository plus the base worker interval.
    def work_interval
      (Octospy.api_request_interval * @repositories.count) + Octospy.worker_interval
    end

    # Spawn the polling thread; it loops forever calling #work.
    def thread_loop
      @thread = Thread.start { loop { work } }
    end

    # One polling round. On failure, report the error and back off briefly
    # before the next round.
    def work
      notify_recent_events
      sleep work_interval
    rescue => e
      error e.message
      # FIX: was `sleep worker_interval`, but Worker defines no such method;
      # the resulting NameError escaped the rescue and killed the thread.
      sleep Octospy.worker_interval
    end

    # True when we may hit the API without exhausting the rate limit.
    # Whitelisted users have no rate-limit resource at all, which Octokit
    # surfaces as NotFound -- treat that as "go ahead".
    def api_requestable?
      limit = Octokit.rate_limit
      if limit.remaining.zero?
        notify "ใพ(;ยดะ`)๏พ #{limit}"
        false
      else
        true
      end
    rescue Octokit::NotFound
      true
    end

    # Fetch events for every watched repository, pausing between API calls.
    # Stops early -- returning what was collected so far -- once the rate
    # limit is exhausted.
    def repository_events
      @repositories.each_with_object([]) do |repo, arr|
        # FIX: a bare `break` made each_with_object return nil, so the caller
        # crashed on nil.count; break with the accumulator instead.
        break arr unless api_requestable?
        sleep Octospy.api_request_interval
        arr.concat ::Octokit.repository_events(repo.to_s)
      end
    end

    # Should this event be ignored? Yes when it is nil, when it predates the
    # 30-minute startup window (no last id seen yet), or when its id is not
    # newer than the last one we notified.
    def skipping?(event)
      case
      when event.nil?,
           @last_event_id.nil? && while_ago >= event.created_at,
           !@last_event_id.nil? && @last_event_id >= event.id.to_i
        true
      else
        false
      end
    end

    # Fetch and notify all unseen events, oldest first (ascending by id).
    def notify_recent_events
      events = repository_events
      return if events.count.zero?
      events.sort_by(&:id).each do |event|
        next if skipping?(event)
        parsed_event = Octospy.parse(event)
        next unless parsed_event
        @last_event_id = event.id.to_i
        parsed_event.each { |p| notify p[:message] }
      end
    end
    # Keep the original (misspelled) name callable for existing callers.
    alias_method :notify_recent_envets, :notify_recent_events

    private

    # Cutoff for "recent": 30 minutes ago, UTC.
    def while_ago
      Time.now.utc - (60 * 30)
    end

    def notify(message)
      @block.call message
    end

    # Emit a debug line through the block when Octospy.debug is enabled.
    def debug(name, message = nil)
      return unless Octospy.debug
      prefix = '[DEBUG]'.colorize_for_irc.orange
      info = name.colorize_for_irc.bold
      @block.call "#{prefix} #{info} #{message}"
    end

    def error(message)
      prefix = '[ERROR]'.colorize_for_irc.red
      @block.call "#{prefix} #{message}"
    end
  end
end
separate
module Octospy
  # Background worker: polls GitHub repository events in a loop and pushes
  # formatted notifications through the block given at construction.
  class Worker
    attr_reader :thread

    # @param repositories [Array] repositories to watch (each responds to #to_s)
    # @param block [Proc] receives each notification message string
    def initialize(repositories, &block)
      @repositories = repositories
      @block = block
      @last_event_id = nil
      thread_loop
    end

    # Delay between polling rounds: one API-request interval per repository
    # plus the base worker interval.
    def work_interval
      (Octospy.api_request_interval * @repositories.count) + Octospy.worker_interval
    end

    # Start the endless polling loop on its own thread.
    def thread_loop
      @thread = Thread.start { loop { work } }
    end

    # A single polling round; on error, report it and back off briefly.
    def work
      notify_recent_events
      sleep work_interval
    rescue => e
      error e.message
      # FIX: was `sleep worker_interval` -- no such method exists on Worker,
      # so the rescue path raised NameError and the polling thread died.
      sleep Octospy.worker_interval
    end

    # May we hit the API without exhausting the rate limit? Whitelisted
    # users have no rate-limit resource (Octokit raises NotFound) -- treat
    # that as unlimited.
    def api_requestable?
      limit = Octokit.rate_limit
      if limit.remaining.zero?
        notify "ใพ(;ยดะ`)๏พ #{limit}"
        false
      else
        true
      end
    rescue Octokit::NotFound
      true
    end

    # Collect events for every watched repository, pausing between requests.
    # Returns whatever has been collected so far if the rate limit runs out.
    def repository_events
      @repositories.each_with_object([]) do |repo, arr|
        # FIX: bare `break` returned nil from each_with_object, crashing the
        # caller on nil.count; break with the accumulator instead.
        break arr unless api_requestable?
        sleep Octospy.api_request_interval
        events = ::Octokit.repository_events(repo.to_s)
        arr.concat events
      end
    end

    # An event is skipped when it is nil, older than the 30-minute startup
    # window (before any id was recorded), or not newer than the last
    # notified id.
    def skipping?(event)
      case
      when event.nil?,
           @last_event_id.nil? && while_ago >= event.created_at,
           !@last_event_id.nil? && @last_event_id >= event.id.to_i
        true
      else
        false
      end
    end

    # Notify every unseen event in ascending id order.
    def notify_recent_events
      events = repository_events
      return if events.count.zero?
      events.sort_by(&:id).each do |event|
        next if skipping?(event)
        parsed_event = Octospy.parse(event)
        next unless parsed_event
        @last_event_id = event.id.to_i
        parsed_event.each { |p| notify p[:message] }
      end
    end
    # Preserve the original (misspelled) public name for existing callers.
    alias_method :notify_recent_envets, :notify_recent_events

    private

    # "Recent" cutoff: 30 minutes ago, UTC.
    def while_ago
      Time.now.utc - (60 * 30)
    end

    def notify(message)
      @block.call message
    end

    # Emit a debug line through the block when Octospy.debug is on.
    def debug(name, message = nil)
      return unless Octospy.debug
      prefix = '[DEBUG]'.colorize_for_irc.orange
      info = name.colorize_for_irc.bold
      @block.call "#{prefix} #{info} #{message}"
    end

    def error(message)
      prefix = '[ERROR]'.colorize_for_irc.red
      @block.call "#{prefix} #{message}"
    end
  end
end
|
# Gem specification for skyhook, a Ruby wrapper/library for the Steam web API.
Gem::Specification.new do |s|
  s.name = 'skyhook'
  s.version = '0.0.0'
  s.date = '2014-04-28'
  s.summary = "Steam API wrapper/library"
  s.description = "A wrapper/library for the Steam web API"
  s.authors = ["ClikeX"]
  s.email = 'w.s.van.der.meulen@gmail.com'
  # Explicit file list: new lib files must be added here to ship in the gem.
  s.files = ["lib/skyhook.rb", "lib/skyhook/core.rb", "lib/skyhook/game.rb", "lib/skyhook/user.rb"]
  s.homepage =
    'http://rubygems.org/gems/skyhook'
  s.license = 'MIT'
end
updated gemspec
# Gem specification for skyhook, a Ruby wrapper/library for the Steam web API.
Gem::Specification.new do |s|
  s.name = 'skyhook'
  s.version = '0.0.0'
  s.date = '2014-04-28'
  s.summary = "Steam API wrapper/library"
  s.description = "A wrapper/library for the Steam web API"
  s.authors = ["ClikeX"]
  s.email = 'w.s.van.der.meulen@gmail.com'
  # Explicit file list: new lib files must be added here to ship in the gem.
  s.files = ["lib/skyhook.rb", "lib/skyhook/core.rb", "lib/skyhook/game.rb", "lib/skyhook/user.rb"]
  s.homepage =
    'http://rubygems.org/gems/skyhook'
  s.license = 'MIT'
  # NOTE(review): '~> 1.9.3' pins to the 1.9 series only (< 1.10) -- confirm
  # this restriction is intentional.
  s.required_ruby_version = '~> 1.9.3'
  # `requirements` is informational only; it does not install ActiveSupport.
  s.requirements << 'ActiveSupport'
  s.post_install_message = "Thank you for installing the Steampowered Skyhook for Ruby. Have fun!"
end
|
podspec
|
# coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "clarification/version"
# Gem specification for clarification, an unofficial Clarifai API wrapper.
Gem::Specification.new do |spec|
  spec.name = "clarification"
  spec.version = Clarification::VERSION
  spec.authors = ["Nichol Alexander"]
  spec.email = ["nichol.alexander@gmail.com"]
  spec.summary = %q{An unofficial API wrapper for the Clarifai Image and Video Recognition API.}
  spec.description = %q{Alpha Version - Predict and Search using Clarifai.}
  spec.homepage = "https://github.com/nicholalexander/clarification"
  spec.license = "MIT"
  # Ship every git-tracked file except tests, specs and features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
  spec.required_ruby_version = '>=2.2'
  # json is the only runtime dependency; everything below is development-only.
  spec.add_runtime_dependency "json"
  spec.add_development_dependency "bundler", "~> 1.15"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "guard"
  spec.add_development_dependency "guard-rspec"
  spec.add_development_dependency "simplecov"
  # spec.add_development_dependency "codacy-coverage"
  spec.add_development_dependency "webmock"
  spec.add_development_dependency "vcr"
end
remove comment.
# coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "clarification/version"
# Gem specification for clarification, an unofficial Clarifai API wrapper.
Gem::Specification.new do |spec|
  spec.name = "clarification"
  spec.version = Clarification::VERSION
  spec.authors = ["Nichol Alexander"]
  spec.email = ["nichol.alexander@gmail.com"]
  spec.summary = %q{An unofficial API wrapper for the Clarifai Image and Video Recognition API.}
  spec.description = %q{Alpha Version - Predict and Search using Clarifai.}
  spec.homepage = "https://github.com/nicholalexander/clarification"
  spec.license = "MIT"
  # Ship every git-tracked file except tests, specs and features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
  spec.required_ruby_version = '>=2.2'
  # json is the only runtime dependency; everything below is development-only.
  spec.add_runtime_dependency "json"
  spec.add_development_dependency "bundler", "~> 1.15"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "guard"
  spec.add_development_dependency "guard-rspec"
  spec.add_development_dependency "simplecov"
  spec.add_development_dependency "webmock"
  spec.add_development_dependency "vcr"
end
|
# include Rack::Test::Methods
remove old empty file
|
# TL;DR: YOU SHOULD DELETE THIS FILE
#
# This file is used by web_steps.rb, which you should also delete
#
# You have been warned
module NavigationHelpers
  # Maps a human-readable page name to an application path. Used by the
  #
  #   When /^I go to (.+)$/ do |page_name|
  #
  # step definition in web_steps.rb. Patterns are tried in order; the first
  # matching `when` wins.
  #
  def path_to(page_name)
    case page_name
    when /^the "login" page$/ then '/member/sign_in'
    # NOTE(review): this route exposes the user's numeric id -- consider a slug.
    when /^the "profile" page for "(.*)"$/ then "/users/#{User.find_by_email($1).id}"
    when /^the "registration" page$/ then "/member/sign_up"
    # Add more mappings here.
    # Here is an example that pulls values out of the Regexp:
    #
    #   when /^(.*)'s profile page$/i
    #     user_profile_path(User.find_by_login($1))
    when /^the edit page for "(.*)"/ then edit_movie_path(Movie.find_by_title($1))
    when /^the details page for "(.*)"/ then movie_path(Movie.find_by_title($1))
    when /^the Similar Movies page for "(.*)"/ then similar_path(Movie.find_by_title($1))
    else
      begin
        # Fallback: turn "the foo bar page" into the foo_bar_path route helper.
        page_name =~ /^the (.*) page$/
        path_components = $1.split(/\s+/)
        self.send(path_components.push('path').join('_').to_sym)
      rescue NoMethodError, ArgumentError
        raise "Can't find mapping from \"#{page_name}\" to a path.\n" +
          "Now, go and add a mapping in #{__FILE__}"
      end
    end
  end
end
World(NavigationHelpers)
Update paths.rb
# TL;DR: YOU SHOULD DELETE THIS FILE
#
# This file is used by web_steps.rb, which you should also delete
#
# You have been warned
module NavigationHelpers
  # Maps a human-readable page name to an application path. Used by the
  #
  #   When /^I go to (.+)$/ do |page_name|
  #
  # step definition in web_steps.rb. Patterns are tried in order; the first
  # matching `when` wins.
  #
  def path_to(page_name)
    case page_name
    when /^the "login" page$/ then '/member/sign_in'
    # NOTE(review): this route exposes the user's numeric id -- consider a slug.
    when /^the "profile" page for "(.*)"$/ then "/users/#{User.find_by_email($1).id}"
    when /^the "registration" page$/ then "/member/sign_up"
    # Add more mappings here.
    # Here is an example that pulls values out of the Regexp:
    #
    #   when /^(.*)'s profile page$/i
    #     user_profile_path(User.find_by_login($1))
    else
      begin
        # Fallback: turn "the foo bar page" into the foo_bar_path route helper.
        page_name =~ /^the (.*) page$/
        path_components = $1.split(/\s+/)
        self.send(path_components.push('path').join('_').to_sym)
      rescue NoMethodError, ArgumentError
        raise "Can't find mapping from \"#{page_name}\" to a path.\n" +
          "Now, go and add a mapping in #{__FILE__}"
      end
    end
  end
end
World(NavigationHelpers)
|
require 'aruba/cucumber'
# Compare the whole captured stdout against the contents of a reference file.
Then(/^the output should be equal to file "([^"]*)"$/) do |reference_file|
  expected = File.open(reference_file).read
  assert_exact_output(expected, all_stdout)
end
# Count stdout lines; compared as strings because assert_exact_output
# expects string arguments.
Then(/^the output should have (\d+) lines$/) do |n|
  assert_exact_output(n, all_stdout.split("\n").length.to_s)
end
# Assert that nothing at all was written to stdout.
Then(/^the output should be empty$/) do
  assert_exact_output('', all_stdout)
end
# Regexp-style match against everything written to stderr.
Then(/^the errors should contain "([^"]*)"$/) do | pattern|
  assert_matching_output(pattern, all_stderr)
end
Update aruba setup
require 'aruba/cucumber'
# Compare the combined stdout of all commands run so far against the
# contents of a reference file (aruba 1.x API: all_commands).
Then(/^the output should be equal to file "([^"]*)"$/) do |reference_file|
  expected = File.open(reference_file).read
  expect(all_commands.map(&:stdout).join("\n")).to eq(expected)
end
# Count the lines of combined stdout; the captured (\d+) arrives as a
# String, hence n.to_i.
Then(/^the output should have (\d+) lines$/) do |n|
  expect(all_commands.map(&:stdout).join("\n").split("\n").length).to eq(n.to_i)
end
# Assert that no command wrote anything to stdout.
Then(/^the output should be empty$/) do
  expect(all_commands.map(&:stdout).join("\n")).to eq('')
end
# Match the combined stderr of all commands against a pattern.
Then(/^the errors should contain "([^"]*)"$/) do | pattern|
  expect(all_commands.map(&:stderr).join("\n")).to match(pattern)
end
|
# frozen_string_literal: true
module Onebox
  # Released version of the Onebox gem; bump for every release.
  VERSION = "2.1.7"
end
DEV: Bump version to 2.1.8 (#444)
# frozen_string_literal: true
module Onebox
  # Released version of the Onebox gem; bump for every release.
  VERSION = "2.1.8"
end
|
require "a1437ky_bmi3/version"
# Top-level namespace for the a1437ky_bmi3 gem (currently an empty stub).
module A1437kyBmi3
  # Your code goes here...
end
second commit
# BMI calculation ("BMI no keisan")
# Compute the Body Mass Index from height h (metres) and weight w (kg).
#
# Uses float division throughout: the original `w / h` performed integer
# division when both arguments were Integers (e.g. getBMI(2, 81) returned
# 20.0 instead of 20.25).
#
# @return [Float] BMI value
def getBMI(h, w)
  w.to_f / (h * h)
end
# Prompt for height (cm) and weight (kg) on stdin, then hand the values
# (height converted to metres) to oData.
#
# FIX: in the original, an invalid entry triggered a recursive retry but
# then FELL THROUGH and continued with the invalid (zero) value anyway;
# each retry now returns immediately.
#
# NOTE(review): the Japanese prompt strings were mojibake in the source and
# have been reconstructed ("please enter your height/weight", "please use
# half-width digits") -- confirm against the original file encoding.
# NOTE(review): gets returns nil at EOF, which would raise here (same as the
# original behaviour).
def iData
  puts "身長を入力してください\n"
  height = gets.to_f
  if height == 0
    print "半角数字で入力してください\n"
    return iData()
  end
  puts "体重を入力してください\n"
  weight = gets.to_f
  if weight == 0
    print "半角数字で入力してください\n"
    return iData()
  end
  # Convert centimetres to metres before computing BMI.
  height = height / 100
  oData(height, weight)
end
# Print the BMI value and a verdict for the given height (m) and weight (kg).
#
# FIX: the original chain used strict `>` / `<` on every branch, so the
# boundary values 18.5, 25.0 and 29.0 matched no branch and printed nothing;
# the elsif/else chain now covers the whole range (boundaries fall into the
# higher category).
#
# NOTE(review): the Japanese result strings were mojibake in the source and
# have been reconstructed ("you are underweight/normal/overweight", "way too
# fat") -- confirm against the original file encoding.
def oData(height, weight)
  bmi = getBMI(height, weight)
  print "あなたのBMI指数は[", bmi, "]です\n"
  print "あなたの診断結果は...\n\n"
  # Dramatic pause before revealing the verdict.
  sleep(3)
  if bmi < 18.5
    print "あなたは低体重です！"
  elsif bmi < 25.0
    print "あなたは標準です"
  elsif bmi < 29.0
    print "あなたは標準より太っています"
  else
    print "太りすぎ！！！！！！！"
  end
end
iData()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.