# NOTE(review): dataset-extraction artifact, not part of the original source —
# "CombinedText stringlengths 4 3.42M |" / "|---|" — remove before use.
module DTK
class AssemblyController < AuthController
helper :assembly_helper
helper :task_helper
#### create and delete actions ###
#TODO: rename to delete_and_destroy
# Deletes an assembly. Templates return module repo info so the client can
# sync its local copy; instances are deleted together with their nodes.
def rest__delete()
assembly_id,subtype = ret_assembly_params_id_and_subtype()
if subtype == :template
#returning module_repo_info so client can update this in its local module
rest_ok_response Assembly::Template.delete_and_ret_module_repo_info(id_handle(assembly_id))
else #subtype == :instance
Assembly::Instance.delete(id_handle(assembly_id),:destroy_nodes => true)
rest_ok_response
end
end
# Purges the workspace's content, destroying its nodes.
def rest__purge()
workspace = ret_workspace_object?()
workspace.purge(:destroy_nodes => true)
rest_ok_response
end
# Destroys and resets the nodes of the assembly instance.
def rest__destroy_and_reset_nodes()
assembly = ret_assembly_instance_object()
assembly.destroy_and_reset_nodes()
rest_ok_response
end
# Deletes the assembly instance record; unlike rest__delete it does not
# pass :destroy_nodes, so underlying nodes are left in place.
def rest__remove_from_system()
assembly = ret_assembly_instance_object()
Assembly::Instance.delete(assembly.id_handle())
rest_ok_response
end
# Points the workspace at the target identified by :target_id.
def rest__set_target()
workspace = ret_workspace_object?()
target = create_obj(:target_id, Target::Instance)
workspace.set_target(target)
rest_ok_response
end
# Deletes a single node from the assembly instance, destroying the node.
def rest__delete_node()
assembly = ret_assembly_instance_object()
node_idh = ret_node_id_handle(:node_id,assembly)
assembly.delete_node(node_idh,:destroy_nodes => true)
rest_ok_response
end
# Deletes a component from the assembly instance. The component is
# addressed either by full name (:cmp_full_name) or by raw id; node_id is
# passed through so the model layer can verify component/node ownership.
def rest__delete_component()
  # Retrieving node_id to validate if component belongs to node when delete-component invoked from component-level context
  node_id = ret_non_null_request_params(:node_id)
  component_id = ret_non_null_request_params(:component_id)
  assembly = ret_assembly_instance_object()
  cmp_idh =
    if ret_request_params(:cmp_full_name)
      ret_component_id_handle(:cmp_full_name,:assembly_id => assembly.id())
    else
      id_handle(component_id,:component)
    end
  assembly.delete_component(cmp_idh, node_id)
  rest_ok_response
end
#### end: create and delete actions ###
#### list and info actions ###
# Returns assembly details; encodes as YAML unless the client explicitly
# requested JSON via the :json_return param.
def rest__info()
  assembly = ret_assembly_object()
  node_id, component_id, attribute_id, return_json = ret_request_params(:node_id, :component_id, :attribute_id, :json_return)
  info = assembly.info(node_id, component_id, attribute_id)
  if return_json.eql?('true')
    rest_ok_response info
  else
    rest_ok_response info, :encode_into => :yaml
  end
end
# Renames an assembly from :assembly_name to :new_assembly_name.
def rest__rename()
assembly = ret_assembly_object()
assembly_name = ret_non_null_request_params(:assembly_name)
new_assembly_name = ret_non_null_request_params(:new_assembly_name)
rest_ok_response assembly.rename(model_handle(), assembly_name, new_assembly_name)
end
#TODO: may be cleaner if we break into list_nodes, list_components with some shared helper functions
# Generic "info about" endpoint: lists nodes/components/tasks/attributes/
# modules/targets for an assembly (legal values depend on subtype; see
# AboutEnum). Optional :format (only :yaml supported) controls encoding.
# Fix: removed the dead `response_opts = Hash.new` initialization that was
# unconditionally shadowed by the later assignment before any use.
def rest__info_about()
  node_id, component_id, detail_level, detail_to_include = ret_request_params(:node_id, :component_id, :detail_level, :detail_to_include)
  assembly,subtype = ret_assembly_params_object_and_subtype()
  if format = ret_request_params(:format)
    format = format.to_sym
    unless SupportedFormats.include?(format)
      raise ErrorUsage.new("Illegal format (#{format}) specified; it must be one of: #{SupportedFormats.join(',')}")
    end
  end
  about = ret_non_null_request_params(:about).to_sym
  unless AboutEnum[subtype].include?(about)
    raise ErrorUsage::BadParamValue.new(:about,AboutEnum[subtype])
  end
  # filter rows by node/component id and drop hidden attributes
  filter_proc = Proc.new do |e|
    ret_val = check_element(e,[:node,:id],node_id) && check_element(e,[:attribute,:component_component_id],component_id) && e
    ret_val = nil if (e[:attribute] and e[:attribute][:hidden])
    ret_val
  end
  opts = Opts.new(:filter_proc => filter_proc, :detail_level => detail_level)
  opts.add_return_datatype!()
  if detail_to_include
    opts.merge!(:detail_to_include => detail_to_include.map{|r|r.to_sym})
    opts.add_value_to_return!(:datatype)
  end
  if about == :attributes
    # yaml output gets raw values; other formats truncate long values
    if format == :yaml
      opts.merge!(:raw_attribute_value => true,:mark_unset_required => true)
    else
      opts.merge!(:truncate_attribute_values => true,:mark_unset_required => true)
    end
  end
  if node_id
    opts.merge!(:node_cmp_name => true) unless node_id.empty?
  end
  data = assembly.info_about(about, opts)
  datatype = opts.get_datatype
  response_opts = Hash.new
  if format == :yaml
    response_opts.merge!(:encode_into => :yaml)
  else
    response_opts.merge!(:datatype => datatype)
  end
  rest_ok_response data, response_opts
end
SupportedFormats = [:yaml]
# Returns the serialized task template for :task_action as YAML, or a
# plain message when no task has been generated yet for the assembly.
def rest__info_about_task()
assembly = ret_assembly_instance_object()
task_action = ret_request_params(:task_action)
opts = {:donot_parse => true,:action_types=>[:assembly]}
response = assembly.get_task_template_serialized_content(task_action,opts)
response_opts = Hash.new
if response
response_opts.merge!(:encode_into => :yaml)
else
response = {:message => "Task not yet generated for assembly (#{assembly.get_field?(:display_name)})"}
end
rest_ok_response response, response_opts
end
# Cancels a task: uses :task_id when given, otherwise the most recently
# executing task for this assembly (error if none is running).
def rest__cancel_task()
assembly = ret_assembly_instance_object()
unless top_task_id = ret_request_params(:task_id)
unless top_task = get_most_recent_executing_task([:eq,:assembly_id,assembly.id()])
raise ErrorUsage.new("No running tasks found")
end
top_task_id = top_task.id()
end
cancel_task(top_task_id)
rest_ok_response :task_id => top_task_id
end
# Lists the modules used by the assembly templates given in :assemblies.
def rest__list_modules()
ids = ret_request_params(:assemblies)
assembly_templates = get_assemblies_from_ids(ids)
components = Assembly::Template.list_modules(assembly_templates)
rest_ok_response components
end
# Prepares a component or service module of the assembly for editing;
# raises on any other :module_type value.
def rest__prepare_for_edit_module()
assembly = ret_assembly_instance_object()
module_type = ret_non_null_request_params(:module_type)
response =
case module_type.to_sym
when :component_module
component_module = create_obj(:module_name,ComponentModule)
AssemblyModule::Component.prepare_for_edit(assembly,component_module)
when :service_module
modification_type = ret_non_null_request_params(:modification_type).to_sym
AssemblyModule::Service.prepare_for_edit(assembly,modification_type)
else
raise ErrorUsage.new("Illegal module_type #{module_type}")
end
rest_ok_response response
end
# Promotes edits made in an assembly-scoped component module back to the
# base module. Only :component_module is supported; :force is passed
# through as a boolean option.
# Fix: removed the unused local `module_name` — the non-null check is
# retained for its validation side effect; the value itself is consumed
# by create_obj.
def rest__promote_module_updates()
  assembly = ret_assembly_instance_object()
  module_type = ret_non_null_request_params(:module_type)
  unless module_type.to_sym == :component_module
    raise Error.new("promote_module_changes only treats component_module type")
  end
  ret_non_null_request_params(:module_name)
  component_module = create_obj(:module_name,ComponentModule)
  opts = ret_boolean_params_hash(:force)
  rest_ok_response AssemblyModule::Component.promote_module_updates(assembly,component_module,opts)
end
# Creates a simple dependency from one component template to an
# antecedent component template within the assembly.
def rest__create_component_dependency()
assembly = ret_assembly_instance_object()
cmp_template = ret_component_template(:component_template_id)
antecedent_cmp_template = ret_component_template(:antecedent_component_template_id)
type = :simple
AssemblyModule::Component.create_component_dependency?(type,assembly,cmp_template,antecedent_cmp_template)
rest_ok_response
end
# checks element through set of fields
# Walks +fields+ into the nested +element+ hash and compares the leaf
# value against the integer form of +element_id_val+.
# Returns true when no id filter is given (nil/empty), false when the
# element or any intermediate field is missing.
def check_element(element, fields, element_id_val)
  return true if element_id_val.nil? || element_id_val.empty?
  return false if element.nil?
  leaf = fields.reduce(element.dup) do |current, field|
    value = current[field]
    return false if value.nil?
    value
  end
  leaf == element_id_val.to_i
end
AboutEnum = {
:instance => [:nodes,:components,:tasks,:attributes,:modules],
:template => [:nodes,:components,:targets]
}
FilterProc = {
:attributes => lambda{|attr|not attr[:hidden]}
}
# Creates ad hoc attribute links between a source and a target attribute
# term. :update_meta defaults to true; it is suppressed only when the
# client explicitly passes a false value.
def rest__add_ad_hoc_attribute_links()
  assembly = ret_assembly_instance_object()
  target_attr_term,source_attr_term = ret_non_null_request_params(:target_attribute_term,:source_attribute_term)
  update_meta = ret_request_params(:update_meta)
  opts = Hash.new
  opts.merge!(:update_meta => true) if update_meta.nil? || update_meta
  AttributeLink::AdHoc.create_adhoc_links(assembly,target_attr_term,source_attr_term,opts)
  rest_ok_response
end
# Deletes the service (port) link identified by the request params.
def rest__delete_service_link()
port_link = ret_port_link()
Model.delete_instance(port_link.id_handle())
rest_ok_response
end
# Adds a service link between an input and an output component of the
# assembly; returns the new link's id.
def rest__add_service_link()
assembly = ret_assembly_instance_object()
assembly_id = assembly.id()
input_cmp_idh = ret_component_id_handle(:input_component_id,:assembly_id => assembly_id)
output_cmp_idh = ret_component_id_handle(:output_component_id,:assembly_id => assembly_id)
opts = ret_params_hash(:dependency_name)
service_link_idh = assembly.add_service_link?(input_cmp_idh,output_cmp_idh,opts)
rest_ok_response :service_link => service_link_idh.get_id()
end
# TODO-marked stub: computes the port link's attribute mappings but only
# pretty-prints them to stdout (`pp` — leftover debug output) and returns
# an empty OK response. NOTE(review): replace `pp` with a real payload.
def rest__list_attribute_mappings()
port_link = ret_port_link()
#TODO: stub
ams = port_link.list_attribute_mappings()
pp ams
rest_ok_response
end
# Lists service links for the assembly, optionally restricted to links
# whose input component matches :component_id; :context defaults to
# :assembly.
def rest__list_service_links()
assembly = ret_assembly_instance_object()
component_id = ret_component_id?(:component_id,:assembly_id => assembly.id())
context = (ret_request_params(:context)||:assembly).to_sym
opts = {:context => context}
if component_id
opts.merge!(:filter => {:input_component_id => component_id})
end
ret = assembly.list_service_links(opts)
rest_ok_response ret
end
#TODO: deprecate below for above
# Deprecated listing endpoint; only the :find_possible branch still
# works — the other branches deliberately raise "Deprecated".
def rest__list_connections()
assembly = ret_assembly_instance_object()
find_missing,find_possible = ret_request_params(:find_missing,:find_possible)
ret =
if find_possible
assembly.list_connections__possible()
elsif find_missing
raise Error.new("Deprecated")
else
raise Error.new("Deprecated")
end
rest_ok_response ret
end
# Lists the service add-ons available for the assembly.
def rest__list_possible_add_ons()
assembly = ret_assembly_instance_object()
rest_ok_response assembly.get_service_add_ons()
end
# Lists the assembly's attributes in print form, optionally narrowed by
# the :filter request param (symbolized when present).
def rest__get_attributes()
  filter = ret_request_params(:filter)
  filter = filter.to_sym if filter
  assembly = ret_assembly_instance_object()
  rest_ok_response assembly.get_attributes_print_form(Opts.new(:filter => filter))
end
# Returns the workspace object for the current model handle.
def rest__workspace_object()
rest_ok_response Assembly::Instance.get_workspace_object(model_handle(),{})
end
# Lists assemblies: instances directly; templates scoped to the default
# project with version suffixes included.
def rest__list()
subtype = ret_assembly_subtype()
result =
if subtype == :instance
opts = ret_params_hash(:filter,:detail_level)
Assembly::Instance.list(model_handle(),opts)
else
project = get_default_project()
opts = {:version_suffix => true}.merge(ret_params_hash(:filter,:detail_level))
Assembly::Template.list(model_handle(),opts.merge(:project_idh => project.id_handle()))
end
rest_ok_response result
end
# Lists assembly instances including the workspace pseudo-assembly.
def rest__list_with_workspace()
opts = ret_params_hash(:filter)
rest_ok_response Assembly::Instance.list_with_workspace(model_handle(),opts)
end
#### end: list and info actions ###
#TODO: update what input can be
#the body has an array each element of form
# {:pattern => PAT, :value => VAL}
#pat can be one of three forms
#1 - an id
#2 - a name of form ASSEM-LEVEL-ATTR or NODE/COMONENT/CMP-ATTR, or
#3 - a pattern (TODO: give syntax) that can pick out multiple vars
# this returns same output as info about attributes, pruned for just new ones set
#TODO: this is a minsnomer in that it can be used to just create attributes
# Sets (or, when :create is true, creates) attributes from the posted
# pattern/value pairs. Creation-only options (:required, :dynamic,
# :datatype) are rejected unless :create is set; :update_meta defaults to
# true unless explicitly passed as false.
def rest__set_attributes()
assembly = ret_assembly_instance_object()
av_pairs = ret_params_av_pairs()
opts = ret_params_hash(:format,:context,:create)
create_options = ret_boolean_params_hash(:required,:dynamic)
if semantic_data_type = ret_request_params(:datatype)
unless Attribute::SemanticDatatype.isa?(semantic_data_type)
raise ErrorUsage.new("The term (#{semantic_data_type}) is not a valid data type")
end
create_options.merge!(:semantic_data_type => semantic_data_type)
end
unless create_options.empty?
unless opts[:create]
raise ErrorUsage.new("Options (#{create_options.values.join(',')}) can only be given if :create is true")
end
opts.merge!(:attribute_properties => create_options)
end
#update_meta == true is the default
update_meta = ret_request_params(:update_meta)
unless !update_meta.nil? and !update_meta
opts.merge!(:update_meta => true)
end
assembly.set_attributes(av_pairs,opts)
rest_ok_response
end
#### actions to update and create assembly templates
# Promotes an assembly instance to a template in the default project's
# service module; returns clone/update info for the client.
def rest__promote_to_template()
assembly = ret_assembly_instance_object()
assembly_template_name,service_module_name = get_template_and_service_names_params(assembly)
if assembly_template_name.nil? or service_module_name.nil?
raise ErrorUsage.new("SERVICE-NAME/ASSEMBLY-NAME cannot be determined and must be explicitly given")
end
project = get_default_project()
opts = ret_symbol_params_hash(:mode)
service_module = Assembly::Template.create_or_update_from_instance(project,assembly,service_module_name,assembly_template_name,opts)
clone_update_info = service_module.ret_clone_update_info()
rest_ok_response clone_update_info
end
#### end: actions to update and create assembly templates
#### methods to modify the assembly instance
# Adds a node to the assembly instance using an optional node-binding
# ruleset derived from :node_template_identifier.
def rest__add_node()
assembly = ret_assembly_instance_object()
assembly_node_name = ret_non_null_request_params(:assembly_node_name)
node_binding_rs = node_binding_ruleset?(:node_template_identifier)
node_instance_idh = assembly.add_node(assembly_node_name,node_binding_rs)
rest_ok_response node_instance_idh
end
# Adds a component (from a template, with optional title) to a node of
# the assembly; returns the new component's id.
def rest__add_component()
assembly = ret_assembly_instance_object()
component_template, component_title = ret_component_template_and_title_for_assembly(:component_template_id,assembly)
#not checking here if node_id points to valid object; check is in add_component
node_idh = ret_request_param_id_handle(:node_id,Node)
new_component_idh = assembly.add_component(node_idh,component_template,component_title)
rest_ok_response(:component_id => new_component_idh.get_id())
end
# Adds the content of an assembly template into this assembly instance.
def rest__add_assembly_template()
assembly = ret_assembly_instance_object()
assembly_template = ret_assembly_template_object(:assembly_template_id)
assembly.add_assembly_template(assembly_template)
rest_ok_response
end
# Adds a named service add-on to the assembly; returns the id of the new
# sub-assembly it creates.
def rest__add_service_add_on()
assembly = ret_assembly_instance_object()
add_on_name = ret_non_null_request_params(:service_add_on_name)
new_sub_assembly_idh = assembly.service_add_on(add_on_name)
rest_ok_response(:sub_assembly_id => new_sub_assembly_idh.get_id())
end
#### end: methods to modify the assembly instance
#### method(s) related to staging assembly template
# Stages an assembly template into a target (default target when no
# :target_id is given) and returns the new service instance's name and id
# encoded as YAML.
def rest__stage()
target_id = ret_request_param_id_optional(:target_id, ::DTK::Target::Instance)
target = target_idh_with_default(target_id).create_object(:model_name => :target_instance)
assembly_template = ret_assembly_template_object()
assembly_name = ret_request_params(:name)
new_assembly_obj = assembly_template.stage(target,assembly_name)
response = {
:new_service_instance => {
:name => new_assembly_obj.display_name_print_form,
:id => new_assembly_obj.id()
}
}
rest_ok_response(response,:encode_into => :yaml)
end
#### end: method(s) related to staging assembly template
#### creates tasks to execute/converge assemblies and monitor status
# Runs violation checks on the assembly and returns a table of
# {:type, :description} rows ordered by type name.
def rest__find_violations()
  assembly = ret_assembly_instance_object()
  rows = assembly.find_violations().map do |violation|
    {:type => violation.type(), :description => violation.description()}
  end
  rest_ok_response rows.sort_by{|row|row[:type].to_s}
end
# Creates a converge task for the assembly. When the assembly is stopped,
# first asks the client for confirmation (unless :start_assembly is set),
# starts the stopped nodes asynchronously, and creates a task that runs
# once nodes are ready; otherwise creates a plain converge task.
# Fix: removed the duplicated assignment `user_object = user_object = …`.
def rest__create_task()
  assembly = ret_assembly_instance_object()
  task = nil
  if assembly.is_stopped?
    start_assembly = ret_request_params(:start_assembly)
    # ask the client to confirm before implicitly starting a stopped assembly
    return rest_ok_response :confirmation_message => true if start_assembly.nil?
    # NOTE(review): return value unused; call retained for any request-param
    # validation side effect — confirm before removing
    ret_request_param_id_handle(:assembly_id,Assembly::Instance)
    node_pattern = ret_request_params(:node_pattern)
    # filters only stopped nodes for this assembly
    nodes = assembly.get_nodes(:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status)
    assembly_name = Assembly::Instance.pretty_print_name(assembly)
    nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :stopped)
    unless is_valid
      Log.info(error_msg)
      return rest_ok_response(:errors => [error_msg])
    end
    user_object = ::DTK::CurrentSession.new.user_object()
    CreateThread.defer_with_session(user_object) do
      # invoking command to start the nodes
      CommandAndControl.start_instances(nodes)
    end
    #TODO: not doing at this point puppet version per run; it just can be set when node is created
    opts = ret_params_hash(:commit_msg,:puppet_version)
    task = Task.create_and_start_from_assembly_instance(assembly,opts)
  else
    raise ErrorUsage, "Task is already running on requested nodes. Please wait until task is complete" if assembly.are_nodes_running?
    #TODO: not doing at this point puppet version per run; it just can be set when node is created
    opts = ret_params_hash(:commit_msg,:puppet_version)
    task = Task.create_from_assembly_instance(assembly,opts)
  end
  task.save!()
  # TODO: this was called from gui commit window
  # pp Attribute.augmented_attribute_list_from_task(task)
  rest_ok_response :task_id => task.id
end
# leaving this commented until we test out if methode above works properly
# def rest__create_task()
# assembly = ret_assembly_instance_object()
# if assembly.is_stopped?
# validate_params = [
# :action => :start,
# :params => {:assembly => assembly[:id]},
# :wait_for_complete => {:type => :assembly, :id => assembly[:id]}
# ]
# return rest_validate_response("Assembly is stopped, you need to start it.", validate_params)
# end
# if assembly.are_nodes_running?
# raise ErrorUsage, "Task is already running on requested nodes. Please wait until task is complete"
# end
# opts = ret_params_hash(:commit_msg,:puppet_version)
# task = Task.create_from_assembly_instance(assembly,opts)
# task.save!()
# # TODO: this was called from gui commit window
# # pp Attribute.augmented_attribute_list_from_task(task)
# rest_ok_response :task_id => task.id
# end
# #TODO: replace or given options to specify specific smoketests to run
# def rest__create_smoketests_task()
# assembly = ret_assembly_instance_object()
# opts = ret_params_hash(:commit_msg).merge(:component_type => :smoketest)
# task = Task.create_from_assembly_instance(assembly,opts)
# task.save!()
# rest_ok_response :task_id => task.id
# end
# Clears the assembly's task history.
def rest__clear_tasks()
assembly = ret_assembly_instance_object()
assembly.clear_tasks()
rest_ok_response
end
# Starts the assembly's stopped nodes asynchronously and creates a task
# that converges once the nodes are ready; returns the task id.
# Fix: removed the dead `task = nil` initialization (task is assigned
# unconditionally below before first use).
def rest__start()
  assembly = ret_assembly_instance_object()
  assembly_idh = ret_request_param_id_handle(:assembly_id,Assembly::Instance)
  node_pattern = ret_request_params(:node_pattern)
  # filters only stopped nodes for this assembly
  nodes = assembly.get_nodes(:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status)
  assembly_name = Assembly::Instance.pretty_print_name(assembly)
  nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :stopped)
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => [error_msg])
  end
  queue = SimpleActionQueue.new
  user_object = ::DTK::CurrentSession.new.user_object()
  CreateThread.defer_with_session(user_object) do
    # invoking command to start the nodes
    CommandAndControl.start_instances(nodes)
  end
  opts = {}
  opts.merge!(:node => nodes.first) if (nodes.size == 1)
  task = Task.task_when_nodes_ready_from_assembly(assembly_idh.create_object(),:assembly, opts)
  task.save!()
  queue.set_result(:task_id => task.id)
  rest_ok_response :task_id => task.id
end
# Stops the assembly's running nodes after pattern/status validation.
def rest__stop()
assembly = ret_assembly_instance_object()
node_pattern = ret_request_params(:node_pattern)
nodes = assembly.get_nodes(:id,:display_name,:type, :external_ref,:admin_op_status)
# NOTE(review): assembly_idh is assigned but never used — possibly kept
# for a param-validation side effect; confirm before removing
assembly_idh = ret_request_param_id_handle(:assembly_id,Assembly::Instance)
assembly_name = Assembly::Instance.pretty_print_name(assembly)
nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :running)
unless is_valid
Log.info(error_msg)
return rest_ok_response(:errors => [error_msg])
end
CommandAndControl.stop_instances(nodes)
rest_ok_response :status => :ok
end
##
# Validates that the given nodes can be started/stopped, optionally
# narrowing the list by a node-id (or display-name) pattern.
#
# * *Args* :
#   - +assembly_name+ -> assembly print name (used in error messages)
#   - +nodes+ -> candidate node list
#   - +node_pattern+ -> optional pattern matched against node ids
#   - +status_pattern+ -> required node status (e.g. :running, :stopped)
# * *Returns* (in this order):
#   - filtered node list
#   - validity flag
#   - error message when invalid, otherwise nil
#
def nodes_valid_for_aws?(assembly_name, nodes, node_pattern, status_pattern)
  # narrow by id pattern first, falling back to an exact display-name match
  unless node_pattern.nil?
    id_regexp = Regexp.new(node_pattern)
    candidates = nodes
    nodes = candidates.select{|node|id_regexp =~ node.id.to_s}
    if nodes.empty?
      nodes = candidates.select{|node|node_pattern.to_s.eql?(node.display_name.to_s)}
      return nodes, false, "No nodes have been matched via ID ~ '#{node_pattern}'." if nodes.empty?
    end
  end
  # staged nodes cannot be started/stopped
  if nodes.any?{|node|node[:type] == "staged"}
    return nodes, false, "Nodes for assembly '#{assembly_name}' are 'staged' and as such cannot be started/stopped."
  end
  # match requested status; 'pending' is accepted alongside either state
  status_regexp = Regexp.new("#{status_pattern.to_s}|pending")
  matching = nodes.select{|node|node[:admin_op_status] =~ status_regexp}
  if matching.empty?
    return nodes, false, "There are no #{status_pattern} nodes for assembly '#{assembly_name}'."
  end
  return matching, true, nil
end
# Kicks off asynchronous log retrieval; results are fetched later via
# rest__get_action_results with the returned queue id.
def rest__initiate_get_log()
assembly = ret_assembly_instance_object()
params = ret_params_hash(:node_identifier,:log_path, :start_line)
queue = ActionResultsQueue.new
assembly.initiate_get_log(queue, params)
rest_ok_response :action_results_id => queue.id
end
# Grants or revokes SSH public-key access for a system user across the
# assembly's nodes. Validates the current grant state first (cannot
# revoke what is not granted; cannot grant when all nodes already have
# it), then runs the agent action asynchronously.
def rest__initiate_ssh_pub_access()
assembly = ret_assembly_instance_object()
params = ret_params_hash(:rsa_pub_name, :rsa_pub_key, :system_user)
agent_action = ret_non_null_request_params(:agent_action)
system_user, key_name = params[:system_user], params[:rsa_pub_name]
data_exists, data_exists_on_every_node = Component::Instance::Interpreted.check_existance?(assembly, system_user, key_name)
if agent_action.to_sym == :revoke_access && !data_exists
raise ErrorUsage.new("Access is not granted to system user '#{system_user}' with name '#{key_name}'")
end
if agent_action.to_sym == :grant_access && data_exists_on_every_node
raise ErrorUsage.new("All nodes already have access to system user '#{system_user}' with name '#{key_name}'")
end
queue = ActionResultsQueue.new
assembly.initiate_ssh_agent_action(agent_action.to_sym, queue, params)
rest_ok_response :action_results_id => queue.id
end
# Lists the SSH access entries granted on the assembly's nodes.
def rest__list_ssh_access()
assembly = ret_assembly_instance_object()
rest_ok_response Component::Instance::Interpreted.list_ssh_access(assembly)
end
# Kicks off an asynchronous grep over node logs; results are fetched via
# rest__get_action_results with the returned queue id.
def rest__initiate_grep()
assembly = ret_assembly_instance_object()
params = ret_params_hash(:node_pattern, :log_path, :grep_pattern, :stop_on_first_match)
queue = ActionResultsQueue.new
assembly.initiate_grep(queue, params)
rest_ok_response :action_results_id => queue.id
end
# Returns task status for the assembly; :format defaults to :hash.
def rest__task_status()
assembly_id = ret_request_param_id(:assembly_id,Assembly::Instance)
format = (ret_request_params(:format)||:hash).to_sym
response = Task::Status::Assembly.get_status(id_handle(assembly_id),:format => format)
rest_ok_response response
end
### mcollective actions
# Kicks off asynchronous netstat collection on a node; results are
# fetched via rest__get_action_results.
def rest__initiate_get_netstats()
node_id = ret_non_null_request_params(:node_id)
assembly = ret_assembly_instance_object()
queue = ActionResultsQueue.new
assembly.initiate_get_netstats(queue, node_id)
rest_ok_response :action_results_id => queue.id
end
# Kicks off asynchronous process-list collection on a node; results are
# fetched via rest__get_action_results.
def rest__initiate_get_ps()
node_id = ret_non_null_request_params(:node_id)
assembly = ret_assembly_instance_object()
queue = ActionResultsQueue.new
assembly.initiate_get_ps(queue, node_id)
rest_ok_response :action_results_id => queue.id
end
# Kicks off asynchronous test execution for the given components on a
# node; results are fetched via rest__get_action_results.
def rest__initiate_execute_tests()
node_id, components = ret_non_null_request_params(:node_id, :components)
assembly = ret_assembly_instance_object()
queue = ActionResultsQueue.new
assembly.initiate_execute_tests(queue, node_id, components)
rest_ok_response :action_results_id => queue.id
end
# Polls results of a previously initiated asynchronous action. Reads from
# SimpleActionQueue or ActionResultsQueue depending on
# :using_simple_queue; an optional :sort_key sorts the results.
def rest__get_action_results()
#TODO: to be safe need to garbage collect on ActionResultsQueue in case miss anything
action_results_id = ret_non_null_request_params(:action_results_id)
ret_only_if_complete = ret_request_param_boolean(:return_only_if_complete)
disable_post_processing = ret_request_param_boolean(:disable_post_processing)
sort_key = ret_request_params(:sort_key)
if ret_request_param_boolean(:using_simple_queue)
rest_ok_response SimpleActionQueue.get_results(action_results_id)
else
if sort_key
sort_key = sort_key.to_sym
rest_ok_response ActionResultsQueue.get_results(action_results_id,ret_only_if_complete,disable_post_processing, sort_key)
else
rest_ok_response ActionResultsQueue.get_results(action_results_id,ret_only_if_complete,disable_post_processing)
end
end
end
### end: mcollective actions
#TDODO: got here in cleanup of rest calls
# Lists the smoketests defined for the assembly.
def rest__list_smoketests()
assembly = ret_assembly_object()
rest_ok_response assembly.list_smoketests()
end
# Debug/test helper: returns the items belonging to the assembly
# (component) with the given +id+.
def test_get_items(id)
  items = id_handle(id,:component).create_object().get_items()
  {:data => items}
end
# Searches library-level composite components by regex-matching request
# params against common columns, then decorates each row with an i18n
# title and an icon path for the UI.
def search
params = request.params.dup
cols = model_class(:component).common_columns()
# build one regex conjunct per request param that names a known column
filter_conjuncts = params.map do |name,value|
[:regex,name.to_sym,"^#{value}"] if cols.include?(name.to_sym)
end.compact
#restrict results to belong to library and not nested in assembly
filter_conjuncts += [[:eq,:type,"composite"],[:neq,:library_library_id,nil],[:eq,:assembly_id,nil]]
sp_hash = {
:cols => cols,
:filter => [:and] + filter_conjuncts
}
component_list = Model.get_objs(model_handle(:component),sp_hash).each{|r|r.materialize!(cols)}
i18n = get_i18n_mappings_for_models(:component)
# NOTE(review): block param `model` is unused; the body mutates rows via
# component_list[index] (same objects)
component_list.each_with_index do |model,index|
component_list[index][:model_name] = :component
component_list[index][:ui] ||= {}
component_list[index][:ui][:images] ||= {}
# name = component_list[index][:display_name]
name = Assembly.pretty_print_name(component_list[index])
title = name.nil? ? "" : i18n_string(i18n,:component,name)
#TODO: change after implementing all the new types and making generic icons for them
model_type = 'service'
model_sub_type = 'db'
model_type_str = "#{model_type}-#{model_sub_type}"
prefix = "#{R8::Config[:base_images_uri]}/v1/componentIcons"
png = component_list[index][:ui][:images][:tnail] || "unknown-#{model_type_str}.png"
component_list[index][:image_path] = "#{prefix}/#{png}"
component_list[index][:i18n] = title
end
return {:data=>component_list}
end
# Placeholder endpoint; returns canned tree data regardless of +id+.
def get_tree(id)
  {:data => 'some tree data goes here'}
end
# Materializes an assembly-template object for each of the given ids.
def get_assemblies_from_ids(ids)
  ids.map do |id|
    id_handle(id.to_i,:component).create_object(:model_name => :assembly_template)
  end
end
#TODO: unify with clone(id)
#clone assembly from library to target
#TODO: unify with clone(id)
#clone assembly from library to target
# Clones an assembly template into a target (default target when no
# target_id param), optionally renaming it, then recomputes UI positions
# for the cloned nodes.
# NOTE(review): the local `id_handle` shadows the #id_handle helper
# method after assignment — works here, but a rename would be clearer.
def stage()
target_idh = target_idh_with_default(request.params["target_id"])
assembly_id = ret_request_param_id(:assembly_id,::DTK::Assembly::Template)
#TODO: if naem given and not unique either reject or generate a -n suffix
assembly_name = ret_request_params(:name)
id_handle = id_handle(assembly_id)
#TODO: need to copy in avatar when hash["ui"] is non null
override_attrs = Hash.new
override_attrs[:display_name] = assembly_name if assembly_name
target_object = target_idh.create_object()
clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
new_assembly_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
id = new_assembly_obj && new_assembly_obj.id()
#compute ui positions
nested_objs = new_assembly_obj.get_node_assembly_nested_objects()
#TODO: this does not leverage assembly node relative positions
nested_objs[:nodes].each do |node|
target_object.update_ui_for_new_item(node[:id])
end
rest_ok_response(:assembly_id => id)
end
#clone assembly from library to target
# Clones an assembly into a target and returns the nested node/port-link
# data, adjusting port locations (ganglia hack) and node UI coordinates
# relative to the topmost node under the parent datacenter.
# NOTE(review): intricate, order-dependent UI math — left byte-identical.
def clone(id)
handle_errors do
# local `id_handle` shadows the #id_handle helper after assignment
id_handle = id_handle(id)
hash = request.params
target_id_handle = nil
if hash["target_id"] and hash["target_model_name"]
input_target_id_handle = id_handle(hash["target_id"].to_i,hash["target_model_name"].to_sym)
target_id_handle = Model.find_real_target_id_handle(id_handle,input_target_id_handle)
else
Log.info("not implemented yet")
return redirect "/xyz/#{model_name()}/display/#{id.to_s}"
end
#TODO: need to copy in avatar when hash["ui"] is non null
override_attrs = hash["ui"] ? {:ui=>hash["ui"]} : {}
target_object = target_id_handle.create_object()
clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
new_assembly_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
id = new_assembly_obj && new_assembly_obj.id()
nested_objs = new_assembly_obj.get_node_assembly_nested_objects()
#just want external ports
(nested_objs[:nodes]||[]).each{|n|(n[:ports]||[]).reject!{|p|p[:type] == "component_internal"}}
#TODO: ganglia hack: remove after putting this info in teh r8 meta files
(nested_objs[:nodes]||[]).each do |n|
(n[:ports]||[]).each do |port|
if port[:display_name] =~ /ganglia__server/
port[:location] = "east"
elsif port[:display_name] =~ /ganglia__monitor/
port[:location] = "west"
end
end
end
#TODO: get node positions going for assemblies
#compute uui positions
parent_id = request.params["parent_id"]
assembly_left_pos = request.params["assembly_left_pos"]
# node_list = get_objects(:node,{:assembly_id=>id})
dc_hash = get_object_by_id(parent_id,:datacenter)
raise Error.new("Not implemented when parent_id is not a datacenter") if dc_hash.nil?
#get the top most item in the list to set new positions
top_node = {}
top_most = 2000
# node_list.each do |node|
nested_objs[:nodes].each do |node|
# node = create_object_from_id(node_hash[:id],:node)
ui = node.get_ui_info(dc_hash)
if ui and (ui[:top].to_i < top_most.to_i)
left_diff = assembly_left_pos.to_i - ui[:left].to_i
top_node = {:id=>node[:id],:ui=>ui,:left_diff=>left_diff}
top_most = ui[:top]
end
end
nested_objs[:nodes].each_with_index do |node,i|
ui = node.get_ui_info(dc_hash)
Log.error("no coordinates for node with id #{node[:id].to_s} in #{parent_id.to_s}") unless ui
if ui
if node[:id] == top_node[:id]
ui[:left] = assembly_left_pos.to_i
else
ui[:left] = ui[:left].to_i + top_node[:left_diff].to_i
end
end
node.update_ui_info!(ui,dc_hash)
nested_objs[:nodes][i][:assembly_ui] = ui
end
nested_objs[:port_links].each_with_index do |link,i|
nested_objs[:port_links][i][:ui] ||= {
:type => R8::Config[:links][:default_type],
:style => R8::Config[:links][:default_style]
}
end
return {:data=>nested_objs}
# NOTE(review): everything below this return is unreachable legacy
# redirect handling — candidate for deletion
#TODO: clean this up,hack to update UI params for newly cloned object
# update_from_hash(id,{:ui=>hash["ui"]})
# hash["redirect"] ? redirect_route = "/xyz/#{hash["redirect"]}/#{id.to_s}" : redirect_route = "/xyz/#{model_name()}/display/#{id.to_s}"
if hash["model_redirect"]
base_redirect = "/xyz/#{hash["model_redirect"]}/#{hash["action_redirect"]}"
redirect_id = hash["id_redirect"].match(/^\*/) ? id.to_s : hash["id_redirect"]
redirect_route = "#{base_redirect}/#{redirect_id}"
request_params = ''
expected_params = ['model_redirect','action_redirect','id_redirect','target_id','target_model_name']
request.params.each do |name,value|
if !expected_params.include?(name)
request_params << '&' if request_params != ''
request_params << "#{name}=#{value}"
end
end
ajax_request? ? redirect_route += '.json' : nil
redirect_route << URI.encode("?#{request_params}") if request_params != ''
else
redirect_route = "/xyz/#{model_name()}/display/#{id.to_s}"
ajax_request? ? redirect_route += '.json' : nil
end
redirect redirect_route
end
end
end
end
# NOTE(review): dataset-extraction artifact — commit message "Added validation
# error for executing serverspec tests on staged assemblies"; the file content
# below is a duplicate copy of the source above. Remove before use.
module DTK
class AssemblyController < AuthController
helper :assembly_helper
helper :task_helper
#### create and delete actions ###
#TODO: rename to delete_and_destroy
# Deletes an assembly. Templates return module repo info so the client can
# sync its local copy; instances are deleted together with their nodes.
def rest__delete()
assembly_id,subtype = ret_assembly_params_id_and_subtype()
if subtype == :template
#returning module_repo_info so client can update this in its local module
rest_ok_response Assembly::Template.delete_and_ret_module_repo_info(id_handle(assembly_id))
else #subtype == :instance
Assembly::Instance.delete(id_handle(assembly_id),:destroy_nodes => true)
rest_ok_response
end
end
# Purges the workspace's content, destroying its nodes.
def rest__purge()
workspace = ret_workspace_object?()
workspace.purge(:destroy_nodes => true)
rest_ok_response
end
# Destroys and resets the nodes of the assembly instance.
def rest__destroy_and_reset_nodes()
assembly = ret_assembly_instance_object()
assembly.destroy_and_reset_nodes()
rest_ok_response
end
# Removes the assembly instance from the system; unlike rest__delete it
# does not pass :destroy_nodes, so the underlying nodes are not destroyed.
def rest__remove_from_system()
assembly = ret_assembly_instance_object()
Assembly::Instance.delete(assembly.id_handle())
rest_ok_response
end
# Points the workspace at the target instance given by :target_id.
def rest__set_target()
workspace = ret_workspace_object?()
target = create_obj(:target_id, Target::Instance)
workspace.set_target(target)
rest_ok_response
end
# Deletes one node (identified by :node_id) from the assembly instance,
# destroying the node itself as well.
def rest__delete_node()
assembly = ret_assembly_instance_object()
node_idh = ret_node_id_handle(:node_id,assembly)
assembly.delete_node(node_idh,:destroy_nodes => true)
rest_ok_response
end
# Deletes a component from the assembly instance. The component can be
# addressed either by :cmp_full_name (resolved within the assembly) or by
# its raw :component_id.
def rest__delete_component()
# Retrieving node_id to validate if component belongs to node when delete-component invoked from component-level context
node_id = ret_non_null_request_params(:node_id)
component_id = ret_non_null_request_params(:component_id)
assembly = ret_assembly_instance_object()
assembly_id = assembly.id()
cmp_full_name = ret_request_params(:cmp_full_name)
if cmp_full_name
cmp_idh = ret_component_id_handle(:cmp_full_name,:assembly_id => assembly_id)
else
cmp_idh = id_handle(component_id,:component)
end
assembly.delete_component(cmp_idh, node_id)
rest_ok_response
end
#### end: create and delete actions ###
#### list and info actions ###
# Returns assembly info, optionally scoped to a node/component/attribute.
# Encodes the response as yaml unless the caller passes :json_return => 'true'.
def rest__info()
assembly = ret_assembly_object()
node_id, component_id, attribute_id, return_json = ret_request_params(:node_id, :component_id, :attribute_id, :json_return)
if return_json.eql?('true')
rest_ok_response assembly.info(node_id, component_id, attribute_id)
else
rest_ok_response assembly.info(node_id, component_id, attribute_id), :encode_into => :yaml
end
end
# Renames an assembly from :assembly_name to :new_assembly_name.
def rest__rename()
assembly = ret_assembly_object()
assembly_name = ret_non_null_request_params(:assembly_name)
new_assembly_name = ret_non_null_request_params(:new_assembly_name)
rest_ok_response assembly.rename(model_handle(), assembly_name, new_assembly_name)
end
#TODO: may be cleaner if we break into list_nodes, list_components with some shared helper functions
# Lists information "about" an aspect of an assembly (:nodes, :components,
# :attributes, ... per AboutEnum, keyed by subtype). Supports optional
# node/component scoping, detail levels and yaml output format.
def rest__info_about()
node_id, component_id, detail_level, detail_to_include = ret_request_params(:node_id, :component_id, :detail_level, :detail_to_include)
assembly,subtype = ret_assembly_params_object_and_subtype()
# NOTE(review): this assignment is dead; response_opts is re-initialized
# below before first use — confirm and remove
response_opts = Hash.new
if format = ret_request_params(:format)
format = format.to_sym
unless SupportedFormats.include?(format)
raise ErrorUsage.new("Illegal format (#{format}) specified; it must be one of: #{SupportedFormats.join(',')}")
end
end
about = ret_non_null_request_params(:about).to_sym
unless AboutEnum[subtype].include?(about)
raise ErrorUsage::BadParamValue.new(:about,AboutEnum[subtype])
end
# keep only elements matching the node/component scope; drop hidden attributes
filter_proc = Proc.new do |e|
ret_val = check_element(e,[:node,:id],node_id) && check_element(e,[:attribute,:component_component_id],component_id) && e
ret_val = nil if (e[:attribute] and e[:attribute][:hidden])
ret_val
end
opts = Opts.new(:filter_proc => filter_proc, :detail_level => detail_level)
opts.add_return_datatype!()
if detail_to_include
opts.merge!(:detail_to_include => detail_to_include.map{|r|r.to_sym})
opts.add_value_to_return!(:datatype)
end
if about == :attributes
if format == :yaml
opts.merge!(:raw_attribute_value => true,:mark_unset_required => true)
else
opts.merge!(:truncate_attribute_values => true,:mark_unset_required => true)
end
end
if node_id
opts.merge!(:node_cmp_name => true) unless node_id.empty?
end
data = assembly.info_about(about, opts)
datatype = opts.get_datatype
response_opts = Hash.new
if format == :yaml
response_opts.merge!(:encode_into => :yaml)
else
response_opts.merge!(:datatype => datatype)
end
rest_ok_response data, response_opts
end
SupportedFormats = [:yaml]
# Returns the serialized task template for :task_action as yaml, or a
# "not yet generated" message when no template exists.
def rest__info_about_task()
assembly = ret_assembly_instance_object()
task_action = ret_request_params(:task_action)
opts = {:donot_parse => true,:action_types=>[:assembly]}
response = assembly.get_task_template_serialized_content(task_action,opts)
response_opts = Hash.new
if response
response_opts.merge!(:encode_into => :yaml)
else
response = {:message => "Task not yet generated for assembly (#{assembly.get_field?(:display_name)})"}
end
rest_ok_response response, response_opts
end
# Cancels a task; if no :task_id is given, cancels the most recent
# executing task for this assembly (raising when none is running).
def rest__cancel_task()
assembly = ret_assembly_instance_object()
unless top_task_id = ret_request_params(:task_id)
unless top_task = get_most_recent_executing_task([:eq,:assembly_id,assembly.id()])
raise ErrorUsage.new("No running tasks found")
end
top_task_id = top_task.id()
end
cancel_task(top_task_id)
rest_ok_response :task_id => top_task_id
end
# Lists the modules used by the assembly templates given in :assemblies.
def rest__list_modules()
ids = ret_request_params(:assemblies)
assembly_templates = get_assemblies_from_ids(ids)
components = Assembly::Template.list_modules(assembly_templates)
rest_ok_response components
end
# Prepares a component or service module of the assembly for editing;
# raises ErrorUsage on an unknown :module_type.
def rest__prepare_for_edit_module()
assembly = ret_assembly_instance_object()
module_type = ret_non_null_request_params(:module_type)
response =
case module_type.to_sym
when :component_module
component_module = create_obj(:module_name,ComponentModule)
AssemblyModule::Component.prepare_for_edit(assembly,component_module)
when :service_module
modification_type = ret_non_null_request_params(:modification_type).to_sym
AssemblyModule::Service.prepare_for_edit(assembly,modification_type)
else
raise ErrorUsage.new("Illegal module_type #{module_type}")
end
rest_ok_response response
end
# Promotes assembly-local component-module changes back to the module;
# only :component_module is supported. Honors a boolean :force param.
def rest__promote_module_updates()
assembly = ret_assembly_instance_object()
module_type = ret_non_null_request_params(:module_type)
unless module_type.to_sym == :component_module
raise Error.new("promote_module_changes only treats component_module type")
end
# NOTE(review): module_name is fetched for its non-null check; create_obj
# below reads :module_name from the request itself
module_name = ret_non_null_request_params(:module_name)
component_module = create_obj(:module_name,ComponentModule)
opts = ret_boolean_params_hash(:force)
rest_ok_response AssemblyModule::Component.promote_module_updates(assembly,component_module,opts)
end
# Creates a simple dependency between two component templates in the
# context of the assembly instance.
def rest__create_component_dependency()
assembly = ret_assembly_instance_object()
cmp_template = ret_component_template(:component_template_id)
antecedent_cmp_template = ret_component_template(:antecedent_component_template_id)
type = :simple
AssemblyModule::Component.create_component_dependency?(type,assembly,cmp_template,antecedent_cmp_template)
rest_ok_response
end
# checks element through set of fields
# Walks +fields+ down into +element+ and reports whether the value found
# there equals +element_id_val+ (compared as an integer). A nil or empty
# +element_id_val+ acts as a wildcard and always matches; a nil +element+
# never matches.
def check_element(element, fields, element_id_val)
return true if element_id_val.nil? || element_id_val.empty?
return false if element.nil?
cursor = element.dup
fields.each do |field|
cursor = cursor[field]
return false if cursor.nil?
end
cursor == element_id_val.to_i
end
AboutEnum = {
:instance => [:nodes,:components,:tasks,:attributes,:modules],
:template => [:nodes,:components,:targets]
}
FilterProc = {
:attributes => lambda{|attr|not attr[:hidden]}
}
# Creates ad-hoc attribute links between a source and target attribute term.
def rest__add_ad_hoc_attribute_links()
assembly = ret_assembly_instance_object()
target_attr_term,source_attr_term = ret_non_null_request_params(:target_attribute_term,:source_attribute_term)
update_meta = ret_request_params(:update_meta)
opts = Hash.new
#update_meta == true is the default
# (only an explicit false value suppresses :update_meta)
unless !update_meta.nil? and !update_meta
opts.merge!(:update_meta => true)
end
AttributeLink::AdHoc.create_adhoc_links(assembly,target_attr_term,source_attr_term,opts)
rest_ok_response
end
# Deletes the service link (port link) addressed by the request params.
def rest__delete_service_link()
port_link = ret_port_link()
Model.delete_instance(port_link.id_handle())
rest_ok_response
end
# Adds a service link between an input and output component of the
# assembly; returns the new link's id.
def rest__add_service_link()
assembly = ret_assembly_instance_object()
assembly_id = assembly.id()
input_cmp_idh = ret_component_id_handle(:input_component_id,:assembly_id => assembly_id)
output_cmp_idh = ret_component_id_handle(:output_component_id,:assembly_id => assembly_id)
opts = ret_params_hash(:dependency_name)
service_link_idh = assembly.add_service_link?(input_cmp_idh,output_cmp_idh,opts)
rest_ok_response :service_link => service_link_idh.get_id()
end
# Stub endpoint: computes the port link's attribute mappings but only
# pretty-prints them server-side; the response carries no data.
def rest__list_attribute_mappings()
port_link = ret_port_link()
#TODO: stub
ams = port_link.list_attribute_mappings()
pp ams
rest_ok_response
end
# Lists service links of the assembly, optionally filtered to those whose
# input side is the given :component_id; :context defaults to :assembly.
def rest__list_service_links()
assembly = ret_assembly_instance_object()
component_id = ret_component_id?(:component_id,:assembly_id => assembly.id())
context = (ret_request_params(:context)||:assembly).to_sym
opts = {:context => context}
if component_id
opts.merge!(:filter => {:input_component_id => component_id})
end
ret = assembly.list_service_links(opts)
rest_ok_response ret
end
#TODO: deprecate below for above
# Deprecated in favor of rest__list_service_links; only the :find_possible
# branch is still functional.
def rest__list_connections()
assembly = ret_assembly_instance_object()
find_missing,find_possible = ret_request_params(:find_missing,:find_possible)
ret =
if find_possible
assembly.list_connections__possible()
elsif find_missing
raise Error.new("Deprecated")
else
raise Error.new("Deprecated")
end
rest_ok_response ret
end
# Lists the service add-ons available for this assembly instance.
def rest__list_possible_add_ons()
assembly = ret_assembly_instance_object()
rest_ok_response assembly.get_service_add_ons()
end
# Returns the assembly's attributes in print form, optionally filtered.
def rest__get_attributes()
filter = ret_request_params(:filter)
filter = filter && filter.to_sym
assembly = ret_assembly_instance_object()
rest_ok_response assembly.get_attributes_print_form(Opts.new(:filter => filter))
end
# Returns the workspace object for the current model handle.
def rest__workspace_object()
rest_ok_response Assembly::Instance.get_workspace_object(model_handle(),{})
end
# Lists assemblies: instances directly, or templates scoped to the
# default project (with version suffixes).
def rest__list()
subtype = ret_assembly_subtype()
result =
if subtype == :instance
opts = ret_params_hash(:filter,:detail_level)
Assembly::Instance.list(model_handle(),opts)
else
project = get_default_project()
opts = {:version_suffix => true}.merge(ret_params_hash(:filter,:detail_level))
Assembly::Template.list(model_handle(),opts.merge(:project_idh => project.id_handle()))
end
rest_ok_response result
end
# Lists assembly instances, including the workspace pseudo-assembly.
def rest__list_with_workspace()
opts = ret_params_hash(:filter)
rest_ok_response Assembly::Instance.list_with_workspace(model_handle(),opts)
end
#### end: list and info actions ###
#TODO: update what input can be
#the body has an array each element of form
# {:pattern => PAT, :value => VAL}
#pat can be one of three forms
#1 - an id
#2 - a name of form ASSEM-LEVEL-ATTR or NODE/COMONENT/CMP-ATTR, or
#3 - a pattern (TODO: give syntax) that can pick out multiple vars
# this returns same output as info about attributes, pruned for just new ones set
#TODO: this is a minsnomer in that it can be used to just create attributes
# Sets (or, with :create, creates) assembly attributes from the request's
# attribute/value pairs. Creation-only options (:required, :dynamic,
# :datatype) are rejected unless :create is given.
def rest__set_attributes()
assembly = ret_assembly_instance_object()
av_pairs = ret_params_av_pairs()
opts = ret_params_hash(:format,:context,:create)
create_options = ret_boolean_params_hash(:required,:dynamic)
if semantic_data_type = ret_request_params(:datatype)
unless Attribute::SemanticDatatype.isa?(semantic_data_type)
raise ErrorUsage.new("The term (#{semantic_data_type}) is not a valid data type")
end
create_options.merge!(:semantic_data_type => semantic_data_type)
end
unless create_options.empty?
unless opts[:create]
raise ErrorUsage.new("Options (#{create_options.values.join(',')}) can only be given if :create is true")
end
opts.merge!(:attribute_properties => create_options)
end
#update_meta == true is the default
# (only an explicit false value suppresses :update_meta)
update_meta = ret_request_params(:update_meta)
unless !update_meta.nil? and !update_meta
opts.merge!(:update_meta => true)
end
assembly.set_attributes(av_pairs,opts)
rest_ok_response
end
#### actions to update and create assembly templates
# Promotes an assembly instance to a template inside a service module of
# the default project; returns clone/update info for the module's repo.
def rest__promote_to_template()
assembly = ret_assembly_instance_object()
assembly_template_name,service_module_name = get_template_and_service_names_params(assembly)
if assembly_template_name.nil? or service_module_name.nil?
raise ErrorUsage.new("SERVICE-NAME/ASSEMBLY-NAME cannot be determined and must be explicitly given")
end
project = get_default_project()
opts = ret_symbol_params_hash(:mode)
service_module = Assembly::Template.create_or_update_from_instance(project,assembly,service_module_name,assembly_template_name,opts)
clone_update_info = service_module.ret_clone_update_info()
rest_ok_response clone_update_info
end
#### end: actions to update and create assembly templates
#### methods to modify the assembly instance
# Adds a node to the assembly instance using an optional node-binding
# ruleset derived from :node_template_identifier.
def rest__add_node()
assembly = ret_assembly_instance_object()
assembly_node_name = ret_non_null_request_params(:assembly_node_name)
node_binding_rs = node_binding_ruleset?(:node_template_identifier)
node_instance_idh = assembly.add_node(assembly_node_name,node_binding_rs)
rest_ok_response node_instance_idh
end
# Adds a component (from a template) to a node of the assembly instance.
def rest__add_component()
assembly = ret_assembly_instance_object()
component_template, component_title = ret_component_template_and_title_for_assembly(:component_template_id,assembly)
#not checking here if node_id points to valid object; check is in add_component
node_idh = ret_request_param_id_handle(:node_id,Node)
new_component_idh = assembly.add_component(node_idh,component_template,component_title)
rest_ok_response(:component_id => new_component_idh.get_id())
end
# Composes another assembly template into this assembly instance.
def rest__add_assembly_template()
assembly = ret_assembly_instance_object()
assembly_template = ret_assembly_template_object(:assembly_template_id)
assembly.add_assembly_template(assembly_template)
rest_ok_response
end
# Attaches a named service add-on; returns the new sub-assembly id.
def rest__add_service_add_on()
assembly = ret_assembly_instance_object()
add_on_name = ret_non_null_request_params(:service_add_on_name)
new_sub_assembly_idh = assembly.service_add_on(add_on_name)
rest_ok_response(:sub_assembly_id => new_sub_assembly_idh.get_id())
end
#### end: methods to modify the assembly instance
#### method(s) related to staging assembly template
# Stages an assembly template into a target (defaulting when :target_id
# is omitted); returns the new service instance's name and id as yaml.
def rest__stage()
target_id = ret_request_param_id_optional(:target_id, ::DTK::Target::Instance)
target = target_idh_with_default(target_id).create_object(:model_name => :target_instance)
assembly_template = ret_assembly_template_object()
assembly_name = ret_request_params(:name)
new_assembly_obj = assembly_template.stage(target,assembly_name)
response = {
:new_service_instance => {
:name => new_assembly_obj.display_name_print_form,
:id => new_assembly_obj.id()
}
}
rest_ok_response(response,:encode_into => :yaml)
end
#### end: method(s) related to staging assembly template
#### creates tasks to execute/converge assemblies and monitor status
# Finds constraint violations on the assembly instance and returns them
# as a table of {:type, :description} rows sorted by type.
def rest__find_violations()
assembly = ret_assembly_instance_object()
violation_objects = assembly.find_violations()
violation_table = violation_objects.map do |v|
{:type => v.type(),:description => v.description()}
end.sort{|a,b|a[:type].to_s <=> b[:type].to_s}
rest_ok_response violation_table
end
# Creates and persists a converge task for the assembly instance.
# If the assembly's nodes are stopped, the caller must confirm starting
# them (via :start_assembly); stopped nodes are then started in a
# background thread and the task waits for them. Returns {:task_id => ...},
# {:confirmation_message => true}, or {:errors => [...]}.
def rest__create_task()
assembly = ret_assembly_instance_object()
if assembly.is_stopped?
start_assembly = ret_request_params(:start_assembly)
# caller has not yet confirmed that the stopped nodes may be started
return rest_ok_response :confirmation_message=>true if start_assembly.nil?
# NOTE(review): assembly_idh is never used below; the call may exist only
# to validate the :assembly_id param — confirm before removing
assembly_idh = ret_request_param_id_handle(:assembly_id,Assembly::Instance)
node_pattern = ret_request_params(:node_pattern)
# filters only stopped nodes for this assembly
nodes = assembly.get_nodes(:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status)
assembly_name = Assembly::Instance.pretty_print_name(assembly)
nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :stopped)
unless is_valid
Log.info(error_msg)
return rest_ok_response(:errors => [error_msg])
end
# fix: was 'user_object = user_object = ...' (accidental duplicated assignment)
user_object = ::DTK::CurrentSession.new.user_object()
CreateThread.defer_with_session(user_object) do
# invoking command to start the nodes
CommandAndControl.start_instances(nodes)
end
#TODO: not doing at this point puppet version per run; it just can be set when node is created
opts = ret_params_hash(:commit_msg,:puppet_version)
task = Task.create_and_start_from_assembly_instance(assembly,opts)
else
raise ErrorUsage, "Task is already running on requested nodes. Please wait until task is complete" if assembly.are_nodes_running?
#TODO: not doing at this point puppet version per run; it just can be set when node is created
opts = ret_params_hash(:commit_msg,:puppet_version)
task = Task.create_from_assembly_instance(assembly,opts)
end
task.save!()
# TODO: this was called from gui commit window
# pp Attribute.augmented_attribute_list_from_task(task)
rest_ok_response :task_id => task.id
end
# leaving this commented until we test out if methode above works properly
# def rest__create_task()
# assembly = ret_assembly_instance_object()
# if assembly.is_stopped?
# validate_params = [
# :action => :start,
# :params => {:assembly => assembly[:id]},
# :wait_for_complete => {:type => :assembly, :id => assembly[:id]}
# ]
# return rest_validate_response("Assembly is stopped, you need to start it.", validate_params)
# end
# if assembly.are_nodes_running?
# raise ErrorUsage, "Task is already running on requested nodes. Please wait until task is complete"
# end
# opts = ret_params_hash(:commit_msg,:puppet_version)
# task = Task.create_from_assembly_instance(assembly,opts)
# task.save!()
# # TODO: this was called from gui commit window
# # pp Attribute.augmented_attribute_list_from_task(task)
# rest_ok_response :task_id => task.id
# end
# #TODO: replace or given options to specify specific smoketests to run
# def rest__create_smoketests_task()
# assembly = ret_assembly_instance_object()
# opts = ret_params_hash(:commit_msg).merge(:component_type => :smoketest)
# task = Task.create_from_assembly_instance(assembly,opts)
# task.save!()
# rest_ok_response :task_id => task.id
# end
# Clears the assembly instance's task history.
def rest__clear_tasks()
assembly = ret_assembly_instance_object()
assembly.clear_tasks()
rest_ok_response
end
# Starts the assembly's stopped nodes in a background thread and creates
# a task that runs once the nodes are ready.
# Returns {:task_id => ...}, or {:errors => [...]} when no startable nodes.
def rest__start()
assembly = ret_assembly_instance_object()
assembly_idh = ret_request_param_id_handle(:assembly_id,Assembly::Instance)
node_pattern = ret_request_params(:node_pattern)
# filters only stopped nodes for this assembly
nodes = assembly.get_nodes(:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status)
assembly_name = Assembly::Instance.pretty_print_name(assembly)
nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :stopped)
unless is_valid
Log.info(error_msg)
return rest_ok_response(:errors => [error_msg])
end
# NOTE(review): the queue's result is set but its id is never returned to
# the client; looks vestigial — confirm before removing
queue = SimpleActionQueue.new
user_object = ::DTK::CurrentSession.new.user_object()
CreateThread.defer_with_session(user_object) do
# invoking command to start the nodes
CommandAndControl.start_instances(nodes)
end
opts = {}
opts.merge!(:node => nodes.first) if (nodes.size == 1)
task = Task.task_when_nodes_ready_from_assembly(assembly_idh.create_object(),:assembly, opts)
task.save!()
queue.set_result(:task_id => task.id)
rest_ok_response :task_id => task.id
end
# Stops the assembly's running nodes (optionally filtered by :node_pattern).
def rest__stop()
assembly = ret_assembly_instance_object()
node_pattern = ret_request_params(:node_pattern)
nodes = assembly.get_nodes(:id,:display_name,:type, :external_ref,:admin_op_status)
# NOTE(review): assembly_idh appears unused here; the call may validate
# the :assembly_id param — confirm before removing
assembly_idh = ret_request_param_id_handle(:assembly_id,Assembly::Instance)
assembly_name = Assembly::Instance.pretty_print_name(assembly)
nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :running)
unless is_valid
Log.info(error_msg)
return rest_ok_response(:errors => [error_msg])
end
CommandAndControl.stop_instances(nodes)
rest_ok_response :status => :ok
end
##
# Validates whether the node list is ready to be started or stopped.
#
# * *Args* :
# - +assembly_name+ -> assembly name (used in error messages)
# - +nodes+ -> node list
# - +node_pattern+ -> regexp pattern matched against node ids
# - +status_pattern+ -> pattern to match node status
# * *Returns* :
# - nodes to use (filtered by pattern/status when valid)
# - is-valid flag
# - error message in case it is not valid
#
def nodes_valid_for_aws?(assembly_name, nodes, node_pattern, status_pattern)
unless node_pattern.nil?
id_regex = Regexp.new(node_pattern)
candidates = nodes.select { |node| id_regex =~ node.id.to_s }
if candidates.empty?
# no id matched; fall back to an exact display-name match
candidates = nodes.select { |node| node_pattern.to_s.eql?(node.display_name.to_s) }
return candidates, false, "No nodes have been matched via ID ~ '#{node_pattern}'." if candidates.empty?
end
nodes = candidates
end
# 'staged' nodes have no backing instance that could be started or stopped
if nodes.any? { |node| node[:type] == "staged" }
return nodes, false, "Nodes for assembly '#{assembly_name}' are 'staged' and as such cannot be started/stopped."
end
# this translates to /running|pending/ or /stopped|pending/ checks
status_regex = Regexp.new("#{status_pattern}|pending")
matching = nodes.select { |node| node[:admin_op_status] =~ status_regex }
return nodes, false, "There are no #{status_pattern} nodes for assembly '#{assembly_name}'." if matching.empty?
return matching, true, nil
end
# Validates that the given nodes can run serverspec tests: none may be
# 'staged', and at least one must have an admin status matching
# +status_pattern+ (or 'pending').
# Returns [nodes_to_use, valid_flag, error_message_or_nil].
def nodes_are_up?(assembly_name, nodes, status_pattern)
# staged nodes have no running machine to test against
if nodes.any? { |node| node[:type] == "staged" }
return nodes, false, "Serverspec tests cannot be executed on nodes that are 'staged'."
end
# this translates to /running|pending/ or /stopped|pending/ checks
status_regex = Regexp.new("#{status_pattern}|pending")
matching = nodes.select { |node| node[:admin_op_status] =~ status_regex }
return nodes, false, "There are no #{status_pattern} nodes for assembly '#{assembly_name}'." if matching.empty?
return matching, true, nil
end
# Kicks off async log retrieval; results are polled via
# rest__get_action_results using the returned :action_results_id.
def rest__initiate_get_log()
assembly = ret_assembly_instance_object()
params = ret_params_hash(:node_identifier,:log_path, :start_line)
queue = ActionResultsQueue.new
assembly.initiate_get_log(queue, params)
rest_ok_response :action_results_id => queue.id
end
# Grants or revokes ssh public-key access for a system user on the
# assembly's nodes; validates current key state before dispatching.
def rest__initiate_ssh_pub_access()
assembly = ret_assembly_instance_object()
params = ret_params_hash(:rsa_pub_name, :rsa_pub_key, :system_user)
agent_action = ret_non_null_request_params(:agent_action)
system_user, key_name = params[:system_user], params[:rsa_pub_name]
data_exists, data_exists_on_every_node = Component::Instance::Interpreted.check_existance?(assembly, system_user, key_name)
# revoking access that was never granted is a usage error
if agent_action.to_sym == :revoke_access && !data_exists
raise ErrorUsage.new("Access is not granted to system user '#{system_user}' with name '#{key_name}'")
end
# granting access that already exists everywhere is a usage error
if agent_action.to_sym == :grant_access && data_exists_on_every_node
raise ErrorUsage.new("All nodes already have access to system user '#{system_user}' with name '#{key_name}'")
end
queue = ActionResultsQueue.new
assembly.initiate_ssh_agent_action(agent_action.to_sym, queue, params)
rest_ok_response :action_results_id => queue.id
end
# Lists the ssh access entries currently granted on the assembly's nodes.
def rest__list_ssh_access()
assembly = ret_assembly_instance_object()
rest_ok_response Component::Instance::Interpreted.list_ssh_access(assembly)
end
# Kicks off an async grep over node logs; poll with the returned id.
def rest__initiate_grep()
assembly = ret_assembly_instance_object()
params = ret_params_hash(:node_pattern, :log_path, :grep_pattern, :stop_on_first_match)
queue = ActionResultsQueue.new
assembly.initiate_grep(queue, params)
rest_ok_response :action_results_id => queue.id
end
# Returns task status for an assembly instance; :format defaults to :hash.
def rest__task_status()
assembly_id = ret_request_param_id(:assembly_id,Assembly::Instance)
format = (ret_request_params(:format)||:hash).to_sym
response = Task::Status::Assembly.get_status(id_handle(assembly_id),:format => format)
rest_ok_response response
end
### mcollective actions
# Kicks off async netstat collection on a node; poll with the returned id.
def rest__initiate_get_netstats()
node_id = ret_non_null_request_params(:node_id)
assembly = ret_assembly_instance_object()
queue = ActionResultsQueue.new
assembly.initiate_get_netstats(queue, node_id)
rest_ok_response :action_results_id => queue.id
end
# Kicks off async process-list collection on a node; poll with the returned id.
def rest__initiate_get_ps()
node_id = ret_non_null_request_params(:node_id)
assembly = ret_assembly_instance_object()
queue = ActionResultsQueue.new
assembly.initiate_get_ps(queue, node_id)
rest_ok_response :action_results_id => queue.id
end
# Initiates serverspec test execution for the given node/components and
# returns an :action_results_id to poll via rest__get_action_results.
# Validates that the assembly's nodes are not 'staged' before dispatching.
def rest__initiate_execute_tests()
node_id, components = ret_non_null_request_params(:node_id, :components)
assembly = ret_assembly_instance_object()
# fix: removed leftover debug snippet (require 'debugger'; Debugger.start; debugger)
nodes = assembly.get_nodes(:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status)
assembly_name = Assembly::Instance.pretty_print_name(assembly)
# NOTE(review): :stopped is passed as the status pattern, so this accepts
# nodes matching /stopped|pending/; for executing tests one would expect
# :running — confirm the intended semantics before changing
nodes, is_valid, error_msg = nodes_are_up?(assembly_name, nodes, :stopped)
unless is_valid
Log.info(error_msg)
return rest_ok_response(:errors => [error_msg])
end
queue = ActionResultsQueue.new
assembly.initiate_execute_tests(queue, node_id, components)
rest_ok_response :action_results_id => queue.id
end
# Polls results for a previously initiated async action, from either the
# simple queue or the full action-results queue (with optional sorting).
def rest__get_action_results()
#TODO: to be safe need to garbage collect on ActionResultsQueue in case miss anything
action_results_id = ret_non_null_request_params(:action_results_id)
ret_only_if_complete = ret_request_param_boolean(:return_only_if_complete)
disable_post_processing = ret_request_param_boolean(:disable_post_processing)
sort_key = ret_request_params(:sort_key)
if ret_request_param_boolean(:using_simple_queue)
rest_ok_response SimpleActionQueue.get_results(action_results_id)
else
if sort_key
sort_key = sort_key.to_sym
rest_ok_response ActionResultsQueue.get_results(action_results_id,ret_only_if_complete,disable_post_processing, sort_key)
else
rest_ok_response ActionResultsQueue.get_results(action_results_id,ret_only_if_complete,disable_post_processing)
end
end
end
### end: mcollective actions
#TDODO: got here in cleanup of rest calls
# Lists the smoketests defined for the assembly.
def rest__list_smoketests()
assembly = ret_assembly_object()
rest_ok_response assembly.list_smoketests()
end
# Test helper: returns the items of the assembly with the given id.
def test_get_items(id)
assembly = id_handle(id,:component).create_object()
item_list = assembly.get_items()
return {
:data=>item_list
}
end
# Searches library-level composite components by regex-matching request
# params against common columns, then decorates each result with model
# name, i18n title and an icon path for the UI.
def search
params = request.params.dup
cols = model_class(:component).common_columns()
# build a regex filter conjunct for every request param that names a column
filter_conjuncts = params.map do |name,value|
[:regex,name.to_sym,"^#{value}"] if cols.include?(name.to_sym)
end.compact
#restrict results to belong to library and not nested in assembly
filter_conjuncts += [[:eq,:type,"composite"],[:neq,:library_library_id,nil],[:eq,:assembly_id,nil]]
sp_hash = {
:cols => cols,
:filter => [:and] + filter_conjuncts
}
component_list = Model.get_objs(model_handle(:component),sp_hash).each{|r|r.materialize!(cols)}
i18n = get_i18n_mappings_for_models(:component)
component_list.each_with_index do |model,index|
component_list[index][:model_name] = :component
component_list[index][:ui] ||= {}
component_list[index][:ui][:images] ||= {}
# name = component_list[index][:display_name]
name = Assembly.pretty_print_name(component_list[index])
title = name.nil? ? "" : i18n_string(i18n,:component,name)
#TODO: change after implementing all the new types and making generic icons for them
model_type = 'service'
model_sub_type = 'db'
model_type_str = "#{model_type}-#{model_sub_type}"
prefix = "#{R8::Config[:base_images_uri]}/v1/componentIcons"
# fall back to a generic icon when the component has no thumbnail
png = component_list[index][:ui][:images][:tnail] || "unknown-#{model_type_str}.png"
component_list[index][:image_path] = "#{prefix}/#{png}"
component_list[index][:i18n] = title
end
return {:data=>component_list}
end
# Placeholder endpoint: always returns canned tree data, regardless of id.
def get_tree(id)
{:data => 'some tree data goes here'}
end
# Materializes an assembly-template object for each of the given ids.
# Returns the objects in the same order as +ids+.
def get_assemblies_from_ids(ids)
# idiom: map replaces manual accumulate-into-array
ids.map do |id|
id_handle(id.to_i,:component).create_object(:model_name => :assembly_template)
end
end
#TODO: unify with clone(id)
#clone assembly from library to target
# Clones an assembly template into a target (defaulting when no
# "target_id" param) and computes UI positions for the cloned nodes.
# Returns {:assembly_id => id-of-clone}.
def stage()
target_idh = target_idh_with_default(request.params["target_id"])
assembly_id = ret_request_param_id(:assembly_id,::DTK::Assembly::Template)
#TODO: if naem given and not unique either reject or generate a -n suffix
assembly_name = ret_request_params(:name)
id_handle = id_handle(assembly_id)
#TODO: need to copy in avatar when hash["ui"] is non null
override_attrs = Hash.new
override_attrs[:display_name] = assembly_name if assembly_name
target_object = target_idh.create_object()
clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
new_assembly_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
id = new_assembly_obj && new_assembly_obj.id()
#compute ui positions
nested_objs = new_assembly_obj.get_node_assembly_nested_objects()
#TODO: this does not leverage assembly node relative positions
nested_objs[:nodes].each do |node|
target_object.update_ui_for_new_item(node[:id])
end
rest_ok_response(:assembly_id => id)
end
#clone assembly from library to target
# Clones an assembly into a target (given by "target_id"/"target_model_name"
# params), prunes internal ports, computes UI positions for the cloned
# nodes relative to the parent datacenter, and returns the nested
# node/port-link data for the UI.
def clone(id)
handle_errors do
id_handle = id_handle(id)
hash = request.params
target_id_handle = nil
if hash["target_id"] and hash["target_model_name"]
input_target_id_handle = id_handle(hash["target_id"].to_i,hash["target_model_name"].to_sym)
target_id_handle = Model.find_real_target_id_handle(id_handle,input_target_id_handle)
else
Log.info("not implemented yet")
return redirect "/xyz/#{model_name()}/display/#{id.to_s}"
end
#TODO: need to copy in avatar when hash["ui"] is non null
override_attrs = hash["ui"] ? {:ui=>hash["ui"]} : {}
target_object = target_id_handle.create_object()
clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
new_assembly_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
id = new_assembly_obj && new_assembly_obj.id()
nested_objs = new_assembly_obj.get_node_assembly_nested_objects()
#just want external ports
(nested_objs[:nodes]||[]).each{|n|(n[:ports]||[]).reject!{|p|p[:type] == "component_internal"}}
#TODO: ganglia hack: remove after putting this info in teh r8 meta files
(nested_objs[:nodes]||[]).each do |n|
(n[:ports]||[]).each do |port|
if port[:display_name] =~ /ganglia__server/
port[:location] = "east"
elsif port[:display_name] =~ /ganglia__monitor/
port[:location] = "west"
end
end
end
#TODO: get node positions going for assemblies
#compute uui positions
parent_id = request.params["parent_id"]
assembly_left_pos = request.params["assembly_left_pos"]
# node_list = get_objects(:node,{:assembly_id=>id})
dc_hash = get_object_by_id(parent_id,:datacenter)
raise Error.new("Not implemented when parent_id is not a datacenter") if dc_hash.nil?
#get the top most item in the list to set new positions
top_node = {}
top_most = 2000
# node_list.each do |node|
nested_objs[:nodes].each do |node|
# node = create_object_from_id(node_hash[:id],:node)
ui = node.get_ui_info(dc_hash)
if ui and (ui[:top].to_i < top_most.to_i)
left_diff = assembly_left_pos.to_i - ui[:left].to_i
top_node = {:id=>node[:id],:ui=>ui,:left_diff=>left_diff}
top_most = ui[:top]
end
end
# shift every node horizontally so the top-most node sits at assembly_left_pos
nested_objs[:nodes].each_with_index do |node,i|
ui = node.get_ui_info(dc_hash)
Log.error("no coordinates for node with id #{node[:id].to_s} in #{parent_id.to_s}") unless ui
if ui
if node[:id] == top_node[:id]
ui[:left] = assembly_left_pos.to_i
else
ui[:left] = ui[:left].to_i + top_node[:left_diff].to_i
end
end
node.update_ui_info!(ui,dc_hash)
nested_objs[:nodes][i][:assembly_ui] = ui
end
nested_objs[:port_links].each_with_index do |link,i|
nested_objs[:port_links][i][:ui] ||= {
:type => R8::Config[:links][:default_type],
:style => R8::Config[:links][:default_style]
}
end
return {:data=>nested_objs}
# NOTE(review): everything below this return is unreachable dead code
#TODO: clean this up,hack to update UI params for newly cloned object
# update_from_hash(id,{:ui=>hash["ui"]})
# hash["redirect"] ? redirect_route = "/xyz/#{hash["redirect"]}/#{id.to_s}" : redirect_route = "/xyz/#{model_name()}/display/#{id.to_s}"
if hash["model_redirect"]
base_redirect = "/xyz/#{hash["model_redirect"]}/#{hash["action_redirect"]}"
redirect_id = hash["id_redirect"].match(/^\*/) ? id.to_s : hash["id_redirect"]
redirect_route = "#{base_redirect}/#{redirect_id}"
request_params = ''
expected_params = ['model_redirect','action_redirect','id_redirect','target_id','target_model_name']
request.params.each do |name,value|
if !expected_params.include?(name)
request_params << '&' if request_params != ''
request_params << "#{name}=#{value}"
end
end
ajax_request? ? redirect_route += '.json' : nil
redirect_route << URI.encode("?#{request_params}") if request_params != ''
else
redirect_route = "/xyz/#{model_name()}/display/#{id.to_s}"
ajax_request? ? redirect_route += '.json' : nil
end
redirect redirect_route
end
end
end
end
# ---
module DTK
class AssemblyController < AuthController
helper :assembly_helper
helper :task_helper
#### create and delete actions ###
#TODO: rename to delete_and_destroy
def rest__delete()
assembly_id,subtype = ret_assembly_params_id_and_subtype()
if subtype == :template
#returning module_repo_info so client can update this in its local module
rest_ok_response Assembly::Template.delete_and_ret_module_repo_info(id_handle(assembly_id))
else #subtype == :instance
Assembly::Instance.delete(id_handle(assembly_id),:destroy_nodes => true)
rest_ok_response
end
end
def rest__purge()
workspace = ret_workspace_object?()
workspace.purge(:destroy_nodes => true)
rest_ok_response
end
def rest__destroy_and_reset_nodes()
assembly = ret_assembly_instance_object()
assembly.destroy_and_reset_nodes()
rest_ok_response
end
def rest__remove_from_system()
assembly = ret_assembly_instance_object()
Assembly::Instance.delete(assembly.id_handle())
rest_ok_response
end
def rest__set_target()
workspace = ret_workspace_object?()
target = create_obj(:target_id, Target::Instance)
workspace.set_target(target)
rest_ok_response
end
def rest__delete_node()
assembly = ret_assembly_instance_object()
node_idh = ret_node_id_handle(:node_id,assembly)
assembly.delete_node(node_idh,:destroy_nodes => true)
rest_ok_response
end
def rest__delete_component()
# Retrieving node_id to validate if component belongs to node when delete-component invoked from component-level context
node_id = ret_non_null_request_params(:node_id)
component_id = ret_non_null_request_params(:component_id)
assembly = ret_assembly_instance_object()
assembly_id = assembly.id()
cmp_full_name = ret_request_params(:cmp_full_name)
if cmp_full_name
cmp_idh = ret_component_id_handle(:cmp_full_name,:assembly_id => assembly_id)
else
cmp_idh = id_handle(component_id,:component)
end
assembly.delete_component(cmp_idh, node_id)
rest_ok_response
end
#### end: create and delete actions ###
#### list and info actions ###
def rest__info()
assembly = ret_assembly_object()
node_id, component_id, attribute_id = ret_request_params(:node_id, :component_id, :attribute_id)
rest_ok_response assembly.info(node_id, component_id, attribute_id), :encode_into => :yaml
end
def rest__rename()
assembly = ret_assembly_object()
assembly_name = ret_non_null_request_params(:assembly_name)
new_assembly_name = ret_non_null_request_params(:new_assembly_name)
rest_ok_response assembly.rename(model_handle(), assembly_name, new_assembly_name)
end
#TODO: may be cleaner if we break into list_nodes, list_components with some shared helper functions
def rest__info_about()
node_id, component_id, detail_level, detail_to_include = ret_request_params(:node_id, :component_id, :detail_level, :detail_to_include)
assembly,subtype = ret_assembly_params_object_and_subtype()
response_opts = Hash.new
if format = ret_request_params(:format)
format = format.to_sym
unless SupportedFormats.include?(format)
raise ErrorUsage.new("Illegal format (#{format}) specified; it must be one of: #{SupportedFormats.join(',')}")
end
end
about = ret_non_null_request_params(:about).to_sym
unless AboutEnum[subtype].include?(about)
raise ErrorUsage::BadParamValue.new(:about,AboutEnum[subtype])
end
filter_proc = Proc.new do |e|
ret_val = check_element(e,[:node,:id],node_id) && check_element(e,[:attribute,:component_component_id],component_id) && e
ret_val = nil if (e[:attribute] and e[:attribute][:hidden])
ret_val
end
opts = Opts.new(:filter_proc => filter_proc, :detail_level => detail_level)
opts.add_return_datatype!()
if detail_to_include
opts.merge!(:detail_to_include => detail_to_include.map{|r|r.to_sym})
opts.add_value_to_return!(:datatype)
end
if about == :attributes
if format == :yaml
opts.merge!(:raw_attribute_value => true,:mark_unset_required => true)
else
opts.merge!(:truncate_attribute_values => true,:mark_unset_required => true)
end
end
if node_id
opts.merge!(:node_cmp_name => true) unless node_id.empty?
end
data = assembly.info_about(about, opts)
datatype = opts.get_datatype
response_opts = Hash.new
if format == :yaml
response_opts.merge!(:encode_into => :yaml)
else
response_opts.merge!(:datatype => datatype)
end
rest_ok_response data, response_opts
end
SupportedFormats = [:yaml]
def rest__info_about_task()
assembly = ret_assembly_instance_object()
task_action = ret_request_params(:task_action)
opts = {:donot_parse => true,:action_types=>[:assembly]}
response = assembly.get_task_template_serialized_content(task_action,opts)
response_opts = Hash.new
if response
response_opts.merge!(:encode_into => :yaml)
else
response = {:message => "Task not yet generated for assembly (#{assembly.get_field?(:display_name)})"}
end
rest_ok_response response, response_opts
end
# Cancels a running task on the assembly instance.
# If no :task_id is supplied, the most recent executing task for this
# assembly is cancelled; raises ErrorUsage when none is running.
def rest__cancel_task()
  assembly = ret_assembly_instance_object()
  top_task_id = ret_request_params(:task_id)
  if top_task_id.nil?
    top_task = get_most_recent_executing_task([:eq,:assembly_id,assembly.id()])
    raise ErrorUsage.new("No running tasks found") if top_task.nil?
    top_task_id = top_task.id()
  end
  cancel_task(top_task_id)
  rest_ok_response :task_id => top_task_id
end
def rest__list_modules()
ids = ret_request_params(:assemblies)
assembly_templates = get_assemblies_from_ids(ids)
components = Assembly::Template.list_modules(assembly_templates)
rest_ok_response components
end
def rest__prepare_for_edit_module()
assembly = ret_assembly_instance_object()
module_type = ret_non_null_request_params(:module_type)
response =
case module_type.to_sym
when :component_module
component_module = create_obj(:module_name,ComponentModule)
AssemblyModule::Component.prepare_for_edit(assembly,component_module)
when :service_module
modification_type = ret_non_null_request_params(:modification_type).to_sym
AssemblyModule::Service.prepare_for_edit(assembly,modification_type)
else
raise ErrorUsage.new("Illegal module_type #{module_type}")
end
rest_ok_response response
end
# Promotes (pushes) component-module edits made in the context of this
# assembly instance back to the base component module.
# Only module_type == :component_module is supported.
def rest__promote_module_updates()
  assembly = ret_assembly_instance_object()
  module_type = ret_non_null_request_params(:module_type)
  unless module_type.to_sym == :component_module
    raise Error.new("promote_module_changes only treats component_module type")
  end
  # validate that :module_name is present; the local it was assigned to was
  # never used -- create_obj below performs the actual param lookup
  ret_non_null_request_params(:module_name)
  component_module = create_obj(:module_name,ComponentModule)
  opts = ret_boolean_params_hash(:force)
  rest_ok_response AssemblyModule::Component.promote_module_updates(assembly,component_module,opts)
end
def rest__create_component_dependency()
assembly = ret_assembly_instance_object()
cmp_template = ret_component_template(:component_template_id)
antecedent_cmp_template = ret_component_template(:antecedent_component_template_id)
type = :simple
AssemblyModule::Component.create_component_dependency?(type,assembly,cmp_template,antecedent_cmp_template)
rest_ok_response
end
# Walks +element+ down the nested +fields+ path and reports whether the value
# found there equals +element_id_val+ (compared as an integer).
# Returns true when no id filter is given (nil/empty), false when +element+
# is nil or any step of the path is missing.
def check_element(element, fields, element_id_val)
  return true if element_id_val.nil? || element_id_val.empty?
  return false if element.nil?
  nested = fields.reduce(element) do |current, field|
    value = current[field]
    return false if value.nil?
    value
  end
  nested == element_id_val.to_i
end
AboutEnum = {
:instance => [:nodes,:components,:tasks,:attributes,:modules],
:template => [:nodes,:components,:targets]
}
FilterProc = {
:attributes => lambda{|attr|not attr[:hidden]}
}
# Creates ad hoc attribute links between a source and a target attribute term
# on the assembly instance.
def rest__add_ad_hoc_attribute_links()
  assembly = ret_assembly_instance_object()
  target_attr_term, source_attr_term = ret_non_null_request_params(:target_attribute_term,:source_attribute_term)
  update_meta = ret_request_params(:update_meta)
  opts = Hash.new
  # :update_meta defaults to true when the param is absent
  opts.merge!(:update_meta => true) if update_meta.nil? || update_meta
  AttributeLink::AdHoc.create_adhoc_links(assembly,target_attr_term,source_attr_term,opts)
  rest_ok_response
end
def rest__delete_service_link()
port_link = ret_port_link()
Model.delete_instance(port_link.id_handle())
rest_ok_response
end
def rest__add_service_link()
assembly = ret_assembly_instance_object()
assembly_id = assembly.id()
input_cmp_idh = ret_component_id_handle(:input_component_id,:assembly_id => assembly_id)
output_cmp_idh = ret_component_id_handle(:output_component_id,:assembly_id => assembly_id)
opts = ret_params_hash(:dependency_name)
service_link_idh = assembly.add_service_link?(input_cmp_idh,output_cmp_idh,opts)
rest_ok_response :service_link => service_link_idh.get_id()
end
# Lists attribute mappings on a port link.
# NOTE(review): still a stub -- the mappings are only logged, not returned in
# the response body.
def rest__list_attribute_mappings()
  port_link = ret_port_link()
  #TODO: stub
  ams = port_link.list_attribute_mappings()
  # was `pp ams`: debugging leftover that wrote to the server's stdout;
  # route through the logger instead
  Log.info(ams.inspect)
  rest_ok_response
end
def rest__list_service_links()
assembly = ret_assembly_instance_object()
component_id = ret_component_id?(:component_id,:assembly_id => assembly.id())
context = (ret_request_params(:context)||:assembly).to_sym
opts = {:context => context}
if component_id
opts.merge!(:filter => {:input_component_id => component_id})
end
ret = assembly.list_service_links(opts)
rest_ok_response ret
end
#TODO: deprecate below for above
def rest__list_connections()
assembly = ret_assembly_instance_object()
find_missing,find_possible = ret_request_params(:find_missing,:find_possible)
ret =
if find_possible
assembly.list_connections__possible()
elsif find_missing
raise Error.new("Deprecated")
else
raise Error.new("Deprecated")
end
rest_ok_response ret
end
def rest__list_possible_add_ons()
assembly = ret_assembly_instance_object()
rest_ok_response assembly.get_service_add_ons()
end
def rest__get_attributes()
filter = ret_request_params(:filter)
filter = filter && filter.to_sym
assembly = ret_assembly_instance_object()
rest_ok_response assembly.get_attributes_print_form(Opts.new(:filter => filter))
end
def rest__workspace_object()
rest_ok_response Assembly::Instance.get_workspace_object(model_handle(),{})
end
def rest__list()
subtype = ret_assembly_subtype()
result =
if subtype == :instance
opts = ret_params_hash(:filter,:detail_level)
Assembly::Instance.list(model_handle(),opts)
else
project = get_default_project()
opts = {:version_suffix => true}.merge(ret_params_hash(:filter,:detail_level))
Assembly::Template.list(model_handle(),opts.merge(:project_idh => project.id_handle()))
end
rest_ok_response result
end
def rest__list_with_workspace()
opts = ret_params_hash(:filter)
rest_ok_response Assembly::Instance.list_with_workspace(model_handle(),opts)
end
#### end: list and info actions ###
#TODO: update what input can be
#the body has an array each element of form
# {:pattern => PAT, :value => VAL}
#pat can be one of three forms
#1 - an id
#2 - a name of form ASSEM-LEVEL-ATTR or NODE/COMONENT/CMP-ATTR, or
#3 - a pattern (TODO: give syntax) that can pick out multiple vars
# this returns same output as info about attributes, pruned for just new ones set
#TODO: this is a minsnomer in that it can be used to just create attributes
def rest__set_attributes()
assembly = ret_assembly_instance_object()
av_pairs = ret_params_av_pairs()
opts = ret_params_hash(:format,:context,:create)
create_options = ret_boolean_params_hash(:required,:dynamic)
if semantic_data_type = ret_request_params(:datatype)
unless Attribute::SemanticDatatype.isa?(semantic_data_type)
raise ErrorUsage.new("The term (#{semantic_data_type}) is not a valid data type")
end
create_options.merge!(:semantic_data_type => semantic_data_type)
end
unless create_options.empty?
unless opts[:create]
raise ErrorUsage.new("Options (#{create_options.values.join(',')}) can only be given if :create is true")
end
opts.merge!(:attribute_properties => create_options)
end
#update_meta == true is the default
update_meta = ret_request_params(:update_meta)
unless !update_meta.nil? and !update_meta
opts.merge!(:update_meta => true)
end
assembly.set_attributes(av_pairs,opts)
rest_ok_response
end
#### actions to update and create assembly templates
def rest__promote_to_template()
assembly = ret_assembly_instance_object()
assembly_template_name,service_module_name = get_template_and_service_names_params(assembly)
if assembly_template_name.nil? or service_module_name.nil?
raise ErrorUsage.new("SERVICE-NAME/ASSEMBLY-NAME cannot be determined and must be explicitly given")
end
project = get_default_project()
opts = ret_symbol_params_hash(:mode)
service_module = Assembly::Template.create_or_update_from_instance(project,assembly,service_module_name,assembly_template_name,opts)
clone_update_info = service_module.ret_clone_update_info()
rest_ok_response clone_update_info
end
#### end: actions to update and create assembly templates
#### methods to modify the assembly instance
def rest__add_node()
assembly = ret_assembly_instance_object()
assembly_node_name = ret_non_null_request_params(:assembly_node_name)
node_binding_rs = node_binding_ruleset?(:node_template_identifier)
node_instance_idh = assembly.add_node(assembly_node_name,node_binding_rs)
rest_ok_response node_instance_idh
end
def rest__add_component()
assembly = ret_assembly_instance_object()
component_template, component_title = ret_component_template_and_title_for_assembly(:component_template_id,assembly)
#not checking here if node_id points to valid object; check is in add_component
node_idh = ret_request_param_id_handle(:node_id,Node)
new_component_idh = assembly.add_component(node_idh,component_template,component_title)
rest_ok_response(:component_id => new_component_idh.get_id())
end
def rest__add_assembly_template()
assembly = ret_assembly_instance_object()
assembly_template = ret_assembly_template_object(:assembly_template_id)
assembly.add_assembly_template(assembly_template)
rest_ok_response
end
def rest__add_service_add_on()
assembly = ret_assembly_instance_object()
add_on_name = ret_non_null_request_params(:service_add_on_name)
new_sub_assembly_idh = assembly.service_add_on(add_on_name)
rest_ok_response(:sub_assembly_id => new_sub_assembly_idh.get_id())
end
#### end: methods to modify the assembly instance
#### method(s) related to staging assembly template
def rest__stage()
target_id = ret_request_param_id_optional(:target_id, ::DTK::Target::Instance)
target = target_idh_with_default(target_id).create_object(:model_name => :target_instance)
assembly_template = ret_assembly_template_object()
assembly_name = ret_request_params(:name)
new_assembly_obj = assembly_template.stage(target,assembly_name)
response = {
:new_service_instance => {
:name => new_assembly_obj.display_name_print_form,
:id => new_assembly_obj.id()
}
}
rest_ok_response(response,:encode_into => :yaml)
end
#### end: method(s) related to staging assembly template
#### creates tasks to execute/converge assemblies and monitor status
def rest__find_violations()
assembly = ret_assembly_instance_object()
violation_objects = assembly.find_violations()
violation_table = violation_objects.map do |v|
{:type => v.type(),:description => v.description()}
end.sort{|a,b|a[:type].to_s <=> b[:type].to_s}
rest_ok_response violation_table
end
# Creates a converge task for the assembly instance.
# If the assembly's nodes are stopped the client must first confirm via
# :start_assembly; the stopped nodes are then booted asynchronously and a
# task is created that starts once they are ready. Otherwise a plain converge
# task is created, refusing to run while nodes are mid-task.
def rest__create_task()
  assembly = ret_assembly_instance_object()
  opts = ret_params_hash(:commit_msg,:puppet_version)
  if assembly.is_stopped?
    start_assembly = ret_request_params(:start_assembly)
    # client has not yet confirmed it wants the stopped nodes started
    return rest_ok_response :confirmation_message => true if start_assembly.nil?
    node_pattern = ret_request_params(:node_pattern)
    # filters only stopped nodes for this assembly
    nodes = assembly.get_nodes(:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status)
    assembly_name = Assembly::Instance.pretty_print_name(assembly)
    nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :stopped)
    unless is_valid
      Log.info(error_msg)
      return rest_ok_response(:errors => [error_msg])
    end
    # was `user_object = user_object = ...` (duplicated assignment)
    user_object = ::DTK::CurrentSession.new.user_object()
    CreateThread.defer_with_session(user_object) do
      # invoking command to start the nodes
      CommandAndControl.start_instances(nodes)
    end
    task = Task.create_and_start_from_assembly_instance(assembly,opts)
  else
    raise ErrorUsage, "Task is already running on requested nodes. Please wait until task is complete" if assembly.are_nodes_running?
    task = Task.create_from_assembly_instance(assembly,opts)
  end
  task.save!()
  rest_ok_response :task_id => task.id
end
# leaving this commented until we test out if methode above works properly
# def rest__create_task()
# assembly = ret_assembly_instance_object()
# if assembly.is_stopped?
# validate_params = [
# :action => :start,
# :params => {:assembly => assembly[:id]},
# :wait_for_complete => {:type => :assembly, :id => assembly[:id]}
# ]
# return rest_validate_response("Assembly is stopped, you need to start it.", validate_params)
# end
# if assembly.are_nodes_running?
# raise ErrorUsage, "Task is already running on requested nodes. Please wait until task is complete"
# end
# opts = ret_params_hash(:commit_msg,:puppet_version)
# task = Task.create_from_assembly_instance(assembly,opts)
# task.save!()
# # TODO: this was called from gui commit window
# # pp Attribute.augmented_attribute_list_from_task(task)
# rest_ok_response :task_id => task.id
# end
# #TODO: replace or given options to specify specific smoketests to run
# def rest__create_smoketests_task()
# assembly = ret_assembly_instance_object()
# opts = ret_params_hash(:commit_msg).merge(:component_type => :smoketest)
# task = Task.create_from_assembly_instance(assembly,opts)
# task.save!()
# rest_ok_response :task_id => task.id
# end
def rest__clear_tasks()
assembly = ret_assembly_instance_object()
assembly.clear_tasks()
rest_ok_response
end
# Starts the assembly's stopped nodes (cloud instances) asynchronously and
# creates a task that converges the assembly once the nodes are ready.
# Returns the id of the created task.
def rest__start()
  assembly = ret_assembly_instance_object()
  assembly_idh = ret_request_param_id_handle(:assembly_id,Assembly::Instance)
  node_pattern = ret_request_params(:node_pattern)
  # filters only stopped nodes for this assembly
  nodes = assembly.get_nodes(:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status)
  assembly_name = Assembly::Instance.pretty_print_name(assembly)
  nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :stopped)
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => [error_msg])
  end
  queue = SimpleActionQueue.new
  user_object = ::DTK::CurrentSession.new.user_object()
  CreateThread.defer_with_session(user_object) do
    # invoking command to start the nodes
    CommandAndControl.start_instances(nodes)
  end
  opts = {}
  # single-node start: scope the "nodes ready" check to that node
  opts.merge!(:node => nodes.first) if nodes.size == 1
  # (removed a useless `task = nil` pre-initialization)
  task = Task.task_when_nodes_ready_from_assembly(assembly_idh.create_object(),:assembly, opts)
  task.save!()
  queue.set_result(:task_id => task.id)
  rest_ok_response :task_id => task.id
end
# Stops the assembly's running nodes (cloud instances).
# Unlike rest__start, no task is created; the stop command is issued directly.
def rest__stop()
assembly = ret_assembly_instance_object()
node_pattern = ret_request_params(:node_pattern)
nodes = assembly.get_nodes(:id,:display_name,:type, :external_ref,:admin_op_status)
# NOTE(review): assembly_idh is assigned but never used in this method
assembly_idh = ret_request_param_id_handle(:assembly_id,Assembly::Instance)
assembly_name = Assembly::Instance.pretty_print_name(assembly)
# keep only nodes in running/pending state; bail out with an error payload otherwise
nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :running)
unless is_valid
Log.info(error_msg)
return rest_ok_response(:errors => [error_msg])
end
CommandAndControl.stop_instances(nodes)
rest_ok_response :status => :ok
end
##
# Validates that a node list is ready to be started or stopped.
#
# * *Args* :
#   - +assembly_name+ -> assembly name (used in error messages)
#   - +nodes+ -> node list
#   - +node_pattern+ -> regexp pattern matched against node id or node name
#     (nil => keep all nodes)
#   - +status_pattern+ -> pattern to match node status (e.g. :running, :stopped)
# * *Returns* (in this order):
#   - filtered nodes (by pattern and status when valid)
#   - is-valid flag
#   - error message when not valid, otherwise nil
#
def nodes_valid_for_aws?(assembly_name, nodes, node_pattern, status_pattern)
  # check for pattern; DTK-1351: also match the node name (display_name) so
  # start/stop works when a name rather than an id is provided
  unless node_pattern.nil?
    regex = Regexp.new(node_pattern)
    nodes = nodes.select { |node| regex =~ node.id.to_s || regex =~ node[:display_name].to_s }
    if nodes.empty?
      return nodes, false, "No nodes have been matched via ID ~ '#{node_pattern}'."
    end
  end
  # staged nodes have no backing instance, so they cannot be started/stopped
  nodes.each do |node|
    if node[:type] == "staged"
      return nodes, false, "Nodes for assembly '#{assembly_name}' are 'staged' and as such cannot be started/stopped."
    end
  end
  # check for status -> this will translate to /running|pending/ and /stopped|pending/ checks
  filtered_nodes = nodes.select { |node| node[:admin_op_status] =~ Regexp.new("#{status_pattern.to_s}|pending") }
  if filtered_nodes.empty?
    return nodes, false, "There are no #{status_pattern} nodes for assembly '#{assembly_name}'."
  end
  return filtered_nodes, true, nil
end
def rest__initiate_get_log()
assembly = ret_assembly_instance_object()
params = ret_params_hash(:node_identifier,:log_path, :start_line)
queue = ActionResultsQueue.new
assembly.initiate_get_log(queue, params)
rest_ok_response :action_results_id => queue.id
end
def rest__initiate_grep()
assembly = ret_assembly_instance_object()
params = ret_params_hash(:node_pattern, :log_path, :grep_pattern, :stop_on_first_match)
queue = ActionResultsQueue.new
assembly.initiate_grep(queue, params)
rest_ok_response :action_results_id => queue.id
end
def rest__task_status()
assembly_id = ret_request_param_id(:assembly_id,Assembly::Instance)
format = (ret_request_params(:format)||:hash).to_sym
response = Task::Status::Assembly.get_status(id_handle(assembly_id),:format => format)
rest_ok_response response
end
### mcollective actions
def rest__initiate_get_netstats()
node_id = ret_non_null_request_params(:node_id)
assembly = ret_assembly_instance_object()
queue = ActionResultsQueue.new
assembly.initiate_get_netstats(queue, node_id)
rest_ok_response :action_results_id => queue.id
end
def rest__initiate_get_ps()
node_id = ret_non_null_request_params(:node_id)
assembly = ret_assembly_instance_object()
queue = ActionResultsQueue.new
assembly.initiate_get_ps(queue, node_id)
rest_ok_response :action_results_id => queue.id
end
def rest__initiate_execute_tests()
node_id = ret_non_null_request_params(:node_id)
assembly = ret_assembly_instance_object()
queue = ActionResultsQueue.new
assembly.initiate_execute_tests(queue, node_id)
rest_ok_response :action_results_id => queue.id
end
# Polls results accumulated by one of the initiate_* actions, identified by
# :action_results_id. Supports both the simple queue and the full
# ActionResultsQueue (with optional sorting and post-processing controls).
def rest__get_action_results()
  #TODO: to be safe need to garbage collect on ActionResultsQueue in case miss anything
  action_results_id = ret_non_null_request_params(:action_results_id)
  only_if_complete = ret_request_param_boolean(:return_only_if_complete)
  skip_post_processing = ret_request_param_boolean(:disable_post_processing)
  sort_key = ret_request_params(:sort_key)
  if ret_request_param_boolean(:using_simple_queue)
    rest_ok_response SimpleActionQueue.get_results(action_results_id)
  else
    args = [action_results_id, only_if_complete, skip_post_processing]
    args << sort_key.to_sym if sort_key
    rest_ok_response ActionResultsQueue.get_results(*args)
  end
end
### end: mcollective actions
#TDODO: got here in cleanup of rest calls
def rest__list_smoketests()
assembly = ret_assembly_object()
rest_ok_response assembly.list_smoketests()
end
# Test helper: materializes the component identified by +id+ and wraps its
# items in the {:data => ...} envelope used by the legacy UI.
def test_get_items(id)
  assembly = id_handle(id,:component).create_object()
  {:data => assembly.get_items()}
end
# Legacy UI search over library-level composite components; decorates each
# result with an icon path and an i18n display title.
def search
  params = request.params.dup
  cols = model_class(:component).common_columns()
  # build a regex filter from every request param that names a known column
  filter_conjuncts = params.map do |name,value|
    [:regex,name.to_sym,"^#{value}"] if cols.include?(name.to_sym)
  end.compact
  #restrict results to belong to library and not nested in assembly
  filter_conjuncts += [[:eq,:type,"composite"],[:neq,:library_library_id,nil],[:eq,:assembly_id,nil]]
  sp_hash = {
    :cols => cols,
    :filter => [:and] + filter_conjuncts
  }
  component_list = Model.get_objs(model_handle(:component),sp_hash).each{|r|r.materialize!(cols)}
  i18n = get_i18n_mappings_for_models(:component)
  icon_prefix = "#{R8::Config[:base_images_uri]}/v1/componentIcons"
  component_list.each do |cmp|
    cmp[:model_name] = :component
    cmp[:ui] ||= {}
    cmp[:ui][:images] ||= {}
    name = Assembly.pretty_print_name(cmp)
    #TODO: change after implementing all the new types and making generic icons for them
    png = cmp[:ui][:images][:tnail] || "unknown-service-db.png"
    cmp[:image_path] = "#{icon_prefix}/#{png}"
    cmp[:i18n] = name.nil? ? "" : i18n_string(i18n,:component,name)
  end
  {:data => component_list}
end
# Placeholder: tree rendering is not implemented; returns canned data.
def get_tree(id)
  {:data => 'some tree data goes here'}
end
# Materializes assembly-template objects for each of the given ids.
def get_assemblies_from_ids(ids)
  ids.map do |id|
    id_handle(id.to_i,:component).create_object(:model_name => :assembly_template)
  end
end
#TODO: unify with clone(id)
#clone assembly from library to target
def stage()
target_idh = target_idh_with_default(request.params["target_id"])
assembly_id = ret_request_param_id(:assembly_id,::DTK::Assembly::Template)
#TODO: if naem given and not unique either reject or generate a -n suffix
assembly_name = ret_request_params(:name)
id_handle = id_handle(assembly_id)
#TODO: need to copy in avatar when hash["ui"] is non null
override_attrs = Hash.new
override_attrs[:display_name] = assembly_name if assembly_name
target_object = target_idh.create_object()
clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
new_assembly_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
id = new_assembly_obj && new_assembly_obj.id()
#compute ui positions
nested_objs = new_assembly_obj.get_node_assembly_nested_objects()
#TODO: this does not leverage assembly node relative positions
nested_objs[:nodes].each do |node|
target_object.update_ui_for_new_item(node[:id])
end
rest_ok_response(:assembly_id => id)
end
#clone assembly from library to target
# Clones an assembly from a library into a target (legacy UI entry point),
# prunes internal ports, and computes canvas (UI) positions for the cloned
# nodes relative to the datacenter named by the "parent_id" request param.
# Returns {:data => nested node/port-link objects decorated with UI info}.
# NOTE(review): the original carried an unreachable redirect block after the
# unconditional return below; it has been removed as dead code.
def clone(id)
  handle_errors do
    id_handle = id_handle(id)
    hash = request.params
    target_id_handle = nil
    if hash["target_id"] and hash["target_model_name"]
      input_target_id_handle = id_handle(hash["target_id"].to_i,hash["target_model_name"].to_sym)
      target_id_handle = Model.find_real_target_id_handle(id_handle,input_target_id_handle)
    else
      Log.info("not implemented yet")
      return redirect "/xyz/#{model_name()}/display/#{id.to_s}"
    end
    #TODO: need to copy in avatar when hash["ui"] is non null
    override_attrs = hash["ui"] ? {:ui=>hash["ui"]} : {}
    target_object = target_id_handle.create_object()
    clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
    new_assembly_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
    id = new_assembly_obj && new_assembly_obj.id()
    nested_objs = new_assembly_obj.get_node_assembly_nested_objects()
    #just want external ports
    (nested_objs[:nodes]||[]).each{|n|(n[:ports]||[]).reject!{|p|p[:type] == "component_internal"}}
    #TODO: ganglia hack: remove after putting this info in the r8 meta files
    (nested_objs[:nodes]||[]).each do |n|
      (n[:ports]||[]).each do |port|
        if port[:display_name] =~ /ganglia__server/
          port[:location] = "east"
        elsif port[:display_name] =~ /ganglia__monitor/
          port[:location] = "west"
        end
      end
    end
    #TODO: get node positions going for assemblies
    # compute ui positions: anchor the topmost node at assembly_left_pos and
    # shift every other node horizontally by the same delta
    parent_id = request.params["parent_id"]
    assembly_left_pos = request.params["assembly_left_pos"]
    dc_hash = get_object_by_id(parent_id,:datacenter)
    raise Error.new("Not implemented when parent_id is not a datacenter") if dc_hash.nil?
    # find the topmost node (smallest :top, starting bound 2000) to use as anchor
    top_node = {}
    top_most = 2000
    nested_objs[:nodes].each do |node|
      ui = node.get_ui_info(dc_hash)
      if ui and (ui[:top].to_i < top_most.to_i)
        left_diff = assembly_left_pos.to_i - ui[:left].to_i
        top_node = {:id=>node[:id],:ui=>ui,:left_diff=>left_diff}
        top_most = ui[:top]
      end
    end
    nested_objs[:nodes].each_with_index do |node,i|
      ui = node.get_ui_info(dc_hash)
      Log.error("no coordinates for node with id #{node[:id].to_s} in #{parent_id.to_s}") unless ui
      if ui
        if node[:id] == top_node[:id]
          ui[:left] = assembly_left_pos.to_i
        else
          ui[:left] = ui[:left].to_i + top_node[:left_diff].to_i
        end
      end
      node.update_ui_info!(ui,dc_hash)
      nested_objs[:nodes][i][:assembly_ui] = ui
    end
    nested_objs[:port_links].each_with_index do |link,i|
      nested_objs[:port_links][i][:ui] ||= {
        :type => R8::Config[:links][:default_type],
        :style => R8::Config[:links][:default_style]
      }
    end
    return {:data=>nested_objs}
  end
end
end
end
# DTK-1351: Start/stop service node does not work if name provided
module DTK
class AssemblyController < AuthController
helper :assembly_helper
helper :task_helper
#### create and delete actions ###
#TODO: rename to delete_and_destroy
def rest__delete()
assembly_id,subtype = ret_assembly_params_id_and_subtype()
if subtype == :template
#returning module_repo_info so client can update this in its local module
rest_ok_response Assembly::Template.delete_and_ret_module_repo_info(id_handle(assembly_id))
else #subtype == :instance
Assembly::Instance.delete(id_handle(assembly_id),:destroy_nodes => true)
rest_ok_response
end
end
def rest__purge()
workspace = ret_workspace_object?()
workspace.purge(:destroy_nodes => true)
rest_ok_response
end
def rest__destroy_and_reset_nodes()
assembly = ret_assembly_instance_object()
assembly.destroy_and_reset_nodes()
rest_ok_response
end
def rest__remove_from_system()
assembly = ret_assembly_instance_object()
Assembly::Instance.delete(assembly.id_handle())
rest_ok_response
end
def rest__set_target()
workspace = ret_workspace_object?()
target = create_obj(:target_id, Target::Instance)
workspace.set_target(target)
rest_ok_response
end
def rest__delete_node()
assembly = ret_assembly_instance_object()
node_idh = ret_node_id_handle(:node_id,assembly)
assembly.delete_node(node_idh,:destroy_nodes => true)
rest_ok_response
end
def rest__delete_component()
# Retrieving node_id to validate if component belongs to node when delete-component invoked from component-level context
node_id = ret_non_null_request_params(:node_id)
component_id = ret_non_null_request_params(:component_id)
assembly = ret_assembly_instance_object()
assembly_id = assembly.id()
cmp_full_name = ret_request_params(:cmp_full_name)
if cmp_full_name
cmp_idh = ret_component_id_handle(:cmp_full_name,:assembly_id => assembly_id)
else
cmp_idh = id_handle(component_id,:component)
end
assembly.delete_component(cmp_idh, node_id)
rest_ok_response
end
#### end: create and delete actions ###
#### list and info actions ###
def rest__info()
assembly = ret_assembly_object()
node_id, component_id, attribute_id = ret_request_params(:node_id, :component_id, :attribute_id)
rest_ok_response assembly.info(node_id, component_id, attribute_id), :encode_into => :yaml
end
def rest__rename()
assembly = ret_assembly_object()
assembly_name = ret_non_null_request_params(:assembly_name)
new_assembly_name = ret_non_null_request_params(:new_assembly_name)
rest_ok_response assembly.rename(model_handle(), assembly_name, new_assembly_name)
end
#TODO: may be cleaner if we break into list_nodes, list_components with some shared helper functions
# Returns info about one facet of the assembly (:nodes, :components, :tasks,
# :attributes, ...), optionally filtered by node/component and detail level.
# Raises ErrorUsage on an unsupported :format, or on an :about value that is
# not legal for the assembly subtype (see AboutEnum).
def rest__info_about()
  node_id, component_id, detail_level, detail_to_include = ret_request_params(:node_id, :component_id, :detail_level, :detail_to_include)
  assembly,subtype = ret_assembly_params_object_and_subtype()
  if format = ret_request_params(:format)
    format = format.to_sym
    unless SupportedFormats.include?(format)
      raise ErrorUsage.new("Illegal format (#{format}) specified; it must be one of: #{SupportedFormats.join(',')}")
    end
  end
  about = ret_non_null_request_params(:about).to_sym
  unless AboutEnum[subtype].include?(about)
    raise ErrorUsage::BadParamValue.new(:about,AboutEnum[subtype])
  end
  # keep only elements matching the node/component scope; drop hidden attributes
  filter_proc = Proc.new do |e|
    ret_val = check_element(e,[:node,:id],node_id) && check_element(e,[:attribute,:component_component_id],component_id) && e
    ret_val = nil if (e[:attribute] and e[:attribute][:hidden])
    ret_val
  end
  opts = Opts.new(:filter_proc => filter_proc, :detail_level => detail_level)
  opts.add_return_datatype!()
  if detail_to_include
    opts.merge!(:detail_to_include => detail_to_include.map{|r|r.to_sym})
    opts.add_value_to_return!(:datatype)
  end
  if about == :attributes
    # yaml output wants raw attribute values; other formats get truncated ones
    if format == :yaml
      opts.merge!(:raw_attribute_value => true,:mark_unset_required => true)
    else
      opts.merge!(:truncate_attribute_values => true,:mark_unset_required => true)
    end
  end
  if node_id
    opts.merge!(:node_cmp_name => true) unless node_id.empty?
  end
  data = assembly.info_about(about, opts)
  datatype = opts.get_datatype
  # Fix: response_opts was initialized twice (the first init near the top of
  # the method was dead code); a single initialization remains.
  response_opts = Hash.new
  if format == :yaml
    response_opts.merge!(:encode_into => :yaml)
  else
    response_opts.merge!(:datatype => datatype)
  end
  rest_ok_response data, response_opts
end
# Output formats accepted by rest__info_about's :format param.
SupportedFormats = [:yaml]
# Returns the serialized task template for the given :task_action,
# YAML-encoded when present; otherwise a plain "not yet generated" message.
def rest__info_about_task()
  assembly = ret_assembly_instance_object()
  action = ret_request_params(:task_action)
  content = assembly.get_task_template_serialized_content(action, :donot_parse => true, :action_types => [:assembly])
  if content
    rest_ok_response content, :encode_into => :yaml
  else
    rest_ok_response({:message => "Task not yet generated for assembly (#{assembly.get_field?(:display_name)})"}, {})
  end
end
# Cancels a task; when no :task_id is given, targets the most recent
# executing task of this service instance (raising if none is running).
def rest__cancel_task()
  assembly = ret_assembly_instance_object()
  top_task_id = ret_request_params(:task_id)
  unless top_task_id
    running = get_most_recent_executing_task([:eq,:assembly_id,assembly.id()])
    raise ErrorUsage.new("No running tasks found") unless running
    top_task_id = running.id()
  end
  cancel_task(top_task_id)
  rest_ok_response :task_id => top_task_id
end
# Lists the modules used by the assembly templates named in :assemblies.
def rest__list_modules()
  templates = get_assemblies_from_ids(ret_request_params(:assemblies))
  rest_ok_response Assembly::Template.list_modules(templates)
end
# Prepares a component or service module of this instance for editing.
def rest__prepare_for_edit_module()
  assembly = ret_assembly_instance_object()
  module_type = ret_non_null_request_params(:module_type).to_sym
  response =
    case module_type
    when :component_module
      AssemblyModule::Component.prepare_for_edit(assembly, create_obj(:module_name, ComponentModule))
    when :service_module
      AssemblyModule::Service.prepare_for_edit(assembly, ret_non_null_request_params(:modification_type).to_sym)
    else
      raise ErrorUsage.new("Illegal module_type #{module_type}")
    end
  rest_ok_response response
end
# Promotes component-module edits made in this instance back to the module.
def rest__promote_module_updates()
  assembly = ret_assembly_instance_object()
  unless ret_non_null_request_params(:module_type).to_sym == :component_module
    raise Error.new("promote_module_changes only treats component_module type")
  end
  # validates presence of :module_name; create_obj reads the same param itself
  ret_non_null_request_params(:module_name)
  component_module = create_obj(:module_name, ComponentModule)
  rest_ok_response AssemblyModule::Component.promote_module_updates(assembly, component_module, ret_boolean_params_hash(:force))
end
# Records a simple dependency between two component templates.
def rest__create_component_dependency()
  assembly = ret_assembly_instance_object()
  dependent = ret_component_template(:component_template_id)
  antecedent = ret_component_template(:antecedent_component_template_id)
  AssemblyModule::Component.create_component_dependency?(:simple, assembly, dependent, antecedent)
  rest_ok_response
end
# checks element through set of fields
# Returns true when element_id_val is blank (no filtering requested);
# otherwise digs through +fields+ and compares the value found against
# element_id_val coerced to an integer.
def check_element(element, fields, element_id_val)
  return true if element_id_val.nil? || element_id_val.empty?
  return false if element.nil?
  found = fields.reduce(element.dup) do |current, field|
    return false if current[field].nil?
    current[field]
  end
  found == element_id_val.to_i
end
# Legal values for rest__info_about's :about param, keyed by assembly subtype.
AboutEnum = {
:instance => [:nodes,:components,:tasks,:attributes,:modules],
:template => [:nodes,:components,:targets]
}
# NOTE(review): appears unused in this file; rest__info_about builds its own
# inline filter_proc instead — confirm before removing.
FilterProc = {
:attributes => lambda{|attr|not attr[:hidden]}
}
# Creates ad hoc attribute links between the given target and source terms.
def rest__add_ad_hoc_attribute_links()
  assembly = ret_assembly_instance_object()
  target_attr_term, source_attr_term = ret_non_null_request_params(:target_attribute_term, :source_attribute_term)
  opts = {}
  # :update_meta defaults to true; only an explicit false disables it
  opts[:update_meta] = true unless ret_request_params(:update_meta) == false
  AttributeLink::AdHoc.create_adhoc_links(assembly, target_attr_term, source_attr_term, opts)
  rest_ok_response
end
# Deletes the service link (port link) identified by the request.
def rest__delete_service_link()
  link = ret_port_link()
  Model.delete_instance(link.id_handle())
  rest_ok_response
end
# Adds a service link between an input and an output component of this
# instance; returns the new link's id.
def rest__add_service_link()
  assembly = ret_assembly_instance_object()
  scope = {:assembly_id => assembly.id()}
  input_idh = ret_component_id_handle(:input_component_id, scope)
  output_idh = ret_component_id_handle(:output_component_id, scope)
  link_idh = assembly.add_service_link?(input_idh, output_idh, ret_params_hash(:dependency_name))
  rest_ok_response :service_link => link_idh.get_id()
end
# Lists attribute mappings of a port link.
# NOTE(review): still a stub — results are only dumped to stdout via `pp`
# (debug leftover) and an empty OK response is returned to the client.
def rest__list_attribute_mappings()
port_link = ret_port_link()
#TODO: stub
ams = port_link.list_attribute_mappings()
pp ams
rest_ok_response
end
# Lists service links, optionally restricted to one input component and a
# rendering context (defaults to :assembly).
def rest__list_service_links()
  assembly = ret_assembly_instance_object()
  component_id = ret_component_id?(:component_id, :assembly_id => assembly.id())
  opts = {:context => (ret_request_params(:context) || :assembly).to_sym}
  opts.merge!(:filter => {:input_component_id => component_id}) if component_id
  rest_ok_response assembly.list_service_links(opts)
end
#TODO: deprecate below for above
# Deprecated except for the :find_possible variant.
def rest__list_connections()
  assembly = ret_assembly_instance_object()
  _find_missing, find_possible = ret_request_params(:find_missing, :find_possible)
  raise Error.new("Deprecated") unless find_possible
  rest_ok_response assembly.list_connections__possible()
end
# Lists the service add-ons available to this instance.
def rest__list_possible_add_ons()
  rest_ok_response ret_assembly_instance_object().get_service_add_ons()
end
# Returns the instance's attributes in print form, optionally filtered.
def rest__get_attributes()
  filter = ret_request_params(:filter)
  filter &&= filter.to_sym
  rest_ok_response ret_assembly_instance_object().get_attributes_print_form(Opts.new(:filter => filter))
end
# Returns the workspace object for the current model handle.
def rest__workspace_object()
  workspace = Assembly::Instance.get_workspace_object(model_handle(), {})
  rest_ok_response workspace
end
# Lists assembly instances or templates depending on the requested subtype.
def rest__list()
  result =
    if ret_assembly_subtype() == :instance
      Assembly::Instance.list(model_handle(), ret_params_hash(:filter, :detail_level))
    else
      project = get_default_project()
      opts = {:version_suffix => true}.merge(ret_params_hash(:filter, :detail_level))
      opts.merge!(:project_idh => project.id_handle())
      Assembly::Template.list(model_handle(), opts)
    end
  rest_ok_response result
end
# Lists assembly instances including the workspace pseudo-instance.
def rest__list_with_workspace()
  filter_opts = ret_params_hash(:filter)
  rest_ok_response Assembly::Instance.list_with_workspace(model_handle(), filter_opts)
end
#### end: list and info actions ###
#TODO: update what input can be
#the body has an array each element of form
# {:pattern => PAT, :value => VAL}
#pat can be one of three forms
#1 - an id
#2 - a name of form ASSEM-LEVEL-ATTR or NODE/COMONENT/CMP-ATTR, or
#3 - a pattern (TODO: give syntax) that can pick out multiple vars
# this returns same output as info about attributes, pruned for just new ones set
#TODO: this is a misnomer in that it can be used to just create attributes
# Sets (or, when :create is true, creates) attributes from the request's
# attribute-value pairs. Creation options (:required, :dynamic, :datatype)
# are only legal alongside :create.
def rest__set_attributes()
  assembly = ret_assembly_instance_object()
  av_pairs = ret_params_av_pairs()
  opts = ret_params_hash(:format, :context, :create)
  create_options = ret_boolean_params_hash(:required, :dynamic)
  if semantic_data_type = ret_request_params(:datatype)
    unless Attribute::SemanticDatatype.isa?(semantic_data_type)
      raise ErrorUsage.new("The term (#{semantic_data_type}) is not a valid data type")
    end
    create_options.merge!(:semantic_data_type => semantic_data_type)
  end
  unless create_options.empty?
    unless opts[:create]
      # Fix: message previously interpolated the option *values* (e.g. "true,true");
      # it now reports the option names.
      raise ErrorUsage.new("Options (#{create_options.keys.join(',')}) can only be given if :create is true")
    end
    opts.merge!(:attribute_properties => create_options)
  end
  # :update_meta defaults to true; only an explicit false disables it
  update_meta = ret_request_params(:update_meta)
  opts.merge!(:update_meta => true) unless update_meta == false
  assembly.set_attributes(av_pairs, opts)
  rest_ok_response
end
#### actions to update and create assembly templates
# Creates or updates an assembly template (and its service module) from this
# service instance; returns clone/update info for the resulting module.
def rest__promote_to_template()
  assembly = ret_assembly_instance_object()
  template_name, service_name = get_template_and_service_names_params(assembly)
  if template_name.nil? or service_name.nil?
    raise ErrorUsage.new("SERVICE-NAME/ASSEMBLY-NAME cannot be determined and must be explicitly given")
  end
  service_module = Assembly::Template.create_or_update_from_instance(get_default_project(), assembly, service_name, template_name, ret_symbol_params_hash(:mode))
  rest_ok_response service_module.ret_clone_update_info()
end
#### end: actions to update and create assembly templates
#### methods to modify the assembly instance
# Adds a node to the service instance and returns its id handle.
def rest__add_node()
  assembly = ret_assembly_instance_object()
  node_name = ret_non_null_request_params(:assembly_node_name)
  binding_ruleset = node_binding_ruleset?(:node_template_identifier)
  rest_ok_response assembly.add_node(node_name, binding_ruleset)
end
# Adds a component (built from a template) to a node of this instance.
def rest__add_component()
  assembly = ret_assembly_instance_object()
  template, title = ret_component_template_and_title_for_assembly(:component_template_id, assembly)
  # node_id validity is checked inside add_component, not here
  node_idh = ret_request_param_id_handle(:node_id, Node)
  new_idh = assembly.add_component(node_idh, template, title)
  rest_ok_response(:component_id => new_idh.get_id())
end
# Composes an assembly template into this service instance.
def rest__add_assembly_template()
  assembly = ret_assembly_instance_object()
  assembly.add_assembly_template(ret_assembly_template_object(:assembly_template_id))
  rest_ok_response
end
# Adds a named service add-on; returns the new sub-assembly's id.
def rest__add_service_add_on()
  assembly = ret_assembly_instance_object()
  sub_assembly_idh = assembly.service_add_on(ret_non_null_request_params(:service_add_on_name))
  rest_ok_response(:sub_assembly_id => sub_assembly_idh.get_id())
end
#### end: methods to modify the assembly instance
#### method(s) related to staging assembly template
# Stages an assembly template into a (possibly defaulted) target and returns
# the new service instance's name and id, YAML-encoded.
def rest__stage()
  target_id = ret_request_param_id_optional(:target_id, ::DTK::Target::Instance)
  target = target_idh_with_default(target_id).create_object(:model_name => :target_instance)
  staged = ret_assembly_template_object().stage(target, ret_request_params(:name))
  payload = {
    :new_service_instance => {
      :name => staged.display_name_print_form,
      :id => staged.id()
    }
  }
  rest_ok_response(payload, :encode_into => :yaml)
end
#### end: method(s) related to staging assembly template
#### creates tasks to execute/converge assemblies and monitor status
# Returns the instance's violations as {:type, :description} rows sorted by type.
def rest__find_violations()
  assembly = ret_assembly_instance_object()
  rows = assembly.find_violations().map do |violation|
    {:type => violation.type(), :description => violation.description()}
  end
  rest_ok_response rows.sort{|a,b|a[:type].to_s <=> b[:type].to_s}
end
# Creates a converge task for the service instance. If the instance is
# stopped, requires explicit :start_assembly confirmation, starts the stopped
# nodes in a background thread, and creates a task that runs once they are up.
def rest__create_task()
  assembly = ret_assembly_instance_object()
  task = nil
  if assembly.is_stopped?
    # require explicit confirmation before starting a stopped instance
    start_assembly = ret_request_params(:start_assembly)
    return rest_ok_response :confirmation_message => true if start_assembly.nil?
    node_pattern = ret_request_params(:node_pattern)
    # filters only stopped nodes for this assembly
    nodes = assembly.get_nodes(:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status)
    assembly_name = Assembly::Instance.pretty_print_name(assembly)
    nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :stopped)
    unless is_valid
      Log.info(error_msg)
      return rest_ok_response(:errors => [error_msg])
    end
    # Fix: was 'user_object = user_object = ...' (duplicated assignment); also
    # removed the unused assembly_idh lookup (assembly is already resolved above).
    user_object = ::DTK::CurrentSession.new.user_object()
    CreateThread.defer_with_session(user_object) do
      # invoking command to start the nodes
      CommandAndControl.start_instances(nodes)
    end
    opts = ret_params_hash(:commit_msg,:puppet_version)
    task = Task.create_and_start_from_assembly_instance(assembly,opts)
  else
    raise ErrorUsage, "Task is already running on requested nodes. Please wait until task is complete" if assembly.are_nodes_running?
    opts = ret_params_hash(:commit_msg,:puppet_version)
    task = Task.create_from_assembly_instance(assembly,opts)
  end
  task.save!()
  rest_ok_response :task_id => task.id
end
# leaving this commented out until we verify that the method above works properly
# def rest__create_task()
# assembly = ret_assembly_instance_object()
# if assembly.is_stopped?
# validate_params = [
# :action => :start,
# :params => {:assembly => assembly[:id]},
# :wait_for_complete => {:type => :assembly, :id => assembly[:id]}
# ]
# return rest_validate_response("Assembly is stopped, you need to start it.", validate_params)
# end
# if assembly.are_nodes_running?
# raise ErrorUsage, "Task is already running on requested nodes. Please wait until task is complete"
# end
# opts = ret_params_hash(:commit_msg,:puppet_version)
# task = Task.create_from_assembly_instance(assembly,opts)
# task.save!()
# # TODO: this was called from gui commit window
# # pp Attribute.augmented_attribute_list_from_task(task)
# rest_ok_response :task_id => task.id
# end
# #TODO: replace or given options to specify specific smoketests to run
# def rest__create_smoketests_task()
# assembly = ret_assembly_instance_object()
# opts = ret_params_hash(:commit_msg).merge(:component_type => :smoketest)
# task = Task.create_from_assembly_instance(assembly,opts)
# task.save!()
# rest_ok_response :task_id => task.id
# end
# Clears all tasks associated with the service instance.
def rest__clear_tasks()
  ret_assembly_instance_object().clear_tasks()
  rest_ok_response
end
# Starts the stopped nodes of a service instance (optionally pattern-filtered)
# in a background thread, then creates a task that converges once the nodes
# are ready. Returns the task id.
def rest__start()
  assembly = ret_assembly_instance_object()
  assembly_idh = ret_request_param_id_handle(:assembly_id,Assembly::Instance)
  node_pattern = ret_request_params(:node_pattern)
  # filters only stopped nodes for this assembly
  nodes = assembly.get_nodes(:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status)
  assembly_name = Assembly::Instance.pretty_print_name(assembly)
  nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :stopped)
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => [error_msg])
  end
  queue = SimpleActionQueue.new
  user_object = ::DTK::CurrentSession.new.user_object()
  CreateThread.defer_with_session(user_object) do
    # invoking command to start the nodes
    CommandAndControl.start_instances(nodes)
  end
  opts = {}
  opts.merge!(:node => nodes.first) if nodes.size == 1
  # Fix: removed the dead 'task = nil' initialization; task is always assigned here.
  task = Task.task_when_nodes_ready_from_assembly(assembly_idh.create_object(), :assembly, opts)
  task.save!()
  queue.set_result(:task_id => task.id)
  rest_ok_response :task_id => task.id
end
# Stops the running nodes of a service instance (optionally pattern-filtered).
def rest__stop()
  assembly = ret_assembly_instance_object()
  node_pattern = ret_request_params(:node_pattern)
  nodes = assembly.get_nodes(:id,:display_name,:type, :external_ref,:admin_op_status)
  # validates the :assembly_id param (handle itself is otherwise unused here)
  ret_request_param_id_handle(:assembly_id, Assembly::Instance)
  assembly_name = Assembly::Instance.pretty_print_name(assembly)
  nodes, is_valid, error_msg = nodes_valid_for_aws?(assembly_name, nodes, node_pattern, :running)
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => [error_msg])
  end
  CommandAndControl.stop_instances(nodes)
  rest_ok_response :status => :ok
end
##
# Validates that a node list is ready to be started or stopped.
#
# * *Args* :
#   - +assembly_name+ -> assembly name (used in error messages)
#   - +nodes+ -> node list
#   - +node_pattern+ -> regexp pattern matched against node ids
#   - +status_pattern+ -> pattern to match node status
# * *Returns* (in order):
#   - nodes filtered by pattern/status (unfiltered on error)
#   - is-valid flag
#   - error message when not valid, otherwise nil
#
# Validates that +nodes+ can be started/stopped: applies the optional id/name
# pattern, rejects 'staged' nodes, and keeps only nodes whose status matches
# status_pattern (or 'pending').
# Returns [nodes, is_valid, error_message]; error_message is nil when valid.
def nodes_valid_for_aws?(assembly_name, nodes, node_pattern, status_pattern)
  unless node_pattern.nil?
    all_nodes = nodes
    id_regex = Regexp.new(node_pattern)
    nodes = all_nodes.select{|node|node.id.to_s =~ id_regex}
    if nodes.size == 0
      # fall back to an exact display-name match
      nodes = all_nodes.select{|node|node.display_name.to_s == node_pattern.to_s}
      if nodes.size == 0
        return nodes, false, "No nodes have been matched via ID ~ '#{node_pattern}'."
      end
    end
  end
  if nodes.any?{|node|node[:type] == "staged"}
    return nodes, false, "Nodes for assembly '#{assembly_name}' are 'staged' and as such cannot be started/stopped."
  end
  # status check effectively matches /running|pending/ or /stopped|pending/
  status_regex = Regexp.new("#{status_pattern}|pending")
  matching = nodes.select{|node|node[:admin_op_status] =~ status_regex}
  if matching.size == 0
    return nodes, false, "There are no #{status_pattern} nodes for assembly '#{assembly_name}'."
  end
  [matching, true, nil]
end
# Kicks off async log retrieval; returns a queue id for polling results.
def rest__initiate_get_log()
  queue = ActionResultsQueue.new
  log_params = ret_params_hash(:node_identifier, :log_path, :start_line)
  ret_assembly_instance_object().initiate_get_log(queue, log_params)
  rest_ok_response :action_results_id => queue.id
end
# Kicks off an async grep across nodes; returns a queue id for polling results.
def rest__initiate_grep()
  queue = ActionResultsQueue.new
  grep_params = ret_params_hash(:node_pattern, :log_path, :grep_pattern, :stop_on_first_match)
  ret_assembly_instance_object().initiate_grep(queue, grep_params)
  rest_ok_response :action_results_id => queue.id
end
# Returns task status for the service instance (format defaults to :hash).
def rest__task_status()
  assembly_id = ret_request_param_id(:assembly_id, Assembly::Instance)
  format = (ret_request_params(:format) || :hash).to_sym
  rest_ok_response Task::Status::Assembly.get_status(id_handle(assembly_id), :format => format)
end
### mcollective actions
# Kicks off async netstats collection on a node; returns a queue id.
def rest__initiate_get_netstats()
  node_id = ret_non_null_request_params(:node_id)
  queue = ActionResultsQueue.new
  ret_assembly_instance_object().initiate_get_netstats(queue, node_id)
  rest_ok_response :action_results_id => queue.id
end
# Kicks off async process-list collection on a node; returns a queue id.
def rest__initiate_get_ps()
  node_id = ret_non_null_request_params(:node_id)
  queue = ActionResultsQueue.new
  ret_assembly_instance_object().initiate_get_ps(queue, node_id)
  rest_ok_response :action_results_id => queue.id
end
# Kicks off async test execution on a node; returns a queue id.
def rest__initiate_execute_tests()
  node_id = ret_non_null_request_params(:node_id)
  queue = ActionResultsQueue.new
  ret_assembly_instance_object().initiate_execute_tests(queue, node_id)
  rest_ok_response :action_results_id => queue.id
end
# Polls queued action results from either the simple or the full queue,
# optionally sorted and with post-processing disabled.
def rest__get_action_results()
  #TODO: to be safe need to garbage collect on ActionResultsQueue in case miss anything
  results_id = ret_non_null_request_params(:action_results_id)
  only_if_complete = ret_request_param_boolean(:return_only_if_complete)
  no_post_processing = ret_request_param_boolean(:disable_post_processing)
  sort_key = ret_request_params(:sort_key)
  if ret_request_param_boolean(:using_simple_queue)
    rest_ok_response SimpleActionQueue.get_results(results_id)
  else
    args = [results_id, only_if_complete, no_post_processing]
    args << sort_key.to_sym if sort_key
    rest_ok_response ActionResultsQueue.get_results(*args)
  end
end
### end: mcollective actions
#TODO: got here in cleanup of rest calls
# Lists the smoketests defined on the assembly.
def rest__list_smoketests()
  rest_ok_response ret_assembly_object().list_smoketests()
end
# Dev/test helper: returns the items of the assembly identified by +id+.
def test_get_items(id)
  assembly = id_handle(id, :component).create_object()
  {:data => assembly.get_items()}
end
# Searches library component templates ("composite" type, belonging to a
# library and not nested inside an assembly) whose common columns regex-match
# the request params; decorates each hit with an icon path and i18n title.
def search
params = request.params.dup
cols = model_class(:component).common_columns()
# one regex conjunct per request param that names a known column
filter_conjuncts = params.map do |name,value|
[:regex,name.to_sym,"^#{value}"] if cols.include?(name.to_sym)
end.compact
#restrict results to belong to library and not nested in assembly
filter_conjuncts += [[:eq,:type,"composite"],[:neq,:library_library_id,nil],[:eq,:assembly_id,nil]]
sp_hash = {
:cols => cols,
:filter => [:and] + filter_conjuncts
}
component_list = Model.get_objs(model_handle(:component),sp_hash).each{|r|r.materialize!(cols)}
i18n = get_i18n_mappings_for_models(:component)
component_list.each_with_index do |model,index|
component_list[index][:model_name] = :component
component_list[index][:ui] ||= {}
component_list[index][:ui][:images] ||= {}
# name = component_list[index][:display_name]
name = Assembly.pretty_print_name(component_list[index])
title = name.nil? ? "" : i18n_string(i18n,:component,name)
#TODO: change after implementing all the new types and making generic icons for them
# model type/subtype are hard-coded until per-type icons exist
model_type = 'service'
model_sub_type = 'db'
model_type_str = "#{model_type}-#{model_sub_type}"
prefix = "#{R8::Config[:base_images_uri]}/v1/componentIcons"
png = component_list[index][:ui][:images][:tnail] || "unknown-#{model_type_str}.png"
component_list[index][:image_path] = "#{prefix}/#{png}"
component_list[index][:i18n] = title
end
return {:data=>component_list}
end
# Placeholder; real tree data is not implemented yet.
def get_tree(id)
  {:data => 'some tree data goes here'}
end
# Maps each raw id to its assembly-template object.
def get_assemblies_from_ids(ids)
  ids.map do |raw_id|
    id_handle(raw_id.to_i, :component).create_object(:model_name => :assembly_template)
  end
end
#TODO: unify with clone(id)
#clone assembly from library to target
# Legacy (non-rest) action: clones an assembly template from the library into
# a target and computes initial UI positions for the cloned nodes. Returns the
# new assembly's id.
# NOTE(review): `id` is nil-guarded, but the get_node_assembly_nested_objects
# call below assumes clone_into returned a non-nil object — confirm.
def stage()
target_idh = target_idh_with_default(request.params["target_id"])
assembly_id = ret_request_param_id(:assembly_id,::DTK::Assembly::Template)
#TODO: if name given and not unique either reject or generate a -n suffix
assembly_name = ret_request_params(:name)
id_handle = id_handle(assembly_id)
#TODO: need to copy in avatar when hash["ui"] is non null
override_attrs = Hash.new
override_attrs[:display_name] = assembly_name if assembly_name
target_object = target_idh.create_object()
clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
new_assembly_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
id = new_assembly_obj && new_assembly_obj.id()
#compute ui positions
nested_objs = new_assembly_obj.get_node_assembly_nested_objects()
#TODO: this does not leverage assembly node relative positions
nested_objs[:nodes].each do |node|
target_object.update_ui_for_new_item(node[:id])
end
rest_ok_response(:assembly_id => id)
end
#clone assembly from library to target
# Legacy (non-rest) action: clones an assembly from the library into a target,
# strips internal ports, patches ganglia port locations, recomputes node UI
# positions relative to a parent datacenter, and returns the nested objects.
def clone(id)
handle_errors do
id_handle = id_handle(id)
hash = request.params
target_id_handle = nil
if hash["target_id"] and hash["target_model_name"]
input_target_id_handle = id_handle(hash["target_id"].to_i,hash["target_model_name"].to_sym)
target_id_handle = Model.find_real_target_id_handle(id_handle,input_target_id_handle)
else
Log.info("not implemented yet")
return redirect "/xyz/#{model_name()}/display/#{id.to_s}"
end
#TODO: need to copy in avatar when hash["ui"] is non null
override_attrs = hash["ui"] ? {:ui=>hash["ui"]} : {}
target_object = target_id_handle.create_object()
clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
new_assembly_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
id = new_assembly_obj && new_assembly_obj.id()
nested_objs = new_assembly_obj.get_node_assembly_nested_objects()
#just want external ports
(nested_objs[:nodes]||[]).each{|n|(n[:ports]||[]).reject!{|p|p[:type] == "component_internal"}}
#TODO: ganglia hack: remove after putting this info in the r8 meta files
(nested_objs[:nodes]||[]).each do |n|
(n[:ports]||[]).each do |port|
if port[:display_name] =~ /ganglia__server/
port[:location] = "east"
elsif port[:display_name] =~ /ganglia__monitor/
port[:location] = "west"
end
end
end
#TODO: get node positions going for assemblies
#compute ui positions
parent_id = request.params["parent_id"]
assembly_left_pos = request.params["assembly_left_pos"]
# node_list = get_objects(:node,{:assembly_id=>id})
dc_hash = get_object_by_id(parent_id,:datacenter)
raise Error.new("Not implemented when parent_id is not a datacenter") if dc_hash.nil?
#get the top most item in the list to set new positions
top_node = {}
top_most = 2000
# node_list.each do |node|
nested_objs[:nodes].each do |node|
# node = create_object_from_id(node_hash[:id],:node)
ui = node.get_ui_info(dc_hash)
if ui and (ui[:top].to_i < top_most.to_i)
left_diff = assembly_left_pos.to_i - ui[:left].to_i
top_node = {:id=>node[:id],:ui=>ui,:left_diff=>left_diff}
top_most = ui[:top]
end
end
# shift every node horizontally so the topmost node lands at assembly_left_pos
nested_objs[:nodes].each_with_index do |node,i|
ui = node.get_ui_info(dc_hash)
Log.error("no coordinates for node with id #{node[:id].to_s} in #{parent_id.to_s}") unless ui
if ui
if node[:id] == top_node[:id]
ui[:left] = assembly_left_pos.to_i
else
ui[:left] = ui[:left].to_i + top_node[:left_diff].to_i
end
end
node.update_ui_info!(ui,dc_hash)
nested_objs[:nodes][i][:assembly_ui] = ui
end
nested_objs[:port_links].each_with_index do |link,i|
nested_objs[:port_links][i][:ui] ||= {
:type => R8::Config[:links][:default_type],
:style => R8::Config[:links][:default_style]
}
end
return {:data=>nested_objs}
#TODO: clean this up,hack to update UI params for newly cloned object
# NOTE(review): everything below is unreachable dead code — the method always
# returns on the line above. Left in place pending the cleanup noted above.
# update_from_hash(id,{:ui=>hash["ui"]})
# hash["redirect"] ? redirect_route = "/xyz/#{hash["redirect"]}/#{id.to_s}" : redirect_route = "/xyz/#{model_name()}/display/#{id.to_s}"
if hash["model_redirect"]
base_redirect = "/xyz/#{hash["model_redirect"]}/#{hash["action_redirect"]}"
redirect_id = hash["id_redirect"].match(/^\*/) ? id.to_s : hash["id_redirect"]
redirect_route = "#{base_redirect}/#{redirect_id}"
request_params = ''
expected_params = ['model_redirect','action_redirect','id_redirect','target_id','target_model_name']
request.params.each do |name,value|
if !expected_params.include?(name)
request_params << '&' if request_params != ''
request_params << "#{name}=#{value}"
end
end
ajax_request? ? redirect_route += '.json' : nil
redirect_route << URI.encode("?#{request_params}") if request_params != ''
else
redirect_route = "/xyz/#{model_name()}/display/#{id.to_s}"
ajax_request? ? redirect_route += '.json' : nil
end
redirect redirect_route
end
end
end
end
# (removed stray '|' concatenation artifact that was a Ruby syntax error)
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module DTK
# Unified "common module" model type that fronts both service-module and
# component-module template operations.
class CommonModule < Model
# Mixins must go first
require_relative('common_module/mixin')
require_relative('common_module/class_mixin')
require_relative('common_module/dsl')
require_relative('common_module/module_repo_info')
require_relative('common_module/service')
require_relative('common_module/component')
require_relative('common_module/create')
extend CommonModule::ClassMixin
include CommonModule::Mixin
extend CommonModule::Create
extend ModuleClassMixin
# extend AutoImport
include ModuleMixin
include BaseModule::DeleteMixin
# extend DSLClassMixin
# include DSLMixin
# include ModuleRefs::Mixin
# Lists assembly templates in +project+; delegates to Service::Template.
def self.list_assembly_templates(project)
Service::Template.list_assembly_templates(project)
end
# Returns module dependencies for a remote module; delegates to Component::Template.
def self.get_module_dependencies(project, rsa_pub_key, remote_params)
Component::Template.get_module_dependencies(project, rsa_pub_key, remote_params)
end
# Installs a component module from a remote; delegates to Component::Template.
def self.install_component_module(project, local_params, remote_params, dtk_client_pub_key)
Component::Template.install_module(project, local_params, remote_params, dtk_client_pub_key)
end
# Returns { service_module_id: ... } or { component_module_id: ... } when a
# module with this namespace/name/version exists; nil otherwise.
def self.exists(project, namespace, module_name, version)
if service = Service::Template.find_from_name_with_version?(project, namespace, module_name, version)
{ service_module_id: service.id() }
elsif component = Component::Template.find_from_name_with_version?(project, namespace, module_name, version)
{ component_module_id: component.id() }
end
end
# Finds the matching common module, or nil.
def self.get_common_module?(project, namespace, module_name, version)
CommonModule.find_from_name_with_version?(project, namespace, module_name, version)
end
# TODO: Aldin: here and other places stripping out opts and adding back only if caller fn uses them
# Creates an empty module and returns its repo info.
def self.create_empty_module(project, local_params)
module_branch = create_module(project, local_params, return_module_branch: true)
ModuleRepoInfo.new(module_branch)
end
# opts can have keys
#  :force_pull - Boolean (default false)
#  :force_parse - Boolean (default false)
# Pulls repo changes and re-parses the module DSL when needed.
# NOTE(review): `parse_needed = true` below is a debug leftover that forces a
# parse on every call; `branch` and `repo_name` params are unused here.
def self.update_from_repo(project, local_params, branch, repo_name, commit_sha, opts = {})
ret = ModuleDSLInfo.new
force_pull = opts[:force_pull]
namespace = Namespace.find_by_name(project.model_handle(:namespace), local_params.namespace)
module_branch = get_workspace_module_branch(project, local_params.module_name, local_params.version, namespace)
pull_was_needed = module_branch.pull_repo_changes?(commit_sha, force_pull)
parse_needed = (opts[:force_parse] || !module_branch.dsl_parsed?)
# TODO: for debugging
parse_needed = true
return ret unless parse_needed || pull_was_needed
DSL::Parse.update_model_from_dsl(module_branch)
end
# Deletes the module, raising ErrorUsage when it does not exist.
def self.delete(project, namespace, module_name, version)
unless common_module = get_common_module?(project, namespace, module_name, version)
fail ErrorUsage.new("DTK module '#{namespace}/#{module_name}' does not exist!")
end
common_module.delete_object(skip_validations: true)
end
def self.model_type
:common_module
end
# NOTE(review): `private` has no effect on `def self.` class methods; use
# private_class_method if privacy is actually intended here.
private
# Maps a module-type symbol to its implementing class.
def self.get_class_from_type(module_type)
case module_type.to_sym
when :common_module then CommonModule
when :service_module then Service::Template
when :component_module then Component::Template
else fail ErrorUsage.new("Unknown module type '#{module_type}'.")
end
end
end
end
# small update (stray commit-message text commented out; it was not valid Ruby)
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module DTK
  # CommonModule is the unified module model that fronts the two legacy
  # module kinds (Service::Template and Component::Template), delegating
  # most operations to whichever of the two applies.
  class CommonModule < Model
    # Mixins must go first
    require_relative('common_module/mixin')
    require_relative('common_module/class_mixin')
    require_relative('common_module/dsl')
    require_relative('common_module/module_repo_info')
    require_relative('common_module/service')
    require_relative('common_module/component')
    require_relative('common_module/create')

    extend CommonModule::ClassMixin
    include CommonModule::Mixin
    extend CommonModule::Create
    extend ModuleClassMixin
    # extend AutoImport
    include ModuleMixin
    include BaseModule::DeleteMixin
    # extend DSLClassMixin
    # include DSLMixin
    # include ModuleRefs::Mixin

    # List all assembly templates in the project (delegates to service modules).
    def self.list_assembly_templates(project)
      Service::Template.list_assembly_templates(project)
    end

    # Resolve the dependency set of a remote module (delegates to component modules).
    def self.get_module_dependencies(project, rsa_pub_key, remote_params)
      Component::Template.get_module_dependencies(project, rsa_pub_key, remote_params)
    end

    # Install a component module from a remote catalog into the project.
    def self.install_component_module(project, local_params, remote_params, dtk_client_pub_key)
      Component::Template.install_module(project, local_params, remote_params, dtk_client_pub_key)
    end

    # Returns a one-key hash identifying the existing module
    # ({ service_module_id: ... } or { component_module_id: ... }),
    # or nil when neither kind matches.
    def self.exists(project, namespace, module_name, version)
      if service = Service::Template.find_from_name_with_version?(project, namespace, module_name, version)
        { service_module_id: service.id() }
      elsif component = Component::Template.find_from_name_with_version?(project, namespace, module_name, version)
        { component_module_id: component.id() }
      end
    end

    # Find the common-module object, or nil when it does not exist.
    def self.get_common_module?(project, namespace, module_name, version)
      CommonModule.find_from_name_with_version?(project, namespace, module_name, version)
    end

    # TODO: Aldin: here and other places stripping out opts and adding back only if caller fn uses them
    # Create a new, empty module and return repo info the client needs to clone it.
    def self.create_empty_module(project, local_params)
      module_branch = create_module(project, local_params, return_module_branch: true)
      ModuleRepoInfo.new(module_branch)
    end

    # opts can have keys
    #   :force_pull  - Boolean (default false)
    #   :force_parse - Boolean (default false)
    # Pulls repo changes up to commit_sha and re-parses the DSL when needed.
    # Returns a ModuleDSLInfo (empty when neither pull nor parse was needed).
    def self.update_from_repo(project, local_params, branch, repo_name, commit_sha, opts = {})
      ret = ModuleDSLInfo.new
      force_pull = opts[:force_pull]
      namespace = Namespace.find_by_name(project.model_handle(:namespace), local_params.namespace)
      module_branch = get_workspace_module_branch(project, local_params.module_name, local_params.version, namespace)
      pull_was_needed = module_branch.pull_repo_changes?(commit_sha, force_pull)
      parse_needed = (opts[:force_parse] || !module_branch.dsl_parsed?)
      # TODO: for debugging
      # NOTE(review): this debug leftover forces a DSL re-parse on every call,
      # defeating the :force_parse / dsl_parsed? logic above — remove once done.
      parse_needed = true
      return ret unless parse_needed || pull_was_needed
      DSL::Parse.update_model_from_dsl(module_branch)
    end

    # Delete the named module; raises ErrorUsage when it does not exist.
    def self.delete(project, namespace, module_name, version)
      unless common_module = get_common_module?(project, namespace, module_name, version)
        fail ErrorUsage.new("DTK module '#{namespace}:#{module_name}' does not exist!")
      end
      common_module.delete_object(skip_validations: true)
    end

    # Symbol identifying this model class inside the DTK model layer.
    def self.model_type
      :common_module
    end

    private

    # Map a module-type name (symbol or string) to its implementing class;
    # raises ErrorUsage for unknown types.
    def self.get_class_from_type(module_type)
      case module_type.to_sym
      when :common_module then CommonModule
      when :service_module then Service::Template
      when :component_module then Component::Template
      else fail ErrorUsage.new("Unknown module type '#{module_type}'.")
      end
    end
  end
end
|
module Pod
  # Interactive configuration step for a freshly generated iOS pod:
  # asks the user about demo app, test framework and snapshot testing,
  # then rewrites the template project accordingly.
  class ConfigureIOS
    attr_reader :configurator

    # Convenience entry point: build from the options hash and run.
    def self.perform(options)
      new(options).perform
    end

    # options must contain :configurator (KeyError otherwise).
    def initialize(options)
      @configurator = options.fetch(:configurator)
    end

    # Runs the question/answer flow and applies the answers to the template.
    def perform
      # NOTE(review): despite the name, :Yes here means "keep the demo" —
      # the :remove_demo_project flag below is true only when the answer was :no.
      remove_demo = configurator.ask_with_answers("Would you like to have a demo for your library", ["Yes", "No"]).to_sym

      framework = configurator.ask_with_answers("Which testing frameworks will you use", ["Specta", "Kiwi"]).to_sym
      case framework
      when :specta
        # NOTE(review): the embedded quote makes this expand to
        # `pod 'Specta', '~> 0.2.1'` inside the Podfile template — confirm
        # against Configurator#add_pod_to_podfile before changing it.
        configurator.add_pod_to_podfile "Specta', '~> 0.2.1"
        configurator.add_pod_to_podfile "Expecta"

        configurator.add_line_to_pch "#define EXP_SHORTHAND"
        configurator.add_line_to_pch "#import <Specta/Specta.h>"
        configurator.add_line_to_pch "#import <Expecta/Expecta.h>"

        configurator.set_test_framework("specta")

      when :kiwi
        configurator.add_pod_to_podfile "Kiwi"

        configurator.add_line_to_pch "#import <Kiwi/Kiwi.h>"
        configurator.set_test_framework("kiwi")
      end

      snapshots = configurator.ask_with_answers("Would you like to do view based testing", ["Yes", "No"]).to_sym
      case snapshots
      when :yes
        configurator.add_pod_to_podfile "FBSnapshotTestCase"
        configurator.add_line_to_pch "#import <FBSnapshotTestCase/FBSnapshotTestCase.h>"

        # Expecta matchers for snapshots only make sense on top of Specta.
        if framework == :specta
          configurator.add_pod_to_podfile "Expecta+Snapshots"
          configurator.add_line_to_pch "#import <Expecta+Snapshots/EXPMatchers+FBSnapshotTest.h>"
        end
      end

      # Keep asking until the prefix contains no spaces.
      prefix = nil
      loop do
        prefix = configurator.ask("What is your class prefix")
        if prefix.include?(' ')
          puts 'Your class prefix cannot contain spaces.'.red
        else
          break
        end
      end

      Pod::ProjectManipulator.new({
        :configurator => @configurator,
        :xcodeproj_path => "templates/ios/Example/PROJECT.xcodeproj",
        :platform => :ios,
        :remove_demo_project => (remove_demo == :no),
        :prefix => prefix
      }).run

      # Promote the configured template to the repository root.
      `mv ./templates/ios/* ./`
    end
  end
end
add an option to skip a BDD framework, and force demo project if you use snapshots - fixes #67
module Pod
  # Interactive configuration step for a freshly generated iOS pod.
  # This revision adds a "None" framework choice and forces the demo
  # project back on when view/snapshot testing is requested.
  class ConfigureIOS
    attr_reader :configurator

    # Convenience entry point: build from the options hash and run.
    def self.perform(options)
      new(options).perform
    end

    # options must contain :configurator (KeyError otherwise).
    def initialize(options)
      @configurator = options.fetch(:configurator)
    end

    # Runs the question/answer flow and applies the answers to the template.
    def perform
      keep_demo = configurator.ask_with_answers("Would you like to have a demo for your library", ["Yes", "No"]).to_sym

      framework = configurator.ask_with_answers("Which testing frameworks will you use", ["Specta", "Kiwi", "None"]).to_sym
      case framework
      when :specta
        # NOTE(review): the embedded quote makes this expand to
        # `pod 'Specta', '~> 0.2.1'` inside the Podfile template — confirm
        # against Configurator#add_pod_to_podfile before changing it.
        configurator.add_pod_to_podfile "Specta', '~> 0.2.1"
        configurator.add_pod_to_podfile "Expecta"

        configurator.add_line_to_pch "#define EXP_SHORTHAND"
        configurator.add_line_to_pch "#import <Specta/Specta.h>"
        configurator.add_line_to_pch "#import <Expecta/Expecta.h>"

        configurator.set_test_framework("specta")

      when :kiwi
        configurator.add_pod_to_podfile "Kiwi"

        configurator.add_line_to_pch "#import <Kiwi/Kiwi.h>"
        configurator.set_test_framework("kiwi")

      when :none
        # No BDD framework requested: nothing to add.
      end

      snapshots = configurator.ask_with_answers("Would you like to do view based testing", ["Yes", "No"]).to_sym
      case snapshots
      when :yes
        configurator.add_pod_to_podfile "FBSnapshotTestCase"
        configurator.add_line_to_pch "#import <FBSnapshotTestCase/FBSnapshotTestCase.h>"

        # Snapshot tests need a host app, so override an earlier "No" answer.
        if keep_demo == :no
          puts " Putting demo application back in, you cannot do view tests without a host application."
          keep_demo = :yes
        end

        # Expecta matchers for snapshots only make sense on top of Specta.
        if framework == :specta
          configurator.add_pod_to_podfile "Expecta+Snapshots"
          configurator.add_line_to_pch "#import <Expecta+Snapshots/EXPMatchers+FBSnapshotTest.h>"
        end
      end

      # Keep asking until the prefix contains no spaces.
      prefix = nil
      loop do
        prefix = configurator.ask("What is your class prefix")
        if prefix.include?(' ')
          puts 'Your class prefix cannot contain spaces.'.red
        else
          break
        end
      end

      Pod::ProjectManipulator.new({
        :configurator => @configurator,
        :xcodeproj_path => "templates/ios/Example/PROJECT.xcodeproj",
        :platform => :ios,
        :remove_demo_project => (keep_demo == :no),
        :prefix => prefix
      }).run

      # Promote the configured template to the repository root.
      `mv ./templates/ios/* ./`
    end
  end
end
|
$:.push File.expand_path("../lib", __FILE__)

# Maintain your gem's version:
require "github_widgets/version"

# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name        = "github_widgets"
  s.version     = GithubWidgets::VERSION
  # Fixed typo: "Ivnaov" -> "Ivanov" (matches the author's e-mail below).
  s.authors     = ["Yovoslav Ivanov"]
  s.email       = ["yovoslav.ivanov@gmail.com"]
  s.homepage    = "https://github.com/ji/github_widgets"
  s.summary     = "Rails github API widgets."
  s.description = "A full engine for ruby on rails, which contains widgets for accessing github API data."

  s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.rdoc"]
  s.test_files = Dir["test/**/*"]

  s.add_dependency "rails", "~> 3.2.1"
  s.add_dependency "jquery-rails"

  s.add_development_dependency "sqlite3"
end
Fixes a small typo.
# Make ../lib requirable so the version constant below resolves.
$:.push File.expand_path("../lib", __FILE__)

# Maintain your gem's version:
require "github_widgets/version"

# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name        = "github_widgets"
  s.version     = GithubWidgets::VERSION
  s.authors     = ["Yovoslav Ivanov"]
  s.email       = ["yovoslav.ivanov@gmail.com"]
  s.homepage    = "https://github.com/ji/github_widgets"
  s.summary     = "Rails github API widgets."
  s.description = "A full engine for ruby on rails, which contains widgets for accessing github API data."

  # Ship the engine code plus the standard metadata files.
  s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.rdoc"]
  s.test_files = Dir["test/**/*"]

  s.add_dependency "rails", "~> 3.2.1"
  s.add_dependency "jquery-rails"

  s.add_development_dependency "sqlite3"
end
|
##
# Cookbook Name:: my_cookbook
# Recipe:: default
#
# Copyright 2014, YOUR_COMPANY_NAME
#
# All rights reserved - Do Not Redistribute
#
# Iterate over every deploy entry on the node (OpsWorks-style deploy attribute).
node[:deploy].each do |application, deploy|
  # Laravel storage root for the carsifu-v2 app.
  app_root = "/srv/www/classifieds_carsifu/current/carsifu-v2/storage"

  # Ensure the storage tree exists and is group-writable by the web server.
  directory app_root do
    owner 'deploy'
    group 'www-data'
    mode '0775'
    recursive true
  end

  # Loosen permissions on WordPress cache/upload/config dirs and the Laravel
  # storage subtree, and make the Monolog handler dir traversable.
  # NOTE(review): the resource name "chmo-775" looks like a typo for
  # "chmod-775"; renaming would change the resource identity, so it is only
  # flagged here.
  execute "chmo-775" do
    command "chmod 775 /srv/www/classifieds_carsifu/current/automania-v2/wp-content/cache; chmod 775 /srv/www/classifieds_carsifu/current/automania-v2/wp-content/uploads; chmod 775 /srv/www/classifieds_carsifu/current/automania-v2/wp-content/w3tc-config; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/framework; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/framework/cache; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/framework/sessions; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/framework/views; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/logs; chmod a+x /srv/www/classifieds_carsifu/current/carsifu-v2/vendor/monolog/monolog/src/Monolog/Handler"
    action :run
  end

  # Enable the mcrypt PHP extension (required by Laravel 4/5-era apps).
  execute "php5enmod mcrypt" do
    command "php5enmod mcrypt"
    action :run
  end
end
Change carsifu wp-content path
##
# Cookbook Name:: my_cookbook
# Recipe:: default
#
# Copyright 2014, YOUR_COMPANY_NAME
#
# All rights reserved - Do Not Redistribute
#
# Iterate over every deploy entry on the node (OpsWorks-style deploy attribute).
node[:deploy].each do |application, deploy|
  # Laravel storage root for the carsifu-v2 app.
  app_root = "/srv/www/classifieds_carsifu/current/carsifu-v2/storage"

  # Ensure the storage tree exists and is group-writable by the web server.
  directory app_root do
    owner 'deploy'
    group 'www-data'
    mode '0775'
    recursive true
  end

  # Loosen permissions on WordPress cache/upload/config dirs and the Laravel
  # storage subtree, and make the Monolog handler dir traversable.
  # NOTE(review): the wp-content paths now live under /srv/www/carsifu while
  # the Laravel storage paths still use /srv/www/classifieds_carsifu — this
  # split was deliberate per the deploy layout change, but verify both roots
  # exist on the target hosts.
  execute "chmo-775" do
    command "chmod 775 /srv/www/carsifu/current/automania-v2/wp-content/cache; chmod 775 /srv/www/carsifu/current/automania-v2/wp-content/uploads; chmod 775 /srv/www/carsifu/current/automania-v2/wp-content/w3tc-config; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/framework; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/framework/cache; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/framework/sessions; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/framework/views; chmod 775 /srv/www/classifieds_carsifu/current/carsifu-v2/storage/logs; chmod a+x /srv/www/classifieds_carsifu/current/carsifu-v2/vendor/monolog/monolog/src/Monolog/Handler"
    action :run
  end

  # Enable the mcrypt PHP extension (required by Laravel 4/5-era apps).
  execute "php5enmod mcrypt" do
    command "php5enmod mcrypt"
    action :run
  end
end
|
require File.expand_path(File.dirname(__FILE__) + '/../../test_helper')
# Unit tests for Theme: associations, validations, import/export,
# activation and asset-cache handling.
class ThemeTest < ActiveSupport::TestCase
  include ThemeTestHelper

  def setup
    super
    @site = Site.first
    @theme = Theme.find_by_name 'a theme'
    @theme_params = { :name => 'another theme',
                      :version => '1.0.0',
                      :homepage => 'http://homepage.org',
                      :author => 'author',
                      :summary => 'summary' }
  end

  # ASSOCIATIONS

  test "belongs to a site" do
    @theme.should belong_to(:site)
  end

  test "has many templates" do
    @theme.should have_many(:templates)
  end

  test "the templates association only finds templates" do
    template = uploaded_template
    @theme.templates.should == [template]
  end

  test "has many images" do
    @theme.should have_many(:images)
  end

  test "the assets association only finds images" do
    image = uploaded_image
    @theme.images.should == [image]
  end

  test "has many javascripts" do
    @theme.should have_many(:javascripts)
  end

  test "the assets association only finds javascripts" do
    javascript = uploaded_javascript
    @theme.javascripts.should == [javascript]
  end

  test "has many stylesheets" do
    @theme.should have_many(:stylesheets)
  end

  test "the assets association only finds stylesheets" do
    stylesheet = uploaded_stylesheet
    @theme.stylesheets.should == [stylesheet]
  end

  test "has one preview" do
    @theme.should have_one(:preview)
  end

  test "the preview association finds a preview" do
    preview = uploaded_preview
    @theme.preview(true).should == preview
  end

  # VALIDATIONS

  test "validates the presence of a name" do
    @theme.should validate_presence_of(:name)
  end

  # CALLBACKS

  test "creates an empty preview after create" do
    theme = @site.themes.create! @theme_params
    theme.preview.should_not be_nil
  end

  # CLASS METHODS

  test "imports a zip theme" do
    theme_file = theme_fixture
    assert_difference '@site.themes.size', +1 do
      @site.themes.import(theme_file)
    end
    assert_equal 5, @site.themes.last.files.size
  end

  test "valid_theme? returns true if theme folder contains one of the theme folders: images, templates, stylesheets or javascripts" do
    assert Theme.valid_theme?(theme_fixture)
  end

  test "valid_theme? returns false if theme folder does not contain one of the theme folders: images, templates, stylesheets or javascripts" do
    assert ! Theme.valid_theme?(invalid_theme_fixture)
  end

  # INSTANCE METHODS

  test "returns about hash" do
    about_hash = { "name" => "a theme", "author" => "author", "version" => "1.0.0",
                   "homepage" => "http://homepage.org", "summary" => "summary" }
    @theme.about.should == about_hash
  end

  test "creates a file when exporting a theme" do
    theme = @site.themes.create!(:name => 'export-theme')
    zip_path = theme.export
    zip_path.should be_file
  end

  test "created ZIP file includes all theme files" do
    theme = @site.themes.create!(:name => 'export-theme')
    uploaded_stylesheet(theme)
    uploaded_javascript(theme)
    uploaded_image(theme)
    uploaded_template(theme)
    zip_path = theme.export
    zip_file = Zip::ZipFile.new(zip_path)
    theme.files.each do |file|
      zip_file.entries.map(&:name).should include(file.base_path)
    end
  end

  test "activate! activates the theme" do
    @theme.activate!
    @theme.active?.should be_true
  end

  test "deactivate! deactivates the theme" do
    @theme.update_attributes :active => true
    @theme.deactivate!
    @theme.active?.should be_false
  end

  test "find_theme_root finds the root directory of the theme from deeply nested zip archive" do
    assert_equal 'deep_nesting/theme/', Theme.send(:find_theme_root, deeply_nested_theme_fixture)
  end

  test "find_theme_root works with nested templates directory" do
    # fix that find_theme_root does not result in incorrect foo/bar/ root
    assert_equal '', Theme.send(:find_theme_root, theme_fixture)
  end

  test "strip_path strips the given folder structure out of path" do
    assert_equal 'images/image.jpg', Theme.send(:strip_path, 'deep_nesting/theme/images/image.jpg', 'deep_nesting/theme/')
  end

  test "strip_path strips only from the start" do
    assert_equal 'images/foo/bar/image.jpg', Theme.send(:strip_path, 'foo/bar/images/foo/bar/image.jpg', 'foo/bar/')
  end

  # cached_files

  # Fixed: Dir.glob gives no ordering guarantee, so compare sorted arrays
  # to keep this assertion deterministic across filesystems.
  test "#cached_files returns an array of cached files" do
    setup_theme_with_cached_files(@theme)
    assert_equal @theme.cached_files.sort, ["#{@theme.path}/javascripts/cached_javascript.js",
                                            "#{@theme.path}/javascripts/folder/cached/folder",
                                            "#{@theme.path}/javascripts/folder/cached/folder/some_javascript.css",
                                            "#{@theme.path}/stylesheets/cached_stylesheet.css"].sort
  end

  # clear_asset_cache!

  test "#clear_asset_cache! removes the cached folders and cached_ files from theme" do
    setup_theme_with_cached_files(@theme)
    @theme.clear_asset_cache!
    theme_files = Dir.glob("#{@theme.path}/**/*")
    assert ! theme_files.include?("#{@theme.path}/javascripts/cached_javascript.js")
    assert ! theme_files.include?("#{@theme.path}/stylesheets/cached_stylesheet.css")
    assert ! theme_files.include?("#{@theme.path}/javascripts/folder/cached/")
  end

  # Seed a theme with uploaded assets plus fake cache artifacts.
  # Fixed: the previous default `theme = theme` was a circular argument
  # reference that evaluated to nil; default to the fixture theme instead
  # (both call sites pass @theme explicitly).
  def setup_theme_with_cached_files(theme = @theme)
    uploaded_template(theme)
    uploaded_stylesheet(theme)
    uploaded_javascript(theme)
    uploaded_image(theme)
    FileUtils.touch(theme.path + '/stylesheets/cached_stylesheet.css')
    FileUtils.touch(theme.path + '/javascripts/cached_javascript.js')
    FileUtils.mkdir_p(theme.path + '/javascripts/folder/cached/folder')
    FileUtils.touch(theme.path + '/javascripts/folder/cached/folder/some_javascript.css')
  end
end
sort arrays to make stupid test error disappear
require File.expand_path(File.dirname(__FILE__) + '/../../test_helper')
# Unit tests for Theme: associations, validations, import/export,
# activation and asset-cache handling (revision with sorted cache asserts).
class ThemeTest < ActiveSupport::TestCase
  include ThemeTestHelper

  def setup
    super
    @site = Site.first
    @theme = Theme.find_by_name 'a theme'
    @theme_params = { :name => 'another theme',
                      :version => '1.0.0',
                      :homepage => 'http://homepage.org',
                      :author => 'author',
                      :summary => 'summary' }
  end

  # ASSOCIATIONS

  test "belongs to a site" do
    @theme.should belong_to(:site)
  end

  test "has many templates" do
    @theme.should have_many(:templates)
  end

  test "the templates association only finds templates" do
    template = uploaded_template
    @theme.templates.should == [template]
  end

  test "has many images" do
    @theme.should have_many(:images)
  end

  test "the assets association only finds images" do
    image = uploaded_image
    @theme.images.should == [image]
  end

  test "has many javascripts" do
    @theme.should have_many(:javascripts)
  end

  test "the assets association only finds javascripts" do
    javascript = uploaded_javascript
    @theme.javascripts.should == [javascript]
  end

  test "has many stylesheets" do
    @theme.should have_many(:stylesheets)
  end

  test "the assets association only finds stylesheets" do
    stylesheet = uploaded_stylesheet
    @theme.stylesheets.should == [stylesheet]
  end

  test "has one preview" do
    @theme.should have_one(:preview)
  end

  test "the preview association finds a preview" do
    preview = uploaded_preview
    @theme.preview(true).should == preview
  end

  # VALIDATIONS

  test "validates the presence of a name" do
    @theme.should validate_presence_of(:name)
  end

  # CALLBACKS

  test "creates an empty preview after create" do
    theme = @site.themes.create! @theme_params
    theme.preview.should_not be_nil
  end

  # CLASS METHODS

  test "imports a zip theme" do
    theme_file = theme_fixture
    assert_difference '@site.themes.size', +1 do
      @site.themes.import(theme_file)
    end
    assert_equal 5, @site.themes.last.files.size
  end

  test "valid_theme? returns true if theme folder contains one of the theme folders: images, templates, stylesheets or javascripts" do
    assert Theme.valid_theme?(theme_fixture)
  end

  test "valid_theme? returns false if theme folder does not contain one of the theme folders: images, templates, stylesheets or javascripts" do
    assert ! Theme.valid_theme?(invalid_theme_fixture)
  end

  # INSTANCE METHODS

  test "returns about hash" do
    about_hash = { "name" => "a theme", "author" => "author", "version" => "1.0.0",
                   "homepage" => "http://homepage.org", "summary" => "summary" }
    @theme.about.should == about_hash
  end

  test "creates a file when exporting a theme" do
    theme = @site.themes.create!(:name => 'export-theme')
    zip_path = theme.export
    zip_path.should be_file
  end

  test "created ZIP file includes all theme files" do
    theme = @site.themes.create!(:name => 'export-theme')
    uploaded_stylesheet(theme)
    uploaded_javascript(theme)
    uploaded_image(theme)
    uploaded_template(theme)
    zip_path = theme.export
    zip_file = Zip::ZipFile.new(zip_path)
    theme.files.each do |file|
      zip_file.entries.map(&:name).should include(file.base_path)
    end
  end

  test "activate! activates the theme" do
    @theme.activate!
    @theme.active?.should be_true
  end

  test "deactivate! deactivates the theme" do
    @theme.update_attributes :active => true
    @theme.deactivate!
    @theme.active?.should be_false
  end

  test "find_theme_root finds the root directory of the theme from deeply nested zip archive" do
    assert_equal 'deep_nesting/theme/', Theme.send(:find_theme_root, deeply_nested_theme_fixture)
  end

  test "find_theme_root works with nested templates directory" do
    # fix that find_theme_root does not result in incorrect foo/bar/ root
    assert_equal '', Theme.send(:find_theme_root, theme_fixture)
  end

  test "strip_path strips the given folder structure out of path" do
    assert_equal 'images/image.jpg', Theme.send(:strip_path, 'deep_nesting/theme/images/image.jpg', 'deep_nesting/theme/')
  end

  test "strip_path strips only from the start" do
    assert_equal 'images/foo/bar/image.jpg', Theme.send(:strip_path, 'foo/bar/images/foo/bar/image.jpg', 'foo/bar/')
  end

  # cached_files

  test "#cached_files returns an array of cached files" do
    setup_theme_with_cached_files(@theme)
    assert_equal @theme.cached_files.sort, ["#{@theme.path}/javascripts/cached_javascript.js",
                                            "#{@theme.path}/javascripts/folder/cached/folder",
                                            "#{@theme.path}/javascripts/folder/cached/folder/some_javascript.css",
                                            "#{@theme.path}/stylesheets/cached_stylesheet.css"].sort
  end

  # clear_asset_cache!

  test "#clear_asset_cache! removes the cached folders and cached_ files from theme" do
    setup_theme_with_cached_files(@theme)
    @theme.clear_asset_cache!
    theme_files = Dir.glob("#{@theme.path}/**/*")
    assert ! theme_files.include?("#{@theme.path}/javascripts/cached_javascript.js")
    assert ! theme_files.include?("#{@theme.path}/stylesheets/cached_stylesheet.css")
    assert ! theme_files.include?("#{@theme.path}/javascripts/folder/cached/")
  end

  # Seed a theme with uploaded assets plus fake cache artifacts.
  # Fixed: the previous default `theme = theme` was a circular argument
  # reference that evaluated to nil; default to the fixture theme instead
  # (both call sites pass @theme explicitly).
  def setup_theme_with_cached_files(theme = @theme)
    uploaded_template(theme)
    uploaded_stylesheet(theme)
    uploaded_javascript(theme)
    uploaded_image(theme)
    FileUtils.touch(theme.path + '/stylesheets/cached_stylesheet.css')
    FileUtils.touch(theme.path + '/javascripts/cached_javascript.js')
    FileUtils.mkdir_p(theme.path + '/javascripts/folder/cached/folder')
    FileUtils.touch(theme.path + '/javascripts/folder/cached/folder/some_javascript.css')
  end
end
# -*- encoding: utf-8 -*-
#
# 新生銀行
# http://www.binzume.net/
require "kconv"
require "rexml/document"
require "time"
require_relative "httpclient"
class ShinseiPowerDirect
attr_accessor :account, :account_status, :accounts, :funds
# account: optional credentials Hash with keys 'ID', 'NUM', 'PASS', 'GRID';
# when given, logs in immediately.
def initialize(account = nil)
  @account_status = {:total=>nil}
  if account
    login(account)
  end
end
##
# Log in to Shinsei PowerDirect.
#
# account is a Hash with keys 'ID', 'NUM', 'PASS' and 'GRID'
# (the security-card grid used for the challenge step).
# Ends by calling get_accounts, so @accounts is populated on success.
def login(account)
  @account = account
  ua = "Mozilla/5.0 (Windows; U; Windows NT 5.1;) PowerDirectBot/0.1"
  @client = HTTPClient.new(:agent_name => ua)
  # Step 1: submit user id / customer number / password.
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'RT',
    'fldTxnID'=>'LGN',
    'fldScrSeqNo'=>'01',
    'fldRequestorID'=>'41',
    'fldDeviceID'=>'01',
    'fldLangID'=>'JPN',
    'fldUserID'=>account['ID'],
    'fldUserNumId'=>account['NUM'],
    'fldUserPass'=>account['PASS'],
    'fldRegAuthFlag'=>'A'
  }
  url = 'https://direct18.shinseibank.co.jp/FLEXCUBEAt/LiveConnect.dll'
  res = @client.post(url, postdata)
  # Scrape the session id and the three grid-challenge coordinates out of
  # the JavaScript embedded in the response body.
  values= {}
  ['fldSessionID', 'fldGridChallange1', 'fldGridChallange2', 'fldGridChallange3', 'fldRegAuthFlag'].each{|k|
    if res.body =~/#{k}=['"](\w+)['"]/
      values[k] = $1
    end
  }
  @ssid = values['fldSessionID']
  # Step 2: answer the grid challenge using values read off the security card.
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'RT',
    'fldTxnID'=>'LGN',
    'fldScrSeqNo'=>'41',
    'fldRequestorID'=>'55',
    'fldSessionID'=> @ssid,
    'fldDeviceID'=>'01',
    'fldLangID'=>'JPN',
    'fldGridChallange1'=>getgrid(account, values['fldGridChallange1']),
    'fldGridChallange2'=>getgrid(account, values['fldGridChallange2']),
    'fldGridChallange3'=>getgrid(account, values['fldGridChallange3']),
    'fldUserID'=>'',
    'fldUserNumId'=>'',
    'fldNumSeq'=>'1',
    'fldRegAuthFlag'=>values['fldRegAuthFlag'],
  }
  url = 'https://direct18.shinseibank.co.jp/FLEXCUBEAt/LiveConnect.dll'
  res = @client.post(url, postdata)
  get_accounts
end
##
# Log out: invalidate the current PowerDirect session (@ssid).
def logout
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'RT',
    'fldTxnID'=>'CDC',
    'fldScrSeqNo'=>'49',
    'fldRequestorID'=>'',
    'fldSessionID'=> @ssid,
    'fldIncludeBal'=>'Y',
    'fldCurDef'=>'JPY'
  }
  #p postdata
  url = 'https://direct18.shinseibank.co.jp/FLEXCUBEAt/LiveConnect.dll'
  res = @client.post(url, postdata)
end
##
# Balance check: total captured by the most recent get_accounts /
# get_history call (nil before login).
def total_balance
  @account_status[:total]
end
##
# Recent transaction history of the first account
# (presumably the primary yen account — ordering comes from get_accounts).
def recent
  get_history nil, nil, @accounts.keys[0]
end
# Fetch the account summary screen and populate @accounts, @funds and
# @account_status[:total] by scraping the JavaScript arrays in the response.
def get_accounts
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'RT',
    'fldTxnID'=>'ACS',
    'fldScrSeqNo'=>'00',
    'fldRequestorID'=>'23',
    'fldSessionID'=> @ssid,
    'fldAcctID'=>'', # 400????
    'fldAcctType'=>'CHECKING',
    'fldIncludeBal'=>'Y',
    'fldPeriod'=>'',
    'fldCurDef'=>'JPY'
  }
  #p postdata
  url = 'https://direct18.shinseibank.co.jp/FLEXCUBEAt/LiveConnect.dll'
  res = @client.post(url, postdata)
  #puts res.body
  # Accounts: index -> account id, then merge per-index attributes.
  accountid=[]
  accounts = {}
  res.body.scan(/fldAccountID\[(\d+)\]="(\w+)"/) { m = Regexp.last_match
    accountid[m[1].to_i] = m[2]
    accounts[m[2]] = {:id=>m[2]}
  }
  res.body.scan(/fldAccountType\[(\d+)\]="(\w+)"/) { m = Regexp.last_match
    accounts[accountid[m[1].to_i]][:type] = m[2]
  }
  res.body.scan(/fldAccountDesc\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    accounts[accountid[m[1].to_i]][:desc] = m[2].toutf8
  }
  res.body.scan(/fldCurrCcy\[(\d+)\]="(\w+)"/) { m = Regexp.last_match
    accounts[accountid[m[1].to_i]][:curr] = m[2]
  }
  res.body.scan(/fldCurrBalance\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
    accounts[accountid[m[1].to_i]][:balance] = m[2].gsub(/,/,'').to_f
  }
  res.body.scan(/fldBaseBalance\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
    accounts[accountid[m[1].to_i]][:base_balance] = m[2].gsub(/,/,'').to_f
  }
  # Mutual-fund holdings, keyed by array index in the page.
  funds = []
  res.body.scan(/fldUHIDArray\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
    funds[m[1].to_i] = { :id => m[2]}
  }
  res.body.scan(/fldFundID\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
    funds[m[1].to_i][:fid] = m[2]
  }
  res.body.scan(/fldFundNameArray\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    funds[m[1].to_i][:name] = m[2].toutf8
  }
  res.body.scan(/fldCurrentHoldingArray\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
    funds[m[1].to_i][:holding] = m[2].gsub(/,/,'').to_i
  }
  res.body.scan(/fldValInBaseCurrArray\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
    funds[m[1].to_i][:base_curr] = m[2].gsub(/,/,'').to_f
  }
  res.body.scan(/fldCurrentNAVArray\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
    funds[m[1].to_i][:current_nav] = m[2].gsub(/,/,'').to_f
  }
  @funds = funds
  # Grand total across accounts; stays "0" (a String) when the field is
  # missing, otherwise an Integer — callers should tolerate both.
  total = "0"
  if res.body =~/fldGrandTotalCR="([\d\.,]+)"/
    total = $1.gsub(/,/,'').to_i
  end
  @accounts = accounts
  @account_status = {:total=>total}
end
# Fetch the transaction history of account `id` between `from` and `to`
# (Time/Date-like or nil for the server default range).
#
# Returns an Array of Hashes with keys :date, :description, :ref_no, :drcr,
# :amount, :in/:out and :balance. The first scraped row is the current
# balance line, so it is used for @account_status and dropped from the
# returned list.
def get_history from,to,id
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'RT',
    'fldTxnID'=>'ACA',
    'fldScrSeqNo'=>'01',
    'fldRequestorID'=>'9',
    'fldSessionID'=> @ssid,
    'fldAcctID'=> id, # 400????
    'fldAcctType'=>'CHECKING',
    'fldIncludeBal'=>'N',
    'fldStartDate'=> from ? from.strftime('%Y%m%d') : '',
    'fldEndDate'=> to ? to.strftime('%Y%m%d') : '',
    'fldStartNum'=>'0',
    'fldEndNum'=>'0',
    'fldCurDef'=>'JPY',
    'fldPeriod'=>'1'
  }
  #p postdata
  url = 'https://direct18.shinseibank.co.jp/FLEXCUBEAt/LiveConnect.dll'
  res = @client.post(url, postdata)
  history = []
  res.body.scan(/fldDate\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    history[m[1].to_i] = {:date=>m[2]}
  }
  res.body.scan(/fldDesc\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    history[m[1].to_i][:description] = m[2].toutf8
  }
  res.body.scan(/fldRefNo\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    history[m[1].to_i][:ref_no] = m[2]
  }
  res.body.scan(/fldDRCRFlag\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    history[m[1].to_i][:drcr] = m[2]
  }
  res.body.scan(/fldAmount\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    history[m[1].to_i][:amount] = m[2].gsub(/[,\.]/,'').to_i
    # 'D' = debit (money out), otherwise credit (money in).
    if history[m[1].to_i][:drcr] == 'D'
      history[m[1].to_i][:out] = m[2].gsub(/[,\.]/,'').to_i
    else
      history[m[1].to_i][:in] = m[2].gsub(/[,\.]/,'').to_i
    end
  }
  res.body.scan(/fldRunningBalanceRaw\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    history[m[1].to_i][:balance] = m[2].gsub(/,/,'').to_i
  }
  # Fixed: previously history[0][:amount] raised NoMethodError on an empty
  # (or failed) response; bail out with an empty list instead.
  return [] if history.empty?
  @account_status = {:total=>history[0][:amount], :id=>id}
  history[1..-1]
end
##
# Transfer money to a pre-registered payee account.
# name = target 7digit account num.
# amount < 2000000 ?
#
# Three-screen flow: list registered payees, submit the transfer form,
# then confirm. The source account is always the first entry of @accounts.
def transfer_to_registered_account name, amount
  # Screen 1: fetch the registered-payee list.
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'RT',
    'fldTxnID'=>'ZNT',
    'fldScrSeqNo'=>'00',
    'fldRequestorID'=>'71',
    'fldSessionID'=> @ssid,
  }
  #p postdata
  url = 'https://direct18.shinseibank.co.jp/FLEXCUBEAt/LiveConnect.dll'
  res = @client.post(url, postdata)
  # Scrape the payee arrays; index in the page ties the attributes together.
  registered_account = []
  res.body.scan(/fldListPayeeAcctId\[(\d+)\]="([^"]+)"/).each{|m|
    registered_account[m[0].to_i] = {:account_id=>m[1]}
  }
  res.body.scan(/fldListPayeeAcctType\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    registered_account[m[1].to_i][:account_type] = m[2]
  }
  res.body.scan(/fldListPayeeName\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    registered_account[m[1].to_i][:name] = m[2]
  }
  res.body.scan(/fldListPayeeBank\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    registered_account[m[1].to_i][:bank] = m[2]
  }
  res.body.scan(/fldListPayeeBankKanji\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    registered_account[m[1].to_i][:bank_kanji] = m[2]
  }
  res.body.scan(/fldListPayeeBankKana\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    registered_account[m[1].to_i][:bank_kana] = m[2]
  }
  res.body.scan(/fldListPayeeBranch\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    registered_account[m[1].to_i][:branch] = m[2]
  }
  res.body.scan(/fldListPayeeBranchKanji\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    registered_account[m[1].to_i][:branch_kanji] = m[2]
  }
  res.body.scan(/fldListPayeeBranchKana\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
    registered_account[m[1].to_i][:branch_kana] = m[2]
  }
  #p registered_account
  # Hidden-field values that must be echoed back on the next screen.
  values= {}
  ['fldRemitterName', 'fldInvoice', 'fldInvoicePosition','fldDomFTLimit', 'fldRemReimburse'].each{|k|
    if res.body =~/#{k}=['"]([^'"]*)['"]/
      values[k] = $1
    end
  }
  # NOTE(review): raises NoMethodError below when `name` matches no payee —
  # target_account is nil in that case.
  target_account = registered_account.find{|a| a[:account_id] == name };
  from_name = values['fldRemitterName']
  account = @accounts.keys[0] # provisional: use the first account (normal yen account)
  # Screen 2: submit the transfer details.
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'RT',
    'fldTxnID'=>'ZNT',
    'fldScrSeqNo'=>'07',
    'fldRequestorID'=>'74',
    'fldSessionID'=> @ssid,
    'fldAcctId' => account,
    'fldAcctType' => @accounts[account][:type] ,
    'fldAcctDesc'=> @accounts[account][:desc],
    'fldMemo'=> from_name,
    #'fldRemitterName'=> '',
    #'fldInvoice'=>'',
    #'fldInvoicePosition'=>'B',
    'fldTransferAmount' => amount,
    'fldTransferType'=>'P', # P(registerd) or D
    #'fldPayeeId'=>'',
    'fldPayeeName' => target_account[:name],
    'fldPayeeAcctId' => target_account[:account_id],
    'fldPayeeAcctType' => target_account[:account_type],
    #fldPayeeBankCode:undefined
    'fldPayeeBankName' => target_account[:bank],
    'fldPayeeBankNameKana' => target_account[:bank_kana],
    'fldPayeeBankNameKanji' => target_account[:bank_kanji],
    #fldPayeeBranchCode:undefined
    'fldPayeeBranchName' => target_account[:branch],
    'fldPayeeBranchNameKana' => target_account[:branch_kana],
    'fldPayeeBranchNameKanji' => target_account[:branch_kanji],
    #fldSearchBankName:
    #fldSearchBranchName:
    #fldFlagRegister:
    #'fldDomFTLimit'=>'4000000',
    #'fldRemReimburse'=>4,
  }.merge(values)
  res = @client.post(url, postdata)
  # Hidden-field values from the confirmation page, echoed on final submit.
  values= {}
  ['fldMemo', 'fldInvoicePosition', 'fldTransferType', 'fldTransferDate', 'fldTransferFeeUnformatted',
   'fldDebitAmountUnformatted', 'fldReimbursedAmt', 'fldRemReimburse'].each{|k|
    if res.body =~/#{k}=['"]([^'"]*)['"]/
      values[k] = $1
    end
  }
  # Screen 3: confirm and execute the transfer.
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'RT',
    'fldTxnID'=>'ZNT',
    'fldScrSeqNo'=>'08',
    'fldRequestorID'=>'76',
    'fldSessionID'=> @ssid,
    'fldAcctId' => @accounts.keys[0],
    'fldAcctType' => @accounts[ @accounts.keys[0] ][:type] ,
    'fldAcctDesc'=> @accounts[ @accounts.keys[0] ][:desc],
    #'fldMemo'=> from_name,
    'fldRemitterName'=> target_account[:name],
    #'fldInvoice'=>'',
    #'fldInvoicePosition'=>'B',
    'fldTransferAmount' => amount,
    'fldTransferType'=>'P', # P(registerd) or D
    #'fldTransferDate' => transfar_date,
    #'fldPayeeId'=>'',
    'fldPayeeName' => target_account[:name],
    'fldPayeeAcctId' => target_account[:account_id],
    'fldPayeeAcctType' => target_account[:account_type],
    #fldPayeeBankCode:undefined
    'fldPayeeBankName' => target_account[:bank],
    'fldPayeeBankNameKana' => target_account[:bank_kana],
    'fldPayeeBankNameKanji' => target_account[:bank_kanji],
    #fldPayeeBranchCode:undefined
    'fldPayeeBranchName' => target_account[:branch],
    'fldPayeeBranchNameKana' => target_account[:branch_kana],
    'fldPayeeBranchNameKanji' => target_account[:branch_kanji],
    #fldSearchBankName:
    #fldSearchBranchName:
    #fldFlagRegister:
    #'fldDomFTLimit'=>'4000000',
  }.merge(values)
  # Debug output left in on purpose while this flow is being verified.
  p postdata
  res = @client.post(url, postdata)
  puts res.body
end
# Sell a mutual fund (work in progress).
# fund is one of the entries of @funds (needs :fid and :id);
# amount is the number of units to sell.
def sell_fund fund, amount
  url = 'https://direct18.shinseibank.co.jp/FLEXCUBEAt/LiveConnect.dll'
  # Screen 1: open the sell screen for the fund.
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'IS',
    'fldTxnID'=>'SMF',
    'fldScrSeqNo'=>'01',
    'fldRequestorID'=>'15',
    'fldSessionID'=> @ssid,
    'fldDefFundID'=>fund[:fid],
    'fldCDCCode'=>'',
    'fldUHID'=>fund[:id],
    'fldTkApplicable'=>'0',
  }
  res = @client.post(url, postdata)
  # Scrape the settlement-account arrays (first element of each 2-D array).
  acc= {}
  ['fldBankIDArray', 'fldBranchIDArray', 'fldAcctIDArray', 'fldAcctTypeArray', 'fldAcctCurrArray',
   'fldDebitAmountUnformatted', 'fldReimbursedAmt', 'fldRemReimburse'].each{|k|
    if res.body =~/#{k}\[0\]\[0\]=['"]([^'"]*)['"]/
      acc[k] = $1
    end
  }
  # Screen 2: submit the sell order (redeem by bank transfer, unit-based).
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'IS',
    'fldTxnID'=>'SMF',
    'fldScrSeqNo'=>'02',
    'fldRequestorID'=>'16',
    'fldSessionID'=> @ssid,
    'fldMFID'=>fund[:fid],
    'fldRdmMode'=>'BANKXFER',
    'fldAcctID'=> acc['fldAcctIDArray'],
    'fldAcctType'=>acc['fldAcctTypeArray'],
    'fldAcctCurr'=>acc['fldAcctCurrArray'],
    'fldBankID'=>acc['fldBankIDArray'],
    'fldBranchID'=>acc['fldBranchIDArray'],
    'fldUHID'=>fund[:id],
    'fldTxnCurr'=> acc['fldAcctCurrArray'],
    'fldSellType'=>'UNITS',
    'fldSellUnits'=>amount,
    'fldGrossOrNet'=>'GROSS',
    'fldTkApplicable'=> '0',
  }
  #p postdata
  res = @client.post(url, postdata)
  # Hidden confirmation fields (dates, reference number) to echo back.
  values= {}
  ['fldEODRunning', 'fldTkApplicable', 'fldAllocationDate', 'fldPaymentDate', 'fldConfirmationDate', 'fldTransactionDate', 'fldFCISDPRefNo'].each{|k|
    if res.body =~/#{k}=['"]([^'"]*)['"]/
      values[k] = $1
    end
  }
  # Screen 3: confirm the order; dates go back without the '/' separators.
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'IS',
    'fldTxnID'=>'SMF',
    'fldScrSeqNo'=>'03',
    'fldRequestorID'=>'17',
    'fldSessionID'=> @ssid,
    'fldDefFundID'=>fund[:fid],
    'fldDefSellType'=>'UNITS',
    'fldDefSellUnits'=>amount,
    'fldDefTxnCurr'=> acc['fldAcctCurrArray'],
    'fldDefRdmMode'=>'BANKXFER',
    'fldDefAcctID'=> acc['fldAcctIDArray'],
    'fldDefAcctType'=>acc['fldAcctTypeArray'],
    'fldDefBankID'=>acc['fldBankIDArray'],
    'fldDefBranchID'=>acc['fldBranchIDArray'],
    'fldDefAcctCurr'=>acc['fldAcctCurrArray'],
    'fldUHID'=>fund[:id],
    'fldGrossOrNet'=>'GROSS',
    'fldEODRunning'=> values['fldEODRunning'],
    'fldUserOverride'=>'Y',
    'fldFCISDPRefNo'=> values['fldFCISDPRefNo'],
    'fldTransactionDate'=> values['fldTransactionDate'].gsub('/',''),
    'fldAllocationDate'=> values['fldAllocationDate'].gsub('/',''),
    'fldConfirmationDate'=> values['fldConfirmationDate'].gsub('/',''),
    'fldPaymentDate'=> values['fldPaymentDate'].gsub('/',''),
    'fldPreCalcFlag'=>'Y',
    'fldTkApplicable'=> values['fldTkApplicable'],
  }
  # Debug output left in on purpose while this flow is being verified.
  p postdata
  res = @client.post(url, postdata)
  puts res.body
end
private
# Answer one security-grid-card challenge.
# cell looks like "B3": a column letter A..J followed by a row digit.
# The letter is mapped to a 0-based column index, the digit is the row.
def getgrid account, cell
  col = cell[0].tr('A-J', '0-9').to_i
  row = cell[1].to_i
  account['GRID'][row][col]
end
end
リファクタ
# -*- encoding: utf-8 -*-
#
# 新生銀行
# Shinsei power direct client
#
# @author binzume http://www.binzume.net/
#
require 'kconv'
require 'time'
require_relative 'httpclient'
class ShinseiPowerDirect
attr_reader :account_status, :accounts, :funds, :last_html
attr_accessor :account
# Build the HTTP client (with a desktop-browser-ish user agent) and,
# when credentials are supplied, log in immediately.
def initialize(account = nil)
  @account_status = { :total => nil }
  @url = 'https://direct18.shinseibank.co.jp/FLEXCUBEAt/LiveConnect.dll'
  agent = "Mozilla/5.0 (Windows; U; Windows NT 5.1;) PowerDirectBot/0.1"
  @client = HTTPClient.new(:agent_name => agent)
  login(account) if account
end
##
# ログイン
#
# @param [Hash] account アカウント情報(see shinsei_account.yaml.sample)
def login(account)
@account = account
# Screen LGN/01: send user id, customer number and password.
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'RT',
'fldTxnID'=>'LGN',
'fldScrSeqNo'=>'01',
'fldRequestorID'=>'41',
'fldDeviceID'=>'01',
'fldLangID'=>'JPN',
'fldUserID'=>account['ID'],
'fldUserNumId'=>account['NUM'],
'fldUserPass'=>account['PASS'],
'fldRegAuthFlag'=>'A'
}
res = @client.post(@url, postdata)
# Scrape the session id and the three grid-card challenge cells out of
# the response. ("Challange" is the server's own spelling of these
# field names — do not correct it.)
values= {}
['fldSessionID', 'fldGridChallange1', 'fldGridChallange2', 'fldGridChallange3', 'fldRegAuthFlag'].each{|k|
if res.body =~/#{k}=['"](\w+)['"]/
values[k] = $1
end
}
@ssid = values['fldSessionID']
# Screen LGN/41: answer the grid-card challenges via getgrid to
# complete authentication.
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'RT',
'fldTxnID'=>'LGN',
'fldScrSeqNo'=>'41',
'fldRequestorID'=>'55',
'fldSessionID'=> @ssid,
'fldDeviceID'=>'01',
'fldLangID'=>'JPN',
'fldGridChallange1'=>getgrid(account, values['fldGridChallange1']),
'fldGridChallange2'=>getgrid(account, values['fldGridChallange2']),
'fldGridChallange3'=>getgrid(account, values['fldGridChallange3']),
'fldUserID'=>'',
'fldUserNumId'=>'',
'fldNumSeq'=>'1',
'fldRegAuthFlag'=>values['fldRegAuthFlag'],
}
res = @client.post(@url, postdata)
# Prime @accounts / @funds / @account_status for subsequent calls.
get_accounts
end
##
# ログアウト
# Terminate the server-side session (CDC/49 request).
# The response body is not parsed; the raw response is returned.
def logout
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'RT',
    'fldTxnID'=>'CDC',
    'fldScrSeqNo'=>'49',
    'fldRequestorID'=>'',
    'fldSessionID'=> @ssid,
    'fldIncludeBal'=>'Y',
    'fldCurDef'=>'JPY'
  }
  #p postdata
  # Dropped the unused local `res` — the post result was never read.
  @client.post(@url, postdata)
end
##
# 残高確認
#
# @return [int] 残高(yen)
# Total balance in yen, as cached by the last get_accounts /
# get_history call (nil before the first fetch).
def total_balance
  status = @account_status
  status[:total]
end
##
# 直近の取引履歴(円口座)
#
# @return [Array] 履歴の配列
# Convenience wrapper: transaction history of the first account
# (presumably the ordinary JPY account) over the server-default range.
def recent
  get_history(nil, nil, @accounts.keys.first)
end
# Fetch the account/fund overview (ACS/00 screen) and cache it:
#   @accounts       - hash keyed by account id (:id/:type/:desc/:curr/...)
#   @funds          - array of fund hashes (:id/:fid/:curr/:name/...)
#   @account_status - {:total => grand total in yen}
# Each fldXxx[i] JS array in the response is folded into the record
# at index i; the fldAccountID / fldUHIDArray scans must run first
# because they create the records the other scans fill in.
def get_accounts
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'RT',
'fldTxnID'=>'ACS',
'fldScrSeqNo'=>'00',
'fldRequestorID'=>'23',
'fldSessionID'=> @ssid,
'fldAcctID'=>'', # 400????
'fldAcctType'=>'CHECKING',
'fldIncludeBal'=>'Y',
'fldPeriod'=>'',
'fldCurDef'=>'JPY'
}
#p postdata
res = @client.post(@url, postdata)
#puts res.body
accountid=[]
accounts = {}
res.body.scan(/fldAccountID\[(\d+)\]="(\w+)"/) { m = Regexp.last_match
accountid[m[1].to_i] = m[2]
accounts[m[2]] = {:id=>m[2]}
}
res.body.scan(/fldAccountType\[(\d+)\]="(\w+)"/) { m = Regexp.last_match
accounts[accountid[m[1].to_i]][:type] = m[2]
}
res.body.scan(/fldAccountDesc\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
accounts[accountid[m[1].to_i]][:desc] = m[2].toutf8
}
res.body.scan(/fldCurrCcy\[(\d+)\]="(\w+)"/) { m = Regexp.last_match
accounts[accountid[m[1].to_i]][:curr] = m[2]
}
res.body.scan(/fldCurrBalance\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
accounts[accountid[m[1].to_i]][:balance] = m[2].gsub(/,/,'').to_f
}
res.body.scan(/fldBaseBalance\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
accounts[accountid[m[1].to_i]][:base_balance] = m[2].gsub(/,/,'').to_f
}
funds = []
res.body.scan(/fldUHIDArray\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
funds[m[1].to_i] = { :id => m[2]}
}
res.body.scan(/fldFundID\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
funds[m[1].to_i][:fid] = m[2]
}
res.body.scan(/fldUHCurrArray\[(\d+)\]="([\w]+)"/) { m = Regexp.last_match
funds[m[1].to_i][:curr] = m[2]
}
res.body.scan(/fldFundNameArray\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
funds[m[1].to_i][:name] = m[2].toutf8
}
res.body.scan(/fldCurrentHoldingArray\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
funds[m[1].to_i][:holding] = m[2].gsub(/,/,'').to_i
}
res.body.scan(/fldValInBaseCurrArray\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
funds[m[1].to_i][:base_curr] = m[2].gsub(/,/,'').to_f
}
res.body.scan(/fldCurrentNAVArray\[(\d+)\]="([\w\.,]+)"/) { m = Regexp.last_match
funds[m[1].to_i][:current_nav] = m[2].gsub(/,/,'').to_f
}
@funds = funds
# BUGFIX: the fallback used to be the String "0" while the matched
# branch stores an Integer; keep the type consistent so callers of
# total_balance always get a number.
total = 0
if res.body =~/fldGrandTotalCR="([\d\.,]+)"/
total = $1.gsub(/,/,'').to_i
end
@accounts = accounts
@account_status = {:total=>total}
end
# Fetch the transaction history (ACA/01 screen) for account +id+.
# from/to are Date/Time-like objects (anything responding to strftime)
# or nil for the server-default range.
# Returns an array of row hashes
# (:date/:description/:ref_no/:drcr/:amount/:in/:out/:balance).
def get_history from,to,id
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'RT',
'fldTxnID'=>'ACA',
'fldScrSeqNo'=>'01',
'fldRequestorID'=>'9',
'fldSessionID'=> @ssid,
'fldAcctID'=> id, # 400????
'fldAcctType'=>@accounts[id][:type],
'fldIncludeBal'=>'N',
'fldStartDate'=> from ? from.strftime('%Y%m%d') : '',
'fldEndDate'=> to ? to.strftime('%Y%m%d') : '',
'fldStartNum'=>'0',
'fldEndNum'=>'0',
'fldCurDef'=>'JPY',
'fldPeriod'=>'1'
}
#p postdata
res = @client.post(@url, postdata)
history = []
# Each fldXxx[i] JS array in the response is folded into history[i];
# the fldDate scan must run first because it creates the row hashes.
res.body.scan(/fldDate\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
history[m[1].to_i] = {:date=>m[2]}
}
res.body.scan(/fldDesc\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
history[m[1].to_i][:description] = m[2].toutf8
}
res.body.scan(/fldRefNo\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
history[m[1].to_i][:ref_no] = m[2]
}
res.body.scan(/fldDRCRFlag\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
history[m[1].to_i][:drcr] = m[2]
}
# 'D' = debit (money out), anything else is treated as credit (money
# in). Relies on the fldDRCRFlag scan above having filled :drcr first.
res.body.scan(/fldAmount\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
history[m[1].to_i][:amount] = m[2].gsub(/[,\.]/,'').to_i
if history[m[1].to_i][:drcr] == 'D'
history[m[1].to_i][:out] = m[2].gsub(/[,\.]/,'').to_i
else
history[m[1].to_i][:in] = m[2].gsub(/[,\.]/,'').to_i
end
}
res.body.scan(/fldRunningBalanceRaw\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
history[m[1].to_i][:balance] = m[2].gsub(/,/,'').to_i
}
# NOTE(review): row 0 appears to be a summary row — its amount is
# stored as the account total and it is excluded from the returned
# list; confirm against the actual screen output.
@account_status = {:total=>history[0][:amount], :id=>id}
history[1..-1]
end
##
# transfer to registered account
#
# @param [string] name = target 7digit account num. TODO:口座番号被る可能性について考える
# @param [int] amount < 2000000 ?
def transfer_to_registered_account name, amount
# Screen ZNT/00: open the transfer dialog; its response carries the
# list of registered payees.
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'RT',
'fldTxnID'=>'ZNT',
'fldScrSeqNo'=>'00',
'fldRequestorID'=>'71',
'fldSessionID'=> @ssid,
}
#p postdata
res = @client.post(@url, postdata)
# Build the payee list: the fldListPayeeAcctId scan creates the
# records, the remaining scans fill them in by index.
registered_account = []
res.body.scan(/fldListPayeeAcctId\[(\d+)\]="([^"]+)"/).each{|m|
registered_account[m[0].to_i] = {:account_id=>m[1]}
}
res.body.scan(/fldListPayeeAcctType\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
registered_account[m[1].to_i][:account_type] = m[2]
}
res.body.scan(/fldListPayeeName\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
registered_account[m[1].to_i][:name] = m[2]
}
res.body.scan(/fldListPayeeBank\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
registered_account[m[1].to_i][:bank] = m[2]
}
res.body.scan(/fldListPayeeBankKanji\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
registered_account[m[1].to_i][:bank_kanji] = m[2]
}
res.body.scan(/fldListPayeeBankKana\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
registered_account[m[1].to_i][:bank_kana] = m[2]
}
res.body.scan(/fldListPayeeBranch\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
registered_account[m[1].to_i][:branch] = m[2]
}
res.body.scan(/fldListPayeeBranchKanji\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
registered_account[m[1].to_i][:branch_kanji] = m[2]
}
res.body.scan(/fldListPayeeBranchKana\[(\d+)\]="([^"]+)"/) { m = Regexp.last_match
registered_account[m[1].to_i][:branch_kana] = m[2]
}
#p registered_account
# Scrape the sender-side defaults (remitter name, transfer limit, ...)
# which get merged into the next request.
values= {}
['fldRemitterName', 'fldInvoice', 'fldInvoicePosition','fldDomFTLimit', 'fldRemReimburse'].each{|k|
if res.body =~/#{k}=['"]([^'"]*)['"]/
values[k] = $1
end
}
# NOTE(review): if no payee matches +name+, target_account is nil and
# the hash literal below raises NoMethodError — confirm whether an
# explicit error is wanted here.
target_account = registered_account.find{|a| a[:account_id] == name };
from_name = values['fldRemitterName']
account = @accounts.keys[0] # for now, the first account (looks like the ordinary JPY account)
# Screen ZNT/07: submit the transfer details.
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'RT',
'fldTxnID'=>'ZNT',
'fldScrSeqNo'=>'07',
'fldRequestorID'=>'74',
'fldSessionID'=> @ssid,
'fldAcctId' => account,
'fldAcctType' => @accounts[account][:type] ,
'fldAcctDesc'=> @accounts[account][:desc],
'fldMemo'=> from_name,
#'fldRemitterName'=> '',
#'fldInvoice'=>'',
#'fldInvoicePosition'=>'B',
'fldTransferAmount' => amount,
'fldTransferType'=>'P', # P(registerd) or D
#'fldPayeeId'=>'',
'fldPayeeName' => target_account[:name],
'fldPayeeAcctId' => target_account[:account_id],
'fldPayeeAcctType' => target_account[:account_type],
#fldPayeeBankCode:undefined
'fldPayeeBankName' => target_account[:bank],
'fldPayeeBankNameKana' => target_account[:bank_kana],
'fldPayeeBankNameKanji' => target_account[:bank_kanji],
#fldPayeeBranchCode:undefined
'fldPayeeBranchName' => target_account[:branch],
'fldPayeeBranchNameKana' => target_account[:branch_kana],
'fldPayeeBranchNameKanji' => target_account[:branch_kanji],
#fldSearchBankName:
#fldSearchBranchName:
#fldFlagRegister:
#'fldDomFTLimit'=>'4000000',
#'fldRemReimburse'=>4,
}.merge(values)
res = @client.post(@url, postdata)
# Scrape the confirmation data (fee, debit amount, transfer date, ...)
# required by the final screen.
values= {}
['fldMemo', 'fldInvoicePosition', 'fldTransferType', 'fldTransferDate', 'fldTransferFeeUnformatted',
'fldDebitAmountUnformatted', 'fldReimbursedAmt', 'fldRemReimburse'].each{|k|
if res.body =~/#{k}=['"]([^'"]*)['"]/
values[k] = $1
end
}
# Screen ZNT/08: confirm and execute the transfer.
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'RT',
'fldTxnID'=>'ZNT',
'fldScrSeqNo'=>'08',
'fldRequestorID'=>'76',
'fldSessionID'=> @ssid,
'fldAcctId' => @accounts.keys[0],
'fldAcctType' => @accounts[ @accounts.keys[0] ][:type] ,
'fldAcctDesc'=> @accounts[ @accounts.keys[0] ][:desc],
#'fldMemo'=> from_name,
'fldRemitterName'=> target_account[:name],
#'fldInvoice'=>'',
#'fldInvoicePosition'=>'B',
'fldTransferAmount' => amount,
'fldTransferType'=>'P', # P(registerd) or D
#'fldTransferDate' => transfar_date,
#'fldPayeeId'=>'',
'fldPayeeName' => target_account[:name],
'fldPayeeAcctId' => target_account[:account_id],
'fldPayeeAcctType' => target_account[:account_type],
#fldPayeeBankCode:undefined
'fldPayeeBankName' => target_account[:bank],
'fldPayeeBankNameKana' => target_account[:bank_kana],
'fldPayeeBankNameKanji' => target_account[:bank_kanji],
#fldPayeeBranchCode:undefined
'fldPayeeBranchName' => target_account[:branch],
'fldPayeeBranchNameKana' => target_account[:branch_kana],
'fldPayeeBranchNameKanji' => target_account[:branch_kanji],
#fldSearchBankName:
#fldSearchBranchName:
#fldFlagRegister:
#'fldDomFTLimit'=>'4000000',
}.merge(values)
#p postdata
res = @client.post(@url, postdata)
# Keep the final page for caller-side inspection of the result.
@last_html = res.body
end
##
# 投資信託買う(未実装)
#
# @param [Hash] fund 投資信託情報
# @param [int] amount yen
# Buy a mutual fund (BMF/02 screen), paying by bank transfer from the
# account whose currency matches the fund's.
# NOTE(review): documented as 未実装 ("not implemented") — the request
# is sent but the response is never validated; verify before use.
def buy_fund fund, amount
  # Funding account: first account in the fund's currency.
  # NOTE(review): acc is nil when no account matches fund[:curr]; the
  # hash below then raises NoMethodError — confirm intended behavior.
  acc = @accounts.values.find{|a| a[:curr] == fund[:curr]}
  postdata = {
    'MfcISAPICommand'=>'EntryFunc',
    'fldAppID'=>'IS',
    'fldTxnID'=>'BMF',
    'fldScrSeqNo'=>'02',
    'fldRequestorID'=>'4',
    'fldSessionID'=> @ssid,
    'fldPayMode'=> 'BANKXFER',
    'fldMFID'=> fund[:fid],
    'fldBuyType'=> 'AMOUNT',
    'fldBuyUnits'=> amount,
    'fldTxnCurr'=> acc[:curr],
    'fldAcctID'=> acc[:id],
    'fldAcctType'=> acc[:type],
    'fldAcctCurr'=> acc[:curr],
    'fldBankID'=> '397', # shinsei-bank
    'fldBranchID'=> acc[:id][0..2],
    'fldUHID'=> fund[:id],
    'fldAcctBalance'=> acc[:balance],
    'fldLOIApplicable'=> '0',
    'fldCertReqd'=> '0',
    'fldSingleCert'=> '0',
    'fldGrossOrNet'=> 'GROSS',
    'fldUserOverride'=> 'Y',
    'fldTkEnabled'=> '0',
    'fldMfTk'=> '1',
    'fldTkApplicable'=>'0',
  }
  # Removed debugging `p postdata` — it dumped the whole request,
  # including the session id, to stdout.
  res = @client.post(@url, postdata)
  @last_html = res.body
end
##
# 投資信託売る
#
# @param [Hash] fund 投資信託情報( funds()で得たもののいずれか )
# @param [Int] amount:口数
def sell_fund fund, amount
# Screen SMF/01: open the sell dialog for this fund.
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'IS',
'fldTxnID'=>'SMF',
'fldScrSeqNo'=>'01',
'fldRequestorID'=>'15',
'fldSessionID'=> @ssid,
'fldDefFundID'=>fund[:fid],
'fldCDCCode'=>'',
'fldUHID'=>fund[:id],
'fldTkApplicable'=>'0',
}
res = @client.post(@url, postdata)
# Scrape the settlement-account details out of the 2-D JS arrays
# (only element [0][0] of each array is used).
acc= {}
['fldBankIDArray', 'fldBranchIDArray', 'fldAcctIDArray', 'fldAcctTypeArray', 'fldAcctCurrArray',
'fldDebitAmountUnformatted', 'fldReimbursedAmt', 'fldRemReimburse'].each{|k|
if res.body =~/#{k}\[0\]\[0\]=['"]([^'"]*)['"]/
acc[k] = $1
end
}
# Screen SMF/02: submit the sell order (amount in units, redeemed by
# bank transfer to the scraped account).
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'IS',
'fldTxnID'=>'SMF',
'fldScrSeqNo'=>'02',
'fldRequestorID'=>'16',
'fldSessionID'=> @ssid,
'fldMFID'=>fund[:fid],
'fldRdmMode'=>'BANKXFER',
'fldAcctID'=> acc['fldAcctIDArray'],
'fldAcctType'=>acc['fldAcctTypeArray'],
'fldAcctCurr'=>acc['fldAcctCurrArray'],
'fldBankID'=>acc['fldBankIDArray'],
'fldBranchID'=>acc['fldBranchIDArray'],
'fldUHID'=>fund[:id],
'fldTxnCurr'=> acc['fldAcctCurrArray'],
'fldSellType'=>'UNITS',
'fldSellUnits'=>amount,
'fldGrossOrNet'=>'GROSS',
'fldTkApplicable'=> '0',
}
#p postdata
res = @client.post(@url, postdata)
# Scrape scheduling data (allocation/confirmation/payment dates and
# the back-office reference number) needed for confirmation.
values= {}
['fldEODRunning', 'fldTkApplicable', 'fldAllocationDate', 'fldPaymentDate', 'fldConfirmationDate', 'fldTransactionDate', 'fldFCISDPRefNo'].each{|k|
if res.body =~/#{k}=['"]([^'"]*)['"]/
values[k] = $1
end
}
# Screen SMF/03: confirm. Dates are sent back with the '/' separators
# stripped.
# NOTE(review): if a date field was missing from screen 02 the gsub
# below raises NoMethodError on nil — confirm that is acceptable.
postdata = {
'MfcISAPICommand'=>'EntryFunc',
'fldAppID'=>'IS',
'fldTxnID'=>'SMF',
'fldScrSeqNo'=>'03',
'fldRequestorID'=>'17',
'fldSessionID'=> @ssid,
'fldDefFundID'=>fund[:fid],
'fldDefSellType'=>'UNITS',
'fldDefSellUnits'=>amount,
'fldDefTxnCurr'=> acc['fldAcctCurrArray'],
'fldDefRdmMode'=>'BANKXFER',
'fldDefAcctID'=> acc['fldAcctIDArray'],
'fldDefAcctType'=>acc['fldAcctTypeArray'],
'fldDefBankID'=>acc['fldBankIDArray'],
'fldDefBranchID'=>acc['fldBranchIDArray'],
'fldDefAcctCurr'=>acc['fldAcctCurrArray'],
'fldUHID'=>fund[:id],
'fldGrossOrNet'=>'GROSS',
'fldEODRunning'=> values['fldEODRunning'],
'fldUserOverride'=>'Y',
'fldFCISDPRefNo'=> values['fldFCISDPRefNo'],
'fldTransactionDate'=> values['fldTransactionDate'].gsub('/',''),
'fldAllocationDate'=> values['fldAllocationDate'].gsub('/',''),
'fldConfirmationDate'=> values['fldConfirmationDate'].gsub('/',''),
'fldPaymentDate'=> values['fldPaymentDate'].gsub('/',''),
'fldPreCalcFlag'=>'Y',
'fldTkApplicable'=> values['fldTkApplicable'],
}
#p postdata
res = @client.post(@url, postdata)
# Keep the final page for caller-side inspection of the result.
@last_html = res.body
end
private
# Answer one security-grid-card challenge.
# cell looks like "B3": a column letter A..J plus a row digit; the
# letter maps to a 0-based column index via tr.
def getgrid account, cell
  column = cell[0].tr('A-J', '0-9').to_i
  account['GRID'][cell[1].to_i][column]
end
end
|
Add Accessors module with attr_accessor_with_history method
# Mixin that provides attr_accessor_with_history: like attr_accessor,
# but every assignment is additionally recorded so the full sequence
# of written values can be read back via `<name>_history`.
module Accessors
  def self.included(base)
    base.extend ClassMethods
  end
  module ClassMethods
    # For each symbol in +args+, defines:
    #   <name>          - reader for @<name>
    #   <name>=         - writer that also appends the value to the history
    #   <name>_history  - array of every value ever assigned
    #                     (nil if the attribute was never assigned)
    def attr_accessor_with_history(*args)
      args.each do |arg|
        var_name = "@#{arg}".to_sym
        define_method(arg) { instance_variable_get(var_name) }
        define_method("#{arg}=".to_sym) do |value|
          instance_variable_set(var_name, value)
          @var_history ||= {}
          @var_history[var_name] ||= []
          @var_history[var_name] << value
        end
        # BUGFIX: guard against @var_history being nil — previously the
        # history reader raised NoMethodError when called before any
        # assignment; now it returns nil, matching the plain reader.
        define_method("#{arg}_history") { @var_history && @var_history[var_name] }
      end
    end
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = %q{bulldog}
  s.version = "0.0.11"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["George Ogata"]
  s.date = %q{2009-12-03}
  s.description = %q{= Bulldog
Flexible file attachments for active record.
}
  s.email = %q{george.ogata@gmail.com}
  s.extra_rdoc_files = [
    "LICENSE",
     "README.rdoc"
  ]
  # Complete file manifest, maintained by jeweler from the git index.
  s.files = [
    ".gitignore",
     "CHANGELOG",
     "DESCRIPTION.txt",
     "LICENSE",
     "README.rdoc",
     "Rakefile",
     "VERSION",
     "bulldog.gemspec",
     "lib/bulldog.rb",
     "lib/bulldog/attachment.rb",
     "lib/bulldog/attachment/base.rb",
     "lib/bulldog/attachment/has_dimensions.rb",
     "lib/bulldog/attachment/image.rb",
     "lib/bulldog/attachment/maybe.rb",
     "lib/bulldog/attachment/none.rb",
     "lib/bulldog/attachment/pdf.rb",
     "lib/bulldog/attachment/unknown.rb",
     "lib/bulldog/attachment/video.rb",
     "lib/bulldog/error.rb",
     "lib/bulldog/has_attachment.rb",
     "lib/bulldog/interpolation.rb",
     "lib/bulldog/missing_file.rb",
     "lib/bulldog/processor.rb",
     "lib/bulldog/processor/argument_tree.rb",
     "lib/bulldog/processor/base.rb",
     "lib/bulldog/processor/ffmpeg.rb",
     "lib/bulldog/processor/image_magick.rb",
     "lib/bulldog/processor/one_shot.rb",
     "lib/bulldog/reflection.rb",
     "lib/bulldog/saved_file.rb",
     "lib/bulldog/stream.rb",
     "lib/bulldog/style.rb",
     "lib/bulldog/style_set.rb",
     "lib/bulldog/tempfile.rb",
     "lib/bulldog/util.rb",
     "lib/bulldog/validations.rb",
     "lib/bulldog/vector2.rb",
     "rails/init.rb",
     "rails/rails.rb",
     "script/console",
     "spec/data/empty.txt",
     "spec/data/test.jpg",
     "spec/data/test.mov",
     "spec/data/test.ogg",
     "spec/data/test.pdf",
     "spec/data/test.png",
     "spec/data/test2.jpg",
     "spec/helpers/image_creation.rb",
     "spec/helpers/temporary_directory.rb",
     "spec/helpers/temporary_models.rb",
     "spec/helpers/temporary_values.rb",
     "spec/helpers/test_upload_files.rb",
     "spec/helpers/time_travel.rb",
     "spec/integration/data/test.jpg",
     "spec/integration/lifecycle_hooks_spec.rb",
     "spec/integration/processing_image_attachments.rb",
     "spec/integration/processing_video_attachments_spec.rb",
     "spec/integration/saving_an_attachment_spec.rb",
     "spec/matchers/file_operations.rb",
     "spec/spec_helper.rb",
     "spec/unit/attachment/base_spec.rb",
     "spec/unit/attachment/image_spec.rb",
     "spec/unit/attachment/maybe_spec.rb",
     "spec/unit/attachment/pdf_spec.rb",
     "spec/unit/attachment/video_spec.rb",
     "spec/unit/attachment_spec.rb",
     "spec/unit/has_attachment_spec.rb",
     "spec/unit/interpolation_spec.rb",
     "spec/unit/processor/argument_tree_spec.rb",
     "spec/unit/processor/ffmpeg_spec.rb",
     "spec/unit/processor/image_magick_spec.rb",
     "spec/unit/processor/one_shot_spec.rb",
     "spec/unit/rails_spec.rb",
     "spec/unit/reflection_spec.rb",
     "spec/unit/stream_spec.rb",
     "spec/unit/style_set_spec.rb",
     "spec/unit/style_spec.rb",
     "spec/unit/validations_spec.rb",
     "spec/unit/vector2_spec.rb",
     "tasks/bulldog_tasks.rake"
  ]
  s.homepage = %q{http://github.com/oggy/bulldog}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.5}
  s.summary = %q{A heavy-duty paperclip. File attachments for ActiveRecord.}
  # Spec files, also run via `rake spec` against an installed gem.
  s.test_files = [
    "spec/helpers/image_creation.rb",
     "spec/helpers/temporary_directory.rb",
     "spec/helpers/temporary_models.rb",
     "spec/helpers/temporary_values.rb",
     "spec/helpers/test_upload_files.rb",
     "spec/helpers/time_travel.rb",
     "spec/integration/lifecycle_hooks_spec.rb",
     "spec/integration/processing_image_attachments.rb",
     "spec/integration/processing_video_attachments_spec.rb",
     "spec/integration/saving_an_attachment_spec.rb",
     "spec/matchers/file_operations.rb",
     "spec/spec_helper.rb",
     "spec/unit/attachment/base_spec.rb",
     "spec/unit/attachment/image_spec.rb",
     "spec/unit/attachment/maybe_spec.rb",
     "spec/unit/attachment/pdf_spec.rb",
     "spec/unit/attachment/video_spec.rb",
     "spec/unit/attachment_spec.rb",
     "spec/unit/has_attachment_spec.rb",
     "spec/unit/interpolation_spec.rb",
     "spec/unit/processor/argument_tree_spec.rb",
     "spec/unit/processor/ffmpeg_spec.rb",
     "spec/unit/processor/image_magick_spec.rb",
     "spec/unit/processor/one_shot_spec.rb",
     "spec/unit/rails_spec.rb",
     "spec/unit/reflection_spec.rb",
     "spec/unit/stream_spec.rb",
     "spec/unit/style_set_spec.rb",
     "spec/unit/style_spec.rb",
     "spec/unit/validations_spec.rb",
     "spec/unit/vector2_spec.rb"
  ]
  # Boilerplate emitted by jeweler: declare dependencies with the API
  # appropriate for the RubyGems version loading this spec.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3
    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rspec>, [">= 0"])
      s.add_development_dependency(%q<rspec_outlines>, [">= 0"])
      s.add_development_dependency(%q<mocha>, [">= 0"])
    else
      s.add_dependency(%q<rspec>, [">= 0"])
      s.add_dependency(%q<rspec_outlines>, [">= 0"])
      s.add_dependency(%q<mocha>, [">= 0"])
    end
  else
    s.add_dependency(%q<rspec>, [">= 0"])
    s.add_dependency(%q<rspec_outlines>, [">= 0"])
    s.add_dependency(%q<mocha>, [">= 0"])
  end
end
Update gemspec.
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = %q{bulldog}
  s.version = "0.0.12"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["George Ogata"]
  s.date = %q{2009-12-11}
  s.description = %q{= Bulldog
Flexible file attachments for active record.
}
  s.email = %q{george.ogata@gmail.com}
  s.extra_rdoc_files = [
    "LICENSE",
     "README.rdoc"
  ]
  # Complete file manifest, maintained by jeweler from the git index.
  s.files = [
    ".gitignore",
     "CHANGELOG",
     "DESCRIPTION.txt",
     "LICENSE",
     "README.rdoc",
     "Rakefile",
     "VERSION",
     "bulldog.gemspec",
     "lib/bulldog.rb",
     "lib/bulldog/attachment.rb",
     "lib/bulldog/attachment/base.rb",
     "lib/bulldog/attachment/has_dimensions.rb",
     "lib/bulldog/attachment/image.rb",
     "lib/bulldog/attachment/maybe.rb",
     "lib/bulldog/attachment/none.rb",
     "lib/bulldog/attachment/pdf.rb",
     "lib/bulldog/attachment/unknown.rb",
     "lib/bulldog/attachment/video.rb",
     "lib/bulldog/error.rb",
     "lib/bulldog/has_attachment.rb",
     "lib/bulldog/interpolation.rb",
     "lib/bulldog/missing_file.rb",
     "lib/bulldog/processor.rb",
     "lib/bulldog/processor/argument_tree.rb",
     "lib/bulldog/processor/base.rb",
     "lib/bulldog/processor/ffmpeg.rb",
     "lib/bulldog/processor/image_magick.rb",
     "lib/bulldog/processor/one_shot.rb",
     "lib/bulldog/reflection.rb",
     "lib/bulldog/saved_file.rb",
     "lib/bulldog/stream.rb",
     "lib/bulldog/style.rb",
     "lib/bulldog/style_set.rb",
     "lib/bulldog/tempfile.rb",
     "lib/bulldog/util.rb",
     "lib/bulldog/validations.rb",
     "lib/bulldog/vector2.rb",
     "rails/init.rb",
     "rails/rails.rb",
     "script/console",
     "spec/data/empty.txt",
     "spec/data/test.jpg",
     "spec/data/test.mov",
     "spec/data/test.ogg",
     "spec/data/test.pdf",
     "spec/data/test.png",
     "spec/data/test2.jpg",
     "spec/helpers/image_creation.rb",
     "spec/helpers/temporary_directory.rb",
     "spec/helpers/temporary_models.rb",
     "spec/helpers/temporary_values.rb",
     "spec/helpers/test_upload_files.rb",
     "spec/helpers/time_travel.rb",
     "spec/integration/data/test.jpg",
     "spec/integration/lifecycle_hooks_spec.rb",
     "spec/integration/processing_image_attachments.rb",
     "spec/integration/processing_video_attachments_spec.rb",
     "spec/integration/saving_an_attachment_spec.rb",
     "spec/matchers/file_operations.rb",
     "spec/spec_helper.rb",
     "spec/unit/attachment/base_spec.rb",
     "spec/unit/attachment/image_spec.rb",
     "spec/unit/attachment/maybe_spec.rb",
     "spec/unit/attachment/pdf_spec.rb",
     "spec/unit/attachment/video_spec.rb",
     "spec/unit/attachment_spec.rb",
     "spec/unit/has_attachment_spec.rb",
     "spec/unit/interpolation_spec.rb",
     "spec/unit/processor/argument_tree_spec.rb",
     "spec/unit/processor/ffmpeg_spec.rb",
     "spec/unit/processor/image_magick_spec.rb",
     "spec/unit/processor/one_shot_spec.rb",
     "spec/unit/rails_spec.rb",
     "spec/unit/reflection_spec.rb",
     "spec/unit/stream_spec.rb",
     "spec/unit/style_set_spec.rb",
     "spec/unit/style_spec.rb",
     "spec/unit/validations_spec.rb",
     "spec/unit/vector2_spec.rb",
     "tasks/bulldog_tasks.rake"
  ]
  s.homepage = %q{http://github.com/oggy/bulldog}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.5}
  s.summary = %q{A heavy-duty paperclip. File attachments for ActiveRecord.}
  # Spec files, also run via `rake spec` against an installed gem.
  s.test_files = [
    "spec/helpers/image_creation.rb",
     "spec/helpers/temporary_directory.rb",
     "spec/helpers/temporary_models.rb",
     "spec/helpers/temporary_values.rb",
     "spec/helpers/test_upload_files.rb",
     "spec/helpers/time_travel.rb",
     "spec/integration/lifecycle_hooks_spec.rb",
     "spec/integration/processing_image_attachments.rb",
     "spec/integration/processing_video_attachments_spec.rb",
     "spec/integration/saving_an_attachment_spec.rb",
     "spec/matchers/file_operations.rb",
     "spec/spec_helper.rb",
     "spec/unit/attachment/base_spec.rb",
     "spec/unit/attachment/image_spec.rb",
     "spec/unit/attachment/maybe_spec.rb",
     "spec/unit/attachment/pdf_spec.rb",
     "spec/unit/attachment/video_spec.rb",
     "spec/unit/attachment_spec.rb",
     "spec/unit/has_attachment_spec.rb",
     "spec/unit/interpolation_spec.rb",
     "spec/unit/processor/argument_tree_spec.rb",
     "spec/unit/processor/ffmpeg_spec.rb",
     "spec/unit/processor/image_magick_spec.rb",
     "spec/unit/processor/one_shot_spec.rb",
     "spec/unit/rails_spec.rb",
     "spec/unit/reflection_spec.rb",
     "spec/unit/stream_spec.rb",
     "spec/unit/style_set_spec.rb",
     "spec/unit/style_spec.rb",
     "spec/unit/validations_spec.rb",
     "spec/unit/vector2_spec.rb"
  ]
  # Boilerplate emitted by jeweler: declare dependencies with the API
  # appropriate for the RubyGems version loading this spec.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3
    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rspec>, [">= 0"])
      s.add_development_dependency(%q<rspec_outlines>, [">= 0"])
      s.add_development_dependency(%q<mocha>, [">= 0"])
    else
      s.add_dependency(%q<rspec>, [">= 0"])
      s.add_dependency(%q<rspec_outlines>, [">= 0"])
      s.add_dependency(%q<mocha>, [">= 0"])
    end
  else
    s.add_dependency(%q<rspec>, [">= 0"])
    s.add_dependency(%q<rspec_outlines>, [">= 0"])
    s.add_dependency(%q<mocha>, [">= 0"])
  end
end
|
Pod::Spec.new do |s|
  s.name = "FeedMediaSamplePlayer"
  s.version = "1.0"
  s.summary = "Feed Media Sample Player"
  s.description = <<-DESC
Feed Media Sample Player for iOS
Resources
=========
For more information, please contact `support@fuzz.com` or check out our Github repo at [https://github.com/fuzz-radio/iOS-SDK][2].
[1]: http://feed.fm/documentation
[2]: https://github.com/fuzz-radio/iOS-SDK
[3]: http://feed.fm/dashboard
[4]: http://feed.fm/
DESC
  s.homepage = "https://github.com/fuzz-radio/iOS-SDK"
  s.author = { "FUZZ ftw!" => "eric@fuzz.com" }
  s.source = { :git => "https://github.com/GrioSF/feedmedia-ios-player.git", :commit => "9c622e84fc5f912850b3c9f5ff211abd200a562d" }
  s.source_files = "PlayerInterfaceLibrary/Player Interface", "PlayerInterfaceLibrary/Player Interface/**/*.{h,m}"
  s.license = { :type => "MIT", :file => "LICENSE" }
  # BUGFIX: a podspec dependency may only carry version requirements —
  # CocoaPods rejects external sources (:git/:tag) here; those belong
  # in a Podfile. Pin to the same 1.0.0 release the tag pointed at.
  s.dependency 'FeedMediaSdk', '~> 1.0.0'
  s.platform = :ios, "7.0"
  s.public_header_files = "PlayerInterfaceLibrary/Player Interface/**/*.h"
  s.requires_arc = true
end
Change the FeedMediaSdk dependency from a :git source to a '~> 1.0.0' version requirement (podspecs cannot specify dependency sources)
# CocoaPods spec for the Feed Media sample player UI library.
Pod::Spec.new do |s|
  s.name = "FeedMediaSamplePlayer"
  s.version = "1.0"
  s.summary = "Feed Media Sample Player"
  s.description = <<-DESC
Feed Media Sample Player for iOS
Resources
=========
For more information, please contact `support@fuzz.com` or check out our Github repo at [https://github.com/fuzz-radio/iOS-SDK][2].
[1]: http://feed.fm/documentation
[2]: https://github.com/fuzz-radio/iOS-SDK
[3]: http://feed.fm/dashboard
[4]: http://feed.fm/
DESC
  s.homepage = "https://github.com/fuzz-radio/iOS-SDK"
  s.author = { "FUZZ ftw!" => "eric@fuzz.com" }
  # Pinned to an exact commit so consumers get a reproducible checkout.
  s.source = { :git => "https://github.com/GrioSF/feedmedia-ios-player.git", :commit => "9c622e84fc5f912850b3c9f5ff211abd200a562d" }
  s.source_files = "PlayerInterfaceLibrary/Player Interface", "PlayerInterfaceLibrary/Player Interface/**/*.{h,m}"
  s.license = { :type => "MIT", :file => "LICENSE" }
  # Podspec dependencies may only carry version requirements (no :git).
  s.dependency 'FeedMediaSdk', '~> 1.0.0'
  s.platform = :ios, "7.0"
  s.public_header_files = "PlayerInterfaceLibrary/Player Interface/**/*.h"
  s.requires_arc = true
end
|
class Antlr4CppRuntime < Formula
  desc "ANother Tool for Language Recognition C++ Runtime Library"
  homepage "https://www.antlr.org/"
  url "https://www.antlr.org/download/antlr4-cpp-runtime-4.9.2-source.zip"
  sha256 "838a2c804573f927c044e5f45a8feb297683a7047ab62dfac8ddc995498db11c"
  license "BSD-3-Clause"
  # Track new source releases on the upstream download page.
  livecheck do
    url "https://www.antlr.org/download.html"
    regex(/href=.*?antlr4-cpp-runtime[._-]v?(\d+(?:\.\d+)+)-source\.zip/i)
  end
  bottle do
    sha256 cellar: :any, arm64_big_sur: "2675f194768a527b27fb7423a2cd13d449d7102f87a84648dc3dee4c9a5a2ef1"
    sha256 cellar: :any, big_sur: "1a0ecd0f7f72c1ec539b5e827d4249d163a648678fe236697a78a4acb05e3766"
    sha256 cellar: :any, catalina: "8d4d96b21b91529016470d651680f6c90f02854e7b0fa1569570c9c830da0c6b"
    sha256 cellar: :any, mojave: "e9c6ac2f0d41c5e4c69894e6c4fdfb079693eaa0f297a013a66487339439c164"
  end
  depends_on "cmake" => :build
  on_linux do
    depends_on "pkg-config" => :build
    depends_on "util-linux"
  end
  def install
    # ANTLR4_INSTALL=ON makes the runtime's install target available.
    system "cmake", ".", "-DANTLR4_INSTALL=ON", *std_cmake_args
    system "cmake", "--build", ".", "--target", "install"
  end
  # Smoke test: compile and run a program that throws and catches one
  # of the runtime's exception types, linking against the installed lib.
  test do
    (testpath/"test.cc").write <<~EOS
      #include <antlr4-runtime.h>
      int main(int argc, const char* argv[]) {
        try {
          throw antlr4::ParseCancellationException() ;
        } catch (antlr4::ParseCancellationException &exception) {
          /* ignore */
        }
        return 0 ;
      }
    EOS
    system ENV.cxx, "-std=c++11", "-I#{include}/antlr4-runtime", "test.cc",
           "-L#{lib}", "-lantlr4-runtime", "-o", "test"
    system "./test"
  end
end
antlr4-cpp-runtime: update 4.9.2 bottle.
class Antlr4CppRuntime < Formula
  desc "ANother Tool for Language Recognition C++ Runtime Library"
  homepage "https://www.antlr.org/"
  url "https://www.antlr.org/download/antlr4-cpp-runtime-4.9.2-source.zip"
  sha256 "838a2c804573f927c044e5f45a8feb297683a7047ab62dfac8ddc995498db11c"
  license "BSD-3-Clause"
  # Track new source releases on the upstream download page.
  livecheck do
    url "https://www.antlr.org/download.html"
    regex(/href=.*?antlr4-cpp-runtime[._-]v?(\d+(?:\.\d+)+)-source\.zip/i)
  end
  bottle do
    sha256 cellar: :any, arm64_big_sur: "2675f194768a527b27fb7423a2cd13d449d7102f87a84648dc3dee4c9a5a2ef1"
    sha256 cellar: :any, big_sur: "1a0ecd0f7f72c1ec539b5e827d4249d163a648678fe236697a78a4acb05e3766"
    sha256 cellar: :any, catalina: "8d4d96b21b91529016470d651680f6c90f02854e7b0fa1569570c9c830da0c6b"
    sha256 cellar: :any, mojave: "e9c6ac2f0d41c5e4c69894e6c4fdfb079693eaa0f297a013a66487339439c164"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "f3c5b79c485d2af18036f370291f4ca452f398ad85556ff1260d91b5eadaa0a8"
  end
  depends_on "cmake" => :build
  on_linux do
    depends_on "pkg-config" => :build
    depends_on "util-linux"
  end
  def install
    # ANTLR4_INSTALL=ON makes the runtime's install target available.
    system "cmake", ".", "-DANTLR4_INSTALL=ON", *std_cmake_args
    system "cmake", "--build", ".", "--target", "install"
  end
  # Smoke test: compile and run a program that throws and catches one
  # of the runtime's exception types, linking against the installed lib.
  test do
    (testpath/"test.cc").write <<~EOS
      #include <antlr4-runtime.h>
      int main(int argc, const char* argv[]) {
        try {
          throw antlr4::ParseCancellationException() ;
        } catch (antlr4::ParseCancellationException &exception) {
          /* ignore */
        }
        return 0 ;
      }
    EOS
    system ENV.cxx, "-std=c++11", "-I#{include}/antlr4-runtime", "test.cc",
           "-L#{lib}", "-lantlr4-runtime", "-o", "test"
    system "./test"
  end
end
|
gtk-murrine-engine: New formula
Added gtk-murrine-engine formula: a GTK2 engine that features a glassy
look. This formula is based on the gtk2-murrine MacPorts Portfile (https://trac.macports.org/browser/trunk/dports/gnome/gtk2-murrine/Portfile).
Closes Homebrew/homebrew#24289.
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'
# Murrine GTK2 theme engine, ported from the gtk2-murrine MacPorts
# Portfile (see commit message).
class GtkMurrineEngine < Formula
  homepage 'https://github.com/GNOME/murrine'
  # NOTE(review): plain-http source URL and a sha1 checksum — Homebrew
  # has since deprecated sha1 in favour of sha256; migrate when the
  # checksum can be recomputed from the tarball.
  url 'http://ftp.gnome.org/pub/GNOME/sources/murrine/0.98/murrine-0.98.2.tar.xz'
  sha1 'ddaca56b6e10736838572014ae9d20b814242615'
  depends_on 'intltool' => :build
  depends_on 'pkg-config' => :build
  depends_on 'gtk+'
  depends_on 'gettext'
  def install
    # --enable-animation turns on the engine's animated widget support.
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--enable-animation"
    system "make", "install"
  end
end
|
# Homebrew formula for libhttpseverywhere, a GLib-based library that brings
# the HTTPS Everywhere ruleset to desktop applications.
class Libhttpseverywhere < Formula
  desc "Bring HTTPSEverywhere to desktop apps"
  homepage "https://github.com/gnome/libhttpseverywhere"
  url "https://download.gnome.org/sources/libhttpseverywhere/0.8/libhttpseverywhere-0.8.3.tar.xz"
  sha256 "1c006f5633842a2b131c1cf644ab929556fc27968a60da55c00955bd4934b6ca"
  license "LGPL-3.0"
  revision 4

  bottle do
    cellar :any
    sha256 "0f7bd135303104c5c99fce5d6a215b7e8ec14430fe083b7f31cf187d31318a76" => :catalina
    sha256 "8f2bffc0b7677ca274a7db722d0c206094283492453dff52276a35d3c0539887" => :mojave
    sha256 "423d5b3f578db02af3da3498d0970eee3a6d26add491949f59927723503385d3" => :high_sierra
  end

  # Build-time-only tools (Vala sources are compiled via meson/ninja).
  depends_on "gobject-introspection" => :build
  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "vala" => :build
  # Runtime library dependencies.
  depends_on "glib"
  depends_on "json-glib"
  depends_on "libarchive"
  depends_on "libgee"
  depends_on "libsoup"

  # see https://gitlab.gnome.org/GNOME/libhttpseverywhere/issues/1
  # remove when next version is released
  patch do
    url "https://gitlab.gnome.org/GNOME/libhttpseverywhere/commit/6da08ef1ade9ea267cecf14dd5cb2c3e6e5e50cb.diff"
    sha256 "e5499c290c5b48b243f67763a2c710acc5bd52b90541eb8da3f8b24b516f7430"
  end

  # Out-of-source meson build in a dedicated build/ directory.
  def install
    mkdir "build" do
      system "meson", *std_meson_args, ".."
      system "ninja"
      system "ninja", "install"
    end
  end

  # Smoke test: compile and run a minimal C program against the library,
  # manually assembling include/link flags from each dependency's keg.
  test do
    (testpath/"test.c").write <<~EOS
      #include <httpseverywhere.h>
      int main(int argc, char *argv[]) {
        GType type = https_everywhere_context_get_type();
        return 0;
      }
    EOS
    ENV.libxml2
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    json_glib = Formula["json-glib"]
    libarchive = Formula["libarchive"]
    libgee = Formula["libgee"]
    libsoup = Formula["libsoup"]
    pcre = Formula["pcre"]
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{include}/httpseverywhere-0.8
      -I#{json_glib.opt_include}/json-glib-1.0
      -I#{libarchive.opt_include}
      -I#{libgee.opt_include}/gee-0.8
      -I#{libsoup.opt_include}/libsoup-2.4
      -I#{pcre.opt_include}
      -D_REENTRANT
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{json_glib.opt_lib}
      -L#{libarchive.opt_lib}
      -L#{libgee.opt_lib}
      -L#{libsoup.opt_lib}
      -L#{lib}
      -larchive
      -lgee-0.8
      -lgio-2.0
      -lglib-2.0
      -lgobject-2.0
      -lhttpseverywhere-0.8
      -lintl
      -ljson-glib-1.0
      -lsoup-2.4
      -lxml2
    ]
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
libhttpseverywhere: update license
Closes #59742.
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for libhttpseverywhere (license metadata updated to the
# SPDX identifier "LGPL-3.0-or-later"; everything else unchanged).
class Libhttpseverywhere < Formula
  desc "Bring HTTPSEverywhere to desktop apps"
  homepage "https://github.com/gnome/libhttpseverywhere"
  url "https://download.gnome.org/sources/libhttpseverywhere/0.8/libhttpseverywhere-0.8.3.tar.xz"
  sha256 "1c006f5633842a2b131c1cf644ab929556fc27968a60da55c00955bd4934b6ca"
  license "LGPL-3.0-or-later"
  revision 4

  bottle do
    cellar :any
    sha256 "0f7bd135303104c5c99fce5d6a215b7e8ec14430fe083b7f31cf187d31318a76" => :catalina
    sha256 "8f2bffc0b7677ca274a7db722d0c206094283492453dff52276a35d3c0539887" => :mojave
    sha256 "423d5b3f578db02af3da3498d0970eee3a6d26add491949f59927723503385d3" => :high_sierra
  end

  # Build-time-only tools (Vala sources are compiled via meson/ninja).
  depends_on "gobject-introspection" => :build
  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "vala" => :build
  # Runtime library dependencies.
  depends_on "glib"
  depends_on "json-glib"
  depends_on "libarchive"
  depends_on "libgee"
  depends_on "libsoup"

  # see https://gitlab.gnome.org/GNOME/libhttpseverywhere/issues/1
  # remove when next version is released
  patch do
    url "https://gitlab.gnome.org/GNOME/libhttpseverywhere/commit/6da08ef1ade9ea267cecf14dd5cb2c3e6e5e50cb.diff"
    sha256 "e5499c290c5b48b243f67763a2c710acc5bd52b90541eb8da3f8b24b516f7430"
  end

  # Out-of-source meson build in a dedicated build/ directory.
  def install
    mkdir "build" do
      system "meson", *std_meson_args, ".."
      system "ninja"
      system "ninja", "install"
    end
  end

  # Smoke test: compile and run a minimal C program against the library,
  # manually assembling include/link flags from each dependency's keg.
  test do
    (testpath/"test.c").write <<~EOS
      #include <httpseverywhere.h>
      int main(int argc, char *argv[]) {
        GType type = https_everywhere_context_get_type();
        return 0;
      }
    EOS
    ENV.libxml2
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    json_glib = Formula["json-glib"]
    libarchive = Formula["libarchive"]
    libgee = Formula["libgee"]
    libsoup = Formula["libsoup"]
    pcre = Formula["pcre"]
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{include}/httpseverywhere-0.8
      -I#{json_glib.opt_include}/json-glib-1.0
      -I#{libarchive.opt_include}
      -I#{libgee.opt_include}/gee-0.8
      -I#{libsoup.opt_include}/libsoup-2.4
      -I#{pcre.opt_include}
      -D_REENTRANT
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{json_glib.opt_lib}
      -L#{libarchive.opt_lib}
      -L#{libgee.opt_lib}
      -L#{libsoup.opt_lib}
      -L#{lib}
      -larchive
      -lgee-0.8
      -lgio-2.0
      -lglib-2.0
      -lgobject-2.0
      -lhttpseverywhere-0.8
      -lintl
      -ljson-glib-1.0
      -lsoup-2.4
      -lxml2
    ]
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
|
# Homebrew formula for Intel Open Image Denoise (OIDN).
class OpenImageDenoise < Formula
  desc "High-performance denoising library for ray tracing"
  homepage "https://openimagedenoise.github.io"
  url "https://github.com/OpenImageDenoise/oidn/releases/download/v1.2.2/oidn-1.2.2.src.tar.gz"
  sha256 "da30dfb8daa21663525124d7e44804251ba578f8c13d2024cdb92bfdda7d8121"
  license "Apache-2.0"

  bottle do
    cellar :any
    sha256 "5c36d9284a8e1d31ca9769c14650816126e43302acf5c8be3ee66d7f75f00ed7" => :catalina
    sha256 "defb1bb5d21e21f0fd1487b632568cb52daa581ecd8204cdc2a31861b1cc2e1d" => :mojave
    sha256 "6c04bd559262a2b15f03ca3e62291929d68184a63144035cca9877d1f0c2d931" => :high_sierra
  end

  depends_on "cmake" => :build
  depends_on "ispc" => :build
  # clang: error: unknown argument: '-fopenmp-simd'
  # https://github.com/OpenImageDenoise/oidn/issues/35
  depends_on :macos => :high_sierra
  depends_on "tbb"

  # Out-of-source CMake build.
  def install
    mkdir "build" do
      system "cmake", *std_cmake_args, ".."
      system "make", "install"
    end
  end

  # Smoke test: create/commit an OIDN device and return its error code
  # (0 on success) as the process exit status.
  test do
    (testpath/"test.c").write <<~EOS
      #include <OpenImageDenoise/oidn.h>
      int main() {
        OIDNDevice device = oidnNewDevice(OIDN_DEVICE_TYPE_DEFAULT);
        oidnCommitDevice(device);
        return oidnGetDeviceError(device, 0);
      }
    EOS
    system ENV.cc, "-I#{include}", "test.c", "-L#{lib}", "-lOpenImageDenoise"
    system "./a.out"
  end
end
open-image-denoise: update 1.2.2 bottle.
# Homebrew formula for Intel Open Image Denoise (OIDN); this revision only
# refreshes the bottle checksums.
class OpenImageDenoise < Formula
  desc "High-performance denoising library for ray tracing"
  homepage "https://openimagedenoise.github.io"
  url "https://github.com/OpenImageDenoise/oidn/releases/download/v1.2.2/oidn-1.2.2.src.tar.gz"
  sha256 "da30dfb8daa21663525124d7e44804251ba578f8c13d2024cdb92bfdda7d8121"
  license "Apache-2.0"

  bottle do
    cellar :any
    sha256 "406ce42f0c495827f99546bd757e4186c2d841805a3b9f88b3ce875fb968725f" => :catalina
    sha256 "a288d6dd4ce6f0b5b80d181f00be19b83c1eae84d51dd5b6dcf35735f7b7d2ab" => :mojave
    sha256 "437aba3f7b6da6bff5a81280dbe7ca5af45309992cbddee8df3f6b562b4fec48" => :high_sierra
  end

  depends_on "cmake" => :build
  depends_on "ispc" => :build
  # clang: error: unknown argument: '-fopenmp-simd'
  # https://github.com/OpenImageDenoise/oidn/issues/35
  depends_on :macos => :high_sierra
  depends_on "tbb"

  # Out-of-source CMake build.
  def install
    mkdir "build" do
      system "cmake", *std_cmake_args, ".."
      system "make", "install"
    end
  end

  # Smoke test: create/commit an OIDN device and return its error code
  # (0 on success) as the process exit status.
  test do
    (testpath/"test.c").write <<~EOS
      #include <OpenImageDenoise/oidn.h>
      int main() {
        OIDNDevice device = oidnNewDevice(OIDN_DEVICE_TYPE_DEFAULT);
        oidnCommitDevice(device);
        return oidnGetDeviceError(device, 0);
      }
    EOS
    system ENV.cc, "-I#{include}", "test.c", "-L#{lib}", "-lOpenImageDenoise"
    system "./a.out"
  end
end
|
# Homebrew formula for Percona XtraBackup 8.0.x.
class PerconaXtrabackup < Formula
  desc "Open source hot backup tool for InnoDB and XtraDB databases"
  homepage "https://www.percona.com/software/mysql-database/percona-xtrabackup"
  url "https://downloads.percona.com/downloads/Percona-XtraBackup-LATEST/Percona-XtraBackup-8.0.29-22/source/tarball/percona-xtrabackup-8.0.29-22.tar.gz"
  sha256 "7c3bdfaf0b02ec4c09b3cdb41b2a7f18f79dce9c5d396ada36fbc2557562ff55"
  revision 1

  # Version strings on the download page look like "8.0.29-22".
  livecheck do
    url "https://www.percona.com/downloads/Percona-XtraBackup-LATEST/"
    regex(/value=.*?Percona-XtraBackup[._-]v?(\d+(?:\.\d+)+-\d+)["' >]/i)
  end

  bottle do
    rebuild 1
    sha256 arm64_monterey: "458c3e1c42fb886b761b488c96b16cf2b6fabf9c15f9757be2bb1252a097b3fe"
    sha256 arm64_big_sur:  "6334b08778ac9f84cdc5bae40a3e79a3c5847bc5d4ad940c55b0d577792609e2"
    sha256 monterey:       "50e0cea6a56236121455148b9409a96321e6bfda33ccbd544b69f76d5c95a42e"
    sha256 big_sur:        "dfb6c59a87caac4de10d941443761d0cf11a8ce293339f48dff126a0e10ec82a"
    sha256 catalina:       "ed9c776f742ab91702841843d76f357a217483ff2baa1806be817efa2bf18522"
    sha256 x86_64_linux:   "a48195f5f43f4c86e45e13dda822de73917e791f8bc654d05fe0d396a6d4acf5"
  end

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  depends_on "sphinx-doc" => :build
  depends_on "icu4c"
  depends_on "libev"
  depends_on "libevent"
  depends_on "libfido2"
  depends_on "libgcrypt"
  depends_on "lz4"
  depends_on "mysql-client"
  depends_on "openssl@1.1"
  depends_on "protobuf"
  depends_on "zstd"

  uses_from_macos "vim" => :build # needed for xxd
  uses_from_macos "curl"
  uses_from_macos "cyrus-sasl"
  uses_from_macos "libedit"
  uses_from_macos "perl"
  uses_from_macos "zlib"

  on_linux do
    depends_on "patchelf" => :build
    depends_on "libaio"
  end

  conflicts_with "percona-server", because: "both install a `kmip.h`"

  fails_with :gcc do
    version "6"
    cause "The build requires GCC 7.1 or later."
  end

  # Should be installed before DBD::mysql
  resource "Devel::CheckLib" do
    url "https://cpan.metacpan.org/authors/id/M/MA/MATTN/Devel-CheckLib-1.14.tar.gz"
    sha256 "f21c5e299ad3ce0fdc0cb0f41378dca85a70e8d6c9a7599f0e56a957200ec294"
  end

  # This is not part of the system Perl on Linux and on macOS since Mojave
  resource "DBI" do
    url "https://cpan.metacpan.org/authors/id/T/TI/TIMB/DBI-1.643.tar.gz"
    sha256 "8a2b993db560a2c373c174ee976a51027dd780ec766ae17620c20393d2e836fa"
  end

  resource "DBD::mysql" do
    url "https://cpan.metacpan.org/authors/id/D/DV/DVEEDEN/DBD-mysql-4.050.tar.gz"
    sha256 "4f48541ff15a0a7405f76adc10f81627c33996fbf56c95c26c094444c0928d78"
  end

  # https://github.com/percona/percona-xtrabackup/blob/percona-xtrabackup-#{version}/cmake/boost.cmake
  resource "boost" do
    url "https://boostorg.jfrog.io/artifactory/main/release/1.77.0/source/boost_1_77_0.tar.bz2"
    sha256 "fc9f85fc030e233142908241af7a846e60630aa7388de9a5fafb1f3a26840854"
  end

  # Fix CMake install error with manpages.
  # https://github.com/percona/percona-xtrabackup/pull/1266
  patch do
    url "https://github.com/percona/percona-xtrabackup/commit/1d733eade782dd9fdf8ef66b9e9cb9e00f572606.patch?full_index=1"
    sha256 "9b38305b4e4bae23b085b3ef9cb406451fa3cc14963524e95fc1e6cbf761c7cf"
  end

  # Patch out check for Homebrew `boost`.
  # This should not be necessary when building inside `brew`.
  # https://github.com/Homebrew/homebrew-test-bot/pull/820
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/030f7433e89376ffcff836bb68b3903ab90f9cdc/percona-server/boost-check.patch"
    sha256 "3223f7eebd04b471de1c21104c46b2cdec3fe7b26e13535bdcd0d7b8fd341bde"
  end

  def install
    # Disable ABI checking
    inreplace "cmake/abi_check.cmake", "RUN_ABI_CHECK 1", "RUN_ABI_CHECK 0" if OS.linux?

    # Prefer Homebrew/system libraries over the bundled copies.
    cmake_args = %W[
      -DBUILD_CONFIG=xtrabackup_release
      -DCOMPILATION_COMMENT=Homebrew
      -DINSTALL_PLUGINDIR=lib/percona-xtrabackup/plugin
      -DINSTALL_MANDIR=share/man
      -DWITH_MAN_PAGES=ON
      -DINSTALL_MYSQLTESTDIR=
      -DWITH_SYSTEM_LIBS=ON
      -DWITH_EDITLINE=system
      -DWITH_FIDO=system
      -DWITH_ICU=system
      -DWITH_LIBEVENT=system
      -DWITH_LZ4=system
      -DWITH_PROTOBUF=system
      -DWITH_SSL=system
      -DOPENSSL_ROOT_DIR=#{Formula["openssl@1.1"].opt_prefix}
      -DWITH_ZLIB=system
      -DWITH_ZSTD=system
    ]

    # Upstream pins a specific Boost; stage it locally and point CMake at it.
    (buildpath/"boost").install resource("boost")
    cmake_args << "-DWITH_BOOST=#{buildpath}/boost"
    cmake_args.concat std_cmake_args

    # Remove conflicting manpages
    rm (Dir["man/*"] - ["man/CMakeLists.txt"])

    mkdir "build" do
      system "cmake", "..", *cmake_args
      system "make"
      system "make", "install"
    end

    # remove conflicting library that is already installed by mysql
    rm lib/"libmysqlservices.a"

    # Build the bundled Perl modules into a private libexec prefix and wrap
    # the installed scripts so they find them via PERL5LIB.
    ENV.prepend_create_path "PERL5LIB", buildpath/"build_deps/lib/perl5"
    resource("Devel::CheckLib").stage do
      system "perl", "Makefile.PL", "INSTALL_BASE=#{buildpath}/build_deps"
      system "make", "install"
    end
    ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
    # This is not part of the system Perl on Linux and on macOS since Mojave
    if OS.linux? || MacOS.version >= :mojave
      resource("DBI").stage do
        system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
        system "make", "install"
      end
    end
    resource("DBD::mysql").stage do
      system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
      system "make", "install"
    end
    bin.env_script_all_files(libexec/"bin", PERL5LIB: libexec/"lib/perl5")
  end

  # No server is running in the test environment, so a backup attempt is
  # expected to exit 1 with a connection failure message.
  test do
    assert_match version.to_s, shell_output("#{bin}/xtrabackup --version 2>&1")

    mkdir "backup"
    output = shell_output("#{bin}/xtrabackup --target-dir=backup --backup 2>&1", 1)
    assert_match "Failed to connect to MySQL server", output
  end
end
percona-xtrabackup: update 8.0.29-22_1 bottle.
# Homebrew formula for Percona XtraBackup 8.0.x; this revision adds the
# arm64_ventura bottle.
class PerconaXtrabackup < Formula
  desc "Open source hot backup tool for InnoDB and XtraDB databases"
  homepage "https://www.percona.com/software/mysql-database/percona-xtrabackup"
  url "https://downloads.percona.com/downloads/Percona-XtraBackup-LATEST/Percona-XtraBackup-8.0.29-22/source/tarball/percona-xtrabackup-8.0.29-22.tar.gz"
  sha256 "7c3bdfaf0b02ec4c09b3cdb41b2a7f18f79dce9c5d396ada36fbc2557562ff55"
  revision 1

  # Version strings on the download page look like "8.0.29-22".
  livecheck do
    url "https://www.percona.com/downloads/Percona-XtraBackup-LATEST/"
    regex(/value=.*?Percona-XtraBackup[._-]v?(\d+(?:\.\d+)+-\d+)["' >]/i)
  end

  bottle do
    rebuild 1
    sha256 arm64_ventura:  "8b8c907a1ef919347c9e4470f555007d0d737ce0beff94e5e83562ff72f8029e"
    sha256 arm64_monterey: "458c3e1c42fb886b761b488c96b16cf2b6fabf9c15f9757be2bb1252a097b3fe"
    sha256 arm64_big_sur:  "6334b08778ac9f84cdc5bae40a3e79a3c5847bc5d4ad940c55b0d577792609e2"
    sha256 monterey:       "50e0cea6a56236121455148b9409a96321e6bfda33ccbd544b69f76d5c95a42e"
    sha256 big_sur:        "dfb6c59a87caac4de10d941443761d0cf11a8ce293339f48dff126a0e10ec82a"
    sha256 catalina:       "ed9c776f742ab91702841843d76f357a217483ff2baa1806be817efa2bf18522"
    sha256 x86_64_linux:   "a48195f5f43f4c86e45e13dda822de73917e791f8bc654d05fe0d396a6d4acf5"
  end

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  depends_on "sphinx-doc" => :build
  depends_on "icu4c"
  depends_on "libev"
  depends_on "libevent"
  depends_on "libfido2"
  depends_on "libgcrypt"
  depends_on "lz4"
  depends_on "mysql-client"
  depends_on "openssl@1.1"
  depends_on "protobuf"
  depends_on "zstd"

  uses_from_macos "vim" => :build # needed for xxd
  uses_from_macos "curl"
  uses_from_macos "cyrus-sasl"
  uses_from_macos "libedit"
  uses_from_macos "perl"
  uses_from_macos "zlib"

  on_linux do
    depends_on "patchelf" => :build
    depends_on "libaio"
  end

  conflicts_with "percona-server", because: "both install a `kmip.h`"

  fails_with :gcc do
    version "6"
    cause "The build requires GCC 7.1 or later."
  end

  # Should be installed before DBD::mysql
  resource "Devel::CheckLib" do
    url "https://cpan.metacpan.org/authors/id/M/MA/MATTN/Devel-CheckLib-1.14.tar.gz"
    sha256 "f21c5e299ad3ce0fdc0cb0f41378dca85a70e8d6c9a7599f0e56a957200ec294"
  end

  # This is not part of the system Perl on Linux and on macOS since Mojave
  resource "DBI" do
    url "https://cpan.metacpan.org/authors/id/T/TI/TIMB/DBI-1.643.tar.gz"
    sha256 "8a2b993db560a2c373c174ee976a51027dd780ec766ae17620c20393d2e836fa"
  end

  resource "DBD::mysql" do
    url "https://cpan.metacpan.org/authors/id/D/DV/DVEEDEN/DBD-mysql-4.050.tar.gz"
    sha256 "4f48541ff15a0a7405f76adc10f81627c33996fbf56c95c26c094444c0928d78"
  end

  # https://github.com/percona/percona-xtrabackup/blob/percona-xtrabackup-#{version}/cmake/boost.cmake
  resource "boost" do
    url "https://boostorg.jfrog.io/artifactory/main/release/1.77.0/source/boost_1_77_0.tar.bz2"
    sha256 "fc9f85fc030e233142908241af7a846e60630aa7388de9a5fafb1f3a26840854"
  end

  # Fix CMake install error with manpages.
  # https://github.com/percona/percona-xtrabackup/pull/1266
  patch do
    url "https://github.com/percona/percona-xtrabackup/commit/1d733eade782dd9fdf8ef66b9e9cb9e00f572606.patch?full_index=1"
    sha256 "9b38305b4e4bae23b085b3ef9cb406451fa3cc14963524e95fc1e6cbf761c7cf"
  end

  # Patch out check for Homebrew `boost`.
  # This should not be necessary when building inside `brew`.
  # https://github.com/Homebrew/homebrew-test-bot/pull/820
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/030f7433e89376ffcff836bb68b3903ab90f9cdc/percona-server/boost-check.patch"
    sha256 "3223f7eebd04b471de1c21104c46b2cdec3fe7b26e13535bdcd0d7b8fd341bde"
  end

  def install
    # Disable ABI checking
    inreplace "cmake/abi_check.cmake", "RUN_ABI_CHECK 1", "RUN_ABI_CHECK 0" if OS.linux?

    # Prefer Homebrew/system libraries over the bundled copies.
    cmake_args = %W[
      -DBUILD_CONFIG=xtrabackup_release
      -DCOMPILATION_COMMENT=Homebrew
      -DINSTALL_PLUGINDIR=lib/percona-xtrabackup/plugin
      -DINSTALL_MANDIR=share/man
      -DWITH_MAN_PAGES=ON
      -DINSTALL_MYSQLTESTDIR=
      -DWITH_SYSTEM_LIBS=ON
      -DWITH_EDITLINE=system
      -DWITH_FIDO=system
      -DWITH_ICU=system
      -DWITH_LIBEVENT=system
      -DWITH_LZ4=system
      -DWITH_PROTOBUF=system
      -DWITH_SSL=system
      -DOPENSSL_ROOT_DIR=#{Formula["openssl@1.1"].opt_prefix}
      -DWITH_ZLIB=system
      -DWITH_ZSTD=system
    ]

    # Upstream pins a specific Boost; stage it locally and point CMake at it.
    (buildpath/"boost").install resource("boost")
    cmake_args << "-DWITH_BOOST=#{buildpath}/boost"
    cmake_args.concat std_cmake_args

    # Remove conflicting manpages
    rm (Dir["man/*"] - ["man/CMakeLists.txt"])

    mkdir "build" do
      system "cmake", "..", *cmake_args
      system "make"
      system "make", "install"
    end

    # remove conflicting library that is already installed by mysql
    rm lib/"libmysqlservices.a"

    # Build the bundled Perl modules into a private libexec prefix and wrap
    # the installed scripts so they find them via PERL5LIB.
    ENV.prepend_create_path "PERL5LIB", buildpath/"build_deps/lib/perl5"
    resource("Devel::CheckLib").stage do
      system "perl", "Makefile.PL", "INSTALL_BASE=#{buildpath}/build_deps"
      system "make", "install"
    end
    ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
    # This is not part of the system Perl on Linux and on macOS since Mojave
    if OS.linux? || MacOS.version >= :mojave
      resource("DBI").stage do
        system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
        system "make", "install"
      end
    end
    resource("DBD::mysql").stage do
      system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
      system "make", "install"
    end
    bin.env_script_all_files(libexec/"bin", PERL5LIB: libexec/"lib/perl5")
  end

  # No server is running in the test environment, so a backup attempt is
  # expected to exit 1 with a connection failure message.
  test do
    assert_match version.to_s, shell_output("#{bin}/xtrabackup --version 2>&1")

    mkdir "backup"
    output = shell_output("#{bin}/xtrabackup --target-dir=backup --backup 2>&1", 1)
    assert_match "Failed to connect to MySQL server", output
  end
end
|
# Homebrew formula installing Vagrant's shell completion scripts (bash + zsh)
# from the upstream source tree; nothing is compiled.
class VagrantCompletion < Formula
  desc "Bash completion for Vagrant"
  homepage "https://github.com/hashicorp/vagrant"
  url "https://github.com/hashicorp/vagrant/archive/v2.2.19.tar.gz"
  sha256 "4f0e6b1d466e26dead682c4d4843e8f64a012eba4be91506ae6c6d34d3d9c8f9"
  license "MIT"
  head "https://github.com/hashicorp/vagrant.git", branch: "main"

  bottle do
    sha256 cellar: :any_skip_relocation, all: "2d2e1d003d04ede074ccc7f12d497f378b5d32fe44e0163422d6cf8a0a8581e0"
  end

  # Copy the completion scripts shipped in contrib/ into the standard
  # Homebrew completion directories.
  def install
    bash_completion.install "contrib/bash/completion.sh" => "vagrant"
    zsh_completion.install "contrib/zsh/_vagrant"
  end

  test do
    # FIX: invoke `bash` from PATH instead of the hard-coded `/bin/bash` so
    # the test also passes on Linux builds (matches the later upstream fix
    # "vagrant-completion: update test for linux build").
    assert_match "-F _vagrant",
      shell_output("bash -c 'source #{bash_completion}/vagrant && complete -p vagrant'")
  end
end
vagrant-completion: update test for linux build
Signed-off-by: Rui Chen <907c7afd57be493757f13ccd1dd45dddf02db069@chenrui.dev>
# Homebrew formula installing Vagrant's shell completion scripts (bash + zsh).
# This version's test calls `bash` from PATH (not `/bin/bash`) so it also
# works on Linux builds.
class VagrantCompletion < Formula
  desc "Bash completion for Vagrant"
  homepage "https://github.com/hashicorp/vagrant"
  url "https://github.com/hashicorp/vagrant/archive/v2.2.19.tar.gz"
  sha256 "4f0e6b1d466e26dead682c4d4843e8f64a012eba4be91506ae6c6d34d3d9c8f9"
  license "MIT"
  head "https://github.com/hashicorp/vagrant.git", branch: "main"

  bottle do
    sha256 cellar: :any_skip_relocation, all: "2d2e1d003d04ede074ccc7f12d497f378b5d32fe44e0163422d6cf8a0a8581e0"
  end

  # Copy the completion scripts shipped in contrib/ into the standard
  # Homebrew completion directories.
  def install
    bash_completion.install "contrib/bash/completion.sh" => "vagrant"
    zsh_completion.install "contrib/zsh/_vagrant"
  end

  # Source the installed bash completion and verify the `vagrant` completion
  # function is registered.
  test do
    assert_match "-F _vagrant",
      shell_output("bash -c 'source #{bash_completion}/vagrant && complete -p vagrant'")
  end
end
|
# Homebrew formula for the CalculiX ccx finite-element solver. Spooles is
# statically linked from a vendored resource, and the embedded DATA patch
# (after __END__) adapts the upstream build to Homebrew's toolchain.
class CalculixCcx < Formula
  desc "Three-Dimensional Finite Element Solver"
  homepage "http://www.calculix.de/"
  url "http://www.dhondt.de/ccx_2.10.src.tar.bz2"
  version "2.10"
  sha256 "693497d19d8dd2a5376e64e038d5c248d87f0e2df46d409a83bf976596b319f5"

  bottle do
    cellar :any
    sha256 "14234c148db24b572e196bb9fc0cd5dfc0ca6b45aa447f4fec8b008ce81870e4" => :el_capitan
    sha256 "2daea483bcf55bee8893686fadd3b5028aeccaf9708c4f5feeefa4c8d46321b0" => :yosemite
    sha256 "37463517923beb6a33083c333bff5845ec8901f5cf33220ede9523551e7fef58" => :mavericks
  end

  depends_on :fortran
  depends_on "arpack"
  depends_on "pkg-config" => :build

  # Upstream ships the test suite and documentation as separate tarballs.
  resource "test" do
    url "http://www.dhondt.de/ccx_2.10.test.tar.bz2"
    version "2.10"
    sha256 "a5e00abc7f9b2a5a5f1a4f7b414617dc65cd0be9b2a66c93e20b5a25c1392a75"
  end

  resource "doc" do
    url "http://www.dhondt.de/ccx_2.10.htm.tar.bz2"
    version "2.10"
    sha256 "28f09511d791016dadb9f9cce382789fc250dfa5a60b105cfc4c9c2008e437c2"
  end

  resource "spooles" do
    # The spooles library is not currently maintained and so would not make a
    # good brew candidate. Instead it will be static linked to ccx.
    url "http://www.netlib.org/linalg/spooles/spooles.2.2.tgz"
    sha256 "a84559a0e987a1e423055ef4fdf3035d55b65bbe4bf915efaa1a35bef7f8c5dd"
  end

  # Add <pthread.h> to Calculix.h
  # Read arpack link options from pkg-config
  # u_free must return a void pointer
  patch :DATA

  def install
    (buildpath/"spooles").install resource("spooles")

    # Patch spooles library
    inreplace "spooles/Make.inc", "/usr/lang-4.0/bin/cc", ENV.cc
    # drawTree.c was renamed to tree.c in the tarball; fix the library list.
    inreplace "spooles/Tree/src/makeGlobalLib", "drawTree.c", "tree.c"

    # Build serial spooles library
    system "make", "-C", "spooles", "lib"

    # Extend library with multi-threading (MT) subroutines
    system "make", "-C", "spooles/MT/src", "makeLib"

    # Build Calculix ccx
    args = [
      "CC=#{ENV.cc}",
      "FC=#{ENV.fc}",
      "CFLAGS=-O2 -I../../spooles -DARCH=Linux -DSPOOLES -DARPACK -DMATRIXSTORAGE -DUSE_MT=1",
      "FFLAGS=-O2 -fopenmp",
      "DIR=../../spooles",
    ]
    target = Pathname.new("ccx_2.10/src/ccx_2.10")
    system "make", "-C", target.dirname, target.basename, *args
    bin.install target

    (buildpath/"test").install resource("test")
    pkgshare.install Dir["test/ccx_2.10/test/*"]

    (buildpath/"doc").install resource("doc")
    doc.install Dir["doc/ccx_2.10/doc/ccx/*"]
  end

  # Run the solver on the bundled spring1 example input deck.
  test do
    cp "#{pkgshare}/spring1.inp", testpath
    system "ccx_2.10", "spring1"
  end
end

__END__
diff --git a/ccx_2.10/src/CalculiX.h b/ccx_2.10/src/CalculiX.h
index ee81ca8..d957130 100644
--- a/ccx_2.10/src/CalculiX.h
+++ b/ccx_2.10/src/CalculiX.h
@@ -15,6 +15,7 @@
/* along with this program; if not, write to the Free Software */
/* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */
+#include <pthread.h>
#define Linux 1
#define IRIX 2
#define IRIX64 3
diff --git a/ccx_2.10/src/Makefile b/ccx_2.10/src/Makefile
index 9335028..a587fdd 100755
--- a/ccx_2.10/src/Makefile
+++ b/ccx_2.10/src/Makefile
@@ -22,11 +22,12 @@ DIR=../../../SPOOLES.2.2
LIBS = \
$(DIR)/spooles.a \
- ../../../ARPACK/libarpack_INTEL.a \
- -lpthread -lm -lc
+ $(shell pkg-config --libs arpack)
-ccx_2.10: $(OCCXMAIN) ccx_2.10.a $(LIBS)
- ./date.pl; $(CC) $(CFLAGS) -c ccx_2.10.c; $(FC) -fopenmp -Wall -O3 -o $@ $(OCCXMAIN) ccx_2.10.a $(LIBS)
+ccx_2.10: $(OCCXMAIN) ccx_2.10.a
+ ./date.pl
+ $(CC) $(CFLAGS) -c ccx_2.10.c
+ $(FC) $(FFLAGS) -o $@ $(OCCXMAIN) ccx_2.10.a $(LIBS)
ccx_2.10.a: $(OCCXF) $(OCCXC)
ar vr $@ $?
diff --git a/ccx_2.10/src/u_free.c b/ccx_2.10/src/u_free.c
index acccf3b..da517de 100644
--- a/ccx_2.10/src/u_free.c
+++ b/ccx_2.10/src/u_free.c
@@ -41,5 +41,5 @@ void *u_free(void* ptr,const char *file,const int line, const char* ptr_name){
if(log_realloc==1) {
printf("FREEING of variable %s, file %s, line=%d: oldaddress= %ld\n",ptr_name,file,line,(long int)ptr);
}
- return;
+ return NULL;
}
calculix-ccx: update 2.10 bottle.
# Homebrew formula for the CalculiX ccx finite-element solver; this revision
# only refreshes the bottle checksums. Spooles is statically linked from a
# vendored resource, and the DATA patch adapts the upstream build.
class CalculixCcx < Formula
  desc "Three-Dimensional Finite Element Solver"
  homepage "http://www.calculix.de/"
  url "http://www.dhondt.de/ccx_2.10.src.tar.bz2"
  version "2.10"
  sha256 "693497d19d8dd2a5376e64e038d5c248d87f0e2df46d409a83bf976596b319f5"

  bottle do
    cellar :any
    sha256 "14f29dd416a4d8ad14d55598b5d53d5bbb74681dec38bd6f1760c781b9a0671b" => :el_capitan
    sha256 "490e79f5f3ad45f456b58a4959436735cf4225ff5e2c56c54ee5ab932bccced5" => :yosemite
    sha256 "6ebcb57fd3af53068e3955a17b85fe9016bb6a03393bdb778fa280bbd24bca32" => :mavericks
  end

  depends_on :fortran
  depends_on "arpack"
  depends_on "pkg-config" => :build

  # Upstream ships the test suite and documentation as separate tarballs.
  resource "test" do
    url "http://www.dhondt.de/ccx_2.10.test.tar.bz2"
    version "2.10"
    sha256 "a5e00abc7f9b2a5a5f1a4f7b414617dc65cd0be9b2a66c93e20b5a25c1392a75"
  end

  resource "doc" do
    url "http://www.dhondt.de/ccx_2.10.htm.tar.bz2"
    version "2.10"
    sha256 "28f09511d791016dadb9f9cce382789fc250dfa5a60b105cfc4c9c2008e437c2"
  end

  resource "spooles" do
    # The spooles library is not currently maintained and so would not make a
    # good brew candidate. Instead it will be static linked to ccx.
    url "http://www.netlib.org/linalg/spooles/spooles.2.2.tgz"
    sha256 "a84559a0e987a1e423055ef4fdf3035d55b65bbe4bf915efaa1a35bef7f8c5dd"
  end

  # Add <pthread.h> to Calculix.h
  # Read arpack link options from pkg-config
  # u_free must return a void pointer
  patch :DATA

  def install
    (buildpath/"spooles").install resource("spooles")

    # Patch spooles library
    inreplace "spooles/Make.inc", "/usr/lang-4.0/bin/cc", ENV.cc
    # drawTree.c was renamed to tree.c in the tarball; fix the library list.
    inreplace "spooles/Tree/src/makeGlobalLib", "drawTree.c", "tree.c"

    # Build serial spooles library
    system "make", "-C", "spooles", "lib"

    # Extend library with multi-threading (MT) subroutines
    system "make", "-C", "spooles/MT/src", "makeLib"

    # Build Calculix ccx
    args = [
      "CC=#{ENV.cc}",
      "FC=#{ENV.fc}",
      "CFLAGS=-O2 -I../../spooles -DARCH=Linux -DSPOOLES -DARPACK -DMATRIXSTORAGE -DUSE_MT=1",
      "FFLAGS=-O2 -fopenmp",
      "DIR=../../spooles",
    ]
    target = Pathname.new("ccx_2.10/src/ccx_2.10")
    system "make", "-C", target.dirname, target.basename, *args
    bin.install target

    (buildpath/"test").install resource("test")
    pkgshare.install Dir["test/ccx_2.10/test/*"]

    (buildpath/"doc").install resource("doc")
    doc.install Dir["doc/ccx_2.10/doc/ccx/*"]
  end

  # Run the solver on the bundled spring1 example input deck.
  test do
    cp "#{pkgshare}/spring1.inp", testpath
    system "ccx_2.10", "spring1"
  end
end

__END__
diff --git a/ccx_2.10/src/CalculiX.h b/ccx_2.10/src/CalculiX.h
index ee81ca8..d957130 100644
--- a/ccx_2.10/src/CalculiX.h
+++ b/ccx_2.10/src/CalculiX.h
@@ -15,6 +15,7 @@
/* along with this program; if not, write to the Free Software */
/* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */
+#include <pthread.h>
#define Linux 1
#define IRIX 2
#define IRIX64 3
diff --git a/ccx_2.10/src/Makefile b/ccx_2.10/src/Makefile
index 9335028..a587fdd 100755
--- a/ccx_2.10/src/Makefile
+++ b/ccx_2.10/src/Makefile
@@ -22,11 +22,12 @@ DIR=../../../SPOOLES.2.2
LIBS = \
$(DIR)/spooles.a \
- ../../../ARPACK/libarpack_INTEL.a \
- -lpthread -lm -lc
+ $(shell pkg-config --libs arpack)
-ccx_2.10: $(OCCXMAIN) ccx_2.10.a $(LIBS)
- ./date.pl; $(CC) $(CFLAGS) -c ccx_2.10.c; $(FC) -fopenmp -Wall -O3 -o $@ $(OCCXMAIN) ccx_2.10.a $(LIBS)
+ccx_2.10: $(OCCXMAIN) ccx_2.10.a
+ ./date.pl
+ $(CC) $(CFLAGS) -c ccx_2.10.c
+ $(FC) $(FFLAGS) -o $@ $(OCCXMAIN) ccx_2.10.a $(LIBS)
ccx_2.10.a: $(OCCXF) $(OCCXC)
ar vr $@ $?
diff --git a/ccx_2.10/src/u_free.c b/ccx_2.10/src/u_free.c
index acccf3b..da517de 100644
--- a/ccx_2.10/src/u_free.c
+++ b/ccx_2.10/src/u_free.c
@@ -41,5 +41,5 @@ void *u_free(void* ptr,const char *file,const int line, const char* ptr_name){
if(log_realloc==1) {
printf("FREEING of variable %s, file %s, line=%d: oldaddress= %ld\n",ptr_name,file,line,(long int)ptr);
}
- return;
+ return NULL;
}
|
add gemspec
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

# Gemspec for the captify gem ("Capistrano capify with canned templates").
# NOTE(review): jeweler-generated; regenerate via `rake gemspec` rather than
# editing by hand.
Gem::Specification.new do |s|
  s.name = "captify"
  s.version = "0.1.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Huiming Teo"]
  s.date = "2012-06-06"
  s.description = "Capistrano capify with canned templates."
  s.email = "teohuiming@gmail.com"
  s.executables = ["captify"]
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "bin/captify",
    "lib/captify/template.rb"
  ]
  s.homepage = "http://github.com/teohm/captify"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.24"
  s.summary = "Capistrano capify with canned templates."

  # Dependency declarations, guarded for very old RubyGems versions that
  # lack specification_version / typed dependency methods.
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<capistrano>, [">= 0"])
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.3"])
    else
      s.add_dependency(%q<capistrano>, [">= 0"])
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.3"])
    end
  else
    s.add_dependency(%q<capistrano>, [">= 0"])
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.3"])
  end
end
|
require 'yaml'

# BOAST global configuration: default compiler options, OpenMP flags, and
# state accessors, loaded/overridden from the user's config directory.
module BOAST

  # Default toolchain commands and flags. Entries can be overridden by the
  # per-user config file (see read_boast_config) and by same-named
  # environment variables.
  @@compiler_default_options = {
    :FC => 'gfortran',
    :FCFLAGS => '-O2 -Wall',
    :CC => 'gcc',
    :CFLAGS => '-O2 -Wall',
    :CXX => 'g++',
    :CXXFLAGS => '-O2 -Wall',
    :NVCC => 'nvcc',
    :NVCCFLAGS => '-O2',
    :LDFLAGS => '',
    :CLFLAGS => '',
    :CLVENDOR => nil,
    :CLPLATFORM => nil,
    :CLDEVICE => nil,
    :CLDEVICETYPE => nil,
    :MAQAO => 'maqao',
    :MAQAO_FLAGS => '',
    :openmp => false
  }

  # Per-compiler OpenMP activation flags; also user-overridable.
  @@openmp_default_flags = {
    "gcc" => "-fopenmp",
    "icc" => "-openmp",
    "gfortran" => "-fopenmp",
    "ifort" => "-openmp",
    "g++" => "-fopenmp",
    "icpc" => "-openmp"
  }

  # Private counterparts of the public state accessors declared below.
  module PrivateStateAccessor
    private_boolean_state_accessor :verbose
    private_boolean_state_accessor :debug_source
    private_boolean_state_accessor :ffi
    # FIX: :keep_temp has a public accessor below but was missing here,
    # which crashed code relying on the private accessors when BOAST is
    # not included at the top level.
    private_boolean_state_accessor :keep_temp
    private_state_accessor :fortran_line_length
  end

  boolean_state_accessor :verbose
  boolean_state_accessor :debug_source
  boolean_state_accessor :ffi
  boolean_state_accessor :keep_temp
  state_accessor :fortran_line_length

  default_state_getter :verbose, false
  default_state_getter :debug_source, false
  default_state_getter :ffi, false
  default_state_getter :keep_temp, false
  default_state_getter :fortran_line_length, 72

  module_function

  # Loads (or seeds, on first run) the user's compiler options and OpenMP
  # flags from $XDG_CONFIG_HOME/BOAST (falling back to ~/.config/BOAST),
  # then applies environment-variable overrides. Silently returns if the
  # config directory cannot be used.
  def read_boast_config
    home_config_dir = ENV["XDG_CONFIG_HOME"]
    home_config_dir = "#{Dir.home}/.config" if not home_config_dir
    Dir.mkdir( home_config_dir ) if not File::exist?( home_config_dir )
    return if not File::directory?(home_config_dir)
    boast_config_dir = "#{home_config_dir}/BOAST"
    Dir.mkdir( boast_config_dir ) if not File::exist?( boast_config_dir )
    compiler_options_file = "#{boast_config_dir}/compiler_options"
    if File::exist?( compiler_options_file ) then
      # NOTE(review): YAML::load on a user-owned config file; acceptable
      # here, but YAML.safe_load would be stricter.
      File::open( compiler_options_file, "r" ) { |f|
        @@compiler_default_options.update( YAML::load( f.read ) )
      }
    else
      # First run: persist the defaults so users have a template to edit.
      File::open( compiler_options_file, "w" ) { |f|
        f.write YAML::dump( @@compiler_default_options )
      }
    end
    openmp_flags_file = "#{boast_config_dir}/openmp_flags"
    if File::exist?( openmp_flags_file ) then
      File::open( openmp_flags_file, "r" ) { |f|
        @@openmp_default_flags.update( YAML::load( f.read ) )
      }
    else
      File::open( openmp_flags_file, "w" ) { |f|
        f.write YAML::dump( @@openmp_default_flags )
      }
    end
    # Environment variables take precedence over file-based configuration.
    @@compiler_default_options.each_key { |k|
      @@compiler_default_options[k] = ENV[k.to_s] if ENV[k.to_s]
    }
    @@compiler_default_options[:LD] = ENV["LD"] if ENV["LD"]
  end

  read_boast_config

  # Returns a copy of the effective OpenMP flag table.
  def get_openmp_flags
    return @@openmp_default_flags.clone
  end

  # Returns a copy of the effective compiler option table.
  def get_compiler_options
    return @@compiler_default_options.clone
  end
end
Fix crash when not including BOAST.
require 'yaml'
module BOAST
  # Toolchain defaults; each key can be overridden first by the user's
  # config file and then by a same-named environment variable
  # (see read_boast_config below).
  @@compiler_default_options = {
    :FC => 'gfortran',
    :FCFLAGS => '-O2 -Wall',
    :CC => 'gcc',
    :CFLAGS => '-O2 -Wall',
    :CXX => 'g++',
    :CXXFLAGS => '-O2 -Wall',
    :NVCC => 'nvcc',
    :NVCCFLAGS => '-O2',
    :LDFLAGS => '',
    :CLFLAGS => '',
    :CLVENDOR => nil,
    :CLPLATFORM => nil,
    :CLDEVICE => nil,
    :CLDEVICETYPE => nil,
    :MAQAO => 'maqao',
    :MAQAO_FLAGS => '',
    :openmp => false
  }
  # OpenMP activation flag for each known compiler binary.
  @@openmp_default_flags = {
    "gcc" => "-fopenmp",
    "icc" => "-openmp",
    "gfortran" => "-fopenmp",
    "ifort" => "-openmp",
    "g++" => "-fopenmp",
    "icpc" => "-openmp"
  }
  # Private variants of the public state accessors declared below
  # (accessor DSL presumably defined elsewhere in BOAST — TODO confirm).
  module PrivateStateAccessor
    private_boolean_state_accessor :verbose
    private_boolean_state_accessor :debug_source
    private_boolean_state_accessor :ffi
    private_boolean_state_accessor :keep_temp
    private_state_accessor :fortran_line_length
  end
  boolean_state_accessor :verbose
  boolean_state_accessor :debug_source
  boolean_state_accessor :ffi
  boolean_state_accessor :keep_temp
  state_accessor :fortran_line_length
  # Fallback values returned until a state is explicitly set.
  default_state_getter :verbose, false
  default_state_getter :debug_source, false
  default_state_getter :ffi, false
  default_state_getter :keep_temp, false
  default_state_getter :fortran_line_length, 72
  module_function
  # Read the BOAST configuration from $XDG_CONFIG_HOME/BOAST (or
  # ~/.config/BOAST), creating the directory and default config files on
  # first use. Environment variables override config-file values.
  def read_boast_config
    home_config_dir = ENV["XDG_CONFIG_HOME"]
    home_config_dir = "#{Dir.home}/.config" if not home_config_dir
    Dir.mkdir( home_config_dir ) if not File::exist?( home_config_dir )
    return if not File::directory?(home_config_dir)
    boast_config_dir = "#{home_config_dir}/BOAST"
    Dir.mkdir( boast_config_dir ) if not File::exist?( boast_config_dir )
    compiler_options_file = "#{boast_config_dir}/compiler_options"
    if File::exist?( compiler_options_file ) then
      File::open( compiler_options_file, "r" ) { |f|
        @@compiler_default_options.update( YAML::load( f.read ) )
      }
    else
      # First run: persist the defaults so the user has a template to edit.
      File::open( compiler_options_file, "w" ) { |f|
        f.write YAML::dump( @@compiler_default_options )
      }
    end
    openmp_flags_file = "#{boast_config_dir}/openmp_flags"
    if File::exist?( openmp_flags_file ) then
      File::open( openmp_flags_file, "r" ) { |f|
        @@openmp_default_flags.update( YAML::load( f.read ) )
      }
    else
      File::open( openmp_flags_file, "w" ) { |f|
        f.write YAML::dump( @@openmp_default_flags )
      }
    end
    # Environment variables take precedence over config-file values.
    @@compiler_default_options.each_key { |k|
      @@compiler_default_options[k] = ENV[k.to_s] if ENV[k.to_s]
    }
    @@compiler_default_options[:LD] = ENV["LD"] if ENV["LD"]
  end
  # Load the configuration as soon as this file is required.
  read_boast_config
  # Return a copy of the OpenMP flag table (safe for callers to mutate).
  def get_openmp_flags
    return @@openmp_default_flags.clone
  end
  # Return a copy of the compiler option table (safe for callers to mutate).
  def get_compiler_options
    return @@compiler_default_options.clone
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = %q{carpool}
  s.version = "0.2.2"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Brent Kirby"]
  s.date = %q{2010-12-06}
  s.description = %q{Carpool is a single sign on solution for Rack-based applications allowing you to persist sessions across domains.}
  s.email = %q{dev@kurbmedia.com}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.md"
  ]
  # Complete list of files shipped in the gem (maintained by jeweler).
  s.files = [
    ".document",
    "LICENSE",
    "README.md",
    "Rakefile",
    "VERSION",
    "carpool.gemspec",
    "init.rb",
    "lib/carpool.rb",
    "lib/carpool/driver.rb",
    "lib/carpool/mixins/action_controller.rb",
    "lib/carpool/mixins/action_view.rb",
    "lib/carpool/mixins/core.rb",
    "lib/carpool/passenger.rb",
    "lib/carpool/rails/railtie.rb",
    "lib/carpool/seatbelt.rb",
    "test/helper.rb",
    "test/test_carpool.rb"
  ]
  s.homepage = %q{http://github.com/kurbmedia/carpool}
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{Single Sign On solution for Rack-Based applications}
  s.test_files = [
    "test/helper.rb",
    "test/test_carpool.rb"
  ]
  # Declare dependencies using whichever API the running RubyGems supports.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
      s.add_runtime_dependency(%q<fast-aes>, [">= 0"])
    else
      s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
      s.add_dependency(%q<fast-aes>, [">= 0"])
    end
  else
    s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
    s.add_dependency(%q<fast-aes>, [">= 0"])
  end
end
Regenerate gemspec for version 0.3.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = %q{carpool}
  s.version = "0.3.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Brent Kirby"]
  s.date = %q{2010-12-06}
  s.description = %q{Carpool is a single sign on solution for Rack-based applications allowing you to persist sessions across domains.}
  s.email = %q{dev@kurbmedia.com}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.md"
  ]
  # Complete list of files shipped in the gem (maintained by jeweler).
  # 0.3.0 adds lib/carpool/encryptor.rb and lib/carpool/responder.rb.
  s.files = [
    ".document",
    "LICENSE",
    "README.md",
    "Rakefile",
    "VERSION",
    "carpool.gemspec",
    "init.rb",
    "lib/carpool.rb",
    "lib/carpool/driver.rb",
    "lib/carpool/encryptor.rb",
    "lib/carpool/mixins/action_controller.rb",
    "lib/carpool/mixins/action_view.rb",
    "lib/carpool/mixins/core.rb",
    "lib/carpool/passenger.rb",
    "lib/carpool/rails/railtie.rb",
    "lib/carpool/responder.rb",
    "lib/carpool/seatbelt.rb",
    "test/helper.rb",
    "test/test_carpool.rb"
  ]
  s.homepage = %q{http://github.com/kurbmedia/carpool}
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{Single Sign On solution for Rack-Based applications}
  s.test_files = [
    "test/helper.rb",
    "test/test_carpool.rb"
  ]
  # Declare dependencies using whichever API the running RubyGems supports.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
      s.add_runtime_dependency(%q<fast-aes>, [">= 0"])
    else
      s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
      s.add_dependency(%q<fast-aes>, [">= 0"])
    end
  else
    s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
    s.add_dependency(%q<fast-aes>, [">= 0"])
  end
end
|
# brew-services(1) - Easily start and stop formulae via launchctl
# ===============================================================
#
# ## SYNOPSIS
#
# [<sudo>] `brew services` `list`
# [<sudo>] `brew services` `restart` <formula>
# [<sudo>] `brew services` `start` <formula> [<plist>]
# [<sudo>] `brew services` `stop` <formula>
# [<sudo>] `brew services` `cleanup`
#
# ## DESCRIPTION
#
# Integrates Homebrew formulae with OS X's `launchctl` manager. Services can be
# added to either `/Library/LaunchDaemons` or `~/Library/LaunchAgents`.
# Basically, items in `/Library/LaunchDaemons` are started at boot, while those
# in `~/Library/LaunchAgents` are started at login.
#
# When started with `sudo`, it operates on `/Library/LaunchDaemons`; otherwise,
# it operates on `~/Library/LaunchAgents`.
#
# On `start` the plist file is generated and written to a `Tempfile`, and then
# copied to the launch path (existing plists are overwritten).
#
# ## OPTIONS
#
# To access everything quickly, some aliases have been added:
#
# * `rm`:
# Shortcut for `cleanup`, because that's basically what's being done.
#
# * `ls`:
# Because `list` is too much to type. :)
#
# * `reload`, `r`:
# Alias for `restart`, which gracefully restarts the selected service.
#
# * `load`, `s`:
# Alias for `start`, guess what it does...
#
# * `unload`, `term`, `t`:
# Alias for `stop`, stops and unloads selected service.
#
# ## SYNTAX
#
# Several existing formulae (like mysql, nginx) already write a custom plist
# file to the formulae prefix. Most of these implement `#plist`, which
# then, in turn, returns a neato plist file as a string.
#
# `brew services` operates on `#plist` as well, and requires supporting
# formulae to implement it. This method should either return a string containing
# the generated XML file, or return a `Pathname` instance pointing to a plist
# template or to a hash like this:
#
# { :url => "https://gist.github.com/raw/534777/63c4698872aaef11fe6e6c0c5514f35fd1b1687b/nginx.plist.xml" }
#
# Some simple template parsing is performed. All variables like `{{name}}` are
# replaced by basically doing the following:
# `formula.send('name').to_s if formula.respond_to?('name')`, a bit like
# mustache. So any variable in the `Formula` is available as a template
# variable, like `{{var}}`, `{{bin}}`, and `{{usr}}`.
#
# ## EXAMPLES
#
# Install and start the service "mysql" at boot:
#
# $ brew install mysql
# $ sudo brew services start mysql
#
# Stop the service "mysql" (after it was launched at boot):
#
# $ sudo brew services stop mysql
#
# Start the service "memcached" at login:
#
# $ brew install memcached
# $ brew services start memcached
#
# List all running services for the current user and then for root:
#
# $ brew services list
# $ sudo brew services list
#
# Stop all running services for the current user:
#
# $ brew services stop --all
#
# ## BUGS
#
# `brew-services.rb` might not handle all edge cases, but it will try to fix
# problems if you run `brew services cleanup`.
#
# Command-line driver for `brew services`: maps sub-commands (list, start,
# stop, restart, cleanup, plus aliases) onto launchctl operations.
module ServicesCli
  class << self
    # Binary name.
    def bin
      "brew services"
    end
    # Path to launchctl binary.
    def launchctl
      which("launchctl")
    end
    # Woohoo, we are root dude!
    def root?
      Process.uid == 0
    end
    # Current user, i.e., owner of `HOMEBREW_CELLAR`.
    def user
      @user ||= `/usr/bin/stat -f '%Su' #{HOMEBREW_CELLAR} 2>/dev/null`.chomp || `/usr/bin/whoami`.chomp
    end
    # Run at boot.
    def boot_path
      Pathname.new("/Library/LaunchDaemons")
    end
    # Run at login.
    def user_path
      Pathname.new(ENV["HOME"] + "/Library/LaunchAgents")
    end
    # If root, return `boot_path`, else return `user_path`.
    def path
      root? ? boot_path : user_path
    end
    # Find all currently running services via launchctl list.
    def running
      `#{launchctl} list | grep homebrew.mxcl`.chomp.split("\n").map { |svc| $1 if svc =~ /(homebrew\.mxcl\..+)\z/ }.compact
    end
    # Check if running as Homebrew and load required libraries, et al.
    def homebrew!
      abort("Runtime error: Homebrew is required. Please start via `#{bin} ...`") unless defined?(HOMEBREW_LIBRARY_PATH)
      %w[fileutils pathname tempfile formula utils].each { |req| require(req) }
      extend(FileUtils)
    end
    # All available services (installed formulae that provide a plist).
    def available_services
      Formula.installed.map { |formula| Service.new(formula) }.select(&:plist?)
    end
    # Print usage and `exit(...)` with supplied exit code. If code
    # is set to `false`, then exit is ignored.
    def usage(code = 0)
      puts "usage: [sudo] #{bin} [--help] <command> [<formula>|--all]"
      puts
      puts "Small wrapper around `launchctl` for supported formulae, commands available:"
      puts " cleanup Get rid of stale services and unused plists"
      puts " list List all services managed by `#{bin}`"
      puts " restart Gracefully restart service(s)"
      puts " start Start service(s)"
      puts " stop Stop service(s)"
      puts
      puts "Options, sudo and paths:"
      puts
      puts " sudo When run as root, operates on #{boot_path} (run at boot!)"
      puts " Run at boot: #{boot_path}"
      puts " Run at login: #{user_path}"
      puts
      exit(code) unless code == false
      true
    end
    # Run and start the command loop.
    def run!
      homebrew!
      usage if ARGV.empty? || ARGV.include?("help") || ARGV.include?("--help") || ARGV.include?("-h")
      # pbpaste's exit status is a proxy for detecting the use of reattach-to-user-namespace
      if ENV["TMUX"] && !quiet_system("/usr/bin/pbpaste")
        odie "brew services cannot run under tmux!"
      end
      # Parse arguments.
      act_on_all_services = ARGV.include?("--all")
      cmd = ARGV.named[0]
      formula = ARGV.named[1]
      custom_plist = ARGV.named[2]
      target = if act_on_all_services
        available_services
      elsif formula
        # Formula.factory is deprecated; Formulary.factory is the supported API.
        Service.new(Formulary.factory(formula))
      end
      # Dispatch commands and aliases.
      case cmd
      when "cleanup", "clean", "cl", "rm" then cleanup
      when "list", "ls" then list
      when "restart", "relaunch", "reload", "r" then check(target) and restart(target)
      when "start", "launch", "load", "s", "l" then check(target) and start(target, custom_plist)
      when "stop", "unload", "terminate", "term", "t", "u" then check(target) and stop(target)
      else
        onoe "Unknown command `#{cmd}`"
        usage(1)
      end
    end
    # Check if formula has been found.
    def check(target)
      odie("Formula(e) missing, please provide a formula name or use --all") unless target
      true
    end
    # List all available services with status, user, and path to the plist file.
    def list
      formulae = available_services.map do |service|
        formula = {
          :name => service.formula.name,
          :started => false,
          :user => nil,
          :plist => nil,
        }
        if service.started?(:as => :root)
          formula[:started] = true
          formula[:user] = "root"
          formula[:plist] = ServicesCli.boot_path + "#{service.label}.plist"
        elsif service.started?(:as => :user)
          formula[:started] = true
          formula[:user] = ServicesCli.user
          formula[:plist] = ServicesCli.user_path + "#{service.label}.plist"
        end
        formula
      end
      if formulae.empty?
        opoo("No services available to control with `#{bin}`")
        return
      end
      # Column widths adapt to the longest name/user (minimum 4 chars).
      longest_name = [formulae.max_by { |formula| formula[:name].length }[:name].length, 4].max
      longest_user = [formulae.map { |formula| formula[:user].nil? ? 4 : formula[:user].length }.max, 4].max
      puts "#{Tty.white}%-#{longest_name}.#{longest_name}s %-7.7s %-#{longest_user}.#{longest_user}s %s#{Tty.reset}" % ["Name", "Status", "User", "Plist"]
      formulae.each do |formula|
        puts "%-#{longest_name}.#{longest_name}s %s %-#{longest_user}.#{longest_user}s %s" % [formula[:name], formula[:started] ? "#{Tty.green}started#{Tty.reset}" : "stopped", formula[:user], formula[:plist]]
      end
    end
    # Kill services that don't have a plist file, and remove unused plist files.
    def cleanup
      cleaned = []
      # 1. Kill services that don't have a plist file.
      running.each do |label|
        if svc = Service.from(label)
          unless svc.dest.file?
            puts "%-15.15s #{Tty.white}stale#{Tty.reset} => killing service..." % svc.name
            kill(svc)
            cleaned << label
          end
        else
          opoo "Service #{label} not managed by `#{bin}` => skipping"
        end
      end
      # 2. Remove unused plist files.
      Dir[path + "homebrew.mxcl.*.plist"].each do |file|
        next if running.include?(File.basename(file).sub(/\.plist$/i, ""))
        puts "Removing unused plist #{file}"
        rm file
        cleaned << file
      end
      puts "All #{root? ? "root" : "user-space"} services OK, nothing cleaned..." if cleaned.empty?
    end
    # Stop if loaded, then start again.
    def restart(target)
      Array(target).each do |service|
        stop(service) if service.loaded?
        start(service)
      end
    end
    # Start a service.
    def start(target, custom_plist = nil)
      if target.is_a?(Service)
        if target.loaded?
          puts "Service `#{target.name}` already started, use `#{bin} restart #{target.name}` to restart."
          return
        end
        if custom_plist
          if custom_plist =~ %r{\Ahttps?://.+}
            custom_plist = { :url => custom_plist }
          elsif File.exist?(custom_plist)
            custom_plist = Pathname.new(custom_plist)
          else
            odie "#{custom_plist} is not a url or existing file"
          end
        elsif !target.plist.file? && target.formula.plist.nil?
          # Fall back to any plist installed inside the keg itself.
          if target.formula.opt_prefix.exist? &&
             (keg = Keg.for target.formula.opt_prefix) &&
             keg.plist_installed?
            custom_plist = Pathname.new Dir["#{keg}/*.plist"].first
          else
            odie "Formula `#{target.name}` not installed, #plist not implemented or no plist file found"
          end
        end
      end
      Array(target).each do |service|
        temp = Tempfile.new(service.label)
        temp << service.generate_plist(custom_plist)
        temp.flush
        rm service.dest if service.dest.exist?
        service.dest_dir.mkpath unless service.dest_dir.directory?
        cp temp.path, service.dest
        # Clear tempfile.
        temp.close
        safe_system launchctl, "load", "-w", service.dest.to_s
        $?.to_i != 0 ? odie("Failed to start `#{service.name}`") : ohai("Successfully started `#{service.name}` (label: #{service.label})")
      end
    end
    # Stop a service, or kill it if no plist file is available.
    def stop(target)
      if target.is_a?(Service) && !target.loaded?
        rm target.dest if target.dest.exist? # get rid of installed plist anyway, dude
        if target.started?
          odie "Service `#{target.name}` is started as `#{target.started_as}`. Try `#{"sudo " unless ServicesCli.root?}#{bin} stop #{target.name}`"
        else
          odie "Service `#{target.name}` is not started."
        end
      end
      Array(target).select(&:loaded?).each do |service|
        if service.dest.exist?
          puts "Stopping `#{service.name}`... (might take a while)"
          safe_system launchctl, "unload", "-w", service.dest.to_s
          $?.to_i != 0 ? odie("Failed to stop `#{service.name}`") : ohai("Successfully stopped `#{service.name}` (label: #{service.label})")
        else
          puts "Stopping stale service `#{service.name}`... (might take a while)"
          kill(service)
        end
        rm service.dest if service.dest.exist?
      end
    end
    # Kill a service that has no plist file by issuing `launchctl remove`.
    def kill(svc)
      safe_system launchctl, "remove", svc.label
      odie("Failed to remove `#{svc.name}`, try again?") unless $?.to_i == 0
      while svc.loaded?
        puts " ...checking status"
        sleep(5)
      end
      ohai "Successfully stopped `#{svc.name}` via #{svc.label}"
    end
  end
end
# Wrapper for a formula to handle service-related stuff like parsing and
# generating the plist file.
class Service
  # Access the `Formula` instance.
  attr_reader :formula
  # Create a new `Service` instance from either a path or label.
  # Returns nil when the label does not match or the formula cannot be loaded.
  def self.from(path_or_label)
    return nil unless path_or_label =~ /homebrew\.mxcl\.([^\.]+)(\.plist)?\z/
    begin
      # Formula.factory is deprecated; Formulary.factory is the supported API.
      new(Formulary.factory($1))
    rescue
      nil
    end
  end
  # Initialize a new `Service` instance with supplied formula.
  def initialize(formula)
    @formula = formula
  end
  # Delegate access to `formula.name`.
  def name
    @name ||= formula.name
  end
  # Label delegates with formula.plist_name (e.g., `homebrew.mxcl.<formula>`).
  def label
    @label ||= formula.plist_name
  end
  # Path to a static plist file. This is always `homebrew.mxcl.<formula>.plist`.
  def plist
    @plist ||= formula.opt_prefix + "#{label}.plist"
  end
  # Path to destination plist directory. If run as root, it's `boot_path`, else `user_path`.
  def dest_dir
    ServicesCli.root? ? ServicesCli.boot_path : ServicesCli.user_path
  end
  # Path to destination plist. If run as root, it's in `boot_path`, else `user_path`.
  def dest
    dest_dir + "#{label}.plist"
  end
  # Returns `true` if any version of the formula is installed.
  def installed?
    formula.installed? || ((dir = formula.opt_prefix).directory? && !dir.children.empty?)
  end
  # Returns `true` if the formula implements #plist or the plist file exists.
  def plist?
    return false unless installed?
    return true if plist.file?
    return true unless formula.plist.nil?
    return false unless formula.opt_prefix.exist?
    return true if Keg.for(formula.opt_prefix).plist_installed?
  rescue NotAKegError
    false
  end
  # Returns `true` if the service is loaded, else false.
  def loaded?
    `#{ServicesCli.launchctl} list | grep #{label} 2>/dev/null`.chomp =~ /#{label}\z/
  end
  # Returns `true` if service is started (.plist is present in LaunchDaemon or LaunchAgent path), else `false`
  # Accepts Hash option `:as` with values `:root` for LaunchDaemon path or `:user` for LaunchAgent path.
  def started?(opts = {:as => false})
    if opts[:as] && opts[:as] == :root
      (ServicesCli.boot_path + "#{label}.plist").exist?
    elsif opts[:as] && opts[:as] == :user
      (ServicesCli.user_path + "#{label}.plist").exist?
    else
      started?(:as => :root) || started?(:as => :user)
    end
  end
  # Returns "root", the current user, or nil depending on where the plist is installed.
  def started_as
    return "root" if started?(:as => :root)
    return ServicesCli.user if started?(:as => :user)
    nil
  end
  # Get current PID of daemon process from launchctl.
  def pid
    status = `#{ServicesCli.launchctl} list | grep #{label} 2>/dev/null`.chomp
    return $1.to_i if status =~ /\A([\d]+)\s+.+#{label}\z/
  end
  # Generate that plist file, dude.
  def generate_plist(data = nil)
    data ||= plist.file? ? plist : formula.plist
    if data.respond_to?(:file?) && data.file?
      data = data.read
    elsif data.respond_to?(:keys) && data.keys.include?(:url)
      require "open-uri"
      # BUGFIX: open the URL inside the hash; `open(data)` on a Hash raises
      # TypeError, breaking the documented `{ :url => ... }` plist source.
      data = open(data[:url]).read
    elsif !data
      odie "Could not read the plist for `#{name}`!"
    end
    # Replace "template" variables and ensure label is always, always homebrew.mxcl.<formula>
    data = data.to_s.gsub(/\{\{([a-z][a-z0-9_]*)\}\}/i) { |_m| formula.send($1).to_s if formula.respond_to?($1) }.gsub(%r{(<key>Label</key>\s*<string>)[^<]*(</string>)}, '\1' + label + '\2')
    # Always remove the "UserName" as it doesn't work since 10.11.5
    if data =~ %r{<key>UserName</key>}
      data = data.gsub(%r{(<key>UserName</key>\s*<string>)[^<]*(</string>)}, "")
    end
    if ARGV.verbose?
      ohai "Generated plist for #{formula.name}:"
      puts " " + data.gsub("\n", "\n ")
      puts
    end
    data
  end
end
# Entry point: parse ARGV and dispatch the requested sub-command.
#
ServicesCli.run!
Formula.factory is deprecated
* use Formulary.factory instead.
# brew-services(1) - Easily start and stop formulae via launchctl
# ===============================================================
#
# ## SYNOPSIS
#
# [<sudo>] `brew services` `list`
# [<sudo>] `brew services` `restart` <formula>
# [<sudo>] `brew services` `start` <formula> [<plist>]
# [<sudo>] `brew services` `stop` <formula>
# [<sudo>] `brew services` `cleanup`
#
# ## DESCRIPTION
#
# Integrates Homebrew formulae with OS X's `launchctl` manager. Services can be
# added to either `/Library/LaunchDaemons` or `~/Library/LaunchAgents`.
# Basically, items in `/Library/LaunchDaemons` are started at boot, while those
# in `~/Library/LaunchAgents` are started at login.
#
# When started with `sudo`, it operates on `/Library/LaunchDaemons`; otherwise,
# it operates on `~/Library/LaunchAgents`.
#
# On `start` the plist file is generated and written to a `Tempfile`, and then
# copied to the launch path (existing plists are overwritten).
#
# ## OPTIONS
#
# To access everything quickly, some aliases have been added:
#
# * `rm`:
# Shortcut for `cleanup`, because that's basically what's being done.
#
# * `ls`:
# Because `list` is too much to type. :)
#
# * `reload`, `r`:
# Alias for `restart`, which gracefully restarts the selected service.
#
# * `load`, `s`:
# Alias for `start`, guess what it does...
#
# * `unload`, `term`, `t`:
# Alias for `stop`, stops and unloads selected service.
#
# ## SYNTAX
#
# Several existing formulae (like mysql, nginx) already write a custom plist
# file to the formulae prefix. Most of these implement `#plist`, which
# then, in turn, returns a neato plist file as a string.
#
# `brew services` operates on `#plist` as well, and requires supporting
# formulae to implement it. This method should either return a string containing
# the generated XML file, or return a `Pathname` instance pointing to a plist
# template or to a hash like this:
#
# { :url => "https://gist.github.com/raw/534777/63c4698872aaef11fe6e6c0c5514f35fd1b1687b/nginx.plist.xml" }
#
# Some simple template parsing is performed. All variables like `{{name}}` are
# replaced by basically doing the following:
# `formula.send('name').to_s if formula.respond_to?('name')`, a bit like
# mustache. So any variable in the `Formula` is available as a template
# variable, like `{{var}}`, `{{bin}}`, and `{{usr}}`.
#
# ## EXAMPLES
#
# Install and start the service "mysql" at boot:
#
# $ brew install mysql
# $ sudo brew services start mysql
#
# Stop the service "mysql" (after it was launched at boot):
#
# $ sudo brew services stop mysql
#
# Start the service "memcached" at login:
#
# $ brew install memcached
# $ brew services start memcached
#
# List all running services for the current user and then for root:
#
# $ brew services list
# $ sudo brew services list
#
# Stop all running services for the current user:
#
# $ brew services stop --all
#
# ## BUGS
#
# `brew-services.rb` might not handle all edge cases, but it will try to fix
# problems if you run `brew services cleanup`.
#
# Command-line driver for `brew services`: maps sub-commands (list, start,
# stop, restart, cleanup, plus aliases) onto launchctl operations.
module ServicesCli
  class << self
    # Binary name.
    def bin
      "brew services"
    end
    # Path to launchctl binary.
    def launchctl
      which("launchctl")
    end
    # Woohoo, we are root dude!
    def root?
      Process.uid == 0
    end
    # Current user, i.e., owner of `HOMEBREW_CELLAR`.
    def user
      @user ||= `/usr/bin/stat -f '%Su' #{HOMEBREW_CELLAR} 2>/dev/null`.chomp || `/usr/bin/whoami`.chomp
    end
    # Run at boot.
    def boot_path
      Pathname.new("/Library/LaunchDaemons")
    end
    # Run at login.
    def user_path
      Pathname.new(ENV["HOME"] + "/Library/LaunchAgents")
    end
    # If root, return `boot_path`, else return `user_path`.
    def path
      root? ? boot_path : user_path
    end
    # Find all currently running services via launchctl list.
    def running
      `#{launchctl} list | grep homebrew.mxcl`.chomp.split("\n").map { |svc| $1 if svc =~ /(homebrew\.mxcl\..+)\z/ }.compact
    end
    # Check if running as Homebrew and load required libraries, et al.
    def homebrew!
      abort("Runtime error: Homebrew is required. Please start via `#{bin} ...`") unless defined?(HOMEBREW_LIBRARY_PATH)
      %w[fileutils pathname tempfile formula utils].each { |req| require(req) }
      extend(FileUtils)
    end
    # All available services (installed formulae that provide a plist).
    def available_services
      Formula.installed.map { |formula| Service.new(formula) }.select(&:plist?)
    end
    # Print usage and `exit(...)` with supplied exit code. If code
    # is set to `false`, then exit is ignored.
    def usage(code = 0)
      puts "usage: [sudo] #{bin} [--help] <command> [<formula>|--all]"
      puts
      puts "Small wrapper around `launchctl` for supported formulae, commands available:"
      puts " cleanup Get rid of stale services and unused plists"
      puts " list List all services managed by `#{bin}`"
      puts " restart Gracefully restart service(s)"
      puts " start Start service(s)"
      puts " stop Stop service(s)"
      puts
      puts "Options, sudo and paths:"
      puts
      puts " sudo When run as root, operates on #{boot_path} (run at boot!)"
      puts " Run at boot: #{boot_path}"
      puts " Run at login: #{user_path}"
      puts
      exit(code) unless code == false
      true
    end
    # Run and start the command loop.
    def run!
      homebrew!
      usage if ARGV.empty? || ARGV.include?("help") || ARGV.include?("--help") || ARGV.include?("-h")
      # pbpaste's exit status is a proxy for detecting the use of reattach-to-user-namespace
      if ENV["TMUX"] && !quiet_system("/usr/bin/pbpaste")
        odie "brew services cannot run under tmux!"
      end
      # Parse arguments.
      act_on_all_services = ARGV.include?("--all")
      cmd = ARGV.named[0]
      formula = ARGV.named[1]
      custom_plist = ARGV.named[2]
      target = if act_on_all_services
        available_services
      elsif formula
        Service.new(Formulary.factory(formula))
      end
      # Dispatch commands and aliases.
      case cmd
      when "cleanup", "clean", "cl", "rm" then cleanup
      when "list", "ls" then list
      when "restart", "relaunch", "reload", "r" then check(target) and restart(target)
      when "start", "launch", "load", "s", "l" then check(target) and start(target, custom_plist)
      when "stop", "unload", "terminate", "term", "t", "u" then check(target) and stop(target)
      else
        onoe "Unknown command `#{cmd}`"
        usage(1)
      end
    end
    # Check if formula has been found.
    def check(target)
      odie("Formula(e) missing, please provide a formula name or use --all") unless target
      true
    end
    # List all available services with status, user, and path to the plist file.
    def list
      formulae = available_services.map do |service|
        formula = {
          :name => service.formula.name,
          :started => false,
          :user => nil,
          :plist => nil,
        }
        if service.started?(:as => :root)
          formula[:started] = true
          formula[:user] = "root"
          formula[:plist] = ServicesCli.boot_path + "#{service.label}.plist"
        elsif service.started?(:as => :user)
          formula[:started] = true
          formula[:user] = ServicesCli.user
          formula[:plist] = ServicesCli.user_path + "#{service.label}.plist"
        end
        formula
      end
      if formulae.empty?
        opoo("No services available to control with `#{bin}`")
        return
      end
      # Column widths adapt to the longest name/user (minimum 4 chars).
      longest_name = [formulae.max_by { |formula| formula[:name].length }[:name].length, 4].max
      longest_user = [formulae.map { |formula| formula[:user].nil? ? 4 : formula[:user].length }.max, 4].max
      puts "#{Tty.white}%-#{longest_name}.#{longest_name}s %-7.7s %-#{longest_user}.#{longest_user}s %s#{Tty.reset}" % ["Name", "Status", "User", "Plist"]
      formulae.each do |formula|
        puts "%-#{longest_name}.#{longest_name}s %s %-#{longest_user}.#{longest_user}s %s" % [formula[:name], formula[:started] ? "#{Tty.green}started#{Tty.reset}" : "stopped", formula[:user], formula[:plist]]
      end
    end
    # Kill services that don't have a plist file, and remove unused plist files.
    def cleanup
      cleaned = []
      # 1. Kill services that don't have a plist file.
      running.each do |label|
        if svc = Service.from(label)
          unless svc.dest.file?
            puts "%-15.15s #{Tty.white}stale#{Tty.reset} => killing service..." % svc.name
            kill(svc)
            cleaned << label
          end
        else
          opoo "Service #{label} not managed by `#{bin}` => skipping"
        end
      end
      # 2. Remove unused plist files.
      Dir[path + "homebrew.mxcl.*.plist"].each do |file|
        next if running.include?(File.basename(file).sub(/\.plist$/i, ""))
        puts "Removing unused plist #{file}"
        rm file
        cleaned << file
      end
      puts "All #{root? ? "root" : "user-space"} services OK, nothing cleaned..." if cleaned.empty?
    end
    # Stop if loaded, then start again.
    def restart(target)
      Array(target).each do |service|
        stop(service) if service.loaded?
        start(service)
      end
    end
    # Start a service.
    def start(target, custom_plist = nil)
      if target.is_a?(Service)
        if target.loaded?
          puts "Service `#{target.name}` already started, use `#{bin} restart #{target.name}` to restart."
          return
        end
        if custom_plist
          if custom_plist =~ %r{\Ahttps?://.+}
            custom_plist = { :url => custom_plist }
          elsif File.exist?(custom_plist)
            custom_plist = Pathname.new(custom_plist)
          else
            odie "#{custom_plist} is not a url or existing file"
          end
        elsif !target.plist.file? && target.formula.plist.nil?
          # Fall back to any plist installed inside the keg itself.
          if target.formula.opt_prefix.exist? &&
             (keg = Keg.for target.formula.opt_prefix) &&
             keg.plist_installed?
            custom_plist = Pathname.new Dir["#{keg}/*.plist"].first
          else
            odie "Formula `#{target.name}` not installed, #plist not implemented or no plist file found"
          end
        end
      end
      Array(target).each do |service|
        temp = Tempfile.new(service.label)
        temp << service.generate_plist(custom_plist)
        temp.flush
        rm service.dest if service.dest.exist?
        service.dest_dir.mkpath unless service.dest_dir.directory?
        cp temp.path, service.dest
        # Clear tempfile.
        temp.close
        safe_system launchctl, "load", "-w", service.dest.to_s
        $?.to_i != 0 ? odie("Failed to start `#{service.name}`") : ohai("Successfully started `#{service.name}` (label: #{service.label})")
      end
    end
    # Stop a service, or kill it if no plist file is available.
    def stop(target)
      if target.is_a?(Service) && !target.loaded?
        rm target.dest if target.dest.exist? # get rid of installed plist anyway, dude
        if target.started?
          odie "Service `#{target.name}` is started as `#{target.started_as}`. Try `#{"sudo " unless ServicesCli.root?}#{bin} stop #{target.name}`"
        else
          odie "Service `#{target.name}` is not started."
        end
      end
      Array(target).select(&:loaded?).each do |service|
        if service.dest.exist?
          puts "Stopping `#{service.name}`... (might take a while)"
          safe_system launchctl, "unload", "-w", service.dest.to_s
          $?.to_i != 0 ? odie("Failed to stop `#{service.name}`") : ohai("Successfully stopped `#{service.name}` (label: #{service.label})")
        else
          puts "Stopping stale service `#{service.name}`... (might take a while)"
          kill(service)
        end
        rm service.dest if service.dest.exist?
      end
    end
    # Kill a service that has no plist file by issuing `launchctl remove`.
    def kill(svc)
      safe_system launchctl, "remove", svc.label
      odie("Failed to remove `#{svc.name}`, try again?") unless $?.to_i == 0
      while svc.loaded?
        puts " ...checking status"
        sleep(5)
      end
      ohai "Successfully stopped `#{svc.name}` via #{svc.label}"
    end
  end
end
# Wrapper for a formula to handle service-related stuff like parsing and
# generating the plist file.
class Service
# Access the `Formula` instance.
attr_reader :formula
# Create a new `Service` instance from either a path or label.
# Returns nil when the label does not match or the formula cannot be loaded.
def self.from(path_or_label)
  return nil unless path_or_label =~ /homebrew\.mxcl\.([^\.]+)(\.plist)?\z/
  begin
    # Consistency fix: run! already uses Formulary.factory; Formula.factory
    # is deprecated.
    new(Formulary.factory($1))
  rescue
    nil
  end
end
# Initialize a new `Service` instance with supplied formula.
# formula: the Formula object this service wraps (stored, never copied).
def initialize(formula)
  @formula = formula
end
# Delegate access to `formula.name` (memoized).
def name
  @name ||= formula.name
end
# Label delegates with formula.plist_name (e.g., `homebrew.mxcl.<formula>`),
# memoized.
def label
  @label ||= formula.plist_name
end
# Path to a static plist file inside the formula's opt prefix.
# This is always `homebrew.mxcl.<formula>.plist` (memoized).
def plist
  @plist ||= formula.opt_prefix + "#{label}.plist"
end
# Path to destination plist directory. If run as root, it's `boot_path`
# (LaunchDaemons), else `user_path` (LaunchAgents).
def dest_dir
  ServicesCli.root? ? ServicesCli.boot_path : ServicesCli.user_path
end
# Path to destination plist. If run as root, it's in `boot_path`, else `user_path`.
def dest
  dest_dir + "#{label}.plist"
end
# Returns `true` if any version of the formula is installed
# (either registered as installed or with a non-empty opt prefix).
def installed?
  formula.installed? || ((dir = formula.opt_prefix).directory? && !dir.children.empty?)
end
# Returns `true` if the formula implements #plist or the plist file exists.
# Checks, in order: static plist file, #plist implementation, keg-installed
# plist. NotAKegError from Keg.for is treated as "no plist".
def plist?
  return false unless installed?
  return true if plist.file?
  return true unless formula.plist.nil?
  return false unless formula.opt_prefix.exist?
  return true if Keg.for(formula.opt_prefix).plist_installed?
rescue NotAKegError
  false
end
# Returns `true` if the service is loaded, else false.
def loaded?
`#{ServicesCli.launchctl} list | grep #{label} 2>/dev/null`.chomp =~ /#{label}\z/
end
# Returns `true` if service is started (.plist is present in LaunchDaemon or LaunchAgent path), else `false`
# Accepts Hash option `:as` with values `:root` for LaunchDaemon path or `:user` for LaunchAgent path.
def started?(opts = {:as => false})
if opts[:as] && opts[:as] == :root
(ServicesCli.boot_path + "#{label}.plist").exist?
elsif opts[:as] && opts[:as] == :user
(ServicesCli.user_path + "#{label}.plist").exist?
else
started?(:as => :root) || started?(:as => :user)
end
end
def started_as
return "root" if started?(:as => :root)
return ServicesCli.user if started?(:as => :user)
nil
end
# Get current PID of daemon process from launchctl.
def pid
status = `#{ServicesCli.launchctl} list | grep #{label} 2>/dev/null`.chomp
return $1.to_i if status =~ /\A([\d]+)\s+.+#{label}\z/
end
# Generate that plist file, dude.
def generate_plist(data = nil)
data ||= plist.file? ? plist : formula.plist
if data.respond_to?(:file?) && data.file?
data = data.read
elsif data.respond_to?(:keys) && data.keys.include?(:url)
require "open-uri"
data = open(data).read
elsif !data
odie "Could not read the plist for `#{name}`!"
end
# Replace "template" variables and ensure label is always, always homebrew.mxcl.<formula>
data = data.to_s.gsub(/\{\{([a-z][a-z0-9_]*)\}\}/i) { |_m| formula.send($1).to_s if formula.respond_to?($1) }.gsub(%r{(<key>Label</key>\s*<string>)[^<]*(</string>)}, '\1' + label + '\2')
# Always remove the "UserName" as it doesn't work since 10.11.5
if data =~ %r{<key>UserName</key>}
data = data.gsub(%r{(<key>UserName</key>\s*<string>)[^<]*(</string>)}, "")
end
if ARGV.verbose?
ohai "Generated plist for #{formula.name}:"
puts " " + data.gsub("\n", "\n ")
puts
end
data
end
end
# Start the CLI dispatch stuff.
# Entry point: parses ARGV and dispatches to the command implementation.
ServicesCli.run!
|
#
# Be sure to run `pod spec lint HLZInfiniteScrollView.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
  # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  #
  # `pod spec lint` expects the description to be longer than the
  # summary, so the summary stays short and the full sentence lives in
  # the description.
  #
  s.name = "HLZInfiniteScrollView"
  s.version = "1.0.0"
  s.summary = "An infinite scroll view."
  s.description = <<-DESC
An Objective-C class providing a scroll view which can be scrolled infinitely and automatically.
  DESC
  s.homepage = "https://github.com/hulizhen/HLZInfiniteScrollView"
  s.screenshots = "https://cloud.githubusercontent.com/assets/2831422/16691969/7bef5aec-4561-11e6-9163-2dae603c0635.gif"
  s.license = "MIT"
  s.author = { "Hu Lizhen" => "hulizhen.public@gmail.com" }
  s.platform = :ios, "7.0"
  # Use the full clone URL (with the `.git` suffix) so the source is a
  # valid Git endpoint, tagged at the release version.
  s.source = { :git => "https://github.com/hulizhen/HLZInfiniteScrollView.git", :tag => "#{s.version}" }
  s.source_files = "HLZInfiniteScrollView", "HLZInfiniteScrollView/HLZInfiniteScrollView.{h,m}"
  s.requires_arc = true
end
Update HLZInfiniteScrollView.podspec.
#
# Be sure to run `pod spec lint HLZInfiniteScrollView.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# Pod identity; the version must match the git tag referenced below.
s.name = "HLZInfiniteScrollView"
s.version = "1.0.0"
# Short summary plus a longer description, as `pod spec lint` expects.
s.summary = "An infinite scroll view."
s.description = <<-DESC
An Objective-C class providing a scroll view which can be scrolled infinitely and automatically.
DESC
s.homepage = "https://github.com/hulizhen/HLZInfiniteScrollView"
s.screenshots = "https://cloud.githubusercontent.com/assets/2831422/16691969/7bef5aec-4561-11e6-9163-2dae603c0635.gif"
s.license = "MIT"
s.author = { "Hu Lizhen" => "hulizhen.public@gmail.com" }
# Minimum deployment target.
s.platform = :ios, "7.0"
# Full clone URL, tagged at the release version.
s.source = { :git => "https://github.com/hulizhen/HLZInfiniteScrollView.git", :tag => "#{s.version}" }
s.source_files = "HLZInfiniteScrollView", "HLZInfiniteScrollView/HLZInfiniteScrollView.{h,m}"
s.requires_arc = true
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'pactas_itero/version'
# Gem specification for the pactas_itero API client.
Gem::Specification.new do |spec|
  spec.name = 'pactas_itero'
  spec.version = PactasItero::VERSION
  spec.authors = ['Simon Fröhler']
  spec.email = "simon@shipcloud.io"
  spec.summary = %q{pactas_itero provides a client mapping for accessing
the Pactas Itero API.}
  # Typo fix: "adn" -> "and".
  spec.description = %q{pactas_itero provides a client mapping for accessing
the Pactas Itero API, making it easy to post your data to, and read your
data from your Pactas account.}
  spec.homepage = 'https://github.com/webionate/pactas_itero'
  spec.license = 'MIT'
  # Package library code and executables; test files are derived from
  # the packaged spec/ directory.
  spec.files = Dir["lib/**/*.rb"] + Dir["bin/*"]
  spec.files += Dir["[A-Z]*"] + Dir["spec/**/*"]
  spec.test_files = spec.files.grep(%r{^spec/})
  spec.executables = spec.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.required_ruby_version = ">= 2.0"
  spec.add_dependency('faraday_middleware', '~> 0.9.1')
  spec.add_dependency('rash', '~> 0.4.0')
  spec.add_development_dependency 'bundler'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency "rspec", '~> 3.5.0'
  spec.add_development_dependency("simplecov", "~> 0.12.0")
  spec.add_development_dependency("webmock", "~> 2.3")
end
Upgrade rspec to version 3.6.0 (#16)
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'pactas_itero/version'
# Gem specification for the pactas_itero API client.
Gem::Specification.new do |spec|
  spec.name = 'pactas_itero'
  spec.version = PactasItero::VERSION
  spec.authors = ['Simon Fröhler']
  spec.email = "simon@shipcloud.io"
  spec.summary = %q{pactas_itero provides a client mapping for accessing
the Pactas Itero API.}
  # Typo fix: "adn" -> "and".
  spec.description = %q{pactas_itero provides a client mapping for accessing
the Pactas Itero API, making it easy to post your data to, and read your
data from your Pactas account.}
  spec.homepage = 'https://github.com/webionate/pactas_itero'
  spec.license = 'MIT'
  # Package library code and executables; test files are derived from
  # the packaged spec/ directory.
  spec.files = Dir["lib/**/*.rb"] + Dir["bin/*"]
  spec.files += Dir["[A-Z]*"] + Dir["spec/**/*"]
  spec.test_files = spec.files.grep(%r{^spec/})
  spec.executables = spec.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.required_ruby_version = ">= 2.0"
  spec.add_dependency('faraday_middleware', '~> 0.9.1')
  spec.add_dependency('rash', '~> 0.4.0')
  spec.add_development_dependency 'bundler'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency "rspec", '~> 3.6.0'
  spec.add_development_dependency("simplecov", "~> 0.12.0")
  spec.add_development_dependency("webmock", "~> 2.3")
end
|
# -*- encoding: utf-8 -*-
# Gemspec for padma-assets: shared assets for PADMA applications.
Gem::Specification.new do |gem|
  gem.name = "padma-assets"
  gem.version = '0.2.11'
  gem.authors = ["Dwayne Macgowan"]
  gem.email = ["dwaynemac@gmail.com"]
  gem.description = %q{Assets to be re-used in PADMA applications}
  gem.summary = %q{Assets to be re-used in PADMA applications}
  gem.homepage = ""

  # Ship the asset directories; executables and test files are derived
  # from the packaged file list.
  gem.files = Dir["{app,lib,vendor,config}/**/*"]
  gem.executables = gem.files.grep(%r{^bin/}).map { |file| File.basename(file) }
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]

  gem.add_dependency "railties", ">= 3.1"
  gem.add_dependency "accounts_client", ">= 0.2.22"
end
v0.2.13
# -*- encoding: utf-8 -*-
# Gemspec for padma-assets: shared assets for PADMA applications.
Gem::Specification.new do |gem|
  gem.name = "padma-assets"
  gem.version = '0.2.13'
  gem.authors = ["Dwayne Macgowan"]
  gem.email = ["dwaynemac@gmail.com"]
  gem.description = %q{Assets to be re-used in PADMA applications}
  gem.summary = %q{Assets to be re-used in PADMA applications}
  gem.homepage = ""

  # Ship the asset directories; executables and test files are derived
  # from the packaged file list.
  gem.files = Dir["{app,lib,vendor,config}/**/*"]
  gem.executables = gem.files.grep(%r{^bin/}).map { |file| File.basename(file) }
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]

  gem.add_dependency "railties", ">= 3.1"
  gem.add_dependency "accounts_client", ">= 0.2.22"
end
|
require 'net/http'
require 'uri'
require 'rexml/document'
require 'json'
require 'jenkins_api_client'
# Branch and view to reconfigure; the "XX" defaults are placeholders
# expected to be overridden via the environment.
@new_branch = ENV['NEW_BRANCH'] || 'Release-1.0.XX'
@view_to_configure = ENV['VIEW_TO_CONFIGURE'] || 'CI-XX-Rel'
# Jenkins API client for the target server.
# NOTE(review): username/password are blank — confirm anonymous access
# is intended for this Jenkins instance.
@client = JenkinsApi::Client.new(:server_url => 'http://jenkins.slidev.org', :server_port => '8080',
:username => '', :password => '', :jenkins_path => '/jenkins', :debug => false)
# Return the list of job names belonging to the given Jenkins view.
#
# Fixes a NameError: the previous implementation stored the result in
# `jobs`, then read an undefined `response` variable
# (`JSON.parse(response.body)`). The jenkins_api_client call already
# returns the parsed job list, so return it directly.
def get_jobs(view)
  @client.view.list_jobs(view)
end
# Point a single Jenkins job at the new release branch: fetch the job's
# config, rewrite every git BranchSpec <name> element to @new_branch,
# then post the config back.
# NOTE(review): jenkins_api_client's Job#get_config typically returns
# the raw config XML as a String; calling REXML's `elements` on the
# result assumes it is a parsed REXML::Document — confirm against the
# client version in use.
def fix_job(job_name)
puts "Adjusting " + job_name + " to " + @new_branch
job_xml = @client.job.get_config(job_name)
job_xml.elements.each("*/scm/branches/hudson.plugins.git.BranchSpec/name") do |element|
puts "Current Branch: #{element.text} changing it to #{@new_branch}"
element.text = @new_branch
end
@client.job.post_config(job_name, job_xml)
end
# Fetch every job in the configured view and point each at the new branch.
actual_jobs = get_jobs @view_to_configure
puts actual_jobs
actual_jobs.each {|j| fix_job(j) }
Bug fixing
require 'net/http'
require 'uri'
require 'rexml/document'
require 'json'
require 'jenkins_api_client'
# Branch and view to reconfigure; the "XX" defaults are placeholders
# expected to be overridden via the environment.
@new_branch = ENV['NEW_BRANCH'] || 'Release-1.0.XX'
@view_to_configure = ENV['VIEW_TO_CONFIGURE'] || 'CI-XX-Rel'
# Jenkins API client for the target server.
# NOTE(review): username/password are blank — confirm anonymous access
# is intended for this Jenkins instance.
@client = JenkinsApi::Client.new(:server_url => 'http://jenkins.slidev.org', :server_port => '8080',
:username => '', :password => '', :jenkins_path => '/jenkins', :debug => false)
# Return the list of job names belonging to the given Jenkins view.
# The client call already yields the parsed list, so its value is the
# method's implicit return.
def get_jobs(view)
  @client.view.list_jobs(view)
end
# Point a single Jenkins job at the new release branch: fetch the job's
# config, rewrite every git BranchSpec <name> element to @new_branch,
# then post the config back.
# NOTE(review): jenkins_api_client's Job#get_config typically returns
# the raw config XML as a String; calling REXML's `elements` on the
# result assumes it is a parsed REXML::Document — confirm against the
# client version in use.
def fix_job(job_name)
puts "Adjusting " + job_name + " to " + @new_branch
job_xml = @client.job.get_config(job_name)
job_xml.elements.each("*/scm/branches/hudson.plugins.git.BranchSpec/name") do |element|
puts "Current Branch: #{element.text} changing it to #{@new_branch}"
element.text = @new_branch
end
@client.job.post_config(job_name, job_xml)
end
# Fetch every job in the configured view; each is updated below.
actual_jobs = get_jobs @view_to_configure
puts actual_jobs
actual_jobs.each {|j| fix_job(j) } |
# -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'metaforce/delivery_method/version'
# Gem specification for metaforce-delivery_method.
Gem::Specification.new do |gem|
  gem.name = 'metaforce-delivery_method'
  gem.version = Metaforce::DeliveryMethod::VERSION
  gem.authors = ['Eric J. Holmes']
  gem.email = ['eric@ejholmes.net']
  gem.description = %q{Delivery method for sending emails from Ruby using Salesforce}
  gem.summary = %q{Delivery method for sending emails from Ruby using Salesforce}
  gem.homepage = 'https://github.com/ejholmes/metaforce-delivery_method'
  # Package everything tracked by git; executables and test files are
  # derived from that list.
  gem.files = `git ls-files`.split($/)
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ['lib']
  # Relaxed from '~> 1.0.4': a patch-level pessimistic pin only admitted
  # 1.0.x releases, blocking compatible 1.x upgrades for downstream users.
  gem.add_dependency 'metaforce', '~> 1.0'
  gem.add_development_dependency 'rspec'
  gem.add_development_dependency 'mail'
end
Loser gem restriction.
# -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'metaforce/delivery_method/version'
# Gem specification for metaforce-delivery_method.
Gem::Specification.new do |gem|
gem.name = 'metaforce-delivery_method'
gem.version = Metaforce::DeliveryMethod::VERSION
gem.authors = ['Eric J. Holmes']
gem.email = ['eric@ejholmes.net']
gem.description = %q{Delivery method for sending emails from Ruby using Salesforce}
gem.summary = %q{Delivery method for sending emails from Ruby using Salesforce}
gem.homepage = 'https://github.com/ejholmes/metaforce-delivery_method'
# Package everything tracked by git; executables and test files are
# derived from that list.
gem.files = `git ls-files`.split($/)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ['lib']
# Pessimistic constraint at the minor level: any metaforce 1.x release
# satisfies this.
gem.add_dependency 'metaforce', '~> 1.0'
gem.add_development_dependency 'rspec'
gem.add_development_dependency 'mail'
end
|
class BHLEADConverter < EADConverter
# Advertise the import type(s) this converter handles. The show_hidden
# flag is part of the converter interface but does not change the list.
def self.import_types(show_hidden = false)
  [{
    :name => "bhl_ead_xml",
    :description => "Import BHL EAD records from an XML file"
  }]
end
# Build a converter instance for the supported import type; any other
# type yields nil.
def self.instance_for(type, input_file)
  return nil unless type == "bhl_ead_xml"
  self.new(input_file)
end
# Human-readable description of this import profile, shown in the
# ArchivesSpace import UI.
def self.profile
"Convert EAD To ArchivesSpace JSONModel records"
end
# Post-process the stock converter's formatted note content, trimming
# trailing commas and spaces. Note: gsub with /$/ anchors at each line
# end, so the trim applies per line of multi-line content.
def format_content(content)
super.gsub(/[, ]+$/,"") # Remove trailing commas and spaces
end
def self.configure
super
# BEGIN UNITID CUSTOMIZATIONS
# Let's take those brackets off of unitids and just add them in the exporter
with 'unitid' do |node|
ancestor(:note_multipart, :resource, :archival_object) do |obj|
case obj.class.record_type
when 'resource'
# inner_xml.split(/[\/_\-\.\s]/).each_with_index do |id, i|
# set receiver, "id_#{i}".to_sym, id
# end
set obj, :id_0, inner_xml
when 'archival_object'
set obj, :component_id, inner_xml.gsub("[","").gsub("]","").strip
end
end
end
# BEGIN TITLEPROPER AND AUTHOR CUSTOMIZATIONS
# The stock ArchivesSpace converter sets the author and titleproper elements each time it finds a titleproper or author elements
# This means that it first creates the elements using titlestmt/author and titlestmt/titleproper, and then overwrites the values when it reaches titlepage
# We want to use the titlepage statements. Changing this to be more explicit about using the statement that we want, and to remove some unwanted linebreaks.
# The EAD importer ignores titlepage; we need to unignore it
with "titlepage" do
@ignore = false
end
with 'titlepage/titleproper' do
type = att('type')
title_statement = inner_xml.gsub("<lb/>"," <lb/>")
case type
when 'filing'
set :finding_aid_filing_title, title_statement.gsub("<lb/>","").gsub(/<date(.*?)<\/date>/,"").gsub(/\s+/," ").strip
else
set :finding_aid_title, title_statement.gsub("<lb/>","").gsub(/<date(.*?)<\/date>/,"").gsub(/\s+/," ").gsub(/[,\s]+$/,"").strip
end
end
with 'titlepage/author' do
author_statement = inner_xml.gsub("<lb/>"," <lb/>")
set :finding_aid_author, author_statement.gsub("<lb/>","").gsub(/\s+/," ").strip
end
# Skip the titleproper and author statements from titlestmt
with 'titlestmt/titleproper' do
next
end
with 'titlestmt/author' do
next
end
# Skip these to override the default ArchiveSpace functionality, which searches for a titleproper or an author anywhere
with 'titleproper' do
next
end
with 'author' do
next
end
# END TITLEPROPER CUSTOMIZATIONS
# BEGIN CLASSIFICATION CUSTOMIZATIONS
# In our EADs, the most consistent way that MHC and UARP finding aids are identified is via the titlepage/publisher
# In ArchivesSpace, we will be using Classifications to distinguish between the two
# This modification will link the resource being created to the appropriate Classification in ArchivesSpace
with 'classification' do
set :classifications, {'ref' => att('ref')}
end
# END CLASSIFICATION CUSTOMIZATIONS
# BEGIN CHRONLIST CUSTOMIZATIONS
# For some reason the stock importer doesn't separate <chronlist>s out of notes like it does with <list>s
# Like, it includes the mixed content <chronlist> within the note text and also makes a chronological list, duplicating the content
# The addition of (split_tag = 'chronlist') to the insert_into_subnotes method call here fixes that
with 'chronlist' do
if ancestor(:note_multipart)
left_overs = insert_into_subnotes(split_tag = 'chronlist')
else
left_overs = nil
make :note_multipart, {
:type => node.name,
:persistent_id => att('id'),
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
make :note_chronology do |note|
set ancestor(:note_multipart), :subnotes, note
end
# and finally put the leftovers back in the list of subnotes...
if ( !left_overs.nil? && left_overs["content"] && left_overs["content"].length > 0 )
set ancestor(:note_multipart), :subnotes, left_overs
end
end
# END CHRONLIST CUSTOMIZATIONS
# BEGIN BIBLIOGRAPHY CUSTOMIZATIONS
# Our bibliographies are really more like general notes with paragraphs, lists, etc. We don't have any bibliographies
# that are simply a collection of <bibref>s, and all of the bibliographies that do have <bibref>s have them inserted into
# items in lists. This change will import bibliographies as a general note, which is really more appropriate given their content
with 'bibliography' do |node|
content = inner_xml.tap {|xml|
xml.sub!(/<head>.*?<\/head>/m, '')
# xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
# xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
}
make :note_multipart, {
:type => 'odd',
:persistent_id => att('id'),
:subnotes => {
'jsonmodel_type' => 'note_text',
'content' => format_content( content )
}
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
%w(bibliography index).each do |x|
next if x == 'bibliography'
with "index/head" do |node|
set :label, format_content( inner_xml )
end
with "index/p" do
set :content, format_content( inner_xml )
end
end
with 'bibliography/bibref' do
next
end
with 'bibliography/p' do
next
end
with 'bibliography/head' do
next
end
# END BIBLIOGRAPHY CUSTOMIZATIONS
# BEGIN BLOCKQUOTE P TAG FIX
# The ArchivesSpace EAD importer replaces all <p> tags with double line breaks
# This leads to too many line breaks surrounding closing block quote tags
# On export, this invalidates the EAD
# The following code is really hacky workaround to reinsert <p> tags within <blockquote>s
# Note: We only have blockquotes in bioghists and scopecontents, so call modified_format_content on just this block is sufficient
# This function calls the regular format_content function, and then does a few other things, like preserving blockquote p tags and removing opening and closing parens from some notes, before returning the content
# Run the regular format_content, then apply local tweaks:
# * for single-paragraph odd/abstract/accessrestrict/daodesc notes,
#   strip one layer of wrapping parentheses/brackets;
# * re-insert <p> tags inside <blockquote>s, which the stock importer's
#   paragraph handling strips (invalidating the EAD on export).
def modified_format_content(content, note)
  formatted = format_content(content)
  paragraphs = formatted.split("\n\n")
  if paragraphs.length == 1 && %w(odd abstract accessrestrict daodesc).include?(note)
    formatted = $1 if formatted =~ /^[\[\(]+(.*)[\]\)]+$/
  end
  formatted.gsub(/<blockquote>\s*?/, "<blockquote><p>").gsub(/\s*?<\/blockquote>/, "</p></blockquote>")
end
%w(accessrestrict accessrestrict/legalstatus \
accruals acqinfo altformavail appraisal arrangement \
bioghist custodhist \
fileplan odd otherfindaid originalsloc phystech \
prefercite processinfo relatedmaterial scopecontent \
separatedmaterial userestrict ).each do |note|
with note do |node|
content = inner_xml.tap {|xml|
xml.sub!(/<head>.*?<\/head>/m, '')
# xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
# xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
}
make :note_multipart, {
:type => node.name,
:persistent_id => att('id'),
:publish => true,
:subnotes => {
'jsonmodel_type' => 'note_text',
'content' => modified_format_content( content, note )
}
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
end
# END BLOCKQUOTE P TAG FIX
# BEGIN CONDITIONAL SKIPS
# We have lists and indexes with all sorts of crazy things, like <container>, <physdesc>, <physloc>, etc. tags within <item> or <ref> tags
# So, we need to tell the importer to skip those things only when they appear in places where they shouldn't, otherwise do
# it's normal thing
# REMINDER: If using the container management plugin, add the line 'next if context == :note_orderedlist' to "with 'container' do" in
# the converter_extra_container_values mixin
%w(abstract langmaterial materialspec physloc).each do |note|
next if note == "langmaterial"
with note do |node|
next if context == :note_orderedlist # skip these
next if context == :items # these too
content = inner_xml
next if content =~ /\A<language langcode=\"[a-z]+\"\/>\Z/
if content.match(/\A<language langcode=\"[a-z]+\"\s*>([^<]+)<\/language>\Z/)
content = $1
end
make :note_singlepart, {
:type => note,
:persistent_id => att('id'),
:publish => true,
:content => modified_format_content( content.sub(/<head>.*?<\/head>/, ''), note )
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
end
with 'list' do
next if ancestor(:note_index)
if ancestor(:note_multipart)
left_overs = insert_into_subnotes
else
left_overs = nil
make :note_multipart, {
:type => 'odd',
:persistent_id => att('id'),
:publish => true,
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
# now let's make the subnote list
type = att('type')
if type == 'deflist' || (type.nil? && inner_xml.match(/<deflist>/))
make :note_definedlist do |note|
set ancestor(:note_multipart), :subnotes, note
end
else
make :note_orderedlist, {
:enumeration => att('numeration')
} do |note|
set ancestor(:note_multipart), :subnotes, note
end
end
# and finally put the leftovers back in the list of subnotes...
if ( !left_overs.nil? && left_overs["content"] && left_overs["content"].length > 0 )
set ancestor(:note_multipart), :subnotes, left_overs
end
end
with 'list/head' do |node|
ancestor(:note_definedlist, :note_orderedlist) do |obj|
next if obj.title
obj.title = format_content( inner_xml)
end
end
with 'list/item' do
# Okay this is another one of those hacky things that work
# The problem: we have many items nested within items, like <list><item>First item <list><item>Subitem</item></list></item></list>
# This would make one item like:
# First item <list><item>Subitem</item></list>
# And another like:
# Subitem
# ArchivesSpace lists are flat and do not allow for nesting lists within lists within items within lists within.. (you get the idea)...
# Now, it would be nice to have a better way to tell the importer to only account for subitems one time, but there doesn't seem to be
# With this modification we can change nested lists to <sublist> and nested items to <subitem> before migration
# That way, the importer will ignore those sublists and subitems and sub out those tags for the correct tags
set :items, inner_xml.gsub("<sublist","<list").gsub("<subitem","<item").gsub("</subitem>","</item>").gsub("</sublist>","</list>") if context == :note_orderedlist
end
# END CONDITIONAL SKIPS
# BEGIN CONTAINER MODIFICATIONS
# Skip containers that appear in lists
# Don't downcase the instance_label
# Import att('type') as the container type for top containers, att('label') as the container type for subcontainers
# example of a 1:many tag:record relation (1+ <container> => 1 instance with 1 container)
with 'container' do
next if context == :note_orderedlist
@containers ||= {}
# we've found that the container has a parent att and the parent is in
# our queue
if att("parent") && @containers[att('parent')]
cont = @containers[att('parent')]
else
# there is not a parent. if there is an id, let's check if there's an
# instance before we proceed
inst = context == :instance ? context_obj : context_obj.instances.last
# if there are no instances, we need to make a new one.
# or, if there is an @id ( but no @parent) we can assume its a new
# top level container that will be referenced later, so we need to
# make a new instance
if ( inst.nil? or att('id') )
instance_label = att("label") ? att("label") : 'mixed_materials'
if instance_label =~ /(.*)\s\[([0-9]+)\]$/
instance_label = $1
barcode = $2
end
make :instance, {
:instance_type => instance_label
} do |instance|
set ancestor(:resource, :archival_object), :instances, instance
end
inst = context_obj
end
# now let's check out instance to see if there's a container...
if inst.container.nil?
make :container do |cont|
set inst, :container, cont
end
end
# and now finally we get the container.
cont = inst.container || context_obj
cont['barcode_1'] = barcode if barcode
cont['container_profile_key'] = att("altrender")
end
# now we fill it in
(1..3).to_a.each do |i|
next unless cont["type_#{i}"].nil?
if i == 1
cont["type_#{i}"] = att('type')
elsif i == 2 or i == 3
cont["type_#{i}"] = att('label')
end
cont["indicator_#{i}"] = format_content( inner_xml )
break
end
#store it here incase we find it has a parent
@containers[att("id")] = cont if att("id")
end
# END CONTAINER MODIFICATIONS
# BEGIN CUSTOM SUBJECT AND AGENT IMPORTS
# We'll be importing most of our subjects and agents separately and linking directly to the URI from our finding
# aids and accession records.
# This will check our subject, geogname, genreform, corpname, famname, and persname elements in our EADs for a ref attribute
# If a ref attribute is present, it will use that to link the agent to the resource.
# If there is no ref attribute, it will make a new agent as usual.
# We also have compound agents (agents with both a persname, corpname or famname and subdivided subject terms)
# In ArchivesSpace, this kind of agent can be represented in a resource by linking to the agent and adding terms/subdivisions
# within the resource. We will be accomplishing this by invalidating our EAD at some point (gasp!) to add <term> tags
# around the individual terms in a corpname, persname, or famname. This modification will also make sure that those terms
# get imported properly.
{
'function' => 'function',
'genreform' => 'genre_form',
'geogname' => 'geographic',
'occupation' => 'occupation',
'subject' => 'topical',
'title' => 'uniform_title' # added title since we have some <title> tags in our controlaccesses
}.each do |tag, type|
with "controlaccess/#{tag}" do
if att('ref')
set ancestor(:resource, :archival_object), :subjects, {'ref' => att('ref')}
else
make :subject, {
:terms => {'term' => inner_xml, 'term_type' => type, 'vocabulary' => '/vocabularies/1'},
:vocabulary => '/vocabularies/1',
:source => att('source') || 'ingest'
} do |subject|
set ancestor(:resource, :archival_object), :subjects, {'ref' => subject.uri}
end
end
end
end
with 'origination/corpname' do
if att('ref')
set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'creator'}
else
make_corp_template(:role => 'creator')
end
end
with 'controlaccess/corpname' do
corpname = Nokogiri::XML::DocumentFragment.parse(inner_xml)
terms ||= []
corpname.children.each do |child|
if child.respond_to?(:name) && child.name == 'term'
term = child.content.strip
term_type = child['type']
terms << {'term' => term, 'term_type' => term_type, 'vocabulary' => '/vocabularies/1'}
end
end
if att('role')
relator = att('role')
elsif att('encodinganalog') == '710'
relator = 'ctb'
else
relator = nil
end
if att('ref')
set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'subject', 'terms' => terms, 'relator' => relator}
else
make_corp_template(:role => 'subject')
end
end
with 'origination/famname' do
if att('ref')
set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'creator'}
else
make_family_template(:role => 'creator')
end
end
with 'controlaccess/famname' do
famname = Nokogiri::XML::DocumentFragment.parse(inner_xml)
terms ||= []
famname.children.each do |child|
if child.respond_to?(:name) && child.name == 'term'
term = child.content.strip
term_type = child['type']
terms << {'term' => term, 'term_type' => term_type, 'vocabulary' => '/vocabularies/1'}
end
end
if att('role')
relator = att('role')
elsif att('encodinganalog') == '700'
relator = 'ctb'
else
relator = nil
end
if att('ref')
set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'subject', 'terms' => terms, 'relator' => relator}
else
make_family_template(:role => 'subject')
end
end
with 'origination/persname' do
if att('ref')
set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'creator'}
else
make_person_template(:role => 'creator')
end
end
with 'controlaccess/persname' do
persname = Nokogiri::XML::DocumentFragment.parse(inner_xml)
terms ||= []
persname.children.each do |child|
if child.respond_to?(:name) && child.name == 'term'
term = child.content.strip
term_type = child['type']
terms << {'term' => term, 'term_type' => term_type, 'vocabulary' => '/vocabularies/1'}
end
end
if att('role')
relator = att('role')
elsif att('encodinganalog') == '700'
relator = 'ctb'
else
relator = nil
end
if att('ref')
set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'subject', 'terms' => terms, 'relator' => relator}
else
make_person_template(:role => 'subject')
end
end
# END CUSTOM SUBJECT AND AGENT IMPORTS
# BEGIN PHYSDESC CUSTOMIZATIONS
# The stock EAD importer doesn't import <physfacet> and <dimensions> tags into extent objects; instead making them notes
# This is a corrected version
# first, some methods for generating note objects
# Build a single-part note of type note_name from an XML tag's text,
# optionally prefixing the content with a human-readable tag name, and
# attach it to the nearest resource/archival_object ancestor.
def make_single_note(note_name, tag, tag_name="")
  content = tag.inner_text
  if !tag_name.empty?
    content = tag_name + ": " + content
  end
  make :note_singlepart, {
    :type => note_name,
    :persistent_id => att('id'),
    :publish => true,
    # BUGFIX: the <head> stripper used /<head>.?<\/head>/, which only
    # matches at most one character between the tags; use the same
    # /<head>.*?<\/head>/m pattern as the other note handlers in this
    # converter.
    :content => format_content( content.sub(/<head>.*?<\/head>/m, '').strip)
  } do |note|
    set ancestor(:resource, :archival_object), :notes, note
  end
end
# Build a published multi-part note of the given type whose single text
# subnote holds the tag's inner text.
def make_nested_note(note_name, tag)
  text = tag.inner_text
  note_properties = {
    :type => note_name,
    :persistent_id => att('id'),
    :publish => true,
    :subnotes => {'jsonmodel_type' => 'note_text', 'content' => format_content(text)}
  }
  make :note_multipart, note_properties do |note|
    set ancestor(:resource, :archival_object), :notes, note
  end
end
# Parse <physdesc> into an extent record when a true "N units" extent is
# present, otherwise into individual notes. The stock importer turns
# <physfacet> and <dimensions> into notes only; this version folds them into
# the extent record when one exists.
with 'physdesc' do
  next if context == :note_orderedlist # skip physdescs that appear inside lists
  physdesc = Nokogiri::XML::DocumentFragment.parse(inner_xml)
  extent_number_and_type = nil
  dimensions = []
  physfacets = []
  container_summaries = []
  other_extent_data = []
  container_summary_texts = []
  dimensions_texts = []
  physfacet_texts = []
  # If there is already a portion specified, use it
  portion = att('altrender') || 'whole'
  # Special case: a physdesc that is just a plain string with no child
  # elements becomes a physdesc note.
  if physdesc.children.length == 1 && physdesc.children[0].name == 'text'
    container_summaries << physdesc
  else
    # Otherwise, attempt to parse out an extent record from the child elements.
    physdesc.children.each do |child|
      # "extent" is either a true extent ("5 linear feet") or a container
      # summary; disambiguate via the leading-number regex.
      if child.name == 'extent'
        child_content = child.content.strip
        if extent_number_and_type.nil? && child_content =~ /^([0-9\.]+)+\s+(.*)$/
          extent_number_and_type = {:number => $1, :extent_type => $2}
        else
          container_summaries << child
          container_summary_texts << child.content.strip
        end
      elsif child.name == 'physfacet'
        physfacets << child
        physfacet_texts << child.content.strip
      elsif child.name == 'dimensions'
        dimensions << child
        dimensions_texts << child.content.strip
      elsif child.name != 'text'
        other_extent_data << child
      end
    end
  end
  # only make an extent if we got a number and type, otherwise put all physdesc contents into a note
  if extent_number_and_type
    make :extent, {
      # Use the values captured at match time instead of $1/$2: the match
      # globals are fragile this far from the regex, and any intervening
      # match would silently clobber them.
      :number => extent_number_and_type[:number],
      :extent_type => extent_number_and_type[:extent_type],
      :portion => portion,
      :container_summary => container_summary_texts.join('; '),
      :physical_details => physfacet_texts.join('; '),
      :dimensions => dimensions_texts.join('; ')
    } do |extent|
      set ancestor(:resource, :archival_object), :extents, extent
    end
  # there's no true extent; split up the rest into individual notes
  else
    container_summaries.each do |summary|
      make_single_note("physdesc", summary)
    end
    physfacets.each do |physfacet|
      make_single_note("physfacet", physfacet)
    end
    dimensions.each do |dimension|
      make_nested_note("dimensions", dimension)
    end
  end
  # Anything unrecognized is preserved as a physdesc note labeled with its tag name.
  other_extent_data.each do |unknown_tag|
    make_single_note("physdesc", unknown_tag, unknown_tag.name)
  end
end
# END PHYSDESC CUSTOMIZATIONS
# BEGIN LANGUAGE CUSTOMIZATIONS
# By default, ASpace just uses the last <language> tag it finds as the primary
# language of the material described. This results in incorrect finding-aid languages for many eads.
# for example, ead with the following <langmaterial> tag:
## <langmaterial>
## The material is mostly in <language langcode="eng" encodinganalog="041">English</language>;
## some correspondence is in <language langcode="arm" encodinganalog="041">Armenian;</language>;
## select items are in <language langcode="ger" encodinganalog="041">German</language>.
## </langmaterial>
# will result in a primary material language of German.
# these changes fix that
# Assign the material's primary language from the FIRST <language> child.
# (The stock importer keeps the LAST <language> found, which is usually wrong
# for prose <langmaterial> tags listing several languages.)
with "langmaterial" do
  # first, assign the primary language to the ead
  langmaterial = Nokogiri::XML::DocumentFragment.parse(inner_xml)
  langmaterial.children.each do |child|
    if child.name == 'language'
      set ancestor(:resource, :archival_object), :language, child.attr("langcode")
      break # only the first <language> wins
    end
  end
  # write full tag content to a note, subbing out the language tags
  content = inner_xml
  # skip empty self-closing <language/> tags entirely -- no note needed
  next if content =~ /\A<language langcode=\"[a-z]+\"\/>\Z/
  # if the whole tag is exactly one <language> element, keep just its text
  if content.match(/\A<language langcode=\"[a-z]+\"\s*>([^<]+)<\/language>\Z/)
    content = $1
  end
  make :note_singlepart, {
    :type => "langmaterial",
    :persistent_id => att('id'),
    :publish => true,
    :content => format_content( content.sub(/<head>.*?<\/head>/, '') )
  } do |note|
    set ancestor(:resource, :archival_object), :notes, note
  end
end
# Overwrite the default <language> tag behavior: everything is handled by the
# langmaterial handler above, so bare <language> tags are ignored.
with "language" do
  next
end
# END LANGUAGE CUSTOMIZATIONS
# BEGIN INDEX CUSTOMIZATIONS
# The stock EAD converter creates separate index items for each indexentry,
# one for the value (persname, famname, etc) and one for the reference (ref),
# even when they are within the same indexentry and are related
# (i.e., the persname is a correspondent, the ref is a date or a location at which
# correspondence with that person can be found).
# The Bentley's <indexentry>s generally look something like:
# # <indexentry><persname>Some person</persname><ref>Some date or folder</ref></indexentry>
# # As the <persname> and the <ref> are associated with one another,
# we want to keep them together in the same index item in ArchiveSpace.
# This will treat each <indexentry> as one item,
# creating an index item with a 'value' from the <persname>, <famname>, etc.
# and a 'reference_text' from the <ref>.
# Treat each <indexentry> as ONE index item: the child name element supplies
# the item's type and value, and a sibling <ref> supplies the reference text.
# (The stock converter makes two unrelated items, losing the association.)
with 'indexentry' do
  entry_type = ''
  entry_value = ''
  entry_reference = ''
  indexentry = Nokogiri::XML::DocumentFragment.parse(inner_xml)
  indexentry.children.each do |child|
    # Map the EAD element name to the corresponding ASpace index item type.
    case child.name
    when 'name'
      entry_value << child.content
      entry_type << 'name'
    when 'persname'
      entry_value << child.content
      entry_type << 'person'
    when 'famname'
      entry_value << child.content
      entry_type << 'family'
    when 'corpname'
      entry_value << child.content
      entry_type << 'corporate_entity'
    when 'subject'
      entry_value << child.content
      entry_type << 'subject'
    when 'function'
      entry_value << child.content
      entry_type << 'function'
    when 'occupation'
      entry_value << child.content
      entry_type << 'occupation'
    when 'genreform'
      entry_value << child.content
      entry_type << 'genre_form'
    when 'title'
      entry_value << child.content
      entry_type << 'title'
    when 'geogname'
      entry_value << child.content
      entry_type << 'geographic_name'
    end
    # A <ref> sibling becomes the item's reference text.
    if child.name == 'ref'
      entry_reference << child.content
    end
  end
  make :note_index_item, {
    :type => entry_type,
    :value => entry_value,
    :reference_text => entry_reference
  } do |item|
    set ancestor(:note_index), :items, item
  end
end
# Skip the stock importer actions to avoid confusion/duplication:
# the combined 'indexentry' handler above already consumed these children.
{
  'name' => 'name',
  'persname' => 'person',
  'famname' => 'family',
  'corpname' => 'corporate_entity',
  'subject' => 'subject',
  'function' => 'function',
  'occupation' => 'occupation',
  'genreform' => 'genre_form',
  'title' => 'title',
  'geogname' => 'geographic_name'
}.each do |k, v|
  with "indexentry/#{k}" do |node|
    next
  end
end
with 'indexentry/ref' do
  next
end
# END INDEX CUSTOMIZATIONS
# BEGIN HEAD CUSTOMIZATIONS
# This issue is similar to the language issue -- if there is a note with multiple <head> elements (say, a bioghist with its own head and sublists with their own heads),
# the stock importer action is to set the note label to the very last <head> it finds. This modification will only set the label if it does not already exist, ensuring
# that it will only be set once.
# Set a note's label/title from its FIRST <head> only. The stock importer
# overwrites the label with every <head> it finds, so a nested sublist's head
# would clobber the note's own label; the nil guard prevents that.
with 'head' do
  if context == :note_multipart
    ancestor(:note_multipart) do |note|
      next unless note["label"].nil?
      set :label, format_content( inner_xml )
    end
  elsif context == :note_chronology
    ancestor(:note_chronology) do |note|
      next unless note["title"].nil?
      set :title, format_content( inner_xml )
    end
  end
end
# END HEAD CUSTOMIZATIONS
# BEGIN DAO TITLE CUSTOMIZATIONS
# The Bentley has many EADs with <dao> tags that lack title attributes.
# The stock ArchivesSpace EAD Converter uses each <dao>'s title attribute as
# the value for the imported digital object's title, which is a required property.
# As a result, all of our EADs with <dao> tags fail when trying to import into ArchivesSpace.
# This section of the BHL EAD Converter plugin modifies the stock ArchivesSpace EAD Converter
# by forming a string containing the digital object's parent archival object's title and date (if both exist),
# or just its title (if only the title exists), or just it's date (if only the date exists)
# and then using that string as the imported digital object's title.
# Import <dao> tags that lack @title: build a digital object title from the
# parent archival object's title and/or date expressions instead of failing.
with 'dao' do
  if att('ref') # A digital object has already been made
    make :instance, {
      :instance_type => 'digital_object',
      :digital_object => {'ref' => att('ref')}
    } do |instance|
      set ancestor(:resource, :archival_object), :instances, instance
    end
  else # Make a digital object
    make :instance, {
      :instance_type => 'digital_object'
    } do |instance|
      set ancestor(:resource, :archival_object), :instances, instance
    end
    # We'll use either the <dao> title attribute (if it exists) or our display_string (if the title attribute does not exist)
    # This forms a title string using the parent archival object's title, if it exists
    daotitle = nil
    ancestor(:archival_object ) do |ao|
      if ao.title && ao.title.length > 0
        daotitle = ao.title
      end
    end
    # This forms a date string from the parent archival object's date
    # expressions (all our dates have expressions, so begin/end dates are
    # not consulted for the sake of simplicity).
    daodates = []
    ancestor(:archival_object) do |aod|
      if aod.dates && aod.dates.length > 0
        aod.dates.each do |dl|
          if dl['expression'].length > 0
            daodates << dl['expression']
          end
        end
      end
    end
    title = daotitle
    date_label = daodates.join(', ') if daodates.length > 0
    # Display string: "title, dates" when both exist, otherwise whichever one does.
    display_string = title || ''
    display_string += ', ' if title && date_label
    display_string += date_label if date_label
    make :digital_object, {
      :digital_object_id => SecureRandom.uuid,
      :title => att('title') || display_string,
    } do |obj|
      obj.file_versions << {
        :use_statement => att('role'),
        :file_uri => att('href'),
        :xlink_actuate_attribute => att('actuate'),
        :xlink_show_attribute => att('show')
      }
      set ancestor(:instance), :digital_object, obj
    end
  end
end
# NOTE(review): this final `end` closes a block opened before this handler --
# the dao handler itself is already balanced above; confirm the enclosing
# nesting is intentional.
end
# Attach <daodesc> content as a note on the current digital object.
with 'daodesc' do
  # NOTE(review): this `next` only exits the ancestor block itself -- it does
  # NOT skip the note creation below. If ref'd (already-linked) digital
  # objects were meant to be excluded, this needs restructuring; confirm.
  ancestor(:digital_object) do |dobj|
    next if dobj.ref
  end
  make :note_digital_object, {
    :type => 'note',
    :persistent_id => att('id'),
    :content => modified_format_content(inner_xml.strip,'daodesc')
  } do |note|
    set ancestor(:digital_object), :notes, note
  end
end
# END DAO TITLE CUSTOMIZATIONS
=begin
# Note: The following bits are here for historical reasons
# We have either decided against implementing the functionality OR the ArchivesSpace importer has changed, deprecating the following customizations
# START IGNORE
# Setting some of these to ignore because we have some physdesc, container, etc.
# Within list/items in our descgrps at the end of finding aids.
# Without setting these to ignore, ASpace both makes the list AND makes separate
# notes for physdesc, dimension, etc. and tries to make instances out of the
# containers, causing import errors.
# Note: if using this in conjunction with the Yale container management plugin,
# be sure to include the line 'next ignore if @ignore' within the with container do
# section of the ConverterExtraContainerValues module.
with 'archref/container' do
@ignore = true
end
with 'archref/physdesc/dimensions' do
@ignore = true
end
with 'archref/unittitle' do
@ignore = true
end
with 'archref/unittitle/unitdate' do
@ignore = true
end
with 'archref/note' do
@ignore = true
end
with 'archref/note/p/unitdate' do
@ignore = true
end
with 'archref/note/p/geogname' do
@ignore = true
end
with 'unittitle' do |node|
ancestor(:note_multipart, :resource, :archival_object) do |obj|
unless obj.class.record_type == "note_multipart" or context == "note_orderedlist"
title = Nokogiri::XML::DocumentFragment.parse(inner_xml.strip)
title.xpath(".//unitdate").remove
obj.title = format_content( title.to_xml(:encoding => 'utf-8') )
end
end
end
with 'unitdate' do |node|
next ignore if @ignore
norm_dates = (att('normal') || "").sub(/^\s/, '').sub(/\s$/, '').split('/')
if norm_dates.length == 1
norm_dates[1] = norm_dates[0]
end
norm_dates.map! {|d| d =~ /^([0-9]{4}(\-(1[0-2]|0[1-9])(\-(0[1-9]|[12][0-9]|3[01]))?)?)$/ ? d : nil}
make :date, {
:date_type => att('type') || 'inclusive',
:expression => inner_xml,
:label => 'creation',
:begin => norm_dates[0],
:end => norm_dates[1],
:calendar => att('calendar'),
:era => att('era'),
:certainty => att('certainty')
} do |date|
set ancestor(:resource, :archival_object), :dates, date
end
end
with 'dimensions' do |node|
next ignore if @ignore
unless context == :note_orderedlist
content = inner_xml.tap {|xml|
xml.sub!(/<head>.*?<\/head>/m, '')
# xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
# xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
}
make :note_multipart, {
:type => node.name,
:persistent_id => att('id'),
:subnotes => {
'jsonmodel_type' => 'note_text',
'content' => format_content( content )
}
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
end
%w(accessrestrict accessrestrict/legalstatus \
accruals acqinfo altformavail appraisal arrangement \
bioghist custodhist \
fileplan odd otherfindaid originalsloc phystech \
prefercite processinfo relatedmaterial scopecontent \
separatedmaterial userestrict ).each do |note|
with note do |node|
content = inner_xml.tap {|xml|
xml.sub!(/<head>.*?<\/head>/m, '')
# xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
# xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
}
make :note_multipart, {
:type => node.name,
:persistent_id => att('id'),
:subnotes => {
'jsonmodel_type' => 'note_text',
'content' => format_content( content )
}
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
end
# START RIGHTS STATEMENTS
# The stock ASpace EAD importer only makes "Conditions Governing Access" notes out of <accessrestrict> tags
# We want to also import our <accessrestrict> tags that have a restriction end date as a "Rights Statements"
# Let ArchivesSpace do its normal thing with accessrestrict
%w(accessrestrict accessrestrict/legalstatus \
accruals acqinfo altformavail appraisal arrangement \
bioghist custodhist dimensions \
fileplan odd otherfindaid originalsloc phystech \
prefercite processinfo relatedmaterial scopecontent \
separatedmaterial userestrict ).each do |note|
with note do |node|
content = inner_xml.tap {|xml|
xml.sub!(/<head>.*?<\/head>/m, '')
# xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
# xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
}
make :note_multipart, {
:type => node.name,
:persistent_id => att('id'),
:subnotes => {
'jsonmodel_type' => 'note_text',
'content' => format_content( content )
}
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
end
# Now make a Rights Statement using the content from the "Conditions Governing Access" note
# and the restriction end date from the accessrestrict/date
with 'accessrestrict/date' do
ancestor(:archival_object) do |ao|
ao.notes.each do |n|
if n['type'] == 'accessrestrict'
n['subnotes'].each do |sn|
make :rights_statement, {
:rights_type => 'institutional_policy',
:restrictions => sn['content'],
:restriction_end_date => att('normal')
} do |rights|
set ancestor(:resource, :archival_object), :rights_statements, rights
end
end
end
end
end
end
=end
end
# add support for creating external documents
class BHLEADConverter < EADConverter
# Register this converter's import type so it appears in the importer's
# list of available formats.
def self.import_types(show_hidden = false)
  bhl_type = {
    :name => "bhl_ead_xml",
    :description => "Import BHL EAD records from an XML file"
  }
  [bhl_type]
end
# Return a converter instance when asked for our registered type; returning
# nil tells the framework this converter does not handle the given type.
def self.instance_for(type, input_file)
  return nil unless type == "bhl_ead_xml"
  self.new(input_file)
end
# Human-readable description of what this converter produces (shown in the UI).
def self.profile
  'Convert EAD To ArchivesSpace JSONModel records'
end
# Post-process note content produced by the stock converter, trimming
# trailing commas and spaces.
# NOTE(review): /$/ anchors at end-of-LINE, so with gsub this also strips
# ", " runs before every internal newline, not just at the end of the
# string -- confirm that is intended (use /\z/ to target only the end).
def format_content(content)
  super.gsub(/[, ]+$/,"") # Remove trailing commas and spaces
end
def self.configure
super
# BEGIN UNITID CUSTOMIZATIONS
# Let's take those brackets off of unitids and just add them in the exporter
# Strip square brackets from unitids on import (the exporter adds them back).
# Resources keep the raw inner XML in id_0; archival objects get a cleaned
# component_id.
with 'unitid' do |node|
  ancestor(:note_multipart, :resource, :archival_object) do |obj|
    case obj.class.record_type
    when 'resource'
      # inner_xml.split(/[\/_\-\.\s]/).each_with_index do |id, i|
      #   set receiver, "id_#{i}".to_sym, id
      # end
      set obj, :id_0, inner_xml
    when 'archival_object'
      set obj, :component_id, inner_xml.gsub("[","").gsub("]","").strip
    end
  end
end
# BEGIN TITLEPROPER AND AUTHOR CUSTOMIZATIONS
# The stock ArchivesSpace converter sets the author and titleproper elements each time it finds a titleproper or author elements
# This means that it first creates the elements using titlestmt/author and titlestmt/titleproper, and then overwrites the values when it reaches titlepage
# We want to use the titlepage statements. Changing this to be more explicit about using the statement that we want, and to remove some unwanted linebreaks.
# The EAD importer ignores titlepage; we need to unignore it
with "titlepage" do
@ignore = false
end
with 'titlepage/titleproper' do
type = att('type')
title_statement = inner_xml.gsub("<lb/>"," <lb/>")
case type
when 'filing'
set :finding_aid_filing_title, title_statement.gsub("<lb/>","").gsub(/<date(.*?)<\/date>/,"").gsub(/\s+/," ").strip
else
set :finding_aid_title, title_statement.gsub("<lb/>","").gsub(/<date(.*?)<\/date>/,"").gsub(/\s+/," ").gsub(/[,\s]+$/,"").strip
end
end
with 'titlepage/author' do
author_statement = inner_xml.gsub("<lb/>"," <lb/>")
set :finding_aid_author, author_statement.gsub("<lb/>","").gsub(/\s+/," ").strip
end
# Skip the titleproper and author statements from titlestmt
with 'titlestmt/titleproper' do
next
end
with 'titlestmt/author' do
next
end
# Skip these to override the default ArchiveSpace functionality, which searches for a titleproper or an author anywhere
with 'titleproper' do
next
end
with 'author' do
next
end
# END TITLEPROPER CUSTOMIZATIONS
# BEGIN EXTERNAL DOCUMENT CUSTOMIZATIONS
# Import <externaldocument> elements (a local extension) as ASpace external
# documents on the current record, using @title and @location attributes.
with 'externaldocument' do
  set :external_documents, {'title' => att('title'), 'location' => att('location'), 'jsonmodel_type' => 'external_document'}
end
# END EXTERNAL DOCUMENT CUSTOMIZATIONS
# BEGIN CLASSIFICATION CUSTOMIZATIONS
# Link the resource to an existing ArchivesSpace classification by @ref
# (used to distinguish MHC vs. UARP finding aids).
with 'classification' do
  set :classifications, {'ref' => att('ref')}
end
# END CLASSIFICATION CUSTOMIZATIONS
# BEGIN CHRONLIST CUSTOMIZATIONS
# For some reason the stock importer doesn't separate <chronlist>s out of notes like it does with <list>s
# Like, it includes the mixed content <chronlist> within the note text and also makes a chronological list, duplicating the content
# The addition of (split_tag = 'chronlist') to the insert_into_subnotes method call here fixes that
# Split <chronlist> markup out of the parent note's text so the content is
# not duplicated (the stock importer only does this for <list>); the
# split_tag argument to insert_into_subnotes is the fix.
with 'chronlist' do
  if ancestor(:note_multipart)
    left_overs = insert_into_subnotes(split_tag = 'chronlist')
  else
    left_overs = nil
    # NOTE(review): `node` is not a block parameter here -- confirm the
    # converter DSL exposes a `node` method in handler scope, otherwise this
    # branch raises NameError when a chronlist appears outside a note.
    make :note_multipart, {
      :type => node.name,
      :persistent_id => att('id'),
    } do |note|
      set ancestor(:resource, :archival_object), :notes, note
    end
  end
  make :note_chronology do |note|
    set ancestor(:note_multipart), :subnotes, note
  end
  # and finally put the leftovers back in the list of subnotes...
  if ( !left_overs.nil? && left_overs["content"] && left_overs["content"].length > 0 )
    set ancestor(:note_multipart), :subnotes, left_overs
  end
end
# END CHRONLIST CUSTOMIZATIONS
# BEGIN BIBLIOGRAPHY CUSTOMIZATIONS
# Our bibliographies are really more like general notes with paragraphs, lists, etc. We don't have any bibliographies
# that are simply a collection of <bibref>s, and all of the bibliographies that do have <bibref>s have them inserted into
# items in lists. This change will import bibliographies as a general note, which is really more appropriate given their content
# Import <bibliography> as a general ('odd') multipart note: our
# bibliographies are prose with lists, not collections of <bibref>s.
with 'bibliography' do |node|
  content = inner_xml.tap {|xml|
    xml.sub!(/<head>.*?<\/head>/m, '')
    # xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
    # xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
  }
  make :note_multipart, {
    :type => 'odd',
    :persistent_id => att('id'),
    :subnotes => {
      'jsonmodel_type' => 'note_text',
      'content' => format_content( content )
    }
  } do |note|
    set ancestor(:resource, :archival_object), :notes, note
  end
end
# Keep the stock index head/p handling but not bibliography's (the loop
# body skips the 'bibliography' element, leaving only 'index' behavior).
%w(bibliography index).each do |x|
  next if x == 'bibliography'
  with "index/head" do |node|
    set :label, format_content( inner_xml )
  end
  with "index/p" do
    set :content, format_content( inner_xml )
  end
end
# Skip the stock bibliography child handlers; everything was captured above.
with 'bibliography/bibref' do
  next
end
with 'bibliography/p' do
  next
end
with 'bibliography/head' do
  next
end
# END BIBLIOGRAPHY CUSTOMIZATIONS
# BEGIN BLOCKQUOTE P TAG FIX
# The ArchivesSpace EAD importer replaces all <p> tags with double line breaks
# This leads to too many line breaks surrounding closing block quote tags
# On export, this invalidates the EAD
# The following code is really hacky workaround to reinsert <p> tags within <blockquote>s
# Note: We only have blockquotes in bioghists and scopecontents, so call modified_format_content on just this block is sufficient
# This function calls the regular format_content function, and then does a few other things, like preserving blockquote p tags and removing opening and closing parens from some notes, before returning the content
# Wrapper around format_content that additionally:
#   * strips one enclosing layer of parens/brackets from single-paragraph
#     odd/abstract/accessrestrict/daodesc notes
#   * re-inserts <p> tags inside <blockquote>s (the stock importer converts
#     <p> to blank lines, which invalidates the EAD on re-export)
def modified_format_content(content, note)
  content = format_content(content)
  # Remove parentheses from single-paragraph odds
  paragraphs = content.split("\n\n")
  if paragraphs.length == 1 && %w(odd abstract accessrestrict daodesc).include?(note)
    content = $1 if content =~ /^[\[\(]+(.*)[\]\)]+$/
  end
  content.gsub(/<blockquote>\s*?/,"<blockquote><p>").gsub(/\s*?<\/blockquote>/,"</p></blockquote>")
end
# Build published multipart notes for the standard EAD note elements, using
# modified_format_content for the blockquote/paren cleanup.
%w(accessrestrict accessrestrict/legalstatus \
   accruals acqinfo altformavail appraisal arrangement \
   bioghist custodhist \
   fileplan odd otherfindaid originalsloc phystech \
   prefercite processinfo relatedmaterial scopecontent \
   separatedmaterial userestrict ).each do |note|
  with note do |node|
    # Remove the <head> (it becomes the note label elsewhere).
    content = inner_xml.tap {|xml|
      xml.sub!(/<head>.*?<\/head>/m, '')
      # xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
      # xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
    }
    make :note_multipart, {
      :type => node.name,
      :persistent_id => att('id'),
      :publish => true,
      :subnotes => {
        'jsonmodel_type' => 'note_text',
        'content' => modified_format_content( content, note )
      }
    # NOTE: the block parameter |note| shadows the loop variable `note`
    # above; inside this block `note` is the created note object.
    } do |note|
      set ancestor(:resource, :archival_object), :notes, note
    end
  end
end
# END BLOCKQUOTE P TAG FIX
# BEGIN CONDITIONAL SKIPS
# We have lists and indexes with all sorts of crazy things, like <container>, <physdesc>, <physloc>, etc. tags within <item> or <ref> tags
# So, we need to tell the importer to skip those things only when they appear in places where they shouldn't, otherwise do
# it's normal thing
# REMINDER: If using the container management plugin, add the line 'next if context == :note_orderedlist' to "with 'container' do" in
# the converter_extra_container_values mixin
# Single-part note elements, skipped when they appear inside lists/items
# (we have stray <physloc> etc. inside <item>/<ref> tags).
# NOTE: 'langmaterial' stays in the list but is skipped here because it has
# its own dedicated handler above.
%w(abstract langmaterial materialspec physloc).each do |note|
  next if note == "langmaterial"
  with note do |node|
    next if context == :note_orderedlist # skip these
    next if context == :items # these too
    content = inner_xml
    # Ignore empty self-closing <language/>; unwrap a lone <language> element.
    next if content =~ /\A<language langcode=\"[a-z]+\"\/>\Z/
    if content.match(/\A<language langcode=\"[a-z]+\"\s*>([^<]+)<\/language>\Z/)
      content = $1
    end
    make :note_singlepart, {
      :type => note,
      :persistent_id => att('id'),
      :publish => true,
      :content => modified_format_content( content.sub(/<head>.*?<\/head>/, ''), note )
    # NOTE: the block parameter |note| shadows the loop variable `note`.
    } do |note|
      set ancestor(:resource, :archival_object), :notes, note
    end
  end
end
# Import <list> as an ordered or defined sublist of a multipart note,
# creating a wrapper 'odd' note when the list is not already inside one.
with 'list' do
  next if ancestor(:note_index)
  if ancestor(:note_multipart)
    # Pull the list markup out of the parent note's text so it is not duplicated.
    left_overs = insert_into_subnotes
  else
    left_overs = nil
    make :note_multipart, {
      :type => 'odd',
      :persistent_id => att('id'),
      :publish => true,
    } do |note|
      set ancestor(:resource, :archival_object), :notes, note
    end
  end
  # now let's make the subnote list
  type = att('type')
  if type == 'deflist' || (type.nil? && inner_xml.match(/<deflist>/))
    make :note_definedlist do |note|
      set ancestor(:note_multipart), :subnotes, note
    end
  else
    make :note_orderedlist, {
      :enumeration => att('numeration')
    } do |note|
      set ancestor(:note_multipart), :subnotes, note
    end
  end
  # and finally put the leftovers back in the list of subnotes...
  if ( !left_overs.nil? && left_overs["content"] && left_overs["content"].length > 0 )
    set ancestor(:note_multipart), :subnotes, left_overs
  end
end
# First <head> becomes the list title (nil guard keeps nested heads out).
with 'list/head' do |node|
  ancestor(:note_definedlist, :note_orderedlist) do |obj|
    next if obj.title
    obj.title = format_content( inner_xml)
  end
end
with 'list/item' do
  # ArchivesSpace lists are flat: lists nested inside items cannot be
  # represented, and the stock importer would emit nested content twice
  # (once embedded in the parent item, once as its own item). Our workaround:
  # pre-migration, nested lists/items are renamed <sublist>/<subitem> so the
  # importer ignores them as entries; here the tags are swapped back to
  # <list>/<item> inside the single flattened item's text.
  set :items, inner_xml.gsub("<sublist","<list").gsub("<subitem","<item").gsub("</subitem>","</item>").gsub("</sublist>","</list>") if context == :note_orderedlist
end
# END CONDITIONAL SKIPS
# BEGIN CONTAINER MODIFICATIONS
# Skip containers that appear in lists
# Don't downcase the instance_label
# Import att('type') as the container type for top containers, att('label') as the container type for subcontainers
# example of a 1:many tag:record relation (1+ <container> => 1 instance with 1 container)
# Build instances/containers from <container> tags (1+ sibling <container>
# tags with @parent links collapse into one instance with up to 3 levels).
# Differences from stock: list-embedded containers are skipped, the instance
# label is not downcased, @type maps to the top container type and @label to
# subcontainer types, and "label [barcode]" labels carry a barcode.
with 'container' do
  next if context == :note_orderedlist
  @containers ||= {}
  # we've found that the container has a parent att and the parent is in
  # our queue
  if att("parent") && @containers[att('parent')]
    cont = @containers[att('parent')]
  else
    # there is not a parent. if there is an id, let's check if there's an
    # instance before we proceed
    inst = context == :instance ? context_obj : context_obj.instances.last
    # if there are no instances, we need to make a new one.
    # or, if there is an @id ( but no @parent) we can assume its a new
    # top level container that will be referenced later, so we need to
    # make a new instance
    if ( inst.nil? or att('id') )
      instance_label = att("label") ? att("label") : 'mixed_materials'
      # "label [12345]" encodes a barcode in the label attribute.
      if instance_label =~ /(.*)\s\[([0-9]+)\]$/
        instance_label = $1
        barcode = $2
      end
      make :instance, {
        :instance_type => instance_label
      } do |instance|
        set ancestor(:resource, :archival_object), :instances, instance
      end
      inst = context_obj
    end
    # now let's check out instance to see if there's a container...
    if inst.container.nil?
      make :container do |cont|
        set inst, :container, cont
      end
    end
    # and now finally we get the container.
    cont = inst.container || context_obj
    cont['barcode_1'] = barcode if barcode
    # @altrender carries the container profile key (local convention).
    cont['container_profile_key'] = att("altrender")
  end
  # now we fill in the first empty type/indicator slot (max 3 levels).
  (1..3).to_a.each do |i|
    next unless cont["type_#{i}"].nil?
    if i == 1
      cont["type_#{i}"] = att('type')
    elsif i == 2 or i == 3
      cont["type_#{i}"] = att('label')
    end
    cont["indicator_#{i}"] = format_content( inner_xml )
    break
  end
  #store it here incase we find it has a parent
  @containers[att("id")] = cont if att("id")
end
# END CONTAINER MODIFICATIONS
# BEGIN CUSTOM SUBJECT AND AGENT IMPORTS
# We'll be importing most of our subjects and agents separately and linking directly to the URI from our finding
# aids and accession records.
# This will check our subject, geogname, genreform, corpname, famname, and persname elements in our EADs for a ref attribute
# If a ref attribute is present, it will use that to link the agent to the resource.
# If there is no ref attribute, it will make a new agent as usual.
# We also have compound agents (agents with both a persname, corpname or famname and subdivided subject terms)
# In ArchivesSpace, this kind of agent can be represented in a resource by linking to the agent and adding terms/subdivisions
# within the resource. We will be accomplishing this by invalidating our EAD at some point (gasp!) to add <term> tags
# around the individual terms in a corpname, persname, or famname. This modification will also make sure that those terms
# get imported properly.
# Subject-like controlaccess elements: link by @ref to a pre-imported subject
# when present, otherwise create a new subject record (stock behavior).
{
  'function' => 'function',
  'genreform' => 'genre_form',
  'geogname' => 'geographic',
  'occupation' => 'occupation',
  'subject' => 'topical',
  'title' => 'uniform_title' # added title since we have some <title> tags in our controlaccesses
}.each do |tag, type|
  with "controlaccess/#{tag}" do
    if att('ref')
      set ancestor(:resource, :archival_object), :subjects, {'ref' => att('ref')}
    else
      make :subject, {
        :terms => {'term' => inner_xml, 'term_type' => type, 'vocabulary' => '/vocabularies/1'},
        :vocabulary => '/vocabularies/1',
        :source => att('source') || 'ingest'
      } do |subject|
        set ancestor(:resource, :archival_object), :subjects, {'ref' => subject.uri}
      end
    end
  end
end
# <origination><corpname>: link by @ref or create a new corporate agent.
with 'origination/corpname' do
  if att('ref')
    set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'creator'}
  else
    make_corp_template(:role => 'creator')
  end
end
# Agent controlaccess/origination handlers. Each controlaccess handler:
# collects <term> children as subdivision terms, derives a relator from @role
# (default 'ctb' for the matching MARC encodinganalog), then links by @ref or
# creates a new agent via the stock template helpers.
with 'controlaccess/corpname' do
  corpname = Nokogiri::XML::DocumentFragment.parse(inner_xml)
  terms ||= []
  corpname.children.each do |child|
    if child.respond_to?(:name) && child.name == 'term'
      term = child.content.strip
      term_type = child['type']
      terms << {'term' => term, 'term_type' => term_type, 'vocabulary' => '/vocabularies/1'}
    end
  end
  if att('role')
    relator = att('role')
  elsif att('encodinganalog') == '710'
    relator = 'ctb'
  else
    relator = nil
  end
  if att('ref')
    set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'subject', 'terms' => terms, 'relator' => relator}
  else
    make_corp_template(:role => 'subject')
  end
end
# Family creator: link by @ref or create a new family agent.
with 'origination/famname' do
  if att('ref')
    set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'creator'}
  else
    make_family_template(:role => 'creator')
  end
end
with 'controlaccess/famname' do
  famname = Nokogiri::XML::DocumentFragment.parse(inner_xml)
  terms ||= []
  famname.children.each do |child|
    if child.respond_to?(:name) && child.name == 'term'
      term = child.content.strip
      term_type = child['type']
      terms << {'term' => term, 'term_type' => term_type, 'vocabulary' => '/vocabularies/1'}
    end
  end
  if att('role')
    relator = att('role')
  elsif att('encodinganalog') == '700'
    relator = 'ctb'
  else
    relator = nil
  end
  if att('ref')
    set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'subject', 'terms' => terms, 'relator' => relator}
  else
    make_family_template(:role => 'subject')
  end
end
# Person creator: link by @ref or create a new person agent.
with 'origination/persname' do
  if att('ref')
    set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'creator'}
  else
    make_person_template(:role => 'creator')
  end
end
with 'controlaccess/persname' do
  persname = Nokogiri::XML::DocumentFragment.parse(inner_xml)
  terms ||= []
  persname.children.each do |child|
    if child.respond_to?(:name) && child.name == 'term'
      term = child.content.strip
      term_type = child['type']
      terms << {'term' => term, 'term_type' => term_type, 'vocabulary' => '/vocabularies/1'}
    end
  end
  if att('role')
    relator = att('role')
  elsif att('encodinganalog') == '700'
    relator = 'ctb'
  else
    relator = nil
  end
  if att('ref')
    set ancestor(:resource, :archival_object), :linked_agents, {'ref' => att('ref'), 'role' => 'subject', 'terms' => terms, 'relator' => relator}
  else
    make_person_template(:role => 'subject')
  end
end
# END CUSTOM SUBJECT AND AGENT IMPORTS
# BEGIN PHYSDESC CUSTOMIZATIONS
# The stock EAD importer doesn't import <physfacet> and <dimensions> tags into extent objects; instead making them notes
# This is a corrected version
# first, some methods for generating note objects
# Build a single-part note of the given type from a Nokogiri tag.
# note_name - ASpace note type (e.g. "physdesc", "physfacet")
# tag       - Nokogiri node whose inner text becomes the note content
# tag_name  - optional label prefixed to the content (used for unrecognized
#             physdesc children so their origin is preserved)
def make_single_note(note_name, tag, tag_name="")
  content = tag.inner_text
  if !tag_name.empty?
    content = tag_name + ": " + content
  end
  make :note_singlepart, {
    :type => note_name,
    :persistent_id => att('id'),
    :publish => true,
    # Strip a leading <head> element. Non-greedy /.*?/ matches heads of any
    # length -- the previous /.?/ could only match zero- or one-character
    # heads, leaving longer ones embedded in the note content (compare the
    # langmaterial handler, which already uses /.*?/).
    :content => format_content( content.sub(/<head>.*?<\/head>/, '').strip)
  } do |note|
    set ancestor(:resource, :archival_object), :notes, note
  end
end
# Build a multi-part note of the given type from a tag's text content and
# attach it to the nearest resource or archival object.
#
# note_name - note type string (e.g. "dimensions")
# tag       - a Nokogiri node whose inner_text becomes the subnote content
def make_nested_note(note_name, tag)
  note_text = tag.inner_text
  note_properties = {
    :type => note_name,
    :persistent_id => att('id'),
    :publish => true,
    :subnotes => {
      'jsonmodel_type' => 'note_text',
      'content' => format_content( note_text )
    }
  }
  make :note_multipart, note_properties do |note|
    set ancestor(:resource, :archival_object), :notes, note
  end
end
# <physdesc>: the stock EAD importer turns <physfacet> and <dimensions> into
# notes; this corrected version folds them into an extent record when a true
# extent ("<number> <type>") is present, and falls back to notes otherwise.
with 'physdesc' do
  next if context == :note_orderedlist # skip physdescs that appear inside ordered lists
  physdesc = Nokogiri::XML::DocumentFragment.parse(inner_xml)
  extent_number_and_type = nil
  dimensions = []
  physfacets = []
  container_summaries = []
  other_extent_data = []
  container_summary_texts = []
  dimensions_texts = []
  physfacet_texts = []
  # If there is already a portion specified, use it
  portion = att('altrender') || 'whole'
  # Special case: if the physdesc is just a plain string with no child elements, treat its contents as a physdesc note
  if physdesc.children.length == 1 && physdesc.children[0].name == 'text'
    container_summaries << physdesc
  else
    # Otherwise, attempt to parse out an extent record from the child elements.
    physdesc.children.each do |child|
      # "extent" can have one of two kinds of semantic meanings: either a true extent with number and type,
      # or a container summary. Disambiguation is done through a regex.
      if child.name == 'extent'
        child_content = child.content.strip
        if extent_number_and_type.nil? && child_content =~ /^([0-9\.]+)+\s+(.*)$/
          extent_number_and_type = {:number => $1, :extent_type => $2}
        else
          container_summaries << child
          container_summary_texts << child_content
        end
      elsif child.name == 'physfacet'
        physfacets << child
        physfacet_texts << child.content.strip
      elsif child.name == 'dimensions'
        dimensions << child
        dimensions_texts << child.content.strip
      elsif child.name != 'text'
        other_extent_data << child
      end
    end
  end
  # only make an extent if we got a number and type, otherwise put all physdesc contents into a note
  if extent_number_and_type
    make :extent, {
      # BUG FIX: read the captured number/type from the hash saved at match
      # time instead of the volatile globals $1/$2, which are only reliable
      # immediately after the match that set them.
      :number => extent_number_and_type[:number],
      :extent_type => extent_number_and_type[:extent_type],
      :portion => portion,
      :container_summary => container_summary_texts.join('; '),
      :physical_details => physfacet_texts.join('; '),
      :dimensions => dimensions_texts.join('; ')
    } do |extent|
      set ancestor(:resource, :archival_object), :extents, extent
    end
  # there's no true extent; split up the rest into individual notes
  else
    container_summaries.each do |summary|
      make_single_note("physdesc", summary)
    end
    physfacets.each do |physfacet|
      make_single_note("physfacet", physfacet)
    end
    dimensions.each do |dimension|
      make_nested_note("dimensions", dimension)
    end
  end
  # Anything unrecognized still becomes a generic physdesc note, labeled by its tag name.
  other_extent_data.each do |unknown_tag|
    make_single_note("physdesc", unknown_tag, unknown_tag.name)
  end
end
# END PHYSDESC CUSTOMIZATIONS
# BEGIN LANGUAGE CUSTOMIZATIONS
# By default, ASpace just uses the last <language> tag it finds as the primary
# language of the material described. This results in incorrect finding-aid languages for many eads.
# for example, ead with the following <langmaterial> tag:
## <langmaterial>
## The material is mostly in <language langcode="eng" encodinganalog="041">English</language>;
## some correspondence is in <language langcode="arm" encodinganalog="041">Armenian;</language>;
## select items are in <language langcode="ger" encodinganalog="041">German</language>.
## </langmaterial>
# will result in a primary material language of German.
# these changes fix that
with "langmaterial" do
  # Assign the primary language from the FIRST <language> child; the stock
  # importer used the last one found, which was wrong for multi-language tags.
  langmaterial = Nokogiri::XML::DocumentFragment.parse(inner_xml)
  langmaterial.children.each do |child|
    if child.name == 'language'
      set ancestor(:resource, :archival_object), :language, child.attr("langcode")
      break
    end
  end
  # write full tag content to a note, subbing out the language tags
  content = inner_xml
  # Skip when the tag is nothing but a single empty self-closed <language/>.
  next if content =~ /\A<language langcode=\"[a-z]+\"\/>\Z/
  # If the whole tag is one simple <language>...</language>, keep just its text.
  if content.match(/\A<language langcode=\"[a-z]+\"\s*>([^<]+)<\/language>\Z/)
    content = $1
  end
  make :note_singlepart, {
    :type => "langmaterial",
    :persistent_id => att('id'),
    :publish => true,
    :content => format_content( content.sub(/<head>.*?<\/head>/, '') )
  } do |note|
    set ancestor(:resource, :archival_object), :notes, note
  end
end
# overwrite the default language tag behavior
# Disable the stock importer's <language> handling entirely; the langmaterial
# handler above performs the language assignment exactly once.
with "language" do
  next
end
# END LANGUAGE CUSTOMIZATIONS
# BEGIN INDEX CUSTOMIZATIONS
# The stock EAD converter creates separate index items for each indexentry,
# one for the value (persname, famname, etc) and one for the reference (ref),
# even when they are within the same indexentry and are related
# (i.e., the persname is a correspondent, the ref is a date or a location at which
# correspondence with that person can be found).
# The Bentley's <indexentry>s generally look something like:
# # <indexentry><persname>Some person</persname><ref>Some date or folder</ref></indexentry>
# # As the <persname> and the <ref> are associated with one another,
# we want to keep them together in the same index item in ArchiveSpace.
# This will treat each <indexentry> as one item,
# creating an index item with a 'value' from the <persname>, <famname>, etc.
# and a 'reference_text' from the <ref>.
# Treat each <indexentry> as a single index item: the value comes from the
# name/subject/title-type child, the reference_text from the sibling <ref>,
# keeping related pairs together in one ArchivesSpace item.
with 'indexentry' do
  # Map EAD index-entry child element names to ArchivesSpace item types.
  type_by_element = {
    'name'       => 'name',
    'persname'   => 'person',
    'famname'    => 'family',
    'corpname'   => 'corporate_entity',
    'subject'    => 'subject',
    'function'   => 'function',
    'occupation' => 'occupation',
    'genreform'  => 'genre_form',
    'title'      => 'title',
    'geogname'   => 'geographic_name'
  }
  entry_type = ''
  entry_value = ''
  entry_reference = ''
  Nokogiri::XML::DocumentFragment.parse(inner_xml).children.each do |child|
    if child.name == 'ref'
      # <ref> supplies the associated reference (a date, folder, location, ...).
      entry_reference << child.content
    elsif (mapped_type = type_by_element[child.name])
      entry_value << child.content
      entry_type << mapped_type
    end
  end
  # Build one index item combining the value and its associated reference.
  make :note_index_item, {
    :type => entry_type,
    :value => entry_value,
    :reference_text => entry_reference
  } do |item|
    set ancestor(:note_index), :items, item
  end
end
# Skip the stock importer actions to avoid confusion/duplication
# Suppress the stock importer's per-element index handlers so entries are not
# imported twice; the 'indexentry' handler above covers them all.
# (The original iterated a name=>type hash, but the mapped values were never
# used, so a plain element-name list suffices.)
%w(name persname famname corpname subject function
   occupation genreform title geogname).each do |element|
  with "indexentry/#{element}" do |node|
    next
  end
end
# Likewise skip the stock <ref> handler; refs are consumed by 'indexentry' above.
with 'indexentry/ref' do
  next
end
# END INDEX CUSTOMIZATIONS
# BEGIN HEAD CUSTOMIZATIONS
# This issue is similar to the language issue -- if there is a note with multiple <head> elements (say, a bioghist with its own head and sublists with their own heads),
# the stock importer action is to set the note label to the very last <head> it finds. This modification will only set the label if it does not already exist, ensuring
# that it will only be set once.
# Set a note label/title from <head> only when one is not already set, so the
# FIRST <head> wins (the stock importer let the last nested <head> overwrite it).
with 'head' do
  if context == :note_multipart
    ancestor(:note_multipart) do |note|
      next unless note["label"].nil?
      set :label, format_content( inner_xml )
    end
  elsif context == :note_chronology
    ancestor(:note_chronology) do |note|
      next unless note["title"].nil?
      set :title, format_content( inner_xml )
    end
  end
end
# END HEAD CUSTOMIZATIONS
# BEGIN DAO TITLE CUSTOMIZATIONS
# The Bentley has many EADs with <dao> tags that lack title attributes.
# The stock ArchivesSpace EAD Converter uses each <dao>'s title attribute as
# the value for the imported digital object's title, which is a required property.
# As a result, all of our EADs with <dao> tags fail when trying to import into ArchivesSpace.
# This section of the BHL EAD Converter plugin modifies the stock ArchivesSpace EAD Converter
# by forming a string containing the digital object's parent archival object's title and date (if both exist),
# or just its title (if only the title exists), or just its date (if only the date exists)
# and then using that string as the imported digital object's title.
# <dao>: create a digital object instance. When the dao lacks a title
# attribute, build one from the parent archival object's title and/or dates.
with 'dao' do
  if att('ref') # A digital object has already been made; just link it
    make :instance, {
      :instance_type => 'digital_object',
      :digital_object => {'ref' => att('ref')}
    } do |instance|
      set ancestor(:resource, :archival_object), :instances, instance
    end
  else # Make a digital object
    make :instance, {
      :instance_type => 'digital_object'
    } do |instance|
      set ancestor(:resource, :archival_object), :instances, instance
    end
    # We'll use either the <dao> title attribute (if it exists) or our display_string (if the title attribute does not exist)
    # This forms a title string using the parent archival object's title, if it exists
    daotitle = nil
    ancestor(:archival_object ) do |ao|
      if ao.title && ao.title.length > 0
        daotitle = ao.title
      end
    end
    # This forms a date string using the parent archival object's date expression,
    # or its begin date - end date, or just its begin date, if any exist
    # (Actually, we have expressions for all of our dates...let's just use those for the sake of simplicity)
    daodates = []
    ancestor(:archival_object) do |aod|
      if aod.dates && aod.dates.length > 0
        aod.dates.each do |dl|
          if dl['expression'].length > 0
            daodates << dl['expression']
          end
        end
      end
    end
    title = daotitle
    date_label = daodates.join(', ') if daodates.length > 0
    # This forms a display string using the parent archival object's title and date (if both exist),
    # or just its title or date (if only one exists)
    display_string = title || ''
    display_string += ', ' if title && date_label
    display_string += date_label if date_label
    make :digital_object, {
      # digital_object_id is required, and <dao> provides none: generate one.
      :digital_object_id => SecureRandom.uuid,
      :title => att('title') || display_string,
    } do |obj|
      obj.file_versions << {
        :use_statement => att('role'),
        :file_uri => att('href'),
        :xlink_actuate_attribute => att('actuate'),
        :xlink_show_attribute => att('show')
      }
      set ancestor(:instance), :digital_object, obj
    end
  end
end
end
# <daodesc>: attach a descriptive note to the enclosing digital object.
with 'daodesc' do
  # NOTE(review): this `next` only exits the ancestor block, not the handler —
  # the note below is still created even when the digital object is a ref.
  # Confirm whether linked (ref'd) digital objects should skip the note entirely.
  ancestor(:digital_object) do |dobj|
    next if dobj.ref
  end
  make :note_digital_object, {
    :type => 'note',
    :persistent_id => att('id'),
    :content => modified_format_content(inner_xml.strip,'daodesc')
  } do |note|
    set ancestor(:digital_object), :notes, note
  end
end
# END DAO TITLE CUSTOMIZATIONS
=begin
# Note: The following bits are here for historical reasons
# We have either decided against implementing the functionality OR the ArchivesSpace importer has changed, deprecating the following customizations
# START IGNORE
# Setting some of these to ignore because we have some physdesc, container, etc.
# Within list/items in our descgrps at the end of finding aids.
# Without setting these to ignore, ASpace both makes the list AND makes separate
# notes for physdesc, dimension, etc. and tries to make instances out of the
# containers, causing import errors.
# Note: if using this in conjunction with the Yale container management plugin,
# be sure to include the line 'next ignore if @ignore' within the with container do
# section of the ConverterExtraContainerValues module.
with 'archref/container' do
@ignore = true
end
with 'archref/physdesc/dimensions' do
@ignore = true
end
with 'archref/unittitle' do
@ignore = true
end
with 'archref/unittitle/unitdate' do
@ignore = true
end
with 'archref/note' do
@ignore = true
end
with 'archref/note/p/unitdate' do
@ignore = true
end
with 'archref/note/p/geogname' do
@ignore = true
end
with 'unittitle' do |node|
ancestor(:note_multipart, :resource, :archival_object) do |obj|
unless obj.class.record_type == "note_multipart" or context == "note_orderedlist"
title = Nokogiri::XML::DocumentFragment.parse(inner_xml.strip)
title.xpath(".//unitdate").remove
obj.title = format_content( title.to_xml(:encoding => 'utf-8') )
end
end
end
with 'unitdate' do |node|
next ignore if @ignore
norm_dates = (att('normal') || "").sub(/^\s/, '').sub(/\s$/, '').split('/')
if norm_dates.length == 1
norm_dates[1] = norm_dates[0]
end
norm_dates.map! {|d| d =~ /^([0-9]{4}(\-(1[0-2]|0[1-9])(\-(0[1-9]|[12][0-9]|3[01]))?)?)$/ ? d : nil}
make :date, {
:date_type => att('type') || 'inclusive',
:expression => inner_xml,
:label => 'creation',
:begin => norm_dates[0],
:end => norm_dates[1],
:calendar => att('calendar'),
:era => att('era'),
:certainty => att('certainty')
} do |date|
set ancestor(:resource, :archival_object), :dates, date
end
end
with 'dimensions' do |node|
next ignore if @ignore
unless context == :note_orderedlist
content = inner_xml.tap {|xml|
xml.sub!(/<head>.*?<\/head>/m, '')
# xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
# xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
}
make :note_multipart, {
:type => node.name,
:persistent_id => att('id'),
:subnotes => {
'jsonmodel_type' => 'note_text',
'content' => format_content( content )
}
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
end
%w(accessrestrict accessrestrict/legalstatus \
accruals acqinfo altformavail appraisal arrangement \
bioghist custodhist \
fileplan odd otherfindaid originalsloc phystech \
prefercite processinfo relatedmaterial scopecontent \
separatedmaterial userestrict ).each do |note|
with note do |node|
content = inner_xml.tap {|xml|
xml.sub!(/<head>.*?<\/head>/m, '')
# xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
# xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
}
make :note_multipart, {
:type => node.name,
:persistent_id => att('id'),
:subnotes => {
'jsonmodel_type' => 'note_text',
'content' => format_content( content )
}
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
end
# START RIGHTS STATEMENTS
# The stock ASpace EAD importer only makes "Conditions Governing Access" notes out of <accessrestrict> tags
# We want to also import our <accessrestrict> tags that have a restriction end date as a "Rights Statements"
# Let ArchivesSpace do its normal thing with accessrestrict
%w(accessrestrict accessrestrict/legalstatus \
accruals acqinfo altformavail appraisal arrangement \
bioghist custodhist dimensions \
fileplan odd otherfindaid originalsloc phystech \
prefercite processinfo relatedmaterial scopecontent \
separatedmaterial userestrict ).each do |note|
with note do |node|
content = inner_xml.tap {|xml|
xml.sub!(/<head>.*?<\/head>/m, '')
# xml.sub!(/<list [^>]*>.*?<\/list>/m, '')
# xml.sub!(/<chronlist [^>]*>.*<\/chronlist>/m, '')
}
make :note_multipart, {
:type => node.name,
:persistent_id => att('id'),
:subnotes => {
'jsonmodel_type' => 'note_text',
'content' => format_content( content )
}
} do |note|
set ancestor(:resource, :archival_object), :notes, note
end
end
end
# Now make a Rights Statement using the content from the "Conditions Governing Access" note
# and the restriction end date from the accessrestrict/date
with 'accessrestrict/date' do
ancestor(:archival_object) do |ao|
ao.notes.each do |n|
if n['type'] == 'accessrestrict'
n['subnotes'].each do |sn|
make :rights_statement, {
:rights_type => 'institutional_policy',
:restrictions => sn['content'],
:restriction_end_date => att('normal')
} do |rights|
set ancestor(:resource, :archival_object), :rights_statements, rights
end
end
end
end
end
end
=end
end
|
module AccessSchema
  # Registry of roles, asserts and resources, plus the permission-check logic.
  # Roles are checked in registration order; a check passes when ANY requested
  # role satisfies the privilege's expectations.
  class Schema
    attr_reader :roles

    def initialize
      @roles = []
      @asserts = {}
      @resources = {}
    end

    # Register a role name. Registration order defines role precedence (see sort_roles).
    def add_role(role)
      @roles << role
    end

    # Register an assert object, keyed by its name.
    def add_assert(assert)
      @asserts[assert.name] = assert
    end

    # Register a resource object, keyed by its name.
    def add_resource(resource)
      @resources[resource.name] = resource
    end

    # Boolean wrapper around require!: false when the check raises NotAllowedError,
    # true otherwise. Other errors (unknown resource/privilege/roles) propagate.
    def allow?(*args)
      require!(*args)
    rescue NotAllowedError
      false
    else
      true
    end

    # Raises NotAllowedError unless at least one of the given roles passes.
    # args: resource (name or model instance), privilege, roles, optional options hash.
    def require!(*args)
      check!(*normalize_args(args))
    end

    private

    # Normalize positional args into [resource_name, privilege, sorted_roles, options].
    def normalize_args(args)
      options = args.last.is_a?(Hash) ? args.pop : {}
      privilege = args[1].to_s
      roles = args[2]
      case args[0]
      when String, Symbol
        resource = args[0].to_s
      else
        # A model instance: use its class's model name and pass it as the subject.
        resource = args[0].class.model_name.to_s
        options.merge!(:subject => args[0])
      end
      roles = calculate_roles(roles, options)
      if (self.roles & roles).empty?
        raise InvalidRolesError.new(:roles => roles)
      end
      roles = sort_roles(roles)
      [resource, privilege, roles, options]
    end

    # Roles may be a callable, a single value, or a list; always returns strings.
    def calculate_roles(roles, check_options)
      roles = if roles.respond_to?(:call)
                roles.call(check_options.dup)
              elsif !roles.respond_to?(:map)
                [ roles ]
              else
                roles
              end
      unless roles.respond_to?(:map)
        raise InvalidRolesError.new(:result => roles)
      end
      roles.map(&:to_s)
    end

    # Order the requested roles by their registration order in @roles.
    def sort_roles(roles)
      @roles.select do |role|
        roles.include? role
      end
    end

    def check!(resource_name, privilege_name, roles, options)
      # BUG FIX: this was assigned to a misspelled local ("resouce_name"),
      # so the stringification of resource_name never took effect.
      resource_name = resource_name.to_s
      privilege_name = privilege_name.to_s
      resource = @resources[resource_name]
      if resource.nil?
        raise NoResourceError.new(:resource => resource_name)
      end
      privilege = resource.get_privilege(privilege_name)
      if privilege.nil?
        raise NoPrivilegeError.new(:resource => resource_name, :privilege => privilege_name)
      end
      # Track which asserts failed per role, for the failure log payload.
      failed_asserts = Hash.new{|h, k| h[k] = []}
      roles_checks = roles.map do |role|
        privilege.allow?([role]) do |expectation|
          assert = @asserts[expectation.name]
          check_options = expectation.options.merge(options)
          assert.check?(check_options).tap do |result|
            failed_asserts[role] << expectation.name unless result
          end
        end
      end
      log_payload = {
        :resource => resource_name,
        :privilege => privilege_name,
        :roles => roles,
        :options => options
      }
      # Pass when ANY role satisfied the privilege.
      unless roles_checks.any?
        log_payload[:failed_asserts] = failed_asserts
        logger.info{ "check FAILED: #{log_payload.inspect}" }
        raise NotAllowedError.new(log_payload)
      else
        logger.debug{ "check PASSED: #{log_payload.inspect}" }
        true
      end
    end

    def logger
      AccessSchema.config.logger
    end
  end
end
Refactor Schema#normalize_args method
module AccessSchema
  # Registry of roles, asserts and resources, plus the permission-check logic.
  # Refactored variant: normalize_args delegates to resource_name/subject_by_name?.
  class Schema
    attr_reader :roles

    def initialize
      @roles = []
      @asserts = {}
      @resources = {}
    end

    # Register a role name. Registration order defines role precedence (see sort_roles).
    def add_role(role)
      @roles << role
    end

    # Register an assert object, keyed by its name.
    def add_assert(assert)
      @asserts[assert.name] = assert
    end

    # Register a resource object, keyed by its name.
    def add_resource(resource)
      @resources[resource.name] = resource
    end

    # Boolean wrapper around require!: false when the check raises NotAllowedError,
    # true otherwise. Other errors (unknown resource/privilege/roles) propagate.
    def allow?(*args)
      require!(*args)
    rescue NotAllowedError => e
      false
    else
      true
    end

    # Raises NotAllowedError unless at least one of the given roles passes.
    def require!(*args)
      check!(*normalize_args(args))
    end

    private

    # Normalize positional args into [resource_name, privilege, sorted_roles, options].
    def normalize_args(args)
      privilege = args[1].to_s
      options = args.last.is_a?(Hash) ? args.pop : {}
      unless subject_by_name?(args[0])
        # A model instance: pass it through as the subject for asserts.
        options.merge! :subject => args[0]
      end
      roles = calculate_roles(args[2], options)
      if (self.roles & roles).empty?
        raise InvalidRolesError.new(:roles => roles)
      end
      [
        resource_name(args[0]),
        privilege,
        sort_roles(roles),
        options
      ]
    end

    # Resolve the resource lookup name from either a name or a model instance.
    # May return a non-String (e.g. an ActiveModel::Name); check! stringifies it.
    def resource_name(obj)
      if subject_by_name?(obj)
        obj.to_s
      else
        klass = obj.class
        if klass.respond_to?(:model_name)
          klass.model_name
        else
          klass.name
        end
      end
    end

    # True when the caller identified the resource by name rather than instance.
    def subject_by_name?(obj)
      case obj
      when String, Symbol
        true
      else
        false
      end
    end

    # Roles may be a callable, a single value, or a list; always returns strings.
    def calculate_roles(roles, check_options)
      roles = if roles.respond_to?(:call)
                roles.call(check_options.dup)
              elsif !roles.respond_to?(:map)
                [ roles ]
              else
                roles
              end
      unless roles.respond_to?(:map)
        raise InvalidRolesError.new(:result => roles)
      end
      roles.map(&:to_s)
    end

    # Order the requested roles by their registration order in @roles.
    def sort_roles(roles)
      @roles.select do |role|
        roles.include? role
      end
    end

    def check!(resource_name, privilege_name, roles, options)
      # BUG FIX: this was assigned to a misspelled local ("resouce_name"), so
      # resource_name was never stringified — notably broken here because
      # #resource_name may return a model_name object rather than a String key.
      resource_name = resource_name.to_s
      privilege_name = privilege_name.to_s
      resource = @resources[resource_name]
      if resource.nil?
        raise NoResourceError.new(:resource => resource_name)
      end
      privilege = resource.get_privilege(privilege_name)
      if privilege.nil?
        raise NoPrivilegeError.new(:resource => resource_name, :privilege => privilege_name)
      end
      # Track which asserts failed per role, for the failure log payload.
      failed_asserts = Hash.new{|h, k| h[k] = []}
      roles_checks = roles.map do |role|
        privilege.allow?([role]) do |expectation|
          assert = @asserts[expectation.name]
          check_options = expectation.options.merge(options)
          assert.check?(check_options).tap do |result|
            failed_asserts[role] << expectation.name unless result
          end
        end
      end
      log_payload = {
        :resource => resource_name,
        :privilege => privilege_name,
        :roles => roles,
        :options => options
      }
      # Pass when ANY role satisfied the privilege.
      unless roles_checks.any?
        log_payload[:failed_asserts] = failed_asserts
        logger.info{ "check FAILED: #{log_payload.inspect}" }
        raise NotAllowedError.new(log_payload)
      else
        logger.debug{ "check PASSED: #{log_payload.inspect}" }
        true
      end
    end

    def logger
      AccessSchema.config.logger
    end
  end
end
|
require "access_token_wrapper/version"
module AccessTokenWrapper
  # Transparent proxy around an OAuth2 access token that refreshes the token
  # and retries the call once when it fails with a refreshable OAuth2::Error.
  class Base
    # HTTP statuses that indicate a request-level problem rather than an
    # expired token; errors with these statuses are re-raised untouched.
    NON_ERROR_CODES=[404, 422, 414, 429]

    attr_reader :raw_token

    # raw_token - the underlying OAuth2 token object to delegate to.
    # callback  - invoked with (new_token, exception) after each refresh.
    def initialize(raw_token, &callback)
      @raw_token = raw_token
      @callback = callback
    end

    # Delegate every unknown call to the wrapped token; on a token-related
    # OAuth2 error, refresh, notify the callback, and retry exactly once.
    def method_missing(method, *args, &block)
      @raw_token.send(method, *args, &block)
    rescue OAuth2::Error => exception
      raise exception if NON_ERROR_CODES.include?(exception.response.status)
      @raw_token = @raw_token.refresh!
      @callback.call(@raw_token, exception)
      @raw_token.send(method, *args, &block)
    end

    # Mirror the wrapped token's interface for respond_to? checks.
    def respond_to_missing?(method_name, include_private = false)
      @raw_token.respond_to?(method_name, include_private) || super
    end
  end
end
Exclude 402 as well
require "access_token_wrapper/version"
module AccessTokenWrapper
  # Transparent proxy around an OAuth2 access token that refreshes the token
  # and retries the call once when it fails with a refreshable OAuth2::Error.
  class Base
    # HTTP statuses that indicate a request-level problem rather than an
    # expired token (402 included); these are re-raised without refreshing.
    NON_ERROR_CODES=[402, 404, 422, 414, 429]

    attr_reader :raw_token

    # raw_token - the underlying OAuth2 token object to delegate to.
    # callback  - invoked with (new_token, exception) after each refresh.
    def initialize(raw_token, &callback)
      @raw_token = raw_token
      @callback = callback
    end

    # Delegate every unknown call to the wrapped token; on a token-related
    # OAuth2 error, refresh, notify the callback, and retry exactly once.
    def method_missing(method, *args, &block)
      @raw_token.send(method, *args, &block)
    rescue OAuth2::Error => exception
      raise exception if NON_ERROR_CODES.include?(exception.response.status)
      @raw_token = @raw_token.refresh!
      @callback.call(@raw_token, exception)
      @raw_token.send(method, *args, &block)
    end

    # Mirror the wrapped token's interface for respond_to? checks.
    def respond_to_missing?(method_name, include_private = false)
      @raw_token.respond_to?(method_name, include_private) || super
    end
  end
end
|
require 'active_resource/connection'
require 'cgi'
require 'set'
module ActiveResource
# ActiveResource::Base is the main class for mapping RESTful resources as models in a Rails application.
#
# For an outline of what Active Resource is capable of, see link:files/vendor/rails/activeresource/README.html.
#
# == Automated mapping
#
# Active Resource objects represent your RESTful resources as manipulatable Ruby objects. To map resources
# to Ruby objects, Active Resource only needs a class name that corresponds to the resource name (e.g., the class
# Person maps to the resources people, very similarly to Active Record) and a +site+ value, which holds the
# URI of the resources.
#
# class Person < ActiveResource::Base
# self.site = "http://api.people.com:3000/"
# end
#
# Now the Person class is mapped to RESTful resources located at <tt>http://api.people.com:3000/people/</tt>, and
# you can now use Active Resource's lifecycles methods to manipulate resources.
#
# == Lifecycle methods
#
# Active Resource exposes methods for creating, finding, updating, and deleting resources
# from REST web services.
#
# ryan = Person.new(:first => 'Ryan', :last => 'Daigle')
# ryan.save #=> true
# ryan.id #=> 2
# Person.exists?(ryan.id) #=> true
# ryan.exists? #=> true
#
# ryan = Person.find(1)
# # => Resource holding our newly created Person object
#
# ryan.first = 'Rizzle'
# ryan.save #=> true
#
# ryan.destroy #=> true
#
# As you can see, these are very similar to Active Record's lifecycle methods for database records.
# You can read more about each of these methods in their respective documentation.
#
# === Custom REST methods
#
# Since simple CRUD/lifecycle methods can't accomplish every task, Active Resource also supports
# defining your own custom REST methods. To invoke them, Active Resource provides the <tt>get</tt>,
# <tt>post</tt>, <tt>put</tt> and <tt>delete</tt> methods where you can specify a custom REST method
# name to invoke.
#
# # POST to the custom 'register' REST method, i.e. POST /people/new/register.xml.
# Person.new(:name => 'Ryan').post(:register)
# # => { :id => 1, :name => 'Ryan', :position => 'Clerk' }
#
# # PUT an update by invoking the 'promote' REST method, i.e. PUT /people/1/promote.xml?position=Manager.
# Person.find(1).put(:promote, :position => 'Manager')
# # => { :id => 1, :name => 'Ryan', :position => 'Manager' }
#
# # GET all the positions available, i.e. GET /people/positions.xml.
# Person.get(:positions)
# # => [{:name => 'Manager'}, {:name => 'Clerk'}]
#
# # DELETE to 'fire' a person, i.e. DELETE /people/1/fire.xml.
# Person.find(1).delete(:fire)
#
# For more information on using custom REST methods, see the
# ActiveResource::CustomMethods documentation.
#
# == Validations
#
# You can validate resources client side by overriding validation methods in the base class.
#
# class Person < ActiveResource::Base
# self.site = "http://api.people.com:3000/"
# protected
# def validate
# errors.add("last", "has invalid characters") unless last =~ /[a-zA-Z]*/
# end
# end
#
# See the ActiveResource::Validations documentation for more information.
#
# == Authentication
#
# Many REST APIs will require authentication, usually in the form of basic
# HTTP authentication. Authentication can be specified by:
# * putting the credentials in the URL for the +site+ variable.
#
# class Person < ActiveResource::Base
# self.site = "http://ryan:password@api.people.com:3000/"
# end
#
# * defining +user+ and/or +password+ variables
#
# class Person < ActiveResource::Base
# self.site = "http://api.people.com:3000/"
# self.user = "ryan"
# self.password = "password"
# end
#
# For obvious security reasons, it is probably best if such services are available
# over HTTPS.
#
# Note: Some values cannot be provided in the URL passed to site. e.g. email addresses
# as usernames. In those situations you should use the separate user and password options.
# == Errors & Validation
#
# Error handling and validation is handled in much the same manner as you're used to seeing in
# Active Record. Both the response code in the HTTP response and the body of the response are used to
# indicate that an error occurred.
#
# === Resource errors
#
# When a GET is requested for a resource that does not exist, the HTTP <tt>404</tt> (Resource Not Found)
# response code will be returned from the server which will raise an ActiveResource::ResourceNotFound
# exception.
#
# # GET http://api.people.com:3000/people/999.xml
# ryan = Person.find(999) # => Raises ActiveResource::ResourceNotFound
# # => Response = 404
#
# <tt>404</tt> is just one of the HTTP error response codes that ActiveResource will handle with its own exception. The
# following HTTP response codes will also result in these exceptions:
#
# 200 - 399:: Valid response, no exception
# 404:: ActiveResource::ResourceNotFound
# 409:: ActiveResource::ResourceConflict
# 422:: ActiveResource::ResourceInvalid (rescued by save as validation errors)
# 401 - 499:: ActiveResource::ClientError
# 500 - 599:: ActiveResource::ServerError
#
# These custom exceptions allow you to deal with resource errors more naturally and with more precision
# rather than returning a general HTTP error. For example:
#
# begin
# ryan = Person.find(my_id)
# rescue ActiveResource::ResourceNotFound
# redirect_to :action => 'not_found'
# rescue ActiveResource::ResourceConflict, ActiveResource::ResourceInvalid
# redirect_to :action => 'new'
# end
#
# === Validation errors
#
# Active Resource supports validations on resources and will return errors if any these validations fail
# (e.g., "First name can not be blank" and so on). These types of errors are denoted in the response by
# a response code of <tt>422</tt> and an XML representation of the validation errors. The save operation will
# then fail (with a <tt>false</tt> return value) and the validation errors can be accessed on the resource in question.
#
# ryan = Person.find(1)
# ryan.first #=> ''
# ryan.save #=> false
#
# # When
# # PUT http://api.people.com:3000/people/1.xml
# # is requested with invalid values, the response is:
# #
# # Response (422):
# # <errors type="array"><error>First cannot be empty</error></errors>
# #
#
# ryan.errors.invalid?(:first) #=> true
# ryan.errors.full_messages #=> ['First cannot be empty']
#
# Learn more about Active Resource's validation features in the ActiveResource::Validations documentation.
#
# === Timeouts
#
# Active Resource relies on HTTP to access RESTful APIs and as such is inherently susceptible to slow or
# unresponsive servers. In such cases, your Active Resource method calls could timeout. You can control the
# amount of time before Active Resource times out with the +timeout+ variable.
#
# class Person < ActiveResource::Base
# self.site = "http://api.people.com:3000/"
# self.timeout = 5
# end
#
# This sets the +timeout+ to 5 seconds. You can adjust the timeout to a value suitable for the RESTful API
# you are accessing. It is recommended to set this to a reasonably low value to allow your Active Resource
# clients (especially if you are using Active Resource in a Rails application) to fail-fast (see
# http://en.wikipedia.org/wiki/Fail-fast) rather than cause cascading failures that could incapacitate your
# server.
#
# Internally, Active Resource relies on Ruby's Net::HTTP library to make HTTP requests. Setting +timeout+
# sets the <tt>read_timeout</tt> of the internal Net::HTTP instance to the same value. The default
# <tt>read_timeout</tt> is 60 seconds on most Ruby implementations.
class Base
# The logger for diagnosing and tracing Active Resource calls.
cattr_accessor :logger
class << self
# Gets the URI of the REST resources to map for this class. The site variable is required
# for ActiveResource's mapping to work. Returns nil when no site has been set on this
# class or any superclass; an inherited site is returned as a frozen duplicate.
def site
  # Not using superclass_delegating_reader because don't want subclasses to modify superclass instance
  #
  # With superclass_delegating_reader
  #
  #   Parent.site = 'http://anonymous@test.com'
  #   Subclass.site # => 'http://anonymous@test.com'
  #   Subclass.site.user = 'david'
  #   Parent.site # => 'http://david@test.com'
  #
  # Without superclass_delegating_reader (expected behaviour)
  #
  #   Parent.site = 'http://anonymous@test.com'
  #   Subclass.site # => 'http://anonymous@test.com'
  #   Subclass.site.user = 'david' # => TypeError: can't modify frozen object
  #
  if defined?(@site)
    @site
  elsif superclass != Object && superclass.site
    superclass.site.dup.freeze
  end
end
# Sets the URI of the REST resources to map for this class to the value in the +site+ argument.
# The site variable is required for ActiveResource's mapping to work.
# Also resets the cached connection and extracts any user/password embedded in the URI.
def site=(site)
  @connection = nil
  if site.nil?
    @site = nil
  else
    @site = create_site_uri_from(site)
    # NOTE(review): URI.decode is deprecated and removed in modern Ruby (3.0+);
    # confirm the target Ruby version before upgrading this code.
    @user = URI.decode(@site.user) if @site.user
    @password = URI.decode(@site.password) if @site.password
  end
end
# Gets the user for REST HTTP authentication. Returns nil when no user is set
# here or in any superclass; an inherited value comes back as a frozen copy.
def user
  # Not using superclass_delegating_reader. See +site+ for explanation.
  return @user if defined?(@user)
  if superclass != Object && superclass.user
    superclass.user.dup.freeze
  end
end
# Sets the user for REST HTTP authentication.
# Resets the cached connection so the new credential is used on the next request.
def user=(user)
  @connection = nil
  @user = user
end
# Gets the password for REST HTTP authentication. Returns nil when no password
# is set here or in any superclass; an inherited value comes back as a frozen copy.
def password
  # Not using superclass_delegating_reader. See +site+ for explanation.
  return @password if defined?(@password)
  if superclass != Object && superclass.password
    superclass.password.dup.freeze
  end
end
# Sets the password for REST HTTP authentication.
# Resets the cached connection so the new credential is used on the next request.
def password=(password)
  @connection = nil
  @password = password
end
# Sets the format that attributes are sent and received in from a mime type reference. Example:
#
#   Person.format = :json
#   Person.find(1) # => GET /people/1.json
#
#   Person.format = ActiveResource::Formats::XmlFormat
#   Person.find(1) # => GET /people/1.xml
#
# Default format is :xml.
# The format is stored as an inheritable class attribute and, when a site is
# configured, pushed onto the live connection immediately.
def format=(mime_type_reference_or_format)
  format = mime_type_reference_or_format.is_a?(Symbol) ?
    ActiveResource::Formats[mime_type_reference_or_format] : mime_type_reference_or_format
  write_inheritable_attribute("format", format)
  connection.format = format if site
end
# Returns the current format, default is ActiveResource::Formats::XmlFormat.
# Reads the inheritable attribute so subclasses fall back to the parent's format.
def format # :nodoc:
  read_inheritable_attribute("format") || ActiveResource::Formats[:xml]
end
# Sets the number of seconds after which requests to the REST API should time out.
# Resets the cached connection so the new timeout takes effect on next use.
def timeout=(timeout)
  @connection = nil
  @timeout = timeout
end
# Gets the number of seconds after which requests to the REST API should time out.
# Falls back to the superclass's timeout when none is set on this class.
def timeout
  return @timeout if defined?(@timeout)
  if superclass != Object && superclass.timeout
    superclass.timeout
  end
end
# An instance of ActiveResource::Connection that is the base connection to the remote service.
# The +refresh+ parameter toggles whether or not the connection is refreshed at every request
# or not (defaults to <tt>false</tt>).
def connection(refresh = false)
  # Only classes that own connection state (or the hierarchy root) build a
  # connection; all other subclasses defer to the superclass's cached one.
  if defined?(@connection) || superclass == Object
    @connection = Connection.new(site, format) if refresh || @connection.nil?
    @connection.user = user if user
    @connection.password = password if password
    @connection.timeout = timeout if timeout
    @connection
  else
    superclass.connection
  end
end
# Class-level hash of extra HTTP headers sent with every request.
# Lazily initialized on first access so callers can mutate it in place.
def headers
  @headers || (@headers = {})
end
# Do not include any modules in the default element name. This makes it easier to seclude ARes objects
# in a separate namespace without having to set element_name repeatedly.
attr_accessor_with_default(:element_name) { to_s.split("::").last.underscore } #:nodoc:
attr_accessor_with_default(:collection_name) { element_name.pluralize } #:nodoc:
attr_accessor_with_default(:primary_key, 'id') #:nodoc:
# Gets the prefix for a resource's nested URL (e.g., <tt>prefix/collectionname/1.xml</tt>)
# This method is regenerated at runtime based on what the prefix is set to.
def prefix(options={})
default = site.path
# NOTE(review): << mutates the string returned by site.path in place —
# presumably harmless because prefix= immediately replaces this method,
# but verify site.path returns a fresh string each call.
default << '/' unless default[-1..-1] == '/'
# generate the actual method based on the current site path
self.prefix = default
prefix(options)
end
# An attribute reader for the source string for the resource path prefix. This
# method is regenerated at runtime based on what the prefix is set to.
def prefix_source
prefix # generate #prefix and #prefix_source methods first
# By now prefix= has redefined this method, so the call below dispatches
# to the generated version rather than recursing back here.
prefix_source
end
# Sets the prefix for a resource's nested URL (e.g., <tt>prefix/collectionname/1.xml</tt>).
# Default value is <tt>site.path</tt>.
# Compiles #prefix and #prefix_source into literal methods on this class
# so later calls pay no parsing cost.
def prefix=(value = '/')
# Replace :placeholders with '#{embedded options[:lookups]}'
prefix_call = value.gsub(/:\w+/) { |key| "\#{options[#{key}]}" }
# Redefine the new methods.
code = <<-end_code
def prefix_source() "#{value}" end
def prefix(options={}) "#{prefix_call}" end
end_code
# silence_warnings suppresses "method redefined" noise when the prefix changes.
silence_warnings { instance_eval code, __FILE__, __LINE__ }
rescue
logger.error "Couldn't set prefix: #{$!}\n #{code}"
raise
end
alias_method :set_prefix, :prefix= #:nodoc:
alias_method :set_element_name, :element_name= #:nodoc:
alias_method :set_collection_name, :collection_name= #:nodoc:
# Builds the element path for the resource with the given +id+.
#
# +prefix_options+ fills :placeholders in the prefix for nested URLs
# (e.g. <tt>:account_id => 19</tt> yields <tt>/accounts/19/purchases/1.xml</tt>)
# and +query_options+ becomes the query string. When +query_options+ is
# omitted, prefix keys are automatically split out of +prefix_options+.
#
# ==== Examples
#   Post.element_path(1)                                     # => /posts/1.xml
#   Comment.element_path(1, :post_id => 5)                   # => /posts/5/comments/1.xml
#   Comment.element_path(1, :post_id => 5, :active => 1)     # => /posts/5/comments/1.xml?active=1
#   Comment.element_path(1, {:post_id => 5}, {:active => 1}) # => /posts/5/comments/1.xml?active=1
def element_path(id, prefix_options = {}, query_options = nil)
  if query_options.nil?
    prefix_options, query_options = split_options(prefix_options)
  end
  "#{prefix(prefix_options)}#{collection_name}/#{id}.#{format.extension}#{query_string(query_options)}"
end
# Builds the collection path for the REST resources.
#
# +prefix_options+ fills :placeholders in the prefix for nested URLs
# and +query_options+ becomes the query string. When +query_options+ is
# omitted, prefix keys are automatically split out of +prefix_options+.
#
# ==== Examples
#   Post.collection_path                                  # => /posts.xml
#   Comment.collection_path(:post_id => 5)                # => /posts/5/comments.xml
#   Comment.collection_path(:post_id => 5, :active => 1)  # => /posts/5/comments.xml?active=1
#   Comment.collection_path({:post_id => 5}, {:active => 1})
#                                                         # => /posts/5/comments.xml?active=1
def collection_path(prefix_options = {}, query_options = nil)
  if query_options.nil?
    prefix_options, query_options = split_options(prefix_options)
  end
  "#{prefix(prefix_options)}#{collection_name}.#{format.extension}#{query_string(query_options)}"
end
alias_method :set_primary_key, :primary_key= #:nodoc:
# Builds a new resource from +attributes+ and immediately asks the remote
# service to save it, equivalent to:
#
#   ryan = Person.new(:first => 'ryan')
#   ryan.save
#
# The resource is returned whether or not the save succeeded; on a
# validation failure valid? returns <tt>false</tt> and new? stays
# <tt>true</tt>.
#
# ==== Examples
#   dhh = Person.create(:name => 'David', :email => 'dhh@nospam.com', :enabled => true)
#   dhh.valid? # => true
#   dhh.new?   # => false
#
#   # Assuming a validation requiring the name attribute:
#   that_guy = Person.create(:name => '', :email => 'thatguy@nospam.com', :enabled => true)
#   that_guy.valid? # => false
#   that_guy.new?   # => true
def create(attributes = {})
  resource = self.new(attributes)
  resource.save
  resource
end
# Core finder for resources, used much like Active Record's +find+.
#
# The first argument selects the scope:
# * <tt>:all</tt>   - every resource matching the request
# * <tt>:first</tt> - the first matching resource
# * <tt>:one</tt>   - a single resource from a custom URL (see :from)
# * anything else   - treated as an id and fetched from the default URL
#
# ==== Options
# * +from+:   path or custom method name to fetch resources from.
# * +params+: query and prefix (nested URL) parameters.
#
# ==== Examples
#   Person.find(1)                                        # => GET /people/1.xml
#   Person.find(:all)                                     # => GET /people.xml
#   Person.find(:all, :params => { :title => "CEO" })     # => GET /people.xml?title=CEO
#   Person.find(:first, :from => :managers)               # => GET /people/managers.xml
#   Person.find(:one, :from => "/companies/1/manager.xml")# => GET /companies/1/manager.xml
#   StreetAddress.find(1, :params => { :person_id => 1 }) # => GET /people/1/street_addresses/1.xml
def find(*arguments)
  scope = arguments.shift
  options = arguments.shift || {}
  case scope
  when :all   then find_every(options)
  when :first then find_every(options).first
  when :one   then find_one(options)
  else             find_single(scope, options)
  end
end
# Deletes the resources with the ID in the +id+ parameter.
#
# ==== Options
# All options specify prefix and query parameters.
#
# ==== Examples
# Event.delete(2)
# # => DELETE /events/2
#
# Event.create(:name => 'Free Concert', :location => 'Community Center')
# my_event = Event.find(:first)
# # => Events (id: 7)
# Event.delete(my_event.id)
# # => DELETE /events/7
#
# # Let's assume a request to events/5/cancel.xml
# Event.delete(params[:id])
# # => DELETE /events/5
#
def delete(id, options = {})
# element_path splits +options+ into prefix and query parameters.
connection.delete(element_path(id, options))
end
# Asserts the existence of a resource, returning <tt>true</tt> if the resource is found.
# Issues a HEAD request to the element path; a 404 from the service is
# rescued and reported as <tt>false</tt>.
#
# ==== Examples
#   Note.create(:title => 'Hello, world.', :body => 'Nothing more for now...')
#   Note.exists?(1)
#   # => true
#
#   Note.exists?(1349)
#   # => false
def exists?(id, options = {})
  if id
    prefix_options, query_options = split_options(options[:params])
    path = element_path(id, prefix_options, query_options)
    response = connection.head(path, headers)
    # Net::HTTPResponse#code is a String such as "200"; comparing it
    # directly with the Integer 200 is always false, so coerce first.
    response.code.to_i == 200
  end
rescue ActiveResource::ResourceNotFound
  false
end
private
# Find every resource
# Dispatches on options[:from]: a Symbol invokes a custom GET method, a
# String is used as a literal path, and otherwise the default collection
# path is used with split prefix/query params.
def find_every(options)
case from = options[:from]
when Symbol
instantiate_collection(get(from, options[:params]))
when String
path = "#{from}#{query_string(options[:params])}"
instantiate_collection(connection.get(path, headers) || [])
else
prefix_options, query_options = split_options(options[:params])
path = collection_path(prefix_options, query_options)
instantiate_collection( (connection.get(path, headers) || []), prefix_options )
end
end
# Find a single resource from a one-off URL
# Returns nil when options[:from] is neither a Symbol (custom method)
# nor a String (literal path) — there is no default branch.
def find_one(options)
case from = options[:from]
when Symbol
instantiate_record(get(from, options[:params]))
when String
path = "#{from}#{query_string(options[:params])}"
instantiate_record(connection.get(path, headers))
end
end
# Find a single resource from the default URL
# +scope+ is the resource id; prefix params are remembered on the record.
def find_single(scope, options)
prefix_options, query_options = split_options(options[:params])
path = element_path(scope, prefix_options, query_options)
instantiate_record(connection.get(path, headers), prefix_options)
end
# Turns an array of raw attribute hashes into resource instances,
# replacing the elements of +collection+ in place and returning it.
def instantiate_collection(collection, prefix_options = {})
  collection.map! { |attrs| instantiate_record(attrs, prefix_options) }
end
# Builds a single resource from a raw attribute hash, remembering the
# prefix options it was fetched with so later requests hit the same URL.
def instantiate_record(record, prefix_options = {})
  resource = new(record)
  resource.prefix_options = prefix_options
  resource
end
# Accepts a URI or a string and returns a URI instance for the site.
# A URI argument is duplicated so the caller's object is never shared.
def create_site_uri_from(site)
  return site.dup if site.is_a?(URI)
  URI.parse(site)
end
# The set of :placeholder keys appearing in the current prefix source,
# memoized per class.
def prefix_parameters
  @prefix_parameters ||= prefix_source.scan(/:\w+/).each_with_object(Set.new) do |match, set|
    set << match[1..-1].to_sym
  end
end
# Builds the "?key=value" query string for the request; nil when there
# is nothing to append.
def query_string(options)
  return nil if options.nil? || options.empty?
  "?#{options.to_query}"
end
# Partitions +options+ into the hash of prefix options (keys that appear
# as :placeholders in the prefix) and the hash of leftover query options.
def split_options(options = {})
  prefix_options = {}
  query_options = {}
  (options || {}).each do |key, value|
    next if key.blank?
    bucket = prefix_parameters.include?(key.to_sym) ? prefix_options : query_options
    bucket[key.to_sym] = value
  end
  [prefix_options, query_options]
end
end
attr_accessor :attributes #:nodoc:
attr_accessor :prefix_options #:nodoc:
# Constructor method for new resources; the optional +attributes+ parameter takes a +Hash+
# of attributes for the new resource.
#
# ==== Examples
# my_course = Course.new
# my_course.name = "Western Civilization"
# my_course.lecturer = "Don Trotter"
# my_course.save
#
# my_other_course = Course.new(:name => "Philosophy: Reason and Being", :lecturer => "Ralph Cling")
# my_other_course.save
def initialize(attributes = {})
@attributes = {}
@prefix_options = {}
# load splits prefix options (e.g. :account_id) out of the hash and
# converts nested hashes/arrays into nested resources.
load(attributes)
end
# Returns a clone of the resource that hasn't been assigned an id yet and
# is treated as a new resource.
#
# ryan = Person.find(1)
# not_ryan = ryan.clone
# not_ryan.new? # => true
#
# Any active resource member attributes will NOT be cloned, though all other
# attributes are. This is to prevent the conflict between any prefix_options
# that refer to the original parent resource and the newly cloned parent
# resource that does not exist.
#
# ryan = Person.find(1)
# ryan.address = StreetAddress.find(1, :person_id => ryan.id)
# ryan.hash = {:not => "an ARes instance"}
#
# not_ryan = ryan.clone
# not_ryan.new? # => true
# not_ryan.address # => NoMethodError
# not_ryan.hash # => {:not => "an ARes instance"}
#
def clone
# Clone all attributes except the pk and any nested ARes
# NOTE(review): v.clone raises TypeError for unclonable primitives
# (Fixnum, Symbol, nil on older Rubies) — TODO confirm attribute values
# stored here are always clonable.
cloned = attributes.reject {|k,v| k == self.class.primary_key || v.is_a?(ActiveResource::Base)}.inject({}) do |attrs, (k, v)|
attrs[k] = v.clone
attrs
end
# Form the new resource - bypass initialize of resource with 'new' as that will call 'load' which
# attempts to convert hashes into member objects and arrays into collections of objects. We want
# the raw objects to be cloned so we bypass load by directly setting the attributes hash.
resource = self.class.new({})
resource.prefix_options = self.prefix_options
resource.send :instance_variable_set, '@attributes', cloned
resource
end
# A method to determine if the resource a new object (i.e., it has not been POSTed to the remote service yet).
#
# ==== Examples
# not_new = Computer.create(:brand => 'Apple', :make => 'MacBook', :vendor => 'MacMall')
# not_new.new?
# # => false
#
# is_new = Computer.new(:brand => 'IBM', :make => 'Thinkpad', :vendor => 'IBM')
# is_new.new?
# # => true
#
# is_new.save
# is_new.new?
# # => false
#
def new?
# A resource is "new" until the remote service has assigned it an id.
id.nil?
end
# Get the +id+ attribute of the resource.
# The attribute name is the class-level +primary_key+ (default 'id').
def id
attributes[self.class.primary_key]
end
# Set the +id+ attribute of the resource.
# Stored under the class-level +primary_key+ attribute name.
def id=(id)
attributes[self.class.primary_key] = id
end
# Allows ActiveResource objects to be used as parameters in ActionPack URL generation.
# Returns nil for unsaved resources, which have no id yet.
def to_param
id && id.to_s
end
# Test for equality. Resource are equal if and only if +other+ is the same object or
# is an instance of the same class, is not +new?+, and has the same +id+.
#
# ==== Examples
# ryan = Person.create(:name => 'Ryan')
# jamie = Person.create(:name => 'Jamie')
#
# ryan == jamie
# # => false (Different name attribute and id)
#
# ryan_again = Person.new(:name => 'Ryan')
# ryan == ryan_again
# # => false (ryan_again is new?)
#
# ryans_clone = Person.create(:name => 'Ryan')
# ryan == ryans_clone
# # => false (Different id attributes)
#
# ryans_twin = Person.find(ryan.id)
# ryan == ryans_twin
# # => true
#
def ==(other)
# Identity, or same class with matching persisted ids; new records never
# compare equal to anything but themselves.
other.equal?(self) || (other.instance_of?(self.class) && !other.new? && other.id == id)
end
# Tests for equality (delegates to ==), keeping eql? consistent with #hash.
def eql?(other)
self == other
end
# Delegates to id in order to allow two resources of the same type and id to work with something like:
# [Person.find(1), Person.find(2)] & [Person.find(1), Person.find(4)] # => [Person.find(1)]
def hash
# Consistent with eql?: resources that compare equal share an id and hash.
id.hash
end
# Duplicate the current resource without saving it.
#
# ==== Examples
# my_invoice = Invoice.create(:customer => 'That Company')
# next_invoice = my_invoice.dup
# next_invoice.new?
# # => true
#
# next_invoice.save
# next_invoice == my_invoice
# # => false (different id attributes)
#
# my_invoice.customer
# # => That Company
# next_invoice.customer
# # => That Company
# Copies attributes and prefix options into a brand-new (unsaved) wrapper.
def dup
  resource = self.class.new
  resource.attributes = @attributes
  resource.prefix_options = @prefix_options
  resource
end
# A method to save (+POST+) or update (+PUT+) a resource. It delegates to +create+ if a new object,
# +update+ if it is existing. If the response to the save includes a body, it will be assumed that this body
# is XML for the final object as it looked after the save (which would include attributes like +created_at+
# that weren't part of the original submit).
#
# ==== Examples
# my_company = Company.new(:name => 'RoleModel Software', :owner => 'Ken Auer', :size => 2)
# my_company.new?
# # => true
# my_company.save
# # => POST /companies/ (create)
#
# my_company.new?
# # => false
# my_company.size = 10
# my_company.save
# # => PUT /companies/1 (update)
def save
# POST (create) for brand-new resources, PUT (update) for existing ones.
new? ? create : update
end
# Deletes the resource from the remote service.
#
# ==== Examples
# my_id = 3
# my_person = Person.find(my_id)
# my_person.destroy
# Person.find(my_id)
# # => 404 (Resource Not Found)
#
# new_person = Person.create(:name => 'James')
# new_id = new_person.id
# # => 7
# new_person.destroy
# Person.find(new_id)
# # => 404 (Resource Not Found)
def destroy
# Issues a DELETE against this resource's element path.
connection.delete(element_path, self.class.headers)
end
# Evaluates to <tt>true</tt> if this resource is not +new?+ and is
# found on the remote service. Using this method, you can check for
# resources that may have been deleted between the object's instantiation
# and actions on it.
#
# ==== Examples
# Person.create(:name => 'Theodore Roosevelt')
# that_guy = Person.find(:first)
# that_guy.exists?
# # => true
#
# that_lady = Person.new(:name => 'Paul Bean')
# that_lady.exists?
# # => false
#
# guys_id = that_guy.id
# Person.delete(guys_id)
# that_guy.exists?
# # => false
def exists?
# Unsaved resources cannot exist remotely; otherwise ask the class-level
# exists? with this record's id and prefix options.
!new? && self.class.exists?(to_param, :params => prefix_options)
end
# A method to convert the the resource to an XML string.
#
# ==== Options
# The +options+ parameter is handed off to the +to_xml+ method on each
# attribute, so it has the same options as the +to_xml+ methods in
# ActiveSupport.
#
# indent:: Set the indent level for the XML output (default is +2+).
# dasherize:: Boolean option to determine whether or not element names should
# replace underscores with dashes (default is <tt>false</tt>).
# skip_instruct:: Toggle skipping the +instruct!+ call on the XML builder
# that generates the XML declaration (default is <tt>false</tt>).
#
# ==== Examples
# my_group = SubsidiaryGroup.find(:first)
# my_group.to_xml
# # => <?xml version="1.0" encoding="UTF-8"?>
# # <subsidiary_group> [...] </subsidiary_group>
#
# my_group.to_xml(:dasherize => true)
# # => <?xml version="1.0" encoding="UTF-8"?>
# # <subsidiary-group> [...] </subsidiary-group>
#
# my_group.to_xml(:skip_instruct => true)
# # => <subsidiary_group> [...] </subsidiary_group>
def to_xml(options={})
# Root element defaults to the class's element_name; callers may override.
attributes.to_xml({:root => self.class.element_name}.merge(options))
end
# A method to reload the attributes of this object from the remote web service.
#
# ==== Examples
# my_branch = Branch.find(:first)
# my_branch.name
# # => Wislon Raod
#
# # Another client fixes the typo...
#
# my_branch.name
# # => Wislon Raod
# my_branch.reload
# my_branch.name
# # => Wilson Road
def reload
# Re-fetches the resource by id and replaces the attributes in place.
self.load(self.class.find(to_param, :params => @prefix_options).attributes)
end
# A method to manually load attributes from a hash. Recursively loads collections of
# resources. This method is called in initialize and create when a +Hash+ of attributes
# is provided.
#
# ==== Examples
# my_attrs = {:name => 'J&J Textiles', :industry => 'Cloth and textiles'}
#
# the_supplier = Supplier.find(:first)
# the_supplier.name
# # => 'J&M Textiles'
# the_supplier.load(my_attrs)
# the_supplier.name('J&J Textiles')
#
# # These two calls are the same as Supplier.new(my_attrs)
# my_supplier = Supplier.new
# my_supplier.load(my_attrs)
#
# # These three calls are the same as Supplier.create(my_attrs)
# your_supplier = Supplier.new
# your_supplier.load(my_attrs)
# your_supplier.save
def load(attributes)
raise ArgumentError, "expected an attributes Hash, got #{attributes.inspect}" unless attributes.is_a?(Hash)
@prefix_options, attributes = split_options(attributes)
attributes.each do |key, value|
@attributes[key.to_s] =
case value
when Array
# An array of hashes becomes a collection of nested resources.
resource = find_or_create_resource_for_collection(key)
value.map { |attrs| resource.new(attrs) }
when Hash
# A nested hash becomes a single nested resource.
resource = find_or_create_resource_for(key)
resource.new(value)
else
# dup guards against shared mutable state; primitives that cannot be
# duped fall back to the raw value via the inline rescue.
value.dup rescue value
end
end
self
end
# For checking respond_to? without searching the attributes (which is faster).
alias_method :respond_to_without_attributes?, :respond_to?
# A method to determine if an object responds to a message (e.g., a method call). In Active Resource, a +Person+ object with a
# +name+ attribute can answer <tt>true</tt> to <tt>my_person.respond_to?("name")</tt>, <tt>my_person.respond_to?("name=")</tt>, and
# <tt>my_person.respond_to?("name?")</tt>.
def respond_to?(method, include_priv = false)
method_name = method.to_s
if attributes.nil?
return super
elsif attributes.has_key?(method_name)
return true
elsif ['?','='].include?(method_name.last) && attributes.has_key?(method_name.first(-1))
# `last`/`first(-1)` are ActiveSupport String extensions: the last
# character and all-but-the-last-character respectively.
return true
end
# super must be called at the end of the method, because the inherited respond_to?
# would return true for generated readers, even if the attribute wasn't present
super
end
protected
def connection(refresh = false)
# Instances share the class-level connection.
self.class.connection(refresh)
end
# Update the resource on the remote service.
def update
# PUT the XML body; a non-empty response re-loads server-side changes.
returning connection.put(element_path(prefix_options), to_xml, self.class.headers) do |response|
load_attributes_from_response(response)
end
end
# Create (i.e., save to the remote service) the new resource.
def create
returning connection.post(collection_path, to_xml, self.class.headers) do |response|
# The service assigns the id; pull it from the Location header.
self.id = id_from_response(response)
load_attributes_from_response(response)
end
end
def load_attributes_from_response(response)
# Only parse and re-load when the server actually returned a body.
if response['Content-Length'] != "0" && response.body.strip.size > 0
load(self.class.format.decode(response.body))
end
end
# Takes a response from a typical create post and pulls the ID out of
# the Location header, e.g. ".../people/5.xml" => "5".
def id_from_response(response)
  location = response['Location']
  location[%r{/([^/]*?)(\.\w+)?$}, 1]
end
def element_path(options = nil)
# Falls back to the prefix options captured when this record was loaded.
self.class.element_path(to_param, options || prefix_options)
end
def collection_path(options = nil)
# Falls back to the prefix options captured when this record was loaded.
self.class.collection_path(options || prefix_options)
end
private
# Tries to find a resource for a given collection name; if it fails, then the resource is created
def find_or_create_resource_for_collection(name)
# e.g. :addresses resolves (or creates) an Address resource class.
find_or_create_resource_for(name.to_s.singularize)
end
# Tries to find a resource in a non-empty list of nested modules.
# Walks +module_names+ (outermost first, last entry being the resource
# itself) and returns the constant from the innermost enclosing module
# that defines it. Raises NameError when no enclosing module does.
def find_resource_in_modules(resource_name, module_names)
  receiver = Object
  enclosing = module_names[0, module_names.size - 1].map do |module_name|
    receiver = receiver.const_get(module_name)
  end
  namespace = enclosing.reverse.detect { |ns| ns.const_defined?(resource_name) }
  raise NameError unless namespace
  namespace.const_get(resource_name)
end
# Tries to find a resource for a given name; if it fails, then the resource is created
def find_or_create_resource_for(name)
resource_name = name.to_s.camelize
ancestors = self.class.name.split("::")
if ancestors.size > 1
find_resource_in_modules(resource_name, ancestors)
else
self.class.const_get(resource_name)
end
rescue NameError
# No matching constant anywhere: define an ARes subclass on the fly,
# inheriting this class's prefix and site.
resource = self.class.const_set(resource_name, Class.new(ActiveResource::Base))
resource.prefix = self.class.prefix
resource.site = self.class.site
resource
end
def split_options(options = {})
# send! reaches the private class-level split_options implementation.
self.class.send!(:split_options, options)
end
def method_missing(method_symbol, *arguments) #:nodoc:
method_name = method_symbol.to_s
# Dispatch on the trailing character: writer (=), predicate (?), reader.
# `last`/`first(-1)` are ActiveSupport String extensions.
case method_name.last
when "="
attributes[method_name.first(-1)] = arguments.first
when "?"
attributes[method_name.first(-1)]
else
attributes.has_key?(method_name) ? attributes[method_name] : super
end
end
end
end
Added information on how to set element_name in the case where the user has a name conflict with an existing model
require 'active_resource/connection'
require 'cgi'
require 'set'
module ActiveResource
# ActiveResource::Base is the main class for mapping RESTful resources as models in a Rails application.
#
# For an outline of what Active Resource is capable of, see link:files/vendor/rails/activeresource/README.html.
#
# == Automated mapping
#
# Active Resource objects represent your RESTful resources as manipulatable Ruby objects. To map resources
# to Ruby objects, Active Resource only needs a class name that corresponds to the resource name (e.g., the class
# Person maps to the resources people, very similarly to Active Record) and a +site+ value, which holds the
# URI of the resources.
#
# class Person < ActiveResource::Base
# self.site = "http://api.people.com:3000/"
# end
#
# Now the Person class is mapped to RESTful resources located at <tt>http://api.people.com:3000/people/</tt>, and
# you can now use Active Resource's lifecycles methods to manipulate resources. In the case where you already have
# an existing model with the same name as the desired RESTful resource you can set the +element_name+ value.
#
# class PersonResource < ActiveResource::Base
# self.site = "http://api.people.com:3000/"
# self.element_name = "person"
# end
#
#
# == Lifecycle methods
#
# Active Resource exposes methods for creating, finding, updating, and deleting resources
# from REST web services.
#
# ryan = Person.new(:first => 'Ryan', :last => 'Daigle')
# ryan.save #=> true
# ryan.id #=> 2
# Person.exists?(ryan.id) #=> true
# ryan.exists? #=> true
#
# ryan = Person.find(1)
# # => Resource holding our newly created Person object
#
# ryan.first = 'Rizzle'
# ryan.save #=> true
#
# ryan.destroy #=> true
#
# As you can see, these are very similar to Active Record's lifecycle methods for database records.
# You can read more about each of these methods in their respective documentation.
#
# === Custom REST methods
#
# Since simple CRUD/lifecycle methods can't accomplish every task, Active Resource also supports
# defining your own custom REST methods. To invoke them, Active Resource provides the <tt>get</tt>,
# <tt>post</tt>, <tt>put</tt> and <tt>delete</tt> methods where you can specify a custom REST method
# name to invoke.
#
# # POST to the custom 'register' REST method, i.e. POST /people/new/register.xml.
# Person.new(:name => 'Ryan').post(:register)
# # => { :id => 1, :name => 'Ryan', :position => 'Clerk' }
#
# # PUT an update by invoking the 'promote' REST method, i.e. PUT /people/1/promote.xml?position=Manager.
# Person.find(1).put(:promote, :position => 'Manager')
# # => { :id => 1, :name => 'Ryan', :position => 'Manager' }
#
# # GET all the positions available, i.e. GET /people/positions.xml.
# Person.get(:positions)
# # => [{:name => 'Manager'}, {:name => 'Clerk'}]
#
# # DELETE to 'fire' a person, i.e. DELETE /people/1/fire.xml.
# Person.find(1).delete(:fire)
#
# For more information on using custom REST methods, see the
# ActiveResource::CustomMethods documentation.
#
# == Validations
#
# You can validate resources client side by overriding validation methods in the base class.
#
# class Person < ActiveResource::Base
# self.site = "http://api.people.com:3000/"
# protected
# def validate
# errors.add("last", "has invalid characters") unless last =~ /[a-zA-Z]*/
# end
# end
#
# See the ActiveResource::Validations documentation for more information.
#
# == Authentication
#
# Many REST APIs will require authentication, usually in the form of basic
# HTTP authentication. Authentication can be specified by:
# * putting the credentials in the URL for the +site+ variable.
#
# class Person < ActiveResource::Base
# self.site = "http://ryan:password@api.people.com:3000/"
# end
#
# * defining +user+ and/or +password+ variables
#
# class Person < ActiveResource::Base
# self.site = "http://api.people.com:3000/"
# self.user = "ryan"
# self.password = "password"
# end
#
# For obvious security reasons, it is probably best if such services are available
# over HTTPS.
#
# Note: Some values cannot be provided in the URL passed to site, e.g. email addresses
# used as usernames. In those situations you should use the separate user and password options.
# == Errors & Validation
#
# Error handling and validation is handled in much the same manner as you're used to seeing in
# Active Record. Both the response code in the HTTP response and the body of the response are used to
# indicate that an error occurred.
#
# === Resource errors
#
# When a GET is requested for a resource that does not exist, the HTTP <tt>404</tt> (Resource Not Found)
# response code will be returned from the server which will raise an ActiveResource::ResourceNotFound
# exception.
#
# # GET http://api.people.com:3000/people/999.xml
# ryan = Person.find(999) # => Raises ActiveResource::ResourceNotFound
# # => Response = 404
#
# <tt>404</tt> is just one of the HTTP error response codes that ActiveResource will handle with its own exception. The
# following HTTP response codes will also result in these exceptions:
#
# 200 - 399:: Valid response, no exception
# 404:: ActiveResource::ResourceNotFound
# 409:: ActiveResource::ResourceConflict
# 422:: ActiveResource::ResourceInvalid (rescued by save as validation errors)
# 401 - 499:: ActiveResource::ClientError
# 500 - 599:: ActiveResource::ServerError
#
# These custom exceptions allow you to deal with resource errors more naturally and with more precision
# rather than returning a general HTTP error. For example:
#
# begin
# ryan = Person.find(my_id)
# rescue ActiveResource::ResourceNotFound
# redirect_to :action => 'not_found'
# rescue ActiveResource::ResourceConflict, ActiveResource::ResourceInvalid
# redirect_to :action => 'new'
# end
#
# === Validation errors
#
# Active Resource supports validations on resources and will return errors if any these validations fail
# (e.g., "First name can not be blank" and so on). These types of errors are denoted in the response by
# a response code of <tt>422</tt> and an XML representation of the validation errors. The save operation will
# then fail (with a <tt>false</tt> return value) and the validation errors can be accessed on the resource in question.
#
# ryan = Person.find(1)
# ryan.first #=> ''
# ryan.save #=> false
#
# # When
# # PUT http://api.people.com:3000/people/1.xml
# # is requested with invalid values, the response is:
# #
# # Response (422):
# # <errors type="array"><error>First cannot be empty</error></errors>
# #
#
# ryan.errors.invalid?(:first) #=> true
# ryan.errors.full_messages #=> ['First cannot be empty']
#
# Learn more about Active Resource's validation features in the ActiveResource::Validations documentation.
#
# === Timeouts
#
# Active Resource relies on HTTP to access RESTful APIs and as such is inherently susceptible to slow or
# unresponsive servers. In such cases, your Active Resource method calls could timeout. You can control the
# amount of time before Active Resource times out with the +timeout+ variable.
#
# class Person < ActiveResource::Base
# self.site = "http://api.people.com:3000/"
# self.timeout = 5
# end
#
# This sets the +timeout+ to 5 seconds. You can adjust the timeout to a value suitable for the RESTful API
# you are accessing. It is recommended to set this to a reasonably low value to allow your Active Resource
# clients (especially if you are using Active Resource in a Rails application) to fail-fast (see
# http://en.wikipedia.org/wiki/Fail-fast) rather than cause cascading failures that could incapacitate your
# server.
#
# Internally, Active Resource relies on Ruby's Net::HTTP library to make HTTP requests. Setting +timeout+
# sets the <tt>read_timeout</tt> of the internal Net::HTTP instance to the same value. The default
# <tt>read_timeout</tt> is 60 seconds on most Ruby implementations.
class Base
# The logger for diagnosing and tracing Active Resource calls.
cattr_accessor :logger
class << self
# Gets the URI of the REST resources to map for this class. The site variable is required
# for ActiveResource's mapping to work.
def site
# Not using superclass_delegating_reader because don't want subclasses to modify superclass instance
#
# With superclass_delegating_reader
#
# Parent.site = 'http://anonymous@test.com'
# Subclass.site # => 'http://anonymous@test.com'
# Subclass.site.user = 'david'
# Parent.site # => 'http://david@test.com'
#
# Without superclass_delegating_reader (expected behaviour)
#
# Parent.site = 'http://anonymous@test.com'
# Subclass.site # => 'http://anonymous@test.com'
# Subclass.site.user = 'david' # => TypeError: can't modify frozen object
#
if defined?(@site)
@site
elsif superclass != Object && superclass.site
superclass.site.dup.freeze
end
end
# Sets the URI of the REST resources to map for this class to the value in the +site+ argument.
# The site variable is required for ActiveResource's mapping to work.
def site=(site)
@connection = nil
if site.nil?
@site = nil
else
@site = create_site_uri_from(site)
@user = URI.decode(@site.user) if @site.user
@password = URI.decode(@site.password) if @site.password
end
end
# Gets the user for REST HTTP authentication
def user
# Not using superclass_delegating_reader. See +site+ for explanation
if defined?(@user)
@user
elsif superclass != Object && superclass.user
superclass.user.dup.freeze
end
end
# Sets the user for REST HTTP authentication
def user=(user)
@connection = nil
@user = user
end
# Gets the password for REST HTTP authentication
def password
# Not using superclass_delegating_reader. See +site+ for explanation
if defined?(@password)
@password
elsif superclass != Object && superclass.password
superclass.password.dup.freeze
end
end
# Sets the password for REST HTTP authentication
def password=(password)
@connection = nil
@password = password
end
# Sets the wire format from a mime-type symbol or a format module. Example:
#
#   Person.format = :json
#   Person.find(1) # => GET /people/1.json
#
#   Person.format = ActiveResource::Formats::XmlFormat
#   Person.find(1) # => GET /people/1.xml
#
# Default format is :xml.
def format=(mime_type_reference_or_format)
  format =
    if mime_type_reference_or_format.is_a?(Symbol)
      ActiveResource::Formats[mime_type_reference_or_format]
    else
      mime_type_reference_or_format
    end
  write_inheritable_attribute("format", format)
  connection.format = format if site
end
# Returns the current format, default is ActiveResource::Formats::XmlFormat.
def format # :nodoc:
  stored = read_inheritable_attribute("format")
  stored || ActiveResource::Formats[:xml]
end
# Sets the number of seconds after which requests to the REST API should time out.
def timeout=(timeout)
  @timeout = timeout
  @connection = nil # the timeout is baked into the connection, so rebuild it
end
# Gets the number of seconds after which requests to the REST API should time out.
# Unlike +site+/+user+/+password+ the inherited value is returned as-is
# (integers need no freezing).
def timeout
  return @timeout if defined?(@timeout)
  superclass.timeout if superclass != Object && superclass.timeout
end
# An instance of ActiveResource::Connection that is the base connection to
# the remote service. The +refresh+ parameter toggles whether or not the
# connection is rebuilt on this call (defaults to <tt>false</tt>).
# Classes that configured nothing themselves delegate to the superclass.
def connection(refresh = false)
  return superclass.connection unless defined?(@connection) || superclass == Object
  @connection = Connection.new(site, format) if refresh || @connection.nil?
  @connection.user = user if user
  @connection.password = password if password
  @connection.timeout = timeout if timeout
  @connection
end
# Lazily-initialized hash of extra HTTP headers sent with every request.
def headers
  @headers || (@headers = {})
end
# Do not include any modules in the default element name. This makes it easier to seclude ARes objects
# in a separate namespace without having to set element_name repeatedly.
attr_accessor_with_default(:element_name) { to_s.split("::").last.underscore } #:nodoc:
# Collection name defaults to the pluralized element name (e.g. "people" for Person).
attr_accessor_with_default(:collection_name) { element_name.pluralize } #:nodoc:
# Primary key attribute name; resources use 'id' unless overridden.
attr_accessor_with_default(:primary_key, 'id') #:nodoc:
# Gets the prefix for a resource's nested URL (e.g., <tt>prefix/collectionname/1.xml</tt>)
# This method is regenerated at runtime based on what the prefix is set to.
def prefix(options={})
  default = site.path
  default << '/' unless default[-1..-1] == '/'
  # generate the actual method based on the current site path
  # NOTE: the prefix= writer below redefines this very method via
  # instance_eval, so the recursive-looking call dispatches to the freshly
  # generated version, not back here.
  self.prefix = default
  prefix(options)
end
# An attribute reader for the source string for the resource path prefix. This
# method is regenerated at runtime based on what the prefix is set to.
def prefix_source
  prefix # generate #prefix and #prefix_source methods first
  # the prefix= writer has now replaced this method; re-dispatch to the
  # generated implementation
  prefix_source
end
# Sets the prefix for a resource's nested URL (e.g., <tt>prefix/collectionname/1.xml</tt>).
# Default value is <tt>site.path</tt>.
# Code-generates #prefix and #prefix_source so later calls are plain string
# interpolation instead of re-parsing the template.
def prefix=(value = '/')
  # Replace :placeholders with '#{embedded options[:lookups]}'
  prefix_call = value.gsub(/:\w+/) { |key| "\#{options[#{key}]}" }
  # Redefine the new methods.
  code = <<-end_code
    def prefix_source() "#{value}" end
    def prefix(options={}) "#{prefix_call}" end
  end_code
  silence_warnings { instance_eval code, __FILE__, __LINE__ }
rescue
  # Any error in eval (e.g. a malformed template) is logged with the
  # generated code for debugging, then re-raised.
  logger.error "Couldn't set prefix: #{$!}\n  #{code}"
  raise
end
# Legacy setter aliases kept for backwards compatibility with older APIs.
alias_method :set_prefix, :prefix= #:nodoc:
alias_method :set_element_name, :element_name= #:nodoc:
alias_method :set_collection_name, :collection_name= #:nodoc:
# Gets the element path for the given ID in +id+. If +query_options+ is
# omitted, the options are split out of +prefix_options+ automatically.
#
# ==== Options
# +prefix_options+:: A hash to add a prefix to the request for nested URL's (e.g., <tt>:account_id => 19</tt>
#                    would yield a URL like <tt>/accounts/19/purchases.xml</tt>).
# +query_options+:: A hash to add items to the query string for the request.
#
# ==== Examples
#   Post.element_path(1)
#   # => /posts/1.xml
#
#   Comment.element_path(1, :post_id => 5, :active => 1)
#   # => /posts/5/comments/1.xml?active=1
def element_path(id, prefix_options = {}, query_options = nil)
  prefix_options, query_options = split_options(prefix_options) if query_options.nil?
  base = "#{prefix(prefix_options)}#{collection_name}/#{id}"
  "#{base}.#{format.extension}#{query_string(query_options)}"
end
# Gets the collection path for the REST resources. If +query_options+ is
# omitted, the options are split out of +prefix_options+ automatically.
#
# ==== Options
# +prefix_options+:: A hash to add a prefix to the request for nested URL's (e.g., <tt>:account_id => 19</tt>
#                    would yield a URL like <tt>/accounts/19/purchases.xml</tt>).
# +query_options+:: A hash to add items to the query string for the request.
#
# ==== Examples
#   Post.collection_path
#   # => /posts.xml
#
#   Comment.collection_path(:post_id => 5, :active => 1)
#   # => /posts/5/comments.xml?active=1
def collection_path(prefix_options = {}, query_options = nil)
  prefix_options, query_options = split_options(prefix_options) if query_options.nil?
  base = "#{prefix(prefix_options)}#{collection_name}"
  "#{base}.#{format.extension}#{query_string(query_options)}"
end
# Legacy setter alias kept for backwards compatibility with older APIs.
alias_method :set_primary_key, :primary_key= #:nodoc:
# Create a new resource instance and request to the remote service
# that it be saved, making it equivalent to the following simultaneous calls:
#
#   ryan = Person.new(:first => 'ryan')
#   ryan.save
#
# The newly created resource is returned. If a failure has occurred an
# exception will be raised (see save). If the resource is invalid and
# has not been saved then valid? will return <tt>false</tt>,
# while new? will still return <tt>true</tt>.
#
# ==== Examples
#   dhh = Person.create(:name => 'David', :email => 'dhh@nospam.com', :enabled => true)
#   dhh.valid? # => true
#   dhh.new?   # => false
#
#   # We'll assume that there's a validation that requires the name attribute
#   that_guy = Person.create(:name => '', :email => 'thatguy@nospam.com', :enabled => true)
#   that_guy.valid? # => false
#   that_guy.new?   # => true
def create(attributes = {})
  resource = self.new(attributes)
  resource.save
  resource
end
# Core method for finding resources. Used similarly to Active Record's find method.
#
# ==== Arguments
# The first argument is the scope of the query:
#
# * <tt>:one</tt>:: Returns a single resource.
# * <tt>:first</tt>:: Returns the first resource found.
# * <tt>:all</tt>:: Returns every resource that matches the request.
# * anything else:: Treated as an id and fetched directly.
#
# ==== Options
# * +from+:: Sets the path or custom method that resources will be fetched from.
# * +params+:: Sets query and prefix (nested URL) parameters.
#
# ==== Examples
#   Person.find(1)                                  # => GET /people/1.xml
#   Person.find(:all)                               # => GET /people.xml
#   Person.find(:all, :params => { :title => "CEO" })   # => GET /people.xml?title=CEO
#   Person.find(:first, :from => :managers)         # => GET /people/managers.xml
#   Person.find(:one, :from => "/companies/1/manager.xml")
#   StreetAddress.find(1, :params => { :person_id => 1 })
#   # => GET /people/1/street_addresses/1.xml
def find(*arguments)
  scope = arguments.shift
  options = arguments.shift || {}
  if scope == :all
    find_every(options)
  elsif scope == :first
    find_every(options).first
  elsif scope == :one
    find_one(options)
  else
    find_single(scope, options)
  end
end
# Deletes the resource with the ID in the +id+ parameter via a DELETE request.
#
# ==== Options
# All options specify prefix and query parameters.
#
# ==== Examples
#   Event.delete(2)
#   # => DELETE /events/2
#
#   # Let's assume a request to events/5/cancel.xml
#   Event.delete(params[:id])
#   # => DELETE /events/5
def delete(id, options = {})
  path = element_path(id, options)
  connection.delete(path)
end
# Asserts the existence of a resource, returning <tt>true</tt> if the resource is found.
# Issues a HEAD request; a ResourceNotFound from the connection maps to false.
# Returns nil (not false) when +id+ is nil/false.
#
# ==== Examples
#   Note.create(:title => 'Hello, world.', :body => 'Nothing more for now...')
#   Note.exists?(1)
#   # => true
#
#   Note.exists?(1349)
#   # => false
def exists?(id, options = {})
  if id
    prefix_options, query_options = split_options(options[:params])
    path = element_path(id, prefix_options, query_options)
    response = connection.head(path, headers)
    # NOTE(review): assumes the connection returns a response whose +code+
    # is an Integer; a raw Net::HTTPResponse#code is a String and would make
    # this comparison always false -- confirm against Connection#head.
    response.code == 200
  end
  # id && !find_single(id, options).nil?
rescue ActiveResource::ResourceNotFound
  false
end
private
  # Find every resource. Dispatches on options[:from]:
  # Symbol -> custom collection method, String -> literal path,
  # otherwise the default collection path built from prefix/query options.
  def find_every(options)
    case from = options[:from]
    when Symbol
      instantiate_collection(get(from, options[:params]))
    when String
      path = "#{from}#{query_string(options[:params])}"
      # a nil body from the connection is treated as an empty collection
      instantiate_collection(connection.get(path, headers) || [])
    else
      prefix_options, query_options = split_options(options[:params])
      path = collection_path(prefix_options, query_options)
      instantiate_collection( (connection.get(path, headers) || []), prefix_options )
    end
  end
# Find a single resource from a one-off URL.
# Returns nil when options[:from] is neither a Symbol nor a String.
def find_one(options)
  case from = options[:from]
  when Symbol
    instantiate_record(get(from, options[:params]))
  when String
    path = "#{from}#{query_string(options[:params])}"
    instantiate_record(connection.get(path, headers))
  end
end
# Find a single resource from the default URL; +scope+ is the resource id.
def find_single(scope, options)
  prefix_options, query_options = split_options(options[:params])
  path = element_path(scope, prefix_options, query_options)
  instantiate_record(connection.get(path, headers), prefix_options)
end
# Destructively maps each raw record in +collection+ to a resource instance
# (note: mutates and returns the same array).
def instantiate_collection(collection, prefix_options = {})
  collection.map! { |record| instantiate_record(record, prefix_options) }
end
# Builds a single resource instance from +record+ and tags it with the
# prefix options so nested paths can be reconstructed later.
def instantiate_record(record, prefix_options = {})
  resource = new(record)
  resource.prefix_options = prefix_options
  resource
end
# Accepts a URI or a string and returns a site URI; a URI argument is
# duplicated so the caller's object is never shared or mutated.
def create_site_uri_from(site)
  return site.dup if site.is_a?(URI)
  URI.parse(site)
end
# Memoized Set of the :placeholder parameter names embedded in the
# prefix template (e.g. "/accounts/:account_id/" -> #{:account_id}).
def prefix_parameters
  @prefix_parameters ||= prefix_source.scan(/:\w+/).map { |key| key.sub(':', '').to_sym }.to_set
end
# Builds the "?key=value" query string for the request; nil when there is
# nothing to append.
def query_string(options)
  return if options.nil? || options.empty?
  "?#{options.to_query}"
end
# Splits an option hash into two hashes: the keys that appear in the prefix
# template go into prefix_options, everything else into query_options.
# Blank keys are skipped; all keys are symbolized.
def split_options(options = {})
  prefix_options = {}
  query_options  = {}
  (options || {}).each do |key, value|
    next if key.blank?
    bucket = prefix_parameters.include?(key.to_sym) ? prefix_options : query_options
    bucket[key.to_sym] = value
  end
  [prefix_options, query_options]
end
end
# Hash of the resource's attribute values, keyed by attribute name (String).
attr_accessor :attributes #:nodoc:
# Options extracted for the resource path prefix (e.g. :account_id => 19).
attr_accessor :prefix_options #:nodoc:
# Constructor method for new resources; the optional +attributes+ parameter takes a +Hash+
# of attributes for the new resource.
#
# ==== Examples
#   my_course = Course.new
#   my_course.name = "Western Civilization"
#   my_course.save
#
#   my_other_course = Course.new(:name => "Philosophy: Reason and Being", :lecturer => "Ralph Cling")
#   my_other_course.save
def initialize(attributes = {})
  @attributes = {}
  @prefix_options = {}
  # #load splits prefix options out of the hash and coerces nested
  # hashes/arrays into resource objects
  load(attributes)
end
# Returns a clone of the resource that hasn't been assigned an id yet and
# is treated as a new resource.
#
#   ryan = Person.find(1)
#   not_ryan = ryan.clone
#   not_ryan.new? # => true
#
# Any active resource member attributes will NOT be cloned, though all other
# attributes are. This is to prevent the conflict between any prefix_options
# that refer to the original parent resource and the newly cloned parent
# resource that does not exist.
#
#   ryan = Person.find(1)
#   ryan.address = StreetAddress.find(1, :person_id => ryan.id)
#   ryan.hash = {:not => "an ARes instance"}
#
#   not_ryan = ryan.clone
#   not_ryan.new?    # => true
#   not_ryan.address # => NoMethodError
#   not_ryan.hash    # => {:not => "an ARes instance"}
def clone
  # Clone all attributes except the pk and any nested ARes
  cloned = attributes.reject {|k,v| k == self.class.primary_key || v.is_a?(ActiveResource::Base)}.inject({}) do |attrs, (k, v)|
    attrs[k] = v.clone
    attrs
  end
  # Form the new resource - bypass initialize of resource with 'new' as that will call 'load' which
  # attempts to convert hashes into member objects and arrays into collections of objects. We want
  # the raw objects to be cloned so we bypass load by directly setting the attributes hash.
  resource = self.class.new({})
  resource.prefix_options = self.prefix_options
  # instance_variable_set is private here, hence the send
  resource.send :instance_variable_set, '@attributes', cloned
  resource
end
# A method to determine if the resource is a new object (i.e., it has not been POSTed to the remote service yet).
# A resource is considered new exactly when its primary-key attribute is nil.
#
# ==== Examples
#   not_new = Computer.create(:brand => 'Apple', :make => 'MacBook', :vendor => 'MacMall')
#   not_new.new?
#   # => false
#
#   is_new = Computer.new(:brand => 'IBM', :make => 'Thinkpad', :vendor => 'IBM')
#   is_new.new?
#   # => true
#
#   is_new.save
#   is_new.new?
#   # => false
def new?
  id.nil?
end
# Get the +id+ attribute of the resource (looked up via the class's
# configured primary_key).
def id
  attributes[self.class.primary_key]
end
# Set the +id+ attribute of the resource (stored under the class's
# configured primary_key).
def id=(id)
  attributes[self.class.primary_key] = id
end
# Allows ActiveResource objects to be used as parameters in ActionPack URL generation.
# Returns nil for unsaved resources (id is nil).
def to_param
  id && id.to_s
end
# Test for equality. Resources are equal if and only if +other+ is the same
# object, or is an instance of the same class, is not +new?+, and has the
# same +id+.
#
# ==== Examples
#   ryan = Person.create(:name => 'Ryan')
#   jamie = Person.create(:name => 'Jamie')
#   ryan == jamie
#   # => false (Different name attribute and id)
#
#   ryan_again = Person.new(:name => 'Ryan')
#   ryan == ryan_again
#   # => false (ryan_again is new?)
#
#   ryans_twin = Person.find(ryan.id)
#   ryan == ryans_twin
#   # => true
def ==(other)
  return true if other.equal?(self)
  other.instance_of?(self.class) && !other.new? && other.id == id
end
# Tests for equality (delegates to ==).
def eql?(other)
  self == other
end
# Delegates to id in order to allow two resources of the same type and id to work with something like:
#   [Person.find(1), Person.find(2)] & [Person.find(1), Person.find(4)] # => [Person.find(1)]
def hash
  id.hash
end
# Duplicate the current resource without saving it. The duplicate shares the
# same attributes hash and prefix options but, having no persisted id of its
# own once saved, gets a fresh identity on the remote service.
#
# ==== Examples
#   my_invoice = Invoice.create(:customer => 'That Company')
#   next_invoice = my_invoice.dup
#   next_invoice.new?
#   # => true
#
#   next_invoice.save
#   next_invoice == my_invoice
#   # => false (different id attributes)
def dup
  copy = self.class.new
  copy.attributes = @attributes
  copy.prefix_options = @prefix_options
  copy
end
# A method to save (+POST+) or update (+PUT+) a resource. It delegates to +create+ if a new object,
# +update+ if it is existing. If the response to the save includes a body, it will be assumed that this body
# is XML for the final object as it looked after the save (which would include attributes like +created_at+
# that weren't part of the original submit).
#
# ==== Examples
#   my_company = Company.new(:name => 'RoleModel Software', :owner => 'Ken Auer', :size => 2)
#   my_company.save
#   # => POST /companies/ (create)
#
#   my_company.size = 10
#   my_company.save
#   # => PUT /companies/1 (update)
def save
  new? ? create : update
end
# Deletes the resource from the remote service via DELETE on the element path.
# The local object is left untouched.
#
# ==== Examples
#   my_person = Person.find(my_id)
#   my_person.destroy
#   Person.find(my_id)
#   # => 404 (Resource Not Found)
def destroy
  connection.delete(element_path, self.class.headers)
end
# Evaluates to <tt>true</tt> if this resource is not +new?+ and is
# found on the remote service. Using this method, you can check for
# resources that may have been deleted between the object's instantiation
# and actions on it. Delegates to the class-level exists? with this
# resource's prefix options.
#
# ==== Examples
#   that_guy = Person.find(:first)
#   that_guy.exists?
#   # => true
#
#   that_lady = Person.new(:name => 'Paul Bean')
#   that_lady.exists?
#   # => false
def exists?
  !new? && self.class.exists?(to_param, :params => prefix_options)
end
# A method to convert the resource to an XML string, rooted at the class's
# element name.
#
# ==== Options
# The +options+ parameter is handed off to the +to_xml+ method on each
# attribute, so it has the same options as the +to_xml+ methods in
# ActiveSupport.
#
# indent:: Set the indent level for the XML output (default is +2+).
# dasherize:: Boolean option to determine whether or not element names should
#             replace underscores with dashes (default is <tt>false</tt>).
# skip_instruct:: Toggle skipping the +instruct!+ call on the XML builder
#                 that generates the XML declaration (default is <tt>false</tt>).
#
# ==== Examples
#   my_group.to_xml
#   # => <?xml version="1.0" encoding="UTF-8"?>
#   #    <subsidiary_group> [...] </subsidiary_group>
#
#   my_group.to_xml(:dasherize => true)
#   # => <subsidiary-group> [...] </subsidiary-group>
def to_xml(options={})
  defaults = { :root => self.class.element_name }
  attributes.to_xml(defaults.merge(options))
end
# A method to reload the attributes of this object from the remote web service,
# re-fetching by id with the current prefix options.
#
# ==== Examples
#   my_branch = Branch.find(:first)
#   my_branch.name
#   # => Wislon Raod
#
#   # Another client fixes the typo...
#
#   my_branch.reload
#   my_branch.name
#   # => Wilson Road
def reload
  self.load(self.class.find(to_param, :params => @prefix_options).attributes)
end
# A method to manually load attributes from a hash. Recursively loads collections of
# resources. This method is called in initialize and create when a +Hash+ of attributes
# is provided.
#
# ==== Examples
#   my_attrs = {:name => 'J&J Textiles', :industry => 'Cloth and textiles'}
#
#   # These two calls are the same as Supplier.new(my_attrs)
#   my_supplier = Supplier.new
#   my_supplier.load(my_attrs)
#
#   # These three calls are the same as Supplier.create(my_attrs)
#   your_supplier = Supplier.new
#   your_supplier.load(my_attrs)
#   your_supplier.save
def load(attributes)
  raise ArgumentError, "expected an attributes Hash, got #{attributes.inspect}" unless attributes.is_a?(Hash)
  # keys matching the prefix template are pulled out of the attributes
  @prefix_options, attributes = split_options(attributes)
  attributes.each do |key, value|
    @attributes[key.to_s] =
      case value
      when Array
        # arrays become collections of nested resource objects
        resource = find_or_create_resource_for_collection(key)
        value.map { |attrs| resource.new(attrs) }
      when Hash
        # hashes become single nested resource objects
        resource = find_or_create_resource_for(key)
        resource.new(value)
      else
        # scalars are dup'ed defensively; immutables (Fixnum, nil, ...)
        # raise on dup, hence the inline rescue falling back to the value
        value.dup rescue value
      end
  end
  self
end
# For checking respond_to? without searching the attributes (which is faster).
alias_method :respond_to_without_attributes?, :respond_to?
# A method to determine if an object responds to a message (e.g., a method call). In Active Resource, a +Person+ object with a
# +name+ attribute can answer <tt>true</tt> to <tt>my_person.respond_to?("name")</tt>, <tt>my_person.respond_to?("name?")</tt>, and
# <tt>my_person.respond_to?("name?")</tt>.
def respond_to?(method, include_priv = false)
  method_name = method.to_s
  if attributes.nil?
    return super
  elsif attributes.has_key?(method_name)
    return true
  # NOTE(review): String#last and String#first(-1) here appear to rely on
  # ActiveSupport's String core extensions (trailing char / all-but-last
  # char) -- confirm, as plain Ruby strings do not define them this way.
  elsif ['?','='].include?(method_name.last) && attributes.has_key?(method_name.first(-1))
    return true
  end
  # super must be called at the end of the method, because the inherited respond_to?
  # would return true for generated readers, even if the attribute wasn't present
  super
end
protected
  # Instance-level shortcut to the class-level connection.
  def connection(refresh = false)
    self.class.connection(refresh)
  end
# Update the resource on the remote service (PUT), loading any attributes
# the service echoes back. Returns the raw response.
def update
  response = connection.put(element_path(prefix_options), to_xml, self.class.headers)
  load_attributes_from_response(response)
  response
end
# Create (i.e., save to the remote service) the new resource (POST). The id
# is extracted from the Location header; any returned attributes are loaded.
# Returns the raw response.
def create
  response = connection.post(collection_path, to_xml, self.class.headers)
  self.id = id_from_response(response)
  load_attributes_from_response(response)
  response
end
# Loads attributes from a response body when one was actually returned
# (non-zero Content-Length and a non-blank body).
def load_attributes_from_response(response)
  if response['Content-Length'] != "0" && response.body.strip.size > 0
    load(self.class.format.decode(response.body))
  end
end
# Takes a response from a typical create POST and pulls the ID out of the
# Location header's last path segment (any file extension is ignored).
def id_from_response(response)
  location = response['Location']
  location[/\/([^\/]*?)(\.\w+)?$/, 1]
end
# Path for this element; defaults to this resource's own prefix options.
def element_path(options = nil)
  self.class.element_path(to_param, options || prefix_options)
end
# Path for the collection; defaults to this resource's own prefix options.
def collection_path(options = nil)
  self.class.collection_path(options || prefix_options)
end
private
  # Tries to find a resource for a given collection name; if it fails, then the resource is created.
  # Singularizes the name and delegates to find_or_create_resource_for.
  def find_or_create_resource_for_collection(name)
    find_or_create_resource_for(name.to_s.singularize)
  end
# Tries to find a resource in a non empty list of nested modules.
# Raises a NameError if it was not found in any of the given nested modules.
def find_resource_in_modules(resource_name, module_names)
  receiver = Object
  # walk all but the last name (the class itself), collecting each
  # intermediate namespace constant
  namespaces = module_names[0, module_names.size-1].map do |module_name|
    receiver = receiver.const_get(module_name)
  end
  # search innermost namespace first
  if namespace = namespaces.reverse.detect { |ns| ns.const_defined?(resource_name) }
    return namespace.const_get(resource_name)
  else
    raise NameError
  end
end
# Tries to find a resource class for a given name; if it fails, then the
# resource class is created on the fly (inheriting this class's prefix and
# site) so nested hashes can always be instantiated.
def find_or_create_resource_for(name)
  resource_name = name.to_s.camelize
  ancestors = self.class.name.split("::")
  if ancestors.size > 1
    # namespaced class: search the enclosing modules innermost-first
    find_resource_in_modules(resource_name, ancestors)
  else
    self.class.const_get(resource_name)
  end
rescue NameError
  # not found anywhere: define a new ActiveResource subclass under this class
  resource = self.class.const_set(resource_name, Class.new(ActiveResource::Base))
  resource.prefix = self.class.prefix
  resource.site = self.class.site
  resource
end
# Delegates to the private class-level split_options.
# NOTE(review): #send! is the old Rails 2.x alias for invoking private
# methods; on modern Rubies this would be plain #send -- confirm target.
def split_options(options = {})
  self.class.send!(:split_options, options)
end
# Dynamic attribute access: "name=" writes, "name?" reads (truthiness is up
# to the caller), bare "name" reads a known attribute or falls through to
# super (NoMethodError).
# NOTE(review): String#last / String#first(-1) rely on ActiveSupport's
# String extensions; respond_to_missing? is not overridden to match.
def method_missing(method_symbol, *arguments) #:nodoc:
  method_name = method_symbol.to_s
  case method_name.last
  when "="
    attributes[method_name.first(-1)] = arguments.first
  when "?"
    attributes[method_name.first(-1)]
  else
    attributes.has_key?(method_name) ? attributes[method_name] : super
  end
end
end
end
|
module ActsAsLoggable
  # Polymorphic log entry: records an action performed by a +logger+ on a
  # +loggable+ over a date range, with a virtual-attribute mechanism for
  # saving a copy of the log against a second target in one request.
  class Log < ::ActiveRecord::Base
    belongs_to :loggable, :polymorphic => true
    belongs_to :logger, :polymorphic => true
    belongs_to :log_action, :polymorphic => true
    #quickfix for Netzke
    # NOTE(review): these concrete-class aliases all bind the same
    # log_action_id column so Netzke grids can reference them directly --
    # confirm they are still required.
    belongs_to :user_action, :foreign_key => 'log_action_id'
    belongs_to :bike_action, :foreign_key => 'log_action_id'
    attr_accessible :loggable_type, :loggable_id, :logger_type, :logger_id, :context,
      :start_date, :end_date, :description, :log_action_id, :log_action_type,
      :copy_log, :copy_type, :copy_id, :copy_action_id, :copy_action_type #virtual attributes
    #virtual attributes for copy convenience callback
    attr_accessor :copy_log, :copy_type, :copy_id, :copy_action_id, :copy_action_type
    #provide a convenience copy ability
    before_save :check_copy_log
    ### ASSOCIATIONS:
    #has_many :taggings, :dependent => :destroy, :class_name => 'ActsAsTaggableOn::Tagging'
    ### VALIDATIONS:
    validates_presence_of :log_action_id
    validates_presence_of :start_date
    validates_presence_of :end_date
    validates_length_of :description, :maximum => 255
    # NOTE(review): julianday() is SQLite-specific SQL; this scope will fail
    # on other databases -- confirm the deployment target.
    scope :sort_by_duration, lambda { |direction| order("(julianday(end_date) - julianday(start_date)) #{direction}") }

    # before_save hook: when the virtual copy_log flag is set, persists a
    # duplicate of this log aimed at the copy_* target. The duplicate is
    # saved with :copy_log => false, which stops the callback recursing.
    def check_copy_log
      if self.copy_log == true or self.copy_log == "on" #needed to check for on because of extJS fieldsets
        log_copy = self.dup
        log_copy.update_attributes( { :loggable_type => self.copy_type,
          :loggable_id => self.copy_id,
          :log_action_id => self.copy_action_id,
          :log_action_type => self.copy_action_type,
          :copy_log => false} )
      end
    end

    # Returns the polymorphic log_action association target.
    def action
      #"ActsAsLoggable::#{self.loggable_type}Action".constantize.find_by_id(self.action_id)
      self.log_action
    end

    # URL path to the logs collection nested under the loggable record.
    def loggable_path
      "/#{self.loggable_type.pluralize.downcase}/#{self.loggable_id}/logs"
    end
  end
end
Add a hack for the velocipede Netzke integration
module ActsAsLoggable
  # Polymorphic log entry: records an action performed by a +logger+ on a
  # +loggable+ over a date range, with a virtual-attribute mechanism for
  # saving a copy of the log against a second target in one request.
  class Log < ::ActiveRecord::Base
    belongs_to :loggable, :polymorphic => true
    belongs_to :logger, :polymorphic => true
    belongs_to :log_action, :polymorphic => true
    #quickfix for Netzke
    # NOTE(review): these concrete-class aliases all bind the same
    # log_action_id column so Netzke grids can reference them directly --
    # confirm they are still required.
    belongs_to :user_action, :foreign_key => 'log_action_id'
    belongs_to :bike_action, :foreign_key => 'log_action_id'
    belongs_to :transaction_action, :foreign_key => 'log_action_id'
    attr_accessible :loggable_type, :loggable_id, :logger_type, :logger_id, :context,
      :start_date, :end_date, :description, :log_action_id, :log_action_type,
      :copy_log, :copy_type, :copy_id, :copy_action_id, :copy_action_type #virtual attributes
    #virtual attributes for copy convenience callback
    attr_accessor :copy_log, :copy_type, :copy_id, :copy_action_id, :copy_action_type
    #provide a convenience copy ability
    before_save :check_copy_log
    ### ASSOCIATIONS:
    #has_many :taggings, :dependent => :destroy, :class_name => 'ActsAsTaggableOn::Tagging'
    ### VALIDATIONS:
    validates_presence_of :log_action_id
    validates_presence_of :start_date
    validates_presence_of :end_date
    validates_length_of :description, :maximum => 255
    # NOTE(review): julianday() is SQLite-specific SQL; this scope will fail
    # on other databases -- confirm the deployment target.
    scope :sort_by_duration, lambda { |direction| order("(julianday(end_date) - julianday(start_date)) #{direction}") }

    # before_save hook: when the virtual copy_log flag is set, persists a
    # duplicate of this log aimed at the copy_* target. The duplicate is
    # saved with :copy_log => false, which stops the callback recursing.
    def check_copy_log
      if self.copy_log == true or self.copy_log == "on" #needed to check for on because of extJS fieldsets
        log_copy = self.dup
        log_copy.update_attributes( { :loggable_type => self.copy_type,
          :loggable_id => self.copy_id,
          :log_action_id => self.copy_action_id,
          :log_action_type => self.copy_action_type,
          :copy_log => false} )
      end
    end

    # Returns the polymorphic log_action association target.
    def action
      #"ActsAsLoggable::#{self.loggable_type}Action".constantize.find_by_id(self.action_id)
      self.log_action
    end

    # URL path to the logs collection nested under the loggable record.
    def loggable_path
      "/#{self.loggable_type.pluralize.downcase}/#{self.loggable_id}/logs"
    end
  end
end
|
module AFMotion
  # Small configuration DSL evaluated by AFMotion::Client.build against a
  # freshly created AFHTTPClient.
  class ClientDSL
    def initialize(client)
      @client = client
    end

    # Adds a default header sent with every request.
    def header(header, value)
      @client.setDefaultHeader(header, value: value)
    end

    # Configures HTTP authorization (username/password or token hash).
    def authorization(options = {})
      @client.authorization = options
    end

    # Registers the HTTP operation class, accepting either the class itself
    # or a shorthand symbol/string (:json, :plist, :xml, :http).
    def operation(operation)
      klass = operation
      if operation.is_a?(Symbol) || operation.is_a?(String)
        klass =
          case operation.to_s
          when "json"  then AFJSONRequestOperation
          when "plist" then AFPropertyListRequestOperation
          when "xml"   then AFXMLRequestOperation
          when "http"  then AFHTTPRequestOperation
          else raise "Not a valid operation: #{operation.inspect}"
          end
      end
      @client.registerHTTPOperationClass(klass)
    end

    # Sets the parameter encoding, accepting either an AFNetworking encoding
    # constant or a shorthand symbol/string (:json, :plist, :form). Unknown
    # shorthands fall back to form encoding with a console warning.
    def parameter_encoding(encoding)
      enc = encoding
      if encoding.is_a?(Symbol) || encoding.is_a?(String)
        enc =
          case encoding.to_s
          when "json"  then AFJSONParameterEncoding
          when "plist" then AFPropertyListParameterEncoding
          when "form"  then AFFormURLParameterEncoding
          else
            p "Not a valid parameter encoding: #{encoding.inspect}; using AFFormURLParameterEncoding"
            AFFormURLParameterEncoding
          end
      end
      @client.parameterEncoding = enc
    end
  end
end
module AFMotion
  # Factory for AFHTTPClient instances, configurable via the ClientDSL block.
  class Client
    class << self
      # Globally shared client instance.
      attr_accessor :shared

      # Returns an instance of AFHTTPClient for +base_url+, optionally
      # configured through a ClientDSL block.
      def build(base_url, &block)
        client = AFHTTPClient.clientWithBaseURL(base_url.to_url)
        AFMotion::ClientDSL.new(client).instance_eval(&block) if block
        client
      end

      # Builds the client and stores it as AFMotion::Client.shared.
      # TODO: Make sure this only happens once (dispatch_once not available)
      def build_shared(base_url, &block)
        self.shared = build(base_url, &block)
      end
    end
  end
end
# RubyMotion-side extensions to AFNetworking's AFHTTPClient: defines one
# method per HTTP verb that wraps an operation in AFMotion callbacks, plus
# multipart and authorization conveniences.
class AFHTTPClient
  AFMotion::HTTP_METHODS.each do |method|
    # EX client.get('my/resource.json')
    define_method "#{method}", -> (path, parameters = {}, &callback) do
      if @multipart
        # Multipart branch (armed by a preceding #multipart call). The
        # caller's block arity selects what it receives:
        #   1 -> result only; 2 -> result + form data;
        #   3 -> + overall progress fraction; 5 -> + raw byte counts.
        multipart_callback = callback.arity == 1 ? nil : lambda { |formData|
          callback.call(nil, formData)
        }
        upload_callback = callback.arity > 2 ? lambda { |bytes_written_now, total_bytes_written, total_bytes_expect|
          case callback.arity
          when 3
            callback.call(nil, nil, total_bytes_written.to_f / total_bytes_expect.to_f)
          when 5
            callback.call(nil, nil, bytes_written_now, total_bytes_written, total_bytes_expect)
          end
        } : nil
        request = self.multipartFormRequestWithMethod(method, path: path,
          parameters: parameters,constructingBodyWithBlock: multipart_callback)
        operation = self.HTTPRequestOperationWithRequest(request,
          success: lambda {|operation, responseObject|
            result = AFMotion::HTTPResult.new(operation, responseObject, nil)
            # pad trailing arguments with nils to match the declared arity
            case callback.arity
            when 1
              callback.call(result)
            when 2
              callback.call(result, nil)
            when 3
              callback.call(result, nil, nil)
            when 5
              callback.call(result, nil, nil, nil, nil)
            end
          }, failure: lambda {|operation, error|
            result = AFMotion::HTTPResult.new(operation, nil, error)
            case callback.arity
            when 1
              callback.call(result)
            when 2
              callback.call(result, nil)
            when 3
              callback.call(result, nil, nil)
            when 5
              callback.call(result, nil, nil, nil, nil)
            end
          })
        if upload_callback
          operation.setUploadProgressBlock(upload_callback)
        end
        self.enqueueHTTPRequestOperation(operation)
        # disarm one-shot multipart mode and remember the last operation
        @multipart = nil
        @operation = operation
      else
        # Plain branch: single-argument callback receiving an HTTPResult.
        request = self.requestWithMethod(method.upcase, path:path, parameters:parameters)
        @operation = self.HTTPRequestOperationWithRequest(request, success: lambda {|operation, responseObject|
          result = AFMotion::HTTPResult.new(operation, responseObject, nil)
          callback.call(result)
        }, failure: lambda {|operation, error|
          result = AFMotion::HTTPResult.new(operation, nil, error)
          callback.call(result)
        })
        self.enqueueHTTPRequestOperation(@operation)
        @operation
      end
    end
  end

  # Arms one-shot multipart mode for the next verb call; returns self so it
  # chains: client.multipart.post(...).
  def multipart
    @multipart = true
    self
  end

  # options can be
  #  - {username: ___, password: ____}
  # or
  #  - {token: ___ }
  # nil clears any existing authorization header.
  def authorization=(options = {})
    if options.nil?
      clearAuthorizationHeader
    elsif options[:username] && options[:password]
      setAuthorizationHeaderWithUsername(options[:username], password: options[:password])
    elsif options[:token]
      setAuthorizationHeaderWithToken(options[:token])
    else
      raise "Not a valid authorization hash: #{options.inspect}"
    end
  end

  private
  # To force RubyMotion pre-compilation of these methods
  def dummy
    self.getPath("", parameters: nil, success: nil, failure: nil)
    self.postPath("", parameters: nil, success: nil, failure: nil)
    self.putPath("", parameters: nil, success: nil, failure: nil)
    self.deletePath("", parameters: nil, success: nil, failure: nil)
    self.patchPath("", parameters: nil, success: nil, failure: nil)
  end
end
Split up the operation creation logic into different methods
module AFMotion
  # Small configuration DSL evaluated by AFMotion::Client.build against a
  # freshly created AFHTTPClient.
  class ClientDSL
    def initialize(client)
      @client = client
    end

    # Adds a default header sent with every request.
    def header(header, value)
      @client.setDefaultHeader(header, value: value)
    end

    # Configures HTTP authorization (username/password or token hash).
    def authorization(options = {})
      @client.authorization = options
    end

    # Registers the HTTP operation class; accepts the class itself or a
    # shorthand symbol/string (:json, :plist, :xml, :http).
    def operation(operation)
      klass = operation
      if operation.is_a?(Symbol) or operation.is_a?(String)
        klass = case operation.to_s
        when "json"
          AFJSONRequestOperation
        when "plist"
          AFPropertyListRequestOperation
        when "xml"
          AFXMLRequestOperation
        when "http"
          AFHTTPRequestOperation
        else
          raise "Not a valid operation: #{operation.inspect}"
        end
      end
      @client.registerHTTPOperationClass(klass)
    end

    # Sets the parameter encoding; accepts an AFNetworking constant or a
    # shorthand symbol/string (:json, :plist, :form). Unknown shorthands
    # warn and fall back to form encoding.
    def parameter_encoding(encoding)
      enc = encoding
      if encoding.is_a?(Symbol) or encoding.is_a?(String)
        enc = case encoding.to_s
        when "json"
          AFJSONParameterEncoding
        when "plist"
          AFPropertyListParameterEncoding
        when "form"
          AFFormURLParameterEncoding
        else
          p "Not a valid parameter encoding: #{encoding.inspect}; using AFFormURLParameterEncoding"
          AFFormURLParameterEncoding
        end
      end
      @client.parameterEncoding = enc
    end
  end
end
module AFMotion
  # Factory for AFHTTPClient instances, configurable via the ClientDSL block.
  class Client
    class << self
      # Globally shared client instance.
      attr_accessor :shared

      # Returns an instance of AFHTTPClient
      def build(base_url, &block)
        client = AFHTTPClient.clientWithBaseURL(base_url.to_url)
        if block
          dsl = AFMotion::ClientDSL.new(client)
          dsl.instance_eval(&block)
        end
        client
      end

      # Sets AFMotion::Client.shared as the built client
      # TODO: Make sure this only happens once (dispatch_once not available)
      def build_shared(base_url, &block)
        self.shared = self.build(base_url, &block)
      end
    end
  end
end
# Reopens AFNetworking's AFHTTPClient (RubyMotion) to add block-based HTTP
# verb methods plus the helpers that build the underlying request operations.
class AFHTTPClient
# Define one wrapper per HTTP verb. Each wrapper builds an operation for the
# request, enqueues it on this client, and returns the operation. If
# #multipart was called beforehand, a multipart operation is built instead
# and the one-shot @multipart flag is cleared.
AFMotion::HTTP_METHODS.each do |method|
# EX client.get('my/resource.json')
define_method "#{method}", -> (path, parameters = {}, &callback) do
if @multipart
@operation = create_multipart_operation(method, path, parameters, &callback)
self.enqueueHTTPRequestOperation(@operation)
@multipart = nil
else
@operation = create_operation(method, path, parameters, &callback)
self.enqueueHTTPRequestOperation(@operation)
end
# Return the already-enqueued operation to the caller.
@operation
end
end
# Builds a multipart form request operation.
#
# The callback's arity selects the calling convention (see the case
# statements below):
#   1 -> callback.call(result)
#   2 -> callback.call(result, form_data)
#   3 -> callback.call(result, form_data, progress_fraction)
#   5 -> callback.call(result, form_data, bytes_written_now,
#                      total_bytes_written, total_bytes_expected)
# In each invocation only one "slot" carries data; the other positions are
# filled with nil.
def create_multipart_operation(method, path, parameters = {}, &callback)
# Arity 1 callers don't want the form-data object, so no body-construction
# block is supplied in that case.
multipart_callback = callback.arity == 1 ? nil : lambda { |formData|
callback.call(nil, formData)
}
# Upload progress is only wired up for arity 3 (fraction of bytes written)
# and arity 5 (raw byte counts).
upload_callback = callback.arity > 2 ? lambda { |bytes_written_now, total_bytes_written, total_bytes_expect|
case callback.arity
when 3
callback.call(nil, nil, total_bytes_written.to_f / total_bytes_expect.to_f)
when 5
callback.call(nil, nil, bytes_written_now, total_bytes_written, total_bytes_expect)
end
} : nil
# NOTE(review): unlike #create_operation below, the verb is passed through
# without #upcase here — confirm lowercase method names are accepted for
# multipart requests.
request = self.multipartFormRequestWithMethod(method, path: path,
parameters: parameters,constructingBodyWithBlock: multipart_callback)
operation = self.HTTPRequestOperationWithRequest(request,
success: lambda {|operation, responseObject|
result = AFMotion::HTTPResult.new(operation, responseObject, nil)
case callback.arity
when 1
callback.call(result)
when 2
callback.call(result, nil)
when 3
callback.call(result, nil, nil)
when 5
callback.call(result, nil, nil, nil, nil)
end
}, failure: lambda {|operation, error|
# On failure the result wraps the error instead of a response object.
result = AFMotion::HTTPResult.new(operation, nil, error)
case callback.arity
when 1
callback.call(result)
when 2
callback.call(result, nil)
when 3
callback.call(result, nil, nil)
when 5
callback.call(result, nil, nil, nil, nil)
end
})
if upload_callback
operation.setUploadProgressBlock(upload_callback)
end
operation
end
# Builds a plain (non-multipart) request operation. The callback always
# receives a single AFMotion::HTTPResult argument on success or failure.
def create_operation(method, path, parameters = {}, &callback)
request = self.requestWithMethod(method.upcase, path:path, parameters:parameters)
self.HTTPRequestOperationWithRequest(request, success: lambda {|operation, responseObject|
result = AFMotion::HTTPResult.new(operation, responseObject, nil)
callback.call(result)
}, failure: lambda {|operation, error|
result = AFMotion::HTTPResult.new(operation, nil, error)
callback.call(result)
})
end
# Flags the next verb call on this client as multipart; returns self so the
# call can be chained (e.g. client.multipart.post(...)). The flag is reset
# after one request.
def multipart
@multipart = true
self
end
# options can be
# - {username: ___, password: ____}
# or
# - {token: ___ }
# Passing nil clears any default Authorization header; any hash that matches
# neither shape (including an empty hash) raises a RuntimeError.
def authorization=(options = {})
if options.nil?
clearAuthorizationHeader
elsif options[:username] && options[:password]
setAuthorizationHeaderWithUsername(options[:username], password: options[:password])
elsif options[:token]
setAuthorizationHeaderWithToken(options[:token])
else
raise "Not a valid authorization hash: #{options.inspect}"
end
end
private
# To force RubyMotion pre-compilation of these methods
# Never invoked at runtime; the literal selector calls below exist so the
# build registers these path-based selectors for dynamic dispatch.
def dummy
self.getPath("", parameters: nil, success: nil, failure: nil)
self.postPath("", parameters: nil, success: nil, failure: nil)
self.putPath("", parameters: nil, success: nil, failure: nil)
self.deletePath("", parameters: nil, success: nil, failure: nil)
self.patchPath("", parameters: nil, success: nil, failure: nil)
end
end
module AirbrakeAPI
# Gem release version (semantic versioning).
VERSION = '4.1.3'
end
version bump to 4.2.0 [ci skip]
module AirbrakeAPI
# Gem release version (semantic versioning).
VERSION = '4.2.0'
end
|
require 'fileutils'
require 'git'
module Appserver
  # Wraps a bare git repository that the appserver deploys applications from.
  # Struct members: the owning server object and the repository directory.
  class Repository < Struct.new(:server, :dir)
    # Raised when +dir+ does not look like a usable bare git repository.
    class InvalidRepositoryError < RuntimeError; end

    include Utils

    # server - the owning server object (provides #app and #dir)
    # dir    - path to the bare repository; a trailing "/" is stripped
    # config - accepted for call compatibility; currently unused
    #
    # Raises InvalidRepositoryError unless the directory passes #valid?.
    def initialize (server, dir, config)
      self.server, self.dir = server, dir.chomp('/')
      raise InvalidRepositoryError unless valid?
    end

    # Repository name: the directory basename with any ".git" suffix removed.
    def name
      File.basename(dir, '.git')
    end

    # The app for this repository (same name)
    def app
      server.app(name)
    end

    # A directory counts as a valid bare repository when a non-empty name can
    # be derived from it and the customary "hooks" and "refs" subdirectories
    # exist. The name check fixes a defect where a non-bare repository (a
    # path whose basename is just ".git") produced an empty #name, which
    # would later break all app paths derived from it.
    def valid?
      name && name != '' &&
        File.directory?(File.join(dir, 'hooks')) &&
        File.directory?(File.join(dir, 'refs'))
    end

    # Path of the repository's git post-receive hook script.
    def post_receive_hook
      File.join(dir, 'hooks', 'post-receive')
    end

    # Installs a post-receive hook that runs this program's "deploy" command
    # for the repository. An existing hook is left untouched: a foreign
    # script triggers a warning, our own script is silently accepted.
    def install_hook
      deploy_cmd = "#{File.expand_path($0)} -d #{server.dir} deploy #{dir}"
      if !File.exist?(post_receive_hook) || !File.executable?(post_receive_hook)
        puts "Installing git post-receive hook to repository #{dir}..."
        safe_replace_file(post_receive_hook) do |f|
          f.puts '#!/bin/sh'
          f.puts deploy_cmd
          # Match the repository's ownership and make the hook executable.
          f.chown File.stat(dir).uid, File.stat(dir).gid
          f.chmod 0755
        end
      elsif !File.readlines(post_receive_hook).any? { |line| line =~ /^#{Regexp.escape(deploy_cmd)}/ }
        puts "Couldn't install post-receive hook. Foreign hook script already present in repository #{dir}!"
      else
        #puts "Hook already installed in repository #{dir}"
      end
    end

    # Checks out the current code next to the live app directory and swaps it
    # in, keeping the previous deployment as "<app.dir>.old". The ensure
    # block cleans up and rolls back if the swap left no live directory.
    def deploy
      # Choose a temporary build directory on the same filesystem so that it
      # can be easily renamed/moved to be the real application directory later
      build_dir, old_dir = "#{app.dir}.new", "#{app.dir}.old"
      begin
        # Check out the current code
        checkout(build_dir)
        # TODO: more deploy setup (build gem bundle, write database config, ...)
        # Replace the current application directory with the newly built one
        FileUtils.rm_rf old_dir
        FileUtils.mv app.dir, old_dir if Dir.exist?(app.dir)
        FileUtils.mv build_dir, app.dir
        # TODO: restart instances (needs root, use monit?)
        # TODO: remove old_dir *after* restart succeeded, maybe revert to old_dir on failure
      ensure
        # If anything broke and the build directory still exists, remove it
        FileUtils.rm_rf build_dir
        # If anything broke and the app directory doesn't exist anymore, put the old directory in place
        FileUtils.mv old_dir, app.dir if !Dir.exist?(app.dir) && Dir.exist?(old_dir)
      end
    end

    protected

    # Resolves +path+ relative to the repository directory.
    def expand_path (path)
      File.expand_path(path, dir)
    end

    # Exports the tip of +branch+ from the repository into +path+ (without a
    # .git directory).
    def checkout (path, branch = 'master')
      # There seem to be two ways to "export" the tip of a branch from a repository
      # 1. clone the repository, check out the branch and remove the .git directory afterwards
      #system("git clone --depth 1 --branch master #{dir} #{path} && rm -rf #{path}/.git")
      # 2. do a hard reset while pointing GIT_DIR to the repository and GIT_WORK_TREE to an empty dir
      #system("mkdir #{path} && git --git-dir=#{dir} --work-tree=#{path} reset --hard #{branch}")
      # We use Git.export from the git gem here, which uses the first
      # method (and handles errors more nicely than using system())
      Git.export(dir, path, :branch => branch)
    end
  end
end
Make sure a repository name can be built (which wouldn't be the case if a non-bare repository is given)
require 'fileutils'
require 'git'
module Appserver
# Wraps a bare git repository the appserver deploys applications from.
# Struct members: the owning server object and the repository directory.
class Repository < Struct.new(:server, :dir)
# Raised when +dir+ does not look like a usable bare git repository.
class InvalidRepositoryError < RuntimeError; end
include Utils
# server - the owning server object (provides #app and #dir)
# dir    - path to the bare repository; a trailing "/" is stripped
# config - accepted for call compatibility; currently unused
# Raises InvalidRepositoryError unless #valid? accepts the directory.
def initialize (server, dir, config)
self.server, self.dir = server, dir.chomp('/')
raise InvalidRepositoryError unless valid?
end
# Repository name: the directory basename with any ".git" suffix removed.
def name
File.basename(dir, '.git')
end
def app
# The app for this repository (same name)
server.app(name)
end
# Valid when a non-empty name can be derived (this rejects non-bare
# repositories, whose ".git" basename reduces to an empty name) and the
# customary "hooks" and "refs" subdirectories exist.
def valid?
name && name != '' &&
File.directory?(File.join(dir, 'hooks')) &&
File.directory?(File.join(dir, 'refs'))
end
# Path of the repository's git post-receive hook script.
def post_receive_hook
File.join(dir, 'hooks', 'post-receive')
end
# Installs a post-receive hook that runs this program's "deploy" command for
# the repository. An existing hook is left untouched: a foreign script
# triggers a warning, our own script is silently accepted.
def install_hook
deploy_cmd = "#{File.expand_path($0)} -d #{server.dir} deploy #{dir}"
if !File.exist?(post_receive_hook) || !File.executable?(post_receive_hook)
puts "Installing git post-receive hook to repository #{dir}..."
safe_replace_file(post_receive_hook) do |f|
f.puts '#!/bin/sh'
f.puts deploy_cmd
# Match the repository's ownership and make the hook executable.
f.chown File.stat(dir).uid, File.stat(dir).gid
f.chmod 0755
end
elsif !File.readlines(post_receive_hook).any? { |line| line =~ /^#{Regexp.escape(deploy_cmd)}/ }
puts "Couldn't install post-receive hook. Foreign hook script already present in repository #{dir}!"
else
#puts "Hook already installed in repository #{dir}"
end
end
# Checks out the current code next to the live app directory and swaps it
# in, keeping the previous deployment as "<app.dir>.old". The ensure block
# cleans up and rolls back if the swap left no live directory behind.
def deploy
# Choose a temporary build directory on the same filesystem so that it
# can be easily renamed/moved to be the real application directory later
build_dir, old_dir = "#{app.dir}.new", "#{app.dir}.old"
begin
# Check out the current code
checkout(build_dir)
# TODO: more deploy setup (build gem bundle, write database config, ...)
# Replace the current application directory with the newly built one
FileUtils.rm_rf old_dir
FileUtils.mv app.dir, old_dir if Dir.exist?(app.dir)
FileUtils.mv build_dir, app.dir
# TODO: restart instances (needs root, use monit?)
# TODO: remove old_dir *after* restart succeeded, maybe revert to old_dir on failure
ensure
# If anything broke and the build directory still exists, remove it
FileUtils.rm_rf build_dir
# If anything broke and the app directory doesn't exist anymore, put the old directory in place
FileUtils.mv old_dir, app.dir if !Dir.exist?(app.dir) && Dir.exist?(old_dir)
end
end
protected
# Resolves +path+ relative to the repository directory.
def expand_path (path)
File.expand_path(path, dir)
end
# Exports the tip of +branch+ from the repository into +path+ (no .git).
def checkout (path, branch = 'master')
# There seem to be two ways to "export" the tip of a branch from a repository
# 1. clone the repository, check out the branch and remove the .git directory afterwards
#system("git clone --depth 1 --branch master #{dir} #{path} && rm -rf #{path}/.git")
# 2. do a hard reset while pointing GIT_DIR to the repository and GIT_WORK_TREE to an empty dir
#system("mkdir #{path} && git --git-dir=#{dir} --work-tree=#{path} reset --hard #{branch}")
# We use Git.export from the git gem here, which uses the first
# method (and handles errors more nicely than using system())
Git.export(dir, path, :branch => branch)
end
end
end
|
module Appsignal
  module Probes
    # Minutely probe reporting MRI VM, thread and GC metrics.
    class MriProbe
      include Helpers

      # @api private
      # The probe can only run on MRI, where RubyVM.stat is available.
      def self.dependencies_present?
        defined?(::RubyVM) && ::RubyVM.respond_to?(:stat)
      end

      # Lazily created, shared GC profiler used for cumulative GC timing.
      def self.garbage_collection_profiler
        @garbage_collection_profiler ||= Appsignal::GarbageCollectionProfiler.new
      end

      # appsignal - receiver for the reported gauges (injectable for tests).
      def initialize(appsignal = Appsignal)
        Appsignal.logger.debug("Initializing VM probe")
        @appsignal = appsignal
      end

      # @api private
      def call
        stat = RubyVM.stat
        set_gauge(
          "ruby_vm",
          stat[:class_serial],
          :metric => :class_serial
        )
        set_gauge(
          "ruby_vm",
          # Newer Rubies expose a per-name :constant_cache hash instead of a
          # single :global_constant_state counter; sum it for comparability.
          stat[:constant_cache] ? stat[:constant_cache].values.sum : stat[:global_constant_state],
          :metric => :global_constant_state
        )

        set_gauge("thread_count", Thread.list.size)
        set_gauge("gc_total_time", MriProbe.garbage_collection_profiler.total_time)

        gc_stats = GC.stat
        # NOTE: the nil guards below suggest gauge_delta (from Helpers)
        # returns nil until a baseline sample exists — confirm in Helpers.
        allocated_objects =
          gauge_delta(
            :allocated_objects,
            gc_stats[:total_allocated_objects] || gc_stats[:total_allocated_object]
          )
        set_gauge("allocated_objects", allocated_objects) if allocated_objects

        gc_count = gauge_delta(:gc_count, GC.count)
        set_gauge("gc_count", gc_count, :metric => :gc_count) if gc_count
        minor_gc_count = gauge_delta(:minor_gc_count, gc_stats[:minor_gc_count])
        if minor_gc_count
          set_gauge("gc_count", minor_gc_count, :metric => :minor_gc_count)
        end
        major_gc_count = gauge_delta(:major_gc_count, gc_stats[:major_gc_count])
        if major_gc_count
          set_gauge("gc_count", major_gc_count, :metric => :major_gc_count)
        end

        set_gauge("heap_slots", gc_stats[:heap_live_slots] || gc_stats[:heap_live_slot], :metric => :heap_live)
        set_gauge("heap_slots", gc_stats[:heap_free_slots] || gc_stats[:heap_free_slot], :metric => :heap_free)
      end

      private

      # Single funnel for gauge reporting so probe-wide defaults (e.g. an
      # added hostname tag) only need to be changed in one place, instead of
      # at every @appsignal.set_gauge call site.
      def set_gauge(metric, value, tags = {})
        @appsignal.set_gauge(metric, value, tags)
      end
    end
  end
end
Refactor gauge calls to helper method in MRI probe
Call a private method that calls `Appsignal.set_gauge`, so that if we
want to change some defaults in the metrics reported by this probe we only
have to do it in one place — for example, adding a hostname tag.
module Appsignal
  module Probes
    # Minutely probe that reports MRI VM, thread and GC metrics.
    class MriProbe
      include Helpers

      # @api private
      # The probe can only run on MRI, where RubyVM.stat is available.
      def self.dependencies_present?
        defined?(::RubyVM) && ::RubyVM.respond_to?(:stat)
      end

      # Lazily created, shared GC profiler used for cumulative GC timing.
      def self.garbage_collection_profiler
        @garbage_collection_profiler ||= Appsignal::GarbageCollectionProfiler.new
      end

      # appsignal - object receiving the gauges (injectable for testing).
      def initialize(appsignal = Appsignal)
        Appsignal.logger.debug("Initializing VM probe")
        @appsignal = appsignal
      end

      # @api private
      def call
        vm_stat = RubyVM.stat
        set_gauge("ruby_vm", vm_stat[:class_serial], :metric => :class_serial)
        constant_state =
          if vm_stat[:constant_cache]
            # Newer Rubies expose a per-name :constant_cache hash instead of
            # a single :global_constant_state counter; sum for comparability.
            vm_stat[:constant_cache].values.sum
          else
            vm_stat[:global_constant_state]
          end
        set_gauge("ruby_vm", constant_state, :metric => :global_constant_state)

        set_gauge("thread_count", Thread.list.size)
        set_gauge("gc_total_time", MriProbe.garbage_collection_profiler.total_time)

        gc = GC.stat
        # NOTE: the nil guards below suggest gauge_delta (from Helpers)
        # yields nil until a baseline sample exists — confirm in Helpers.
        allocated = gauge_delta(:allocated_objects,
                                gc[:total_allocated_objects] || gc[:total_allocated_object])
        set_gauge("allocated_objects", allocated) if allocated

        collections = gauge_delta(:gc_count, GC.count)
        set_gauge("gc_count", collections, :metric => :gc_count) if collections
        minor = gauge_delta(:minor_gc_count, gc[:minor_gc_count])
        set_gauge("gc_count", minor, :metric => :minor_gc_count) if minor
        major = gauge_delta(:major_gc_count, gc[:major_gc_count])
        set_gauge("gc_count", major, :metric => :major_gc_count) if major

        set_gauge("heap_slots", gc[:heap_live_slots] || gc[:heap_live_slot], :metric => :heap_live)
        set_gauge("heap_slots", gc[:heap_free_slots] || gc[:heap_free_slot], :metric => :heap_free)
      end

      private

      # Single funnel onto @appsignal.set_gauge so probe-wide defaults can be
      # adjusted in one place.
      def set_gauge(metric, value, tags = {})
        @appsignal.set_gauge(metric, value, tags)
      end
    end
  end
end
|
require 'zlib'
require 'archive/support/io-like'
module Zlib # :nodoc:
if ! const_defined?(:MAX_WBITS) then
MAX_WBITS = Deflate::MAX_WBITS
end
# Zlib::ZWriter is a writable, IO-like object (includes IO::Like) which wraps
# other writable, IO-like objects in order to facilitate writing data to those
# objects using the deflate method of compression.
class ZWriter
  include IO::Like

  # Creates a new instance of this class with the given arguments using #new
  # and then passes the instance to the given block. The #close method is
  # guaranteed to be called after the block completes.
  #
  # Equivalent to #new if no block is given.
  def self.open(io, level = Zlib::DEFAULT_COMPRESSION, window_bits = nil, mem_level = nil, strategy = nil)
    zw = new(io, level, window_bits, mem_level, strategy)
    return zw unless block_given?
    begin
      yield(zw)
    ensure
      zw.close unless zw.closed?
    end
  end

  # Creates a new instance of this class. _io_ must respond to the _write_
  # method as an instance of IO would. _level_, _window_bits_, _mem_level_,
  # and _strategy_ are all passed directly to Zlib::Deflate.new(). See the
  # documentation of that method for their meanings.
  #
  # <b>NOTE:</b> Due to limitations in Ruby's finalization capabilities, the
  # #close method is _not_ automatically called when this object is garbage
  # collected. Make sure to call #close when finished with this object.
  def initialize(io, level = Zlib::DEFAULT_COMPRESSION, window_bits = nil, mem_level = nil, strategy = nil)
    @delegate = io
    @deflater = Zlib::Deflate.new(level, window_bits, mem_level, strategy)
    # Compressed bytes produced by the deflater but not yet accepted by the
    # delegate's write method.
    @deflate_buffer = ''
    @crc32 = 0
  end

  # The CRC32 checksum of the uncompressed data written using this object.
  #
  # <b>NOTE:</b> Anything still in the internal write buffer has not been
  # processed, so calling #flush prior to examining this attribute may be
  # necessary for an accurate computation.
  attr_reader :crc32

  protected

  # The delegate object to which compressed data is written.
  attr_reader :delegate

  public

  # Closes the writer by finishing the compressed data and flushing it to the
  # delegate.
  #
  # Bug fix: the previous implementation called super() first and then wrote
  # only @deflater.finish to the delegate, silently discarding any compressed
  # bytes still pending in @deflate_buffer (e.g. left over after an
  # interrupted delegate write) and never closing the deflater. The stream is
  # now flushed, finished, and fully drained before teardown.
  #
  # Raises IOError if called more than once.
  def close
    flush()
    @deflate_buffer << @deflater.finish unless @deflater.finished?
    # The delegate may accept the remaining data in several partial writes.
    until @deflate_buffer.empty? do
      @deflate_buffer.slice!(0, delegate.write(@deflate_buffer))
    end
    @deflater.close
    super()
    nil
  end

  # Returns the number of bytes of compressed data produced so far.
  #
  # <b>NOTE:</b> Anything still in the internal write buffer has not been
  # processed, so calling #flush prior to calling this method may be necessary
  # for an accurate count.
  def compressed_size
    @deflater.total_out
  end

  # Returns the number of bytes sent to be compressed so far.
  #
  # <b>NOTE:</b> Anything still in the internal write buffer has not been
  # processed, so calling #flush prior to calling this method may be necessary
  # for an accurate count.
  def uncompressed_size
    @deflater.total_in
  end

  private

  # Compresses _string_ and reports it as fully written.
  #
  # The pending deflate buffer is drained first so that a delegate write
  # failure propagates before the deflater is fed new data; the freshly
  # deflated output is then only buffered, to be written by the next call or
  # by #close.
  def unbuffered_write(string)
    until @deflate_buffer.empty? do
      @deflate_buffer.slice!(0, delegate.write(@deflate_buffer))
    end
    @deflate_buffer = @deflater.deflate(string)
    @crc32 = Zlib.crc32(string, @crc32)
    string.length
  end
end
# Zlib::ZReader is a readable, IO-like object (includes IO::Like) which wraps
# other readable, IO-like objects in order to facilitate reading data from
# those objects using the inflate method of decompression.
class ZReader
include IO::Like
# The number of bytes to read from the delegate object each time the
# internal read buffer is filled.
DEFAULT_DELEGATE_READ_SIZE = 4096
# Creates a new instance of this class with the given arguments using #new
# and then passes the instance to the given block. The #close method is
# guaranteed to be called after the block completes.
#
# Equivalent to #new if no block is given.
def self.open(io, window_bits = nil)
zr = new(io, window_bits)
return zr unless block_given?
begin
yield(zr)
ensure
zr.close unless zr.closed?
end
end
# Creates a new instance of this class. _io_ must respond to the _read_
# method as an IO instance would. _window_bits_ is passed directly to
# Zlib::Inflate.new(). See the documentation of that method for its
# meaning.
#
# This class has extremely limited seek capabilities. It is possible to
# seek with an offset of <tt>0</tt> and a whence of <tt>IO::SEEK_CUR</tt>.
# As a result, the _pos_ and _tell_ methods also work as expected.
#
# Due to certain optimizations within IO::Like#seek and if there is data in
# the read buffer, the _seek_ method can be used to seek forward from the
# current stream position up to the end of the buffer. Unless it is known
# definitively how much data is in the buffer, it is best to avoid relying
# on this behavior.
#
# If _io_ also responds to _rewind_, then the _rewind_ method of this class
# can be used to reset the whole stream back to the beginning. Using _seek_
# of this class to seek directly to offset <tt>0</tt> using
# <tt>IO::SEEK_SET</tt> for whence will also work in this case.
#
# Any other seeking attempts will raise Errno::EINVAL exceptions.
#
# <b>NOTE:</b> Due to limitations in Ruby's finalization capabilities, the
# #close method is _not_ automatically called when this object is garbage
# collected. Make sure to call #close when finished with this object.
def initialize(io, window_bits = nil)
@delegate = io
@delegate_read_size = DEFAULT_DELEGATE_READ_SIZE
# window_bits is kept so the inflater can be rebuilt on rewind
# (see #unbuffered_seek).
@window_bits = window_bits
@inflater = Zlib::Inflate.new(@window_bits)
@decompress_buffer = ''
@crc32 = 0
end
# The CRC32 checksum of the uncompressed data read using this object.
#
# <b>NOTE:</b> The contents of the internal read buffer are immediately
# processed any time the buffer is filled, so this count is only accurate if
# all data has been read out of this object.
attr_reader :crc32
# The number of bytes to read from the delegate object each time the
# internal read buffer is filled.
attr_accessor :delegate_read_size
protected
# The delegate object from which compressed data is read.
attr_reader :delegate
public
# Closes the reader.
#
# Raises IOError if called more than once.
def close
super()
@inflater.close
nil
end
# Returns the number of bytes sent to be decompressed so far.
def compressed_size
@inflater.total_in
end
# Returns the number of bytes of decompressed data produced so far.
def uncompressed_size
@inflater.total_out
end
private
# Returns up to _length_ bytes of decompressed data, raising EOFError once
# both the internal buffer and the compressed stream are exhausted.
def unbuffered_read(length)
if @decompress_buffer.empty? && @inflater.finished? then
raise EOFError, 'end of file reached'
end
begin
# Refill: pull compressed chunks from the delegate until enough
# decompressed data is buffered or the stream ends.
while @decompress_buffer.length < length && ! @inflater.finished? do
@decompress_buffer <<
@inflater.inflate(delegate.read(@delegate_read_size))
end
rescue Errno::EINTR, Errno::EAGAIN
# Partial data can still be returned; only re-raise when nothing is
# buffered at all.
raise if @decompress_buffer.empty?
end
buffer = @decompress_buffer.slice!(0, length)
# The checksum tracks only bytes actually handed to the caller.
@crc32 = Zlib.crc32(buffer, @crc32)
buffer
end
# Allows resetting this object and the delegate object back to the beginning
# of the stream or reporting the current position in the stream.
#
# Raises Errno::EINVAL unless _offset_ is <tt>0</tt> and _whence_ is either
# IO::SEEK_SET or IO::SEEK_CUR. Raises Errno::EINVAL if _whence_ is
# IO::SEEK_SET and the delegate object does not respond to the _rewind_
# method.
def unbuffered_seek(offset, whence = IO::SEEK_SET)
unless offset == 0 &&
((whence == IO::SEEK_SET && delegate.respond_to?(:rewind)) ||
whence == IO::SEEK_CUR) then
raise Errno::EINVAL, 'Invalid argument'
end
case whence
when IO::SEEK_SET
# Rewind: restart the delegate and rebuild the inflater/CRC state.
delegate.rewind
@inflater.close
@inflater = Zlib::Inflate.new(@window_bits)
@crc32 = 0
@decompress_buffer = ''
0
when IO::SEEK_CUR
# Current position: bytes inflated so far minus what is still buffered.
@inflater.total_out - @decompress_buffer.length
end
end
end
end
Fixed a potential data loss bug in Zlib::ZWriter
require 'zlib'
require 'archive/support/io-like'
module Zlib # :nodoc:
if ! const_defined?(:MAX_WBITS) then
MAX_WBITS = Deflate::MAX_WBITS
end
# Zlib::ZWriter is a writable, IO-like object (includes IO::Like) which wraps
# other writable, IO-like objects in order to facilitate writing data to those
# objects using the deflate method of compression.
class ZWriter
include IO::Like
# Creates a new instance of this class with the given arguments using #new
# and then passes the instance to the given block. The #close method is
# guaranteed to be called after the block completes.
#
# Equivalent to #new if no block is given.
def self.open(io, level = Zlib::DEFAULT_COMPRESSION, window_bits = nil, mem_level = nil, strategy = nil)
zw = new(io, level, window_bits, mem_level, strategy)
return zw unless block_given?
begin
yield(zw)
ensure
zw.close unless zw.closed?
end
end
# Creates a new instance of this class. _io_ must respond to the _write_
# method as an instance of IO would. _level_, _window_bits_, _mem_level_,
# and _strategy_ are all passed directly to Zlib::Deflate.new(). See the
# documentation of that method for their meanings.
#
# <b>NOTE:</b> Due to limitations in Ruby's finalization capabilities, the
# #close method is _not_ automatically called when this object is garbage
# collected. Make sure to call #close when finished with this object.
def initialize(io, level = Zlib::DEFAULT_COMPRESSION, window_bits = nil, mem_level = nil, strategy = nil)
@delegate = io
@deflater = Zlib::Deflate.new(level, window_bits, mem_level, strategy)
# Compressed bytes produced by the deflater but not yet accepted by the
# delegate's write method.
@deflate_buffer = ''
@crc32 = 0
end
# The CRC32 checksum of the uncompressed data written using this object.
#
# <b>NOTE:</b> Anything still in the internal write buffer has not been
# processed, so calling #flush prior to examining this attribute may be
# necessary for an accurate computation.
attr_reader :crc32
protected
# The delegate object to which compressed data is written.
attr_reader :delegate
public
# Closes the writer by finishing the compressed data and flushing it to the
# delegate.
#
# Raises IOError if called more than once.
def close
# Flush IO::Like's internal write buffer through #unbuffered_write first so
# every byte handed to this writer has reached the deflater.
flush()
@deflate_buffer << @deflater.finish unless @deflater.finished?
# Drain all pending compressed output; the delegate may accept the data in
# several partial writes.
until @deflate_buffer.empty? do
@deflate_buffer.slice!(0, delegate.write(@deflate_buffer))
end
@deflater.close
super()
nil
end
# Returns the number of bytes of compressed data produced so far.
#
# <b>NOTE:</b> Anything still in the internal write buffer has not been
# processed, so calling #flush prior to calling this method may be necessary
# for an accurate count.
def compressed_size
@deflater.total_out
end
# Returns the number of bytes sent to be compressed so far.
#
# <b>NOTE:</b> Anything still in the internal write buffer has not been
# processed, so calling #flush prior to calling this method may be necessary
# for an accurate count.
def uncompressed_size
@deflater.total_in
end
private
# Compresses _string_ and reports it as fully written; the compressed bytes
# are flushed to the delegate by later calls or by #close.
def unbuffered_write(string)
# First try to write out the contents of the deflate buffer because if
# that raises a failure we can let that pass up the call stack without
# having polluted the deflater instance.
until @deflate_buffer.empty? do
@deflate_buffer.slice!(0, delegate.write(@deflate_buffer))
end
# At this point we can deflate the given string into a new buffer and
# behave as if it was written.
@deflate_buffer = @deflater.deflate(string)
@crc32 = Zlib.crc32(string, @crc32)
string.length
end
end
# Zlib::ZReader is a readable, IO-like object (includes IO::Like) which wraps
# other readable, IO-like objects in order to facilitate reading data from
# those objects using the inflate method of decompression.
class ZReader
include IO::Like
# The number of bytes to read from the delegate object each time the
# internal read buffer is filled.
DEFAULT_DELEGATE_READ_SIZE = 4096
# Creates a new instance of this class with the given arguments using #new
# and then passes the instance to the given block. The #close method is
# guaranteed to be called after the block completes.
#
# Equivalent to #new if no block is given.
def self.open(io, window_bits = nil)
zr = new(io, window_bits)
return zr unless block_given?
begin
yield(zr)
ensure
zr.close unless zr.closed?
end
end
# Creates a new instance of this class. _io_ must respond to the _read_
# method as an IO instance would. _window_bits_ is passed directly to
# Zlib::Inflate.new(). See the documentation of that method for its
# meaning.
#
# This class has extremely limited seek capabilities. It is possible to
# seek with an offset of <tt>0</tt> and a whence of <tt>IO::SEEK_CUR</tt>.
# As a result, the _pos_ and _tell_ methods also work as expected.
#
# Due to certain optimizations within IO::Like#seek and if there is data in
# the read buffer, the _seek_ method can be used to seek forward from the
# current stream position up to the end of the buffer. Unless it is known
# definitively how much data is in the buffer, it is best to avoid relying
# on this behavior.
#
# If _io_ also responds to _rewind_, then the _rewind_ method of this class
# can be used to reset the whole stream back to the beginning. Using _seek_
# of this class to seek directly to offset <tt>0</tt> using
# <tt>IO::SEEK_SET</tt> for whence will also work in this case.
#
# Any other seeking attempts will raise Errno::EINVAL exceptions.
#
# <b>NOTE:</b> Due to limitations in Ruby's finalization capabilities, the
# #close method is _not_ automatically called when this object is garbage
# collected. Make sure to call #close when finished with this object.
def initialize(io, window_bits = nil)
@delegate = io
@delegate_read_size = DEFAULT_DELEGATE_READ_SIZE
# window_bits is kept so the inflater can be rebuilt on rewind
# (see #unbuffered_seek).
@window_bits = window_bits
@inflater = Zlib::Inflate.new(@window_bits)
@decompress_buffer = ''
@crc32 = 0
end
# The CRC32 checksum of the uncompressed data read using this object.
#
# <b>NOTE:</b> The contents of the internal read buffer are immediately
# processed any time the buffer is filled, so this count is only accurate if
# all data has been read out of this object.
attr_reader :crc32
# The number of bytes to read from the delegate object each time the
# internal read buffer is filled.
attr_accessor :delegate_read_size
protected
# The delegate object from which compressed data is read.
attr_reader :delegate
public
# Closes the reader.
#
# Raises IOError if called more than once.
def close
super()
@inflater.close
nil
end
# Returns the number of bytes sent to be decompressed so far.
def compressed_size
@inflater.total_in
end
# Returns the number of bytes of decompressed data produced so far.
def uncompressed_size
@inflater.total_out
end
private
# Returns up to _length_ bytes of decompressed data, raising EOFError once
# both the internal buffer and the compressed stream are exhausted.
def unbuffered_read(length)
if @decompress_buffer.empty? && @inflater.finished? then
raise EOFError, 'end of file reached'
end
begin
# Refill: pull compressed chunks from the delegate until enough
# decompressed data is buffered or the stream ends.
while @decompress_buffer.length < length && ! @inflater.finished? do
@decompress_buffer <<
@inflater.inflate(delegate.read(@delegate_read_size))
end
rescue Errno::EINTR, Errno::EAGAIN
# Partial data can still be returned; only re-raise when nothing is
# buffered at all.
raise if @decompress_buffer.empty?
end
buffer = @decompress_buffer.slice!(0, length)
# The checksum tracks only bytes actually handed to the caller.
@crc32 = Zlib.crc32(buffer, @crc32)
buffer
end
# Allows resetting this object and the delegate object back to the beginning
# of the stream or reporting the current position in the stream.
#
# Raises Errno::EINVAL unless _offset_ is <tt>0</tt> and _whence_ is either
# IO::SEEK_SET or IO::SEEK_CUR. Raises Errno::EINVAL if _whence_ is
# IO::SEEK_SET and the delegate object does not respond to the _rewind_
# method.
def unbuffered_seek(offset, whence = IO::SEEK_SET)
unless offset == 0 &&
((whence == IO::SEEK_SET && delegate.respond_to?(:rewind)) ||
whence == IO::SEEK_CUR) then
raise Errno::EINVAL, 'Invalid argument'
end
case whence
when IO::SEEK_SET
# Rewind: restart the delegate and rebuild the inflater/CRC state.
delegate.rewind
@inflater.close
@inflater = Zlib::Inflate.new(@window_bits)
@crc32 = 0
@decompress_buffer = ''
0
when IO::SEEK_CUR
# Current position: bytes inflated so far minus what is still buffered.
@inflater.total_out - @decompress_buffer.length
end
end
end
end
|
ArJdbc.load_java_part :MySQL
require 'bigdecimal'
require 'active_record/connection_adapters/abstract_mysql_adapter'
require 'active_record/connection_adapters/abstract/schema_definitions'
require 'arjdbc/abstract/core'
require 'arjdbc/abstract/connection_management'
require 'arjdbc/abstract/database_statements'
require 'arjdbc/abstract/statement_cache'
require 'arjdbc/abstract/transaction_support'
module ActiveRecord
  module ConnectionAdapters
    AbstractMysqlAdapter.class_eval do
      include ArJdbc::Abstract::Core # to have correct initialize() super
    end

    # Remove any vestiges of core/Ruby MySQL adapter
    remove_const(:Mysql2Adapter) if const_defined?(:Mysql2Adapter)

    # JDBC-backed drop-in replacement for the mysql2 adapter.
    class Mysql2Adapter < AbstractMysqlAdapter
      ADAPTER_NAME = 'Mysql2'.freeze

      include Jdbc::ConnectionPoolCallbacks

      include ArJdbc::Abstract::ConnectionManagement
      include ArJdbc::Abstract::DatabaseStatements
      # NOTE: do not include MySQL::DatabaseStatements
      include ArJdbc::Abstract::StatementCache
      include ArJdbc::Abstract::TransactionSupport

      include ArJdbc::MySQL

      def initialize(connection, logger, connection_parameters, config)
        super
        # Prepared statements are opt-in for this adapter unless configured.
        @prepared_statements = false unless config.key?(:prepared_statements)
        # configure_connection taken care of at ArJdbc::Abstract::Core
      end

      # JSON columns require MySQL >= 5.7.8; MariaDB is deliberately excluded.
      def supports_json?
        !mariadb? && version >= '5.7.8'
      end

      def supports_comments?
        true
      end

      def supports_comments_in_create?
        true
      end

      def supports_savepoints?
        true
      end

      # Consistency fix: declare support for ActiveRecord's lazy transactions
      # (BEGIN is deferred until the first statement runs), matching the
      # capability advertised by the native mysql2 adapter.
      def supports_lazy_transactions?
        true
      end

      def supports_transaction_isolation?
        true
      end

      def supports_set_server_option?
        false
      end

      # HELPER METHODS ===========================================

      # Reloading the type map in abstract/statement_cache.rb blows up postgres
      def clear_cache!
        reload_type_map
        super
      end

      def each_hash(result) # :nodoc:
        if block_given?
          # FIXME: This is C in mysql2 gem and I just made simplest Ruby
          result.each do |row|
            new_hash = {}
            row.each { |k, v| new_hash[k.to_sym] = v }
            yield new_hash
          end
        else
          to_enum(:each_hash, result)
        end
      end

      # Maps a JDBCError to the vendor error number ActiveRecord expects;
      # returns nil for any other exception class.
      def error_number(exception)
        exception.error_code if exception.is_a?(JDBCError)
      end

      #--
      # QUOTING ==================================================
      #+

      # FIXME: 5.1 crashes without this. I think this is Arel hitting a fallback path in to_sql.rb.
      # So maybe an untested code path in their source. Still means we are doing something wrong to
      # even hit it.
      def quote(value, comment=nil)
        super(value)
      end

      # NOTE: quote_string(string) provided by ArJdbc::MySQL (native code),
      # this piece is also native (mysql2) under MRI: `@connection.escape(string)`

      # Strips sub-second precision from quoted dates when the server cannot
      # store datetime precision.
      def quoted_date(value)
        if supports_datetime_with_precision?
          super
        else
          super.sub(/\.\d{6}\z/, '')
        end
      end

      def _quote(value)
        if value.is_a?(Type::Binary::Data)
          # Binary payloads are emitted as MySQL hex literals.
          "x'#{value.hex}'"
        else
          super
        end
      end
      private :_quote

      #--
      # CONNECTION MANAGEMENT ====================================
      #++

      alias :reset! :reconnect!

      #
      private

      # e.g. "5.7.20-0ubuntu0.16.04.1"
      def full_version; @full_version ||= @connection.full_version end

      def jdbc_connection_class(spec)
        ::ActiveRecord::ConnectionAdapters::MySQLJdbcConnection
      end

      def jdbc_column_class
        ::ActiveRecord::ConnectionAdapters::MySQL::Column
      end

      # defined in MySQL::DatabaseStatements which is not included
      def default_insert_value(column)
        Arel.sql("DEFAULT") unless column.auto_increment?
      end

      # FIXME: optimize insert_fixtures_set by using JDBC Statement.addBatch()/executeBatch()
      def combine_multi_statements(total_sql)
        total_sql
      end

      def with_multi_statements
        yield
      end

      def discard_remaining_results
      end
    end
  end
end
[mysql] enable lazy transactions
ArJdbc.load_java_part :MySQL
require 'bigdecimal'
require 'active_record/connection_adapters/abstract_mysql_adapter'
require 'active_record/connection_adapters/abstract/schema_definitions'
require 'arjdbc/abstract/core'
require 'arjdbc/abstract/connection_management'
require 'arjdbc/abstract/database_statements'
require 'arjdbc/abstract/statement_cache'
require 'arjdbc/abstract/transaction_support'
# JDBC-backed replacement for the Rails Mysql2 adapter: the class below
# keeps the Mysql2Adapter name and public surface while routing all
# database access through JDBC (via the ArJdbc abstract mixins).
module ActiveRecord
  module ConnectionAdapters
    AbstractMysqlAdapter.class_eval do
      include ArJdbc::Abstract::Core # to have correct initialize() super
    end

    # Remove any vestiges of core/Ruby MySQL adapter
    remove_const(:Mysql2Adapter) if const_defined?(:Mysql2Adapter)

    class Mysql2Adapter < AbstractMysqlAdapter
      ADAPTER_NAME = 'Mysql2'.freeze

      include Jdbc::ConnectionPoolCallbacks

      include ArJdbc::Abstract::ConnectionManagement
      include ArJdbc::Abstract::DatabaseStatements
      # NOTE: do not include MySQL::DatabaseStatements
      include ArJdbc::Abstract::StatementCache
      include ArJdbc::Abstract::TransactionSupport

      include ArJdbc::MySQL

      # Prepared statements default to off unless explicitly configured.
      def initialize(connection, logger, connection_parameters, config)
        super
        @prepared_statements = false unless config.key?(:prepared_statements)
        # configure_connection taken care of at ArJdbc::Abstract::Core
      end

      # JSON columns require MySQL 5.7.8+ (MariaDB's JSON support differs).
      def supports_json?
        !mariadb? && version >= '5.7.8'
      end

      def supports_comments?
        true
      end

      def supports_comments_in_create?
        true
      end

      def supports_savepoints?
        true
      end

      def supports_lazy_transactions?
        true
      end

      def supports_transaction_isolation?
        true
      end

      # The mysql2-style set_server_option API is not available through JDBC.
      def supports_set_server_option?
        false
      end

      # HELPER METHODS ===========================================

      # Reloading the type map in abstract/statement_cache.rb blows up postgres,
      # so the reload happens here instead.
      def clear_cache!
        reload_type_map
        super
      end

      # Yields each row as a Hash with symbolized keys, or returns an
      # Enumerator when no block is given.
      def each_hash(result) # :nodoc:
        if block_given?
          # FIXME: This is C in mysql2 gem and I just made simplest Ruby
          result.each do |row|
            new_hash = {}
            row.each { |k, v| new_hash[k.to_sym] = v }
            yield new_hash
          end
        else
          to_enum(:each_hash, result)
        end
      end

      # Vendor error code for JDBC-originated exceptions (nil otherwise).
      def error_number(exception)
        exception.error_code if exception.is_a?(JDBCError)
      end

      #--
      # QUOTING ==================================================
      #++

      # FIXME: 5.1 crashes without this. I think this is Arel hitting a fallback path in to_sql.rb.
      # So maybe an untested code path in their source. Still means we are doing something wrong to
      # even hit it.
      # The +comment+ argument is accepted for call-compatibility but ignored.
      def quote(value, comment=nil)
        super(value)
      end

      # NOTE: quote_string(string) provided by ArJdbc::MySQL (native code),
      # this piece is also native (mysql2) under MRI: `@connection.escape(string)`

      # Strips the microsecond suffix when the server lacks datetime precision.
      def quoted_date(value)
        if supports_datetime_with_precision?
          super
        else
          super.sub(/\.\d{6}\z/, '')
        end
      end

      # Binary payloads become MySQL hex literals.
      def _quote(value)
        if value.is_a?(Type::Binary::Data)
          "x'#{value.hex}'"
        else
          super
        end
      end
      private :_quote

      #--
      # CONNECTION MANAGEMENT ====================================
      #++

      alias :reset! :reconnect!

      private

      # e.g. "5.7.20-0ubuntu0.16.04.1"
      def full_version; @full_version ||= @connection.full_version end

      # JDBC connection class used to open connections for this adapter.
      def jdbc_connection_class(spec)
        ::ActiveRecord::ConnectionAdapters::MySQLJdbcConnection
      end

      def jdbc_column_class
        ::ActiveRecord::ConnectionAdapters::MySQL::Column
      end

      # defined in MySQL::DatabaseStatements which is not included;
      # auto-increment columns get no explicit insert value.
      def default_insert_value(column)
        Arel.sql("DEFAULT") unless column.auto_increment?
      end

      # FIXME: optimize insert_fixtures_set by using JDBC Statement.addBatch()/executeBatch()
      def combine_multi_statements(total_sql)
        total_sql
      end

      # No MULTI_STATEMENTS flag to toggle over JDBC; just run the block.
      def with_multi_statements
        yield
      end

      # Nothing to discard: results are fully consumed by the JDBC layer.
      def discard_remaining_results
      end
    end
  end
end
|
# Makes project_files.file_file_size NOT NULL, purging offending rows first.
class SetFileSizeNotNullInProjectFiles < ActiveRecord::Migration
  def self.up
    # Delete the NULL-size rows with raw SQL instead of instantiating
    # ProjectFile records: referencing app models inside a migration breaks
    # as soon as the model (or its attachment definition) changes, and
    # destroying rows one-by-one is needlessly slow for a bulk cleanup.
    execute("DELETE FROM `project_files` WHERE `file_file_size` IS NULL")
    change_column_null(:project_files, :file_file_size, false)
  end

  # Relaxes the constraint again; deleted rows are not restored.
  def self.down
    change_column_null(:project_files, :file_file_size, true)
  end
end
Fix a migration that doesn't work unless you update your code one commit at a time.
Nugi, please don't write this kind of code in migrations. Stick to plain SQL statements and simple migration tasks.
# Makes project_files.file_file_size NOT NULL, purging offending rows first
# via raw SQL so the migration does not depend on application models.
class SetFileSizeNotNullInProjectFiles < ActiveRecord::Migration
  def self.up
    # deletes all rows whose file_file_size is NULL before adding the constraint
    execute("DELETE FROM `project_files` WHERE `file_file_size` IS NULL")
    change_column_null(:project_files, :file_file_size, false)
  end

  # Relaxes the constraint again; deleted rows are not restored.
  def self.down
    change_column_null(:project_files, :file_file_size, true)
  end
end
|
require 'active_record/connection_adapters/abstract/schema_definitions'
module ::ArJdbc
module MySQL
# Hook used by arjdbc's column discovery: columns of adapters whose config
# matches /mysql/ get ColumnExtensions mixed in.
def self.column_selector
  [/mysql/i, lambda {|cfg,col| col.extend(::ArJdbc::MySQL::ColumnExtensions)}]
end

# Runs once when this module is mixed into a live adapter instance.
def self.extended(adapter)
  adapter.configure_connection
end

# Match Rails' MySQL adapter: disable NULL-safe auto-increment lookups.
def configure_connection
  execute("SET SQL_AUTO_IS_NULL=0")
end

# JDBC connection class used to open connections for this adapter.
def self.jdbc_connection_class
  ::ActiveRecord::ConnectionAdapters::MySQLJdbcConnection
end
# Column-level MySQL behavior mixed into JDBC column objects
# (see ::ArJdbc::MySQL.column_selector above).
module ColumnExtensions
  # MySQL forbids defaults on blob/text columns, and misreports defaults
  # for some NOT NULL columns; normalize both cases here.
  def extract_default(default)
    if sql_type =~ /blob/i || type == :text
      if default.blank?
        return null ? nil : ''
      else
        raise ArgumentError, "#{type} columns cannot have a default value: #{default.inspect}"
      end
    elsif missing_default_forged_as_empty_string?(default)
      nil
    else
      super
    end
  end

  def has_default?
    return false if sql_type =~ /blob/i || type == :text #mysql forbids defaults on blob and text columns
    super
  end

  # Maps MySQL-specific column declarations onto generic AR types.
  def simplified_type(field_type)
    case field_type
    when /tinyint\(1\)|bit/i then :boolean
    when /enum/i then :string
    when /year/i then :integer
    else
      super
    end
  end

  # Byte-size / length limits for MySQL's sized integer and blob/text types.
  def extract_limit(sql_type)
    case sql_type
    when /blob|text/i
      case sql_type
      when /tiny/i
        255
      when /medium/i
        16777215
      when /long/i
        2147483647 # mysql only allows 2^31-1, not 2^32-1, somewhat inconsistently with the tiny/medium/normal cases
      else
        nil # we could return 65535 here, but we leave it undecorated by default
      end
    when /^enum/i; 255
    when /^bigint/i; 8
    when /^int/i; 4
    when /^mediumint/i; 3
    when /^smallint/i; 2
    when /^tinyint/i; 1
    when /^(bool|date|float|int|time)/i
      nil
    else
      super
    end
  end

  # MySQL misreports NOT NULL column default when none is given.
  # We can't detect this for columns which may have a legitimate ''
  # default (string) but we can for others (integer, datetime, boolean,
  # and the rest).
  #
  # Test whether the column has default '', is not null, and is not
  # a type allowing default ''.
  def missing_default_forged_as_empty_string?(default)
    type != :string && !null && default == ''
  end
end
# Adjusts the abstract adapter's native-type map for MySQL.
def modify_types(tp)
  tp[:primary_key] = "int(11) DEFAULT NULL auto_increment PRIMARY KEY"
  tp[:integer] = { :name => 'int', :limit => 4 }
  tp[:decimal] = { :name => "decimal" }
  tp[:timestamp] = { :name => "datetime" }
  tp[:datetime][:limit] = nil
  tp
end

def adapter_name #:nodoc:
  'MySQL'
end

# Arel 2 visitor registration for every MySQL adapter alias.
def self.arel2_visitors(config)
  {}.tap {|v| %w(mysql mysql2 jdbcmysql).each {|a| v[a] = ::Arel::Visitors::MySQL } }
end

# Forces a case-sensitive comparison under case-insensitive collations.
def case_sensitive_equality_operator
  "= BINARY"
end

def case_sensitive_modifier(node)
  Arel::Nodes::Bin.new(node)
end

# MySQL supports LIMIT in UPDATE directly, so no extra wrapping is needed.
def limited_update_conditions(where_sql, quoted_table_name, quoted_primary_key)
  where_sql
end
# QUOTING ==================================================

# Quotes +value+ for inclusion in SQL, honoring MySQL specifics:
# primary keys are emitted bare, binary strings as hex literals, and
# BigDecimals in plain (non-scientific) form; everything else uses super.
def quote(value, column = nil)
  return value.quoted_id if value.respond_to?(:quoted_id)

  if column && column.type == :primary_key
    value.to_s
  elsif column && String === value && column.type == :binary && column.class.respond_to?(:string_to_binary)
    "x'#{column.class.string_to_binary(value).unpack("H*")[0]}'"
  elsif BigDecimal === value
    "'#{value.to_s("F")}'"
  else
    super
  end
end
# Wraps +name+ in backticks, escaping embedded backticks by doubling them.
def quote_column_name(name)
  escaped = name.to_s.gsub('`', '``')
  "`#{escaped}`"
end
# MySQL stores booleans as tinyint: true => 1, false => 0.
def quoted_true
  "1"
end

def quoted_false
  "0"
end
def supports_savepoints? #:nodoc:
  true
end

def create_savepoint
  execute("SAVEPOINT #{current_savepoint_name}")
end

def rollback_to_savepoint
  execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
end

def release_savepoint
  execute("RELEASE SAVEPOINT #{current_savepoint_name}")
end

# Runs the block with FOREIGN_KEY_CHECKS disabled, restoring the previous
# session setting afterwards even when the block raises.
def disable_referential_integrity(&block) #:nodoc:
  old = select_value("SELECT @@FOREIGN_KEY_CHECKS")
  begin
    update("SET FOREIGN_KEY_CHECKS = 0")
    yield
  ensure
    update("SET FOREIGN_KEY_CHECKS = #{old}")
  end
end
# SCHEMA STATEMENTS ========================================

# Concatenates SHOW CREATE TABLE / SHOW CREATE VIEW output for every table.
# NOTE(review): a row matching neither "Create Table" nor "Create View"
# would leave the inject accumulator nil — assumed not to occur in practice;
# verify against the server's SHOW output if this ever raises.
def structure_dump #:nodoc:
  if supports_views?
    sql = "SHOW FULL TABLES WHERE Table_type = 'BASE TABLE'"
  else
    sql = "SHOW TABLES"
  end
  select_all(sql).inject("") do |structure, table|
    table.delete('Table_type')
    hash = show_create_table(table.to_a.first.last)
    if(table = hash["Create Table"])
      structure += table + ";\n\n"
    elsif(view = hash["Create View"])
      structure += view + ";\n\n"
    end
  end
end

# based on:
# https://github.com/rails/rails/blob/3-1-stable/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb#L756
# Required for passing rails column caching tests
# Returns a table's primary key and belonging sequence (sequence is always
# nil on MySQL). Composite primary keys yield nil.
def pk_and_sequence_for(table) #:nodoc:
  keys = []
  result = execute("SHOW INDEX FROM #{quote_table_name(table)} WHERE Key_name = 'PRIMARY'", 'SCHEMA')
  result.each do |h|
    keys << h["Column_name"]
  end
  keys.length == 1 ? [keys.first, nil] : nil
end
# based on:
# https://github.com/rails/rails/blob/3-1-stable/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb#L647
# Returns an array of IndexDefinition for the given table, folding the
# per-column rows of SHOW KEYS into one entry per index (primary key skipped).
def indexes(table_name, name = nil)#:nodoc:
  indexes = []
  current_index = nil
  result = execute("SHOW KEYS FROM #{quote_table_name(table_name)}", name)
  result.each do |row|
    key_name = row["Key_name"]
    if current_index != key_name
      next if key_name == "PRIMARY" # skip the primary key
      current_index = key_name
      indexes << ::ActiveRecord::ConnectionAdapters::IndexDefinition.new(
        row["Table"], key_name, row["Non_unique"] == 0, [], [])
    end
    indexes.last.columns << row["Column_name"]
    indexes.last.lengths << row["Sub_part"]
  end
  indexes
end

# Column objects built from SHOW FIELDS output.
def jdbc_columns(table_name, name = nil)#:nodoc:
  sql = "SHOW FIELDS FROM #{quote_table_name(table_name)}"
  execute(sql, 'SCHEMA').map do |field|
    ::ActiveRecord::ConnectionAdapters::MysqlColumn.new(field["Field"], field["Default"], field["Type"], field["Null"] == "YES")
  end
end
# Returns just the name of the table's single-column primary key, or nil.
def primary_key(table)
  (pk_and_sequence_for(table) || []).first
end
def recreate_database(name, options = {}) #:nodoc:
  drop_database(name)
  create_database(name, options)
end

# Creates the database, defaulting the charset to utf8 when none is given.
def create_database(name, options = {}) #:nodoc:
  if options[:collation]
    execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}` COLLATE `#{options[:collation]}`"
  else
    execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}`"
  end
end

def drop_database(name) #:nodoc:
  execute "DROP DATABASE IF EXISTS `#{name}`"
end

def current_database
  select_one("SELECT DATABASE() as db")["db"]
end

# Defaults new tables to InnoDB/utf8 unless the caller overrides :options.
def create_table(name, options = {}) #:nodoc:
  super(name, {:options => "ENGINE=InnoDB DEFAULT CHARSET=utf8"}.merge(options))
end

def rename_table(name, new_name)
  execute "RENAME TABLE #{quote_table_name(name)} TO #{quote_table_name(new_name)}"
end

# ADD COLUMN with MySQL-specific options (FIRST/AFTER positioning).
def add_column(table_name, column_name, type, options = {})
  add_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
  add_column_options!(add_column_sql, options)
  add_column_position!(add_column_sql, options)
  execute(add_column_sql)
end
def change_column_default(table_name, column_name, default) #:nodoc:
  column = column_for(table_name, column_name)
  change_column table_name, column_name, column.sql_type, :default => default
end

# When tightening to NOT NULL with a default supplied, backfill existing
# NULLs first so the ALTER cannot fail on them.
def change_column_null(table_name, column_name, null, default = nil)
  column = column_for(table_name, column_name)
  unless null || default.nil?
    execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
  end
  change_column table_name, column_name, column.sql_type, :null => null
end

# Preserves the current default/null-ness unless explicitly overridden,
# since MySQL's CHANGE restates the whole column definition.
def change_column(table_name, column_name, type, options = {}) #:nodoc:
  column = column_for(table_name, column_name)
  unless options_include_default?(options)
    options[:default] = column.default
  end
  unless options.has_key?(:null)
    options[:null] = column.null
  end
  change_column_sql = "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
  add_column_options!(change_column_sql, options)
  add_column_position!(change_column_sql, options)
  execute(change_column_sql)
end

# MySQL's CHANGE requires restating the full type, default and null-ness,
# so those are read back from the existing column definition first.
def rename_column(table_name, column_name, new_column_name) #:nodoc:
  options = {}
  if column = columns(table_name).find { |c| c.name == column_name.to_s }
    options[:default] = column.default
    options[:null] = column.null
  else
    raise ActiveRecord::ActiveRecordError, "No such column: #{table_name}.#{column_name}"
  end
  current_type = select_one("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE '#{column_name}'")["Type"]
  rename_column_sql = "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(new_column_name)} #{current_type}"
  add_column_options!(rename_column_sql, options)
  execute(rename_column_sql)
end
# Appends MySQL LIMIT/OFFSET clauses to +sql+ in place and returns it.
# An offset without a limit uses a bare OFFSET clause.
def add_limit_offset!(sql, options) #:nodoc:
  limit = options[:limit]
  offset = options[:offset]
  if limit
    sql << (offset ? " LIMIT #{offset.to_i}, #{sanitize_limit(limit)}" : " LIMIT #{sanitize_limit(limit)}")
  elsif offset
    sql << " OFFSET #{offset.to_i}"
  end
  sql
end
# Taken from: https://github.com/gfmurphy/rails/blob/3-1-stable/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb#L540
#
# In the simple case, MySQL allows us to place JOINs directly into the UPDATE
# query. However, this does not allow for LIMIT, OFFSET and ORDER. To support
# these, we must use a subquery. However, MySQL is too stupid to create a
# temporary table for this automatically, so we have to give it some prompting
# in the form of a subsubquery. Ugh!
def join_to_update(update, select) #:nodoc:
  if select.limit || select.offset || select.orders.any?
    subsubselect = select.clone
    subsubselect.projections = [update.key]
    subselect = Arel::SelectManager.new(select.engine)
    subselect.project Arel.sql(update.key.name)
    subselect.from subsubselect.as('__active_record_temp')
    update.where update.key.in(subselect)
  else
    update.table select.source
    update.wheres = select.constraints
  end
end

# Value of a server variable, e.g. show_variable("max_connections").
def show_variable(var)
  res = execute("show variables like '#{var}'")
  result_row = res.detect {|row| row["Variable_name"] == var }
  result_row && result_row["Value"]
end

def charset
  show_variable("character_set_database")
end

def collation
  show_variable("collation_database")
end
# Maps AR's :integer byte-size limits onto MySQL's sized integer types;
# all other types use the generic implementation.
def type_to_sql(type, limit = nil, precision = nil, scale = nil)
  return super unless type.to_s == 'integer'
  case limit
  when 1; 'tinyint'
  when 2; 'smallint'
  when 3; 'mediumint'
  when nil, 4, 11; 'int(11)' # compatibility with MySQL default
  when 5..8; 'bigint'
  # BUG FIX: the bare ActiveRecordError constant is not resolvable from this
  # module's lexical scope (it lives under ActiveRecord), so this raised a
  # NameError instead of the intended error — fully qualify it.
  else raise(::ActiveRecord::ActiveRecordError, "No integer type has byte size #{limit}")
  end
end
# Appends MySQL column-placement modifiers (FIRST / AFTER col) to +sql+.
def add_column_position!(sql, options)
  if options[:first]
    sql << " FIRST"
  else
    after_column = options[:after]
    sql << " AFTER #{quote_column_name(after_column)}" if after_column
  end
end
protected
# Index column list with optional prefix lengths, e.g. `name`(10).
# +options[:length]+ may be a Hash (per-column lengths) or an Integer
# (applied to every column).
def quoted_columns_for_index(column_names, options = {})
  length = options[:length] if options.is_a?(Hash)
  case length
  when Hash
    column_names.map { |name| length[name] ? "#{quote_column_name(name)}(#{length[name]})" : quote_column_name(name) }
  when Integer # was Fixnum — deprecated in Ruby 2.4 and removed in 3.2
    column_names.map { |name| "#{quote_column_name(name)}(#{length})" }
  else
    column_names.map { |name| quote_column_name(name) }
  end
end
# Converts MySQL error numbers into the richer ActiveRecord exception types.
def translate_exception(exception, message)
  return super unless exception.respond_to?(:errno)
  case exception.errno
  when 1062 # duplicate key entry
    ::ActiveRecord::RecordNotUnique.new(message, exception)
  when 1452 # foreign key constraint violation
    ::ActiveRecord::InvalidForeignKey.new(message, exception)
  else
    super
  end
end
private
# Looks up a column definition, raising when the column does not exist.
def column_for(table_name, column_name)
  unless column = columns(table_name).find { |c| c.name == column_name.to_s }
    raise "No such column: #{table_name}.#{column_name}"
  end
  column
end

def show_create_table(table)
  select_one("SHOW CREATE TABLE #{quote_table_name(table)}")
end

# Views are not handled by this adapter version.
def supports_views?
  false
end
end
end
module ActiveRecord
  module ConnectionAdapters
    # Remove any vestiges of core/Ruby MySQL adapter
    remove_const(:MysqlColumn) if const_defined?(:MysqlColumn)
    remove_const(:MysqlAdapter) if const_defined?(:MysqlAdapter)

    # JDBC-backed replacement for the MRI MysqlColumn.
    class MysqlColumn < JdbcColumn
      include ArJdbc::MySQL::ColumnExtensions

      # JdbcColumn's initializer takes a config first; when constructed
      # directly with a name, shift the arguments accordingly.
      def initialize(name, *args)
        if Hash === name
          super
        else
          super(nil, name, *args)
        end
      end

      # No adapter-specific column callbacks to run.
      def call_discovered_column_callbacks(*)
      end
    end

    # JDBC-backed replacement for the MRI MysqlAdapter.
    class MysqlAdapter < JdbcAdapter
      include ArJdbc::MySQL

      def initialize(*args)
        super
        configure_connection
      end

      def jdbc_connection_class(spec)
        ::ArJdbc::MySQL.jdbc_connection_class
      end

      def jdbc_column_class
        ActiveRecord::ConnectionAdapters::MysqlColumn
      end

      alias_chained_method :columns, :query_cache, :jdbc_columns

      protected

      def exec_insert(sql, name, binds)
        binds = binds.dup
        # Pretend to support bind parameters by interpolating quoted
        # values into the SQL before execution.
        unless binds.empty?
          sql = sql.gsub('?') { quote(*binds.shift.reverse) }
        end
        execute sql, name
      end
      alias :exec_update :exec_insert
      alias :exec_delete :exec_insert
    end
  end
end
# Compatibility shim for code (and AR tests) that reference the mysql gem.
module Mysql # :nodoc:
  remove_const(:Error) if const_defined?(:Error)

  class Error < ::ActiveRecord::JDBCError
  end

  def self.client_version
    50400 # faked out for AR tests
  end
end
Added support for Rails 3.2 explain feature per issue #159. I
lifted this code from the mysql2 gem and modified it to work
in the JDBC gem. I've done testing under JRuby 1.6.7.2 with
MySQL 5.1 and 5.5 with no issues.
Github issue URL:
https://github.com/jruby/activerecord-jdbc-adapter/issues/159
require 'active_record/connection_adapters/abstract/schema_definitions'
module ::ArJdbc
module MySQL
def self.column_selector
[/mysql/i, lambda {|cfg,col| col.extend(::ArJdbc::MySQL::ColumnExtensions)}]
end
def self.extended(adapter)
adapter.configure_connection
end
def configure_connection
execute("SET SQL_AUTO_IS_NULL=0")
end
def self.jdbc_connection_class
::ActiveRecord::ConnectionAdapters::MySQLJdbcConnection
end
module ColumnExtensions
def extract_default(default)
if sql_type =~ /blob/i || type == :text
if default.blank?
return null ? nil : ''
else
raise ArgumentError, "#{type} columns cannot have a default value: #{default.inspect}"
end
elsif missing_default_forged_as_empty_string?(default)
nil
else
super
end
end
def has_default?
return false if sql_type =~ /blob/i || type == :text #mysql forbids defaults on blob and text columns
super
end
def simplified_type(field_type)
case field_type
when /tinyint\(1\)|bit/i then :boolean
when /enum/i then :string
when /year/i then :integer
else
super
end
end
def extract_limit(sql_type)
case sql_type
when /blob|text/i
case sql_type
when /tiny/i
255
when /medium/i
16777215
when /long/i
2147483647 # mysql only allows 2^31-1, not 2^32-1, somewhat inconsistently with the tiny/medium/normal cases
else
nil # we could return 65535 here, but we leave it undecorated by default
end
when /^enum/i; 255
when /^bigint/i; 8
when /^int/i; 4
when /^mediumint/i; 3
when /^smallint/i; 2
when /^tinyint/i; 1
when /^(bool|date|float|int|time)/i
nil
else
super
end
end
# MySQL misreports NOT NULL column default when none is given.
# We can't detect this for columns which may have a legitimate ''
# default (string) but we can for others (integer, datetime, boolean,
# and the rest).
#
# Test whether the column has default '', is not null, and is not
# a type allowing default ''.
def missing_default_forged_as_empty_string?(default)
type != :string && !null && default == ''
end
end
def modify_types(tp)
tp[:primary_key] = "int(11) DEFAULT NULL auto_increment PRIMARY KEY"
tp[:integer] = { :name => 'int', :limit => 4 }
tp[:decimal] = { :name => "decimal" }
tp[:timestamp] = { :name => "datetime" }
tp[:datetime][:limit] = nil
tp
end
def adapter_name #:nodoc:
'MySQL'
end
def self.arel2_visitors(config)
{}.tap {|v| %w(mysql mysql2 jdbcmysql).each {|a| v[a] = ::Arel::Visitors::MySQL } }
end
def case_sensitive_equality_operator
"= BINARY"
end
def case_sensitive_modifier(node)
Arel::Nodes::Bin.new(node)
end
def limited_update_conditions(where_sql, quoted_table_name, quoted_primary_key)
where_sql
end
# QUOTING ==================================================
def quote(value, column = nil)
return value.quoted_id if value.respond_to?(:quoted_id)
if column && column.type == :primary_key
value.to_s
elsif column && String === value && column.type == :binary && column.class.respond_to?(:string_to_binary)
s = column.class.string_to_binary(value).unpack("H*")[0]
"x'#{s}'"
elsif BigDecimal === value
"'#{value.to_s("F")}'"
else
super
end
end
def quote_column_name(name)
"`#{name.to_s.gsub('`', '``')}`"
end
def quoted_true
"1"
end
def quoted_false
"0"
end
def supports_savepoints? #:nodoc:
true
end
def create_savepoint
execute("SAVEPOINT #{current_savepoint_name}")
end
def rollback_to_savepoint
execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
end
def release_savepoint
execute("RELEASE SAVEPOINT #{current_savepoint_name}")
end
def disable_referential_integrity(&block) #:nodoc:
old = select_value("SELECT @@FOREIGN_KEY_CHECKS")
begin
update("SET FOREIGN_KEY_CHECKS = 0")
yield
ensure
update("SET FOREIGN_KEY_CHECKS = #{old}")
end
end
# SCHEMA STATEMENTS ========================================
def structure_dump #:nodoc:
if supports_views?
sql = "SHOW FULL TABLES WHERE Table_type = 'BASE TABLE'"
else
sql = "SHOW TABLES"
end
select_all(sql).inject("") do |structure, table|
table.delete('Table_type')
hash = show_create_table(table.to_a.first.last)
if(table = hash["Create Table"])
structure += table + ";\n\n"
elsif(view = hash["Create View"])
structure += view + ";\n\n"
end
end
end
# based on:
# https://github.com/rails/rails/blob/3-1-stable/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb#L756
# Required for passing rails column caching tests
# Returns a table's primary key and belonging sequence.
def pk_and_sequence_for(table) #:nodoc:
keys = []
result = execute("SHOW INDEX FROM #{quote_table_name(table)} WHERE Key_name = 'PRIMARY'", 'SCHEMA')
result.each do |h|
keys << h["Column_name"]
end
keys.length == 1 ? [keys.first, nil] : nil
end
# based on:
# https://github.com/rails/rails/blob/3-1-stable/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb#L647
# Returns an array of indexes for the given table.
def indexes(table_name, name = nil)#:nodoc:
indexes = []
current_index = nil
result = execute("SHOW KEYS FROM #{quote_table_name(table_name)}", name)
result.each do |row|
key_name = row["Key_name"]
if current_index != key_name
next if key_name == "PRIMARY" # skip the primary key
current_index = key_name
indexes << ::ActiveRecord::ConnectionAdapters::IndexDefinition.new(
row["Table"], key_name, row["Non_unique"] == 0, [], [])
end
indexes.last.columns << row["Column_name"]
indexes.last.lengths << row["Sub_part"]
end
indexes
end
def jdbc_columns(table_name, name = nil)#:nodoc:
sql = "SHOW FIELDS FROM #{quote_table_name(table_name)}"
execute(sql, 'SCHEMA').map do |field|
::ActiveRecord::ConnectionAdapters::MysqlColumn.new(field["Field"], field["Default"], field["Type"], field["Null"] == "YES")
end
end
# Returns just a table's primary key
def primary_key(table)
pk_and_sequence = pk_and_sequence_for(table)
pk_and_sequence && pk_and_sequence.first
end
def recreate_database(name, options = {}) #:nodoc:
drop_database(name)
create_database(name, options)
end
def create_database(name, options = {}) #:nodoc:
if options[:collation]
execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}` COLLATE `#{options[:collation]}`"
else
execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}`"
end
end
def drop_database(name) #:nodoc:
execute "DROP DATABASE IF EXISTS `#{name}`"
end
def current_database
select_one("SELECT DATABASE() as db")["db"]
end
def create_table(name, options = {}) #:nodoc:
super(name, {:options => "ENGINE=InnoDB DEFAULT CHARSET=utf8"}.merge(options))
end
def rename_table(name, new_name)
execute "RENAME TABLE #{quote_table_name(name)} TO #{quote_table_name(new_name)}"
end
def add_column(table_name, column_name, type, options = {})
add_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
add_column_options!(add_column_sql, options)
add_column_position!(add_column_sql, options)
execute(add_column_sql)
end
def change_column_default(table_name, column_name, default) #:nodoc:
column = column_for(table_name, column_name)
change_column table_name, column_name, column.sql_type, :default => default
end
def change_column_null(table_name, column_name, null, default = nil)
column = column_for(table_name, column_name)
unless null || default.nil?
execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
end
change_column table_name, column_name, column.sql_type, :null => null
end
def change_column(table_name, column_name, type, options = {}) #:nodoc:
column = column_for(table_name, column_name)
unless options_include_default?(options)
options[:default] = column.default
end
unless options.has_key?(:null)
options[:null] = column.null
end
change_column_sql = "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
add_column_options!(change_column_sql, options)
add_column_position!(change_column_sql, options)
execute(change_column_sql)
end
def rename_column(table_name, column_name, new_column_name) #:nodoc:
options = {}
if column = columns(table_name).find { |c| c.name == column_name.to_s }
options[:default] = column.default
options[:null] = column.null
else
raise ActiveRecord::ActiveRecordError, "No such column: #{table_name}.#{column_name}"
end
current_type = select_one("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE '#{column_name}'")["Type"]
rename_column_sql = "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(new_column_name)} #{current_type}"
add_column_options!(rename_column_sql, options)
execute(rename_column_sql)
end
def add_limit_offset!(sql, options) #:nodoc:
limit, offset = options[:limit], options[:offset]
if limit && offset
sql << " LIMIT #{offset.to_i}, #{sanitize_limit(limit)}"
elsif limit
sql << " LIMIT #{sanitize_limit(limit)}"
elsif offset
sql << " OFFSET #{offset.to_i}"
end
sql
end
# Taken from: https://github.com/gfmurphy/rails/blob/3-1-stable/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb#L540
#
# In the simple case, MySQL allows us to place JOINs directly into the UPDATE
# query. However, this does not allow for LIMIT, OFFSET and ORDER. To support
# these, we must use a subquery. However, MySQL is too stupid to create a
# temporary table for this automatically, so we have to give it some prompting
# in the form of a subsubquery. Ugh!
def join_to_update(update, select) #:nodoc:
if select.limit || select.offset || select.orders.any?
subsubselect = select.clone
subsubselect.projections = [update.key]
subselect = Arel::SelectManager.new(select.engine)
subselect.project Arel.sql(update.key.name)
subselect.from subsubselect.as('__active_record_temp')
update.where update.key.in(subselect)
else
update.table select.source
update.wheres = select.constraints
end
end
def show_variable(var)
res = execute("show variables like '#{var}'")
result_row = res.detect {|row| row["Variable_name"] == var }
result_row && result_row["Value"]
end
def charset
show_variable("character_set_database")
end
def collation
show_variable("collation_database")
end
# Maps AR's :integer byte-size limits onto MySQL's sized integer types;
# all other types use the generic implementation.
def type_to_sql(type, limit = nil, precision = nil, scale = nil)
  return super unless type.to_s == 'integer'
  case limit
  when 1; 'tinyint'
  when 2; 'smallint'
  when 3; 'mediumint'
  when nil, 4, 11; 'int(11)' # compatibility with MySQL default
  when 5..8; 'bigint'
  # BUG FIX: the bare ActiveRecordError constant is not resolvable from this
  # module's lexical scope (it lives under ActiveRecord), so this raised a
  # NameError instead of the intended error — fully qualify it.
  else raise(::ActiveRecord::ActiveRecordError, "No integer type has byte size #{limit}")
  end
end
def add_column_position!(sql, options)
if options[:first]
sql << " FIRST"
elsif options[:after]
sql << " AFTER #{quote_column_name(options[:after])}"
end
end
protected
# Index column list with optional prefix lengths, e.g. `name`(10).
# +options[:length]+ may be a Hash (per-column lengths) or an Integer
# (applied to every column).
def quoted_columns_for_index(column_names, options = {})
  length = options[:length] if options.is_a?(Hash)
  case length
  when Hash
    column_names.map { |name| length[name] ? "#{quote_column_name(name)}(#{length[name]})" : quote_column_name(name) }
  when Integer # was Fixnum — deprecated in Ruby 2.4 and removed in 3.2
    column_names.map { |name| "#{quote_column_name(name)}(#{length})" }
  else
    column_names.map { |name| quote_column_name(name) }
  end
end
def translate_exception(exception, message)
return super unless exception.respond_to?(:errno)
case exception.errno
when 1062
::ActiveRecord::RecordNotUnique.new(message, exception)
when 1452
::ActiveRecord::InvalidForeignKey.new(message, exception)
else
super
end
end
private
def column_for(table_name, column_name)
unless column = columns(table_name).find { |c| c.name == column_name.to_s }
raise "No such column: #{table_name}.#{column_name}"
end
column
end
def show_create_table(table)
select_one("SHOW CREATE TABLE #{quote_table_name(table)}")
end
def supports_views?
false
end
end
end
module ActiveRecord
module ConnectionAdapters
# Remove any vestiges of core/Ruby MySQL adapter
remove_const(:MysqlColumn) if const_defined?(:MysqlColumn)
remove_const(:MysqlAdapter) if const_defined?(:MysqlAdapter)
class MysqlColumn < JdbcColumn
include ArJdbc::MySQL::ColumnExtensions
def initialize(name, *args)
if Hash === name
super
else
super(nil, name, *args)
end
end
def call_discovered_column_callbacks(*)
end
end
class MysqlAdapter < JdbcAdapter
include ArJdbc::MySQL
def initialize(*args)
super
configure_connection
end
## EXPLAIN support lifted from the mysql2 gem with slight modifications
## to work in the JDBC adapter gem.
def supports_explain?
  true
end

# Runs EXPLAIN on the SQL produced for +arel+ and returns the result
# formatted like the MySQL shell (see ExplainPrettyPrinter).
def explain(arel, binds = [])
  sql = "EXPLAIN #{to_sql(arel, binds.dup)}"
  start = Time.now.to_f
  raw_result = execute(sql, "EXPLAIN")
  # Wrap the raw rows in an ActiveRecord::Result for pretty printing.
  ar_result = ActiveRecord::Result.new(raw_result[0].keys, raw_result)
  elapsed = Time.now.to_f - start
  ExplainPrettyPrinter.new.pp(ar_result, elapsed)
end
# Formats EXPLAIN results in a way that resembles the MySQL shell:
#
#   +----+-------------+-------+-------+---------------+---------+---------+-------+------+-------------+
#   | id | select_type | table | type  | possible_keys | key     | key_len | ref   | rows | Extra       |
#   +----+-------------+-------+-------+---------------+---------+---------+-------+------+-------------+
#   | 1  | SIMPLE      | users | const | PRIMARY       | PRIMARY | 4       | const | 1    |             |
#   +----+-------------+-------+-------+---------------+---------+---------+-------+------+-------------+
#   2 rows in set (0.00 sec)
#
class ExplainPrettyPrinter # :nodoc:
  # Renders +result+ (an object with #columns and #rows) as an ASCII table
  # with an "N rows in set (X sec)" footer built from +elapsed+ seconds.
  def pp(result, elapsed)
    widths = compute_column_widths(result)
    separator = build_separator(widths)
    lines = [separator, build_cells(result.columns, widths), separator]
    result.rows.each { |row| lines << build_cells(row.values, widths) }
    lines << separator
    lines << build_footer(result.rows.length, elapsed)
    lines.join("\n") + "\n"
  end

  private

  # Width of each column: the widest of its header and all cell values.
  def compute_column_widths(result)
    result.columns.map do |col|
      cells_in_column = [col] + result.rows.map { |r| r[col].nil? ? 'NULL' : r[col].to_s }
      cells_in_column.map(&:length).max
    end
  end

  def build_separator(widths)
    padding = 1
    '+' + widths.map { |w| '-' * (w + (padding * 2)) }.join('+') + '+'
  end

  # Numbers are right-justified, everything else left-justified.
  def build_cells(items, widths)
    cells = items.each_with_index.map do |item, i|
      item = 'NULL' if item.nil?
      justifier = item.is_a?(Numeric) ? 'rjust' : 'ljust'
      item.to_s.send(justifier, widths[i])
    end
    '| ' + cells.join(' | ') + ' |'
  end

  def build_footer(nrows, elapsed)
    rows_label = nrows == 1 ? 'row' : 'rows'
    "#{nrows} #{rows_label} in set (%.2f sec)" % elapsed
  end
end
# Returns the JDBC connection class ar-jdbc should instantiate for MySQL.
# +spec+ is accepted for adapter-API compatibility but unused here.
def jdbc_connection_class(spec)
  ::ArJdbc::MySQL.jdbc_connection_class
end
# Column class used by this adapter when reflecting on table schemas.
def jdbc_column_class
  ActiveRecord::ConnectionAdapters::MysqlColumn
end
alias_chained_method :columns, :query_cache, :jdbc_columns
protected
# Executes an INSERT while emulating bind parameters: ActiveRecord hands in
# +binds+ as [column, value] pairs, and each '?' placeholder in +sql+ is
# textually replaced with the quoted value before execution.
# NOTE(review): the gsub matches every '?' in the SQL, so a literal '?'
# inside a quoted string value would also be substituted — confirm callers
# never pass such SQL.
def exec_insert(sql, name, binds)
  binds = binds.dup
  # Pretend to support bind parameters
  unless binds.empty?
    # quote(value, column) — each bind arrives as [column, value], hence reverse.
    sql = sql.gsub('?') { quote(*binds.shift.reverse) }
  end
  execute sql, name
end
# UPDATE and DELETE go through the same pretend-bind substitution path.
alias :exec_update :exec_insert
alias :exec_delete :exec_insert
end
end
end
# Compatibility shim providing the mysql gem's namespace for code (and the
# ActiveRecord test suite) that expects it to exist.
module Mysql # :nodoc:
  # Drop any previously defined error class so this definition wins.
  remove_const(:Error) if const_defined?(:Error)
  # Mapped onto the JDBC error hierarchy instead of the C driver's.
  class Error < ::ActiveRecord::JDBCError
  end
  # Hard-coded client version (5.4.0) so AR version checks pass.
  def self.client_version
    50400 # faked out for AR tests
  end
end
|
add index on assignments workflow_state
flag=none
test plan:
- assignments table has an index on workflow_state
Change-Id: I0fe95622fb8dce8af6c77c6270a841d0e545227a
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/293661
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
Reviewed-by: Weston Dransfield <e9203059795dc94e28a8831c65eb7869e818825e@instructure.com>
QA-Review: Alex Slaughter <11669a22a05ead38ed9badafd55ef4c5386724b6@instructure.com>
Product-Review: Alex Slaughter <11669a22a05ead38ed9badafd55ef4c5386724b6@instructure.com>
Migration-Review: Alex Slaughter <11669a22a05ead38ed9badafd55ef4c5386724b6@instructure.com>
# frozen_string_literal: true
# Copyright (C) 2022 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
# Adds an index on assignments.workflow_state to speed up queries filtering
# by assignment state.
class AddIndexOnAssignmentsWorkflowState < ActiveRecord::Migration[6.1]
  # Run before the new application code is deployed.
  tag :predeploy
  # CREATE INDEX CONCURRENTLY cannot run inside a transaction, so the
  # migration-level DDL transaction must be disabled.
  disable_ddl_transaction!
  def change
    # Concurrent build avoids blocking writes to the assignments table while
    # the index is created; if_not_exists makes a re-run a no-op.
    add_index :assignments, :workflow_state, algorithm: :concurrently, if_not_exists: true
  end
end
|
require 'active_record/connection_adapters/abstract/schema_definitions'
module ::ArJdbc
  # ActiveRecord JDBC adapter mixin for the NuoDB database.
  module NuoDB
    # Matcher used by ar-jdbc: when the driver name matches /nuodb/i,
    # column objects are extended with the NuoDB-specific Column module.
    def self.column_selector
      [/nuodb/i, lambda { |cfg, col| col.extend(::ArJdbc::NuoDB::Column) }]
    end
    # Placeholder for NuoDB-specific column behaviour (none needed yet).
    module Column
    end
    # Registers the Arel visitor that generates NuoDB-flavoured SQL.
    def self.arel2_visitors(config)
      {}.tap { |v| %w(nuodb).each { |a| v[a] = ::Arel::Visitors::NuoDB } }
    end
    # FEATURES ===============================================
    # Capability flags queried by ActiveRecord.
    def supports_migrations?
      true
    end
    def supports_primary_key?
      true
    end
    def supports_count_distinct?
      true
    end
    def supports_ddl_transactions?
      true
    end
    def supports_bulk_alter?
      false
    end
    def supports_savepoints?
      true
    end
    def supports_index_sort_order?
      true
    end
    def supports_partial_index?
      false
    end
    def supports_explain?
      false
    end
    # SAVEPOINT SUPPORT ======================================
    # Standard SQL savepoint statements; current_savepoint_name is provided
    # by the including adapter.
    def create_savepoint
      execute("SAVEPOINT #{current_savepoint_name}")
    end
    def rollback_to_savepoint
      execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
    end
    def release_savepoint
      execute("RELEASE SAVEPOINT #{current_savepoint_name}")
    end
    # Maps ActiveRecord's abstract column types onto NuoDB SQL types.
    def modify_types(tp)
      tp[:primary_key] = 'int not null generated always primary key'
      tp[:boolean] = {:name => 'boolean'}
      tp[:integer] = {:name => 'int', :limit => 4}
      tp[:decimal] = {:name => 'decimal'}
      tp[:string] = {:name => 'string'}
      tp[:timestamp] = {:name => 'datetime'}
      # datetime carries no length limit in NuoDB.
      tp[:datetime][:limit] = nil
      tp
    end
    # Resolves an abstract type (+ limit) to concrete NuoDB SQL.  Length
    # limits are dropped for text/binary/string; integer limits map to
    # int/smallint/bigint (limit 1 also yields smallint — presumably NuoDB
    # has no tinyint; TODO confirm).
    def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
      limit = nil if %w(text binary string).include? type.to_s
      return 'uniqueidentifier' if (type.to_s == 'uniqueidentifier')
      return super unless type.to_s == 'integer'
      if limit.nil? || limit == 4
        'int'
      elsif limit == 2
        'smallint'
      elsif limit == 1
        'smallint'
      else
        'bigint'
      end
    end
    # Booleans are emitted as the bare words true/false; everything else
    # uses the default quoting rules.
    def quote(value, column = nil)
      case value
      when TrueClass, FalseClass
        value.to_s
      else
        super
      end
    end
    # Substitutes bind values into the SQL and runs the driver's dedicated
    # insert path.
    def exec_insert(sql, name, binds)
      sql = substitute_binds(sql, binds)
      @connection.execute_insert(sql)
    end
    # Primary keys for a (schema-qualified) table, via the JDBC driver.
    def primary_keys(table)
      @connection.primary_keys(qualify_table(table))
    end
    # Column metadata for a table in the configured schema.
    def columns(table_name, name=nil)
      @connection.columns_internal(table_name.to_s, name, nuodb_schema)
    end
    private
    # Prefixes the configured schema unless the table name is already
    # qualified or no schema is configured.
    def qualify_table(table)
      if (table.include? '.') || @config[:schema].blank?
        table
      else
        nuodb_schema + '.' + table
      end
    end
    # Configured schema name, or '' when none is set.
    # NOTE(review): reads `config` here but `@config` in qualify_table —
    # confirm both resolve to the same hash.
    def nuodb_schema
      config[:schema] || ''
    end
  end
end
Minor restructuring of the code.
require 'active_record/connection_adapters/abstract/schema_definitions'
module ::ArJdbc
  # ActiveRecord JDBC adapter mixin for the NuoDB database.
  module NuoDB
    # Matcher used by ar-jdbc: when the driver name matches /nuodb/i,
    # column objects are extended with the NuoDB-specific Column module.
    def self.column_selector
      [/nuodb/i, lambda { |cfg, col| col.extend(::ArJdbc::NuoDB::Column) }]
    end
    # Placeholder for NuoDB-specific column behaviour (none needed yet).
    module Column
    end
    # Registers the Arel visitor that generates NuoDB-flavoured SQL.
    def self.arel2_visitors(config)
      {}.tap { |v| %w(nuodb).each { |a| v[a] = ::Arel::Visitors::NuoDB } }
    end
    # FEATURES ===============================================
    # Capability flags queried by ActiveRecord.
    def supports_migrations?
      true
    end
    def supports_primary_key?
      true
    end
    def supports_count_distinct?
      true
    end
    def supports_ddl_transactions?
      true
    end
    def supports_bulk_alter?
      false
    end
    def supports_savepoints?
      true
    end
    def supports_index_sort_order?
      true
    end
    def supports_partial_index?
      false
    end
    def supports_explain?
      false
    end
    # QUOTING ################################################
    # Booleans are emitted as the bare words true/false; everything else
    # uses the default quoting rules.
    def quote(value, column = nil)
      case value
      when TrueClass, FalseClass
        value.to_s
      else
        super
      end
    end
    # Wraps a column name in backticks, doubling any embedded backtick.
    def quote_column_name(name)
      "`#{name.to_s.gsub('`', '``')}`"
    end
    # Backtick-quotes each dotted component of a table name.
    def quote_table_name(name)
      quote_column_name(name).gsub('.', '`.`')
    end
    # Booleans pass through unchanged; everything else uses the default cast.
    def type_cast(value, column)
      return super unless value == true || value == false
      value ? true : false
    end
    # Boolean SQL literals are quoted strings here, unlike #quote which
    # emits bare words. NOTE(review): confirm this asymmetry is intended.
    def quoted_true
      "'true'"
    end
    def quoted_false
      "'false'"
    end
    # Time-like values are converted to UTC before :db formatting.
    def quoted_date(value)
      if value.acts_like?(:time)
        zone_conversion_method = :getutc
        if value.respond_to?(zone_conversion_method)
          value = value.send(zone_conversion_method)
        end
      end
      value.to_s(:db)
    end
    # SAVEPOINT SUPPORT ======================================
    # Standard SQL savepoint statements; current_savepoint_name is provided
    # by the including adapter.
    def create_savepoint
      execute("SAVEPOINT #{current_savepoint_name}")
    end
    def rollback_to_savepoint
      execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
    end
    def release_savepoint
      execute("RELEASE SAVEPOINT #{current_savepoint_name}")
    end
    # Maps ActiveRecord's abstract column types onto NuoDB SQL types.
    def modify_types(tp)
      tp[:primary_key] = 'int not null generated always primary key'
      tp[:boolean] = {:name => 'boolean'}
      tp[:integer] = {:name => 'int', :limit => 4}
      tp[:decimal] = {:name => 'decimal'}
      tp[:string] = {:name => 'string'}
      tp[:timestamp] = {:name => 'datetime'}
      # datetime carries no length limit in NuoDB.
      tp[:datetime][:limit] = nil
      tp
    end
    # Resolves an abstract type (+ limit) to concrete NuoDB SQL.  Length
    # limits are dropped for text/binary/string; integer limits map to
    # int/smallint/bigint (limit 1 also yields smallint — presumably NuoDB
    # has no tinyint; TODO confirm).
    def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
      limit = nil if %w(text binary string).include? type.to_s
      return 'uniqueidentifier' if (type.to_s == 'uniqueidentifier')
      return super unless type.to_s == 'integer'
      if limit.nil? || limit == 4
        'int'
      elsif limit == 2
        'smallint'
      elsif limit == 1
        'smallint'
      else
        'bigint'
      end
    end
    # Substitutes bind values into the SQL and runs the driver's dedicated
    # insert path.
    def exec_insert(sql, name, binds)
      sql = substitute_binds(sql, binds)
      @connection.execute_insert(sql)
    end
    # Primary keys for a (schema-qualified) table, via the JDBC driver.
    def primary_keys(table)
      @connection.primary_keys(qualify_table(table))
    end
    # Column metadata for a table in the configured schema.
    def columns(table_name, name=nil)
      @connection.columns_internal(table_name.to_s, name, nuodb_schema)
    end
    private
    # Prefixes the configured schema unless the table name is already
    # qualified or no schema is configured.
    def qualify_table(table)
      if (table.include? '.') || @config[:schema].blank?
        table
      else
        nuodb_schema + '.' + table
      end
    end
    # Configured schema name, or '' when none is set.
    # NOTE(review): reads `config` here but `@config` in qualify_table —
    # confirm both resolve to the same hash.
    def nuodb_schema
      config[:schema] || ''
    end
  end
end
|
module ArtDecomp class Bipainter
  # Colours two related graphs at once — the QV graph (built from beta_q)
  # and the G graph (built from the product beta_q * beta_v) — backtracking
  # on conflicts, and derives one Blanket from each final colouring.
  # Vertices are integers treated as bit sets.

  # beta_q, beta_v are blankets; seps are separation constraints.  The
  # beta_v blocks must be pairwise disjoint (bitwise AND of any two zero).
  def initialize beta_q, beta_v, seps
    raise 'non-disjoint beta_v' unless beta_v.ints.pairs.all? { |a, b| (a & b).zero? }
    @beta_v = beta_v
    @qv_colours = {}
    @g_colours = {}
    # Per-vertex sets of colours that may no longer be used.
    @qv_forbidden = Hash.new { |h, k| h[k] = Set[] }
    @g_forbidden = Hash.new { |h, k| h[k] = Set[] }
    @qv_graph = Graph.new beta_q, seps - beta_v.seps
    @g_graph = Graph.new beta_q * beta_v, seps
  end

  # Paints every vertex, then ORs same-coloured vertices into blocks and
  # returns the two resulting Blankets (QV first, G second).
  def blankets
    colour_next_vertex! until painted?
    qv_blocks = Hash.new 0
    g_blocks = Hash.new 0
    @qv_colours.each { |vertex, colour| qv_blocks[colour] |= vertex }
    @g_colours.each { |vertex, colour| g_blocks[colour] |= vertex }
    [Blanket.new(qv_blocks.values), Blanket.new(g_blocks.values)]
  end

  private

  # Snapshots the colouring state via Marshal so a failed attempt can be
  # rolled back.  Hash default procs are not dumpable, so the defaults are
  # cleared for the dump and reinstalled afterwards.
  def backup!
    @g_forbidden.default = nil
    @qv_forbidden.default = nil
    @backup = {
      :g_colours => Marshal.dump(@g_colours),
      :g_forbidden => Marshal.dump(@g_forbidden),
      :qv_colours => Marshal.dump(@qv_colours),
      :qv_forbidden => Marshal.dump(@qv_forbidden),
    }
    @g_forbidden.default_proc = proc { |h, k| h[k] = Set[] }
    @qv_forbidden.default_proc = proc { |h, k| h[k] = Set[] }
  end

  # Restores the snapshot taken by #backup!, reinstalling the per-key Set
  # default procs lost in marshalling.
  def restore!
    @g_colours = Marshal.load @backup[:g_colours]
    @g_forbidden = Marshal.load @backup[:g_forbidden]
    @qv_colours = Marshal.load @backup[:qv_colours]
    @qv_forbidden = Marshal.load @backup[:qv_forbidden]
    @g_forbidden.default_proc = proc { |h, k| h[k] = Set[] }
    @qv_forbidden.default_proc = proc { |h, k| h[k] = Set[] }
  end

  # Tries successive colours (:a, :b, … via Symbol#next) skipping forbidden
  # ones; if the propagation raises PaintingError, rolls back, forbids the
  # failed colour and retries with the next candidate.
  def colour_g_vertex! g_vertex
    begin
      backup!
      colour = :a
      colour = colour.next while @g_forbidden[g_vertex].include? colour
      colour_g! g_vertex, colour
    rescue PaintingError
      restore!
      forbid_g! g_vertex, colour
      retry
    end
  end

  # Picks the most constrained uncoloured vertex (most forbidden colours,
  # then highest degree) in each graph and colours it.
  def colour_next_vertex!
    # FIXME: consider colouring G graph’s vertex first
    # FIXME: consider other vertex selection algorithms
    qv_vertex = (@qv_graph.vertices - @qv_colours.keys).sort_by { |v| [-@qv_forbidden[v].size, -@qv_graph.degree(v)] }.first
    colour_qv_vertex! qv_vertex if qv_vertex
    g_vertex = (@g_graph.vertices - @g_colours.keys).sort_by { |v| [-@g_forbidden[v].size, -@g_graph.degree(v)] }.first
    colour_g_vertex! g_vertex if g_vertex
  end

  # QV analogue of #colour_g_vertex!: try, roll back on failure, retry.
  def colour_qv_vertex! qv_vertex
    begin
      backup!
      colour = :a
      colour = colour.next while @qv_forbidden[qv_vertex].include? colour
      colour_qv! qv_vertex, colour
    rescue PaintingError
      restore!
      forbid_qv! qv_vertex, colour
      retry
    end
  end

  # Bans a colour for a G vertex (raising if the vertex already wears it)
  # and propagates the ban to its sibling vertices.
  def forbid_g! g_vertex, colour
    return if @g_forbidden[g_vertex].include? colour
    raise PaintingError if colour == @g_colours[g_vertex]
    @g_forbidden[g_vertex] << colour
    siblings_of(g_vertex).each { |sibling| forbid_g! sibling, colour }
  end

  # Bans a colour for a QV vertex; no sibling propagation on this side.
  def forbid_qv! qv_vertex, colour
    return if @qv_forbidden[qv_vertex].include? colour
    raise PaintingError if colour == @qv_colours[qv_vertex]
    @qv_forbidden[qv_vertex] << colour
  end

  # Assigns a colour to a G vertex, bans it from adjacent vertices, and
  # paints siblings the same colour; raises PaintingError on any conflict.
  def colour_g! g_vertex, colour
    return if @g_colours[g_vertex] == colour
    raise PaintingError if @g_colours[g_vertex] and @g_colours[g_vertex] != colour
    raise PaintingError if @g_forbidden[g_vertex].include? colour
    @g_colours[g_vertex] = colour
    @g_graph.adjacent(g_vertex).each { |adjacent| forbid_g! adjacent, colour }
    siblings_of(g_vertex).each { |sibling| colour_g! sibling, colour }
  end

  # Assigns a colour to a QV vertex and bans it from its neighbours.  When
  # another QV vertex already wears the same colour, constraints propagate
  # into the G graph: for each contained G vertex, the colour is forbidden
  # for the q-parents of its neighbours within the same v-block, and
  # sibling colourings are synchronised.
  def colour_qv! qv_vertex, colour
    return if @qv_colours[qv_vertex] == colour
    raise PaintingError if @qv_colours[qv_vertex] and @qv_colours[qv_vertex] != colour
    raise PaintingError if @qv_forbidden[qv_vertex].include? colour
    @qv_colours[qv_vertex] = colour
    @qv_graph.adjacent(qv_vertex).each { |adjacent| forbid_qv! adjacent, colour }
    if @qv_colours.any? { |q, col| q != qv_vertex and col == colour }
      @g_graph.vertices.select { |g| g & qv_vertex == g }.each do |g_vertex|
        v_parent = @beta_v.ints.find { |v| v & g_vertex == g_vertex }
        @g_graph.adjacent(g_vertex).select { |g| v_parent & g == g and qv_vertex & g != g }.each do |neighbour|
          @qv_graph.vertices.select { |q| q & neighbour == neighbour }.each do |q_parent|
            forbid_qv! q_parent, colour
          end
        end
        siblings_of(g_vertex).each { |sibling| sync_colours g_vertex, sibling }
      end
    end
  end

  # Other G vertices in the same beta_v block whose covering QV vertices
  # wear the same colours as those covering g_vertex.
  def siblings_of g_vertex
    v_parent = @beta_v.ints.find { |v| v & g_vertex == g_vertex }
    colours = @qv_colours.select { |q, col| g_vertex & q == g_vertex }.values
    similar = @qv_colours.select { |q, col| colours.include? col }.keys
    (similar.map { |q| q & v_parent }.to_set & @g_graph.vertices).delete g_vertex
  end

  # Makes two sibling G vertices agree: forbidden sets are unioned and the
  # colour of one (if any) is copied to the other.
  def sync_colours v1, v2
    (@g_forbidden[v1] - @g_forbidden[v2]).each { |col| forbid_g! v2, col }
    (@g_forbidden[v2] - @g_forbidden[v1]).each { |col| forbid_g! v1, col }
    if @g_colours[v1] then colour_g! v2, @g_colours[v1]
    elsif @g_colours[v2] then colour_g! v1, @g_colours[v2]
    end
  end

  # True once every vertex of both graphs has been coloured.
  def painted?
    @qv_graph.vertices == @qv_colours.keys.to_set and @g_graph.vertices == @g_colours.keys.to_set
  end
end end
Bipainter: reorder methods
module ArtDecomp class Bipainter
  # Colours two related graphs at once — the QV graph (built from beta_q)
  # and the G graph (built from the product beta_q * beta_v) — backtracking
  # on conflicts, and derives one Blanket from each final colouring.
  # Vertices are integers treated as bit sets.

  # beta_q, beta_v are blankets; seps are separation constraints.  The
  # beta_v blocks must be pairwise disjoint (bitwise AND of any two zero).
  def initialize beta_q, beta_v, seps
    raise 'non-disjoint beta_v' unless beta_v.ints.pairs.all? { |a, b| (a & b).zero? }
    @beta_v = beta_v
    @qv_colours = {}
    @g_colours = {}
    # Per-vertex sets of colours that may no longer be used.
    @qv_forbidden = Hash.new { |h, k| h[k] = Set[] }
    @g_forbidden = Hash.new { |h, k| h[k] = Set[] }
    @qv_graph = Graph.new beta_q, seps - beta_v.seps
    @g_graph = Graph.new beta_q * beta_v, seps
  end

  # Paints every vertex, then ORs same-coloured vertices into blocks and
  # returns the two resulting Blankets (QV first, G second).
  def blankets
    colour_next_vertex! until painted?
    qv_blocks = Hash.new 0
    g_blocks = Hash.new 0
    @qv_colours.each { |vertex, colour| qv_blocks[colour] |= vertex }
    @g_colours.each { |vertex, colour| g_blocks[colour] |= vertex }
    [Blanket.new(qv_blocks.values), Blanket.new(g_blocks.values)]
  end

  private

  # Snapshots the colouring state via Marshal so a failed attempt can be
  # rolled back.  Hash default procs are not dumpable, so the defaults are
  # cleared for the dump and reinstalled afterwards.
  def backup!
    @g_forbidden.default = nil
    @qv_forbidden.default = nil
    @backup = {
      :g_colours => Marshal.dump(@g_colours),
      :g_forbidden => Marshal.dump(@g_forbidden),
      :qv_colours => Marshal.dump(@qv_colours),
      :qv_forbidden => Marshal.dump(@qv_forbidden),
    }
    @g_forbidden.default_proc = proc { |h, k| h[k] = Set[] }
    @qv_forbidden.default_proc = proc { |h, k| h[k] = Set[] }
  end

  # Assigns a colour to a G vertex, bans it from adjacent vertices, and
  # paints siblings the same colour; raises PaintingError on any conflict.
  def colour_g! g_vertex, colour
    return if @g_colours[g_vertex] == colour
    raise PaintingError if @g_colours[g_vertex] and @g_colours[g_vertex] != colour
    raise PaintingError if @g_forbidden[g_vertex].include? colour
    @g_colours[g_vertex] = colour
    @g_graph.adjacent(g_vertex).each { |adjacent| forbid_g! adjacent, colour }
    siblings_of(g_vertex).each { |sibling| colour_g! sibling, colour }
  end

  # Tries successive colours (:a, :b, … via Symbol#next) skipping forbidden
  # ones; if the propagation raises PaintingError, rolls back, forbids the
  # failed colour and retries with the next candidate.
  def colour_g_vertex! g_vertex
    begin
      backup!
      colour = :a
      colour = colour.next while @g_forbidden[g_vertex].include? colour
      colour_g! g_vertex, colour
    rescue PaintingError
      restore!
      forbid_g! g_vertex, colour
      retry
    end
  end

  # Picks the most constrained uncoloured vertex (most forbidden colours,
  # then highest degree) in each graph and colours it.
  def colour_next_vertex!
    # FIXME: consider colouring G graph’s vertex first
    # FIXME: consider other vertex selection algorithms
    qv_vertex = (@qv_graph.vertices - @qv_colours.keys).sort_by { |v| [-@qv_forbidden[v].size, -@qv_graph.degree(v)] }.first
    colour_qv_vertex! qv_vertex if qv_vertex
    g_vertex = (@g_graph.vertices - @g_colours.keys).sort_by { |v| [-@g_forbidden[v].size, -@g_graph.degree(v)] }.first
    colour_g_vertex! g_vertex if g_vertex
  end

  # Assigns a colour to a QV vertex and bans it from its neighbours.  When
  # another QV vertex already wears the same colour, constraints propagate
  # into the G graph: for each contained G vertex, the colour is forbidden
  # for the q-parents of its neighbours within the same v-block, and
  # sibling colourings are synchronised.
  def colour_qv! qv_vertex, colour
    return if @qv_colours[qv_vertex] == colour
    raise PaintingError if @qv_colours[qv_vertex] and @qv_colours[qv_vertex] != colour
    raise PaintingError if @qv_forbidden[qv_vertex].include? colour
    @qv_colours[qv_vertex] = colour
    @qv_graph.adjacent(qv_vertex).each { |adjacent| forbid_qv! adjacent, colour }
    if @qv_colours.any? { |q, col| q != qv_vertex and col == colour }
      @g_graph.vertices.select { |g| g & qv_vertex == g }.each do |g_vertex|
        v_parent = @beta_v.ints.find { |v| v & g_vertex == g_vertex }
        @g_graph.adjacent(g_vertex).select { |g| v_parent & g == g and qv_vertex & g != g }.each do |neighbour|
          @qv_graph.vertices.select { |q| q & neighbour == neighbour }.each do |q_parent|
            forbid_qv! q_parent, colour
          end
        end
        siblings_of(g_vertex).each { |sibling| sync_colours g_vertex, sibling }
      end
    end
  end

  # QV analogue of #colour_g_vertex!: try, roll back on failure, retry.
  def colour_qv_vertex! qv_vertex
    begin
      backup!
      colour = :a
      colour = colour.next while @qv_forbidden[qv_vertex].include? colour
      colour_qv! qv_vertex, colour
    rescue PaintingError
      restore!
      forbid_qv! qv_vertex, colour
      retry
    end
  end

  # Bans a colour for a G vertex (raising if the vertex already wears it)
  # and propagates the ban to its sibling vertices.
  def forbid_g! g_vertex, colour
    return if @g_forbidden[g_vertex].include? colour
    raise PaintingError if colour == @g_colours[g_vertex]
    @g_forbidden[g_vertex] << colour
    siblings_of(g_vertex).each { |sibling| forbid_g! sibling, colour }
  end

  # Bans a colour for a QV vertex; no sibling propagation on this side.
  def forbid_qv! qv_vertex, colour
    return if @qv_forbidden[qv_vertex].include? colour
    raise PaintingError if colour == @qv_colours[qv_vertex]
    @qv_forbidden[qv_vertex] << colour
  end

  # True once every vertex of both graphs has been coloured.
  def painted?
    @qv_graph.vertices == @qv_colours.keys.to_set and @g_graph.vertices == @g_colours.keys.to_set
  end

  # Restores the snapshot taken by #backup!, reinstalling the per-key Set
  # default procs lost in marshalling.
  def restore!
    @g_colours = Marshal.load @backup[:g_colours]
    @g_forbidden = Marshal.load @backup[:g_forbidden]
    @qv_colours = Marshal.load @backup[:qv_colours]
    @qv_forbidden = Marshal.load @backup[:qv_forbidden]
    @g_forbidden.default_proc = proc { |h, k| h[k] = Set[] }
    @qv_forbidden.default_proc = proc { |h, k| h[k] = Set[] }
  end

  # Other G vertices in the same beta_v block whose covering QV vertices
  # wear the same colours as those covering g_vertex.
  def siblings_of g_vertex
    v_parent = @beta_v.ints.find { |v| v & g_vertex == g_vertex }
    colours = @qv_colours.select { |q, col| g_vertex & q == g_vertex }.values
    similar = @qv_colours.select { |q, col| colours.include? col }.keys
    (similar.map { |q| q & v_parent }.to_set & @g_graph.vertices).delete g_vertex
  end

  # Makes two sibling G vertices agree: forbidden sets are unioned and the
  # colour of one (if any) is copied to the other.
  def sync_colours v1, v2
    (@g_forbidden[v1] - @g_forbidden[v2]).each { |col| forbid_g! v2, col }
    (@g_forbidden[v2] - @g_forbidden[v1]).each { |col| forbid_g! v1, col }
    if @g_colours[v1] then colour_g! v2, @g_colours[v1]
    elsif @g_colours[v2] then colour_g! v1, @g_colours[v2]
    end
  end
end end
|
# Reader for .scss sources: reuses the Sass reader wholesale, merely
# switching the engine into SCSS syntax mode.
class Assette::Reader::Scss < Assette::Reader::Sass
  private

  # Sass engine options with the syntax forced to the SCSS variant.
  def options
    super.merge(syntax: :scss)
  end
end
Made sure the Sass reader is required before this subclass is defined.
require './sass'
module Assette
class Reader::Scss < Assette::Reader::Sass
private
def options
super.merge :syntax => :scss
end
end
end |
module Ataru
  # Converts annotated code samples ("expr # => expected") into
  # assert_equal statements that can be executed as tests.
  class TestConverter
    # Rewrites code_sample.code (storing the result via #prepared_code=),
    # turning every "expr # => expected" annotation into
    # "assert_equal expected, expr".  Returns the prepared code string, or
    # [:warning, code_sample] when there is no code to convert.
    #
    # Raises ArgumentError when code_sample itself is nil.
    def self.convert(code_sample)
      raise ArgumentError if code_sample.nil?
      if code_sample.code.nil? || code_sample.code.empty?
        return [:warning, code_sample]
      end
      # (\S+) captures the expression token before the "#=>"/"# =>" marker
      # and (.+) the expected value after it.  The previous single-character
      # groups (.) truncated both sides to one character, producing garbage
      # assertions for any real expression.
      code_sample.prepared_code =
        code_sample.code.gsub(/(\S+) # ?=> (.+)/, 'assert_equal \2, \1')
    end
  end
end
Changed regex in gsub method.
module Ataru
  # Converts annotated code samples ("expr # => expected") into
  # assert_equal statements that can be executed as tests.
  class TestConverter
    # Rewrites code_sample.code (storing the result via #prepared_code=),
    # turning every "expr # => expected" annotation into
    # "assert_equal expected, expr".  Returns the prepared code string, or
    # [:warning, code_sample] when there is no code to convert.
    #
    # Raises ArgumentError when code_sample itself is nil.
    def self.convert(code_sample)
      raise ArgumentError if code_sample.nil?
      # Idiomatic nil?/empty? guard instead of `== nil` / `.size == 0`.
      return [:warning, code_sample] if code_sample.code.nil? || code_sample.code.empty?
      # (\S+) captures the expression token before the "#=>"/"# =>" marker,
      # (.+) the expected value after it.
      code_sample.prepared_code =
        code_sample.code.gsub(/(\S+) # ?=> (.+)/, 'assert_equal \2, \1')
    end
  end
end
|
module AuthenticatedSystem
  protected

  # User types this application authenticates; one full set of helper
  # methods is generated for each entry.
  AUTHENTICATED_USER_TYPES = [:bunny]

  # Convenience methods for each kind of user, e.g. bunny_logged_in?,
  # current_bunny, current_bunny=, bunny_authorized?, bunny_login_required
  # and bunny_login_from_cookie.  The eval'd source only interpolates the
  # symbols from the constant above, never user input.
  AUTHENTICATED_USER_TYPES.each do |user_type|
    eval <<-END
      def #{user_type}_logged_in?
        user_logged_in?(:#{user_type})
      end
      def current_#{user_type}
        current_user(:#{user_type})
      end
      def current_#{user_type}=(new_user)
        set_current_user(:#{user_type} => new_user)
      end
      def #{user_type}_authorized?
        user_authorized?(:#{user_type})
      end
      def #{user_type}_login_required
        user_login_required(:#{user_type})
      end
      def #{user_type}_login_from_cookie
        user_login_from_cookie(:#{user_type})
      end
    END
  end

  # Inclusion hooks to make #current_{user_type} and #{user_type}_logged_in?
  # available as ActionView helper methods.
  def self.included(base)
    AUTHENTICATED_USER_TYPES.each do |user_type|
      base.send(:helper_method, "current_#{user_type}".to_sym, "#{user_type}_logged_in?".to_sym)
    end
  end

  # For a given user type (e.g. :player), returns true or false if that type
  # of user is logged in.
  # If they are logged in, it also populates an instance variable (e.g. @current_player)
  # with the user object.  The symbol :false is cached as a "known not
  # logged in" sentinel so the session is only hit once per request.
  def user_logged_in?(user_type)
    current_user = self.instance_variable_get("@current_#{user_type}".to_sym)
    klass = eval(user_type.to_s.camelize)
    current_user ||= session[user_type] ? klass.find_by_id(session[user_type]) : :false
    self.instance_variable_set("@current_#{user_type}".to_sym, current_user)
    current_user.is_a?(klass)
  end

  # Returns the current user object from the session for the given user type
  def current_user(user_type)
    instance_variable_get("@current_#{user_type}") if user_logged_in?(user_type)
  end

  # Stores the given user object in the session as the 'current' user:
  #   set_current_user(:player => new_player)
  def set_current_user(options)
    unless options.is_a?(Hash) && options.length == 1
      raise ArgumentError, "options must be a Hash of the form {:player => new_player}"
    end
    user_type = options.keys.first
    new_user = options.values.first
    # nil or the :false sentinel clears the session entry.
    session[user_type] = (new_user.nil? || new_user.is_a?(Symbol)) ? nil : new_user.id
    instance_variable_set("@current_#{user_type}".to_sym, new_user)
  end

  # Check if user is authorized.
  # Override the individual user_type versions for more complex authorization
  def user_authorized?(user_type)
    true
  end

  # Filter method to enforce a login requirement.
  #
  # To require logins for all actions, use this in your controllers:
  #
  #   before_filter :user_login_required
  #
  # To require logins for specific actions, use this in your controllers:
  #
  #   before_filter :user_login_required, :only => [ :edit, :update ]
  #
  # To skip this in a subclassed controller:
  #
  #   skip_before_filter :user_login_required
  #
  # replacing 'user' with the appropriate user type.
  def user_login_required(user_type)
    username, passwd = get_auth_data
    klass = eval(user_type.to_s.camelize)
    new_user = klass.authenticate(username, passwd) || :false if username && passwd
    set_current_user(user_type => new_user) unless current_user(user_type)
    user_logged_in?(user_type) && user_authorized?(user_type) ? true : user_access_denied(user_type)
  end

  # Redirect as appropriate when an access request fails.
  # By default, we redirect to the appropriate login screen.
  def user_access_denied(user_type)
    respond_to do |accepts|
      accepts.html do
        store_location
        flash[:notice] = "You must log in first"
        redirect_to(:controller => "/#{user_type.to_s}_sessions", :action => :new)
      end
      accepts.xml do
        headers["Status"] = "Unauthorized"
        headers["WWW-Authenticate"] = %(Basic realm="Web Password")
        render :text => "Couldn't authenticate you", :status => '401 Unauthorized'
      end
    end
    false
  end

  # Store the URI of the current request in the session.
  #
  # We can return to this location by calling #redirect_back_or_default.
  def store_location
    session[:return_to] = request.request_uri
  end

  # Redirect to the URI stored by the most recent store_location call or
  # to the passed default.
  def redirect_back_or_default(default)
    # redirect_to_url is deprecated/removed in Rails; redirect_to accepts a
    # URL string directly.
    session[:return_to] ? redirect_to(session[:return_to]) : redirect_to(default)
    session[:return_to] = nil
  end

  # When called from a before_filter (using the appropriate user_type version),
  # will check for the appropriate auth_token cookie, and login the appropriate user
  # if it's okay.
  def user_login_from_cookie(user_type)
    auth_token_name = "#{user_type}_auth_token".to_sym
    return unless cookies[auth_token_name] && !user_logged_in?(user_type)
    klass = eval(user_type.to_s.camelize)
    user = klass.find_by_remember_token(cookies[auth_token_name])
    if user && user.remember_token?
      # Refresh the token and re-issue the cookie with the new expiry.
      user.remember_me
      set_current_user(user_type => user)
      cookies[auth_token_name] = {:value => current_user(user_type).remember_token, :expires => current_user(user_type).remember_token_expires_at}
      flash[:notice] = "Logged in successfully"
    end
  end

  private

  # gets BASIC auth info
  def get_auth_data
    user, pass = nil, nil
    # extract authorisation credentials
    if request.env.has_key? 'X-HTTP_AUTHORIZATION'
      # try to get it where mod_rewrite might have put it
      authdata = request.env['X-HTTP_AUTHORIZATION'].to_s.split
    elsif request.env.has_key? 'HTTP_AUTHORIZATION'
      # this is the regular location
      authdata = request.env['HTTP_AUTHORIZATION'].to_s.split
    end
    # at the moment we only support basic authentication
    if authdata && authdata[0] == 'Basic'
      user, pass = Base64.decode64(authdata[1]).split(':')[0..1]
    end
    return [user, pass]
  end
end
Replace deprecated call to redirect_to_url
git-svn-id: 801577a2cbccabf54a3a609152c727abd26e524a@1192 a18515e9-6cfd-0310-9624-afb4ebaee84e
module AuthenticatedSystem
  protected

  # User types this application authenticates; one full set of helper
  # methods is generated for each entry.
  AUTHENTICATED_USER_TYPES = [:bunny]
  # Convenience methods for each kind of user, e.g. bunny_logged_in?,
  # current_bunny, current_bunny=, bunny_authorized?, bunny_login_required
  # and bunny_login_from_cookie.  The eval'd source only interpolates the
  # symbols from the constant above, never user input.
  AUTHENTICATED_USER_TYPES.each do |user_type|
    eval <<-END
      def #{user_type}_logged_in?
        user_logged_in?(:#{user_type})
      end
      def current_#{user_type}
        current_user(:#{user_type})
      end
      def current_#{user_type}=(new_user)
        set_current_user(:#{user_type} => new_user)
      end
      def #{user_type}_authorized?
        user_authorized?(:#{user_type})
      end
      def #{user_type}_login_required
        user_login_required(:#{user_type})
      end
      def #{user_type}_login_from_cookie
        user_login_from_cookie(:#{user_type})
      end
    END
  end
  # Inclusion hooks to make #current_{user_type} and #{user_type}_logged_in?
  # available as ActionView helper methods.
  def self.included(base)
    AUTHENTICATED_USER_TYPES.each do |user_type|
      base.send(:helper_method, "current_#{user_type}".to_sym, "#{user_type}_logged_in?".to_sym)
    end
  end
  # For a given user type (e.g. :player), returns true or false if that type
  # of user is logged in.
  # If they are logged in, it also populates an instance variable (e.g. @current_player)
  # with the user object.  The symbol :false is cached as a "known not
  # logged in" sentinel so the session is only hit once per request.
  def user_logged_in?(user_type)
    current_user = self.instance_variable_get("@current_#{user_type}".to_sym)
    klass = eval(user_type.to_s.camelize)
    current_user ||= session[user_type] ? klass.find_by_id(session[user_type]) : :false
    self.instance_variable_set("@current_#{user_type}".to_sym, current_user)
    current_user.is_a?(klass)
  end
  # Returns the current user object from the session for the given user type
  def current_user(user_type)
    instance_variable_get("@current_#{user_type}") if user_logged_in?(user_type)
  end
  # Stores the give user object in the session as the 'current' user:
  # set_current_user(:player => new_player)
  def set_current_user(options)
    unless options.is_a?(Hash) && options.length == 1
      raise ArgumentError, "options must be a Hash of the form {:player => new_player}"
    end
    user_type = options.keys.first
    new_user = options.values.first
    # nil or the :false sentinel clears the session entry.
    session[user_type] = (new_user.nil? || new_user.is_a?(Symbol)) ? nil : new_user.id
    instance_variable_set("@current_#{user_type}".to_sym, new_user)
  end
  # Check if user is authorized.
  # Override the individual user_type versions for more complex authorization
  def user_authorized?(user_type)
    true
  end
  # Filter method to enforce a login requirement.
  #
  # To require logins for all actions, use this in your controllers:
  #
  #   before_filter :user_login_required
  #
  # To require logins for specific actions, use this in your controllers:
  #
  #   before_filter :user_login_required, :only => [ :edit, :update ]
  #
  # To skip this in a subclassed controller:
  #
  #   skip_before_filter :user_login_required
  #
  # replacing 'user' with the appropriate user type.
  def user_login_required(user_type)
    username, passwd = get_auth_data
    klass = eval(user_type.to_s.camelize)
    new_user = klass.authenticate(username, passwd) || :false if username && passwd
    set_current_user(user_type => new_user) unless current_user(user_type)
    user_logged_in?(user_type) && user_authorized?(user_type) ? true : user_access_denied(user_type)
  end
  # Redirect as appropriate when an access request fails.
  # By default, we redirect to the appropriate login screen.
  def user_access_denied(user_type)
    respond_to do |accepts|
      accepts.html do
        store_location
        flash[:notice] = "You must log in first"
        redirect_to(:controller => "/#{user_type.to_s}_sessions", :action => :new)
      end
      accepts.xml do
        headers["Status"] = "Unauthorized"
        headers["WWW-Authenticate"] = %(Basic realm="Web Password")
        render :text => "Couldn't authenticate you", :status => '401 Unauthorized'
      end
    end
    false
  end
  # Store the URI of the current request in the session.
  #
  # We can return to this location by calling #redirect_back_or_default.
  def store_location
    session[:return_to] = request.request_uri
  end
  # Redirect to the URI stored by the most recent store_location call or
  # to the passed default.
  def redirect_back_or_default(default)
    session[:return_to] ? redirect_to(session[:return_to]) : redirect_to(default)
    session[:return_to] = nil
  end
  # When called from a before_filter (using the appropriate user_type version),
  # will check for the appropriate auth_token cookie, and login the appropriate user
  # if it's okay.
  def user_login_from_cookie(user_type)
    auth_token_name = "#{user_type}_auth_token".to_sym
    return unless cookies[auth_token_name] && !user_logged_in?(user_type)
    klass = eval(user_type.to_s.camelize)
    user = klass.find_by_remember_token(cookies[auth_token_name])
    if user && user.remember_token?
      # Refresh the token and re-issue the cookie with the new expiry.
      user.remember_me
      set_current_user(user_type => user)
      cookies[auth_token_name] = {:value => current_user(user_type).remember_token, :expires => current_user(user_type).remember_token_expires_at}
      flash[:notice] = "Logged in successfully"
    end
  end

  private

  # gets BASIC auth info
  def get_auth_data
    user, pass = nil, nil
    # extract authorisation credentials
    if request.env.has_key? 'X-HTTP_AUTHORIZATION'
      # try to get it where mod_rewrite might have put it
      authdata = request.env['X-HTTP_AUTHORIZATION'].to_s.split
    elsif request.env.has_key? 'HTTP_AUTHORIZATION'
      # this is the regular location
      authdata = request.env['HTTP_AUTHORIZATION'].to_s.split
    end
    # at the moment we only support basic authentication
    if authdata && authdata[0] == 'Basic'
      user, pass = Base64.decode64(authdata[1]).split(':')[0..1]
    end
    return [user, pass]
  end
end
|
module Autodeps
  module Persistency
    # Describes one registered dependency: which class mirrors the data
    # (dependent), how records are joined (key_mapping: source key =>
    # foreign key on the dependent), and which attributes are copied over
    # (value_mapping: source attribute => dependent attribute).
    class Mapping
      attr_accessor :dependent, :key_mapping, :value_mapping
      def initialize(dependent, key_mapping, value_mapping)
        @dependent = dependent
        @key_mapping = key_mapping
        @value_mapping = value_mapping
      end
    end
    # Adds the depend_on class macro to every includer.
    def self.included(base)
      super
      base.extend(ClassMethods)
    end
    module ClassMethods
      # Declares that this class mirrors attributes from +clazz+ and wires a
      # save callback on +clazz+ so changes propagate to dependents.
      def depend_on(clazz, options={})
        clazz = Object.const_get(clazz) if clazz.is_a?(String)
        # Default join: source id => "<demodulized_class>_id" on the dependent.
        options[:key_mapping] ||= {:id => (clazz.name.underscore.gsub(/^.*\//,"") + "_id").to_sym}
        # Registry accessors live on the source class itself.
        class << clazz
          attr_accessor :_deps, :_autodeps_after_save_callbacked
        end
        clazz._deps ||= {}
        # options[:value_mapping].keys.sort?
        # Registry is keyed by the sorted list of watched attribute names.
        (clazz._deps[options[:value_mapping].keys.sort] ||= []) << Mapping.new(self, options[:key_mapping], options[:value_mapping] )
        # NOTE(review): intentionally empty autorun block? confirm.
        Autodeps.autorun do
        end
        # if !self._autodeps_self_after_create_callbacked
        # self._autodeps_self_after_create_callbacked = true
        # self.send(:before_validation) do #todo:examine we are in create not in update
        # unless self.persisted?
        # relation = clazz
        # clazz.where(options[:key_mapping].each do |source_key, target_key|
        # relation = relation.where(source_key => self.send(target_key))
        # end
        #
        # options[:value_mapping].each do |source_key, target_key|
        # self[target_key] = relation.first[source_key] #todo, whatif multiple source document match? should be an error
        # end
        # end
        # end
        # Install the propagation hook on the source class exactly once:
        # after a save that changed any watched attribute, copy the mapped
        # values onto every matching dependent record and save it.
        if !clazz._autodeps_after_save_callbacked
          clazz._autodeps_after_save_callbacked = true
          clazz.send(:after_save) do
            clazz._deps.each do |attribute_keys, values|
              if attribute_keys.any? {|attribute_key| self.send("#{attribute_key.to_s}_changed?")}
                values.each do |mapping|
                  relation = mapping.dependent
                  mapping.key_mapping.each do |source_key, target_key|
                    relation = relation.where(target_key => self.send(source_key))
                  end
                  relation.each do |tuple|
                    mapping.value_mapping.each do |source_key, target_key|
                      tuple[target_key] = self.send(source_key)
                    end
                    tuple.save
                  end
                end
              end
            end
          end
        end
      end
    end
  end
end
Populate dependent attributes when creating/updating records.
module Autodeps
  module Persistency
    # Describes one registered dependency: which class mirrors the data
    # (dependent), how records are joined (key_mapping: source key =>
    # foreign key on the dependent), and which attributes are copied over
    # (value_mapping: source attribute => dependent attribute).
    class Mapping
      attr_accessor :dependent, :key_mapping, :value_mapping
      def initialize(dependent, key_mapping, value_mapping)
        @dependent = dependent
        @key_mapping = key_mapping
        @value_mapping = value_mapping
      end
    end
    # Adds the depend_on class macro to every includer.
    def self.included(base)
      super
      base.extend(ClassMethods)
    end
    module ClassMethods
      # Declares that this class mirrors attributes from +clazz+: a
      # before_validation hook on the dependent pulls initial values in,
      # and an after_save hook on +clazz+ pushes later changes out.
      def depend_on(clazz, options={})
        clazz = Object.const_get(clazz) if clazz.is_a?(String)
        # Default join: source id => "<demodulized_class>_id" on the dependent.
        options[:key_mapping] ||= {:id => (clazz.name.underscore.gsub(/^.*\//,"") + "_id").to_sym}
        # Registry accessors live on the source class itself.
        class << clazz
          attr_accessor :_deps, :_autodeps_after_save_callbacked
        end
        # Guard flag for the dependent class's own callback.
        class << self
          attr_accessor :_autodeps_self_after_create_callbacked
        end
        clazz._deps ||= {}
        # options[:value_mapping].keys.sort?
        # Registry is keyed by the sorted list of watched attribute names.
        (clazz._deps[options[:value_mapping].keys.sort] ||= []) << Mapping.new(self, options[:key_mapping], options[:value_mapping] )
        # NOTE(review): intentionally empty autorun block? confirm.
        Autodeps.autorun do
        end
        # Pull side (installed once on the dependent): before validation,
        # look up the source record via the key mapping (reading the foreign
        # keys off self) and copy the mapped values in.
        if !self._autodeps_self_after_create_callbacked
          self._autodeps_self_after_create_callbacked = true
          self.send(:before_validation) do #todo:examine we are in create not in update
            #unless self.persisted?
            relation = clazz
            options[:key_mapping].each do |source_key, target_key|
              relation = relation.where(source_key => self.send(target_key))
            end
            raise "#{self}: Can't find relation target for #{clazz} to save" if relation.first.nil?
            options[:value_mapping].each do |source_key, target_key|
              self[target_key] = relation.first[source_key] #todo, whatif multiple source document match? should be an error
            end
            #end
          end
        end
        # Push side (installed once on the source class): after a save that
        # changed any watched attribute, copy the mapped values onto every
        # matching dependent record and save it.
        if !clazz._autodeps_after_save_callbacked
          clazz._autodeps_after_save_callbacked = true
          clazz.send(:after_save) do
            clazz._deps.each do |attribute_keys, values|
              if attribute_keys.any? {|attribute_key| self.send("#{attribute_key.to_s}_changed?")}
                values.each do |mapping|
                  relation = mapping.dependent
                  mapping.key_mapping.each do |source_key, target_key|
                    relation = relation.where(target_key => self.send(source_key))
                  end
                  relation.each do |tuple|
                    mapping.value_mapping.each do |source_key, target_key|
                      tuple[target_key] = self.send(source_key)
                    end
                    tuple.save
                  end
                end
              end
            end
          end
        end
      end
    end
  end
end
require 'mechanize'
module BadLinkFinder
  # Checks a single hyperlink found on +page_url+ and records why it is
  # broken, if it is.  All network/parse failures are captured into
  # @error_message rather than raised to the caller.
  class Link
    attr_reader :link, :url, :error_message, :exception

    def initialize(page_url, link)
      @page_url = page_url
      @link = link
      @url = get_url_from_link(link)
      validate_with_request
    rescue URI::InvalidURIError => exception
      record_error("This link is in a bad format", exception)
    rescue Mechanize::ResponseCodeError => exception
      # Some servers reject HEAD with 405; retry the whole check once with GET.
      if exception.response_code.to_i == 405 && !@head_unsupported
        @head_unsupported = true
        retry
      else
        record_error("This request returned a #{exception.response_code}", exception)
      end
    rescue Mechanize::UnauthorizedError => exception
      record_error("This link requires authorisation", exception)
    rescue Mechanize::UnsupportedSchemeError => exception
      record_error("This link has a scheme we can't load (should be http or https)", exception)
    rescue Mechanize::RedirectLimitReachedError => exception
      record_error("This link might be in a redirect loop", exception)
    rescue Mechanize::RobotsDisallowedError => exception
      record_error("This link is blocked by robots.txt or nofollow attributes", exception)
    rescue Mechanize::Error, Net::HTTP::Persistent::Error, Timeout::Error, Errno::EINVAL,
           Errno::ECONNRESET, Errno::ETIMEDOUT, EOFError, Net::HTTPBadResponse, Net::HTTPHeaderSyntaxError,
           Net::ProtocolError, OpenSSL::SSL::SSLError, SocketError => exception # Thanks Net::HTTP
      record_error("The server failed to serve this page properly", exception)
    end

    # True when the request (or its GET retry) completed without error.
    def valid?
      @error_message.nil?
    end

    protected

    def validate_with_request
      puts "-- testing link #{@link} using #{@url}"
      sleep 0.1 # Recommended pause for gov.uk rate limiting
      browser = Mechanize.new
      browser.user_agent = 'GOV.UK link checker'
      browser.keep_alive = false
      # BUG FIX: Mechanize keeps every fetched page in its history by default
      # (unbounded), so long crawls grew memory without limit.  Disable it.
      browser.history.max_size = 0
      if @head_unsupported
        browser.get(@url)
      else
        browser.head(@url)
      end
    end

    # Resolve +link+ (possibly relative) against the page it appeared on.
    def get_url_from_link(link)
      URI.join(@page_url, link).to_s
    end

    def record_error(message, exception = nil)
      @error_message = message
      @exception = exception
      puts "---- found broken link #{@url}: #{message}: #{exception.message if exception}"
    end
  end
end
Remove Mechanize's history.
We ain't ever goin back.
require 'mechanize'
module BadLinkFinder
  # Checks a single hyperlink found on +page_url+ and records why it is
  # broken, if it is.  All network/parse failures are captured into
  # @error_message rather than raised to the caller.
  class Link
    attr_reader :link, :url, :error_message, :exception

    def initialize(page_url, link)
      @page_url = page_url
      @link = link
      @url = get_url_from_link(link)
      validate_with_request
    rescue URI::InvalidURIError => exception
      record_error("This link is in a bad format", exception)
    rescue Mechanize::ResponseCodeError => exception
      # Some servers reject HEAD with 405; retry the whole check once with GET.
      if exception.response_code.to_i == 405 && !@head_unsupported
        @head_unsupported = true
        retry
      else
        record_error("This request returned a #{exception.response_code}", exception)
      end
    rescue Mechanize::UnauthorizedError => exception
      record_error("This link requires authorisation", exception)
    rescue Mechanize::UnsupportedSchemeError => exception
      record_error("This link has a scheme we can't load (should be http or https)", exception)
    rescue Mechanize::RedirectLimitReachedError => exception
      record_error("This link might be in a redirect loop", exception)
    rescue Mechanize::RobotsDisallowedError => exception
      record_error("This link is blocked by robots.txt or nofollow attributes", exception)
    rescue Mechanize::Error, Net::HTTP::Persistent::Error, Timeout::Error, Errno::EINVAL,
           Errno::ECONNRESET, Errno::ETIMEDOUT, EOFError, Net::HTTPBadResponse, Net::HTTPHeaderSyntaxError,
           Net::ProtocolError, OpenSSL::SSL::SSLError, SocketError => exception # Thanks Net::HTTP
      record_error("The server failed to serve this page properly", exception)
    end

    # True when the request (or its GET retry) completed without error.
    def valid?
      @error_message.nil?
    end

    protected

    def validate_with_request
      puts "-- testing link #{@link} using #{@url}"
      sleep 0.1 # Recommended pause for gov.uk rate limiting
      browser = Mechanize.new
      browser.user_agent = 'GOV.UK link checker'
      browser.keep_alive = false
      # Do not retain visited pages; keeps memory flat on long crawls.
      browser.history.max_size = 0
      if @head_unsupported
        browser.get(@url)
      else
        browser.head(@url)
      end
    end

    # Resolve +link+ (possibly relative) against the page it appeared on.
    def get_url_from_link(link)
      URI.join(@page_url, link).to_s
    end

    def record_error(message, exception = nil)
      @error_message = message
      @exception = exception
      puts "---- found broken link #{@url}: #{message}: #{exception.message if exception}"
    end
  end
end
|
require 'beef/core_ext/string/to_boolean'
require 'beef/core_ext/string/titleise'
Fixing ascii issues
require "beef/core_ext/string/to_boolean"
require "beef/core_ext/string/titleise" |
require 'buff/config/json'
module Berkshelf::API
  # Server configuration backed by a JSON file (Buff::Config::JSON DSL).
  class Config < Buff::Config::JSON
    class << self
      # Path of the config file; BERKSHELF_API_PATH overrides the directory.
      # @return [String]
      def default_path
        home_path = ENV['BERKSHELF_API_PATH'] || "~/.berkshelf/api-server"
        File.expand_path(File.join(home_path, "config.json"))
      end
    end

    # Directory the server stores its state in.
    attribute 'home_path',
      type: String,
      default: File.expand_path("~/.berkshelf/api-server")

    # Cookbook sources to index; defaults to the public Opscode community API.
    attribute 'endpoints',
      type: Array,
      default: [
        {
          type: "opscode",
          options: {
            url: 'http://cookbooks.opscode.com/api/v1'
          }
        }
      ]
  end
end
Hash the endpoints of the config
require 'buff/config/json'
require 'digest/sha1'
module Berkshelf::API
  # Server configuration backed by a JSON file (Buff::Config::JSON DSL).
  class Config < Buff::Config::JSON
    class << self
      # Path of the config file; BERKSHELF_API_PATH overrides the directory.
      # @return [String]
      def default_path
        home_path = ENV['BERKSHELF_API_PATH'] || "~/.berkshelf/api-server"
        File.expand_path(File.join(home_path, "config.json"))
      end
    end

    # Directory the server stores its state in.
    attribute 'home_path',
      type: String,
      default: File.expand_path("~/.berkshelf/api-server")

    # Cookbook sources to index; defaults to the public Opscode community API.
    attribute 'endpoints',
      type: Array,
      default: [
        {
          type: "opscode",
          options: {
            url: 'http://cookbooks.opscode.com/api/v1'
          }
        }
      ]

    # SHA1 fingerprint of the configured endpoints, used to detect when the
    # endpoint configuration has changed.
    # NOTE(review): hashes Array#to_s of the endpoint hashes, so the digest
    # depends on hash ordering -- confirm endpoint order is stable before
    # comparing checksums across processes.
    def endpoints_checksum
      Digest::SHA1.hexdigest(endpoints.collect {|x| x.to_hash }.to_s)
    end
  end
end
|
module Besepa
  module Utils
    # Gem version string (frozen to prevent accidental mutation).
    VERSION = '0.4.1'.freeze
  end
end
Release 0.5.0
module Besepa
  module Utils
    # Gem version string (frozen to prevent accidental mutation).
    VERSION = '0.5.0'.freeze
  end
end
|
require "uri"
module BetterErrors
  # Builds editor-specific "open this file at this line" URLs for the error
  # page.  An editor is chosen from a preset symbol, a %{file}/%{line}
  # formatting string, a proc, or sniffed from environment variables.
  module Editor
    # Preset editors, matched by symbol (for_symbol) or by sniffing the
    # $EDITOR / $BETTER_ERRORS_EDITOR command (editor_from_command).
    # %{file} is URL-encoded, %{file_unencoded} is raw, %{line} is the line.
    KNOWN_EDITORS = [
      { symbols: [:atom], sniff: /atom/i, url: "atom://core/open/file?filename=%{file}&line=%{line}" },
      { symbols: [:emacs, :emacsclient], sniff: /emacs/i, url: "emacs://open?url=file://%{file}&line=%{line}" },
      { symbols: [:idea], sniff: /idea/i, url: "idea://open?file=%{file}&line=%{line}" },
      { symbols: [:macvim, :mvim], sniff: /vim/i, url: "mvim://open?url=file://%{file_unencoded}&line=%{line}" },
      { symbols: [:rubymine], sniff: /mine/i, url: "x-mine://open?file=%{file}&line=%{line}" },
      { symbols: [:sublime, :subl, :st], sniff: /subl/i, url: "subl://open?url=file://%{file}&line=%{line}" },
      { symbols: [:textmate, :txmt, :tm], sniff: /mate/i, url: "txmt://open?url=file://%{file}&line=%{line}" },
      { symbols: [:vscode, :code], sniff: /code/i, url: "vscode://file/%{file}:%{line}" },
      { symbols: [:vscodium, :codium], sniff: /codium/i, url: "vscodium://file/%{file}:%{line}" },
    ].freeze

    # Editor backed by a %{file}/%{line} formatting string.
    class UsingFormattingString
      def initialize(url_formatting_string)
        @url_formatting_string = url_formatting_string
      end

      def url(file, line)
        url_formatting_string % { file: URI.encode_www_form_component(file), file_unencoded: file, line: line }
      end

      private

      attr_reader :url_formatting_string
    end

    # Editor backed by a caller-supplied proc: (file, line) -> url.
    class UsingProc
      def initialize(url_proc)
        @url_proc = url_proc
      end

      def url(file, line)
        url_proc.call(file, line)
      end

      private

      attr_reader :url_proc
    end

    def self.for_formatting_string(formatting_string)
      UsingFormattingString.new(formatting_string)
    end

    def self.for_proc(url_proc)
      UsingProc.new(url_proc)
    end

    # Editor for a preset symbol, or nil when the symbol is unknown.
    # BUG FIX: the previous implementation fell through to Array#each's
    # return value, so an unknown symbol returned the whole KNOWN_EDITORS
    # array (a truthy non-editor) instead of nil.
    def self.for_symbol(symbol)
      preset = KNOWN_EDITORS.find { |editor| editor[:symbols].include?(symbol) }
      for_formatting_string(preset[:url]) if preset
    end

    # Automatically sniffs a default editor preset based on
    # environment variables.
    #
    # @return [Symbol]
    def self.default_editor
      editor_from_environment_formatting_string ||
        editor_from_environment_editor ||
        for_symbol(:textmate)
    end

    # Editor derived from BETTER_ERRORS_EDITOR or EDITOR; nil when neither
    # matches a preset (warnings printed to stdout).
    def self.editor_from_environment_editor
      if ENV["BETTER_ERRORS_EDITOR"]
        editor = editor_from_command(ENV["BETTER_ERRORS_EDITOR"])
        return editor if editor
        puts "BETTER_ERRORS_EDITOR environment variable is not recognized as a supported Better Errors editor."
      end
      if ENV["EDITOR"]
        editor = editor_from_command(ENV["EDITOR"])
        return editor if editor
        puts "EDITOR environment variable is not recognized as a supported Better Errors editor. Using TextMate by default."
      else
        puts "Since there is no EDITOR or BETTER_ERRORS_EDITOR environment variable, using Textmate by default."
      end
    end

    # Match an editor command string (e.g. "subl -w") against preset sniffers.
    def self.editor_from_command(editor_command)
      env_preset = KNOWN_EDITORS.find { |preset| editor_command =~ preset[:sniff] }
      for_formatting_string(env_preset[:url]) if env_preset
    end

    def self.editor_from_environment_formatting_string
      return unless ENV['BETTER_ERRORS_EDITOR_URL']
      for_formatting_string(ENV['BETTER_ERRORS_EDITOR_URL'])
    end
  end
end
Create instances of Editor
require "uri"
module BetterErrors
  # Builds editor-specific "open this file at this line" URLs for the error
  # page.  Instances wrap a proc (file, line) -> url; constructors exist for
  # preset symbols, formatting strings, procs and environment sniffing.
  class Editor
    # Preset editors, matched by symbol (for_symbol) or by sniffing the
    # $EDITOR / $BETTER_ERRORS_EDITOR command (editor_from_command).
    # %{file} is URL-encoded, %{file_unencoded} is raw, %{line} is the line.
    KNOWN_EDITORS = [
      { symbols: [:atom], sniff: /atom/i, url: "atom://core/open/file?filename=%{file}&line=%{line}" },
      { symbols: [:emacs, :emacsclient], sniff: /emacs/i, url: "emacs://open?url=file://%{file}&line=%{line}" },
      { symbols: [:idea], sniff: /idea/i, url: "idea://open?file=%{file}&line=%{line}" },
      { symbols: [:macvim, :mvim], sniff: /vim/i, url: "mvim://open?url=file://%{file_unencoded}&line=%{line}" },
      { symbols: [:rubymine], sniff: /mine/i, url: "x-mine://open?file=%{file}&line=%{line}" },
      { symbols: [:sublime, :subl, :st], sniff: /subl/i, url: "subl://open?url=file://%{file}&line=%{line}" },
      { symbols: [:textmate, :txmt, :tm], sniff: /mate/i, url: "txmt://open?url=file://%{file}&line=%{line}" },
      { symbols: [:vscode, :code], sniff: /code/i, url: "vscode://file/%{file}:%{line}" },
      { symbols: [:vscodium, :codium], sniff: /codium/i, url: "vscodium://file/%{file}:%{line}" },
    ].freeze

    # Wrap a %{file}/%{line} formatting string in a URL-building proc.
    def self.for_formatting_string(formatting_string)
      new proc { |file, line|
        formatting_string % { file: URI.encode_www_form_component(file), file_unencoded: file, line: line }
      }
    end

    def self.for_proc(url_proc)
      new url_proc
    end

    # Editor for a preset symbol, or nil when the symbol is unknown.
    # BUG FIX: the previous implementation fell through to Array#each's
    # return value, so an unknown symbol returned the whole KNOWN_EDITORS
    # array (a truthy non-editor) instead of nil.
    def self.for_symbol(symbol)
      preset = KNOWN_EDITORS.find { |editor| editor[:symbols].include?(symbol) }
      for_formatting_string(preset[:url]) if preset
    end

    # Automatically sniffs a default editor preset based on
    # environment variables.
    #
    # @return [Symbol]
    def self.default_editor
      editor_from_environment_formatting_string ||
        editor_from_environment_editor ||
        for_symbol(:textmate)
    end

    # Editor derived from BETTER_ERRORS_EDITOR or EDITOR; nil when neither
    # matches a preset (warnings printed to stdout).
    def self.editor_from_environment_editor
      if ENV["BETTER_ERRORS_EDITOR"]
        editor = editor_from_command(ENV["BETTER_ERRORS_EDITOR"])
        return editor if editor
        puts "BETTER_ERRORS_EDITOR environment variable is not recognized as a supported Better Errors editor."
      end
      if ENV["EDITOR"]
        editor = editor_from_command(ENV["EDITOR"])
        return editor if editor
        puts "EDITOR environment variable is not recognized as a supported Better Errors editor. Using TextMate by default."
      else
        puts "Since there is no EDITOR or BETTER_ERRORS_EDITOR environment variable, using Textmate by default."
      end
    end

    # Match an editor command string (e.g. "subl -w") against preset sniffers.
    def self.editor_from_command(editor_command)
      env_preset = KNOWN_EDITORS.find { |preset| editor_command =~ preset[:sniff] }
      for_formatting_string(env_preset[:url]) if env_preset
    end

    def self.editor_from_environment_formatting_string
      return unless ENV['BETTER_ERRORS_EDITOR_URL']
      for_formatting_string(ENV['BETTER_ERRORS_EDITOR_URL'])
    end

    def initialize(url_proc)
      @url_proc = url_proc
    end

    def url(file, line)
      url_proc.call(file, line)
    end

    private

    attr_reader :url_proc
  end
end
|
# Bibliographic record (book reference) with simple string formatters.
# All getters return Strings; #to_s renders the full multi-line citation.
class Bibliog
  attr_reader :autores, :titulo, :serie, :editorial, :edicion, :mes, :anno, :isbn

  # a: Array of author names, t: title, e: publisher, ed: edition,
  # mes/anno: month/year strings, isbn: Array of ISBN strings,
  # s: series name (optional, defaults to "none").
  def initialize(a, t, e, ed, mes, anno, isbn, s="none")
    @autores = a
    @titulo = t
    @serie = s
    @editorial = e
    @edicion = ed
    @mes = mes
    @anno = anno
    @isbn = isbn
  end

  # Comma-separated author list ("" when there are no authors).
  # Array() also tolerates a nil author list.
  def get_autores
    Array(@autores).join(', ')
  end

  def get_titulo
    "#{@titulo}"
  end

  def get_serie
    "#{@serie}"
  end

  def get_editorial
    "#{@editorial}"
  end

  def get_edicion
    "#{@edicion}"
  end

  # "month, year"
  def get_fecha
    "#{@mes}, #{@anno}"
  end

  # One "ISBN-<len>: <code>" line per ISBN; codes longer than ten digits
  # (ISBN-13) get a hyphen inserted before the final ten digits.
  # BUG FIX: previously raised NoMethodError when @isbn was empty or nil;
  # now returns "" in that case.
  def get_isbn
    Array(@isbn).map { |code|
      body = code.length > 10 ? "#{code[0..-11]}-#{code[-10..-1]}" : code
      "ISBN-#{code.length}: #{body}"
    }.join("\n")
  end

  # Full citation, one field per line.
  def to_s
    "#{get_autores}.\n" \
    "#{get_titulo}\n" \
    "(#{get_serie})\n" \
    "#{get_editorial}; #{get_edicion} edition (#{get_fecha})\n" \
    "#{get_isbn}"
  end
end
# Book subtype of Bibliog.
# BUG FIX: the old zero-argument initialize shadowed Bibliog#initialize
# without calling super, so Libro could never be built with data.  It now
# forwards all arguments; the old no-argument form (Libro.new) still works
# and simply leaves every field nil.
class Libro < Bibliog
  def initialize(a=nil, t=nil, e=nil, ed=nil, mes=nil, anno=nil, isbn=nil, s="none")
    super
  end
end
Creación de la clase vacía Revista
# Bibliography entry: authors, title, publisher data and ISBN list,
# rendered as a multi-line citation by #to_s.
class Bibliog
  attr_reader :autores, :titulo, :serie, :editorial, :edicion, :mes, :anno, :isbn

  def initialize(a, t, e, ed, mes, anno, isbn, s="none")
    @autores = a
    @titulo = t
    @serie = s
    @editorial = e
    @edicion = ed
    @mes = mes
    @anno = anno
    @isbn = isbn
  end

  # All authors, comma separated: "Author1, Author2".
  def get_autores
    @autores.join(', ')
  end

  def get_titulo
    @titulo.to_s
  end

  def get_serie
    @serie.to_s
  end

  def get_editorial
    @editorial.to_s
  end

  def get_edicion
    @edicion.to_s
  end

  # "month, year"
  def get_fecha
    [@mes, @anno].join(', ')
  end

  # One "ISBN-<len>: <code>" line per stored ISBN; codes longer than ten
  # characters get a hyphen inserted before the final ten digits.
  def get_isbn
    lines = @isbn.map do |code|
      digits = code.length
      shown = digits > 10 ? "#{code[0...-10]}-#{code[-10..-1]}" : code
      "ISBN-#{digits}: #{shown}"
    end
    lines.join("\n")
  end

  # Full citation, one field per line.
  def to_s
    [
      "#{get_autores}.",
      get_titulo,
      "(#{get_serie})",
      "#{get_editorial}; #{get_edicion} edition (#{get_fecha})",
      get_isbn
    ].join("\n")
  end
end
# Book entry.  NOTE(review): this no-argument initialize shadows
# Bibliog#initialize without calling super, so instances start with every
# field nil -- confirm whether Libro is intended to be constructed empty.
class Libro < Bibliog
  def initialize
  end
end
# Magazine/journal entry -- empty placeholder, not yet implemented.
class Revista
end
require 'bibtex/authors'
# Factory for the per-house-style bibliography formatters.
module BibFormatter
  # Pick the formatter class for style[:format] (:nrg, :nih, :springer,
  # anything else falls back to the default) and instantiate it.
  def self.get_formatter(writer, style)
    klass =
      case style[:format]
      when :nrg      then BibNRGFormatter
      when :nih      then BibNIHFormatter
      when :springer then BibSpringerFormatter
      else                BibDefaultFormatter
      end
    klass.new(writer, style)
  end
end
# Lightweight author record: stores the surname (initials are optional and
# settable later) and prints as the bare surname.
class BibAuthor
  attr_accessor :surname, :initials

  def initialize(name)
    self.surname = name
  end

  def to_s
    surname
  end
end
# Shared output helpers mixed into every Bib*Formatter.  Expects the
# including class to set @writer (markup backend) and @style (options hash),
# and pulls split_bib_authors and friends from FormatBibAuthors.
module BibOutput
  include FormatBibAuthors
  # Authors in bibtex style are separated by 'and' keywords. Valid
  # names are
  #
  #   Jane Austen
  #   J. Austen
  #   Austen, Jane
  #   Austen, J.
  #
  # The output style can be any of these: firstnamefirst, initialsfirst,
  # firstnamelast, initialslast.
  #
  # Renders the author list.  style[:max_authors] (default 5) and
  # style[:etal] control truncation; style[:etal] == :plain uses a plain
  # "et al.", any other truthy value an italicised one.
  def authors authorlist, style = {}
    authors = split_bib_authors(authorlist)
    num = authors.size
    max=5
    max=style[:max_authors] if style[:max_authors]
    if style[:etal] and num > max
      aunum = 1
      aunum = style[:etal_num] if style[:etal_num]
      text = comma(authors[0..aunum-1].join(', '))
      # chop.chop removes the ", " that comma() appended
      text = text.chop.chop
      text += if style[:etal] == :plain
        ' et al.'
      else
        ' <i>et al.</i>'
      end
    else
      if num > 1
        # p [num,authors]
        text = comma(authors[0..num-2].join(', ')+' & '+authors[num-1])
      else
        text = comma(authors[0])
      end
      # strip final comma
      text = text.chop.chop
    end
    text
  end
  # HTML anchor for a DOI (preferred) or a plain link; '' when neither.
  # full=true shows the doi/link text itself instead of [DOI]/[Link].
  def url doi, link, full = false
    if doi and doi != ''
      text = '[DOI]'
      text = 'doi:'+doi if full
      " <a href=\"http://dx.doi.org/#{doi}\">#{text}</a>"
    elsif link and link != ''
      text = '[Link]'
      text = link if full
      " <a href=\"#{link}\">#{text}</a>"
    else
      ''
    end
  end
  # Small-print citation statistics; only emitted when @style[:bib_cites].
  def citations bib
    text = ''
    if @style[:bib_cites]
      if bib.has?(:Impact)
        text += "Impact factor = #{bold(bib[:Impact])}"
      end
      cited = ''
      if bib.has?(:Cited)
        cited += " #{bold(bib[:Cited])}x,"
      end
      if bib.has?(:Pmcited)
        cited += " Pubmed #{bold(bib[:Pmcited])}x,"
      end
      if bib.has?(:Gscited)
        cited += " Google Scholar #{bold(bib[:Gscited])}x,"
      end
      if cited != ''
        text += "Cited "+cited.chop
      end
      text = ' <small>('+text+')</small>' if text != ''
    end
    text
  end
  # Strips BibTeX grouping braces.  NOTE(review): gsub! mutates the
  # argument in place, so the caller's string is modified too.
  def strip_bibtex str
    str.gsub!(/\{/,'')
    str.gsub!(/\}/,'')
    # $stderr.print str
    str
  end
  def newline
    '<br />'
  end
  # bold/italic delegate to the writer backend; blank input yields ''.
  def bold str
    return @writer.bold(str) if str!=nil and str.strip != ''
    ''
  end
  def italic str
    return @writer.italics(str) if str!=nil and str.strip != ''
    ''
  end
  # The helpers below add punctuation only when the string is non-blank;
  # otherwise they return ''.
  def colon str, space=true
    if str!=nil and str.strip != ''
      c = ''
      c = ' ' if space
      return str.rstrip + ':' + c
    end
    ""
  end
  def prefix_colon str, space=true
    if str!=nil and str.strip != ''
      c = ''
      c = ' ' if space
      return ':' + str.rstrip + c
    end
    ""
  end
  def comma str
    if str!=nil and str.strip != ''
      return str.rstrip + ', '
    end
    ""
  end
  def braces str
    if str!=nil and str.strip != ''
      return '(' + str.strip + ')'
    end
    ""
  end
  def dot str
    if str!=nil and str.strip != ''
      return str.rstrip + '. '
    end
    ""
  end
  def prefix_dot str
    if str!=nil and str.strip != ''
      return '.' + str.rstrip + ' '
    end
    ""
  end
  # Return sentence with only first letter capitalized (except for those
  # between curly braces).  NOTE(review): mutates str via gsub!.
  def capitalize_first str
    str.gsub!(/^\{/,'')
    str.gsub!(/\}$/,'')
    a = str.split(/[{}]/)
    # $stderr.print(a.join('@@'),"\n")
    i = 0
    str2 = a.map { | s |
      i += 1
      if (i % 2 == 1)
        if (i==1)
          s.capitalize
        else
          s.downcase
        end
      else
        s
      end
    }.join('')
    # $stderr.print(str2,"\n")
    str2
  end
  # Replace a few common LaTeX escape sequences with UTF-8 characters.
  def convert_special_characters s
    s.gsub('\"{o}',"ö").gsub("~"," ").gsub('\"{u}',"ü").gsub("\`{e}","è")
  end
  def edition e
    e
  end
  # A bare page count gets a trailing 'p'; ranges/locators pass through.
  def pages pg
    return '' if pg == nil or pg.strip == ''
    if pg !~ /-/ and pg !~ /:/
      return pg + 'p'
    end
    pg.strip
  end
end
# Default rendering of a single reference; mixed in together with
# BibOutput, whose helpers (authors, comma, url, ...) it uses.
module BibDefaultOutput
  # In-text citation marker: superscripted number.
  def cite_marker num
    @writer.superscript(num.to_s)
  end
  # Reference-list marker: "1." style.
  def reference_marker num
    # @writer.superscript(num.to_s)
    "#{num.to_s}."
  end
  # Render one bib entry; books/collections differ from journal articles.
  # Appends DOI/URL link and citation counts unless @style[:final].
  def write bib
    text = authors(bib[:Author],:etal=>true)
    if bib.type == 'book' or bib.type == 'incollection' or bib.type == 'inproceedings' or bib.type == 'conference'
      text += strip_bibtex(comma(italic(bib[:Title])))+comma(bib[:Booktitle])+" #{bib[:Pages]} (#{bib[:Publisher]} #{bib[:Year]})."
    else
      text += comma(strip_bibtex(bib[:Title]))+comma(italic(bib[:Journal]))+comma(bold(bib[:Volume]))+"#{bib[:Pages]} [#{bib[:Year]}]."
    end
    if !@style[:final]
      text += url(bib[:Doi],bib[:Url])
      text += citations(bib)
    end
    text
  end
end
# Fallback formatter used when style[:format] is not a recognised house style.
class BibDefaultFormatter
  include BibOutput
  include BibDefaultOutput
  # writer: markup backend, style: options hash consumed by the mixins.
  def initialize writer, style
    @writer = writer
    @style = style
  end
end
# NRG (Nature-style) formatter: superscript reference markers and an
# italicised "et al." after the first author once the list is long.
class BibNRGFormatter
  include BibOutput
  include BibDefaultOutput
  def initialize writer, style
    @writer = writer
    @style = style
  end
  # Reference-list marker is superscripted (unlike the default "1.").
  def reference_marker num
    @writer.superscript(num.to_s)
  end
  # Render one entry.  NOTE(review): the book branch concatenates
  # bib[:Pages] directly, which raises TypeError when Pages is nil --
  # confirm book entries always carry Pages.
  def write bib
    text = authors(bib[:Author], :etal=>1, :amp=>true)
    if bib.type == 'book' or bib.type == 'incollection' or bib.type == 'inproceedings'
      text += strip_bibtex(comma(italic(bib[:Title])))+comma(bib[:Booktitle])+comma(bib[:Publisher])+bib[:Pages]+" (#{bib[:Year]})."
    else
      text += comma(strip_bibtex(bib[:Title]))+comma(italic(bib[:Journal]))+comma(bold(bib[:Volume]))+"#{bib[:Pages]} (#{bib[:Year]})."
    end
    if !@style[:final]
      text += url(bib[:Doi],bib[:Url])
      text += citations(bib)
    end
    text
  end
end
# Springer house style: "Authors (Year) Title. ..." with a plain "et al."
# after three named authors.
class BibSpringerFormatter
  include BibOutput
  include BibDefaultOutput
  def initialize writer, style
    @writer = writer
    @style = style
    # Springer truncates after three authors unless the caller overrides.
    style[:max_authors] ||= 3
  end
  # In-text citation marker: bold italic "(n)".
  def cite_marker num
    @writer.bold(@writer.italics("(#{num.to_s})"))
  end
  # Render one entry, branching on the BibTeX entry type.
  def write bib
    text = authors(to_authorlist(bib[:Author]), :etal=>:plain, :etal_num => 3, :amp=>true)
    text += braces(bib[:Year])+' '
    if bib.type == 'book' or bib.type == 'incollection' or bib.type == 'inproceedings' or bib.type == 'conference'
      text += strip_bibtex(dot(capitalize_first(bib.required(:Title))))+comma(bib[:Booktitle])+dot(edition(bib[:Edition]))
      if bib.type == 'book'
        text += comma(bib.required(:Publisher))
      else
        text += comma(bib[:Publisher])
      end
      text += comma(bib[:Organization])+dot(pages(bib[:Pages]))
    elsif bib.type == 'journal'
      # Journal article
      # BUG FIX: braces() takes a single argument; the old call
      # braces(bib[:Number],false) raised ArgumentError for journal entries.
      text += dot(strip_bibtex(capitalize_first(bib.required(:Title))))+dot(bib.required(:Journal))+colon(bib[:Volume],false)+
        braces(bib[:Number])+dot(pages("#{bib.required(:Pages)}"))+'.'
    else
      # this is used mostly:
      text += dot(strip_bibtex(capitalize_first(bib[:Title])))+dot(bib[:Journal])+bib[:Volume]+braces(bib[:Number])+prefix_colon(bib[:Pages],false)
      text += '.' if bib.type!='misc'
    end
    if bib.type=='misc' or !@style[:final]
      text += url(bib[:Doi],bib[:Url],true)
    end
    if !@style[:final]
      text += citations(bib)
    end
    text = text.strip
    # Drop a single trailing comma/period before character conversion.
    text = text.chop if text =~ /[,.]$/
    convert_special_characters(text)+newline
  end
  # Normalise author names to "Last <initials>" (via FormatBibAuthors'
  # split_first_lastname/to_initials), warning on stderr about names that
  # look malformed.
  def to_authorlist s
    list = split_bib_authors(s)
    list = list.map do | a |
      first,last = split_first_lastname(a)
      if first !~ /\./
        $stderr.print "Possibly malformed first name <#{first.strip}> has no dot in <",a,">\n"
      end
      if first =~ /\w\w/
        $stderr.print "Possibly malformed first name <#{first.strip}> contains two+ letters in ref <",a,">\n"
      end
      a1 = last+' '+to_initials(first)
      a2 = a1.gsub(/[,.]/,' ')
      # $stderr.print a," <--\n"
      a2.strip
    end
    list
  end
end
# NIH biosketch style: PMID shown for pubmed links, year in parentheses
# at the end.
class BibNIHFormatter
  include BibOutput
  include BibDefaultOutput
  def initialize writer, style
    @writer = writer
    @style = style
    style[:max_authors] ||= 3
  end
  # In-text citation marker: bold italic "(n)".
  def cite_marker num
    @writer.bold(@writer.italics("(#{num.to_s})"))
  end
  # Render one entry, branching on the BibTeX entry type.
  def write bib
    text = authors(to_authorlist(bib[:Author]), :etal=>false, :amp=>true)
    if bib.type == 'book' or bib.type == 'incollection' or bib.type == 'inproceedings' or bib.type == 'conference'
      text += strip_bibtex(dot(capitalize_first(bib.required(:Title))))+comma(bib[:Booktitle])+dot(edition(bib[:Edition]))
      if bib.type == 'book'
        text += comma(bib.required(:Publisher))
      else
        text += comma(bib[:Publisher])
      end
      text += comma(bib[:Organization])+dot(pages(bib[:Pages]))
    elsif bib.type == 'journal'
      # Journal article
      # BUG FIX: braces() takes one argument; braces(bib[:Number],false)
      # raised ArgumentError for journal entries.
      text += dot(strip_bibtex(capitalize_first(bib.required(:Title))))+bib.required(:Journal)+colon(bib[:Volume],false)+
        braces(bib[:Number])+dot(pages("#{bib.required(:Pages)}"))
    else
      # this is used mostly:
      text += dot(strip_bibtex(capitalize_first(bib[:Title])))+dot(bold(bib[:Journal]))+bib[:Volume]+braces(bib[:Number])+prefix_colon(bib[:Pages],false)
      # text += '.' if bib.type!='misc'
    end
    text += ' '+braces(bib[:Year])
    # BUG FIX: the pubmed test read bib[:URL] while every other access uses
    # bib[:Url], so the PMID branch could never trigger.
    if bib[:Url] and bib[:Url] =~ /pubmed/
      text += " PMID:"+bib[:Url].split("/").last
    else
      if bib.type=='misc' or !@style[:final]
        # text += url(bib[:Doi],bib[:Url],true)
        # NOTE(review): raises TypeError when :Doi/:howPublished are nil --
        # confirm these fields are always present on this branch.
        text += ' '+bib[:Doi]+bib[:howPublished]
      end
    end
    if !@style[:final]
      text += citations(bib)
    end
    text = text.strip
    text = text.chop if text =~ /[,.]$/
    convert_special_characters(text)+newline
  end
  # Normalise author names to "Last <initials>" (via FormatBibAuthors'
  # split_first_lastname/to_initials), warning on stderr about names that
  # look malformed.
  def to_authorlist s
    list = split_bib_authors(s)
    list = list.map do | a |
      first,last = split_first_lastname(a)
      if first !~ /\./
        $stderr.print "Possibly malformed first name <#{first.strip}> has no dot in <",a,">\n"
      end
      if first =~ /\w\w/
        $stderr.print "Possibly malformed first name <#{first.strip}> contains two+ letters in ref <",a,">\n"
      end
      a1 = last+' '+to_initials(first)
      a2 = a1.gsub(/[,.]/,' ')
      # $stderr.print a," <--\n"
      a2.strip
    end
    list
  end
end
Fixes to the NIH formatter (author punctuation, thesis/book handling, address and ISBN output)
require 'bibtex/authors'
# Factory for the per-house-style bibliography formatters.
module BibFormatter
  # Pick the formatter class for style[:format] (:nrg, :nih, :springer,
  # anything else falls back to the default) and instantiate it.
  def self.get_formatter(writer, style)
    klass =
      case style[:format]
      when :nrg      then BibNRGFormatter
      when :nih      then BibNIHFormatter
      when :springer then BibSpringerFormatter
      else                BibDefaultFormatter
      end
    klass.new(writer, style)
  end
end
# Lightweight author record: stores the surname (initials are optional and
# settable later) and prints as the bare surname.
class BibAuthor
  attr_accessor :surname, :initials

  def initialize(name)
    self.surname = name
  end

  def to_s
    surname
  end
end
# Shared output helpers mixed into every Bib*Formatter.  Expects the
# including class to set @writer (markup backend) and @style (options hash),
# and pulls split_bib_authors and friends from FormatBibAuthors.
module BibOutput
  include FormatBibAuthors
  # Authors in bibtex style are separated by 'and' keywords. Valid
  # names are
  #
  #   Jane Austen
  #   J. Austen
  #   Austen, Jane
  #   Austen, J.
  #
  # The output style can be any of these: firstnamefirst, initialsfirst,
  # firstnamelast, initialslast.
  #
  # Renders the author list.  style[:max_authors] (default 5) and
  # style[:etal] control truncation; style[:etal] == :plain uses a plain
  # "et al.", any other truthy value an italicised one.
  def authors authorlist, style = {}
    authors = split_bib_authors(authorlist)
    num = authors.size
    max=5
    max=style[:max_authors] if style[:max_authors]
    if style[:etal] and num > max
      aunum = 1
      aunum = style[:etal_num] if style[:etal_num]
      text = comma(authors[0..aunum-1].join(', '))
      # chop.chop removes the ", " that comma() appended
      text = text.chop.chop
      text += if style[:etal] == :plain
        ' et al.'
      else
        ' <i>et al.</i>'
      end
    else
      if num > 1
        # p [num,authors]
        text = comma(authors[0..num-2].join(', ')+' & '+authors[num-1])
      else
        text = comma(authors[0])
      end
      # strip final comma
      text = text.chop.chop
    end
    text
  end
  # HTML anchor for a DOI (preferred) or a plain link; '' when neither.
  # full=true shows the doi/link text itself instead of [DOI]/[Link].
  def url doi, link, full = false
    if doi and doi != ''
      text = '[DOI]'
      text = 'doi:'+doi if full
      " <a href=\"http://dx.doi.org/#{doi}\">#{text}</a>"
    elsif link and link != ''
      text = '[Link]'
      text = link if full
      " <a href=\"#{link}\">#{text}</a>"
    else
      ''
    end
  end
  # Small-print citation statistics; only emitted when @style[:bib_cites].
  def citations bib
    text = ''
    if @style[:bib_cites]
      if bib.has?(:Impact)
        text += "Impact factor = #{bold(bib[:Impact])}"
      end
      cited = ''
      if bib.has?(:Cited)
        cited += " #{bold(bib[:Cited])}x,"
      end
      if bib.has?(:Pmcited)
        cited += " Pubmed #{bold(bib[:Pmcited])}x,"
      end
      if bib.has?(:Gscited)
        cited += " Google Scholar #{bold(bib[:Gscited])}x,"
      end
      if cited != ''
        text += "Cited "+cited.chop
      end
      text = ' <small>('+text+')</small>' if text != ''
    end
    text
  end
  # Strips BibTeX grouping braces.  NOTE(review): gsub! mutates the
  # argument in place, so the caller's string is modified too.
  def strip_bibtex str
    str.gsub!(/\{/,'')
    str.gsub!(/\}/,'')
    # $stderr.print str
    str
  end
  def newline
    '<br />'
  end
  # bold/italic delegate to the writer backend; blank input yields ''.
  def bold str
    return @writer.bold(str) if str!=nil and str.strip != ''
    ''
  end
  def italic str
    return @writer.italics(str) if str!=nil and str.strip != ''
    ''
  end
  # The helpers below add punctuation only when the string is non-blank;
  # otherwise they return ''.
  def colon str, space=true
    if str!=nil and str.strip != ''
      c = ''
      c = ' ' if space
      return str.rstrip + ':' + c
    end
    ""
  end
  def prefix_colon str, space=true
    if str!=nil and str.strip != ''
      c = ''
      c = ' ' if space
      return ':' + str.rstrip + c
    end
    ""
  end
  def comma str
    if str!=nil and str.strip != ''
      return str.rstrip + ', '
    end
    ""
  end
  def braces str
    if str!=nil and str.strip != ''
      return '(' + str.strip + ')'
    end
    ""
  end
  def dot str
    if str!=nil and str.strip != ''
      return str.rstrip + '. '
    end
    ""
  end
  def prefix_dot str
    if str!=nil and str.strip != ''
      return '.' + str.rstrip + ' '
    end
    ""
  end
  # Return sentence with only first letter capitalized (except for those
  # between curly braces).  NOTE(review): mutates str via gsub!.
  def capitalize_first str
    str.gsub!(/^\{/,'')
    str.gsub!(/\}$/,'')
    a = str.split(/[{}]/)
    # $stderr.print(a.join('@@'),"\n")
    i = 0
    str2 = a.map { | s |
      i += 1
      if (i % 2 == 1)
        if (i==1)
          s.capitalize
        else
          s.downcase
        end
      else
        s
      end
    }.join('')
    # $stderr.print(str2,"\n")
    str2
  end
  # Replace a few common LaTeX escape sequences with UTF-8 characters.
  def convert_special_characters s
    s.gsub('\"{o}',"ö").gsub("~"," ").gsub('\"{u}',"ü").gsub("\`{e}","è")
  end
  def edition e
    e
  end
  # A bare page count gets a trailing 'p'; ranges/locators pass through.
  def pages pg
    return '' if pg == nil or pg.strip == ''
    if pg !~ /-/ and pg !~ /:/
      return pg + 'p'
    end
    pg.strip
  end
end
# Default rendering of a single reference; mixed in together with
# BibOutput, whose helpers (authors, comma, url, ...) it uses.
module BibDefaultOutput
  # In-text citation marker: superscripted number.
  def cite_marker num
    @writer.superscript(num.to_s)
  end
  # Reference-list marker: "1." style.
  def reference_marker num
    # @writer.superscript(num.to_s)
    "#{num.to_s}."
  end
  # Render one bib entry; books/collections differ from journal articles.
  # Appends DOI/URL link and citation counts unless @style[:final].
  def write bib
    text = authors(bib[:Author],:etal=>true)
    if bib.type == 'book' or bib.type == 'incollection' or bib.type == 'inproceedings' or bib.type == 'conference'
      text += strip_bibtex(comma(italic(bib[:Title])))+comma(bib[:Booktitle])+" #{bib[:Pages]} (#{bib[:Publisher]} #{bib[:Year]})."
    else
      text += comma(strip_bibtex(bib[:Title]))+comma(italic(bib[:Journal]))+comma(bold(bib[:Volume]))+"#{bib[:Pages]} [#{bib[:Year]}]."
    end
    if !@style[:final]
      text += url(bib[:Doi],bib[:Url])
      text += citations(bib)
    end
    text
  end
end
# Fallback formatter used when style[:format] is not a recognised house style.
class BibDefaultFormatter
  include BibOutput
  include BibDefaultOutput
  # writer: markup backend, style: options hash consumed by the mixins.
  def initialize writer, style
    @writer = writer
    @style = style
  end
end
# NRG (Nature-style) formatter: superscript reference markers and an
# italicised "et al." after the first author once the list is long.
class BibNRGFormatter
  include BibOutput
  include BibDefaultOutput
  def initialize writer, style
    @writer = writer
    @style = style
  end
  # Reference-list marker is superscripted (unlike the default "1.").
  def reference_marker num
    @writer.superscript(num.to_s)
  end
  # Render one entry.  NOTE(review): the book branch concatenates
  # bib[:Pages] directly, which raises TypeError when Pages is nil --
  # confirm book entries always carry Pages.
  def write bib
    text = authors(bib[:Author], :etal=>1, :amp=>true)
    if bib.type == 'book' or bib.type == 'incollection' or bib.type == 'inproceedings'
      text += strip_bibtex(comma(italic(bib[:Title])))+comma(bib[:Booktitle])+comma(bib[:Publisher])+bib[:Pages]+" (#{bib[:Year]})."
    else
      text += comma(strip_bibtex(bib[:Title]))+comma(italic(bib[:Journal]))+comma(bold(bib[:Volume]))+"#{bib[:Pages]} (#{bib[:Year]})."
    end
    if !@style[:final]
      text += url(bib[:Doi],bib[:Url])
      text += citations(bib)
    end
    text
  end
end
# Springer house style: "Authors (Year) Title. ..." with a plain "et al."
# after three named authors.
class BibSpringerFormatter
  include BibOutput
  include BibDefaultOutput
  def initialize writer, style
    @writer = writer
    @style = style
    # Springer truncates after three authors unless the caller overrides.
    style[:max_authors] ||= 3
  end
  # In-text citation marker: bold italic "(n)".
  def cite_marker num
    @writer.bold(@writer.italics("(#{num.to_s})"))
  end
  # Render one entry, branching on the BibTeX entry type.
  def write bib
    text = authors(to_authorlist(bib[:Author]), :etal=>:plain, :etal_num => 3, :amp=>true)
    text += braces(bib[:Year])+' '
    if bib.type == 'book' or bib.type == 'incollection' or bib.type == 'inproceedings' or bib.type == 'conference'
      text += strip_bibtex(dot(capitalize_first(bib.required(:Title))))+comma(bib[:Booktitle])+dot(edition(bib[:Edition]))
      if bib.type == 'book'
        text += comma(bib.required(:Publisher))
      else
        text += comma(bib[:Publisher])
      end
      text += comma(bib[:Organization])+dot(pages(bib[:Pages]))
    elsif bib.type == 'journal'
      # Journal article
      # BUG FIX: braces() takes a single argument; the old call
      # braces(bib[:Number],false) raised ArgumentError for journal entries.
      text += dot(strip_bibtex(capitalize_first(bib.required(:Title))))+dot(bib.required(:Journal))+colon(bib[:Volume],false)+
        braces(bib[:Number])+dot(pages("#{bib.required(:Pages)}"))+'.'
    else
      # this is used mostly:
      text += dot(strip_bibtex(capitalize_first(bib[:Title])))+dot(bib[:Journal])+bib[:Volume]+braces(bib[:Number])+prefix_colon(bib[:Pages],false)
      text += '.' if bib.type!='misc'
    end
    if bib.type=='misc' or !@style[:final]
      text += url(bib[:Doi],bib[:Url],true)
    end
    if !@style[:final]
      text += citations(bib)
    end
    text = text.strip
    # Drop a single trailing comma/period before character conversion.
    text = text.chop if text =~ /[,.]$/
    convert_special_characters(text)+newline
  end
  # Normalise author names to "Last <initials>" (via FormatBibAuthors'
  # split_first_lastname/to_initials), warning on stderr about names that
  # look malformed.
  def to_authorlist s
    list = split_bib_authors(s)
    list = list.map do | a |
      first,last = split_first_lastname(a)
      if first !~ /\./
        $stderr.print "Possibly malformed first name <#{first.strip}> has no dot in <",a,">\n"
      end
      if first =~ /\w\w/
        $stderr.print "Possibly malformed first name <#{first.strip}> contains two+ letters in ref <",a,">\n"
      end
      a1 = last+' '+to_initials(first)
      a2 = a1.gsub(/[,.]/,' ')
      # $stderr.print a," <--\n"
      a2.strip
    end
    list
  end
end
# NIH biosketch style: PMID shown for pubmed links, ISBN for books,
# year in parentheses at the end.
class BibNIHFormatter
  include BibOutput
  include BibDefaultOutput
  def initialize writer, style
    @writer = writer
    @style = style
    style[:max_authors] ||= 3
  end
  # In-text citation marker: bold italic "(n)".
  def cite_marker num
    @writer.bold(@writer.italics("(#{num.to_s})"))
  end
  # Render one entry, branching on the BibTeX entry type.
  def write bib
    text = dot(authors(to_authorlist(bib[:Author]), :etal=>false, :amp=>true))
    # text += " "
    if bib.type == 'book' or bib.type == 'incollection' or bib.type == 'inproceedings' or bib.type == 'conference' or bib.type == 'phdthesis'
      text += strip_bibtex(dot(capitalize_first(bib.required(:Title))))+comma(bib[:Booktitle])+dot(edition(bib[:Edition]))
      if bib.type == 'book'
        text += comma(bib.required(:Publisher))
      else
        text += comma(bib[:Publisher])
        text += comma(bib[:Address])
      end
      text += comma(bib[:Organization])+dot(pages(bib[:Pages]))
    elsif bib.type == 'journal'
      # Journal article
      # BUG FIX: braces() takes one argument; braces(bib[:Number],false)
      # raised ArgumentError for journal entries.
      text += dot(strip_bibtex(capitalize_first(bib.required(:Title))))+bib.required(:Journal)+colon(bib[:Volume],false)+
        braces(bib[:Number])+dot(pages("#{bib.required(:Pages)}"))
    else
      # this is used mostly:
      text += dot(strip_bibtex(capitalize_first(bib[:Title])))+dot(bold(bib[:Journal]))+bib[:Volume]+braces(bib[:Number])+prefix_colon(bib[:Pages],false)
      # text += '.' if bib.type!='misc'
    end
    text += ' '+braces(bib[:Year])
    if bib.has?(:ISBN)
      text += " ISBN: "+bib[:ISBN]
    end
    # BUG FIX: the pubmed test read bib[:URL] while every other access uses
    # bib[:Url], so the PMID branch could never trigger.
    if bib[:Url] and bib[:Url] =~ /pubmed/
      text += " PMID:"+bib[:Url].split("/").last
    else
      if bib.type=='misc' or !@style[:final]
        # text += url(bib[:Doi],bib[:Url],true)
        # NOTE(review): raises TypeError when :Doi/:howPublished are nil --
        # confirm these fields are always present on this branch.
        text += ' '+bib[:Doi]+bib[:howPublished]
      end
    end
    if !@style[:final]
      text += citations(bib)
    end
    text = text.strip
    text = text.chop if text =~ /[,.]$/
    convert_special_characters(text)+newline
  end
  # Normalise author names to "Last <initials>" (via FormatBibAuthors'
  # split_first_lastname/to_initials), warning on stderr about names that
  # look malformed.
  def to_authorlist s
    list = split_bib_authors(s)
    list = list.map do | a |
      first,last = split_first_lastname(a)
      if first !~ /\./
        $stderr.print "Possibly malformed first name <#{first.strip}> has no dot in <",a,">\n"
      end
      if first =~ /\w\w/
        $stderr.print "Possibly malformed first name <#{first.strip}> contains two+ letters in ref <",a,">\n"
      end
      a1 = last+' '+to_initials(first)
      a2 = a1.gsub(/[,.]/,' ')
      # $stderr.print a," <--\n"
      a2.strip
    end
    list
  end
end
|
module BiolaDeploy
  # Gem version string. Frozen so it cannot be mutated in place.
  VERSION = '0.5.2'.freeze
end
Bump version to 0.5.3
module BiolaDeploy
  # Gem version string. Frozen so it cannot be mutated in place.
  VERSION = '0.5.3'.freeze
end
|
require 'java'

module BiteScript
  # java.lang imports used below for their primitive TYPE constants.
  module JavaTypes
    java_import java.lang.Object
    java_import java.lang.Byte
    java_import java.lang.Boolean
    java_import java.lang.Short
    java_import java.lang.Character
    java_import java.lang.Integer
    java_import java.lang.Long
    java_import java.lang.Float
    java_import java.lang.Double
    java_import java.lang.Void
  end

  # Helpers for building JVM internal names, type descriptors and method
  # signatures from Ruby/Java classes.
  module Signature
    # "java/lang/String" -> "java.lang.String"
    def classname(path)
      path.gsub('/', '.')
    end
    module_function :classname

    # Internal JVM path for a Symbol (returned as-is), a Ruby-wrapped Java
    # class/module, or anything else responding to #name.
    def path(cls)
      case cls
      when Symbol
        return cls
      when Class, Module
        # NOTE(review): java_class.to_s should never be nil, making the ||
        # fallback dead code — confirm on older JRuby before simplifying.
        cls_name = cls.java_class.to_s || cls.java_class.name
      else
        cls_name = cls.name
      end
      cls_name.gsub('.', '/')
    end
    module_function :path

    # JVM type descriptor for cls: "V", "I", "[B", "Ljava/lang/String;", ...
    def class_id(cls)
      cls = cls.java_class if Class === cls
      if !cls || cls == java.lang.Void || cls == Java::void
        return "V"
      end
      if Module === cls
        return "L#{path(cls)};"
      end
      if cls.array?
        cls = cls.component_type
        if cls.primitive?
          cls = cls.primitive_type if cls.respond_to? :primitive_type
          case cls
          when JavaTypes::Byte::TYPE
            return "[B"
          when JavaTypes::Boolean::TYPE
            return "[Z"
          when JavaTypes::Short::TYPE
            return "[S"
          when JavaTypes::Character::TYPE
            return "[C"
          when JavaTypes::Integer::TYPE
            return "[I"
          when JavaTypes::Long::TYPE
            return "[J"
          when JavaTypes::Float::TYPE
            return "[F"
          when JavaTypes::Double::TYPE
            return "[D"
          else
            # Interpolation instead of String#+ — safe even if cls.name is
            # not already a Ruby String (consistent with the rest of the file).
            raise "Unknown type in compiler: #{cls.name}"
          end
        else
          return "[#{class_id(cls)}"
        end
      else
        if cls.primitive?
          cls = cls.primitive_type if cls.respond_to? :primitive_type
          case cls
          when JavaTypes::Byte::TYPE
            return "B"
          when JavaTypes::Boolean::TYPE
            return "Z"
          when JavaTypes::Short::TYPE
            return "S"
          when JavaTypes::Character::TYPE
            return "C"
          when JavaTypes::Integer::TYPE
            return "I"
          when JavaTypes::Long::TYPE
            return "J"
          when JavaTypes::Float::TYPE
            return "F"
          when JavaTypes::Double::TYPE
            return "D"
          when JavaTypes::Void::TYPE, java.lang.Void
            return "V"
          else
            raise "Unknown type in compiler: #{cls.name}"
          end
        else
          return "L#{path(cls)};"
        end
      end
    end
    alias ci class_id
    module_function :class_id, :ci

    # Build a "(<args>)<ret>" method descriptor; the first element of
    # sig_classes is the return type, the rest are argument types.
    def signature(*sig_classes)
      if sig_classes.size == 0
        return "()V"
      end
      return_class = sig_classes.shift
      sig_string = "("
      sig_classes.each {|arg_class| sig_string << class_id(arg_class)}
      sig_string << ")#{class_id(return_class)}"
    end
    alias sig signature
    module_function :signature, :sig
  end
end
Fix string concatenation in raise messages by using string interpolation
require 'java'

module BiteScript
  # java.lang imports used below for their primitive TYPE constants.
  module JavaTypes
    java_import java.lang.Object
    java_import java.lang.Byte
    java_import java.lang.Boolean
    java_import java.lang.Short
    java_import java.lang.Character
    java_import java.lang.Integer
    java_import java.lang.Long
    java_import java.lang.Float
    java_import java.lang.Double
    java_import java.lang.Void
  end

  # Helpers for building JVM internal names, type descriptors and method
  # signatures from Ruby/Java classes.
  module Signature
    # "java/lang/String" -> "java.lang.String"
    def classname(path)
      path.gsub('/', '.')
    end
    module_function :classname

    # Internal JVM path for a Symbol (returned as-is), a Ruby-wrapped Java
    # class/module, or anything else responding to #name.
    def path(cls)
      case cls
      when Symbol
        return cls
      when Class, Module
        # NOTE(review): java_class.to_s should never be nil, making the ||
        # fallback dead code — confirm on older JRuby before simplifying.
        cls_name = cls.java_class.to_s || cls.java_class.name
      else
        cls_name = cls.name
      end
      cls_name.gsub('.', '/')
    end
    module_function :path

    # JVM type descriptor for cls: "V", "I", "[B", "Ljava/lang/String;", ...
    def class_id(cls)
      cls = cls.java_class if Class === cls
      if !cls || cls == java.lang.Void || cls == Java::void
        return "V"
      end
      if Module === cls
        return "L#{path(cls)};"
      end
      if cls.array?
        # Array types: "[" prefix plus the component descriptor.
        cls = cls.component_type
        if cls.primitive?
          cls = cls.primitive_type if cls.respond_to? :primitive_type
          case cls
          when JavaTypes::Byte::TYPE
            return "[B"
          when JavaTypes::Boolean::TYPE
            return "[Z"
          when JavaTypes::Short::TYPE
            return "[S"
          when JavaTypes::Character::TYPE
            return "[C"
          when JavaTypes::Integer::TYPE
            return "[I"
          when JavaTypes::Long::TYPE
            return "[J"
          when JavaTypes::Float::TYPE
            return "[F"
          when JavaTypes::Double::TYPE
            return "[D"
          else
            raise "Unknown type in compiler: #{cls.name}"
          end
        else
          return "[#{class_id(cls)}"
        end
      else
        if cls.primitive?
          cls = cls.primitive_type if cls.respond_to? :primitive_type
          case cls
          when JavaTypes::Byte::TYPE
            return "B"
          when JavaTypes::Boolean::TYPE
            return "Z"
          when JavaTypes::Short::TYPE
            return "S"
          when JavaTypes::Character::TYPE
            return "C"
          when JavaTypes::Integer::TYPE
            return "I"
          when JavaTypes::Long::TYPE
            return "J"
          when JavaTypes::Float::TYPE
            return "F"
          when JavaTypes::Double::TYPE
            return "D"
          when JavaTypes::Void::TYPE, java.lang.Void
            return "V"
          else
            raise "Unknown type in compiler: #{cls.name}"
          end
        else
          return "L#{path(cls)};"
        end
      end
    end
    alias ci class_id
    module_function :class_id, :ci

    # Build a "(<args>)<ret>" method descriptor; the first element of
    # sig_classes is the return type, the rest are argument types.
    def signature(*sig_classes)
      if sig_classes.size == 0
        return "()V"
      end
      return_class = sig_classes.shift
      sig_string = "("
      sig_classes.each {|arg_class| sig_string << class_id(arg_class)}
      sig_string << ")#{class_id(return_class)}"
    end
    alias sig signature
    module_function :signature, :sig
  end
end
|
module Bliss
  # Streams an XML document over HTTP with EventMachine, feeding chunks to a
  # SAX push parser and optionally teeing the raw bytes to a local file.
  class ParserMachine
    # path:     URL to stream.
    # filepath: optional local file to mirror the raw bytes into.
    def initialize(path, filepath=nil)
      @path = path
      @sax_parser = Bliss::SaxParser.new
      @parser = Nokogiri::XML::SAX::PushParser.new(@sax_parser)
      if filepath
        @file = File.new(filepath, 'w')
      end
      @root = nil
      @nodes = nil
      on_root {}
    end

    # Register a callback fired with the document's root element name.
    def on_root(&block)
      return false if not block.is_a? Proc
      @sax_parser.on_root { |root|
        @root = root
        block.call(root)
      }
    end

    def on_tag_open(element, &block)
      return false if block.arity != 1
      @sax_parser.on_tag_open(element, block)
    end

    def on_tag_close(element, &block)
      @sax_parser.on_tag_close(element, block)
    end

    # Root element name seen so far (nil until on_root fires).
    def root
      @root
    end

    def close
      @sax_parser.close
    end

    # Drive the EventMachine loop: stream the URL, push chunks into the SAX
    # parser, and stop once the parser reports itself closed.
    def parse
      @bytes = 0
      EM.run do
        http = EM::HttpRequest.new(@path).get
        http.stream { |chunk|
          chunk.force_encoding('UTF-8')
          @parser << chunk
          @bytes += chunk.length
          if not @sax_parser.is_closed?
            if @file
              @file << chunk
            end
          else
            if @file
              # BUGFIX: chunk.index may return nil when '</ad>' is absent
              # from this chunk (e.g. split across chunk boundaries); guard
              # it instead of crashing on nil + 4. The +4 offset points at
              # the closing '>' of the 5-character '</ad>' tag.
              last_index = chunk.index('</ad>')
              begin
                @file << chunk[0..(last_index + 4)] if last_index
                @file << "</#{self.root}>"
                @file.close
              rescue StandardError
                # BUGFIX: best-effort close — http.callback below may fire
                # afterwards and close @file a second time (IOError).
              end
            end
            EM.stop
          end
        }
        http.callback {
          if @file
            @file.close
          end
          EM.stop
        }
      end
    end
  end
end
#require 'stringio'
#str = StringIO.new
#z = Zlib::GzipWriter.new(str)
#z.write(txt)
#z.close
Wrap the file close in a begin/rescue block so a failed close does not abort the stream teardown
module Bliss
  # Streams an XML document over HTTP with EventMachine, feeding chunks to a
  # SAX push parser and optionally teeing the raw bytes to a local file.
  class ParserMachine
    # path:     URL to stream.
    # filepath: optional local file to mirror the raw bytes into.
    def initialize(path, filepath=nil)
      @path = path
      @sax_parser = Bliss::SaxParser.new
      @parser = Nokogiri::XML::SAX::PushParser.new(@sax_parser)
      if filepath
        @file = File.new(filepath, 'w')
      end
      @root = nil
      @nodes = nil
      on_root {}
    end

    # Register a callback fired with the document's root element name.
    def on_root(&block)
      return false if not block.is_a? Proc
      @sax_parser.on_root { |root|
        @root = root
        block.call(root)
      }
    end

    def on_tag_open(element, &block)
      return false if block.arity != 1
      @sax_parser.on_tag_open(element, block)
    end

    def on_tag_close(element, &block)
      @sax_parser.on_tag_close(element, block)
    end

    # Root element name seen so far (nil until on_root fires).
    def root
      @root
    end

    def close
      @sax_parser.close
    end

    # Drive the EventMachine loop: stream the URL, push chunks into the SAX
    # parser, and stop once the parser reports itself closed.
    def parse
      @bytes = 0
      EM.run do
        http = EM::HttpRequest.new(@path).get
        http.stream { |chunk|
          chunk.force_encoding('UTF-8')
          @parser << chunk
          @bytes += chunk.length
          if not @sax_parser.is_closed?
            if @file
              @file << chunk
            end
          else
            if @file
              # NOTE(review): chunk.index returns nil when '</ad>' is not in
              # this chunk, which would raise on nil + 4; the bare rescue
              # below swallows that (and double-close IOErrors) silently —
              # confirm this is the intended best-effort behavior.
              last_index = chunk.index('</ad>') + 4
              begin
                @file << chunk[0..last_index]
                @file << "</#{self.root}>"
                @file.close
              rescue
              end
            end
            EM.stop
          end
        }
        http.callback {
          if @file
            @file.close
          end
          EM.stop
        }
      end
    end
  end
end
#require 'stringio'
#str = StringIO.new
#z = Zlib::GzipWriter.new(str)
#z.write(txt)
#z.close
|
module Bluepill
  # Top-level daemon object: owns the process groups, the control socket,
  # the pid file and the main monitoring loop.
  class Application
    # Commands routed from the control socket to groups/processes.
    PROCESS_COMMANDS = [:start, :stop, :restart, :unmonitor]

    attr_accessor :name, :logger, :base_dir, :socket, :pid_file
    attr_accessor :groups, :work_queue
    attr_accessor :pids_dir, :log_file

    def initialize(name, options = {})
      self.name = name
      self.log_file = options[:log_file]
      self.base_dir = options[:base_dir] || '/var/bluepill'
      self.pid_file = File.join(self.base_dir, 'pids', self.name + ".pid")
      self.pids_dir = File.join(self.base_dir, 'pids', self.name)
      self.groups = {}
      self.logger = Bluepill::Logger.new(:log_file => self.log_file).prefix_with(self.name)
      self.setup_signal_traps
      self.setup_pids_dir
    end

    # Boot the daemon; exits the process with status 5 on failure.
    def load
      begin
        self.start_server
      rescue StandardError => e
        $stderr.puts "Failed to start bluepill:"
        $stderr.puts "%s `%s`" % [e.class.name, e.message]
        $stderr.puts e.backtrace
        exit(5)
      end
    end

    # Human-readable, indented status report of every group, process and
    # (when monitored) child process.
    def status
      buffer = []
      depth = 0
      if self.groups.has_key?(nil)
        self.groups[nil].processes.each do |p|
          buffer << "%s%s(pid:%s): %s" % [" " * depth, p.name, p.actual_pid.inspect, p.state]
          if p.monitor_children?
            depth += 2
            p.children.each do |c|
              buffer << "%s%s: %s" % [" " * depth, c.name, c.state]
            end
            depth -= 2
          end
        end
      end
      self.groups.each do |group_name, group|
        next if group_name.nil?
        buffer << "\n#{group_name}"
        group.processes.each do |p|
          depth += 2
          buffer << "%s%s(pid:%s): %s" % [" " * depth, p.name, p.actual_pid.inspect, p.state]
          if p.monitor_children?
            depth += 2
            p.children.each do |c|
              buffer << "%s%s: %s" % [" " * depth, c.name, c.state]
            end
            depth -= 2
          end
          depth -= 2
        end
      end
      buffer.join("\n")
    end

    # Generate start/stop/restart/unmonitor, all routed through
    # send_to_process_or_group.
    PROCESS_COMMANDS.each do |command|
      class_eval <<-END
        def #{command}(group_name, process_name = nil)
          puts "got #{command}"
          self.send_to_process_or_group(:#{command}, group_name, process_name)
        end
      END
    end

    def add_process(process, group_name = nil)
      # BUGFIX: normalize Symbol group names to Strings so they land in the
      # same hash bucket commands are later looked up under (arguments
      # arriving over the control socket are Strings).
      group_name = group_name.to_s if group_name
      self.groups[group_name] ||= Group.new(group_name, :logger => self.logger.prefix_with(group_name))
      self.groups[group_name].add_process(process)
    end

    protected

    # Dispatch a command to a named group, or — when no group by that name
    # exists and no process name was given — broadcast it to every group as
    # a process-name lookup.
    def send_to_process_or_group(method, group_name, process_name)
      if self.groups.key?(group_name)
        self.groups[group_name].send(method, process_name)
      elsif process_name.nil?
        # they must be targeting just by process name
        process_name = group_name
        self.groups.values.collect do |group|
          group.send(method, process_name)
        end.flatten
      else
        []
      end
    end

    # Accept "command:arg:arg" lines on the control socket in a background
    # thread and reply with a marshalled result.
    def start_listener
      @listener_thread.kill if @listener_thread
      @listener_thread = Thread.new do
        begin
          loop do
            client = self.socket.accept
            command, *args = client.readline.strip.split(":")
            response = self.send(command, *args)
            client.write(Marshal.dump(response))
            client.close
          end
        rescue StandardError => e
          logger.err("Got exception in cmd listener: %s `%s`" % [e.class.name, e.message])
          e.backtrace.each {|l| logger.err(l)}
        end
      end
    end

    # Daemonize, boot all groups, open the control socket and enter the
    # monitor loop.
    def start_server
      self.kill_previous_bluepill
      Daemonize.daemonize
      self.logger.reopen
      $0 = "bluepilld: #{self.name}"
      self.groups.each {|_, group| group.boot }
      self.write_pid_file
      self.socket = Bluepill::Socket.server(self.base_dir, self.name)
      self.start_listener
      self.run
    end

    # Main loop: tick every group once a second until a signal clears
    # @running, then clean up.
    def run
      @running = true # set to false by signal trap
      while @running
        System.reset_data
        self.groups.each { |_, group| group.tick }
        sleep 1
      end
      cleanup
    end

    def cleanup
      File.unlink(self.socket.path) if self.socket
      File.unlink(self.pid_file) if File.exists?(self.pid_file)
    end

    def setup_signal_traps
      terminator = lambda do
        puts "Terminating..."
        @running = false
      end
      Signal.trap("TERM", &terminator)
      Signal.trap("INT", &terminator)
      Signal.trap("HUP") do
        self.logger.reopen if self.logger
      end
    end

    def setup_pids_dir
      FileUtils.mkdir_p(self.pids_dir) unless File.exists?(self.pids_dir)
      # we need everybody to be able to write to the pids_dir as processes managed by
      # bluepill will be writing to this dir after they've dropped privileges
      FileUtils.chmod(0777, self.pids_dir)
    end

    # Signal any previously running bluepilld (via its pid file) and wait up
    # to ~5s for it to die; exits with status 4 if it will not.
    def kill_previous_bluepill
      if File.exists?(self.pid_file)
        previous_pid = File.read(self.pid_file).to_i
        begin
          ::Process.kill(0, previous_pid)
          puts "Killing previous bluepilld[#{previous_pid}]"
          ::Process.kill(2, previous_pid)
        rescue Exception => e
          $stderr.puts "Encountered error trying to kill previous bluepill:"
          $stderr.puts "#{e.class}: #{e.message}"
          exit(4) unless e.is_a?(Errno::ESRCH)
        else
          10.times do |i|
            sleep 0.5
            break unless System.pid_alive?(previous_pid)
          end
          if System.pid_alive?(previous_pid)
            $stderr.puts "Previous bluepilld[#{previous_pid}] didn't die"
            exit(4)
          end
        end
      end
    end

    def write_pid_file
      File.open(self.pid_file, 'w') { |x| x.write(::Process.pid) }
    end
  end
end
Fix a bug where Bluepill mishandled commands when group names were specified as
symbols rather than strings
module Bluepill
  # Top-level daemon object: owns the process groups, the control socket,
  # the pid file and the main monitoring loop.
  class Application
    # Commands routed from the control socket to groups/processes.
    PROCESS_COMMANDS = [:start, :stop, :restart, :unmonitor]

    attr_accessor :name, :logger, :base_dir, :socket, :pid_file
    attr_accessor :groups, :work_queue
    attr_accessor :pids_dir, :log_file

    def initialize(name, options = {})
      self.name = name
      self.log_file = options[:log_file]
      self.base_dir = options[:base_dir] || '/var/bluepill'
      self.pid_file = File.join(self.base_dir, 'pids', self.name + ".pid")
      self.pids_dir = File.join(self.base_dir, 'pids', self.name)
      self.groups = {}
      self.logger = Bluepill::Logger.new(:log_file => self.log_file).prefix_with(self.name)
      self.setup_signal_traps
      self.setup_pids_dir
    end

    # Boot the daemon; exits the process with status 5 on failure.
    def load
      begin
        self.start_server
      rescue StandardError => e
        $stderr.puts "Failed to start bluepill:"
        $stderr.puts "%s `%s`" % [e.class.name, e.message]
        $stderr.puts e.backtrace
        exit(5)
      end
    end

    # Human-readable, indented status report of every group, process and
    # (when monitored) child process.
    def status
      buffer = []
      depth = 0
      if self.groups.has_key?(nil)
        self.groups[nil].processes.each do |p|
          buffer << "%s%s(pid:%s): %s" % [" " * depth, p.name, p.actual_pid.inspect, p.state]
          if p.monitor_children?
            depth += 2
            p.children.each do |c|
              buffer << "%s%s: %s" % [" " * depth, c.name, c.state]
            end
            depth -= 2
          end
        end
      end
      self.groups.each do |group_name, group|
        next if group_name.nil?
        buffer << "\n#{group_name}"
        group.processes.each do |p|
          depth += 2
          buffer << "%s%s(pid:%s): %s" % [" " * depth, p.name, p.actual_pid.inspect, p.state]
          if p.monitor_children?
            depth += 2
            p.children.each do |c|
              buffer << "%s%s: %s" % [" " * depth, c.name, c.state]
            end
            depth -= 2
          end
          depth -= 2
        end
      end
      buffer.join("\n")
    end

    # Generate start/stop/restart/unmonitor, all routed through
    # send_to_process_or_group.
    PROCESS_COMMANDS.each do |command|
      class_eval <<-END
        def #{command}(group_name, process_name = nil)
          puts "got #{command}"
          self.send_to_process_or_group(:#{command}, group_name, process_name)
        end
      END
    end

    def add_process(process, group_name = nil)
      # Normalize Symbol group names to Strings so they land in the same
      # hash bucket commands are later looked up under.
      group_name = group_name.to_s if group_name
      self.groups[group_name] ||= Group.new(group_name, :logger => self.logger.prefix_with(group_name))
      self.groups[group_name].add_process(process)
    end

    protected

    # Dispatch a command to a named group, or — when no group by that name
    # exists and no process name was given — broadcast it to every group as
    # a process-name lookup.
    def send_to_process_or_group(method, group_name, process_name)
      if self.groups.key?(group_name)
        self.groups[group_name].send(method, process_name)
      elsif process_name.nil?
        # they must be targeting just by process name
        process_name = group_name
        self.groups.values.collect do |group|
          group.send(method, process_name)
        end.flatten
      else
        []
      end
    end

    # Accept "command:arg:arg" lines on the control socket in a background
    # thread and reply with a marshalled result.
    def start_listener
      @listener_thread.kill if @listener_thread
      @listener_thread = Thread.new do
        begin
          loop do
            client = self.socket.accept
            command, *args = client.readline.strip.split(":")
            response = self.send(command, *args)
            client.write(Marshal.dump(response))
            client.close
          end
        rescue StandardError => e
          logger.err("Got exception in cmd listener: %s `%s`" % [e.class.name, e.message])
          e.backtrace.each {|l| logger.err(l)}
        end
      end
    end

    # Daemonize, boot all groups, open the control socket and enter the
    # monitor loop.
    def start_server
      self.kill_previous_bluepill
      Daemonize.daemonize
      self.logger.reopen
      $0 = "bluepilld: #{self.name}"
      self.groups.each {|_, group| group.boot }
      self.write_pid_file
      self.socket = Bluepill::Socket.server(self.base_dir, self.name)
      self.start_listener
      self.run
    end

    # Main loop: tick every group once a second until a signal clears
    # @running, then clean up.
    def run
      @running = true # set to false by signal trap
      while @running
        System.reset_data
        self.groups.each { |_, group| group.tick }
        sleep 1
      end
      cleanup
    end

    def cleanup
      File.unlink(self.socket.path) if self.socket
      File.unlink(self.pid_file) if File.exists?(self.pid_file)
    end

    def setup_signal_traps
      terminator = lambda do
        puts "Terminating..."
        @running = false
      end
      Signal.trap("TERM", &terminator)
      Signal.trap("INT", &terminator)
      Signal.trap("HUP") do
        self.logger.reopen if self.logger
      end
    end

    def setup_pids_dir
      FileUtils.mkdir_p(self.pids_dir) unless File.exists?(self.pids_dir)
      # we need everybody to be able to write to the pids_dir as processes managed by
      # bluepill will be writing to this dir after they've dropped privileges
      FileUtils.chmod(0777, self.pids_dir)
    end

    # Signal any previously running bluepilld (via its pid file) and wait up
    # to ~5s for it to die; exits with status 4 if it will not.
    def kill_previous_bluepill
      if File.exists?(self.pid_file)
        previous_pid = File.read(self.pid_file).to_i
        begin
          ::Process.kill(0, previous_pid)
          puts "Killing previous bluepilld[#{previous_pid}]"
          ::Process.kill(2, previous_pid)
        rescue Exception => e
          $stderr.puts "Encountered error trying to kill previous bluepill:"
          $stderr.puts "#{e.class}: #{e.message}"
          exit(4) unless e.is_a?(Errno::ESRCH)
        else
          10.times do |i|
            sleep 0.5
            break unless System.pid_alive?(previous_pid)
          end
          if System.pid_alive?(previous_pid)
            $stderr.puts "Previous bluepilld[#{previous_pid}] didn't die"
            exit(4)
          end
        end
      end
    end

    def write_pid_file
      File.open(self.pid_file, 'w') { |x| x.write(::Process.pid) }
    end
  end
end
module Byebug
  #
  # Show byebug settings.
  #
  class ShowCommand < Command
    self.allow_in_control = true

    def regexp
      /^\s* show (?:\s+(?<setting>\w+))? \s*$/x
    end

    # Print the value of the named setting, the general help text when no
    # setting was given, or an error message for unknown settings.
    def execute
      key = @match[:setting]
      return puts(self.class.help) if key.nil?
      full_key = Setting.find(key)
      return errmsg(pr('show.errors.unknown_setting', key: key)) unless full_key
      # Kernel#puts stringifies its argument itself, so the explicit #to_s
      # was redundant and has been dropped.
      puts Setting.settings[full_key.to_sym]
    end

    class << self
      def names
        %w(show)
      end

      def description
        <<-EOD.gsub(/^ {8}/, '')
        show <setting> <value>
        Generic command for showing byebug settings. You can change them with
        the "set" command.
        EOD
      end

      def help(subcmds = [])
        Setting.help('show', subcmds.first)
      end
    end
  end
end
Remove unnecessary to_s
module Byebug
  #
  # Show byebug settings.
  #
  class ShowCommand < Command
    self.allow_in_control = true

    def regexp
      /^\s* show (?:\s+(?<setting>\w+))? \s*$/x
    end

    # Print the value of the named setting, the general help text when no
    # setting was given, or an error message for unknown settings.
    def execute
      key = @match[:setting]
      return puts(self.class.help) if key.nil?
      full_key = Setting.find(key)
      return errmsg(pr('show.errors.unknown_setting', key: key)) unless full_key
      puts Setting.settings[full_key.to_sym]
    end

    class << self
      def names
        %w(show)
      end

      def description
        <<-EOD.gsub(/^ {8}/, '')
        show <setting> <value>
        Generic command for showing byebug settings. You can change them with
        the "set" command.
        EOD
      end

      def help(subcmds = [])
        Setting.help('show', subcmds.first)
      end
    end
  end
end
|
module CabooseRets
  # Gem version string. Frozen so it cannot be mutated in place.
  VERSION = '0.1.177'.freeze
end
Updated version file
module CabooseRets
  # Gem version string. Frozen so it cannot be mutated in place.
  VERSION = '0.1.178'.freeze
end
module CabooseRets
  # Gem version string. Frozen so it cannot be mutated in place.
  VERSION = '0.0.61'.freeze
end
Updated version file
module CabooseRets
  # Gem version string. Frozen so it cannot be mutated in place.
  VERSION = '0.0.62'.freeze
end
|
module CabooseRets
  # Gem version string. Frozen so it cannot be mutated in place.
  VERSION = '0.0.28'.freeze
end
Updated version file
module CabooseRets
  # Gem version string. Frozen so it cannot be mutated in place.
  VERSION = '0.0.29'.freeze
end
|
module Canvas
  class CoffeeScript
    # True when a `coffee` binary is on PATH AND its reported version matches
    # the pinned coffee-script-source gem version; memoized in @is_available
    # (including a falsy result). Warns on STDOUT on version mismatch.
    def self.coffee_script_binary_is_available?
      return @is_available if instance_variable_defined?(:@is_available)
      coffee_is_installed = `which coffee` && $?.success?
      if coffee_is_installed
        coffee_version = `coffee -v`.strip
        coffee_is_correct_version = coffee_version.match(::CoffeeScript.version)
        unless coffee_is_correct_version
          puts "--> WARNING #{coffee_version} != pinned coffee-script-source: #{::CoffeeScript.version}"
        end
      end
      # NOTE(review): when coffee is not installed, coffee_is_correct_version
      # is nil here, so @is_available memoizes nil (falsy) — intended.
      @is_available = coffee_is_installed && coffee_is_correct_version
    end
  end
end
env var to error on incorrect coffee executable version
we're going to use this in the deploy script
Change-Id: Idd641b009fd3d421e1a56385339f07316a451137
module Canvas
  class CoffeeScript
    # True when a `coffee` binary is on PATH AND its reported version matches
    # the pinned coffee-script-source gem version; memoized in @is_available
    # (including a falsy result). On version mismatch, raises when
    # REQUIRE_COFFEE_VERSION_MATCH=1 (used by the deploy script), otherwise
    # only warns on STDOUT.
    def self.coffee_script_binary_is_available?
      return @is_available if instance_variable_defined?(:@is_available)
      coffee_is_installed = `which coffee` && $?.success?
      if coffee_is_installed
        coffee_version = `coffee -v`.strip
        coffee_is_correct_version = coffee_version.match(::CoffeeScript.version)
        unless coffee_is_correct_version
          if ENV['REQUIRE_COFFEE_VERSION_MATCH'] == '1'
            raise "coffeescript version #{coffee_version} != pinned coffee-script-source: #{::CoffeeScript.version}"
          else
            puts "--> WARNING #{coffee_version} != pinned coffee-script-source: #{::CoffeeScript.version}"
          end
        end
      end
      @is_available = coffee_is_installed && coffee_is_correct_version
    end
  end
end
|
# Capistrano tasks: dump the remote (or local) MySQL database and import it
# into the local development database.
Capistrano::Configuration.instance.load do
  namespace :mysqldump do
    task :default, :roles => :db do
      set :mysqldump_config, YAML.load_file("config/database.yml")[rails_env.to_s]
      host = mysqldump_config["host"]
      # overwrite these if necessary
      set :mysqldump_bin, "/usr/local/mysql/bin/mysqldump" unless exists?(:mysqldump_bin)
      set :mysqldump_remote_tmp_dir, "/tmp" unless exists?(:mysqldump_remote_tmp_dir)
      set :mysqldump_local_tmp_dir, "/tmp" unless exists?(:mysqldump_local_tmp_dir)
      set :mysqldump_location, host && host.any? && host != "localhost" ? :local : :remote unless exists?(:mysqldump_location)
      # for convenience
      set :mysqldump_filename, "%s-%s.sql" % [application, Time.now.to_i]
      set :mysqldump_filename_gz, "%s.gz" % mysqldump_filename
      set :mysqldump_remote_filename, File.join( mysqldump_remote_tmp_dir, mysqldump_filename_gz )
      set :mysqldump_local_filename, File.join( mysqldump_local_tmp_dir, mysqldump_filename )
      set :mysqldump_local_filename_gz, File.join( mysqldump_local_tmp_dir, mysqldump_filename_gz )
      dump
      import
    end

    # Produce a local .sql file, dumping either on the remote db host (then
    # downloading the gzipped dump) or directly against a remote/local server.
    task :dump, :roles => :db do
      username, password, database, host = mysqldump_config.values_at *%w( username password database host )
      case mysqldump_location
      when :remote
        mysqldump_cmd = "%s -u %s -p %s" % [ mysqldump_bin, username, database ]
        mysqldump_cmd += " -h #{host}" if host && host.any?
        mysqldump_cmd += " | gzip > %s" % mysqldump_remote_filename
        run mysqldump_cmd do |ch, stream, out|
          ch.send_data "#{password}\n" if out =~ /^Enter password:/
        end
        download mysqldump_remote_filename, mysqldump_local_filename_gz, :via => :scp
        # BUGFIX: delete the dump from the remote host once fetched —
        # otherwise every run leaks a gzipped dump in the remote tmp dir.
        run "rm #{mysqldump_remote_filename}"
        `gunzip #{mysqldump_local_filename_gz}`
      when :local
        mysqldump_cmd = "%s -u %s" % [ mysqldump_bin, username ]
        mysqldump_cmd += " -p#{password}" if password && password.any?
        mysqldump_cmd += " -h #{host}" if host && host.any?
        mysqldump_cmd += " %s > %s" % [ database, mysqldump_local_filename]
        `#{mysqldump_cmd}`
      end
    end

    # Drop and recreate the local development database, then load the dump
    # into it and remove the local .sql file.
    task :import do
      config = YAML.load_file("config/database.yml")["development"]
      username, password, database = config.values_at *%w( username password database )
      mysql_cmd = "mysql -u#{username}"
      mysql_cmd += " -p#{password}" if password && password.any?
      `#{mysql_cmd} -e "drop database #{database}; create database #{database}"`
      `#{mysql_cmd} #{database} < #{mysqldump_local_filename}`
      `rm #{mysqldump_local_filename}`
    end
  end
end
clean up remote gz file
# Capistrano tasks: dump the remote (or local) MySQL database and import it
# into the local development database.
Capistrano::Configuration.instance.load do
  namespace :mysqldump do
    task :default, :roles => :db do
      set :mysqldump_config, YAML.load_file("config/database.yml")[rails_env.to_s]
      host = mysqldump_config["host"]
      # overwrite these if necessary
      set :mysqldump_bin, "/usr/local/mysql/bin/mysqldump" unless exists?(:mysqldump_bin)
      set :mysqldump_remote_tmp_dir, "/tmp" unless exists?(:mysqldump_remote_tmp_dir)
      set :mysqldump_local_tmp_dir, "/tmp" unless exists?(:mysqldump_local_tmp_dir)
      set :mysqldump_location, host && host.any? && host != "localhost" ? :local : :remote unless exists?(:mysqldump_location)
      # for convenience
      set :mysqldump_filename, "%s-%s.sql" % [application, Time.now.to_i]
      set :mysqldump_filename_gz, "%s.gz" % mysqldump_filename
      set :mysqldump_remote_filename, File.join( mysqldump_remote_tmp_dir, mysqldump_filename_gz )
      set :mysqldump_local_filename, File.join( mysqldump_local_tmp_dir, mysqldump_filename )
      set :mysqldump_local_filename_gz, File.join( mysqldump_local_tmp_dir, mysqldump_filename_gz )
      dump
      import
    end

    # Produce a local .sql file, dumping either on the remote db host (then
    # downloading and deleting the gzipped dump) or directly against a
    # remote/local server.
    task :dump, :roles => :db do
      username, password, database, host = mysqldump_config.values_at *%w( username password database host )
      case mysqldump_location
      when :remote
        mysqldump_cmd = "%s -u %s -p %s" % [ mysqldump_bin, username, database ]
        mysqldump_cmd += " -h #{host}" if host && host.any?
        mysqldump_cmd += " | gzip > %s" % mysqldump_remote_filename
        # -p without an attached value makes mysqldump prompt; feed the
        # password via stdin when the prompt appears.
        run mysqldump_cmd do |ch, stream, out|
          ch.send_data "#{password}\n" if out =~ /^Enter password:/
        end
        download mysqldump_remote_filename, mysqldump_local_filename_gz, :via => :scp
        # Clean up the dump on the remote host once it has been fetched.
        run "rm #{mysqldump_remote_filename}"
        `gunzip #{mysqldump_local_filename_gz}`
      when :local
        mysqldump_cmd = "%s -u %s" % [ mysqldump_bin, username ]
        mysqldump_cmd += " -p#{password}" if password && password.any?
        mysqldump_cmd += " -h #{host}" if host && host.any?
        mysqldump_cmd += " %s > %s" % [ database, mysqldump_local_filename]
        `#{mysqldump_cmd}`
      end
    end

    # Drop and recreate the local development database, then load the dump
    # into it and remove the local .sql file.
    task :import do
      config = YAML.load_file("config/database.yml")["development"]
      username, password, database = config.values_at *%w( username password database )
      mysql_cmd = "mysql -u#{username}"
      mysql_cmd += " -p#{password}" if password && password.any?
      `#{mysql_cmd} -e "drop database #{database}; create database #{database}"`
      `#{mysql_cmd} #{database} < #{mysqldump_local_filename}`
      `rm #{mysqldump_local_filename}`
    end
  end
end
require File.expand_path(File.dirname(__FILE__) + '/util')

# All site artifacts are assembled under this directory.
SITE_DIR = "#{WORKSPACE_DIR}/reports/site"

desc 'Copy the javadocs to docs dir'
task 'site:javadocs' do
  javadocs_dir = "#{WORKSPACE_DIR}/target/arez/doc"
  file(javadocs_dir).invoke
  mkdir_p SITE_DIR
  cp_r javadocs_dir, "#{SITE_DIR}/api"
end

desc 'Copy the favicons to docs dir'
task 'site:favicons' do
  favicons_dir = "#{WORKSPACE_DIR}/assets/favicons"
  mkdir_p SITE_DIR
  cp_r Dir["#{favicons_dir}/*.png"], SITE_DIR
  cp_r Dir["#{favicons_dir}/*.json"], SITE_DIR
  cp_r Dir["#{favicons_dir}/*.xml"], SITE_DIR
  cp_r Dir["#{favicons_dir}/*.ico"], SITE_DIR
end

desc 'Build the website'
task 'site:build' do
  rm_rf SITE_DIR
  sh "yarn build #{SITE_DIR}"
  mkdir_p File.dirname(SITE_DIR)
  mv "#{WORKSPACE_DIR}/website/build/arez", SITE_DIR
  task('site:javadocs').invoke
  task('site:favicons').invoke
end

desc 'Check that the website does not have any broken links'
task 'site:link_check' do
  require 'webrick'
  require 'socket'
  # Copy the root and replace any absolute paths to target url with relative paths
  # This is required as docusaurus forces qualified paths for some elements (i.e. atom/rss feeds)
  root = "#{WORKSPACE_DIR}/target/site-link-check"
  rm_rf root
  mkdir_p File.dirname(root)
  cp_r SITE_DIR, root
  Dir["#{root}/**/*.html"].each do |filename|
    content = IO.read(filename)
    content = content.gsub('https://arez.github.io', '')
    IO.write(filename, content)
  end
  # Get a free port and web address by binding to an ephemeral port,
  # then releasing it for WEBrick to use.
  socket = Socket.new(:INET, :STREAM, 0)
  socket.bind(Addrinfo.tcp('127.0.0.1', 0))
  address = socket.local_address.ip_address
  port = socket.local_address.ip_port
  socket.close
  webserver = WEBrick::HTTPServer.new(:Port => port, :DocumentRoot => root)
  Thread.new {webserver.start}
  trap('INT') {webserver.shutdown}
  begin
    sh "yarn blc --ordered --recursive --filter-level 3 http://#{address}:#{port} --exclude https://github.com/arez/arez/compare/ --exclude https://github.com/arez/arez.github.io/settings --exclude https://docs.oracle.com/javase/8/docs/api"
  ensure
    webserver.shutdown
  end
end

desc 'Serve the website for developing documentation'
task 'site:serve' do
  sh 'yarn start'
end

desc 'Build the website'
task 'site:deploy' => ['site:build'] do
  # Verify the site is valid first
  task('site:link_check').invoke
  # Only publish the site off the master branch if running out of Travis
  if ENV['TRAVIS_BRANCH'].nil? || ENV['TRAVIS_BRANCH'] == 'master'
    origin_url = 'https://github.com/arez/arez.github.io.git'
    travis_build_number = ENV['TRAVIS_BUILD_NUMBER']
    if travis_build_number
      # On Travis, push over SSH so the deploy key is used.
      origin_url = origin_url.gsub('https://github.com/', 'git@github.com:')
    end
    local_dir = "#{WORKSPACE_DIR}/target/remote_site"
    rm_rf local_dir
    sh "git clone -b master --depth 1 #{origin_url} #{local_dir}"
    # This is the list of directories controlled by other processes that should be left alone
    excludes = %w()
    in_dir(local_dir) do
      message = "Publish website#{travis_build_number.nil? ? '' : " - Travis build: #{travis_build_number}"}"
      rm_rf Dir["#{local_dir}/*"].select {|f| !excludes.include?(File.basename(f))}
      cp_r Dir["#{SITE_DIR}/*"], local_dir
      sh 'git add . -f'
      unless `git status -s`.strip.empty?
        sh "git commit -m \"#{message}\""
        sh 'git push -f origin master'
      end
    end
  end
end
Exclude idlestatus sub-directory that is uploaded through other mechanisms
require File.expand_path(File.dirname(__FILE__) + '/util')

# All site artifacts are assembled under this directory.
SITE_DIR = "#{WORKSPACE_DIR}/reports/site"

desc 'Copy the javadocs to docs dir'
task 'site:javadocs' do
  javadocs_dir = "#{WORKSPACE_DIR}/target/arez/doc"
  file(javadocs_dir).invoke
  mkdir_p SITE_DIR
  cp_r javadocs_dir, "#{SITE_DIR}/api"
end

desc 'Copy the favicons to docs dir'
task 'site:favicons' do
  favicons_dir = "#{WORKSPACE_DIR}/assets/favicons"
  mkdir_p SITE_DIR
  cp_r Dir["#{favicons_dir}/*.png"], SITE_DIR
  cp_r Dir["#{favicons_dir}/*.json"], SITE_DIR
  cp_r Dir["#{favicons_dir}/*.xml"], SITE_DIR
  cp_r Dir["#{favicons_dir}/*.ico"], SITE_DIR
end

desc 'Build the website'
task 'site:build' do
  rm_rf SITE_DIR
  sh "yarn build #{SITE_DIR}"
  mkdir_p File.dirname(SITE_DIR)
  mv "#{WORKSPACE_DIR}/website/build/arez", SITE_DIR
  task('site:javadocs').invoke
  task('site:favicons').invoke
end

desc 'Check that the website does not have any broken links'
task 'site:link_check' do
  require 'webrick'
  require 'socket'
  # Copy the root and replace any absolute paths to target url with relative paths
  # This is required as docusaurus forces qualified paths for some elements (i.e. atom/rss feeds)
  root = "#{WORKSPACE_DIR}/target/site-link-check"
  rm_rf root
  mkdir_p File.dirname(root)
  cp_r SITE_DIR, root
  Dir["#{root}/**/*.html"].each do |filename|
    content = IO.read(filename)
    content = content.gsub('https://arez.github.io', '')
    IO.write(filename, content)
  end
  # Get a free port and web address by binding to an ephemeral port,
  # then releasing it for WEBrick to use.
  socket = Socket.new(:INET, :STREAM, 0)
  socket.bind(Addrinfo.tcp('127.0.0.1', 0))
  address = socket.local_address.ip_address
  port = socket.local_address.ip_port
  socket.close
  webserver = WEBrick::HTTPServer.new(:Port => port, :DocumentRoot => root)
  Thread.new {webserver.start}
  trap('INT') {webserver.shutdown}
  begin
    sh "yarn blc --ordered --recursive --filter-level 3 http://#{address}:#{port} --exclude https://github.com/arez/arez/compare/ --exclude https://github.com/arez/arez.github.io/settings --exclude https://docs.oracle.com/javase/8/docs/api"
  ensure
    webserver.shutdown
  end
end

desc 'Serve the website for developing documentation'
task 'site:serve' do
  sh 'yarn start'
end

desc 'Build the website'
task 'site:deploy' => ['site:build'] do
  # Verify the site is valid first
  task('site:link_check').invoke
  # Only publish the site off the master branch if running out of Travis
  if ENV['TRAVIS_BRANCH'].nil? || ENV['TRAVIS_BRANCH'] == 'master'
    origin_url = 'https://github.com/arez/arez.github.io.git'
    travis_build_number = ENV['TRAVIS_BUILD_NUMBER']
    if travis_build_number
      # On Travis, push over SSH so the deploy key is used.
      origin_url = origin_url.gsub('https://github.com/', 'git@github.com:')
    end
    local_dir = "#{WORKSPACE_DIR}/target/remote_site"
    rm_rf local_dir
    sh "git clone -b master --depth 1 #{origin_url} #{local_dir}"
    # This is the list of directories controlled by other processes that should be left alone
    excludes = %w(idlestatus)
    in_dir(local_dir) do
      message = "Publish website#{travis_build_number.nil? ? '' : " - Travis build: #{travis_build_number}"}"
      rm_rf Dir["#{local_dir}/*"].select {|f| !excludes.include?(File.basename(f))}
      cp_r Dir["#{SITE_DIR}/*"], local_dir
      sh 'git add . -f'
      unless `git status -s`.strip.empty?
        sh "git commit -m \"#{message}\""
        sh 'git push -f origin master'
      end
    end
  end
end
|
# Shared configuration applied to both the spec and rcov rake tasks.
spec_defaults = lambda do |spec|
  spec.pattern = 'spec/**/*_spec.rb'
  spec.libs << 'lib' << 'spec'
  spec.spec_opts << '--options' << 'spec/spec.opts'
end

begin
  require 'spec/rake/spectask'
  Spec::Rake::SpecTask.new(:spec, &spec_defaults)
rescue LoadError
  # rspec not installed: replace the task with one that explains how to get it.
  task :spec do
    abort 'rspec is not available. In order to run spec, you must: gem install rspec'
  end
end

begin
  require 'rcov'
  require 'spec/rake/verify_rcov'
  Spec::Rake::SpecTask.new(:rcov) do |rcov|
    spec_defaults.call(rcov)
    rcov.rcov = true
    rcov.rcov_opts = File.read('spec/rcov.opts').split(/\s+/)
  end
  # Fail the build unless coverage is 100%.
  RCov::VerifyTask.new(:verify_rcov => :rcov) do |rcov|
    rcov.threshold = 100
  end
rescue LoadError
  # rcov not installed: replace both tasks with explanatory stubs.
  %w[ rcov verify_rcov ].each do |name|
    task name do
      abort "rcov is not available. In order to run #{name}, you must: gem install rcov"
    end
  end
end

task :default => :spec
Force check_dependencies to run before spec and rcov tasks
# Shared configuration applied to both the spec and rcov rake tasks.
spec_defaults = lambda do |spec|
  spec.pattern = 'spec/**/*_spec.rb'
  spec.libs << 'lib' << 'spec'
  spec.spec_opts << '--options' << 'spec/spec.opts'
end

begin
  require 'spec/rake/spectask'
  Spec::Rake::SpecTask.new(:spec, &spec_defaults)
rescue LoadError
  # rspec not installed: replace the task with one that explains how to get it.
  task :spec do
    abort 'rspec is not available. In order to run spec, you must: gem install rspec'
  end
end

begin
  require 'rcov'
  require 'spec/rake/verify_rcov'
  Spec::Rake::SpecTask.new(:rcov) do |rcov|
    spec_defaults.call(rcov)
    rcov.rcov = true
    rcov.rcov_opts = File.read('spec/rcov.opts').split(/\s+/)
  end
  # Fail the build unless coverage is 100%.
  RCov::VerifyTask.new(:verify_rcov => :rcov) do |rcov|
    rcov.threshold = 100
  end
rescue LoadError
  # rcov not installed: replace both tasks with explanatory stubs.
  %w[ rcov verify_rcov ].each do |name|
    task name do
      abort "rcov is not available. In order to run #{name}, you must: gem install rcov"
    end
  end
end

# Ensure gem dependencies are checked before running specs or coverage.
task :spec => :check_dependencies
task :rcov => :check_dependencies

task :default => :spec
|
module Celluloid
  # Base class of all Celluloid errors
  Error = Class.new(StandardError)
  # Don't do Actor-like things outside Actor scope
  NotActorError = Class.new(Celluloid::Error)
  # Trying to do something to a dead actor
  DeadActorError = Class.new(Celluloid::Error)
  # A timeout occurred before the given request could complete
  TimeoutError = Class.new(Celluloid::Error)
  # NOTE(review): RefactorError has no references in this file; confirm it is
  # used elsewhere before removing it from the public API.
  RefactorError = Class.new(Celluloid::Error)
  # The sender made an error, not the current actor
  class AbortError < Celluloid::Error
    # The underlying exception that triggered the abort
    attr_reader :cause
    # @param cause [Exception] the original error raised by the sender
    def initialize(cause)
      @cause = cause
      # Interpolation calls #to_s implicitly; the explicit .to_s was redundant.
      super "caused by #{cause.inspect}: #{cause}"
    end
  end
end
remove unused RefactorError class
module Celluloid
  # Base class of all Celluloid errors
  Error = Class.new(StandardError)
  # Don't do Actor-like things outside Actor scope
  NotActorError = Class.new(Celluloid::Error)
  # Trying to do something to a dead actor
  DeadActorError = Class.new(Celluloid::Error)
  # A timeout occurred before the given request could complete
  TimeoutError = Class.new(Celluloid::Error)
  # The sender made an error, not the current actor
  class AbortError < Celluloid::Error
    # The underlying exception that triggered the abort
    attr_reader :cause
    # @param cause [Exception] the original error raised by the sender
    def initialize(cause)
      @cause = cause
      # Interpolation calls #to_s implicitly; the explicit .to_s was redundant.
      super "caused by #{cause.inspect}: #{cause}"
    end
  end
end
|
module Celluloid
  # Supervisors are actors that watch over other actors and restart them if
  # they crash
  class Supervisor
    include Celluloid
    # Receive exit notifications from linked actors instead of crashing
    trap_exit :restart_actor
    # Retrieve the actor this supervisor is supervising
    attr_reader :actor
    # Supervise an anonymous instance of +klass+ built with +args+/+block+.
    def self.supervise(klass, *args, &block)
      new(nil, klass, *args, &block)
    end
    # Supervise an instance of +klass+ and register it in the actor registry
    # under +name+.
    def self.supervise_as(name, klass, *args, &block)
      new(name, klass, *args, &block)
    end
    def initialize(name, klass, *args, &block)
      @name, @klass, @args, @block = name, klass, args, block
      start_actor
    end
    # Start (or restart) the supervised actor, retrying indefinitely. After
    # +start_attempts+ consecutive failures the counter resets and we back
    # off for +sleep_interval+ seconds to avoid a tight crash loop.
    def start_actor(start_attempts = 2, sleep_interval = 30)
      failures = 0
      begin
        @actor = @klass.new_link(*@args, &@block)
      rescue # was `rescue => ex`; the binding was never used
        failures += 1
        if failures >= start_attempts
          failures = 0
          Celluloid::Logger.warn("#{@klass} is crashing on initialize too quickly, sleeping for #{sleep_interval} seconds")
          sleep sleep_interval
        end
        retry
      end
      Celluloid::Actor[@name] = @actor if @name
    end
    # When actors die, regardless of the reason, restart them
    def restart_actor(actor, reason)
      start_actor
    end
    def inspect
      str = "#<#{self.class}(#{@klass}):0x#{object_id.to_s(16)}"
      str << " " << @args.map { |arg| arg.inspect }.join(' ') unless @args.empty?
      str << ">"
    end
  end
end
Supervisors shouldn't handle exit messages until actors start successfully
module Celluloid
  # Supervisors are actors that watch over other actors and restart them if
  # they crash
  class Supervisor
    include Celluloid
    # Receive exit notifications from linked actors instead of crashing
    trap_exit :restart_actor
    # Retrieve the actor this supervisor is supervising
    attr_reader :actor
    # Supervise an anonymous instance of +klass+ built with +args+/+block+.
    def self.supervise(klass, *args, &block)
      new(nil, klass, *args, &block)
    end
    # Supervise an instance of +klass+ and register it in the actor registry
    # under +name+.
    def self.supervise_as(name, klass, *args, &block)
      new(name, klass, *args, &block)
    end
    def initialize(name, klass, *args, &block)
      @name, @klass, @args, @block = name, klass, args, block
      # Ignore exit messages that arrive before the first successful start
      @started = false
      start_actor
    end
    # Start (or restart) the supervised actor, retrying indefinitely. After
    # +start_attempts+ consecutive failures the counter resets and we back
    # off for +sleep_interval+ seconds to avoid a tight crash loop.
    def start_actor(start_attempts = 2, sleep_interval = 30)
      failures = 0
      begin
        @actor = @klass.new_link(*@args, &@block)
      rescue # was `rescue => ex`; the binding was never used
        failures += 1
        if failures >= start_attempts
          failures = 0
          Celluloid::Logger.warn("#{@klass} is crashing on initialize too quickly, sleeping for #{sleep_interval} seconds")
          sleep sleep_interval
        end
        retry
      end
      @started = true
      Celluloid::Actor[@name] = @actor if @name
    end
    # When actors die, regardless of the reason, restart them — but only
    # once the initial start has completed
    def restart_actor(actor, reason)
      start_actor if @started
    end
    def inspect
      str = "#<#{self.class}(#{@klass}):0x#{object_id.to_s(16)}"
      str << " " << @args.map { |arg| arg.inspect }.join(' ') unless @args.empty?
      str << ">"
    end
  end
end
|
module Celluloid
module ZMQ
class Router
include Celluloid::ZMQ
attr_accessor :identity, :peers, :endpoints
def initialize(identity=nil, endpoints=[], peer_endpoints=[])
@identity = identity
@endpoints = []
Array(endpoints).each do |endpoint|
add_endpoint(endpoint)
end
@peers = []
Array(peer_endpoints).each do |peer_endpoint|
add_peer(peer_endpoint)
end
end
def init_router_socket
  # Tear down any previous socket AND its monitor before creating new ones.
  # Previously the SocketMonitor handle was discarded, so a stale monitor
  # leaked every time the socket was re-created.
  @socket.close if @socket
  @monitor.terminate if @monitor
  @socket = RouterSocket.new
  @monitor = SocketMonitor.new_link(@socket, "zmq.socket.#{Celluloid::UUID.generate}")
end
def add_endpoint(endpoint)
unless @endpoints.include?(endpoint)
init_router_socket if @socket.nil?
async.listen if @endpoints.empty? && @peers.empty?
begin
@endpoints << endpoint
@socket.identity = @identity if @identity
@socket.bind(endpoint)
rescue IOError => e
@socket.close
raise e
end
end
end
def remove_endpoint(endpoint)
if @endpoints.include?(endpoint)
begin
@endpoints.delete(endpoint)
@socket.unbind(endpoint)
rescue IOError => e
@socket.close
raise e
end
end
end
def clear_endpoints
@endpoints.dup.each { |endpoint| remove_endpoint(endpoint) }
end
def add_peer(peer)
  # Connect the router socket to a peer endpoint unless already connected.
  unless @peers.include?(peer)
    init_router_socket if @socket.nil?
    # BUG FIX: condition previously tested `@peers.empty? && @peers.empty?`
    # (duplicated operand); mirror add_endpoint so the read loop starts
    # exactly once, on first use of the socket.
    async.listen if @endpoints.empty? && @peers.empty?
    begin
      @peers << peer
      @socket.connect(peer)
    rescue IOError => e
      @socket.close
      raise e
    end
  end
end
def remove_peer(peer)
if @peers.include?(peer)
begin
@peers.delete(peer)
@socket.disconnect(peer)
rescue IOError => e
@socket.close
raise e
end
end
end
def clear_peers
@peers.dup.each { |peer| remove_peer(peer) }
end
def finalize
@socket.close if @socket
end
def write(identity, *parts)
@socket.write(identity, "", *parts)
end
def listen
loop do
parts = @socket.read_multipart
identity = parts.shift
parts.shift
dispatch(identity, parts)
end
end
# override this
def dispatch(identity, parts)
Logger.debug("received message from #{identity}: #{parts.inspect}")
end
end
end
end
terminate socket monitors if their sockets close
module Celluloid
module ZMQ
class Router
include Celluloid::ZMQ
attr_accessor :identity, :peers, :endpoints
def initialize(identity=nil, endpoints=[], peer_endpoints=[])
@identity = identity
@endpoints = []
Array(endpoints).each do |endpoint|
add_endpoint(endpoint)
end
@peers = []
Array(peer_endpoints).each do |peer_endpoint|
add_peer(peer_endpoint)
end
end
def init_router_socket
@socket.close if @socket
@monitor.terminate if @monitor
@socket = RouterSocket.new
@monitor = SocketMonitor.new_link(@socket, "zmq.socket.#{Celluloid::UUID.generate}")
@socket
end
def add_endpoint(endpoint)
unless @endpoints.include?(endpoint)
init_router_socket if @socket.nil?
async.listen if @endpoints.empty? && @peers.empty?
begin
@endpoints << endpoint
@socket.identity = @identity if @identity
@socket.bind(endpoint)
rescue IOError => e
@socket.close
raise e
end
end
end
def remove_endpoint(endpoint)
if @endpoints.include?(endpoint)
begin
@endpoints.delete(endpoint)
@socket.unbind(endpoint)
rescue IOError => e
@socket.close
raise e
end
end
end
def clear_endpoints
@endpoints.dup.each { |endpoint| remove_endpoint(endpoint) }
end
def add_peer(peer)
  # Connect the router socket to a peer endpoint unless already connected.
  unless @peers.include?(peer)
    init_router_socket if @socket.nil?
    # BUG FIX: condition previously tested `@peers.empty? && @peers.empty?`
    # (duplicated operand); mirror add_endpoint so the read loop starts
    # exactly once, on first use of the socket.
    async.listen if @endpoints.empty? && @peers.empty?
    begin
      @peers << peer
      @socket.connect(peer)
    rescue IOError => e
      @socket.close
      raise e
    end
  end
end
def remove_peer(peer)
if @peers.include?(peer)
begin
@peers.delete(peer)
@socket.disconnect(peer)
rescue IOError => e
@socket.close
raise e
end
end
end
def clear_peers
@peers.dup.each { |peer| remove_peer(peer) }
end
def finalize
  # Close the socket and stop its monitor on shutdown so the linked
  # SocketMonitor does not outlive the Router.
  @socket.close if @socket
  @monitor.terminate if @monitor && @monitor.alive?
end
def write(identity, *parts)
@socket.write(identity, "", *parts)
end
def listen
loop do
parts = @socket.read_multipart
identity = parts.shift
parts.shift
dispatch(identity, parts)
end
end
# override this
def dispatch(identity, parts)
Logger.debug("received message from #{identity}: #{parts.inspect}")
end
end
end
end
|
require 'byebug'
require 'pathname'
require 'json'
require 'fileutils'
require 'active_support'
# require 'git'
# require 'github_api'
# require 'highline/import'
# require 'erb'
require 'jeweler'
class Jeweler::Generator
  # Best-effort helper: create the local git repo and push it to GitHub.
  # Failures are reported but do not abort collection generation.
  def create_git_and_github_repo
    create_version_control
    create_and_push_repo
  rescue => e
    # BUG FIX: message read "Error create repo en Gitgub"; also surface the cause.
    puts "Error creating repo on GitHub: #{e.message}"
  end
end
class String
  # Interpret the string as a boolean flag: true only when it ends in a
  # truthy token (true/t/yes/y/1, case-insensitive); false otherwise.
  def to_bool
    !!(self =~ /(true|t|yes|y|1)$/i)
  end
end
module CenitCmd
class Collection < Thor::Group
include Thor::Actions
desc "builds a cenit_hub shared collection"
argument :file_name, type: :string, desc: 'collection path', default: '.'
argument :collection_name, type: :string, desc: 'collection name', default: '.'
source_root File.expand_path('../templates/collection', __FILE__)
class_option :user_name
class_option :user_email
class_option :github_username
class_option :summary
class_option :description
class_option :homepage
class_option :source
class_option :git_remote
class_option :create
@generated = false
def generate
@collection_name = @file_name
use_prefix 'cenit-collection-'
@user_name = options[:user_name] || git_config['user.name']
@user_email = options[:user_email] || git_config['user.email']
@github_username = options[:github_username] || git_config['github.user']
@summary = options[:summary] || "Shared Collection #{@file_name} to be use in Cenit"
@description = options[:description] || @summary
@homepage = options[:homepage] || "https://github.com/#{@github_username}/#{@file_name}"
@source = options[:source]
@git_remote = options[:git_remote] || "https://github.com/#{@github_username}/#{@file_name}.git"
@create = options[:create].to_bool
return unless validate_argument
empty_directory file_name
directory 'lib', "#{file_name}/lib"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/connections"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/webhooks"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/connection_roles"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/events"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/flows"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/translators"
empty_directory "#{file_name}/spec/support"
empty_directory "#{file_name}/spec/support/sample"
template 'collection.gemspec', "#{file_name}/#{file_name}.gemspec"
template 'Gemfile', "#{file_name}/Gemfile"
template 'gitignore', "#{file_name}/.gitignore"
template 'LICENSE', "#{file_name}/LICENSE"
template 'Rakefile', "#{file_name}/Rakefile"
template 'README.md', "#{file_name}/README.md"
template 'rspec', "#{file_name}/.rspec"
template 'spec/spec_helper.rb.tt', "#{file_name}/spec/spec_helper.rb"
@load_data = false
import_from_file if @source
create_repo if @create
# puts "cd #{file_name}"
# Dir.chdir("#{file_name}")
# puts "bundle exec rake create_repo"
# `bundle exec rake create_repo`
# puts "bundle exec rake version:write"
# `bundle exec rake version:write`
# puts "bundle exec rake git:release"
# `bundle exec rake git:release`
@generated = true
end
def final_banner
return unless @generated
say %Q{
#{'*' * 80}
Consider the next steps:
Move to the new collection folder.
$ cd #{file_name}
Create a new git and related GitHub's repository
$ rake create_repo
Commit and push until you are happy with your changes
...
Generate a version
$ rake version:write
Tag and push release to git
$ rake git:release
Shared your collection in https://rubygems.org
$ rake release
Visit README.md for more details.
#{'*' * 80}
}
end
no_tasks do
def class_name
Thor::Util.camel_case @collection_name
end
def use_prefix(prefix)
unless file_name =~ /^#{prefix}/
@file_name = prefix + Thor::Util.snake_case(file_name)
end
end
# Expose git config here, so we can stub it out for test environments
def git_config
@git_config ||= Pathname.new("~/.gitconfig").expand_path.exist? ? Git.global_config : {}
end
def validate_argument
if @user_name.nil?
$stderr.puts %Q{No user.name found in ~/.gitconfig. Please tell git about yourself (see http://help.github.com/git-email-settings/ for details). For example: git config --global user.name "mad voo"}
return false
elsif @user_email.nil?
$stderr.puts %Q{No user.email found in ~/.gitconfig. Please tell git about yourself (see http://help.github.com/git-email-settings/ for details). For example: git config --global user.email mad.vooo@gmail.com}
return false
elsif @github_username.nil?
$stderr.puts %Q{Please specify --github-username or set github.user in ~/.gitconfig (see http://github.com/blog/180-local-github-config for details). For example: git config --global github.user defunkt}
return false
end
true
end
def import_from_file
begin
unless @source.nil?
data = open_source
import_data(data) if data != {}
@load_data = true
end
rescue
@load_data = false
end
end
def import_data(data)
shared_data = JSON.parse(data)
hash_data = shared_data['data']
hash_model = []
models = ["flows","connection_roles","translators","events","connections","webhooks"]
models.collect do |model|
if hash_model = hash_data[model].to_a
hash_model.collect do |hash|
if file = filename_scape(hash['name'])
File.open(@file_name + '/lib/cenit/collection/' + @collection_name + '/' + model + '/' + file + '.json', mode: "w:utf-8") do |f|
f.write(JSON.pretty_generate(hash))
end
end
end
end
end
libraries = hash_data['libraries']
library_index = []
libraries.collect do |library|
if library_name = library['name']
library_file = filename_scape (library_name)
FileUtils.mkpath(@file_name + '/lib/cenit/collection/' + @collection_name + '/libraries/' + library_file) unless File.directory?(@file_name + '/lib/cenit/collection/' + @collection_name + '/libraries/' + library_file)
library['schemas'].collect do |schema|
if schema_file = schema['uri']
File.open(@file_name + '/lib/cenit/collection/' + @collection_name + '/' + '/libraries/' + library_file + '/' + schema_file, mode: "w:utf-8") do |f|
f.write(JSON.pretty_generate(JSON.parse(schema['schema'])))
end
end
end
library_index << {'name' => library_name, 'file' => library_file}
end
end
File.open(@file_name + '/lib/cenit/collection/' + @collection_name + '/libraries/index.json', mode: "w:utf-8") do |f|
f.write(JSON.pretty_generate(library_index))
end
File.open(@file_name + '/lib/cenit/collection/' + @collection_name + '/index.json', mode: "w:utf-8") do |f|
f.write(JSON.pretty_generate(shared_data.except('data')))
end
end
def open_source
  # Read the import source file (@source) as UTF-8 text; return {} when the
  # file cannot be read so import_from_file can detect "no data".
  # BUG FIX: `rescue {}` did not return a hash on failure (the {} was parsed
  # as the rescue clause's exception expression), and File.open without a
  # block leaked the file handle.
  File.read(@source, mode: "r:utf-8")
rescue
  {}
end
# Turn an arbitrary display name into a safe, snake_case file name:
# strip non-word punctuation, collapse stray runs of whitespace, replace
# spaces with underscores, and lowercase the result.
def filename_scape(name)
  cleaned = name.gsub(/[^\w\s_-]+/, '')
  collapsed = cleaned.gsub(/(^|\b\s)\s+($|\s?\b)/, '\\1\\2')
  collapsed.gsub(/\s+/, '_').downcase
end
def create_repo
begin
options = {
project_name: @file_name,
target_dir: @file_name,
user_name: @user_name,
user_email: @user_email,
github_username: @github_username,
summary: @summary,
description: @description,
homepage: @homepage,
testing_framework: :rspec,
documentation_framework: :rdoc
}
g = Jeweler::Generator.new(options)
g.create_git_and_github_repo
rescue
puts "Not create repo into Github"
end
end
# def create_repo
# options = {
# project_name: @file_name,
# target_dir: @file_name,
# user_name: @user_name,
# user_email: @user_email,
# github_username: @github_username,
# summary: @summary,
# description: @description,
# homepage: @homepage,
# testing_framework: :rspec,
# documentation_framework: :rdoc,
# git_remote: @git_remote
#
# }
# create_version_control(options)
# create_and_push_repo(options)
# end
# def create_version_control (options)
# Dir.chdir(options[:target_dir]) do
# begin
# @repo = Git.init()
# rescue Git::GitExecuteError => e
# raise GitInitFailed, "Encountered an error during gitification. Maybe the repo already exists, or has already been pushed to?"
# end
#
# begin
# @repo.add('.')
# rescue Git::GitExecuteError => e
# raise
# end
#
# begin
# @repo.commit "Initial commit to #{options[:project_name]}."
# rescue Git::GitExecuteError => e
# raise
# end
#
# begin
# @repo.add_remote('origin', options[:git_remote])
# rescue Git::GitExecuteError => e
# puts "Encountered an error while adding origin remote. Maybe you have some weird settings in ~/.gitconfig?"
# raise
# end
# end
# end
# def create_and_push_repo (options)
# puts "Please provide your Github user and password to create the Github repository"
# begin
# puts options[:github_username]
# password = ask("Password: ") { |q| q.echo = false }
# login = options[:github_username]
# github = Github.new(:login => login.strip, :password => password.strip)
# github.repos.create(:name => options[:pronject_name], :description => options[:summary], :testing_framework => :rspec, :documentation_framework => :rdoc)
# rescue Github::Error::Unauthorized
# puts "Wrong login/password! Please try again"
# retry
# rescue Github::Error::UnprocessableEntity
# raise GitRepoCreationFailed, "Can't create that repo. Does it already exist?"
# end
# @repo.push('origin')
# end
end
end
end
added bump of minor version (and git release) after repo creation
require 'byebug'
require 'pathname'
require 'json'
require 'fileutils'
require 'active_support'
require 'jeweler'
class Jeweler::Generator
  # Best-effort helper: create the local git repo and push it to GitHub.
  # Failures are reported but do not abort collection generation.
  def create_git_and_github_repo
    create_version_control
    create_and_push_repo
  rescue => e
    # BUG FIX: message read "Error create repo en Gitgub"; also surface the cause.
    puts "Error creating repo on GitHub: #{e.message}"
  end
end
class String
  # Interpret the string as a boolean flag: true only when it ends in a
  # truthy token (true/t/yes/y/1, case-insensitive); false otherwise.
  # BUG FIX: `self =~ regex rescue false` returned the Integer match offset
  # or nil — never an actual Boolean — so `value.to_bool == true` always
  # failed. `rescue false` only fired on an exception, which String#=~ with
  # a literal regexp cannot raise.
  def to_bool
    !!(self =~ /(true|t|yes|y|1)$/i)
  end
end
module CenitCmd
class Collection < Thor::Group
include Thor::Actions
desc "builds a cenit_hub shared collection"
argument :file_name, type: :string, desc: 'collection path', default: '.'
argument :collection_name, type: :string, desc: 'collection name', default: '.'
source_root File.expand_path('../templates/collection', __FILE__)
class_option :user_name
class_option :user_email
class_option :github_username
class_option :summary
class_option :description
class_option :homepage
class_option :source
class_option :git_remote
class_option :create
@generated = false
def generate
@collection_name = @file_name
use_prefix 'cenit-collection-'
@user_name = options[:user_name] || git_config['user.name']
@user_email = options[:user_email] || git_config['user.email']
@github_username = options[:github_username] || git_config['github.user']
@summary = options[:summary] || "Shared Collection #{@file_name} to be use in Cenit"
@description = options[:description] || @summary
@homepage = options[:homepage] || "https://github.com/#{@github_username}/#{@file_name}"
@source = options[:source]
@git_remote = options[:git_remote] || "https://github.com/#{@github_username}/#{@file_name}.git"
@create = options[:create].to_bool
return unless validate_argument
empty_directory file_name
directory 'lib', "#{file_name}/lib"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/connections"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/webhooks"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/connection_roles"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/events"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/flows"
empty_directory "#{file_name}/lib/cenit/collection/#{collection_name}/translators"
empty_directory "#{file_name}/spec/support"
empty_directory "#{file_name}/spec/support/sample"
template 'collection.gemspec', "#{file_name}/#{file_name}.gemspec"
template 'Gemfile', "#{file_name}/Gemfile"
template 'gitignore', "#{file_name}/.gitignore"
template 'LICENSE', "#{file_name}/LICENSE"
template 'Rakefile', "#{file_name}/Rakefile"
template 'README.md', "#{file_name}/README.md"
template 'rspec', "#{file_name}/.rspec"
template 'spec/spec_helper.rb.tt', "#{file_name}/spec/spec_helper.rb"
@load_data = false
import_from_file if @source
create_repo if @create
@generated = true
end
def final_banner
return unless @generated
say %Q{
#{'*' * 80}
Consider the next steps:
Move to the new collection folder.
$ cd #{file_name}
Create a new git and related GitHub's repository
$ rake create_repo
Commit and push until you are happy with your changes
...
Generate a version
$ rake version:write
Tag and push release to git
$ rake git:release
Shared your collection in https://rubygems.org
$ rake release
Visit README.md for more details.
#{'*' * 80}
}
end
no_tasks do
def class_name
Thor::Util.camel_case @collection_name
end
def use_prefix(prefix)
unless file_name =~ /^#{prefix}/
@file_name = prefix + Thor::Util.snake_case(file_name)
end
end
# Expose git config here, so we can stub it out for test environments
def git_config
@git_config ||= Pathname.new("~/.gitconfig").expand_path.exist? ? Git.global_config : {}
end
# Verify that the git identity required for generation is configured.
# Prints a setup hint to stderr and returns false on the first missing
# value; returns true when user name, email and GitHub username are set.
def validate_argument
  if @user_name.nil?
    $stderr.puts %Q{No user.name found in ~/.gitconfig. Please tell git about yourself (see http://help.github.com/git-email-settings/ for details). For example: git config --global user.name "mad voo"}
    return false
  end
  if @user_email.nil?
    $stderr.puts %Q{No user.email found in ~/.gitconfig. Please tell git about yourself (see http://help.github.com/git-email-settings/ for details). For example: git config --global user.email mad.vooo@gmail.com}
    return false
  end
  if @github_username.nil?
    $stderr.puts %Q{Please specify --github-username or set github.user in ~/.gitconfig (see http://github.com/blog/180-local-github-config for details). For example: git config --global github.user defunkt}
    return false
  end
  true
end
def import_from_file
begin
unless @source.nil?
data = open_source
import_data(data) if data != {}
@load_data = true
end
rescue
@load_data = false
end
end
# Write each model object and library schema from a shared-collection JSON
# dump into the generated collection's lib tree.
# BUG FIXES vs. previous revision:
#   * `File.open"...")` was a syntax error (missing opening parenthesis)
#   * schemas were written onto the library directory path itself instead of
#     "<library_file>/<schema_file>"
#   * the library index path used `schema_file`, which is out of scope at
#     that point; the index belongs at libraries/index.json
#   * File.directory? does not accept a :mode keyword argument
def import_data(data)
  base_path = "#{@file_name}/lib/cenit/collection/#{@collection_name}"
  shared_data = JSON.parse(data)
  hash_data = shared_data['data']
  models = %w(flows connection_roles translators events connections webhooks)
  models.each do |model|
    hash_data[model].to_a.each do |hash|
      next unless file = filename_scape(hash['name'])
      File.open("#{base_path}/#{model}/#{file}.json", mode: "w:utf-8") { |f| f.write(JSON.pretty_generate(hash)) }
    end
  end
  library_index = []
  hash_data['libraries'].to_a.each do |library|
    next unless library_name = library['name']
    library_file = filename_scape(library_name)
    FileUtils.mkpath("#{base_path}/libraries/#{library_file}") unless File.directory?("#{base_path}/libraries/#{library_file}")
    library['schemas'].to_a.each do |schema|
      next unless schema_file = schema['uri']
      File.open("#{base_path}/libraries/#{library_file}/#{schema_file}", mode: "w:utf-8") { |f| f.write(JSON.pretty_generate(JSON.parse(schema['schema']))) }
    end
    library_index << {'name' => library_name, 'file' => library_file}
  end
  File.open("#{base_path}/libraries/index.json", mode: "w:utf-8") { |f| f.write(JSON.pretty_generate(library_index)) }
  # Hash#except (Ruby 3.0+/ActiveSupport) drops the bulky 'data' payload
  # from the top-level collection index.
  File.open("#{base_path}/index.json", mode: "w:utf-8") { |f| f.write(JSON.pretty_generate(shared_data.except('data'))) }
end
def open_source
  # Read the import source file (@source) as UTF-8 text; return {} when the
  # file cannot be read so import_from_file can detect "no data".
  # BUG FIX: `rescue {}` did not return a hash on failure (the {} was parsed
  # as the rescue clause's exception expression), and File.open without a
  # block leaked the file handle.
  File.read(@source, mode: "r:utf-8")
rescue
  {}
end
# Turn an arbitrary display name into a safe, snake_case file name:
# strip non-word punctuation, collapse stray runs of whitespace, replace
# spaces with underscores, and lowercase the result.
def filename_scape(name)
  cleaned = name.gsub(/[^\w\s_-]+/, '')
  collapsed = cleaned.gsub(/(^|\b\s)\s+($|\s?\b)/, '\\1\\2')
  collapsed.gsub(/\s+/, '_').downcase
end
def create_repo
begin
options = {
project_name: @file_name,
target_dir: @file_name,
user_name: @user_name,
user_email: @user_email,
github_username: @github_username,
summary: @summary,
description: @description,
homepage: @homepage,
testing_framework: :rspec,
documentation_framework: :rdoc
}
g = Jeweler::Generator.new(options)
g.create_git_and_github_repo
g.bump_minor_version
g.release_to_git(options)
rescue
puts "Not create repo into Github"
end
end
end
end
end |
# eucakeys.rb
# Grab all eucalyptus key files and export them as facts so we can use them within the Eucalyptus module
require 'facter'
eucakey_dir = '/var/lib/eucalyptus/keys'
# Get all keys in the top level eucakey_dir
if File.directory?(eucakey_dir)
keyfiles = Dir.entries(eucakey_dir)
keyfiles.each do |name|
# NOTE(review): /\.pem/ is unanchored, so names like "old.pem.bak" also
# match — anchor with /\.pem\z/ if backup files may appear here.
if name.match(/\.pem/)
# Fact name: strip ".pem" and replace the FIRST "-" with "_" (String#sub
# replaces only the first occurrence).
Facter.add("eucakeys_#{name.sub('.pem','').sub('-','_')}") do
setcode do
File.read("#{eucakey_dir}/#{name}")
end
end
end
if name.match(/\.p12/)
# The PKCS#12 bundle is exported under one fixed fact name; if several
# .p12 files exist, the last one enumerated wins.
Facter.add("eucakeys_euca_p12") do
setcode do
File.read("#{eucakey_dir}/#{name}")
end
end
end
end
end
if File.directory?(eucakey_dir)
# Check if entries in eucakey_dir are directories, if they are return them minus the . and .. entries
dir_contents = Dir.entries(eucakey_dir).select {|entry| File.directory? File.join(eucakey_dir,entry) and !(entry =='.' || entry == '..') }
# For each cluster directory, grab all pem files and set as facts making sure all fact names use underscores
dir_contents.each do |clustername|
keyfiles = Dir.entries(eucakey_dir+"/"+clustername)
keyfiles.each do |keyname|
if keyname.match(/\.pem/)
# Per-cluster fact: eucakeys_<cluster>_<key-without-.pem>
Facter.add("eucakeys_" + clustername + "_#{keyname.sub('.pem','').sub('-','_')}") do
setcode do
File.read("#{eucakey_dir}/#{clustername}/#{keyname}")
end
end
end
end
end
end
update custom fact to include vtun support
# eucakeys.rb
# Grab all eucalyptus key files and export them as facts so we can use them within the Eucalyptus module
require 'facter'
eucakey_dir = '/var/lib/eucalyptus/keys'
# Get all keys in the top level eucakey_dir
if File.directory?(eucakey_dir)
keyfiles = Dir.entries(eucakey_dir)
keyfiles.each do |name|
# NOTE(review): /\.pem/ is unanchored, so names like "old.pem.bak" also
# match — anchor with /\.pem\z/ if backup files may appear here.
if name.match(/\.pem/)
# Fact name: strip ".pem" and replace the FIRST "-" with "_" (String#sub
# replaces only the first occurrence).
Facter.add("eucakeys_#{name.sub('.pem','').sub('-','_')}") do
setcode do
File.read("#{eucakey_dir}/#{name}")
end
end
end
if name.match(/\.p12/)
# The PKCS#12 bundle is exported under one fixed fact name; if several
# .p12 files exist, the last one enumerated wins.
Facter.add("eucakeys_euca_p12") do
setcode do
File.read("#{eucakey_dir}/#{name}")
end
end
end
end
end
if File.directory?(eucakey_dir)
# Check if entries in eucakey_dir are directories, if they are return them minus the . and .. entries
dir_contents = Dir.entries(eucakey_dir).select {|entry| File.directory? File.join(eucakey_dir,entry) and !(entry =='.' || entry == '..') }
# For each cluster directory, grab all pem files and set as facts making sure all fact names use underscores
dir_contents.each do |clustername|
keyfiles = Dir.entries(eucakey_dir+"/"+clustername)
keyfiles.each do |keyname|
if keyname.match(/\.pem/)
# Per-cluster fact: eucakeys_<cluster>_<key-without-.pem>
Facter.add("eucakeys_" + clustername + "_#{keyname.sub('.pem','').sub('-','_')}") do
setcode do
File.read("#{eucakey_dir}/#{clustername}/#{keyname}")
end
end
end
# Collect VPN tunnel passwords for VNET_TUNNELLING
if keyname.match(/vtunpass/)
Facter.add("eucakeys_" + clustername + "_#{keyname}") do
setcode do
File.read("#{eucakey_dir}/#{clustername}/#{keyname}")
end
end
end
end
end
end
|
require 'active_model'
require 'aspect4r'
require 'cg_service_client'
module CgRoleClient
# Client-side model for a Role-service "actor" record, created and fetched
# through a REST endpoint resolved by CgServiceClient::Serviceable.
class Actor
include ActiveModel::Validations
include Aspect4r # this has to be here for the class level "around" to work
include CgServiceClient::Serializable
extend CgServiceClient::Serviceable
uses_service("Role","1","CgRoleClient::RestEndpoint")
serializable_attr_accessor :id, :actor_id, :actor_type, :singleton_group_id, :created_at, :updated_at
validates_presence_of :actor_id, :actor_type
class << self
include Aspect4r
# Aspect: resolve the service endpoint before each wrapped class method.
around :create do |input, &block |
begin
ensure_endpoint
block.call(input)
# NOTE(review): rescuing Exception also catches SystemExit and
# SignalException; it does re-raise, but StandardError would be the
# safer net for a log-and-reraise wrapper.
rescue Exception => e
puts e
raise
end
end
# Create an actor on the service. Returns false if the attributes fail
# validation or the object already has an id (i.e. was already persisted).
def create(attributes = {})
actor = CgRoleClient::Actor.new(attributes)
if !actor.valid? || !actor.id.nil?
return false
end
@endpoint.create_actor(actor)
end
end
def initialize(attributes = {})
self.attributes = attributes
end
end
end
Added querying of actor by type and id.
git-svn-id: fa1a74920b061848e2ced189e7c50f362c0f37df@2022 bc291798-7d79-44a5-a816-fbf4f7d05ffa
require 'active_model'
require 'aspect4r'
require 'cg_service_client'
module CgRoleClient
# Client-side model for a Role-service "actor" record, created and fetched
# through a REST endpoint resolved by CgServiceClient::Serviceable.
class Actor
include ActiveModel::Validations
include Aspect4r # this has to be here for the class level "around" to work
include CgServiceClient::Serializable
extend CgServiceClient::Serviceable
uses_service("Role","1","CgRoleClient::RestEndpoint")
serializable_attr_accessor :id, :actor_id, :actor_type, :singleton_group_id, :created_at, :updated_at
validates_presence_of :actor_id, :actor_type
class << self
include Aspect4r
# Aspect: resolve the service endpoint before each wrapped class method,
# then forward the original arguments to it.
# NOTE(review): assumes aspect4r yields the wrapped method's name as a
# String with :method_name_arg => true; if it passes a Symbol, the
# two-argument branch below never fires — confirm against the gem.
around :create, :find_by_actor_type_and_actor_id, :method_name_arg => true do |method, *args, &block |
begin
ensure_endpoint
if method == 'find_by_actor_type_and_actor_id'
block.call(args[0],args[1])
else
block.call(args[0])
end
# NOTE(review): rescuing Exception also catches SystemExit and
# SignalException; it does re-raise, but StandardError would be the
# safer net for a log-and-reraise wrapper.
rescue Exception => e
puts e
raise
end
end
# Create an actor on the service. Returns false if the attributes fail
# validation or the object already has an id (i.e. was already persisted).
def create(attributes = {})
actor = CgRoleClient::Actor.new(attributes)
if !actor.valid? || !actor.id.nil?
return false
end
@endpoint.create_actor(actor)
end
# Look up an actor by its (type, id) pair via the service endpoint.
def find_by_actor_type_and_actor_id(actor_type, actor_id)
@endpoint.find_actor_by_actor_type_and_actor_id(actor_type,actor_id)
end
end
def initialize(attributes = {})
self.attributes = attributes
end
end
end
|
#
# Author:: Adam Jacob (<adam@chef.io>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../knife"
require_relative "data_bag_secret_options"
require "chef-utils/dist" unless defined?(ChefUtils::Dist)
require "license_acceptance/cli_flags/mixlib_cli"
module LicenseAcceptance
autoload :Acceptor, "license_acceptance/acceptor"
end
class Chef
class Knife
class Bootstrap < Knife
# Adds the encrypted data bag secret CLI options.
include DataBagSecretOptions
# Adds the --chef-license acceptance flag.
include LicenseAcceptance::CLIFlags::MixlibCLI
# ||= avoids "already initialized constant" warnings if this file is loaded twice.
SUPPORTED_CONNECTION_PROTOCOLS ||= %w{ssh winrm}.freeze
WINRM_AUTH_PROTOCOL_LIST ||= %w{plaintext kerberos ssl negotiate}.freeze
# Common connectivity options
option :connection_user,
short: "-U USERNAME",
long: "--connection-user USERNAME",
description: "Authenticate to the target host with this user account."
option :connection_password,
short: "-P PASSWORD",
long: "--connection-password PASSWORD",
description: "Authenticate to the target host with this password."
option :connection_port,
short: "-p PORT",
long: "--connection-port PORT",
description: "The port on the target node to connect to."
option :connection_protocol,
short: "-o PROTOCOL",
long: "--connection-protocol PROTOCOL",
description: "The protocol to use to connect to the target node.",
in: SUPPORTED_CONNECTION_PROTOCOLS
option :max_wait,
short: "-W SECONDS",
long: "--max-wait SECONDS",
description: "The maximum time to wait for the initial connection to be established."
option :session_timeout,
long: "--session-timeout SECONDS",
description: "The number of seconds to wait for each connection operation to be acknowledged while running bootstrap.",
default: 60
# WinRM Authentication
option :winrm_ssl_peer_fingerprint,
long: "--winrm-ssl-peer-fingerprint FINGERPRINT",
description: "SSL certificate fingerprint expected from the target."
option :ca_trust_file,
short: "-f CA_TRUST_PATH",
long: "--ca-trust-file CA_TRUST_PATH",
description: "The Certificate Authority (CA) trust file used for SSL transport."
option :winrm_no_verify_cert,
long: "--winrm-no-verify-cert",
description: "Do not verify the SSL certificate of the target node for WinRM.",
boolean: true
option :winrm_ssl,
long: "--winrm-ssl",
description: "Use SSL in the WinRM connection."
option :winrm_auth_method,
short: "-w AUTH-METHOD",
long: "--winrm-auth-method AUTH-METHOD",
description: "The WinRM authentication method to use.",
in: WINRM_AUTH_PROTOCOL_LIST
option :winrm_basic_auth_only,
long: "--winrm-basic-auth-only",
description: "For WinRM basic authentication when using the 'ssl' auth method.",
boolean: true
# This option was provided in knife bootstrap windows winrm,
# but it is ignored in knife-windows/WinrmSession, and so remains unimplemented here.
# option :kerberos_keytab_file,
# :short => "-T KEYTAB_FILE",
# :long => "--keytab-file KEYTAB_FILE",
# :description => "The Kerberos keytab file used for authentication"
option :kerberos_realm,
short: "-R KERBEROS_REALM",
long: "--kerberos-realm KERBEROS_REALM",
description: "The Kerberos realm used for authentication."
option :kerberos_service,
short: "-S KERBEROS_SERVICE",
long: "--kerberos-service KERBEROS_SERVICE",
description: "The Kerberos service used for authentication."
## SSH Authentication
option :ssh_gateway,
short: "-G GATEWAY",
long: "--ssh-gateway GATEWAY",
description: "The SSH gateway."
option :ssh_gateway_identity,
long: "--ssh-gateway-identity SSH_GATEWAY_IDENTITY",
description: "The SSH identity file used for gateway authentication."
option :ssh_forward_agent,
short: "-A",
long: "--ssh-forward-agent",
description: "Enable SSH agent forwarding.",
boolean: true
option :ssh_identity_file,
short: "-i IDENTITY_FILE",
long: "--ssh-identity-file IDENTITY_FILE",
description: "The SSH identity file used for authentication."
option :ssh_verify_host_key,
long: "--ssh-verify-host-key VALUE",
description: "Verify host key. Default is 'always'.",
in: %w{always accept_new accept_new_or_local_tunnel never},
default: "always"
#
# bootstrap options
#
# client.rb content via chef-full/bootstrap_context
option :bootstrap_version,
long: "--bootstrap-version VERSION",
description: "The version of #{ChefUtils::Dist::Infra::PRODUCT} to install."
option :channel,
long: "--channel CHANNEL",
description: "Install from the given channel. Default is 'stable'.",
default: "stable",
in: %w{stable current unstable}
# client.rb content via chef-full/bootstrap_context
option :bootstrap_proxy,
long: "--bootstrap-proxy PROXY_URL",
description: "The proxy server for the node being bootstrapped."
# client.rb content via bootstrap_context
option :bootstrap_proxy_user,
long: "--bootstrap-proxy-user PROXY_USER",
description: "The proxy authentication username for the node being bootstrapped."
# client.rb content via bootstrap_context
option :bootstrap_proxy_pass,
long: "--bootstrap-proxy-pass PROXY_PASS",
description: "The proxy authentication password for the node being bootstrapped."
# client.rb content via bootstrap_context
option :bootstrap_no_proxy,
long: "--bootstrap-no-proxy [NO_PROXY_URL|NO_PROXY_IP]",
description: "Do not proxy locations for the node being bootstrapped"
# client.rb content via bootstrap_context
option :bootstrap_template,
short: "-t TEMPLATE",
long: "--bootstrap-template TEMPLATE",
description: "Bootstrap #{ChefUtils::Dist::Infra::PRODUCT} using a built-in or custom template. Set to the full path of an erb template or use one of the built-in templates."
# client.rb content via bootstrap_context
option :node_ssl_verify_mode,
long: "--node-ssl-verify-mode [peer|none]",
description: "Whether or not to verify the SSL cert for all HTTPS requests.",
proc: Proc.new { |v|
valid_values = %w{none peer}
unless valid_values.include?(v)
raise "Invalid value '#{v}' for --node-ssl-verify-mode. Valid values are: #{valid_values.join(", ")}"
end
v
}
# bootstrap_context - client.rb
option :node_verify_api_cert,
long: "--[no-]node-verify-api-cert",
description: "Verify the SSL cert for HTTPS requests to the #{ChefUtils::Dist::Server::PRODUCT} API.",
boolean: true
# runtime - sudo settings (train handles sudo)
option :use_sudo,
long: "--sudo",
description: "Execute the bootstrap via sudo.",
boolean: true
# runtime - sudo settings (train handles sudo)
option :preserve_home,
long: "--sudo-preserve-home",
description: "Preserve non-root user HOME environment variable with sudo.",
boolean: true
# runtime - sudo settings (train handles sudo)
option :use_sudo_password,
long: "--use-sudo-password",
description: "Execute the bootstrap via sudo with password.",
boolean: false
# runtime - su user
option :su_user,
long: "--su-user NAME",
description: "The su - USER name to perform bootstrap command using a non-root user."
# runtime - su user password
option :su_password,
long: "--su-password PASSWORD",
description: "The su USER password for authentication."
# runtime - client_builder
option :chef_node_name,
short: "-N NAME",
long: "--node-name NAME",
description: "The node name for your new node."
# runtime - client_builder - set runlist when creating node
option :run_list,
short: "-r RUN_LIST",
long: "--run-list RUN_LIST",
description: "Comma separated list of roles/recipes to apply.",
proc: lambda { |o| o.split(/[\s,]+/) },
default: []
# runtime - client_builder - set policy name when creating node
option :policy_name,
long: "--policy-name POLICY_NAME",
description: "Policyfile name to use (--policy-group must also be given).",
default: nil
# runtime - client_builder - set policy group when creating node
option :policy_group,
long: "--policy-group POLICY_GROUP",
description: "Policy group name to use (--policy-name must also be given).",
default: nil
# runtime - client_builder - node tags
option :tags,
long: "--tags TAGS",
description: "Comma separated list of tags to apply to the node.",
proc: lambda { |o| o.split(/[\s,]+/) },
default: []
# bootstrap template
# bootstrap template
option :first_boot_attributes,
  short: "-j JSON_ATTRIBS",
  # Fix: the long form previously read "--json-attributes" with no value
  # placeholder, so mixlib-cli/OptionParser treated it as a boolean switch
  # and the JSON payload was never captured.
  long: "--json-attributes JSON_ATTRIBS",
  description: "A JSON string to be added to the first run of #{ChefUtils::Dist::Infra::CLIENT}.",
  proc: lambda { |o| Chef::JSONCompat.parse(o) },
  default: nil
# bootstrap template
option :first_boot_attributes_from_file,
long: "--json-attribute-file FILE",
description: "A JSON file to be used to the first run of #{ChefUtils::Dist::Infra::CLIENT}.",
proc: lambda { |o| Chef::JSONCompat.parse(File.read(o)) },
default: nil
# bootstrap template
# Create ohai hints in /etc/chef/ohai/hints, fname=hintname, content=value
option :hints,
long: "--hint HINT_NAME[=HINT_FILE]",
description: "Specify an Ohai hint to be set on the bootstrap target. Use multiple --hint options to specify multiple hints.",
proc: Proc.new { |hint, accumulator|
accumulator ||= {}
name, path = hint.split("=", 2)
accumulator[name] = path ? Chef::JSONCompat.parse(::File.read(path)) : {}
accumulator
}
# bootstrap override: url of a an installer shell script to use in place of omnitruck
# Note that the bootstrap template _only_ references this out of Chef::Config, and not from
# the provided options to knife bootstrap, so we set the Chef::Config option here.
option :bootstrap_url,
long: "--bootstrap-url URL",
description: "URL to a custom installation script."
option :bootstrap_product,
long: "--bootstrap-product PRODUCT",
description: "Product to install.",
default: "chef"
option :msi_url, # Windows target only
short: "-m URL",
long: "--msi-url URL",
description: "Location of the #{ChefUtils::Dist::Infra::PRODUCT} MSI. The default templates will prefer to download from this location. The MSI will be downloaded from #{ChefUtils::Dist::Org::WEBSITE} if not provided (Windows).",
default: ""
# bootstrap override: Do this instead of our own setup.sh from omnitruck. Causes bootstrap_url to be ignored.
option :bootstrap_install_command,
long: "--bootstrap-install-command COMMANDS",
description: "Custom command to install #{ChefUtils::Dist::Infra::PRODUCT}."
# bootstrap template: Run this command first in the bootstrap script
option :bootstrap_preinstall_command,
long: "--bootstrap-preinstall-command COMMANDS",
description: "Custom commands to run before installing #{ChefUtils::Dist::Infra::PRODUCT}."
# bootstrap template
option :bootstrap_wget_options,
long: "--bootstrap-wget-options OPTIONS",
description: "Add options to wget when installing #{ChefUtils::Dist::Infra::PRODUCT}."
# bootstrap template
option :bootstrap_curl_options,
long: "--bootstrap-curl-options OPTIONS",
description: "Add options to curl when install #{ChefUtils::Dist::Infra::PRODUCT}."
# chef_vault_handler
option :bootstrap_vault_file,
long: "--bootstrap-vault-file VAULT_FILE",
description: "A JSON file with a list of vault(s) and item(s) to be updated."
# chef_vault_handler
option :bootstrap_vault_json,
long: "--bootstrap-vault-json VAULT_JSON",
description: "A JSON string with the vault(s) and item(s) to be updated."
# chef_vault_handler
option :bootstrap_vault_item,
long: "--bootstrap-vault-item VAULT_ITEM",
description: 'A single vault and item to update as "vault:item".',
proc: Proc.new { |i, accumulator|
(vault, item) = i.split(/:/)
accumulator ||= {}
accumulator[vault] ||= []
accumulator[vault].push(item)
accumulator
}
# Deprecated options. These must be declared after
# regular options because they refer to the replacement
# option definitions implicitly.
deprecated_option :auth_timeout,
replacement: :max_wait,
long: "--max-wait SECONDS"
deprecated_option :forward_agent,
replacement: :ssh_forward_agent,
boolean: true, long: "--forward-agent"
deprecated_option :host_key_verify,
replacement: :ssh_verify_host_key,
boolean: true, long: "--[no-]host-key-verify",
value_mapper: Proc.new { |verify| verify ? "always" : "never" }
deprecated_option :prerelease,
replacement: :channel,
long: "--prerelease",
boolean: true, value_mapper: Proc.new { "current" }
deprecated_option :ssh_user,
replacement: :connection_user,
long: "--ssh-user USERNAME"
deprecated_option :ssh_password,
replacement: :connection_password,
long: "--ssh-password PASSWORD"
deprecated_option :ssh_port,
replacement: :connection_port,
long: "--ssh-port PASSWORD"
deprecated_option :ssl_peer_fingerprint,
replacement: :winrm_ssl_peer_fingerprint,
long: "--ssl-peer-fingerprint FINGERPRINT"
deprecated_option :winrm_user,
replacement: :connection_user,
long: "--winrm-user USERNAME", short: "-x USERNAME"
deprecated_option :winrm_password,
replacement: :connection_password,
long: "--winrm-password PASSWORD"
deprecated_option :winrm_port,
replacement: :connection_port,
long: "--winrm-port PORT"
deprecated_option :winrm_authentication_protocol,
replacement: :winrm_auth_method,
long: "--winrm-authentication-protocol PROTOCOL"
deprecated_option :winrm_session_timeout,
replacement: :session_timeout,
long: "--winrm-session-timeout MINUTES"
deprecated_option :winrm_ssl_verify_mode,
replacement: :winrm_no_verify_cert,
long: "--winrm-ssl-verify-mode MODE"
deprecated_option :winrm_transport, replacement: :winrm_ssl,
long: "--winrm-transport TRANSPORT",
value_mapper: Proc.new { |value| value == "ssl" }
attr_reader :connection
deps do
require "erubis" unless defined?(Erubis)
require_relative "../json_compat"
require_relative "../util/path_helper"
require_relative "bootstrap/chef_vault_handler"
require_relative "bootstrap/client_builder"
require_relative "bootstrap/train_connector"
end
banner "knife bootstrap [PROTOCOL://][USER@]FQDN (options)"
# Lazily build the ClientBuilder used for client-side (validatorless)
# node/client registration. Memoized for the life of this knife run.
def client_builder
  return @client_builder if @client_builder

  @client_builder = Chef::Knife::Bootstrap::ClientBuilder.new(
    chef_config: Chef::Config,
    config: config,
    ui: ui
  )
end
# Lazily build the handler that updates chef-vault items for the new client.
# Memoized for the life of this knife run.
def chef_vault_handler
  return @chef_vault_handler if @chef_vault_handler

  @chef_vault_handler = Chef::Knife::Bootstrap::ChefVaultHandler.new(
    config: config,
    ui: ui
  )
end
# Determine if we need to accept the Chef Infra license locally in order to successfully bootstrap
# the remote node. Remote 'chef-client' run will fail if it is >= 15 and the license is not accepted locally.
def check_license
  Chef::Log.debug("Checking if we need to accept Chef license to bootstrap node")
  # Use the explicit --bootstrap-version when given; otherwise assume the node
  # gets the same major version as this workstation's Chef.
  version = config[:bootstrap_version] || Chef::VERSION.split(".").first
  acceptor = LicenseAcceptance::Acceptor.new(logger: Chef::Log, provided: Chef::Config[:chef_license])
  if acceptor.license_required?("chef", version)
    Chef::Log.debug("License acceptance required for chef version: #{version}")
    license_id = acceptor.id_from_mixlib("chef")
    acceptor.check_and_persist(license_id, version)
    # Propagate the acceptance value so the remote client.rb can carry it,
    # without clobbering an explicitly configured value.
    Chef::Config[:chef_license] ||= acceptor.acceptance_value
  end
end
# The default bootstrap template to use to bootstrap a server.
# Public API hook which knife plugins use or inherit and override.
#
# @return [String] built-in template name, chosen by target platform.
def default_bootstrap_template
  connection.windows? ? "windows-chef-client-msi" : "chef-full"
end
# The raw [PROTOCOL://][USER@]FQDN argument from the command line,
# or nil when no name args were supplied.
def host_descriptor
  descriptors = Array(@name_args)
  descriptors.first
end
# The server_name is the DNS or IP we are going to connect to; it is not
# necessarily the node name, the fqdn, or the hostname of the server.
# Public API hook which knife plugins use or inherit and override.
#
# @return [String, nil] the part of the host descriptor after any 'user@'.
def server_name
  return unless host_descriptor

  @server_name ||= host_descriptor.split("@").last
end
# The template to bootstrap with: an explicit CLI/config value wins,
# otherwise the platform default.
#
# @return [String]
def bootstrap_template
  config[:bootstrap_template] || default_bootstrap_template
end
# Resolve the bootstrap template name to an actual .erb file on disk.
# Search order: literal file path, templates shipped with this gem,
# <chef_config_dir>/bootstrap, ~/.chef/bootstrap, then any installed gem.
#
# @return [String] absolute path of the template file.
# @raise [Errno::ENOENT] when no matching template file is found.
def find_template
  template = bootstrap_template
  # Use the template directly if it's a path to an actual file
  if File.exist?(template)
    Chef::Log.trace("Using the specified bootstrap template: #{File.dirname(template)}")
    return template
  end
  # Otherwise search the template directories until we find the right one
  bootstrap_files = []
  bootstrap_files << File.join(__dir__, "bootstrap/templates", "#{template}.erb")
  bootstrap_files << File.join(Knife.chef_config_dir, "bootstrap", "#{template}.erb") if Chef::Knife.chef_config_dir
  Chef::Util::PathHelper.home(".chef", "bootstrap", "#{template}.erb") { |p| bootstrap_files << p }
  # Gem.find_files returns an array, hence the flatten! below.
  bootstrap_files << Gem.find_files(File.join("chef", "knife", "bootstrap", "#{template}.erb"))
  bootstrap_files.flatten!
  template_file = Array(bootstrap_files).find do |bootstrap_template|
    Chef::Log.trace("Looking for bootstrap template in #{File.dirname(bootstrap_template)}")
    File.exist?(bootstrap_template)
  end
  unless template_file
    ui.info("Can not find bootstrap definition for #{template}")
    raise Errno::ENOENT
  end
  Chef::Log.trace("Found bootstrap template: #{template_file}")
  template_file
end
# Encryption secret for encrypted data bag items, or nil when none was
# provided. Memoized so the secret is read at most once.
def secret
  @secret ||= (read_secret if encryption_secret_provided_ignore_encrypt_flag?)
end
# Establish bootstrap context for template rendering.
# Requires connection to be a live connection in order to determine
# the correct platform (Windows vs. unix-like), hence the lazy require.
#
# @return [Knife::Core::WindowsBootstrapContext, Knife::Core::BootstrapContext]
def bootstrap_context
  @bootstrap_context ||=
    if connection.windows?
      require_relative "core/windows_bootstrap_context"
      Knife::Core::WindowsBootstrapContext.new(config, config[:run_list], Chef::Config, secret)
    else
      require_relative "core/bootstrap_context"
      Knife::Core::BootstrapContext.new(config, config[:run_list], Chef::Config, secret)
    end
end
# Attributes for the first chef-client run, taken from the inline JSON
# option or the JSON file option; empty hash when neither was given.
def first_boot_attributes
  inline_attrs = @config[:first_boot_attributes]
  file_attrs = @config[:first_boot_attributes_from_file]
  inline_attrs || file_attrs || {}
end
# Render the bootstrap template into the script that will run remotely.
# Folds the resolved first-boot attributes back into config so the
# bootstrap context sees them under a single key.
#
# @return [String] the rendered script content.
def render_template
  @config[:first_boot_attributes] = first_boot_attributes
  template_file = find_template
  template = IO.read(template_file).chomp
  Erubis::Eruby.new(template).evaluate(bootstrap_context)
end
# Main entry point: validate options, connect to the target, register the
# client identity, render/upload the bootstrap script, then execute it.
# The plugin_* calls are subclass hooks (e.g. knife-ec2).
def run
  check_license if ChefUtils::Dist::Org::ENFORCE_LICENSE
  plugin_setup!
  validate_name_args!
  validate_protocol!
  validate_first_boot_attributes!
  validate_winrm_transport_opts!
  validate_policy_options!
  plugin_validate_options!
  winrm_warn_no_ssl_verification
  warn_on_short_session_timeout
  plugin_create_instance!
  # Unbuffered output so remote progress streams to the user in real time.
  $stdout.sync = true
  connect!
  register_client
  content = render_template
  bootstrap_path = upload_bootstrap(content)
  perform_bootstrap(bootstrap_path)
  plugin_finalize
ensure
  # Best-effort cleanup of the uploaded script; the guards cover failures
  # that occur before the connection or upload happened.
  connection.del_file!(bootstrap_path) if connection && bootstrap_path
end
# Register the node's client identity with the Chef server.
# Uses client-side registration (client_builder) when chef-vault is in play
# or no validation key file exists; otherwise leaves legacy validator-based
# registration to the node itself and just warns.
def register_client
  # chef-vault integration must use the new client-side hawtness, otherwise to use the
  # new client-side hawtness, just delete your validation key.
  if chef_vault_handler.doing_chef_vault? ||
      (Chef::Config[:validation_key] &&
        !File.exist?(File.expand_path(Chef::Config[:validation_key])))

    # Client-side registration needs a known node name up front.
    unless config[:chef_node_name]
      ui.error("You must pass a node name with -N when bootstrapping with user credentials")
      exit 1
    end
    client_builder.run
    chef_vault_handler.run(client_builder.client)
    # Hand the generated client key to the template via the bootstrap context.
    bootstrap_context.client_pem = client_builder.client_path
  else
    ui.warn "Performing legacy client registration with the validation key at #{Chef::Config[:validation_key]}..."
    ui.warn "Remove the key file or remove the 'validation_key' configuration option from your config.rb (knife.rb) to use more secure user credentials for client registration."
  end
end
# Execute the uploaded bootstrap script on the target host.
#
# @param remote_bootstrap_script_path [String] script path on the remote node.
def perform_bootstrap(remote_bootstrap_script_path)
  ui.info("Bootstrapping #{ui.color(server_name, :bold)}")
  bootstrap_run_command(bootstrap_command(remote_bootstrap_script_path))
end
# Actual bootstrap command to be run on the node.
# Handles recursive calls if su USER failed to authenticate.
#
# @param cmd [String] the remote command to execute.
# @param limit [Integer] remaining su password attempts before giving up.
def bootstrap_run_command(cmd, limit = 3)
  r = connection.run_command(cmd) do |data, ch|
    ui.msg("#{ui.color(" [#{connection.hostname}]", :cyan)} #{data}")
    # Feed the su (or connection) password when the remote side prompts.
    ch.send_data("#{config[:su_password] || config[:connection_password]}\n") if data == "Password: "
  end
  if r.exit_status != 0
    stderr = (r.stderr + r.stdout).strip
    if stderr.match?("su: Authentication failure")
      # Re-prompt and retry, decrementing the attempt budget each time.
      limit -= 1
      ui.warn("Failed to authenticate su - #{config[:su_user]} to #{server_name}")
      password = ui.ask("Enter password for su - #{config[:su_user]}@#{server_name}:", echo: false)
      config[:su_password] = password
      bootstrap_run_command(cmd, limit) if limit > 0
    else
      ui.error("The following error occurred on #{server_name}:")
      ui.error(stderr)
      exit(r.exit_status)
    end
  end
end
# Establish the train connection, interactively recovering from unknown
# host keys and from failed key-based auth by retrying with password auth.
def connect!
  ui.info("Connecting to #{ui.color(server_name, :bold)} using #{connection_protocol}")
  # ||= so that options merged in by a rescue clause survive the `retry`.
  opts ||= connection_opts.dup
  do_connect(opts)
rescue Train::Error => e
  # We handle these by message text only because train only loads the
  # transports and protocols that it needs - so the exceptions may not be defined,
  # and we don't want to require files internal to train.
  if e.message =~ /fingerprint (\S+) is unknown for "(.+)"/ # Train::Transports::SSHFailed
    fingerprint = $1
    hostname, ip = $2.split(",")
    # TODO: convert the SHA256 base64 value to hex with colons
    # 'ssh' example output:
    # RSA key fingerprint is e5:cb:c0:e2:21:3b:12:52:f8:ce:cb:00:24:e2:0c:92.
    # ECDSA key fingerprint is 5d:67:61:08:a9:d7:01:fd:5e:ae:7e:09:40:ef:c0:3c.
    # will exit 3 on N
    ui.confirm <<~EOM
      The authenticity of host '#{hostname} (#{ip})' can't be established.
      fingerprint is #{fingerprint}.
      Are you sure you want to continue connecting
    EOM
    # FIXME: this should save the key to known_hosts but doesn't appear to be
    config[:ssh_verify_host_key] = :accept_new
    conn_opts = connection_opts(reset: true)
    opts.merge! conn_opts
    retry
  elsif (ssh? && e.cause && e.cause.class == Net::SSH::AuthenticationFailed) || (ssh? && e.class == Train::ClientError && e.reason == :no_ssh_password_or_key_available)
    # Key auth failed: fall back to password auth unless we already used it.
    if connection.password_auth?
      raise
    else
      ui.warn("Failed to authenticate #{opts[:user]} to #{server_name} - trying password auth")
      password = ui.ask("Enter password for #{opts[:user]}@#{server_name}:", echo: false)
    end
    opts.merge! force_ssh_password_opts(password)
    retry
  else
    raise
  end
rescue RuntimeError => e
  # The winrm stack raises a bare RuntimeError when no password was supplied.
  if winrm? && e.message == "password is a required option"
    if connection.password_auth?
      raise
    else
      ui.warn("Failed to authenticate #{opts[:user]} to #{server_name} - trying password auth")
      password = ui.ask("Enter password for #{opts[:user]}@#{server_name}:", echo: false)
    end
    opts.merge! force_winrm_password_opts(password)
    retry
  else
    raise
  end
end
def handle_ssh_error(e); end
# url values override CLI flags, if you provide both
# we'll use the one that you gave in the URL.
#
# @return [String] "ssh", "winrm", or whatever the URL/flag named.
def connection_protocol
  @connection_protocol ||=
    begin
      url_protocol = host_descriptor =~ %r{^(.*)://} ? $1 : nil
      url_protocol || config[:connection_protocol] || "ssh"
    end
end
# Build the TrainConnector and connect, retrying for recoverable sudo
# failures: request a pty when sudo needs a tty, and re-prompt for a bad
# or missing sudo password (up to 3 attempts).
def do_connect(conn_options)
  @connection = TrainConnector.new(host_descriptor, connection_protocol, conn_options)
  connection.connect!
rescue Train::UserError => e
  # ||= keeps the attempt counter across `retry` passes through this rescue.
  limit ||= 1
  if !conn_options.key?(:pty) && e.reason == :sudo_no_tty
    ui.warn("#{e.message} - trying with pty request")
    conn_options[:pty] = true # ensure we can talk to systems with requiretty set true in sshd config
    retry
  elsif config[:use_sudo_password] && (e.reason == :sudo_password_required || e.reason == :bad_sudo_password) && limit < 3
    ui.warn("Failed to authenticate #{conn_options[:user]} to #{server_name} - #{e.message} \n sudo: #{limit} incorrect password attempt")
    sudo_password = ui.ask("Enter sudo password for #{conn_options[:user]}@#{server_name}:", echo: false)
    limit += 1
    conn_options[:sudo_password] = sudo_password
    retry
  else
    raise
  end
end
# Guard against supplying both --json-attributes and --json-attribute-file;
# they are mutually exclusive sources of first-boot attributes.
#
# @raise [Chef::Exceptions::BootstrapCommandInputError] when both are given.
# @return [TrueClass]
def validate_first_boot_attributes!
  both_given = @config[:first_boot_attributes] && @config[:first_boot_attributes_from_file]
  raise Chef::Exceptions::BootstrapCommandInputError if both_given

  true
end
# FIXME: someone needs to clean this up properly: https://github.com/chef/chef/issues/9645
# This code is deliberately left without an abstraction around deprecating the config options to avoid knife plugins from
# using those methods (which will need to be deprecated and break them) via inheritance (ruby does not have a true `private`
# so the lack of any inheritable implementation is because of that).
#
# Resolve the WinRM authentication method, honoring the deprecated
# :winrm_authentication_protocol config key as a fallback; defaults to
# "negotiate".
#
# @return [String]
def winrm_auth_method
  if config.key?(:winrm_auth_method)
    config[:winrm_auth_method]
  elsif config.key?(:winrm_authentication_protocol)
    # Fix: the key? probe previously checked the misspelled
    # :winrm_authentications_protocol, so the deprecated option was never honored.
    config[:winrm_authentication_protocol]
  else
    "negotiate"
  end
end
# Host key verification mode, honoring the deprecated :host_key_verify
# config key as a fallback; defaults to "always".
def ssh_verify_host_key
  if config.key?(:ssh_verify_host_key)
    config[:ssh_verify_host_key]
  elsif config.key?(:host_key_verify)
    config[:host_key_verify]
  else
    "always"
  end
end
# Fail if using plaintext auth without ssl because
# this can expose keys in plaintext on the wire.
# TODO test for this method
# TODO check that the protocol is valid.
# Fail validatorless WinRM bootstraps that would send the client key over
# an unencrypted (plaintext auth, no SSL) channel.
#
# @return [TrueClass] when the transport configuration is acceptable.
def validate_winrm_transport_opts!
  return true unless winrm?

  if Chef::Config[:validation_key] && !File.exist?(File.expand_path(Chef::Config[:validation_key]))
    if winrm_auth_method == "plaintext" &&
        config[:winrm_ssl] != true
      # Fix: the message previously read "---winrm-ssl" (triple dash) and
      # "unsecure"; corrected so users can copy/paste the real flag.
      ui.error <<~EOM
        Validatorless bootstrap over insecure winrm channels could expose your
        key to network sniffing.
        Please use a 'winrm_auth_method' other than 'plaintext',
        or enable ssl on #{server_name} then use the --winrm-ssl flag
        to connect.
      EOM
      exit 1
    end
  end
  true
end
# Abort with exit 1 unless a target FQDN/IP was supplied on the command line.
def validate_name_args!
  return unless server_name.nil?

  ui.error("Must pass an FQDN or ip to bootstrap")
  exit 1
end
# Ensure options are valid by checking policyfile values.
#
# The method call will cause the program to exit(1) if:
# * Only one of --policy-name and --policy-group is specified
# * Policyfile options are set and --run-list is set as well
#
# @return [TrueClass] If options are valid.
def validate_policy_options!
  if incomplete_policyfile_options?
    ui.error("--policy-name and --policy-group must be specified together")
    exit 1
  end
  if policyfile_and_run_list_given?
    ui.error("Policyfile options and --run-list are exclusive")
    exit 1
  end
end
# Ensure a valid protocol is provided for target host connection
#
# The method call will cause the program to exit(1) if:
# * Conflicting protocols are given via the target URI and the --protocol option
# * The protocol is not a supported protocol
#
# @return [TrueClass] If options are valid.
def validate_protocol!
  cli_protocol = config[:connection_protocol]
  if cli_protocol && connection_protocol != cli_protocol
    # Hanging indent to align with the ERROR: prefix
    ui.error <<~EOM
      The URL '#{host_descriptor}' indicates protocol is '#{connection_protocol}'
      while the --protocol flag specifies '#{cli_protocol}'. Please include
      only one or the other.
    EOM
    exit 1
  end

  unless SUPPORTED_CONNECTION_PROTOCOLS.include?(connection_protocol)
    ui.error <<~EOM
      Unsupported protocol '#{connection_protocol}'.
      Supported protocols are: #{SUPPORTED_CONNECTION_PROTOCOLS.join(" ")}
    EOM
    exit 1
  end

  true
end
# Hook for subclasses (e.g. knife-ec2) to validate plugin-specific options
# before any other action runs. The base implementation accepts everything.
#
# @return [TrueClass]
def plugin_validate_options!
  true
end
# Hook for subclasses (e.g. knife-ec2) to create the server that will be
# bootstrapped, typically via a cloud API. The base implementation is a no-op.
#
# @return [TrueClass]
def plugin_create_instance!
  true
end
# Hook for subclasses to perform any setup (e.g. build connection objects)
# before validation and bootstrap begin. The base implementation is a no-op.
def plugin_setup!; end
# Hook for subclasses to display a message or clean up after bootstrap
# completes. The base implementation is a no-op.
#
# @return [void]
def plugin_finalize; end
# If session_timeout is too short, it is likely
# a holdover from "--winrm-session-timeout" which used
# minutes as its unit, instead of seconds.
# Warn the human so that they are not surprised.
#
def warn_on_short_session_timeout
  # NOTE(review): assumes session_timeout returns seconds as a Numeric or nil —
  # the accessor is defined outside this chunk; confirm its contract.
  if session_timeout && session_timeout <= 15
    ui.warn <<~EOM
      You provided '--session-timeout #{session_timeout}' second(s).
      Did you mean '--session-timeout #{session_timeout * 60}' seconds?
    EOM
  end
end
# Warn when WinRM certificate verification is disabled and no alternative
# trust anchor (CA trust file or pinned peer fingerprint) was supplied.
def winrm_warn_no_ssl_verification
  return unless winrm?

  # REVIEWER NOTE
  # The original check from knife plugin did not include winrm_ssl_peer_fingerprint
  # Reference:
  # https://github.com/chef/knife-windows/blob/92d151298142be4a4750c5b54bb264f8d5b81b8a/lib/chef/knife/winrm_knife_base.rb#L271-L273
  # TODO Seems like we should also do a similar warning if ssh_verify_host == false
  if config[:ca_trust_file].nil? &&
      config[:winrm_no_verify_cert] &&
      config[:winrm_ssl_peer_fingerprint].nil?
    # Fix: the previous text advised using `--winrm-no-verify-cert` and setting
    # knife[:winrm_no_verify_cert] = true — i.e. exactly the configuration that
    # triggers this warning. Advise re-enabling verification instead.
    ui.warn <<~WARN
      * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
      SSL validation of HTTPS requests for the WinRM transport is disabled.
      HTTPS WinRM connections are still encrypted, but knife is not able
      to detect forged replies or spoofing attacks.
      To fix this issue remove the `--winrm-no-verify-cert` flag, or add an
      entry like this to your knife configuration file:
      # Verify all WinRM HTTPS connections
      knife[:winrm_no_verify_cert] = false
      You can also specify a ca_trust_file via --ca-trust-file,
      or the expected fingerprint of the target host's certificate
      via --winrm-ssl-peer-fingerprint.
    WARN
  end
end
# @return a configuration hash suitable for connecting to the remote
# host via train; cached unless reset: true is given.
def connection_opts(reset: false)
  if @connection_opts.nil? || reset == true
    @connection_opts = [
      base_opts,
      host_verify_opts,
      gateway_opts,
      sudo_opts,
      winrm_opts,
      ssh_opts,
      ssh_identity_opts,
    ].reduce({}) { |merged, section| merged.merge!(section) }
  end
  @connection_opts
end
# @return [Boolean] true when bootstrapping over WinRM.
def winrm?
  "winrm" == connection_protocol
end
# @return [Boolean] true when bootstrapping over SSH.
def ssh?
  "ssh" == connection_protocol
end
# Common configuration for all protocols
def base_opts
  opts = { logger: Chef::Log }
  opts[:password] = config[:connection_password] if config.key?(:connection_password)
  user = config_for_protocol(:user)
  opts[:user] = user if user
  opts[:max_wait_until_ready] = config[:max_wait].to_f unless config[:max_wait].nil?
  port = config_for_protocol(:port)
  # TODO - when would we need to provide rdp_port vs port? Or are they not mutually exclusive?
  opts[:port] = port if port
  opts
end
# Protocol-specific host/certificate verification settings.
def host_verify_opts
  return { self_signed: config[:winrm_no_verify_cert] === true } if winrm?
  # Fall back to the old knife config key name for back compat.
  return { verify_host_key: ssh_verify_host_key } if ssh?

  {}
end
# SSH-only connection settings; empty for WinRM.
def ssh_opts
  return {} if winrm?

  {
    non_interactive: true, # Prevent password prompts from underlying net/ssh
    forward_agent: config[:ssh_forward_agent] === true,
    connection_timeout: session_timeout,
  }
end
# Key-file related SSH options: the explicit identity file (if any) plus
# the gateway identity file when an SSH gateway is in use.
def ssh_identity_opts
  opts = {}
  return opts if winrm?

  identity_file = config[:ssh_identity_file]
  if identity_file
    opts[:key_files] = [identity_file]
    # We only set keys_only based on the explicit ssh_identity_file;
    # someone may use a gateway key and still expect password auth
    # on the target. Similarly, someone may have a default key specified
    # in knife config, but have provided a password on the CLI.
    # REVIEW NOTE: this is a new behavior. Originally, ssh_identity_file
    # could only be populated from CLI options, so there was no need to check
    # for this. We will also set keys_only to false only if there are keys
    # and no password.
    # If both are present, train(via net/ssh) will prefer keys, falling back to password.
    # Reference: https://github.com/chef/chef/blob/master/lib/chef/knife/ssh.rb#L272
    opts[:keys_only] = config.key?(:connection_password) == false
  else
    opts[:key_files] = []
    opts[:keys_only] = false
  end
  gateway_identity_file = config[:ssh_gateway] ? config[:ssh_gateway_identity] : nil
  unless gateway_identity_file.nil?
    opts[:key_files] << gateway_identity_file
  end
  opts
end
# Bastion/gateway settings parsed from --ssh-gateway ([user@]host[:port]).
# Empty hash when no gateway is configured.
def gateway_opts
  return {} unless config[:ssh_gateway]

  user_part, host_part = config[:ssh_gateway].split("@", 2)
  if host_part.nil?
    # No 'user@' prefix: the whole string is the host.
    host_part = user_part
    user_part = nil
  end
  host, port = host_part.split(":", 2)
  # TODO - validate convertible port in config validation?
  port = Integer(port) rescue nil
  { bastion_host: host, bastion_user: user_part, bastion_port: port }
end
# use_sudo - tells bootstrap to use the sudo command to run bootstrap
# use_sudo_password - tells bootstrap to use the sudo command to run bootstrap
# and to use the password specified with --password
# TODO: I'd like to make our sudo options sane:
# --sudo (bool) - use sudo
# --sudo-password PASSWORD (default: :password) - use this password for sudo
# --sudo-options "opt,opt,opt" to pass into sudo
# --sudo-command COMMAND sudo command other than sudo
# REVIEW NOTE: knife bootstrap did not pull sudo values from Chef::Config,
# should we change that for consistency?
#
# @return [Hash] sudo-related train options; {} for WinRM targets.
def sudo_opts
  return {} if winrm?

  sudo_options = { sudo: false }
  if config[:use_sudo]
    sudo_options[:sudo] = true
    sudo_options[:sudo_password] = config[:connection_password] if config[:use_sudo_password]
    sudo_options[:sudo_options] = "-H" if config[:preserve_home]
  end
  sudo_options
end
# Assembles the train options for a WinRM connection.
#
# @return [Hash] WinRM transport/auth options; {} when not using WinRM.
def winrm_opts
  return {} unless winrm?

  opts = {
    winrm_transport: winrm_auth_method, # winrm gem and train calls auth method 'transport'
    winrm_basic_auth_only: config[:winrm_basic_auth_only] || false,
    ssl: config[:winrm_ssl] === true,
    ssl_peer_fingerprint: config[:winrm_ssl_peer_fingerprint],
  }
  if winrm_auth_method == "kerberos"
    opts[:kerberos_service] = config[:kerberos_service] if config[:kerberos_service]
    # BUG FIX: the realm was previously gated on :kerberos_service, so
    # --kerberos-realm was silently dropped whenever --kerberos-service
    # was not also given. Gate it on :kerberos_realm itself.
    opts[:kerberos_realm] = config[:kerberos_realm] if config[:kerberos_realm]
  end
  if config[:ca_trust_file]
    opts[:ca_trust_path] = config[:ca_trust_file]
  end
  opts[:operation_timeout] = session_timeout
  opts
end
# Connection-option overrides that force password authentication over SSH,
# disabling key-only auth and any previously configured identity files.
#
# @param password [String] the password to authenticate with
# @return [Hash] option overrides to merge into the connection options
def force_ssh_password_opts(password)
  overrides = {
    password: password,
    non_interactive: false,
    keys_only: false,
    key_files: [],
  }
  overrides[:auth_methods] = %i{password keyboard_interactive}
  overrides
end
# Connection-option override that forces password authentication over WinRM.
#
# @param password [String] the password to authenticate with
# @return [Hash] option overrides to merge into the connection options
def force_winrm_password_opts(password)
  { password: password }
end
# This is for deprecating config options. The fallback_key can be used
# to pull an old knife config option out of the config file when the
# cli value has been renamed. This is different from the deprecated
# cli values, since these are for config options that have no corresponding
# cli value.
#
# DO NOT USE - this whole API is considered deprecated
#
# @api deprecated
#
def config_value(key, fallback_key = nil, default = nil)
  Chef.deprecated(:knife_bootstrap_apis, "Use of config_value is deprecated. Knife plugin authors should access the config hash directly, which does correct merging of cli and config options.")
  # The primary key lives in the merged config hash, so it wins.
  return config[key] if config.key?(key)
  # Fall back to the old config-file-only option when present.
  return config[fallback_key] if config.key?(fallback_key)

  default
end
# Uploads the rendered bootstrap script into the target's temp directory.
#
# @param content [String] the rendered bootstrap script body
# @return [String] the remote path the script was written to
def upload_bootstrap(content)
  name = if connection.windows?
           "bootstrap.bat"
         else
           "bootstrap.sh"
         end
  destination = connection.normalize_path(File.join(connection.temp_dir, name))
  connection.upload_file_content!(content, destination)
  destination
end
# build the command string for bootstrapping
#
# On Windows the script runs via cmd.exe; elsewhere via sh, optionally
# wrapped in `su - USER -c` and/or prefixed with sudo.
#
# @param remote_path [String] remote path of the uploaded bootstrap script
# @return [String] the command to execute on the target
def bootstrap_command(remote_path)
  return "cmd.exe /C #{remote_path}" if connection.windows?

  command = "sh #{remote_path}"
  su_user = config[:su_user]
  if su_user
    # `su - USER` demands an interactive console; without a pty it fails
    # with "su: must be run from a terminal".
    set_transport_options(pty: true)
    command = "su - #{su_user} -c '#{command}'"
    command = "sudo #{command}" if config[:use_sudo]
  end
  command
end
private
# To avoid cluttering the CLI options, some flags (such as port and user)
# are shared between protocols. However, there is still a need to allow the operator
# to specify defaults separately, since they may not be the same values for different
# protocols.
# These keys are available in Chef::Config, and are prefixed with the protocol name.
# For example, :user CLI option will map to :winrm_user and :ssh_user Chef::Config keys,
# based on the connection protocol in use.
# @api private
def config_for_protocol(option)
  # The shared CLI flag wins; otherwise fall back to the
  # protocol-prefixed key (e.g. :ssh_port / :winrm_port).
  shared_key = option == :port ? :connection_port : :connection_user
  config[shared_key] || config[knife_key_for_protocol(option)]
end
# Builds the protocol-prefixed Chef::Config key for a shared option,
# e.g. :port -> :ssh_port or :winrm_port.
# @api private
def knife_key_for_protocol(option)
  :"#{connection_protocol}_#{option}"
end
# True if policy_name and run_list are both given
def policyfile_and_run_list_given?
  policyfile_options_given? && run_list_given?
end
# True when a non-empty --run-list was supplied.
def run_list_given?
  run_list = config[:run_list]
  !(run_list.nil? || run_list.empty?)
end
# True when --policy-name was supplied.
def policyfile_options_given?
  config[:policy_name] ? true : false
end
# True if one of policy_name or policy_group was given, but not both
def incomplete_policyfile_options?
  name_given = !!config[:policy_name]
  group_given = !!config[:policy_group]
  name_given ^ group_given
end
# session_timeout option has a default that may not arrive, particularly if
# we're being invoked from a plugin that doesn't merge_config.
#
# @return [Integer] the session timeout in seconds
def session_timeout
  configured = config[:session_timeout]
  if configured.nil?
    options[:session_timeout][:default]
  else
    configured.to_i
  end
end
# Train::Transports::SSH::Connection#transport_options
# Append the options to connection transport_options
#
# @param opts [Hash] the opts to be added to connection transport_options.
# @return [Hash, nil] the merged transport_options, or nil when there is
#   nothing to merge (or no live connection).
#
def set_transport_options(opts)
  # BUG FIX: the guard previously used `||`, so a nil or non-Hash argument
  # fell through to `opts.empty?` and raised NoMethodError, and empty
  # hashes were pointlessly merged. Merge only for a non-empty Hash.
  return unless opts.is_a?(Hash) && !opts.empty?

  connection&.connection&.transport_options&.merge! opts
end
end
end
end
# Fix AIX su password match string
# Signed-off-by: Vivek Singh <ffdfd29e24c206a3c24e1247563eb4c218c7ffa5@msystechnologies.com>
#
# Author:: Adam Jacob (<adam@chef.io>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../knife"
require_relative "data_bag_secret_options"
require "chef-utils/dist" unless defined?(ChefUtils::Dist)
require "license_acceptance/cli_flags/mixlib_cli"
# Lazily load the license-acceptance flow: Acceptor pulls in a sizeable
# dependency tree, so defer loading it until a license check actually runs.
module LicenseAcceptance
  autoload :Acceptor, "license_acceptance/acceptor"
end
class Chef
class Knife
class Bootstrap < Knife
include DataBagSecretOptions
include LicenseAcceptance::CLIFlags::MixlibCLI
SUPPORTED_CONNECTION_PROTOCOLS ||= %w{ssh winrm}.freeze
WINRM_AUTH_PROTOCOL_LIST ||= %w{plaintext kerberos ssl negotiate}.freeze
# Common connectivity options
option :connection_user,
short: "-U USERNAME",
long: "--connection-user USERNAME",
description: "Authenticate to the target host with this user account."
option :connection_password,
short: "-P PASSWORD",
long: "--connection-password PASSWORD",
description: "Authenticate to the target host with this password."
option :connection_port,
short: "-p PORT",
long: "--connection-port PORT",
description: "The port on the target node to connect to."
option :connection_protocol,
short: "-o PROTOCOL",
long: "--connection-protocol PROTOCOL",
description: "The protocol to use to connect to the target node.",
in: SUPPORTED_CONNECTION_PROTOCOLS
option :max_wait,
short: "-W SECONDS",
long: "--max-wait SECONDS",
description: "The maximum time to wait for the initial connection to be established."
option :session_timeout,
long: "--session-timeout SECONDS",
description: "The number of seconds to wait for each connection operation to be acknowledged while running bootstrap.",
default: 60
# WinRM Authentication
option :winrm_ssl_peer_fingerprint,
long: "--winrm-ssl-peer-fingerprint FINGERPRINT",
description: "SSL certificate fingerprint expected from the target."
option :ca_trust_file,
short: "-f CA_TRUST_PATH",
long: "--ca-trust-file CA_TRUST_PATH",
description: "The Certificate Authority (CA) trust file used for SSL transport."
option :winrm_no_verify_cert,
long: "--winrm-no-verify-cert",
description: "Do not verify the SSL certificate of the target node for WinRM.",
boolean: true
option :winrm_ssl,
long: "--winrm-ssl",
description: "Use SSL in the WinRM connection."
option :winrm_auth_method,
short: "-w AUTH-METHOD",
long: "--winrm-auth-method AUTH-METHOD",
description: "The WinRM authentication method to use.",
in: WINRM_AUTH_PROTOCOL_LIST
option :winrm_basic_auth_only,
long: "--winrm-basic-auth-only",
description: "For WinRM basic authentication when using the 'ssl' auth method.",
boolean: true
# This option was provided in knife bootstrap windows winrm,
# but it is ignored in knife-windows/WinrmSession, and so remains unimplemented here.
# option :kerberos_keytab_file,
# :short => "-T KEYTAB_FILE",
# :long => "--keytab-file KEYTAB_FILE",
# :description => "The Kerberos keytab file used for authentication"
option :kerberos_realm,
short: "-R KERBEROS_REALM",
long: "--kerberos-realm KERBEROS_REALM",
description: "The Kerberos realm used for authentication."
option :kerberos_service,
short: "-S KERBEROS_SERVICE",
long: "--kerberos-service KERBEROS_SERVICE",
description: "The Kerberos service used for authentication."
## SSH Authentication
option :ssh_gateway,
short: "-G GATEWAY",
long: "--ssh-gateway GATEWAY",
description: "The SSH gateway."
option :ssh_gateway_identity,
long: "--ssh-gateway-identity SSH_GATEWAY_IDENTITY",
description: "The SSH identity file used for gateway authentication."
option :ssh_forward_agent,
short: "-A",
long: "--ssh-forward-agent",
description: "Enable SSH agent forwarding.",
boolean: true
option :ssh_identity_file,
short: "-i IDENTITY_FILE",
long: "--ssh-identity-file IDENTITY_FILE",
description: "The SSH identity file used for authentication."
option :ssh_verify_host_key,
long: "--ssh-verify-host-key VALUE",
description: "Verify host key. Default is 'always'.",
in: %w{always accept_new accept_new_or_local_tunnel never},
default: "always"
#
# bootstrap options
#
# client.rb content via chef-full/bootstrap_context
option :bootstrap_version,
long: "--bootstrap-version VERSION",
description: "The version of #{ChefUtils::Dist::Infra::PRODUCT} to install."
option :channel,
long: "--channel CHANNEL",
description: "Install from the given channel. Default is 'stable'.",
default: "stable",
in: %w{stable current unstable}
# client.rb content via chef-full/bootstrap_context
option :bootstrap_proxy,
long: "--bootstrap-proxy PROXY_URL",
description: "The proxy server for the node being bootstrapped."
# client.rb content via bootstrap_context
option :bootstrap_proxy_user,
long: "--bootstrap-proxy-user PROXY_USER",
description: "The proxy authentication username for the node being bootstrapped."
# client.rb content via bootstrap_context
option :bootstrap_proxy_pass,
long: "--bootstrap-proxy-pass PROXY_PASS",
description: "The proxy authentication password for the node being bootstrapped."
# client.rb content via bootstrap_context
option :bootstrap_no_proxy,
long: "--bootstrap-no-proxy [NO_PROXY_URL|NO_PROXY_IP]",
description: "Do not proxy locations for the node being bootstrapped"
# client.rb content via bootstrap_context
option :bootstrap_template,
short: "-t TEMPLATE",
long: "--bootstrap-template TEMPLATE",
description: "Bootstrap #{ChefUtils::Dist::Infra::PRODUCT} using a built-in or custom template. Set to the full path of an erb template or use one of the built-in templates."
# client.rb content via bootstrap_context
option :node_ssl_verify_mode,
long: "--node-ssl-verify-mode [peer|none]",
description: "Whether or not to verify the SSL cert for all HTTPS requests.",
proc: Proc.new { |v|
valid_values = %w{none peer}
unless valid_values.include?(v)
raise "Invalid value '#{v}' for --node-ssl-verify-mode. Valid values are: #{valid_values.join(", ")}"
end
v
}
# bootstrap_context - client.rb
option :node_verify_api_cert,
long: "--[no-]node-verify-api-cert",
description: "Verify the SSL cert for HTTPS requests to the #{ChefUtils::Dist::Server::PRODUCT} API.",
boolean: true
# runtime - sudo settings (train handles sudo)
option :use_sudo,
long: "--sudo",
description: "Execute the bootstrap via sudo.",
boolean: true
# runtime - sudo settings (train handles sudo)
option :preserve_home,
long: "--sudo-preserve-home",
description: "Preserve non-root user HOME environment variable with sudo.",
boolean: true
# runtime - sudo settings (train handles sudo)
option :use_sudo_password,
long: "--use-sudo-password",
description: "Execute the bootstrap via sudo with password.",
boolean: false
# runtime - su user
option :su_user,
long: "--su-user NAME",
description: "The su - USER name to perform bootstrap command using a non-root user."
# runtime - su user password
option :su_password,
long: "--su-password PASSWORD",
description: "The su USER password for authentication."
# runtime - client_builder
option :chef_node_name,
short: "-N NAME",
long: "--node-name NAME",
description: "The node name for your new node."
# runtime - client_builder - set runlist when creating node
option :run_list,
short: "-r RUN_LIST",
long: "--run-list RUN_LIST",
description: "Comma separated list of roles/recipes to apply.",
proc: lambda { |o| o.split(/[\s,]+/) },
default: []
# runtime - client_builder - set policy name when creating node
option :policy_name,
long: "--policy-name POLICY_NAME",
description: "Policyfile name to use (--policy-group must also be given).",
default: nil
# runtime - client_builder - set policy group when creating node
option :policy_group,
long: "--policy-group POLICY_GROUP",
description: "Policy group name to use (--policy-name must also be given).",
default: nil
# runtime - client_builder - node tags
option :tags,
long: "--tags TAGS",
description: "Comma separated list of tags to apply to the node.",
proc: lambda { |o| o.split(/[\s,]+/) },
default: []
# bootstrap template
option :first_boot_attributes,
short: "-j JSON_ATTRIBS",
long: "--json-attributes",
description: "A JSON string to be added to the first run of #{ChefUtils::Dist::Infra::CLIENT}.",
proc: lambda { |o| Chef::JSONCompat.parse(o) },
default: nil
# bootstrap template
option :first_boot_attributes_from_file,
long: "--json-attribute-file FILE",
description: "A JSON file to be used to the first run of #{ChefUtils::Dist::Infra::CLIENT}.",
proc: lambda { |o| Chef::JSONCompat.parse(File.read(o)) },
default: nil
# bootstrap template
# Create ohai hints in /etc/chef/ohai/hints, fname=hintname, content=value
option :hints,
long: "--hint HINT_NAME[=HINT_FILE]",
description: "Specify an Ohai hint to be set on the bootstrap target. Use multiple --hint options to specify multiple hints.",
proc: Proc.new { |hint, accumulator|
accumulator ||= {}
name, path = hint.split("=", 2)
accumulator[name] = path ? Chef::JSONCompat.parse(::File.read(path)) : {}
accumulator
}
# bootstrap override: url of a an installer shell script to use in place of omnitruck
# Note that the bootstrap template _only_ references this out of Chef::Config, and not from
# the provided options to knife bootstrap, so we set the Chef::Config option here.
option :bootstrap_url,
long: "--bootstrap-url URL",
description: "URL to a custom installation script."
option :bootstrap_product,
long: "--bootstrap-product PRODUCT",
description: "Product to install.",
default: "chef"
option :msi_url, # Windows target only
short: "-m URL",
long: "--msi-url URL",
description: "Location of the #{ChefUtils::Dist::Infra::PRODUCT} MSI. The default templates will prefer to download from this location. The MSI will be downloaded from #{ChefUtils::Dist::Org::WEBSITE} if not provided (Windows).",
default: ""
# bootstrap override: Do this instead of our own setup.sh from omnitruck. Causes bootstrap_url to be ignored.
option :bootstrap_install_command,
long: "--bootstrap-install-command COMMANDS",
description: "Custom command to install #{ChefUtils::Dist::Infra::PRODUCT}."
# bootstrap template: Run this command first in the bootstrap script
option :bootstrap_preinstall_command,
long: "--bootstrap-preinstall-command COMMANDS",
description: "Custom commands to run before installing #{ChefUtils::Dist::Infra::PRODUCT}."
# bootstrap template
option :bootstrap_wget_options,
long: "--bootstrap-wget-options OPTIONS",
description: "Add options to wget when installing #{ChefUtils::Dist::Infra::PRODUCT}."
# bootstrap template
option :bootstrap_curl_options,
long: "--bootstrap-curl-options OPTIONS",
description: "Add options to curl when install #{ChefUtils::Dist::Infra::PRODUCT}."
# chef_vault_handler
option :bootstrap_vault_file,
long: "--bootstrap-vault-file VAULT_FILE",
description: "A JSON file with a list of vault(s) and item(s) to be updated."
# chef_vault_handler
option :bootstrap_vault_json,
long: "--bootstrap-vault-json VAULT_JSON",
description: "A JSON string with the vault(s) and item(s) to be updated."
# chef_vault_handler
option :bootstrap_vault_item,
long: "--bootstrap-vault-item VAULT_ITEM",
description: 'A single vault and item to update as "vault:item".',
proc: Proc.new { |i, accumulator|
(vault, item) = i.split(/:/)
accumulator ||= {}
accumulator[vault] ||= []
accumulator[vault].push(item)
accumulator
}
# Deprecated options. These must be declared after
# regular options because they refer to the replacement
# option definitions implicitly.
deprecated_option :auth_timeout,
replacement: :max_wait,
long: "--max-wait SECONDS"
deprecated_option :forward_agent,
replacement: :ssh_forward_agent,
boolean: true, long: "--forward-agent"
deprecated_option :host_key_verify,
replacement: :ssh_verify_host_key,
boolean: true, long: "--[no-]host-key-verify",
value_mapper: Proc.new { |verify| verify ? "always" : "never" }
deprecated_option :prerelease,
replacement: :channel,
long: "--prerelease",
boolean: true, value_mapper: Proc.new { "current" }
deprecated_option :ssh_user,
replacement: :connection_user,
long: "--ssh-user USERNAME"
deprecated_option :ssh_password,
replacement: :connection_password,
long: "--ssh-password PASSWORD"
deprecated_option :ssh_port,
replacement: :connection_port,
long: "--ssh-port PASSWORD"
deprecated_option :ssl_peer_fingerprint,
replacement: :winrm_ssl_peer_fingerprint,
long: "--ssl-peer-fingerprint FINGERPRINT"
deprecated_option :winrm_user,
replacement: :connection_user,
long: "--winrm-user USERNAME", short: "-x USERNAME"
deprecated_option :winrm_password,
replacement: :connection_password,
long: "--winrm-password PASSWORD"
deprecated_option :winrm_port,
replacement: :connection_port,
long: "--winrm-port PORT"
deprecated_option :winrm_authentication_protocol,
replacement: :winrm_auth_method,
long: "--winrm-authentication-protocol PROTOCOL"
deprecated_option :winrm_session_timeout,
replacement: :session_timeout,
long: "--winrm-session-timeout MINUTES"
deprecated_option :winrm_ssl_verify_mode,
replacement: :winrm_no_verify_cert,
long: "--winrm-ssl-verify-mode MODE"
deprecated_option :winrm_transport, replacement: :winrm_ssl,
long: "--winrm-transport TRANSPORT",
value_mapper: Proc.new { |value| value == "ssl" }
attr_reader :connection
deps do
require "erubis" unless defined?(Erubis)
require_relative "../json_compat"
require_relative "../util/path_helper"
require_relative "bootstrap/chef_vault_handler"
require_relative "bootstrap/client_builder"
require_relative "bootstrap/train_connector"
end
banner "knife bootstrap [PROTOCOL://][USER@]FQDN (options)"
# Memoized helper that creates the node/client on the Chef server
# during validatorless (user-credential) registration.
#
# @return [Chef::Knife::Bootstrap::ClientBuilder]
def client_builder
  @client_builder ||=
    Chef::Knife::Bootstrap::ClientBuilder.new(
      chef_config: Chef::Config,
      config: config,
      ui: ui
    )
end
# Memoized helper that updates chef-vault items for the newly created client.
#
# @return [Chef::Knife::Bootstrap::ChefVaultHandler]
def chef_vault_handler
  @chef_vault_handler ||=
    Chef::Knife::Bootstrap::ChefVaultHandler.new(config: config, ui: ui)
end
# Determine if we need to accept the Chef Infra license locally in order to successfully bootstrap
# the remote node. Remote 'chef-client' run will fail if it is >= 15 and the license is not accepted locally.
def check_license
  Chef::Log.debug("Checking if we need to accept Chef license to bootstrap node")
  # Use the requested bootstrap version when given; otherwise assume the
  # remote node will run the same major version as this workstation.
  version = config[:bootstrap_version] || Chef::VERSION.split(".").first
  acceptor = LicenseAcceptance::Acceptor.new(logger: Chef::Log, provided: Chef::Config[:chef_license])
  if acceptor.license_required?("chef", version)
    Chef::Log.debug("License acceptance required for chef version: #{version}")
    license_id = acceptor.id_from_mixlib("chef")
    # May prompt interactively; persists the acceptance for future runs.
    acceptor.check_and_persist(license_id, version)
    # Record the accepted value (unless already configured) so the rest of
    # the bootstrap can forward it to the remote node.
    Chef::Config[:chef_license] ||= acceptor.acceptance_value
  end
end
# The default bootstrap template to use to bootstrap a server.
# This is a public API hook which knife plugins use or inherit and override.
#
# @return [String] Default bootstrap template
def default_bootstrap_template
  connection.windows? ? "windows-chef-client-msi" : "chef-full"
end
# The first positional CLI argument: [PROTOCOL://][USER@]FQDN, or nil.
def host_descriptor
  args = Array(@name_args)
  args.first
end
# The server_name is the DNS or IP we are going to connect to, it is not necessarily
# the node name, the fqdn, or the hostname of the server. This is a public API hook
# which knife plugins use or inherit and override.
#
# @return [String] The DNS or IP that bootstrap will connect to
def server_name
  return nil unless host_descriptor

  # Strip any leading USER@ (take the part after the last "@") and memoize.
  @server_name ||= host_descriptor.split("@").last
end
# @return [String] The CLI specific bootstrap template or the default
def bootstrap_template
  # An explicitly requested template wins over the platform default.
  explicit = config[:bootstrap_template]
  explicit || default_bootstrap_template
end
def find_template
template = bootstrap_template
# Use the template directly if it's a path to an actual file
if File.exist?(template)
Chef::Log.trace("Using the specified bootstrap template: #{File.dirname(template)}")
return template
end
# Otherwise search the template directories until we find the right one
bootstrap_files = []
bootstrap_files << File.join(__dir__, "bootstrap/templates", "#{template}.erb")
bootstrap_files << File.join(Knife.chef_config_dir, "bootstrap", "#{template}.erb") if Chef::Knife.chef_config_dir
Chef::Util::PathHelper.home(".chef", "bootstrap", "#{template}.erb") { |p| bootstrap_files << p }
bootstrap_files << Gem.find_files(File.join("chef", "knife", "bootstrap", "#{template}.erb"))
bootstrap_files.flatten!
template_file = Array(bootstrap_files).find do |bootstrap_template|
Chef::Log.trace("Looking for bootstrap template in #{File.dirname(bootstrap_template)}")
File.exist?(bootstrap_template)
end
unless template_file
ui.info("Can not find bootstrap definition for #{template}")
raise Errno::ENOENT
end
Chef::Log.trace("Found bootstrap template: #{template_file}")
template_file
end
# Memoized encrypted-data-bag secret, when one was provided; nil otherwise.
def secret
  @secret ||= (read_secret if encryption_secret_provided_ignore_encrypt_flag?)
end
# Establish bootstrap context for template rendering.
# Requires connection to be a live connection in order to determine
# the correct platform.
#
# @return [Knife::Core::BootstrapContext, Knife::Core::WindowsBootstrapContext]
def bootstrap_context
  @bootstrap_context ||=
    if connection.windows?
      # Deliberate lazy require: only load the context class we need.
      require_relative "core/windows_bootstrap_context"
      Knife::Core::WindowsBootstrapContext.new(config, config[:run_list], Chef::Config, secret)
    else
      require_relative "core/bootstrap_context"
      Knife::Core::BootstrapContext.new(config, config[:run_list], Chef::Config, secret)
    end
end
# Attributes for the first chef-client run: the CLI JSON value, else the
# JSON-file value, else an empty hash.
def first_boot_attributes
  from_cli = @config[:first_boot_attributes]
  from_file = @config[:first_boot_attributes_from_file]
  from_cli || from_file || {}
end
# Renders the bootstrap template into the final script content.
#
# @return [String] the evaluated template body
def render_template
  # Templates read :first_boot_attributes out of config, so normalize the
  # CLI/file variants into that single key before rendering.
  @config[:first_boot_attributes] = first_boot_attributes
  template_file = find_template
  template = IO.read(template_file).chomp
  Erubis::Eruby.new(template).evaluate(bootstrap_context)
end
# Main entry point: validates options, connects to the target, registers
# the client, then uploads and runs the rendered bootstrap script.
# The plugin_* calls are extension hooks for subclasses (e.g. knife-ec2).
def run
  check_license if ChefUtils::Dist::Org::ENFORCE_LICENSE
  plugin_setup!
  validate_name_args!
  validate_protocol!
  validate_first_boot_attributes!
  validate_winrm_transport_opts!
  validate_policy_options!
  plugin_validate_options!
  winrm_warn_no_ssl_verification
  warn_on_short_session_timeout
  plugin_create_instance!
  $stdout.sync = true
  # Connect before rendering: the template choice depends on the
  # target platform reported by the live connection.
  connect!
  register_client
  content = render_template
  bootstrap_path = upload_bootstrap(content)
  perform_bootstrap(bootstrap_path)
  plugin_finalize
ensure
  # Best-effort removal of the uploaded script, even when bootstrap fails.
  connection.del_file!(bootstrap_path) if connection && bootstrap_path
end
# Registers the new client with the Chef server.
#
# Uses validatorless (user-credential) registration when chef-vault is in
# play or when no validation key file exists on disk; otherwise falls back
# to legacy validation-key registration with a warning.
def register_client
  # chef-vault integration must use the new client-side hawtness, otherwise to use the
  # new client-side hawtness, just delete your validation key.
  if chef_vault_handler.doing_chef_vault? ||
      (Chef::Config[:validation_key] &&
        !File.exist?(File.expand_path(Chef::Config[:validation_key])))
    # Validatorless registration needs the node name up front (-N).
    unless config[:chef_node_name]
      ui.error("You must pass a node name with -N when bootstrapping with user credentials")
      exit 1
    end
    client_builder.run
    chef_vault_handler.run(client_builder.client)
    # Hand the freshly created client key over to the bootstrap template.
    bootstrap_context.client_pem = client_builder.client_path
  else
    ui.warn "Performing legacy client registration with the validation key at #{Chef::Config[:validation_key]}..."
    ui.warn "Remove the key file or remove the 'validation_key' configuration option from your config.rb (knife.rb) to use more secure user credentials for client registration."
  end
end
# Announces and runs the uploaded bootstrap script on the target host.
#
# @param remote_bootstrap_script_path [String] remote path of the script
def perform_bootstrap(remote_bootstrap_script_path)
  ui.info("Bootstrapping #{ui.color(server_name, :bold)}")
  bootstrap_run_command(bootstrap_command(remote_bootstrap_script_path))
end
# Actual bootstrap command to be run on the node.
# Handles recursive calls if su USER failed to authenticate.
#
# @param cmd [String] the command to execute on the target
# @param limit [Integer] remaining su password attempts (re-prompts up to 3 times)
def bootstrap_run_command(cmd, limit = 3)
  r = connection.run_command(cmd) do |data, ch|
    ui.msg("#{ui.color(" [#{connection.hostname}]", :cyan)} #{data}")
    # Answer su/sudo password prompts as output streams by.
    # NOTE(review): matches the substring "Password:" — presumably chosen to
    # cover prompt variants (e.g. AIX su); confirm against supported platforms.
    ch.send_data("#{config[:su_password] || config[:connection_password]}\n") if data.match?("Password:")
  end
  if r.exit_status != 0
    stderr = (r.stderr + r.stdout).strip
    if stderr.match?("su: Authentication failure")
      # Wrong su password: prompt interactively and retry, up to `limit` attempts.
      limit -= 1
      ui.warn("Failed to authenticate su - #{config[:su_user]} to #{server_name}")
      password = ui.ask("Enter password for su - #{config[:su_user]}@#{server_name}:", echo: false)
      config[:su_password] = password
      bootstrap_run_command(cmd, limit) if limit > 0
    else
      # Any other non-zero exit is fatal: surface the output and exit with it.
      ui.error("The following error occurred on #{server_name}:")
      ui.error(stderr)
      exit(r.exit_status)
    end
  end
end
# Establishes the connection to the target host, interactively handling
# unknown SSH host keys and authentication failures (falling back to an
# interactive password prompt where possible, then retrying).
def connect!
  ui.info("Connecting to #{ui.color(server_name, :bold)} using #{connection_protocol}")
  opts ||= connection_opts.dup
  do_connect(opts)
rescue Train::Error => e
  # We handle these by message text only because train only loads the
  # transports and protocols that it needs - so the exceptions may not be defined,
  # and we don't want to require files internal to train.
  if e.message =~ /fingerprint (\S+) is unknown for "(.+)"/ # Train::Transports::SSHFailed
    fingerprint = $1
    hostname, ip = $2.split(",")
    # TODO: convert the SHA256 base64 value to hex with colons
    # 'ssh' example output:
    # RSA key fingerprint is e5:cb:c0:e2:21:3b:12:52:f8:ce:cb:00:24:e2:0c:92.
    # ECDSA key fingerprint is 5d:67:61:08:a9:d7:01:fd:5e:ae:7e:09:40:ef:c0:3c.
    # will exit 3 on N
    ui.confirm <<~EOM
      The authenticity of host '#{hostname} (#{ip})' can't be established.
      fingerprint is #{fingerprint}.
      Are you sure you want to continue connecting
    EOM
    # Operator accepted the key: allow it on the retried connection.
    # FIXME: this should save the key to known_hosts but doesn't appear to be
    config[:ssh_verify_host_key] = :accept_new
    conn_opts = connection_opts(reset: true)
    opts.merge! conn_opts
    retry
  elsif (ssh? && e.cause && e.cause.class == Net::SSH::AuthenticationFailed) || (ssh? && e.class == Train::ClientError && e.reason == :no_ssh_password_or_key_available)
    # Key-based auth failed. If we were ALREADY doing password auth, the
    # password is simply wrong — re-raise. Otherwise prompt and retry.
    if connection.password_auth?
      raise
    else
      ui.warn("Failed to authenticate #{opts[:user]} to #{server_name} - trying password auth")
      password = ui.ask("Enter password for #{opts[:user]}@#{server_name}:", echo: false)
    end
    opts.merge! force_ssh_password_opts(password)
    retry
  else
    raise
  end
rescue RuntimeError => e
  # WinRM surfaces a missing password as a RuntimeError with this exact message.
  if winrm? && e.message == "password is a required option"
    if connection.password_auth?
      raise
    else
      ui.warn("Failed to authenticate #{opts[:user]} to #{server_name} - trying password auth")
      password = ui.ask("Enter password for #{opts[:user]}@#{server_name}:", echo: false)
    end
    opts.merge! force_winrm_password_opts(password)
    retry
  else
    raise
  end
end
# Hook for plugins to intercept or augment SSH errors; default is a no-op.
def handle_ssh_error(e); end
# url values override CLI flags, if you provide both
# we'll use the one that you gave in the URL.
#
# @return [String] "ssh" or "winrm" (memoized)
def connection_protocol
  @connection_protocol ||= begin
    url_match = %r{^(.*)://}.match(host_descriptor) if host_descriptor
    from_url = url_match && url_match[1]
    from_url || config[:connection_protocol] || "ssh"
  end
end
# Creates the TrainConnector and connects, retrying to recover from sudo
# pty requirements and bad/missing sudo passwords (up to 3 prompts).
#
# @param conn_options [Hash] train connection options (mutated across retries)
def do_connect(conn_options)
  @connection = TrainConnector.new(host_descriptor, connection_protocol, conn_options)
  connection.connect!
rescue Train::UserError => e
  # `limit ||= 1` initializes retry state on first entry into the rescue;
  # subsequent `retry`s preserve the incremented value.
  limit ||= 1
  if !conn_options.key?(:pty) && e.reason == :sudo_no_tty
    ui.warn("#{e.message} - trying with pty request")
    conn_options[:pty] = true # ensure we can talk to systems with requiretty set true in sshd config
    retry
  elsif config[:use_sudo_password] && (e.reason == :sudo_password_required || e.reason == :bad_sudo_password) && limit < 3
    ui.warn("Failed to authenticate #{conn_options[:user]} to #{server_name} - #{e.message} \n sudo: #{limit} incorrect password attempt")
    sudo_password = ui.ask("Enter sudo password for #{conn_options[:user]}@#{server_name}:", echo: false)
    limit += 1
    conn_options[:sudo_password] = sudo_password
    retry
  else
    raise
  end
end
# Fail if both first_boot_attributes and first_boot_attributes_from_file
# are set.
#
# @return [TrueClass] when the options are valid
# @raise [Chef::Exceptions::BootstrapCommandInputError] when both are given
def validate_first_boot_attributes!
  both_given = @config[:first_boot_attributes] && @config[:first_boot_attributes_from_file]
  raise Chef::Exceptions::BootstrapCommandInputError if both_given

  true
end
# FIXME: someone needs to clean this up properly: https://github.com/chef/chef/issues/9645
# This code is deliberately left without an abstraction around deprecating the config options to avoid knife plugins from
# using those methods (which will need to be deprecated and break them) via inheritance (ruby does not have a true `private`
# so the lack of any inheritable implementation is because of that).
#
# Resolves the WinRM auth method: explicit --winrm-auth-method, then the
# deprecated winrm_authentication_protocol config, then "negotiate".
def winrm_auth_method
  if config.key?(:winrm_auth_method)
    config[:winrm_auth_method]
  elsif config.key?(:winrm_authentication_protocol)
    # BUG FIX: the fallback previously probed the misspelled key
    # :winrm_authentications_protocol (extra "s") while reading
    # :winrm_authentication_protocol, so the deprecated option was
    # never honored. Probe and read the same key.
    config[:winrm_authentication_protocol]
  else
    "negotiate"
  end
end
# Resolves the host-key verification mode: explicit --ssh-verify-host-key,
# then the deprecated host_key_verify config, then "always".
def ssh_verify_host_key
  if config.key?(:ssh_verify_host_key)
    config[:ssh_verify_host_key]
  elsif config.key?(:host_key_verify)
    config[:host_key_verify]
  else
    "always"
  end
end
# Fail if using plaintext auth without ssl because
# this can expose keys in plaintext on the wire.
# TODO test for this method
# TODO check that the protocol is valid.
#
# Only applies to validatorless bootstraps, where the client key would
# travel over the WinRM channel.
#
# @return [TrueClass] when the transport options are acceptable (exits otherwise)
def validate_winrm_transport_opts!
  return true unless winrm?

  if Chef::Config[:validation_key] && !File.exist?(File.expand_path(Chef::Config[:validation_key]))
    if winrm_auth_method == "plaintext" &&
        config[:winrm_ssl] != true
      # BUG FIX: the message previously named a bogus "---winrm-ssl" flag
      # (triple dash); the real flag is --winrm-ssl.
      ui.error <<~EOM
        Validatorless bootstrap over unsecure winrm channels could expose your
        key to network sniffing.
        Please use a 'winrm_auth_method' other than 'plaintext',
        or enable ssl on #{server_name} then use the --winrm-ssl flag
        to connect.
      EOM
      exit 1
    end
  end
  true
end
# fail if the server_name is nil
def validate_name_args!
  return unless server_name.nil?

  ui.error("Must pass an FQDN or ip to bootstrap")
  exit 1
end
# Ensure options are valid by checking policyfile values.
#
# The method call will cause the program to exit(1) if:
# * Only one of --policy-name and --policy-group is specified
# * Policyfile options are set and --run-list is set as well
#
# @return [TrueClass] If options are valid.
def validate_policy_options!
  if incomplete_policyfile_options?
    ui.error("--policy-name and --policy-group must be specified together")
    exit 1
  end
  if policyfile_and_run_list_given?
    ui.error("Policyfile options and --run-list are exclusive")
    exit 1
  end
end
# Ensure a valid protocol is provided for target host connection
#
# The method call will cause the program to exit(1) if:
# * Conflicting protocols are given via the target URI and the --protocol option
# * The protocol is not a supported protocol
#
# @return [TrueClass] If options are valid.
def validate_protocol!
  from_cli = config[:connection_protocol]
  # connection_protocol prefers the URL scheme, so a mismatch here means
  # the operator supplied both forms and they disagree.
  if from_cli && connection_protocol != from_cli
    # Hanging indent to align with the ERROR: prefix
    ui.error <<~EOM
      The URL '#{host_descriptor}' indicates protocol is '#{connection_protocol}'
      while the --protocol flag specifies '#{from_cli}'. Please include
      only one or the other.
    EOM
    exit 1
  end
  unless SUPPORTED_CONNECTION_PROTOCOLS.include?(connection_protocol)
    ui.error <<~EOM
      Unsupported protocol '#{connection_protocol}'.
      Supported protocols are: #{SUPPORTED_CONNECTION_PROTOCOLS.join(" ")}
    EOM
    exit 1
  end
  true
end
# Validate any additional options
#
# Plugins that subclass bootstrap, e.g. knife-ec2, can use this method to validate any additional options before any other actions are executed
#
# @return [TrueClass] If options are valid or exits
def plugin_validate_options!
  # Base implementation accepts everything; subclasses override.
  true
end
# Create the server that we will bootstrap, if necessary
#
# Plugins that subclass bootstrap, e.g. knife-ec2, can use this method to call out to an API to build an instance of the server we wish to bootstrap
#
# @return [TrueClass] If instance successfully created, or exits
def plugin_create_instance!
  # Base implementation assumes the target host already exists.
  true
end
# Perform any setup necessary by the plugin
#
# Plugins that subclass bootstrap, e.g. knife-ec2, can use this method to create connection objects
#
# @return [TrueClass] If instance successfully created, or exits
def plugin_setup!; end
# Perform any teardown or cleanup necessary by the plugin
#
# Plugins that subclass bootstrap, e.g. knife-ec2, can use this method to display a message or perform any cleanup
#
# @return [void]
def plugin_finalize; end
# Warn when the configured session timeout looks like it was given in
# minutes (a holdover from --winrm-session-timeout, which used minutes as
# its unit) rather than seconds, so the operator is not surprised.
def warn_on_short_session_timeout
  timeout = session_timeout
  return unless timeout && timeout <= 15

  ui.warn <<~EOM
    You provided '--session-timeout #{timeout}' second(s).
    Did you mean '--session-timeout #{timeout * 60}' seconds?
  EOM
end
# Warn the operator when WinRM HTTPS certificate verification is fully
# disabled: no CA trust file, no peer fingerprint, and verification
# explicitly turned off. No-op for non-WinRM connections.
#
# NOTE(review): the warning text suggests `--winrm-no-verify-cert` as a
# workaround, but that is the flag that triggers this state — the wording
# may be inverted; confirm intent.
def winrm_warn_no_ssl_verification
return unless winrm?
# REVIEWER NOTE
# The original check from knife plugin did not include winrm_ssl_peer_fingerprint
# Reference:
# https://github.com/chef/knife-windows/blob/92d151298142be4a4750c5b54bb264f8d5b81b8a/lib/chef/knife/winrm_knife_base.rb#L271-L273
# TODO Seems like we should also do a similar warning if ssh_verify_host == false
if config[:ca_trust_file].nil? &&
config[:winrm_no_verify_cert] &&
config[:winrm_ssl_peer_fingerprint].nil?
ui.warn <<~WARN
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
SSL validation of HTTPS requests for the WinRM transport is disabled.
HTTPS WinRM connections are still encrypted, but knife is not able
to detect forged replies or spoofing attacks.
To work around this issue you can use the flag `--winrm-no-verify-cert`
or add an entry like this to your knife configuration file:
# Verify all WinRM HTTPS connections
knife[:winrm_no_verify_cert] = true
You can also specify a ca_trust_file via --ca-trust-file,
or the expected fingerprint of the target host's certificate
via --winrm-ssl-peer-fingerprint.
WARN
end
end
# @return [Hash] a configuration hash suitable for connecting to the
#   remote host via train. The result is memoized; pass reset: true to
#   force a rebuild from the current config.
def connection_opts(reset: false)
  if @connection_opts.nil? || reset == true
    # Merge order matters: later option groups win on key conflicts.
    @connection_opts = [
      base_opts,
      host_verify_opts,
      gateway_opts,
      sudo_opts,
      winrm_opts,
      ssh_opts,
      ssh_identity_opts,
    ].reduce({}) { |acc, group| acc.merge!(group) }
  end
  @connection_opts
end
# True when the resolved connection protocol is WinRM.
def winrm?
connection_protocol == "winrm"
end
# True when the resolved connection protocol is SSH.
def ssh?
connection_protocol == "ssh"
end
# Common configuration for all protocols: logger, credentials, port and
# readiness timeout shared by SSH and WinRM connections.
def base_opts
  opts = { logger: Chef::Log }
  opts[:password] = config[:connection_password] if config.key?(:connection_password)
  user = config_for_protocol(:user)
  opts[:user] = user if user
  opts[:max_wait_until_ready] = config[:max_wait].to_f unless config[:max_wait].nil?
  # TODO - when would we need to provide rdp_port vs port? Or are they not mutually exclusive?
  port = config_for_protocol(:port)
  opts[:port] = port if port
  opts
end
# Host-key / certificate verification options for the active protocol.
def host_verify_opts
  return { self_signed: config[:winrm_no_verify_cert] === true } if winrm?

  if ssh?
    # Fall back to the old knife config key name for back compat.
    return { verify_host_key: ssh_verify_host_key }
  end

  {}
end
# SSH-specific connection options; empty for WinRM connections.
def ssh_opts
  return {} if winrm?

  {
    non_interactive: true, # Prevent password prompts from underlying net/ssh
    forward_agent: config[:ssh_forward_agent] === true,
    connection_timeout: session_timeout,
  }
end
# SSH key-based authentication options: which identity files to offer and
# whether password auth should be disabled (keys_only). Empty for WinRM.
def ssh_identity_opts
opts = {}
return opts if winrm?
identity_file = config[:ssh_identity_file]
if identity_file
opts[:key_files] = [identity_file]
# We only set keys_only based on the explicit ssh_identity_file;
# someone may use a gateway key and still expect password auth
# on the target. Similarly, someone may have a default key specified
# in knife config, but have provided a password on the CLI.
# REVIEW NOTE: this is a new behavior. Originally, ssh_identity_file
# could only be populated from CLI options, so there was no need to check
# for this. We will also set keys_only to false only if there are keys
# and no password.
# If both are present, train(via net/ssh) will prefer keys, falling back to password.
# Reference: https://github.com/chef/chef/blob/master/lib/chef/knife/ssh.rb#L272
opts[:keys_only] = config.key?(:connection_password) == false
else
opts[:key_files] = []
opts[:keys_only] = false
end
# The gateway identity is only relevant when a gateway is configured.
gateway_identity_file = config[:ssh_gateway] ? config[:ssh_gateway_identity] : nil
unless gateway_identity_file.nil?
opts[:key_files] << gateway_identity_file
end
opts
end
# Options for connecting through an intermediate SSH gateway ("bastion"),
# parsed from config[:ssh_gateway] in the form "[user@]host[:port]".
#
# @return [Hash] :bastion_host / :bastion_user / :bastion_port entries,
#   or {} when no gateway is configured.
def gateway_opts
  opts = {}
  if config[:ssh_gateway]
    split = config[:ssh_gateway].split("@", 2)
    if split.length == 1
      gw_host = split[0]
    else
      gw_user = split[0]
      gw_host = split[1]
    end
    gw_host, gw_port = gw_host.split(":", 2)
    # TODO - validate convertible port in config validation?
    # Integer(..., exception: false) yields nil for a missing or
    # non-numeric port instead of hiding every error behind an inline
    # `rescue nil` (which would also swallow unrelated exceptions).
    gw_port = Integer(gw_port, exception: false)
    opts[:bastion_host] = gw_host
    opts[:bastion_user] = gw_user
    opts[:bastion_port] = gw_port
  end
  opts
end
# use_sudo - tells bootstrap to use the sudo command to run bootstrap
# use_sudo_password - tells bootstrap to use the sudo command to run bootstrap
# and to use the password specified with --password
# TODO: I'd like to make our sudo options sane:
# --sudo (bool) - use sudo
# --sudo-password PASSWORD (default: :password) - use this password for sudo
# --sudo-options "opt,opt,opt" to pass into sudo
# --sudo-command COMMAND sudo command other than sudo
# REVIEW NOTE: knife bootstrap did not pull sudo values from Chef::Config,
# should we change that for consistency?
def sudo_opts
  return {} if winrm?
  return { sudo: false } unless config[:use_sudo]

  opts = { sudo: true }
  opts[:sudo_password] = config[:connection_password] if config[:use_sudo_password]
  opts[:sudo_options] = "-H" if config[:preserve_home]
  opts
end
# WinRM-specific connection options; empty for SSH connections.
#
# @return [Hash] train options for the WinRM transport
def winrm_opts
  return {} unless winrm?

  opts = {
    winrm_transport: winrm_auth_method, # winrm gem and train calls auth method 'transport'
    winrm_basic_auth_only: config[:winrm_basic_auth_only] || false,
    ssl: config[:winrm_ssl] === true,
    ssl_peer_fingerprint: config[:winrm_ssl_peer_fingerprint],
  }
  if winrm_auth_method == "kerberos"
    opts[:kerberos_service] = config[:kerberos_service] if config[:kerberos_service]
    # BUGFIX: the realm was previously guarded on config[:kerberos_service]
    # (apparent copy-paste), so a realm supplied without a service name was
    # silently dropped.
    opts[:kerberos_realm] = config[:kerberos_realm] if config[:kerberos_realm]
  end
  if config[:ca_trust_file]
    opts[:ca_trust_path] = config[:ca_trust_file]
  end
  opts[:operation_timeout] = session_timeout
  opts
end
# Config overrides that force password authentication over SSH, disabling
# key-based auth entirely.
def force_ssh_password_opts(password)
  {
    non_interactive: false,
    keys_only: false,
    key_files: [],
    auth_methods: %i{password keyboard_interactive},
  }.merge(password: password)
end

# Config overrides that force password authentication over WinRM.
def force_winrm_password_opts(password)
  { password: password }
end
# This is for deprecating config options. The fallback_key can be used
# to pull an old knife config option out of the config file when the
# cli value has been renamed. This is different from the deprecated
# cli values, since these are for config options that have no corresponding
# cli value.
#
# DO NOT USE - this whole API is considered deprecated
#
# @api deprecated
#
# @param key [Symbol] the current (merged) config key to look up first.
# @param fallback_key [Symbol, nil] legacy config key checked second.
# @param default [Object] returned when neither key is present.
def config_value(key, fallback_key = nil, default = nil)
Chef.deprecated(:knife_bootstrap_apis, "Use of config_value is deprecated. Knife plugin authors should access the config hash directly, which does correct merging of cli and config options.")
if config.key?(key)
# the first key is the primary key so we check the merged hash first
config[key]
elsif config.key?(fallback_key)
# we get the old config option here (the deprecated cli option shouldn't exist)
# NOTE: when fallback_key is nil, config.key?(nil) is simply false and
# the default is returned.
config[fallback_key]
else
default
end
end
# Upload the rendered bootstrap script to the remote host's temp dir.
#
# @param content [String] the script body to upload.
# @return [String] the remote path the script was written to.
def upload_bootstrap(content)
  script_name = connection.windows? ? "bootstrap.bat" : "bootstrap.sh"
  connection.normalize_path(File.join(connection.temp_dir, script_name)).tap do |remote_path|
    connection.upload_file_content!(content, remote_path)
  end
end
# build the command string for bootstrapping
# @return [String] the command to execute the uploaded bootstrap script
def bootstrap_command(remote_path)
  return "cmd.exe /C #{remote_path}" if connection.windows?

  cmd = "sh #{remote_path}"
  if config[:su_user]
    # su - USER is subject to required an interactive console
    # Otherwise, it will raise: su: must be run from a terminal
    set_transport_options(pty: true)
    cmd = "su - #{config[:su_user]} -c '#{cmd}'"
    cmd = "sudo " << cmd if config[:use_sudo]
  end
  cmd
end
private
# To avoid cluttering the CLI options, some flags (such as port and user)
# are shared between protocols. However, there is still a need to allow the operator
# to specify defaults separately, since they may not be the same values for different
# protocols.
# These keys are available in Chef::Config, and are prefixed with the protocol name.
# For example, :user CLI option will map to :winrm_user and :ssh_user Chef::Config keys,
# based on the connection protocol in use.
# @api private
def config_for_protocol(option)
  shared_key = option == :port ? :connection_port : :connection_user
  config[shared_key] || config[knife_key_for_protocol(option)]
end
# @api private
# Map a shared option name to its protocol-prefixed Chef::Config key,
# e.g. :user -> :ssh_user or :winrm_user.
def knife_key_for_protocol(option)
  :"#{connection_protocol}_#{option}"
end
# True if policy_name and run_list are both given
def policyfile_and_run_list_given?
  policyfile_options_given? && run_list_given?
end

# True when a non-empty run list was supplied.
def run_list_given?
  run_list = config[:run_list]
  !(run_list.nil? || run_list.empty?)
end

# True when a Policyfile name was supplied.
def policyfile_options_given?
  !!config[:policy_name]
end
# True if one of policy_name or policy_group was given, but not both
def incomplete_policyfile_options?
  !!config[:policy_name] != !!config[:policy_group]
end
# session_timeout option has a default that may not arrive, particularly if
# we're being invoked from a plugin that doesn't merge_config.
#
# @return [Integer] the configured timeout coerced to seconds, or the
#   option's declared default when not configured.
def session_timeout
  raw = config[:session_timeout]
  raw.nil? ? options[:session_timeout][:default] : raw.to_i
end
# Train::Transports::SSH::Connection#transport_options
# Append the options to connection transport_options
#
# BUGFIX: the guard previously read `return unless opts.is_a?(Hash) || !opts.empty?`,
# which never returns early for a Hash (even an empty one) and crashes with
# NoMethodError for arguments that don't respond to #empty?. The intent is
# to proceed only for a non-empty Hash.
#
# @param opts [Hash] the opts to be added to connection transport_options.
# @return [Hash, nil] the merged transport_options, or nil when skipped.
def set_transport_options(opts)
  return unless opts.is_a?(Hash) && !opts.empty?

  connection&.connection&.transport_options&.merge! opts
end
end
end
end
|
module CloudQueues
# Gem version constant (pre-release).
VERSION = "0.0.1"
end
Release version 1.0.0
module CloudQueues
# Gem version constant (first stable release).
VERSION = "1.0.0"
end
|
module CobotClient
# Gem version constant.
VERSION = "0.5.2"
end
Bump version from 0.5.2 to 0.6.0
module CobotClient
# Gem version constant (minor bump from 0.5.2).
VERSION = "0.6.0"
end
|
module CodeRay module Scanners
# by Josh Goebel
# Token scanner for SQL source code: a two-state machine (:initial for
# regular code, :string inside quoted literals) that appends
# [text_or_marker, kind] pairs to the tokens array.
class SQL < Scanner
register_for :sql
# Keywords highlighted as reserved words.
RESERVED_WORDS = %w(
create table index trigger drop primary key set select
insert update delete replace into
on from values before and or if exists case when
then else as group order by avg where
join inner outer union engine not
like end using collate show columns begin
)
# Built-in column/value type names.
PREDEFINED_TYPES = %w(
char varchar enum binary text tinytext mediumtext
longtext blob tinyblob mediumblob longblob timestamp
date time datetime year double decimal float int
integer tinyint mediumint bigint smallint unsigned bit
bool boolean hex bin oct
)
PREDEFINED_FUNCTIONS = %w( sum cast abs pi count min max avg )
DIRECTIVES = %w( auto_increment unique default charset )
PREDEFINED_CONSTANTS = %w( null true false )
# Case-insensitive lookup table mapping an identifier to its token kind;
# anything not registered above scans as plain :ident.
IDENT_KIND = CaseIgnoringWordList.new(:ident).
add(RESERVED_WORDS, :reserved).
add(PREDEFINED_TYPES, :pre_type).
add(PREDEFINED_CONSTANTS, :pre_constant).
add(PREDEFINED_FUNCTIONS, :predefined).
add(DIRECTIVES, :directive)
# Character, hex and octal escape sequences recognized inside strings.
ESCAPE = / [rbfnrtv\n\\\/'"] | x[a-fA-F0-9]{1,2} | [0-7]{1,3} | . /mx
UNICODE_ESCAPE = / u[a-fA-F0-9]{4} | U[a-fA-F0-9]{8} /x
# Optional literal prefix such as x'..', n'..', b'..' or a charset _utf8'..'.
STRING_PREFIXES = /[xnb]|_\w+/i
# Tokenize the input; returns the +tokens+ array it was given.
def scan_tokens tokens, options
state = :initial
string_type = nil
string_content = ''
until eos?
kind = nil
match = nil
if state == :initial
if scan(/ \s+ | \\\n /x)
kind = :space
elsif scan(/^(?:--\s?|#).*/)
kind = :comment
elsif scan(%r! /\* (?: .*? \*/ | .* ) !mx)
kind = :comment
elsif scan(/ [-+*\/=<>;,!&^|()\[\]{}~%] | \.(?!\d) /x)
kind = :operator
elsif scan(/(#{STRING_PREFIXES})?([`"'])/o)
prefix = self[1]
string_type = self[2]
tokens << [:open, :string]
tokens << [prefix, :modifier] if prefix
match = string_type
state = :string
kind = :delimiter
elsif match = scan(/ @? [A-Za-z_][A-Za-z_0-9]* /x)
kind = match[0] == ?@ ? :variable : IDENT_KIND[match.downcase]
elsif scan(/0[xX][0-9A-Fa-f]+/)
kind = :hex
elsif scan(/0[0-7]+(?![89.eEfF])/)
kind = :oct
elsif scan(/(?>\d+)(?![.eEfF])/)
kind = :integer
elsif scan(/\d[fF]|\d*\.\d+(?:[eE][+-]?\d+)?|\d+[eE][+-]?\d+/)
kind = :float
else
getch
kind = :error
end
elsif state == :string
if match = scan(/[^\\"'`]+/)
string_content << match
next
elsif match = scan(/["'`]/)
if string_type == match
if peek(1) == string_type # doubling means escape
string_content << string_type << getch
next
end
unless string_content.empty?
tokens << [string_content, :content]
string_content = ''
end
tokens << [matched, :delimiter]
tokens << [:close, :string]
state = :initial
string_type = nil
next
else
string_content << match
end
next
elsif scan(/ \\ (?: #{ESCAPE} | #{UNICODE_ESCAPE} ) /mox)
unless string_content.empty?
tokens << [string_content, :content]
string_content = ''
end
kind = :char
elsif match = scan(/ \\ . /mox)
string_content << match
next
elsif scan(/ \\ | $ /x)
unless string_content.empty?
tokens << [string_content, :content]
string_content = ''
end
kind = :error
state = :initial
else
# NOTE(review): raise with a String first argument does not accept a
# second argument (String has no #exception(message)), so this would
# raise TypeError rather than the intended error — presumably
# raise_inspect was meant here; confirm.
raise "else case \" reached; %p not handled." % peek(1), tokens
end
else
raise 'else-case reached', tokens
end
match ||= matched
unless kind
raise_inspect 'Error token %p in line %d' %
[[match, kind], line], tokens, state
end
raise_inspect 'Empty token', tokens unless match
tokens << [match, kind]
end
tokens
end
end
end end
More cleanup.
git-svn-id: 3003a0d67ecddf9b67dc4af6cf35c502b83f6d3b@334 282260fa-4eda-c845-a9f0-6527b7353f92
module CodeRay module Scanners
# by Josh Goebel
# Token scanner for SQL source code (later revision of the scanner above):
# a two-state machine (:initial / :string) appending [text_or_marker, kind]
# pairs to the tokens array.
class SQL < Scanner
register_for :sql
# Keywords highlighted as reserved words.
RESERVED_WORDS = %w(
create table index trigger drop primary key set select
insert update delete replace into
on from values before and or if exists case when
then else as group order by avg where
join inner outer union engine not
like end using collate show columns begin
)
# Built-in column/value type names.
PREDEFINED_TYPES = %w(
char varchar enum binary text tinytext mediumtext
longtext blob tinyblob mediumblob longblob timestamp
date time datetime year double decimal float int
integer tinyint mediumint bigint smallint unsigned bit
bool boolean hex bin oct
)
PREDEFINED_FUNCTIONS = %w( sum cast abs pi count min max avg )
DIRECTIVES = %w( auto_increment unique default charset )
PREDEFINED_CONSTANTS = %w( null true false )
# Case-insensitive identifier -> token-kind lookup table.
IDENT_KIND = CaseIgnoringWordList.new(:ident).
add(RESERVED_WORDS, :reserved).
add(PREDEFINED_TYPES, :pre_type).
add(PREDEFINED_CONSTANTS, :pre_constant).
add(PREDEFINED_FUNCTIONS, :predefined).
add(DIRECTIVES, :directive)
# Character, hex and octal escape sequences recognized inside strings.
ESCAPE = / [rbfnrtv\n\\\/'"] | x[a-fA-F0-9]{1,2} | [0-7]{1,3} | . /mx
UNICODE_ESCAPE = / u[a-fA-F0-9]{4} | U[a-fA-F0-9]{8} /x
# Optional literal prefix such as x'..', n'..', b'..' or a charset _utf8'..'.
STRING_PREFIXES = /[xnb]|_\w+/i
# Tokenize the input; returns the +tokens+ array it was given.
def scan_tokens tokens, options
state = :initial
string_type = nil
string_content = ''
until eos?
kind = nil
match = nil
if state == :initial
if scan(/ \s+ | \\\n /x)
kind = :space
elsif scan(/^(?:--\s?|#).*/)
kind = :comment
elsif scan(%r! /\* (?: .*? \*/ | .* ) !mx)
kind = :comment
elsif scan(/ [-+*\/=<>;,!&^|()\[\]{}~%] | \.(?!\d) /x)
kind = :operator
elsif scan(/(#{STRING_PREFIXES})?([`"'])/o)
prefix = self[1]
string_type = self[2]
tokens << [:open, :string]
tokens << [prefix, :modifier] if prefix
match = string_type
state = :string
kind = :delimiter
elsif match = scan(/ @? [A-Za-z_][A-Za-z_0-9]* /x)
kind = match[0] == ?@ ? :variable : IDENT_KIND[match.downcase]
elsif scan(/0[xX][0-9A-Fa-f]+/)
kind = :hex
elsif scan(/0[0-7]+(?![89.eEfF])/)
kind = :oct
elsif scan(/(?>\d+)(?![.eEfF])/)
kind = :integer
elsif scan(/\d[fF]|\d*\.\d+(?:[eE][+-]?\d+)?|\d+[eE][+-]?\d+/)
kind = :float
else
getch
kind = :error
end
elsif state == :string
if match = scan(/[^\\"'`]+/)
string_content << match
next
elsif match = scan(/["'`]/)
if string_type == match
if peek(1) == string_type # doubling means escape
string_content << string_type << getch
next
end
unless string_content.empty?
tokens << [string_content, :content]
string_content = ''
end
tokens << [matched, :delimiter]
tokens << [:close, :string]
state = :initial
string_type = nil
next
else
string_content << match
end
next
elsif scan(/ \\ (?: #{ESCAPE} | #{UNICODE_ESCAPE} ) /mox)
unless string_content.empty?
tokens << [string_content, :content]
string_content = ''
end
kind = :char
elsif match = scan(/ \\ . /mox)
string_content << match
next
elsif scan(/ \\ | $ /x)
unless string_content.empty?
tokens << [string_content, :content]
string_content = ''
end
kind = :error
state = :initial
else
# NOTE(review): raise with a String first argument plus a second
# argument is invalid (String has no #exception(message)); presumably
# raise_inspect was intended — confirm.
raise "else case \" reached; %p not handled." % peek(1), tokens
end
else
raise 'else-case reached', tokens
end
match ||= matched
unless kind
raise_inspect 'Error token %p in line %d' %
[[match, kind], line], tokens, state
end
raise_inspect 'Empty token', tokens unless match
tokens << [match, kind]
end
tokens
end
end
end end
require 'resque/tasks'
# Load the Rails environment and the worker classes before Resque's rake
# tasks (e.g. resque:work) run.
task 'resque:setup' => :environment do
require './app/workers/resque_worker'
end
Add a default Resque QUEUE to run
require 'resque/tasks'
# Load the Rails environment and the worker classes before Resque's rake
# tasks (e.g. resque:work) run.
task 'resque:setup' => :environment do
# Add a default to run every queue, unless otherwise specified
ENV['QUEUE'] = '*' unless ENV.include?('QUEUE')
require './app/workers/resque_worker'
end
|
Added Richards benchmark
require 'benchmark'
require 'benchmark/ips'
module Richards
IDLE = 0
WORKER = 1
HANDLERA = 2
HANDLERB = 3
DEVICEA = 4
DEVICEB = 5
MAXTASKS = 6
# Build the standard Richards task mix (one idle task, one worker, two
# handlers, two devices), each seeded with its initial packet queue, then
# run the scheduler. +count+ is the idle task's iteration budget, which
# bounds the whole run.
def self.main(count)
s = Scheduler.new
s.addIdleTask(IDLE, 0, nil, count)
wkq = Packet.new(nil, WORKER, :work)
wkq = Packet.new(wkq, WORKER, :work)
s.addWorkerTask(WORKER, 1000, wkq)
wkq = Packet.new(nil, DEVICEA, :device)
wkq = Packet.new(wkq, DEVICEA, :device)
wkq = Packet.new(wkq, DEVICEA, :device)
s.addHandlerTask(HANDLERA, 2000, wkq)
wkq = Packet.new(nil, DEVICEB, :device)
wkq = Packet.new(wkq, DEVICEB, :device)
wkq = Packet.new(wkq, DEVICEB, :device)
s.addHandlerTask(HANDLERB, 3000, wkq)
s.addDeviceTask(DEVICEA, 4000, nil)
s.addDeviceTask(DEVICEB, 5000, nil)
s.schedule
end
# Priority scheduler: maintains a linked list of task control blocks (Tcbs)
# and routes packets between tasks until every task is held or suspended.
class Scheduler
attr_reader :holdCount, :queueCount
def initialize
@table = Array.new(MAXTASKS,nil)
@list = nil
@queueCount = 0
@holdCount = 0
end
# Mark the current task held and return the next Tcb in the list.
def holdCurrent
@holdCount += 1
@currentTcb.held
@currentTcb.link
end
# Route +packet+ to the task registered under packet.id.
# Returns nil when no task is registered for that id (empty @table slot).
def queue(packet)
if (task = @table.at(packet.id))
@queueCount += 1
packet.link = nil
packet.id = @currentId
task.checkPriorityAdd(@currentTcb,packet)
else
task
end
end
# Release the task with +id+ from its held state; return whichever of
# that task or the current one has higher priority.
def release(id)
task = @table.at(id)
task.notHeld
if task.pri > @currentTcb.pri
task
else
@currentTcb
end
end
# Main loop: walk the task list, skipping held/suspended tasks and
# running the rest, until no task remains to visit.
def schedule
@currentTcb = @list
while @currentTcb
if @currentTcb.isHeldOrSuspended?
@currentTcb = @currentTcb.link
else
@currentId = @currentTcb.id
@currentTcb = @currentTcb.run
end
end
end
def suspendCurrent
@currentTcb.suspended
end
def addDeviceTask(id,pri,wkq)
createTcb(id,pri,wkq, DeviceTask.new(self))
end
def addHandlerTask(id,pri,wkq)
createTcb(id,pri,wkq, HandlerTask.new(self))
end
def addIdleTask(id,pri,wkq,count)
createRunningTcb(id,pri,wkq, IdleTask.new(self,1,count))
end
def addWorkerTask(id,pri,wkq)
createTcb(id,pri,wkq, WorkerTask.new(self,HANDLERA,0))
end
def createRunningTcb(id,pri,wkq,task)
createTcb(id,pri,wkq,task)
@currentTcb.setRunning
end
# Prepend a new Tcb to the task list and index it by id.
def createTcb(id,pri,wkq,task)
@currentTcb = Tcb.new(@list,id,pri,wkq,task)
@list = @currentTcb
@table[id] = @currentTcb
end
end
# Simulated device: buffers one packet (@v1) and forwards it back to the
# scheduler on the next run without a packet.
class DeviceTask
def initialize(scheduler)
@scheduler = scheduler
end
def run(packet)
if packet
# Buffer the packet and hold until the next scheduling round.
@v1 = packet
@scheduler.holdCurrent
else
if @v1
pkt = @v1
@v1 = nil
@scheduler.queue(pkt)
else
@scheduler.suspendCurrent
end
end
end
end
# Handler: keeps two queues — work packets (@v1) and device packets (@v2) —
# copying characters from the current work packet into device packets one
# at a time.
class HandlerTask
def initialize(scheduler)
@scheduler = scheduler
end
def run(packet)
if packet
if packet.kind == :work
@v1 = packet.addTo(@v1)
else
@v2 = packet.addTo(@v2)
end
end
if @v1
# a1 is the index of the next character in the work packet's buffer.
if ((count = @v1.a1) < 4 )
if @v2
v = @v2
@v2 = @v2.link
v.a1 = @v1.a2.at(count)
@v1.a1 = count+1
return @scheduler.queue(v)
end
else
# Work packet exhausted: recycle it back to its producer.
v = @v1
@v1 = @v1.link
return @scheduler.queue(v)
end
end
@scheduler.suspendCurrent
end
end
# Idle task: releases DEVICEA or DEVICEB on each tick, driven by a simple
# pseudo-random bit sequence (@v1); @v2 counts remaining iterations and
# bounds the benchmark.
class IdleTask
def initialize(scheduler,v1,v2)
@scheduler = scheduler
@v1 = v1
@v2 = v2
end
def run(packet)
if ( @v2 -= 1 ).zero?
@scheduler.holdCurrent
else
# Low bit of @v1 picks the device; the xor taps implement the
# benchmark's linear-feedback shift register sequence.
@scheduler.release(if (@v1 & 1).zero?
@v1 >>= 1
DEVICEA
else
@v1 >>= 1
@v1 ^= 0xD008
DEVICEB
end )
end
end
end
# Worker: fills each received work packet's buffer with letters from ALPHA
# and bounces the packet alternately between the two handlers.
class WorkerTask
ALPHA = "0ABCDEFGHIJKLMNOPQRSTUVWXYZ"
def initialize(scheduler,v1,v2)
@scheduler = scheduler
@v1 = v1
@v2 = v2
end
def run(packet)
if packet
# Alternate the destination handler for each packet.
@v1 = if ( @v1 == HANDLERA )
HANDLERB
else
HANDLERA
end
packet.id = @v1
packet.a1 = 0
# @v2 cycles through 1..26, indexing into ALPHA (index 0 is unused).
packet.a2.collect! {|x|
@v2 += 1
@v2 = 1 if @v2 > 26
ALPHA[@v2]
}
@scheduler.queue(packet)
else
@scheduler.suspendCurrent
end
end
end
# Task control block: a linked-list node pairing a task object with its
# work queue and a bit-encoded run state (RUNNING/RUNNABLE/SUSPENDED/HELD).
class Tcb
RUNNING = 0b0 # 0
RUNNABLE = 0b1 # 1
SUSPENDED = 0b10 # 2
HELD = 0b100 # 4
SUSPENDED_RUNNABLE = SUSPENDED | RUNNABLE # 3
NOT_HELD = ~HELD # -5
attr_reader :link, :id, :pri
def initialize(link, id, pri, wkq, task)
@link = link
@id = id
@pri = pri
@wkq = wkq
@task = task
# Tasks created with a pending work queue start suspended-runnable.
@state = if wkq then 0b11 else 0b10 end
# @state = if wkq then SUSPENDED_RUNNABLE else SUSPENDED end
@old = nil
end
# Append +packet+ to this task's work queue; when the queue was empty,
# also mark the task runnable and preempt +task+ if we outrank it.
def checkPriorityAdd(task,packet)
if @wkq
packet.addTo(@wkq)
else
@wkq = packet
@state |= 0b1 # RUNNABLE
return self if @pri > task.pri
end
task
end
# Run the task, feeding it the head of the work queue when in the
# suspended-runnable state (nil otherwise).
def run
@task.run(if @state == 0b11 # 3 # SUSPENDED_RUNNABLE
@old = @wkq
@wkq = @old.link
@state = @wkq ? 0b1 : 0b0 # RUNNABLE : RUNNING
@old
end )
end
def setRunning
@state = 0b0 # RUNNING
end
def suspended
@state |= 0b10 # 2 # SUSPENDED
self
end
def held
@state |= 0b100 # 4 #HELD
end
def notHeld
@state &= -5 # NOT_HELD
end
# Precomputed "skip this task" answer for each of the six possible
# state values: held, or suspended without being runnable.
PRECOMP = (0..5).collect{|state|
(state & 0b100 ) != 0 || state == 0b10
# (state & HELD) != 0 || state == SUSPENDED
}
def isHeldOrSuspended?
PRECOMP.at(@state)
end
end
# A unit of work passed between tasks. Packets form singly linked queues
# through their +link+ field; +a1+/+a2+ are scratch data (an index and a
# 4-slot buffer).
class Packet
  attr_accessor :link, :id, :kind, :a1
  attr_reader :a2

  def initialize(link, id, kind)
    @link = link
    @id = id
    @kind = kind
    @a1 = 0
    @a2 = Array.new(4, 0)
  end

  # Append self to the tail of +queue+, returning the queue head
  # (self when the queue was empty).
  def addTo(queue)
    @link = nil
    return self unless queue

    tail = queue
    tail = tail.link while tail.link
    tail.link = self
    queue
  end
end
end
# Measure iterations/second of a full Richards run with a 10000-iteration
# idle budget.
Benchmark.ips do |x|
x.report '#main(10000)' do
Richards.main(10000)
end
end
|
cern-ndiff 5.02.08 (new formula)
The CERN ndiff tool is a specialized diff tool for comparing numerical
differences in (large) text files. No test suite is defined for the
binary at the moment, which is why the test in this recipe is minimal. The
tool is primarily used for testing the Mad-X software, but it is generally
useful for anyone who needs to do advanced numerical difference
comparison.
Closes #46666.
Signed-off-by: Baptiste Fontaine <bfee279af59f3e3f71f7ce1fa037ea7b90f93cbf@yahoo.fr>
# Homebrew formula for CERN's ndiff numerical diff tool (built from the
# numdiff sub-tree of the Mad-X SVN repository).
class CernNdiff < Formula
desc "Numerical diff tool"
# Note: ndiff is a sub-project of Mad-X at the moment..
homepage "http://cern.ch/mad"
url "http://svn.cern.ch/guest/madx/tags/5.02.08/madX/tools/numdiff"
version "5.02.08"
head "http://svn.cern.ch/guest/madx/trunk/madX/tools/numdiff"
depends_on "cmake" => :build
def install
system "cmake", ".", *std_cmake_args
system "make", "install"
end
test do
# Compare two small numeric files under an absolute tolerance of 1e-6;
# numdiff exits non-zero (failing the test) if they differ beyond it.
(testpath/"lhs.txt").write("0.0 2e-3 0.003")
(testpath/"rhs.txt").write("1e-7 0.002 0.003")
(testpath/"test.cfg").write("* * abs=1e-6")
system "#{bin}/numdiff", "lhs.txt", "rhs.txt", "test.cfg"
end
end
|
require 'formula'
# Homebrew formula for class-dump, an Objective-C class-interface dumper.
class ClassDump < Formula
homepage 'http://stevenygard.com/projects/class-dump/'
head 'https://github.com/nygard/class-dump.git'
url 'https://github.com/nygard/class-dump/archive/3.5.tar.gz'
sha1 'c343bec63878161b02c956f49c9c1c8d989b4b5a'
depends_on :macos => :mavericks
def install
xcodebuild "-configuration", "Release", "SYMROOT=build", "PREFIX=#{prefix}", "ONLY_ACTIVE_ARCH=YES"
bin.install "build/Release/class-dump"
end
end
class-dump: add Xcode dep
require 'formula'
# Homebrew formula for class-dump, an Objective-C class-interface dumper.
# This revision declares the Xcode dependency required by xcodebuild.
class ClassDump < Formula
homepage 'http://stevenygard.com/projects/class-dump/'
head 'https://github.com/nygard/class-dump.git'
url 'https://github.com/nygard/class-dump/archive/3.5.tar.gz'
sha1 'c343bec63878161b02c956f49c9c1c8d989b4b5a'
depends_on :xcode
depends_on :macos => :mavericks
def install
xcodebuild "-configuration", "Release", "SYMROOT=build", "PREFIX=#{prefix}", "ONLY_ACTIVE_ARCH=YES"
bin.install "build/Release/class-dump"
end
end
|
# Homebrew formula for cmark, the CommonMark reference implementation.
class Commonmark < Formula
homepage "http://commonmark.org"
url "https://github.com/jgm/cmark/archive/0.18.tar.gz"
sha1 "dc45a70aec89c0a428321b8d0d29ee4933a7d562"
bottle do
cellar :any
sha1 "c02997d6624c6a9ef4e627ff789cb83127a17b97" => :yosemite
sha1 "9777bbeb2d36fd1fc901261167af0452ecd21622" => :mavericks
sha1 "af4136806400ffcf35f984fbd52f16b5bf08f2e6" => :mountain_lion
end
depends_on "cmake" => :build
depends_on :python3 => :build
def install
mkdir "build" do
system "cmake", "..", *std_cmake_args
system "make"
system "make", "test"
system "make", "install"
end
end
test do
# Render a tiny emphasis example and compare against the expected HTML.
test_input = "*hello, world*\n"
expected_output = "<p><em>hello, world</em></p>\n"
test_output = `/bin/echo -n "#{test_input}" | #{bin}/cmark`
assert_equal expected_output, test_output
end
end
commonmark: update 0.18 bottle.
# Homebrew formula for cmark, the CommonMark reference implementation.
# This revision only refreshes the bottle checksums for 0.18.
class Commonmark < Formula
homepage "http://commonmark.org"
url "https://github.com/jgm/cmark/archive/0.18.tar.gz"
sha1 "dc45a70aec89c0a428321b8d0d29ee4933a7d562"
bottle do
cellar :any
sha1 "d6649e3b9cc6a7a05c4bf37b5b77a66b0524dfa0" => :yosemite
sha1 "3108842f9e69cfcf0a3a9c3d29f1e638f73b12dc" => :mavericks
sha1 "9a06df359b9c7ba2f7ebf8048d9c6c59efdee04c" => :mountain_lion
end
depends_on "cmake" => :build
depends_on :python3 => :build
def install
mkdir "build" do
system "cmake", "..", *std_cmake_args
system "make"
system "make", "test"
system "make", "install"
end
end
test do
# Render a tiny emphasis example and compare against the expected HTML.
test_input = "*hello, world*\n"
expected_output = "<p><em>hello, world</em></p>\n"
test_output = `/bin/echo -n "#{test_input}" | #{bin}/cmark`
assert_equal expected_output, test_output
end
end
|
module ParameterHandler
# Gem version constant.
VERSION = '0.0.1'
end
Rename module from ParameterHandler to FillParams to match the gem name
module FillParams
# Gem version constant.
VERSION = '0.0.1'
end
|
Tog Oracle Template
Signed-off-by: tog - extensible open source social network platform <a6699d8e1fe3bc0b367b46652c29410a5c13c11f@linkingpaths.com>
# Rails application template: installs the tog_oracle plugin and its
# dependencies, wires up its routes, then generates and runs the install
# migration (migrating the plugin's schema to version 5).
gem "rufus-scheduler"
rake "gems:install"
plugin 'only_owner', :git => "git://github.com/balinterdi/only_owner.git"
plugin 'tog_oracle', :git => "git://github.com/tog/tog_oracle.git"
route "map.routes_from_plugin 'tog_oracle'"
# Timestamped so the generated migration sorts after any existing ones.
file "db/migrate/" + Time.now.strftime("%Y%m%d%H%M%S") + "_install_tog_oracle.rb",
%q{class InstallTogOracle < ActiveRecord::Migration
def self.up
migrate_plugin "tog_oracle", 5
end
def self.down
migrate_plugin "tog_oracle", 0
end
end
}
rake "db:migrate"
|
require 'formula'
# Homebrew formula for gdk-pixbuf, GTK's image-loading library.
class GdkPixbuf < Formula
homepage 'http://gtk.org'
url 'http://ftp.gnome.org/pub/gnome/sources/gdk-pixbuf/2.22/gdk-pixbuf-2.22.1.tar.bz2'
md5 '716c4593ead3f9c8cca63b8b1907a561'
depends_on 'glib'
depends_on 'jasper'
def install
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--with-libjasper",
"--enable-introspection=no"
system "make install"
end
end
gdk-pixbuf: add missing libtiff dependency
require 'formula'
# Homebrew formula for gdk-pixbuf, GTK's image-loading library.
# This revision adds the previously missing libtiff dependency.
class GdkPixbuf < Formula
homepage 'http://gtk.org'
url 'http://ftp.gnome.org/pub/gnome/sources/gdk-pixbuf/2.22/gdk-pixbuf-2.22.1.tar.bz2'
md5 '716c4593ead3f9c8cca63b8b1907a561'
depends_on 'glib'
depends_on 'jasper'
depends_on 'libtiff'
def install
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--with-libjasper",
"--enable-introspection=no"
system "make install"
end
end
|
# require "HDLRuby/hruby_base"
require "HDLRuby/hruby_low"
require "HDLRuby/hruby_types"
require "HDLRuby/hruby_values"
require "HDLRuby/hruby_bstr"
require 'set'
##
# High-level libraries for describing digital hardware.
#######################################################
module HDLRuby::High
# Base = HDLRuby::Base
Low = HDLRuby::Low
# Gets the infinity.
#
# @return [Object] the HDLRuby::Infinity singleton.
def infinity
  HDLRuby::Infinity
end
##
# Module providing extension of class.
# Copies every singleton method of another object onto the including
# object's eigenclass.
module SingletonExtend
# Adds the singleton contents of +obj+ to current eigen class.
#
# NOTE: conflicting existing singleton content will be overridden if
def eigen_extend(obj)
# puts "eigen_extend for #{self} class=#{self.class}"
obj.singleton_methods.each do |name|
next if name == :yaml_tag # Do not know why we need to skip
# puts "name=#{name}"
self.define_singleton_method(name, &obj.singleton_method(name))
end
end
end
##
# Describes a namespace.
# Used for managing the access points to internals of hardware constructs.
class Namespace
  include SingletonExtend

  # Method names that may never be overridden through add_method, since
  # the namespace machinery itself relies on them.
  RESERVED = [ :user, :initialize, :add_method, :concat_namespace,
               :to_namespace, :user?, :user_deep? ]

  # The construct using the namespace.
  attr_reader :user

  # Creates a new namespace attached to +user+.
  def initialize(user)
    # Sets the user.
    @user = user
    # Initialize the concat namespaces.
    @concats = []
  end

  # Adds method +name+ provided the name is not empty.
  #
  # @raise [RuntimeError] when +name+ is one of the RESERVED names.
  def add_method(name,&ruby_block)
    unless name.empty? then
      if RESERVED.include?(name.to_sym) then
        # BUGFIX: the error message previously misspelled "Reserved" as
        # "Resevered".
        raise "Reserved name #{name} cannot be overridden."
      end
      define_singleton_method(name,&ruby_block)
    end
  end

  # Concats another +namespace+ to current one.
  def concat_namespace(namespace)
    # Ensure namespace is really a namespace and concat it.
    namespace = namespace.to_namespace
    self.eigen_extend(namespace)
    # Adds the concat to the list.
    @concats << namespace
  end

  # Ensure it is a namespace
  def to_namespace
    return self
  end

  # Tell if an +object+ is the user of the namespace.
  def user?(object)
    return @user.equal?(object)
  end

  # Tell if an +object+ is the user of the namespace or of one of its
  # concats.
  def user_deep?(object)
    # Convert the object to a user if applicable (for SystemT)
    object = object.to_user if object.respond_to?(:to_user)
    # Maybe object is the user of this namespace.
    return true if user?(object)
    # No, try in the concat namespaces.
    @concats.any? { |concat| concat.user_deep?(object) }
  end
end
# ##
# # Module providing mixin properties to hardware types.
# module HMix
# # Tells this is a hardware type supporting mixins.
# #
# # NOTE: only there for being checked through respond_to?
# def hmix?
# return true
# end
# # Mixins hardware types +htypes+.
# def include(*htypes)
# # Initialize the list of mixins hardware types if required.
# @includes ||= []
# # Check and add the hardware types.
# htypes.each do |htype|
# unless htype.respond_to?(:hmix?) then
# raise "Invalid class for mixin: #{htype.class}"
# end
# @includes << htype
# end
# end
# # # Mixins hardware types +htypes+ by extension.
# # def extend(htypes)
# # # Initialize the list of mixins hardware types if required.
# # @extends ||= []
# # # Check and add the hardware types.
# # htypes.each do |htype|
# # unless htype.respond_to?(:hmix?) then
# # raise "Invalid class for mixin: #{htype.class}"
# # end
# # @includes << htype
# # end
# # end
# end
##
# Module providing handling of unknown methods for hardware constructs.
module Hmissing
High = HDLRuby::High
# Missing methods may be immediate values, if not, they are looked up
# in the upper level of the namespace if any.
#
# Lookup order: immediate value (no args) -> self on the namespace
# stack -> self's private namespace on the stack -> self's public
# namespace on the stack -> NoMethodError.
def method_missing(m, *args, &ruby_block)
# print "method_missing in class=#{self.class} with m=#{m}\n"
# Is the missing method an immediate value?
value = m.to_value
return value if value and args.empty?
# No, is there an upper namespace, i.e. is the current object
# present in the space?
if High.space_index(self) then
# Yes, self is in it, can try the methods in the space.
High.space_call(m,*args,&ruby_block)
elsif self.respond_to?(:namespace) and
High.space_index(self.namespace) then
# Yes, the private namespace is in it, can try the methods in
# the space.
High.space_call(m,*args,&ruby_block)
elsif self.respond_to?(:public_namespace) and
High.space_index(self.public_namespace) then
# Yes, the public namespace is in it, can try the methods in
# the space.
High.space_call(m,*args,&ruby_block)
else
# No, this is a true error.
raise NoMethodError.new("undefined local variable or method `#{m}'.")
end
end
end
##
# Module providing handling of unknown methods for scope-like constructs:
# unknown methods are resolved against the scope's private namespace.
module HScope_missing
include Hmissing
# Keep the generic lookup available under another name: it is used
# when the scope is currently opened (i.e. on the namespace stack).
alias h_missing method_missing
# Missing methods are looked for in the private namespace.
#
# NOTE: it is ok to use the private namespace because the scope
# can only be accessed if it is available from its systemT.
def method_missing(m, *args, &ruby_block)
# Is the scope currently opened?
# puts "self.class=#{self.class}"
if High.space_top.user_deep?(self) then
# Yes, use the stack of namespaces.
h_missing(m,*args,&ruby_block)
else
# No, look into the current namespace and return a reference
# to the result if it is a referable hardware object.
res = self.namespace.send(m,*args,&ruby_block)
if res.respond_to?(:to_ref) then
# This is a referable object, build the reference from
# the namespace.
return RefObject.new(self.to_ref,res)
end
# NOTE(review): when +res+ is not referable the method falls
# through and implicitly returns nil, discarding +res+ --
# confirm this is intended rather than `return res`.
end
# puts "method_missing in scope=#{@name}(#{self}) with m=#{m}"
# puts "self.namespace=#{self.namespace}"
# # puts "namespace methods = #{self.namespace.methods}"
# if self.namespace.respond_to?(m) then
# puts "Found"
# self.namespace.send(m,*args,&ruby_block)
# else
# puts "NOT Found"
# h_missing(m,*args,&ruby_block)
# end
end
end
##
# Module providing methods for declaring select expressions.
module Hmux
  # Creates an operator selecting from +select+ one of the +choices+.
  #
  # NOTE: +choices+ can either be a list of arguments or a single
  # array of choices. If +choices+ has only two entries
  # (and it is not a hash), +select+ will be converted to a boolean.
  def mux(select, *choices)
    # A lone argument is assumed to be an array holding the choices.
    choices = choices.flatten(1) if choices.size == 1
    # Normalize every choice to an expression.
    exprs = choices.map { |choice| choice.to_expr }
    # Build the resulting selection operator.
    Select.new("?", select.to_expr, *exprs)
  end
end
##
# Module providing declaration of inner signal (assumes inner signals
# are present).
module Hinner
# Only adds the methods if not present.
# This makes the inclusion idempotent: a class that already defines
# make_inners/inner keeps its own version.
def self.included(klass)
klass.class_eval do
# unless instance_methods.include?(:add_inner) then
# # Adds inner signal +signal+.
# def add_inner(signal)
# # Checks and add the signal.
# unless signal.is_a?(SignalI)
# raise "Invalid class for a signal instance: #{signal.class}"
# end
# if @inners.has_key?(signal.name) then
# raise "SignalI #{signal.name} already present."
# end
# @inners[signal.name] = signal
# end
# # Iterates over the inner signals.
# #
# # Returns an enumerator if no ruby block is given.
# def each_inner(&ruby_block)
# # No ruby block? Return an enumerator.
# return to_enum(:each_inner) unless ruby_block
# # A block? Apply it on each inner signal instance.
# @inners.each_value(&ruby_block)
# end
# alias :each_signal :each_inner
# ## Gets an inner signal by +name+.
# def get_inner(name)
# return @inners[name]
# end
# alias :get_signal :get_inner
# # Iterates over all the signals of the block and its sub block's ones.
# def each_signal_deep(&ruby_block)
# # No ruby block? Return an enumerator.
# return to_enum(:each_signal_deep) unless ruby_block
# # A block?
# # First, apply on the signals of the block.
# self.each_signal(&ruby_block)
# # Then apply on each sub block.
# self.each_block_deep do |block|
# block.each_signal_deep(&ruby_block)
# end
# end
# end
unless instance_methods.include?(:make_inners) then
# Creates and adds a set of inners typed +type+ from a list of +names+.
#
# NOTE: a name can also be a signal, in which case it is duplicated.
# Returns the last signal added.
def make_inners(type, *names)
res = nil
names.each do |name|
if name.respond_to?(:to_sym) then
# Adds the inner signal
res = self.add_inner(SignalI.new(name,type,:inner))
else
# Deactivated because conflict with parent.
# signal = name.clone
# signal.dir = :inner
# self.add_inner(signal)
raise "Invalid class for a name: #{name.class}"
end
end
return res
end
end
unless instance_methods.include?(:inner) then
# Declares high-level bit inner signals named +names+.
def inner(*names)
self.make_inners(bit,*names)
end
end
end
end
end
# Classes describing hardware types.
##
# Describes a high-level system type.
class SystemT < Low::SystemT
High = HDLRuby::High
# include Hinner
include SingletonExtend
# The public namespace
#
# NOTE: the private namespace is the namespace of the scope object.
attr_reader :public_namespace
##
# Creates a new high-level system type named +name+ and inheriting
# from +mixins+.
#
# # If name is hash, it is considered the system is unnamed and the
# # table is used to rename its signals or instances.
#
# The proc +ruby_block+ is executed when instantiating the system.
def initialize(name, *mixins, &ruby_block)
# Initialize the system type structure.
# super(name,Scope.new())
super(name,Scope.new(name))
# Initialize the set of extensions to transmit to the instances'
# eigen class
# (a plain object whose singleton class accumulates them).
@singleton_instanceO = Namespace.new(self.scope)
# Create the public namespace.
@public_namespace = Namespace.new(self.scope)
# Check and set the mixins.
mixins.each do |mixin|
unless mixin.is_a?(SystemT) then
raise "Invalid class for inheriting: #{mixin.class}."
end
end
@to_includes = mixins
# Prepare the instantiation methods
make_instantiater(name,SystemI,:add_systemI,&ruby_block)
# # Initialize the set of exported inner signals and instances
# @exports = {}
# # Initialize the set of included system instances.
# @includeIs = {}
end
# Converts to a namespace user.
def to_user
# Returns the scope.
return @scope
end
# # Adds a group of system +instances+ named +name+.
# def add_groupI(name, *instances)
# # Ensure name is a symbol and is not already used for another
# # group.
# name = name.to_sym
# if @groupIs.key?(name)
# raise "Group of system instances named #{name} already exist."
# end
# # Add the group.
# @groupIs[name.to_sym] = instances
# # Sets the parent of the instances.
# instances.each { |instance| instance.parent = self }
# end
# # Access a group of system instances by +name+.
# #
# # NOTE: the result is a copy of the group for avoiding side effects.
# def get_groupI(name)
# return @groupIs[name.to_sym].clone
# end
# # Iterates over the group of system instances.
# #
# # Returns an enumerator if no ruby block is given.
# def each_groupI(&ruby_block)
# # No ruby block? Return an enumerator.
# return to_enum(:each_groupI) unless ruby_block
# # A block? Apply it on each input signal instance.
# @groupIs.each(&ruby_block)
# end
# Creates and adds a set of inputs typed +type+ from a list of +names+.
#
# NOTE: a name can also be a signal, in which case it is duplicated.
# Returns the last signal added.
def make_inputs(type, *names)
res = nil
names.each do |name|
if name.respond_to?(:to_sym) then
res = self.add_input(SignalI.new(name,type,:input))
else
raise "Invalid class for a name: #{name.class}"
end
end
return res
end
# Creates and adds a set of outputs typed +type+ from a list of +names+.
#
# NOTE: a name can also be a signal, in which case it is duplicated.
# Returns the last signal added.
def make_outputs(type, *names)
# puts "type=#{type.inspect}"
res = nil
names.each do |name|
# puts "name=#{name}"
if name.respond_to?(:to_sym) then
res = self.add_output(SignalI.new(name,type,:output))
else
raise "Invalid class for a name: #{name.class}"
end
end
return res
end
# Creates and adds a set of inouts typed +type+ from a list of +names+.
#
# NOTE: a name can also be a signal, in which case it is duplicated.
# Returns the last signal added.
def make_inouts(type, *names)
res = nil
names.each do |name|
if name.respond_to?(:to_sym) then
res = self.add_inout(SignalI.new(name,type,:inout))
else
raise "Invalid class for a name: #{name.class}"
end
end
return res
end
# # Creates and adds a set of inners typed +type+ from a list of +names+.
# #
# # NOTE: a name can also be a signal, is which case it is duplicated.
# def make_inners(type, *names)
# res = nil
# names.each do |name|
# if name.respond_to?(:to_sym) then
# res = self.add_inner(SignalI.new(name,type,:inner))
# else
# raise "Invalid class for a name: #{name.class}"
# end
# end
# return res
# end
# # Adds a +name+ to export.
# #
# # NOTE: if the name do not corresponds to any inner signal nor
# # instance, raise an exception.
# def add_export(name)
# # Check the name.
# name = name.to_sym
# # Look for construct to make public.
# # Maybe it is an inner signals.
# inner = self.get_inner(name)
# if inner then
# # Yes set it as export.
# @exports[name] = inner
# return
# end
# # No, maybe it is an instance.
# instance = self.get_systemI(name)
# if instance then
# # Yes, set it as export.
# @exports[name] = instance
# return
# end
# # No, error.
# raise NameError.new("Invalid name for export: #{name}")
# end
# # Iterates over the exported constructs.
# #
# # Returns an enumerator if no ruby block is given.
# def each_export(&ruby_block)
# # No ruby block? Return an enumerator.
# return to_enum(:each_export) unless ruby_block
# # A block? Apply it on each input signal instance.
# @exports.each_value(&ruby_block)
# end
# Iterates over the exported constructs
#
# NOTE: look into the scope.
def each_export(&ruby_block)
@scope.each_export(&ruby_block)
end
# Gets class containing the extension for the instances.
def singleton_instance
@singleton_instanceO.singleton_class
end
# Opens for extension.
#
# NOTE: actually executes +ruby_block+ in the context of the scope
# of the system.
def open(&ruby_block)
# # No push since should not merge the current environment into
# # the system's.
# High.space_insert(-1,@namespace)
# High.top_user.instance_eval(&ruby_block)
# # High.top_user.postprocess
# High.space_pop
self.scope.open(&ruby_block)
end
# The proc used for instantiating the system type.
attr_reader :instance_proc
# The instantiation target class.
attr_reader :instance_class
# The instance owning the system if it is an eigen system
attr_reader :owner
# Sets the +owner+.
#
# Note: will make the system eigen
def owner=(owner)
@owner = owner
end
# Instantiate the system type to an instance named +i_name+ with
# possible arguments +args+.
#
# Builds an anonymous eigen system carrying the elaborated content,
# then wraps it in a new instance (of @instance_class) linked to it.
def instantiate(i_name,*args)
# Create the eigen type.
eigen = self.class.new(:"")
# # Extends eigen with self.
# eigen.extend(self)
# High.space_push(eigen.namespace)
# # Fills its namespace with the content of the current system type
# # (this latter may already contains access points if it has been
# # opended for extension previously).
# eigen.namespace.concat_namespace(@namespace)
# # Include the mixin systems given when declaring the system.
# @to_includes.each { |system| eigen.include(system) }
# # Execute the instantiation block
# High.top_user.instance_exec(*args,&@instance_proc) if @instance_proc
# High.space_pop
# Include the mixin systems given when declaring the system.
@to_includes.each { |system| eigen.scope.include(system) }
# Fills the scope of the eigen class.
eigen.scope.build_top(self.scope,*args)
# puts "eigen scope=#{eigen.scope}"
# Fill the public namespace
space = eigen.public_namespace
# Interface signals
# (accessors returning references bound to the owning instance).
eigen.each_signal do |signal|
# space.send(:define_singleton_method,signal.name) { signal }
space.send(:define_singleton_method,signal.name) do
# RefName.new(eigen.owner.to_ref,signal.name)
# RefName.new(eigen.owner.to_ref,signal)
RefObject.new(eigen.owner.to_ref,signal)
end
end
# Exported objects
eigen.each_export do |export|
# space.send(:define_singleton_method,export.name) { export }
space.send(:define_singleton_method,export.name) do
# RefName.new(eigen.owner.to_ref,export.name)
# RefName.new(eigen.owner.to_ref,export)
RefObject.new(eigen.owner.to_ref,export)
end
end
# Create the instance.
instance = @instance_class.new(i_name,eigen)
# Link it to its eigen system
eigen.owner = instance
# Extend it.
instance.eigen_extend(@singleton_instanceO)
# puts "instance scope= #{instance.systemT.scope}"
# Return the resulting instance
return instance
end
# Generates the instantiation capabilities including an instantiation
# method +name+ for hdl-like instantiation, target instantiation as
# +klass+, added to the calling object with +add_instance+, and
# whose eigen type is initialized by +ruby_block+.
def make_instantiater(name,klass,add_instance,&ruby_block)
# Set the instanciater.
@instance_proc = ruby_block
# Set the target instantiation class.
@instance_class = klass
# Unnamed types do not have associated access method.
return if name.empty?
obj = self # For using the right self within the proc
# Register the hdl-level constructor under +name+:
# <name> :inst0, :inst1, ... instantiates, while a call without
# arguments accesses the system type itself.
High.space_reg(name) do |*args|
# If no name it is actually an access to the system type.
return obj if args.empty?
# Get the names from the arguments.
i_names = args.shift
# puts "i_names=#{i_names}(#{i_names.class})"
i_names = [*i_names]
instance = nil # The current instance
i_names.each do |i_name|
# Instantiate.
instance = obj.instantiate(i_name,*args)
# Add the instance.
High.top_user.send(add_instance,instance)
end
# # Return the last instance.
instance
end
end
# Missing methods may be immediate values, if not, they are looked up
include Hmissing
# Methods used for describing a system in HDLRuby::High
# Declares high-level bit input signals named +names+.
def input(*names)
self.make_inputs(bit,*names)
end
# Declares high-level bit output signals named +names+.
def output(*names)
self.make_outputs(bit,*names)
end
# Declares high-level bit inout signals named +names+.
def inout(*names)
self.make_inouts(bit,*names)
end
# # Declares high-level bit inner signals named +names+.
# def inner(*names)
# self.make_inners(bit,*names)
# end
# # Declares a high-level behavior activated on a list of +events+, and
# # built by executing +ruby_block+.
# def behavior(*events, &ruby_block)
# # Preprocess the events.
# events.map! do |event|
# event.to_event
# end
# # Create and add the resulting behavior.
# self.add_behavior(Behavior.new(*events,&ruby_block))
# end
# # Declares a high-level timed behavior built by executing +ruby_block+.
# def timed(&ruby_block)
# # Create and add the resulting behavior.
# self.add_behavior(TimeBehavior.new(&ruby_block))
# end
# # Creates a new parallel block built from +ruby_block+.
# #
# # This methods first creates a new behavior to put the block in.
# def par(&ruby_block)
# self.behavior do
# par(&ruby_block)
# end
# end
# # Creates a new sequential block built from +ruby_block+.
# #
# # This methods first creates a new behavior to put the block in.
# def seq(&ruby_block)
# self.behavior do
# seq(&ruby_block)
# end
# end
# # Statements automatically enclosed in a behavior.
#
# # Creates a new if statement with a +condition+ that when met lead
# # to the execution of the block in +mode+ generated by the +ruby_block+.
# #
# # NOTE:
# # * the else part is defined through the helse method.
# # * a behavior is created to enclose the hif.
# def hif(condition, mode = nil, &ruby_block)
# self.behavior do
# hif(condition,mode,&ruby_block)
# end
# end
# # Sets the block executed when the condition is not met to the block
# # in +mode+ generated by the execution of +ruby_block+.
# #
# # Can only be used once.
# #
# # NOTE: added to the hif of the last behavior.
# def helse(mode = nil, &ruby_block)
# # There is a ruby_block: the helse is assumed to be with
# # the last statement of the last behavior.
# statement = self.last_behavior.last_statement
# # Completes the hif or the hcase statement.
# unless statement.is_a?(If) or statement.is_a?(Case) then
# raise "Error: helse statement without hif nor hcase (#{statement.class})."
# end
# statement.helse(mode, &ruby_block)
# end
# # Sets the condition check when the condition is not met to the block,
# # with a +condition+ that when met lead
# # to the execution of the block in +mode+ generated by the +ruby_block+.
# def helsif(condition, mode = nil, &ruby_block)
# # There is a ruby_block: the helse is assumed to be with
# # the last statement of the last behavior.
# statement = @statements.last
# # Completes the hif statement.
# unless statement.is_a?(If) then
# raise "Error: helsif statement without hif (#{statement.class})."
# end
# statement.helsif(condition, mode, &ruby_block)
# end
# # Creates a new case statement with a +value+ used for deciding which
# # block to execute.
# #
# # NOTE:
# # * the when part is defined through the hwhen method.
# # * a new behavior is created to enclose the hcase.
# def hcase(value)
# self.behavior do
# hcase(condition,value)
# end
# end
# # Sets the block of a case structure executed when the +match+ is met
# # to the block in +mode+ generated by the execution of +ruby_block+.
# #
# # Can only be used once.
# def hwhen(match, mode = nil, &ruby_block)
# # There is a ruby_block: the helse is assumed to be with
# # the last statement of the last behavior.
# statement = @statements.last
# # Completes the hcase statement.
# unless statement.is_a?(Case) then
# raise "Error: hwhen statement without hcase (#{statement.class})."
# end
# statement.hwhen(match, mode, &ruby_block)
# end
#
# # Sets the constructs corresponding to +names+ as exports.
# def export(*names)
# names.each {|name| self.add_export(name) }
# end
# Extend the class according to another +system+.
def extend(system)
# Adds the singleton methods
self.eigen_extend(system)
# Adds the singleton methods for the instances.
@singleton_instanceO.eigen_extend(system.singleton_instance)
end
# # Include another +system+ type with possible +args+ instanciation
# # arguments.
# def include(system,*args)
# if @includeIs.key?(system.name) then
# raise "Cannot include twice the same system."
# end
# # Extends with system.
# self.extend(system)
# # Create the instance to include
# instance = system.instantiate(:"",*args)
# # Concat its public namespace to the current one.
# self.namespace.concat_namespace(instance.public_namespace)
# # Adds it the list of includeds
# @includeIs[system.name] = instance
# end
# Casts as an included +system+.
#
# NOTE: use the includes of the scope.
def as(system)
# system = system.name if system.respond_to?(:name)
# return @includeIs[system].public_namespace
return self.scope.as(system.scope)
end
include Hmux
# Fills a low level system with self's contents.
#
# NOTE: name conflicts are treated in the current NameStack state.
# Only the interface signals are converted here; the body is
# handled by the scope's own fill_low/to_low.
def fill_low(systemTlow)
# puts "fill_low with systemTlow=#{systemTlow}"
# Adds its input signals.
self.each_input { |input| systemTlow.add_input(input.to_low) }
# Adds its output signals.
self.each_output { |output| systemTlow.add_output(output.to_low) }
# Adds its inout signals.
self.each_inout { |inout| systemTlow.add_inout(inout.to_low) }
# # Adds the inner signals.
# self.each_inner { |inner| systemTlow.add_inner(inner.to_low) }
# # Adds the instances.
# # Single ones.
# self.each_systemI { |systemI|
# systemTlow.add_systemI(systemI.to_low)
# }
# # Grouped ones.
# self.each_groupI do |name,systemIs|
# systemIs.each.with_index { |systemI,i|
# # Sets the name of the system instance
# # (required for conversion of further accesses).
# # puts "systemI.respond_to?=#{systemI.respond_to?(:name=)}"
# systemI.name = name.to_s + "[#{i}]"
# # And convert it to low
# systemTlow.add_systemI(systemI.to_low())
# }
# end
# # Adds the connections.
# self.each_connection { |connection|
# systemTlow.add_connection(connection.to_low)
# }
# # Adds the behaviors.
# self.each_behavior { |behavior|
# systemTlow.add_behavior(behavior.to_low)
# }
end
# Converts the system to HDLRuby::Low and set its +name+.
#
# Raises when the system has no name (eigen systems must be renamed
# before conversion).
def to_low(name = self.name)
name = name.to_s
if name.empty? then
raise "Cannot convert a system without a name to HDLRuby::Low."
end
# Create the resulting low system type.
systemTlow = HDLRuby::Low::SystemT.new(High.names_create(name),
self.scope.to_low)
# Fills the interface of the new system from the included
# systems, must look into the scope since it is the scope
# that contains the included systems.
self.scope.each_included do |included|
included.systemT.fill_low(systemTlow)
end
# # Push the private namespace for the low generation.
# High.space_push(@namespace)
# # Pushes on the name stack for converting the internals of
# # the system.
# High.names_push
# # Adds the content of its included systems.
# @includeIs.each_value { |space| space.user.fill_low(systemTlow) }
# Adds the content of the actual system.
self.fill_low(systemTlow)
# # Restores the name stack.
# High.names_pop
# # Restores the namespace stack.
# High.space_pop
# # Return the resulting system.
return systemTlow
end
end
##
# Describes a scope for a system type
class Scope < Low::Scope
High = HDLRuby::High
# include HMix
# Hinner provides make_inners/inner (only when not already defined).
include Hinner
include SingletonExtend
# The name of the scope if any.
attr_reader :name
# The namespace
attr_reader :namespace
# The return value when building the scope.
attr_reader :return_value
##
# Creates a new scope with possible +name+.
#
# The proc +ruby_block+ is executed for building the scope.
# If no block is provided, the scope is the top of a system and
# is filled by the instantiation procedure of the system.
#
# NOTE(review): +name+ is assumed to be a Symbol (it is checked with
# +empty?+) -- confirm callers never pass a String.
def initialize(name = :"", &ruby_block)
# Initialize the scope structure
super(name)
unless name.empty? then
# Named scope, set the hdl-like access to the scope.
obj = self # For using the right self within the proc
High.space_reg(name) { obj }
end
# Initialize the set of grouped system instances.
@groupIs = {}
# Creates the namespace.
@namespace = Namespace.new(self)
# Initialize the set of exported inner signals and instances
@exports = {}
# Initialize the set of included system instances.
@includeIs = {}
# Builds the scope if a ruby block is provided
# (which means the scope is not the top of a system).
self.build(&ruby_block) if block_given?
end
# Converts to a namespace user.
#
# NOTE: a scope is directly usable as a namespace user, hence self.
def to_user
  self
end
# # The name of the scope if any.
# #
# # NOTE:
# # * the name of the first scope of a system is the system's.
# # * for building reference path with converting to low.
# def name
# if self.parent.is_a?(SystemT) then
# return self.parent.name
# else
# return @name
# end
# end
# Adds a group of system +instances+ named +name+.
#
# Raises if a group with the same name is already registered.
def add_groupI(name, *instances)
  # Groups are registered under symbol names.
  name = name.to_sym
  # Reject duplicated group names.
  raise "Group of system instances named #{name} already exist." if @groupIs.key?(name)
  # Register the group and attach each instance to this scope.
  @groupIs[name] = instances
  instances.each { |instance| instance.parent = self }
end
# Access a group of system instances by +name+.
#
# NOTE: the result is a copy of the group for avoiding side effects.
def get_groupI(name)
  @groupIs[name.to_sym].clone
end
# Iterates over the groups of system instances, yielding each
# group name with its instances.
#
# Returns an enumerator if no ruby block is given.
def each_groupI(&ruby_block)
  ruby_block ? @groupIs.each(&ruby_block) : to_enum(:each_groupI)
end
# Cf. Hinner
# # Creates and adds a set of inners typed +type+ from a list of +names+.
# #
# # NOTE: a name can also be a signal, is which case it is duplicated.
# def make_inners(type, *names)
# res = nil
# names.each do |name|
# if name.respond_to?(:to_sym) then
# res = self.add_inner(SignalI.new(name,type,:inner))
# else
# # Deactivated because conflict with parent.
# raise "Invalid class for a name: #{name.class}"
# end
# end
# return res
# end
# Adds a +name+ to export.
#
# NOTE: if the name does not correspond to any inner signal nor
# instance, raise an exception.
def add_export(name)
  name = name.to_sym
  # An export is either an inner signal...
  if (inner = self.get_inner(name)) then
    @exports[name] = inner
    return
  end
  # ...or a system instance.
  if (instance = self.get_systemI(name)) then
    @exports[name] = instance
    return
  end
  # Neither: invalid export name.
  raise NameError.new("Invalid name for export: #{name}")
end
# Iterates over the exported constructs.
#
# Returns an enumerator if no ruby block is given.
def each_export(&ruby_block)
  return to_enum(:each_export) unless ruby_block
  # Exports of this scope first...
  @exports.each_value(&ruby_block)
  # ...then recursively those of the sub scopes.
  @scopes.each { |sub| sub.each_export(&ruby_block) }
end
# Iterates over the included system instances.
#
# Returns an enumerator if no ruby block is given.
def each_included(&ruby_block)
  return to_enum(:each_included) unless ruby_block
  # Instances included in this scope...
  @includeIs.each_value(&ruby_block)
  # ...and recursively those of the sub scopes.
  @scopes.each { |sub| sub.each_included(&ruby_block) }
end
# Opens for extension.
#
# NOTE: actually executes +ruby_block+ in the context.
def open(&ruby_block)
# No push since should not merge the current environment into
# the system's.
# NOTE(review): insert at -1 presumably places the namespace at the
# bottom/end of the space stack instead of on top -- confirm the
# stack orientation in High.space_insert.
High.space_insert(-1,@namespace)
High.top_user.instance_eval(&ruby_block)
High.space_pop
end
# Build the scope by executing +ruby_block+.
#
# NOTE: used when the scope is not the top of a system.
# The block's result is recorded in @return_value (used by +sub+).
def build(&ruby_block)
# Namespace already there
# # High-level scopes can include inner signals.
# # And therefore require a namespace.
# @namespace ||= Namespace.new(self)
# Build the scope.
High.space_push(@namespace)
@return_value = High.top_user.instance_eval(&ruby_block)
High.space_pop
end
# Builds the scope using +base+ as model scope with possible arguments
# +args+.
#
# NOTE: Used by the instantiation procedure of a system.
def build_top(base,*args)
High.space_push(@namespace)
# Fills its namespace with the content of the base scope
# (this latter may already contain access points if it has been
# opened for extension previously).
@namespace.concat_namespace(base.namespace)
# # Include the mixin systems given when declaring the system.
# @to_includes.each { |system| eigen.include(system) }
# Execute the instantiation block
# (only the top scope of a system has a SystemT parent carrying
# an instantiation proc).
instance_proc = base.parent.instance_proc if base.parent.is_a?(SystemT)
@return_value = High.top_user.instance_exec(*args,&instance_proc) if instance_proc
High.space_pop
end
# Methods delegated to the upper system.
# (The parent is either the enclosing SystemT or a parent Scope that
# delegates upward in the same way.)
# Adds input +signal+ in the current system.
def add_input(signal)
self.parent.add_input(signal)
end
# Adds output +signal+ in the current system.
def add_output(signal)
self.parent.add_output(signal)
end
# Adds inout +signal+ in the current system.
def add_inout(signal)
self.parent.add_inout(signal)
end
# Creates and adds a set of inputs typed +type+ from a list of +names+
# in the current system.
#
# NOTE: a name can also be a signal, in which case it is duplicated.
def make_inputs(type, *names)
self.parent.make_inputs(type,*names)
end
# Creates and adds a set of outputs typed +type+ from a list of +names+
# in the current system.
#
# NOTE: a name can also be a signal, in which case it is duplicated.
def make_outputs(type, *names)
self.parent.make_outputs(type,*names)
end
# Creates and adds a set of inouts typed +type+ from a list of +names+
# in the current system.
#
# NOTE: a name can also be a signal, in which case it is duplicated.
def make_inouts(type, *names)
self.parent.make_inouts(type,*names)
end
# Converts to a new reference.
# (+this+ presumably builds a reference to the current context --
# TODO confirm against the reference classes.)
def to_ref
# return RefName.new(this,self.name)
# return RefName.new(this,self)
return RefObject.new(this,self)
end
# Unknown methods are resolved against the scope's namespace.
include HScope_missing
# Moved to Hscope_missing for sharing with block
# include Hmissing
# alias h_missing method_missing
# # Missing methods are looked for in the private namespace.
# #
# # NOTE: it is ok to use the private namespace because the scope
# # can only be accessed if it is available from its systemT.
# def method_missing(m, *args, &ruby_block)
# # Is the scope currently opened?
# if High.space_top.user_deep?(self) then
# # Yes, use the stack of namespaces.
# h_missing(m,*args,&ruby_block)
# else
# # No, look into the current namespace and return a reference
# # to the result if it is a referable hardware object.
# res = self.namespace.send(m,*args,&ruby_block)
# if res.respond_to?(:to_ref) then
# # This is a referable object, build the reference from
# # the namespace.
# return RefObject.new(self.to_ref,res)
# end
# end
# # puts "method_missing in scope=#{@name}(#{self}) with m=#{m}"
# # puts "self.namespace=#{self.namespace}"
# # # puts "namespace methods = #{self.namespace.methods}"
# # if self.namespace.respond_to?(m) then
# # puts "Found"
# # self.namespace.send(m,*args,&ruby_block)
# # else
# # puts "NOT Found"
# # h_missing(m,*args,&ruby_block)
# # end
# end
# Methods used for describing a system in HDLRuby::High
# Declares high-level bit input signals named +names+
# in the current system.
def input(*names)
self.parent.input(*names)
end
# Declares high-level bit output signals named +names+
# in the current system.
def output(*names)
self.parent.output(*names)
end
# Declares high-level bit inout signals named +names+
# in the current system.
def inout(*names)
self.parent.inout(*names)
end
# Declares a sub scope with possible +name+ (anonymous by default)
# built from +ruby_block+, adds it to self and returns the value of
# the block.
def sub(name = :"", &ruby_block)
  # Build the new scope from the block.
  inner = Scope.new(name, &ruby_block)
  # Register it as a sub scope.
  self.add_scope(inner)
  # Expose the block's result to the caller.
  inner.return_value
end
# Declares a high-level behavior activated on a list of +events+, and
# built by executing +ruby_block+.
def behavior(*events, &ruby_block)
  # Normalize each activation event.
  evs = events.map { |event| event.to_event }
  # Build and register the resulting behavior.
  self.add_behavior(Behavior.new(*evs, &ruby_block))
end
# Declares a high-level timed behavior built by executing +ruby_block+.
def timed(&ruby_block)
# Create and add the resulting behavior.
self.add_behavior(TimeBehavior.new(&ruby_block))
end
# Creates a new parallel block built from +ruby_block+.
#
# This methods first creates a new behavior to put the block in.
# (The inner +par+ call is resolved in the behavior's own context
# through method_missing, not recursively on the scope.)
def par(&ruby_block)
self.behavior do
par(&ruby_block)
end
end
# Creates a new sequential block built from +ruby_block+.
#
# This methods first creates a new behavior to put the block in.
# (Same re-dispatch mechanism as +par+ above.)
def seq(&ruby_block)
self.behavior do
seq(&ruby_block)
end
end
# Statements automatically enclosed in a behavior.
# Creates a new if statement with a +condition+ that when met lead
# to the execution of the block in +mode+ generated by the +ruby_block+.
#
# NOTE:
# * the else part is defined through the helse method.
# * a behavior is created to enclose the hif.
def hif(condition, mode = nil, &ruby_block)
self.behavior do
hif(condition,mode,&ruby_block)
end
end
# Sets the block executed when the condition is not met to the block
# in +mode+ generated by the execution of +ruby_block+.
#
# Can only be used once.
#
# NOTE: added to the hif of the last behavior.
def helse(mode = nil, &ruby_block)
# There is a ruby_block: the helse is assumed to be with
# the last statement of the last behavior.
statement = self.last_behavior.last_statement
# Completes the hif or the hcase statement.
unless statement.is_a?(If) or statement.is_a?(Case) then
raise "Error: helse statement without hif nor hcase (#{statement.class})."
end
statement.helse(mode, &ruby_block)
end
# Sets the condition check when the condition is not met to the block,
# with a +condition+ that when met lead
# to the execution of the block in +mode+ generated by the +ruby_block+.
def helsif(condition, mode = nil, &ruby_block)
# There is a ruby_block: the helse is assumed to be with
# the last statement of the last behavior.
# statement = @statements.last
statement = self.last_behavior.last_statement
# Completes the hif statement.
unless statement.is_a?(If) then
raise "Error: helsif statement without hif (#{statement.class})."
end
statement.helsif(condition, mode, &ruby_block)
end
# Creates a new case statement with a +value+ used for deciding which
# block to execute.
#
# NOTE:
# * the when part is defined through the hwhen method.
# * a new behavior is created to enclose the hcase.
def hcase(value)
  self.behavior do
    # Fix: the enclosed hcase only takes the selection +value+
    # (the previous call passed an undefined +condition+ local,
    # which failed at elaboration time).
    hcase(value)
  end
end
# Sets the block of a case structure executed when the +match+ is met
# to the block in +mode+ generated by the execution of +ruby_block+.
#
# Can only be used once.
def hwhen(match, mode = nil, &ruby_block)
# There is a ruby_block: the helse is assumed to be with
# the last statement of the last behavior.
statement = @statements.last
# Completes the hcase statement.
unless statement.is_a?(Case) then
raise "Error: hwhen statement without hcase (#{statement.class})."
end
statement.hwhen(match, mode, &ruby_block)
end
# Sets the constructs corresponding to +names+ as exports.
def export(*names)
  names.each { |export_name| self.add_export(export_name) }
end
# NOTE(review): a commented-out `extend(system)` helper used to sit
# here; eigen_extend is used directly instead.

# Includes a +system+ type with possible +args+ instantiation
# arguments (mixin-like composition of hardware descriptions).
def include(system, *args)
    # A system can be mixed in only once.
    raise "Cannot include twice the same system." if @includeIs.key?(system.name)
    # Brings the singleton methods of the system into self.
    self.eigen_extend(system)
    # Builds the anonymous instance embodying the included system.
    instance = system.instantiate(:"", *args)
    # Makes its public namespace reachable from the current one.
    self.namespace.concat_namespace(instance.public_namespace)
    # Records the inclusion for later look-up (e.g., by #as).
    @includeIs[system.name] = instance
end
# Casts self as included +system+: gives access to the public
# namespace of the corresponding included instance.
def as(system)
    key = system.respond_to?(:name) ? system.name : system
    return @includeIs[key].public_namespace
end
include Hmux
# Fills the low level scope +scopeLow+ with self's contents.
#
# NOTE: name conflicts are treated in the current NameStack state.
def fill_low(scopeLow)
    # Convert and add the sub scopes.
    self.each_scope do |scope|
        scopeLow.add_scope(scope.to_low)
    end
    # Convert and add the inner signals.
    self.each_inner do |inner|
        scopeLow.add_inner(inner.to_low)
    end
    # Convert and add the single system instances.
    self.each_systemI do |systemI|
        scopeLow.add_systemI(systemI.to_low)
    end
    # Convert and add the grouped system instances.
    self.each_groupI do |name, systemIs|
        systemIs.each_with_index do |systemI, i|
            # Rename the instance with its index within the group
            # (required for the conversion of further accesses).
            systemI.name = "#{name}[#{i}]"
            scopeLow.add_systemI(systemI.to_low)
        end
    end
    # Convert and add the connections.
    self.each_connection do |connection|
        scopeLow.add_connection(connection.to_low)
    end
    # Convert and add the behaviors.
    self.each_behavior do |behavior|
        scopeLow.add_behavior(behavior.to_low)
    end
end
# Converts the scope to HDLRuby::Low.
def to_low()
    # The resulting low scope.
    scopeLow = HDLRuby::Low::Scope.new()
    # Enter the private namespace and a fresh naming context while
    # converting the internals.
    High.space_push(@namespace)
    High.names_push
    # First add the contents of the included systems, then the
    # contents of the actual system itself.
    @includeIs.each_value { |instance| instance.user.fill_low(scopeLow) }
    self.fill_low(scopeLow)
    # Leave the naming context and the namespace.
    High.names_pop
    High.space_pop
    # Return the resulting low scope.
    return scopeLow
end
end
##
# Module bringing high-level properties to Type classes.
#
# Mixed into the HDLRuby::High type classes; provides name
# registration, the vector-building operator ([]), and per-type
# signal declaration helpers (input/output/inout/inner).
#
# NOTE: by default a type is not specified.
module Htype
    High = HDLRuby::High

    # Type processing
    include HDLRuby::Tprocess

    # Wraps the including class' initialize so that any named type is
    # automatically registered on construction.
    def self.included(base) # built-in Ruby hook for modules
        base.class_eval do
            original_method = instance_method(:initialize)
            define_method(:initialize) do |*args, &block|
                # Run the original constructor first.
                original_method.bind(self).call(*args, &block)
                # Registers the name (if not empty).
                self.register(name) unless name.empty?
            end
        end
    end

    # Tells htype has been included (duck-typed marker used instead of
    # is_a?(Type) checks elsewhere in the file).
    def htype?
        return true
    end

    # Sets the +name+.
    #
    # NOTE: can only be done if the name is not already set.
    def name=(name)
        unless @name.empty? then
            raise "Name of type already set to: #{@name}."
        end
        # Checks and sets the name.
        name = name.to_sym
        if name.empty? then
            raise "Cannot set an empty name."
        end
        @name = name
        # Registers the name.
        self.register(name)
    end

    # Registers the +name+ of the type in the current namespace so the
    # type becomes accessible by name in the HDL description.
    def register(name)
        if self.name.empty? then
            raise "Cannot register with empty name."
        else
            # Sets the hdl-like access to the type.
            obj = self # For using the right self within the proc
            High.space_reg(name) { obj }
        end
    end

    # Gets the type as left value.
    #
    # NOTE: used for asymetric types like TypeSystemI.
    def left
        # By default self.
        self
    end

    # Gets the type as right value.
    #
    # NOTE: used for asymetric types like TypeSystemI.
    def right
        # By default self.
        self
    end

    # NOTE(review): a large block of commented-out legacy code
    # (width/sign tables and compatible?/merge/instantiate methods)
    # used to sit here; it was moved to the base classes.

    # Type creation in HDLRuby::High.

    # Creates a new vector type of range +rng+ and with current type as
    # base.
    def [](rng)
        return TypeVector.new(:"",self,rng)
    end

    # SignalI creation through the type.

    # Declares high-level input signals named +names+ of the current
    # type.
    def input(*names)
        High.top_user.make_inputs(self,*names)
    end

    # Declares high-level output signals named +names+ of the
    # current type.
    def output(*names)
        High.top_user.make_outputs(self,*names)
    end

    # Declares high-level inout signals named +names+ of the
    # current type.
    def inout(*names)
        High.top_user.make_inouts(self,*names)
    end

    # Declares high-level inner signals named +names+ of the
    # current type.
    def inner(*names)
        High.top_user.make_inners(self,*names)
    end
end
##
# Describes a high-level data type.
#
# NOTE: by default a type is not specified.
class Type < Low::Type
    High = HDLRuby::High

    include Htype

    # Creates a new type named +name+.
    def initialize(name)
        # Delegates to the low-level type structure.
        super(name)
    end

    # Converts the type to HDLRuby::Low, giving it +name+.
    #
    # NOTE: should be overridden by the other type classes.
    def to_low(name = self.name)
        return HDLRuby::Low::Type.new(name)
    end
end
# Creates the basic types.

# Defines a basic type called +name+ and installs an accessor method
# of the same name returning it.
def self.define_type(name)
    sym = name.to_sym
    type = Type.new(sym)
    self.send(:define_method, sym) { type }
    return type
end
# NOTE(review): a commented-out void type used to be defined here.

# The bit type.
Bit = define_type(:bit)
class << Bit
    # A bit is a fixed-point type.
    def fixed?
        true
    end
    # A single bit is one bit wide.
    def width
        1
    end
end
# The signed bit type.
Signed = define_type(:signed)
class << Signed
    # A signed bit is signed.
    def signed?
        true
    end
    # A signed bit is a fixed-point type.
    def fixed?
        true
    end
    # A single signed bit is one bit wide.
    def width
        1
    end
end
# The unsigned bit type.
Unsigned = define_type(:unsigned)
class << Unsigned
    # An unsigned bit is unsigned.
    def unsigned?
        true
    end
    # An unsigned bit is a fixed-point type.
    def fixed?
        true
    end
    # A single unsigned bit is one bit wide.
    def width
        1
    end
end
# NOTE(review): a commented-out generic numeric type used to be
# defined here.

# The float bit type.
Float = define_type(:float)
class << Float
    # A float is signed.
    def signed?
        true
    end
    # A float is a floating-point type.
    def float?
        true
    end
    # Gets the bitwidth of the type, nil for undefined.
    # NOTE(review): returns 1 like the other base types — presumably
    # the elementary width before vectorization; confirm against the
    # width computation of vector types.
    def width
        1
    end
end

# NOTE(review): a commented-out TypeNumeric class used to follow; it
# was never activated.
# Methods shared by the vector types.
module HvectorType
    # Converts the type to HDLRuby::Low, giving it +name+.
    def to_low(name = self.name)
        # Convert the base type and the range, then build the
        # low-level vector type from them.
        baseLow  = self.base.to_low
        rangeLow = self.range.to_low
        return HDLRuby::Low::TypeVector.new(name, baseLow, rangeLow)
    end
end
##
# Describes a vector type.
class TypeVector < Low::TypeVector
    High = HDLRuby::High

    include Htype
    include HvectorType

    # NOTE(review): the commented-out legacy implementation that used
    # to fill this class (range normalization, width/dir/signed?,
    # compatible?/merge and a local to_low) was moved to the base
    # class and to HvectorType.
end
##
# Describes a signed integer data type.
class TypeSigned < TypeVector
    # Creates a new signed vector type named +name+ over +range+.
    #
    # NOTE:
    # * The default range gives a 32-bit signed integer.
    def initialize(name, range = 31..0)
        super(name, Signed, range)
    end
end
##
# Describes an unsigned integer data type.
class TypeUnsigned < TypeVector
    # Creates a new unsigned vector type named +name+ over +range+.
    #
    # NOTE:
    # * The default range gives a 32-bit unsigned integer.
    def initialize(name, range = 31..0)
        super(name, Unsigned, range)
    end
end
##
# Describes a float data type.
class TypeFloat < TypeVector
    # Creates a new float vector type named +name+ over +range+.
    #
    # NOTE:
    # * The bits of the negative part of the range stand for the
    #   exponent.
    # * The default range corresponds to the IEEE 754 double
    #   precision standard (52 fraction bits, 11 exponent bits).
    def initialize(name, range = 52..-11)
        super(name, Float, range)
    end
end
##
# Describes a tuple type.
class TypeTuple < Low::TypeTuple
    High = HDLRuby::High

    include Htype

    # Converts the type to HDLRuby::Low, giving it +name+.
    def to_low(name = self.name)
        # Convert each sub type, then build the low-level tuple.
        typesLow = @types.map { |type| type.to_low }
        return HDLRuby::Low::TypeTuple.new(name, *typesLow)
    end
end
##
# Describes a structure type.
class TypeStruct < Low::TypeStruct
    High = HDLRuby::High

    include Htype

    # NOTE(review): legacy commented-out width/compatible?/merge
    # methods that used to sit here were moved to HDLRuby::Low.

    # Converts the type to HDLRuby::Low, giving it +name+.
    def to_low(name = self.name)
        # Convert each named sub type, keeping the declaration order.
        content = @types.map { |sub_name, sub_type| [sub_name, sub_type.to_low] }
        return HDLRuby::Low::TypeStruct.new(name, content)
    end
end
## Methods for declaring system types and functions.

# The type constructors.

# Creates an unnamed structure type from +content+.
def struct(content)
    return TypeStruct.new(:"", content)
end

# NOTE(review): a commented-out union constructor used to follow.
# Creates a type named +name+ whose content is built by executing
# +ruby_block+ in the context of the current user.
def type(name, &ruby_block)
    # Build the type.
    built = HDLRuby::High.top_user.instance_eval(&ruby_block)
    # Ensure the result really is a type (duck-typed htype? check).
    unless built.respond_to?(:htype?) then
        raise "Invalid class for a type: #{built.class}."
    end
    # Name and return it.
    built.name = name
    return built
end
# Methods for declaring systems

# Declares a high-level system type named +name+, mixing in the
# +includes+ hardware types and using +ruby_block+ for instantiating.
def system(name = :"", *includes, &ruby_block)
    return SystemT.new(name, *includes, &ruby_block)
end
# Methods for declaring functions

# Declares a function named +name+ using +ruby_block+ as body.
#
# NOTE: a function is a short-cut for a method that creates a scope.
def function(name, &ruby_block)
    # The body is the same in both cases: open a sub scope and run
    # the user block within the current top user.
    body = lambda do |*args|
        sub do
            HDLRuby::High.top_user.instance_exec(*args, &ruby_block)
        end
    end
    if HDLRuby::High.in_systemT? then
        # Within a system type: the function is local to it.
        define_singleton_method(name.to_sym, &body)
    else
        # Otherwise: the function is declared on the current object.
        define_method(name.to_sym, &body)
    end
end

# NOTE(review): a commented-out SystemT#to_type extension used to
# follow; it was never activated.
# Classes describing hardware instances.

##
# Describes a high-level system instance.
class SystemI < Low::SystemI
    High = HDLRuby::High

    include SingletonExtend

    # Creates a new system instance of system type +systemT+ named
    # +name+, and registers it in the current namespace so it can be
    # accessed by name in the HDL description.
    def initialize(name, systemT)
        # Initialize the system instance structure.
        super(name,systemT)
        # Sets the hdl-like access to the system instance.
        obj = self # For using the right self within the proc
        High.space_reg(name) { obj }
    end

    # Converts to a new reference (an object reference on self within
    # the current `this`).
    def to_ref
        return RefObject.new(this,self)
    end

    # Connects signals of the system instance according to +connects+.
    #
    # NOTE: +connects+ can be a hash table where each entry gives the
    # correspondance between a system's signal name and an external
    # signal to connect to, or a list of signals that will be connected
    # in the order of declaration.
    def call(*connects)
        # Checks if it is a connection by name through a hash.
        if connects.size == 1 and connects[0].respond_to?(:to_h) then
            # Yes, perform a connection by name.
            connects = connects[0].to_h
            # Performs the connections.
            connects.each do |left,right|
                # Gets the instance signal named by the key.
                left = self.get_signal(left)
                # Convert it to a reference rooted at this instance.
                left = RefObject.new(self.to_ref,left)
                # Make the connection (see HArrow#<=).
                left <= right
            end
        else
            # No, perform the connections in order of declaration.
            connects.each.with_index do |csig,i|
                # Gets the i-th interface signal of the instance.
                ssig = self.get_interface(i)
                # Convert it to a reference rooted at this instance.
                ssig = RefObject.new(self.to_ref,ssig)
                # Make the connection (see HArrow#<=).
                ssig <= csig
            end
        end
    end

    # Gets an exported element (signal or system instance) by +name+.
    def get_export(name)
        return @systemT.get_export(name)
    end

    # Opens for extension.
    #
    # NOTE: actually executes +ruby_block+ in the context of the
    # systemT.
    def open(&ruby_block)
        return @systemT.open(&ruby_block)
    end

    # Missing methods are looked for in the public namespace of the
    # system type.
    #
    # NOTE(review): respond_to_missing? is not overridden alongside
    # this — respond_to? will report false for delegated names.
    def method_missing(m, *args, &ruby_block)
        self.public_namespace.send(m,*args,&ruby_block)
    end

    # Methods to transmit to the systemT.

    # Gets the public namespace.
    def public_namespace
        self.systemT.public_namespace
    end

    # Converts the instance to HDLRuby::Low, giving it +name+.
    # The instance's system type is converted first, under a derived
    # unique name ("<name>::T").
    def to_low(name = self.name)
        # Converts the system of the instance to HDLRuby::Low.
        systemTlow = self.systemT.to_low(High.names_create(name.to_s+ "::T"))
        # Creates the resulting HDLRuby::Low instance.
        return HDLRuby::Low::SystemI.new(High.names_create(name),
                            systemTlow)
    end
end
# Classes describing hardware statements, connections and expressions.

##
# Module giving high-level statement properties.
module HStatement
    # Creates a new if statement on +condition+ whose "yes" part
    # encloses self.
    #
    # NOTE: the else part is defined through the helse method.
    def hif(condition)
        return If.new(condition) { self }
    end
end
##
# Describes a high-level if statement.
class If < Low::If
    High = HDLRuby::High

    include HStatement

    # Creates a new if statement on +condition+ whose "yes" part is
    # the block in +mode+ built by executing +ruby_block+.
    def initialize(condition, mode = nil, &ruby_block)
        # Build the yes block.
        yes_block = High.make_block(mode, &ruby_block)
        # Build the underlying low-level if statement.
        super(condition.to_expr, yes_block)
    end

    # Sets the block executed in +mode+ when the condition is not met
    # to the block generated by the execution of +ruby_block+.
    #
    # Can only be used once.
    def helse(mode = nil, &ruby_block)
        # A second else part is forbidden.
        raise "Cannot have two helse for a single if statement." if self.no
        # Build and attach the no block.
        self.no = High.make_block(mode, &ruby_block)
    end

    # Sets the block executed in +mode+ when the condition is not met
    # but +next_cond+ is met, to the block generated by the execution
    # of +ruby_block+.
    #
    # Can only be used while the no-block is not set yet.
    def helsif(next_cond, mode = nil, &ruby_block)
        # An elsif after an else is forbidden.
        raise "Cannot have an helsif after an helse." if self.no
        # Build and attach the alternative condition and its block.
        noif_block = High.make_block(mode, &ruby_block)
        self.add_noif(next_cond.to_expr, noif_block)
    end

    # Converts the if statement to HDLRuby::Low.
    def to_low
        # The no part is optional: convert it only when present.
        noL = self.no && self.no.to_low
        lowIf = HDLRuby::Low::If.new(self.condition.to_low,
                                     self.yes.to_low, noL)
        # Convert the alternative conditions as well.
        self.each_noif do |cond, block|
            lowIf.add_noif(cond.to_low, block.to_low)
        end
        return lowIf
    end
end
##
# Describes a high-level case statement.
class Case < Low::Case
    High = HDLRuby::High

    include HStatement

    # Creates a new case statement on +value+ deciding which block to
    # execute.
    def initialize(value)
        super(value.to_expr)
    end

    # Adds the block executed in +mode+ when the value matches
    # +match+. The block is generated by the execution of +ruby_block+.
    #
    # Can only be used once for the given +match+.
    def hwhen(match, mode = nil, &ruby_block)
        when_block = High.make_block(mode, &ruby_block)
        self.add_when(match.to_expr, when_block)
    end

    # Sets the block executed in +mode+ when no match was found to the
    # block generated by the execution of +ruby_block+.
    #
    # Can only be used once.
    def helse(mode = nil, &ruby_block)
        self.default = High.make_block(mode, &ruby_block)
    end

    # Converts the case statement to HDLRuby::Low.
    def to_low
        # The resulting low-level case.
        lowCase = HDLRuby::Low::Case.new(@value.to_low)
        # Convert and add each when part.
        self.each_when do |match, statement|
            lowCase.add_when(match.to_low, statement.to_low)
        end
        # Convert and add the default part if any.
        lowCase.default = self.default.to_low if self.default
        return lowCase
    end
end
##
# Describes a delay: not synthesizable.
class Delay < Low::Delay
    High = HDLRuby::High

    include HStatement

    # Shorthand: makes the current top user wait for this delay.
    def !
        High.top_user.wait(self)
    end

    # Converts the delay to HDLRuby::Low.
    def to_low
        return HDLRuby::Low::Delay.new(self.value, self.unit)
    end
end
##
# Describes a high-level wait delay statement.
class TimeWait < Low::TimeWait
    include HStatement

    # Converts the wait statement to HDLRuby::Low.
    def to_low
        delayLow = self.delay.to_low
        return HDLRuby::Low::TimeWait.new(delayLow)
    end
end
##
# Describes a timed loop statement: not synthesizable!
class TimeRepeat < Low::TimeRepeat
    include HStatement

    # Converts the repeat statement to HDLRuby::Low.
    def to_low
        statementLow = self.statement.to_low
        delayLow = self.delay.to_low
        return HDLRuby::Low::TimeRepeat.new(statementLow, delayLow)
    end
end
##
# Module giving high-level expression properties.
module HExpression
    # The system type the expression has been resolved in, if any.
    attr_reader :systemT

    # The type of the expression if resolved.
    # (FIX: this attr_reader was declared twice in the original; the
    # duplicate has been removed.)
    attr_reader :type

    # Converts to a new value.
    #
    # NOTE: to be redefined.
    def to_value
        raise "Expression cannot be converted to a value: #{self.class}"
    end

    # Converts to a new expression.
    #
    # NOTE: to be redefined in case of non-expression class.
    def to_expr
        raise "Internal error: to_expr not defined yet for class: #{self.class}"
    end

    # Adds the unary operations generation.
    #
    # FIX: Ruby's unary plus operator method is named :+@ — the
    # original list used :"@+", which is never dispatched by `+expr`
    # (it defined an uncallable method and left `+expr` raising
    # NoMethodError).
    [:"-@",:"+@",:"!",:"~",
     :boolean, :bit, :signed, :unsigned].each do |operator|
        define_method(operator) do
            return Unary.new(operator,self.to_expr)
        end
    end

    # Adds the binary operations generation.
    [:"+",:"-",:"*",:"/",:"%",:"**",
     :"&",:"|",:"^",:"<<",:">>",
     :"==",:"!=",:"<",:">",:"<=",:">="].each do |operator|
        define_method(operator) do |right|
            return Binary.new(operator,self.to_expr,right.to_expr)
        end
    end

    # Methods for conversion for HDLRuby::Low: type processing,
    # flattening and so on.

    # Sets the data +type+.
    #
    # Raises unless +type+ behaves like a type (duck-typed htype?
    # check).
    def type=(type)
        # Check and set the type.
        unless type.respond_to?(:htype?) then
            raise "Invalid class for a type: #{type.class}."
        end
        @type = type
    end

    # NOTE(review): a large block of commented-out legacy code
    # (parent tracking and resolve_types) used to sit here.
end
##
# Module giving high-level properties for handling the arrow (<=)
# operator.
module HArrow
    High = HDLRuby::High

    # Creates a transmit (inside a block) or a connection (inside a
    # system type) assigning +expr+ to self.
    #
    # NOTE: +expr+ is converted afterward to an expression if required.
    def <=(expr)
        if High.top_user.is_a?(HDLRuby::Low::Block) then
            # Inside a block: this is a behavioral transmit statement.
            statement = Transmit.new(self.to_ref, expr.to_expr)
            High.top_user.add_statement(statement)
        else
            # Inside a system type: this is a structural connection.
            connection = Connection.new(self.to_ref, expr.to_expr)
            High.top_user.add_connection(connection)
        end
    end
end
##
# Describes a high-level unary expression.
class Unary < Low::Unary
    include HExpression

    # Converts to a new expression.
    def to_expr
        return Unary.new(self.operator, self.child.to_expr)
    end

    # Converts the unary expression to HDLRuby::Low.
    def to_low
        childLow = self.child.to_low
        return HDLRuby::Low::Unary.new(self.operator, childLow)
    end
end
##
# Describes a high-level binary expression.
class Binary < Low::Binary
    include HExpression

    # Converts to a new expression.
    def to_expr
        return Binary.new(self.operator, self.left.to_expr, self.right.to_expr)
    end

    # Converts the binary expression to HDLRuby::Low.
    def to_low
        leftLow  = self.left.to_low
        rightLow = self.right.to_low
        return HDLRuby::Low::Binary.new(self.operator, leftLow, rightLow)
    end
end

# NOTE(review): a commented-out Ternary class used to follow; the
# ternary operator is generalized by Select below.
##
# Describes a selection operation (generalization of the ternary
# operator).
#
# NOTE: the choice is made using the value of +select+ as an index.
class Select < Low::Select
    include HExpression

    # Converts to a new expression.
    def to_expr
        choices = self.each_choice.map { |choice| choice.to_expr }
        return Select.new("?", self.select.to_expr, *choices)
    end

    # Converts the selection expression to HDLRuby::Low.
    def to_low
        choices = self.each_choice.map { |choice| choice.to_low }
        return HDLRuby::Low::Select.new("?", self.select.to_low, *choices)
    end
end
##
# Describes a high-level concat expression.
class Concat < Low::Concat
    include HExpression

    # Converts to a new expression.
    def to_expr
        exprs = self.each_expression.lazy.map { |expr| expr.to_expr }
        return Concat.new(exprs)
    end

    # Converts the concatenation expression to HDLRuby::Low.
    def to_low
        exprs = self.each_expression.lazy.map { |expr| expr.to_low }
        return HDLRuby::Low::Concat.new(exprs)
    end
end
##
# Describes a high-level value.
class Value < Low::Value
    include HExpression
    include HDLRuby::Vprocess

    # Converts to a new value (a fresh copy with the same type and
    # content).
    def to_value
        return Value.new(self.type,self.content)
    end

    # Converts to a new expression.
    def to_expr
        return self.to_value
    end

    # Converts the value to HDLRuby::Low.
    def to_low
        # Clone the content if possible (frozen contents are immutable
        # and can be shared safely).
        content = self.content.frozen? ? self.content : self.content.clone
        # Create and return the resulting low-level value.
        # FIX: the clone was previously computed but ignored —
        # self.content was passed instead of the local +content+,
        # sharing mutable content between high and low objects.
        return HDLRuby::Low::Value.new(self.type.to_low,content)
    end
end
##
# Module giving high-level reference properties.
module HRef
    # Properties of expressions are also required: a reference is both
    # an expression (HExpression) and an assignable target (HArrow).
    def self.included(klass)
        klass.class_eval do
            include HExpression
            include HArrow

            # Converts to a new expression.
            def to_expr
                self.to_ref
            end
        end
    end

    # Converts to a new reference.
    #
    # NOTE: to be redefined in case of non-reference class.
    def to_ref
        raise "Internal error: to_ref not defined yet for class: #{self.class}"
    end

    # Converts to a new event (triggered on any change of the
    # referenced construct).
    def to_event
        return Event.new(:change,self.to_ref)
    end

    # Creates an access to elements of range +rng+ of the signal.
    #
    # NOTE: +rng+ can be a single expression in which case it is an
    # index.
    def [](rng)
        if rng.respond_to?(:to_expr) then
            # Single expression: convert it to an expression.
            rng = rng.to_expr
        end
        if rng.is_a?(HDLRuby::Low::Expression) then
            # Index case.
            return RefIndex.new(self.to_ref,rng)
        else
            # Range case: ensure both bounds are expressions.
            first = rng.first.to_expr
            last = rng.last.to_expr
            # And create the reference.
            return RefRange.new(self.to_ref,first..last)
        end
    end

    # Iterate over the elements.
    #
    # Returns an enumerator if no ruby block is given.
    def each(&ruby_block)
        # No ruby block? Return an enumerator.
        return to_enum(:each) unless ruby_block
        # A block? Apply it on each element of the type's range.
        self.type.range.heach do |i|
            yield(self[i])
        end
    end

    # Reference can be used like an enumerator.
    include Enumerable
end
##
# Describes a high-level object reference: no low-level equivalent!
class RefObject < Low::Ref
    include HRef

    # The base of the reference.
    attr_reader :base

    # The refered object.
    attr_reader :object

    # Creates a new reference from a +base+ reference and the refered
    # +object+.
    def initialize(base, object)
        # The base must be convertible to a reference.
        unless base.respond_to?(:to_ref)
            raise "Invalid base for a RefObject: #{base}"
        end
        @base = base
        # The object must be named.
        unless object.respond_to?(:name)
            raise "Invalid object for a RefObject: #{object}"
        end
        @object = object
    end

    # Converts to a new reference.
    def to_ref
        return RefObject.new(@base, @object)
    end

    # Converts the object reference to a HDLRuby::Low::RefName.
    def to_low
        return HDLRuby::Low::RefName.new(@base.to_ref.to_low, @object.name)
    end

    # Missing methods are delegated to the refered object.
    def method_missing(m, *args, &ruby_block)
        @object.send(m, *args, &ruby_block)
    end

    # NOTE(review): a commented-out path-walking version of to_low
    # used to follow; it was never activated.
end
##
# Describes a high-level concat reference.
class RefConcat < Low::RefConcat
    include HRef

    # Converts to a new reference: each sub reference is converted too.
    def to_ref
        RefConcat.new(self.each_ref.lazy.map(&:to_ref))
    end

    # Converts the concat reference to HDLRuby::Low.
    def to_low
        HDLRuby::Low::RefConcat.new(self.each_ref.lazy.map(&:to_low))
    end
end
##
# Describes a high-level index reference.
class RefIndex < Low::RefIndex
    include HRef

    # Converts to a new reference with freshly converted sub parts.
    def to_ref
        RefIndex.new(self.ref.to_ref, self.index.to_expr)
    end

    # Converts the index reference to HDLRuby::Low.
    def to_low
        HDLRuby::Low::RefIndex.new(self.ref.to_low, self.index.to_low)
    end
end
##
# Describes a high-level range reference.
class RefRange < Low::RefRange
    include HRef

    # Converts to a new reference, rebuilding the bounds as expressions.
    def to_ref
        bounds = self.range
        RefRange.new(self.ref.to_ref,
                     bounds.first.to_expr..bounds.last.to_expr)
    end

    # Converts the range reference to HDLRuby::Low.
    def to_low
        HDLRuby::Low::RefRange.new(self.ref.to_low, self.range.to_low)
    end
end
##
# Describes a high-level name reference.
class RefName < Low::RefName
    include HRef

    # Converts to a new reference carrying the same name.
    def to_ref
        RefName.new(self.ref.to_ref, self.name)
    end

    # Converts the name reference to HDLRuby::Low.
    def to_low
        HDLRuby::Low::RefName.new(self.ref.to_low, self.name)
    end
end
##
# Describes a this reference.
class RefThis < Low::RefThis
    High = HDLRuby::High
    include HRef

    # Converts to a new reference.
    def to_ref
        RefThis.new
    end

    # Gets the enclosing system type.
    def system
        High.cur_systemT
    end

    # Gets the enclosing behavior if any.
    def behavior
        High.cur_behavior
    end

    # Gets the enclosing block if any.
    def block
        High.cur_block
    end

    # Converts the this reference to HDLRuby::Low.
    def to_low
        HDLRuby::Low::RefThis.new
    end
end
# Gives access to the *this* reference: a fresh RefThis each call.
def this
    return RefThis.new
end
##
# Describes a high-level event.
class Event < Low::Event
    # Converts to a new event of the same type on a fresh reference.
    def to_event
        Event.new(self.type, self.ref.to_ref)
    end

    # Inverts the event: creates a negedge if posedge, a posedge if negedge.
    #
    # NOTE: raises an exception if the event is neither pos nor neg edge.
    def invert
        case self.type
        when :posedge
            Event.new(:negedge, self.ref.to_ref)
        when :negedge
            Event.new(:posedge, self.ref.to_ref)
        else
            raise "Event cannot be inverted: #{self.type}"
        end
    end

    # Converts the event to HDLRuby::Low.
    def to_low
        HDLRuby::Low::Event.new(self.type, self.ref.to_low)
    end
end
##
# Describes a transmission statement.
class Transmit < Low::Transmit
    High = HDLRuby::High
    include HStatement

    # Converts the transmission to a comparison expression.
    #
    # NOTE: required because the <= operator is ambiguous and by
    # default produces a Transmit or a Connection.
    def to_expr
        # The transmission is not a statement any longer:
        # remove it from its enclosing block.
        High.top_user.delete_statement(self)
        # Rebuild it as a <= comparison between both sides.
        Binary.new(:<=, self.left.to_expr, self.right.to_expr)
    end

    # Converts the transmit to HDLRuby::Low.
    def to_low
        HDLRuby::Low::Transmit.new(self.left.to_low, self.right.to_low)
    end
end
##
# Describes a connection.
class Connection < Low::Connection
    High = HDLRuby::High

    # Converts the connection to a comparison expression.
    #
    # NOTE: required because the <= operator is ambiguous and by
    # default produces a Transmit or a Connection.
    def to_expr
        # Remove the connection from the system type: it becomes an
        # expression, not a construct of its own.
        High.top_user.delete_connection(self)
        # Generate the comparison expression.
        return Binary.new(:<=,self.left,self.right)
    end

    # Creates a new behavior sensitive to +event+ including the connection
    # converted to a transmission, and replaces the former by the new
    # behavior.
    def at(event)
        # Creates the behavior.
        left, right = self.left, self.right
        # Detach left and right from their connection since they will
        # be put in a new behavior instead.
        left.parent = right.parent = nil
        # Create the new behavior replacing the connection.
        behavior = Behavior.new(event) do
            left <= right
        end
        # Adds the behavior.
        High.top_user.add_behavior(behavior)
        # Remove the now-redundant connection.
        High.top_user.delete_connection(self)
    end

    # Creates a new behavior with an if statement from +condition+
    # enclosing the connection converted to a transmission, and replaces
    # the former by the new behavior.
    #
    # NOTE: the else part is defined through the helse method.
    def hif(condition)
        # Creates the behavior.
        left, right = self.left, self.right
        # Detach left and right from their connection since they will
        # be put in a new behavior instead.
        left.parent = right.parent = nil
        # Create the new behavior replacing the connection.
        behavior = Behavior.new() do
            hif(condition) do
                left <= right
            end
        end
        # Adds the behavior.
        High.top_user.add_behavior(behavior)
        # Remove the now-redundant connection.
        High.top_user.delete_connection(self)
    end

    # Converts the connection to HDLRuby::Low.
    def to_low
        return HDLRuby::Low::Connection.new(self.left.to_low,
                                            self.right.to_low)
    end
end
##
# Describes a high-level signal.
class SignalI < Low::SignalI
    High = HDLRuby::High
    include HRef

    # The valid bounding directions.
    DIRS = [ :no, :input, :output, :inout, :inner ]

    # The bounding direction.
    attr_reader :dir

    # Tells if the signal can be read.
    attr_reader :can_read

    # Tells if the signal can be written.
    attr_reader :can_write

    # Creates a new signal named +name+ typed as +type+ and with
    # +dir+ as bounding direction.
    #
    # NOTE: +dir+ can be :no, :input, :output, :inout or :inner.
    # It now defaults to :no: the sub-signal creation below used to call
    # SignalI.new with only two arguments, which raised ArgumentError
    # against the former mandatory three-argument signature.
    def initialize(name,type,dir = :no)
        # Initialize the type structure.
        super(name,type)
        unless name.empty? then
            # Named signal: register an hdl-like accessor for it.
            obj = self # For using the right self within the proc
            High.space_reg(name) { obj }
        end
        # Hierarchical type allows access to sub references, so generate
        # the corresponding methods.
        # NOTE: the block parameter is renamed to avoid shadowing the
        # +name+ argument of the constructor.
        if type.respond_to?(:each_name) then
            type.each_name do |sub_name|
                self.define_singleton_method(sub_name) do
                    RefObject.new(self.to_ref,
                                  SignalI.new(sub_name,type.get_type(sub_name)))
                end
            end
        end
        # Check and set the bound.
        self.dir = dir
        # Set the read and write authorisations: allowed by default.
        @can_read = 1.to_expr
        @can_write = 1.to_expr
    end

    # Sets the +condition+ when the signal can be read.
    def can_read=(condition)
        @can_read = condition.to_expr
    end

    # Sets the +condition+ when the signal can be written.
    def can_write=(condition)
        @can_write = condition.to_expr
    end

    # Sets the direction to +dir+.
    #
    # Raises if +dir+ is not one of DIRS.
    def dir=(dir)
        unless DIRS.include?(dir) then
            raise "Invalid bounding for signal #{self.name} direction: #{dir}."
        end
        @dir = dir
    end

    # Creates a positive edge event from the signal.
    def posedge
        return Event.new(:posedge,self.to_ref)
    end

    # Creates a negative edge event from the signal.
    def negedge
        return Event.new(:negedge,self.to_ref)
    end

    # Creates an edge event from the signal.
    def edge
        return Event.new(:edge,self.to_ref)
    end

    # Converts to a new reference.
    def to_ref
        return RefObject.new(this,self)
    end

    # Converts to a new expression.
    def to_expr
        return self.to_ref
    end

    # Converts the signal to HDLRuby::Low and set its +name+.
    def to_low(name = self.name)
        return HDLRuby::Low::SignalI.new(name,self.type.to_low)
    end
end
##
# Module giving the properties of a high-level block.
module HBlock
    High = HDLRuby::High

    # The namespace of the block.
    attr_reader :namespace

    # The return value when building the scope.
    attr_reader :return_value

    # Build the block by executing +ruby_block+ within its namespace.
    def build(&ruby_block)
        High.space_push(@namespace)
        @return_value = High.top_user.instance_eval(&ruby_block)
        High.space_pop
    end

    # Converts to a new reference.
    def to_ref
        RefObject.new(this,self)
    end

    # Unknown methods are resolved through the namespace stack.
    include HScope_missing

    # Creates and adds a new block executed in +mode+, with possible +name+
    # and built by executing +ruby_block+.
    #
    # Returns the new block's build-time return value.
    def add_block(mode = nil, name = :"", &ruby_block)
        new_block = High.make_block(mode,name,&ruby_block)
        self.add_statement(new_block)
        new_block.return_value
    end

    # Creates a new parallel block with possible +name+ and
    # built from +ruby_block+.
    def par(name = :"", &ruby_block)
        return :par unless ruby_block
        self.add_block(:par,name,&ruby_block)
    end

    # Creates a new sequential block with possible +name+ and
    # built from +ruby_block+.
    def seq(name = :"", &ruby_block)
        return :seq unless ruby_block
        self.add_block(:seq,name,&ruby_block)
    end

    # Creates a new block with the current mode with possible +name+ and
    # built from +ruby_block+.
    def sub(name = :"", &ruby_block)
        self.add_block(self.mode,name,&ruby_block)
    end

    # Get the current mode of the block.
    #
    # NOTE: for name coherency purpose only.
    def block
        self.mode
    end

    # Need to be able to declare select operators.
    include Hmux

    # Creates a new if statement with a +condition+ that when met leads
    # to the execution of the block in +mode+ generated by the +ruby_block+.
    #
    # NOTE: the else part is defined through the helse method.
    def hif(condition, mode = nil, &ruby_block)
        self.add_statement(If.new(condition,mode,&ruby_block))
    end

    # Sets the block executed when the condition is not met to the block
    # in +mode+ generated by the execution of +ruby_block+.
    #
    # Can only be used once.
    def helse(mode = nil, &ruby_block)
        # The helse completes the previous statement of the same block,
        # which must be an hif or an hcase.
        statement = @statements.last
        unless statement.is_a?(If) or statement.is_a?(Case) then
            raise "Error: helse statement without hif nor hcase (#{statement.class})."
        end
        statement.helse(mode, &ruby_block)
    end

    # Sets the condition check when the condition is not met to the block,
    # with a +condition+ that when met leads
    # to the execution of the block in +mode+ generated by the +ruby_block+.
    def helsif(condition, mode = nil, &ruby_block)
        # The helsif completes the previous statement of the same block,
        # which must be an hif.
        statement = @statements.last
        unless statement.is_a?(If) then
            raise "Error: helsif statement without hif (#{statement.class})."
        end
        statement.helsif(condition, mode, &ruby_block)
    end

    # Creates a new case statement with a +value+ used for deciding which
    # block to execute.
    #
    # NOTE: the when part is defined through the hwhen method.
    def hcase(value)
        self.add_statement(Case.new(value))
    end

    # Sets the block of a case structure executed when the +match+ is met
    # to the block in +mode+ generated by the execution of +ruby_block+.
    #
    # Can only be used once per match.
    def hwhen(match, mode = nil, &ruby_block)
        # The hwhen completes the previous statement of the same block,
        # which must be an hcase.
        statement = @statements.last
        unless statement.is_a?(Case) then
            raise "Error: hwhen statement without hcase (#{statement.class})."
        end
        statement.hwhen(match, mode, &ruby_block)
    end
end
##
# Describes a high-level block.
class Block < Low::Block
    High = HDLRuby::High
    include HBlock
    include Hinner

    # Creates a new +mode+ sort of block, with possible +name+
    # and builds it by executing +ruby_block+.
    def initialize(mode, name=:"", &ruby_block)
        # Initialize the low-level block.
        super(mode,name)
        unless name.empty? then
            # Named block: register an hdl-like accessor for it.
            obj = self # For using the right self within the proc
            High.space_reg(name) { obj }
        end
        # Creates the namespace and builds the block's content.
        @namespace = Namespace.new(self)
        build(&ruby_block)
    end

    # Converts the block to HDLRuby::Low.
    def to_low
        # Create the resulting block.
        blockL = HDLRuby::Low::Block.new(self.mode)
        # Push the namespace and name stacks while converting the
        # internals of the block.
        High.space_push(@namespace)
        High.names_push
        # Add the inner signals.
        self.each_inner { |inner| blockL.add_inner(inner.to_low) }
        # Add the statements.
        self.each_statement { |stmnt| blockL.add_statement(stmnt.to_low) }
        # Restore the stacks.
        High.names_pop
        High.space_pop
        # Return the resulting block.
        blockL
    end
end
# Describes a timed block.
#
# NOTE:
# * this is the only kind of block that can include time statements.
# * this kind of block is not synthesizable!
class TimeBlock < Low::TimeBlock
    High = HDLRuby::High
    include HBlock

    # Creates a new +type+ sort of block with possible +name+
    # and builds it by executing +ruby_block+.
    def initialize(type, name = :"", &ruby_block)
        # Initialize the low-level block.
        super(type,name)
        unless name.empty? then
            # Named block: register an hdl-like accessor for it.
            obj = self # For using the right self within the proc
            High.space_reg(name) { obj }
        end
        # Creates the namespace and builds the block's content.
        @namespace = Namespace.new(self)
        build(&ruby_block)
    end

    # Adds a wait +delay+ statement in the block.
    def wait(delay)
        self.add_statement(TimeWait.new(delay))
    end

    # Adds a loop until +delay+ statement in the block in +mode+ whose
    # loop content is built using +ruby_block+.
    def repeat(delay, mode = nil, &ruby_block)
        # Build the content block.
        content = High.make_block(mode,&ruby_block)
        # Create and add the statement.
        self.add_statement(TimeRepeat.new(content,delay))
    end

    # Converts the time block to HDLRuby::Low.
    def to_low
        blockL = HDLRuby::Low::TimeBlock.new(self.mode)
        # Add the inner signals.
        self.each_inner { |inner| blockL.add_inner(inner.to_low) }
        # Add the statements.
        self.each_statement { |stmnt| blockL.add_statement(stmnt.to_low) }
        blockL
    end
end
# Creates a block executed in +mode+, with possible +name+,
# that can be timed or not depending on the enclosing object, and builds
# it by executing the enclosing +ruby_block+.
#
# NOTE: not a method to include since it can only be used with
# a behavior or a block. Hence set as module method.
def self.make_block(mode = nil, name = :"", &ruby_block)
    # Without an explicit mode, inherit the enclosing block's mode,
    # defaulting to :par at the top level.
    mode ||= top_user.is_a?(Block) ? top_user.mode : :par
    # A timed enclosing context produces a timed block.
    if top_user.is_a?(TimeBlock) then
        TimeBlock.new(mode,name,&ruby_block)
    else
        Block.new(mode,name,&ruby_block)
    end
end
# Creates a specifically timed block in +mode+, with possible +name+
# and builds it by executing the enclosing +ruby_block+.
#
# NOTE: not a method to include since it can only be used with
# a behavior or a block. Hence set as module method.
def self.make_time_block(mode = nil, name = :"", &ruby_block)
    unless mode then
        # No type of block given, get a default one.
        if top_user.is_a?(Block) then
            # There is an upper block, use its mode.
            # NOTE: was `block.mode`, but `block` is undefined in this
            # module-method context (NoMethodError); the sibling
            # make_block correctly uses top_user.mode.
            mode = top_user.mode
        else
            # There is no upper block, use :par as default.
            mode = :par
        end
    end
    return TimeBlock.new(mode,name,&ruby_block)
end
##
# Describes a high-level behavior.
class Behavior < Low::Behavior
    High = HDLRuby::High

    # Creates a new behavior activated on a list of +events+, and
    # built by executing +ruby_block+.
    #
    # If the last element of +events+ is symbol-convertible, it is
    # popped and used as the mode (:par or :seq) of the top block
    # instead of being treated as an event.
    def initialize(*events,&ruby_block)
        mode = nil
        if events.last.respond_to?(:to_sym) then
            # A mode is given, use it.
            mode = events.pop.to_sym
        end
        # Initialize the low-level behavior without a block yet:
        # the block is built afterwards so that @@cur_behavior is
        # visible while it is being constructed.
        super(nil)
        # Sets the current behavior.
        @@cur_behavior = self
        # Add the events.
        events.each { |event| self.add_event(event) }
        # Create and add the block.
        self.block = High.make_block(mode,&ruby_block)
        # Unset the current behavior.
        @@cur_behavior = nil
    end

    # Converts the behavior to HDLRuby::Low.
    def to_low
        # Create the low level block.
        blockL = self.block.to_low
        # Create the low level events.
        eventLs = self.each_event.map { |event| event.to_low }
        # Create and return the resulting low level behavior.
        behaviorL = HDLRuby::Low::Behavior.new(blockL)
        eventLs.each(&behaviorL.method(:add_event))
        return behaviorL
    end
end
##
# Describes a high-level timed behavior.
class TimeBehavior < Low::TimeBehavior
    High = HDLRuby::High

    # Creates a new timed behavior built by executing +ruby_block+
    # within a block of kind +mode+.
    #
    # NOTE: the top block is forced to be a TimeBlock.
    def initialize(mode = nil, &ruby_block)
        super(High.make_time_block(mode,&ruby_block))
    end

    # Converts the time behavior to HDLRuby::Low.
    def to_low
        # Convert the block and the events.
        blockL = self.block.to_low
        eventLs = self.each_event.map(&:to_low)
        # Assemble the resulting low-level behavior.
        behaviorL = HDLRuby::Low::TimeBehavior.new(blockL)
        eventLs.each { |eventL| behaviorL.add_event(eventL) }
        behaviorL
    end
end
# # Ensures constants defined is this module are prioritary.
# # @!visibility private
# def self.included(base) # :nodoc:
# if base.const_defined?(:SignalI) then
# base.send(:remove_const,:SignalI)
# base.const_set(:SignalI,HDLRuby::High::SignalI)
# end
# end
# Handle the namespaces for accessing the hardware referencing methods.

# The universe, i.e., the top system type.
Universe = SystemT.new(:"") {}

# The universe does not have input, output, nor inout: undefine both
# the declaration macros and the corresponding adders on its singleton.
class << Universe
    undef_method :input
    undef_method :output
    undef_method :inout
    undef_method :add_input
    undef_method :add_output
    undef_method :add_inout
end

# The namespace stack: never empty, the top is a nameless system without
# input nor output (the universe's scope namespace).
Namespaces = [Universe.scope.namespace]
private_constant :Namespaces
# Pushes +namespace+ onto the namespace stack.
def self.space_push(namespace)
    # Ensure namespace is really a namespace before stacking it.
    Namespaces.push(namespace.to_namespace)
end
# Inserts +namespace+ at +index+ of the namespace stack.
def self.space_insert(index,namespace)
    Namespaces.insert(index.to_i, namespace.to_namespace)
end
# Pops a namespace.
#
# Raises if the bottom of the stack (the universe) would be removed.
def self.space_pop
    raise "Internal error: cannot pop further namespaces." if Namespaces.size <= 1
    Namespaces.pop
end
# Gets the index of a +namespace+ within the stack (nil when absent).
def self.space_index(namespace)
    Namespaces.index(namespace)
end
# Gets the top of the namespaces stack.
def self.space_top
    Namespaces.last
end
# Gets the construct whose namespace is the top of the namespaces stack.
def self.top_user
    space_top.user
end
# Gather the result of the execution of +method+ from all the users
# of the namespaces, from the top of the stack downward.
#
# NOTE: the former reduce-based version returned nil from the block
# whenever a user did not respond to +method+, which corrupted the
# accumulator and crashed with NoMethodError on the next iteration.
def self.from_users(method)
    Namespaces.reverse_each.each_with_object([]) do |space,res|
        user = space.user
        res.concat([*user.send(method)]) if user.respond_to?(method)
    end
end
# Iterates over each namespace.
#
# Returns an enumerator if no ruby block is given.
def self.space_each(&ruby_block)
    return to_enum(:space_each) unless ruby_block
    Namespaces.each(&ruby_block)
end
# Tells if we are within a system type (i.e., below the universe).
def self.in_systemT?
    Namespaces.size > 1
end
# Gets the enclosing system type if any.
#
# Raises when the namespace stack only holds the universe.
def self.cur_systemT
    if Namespaces.size <= 1 then
        raise "Not within a system type."
    else
        # Find the nearest namespace whose user is the top scope of a
        # system type, and return that system type (the scope's parent).
        return Namespaces.reverse_each.find do |space|
            space.user.is_a?(Scope) and space.user.parent.is_a?(SystemT)
        end.user.parent
    end
end
# The current behavior: by default none.
@@cur_behavior = nil

# Gets the enclosing behavior if any (nil when outside any behavior,
# set by Behavior#initialize while its block is being built).
def self.cur_behavior
    @@cur_behavior
end
# Tells if we are in a behavior (i.e., the top user is a block).
def self.in_behavior?
    top_user.is_a?(Block)
end
# Gets the enclosing block if any.
#
# NOTE: +level+ allows to get an upper block of the currently enclosing
# block.
def self.cur_block(level = 0)
    space = Namespaces[-1-level]
    unless space.user.is_a?(Block)
        raise "Not within a block: #{space.user.class}"
    end
    space.user
end
# Registers hardware referencing method +name+ to the current namespace
# (the top of the stack).
def self.space_reg(name,&ruby_block)
    Namespaces.last.add_method(name,&ruby_block)
end
# Looks up and calls method +name+ from the namespace stack with arguments
# +args+ and block +ruby_block+.
#
# Raises NoMethodError when no namespace nor the global module defines
# +name+.
def self.space_call(name,*args,&ruby_block)
    # Ensures name is a symbol.
    name = name.to_sym
    # Look from the top of the namespace stack.
    Namespaces.reverse_each do |space|
        if space.respond_to?(name) then
            # The method is found, call it.
            # NOTE: the block used to be dropped on this path although
            # the global fallback below forwarded it; forward it
            # consistently.
            return space.send(name,*args,&ruby_block)
        end
    end
    # Look in the global methods.
    if HDLRuby::High.respond_to?(name) then
        # Found.
        return HDLRuby::High.send(name,*args,&ruby_block)
    end
    # Not found.
    raise NoMethodError.new("undefined local variable or method `#{name}'.")
end
# Extends the standard classes for support of HDLRuby.

# Extends the Numeric class for conversion to a high-level expression.
class ::Numeric
    # NOTE: to_expr itself is to be defined in the subclasses of ::Numeric.

    # Converts to a new high-level value.
    def to_value
        self.to_expr
    end

    # Converts to a new delay of the corresponding time unit:
    # ps (picoseconds), ns (nanoseconds), us (microseconds),
    # ms (milliseconds) and s (seconds).
    [:ps, :ns, :us, :ms, :s].each do |unit|
        define_method(unit) { Delay.new(self,unit) }
    end
end
# Extends the Integer class for computing the bit width.
class ::Integer
    # Gets the bit width required to represent the value
    # (e.g. 7 -> 3, 8 -> 4).
    def width
        Math.log2(self.succ).ceil
    end
end
# Extends the Fixnum class for conversion to expression.
#
# NOTE(review): Fixnum has been deprecated since Ruby 2.4 (unified into
# Integer) and the constant was removed in Ruby 3.2, where reopening it
# raises NameError — presumably this targets older rubies; confirm the
# supported Ruby versions.
class ::Fixnum
    # Converts to a new high-level expression typed as Integer.
    def to_expr
        return Value.new(Integer,self)
    end
end
# Extends the Bignum class for conversion to expression.
#
# NOTE(review): Bignum has been deprecated since Ruby 2.4 (unified into
# Integer) and the constant was removed in Ruby 3.2, where reopening it
# raises NameError — presumably this targets older rubies; confirm the
# supported Ruby versions.
class ::Bignum
    # Converts to a new high-level expression typed as Bignum.
    def to_expr
        return Value.new(Bignum,self)
    end
end
# Extends the Float class for computing the bit width and conversion
# to expression.
class ::Float
    # Converts to a new high-level expression typed as Real.
    def to_expr
        Value.new(Real,self)
    end

    # Gets the bit width: floats are always represented on 64 bits.
    def width
        64
    end
end
# Extends the Hash class for declaring signals of structure types.
class ::Hash
    # Declares high-level signals named +names+ whose type is the
    # structure described by the current hash, one declarator per
    # direction: input, output, inout and inner.
    [:input, :output, :inout, :inner].each do |dir|
        define_method(dir) do |*names|
            names.each do |name|
                signal = SignalI.new(name, TypeStruct.new(:"",self), dir)
                HDLRuby::High.top_user.send(:"add_#{dir}", signal)
            end
        end
    end
end
# Extends the Array class for conversion to a high-level expression.
class ::Array
    include HArrow

    # Converts to a new high-level expression: the concatenation of the
    # converted elements.
    def to_expr
        expr = Concat.new
        self.each {|elem| expr.add_expression(elem.to_expr) }
        expr
    end

    # Converts to a new high-level reference: the concatenation of the
    # converted elements.
    def to_ref
        expr = RefConcat.new
        self.each {|elem| expr.add_ref(elem.to_ref) }
        expr
    end

    # Converts to a new type: a bit vector when the single element is a
    # range or integer-like, a tuple type otherwise.
    def to_type
        if self.size == 1 and
           ( self[0].is_a?(Range) or self[0].respond_to?(:to_i) ) then
            # Vector type case.
            return bit[*self]
        else
            # Tuple type case.
            return TypeTuple.new(:"",*self)
        end
    end

    # SignalI creation through the array taken as type.

    # Declares high-level input signals named +names+ of the current type.
    def input(*names)
        High.top_user.make_inputs(self.to_type,*names)
    end

    # Declares high-level untyped output signals named +names+ of the
    # current type.
    def output(*names)
        High.top_user.make_outputs(self.to_type,*names)
    end

    # Declares high-level untyped inout signals named +names+ of the
    # current type.
    def inout(*names)
        High.top_user.make_inouts(self.to_type,*names)
    end

    # Declares high-level untyped inner signals named +names+ of the
    # current type.
    def inner(*names)
        High.top_user.make_inners(self.to_type,*names)
    end

    # Array construction shortcuts.

    # Create an array whose number of elements is given by the content
    # of the current array, filled by +obj+ objects.
    # If +obj+ is nil, +ruby_block+ is used instead for filling the array.
    def call(obj = nil, &ruby_block)
        unless self.size == 1 then
            # Fixed typo in the message (was "opertor").
            raise "Invalid array for call operator."
        end
        number = self[0].to_i
        if obj then
            return Array.new(number,obj)
        else
            return Array.new(number,&ruby_block)
        end
    end

    # Create an array of instances obtained by instantiating the elements
    # using +args+ as argument and register the result to +name+.
    #
    # NOTE: the instances are unnamed since it is the resulting array
    # that is registered.
    def make(name,*args)
        # Instantiate the types.
        instances = self.map { |elem| elem.instantiate(:"",*args) }
        # Add them to the top system.
        High.space_top.user.add_groupI(name,*instances)
        # Register and return the result.
        High.space_reg(name) { High.space_top.user.get_groupI(name) }
        return High.space_top.user.get_groupI(name)
    end
end
# Extends the symbol class for auto declaration of input or output.
class ::Symbol
  High = HDLRuby::High

  # # Converts to a new high-level expression.
  # def to_expr
  #     self.to_ref
  # end
  # # Converts to a new high-level reference refering to an unbounded signal.
  # def to_ref
  #     # Create the unbounded signal and add it to the upper system type.
  #     signal = SignalI.new(self,void,:no)
  #     High.cur_systemT.add_unbound(signal)
  #     # Convert it to a reference and return the result.
  #     return signal.to_ref
  # end
  # alias :+@ :to_ref

  # Converts the symbol to a new Value when it matches the literal syntax:
  # <type char><optional width><optional base char><digits>,
  # e.g. :b8hFF (8-bit binary type, hexadecimal digits FF), :u16d255,
  # :s4b1010.
  # * type char: "b" (bit), "u" (unsigned) or "s" (signed).
  # * base char: "b" (binary), "o" (octal), "d" (decimal), "h" (hexa);
  #   when absent the base defaults to binary.
  # 4-state digits ("x"/"z", any case) are accepted except in decimal.
  #
  # Returns nil if no value can be obtained from it.
  def to_value
    str = self.to_s
    # Get and check the type character (first character).
    type = str[0]
    str = str[1..-1]
    return nil if str.empty?
    return nil unless ["b","u","s"].include?(type)
    # Get the width if any (run of decimal digits after the type char).
    if str[0].match(/[0-9]/) then
      width = str.scan(/[0-9]*/)[0]
    else
      width = nil
    end
    old_str = str # Save the string in this state since its first chars
                  # can be erroneously considered as giving the width.
    str = str[width.size..-1] if width
    # Get the base and the value.
    base = str[0]
    unless ["b", "o", "d", "h"].include?(base) then
      # No base found, default is bit (binary).
      base = "b"
      # And the width was actually a part of the value.
      value = old_str
      width = nil
    else
      # Get the value.
      value = str[1..-1]
    end
    # Compute the bit width and the value.
    case base
    when "b" then
      # Base 2: the width defaults to the number of digits.
      width = width ? width.to_i : value.size
      # Check the value (4-state digits allowed).
      return nil unless value.match(/^[0-1zxZX]+$/)
    when "o" then
      # Base 8: each digit weighs 3 bits.
      width = width ? width.to_i : value.size * 3
      if value.match(/^[0-7xXzZ]+$/) then
        # 4-state value: compute the corresponding bit string, each
        # octal digit expanding to 3 characters.
        value = value.each_char.map do |c|
          c = c.upcase
          if c == "X" or c.upcase == "Z" then
            c * 3
          else
            c.to_i(8).to_s(2).rjust(3,"0")
          end
        end.join
      else
        # Invalid value.
        return nil
      end
    when "d" then
      # Base 10: the width defaults to the size of the binary form + 1.
      width = width ? width.to_i : value.to_i.to_s(2).size + 1
      # Check the value.
      return nil unless value.match(/^[0-9]+$/)
      # Compute it (base 10 values cannot be 4-state!)
      value = value.to_i.to_s(2)
    when "h" then
      # Base 16: each digit weighs 4 bits.
      width = width ? width.to_i : value.size * 4
      if value.match(/^[0-9a-fA-FxXzZ]+$/) then
        # 4-state value: compute the corresponding bit string, each
        # hexadecimal digit expanding to 4 characters.
        value = value.each_char.map do |c|
          c = c.upcase
          if c == "X" or c.upcase == "Z" then
            c * 4
          else
            c.to_i(16).to_s(2).rjust(4,"0")
          end
        end.join
      else
        # Invalid value.
        return nil
      end
    else
      # Unknown base.
      return nil
    end
    # Compute the type from the type character and the width.
    case type
    when "b" then
      type = bit[width]
    when "u" then
      type = unsigned[width]
    when "s" then
      type = signed[width]
    else
      # Unknown type.
      return nil
    end
    # Create and return the value (holds the bit string directly).
    # return Value.new(type,HDLRuby::BitString.new(value))
    return Value.new(type,value)
  end
end
# Extends the range class to support to_low
class ::Range
  # Converts the bounds to HDLRuby::Low (when they support the
  # conversion) and returns the resulting range.
  def to_low
    lower, upper = self.first, self.last
    lower = lower.to_low if lower.respond_to?(:to_low)
    upper = upper.to_low if upper.respond_to?(:to_low)
    (lower..upper)
  end

  # Iterates over the range as hardware: the bounds are reordered so
  # that iteration is always possible (ascending).
  #
  # Returns an enumerator if no ruby block is given.
  def heach(&ruby_block)
    # Without a block, return an enumerator.
    return to_enum(:heach) unless ruby_block
    # Normalize the bounds so the range can be iterated.
    lo, hi = self.first, self.last
    lo, hi = hi, lo if lo > hi
    # Apply the block on each element of the normalized range.
    (lo..hi).each(&ruby_block)
  end
end
# Method and attribute for generating an absolute uniq name.
# Such names cannot be used in HDLRuby::High code, but can be used
# to generate such code.
@@absoluteCounter = -1 # The absolute name counter.

# Generates an absolute uniq name (shaped :"<number>", hence invalid
# as a user-level identifier).
def self.uniq_name
  @@absoluteCounter += 1
  ":#{@@absoluteCounter}".to_sym
end
# Methods for managing the conversion to HDLRuby::Low

# Methods for generating uniq names in context

# The stack of names for creating new names without conflicts.
NameStack = [ Set.new ]

# Pushes a new (empty) name set on the name stack.
def self.names_push
  NameStack << Set.new
end

# Pops the top name set from the name stack.
def self.names_pop
  NameStack.pop
end

# Adds a +name+ to the top of the stack.
def self.names_add(name)
  NameStack.last.add(name.to_s)
end

# Checks if a +name+ is present somewhere in the stack.
def self.names_has?(name)
  NameStack.find do |names|
    names.include?(name)
  end
end

# Creates and adds a new name from +base+ that does not collide with
# the existing names, and returns it as a symbol.
def self.names_create(base)
  candidate = base.to_s.clone
  # Suffix underscores until the name is conflict-free.
  candidate << "_" while self.names_has?(candidate)
  # Register and return the new name.
  self.names_add(candidate)
  candidate.to_sym
end
# Standard vector types.
Integer = TypeSigned.new(:integer)   # Signed integer type.
Natural = TypeUnsigned.new(:natural) # Unsigned (non-negative) integer type.
Bignum = TypeSigned.new(:bignum,HDLRuby::Infinity..0) # Unbounded-width signed type.
Real = TypeFloat.new(:float) # Floating-point type (NOTE(review): registered under the name :float, not :real — confirm intended).
end
# Enters in HDLRuby::High mode: mixes HDLRuby::High into the main
# object, installs a namespace-aware method_missing on it, and creates
# the global clock and reset signals ($clk / $rst).
def self.configure_high
  include HDLRuby::High
  class << self
    # For main, missing methods are looked for in the namespaces.
    def method_missing(m, *args, &ruby_block)
      # Is the missing method an immediate value (e.g. :b8hFF)?
      value = m.to_value
      return value if value and args.empty?
      # Not a value, but maybe it is in the top namespace.
      if Namespaces[-1].respond_to?(m) then
        # Yes use it.
        Namespaces[-1].send(m,*args,&ruby_block)
      else
        # No, true error
        raise NoMethodError.new("undefined local variable or method `#{m}'.")
      end
    end
  end
  # Generate the standard signals: global clock and reset declared as
  # inner signals of the Universe scope.
  # $clk = SignalI.new(:__universe__clk__,Bit,:inner)
  # $rst = SignalI.new(:__universe__rst__,Bit,:inner)
  $clk = Universe.scope.inner :__universe__clk__
  $rst = Universe.scope.inner :__universe__rst__
end
# Renamed cur_systemT to cur_system and in_systemT? to in_system?.
# require "HDLRuby/hruby_base"
require "HDLRuby/hruby_low"
require "HDLRuby/hruby_types"
require "HDLRuby/hruby_values"
require "HDLRuby/hruby_bstr"
require 'set'
##
# High-level libraries for describing digital hardware.
#######################################################
module HDLRuby::High
# Base = HDLRuby::Base
Low = HDLRuby::Low

# Gets the infinity bound (used for unbounded ranges).
def infinity
  HDLRuby::Infinity
end
##
# Module providing extension of class.
module SingletonExtend
  # Copies every singleton method of +obj+ onto the eigen class of the
  # current object.
  #
  # NOTE: conflicting existing singleton content will be overridden if
  # present.
  def eigen_extend(obj)
    obj.singleton_methods.each do |meth_name|
      # Skip :yaml_tag (copying it causes trouble).
      next if meth_name == :yaml_tag
      define_singleton_method(meth_name, &obj.singleton_method(meth_name))
    end
  end
end
##
# Describes a namespace.
# Used for managing the access points to internals of hardware constructs.
class Namespace

  include SingletonExtend

  # The reserved names: they cannot be registered through add_method
  # since they would override the namespace's own interface.
  RESERVED = [ :user, :initialize, :add_method, :concat_namespace,
               :to_namespace, :user?, :user_deep? ]

  # The construct using the namespace.
  attr_reader :user

  # Creates a new namespace attached to +user+.
  def initialize(user)
    # Sets the user.
    @user = user
    # Initialize the concat namespaces.
    @concats = []
  end

  # Adds method +name+ built from +ruby_block+, provided the name is
  # not empty.
  #
  # Raises when +name+ is reserved (it would clobber the namespace API).
  def add_method(name,&ruby_block)
    unless name.empty? then
      if RESERVED.include?(name.to_sym) then
        # Fixed typo in the error message ("Resevered" -> "Reserved").
        raise "Reserved name #{name} cannot be overridden."
      end
      define_singleton_method(name,&ruby_block)
    end
  end

  # Concats another +namespace+ to current one: its access methods are
  # copied onto self and it is remembered for deep user lookups.
  def concat_namespace(namespace)
    # Ensure namespace is really a namespace and concat it.
    namespace = namespace.to_namespace
    self.eigen_extend(namespace)
    # Adds the concat to the list.
    @concats << namespace
  end

  # Ensure it is a namespace
  def to_namespace
    return self
  end

  # Tell if an +object+ is the user of the namespace.
  def user?(object)
    return @user.equal?(object)
  end

  # Tell if an +object+ is the user of the namespace or of one of its
  # concats.
  def user_deep?(object)
    # Convert the object to a user if appliable (for SystemT).
    object = object.to_user if object.respond_to?(:to_user)
    # Maybe object is the user of this namespace.
    return true if user?(object)
    # No, try in the concat namespaces.
    @concats.any? { |concat| concat.user_deep?(object) }
  end
end
# ##
# # Module providing mixin properties to hardware types.
# module HMix
# # Tells this is a hardware type supporting mixins.
# #
# # NOTE: only there for being checked through respond_to?
# def hmix?
# return true
# end
# # Mixins hardware types +htypes+.
# def include(*htypes)
# # Initialize the list of mixins hardware types if required.
# @includes ||= []
# # Check and add the hardware types.
# htypes.each do |htype|
# unless htype.respond_to?(:hmix?) then
# raise "Invalid class for mixin: #{htype.class}"
# end
# @includes << htype
# end
# end
# # # Mixins hardware types +htypes+ by extension.
# # def extend(htypes)
# # # Initialize the list of mixins hardware types if required.
# # @extends ||= []
# # # Check and add the hardware types.
# # htypes.each do |htype|
# # unless htype.respond_to?(:hmix?) then
# # raise "Invalid class for mixin: #{htype.class}"
# # end
# # @includes << htype
# # end
# # end
# end
##
# Module providing handling of unknown methods for hardware constructs.
module Hmissing
  High = HDLRuby::High

  # Missing methods may be immediate values, if not, they are looked up
  # in the upper level of the namespace if any.
  #
  # Lookup order: immediate value literal (e.g. :b8hFF), then self in
  # the namespace stack, then self's private namespace, then its public
  # namespace. Raises NoMethodError when every lookup fails.
  def method_missing(m, *args, &ruby_block)
    # Is the missing method an immediate value?
    value = m.to_value
    return value if value and args.empty?
    # No, is there an upper namespace, i.e. is the current object
    # present in the space?
    if High.space_index(self) then
      # Yes, self is in it, can try the methods in the space.
      High.space_call(m,*args,&ruby_block)
    elsif self.respond_to?(:namespace) and
          High.space_index(self.namespace) then
      # Yes, the private namespace is in it, can try the methods in
      # the space.
      High.space_call(m,*args,&ruby_block)
    elsif self.respond_to?(:public_namespace) and
          High.space_index(self.public_namespace) then
      # Yes, the public namespace is in it, can try the methods in
      # the space.
      High.space_call(m,*args,&ruby_block)
    else
      # No, this is a true error.
      raise NoMethodError.new("undefined local variable or method `#{m}'.")
    end
  end
end
module HScope_missing
  include Hmissing

  # Keep access to the generic lookup of Hmissing.
  alias h_missing method_missing

  # Missing methods are looked for in the private namespace.
  #
  # NOTE: it is ok to use the private namespace because the scope
  # can only be accessed if it is available from its systemT.
  def method_missing(m, *args, &ruby_block)
    # Is the scope currently opened (i.e., is it among the users of
    # the top of the namespace stack)?
    if High.space_top.user_deep?(self) then
      # Yes, use the stack of namespaces (generic Hmissing lookup).
      h_missing(m,*args,&ruby_block)
    else
      # No, look into the current namespace and return a reference
      # to the result if it is a referable hardware object.
      res = self.namespace.send(m,*args,&ruby_block)
      if res.respond_to?(:to_ref) then
        # This is a referable object, build the reference from
        # the namespace.
        return RefObject.new(self.to_ref,res)
      end
      # NOTE(review): when the result is not referable, execution
      # falls through and the method returns nil — confirm intended.
    end
  end
end
##
# Module providing methods for declaring select expressions.
module Hmux
  # Creates an operator selecting from +select+ one of the +choices+.
  #
  # NOTE: +choices+ can either be a list of arguments or an array.
  # If +choices+ has only two entries
  # (and it is not a hash), +value+ will be converted to a boolean.
  def mux(select, *choices)
    # A single argument is understood as an array of choices.
    choices = choices.flatten(1) if choices.size == 1
    # Build the selection from the converted select and choices.
    Select.new("?", select.to_expr, *choices.map { |choice| choice.to_expr })
  end
end
##
# Module providing declaration of inner signal (assumes inner signals
# are present.)
module Hinner

  # When included, adds the inner-signal declaration methods to +klass+,
  # but only when the class does not already define them, so that a
  # class can keep its own specialized versions.
  def self.included(klass)
    klass.class_eval do
      unless instance_methods.include?(:make_inners) then
        # Creates and adds a set of inners typed +type+ from a list of +names+.
        #
        # NOTE: a name can also be a signal, is which case it is duplicated.
        def make_inners(type, *names)
          # Returns the last signal added (res).
          res = nil
          names.each do |name|
            if name.respond_to?(:to_sym) then
              # Adds the inner signal
              res = self.add_inner(SignalI.new(name,type,:inner))
            else
              # Deactivated because conflict with parent.
              # signal = name.clone
              # signal.dir = :inner
              # self.add_inner(signal)
              raise "Invalid class for a name: #{name.class}"
            end
          end
          return res
        end
      end

      unless instance_methods.include?(:inner) then
        # Declares high-level bit inner signals named +names+.
        def inner(*names)
          self.make_inners(bit,*names)
        end
      end
    end
  end
end
# Classes describing hardware types.
##
# Describes a high-level system type.
class SystemT < Low::SystemT
High = HDLRuby::High
# include Hinner
include SingletonExtend
# The public namespace
#
# NOTE: the private namespace is the namespace of the scope object.
attr_reader :public_namespace

##
# Creates a new high-level system type named +name+ and inheriting
# from +mixins+.
#
# The proc +ruby_block+ is executed when instantiating the system.
#
# Raises when one of +mixins+ is not itself a SystemT.
def initialize(name, *mixins, &ruby_block)
  # Initialize the system type structure with a scope named after
  # the system (the scope carries the private namespace).
  super(name,Scope.new(name))
  # Initialize the set of extensions to transmit to the instances'
  # eigen class
  @singleton_instanceO = Namespace.new(self.scope)
  # Create the public namespace.
  @public_namespace = Namespace.new(self.scope)
  # Check and set the mixins.
  mixins.each do |mixin|
    unless mixin.is_a?(SystemT) then
      raise "Invalid class for inheriting: #{mixin.class}."
    end
  end
  @to_includes = mixins
  # Prepare the instantiation methods
  make_instantiater(name,SystemI,:add_systemI,&ruby_block)
end
# Converts to a namespace user: the system's scope plays that role.
def to_user
  @scope
end
# # Adds a group of system +instances+ named +name+.
# def add_groupI(name, *instances)
# # Ensure name is a symbol and is not already used for another
# # group.
# name = name.to_sym
# if @groupIs.key?(name)
# raise "Group of system instances named #{name} already exist."
# end
# # Add the group.
# @groupIs[name.to_sym] = instances
# # Sets the parent of the instances.
# instances.each { |instance| instance.parent = self }
# end
# # Access a group of system instances by +name+.
# #
# # NOTE: the result is a copy of the group for avoiding side effects.
# def get_groupI(name)
# return @groupIs[name.to_sym].clone
# end
# # Iterates over the group of system instances.
# #
# # Returns an enumerator if no ruby block is given.
# def each_groupI(&ruby_block)
# # No ruby block? Return an enumerator.
# return to_enum(:each_groupI) unless ruby_block
# # A block? Apply it on each input signal instance.
# @groupIs.each(&ruby_block)
# end
# Creates and adds a set of inputs typed +type+ from a list of +names+.
#
# NOTE: a name can also be a signal, is which case it is duplicated.
def make_inputs(type, *names)
  make_interface_signals(type, names, :input, :add_input)
end

# Creates and adds a set of outputs typed +type+ from a list of +names+.
#
# NOTE: a name can also be a signal, is which case it is duplicated.
def make_outputs(type, *names)
  make_interface_signals(type, names, :output, :add_output)
end

# Creates and adds a set of inouts typed +type+ from a list of +names+.
#
# NOTE: a name can also be a signal, is which case it is duplicated.
def make_inouts(type, *names)
  make_interface_signals(type, names, :inout, :add_inout)
end

# Common implementation of make_inputs/make_outputs/make_inouts
# (previously triplicated): creates a SignalI of +type+ with direction
# +dir+ for each of +names+, registers each with +adder+, and returns
# the last signal added.
#
# Raises when a name cannot be converted to a symbol.
def make_interface_signals(type, names, dir, adder)
  res = nil
  names.each do |name|
    unless name.respond_to?(:to_sym) then
      raise "Invalid class for a name: #{name.class}"
    end
    res = self.send(adder, SignalI.new(name, type, dir))
  end
  return res
end
private :make_interface_signals
# # Creates and adds a set of inners typed +type+ from a list of +names+.
# #
# # NOTE: a name can also be a signal, is which case it is duplicated.
# def make_inners(type, *names)
# res = nil
# names.each do |name|
# if name.respond_to?(:to_sym) then
# res = self.add_inner(SignalI.new(name,type,:inner))
# else
# raise "Invalid class for a name: #{name.class}"
# end
# end
# return res
# end
# # Adds a +name+ to export.
# #
# # NOTE: if the name do not corresponds to any inner signal nor
# # instance, raise an exception.
# def add_export(name)
# # Check the name.
# name = name.to_sym
# # Look for construct to make public.
# # Maybe it is an inner signals.
# inner = self.get_inner(name)
# if inner then
# # Yes set it as export.
# @exports[name] = inner
# return
# end
# # No, maybe it is an instance.
# instance = self.get_systemI(name)
# if instance then
# # Yes, set it as export.
# @exports[name] = instance
# return
# end
# # No, error.
# raise NameError.new("Invalid name for export: #{name}")
# end
# # Iterates over the exported constructs.
# #
# # Returns an enumerator if no ruby block is given.
# def each_export(&ruby_block)
# # No ruby block? Return an enumerator.
# return to_enum(:each_export) unless ruby_block
# # A block? Apply it on each input signal instance.
# @exports.each_value(&ruby_block)
# end
# Iterates over the exported constructs
#
# NOTE: look into the scope (the exports are stored there, so this is
# a plain delegation).
def each_export(&ruby_block)
  @scope.each_export(&ruby_block)
end
# Gets class containing the extension for the instances, i.e. the
# eigen class of the namespace object dedicated to instance extensions.
def singleton_instance
  @singleton_instanceO.singleton_class
end
# Opens for extension.
#
# NOTE: actually executes +ruby_block+ in the context of the scope
# of the system (delegation; the environment is not merged here).
def open(&ruby_block)
  self.scope.open(&ruby_block)
end
# The proc used for instantiating the system type.
attr_reader :instance_proc

# The instantiation target class.
attr_reader :instance_class

# The instance owning the system if it is an eigen system
attr_reader :owner

# Sets the +owner+.
#
# Note: will make the system eigen
def owner=(owner)
  @owner = owner
end
# Instantiate the system type to an instance named +i_name+ with
# possible arguments +args+.
#
# Builds an anonymous eigen system specialized for this instance,
# fills its public namespace with accessors for the interface signals
# and exported objects, then creates and returns the instance.
def instantiate(i_name,*args)
  # Create the eigen type.
  eigen = self.class.new(:"")
  # Include the mixin systems given when declaring the system.
  @to_includes.each { |system| eigen.scope.include(system) }
  # Fills the scope of the eigen class.
  eigen.scope.build_top(self.scope,*args)
  # Fill the public namespace
  space = eigen.public_namespace
  # Interface signals: each becomes an accessor returning a reference
  # bound to the (future) owner instance.
  eigen.each_signal do |signal|
    space.send(:define_singleton_method,signal.name) do
      RefObject.new(eigen.owner.to_ref,signal)
    end
  end
  # Exported objects: same reference mechanism.
  eigen.each_export do |export|
    space.send(:define_singleton_method,export.name) do
      RefObject.new(eigen.owner.to_ref,export)
    end
  end
  # Create the instance.
  instance = @instance_class.new(i_name,eigen)
  # Link it to its eigen system (resolves the accessors made above).
  eigen.owner = instance
  # Extend it with the singleton extensions declared on the type.
  instance.eigen_extend(@singleton_instanceO)
  # Return the resulting instance
  return instance
end
# Generates the instantiation capabilities including an instantiation
# method +name+ for hdl-like instantiation, target instantiation as
# +klass+, added to the calling object with +add_instance+, and
# whose eigen type is initialized by +ruby_block+.
def make_instantiater(name,klass,add_instance,&ruby_block)
  # Set the instanciater.
  @instance_proc = ruby_block
  # Set the target instantiation class.
  @instance_class = klass
  # Unnamed types do not have associated access method.
  return if name.empty?
  obj = self # For using the right self within the proc
  High.space_reg(name) do |*args|
    # If no name it is actually an access to the system type.
    return obj if args.empty?
    # Get the names from the arguments.
    i_names = args.shift
    # Accept a single name or a collection of names.
    i_names = [*i_names]
    instance = nil # The current instance
    i_names.each do |i_name|
      # Instantiate.
      instance = obj.instantiate(i_name,*args)
      # Add the instance.
      High.top_user.send(add_instance,instance)
    end
    # Return the last instance.
    instance
  end
end
# Missing methods may be immediate values, if not, they are looked up
include Hmissing
# Methods used for describing a system in HDLRuby::High

# Declares high-level bit input signals named +names+.
def input(*names)
  make_inputs(bit, *names)
end

# Declares high-level bit output signals named +names+.
def output(*names)
  make_outputs(bit, *names)
end

# Declares high-level bit inout signals named +names+.
def inout(*names)
  make_inouts(bit, *names)
end
# # Declares high-level bit inner signals named +names+.
# def inner(*names)
# self.make_inners(bit,*names)
# end
# # Declares a high-level behavior activated on a list of +events+, and
# # built by executing +ruby_block+.
# def behavior(*events, &ruby_block)
# # Preprocess the events.
# events.map! do |event|
# event.to_event
# end
# # Create and add the resulting behavior.
# self.add_behavior(Behavior.new(*events,&ruby_block))
# end
# # Declares a high-level timed behavior built by executing +ruby_block+.
# def timed(&ruby_block)
# # Create and add the resulting behavior.
# self.add_behavior(TimeBehavior.new(&ruby_block))
# end
# # Creates a new parallel block built from +ruby_block+.
# #
# # This methods first creates a new behavior to put the block in.
# def par(&ruby_block)
# self.behavior do
# par(&ruby_block)
# end
# end
# # Creates a new sequential block built from +ruby_block+.
# #
# # This methods first creates a new behavior to put the block in.
# def seq(&ruby_block)
# self.behavior do
# seq(&ruby_block)
# end
# end
# # Statements automatically enclosed in a behavior.
#
# # Creates a new if statement with a +condition+ that when met lead
# # to the execution of the block in +mode+ generated by the +ruby_block+.
# #
# # NOTE:
# # * the else part is defined through the helse method.
# # * a behavior is created to enclose the hif.
# def hif(condition, mode = nil, &ruby_block)
# self.behavior do
# hif(condition,mode,&ruby_block)
# end
# end
# # Sets the block executed when the condition is not met to the block
# # in +mode+ generated by the execution of +ruby_block+.
# #
# # Can only be used once.
# #
# # NOTE: added to the hif of the last behavior.
# def helse(mode = nil, &ruby_block)
# # There is a ruby_block: the helse is assumed to be with
# # the last statement of the last behavior.
# statement = self.last_behavior.last_statement
# # Completes the hif or the hcase statement.
# unless statement.is_a?(If) or statement.is_a?(Case) then
# raise "Error: helse statement without hif nor hcase (#{statement.class})."
# end
# statement.helse(mode, &ruby_block)
# end
# # Sets the condition check when the condition is not met to the block,
# # with a +condition+ that when met lead
# # to the execution of the block in +mode+ generated by the +ruby_block+.
# def helsif(condition, mode = nil, &ruby_block)
# # There is a ruby_block: the helse is assumed to be with
# # the last statement of the last behavior.
# statement = @statements.last
# # Completes the hif statement.
# unless statement.is_a?(If) then
# raise "Error: helsif statement without hif (#{statement.class})."
# end
# statement.helsif(condition, mode, &ruby_block)
# end
# # Creates a new case statement with a +value+ used for deciding which
# # block to execute.
# #
# # NOTE:
# # * the when part is defined through the hwhen method.
# # * a new behavior is created to enclose the hcase.
# def hcase(value)
# self.behavior do
# hcase(condition,value)
# end
# end
# # Sets the block of a case structure executed when the +match+ is met
# # to the block in +mode+ generated by the execution of +ruby_block+.
# #
# # Can only be used once.
# def hwhen(match, mode = nil, &ruby_block)
# # There is a ruby_block: the helse is assumed to be with
# # the last statement of the last behavior.
# statement = @statements.last
# # Completes the hcase statement.
# unless statement.is_a?(Case) then
# raise "Error: hwhen statement without hcase (#{statement.class})."
# end
# statement.hwhen(match, mode, &ruby_block)
# end
#
# # Sets the constructs corresponding to +names+ as exports.
# def export(*names)
# names.each {|name| self.add_export(name) }
# end
# Extend the class according to another +system+: copies both its
# singleton methods and the singleton methods meant for its instances.
def extend(system)
  # Adds the singleton methods
  self.eigen_extend(system)
  # Adds the singleton methods for the instances.
  @singleton_instanceO.eigen_extend(system.singleton_instance)
end
# # Include another +system+ type with possible +args+ instanciation
# # arguments.
# def include(system,*args)
# if @includeIs.key?(system.name) then
# raise "Cannot include twice the same system."
# end
# # Extends with system.
# self.extend(system)
# # Create the instance to include
# instance = system.instantiate(:"",*args)
# # Concat its public namespace to the current one.
# self.namespace.concat_namespace(instance.public_namespace)
# # Adds it the list of includeds
# @includeIs[system.name] = instance
# end
# Casts as an included +system+.
#
# NOTE: use the includes of the scope (delegates to Scope#as with the
# other system's scope).
def as(system)
  return self.scope.as(system.scope)
end
include Hmux
# Fills a low level system with self's contents: only the interface
# signals are handled here (the rest of the content goes through the
# scope conversion).
#
# NOTE: name conflicts are treated in the current NameStack state.
def fill_low(systemTlow)
  # Adds its input signals.
  self.each_input { |input| systemTlow.add_input(input.to_low) }
  # Adds its output signals.
  self.each_output { |output| systemTlow.add_output(output.to_low) }
  # Adds its inout signals.
  self.each_inout { |inout| systemTlow.add_inout(inout.to_low) }
end
# Converts the system to HDLRuby::Low and set its +name+.
#
# Raises when the system has no name: anonymous (eigen) systems must
# be given a name before conversion.
def to_low(name = self.name)
  name = name.to_s
  if name.empty? then
    raise "Cannot convert a system without a name to HDLRuby::Low."
  end
  # Create the resulting low system type (with a conflict-free name).
  systemTlow = HDLRuby::Low::SystemT.new(High.names_create(name),
    self.scope.to_low)
  # Fills the interface of the new system from the included
  # systems, must look into the scope since it is the scope
  # that contains the included systems.
  self.scope.each_included do |included|
    included.systemT.fill_low(systemTlow)
  end
  # Adds the content (interface signals) of the actual system.
  self.fill_low(systemTlow)
  # Return the resulting system.
  return systemTlow
end
end
##
# Describes a scope for a system type
class Scope < Low::Scope
High = HDLRuby::High
# include HMix
include Hinner
include SingletonExtend
# The name of the scope if any.
attr_reader :name

# The namespace (private) of the scope.
attr_reader :namespace

# The return value when building the scope.
attr_reader :return_value

##
# Creates a new scope with possible +name+.
#
# The proc +ruby_block+ is executed for building the scope.
# If no block is provided, the scope is the top of a system and
# is filled by the instantiation procedure of the system.
def initialize(name = :"", &ruby_block)
  # Initialize the scope structure
  super(name)
  unless name.empty? then
    # Named scope, set the hdl-like access to the scope.
    obj = self # For using the right self within the proc
    High.space_reg(name) { obj }
  end
  # Initialize the set of grouped system instances.
  @groupIs = {}
  # Creates the namespace.
  @namespace = Namespace.new(self)
  # Initialize the set of exported inner signals and instances
  @exports = {}
  # Initialize the set of included system instances.
  @includeIs = {}
  # Builds the scope if a ruby block is provided
  # (which means the scope is not the top of a system).
  self.build(&ruby_block) if block_given?
end
# Converts to a namespace user.
def to_user
# Already a user.
return self
end
# # The name of the scope if any.
# #
# # NOTE:
# # * the name of the first scope of a system is the system's.
# # * for building reference path with converting to low.
# def name
# if self.parent.is_a?(SystemT) then
# return self.parent.name
# else
# return @name
# end
# end
# Adds a group of system +instances+ named +name+.
def add_groupI(name, *instances)
# Ensure name is a symbol and is not already used for another
# group.
name = name.to_sym
if @groupIs.key?(name)
raise "Group of system instances named #{name} already exist."
end
# Add the group.
@groupIs[name.to_sym] = instances
# Sets the parent of the instances.
instances.each { |instance| instance.parent = self }
end
# Access a group of system instances by +name+.
#
# NOTE: the result is a copy of the group for avoiding side effects.
def get_groupI(name)
return @groupIs[name.to_sym].clone
end
# Iterates over the group of system instances.
#
# Returns an enumerator if no ruby block is given.
def each_groupI(&ruby_block)
# No ruby block? Return an enumerator.
return to_enum(:each_groupI) unless ruby_block
# A block? Apply it on each input signal instance.
@groupIs.each(&ruby_block)
end
# Cf. Hinner
# # Creates and adds a set of inners typed +type+ from a list of +names+.
# #
# # NOTE: a name can also be a signal, is which case it is duplicated.
# def make_inners(type, *names)
# res = nil
# names.each do |name|
# if name.respond_to?(:to_sym) then
# res = self.add_inner(SignalI.new(name,type,:inner))
# else
# # Deactivated because conflict with parent.
# raise "Invalid class for a name: #{name.class}"
# end
# end
# return res
# end
# Adds a +name+ to export.
#
# NOTE: if the name do not corresponds to any inner signal nor
# instance, raise an exception.
def add_export(name)
# Check the name.
name = name.to_sym
# Look for construct to make public.
# Maybe it is an inner signals.
inner = self.get_inner(name)
if inner then
# Yes set it as export.
@exports[name] = inner
return
end
# No, maybe it is an instance.
instance = self.get_systemI(name)
if instance then
# Yes, set it as export.
@exports[name] = instance
return
end
# No, error.
raise NameError.new("Invalid name for export: #{name}")
end
# Iterates over the exported constructs.
#
# Returns an enumerator if no ruby block is given.
def each_export(&ruby_block)
# No ruby block? Return an enumerator.
return to_enum(:each_export) unless ruby_block
# A block? Apply it on each input signal instance.
@exports.each_value(&ruby_block)
# And apply on the sub scopes if any.
@scopes.each {|scope| scope.each_export(&ruby_block) }
end
# Iterates over the included systems.
def each_included(&ruby_block)
# No ruby block? Return an enumerator.
return to_enum(:each_included) unless ruby_block
# A block? Apply it on each input signal instance.
@includeIs.each_value(&ruby_block)
# And apply on the sub scopes if any.
@scopes.each {|scope| scope.each_included(&ruby_block) }
end
# Opens for extension.
#
# NOTE: actually executes +ruby_block+ in the context.
def open(&ruby_block)
# No push since should not merge the current environment into
# the system's.
High.space_insert(-1,@namespace)
High.top_user.instance_eval(&ruby_block)
High.space_pop
end
# Build the scope by executing +ruby_block+.
#
# NOTE: used when the scope is not the top of a system.
def build(&ruby_block)
# Namespace already there
# # High-level scopes can include inner signals.
# # And therefore require a namespace.
# @namespace ||= Namespace.new(self)
# Build the scope.
High.space_push(@namespace)
@return_value = High.top_user.instance_eval(&ruby_block)
High.space_pop
end
# Builds the scope using +base+ as model scope with possible arguments
# +args+.
#
# NOTE: Used by the instantiation procedure of a system.
def build_top(base,*args)
High.space_push(@namespace)
# Fills its namespace with the content of the base scope
# (this latter may already contains access points if it has been
# opended for extension previously).
@namespace.concat_namespace(base.namespace)
# # Include the mixin systems given when declaring the system.
# @to_includes.each { |system| eigen.include(system) }
# Execute the instantiation block
instance_proc = base.parent.instance_proc if base.parent.is_a?(SystemT)
@return_value = High.top_user.instance_exec(*args,&instance_proc) if instance_proc
High.space_pop
end
# Methods delegated to the upper system.
# Adds input +signal+ in the current system.
def add_input(signal)
self.parent.add_input(signal)
end
# Adds output +signal+ in the current system.
def add_output(signal)
self.parent.add_output(signal)
end
# Adds inout +signal+ in the current system.
def add_inout(signal)
self.parent.add_inout(signal)
end
# Creates and adds a set of inputs typed +type+ from a list of +names+
# in the current system.
#
# NOTE: a name can also be a signal, is which case it is duplicated.
def make_inputs(type, *names)
self.parent.make_inputs(type,*names)
end
# Creates and adds a set of outputs typed +type+ from a list of +names+
# in the current system.
#
# NOTE: a name can also be a signal, is which case it is duplicated.
def make_outputs(type, *names)
self.parent.make_outputs(type,*names)
end
# Creates and adds a set of inouts typed +type+ from a list of +names+
# in the current system.
#
# NOTE: a name can also be a signal, is which case it is duplicated.
def make_inouts(type, *names)
self.parent.make_inouts(type,*names)
end
# Converts to a new reference.
def to_ref
# return RefName.new(this,self.name)
# return RefName.new(this,self)
return RefObject.new(this,self)
end
include HScope_missing
# Moved to Hscope_missing for sharing with block
# include Hmissing
# alias h_missing method_missing
# # Missing methods are looked for in the private namespace.
# #
# # NOTE: it is ok to use the private namespace because the scope
# # can only be accessed if it is available from its systemT.
# def method_missing(m, *args, &ruby_block)
# # Is the scope currently opened?
# if High.space_top.user_deep?(self) then
# # Yes, use the stack of namespaces.
# h_missing(m,*args,&ruby_block)
# else
# # No, look into the current namespace and return a reference
# # to the result if it is a referable hardware object.
# res = self.namespace.send(m,*args,&ruby_block)
# if res.respond_to?(:to_ref) then
# # This is a referable object, build the reference from
# # the namespace.
# return RefObject.new(self.to_ref,res)
# end
# end
# # puts "method_missing in scope=#{@name}(#{self}) with m=#{m}"
# # puts "self.namespace=#{self.namespace}"
# # # puts "namespace methods = #{self.namespace.methods}"
# # if self.namespace.respond_to?(m) then
# # puts "Found"
# # self.namespace.send(m,*args,&ruby_block)
# # else
# # puts "NOT Found"
# # h_missing(m,*args,&ruby_block)
# # end
# end
# Methods used for describing a system in HDLRuby::High
# Declares high-level bit input signals named +names+
# in the current system.
def input(*names)
self.parent.input(*names)
end
# Declares high-level bit output signals named +names+
# in the current system.
def output(*names)
self.parent.output(*names)
end
# Declares high-level bit inout signals named +names+
# in the current system.
def inout(*names)
self.parent.inout(*names)
end
# Declares a sub scope with possible +name+ and built from +ruby_block+.
def sub(name = :"", &ruby_block)
# Creates the new scope.
scope = Scope.new(name,&ruby_block)
# puts "new scope=#{scope}"
# Add it
self.add_scope(scope)
# puts "self=#{self}"
# puts "self scopes=#{self.each_scope.to_a.join(",")}"
# Use its return value
return scope.return_value
end
# Declares a high-level behavior activated on a list of +events+, and
# built by executing +ruby_block+.
def behavior(*events, &ruby_block)
# Preprocess the events.
events.map! do |event|
event.to_event
end
# Create and add the resulting behavior.
self.add_behavior(Behavior.new(*events,&ruby_block))
end
# Declares a high-level timed behavior built by executing +ruby_block+.
def timed(&ruby_block)
# Create and add the resulting behavior.
self.add_behavior(TimeBehavior.new(&ruby_block))
end
# Creates a new parallel block built from +ruby_block+.
#
# This methods first creates a new behavior to put the block in.
def par(&ruby_block)
self.behavior do
par(&ruby_block)
end
end
# Creates a new sequential block built from +ruby_block+.
#
# This methods first creates a new behavior to put the block in.
def seq(&ruby_block)
self.behavior do
seq(&ruby_block)
end
end
# Statements automatically enclosed in a behavior.
# Creates a new if statement with a +condition+ that when met lead
# to the execution of the block in +mode+ generated by the +ruby_block+.
#
# NOTE:
# * the else part is defined through the helse method.
# * a behavior is created to enclose the hif.
def hif(condition, mode = nil, &ruby_block)
self.behavior do
hif(condition,mode,&ruby_block)
end
end
# Sets the block executed when the condition is not met to the block
# in +mode+ generated by the execution of +ruby_block+.
#
# Can only be used once.
#
# NOTE: added to the hif of the last behavior.
def helse(mode = nil, &ruby_block)
# There is a ruby_block: the helse is assumed to be with
# the last statement of the last behavior.
statement = self.last_behavior.last_statement
# Completes the hif or the hcase statement.
unless statement.is_a?(If) or statement.is_a?(Case) then
raise "Error: helse statement without hif nor hcase (#{statement.class})."
end
statement.helse(mode, &ruby_block)
end
# Sets the condition check when the condition is not met to the block,
# with a +condition+ that when met lead
# to the execution of the block in +mode+ generated by the +ruby_block+.
def helsif(condition, mode = nil, &ruby_block)
# There is a ruby_block: the helse is assumed to be with
# the last statement of the last behavior.
# statement = @statements.last
statement = self.last_behavior.last_statement
# Completes the hif statement.
unless statement.is_a?(If) then
raise "Error: helsif statement without hif (#{statement.class})."
end
statement.helsif(condition, mode, &ruby_block)
end
# Creates a new case statement with a +value+ used for deciding which
# block to execute.
#
# NOTE:
# * the when part is defined through the hwhen method.
# * a new behavior is created to enclose the hcase.
def hcase(value)
self.behavior do
hcase(condition,value)
end
end
# Sets the block of a case structure executed when the +match+ is met
# to the block in +mode+ generated by the execution of +ruby_block+.
#
# Can only be used once.
def hwhen(match, mode = nil, &ruby_block)
# There is a ruby_block: the helse is assumed to be with
# the last statement of the last behavior.
statement = @statements.last
# Completes the hcase statement.
unless statement.is_a?(Case) then
raise "Error: hwhen statement without hcase (#{statement.class})."
end
statement.hwhen(match, mode, &ruby_block)
end
# Sets the constructs corresponding to +names+ as exports.
def export(*names)
names.each {|name| self.add_export(name) }
end
# # Extend the class according to another +system+.
# def extend(system)
# # Adds the singleton methods
# self.eigen_extend(system)
# @singleton_instanceO.eigen_extend(system.singleton_instance)
# end
# Include a +system+ type with possible +args+ instanciation
# arguments.
def include(system,*args)
if @includeIs.key?(system.name) then
raise "Cannot include twice the same system."
end
# Extends with system.
self.eigen_extend(system)
# Create the instance to include
instance = system.instantiate(:"",*args)
# puts "instance=#{instance}"
# Concat its public namespace to the current one.
self.namespace.concat_namespace(instance.public_namespace)
# Adds it the list of includeds
@includeIs[system.name] = instance
end
# Casts as an included +system+.
def as(system)
system = system.name if system.respond_to?(:name)
return @includeIs[system].public_namespace
end
include Hmux
# Fills a low level scope with self's contents.
#
# NOTE: name conflicts are treated in the current NameStack state.
def fill_low(scopeLow)
# Adds the inner scopes.
self.each_scope { |scope| scopeLow.add_scope(scope.to_low) }
# Adds the inner signals.
self.each_inner { |inner| scopeLow.add_inner(inner.to_low) }
# Adds the instances.
# Single ones.
self.each_systemI { |systemI|
scopeLow.add_systemI(systemI.to_low)
}
# Grouped ones.
self.each_groupI do |name,systemIs|
systemIs.each.with_index { |systemI,i|
# Sets the name of the system instance
# (required for conversion of further accesses).
# puts "systemI.respond_to?=#{systemI.respond_to?(:name=)}"
systemI.name = name.to_s + "[#{i}]"
# And convert it to low
scopeLow.add_systemI(systemI.to_low())
}
end
# Adds the connections.
self.each_connection { |connection|
# puts "connection=#{connection}"
scopeLow.add_connection(connection.to_low)
}
# Adds the behaviors.
self.each_behavior { |behavior|
scopeLow.add_behavior(behavior.to_low)
}
end
# Converts the scope to HDLRuby::Low.
def to_low()
# Create the resulting low scope.
scopeLow = HDLRuby::Low::Scope.new()
# Push the private namespace for the low generation.
High.space_push(@namespace)
# Pushes on the name stack for converting the internals of
# the system.
High.names_push
# Adds the content of its included systems.
@includeIs.each_value {|instance| instance.user.fill_low(scopeLow) }
# Adds the content of the actual system.
self.fill_low(scopeLow)
# Restores the name stack.
High.names_pop
# Restores the namespace stack.
High.space_pop
# Return theresulting system.
return scopeLow
end
end
##
# Module bringing high-level properties to Type classes.
#
# NOTE: by default a type is not specified.
# (The old width/sign/compatibility/merge code has been moved to the
# base classes.)
module Htype
    High = HDLRuby::High

    # Type processing.
    include HDLRuby::Tprocess

    # Hook ensuring that +initialize+ also registers the type name.
    def self.included(klass) # Built-in Ruby hook for modules.
        klass.class_eval do
            previous_init = instance_method(:initialize)
            define_method(:initialize) do |*args, &blk|
                previous_init.bind(self).call(*args, &blk)
                # Registers the name (if not empty).
                self.register(name) unless name.empty?
            end
        end
    end

    # Tells htype has been included.
    def htype?
        true
    end

    # Sets the +name+.
    #
    # NOTE: can only be done if the name is not already set.
    def name=(name)
        raise "Name of type already set to: #{@name}." unless @name.empty?
        # Checks and sets the name.
        sym = name.to_sym
        raise "Cannot set an empty name." if sym.empty?
        @name = sym
        # Registers the name.
        self.register(sym)
    end

    # Register the +name+ of the type.
    def register(name)
        raise "Cannot register with empty name." if self.name.empty?
        # Sets the hdl-like access to the type.
        obj = self # For using the right self within the proc
        High.space_reg(name) { obj }
    end

    # Gets the type as left value (self by default).
    #
    # NOTE: used for asymetric types like TypeSystemI.
    def left
        self
    end

    # Gets the type as right value (self by default).
    #
    # NOTE: used for asymetric types like TypeSystemI.
    def right
        self
    end

    # Type creation in HDLRuby::High.

    # Creates a new vector type of range +rng+ with the current type as
    # base.
    def [](rng)
        TypeVector.new(:"", self, rng)
    end

    # SignalI creation through the type.

    # Declares high-level input signals named +names+ of the current type.
    def input(*names)
        High.top_user.make_inputs(self, *names)
    end

    # Declares high-level output signals named +names+ of the current
    # type.
    def output(*names)
        High.top_user.make_outputs(self, *names)
    end

    # Declares high-level inout signals named +names+ of the current
    # type.
    def inout(*names)
        High.top_user.make_inouts(self, *names)
    end

    # Declares high-level inner signals named +names+ of the current
    # type.
    def inner(*names)
        High.top_user.make_inners(self, *names)
    end
end
##
# Describes a high-level data type.
#
# NOTE: by default a type is not specified.
class Type < Low::Type
    High = HDLRuby::High
    include Htype

    # Builds a new high-level type registered under +name+.
    def initialize(name)
        super(name)
    end

    # Converts the type to HDLRuby::Low and sets its +name+.
    #
    # NOTE: should be overridden by other type classes.
    def to_low(name = self.name)
        HDLRuby::Low::Type.new(name)
    end
end
# Creates the basic types.
# Defines a basic type +name+.
# Defines a basic type +name+: creates the type object and an accessor
# method returning it.
def self.define_type(name)
    sym = name.to_sym
    instance = Type.new(sym)
    define_method(sym) { instance }
    instance
end
# # The void type.
# define_type :void
# The bit type.
Bit = define_type(:bit)
class << Bit
    # A bit is a fixed-point type.
    def fixed?
        true
    end
    # A single bit is 1 bit wide.
    def width
        1
    end
end
# The signed bit type.
Signed = define_type(:signed)
class << Signed
    # The type is signed.
    def signed?
        true
    end
    # The type is fixed point.
    def fixed?
        true
    end
    # A single signed bit is 1 bit wide.
    def width
        1
    end
end
# The unsigned bit type.
Unsigned = define_type(:unsigned)
class << Unsigned
    # The type is unsigned.
    def unsigned?
        true
    end
    # The type is fixed point.
    def fixed?
        true
    end
    # A single unsigned bit is 1 bit wide.
    def width
        1
    end
end
# # The numeric type (for all the Ruby Numeric types).
# define_type :numeric
# The float bit type.
Float = define_type(:float)
class << Float
    # The type is signed.
    def signed?
        true
    end
    # The type is floating point.
    def float?
        true
    end
    # Gets the bitwidth of the type, nil for undefined.
    def width
        1
    end
end
# ##
# # Describes a numeric type.
# class TypeNumeric < Low::TypeNumeric
# High = HDLRuby::High
# include Htype
# # Converts the type to HDLRuby::Low and set its +name+.
# def to_low(name = self.name)
# # Generate and return the new type.
# return HDLRuby::Low::TypeNumeric.new(name,self.numeric)
# end
# end
# Methods shared by the vector types.
module HvectorType
    # Converts the type to HDLRuby::Low and sets its +name+.
    def to_low(name = self.name)
        # Convert the base type and the range, then build the low vector.
        baseLow  = self.base.to_low
        rangeLow = self.range.to_low
        HDLRuby::Low::TypeVector.new(name, baseLow, rangeLow)
    end
end
##
# Describes a vector type.
class TypeVector < Low::TypeVector
    High = HDLRuby::High
    include Htype
    include HvectorType
    # NOTE: width, direction, sign and compatibility handling are
    # implemented in the base class, and the conversion to
    # HDLRuby::Low is provided by HvectorType.
end
##
# Describes a signed integer data type.
class TypeSigned < TypeVector
    # Creates a new signed vector type named +name+ with +range+.
    #
    # NOTE:
    # * The default range is 32-bit.
    def initialize(name, range = 31..0)
        super(name, Signed, range)
    end
end
##
# Describes an unsigned integer data type.
class TypeUnsigned < TypeVector
    # Creates a new unsigned vector type named +name+ with +range+.
    #
    # NOTE:
    # * The default range is 32-bit.
    def initialize(name, range = 31..0)
        super(name, Unsigned, range)
    end
end
##
# Describes a float data type.
class TypeFloat < TypeVector
    # Creates a new floating-point vector type named +name+ with +range+.
    #
    # NOTE:
    # * The bits of negative range stand for the exponent.
    # * The default range corresponds to the 64-bit IEEE 754 double
    #   precision standard.
    def initialize(name, range = 52..-11)
        super(name, Float, range)
    end
end
##
# Describes a tuple type.
class TypeTuple < Low::TypeTuple
    High = HDLRuby::High
    include Htype

    # Converts the type to HDLRuby::Low and sets its +name+.
    def to_low(name = self.name)
        # Convert each sub type and build the low tuple upon them.
        lows = @types.map { |type| type.to_low }
        HDLRuby::Low::TypeTuple.new(name, *lows)
    end
end
##
# Describes a structure type.
class TypeStruct < Low::TypeStruct
    High = HDLRuby::High
    include Htype
    # NOTE: width computation, compatibility checking and merging are
    # implemented in HDLRuby::Low.

    # Converts the type to HDLRuby::Low and sets its +name+.
    def to_low(name = self.name)
        # Convert each named entry of the structure.
        entries = @types.map { |ename, type| [ename, type.to_low] }
        HDLRuby::Low::TypeStruct.new(name, entries)
    end
end
## Methods for declaring system types and functions.
# The type constructors.
# Builds an anonymous structure type from +content+.
def struct(content)
    TypeStruct.new(:"", content)
end
# # Creates an unnamed union type from a +content+.
# def union(content)
# return TypeUnion.new(:"",content)
# end
# Creates a type named +name+, built by executing +ruby_block+.
def type(name, &ruby_block)
    # Execute the block to obtain the type.
    built = HDLRuby::High.top_user.instance_eval(&ruby_block)
    # Ensures the result is really a type.
    unless built.respond_to?(:htype?) then
        raise "Invalid class for a type: #{built.class}."
    end
    # Name it.
    built.name = name
    built
end
# Methods for declaring systems.

# Declares a high-level system type named +name+, with +includes+ mixin
# hardware types, and using +ruby_block+ for instantiating.
def system(name = :"", *includes, &ruby_block)
    # Creates the resulting system.
    SystemT.new(name, *includes, &ruby_block)
end
# Methods for declaring functions.

# Declares a function named +name+ using +ruby_block+ as body.
#
# NOTE: a function is a short-cut for a method that creates a scope.
def function(name, &ruby_block)
    # Common body: evaluate the function block within a fresh sub scope.
    body = lambda do |*args|
        sub do
            HDLRuby::High.top_user.instance_exec(*args, &ruby_block)
        end
    end
    if HDLRuby::High.in_system? then
        # Within a system: the function is local to the current object.
        define_singleton_method(name.to_sym, &body)
    else
        # Outside a system: the function is globally available.
        define_method(name.to_sym, &body)
    end
end
# # Extends the system type class for converting it to a data type.
# class SystemT
# # Converts the system type to a data type using +left+ signals
# # as left values and +right+ signals as right values.
# def to_type(left,right)
# return TypeSystemT.new(:"",self,left,right)
# end
# end
# Classes describing hardware instances.

##
# Describes a high-level system instance.
class SystemI < Low::SystemI
    High = HDLRuby::High
    include SingletonExtend

    # Creates a new system instance of system type +systemT+ named +name+.
    def initialize(name, systemT)
        # Initialize the system instance structure.
        super(name,systemT)
        # Sets the hdl-like access to the system instance.
        obj = self # For using the right self within the proc
        High.space_reg(name) { obj }
    end

    # Converts to a new reference.
    def to_ref
        return RefObject.new(this,self)
    end

    # Connects signals of the system instance according to +connects+.
    #
    # NOTE: +connects+ can be a hash table where each entry gives the
    # correspondance between a system's signal name and an external
    # signal to connect to, or a list of signals that will be connected
    # in the order of declaration.
    def call(*connects)
        # Checks if it is a connection through a hash.
        if connects.size == 1 and connects[0].respond_to?(:to_h) then
            # Yes, perform a connection by name.
            connects = connects[0].to_h
            # Performs the connections.
            connects.each do |left,right|
                # Gets the signal corresponding to connect.
                left = self.get_signal(left)
                # Convert it to a reference.
                left = RefObject.new(self.to_ref,left)
                # Make the connection.
                left <= right
            end
        else
            # No, perform a connection in order of declaration.
            connects.each.with_index do |csig,i|
                # Gets the i-est signal to connect.
                ssig = self.get_interface(i)
                # Convert it to a reference.
                ssig = RefObject.new(self.to_ref,ssig)
                # Make the connection.
                ssig <= csig
            end
        end
    end

    # Gets an exported element (signal or system instance) by +name+.
    def get_export(name)
        return @systemT.get_export(name)
    end

    # Opens for extension.
    #
    # NOTE: actually executes +ruby_block+ in the context of the
    # systemT.
    def open(&ruby_block)
        return @systemT.open(&ruby_block)
    end

    # Missing methods are looked for in the public namespace of the
    # system type.
    def method_missing(m, *args, &ruby_block)
        self.public_namespace.send(m,*args,&ruby_block)
    end

    # Companion of method_missing: makes respond_to? consistent with
    # the delegation to the public namespace.
    def respond_to_missing?(m, include_private = false)
        self.public_namespace.respond_to?(m, include_private) || super
    end

    # Methods to transmit to the systemT.

    # Gets the public namespace.
    def public_namespace
        self.systemT.public_namespace
    end

    # Converts the instance to HDLRuby::Low and set its +name+.
    def to_low(name = self.name)
        # Converts the system type of the instance to HDLRuby::Low.
        systemTlow = self.systemT.to_low(High.names_create(name.to_s+ "::T"))
        # Creates the resulting HDLRuby::Low instance.
        return HDLRuby::Low::SystemI.new(High.names_create(name),
                                         systemTlow)
    end
end
# Class describing namespace in system.
# Classes describing hardware statements, connections and expressions
##
# Module giving high-level statement properties.
module HStatement
    # Wraps the statement into a new if statement guarded by +condition+.
    #
    # NOTE: the else part is defined through the helse method.
    def hif(condition)
        If.new(condition) { self }
    end
end
##
# Describes a high-level if statement.
class If < Low::If
    High = HDLRuby::High
    include HStatement

    # Creates a new if statement whose +condition+, when met, leads to
    # the execution of the block in +mode+ built by executing
    # +ruby_block+.
    def initialize(condition, mode = nil, &ruby_block)
        # Build the "yes" block.
        yes_blk = High.make_block(mode, &ruby_block)
        # Build the if statement upon it.
        super(condition.to_expr, yes_blk)
    end

    # Sets the block executed in +mode+ when the condition is not met to
    # the block generated by the execution of +ruby_block+.
    #
    # Can only be used once.
    def helse(mode = nil, &ruby_block)
        # A "no" block can only be set once.
        raise "Cannot have two helse for a single if statement." if self.no
        # Build and attach the "no" block.
        self.no = High.make_block(mode, &ruby_block)
    end

    # Sets the block executed in +mode+ when the condition is not met
    # but +next_cond+ is met to the block generated by the execution of
    # +ruby_block+.
    #
    # Can only be used if the no-block is not set yet.
    def helsif(next_cond, mode = nil, &ruby_block)
        # No helsif can be added once the final helse is set.
        raise "Cannot have an helsif after an helse." if self.no
        # Build and attach the alternate block with its condition.
        self.add_noif(next_cond.to_expr, High.make_block(mode, &ruby_block))
    end

    # Converts the if to HDLRuby::Low.
    def to_low
        # The "no" block is optional: convert it only when present.
        noLow = self.no ? self.no.to_low : nil
        # Generate the low-level if with its "yes" part.
        lowIf = HDLRuby::Low::If.new(self.condition.to_low,
                                     self.yes.to_low, noLow)
        # Append each alternate condition/block pair.
        self.each_noif { |cond, blk| lowIf.add_noif(cond.to_low, blk.to_low) }
        lowIf
    end
end
##
# Describes a high-level case statement.
class Case < Low::Case
    High = HDLRuby::High
    include HStatement
    # Creates a new case statement with a +value+ that decides which
    # block to execute.
    def initialize(value)
        # Initialize with the value converted to an expression.
        super(value.to_expr)
    end
    # Sets the block executed in +mode+ when the value matches +match+.
    # The block is generated by the execution of +ruby_block+.
    #
    # Can only be used once for the given +match+.
    def hwhen(match, mode = nil, &ruby_block)
        # Create the when block if required
        # when_block = High.make_block(:par,&ruby_block)
        when_block = High.make_block(mode,&ruby_block)
        # Adds the case.
        self.add_when(match.to_expr,when_block)
    end
    # Sets the block executed in +mode+ when there were no match to
    # the block generated by the execution of +ruby_block+.
    #
    # Can only be used once.
    def helse(mode = nil, &ruby_block)
        # Create the default block if required
        # no_block = High.make_block(:par,&ruby_block)
        default_block = High.make_block(mode,&ruby_block)
        # Sets the default block.
        self.default = default_block
    end
    # Converts the case to HDLRuby::Low.
    def to_low
        # Create the low level case from the selection value.
        caseL = HDLRuby::Low::Case.new(@value.to_low)
        # Add each when clause.
        self.each_when do |match,statement|
            caseL.add_when(match.to_low, statement.to_low)
        end
        # Add the default if any.
        if self.default then
            caseL.default = self.default.to_low
        end
        return caseL
    end
end
##
# Describes a delay: not synthesizable.
class Delay < Low::Delay
    High = HDLRuby::High
    include HStatement
    # Unary bang: makes the current top user wait for this delay.
    def !
        High.top_user.wait(self)
    end
    # Converts the delay to HDLRuby::Low.
    def to_low
        return HDLRuby::Low::Delay.new(self.value, self.unit)
    end
end
##
# Describes a high-level wait delay statement.
class TimeWait < Low::TimeWait
    include HStatement

    # Builds the HDLRuby::Low equivalent of the wait statement.
    def to_low
        HDLRuby::Low::TimeWait.new(self.delay.to_low)
    end
end
##
# Describes a timed loop statement: not synthesizable!
class TimeRepeat < Low::TimeRepeat
    include HStatement

    # Builds the HDLRuby::Low equivalent of the timed loop: both the
    # inner statement and the delay are converted.
    def to_low
        HDLRuby::Low::TimeRepeat.new(self.statement.to_low,
                                     self.delay.to_low)
    end
end
##
# Module giving high-level expression properties
module HExpression
    # The system type the expression has been resolved in, if any.
    attr_reader :systemT
    # The type of the expression if resolved.
    attr_reader :type

    # Converts to a new value.
    #
    # NOTE: to be redefined.
    def to_value
        raise "Expression cannot be converted to a value: #{self.class}"
    end

    # Converts to a new expression.
    #
    # NOTE: to be redefined in case of non-expression class.
    def to_expr
        raise "Internal error: to_expr not defined yet for class: #{self.class}"
    end

    # Adds the unary operations generation.
    # NOTE: Ruby's unary plus method is named :+@ (it was previously
    # declared as :"@+", which is never dispatched by +expr+).
    [:"-@",:"+@",:"!",:"~",
     :boolean, :bit, :signed, :unsigned].each do |operator|
        define_method(operator) do
            return Unary.new(operator,self.to_expr)
        end
    end

    # Adds the binary operations generation.
    [:"+",:"-",:"*",:"/",:"%",:"**",
     :"&",:"|",:"^",:"<<",:">>",
     :"==",:"!=",:"<",:">",:"<=",:">="].each do |operator|
        define_method(operator) do |right|
            return Binary.new(operator,self.to_expr,right.to_expr)
        end
    end

    # Methods for conversion for HDLRuby::Low: type processing, flattening
    # and so on
    # (A duplicate `attr_reader :type` was removed here: it is already
    # declared at the top of the module.)

    # Sets the data +type+.
    def type=(type)
        # Check and set the type (duck-typed through htype?).
        # unless type.is_a?(Type) then
        unless type.respond_to?(:htype?) then
            raise "Invalid class for a type: #{type.class}."
        end
        @type = type
    end

    # # The parent construct.
    # attr_reader :parent
    # # Sets the +parent+ construct.
    # def parent=(parent)
    #     # Check and set the type.
    #     unless ( parent.is_a?(Low::Expression) or
    #              parent.is_a?(Low::Transmit) or
    #              parent.is_a?(Low::If) or
    #              parent.is_a?(Low::Case) ) then
    #         raise "Invalid class for a type: #{type.class}."
    #     end
    #     @parent = parent
    # end
    # # Iterates over the expression parents if any (actually at most once).
    # def each_parent(&ruby_block)
    #     # No ruby block? Return an enumerator.
    #     return to_enum(:each_parent) unless ruby_block
    #     # A block? Apply it on the parent.
    #     ruby_block.call(@parent)
    # end
    # # Methods for conversion for HDLRuby::Low: type processing, flattening
    # # and so on
    # # Make the current expression a parent and recurse.
    # def make_parents_deep
    #     # Set the parents of the children and recurse on them.
    #     self.each_child do |child|
    #         if child.respond_to?(:parent=) then
    #             child.parent = self
    #         else
    #             child.add_parent(self)
    #         end
    #         child.make_parents_deep
    #     end
    # end
    # # Resolves the unknown signal types and conflicts in the context
    # # of system type +systemT+.
    # # Returns true if the resolution succeeded.
    # #
    # # NOTE: sets the type of the expression.
    # def resolve_types(systemT)
    #     # Only typed expression can be used for resolving types.
    #     unless @type then
    #         raise "Cannot resolve type: nil type."
    #     end
    #     # Resolve the children.
    #     self.each_child do |child|
    #         if child.type == nil then
    #             # The child's type is unknown, should not happen.
    #             raise "Cannot resolve type: child's type is nil."
    #         end
    #         # Check if the type is compatible with the child's.
    #         if @type.compatible?(child.type) then
    #             # Yes, compute and set the new type for both.
    #             @type = child.type = type.merge(child.type)
    #         else
    #             # Incombatible types, cannot resolve type.
    #             raise "Cannot resolve type: #{@type} and child's #{child.type} are incompatible."
    #         end
    #     end
    #     # Resolve the parents.
    #     self.each_parent do |parent|
    #         if parent.type == nil then
    #             # Simple sets the parent's type to current one.
    #             parent.type = @type
    #         elsif @type.compatible?(parent.type) then
    #             # Yes, compute and set the new type for both.
    #             @type = parent.type = type.merge(parent.type)
    #         else
    #             # Incombatible types, cannot resolve type.
    #             raise "Cannot resolve type: #{@type} and #{parent.type} are incompatible."
    #         end
    #     end
    # end
end
##
# Module giving high-level properties for handling the arrow (<=) operator.
module HArrow
    High = HDLRuby::High
    # Creates a transmit, or connection with an +expr+.
    #
    # NOTE: it is converted afterward to an expression if required.
    # The meaning of <= depends on the current top user: inside a
    # behavior block it is a transmission statement, otherwise it is a
    # permanent connection in the enclosing system type.
    def <=(expr)
        if High.top_user.is_a?(HDLRuby::Low::Block) then
            # We are in a block, so generate and add a Transmit.
            High.top_user.
                add_statement(Transmit.new(self.to_ref,expr.to_expr))
        else
            # We are in a system type, so generate and add a Connection.
            High.top_user.
                add_connection(Connection.new(self.to_ref,expr.to_expr))
        end
    end
end
##
# Describes a high-level unary expression
class Unary < Low::Unary
    include HExpression

    # Builds a fresh high-level unary expression from this one.
    def to_expr
        Unary.new(self.operator, self.child.to_expr)
    end

    # Builds the HDLRuby::Low equivalent of the unary expression.
    def to_low
        HDLRuby::Low::Unary.new(self.operator, self.child.to_low)
    end
end
##
# Describes a high-level binary expression
class Binary < Low::Binary
    include HExpression

    # Builds a fresh high-level binary expression from this one.
    def to_expr
        Binary.new(self.operator, self.left.to_expr, self.right.to_expr)
    end

    # Builds the HDLRuby::Low equivalent of the binary expression.
    def to_low
        HDLRuby::Low::Binary.new(self.operator,
                                 self.left.to_low, self.right.to_low)
    end
end
# ##
# # Describes a high-level ternary expression
# class Ternary < Low::Ternary
# include HExpression
# end
##
# Describes a section operation (generalization of the ternary operator).
#
# NOTE: choice is using the value of +select+ as an index.
class Select < Low::Select
    include HExpression

    # Builds a fresh high-level selection expression from this one:
    # the selector and every choice are converted to expressions.
    def to_expr
        choices = self.each_choice.map(&:to_expr)
        Select.new("?", self.select.to_expr, *choices)
    end

    # Builds the HDLRuby::Low equivalent of the selection.
    def to_low
        choices = self.each_choice.map(&:to_low)
        HDLRuby::Low::Select.new("?", self.select.to_low, *choices)
    end
end
##
# Describes a high-level concat expression.
class Concat < Low::Concat
    include HExpression

    # Builds a fresh high-level concat expression from this one.
    # The sub-expressions are converted lazily, as in the original code.
    def to_expr
        Concat.new(self.each_expression.lazy.map(&:to_expr))
    end

    # Builds the HDLRuby::Low equivalent of the concat.
    def to_low
        HDLRuby::Low::Concat.new(self.each_expression.lazy.map(&:to_low))
    end
end
##
# Describes a high-level value.
class Value < Low::Value
    include HExpression
    include HDLRuby::Vprocess
    # Converts to a new value (a fresh copy sharing type and content).
    def to_value
        return Value.new(self.type,self.content)
    end
    # Converts to a new expression.
    def to_expr
        return self.to_value
    end
    # Converts the value to HDLRuby::Low.
    def to_low
        # Clone the content if possible.
        content = self.content.frozen? ? self.content : self.content.clone
        # Create and return the resulting low-level value.
        # FIX: the cloned +content+ is now actually passed (the clone
        # was previously computed but self.content was used instead).
        return HDLRuby::Low::Value.new(self.type.to_low,content)
    end
    # # For support in ranges.
    # def <=>(expr)
    #     return self.to_s <=> expr.to_s
    # end
end
##
# Module giving high-level reference properties.
module HRef
    # Properties of expressions are also required: when HRef is
    # included, the host class also gains HExpression and HArrow,
    # and to_expr is defined as an alias for to_ref.
    def self.included(klass)
        klass.class_eval do
            include HExpression
            include HArrow
            # Converts to a new expression.
            def to_expr
                self.to_ref
            end
        end
    end
    # Converts to a new reference.
    #
    # NOTE: to be redefined in case of non-reference class.
    def to_ref
        raise "Internal error: to_ref not defined yet for class: #{self.class}"
    end
    # Converts to a new event (triggered on any change of the reference).
    def to_event
        return Event.new(:change,self.to_ref)
    end
    # Creates an access to elements of range +rng+ of the signal.
    #
    # NOTE: +rng+ can be a single expression in which case it is an index.
    def [](rng)
        if rng.respond_to?(:to_expr) then
            # Number range: convert it to an expression.
            rng = rng.to_expr
        end
        if rng.is_a?(HDLRuby::Low::Expression) then
            # Index case
            return RefIndex.new(self.to_ref,rng)
        else
            # Range case, ensure it is made among expression.
            first = rng.first.to_expr
            last = rng.last.to_expr
            # And create the reference.
            return RefRange.new(self.to_ref,first..last)
        end
    end
    # Iterate over the elements.
    #
    # Returns an enumerator if no ruby block is given.
    def each(&ruby_block)
        # No ruby block? Return an enumerator.
        return to_enum(:each) unless ruby_block
        # A block? Apply it on each element, indexing through the
        # reference's type range.
        self.type.range.heach do |i|
            yield(self[i])
        end
    end
    # Reference can be used like enumerator
    include Enumerable
end
##
# Describes a high-level object reference: no low-level equivalent!
class RefObject < Low::Ref
    include HRef
    # The base of the reference
    attr_reader :base
    # The refered object.
    attr_reader :object
    # Creates a new reference from a +base+ reference and named +object+.
    def initialize(base,object)
        # Check and set the base (it must be convertible to a reference).
        unless base.respond_to?(:to_ref)
            raise "Invalid base for a RefObject: #{base}"
        end
        @base = base
        # Check and set the object (it must have a name).
        unless object.respond_to?(:name)
            raise "Invalid object for a RefObject: #{object}"
        end
        @object = object
    end
    # Converts to a new reference.
    def to_ref
        return RefObject.new(@base,@object)
    end
    # Converts the name reference to a HDLRuby::Low::RefName.
    def to_low
        # puts "To low for ref with name=#{self.name} and subref=#{self.ref}"
        return HDLRuby::Low::RefName.new(@base.to_ref.to_low,@object.name)
    end
    # Missing methods are looked for into the refered object.
    def method_missing(m, *args, &ruby_block)
        @object.send(m,*args,&ruby_block)
    end
    # Companion of method_missing so that respond_to? stays consistent
    # with the delegation to the refered object.
    def respond_to_missing?(m, include_private = false)
        @object.respond_to?(m, include_private) || super
    end
    # # Converts the reference to a low-level name reference.
    # def to_low
    #     # Build the path of the reference.
    #     path = []
    #     cur = @object
    #     while(!High.top_user.user_deep?(cur)) do
    #         puts "first cur=#{cur}"
    #         cur = cur.owner if cur.respond_to?(:owner)
    #         puts "cur=#{cur}", "name=#{cur.name}"
    #         path << cur.name
    #         cur = cur.parent
    #         # cur = cur.scope if cur.respond_to?(:scope)
    #         puts " parent=#{cur} found? #{High.top_user.user_deep?(cur)}"
    #     end
    #     # puts "path=#{path}"
    #     # Build the references from the path.
    #     ref = this.to_low
    #     path.each { |name| ref = HDLRuby::Low::RefName.new(ref,name) }
    #     return ref
    # end
end
##
# Describes a high-level concat reference.
class RefConcat < Low::RefConcat
    include HRef

    # Builds a fresh high-level concat reference from this one.
    # The sub-references are converted lazily, as in the original code.
    def to_ref
        RefConcat.new(self.each_ref.lazy.map(&:to_ref))
    end

    # Builds the HDLRuby::Low equivalent of the concat reference.
    def to_low
        HDLRuby::Low::RefConcat.new(self.each_ref.lazy.map(&:to_low))
    end
end
##
# Describes a high-level index reference.
class RefIndex < Low::RefIndex
    include HRef

    # Builds a fresh high-level index reference from this one.
    def to_ref
        RefIndex.new(self.ref.to_ref, self.index.to_expr)
    end

    # Builds the HDLRuby::Low equivalent of the index reference.
    def to_low
        HDLRuby::Low::RefIndex.new(self.ref.to_low, self.index.to_low)
    end
end
##
# Describes a high-level range reference.
class RefRange < Low::RefRange
    include HRef

    # Builds a fresh high-level range reference from this one; both
    # range bounds are converted to expressions.
    def to_ref
        rng = self.range
        RefRange.new(self.ref.to_ref, rng.first.to_expr..rng.last.to_expr)
    end

    # Builds the HDLRuby::Low equivalent of the range reference.
    def to_low
        HDLRuby::Low::RefRange.new(self.ref.to_low, self.range.to_low)
    end
end
##
# Describes a high-level name reference.
class RefName < Low::RefName
    include HRef

    # Builds a fresh high-level name reference from this one.
    def to_ref
        RefName.new(self.ref.to_ref, self.name)
    end

    # Builds the HDLRuby::Low equivalent of the name reference.
    def to_low
        HDLRuby::Low::RefName.new(self.ref.to_low, self.name)
    end
end
##
# Describes a this reference.
class RefThis < Low::RefThis
    High = HDLRuby::High

    include HRef

    # Builds a fresh this-reference.
    def to_ref
        RefThis.new
    end

    # The system type currently being built.
    def system
        High.cur_system
    end

    # The behavior currently being built, if any.
    def behavior
        High.cur_behavior
    end

    # The block currently being built, if any.
    def block
        High.cur_block
    end

    # Builds the HDLRuby::Low equivalent of the this-reference.
    def to_low
        HDLRuby::Low::RefThis.new
    end
end
# Gives access to the *this* reference.
def this
    # A fresh reference is built at each call (no shared instance,
    # see the note in RefThis about parent handling).
    RefThis.new
end
##
# Describes a high-level event.
class Event < Low::Event
    # Builds a fresh high-level event from this one.
    def to_event
        Event.new(self.type, self.ref.to_ref)
    end

    # Inverts the event: create a negedge if posedge, a posedge if negedge.
    #
    # NOTE: raises an exception if the event is neither pos nor neg edge.
    def invert
        case self.type
        when :posedge then
            Event.new(:negedge, self.ref.to_ref)
        when :negedge then
            Event.new(:posedge, self.ref.to_ref)
        else
            raise "Event cannot be inverted: #{self.type}"
        end
    end

    # Builds the HDLRuby::Low equivalent of the event.
    def to_low
        HDLRuby::Low::Event.new(self.type, self.ref.to_low)
    end
end
##
# Describes a transmission statement.
class Transmit < Low::Transmit
    High = HDLRuby::High
    include HStatement
    # Converts the transmission to a comparison expression.
    #
    # NOTE: required because the <= operator is ambigous and by
    # default produces a Transmit or a Connection.
    def to_expr
        # Remove the transmission from the block: <= used as a
        # comparison means the statement should never have been added.
        High.top_user.delete_statement(self)
        # Generate an expression.
        return Binary.new(:<=,self.left.to_expr,self.right.to_expr)
    end
    # Converts the transmit to HDLRuby::Low.
    def to_low
        return HDLRuby::Low::Transmit.new(self.left.to_low,
                                          self.right.to_low)
    end
end
##
# Describes a connection.
class Connection < Low::Connection
    High = HDLRuby::High
    # Converts the connection to a comparison expression.
    #
    # NOTE: required because the <= operator is ambigous and by
    # default produces a Transmit or a Connection.
    def to_expr
        # Remove the connection from the system type.
        High.top_user.delete_connection(self)
        # Generate an expression.
        # NOTE(review): unlike Transmit#to_expr, left/right are not
        # re-converted through to_expr here — confirm this asymmetry
        # is intentional.
        return Binary.new(:<=,self.left,self.right)
    end
    # Creates a new behavior sensitive to +event+ including the connection
    # converted to a transmission, and replace the former by the new
    # behavior.
    def at(event)
        # Creates the behavior.
        left, right = self.left, self.right
        # Detached left and right from their connection since they will
        # be put in a new behavior instead.
        left.parent = right.parent = nil
        # Create the new behavior replacing the connection.
        behavior = Behavior.new(event) do
            left <= right
        end
        # Adds the behavior.
        High.top_user.add_behavior(behavior)
        # Remove the connection
        High.top_user.delete_connection(self)
    end
    # Creates a new behavior with an if statement from +condition+
    # enclosing the connection converted to a transmission, and replace the
    # former by the new behavior.
    #
    # NOTE: the else part is defined through the helse method.
    def hif(condition)
        # Creates the behavior.
        left, right = self.left, self.right
        # Detached left and right from their connection since they will
        # be put in a new behavior instead.
        left.parent = right.parent = nil
        # Create the new behavior replacing the connection.
        behavior = Behavior.new() do
            hif(condition) do
                left <= right
            end
        end
        # Adds the behavior.
        High.top_user.add_behavior(behavior)
        # Remove the connection
        High.top_user.delete_connection(self)
    end
    # Converts the connection to HDLRuby::Low.
    def to_low
        return HDLRuby::Low::Connection.new(self.left.to_low,
                                            self.right.to_low)
    end
end
##
# Describes a high-level signal.
class SignalI < Low::SignalI
    High = HDLRuby::High
    include HRef
    # The valid bounding directions.
    DIRS = [ :no, :input, :output, :inout, :inner ]
    # # The object the signal is bounded to if any.
    # attr_reader :bound
    # The bounding direction.
    attr_reader :dir
    # Tells if the signal can be read.
    attr_reader :can_read
    # Tells if the signal can be written.
    attr_reader :can_write
    # Creates a new signal named +name+ typed as +type+ and with
    # +dir+ as bounding direction.
    #
    # NOTE: +dir+ can be :input, :output, :inout or :inner
    def initialize(name,type,dir)
        # Initialize the type structure.
        super(name,type)
        unless name.empty? then
            # Named signal, set the hdl-like access to the signal.
            obj = self # For using the right self within the proc
            High.space_reg(name) { obj }
        end
        # Hierarchical type allows access to sub references, so generate
        # the corresponding methods.
        # NOTE(review): the block parameter +name+ shadows the method's
        # +name+ argument; each singleton method captures the sub-type's
        # name as intended, but the outer +name+ is hidden inside.
        if type.respond_to?(:each_name) then
            type.each_name do |name|
                self.define_singleton_method(name) do
                    # RefName.new(self.to_ref,name)
                    # RefName.new(self.to_ref,
                    #             SignalI.new(name,type.get_type(name)))
                    RefObject.new(self.to_ref,
                                  SignalI.new(name,type.get_type(name)))
                end
            end
        end
        # Check and set the bound.
        self.dir = dir
        # Set the read and write authorisations (granted by default).
        @can_read = 1.to_expr
        @can_write = 1.to_expr
    end
    # Sets the +condition+ when the signal can be read.
    def can_read=(condition)
        @can_read = condition.to_expr
    end
    # Sets the +condition+ when the signal can be write.
    def can_write=(condition)
        @can_write = condition.to_expr
    end
    # # Tells if the signal is bounded or not.
    # def bounded?
    #     return (@dir and @dir != :no)
    # end
    # Sets the direction to +dir+.
    def dir=(dir)
        # if self.bounded? then
        #     raise "Error: signal #{self.name} already bounded."
        # end
        unless DIRS.include?(dir) then
            raise "Invalid bounding for signal #{self.name} direction: #{dir}."
        end
        @dir = dir
    end
    # Creates a positive edge event from the signal.
    def posedge
        return Event.new(:posedge,self.to_ref)
    end
    # Creates a negative edge event from the signal.
    def negedge
        return Event.new(:negedge,self.to_ref)
    end
    # Creates an edge event from the signal.
    def edge
        return Event.new(:edge,self.to_ref)
    end
    # # Creates a change event from the signal.
    # def change
    #     return Event.new(:change,self.to_ref)
    # end
    # Converts to a new reference.
    def to_ref
        # return RefName.new(this,self.name)
        # return RefName.new(this,self)
        return RefObject.new(this,self)
    end
    # Converts to a new expression.
    def to_expr
        return self.to_ref
    end
    # Converts the system to HDLRuby::Low and set its +name+.
    def to_low(name = self.name)
        return HDLRuby::Low::SignalI.new(name,self.type.to_low)
    end
end
##
# Module giving the properties of a high-level block.
module HBlock
    High = HDLRuby::High
    # The namespace
    attr_reader :namespace
    # The return value when building the scope.
    attr_reader :return_value
    # Build the block by executing +ruby_block+ inside the block's
    # namespace; the last expression's value is kept in +return_value+.
    def build(&ruby_block)
        # # # High-level blocks can include inner signals.
        # # @inners ||= {}
        # Already there
        # # And therefore require a namespace.
        # @namespace ||= Namespace.new(self)
        # Build the block.
        # High.space_push(self)
        High.space_push(@namespace)
        @return_value = High.top_user.instance_eval(&ruby_block)
        High.space_pop
    end
    # Converts to a new reference.
    def to_ref
        # return RefName.new(this,self.name)
        # return RefName.new(this,self)
        return RefObject.new(this,self)
    end
    include HScope_missing
    # include Hmissing
    # alias h_missing method_missing
    # # Missing methods are looked for in the private namespace.
    # #
    # # NOTE: it is ok to use the private namespace because the scope
    # # can only be accessed if it is available from its systemT.
    # def method_missing(m, *args, &ruby_block)
    #     # print "method_missing in class=#{self.class} with m=#{m}\n"
    #     if self.namespace.respond_to?(m) then
    #         self.namespace.send(m,*args,&ruby_block)
    #     else
    #         h_missing(m,*args,&ruby_block)
    #     end
    # end
    # # Adds inner signal +signal+.
    # def add_inner(signal)
    #     # Checks and add the signal.
    #     unless signal.is_a?(SignalI)
    #         raise "Invalid class for a signal instance: #{signal.class}"
    #     end
    #     if @inners.has_key?(signal.name) then
    #         raise "SignalI #{signal.name} already present."
    #     end
    #     @inners[signal.name] = signal
    # end
    # # Creates and adds a set of inners typed +type+ from a list of +names+.
    # #
    # # NOTE: a name can also be a signal, is which case it is duplicated.
    # def make_inners(type, *names)
    #     names.each do |name|
    #         if name.respond_to?(:to_sym) then
    #             self.add_inner(SignalI.new(name,type,:inner))
    #         else
    #             signal = name.clone
    #             signal.dir = :inner
    #             self.add_inner(signal)
    #         end
    #     end
    # end
    # # Iterates over the inner signals.
    # #
    # # Returns an enumerator if no ruby block is given.
    # def each_inner(&ruby_block)
    #     # No ruby block? Return an enumerator.
    #     return to_enum(:each_inner) unless ruby_block
    #     # A block? Apply it on each inner signal instance.
    #     @inners.each_value(&ruby_block)
    # end
    # alias :each_signal :each_inner
    # ## Gets an inner signal by +name+.
    # def get_inner(name)
    #     return @inners[name]
    # end
    # alias :get_signal :get_inner
    # # Declares high-level bit inner signals named +names+.
    # def inner(*names)
    #     self.make_inners(bit,*names)
    # end
    # # Iterates over all the signals of the block and its sub block's ones.
    # def each_signal_deep(&ruby_block)
    #     # No ruby block? Return an enumerator.
    #     return to_enum(:each_signal_deep) unless ruby_block
    #     # A block?
    #     # First, apply on the signals of the block.
    #     self.each_signal(&ruby_block)
    #     # Then apply on each sub block.
    #     self.each_block_deep do |block|
    #         block.each_signal_deep(&ruby_block)
    #     end
    # end
    # Creates and adds a new block executed in +mode+, with possible +name+
    # and built by executing +ruby_block+.
    def add_block(mode = nil, name = :"", &ruby_block)
        # Creates the block.
        block = High.make_block(mode,name,&ruby_block)
        # Adds it as a statement.
        self.add_statement(block)
        # Use its return value.
        return block.return_value
    end
    # Creates a new parallel block with possible +name+ and
    # built from +ruby_block+.
    # Without a ruby block, simply returns the :par mode marker.
    def par(name = :"", &ruby_block)
        return :par unless ruby_block
        self.add_block(:par,name,&ruby_block)
    end
    # Creates a new sequential block with possible +name+ and
    # built from +ruby_block+.
    # Without a ruby block, simply returns the :seq mode marker.
    def seq(name = :"", &ruby_block)
        return :seq unless ruby_block
        self.add_block(:seq,name,&ruby_block)
    end
    # Creates a new block with the current mode with possible +name+ and
    # built from +ruby_block+.
    def sub(name = :"", &ruby_block)
        self.add_block(self.mode,name,&ruby_block)
    end
    # Get the current mode of the block.
    #
    # NOTE: for name coherency purpose only.
    def block
        return self.mode
    end
    # Need to be able to declare select operators
    include Hmux
    # Creates a new if statement with a +condition+ that when met lead
    # to the execution of the block in +mode+ generated by the +ruby_block+.
    #
    # NOTE: the else part is defined through the helse method.
    def hif(condition, mode = nil, &ruby_block)
        # Creates the if statement.
        self.add_statement(If.new(condition,mode,&ruby_block))
    end
    # Sets the block executed when the condition is not met to the block
    # in +mode+ generated by the execution of +ruby_block+.
    #
    # Can only be used once.
    def helse(mode = nil, &ruby_block)
        # There is a ruby_block: the helse is assumed to be with
        # the hif in the same block.
        # Completes the hif or the hcase statement.
        statement = @statements.last
        unless statement.is_a?(If) or statement.is_a?(Case) then
            raise "Error: helse statement without hif nor hcase (#{statement.class})."
        end
        statement.helse(mode, &ruby_block)
    end
    # Sets the condition check when the condition is not met to the block,
    # with a +condition+ that when met lead
    # to the execution of the block in +mode+ generated by the +ruby_block+.
    def helsif(condition, mode = nil, &ruby_block)
        # There is a ruby_block: the helse is assumed to be with
        # the hif in the same block.
        # Completes the hif statement.
        statement = @statements.last
        unless statement.is_a?(If) then
            raise "Error: helsif statement without hif (#{statement.class})."
        end
        statement.helsif(condition, mode, &ruby_block)
    end
    # Creates a new case statement with a +value+ used for deciding which
    # block to execute.
    #
    # NOTE: the when part is defined through the hwhen method.
    def hcase(value)
        # Creates the case statement.
        self.add_statement(Case.new(value))
    end
    # Sets the block of a case structure executed when the +match+ is met
    # to the block in +mode+ generated by the execution of +ruby_block+.
    #
    # Can only be used once.
    def hwhen(match, mode = nil, &ruby_block)
        # There is a ruby_block: the helse is assumed to be with
        # the hif in the same block.
        # Completes the hcase statement.
        statement = @statements.last
        unless statement.is_a?(Case) then
            raise "Error: hwhen statement without hcase (#{statement.class})."
        end
        statement.hwhen(match, mode, &ruby_block)
    end
end
##
# Describes a high-level block.
class Block < Low::Block
    High = HDLRuby::High
    include HBlock
    include Hinner
    # Creates a new +mode+ sort of block, with possible +name+
    # and build it by executing +ruby_block+.
    def initialize(mode, name=:"", &ruby_block)
        # Initialize the block.
        super(mode,name)
        unless name.empty? then
            # Named block, set the hdl-like access to the block.
            obj = self # For using the right self within the proc
            High.space_reg(name) { obj }
        end
        # Creates the namespace (must exist before build is called).
        @namespace = Namespace.new(self)
        # puts "methods = #{self.methods.sort}"
        build(&ruby_block)
    end
    # Converts the block to HDLRuby::Low.
    def to_low
        # Create the resulting block
        blockL = HDLRuby::Low::Block.new(self.mode)
        # Push the namespace for the low generation.
        High.space_push(@namespace)
        # Pushes on the name stack for converting the internals of
        # the block.
        High.names_push
        # Add the inner signals
        self.each_inner { |inner| blockL.add_inner(inner.to_low) }
        # Add the statements
        self.each_statement do |statement|
            blockL.add_statement(statement.to_low)
        end
        # Restores the name stack.
        High.names_pop
        # Restores the namespace stack.
        High.space_pop
        # Return the resulting block
        return blockL
    end
end
# Describes a timed block.
#
# NOTE:
# * this is the only kind of block that can include time statements.
# * this kind of block is not synthesizable!
class TimeBlock < Low::TimeBlock
    High = HDLRuby::High
    include HBlock
    # Creates a new +type+ sort of block with possible +name+
    # and build it by executing +ruby_block+.
    def initialize(type, name = :"", &ruby_block)
        # Initialize the block.
        super(type,name)
        unless name.empty? then
            # Named block, set the hdl-like access to the block.
            obj = self # For using the right self within the proc
            High.space_reg(name) { obj }
        end
        # Creates the namespace (must exist before build is called).
        @namespace = Namespace.new(self)
        build(&ruby_block)
    end
    # Adds a wait +delay+ statement in the block.
    def wait(delay)
        self.add_statement(TimeWait.new(delay))
    end
    # Adds a loop until +delay+ statement in the block in +mode+ whose
    # loop content is built using +ruby_block+.
    def repeat(delay, mode = nil, &ruby_block)
        # Build the content block.
        # content = High.make_block(:par,&ruby_block)
        content = High.make_block(mode,&ruby_block)
        # Create and add the statement.
        self.add_statement(TimeRepeat.new(content,delay))
    end
    # Converts the time block to HDLRuby::Low.
    # NOTE(review): unlike Block#to_low, no namespace/name-stack
    # push/pop is performed here — confirm whether that is intentional.
    def to_low
        # Create the resulting block
        blockL = HDLRuby::Low::TimeBlock.new(self.mode)
        # Add the inner signals
        self.each_inner { |inner| blockL.add_inner(inner.to_low) }
        # Add the statements
        self.each_statement do |statement|
            blockL.add_statement(statement.to_low)
        end
        # Return the resulting block
        return blockL
    end
end
# Creates a block executed in +mode+, with possible +name+,
# that can be timed or not depending on the enclosing object and build
# it by executing the enclosing +ruby_block+.
#
# NOTE: not a method to include since it can only be used with
# a behavior or a block. Hence set as module method.
def self.make_block(mode = nil, name = :"", &ruby_block)
    # Without an explicit mode, inherit the mode of an enclosing
    # block if any, or default to :par at the top level.
    mode ||= top_user.is_a?(Block) ? top_user.mode : :par
    # A block created inside a timed block must itself be timed.
    if top_user.is_a?(TimeBlock) then
        # return TimeBlock.new(mode,from_users(:block_extensions),&ruby_block)
        TimeBlock.new(mode,name,&ruby_block)
    else
        # return Block.new(mode,from_users(:block_extensions),&ruby_block)
        Block.new(mode,name,&ruby_block)
    end
end
# Creates a specifically timed block in +mode+, with possible +name+
# and build it by executing the enclosing +ruby_block+.
#
# NOTE: not a method to include since it can only be used with
# a behavior or a block. Hence set as module method.
def self.make_time_block(mode = nil, name = :"", &ruby_block)
    unless mode then
        # No type of block given, get a default one.
        if top_user.is_a?(Block) then
            # There is an upper block, use its mode.
            # FIX: was `block.mode`, but no +block+ method exists in
            # this context (would raise NameError); use top_user as
            # the sibling make_block does.
            mode = top_user.mode
        else
            # There is no upper block, use :par as default.
            mode = :par
        end
    end
    # return TimeBlock.new(mode,top_user.block_extensions,&ruby_block)
    return TimeBlock.new(mode,name,&ruby_block)
end
##
# Describes a high-level behavior.
class Behavior < Low::Behavior
    High = HDLRuby::High
    # # Creates a new behavior executing +block+ activated on a list of
    # # +events+, and built by executing +ruby_block+.
    # def initialize(*events,&ruby_block)
    #     # Initialize the behavior
    #     super()
    #     # Add the events.
    #     events.each { |event| self.add_event(event) }
    #     # Create a default par block for the behavior.
    #     block = High.make_block(:par,&ruby_block)
    #     self.add_block(block)
    #     # # Build the block by executing the ruby block in context.
    #     # High.space_push(block)
    #     # High.top_user.instance_eval(&ruby_block)
    #     # High.space_pop
    # end
    # Creates a new behavior executing +block+ activated on a list of
    # +events+, and built by executing +ruby_block+.
    # If the last element of +events+ responds to to_sym it is treated
    # as the block mode instead of an event.
    def initialize(*events,&ruby_block)
        # Create a default par block for the behavior.
        # block = High.make_block(:par,&ruby_block)
        mode = nil
        if events.last.respond_to?(:to_sym) then
            # A mode is given, use it.
            mode = events.pop.to_sym
        end
        # block = High.make_block(mode,&ruby_block)
        # Initialize the behavior with it.
        # super(block)
        super(nil)
        # Sets the current behavior so it is visible while the block
        # below is being built.
        @@cur_behavior = self
        # Add the events.
        events.each { |event| self.add_event(event) }
        # Create and add the block.
        self.block = High.make_block(mode,&ruby_block)
        # Unset the current behavior
        @@cur_behavior = nil
    end
    # Converts the time behavior to HDLRuby::Low.
    def to_low
        # Create the low level block.
        blockL = self.block.to_low
        # Create the low level events.
        eventLs = self.each_event.map { |event| event.to_low }
        # Create and return the resulting low level behavior.
        behaviorL = HDLRuby::Low::Behavior.new(blockL)
        eventLs.each(&behaviorL.method(:add_event))
        return behaviorL
    end
end
##
# Describes a high-level timed behavior.
class TimeBehavior < Low::TimeBehavior
    High = HDLRuby::High

    # Creates a new timed behavior in +mode+, built by executing
    # +ruby_block+.
    #
    # NOTE: the inner block is forced to be a TimeBlock, hence
    # make_time_block is used (not block(:par)).
    def initialize(mode = nil, &ruby_block)
        super(High.make_time_block(mode,&ruby_block))
    end

    # Converts the time behavior to HDLRuby::Low.
    def to_low
        # Translate the block, wrap it in a low-level time behavior,
        # then attach the translated events.
        lowered = HDLRuby::Low::TimeBehavior.new(self.block.to_low)
        self.each_event { |event| lowered.add_event(event.to_low) }
        return lowered
    end
end
# # Ensures constants defined is this module are prioritary.
# # @!visibility private
# def self.included(base) # :nodoc:
#     if base.const_defined?(:SignalI) then
#         base.send(:remove_const,:SignalI)
#         base.const_set(:SignalI,HDLRuby::High::SignalI)
#     end
# end

# Handle the namespaces for accessing the hardware referencing methods.

# The universe, i.e., the top system type: a nameless, empty system.
Universe = SystemT.new(:"") {}

# The universe does not have input, output, nor inout: remove the
# corresponding declarators from its singleton so they cannot be used.
class << Universe
    undef_method :input
    undef_method :output
    undef_method :inout
    undef_method :add_input
    undef_method :add_output
    undef_method :add_inout
end

# include Hmissing

# The namespace stack: never empty, the top is a nameless system without
# input nor output (the Universe's scope namespace).
Namespaces = [Universe.scope.namespace]
private_constant :Namespaces
# Pushes +namespace+ on the namespace stack.
def self.space_push(namespace)
    # Ensure we really have a namespace object before stacking it.
    Namespaces.push(namespace.to_namespace)
end
# Inserts +namespace+ at position +index+ of the namespace stack.
def self.space_insert(index,namespace)
    position = index.to_i
    Namespaces.insert(position, namespace.to_namespace)
end
# Pops a namespace; the bottom (Universe) namespace can never be popped.
def self.space_pop
    raise "Internal error: cannot pop further namespaces." if Namespaces.size <= 1
    Namespaces.pop
end
# Gets the position of +namespace+ within the stack (nil if absent).
def self.space_index(namespace)
    return Namespaces.find_index(namespace)
end
# Gets the top of the namespaces stack (the innermost namespace).
def self.space_top
    return Namespaces.last
end
# Gets the construct whose namespace is on top of the namespaces stack.
def self.top_user
    return space_top.user
end
# Gather the result of the execution of +method+ from all the users
# of the namespaces (top of the stack first).
#
# Returns the concatenated results as an array; users that do not
# respond to +method+ are simply skipped.
def self.from_users(method)
    # BUG FIX: the former reduce-based version returned nil from the
    # block when a user did not respond to +method+, nil-ing the
    # accumulator for the next iteration.
    Namespaces.reverse_each.each_with_object([]) do |space,res|
        user = space.user
        res.concat([*user.send(method)]) if user.respond_to?(method)
    end
end
# Iterates over each namespace of the stack.
#
# Returns an enumerator if no ruby block is given.
def self.space_each(&ruby_block)
    # Without a block, hand back an enumerator instead.
    return to_enum(:space_each) if ruby_block.nil?
    # Otherwise apply the block on every namespace.
    Namespaces.each(&ruby_block)
end
# Tells if the construction context is within a system type
# (i.e., anything above the Universe has been pushed).
def self.in_system?
    return Namespaces.length > 1
end
# Gets the enclosing system type if any; raises when the construction
# context is not inside a system type.
def self.cur_system
    raise "Not within a system type." if Namespaces.size <= 1
    # Search, innermost first, for a scope directly owned by a SystemT.
    scope_space = Namespaces.reverse_each.find do |space|
        space.user.is_a?(Scope) && space.user.parent.is_a?(SystemT)
    end
    return scope_space.user.parent
end
# The current behavior: by default none.
# NOTE(review): Behavior#initialize assigns a @@cur_behavior inside the
# Behavior class body; a class variable at this module level is distinct
# storage in Ruby — confirm the two actually alias as intended.
@@cur_behavior = nil

# Gets the enclosing behavior if any (nil when no behavior is being
# built).
def self.cur_behavior
    # # Gets the enclosing system type.
    # systemT = self.cur_system
    # # Gets the current behavior from it.
    # unless systemT.each_behavior.any? then
    #     raise "Not within a behavior."
    # end
    # # return systemT.each.reverse_each.first
    # return systemT.last_behavior
    return @@cur_behavior
end
# Tells if the construction context is currently inside a behavior,
# i.e., the top namespace user is a block.
def self.in_behavior?
    return top_user.is_a?(Block)
end
# Gets the enclosing block if any.
#
# NOTE: +level+ allows to get an upper block of the currently enclosing
# block.
def self.cur_block(level = 0)
    candidate = Namespaces[-1 - level].user
    unless candidate.is_a?(Block)
        raise "Not within a block: #{candidate.class}"
    end
    return candidate
end
# Registers hardware referencing method +name+ to the current namespace
# (the top of the namespace stack).
def self.space_reg(name,&ruby_block)
    Namespaces.last.add_method(name,&ruby_block)
end
# Looks up and calls method +name+ from the namespace stack with arguments
# +args+ and block +ruby_block+.
#
# The lookup goes from the top (innermost) namespace downward, then falls
# back to HDLRuby::High module methods; raises NoMethodError when the
# name is found nowhere.
def self.space_call(name,*args,&ruby_block)
    # print "space_call with name=#{name}\n"
    # Ensures name is a symbol.
    name = name.to_sym
    # Look from the top of the namespace stack.
    Namespaces.reverse_each do |space|
        if space.respond_to?(name) then
            # The method is found, call it.
            # NOTE(review): +ruby_block+ is not forwarded here although
            # the HDLRuby::High fallback below does forward it — confirm
            # whether this asymmetry is intentional.
            return space.send(name,*args)
        end
    end
    # Look in the top namespace.
    # if Namespaces[-1].respond_to?(name) then
    #     # Found.
    #     return Namespaces[-1].send(name,*args,&ruby_block)
    # end
    # Look in the global methods.
    if HDLRuby::High.respond_to?(name) then
        # Found.
        return HDLRuby::High.send(name,*args,&ruby_block)
    end
    # Not found.
    raise NoMethodError.new("undefined local variable or method `#{name}'.")
end
# Extends the standard classes for support of HDLRuby.

# Extends the Numeric class for conversion to a high-level expression.
class ::Numeric
    # to_expr is to be defined in the subclasses of ::Numeric.

    # Converts to a new high-level value.
    def to_value
        return to_expr
    end

    # Delay-building shortcuts: one conversion method per time unit
    # (e.g. 3.ns builds a 3 nanosecond Delay).
    [:ps, :ns, :us, :ms, :s].each do |unit|
        define_method(unit) { Delay.new(self, unit) }
    end
end
# Extends the Integer class for computing the bit width.
class ::Integer
    # Gets the bit width needed to hold this (non-negative) value.
    def width
        return Math.log2(succ).ceil
    end
end
# Extends the Fixnum class for computing for conversion to expression.
# NOTE(review): Fixnum is deprecated since Ruby 2.4 and removed in
# Ruby 3.2, so this reopening fails on modern Rubies — confirm the
# supported Ruby version range.
class ::Fixnum
    # Converts to a new high-level expression.
    # +Integer+ resolves lexically to the HDLRuby::High::Integer type
    # constant defined in this module, not to ::Integer.
    def to_expr
        return Value.new(Integer,self)
    end
end
# Extends the Bignum class for computing for conversion to expression.
# NOTE(review): Bignum is deprecated since Ruby 2.4 and removed in
# Ruby 3.2, so this reopening fails on modern Rubies — confirm the
# supported Ruby version range.
class ::Bignum
    # Converts to a new high-level expression.
    # +Bignum+ resolves lexically to the HDLRuby::High::Bignum type
    # constant defined in this module, not to ::Bignum.
    def to_expr
        return Value.new(Bignum,self)
    end
end
# Extends the Float class for computing the bit width and conversion
# to expression.
class ::Float
    # Converts to a new high-level expression (a Real-typed Value).
    def to_expr
        Value.new(Real, self)
    end

    # Gets the bit width: always 64 for a Float.
    def width
        64
    end
end
# Extends the Hash class for declaring signals of structure types:
# the hash itself describes the structure to build.
class ::Hash
    # Define the four direction declarators in one go: each one builds,
    # for every given name, a SignalI of a TypeStruct made from the hash
    # and registers it on the current top construct.
    { :input => :add_input, :output => :add_output,
      :inout => :add_inout, :inner => :add_inner }.each do |dir,adder|
        define_method(dir) do |*names|
            names.each do |name|
                HDLRuby::High.top_user.send(
                    adder, SignalI.new(name,TypeStruct.new(:"",self),dir))
            end
        end
    end
end
# Extends the Array class for conversion to a high-level expression.
class ::Array
    include HArrow

    # Converts to a new high-level expression: a Concat of the elements'
    # expressions.
    def to_expr
        expr = Concat.new
        self.each {|elem| expr.add_expression(elem.to_expr) }
        expr
    end

    # Converts to a new high-level reference: a RefConcat of the
    # elements' references.
    def to_ref
        expr = RefConcat.new
        self.each {|elem| expr.add_ref(elem.to_ref) }
        expr
    end

    # Converts to a new type: a bit-vector type when the array holds a
    # single range or integer-like element, a tuple type otherwise.
    def to_type
        if self.size == 1 and
           ( self[0].is_a?(Range) or self[0].respond_to?(:to_i) ) then
            # Vector type case
            return bit[*self]
        else
            # Tuple type case.
            return TypeTuple.new(:"",*self)
        end
    end

    # SignalI creation through the array taken as type.

    # Declares high-level input signals named +names+ of the current type.
    def input(*names)
        High.top_user.make_inputs(self.to_type,*names)
    end

    # Declares high-level untyped output signals named +names+ of the
    # current type.
    def output(*names)
        High.top_user.make_outputs(self.to_type,*names)
    end

    # Declares high-level untyped inout signals named +names+ of the
    # current type.
    def inout(*names)
        High.top_user.make_inouts(self.to_type,*names)
    end

    # Declares high-level untyped inner signals named +names+ of the
    # current type.
    def inner(*names)
        High.top_user.make_inners(self.to_type,*names)
    end

    # Array construction shortcuts

    # Create an array whose number of elements is given by the content
    # of the current array, filled by +obj+ objects.
    # If +obj+ is nil, +ruby_block+ is used instead for filling the array.
    def call(obj = nil, &ruby_block)
        unless self.size == 1 then
            # BUG FIX: former message misspelled "operator" as "opertor".
            raise "Invalid array for call operator."
        end
        number = self[0].to_i
        if obj then
            return Array.new(number,obj)
        else
            return Array.new(number,&ruby_block)
        end
    end

    # Create an array of instances obtained by instantiating the elements
    # using +args+ as argument and register the result to +name+.
    #
    # NOTE: the instances are unnamed since it is the resulting array
    # that is registered.
    def make(name,*args)
        # Instantiate the types.
        instances = self.map { |elem| elem.instantiate(:"",*args) }
        # Add them to the top system
        High.space_top.user.add_groupI(name,*instances)
        # Register and return the result.
        High.space_reg(name) { High.space_top.user.get_groupI(name) }
        return High.space_top.user.get_groupI(name)
    end
end
# Extends the symbol class for auto declaration of input or output.
class ::Symbol
    High = HDLRuby::High

    # # Converts to a new high-level expression.
    # def to_expr
    #     self.to_ref
    # end

    # # Converts to a new high-level reference refering to an unbounded signal.
    # def to_ref
    #     # Create the unbounded signal and add it to the upper system type.
    #     signal = SignalI.new(self,void,:no)
    #     High.cur_system.add_unbound(signal)
    #     # Convert it to a reference and return the result.
    #     return signal.to_ref
    # end
    # alias :+@ :to_ref

    # Converts to a new value.
    #
    # Parses the symbol as a literal of shape
    # <type><width?><base?><digits>, e.g. :b8hFF, :u16d42, :s101.
    # Type is b (bit), u (unsigned) or s (signed); base is b/o/d/h and
    # defaults to b. The resulting type constructors (bit/unsigned/
    # signed) are resolved at call time.
    # NOTE(review): relies on bit/unsigned/signed being reachable from
    # Symbol instances — confirm where these are mixed in.
    #
    # Returns nil if no value can be obtained from it.
    def to_value
        str = self.to_s
        # puts "str=#{str}"
        # Get and check the type
        type = str[0]
        # puts "type=#{type}"
        str = str[1..-1]
        return nil if str.empty?
        return nil unless ["b","u","s"].include?(type)
        # Get the width if any.
        if str[0].match(/[0-9]/) then
            width = str.scan(/[0-9]*/)[0]
        else
            width = nil
        end
        # puts "width=#{width}"
        old_str = str # Save the string in this state since its first chars
                      # can be erroneously considered as giving the width
        str = str[width.size..-1] if width
        # Get the base and the value
        base = str[0]
        # puts "base=#{base}\n"
        unless ["b", "o", "d", "h"].include?(base) then
            # No base found, default is bit
            base = "b"
            # And the width was actually a part of the value.
            value = old_str
            width = nil
        else
            # Get the value.
            value = str[1..-1]
        end
        # puts "value=#{value}"
        # Compute the bit width and the value
        case base
        when "b" then
            # base 2, compute the width
            width = width ? width.to_i : value.size
            # # Check the value
            # if value.match(/^[0-1]+$/) then
            #     # Numeric value, compute the corresponding integer
            #     value = value.to_i(2)
            # elsif !value.match(/^[0-1zxZX]+$/) then
            #     # Invalid value.
            #     return nil
            # end
            # Check the value (binary digits, plus z/x 4-state marks)
            return nil unless value.match(/^[0-1zxZX]+$/)
        when "o" then
            # base 8, compute the width
            width = width ? width.to_i : value.size * 3
            # Check the value
            # if value.match(/^[0-7]+$/) then
            #     # Numeric value, compute the corresponding integer
            #     value = value.to_i(8)
            # elsif value.match(/^[0-7xXzZ]+$/) then
            if value.match(/^[0-7xXzZ]+$/) then
                # 4-state value, compute the corresponding bit string
                # (each octal digit expands to 3 bits).
                value = value.each_char.map do |c|
                    c = c.upcase
                    if c == "X" or c.upcase == "Z" then
                        c * 3
                    else
                        c.to_i(8).to_s(2).rjust(3,"0")
                    end
                end.join
            else
                # Invalid value
                return nil
            end
        when "d" then
            # base 10, compute the width
            width = width ? width.to_i : value.to_i.to_s(2).size + 1
            # Check the value
            return nil unless value.match(/^[0-9]+$/)
            # Compute it (base 10 values cannot be 4-state!)
            value = value.to_i.to_s(2)
        when "h" then
            # base 16, compute the width
            width = width ? width.to_i : value.size * 4
            # Check the value
            # if value.match(/^[0-9a-fA-F]+$/) then
            #     # Numeric value, compute the corresponding integer
            #     value = value.to_i(16)
            # elsif value.match(/^[0-9a-fA-FxXzZ]+$/) then
            if value.match(/^[0-9a-fA-FxXzZ]+$/) then
                # 4-state value, compute the corresponding bit string
                # (each hex digit expands to 4 bits).
                value = value.each_char.map do |c|
                    c = c.upcase
                    if c == "X" or c.upcase == "Z" then
                        c * 4
                    else
                        c.to_i(16).to_s(2).rjust(4,"0")
                    end
                end.join
            else
                # Invalid value
                return nil
            end
        else
            # Unknown base
            return nil
        end
        # Compute the type.
        case type
        when "b" then
            type = bit[width]
        when "u" then
            type = unsigned[width]
        when "s" then
            type = signed[width]
        else
            # Unknown type
            return nil
        end
        # puts "type=#{type}, value=#{value}"
        # Create and return the value.
        # return Value.new(type,HDLRuby::BitString.new(value))
        return Value.new(type,value)
    end
end
# Extends the range class to support to_low.
class ::Range
    # Converts the bounds to HDLRuby::Low (when they support +to_low+)
    # and rebuilds the range from them.
    def to_low
        lo = self.first
        hi = self.last
        lo = lo.to_low if lo.respond_to?(:to_low)
        hi = hi.to_low if hi.respond_to?(:to_low)
        return (lo..hi)
    end

    # Iterates over the range as hardware: the bounds are reordered so
    # the traversal is always ascending.
    #
    # Returns an enumerator if no ruby block is given.
    def heach(&ruby_block)
        return to_enum(:heach) unless ruby_block
        bound_a, bound_b = self.first, self.last
        bound_a, bound_b = bound_b, bound_a if bound_a > bound_b
        (bound_a..bound_b).each(&ruby_block)
    end
end
# Method and attribute for generating an absolute uniq name.
# Such names cannot be used in HDLRuby::High code, but can be used
# to generate such code.

@@absoluteCounter = -1 # The absolute name counter.

# Generates an absolute uniq name: a symbol of the shape :":<counter>",
# where the counter increases on each call.
def self.uniq_name
    @@absoluteCounter += 1
    return ":#{@@absoluteCounter}".to_sym
end
# Methods for managing the conversion to HDLRuby::Low

# Methods for generating uniq names in context

# The stack of name sets used for creating new names without conflicts.
NameStack = [ Set.new ]

# Opens a new naming context on top of the stack.
def self.names_push
    NameStack.push(Set.new)
end

# Closes the current (innermost) naming context.
def self.names_pop
    NameStack.pop
end

# Records +name+ (as a string) in the current naming context.
def self.names_add(name)
    NameStack[-1].add(name.to_s)
end

# Tells whether +name+ is recorded in any open naming context.
def self.names_has?(name)
    NameStack.find do |names|
        names.include?(name)
    end
end

# Derives from +base+ a name that collides with no recorded name,
# records it, and returns it as a symbol.
def self.names_create(base)
    candidate = base.to_s.clone
    # Suffix underscores until the name is free.
    candidate << "_" while self.names_has?(candidate)
    self.names_add(candidate)
    # puts "created name: #{candidate}"
    return candidate.to_sym
end
# Standard vector types.

# The signed integer type.
Integer = TypeSigned.new(:integer)
# The unsigned natural type.
Natural = TypeUnsigned.new(:natural)
# The signed type with an Infinity..0 bit range.
Bignum = TypeSigned.new(:bignum,HDLRuby::Infinity..0)
# The floating-point type.
Real = TypeFloat.new(:float)
end
# Enters in HDLRuby::High mode: mixes HDLRuby::High into the calling
# context, installs namespace-based method lookup on main, and creates
# the standard $clk/$rst signals.
def self.configure_high
    include HDLRuby::High
    class << self
        # For main, missing methods are looked for in the namespaces.
        def method_missing(m, *args, &ruby_block)
            # print "method_missing in class=#{self.class} with m=#{m}\n"
            # Is the missing method an immediate value?
            # (Symbol#to_value parses literals such as :b8hFF.)
            value = m.to_value
            return value if value and args.empty?
            # puts "Universe methods: #{Universe.namespace.methods}"
            # Not a value, but maybe it is in the namespaces
            if Namespaces[-1].respond_to?(m) then
                # Yes use it.
                Namespaces[-1].send(m,*args,&ruby_block)
            else
                # No, true error
                raise NoMethodError.new("undefined local variable or method `#{m}'.")
            end
        end
    end
    # Generate the standard signals as globals, declared as inner
    # signals of the Universe scope.
    # $clk = SignalI.new(:__universe__clk__,Bit,:inner)
    # $rst = SignalI.new(:__universe__rst__,Bit,:inner)
    $clk = Universe.scope.inner :__universe__clk__
    $rst = Universe.scope.inner :__universe__rst__
end
|
# Homebrew formula for the John the Ripper "jumbo" community edition.
class JohnJumbo < Formula
  homepage "http://www.openwall.com/john/"
  url "http://openwall.com/john/j/john-1.8.0-jumbo-1.tar.xz"
  sha1 "38196f21d2c9c4b539529d0820eb242d5373241f"
  # The tarball is 1.8.0-jumbo-1; the version tracks the base release.
  version "1.8.0"

  bottle do
    revision 2
    sha1 "5ab9f75db6b8303b793fca20948c0d9645a912fe" => :yosemite
    sha1 "ac84043c8d73c2db6e11b7741685cb46275d37f8" => :mavericks
    sha1 "7764fe2e72d3f695936e4a05bd7a1f063fd8dda9" => :mountain_lion
  end

  conflicts_with "john", :because => "both install the same binaries"

  option "without-completion", "bash/zsh completion will not be installed"

  depends_on "pkg-config" => :build
  depends_on "openssl"
  depends_on "gmp"

  # Patch taken from MacPorts, tells john where to find runtime files.
  # https://github.com/magnumripper/JohnTheRipper/issues/982
  patch :DATA

  fails_with :llvm do
    build 2334
    cause "Don't remember, but adding this to whitelist 2336."
  end

  # https://github.com/magnumripper/JohnTheRipper/blob/bleeding-jumbo/doc/INSTALL#L133-L143
  fails_with :gcc do
    cause "Upstream have a hacky workaround for supporting gcc that we can't use."
  end

  def install
    cd "src" do
      args = []
      # Bottled builds must not carry host-CPU-specific optimizations.
      if build.bottle?
        args << "--disable-native-tests" << "--disable-native-macro"
      end
      system "./configure", *args
      system "make", "clean"
      system "make", "-s", "CC=#{ENV.cc}"
    end

    # Remove the symlink and install the real file
    rm "README"
    prefix.install "doc/README"
    doc.install Dir["doc/*"]

    # Only symlink the main binary into bin
    (share/"john").install Dir["run/*"]
    bin.install_symlink share/"john/john"

    if build.with? "completion"
      bash_completion.install share/"john/john.bash_completion" => "john.bash"
      zsh_completion.install share/"john/john.zsh_completion" => "_john"
    end

    # Source code defaults to "john.ini", so rename
    mv share/"john/john.conf", share/"john/john.ini"
  end

  test do
    # NOTE(review): unlike later revisions, ENV["HOME"] is not pointed
    # at testpath here — confirm john does not touch the real home dir.
    touch "john2.pot"
    # Single-string system call so the shell handles the redirection.
    system "echo dave:`printf secret | openssl md5` > test"
    output = shell_output("#{bin}/john --pot=#{testpath}/john2.pot --format=raw-md5 test")
    assert output.include? "secret"
    assert (testpath/"john2.pot").read.include?("secret")
  end
end
__END__
--- a/src/params.h 2012-08-30 13:24:18.000000000 -0500
+++ b/src/params.h 2012-08-30 13:25:13.000000000 -0500
@@ -70,15 +70,15 @@
* notes above.
*/
#ifndef JOHN_SYSTEMWIDE
-#define JOHN_SYSTEMWIDE 0
+#define JOHN_SYSTEMWIDE 1
#endif
#if JOHN_SYSTEMWIDE
#ifndef JOHN_SYSTEMWIDE_EXEC /* please refer to the notes above */
-#define JOHN_SYSTEMWIDE_EXEC "/usr/libexec/john"
+#define JOHN_SYSTEMWIDE_EXEC "HOMEBREW_PREFIX/share/john"
#endif
#ifndef JOHN_SYSTEMWIDE_HOME
-#define JOHN_SYSTEMWIDE_HOME "/usr/share/john"
+#define JOHN_SYSTEMWIDE_HOME "HOMEBREW_PREFIX/share/john"
#endif
#define JOHN_PRIVATE_HOME "~/.john"
#endif
john-jumbo: update 1.8.0 bottle.
# Homebrew formula for the John the Ripper "jumbo" community edition
# (bottle revision 3, sha256 bottle checksums).
class JohnJumbo < Formula
  homepage "http://www.openwall.com/john/"
  url "http://openwall.com/john/j/john-1.8.0-jumbo-1.tar.xz"
  sha1 "38196f21d2c9c4b539529d0820eb242d5373241f"
  # The tarball is 1.8.0-jumbo-1; the version tracks the base release.
  version "1.8.0"

  bottle do
    revision 3
    sha256 "b5d13ea393e16a474bcd69d0d7fd14038effac04d423b6041d9dbb76dd6325ae" => :yosemite
    sha256 "d8303c4412f7354e2778ef58ed8eb366d9d474491b255ad5f32d27946df174e6" => :mavericks
    sha256 "c3a9c980f5725ec08854cdce75b91af58bb4f61c8a30e2d700de45e0a5b9ff3c" => :mountain_lion
  end

  conflicts_with "john", :because => "both install the same binaries"

  option "without-completion", "bash/zsh completion will not be installed"

  depends_on "pkg-config" => :build
  depends_on "openssl"
  depends_on "gmp"

  # Patch taken from MacPorts, tells john where to find runtime files.
  # https://github.com/magnumripper/JohnTheRipper/issues/982
  patch :DATA

  fails_with :llvm do
    build 2334
    cause "Don't remember, but adding this to whitelist 2336."
  end

  # https://github.com/magnumripper/JohnTheRipper/blob/bleeding-jumbo/doc/INSTALL#L133-L143
  fails_with :gcc do
    cause "Upstream have a hacky workaround for supporting gcc that we can't use."
  end

  def install
    cd "src" do
      args = []
      # Bottled builds must not carry host-CPU-specific optimizations.
      if build.bottle?
        args << "--disable-native-tests" << "--disable-native-macro"
      end
      system "./configure", *args
      system "make", "clean"
      system "make", "-s", "CC=#{ENV.cc}"
    end

    # Remove the symlink and install the real file
    rm "README"
    prefix.install "doc/README"
    doc.install Dir["doc/*"]

    # Only symlink the main binary into bin
    (share/"john").install Dir["run/*"]
    bin.install_symlink share/"john/john"

    if build.with? "completion"
      bash_completion.install share/"john/john.bash_completion" => "john.bash"
      zsh_completion.install share/"john/john.zsh_completion" => "_john"
    end

    # Source code defaults to "john.ini", so rename
    mv share/"john/john.conf", share/"john/john.ini"
  end

  test do
    # NOTE(review): ENV["HOME"] is not pointed at testpath here —
    # confirm john does not touch the real home dir.
    touch "john2.pot"
    system "echo dave:`printf secret | openssl md5` > test"
    output = shell_output("#{bin}/john --pot=#{testpath}/john2.pot --format=raw-md5 test")
    assert output.include? "secret"
    assert (testpath/"john2.pot").read.include?("secret")
  end
end
__END__
--- a/src/params.h 2012-08-30 13:24:18.000000000 -0500
+++ b/src/params.h 2012-08-30 13:25:13.000000000 -0500
@@ -70,15 +70,15 @@
* notes above.
*/
#ifndef JOHN_SYSTEMWIDE
-#define JOHN_SYSTEMWIDE 0
+#define JOHN_SYSTEMWIDE 1
#endif
#if JOHN_SYSTEMWIDE
#ifndef JOHN_SYSTEMWIDE_EXEC /* please refer to the notes above */
-#define JOHN_SYSTEMWIDE_EXEC "/usr/libexec/john"
+#define JOHN_SYSTEMWIDE_EXEC "HOMEBREW_PREFIX/share/john"
#endif
#ifndef JOHN_SYSTEMWIDE_HOME
-#define JOHN_SYSTEMWIDE_HOME "/usr/share/john"
+#define JOHN_SYSTEMWIDE_HOME "HOMEBREW_PREFIX/share/john"
#endif
#define JOHN_PRIVATE_HOME "~/.john"
#endif
|
# Homebrew formula for the John the Ripper "jumbo" community edition
# (earlier revision: plain configure, sha1 bottles, HOME-sandboxed test).
class JohnJumbo < Formula
  homepage "http://www.openwall.com/john/"
  url "http://openwall.com/john/j/john-1.8.0-jumbo-1.tar.xz"
  sha1 "38196f21d2c9c4b539529d0820eb242d5373241f"
  # The tarball is 1.8.0-jumbo-1; the version tracks the base release.
  version "1.8.0"

  bottle do
    sha1 "a11eb01effa085f1f196353477111f76c39e6349" => :mavericks
    sha1 "acbdf6c2b4f59b2b4e756d7288f3d727ab630706" => :mountain_lion
    sha1 "eef8dcc88d9666c7c3c099bee4cc6d14f27a056b" => :lion
  end

  conflicts_with "john", :because => "both install the same binaries"

  option "without-completion", "bash/zsh completion will not be installed"

  depends_on "pkg-config" => :build
  depends_on "openssl"
  depends_on "gmp"

  # Patch taken from MacPorts, tells john where to find runtime files.
  # https://github.com/magnumripper/JohnTheRipper/issues/982
  patch :DATA

  fails_with :llvm do
    build 2334
    cause "Don't remember, but adding this to whitelist 2336."
  end

  # https://github.com/magnumripper/JohnTheRipper/blob/bleeding-jumbo/doc/INSTALL#L133-L143
  fails_with :gcc do
    cause "Upstream have a hacky workaround for supporting gcc that we can't use."
  end

  def install
    cd "src" do
      system "./configure"
      system "make", "clean"
      system "make", "-s", "CC=#{ENV.cc}"
    end

    # Remove the symlink and install the real file
    rm "README"
    prefix.install "doc/README"
    doc.install Dir["doc/*"]

    # Only symlink the main binary into bin
    (share/"john").install Dir["run/*"]
    bin.install_symlink share/"john/john"

    if build.with? "completion"
      bash_completion.install share/"john/john.bash_completion" => "john.bash"
      zsh_completion.install share/"john/john.zsh_completion" => "_john"
    end

    # Source code defaults to "john.ini", so rename
    mv share/"john/john.conf", share/"john/john.ini"
  end

  test do
    # Keep john's home-directory writes inside the test sandbox.
    ENV["HOME"] = testpath
    touch "john2.pot"
    system "echo dave:`printf secret | openssl md5` > test"
    output = shell_output("#{bin}/john --pot=#{testpath}/john2.pot --format=raw-md5 test")
    assert output.include? "secret"
    assert (testpath/"john2.pot").read.include?("secret")
  end
end
__END__
--- a/src/params.h 2012-08-30 13:24:18.000000000 -0500
+++ b/src/params.h 2012-08-30 13:25:13.000000000 -0500
@@ -70,15 +70,15 @@
* notes above.
*/
#ifndef JOHN_SYSTEMWIDE
-#define JOHN_SYSTEMWIDE 0
+#define JOHN_SYSTEMWIDE 1
#endif
#if JOHN_SYSTEMWIDE
#ifndef JOHN_SYSTEMWIDE_EXEC /* please refer to the notes above */
-#define JOHN_SYSTEMWIDE_EXEC "/usr/libexec/john"
+#define JOHN_SYSTEMWIDE_EXEC "HOMEBREW_PREFIX/share/john"
#endif
#ifndef JOHN_SYSTEMWIDE_HOME
-#define JOHN_SYSTEMWIDE_HOME "/usr/share/john"
+#define JOHN_SYSTEMWIDE_HOME "HOMEBREW_PREFIX/share/john"
#endif
#define JOHN_PRIVATE_HOME "~/.john"
#endif
john-jumbo: update 1.8.0 bottle.
# Homebrew formula for the John the Ripper "jumbo" community edition
# (revision with refreshed sha1 bottles; plain configure build).
class JohnJumbo < Formula
  homepage "http://www.openwall.com/john/"
  url "http://openwall.com/john/j/john-1.8.0-jumbo-1.tar.xz"
  sha1 "38196f21d2c9c4b539529d0820eb242d5373241f"
  # The tarball is 1.8.0-jumbo-1; the version tracks the base release.
  version "1.8.0"

  bottle do
    sha1 "7b1fd2d3d9f12567c70c4e12cbf8cdc525f0f61e" => :yosemite
    sha1 "4514927c45452ffebaeef56e068d0ea1e709e8c2" => :mavericks
    sha1 "c91ebc708391e78c2a586c90da7b85cd394fa0ee" => :mountain_lion
  end

  conflicts_with "john", :because => "both install the same binaries"

  option "without-completion", "bash/zsh completion will not be installed"

  depends_on "pkg-config" => :build
  depends_on "openssl"
  depends_on "gmp"

  # Patch taken from MacPorts, tells john where to find runtime files.
  # https://github.com/magnumripper/JohnTheRipper/issues/982
  patch :DATA

  fails_with :llvm do
    build 2334
    cause "Don't remember, but adding this to whitelist 2336."
  end

  # https://github.com/magnumripper/JohnTheRipper/blob/bleeding-jumbo/doc/INSTALL#L133-L143
  fails_with :gcc do
    cause "Upstream have a hacky workaround for supporting gcc that we can't use."
  end

  def install
    cd "src" do
      system "./configure"
      system "make", "clean"
      system "make", "-s", "CC=#{ENV.cc}"
    end

    # Remove the symlink and install the real file
    rm "README"
    prefix.install "doc/README"
    doc.install Dir["doc/*"]

    # Only symlink the main binary into bin
    (share/"john").install Dir["run/*"]
    bin.install_symlink share/"john/john"

    if build.with? "completion"
      bash_completion.install share/"john/john.bash_completion" => "john.bash"
      zsh_completion.install share/"john/john.zsh_completion" => "_john"
    end

    # Source code defaults to "john.ini", so rename
    mv share/"john/john.conf", share/"john/john.ini"
  end

  test do
    # Keep john's home-directory writes inside the test sandbox.
    ENV["HOME"] = testpath
    touch "john2.pot"
    system "echo dave:`printf secret | openssl md5` > test"
    output = shell_output("#{bin}/john --pot=#{testpath}/john2.pot --format=raw-md5 test")
    assert output.include? "secret"
    assert (testpath/"john2.pot").read.include?("secret")
  end
end
__END__
--- a/src/params.h 2012-08-30 13:24:18.000000000 -0500
+++ b/src/params.h 2012-08-30 13:25:13.000000000 -0500
@@ -70,15 +70,15 @@
* notes above.
*/
#ifndef JOHN_SYSTEMWIDE
-#define JOHN_SYSTEMWIDE 0
+#define JOHN_SYSTEMWIDE 1
#endif
#if JOHN_SYSTEMWIDE
#ifndef JOHN_SYSTEMWIDE_EXEC /* please refer to the notes above */
-#define JOHN_SYSTEMWIDE_EXEC "/usr/libexec/john"
+#define JOHN_SYSTEMWIDE_EXEC "HOMEBREW_PREFIX/share/john"
#endif
#ifndef JOHN_SYSTEMWIDE_HOME
-#define JOHN_SYSTEMWIDE_HOME "/usr/share/john"
+#define JOHN_SYSTEMWIDE_HOME "HOMEBREW_PREFIX/share/john"
#endif
#define JOHN_PRIVATE_HOME "~/.john"
#endif
|
require 'formula'
# Homebrew formula for John the Ripper 1.7.9 jumbo (legacy DSL: patches
# listed through the deprecated +patches+ method).
class JohnJumbo < Formula
  homepage 'http://www.openwall.com/john/'
  url 'http://www.openwall.com/john/g/john-1.7.9.tar.bz2'
  sha1 '8f77bdd42b7cf94ec176f55ea69c4da9b2b8fe3b'

  conflicts_with 'john', :because => 'both install the same binaries'

  def patches
    [
      "http://www.openwall.com/john/g/john-1.7.9-jumbo-7.diff.gz", # Jumbo
      # First patch taken from MacPorts, tells john where to find runtime files
      # Second patch protects against a redefinition of _mm_testz_si128 which
      # tanked the build in clang;
      # see https://github.com/Homebrew/homebrew/issues/26531
      DATA
    ]
  end

  fails_with :llvm do
    build 2334
    cause "Don't remember, but adding this to whitelist 2336."
  end

  def install
    ENV.deparallelize
    # Pick the Makefile target matching the host CPU capabilities.
    arch = MacOS.prefer_64_bit? ? '64' : 'sse2'
    arch += '-opencl'
    target = "macosx-x86-#{arch}"

    cd 'src' do
      inreplace 'Makefile' do |s|
        s.change_make_var! "CC", ENV.cc
        if MacOS.version > :leopard && ENV.compiler != :clang
          s.change_make_var! "OMPFLAGS", "-fopenmp -msse2 -D_FORTIFY_SOURCE=0"
        end
      end
      system "make", "clean", target
    end

    # Remove the README symlink and install the real file
    rm 'README'
    prefix.install 'doc/README'
    doc.install Dir['doc/*']

    # Only symlink the binary into bin
    (share/'john').install Dir['run/*']
    bin.install_symlink share/'john/john'

    # Source code defaults to 'john.ini', so rename
    mv share/'john/john.conf', share/'john/john.ini'
  end
end
__END__
--- a/src/params.h 2012-08-30 13:24:18.000000000 -0500
+++ b/src/params.h 2012-08-30 13:25:13.000000000 -0500
@@ -70,15 +70,15 @@
* notes above.
*/
#ifndef JOHN_SYSTEMWIDE
-#define JOHN_SYSTEMWIDE 0
+#define JOHN_SYSTEMWIDE 1
#endif
#if JOHN_SYSTEMWIDE
#ifndef JOHN_SYSTEMWIDE_EXEC /* please refer to the notes above */
-#define JOHN_SYSTEMWIDE_EXEC "/usr/libexec/john"
+#define JOHN_SYSTEMWIDE_EXEC "HOMEBREW_PREFIX/share/john"
#endif
#ifndef JOHN_SYSTEMWIDE_HOME
-#define JOHN_SYSTEMWIDE_HOME "/usr/share/john"
+#define JOHN_SYSTEMWIDE_HOME "HOMEBREW_PREFIX/share/john"
#endif
#define JOHN_PRIVATE_HOME "~/.john"
#endif
diff --git a/src/rawSHA1_ng_fmt.c b/src/rawSHA1_ng_fmt.c
index 5f89cda..6cbd550 100644
--- a/src/rawSHA1_ng_fmt.c
+++ b/src/rawSHA1_ng_fmt.c
@@ -530,7 +530,7 @@ static void sha1_fmt_crypt_all(int count)
#if defined(__SSE4_1__)
-# if !defined(__INTEL_COMPILER)
+# if !defined(__INTEL_COMPILER) && !defined(__clang__)
// This intrinsic is not always available in GCC, so define it here.
static inline int _mm_testz_si128 (__m128i __M, __m128i __V)
{
john-jumbo: use checksummed patches
require 'formula'
# Homebrew formula for John the Ripper 1.7.9 jumbo (patches declared via
# checksummed +patch+ blocks instead of the deprecated +patches+ method).
class JohnJumbo < Formula
  homepage 'http://www.openwall.com/john/'
  url 'http://www.openwall.com/john/g/john-1.7.9.tar.bz2'
  sha1 '8f77bdd42b7cf94ec176f55ea69c4da9b2b8fe3b'

  conflicts_with 'john', :because => 'both install the same binaries'

  # First patch taken from MacPorts, tells john where to find runtime files
  # Second patch protects against a redefinition of _mm_testz_si128 which
  # tanked the build in clang;
  # see https://github.com/Homebrew/homebrew/issues/26531
  patch do
    url "http://www.openwall.com/john/g/john-1.7.9-jumbo-7.diff.gz"
    sha1 "22fd8294e997f45a301cfeb65a8aa7083f25a55d"
  end

  patch :DATA

  fails_with :llvm do
    build 2334
    cause "Don't remember, but adding this to whitelist 2336."
  end

  def install
    ENV.deparallelize
    # Pick the Makefile target matching the host CPU capabilities.
    arch = MacOS.prefer_64_bit? ? '64' : 'sse2'
    arch += '-opencl'
    target = "macosx-x86-#{arch}"

    cd 'src' do
      inreplace 'Makefile' do |s|
        s.change_make_var! "CC", ENV.cc
        if MacOS.version > :leopard && ENV.compiler != :clang
          s.change_make_var! "OMPFLAGS", "-fopenmp -msse2 -D_FORTIFY_SOURCE=0"
        end
      end
      system "make", "clean", target
    end

    # Remove the README symlink and install the real file
    rm 'README'
    prefix.install 'doc/README'
    doc.install Dir['doc/*']

    # Only symlink the binary into bin
    (share/'john').install Dir['run/*']
    bin.install_symlink share/'john/john'

    # Source code defaults to 'john.ini', so rename
    mv share/'john/john.conf', share/'john/john.ini'
  end
end
__END__
--- a/src/params.h 2012-08-30 13:24:18.000000000 -0500
+++ b/src/params.h 2012-08-30 13:25:13.000000000 -0500
@@ -70,15 +70,15 @@
* notes above.
*/
#ifndef JOHN_SYSTEMWIDE
-#define JOHN_SYSTEMWIDE 0
+#define JOHN_SYSTEMWIDE 1
#endif
#if JOHN_SYSTEMWIDE
#ifndef JOHN_SYSTEMWIDE_EXEC /* please refer to the notes above */
-#define JOHN_SYSTEMWIDE_EXEC "/usr/libexec/john"
+#define JOHN_SYSTEMWIDE_EXEC "HOMEBREW_PREFIX/share/john"
#endif
#ifndef JOHN_SYSTEMWIDE_HOME
-#define JOHN_SYSTEMWIDE_HOME "/usr/share/john"
+#define JOHN_SYSTEMWIDE_HOME "HOMEBREW_PREFIX/share/john"
#endif
#define JOHN_PRIVATE_HOME "~/.john"
#endif
diff --git a/src/rawSHA1_ng_fmt.c b/src/rawSHA1_ng_fmt.c
index 5f89cda..6cbd550 100644
--- a/src/rawSHA1_ng_fmt.c
+++ b/src/rawSHA1_ng_fmt.c
@@ -530,7 +530,7 @@ static void sha1_fmt_crypt_all(int count)
#if defined(__SSE4_1__)
-# if !defined(__INTEL_COMPILER)
+# if !defined(__INTEL_COMPILER) && !defined(__clang__)
// This intrinsic is not always available in GCC, so define it here.
static inline int _mm_testz_si128 (__m128i __M, __m128i __V)
{
|
# Homebrew formula for libjpeg-turbo 1.4.0, a SIMD-accelerated JPEG codec
# that is API/ABI-compatible with libjpeg (built here with the jpeg8 ABI).
class JpegTurbo < Formula
  desc "JPEG image codec that aids compression and decompression"
  homepage "http://www.libjpeg-turbo.org/"
  url "https://downloads.sourceforge.net/project/libjpeg-turbo/1.4.0/libjpeg-turbo-1.4.0.tar.gz"
  mirror "https://mirrors.kernel.org/debian/pool/main/libj/libjpeg-turbo/libjpeg-turbo_1.4.0.orig.tar.gz"
  sha1 "a9ed7a99a6090e0848836c5df8e836f300a098b9"

  bottle do
    cellar :any
    sha1 "847dab53f17c69fc8670407f42d6e6da30e3f527" => :yosemite
    sha1 "d682021ac4745c3e3cfe4a6f1baf6bf07628966a" => :mavericks
    sha1 "f5a667481af812c39caca21ec7842bf678864df3" => :mountain_lion
  end

  depends_on "libtool" => :build
  # nasm is only needed to assemble the 64-bit SIMD sources.
  depends_on "nasm" => :build if MacOS.prefer_64_bit?

  keg_only "libjpeg-turbo is not linked to prevent conflicts with the standard libjpeg."

  def install
    # Refresh libtool's config.guess/config.sub so configure recognises
    # newer host triples.
    cp Dir["#{Formula["libtool"].opt_share}/libtool/*/config.{guess,sub}"], buildpath

    args = ["--disable-dependency-tracking", "--prefix=#{prefix}", "--with-jpeg8", "--mandir=#{man}"]
    if MacOS.prefer_64_bit?
      # Auto-detect our 64-bit nasm
      args << "NASM=#{Formula["nasm"].bin}/nasm"
    end
    system "./configure", *args
    system "make"
    ENV.j1 # Stops a race condition error: file exists
    system "make", "install"
  end

  test do
    # A lossless 1x1 crop + transpose on the fixture verifies jpegtran runs.
    system "#{bin}/jpegtran", "-crop", "1x1",
                              "-transpose", "-perfect",
                              "-outfile", "out.jpg",
                              test_fixtures("test.jpg")
  end
end
jpeg-turbo 1.4.1
Closes #40583.
Signed-off-by: Dominyk Tiller <53e438f55903875d07efdd98a8aaf887e7208dd3@gmail.com>
# Homebrew formula for libjpeg-turbo 1.4.1, a SIMD-accelerated JPEG codec
# that is API/ABI-compatible with libjpeg (built here with the jpeg8 ABI).
class JpegTurbo < Formula
  desc "JPEG image codec that aids compression and decompression"
  homepage "http://www.libjpeg-turbo.org/"
  url "https://downloads.sourceforge.net/project/libjpeg-turbo/1.4.1/libjpeg-turbo-1.4.1.tar.gz"
  sha256 "4bf5bad4ce85625bffbbd9912211e06790e00fb982b77724af7211034efafb08"

  # NOTE(review): these bottle checksums are identical to the 1.4.0 formula's
  # — they appear to predate this version bump; confirm/rebuild the bottles.
  bottle do
    cellar :any
    sha1 "847dab53f17c69fc8670407f42d6e6da30e3f527" => :yosemite
    sha1 "d682021ac4745c3e3cfe4a6f1baf6bf07628966a" => :mavericks
    sha1 "f5a667481af812c39caca21ec7842bf678864df3" => :mountain_lion
  end

  depends_on "libtool" => :build
  # nasm is only needed to assemble the 64-bit SIMD sources.
  depends_on "nasm" => :build if MacOS.prefer_64_bit?

  keg_only "libjpeg-turbo is not linked to prevent conflicts with the standard libjpeg."

  def install
    # Refresh libtool's config.guess/config.sub so configure recognises
    # newer host triples.
    cp Dir["#{Formula["libtool"].opt_share}/libtool/*/config.{guess,sub}"], buildpath

    args = ["--disable-dependency-tracking", "--prefix=#{prefix}", "--with-jpeg8", "--mandir=#{man}"]
    if MacOS.prefer_64_bit?
      # Auto-detect our 64-bit nasm
      args << "NASM=#{Formula["nasm"].bin}/nasm"
    end
    system "./configure", *args
    system "make"
    ENV.j1 # Stops a race condition error: file exists
    system "make", "install"
  end

  test do
    # A lossless 1x1 crop + transpose on the fixture verifies jpegtran runs.
    system "#{bin}/jpegtran", "-crop", "1x1",
                              "-transpose", "-perfect",
                              "-outfile", "out.jpg",
                              test_fixtures("test.jpg")
  end
end
|
require 'formula'

# Homebrew formula for the ANTLR v3 C runtime library.
class Libantlr3c < Formula
  homepage 'http://www.antlr3.org'
  url 'http://www.antlr3.org/download/C/libantlr3c-3.4.tar.gz'
  sha1 'faa9ab43ab4d3774f015471c3f011cc247df6a18'

  bottle do
    cellar :any
    revision 1
    sha1 "5340de248798194c46c5112c73a1ba409912059d" => :yosemite
    sha1 "044b66cb95b396080f1729cc04f9a960f08e6ea5" => :mavericks
    sha1 "b02f52cf3c696b52974e90532cf66964e61c750d" => :mountain_lion
  end

  def install
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    # "make install" builds and installs in a single step.
    system "make install"
  end
end
libantlr3c: enable exceptions by default.
# Homebrew formula for the ANTLR v3 C runtime library.
# Exception-handling support (-fexceptions) is compiled in by default and can
# be disabled with --without-exceptions.
class Libantlr3c < Formula
  homepage "http://www.antlr3.org"
  url "http://www.antlr3.org/download/C/libantlr3c-3.4.tar.gz"
  sha256 "ca914a97f1a2d2f2c8e1fca12d3df65310ff0286d35c48b7ae5f11dcc8b2eb52"
  revision 1

  option "without-exceptions", "Compile without support for exception handling"

  bottle do
    cellar :any
    revision 1
    sha1 "5340de248798194c46c5112c73a1ba409912059d" => :yosemite
    sha1 "044b66cb95b396080f1729cc04f9a960f08e6ea5" => :mavericks
    sha1 "b02f52cf3c696b52974e90532cf66964e61c750d" => :mountain_lion
  end

  def install
    args = ["--disable-dependency-tracking",
            "--disable-antlrdebug",
            "--prefix=#{prefix}"]
    args << "--enable-64bit" if MacOS.prefer_64_bit?
    system "./configure", *args

    # Default-on option: true unless --without-exceptions was given.
    # Append -fexceptions to the generated Makefile's CFLAGS.
    # (The previous `cflags = cflags << " -fexceptions"` was a redundant
    # self-assignment — String#<< already mutates in place.)
    if build.with? "exceptions"
      inreplace "Makefile" do |s|
        s.change_make_var! "CFLAGS", "#{s.get_make_var("CFLAGS")} -fexceptions"
      end
    end

    system "make", "install"
  end

  test do
    # Link a minimal program against libantlr3c; the call is guarded by
    # if (0) so it only needs to link, not run meaningfully.
    (testpath/"hello.c").write <<-EOS.undent
      #include <antlr3.h>

      int main() {
        if (0) {
          antlr3GenericSetupStream(NULL);
        }
        return 0;
      }
    EOS
    system ENV.cc, "hello.c", "-lantlr3c", "-o", "hello", "-O0"
    system testpath/"hello"
  end
end
|
# Homebrew formula for PostgreSQL 9.5.
class Postgresql < Formula
  desc "Object-relational database system"
  homepage "https://www.postgresql.org/"
  url "https://ftp.postgresql.org/pub/source/v9.5.0/postgresql-9.5.0.tar.bz2"
  sha256 "f1c0d3a1a8aa8c92738cab0153fbfffcc4d4158b3fee84f7aa6bfea8283978bc"

  bottle do
    revision 1
    sha256 "c49cfd5c49476158519da525c09b424fd11dd22b7216a45cda7e7c0baf29a7c5" => :el_capitan
    sha256 "f21552b6b1b6e30648d8ae791ef08668bf78b5d8d8013761b4beb8239277ae33" => :yosemite
    sha256 "6c2feaa678c1e3f53b651fb42f54d013a78d0ee07c7daf4e76c4daecb6a8f6ca" => :mavericks
  end

  option "32-bit"
  option "without-perl", "Build without Perl support"
  option "without-tcl", "Build without Tcl support"
  option "with-dtrace", "Build with DTrace support"

  deprecated_option "no-perl" => "without-perl"
  deprecated_option "no-tcl" => "without-tcl"
  deprecated_option "enable-dtrace" => "with-dtrace"

  depends_on "openssl"
  depends_on "readline"
  depends_on "libxml2" if MacOS.version <= :leopard # Leopard libxml is too old
  depends_on :python => :optional

  conflicts_with "postgres-xc",
    :because => "postgresql and postgres-xc install the same binaries."

  fails_with :clang do
    build 211
    cause "Miscompilation resulting in segfault on queries"
  end

  def install
    ENV.libxml2 if MacOS.version >= :snow_leopard

    # openssl and readline are keg-only; point the build at them explicitly.
    ENV.prepend "LDFLAGS", "-L#{Formula["openssl"].opt_lib} -L#{Formula["readline"].opt_lib}"
    ENV.prepend "CPPFLAGS", "-I#{Formula["openssl"].opt_include} -I#{Formula["readline"].opt_include}"

    # datadir/libdir use HOMEBREW_PREFIX (not the keg) so paths reported by
    # pg_config stay stable across upgrades; the test block asserts this.
    args = %W[
      --disable-debug
      --prefix=#{prefix}
      --datadir=#{HOMEBREW_PREFIX}/share/postgresql
      --libdir=#{HOMEBREW_PREFIX}/lib
      --sysconfdir=#{etc}
      --docdir=#{doc}
      --enable-thread-safety
      --with-bonjour
      --with-gssapi
      --with-ldap
      --with-openssl
      --with-pam
      --with-libxml
      --with-libxslt
    ]

    args << "--with-python" if build.with? "python"
    args << "--with-perl" if build.with? "perl"

    # The CLT is required to build Tcl support on 10.7 and 10.8 because
    # tclConfig.sh is not part of the SDK
    if build.with?("tcl") && (MacOS.version >= :mavericks || MacOS::CLT.installed?)
      args << "--with-tcl"

      if File.exist?("#{MacOS.sdk_path}/usr/lib/tclConfig.sh")
        args << "--with-tclconfig=#{MacOS.sdk_path}/usr/lib"
      end
    end

    args << "--enable-dtrace" if build.with? "dtrace"
    args << "--with-uuid=e2fs"

    if build.build_32_bit?
      ENV.append %w[CFLAGS LDFLAGS], "-arch #{Hardware::CPU.arch_32_bit}"
    end

    system "./configure", *args
    system "make"
    # install-world also installs contrib modules and documentation; the
    # datadir/libdir overrides keep the actual files inside this keg.
    system "make", "install-world", "datadir=#{pkgshare}",
                                    "libdir=#{lib}",
                                    "pkglibdir=#{lib}/postgresql"
  end

  def post_install
    # Initialise a database cluster on first install only.
    unless File.exist? "#{var}/postgres"
      system "#{bin}/initdb", "#{var}/postgres"
    end
  end

  def caveats; <<-EOS.undent
    If builds of PostgreSQL 9 are failing and you have version 8.x installed,
    you may need to remove the previous version first. See:
      https://github.com/Homebrew/homebrew/issues/2510

    To migrate existing data from a previous major version (pre-9.5) of PostgreSQL, see:
      https://www.postgresql.org/docs/9.5/static/upgrading.html
    EOS
  end

  plist_options :manual => "postgres -D #{HOMEBREW_PREFIX}/var/postgres"

  # launchd job: run postgres against the default cluster, logging to var.
  def plist; <<-EOS.undent
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
      <key>KeepAlive</key>
      <true/>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>ProgramArguments</key>
      <array>
        <string>#{opt_bin}/postgres</string>
        <string>-D</string>
        <string>#{var}/postgres</string>
        <string>-r</string>
        <string>#{var}/postgres/server.log</string>
      </array>
      <key>RunAtLoad</key>
      <true/>
      <key>WorkingDirectory</key>
      <string>#{HOMEBREW_PREFIX}</string>
      <key>StandardErrorPath</key>
      <string>#{var}/postgres/server.log</string>
    </dict>
    </plist>
    EOS
  end

  test do
    system "#{bin}/initdb", testpath/"test"
    assert_equal "#{HOMEBREW_PREFIX}/share/postgresql", shell_output("#{bin}/pg_config --sharedir").chomp
    assert_equal "#{HOMEBREW_PREFIX}/lib", shell_output("#{bin}/pg_config --libdir").chomp
    assert_equal "#{HOMEBREW_PREFIX}/lib/postgresql", shell_output("#{bin}/pg_config --pkglibdir").chomp
  end
end
postgresql: add caveat on minor updates
Closes #48486.
Closes #48602.
Signed-off-by: Dominyk Tiller <53e438f55903875d07efdd98a8aaf887e7208dd3@gmail.com>
# Homebrew formula for PostgreSQL 9.5.
class Postgresql < Formula
  desc "Object-relational database system"
  homepage "https://www.postgresql.org/"
  url "https://ftp.postgresql.org/pub/source/v9.5.0/postgresql-9.5.0.tar.bz2"
  sha256 "f1c0d3a1a8aa8c92738cab0153fbfffcc4d4158b3fee84f7aa6bfea8283978bc"

  bottle do
    revision 1
    sha256 "c49cfd5c49476158519da525c09b424fd11dd22b7216a45cda7e7c0baf29a7c5" => :el_capitan
    sha256 "f21552b6b1b6e30648d8ae791ef08668bf78b5d8d8013761b4beb8239277ae33" => :yosemite
    sha256 "6c2feaa678c1e3f53b651fb42f54d013a78d0ee07c7daf4e76c4daecb6a8f6ca" => :mavericks
  end

  option "32-bit"
  option "without-perl", "Build without Perl support"
  option "without-tcl", "Build without Tcl support"
  option "with-dtrace", "Build with DTrace support"

  deprecated_option "no-perl" => "without-perl"
  deprecated_option "no-tcl" => "without-tcl"
  deprecated_option "enable-dtrace" => "with-dtrace"

  depends_on "openssl"
  depends_on "readline"
  depends_on "libxml2" if MacOS.version <= :leopard # Leopard libxml is too old
  depends_on :python => :optional

  conflicts_with "postgres-xc",
    :because => "postgresql and postgres-xc install the same binaries."

  fails_with :clang do
    build 211
    cause "Miscompilation resulting in segfault on queries"
  end

  def install
    ENV.libxml2 if MacOS.version >= :snow_leopard

    # openssl and readline are keg-only; point the build at them explicitly.
    ENV.prepend "LDFLAGS", "-L#{Formula["openssl"].opt_lib} -L#{Formula["readline"].opt_lib}"
    ENV.prepend "CPPFLAGS", "-I#{Formula["openssl"].opt_include} -I#{Formula["readline"].opt_include}"

    # datadir/libdir use HOMEBREW_PREFIX (not the keg) so paths reported by
    # pg_config stay stable across upgrades; the test block asserts this.
    args = %W[
      --disable-debug
      --prefix=#{prefix}
      --datadir=#{HOMEBREW_PREFIX}/share/postgresql
      --libdir=#{HOMEBREW_PREFIX}/lib
      --sysconfdir=#{etc}
      --docdir=#{doc}
      --enable-thread-safety
      --with-bonjour
      --with-gssapi
      --with-ldap
      --with-openssl
      --with-pam
      --with-libxml
      --with-libxslt
    ]

    args << "--with-python" if build.with? "python"
    args << "--with-perl" if build.with? "perl"

    # The CLT is required to build Tcl support on 10.7 and 10.8 because
    # tclConfig.sh is not part of the SDK
    if build.with?("tcl") && (MacOS.version >= :mavericks || MacOS::CLT.installed?)
      args << "--with-tcl"

      if File.exist?("#{MacOS.sdk_path}/usr/lib/tclConfig.sh")
        args << "--with-tclconfig=#{MacOS.sdk_path}/usr/lib"
      end
    end

    args << "--enable-dtrace" if build.with? "dtrace"
    args << "--with-uuid=e2fs"

    if build.build_32_bit?
      ENV.append %w[CFLAGS LDFLAGS], "-arch #{Hardware::CPU.arch_32_bit}"
    end

    system "./configure", *args
    system "make"
    # install-world also installs contrib modules and documentation; the
    # datadir/libdir overrides keep the actual files inside this keg.
    system "make", "install-world", "datadir=#{pkgshare}",
                                    "libdir=#{lib}",
                                    "pkglibdir=#{lib}/postgresql"
  end

  def post_install
    # Initialise a database cluster on first install only.
    unless File.exist? "#{var}/postgres"
      system "#{bin}/initdb", "#{var}/postgres"
    end
  end

  # Fix: "PosgresSQL" was a typo in this user-facing message; it now reads
  # "PostgreSQL".
  def caveats; <<-EOS.undent
    If builds of PostgreSQL 9 are failing and you have version 8.x installed,
    you may need to remove the previous version first. See:
      https://github.com/Homebrew/homebrew/issues/2510

    To migrate existing data from a previous major version (pre-9.0) of PostgreSQL, see:
      http://www.postgresql.org/docs/9.5/static/upgrading.html

    To migrate existing data from a previous minor version (9.0-9.4) of PostgreSQL, see:
      http://www.postgresql.org/docs/9.5/static/pgupgrade.html

      You will need your previous PostgreSQL installation from brew to perform `pg_upgrade`.
      Do not run `brew cleanup postgresql` until you have performed the migration.
    EOS
  end

  plist_options :manual => "postgres -D #{HOMEBREW_PREFIX}/var/postgres"

  # launchd job: run postgres against the default cluster, logging to var.
  def plist; <<-EOS.undent
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
      <key>KeepAlive</key>
      <true/>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>ProgramArguments</key>
      <array>
        <string>#{opt_bin}/postgres</string>
        <string>-D</string>
        <string>#{var}/postgres</string>
        <string>-r</string>
        <string>#{var}/postgres/server.log</string>
      </array>
      <key>RunAtLoad</key>
      <true/>
      <key>WorkingDirectory</key>
      <string>#{HOMEBREW_PREFIX}</string>
      <key>StandardErrorPath</key>
      <string>#{var}/postgres/server.log</string>
    </dict>
    </plist>
    EOS
  end

  test do
    system "#{bin}/initdb", testpath/"test"
    assert_equal "#{HOMEBREW_PREFIX}/share/postgresql", shell_output("#{bin}/pg_config --sharedir").chomp
    assert_equal "#{HOMEBREW_PREFIX}/lib", shell_output("#{bin}/pg_config --libdir").chomp
    assert_equal "#{HOMEBREW_PREFIX}/lib/postgresql", shell_output("#{bin}/pg_config --pkglibdir").chomp
  end
end
|
require "language/haskell"

# Homebrew formula for the PureScript compiler (Hackage release 0.6.8).
class Purescript < Formula
  include Language::Haskell::Cabal
  homepage "http://www.purescript.org"
  url "https://hackage.haskell.org/package/purescript-0.6.8/purescript-0.6.8.tar.gz"
  sha1 "70fd4d3109d61c34c8898a30d222c4b1ad8fd7a5"

  bottle do
    sha1 "b5628dbaabd07215c54979156b2d2f66fb6034c0" => :yosemite
    sha1 "0d082d33a31bae337188e0866180120a8b38c66d" => :mavericks
    sha1 "e9bbad2add5f0961926a0df8adbd8a3848781747" => :mountain_lion
  end

  depends_on "cabal-install" => :build
  depends_on "ghc"

  def install
    # Provided by Language::Haskell::Cabal: builds and installs via cabal.
    install_cabal_package
  end

  test do
    # Compile a trivial module and check the JS output file is produced.
    test_module_path = testpath/"Test.purs"
    test_target_path = testpath/"test-module.js"
    test_module_path.write <<-EOS.undent
      module Test where

      import Control.Monad.Eff

      main :: forall e. Eff e Unit
      main = return unit
    EOS
    system bin/"psc", test_module_path, "-o", test_target_path
    assert File.exist?(test_target_path)
  end
end
purescript: v0.6.9.5
Closes #39238.
Signed-off-by: Jack Nagel <43386ce32af96f5c56f2a88e458cb94cebee3751@gmail.com>
require "language/haskell"

# Homebrew formula for the PureScript compiler (GitHub release 0.6.9.5).
class Purescript < Formula
  include Language::Haskell::Cabal
  homepage "http://www.purescript.org"
  url "https://github.com/purescript/purescript/archive/v0.6.9.5.tar.gz"
  sha256 "b7d24ce85c65a9d2adb178d2e9b628f8d4f5a33103c3da6f3312c63a1048ff80"

  # NOTE(review): these bottle checksums are unchanged from the 0.6.8
  # formula — they appear to predate this version bump; confirm/rebuild.
  bottle do
    sha1 "b5628dbaabd07215c54979156b2d2f66fb6034c0" => :yosemite
    sha1 "0d082d33a31bae337188e0866180120a8b38c66d" => :mavericks
    sha1 "e9bbad2add5f0961926a0df8adbd8a3848781747" => :mountain_lion
  end

  depends_on "ghc" => :build
  depends_on "cabal-install" => :build

  def install
    # Provided by Language::Haskell::Cabal: builds and installs via cabal.
    install_cabal_package
  end

  test do
    # Compile a trivial module and check the JS output file is produced.
    test_module_path = testpath/"Test.purs"
    test_target_path = testpath/"test-module.js"
    test_module_path.write <<-EOS.undent
      module Test where

      import Control.Monad.Eff

      main :: forall e. Eff e Unit
      main = return unit
    EOS
    system bin/"psc", test_module_path, "-o", test_target_path
    assert File.exist?(test_target_path)
  end
end
|
# Homebrew formula for Rubber Band, an audio time-stretching/pitch-shifting
# tool and library.
class Rubberband < Formula
  # Fix: Homebrew audit requires desc to start with a capital letter.
  desc "Audio time stretcher tool and library"
  homepage "http://breakfastquay.com/rubberband/"
  url "http://code.breakfastquay.com/attachments/download/34/rubberband-1.8.1.tar.bz2"
  sha256 "ff0c63b0b5ce41f937a8a3bc560f27918c5fe0b90c6bc1cb70829b86ada82b75"
  head "https://bitbucket.org/breakfastquay/rubberband/", :using => :hg

  stable do
    # replace vecLib.h by Accelerate.h
    # already fixed in upstream:
    # https://bitbucket.org/breakfastquay/rubberband/commits/cb02b7ed1500f0c06c0ffd196921c812dbcf6888
    # https://bitbucket.org/breakfastquay/rubberband/commits/9e32f693c6122b656a0df63bc77e6a96d6ba213d
    patch :p1 do
      url "http://tuohela.net/irc/rubberband-1.8.1-yosemite.diff"
      sha1 "76ea7cac0fc0ab99b38081176375ef7c34be678f"
    end
  end

  bottle do
    cellar :any
    sha256 "e1890156ef9c638993fc9410bc4b5ccee113a14bb59861ebd25a6d68501f83d7" => :el_capitan
    sha256 "746b65592ab6a93e990a7930a22aaa5c7c559e1bfdff7da8d3f292168e79e512" => :yosemite
    sha256 "06c1b9d0bea22637edec00a3bd41b118472caee55902b6c1f8ea8941d922a3f7" => :mavericks
  end

  depends_on "pkg-config" => :build
  depends_on "libsamplerate"
  depends_on "libsndfile"

  def install
    system "make", "-f", "Makefile.osx"
    bin.install "bin/rubberband"
    lib.install "lib/librubberband.dylib"
    include.install "rubberband"

    # Upstream ships only a pkg-config template; fill in the prefix ourselves.
    cp "rubberband.pc.in", "rubberband.pc"
    inreplace "rubberband.pc", "%PREFIX%", opt_prefix
    (lib/"pkgconfig").install "rubberband.pc"
  end

  test do
    # Fix: escape the dots so the regex matches a literal "..." rather than
    # any three characters; parenthesise to silence the ambiguous-regex
    # first-argument warning.
    assert_match(/Pass 2: Processing\.\.\./, shell_output("rubberband -t2 #{test_fixtures("test.wav")} out.wav 2>&1"))
  end
end
rubberband: audit fixes
Closes #48775.
Signed-off-by: Dominyk Tiller <53e438f55903875d07efdd98a8aaf887e7208dd3@gmail.com>
# Homebrew formula for Rubber Band, an audio time-stretching/pitch-shifting
# tool and library.
class Rubberband < Formula
  # Fix: Homebrew audit requires desc to start with a capital letter.
  desc "Audio time stretcher tool and library"
  homepage "http://breakfastquay.com/rubberband/"
  head "https://bitbucket.org/breakfastquay/rubberband/", :using => :hg

  stable do
    url "http://code.breakfastquay.com/attachments/download/34/rubberband-1.8.1.tar.bz2"
    sha256 "ff0c63b0b5ce41f937a8a3bc560f27918c5fe0b90c6bc1cb70829b86ada82b75"

    # replace vecLib.h by Accelerate.h
    # already fixed in upstream:
    # https://bitbucket.org/breakfastquay/rubberband/commits/cb02b7ed1500f0c06c0ffd196921c812dbcf6888
    # https://bitbucket.org/breakfastquay/rubberband/commits/9e32f693c6122b656a0df63bc77e6a96d6ba213d
    patch :p1 do
      url "https://raw.githubusercontent.com/homebrew/patches/1fd51a983cf7728958659bab95073657b1801b3c/rubberband/rubberband-1.8.1-yosemite.diff"
      sha256 "7686dd9d05fddbcbdf4015071676ac37ecad5c7594cc06470440a18da17c71cd"
    end
  end

  bottle do
    cellar :any
    sha256 "e1890156ef9c638993fc9410bc4b5ccee113a14bb59861ebd25a6d68501f83d7" => :el_capitan
    sha256 "746b65592ab6a93e990a7930a22aaa5c7c559e1bfdff7da8d3f292168e79e512" => :yosemite
    sha256 "06c1b9d0bea22637edec00a3bd41b118472caee55902b6c1f8ea8941d922a3f7" => :mavericks
  end

  depends_on "pkg-config" => :build
  depends_on "libsamplerate"
  depends_on "libsndfile"

  def install
    system "make", "-f", "Makefile.osx"
    bin.install "bin/rubberband"
    lib.install "lib/librubberband.dylib"
    include.install "rubberband"

    # Upstream ships only a pkg-config template; fill in the prefix ourselves.
    cp "rubberband.pc.in", "rubberband.pc"
    inreplace "rubberband.pc", "%PREFIX%", opt_prefix
    (lib/"pkgconfig").install "rubberband.pc"
  end

  test do
    # Fix: escape the dots so the regex matches a literal "..." rather than
    # any three characters; parenthesise to silence the ambiguous-regex
    # first-argument warning.
    assert_match(/Pass 2: Processing\.\.\./, shell_output("rubberband -t2 #{test_fixtures("test.wav")} out.wav 2>&1"))
  end
end
|
# Homebrew formula for ruby-build (release 20151024), a tool that compiles
# and installs Ruby versions; usable standalone or as an rbenv plugin.
class RubyBuild < Formula
  desc "Install various Ruby versions and implementations"
  homepage "https://github.com/sstephenson/ruby-build"
  url "https://github.com/sstephenson/ruby-build/archive/v20151024.tar.gz"
  sha256 "8e5be61e967d9a6af84dd84f57c1e28be4cc6c1d4916e235a3393a42673618dc"

  # Pure shell scripts — nothing to compile, so no bottle is needed.
  bottle :unneeded

  head "https://github.com/sstephenson/ruby-build.git"

  # :run deps — ruby-build invokes these while building rubies.
  depends_on "autoconf" => [:recommended, :run]
  depends_on "pkg-config" => [:recommended, :run]
  depends_on "openssl" => :recommended

  def install
    # install.sh honours PREFIX for its install destination.
    ENV["PREFIX"] = prefix
    system "./install.sh"
  end

  test do
    # Listing known ruby definitions exercises the script without compiling.
    system "#{bin}/ruby-build", "--definitions"
  end
end
ruby-build 20151028
Closes #45418.
Signed-off-by: Baptiste Fontaine <bfee279af59f3e3f71f7ce1fa037ea7b90f93cbf@yahoo.fr>
# Homebrew formula for ruby-build (release 20151028), a tool that compiles
# and installs Ruby versions; usable standalone or as an rbenv plugin.
class RubyBuild < Formula
  desc "Install various Ruby versions and implementations"
  homepage "https://github.com/sstephenson/ruby-build"
  url "https://github.com/sstephenson/ruby-build/archive/v20151028.tar.gz"
  sha256 "3c83ae35c48869404884603b784927a8bd1d3e041c555996afb1286fc20aa708"

  # Pure shell scripts — nothing to compile, so no bottle is needed.
  bottle :unneeded

  head "https://github.com/sstephenson/ruby-build.git"

  # :run deps — ruby-build invokes these while building rubies.
  depends_on "autoconf" => [:recommended, :run]
  depends_on "pkg-config" => [:recommended, :run]
  depends_on "openssl" => :recommended

  def install
    # install.sh honours PREFIX for its install destination.
    ENV["PREFIX"] = prefix
    system "./install.sh"
  end

  test do
    # Listing known ruby definitions exercises the script without compiling.
    system "#{bin}/ruby-build", "--definitions"
  end
end
|
require 'formula'

# Homebrew formula for strongSwan 5.2.1, an IPsec/IKE keying daemon suite.
class Strongswan < Formula
  homepage 'http://www.strongswan.org'
  url 'http://download.strongswan.org/strongswan-5.2.1.tar.bz2'
  sha1 '3035fc0c38e0698b0d85a94dbc25944abd2a8722'

  bottle do
    sha1 "8aada0aa9bf2a5b97282b9d967767d2b23f582e0" => :mavericks
    sha1 "5ca2e977cb940f2da1df1aff7923104a20ab45ec" => :mountain_lion
    sha1 "e04c343ff3c7755492dfb131f0552b0b843881a4" => :lion
  end

  option 'with-curl', 'Build with libcurl based fetcher'
  option 'with-suite-b', 'Build with Suite B support (does not use the IPsec implementation provided by the kernel)'

  # Suite B needs a modern OpenSSL; Leopard and earlier also lack one.
  # (NOTE(review): `or` here works only because of modifier-if precedence;
  # prefer `||` per the Ruby style guide.)
  depends_on 'openssl' if build.with? "suite-b" or MacOS.version <= :leopard
  depends_on 'curl' => :optional

  def install
    # Start from --disable-defaults and enable only the plugins we support.
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
      --sbindir=#{bin}
      --sysconfdir=#{etc}
      --disable-defaults
      --enable-charon
      --enable-cmd
      --enable-constraints
      --enable-eap-identity
      --enable-eap-md5
      --enable-eap-gtc
      --enable-eap-mschapv2
      --enable-ikev1
      --enable-ikev2
      --enable-kernel-pfroute
      --enable-nonce
      --enable-openssl
      --enable-osx-attr
      --enable-pem
      --enable-pgp
      --enable-pkcs1
      --enable-pkcs8
      --enable-pki
      --enable-pubkey
      --enable-revocation
      --enable-scepclient
      --enable-socket-default
      --enable-sshkey
      --enable-stroke
      --enable-swanctl
      --enable-updown
      --enable-unity
      --enable-xauth-generic
    ]
    args << "--enable-curl" if build.with? 'curl'
    # Suite B routes ESP through libipsec instead of the kernel's PF_KEY.
    args << "--enable-kernel-pfkey" if build.without? 'suite-b'
    args << "--enable-kernel-libipsec" if build.with? 'suite-b'

    system "./configure", *args
    system "make", "install"
  end

  def caveats
    msg = <<-EOS.undent
      strongSwan's configuration files are placed in:
        #{etc}

      You will have to run both 'ipsec' and 'charon-cmd' with 'sudo'.
    EOS
    if build.with? 'suite-b'
      msg += <<-EOS.undent
        If you previously ran strongSwan without Suite B support it might be
        required to execute 'sudo sysctl -w net.inet.ipsec.esp_port=0' in order
        to receive packets.
      EOS
    end
    return msg
  end
end
strongswan: update 5.2.1 bottle.
Closes #33389.
Signed-off-by: Mike McQuaid <a17fed27eaa842282862ff7c1b9c8395a26ac320@mikemcquaid.com>
require 'formula'

# Homebrew formula for strongSwan 5.2.1, an IPsec/IKE keying daemon suite.
class Strongswan < Formula
  homepage 'http://www.strongswan.org'
  url 'http://download.strongswan.org/strongswan-5.2.1.tar.bz2'
  sha1 '3035fc0c38e0698b0d85a94dbc25944abd2a8722'

  bottle do
    sha1 "ebcf0937245258aede64c79278f96f2bd9b50756" => :yosemite
    sha1 "1e35a8281bfb5c3341fb9bb004a79f141f88eedb" => :mavericks
    sha1 "38635c861ee0e8e8ac5638734e58b9415256d378" => :mountain_lion
  end

  option 'with-curl', 'Build with libcurl based fetcher'
  option 'with-suite-b', 'Build with Suite B support (does not use the IPsec implementation provided by the kernel)'

  # Suite B needs a modern OpenSSL; Leopard and earlier also lack one.
  # Fix: use `||` instead of `or` — `or` is for control flow and has
  # surprisingly low precedence (Ruby style guide).
  depends_on 'openssl' if build.with?("suite-b") || MacOS.version <= :leopard
  depends_on 'curl' => :optional

  def install
    # Start from --disable-defaults and enable only the plugins we support.
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
      --sbindir=#{bin}
      --sysconfdir=#{etc}
      --disable-defaults
      --enable-charon
      --enable-cmd
      --enable-constraints
      --enable-eap-identity
      --enable-eap-md5
      --enable-eap-gtc
      --enable-eap-mschapv2
      --enable-ikev1
      --enable-ikev2
      --enable-kernel-pfroute
      --enable-nonce
      --enable-openssl
      --enable-osx-attr
      --enable-pem
      --enable-pgp
      --enable-pkcs1
      --enable-pkcs8
      --enable-pki
      --enable-pubkey
      --enable-revocation
      --enable-scepclient
      --enable-socket-default
      --enable-sshkey
      --enable-stroke
      --enable-swanctl
      --enable-updown
      --enable-unity
      --enable-xauth-generic
    ]
    args << "--enable-curl" if build.with? 'curl'
    # Suite B routes ESP through libipsec instead of the kernel's PF_KEY.
    args << "--enable-kernel-pfkey" if build.without? 'suite-b'
    args << "--enable-kernel-libipsec" if build.with? 'suite-b'

    system "./configure", *args
    system "make", "install"
  end

  def caveats
    msg = <<-EOS.undent
      strongSwan's configuration files are placed in:
        #{etc}

      You will have to run both 'ipsec' and 'charon-cmd' with 'sudo'.
    EOS
    if build.with? 'suite-b'
      msg += <<-EOS.undent
        If you previously ran strongSwan without Suite B support it might be
        required to execute 'sudo sysctl -w net.inet.ipsec.esp_port=0' in order
        to receive packets.
      EOS
    end
    # Implicit return of the assembled message (explicit `return` dropped).
    msg
  end
end
|
wellington 0.6.0 formula
* based off libsass 3.1.0
* specify c++11 linker in clang via CGO_LDFLAG
Closes #35214.
Signed-off-by: Mike McQuaid <a17fed27eaa842282862ff7c1b9c8395a26ac320@mikemcquaid.com>
require "language/go"

# Homebrew formula for Wellington 0.6.0, a Sass project tool built on
# libsass via cgo; requires a C++11 toolchain for the libsass objects.
class Wellington < Formula
  homepage "https://github.com/wellington/wellington"
  url "https://github.com/wellington/wellington/archive/0.6.0.tar.gz"
  sha1 "c7d1c391f9e929796f92c4496f011c62546a12cd"
  head "https://github.com/wellington/wellington.git"

  needs :cxx11

  depends_on "go" => :build
  depends_on "pkg-config" => :build
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build

  # Pinned Go dependencies staged into the GOPATH at build time.
  go_resource "github.com/wellington/spritewell" do
    url "https://github.com/wellington/spritewell.git",
      :revision => "748bfe956f31c257605c304b41a0525a4487d17d"
  end

  go_resource "github.com/go-fsnotify/fsnotify" do
    url "https://github.com/go-fsnotify/fsnotify.git",
      :revision => "f582d920d11386e8ae15227bb5933a8f9b4c3dec"
  end

  def install
    ENV.cxx11

    # go_resource doesn't support gopkg, do it manually then symlink
    mkdir_p buildpath/"src/gopkg.in"
    ln_s buildpath/"src/github.com/go-fsnotify/fsnotify",
      buildpath/"src/gopkg.in/fsnotify.v1"

    # Point cgo at the vendored libsass built by "make deps".
    ENV["PKG_CONFIG_PATH"] = buildpath/"libsass/lib/pkgconfig"

    # Lay out a GOPATH workspace with this checkout as the main package.
    mkdir_p buildpath/"src/github.com/wellington"
    ln_s buildpath, buildpath/"src/github.com/wellington/wellington"
    Language::Go.stage_deps resources, buildpath/"src"
    ENV["GOPATH"] = buildpath

    # clang links the C++11-built libsass objects against libc++.
    ENV.append "CGO_LDFLAGS", "-stdlib=libc++" if ENV.compiler == :clang

    system "make", "deps"
    system "go", "build", "-x", "-v", "-o", "dist/wt", "wt/main.go"
    bin.install "dist/wt"
  end

  test do
    # Pipe a tiny SCSS snippet through wt and compare the compiled CSS.
    s = "div { p { color: red; } }"
    expected = <<-EOS.undent
      Reading from stdin, -h for help
      /* line 6, stdin */
      div p {
        color: red; }
    EOS
    output = `echo '#{s}' | #{bin}/wt`
    assert_equal(expected, output)
  end
end
|
require "formula"

# Please only update to versions that are published on PyPi as there are too
# many releases for us to update to every single one:
# https://pypi.python.org/pypi/youtube_dl
class YoutubeDl < Formula
  homepage "http://rg3.github.io/youtube-dl/"
  url "https://yt-dl.org/downloads/2015.01.03/youtube-dl-2015.01.03.tar.gz"
  sha256 "1c3c6dabe8672c13e7925f17493930889a34c556e229d7b69df56d1ded812b4d"

  bottle do
    cellar :any
    sha1 "5520ec6ba9ddfb524a5c481147d8c8023a58c66e" => :yosemite
    sha1 "3dd627e167df5e8fe12e9462a88ba1ccc4ba0690" => :mavericks
    sha1 "f94988fccd860b11dac679337671d2c0d739bc7e" => :mountain_lion
  end

  head do
    url "https://github.com/rg3/youtube-dl.git"
    # HEAD builds regenerate the man page from the README with pandoc.
    depends_on "pandoc" => :build
  end

  depends_on "rtmpdump" => :optional

  def install
    system "make", "PREFIX=#{prefix}"
    bin.install "youtube-dl"
    man1.install "youtube-dl.1"
    bash_completion.install "youtube-dl.bash-completion"
    zsh_completion.install "youtube-dl.zsh" => "_youtube-dl"
  end

  def caveats
    "To use post-processing options, `brew install ffmpeg`."
  end

  test do
    # --simulate extracts metadata without downloading the video.
    system "#{bin}/youtube-dl", "--simulate", "http://www.youtube.com/watch?v=he2a4xK8ctk"
  end
end
youtube-dl: update 2015.01.03 bottle.
require "formula"

# Please only update to versions that are published on PyPi as there are too
# many releases for us to update to every single one:
# https://pypi.python.org/pypi/youtube_dl
class YoutubeDl < Formula
  homepage "http://rg3.github.io/youtube-dl/"
  url "https://yt-dl.org/downloads/2015.01.03/youtube-dl-2015.01.03.tar.gz"
  sha256 "1c3c6dabe8672c13e7925f17493930889a34c556e229d7b69df56d1ded812b4d"

  bottle do
    cellar :any
    sha1 "9f5d288a10ed2f43260902a1b658579f986c1655" => :yosemite
    sha1 "2b3e5010ab35502593da094a333114482157cfdb" => :mavericks
    sha1 "ce3447db6845e0cc6d2bfd3c8ef3c256a9f53d97" => :mountain_lion
  end

  head do
    url "https://github.com/rg3/youtube-dl.git"
    # HEAD builds regenerate the man page from the README with pandoc.
    depends_on "pandoc" => :build
  end

  depends_on "rtmpdump" => :optional

  def install
    system "make", "PREFIX=#{prefix}"
    bin.install "youtube-dl"
    man1.install "youtube-dl.1"
    bash_completion.install "youtube-dl.bash-completion"
    zsh_completion.install "youtube-dl.zsh" => "_youtube-dl"
  end

  def caveats
    "To use post-processing options, `brew install ffmpeg`."
  end

  test do
    # --simulate extracts metadata without downloading the video.
    system "#{bin}/youtube-dl", "--simulate", "http://www.youtube.com/watch?v=he2a4xK8ctk"
  end
end
|
# Gem version constant for the Condensation library.
module Condensation
  # Frozen so the shared constant cannot be mutated in place
  # (mutable string constants are a common Ruby footgun).
  VERSION = "1.0.0".freeze
end
Bump version
# Gem version constant for the Condensation library.
module Condensation
  # Frozen so the shared constant cannot be mutated in place
  # (mutable string constants are a common Ruby footgun).
  VERSION = "1.0.1".freeze
end
|
#
# Copyright (C) 2013 Conjur Inc
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# CLI subcommands for managing Conjur roles: creation, existence
# checks, membership listing, grant/revoke, and membership-graph
# export. Built on the GLI command DSL provided by Conjur::Command.
class Conjur::Command::Roles < Conjur::Command
  # Output formats accepted by `role graph --format`; frozen so the
  # shared constant cannot be mutated.
  GRAPH_FORMATS = %w(json dot png).freeze

  desc "Manage roles"
  command :role do |role|
    role.desc "Create a new role"
    role.arg_name "role"
    role.command :create do |c|
      acting_as_option(c)
      c.desc "Output a JSON response with a single field, roleid"
      c.switch "json"
      c.action do |global_options,options,args|
        id = require_arg(args, 'role')
        role = api.role(id)
        # acting_as_option appears to store its value as :ownerid;
        # rename it to the :acting_as key the API expects
        # (NOTE(review): confirm against acting_as_option's definition).
        if ownerid = options.delete(:ownerid)
          options[:acting_as] = ownerid
        end
        role.create(options)
        if options[:json]
          display({
            roleid: role.roleid
          })
        else
          puts "Created role #{role.roleid}"
        end
      end
    end

    role.desc "Determines whether a role exists"
    role.arg_name "role"
    role.command :exists do |c|
      c.desc "Output a JSON response with a single field, exists"
      c.switch "json"
      c.action do |global_options,options,args|
        id = require_arg(args, 'role')
        role = api.role(id)
        if options[:json]
          display({
            exists: role.exists?
          })
        else
          puts role.exists?
        end
      end
    end

    role.desc "Lists role memberships. The role membership list is recursively expanded."
    role.arg_name "role"
    role.command :memberships do |c|
      c.desc "Whether to show system (internal) roles"
      c.switch [:s, :system]
      c.action do |global_options,options,args|
        roleid = args.shift
        # Default to the currently authenticated role when no role
        # argument is given.
        role = roleid.nil? && api.current_role || api.role(roleid)
        memberships = role.all.map(&:roleid)
        unless options[:system]
          # Role ids containing ":@" are treated as system/internal
          # roles; hide them unless --system was passed.
          memberships.reject!{|id| id =~ /^.+?:@/}
        end
        display memberships
      end
    end

    role.desc "Lists all direct members of the role. The membership list is not recursively expanded."
    role.arg_name "role"
    role.command :members do |c|
      c.desc "Verbose output"
      c.switch [:V,:verbose]
      c.action do |global_options,options,args|
        # Default to the logged-in user's own role when none is given.
        role = args.shift || api.user(api.username).roleid
        display_members api.role(role).members, options
      end
    end

    role.desc "Grant a role to another role. You must have admin permission on the granting role."
    role.arg_name "role member"
    role.command :grant_to do |c|
      c.desc "Whether to grant with admin option"
      c.switch [:a,:admin]
      c.action do |global_options,options,args|
        id = require_arg(args, 'role')
        member = require_arg(args, 'member')
        role = api.role(id)
        grant_options = {}
        # --admin grants with the admin option, letting the member
        # grant the role onward.
        grant_options[:admin_option] = true if options[:admin]
        role.grant_to member, grant_options
        puts "Role granted"
      end
    end

    role.desc "Revoke a role from another role. You must have admin permission on the revoking role."
    role.arg_name "role member"
    role.command :revoke_from do |c|
      c.action do |global_options,options,args|
        id = require_arg(args, 'role')
        member = require_arg(args, 'member')
        role = api.role(id)
        role.revoke_from member
        puts "Role revoked"
      end
    end

    # Fixed typo in user-facing help text: "--[no-descendants]" ->
    # "--[no-]descendants" (matching the actual GLI switch names).
    role.long_desc <<-EOD
Retrieves a digraph representing the role members and memberships of one or more roles.
The --[no-]ancestors and --[no-]descendants flags determine whether the graph should include ancestors, descendants, or both. Both
are included in the graph by default.
The --acting-as flag specifies, as usual, a role as which to perform the action. The default is the role of the currently
authenticated user. Only roles visible to this role will be included in the resulting graph.
The output is always written to the standard output, and can be one of the following forms (specified with the --format flag):
* png: use the 'dot' command to generate a png image representing the graph.
* dot: produce a file in a suitable format for use with the 'dot' program.
* json [default]: output a JSON representation of the graph.
In order to generate png images, the 'dot' program must be present and on your path. This program is usually installed
as part of the 'graphviz' package, and is available via apt-get on debian like systems and homebrew on OSX.
The JSON format is determined by the presence of the --short flag. If the --short flag is present, the JSON will be an array of
edges, with each edge represented as an array:
[
[ 'parent1', 'child1' ],
[ 'parent2', 'child2'],
...
]
If the --short flag is not present, the JSON output will be more verbose:
{
"graph": [
{
"parent": "parent1",
"child": "child1"
},
...
]
}
    EOD
    role.desc "Describe role memberships as a digraph"
    role.arg_name "role", :multiple
    role.command :graph do |c|
      # Join the formats so the help text reads "json, dot, png" rather
      # than the array's #inspect output.
      c.desc "Output formats [#{GRAPH_FORMATS.join(', ')}]"
      c.flag [:f,:format], default_value: 'json', must_match: GRAPH_FORMATS
      c.desc "Use a more compact JSON format"
      c.switch [:s, :short]
      c.desc "Whether to show ancestors"
      c.switch [:a, :ancestors], default_value: true
      c.desc "Whether to show descendants"
      c.switch [:d, :descendants], default_value: true
      acting_as_option(c)
      c.action do |_, options, args|
        format = options[:format].downcase.to_sym
        # --short only affects JSON output; warn instead of failing.
        if options[:short] && format != :json
          $stderr.puts "WARNING: the --short option is meaningless when --format is not json"
        end
        params = options.slice(:ancestors, :descendants)
        params[:as_role] = options[:acting_as] if options.member?(:acting_as)
        graph = api.role_graph(args, params)
        output = case format
                 when :json then graph.to_json(options[:short]) + "\n"
                 when :png then graph.to_png
                 when :dot then graph.to_dot + "\n"
                 # Effectively unreachable: GLI's must_match already
                 # rejects unknown formats.
                 else raise "Unsupported format: #{format}"
                 end
        $stdout.write output
      end
    end
  end
end
clean up long desc a bit
#
# Copyright (C) 2013 Conjur Inc
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# CLI subcommands for managing Conjur roles: creation, existence
# checks, membership listing, grant/revoke, and membership-graph
# export. Built on the GLI command DSL provided by Conjur::Command.
class Conjur::Command::Roles < Conjur::Command
  # Output formats accepted by `role graph --format`; frozen so the
  # shared constant cannot be mutated.
  GRAPH_FORMATS = %w(json dot png).freeze

  desc "Manage roles"
  command :role do |role|
    role.desc "Create a new role"
    role.arg_name "role"
    role.command :create do |c|
      acting_as_option(c)
      c.desc "Output a JSON response with a single field, roleid"
      c.switch "json"
      c.action do |global_options,options,args|
        id = require_arg(args, 'role')
        role = api.role(id)
        # acting_as_option appears to store its value as :ownerid;
        # rename it to the :acting_as key the API expects
        # (NOTE(review): confirm against acting_as_option's definition).
        if ownerid = options.delete(:ownerid)
          options[:acting_as] = ownerid
        end
        role.create(options)
        if options[:json]
          display({
            roleid: role.roleid
          })
        else
          puts "Created role #{role.roleid}"
        end
      end
    end

    role.desc "Determines whether a role exists"
    role.arg_name "role"
    role.command :exists do |c|
      c.desc "Output a JSON response with a single field, exists"
      c.switch "json"
      c.action do |global_options,options,args|
        id = require_arg(args, 'role')
        role = api.role(id)
        if options[:json]
          display({
            exists: role.exists?
          })
        else
          puts role.exists?
        end
      end
    end

    role.desc "Lists role memberships. The role membership list is recursively expanded."
    role.arg_name "role"
    role.command :memberships do |c|
      c.desc "Whether to show system (internal) roles"
      c.switch [:s, :system]
      c.action do |global_options,options,args|
        roleid = args.shift
        # Default to the currently authenticated role when no role
        # argument is given.
        role = roleid.nil? && api.current_role || api.role(roleid)
        memberships = role.all.map(&:roleid)
        unless options[:system]
          # Role ids containing ":@" are treated as system/internal
          # roles; hide them unless --system was passed.
          memberships.reject!{|id| id =~ /^.+?:@/}
        end
        display memberships
      end
    end

    role.desc "Lists all direct members of the role. The membership list is not recursively expanded."
    role.arg_name "role"
    role.command :members do |c|
      c.desc "Verbose output"
      c.switch [:V,:verbose]
      c.action do |global_options,options,args|
        # Default to the logged-in user's own role when none is given.
        role = args.shift || api.user(api.username).roleid
        display_members api.role(role).members, options
      end
    end

    role.desc "Grant a role to another role. You must have admin permission on the granting role."
    role.arg_name "role member"
    role.command :grant_to do |c|
      c.desc "Whether to grant with admin option"
      c.switch [:a,:admin]
      c.action do |global_options,options,args|
        id = require_arg(args, 'role')
        member = require_arg(args, 'member')
        role = api.role(id)
        grant_options = {}
        # --admin grants with the admin option, letting the member
        # grant the role onward.
        grant_options[:admin_option] = true if options[:admin]
        role.grant_to member, grant_options
        puts "Role granted"
      end
    end

    role.desc "Revoke a role from another role. You must have admin permission on the revoking role."
    role.arg_name "role member"
    role.command :revoke_from do |c|
      c.action do |global_options,options,args|
        id = require_arg(args, 'role')
        member = require_arg(args, 'member')
        role = api.role(id)
        role.revoke_from member
        puts "Role revoked"
      end
    end

    # Fixed typo in user-facing help text: "--[no-descendants]" ->
    # "--[no-]descendants" (matching the actual GLI switch names).
    role.long_desc <<-EOD
Retrieves a digraph representing the role members and memberships of one or more roles.
The --[no-]ancestors and --[no-]descendants flags determine whether the graph should include ancestors, descendants, or both. Both
are included in the graph by default.
The --acting-as flag specifies, as usual, a role as which to perform the action. The default is the role of the currently
authenticated user. Only roles visible to this role will be included in the resulting graph.
The output is always written to the standard output, and can be one of the following forms (specified with the --format flag):
* png: use the 'dot' command to generate a png image representing the graph.
* dot: produce a file in a suitable format for use with the 'dot' program.
* json [default]: output a JSON representation of the graph.
In order to generate png images, the 'dot' program must be present and on your path. This program is usually installed
as part of the 'graphviz' package, and is available via apt-get on debian like systems and homebrew on OSX.
The JSON format is determined by the presence of the --short flag. If the --short flag is present, the JSON will be an array of
edges, with each edge represented as an array:
[
[ 'parent1', 'child1' ],
[ 'parent2', 'child2'],
...
]
If the --short flag is not present, the JSON output will be more verbose:
{
"graph": [
{
"parent": "parent1",
"child": "child1"
},
...
]
}
    EOD
    role.desc "Describe role memberships as a digraph"
    role.arg_name "role", :multiple
    role.command :graph do |c|
      # Join the formats so the help text reads "json, dot, png" rather
      # than the array's #inspect output.
      c.desc "Output formats [#{GRAPH_FORMATS.join(', ')}]"
      c.flag [:f,:format], default_value: 'json', must_match: GRAPH_FORMATS
      c.desc "Use a more compact JSON format"
      c.switch [:s, :short]
      c.desc "Whether to show ancestors"
      c.switch [:a, :ancestors], default_value: true
      c.desc "Whether to show descendants"
      c.switch [:d, :descendants], default_value: true
      acting_as_option(c)
      c.action do |_, options, args|
        format = options[:format].downcase.to_sym
        # --short only affects JSON output; warn instead of failing.
        if options[:short] && format != :json
          $stderr.puts "WARNING: the --short option is meaningless when --format is not json"
        end
        params = options.slice(:ancestors, :descendants)
        params[:as_role] = options[:acting_as] if options.member?(:acting_as)
        graph = api.role_graph(args, params)
        output = case format
                 when :json then graph.to_json(options[:short]) + "\n"
                 when :png then graph.to_png
                 when :dot then graph.to_dot + "\n"
                 # Effectively unreachable: GLI's must_match already
                 # rejects unknown formats.
                 else raise "Unsupported format: #{format}"
                 end
        $stdout.write output
      end
    end
  end
end
# coding: utf-8
# Gem specification for capistrano-resque_monit.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'capistrano/resque_monit/version'

Gem::Specification.new do |spec|
  spec.name = "capistrano-resque_monit"
  spec.version = Capistrano::ResqueMonit::VERSION
  spec.authors = ["Gino Clement", "Jeremy Wadsack"]
  spec.email = ["ginoclement@gmail.com", "jeremy@keylimetoolbox.com"]
  spec.summary = "Deploying Resque and Monit using Capistrano."
  spec.description = "A set of Capistrano scripts for configuring resque workers to be monitored by monit"
  # NOTE(review): "capinstrano" in the URL looks like a typo for
  # "capistrano" -- confirm the actual repository name before changing.
  spec.homepage = "https://github.com/keylimetoolbox/capinstrano-resque_monit"
  spec.license = "MIT"

  # Ship every git-tracked file; -z/NUL splitting is safe for paths
  # containing whitespace.
  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency "capistrano", "~> 3.0"
  spec.add_development_dependency "bundler", "~> 1.6"
  # Pin rake with a pessimistic constraint: an open-ended dependency
  # triggers a warning from `gem build`.
  spec.add_development_dependency "rake", "~> 10.0"
end
Fix gem build warning
# coding: utf-8
# Gem specification for capistrano-resque_monit.
lib_dir = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib_dir) unless $LOAD_PATH.include?(lib_dir)
require 'capistrano/resque_monit/version'

Gem::Specification.new do |spec|
  spec.name = "capistrano-resque_monit"
  spec.version = Capistrano::ResqueMonit::VERSION
  spec.authors = ["Gino Clement", "Jeremy Wadsack"]
  spec.email = ["ginoclement@gmail.com", "jeremy@keylimetoolbox.com"]
  spec.summary = "Deploying Resque and Monit using Capistrano."
  spec.description = "A set of Capistrano scripts for configuring resque workers to be monitored by monit"
  spec.homepage = "https://github.com/keylimetoolbox/capinstrano-resque_monit"
  spec.license = "MIT"

  # Every git-tracked file ships with the gem.
  tracked = `git ls-files -z`.split("\x0")
  spec.files = tracked
  spec.executables = tracked.select { |path| path.start_with?("bin/") }.map { |path| File.basename(path) }
  spec.test_files = tracked.select { |path| path =~ %r{^(test|spec|features)/} }
  spec.require_paths = ["lib"]

  spec.add_dependency "capistrano", "~> 3.0"
  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake", "~> 10.0"
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.