CombinedText stringlengths 4 3.42M |
|---|
# ruby/spec-style specs for Ruby 3.0 keyword-argument semantics:
# positional/keyword separation, empty-kwargs elision, ** hash copying,
# and argument delegation (including ruby2_keywords-flagged methods).
require_relative '../spec_helper'
ruby_version_is "3.0" do
describe "Keyword arguments" do
# Shared probe method: reports how a call's arguments were split.
def target(*args, **kwargs)
[args, kwargs]
end
it "are separated from positional arguments" do
def m(*args, **kwargs)
[args, kwargs]
end
empty = {}
m(**empty).should == [[], {}]
m(empty).should == [[{}], {}]
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
end
it "when the receiving method has not keyword parameters it treats kwargs as positional" do
def m(*a)
a
end
m(a: 1).should == [{a: 1}]
m({a: 1}).should == [{a: 1}]
end
context "empty kwargs are treated as if they were not passed" do
it "when calling a method" do
def m(*a)
a
end
empty = {}
m(**empty).should == []
m(empty).should == [{}]
end
it "when yielding to a block" do
def y(*args, **kwargs)
yield(*args, **kwargs)
end
empty = {}
y(**empty) { |*a| a }.should == []
y(empty) { |*a| a }.should == [{}]
end
end
it "extra keywords are not allowed without **kwrest" do
def m(*a, kw:)
a
end
m(kw: 1).should == []
-> { m(kw: 1, kw2: 2) }.should raise_error(ArgumentError, 'unknown keyword: :kw2')
-> { m(kw: 1, true => false) }.should raise_error(ArgumentError, 'unknown keyword: true')
end
it "handle * and ** at the same call site" do
def m(*a)
a
end
m(*[], **{}).should == []
m(*[], 42, **{}).should == [42]
end
context "**" do
it "does not copy a non-empty Hash for a method taking (*args)" do
def m(*args)
args[0]
end
h = {a: 1}
m(**h).should.equal?(h)
end
it "copies the given Hash for a method taking (**kwargs)" do
def m(**kw)
kw
end
empty = {}
m(**empty).should == empty
m(**empty).should_not.equal?(empty)
h = {a: 1}
m(**h).should == h
m(**h).should_not.equal?(h)
end
end
# Forwarding constructs must preserve the positional/keyword split of the
# original call site.
context "delegation" do
it "works with (*args, **kwargs)" do
def m(*args, **kwargs)
target(*args, **kwargs)
end
empty = {}
m(**empty).should == [[], {}]
m(empty).should == [[{}], {}]
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
end
it "works with proc { |*args, **kwargs| }" do
m = proc do |*args, **kwargs|
target(*args, **kwargs)
end
empty = {}
m.(**empty).should == [[], {}]
m.(empty).should == [[{}], {}]
m.(a: 1).should == [[], {a: 1}]
m.({a: 1}).should == [[{a: 1}], {}]
# no autosplatting for |*args, **kwargs|
m.([1, 2]).should == [[[1, 2]], {}]
end
it "works with -> (*args, **kwargs) {}" do
m = -> (*args, **kwargs) do
target(*args, **kwargs)
end
empty = {}
m.(**empty).should == [[], {}]
m.(empty).should == [[{}], {}]
m.(a: 1).should == [[], {a: 1}]
m.({a: 1}).should == [[{a: 1}], {}]
end
it "works with (...)" do
def m(...)
target(...)
end
empty = {}
m(**empty).should == [[], {}]
m(empty).should == [[{}], {}]
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
end
# ruby2_keywords marks a Hash so it is re-splatted as keywords downstream;
# the mark must never leak onto caller-visible hashes.
it "works with call(*ruby2_keyword_args)" do
class << self
ruby2_keywords def m(*args)
target(*args)
end
end
empty = {}
m(**empty).should == [[], {}]
Hash.ruby2_keywords_hash?(empty).should == false
m(empty).should == [[{}], {}]
Hash.ruby2_keywords_hash?(empty).should == false
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
kw = {a: 1}
m(**kw).should == [[], {a: 1}]
m(**kw)[1].should == kw
m(**kw)[1].should_not.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
Hash.ruby2_keywords_hash?(m(**kw)[1]).should == false
m(kw).should == [[{a: 1}], {}]
m(kw)[0][0].should.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
end
it "works with super(*ruby2_keyword_args)" do
parent = Class.new do
def m(*args, **kwargs)
[args, kwargs]
end
end
child = Class.new(parent) do
ruby2_keywords def m(*args)
super(*args)
end
end
obj = child.new
empty = {}
obj.m(**empty).should == [[], {}]
Hash.ruby2_keywords_hash?(empty).should == false
obj.m(empty).should == [[{}], {}]
Hash.ruby2_keywords_hash?(empty).should == false
obj.m(a: 1).should == [[], {a: 1}]
obj.m({a: 1}).should == [[{a: 1}], {}]
kw = {a: 1}
obj.m(**kw).should == [[], {a: 1}]
obj.m(**kw)[1].should == kw
obj.m(**kw)[1].should_not.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
Hash.ruby2_keywords_hash?(obj.m(**kw)[1]).should == false
obj.m(kw).should == [[{a: 1}], {}]
obj.m(kw)[0][0].should.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
end
it "works with zsuper" do
parent = Class.new do
def m(*args, **kwargs)
[args, kwargs]
end
end
child = Class.new(parent) do
ruby2_keywords def m(*args)
super
end
end
obj = child.new
empty = {}
obj.m(**empty).should == [[], {}]
Hash.ruby2_keywords_hash?(empty).should == false
obj.m(empty).should == [[{}], {}]
Hash.ruby2_keywords_hash?(empty).should == false
obj.m(a: 1).should == [[], {a: 1}]
obj.m({a: 1}).should == [[{a: 1}], {}]
kw = {a: 1}
obj.m(**kw).should == [[], {a: 1}]
obj.m(**kw)[1].should == kw
obj.m(**kw)[1].should_not.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
Hash.ruby2_keywords_hash?(obj.m(**kw)[1]).should == false
obj.m(kw).should == [[{a: 1}], {}]
obj.m(kw)[0][0].should.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
end
it "works with yield(*ruby2_keyword_args)" do
class << self
def y(args)
yield(*args)
end
ruby2_keywords def m(*args)
y(args, &-> (*args, **kwargs) { target(*args, **kwargs) })
end
end
empty = {}
m(**empty).should == [[], {}]
Hash.ruby2_keywords_hash?(empty).should == false
m(empty).should == [[{}], {}]
Hash.ruby2_keywords_hash?(empty).should == false
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
kw = {a: 1}
m(**kw).should == [[], {a: 1}]
m(**kw)[1].should == kw
m(**kw)[1].should_not.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
Hash.ruby2_keywords_hash?(m(**kw)[1]).should == false
m(kw).should == [[{a: 1}], {}]
m(kw)[0][0].should.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
end
# Without ruby2_keywords, a plain (*args) method absorbs keywords as a
# positional Hash and delegation loses the keyword-ness.
it "does not work with (*args)" do
class << self
def m(*args)
target(*args)
end
end
empty = {}
m(**empty).should == [[], {}]
m(empty).should == [[{}], {}]
m(a: 1).should == [[{a: 1}], {}]
m({a: 1}).should == [[{a: 1}], {}]
end
end
end
end
Let 2.6 parse language/keyword_arguments_spec.rb
# ruby/spec-style specs for Ruby 3.0 keyword-argument semantics.
# Same content as the sibling version, except the `def m(...)` case is
# wrapped in an instance_eval heredoc so pre-2.7 Rubies can still parse
# this file.
require_relative '../spec_helper'
ruby_version_is "3.0" do
describe "Keyword arguments" do
# Shared probe method: reports how a call's arguments were split.
def target(*args, **kwargs)
[args, kwargs]
end
it "are separated from positional arguments" do
def m(*args, **kwargs)
[args, kwargs]
end
empty = {}
m(**empty).should == [[], {}]
m(empty).should == [[{}], {}]
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
end
it "when the receiving method has not keyword parameters it treats kwargs as positional" do
def m(*a)
a
end
m(a: 1).should == [{a: 1}]
m({a: 1}).should == [{a: 1}]
end
context "empty kwargs are treated as if they were not passed" do
it "when calling a method" do
def m(*a)
a
end
empty = {}
m(**empty).should == []
m(empty).should == [{}]
end
it "when yielding to a block" do
def y(*args, **kwargs)
yield(*args, **kwargs)
end
empty = {}
y(**empty) { |*a| a }.should == []
y(empty) { |*a| a }.should == [{}]
end
end
it "extra keywords are not allowed without **kwrest" do
def m(*a, kw:)
a
end
m(kw: 1).should == []
-> { m(kw: 1, kw2: 2) }.should raise_error(ArgumentError, 'unknown keyword: :kw2')
-> { m(kw: 1, true => false) }.should raise_error(ArgumentError, 'unknown keyword: true')
end
it "handle * and ** at the same call site" do
def m(*a)
a
end
m(*[], **{}).should == []
m(*[], 42, **{}).should == [42]
end
context "**" do
it "does not copy a non-empty Hash for a method taking (*args)" do
def m(*args)
args[0]
end
h = {a: 1}
m(**h).should.equal?(h)
end
it "copies the given Hash for a method taking (**kwargs)" do
def m(**kw)
kw
end
empty = {}
m(**empty).should == empty
m(**empty).should_not.equal?(empty)
h = {a: 1}
m(**h).should == h
m(**h).should_not.equal?(h)
end
end
# Forwarding constructs must preserve the positional/keyword split of the
# original call site.
context "delegation" do
it "works with (*args, **kwargs)" do
def m(*args, **kwargs)
target(*args, **kwargs)
end
empty = {}
m(**empty).should == [[], {}]
m(empty).should == [[{}], {}]
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
end
it "works with proc { |*args, **kwargs| }" do
m = proc do |*args, **kwargs|
target(*args, **kwargs)
end
empty = {}
m.(**empty).should == [[], {}]
m.(empty).should == [[{}], {}]
m.(a: 1).should == [[], {a: 1}]
m.({a: 1}).should == [[{a: 1}], {}]
# no autosplatting for |*args, **kwargs|
m.([1, 2]).should == [[[1, 2]], {}]
end
it "works with -> (*args, **kwargs) {}" do
m = -> (*args, **kwargs) do
target(*args, **kwargs)
end
empty = {}
m.(**empty).should == [[], {}]
m.(empty).should == [[{}], {}]
m.(a: 1).should == [[], {a: 1}]
m.({a: 1}).should == [[{a: 1}], {}]
end
it "works with (...)" do
# Heredoc keeps `...` syntax out of the parser's way on Ruby < 2.7.
instance_eval <<~DEF
def m(...)
target(...)
end
DEF
empty = {}
m(**empty).should == [[], {}]
m(empty).should == [[{}], {}]
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
end
# ruby2_keywords marks a Hash so it is re-splatted as keywords downstream;
# the mark must never leak onto caller-visible hashes.
it "works with call(*ruby2_keyword_args)" do
class << self
ruby2_keywords def m(*args)
target(*args)
end
end
empty = {}
m(**empty).should == [[], {}]
Hash.ruby2_keywords_hash?(empty).should == false
m(empty).should == [[{}], {}]
Hash.ruby2_keywords_hash?(empty).should == false
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
kw = {a: 1}
m(**kw).should == [[], {a: 1}]
m(**kw)[1].should == kw
m(**kw)[1].should_not.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
Hash.ruby2_keywords_hash?(m(**kw)[1]).should == false
m(kw).should == [[{a: 1}], {}]
m(kw)[0][0].should.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
end
it "works with super(*ruby2_keyword_args)" do
parent = Class.new do
def m(*args, **kwargs)
[args, kwargs]
end
end
child = Class.new(parent) do
ruby2_keywords def m(*args)
super(*args)
end
end
obj = child.new
empty = {}
obj.m(**empty).should == [[], {}]
Hash.ruby2_keywords_hash?(empty).should == false
obj.m(empty).should == [[{}], {}]
Hash.ruby2_keywords_hash?(empty).should == false
obj.m(a: 1).should == [[], {a: 1}]
obj.m({a: 1}).should == [[{a: 1}], {}]
kw = {a: 1}
obj.m(**kw).should == [[], {a: 1}]
obj.m(**kw)[1].should == kw
obj.m(**kw)[1].should_not.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
Hash.ruby2_keywords_hash?(obj.m(**kw)[1]).should == false
obj.m(kw).should == [[{a: 1}], {}]
obj.m(kw)[0][0].should.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
end
it "works with zsuper" do
parent = Class.new do
def m(*args, **kwargs)
[args, kwargs]
end
end
child = Class.new(parent) do
ruby2_keywords def m(*args)
super
end
end
obj = child.new
empty = {}
obj.m(**empty).should == [[], {}]
Hash.ruby2_keywords_hash?(empty).should == false
obj.m(empty).should == [[{}], {}]
Hash.ruby2_keywords_hash?(empty).should == false
obj.m(a: 1).should == [[], {a: 1}]
obj.m({a: 1}).should == [[{a: 1}], {}]
kw = {a: 1}
obj.m(**kw).should == [[], {a: 1}]
obj.m(**kw)[1].should == kw
obj.m(**kw)[1].should_not.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
Hash.ruby2_keywords_hash?(obj.m(**kw)[1]).should == false
obj.m(kw).should == [[{a: 1}], {}]
obj.m(kw)[0][0].should.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
end
it "works with yield(*ruby2_keyword_args)" do
class << self
def y(args)
yield(*args)
end
ruby2_keywords def m(*args)
y(args, &-> (*args, **kwargs) { target(*args, **kwargs) })
end
end
empty = {}
m(**empty).should == [[], {}]
Hash.ruby2_keywords_hash?(empty).should == false
m(empty).should == [[{}], {}]
Hash.ruby2_keywords_hash?(empty).should == false
m(a: 1).should == [[], {a: 1}]
m({a: 1}).should == [[{a: 1}], {}]
kw = {a: 1}
m(**kw).should == [[], {a: 1}]
m(**kw)[1].should == kw
m(**kw)[1].should_not.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
Hash.ruby2_keywords_hash?(m(**kw)[1]).should == false
m(kw).should == [[{a: 1}], {}]
m(kw)[0][0].should.equal?(kw)
Hash.ruby2_keywords_hash?(kw).should == false
end
# Without ruby2_keywords, a plain (*args) method absorbs keywords as a
# positional Hash and delegation loses the keyword-ness.
it "does not work with (*args)" do
class << self
def m(*args)
target(*args)
end
end
empty = {}
m(**empty).should == [[], {}]
m(empty).should == [[{}], {}]
m(a: 1).should == [[{a: 1}], {}]
m({a: 1}).should == [[{a: 1}], {}]
end
end
end
end
|
# XYZ::Task: tree-structured task model persisted through the project's
# Model layer. A task may carry an executable action and/or nested
# subtasks; the render_* helpers flatten the tree into a
# top / node / component / attribute view for the UI.
module XYZ
class Task < Model
set_relation_name(:task,:task)
# Schema/relation definition consumed by the migration machinery.
def self.up()
column :status, :varchar, :size => 20, :default => "created" # = "created" | "executing" | "completed" | "failed" | "not_reached"
column :result, :json # gets serialized version of TaskAction::Result
#column :output_vars, :json do we need this?
#column :events, :json - content of this may instead go in result
column :action_on_failure, :varchar, :default => "abort"
column :temporal_order, :varchar, :size => 20 # = "sequential" | "concurrent"
column :position, :integer, :default => 1
column :executable_action_type, :varchar
column :executable_action, :json # gets serialized version of TaskAction::Action
many_to_one :task
one_to_many :task, :task_event, :task_error
end
# c is forwarded to Model's initializer — presumably a db/context handle;
# confirm against Model. Subtasks start empty and live in memory until save!.
def initialize(hash_scalar_values,c,model=:task)
defaults = {
:status => "created",
:action_on_failure => "abort"
}
super(defaults.merge(hash_scalar_values),c,model)
self[:subtasks] = Array.new
end
#persists to db this and its sub tasks
def save!()
#no op if saved already as detected by whether has an id
return nil if id()
set_positions!()
#for db access efficiency implement in two phases: 1 - save all subtasks w/o ids, then put in ids
unrolled_tasks = unroll_tasks()
rows = unrolled_tasks.map do |hash_row|
executable_action = hash_row[:executable_action]
row = {
:display_name => "task#{hash_row[:position].to_s}",
:ref => "task#{hash_row[:position].to_s}",
:executable_action_type => executable_action ? Aux.demodulize(executable_action.class.to_s) : nil,
:executable_action => executable_action
}
cols = [:status, :result, :action_on_failure, :position, :temporal_order]
cols.each{|col|row.merge!(col => hash_row[col])}
row
end
id_info_list = Model.create_from_rows(model_handle,rows,{:convert => true,:do_not_update_info_table => true})
#set ids; create_from_rows returns id info in the same order as rows
unrolled_tasks.each_with_index{|task,i|task.set_id_handle(id_info_list[i])}
#set parent relationship
par_rel_rows_for_id_info = set_and_ret_parents!()
par_rel_rows_for_task = par_rel_rows_for_id_info.map{|r|{:id => r[:id], :task_id => r[:parent_id]}}
#prune top level tasks (they have no parent task row to point at)
par_rel_rows_for_task.reject!{|r|r[:task_id].nil?}
Model.update_from_rows(model_handle,par_rel_rows_for_task) unless par_rel_rows_for_task.empty?
IDInfoTable.update_instances(model_handle,par_rel_rows_for_id_info)
end
# In-memory child tasks (populated via add_subtask*).
def subtasks()
self[:subtasks]
end
#for special tasks that have component actions
#TODO: tried to do this by having a class inherit from Task and hanging these fns off it, but this confused Ramaze
def component_actions()
if self[:executable_action].kind_of?(TaskAction::ConfigNode)
action = self[:executable_action]
return (action[:component_actions]||[]).map{|ca| action[:node] ? ca.merge(:node => action[:node]) : ca}
end
subtasks.map{|obj|obj.component_actions()}.flatten
end
def add_subtask_from_hash(hash)
defaults = {:status => "created", :action_on_failure => "abort"}
new_subtask = Task.new(defaults.merge(hash),c)
add_subtask(new_subtask)
end
# Returns the subtask so callers can chain further setup.
def add_subtask(new_subtask)
self[:subtasks] << new_subtask
new_subtask
end
# Assign 1-based sibling positions recursively through the tree.
def set_positions!()
self[:position] ||= 1
return nil if subtasks.empty?
subtasks.each_with_index do |e,i|
e[:position] = i+1
e.set_positions!()
end
end
# Sets :task_id on every node and returns one {:parent_id,:id} row per task
# (depth-first), for updating parent links after the bulk insert.
def set_and_ret_parents!(parent_id=nil)
self[:task_id] = parent_id
id = id()
[:parent_id => parent_id, :id => id] + subtasks.map{|e|e.set_and_ret_parents!(id)}.flatten
end
# Depth-first flattening of this task and all descendants.
def unroll_tasks()
[self] + subtasks.map{|e|e.unroll_tasks()}.flatten
end
#### for rendering tasks
public
def render_form()
#may be different forms; this is one that is organized by node_group, node, component, attribute
task_list = render_form_flat(true)
#TODO: not yet treating node_group
Task.render_group_by_node(task_list)
end
protected
#protected, not private, because of recursive call
def render_form_flat(top=false)
#prune out all (sub)tasks except for top and executable
return render_executable_tasks() if self[:executable_action]
(top ? [render_top_task()] : []) + subtasks.map{|e|e.render_form_flat()}.flatten
end
private
# Re-nests a flat render list under its "top" task, grouping component-level
# entries under their node and synthesizing node entries where missing.
def self.render_group_by_node(task_list)
return task_list if task_list.size < 2
ret = nil
indexed_nodes = Hash.new
task_list.each do |t|
if t[:level] == "top"
ret = t
elsif t[:level] == "node"
indexed_nodes[t[:node_id]] = t
end
end
task_list.each do |t|
if t[:level] == "node"
ret[:children] << t
elsif t[:level] == "component"
if indexed_nodes[t[:node_id]]
indexed_nodes[t[:node_id]][:children] << t
else
# component whose node has no task of its own: fabricate a node entry
node_task = Task.render_task_on_node(:node_id => t[:node_id], :node_name => t[:node_name])
node_task[:children] << t
ret[:children] << node_task
indexed_nodes[node_task[:node_id]] = node_task
end
end
end
ret
end
def render_top_task()
{:task_id => id(),
:level => "top",
:type => "top",
:action_on_failure=> self[:action_on_failure],
:children => Array.new
}
end
# Dispatch on the action's state-change types to the matching renderer.
def render_executable_tasks()
executable_action = self[:executable_action]
sc = executable_action[:state_change_types]
common_vals = {
:task_id => id(),
:status => self[:status],
}
if sc.include?("create_node") then Task.render_tasks_create_node(executable_action,common_vals)
elsif sc.include?("install_component") then Task.render_tasks_install_component(executable_action,common_vals)
elsif sc.include?("setting") then Task.render_tasks_setting(executable_action,common_vals)
else
Log.error("do not treat executable tasks of type(s) #{sc.join(',')}")
end
end
def self.render_task_on_node(node_info)
{:type => "on_node",
:level => "node",
:children => Array.new
}.merge(node_info)
end
def self.render_tasks_create_node(executable_action,common_vals)
node = executable_action[:node]
task = {
:type => "create_node",
:level => "node",
:node_id => node[:id],
:node_name => node[:display_name],
:image_name => executable_action[:image][:display_name],
:children => Array.new
}
[task.merge(common_vals)]
end
# One component-level task per component action on the node.
def self.render_tasks_install_component(executable_action,common_vals)
node = executable_action[:node]
(executable_action[:component_actions]||[]).map do |component_action|
component = component_action[:component]
cmp_attrs = {
:component_id => component[:id],
:component_name => component[:display_name]
}
task = {
:type => "install_component",
:level => "component",
:node_id => node[:id],
:node_name => node[:display_name],
:component_basic_type => component[:basic_type]
}
task.merge!(cmp_attrs)
task.merge!(common_vals)
add_attributes_to_component_task!(task,component_action,cmp_attrs)
end
end
def self.render_tasks_setting(executable_action,common_vals)
node = executable_action[:node]
(executable_action[:component_actions]||[]).map do |component_action|
component = component_action[:component]
cmp_attrs = {
:component_id => component[:id],
# "::"-namespaced names are flattened for display
:component_name => component[:display_name].gsub(/::/,"_")
}
task = {
:type => "on_component",
:level => "component",
:node_id => node[:id],
:node_name => node[:display_name],
:component_basic_type => component[:basic_type]
}
task.merge!(cmp_attrs)
task.merge!(common_vals)
add_attributes_to_component_task!(task,component_action,cmp_attrs)
end
end
#TODO: modifications made (in comparison to kept values) to better render in commit pane
#may need a flag that indicates whether to reformulate
# Appends attribute-level child tasks for visible (changed) attributes;
# nil values on input ports render as "DYNAMICALLY SET".
def self.add_attributes_to_component_task!(task,component_action,cmp_attrs)
attributes = component_action[:attributes]
return task unless attributes
keep_ids = component_action[:changed_attribute_ids]
pruned_attrs = attributes.reject do |a|
a[:hidden] or (keep_ids and not keep_ids.include?(a[:id]))
end
flattten_attrs = AttributeComplexType.flatten_attribute_list(pruned_attrs)
flattten_attrs.each do |a|
val = a[:attribute_value]
if val.nil?
next unless a[:port_type] == "input"
val = "DYNAMICALLY SET"
end
attr_task = {
:type => "setting",
:level => "attribute",
:attribute_id => a[:id],
:attribute_name => a[:display_name],
:attribute_value => val,
:attribute_data_type => a[:data_type],
:attribute_required => a[:required],
:attribute_dynamic => a[:dynamic]
}
attr_task.merge!(cmp_attrs)
task[:children]||= Array.new
task[:children] << attr_task
end
task
end
end
end
in commit view remove null values or have them set to DYNAMICALLY SET
# XYZ::Task: tree-structured task model persisted through the project's
# Model layer. A task may carry an executable action and/or nested
# subtasks; the render_* helpers flatten the tree into a
# top / node / component / attribute view for the UI.
module XYZ
class Task < Model
set_relation_name(:task,:task)
# Schema/relation definition consumed by the migration machinery.
def self.up()
column :status, :varchar, :size => 20, :default => "created" # = "created" | "executing" | "completed" | "failed" | "not_reached"
column :result, :json # gets serialized version of TaskAction::Result
#column :output_vars, :json do we need this?
#column :events, :json - content of this may instead go in result
column :action_on_failure, :varchar, :default => "abort"
column :temporal_order, :varchar, :size => 20 # = "sequential" | "concurrent"
column :position, :integer, :default => 1
column :executable_action_type, :varchar
column :executable_action, :json # gets serialized version of TaskAction::Action
many_to_one :task
one_to_many :task, :task_event, :task_error
end
# c is forwarded to Model's initializer — presumably a db/context handle;
# confirm against Model. Subtasks start empty and live in memory until save!.
def initialize(hash_scalar_values,c,model=:task)
defaults = {
:status => "created",
:action_on_failure => "abort"
}
super(defaults.merge(hash_scalar_values),c,model)
self[:subtasks] = Array.new
end
#persists to db this and its sub tasks
def save!()
#no op if saved already as detected by whether has an id
return nil if id()
set_positions!()
#for db access efficiency implement in two phases: 1 - save all subtasks w/o ids, then put in ids
unrolled_tasks = unroll_tasks()
rows = unrolled_tasks.map do |hash_row|
executable_action = hash_row[:executable_action]
row = {
:display_name => "task#{hash_row[:position].to_s}",
:ref => "task#{hash_row[:position].to_s}",
:executable_action_type => executable_action ? Aux.demodulize(executable_action.class.to_s) : nil,
:executable_action => executable_action
}
cols = [:status, :result, :action_on_failure, :position, :temporal_order]
cols.each{|col|row.merge!(col => hash_row[col])}
row
end
id_info_list = Model.create_from_rows(model_handle,rows,{:convert => true,:do_not_update_info_table => true})
#set ids; create_from_rows returns id info in the same order as rows
unrolled_tasks.each_with_index{|task,i|task.set_id_handle(id_info_list[i])}
#set parent relationship
par_rel_rows_for_id_info = set_and_ret_parents!()
par_rel_rows_for_task = par_rel_rows_for_id_info.map{|r|{:id => r[:id], :task_id => r[:parent_id]}}
#prune top level tasks (they have no parent task row to point at)
par_rel_rows_for_task.reject!{|r|r[:task_id].nil?}
Model.update_from_rows(model_handle,par_rel_rows_for_task) unless par_rel_rows_for_task.empty?
IDInfoTable.update_instances(model_handle,par_rel_rows_for_id_info)
end
# In-memory child tasks (populated via add_subtask*).
def subtasks()
self[:subtasks]
end
#for special tasks that have component actions
#TODO: tried to do this by having a class inherit from Task and hanging these fns off it, but this confused Ramaze
def component_actions()
if self[:executable_action].kind_of?(TaskAction::ConfigNode)
action = self[:executable_action]
return (action[:component_actions]||[]).map{|ca| action[:node] ? ca.merge(:node => action[:node]) : ca}
end
subtasks.map{|obj|obj.component_actions()}.flatten
end
def add_subtask_from_hash(hash)
defaults = {:status => "created", :action_on_failure => "abort"}
new_subtask = Task.new(defaults.merge(hash),c)
add_subtask(new_subtask)
end
# Returns the subtask so callers can chain further setup.
def add_subtask(new_subtask)
self[:subtasks] << new_subtask
new_subtask
end
# Assign 1-based sibling positions recursively through the tree.
def set_positions!()
self[:position] ||= 1
return nil if subtasks.empty?
subtasks.each_with_index do |e,i|
e[:position] = i+1
e.set_positions!()
end
end
# Sets :task_id on every node and returns one {:parent_id,:id} row per task
# (depth-first), for updating parent links after the bulk insert.
def set_and_ret_parents!(parent_id=nil)
self[:task_id] = parent_id
id = id()
[:parent_id => parent_id, :id => id] + subtasks.map{|e|e.set_and_ret_parents!(id)}.flatten
end
# Depth-first flattening of this task and all descendants.
def unroll_tasks()
[self] + subtasks.map{|e|e.unroll_tasks()}.flatten
end
#### for rendering tasks
public
def render_form()
#may be different forms; this is one that is organized by node_group, node, component, attribute
task_list = render_form_flat(true)
#TODO: not yet treating node_group
Task.render_group_by_node(task_list)
end
protected
#protected, not private, because of recursive call
def render_form_flat(top=false)
#prune out all (sub)tasks except for top and executable
return render_executable_tasks() if self[:executable_action]
(top ? [render_top_task()] : []) + subtasks.map{|e|e.render_form_flat()}.flatten
end
private
# Re-nests a flat render list under its "top" task, grouping component-level
# entries under their node and synthesizing node entries where missing.
def self.render_group_by_node(task_list)
return task_list if task_list.size < 2
ret = nil
indexed_nodes = Hash.new
task_list.each do |t|
if t[:level] == "top"
ret = t
elsif t[:level] == "node"
indexed_nodes[t[:node_id]] = t
end
end
task_list.each do |t|
if t[:level] == "node"
ret[:children] << t
elsif t[:level] == "component"
if indexed_nodes[t[:node_id]]
indexed_nodes[t[:node_id]][:children] << t
else
# component whose node has no task of its own: fabricate a node entry
node_task = Task.render_task_on_node(:node_id => t[:node_id], :node_name => t[:node_name])
node_task[:children] << t
ret[:children] << node_task
indexed_nodes[node_task[:node_id]] = node_task
end
end
end
ret
end
def render_top_task()
{:task_id => id(),
:level => "top",
:type => "top",
:action_on_failure=> self[:action_on_failure],
:children => Array.new
}
end
# Dispatch on the action's state-change types to the matching renderer.
def render_executable_tasks()
executable_action = self[:executable_action]
sc = executable_action[:state_change_types]
common_vals = {
:task_id => id(),
:status => self[:status],
}
if sc.include?("create_node") then Task.render_tasks_create_node(executable_action,common_vals)
elsif sc.include?("install_component") then Task.render_tasks_install_component(executable_action,common_vals)
elsif sc.include?("setting") then Task.render_tasks_setting(executable_action,common_vals)
else
Log.error("do not treat executable tasks of type(s) #{sc.join(',')}")
end
end
def self.render_task_on_node(node_info)
{:type => "on_node",
:level => "node",
:children => Array.new
}.merge(node_info)
end
def self.render_tasks_create_node(executable_action,common_vals)
node = executable_action[:node]
task = {
:type => "create_node",
:level => "node",
:node_id => node[:id],
:node_name => node[:display_name],
:image_name => executable_action[:image][:display_name],
:children => Array.new
}
[task.merge(common_vals)]
end
# One component-level task per component action on the node.
def self.render_tasks_install_component(executable_action,common_vals)
node = executable_action[:node]
(executable_action[:component_actions]||[]).map do |component_action|
component = component_action[:component]
cmp_attrs = {
:component_id => component[:id],
:component_name => component[:display_name]
}
task = {
:type => "install_component",
:level => "component",
:node_id => node[:id],
:node_name => node[:display_name],
:component_basic_type => component[:basic_type]
}
task.merge!(cmp_attrs)
task.merge!(common_vals)
add_attributes_to_component_task!(task,component_action,cmp_attrs)
end
end
def self.render_tasks_setting(executable_action,common_vals)
node = executable_action[:node]
(executable_action[:component_actions]||[]).map do |component_action|
component = component_action[:component]
cmp_attrs = {
:component_id => component[:id],
# "::"-namespaced names are flattened for display
:component_name => component[:display_name].gsub(/::/,"_")
}
task = {
:type => "on_component",
:level => "component",
:node_id => node[:id],
:node_name => node[:display_name],
:component_basic_type => component[:basic_type]
}
task.merge!(cmp_attrs)
task.merge!(common_vals)
add_attributes_to_component_task!(task,component_action,cmp_attrs)
end
end
# Appends attribute-level child tasks for visible (changed) attributes;
# nil values on input ports render as "DYNAMICALLY SET".
def self.add_attributes_to_component_task!(task,component_action,cmp_attrs)
attributes = component_action[:attributes]
return task unless attributes
keep_ids = component_action[:changed_attribute_ids]
pruned_attrs = attributes.reject do |a|
a[:hidden] or (keep_ids and not keep_ids.include?(a[:id]))
end
flattten_attrs = AttributeComplexType.flatten_attribute_list(pruned_attrs)
flattten_attrs.each do |a|
val = a[:attribute_value]
if val.nil?
next unless a[:port_type] == "input"
val = "DYNAMICALLY SET"
end
attr_task = {
:type => "setting",
:level => "attribute",
:attribute_id => a[:id],
:attribute_name => a[:display_name],
:attribute_value => val,
:attribute_data_type => a[:data_type],
:attribute_required => a[:required],
:attribute_dynamic => a[:dynamic]
}
attr_task.merge!(cmp_attrs)
task[:children]||= Array.new
task[:children] << attr_task
end
task
end
end
end
|
require 'sinatra/base'
require 'json'
require 'logger'
require_relative './rss2slack_handler'
require_relative './conf'
require_relative './db_client'
# RSS-to-Slack web endpoint.
class Rss2Slack < Sinatra::Base
  configure :production, :development do
    # NOOP. Collaborators are built lazily in the `before` filter: instance
    # variables assigned here belong to the application CLASS scope, so the
    # per-request instance would see them as nil. (Also, the original called
    # `conf.logger_path` on an undefined `conf` — NameError at boot.)
  end

  # Runs before every route: ensure config/logger/db exist (once per
  # instance), then build a fresh handler for this request.
  before do
    @conf ||= R2S::Conf.new
    @logger ||= Logger.new(@conf.logger_path)
    @logger.level = Logger::DEBUG
    @db ||= R2S::DBClient.new(@logger, @conf)
    @handler = R2S::Handler.new(@logger, @conf, @db)
  end

  # Liveness probe.
  get '/v1/hello' do
    status 200
    body 'i_like_sushi'
  end

  # Slack webhook entry point; delegates to the handler and mirrors its
  # response onto the Sinatra response.
  post '/v1/slack/feed' do
    res = @handler.handle_slack_feed(headers, request.params)
    handle_response(res)
  end

  after do
    # NOOP
  end

  # move helper?
  # Copies status/headers/body from an R2S response object onto the Sinatra
  # response, skipping parts that are absent.
  def handle_response(response)
    status response.status unless response.status.nil?
    # was `nil? && empty?`: when headers were nil the short-circuit still
    # evaluated `nil.empty?` and raised NoMethodError; `||` skips on nil OR empty
    headers response.headers unless response.headers.nil? || response.headers.empty?
    body response.body unless response.body.nil?
  end
end
fix sinatra scope
require 'sinatra/base'
require 'json'
require 'logger'
require_relative './rss2slack_handler'
require_relative './conf'
require_relative './db_client'
# RSS-to-Slack web endpoint.
class Rss2Slack < Sinatra::Base
  configure :production, :development do
    # NOOP — ivars set here live on the class, not request instances.
  end

  # Lazily build config/logger/db in request (instance) scope, then a fresh
  # handler for this request.
  before do
    @conf = R2S::Conf.new if @conf.nil?
    # was `conf.logger_path`: `conf` is not defined here — the just-built
    # config object is @conf
    @logger = Logger.new(@conf.logger_path) if @logger.nil?
    @logger.level = Logger::DEBUG
    @db = R2S::DBClient.new(@logger, @conf) if @db.nil?
    @handler = R2S::Handler.new(@logger, @conf, @db)
  end

  # Liveness probe.
  get '/v1/hello' do
    status 200
    body 'i_like_sushi'
  end

  # Slack webhook entry point.
  post '/v1/slack/feed' do
    res = @handler.handle_slack_feed(headers, request.params)
    handle_response(res)
  end

  after do
    # NOOP
  end

  # move helper?
  # Copies status/headers/body from an R2S response object onto the Sinatra
  # response, skipping parts that are absent.
  def handle_response(response)
    status response.status unless response.status.nil?
    # was `nil? && empty?`: nil headers still hit `nil.empty?` and raised
    # NoMethodError; `||` skips when headers are nil OR empty
    headers response.headers unless response.headers.nil? || response.headers.empty?
    body response.body unless response.body.nil?
  end
end
|
# Skeleton for the array-list exercise; initialize/size are added in a
# later step to satisfy the tests.
class ArrayList
end
write initialize and size methods, pass tests
# Simple fixed-capacity array list backed by a plain Array.
class ArrayList
  # Allocate +size+ slots (default 5), each starting out as nil.
  def initialize(size=5)
    @array = Array.new(size) { nil }
  end

  # Number of slots in the backing store.
  def size
    @array.size
  end
end
|
require 'formula'
# Homebrew formula staging the array-var-nginx-module sources; nginx
# modules are compiled into nginx itself, so there is nothing to build here.
class ArrayVarNginxModule < Formula
homepage 'https://github.com/agentzh/array-var-nginx-module'
# Pinned to a bare commit tarball — no release tag at this point.
url 'https://github.com/agentzh/array-var-nginx-module/archive/46767472df.tar.gz'
sha1 '96d8c9fa4d20bd9e0043a76f055d23724fc79dfd'
def install
# Copy the whole source tree under share/ for an nginx build to reference.
(share+'array-var-nginx-module').install Dir['*']
end
end
array-var-nginx-module 0.03
require 'formula'
# Homebrew formula staging the array-var-nginx-module sources; nginx
# modules are compiled into nginx itself, so there is nothing to build here.
class ArrayVarNginxModule < Formula
homepage 'https://github.com/agentzh/array-var-nginx-module'
# v0.03 release tarball.
url 'https://github.com/agentzh/array-var-nginx-module/archive/v0.03.tar.gz'
sha1 'b2666aa3c092060fcd3931a6d45798a5745c1ad6'
def install
# Copy the whole source tree under share/ for an nginx build to reference.
(share+'array-var-nginx-module').install Dir['*']
end
end
|
#######################################################
#
# ruboto/base.rb
#
# Code shared by other ruboto components.
#
#######################################################
# Only used needed for ruboto-core apps
require 'ruboto/version'
$RUBOTO_VERSION = 10

# Guard scripts against an incompatible Ruboto runtime.
# exact: true  -> runtime version must equal required_version
# exact: false -> runtime version must be >= required_version
def confirm_ruboto_version(required_version, exact=true)
  if exact
    if $RUBOTO_VERSION != required_version
      raise "requires $RUBOTO_VERSION=#{required_version}, current version #{$RUBOTO_VERSION}"
    end
  elsif $RUBOTO_VERSION < required_version
    raise "requires $RUBOTO_VERSION=#{required_version} or greater, current version #{$RUBOTO_VERSION}"
  end
end
require 'java'
# Derive the generated app's Java package from whichever Ruboto component
# global is set ($activity, $service, or $broadcast_receiver).
$package_name = ($activity || $service || $broadcast_receiver).package_name
$package = eval("Java::#{$package_name}")
# Create convenience method for top-level android package so we do not need to prefix with 'Java::'.
module Kernel
def android
JavaUtilities.get_package_module_dot_format('android')
end
end
java_import "android.R"
module Ruboto
java_import "#{$package_name}.R"
begin
Id = JavaUtilities.get_proxy_class("#{$package_name}.R$id")
rescue NameError
# Apps with no R$id inner class simply do without Ruboto::Id.
Java::android.util.Log.d "RUBOTO", "no R$id"
end
end
AndroidIds = JavaUtilities.get_proxy_class("android.R$id")
#
# Callbacks
#
module Ruboto
# Mixed into generated Java proxy classes: construct an instance and wire
# up the on_* callbacks declared inside the block in one call.
module CallbackClass
def new_with_callbacks &block
new.initialize_ruboto_callbacks &block
end
end
module Callbacks
# Evaluate the block in the instance (defining singleton on_* methods),
# then register them with the generated Java class.
def initialize_ruboto_callbacks &block
instance_eval &block
setup_ruboto_callbacks
self
end
# All singleton on_* methods except on_create/on_receive, which the
# generated classes dispatch specially.
# NOTE(review): subtracting String names assumes singleton_methods
# returns Strings (Ruby 1.8 / JRuby 1.6 era) — on 1.9+ it returns
# Symbols and this subtraction is a no-op; verify target runtime.
def ruboto_callback_methods
(singleton_methods - ["on_create", "on_receive"]).select{|i| i =~ /^on_/}
end
def setup_ruboto_callbacks
ruboto_callback_methods.each do |i|
begin
# e.g. on_click registers under the generated CB_CLICK constant
setCallbackProc(self.class.const_get(i.sub(/^on_/, "CB_").upcase), method(i))
rescue
# silently skip callbacks the generated class does not expose
end
end
end
end
end
#
# Import a class and set it up for handlers
#
# Imports the given Java classes (by String/Symbol name) and mixes in the
# Ruboto callback plumbing. Generalized so that already-loaded class objects
# (e.g. Ruboto-generated classes) may also be passed and get the callback
# modules without a redundant java_import — backward compatible with the
# original name-only usage.
def ruboto_import(*package_classes)
  # Anything that is not a String/Symbol is treated as a class that is
  # already imported and only needs the callback mixins.
  already_classes = package_classes.select { |i| !i.is_a?(String) && !i.is_a?(Symbol) }
  imported_classes = package_classes - already_classes
  unless imported_classes.empty?
    # TODO(uwe): The first part of this "if" is only needed for JRuby 1.6.x. Simplify when we stop supporting JRuby 1.6.x
    if imported_classes.size == 1
      imported_classes = [*(java_import(*imported_classes) || eval("Java::#{imported_classes[0]}"))]
    else
      imported_classes = java_import(imported_classes)
    end
  end
  (already_classes + imported_classes).each do |package_class|
    package_class.class_eval do
      extend Ruboto::CallbackClass
      include Ruboto::Callbacks
    end
  end
end
Allow ruboto_import to set up callbacks on classes that were generated (i.e., not trying to import them first)
#######################################################
#
# ruboto/base.rb
#
# Code shared by other ruboto components.
#
#######################################################
# Only used needed for ruboto-core apps
require 'ruboto/version'
$RUBOTO_VERSION = 10
# Ensures the running Ruboto scripts are compatible with this code.
# exact (default) demands an exact $RUBOTO_VERSION match; otherwise any
# version >= required_version is accepted. Raises RuntimeError on mismatch.
def confirm_ruboto_version(required_version, exact=true)
  mismatch = exact ? $RUBOTO_VERSION != required_version : $RUBOTO_VERSION < required_version
  return unless mismatch
  suffix = exact ? '' : ' or greater'
  raise "requires $RUBOTO_VERSION=#{required_version}#{suffix}, current version #{$RUBOTO_VERSION}"
end
require 'java'
# The bootstrapping component (activity/service/receiver) tells us which
# Java package this app lives in.
$package_name = ($activity || $service || $broadcast_receiver).package_name
# Resolve the JRuby proxy module for that package.
$package = eval("Java::#{$package_name}")
# Create convenience method for top-level android package so we do not need to prefix with 'Java::'.
module Kernel
def android
JavaUtilities.get_package_module_dot_format('android')
end
end
java_import "android.R"
module Ruboto
# The app's generated resource class, imported under the Ruboto namespace.
java_import "#{$package_name}.R"
begin
Id = JavaUtilities.get_proxy_class("#{$package_name}.R$id")
rescue NameError
# No R$id inner class when the app defines no id resources — log only.
Java::android.util.Log.d "RUBOTO", "no R$id"
end
end
# Platform resource ids (android.R$id) as a proxy class.
AndroidIds = JavaUtilities.get_proxy_class("android.R$id")
#
# Callbacks
#
module Ruboto
# Adds a class-level constructor that creates an instance and wires up its
# callbacks from a block in one step.
module CallbackClass
def new_with_callbacks &block
new.initialize_ruboto_callbacks &block
end
end
# Instance-level callback wiring for Ruboto-generated Java classes.
module Callbacks
# The block defines on_* singleton methods on the receiver; afterwards they
# are registered on the Java side. Returns self.
def initialize_ruboto_callbacks &block
instance_eval &block
setup_ruboto_callbacks
self
end
# on_* singleton methods, excluding lifecycle hooks handled separately.
# NOTE(review): on Ruby >= 1.9 singleton_methods yields Symbols, so
# subtracting the String names may have no effect — confirm on JRuby.
def ruboto_callback_methods
(singleton_methods - ["on_create", "on_receive"]).select{|i| i =~ /^on_/}
end
# Maps each on_foo method to its generated CB_FOO constant and registers it.
# The bare rescue intentionally ignores methods without a constant.
def setup_ruboto_callbacks
ruboto_callback_methods.each do |i|
begin
setCallbackProc(self.class.const_get(i.sub(/^on_/, "CB_").upcase), method(i))
rescue
end
end
end
end
end
#
# Import a class and set it up for handlers
#
# Imports Java classes named by String/Symbol and mixes the Ruboto callback
# modules into them. Class objects may also be passed: they are assumed to be
# already imported (e.g. Ruboto-generated classes) and only get the mixins.
def ruboto_import(*package_classes)
# Split inputs: non-String/Symbol arguments are already-loaded classes.
already_classes = package_classes.select{|i| not i.is_a?(String) and not i.is_a?(Symbol)}
imported_classes = package_classes - already_classes
unless imported_classes.empty?
# TODO(uwe): The first part of this "if" is only needed for JRuby 1.6.x. Simplify when we stop supporting JRuby 1.6.x
if imported_classes.size == 1
imported_classes = [*(java_import(*imported_classes) || eval("Java::#{imported_classes[0]}"))]
else
imported_classes = java_import(imported_classes)
end
end
# Give every class — pre-loaded or freshly imported — the callback plumbing.
(already_classes + imported_classes).each do |package_class|
package_class.class_eval do
extend Ruboto::CallbackClass
include Ruboto::Callbacks
end
end
end
|
# Kindle adjuster
# requires graphics/ImageMagick
# graphics/poppler
# graphics/sam2p
# graphics/pdftk
Device_name = :kindle_paperwhite
Crop_nombre = true # whether to crop away page numbers (nombre) etc.
cleanup_tmpfiles = true # whether to delete temporary files at the end
edge_lines_enable = true # draw lines at the page edges to suppress Kindle's
# automatic margin trimming
# ImageMagick -level settings (black-point,white-point,gamma).
# NOTE(review): setting1 separates the gamma with '.' where setting2 uses ','
# — possibly a typo in the -level argument; confirm against ImageMagick docs.
setting1 = "40%,90%.0.4" # for scans with a darker background
setting2 = "25%,90%,0.3" # for scans with a whiter background
level_settings = setting1
# Describes the target e-reader screen, derived from a known device symbol.
# Exposes the raw geometry string (pixels), the numeric width/height (x/y)
# and the width:height aspect ratio.
class Device
  attr_reader :name, :pixels, :x, :y, :aspect

  # device_name: a key of the known-resolution table, e.g. :kindle_paperwhite.
  def initialize(device_name)
    resolutions = { :kindle_paperwhite => "658x905" }
    @name = device_name.to_s
    @pixels = resolutions[device_name]
    m = @pixels.match(/(\d+)x(\d+)/)
    @x = m[1].to_i
    @y = m[2].to_i
    @aspect = @x.to_f / @y.to_f
  end
end
# Scans the image along one axis with ImageMagick, sampling `sample_n` strips
# of width `step`, and returns the [first, last] offsets (each padded by one
# step) where the mean brightness falls below `threshold` relative to the
# page background — i.e. the span actually containing content.
#
# filename  - image file to probe
# axis      - :x (vertical strips) or :y (horizontal strips)
# sample_n  - number of strips to sample
# step      - strip thickness in pixels (may be fractional)
# threshold - content detected where strip_mean/background_mean < threshold
#
# NOTE(review): if no strip ever crosses the threshold, breakpoints stays
# empty and the final arithmetic raises NoMethodError on nil — callers
# assume content is always found.
def get_breakpoint(filename, axis, sample_n, step, threshold)
  case axis
  when :x
    g = ['', 'x0+', '+0'] # #{step}x0+#{ofs}+0
  when :y
    g = ['0x', '+0+', ''] # 0x#{step}+0+#{ofs}
  end
  white = 65535
  start_p = 0
  breakpoints = Array.new
  sample_n.times do |i|
    ofs = (step * i).to_i
    # BUGFIX: the crop target was a mangled placeholder ("#(unknown)");
    # use the filename argument, which was otherwise never referenced.
    value = `convert #{filename} -crop #{g[0]}#{step}#{g[1]}#{ofs}#{g[2]} -format "%[mean]" info:`.to_f
    white = value if i == 0 # first strip defines the background brightness
    if (start_p == 0) && (value/white < threshold)
      start_p = ofs
    end
    if (start_p != 0) && (value/white >= threshold)
      breakpoints << start_p
      breakpoints << ofs
      start_p = 0
    end
  end
  return breakpoints[0]-step.to_i, breakpoints[-1]+step.to_i
end
# Computes the crop rectangle for the flattened sample image `f`, then widens
# one dimension so the crop matches the device aspect ratio. Returns
# {x1:, y1:, x2:, y2:} — x1/y1 are the crop width/height and x2/y2 the crop
# offset (ImageMagick geometry order WxH+X+Y).
def get_crop_area(f)
# Original page dimensions, queried via ImageMagick.
org_size = `convert #{f} -format "%Wx%H" info:`.match(/(\d+)x(\d+)/)
x = org_size[1].to_f
y = org_size[2].to_f
sample_n = 150
dx = x/sample_n
dy = y/sample_n
threshold_x = 0.75
# Looser vertical threshold when page numbers are to be cropped away.
threshold_y = Crop_nombre ? 0.66: 0.90
dev_aspect = Device.new(Device_name).aspect
start_x, end_x = get_breakpoint(f, :x, sample_n, dx, threshold_x)
start_y, end_y = get_breakpoint(f, :y, sample_n, dy, threshold_y)
# Expand whichever dimension falls short of the device aspect ratio,
# splitting the adjustment evenly between both sides.
if ((end_x-start_x).to_f/(end_y-start_y)) < dev_aspect
adjust = (end_y - start_y) * dev_aspect - (end_x - start_x)
start_x -= adjust.to_i / 2
end_x += adjust.to_i / 2
else
adjust = (end_x - start_x) / dev_aspect - (end_y - start_y)
start_y -= adjust.to_i / 2
end_y += adjust.to_i / 2
end
return {x1: end_x - start_x, y1: end_y - start_y,\
x2: start_x, y2: start_y}
end
# Human-readable wall-clock time elapsed since `start_time`, e.g. "2min 5sec".
def elapsed_time(start_time)
  seconds = Time.now - start_time
  minutes = (seconds / 60).floor
  "#{minutes}min #{(seconds % 60).floor}sec"
end
# --- main ---
# Splits the given PDF into per-page PNGs, detects the effective content
# area from a sample of overlaid pages, crops/resizes everything to the
# device resolution and reassembles a "<name>_kindle.pdf".
# NOTE(review): #{book} and file names are interpolated into shell commands
# unescaped — paths with spaces or metacharacters will break (Shellwords
# would fix this).
book = ARGV.shift
if not book
  puts "Usage: ruby kindle_adjuster bookname.pdf"
  exit
end
dev = Device.new(Device_name)
start_time = Time.now
Dir.mkdir("./png") if not Dir.exist?("./png")
puts "splitting #{book} into png images... #{elapsed_time(start_time)}"
system("pdfimages -png #{book} ./png/page") # one PNG per page
puts "selecting pages... #{elapsed_time(start_time)}"
pages = Array.new
Dir.glob('./png/*.png').each do |f| # record each page's filename and width
  w = `identify -format '%w' #{f}`
  pages << [f, w.to_i]
end
ww = 0
pages.each do |p|
  ww += p[1]
end
mean_w = ww / pages.length # average page width
(pages.length - 1).downto(0) do |i|
  if pages[i][1] < mean_w * 0.85 or pages[i][1] > mean_w * 1.7
    File.delete(pages[i][0])
    pages.delete_at(i) # drop pages whose width deviates strongly from the mean
  end
end
sample_page_list = ""
skips = 101 # sample roughly 100 pages, excluding the first and last few
skip_rate = 0
while (skip_rate % 2 == 0) do
  skips -= 1
  skip_rate = pages.length / skips
end
if pages.length < 100 # for PDFs under 100 pages, sample every page
  skips = pages.length # BUGFIX: was `page.length` — undefined variable (NameError)
  skip_rate = 1
end
i = 0
5.upto(skips - 5) do |i|
  sample_page_list << " #{pages[i * skip_rate][0]}"
end
puts "calculating effective size... #{elapsed_time(start_time)}"
system("convert #{sample_page_list} -level #{level_settings} -background none -compose darken -flatten ./png/output.jpg") # overlay all sampled pages
g = get_crop_area('./png/output.jpg')
crop_geometry = "#{g[:x1]}x#{g[:y1]}+#{g[:x2]}+#{g[:y2]}"
Dir.mkdir("./conv") if not Dir.exist?("./conv")
i = 0
puts "cropping/converting png images... #{elapsed_time(start_time)}"
edge_lines = ""
if edge_lines_enable
  edge_lines = "-strokewidth 10 -draw 'line 0,0 #{dev.x},0'"
end
pages.each do |p|
  case i
  when 0, (pages.length-1) # never crop the first/last pages (covers)
    system("convert #{p[0]} -resize #{dev.pixels} -type Grayscale ./conv/#{'%04d' % i}.png")
  when 1..(pages.length-2) # crop all other pages before converting
    system("convert #{p[0]} -rotate \"90>\" -crop #{crop_geometry} -resize #{dev.pixels} -type Grayscale -level #{level_settings} #{edge_lines} ./conv/#{'%04d' % i}.png")
  else
  end
  i += 1
end
puts "making pdf from png files... #{elapsed_time(start_time)}"
Dir.glob('./conv/*.png').each do |f|
  system("sam2p -j:quiet #{f} #{f}.pdf")
end
system("pdftk ./conv/*.pdf cat output ./#{book.sub('.pdf','_kindle.pdf')}")
if cleanup_tmpfiles
  Dir.glob("./png/*") do |f|
    File.delete(f)
  end
  Dir.glob("./conv/*") do |f|
    File.delete(f)
  end
end
grayscale変換時 convertの -level 設定を変更
# Kindle adjuster
# requires graphics/ImageMagick
# graphics/poppler
# graphics/sam2p
# graphics/pdftk
Device_name = :kindle_paperwhite
Crop_nombre = true # whether to crop away page numbers (nombre) etc.
cleanup_tmpfiles = true # whether to delete temporary files at the end
edge_lines_enable = true # draw lines at the page edges to suppress Kindle's
# automatic margin trimming
# ImageMagick -level settings (black-point,white-point,gamma).
# NOTE(review): setting1 separates the gamma with '.' where setting2 uses ','
# — possibly a typo in the -level argument; confirm against ImageMagick docs.
setting1 = "40%,80%.0.25" # for scans with a darker background
setting2 = "25%,90%,0.3" # for scans with a whiter background
level_settings = setting1
# Target e-reader screen: resolution string, numeric dimensions, and the
# width:height aspect ratio, looked up from a table of known devices.
class Device
  attr_reader :name, :pixels, :x, :y, :aspect

  # device_name: symbol identifying a known device (e.g. :kindle_paperwhite).
  def initialize(device_name)
    @name = device_name.to_s
    @pixels = { :kindle_paperwhite => "658x905" }[device_name]
    width, height = @pixels.match(/(\d+)x(\d+)/).captures
    @x = width.to_i
    @y = height.to_i
    @aspect = @x.to_f / @y.to_f
  end
end
# Samples `sample_n` strips of thickness `step` along the given axis of the
# image and returns [first, last] offsets (padded by one step) where the mean
# brightness drops below `threshold` relative to the background — the span of
# actual page content.
#
# filename  - image file to probe (via ImageMagick convert)
# axis      - :x or :y
# sample_n  - number of strips
# step      - strip thickness in pixels
# threshold - content where strip_mean/background_mean < threshold
#
# NOTE(review): if nothing crosses the threshold, breakpoints is empty and
# the final line raises NoMethodError on nil.
def get_breakpoint(filename, axis, sample_n, step, threshold)
  case axis
  when :x
    g = ['', 'x0+', '+0'] # #{step}x0+#{ofs}+0
  when :y
    g = ['0x', '+0+', ''] # 0x#{step}+0+#{ofs}
  end
  white = 65535
  start_p = 0
  breakpoints = Array.new
  sample_n.times do |i|
    ofs = (step * i).to_i
    # BUGFIX: the crop target was a mangled placeholder ("#(unknown)");
    # use the filename argument, which was otherwise never referenced.
    value = `convert #{filename} -crop #{g[0]}#{step}#{g[1]}#{ofs}#{g[2]} -format "%[mean]" info:`.to_f
    white = value if i == 0 # first strip defines background brightness
    if (start_p == 0) && (value/white < threshold)
      start_p = ofs
    end
    if (start_p != 0) && (value/white >= threshold)
      breakpoints << start_p
      breakpoints << ofs
      start_p = 0
    end
  end
  return breakpoints[0]-step.to_i, breakpoints[-1]+step.to_i
end
# Derives the crop rectangle from the flattened sample image `f` and pads one
# dimension so it matches the device aspect ratio. Returns {x1:, y1:, x2:, y2:}
# — crop width/height plus offset, i.e. ImageMagick geometry WxH+X+Y.
def get_crop_area(f)
# Page dimensions via ImageMagick.
org_size = `convert #{f} -format "%Wx%H" info:`.match(/(\d+)x(\d+)/)
x = org_size[1].to_f
y = org_size[2].to_f
sample_n = 150
dx = x/sample_n
dy = y/sample_n
threshold_x = 0.75
# Looser vertical threshold when the page-number band should be cropped.
threshold_y = Crop_nombre ? 0.66: 0.90
dev_aspect = Device.new(Device_name).aspect
start_x, end_x = get_breakpoint(f, :x, sample_n, dx, threshold_x)
start_y, end_y = get_breakpoint(f, :y, sample_n, dy, threshold_y)
# Grow whichever dimension is short of the device aspect, split evenly.
if ((end_x-start_x).to_f/(end_y-start_y)) < dev_aspect
adjust = (end_y - start_y) * dev_aspect - (end_x - start_x)
start_x -= adjust.to_i / 2
end_x += adjust.to_i / 2
else
adjust = (end_x - start_x) / dev_aspect - (end_y - start_y)
start_y -= adjust.to_i / 2
end_y += adjust.to_i / 2
end
return {x1: end_x - start_x, y1: end_y - start_y,\
x2: start_x, y2: start_y}
end
# Formats the wall-clock time elapsed since `start_time` as "Xmin Ysec".
def elapsed_time(start_time)
  total = Time.now - start_time
  format('%dmin %dsec', (total / 60).floor, (total % 60).floor)
end
# --- main ---
# Splits the given PDF into per-page PNGs, detects the content area from an
# overlay of sampled pages, crops/resizes everything for the device and
# rebuilds a "<name>_kindle.pdf".
# NOTE(review): #{book} and file names reach the shell unescaped — paths with
# spaces/metacharacters will break (Shellwords would fix this).
book = ARGV.shift
if not book
  puts "Usage: ruby kindle_adjuster bookname.pdf"
  exit
end
dev = Device.new(Device_name)
start_time = Time.now
Dir.mkdir("./png") if not Dir.exist?("./png")
puts "splitting #{book} into png images... #{elapsed_time(start_time)}"
system("pdfimages -png #{book} ./png/page") # one PNG per page
puts "selecting pages... #{elapsed_time(start_time)}"
pages = Array.new
Dir.glob('./png/*.png').each do |f| # record each page's filename and width
  w = `identify -format '%w' #{f}`
  pages << [f, w.to_i]
end
ww = 0
pages.each do |p|
  ww += p[1]
end
mean_w = ww / pages.length # average page width
(pages.length - 1).downto(0) do |i|
  if pages[i][1] < mean_w * 0.85 or pages[i][1] > mean_w * 1.7
    File.delete(pages[i][0])
    pages.delete_at(i) # drop pages far from the mean width
  end
end
sample_page_list = ""
skips = 101 # sample roughly 100 pages, excluding the first and last few
skip_rate = 0
while (skip_rate % 2 == 0) do
  skips -= 1
  skip_rate = pages.length / skips
end
if pages.length < 100 # for PDFs under 100 pages, sample every page
  skips = pages.length # BUGFIX: was `page.length` — undefined variable (NameError)
  skip_rate = 1
end
i = 0
5.upto(skips - 5) do |i|
  sample_page_list << " #{pages[i * skip_rate][0]}"
end
puts "calculating effective size... #{elapsed_time(start_time)}"
system("convert #{sample_page_list} -level #{level_settings} -background none -compose darken -flatten ./png/output.jpg") # overlay all sampled pages
g = get_crop_area('./png/output.jpg')
crop_geometry = "#{g[:x1]}x#{g[:y1]}+#{g[:x2]}+#{g[:y2]}"
Dir.mkdir("./conv") if not Dir.exist?("./conv")
i = 0
puts "cropping/converting png images... #{elapsed_time(start_time)}"
edge_lines = ""
if edge_lines_enable
  edge_lines = "-strokewidth 10 -draw 'line 0,0 #{dev.x},0'"
end
pages.each do |p|
  case i
  when 0, (pages.length-1) # never crop the first/last pages (covers)
    system("convert #{p[0]} -resize #{dev.pixels} -type Grayscale ./conv/#{'%04d' % i}.png")
  when 1..(pages.length-2) # crop all other pages before converting
    system("convert #{p[0]} -rotate \"90>\" -crop #{crop_geometry} -resize #{dev.pixels} -type Grayscale -level #{level_settings} #{edge_lines} ./conv/#{'%04d' % i}.png")
  else
  end
  i += 1
end
puts "making pdf from png files... #{elapsed_time(start_time)}"
Dir.glob('./conv/*.png').each do |f|
  system("sam2p -j:quiet #{f} #{f}.pdf")
end
system("pdftk ./conv/*.pdf cat output ./#{book.sub('.pdf','_kindle.pdf')}")
if cleanup_tmpfiles
  Dir.glob("./png/*") do |f|
    File.delete(f)
  end
  Dir.glob("./conv/*") do |f|
    File.delete(f)
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Jeweler-generated gemspec for kindlemail 0.2.7 (do not edit by hand —
# regenerate via `rake gemspec`).
Gem::Specification.new do |s|
s.name = %q{kindlemail}
s.version = "0.2.7"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Daniel Harper"]
s.date = %q{2011-01-17}
s.default_executable = %q{kindlemail}
s.description = %q{Sends documents to a designated kindle address painlessly and via the CLI. No need to fumble around with clumsy attachment boxes so forth, just whack in the documents you want to send and hit enter}
s.email = %q{djharperuk@gmail.com}
s.executables = ["kindlemail"]
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
"Changelog.md",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"TODO.txt",
"VERSION",
"bin/kindlemail",
"conf_templates/.email_conf",
"conf_templates/.kindlemail",
"kindlemail.gemspec",
"lib/KindleMail.rb",
"lib/KindleMailFileDatastore.rb",
"lib/KindleMailer.rb",
"lib/constants.rb",
"test/test_kindle_mailer.rb"
]
s.homepage = %q{http://github.com/djhworld/kindlemail}
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{Push documents to your kindle via the personal document service}
s.test_files = [
"test/test_kindle_mailer.rb"
]
# Version-guarded dependency declarations emitted by jeweler; the fallback
# branches collapse runtime and development deps into plain add_dependency
# calls, hence the duplicated gmail-mailer/trollop lines there.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.5.2"])
s.add_development_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_development_dependency(%q<trollop>, ["~> 1.16.2"])
s.add_runtime_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_runtime_dependency(%q<trollop>, ["~> 1.16.2"])
else
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
s.add_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_dependency(%q<trollop>, ["~> 1.16.2"])
s.add_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_dependency(%q<trollop>, ["~> 1.16.2"])
end
else
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
s.add_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_dependency(%q<trollop>, ["~> 1.16.2"])
s.add_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_dependency(%q<trollop>, ["~> 1.16.2"])
end
end
Regenerate gemspec for version 0.2.8
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Jeweler-generated gemspec for kindlemail 0.2.8 (adds lib/Configuration.rb;
# regenerate with `rake gemspec` rather than editing).
Gem::Specification.new do |s|
s.name = %q{kindlemail}
s.version = "0.2.8"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Daniel Harper"]
s.date = %q{2011-01-22}
s.default_executable = %q{kindlemail}
s.description = %q{Sends documents to a designated kindle address painlessly and via the CLI. No need to fumble around with clumsy attachment boxes so forth, just whack in the documents you want to send and hit enter}
s.email = %q{djharperuk@gmail.com}
s.executables = ["kindlemail"]
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
"Changelog.md",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"TODO.txt",
"VERSION",
"bin/kindlemail",
"conf_templates/.email_conf",
"conf_templates/.kindlemail",
"kindlemail.gemspec",
"lib/Configuration.rb",
"lib/KindleMail.rb",
"lib/KindleMailFileDatastore.rb",
"lib/KindleMailer.rb",
"lib/constants.rb",
"test/test_kindle_mailer.rb"
]
s.homepage = %q{http://github.com/djhworld/kindlemail}
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{Push documents to your kindle via the personal document service}
s.test_files = [
"test/test_kindle_mailer.rb"
]
# Version-guarded dependency declarations; older-RubyGems fallbacks flatten
# dev + runtime deps, which is why gmail-mailer/trollop appear twice there.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.5.2"])
s.add_development_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_development_dependency(%q<trollop>, ["~> 1.16.2"])
s.add_runtime_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_runtime_dependency(%q<trollop>, ["~> 1.16.2"])
else
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
s.add_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_dependency(%q<trollop>, ["~> 1.16.2"])
s.add_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_dependency(%q<trollop>, ["~> 1.16.2"])
end
else
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
s.add_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_dependency(%q<trollop>, ["~> 1.16.2"])
s.add_dependency(%q<gmail-mailer>, ["= 0.4.5"])
s.add_dependency(%q<trollop>, ["~> 1.16.2"])
end
end
|
#!/usr/bin/env ruby
# CLI entry point: joins the command-line words into one query, runs it
# through the project's SearchEngine, and prints the query URI plus the
# result's URI.
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'lib','search_engine.rb'))
# ruby_cli?
query = ARGV.join(" ") # ARGF.read
search_engine = SearchEngine.new
results = search_engine.search(query)
puts search_engine.get_uri(query)
puts results.uri
# results.each do |r|
# puts "#{r.title}\t#{r.url}"
# end
# results.each do |r|
# puts "#{r.index}\t#{r.title}\t#{r.uri}"
# end
Only takes 1 field so far
#!/usr/bin/env ruby
# CLI entry point: searches for the command-line words restricted to the
# "url" field, then prints the query URI followed by the raw results.
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'lib','search_engine.rb'))
# ruby_cli?
query = ARGV.join(" ") # ARGF.read
search_engine = SearchEngine.new
results = search_engine.search(query, ["url"])
puts search_engine.get_uri(query)
puts results
# encoding: utf-8
#--
# Copyright (C) 2013 Gitorious AS
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#++
require "test_helper"
require "create_merge_request_comment"
# Tests for the CreateMergeRequestComment use case: creation, validation,
# owner notification, status changes and favorites.
class CreateMergeRequestCommentTest < ActiveSupport::TestCase
  def setup
    @user = users(:zmalltalker)
    @repository = repositories(:johans)
    @merge_request = @repository.merge_requests.first
  end

  should "create comment" do
    outcome = CreateMergeRequestComment.new(@user, @merge_request).execute({
      :body => "Nice going!"
    })
    assert outcome.success?, outcome.to_s
    assert_equal Comment.last, outcome.result
    assert_equal "zmalltalker", outcome.result.user.login
    assert_equal "Nice going!", outcome.result.body
    assert_equal @merge_request, outcome.result.target
  end

  should "not create invalid comment" do
    outcome = CreateMergeRequestComment.new(@user, @merge_request).execute({})
    refute outcome.success?, outcome.to_s
  end

  should "create comment with only state change" do
    outcome = CreateMergeRequestComment.new(@merge_request.user, @merge_request).execute({
      :state => "Open"
    })
    assert outcome.success?, outcome.to_s
    assert_equal Comment.last, outcome.result
    assert_equal "Open", outcome.result.state_changed_to
    # assert_nil rather than assert_equal nil (deprecated in newer minitest).
    assert_nil outcome.result.body
    assert_equal @merge_request, outcome.result.target
  end

  should "notify repository owner of merge request comment" do
    user = users(:moe)
    assert_difference "Message.count" do
      outcome = CreateMergeRequestComment.new(user, @merge_request).execute({
        :body => "Nice work"
      })
    end
    message = Message.last
    assert_equal user, message.sender
    assert_equal @merge_request.user, message.recipient
    assert_equal "moe commented on your merge request", message.subject
    assert_equal "moe commented:\n\nNice work", message.body
    assert_equal @merge_request, message.notifiable
  end

  should "not notify repository owner of own comment" do
    assert_no_difference "Message.count" do
      outcome = CreateMergeRequestComment.new(@merge_request.user, @merge_request).execute({
        :body => "Aight"
      })
    end
  end

  should "update merge request status" do
    outcome = CreateMergeRequestComment.new(@merge_request.user, @merge_request).execute({
      :state => "Closed"
    })
    assert_equal "Closed", @merge_request.reload.status_tag.to_s
  end

  should "not allow non-owners to update status" do
    outcome = CreateMergeRequestComment.new(users(:moe), @merge_request).execute({
      :state => "Closed"
    })
    # BUGFIX: was `outcome.success` — every other assertion in this file uses
    # the `success?` predicate, and `success` would raise NoMethodError.
    refute outcome.success?, outcome.to_s
  end

  should "add to user's favorites" do
    user = users(:moe)
    assert_difference "user.favorites.count" do
      outcome = CreateMergeRequestComment.new(user, @merge_request).execute({
        :body => "Nice going!",
        :add_to_favorites => true
      })
    end
    assert @merge_request.watched_by?(user)
  end
end
Test integration between MergeRequest and SendMessage with an isolated test
# encoding: utf-8
#--
# Copyright (C) 2013 Gitorious AS
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#++
require "test_helper"
require "create_merge_request_comment"
# Tests for the CreateMergeRequestComment use case. The notification test
# isolates the SendMessage collaborator with a mock instead of asserting on
# persisted Message rows.
class CreateMergeRequestCommentTest < ActiveSupport::TestCase
  def setup
    @user = users(:zmalltalker)
    @repository = repositories(:johans)
    @merge_request = @repository.merge_requests.first
  end

  should "create comment" do
    outcome = CreateMergeRequestComment.new(@user, @merge_request).execute({
      :body => "Nice going!"
    })
    assert outcome.success?, outcome.to_s
    assert_equal Comment.last, outcome.result
    assert_equal "zmalltalker", outcome.result.user.login
    assert_equal "Nice going!", outcome.result.body
    assert_equal @merge_request, outcome.result.target
  end

  should "not create invalid comment" do
    outcome = CreateMergeRequestComment.new(@user, @merge_request).execute({})
    refute outcome.success?, outcome.to_s
  end

  should "create comment with only state change" do
    outcome = CreateMergeRequestComment.new(@merge_request.user, @merge_request).execute({
      :state => "Open"
    })
    assert outcome.success?, outcome.to_s
    assert_equal Comment.last, outcome.result
    assert_equal "Open", outcome.result.state_changed_to
    # assert_nil rather than assert_equal nil (deprecated in newer minitest).
    assert_nil outcome.result.body
    assert_equal @merge_request, outcome.result.target
  end

  should "notify repository owner of merge request comment" do
    user = users(:moe)
    # BUGFIX: was `user: message.sender` — no local `message` exists in this
    # version (it was removed with the assert-based test), which raises
    # NameError. The commenting user is the sender.
    SendMessage.expects(:call).with(user: user,
                                    recipient: @merge_request.user,
                                    subject: "moe commented on your merge request",
                                    body: "moe commented:\n\nNice work",
                                    notifiable: @merge_request)
    CreateMergeRequestComment.new(user, @merge_request).execute(body: "Nice work")
  end

  should "not notify repository owner of own comment" do
    assert_no_difference "Message.count" do
      outcome = CreateMergeRequestComment.new(@merge_request.user, @merge_request).execute({
        :body => "Aight"
      })
    end
  end

  should "update merge request status" do
    outcome = CreateMergeRequestComment.new(@merge_request.user, @merge_request).execute({
      :state => "Closed"
    })
    assert_equal "Closed", @merge_request.reload.status_tag.to_s
  end

  should "not allow non-owners to update status" do
    outcome = CreateMergeRequestComment.new(users(:moe), @merge_request).execute({
      :state => "Closed"
    })
    # BUGFIX: was `outcome.success` — the outcome API used elsewhere in this
    # file is the `success?` predicate.
    refute outcome.success?, outcome.to_s
  end

  should "add to user's favorites" do
    user = users(:moe)
    assert_difference "user.favorites.count" do
      outcome = CreateMergeRequestComment.new(user, @merge_request).execute({
        :body => "Nice going!",
        :add_to_favorites => true
      })
    end
    assert @merge_request.watched_by?(user)
  end
end
|
require_relative 'environment'
# True when the ENVIRONMENT variable marks this process as production.
def environment_is_production
  ENV.fetch('ENVIRONMENT', nil) == 'production'
end
# Builds the canonical empty posting record: every known field present and
# initialized to nil, with nested :details and :specs hashes for listing
# attributes and physical specifications respectively.
def json_template
  top_level_keys = [:posting_id, :locale, :type, :property_type, :date, :additional_info]
  detail_keys = [
    :daily_rent, :for_rent, :for_sale, :for_lease, :est_lease_price,
    :rent_price, :rent_price_currency, :rent_price_sq_meter,
    :rent_price_dollars, :rent_price_sq_meter_dollars,
    :rent_price_exchange_rate_to_dollars,
    :sale_price, :sale_price_currency, :sale_price_sq_meter,
    :sale_price_dollars, :sale_price_sq_meter_dollars,
    :sale_price_exchange_rate_to_dollars,
    :space, :space_measurement, :land, :land_measurement,
    :renovation, :metro, :view, :project, :condition, :function,
    :address, :address_city, :address_area, :address_district,
    :address_street, :address_number, :phone, :cadastral
  ]
  spec_keys = [
    :all_floors, :floor, :rooms, :bedrooms, :conference_room, :suites,
    :wc, :bathroom, :shower, :fireplace, :air_conditioner, :balcony,
    :veranda, :loft, :bodrum, :mansard, :parking, :garage,
    :dist_from_tbilisi, :dist_from_cent_street, :box, :buildings,
    :administration_building, :workroom, :stockroom,
    :coefficient_k1, :coefficient_k2
  ]
  json = {}
  top_level_keys.each { |k| json[k] = nil }
  json[:details] = detail_keys.each_with_object({}) { |k, h| h[k] = nil }
  json[:specs] = spec_keys.each_with_object({}) { |k, h| h[k] = nil }
  json
end
# Recursively creates `file_path` (like mkdir -p), skipping nil paths and
# the current directory ".".
def create_directory(file_path)
  return if file_path.nil? || file_path == "."
  FileUtils.mkpath(file_path)
end
# get the parent folder for the provided id
# - the folder is the id minus it's last 3 digits
# Returns the parent folder for the provided id: the id with its last three
# digits removed, or "" when the id has three or fewer digits.
#
# BUGFIX: the old slice `id.to_s[0..id.to_s.length-4]` produced a negative
# end index for short ids (e.g. 12 -> "1", 123 -> "123") instead of "".
def get_parent_id_folder(id)
  s = id.to_s
  s.length > 3 ? s[0..-4] : ""
end
# Looks up the locale key (e.g. :en) whose configured :id equals `locale_id`.
# Reads the @locales configuration hash; returns nil when nothing matches.
def get_locale_key(locale_id)
  @locales.keys.find { |key| @locales[key][:id] == locale_id }
end
# determine the type of page being viewed
# Determines the type of page being viewed: the first configured type string
# (from @locales[key][:types]) that the lowercased text begins with.
# Returns nil for an unknown locale or when no type matches.
def get_page_type(text, locale_id)
  key = get_locale_key(locale_id)
  return nil if key.nil?
  @locales[key][:types].values.find { |t| text.downcase.index(t) == 0 }
end
# determine the property type of page being viewed
# Determines the property type of the page being viewed: the first configured
# property-type string (from @locales[key][:property_types]) contained
# anywhere in the lowercased text. Returns nil for an unknown locale or when
# no type matches.
def get_property_type(text, locale_id)
  key = get_locale_key(locale_id)
  return nil if key.nil?
  @locales[key][:property_types].values.find { |t| text.downcase.include?(t) }
end
# pull out a query parameter value for a particular key
# Extracts the value of query parameter `key` from `url`.
# Returns nil when the url has no query string, the key is absent, or the
# parameter has no value (e.g. "?id=").
#
# BUGFIX: previously a query string that lacked the requested key crashed
# with NoMethodError — Array#select returns [] (never nil), so the old
# `!param.nil?` guard always passed and `param.first.split` called split
# on nil.
def get_param_value(url, key)
  query = url.split('?')[1]
  return nil if query.nil?
  pair = query.split('&').find { |p| p.index(key + '=') == 0 }
  pair && pair.split('=')[1]
end
# pull out the id of each property from the link
# Walks the search-result links, pulls the posting id out of each href's
# "id" query parameter, and records unseen ids via @status. Stops early —
# setting @finished_scraping_new_post_ids — once a previously processed id
# or the scrape limit is hit, since results are assumed newest-first.
# NOTE(review): the local `ids` array and the `index` block param are unused.
def pull_out_ids(search_results)
ids = []
search_results.each_with_index do |search_result, index|
id = get_param_value(search_result['href'], 'id')
if !id.nil?
# if we find the id that was process during the last run, stop
# for we have found all of the new ids
if @status.has_processed_id?(id) || reached_max_num_ids_to_scrape
@finished_scraping_new_post_ids = true
break
end
@num_ids_to_scrape += 1
@status.save_new_id(id)
end
end
end
# create sql for insert statements
# Builds the INSERT for the postings table from the scraped json hash.
# Replaces ~280 lines of copy-pasted nil checks with column lists; the
# column order is significant and mirrors the original hand-written
# statement exactly: source/locale/created_at first, then top-level
# fields, then "details" fields, then "specs" fields. Only non-nil
# values are emitted. The json "condition" key is stored in the SQL
# column "place_condition".
def create_sql_insert(mysql, json, source, locale)
  top_level_columns = %w[posting_id type property_type date additional_info]
  detail_columns = %w[
    daily_rent for_rent for_sale for_lease est_lease_price
    rent_price rent_price_currency rent_price_exchange_rate_to_dollars
    rent_price_dollars rent_price_sq_meter rent_price_sq_meter_dollars
    sale_price sale_price_currency sale_price_exchange_rate_to_dollars
    sale_price_dollars sale_price_sq_meter sale_price_sq_meter_dollars
    space space_measurement land land_measurement renovation view metro
    project condition function address address_city address_area
    address_district address_street address_number phone cadastral
  ]
  spec_columns = %w[
    all_floors floor rooms bedrooms conference_room suites wc bathroom
    shower fireplace air_conditioner balcony veranda loft bodrum mansard
    parking garage dist_from_tbilisi dist_from_cent_street box buildings
    administration_building workroom stockroom coefficient_k1 coefficient_k2
  ]
  fields = %w[source locale created_at]
  values = [source, locale, Time.now.strftime('%Y-%m-%d %H:%M:%S')]
  top_level_columns.each do |col|
    value = json[col]
    next if value.nil?
    fields << col
    values << value
  end
  detail_columns.each do |col|
    value = json["details"][col]
    next if value.nil?
    # "condition" is an awkward column name; it lives in place_condition
    fields << (col == 'condition' ? 'place_condition' : col)
    values << value
  end
  spec_columns.each do |col|
    value = json["specs"][col]
    next if value.nil?
    fields << col
    values << value
  end
  escaped = values.map { |x| "\"#{mysql.escape(x.to_s)}\"" }
  "insert into postings(#{fields.join(', ')}) values(#{escaped.join(', ')})"
end
# delete the record if it already exists
# Builds a DELETE scoped to one posting in one locale; both values are
# escaped through the supplied mysql client before interpolation.
def delete_record_sql(mysql, posting_id, locale)
  "delete from postings where posting_id = '#{mysql.escape(posting_id.to_s)}' and locale = '#{mysql.escape(locale.to_s)}'"
end
# update github with any changes
# Commits and pushes the database dump and status file. Returns false
# (without touching git) outside production so dev runs never push.
def update_github
  unless environment_is_production
    puts 'NOT updating github because environment is not production'
    return false
  end
  puts 'pushing database to github'
  @log.info "------------------------------"
  @log.info "updating git"
  @log.info "------------------------------"
  # The Subexec results were previously captured into an unused local
  # `x`; nothing inspects them, so run the commands directly.
  Subexec.run "git add #{@db_dump_file} #{@status_file_name}"
  Subexec.run "git commit -m 'Updated database dump file and status.json with new makler.ge data'"
  Subexec.run "git push origin master"
end
# Zip file_path next to itself (foo.html -> foo.html.zip) and remove the
# original. If compression fails the error is logged and the original
# file is KEPT — the previous version deleted it even on failure, losing
# the only copy of the data.
def compress_file(file_path)
  file_name = File.basename(file_path)
  dir_path = File.dirname(file_path)
  compressed_file_path = "#{dir_path}/#{file_name}.zip"
  begin
    Zip::File.open(compressed_file_path, Zip::File::CREATE) do |zipfile|
      zipfile.add(file_name, file_path)
    end
  rescue StandardError => e
    @data_files_log.error "Could not zip #{file_path} ---> #{compressed_file_path}; error: #{e}"
    return # keep the uncompressed original when zipping failed
  end
  File.delete(file_path)
end
# True once a scrape cap is configured and the number of ids queued so
# far has reached it; always false when no cap is set.
def reached_max_num_ids_to_scrape
  return false if @max_num_ids_to_scrape.nil?
  @num_ids_to_scrape >= @max_num_ids_to_scrape
end
# Compress every uncompressed data file. Computes the file list once
# instead of globbing the data directory twice as the previous version
# did (once for the emptiness check, once for the iteration).
def compress_data_files
  files = uncompressed_data_files
  if files.empty?
    puts 'Data files are already compressed!'
    return
  end
  files.each { |file| compress_file(file) }
end
# All raw scrape output (html then json) still on disk uncompressed,
# searched recursively under @data_path.
def uncompressed_data_files
  %w[html json].flat_map { |ext| Dir.glob("#{@data_path}/**/*.#{ext}") }
end
# Discard local modifications to `file` by checking out the committed copy.
# NOTE(review): `file` is interpolated into a shell command unescaped —
# a path with spaces or shell metacharacters would break the command;
# acceptable for the repo-controlled paths this is called with, but
# confirm before reusing with arbitrary input.
def git_checkout_file(file)
  puts "Running 'git checkout -- #{file}'"
  `git checkout -- #{file}`
end
Improved pull_out_ids method code.
require_relative 'environment'
# True only when the ENVIRONMENT variable is exactly "production".
def environment_is_production
  ENV['ENVIRONMENT'] == 'production'
end
# Skeleton hash for one scraped posting: top-level metadata plus nested
# :details and :specs sections, with every field initialised to nil.
# Key insertion order is preserved to match the original literal layout.
def json_template
  detail_keys = %i[
    daily_rent for_rent for_sale for_lease est_lease_price
    rent_price rent_price_currency rent_price_sq_meter rent_price_dollars
    rent_price_sq_meter_dollars rent_price_exchange_rate_to_dollars
    sale_price sale_price_currency sale_price_sq_meter sale_price_dollars
    sale_price_sq_meter_dollars sale_price_exchange_rate_to_dollars
    space space_measurement land land_measurement renovation metro view
    project condition function address address_city address_area
    address_district address_street address_number phone cadastral
  ]
  spec_keys = %i[
    all_floors floor rooms bedrooms conference_room suites wc bathroom
    shower fireplace air_conditioner balcony veranda loft bodrum mansard
    parking garage dist_from_tbilisi dist_from_cent_street box buildings
    administration_building workroom stockroom coefficient_k1 coefficient_k2
  ]
  json = {
    posting_id: nil, locale: nil, type: nil,
    property_type: nil, date: nil, additional_info: nil
  }
  json[:details] = detail_keys.each_with_object({}) { |k, h| h[k] = nil }
  json[:specs] = spec_keys.each_with_object({}) { |k, h| h[k] = nil }
  json
end
# Make the directory tree for file_path, skipping nil and the current
# directory (".") where nothing needs to be created.
def create_directory(file_path)
  return if file_path.nil? || file_path == "."
  FileUtils.mkpath(file_path)
end
# get the parent folder for the provided id
# - the folder is the id minus its last 3 digits
# Uses an exclusive negative slice so ids of 3 or fewer digits yield ""
# instead of wrapping around: the previous `[0..length-4]` computed -1
# for a 3-digit id and returned the whole id.
def get_parent_id_folder(id)
  id.to_s[0...-3]
end
# Resolve a numeric locale id back to its key in @locales (e.g. 1 -> :en).
# Returns nil when no configured locale carries that id.
def get_locale_key(locale_id)
  @locales.keys.find { |key| @locales[key][:id] == locale_id }
end
# determine the type of page being viewed
# A page matches a type when the (downcased) text begins with that
# type's label for the resolved locale. Returns nil for an unknown
# locale or when no label matches.
def get_page_type(text, locale_id)
  key = get_locale_key(locale_id)
  return if key.nil?
  @locales[key][:types].values.find { |label| text.downcase.start_with?(label) }
end
# determine the property type of page being viewed
# A property type matches when its label appears anywhere in the
# (downcased) text. Returns nil for an unknown locale or no match.
def get_property_type(text, locale_id)
  key = get_locale_key(locale_id)
  return if key.nil?
  @locales[key][:property_types].values.find { |label| text.downcase.include?(label) }
end
# pull out a query parameter value for a particular key
# Returns the raw (still URL-encoded) value of `key` in url's query
# string, or nil when there is no query string or the key is absent.
# Fixes a crash in the previous version: Array#select never returns nil,
# so the `!param.nil?` guard always passed and `param.first.split`
# raised NoMethodError whenever the key was missing.
def get_param_value(url, key)
  return nil unless url.include?('?')
  query = url.split('?').last
  return nil if query.nil?
  pair = query.split('&').find { |p| p.start_with?("#{key}=") }
  pair && pair.split('=')[1]
end
# pull out the id of each property from the link
# Collects new posting ids from the search result anchors. Iteration
# stops at the first id already processed on a previous run, or when the
# scrape cap is reached — everything past that point was collected
# before, so the remaining results are old data.
def pull_out_ids(search_results)
  search_results.each do |search_result|
    id = get_param_value(search_result['href'], 'id')
    next if id.nil?
    if @status.has_processed_id?(id) || reached_max_num_ids_to_scrape
      # flag consumed by the caller to stop paging through results
      @finished_scraping_new_post_ids = true
      break
    end
    @num_ids_to_scrape += 1
    @status.save_new_id(id)
  end
end
# create sql for insert statements
# Builds the INSERT for the postings table from the scraped json hash.
# Replaces ~280 lines of copy-pasted nil checks with column lists; the
# column order is significant and mirrors the original hand-written
# statement exactly: source/locale/created_at first, then top-level
# fields, then "details" fields, then "specs" fields. Only non-nil
# values are emitted. The json "condition" key is stored in the SQL
# column "place_condition".
def create_sql_insert(mysql, json, source, locale)
  top_level_columns = %w[posting_id type property_type date additional_info]
  detail_columns = %w[
    daily_rent for_rent for_sale for_lease est_lease_price
    rent_price rent_price_currency rent_price_exchange_rate_to_dollars
    rent_price_dollars rent_price_sq_meter rent_price_sq_meter_dollars
    sale_price sale_price_currency sale_price_exchange_rate_to_dollars
    sale_price_dollars sale_price_sq_meter sale_price_sq_meter_dollars
    space space_measurement land land_measurement renovation view metro
    project condition function address address_city address_area
    address_district address_street address_number phone cadastral
  ]
  spec_columns = %w[
    all_floors floor rooms bedrooms conference_room suites wc bathroom
    shower fireplace air_conditioner balcony veranda loft bodrum mansard
    parking garage dist_from_tbilisi dist_from_cent_street box buildings
    administration_building workroom stockroom coefficient_k1 coefficient_k2
  ]
  fields = %w[source locale created_at]
  values = [source, locale, Time.now.strftime('%Y-%m-%d %H:%M:%S')]
  top_level_columns.each do |col|
    value = json[col]
    next if value.nil?
    fields << col
    values << value
  end
  detail_columns.each do |col|
    value = json["details"][col]
    next if value.nil?
    # "condition" is an awkward column name; it lives in place_condition
    fields << (col == 'condition' ? 'place_condition' : col)
    values << value
  end
  spec_columns.each do |col|
    value = json["specs"][col]
    next if value.nil?
    fields << col
    values << value
  end
  escaped = values.map { |x| "\"#{mysql.escape(x.to_s)}\"" }
  "insert into postings(#{fields.join(', ')}) values(#{escaped.join(', ')})"
end
# delete the record if it already exists
# Builds a DELETE scoped to one posting in one locale; both values are
# escaped through the supplied mysql client before interpolation.
def delete_record_sql(mysql, posting_id, locale)
  "delete from postings where posting_id = '#{mysql.escape(posting_id.to_s)}' and locale = '#{mysql.escape(locale.to_s)}'"
end
# update github with any changes
# Commits and pushes the database dump and status file. Returns false
# (without touching git) outside production so dev runs never push.
def update_github
  unless environment_is_production
    puts 'NOT updating github because environment is not production'
    return false
  end
  puts 'pushing database to github'
  @log.info "------------------------------"
  @log.info "updating git"
  @log.info "------------------------------"
  # The Subexec results were previously captured into an unused local
  # `x`; nothing inspects them, so run the commands directly.
  Subexec.run "git add #{@db_dump_file} #{@status_file_name}"
  Subexec.run "git commit -m 'Updated database dump file and status.json with new makler.ge data'"
  Subexec.run "git push origin master"
end
# Zip file_path next to itself (foo.html -> foo.html.zip) and remove the
# original. If compression fails the error is logged and the original
# file is KEPT — the previous version deleted it even on failure, losing
# the only copy of the data.
def compress_file(file_path)
  file_name = File.basename(file_path)
  dir_path = File.dirname(file_path)
  compressed_file_path = "#{dir_path}/#{file_name}.zip"
  begin
    Zip::File.open(compressed_file_path, Zip::File::CREATE) do |zipfile|
      zipfile.add(file_name, file_path)
    end
  rescue StandardError => e
    @data_files_log.error "Could not zip #{file_path} ---> #{compressed_file_path}; error: #{e}"
    return # keep the uncompressed original when zipping failed
  end
  File.delete(file_path)
end
# True once a scrape cap is configured and the number of ids queued so
# far has reached it; always false when no cap is set.
def reached_max_num_ids_to_scrape
  return false if @max_num_ids_to_scrape.nil?
  @num_ids_to_scrape >= @max_num_ids_to_scrape
end
# Compress every uncompressed data file. Computes the file list once
# instead of globbing the data directory twice as the previous version
# did (once for the emptiness check, once for the iteration).
def compress_data_files
  files = uncompressed_data_files
  if files.empty?
    puts 'Data files are already compressed!'
    return
  end
  files.each { |file| compress_file(file) }
end
# All raw scrape output (html then json) still on disk uncompressed,
# searched recursively under @data_path.
def uncompressed_data_files
  %w[html json].flat_map { |ext| Dir.glob("#{@data_path}/**/*.#{ext}") }
end
# Discard local modifications to `file` by checking out the committed copy.
# NOTE(review): `file` is interpolated into a shell command unescaped —
# a path with spaces or shell metacharacters would break the command;
# acceptable for the repo-controlled paths this is called with, but
# confirm before reusing with arbitrary input.
def git_checkout_file(file)
  puts "Running 'git checkout -- #{file}'"
  `git checkout -- #{file}`
end
|
Moved the AppVeyor restart code into a separate file.
# Kick off a new AppVeyor build for the given account/project/branch via
# the AppVeyor REST API (POST /api/builds).
# NOTE(review): ENV['APPVEYOR_TOKEN'] must be set — concatenating nil
# into the Authorization header raises TypeError otherwise.
def restart_by_branch(name, slug, branch)
  body = {
    accountName: name,
    projectSlug: slug,
    branch: branch
  }
  url = 'https://ci.appveyor.com/api/builds'
  HTTParty.post(url,
    :body => body,
    :headers => {"Authorization" => 'Bearer ' + ENV['APPVEYOR_TOKEN']})
end
# Restart CI for "account/project": always rebuild master, and also
# rebuild the branch of the latest build when it is not master.
# Collapses the previous if/else (both arms restarted master) and drops
# an unused `url` local.
def restart_appveyor(repo)
  lb_url = 'https://ci.appveyor.com/api/projects/'
  out = HTTParty.get(lb_url + repo)
  last_branch = out['build']['branch']
  account_name, project_slug = repo.split('/')
  restart_by_branch(account_name, project_slug, 'master')
  restart_by_branch(account_name, project_slug, last_branch) unless last_branch == 'master'
end
desc "Builds Appveyor job with token ENV['APPVEYOR_TOKEN']"
# Restart AppVeyor CI for every repo in the hard-coded list below.
task :runappveyor do
  appveyor_repos = ['sckott/rgbif','sckott/alm','sckott/rnoaa','sckott/rWBclimate',
    'sckott/rinat','sckott/treeBASE','sckott/rgauges','sckott/rplos','sckott/rsnps',
    'sckott/solr','sckott/rentrez','sckott/taxize','karthik/rAltmetric','karthik/AntWeb',
    'karthik/rbison','karthik/ecoengine','karthik/rebird','karthik/rfisheries',
    'karthik/spocc']
  # each repo gets its master branch (and latest branch) rebuilt
  appveyor_repos.each do |iter|
    restart_appveyor(iter)
  end
end
|
Forgotten exercise.
#!/usr/bin/env ruby
#
# Can you iterate through a hash?
#
h = {'a' => 'abc', 'b' => 'cba'}
# Iterate the keys and look each value up. (Idiomatic Ruby avoids `for`,
# which leaks its loop variable into the surrounding scope.)
h.each_key do |key|
  puts key + " - " + h[key]
end
# Iterate key/value pairs directly — same output, no lookup needed.
h.each { |key, value| puts key + " - " + value }
require 'rails_helper'
# This spec was generated by rspec-rails when you ran the scaffold generator.
# It demonstrates how one might use RSpec to specify the controller code that
# was generated by Rails when you ran the scaffold generator.
#
# It assumes that the implementation code is generated by the rails scaffold
# generator. If you are using any extension libraries to generate different
# controller code, this generated spec may or may not pass.
#
# It only uses APIs available in rails and/or rspec-rails. There are a number
# of tools you can use to make these specs even more expressive, but we're
# sticking to rails and rspec-rails APIs to keep things simple and stable.
#
# Compared to earlier versions of this generator, there is very limited use of
# stubs and message expectations in this spec. Stubs are only used when there
# is no simpler way to get a handle on the object needed for the example.
# Message expectations are only used when there is no simpler way to specify
# that an instance is receiving a specific message.
#
# Also compared to earlier versions of this generator, there are no longer any
# expectations of assigns and templates rendered. These features have been
# removed from Rails core in Rails 5, but can be added back in via the
# `rails-controller-testing` gem.
RSpec.describe Admin::PhotoAttrsController, type: :controller do
  # Minimal attributes needed to create a valid/invalid Admin::PhotoAttr.
  # The scaffold leaves these skipped until real values are supplied.
  let(:valid_attributes) {
    skip("Add a hash of attributes valid for your model")
  }

  let(:invalid_attributes) {
    skip("Add a hash of attributes invalid for your model")
  }

  # Session values required to pass any filters (e.g. authentication).
  let(:valid_session) { {} }

  # NOTE: request helpers below use the Rails 5 keyword form
  # (params:/session:) — the positional form in the generated scaffold
  # is deprecated in Rails 5.0 and removed in 5.1.
  describe "GET #index" do
    it "returns a success response" do
      photo_attr = Admin::PhotoAttr.create! valid_attributes
      get :index, params: {}, session: valid_session
      expect(response).to be_success
    end
  end

  describe "GET #show" do
    it "returns a success response" do
      photo_attr = Admin::PhotoAttr.create! valid_attributes
      get :show, params: { id: photo_attr.to_param }, session: valid_session
      expect(response).to be_success
    end
  end

  describe "GET #new" do
    it "returns a success response" do
      get :new, params: {}, session: valid_session
      expect(response).to be_success
    end
  end

  describe "GET #edit" do
    it "returns a success response" do
      photo_attr = Admin::PhotoAttr.create! valid_attributes
      get :edit, params: { id: photo_attr.to_param }, session: valid_session
      expect(response).to be_success
    end
  end

  describe "POST #create" do
    context "with valid params" do
      it "creates a new Admin::PhotoAttr" do
        expect {
          post :create, params: { admin_photo_attr: valid_attributes }, session: valid_session
        }.to change(Admin::PhotoAttr, :count).by(1)
      end

      it "redirects to the created admin_photo_attr" do
        post :create, params: { admin_photo_attr: valid_attributes }, session: valid_session
        expect(response).to redirect_to(Admin::PhotoAttr.last)
      end
    end

    context "with invalid params" do
      it "returns a success response (i.e. to display the 'new' template)" do
        post :create, params: { admin_photo_attr: invalid_attributes }, session: valid_session
        expect(response).to be_success
      end
    end
  end

  describe "PUT #update" do
    context "with valid params" do
      let(:new_attributes) {
        skip("Add a hash of attributes valid for your model")
      }

      it "updates the requested admin_photo_attr" do
        photo_attr = Admin::PhotoAttr.create! valid_attributes
        put :update, params: { id: photo_attr.to_param, admin_photo_attr: new_attributes }, session: valid_session
        photo_attr.reload
        skip("Add assertions for updated state")
      end

      it "redirects to the admin_photo_attr" do
        photo_attr = Admin::PhotoAttr.create! valid_attributes
        put :update, params: { id: photo_attr.to_param, admin_photo_attr: valid_attributes }, session: valid_session
        expect(response).to redirect_to(photo_attr)
      end
    end

    context "with invalid params" do
      it "returns a success response (i.e. to display the 'edit' template)" do
        photo_attr = Admin::PhotoAttr.create! valid_attributes
        put :update, params: { id: photo_attr.to_param, admin_photo_attr: invalid_attributes }, session: valid_session
        expect(response).to be_success
      end
    end
  end

  describe "DELETE #destroy" do
    it "destroys the requested admin_photo_attr" do
      photo_attr = Admin::PhotoAttr.create! valid_attributes
      expect {
        delete :destroy, params: { id: photo_attr.to_param }, session: valid_session
      }.to change(Admin::PhotoAttr, :count).by(-1)
    end

    it "redirects to the admin_photo_attrs list" do
      photo_attr = Admin::PhotoAttr.create! valid_attributes
      delete :destroy, params: { id: photo_attr.to_param }, session: valid_session
      expect(response).to redirect_to(admin_photo_attrs_url)
    end
  end
end
Remove unnecessary specs
|
require 'rails_helper'
RSpec.describe Api::V1::ActivitiesController, type: :controller do
  describe '#index' do
    it 'can return activities' do
      create(:activity) # factory ensures at least one record exists
      get :index
      expect(response).to be_success
    end
  end
  # The remaining actions are placeholders awaiting examples.
  describe '#show' do
  end
  describe '#create' do
  end
  describe '#update' do
  end
  describe '#destroy' do
  end
end
spec for create activities
require 'rails_helper'
RSpec.describe Api::V1::ActivitiesController, type: :controller do
  describe '#index' do
    it 'can return activities' do
      create(:activity) # factory ensures at least one record exists
      get :index
      expect(response).to be_success
    end
  end

  describe '#show' do
  end

  describe '#create' do
    # JSON:API-style payload for the create action.
    let(:hash) {
      {
        'data' => {
          'type' => 'activities',
          'attributes' => {
            'name' => 'Activity 1',
            'description' => 'This is a new activity.'
          }
        }
      }
    }

    it 'can create a new activity from a hash' do
      expect {
        # Rails 5 keyword form; the positional `post :create, hash` is
        # deprecated in 5.0 and removed in 5.1.
        post :create, params: hash
        expect(response).to be_success
      }.to change { Activity.count }.from(0).to(1)
    end
  end

  describe '#update' do
  end

  describe '#destroy' do
  end
end
|
# frozen_string_literal: true
require 'spec_helper'
describe Spree::Admin::UsersController do
  context '#authorize_admin' do
    # `user` is the signed-in actor; `test_user` is the record the
    # admin-only actions operate on.
    let(:user) { create(:user) }
    let(:test_user) { create(:user) }

    before do
      allow(controller).to receive_messages spree_current_user: user
      allow(Spree::User).to receive(:find).with(test_user.id.to_s).and_return(test_user)
      # start with no roles so each example grants exactly what it needs
      user.spree_roles.clear
    end

    it 'should grant access to users with an admin role' do
      user.spree_roles << Spree::Role.find_or_create_by(name: 'admin')
      spree_post :index
      expect(response).to render_template :index
    end

    it "allows admins to update a user's API key" do
      user.spree_roles << Spree::Role.find_or_create_by(name: 'admin')
      expect(test_user).to receive(:generate_spree_api_key!).and_return(true)
      # (removed leftover `puts` debugging of user ids)
      spree_put :generate_api_key, id: test_user.id
      expect(response).to redirect_to(spree.edit_admin_user_path(test_user))
    end

    it "allows admins to clear a user's API key" do
      user.spree_roles << Spree::Role.find_or_create_by(name: 'admin')
      expect(test_user).to receive(:clear_spree_api_key!).and_return(true)
      spree_put :clear_api_key, id: test_user.id
      expect(response).to redirect_to(spree.edit_admin_user_path(test_user))
    end

    it 'should deny access to users without an admin role' do
      allow(user).to receive_messages has_spree_role?: false
      spree_post :index
      expect(response).to redirect_to('/unauthorized')
    end
  end
end
Remove puts debugging in tests
# frozen_string_literal: true
require 'spec_helper'
describe Spree::Admin::UsersController do
  context '#authorize_admin' do
    # `user` is the signed-in actor; `test_user` is the record the
    # admin-only actions operate on.
    let(:user) { create(:user) }
    let(:test_user) { create(:user) }
    before do
      allow(controller).to receive_messages spree_current_user: user
      allow(Spree::User).to receive(:find).with(test_user.id.to_s).and_return(test_user)
      # start with no roles so each example grants exactly what it needs
      user.spree_roles.clear
    end
    it 'should grant access to users with an admin role' do
      user.spree_roles << Spree::Role.find_or_create_by(name: 'admin')
      spree_post :index
      expect(response).to render_template :index
    end
    it "allows admins to update a user's API key" do
      user.spree_roles << Spree::Role.find_or_create_by(name: 'admin')
      expect(test_user).to receive(:generate_spree_api_key!).and_return(true)
      spree_put :generate_api_key, id: test_user.id
      expect(response).to redirect_to(spree.edit_admin_user_path(test_user))
    end
    it "allows admins to clear a user's API key" do
      user.spree_roles << Spree::Role.find_or_create_by(name: 'admin')
      expect(test_user).to receive(:clear_spree_api_key!).and_return(true)
      spree_put :clear_api_key, id: test_user.id
      expect(response).to redirect_to(spree.edit_admin_user_path(test_user))
    end
    it 'should deny access to users without an admin role' do
      allow(user).to receive_messages has_spree_role?: false
      spree_post :index
      expect(response).to redirect_to('/unauthorized')
    end
  end
end
|
require 'coffee_script'
#Use this file to set/override Jasmine configuration options
#You can remove it if you don't need it.
#This file is loaded *after* jasmine.yml is interpreted.
#
#Example: using a different boot file.
#Jasmine.configure do |config|
# config.boot_dir = '/absolute/path/to/boot_dir'
# config.boot_files = lambda { ['/absolute/path/to/boot_dir/file.js'] }
#end
#
#Example: prevent PhantomJS auto install, uses PhantomJS already on your path.
#Jasmine.configure do |config|
# config.prevent_phantom_js_auto_install = true
#end
#
# Forward browser console.log output to the rake task's stdout.
Jasmine.configure do |config|
config.show_console_log = true
end
puts 'To skip installation pass rake jasmine PASS_INSTALL=true'
# Regenerate the marionette fixture files unless the caller opted out.
# NOTE(review): any non-empty PASS_INSTALL value skips the install, not
# just "true" as the message suggests.
unless ENV["PASS_INSTALL"]
# Destroy, then re-run, the base install so generation starts clean.
system('bundle exec rails d marionette:install')
system('bundle exec rails g marionette:install')
system("rails g marionette:view TestLayout1 Layout")
system("rails g marionette:view TestLayout2 Layout")
system("rails g marionette:view TestLayout3 Layout")
system("rails g marionette:view TestItemView1 ItemView title:string description:text url:text phone:string quantity:integer float_number:float decimal_number:decimal full:boolean email:string password:string")
system("rails g marionette:view TestItemView2 ItemView")
system("rails g marionette:view test_module/TestItemView3 ItemView")
system("rails g marionette:view TestSubsubmodule/test_submodule/TestModule/testItemView4 ItemView")
# CollectionView/CompositeView generators are disabled in this revision.
# system("rails g marionette:view TestCollectionView1 CollectionView")
# system("rails g marionette:view admin/TestCollectionView2 CollectionView")
# system("rails g marionette:view TestSubsubmodule/test_submodule/TestModule/TestCollectionView3 CollectionView")
# system("rails g marionette:view TestCompositeView1 CompositeView")
# system("rails g marionette:view test_module/TestCompositeView2 CompositeView")
# system("rails g marionette:view TestSubsubmodule/test_submodule/TestModule/testCompositeView3 CompositeView")
end
Uncomment the CollectionView and CompositeView generator calls in the Jasmine install script
require 'coffee_script'

# Jasmine configuration overrides. This file is loaded *after* jasmine.yml
# has been interpreted, so settings here take precedence; delete the file if
# no overrides are needed. (See the jasmine gem docs for other options such
# as config.boot_dir, config.boot_files and
# config.prevent_phantom_js_auto_install.)
Jasmine.configure do |config|
  # Forward browser console.log output to the rake task's stdout.
  config.show_console_log = true
end

puts 'To skip installation pass rake jasmine PASS_INSTALL=true'

unless ENV["PASS_INSTALL"]
  # Destroy, then re-run, the base install so generation starts clean.
  system('bundle exec rails d marionette:install')
  system('bundle exec rails g marionette:install')

  # Generator invocations exercised by the suite, run in declaration order.
  [
    "TestLayout1 Layout",
    "TestLayout2 Layout",
    "TestLayout3 Layout",
    "TestItemView1 ItemView title:string description:text url:text phone:string quantity:integer float_number:float decimal_number:decimal full:boolean email:string password:string",
    "TestItemView2 ItemView",
    "test_module/TestItemView3 ItemView",
    "TestSubsubmodule/test_submodule/TestModule/testItemView4 ItemView",
    "TestCollectionView1 CollectionView",
    "admin/TestCollectionView2 CollectionView",
    "TestSubsubmodule/test_submodule/TestModule/TestCollectionView3 CollectionView",
    "TestCompositeView1 CompositeView",
    "test_module/TestCompositeView2 CompositeView",
    "TestSubsubmodule/test_submodule/TestModule/testCompositeView3 CompositeView"
  ].each do |view_args|
    system("rails g marionette:view #{view_args}")
  end
end
|
# Unit specs for Knapsack::Distributors::ReportDistributor: sorting of the
# timing report, time totals, and distribution of spec files across CI nodes.
describe Knapsack::Distributors::ReportDistributor do
let(:args) { {} }
let(:default_report) { { 'default_report_spec.rb' => 1.0 } }
let(:distributor) { described_class.new(args) }
before do
# Stub the global report so the distributor never touches the filesystem.
allow(Knapsack).to receive(:report) {
instance_double(Knapsack::Report, open: default_report)
}
end
describe '#sorted_report' do
subject { distributor.sorted_report }
let(:report) do
{
'e_spec.rb' => 3.0,
'f_spec.rb' => 3.5,
'c_spec.rb' => 2.0,
'd_spec.rb' => 2.5,
'a_spec.rb' => 1.0,
'b_spec.rb' => 1.5,
}
end
let(:args) { { report: report } }
# Entries come back ordered by descending execution time.
it do
should eql([
['f_spec.rb', 3.5],
['e_spec.rb', 3.0],
['d_spec.rb', 2.5],
['c_spec.rb', 2.0],
['b_spec.rb', 1.5],
['a_spec.rb', 1.0],
])
end
end
describe '#sorted_report_with_existing_specs' do
subject { distributor.sorted_report_with_existing_specs }
before do
# One #all_specs call per report entry (6 in total); only b/d/f still
# exist on disk, so the others must be filtered out of the result.
expect(distributor).to receive(:all_specs).exactly(6).times.and_return([
'b_spec.rb',
'd_spec.rb',
'f_spec.rb',
])
end
let(:report) do
{
'e_spec.rb' => 3.0,
'f_spec.rb' => 3.5,
'c_spec.rb' => 2.0,
'd_spec.rb' => 2.5,
'a_spec.rb' => 1.0,
'b_spec.rb' => 1.5,
}
end
let(:args) { { report: report } }
it do
should eql([
['f_spec.rb', 3.5],
['d_spec.rb', 2.5],
['b_spec.rb', 1.5],
])
end
end
context do
let(:report) do
{
'a_spec.rb' => 3.0,
'b_spec.rb' => 1.0,
'c_spec.rb' => 1.5,
}
end
let(:args) { { report: report } }
before do
allow(distributor).to receive(:all_specs).and_return(report.keys)
end
describe '#total_time_execution' do
subject { distributor.total_time_execution }
context 'when time is float' do
it { should eql 5.5 }
end
context 'when time is not float' do
# Integer times must still sum to a Float.
let(:report) do
{
'a_spec.rb' => 3,
'b_spec.rb' => 1,
}
end
it { should eql 4.0 }
end
end
describe '#node_time_execution' do
subject { distributor.node_time_execution }
let(:args) { { report: report, ci_node_total: 4 } }
# 5.5 total execution time split over 4 nodes.
it { should eql 1.375 }
end
end
context do
let(:report) do
{
'g_spec.rb' => 9.0,
'h_spec.rb' => 3.0,
'i_spec.rb' => 3.0,
'f_spec.rb' => 3.5,
'c_spec.rb' => 2.0,
'd_spec.rb' => 2.5,
'a_spec.rb' => 1.0,
'b_spec.rb' => 1.5,
}
end
let(:args) do
{
report: report,
ci_node_total: 3,
}
end
before do
allow(distributor).to receive(:all_specs).and_return(report.keys)
end
describe '#assign_spec_files_to_node' do
before { distributor.assign_spec_files_to_node }
# time_left below is node budget (25.5 / 3 = 8.5) minus assigned time.
it do
expect(distributor.node_specs[0]).to eql({
:node_index => 0,
:time_left => -0.5,
:spec_files_with_time => [
["g_spec.rb", 9.0]
]
})
end
it do
expect(distributor.node_specs[1]).to eql({
:node_index => 1,
:time_left => 0.0,
:spec_files_with_time => [
["f_spec.rb", 3.5],
["d_spec.rb", 2.5],
["a_spec.rb", 1.0],
["b_spec.rb", 1.5]
]
})
end
it do
expect(distributor.node_specs[2]).to eql({
:node_index => 2,
:time_left => 0.5,
:spec_files_with_time => [
["h_spec.rb", 3.0],
["c_spec.rb", 2.0],
["i_spec.rb", 3.0]
]
})
end
end
describe '#specs_for_node' do
context 'when node exists' do
it do
expect(distributor.specs_for_node(1)).to eql([
'f_spec.rb',
'd_spec.rb',
'a_spec.rb',
'b_spec.rb'
])
end
end
context "when node doesn't exist" do
it { expect(distributor.specs_for_node(42)).to be_nil }
end
end
end
end
Fix report distributor spec: build distributor args from a default_args hash (report, spec_pattern, CI node settings) merged with per-example custom_args
# Unit specs for Knapsack::Distributors::ReportDistributor. Every example
# constructs the distributor from default_args (a complete, valid argument
# set) merged with per-example custom_args overrides.
describe Knapsack::Distributors::ReportDistributor do
let(:report) { { 'a_spec.rb' => 1.0 } }
let(:default_args) do
{
report: report,
spec_pattern: 'spec/**/*_spec.rb',
ci_node_total: '1',
ci_node_index: '0'
}
end
let(:args) { default_args.merge(custom_args) }
let(:custom_args) { {} }
let(:distributor) { described_class.new(args) }
describe '#sorted_report' do
subject { distributor.sorted_report }
let(:report) do
{
'e_spec.rb' => 3.0,
'f_spec.rb' => 3.5,
'c_spec.rb' => 2.0,
'd_spec.rb' => 2.5,
'a_spec.rb' => 1.0,
'b_spec.rb' => 1.5,
}
end
# Entries come back ordered by descending execution time.
it do
should eql([
['f_spec.rb', 3.5],
['e_spec.rb', 3.0],
['d_spec.rb', 2.5],
['c_spec.rb', 2.0],
['b_spec.rb', 1.5],
['a_spec.rb', 1.0],
])
end
end
describe '#sorted_report_with_existing_specs' do
subject { distributor.sorted_report_with_existing_specs }
let(:report) do
{
'e_spec.rb' => 3.0,
'f_spec.rb' => 3.5,
'c_spec.rb' => 2.0,
'd_spec.rb' => 2.5,
'a_spec.rb' => 1.0,
'b_spec.rb' => 1.5,
}
end
before do
# One #all_specs call per report entry (6 in total); only b/d/f still
# exist on disk, so the others must be filtered out of the result.
expect(distributor).to receive(:all_specs).exactly(6).times.and_return([
'b_spec.rb',
'd_spec.rb',
'f_spec.rb',
])
end
it do
should eql([
['f_spec.rb', 3.5],
['d_spec.rb', 2.5],
['b_spec.rb', 1.5],
])
end
end
context do
let(:report) do
{
'a_spec.rb' => 3.0,
'b_spec.rb' => 1.0,
'c_spec.rb' => 1.5,
}
end
before do
allow(distributor).to receive(:all_specs).and_return(report.keys)
end
describe '#total_time_execution' do
subject { distributor.total_time_execution }
context 'when time is float' do
it { should eql 5.5 }
end
context 'when time is not float' do
# Integer times must still sum to a Float.
let(:report) do
{
'a_spec.rb' => 3,
'b_spec.rb' => 1,
}
end
it { should eql 4.0 }
end
end
describe '#node_time_execution' do
subject { distributor.node_time_execution }
let(:custom_args) { { ci_node_total: 4 } }
# 5.5 total execution time split over 4 nodes.
it { should eql 1.375 }
end
end
context do
let(:report) do
{
'g_spec.rb' => 9.0,
'h_spec.rb' => 3.0,
'i_spec.rb' => 3.0,
'f_spec.rb' => 3.5,
'c_spec.rb' => 2.0,
'd_spec.rb' => 2.5,
'a_spec.rb' => 1.0,
'b_spec.rb' => 1.5,
}
end
let(:custom_args) { { ci_node_total: 3 } }
before do
allow(distributor).to receive(:all_specs).and_return(report.keys)
end
describe '#assign_spec_files_to_node' do
before { distributor.assign_spec_files_to_node }
# time_left below is node budget (25.5 / 3 = 8.5) minus assigned time.
it do
expect(distributor.node_specs[0]).to eql({
:node_index => 0,
:time_left => -0.5,
:spec_files_with_time => [
["g_spec.rb", 9.0]
]
})
end
it do
expect(distributor.node_specs[1]).to eql({
:node_index => 1,
:time_left => 0.0,
:spec_files_with_time => [
["f_spec.rb", 3.5],
["d_spec.rb", 2.5],
["a_spec.rb", 1.0],
["b_spec.rb", 1.5]
]
})
end
it do
expect(distributor.node_specs[2]).to eql({
:node_index => 2,
:time_left => 0.5,
:spec_files_with_time => [
["h_spec.rb", 3.0],
["c_spec.rb", 2.0],
["i_spec.rb", 3.0]
]
})
end
end
describe '#specs_for_node' do
context 'when node exists' do
it do
expect(distributor.specs_for_node(1)).to eql([
'f_spec.rb',
'd_spec.rb',
'a_spec.rb',
'b_spec.rb'
])
end
end
context "when node doesn't exist" do
it { expect(distributor.specs_for_node(42)).to be_nil }
end
end
end
end
|
test: add specs for advanced action generation
require 'spec_helper'
require_relative '../../../lib/generators/light_service/action_generator.rb'
require_relative './full_generator_test_blobs'
include FullGeneratorTestBlobs
# Generator spec: running the LightService action generator with expects/
# promises lists, --no-roll-back, and a custom --dir must produce both the
# action class and its spec under the namespaced directories.
describe LightService::Generators::ActionGenerator, type: :generator do
destination File.expand_path("../tmp", __FILE__)
context "when generating an advanced action" do
before(:all) do
prepare_destination
run_generator
end
after(:all) do
# Remove the generated tree so reruns start from a clean destination.
FileUtils.rm_rf destination_root
end
arguments %w(my/fancy/action expects:foo,bar promises:baz,qux --no-roll-back --dir=services)
specify do
# Expected blobs come from FullGeneratorTestBlobs.
expect(destination_root).to have_structure {
directory "app/services/my/fancy" do
file "action.rb" do
contains advanced_action_blob
end
end
directory "spec/services/my/fancy" do
file "action_spec.rb" do
contains advanced_action_spec_blob
end
end
}
end
end
end
|
# frozen_string_literal: true
require 'spec_helper'
class InterfaceConnectionQuerySpec < PrelaySpec
it "should support returning a connection on an interface" do
execute_query <<-GRAPHQL
query Query {
connections {
releases(first: 5) {
edges {
cursor
node {
id,
name,
artist {
id,
name
}
}
}
}
}
}
GRAPHQL
albums = Album.order(Sequel.desc(:created_at)).limit(5).all
compilations = Compilation.order(Sequel.desc(:created_at)).limit(5).all
releases = (albums + compilations).sort_by(&:created_at).reverse.first(5)
assert_sqls [
%(SELECT "albums"."id", "albums"."name", "albums"."artist_id", "albums"."created_at" AS "cursor" FROM "albums" ORDER BY "created_at" DESC LIMIT 5),
%(SELECT "artists"."id", "artists"."first_name", "artists"."last_name" FROM "artists" WHERE ("artists"."id" IN (#{albums.map{|a| "'#{a.artist_id}'"}.uniq.join(', ')})) ORDER BY "artists"."id"),
%(SELECT "compilations"."id", "compilations"."name", "compilations"."artist_id", "compilations"."created_at" AS "cursor" FROM "compilations" ORDER BY "created_at" DESC LIMIT 5),
%(SELECT "artists"."id", "artists"."first_name", "artists"."last_name" FROM "artists" WHERE ("artists"."id" IN (#{compilations.map{|a| "'#{a.artist_id}'"}.uniq.join(', ')})) ORDER BY "artists"."id"),
]
assert_result \
'data' => {
'connections' => {
'releases' => {
'edges' => releases.map { |release|
{
'cursor' => to_cursor(release.created_at),
'node' => {
'id' => encode(release.is_a?(Album) ? 'Album' : 'Compilation', release.id),
'name' => release.name,
'artist' => {
'id' => encode('Artist', release.artist_id),
'name' => release.artist.name,
}
}
}
}
}
}
}
end
it "should support filters on the given interface"
end
Update spec to match new helpers in master.
# frozen_string_literal: true
require 'spec_helper'
# Integration spec: a Relay connection exposed on an interface type must load
# records from every implementing table (albums and compilations) and
# interleave the results by the created_at cursor. Uses the id_for helper to
# build expected global ids.
class InterfaceConnectionQuerySpec < PrelaySpec
it "should support returning a connection on an interface" do
execute_query <<-GRAPHQL
query Query {
connections {
releases(first: 5) {
edges {
cursor
node {
id,
name,
artist {
id,
name
}
}
}
}
}
}
GRAPHQL
# Recompute the expected page the same way the resolver should: top 5 of
# each implementing model, merged and re-limited by recency.
albums = Album.order(Sequel.desc(:created_at)).limit(5).all
compilations = Compilation.order(Sequel.desc(:created_at)).limit(5).all
releases = (albums + compilations).sort_by(&:created_at).reverse.first(5)
# One paged query per implementing table, plus one artist batch load each.
assert_sqls [
%(SELECT "albums"."id", "albums"."name", "albums"."artist_id", "albums"."created_at" AS "cursor" FROM "albums" ORDER BY "created_at" DESC LIMIT 5),
%(SELECT "artists"."id", "artists"."first_name", "artists"."last_name" FROM "artists" WHERE ("artists"."id" IN (#{albums.map{|a| "'#{a.artist_id}'"}.uniq.join(', ')})) ORDER BY "artists"."id"),
%(SELECT "compilations"."id", "compilations"."name", "compilations"."artist_id", "compilations"."created_at" AS "cursor" FROM "compilations" ORDER BY "created_at" DESC LIMIT 5),
%(SELECT "artists"."id", "artists"."first_name", "artists"."last_name" FROM "artists" WHERE ("artists"."id" IN (#{compilations.map{|a| "'#{a.artist_id}'"}.uniq.join(', ')})) ORDER BY "artists"."id"),
]
assert_result \
'data' => {
'connections' => {
'releases' => {
'edges' => releases.map { |release|
{
'cursor' => to_cursor(release.created_at),
'node' => {
'id' => id_for(release),
'name' => release.name,
'artist' => {
'id' => id_for(release.artist),
'name' => release.artist.name,
}
}
}
}
}
}
}
end
it "should support filters on the given interface"
end
|
# encoding utf-8
require 'spec_helper_min'
require 'support/helpers'
require 'factories/carto_visualizations'
# Request specs for the snapshots API: #index must scope snapshots to the
# requesting user and visualization; #show must enforce auth and ownership.
describe Carto::Api::SnapshotsController do
include Carto::Factories::Visualizations
include HelperMethods
let(:fake_json) { { manolo: 'escobar' } }
before(:all) do
@user = FactoryGirl.create(:carto_user)
@intruder = FactoryGirl.create(:carto_user)
@_m, @_t, @_tv, @visualization = create_full_visualization(@user)
@_om, @_ot, @_otv, @other_visualization = create_full_visualization(@user)
end
after(:all) do
# NOTE(review): helper is spelled "destroy_full_visualzation" — presumably
# matches the factory helper's actual name; verify against the factories.
destroy_full_visualzation(@_m, @_t, @_tv, @visualization)
destroy_full_visualzation(@_om, @_ot, @_otv, @other_visualization)
@intruder.destroy
@user.destroy
end
describe('#index') do
# URL builder defaulting to the owner's credentials and visualization;
# override keyword args to impersonate other users.
def snapshots_index_url(user_domain: @user.subdomain,
visualization_id: @visualization.id,
api_key: @user.api_key)
snapshots_url(user_domain: user_domain,
visualization_id: visualization_id,
api_key: api_key)
end
before(:all) do
# Seed snapshots for the owner, plus a second user ("buddy") with
# snapshots on two visualizations, to exercise scoping in #index.
5.times do
Carto::State.create!(user_id: @user.id,
visualization_id: @visualization.id,
json: fake_json)
end
@buddy = FactoryGirl.create(:carto_user)
5.times do
Carto::State.create!(user_id: @buddy.id,
visualization_id: @visualization.id,
json: fake_json)
end
5.times do
Carto::State.create!(user_id: @buddy.id,
visualization_id: @other_visualization.id,
json: fake_json)
end
end
after(:all) do
Carto::State.where(user_id: @user.id).map(&:destroy)
Carto::State.where(user_id: @buddy.id).map(&:destroy)
@buddy.destroy
end
it 'should reject unauthenticated access' do
Carto::Visualization.any_instance
.stubs(:is_publically_accesible?)
.returns(false)
get_json(snapshots_index_url(api_key: nil), Hash.new) do |response|
response.status.should eq 401
end
end
it 'should reject users with no read access' do
Carto::Visualization.any_instance
.stubs(:is_viewable_by_user?)
.returns(false)
intruder_url = snapshots_index_url(user_domain: @intruder.subdomain,
api_key: @intruder.api_key)
get_json(intruder_url, Hash.new) do |response|
response.status.should eq 403
end
end
it 'should not list visualization state for owner' do
get_json(snapshots_index_url, Hash.new) do |response|
response.status.should eq 200
response_ids = response.body
.map { |snapshot| snapshot['id'] }
.compact
.sort
response_ids.should_not be_empty
response_ids.should_not include(@visualization.id)
end
end
it 'should list only snapshots for user and visualization' do
buddy_url = snapshots_index_url(user_domain: @buddy.subdomain,
api_key: @buddy.api_key)
buddy_snaps_for_viz = Carto::State.where(user_id: @buddy.id,
visualization_id: @visualization.id)
.map(&:id)
.sort
get_json(buddy_url, Hash.new) do |response|
response.status.should eq 200
response_ids = response.body
.map { |snapshot| snapshot['id'] }
.compact
.sort
response_ids.should_not be_empty
response_ids.should eq buddy_snaps_for_viz
end
end
end
describe('#show') do
# URL builder defaulting to the owner's snapshot; override keyword args
# to impersonate other users.
def snapshots_show_url(user_domain: @user.subdomain,
visualization_id: @visualization.id,
snapshot_id: @snapshot.id,
api_key: @user.api_key)
snapshot_url(user_domain: user_domain,
visualization_id: visualization_id,
id: snapshot_id,
api_key: api_key)
end
before(:all) do
@snapshot = Carto::State.create!(user_id: @user.id,
visualization_id: @visualization.id,
json: fake_json)
end
after(:all) do
@snapshot.destroy
end
it 'should reject unauthenticated access' do
Carto::Visualization.any_instance
.stubs(:is_publically_accesible?)
.returns(false)
get_json(snapshots_show_url(api_key: nil), Hash.new) do |response|
response.status.should eq 401
end
end
it 'should reject users with no read access' do
Carto::Visualization.any_instance
.stubs(:is_viewable_by_user?)
.returns(false)
intruder_url = snapshots_show_url(user_domain: @intruder.subdomain,
api_key: @intruder.api_key)
get_json(intruder_url, Hash.new) do |response|
response.status.should eq 403
end
end
it 'should only accept owners of snapshots' do
intruder_url = snapshots_show_url(user_domain: @intruder.subdomain,
api_key: @intruder.api_key)
get_json(intruder_url, Hash.new) do |response|
response.status.should eq 403
end
end
it 'shows a snapshot' do
get_json(snapshots_show_url, Hash.new) do |response|
response.status.should eq 200
response.body[:id].should eq @snapshot.id
end
end
end
end
add test to verify 404 visualization in index
# encoding utf-8
require 'spec_helper_min'
require 'support/helpers'
require 'factories/carto_visualizations'
# Request specs for the snapshots API: #index must scope snapshots to the
# requesting user and visualization (404 on unknown visualizations); #show
# must enforce auth and ownership.
describe Carto::Api::SnapshotsController do
include Carto::Factories::Visualizations
include HelperMethods
let(:fake_json) { { manolo: 'escobar' } }
before(:all) do
@user = FactoryGirl.create(:carto_user)
@intruder = FactoryGirl.create(:carto_user)
@_m, @_t, @_tv, @visualization = create_full_visualization(@user)
@_om, @_ot, @_otv, @other_visualization = create_full_visualization(@user)
end
after(:all) do
# NOTE(review): helper is spelled "destroy_full_visualzation" — presumably
# matches the factory helper's actual name; verify against the factories.
destroy_full_visualzation(@_m, @_t, @_tv, @visualization)
destroy_full_visualzation(@_om, @_ot, @_otv, @other_visualization)
@intruder.destroy
@user.destroy
end
describe('#index') do
# URL builder defaulting to the owner's credentials and visualization;
# override keyword args to impersonate other users.
def snapshots_index_url(user_domain: @user.subdomain,
visualization_id: @visualization.id,
api_key: @user.api_key)
snapshots_url(user_domain: user_domain,
visualization_id: visualization_id,
api_key: api_key)
end
before(:all) do
# Seed snapshots for the owner, plus a second user ("buddy") with
# snapshots on two visualizations, to exercise scoping in #index.
5.times do
Carto::State.create!(user_id: @user.id,
visualization_id: @visualization.id,
json: fake_json)
end
@buddy = FactoryGirl.create(:carto_user)
5.times do
Carto::State.create!(user_id: @buddy.id,
visualization_id: @visualization.id,
json: fake_json)
end
5.times do
Carto::State.create!(user_id: @buddy.id,
visualization_id: @other_visualization.id,
json: fake_json)
end
end
after(:all) do
Carto::State.where(user_id: @user.id).map(&:destroy)
Carto::State.where(user_id: @buddy.id).map(&:destroy)
@buddy.destroy
end
it 'should reject unauthenticated access' do
Carto::Visualization.any_instance
.stubs(:is_publically_accesible?)
.returns(false)
get_json(snapshots_index_url(api_key: nil), Hash.new) do |response|
response.status.should eq 401
end
end
it 'should reject users with no read access' do
Carto::Visualization.any_instance
.stubs(:is_viewable_by_user?)
.returns(false)
intruder_url = snapshots_index_url(user_domain: @intruder.subdomain,
api_key: @intruder.api_key)
get_json(intruder_url, Hash.new) do |response|
response.status.should eq 403
end
end
it 'should 404 for non existent visualizations' do
not_found_url = snapshots_index_url(visualization_id: random_uuid)
get_json(not_found_url, Hash.new) do |response|
response.status.should eq 404
end
end
it 'should not list visualization state for owner' do
get_json(snapshots_index_url, Hash.new) do |response|
response.status.should eq 200
response_ids = response.body
.map { |snapshot| snapshot['id'] }
.compact
.sort
response_ids.should_not be_empty
response_ids.should_not include(@visualization.id)
end
end
it 'should list only snapshots for user and visualization' do
buddy_url = snapshots_index_url(user_domain: @buddy.subdomain,
api_key: @buddy.api_key)
buddy_snaps_for_viz = Carto::State.where(user_id: @buddy.id,
visualization_id: @visualization.id)
.map(&:id)
.sort
get_json(buddy_url, Hash.new) do |response|
response.status.should eq 200
response_ids = response.body
.map { |snapshot| snapshot['id'] }
.compact
.sort
response_ids.should_not be_empty
response_ids.should eq buddy_snaps_for_viz
end
end
end
describe('#show') do
# URL builder defaulting to the owner's snapshot; override keyword args
# to impersonate other users.
def snapshots_show_url(user_domain: @user.subdomain,
visualization_id: @visualization.id,
snapshot_id: @snapshot.id,
api_key: @user.api_key)
snapshot_url(user_domain: user_domain,
visualization_id: visualization_id,
id: snapshot_id,
api_key: api_key)
end
before(:all) do
@snapshot = Carto::State.create!(user_id: @user.id,
visualization_id: @visualization.id,
json: fake_json)
end
after(:all) do
@snapshot.destroy
end
it 'should reject unauthenticated access' do
Carto::Visualization.any_instance
.stubs(:is_publically_accesible?)
.returns(false)
get_json(snapshots_show_url(api_key: nil), Hash.new) do |response|
response.status.should eq 401
end
end
it 'should reject users with no read access' do
Carto::Visualization.any_instance
.stubs(:is_viewable_by_user?)
.returns(false)
intruder_url = snapshots_show_url(user_domain: @intruder.subdomain,
api_key: @intruder.api_key)
get_json(intruder_url, Hash.new) do |response|
response.status.should eq 403
end
end
it 'should only accept owners of snapshots' do
intruder_url = snapshots_show_url(user_domain: @intruder.subdomain,
api_key: @intruder.api_key)
get_json(intruder_url, Hash.new) do |response|
response.status.should eq 403
end
end
it 'shows a snapshot' do
get_json(snapshots_show_url, Hash.new) do |response|
response.status.should eq 200
response.body[:id].should eq @snapshot.id
end
end
end
end
|
require File.expand_path(File.dirname(__FILE__) + '/common')
require File.expand_path(File.dirname(__FILE__) + '/helpers/files_common')
require File.expand_path(File.dirname(__FILE__) + '/helpers/submissions_common')
require File.expand_path(File.dirname(__FILE__) + '/helpers/gradebook2_common')
describe "submissions" do
include_examples "in-process server selenium tests"
context 'as a student' do
before(:each) do
@due_date = Time.now.utc + 2.days
course_with_student_logged_in
@assignment = @course.assignments.create!(:title => 'assignment 1', :name => 'assignment 1', :due_at => @due_date)
@second_assignment = @course.assignments.create!(:title => 'assignment 2', :name => 'assignment 2', :due_at => nil)
@third_assignment = @course.assignments.create!(:title => 'assignment 3', :name => 'assignment 3', :due_at => nil)
@fourth_assignment = @course.assignments.create!(:title => 'assignment 4', :name => 'assignment 4', :due_at => @due_date - 1.day)
end
it "should let a student submit a text entry", priority: "1", test_id: 56015 do
@assignment.update_attributes(submission_types: "online_text_entry")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f(".submit_assignment_link").click
driver.execute_script "tinyMCE.activeEditor.setContent('text')"
f('button[type="submit"]').click
expect(f("#sidebar_content")).to include_text("Turned In!")
end
it "should not break when you open and close the media comment dialog", priority: "1", test_id: 237020 do
stub_kaltura
#pending("failing because it is dependant on an external kaltura system")
create_assignment_and_go_to_page('media_recording')
f(".submit_assignment_link").click
open_button = f(".record_media_comment_link")
# open it twice
open_button.click
# swf and other stuff load, give it half a second before it starts trying to click
sleep 1
close_visible_dialog
open_button.click
sleep 1
close_visible_dialog
# fire the callback that the flash object fires
driver.execute_script("window.mediaCommentCallback([{entryId:1, entryType:1}]);")
# see if the confirmation element shows up
expect(f('#media_media_recording_ready')).to be_displayed
# submit the assignment so the "are you sure?!" message doesn't freeze up selenium
submit_form('#submit_media_recording_form')
end
it "should not allow blank media submission", priority: "1", test_id: 237021 do
stub_kaltura
#pending("failing because it is dependant on an external kaltura system")
create_assignment_and_go_to_page 'media_recording'
f(".submit_assignment_link").click
expect(f('#media_comment_submit_button')).to have_attribute('disabled', 'true')
# leave so the "are you sure?!" message doesn't freeze up selenium
f('#section-tabs .home').click
driver.switch_to.alert.accept
end
it "should allow you to submit a file", priority: "1", test_id: 237022 do
@assignment.submission_types = 'online_upload'
@assignment.save!
filename, fullpath, data = get_file("testfile1.txt")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
f('.submission_attachment input').send_keys(fullpath)
f('#submission_comment').send_keys("hello comment")
expect_new_page_load { f('#submit_file_button').click }
keep_trying_until do
expect(f('#sidebar_content .header')).to include_text "Turned In!"
expect(f('.details .file-big')).to include_text "testfile1"
end
@submission = @assignment.reload.submissions.where(user_id: @student).first
expect(@submission.submission_type).to eq 'online_upload'
expect(@submission.attachments.length).to eq 1
expect(@submission.workflow_state).to eq 'submitted'
end
it "should not allow a user to submit a file-submission assignment without attaching a file", priority: "1", test_id: 237023 do
@assignment.submission_types = 'online_upload'
@assignment.save!
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
wait_for_ajaximations
f('#submit_file_button').click
wait_for_ajaximations
expect(flash_message_present?(:error)).to be_truthy
# navigate off the page and dismiss the alert box to avoid problems
# with other selenium tests
f('#section-tabs .home').click
driver.switch_to.alert.accept
driver.switch_to.default_content
end
it "should not allow a user to submit a file-submission assignment with an illegal file extension", priority: "1", test_id: 237024 do
@assignment.submission_types = 'online_upload'
@assignment.allowed_extensions = ['bash']
@assignment.save!
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
wait_for_ajaximations
# Select an assignment that has a wrong file extension
filename, fullpath, data = get_file("testfile1.txt")
f('.submission_attachment input').send_keys(fullpath)
# Check that the error is being reported
expect(f('.bad_ext_msg').text() =~ /This\sfile\stype\sis\snot\sallowed/).to be_truthy
# navigate off the page and dismiss the alert box to avoid problems
# with other selenium tests
f('#section-tabs .home').click
driver.switch_to.alert.accept
driver.switch_to.default_content
end
it "should show as not turned in when submission was auto created in speedgrader", priority: "1", test_id: 237025 do
# given
@assignment.update_attributes(:submission_types => "online_text_entry")
@assignment.grade_student(@student, :grade => "0")
# when
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
# expect
expect(f('#sidebar_content .details')).to include_text "Not Turned In!"
expect(f('.submit_assignment_link').text).to eq "Submit Assignment"
end
it "should not show as turned in or not turned in when assignment doesnt expect a submission", priority: "1", test_id: 237025 do
# given
@assignment.update_attributes(:submission_types => "on_paper")
@assignment.grade_student(@student, :grade => "0")
# when
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
# expect
expect(f('#sidebar_content .details')).not_to include_text "Turned In!"
expect(f('#sidebar_content .details')).not_to include_text "Not Turned In!"
expect(f('.submit_assignment_link')).to be_nil
end
it "should not allow blank submissions for text entry", priority: "1", test_id: 237026 do
@assignment.update_attributes(:submission_types => "online_text_entry")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
wait_for_ajaximations
assignment_form = f('#submit_online_text_entry_form')
wait_for_tiny(assignment_form)
submit_form(assignment_form)
wait_for_ajaximations
# it should not actually submit and pop up an error message
expect(ff('.error_box')[1]).to include_text('Required')
expect(Submission.count).to eq 0
# now make sure it works
type_in_tiny('#submission_body', 'now it is not blank')
submit_form(assignment_form)
wait_for_ajaximations
expect(Submission.count).to eq 1
end
it "should not allow a submission with only comments", priority: "1", test_id: 237027 do
@assignment.update_attributes(:submission_types => "online_text_entry")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
assignment_form = f('#submit_online_text_entry_form')
replace_content(assignment_form.find_element(:id, 'submission_comment'), 'this should not be able to be submitted for grading')
submit_form("#submit_online_text_entry_form")
# it should not actually submit and pop up an error message
expect(ff('.error_box')[1]).to include_text('Required')
expect(Submission.count).to eq 0
# navigate off the page and dismiss the alert box to avoid problems
# with other selenium tests
f('#section-tabs .home').click
driver.switch_to.alert.accept
driver.switch_to.default_content
end
# Uploads a file submission as the first student, then logs in as a second
# student assigned as the peer reviewer and verifies the Turnitin score
# container is absent from the submission preview frame.
it "should not allow peer reviewers to see turnitin scores/reports", priority: "1", test_id: 237028 do
@student1 = @user
@assignment.submission_types = 'online_upload'
@assignment.save!
filename, fullpath, data = get_file("testfile1.txt")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
f('.submission_attachment input').send_keys(fullpath)
f('#submission_comment').send_keys("hello comment")
expect_new_page_load { f('#submit_file_button').click }
@submission = @assignment.reload.submissions.last
# Switch sessions: user_logged_in reassigns @user, so the reviewer is
# captured separately as @student2.
user_logged_in(:username => "assessor@example.com")
@student2 = @user
student_in_course(:active_enrollment => true, :user => @student2)
@assignment.peer_reviews = true
@assignment.assign_peer_review(@student2, @student1)
@assignment.turnitin_enabled = true
@assignment.due_at = 1.day.ago
@assignment.save!
# Stub a fully-scored Turnitin report directly on the submission record.
asset = @submission.turnitin_assets.first.asset_string
@submission.turnitin_data = {
"#{asset}" => {
:object_id => "123456",
:publication_overlap => 5,
:similarity_score => 100,
:state => "failure",
:status => "scored",
:student_overlap => 44,
:web_overlap => 100
},
:last_processed_attempt => 1
}
@submission.turnitin_data_changed!
@submission.save!
get "/courses/#{@course.id}/assignments/#{@assignment.id}/submissions/#{@student1.id}"
in_frame('preview_frame') do
expect(ff('.turnitin_score_container')).to be_empty
end
end
# Submits an online_url assignment and checks the "Turned In!" header plus the
# course-page tooltip state. Currently skipped pending BUG 6783.
it "should submit an assignment and validate confirmation information", priority: "1", test_id: 237029 do
skip "BUG 6783 - Coming Up assignments update error"
@assignment.update_attributes(:submission_types => 'online_url')
@submission = @assignment.submit_homework(@student)
@submission.submission_type = "online_url"
@submission.save!
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
expect(f('.details .header')).to include_text('Turned In!')
get "/courses/#{@course.id}"
driver.execute_script("$('.tooltip_text').css('visibility', 'visible')")
tooltip_text_elements = ff('.tooltip_text > span')
expect(f('.tooltip_text')).to be_displayed
expect(tooltip_text_elements[1].text).to eq 'submitted'
end
describe 'uploaded files for submission' do
include_examples "in-process server selenium tests"
# Resolves a fixture file name against the Rails test fixture_path when that
# is available; otherwise returns the bare file name unchanged.
def fixture_file_path(file)
path = ActionController::TestCase.respond_to?(:fixture_path) ? ActionController::TestCase.send(:fixture_path) : nil
return "#{path}#{file}"
end
# Creates an attachment from the given fixture data in +context+'s root folder.
def add_file(fixture, context, name)
context.attachments.create! do |attachment|
attachment.uploaded_data = fixture
attachment.filename = name
attachment.folder = Folder.root_folders(context).first
end
end
# Forces hover-only folder action controls to be visible so Selenium can click them.
def make_folder_actions_visible
driver.execute_script("$('.folder_item').addClass('folder_item_hover')")
end
# Uploads an HTML fixture to the user's files, then submits it to an
# online_upload assignment via the "uploaded files" tree in the submit dialog.
it "should allow uploaded files to be used for submission", priority: "1", test_id: 237030 do
local_storage!
user_with_pseudonym :username => "nobody2@example.com",
:password => "asdfasdf2"
course_with_student_logged_in :user => @user
create_session @pseudonym
add_file(fixture_file_upload('files/html-editing-test.html', 'text/html'),
@user, "html-editing-test.html")
File.read(fixture_file_path("files/html-editing-test.html"))
assignment = @course.assignments.create!(:title => 'assignment 1',
:name => 'assignment 1',
:submission_types => "online_upload",
:allowed_extensions => '.html')
get "/courses/#{@course.id}/assignments/#{assignment.id}"
f('.submit_assignment_link').click
wait_for_ajaximations
f('.toggle_uploaded_files_link').click
wait_for_ajaximations
# traverse the uploaded-files tree: expand the root folder, then pick the file
begin
keep_trying_until do
f('#uploaded_files > ul > li.folder > .sign').click
wait_for_ajaximations
expect(f('#uploaded_files > ul > li.folder .file .name')).to be_displayed
end
f('#uploaded_files > ul > li.folder .file .name').click
wait_for_ajaximations
rescue => err
# prevent the confirm dialog that pops up when you navigate away
# from the page from showing.
# TODO: actually figure out why the spec intermittently fails.
driver.execute_script "window.onbeforeunload = null;"
raise err
end
expect_new_page_load { f('#submit_file_button').click }
keep_trying_until do
expect(f('.details .header')).to include_text "Turned In!"
expect(f('.details .file-big')).to include_text "html-editing-test.html"
end
end
# Uploads a shell-script fixture and tries to submit it to an assignment that
# only allows .txt files; expects an error flash message, not a submission.
it "should not allow a user to submit a file-submission assignment from previously uploaded files with an illegal file extension", priority: "1", test_id: 237031 do
  # FIX: use locals instead of constants. Assigning FILENAME / FIXTURE_FN
  # inside an example block defines top-level constants and emits
  # "already initialized constant" warnings whenever the example re-runs.
  filename = "hello-world.sh"
  fixture_fn = "files/#{filename}"
  local_storage!
  user_with_pseudonym :username => "nobody2@example.com",
                      :password => "asdfasdf2"
  course_with_student_logged_in :user => @user
  create_session @pseudonym
  add_file(fixture_file_upload(fixture_fn, 'application/x-sh'),
           @user, filename)
  File.read(fixture_file_path(fixture_fn))
  assignment = @course.assignments.create!(:title => 'assignment 1',
                                           :name => 'assignment 1',
                                           :submission_types => "online_upload",
                                           :allowed_extensions => ['txt'])
  get "/courses/#{@course.id}/assignments/#{assignment.id}"
  f('.submit_assignment_link').click
  wait_for_ajaximations
  f('.toggle_uploaded_files_link').click
  wait_for_ajaximations
  # traverse the uploaded-files tree and pick the first file
  begin
    keep_trying_until do
      f('#uploaded_files > ul > li.folder > .sign').click
      wait_for_ajaximations
      expect(f('#uploaded_files > ul > li.folder .file .name')).to be_displayed
    end
    f('#uploaded_files > ul > li.folder .file .name').click
    wait_for_ajaximations
    f('#submit_file_button').click
  rescue => err
    # prevent the confirm dialog that pops up when you navigate away
    # from the page from showing.
    # TODO: actually figure out why the spec intermittently fails.
    driver.execute_script "window.onbeforeunload = null;"
    raise err
  end
  # Make sure the flash message is being displayed
  expect(flash_message_present?(:error)).to be_truthy
  # navigate off the page and dismiss the alert box to avoid problems
  # with other selenium tests
  f('#section-tabs .home').click
  driver.switch_to.alert.accept
  driver.switch_to.default_content
end
end
end
# Excused-assignment behavior from the student's point of view.
context 'Excused assignment' do
  # Grades three assignments as excused and verifies "Excused" shows on the
  # assignment index, the assignment page, and the submission details page.
  it 'indicates as excused in submission details page', priority: "1", test_id: 201937 do
    init_course_with_students
    assignments = []
    3.times do |i|
      assignments << assignment = @course.assignments.create!(title: "Assignment #{i}", submission_types: 'online_text_entry', points_possible: 20)
      assignment.submit_homework(@students[0], {submission_type: 'online_text_entry'}) unless i == 2
    end
    assignments[1].grade_student @students[0], {grade: 10}
    assignments.each do |assignment|
      assignment.grade_student @students[0], {excuse: true}
    end
    user_session @students[0]
    get "/courses/#{@course.id}/assignments"
    index_scores = ff('.score-display')
    # FIX: the block previously took no parameter, so `score` was undefined
    # (NameError) and the per-score assertion could never run correctly.
    index_scores.each do |score|
      expect(score.text).to eq 'Excused'
    end
    3.times do |i|
      get "/courses/#{@course.id}/assignments/#{assignments[i].id}"
      expect(f("#sidebar_content .header").text).to eq 'Excused!'
      get "/courses/#{@course.id}/assignments/#{assignments[i].id}/submissions/#{@students[0].id}"
      expect(f("#content .submission_details .published_grade").text).to eq 'Excused'
    end
  end
  # An excused assignment must not offer a submit link to the student.
  it 'does not allow submissions', priority: "1", test_id: 197048 do
    course_with_student_logged_in
    @assignment = @course.assignments.create!(
      title: 'assignment 1',
      submission_types: 'online_text_entry'
    )
    @assignment.grade_student @student, {excuse: 1}
    get "/courses/#{@course.id}/assignments/#{@assignment.id}"
    expect(f('a.submit_assignment_link')).to be_nil
    expect(f('#assignment_show .assignment-title').text).to eq 'assignment 1'
  end
end
end
Added a spec for submitting a blank text assignment
Added an opposite check on my previous spec
Change-Id: Idfc53b5b3b9054718be99a1e67cd85b78d4597b2
Reviewed-on: https://gerrit.instructure.com/58880
Reviewed-by: Caleb Guanzon <54fa1f30d451e4f3c341a8f3c47c9215fd0be9c6@instructure.com>
QA-Review: Caleb Guanzon <54fa1f30d451e4f3c341a8f3c47c9215fd0be9c6@instructure.com>
Product-Review: Derek Hansen <cb1116771307a93b98b6babf698410ed3846296d@instructure.com>
Tested-by: Jenkins
require File.expand_path(File.dirname(__FILE__) + '/common')
require File.expand_path(File.dirname(__FILE__) + '/helpers/files_common')
require File.expand_path(File.dirname(__FILE__) + '/helpers/submissions_common')
require File.expand_path(File.dirname(__FILE__) + '/helpers/gradebook2_common')
describe "submissions" do
include_examples "in-process server selenium tests"
context 'as a student' do
# Shared setup: a logged-in student in a course with four assignments
# exercising different due-date configurations (future, none, past).
before(:each) do
@due_date = Time.now.utc + 2.days
course_with_student_logged_in
@assignment = @course.assignments.create!(:title => 'assignment 1', :name => 'assignment 1', :due_at => @due_date)
@second_assignment = @course.assignments.create!(:title => 'assignment 2', :name => 'assignment 2', :due_at => nil)
@third_assignment = @course.assignments.create!(:title => 'assignment 3', :name => 'assignment 3', :due_at => nil)
@fourth_assignment = @course.assignments.create!(:title => 'assignment 4', :name => 'assignment 4', :due_at => @due_date - 1.day)
end
# Happy path: typing text into TinyMCE and submitting shows "Turned In!"
# with no validation error.
it "should let a student submit a text entry", priority: "1", test_id: 56015 do
@assignment.update_attributes(submission_types: "online_text_entry")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f(".submit_assignment_link").click
driver.execute_script "tinyMCE.activeEditor.setContent('text')"
f('button[type="submit"]').click
expect(f("#sidebar_content")).to include_text("Turned In!")
expect(fj(".error_text")).to be_nil
end
# Submitting with an empty editor surfaces a validation error instead.
it "should not let a student submit a text entry with no text entered", priority: "2", test_id: 238143 do
@assignment.update_attributes(submission_types: "online_text_entry")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f(".submit_assignment_link").click
f('button[type="submit"]').click
expect(fj(".error_text")).to be
end
# Opens and closes the Kaltura media-comment dialog twice, then fires the
# flash callback manually and confirms the recording-ready element appears.
it "should not break when you open and close the media comment dialog", priority: "1", test_id: 237020 do
stub_kaltura
#pending("failing because it is dependant on an external kaltura system")
create_assignment_and_go_to_page('media_recording')
f(".submit_assignment_link").click
open_button = f(".record_media_comment_link")
# open it twice
open_button.click
# swf and other stuff load, give it half a second before it starts trying to click
sleep 1
close_visible_dialog
open_button.click
sleep 1
close_visible_dialog
# fire the callback that the flash object fires
driver.execute_script("window.mediaCommentCallback([{entryId:1, entryType:1}]);")
# see if the confirmation element shows up
expect(f('#media_media_recording_ready')).to be_displayed
# submit the assignment so the "are you sure?!" message doesn't freeze up selenium
submit_form('#submit_media_recording_form')
end
# With no recording made, the submit button must stay disabled.
it "should not allow blank media submission", priority: "1", test_id: 237021 do
stub_kaltura
#pending("failing because it is dependant on an external kaltura system")
create_assignment_and_go_to_page 'media_recording'
f(".submit_assignment_link").click
expect(f('#media_comment_submit_button')).to have_attribute('disabled', 'true')
# leave so the "are you sure?!" message doesn't freeze up selenium
f('#section-tabs .home').click
driver.switch_to.alert.accept
end
# Uploads a file with a comment and verifies both the UI confirmation and the
# persisted Submission record (type, attachment count, workflow state).
it "should allow you to submit a file", priority: "1", test_id: 237022 do
@assignment.submission_types = 'online_upload'
@assignment.save!
filename, fullpath, data = get_file("testfile1.txt")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
f('.submission_attachment input').send_keys(fullpath)
f('#submission_comment').send_keys("hello comment")
expect_new_page_load { f('#submit_file_button').click }
keep_trying_until do
expect(f('#sidebar_content .header')).to include_text "Turned In!"
expect(f('.details .file-big')).to include_text "testfile1"
end
@submission = @assignment.reload.submissions.where(user_id: @student).first
expect(@submission.submission_type).to eq 'online_upload'
expect(@submission.attachments.length).to eq 1
expect(@submission.workflow_state).to eq 'submitted'
end
# Submitting with no file attached must produce an error flash message.
it "should not allow a user to submit a file-submission assignment without attaching a file", priority: "1", test_id: 237023 do
@assignment.submission_types = 'online_upload'
@assignment.save!
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
wait_for_ajaximations
f('#submit_file_button').click
wait_for_ajaximations
expect(flash_message_present?(:error)).to be_truthy
# navigate off the page and dismiss the alert box to avoid problems
# with other selenium tests
f('#section-tabs .home').click
driver.switch_to.alert.accept
driver.switch_to.default_content
end
# Attaching a file whose extension is not in allowed_extensions must show the
# bad-extension message inline.
it "should not allow a user to submit a file-submission assignment with an illegal file extension", priority: "1", test_id: 237024 do
@assignment.submission_types = 'online_upload'
@assignment.allowed_extensions = ['bash']
@assignment.save!
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
wait_for_ajaximations
# Select an assignment that has a wrong file extension
filename, fullpath, data = get_file("testfile1.txt")
f('.submission_attachment input').send_keys(fullpath)
# Check that the error is being reported
expect(f('.bad_ext_msg').text() =~ /This\sfile\stype\sis\snot\sallowed/).to be_truthy
# navigate off the page and dismiss the alert box to avoid problems
# with other selenium tests
f('#section-tabs .home').click
driver.switch_to.alert.accept
driver.switch_to.default_content
end
# Grading without a real submission (as SpeedGrader does) must still show the
# assignment as not turned in and keep the submit link available.
it "should show as not turned in when submission was auto created in speedgrader", priority: "1", test_id: 237025 do
# given
@assignment.update_attributes(:submission_types => "online_text_entry")
@assignment.grade_student(@student, :grade => "0")
# when
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
# expect
expect(f('#sidebar_content .details')).to include_text "Not Turned In!"
expect(f('.submit_assignment_link').text).to eq "Submit Assignment"
end
# on_paper assignments expect no online submission, so neither "Turned In!"
# nor "Not Turned In!" should appear and there is no submit link.
it "should not show as turned in or not turned in when assignment doesnt expect a submission", priority: "1", test_id: 237025 do
# given
@assignment.update_attributes(:submission_types => "on_paper")
@assignment.grade_student(@student, :grade => "0")
# when
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
# expect
expect(f('#sidebar_content .details')).not_to include_text "Turned In!"
expect(f('#sidebar_content .details')).not_to include_text "Not Turned In!"
expect(f('.submit_assignment_link')).to be_nil
end
# A blank text entry is rejected with a "Required" error; filling in the
# editor afterwards lets the same form submit successfully.
it "should not allow blank submissions for text entry", priority: "1", test_id: 237026 do
@assignment.update_attributes(:submission_types => "online_text_entry")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
wait_for_ajaximations
assignment_form = f('#submit_online_text_entry_form')
wait_for_tiny(assignment_form)
submit_form(assignment_form)
wait_for_ajaximations
# it should not actually submit and pop up an error message
expect(ff('.error_box')[1]).to include_text('Required')
expect(Submission.count).to eq 0
# now make sure it works
type_in_tiny('#submission_body', 'now it is not blank')
submit_form(assignment_form)
wait_for_ajaximations
expect(Submission.count).to eq 1
end
# Filling only the comment field (no body text) must not create a submission.
it "should not allow a submission with only comments", priority: "1", test_id: 237027 do
@assignment.update_attributes(:submission_types => "online_text_entry")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
assignment_form = f('#submit_online_text_entry_form')
replace_content(assignment_form.find_element(:id, 'submission_comment'), 'this should not be able to be submitted for grading')
submit_form("#submit_online_text_entry_form")
# it should not actually submit and pop up an error message
expect(ff('.error_box')[1]).to include_text('Required')
expect(Submission.count).to eq 0
# navigate off the page and dismiss the alert box to avoid problems
# with other selenium tests
f('#section-tabs .home').click
driver.switch_to.alert.accept
driver.switch_to.default_content
end
# Uploads a file submission as the first student, then logs in as a second
# student assigned as the peer reviewer and verifies the Turnitin score
# container is absent from the submission preview frame.
it "should not allow peer reviewers to see turnitin scores/reports", priority: "1", test_id: 237028 do
@student1 = @user
@assignment.submission_types = 'online_upload'
@assignment.save!
filename, fullpath, data = get_file("testfile1.txt")
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
f('.submit_assignment_link').click
f('.submission_attachment input').send_keys(fullpath)
f('#submission_comment').send_keys("hello comment")
expect_new_page_load { f('#submit_file_button').click }
@submission = @assignment.reload.submissions.last
# Switch sessions: user_logged_in reassigns @user, so the reviewer is
# captured separately as @student2.
user_logged_in(:username => "assessor@example.com")
@student2 = @user
student_in_course(:active_enrollment => true, :user => @student2)
@assignment.peer_reviews = true
@assignment.assign_peer_review(@student2, @student1)
@assignment.turnitin_enabled = true
@assignment.due_at = 1.day.ago
@assignment.save!
# Stub a fully-scored Turnitin report directly on the submission record.
asset = @submission.turnitin_assets.first.asset_string
@submission.turnitin_data = {
"#{asset}" => {
:object_id => "123456",
:publication_overlap => 5,
:similarity_score => 100,
:state => "failure",
:status => "scored",
:student_overlap => 44,
:web_overlap => 100
},
:last_processed_attempt => 1
}
@submission.turnitin_data_changed!
@submission.save!
get "/courses/#{@course.id}/assignments/#{@assignment.id}/submissions/#{@student1.id}"
in_frame('preview_frame') do
expect(ff('.turnitin_score_container')).to be_empty
end
end
# Submits an online_url assignment and checks the "Turned In!" header plus the
# course-page tooltip state. Currently skipped pending BUG 6783.
it "should submit an assignment and validate confirmation information", priority: "1", test_id: 237029 do
skip "BUG 6783 - Coming Up assignments update error"
@assignment.update_attributes(:submission_types => 'online_url')
@submission = @assignment.submit_homework(@student)
@submission.submission_type = "online_url"
@submission.save!
get "/courses/#{@course.id}/assignments/#{@assignment.id}"
expect(f('.details .header')).to include_text('Turned In!')
get "/courses/#{@course.id}"
driver.execute_script("$('.tooltip_text').css('visibility', 'visible')")
tooltip_text_elements = ff('.tooltip_text > span')
expect(f('.tooltip_text')).to be_displayed
expect(tooltip_text_elements[1].text).to eq 'submitted'
end
describe 'uploaded files for submission' do
include_examples "in-process server selenium tests"
# Resolves a fixture file name against the Rails test fixture_path when that
# is available; otherwise returns the bare file name unchanged.
def fixture_file_path(file)
path = ActionController::TestCase.respond_to?(:fixture_path) ? ActionController::TestCase.send(:fixture_path) : nil
return "#{path}#{file}"
end
# Creates an attachment from the given fixture data in +context+'s root folder.
def add_file(fixture, context, name)
context.attachments.create! do |attachment|
attachment.uploaded_data = fixture
attachment.filename = name
attachment.folder = Folder.root_folders(context).first
end
end
# Forces hover-only folder action controls to be visible so Selenium can click them.
def make_folder_actions_visible
driver.execute_script("$('.folder_item').addClass('folder_item_hover')")
end
# Uploads an HTML fixture to the user's files, then submits it to an
# online_upload assignment via the "uploaded files" tree in the submit dialog.
it "should allow uploaded files to be used for submission", priority: "1", test_id: 237030 do
local_storage!
user_with_pseudonym :username => "nobody2@example.com",
:password => "asdfasdf2"
course_with_student_logged_in :user => @user
create_session @pseudonym
add_file(fixture_file_upload('files/html-editing-test.html', 'text/html'),
@user, "html-editing-test.html")
File.read(fixture_file_path("files/html-editing-test.html"))
assignment = @course.assignments.create!(:title => 'assignment 1',
:name => 'assignment 1',
:submission_types => "online_upload",
:allowed_extensions => '.html')
get "/courses/#{@course.id}/assignments/#{assignment.id}"
f('.submit_assignment_link').click
wait_for_ajaximations
f('.toggle_uploaded_files_link').click
wait_for_ajaximations
# traverse the uploaded-files tree: expand the root folder, then pick the file
begin
keep_trying_until do
f('#uploaded_files > ul > li.folder > .sign').click
wait_for_ajaximations
expect(f('#uploaded_files > ul > li.folder .file .name')).to be_displayed
end
f('#uploaded_files > ul > li.folder .file .name').click
wait_for_ajaximations
rescue => err
# prevent the confirm dialog that pops up when you navigate away
# from the page from showing.
# TODO: actually figure out why the spec intermittently fails.
driver.execute_script "window.onbeforeunload = null;"
raise err
end
expect_new_page_load { f('#submit_file_button').click }
keep_trying_until do
expect(f('.details .header')).to include_text "Turned In!"
expect(f('.details .file-big')).to include_text "html-editing-test.html"
end
end
# Uploads a shell-script fixture and tries to submit it to an assignment that
# only allows .txt files; expects an error flash message, not a submission.
it "should not allow a user to submit a file-submission assignment from previously uploaded files with an illegal file extension", priority: "1", test_id: 237031 do
  # FIX: use locals instead of constants. Assigning FILENAME / FIXTURE_FN
  # inside an example block defines top-level constants and emits
  # "already initialized constant" warnings whenever the example re-runs.
  filename = "hello-world.sh"
  fixture_fn = "files/#{filename}"
  local_storage!
  user_with_pseudonym :username => "nobody2@example.com",
                      :password => "asdfasdf2"
  course_with_student_logged_in :user => @user
  create_session @pseudonym
  add_file(fixture_file_upload(fixture_fn, 'application/x-sh'),
           @user, filename)
  File.read(fixture_file_path(fixture_fn))
  assignment = @course.assignments.create!(:title => 'assignment 1',
                                           :name => 'assignment 1',
                                           :submission_types => "online_upload",
                                           :allowed_extensions => ['txt'])
  get "/courses/#{@course.id}/assignments/#{assignment.id}"
  f('.submit_assignment_link').click
  wait_for_ajaximations
  f('.toggle_uploaded_files_link').click
  wait_for_ajaximations
  # traverse the uploaded-files tree and pick the first file
  begin
    keep_trying_until do
      f('#uploaded_files > ul > li.folder > .sign').click
      wait_for_ajaximations
      expect(f('#uploaded_files > ul > li.folder .file .name')).to be_displayed
    end
    f('#uploaded_files > ul > li.folder .file .name').click
    wait_for_ajaximations
    f('#submit_file_button').click
  rescue => err
    # prevent the confirm dialog that pops up when you navigate away
    # from the page from showing.
    # TODO: actually figure out why the spec intermittently fails.
    driver.execute_script "window.onbeforeunload = null;"
    raise err
  end
  # Make sure the flash message is being displayed
  expect(flash_message_present?(:error)).to be_truthy
  # navigate off the page and dismiss the alert box to avoid problems
  # with other selenium tests
  f('#section-tabs .home').click
  driver.switch_to.alert.accept
  driver.switch_to.default_content
end
end
end
# Excused-assignment behavior from the student's point of view.
context 'Excused assignment' do
  # Grades three assignments as excused and verifies "Excused" shows on the
  # assignment index, the assignment page, and the submission details page.
  it 'indicates as excused in submission details page', priority: "1", test_id: 201937 do
    init_course_with_students
    assignments = []
    3.times do |i|
      assignments << assignment = @course.assignments.create!(title: "Assignment #{i}", submission_types: 'online_text_entry', points_possible: 20)
      assignment.submit_homework(@students[0], {submission_type: 'online_text_entry'}) unless i == 2
    end
    assignments[1].grade_student @students[0], {grade: 10}
    assignments.each do |assignment|
      assignment.grade_student @students[0], {excuse: true}
    end
    user_session @students[0]
    get "/courses/#{@course.id}/assignments"
    index_scores = ff('.score-display')
    # FIX: the block previously took no parameter, so `score` was undefined
    # (NameError) and the per-score assertion could never run correctly.
    index_scores.each do |score|
      expect(score.text).to eq 'Excused'
    end
    3.times do |i|
      get "/courses/#{@course.id}/assignments/#{assignments[i].id}"
      expect(f("#sidebar_content .header").text).to eq 'Excused!'
      get "/courses/#{@course.id}/assignments/#{assignments[i].id}/submissions/#{@students[0].id}"
      expect(f("#content .submission_details .published_grade").text).to eq 'Excused'
    end
  end
  # An excused assignment must not offer a submit link to the student.
  it 'does not allow submissions', priority: "1", test_id: 197048 do
    course_with_student_logged_in
    @assignment = @course.assignments.create!(
      title: 'assignment 1',
      submission_types: 'online_text_entry'
    )
    @assignment.grade_student @student, {excuse: 1}
    get "/courses/#{@course.id}/assignments/#{@assignment.id}"
    expect(f('a.submit_assignment_link')).to be_nil
    expect(f('#assignment_show .assignment-title').text).to eq 'assignment 1'
  end
end
end
|
#
# Copyright (C) 2014 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
require File.expand_path(File.dirname(__FILE__) + '/../common')
require File.expand_path(File.dirname(__FILE__) + '/../helpers/calendar2_common')
describe "calendar2" do
include_context "in-process server selenium tests"
include Calendar2Common
# Enable the scheduler on the default account before every example.
before(:each) do
Account.default.tap do |a|
a.settings[:show_scheduler] = true
a.save!
end
end
context "as a teacher" do
# Every example in this context runs as a logged-in teacher.
before(:each) do
course_with_teacher_logged_in
end
context "event creation" do
# Clicking the "+" (create) button in the top bar should open the edit-event
# dialog. (Only dialog visibility is asserted; nothing is saved.)
it "should create an event by hitting the '+' in the top bar" do
  # FIX: removed the unused local `event_title` — it was never referenced.
  get "/calendar2"
  fj('#create_new_event_link').click
  edit_event_dialog = f('#edit_event_tabs')
  expect(edit_event_dialog).to be_displayed
end
# Creates an event with a location and verifies it in the details popover.
it "should create an event with a location name" do
event_name = 'event with location'
create_middle_day_event(event_name, false, true)
fj('.fc-event:visible').click
expect(fj('.event-details-content:visible')).to include_text('location title')
end
# Walks the full "more options" form to create an event with a location name
# and address, then checks the persisted CalendarEvent.
it 'should create an event with name and address' do
get "/calendar2"
event_title = 'event title'
location_name = 'my house'
location_address = '555 test street'
find_middle_day.click
edit_event_dialog = f('#edit_event_tabs')
expect(edit_event_dialog).to be_displayed
edit_event_form = edit_event_dialog.find('#edit_calendar_event_form')
title = edit_event_form.find('#calendar_event_title')
expect(title).to be_displayed
replace_content(title, event_title)
expect_new_page_load { f('.more_options_link').click }
expect(driver.current_url).to match /start_date=\d\d\d\d-\d\d-\d\d/ # passed in ISO format, not localized
expect(f('.title')).to have_value event_title
expect(f('#editCalendarEventFull .btn-primary').text).to eq "Create Event"
replace_content(f('#calendar_event_location_name'), location_name)
replace_content(f('#calendar_event_location_address'), location_address)
# submit_form makes the spec fragile
wait_for_new_page_load { f('#editCalendarEventFull').submit }
expect(CalendarEvent.last.location_name).to eq location_name
expect(CalendarEvent.last.location_address).to eq location_address
end
# Checks date <input> formatting under the French locale both on initial
# render and after the datepicker sets it, then verifies the saved start_at.
# NOTE(review): "cosistently" typo in the example name — consider renaming.
it 'should cosistently format date <input> value to what datepicker would set it as, even in langs that have funky formatting' do
skip('USE_OPTIMIZED_JS=true') unless ENV['USE_OPTIMIZED_JS']
skip('RAILS_LOAD_ALL_LOCALES=true') unless ENV['RAILS_LOAD_ALL_LOCALES']
@user.locale = 'fr'
@user.save!
get "/calendar2#view_name=month&view_start=2018-02-01"
f('.fc-day[data-date="2018-03-02"]').click
# verify it shows up right from the start
expect(f('.ui-dialog #calendar_event_date').attribute(:value)).to eq('02/03/2018')
expect(fj('.date_field_container:has(#calendar_event_date) .datetime_suggest').text).to eq 'ven. 2 Mar 2018'
# verify it shows up right when set from the datepicker
f('#calendar_event_date + .ui-datepicker-trigger').click
fj('.ui-datepicker-current-day a:contains(2)').click()
expect(f('.ui-dialog #calendar_event_date').attribute(:value)).to eq('ven 2 Mars 2018')
expect(fj('.date_field_container:has(#calendar_event_date) .datetime_suggest').text).to eq 'ven. 2 Mar 2018'
f('#edit_calendar_event_form button[type="submit"]').click
expect(CalendarEvent.last.start_at).to eq Time.utc(2018, 3, 2)
end
# Clicking a syllabus event link must open the event details in the calendar.
it "should go to calendar event modal when a syllabus link is clicked", priority: "1", test_id: 186581 do
event_title = "Test Event"
make_event(title: event_title, context: @course)
# Verifies we are taken to the event in Calendar after clicking on it in Syllabus
get "/courses/#{@course.id}/assignments/syllabus"
fj("a:contains('#{event_title}')").click
wait_for_ajaximations
expect(fj('.event-details-header:visible')).to be_displayed
expect(f('.view_event_link')).to include_text(event_title)
end
# Creates an event from a group's calendar and checks it lands on the group.
it "should be able to create an event for a group" do
group(:context => @course)
get "/groups/#{@group.id}"
expect_new_page_load { f('.event-list-view-calendar').click }
event_name = 'some name'
create_calendar_event(event_name, false, false, false)
event = @group.calendar_events.last
expect(event.title).to eq event_name
end
# With the recurring_calendar_events feature on, duplicating an event twice
# should persist three events spaced one week apart.
it "should create an event that is recurring", priority: "1", test_id: 223510 do
Account.default.enable_feature!(:recurring_calendar_events)
get '/calendar2'
expect(f('#context-list li:nth-of-type(1)').text).to include(@teacher.name)
expect(f('#context-list li:nth-of-type(2)').text).to include(@course.name)
f('.calendar .fc-week .fc-today').click
edit_event_dialog = f('#edit_event_tabs')
expect(edit_event_dialog).to be_displayed
edit_event_form = edit_event_dialog.find('#edit_calendar_event_form')
title = edit_event_form.find('#calendar_event_title')
replace_content(title, "Test Event")
replace_content(f("input[type=text][name= 'start_time']"), "6:00am")
replace_content(f("input[type=text][name= 'end_time']"), "6:00pm")
click_option(f('.context_id'), @course.name)
expect_new_page_load { f('.more_options_link').click }
wait_for_tiny(f(".mce-edit-area"))
expect(f('.title')).to have_value "Test Event"
move_to_click('#duplicate_event')
replace_content(f("input[type=number][name='duplicate_count']"), 2)
expect_new_page_load { f('#editCalendarEventFull').submit }
expect(CalendarEvent.count).to eq(3)
repeat_event = CalendarEvent.where(title: "Test Event")
first_start_date = repeat_event[0].start_at.to_date
expect(repeat_event[1].start_at.to_date).to eq(first_start_date + 1.week)
expect(repeat_event[2].start_at.to_date).to eq(first_start_date + 2.weeks)
end
# Section-specific dates plus one duplication should yield 2 parent events
# with 2 child (section) events each, recurring weekly per section.
it "should create recurring section-specific events" do
Account.default.enable_feature!(:recurring_calendar_events)
section1 = @course.course_sections.first
section2 = @course.course_sections.create!(:name => "other section")
day1 = 1.day.from_now.to_date
day2 = 2.days.from_now.to_date
get '/calendar2'
f('.calendar .fc-week .fc-today').click
f('#edit_event #edit_event_tabs') # using implicit wait for element to be displayed
click_option(f('.context_id'), @course.name)
expect_new_page_load { f('.more_options_link').click }
# tiny can steal focus from one of the date inputs when it initializes
wait_for_tiny(f('#calendar-description'))
f('#use_section_dates').click
f("#section_#{section1.id}_start_date").send_keys(day1.to_s)
f("#section_#{section2.id}_start_date").send_keys(day2.to_s)
ff(".date_start_end_row input.start_time").select(&:displayed?).each do |input|
replace_content(input, "11:30am")
end
ff(".date_start_end_row input.end_time").select(&:displayed?).each do |input|
replace_content(input, "1pm")
end
f('#duplicate_event').click
replace_content(f("input[type=number][name='duplicate_count']"), 1)
form = f('#editCalendarEventFull')
expect_new_page_load{form.submit}
expect(CalendarEvent.count).to eq(6) # 2 parent events each with 2 child events
s1_events = CalendarEvent.where(:context_code => section1.asset_string).
where.not(:parent_calendar_event_id => nil).order(:start_at).to_a
expect(s1_events[1].start_at.to_date).to eq (s1_events[0].start_at.to_date + 1.week)
s2_events = CalendarEvent.where(:context_code => section2.asset_string).
where.not(:parent_calendar_event_id => nil).order(:start_at).to_a
expect(s2_events[1].start_at.to_date).to eq (s2_events[0].start_at.to_date + 1.week)
end
# Creating an event with section-specific dates should render one date row
# per course section, even for a course with many sections.
it "should query for all the sections in a course when creating an event" do
  # FIX: `15.times.with_index { |i| ... }` redundantly re-wrapped the index —
  # `Integer#times` already yields the index directly.
  15.times { |i| add_section("Section #{i}") }
  num_sections = @course.course_sections.count
  get "/courses/#{@course.id}/calendar_events/new"
  wait_for_ajaximations
  wait_for_tiny(f(".mce-edit-area"))
  f('#use_section_dates').click
  num_rows = ff(".show_if_using_sections .row_header").length
  # FIX: compare by value with `eq`; `be_equal` asserts object identity and
  # only passed here by the accident of small-Integer object reuse.
  expect(num_rows).to eq(num_sections)
end
end
end
context "to-do dates" do
# One-time setup: enable the student planner feature and enroll one student
# in a freshly created, offered course.
before :once do
Account.default.enable_feature!(:student_planner)
@course = Course.create!(name: "Course 1")
@course.offer!
@student1 = User.create!(name: 'Student 1')
@course.enroll_student(@student1).accept!
end
# Each example runs in the student's session.
before(:each) do
user_session(@student1)
end
# Planner notes without a course should appear as personal to-do calendar
# events attributed to the student.
context "student to-do event" do
before :once do
@todo_date = Time.zone.now
@student_to_do = @student1.planner_notes.create!(todo_date: @todo_date, title: "Student to do")
end
it "shows student to-do events in the calendar", priority: "1", test_id: 3357313 do
get "/calendar2"
expect(f('.fc-content .fc-title')).to include_text(@student_to_do.title)
end
# The details popover must show the note's timestamp and link to the student.
it "shows the correct date and context for student to-do item in calendar", priority: "1", test_id: 3357315 do
get "/calendar2"
f('.fc-content .fc-title').click
event_content = fj('.event-details-content:visible')
expect(event_content.find_element(:css, '.event-details-timestring').text).
to eq format_time_for_view(@todo_date, :short)
expect(event_content).to contain_link('Student 1')
end
end
# Planner notes created with a course_id should appear as course to-do
# calendar events attributed to the course.
context "course to-do event" do
before :once do
@todo_date = Time.zone.now
@course_to_do = @student1.planner_notes.create!(todo_date: @todo_date, title: "Course to do",
course_id: @course.id)
end
it "shows course to do events in the calendar", priority: "1", test_id: 3357314 do
get "/calendar2"
expect(f('.fc-content .fc-title')).to include_text(@course_to_do.title)
end
# The details popover must show the note's timestamp and link to the course.
it "shows the correct date and context for courseto-do item in calendar", priority: "1", test_id: 3357316 do
get "/calendar2"
f('.fc-content .fc-title').click
event_content = fj('.event-details-content:visible')
expect(event_content.find_element(:css, '.event-details-timestring').text).
to eq format_time_for_view(@todo_date, :short)
expect(event_content).to contain_link('Course 1')
end
end
context "edit to-do event" do
# Shared fixture: one personal planner note for the student.
before :once do
@todo_date = Time.zone.now
@to_do = @student1.planner_notes.create!(todo_date: @todo_date, title: "A new to do")
end
# Toggling the user/course calendar checkboxes should hide and re-show the
# to-do event; reassigning the note to a course moves it to the course
# calendar's checkbox.
it "respects the calendars checkboxes" do
get "/calendar2"
expect(ff('.fc-view-container .fc-content .fc-title').length).to equal(1)
# turn it off
f("span.group_user_#{@student1.id}").click
expect(f('.fc-view-container')).not_to contain_css('.fc-content .fc-title')
# turn it back on
f("span.group_user_#{@student1.id}").click
expect(ff('.fc-view-container .fc-content .fc-title').length).to equal(1)
# click to edit
f(".fc-event-container a.group_user_#{@student1.id}").click
# detail popup is displayed
expect(f('.event-details .event-details-header h2')).to include_text(@to_do.title)
# click edit button
f("button.event_button.edit_event_link").click
expect(f('#planner_note_context')).to be_displayed
# change the calendar
click_option('#planner_note_context', @course.name)
# save
f('#edit_planner_note_form_holder button[type="submit"]').click
wait_for_ajaximations
expect(ff('.fc-view-container .fc-content .fc-title').length).to equal(1)
# turn it off
f("span.group_course_#{@course.id}").click
expect(f('.fc-view-container')).not_to contain_css('.fc-content .fc-title')
# turn it back on
f("span.group_course_#{@course.id}").click
expect(ff('.fc-view-container .fc-content .fc-title').length).to equal(1)
end
it "edits the event in calendar", priority: "1", test_id: 3415211 do
get "/calendar2"
f('.fc-content .fc-title').click
f('.edit_event_link').click
replace_content(f('input[name=title]'), 'new to-do edited')
datetime = @todo_date
datetime = if datetime.to_date().mday() == '15'
datetime.change({day: 20})
else
datetime.change({day: 15})
end
replace_content(f('input[name=date]'), format_date_for_view(datetime, :short))
f('.validated-form-view').submit
refresh_page
f('.fc-content .fc-title').click
event_content = fj('.event-details-content:visible')
expect(event_content.find_element(:css, '.event-details-timestring').text).
to eq format_time_for_view(datetime, :short)
@to_do.reload
expect(format_time_for_view(@to_do.todo_date, :short)).to eq(format_time_for_view(datetime, :short))
end
end
end
end
spec: add wait_for_ajaximations
We need better waiting for the modal to be displayed, so a
wait_for_ajaximations call was added after the button click.
Test Plan:
- Jenkins builds pass
- FSC build passes
fixes: CCI-379
flag = none
Change-Id: Id0d347e84dd96b820a87a810d237e2db32e093c3
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/237040
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
Reviewed-by: Rex Fleischer <360d56e185853f069087eaa5288d1374fc56d750@instructure.com>
Reviewed-by: Derek Bender <d24d8a7ee520e34b1cec38800225d9f36b552df7@instructure.com>
QA-Review: James Butters <4c5a489c9bb72d41f20b3b22aaf4c9b864e32f20@instructure.com>
Product-Review: James Butters <4c5a489c9bb72d41f20b3b22aaf4c9b864e32f20@instructure.com>
#
# Copyright (C) 2014 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
require File.expand_path(File.dirname(__FILE__) + '/../common')
require File.expand_path(File.dirname(__FILE__) + '/../helpers/calendar2_common')
describe "calendar2" do
include_context "in-process server selenium tests"
include Calendar2Common
# Every example in this file runs with the scheduler enabled on the
# default account.
before(:each) do
  Account.default.tap do |a|
    a.settings[:show_scheduler] = true
    a.save!
  end
end
context "as a teacher" do
before(:each) do
course_with_teacher_logged_in
end
context "event creation" do
# Opens the "create event" dialog from the '+' button in the top bar and
# verifies the edit tabs render. Note: this spec only checks that the
# dialog opens; it never saves an event. The unused `event_title` local
# was removed.
it "should create an event by hitting the '+' in the top bar" do
  get "/calendar2"
  fj('#create_new_event_link').click
  edit_event_dialog = f('#edit_event_tabs')
  expect(edit_event_dialog).to be_displayed
end
# Creates a mid-month event with a location and verifies the location
# title appears in the details popover.
it "should create an event with a location name" do
  event_name = 'event with location'
  create_middle_day_event(event_name, false, true)
  fj('.fc-event:visible').click
  expect(fj('.event-details-content:visible')).to include_text('location title')
end
# Fills in the quick-create dialog, jumps to the "more options" full form,
# adds a location name/address, and verifies both persist on the model.
it 'should create an event with name and address' do
  get "/calendar2"
  event_title = 'event title'
  location_name = 'my house'
  location_address = '555 test street'
  find_middle_day.click
  edit_event_dialog = f('#edit_event_tabs')
  expect(edit_event_dialog).to be_displayed
  edit_event_form = edit_event_dialog.find('#edit_calendar_event_form')
  title = edit_event_form.find('#calendar_event_title')
  expect(title).to be_displayed
  replace_content(title, event_title)
  expect_new_page_load { f('.more_options_link').click }
  expect(driver.current_url).to match /start_date=\d\d\d\d-\d\d-\d\d/ # passed in ISO format, not localized
  expect(f('.title')).to have_value event_title
  expect(f('#editCalendarEventFull .btn-primary').text).to eq "Create Event"
  replace_content(f('#calendar_event_location_name'), location_name)
  replace_content(f('#calendar_event_location_address'), location_address)
  # submit_form makes the spec fragile
  wait_for_new_page_load { f('#editCalendarEventFull').submit }
  expect(CalendarEvent.last.location_name).to eq location_name
  expect(CalendarEvent.last.location_address).to eq location_address
end
# Verifies the date <input> value matches what the datepicker would set,
# in a locale (fr) with non-default date formatting.
# Description typo fixed: "cosistently" -> "consistently".
it 'should consistently format date <input> value to what datepicker would set it as, even in langs that have funky formatting' do
  skip('USE_OPTIMIZED_JS=true') unless ENV['USE_OPTIMIZED_JS']
  skip('RAILS_LOAD_ALL_LOCALES=true') unless ENV['RAILS_LOAD_ALL_LOCALES']
  @user.locale = 'fr'
  @user.save!
  get "/calendar2#view_name=month&view_start=2018-02-01"
  f('.fc-day[data-date="2018-03-02"]').click
  # verify it shows up right from the start
  expect(f('.ui-dialog #calendar_event_date').attribute(:value)).to eq('02/03/2018')
  expect(fj('.date_field_container:has(#calendar_event_date) .datetime_suggest').text).to eq 'ven. 2 Mar 2018'
  # verify it shows up right when set from the datepicker
  f('#calendar_event_date + .ui-datepicker-trigger').click
  fj('.ui-datepicker-current-day a:contains(2)').click
  expect(f('.ui-dialog #calendar_event_date').attribute(:value)).to eq('ven 2 Mars 2018')
  expect(fj('.date_field_container:has(#calendar_event_date) .datetime_suggest').text).to eq 'ven. 2 Mar 2018'
  f('#edit_calendar_event_form button[type="submit"]').click
  expect(CalendarEvent.last.start_at).to eq Time.utc(2018, 3, 2)
end
# Clicking an event link on the syllabus page should deep-link into that
# event's details in the calendar view.
it "should go to calendar event modal when a syllabus link is clicked", priority: "1", test_id: 186581 do
  event_title = "Test Event"
  make_event(title: event_title, context: @course)
  # Verifies we are taken to the event in Calendar after clicking on it in Syllabus
  get "/courses/#{@course.id}/assignments/syllabus"
  fj("a:contains('#{event_title}')").click
  wait_for_ajaximations
  expect(fj('.event-details-header:visible')).to be_displayed
  expect(f('.view_event_link')).to include_text(event_title)
end
# Events created from a group's calendar view should be owned by that
# group, not the course or the teacher.
it "should be able to create an event for a group" do
  group(:context => @course)
  get "/groups/#{@group.id}"
  expect_new_page_load { f('.event-list-view-calendar').click }
  event_name = 'some name'
  create_calendar_event(event_name, false, false, false)
  event = @group.calendar_events.last
  expect(event.title).to eq event_name
end
# With the recurring_calendar_events flag on, duplicating an event weekly
# should create the requested number of copies, one week apart.
it "should create an event that is recurring", priority: "1", test_id: 223510 do
  Account.default.enable_feature!(:recurring_calendar_events)
  get '/calendar2'
  expect(f('#context-list li:nth-of-type(1)').text).to include(@teacher.name)
  expect(f('#context-list li:nth-of-type(2)').text).to include(@course.name)
  f('.calendar .fc-week .fc-today').click
  edit_event_dialog = f('#edit_event_tabs')
  expect(edit_event_dialog).to be_displayed
  edit_event_form = edit_event_dialog.find('#edit_calendar_event_form')
  title = edit_event_form.find('#calendar_event_title')
  replace_content(title, "Test Event")
  replace_content(f("input[type=text][name= 'start_time']"), "6:00am")
  replace_content(f("input[type=text][name= 'end_time']"), "6:00pm")
  click_option(f('.context_id'), @course.name)
  expect_new_page_load { f('.more_options_link').click }
  wait_for_tiny(f(".mce-edit-area"))
  expect(f('.title')).to have_value "Test Event"
  move_to_click('#duplicate_event')
  replace_content(f("input[type=number][name='duplicate_count']"), 2)
  expect_new_page_load { f('#editCalendarEventFull').submit }
  # original + 2 duplicates
  expect(CalendarEvent.count).to eq(3)
  repeat_event = CalendarEvent.where(title: "Test Event")
  first_start_date = repeat_event[0].start_at.to_date
  expect(repeat_event[1].start_at.to_date).to eq(first_start_date + 1.week)
  expect(repeat_event[2].start_at.to_date).to eq(first_start_date + 2.weeks)
end
# Section-specific dates combined with weekly duplication: expect 2 parent
# events, each with one child event per section.
it "should create recurring section-specific events" do
  Account.default.enable_feature!(:recurring_calendar_events)
  section1 = @course.course_sections.first
  section2 = @course.course_sections.create!(:name => "other section")
  day1 = 1.day.from_now.to_date
  day2 = 2.days.from_now.to_date
  get '/calendar2'
  f('.calendar .fc-week .fc-today').click
  wait_for_ajaximations
  f('#edit_event #edit_event_tabs') # using implicit wait for element to be displayed
  click_option(f('.context_id'), @course.name)
  expect_new_page_load { f('.more_options_link').click }
  # tiny can steal focus from one of the date inputs when it initializes
  wait_for_tiny(f('#calendar-description'))
  f('#use_section_dates').click
  f("#section_#{section1.id}_start_date").send_keys(day1.to_s)
  f("#section_#{section2.id}_start_date").send_keys(day2.to_s)
  # only the visible rows belong to the sections being edited
  ff(".date_start_end_row input.start_time").select(&:displayed?).each do |input|
    replace_content(input, "11:30am")
  end
  ff(".date_start_end_row input.end_time").select(&:displayed?).each do |input|
    replace_content(input, "1pm")
  end
  f('#duplicate_event').click
  replace_content(f("input[type=number][name='duplicate_count']"), 1)
  form = f('#editCalendarEventFull')
  expect_new_page_load{form.submit}
  expect(CalendarEvent.count).to eq(6) # 2 parent events each with 2 child events
  s1_events = CalendarEvent.where(:context_code => section1.asset_string).
    where.not(:parent_calendar_event_id => nil).order(:start_at).to_a
  expect(s1_events[1].start_at.to_date).to eq (s1_events[0].start_at.to_date + 1.week)
  s2_events = CalendarEvent.where(:context_code => section2.asset_string).
    where.not(:parent_calendar_event_id => nil).order(:start_at).to_a
  expect(s2_events[1].start_at.to_date).to eq (s2_events[0].start_at.to_date + 1.week)
end
# The section-dates table should render one row for every section in the
# course (the default section plus the 15 added here).
it "should query for all the sections in a course when creating an event" do
  # `15.times.with_index { |i| ... }` was redundant -- `times` already
  # yields the index.
  15.times { |i| add_section("Section #{i}") }
  num_sections = @course.course_sections.count
  get "/courses/#{@course.id}/calendar_events/new"
  wait_for_ajaximations
  wait_for_tiny(f(".mce-edit-area"))
  f('#use_section_dates').click
  num_rows = ff(".show_if_using_sections .row_header").length
  # `eq` (value equality) is the right matcher; `be_equal` asserts object
  # identity and only passed because small Integers are interned in MRI.
  expect(num_rows).to eq(num_sections)
end
end
end
context "to-do dates" do
# One course with a single enrolled student; the student_planner feature
# flag must be on for to-do items to appear on the calendar.
before :once do
  Account.default.enable_feature!(:student_planner)
  @course = Course.create!(name: "Course 1")
  @course.offer!
  @student1 = User.create!(name: 'Student 1')
  @course.enroll_student(@student1).accept!
end
# Examples run as the student, not the teacher.
before(:each) do
  user_session(@student1)
end
# A personal (no course) planner note should surface on the student's own
# calendar context.
context "student to-do event" do
  before :once do
    @todo_date = Time.zone.now
    @student_to_do = @student1.planner_notes.create!(todo_date: @todo_date, title: "Student to do")
  end
  it "shows student to-do events in the calendar", priority: "1", test_id: 3357313 do
    get "/calendar2"
    expect(f('.fc-content .fc-title')).to include_text(@student_to_do.title)
  end
  it "shows the correct date and context for student to-do item in calendar", priority: "1", test_id: 3357315 do
    get "/calendar2"
    f('.fc-content .fc-title').click
    event_content = fj('.event-details-content:visible')
    # details popover shows the to-do time and links back to the student
    expect(event_content.find_element(:css, '.event-details-timestring').text).
      to eq format_time_for_view(@todo_date, :short)
    expect(event_content).to contain_link('Student 1')
  end
end
# A planner note attached to a course should surface on that course's
# calendar context rather than the student's personal context.
context "course to-do event" do
  before :once do
    @todo_date = Time.zone.now
    @course_to_do = @student1.planner_notes.create!(todo_date: @todo_date, title: "Course to do",
                                                    course_id: @course.id)
  end
  it "shows course to do events in the calendar", priority: "1", test_id: 3357314 do
    get "/calendar2"
    expect(f('.fc-content .fc-title')).to include_text(@course_to_do.title)
  end
  # Description typo fixed: "courseto-do" -> "course to-do".
  it "shows the correct date and context for course to-do item in calendar", priority: "1", test_id: 3357316 do
    get "/calendar2"
    f('.fc-content .fc-title').click
    event_content = fj('.event-details-content:visible')
    # details popover shows the to-do time and links back to the owning course
    expect(event_content.find_element(:css, '.event-details-timestring').text).
      to eq format_time_for_view(@todo_date, :short)
    expect(event_content).to contain_link('Course 1')
  end
end
context "edit to-do event" do
  before :once do
    @todo_date = Time.zone.now
    @to_do = @student1.planner_notes.create!(todo_date: @todo_date, title: "A new to do")
  end

  # Moving a to-do between calendars should move it between the user and
  # course checkboxes in the sidebar.
  it "respects the calendars checkboxes" do
    get "/calendar2"
    expect(ff('.fc-view-container .fc-content .fc-title').length).to equal(1)
    # turn it off
    f("span.group_user_#{@student1.id}").click
    expect(f('.fc-view-container')).not_to contain_css('.fc-content .fc-title')
    # turn it back on
    f("span.group_user_#{@student1.id}").click
    expect(ff('.fc-view-container .fc-content .fc-title').length).to equal(1)
    # click to edit
    f(".fc-event-container a.group_user_#{@student1.id}").click
    # detail popup is displayed (typo "detial" fixed)
    expect(f('.event-details .event-details-header h2')).to include_text(@to_do.title)
    # click edit button
    f("button.event_button.edit_event_link").click
    expect(f('#planner_note_context')).to be_displayed
    # change the calendar
    click_option('#planner_note_context', @course.name)
    # save
    f('#edit_planner_note_form_holder button[type="submit"]').click
    wait_for_ajaximations
    expect(ff('.fc-view-container .fc-content .fc-title').length).to equal(1)
    # the event now lives on the course calendar: turn it off
    f("span.group_course_#{@course.id}").click
    expect(f('.fc-view-container')).not_to contain_css('.fc-content .fc-title')
    # turn it back on
    f("span.group_course_#{@course.id}").click
    expect(ff('.fc-view-container .fc-content .fc-title').length).to equal(1)
  end

  it "edits the event in calendar", priority: "1", test_id: 3415211 do
    get "/calendar2"
    f('.fc-content .fc-title').click
    f('.edit_event_link').click
    replace_content(f('input[name=title]'), 'new to-do edited')
    # Pick a day-of-month different from the current one so the edit is
    # observable. BUG FIX: Date#mday returns an Integer, so the original
    # comparison against the String '15' was always false and the date was
    # unconditionally set to the 15th -- a no-op when run on the 15th.
    datetime = @todo_date
    datetime = if datetime.to_date.mday == 15
                 datetime.change({ day: 20 })
               else
                 datetime.change({ day: 15 })
               end
    replace_content(f('input[name=date]'), format_date_for_view(datetime, :short))
    f('.validated-form-view').submit
    refresh_page
    f('.fc-content .fc-title').click
    event_content = fj('.event-details-content:visible')
    expect(event_content.find_element(:css, '.event-details-timestring').text).
      to eq format_time_for_view(datetime, :short)
    @to_do.reload
    expect(format_time_for_view(@to_do.todo_date, :short)).to eq(format_time_for_view(datetime, :short))
  end
end
end
end
|
#
# Author:: John Keiser (<jkeiser@chef.io>)
# Author:: Ho-Sheng Hsiao (<hosh@chef.io>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "tmpdir"
require "fileutils"
require "chef/config"
require "chef/json_compat"
require "chef/server_api"
require "support/shared/integration/knife_support"
require "cheffish/rspec/chef_run_support"
module Cheffish
class BasicChefClient
def_delegators :@run_context, :before_notifications
end
end
module IntegrationSupport
include ChefZero::RSpec
def self.included(includer_class)
includer_class.extend(Cheffish::RSpec::ChefRunSupport)
includer_class.extend(ClassMethods)
end
module ClassMethods
include ChefZero::RSpec
def when_the_repository(desc, *tags, &block)
context("when the chef repo #{desc}", *tags) do
include_context "with a chef repo"
module_eval(&block)
end
end
def with_versioned_cookbooks(&block)
context("with versioned cookbooks") do
include_context "with versioned cookbooks"
module_eval(&block)
end
end
end
def api
Chef::ServerAPI.new
end
# Creates +relative_path+ under the current nesting directory and, when a
# block is given, evaluates it with @parent_path pointing at the new
# directory so nested file/directory/symlink calls land inside it.
# BUG FIX: restore @parent_path in an `ensure` -- previously an exception
# raised inside the block left @parent_path pointing at the nested
# directory for the rest of the example.
def directory(relative_path, &block)
  old_parent_path = @parent_path
  @parent_path = path_to(relative_path)
  FileUtils.mkdir_p(@parent_path)
  instance_eval(&block) if block
ensure
  @parent_path = old_parent_path
end
def file(relative_path, contents)
filename = path_to(relative_path)
dir = File.dirname(filename)
FileUtils.mkdir_p(dir) unless dir == "."
File.open(filename, "w") do |file|
raw = case contents
when Hash, Array
Chef::JSONCompat.to_json_pretty(contents)
else
contents
end
file.write(raw)
end
end
def symlink(relative_path, relative_dest)
filename = path_to(relative_path)
dir = File.dirname(filename)
FileUtils.mkdir_p(dir) unless dir == "."
dest_filename = path_to(relative_dest)
File.symlink(dest_filename, filename)
end
# Resolve +relative_path+ against the current nesting directory (set by
# #directory) or, failing that, the example's repository root.
def path_to(relative_path)
  base_dir = @parent_path || @repository_dir
  File.expand_path(relative_path, base_dir)
end
# Build a minimal cookbook metadata.rb body declaring +name+ and +version+,
# with optional extra ruby appended verbatim.
def cb_metadata(name, version, extra_text = "")
  header = "name #{name.inspect}; version #{version.inspect}"
  header + extra_text
end
def cwd(relative_path)
@old_cwd = Dir.pwd
Dir.chdir(path_to(relative_path))
end
# Gives each example a throw-away chef repo in a tmpdir, points
# Chef::Config at it, and tears everything down afterwards.
RSpec.shared_context "with a chef repo" do
  before :each do
    raise "Can only create one directory per test" if @repository_dir
    @repository_dir = Dir.mktmpdir("chef_repo")
    Chef::Config.chef_repo_path = @repository_dir
    # Clear derived *_path settings so they are re-derived from the new
    # chef_repo_path.
    %w{client cookbook data_bag environment node role user}.each do |object_name|
      Chef::Config.delete("#{object_name}_path".to_sym)
    end
  end
  after :each do
    if @repository_dir
      begin
        # NOTE(review): these deletes duplicate the cleanup the before
        # hook already performs for the next example -- likely removable.
        %w{client cookbook data_bag environment node role user}.each do |object_name|
          Chef::Config.delete("#{object_name}_path".to_sym)
        end
        Chef::Config.delete(:chef_repo_path)
        # TODO: "force" actually means "silence all exceptions". this
        # silences a weird permissions error on Windows that we should track
        # down, but for now there's no reason for it to blow up our CI.
        FileUtils.remove_entry_secure(@repository_dir, force = ChefUtils.windows?)
      ensure
        @repository_dir = nil
      end
    end
    # undo any cwd() call made by the example
    Dir.chdir(@old_cwd) if @old_cwd
  end
end
# Versioned cookbooks
RSpec.shared_context "with versioned cookbooks", versioned_cookbooks: true do
before(:each) { Chef::Config[:versioned_cookbooks] = true }
after(:each) { Chef::Config.delete(:versioned_cookbooks) }
end
end
Remove unnecessary resetting of Chef::Config.
Signed-off-by: Pete Higgins <e3b6cda228242c30b711ac17cb264f1dbadfd0b6@peterhiggins.org>
#
# Author:: John Keiser (<jkeiser@chef.io>)
# Author:: Ho-Sheng Hsiao (<hosh@chef.io>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "tmpdir"
require "fileutils"
require "chef/config"
require "chef/json_compat"
require "chef/server_api"
require "support/shared/integration/knife_support"
require "cheffish/rspec/chef_run_support"
module Cheffish
class BasicChefClient
def_delegators :@run_context, :before_notifications
end
end
module IntegrationSupport
include ChefZero::RSpec
def self.included(includer_class)
includer_class.extend(Cheffish::RSpec::ChefRunSupport)
includer_class.extend(ClassMethods)
end
module ClassMethods
include ChefZero::RSpec
def when_the_repository(desc, *tags, &block)
context("when the chef repo #{desc}", *tags) do
include_context "with a chef repo"
module_eval(&block)
end
end
def with_versioned_cookbooks(&block)
context("with versioned cookbooks") do
include_context "with versioned cookbooks"
module_eval(&block)
end
end
end
def api
Chef::ServerAPI.new
end
# Creates +relative_path+ under the current nesting directory and, when a
# block is given, evaluates it with @parent_path pointing at the new
# directory so nested file/directory/symlink calls land inside it.
# BUG FIX: restore @parent_path in an `ensure` -- previously an exception
# raised inside the block left @parent_path pointing at the nested
# directory for the rest of the example.
def directory(relative_path, &block)
  old_parent_path = @parent_path
  @parent_path = path_to(relative_path)
  FileUtils.mkdir_p(@parent_path)
  instance_eval(&block) if block
ensure
  @parent_path = old_parent_path
end
def file(relative_path, contents)
filename = path_to(relative_path)
dir = File.dirname(filename)
FileUtils.mkdir_p(dir) unless dir == "."
File.open(filename, "w") do |file|
raw = case contents
when Hash, Array
Chef::JSONCompat.to_json_pretty(contents)
else
contents
end
file.write(raw)
end
end
def symlink(relative_path, relative_dest)
filename = path_to(relative_path)
dir = File.dirname(filename)
FileUtils.mkdir_p(dir) unless dir == "."
dest_filename = path_to(relative_dest)
File.symlink(dest_filename, filename)
end
# Resolve +relative_path+ against the current nesting directory (set by
# #directory) or, failing that, the example's repository root.
def path_to(relative_path)
  base_dir = @parent_path || @repository_dir
  File.expand_path(relative_path, base_dir)
end
# Build a minimal cookbook metadata.rb body declaring +name+ and +version+,
# with optional extra ruby appended verbatim.
def cb_metadata(name, version, extra_text = "")
  header = "name #{name.inspect}; version #{version.inspect}"
  header + extra_text
end
def cwd(relative_path)
@old_cwd = Dir.pwd
Dir.chdir(path_to(relative_path))
end
RSpec.shared_context "with a chef repo" do
before :each do
raise "Can only create one directory per test" if @repository_dir
@repository_dir = Dir.mktmpdir("chef_repo")
Chef::Config.chef_repo_path = @repository_dir
%w{client cookbook data_bag environment node role user}.each do |object_name|
Chef::Config.delete("#{object_name}_path".to_sym)
end
end
after :each do
if @repository_dir
begin
# TODO: "force" actually means "silence all exceptions". this
# silences a weird permissions error on Windows that we should track
# down, but for now there's no reason for it to blow up our CI.
FileUtils.remove_entry_secure(@repository_dir, force = ChefUtils.windows?)
ensure
@repository_dir = nil
end
end
Dir.chdir(@old_cwd) if @old_cwd
end
end
# Versioned cookbooks
RSpec.shared_context "with versioned cookbooks", versioned_cookbooks: true do
before(:each) { Chef::Config[:versioned_cookbooks] = true }
end
end
|
#
# Cookbook Name:: forum
# Recipe:: default
#
# Copyright 2014, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "apache::ssl"
include_recipe "git"
include_recipe "mysql"

passwords = data_bag_item("forum", "passwords")

# PHP runtime for the forum.
package "php"
package "php-cli"
package "php-mysql"
# FIX: on Debian/Ubuntu the utf8_decode/utf8_encode functions ship in the
# php-xml package; the forum code calls them, so pages fatal without it.
package "php-xml"
package "php-apcu"

apache_module "php7.0"
apache_module "rewrite"

apache_site "default" do
  action [:disable]
end

apache_site "forum.openstreetmap.org" do
  template "apache.erb"
end

directory "/srv/forum.openstreetmap.org" do
  owner "forum"
  group "forum"
  mode 0o755
end

# NOTE(review): the clone URL is plain http; an https URL would avoid the
# unauthenticated first hop -- confirm nothing pins this exact URL before
# changing it.
git "/srv/forum.openstreetmap.org/html/" do
  action :sync
  repository "http://github.com/openstreetmap/openstreetmap-forum.git"
  revision "openstreetmap"
  depth 1
  user "forum"
  group "forum"
end

# cache/ and avatars/ must be writable by the web server.
directory "/srv/forum.openstreetmap.org/html/cache/" do
  owner "www-data"
  group "www-data"
  mode 0o755
end

directory "/srv/forum.openstreetmap.org/html/img/avatars/" do
  owner "www-data"
  group "www-data"
  mode 0o755
end

# Mode 0440 keeps the database password readable only by forum/www-data.
template "/srv/forum.openstreetmap.org/html/config.php" do
  source "config.php.erb"
  owner "forum"
  group "www-data"
  mode 0o440
  variables :passwords => passwords
end

mysql_user "forum@localhost" do
  password passwords["database"]
end

mysql_database "forum" do
  permissions "forum@localhost" => :all
end

# Nightly database dump; mode 0750 keeps the embedded password private.
template "/etc/cron.daily/forum-backup" do
  source "backup.cron.erb"
  owner "root"
  group "root"
  mode 0o750
  variables :passwords => passwords
end
Install php-xml for utf8_decode
#
# Cookbook Name:: forum
# Recipe:: default
#
# Copyright 2014, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "apache::ssl"
include_recipe "git"
include_recipe "mysql"

passwords = data_bag_item("forum", "passwords")

# PHP runtime for the forum; php-xml supplies utf8_decode/utf8_encode on
# Debian/Ubuntu.
package "php"
package "php-cli"
package "php-mysql"
package "php-xml"
package "php-apcu"

apache_module "php7.0"
apache_module "rewrite"

apache_site "default" do
  action [:disable]
end

apache_site "forum.openstreetmap.org" do
  template "apache.erb"
end

directory "/srv/forum.openstreetmap.org" do
  owner "forum"
  group "forum"
  mode 0o755
end

git "/srv/forum.openstreetmap.org/html/" do
  action :sync
  repository "http://github.com/openstreetmap/openstreetmap-forum.git"
  revision "openstreetmap"
  depth 1
  user "forum"
  group "forum"
end

# cache/ and avatars/ must be writable by the web server.
directory "/srv/forum.openstreetmap.org/html/cache/" do
  owner "www-data"
  group "www-data"
  mode 0o755
end

directory "/srv/forum.openstreetmap.org/html/img/avatars/" do
  owner "www-data"
  group "www-data"
  mode 0o755
end

# Mode 0440 keeps the database password readable only by forum/www-data.
template "/srv/forum.openstreetmap.org/html/config.php" do
  source "config.php.erb"
  owner "forum"
  group "www-data"
  mode 0o440
  variables :passwords => passwords
end

mysql_user "forum@localhost" do
  password passwords["database"]
end

mysql_database "forum" do
  permissions "forum@localhost" => :all
end

# Nightly database dump; mode 0750 keeps the embedded password private.
template "/etc/cron.daily/forum-backup" do
  source "backup.cron.erb"
  owner "root"
  group "root"
  mode 0o750
  variables :passwords => passwords
end
|
[Add] Brilliant (0.2.1)
#
# Be sure to run `pod lib lint Brilliant.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for the Brilliant in-app NPS survey SDK.
Pod::Spec.new do |s|
  s.name = "Brilliant"
  s.version = "0.2.1"
  s.summary = "A library for in-app NPS Surveys"

  # This description is used to generate tags and improve search results.
  # * Think: What does it do? Why did you write it? What is the focus?
  # * Try to keep it short, snappy and to the point.
  # * Write the description between the DESC delimiters below.
  # * Finally, don't worry about the indent, CocoaPods strips it!
  s.description = <<-DESC
Brilliant is a library for in-app nps (net promoter score) surveys
                       DESC

  s.homepage = "https://github.com/tomboates/BrilliantSDK"
  s.screenshots = "http://www.brilliant-llc.com/nps.png", "http://www.brilliant-llc.com/comments.png", "http://www.brilliant-llc.com/rating.png"
  s.license = 'MIT'
  s.author = { "Tom Boates" => "tom.boates@me.com" }
  s.source = { :git => "https://github.com/tomboates/BrilliantSDK.git", :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/brilliantnps'

  s.platform = :ios, '8.0'
  s.requires_arc = true

  s.source_files = 'Pod/Classes/**/*'
  # Images and xibs are packaged in a named resource bundle to avoid
  # clashing with host-app resources.
  s.resource_bundle = { 'Brilliant' => ['Assets/*.png', 'Assets/*.xib'] }

  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  s.dependency 'Alamofire', '~> 2.0'
  s.dependency 'ReachabilitySwift', '2.0'
end
|
# Homebrew formula for GNOME Zenity 3.16.3.
class Zenity < Formula
  desc "GTK+ dialog boxes for the command-line"
  homepage "https://live.gnome.org/Zenity"
  url "https://download.gnome.org/sources/zenity/3.16/zenity-3.16.3.tar.xz"
  sha256 "7fe28016fbc5b1fc6d8f730d8eabd5ae2d8b7d67c8bfa0270811ff0c2bfb1eba"

  bottle do
    # NOTE(review): these bottles still use sha1 digests and the old
    # SourceForge root_url -- confirm against current Homebrew, which
    # expects sha256 bottles.
    root_url "https://downloads.sf.net/project/machomebrew/Bottles/x11"
    sha1 "3426d2077fa335d2c215cce4903741c748e578c2" => :yosemite
    sha1 "9b356978fb0b758d0e420d3fdfa54ba538e5663b" => :mavericks
    sha1 "aee2c119bed7d6cf1c844f9670a11b0becb806d2" => :mountain_lion
  end

  depends_on "pkg-config" => :build
  depends_on "intltool" => :build
  depends_on "itstool" => :build
  depends_on "libxml2"
  depends_on "gtk+3"
  depends_on "gnome-doc-utils"
  depends_on "scrollkeeper"

  def install
    # itstool (via libxml2's python bindings) needs to be importable
    ENV.append_path "PYTHONPATH", "#{Formula["libxml2"].opt_lib}/python2.7/site-packages"
    system "./autogen.sh" if build.head?
    system "./configure", "--prefix=#{prefix}"
    system "make"
    system "make", "install"
  end

  test do
    system "#{bin}/zenity", "--help"
  end
end
zenity: update 3.16.3 bottle.
# Homebrew formula for GNOME Zenity 3.16.3 (sha256 bottles on bintray).
class Zenity < Formula
  desc "GTK+ dialog boxes for the command-line"
  homepage "https://live.gnome.org/Zenity"
  url "https://download.gnome.org/sources/zenity/3.16/zenity-3.16.3.tar.xz"
  sha256 "7fe28016fbc5b1fc6d8f730d8eabd5ae2d8b7d67c8bfa0270811ff0c2bfb1eba"

  bottle do
    root_url "https://homebrew.bintray.com/bottles-x11"
    sha256 "105cbdb34aaafe239277e926c7892aa636a20ccb611013c8fe662fd649286423" => :yosemite
    sha256 "12e5b7d3c268e057460a5e3656e5cb02519172e8b5fc05f1fa92457d8c29e095" => :mavericks
    sha256 "d5a679656230f98c0f642d2825a82f4cb8e912f90fe68f6c78ceef993649131c" => :mountain_lion
  end

  depends_on "pkg-config" => :build
  depends_on "intltool" => :build
  depends_on "itstool" => :build
  depends_on "libxml2"
  depends_on "gtk+3"
  depends_on "gnome-doc-utils"
  depends_on "scrollkeeper"

  def install
    # itstool (via libxml2's python bindings) needs to be importable
    ENV.append_path "PYTHONPATH", "#{Formula["libxml2"].opt_lib}/python2.7/site-packages"
    system "./autogen.sh" if build.head?
    system "./configure", "--prefix=#{prefix}"
    system "make"
    system "make", "install"
  end

  test do
    system "#{bin}/zenity", "--help"
  end
end
|
#
# Cookbook:: nginx
# Recipe:: default
#
# Copyright:: 2013, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package "nginx"

# Wrap IPv6 nameservers in brackets as nginx's resolver directive requires.
resolvers = node[:networking][:nameservers].map do |resolver|
  IPAddr.new(resolver).ipv6? ? "[#{resolver}]" : resolver
end

template "/etc/nginx/nginx.conf" do
  source "nginx.conf.erb"
  owner "root"
  group "root"
  mode 0o644
  variables :resolvers => resolvers
end

directory "/var/cache/nginx/fastcgi-cache" do
  owner "www-data"
  group "root"
  mode 0o755
  only_if { node[:nginx][:cache][:fastcgi][:enable] }
end

directory "/var/cache/nginx/proxy-cache" do
  owner "www-data"
  group "root"
  mode 0o755
  only_if { node[:nginx][:cache][:proxy][:enable] }
end

# Temporary Cleanup to remove old levels=1:2 cache after migration to 2:2:2
# NOTE(review): one-off migration shim -- remove once all nodes have
# converged on the new cache layout.
execute "nginx-remove-old-fastcgi-cache" do
  command "/usr/bin/find /var/cache/nginx/fastcgi-cache/ -mindepth 3 -maxdepth 3 -type f -delete"
  ignore_failure true
  only_if { node[:nginx][:cache][:fastcgi][:enable] }
end

# Temporary Cleanup to remove old levels=1:2 cache after migration to 2:2:2
execute "nginx-remove-old-proxy-cache" do
  command "/usr/bin/find /var/cache/nginx/proxy-cache/ -mindepth 3 -maxdepth 3 -type f -delete"
  ignore_failure true
  only_if { node[:nginx][:cache][:proxy][:enable] }
end

service "nginx" do
  action [:enable] # Do not start the service as config may be broken from failed chef run
  supports :status => true, :restart => true, :reload => true
  subscribes :restart, "template[/etc/nginx/nginx.conf]"
end

munin_plugin_conf "nginx" do
  template "munin.erb"
end

# presumably required by the munin nginx plugins below -- verify
package "libwww-perl"

munin_plugin "nginx_request"
munin_plugin "nginx_status"
Revert "Temporary nginx cache purge after depth change"
This reverts commit cb90c668918b4f95a83662dc08c584bc5f5b62e2.
#
# Cookbook:: nginx
# Recipe:: default
#
# Copyright:: 2013, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Install nginx and manage its base configuration.
package "nginx"

# Build the resolver list for nginx.conf; nginx requires IPv6 nameserver
# addresses to be wrapped in square brackets.
resolvers = node[:networking][:nameservers].map do |resolver|
  IPAddr.new(resolver).ipv6? ? "[#{resolver}]" : resolver
end

# Main nginx configuration, re-rendered when the resolver list changes.
template "/etc/nginx/nginx.conf" do
  source "nginx.conf.erb"
  owner "root"
  group "root"
  mode 0o644
  variables :resolvers => resolvers
end

# Cache directory for FastCGI responses (only when enabled on the node).
directory "/var/cache/nginx/fastcgi-cache" do
  owner "www-data"
  group "root"
  mode 0o755
  only_if { node[:nginx][:cache][:fastcgi][:enable] }
end

# Cache directory for proxied responses (only when enabled on the node).
directory "/var/cache/nginx/proxy-cache" do
  owner "www-data"
  group "root"
  mode 0o755
  only_if { node[:nginx][:cache][:proxy][:enable] }
end

service "nginx" do
  action [:enable] # Do not start the service as config may be broken from failed chef run
  supports :status => true, :restart => true, :reload => true
  subscribes :restart, "template[/etc/nginx/nginx.conf]"
end

# Munin monitoring; the nginx plugins need libwww-perl to scrape the status page.
munin_plugin_conf "nginx" do
  template "munin.erb"
end
package "libwww-perl"
munin_plugin "nginx_request"
munin_plugin "nginx_status"
|
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)

# Specs for Enumerable#take_while (added in Ruby 1.8.7): yields elements
# to the block until it first returns a falsy value.
describe "Enumerable#take_while" do
  ruby_version_is '1.8.7' do
    before :each do
      @enum = EnumerableSpecs::Numerous.new(3, 2, 1, :go)
    end

    it "returns an Enumerator if no block given" do
      @enum.take_while.should be_an_instance_of(enumerator_class)
    end

    it "returns no/all elements for {true/false} block" do
      @enum.take_while{true}.should == @enum.to_a
      @enum.take_while{false}.should == []
    end

    it "accepts returns other than true/false" do
      # Any truthy value keeps taking; nil/false stops.
      @enum.take_while{1}.should == @enum.to_a
      @enum.take_while{nil}.should == []
    end

    it "passes elements to the block until the first false" do
      a = []
      @enum.take_while{|obj| (a << obj).size < 3}.should == [3, 2]
      a.should == [3, 2, 1]
    end

    it "will only go through what's needed" do
      # `break` from the block must stop iteration early and become the
      # return value of take_while itself.
      enum = EnumerableSpecs::EachCounter.new(4, 3, 2, 1, :stop)
      enum.take_while { |x|
        break 42 if x == 3
        true
      }.should == 42
      enum.times_yielded.should == 2
    end

    it "doesn't return self when it could" do
      # Even if every element is taken, a new array must be returned.
      a = [1,2,3]
      a.take_while{true}.should_not equal(a)
    end
  end
end
Add specs for Enumerable#take_while
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)

# Specs for Enumerable#take_while (added in Ruby 1.8.7): yields elements
# to the block until it first returns a falsy value.
describe "Enumerable#take_while" do
  ruby_version_is '1.8.7' do
    before :each do
      @enum = EnumerableSpecs::Numerous.new(3, 2, 1, :go)
    end

    it "returns an Enumerator if no block given" do
      @enum.take_while.should be_an_instance_of(enumerator_class)
    end

    it "returns no/all elements for {true/false} block" do
      @enum.take_while{true}.should == @enum.to_a
      @enum.take_while{false}.should == []
    end

    it "accepts returns other than true/false" do
      # Any truthy value keeps taking; nil/false stops.
      @enum.take_while{1}.should == @enum.to_a
      @enum.take_while{nil}.should == []
    end

    it "passes elements to the block until the first false" do
      a = []
      @enum.take_while{|obj| (a << obj).size < 3}.should == [3, 2]
      a.should == [3, 2, 1]
    end

    it "will only go through what's needed" do
      # `break` from the block must stop iteration early and become the
      # return value of take_while itself.
      enum = EnumerableSpecs::EachCounter.new(4, 3, 2, 1, :stop)
      enum.take_while { |x|
        break 42 if x == 3
        true
      }.should == 42
      enum.times_yielded.should == 2
    end

    it "doesn't return self when it could" do
      # Even if every element is taken, a new array must be returned.
      a = [1,2,3]
      a.take_while{true}.should_not equal(a)
    end

    # Pre-1.9: multi-value yields arrive as arrays.
    ruby_version_is "" ... "1.9" do
      it "calls the block with an array when yielded with multiple arguments" do
        yields = []
        EnumerableSpecs::YieldsMixed.new.take_while{ |v| yields << v }
        yields.should == [1, [2], [3, 4], [5, 6, 7], [8, 9], nil, []]
      end
    end

    # 1.9+: a single block parameter receives only the first yielded value
    # for splatted yields.
    ruby_version_is "1.9" do
      it "calls the block with initial args when yielded with multiple arguments" do
        yields = []
        EnumerableSpecs::YieldsMixed.new.take_while{ |v| yields << v }
        yields.should == [1, [2], 3, 5, [8, 9], nil, []]
      end
    end
  end
end
|
require File.dirname(__FILE__) + '/../../spec_helper'

ruby_version_is "1.9" do
  require File.dirname(__FILE__) + '/../../shared/enumerator/with_index'

  # Specs for Enumerator#with_index (1.9+). @enum comes from the shared
  # :enum_with_index setup and enumerates 1..4.
  describe "Enumerator#with_index" do
    it_behaves_like(:enum_with_index, :with_index)

    it "accepts an optional argument when given a block" do
      lambda do
        @enum.with_index(1) { |f| f}
      end.should_not raise_error(ArgumentError)
    end

    it "accepts an optional argument when not given a block" do
      lambda do
        @enum.with_index(1)
      end.should_not raise_error(ArgumentError)
    end

    it "numbers indices from the given index when given an offset but no block" do
      @enum.with_index(1).to_a.should == [[1,1],[2,2],[3,3],[4,4]]
    end

    it "numbers indices from the given index when given an offset and block" do
      acc = []
      @enum.with_index(1) {|e,i| acc << [e,i] }
      acc.should == [[1,1],[2,2],[3,3],[4,4]]
    end

    it "coerces the given numeric argument to an Integer" do
      # 1.678 is truncated via to_int, so counting still starts at 1.
      @enum.with_index(1.678).to_a.should == [[1,1],[2,2],[3,3],[4,4]]
    end
  end
end
added specs to test Enumerator#with_index with non-integer arguments
require File.dirname(__FILE__) + '/../../spec_helper'

ruby_version_is "1.9" do
  require File.dirname(__FILE__) + '/../../shared/enumerator/with_index'

  # Specs for Enumerator#with_index (1.9+). @enum comes from the shared
  # :enum_with_index setup and enumerates 1..4.
  describe "Enumerator#with_index" do
    it_behaves_like(:enum_with_index, :with_index)

    it "accepts an optional argument when given a block" do
      lambda do
        @enum.with_index(1) { |f| f}
      end.should_not raise_error(ArgumentError)
    end

    it "accepts an optional argument when not given a block" do
      lambda do
        @enum.with_index(1)
      end.should_not raise_error(ArgumentError)
    end

    it "numbers indices from the given index when given an offset but no block" do
      @enum.with_index(1).to_a.should == [[1,1],[2,2],[3,3],[4,4]]
    end

    it "numbers indices from the given index when given an offset and block" do
      acc = []
      @enum.with_index(1) {|e,i| acc << [e,i] }
      acc.should == [[1,1],[2,2],[3,3],[4,4]]
    end

    it "raises a TypeError when a non numeric argument is given" do
      # Strings do not respond to to_int, so the offset must be rejected.
      lambda do
        @enum.with_index('1') {|o, i| i}
      end.should raise_error(TypeError)
    end

    it "coerces the given numeric argument to an Integer" do
      # 1.678 is truncated via to_int, so counting still starts at 1.
      @enum.with_index(1.678).to_a.should == [[1,1],[2,2],[3,3],[4,4]]
    end
  end
end
|
# Loads test data from YAML files, merging a named entry over a mandatory
# DEFAULT entry.
class DataEngine
  # logger: optional object responding to #debug; when nil, logging is skipped.
  def initialize(logger=nil)
    @logger = logger
  end

  # Parses +filename+ as YAML and returns the parsed object.
  # Raises a descriptive RuntimeError when the file is malformed or missing.
  def load_data_from_yml(filename)
    @logger.debug "Loading data from file #{filename}" if @logger
    begin
      data = YAML.load_file(filename)
      return data
    rescue Psych::SyntaxError => e
      # The trailing backslash on every line is required so the whole message
      # (including ORIGINAL MESSAGE) forms one string literal passed to raise.
      # Previously the last line lacked the continuation, making it a dead,
      # unreachable expression and truncating the raised message.
      raise "ERROR: Tally could not parse this YML file!\n"\
            "\tMY GUESS: You have a syntax error in your .yml file\n"\
            "\tYML_FILE: #{filename}\n"\
            "\tNOTE: Could be from inherited page YML file\n"\
            "\tORIGINAL MESSAGE: #{e.message}\n\n"
    rescue Errno::ENOENT => e
      raise "ERROR: Tally could not parse this YML file!\n"\
            "\tMY FIRST GUESS: The path to this YML file is incorrect.\n"\
            "\tMY SECOND GUESS: The yml file is missing.\n"\
            "\tYML_FILE: #{filename}\n"\
            "\tORIGINAL MESSAGE: #{e.message}\n\n"
    end
  end

  # Returns the +data_name+ entry under +data_type+, merged on top of the
  # mandatory DEFAULT entry. Names are normalized to UPPER_SNAKE_CASE.
  def get_yml_data(data_type, filename, data_name)
    data_name = data_name.upcase.gsub(" ","_")
    raw_data = load_data_from_yml(filename)[data_type]
    data = raw_data[data_name]
    default_data = raw_data["DEFAULT"]
    raise "DEFAULT key is empty for #{filename}!" if default_data == nil
    # merge! mutates the freshly-loaded DEFAULT hash; safe because the file
    # is re-read on every call.
    default_data = default_data.merge!(data) if data
    return default_data
  end

  def get_input_data(filename, data_name)
    get_yml_data("INPUT_DATA", filename, data_name)
  end

  def get_expected_data(filename, data_name)
    get_yml_data("EXPECTED_DATA", filename, data_name)
  end
end
Fixed Tally references
# Loads test data from YAML files, merging a named entry over a mandatory
# DEFAULT entry.
class DataEngine
  # logger: optional object responding to #debug; when nil, logging is skipped.
  def initialize(logger=nil)
    @logger = logger
  end

  # Parses +filename+ as YAML and returns the parsed object.
  # Raises a descriptive RuntimeError when the file is malformed or missing.
  def load_data_from_yml(filename)
    @logger.debug "Loading data from file #{filename}" if @logger
    begin
      data = YAML.load_file(filename)
      return data
    rescue Psych::SyntaxError => e
      # The trailing backslash on every line is required so the whole message
      # (including ORIGINAL MESSAGE) forms one string literal passed to raise.
      # Previously the last line lacked the continuation, making it a dead,
      # unreachable expression and truncating the raised message.
      raise "ERROR: OZ could not parse this YML file!\n"\
            "\tMY GUESS: You have a syntax error in your .yml file\n"\
            "\tYML_FILE: #{filename}\n"\
            "\tNOTE: Could be from inherited page YML file\n"\
            "\tORIGINAL MESSAGE: #{e.message}\n\n"
    rescue Errno::ENOENT => e
      raise "ERROR: OZ could not parse this YML file!\n"\
            "\tMY FIRST GUESS: The path to this YML file is incorrect.\n"\
            "\tMY SECOND GUESS: The yml file is missing.\n"\
            "\tYML_FILE: #{filename}\n"\
            "\tORIGINAL MESSAGE: #{e.message}\n\n"
    end
  end

  # Returns the +data_name+ entry under +data_type+, merged on top of the
  # mandatory DEFAULT entry. Names are normalized to UPPER_SNAKE_CASE.
  def get_yml_data(data_type, filename, data_name)
    data_name = data_name.upcase.gsub(" ","_")
    raw_data = load_data_from_yml(filename)[data_type]
    data = raw_data[data_name]
    default_data = raw_data["DEFAULT"]
    raise "DEFAULT key is empty for #{filename}!" if default_data == nil
    # merge! mutates the freshly-loaded DEFAULT hash; safe because the file
    # is re-read on every call.
    default_data = default_data.merge!(data) if data
    return default_data
  end

  def get_input_data(filename, data_name)
    get_yml_data("INPUT_DATA", filename, data_name)
  end

  def get_expected_data(filename, data_name)
    get_yml_data("EXPECTED_DATA", filename, data_name)
  end
end
# -*- coding: utf-8 -*-
require "gtk2"

# Main mikutter window: post boxes stacked on top, timeline panes in the
# middle, and a status bar at the bottom.
class Gtk::MikutterWindow < Gtk::Window

  attr_reader :panes, :statusbar

  def initialize(*args)
    super
    @container = Gtk::VBox.new(false, 0)
    @panes = Gtk::HBox.new(true, 0)
    @postboxes = Gtk::VBox.new(false, 0)
    add(@container.closeup(@postboxes).pack_start(@panes).closeup(create_statusbar))
  end

  # Adds a post box for +i_postbox+ and focuses its input widget.
  # Note: the fixed :postboxstorage/:delegate_other keys are merged last,
  # so caller-supplied options cannot override them here.
  def add_postbox(i_postbox)
    postbox = Gtk::PostBox.new(i_postbox.poster || Service.primary, (i_postbox.options||{}).merge(:postboxstorage => @postboxes, :delegate_other => true))
    @postboxes.pack_start(postbox)
    set_focus(postbox.post)
    postbox.show_all end

  # def set_focus(widget)
  #   if widget.is_a? Gtk::TimeLine
  #   end
  # end

  private

  # Builds and returns the status bar.
  # ==== Return
  # Gtk::Statusbar
  def create_statusbar
    statusbar = Gtk::Statusbar.new
    notice "statusbar: context id: #{statusbar.get_context_id("system")}"
    statusbar.push(statusbar.get_context_id("system"), "Twitterに新しい視野を、mikutter。")
    status_button = Gtk::Button.new.add(Gtk::WebIcon.new(MUI::Skin.get("settings.png"), 16, 16))
    status_button.relief = Gtk::RELIEF_NONE
    status_button.ssc(:clicked) {
      Plugin.call(:gui_setting) }
    @statusbar = statusbar.closeup(status_button) end
end
windowに追加するpostboxのオプションを上書きできるようにした
git-svn-id: e7711ecd44ebdb2de8d903bf0a4f221b6d4573cb@876 03aab468-d3d2-4883-8b12-f661bbf03fa8
# -*- coding: utf-8 -*-
require "gtk2"

# Main mikutter window: post boxes stacked on top, timeline panes in the
# middle, and a status bar at the bottom.
class Gtk::MikutterWindow < Gtk::Window

  attr_reader :panes, :statusbar

  def initialize(*args)
    super
    @container = Gtk::VBox.new(false, 0)
    @panes = Gtk::HBox.new(true, 0)
    @postboxes = Gtk::VBox.new(false, 0)
    add(@container.closeup(@postboxes).pack_start(@panes).closeup(create_statusbar))
  end

  # Adds a post box for +i_postbox+ and focuses its input widget.
  # Note: defaults are merged first, so caller-supplied options in
  # i_postbox.options take precedence over :postboxstorage/:delegate_other.
  def add_postbox(i_postbox)
    postbox = Gtk::PostBox.new(i_postbox.poster || Service.primary, {postboxstorage: @postboxes, delegate_other: true}.merge(i_postbox.options||{}))
    @postboxes.pack_start(postbox)
    set_focus(postbox.post)
    postbox.show_all end

  # def set_focus(widget)
  #   if widget.is_a? Gtk::TimeLine
  #   end
  # end

  private

  # Builds and returns the status bar.
  # ==== Return
  # Gtk::Statusbar
  def create_statusbar
    statusbar = Gtk::Statusbar.new
    notice "statusbar: context id: #{statusbar.get_context_id("system")}"
    statusbar.push(statusbar.get_context_id("system"), "Twitterに新しい視野を、mikutter。")
    status_button = Gtk::Button.new.add(Gtk::WebIcon.new(MUI::Skin.get("settings.png"), 16, 16))
    status_button.relief = Gtk::RELIEF_NONE
    status_button.ssc(:clicked) {
      Plugin.call(:gui_setting) }
    @statusbar = statusbar.closeup(status_button) end
end
# GohuFont patched with Nerd Font glyphs (mono-spaced variants), from the
# ryanoasis/nerd-fonts release archive.
cask 'font-gohu-nerd-font-mono' do
  version '1.1.0'
  sha256 'ad9ca3eeefb0dcca733b31df1f61e944f9428290546a939d8ba3cf70fe4388b6'

  url "https://github.com/ryanoasis/nerd-fonts/releases/download/v#{version}/Gohu.zip"
  appcast 'https://github.com/ryanoasis/nerd-fonts/releases.atom',
          checkpoint: '109f18cfd453156e38ffac165683bcfc2745e0c8dc07bd379a7f9ea19d0cbe41'
  name 'GohuFontBold Nerd Font (Gohu)'
  homepage 'https://github.com/ryanoasis/nerd-fonts'

  font 'GohuFont-Bold Nerd Font Complete Mono.ttf'
  font 'GohuFont-Medium Nerd Font Complete Mono.ttf'
end
Update font-gohu-nerd-font-mono to 1.2.0 (#1448)
# GohuFont patched with Nerd Font glyphs (mono-spaced variants), from the
# ryanoasis/nerd-fonts release archive.
cask 'font-gohu-nerd-font-mono' do
  version '1.2.0'
  sha256 '3c2cc94365093054b7b32638dfa1cba0ea09e02a8abd28395612de4c03fd1f4b'

  url "https://github.com/ryanoasis/nerd-fonts/releases/download/v#{version}/Gohu.zip"
  appcast 'https://github.com/ryanoasis/nerd-fonts/releases.atom',
          checkpoint: '7dedec17cde17542418131f94e739265707a4abe9d0773287d14f175c02325f7'
  name 'GohuFontBold Nerd Font (Gohu)'
  homepage 'https://github.com/ryanoasis/nerd-fonts'

  font 'GohuFont-Bold Nerd Font Complete Mono.ttf'
  font 'GohuFont-Medium Nerd Font Complete Mono.ttf'
end
|
Add Microsoft Remote Desktop v10.5.1 (#100818)
* Add Microsoft Remote Desktop v10.5.1
* Fix uninstall in Microsoft Remote Desktop Cask
Co-authored-by: Sam Cleathero <f16bed56189e249fe4ca8ed10a1ecae60e8ceac0@webstraxt.com>
# Microsoft Remote Desktop client, installed from Microsoft's Office CDN
# pkg (the vendor URL differs from the homepage, hence `verified:`).
cask "microsoft-remote-desktop" do
  version "10.5.1"
  sha256 "2b99e8980b8c57a4ab60bc2cde9b671afcdaec42e3148dc8b0df04e68b8c806f"

  url "https://officecdn-microsoft-com.akamaized.net/pr/C1297A47-86C4-4C1F-97FA-950631F94777/MacAutoupdate/Microsoft_Remote_Desktop_#{version}_installer.pkg",
      verified: "officecdn-microsoft-com.akamaized.net/pr/C1297A47-86C4-4C1F-97FA-950631F94777/MacAutoupdate/"
  name "Microsoft Remote Desktop"
  desc "Remote desktop client"
  homepage "https://docs.microsoft.com/en-us/windows-server/remote/remote-desktop-services/clients/remote-desktop-mac"

  auto_updates true
  depends_on macos: ">= :high_sierra"

  pkg "Microsoft_Remote_Desktop_#{version}_installer.pkg"

  # Also stop the Microsoft AutoUpdate agent the pkg installs alongside.
  uninstall pkgutil:   [
    "com.microsoft.rdc.macos",
  ],
            launchctl: [
    "com.microsoft.update.agent",
  ],
            quit:      [
    "com.microsoft.autoupdate.fba",
  ]

  zap trash: [
    "~/Library/Application Scripts/com.microsoft.rdc.macos",
    "~/Library/Containers/com.microsoft.rdc.macos",
    "~/Library/Group Containers/UBF8T346G9.com.microsoft.rdc",
  ]
end
|
# Ukrainian keyboard layout bundle, installed into the user's
# ~/Library/Keyboard Layouts directory.
cask "ukrainian-unicode-layout" do
  version "1.0.0"
  sha256 "d0f6d760c640a7e27acfa69f8d7094e308962626216494415227e3bc1c6e8c5a"

  url "https://github.com/korzhyk/macOS-Ukrainian-Unicode-Layout/archive/#{version}.tar.gz"
  appcast "https://github.com/korzhyk/macOS-Ukrainian-Unicode-Layout/releases.atom"
  name "Ukrainian Unicode Layout"
  homepage "https://github.com/korzhyk/OSX-Ukrainian-Unicode-Layout"

  artifact "macOS-Ukrainian-Unicode-Layout-#{version}",
           target: "#{ENV["HOME"]}/Library/Keyboard Layouts/Ukrainian-Unicode-Layout.bundle"
end
ukrainian-unicode-layout: fix RuboCop style.
See https://github.com/Homebrew/brew/pull/7867.
# Ukrainian keyboard layout bundle, installed into the user's
# ~/Library/Keyboard Layouts directory.
cask "ukrainian-unicode-layout" do
  version "1.0.0"
  sha256 "d0f6d760c640a7e27acfa69f8d7094e308962626216494415227e3bc1c6e8c5a"

  url "https://github.com/korzhyk/macOS-Ukrainian-Unicode-Layout/archive/#{version}.tar.gz"
  appcast "https://github.com/korzhyk/macOS-Ukrainian-Unicode-Layout/releases.atom"
  name "Ukrainian Unicode Layout"
  homepage "https://github.com/korzhyk/OSX-Ukrainian-Unicode-Layout"

  artifact "macOS-Ukrainian-Unicode-Layout-#{version}",
           target: "#{ENV["HOME"]}/Library/Keyboard Layouts/Ukrainian-Unicode-Layout.bundle"
end
|
# Migration introducing named Locations (with bounding boxes) and linking
# existing free-text Observation#where values to them.
class CreateLocations < ActiveRecord::Migration
  # Attaches +location+ to every observation whose free-text `where`
  # matches, clearing the now-redundant text field.
  def self.update_observations_by_where(location, where)
    if where
      observations = Observation.find_all_by_where(where)
      for o in observations
        unless o.location_id
          o.location = location
          o.where = nil
          o.save
        end
      end
    end
  end

  def self.up
    create_table :locations do |t|
      t.column :created, :datetime
      t.column :modified, :datetime
      t.column :user_id, :integer, :default => 0, :null => false
      t.column :version, :integer, :default => 0, :null => false
      t.column :display_name, :string, :limit => 200
      t.column :notes, :text
      t.column :north, :float # Interface enforces that north >= south
      t.column :south, :float
      t.column :west, :float # Area is assumed to be between west and east
      t.column :east, :float # including any rollover
      t.column :high, :float # Stored in meters, but interface should allow for ft
      t.column :low, :float # Interface should enforce that high >= low
    end
    add_column :observations, "location_id", :integer
    add_column :observations, "is_collection_location", :boolean, :default => true, :null => false

    # Versioned history table mirroring locations.
    create_table :past_locations, :force => true do |t|
      t.column :location_id, :integer
      t.column :created, :datetime
      t.column :modified, :datetime
      t.column :user_id, :integer, :default => 0, :null => false
      t.column :version, :integer, :default => 0, :null => false
      t.column :display_name, :string, :limit => 200
      t.column :notes, :text
      t.column :north, :float # Interface enforces that north >= south
      t.column :south, :float
      t.column :west, :float # Area is assumed to be between west and east
      t.column :east, :float # including any rollover
      t.column :high, :float # Stored in meters, but interface should allow for ft
      t.column :low, :float # Interface should enforce that high >= low
    end

    # Seed a handful of well-known locations and link matching observations.
    now = Time.now
    for loc_attrs in [{
      :display_name => "Albion, Mendocino Co., California, USA",
      :north => 39.32,
      :west => -123.82,
      :east => -123.74,
      :south => 39.21,
      :high => 100.0,
      :low => 0.0
    }, {
      :display_name => "Burbank, Los Angeles Co., California, USA",
      :north => 34.22,
      :west => -118.37,
      :east => -118.29,
      :south => 34.15,
      :high => 294.0,
      :low => 148.0
    }, {
      :display_name => "\"Mitrula Marsh\", Sand Lake, Bassetts, Yuba Co., California, USA",
      :north => 39.7184,
      :west => -120.687,
      :east => -120.487,
      :south => 39.5184
    }, {
      :display_name => "Salt Point State Park, Sonoma Co., California, USA",
      :north => 38.5923,
      :west => -123.343,
      :east => -123.283,
      :south => 38.5584,
      :high => 100.0,
      :low => 0.0
    }, {
      :display_name => "Gualala, Mendocino Co., California, USA",
      :north => 38.7868,
      :west => -123.557,
      :east => -123.519,
      :south => 38.7597,
      :high => 100.0,
      :low => 0.0
    }, {
      :display_name => "Elgin County, Ontario, Canada",
      :north => 42.876,
      :west => -81.8179,
      :east => -80.8044,
      :south => 42.4701,
    }, {
      :display_name => 'Brett Woods, Fairfield Co., Connecticut, USA',
      :north => 41.2125,
      :west => -73.3295,
      :east => -73.3215,
      :south => 41.1939
    }, {
      :display_name => 'Point Reyes National Seashore, Marin Co., California, USA',
      :north => 38.2441,
      :west => -123.0256,
      :east => -122.7092,
      :south => 37.9255
    }, {
      :display_name => 'Howarth Park, Santa Rosa, Sonoma Co., California, USA',
      :north => 38.4582,
      :west => -122.6712,
      :east => -122.6632,
      :south => 38.4496
    }]
      loc = Location.new(loc_attrs)
      loc.user_id = 1
      loc.created = now
      loc.modified = now
      if loc.save
        print "Created #{loc.display_name}\n"
        update_observations_by_where(loc, loc.display_name)
      else
        # BUG FIX: loc_attrs is a Hash and has no #display_name method;
        # using the hash key avoids a NoMethodError on the failure path.
        print "Unable to create #{loc_attrs[:display_name]}\n"
      end
    end
  end

  def self.down
    # Restore the free-text field for observations that were linked.
    for o in Observation.find(:all, :conditions => "`where` is NULL")
      o.where = o.place_name
      o.save
    end
    drop_table :past_locations
    remove_column :observations, "location_id"
    remove_column :observations, "is_collection_location"
    drop_table :locations
  end
end
More location initialization data
# Migration introducing named Locations (with bounding boxes) and linking
# existing free-text Observation#where values (including known spelling
# variants) to them.
class CreateLocations < ActiveRecord::Migration
  # Attaches +location+ to every observation whose free-text `where`
  # matches, clearing the now-redundant text field.
  def self.update_observations_by_where(location, where)
    if where
      observations = Observation.find_all_by_where(where)
      for o in observations
        unless o.location_id
          o.location = location
          o.where = nil
          o.save
        end
      end
    end
  end

  def self.up
    create_table :locations do |t|
      t.column :created, :datetime
      t.column :modified, :datetime
      t.column :user_id, :integer, :default => 0, :null => false
      t.column :version, :integer, :default => 0, :null => false
      t.column :display_name, :string, :limit => 200
      t.column :notes, :text
      t.column :north, :float # Interface enforces that north >= south
      t.column :south, :float
      t.column :west, :float # Area is assumed to be between west and east
      t.column :east, :float # including any rollover
      t.column :high, :float # Stored in meters, but interface should allow for ft
      t.column :low, :float # Interface should enforce that high >= low
    end
    add_column :observations, "location_id", :integer
    add_column :observations, "is_collection_location", :boolean, :default => true, :null => false

    # Versioned history table mirroring locations.
    create_table :past_locations, :force => true do |t|
      t.column :location_id, :integer
      t.column :created, :datetime
      t.column :modified, :datetime
      t.column :user_id, :integer, :default => 0, :null => false
      t.column :version, :integer, :default => 0, :null => false
      t.column :display_name, :string, :limit => 200
      t.column :notes, :text
      t.column :north, :float # Interface enforces that north >= south
      t.column :south, :float
      t.column :west, :float # Area is assumed to be between west and east
      t.column :east, :float # including any rollover
      t.column :high, :float # Stored in meters, but interface should allow for ft
      t.column :low, :float # Interface should enforce that high >= low
    end

    # Seed locations; each pair is [attributes, alternate free-text names
    # under which existing observations may have recorded the place].
    now = Time.now
    for loc_attrs, alt_names in [[{
      :display_name => "Albion, Mendocino Co., California, USA",
      :north => 39.32,
      :west => -123.82,
      :east => -123.74,
      :south => 39.21,
      :high => 100.0,
      :low => 0.0
    }, ["Albion, Mendocino Co., CA",
        "Albion, CA",
        "Albion, Mendocino County, California",
        "Albion, California"]],
    [{
      :display_name => "Burbank, Los Angeles Co., California, USA",
      :north => 34.22,
      :west => -118.37,
      :east => -118.29,
      :south => 34.15,
      :high => 294.0,
      :low => 148.0
    }, ["Burbank, California"]],
    [{
      :display_name => "\"Mitrula Marsh\", Sand Lake, Bassetts, Yuba Co., California, USA",
      :north => 39.7184,
      :west => -120.687,
      :east => -120.487,
      :south => 39.5184
    }, [
      '"Mitrula Marsh", Sand Lake, Bassetts, Yuba Co',
      '"Mitrula Marsh", Sand Lake, Bassetts, Yuba Co.']],
    [{
      :display_name => "Salt Point State Park, Sonoma Co., California, USA",
      :north => 38.5923,
      :west => -123.343,
      :east => -123.283,
      :south => 38.5584,
      :high => 100.0,
      :low => 0.0
    }, ["Salt Point State Park, CA",
        "Salt Point State Park",
        "Salt Point State Park, Sonoma County, CA",
        "Salt Point State Park, Sonoma County, California",
        "Salt Point State Pk, Ca."]],
    [{
      :display_name => "Gualala, Mendocino Co., California, USA",
      :north => 38.7868,
      :west => -123.557,
      :east => -123.519,
      :south => 38.7597,
      :high => 100.0,
      :low => 0.0
    }, ["Gualala, California"]],
    [{
      :display_name => "Elgin County, Ontario, Canada",
      :north => 42.876,
      :west => -81.8179,
      :east => -80.8044,
      :south => 42.4701,
    }, ["Elgin County, Ontario"]],
    [{
      :display_name => 'Brett Woods, Fairfield Co., Connecticut, USA',
      :north => 41.2125,
      :west => -73.3295,
      :east => -73.3215,
      :south => 41.1939
    }, ['Brett Woods, Fairfield, Ct.']],
    [{
      :display_name => 'Point Reyes National Seashore, Marin Co., California, USA',
      :north => 38.2441,
      :west => -123.0256,
      :east => -122.7092,
      :south => 37.9255
    }, [
      'Point Reyes, California',
      'Point Reyes National Seashore, Marin County, CA',
      'Point Reyes National Seashore',
      'Point Reyes National Seashore, CA',
      'Pt Reyes National Seashore, Ca',
      'Pt Reyes National Seashore, Ca.']],
    [{
      :display_name => 'Howarth Park, Santa Rosa, Sonoma Co., California, USA',
      :north => 38.4582,
      :west => -122.6712,
      :east => -122.6632,
      :south => 38.4496
    }, [
      'Howarth Park, Santa Rosa CA',
      'Howarth Park',
      'Howarth',
      'Howarth Park Santa Rosa CA']],
    [{
      :display_name => 'Sebastopol, Sonoma Co., California, USA',
      :north => 38.413,
      :west => -122.848,
      :east => -122.807,
      :south => 38.3855
    }, [
      'Sebastopol, CA']],
    [{
      :display_name => 'Petaluma, Sonoma Co., California, USA',
      :north => 38.2788,
      :west => -122.6769,
      :east => -122.5810,
      :south => 38.2055
    }, [
      'Petaluma, CA']]
    ]
      loc = Location.new(loc_attrs)
      loc.user_id = 1
      loc.created = now
      loc.modified = now
      if loc.save
        print "Created #{loc.display_name}\n"
        update_observations_by_where(loc, loc.display_name)
        for name in alt_names
          update_observations_by_where(loc, name)
        end
      else
        # BUG FIX: loc_attrs is a Hash and has no #display_name method;
        # using the hash key avoids a NoMethodError on the failure path.
        print "Unable to create #{loc_attrs[:display_name]}\n"
      end
    end
  end

  def self.down
    # Restore the free-text field for observations that were linked.
    for o in Observation.find(:all, :conditions => "`where` is NULL")
      o.where = o.place_name
      o.save
    end
    drop_table :past_locations
    remove_column :observations, "location_id"
    remove_column :observations, "is_collection_location"
    drop_table :locations
  end
end
|
#!/usr/bin/env ruby
require 'thread'
require 'Qt'
require './Graph'
require './Point'

# Drawing surface: records mouse strokes as Point traces, feeds them to
# the Graph recognizer, and inserts the recognized character into the
# parent's result widget.
class DrawBox < Qt::Widget

  # mode 1 selects the alphabet model; anything else the numbers model.
  def initialize(parent, mode)
    super parent
    @graph = mode == 1 ? Graph.new("alphabet.json") : Graph.new("numbers.json")
    @result # no-op bare reference; @result is assigned in mousePressEvent
    @shift = false
    @pos1 # no-op placeholder (assigned on mouse press)
    @pos2 # no-op placeholder (assigned on mouse move)
    @parent = parent
    @r = @parent.getResult
    @image = Qt::Image.new 1000, 1000, 7
    @image.fill Qt::Color.new "#ffffff"
  end

  # Blits the backing image onto the widget for the dirty region only.
  def paintEvent(e)
    painter = Qt::Painter.new
    painter.begin self
    painter.setRenderHint Qt::Painter::Antialiasing
    dirtyRect = e.rect
    painter.drawImage(dirtyRect, @image, dirtyRect)
    painter.end
  end

  # Extends the current trace while the cursor stays inside the image.
  def mouseMoveEvent(e)
    if (e.pos.x > 0 and e.pos.x < @image.width) and (e.pos.y > 0 and e.pos.y < @image.height)
      @pos2 = e.pos
      @result << Point.new(@pos2.x, @pos2.y)
      drawLineTo @pos1, @pos2
      #drawPoint @pos1
      @pos1 = @pos2
    end
  end

  # Starts a fresh trace at the press position.
  def mousePressEvent(e)
    @result = Array.new
    @pos1 = e.pos
    @result << Point.new(@pos1.x, @pos1.y)
  end

  # Clears the canvas, runs recognition on the trace (if long enough) and
  # applies the recognized character (shift / backspace / insert).
  def mouseReleaseEvent(e)
    @image.fill Qt::Color.new "#ffffff"
    # Only traces with at least 20 points are treated as deliberate strokes.
    if @result.length > 19
      char = @graph.solve(@result)
      if char == "shift"
        @shift = true
      elsif char == "back"
        @r.backspace
      else
        if @shift
          @r.insert(char.upcase)
          @shift = false
        else
          @r.insert(char)
        end
      end
    end
    # lines for generating the json (training-data helper, kept disabled)
    # puts @result.length
    # tc = TraceConverter.new(@result)
    # tab = tc.resize
    # file = File.open('new.json', 'a')
    # str = '{"letter":"a", "points":['
    # tab.each{|p| str+='{"x":'+p.x.to_s+', "y":'+p.y.to_s+'},'}
    # str = str[0..-2]
    # str+= ']},'
    # file.puts str
    # file.close
    update
  end

  # Draws a stroke segment onto the backing image and repaints the
  # bounding rectangle around it.
  def drawLineTo pos1, pos2
    p = Qt::Painter.new
    p.begin @image
    p.setRenderHint Qt::Painter::Antialiasing
    color = Qt::Color.new
    color.setNamedColor "#333333"
    pen = Qt::Pen.new color
    pen.setWidth 3
    p.setPen pen
    p.drawLine Qt::Line.new(pos1, pos2)
    rad = (3/2)+2; # integer division: (3/2) == 1, so rad == 3
    update(Qt::Rect.new(pos1, pos2).normalized().adjusted(-rad, -rad, +rad, +rad))
    p.end
  end

  # def drawPoint pos1
  #   p = Qt::Painter.new
  #   p.begin @image
  #   p.setRenderHint Qt::Painter::Antialiasing
  #   color = Qt::Color.new
  #   color.setNamedColor "#333333"
  #   pen = Qt::Pen.new color
  #   pen.setWidth 3
  #   p.setPen pen
  #   p.drawPoint pos1.x, pos1.y
  #   rad = (3/2)+2;
  #   update
  #   p.end
  # end

  # Reallocates the backing image to half the parent width (square).
  def resize
    @image = Qt::Image.new @parent.width/2, @parent.width/2, 7
    @image.fill Qt::Color.new "#ffffff"
  end
end
diminue la limite de point
#!/usr/bin/env ruby
require 'thread'
require 'Qt'
require './Graph'
require './Point'

# Drawing surface: records mouse strokes as Point traces, feeds them to
# the Graph recognizer, and inserts the recognized character into the
# parent's result widget.
class DrawBox < Qt::Widget

  # mode 1 selects the alphabet model; anything else the numbers model.
  def initialize(parent, mode)
    super parent
    @graph = mode == 1 ? Graph.new("alphabet.json") : Graph.new("numbers.json")
    @result # no-op bare reference; @result is assigned in mousePressEvent
    @shift = false
    @pos1 # no-op placeholder (assigned on mouse press)
    @pos2 # no-op placeholder (assigned on mouse move)
    @parent = parent
    @r = @parent.getResult
    @image = Qt::Image.new 1000, 1000, 7
    @image.fill Qt::Color.new "#ffffff"
  end

  # Blits the backing image onto the widget for the dirty region only.
  def paintEvent(e)
    painter = Qt::Painter.new
    painter.begin self
    painter.setRenderHint Qt::Painter::Antialiasing
    dirtyRect = e.rect
    painter.drawImage(dirtyRect, @image, dirtyRect)
    painter.end
  end

  # Extends the current trace while the cursor stays inside the image.
  def mouseMoveEvent(e)
    if (e.pos.x > 0 and e.pos.x < @image.width) and (e.pos.y > 0 and e.pos.y < @image.height)
      @pos2 = e.pos
      @result << Point.new(@pos2.x, @pos2.y)
      drawLineTo @pos1, @pos2
      #drawPoint @pos1
      @pos1 = @pos2
    end
  end

  # Starts a fresh trace at the press position.
  def mousePressEvent(e)
    @result = Array.new
    @pos1 = e.pos
    @result << Point.new(@pos1.x, @pos1.y)
  end

  # Clears the canvas, runs recognition on the trace (if long enough) and
  # applies the recognized character (shift / backspace / insert).
  def mouseReleaseEvent(e)
    @image.fill Qt::Color.new "#ffffff"
    # Lowered threshold: any trace with more than one point is recognized.
    if @result.length > 1
      char = @graph.solve(@result)
      if char == "shift"
        @shift = true
      elsif char == "back"
        @r.backspace
      else
        if @shift
          @r.insert(char.upcase)
          @shift = false
        else
          @r.insert(char)
        end
      end
    end
    # lines for generating the json (training-data helper, kept disabled)
    # puts @result.length
    # tc = TraceConverter.new(@result)
    # tab = tc.resize
    # file = File.open('new.json', 'a')
    # str = '{"letter":"a", "points":['
    # tab.each{|p| str+='{"x":'+p.x.to_s+', "y":'+p.y.to_s+'},'}
    # str = str[0..-2]
    # str+= ']},'
    # file.puts str
    # file.close
    update
  end

  # Draws a stroke segment onto the backing image and repaints the
  # bounding rectangle around it.
  def drawLineTo pos1, pos2
    p = Qt::Painter.new
    p.begin @image
    p.setRenderHint Qt::Painter::Antialiasing
    color = Qt::Color.new
    color.setNamedColor "#333333"
    pen = Qt::Pen.new color
    pen.setWidth 3
    p.setPen pen
    p.drawLine Qt::Line.new(pos1, pos2)
    rad = (3/2)+2; # integer division: (3/2) == 1, so rad == 3
    update(Qt::Rect.new(pos1, pos2).normalized().adjusted(-rad, -rad, +rad, +rad))
    p.end
  end

  # def drawPoint pos1
  #   p = Qt::Painter.new
  #   p.begin @image
  #   p.setRenderHint Qt::Painter::Antialiasing
  #   color = Qt::Color.new
  #   color.setNamedColor "#333333"
  #   pen = Qt::Pen.new color
  #   pen.setWidth 3
  #   p.setPen pen
  #   p.drawPoint pos1.x, pos1.y
  #   rad = (3/2)+2;
  #   update
  #   p.end
  # end

  # Reallocates the backing image to half the parent width (square).
  def resize
    @image = Qt::Image.new @parent.width/2, @parent.width/2, 7
    @image.fill Qt::Color.new "#ffffff"
  end
end
# CocoaPods spec for the FeedHenry iOS SDK.
Pod::Spec.new do |s|
  s.name                = 'FH'
  s.version             = '2.2.16'
  s.summary             = 'FeedHenry iOS Software Development Kit'
  s.homepage            = 'https://www.feedhenry.com'
  s.social_media_url    = 'https://twitter.com/feedhenry'
  s.license             = 'FeedHenry'
  s.author              = 'Red Hat, Inc.'
  s.source              = { :git => 'https://github.com/feedhenry/fh-ios-sdk.git', :tag => s.version }
  s.platform            = :ios, 7.0
  s.source_files        = 'fh-ios-sdk/**/*.{h,m}'
  # Only the SDK's published API headers are exposed to consumers.
  s.public_header_files = 'fh-ios-sdk/FH.h', 'fh-ios-sdk/FHAct.h', 'fh-ios-sdk/FHActRequest.h', 'fh-ios-sdk/FHAuthRequest.h', 'fh-ios-sdk/FHCloudProps.h', 'fh-ios-sdk/FHCloudRequest.h', 'fh-ios-sdk/FHPushConfig.h', 'fh-ios-sdk/FHConfig.h', 'fh-ios-sdk/FHResponse.h', 'fh-ios-sdk/FHResponseDelegate.h', 'fh-ios-sdk/Sync/FHSyncClient.h', 'fh-ios-sdk/Sync/FHSyncConfig.h', 'fh-ios-sdk/Sync/FHSyncNotificationMessage.h', 'fh-ios-sdk/Sync/FHSyncDelegate.h', 'fh-ios-sdk/Categories/JSON/FHJSON.h', 'fh-ios-sdk/FHDataManager.h'
  s.requires_arc        = true
  s.libraries           = 'xml2', 'z'
  s.dependency 'ASIHTTPRequest/Core', '1.8.2'
  s.dependency 'Reachability', '3.2'
  s.dependency 'AeroGear-Push', '1.1.0-beta.2'
end
Update AG push lib in FH podspec
# CocoaPods spec for the FeedHenry iOS SDK.
Pod::Spec.new do |s|
  s.name                = 'FH'
  s.version             = '2.2.16'
  s.summary             = 'FeedHenry iOS Software Development Kit'
  s.homepage            = 'https://www.feedhenry.com'
  s.social_media_url    = 'https://twitter.com/feedhenry'
  s.license             = 'FeedHenry'
  s.author              = 'Red Hat, Inc.'
  s.source              = { :git => 'https://github.com/feedhenry/fh-ios-sdk.git', :tag => s.version }
  s.platform            = :ios, 7.0
  s.source_files        = 'fh-ios-sdk/**/*.{h,m}'
  # Only the SDK's published API headers are exposed to consumers.
  s.public_header_files = 'fh-ios-sdk/FH.h', 'fh-ios-sdk/FHAct.h', 'fh-ios-sdk/FHActRequest.h', 'fh-ios-sdk/FHAuthRequest.h', 'fh-ios-sdk/FHCloudProps.h', 'fh-ios-sdk/FHCloudRequest.h', 'fh-ios-sdk/FHPushConfig.h', 'fh-ios-sdk/FHConfig.h', 'fh-ios-sdk/FHResponse.h', 'fh-ios-sdk/FHResponseDelegate.h', 'fh-ios-sdk/Sync/FHSyncClient.h', 'fh-ios-sdk/Sync/FHSyncConfig.h', 'fh-ios-sdk/Sync/FHSyncNotificationMessage.h', 'fh-ios-sdk/Sync/FHSyncDelegate.h', 'fh-ios-sdk/Categories/JSON/FHJSON.h', 'fh-ios-sdk/FHDataManager.h'
  s.requires_arc        = true
  s.libraries           = 'xml2', 'z'
  s.dependency 'ASIHTTPRequest/Core', '1.8.2'
  s.dependency 'Reachability', '3.2'
  # Stable AeroGear push release (was 1.1.0-beta.2).
  s.dependency 'AeroGear-Push', '1.1.1'
end
|
# encoding: utf-8

# Use HTTPS so gem payloads cannot be tampered with in transit
# (plain http://rubygems.org triggers a Bundler insecure-source warning).
source 'https://rubygems.org'

gem 'peach', "~> 0.5.1"
gem 'wallace'

group :development do
  gem 'jeweler', '~> 1.8.7'
  gem 'rake', '~> 10.1.0'
  gem 'rdoc', '~> 4.0.1'
  gem 'bundler', '~> 1.3.5'
end
Removed Peach and Wallace dependencies.
# encoding: utf-8

# Use HTTPS so gem payloads cannot be tampered with in transit
# (plain http://rubygems.org triggers a Bundler insecure-source warning).
source 'https://rubygems.org'

group :development do
  gem 'jeweler', '~> 1.8.7'
  gem 'rake', '~> 10.1.0'
  gem 'rdoc', '~> 4.0.1'
  gem 'bundler', '~> 1.3.5'
end
|
# Homebrew formula for KDE's extra-cmake-modules (ECM).
class KdeExtraCmakeModules < Formula
  desc "Extra modules and scripts for CMake"
  homepage "https://api.kde.org/frameworks/extra-cmake-modules/html/index.html"
  url "https://download.kde.org/stable/frameworks/5.73/extra-cmake-modules-5.73.0.tar.xz"
  sha256 "c5e3ef0253f7d5ab3adf9185950e34fd620a3d5baaf3bcc15892f971fc3274c4"
  head "https://invent.kde.org/frameworks/extra-cmake-modules.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "1a2fd1e6deea1346ffc02059938d0c90dda707ed1a48ae119d72eb7361a9eb31" => :catalina
    sha256 "a8f67bf2d6cfbade9fb38ef67bb3236512caf1453462c4a7119bb7247279caf8" => :mojave
    sha256 "1a2fd1e6deea1346ffc02059938d0c90dda707ed1a48ae119d72eb7361a9eb31" => :high_sierra
  end

  # qt and sphinx-doc are needed only to build the HTML/man/QtHelp docs.
  depends_on "cmake" => [:build, :test]
  depends_on "qt" => :build
  depends_on "sphinx-doc" => :build

  def install
    args = std_cmake_args
    args << "-DBUILD_HTML_DOCS=ON"
    args << "-DBUILD_MAN_DOCS=ON"
    args << "-DBUILD_QTHELP_DOCS=ON"
    args << "-DBUILD_TESTING=OFF"

    system "cmake", ".", *args
    system "make", "install"
  end

  test do
    # A minimal project that locates ECM proves the modules are installed
    # where CMake's find_package expects them.
    (testpath/"CMakeLists.txt").write("find_package(ECM REQUIRED)")
    system "cmake", ".", "-Wno-dev"

    expected="ECM_DIR:PATH=#{HOMEBREW_PREFIX}/share/ECM/cmake"
    assert_match expected, File.read(testpath/"CMakeCache.txt")
  end
end
kde-extra-cmake-modules: update 5.73.0 bottle.
# Homebrew formula for KDE's extra-cmake-modules (ECM), a collection of
# additional CMake modules and find scripts used by KDE Frameworks.
class KdeExtraCmakeModules < Formula
  desc "Extra modules and scripts for CMake"
  homepage "https://api.kde.org/frameworks/extra-cmake-modules/html/index.html"
  url "https://download.kde.org/stable/frameworks/5.73/extra-cmake-modules-5.73.0.tar.xz"
  sha256 "c5e3ef0253f7d5ab3adf9185950e34fd620a3d5baaf3bcc15892f971fc3274c4"
  head "https://invent.kde.org/frameworks/extra-cmake-modules.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "7e7861c3073349f9748f7407985d993242895b5207cdc11f7666db898cb3020b" => :catalina
    sha256 "7e7861c3073349f9748f7407985d993242895b5207cdc11f7666db898cb3020b" => :mojave
    sha256 "7e7861c3073349f9748f7407985d993242895b5207cdc11f7666db898cb3020b" => :high_sierra
  end

  depends_on "cmake" => [:build, :test]
  depends_on "qt" => :build
  depends_on "sphinx-doc" => :build

  def install
    # Build every documentation format but skip the test suite.
    doc_flags = %w[
      -DBUILD_HTML_DOCS=ON
      -DBUILD_MAN_DOCS=ON
      -DBUILD_QTHELP_DOCS=ON
      -DBUILD_TESTING=OFF
    ]
    system "cmake", ".", *std_cmake_args, *doc_flags
    system "make", "install"
  end

  test do
    # A trivial project that only locates ECM; the cache then records where
    # CMake found it, which must be our installed share/ECM/cmake path.
    (testpath/"CMakeLists.txt").write("find_package(ECM REQUIRED)")
    system "cmake", ".", "-Wno-dev"
    expected = "ECM_DIR:PATH=#{HOMEBREW_PREFIX}/share/ECM/cmake"
    assert_match expected, File.read(testpath/"CMakeCache.txt")
  end
end
|
# Homebrew formula packaging the Open Liberty runtime bundled with the
# Jakarta EE Web Profile 8 feature set.
class OpenlibertyWebprofile8 < Formula
desc "Lightweight open framework for Java (Jakarta EE Web Profile 8)"
homepage "https://openliberty.io"
url "https://public.dhe.ibm.com/ibmdl/export/pub/software/openliberty/runtime/release/22.0.0.11/openliberty-webProfile8-22.0.0.11.zip"
sha256 "002361aef8b92e3768b487aa10157c206d78f33db39af285e548aa9a30fae007"
license "EPL-1.0"
livecheck do
url "https://openliberty.io/api/builds/data"
regex(/openliberty[._-]v?(\d+(?:\.\d+)+)\.zip/i)
end
bottle do
sha256 cellar: :any_skip_relocation, all: "21fa5acd10b80285f0ba40bdcecba0b79a19c6fc5517ea831cb183faf299364c"
end
depends_on "openjdk"
def install
# Windows-only launcher scripts are useless on macOS; drop them first.
rm_rf Dir["bin/**/*.bat"]
prefix.install_metafiles
libexec.install Dir["*"]
# Wrapper script that exports an overridable JAVA_HOME and then executes
# Liberty's bundled "server" launcher.
(bin/"openliberty-webprofile8").write_env_script "#{libexec}/bin/server",
Language::Java.overridable_java_home_env
end
def caveats
<<~EOS
The home of Open Liberty Jakarta EE Web Profile 8 is:
#{opt_libexec}
EOS
end
test do
# Keep all server state inside the test sandbox.
ENV["WLP_USER_DIR"] = testpath
begin
system bin/"openliberty-webprofile8", "start"
# A pid file proves the default server actually started.
assert_predicate testpath/"servers/.pid/defaultServer.pid", :exist?
ensure
# Always stop the server, even if the assertion above failed.
system bin/"openliberty-webprofile8", "stop"
end
refute_predicate testpath/"servers/.pid/defaultServer.pid", :exist?
assert_match "<feature>webProfile-8.0</feature>", (testpath/"servers/defaultServer/server.xml").read
end
end
openliberty-webprofile8 22.0.0.12
Closes #116470.
Signed-off-by: Patrick Linnane <cbb7353e6d953ef360baf960c122346276c6e320@linnane.io>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula packaging the Open Liberty runtime bundled with the
# Jakarta EE Web Profile 8 feature set.
class OpenlibertyWebprofile8 < Formula
desc "Lightweight open framework for Java (Jakarta EE Web Profile 8)"
homepage "https://openliberty.io"
url "https://public.dhe.ibm.com/ibmdl/export/pub/software/openliberty/runtime/release/22.0.0.12/openliberty-webProfile8-22.0.0.12.zip"
sha256 "b9f4def0705b18ffd671702000ab2969a10a65c9995d6c1818b3a863026d9057"
license "EPL-1.0"
livecheck do
url "https://openliberty.io/api/builds/data"
regex(/openliberty[._-]v?(\d+(?:\.\d+)+)\.zip/i)
end
bottle do
sha256 cellar: :any_skip_relocation, all: "21fa5acd10b80285f0ba40bdcecba0b79a19c6fc5517ea831cb183faf299364c"
end
depends_on "openjdk"
def install
# Windows-only launcher scripts are useless on macOS; drop them first.
rm_rf Dir["bin/**/*.bat"]
prefix.install_metafiles
libexec.install Dir["*"]
# Wrapper script that exports an overridable JAVA_HOME and then executes
# Liberty's bundled "server" launcher.
(bin/"openliberty-webprofile8").write_env_script "#{libexec}/bin/server",
Language::Java.overridable_java_home_env
end
def caveats
<<~EOS
The home of Open Liberty Jakarta EE Web Profile 8 is:
#{opt_libexec}
EOS
end
test do
# Keep all server state inside the test sandbox.
ENV["WLP_USER_DIR"] = testpath
begin
system bin/"openliberty-webprofile8", "start"
# A pid file proves the default server actually started.
assert_predicate testpath/"servers/.pid/defaultServer.pid", :exist?
ensure
# Always stop the server, even if the assertion above failed.
system bin/"openliberty-webprofile8", "stop"
end
refute_predicate testpath/"servers/.pid/defaultServer.pid", :exist?
assert_match "<feature>webProfile-8.0</feature>", (testpath/"servers/defaultServer/server.xml").read
end
end
|
# Cask for Azure Data Studio's Insiders (pre-release) channel.
# version is "<app version>,<build id>": the build id selects the download
# directory and the app version names the zip (see url below).
cask "azure-data-studio-insiders" do
version "1.34.0,8d8b3983a96c777b33b6a9fdaf8f8844bef9d08e"
sha256 "015af5f20a50e01928c7facf70d05fc034857f26317ff8ac62331cb0a0e87613"
url "https://sqlopsbuilds.azureedge.net/insider/#{version.csv.second}/azuredatastudio-macos-#{version.csv.first}-insider.zip",
verified: "sqlopsbuilds.azureedge.net/insider/"
name "Azure Data Studio - Insiders"
desc "Data management tool that enables working with SQL Server"
homepage "https://docs.microsoft.com/en-us/sql/azure-data-studio/"
livecheck do
url "https://azuredatastudio-update.azurewebsites.net/api/update/darwin/insider/VERSION"
# Recombine the feed's app version ("name") and build id ("version")
# into this cask's comma-separated version format.
strategy :page_match do |page|
name = page[/"name":"(\d+(?:\.\d+)+)/i, 1]
version = page[/"version":"(\w+)/i, 1]
# Bail out on malformed feed data instead of emitting a partial version.
next if name.blank? || version.blank?
"#{name},#{version}"
end
end
auto_updates true
app "Azure Data Studio - Insiders.app"
# Expose the app's bundled CLI under a shorter command name.
binary "#{appdir}/Azure Data Studio - Insiders.app/Contents/Resources/app/bin/code",
target: "azuredatastudio-insiders"
zap trash: [
"~/.azuredatastudio-insiders",
"~/Library/Application Support/Azure Data Studio",
"~/Library/Application Support/azuredatastudio",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.azuredatastudio.oss.insiders.sfl2",
"~/Library/Caches/com.azuredatastudio.oss.insiders",
"~/Library/Caches/com.azuredatastudio.oss.insiders.ShipIt",
"~/Library/Preferences/com.azuredatastudio.oss.insiders.plist",
"~/Library/Saved Application State/com.azuredatastudio.oss.insiders.savedState",
]
end
Update azure-data-studio-insiders from 1.34.0,8d8b3983a96c777b33b6a9fdaf8f8844bef9d08e to 1.34.0,78ca26f0b1d0107494eca69f037ec676150acf93 (#12743)
# Cask for Azure Data Studio's Insiders (pre-release) channel.
# version is "<app version>,<build id>": the build id selects the download
# directory and the app version names the zip (see url below).
cask "azure-data-studio-insiders" do
version "1.34.0,78ca26f0b1d0107494eca69f037ec676150acf93"
sha256 "ac99a6bccc3f66291134c60d94196703af9e7239ce4127b933fa830b12c3d4bb"
url "https://sqlopsbuilds.azureedge.net/insider/#{version.csv.second}/azuredatastudio-macos-#{version.csv.first}-insider.zip",
verified: "sqlopsbuilds.azureedge.net/insider/"
name "Azure Data Studio - Insiders"
desc "Data management tool that enables working with SQL Server"
homepage "https://docs.microsoft.com/en-us/sql/azure-data-studio/"
livecheck do
url "https://azuredatastudio-update.azurewebsites.net/api/update/darwin/insider/VERSION"
# Recombine the feed's app version ("name") and build id ("version")
# into this cask's comma-separated version format.
strategy :page_match do |page|
name = page[/"name":"(\d+(?:\.\d+)+)/i, 1]
version = page[/"version":"(\w+)/i, 1]
# Bail out on malformed feed data instead of emitting a partial version.
next if name.blank? || version.blank?
"#{name},#{version}"
end
end
auto_updates true
app "Azure Data Studio - Insiders.app"
# Expose the app's bundled CLI under a shorter command name.
binary "#{appdir}/Azure Data Studio - Insiders.app/Contents/Resources/app/bin/code",
target: "azuredatastudio-insiders"
zap trash: [
"~/.azuredatastudio-insiders",
"~/Library/Application Support/Azure Data Studio",
"~/Library/Application Support/azuredatastudio",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.azuredatastudio.oss.insiders.sfl2",
"~/Library/Caches/com.azuredatastudio.oss.insiders",
"~/Library/Caches/com.azuredatastudio.oss.insiders.ShipIt",
"~/Library/Preferences/com.azuredatastudio.oss.insiders.plist",
"~/Library/Saved Application State/com.azuredatastudio.oss.insiders.savedState",
]
end
|
# Cask for Azure Data Studio's Insiders (pre-release) channel.
# version is "<app version>,<build id>": the build id selects the download
# directory and the app version names the zip (see url below).
cask "azure-data-studio-insiders" do
version "1.33.0,51c41915a4449506d5c93934f8cd8fd9d42c91f4"
sha256 "5582aac99e43af472e91c5ef64cfa6e58621318e06171c3aa8faba40f53ecc88"
url "https://sqlopsbuilds.azureedge.net/insider/#{version.after_comma}/azuredatastudio-macos-#{version.before_comma}-insider.zip",
verified: "sqlopsbuilds.azureedge.net/insider/"
name "Azure Data Studio - Insiders"
desc "Data management tool that enables working with SQL Server"
homepage "https://docs.microsoft.com/en-us/sql/azure-data-studio/"
livecheck do
url "https://azuredatastudio-update.azurewebsites.net/api/update/darwin/insider/VERSION"
# Recombine the feed's app version ("name") and build id ("version")
# into this cask's comma-separated version format.
strategy :page_match do |page|
name = page[/"name":"(\d+(?:\.\d+)*)/i, 1]
version = page[/"version":"(\w+)/i, 1]
# NOTE(review): no nil/blank guard here — a malformed feed would yield a
# partial version string such as ",<build>".
"#{name},#{version}"
end
end
auto_updates true
app "Azure Data Studio - Insiders.app"
# Expose the app's bundled CLI under a shorter command name.
binary "#{appdir}/Azure Data Studio - Insiders.app/Contents/Resources/app/bin/code",
target: "azuredatastudio-insiders"
zap trash: [
"~/.azuredatastudio-insiders",
"~/Library/Application Support/Azure Data Studio",
"~/Library/Application Support/azuredatastudio",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.azuredatastudio.oss.insiders.sfl2",
"~/Library/Caches/com.azuredatastudio.oss.insiders",
"~/Library/Caches/com.azuredatastudio.oss.insiders.ShipIt",
"~/Library/Preferences/com.azuredatastudio.oss.insiders.plist",
"~/Library/Saved Application State/com.azuredatastudio.oss.insiders.savedState",
]
end
Update azure-data-studio-insiders from 1.33.0,51c41915a4449506d5c93934f8cd8fd9d42c91f4 to 1.33.0,747b8e84a89d418e380a949f16c41ac3a09930b3 (#12152)
# Cask for Azure Data Studio's Insiders (pre-release) channel.
# version is "<app version>,<build id>": the build id selects the download
# directory and the app version names the zip (see url below).
cask "azure-data-studio-insiders" do
version "1.33.0,747b8e84a89d418e380a949f16c41ac3a09930b3"
sha256 "0360596192f96641218052c782496c95f62d90e463f6e79578a59d362a0c8af3"
url "https://sqlopsbuilds.azureedge.net/insider/#{version.after_comma}/azuredatastudio-macos-#{version.before_comma}-insider.zip",
verified: "sqlopsbuilds.azureedge.net/insider/"
name "Azure Data Studio - Insiders"
desc "Data management tool that enables working with SQL Server"
homepage "https://docs.microsoft.com/en-us/sql/azure-data-studio/"
livecheck do
url "https://azuredatastudio-update.azurewebsites.net/api/update/darwin/insider/VERSION"
# Recombine the feed's app version ("name") and build id ("version")
# into this cask's comma-separated version format.
strategy :page_match do |page|
name = page[/"name":"(\d+(?:\.\d+)*)/i, 1]
version = page[/"version":"(\w+)/i, 1]
# NOTE(review): no nil/blank guard here — a malformed feed would yield a
# partial version string such as ",<build>".
"#{name},#{version}"
end
end
auto_updates true
app "Azure Data Studio - Insiders.app"
# Expose the app's bundled CLI under a shorter command name.
binary "#{appdir}/Azure Data Studio - Insiders.app/Contents/Resources/app/bin/code",
target: "azuredatastudio-insiders"
zap trash: [
"~/.azuredatastudio-insiders",
"~/Library/Application Support/Azure Data Studio",
"~/Library/Application Support/azuredatastudio",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.azuredatastudio.oss.insiders.sfl2",
"~/Library/Caches/com.azuredatastudio.oss.insiders",
"~/Library/Caches/com.azuredatastudio.oss.insiders.ShipIt",
"~/Library/Preferences/com.azuredatastudio.oss.insiders.plist",
"~/Library/Saved Application State/com.azuredatastudio.oss.insiders.savedState",
]
end
|
# Cask for Azure Data Studio's Insiders (pre-release) channel.
# version is "<app version>,<build id>": the build id selects the download
# directory and the app version names the zip (see url below).
cask "azure-data-studio-insiders" do
version "1.33.0,ffe5027c3f4a62e17044c6035461a6eb5a92c589"
sha256 "b3efc574af7051d4e48201199dbb3bafb7d12a58e7ea438aa29e0382d55b33e5"
url "https://sqlopsbuilds.azureedge.net/insider/#{version.after_comma}/azuredatastudio-macos-#{version.before_comma}-insider.zip",
verified: "sqlopsbuilds.azureedge.net/insider/"
name "Azure Data Studio - Insiders"
desc "Data management tool that enables working with SQL Server"
homepage "https://docs.microsoft.com/en-us/sql/azure-data-studio/"
livecheck do
url "https://azuredatastudio-update.azurewebsites.net/api/update/darwin/insider/VERSION"
# Recombine the feed's app version ("name") and build id ("version")
# into this cask's comma-separated version format.
strategy :page_match do |page|
name = page[/"name":"(\d+(?:\.\d+)*)/i, 1]
version = page[/"version":"(\w+)/i, 1]
# NOTE(review): no nil/blank guard here — a malformed feed would yield a
# partial version string such as ",<build>".
"#{name},#{version}"
end
end
auto_updates true
app "Azure Data Studio - Insiders.app"
# Expose the app's bundled CLI under a shorter command name.
binary "#{appdir}/Azure Data Studio - Insiders.app/Contents/Resources/app/bin/code",
target: "azuredatastudio-insiders"
zap trash: [
"~/.azuredatastudio-insiders",
"~/Library/Application Support/Azure Data Studio",
"~/Library/Application Support/azuredatastudio",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.azuredatastudio.oss.insiders.sfl2",
"~/Library/Caches/com.azuredatastudio.oss.insiders",
"~/Library/Caches/com.azuredatastudio.oss.insiders.ShipIt",
"~/Library/Preferences/com.azuredatastudio.oss.insiders.plist",
"~/Library/Saved Application State/com.azuredatastudio.oss.insiders.savedState",
]
end
Update azure-data-studio-insiders from 1.33.0,ffe5027c3f4a62e17044c6035461a6eb5a92c589 to 1.33.0,9a38578c54505a16b070001d03d1f37e827a17d4 (#12083)
# Cask for Azure Data Studio's Insiders (pre-release) channel.
# version is "<app version>,<build id>": the build id selects the download
# directory and the app version names the zip (see url below).
cask "azure-data-studio-insiders" do
version "1.33.0,9a38578c54505a16b070001d03d1f37e827a17d4"
sha256 "a09a020865a833dadf7d7ef38191a56ebd8e621576b51651723e9e74f9c0b398"
url "https://sqlopsbuilds.azureedge.net/insider/#{version.after_comma}/azuredatastudio-macos-#{version.before_comma}-insider.zip",
verified: "sqlopsbuilds.azureedge.net/insider/"
name "Azure Data Studio - Insiders"
desc "Data management tool that enables working with SQL Server"
homepage "https://docs.microsoft.com/en-us/sql/azure-data-studio/"
livecheck do
url "https://azuredatastudio-update.azurewebsites.net/api/update/darwin/insider/VERSION"
# Recombine the feed's app version ("name") and build id ("version")
# into this cask's comma-separated version format.
strategy :page_match do |page|
name = page[/"name":"(\d+(?:\.\d+)*)/i, 1]
version = page[/"version":"(\w+)/i, 1]
# NOTE(review): no nil/blank guard here — a malformed feed would yield a
# partial version string such as ",<build>".
"#{name},#{version}"
end
end
auto_updates true
app "Azure Data Studio - Insiders.app"
# Expose the app's bundled CLI under a shorter command name.
binary "#{appdir}/Azure Data Studio - Insiders.app/Contents/Resources/app/bin/code",
target: "azuredatastudio-insiders"
zap trash: [
"~/.azuredatastudio-insiders",
"~/Library/Application Support/Azure Data Studio",
"~/Library/Application Support/azuredatastudio",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.azuredatastudio.oss.insiders.sfl2",
"~/Library/Caches/com.azuredatastudio.oss.insiders",
"~/Library/Caches/com.azuredatastudio.oss.insiders.ShipIt",
"~/Library/Preferences/com.azuredatastudio.oss.insiders.plist",
"~/Library/Saved Application State/com.azuredatastudio.oss.insiders.savedState",
]
end
|
# Legacy (pre-v1 DSL) class-based cask for the Japanese localization of
# Firefox Developer Edition, fetched from Mozilla's aurora nightly channel.
class FirefoxdevelopereditionJa < Cask
version '35.0a2'
sha256 '3f990fbf7fe911efd27aaa22fae6b7e42f5f132792b08efc90ffa453fb2b6d78'
# The version is interpolated into the dmg filename on Mozilla's FTP mirror.
url "https://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-aurora-l10n/firefox-#{version}.ja-JP-mac.mac.dmg"
homepage 'https://www.mozilla.org/ja/firefox/developer/'
license :oss
app 'FirefoxDeveloperEdition.app'
end
new-style header in firefoxdeveloperedition-ja
# v1-DSL cask for the Japanese localization of Firefox Developer Edition,
# fetched from Mozilla's aurora nightly channel.
cask :v1 => 'firefoxdeveloperedition-ja' do
version '35.0a2'
sha256 '3f990fbf7fe911efd27aaa22fae6b7e42f5f132792b08efc90ffa453fb2b6d78'
# The version is interpolated into the dmg filename on Mozilla's FTP mirror.
url "https://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-aurora-l10n/firefox-#{version}.ja-JP-mac.mac.dmg"
homepage 'https://www.mozilla.org/ja/firefox/developer/'
license :oss
app 'FirefoxDeveloperEdition.app'
end
|
# Cask for Elsevier's Mendeley Reference Manager desktop app.
cask "mendeley-reference-manager" do
version "2.70.0"
sha256 "138e680e90c30896a863d6d561df94882ed64d4a460cbf30b1357e411802915c"
url "https://static.mendeley.com/bin/desktop/mendeley-reference-manager-#{version}.dmg"
name "Mendeley Reference Manager"
desc "Research management tool"
homepage "https://www.mendeley.com/download-reference-manager/macOS/"
livecheck do
# The vendor publishes an electron-builder latest-mac.yml update feed.
url "https://static.mendeley.com/bin/desktop/latest-mac.yml"
strategy :electron_builder
end
depends_on macos: ">= :yosemite"
app "Mendeley Reference Manager.app"
# Removed on `brew uninstall --zap`: app support, logs, prefs, saved state.
zap trash: [
"~/Library/Application Support/Mendeley Reference Manager",
"~/Library/Logs/Mendeley Reference Manager",
"~/Library/Preferences/com.elsevier.mendeley.plist",
"~/Library/Saved Application State/com.elsevier.mendeley.savedState",
]
end
mendeley-reference-manager 2.71.0
Update mendeley-reference-manager from 2.70.0 to 2.71.0
Closes #123478.
Signed-off-by: Miccal Matthews <04d56b873758dd262086066db48e0b64e3bc79f8@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Cask for Elsevier's Mendeley Reference Manager desktop app.
cask "mendeley-reference-manager" do
version "2.71.0"
sha256 "bdc653352d830a8a1b1966a88648f924edbf0d50390a723b6bc040f8c9f4ddf3"
url "https://static.mendeley.com/bin/desktop/mendeley-reference-manager-#{version}.dmg"
name "Mendeley Reference Manager"
desc "Research management tool"
homepage "https://www.mendeley.com/download-reference-manager/macOS/"
livecheck do
# The vendor publishes an electron-builder latest-mac.yml update feed.
url "https://static.mendeley.com/bin/desktop/latest-mac.yml"
strategy :electron_builder
end
depends_on macos: ">= :yosemite"
app "Mendeley Reference Manager.app"
# Removed on `brew uninstall --zap`: app support, logs, prefs, saved state.
zap trash: [
"~/Library/Application Support/Mendeley Reference Manager",
"~/Library/Logs/Mendeley Reference Manager",
"~/Library/Preferences/com.elsevier.mendeley.plist",
"~/Library/Saved Application State/com.elsevier.mendeley.savedState",
]
end
|
# Cask for Elsevier's Mendeley Reference Manager desktop app.
cask "mendeley-reference-manager" do
version "2.72.0"
sha256 "b3eba79f4eb2864e35a7c42d44493c2114ffd17a1348c7a56599054a8e52964c"
url "https://static.mendeley.com/bin/desktop/mendeley-reference-manager-#{version}.dmg"
name "Mendeley Reference Manager"
desc "Research management tool"
homepage "https://www.mendeley.com/download-reference-manager/macOS/"
livecheck do
# The vendor publishes an electron-builder latest-mac.yml update feed.
url "https://static.mendeley.com/bin/desktop/latest-mac.yml"
strategy :electron_builder
end
app "Mendeley Reference Manager.app"
# Removed on `brew uninstall --zap`: app support, logs, prefs, saved state.
zap trash: [
"~/Library/Application Support/Mendeley Reference Manager",
"~/Library/Logs/Mendeley Reference Manager",
"~/Library/Preferences/com.elsevier.mendeley.plist",
"~/Library/Saved Application State/com.elsevier.mendeley.savedState",
]
end
mendeley-reference-manager 2.73.0
Update mendeley-reference-manager from 2.72.0 to 2.73.0
Closes #125213.
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Cask for Elsevier's Mendeley Reference Manager desktop app.
cask "mendeley-reference-manager" do
version "2.73.0"
sha256 "c986f8968142ccc601dc6960d6f084d00b8f673f393169c56618b6c700355bd9"
url "https://static.mendeley.com/bin/desktop/mendeley-reference-manager-#{version}.dmg"
name "Mendeley Reference Manager"
desc "Research management tool"
homepage "https://www.mendeley.com/download-reference-manager/macOS/"
livecheck do
# The vendor publishes an electron-builder latest-mac.yml update feed.
url "https://static.mendeley.com/bin/desktop/latest-mac.yml"
strategy :electron_builder
end
app "Mendeley Reference Manager.app"
# Removed on `brew uninstall --zap`: app support, logs, prefs, saved state.
zap trash: [
"~/Library/Application Support/Mendeley Reference Manager",
"~/Library/Logs/Mendeley Reference Manager",
"~/Library/Preferences/com.elsevier.mendeley.plist",
"~/Library/Saved Application State/com.elsevier.mendeley.savedState",
]
end
|
module RedisFailover
# NodeManager manages a list of redis nodes. Upon startup, the NodeManager
# will discover the current redis master and slaves. Each redis node is
# monitored by a NodeWatcher instance. The NodeWatchers periodically
# report the current state of the redis node it's watching to the
# NodeManager via an asynchronous queue. The NodeManager processes the
# state reports and reacts appropriately by handling stale/dead nodes,
# and promoting a new redis master if it sees fit to do so.
class NodeManager
include Util
# Name for the znode that handles exclusive locking between multiple
# Node Manager processes. Whoever holds the lock will be considered
# the "master" Node Manager, and will be responsible for monitoring
# the redis nodes. When a Node Manager that holds the lock disappears
# or fails, another Node Manager process will grab the lock and
# become the master.
LOCK_PATH = 'master_node_manager'
# Number of seconds to wait before retrying bootstrap process.
TIMEOUT = 5
# Creates a new instance.
#
# @param [Hash] options the options used to initialize the manager
# @option options [String] :zkservers comma-separated ZK host:port pairs
# @option options [String] :znode_path znode path override for redis nodes
# @option options [String] :password password for redis nodes
# @option options [Array<String>] :nodes the nodes to manage
# @option options [String] :max_failures the max failures for a node
def initialize(options)
logger.info("Redis Node Manager v#{VERSION} starting (#{RUBY_DESCRIPTION})")
@options = options
@znode = @options[:znode_path] || Util::DEFAULT_ZNODE_PATH
@manual_znode = ManualFailover::ZNODE_PATH
# Serializes manual-failover scheduling triggered from ZK event callbacks.
@mutex = Mutex.new
end
# Starts the node manager.
#
# @note This method does not return until the manager terminates.
def start
@queue = Queue.new
@leader = false
setup_zk
logger.info('Waiting to become master Node Manager ...')
# Blocks until this process acquires the exclusive leader lock.
@zk.with_lock(LOCK_PATH) do
@leader = true
logger.info('Acquired master Node Manager lock')
discover_nodes
initialize_path
spawn_watchers
handle_state_reports
end
rescue ZK::Exceptions::InterruptedSession => ex
logger.error("ZK error while attempting to manage nodes: #{ex.inspect}")
logger.error(ex.backtrace.join("\n"))
# Tear everything down and bootstrap again after a short pause.
shutdown
sleep(TIMEOUT)
retry
end
# Notifies the manager of a state change. Used primarily by
# {RedisFailover::NodeWatcher} to inform the manager of watched node states.
#
# @param [Node] node the node
# @param [Symbol] state the state
def notify_state(node, state)
@queue << [node, state]
end
# Performs a graceful shutdown of the manager.
def shutdown
@queue.clear
# nil is the stop sentinel that unblocks handle_state_reports' loop.
@queue << nil
@watchers.each(&:shutdown) if @watchers
@zk.close! if @zk
end
private
# Configures the ZooKeeper client.
def setup_zk
@zk.close! if @zk
@zk = ZK.new("#{@options[:zkservers]}#{@options[:chroot] || ''}")
@zk.register(@manual_znode) do |event|
@mutex.synchronize do
if event.node_changed?
schedule_manual_failover
end
end
end
# ZooKeeper watches are one-shot, so re-arm the manual-failover watch on
# every (re)connect and once up front.
@zk.on_connected { @zk.stat(@manual_znode, :watch => true) }
@zk.stat(@manual_znode, :watch => true)
end
# Handles periodic state reports from {RedisFailover::NodeWatcher} instances.
def handle_state_reports
# Loops until shutdown pushes the nil stop sentinel onto the queue.
while state_report = @queue.pop
begin
node, state = state_report
case state
when :unavailable then handle_unavailable(node)
when :available then handle_available(node)
when :syncing then handle_syncing(node)
when :manual_failover then handle_manual_failover(node)
else raise InvalidNodeStateError.new(node, state)
end
# flush current state
write_state
rescue ZK::Exceptions::InterruptedSession
# fail hard if this is a ZK connection-related error
raise
rescue => ex
logger.error("Error handling #{state_report.inspect}: #{ex.inspect}")
logger.error(ex.backtrace.join("\n"))
end
end
end
# Handles an unavailable node.
#
# @param [Node] node the unavailable node
def handle_unavailable(node)
# no-op if we already know about this node
return if @unavailable.include?(node)
logger.info("Handling unavailable node: #{node}")
@unavailable << node
# find a new master if this node was a master
if node == @master
logger.info("Demoting currently unavailable master #{node}.")
promote_new_master
else
@slaves.delete(node)
end
end
# Handles an available node.
#
# @param [Node] node the available node
def handle_available(node)
reconcile(node)
# no-op if we already know about this node
return if @master == node || @slaves.include?(node)
logger.info("Handling available node: #{node}")
if @master
# master already exists, make a slave
node.make_slave!(@master)
@slaves << node
else
# no master exists, make this the new master
promote_new_master(node)
end
@unavailable.delete(node)
end
# Handles a node that is currently syncing.
#
# @param [Node] node the syncing node
def handle_syncing(node)
reconcile(node)
if node.syncing_with_master? && node.prohibits_stale_reads?
logger.info("Node #{node} not ready yet, still syncing with master.")
force_unavailable_slave(node)
return
end
# otherwise, we can use this node
handle_available(node)
end
# Handles a manual failover request to the given node.
#
# @param [Node] node the candidate node for failover
def handle_manual_failover(node)
# no-op if node to be failed over is already master
return if @master == node
logger.info("Handling manual failover")
# make current master a slave, and promote new master
# NOTE(review): assumes @master is non-nil here — confirm a manual
# failover request cannot arrive while no master is elected.
@slaves << @master
@slaves.delete(node)
promote_new_master(node)
end
# Promotes a new master.
#
# @param [Node] node the optional node to promote
# @note if no node is specified, a random slave will be used
def promote_new_master(node = nil)
delete_path
@master = nil
# make a specific node or slave the new master
candidate = node || @slaves.pop
unless candidate
logger.error('Failed to promote a new master, no candidate available.')
return
end
redirect_slaves_to(candidate)
candidate.make_master!
@master = candidate
create_path
write_state
logger.info("Successfully promoted #{candidate} to master.")
end
# Discovers the current master and slave nodes.
def discover_nodes
@unavailable = []
nodes = @options[:nodes].map { |opts| Node.new(opts) }.uniq
# Intentional assignment of @master inside the condition.
raise NoMasterError unless @master = find_master(nodes)
@slaves = nodes - [@master]
logger.info("Managing master (#{@master}) and slaves" +
" (#{@slaves.map(&:to_s).join(', ')})")
# ensure that slaves are correctly pointing to this master
redirect_slaves_to(@master)
end
# Spawns the {RedisFailover::NodeWatcher} instances for each managed node.
def spawn_watchers
@watchers = [@master, @slaves, @unavailable].flatten.map do |node|
NodeWatcher.new(self, node, @options[:max_failures] || 3)
end
@watchers.each(&:watch)
end
# Searches for the master node.
#
# @param [Array<Node>] nodes the nodes to search
# @return [Node] the found master node, nil if not found
def find_master(nodes)
nodes.find do |node|
begin
node.master?
rescue NodeUnavailableError
# an unreachable node can't be confirmed as master
false
end
end
end
# Redirects all slaves to the specified node.
#
# @param [Node] node the node to which slaves are redirected
def redirect_slaves_to(node)
# iterate over a copy, since force_unavailable_slave mutates @slaves
@slaves.dup.each do |slave|
begin
slave.make_slave!(node)
rescue NodeUnavailableError
logger.info("Failed to redirect unreachable slave #{slave} to #{node}")
force_unavailable_slave(slave)
end
end
end
# Forces a slave to be marked as unavailable.
#
# @param [Node] node the node to force as unavailable
def force_unavailable_slave(node)
@slaves.delete(node)
@unavailable << node unless @unavailable.include?(node)
end
# It's possible that a newly available node may have been restarted
# and completely lost its dynamically set run-time role by the node
# manager. This method ensures that the node resumes its role as
# determined by the manager.
#
# @param [Node] node the node to reconcile
def reconcile(node)
return if @master == node && node.master?
return if @master && node.slave_of?(@master)
logger.info("Reconciling node #{node}")
if @master == node && !node.master?
# we think the node is a master, but the node doesn't
node.make_master!
return
end
# verify that node is a slave for the current master
if @master && !node.slave_of?(@master)
node.make_slave!(@master)
end
end
# @return [Hash] the set of current nodes grouped by category
def current_nodes
{
:master => @master ? @master.to_s : nil,
:slaves => @slaves.map(&:to_s),
:unavailable => @unavailable.map(&:to_s)
}
end
# Deletes the znode path containing the redis nodes.
def delete_path
@zk.delete(@znode)
logger.info("Deleted ZooKeeper node #{@znode}")
rescue ZK::Exceptions::NoNode => ex
# already gone — nothing to do
logger.info("Tried to delete missing znode: #{ex.inspect}")
end
# Creates the znode path containing the redis nodes.
def create_path
@zk.create(@znode, encode(current_nodes), :ephemeral => true)
logger.info("Created ZooKeeper node #{@znode}")
rescue ZK::Exceptions::NodeExists
# best effort
end
# Initializes the znode path containing the redis nodes.
def initialize_path
create_path
write_state
end
# Writes the current redis nodes state to the znode path.
def write_state
# create_path is a no-op when the znode exists (NodeExists is rescued),
# after which the data is overwritten unconditionally.
create_path
@zk.set(@znode, encode(current_nodes))
end
# Schedules a manual failover to a redis node.
def schedule_manual_failover
return unless @leader
new_master = @zk.get(@manual_znode, :watch => true).first
logger.info("Received manual failover request for: #{new_master}")
node = if new_master == ManualFailover::ANY_SLAVE
@slaves.sample
else
host, port = new_master.split(':', 2)
# NOTE(review): port is a String after split — confirm Node.new accepts it.
Node.new(:host => host, :port => port, :password => @options[:password])
end
notify_state(node, :manual_failover) if node
end
end
end
Modified NodeManager to only create its ZK node if it doesn't exist.
The #create_path method is called in quite a few places, often when the
node in question already exists. This causes a stream of NodeExists
exceptions from ZooKeeper, which might pollute its logs a bit.
I've modified the method to check if the node exists before creating it.
I didn't write specs, but manual testing showed that this change does not
alter redis_failover's behavior.
module RedisFailover
# NodeManager manages a list of redis nodes. Upon startup, the NodeManager
# will discover the current redis master and slaves. Each redis node is
# monitored by a NodeWatcher instance. The NodeWatchers periodically
# report the current state of the redis node it's watching to the
# NodeManager via an asynchronous queue. The NodeManager processes the
# state reports and reacts appropriately by handling stale/dead nodes,
# and promoting a new redis master if it sees fit to do so.
class NodeManager
include Util
# Name for the znode that handles exclusive locking between multiple
# Node Manager processes. Whoever holds the lock will be considered
# the "master" Node Manager, and will be responsible for monitoring
# the redis nodes. When a Node Manager that holds the lock disappears
# or fails, another Node Manager process will grab the lock and
# become the master.
LOCK_PATH = 'master_node_manager'
# Number of seconds to wait before retrying bootstrap process.
TIMEOUT = 5
# Creates a new instance.
#
# @param [Hash] options the options used to initialize the manager
# @option options [String] :zkservers comma-separated ZK host:port pairs
# @option options [String] :znode_path znode path override for redis nodes
# @option options [String] :password password for redis nodes
# @option options [Array<String>] :nodes the nodes to manage
# @option options [String] :max_failures the max failures for a node
def initialize(options)
logger.info("Redis Node Manager v#{VERSION} starting (#{RUBY_DESCRIPTION})")
@options = options
@znode = @options[:znode_path] || Util::DEFAULT_ZNODE_PATH
@manual_znode = ManualFailover::ZNODE_PATH
@mutex = Mutex.new
end
# Starts the node manager.
#
# @note This method does not return until the manager terminates.
def start
@queue = Queue.new
@leader = false
setup_zk
logger.info('Waiting to become master Node Manager ...')
@zk.with_lock(LOCK_PATH) do
@leader = true
logger.info('Acquired master Node Manager lock')
discover_nodes
initialize_path
spawn_watchers
handle_state_reports
end
rescue ZK::Exceptions::InterruptedSession => ex
logger.error("ZK error while attempting to manage nodes: #{ex.inspect}")
logger.error(ex.backtrace.join("\n"))
shutdown
sleep(TIMEOUT)
retry
end
# Notifies the manager of a state change. Used primarily by
# {RedisFailover::NodeWatcher} to inform the manager of watched node states.
#
# @param [Node] node the node
# @param [Symbol] state the state
def notify_state(node, state)
@queue << [node, state]
end
# Performs a graceful shutdown of the manager.
def shutdown
@queue.clear
@queue << nil
@watchers.each(&:shutdown) if @watchers
@zk.close! if @zk
end
private
# Configures the ZooKeeper client.
def setup_zk
@zk.close! if @zk
@zk = ZK.new("#{@options[:zkservers]}#{@options[:chroot] || ''}")
@zk.register(@manual_znode) do |event|
@mutex.synchronize do
if event.node_changed?
schedule_manual_failover
end
end
end
@zk.on_connected { @zk.stat(@manual_znode, :watch => true) }
@zk.stat(@manual_znode, :watch => true)
end
# Handles periodic state reports from {RedisFailover::NodeWatcher} instances.
def handle_state_reports
while state_report = @queue.pop
begin
node, state = state_report
case state
when :unavailable then handle_unavailable(node)
when :available then handle_available(node)
when :syncing then handle_syncing(node)
when :manual_failover then handle_manual_failover(node)
else raise InvalidNodeStateError.new(node, state)
end
# flush current state
write_state
rescue ZK::Exceptions::InterruptedSession
# fail hard if this is a ZK connection-related error
raise
rescue => ex
logger.error("Error handling #{state_report.inspect}: #{ex.inspect}")
logger.error(ex.backtrace.join("\n"))
end
end
end
# Handles an unavailable node.
#
# @param [Node] node the unavailable node
def handle_unavailable(node)
  # Already tracked as unavailable -- nothing more to do.
  return if @unavailable.include?(node)
  logger.info("Handling unavailable node: #{node}")
  @unavailable << node
  unless node == @master
    # A slave dropped out; just stop tracking it as a slave.
    return @slaves.delete(node)
  end
  # The master itself dropped out; elect a replacement.
  logger.info("Demoting currently unavailable master #{node}.")
  promote_new_master
end
# Handles an available node: first reconciles its runtime role with the
# manager's view, then registers it as a slave (or promotes it when no
# master is known).
#
# @param [Node] node the available node
def handle_available(node)
  reconcile(node)
  # no-op if we already know about this node
  return if @master == node || @slaves.include?(node)
  logger.info("Handling available node: #{node}")
  if @master
    # master already exists, make a slave
    node.make_slave!(@master)
    @slaves << node
  else
    # no master exists, make this the new master
    promote_new_master(node)
  end
  # Whatever role it got, it is no longer unavailable.
  @unavailable.delete(node)
end
# Handles a node that is currently syncing.
#
# @param [Node] node the syncing node
def handle_syncing(node)
  reconcile(node)
  if node.syncing_with_master? && node.prohibits_stale_reads?
    # Not safe to serve reads yet -- park it with the unavailable set
    # until it finishes syncing with the master.
    logger.info("Node #{node} not ready yet, still syncing with master.")
    force_unavailable_slave(node)
  else
    # Either fully synced, or stale reads are acceptable; use the node.
    handle_available(node)
  end
end
# Handles a manual failover request to the given node.
#
# @param [Node] node the candidate node for failover
def handle_manual_failover(node)
  # no-op if node to be failed over is already master
  return if @master == node
  logger.info("Handling manual failover")
  # Return the current master (when one exists) to the slave pool. The
  # nil-guard prevents a missing master from being pushed as a nil slave,
  # which would later crash slave redirection.
  @slaves << @master if @master
  @slaves.delete(node)
  promote_new_master(node)
end
# Promotes a new master.
#
# @param [Node] node the optional node to promote
# @note if no node is specified, a random slave will be used
# NOTE(review): @slaves.pop actually takes the *last* slave, not a random
# one -- confirm whether the doc note or the implementation is intended.
def promote_new_master(node = nil)
  # Drop the published znode first so clients cannot read stale node
  # state while the failover is in progress.
  delete_path
  @master = nil
  # make a specific node or slave the new master
  candidate = node || @slaves.pop
  unless candidate
    logger.error('Failed to promote a new master, no candidate available.')
    return
  end
  # Point the remaining slaves at the candidate before flipping its role.
  redirect_slaves_to(candidate)
  candidate.make_master!
  @master = candidate
  create_path
  write_state
  logger.info("Successfully promoted #{candidate} to master.")
end
# Discovers the current master and slave nodes.
#
# @raise [NoMasterError] when no reachable master can be found among the
#   configured nodes
def discover_nodes
  @unavailable = []
  # uniq guards against duplicate node definitions in the options.
  nodes = @options[:nodes].map { |opts| Node.new(opts) }.uniq
  raise NoMasterError unless @master = find_master(nodes)
  @slaves = nodes - [@master]
  logger.info("Managing master (#{@master}) and slaves" +
    " (#{@slaves.map(&:to_s).join(', ')})")
  # ensure that slaves are correctly pointing to this master
  redirect_slaves_to(@master)
end
# Spawns the {RedisFailover::NodeWatcher} instances for each managed node.
# Every known node -- master, slaves and unavailable -- gets a watcher.
def spawn_watchers
  max_failures = @options[:max_failures] || 3
  all_nodes = [@master, @slaves, @unavailable].flatten
  @watchers = all_nodes.map { |node| NodeWatcher.new(self, node, max_failures) }
  @watchers.each(&:watch)
end
# Searches for the master node.
#
# Nodes that raise NodeUnavailableError while being queried are treated
# as non-masters instead of propagating the error.
#
# @param [Array<Node>] nodes the nodes to search
# @return [Node] the found master node, nil if not found
def find_master(nodes)
  nodes.find do |node|
    begin
      node.master?
    rescue NodeUnavailableError
      false
    end
  end
end
# Redirects all slaves to the specified node.
# Iterates over a snapshot of @slaves because unreachable slaves are
# removed from the live list mid-iteration via force_unavailable_slave.
#
# @param [Node] node the node to which slaves are redirected
def redirect_slaves_to(node)
  snapshot = @slaves.dup
  snapshot.each do |slave|
    begin
      slave.make_slave!(node)
    rescue NodeUnavailableError
      logger.info("Failed to redirect unreachable slave #{slave} to #{node}")
      force_unavailable_slave(slave)
    end
  end
end
# Forces a slave to be marked as unavailable: removes it from the slave
# pool and records it in the unavailable set exactly once.
#
# @param [Node] node the node to force as unavailable
def force_unavailable_slave(node)
  @slaves.delete(node)
  return if @unavailable.include?(node)
  @unavailable << node
end
# It's possible that a newly available node may have been restarted
# and completely lost its dynamically set run-time role by the node
# manager. This method ensures that the node resumes its role as
# determined by the manager.
#
# @param [Node] node the node to reconcile
def reconcile(node)
  # Fast paths: the node's actual role already matches our view.
  return if @master == node && node.master?
  return if @master && node.slave_of?(@master)
  logger.info("Reconciling node #{node}")
  if @master == node && !node.master?
    # we think the node is a master, but the node doesn't
    node.make_master!
    return
  end
  # verify that node is a slave for the current master
  if @master && !node.slave_of?(@master)
    node.make_slave!(@master)
  end
end
# Builds a serializable snapshot of the managed nodes.
#
# @return [Hash] the set of current nodes grouped by category
#   (:master is nil when no master is known)
def current_nodes
  master_name = @master && @master.to_s
  {
    :master => master_name,
    :slaves => @slaves.map(&:to_s),
    :unavailable => @unavailable.map(&:to_s)
  }
end
# Deletes the znode path containing the redis nodes.
# A missing znode is tolerated (logged, not raised).
def delete_path
  @zk.delete(@znode)
  logger.info("Deleted ZooKeeper node #{@znode}")
rescue ZK::Exceptions::NoNode => ex
  logger.info("Tried to delete missing znode: #{ex.inspect}")
end
# Creates the znode path containing the redis nodes.
# The znode is ephemeral: it disappears automatically when this
# manager's ZooKeeper session ends.
def create_path
  unless @zk.exists?(@znode)
    @zk.create(@znode, encode(current_nodes), :ephemeral => true)
    logger.info("Created ZooKeeper node #{@znode}")
  end
rescue ZK::Exceptions::NodeExists
  # best effort -- another writer won the race to create the node
end
# Initializes the znode path containing the redis nodes:
# ensures the znode exists, then publishes the current node state.
def initialize_path
  create_path
  write_state
end
# Writes the current redis nodes state to the znode path.
# create_path runs first to guard against the znode having vanished
# (e.g. after an ephemeral-session expiry) before the set.
def write_state
  create_path
  @zk.set(@znode, encode(current_nodes))
end
# Schedules a manual failover to a redis node.
# Reads the requested target from the manual znode (re-arming the watch)
# and enqueues a :manual_failover state report for it. Only the elected
# leader acts on these requests.
def schedule_manual_failover
  return unless @leader
  new_master = @zk.get(@manual_znode, :watch => true).first
  logger.info("Received manual failover request for: #{new_master}")
  node = if new_master == ManualFailover::ANY_SLAVE
    @slaves.sample
  else
    host, port = new_master.split(':', 2)
    # NOTE(review): port is passed as a String here -- confirm that Node
    # normalizes it, otherwise it may not compare equal to nodes built
    # from Integer-configured ports.
    Node.new(:host => host, :port => port, :password => @options[:password])
  end
  notify_state(node, :manual_failover) if node
end
end
end
|
module RedisFailover
  # NodeWatcher periodically monitors a specific redis node for its availability.
  # NodeWatcher instances periodically report a redis node's current state
  # to the NodeManager for proper handling.
  class NodeWatcher
    include Util

    # Time to sleep before checking on the monitored node's status.
    WATCHER_SLEEP_TIME = 2

    # Creates a new instance.
    #
    # @param [NodeManager] manager the node manager
    # @param [Node] node the node to watch
    # @param [Integer] max_failures the max failures before reporting node as down
    def initialize(manager, node, max_failures)
      @manager = manager
      @node = node
      @max_failures = max_failures
      @monitor_thread = nil
      @done = false
    end

    # Starts the node watcher.
    #
    # @note this method returns immediately and causes monitoring to be
    #   performed in a new background thread
    def watch
      @monitor_thread ||= Thread.new { monitor_node }
      self
    end

    # Performs a graceful shutdown of this watcher.
    #
    # Signals the monitor loop to stop, wakes the node (it may be blocked
    # in Node#wait), then joins the monitor thread. Joining replaces the
    # previous condition-variable handshake, which could block forever
    # when the monitor thread had already broadcast and exited before
    # this method started waiting.
    def shutdown
      @done = true
      begin
        @node.wakeup
      rescue
        # best effort
      end
      @monitor_thread.join if @monitor_thread
    rescue => ex
      logger.warn("Failed to gracefully shutdown watcher for #{@node}: #{ex.inspect}")
    end

    private

    # Periodically monitors the redis node and reports state changes to
    # the {RedisFailover::NodeManager}.
    def monitor_node
      failures = 0
      loop do
        begin
          break if @done
          sleep(WATCHER_SLEEP_TIME)
          latency = Benchmark.realtime { @node.ping }
          failures = 0
          notify(:available, latency)
          @node.wait
        rescue NodeUnavailableError => ex
          logger.debug("Failed to communicate with node #{@node}: #{ex.inspect}")
          failures += 1
          if failures >= @max_failures
            notify(:unavailable)
            failures = 0
          end
        rescue Exception => ex
          # Intentionally broad: the monitor loop must survive any error.
          logger.error("Unexpected error while monitoring node #{@node}: #{ex.inspect}")
          logger.error(ex.backtrace.join("\n"))
        end
      end
    end

    # Notifies the manager of a node's state.
    #
    # @param [Symbol] state the node's state
    # @param [Float] latency an optional latency
    def notify(state, latency = nil)
      @manager.notify_state(@node, state, latency)
    end
  end
end
Fix shutdown sequence
module RedisFailover
  # NodeWatcher periodically monitors a specific redis node for its availability.
  # NodeWatcher instances periodically report a redis node's current state
  # to the NodeManager for proper handling.
  class NodeWatcher
    include Util

    # Time to sleep before checking on the monitored node's status.
    WATCHER_SLEEP_TIME = 2

    # Creates a new instance.
    #
    # @param [NodeManager] manager the node manager
    # @param [Node] node the node to watch
    # @param [Integer] max_failures the max failures before reporting node as down
    def initialize(manager, node, max_failures)
      @manager = manager
      @node = node
      @max_failures = max_failures
      @monitor_thread = nil
      @done = false
    end

    # Starts the node watcher.
    #
    # @note this method returns immediately and causes monitoring to be
    #   performed in a new background thread
    def watch
      @monitor_thread ||= Thread.new { monitor_node }
      self
    end

    # Performs a graceful shutdown of this watcher.
    #
    # The nil-guard on the join prevents a NoMethodError (and a spurious
    # warning) when #shutdown is invoked before #watch ever started the
    # monitor thread; the warning now also records the actual error.
    def shutdown
      @done = true
      begin
        @node.wakeup
      rescue
        # best effort
      end
      @monitor_thread.join if @monitor_thread
    rescue => ex
      logger.warn("Failed to gracefully shutdown watcher for #{@node}: #{ex.inspect}")
    end

    private

    # Periodically monitors the redis node and reports state changes to
    # the {RedisFailover::NodeManager}.
    def monitor_node
      failures = 0
      loop do
        begin
          break if @done
          sleep(WATCHER_SLEEP_TIME)
          latency = Benchmark.realtime { @node.ping }
          failures = 0
          notify(:available, latency)
          @node.wait
        rescue NodeUnavailableError => ex
          logger.debug("Failed to communicate with node #{@node}: #{ex.inspect}")
          failures += 1
          if failures >= @max_failures
            notify(:unavailable)
            failures = 0
          end
        rescue Exception => ex
          # Intentionally broad: the monitor loop must survive any error.
          logger.error("Unexpected error while monitoring node #{@node}: #{ex.inspect}")
          logger.error(ex.backtrace.join("\n"))
        end
      end
    end

    # Notifies the manager of a node's state.
    #
    # @param [Symbol] state the node's state
    # @param [Float] latency an optional latency
    def notify(state, latency = nil)
      @manager.notify_state(@node, state, latency)
    end
  end
end
|
require_relative '../base_input'
class MojangInput
  # Reads a general mojang-style library hash (symbolized keys) and
  # converts it into a VersionLibrary.
  # TODO os versions
  #
  # @param [Hash] object the raw library entry
  # @return [VersionLibrary] the sanitized library
  def self.sanetize_mojang_library(object)
    lib = VersionLibrary.new
    lib.name = object[:name]
    lib.url = object.key?(:url) ? object[:url] : 'https://libraries.minecraft.net/'
    lib.oldRules = object[:rules] if object.key? :rules
    lib.oldNatives = object[:natives] if object.key? :natives
    # dup: possiblePlatforms is shared class-level state; the << and
    # delete calls below would otherwise mutate it for every later call.
    allowed = VersionLibrary.possiblePlatforms.dup
    if object.key? :rules
      # NOTE(review): actions are compared as symbols (:allow) but os
      # values as strings ('windows') -- confirm both match the parsed
      # input, since JSON.parse(symbolize_names: true) symbolizes keys only.
      object[:rules].each do |rule|
        if rule[:action] == :allow
          if rule.key? :os
            # Fixed: these branches previously referenced the undefined
            # local `rules`, raising NameError for linux/osx rules.
            if rule[:os] == 'windows'
              allowed << 'win32'
              allowed << 'win64'
            elsif rule[:os] == 'linux'
              allowed << 'lin32'
              allowed << 'lin64'
            elsif rule[:os] == 'osx'
              allowed << 'osx'
            end
          else
            allowed = allowed + VersionLibrary.possiblePlatforms
          end
        elsif rule[:action] == :disallow
          if rule.key? :os
            if rule[:os] == 'windows'
              allowed.delete 'win32'
              allowed.delete 'win64'
            elsif rule[:os] == 'linux'
              allowed.delete 'lin32'
              allowed.delete 'lin64'
            elsif rule[:os] == 'osx'
              allowed.delete 'osx'
            end
          else
            allowed = []
          end
        end
      end
    end
    lib.platforms = allowed
    if object.key? :natives
      natives = object[:natives]
      lib.natives = {} unless lib.natives
      if natives.key? :windows
        lib.natives['win32'] = natives[:windows].gsub "${arch}", '32'
        lib.natives['win64'] = natives[:windows].gsub "${arch}", '64'
      end
      if natives.key? :linux
        lib.natives['lin32'] = natives[:linux].gsub "${arch}", '32'
        lib.natives['lin64'] = natives[:linux].gsub "${arch}", '64'
      end
      if natives.key? :osx
        # NOTE(review): only a 64-bit osx native is produced -- confirm
        # that 32-bit osx is intentionally unsupported.
        lib.natives['osx64'] = natives[:osx].gsub "${arch}", '64'
      end
    end
    return lib
  end

  def initialize(artifact)
    @artifact = artifact
  end

  # Parses a mojang version JSON document (a String, or an already
  # parsed Hash) into sanitized Version file objects.
  #
  # @param [String, Hash] data the version document
  # @return [Array] the sanitized version files, or [] when the document
  #   requires a launcher version newer than we support (> 14)
  def parse(data)
    object = data.is_a?(Hash) ? data : JSON.parse(data, symbolize_names: true)
    if object[:minimumLauncherVersion] && object[:minimumLauncherVersion] > 14
      # TODO log error
      return []
    end
    file = Version.new
    file.uid = @artifact
    file.version = object[:id]
    file.time = object[:releaseTime]
    file.type = object[:type]
    file.mainClass = object[:mainClass]
    file.assets = object[:assets]
    file.minecraftArguments = object[:minecraftArguments]
    file.libraries = object[:libraries].map do |obj|
      MojangInput.sanetize_mojang_library obj
    end
    file.mainLib = VersionLibrary.new
    file.mainLib.name = 'net.minecraft:minecraft:' + file.version
    file.mainLib.absoluteUrl = 'http://s3.amazonaws.com/Minecraft.Download/versions/' + file.version + '/' + file.version + '.jar'
    return BaseSanitizer.sanitize file, MojangSplitLWJGLSanitizer
  end
end
class MojangExtractTweakersSanitizer < BaseSanitizer
  # Pulls every "--tweakClass <name>" occurrence out of the minecraft
  # argument string into file.tweakers, stripping them from the arguments.
  def self.sanitize(file)
    args = file.minecraftArguments
    file.tweakers = args.scan(/--tweakClass ([^ ]*)/).flatten
    file.minecraftArguments = args.gsub(/\ ?--tweakClass ([^ ]*)/, '')
    file
  end
end
# extract lwjgl specific libraries and natives
class MojangSplitLWJGLSanitizer < BaseSanitizer
  @@lwjglList = ['org.lwjgl', 'net.java.jinput', 'net.java.jutils']
  @@lwjglMaster = 'org.lwjgl.lwjgl:lwjgl:'

  # Splits LWJGL-family libraries out of the version's library list into
  # a standalone 'org.lwjgl' Version and records a requirement on it.
  #
  # @param [Version] file the version file to split
  # @return [Array] the stripped file plus the extracted lwjgl version
  def self.sanitize(file)
    lwjgl = Version.new
    lwjgl.uid = 'org.lwjgl'
    # Mark the extracted LWJGL version as a release so it carries a type
    # like other generated versions (previously left unset here).
    lwjgl.type = 'release'
    lwjgl.libraries = []
    file.libraries.select! do |lib|
      if lib.name.include? @@lwjglMaster
        # The master lwjgl artifact determines the extracted version number.
        lwjgl.version = MavenIdentifier.new(lib.name).version
        lwjgl.time = nil
      end
      # Keep the library in `file` only when it is NOT an lwjgl-family
      # lib; matching libs are moved into the lwjgl version instead.
      nil == @@lwjglList.find do |lwjglCandidate|
        if lib.name.include? lwjglCandidate
          lwjgl.libraries << lib
          true
        else
          false
        end
      end
    end
    file.requires = [] if file.requires.nil?
    file.requires << Referenced.new('org.lwjgl')
    return [file, lwjgl]
  end
end
class MojangTraitsSanitizer < BaseSanitizer
  # Placeholder sanitizer: currently a no-op that returns the file unchanged.
  def self.sanitize(file)
    if file.uid == 'net.minecraft'
      # NOTE(review): branch body is empty -- presumably trait assignment
      # for net.minecraft versions was planned here; confirm intent.
    end
    file
  end
end
class MojangProcessArgumentsSanitizer < BaseSanitizer
  # Canonical argument strings keyed by the legacy processArguments hint.
  ARGUMENT_TEMPLATES = {
    'legacy' => ' ${auth_player_name} ${auth_session}',
    'username_session' => '--username ${auth_player_name} --session ${auth_session}',
    'username_session_version' => '--username ${auth_player_name} --session ${auth_session} --version ${profile_name}'
  }.freeze

  # Rewrites the legacy extra[:processArguments] hint into an explicit
  # minecraftArguments string, then removes the hint from extra.
  def self.sanitize(file)
    hint = file.extra[:processArguments]
    if hint
      template = ARGUMENT_TEMPLATES[hint]
      file.minecraftArguments = template if template
      file.extra.delete :processArguments
    end
    file
  end
end
Add 'release' type to LWJGL versions
require_relative '../base_input'
class MojangInput
  # Reads a general mojang-style library hash (symbolized keys) and
  # converts it into a VersionLibrary.
  # TODO os versions
  #
  # @param [Hash] object the raw library entry
  # @return [VersionLibrary] the sanitized library
  def self.sanetize_mojang_library(object)
    lib = VersionLibrary.new
    lib.name = object[:name]
    lib.url = object.key?(:url) ? object[:url] : 'https://libraries.minecraft.net/'
    lib.oldRules = object[:rules] if object.key? :rules
    lib.oldNatives = object[:natives] if object.key? :natives
    # dup: possiblePlatforms is shared class-level state; the << and
    # delete calls below would otherwise mutate it for every later call.
    allowed = VersionLibrary.possiblePlatforms.dup
    if object.key? :rules
      # NOTE(review): actions are compared as symbols (:allow) but os
      # values as strings ('windows') -- confirm both match the parsed
      # input, since JSON.parse(symbolize_names: true) symbolizes keys only.
      object[:rules].each do |rule|
        if rule[:action] == :allow
          if rule.key? :os
            # Fixed: these branches previously referenced the undefined
            # local `rules`, raising NameError for linux/osx rules.
            if rule[:os] == 'windows'
              allowed << 'win32'
              allowed << 'win64'
            elsif rule[:os] == 'linux'
              allowed << 'lin32'
              allowed << 'lin64'
            elsif rule[:os] == 'osx'
              allowed << 'osx'
            end
          else
            allowed = allowed + VersionLibrary.possiblePlatforms
          end
        elsif rule[:action] == :disallow
          if rule.key? :os
            if rule[:os] == 'windows'
              allowed.delete 'win32'
              allowed.delete 'win64'
            elsif rule[:os] == 'linux'
              allowed.delete 'lin32'
              allowed.delete 'lin64'
            elsif rule[:os] == 'osx'
              allowed.delete 'osx'
            end
          else
            allowed = []
          end
        end
      end
    end
    lib.platforms = allowed
    if object.key? :natives
      natives = object[:natives]
      lib.natives = {} unless lib.natives
      if natives.key? :windows
        lib.natives['win32'] = natives[:windows].gsub "${arch}", '32'
        lib.natives['win64'] = natives[:windows].gsub "${arch}", '64'
      end
      if natives.key? :linux
        lib.natives['lin32'] = natives[:linux].gsub "${arch}", '32'
        lib.natives['lin64'] = natives[:linux].gsub "${arch}", '64'
      end
      if natives.key? :osx
        # NOTE(review): only a 64-bit osx native is produced -- confirm
        # that 32-bit osx is intentionally unsupported.
        lib.natives['osx64'] = natives[:osx].gsub "${arch}", '64'
      end
    end
    return lib
  end

  def initialize(artifact)
    @artifact = artifact
  end

  # Parses a mojang version JSON document (a String, or an already
  # parsed Hash) into sanitized Version file objects.
  #
  # @param [String, Hash] data the version document
  # @return [Array] the sanitized version files, or [] when the document
  #   requires a launcher version newer than we support (> 14)
  def parse(data)
    object = data.is_a?(Hash) ? data : JSON.parse(data, symbolize_names: true)
    if object[:minimumLauncherVersion] && object[:minimumLauncherVersion] > 14
      # TODO log error
      return []
    end
    file = Version.new
    file.uid = @artifact
    file.version = object[:id]
    file.time = object[:releaseTime]
    file.type = object[:type]
    file.mainClass = object[:mainClass]
    file.assets = object[:assets]
    file.minecraftArguments = object[:minecraftArguments]
    file.libraries = object[:libraries].map do |obj|
      MojangInput.sanetize_mojang_library obj
    end
    file.mainLib = VersionLibrary.new
    file.mainLib.name = 'net.minecraft:minecraft:' + file.version
    file.mainLib.absoluteUrl = 'http://s3.amazonaws.com/Minecraft.Download/versions/' + file.version + '/' + file.version + '.jar'
    return BaseSanitizer.sanitize file, MojangSplitLWJGLSanitizer
  end
end
class MojangExtractTweakersSanitizer < BaseSanitizer
  # Pulls every "--tweakClass <name>" occurrence out of the minecraft
  # argument string into file.tweakers, stripping them from the arguments.
  def self.sanitize(file)
    args = file.minecraftArguments
    file.tweakers = args.scan(/--tweakClass ([^ ]*)/).flatten
    file.minecraftArguments = args.gsub(/\ ?--tweakClass ([^ ]*)/, '')
    file
  end
end
# extract lwjgl specific libraries and natives
class MojangSplitLWJGLSanitizer < BaseSanitizer
  @@lwjglList = ['org.lwjgl', 'net.java.jinput', 'net.java.jutils']
  @@lwjglMaster = 'org.lwjgl.lwjgl:lwjgl:'

  # Moves LWJGL-family libraries out of the given version file into a
  # standalone 'org.lwjgl' Version and adds a requirement referencing it.
  #
  # @param [Version] file the version file to split
  # @return [Array] the stripped file plus the extracted lwjgl version
  def self.sanitize(file)
    lwjgl = Version.new
    lwjgl.uid = 'org.lwjgl'
    lwjgl.type = 'release'
    lwjgl.libraries = []
    file.libraries.select! do |lib|
      if lib.name.include?(@@lwjglMaster)
        # The master lwjgl artifact determines the extracted version number.
        lwjgl.version = MavenIdentifier.new(lib.name).version
        lwjgl.time = nil
      end
      moved = @@lwjglList.any? { |candidate| lib.name.include?(candidate) }
      lwjgl.libraries << lib if moved
      # Keep only libraries that were not moved into the lwjgl version.
      !moved
    end
    file.requires = [] if file.requires.nil?
    file.requires << Referenced.new('org.lwjgl')
    [file, lwjgl]
  end
end
class MojangTraitsSanitizer < BaseSanitizer
  # Placeholder sanitizer: currently a no-op that returns the file unchanged.
  def self.sanitize(file)
    if file.uid == 'net.minecraft'
      # NOTE(review): branch body is empty -- presumably trait assignment
      # for net.minecraft versions was planned here; confirm intent.
    end
    file
  end
end
class MojangProcessArgumentsSanitizer < BaseSanitizer
  # Canonical argument strings keyed by the legacy processArguments hint.
  ARGUMENT_TEMPLATES = {
    'legacy' => ' ${auth_player_name} ${auth_session}',
    'username_session' => '--username ${auth_player_name} --session ${auth_session}',
    'username_session_version' => '--username ${auth_player_name} --session ${auth_session} --version ${profile_name}'
  }.freeze

  # Rewrites the legacy extra[:processArguments] hint into an explicit
  # minecraftArguments string, then removes the hint from extra.
  def self.sanitize(file)
    hint = file.extra[:processArguments]
    if hint
      template = ARGUMENT_TEMPLATES[hint]
      file.minecraftArguments = template if template
      file.extra.delete :processArguments
    end
    file
  end
end
|
require 'spec_helper'
require 'cancan/matchers'
describe 'User' do
describe 'Abilities' do
let!(:admin) { create(:admin) }
# see https://github.com/CanCanCommunity/cancancan/wiki/Testing-Abilities
subject(:ability){ Ability.new(user) }
let(:user){ nil }
let!(:my_conference) { create(:full_conference) }
let!(:my_cfp) { create(:cfp, program: my_conference.program) }
let(:my_venue) { my_conference.venue || create(:venue, conference: my_conference) }
let(:my_registration) { create(:registration, conference: my_conference, user: admin) }
let(:other_registration) { create(:registration, conference: conference_public) }
let(:my_event) { create(:event_full, program: my_conference.program) }
let(:my_room) { create(:room, venue: my_conference.venue) }
let!(:my_event_scheduled) { create(:event_full, program: my_conference.program, room_id: my_room.id) }
let(:other_event) { create(:event_full, program: conference_public.program) }
let(:conference_not_public) { create(:conference, splashpage: create(:splashpage, public: false)) }
let(:conference_public) { create(:full_conference, splashpage: create(:splashpage, public: true)) }
let!(:conference_public_cfp) { create(:cfp, program: conference_public.program) }
let(:event_confirmed) { create(:event, state: 'confirmed') }
let(:event_unconfirmed) { create(:event) }
let(:commercial_event_confirmed) { create(:commercial, commercialable: event_confirmed) }
let(:commercial_event_unconfirmed) { create(:commercial, commercialable: event_unconfirmed) }
let(:registration) { create(:registration) }
let(:program_with_cfp) { create(:program, cfp: create(:cfp)) }
let(:program_without_cfp) { create(:program) }
let(:conference_with_open_registration) { create(:conference) }
let!(:open_registration_period) { create(:registration_period, conference: conference_with_open_registration, start_date: Date.current - 6.days) }
let(:conference_with_closed_registration) { create(:conference) }
let!(:closed_registration_period) { create(:registration_period, conference: conference_with_closed_registration, start_date: Date.current - 6.days, end_date: Date.current - 6.days) }
# Test abilities for not signed in users
context 'when user is not signed in' do
it{ should be_able_to(:index, Conference)}
it{ should be_able_to(:show, conference_public)}
it{ should_not be_able_to(:show, conference_not_public)}
it do
conference_public.program.schedule_public = true
conference_public.program.save
should be_able_to(:schedule, conference_public)
end
it{ should_not be_able_to(:schedule, conference_not_public)}
it{ should be_able_to(:show, event_confirmed)}
it{ should_not be_able_to(:show, event_unconfirmed)}
it{ should be_able_to(:show, commercial_event_confirmed)}
it{ should_not be_able_to(:show, commercial_event_unconfirmed)}
it{ should be_able_to(:show, User)}
it{ should be_able_to(:create, User)}
it{ should be_able_to(:show, Registration.new)}
it{ should be_able_to(:create, Registration.new(conference_id: conference_with_open_registration.id))}
it{ should be_able_to(:new, Registration.new(conference_id: conference_with_open_registration.id))}
it{ should_not be_able_to(:new, Registration.new(conference_id: conference_with_closed_registration.id))}
it{ should_not be_able_to(:create, Registration.new(conference_id: conference_with_closed_registration.id))}
it{ should_not be_able_to(:manage, registration)}
it{ should be_able_to(:new, Event.new(program: program_with_cfp)) }
it{ should_not be_able_to(:new, Event.new(program: program_without_cfp)) }
it{ should_not be_able_to(:create, Event.new(program: program_without_cfp))}
it{ should be_able_to(:show, Event.new)}
it{ should_not be_able_to(:manage, :any)}
end
# Test abilities for signed in users (without any role)
context 'when user is signed in' do
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:event_user2) { create(:submitter, user: user2) }
let(:subscription) { create(:subscription, user: user) }
let(:registration_public) { create(:registration, conference: conference_public, user: user) }
let(:registration_not_public) { create(:registration, conference: conference_not_public, user: user) }
let(:user_event_with_cfp) { create(:event, users: [user], program: program_with_cfp) }
let(:user_commercial) { create(:commercial, commercialable: user_event_with_cfp) }
it{ should be_able_to(:manage, user) }
it{ should be_able_to(:manage, registration_public) }
it{ should be_able_to(:manage, registration_not_public) }
it{ should_not be_able_to(:new, Registration.new(conference_id: conference_with_closed_registration.id))}
it{ should_not be_able_to(:create, Registration.new(conference_id: conference_with_closed_registration.id))}
it{ should be_able_to(:index, Ticket) }
it{ should be_able_to(:manage, TicketPurchase.new(user_id: user.id)) }
it{ should be_able_to(:create, Subscription.new(user_id: user.id)) }
it{ should be_able_to(:destroy, subscription) }
it{ should be_able_to(:manage, user_event_with_cfp) }
it{ should_not be_able_to(:new, Event.new(program: program_without_cfp)) }
it{ should_not be_able_to(:create, Event.new(program: program_without_cfp)) }
it{ should_not be_able_to(:new, Event.new(program: program_with_cfp, event_users: [event_user2])) }
it{ should_not be_able_to(:create, Event.new(program: program_with_cfp, event_users: [event_user2])) }
it{ should_not be_able_to(:manage, event_unconfirmed) }
it{ should be_able_to(:create, user_event_with_cfp.commercials.new) }
it{ should be_able_to(:manage, user_commercial) }
it{ should_not be_able_to(:manage, commercial_event_unconfirmed) }
end
context 'user #is_admin?' do
let(:venue) { my_conference.venue }
let(:room) { create(:room, venue: venue) }
let!(:event) { create(:event_full, program: my_conference.program, room_id: room.id) }
let(:user) { create(:admin) }
it{ should be_able_to(:manage, :all) }
it{ should_not be_able_to(:destroy, my_conference.program) }
it{ should_not be_able_to(:destroy, my_venue) }
end
# Shared examples: restrictions common to every conference role.
# A role holder must not modify role assignments of an unrelated conference,
# but may still view them (:show / :index).
shared_examples 'user with any role' do
before do
# A conference the user holds no role for.
@other_conference = create(:conference)
end
%w(organizer cfp info_desk volunteers_coordinator).each do |role|
# @other_conference is an example-level ivar set in the before hook, so it
# is resolved when each example runs, not at definition time.
it{ should_not be_able_to(:toggle_user, Role.find_by(name: role, resource: @other_conference)) }
it{ should_not be_able_to(:update, Role.find_by(name: role, resource: @other_conference)) }
it{ should_not be_able_to(:edit, Role.find_by(name: role, resource: @other_conference)) }
it{ should be_able_to(:show, Role.find_by(name: role, resource: @other_conference)) }
it{ should be_able_to(:index, Role.find_by(name: role, resource: @other_conference)) }
end
end
# Shared examples: a non-organizer role may toggle users only for its own
# role, may never edit/update any role, and may view every role of its
# conference. role_name selects which role gets the positive :toggle_user.
shared_examples 'user with non-organizer role' do |role_name|
%w(organizer cfp info_desk volunteers_coordinator).each do |role|
if role == role_name
it{ should be_able_to(:toggle_user, Role.find_by(name: role, resource: my_conference)) }
else
it{ should_not be_able_to(:toggle_user, Role.find_by(name: role, resource: my_conference)) }
end
it{ should_not be_able_to(:update, Role.find_by(name: role, resource: my_conference)) }
it{ should_not be_able_to(:edit, Role.find_by(name: role, resource: my_conference)) }
it{ should be_able_to(:show, Role.find_by(name: role, resource: my_conference)) }
it{ should be_able_to(:index, Role.find_by(name: role, resource: my_conference)) }
end
end
# Abilities of a user holding the 'organizer' role for my_conference:
# full management of the conference and all of its nested resources, while
# the same resources of an unrelated (public) conference stay off limits.
context 'when user has the role organizer' do
  let!(:my_conference) { create(:full_conference) }
  let(:role) { Role.find_by(name: 'organizer', resource: my_conference) }
  let(:user) { create(:user, role_ids: [role.id]) }
  it{ should_not be_able_to(:destroy, my_conference.program) }
  # The venue can only be destroyed once none of its rooms is scheduled.
  it 'when there is a room assigned to an event' do
    should_not be_able_to(:destroy, my_venue)
  end
  it 'when there are no rooms used' do
    my_event_scheduled.room_id = nil
    my_event_scheduled.save!
    my_event_scheduled.reload
    should be_able_to(:destroy, my_venue)
  end
  # CanCanCan's be_able_to matcher cannot test multiple permissions in a
  # single clause, so :create and :new are asserted in separate examples.
  it{ should be_able_to(:create, Conference) }
  it{ should be_able_to(:new, Conference) }
  it{ should be_able_to(:manage, my_conference) }
  it{ should_not be_able_to(:manage, conference_public) }
  it{ should be_able_to(:manage, my_conference.splashpage) }
  it{ should_not be_able_to(:manage, conference_public.splashpage) }
  it{ should be_able_to(:manage, my_conference.contact) }
  it{ should_not be_able_to(:manage, conference_public.contact) }
  it{ should be_able_to(:manage, my_conference.email_settings) }
  it{ should_not be_able_to(:manage, conference_public.email_settings) }
  it{ should be_able_to(:manage, my_conference.campaigns.first) }
  it{ should_not be_able_to(:manage, conference_public.campaigns.first) }
  it{ should be_able_to(:manage, my_conference.targets.first) }
  it{ should_not be_able_to(:manage, conference_public.targets.first) }
  it{ should be_able_to(:manage, my_conference.commercials.first) }
  it{ should_not be_able_to(:manage, conference_public.commercials.first) }
  it{ should be_able_to(:manage, my_conference.registration_period) }
  it{ should_not be_able_to(:manage, conference_public.registration_period) }
  it{ should be_able_to(:manage, my_conference.questions.first) }
  it{ should_not be_able_to(:manage, conference_public.questions.first) }
  it{ should be_able_to(:manage, my_conference.program.cfp) }
  it{ should_not be_able_to(:manage, conference_public.program.cfp) }
  it{ should be_able_to(:manage, my_conference.venue) }
  it{ should_not be_able_to(:manage, conference_public.venue) }
  it{ should be_able_to(:manage, my_conference.lodgings.first) }
  it{ should_not be_able_to(:manage, conference_public.lodgings.first) }
  it{ should be_able_to(:manage, my_conference.sponsors.first) }
  it{ should_not be_able_to(:manage, conference_public.sponsors.first) }
  it{ should be_able_to(:manage, my_conference.sponsorship_levels.first) }
  it{ should_not be_able_to(:manage, conference_public.sponsorship_levels.first) }
  it{ should be_able_to(:manage, my_conference.tickets.first) }
  it{ should_not be_able_to(:manage, conference_public.tickets.first) }
  it{ should be_able_to(:manage, my_registration) }
  it{ should_not be_able_to(:manage, other_registration) }
  it{ should be_able_to(:manage, my_event) }
  it{ should_not be_able_to(:manage, other_event) }
  it{ should be_able_to(:manage, my_event.event_type) }
  it{ should_not be_able_to(:manage, other_event.event_type) }
  it{ should be_able_to(:manage, my_event.track) }
  it{ should_not be_able_to(:manage, other_event.track) }
  it{ should be_able_to(:manage, my_event.difficulty_level) }
  it{ should_not be_able_to(:manage, other_event.difficulty_level) }
  it{ should be_able_to(:manage, my_event.commercials.first) }
  it{ should_not be_able_to(:manage, other_event.commercials.first) }
  it{ should be_able_to(:index, my_event.comment_threads.first) }
  it{ should_not be_able_to(:index, other_event.comment_threads.first) }
  # Organizers may fully administer every role of their own conference.
  %w(organizer cfp info_desk volunteers_coordinator).each do |role|
    it{ should be_able_to(:toggle_user, Role.find_by(name: role, resource: my_conference)) }
    it{ should be_able_to(:edit, Role.find_by(name: role, resource: my_conference)) }
    it{ should be_able_to(:update, Role.find_by(name: role, resource: my_conference)) }
    it{ should be_able_to(:show, Role.find_by(name: role, resource: my_conference)) }
    it{ should be_able_to(:index, Role.find_by(name: role, resource: my_conference)) }
  end
  it_behaves_like 'user with any role'
end
# Abilities of a user holding the 'cfp' role: manages the call for papers,
# email settings and events of its own conference; everything else (and all
# of another conference's resources) is off limits. The venue is view-only.
context 'when user has the role cfp' do
  let!(:my_conference) { create(:full_conference) }
  let(:role) { Role.find_by(name: 'cfp', resource: my_conference) }
  let(:user) { create(:user, role_ids: [role.id]) }
  # CanCanCan's be_able_to matcher cannot test multiple permissions in a
  # single clause, so :create and :new are asserted in separate examples.
  it{ should_not be_able_to(:create, Conference.new) }
  it{ should_not be_able_to(:new, Conference.new) }
  it{ should_not be_able_to(:manage, my_conference) }
  it{ should_not be_able_to(:manage, conference_public) }
  it{ should_not be_able_to(:manage, my_conference.splashpage) }
  it{ should_not be_able_to(:manage, conference_public.splashpage) }
  it{ should_not be_able_to(:manage, my_conference.contact) }
  it{ should_not be_able_to(:manage, conference_public.contact) }
  it{ should be_able_to(:manage, my_conference.email_settings) }
  it{ should_not be_able_to(:manage, conference_public.email_settings) }
  it{ should_not be_able_to(:manage, my_conference.campaigns.first) }
  it{ should_not be_able_to(:manage, conference_public.campaigns.first) }
  it{ should_not be_able_to(:manage, my_conference.targets.first) }
  it{ should_not be_able_to(:manage, conference_public.targets.first) }
  it{ should_not be_able_to(:manage, my_conference.commercials.first) }
  it{ should_not be_able_to(:manage, conference_public.commercials.first) }
  it{ should_not be_able_to(:manage, my_conference.registration_period) }
  it{ should_not be_able_to(:manage, conference_public.registration_period) }
  it{ should_not be_able_to(:manage, my_conference.questions.first) }
  it{ should_not be_able_to(:manage, conference_public.questions.first) }
  it{ should be_able_to(:manage, my_conference.program.cfp) }
  it{ should_not be_able_to(:manage, conference_public.program.cfp) }
  # The venue is visible to the cfp role but not manageable.
  it{ should_not be_able_to(:manage, my_conference.venue) }
  it{ should be_able_to(:show, my_conference.venue) }
  it{ should_not be_able_to(:manage, conference_public.venue) }
  it{ should_not be_able_to(:manage, my_conference.lodgings.first) }
  it{ should_not be_able_to(:manage, conference_public.lodgings.first) }
  it{ should_not be_able_to(:manage, my_conference.sponsors.first) }
  it{ should_not be_able_to(:manage, conference_public.sponsors.first) }
  it{ should_not be_able_to(:manage, my_conference.sponsorship_levels.first) }
  it{ should_not be_able_to(:manage, conference_public.sponsorship_levels.first) }
  it{ should_not be_able_to(:manage, my_conference.tickets.first) }
  it{ should_not be_able_to(:manage, conference_public.tickets.first) }
  it{ should_not be_able_to(:manage, my_registration) }
  it{ should_not be_able_to(:manage, other_registration) }
  it{ should be_able_to(:manage, my_event) }
  it{ should_not be_able_to(:manage, other_event) }
  it{ should be_able_to(:manage, my_event.event_type) }
  it{ should_not be_able_to(:manage, other_event.event_type) }
  it{ should be_able_to(:manage, my_event.track) }
  it{ should_not be_able_to(:manage, other_event.track) }
  it{ should be_able_to(:manage, my_event.difficulty_level) }
  it{ should_not be_able_to(:manage, other_event.difficulty_level) }
  it{ should be_able_to(:manage, my_event.commercials.first) }
  it{ should_not be_able_to(:manage, other_event.commercials.first) }
  it{ should be_able_to(:index, my_event.comment_threads.first) }
  it{ should_not be_able_to(:index, other_event.comment_threads.first) }
  it_behaves_like 'user with any role'
  it_behaves_like 'user with non-organizer role', 'cfp'
end
# Abilities of a user holding the 'info_desk' role: manages registration
# questions and registrations of its own conference only; no access to
# events, the venue or any other conference resource.
context 'when user has the role info_desk' do
  let!(:my_conference) { create(:full_conference) }
  let(:role) { Role.find_by(name: 'info_desk', resource: my_conference) }
  let(:user) { create(:user, role_ids: [role.id]) }
  # CanCanCan's be_able_to matcher cannot test multiple permissions in a
  # single clause, so :create and :new are asserted in separate examples.
  it{ should_not be_able_to(:create, Conference.new) }
  it{ should_not be_able_to(:new, Conference.new) }
  it{ should_not be_able_to(:manage, my_conference) }
  it{ should_not be_able_to(:manage, conference_public) }
  it{ should_not be_able_to(:manage, my_conference.splashpage) }
  it{ should_not be_able_to(:manage, conference_public.splashpage) }
  it{ should_not be_able_to(:manage, my_conference.contact) }
  it{ should_not be_able_to(:manage, conference_public.contact) }
  it{ should_not be_able_to(:manage, my_conference.email_settings) }
  it{ should_not be_able_to(:manage, conference_public.email_settings) }
  it{ should_not be_able_to(:manage, my_conference.campaigns.first) }
  it{ should_not be_able_to(:manage, conference_public.campaigns.first) }
  it{ should_not be_able_to(:manage, my_conference.targets.first) }
  it{ should_not be_able_to(:manage, conference_public.targets.first) }
  it{ should_not be_able_to(:manage, my_conference.commercials.first) }
  it{ should_not be_able_to(:manage, conference_public.commercials.first) }
  it{ should_not be_able_to(:manage, my_conference.registration_period) }
  it{ should_not be_able_to(:manage, conference_public.registration_period) }
  it{ should be_able_to(:manage, my_conference.questions.first) }
  it{ should_not be_able_to(:manage, conference_public.questions.first) }
  it{ should_not be_able_to(:manage, my_conference.program.cfp) }
  it{ should_not be_able_to(:manage, conference_public.program.cfp) }
  it{ should_not be_able_to(:manage, my_conference.venue) }
  it{ should_not be_able_to(:show, my_conference.venue) }
  it{ should_not be_able_to(:manage, conference_public.venue) }
  it{ should_not be_able_to(:manage, my_conference.lodgings.first) }
  it{ should_not be_able_to(:manage, conference_public.lodgings.first) }
  it{ should_not be_able_to(:manage, my_conference.sponsors.first) }
  it{ should_not be_able_to(:manage, conference_public.sponsors.first) }
  it{ should_not be_able_to(:manage, my_conference.sponsorship_levels.first) }
  it{ should_not be_able_to(:manage, conference_public.sponsorship_levels.first) }
  it{ should_not be_able_to(:manage, my_conference.tickets.first) }
  it{ should_not be_able_to(:manage, conference_public.tickets.first) }
  it{ should be_able_to(:manage, my_registration) }
  it{ should_not be_able_to(:manage, other_registration) }
  it{ should_not be_able_to(:manage, my_event) }
  it{ should_not be_able_to(:manage, other_event) }
  it{ should_not be_able_to(:manage, my_event.event_type) }
  it{ should_not be_able_to(:manage, other_event.event_type) }
  it{ should_not be_able_to(:manage, my_event.track) }
  it{ should_not be_able_to(:manage, other_event.track) }
  it{ should_not be_able_to(:manage, my_event.difficulty_level) }
  it{ should_not be_able_to(:manage, other_event.difficulty_level) }
  it{ should_not be_able_to(:manage, my_event.commercials.first) }
  it{ should_not be_able_to(:manage, other_event.commercials.first) }
  it{ should_not be_able_to(:index, my_event.comment_threads.first) }
  it{ should_not be_able_to(:index, other_event.comment_threads.first) }
  it_behaves_like 'user with any role'
  it_behaves_like 'user with non-organizer role', 'info_desk'
end
# Abilities of a user holding the 'volunteers_coordinator' role: no access
# to conference, program or registration resources; volunteer-specific
# permissions (Vposition/Vday) are still pending examples below.
context 'when user has the role volunteers_coordinator' do
  let!(:my_conference) { create(:full_conference) }
  let(:role) { Role.find_by(name: 'volunteers_coordinator', resource: my_conference) }
  let(:user) { create(:user, role_ids: [role.id]) }
  # CanCanCan's be_able_to matcher cannot test multiple permissions in a
  # single clause, so :create and :new are asserted in separate examples.
  it{ should_not be_able_to(:create, Conference.new) }
  it{ should_not be_able_to(:new, Conference.new) }
  it{ should_not be_able_to(:manage, my_conference) }
  it{ should_not be_able_to(:manage, conference_public) }
  it{ should_not be_able_to(:manage, my_conference.splashpage) }
  it{ should_not be_able_to(:manage, conference_public.splashpage) }
  it{ should_not be_able_to(:manage, my_conference.contact) }
  it{ should_not be_able_to(:manage, conference_public.contact) }
  it{ should_not be_able_to(:manage, my_conference.email_settings) }
  it{ should_not be_able_to(:manage, conference_public.email_settings) }
  it{ should_not be_able_to(:manage, my_conference.campaigns.first) }
  it{ should_not be_able_to(:manage, conference_public.campaigns.first) }
  it{ should_not be_able_to(:manage, my_conference.targets.first) }
  it{ should_not be_able_to(:manage, conference_public.targets.first) }
  it{ should_not be_able_to(:manage, my_conference.commercials.first) }
  it{ should_not be_able_to(:manage, conference_public.commercials.first) }
  it{ should_not be_able_to(:manage, my_conference.registration_period) }
  it{ should_not be_able_to(:manage, conference_public.registration_period) }
  it{ should_not be_able_to(:manage, my_conference.questions.first) }
  it{ should_not be_able_to(:manage, conference_public.questions.first) }
  it{ should_not be_able_to(:manage, my_conference.program.cfp) }
  it{ should_not be_able_to(:manage, conference_public.program.cfp) }
  it{ should_not be_able_to(:manage, my_conference.venue) }
  it{ should_not be_able_to(:show, my_conference.venue) }
  it{ should_not be_able_to(:manage, conference_public.venue) }
  it{ should_not be_able_to(:manage, my_conference.lodgings.first) }
  it{ should_not be_able_to(:manage, conference_public.lodgings.first) }
  it{ should_not be_able_to(:manage, my_conference.sponsors.first) }
  it{ should_not be_able_to(:manage, conference_public.sponsors.first) }
  it{ should_not be_able_to(:manage, my_conference.sponsorship_levels.first) }
  it{ should_not be_able_to(:manage, conference_public.sponsorship_levels.first) }
  it{ should_not be_able_to(:manage, my_conference.tickets.first) }
  it{ should_not be_able_to(:manage, conference_public.tickets.first) }
  # NOTE(review): sibling role contexts assert on my_registration here;
  # 'registration' belongs to no conference the user has a role for --
  # confirm which fixture is intended.
  it{ should_not be_able_to(:manage, registration) }
  it{ should_not be_able_to(:manage, other_registration) }
  it{ should_not be_able_to(:manage, my_event) }
  it{ should_not be_able_to(:manage, other_event) }
  it{ should_not be_able_to(:manage, my_event.event_type) }
  it{ should_not be_able_to(:manage, other_event.event_type) }
  it{ should_not be_able_to(:manage, my_event.track) }
  it{ should_not be_able_to(:manage, other_event.track) }
  it{ should_not be_able_to(:manage, my_event.difficulty_level) }
  it{ should_not be_able_to(:manage, other_event.difficulty_level) }
  it{ should_not be_able_to(:manage, my_event.commercials.first) }
  it{ should_not be_able_to(:manage, other_event.commercials.first) }
  it{ should_not be_able_to(:index, my_event.comment_threads.first) }
  it{ should_not be_able_to(:index, other_event.comment_threads.first) }
  # Pending examples (no body): volunteer shift permissions still to be written.
  it 'should be_able to :manage Vposition'
  it 'should be_able to :manage Vday'
  it_behaves_like 'user with any role'
  it_behaves_like 'user with non-organizer role', 'volunteers_coordinator'
end
end
end
Fix ability model spec.
CanCanCan does not support testing multiple permissions in one `be_able_to` clause.
Replace every such example with a separate example per permission.
require 'spec_helper'
require 'cancan/matchers'
describe 'User' do
describe 'Abilities' do
let!(:admin) { create(:admin) }
# see https://github.com/CanCanCommunity/cancancan/wiki/Testing-Abilities
# The subject under test: the Ability computed for the current user
# (nil by default, overridden per context).
subject(:ability){ Ability.new(user) }
let(:user){ nil }
# "my_*" fixtures belong to the conference the user holds a role for.
let!(:my_conference) { create(:full_conference) }
let!(:my_cfp) { create(:cfp, program: my_conference.program) }
let(:my_venue) { my_conference.venue || create(:venue, conference: my_conference) }
let(:my_registration) { create(:registration, conference: my_conference, user: admin) }
let(:other_registration) { create(:registration, conference: conference_public) }
let(:my_event) { create(:event_full, program: my_conference.program) }
let(:my_room) { create(:room, venue: my_conference.venue) }
# A scheduled event occupying my_room, used by the venue-destruction examples.
let!(:my_event_scheduled) { create(:event_full, program: my_conference.program, room_id: my_room.id) }
let(:other_event) { create(:event_full, program: conference_public.program) }
# Conferences with and without a public splashpage.
let(:conference_not_public) { create(:conference, splashpage: create(:splashpage, public: false)) }
let(:conference_public) { create(:full_conference, splashpage: create(:splashpage, public: true)) }
let!(:conference_public_cfp) { create(:cfp, program: conference_public.program) }
let(:event_confirmed) { create(:event, state: 'confirmed') }
let(:event_unconfirmed) { create(:event) }
let(:commercial_event_confirmed) { create(:commercial, commercialable: event_confirmed) }
let(:commercial_event_unconfirmed) { create(:commercial, commercialable: event_unconfirmed) }
let(:registration) { create(:registration) }
let(:program_with_cfp) { create(:program, cfp: create(:cfp)) }
let(:program_without_cfp) { create(:program) }
# Conferences whose registration period is currently open vs. already closed.
let(:conference_with_open_registration) { create(:conference) }
let!(:open_registration_period) { create(:registration_period, conference: conference_with_open_registration, start_date: Date.current - 6.days) }
let(:conference_with_closed_registration) { create(:conference) }
let!(:closed_registration_period) { create(:registration_period, conference: conference_with_closed_registration, start_date: Date.current - 6.days, end_date: Date.current - 6.days) }
# Test abilities for not signed in users
# Anonymous visitors get read-only access to public conferences, confirmed
# events and their commercials; they may sign up, and may register while a
# conference's registration period is open.
context 'when user is not signed in' do
it{ should be_able_to(:index, Conference)}
it{ should be_able_to(:show, conference_public)}
it{ should_not be_able_to(:show, conference_not_public)}
# The schedule is only visible once the program marks it public.
it do
conference_public.program.schedule_public = true
conference_public.program.save
should be_able_to(:schedule, conference_public)
end
it{ should_not be_able_to(:schedule, conference_not_public)}
it{ should be_able_to(:show, event_confirmed)}
it{ should_not be_able_to(:show, event_unconfirmed)}
it{ should be_able_to(:show, commercial_event_confirmed)}
it{ should_not be_able_to(:show, commercial_event_unconfirmed)}
it{ should be_able_to(:show, User)}
it{ should be_able_to(:create, User)}
# Registration depends on the conference's registration period being open.
it{ should be_able_to(:show, Registration.new)}
it{ should be_able_to(:create, Registration.new(conference_id: conference_with_open_registration.id))}
it{ should be_able_to(:new, Registration.new(conference_id: conference_with_open_registration.id))}
it{ should_not be_able_to(:new, Registration.new(conference_id: conference_with_closed_registration.id))}
it{ should_not be_able_to(:create, Registration.new(conference_id: conference_with_closed_registration.id))}
it{ should_not be_able_to(:manage, registration)}
# Event proposals require an open call for papers.
it{ should be_able_to(:new, Event.new(program: program_with_cfp)) }
it{ should_not be_able_to(:new, Event.new(program: program_without_cfp)) }
it{ should_not be_able_to(:create, Event.new(program: program_without_cfp))}
it{ should be_able_to(:show, Event.new)}
it{ should_not be_able_to(:manage, :any)}
end
# Test abilities for signed in users (without any role)
# A signed-in user manages their own account, registrations, subscriptions
# and submitted events; closed registration periods and other users' events
# remain off limits.
context 'when user is signed in' do
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:event_user2) { create(:submitter, user: user2) }
let(:subscription) { create(:subscription, user: user) }
let(:registration_public) { create(:registration, conference: conference_public, user: user) }
let(:registration_not_public) { create(:registration, conference: conference_not_public, user: user) }
# An event submitted by the user to a program whose call for papers is open.
let(:user_event_with_cfp) { create(:event, users: [user], program: program_with_cfp) }
let(:user_commercial) { create(:commercial, commercialable: user_event_with_cfp) }
it{ should be_able_to(:manage, user) }
it{ should be_able_to(:manage, registration_public) }
it{ should be_able_to(:manage, registration_not_public) }
it{ should_not be_able_to(:new, Registration.new(conference_id: conference_with_closed_registration.id))}
it{ should_not be_able_to(:create, Registration.new(conference_id: conference_with_closed_registration.id))}
it{ should be_able_to(:index, Ticket) }
it{ should be_able_to(:manage, TicketPurchase.new(user_id: user.id)) }
it{ should be_able_to(:create, Subscription.new(user_id: user.id)) }
it{ should be_able_to(:destroy, subscription) }
# Event submission requires an open cfp and the user being among the event's users.
it{ should be_able_to(:manage, user_event_with_cfp) }
it{ should_not be_able_to(:new, Event.new(program: program_without_cfp)) }
it{ should_not be_able_to(:create, Event.new(program: program_without_cfp)) }
it{ should_not be_able_to(:new, Event.new(program: program_with_cfp, event_users: [event_user2])) }
it{ should_not be_able_to(:create, Event.new(program: program_with_cfp, event_users: [event_user2])) }
it{ should_not be_able_to(:manage, event_unconfirmed) }
it{ should be_able_to(:create, user_event_with_cfp.commercials.new) }
it{ should be_able_to(:manage, user_commercial) }
it{ should_not be_able_to(:manage, commercial_event_unconfirmed) }
end
# Abilities of an admin account: blanket :manage on :all, yet still no
# :destroy on a program, nor on a venue whose room is tied to a scheduled
# event (the let! event below creates that room dependency).
context 'user #is_admin?' do
  let(:venue) { my_conference.venue }
  let(:room) { create(:room, venue: venue) }
  let!(:event) { create(:event_full, program: my_conference.program, room_id: room.id) }
  let(:user) { create(:admin) }

  it { is_expected.to be_able_to(:manage, :all) }
  it { is_expected.not_to be_able_to(:destroy, my_conference.program) }
  it { is_expected.not_to be_able_to(:destroy, my_venue) }
end
# Shared examples: restrictions common to every conference role.
# A role holder must not modify role assignments of an unrelated conference,
# but may still view them (:show / :index).
shared_examples 'user with any role' do
before do
# A conference the user holds no role for.
@other_conference = create(:conference)
end
%w(organizer cfp info_desk volunteers_coordinator).each do |role|
# @other_conference is an example-level ivar set in the before hook, so it
# is resolved when each example runs, not at definition time.
it{ should_not be_able_to(:toggle_user, Role.find_by(name: role, resource: @other_conference)) }
it{ should_not be_able_to(:update, Role.find_by(name: role, resource: @other_conference)) }
it{ should_not be_able_to(:edit, Role.find_by(name: role, resource: @other_conference)) }
it{ should be_able_to(:show, Role.find_by(name: role, resource: @other_conference)) }
it{ should be_able_to(:index, Role.find_by(name: role, resource: @other_conference)) }
end
end
# A non-organizer role holder can only toggle users for their OWN role on
# their own conference; all roles are visible but never editable.
shared_examples 'user with non-organizer role' do |role_name|
%w(organizer cfp info_desk volunteers_coordinator).each do |role|
if role == role_name
it{ should be_able_to(:toggle_user, Role.find_by(name: role, resource: my_conference)) }
else
it{ should_not be_able_to(:toggle_user, Role.find_by(name: role, resource: my_conference)) }
end
it{ should_not be_able_to(:update, Role.find_by(name: role, resource: my_conference)) }
it{ should_not be_able_to(:edit, Role.find_by(name: role, resource: my_conference)) }
it{ should be_able_to(:show, Role.find_by(name: role, resource: my_conference)) }
it{ should be_able_to(:index, Role.find_by(name: role, resource: my_conference)) }
end
end
# Organizers manage everything belonging to their own conference, but gain
# no access to other conferences' resources.
context 'when user has the role organizer' do
let!(:my_conference) { create(:full_conference) }
let(:role) { Role.find_by(name: 'organizer', resource: my_conference) }
let(:user) { create(:user, role_ids: [role.id]) }
it{ should_not be_able_to(:destroy, my_conference.program) }
# Venue destruction depends on whether any event still occupies a room.
# NOTE(review): my_venue and my_event_scheduled are defined in an outer scope.
it 'when there is a room assigned to an event' do
should_not be_able_to(:destroy, my_venue)
end
it 'when there are no rooms used' do
my_event_scheduled.room_id = nil
my_event_scheduled.save!
my_event_scheduled.reload
should be_able_to(:destroy, my_venue)
end
it{ should be_able_to(:new, Conference) }
it{ should be_able_to(:create, Conference) }
it{ should be_able_to(:manage, my_conference) }
it{ should_not be_able_to(:manage, conference_public) }
it{ should be_able_to(:manage, my_conference.splashpage) }
it{ should_not be_able_to(:manage, conference_public.splashpage) }
it{ should be_able_to(:manage, my_conference.contact) }
it{ should_not be_able_to(:manage, conference_public.contact) }
it{ should be_able_to(:manage, my_conference.email_settings) }
it{ should_not be_able_to(:manage, conference_public.email_settings) }
it{ should be_able_to(:manage, my_conference.campaigns.first) }
it{ should_not be_able_to(:manage, conference_public.campaigns.first) }
it{ should be_able_to(:manage, my_conference.targets.first) }
it{ should_not be_able_to(:manage, conference_public.targets.first) }
it{ should be_able_to(:manage, my_conference.commercials.first) }
it{ should_not be_able_to(:manage, conference_public.commercials.first) }
it{ should be_able_to(:manage, my_conference.registration_period) }
it{ should_not be_able_to(:manage, conference_public.registration_period) }
it{ should be_able_to(:manage, my_conference.questions.first) }
it{ should_not be_able_to(:manage, conference_public.questions.first) }
it{ should be_able_to(:manage, my_conference.program.cfp) }
it{ should_not be_able_to(:manage, conference_public.program.cfp) }
it{ should be_able_to(:manage, my_conference.venue) }
it{ should_not be_able_to(:manage, conference_public.venue) }
it{ should be_able_to(:manage, my_conference.lodgings.first) }
it{ should_not be_able_to(:manage, conference_public.lodgings.first) }
it{ should be_able_to(:manage, my_conference.sponsors.first) }
it{ should_not be_able_to(:manage, conference_public.sponsors.first) }
it{ should be_able_to(:manage, my_conference.sponsorship_levels.first) }
it{ should_not be_able_to(:manage, conference_public.sponsorship_levels.first) }
it{ should be_able_to(:manage, my_conference.tickets.first) }
it{ should_not be_able_to(:manage, conference_public.tickets.first) }
it{ should be_able_to(:manage, my_registration) }
it{ should_not be_able_to(:manage, other_registration) }
it{ should be_able_to(:manage, my_event) }
it{ should_not be_able_to(:manage, other_event) }
it{ should be_able_to(:manage, my_event.event_type) }
it{ should_not be_able_to(:manage, other_event.event_type) }
it{ should be_able_to(:manage, my_event.track) }
it{ should_not be_able_to(:manage, other_event.track) }
it{ should be_able_to(:manage, my_event.difficulty_level) }
it{ should_not be_able_to(:manage, other_event.difficulty_level) }
it{ should be_able_to(:manage, my_event.commercials.first) }
it{ should_not be_able_to(:manage, other_event.commercials.first) }
it{ should be_able_to(:index, my_event.comment_threads.first) }
it{ should_not be_able_to(:index, other_event.comment_threads.first) }
# Organizers have full role administration on their own conference.
%w(organizer cfp info_desk volunteers_coordinator).each do |role|
it{ should be_able_to(:toggle_user, Role.find_by(name: role, resource: my_conference)) }
it{ should be_able_to(:edit, Role.find_by(name: role, resource: my_conference)) }
it{ should be_able_to(:update, Role.find_by(name: role, resource: my_conference)) }
it{ should be_able_to(:show, Role.find_by(name: role, resource: my_conference)) }
it{ should be_able_to(:index, Role.find_by(name: role, resource: my_conference)) }
end
it_behaves_like 'user with any role'
end
# The cfp role manages only call-for-papers concerns of its own conference:
# email settings, the cfp itself, and events (plus showing the venue).
context 'when user has the role cfp' do
let!(:my_conference) { create(:full_conference) }
let(:role) { Role.find_by(name: 'cfp', resource: my_conference) }
let(:user) { create(:user, role_ids: [role.id]) }
it{ should_not be_able_to(:new, Conference.new) }
it{ should_not be_able_to(:create, Conference.new) }
it{ should_not be_able_to(:manage, my_conference) }
it{ should_not be_able_to(:manage, conference_public) }
it{ should_not be_able_to(:manage, my_conference.splashpage) }
it{ should_not be_able_to(:manage, conference_public.splashpage) }
it{ should_not be_able_to(:manage, my_conference.contact) }
it{ should_not be_able_to(:manage, conference_public.contact) }
it{ should be_able_to(:manage, my_conference.email_settings) }
it{ should_not be_able_to(:manage, conference_public.email_settings) }
it{ should_not be_able_to(:manage, my_conference.campaigns.first) }
it{ should_not be_able_to(:manage, conference_public.campaigns.first) }
it{ should_not be_able_to(:manage, my_conference.targets.first) }
it{ should_not be_able_to(:manage, conference_public.targets.first) }
it{ should_not be_able_to(:manage, my_conference.commercials.first) }
it{ should_not be_able_to(:manage, conference_public.commercials.first) }
it{ should_not be_able_to(:manage, my_conference.registration_period) }
it{ should_not be_able_to(:manage, conference_public.registration_period) }
it{ should_not be_able_to(:manage, my_conference.questions.first) }
it{ should_not be_able_to(:manage, conference_public.questions.first) }
it{ should be_able_to(:manage, my_conference.program.cfp) }
it{ should_not be_able_to(:manage, conference_public.program.cfp) }
it{ should_not be_able_to(:manage, my_conference.venue) }
it{ should be_able_to(:show, my_conference.venue) }
it{ should_not be_able_to(:manage, conference_public.venue) }
it{ should_not be_able_to(:manage, my_conference.lodgings.first) }
it{ should_not be_able_to(:manage, conference_public.lodgings.first) }
it{ should_not be_able_to(:manage, my_conference.sponsors.first) }
it{ should_not be_able_to(:manage, conference_public.sponsors.first) }
it{ should_not be_able_to(:manage, my_conference.sponsorship_levels.first) }
it{ should_not be_able_to(:manage, conference_public.sponsorship_levels.first) }
it{ should_not be_able_to(:manage, my_conference.tickets.first) }
it{ should_not be_able_to(:manage, conference_public.tickets.first) }
it{ should_not be_able_to(:manage, my_registration) }
it{ should_not be_able_to(:manage, other_registration) }
it{ should be_able_to(:manage, my_event) }
it{ should_not be_able_to(:manage, other_event) }
it{ should be_able_to(:manage, my_event.event_type) }
it{ should_not be_able_to(:manage, other_event.event_type) }
it{ should be_able_to(:manage, my_event.track) }
it{ should_not be_able_to(:manage, other_event.track) }
it{ should be_able_to(:manage, my_event.difficulty_level) }
it{ should_not be_able_to(:manage, other_event.difficulty_level) }
it{ should be_able_to(:manage, my_event.commercials.first) }
it{ should_not be_able_to(:manage, other_event.commercials.first) }
it{ should be_able_to(:index, my_event.comment_threads.first) }
it{ should_not be_able_to(:index, other_event.comment_threads.first) }
it_behaves_like 'user with any role'
it_behaves_like 'user with non-organizer role', 'cfp'
end
# The info_desk role only manages questions and registrations of its own
# conference; everything else is read-restricted or denied.
context 'when user has the role info_desk' do
let!(:my_conference) { create(:full_conference) }
let(:role) { Role.find_by(name: 'info_desk', resource: my_conference) }
let(:user) { create(:user, role_ids: [role.id]) }
it{ should_not be_able_to(:new, Conference.new) }
it{ should_not be_able_to(:create, Conference.new) }
it{ should_not be_able_to(:manage, my_conference) }
it{ should_not be_able_to(:manage, conference_public) }
it{ should_not be_able_to(:manage, my_conference.splashpage) }
it{ should_not be_able_to(:manage, conference_public.splashpage) }
it{ should_not be_able_to(:manage, my_conference.contact) }
it{ should_not be_able_to(:manage, conference_public.contact) }
it{ should_not be_able_to(:manage, my_conference.email_settings) }
it{ should_not be_able_to(:manage, conference_public.email_settings) }
it{ should_not be_able_to(:manage, my_conference.campaigns.first) }
it{ should_not be_able_to(:manage, conference_public.campaigns.first) }
it{ should_not be_able_to(:manage, my_conference.targets.first) }
it{ should_not be_able_to(:manage, conference_public.targets.first) }
it{ should_not be_able_to(:manage, my_conference.commercials.first) }
it{ should_not be_able_to(:manage, conference_public.commercials.first) }
it{ should_not be_able_to(:manage, my_conference.registration_period) }
it{ should_not be_able_to(:manage, conference_public.registration_period) }
it{ should be_able_to(:manage, my_conference.questions.first) }
it{ should_not be_able_to(:manage, conference_public.questions.first) }
it{ should_not be_able_to(:manage, my_conference.program.cfp) }
it{ should_not be_able_to(:manage, conference_public.program.cfp) }
it{ should_not be_able_to(:manage, my_conference.venue) }
it{ should_not be_able_to(:show, my_conference.venue) }
it{ should_not be_able_to(:manage, conference_public.venue) }
it{ should_not be_able_to(:manage, my_conference.lodgings.first) }
it{ should_not be_able_to(:manage, conference_public.lodgings.first) }
it{ should_not be_able_to(:manage, my_conference.sponsors.first) }
it{ should_not be_able_to(:manage, conference_public.sponsors.first) }
it{ should_not be_able_to(:manage, my_conference.sponsorship_levels.first) }
it{ should_not be_able_to(:manage, conference_public.sponsorship_levels.first) }
it{ should_not be_able_to(:manage, my_conference.tickets.first) }
it{ should_not be_able_to(:manage, conference_public.tickets.first) }
it{ should be_able_to(:manage, my_registration) }
it{ should_not be_able_to(:manage, other_registration) }
it{ should_not be_able_to(:manage, my_event) }
it{ should_not be_able_to(:manage, other_event) }
it{ should_not be_able_to(:manage, my_event.event_type) }
it{ should_not be_able_to(:manage, other_event.event_type) }
it{ should_not be_able_to(:manage, my_event.track) }
it{ should_not be_able_to(:manage, other_event.track) }
it{ should_not be_able_to(:manage, my_event.difficulty_level) }
it{ should_not be_able_to(:manage, other_event.difficulty_level) }
it{ should_not be_able_to(:manage, my_event.commercials.first) }
it{ should_not be_able_to(:manage, other_event.commercials.first) }
it{ should_not be_able_to(:index, my_event.comment_threads.first) }
it{ should_not be_able_to(:index, other_event.comment_threads.first) }
it_behaves_like 'user with any role'
it_behaves_like 'user with non-organizer role', 'info_desk'
end
# The volunteers_coordinator role has no conference-resource management
# abilities here; its volunteer-specific abilities are still pending.
context 'when user has the role volunteers_coordinator' do
let!(:my_conference) { create(:full_conference) }
let(:role) { Role.find_by(name: 'volunteers_coordinator', resource: my_conference) }
let(:user) { create(:user, role_ids: [role.id]) }
it{ should_not be_able_to(:new, Conference.new) }
it{ should_not be_able_to(:create, Conference.new) }
it{ should_not be_able_to(:manage, my_conference) }
it{ should_not be_able_to(:manage, conference_public) }
it{ should_not be_able_to(:manage, my_conference.splashpage) }
it{ should_not be_able_to(:manage, conference_public.splashpage) }
it{ should_not be_able_to(:manage, my_conference.contact) }
it{ should_not be_able_to(:manage, conference_public.contact) }
it{ should_not be_able_to(:manage, my_conference.email_settings) }
it{ should_not be_able_to(:manage, conference_public.email_settings) }
it{ should_not be_able_to(:manage, my_conference.campaigns.first) }
it{ should_not be_able_to(:manage, conference_public.campaigns.first) }
it{ should_not be_able_to(:manage, my_conference.targets.first) }
it{ should_not be_able_to(:manage, conference_public.targets.first) }
it{ should_not be_able_to(:manage, my_conference.commercials.first) }
it{ should_not be_able_to(:manage, conference_public.commercials.first) }
it{ should_not be_able_to(:manage, my_conference.registration_period) }
it{ should_not be_able_to(:manage, conference_public.registration_period) }
it{ should_not be_able_to(:manage, my_conference.questions.first) }
it{ should_not be_able_to(:manage, conference_public.questions.first) }
it{ should_not be_able_to(:manage, my_conference.program.cfp) }
it{ should_not be_able_to(:manage, conference_public.program.cfp) }
it{ should_not be_able_to(:manage, my_conference.venue) }
it{ should_not be_able_to(:show, my_conference.venue) }
it{ should_not be_able_to(:manage, conference_public.venue) }
it{ should_not be_able_to(:manage, my_conference.lodgings.first) }
it{ should_not be_able_to(:manage, conference_public.lodgings.first) }
it{ should_not be_able_to(:manage, my_conference.sponsors.first) }
it{ should_not be_able_to(:manage, conference_public.sponsors.first) }
it{ should_not be_able_to(:manage, my_conference.sponsorship_levels.first) }
it{ should_not be_able_to(:manage, conference_public.sponsorship_levels.first) }
it{ should_not be_able_to(:manage, my_conference.tickets.first) }
it{ should_not be_able_to(:manage, conference_public.tickets.first) }
# NOTE(review): sibling contexts use my_registration here; confirm that a
# `registration` let exists in an outer scope or align with my_registration.
it{ should_not be_able_to(:manage, registration) }
it{ should_not be_able_to(:manage, other_registration) }
it{ should_not be_able_to(:manage, my_event) }
it{ should_not be_able_to(:manage, other_event) }
it{ should_not be_able_to(:manage, my_event.event_type) }
it{ should_not be_able_to(:manage, other_event.event_type) }
it{ should_not be_able_to(:manage, my_event.track) }
it{ should_not be_able_to(:manage, other_event.track) }
it{ should_not be_able_to(:manage, my_event.difficulty_level) }
it{ should_not be_able_to(:manage, other_event.difficulty_level) }
it{ should_not be_able_to(:manage, my_event.commercials.first) }
it{ should_not be_able_to(:manage, other_event.commercials.first) }
it{ should_not be_able_to(:index, my_event.comment_threads.first) }
it{ should_not be_able_to(:index, other_event.comment_threads.first) }
# Bodiless examples: recorded as pending until implemented.
it 'should be_able to :manage Vposition'
it 'should be_able to :manage Vday'
it_behaves_like 'user with any role'
it_behaves_like 'user with non-organizer role', 'volunteers_coordinator'
end
end
end
|
# == Schema Information
#
# Table name: addresses
#
# id :integer not null, primary key
# address_type :integer default("residence"), not null
# street_address :string default(""), not null
# secondary_address :string
# city :string default("San Francisco"), not null
# state :string default("California"), not null
# zip :string default(""), not null
# country :string default("United States"), not null
# address_owner_type :string
# address_owner_id :integer
# created_at :datetime not null
# updated_at :datetime not null
#
require 'rails_helper'
# Auto-generated placeholder spec for the Address model; marked pending
# until real examples are written.
RSpec.describe Address, type: :model do
pending "add some examples to (or delete) #{__FILE__}"
end
Update address validation specs
# == Schema Information
#
# Table name: addresses
#
# id :integer not null, primary key
# address_type :integer default("residence"), not null
# street_address :string default(""), not null
# secondary_address :string
# city :string default("San Francisco"), not null
# state :string default("California"), not null
# zip :string default(""), not null
# country :string default("United States"), not null
# address_owner_type :string
# address_owner_id :integer
# created_at :datetime not null
# updated_at :datetime not null
#
require 'rails_helper'
# Validation specs for Address. The factory supplies the required
# address_type, street_address, and zip; city/state/country fall back to
# their schema defaults ("San Francisco" / "California" / "United States").
RSpec.describe Address, type: :model do
  let(:address) { FactoryGirl.build :address }

  # it { should belong_to(:address_owner) }

  it 'is valid with a street_address, address_type, and zip' do
    # Removed leftover `binding.pry` debugging breakpoint.
    Address.destroy_all
    expect(subject).to be_valid
  end

  it 'successfully creates a new Address instance and saves it to the database' do
    Address.destroy_all
    address.save
    expect(Address.count).to eql(1)
  end

  describe "address_type" do
    it { is_expected.to respond_to(:address_type) }

    it 'is invalid without an address_type' do
      address.address_type = nil
      expect(address).to_not be_valid
    end
  end

  describe "street_address" do
    it { is_expected.to respond_to(:street_address) }

    it 'is invalid without a street_address' do
      address.street_address = nil
      expect(address).to_not be_valid
    end

    it 'is invalid when street_address has more than 250 characters' do
      address.street_address = "x" * 251
      expect(address).to_not be_valid
    end
  end

  describe "secondary_address" do
    it 'is valid with a secondary_address' do
      address.secondary_address = "apartment 5"
      expect(address).to be_valid
    end

    it 'is invalid when secondary_address has more than 250 characters' do
      # Fixed copy-paste bug: this example previously assigned to
      # street_address, so it never exercised secondary_address at all.
      address.secondary_address = "x" * 251
      expect(address).to_not be_valid
    end
  end

  describe "city" do
    it "defaults to San Francisco" do
      address.save
      expect(address.city).to eql("San Francisco")
    end

    it 'is invalid without a city' do
      address.city = nil
      expect(address).to_not be_valid
    end

    it 'is invalid when city has more than 25 characters' do
      address.city = "x" * 26
      expect(address).to_not be_valid
    end
  end

  describe "state" do
    it "defaults to California" do
      address.save
      expect(address.state).to eql("California")
    end

    it 'is invalid without a state' do
      address.state = nil
      expect(address).to_not be_valid
    end

    it 'is invalid when a state has more than 25 characters' do
      address.state = "x" * 26
      expect(address).to_not be_valid
    end
  end

  describe "zip" do
    it 'is invalid without a zip' do
      address.zip = nil
      expect(address).to_not be_valid
    end

    it 'is invalid when a zip has more than 5 characters' do
      address.zip = "123456"
      expect(address).to_not be_valid
    end

    it 'is invalid when zip has less than 5 characters' do
      address.zip = "1234"
      expect(address).to_not be_valid
    end

    it 'is invalid when zip contains non numeric characters' do
      address.zip = "1a3-5"
      expect(address).to_not be_valid
    end
  end

  describe "country" do
    it "defaults to United States when a country is not entered" do
      address.save
      expect(address.country).to eql("United States")
    end

    it 'is invalid without a country' do
      address.country = nil
      expect(address).to_not be_valid
    end

    # Description fixed: the example assigns 51 characters, not 26, so the
    # limit under test is 50 (confirm against the model's length validation).
    it 'is invalid when country has more than 50 characters' do
      address.country = "x" * 51
      expect(address).to_not be_valid
    end
  end
end
|
# coding: utf-8
require 'spec_helper'
describe Article do
# Stub the default Blog so URL helpers and text filtering work without a
# real record; outbound pings are disabled to avoid network side effects.
before do
@blog = stub_model(Blog)
@blog.stub(:base_url) { "http://myblog.net" }
@blog.stub(:text_filter) { "textile" }
@blog.stub(:send_outbound_pings) { false }
Blog.stub(:default) { @blog }
@articles = []
end
# Asserts that @articles contains exactly the expected items; Symbol
# entries are resolved through the `contents` fixture helper first.
def assert_results_are(*expected)
  assert_equal expected.size, @articles.size
  expected.each do |item|
    resolved = item.is_a?(Symbol) ? contents(item) : item
    assert @articles.include?(resolved)
  end
end
# An Article's text content is composed of its :body and :extended fields.
it "test_content_fields" do
a = Article.new
assert_equal [:body, :extended], a.content_fields
end
# merge_with! folds another article's body and comments into the receiver,
# keeps the receiver's author, and deletes the other article.
describe 'merge method' do
context 'other article does not exist' do
before :each do
@a1 = Factory(:article, :state => 'draft', body: 'a1 body: kitties')
# Simulate a lookup miss for any id.
Article.stub(:find_by_id).and_return nil
end
it 'should not change the callee model' do
pending #check to make sure that nothing is received on the mock and Article doesn't get any calls either
#TODO neither of the bodies should change
end
it 'should return nil' do
@a1.merge_with!(338484848).should be_nil
end
end
context 'other article does exist' do
before :each do
@a1 = Factory(:article, :state => 'draft', body: 'kitties', :author => 'sam')
@a2 = Factory(:article, :state => 'draft', body: 'puppies', :author => 'mandy')
Article.stub(:find_by_id).and_return @a2
Article.stub(:delete)
end
it 'should contain the text of the other article' do
@a1.merge_with!(@a2.id)
@a1.body.should == 'kittiespuppies'
end
it 'should contain only one author' do
@a1.merge_with!(@a2.id)
@a1.author.should == "sam"
end
it 'should delete the other article' do
Article.should_receive(:delete).with @a2
@a1.merge_with! @a2.id
end
it 'should return the receiving article' do
@a1.merge_with!(@a2.id).should_not be_nil
end
it 'should concatenate the comments of the merging article' do
a2_comments = (1..5).map { Factory(:comment, article: @a2) }
@a2.stub(:comments).and_return a2_comments
a1_comments = mock()
# The receiver's comments collection must absorb the other's comments.
a1_comments.should_receive(:<<).with a2_comments
@a1.stub(:comments).and_return a1_comments
@a1.merge_with!(@a2.id)
end
end
end
# permalink_url builds /YYYY/MM/DD/permalink, optionally prefixed with the
# blog's base_url, and percent-escapes unsafe characters.
describe "#permalink_url" do
describe "with hostname" do
subject { Article.new(:permalink => 'article-3', :published_at => Time.new(2004, 6, 1)).permalink_url(anchor=nil, only_path=false) }
it { should == 'http://myblog.net/2004/06/01/article-3' }
end
describe "without hostname" do
subject { Article.new(:permalink => 'article-3', :published_at => Time.new(2004, 6, 1)).permalink_url(anchor=nil, only_path=true) }
it { should == '/2004/06/01/article-3' }
end
# NOTE: URLs must not have any multibyte characters in them. The
# browser may display them differently, though.
describe "with a multibyte permalink" do
subject { Article.new(:permalink => 'ルビー', :published_at => Time.new(2004, 6, 1)) }
it "escapes the multibyte characters" do
subject.permalink_url(anchor=nil, only_path=true).should == '/2004/06/01/%E3%83%AB%E3%83%93%E3%83%BC'
end
end
describe "with a permalink containing a space" do
subject { Article.new(:permalink => 'hello there', :published_at => Time.new(2004, 6, 1)) }
it "escapes the space as '%20', not as '+'" do
subject.permalink_url(anchor=nil, only_path=true).should == '/2004/06/01/hello%20there'
end
end
describe "with a permalink containing a plus" do
subject { Article.new(:permalink => 'one+two', :published_at => Time.new(2004, 6, 1)) }
it "does not escape the plus" do
subject.permalink_url(anchor=nil, only_path=true).should == '/2004/06/01/one+two'
end
end
end
# Constructing an Article with a settings-backed attribute (password) must
# not raise.
describe "#initialize" do
it "accepts a settings field in its parameter hash" do
Article.new({"password" => 'foo'})
end
end
# Admin edit/destroy URLs and per-format feed URLs are derived from the
# stubbed blog base_url.
it "test_edit_url" do
a = stub_model(Article, :id => 123)
assert_equal "http://myblog.net/admin/content/edit/#{a.id}", a.edit_url
end
it "test_delete_url" do
a = stub_model(Article, :id => 123)
assert_equal "http://myblog.net/admin/content/destroy/#{a.id}", a.delete_url
end
it "test_feed_url" do
a = stub_model(Article, :permalink => 'article-3', :published_at => Time.new(2004, 6, 1))
assert_equal "http://myblog.net/2004/06/01/article-3.atom", a.feed_url(:atom10)
assert_equal "http://myblog.net/2004/06/01/article-3.rss", a.feed_url(:rss20)
end
# Saving an article persists its category association across a reload.
it "test_create" do
a = Article.new
a.user_id = 1
a.body = "Foo"
a.title = "Zzz"
assert a.save
a.categories << Category.find(Factory(:category).id)
assert_equal 1, a.categories.size
b = Article.find(a.id)
assert_equal 1, b.categories.size
end
# Lookup by date + permalink components; a miss raises RecordNotFound.
it "test_permalink_with_title" do
article = Factory(:article, :permalink => 'article-3', :published_at => Time.utc(2004, 6, 1))
assert_equal(article,
# NOTE(review): 06 and 01 are octal integer literals (6 and 1) — valid here,
# but 08/09 would be a SyntaxError; string values as below are safer.
Article.find_by_permalink({:year => 2004, :month => 06, :day => 01, :title => "article-3"}) )
assert_raises(ActiveRecord::RecordNotFound) do
Article.find_by_permalink :year => 2005, :month => "06", :day => "01", :title => "article-5"
end
end
# String#to_url slugifies titles: lowercases, strips punctuation, hyphenates.
it "test_strip_title" do
assert_equal "article-3", "Article-3".to_url
assert_equal "article-3", "Article 3!?#".to_url
assert_equal "there-is-sex-in-my-violence", "There is Sex in my Violence!".to_url
assert_equal "article", "-article-".to_url
assert_equal "lorem-ipsum-dolor-sit-amet-consectetaur-adipisicing-elit", "Lorem ipsum dolor sit amet, consectetaur adipisicing elit".to_url
assert_equal "my-cats-best-friend", "My Cat's Best Friend".to_url
end
# stripped_title slugifies the title (HTML removed, multibyte preserved)
# and populates permalink on save.
describe "#stripped_title" do
it "works for simple cases" do
assert_equal "article-1", Article.new(:title => 'Article 1!').stripped_title
assert_equal "article-2", Article.new(:title => 'Article 2!').stripped_title
assert_equal "article-3", Article.new(:title => 'Article 3!').stripped_title
end
it "strips html" do
a = Article.new(:title => "This <i>is</i> a <b>test</b>")
assert_equal 'this-is-a-test', a.stripped_title
end
it "does not escape multibyte characters" do
a = Article.new(:title => "ルビー")
a.stripped_title.should == "ルビー"
end
it "is called upon saving the article" do
a = Article.new(:title => "space separated")
a.permalink.should be_nil
a.save
a.permalink.should == "space-separated"
end
end
# html_urls extracts href targets from the rendered body HTML, tolerating
# single/double/no quotes and newlines, and matching only href attributes.
describe "the html_urls method" do
before do
# Use a pass-through text filter so body HTML is rendered verbatim.
@blog.stub(:text_filter_object) { TextFilter.new(:filters => []) }
@article = Article.new
end
it "extracts URLs from the generated body html" do
@article.body = 'happy halloween <a href="http://www.example.com/public">with</a>'
urls = @article.html_urls
assert_equal ["http://www.example.com/public"], urls
end
it "should only match the href attribute" do
@article.body = '<a href="http://a/b">a</a> <a fhref="wrong">wrong</a>'
urls = @article.html_urls
assert_equal ["http://a/b"], urls
end
it "should match across newlines" do
@article.body = "<a\nhref=\"http://foo/bar\">foo</a>"
urls = @article.html_urls
assert_equal ["http://foo/bar"], urls
end
it "should match with single quotes" do
@article.body = "<a href='http://foo/bar'>foo</a>"
urls = @article.html_urls
assert_equal ["http://foo/bar"], urls
end
it "should match with no quotes" do
@article.body = "<a href=http://foo/bar>foo</a>"
urls = @article.html_urls
assert_equal ["http://foo/bar"], urls
end
end
### XXX: Should we have a test here?
# Empty on purpose: ping behavior is untested pending a decision (see XXX).
it "test_send_pings" do
end
### XXX: Should we have a test here?
# Empty on purpose: see note above.
it "test_send_multiple_pings" do
end
# Tags are derived from the space-separated :keywords attribute and are
# resynchronized (added, replaced, emptied) on every save.
describe "with tags" do
it "recieves tags from the keywords property" do
a = Factory(:article, :keywords => 'foo bar')
assert_equal ['foo', 'bar'].sort, a.tags.collect {|t| t.name}.sort
end
it "changes tags when changing keywords" do
a = Factory(:article, :keywords => 'foo bar')
a.keywords = 'foo baz'
a.save
assert_equal ['foo', 'baz'].sort, a.tags.collect {|t| t.name}.sort
end
it "empties tags when keywords is set to ''" do
a = Factory(:article, :keywords => 'foo bar')
a.keywords = ''
a.save
assert_equal [], a.tags.collect {|t| t.name}.sort
end
# Quoted phrases become one tag; dots are slugified to hyphens.
it "properly deals with dots and spaces" do
c = Factory(:article, :keywords => 'test "tag test" web2.0')
assert_equal ['test', 'tag-test', 'web2-0'].sort, c.tags.collect(&:name).sort
end
# TODO: Get rid of using the keywords field.
# TODO: Add functions to Tag to convert collection from and to string.
it "lets the tag collection survive a load-save cycle"
end
# Published articles are reachable through their tag.
it "test_find_published_by_tag_name" do
art1 = Factory(:article)
art2 = Factory(:article)
Factory(:tag, :name => 'foo', :articles => [art1, art2])
articles = Tag.find_by_name('foo').published_articles
assert_equal 2, articles.size
end
# find_published excludes drafts and honors extra finder conditions.
it "test_find_published" do
article = Factory(:article, :title => 'Article 1!', :state => 'published')
Factory(:article, :published => false, :state => 'draft')
@articles = Article.find_published
assert_equal 1, @articles.size
@articles = Article.find_published(:all, :conditions => "title = 'Article 1!'")
assert_equal [article], @articles
end
# just_changed_published_status? is true only right after the flag flips.
it "test_just_published_flag" do
art = Article.new(:title => 'title', :body => 'body', :published => true)
assert art.just_changed_published_status?
assert art.save
art = Article.find(art.id)
assert !art.just_changed_published_status?
art = Article.create!(:title => 'title2', :body => 'body', :published => false)
assert ! art.just_changed_published_status?
end
# A future published_at schedules a Trigger instead of publishing now
# (see assert_sets_trigger below).
it "test_future_publishing" do
assert_sets_trigger(Article.create!(:title => 'title', :body => 'body',
:published => true, :published_at => Time.now + 4.seconds))
end
it "test_future_publishing_without_published_flag" do
assert_sets_trigger Article.create!(:title => 'title', :body => 'body',
:published_at => Time.now + 4.seconds)
end
it "test_triggers_are_dependent" do
pending "Needs a fix for Rails ticket #5105: has_many: Dependent deleting does not work with STI"
art = Article.create!(:title => 'title', :body => 'body',
:published_at => Time.now + 1.hour)
assert_equal 1, Trigger.count
art.destroy
assert_equal 0, Trigger.count
end
# Asserts that the given article has a pending publication Trigger, is not
# yet published, and becomes published once the trigger fires after its
# scheduled time has (virtually) elapsed.
def assert_sets_trigger(art)
  assert_equal 1, Trigger.count
  assert Trigger.find(:first, :conditions => ['pending_item_id = ?', art.id])
  assert !art.published
  now = Time.now
  # We stub the Time.now answer to emulate a sleep of 4. Avoid the sleep. So
  # speed up in test
  Time.stub!(:now).and_return(now + 5.seconds)
  Trigger.fire
  art.reload
  assert art.published
end
# Only already-published articles are returned per category; the future
# article created below must not be counted.
it "test_find_published_by_category" do
cat = Factory(:category, :permalink => 'personal')
cat.articles << Factory(:article)
cat.articles << Factory(:article)
cat.articles << Factory(:article)
cat = Factory(:category, :permalink => 'software')
cat.articles << Factory(:article)
Article.create!(:title => "News from the future!",
:body => "The future is cool!",
:keywords => "future",
:published_at => Time.now + 12.minutes)
articles = Category.find_by_permalink('personal').published_articles
assert_equal 3, articles.size
articles = Category.find_by_permalink('software').published_articles
assert_equal 1, articles.size
end
it "test_find_published_by_nonexistent_category_raises_exception" do
assert_raises ActiveRecord::RecordNotFound do
Category.find_by_permalink('does-not-exist').published_articles
end
end
# Destroying an article removes its associated resources (file uploads).
it "test_destroy_file_upload_associations" do
a = Factory(:article)
Factory(:resource, :article => a)
Factory(:resource, :article => a)
assert_equal 2, a.resources.size
a.resources << Factory(:resource)
assert_equal 3, a.resources.size
a.destroy
assert_equal 0, Resource.find(:all, :conditions => "article_id = #{a.id}").size
end
# Users opted into new-article notifications are collected on save.
it 'should notify' do
henri = Factory(:user, :login => 'henri', :notify_on_new_articles => true)
alice = Factory(:user, :login => 'alice', :notify_on_new_articles => true)
a = Factory.build(:article)
assert a.save
assert_equal 2, a.notify_users.size
assert_equal ['alice', 'henri'], a.notify_users.collect {|u| u.login }.sort
end
# withdraw! unpublishes persistently (state survives a reload).
it "test_withdrawal" do
art = Factory(:article)
assert art.published?
assert ! art.withdrawn?
art.withdraw!
assert ! art.published?
assert art.withdrawn?
art.reload
assert ! art.published?
assert art.withdrawn?
end
describe "#default_text_filter" do
it "returns the blog's text filter" do
a = Article.new
assert_equal @blog.text_filter, a.default_text_filter.name
end
end
# The ham scope filters out spam comments while count sees both.
it 'should get only ham not spam comment' do
article = Factory(:article)
ham_comment = Factory(:comment, :article => article)
spam_comment = Factory(:spam_comment, :article => article)
article.comments.ham.should == [ham_comment]
article.comments.count.should == 2
end
# Admins can access any article, their own or others'.
describe '#access_by?' do
before do
@alice = Factory.build(:user, :profile => Factory.build(:profile_admin, :label => Profile::ADMIN))
end
it 'admin should have access to an article written by another' do
Factory.build(:article).should be_access_by(@alice)
end
it 'admin should have access to an article written by himself' do
article = Factory.build(:article, :author => @alice)
article.should be_access_by(@alice)
end
end
# body_and_extended joins body and extended with the <!--more--> separator,
# omitting it when extended is blank.
describe 'body_and_extended' do
before :each do
@article = Article.new(
:body => 'basic text',
:extended => 'extended text to explain more and more how Typo is wonderful')
end
it 'should combine body and extended content' do
@article.body_and_extended.should ==
"#{@article.body}\n<!--more-->\n#{@article.extended}"
end
it 'should not insert <!--more--> tags if extended is empty' do
@article.extended = ''
@article.body_and_extended.should == @article.body
end
end
describe '#search' do
describe 'with several words and no result' do
# FIXME: This tests nothing, really.
before :each do
@articles = Article.search('hello world')
end
it 'should be empty' do
@articles.should be_empty
end
end
# Search matches the extended field too, across multiple articles.
describe 'with one word and result' do
it 'should have two items' do
Factory(:article, :extended => "extended talk")
Factory(:article, :extended => "Once uppon a time, an extended story")
assert_equal 2, Article.search('extended').size
end
end
end
describe 'body_and_extended=' do
before :each do
@article = Article.new
end
it 'should split apart values at <!--more-->' do
@article.body_and_extended = 'foo<!--more-->bar'
@article.body.should == 'foo'
@article.extended.should == 'bar'
end
it 'should remove newlines around <!--more-->' do
@article.body_and_extended = "foo\n<!--more-->\nbar"
@article.body.should == 'foo'
@article.extended.should == 'bar'
end
it 'should make extended empty if no <!--more--> tag' do
@article.body_and_extended = "foo"
@article.body.should == 'foo'
@article.extended.should be_empty
end
it 'should preserve extra <!--more--> tags' do
@article.body_and_extended = "foo<!--more-->bar<!--more-->baz"
@article.body.should == 'foo'
@article.extended.should == 'bar<!--more-->baz'
end
it 'should be settable via self.attributes=' do
@article.attributes = { :body_and_extended => 'foo<!--more-->bar' }
@article.body.should == 'foo'
@article.extended.should == 'bar'
end
end
describe '#comment_url' do
it 'should render complete url of comment' do
article = stub_model(Article, :id => 123)
article.comment_url.should == "http://myblog.net/comments?article_id=#{article.id}"
end
end
describe '#preview_comment_url' do
it 'should render complete url of comment' do
article = stub_model(Article, :id => 123)
article.preview_comment_url.should == "http://myblog.net/comments/preview?article_id=#{article.id}"
end
end
it "test_can_ping_fresh_article_iff_it_allows_pings" do
a = Factory(:article, :allow_pings => true)
assert_equal(false, a.pings_closed?)
a.allow_pings = false
assert_equal(true, a.pings_closed?)
end
it "test_cannot_ping_old_article" do
a = Factory(:article, :allow_pings => false)
assert_equal(true, a.pings_closed?)
a.allow_pings = false
assert_equal(true, a.pings_closed?)
end
describe '#published_at_like' do
before do
# Note: these choices of times depend on no other articles within
# these timeframes existing in test/fixtures/contents.yaml.
# In particular, all articles there are from 2005 or earlier, which
# is now more than two years ago, except for two, which are from
# yesterday and the day before. The existence of those two makes
# 1.month.ago not suitable, because yesterday can be last month.
@article_two_month_ago = Factory(:article, :published_at => 2.month.ago)
@article_four_months_ago = Factory(:article, :published_at => 4.month.ago)
@article_2_four_months_ago = Factory(:article, :published_at => 4.month.ago)
@article_two_year_ago = Factory(:article, :published_at => 2.year.ago)
@article_2_two_year_ago = Factory(:article, :published_at => 2.year.ago)
end
it 'should return all content for the year if only year sent' do
Article.published_at_like(2.year.ago.strftime('%Y')).map(&:id).sort.should == [@article_two_year_ago.id, @article_2_two_year_ago.id].sort
end
it 'should return all content for the month if year and month sent' do
Article.published_at_like(4.month.ago.strftime('%Y-%m')).map(&:id).sort.should == [@article_four_months_ago.id, @article_2_four_months_ago.id].sort
end
it 'should return all content on this date if date send' do
Article.published_at_like(2.month.ago.strftime('%Y-%m-%d')).map(&:id).sort.should == [@article_two_month_ago.id].sort
end
end
describe '#has_child?' do
it 'should be true if article has one to link it by parent_id' do
parent = Factory(:article)
Factory(:article, :parent_id => parent.id)
parent.should be_has_child
end
it 'should be false if article has no article to link it by parent_id' do
parent = Factory(:article)
Factory(:article, :parent_id => nil)
parent.should_not be_has_child
end
end
describe 'self#last_draft(id)' do
it 'should return article if no draft associated' do
draft = Factory(:article, :state => 'draft')
Article.last_draft(draft.id).should == draft
end
it 'should return draft associated to this article if there are one' do
parent = Factory(:article)
draft = Factory(:article, :parent_id => parent.id, :state => 'draft')
Article.last_draft(draft.id).should == draft
end
end
describe "an article published just before midnight UTC" do
before do
@a = Factory.build(:article)
@a.published_at = "21 Feb 2011 23:30 UTC"
end
describe "#permalink_url" do
it "uses UTC to determine correct day" do
@a.permalink_url.should == "http://myblog.net/2011/02/21/a-big-article"
end
end
describe "#find_by_permalink" do
it "uses UTC to determine correct day" do
@a.save
a = Article.find_by_permalink :year => 2011, :month => 2, :day => 21, :permalink => 'a-big-article'
a.should == @a
end
end
end
describe "an article published just after midnight UTC" do
before do
@a = Factory.build(:article)
@a.published_at = "22 Feb 2011 00:30 UTC"
end
describe "#permalink_url" do
it "uses UTC to determine correct day" do
@a.permalink_url.should == "http://myblog.net/2011/02/22/a-big-article"
end
end
describe "#find_by_permalink" do
it "uses UTC to determine correct day" do
@a.save
a = Article.find_by_permalink :year => 2011, :month => 2, :day => 22, :permalink => 'a-big-article'
a.should == @a
end
end
end
describe "#get_or_build" do
context "when no params given" do
before(:each) do
@article = Article.get_or_build_article
end
it "should return article" do
@article.should be_a(Article)
end
context "should have blog default value for" do
it "allow_comments" do
@article.allow_comments.should be == @blog.default_allow_comments
end
it "allow_pings" do
@article.allow_pings.should be == @blog.default_allow_pings
end
it "should have default text filter" do
@article.text_filter.should be == @blog.text_filter_object
end
end
end
context "when id params given" do
it "should return article" do
already_exist_article = Factory.create(:article)
article = Article.get_or_build_article(already_exist_article.id)
article.should be == already_exist_article
end
end
end
end
# Ghetto way to check that update_attributes is never called.
# coding: utf-8
require 'spec_helper'
describe Article do
before do
@blog = stub_model(Blog)
@blog.stub(:base_url) { "http://myblog.net" }
@blog.stub(:text_filter) { "textile" }
@blog.stub(:send_outbound_pings) { false }
Blog.stub(:default) { @blog }
@articles = []
end
# Checks that @articles holds exactly the expected contents. Symbols are
# resolved through the contents() fixture helper; anything else is
# compared directly.
def assert_results_are(*expected)
  assert_equal expected.size, @articles.size
  expected.each do |expectation|
    wanted = expectation.is_a?(Symbol) ? contents(expectation) : expectation
    assert @articles.include?(wanted)
  end
end
it "test_content_fields" do
a = Article.new
assert_equal [:body, :extended], a.content_fields
end
describe 'merge method' do
context 'other article does not exist' do
before :each do
@a1 = Factory(:article, :state => 'draft', body: 'a1 body: kitties')
Article.stub(:find_by_id).and_return nil
end
it 'should not change the callee model' do
@a1.should_receive(:update_attributes!).exactly(0).times
@a1.merge_with!(8383848583)
end
it 'should return nil' do
@a1.merge_with!(338484848).should be_nil
end
end
context 'other article does exist' do
before :each do
@a1 = Factory(:article, :state => 'draft', body: 'kitties', :author => 'sam')
@a2 = Factory(:article, :state => 'draft', body: 'puppies', :author => 'mandy')
Article.stub(:find_by_id).and_return @a2
Article.stub(:delete)
end
it 'should contain the text of the other article' do
@a1.merge_with!(@a2.id)
@a1.body.should == 'kittiespuppies'
end
it 'should contain only one author' do
@a1.merge_with!(@a2.id)
@a1.author.should == "sam"
end
it 'should delete the other article' do
Article.should_receive(:delete).with @a2
@a1.merge_with! @a2.id
end
it 'should return the receiving article' do
@a1.merge_with!(@a2.id).should_not be_nil
end
it 'should concatenate the comments of the merging article' do
a2_comments = (1..5).map { Factory(:comment, article: @a2) }
@a2.stub(:comments).and_return a2_comments
a1_comments = mock()
a1_comments.should_receive(:<<).with a2_comments
@a1.stub(:comments).and_return a1_comments
@a1.merge_with!(@a2.id)
end
end
end
describe "#permalink_url" do
describe "with hostname" do
subject { Article.new(:permalink => 'article-3', :published_at => Time.new(2004, 6, 1)).permalink_url(anchor=nil, only_path=false) }
it { should == 'http://myblog.net/2004/06/01/article-3' }
end
describe "without hostname" do
subject { Article.new(:permalink => 'article-3', :published_at => Time.new(2004, 6, 1)).permalink_url(anchor=nil, only_path=true) }
it { should == '/2004/06/01/article-3' }
end
# NOTE: URLs must not have any multibyte characters in them. The
# browser may display them differently, though.
describe "with a multibyte permalink" do
subject { Article.new(:permalink => 'ルビー', :published_at => Time.new(2004, 6, 1)) }
it "escapes the multibyte characters" do
subject.permalink_url(anchor=nil, only_path=true).should == '/2004/06/01/%E3%83%AB%E3%83%93%E3%83%BC'
end
end
describe "with a permalink containing a space" do
subject { Article.new(:permalink => 'hello there', :published_at => Time.new(2004, 6, 1)) }
it "escapes the space as '%20', not as '+'" do
subject.permalink_url(anchor=nil, only_path=true).should == '/2004/06/01/hello%20there'
end
end
describe "with a permalink containing a plus" do
subject { Article.new(:permalink => 'one+two', :published_at => Time.new(2004, 6, 1)) }
it "does not escape the plus" do
subject.permalink_url(anchor=nil, only_path=true).should == '/2004/06/01/one+two'
end
end
end
describe "#initialize" do
it "accepts a settings field in its parameter hash" do
Article.new({"password" => 'foo'})
end
end
it "test_edit_url" do
a = stub_model(Article, :id => 123)
assert_equal "http://myblog.net/admin/content/edit/#{a.id}", a.edit_url
end
it "test_delete_url" do
a = stub_model(Article, :id => 123)
assert_equal "http://myblog.net/admin/content/destroy/#{a.id}", a.delete_url
end
it "test_feed_url" do
a = stub_model(Article, :permalink => 'article-3', :published_at => Time.new(2004, 6, 1))
assert_equal "http://myblog.net/2004/06/01/article-3.atom", a.feed_url(:atom10)
assert_equal "http://myblog.net/2004/06/01/article-3.rss", a.feed_url(:rss20)
end
it "test_create" do
a = Article.new
a.user_id = 1
a.body = "Foo"
a.title = "Zzz"
assert a.save
a.categories << Category.find(Factory(:category).id)
assert_equal 1, a.categories.size
b = Article.find(a.id)
assert_equal 1, b.categories.size
end
it "test_permalink_with_title" do
article = Factory(:article, :permalink => 'article-3', :published_at => Time.utc(2004, 6, 1))
assert_equal(article,
Article.find_by_permalink({:year => 2004, :month => 06, :day => 01, :title => "article-3"}) )
assert_raises(ActiveRecord::RecordNotFound) do
Article.find_by_permalink :year => 2005, :month => "06", :day => "01", :title => "article-5"
end
end
it "test_strip_title" do
assert_equal "article-3", "Article-3".to_url
assert_equal "article-3", "Article 3!?#".to_url
assert_equal "there-is-sex-in-my-violence", "There is Sex in my Violence!".to_url
assert_equal "article", "-article-".to_url
assert_equal "lorem-ipsum-dolor-sit-amet-consectetaur-adipisicing-elit", "Lorem ipsum dolor sit amet, consectetaur adipisicing elit".to_url
assert_equal "my-cats-best-friend", "My Cat's Best Friend".to_url
end
describe "#stripped_title" do
it "works for simple cases" do
assert_equal "article-1", Article.new(:title => 'Article 1!').stripped_title
assert_equal "article-2", Article.new(:title => 'Article 2!').stripped_title
assert_equal "article-3", Article.new(:title => 'Article 3!').stripped_title
end
it "strips html" do
a = Article.new(:title => "This <i>is</i> a <b>test</b>")
assert_equal 'this-is-a-test', a.stripped_title
end
it "does not escape multibyte characters" do
a = Article.new(:title => "ルビー")
a.stripped_title.should == "ルビー"
end
it "is called upon saving the article" do
a = Article.new(:title => "space separated")
a.permalink.should be_nil
a.save
a.permalink.should == "space-separated"
end
end
describe "the html_urls method" do
before do
@blog.stub(:text_filter_object) { TextFilter.new(:filters => []) }
@article = Article.new
end
it "extracts URLs from the generated body html" do
@article.body = 'happy halloween <a href="http://www.example.com/public">with</a>'
urls = @article.html_urls
assert_equal ["http://www.example.com/public"], urls
end
it "should only match the href attribute" do
@article.body = '<a href="http://a/b">a</a> <a fhref="wrong">wrong</a>'
urls = @article.html_urls
assert_equal ["http://a/b"], urls
end
it "should match across newlines" do
@article.body = "<a\nhref=\"http://foo/bar\">foo</a>"
urls = @article.html_urls
assert_equal ["http://foo/bar"], urls
end
it "should match with single quotes" do
@article.body = "<a href='http://foo/bar'>foo</a>"
urls = @article.html_urls
assert_equal ["http://foo/bar"], urls
end
it "should match with no quotes" do
@article.body = "<a href=http://foo/bar>foo</a>"
urls = @article.html_urls
assert_equal ["http://foo/bar"], urls
end
end
### XXX: Should we have a test here?
it "test_send_pings" do
end
### XXX: Should we have a test here?
it "test_send_multiple_pings" do
end
describe "with tags" do
# Setting the keywords property populates the tags association.
# (Fixed "recieves" -> "receives" typo in the example name.)
it "receives tags from the keywords property" do
  a = Factory(:article, :keywords => 'foo bar')
  assert_equal ['foo', 'bar'].sort, a.tags.collect {|t| t.name}.sort
end
it "changes tags when changing keywords" do
a = Factory(:article, :keywords => 'foo bar')
a.keywords = 'foo baz'
a.save
assert_equal ['foo', 'baz'].sort, a.tags.collect {|t| t.name}.sort
end
it "empties tags when keywords is set to ''" do
a = Factory(:article, :keywords => 'foo bar')
a.keywords = ''
a.save
assert_equal [], a.tags.collect {|t| t.name}.sort
end
it "properly deals with dots and spaces" do
c = Factory(:article, :keywords => 'test "tag test" web2.0')
assert_equal ['test', 'tag-test', 'web2-0'].sort, c.tags.collect(&:name).sort
end
# TODO: Get rid of using the keywords field.
# TODO: Add functions to Tag to convert collection from and to string.
it "lets the tag collection survive a load-save cycle"
end
it "test_find_published_by_tag_name" do
art1 = Factory(:article)
art2 = Factory(:article)
Factory(:tag, :name => 'foo', :articles => [art1, art2])
articles = Tag.find_by_name('foo').published_articles
assert_equal 2, articles.size
end
it "test_find_published" do
article = Factory(:article, :title => 'Article 1!', :state => 'published')
Factory(:article, :published => false, :state => 'draft')
@articles = Article.find_published
assert_equal 1, @articles.size
@articles = Article.find_published(:all, :conditions => "title = 'Article 1!'")
assert_equal [article], @articles
end
it "test_just_published_flag" do
art = Article.new(:title => 'title', :body => 'body', :published => true)
assert art.just_changed_published_status?
assert art.save
art = Article.find(art.id)
assert !art.just_changed_published_status?
art = Article.create!(:title => 'title2', :body => 'body', :published => false)
assert ! art.just_changed_published_status?
end
it "test_future_publishing" do
assert_sets_trigger(Article.create!(:title => 'title', :body => 'body',
:published => true, :published_at => Time.now + 4.seconds))
end
it "test_future_publishing_without_published_flag" do
assert_sets_trigger Article.create!(:title => 'title', :body => 'body',
:published_at => Time.now + 4.seconds)
end
it "test_triggers_are_dependent" do
pending "Needs a fix for Rails ticket #5105: has_many: Dependent deleting does not work with STI"
art = Article.create!(:title => 'title', :body => 'body',
:published_at => Time.now + 1.hour)
assert_equal 1, Trigger.count
art.destroy
assert_equal 0, Trigger.count
end
# Asserts that creating +art+ queued exactly one publication trigger,
# that the article is not yet published, and that firing triggers after
# the scheduled publish time flips it to published.
def assert_sets_trigger(art)
  assert_equal 1, Trigger.count
  assert Trigger.find(:first, :conditions => ['pending_item_id = ?', art.id])
  assert !art.published
  t = Time.now
  # Stub Time.now five seconds ahead instead of sleeping, so the
  # trigger's publish time has passed without slowing the test down.
  Time.stub!(:now).and_return(t + 5.seconds)
  Trigger.fire
  art.reload
  assert art.published
end
it "test_find_published_by_category" do
cat = Factory(:category, :permalink => 'personal')
cat.articles << Factory(:article)
cat.articles << Factory(:article)
cat.articles << Factory(:article)
cat = Factory(:category, :permalink => 'software')
cat.articles << Factory(:article)
Article.create!(:title => "News from the future!",
:body => "The future is cool!",
:keywords => "future",
:published_at => Time.now + 12.minutes)
articles = Category.find_by_permalink('personal').published_articles
assert_equal 3, articles.size
articles = Category.find_by_permalink('software').published_articles
assert_equal 1, articles.size
end
it "test_find_published_by_nonexistent_category_raises_exception" do
assert_raises ActiveRecord::RecordNotFound do
Category.find_by_permalink('does-not-exist').published_articles
end
end
it "test_destroy_file_upload_associations" do
a = Factory(:article)
Factory(:resource, :article => a)
Factory(:resource, :article => a)
assert_equal 2, a.resources.size
a.resources << Factory(:resource)
assert_equal 3, a.resources.size
a.destroy
assert_equal 0, Resource.find(:all, :conditions => "article_id = #{a.id}").size
end
it 'should notify' do
henri = Factory(:user, :login => 'henri', :notify_on_new_articles => true)
alice = Factory(:user, :login => 'alice', :notify_on_new_articles => true)
a = Factory.build(:article)
assert a.save
assert_equal 2, a.notify_users.size
assert_equal ['alice', 'henri'], a.notify_users.collect {|u| u.login }.sort
end
it "test_withdrawal" do
art = Factory(:article)
assert art.published?
assert ! art.withdrawn?
art.withdraw!
assert ! art.published?
assert art.withdrawn?
art.reload
assert ! art.published?
assert art.withdrawn?
end
describe "#default_text_filter" do
it "returns the blog's text filter" do
a = Article.new
assert_equal @blog.text_filter, a.default_text_filter.name
end
end
it 'should get only ham not spam comment' do
article = Factory(:article)
ham_comment = Factory(:comment, :article => article)
spam_comment = Factory(:spam_comment, :article => article)
article.comments.ham.should == [ham_comment]
article.comments.count.should == 2
end
describe '#access_by?' do
before do
@alice = Factory.build(:user, :profile => Factory.build(:profile_admin, :label => Profile::ADMIN))
end
it 'admin should have access to an article written by another' do
Factory.build(:article).should be_access_by(@alice)
end
it 'admin should have access to an article written by himself' do
article = Factory.build(:article, :author => @alice)
article.should be_access_by(@alice)
end
end
describe 'body_and_extended' do
before :each do
@article = Article.new(
:body => 'basic text',
:extended => 'extended text to explain more and more how Typo is wonderful')
end
it 'should combine body and extended content' do
@article.body_and_extended.should ==
"#{@article.body}\n<!--more-->\n#{@article.extended}"
end
it 'should not insert <!--more--> tags if extended is empty' do
@article.extended = ''
@article.body_and_extended.should == @article.body
end
end
describe '#search' do
describe 'with several words and no result' do
# FIXME: This tests nothing, really.
before :each do
@articles = Article.search('hello world')
end
it 'should be empty' do
@articles.should be_empty
end
end
describe 'with one word and result' do
it 'should have two items' do
Factory(:article, :extended => "extended talk")
Factory(:article, :extended => "Once uppon a time, an extended story")
assert_equal 2, Article.search('extended').size
end
end
end
describe 'body_and_extended=' do
before :each do
@article = Article.new
end
it 'should split apart values at <!--more-->' do
@article.body_and_extended = 'foo<!--more-->bar'
@article.body.should == 'foo'
@article.extended.should == 'bar'
end
it 'should remove newlines around <!--more-->' do
@article.body_and_extended = "foo\n<!--more-->\nbar"
@article.body.should == 'foo'
@article.extended.should == 'bar'
end
it 'should make extended empty if no <!--more--> tag' do
@article.body_and_extended = "foo"
@article.body.should == 'foo'
@article.extended.should be_empty
end
it 'should preserve extra <!--more--> tags' do
@article.body_and_extended = "foo<!--more-->bar<!--more-->baz"
@article.body.should == 'foo'
@article.extended.should == 'bar<!--more-->baz'
end
it 'should be settable via self.attributes=' do
@article.attributes = { :body_and_extended => 'foo<!--more-->bar' }
@article.body.should == 'foo'
@article.extended.should == 'bar'
end
end
describe '#comment_url' do
it 'should render complete url of comment' do
article = stub_model(Article, :id => 123)
article.comment_url.should == "http://myblog.net/comments?article_id=#{article.id}"
end
end
describe '#preview_comment_url' do
it 'should render complete url of comment' do
article = stub_model(Article, :id => 123)
article.preview_comment_url.should == "http://myblog.net/comments/preview?article_id=#{article.id}"
end
end
it "test_can_ping_fresh_article_iff_it_allows_pings" do
a = Factory(:article, :allow_pings => true)
assert_equal(false, a.pings_closed?)
a.allow_pings = false
assert_equal(true, a.pings_closed?)
end
it "test_cannot_ping_old_article" do
a = Factory(:article, :allow_pings => false)
assert_equal(true, a.pings_closed?)
a.allow_pings = false
assert_equal(true, a.pings_closed?)
end
describe '#published_at_like' do
before do
# Note: these choices of times depend on no other articles within
# these timeframes existing in test/fixtures/contents.yaml.
# In particular, all articles there are from 2005 or earlier, which
# is now more than two years ago, except for two, which are from
# yesterday and the day before. The existence of those two makes
# 1.month.ago not suitable, because yesterday can be last month.
@article_two_month_ago = Factory(:article, :published_at => 2.month.ago)
@article_four_months_ago = Factory(:article, :published_at => 4.month.ago)
@article_2_four_months_ago = Factory(:article, :published_at => 4.month.ago)
@article_two_year_ago = Factory(:article, :published_at => 2.year.ago)
@article_2_two_year_ago = Factory(:article, :published_at => 2.year.ago)
end
it 'should return all content for the year if only year sent' do
Article.published_at_like(2.year.ago.strftime('%Y')).map(&:id).sort.should == [@article_two_year_ago.id, @article_2_two_year_ago.id].sort
end
it 'should return all content for the month if year and month sent' do
Article.published_at_like(4.month.ago.strftime('%Y-%m')).map(&:id).sort.should == [@article_four_months_ago.id, @article_2_four_months_ago.id].sort
end
it 'should return all content on this date if date send' do
Article.published_at_like(2.month.ago.strftime('%Y-%m-%d')).map(&:id).sort.should == [@article_two_month_ago.id].sort
end
end
describe '#has_child?' do
it 'should be true if article has one to link it by parent_id' do
parent = Factory(:article)
Factory(:article, :parent_id => parent.id)
parent.should be_has_child
end
it 'should be false if article has no article to link it by parent_id' do
parent = Factory(:article)
Factory(:article, :parent_id => nil)
parent.should_not be_has_child
end
end
describe 'self#last_draft(id)' do
it 'should return article if no draft associated' do
draft = Factory(:article, :state => 'draft')
Article.last_draft(draft.id).should == draft
end
it 'should return draft associated to this article if there are one' do
parent = Factory(:article)
draft = Factory(:article, :parent_id => parent.id, :state => 'draft')
Article.last_draft(draft.id).should == draft
end
end
describe "an article published just before midnight UTC" do
before do
@a = Factory.build(:article)
@a.published_at = "21 Feb 2011 23:30 UTC"
end
describe "#permalink_url" do
it "uses UTC to determine correct day" do
@a.permalink_url.should == "http://myblog.net/2011/02/21/a-big-article"
end
end
describe "#find_by_permalink" do
it "uses UTC to determine correct day" do
@a.save
a = Article.find_by_permalink :year => 2011, :month => 2, :day => 21, :permalink => 'a-big-article'
a.should == @a
end
end
end
describe "an article published just after midnight UTC" do
before do
@a = Factory.build(:article)
@a.published_at = "22 Feb 2011 00:30 UTC"
end
describe "#permalink_url" do
it "uses UTC to determine correct day" do
@a.permalink_url.should == "http://myblog.net/2011/02/22/a-big-article"
end
end
describe "#find_by_permalink" do
it "uses UTC to determine correct day" do
@a.save
a = Article.find_by_permalink :year => 2011, :month => 2, :day => 22, :permalink => 'a-big-article'
a.should == @a
end
end
end
describe "#get_or_build" do
context "when no params given" do
before(:each) do
@article = Article.get_or_build_article
end
it "should return article" do
@article.should be_a(Article)
end
context "should have blog default value for" do
it "allow_comments" do
@article.allow_comments.should be == @blog.default_allow_comments
end
it "allow_pings" do
@article.allow_pings.should be == @blog.default_allow_pings
end
it "should have default text filter" do
@article.text_filter.should be == @blog.text_filter_object
end
end
end
context "when id params given" do
it "should return article" do
already_exist_article = Factory.create(:article)
article = Article.get_or_build_article(already_exist_article.id)
article.should be == already_exist_article
end
end
end
end
|
module Rexport #:nodoc:
  # Mixin for ActiveRecord models that represent a single field of an
  # export. Including it wires up list ordering (scoped per export),
  # the export association, validations, and a default-name callback.
  module ExportItemMethods
    def self.included(base)
      base.extend ClassMethods
      base.class_eval do
        include InstanceMethods
        acts_as_list scope: :export
        belongs_to :export
        # Fill in a human-readable name before validation when none was given.
        before_validation :replace_blank_name_with_rexport_field
        validates_presence_of :name, :rexport_field
        scope :ordered, -> { order :position }
      end
    end
    module ClassMethods
      # Persists a new ordering: each id (digits extracted from possibly
      # DOM-prefixed ids such as "export_item_12") gets a 1-based position
      # matching its index. Runs inside a transaction so a partial resort
      # never persists.
      def resort(export_item_ids)
        transaction do
          export_item_ids.each_with_index do |id, index|
            find(id.gsub(/[^0-9]/, '')).update_attribute(:position, index + 1)
          end
        end
      end
    end
    module InstanceMethods
      # Attributes that should carry over when copying an export item.
      def attributes_for_copy
        attributes.slice('position', 'name', 'rexport_field')
      end
      private
      # Derives a display name from rexport_field when name is blank:
      # dotted paths keep their last two segments, titleized and joined
      # with " - "; simple fields are titleized directly.
      def replace_blank_name_with_rexport_field
        return unless name.blank?
        self.name = if rexport_field.include?('.')
          rexport_field.split('.').values_at(-2..-1).map {|v| v.titleize}.join(' - ')
        else
          rexport_field.titleize
        end
      end
    end
  end
end
# Update to use ActiveSupport::Concern.
module Rexport #:nodoc:
module ExportItemMethods
extend ActiveSupport::Concern
included do
acts_as_list scope: :export
belongs_to :export
before_validation :replace_blank_name_with_rexport_field
validates_presence_of :name, :rexport_field
scope :ordered, -> { order :position }
end
module ClassMethods
# Persists a new ordering for the given export items. Ids may carry
# non-numeric prefixes (e.g. DOM ids like "export_item_12"); only the
# digits are used for lookup. Positions are 1-based, assigned in the
# order the ids arrive, and written inside a single transaction.
def resort(export_item_ids)
  transaction do
    export_item_ids.each.with_index(1) do |raw_id, position|
      record = find(raw_id.delete('^0-9'))
      record.update_attribute(:position, position)
    end
  end
end
end
# Subset of this record's attributes that should survive a copy:
# the list position, the display name and the export-field reference.
def attributes_for_copy
  copied_keys = %w[position name rexport_field]
  attributes.slice(*copied_keys)
end
private
# before_validation hook: when no explicit name was given, derive one
# from the rexport field. Dotted paths keep their last two segments,
# titleized and joined with " - "; simple fields are titleized directly.
def replace_blank_name_with_rexport_field
  return unless name.blank?
  self.name =
    if rexport_field.include?('.')
      segments = rexport_field.split('.').values_at(-2..-1)
      segments.map(&:titleize).join(' - ')
    else
      rexport_field.titleize
    end
end
end
end
|
Update podspec
|
require 'rails_helper'
# Model spec for Chapter: factory validity, attribute surface,
# associations and validations.
describe Chapter, :type => :model do
  before do
    # Build (not create): no database writes needed for these examples.
    @chapter = FactoryGirl.build(:chapter)
  end
  it 'has valid factory' do
    expect(@chapter).to be_valid
  end
  # Smoke tests that the expected attributes/columns exist.
  context 'properties' do
    it 'responds to goal' do
      expect(@chapter).to respond_to(:goal)
    end
    it 'responds to title' do
      expect(@chapter).to respond_to(:title)
    end
    it 'responds to description' do
      expect(@chapter).to respond_to(:description)
    end
    it 'responds to created at' do
      expect(@chapter).to respond_to(:created_at)
    end
    it 'responds to completed at' do
      expect(@chapter).to respond_to(:completed_at)
    end
  end
  context 'associations' do
    it 'belongs to a log' do
      expect(@chapter).to respond_to(:log)
    end
    it 'has many entries'
    it 'has many measurements through entries'
  end
  context 'validations' do
    it 'requires the presence of a goal' do
      chapter = FactoryGirl.build(:chapter, goal: nil)
      # NOTE(review): two errors suggests goal fails both presence and
      # numericality when nil — confirm against the Chapter model.
      expect(chapter).to have(2).error_on(:goal)
    end
    it 'requires the presence of a title' do
      chapter = FactoryGirl.build(:chapter, title: nil)
      expect(chapter).to have(1).error_on(:title)
    end
    it 'requires that goal is a number' do
      chapter = FactoryGirl.build(:chapter, goal: "i wana get huuuge")
      expect(chapter).to have(1).error_on(:goal)
    end
  end
end
Add skeleton tests for Chapter spec
require 'rails_helper'
describe Chapter, :type => :model do
before do
@chapter = FactoryGirl.build(:chapter)
end
it 'has valid factory' do
expect(@chapter).to be_valid
end
context 'properties' do
it 'responds to goal' do
expect(@chapter).to respond_to(:goal)
end
it 'responds to title' do
expect(@chapter).to respond_to(:title)
end
it 'responds to description' do
expect(@chapter).to respond_to(:description)
end
it 'responds to created at' do
expect(@chapter).to respond_to(:created_at)
end
it 'responds to completed at' do
expect(@chapter).to respond_to(:completed_at)
end
it 'responds to total calories'
it 'responds to total carbs'
it 'responds to total fats'
it 'responds to total protein'
end
context 'associations' do
it 'belongs to a log' do
expect(@chapter).to respond_to(:log)
end
it 'has many entries'
it 'has many measurements through entries'
end
context 'validations' do
it 'requires the presence of a goal' do
chapter = FactoryGirl.build(:chapter, goal: nil)
expect(chapter).to have(2).error_on(:goal)
end
it 'requires the presence of a title' do
chapter = FactoryGirl.build(:chapter, title: nil)
expect(chapter).to have(1).error_on(:title)
end
it 'requires that goal is a number' do
chapter = FactoryGirl.build(:chapter, goal: "i wana get huuuge")
expect(chapter).to have(1).error_on(:goal)
end
end
context 'methods' do
it 'can calculate the average weekly change in weight' #(total change / days passed [completed_at || Date.today]) * 7
it 'can calculate the total change in weight' #latest measurement - earliest measurement
it 'can calculate the average daily consumption of calories' # total calories / entries.count
it 'can calculate the average daily consumption of carbs' # total carbs / entries.count
it 'can calculate the average daily consumption of fats' # total fats / entries.count
it 'can calculate the average daily consumption of protein' # total protein / entries.count
it 'can estimate the total daily expenditure of energy' # use daily change in weight and daily calorie consumption, +-500/day = 1 lb/week
end
end
|
require 'erb'
module RidgepoleRake
module Brancher
module Configuration
def self.prepended(klass)
klass.class_eval { attr_accessor :brancher }
end
# @note override
def initialize
super
@brancher = { enable: true }.with_indifferent_access
end
end
module Command
private
# @note override
def add_config
stash.push('--config', database_configuration)
end
# Path/YAML handed to ridgepole's --config. When branching is enabled,
# try the branch-rewritten configuration; fall back to the plain
# configured file when branching is disabled or the rewrite fails.
def database_configuration
  return config.ridgepole.fetch(:config) unless config.brancher[:enable]
  branched = database_configuration_with_brancher rescue nil
  branched || config.ridgepole.fetch(:config)
end
# Loads the ridgepole database configuration, applies the brancher
# database rename for the configured env, and returns the resulting
# single-environment config as a YAML string.
def database_configuration_with_brancher
  configurations = YAML.load(ERB.new(File.read(config.ridgepole.fetch(:config))).result)
  ::Brancher::DatabaseRenameService.rename!(configurations, config.ridgepole.fetch(:env))
  yaml = configurations[config.ridgepole.fetch(:env)].to_yaml
  # BUG FIX: String#sub is non-destructive and its return value was
  # discarded, so the "---\n" document header was never stripped for
  # :diff. Mutate in place instead.
  yaml.sub!(/---\n/, '') if action.eql?(:diff)
  yaml
end
# @note override
def add_diff_action
stash.push('--diff', database_configuration, config.ridgepole.fetch(:file))
end
end
end
end
RidgepoleRake::Configuration.__send__(:prepend, RidgepoleRake::Brancher::Configuration)
RidgepoleRake::Command.__send__(:prepend, RidgepoleRake::Brancher::Command)
Tweak
require 'erb'
module RidgepoleRake
  module Brancher
    module Configuration
      # Adds a `brancher` accessor to the configuration class this module
      # is prepended into.
      def self.prepended(klass)
        klass.class_eval { attr_accessor :brancher }
      end

      # @note override
      def initialize
        super
        @brancher = { enable: true }.with_indifferent_access
      end
    end

    module Command
      private

      # @note override
      def add_config
        stash.push('--config', database_configuration)
      end

      # Prefer the branch-rewritten configuration; fall back to the plain
      # config file path when branching is disabled or rewriting fails.
      def database_configuration
        if config.brancher[:enable] && (yaml = database_configuration_with_brancher rescue nil)
          yaml
        else
          config.ridgepole.fetch(:config)
        end
      end

      # Reads the ridgepole config through ERB, renames the databases for the
      # current branch, and returns the per-environment section as YAML.
      def database_configuration_with_brancher
        config_yaml = File.read(config.ridgepole.fetch(:config))
        configurations = YAML.load(ERB.new(config_yaml).result)
        ::Brancher::DatabaseRenameService.rename!(configurations, config.ridgepole.fetch(:env))
        yaml = configurations[config.ridgepole.fetch(:env)].to_yaml
        # BUG FIX: String#sub returns a new string; the original discarded the
        # result, so the leading "---\n" document marker was never stripped
        # for diff runs. Mutate in place instead.
        yaml.sub!(/---\n/, '') if action.eql?(:diff)
        yaml
      end

      # @note override
      def add_diff_action
        stash.push('--diff', database_configuration, config.ridgepole.fetch(:file))
      end
    end
  end
end
RidgepoleRake::Configuration.__send__(:prepend, RidgepoleRake::Brancher::Configuration)
RidgepoleRake::Command.__send__(:prepend, RidgepoleRake::Brancher::Command)
|
# frozen_string_literal: true
require "rails_helper"
require "publify_core/testing_support/dns_mock"
describe Comment, type: :model do
let(:blog) { build_stubbed :blog }
let(:published_article) { build_stubbed(:article, published_at: 1.hour.ago, blog: blog) }
# Builds an unsaved Comment with sensible defaults, overridable per example.
def valid_comment(options = {})
  defaults = { author: "Bob", article: published_article, body: "nice post",
               ip: "1.2.3.4" }
  Comment.new(defaults.merge(options))
end
describe "#permalink_url" do
  let(:comment) { build_stubbed(:comment) }

  # The public comment URL is the article permalink plus a fragment anchor.
  it "renders permalink to comment in public part" do
    expected = "#{comment.article.permalink_url}#comment-#{comment.id}"
    expect(comment.permalink_url).to eq(expected)
  end
end
describe "#save" do
  # Consistency fix: the original mixed Test::Unit asserts with RSpec
  # expectations; this block now uses `expect` throughout like the rest
  # of the spec.
  it "saves good comment" do
    c = build(:comment, url: "http://www.google.de")
    expect(c.save).to be_truthy
    expect(c.url).to eq("http://www.google.de")
  end

  it "saves spam comment" do
    c = build(:comment, body: 'test <a href="http://fakeurl.com">body</a>')
    expect(c.save).to be_truthy
    expect(c.url).to eq("http://fakeurl.com")
  end

  it "does not save when article comment window is closed" do
    article = build :article, published_at: 1.year.ago
    article.blog.sp_article_auto_close = 30
    comment = build(:comment, author: "Old Spammer", body: "Old trackback body",
                              article: article)
    expect(comment.save).to be_falsey
    expect(comment.errors[:article]).not_to be_empty
  end

  it "changes old comment" do
    c = build(:comment, body: "Comment body <em>italic</em> <strong>bold</strong>")
    expect(c.save).to be_truthy
    expect(c.errors).to be_empty
  end

  it "saves a valid comment" do
    c = build :comment
    expect(c.save).to be_truthy
    expect(c.errors).to be_empty
  end

  it "does not save with article not allow comment" do
    c = build(:comment, article: build_stubbed(:article, allow_comments: false))
    expect(c.save).not_to be_truthy
    expect(c.errors).not_to be_empty
  end

  it "generates guid" do
    c = build :comment, guid: nil
    expect(c.save).to be_truthy
    expect(c.guid.size).to be > 15
  end

  it "preserves urls starting with https://" do
    c = build :comment, url: "https://example.com/"
    c.save
    expect(c.url).to eq("https://example.com/")
  end

  it "preserves urls starting with http://" do
    c = build :comment, url: "http://example.com/"
    c.save
    expect(c.url).to eq("http://example.com/")
  end

  it "prepends http:// to urls without protocol" do
    c = build :comment, url: "example.com"
    c.save
    expect(c.url).to eq("http://example.com")
  end
end
describe "#classify_content" do
  it "rejects spam rbl" do
    comment = valid_comment(
      author: "Spammer",
      body: <<-BODY,
        This is just some random text.
        <a href="http://chinaaircatering.com">without any senses.</a>.
        Please disregard.
      BODY
      url: "http://buy-computer.us")
    comment.classify_content
    expect(comment).to be_spammy
    expect(comment).not_to be_status_confirmed
  end

  it "does not define spam a comment rbl with lookup succeeds" do
    comment = valid_comment(author: "Not a Spammer", body: "Useful commentary!",
                            url: "http://www.bofh.org.uk")
    comment.classify_content
    expect(comment).not_to be_spammy
    expect(comment).not_to be_status_confirmed
  end

  it "rejects spam with uri limit" do
    comment =
      valid_comment(author: "Yet Another Spammer",
                    body: <<~HTML,
                      <a href="http://www.one.com/">one</a>
                      <a href="http://www.two.com/">two</a>
                      <a href="http://www.three.com/">three</a>
                      <a href="http://www.four.com/">four</a>
                    HTML
                    url: "http://www.uri-limit.com")
    comment.classify_content
    expect(comment).to be_spammy
    expect(comment).not_to be_status_confirmed
  end

  describe "with feedback moderation enabled" do
    before do
      allow(blog).to receive(:sp_global).and_return(false)
      allow(blog).to receive(:default_moderate_comments).and_return(true)
    end

    # Consistency fix: converted Test::Unit asserts to RSpec predicate
    # matchers, matching the rest of this spec.
    it "marks comment as presumably spam" do
      comment = described_class.new do |c|
        c.body = "Test foo"
        c.author = "Bob"
        c.article = build_stubbed(:article, blog: blog)
      end
      comment.classify_content
      expect(comment).not_to be_published
      expect(comment).to be_presumed_spam
      expect(comment).not_to be_status_confirmed
    end

    it "marks comment from known user as confirmed ham" do
      comment = described_class.new do |c|
        c.body = "Test foo"
        c.author = "Henri"
        c.article = build_stubbed(:article, blog: blog)
        c.user = build_stubbed(:user)
      end
      comment.classify_content
      expect(comment).to be_published
      expect(comment).to be_ham
      expect(comment).to be_status_confirmed
    end
  end
end
# Consistency fix: asserts converted to RSpec expectations.
it "has good relation" do
  article = build_stubbed(:article)
  comment = build_stubbed(:comment, article: article)
  expect(comment.article).not_to be_nil
  expect(comment.article).to eq(article)
end
# NOTE(review): this overlaps the "#html" context below that uses the same
# script-tag body — candidate for unification.
describe "reject xss" do
  let(:comment) do
    described_class.new do |c|
      c.body = "Test foo <script>do_evil();</script>"
      c.author = "Bob"
      c.article = build_stubbed(:article, blog: blog)
    end
  end

  ["", "textile", "markdown", "smartypants", "markdown smartypants"].each do |filter|
    it "rejects with filter '#{filter}'" do
      blog.comment_text_filter = filter
      ActiveSupport::Deprecation.silence do
        # Consistency fix: assert + !~ replaced with an RSpec matcher.
        expect(comment.html(:body)).not_to match(/<script>/)
      end
    end
  end
end
describe "change state" do
  # Consistency fix: asserts converted to RSpec predicate matchers.
  it "becomes unpublished if withdrawn" do
    c = build :comment
    expect(c).to be_published
    expect(c.withdraw!).to be_truthy
    expect(c).not_to be_published
    expect(c).to be_spam
    expect(c).to be_status_confirmed
  end

  # Typo fix: "becomeses" -> "becomes" in the example description.
  it "becomes confirmed if withdrawn" do
    unconfirmed = build(:comment, state: "presumed_ham")
    expect(unconfirmed).not_to be_status_confirmed
    unconfirmed.withdraw!
    expect(unconfirmed).to be_status_confirmed
  end
end
# Consistency fix: assert_equal converted to an RSpec expectation.
it "has good default filter" do
  create :blog, text_filter: "textile", comment_text_filter: "markdown"
  a = create(:comment)
  expect(a.default_text_filter.name).to eq("markdown")
end
# The scope specs below persist records, hence integration: true.
describe "spam", integration: true do
  let!(:comment) { create(:comment, state: "spam") }
  let!(:ham_comment) { create(:comment, state: "ham") }
  it "returns only spam comment" do
    expect(described_class.spam).to eq([comment])
  end
end
describe "not_spam", integration: true do
  let!(:comment) { create(:comment, state: "spam") }
  let!(:ham_comment) { create(:comment, state: "ham") }
  let!(:presumed_spam_comment) { create(:comment, state: "presumed_spam") }
  it "returns all comment that not_spam" do
    expect(described_class.not_spam).to match_array [ham_comment, presumed_spam_comment]
  end
end
describe "presumed_spam", integration: true do
  let!(:comment) { create(:comment, state: "spam") }
  let!(:ham_comment) { create(:comment, state: "ham") }
  let!(:presumed_spam_comment) { create(:comment, state: "presumed_spam") }
  it "returns only presumed_spam" do
    expect(described_class.presumed_spam).to eq([presumed_spam_comment])
  end
end
# NOTE(review): the expectation implies last_published returns the five most
# recent comments, newest first — confirm against the scope definition.
describe "last_published", integration: true do
  let(:date) { DateTime.new(2012, 12, 23, 12, 47).in_time_zone }
  # Created deliberately out of order so ordering comes from the scope,
  # not from insertion order.
  let!(:comment_1) { create(:comment, body: "1", created_at: date + 1.day) }
  let!(:comment_4) { create(:comment, body: "4", created_at: date + 4.days) }
  let!(:comment_2) { create(:comment, body: "2", created_at: date + 2.days) }
  let!(:comment_6) { create(:comment, body: "6", created_at: date + 6.days) }
  let!(:comment_3) { create(:comment, body: "3", created_at: date + 3.days) }
  let!(:comment_5) { create(:comment, body: "5", created_at: date + 5.days) }
  it "respond only 5 last_published" do
    expect(described_class.last_published).to eq([comment_6, comment_5, comment_4,
                                                  comment_3, comment_2])
  end
end
describe "#html" do
  it "renders email addresses in the body" do
    comment = build_stubbed(:comment, body: "foo@example.com")
    expect(comment.html).to match(/mailto:/)
  end

  it "returns an html_safe string" do
    comment = build_stubbed(:comment, body: "Just a comment")
    expect(comment.html).to be_html_safe
  end

  context "with an evil comment" do
    let(:comment) { build_stubbed :comment, body: "Test foo <script>do_evil();</script>" }
    let(:blog) { comment.article.blog }

    ["", "textile", "markdown", "smartypants", "markdown smartypants"].each do |filter|
      it "rejects xss attempt with filter '#{filter}'" do
        blog.comment_text_filter = filter
        ActiveSupport::Deprecation.silence do
          # Consistency fix: assert + !~ replaced with an RSpec matcher.
          expect(comment.html(:body)).not_to match(/<script>/)
        end
      end
    end
  end

  context "with a markdown comment with italic and bold" do
    let(:comment) { build(:comment, body: "Comment body _italic_ **bold**") }
    let(:blog) { comment.article.blog }

    it "converts the comment markup to html" do
      blog.comment_text_filter = "markdown"
      result = comment.html
      aggregate_failures do
        expect(result).to match(%r{<em>italic</em>})
        expect(result).to match(%r{<strong>bold</strong>})
      end
    end
  end
end
end
Unify specs for disallowing script tags in comments
# frozen_string_literal: true
require "rails_helper"
require "publify_core/testing_support/dns_mock"
describe Comment, type: :model do
let(:blog) { build_stubbed :blog }
let(:published_article) { build_stubbed(:article, published_at: 1.hour.ago, blog: blog) }
# Builds an unsaved Comment with sensible defaults, overridable per example.
def valid_comment(options = {})
  defaults = { author: "Bob", article: published_article, body: "nice post",
               ip: "1.2.3.4" }
  Comment.new(defaults.merge(options))
end
describe "#permalink_url" do
let(:comment) { build_stubbed(:comment) }
it "renders permalink to comment in public part" do
expect(comment.permalink_url).
to eq("#{comment.article.permalink_url}#comment-#{comment.id}")
end
end
describe "#save" do
  # Consistency fix: the original mixed Test::Unit asserts with RSpec
  # expectations; this block now uses `expect` throughout like the rest
  # of the spec.
  it "saves good comment" do
    c = build(:comment, url: "http://www.google.de")
    expect(c.save).to be_truthy
    expect(c.url).to eq("http://www.google.de")
  end

  it "saves spam comment" do
    c = build(:comment, body: 'test <a href="http://fakeurl.com">body</a>')
    expect(c.save).to be_truthy
    expect(c.url).to eq("http://fakeurl.com")
  end

  it "does not save when article comment window is closed" do
    article = build :article, published_at: 1.year.ago
    article.blog.sp_article_auto_close = 30
    comment = build(:comment, author: "Old Spammer", body: "Old trackback body",
                              article: article)
    expect(comment.save).to be_falsey
    expect(comment.errors[:article]).not_to be_empty
  end

  it "changes old comment" do
    c = build(:comment, body: "Comment body <em>italic</em> <strong>bold</strong>")
    expect(c.save).to be_truthy
    expect(c.errors).to be_empty
  end

  it "saves a valid comment" do
    c = build :comment
    expect(c.save).to be_truthy
    expect(c.errors).to be_empty
  end

  it "does not save with article not allow comment" do
    c = build(:comment, article: build_stubbed(:article, allow_comments: false))
    expect(c.save).not_to be_truthy
    expect(c.errors).not_to be_empty
  end

  it "generates guid" do
    c = build :comment, guid: nil
    expect(c.save).to be_truthy
    expect(c.guid.size).to be > 15
  end

  it "preserves urls starting with https://" do
    c = build :comment, url: "https://example.com/"
    c.save
    expect(c.url).to eq("https://example.com/")
  end

  it "preserves urls starting with http://" do
    c = build :comment, url: "http://example.com/"
    c.save
    expect(c.url).to eq("http://example.com/")
  end

  it "prepends http:// to urls without protocol" do
    c = build :comment, url: "example.com"
    c.save
    expect(c.url).to eq("http://example.com")
  end
end
describe "#classify_content" do
  it "rejects spam rbl" do
    comment = valid_comment(
      author: "Spammer",
      body: <<-BODY,
        This is just some random text.
        <a href="http://chinaaircatering.com">without any senses.</a>.
        Please disregard.
      BODY
      url: "http://buy-computer.us")
    comment.classify_content
    expect(comment).to be_spammy
    expect(comment).not_to be_status_confirmed
  end

  it "does not define spam a comment rbl with lookup succeeds" do
    comment = valid_comment(author: "Not a Spammer", body: "Useful commentary!",
                            url: "http://www.bofh.org.uk")
    comment.classify_content
    expect(comment).not_to be_spammy
    expect(comment).not_to be_status_confirmed
  end

  it "rejects spam with uri limit" do
    comment =
      valid_comment(author: "Yet Another Spammer",
                    body: <<~HTML,
                      <a href="http://www.one.com/">one</a>
                      <a href="http://www.two.com/">two</a>
                      <a href="http://www.three.com/">three</a>
                      <a href="http://www.four.com/">four</a>
                    HTML
                    url: "http://www.uri-limit.com")
    comment.classify_content
    expect(comment).to be_spammy
    expect(comment).not_to be_status_confirmed
  end

  describe "with feedback moderation enabled" do
    before do
      allow(blog).to receive(:sp_global).and_return(false)
      allow(blog).to receive(:default_moderate_comments).and_return(true)
    end

    # Consistency fix: converted Test::Unit asserts to RSpec predicate
    # matchers, matching the rest of this spec.
    it "marks comment as presumably spam" do
      comment = described_class.new do |c|
        c.body = "Test foo"
        c.author = "Bob"
        c.article = build_stubbed(:article, blog: blog)
      end
      comment.classify_content
      expect(comment).not_to be_published
      expect(comment).to be_presumed_spam
      expect(comment).not_to be_status_confirmed
    end

    it "marks comment from known user as confirmed ham" do
      comment = described_class.new do |c|
        c.body = "Test foo"
        c.author = "Henri"
        c.article = build_stubbed(:article, blog: blog)
        c.user = build_stubbed(:user)
      end
      comment.classify_content
      expect(comment).to be_published
      expect(comment).to be_ham
      expect(comment).to be_status_confirmed
    end
  end
end
# Consistency fix: asserts converted to RSpec expectations.
it "has good relation" do
  article = build_stubbed(:article)
  comment = build_stubbed(:comment, article: article)
  expect(comment.article).not_to be_nil
  expect(comment.article).to eq(article)
end
describe "change state" do
  # Consistency fix: asserts converted to RSpec predicate matchers.
  it "becomes unpublished if withdrawn" do
    c = build :comment
    expect(c).to be_published
    expect(c.withdraw!).to be_truthy
    expect(c).not_to be_published
    expect(c).to be_spam
    expect(c).to be_status_confirmed
  end

  # Typo fix: "becomeses" -> "becomes" in the example description.
  it "becomes confirmed if withdrawn" do
    unconfirmed = build(:comment, state: "presumed_ham")
    expect(unconfirmed).not_to be_status_confirmed
    unconfirmed.withdraw!
    expect(unconfirmed).to be_status_confirmed
  end
end
# Consistency fix: assert_equal converted to an RSpec expectation.
it "has good default filter" do
  create :blog, text_filter: "textile", comment_text_filter: "markdown"
  a = create(:comment)
  expect(a.default_text_filter.name).to eq("markdown")
end
describe "spam", integration: true do
let!(:comment) { create(:comment, state: "spam") }
let!(:ham_comment) { create(:comment, state: "ham") }
it "returns only spam comment" do
expect(described_class.spam).to eq([comment])
end
end
describe "not_spam", integration: true do
let!(:comment) { create(:comment, state: "spam") }
let!(:ham_comment) { create(:comment, state: "ham") }
let!(:presumed_spam_comment) { create(:comment, state: "presumed_spam") }
it "returns all comment that not_spam" do
expect(described_class.not_spam).to match_array [ham_comment, presumed_spam_comment]
end
end
describe "presumed_spam", integration: true do
let!(:comment) { create(:comment, state: "spam") }
let!(:ham_comment) { create(:comment, state: "ham") }
let!(:presumed_spam_comment) { create(:comment, state: "presumed_spam") }
it "returns only presumed_spam" do
expect(described_class.presumed_spam).to eq([presumed_spam_comment])
end
end
describe "last_published", integration: true do
let(:date) { DateTime.new(2012, 12, 23, 12, 47).in_time_zone }
let!(:comment_1) { create(:comment, body: "1", created_at: date + 1.day) }
let!(:comment_4) { create(:comment, body: "4", created_at: date + 4.days) }
let!(:comment_2) { create(:comment, body: "2", created_at: date + 2.days) }
let!(:comment_6) { create(:comment, body: "6", created_at: date + 6.days) }
let!(:comment_3) { create(:comment, body: "3", created_at: date + 3.days) }
let!(:comment_5) { create(:comment, body: "5", created_at: date + 5.days) }
it "respond only 5 last_published" do
expect(described_class.last_published).to eq([comment_6, comment_5, comment_4,
comment_3, comment_2])
end
end
describe "#html" do
  it "renders email addresses in the body" do
    comment = build_stubbed(:comment, body: "foo@example.com")
    expect(comment.html).to match(/mailto:/)
  end

  it "returns an html_safe string" do
    comment = build_stubbed(:comment, body: "Just a comment")
    expect(comment.html).to be_html_safe
  end

  context "with an attempted xss body" do
    let(:comment) do
      described_class.new do |c|
        c.body = "Test foo <script>do_evil();</script>"
        c.author = "Bob"
        c.article = build_stubbed(:article, blog: blog)
      end
    end

    ["", "textile", "markdown", "smartypants", "markdown smartypants"].each do |filter|
      it "rejects with filter '#{filter}'" do
        blog.comment_text_filter = filter
        ActiveSupport::Deprecation.silence do
          # Consistency fix: assert + !~ replaced with an RSpec matcher.
          expect(comment.html(:body)).not_to match(/<script>/)
        end
      end
    end
  end

  context "with a markdown comment with italic and bold" do
    let(:comment) { build(:comment, body: "Comment body _italic_ **bold**") }
    let(:blog) { comment.article.blog }

    it "converts the comment markup to html" do
      blog.comment_text_filter = "markdown"
      result = comment.html
      aggregate_failures do
        expect(result).to match(%r{<em>italic</em>})
        expect(result).to match(%r{<strong>bold</strong>})
      end
    end
  end
end
end
|
require 'pathname'
# Load this library before enabling the monkey-patches to avoid HI-581
begin
require 'hiera/util/win32'
rescue LoadError
# ignore this on installs without hiera, e.g. puppet 3 gems
end
class RSpec::Puppet::EventListener
  class << self
    # Record whether the example that just started belongs to an
    # rspec-puppet example group, and keep a reference to that example.
    def example_started(example)
      if rspec3?
        current = example.example
        @rspec_puppet_example =
          current.example_group.ancestors.include?(RSpec::Puppet::Support)
        if !current.respond_to?(:environment) && current.respond_to?(:example_group_instance)
          current = current.example_group_instance
        end
        @current_example = current
      else
        @rspec_puppet_example =
          example.example_group.ancestors.include?(RSpec::Puppet::Support)
        @current_example = example
      end
    end

    # Once an example finishes — in any state — we are no longer inside an
    # rspec-puppet example.
    def example_passed(_example)
      @rspec_puppet_example = false
    end

    def example_pending(_example)
      @rspec_puppet_example = false
    end

    def example_failed(_example)
      @rspec_puppet_example = false
    end

    def rspec_puppet_example?
      @rspec_puppet_example ? true : false
    end

    # Memoized RSpec 3 detection (defined? yields a truthy string or nil).
    def rspec3?
      @rspec3 = defined?(RSpec::Core::Notifications) if @rspec3.nil?
      @rspec3
    end

    attr_reader :current_example
  end
end
RSpec.configuration.reporter.register_listener(RSpec::Puppet::EventListener, :example_started, :example_pending, :example_passed, :example_failed)
require 'rspec-puppet/monkey_patches/win32/taskscheduler'
require 'rspec-puppet/monkey_patches/win32/registry'
require 'rspec-puppet/monkey_patches/windows/taskschedulerconstants'
module Puppet
# Allow rspec-puppet to prevent Puppet::Type from automatically picking
# a provider for a resource. We need to do this because in order to fully
# resolve the graph edges, we have to convert the Puppet::Resource objects
# into Puppet::Type objects so that their autorequires are evaluated. We need
# to prevent provider code from being called during this process as it's very
# platform specific.
class Type
  old_set_default = instance_method(:set_default)
  # While an rspec-puppet example is resolving a :provider default, force the
  # :posix / :microsoft_windows features to match the pretended platform so
  # platform-specific provider code is not exercised, then restore the real
  # feature values.
  define_method(:set_default) do |attr|
    if RSpec::Puppet.rspec_puppet_example?
      old_posix = nil
      old_microsoft_windows = nil
      if attr == :provider
        old_posix = Puppet.features.posix?
        old_microsoft_windows = Puppet.features.microsoft_windows?
        pretend_windows = Puppet::Util::Platform.pretend_windows?
        Puppet.features.add(:posix) { !pretend_windows }
        Puppet.features.add(:microsoft_windows) { pretend_windows }
      end
      begin
        old_set_default.bind(self).call(attr)
      ensure
        # BUG FIX: restore the real feature values even when set_default
        # raises; the original skipped restoration on error, leaking the
        # pretended platform into subsequent examples.
        Puppet.features.add(:posix) { old_posix } unless old_posix.nil?
        unless old_microsoft_windows.nil?
          Puppet.features.add(:microsoft_windows) { old_microsoft_windows }
        end
      end
    else
      old_set_default.bind(self).call(attr)
    end
  end
end
module Parser::Files
  alias :old_find_manifests_in_modules :find_manifests_in_modules
  module_function :old_find_manifests_in_modules
  # While pretending to be another platform, temporarily drop the pretence
  # (and re-stub the consts for the configured platform) so manifests resolve
  # against real filesystem paths, then restore the pretence afterwards.
  def find_manifests_in_modules(pattern, environment)
    if RSpec::Puppet.rspec_puppet_example?
      pretending = Puppet::Util::Platform.pretend_platform
      unless pretending.nil?
        Puppet::Util::Platform.pretend_to_be nil
        RSpec::Puppet::Consts.stub_consts_for(RSpec.configuration.platform)
      end
      # Cached values may have been computed under the pretended platform;
      # clear them when it differs from the real one.
      if pretending && pretending != Puppet::Util::Platform.actual_platform
        environment.send(:value_cache).clear if environment.respond_to?(:value_cache, true)
      end
      output = old_find_manifests_in_modules(pattern, environment)
      unless pretending.nil?
        Puppet::Util::Platform.pretend_to_be pretending
        RSpec::Puppet::Consts.stub_consts_for pretending
      end
      output
    else
      old_find_manifests_in_modules(pattern, environment)
    end
  end
  module_function :find_manifests_in_modules
end
module Util
  # Only patch get_env when this Puppet version defines it.
  if respond_to?(:get_env)
    alias :old_get_env :get_env
    module_function :old_get_env
    def get_env(name, mode = default_env)
      if RSpec::Puppet.rspec_puppet_example?
        # use the actual platform, not the pretended
        old_get_env(name, Platform.actual_platform)
      else
        old_get_env(name, mode)
      end
    end
    module_function :get_env
  end
  if respond_to?(:path_to_uri)
    alias :old_path_to_uri :path_to_uri
    module_function :old_path_to_uri
    # Build file URIs with the platform stubs disabled so real path
    # semantics apply.
    def path_to_uri(*args)
      if RSpec::Puppet.rspec_puppet_example?
        RSpec::Puppet::Consts.without_stubs do
          old_path_to_uri(*args)
        end
      else
        old_path_to_uri(*args)
      end
    end
    module_function :path_to_uri
  end
  # Allow rspec-puppet to pretend to be different platforms.
  module Platform
    alias :old_windows? :windows?
    module_function :old_windows?
    def windows?
      if RSpec::Puppet.rspec_puppet_example?
        !pretending? ? (actual_platform == :windows) : pretend_windows?
      else
        old_windows?
      end
    end
    module_function :windows?
    # The platform the host is really running on, cached on first call.
    # File::ALT_SEPARATOR is only non-nil on Windows rubies.
    def actual_platform
      @actual_platform ||= !!File::ALT_SEPARATOR ? :windows : :posix
    end
    module_function :actual_platform
    def actually_windows?
      actual_platform == :windows
    end
    module_function :actually_windows?
    def pretend_windows?
      pretend_platform == :windows
    end
    module_function :pretend_windows?
    def pretend_to_be(platform)
      # Ensure that we cache the real platform before pretending to be
      # a different one
      actual_platform
      @pretend_platform = platform
    end
    module_function :pretend_to_be
    def pretend_platform
      @pretend_platform ||= nil
    end
    module_function :pretend_platform
    def pretending?
      !pretend_platform.nil?
    end
    module_function :pretending?
  end
  class Autoload
    if respond_to?(:load_file)
      singleton_class.send(:alias_method, :old_load_file, :load_file)
      # Autoload files with the platform stubs disabled so the pretended
      # platform does not leak into loaded code.
      def self.load_file(*args)
        if RSpec::Puppet.rspec_puppet_example?
          RSpec::Puppet::Consts.without_stubs do
            old_load_file(*args)
          end
        else
          old_load_file(*args)
        end
      end
    end
  end
end
begin
  # Puppet >= 3.x location of the confine classes.
  require 'puppet/confine/exists'
  class Confine::Exists < Puppet::Confine
    old_pass = instance_method(:pass?)
    # Inside rspec-puppet examples every exists-confine passes, so provider
    # suitability does not depend on files present on the test host.
    define_method(:pass?) do |value|
      if RSpec::Puppet.rspec_puppet_example?
        true
      else
        old_pass.bind(self).call(value)
      end
    end
  end
rescue LoadError
  # Older Puppet keeps the confine classes under puppet/provider.
  require 'puppet/provider/confine/exists'
  class Provider::Confine::Exists < Puppet::Provider::Confine
    old_pass = instance_method(:pass?)
    define_method(:pass?) do |value|
      if RSpec::Puppet.rspec_puppet_example?
        true
      else
        old_pass.bind(self).call(value)
      end
    end
  end
end
end
class Pathname
  # Basename helper used by the stubbed chop_basename: strips a drive-letter
  # prefix and splits on SEPARATOR_PAT so Windows-style paths work anywhere.
  def rspec_puppet_basename(path)
    raise ArgumentError, 'pathname stubbing not enabled' unless RSpec.configuration.enable_pathname_stubbing
    if path =~ /\A[a-zA-Z]:(#{SEPARATOR_PAT}.*)\z/
      path = path[2..-1]
    end
    path.split(SEPARATOR_PAT).last || path[/(#{SEPARATOR_PAT})/, 1] || path
  end

  # BUG FIX: Module#instance_methods returns Symbols on Ruby >= 1.9, so the
  # original String-only check never matched and this patch was silently
  # skipped. Accept both forms to cover old and new rubies.
  if [:chop_basename, 'chop_basename'].any? { |m| instance_methods.include?(m) }
    old_chop_basename = instance_method(:chop_basename)
    define_method(:chop_basename) do |path|
      if RSpec::Puppet.rspec_puppet_example?
        if RSpec.configuration.enable_pathname_stubbing
          base = rspec_puppet_basename(path)
          if /\A#{SEPARATOR_PAT}?\z/o =~ base
            return nil
          else
            return path[0, path.rindex(base)], base
          end
        else
          old_chop_basename.bind(self).call(path)
        end
      else
        old_chop_basename.bind(self).call(path)
      end
    end
  end
end
# Puppet loads init.pp, then foo.pp, to find class "mod::foo". If
# class "mod" has been mocked using pre_condition when testing
# "mod::foo", this causes duplicate declaration for "mod".
# This monkey patch only loads "init.pp" if "foo.pp" does not exist.
class Puppet::Module
  # Probe with both Symbol and String so the guard also works on rubies
  # where instance_methods returns Strings (Ruby < 1.9).
  if [:match_manifests, 'match_manifests'].any? { |r| instance_methods.include?(r) }
    old_match_manifests = instance_method(:match_manifests)
    define_method(:match_manifests) do |rest|
      result = old_match_manifests.bind(self).call(rest)
      # BUG FIX: compare the basename exactly instead of %r{/init.pp$} —
      # the unescaped '.' could mis-match names such as "initXpp".
      if result.length > 1 && File.basename(result[0]) == 'init.pp'
        result.shift
      end
      result
    end
  end
end
# Prevent the File type from munging paths (which uses File.expand_path to
# normalise paths, which does very bad things to *nix paths on Windows.
file_path_munge = Puppet::Type.type(:file).paramclass(:path).instance_method(:unsafe_munge)
Puppet::Type.type(:file).paramclass(:path).munge do |value|
  # Pass paths through untouched inside rspec-puppet examples; otherwise
  # delegate to the original munge implementation.
  if RSpec::Puppet.rspec_puppet_example?
    value
  else
    file_path_munge.bind(self).call(value)
  end
end
# Prevent the Exec type from validating the user. This parameter isn't
# supported under Windows at all and only under *nix when the current user is
# root.
exec_user_validate = Puppet::Type.type(:exec).paramclass(:user).instance_method(:unsafe_validate)
Puppet::Type.type(:exec).paramclass(:user).validate do |value|
  # Accept any user inside rspec-puppet examples; otherwise run the real
  # validation.
  if RSpec::Puppet.rspec_puppet_example?
    true
  else
    exec_user_validate.bind(self).call(value)
  end
end
# Stub out Puppet::Util::Windows::Security.supports_acl? if it has been
# defined. This check only makes sense when applying the catalogue to a host
# and so can be safely stubbed out for unit testing.
Puppet::Type.type(:file).provide(:windows).class_eval do
  # BUG FIXES relative to the original:
  # * respond_to? on the class cannot see instance methods — use
  #   method_defined? so the originals are actually captured;
  # * define_method (not def) so the captured UnboundMethod locals are
  #   visible in the body;
  # * forward the real arguments — the original called `.call(value)` with
  #   an undefined local `value`, raising NameError on the non-test path.
  old_supports_acl = method_defined?(:supports_acl?) ? instance_method(:supports_acl?) : nil
  define_method(:supports_acl?) do |path|
    if RSpec::Puppet.rspec_puppet_example? || old_supports_acl.nil?
      true
    else
      old_supports_acl.bind(self).call(path)
    end
  end

  old_manages_symlinks = method_defined?(:manages_symlinks?) ? instance_method(:manages_symlinks?) : nil
  define_method(:manages_symlinks?) do
    if RSpec::Puppet.rspec_puppet_example? || old_manages_symlinks.nil?
      true
    else
      old_manages_symlinks.bind(self).call
    end
  end
end
# Prevent Puppet from requiring 'puppet/util/windows' if we're pretending to be
# windows, otherwise it will require other libraries that probably won't be
# available on non-windows hosts.
module Kernel
  alias :old_require :require

  # Skip the windows-only libraries while an rspec-puppet example pretends
  # to run on Windows; everything else loads normally.
  def require(path)
    skip = ['puppet/util/windows', 'win32/registry'].include?(path) &&
           RSpec::Puppet.rspec_puppet_example? &&
           Puppet::Util::Platform.pretend_windows?
    return if skip
    old_require(path)
  end
end
Fix for Ruby < 2: accept both String and Symbol method names when probing #instance_methods, and compare manifest basenames with File.basename instead of a regex.
require 'pathname'
# Load this library before enabling the monkey-patches to avoid HI-581
begin
require 'hiera/util/win32'
rescue LoadError
# ignore this on installs without hiera, e.g. puppet 3 gems
end
class RSpec::Puppet::EventListener
  class << self
    # Record whether the example that just started belongs to an
    # rspec-puppet example group, and keep a reference to that example.
    def example_started(example)
      if rspec3?
        current = example.example
        @rspec_puppet_example =
          current.example_group.ancestors.include?(RSpec::Puppet::Support)
        if !current.respond_to?(:environment) && current.respond_to?(:example_group_instance)
          current = current.example_group_instance
        end
        @current_example = current
      else
        @rspec_puppet_example =
          example.example_group.ancestors.include?(RSpec::Puppet::Support)
        @current_example = example
      end
    end

    # Once an example finishes — in any state — we are no longer inside an
    # rspec-puppet example.
    def example_passed(_example)
      @rspec_puppet_example = false
    end

    def example_pending(_example)
      @rspec_puppet_example = false
    end

    def example_failed(_example)
      @rspec_puppet_example = false
    end

    def rspec_puppet_example?
      @rspec_puppet_example ? true : false
    end

    # Memoized RSpec 3 detection (defined? yields a truthy string or nil).
    def rspec3?
      @rspec3 = defined?(RSpec::Core::Notifications) if @rspec3.nil?
      @rspec3
    end

    attr_reader :current_example
  end
end
RSpec.configuration.reporter.register_listener(RSpec::Puppet::EventListener, :example_started, :example_pending, :example_passed, :example_failed)
require 'rspec-puppet/monkey_patches/win32/taskscheduler'
require 'rspec-puppet/monkey_patches/win32/registry'
require 'rspec-puppet/monkey_patches/windows/taskschedulerconstants'
module Puppet
# Allow rspec-puppet to prevent Puppet::Type from automatically picking
# a provider for a resource. We need to do this because in order to fully
# resolve the graph edges, we have to convert the Puppet::Resource objects
# into Puppet::Type objects so that their autorequires are evaluated. We need
# to prevent provider code from being called during this process as it's very
# platform specific.
class Type
  old_set_default = instance_method(:set_default)
  # While an rspec-puppet example is resolving a :provider default, force the
  # :posix / :microsoft_windows features to match the pretended platform so
  # platform-specific provider code is not exercised, then restore the real
  # feature values.
  define_method(:set_default) do |attr|
    if RSpec::Puppet.rspec_puppet_example?
      old_posix = nil
      old_microsoft_windows = nil
      if attr == :provider
        old_posix = Puppet.features.posix?
        old_microsoft_windows = Puppet.features.microsoft_windows?
        pretend_windows = Puppet::Util::Platform.pretend_windows?
        Puppet.features.add(:posix) { !pretend_windows }
        Puppet.features.add(:microsoft_windows) { pretend_windows }
      end
      begin
        old_set_default.bind(self).call(attr)
      ensure
        # BUG FIX: restore the real feature values even when set_default
        # raises; the original skipped restoration on error, leaking the
        # pretended platform into subsequent examples.
        Puppet.features.add(:posix) { old_posix } unless old_posix.nil?
        unless old_microsoft_windows.nil?
          Puppet.features.add(:microsoft_windows) { old_microsoft_windows }
        end
      end
    else
      old_set_default.bind(self).call(attr)
    end
  end
end
module Parser::Files
alias :old_find_manifests_in_modules :find_manifests_in_modules
module_function :old_find_manifests_in_modules
def find_manifests_in_modules(pattern, environment)
if RSpec::Puppet.rspec_puppet_example?
pretending = Puppet::Util::Platform.pretend_platform
unless pretending.nil?
Puppet::Util::Platform.pretend_to_be nil
RSpec::Puppet::Consts.stub_consts_for(RSpec.configuration.platform)
end
if pretending && pretending != Puppet::Util::Platform.actual_platform
environment.send(:value_cache).clear if environment.respond_to?(:value_cache, true)
end
output = old_find_manifests_in_modules(pattern, environment)
unless pretending.nil?
Puppet::Util::Platform.pretend_to_be pretending
RSpec::Puppet::Consts.stub_consts_for pretending
end
output
else
old_find_manifests_in_modules(pattern, environment)
end
end
module_function :find_manifests_in_modules
end
module Util
if respond_to?(:get_env)
alias :old_get_env :get_env
module_function :old_get_env
def get_env(name, mode = default_env)
if RSpec::Puppet.rspec_puppet_example?
# use the actual platform, not the pretended
old_get_env(name, Platform.actual_platform)
else
old_get_env(name, mode)
end
end
module_function :get_env
end
if respond_to?(:path_to_uri)
alias :old_path_to_uri :path_to_uri
module_function :old_path_to_uri
def path_to_uri(*args)
if RSpec::Puppet.rspec_puppet_example?
RSpec::Puppet::Consts.without_stubs do
old_path_to_uri(*args)
end
else
old_path_to_uri(*args)
end
end
module_function :path_to_uri
end
# Allow rspec-puppet to pretend to be different platforms.
module Platform
alias :old_windows? :windows?
module_function :old_windows?
def windows?
if RSpec::Puppet.rspec_puppet_example?
!pretending? ? (actual_platform == :windows) : pretend_windows?
else
old_windows?
end
end
module_function :windows?
def actual_platform
@actual_platform ||= !!File::ALT_SEPARATOR ? :windows : :posix
end
module_function :actual_platform
def actually_windows?
actual_platform == :windows
end
module_function :actually_windows?
def pretend_windows?
pretend_platform == :windows
end
module_function :pretend_windows?
def pretend_to_be(platform)
# Ensure that we cache the real platform before pretending to be
# a different one
actual_platform
@pretend_platform = platform
end
module_function :pretend_to_be
def pretend_platform
@pretend_platform ||= nil
end
module_function :pretend_platform
def pretending?
!pretend_platform.nil?
end
module_function :pretending?
end
class Autoload
if respond_to?(:load_file)
singleton_class.send(:alias_method, :old_load_file, :load_file)
def self.load_file(*args)
if RSpec::Puppet.rspec_puppet_example?
RSpec::Puppet::Consts.without_stubs do
old_load_file(*args)
end
else
old_load_file(*args)
end
end
end
end
end
begin
require 'puppet/confine/exists'
class Confine::Exists < Puppet::Confine
old_pass = instance_method(:pass?)
define_method(:pass?) do |value|
if RSpec::Puppet.rspec_puppet_example?
true
else
old_pass.bind(self).call(value)
end
end
end
rescue LoadError
require 'puppet/provider/confine/exists'
class Provider::Confine::Exists < Puppet::Provider::Confine
old_pass = instance_method(:pass?)
define_method(:pass?) do |value|
if RSpec::Puppet.rspec_puppet_example?
true
else
old_pass.bind(self).call(value)
end
end
end
end
end
class Pathname
  # Windows-tolerant basename used when pathname stubbing is enabled:
  # strips a drive-letter prefix (e.g. "C:") before splitting on the
  # separator pattern.
  # @raise [ArgumentError] unless RSpec's enable_pathname_stubbing is set
  def rspec_puppet_basename(path)
    raise ArgumentError, 'pathname stubbing not enabled' unless RSpec.configuration.enable_pathname_stubbing
    if path =~ /\A[a-zA-Z]:(#{SEPARATOR_PAT}.*)\z/
      path = path[2..-1]
    end
    path.split(SEPARATOR_PAT).last || path[/(#{SEPARATOR_PAT})/, 1] || path
  end

  # NOTE(review): Module#instance_methods returns Symbols on Ruby >= 1.9,
  # so this String membership test never matches there and the
  # chop_basename patch is never installed -- confirm whether that is
  # intentional (compare the [:sym, 'string'] guard used for
  # Puppet::Module#match_manifests below).
  if instance_methods.include?("chop_basename")
    old_chop_basename = instance_method(:chop_basename)
    define_method(:chop_basename) do |path|
      if RSpec::Puppet.rspec_puppet_example?
        if RSpec.configuration.enable_pathname_stubbing
          base = rspec_puppet_basename(path)
          # A bare separator (or empty) basename means nothing to chop.
          if /\A#{SEPARATOR_PAT}?\z/o =~ base
            return nil
          else
            # Return [directory-part, basename], mirroring the original.
            return path[0, path.rindex(base)], base
          end
        else
          old_chop_basename.bind(self).call(path)
        end
      else
        old_chop_basename.bind(self).call(path)
      end
    end
  end
end
# Puppet loads init.pp, then foo.pp, to find class "mod::foo". If
# class "mod" has been mocked using pre_condition when testing
# "mod::foo", this causes duplicate declaration for "mod".
# This monkey patch only loads "init.pp" if "foo.pp" does not exist.
class Puppet::Module
  # Guard handles both Symbol (Ruby >= 1.9) and String (1.8) method lists.
  if [:match_manifests, 'match_manifests'].any? { |r| instance_methods.include?(r) }
    old_match_manifests = instance_method(:match_manifests)
    # Drop a leading init.pp from multi-file matches so that a class
    # mocked via pre_condition does not get declared twice (see the
    # explanation in the comment above this class).
    define_method(:match_manifests) do |rest|
      result = old_match_manifests.bind(self).call(rest)
      if result.length > 1 && File.basename(result[0]) == 'init.pp'
        result.shift
      end
      result
    end
  end
end
# Prevent the File type from munging paths (which uses File.expand_path to
# normalise paths, which does very bad things to *nix paths on Windows.
# Capture the original munge so it can still be applied outside examples.
file_path_munge = Puppet::Type.type(:file).paramclass(:path).instance_method(:unsafe_munge)
Puppet::Type.type(:file).paramclass(:path).munge do |value|
  # In rspec-puppet examples, pass paths through untouched: the real
  # munge normalises via File.expand_path, which mangles *nix paths when
  # the tests run on Windows.
  if RSpec::Puppet.rspec_puppet_example?
    value
  else
    file_path_munge.bind(self).call(value)
  end
end
# Prevent the Exec type from validating the user. This parameter isn't
# supported under Windows at all and only under *nix when the current user is
# root.
# Capture the original validation so it still runs outside examples.
exec_user_validate = Puppet::Type.type(:exec).paramclass(:user).instance_method(:unsafe_validate)
Puppet::Type.type(:exec).paramclass(:user).validate do |value|
  # Skip user validation inside examples: the parameter is unsupported
  # on Windows and requires root on *nix (see the comment above).
  if RSpec::Puppet.rspec_puppet_example?
    true
  else
    exec_user_validate.bind(self).call(value)
  end
end
# Stub out Puppet::Util::Windows::Security.supports_acl? if it has been
# defined. This check only makes sense when applying the catalogue to a host
# and so can be safely stubbed out for unit testing.
# Stub out ACL/symlink capability checks on the windows file provider.
# These only matter when actually applying a catalogue, so they can
# safely report true during unit testing.
Puppet::Type.type(:file).provide(:windows).class_eval do
  # BUGFIX: the previous version used plain `def`, so the `old_*` locals
  # captured here were out of scope inside the method bodies (NameError),
  # and both fallbacks called `.call(value)` where no `value` existed.
  # define_method closes over the locals, and the correct arguments are
  # forwarded. The guard also uses method_defined? (instance methods)
  # rather than respond_to? (class methods).
  old_supports_acl = instance_method(:supports_acl?) if method_defined?(:supports_acl?)
  define_method(:supports_acl?) do |path|
    if RSpec::Puppet.rspec_puppet_example?
      true
    else
      old_supports_acl.bind(self).call(path)
    end
  end

  old_manages_symlinks = instance_method(:manages_symlinks?) if method_defined?(:manages_symlinks?)
  define_method(:manages_symlinks?) do
    if RSpec::Puppet.rspec_puppet_example?
      true
    else
      old_manages_symlinks.bind(self).call
    end
  end
end
# Prevent Puppet from requiring 'puppet/util/windows' if we're pretending to be
# windows, otherwise it will require other libraries that probably won't be
# available on non-windows hosts.
module Kernel
  alias :old_require :require
  # When pretending to be Windows inside an rspec-puppet example, swallow
  # requires of Windows-only libraries that would fail to load on a
  # non-Windows host; every other path goes through the real require.
  def require(path)
    return if (['puppet/util/windows', 'win32/registry'].include?(path)) && RSpec::Puppet.rspec_puppet_example? && Puppet::Util::Platform.pretend_windows?
    old_require(path)
  end
end
|
require 'spec_helper'
# Model spec for Contact: presence/uniqueness validations and #name.
describe Contact do
  it "is valid with a firstname, lastname and email" do
    contact = Contact.new(
      firstname: 'Aaron',
      lastname: 'Sumner',
      email: 'tester@exemple.com')
    expect(contact).to be_valid
  end
  it "is invalid without a firstname" do
    expect(Contact.new(firstname: nil)).to have(1).errors_on(:firstname)
  end
  it "is invalid without a lastname" do
    expect(Contact.new(lastname: nil)).to have(1).errors_on(:lastname)
  end
  # Previously a pending example; implemented to mirror the other
  # presence validations.
  it "is invalid without an email address" do
    expect(Contact.new(email: nil)).to have(1).errors_on(:email)
  end
  it "is invalid with a duplicate email address" do
    Contact.create(
      firstname: 'Joe', lastname: 'Tester',
      email: 'tester@example.com')
    contact = Contact.create(
      firstname: 'Jane', lastname: 'Tester',
      email: 'tester@example.com')
    expect(contact).to have(1).errors_on(:email)
  end
  it "returns a contact's full name as a string" do
    contact = Contact.new(firstname: 'John', lastname: 'Doe',
      email: 'johndoe@example.com')
    expect(contact.name).to eq 'John Doe'
  end
end
Commit: add a test case asserting that a contact is invalid without an email address.
require 'spec_helper'
describe Contact do
it "is valid with a firstname, lastname and email" do
contact = Contact.new(
firstname: 'Aaron',
lastname: 'Sumner',
email: 'tester@exemple.com')
expect(contact).to be_valid
end
it "is invalid without a firstname" do
expect(Contact.new(firstname: nil)).to have(1).errors_on(:firstname)
end
it "is invalid without a lastname" do
expect(Contact.new(lastname: nil)).to have(1).errors_on(:lastname)
end
it "is invalid without an email address" do
expect(Contact.new(email: nil)).to have(1).errors_on(:email)
end
it "is invalid with a duplicate email address" do
Contact.create(
firstname: 'Joe', lastname: 'Tester',
email: 'tester@example.com')
contact = Contact.create(
firstname: 'Jane', lastname: 'Tester',
email: 'tester@example.com')
expect(contact).to have(1).errors_on(:email)
end
it "returns a contact's full name as a string" do
contact = Contact.new(firstname: 'John', lastname: 'Doe',
email: 'johndoe@example.com')
expect(contact.name).to eq 'John Doe'
end
end |
# Namespace for the rspec-parameterized gem.
module RSpec
  module Parameterized
    # Current release version of the gem.
    VERSION = '0.0.4'
  end
end
Commit: bump the gem version to 0.0.5.
# Namespace for the rspec-parameterized gem.
module RSpec
  module Parameterized
    # Current release version of the gem.
    VERSION = '0.0.5'
  end
end
|
require 'spec_helper'
describe Dataset do
let(:schema) { schemas(:default) }
let(:other_schema) { schemas(:other_schema) }
let(:dataset) { datasets(:default_table) }
it_behaves_like "a notable model" do
let!(:note) do
Events::NoteOnDataset.create!({
:actor => users(:owner),
:dataset => model,
:body => "This is the body"
}, :as => :create)
end
let!(:model) { dataset }
end
it_should_behave_like "taggable models", [:datasets, :table]
describe "associations" do
it { should belong_to(:scoped_schema).class_name('Schema') }
it { should have_many :activities }
it { should have_many :events }
it { should have_many :notes }
it { should have_many :comments }
describe "#schema" do
it "returns the schema even if it is deleted" do
any_instance_of(GreenplumConnection) do |data_source|
stub(data_source).running? { false }
end
dataset.schema.should == schema
schema.destroy
dataset.reload.schema.should == schema
end
end
describe 'associable?' do
let(:dataset) { Dataset.new }
it 'raises NotImplemented' do
expect { dataset.associable? }.to raise_error(NotImplementedError)
end
end
end
describe "workspace association" do
let(:workspace) { workspaces(:public) }
let(:dataset) { datasets(:source_table) }
before do
dataset.bound_workspaces = []
dataset.bound_workspaces << workspace
end
it "can be bound to workspaces" do
dataset.bound_workspaces.should include workspace
end
end
describe "validations" do
it { should validate_presence_of :scoped_schema }
it { should validate_presence_of :name }
it "validates uniqueness of name in the database" do
duplicate_dataset = GpdbTable.new
duplicate_dataset.schema = dataset.schema
duplicate_dataset.name = dataset.name
expect {
duplicate_dataset.save!(:validate => false)
}.to raise_error(ActiveRecord::RecordNotUnique)
end
it "does not bother validating uniqueness of name in the database if the record is deleted" do
duplicate_dataset = GpdbTable.new
duplicate_dataset.schema = dataset.schema
duplicate_dataset.name = dataset.name
duplicate_dataset.deleted_at = Time.current
duplicate_dataset.save(:validate => false).should be_true
end
it "validates uniqueness of name, scoped to schema id" do
duplicate_dataset = GpdbTable.new
duplicate_dataset.schema = dataset.schema
duplicate_dataset.name = dataset.name
duplicate_dataset.should have_at_least(1).error_on(:name)
duplicate_dataset.schema = other_schema
duplicate_dataset.should have(:no).errors_on(:name)
end
it "validates uniqueness of name, scoped to type" do
duplicate_dataset = ChorusView.new
duplicate_dataset.name = dataset.name
duplicate_dataset.schema = dataset.schema
duplicate_dataset.should have(:no).errors_on(:name)
end
it "validate uniqueness of name, scoped to deleted_at" do
duplicate_dataset = GpdbTable.new
duplicate_dataset.name = dataset.name
duplicate_dataset.schema = dataset.schema
duplicate_dataset.should have_at_least(1).error_on(:name)
duplicate_dataset.deleted_at = Time.current
duplicate_dataset.should have(:no).errors_on(:name)
end
end
describe ".with_name_like" do
it "matches anywhere in the name, regardless of case" do
dataset.update_attributes!({:name => "amatCHingtable"}, :without_protection => true)
Dataset.with_name_like("match").count.should == 1
Dataset.with_name_like("MATCH").count.should == 1
end
it "returns all objects if name is not provided" do
Dataset.with_name_like(nil).count.should == Dataset.count
end
it "does not treat special characters as wildcards" do
dataset.update_attributes!({:name => "amat_%ingtable"}, :without_protection => true)
Dataset.with_name_like("t_%i").count.should == 1
#Dataset.with_name_like("_m").count.should == 0
end
it "is a joinable query" do
# Regression test in case query provides ambiguous column references
workspace = workspaces(:public)
expect {
workspace.datasets(users(:owner), {
:name_filter => 'match',
:database_id => workspace.sandbox.database
})
}.not_to raise_error
end
end
describe ".filter_by_name" do
let(:second_dataset) {
GpdbTable.new({:name => 'rails_only_table', :scoped_schema => schema}, :without_protection => true)
}
let(:dataset_list) {
[dataset, second_dataset]
}
it "matches anywhere in the name, regardless of case" do
dataset.update_attributes!({:name => "amatCHingtable"}, :without_protection => true)
Dataset.filter_by_name(dataset_list, "match").count.should == 1
Dataset.filter_by_name(dataset_list, "MATCH").count.should == 1
end
it "returns all objects if name is not provided" do
Dataset.filter_by_name(dataset_list, nil).count.should == dataset_list.count
end
end
describe "#all_rows_sql" do
it "returns the correct sql" do
dataset = datasets(:default_table)
dataset.all_rows_sql.strip.should == %Q{SELECT * FROM "#{dataset.name}"}
end
context "with a limit" do
it "uses the limit" do
dataset = datasets(:default_table)
dataset.all_rows_sql(10).should match "LIMIT 10"
end
end
end
describe ".find_and_verify_in_source" do
let(:user) { users(:owner) }
let(:dataset) { datasets(:default_table) }
before do
stub(Dataset).find(dataset.id) { dataset }
end
context 'when it exists in the source database' do
before do
mock(dataset).verify_in_source(user) { true }
end
it 'returns the dataset' do
described_class.find_and_verify_in_source(dataset.id, user).should == dataset
end
end
context 'when it does not exist in Greenplum' do
before do
mock(dataset).verify_in_source(user) { false }
end
it 'raises ActiveRecord::RecordNotFound' do
expect {
described_class.find_and_verify_in_source(dataset.id, user)
}.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
describe "caching" do
before do
mock(Rails.cache).fetch({:model_class => "GpdbTable", :model_id => dataset.id, :key => cache_key}, :expires_in => 60.seconds)
end
describe "#column_data" do
let(:cache_key) { :column_data }
it "uses caching" do
dataset.column_data
end
end
describe "#table_description" do
let(:cache_key) { :table_description }
it "uses caching" do
dataset.table_description
end
end
end
describe "destroy" do
context "with imports" do
let(:dataset) { datasets(:default_table) }
it "cancels the import" do
unfinished_imports = dataset.imports.unfinished
stub(dataset.imports).unfinished { unfinished_imports }
unfinished_imports.should_not be_empty
unfinished_imports.each do |import|
mock(import).cancel(false, "Source/Destination of this import was deleted")
end
dataset.destroy
end
end
end
describe "list_order" do
  # postgres loves to order by update order, it's like that
  it "sorts by id as a secondary sort, and not by update order" do
    # Touch every row in reverse-id order so a naive "update order"
    # sort would come back descending.
    Dataset.order("id desc").all.each_with_index do |dataset, i|
      Dataset.where(:id => dataset.id).update_all(:name => 'billy', :schema_id => i)
    end
    ids = Dataset.list_order.collect &:id
    ids.should == ids.sort
    # NOTE(review): this User.create appears unrelated to list ordering
    # (possible leftover debugging) -- confirm whether it can be removed.
    User.create
  end
end
it_should_behave_like "taggable models", [:datasets, :default_table]
it_behaves_like 'a soft deletable model' do
let(:model) { dataset }
end
end
Commit: rename the dataset fixture from :table to :default_table throughout the spec.
require 'spec_helper'
describe Dataset do
let(:schema) { schemas(:default) }
let(:other_schema) { schemas(:other_schema) }
let(:dataset) { datasets(:default_table) }
it_behaves_like "a notable model" do
let!(:note) do
Events::NoteOnDataset.create!({
:actor => users(:owner),
:dataset => model,
:body => "This is the body"
}, :as => :create)
end
let!(:model) { dataset }
end
it_should_behave_like "taggable models", [:datasets, :default_table]
describe "associations" do
it { should belong_to(:scoped_schema).class_name('Schema') }
it { should have_many :activities }
it { should have_many :events }
it { should have_many :notes }
it { should have_many :comments }
describe "#schema" do
it "returns the schema even if it is deleted" do
any_instance_of(GreenplumConnection) do |data_source|
stub(data_source).running? { false }
end
dataset.schema.should == schema
schema.destroy
dataset.reload.schema.should == schema
end
end
describe 'associable?' do
let(:dataset) { Dataset.new }
it 'raises NotImplemented' do
expect { dataset.associable? }.to raise_error(NotImplementedError)
end
end
end
describe "workspace association" do
let(:workspace) { workspaces(:public) }
let(:dataset) { datasets(:source_table) }
before do
dataset.bound_workspaces = []
dataset.bound_workspaces << workspace
end
it "can be bound to workspaces" do
dataset.bound_workspaces.should include workspace
end
end
describe "validations" do
it { should validate_presence_of :scoped_schema }
it { should validate_presence_of :name }
it "validates uniqueness of name in the database" do
duplicate_dataset = GpdbTable.new
duplicate_dataset.schema = dataset.schema
duplicate_dataset.name = dataset.name
expect {
duplicate_dataset.save!(:validate => false)
}.to raise_error(ActiveRecord::RecordNotUnique)
end
it "does not bother validating uniqueness of name in the database if the record is deleted" do
duplicate_dataset = GpdbTable.new
duplicate_dataset.schema = dataset.schema
duplicate_dataset.name = dataset.name
duplicate_dataset.deleted_at = Time.current
duplicate_dataset.save(:validate => false).should be_true
end
it "validates uniqueness of name, scoped to schema id" do
duplicate_dataset = GpdbTable.new
duplicate_dataset.schema = dataset.schema
duplicate_dataset.name = dataset.name
duplicate_dataset.should have_at_least(1).error_on(:name)
duplicate_dataset.schema = other_schema
duplicate_dataset.should have(:no).errors_on(:name)
end
it "validates uniqueness of name, scoped to type" do
duplicate_dataset = ChorusView.new
duplicate_dataset.name = dataset.name
duplicate_dataset.schema = dataset.schema
duplicate_dataset.should have(:no).errors_on(:name)
end
it "validate uniqueness of name, scoped to deleted_at" do
duplicate_dataset = GpdbTable.new
duplicate_dataset.name = dataset.name
duplicate_dataset.schema = dataset.schema
duplicate_dataset.should have_at_least(1).error_on(:name)
duplicate_dataset.deleted_at = Time.current
duplicate_dataset.should have(:no).errors_on(:name)
end
end
describe ".with_name_like" do
it "matches anywhere in the name, regardless of case" do
dataset.update_attributes!({:name => "amatCHingtable"}, :without_protection => true)
Dataset.with_name_like("match").count.should == 1
Dataset.with_name_like("MATCH").count.should == 1
end
it "returns all objects if name is not provided" do
Dataset.with_name_like(nil).count.should == Dataset.count
end
it "does not treat special characters as wildcards" do
dataset.update_attributes!({:name => "amat_%ingtable"}, :without_protection => true)
Dataset.with_name_like("t_%i").count.should == 1
#Dataset.with_name_like("_m").count.should == 0
end
it "is a joinable query" do
# Regression test in case query provides ambiguous column references
workspace = workspaces(:public)
expect {
workspace.datasets(users(:owner), {
:name_filter => 'match',
:database_id => workspace.sandbox.database
})
}.not_to raise_error
end
end
describe ".filter_by_name" do
let(:second_dataset) {
GpdbTable.new({:name => 'rails_only_table', :scoped_schema => schema}, :without_protection => true)
}
let(:dataset_list) {
[dataset, second_dataset]
}
it "matches anywhere in the name, regardless of case" do
dataset.update_attributes!({:name => "amatCHingtable"}, :without_protection => true)
Dataset.filter_by_name(dataset_list, "match").count.should == 1
Dataset.filter_by_name(dataset_list, "MATCH").count.should == 1
end
it "returns all objects if name is not provided" do
Dataset.filter_by_name(dataset_list, nil).count.should == dataset_list.count
end
end
describe "#all_rows_sql" do
it "returns the correct sql" do
dataset = datasets(:default_table)
dataset.all_rows_sql.strip.should == %Q{SELECT * FROM "#{dataset.name}"}
end
context "with a limit" do
it "uses the limit" do
dataset = datasets(:default_table)
dataset.all_rows_sql(10).should match "LIMIT 10"
end
end
end
describe ".find_and_verify_in_source" do
let(:user) { users(:owner) }
let(:dataset) { datasets(:default_table) }
before do
stub(Dataset).find(dataset.id) { dataset }
end
context 'when it exists in the source database' do
before do
mock(dataset).verify_in_source(user) { true }
end
it 'returns the dataset' do
described_class.find_and_verify_in_source(dataset.id, user).should == dataset
end
end
context 'when it does not exist in Greenplum' do
before do
mock(dataset).verify_in_source(user) { false }
end
it 'raises ActiveRecord::RecordNotFound' do
expect {
described_class.find_and_verify_in_source(dataset.id, user)
}.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
describe "caching" do
before do
mock(Rails.cache).fetch({:model_class => "GpdbTable", :model_id => dataset.id, :key => cache_key}, :expires_in => 60.seconds)
end
describe "#column_data" do
let(:cache_key) { :column_data }
it "uses caching" do
dataset.column_data
end
end
describe "#table_description" do
let(:cache_key) { :table_description }
it "uses caching" do
dataset.table_description
end
end
end
describe "destroy" do
context "with imports" do
let(:dataset) { datasets(:default_table) }
it "cancels the import" do
unfinished_imports = dataset.imports.unfinished
stub(dataset.imports).unfinished { unfinished_imports }
unfinished_imports.should_not be_empty
unfinished_imports.each do |import|
mock(import).cancel(false, "Source/Destination of this import was deleted")
end
dataset.destroy
end
end
end
describe "list_order" do
  # postgres loves to order by update order, it's like that
  it "sorts by id as a secondary sort, and not by update order" do
    # Touch every row in reverse-id order so a naive "update order"
    # sort would come back descending.
    Dataset.order("id desc").all.each_with_index do |dataset, i|
      Dataset.where(:id => dataset.id).update_all(:name => 'billy', :schema_id => i)
    end
    ids = Dataset.list_order.collect &:id
    ids.should == ids.sort
    # NOTE(review): this User.create appears unrelated to list ordering
    # (possible leftover debugging) -- confirm whether it can be removed.
    User.create
  end
end
it_should_behave_like "taggable models", [:datasets, :default_table]
it_behaves_like 'a soft deletable model' do
let(:model) { dataset }
end
end |
require 'json'
require 'ostruct'
require 'redis'
require 'uri'
require 'rspec/core/formatters/base_formatter'
require 'rspectacles/config'
module RSpectacles
  # RSpec formatter that streams run progress into Redis: every message
  # is published on a pub/sub channel (for live listeners) and pushed
  # onto a list that persists the most recent run.
  class RedisFormatter < RSpec::Core::Formatters::BaseFormatter
    attr_accessor :redis

    class << self
      # Gem-wide configuration (redis URI, channel and key names).
      def config
        RSpectacles.config
      end
    end

    # NOTE(review): does not call super(output), so BaseFormatter's own
    # initialisation is skipped -- confirm this is intentional.
    def initialize(output)
      uri = URI.parse config.redis_uri
      self.redis = Redis.new host: uri.host, port: uri.port, password: uri.password
    end

    def message(message)
      log "message:#{message}"
    end

    # Clears the persisted previous run before logging new results.
    def start(example_count)
      log 'status:start'
      redis.del config.last_run_primary_key
    end

    def stop
      log 'status:stop'
    end

    def example_started(example)
    end

    def example_passed(example)
      log_formatted example
    end

    def example_pending(example)
      log_formatted example
    end

    def example_failed(example)
      log_formatted example
    end

    def close
    end

    private

    # Instance-side shortcut to the class-level configuration.
    def config
      self.class.config
    end

    # Publish to the live channel and persist on the last-run list.
    def log(message)
      redis.publish config.pubsub_channel_name, message
      redis.lpush config.last_run_primary_key, message
    end

    # Serialise an example and log it (same publish/lpush pair as #log).
    def log_formatted(example)
      message = format_example(example)
      redis.publish config.pubsub_channel_name, message
      redis.lpush config.last_run_primary_key, message
    end

    # JSON summary of one example; relies on Hash#to_json from the json
    # library (required at the top of the file).
    def format_example(example)
      {
        :description => example.description,
        :full_description => example.full_description,
        :status => example.execution_result[:status],
        :duration => example.execution_result[:run_time],
        :file_path => example.metadata[:file_path],
        :line_number => example.metadata[:line_number]
      }.to_json
    end
  end
end
Commit: require 'json' so that Hash#to_json is available to the formatter.
require 'rspec/core/formatters/base_formatter'
require 'rspectacles/config'
require 'ostruct'
require 'redis'
require 'uri'
require 'json'
module RSpectacles
  # RSpec formatter that streams run progress into Redis: every message
  # is published on a pub/sub channel (for live listeners) and pushed
  # onto a list that persists the most recent run.
  class RedisFormatter < RSpec::Core::Formatters::BaseFormatter
    attr_accessor :redis

    class << self
      # Gem-wide configuration (redis URI, channel and key names).
      def config
        RSpectacles.config
      end
    end

    # NOTE(review): does not call super(output), so BaseFormatter's own
    # initialisation is skipped -- confirm this is intentional.
    def initialize(output)
      uri = URI.parse config.redis_uri
      self.redis = Redis.new host: uri.host, port: uri.port, password: uri.password
    end

    def message(message)
      log "message:#{message}"
    end

    # Clears the persisted previous run before logging new results.
    def start(example_count)
      log 'status:start'
      redis.del config.last_run_primary_key
    end

    def stop
      log 'status:stop'
    end

    def example_started(example)
    end

    def example_passed(example)
      log_formatted example
    end

    def example_pending(example)
      log_formatted example
    end

    def example_failed(example)
      log_formatted example
    end

    def close
    end

    private

    # Instance-side shortcut to the class-level configuration.
    def config
      self.class.config
    end

    # Publish to the live channel and persist on the last-run list.
    def log(message)
      redis.publish config.pubsub_channel_name, message
      redis.lpush config.last_run_primary_key, message
    end

    # DRY fix: previously duplicated #log's publish/lpush pair verbatim;
    # now serialises the example and routes through #log.
    def log_formatted(example)
      log format_example(example)
    end

    # JSON summary of one example (requires the json library).
    def format_example(example)
      {
        :description => example.description,
        :full_description => example.full_description,
        :status => example.execution_result[:status],
        :duration => example.execution_result[:run_time],
        :file_path => example.metadata[:file_path],
        :line_number => example.metadata[:line_number]
      }.to_json
    end
  end
end
|
require 'spec_helper'
describe Service, models: true do
describe "Associations" do
it { is_expected.to belong_to :project }
it { is_expected.to have_one :service_hook }
end
describe 'Validations' do
it { is_expected.to validate_presence_of(:type) }
end
describe "Test Button" do
describe '#can_test?' do
let(:service) { create(:service, project: project) }
context 'when repository is not empty' do
let(:project) { create(:project, :repository) }
it 'returns true' do
expect(service.can_test?).to be true
end
end
context 'when repository is empty' do
let(:project) { create(:empty_project) }
it 'returns true' do
expect(service.can_test?).to be true
end
end
end
describe '#test' do
let(:data) { 'test' }
let(:service) { create(:service, project: project) }
context 'when repository is not empty' do
let(:project) { create(:project, :repository) }
it 'test runs execute' do
expect(service).to receive(:execute).with(data)
service.test(data)
end
end
context 'when repository is empty' do
let(:project) { create(:empty_project) }
it 'test runs execute' do
expect(service).to receive(:execute).with(data)
service.test(data)
end
end
end
end
describe "Template" do
describe "for pushover service" do
let!(:service_template) do
PushoverService.create(
template: true,
properties: {
device: 'MyDevice',
sound: 'mic',
priority: 4,
api_key: '123456789'
})
end
let(:project) { create(:empty_project) }
describe 'is prefilled for projects pushover service' do
it "has all fields prefilled" do
service = project.find_or_initialize_service('pushover')
expect(service.template).to eq(false)
expect(service.device).to eq('MyDevice')
expect(service.sound).to eq('mic')
expect(service.priority).to eq(4)
expect(service.api_key).to eq('123456789')
end
end
end
end
describe "{property}_changed?" do
let(:service) do
BambooService.create(
project: create(:empty_project),
properties: {
bamboo_url: 'http://gitlab.com',
username: 'mic',
password: "password"
}
)
end
it "returns false when the property has not been assigned a new value" do
service.username = "key_changed"
expect(service.bamboo_url_changed?).to be_falsy
end
it "returns true when the property has been assigned a different value" do
service.bamboo_url = "http://example.com"
expect(service.bamboo_url_changed?).to be_truthy
end
it "returns true when the property has been assigned a different value twice" do
service.bamboo_url = "http://example.com"
service.bamboo_url = "http://example.com"
expect(service.bamboo_url_changed?).to be_truthy
end
it "returns false when the property has been re-assigned the same value" do
service.bamboo_url = 'http://gitlab.com'
expect(service.bamboo_url_changed?).to be_falsy
end
it "returns false when the property has been assigned a new value then saved" do
service.bamboo_url = 'http://example.com'
service.save
expect(service.bamboo_url_changed?).to be_falsy
end
end
# Specs for the dynamically-defined {property}_touched? predicate.
describe "{property}_touched?" do
  let(:service) do
    BambooService.create(
      project: create(:empty_project),
      properties: {
        bamboo_url: 'http://gitlab.com',
        username: 'mic',
        password: "password"
      }
    )
  end

  it "returns false when the property has not been assigned a new value" do
    service.username = "key_changed"
    expect(service.bamboo_url_touched?).to be_falsy
  end

  it "returns true when the property has been assigned a different value" do
    service.bamboo_url = "http://example.com"
    expect(service.bamboo_url_touched?).to be_truthy
  end

  it "returns true when the property has been assigned a different value twice" do
    service.bamboo_url = "http://example.com"
    service.bamboo_url = "http://example.com"
    expect(service.bamboo_url_touched?).to be_truthy
  end

  it "returns true when the property has been re-assigned the same value" do
    service.bamboo_url = 'http://gitlab.com'
    expect(service.bamboo_url_touched?).to be_truthy
  end

  it "returns false when the property has been assigned a new value then saved" do
    service.bamboo_url = 'http://example.com'
    service.save
    # Was asserting bamboo_url_changed? -- a copy-paste from the
    # "{property}_changed?" describe; this block covers _touched?.
    expect(service.bamboo_url_touched?).to be_falsy
  end
end
describe "{property}_was" do
let(:service) do
BambooService.create(
project: create(:empty_project),
properties: {
bamboo_url: 'http://gitlab.com',
username: 'mic',
password: "password"
}
)
end
it "returns nil when the property has not been assigned a new value" do
service.username = "key_changed"
expect(service.bamboo_url_was).to be_nil
end
it "returns the previous value when the property has been assigned a different value" do
service.bamboo_url = "http://example.com"
expect(service.bamboo_url_was).to eq('http://gitlab.com')
end
it "returns initial value when the property has been re-assigned the same value" do
service.bamboo_url = 'http://gitlab.com'
expect(service.bamboo_url_was).to eq('http://gitlab.com')
end
it "returns initial value when the property has been assigned multiple values" do
service.bamboo_url = "http://example.com"
service.bamboo_url = "http://example2.com"
expect(service.bamboo_url_was).to eq('http://gitlab.com')
end
it "returns nil when the property has been assigned a new value then saved" do
service.bamboo_url = 'http://example.com'
service.save
expect(service.bamboo_url_was).to be_nil
end
end
describe 'initialize service with no properties' do
let(:service) do
GitlabIssueTrackerService.create(
project: create(:empty_project),
title: 'random title'
)
end
it 'does not raise error' do
expect { service }.not_to raise_error
end
it 'creates the properties' do
expect(service.properties).to eq({ "title" => "random title" })
end
end
describe "callbacks" do
let(:project) { create(:empty_project) }
let!(:service) do
RedmineService.new(
project: project,
active: true,
properties: {
project_url: 'http://redmine/projects/project_name_in_redmine',
issues_url: "http://redmine/#{project.id}/project_name_in_redmine/:id",
new_issue_url: 'http://redmine/projects/project_name_in_redmine/issues/new'
}
)
end
describe "on create" do
it "updates the has_external_issue_tracker boolean" do
expect do
service.save!
end.to change { service.project.has_external_issue_tracker }.from(false).to(true)
end
end
describe "on update" do
it "updates the has_external_issue_tracker boolean" do
service.save!
expect do
service.update_attributes(active: false)
end.to change { service.project.has_external_issue_tracker }.from(true).to(false)
end
end
end
end
Commit: add the Service model spec covering associations, validations, templates, and property change tracking.
require 'spec_helper'
describe Service, models: true do
describe "Associations" do
it { is_expected.to belong_to :project }
it { is_expected.to have_one :service_hook }
end
describe 'Validations' do
it { is_expected.to validate_presence_of(:type) }
end
describe "Test Button" do
describe '#can_test?' do
let(:service) { create(:service, project: project) }
context 'when repository is not empty' do
let(:project) { create(:project, :repository) }
it 'returns true' do
expect(service.can_test?).to be true
end
end
context 'when repository is empty' do
let(:project) { create(:empty_project) }
it 'returns true' do
expect(service.can_test?).to be true
end
end
end
describe '#test' do
let(:data) { 'test' }
let(:service) { create(:service, project: project) }
context 'when repository is not empty' do
let(:project) { create(:project, :repository) }
it 'test runs execute' do
expect(service).to receive(:execute).with(data)
service.test(data)
end
end
context 'when repository is empty' do
let(:project) { create(:empty_project) }
it 'test runs execute' do
expect(service).to receive(:execute).with(data)
service.test(data)
end
end
end
end
# A template service's properties should prefill newly initialized
# per-project services of the same type (with template reset to false).
describe "Template" do
describe "for pushover service" do
let!(:service_template) do
PushoverService.create(
template: true,
properties: {
device: 'MyDevice',
sound: 'mic',
priority: 4,
api_key: '123456789'
})
end
let(:project) { create(:empty_project) }
describe 'is prefilled for projects pushover service' do
it "has all fields prefilled" do
service = project.find_or_initialize_service('pushover')
expect(service.template).to eq(false)
expect(service.device).to eq('MyDevice')
expect(service.sound).to eq('mic')
expect(service.priority).to eq(4)
expect(service.api_key).to eq('123456789')
end
end
end
end
# Dirty-tracking helpers generated for each serialized property.
describe "{property}_changed?" do
let(:service) do
BambooService.create(
project: create(:empty_project),
properties: {
bamboo_url: 'http://gitlab.com',
username: 'mic',
password: "password"
}
)
end
it "returns false when the property has not been assigned a new value" do
service.username = "key_changed"
expect(service.bamboo_url_changed?).to be_falsy
end
it "returns true when the property has been assigned a different value" do
service.bamboo_url = "http://example.com"
expect(service.bamboo_url_changed?).to be_truthy
end
it "returns true when the property has been assigned a different value twice" do
service.bamboo_url = "http://example.com"
service.bamboo_url = "http://example.com"
expect(service.bamboo_url_changed?).to be_truthy
end
it "returns false when the property has been re-assigned the same value" do
service.bamboo_url = 'http://gitlab.com'
expect(service.bamboo_url_changed?).to be_falsy
end
it "returns false when the property has been assigned a new value then saved" do
service.bamboo_url = 'http://example.com'
service.save
expect(service.bamboo_url_changed?).to be_falsy
end
end
# Unlike _changed?, _touched? is expected to be true even when a property is
# re-assigned its current value (see the "re-assigned the same value" example).
describe "{property}_touched?" do
let(:service) do
BambooService.create(
project: create(:empty_project),
properties: {
bamboo_url: 'http://gitlab.com',
username: 'mic',
password: "password"
}
)
end
it "returns false when the property has not been assigned a new value" do
service.username = "key_changed"
expect(service.bamboo_url_touched?).to be_falsy
end
it "returns true when the property has been assigned a different value" do
service.bamboo_url = "http://example.com"
expect(service.bamboo_url_touched?).to be_truthy
end
it "returns true when the property has been assigned a different value twice" do
service.bamboo_url = "http://example.com"
service.bamboo_url = "http://example.com"
expect(service.bamboo_url_touched?).to be_truthy
end
it "returns true when the property has been re-assigned the same value" do
service.bamboo_url = 'http://gitlab.com'
expect(service.bamboo_url_touched?).to be_truthy
end
it "returns false when the property has been assigned a new value then saved" do
service.bamboo_url = 'http://example.com'
service.save
# NOTE(review): this example asserts bamboo_url_changed? inside the
# "{property}_touched?" describe — looks like a copy-paste from the
# _changed? block above; confirm whether bamboo_url_touched? was intended.
expect(service.bamboo_url_changed?).to be_falsy
end
end
# {property}_was should report the persisted value while unsaved changes are
# pending, and nil once there is no pending change.
describe "{property}_was" do
let(:service) do
BambooService.create(
project: create(:empty_project),
properties: {
bamboo_url: 'http://gitlab.com',
username: 'mic',
password: "password"
}
)
end
it "returns nil when the property has not been assigned a new value" do
service.username = "key_changed"
expect(service.bamboo_url_was).to be_nil
end
it "returns the previous value when the property has been assigned a different value" do
service.bamboo_url = "http://example.com"
expect(service.bamboo_url_was).to eq('http://gitlab.com')
end
it "returns initial value when the property has been re-assigned the same value" do
service.bamboo_url = 'http://gitlab.com'
expect(service.bamboo_url_was).to eq('http://gitlab.com')
end
it "returns initial value when the property has been assigned multiple values" do
service.bamboo_url = "http://example.com"
service.bamboo_url = "http://example2.com"
expect(service.bamboo_url_was).to eq('http://gitlab.com')
end
it "returns nil when the property has been assigned a new value then saved" do
service.bamboo_url = 'http://example.com'
service.save
expect(service.bamboo_url_was).to be_nil
end
end
# A service created without a properties hash should still initialize and
# expose its attributes through the serialized properties store.
describe 'initialize service with no properties' do
let(:service) do
GitlabIssueTrackerService.create(
project: create(:empty_project),
title: 'random title'
)
end
it 'does not raise error' do
expect { service }.not_to raise_error
end
it 'creates the properties' do
expect(service.properties).to eq({ "title" => "random title" })
end
end
# Saving/updating an external issue-tracker service must keep the project's
# has_external_issue_tracker flag in sync via model callbacks.
describe "callbacks" do
let(:project) { create(:empty_project) }
let!(:service) do
RedmineService.new(
project: project,
active: true,
properties: {
project_url: 'http://redmine/projects/project_name_in_redmine',
issues_url: "http://redmine/#{project.id}/project_name_in_redmine/:id",
new_issue_url: 'http://redmine/projects/project_name_in_redmine/issues/new'
}
)
end
describe "on create" do
it "updates the has_external_issue_tracker boolean" do
expect do
service.save!
end.to change { service.project.has_external_issue_tracker }.from(false).to(true)
end
end
describe "on update" do
it "updates the has_external_issue_tracker boolean" do
service.save!
expect do
service.update_attributes(active: false)
end.to change { service.project.has_external_issue_tracker }.from(true).to(false)
end
end
end
# #update_and_propagate should persist the params and enqueue the
# propagation worker only when the service was (re)activated.
# NOTE(review): no let(:project) is defined in this describe — it relies on
# a definition from an enclosing scope; verify one exists.
describe "#update_and_propagate" do
  let!(:service) do
    RedmineService.new(
      project: project,
      active: false,
      properties: {
        project_url: 'http://redmine/projects/project_name_in_redmine',
        issues_url: "http://redmine/#{project.id}/project_name_in_redmine/:id",
        new_issue_url: 'http://redmine/projects/project_name_in_redmine/issues/new'
      }
    )
  end

  it 'updates the service params successfully and calls the propagation worker' do
    # Fixed typo: `receve` is not an RSpec mocks method and raised
    # NoMethodError instead of setting a message expectation.
    expect(PropagateProjectServiceWorker).to receive(:perform_async)
    expect(service.update_and_propagate(active: true)).to be true
  end

  it 'updates the service params successfully' do
    # Fixed typos: `receve` -> `receive` and `:perform_asyncs` ->
    # `:perform_async`; the negative expectation must name the real worker
    # method, otherwise it passes vacuously.
    expect(PropagateProjectServiceWorker).not_to receive(:perform_async)
    expect(service.update_and_propagate(properties: {})).to be true
  end
end
end
|
# frozen_string_literal: true
# rubocop:disable Layout/LineLength
# == Schema Information
#
# Table name: settings
#
# id :uuid not null, primary key
# default_outgoing_sms_adapter :string(255)
# frontlinecloud_api_key :string(255)
# generic_sms_config :jsonb
# incoming_sms_numbers :text
# incoming_sms_token :string(255)
# override_code :string(255)
# preferred_locales :string(255) not null
# theme :string default("nemo"), not null
# timezone :string(255) not null
# twilio_account_sid :string(255)
# twilio_auth_token :string(255)
# twilio_phone_number :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# mission_id :uuid
#
# Indexes
#
# index_settings_on_mission_id (mission_id) UNIQUE
#
# Foreign Keys
#
# settings_mission_id_fkey (mission_id => missions.id) ON DELETE => restrict ON UPDATE => restrict
#
# rubocop:enable Layout/LineLength
require "rails_helper"
# Spec for the mission Setting model. In this version, load_for_mission
# lazily creates a settings row with defaults when none exists (see the
# "should create one with default values" example).
describe Setting do
let(:setting) { get_mission.setting }
it "serialized locales are always symbols" do
expect(setting.preferred_locales.first.class).to eq(Symbol)
setting.update!(preferred_locales_str: "fr,ar")
expect(setting.preferred_locales.first.class).to eq(Symbol)
end
# NOTE(review): "ar1" is expected to come back as :ar — presumably invalid
# characters are stripped during normalization; confirm against the model.
it "locales with spaces should still be accepted" do
setting.update!(preferred_locales_str: "fr , ar1")
expect(setting.preferred_locales).to eq(%i[fr ar])
end
it "generate override code will generate a new six character code" do
previous_code = setting.override_code
setting.generate_override_code!
expect(previous_code).not_to eq(setting.override_code)
expect(setting.override_code.size).to eq(6)
end
describe "load_for_mission" do
shared_examples_for "load_for_mission" do
context "when there are no existing settings" do
before do
Setting.load_for_mission(mission).destroy
end
it "should create one with default values" do
setting = Setting.load_for_mission(mission)
expect(setting.new_record?).to be_falsey
expect(setting.mission).to eq(mission)
expect(setting.timezone).to eq(Setting::DEFAULT_TIMEZONE)
end
end
context "when a setting exists" do
before { Setting.load_for_mission(mission).update!(preferred_locales: [:fr]) }
it "should load it" do
setting = Setting.load_for_mission(mission)
expect(setting.preferred_locales).to eq([:fr])
end
end
end
context "for null mission" do
let(:mission) { nil }
it_should_behave_like "load_for_mission"
it "should not have an incoming_sms_token", :sms do
setting = Setting.load_for_mission(mission)
expect(setting.incoming_sms_token).to be_nil
end
end
context "for mission" do
let(:mission) { get_mission }
it_should_behave_like "load_for_mission"
it "should have an incoming_sms_token", :sms do
setting = Setting.load_for_mission(mission)
expect(setting.incoming_sms_token).to match(/\A[0-9a-f]{32}\z/)
end
it "should have the same incoming_sms_token after reloading", :sms do
setting = Setting.load_for_mission(mission)
token = setting.incoming_sms_token
setting.reload
expect(setting.incoming_sms_token).to eq(token)
end
it "should have a different incoming_sms_token after calling regenerate_incoming_sms_token!", :sms do
setting = Setting.load_for_mission(mission)
token = setting.incoming_sms_token
setting.regenerate_incoming_sms_token!
expect(setting.incoming_sms_token).not_to eq(token)
end
it "should normalize the twilio_phone_number on save", :sms do
setting = Setting.load_for_mission(mission)
setting.twilio_phone_number = "+1 770 555 1212"
setting.twilio_account_sid = "AC0000000"
setting.twilio_auth_token = "ABCDefgh1234"
setting.save!
expect(setting.twilio_phone_number).to eq("+17705551212")
end
end
# NOTE(review): counting the `end`s shows ".build_default" and "validation"
# are nested INSIDE describe "load_for_mission" — likely unintentional
# nesting; confirm and outdent if so.
describe ".build_default" do
let(:mission) { get_mission }
context "with existing admin mode setting" do
let!(:admin_setting) { Setting.load_for_mission(nil).update_attribute(:theme, "elmo") }
it "copies theme setting from admin mode setting" do
expect(Setting.build_default(mission).theme).to eq("elmo")
end
end
context "without existing admin mode setting" do
it "defaults to nemo" do
expect(Setting.build_default(mission).theme).to eq("nemo")
end
end
end
describe "validation" do
describe "generic_sms_config_str" do
it "should error if invalid json" do
setting = build(:setting,
mission_id: get_mission.id,
generic_sms_config_str: "{")
expect(setting).to be_invalid
expect(setting.errors[:generic_sms_config_str].join).to match(/JSON error:/)
end
it "should error if invalid keys" do
setting = build(:setting,
mission_id: get_mission.id,
generic_sms_config_str: '{"params":{"from":"x", "body":"y"}, "response":"x", "foo":"y"}')
expect(setting).to be_invalid
expect(setting.errors[:generic_sms_config_str].join).to match(/Valid keys are params/)
end
it "should error if missing top-level key" do
setting = build(:setting,
mission: get_mission,
generic_sms_config_str: '{"params":{"from":"x", "body":"y"}}')
expect(setting).to be_invalid
expect(setting.errors[:generic_sms_config_str].join).to match(/Configuration must include/)
end
it "should error if missing second-level key" do
setting = build(:setting,
mission: get_mission,
generic_sms_config_str: '{"params":{"from":"x"}, "response":"x"}')
expect(setting).to be_invalid
expect(setting.errors[:generic_sms_config_str].join).to match(/Configuration must include/)
end
it "should not error if required keys present" do
setting = build(:setting,
mission: get_mission,
generic_sms_config_str: '{"params":{"from":"x", "body":"y"}, "response":"x"}')
expect(setting).to be_valid
end
end
end
end
end
10398: Fix setting spec
# frozen_string_literal: true
# rubocop:disable Layout/LineLength
# == Schema Information
#
# Table name: settings
#
# id :uuid not null, primary key
# default_outgoing_sms_adapter :string(255)
# frontlinecloud_api_key :string(255)
# generic_sms_config :jsonb
# incoming_sms_numbers :text
# incoming_sms_token :string(255)
# override_code :string(255)
# preferred_locales :string(255) not null
# theme :string default("nemo"), not null
# timezone :string(255) not null
# twilio_account_sid :string(255)
# twilio_auth_token :string(255)
# twilio_phone_number :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# mission_id :uuid
#
# Indexes
#
# index_settings_on_mission_id (mission_id) UNIQUE
#
# Foreign Keys
#
# settings_mission_id_fkey (mission_id => missions.id) ON DELETE => restrict ON UPDATE => restrict
#
# rubocop:enable Layout/LineLength
require "rails_helper"
# Spec for the mission Setting model. In this version, load_for_mission is
# strict: a missing settings row raises ActiveRecord::RecordNotFound instead
# of being created on demand.
describe Setting do
let(:setting) { get_mission.setting }
it "serialized locales are always symbols" do
expect(setting.preferred_locales.first.class).to eq(Symbol)
setting.update!(preferred_locales_str: "fr,ar")
expect(setting.preferred_locales.first.class).to eq(Symbol)
end
# NOTE(review): "ar1" is expected to come back as :ar — presumably invalid
# characters are stripped during normalization; confirm against the model.
it "locales with spaces should still be accepted" do
setting.update!(preferred_locales_str: "fr , ar1")
expect(setting.preferred_locales).to eq(%i[fr ar])
end
it "generate override code will generate a new six character code" do
previous_code = setting.override_code
setting.generate_override_code!
expect(previous_code).not_to eq(setting.override_code)
expect(setting.override_code.size).to eq(6)
end
describe "load_for_mission" do
context "for root setting" do
context "when there is no existing root setting (should never happen)" do
before do
Setting.root.destroy
end
it "should throw error" do
expect { Setting.load_for_mission(nil) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context "when there is a root setting" do
it "should not have an incoming_sms_token", :sms do
setting = Setting.load_for_mission(nil)
expect(setting.incoming_sms_token).to be_nil
end
end
end
context "for mission" do
let(:mission) { get_mission }
context "when there is no existing setting for the mission" do
before do
setting.destroy
end
it "should throw error" do
expect { Setting.load_for_mission(mission) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context "when there is a setting for the mission" do
it "should have an incoming_sms_token", :sms do
setting = Setting.load_for_mission(mission)
expect(setting.incoming_sms_token).to match(/\A[0-9a-f]{32}\z/)
end
it "should have the same incoming_sms_token after reloading", :sms do
setting = Setting.load_for_mission(mission)
token = setting.incoming_sms_token
setting.reload
expect(setting.incoming_sms_token).to eq(token)
end
it "should have a different incoming_sms_token after calling regenerate_incoming_sms_token!", :sms do
setting = Setting.load_for_mission(mission)
token = setting.incoming_sms_token
setting.regenerate_incoming_sms_token!
expect(setting.incoming_sms_token).not_to eq(token)
end
it "should normalize the twilio_phone_number on save", :sms do
setting = Setting.load_for_mission(mission)
setting.twilio_phone_number = "+1 770 555 1212"
setting.twilio_account_sid = "AC0000000"
setting.twilio_auth_token = "ABCDefgh1234"
setting.save!
expect(setting.twilio_phone_number).to eq("+17705551212")
end
end
end
# NOTE(review): counting the `end`s shows ".build_default" and "validation"
# are nested INSIDE describe "load_for_mission" — likely unintentional
# nesting; confirm and outdent if so.
describe ".build_default" do
let(:mission) { get_mission }
context "with existing admin mode setting" do
let!(:admin_setting) { Setting.load_for_mission(nil).update_attribute(:theme, "elmo") }
it "copies theme setting from admin mode setting" do
expect(Setting.build_default(mission).theme).to eq("elmo")
end
end
context "without existing admin mode setting" do
it "defaults to nemo" do
expect(Setting.build_default(mission).theme).to eq("nemo")
end
end
end
describe "validation" do
describe "generic_sms_config_str" do
it "should error if invalid json" do
setting = build(:setting,
mission_id: get_mission.id,
generic_sms_config_str: "{")
expect(setting).to be_invalid
expect(setting.errors[:generic_sms_config_str].join).to match(/JSON error:/)
end
it "should error if invalid keys" do
setting = build(:setting,
mission_id: get_mission.id,
generic_sms_config_str: '{"params":{"from":"x", "body":"y"}, "response":"x", "foo":"y"}')
expect(setting).to be_invalid
expect(setting.errors[:generic_sms_config_str].join).to match(/Valid keys are params/)
end
it "should error if missing top-level key" do
setting = build(:setting,
mission: get_mission,
generic_sms_config_str: '{"params":{"from":"x", "body":"y"}}')
expect(setting).to be_invalid
expect(setting.errors[:generic_sms_config_str].join).to match(/Configuration must include/)
end
it "should error if missing second-level key" do
setting = build(:setting,
mission: get_mission,
generic_sms_config_str: '{"params":{"from":"x"}, "response":"x"}')
expect(setting).to be_invalid
expect(setting.errors[:generic_sms_config_str].join).to match(/Configuration must include/)
end
it "should not error if required keys present" do
setting = build(:setting,
mission: get_mission,
generic_sms_config_str: '{"params":{"from":"x", "body":"y"}, "response":"x"}')
expect(setting).to be_valid
end
end
end
end
end
|
class EnginesCore
require "/opt/engines/lib/ruby/system/SystemUtils.rb"
require "/opt/engines/lib/ruby/system/DNSHosting.rb"
require_relative 'DockerApi.rb'
require_relative 'SystemApi.rb'
require_relative 'SystemPreferences.rb'
# Builds the collaborator APIs this facade delegates to.
def initialize
@docker_api = DockerApi.new
@system_api = SystemApi.new(self) # will change to docker_api and not self
@system_preferences = SystemPreferences.new
@last_error = String.new
end
# Human-readable message describing the most recent failure.
attr_reader :last_error
# Delegates the software-service definition lookup to the ServiceManager.
def software_service_definition(params)
sm = loadServiceManager
return sm.software_service_definition(params)
end
#@return an [Array] of service_hashes registered against the Service params[:publisher_namespace] params[:type_path]
def get_registered_against_service(params)
sm = loadServiceManager
return sm.get_registered_against_service(params)
end
# Registers a new hosting domain via the SystemApi.
def add_domain(params)
return @system_api.add_domain(params)
end
#
# def remove_containers_cron_list(container_name)
# p :remove_containers_cron
# if @system_api.remove_containers_cron_list(container_name)
# cron_service = loadManagedService("cron")
# return @system_api.rebuild_crontab(cron_service)
# else
# return false
# end
# end
#
# def rebuild_crontab(cron_service)
# #actually a rebuild (or resave) as hash already removed from consumer list
# p :rebuild_crontab
# return @system_api.rebuild_crontab(cron_service)
# end
# NOTE(review): both domain methods pass @system_api back into @system_api —
# odd-looking but consistent; confirm it matches the SystemApi signatures.
def remove_domain(params)
return @system_api.rm_domain(params[:domain_name],@system_api)
end
def update_domain(old_domain,params)
return @system_api.update_domain(old_domain,params,@system_api)
end
# Sends signal +sig+ to process +pid+ inside the named managed service's
# container.
def signal_service_process(pid,sig,name)
container = loadManagedService(name)
return @docker_api.signal_container_process(pid,sig,container)
end
# --- Container lifecycle and inspection: thin delegators to DockerApi. ---
def start_container(container)
return @docker_api.start_container(container)
end
def inspect_container(container)
return @docker_api.inspect_container(container)
end
def stop_container(container)
return @docker_api.stop_container(container)
end
def pause_container(container)
return @docker_api.pause_container(container)
end
def unpause_container(container)
return @docker_api.unpause_container(container)
end
def ps_container(container)
return @docker_api.ps_container(container)
end
def logs_container(container)
return @docker_api.logs_container(container)
end
# def add_monitor(site_hash)
# return @system_api.add_monitor(site_hash)
# end
#
# def rm_monitor(site_hash)
# return @system_api.rm_monitor(site_hash)
# end
# --- Build reports and persisted container/blueprint state: SystemApi. ---
def get_build_report(engine_name)
return @system_api.get_build_report(engine_name)
end
def save_build_report(container,build_report)
return @system_api.save_build_report(container,build_report)
end
def save_container(container)
return @system_api.save_container(container)
end
def save_blueprint(blueprint,container)
return @system_api.save_blueprint(blueprint,container)
end
def load_blueprint(container)
return @system_api.load_blueprint(container)
end
# --- Volumes and self-hosted domain management: SystemApi. ---
def add_volume(site_hash)
return @system_api.add_volume(site_hash)
end
def rm_volume(site_hash)
return @system_api.rm_volume(site_hash)
end
def remove_self_hosted_domain(domain_name)
return @system_api.remove_self_hosted_domain(domain_name)
end
def add_self_hosted_domain(params)
return @system_api.add_self_hosted_domain(params)
end
def list_self_hosted_domains()
return @system_api.list_self_hosted_domains()
end
def update_self_hosted_domain(old_domain_name, params)
@system_api.update_self_hosted_domain(old_domain_name, params)
end
# def load_system_preferences
# return @system_api.load_system_preferences
# end
#
# def save_system_preferences(preferences)
# return @system_api.save_system_preferences(preferences)
# end
# Memory statistics for a single container, via SystemApi.
def get_container_memory_stats(container)
return @system_api.get_container_memory_stats(container)
end
def set_engine_hostname_details(container,params)
return @system_api.set_engine_hostname_details(container,params)
end
# True when a Docker image named after +container_name+ exists.
# Any lookup failure is logged and reported as "image absent".
# Fixes: rescues StandardError instead of Exception (the old rescue also
# swallowed SystemExit/SignalException); drops the pointless imageName alias.
def image_exists?(container_name)
  @docker_api.image_exists?(container_name)
rescue StandardError => e
  SystemUtils.log_exception(e)
  false
end
# Lists services attached to the named object, via the ServiceManager.
# NOTE(review): rescuing Exception swallows SystemExit/SignalException;
# StandardError would be safer here and in the siblings below.
def list_attached_services_for(objectName,identifier)
sm = loadServiceManager()
return sm.list_attached_services_for(objectName,identifier)
rescue Exception=>e
SystemUtils.log_exception e
end
# Returns {services:, subservices:} available for the given object,
# keyed off the object's demodulized class name.
def list_avail_services_for(object)
objectname = object.class.name.split('::').last
# p :load_vail_services_for
# p objectname
services = load_avail_services_for(objectname)
subservices = load_avail_component_services_for(object)
retval = Hash.new
retval[:services] = services
retval[:subservices] = subservices
return retval
rescue Exception=>e
SystemUtils.log_exception e
end
# Resolves the container backing a software service and loads it.
# Returns nil when the service cannot be loaded (errors are logged).
def load_software_service(params)
sm = loadServiceManager()
# p :load_software_service
# p params
service_container = sm.get_software_service_container_name(params)
params[:service_container_name] = service_container
# p :service_container_name
# p service_container
service = loadManagedService(service_container)
if service == nil
return nil
end
return service
rescue Exception=>e
SystemUtils.log_exception e
end
# Pushes SMTP smarthost credentials into the smtp service container.
# NOTE(review): credentials travel as a colon-separated argument string —
# verify SetupParamsScript copes with values that contain ':'.
def setup_email_params(params)
arg="smarthost_hostname=" + params[:smarthost_hostname] \
+ ":smarthost_username=" + params[:smarthost_username]\
+ ":smarthost_password=" + params[:smarthost_password]\
+ ":mail_name=smtp." + params[:default_domain]
container=loadManagedService("smtp")
return @docker_api.docker_exec(container,SysConfig.SetupParamsScript,arg)
rescue Exception=>e
SystemUtils.log_exception(e)
end
# Sets the shell password for the "engines" user.
#
# Fixes two defects in the original:
# * `"\n"` embedded a REAL newline in the command string, so the shell ran
#   `echo -e <pass>` and then `<pass> | passwd engines` as two separate
#   commands — passwd never received the repeated password (and the raw
#   password was executed as a command name).
# * the password was interpolated unquoted into the shell command line
#   (shell-injection risk); it is now escaped with Shellwords.
# NOTE(review): debug_output still logs the command, password included —
# consider redacting it.
def set_engines_ssh_pw(params)
  require 'shellwords'
  pass = Shellwords.escape(params[:ssh_password])
  cmd = "printf '%s\\n%s\\n' " + pass + " " + pass + " | passwd engines"
  SystemUtils.debug_output( "ssh_pw",cmd)
  SystemUtils.run_system(cmd)
end
# --- Default domain/site preferences: SystemPreferences delegators. ---
def set_default_domain(params)
@system_preferences.set_default_domain(params)
end
def set_default_site(params)
@system_preferences.set_default_site(params)
end
def get_default_site()
@system_preferences.get_default_site
end
def get_default_domain()
@system_preferences.get_default_domain
end
# Injects DB passwords into the named server container via SetupParamsScript.
# Returns true when no container name is given; false on error (logged).
def set_database_password(container_name,params)
arg = "mysql_password=" + params[:mysql_password] +":" \
+ "server=" + container_name + ":" \
+ "psql_password=" + params[:psql_password] #Need two args
if container_name
server_container = loadManagedService(container_name)
return @docker_api.docker_exec(server_container,SysConfig.SetupParamsScript,arg)
end
return true
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
#Attach the service defined in service_hash [Hash]
#@return boolean indicating success
# NOTE(review): the two `p` calls below look like leftover debug output.
def attach_service(service_hash)
p :attach_Service
p service_hash
service_hash = SystemUtils.symbolize_keys(service_hash)
if service_hash == nil
log_error_mesg("Attach Service passed a nil","")
return false
elsif service_hash.is_a?(Hash) == false
log_error_mesg("Attached Service passed a non Hash",service_hash)
return false
end
# Derive the service handle from the variables hash when not supplied.
if service_hash.has_key?(:service_handle) == false || service_hash[:service_handle] == nil
service_handle_field = SoftwareServiceDefinition.service_handle_field(service_hash)
service_hash[:service_handle] = service_hash[:variables][service_handle_field.to_sym]
end
if service_hash.has_key?(:variables) == false
log_error_mesg("Attached Service passed no variables",service_hash)
return false
end
sm = loadServiceManager()
if sm.add_service(service_hash)
return sm.register_service_hash_with_service(service_hash)
else
log_error_mesg("register failed", service_hash)
end
return false
rescue Exception=>e
SystemUtils.log_exception e
end
# Drops an orphaned service record via the ServiceManager.
def remove_orphaned_service(params)
sm = loadServiceManager()
return sm.remove_orphaned_service(params)
rescue Exception=>e
SystemUtils.log_exception e
end
# Detaches a service. (The historical "dettach" spelling is kept because
# external callers depend on the method name.)
def dettach_service(params)
sm = loadServiceManager()
return sm.remove_service(params)
# if service !=nil && service != false
# return service.remove_consumer(params)
# end
# @last_error = "Failed to dettach Service: " + @last_error
rescue Exception=>e
SystemUtils.log_exception e
return false
end
def list_providers_in_use
sm = loadServiceManager()
return sm.list_providers_in_use
end
# Lazily builds and memoizes the ServiceManager for this core.
def loadServiceManager()
  @service_manager ||= ServiceManager.new(self)
end
# True when an orphan record matching service_hash exists in the registry.
def match_orphan_service(service_hash)
sm = loadServiceManager()
if sm.retrieve_orphan(service_hash) == false
return false
end
return true
end
#@return the consumers registered against the service identified by params
def find_service_consumers(params)
sm = loadServiceManager()
return sm.find_service_consumers(params)
end
def service_is_registered?(service_hash)
sm = loadServiceManager()
return sm.service_is_registered?(service_hash)
end
# Persistent services (fs/db/...) registered to the engine named in params.
def get_engine_persistant_services(params)
sm = loadServiceManager()
return sm.get_engine_persistant_services(params)
end
def managed_service_tree
sm = loadServiceManager()
return sm.managed_service_tree
end
def get_managed_engine_tree
sm = loadServiceManager()
return sm.get_managed_engine_tree
end
def find_engine_services(params)
sm = loadServiceManager()
return sm.find_engine_services(params)
end
# Parses a SoftwareServiceDefinition from the YAML file at +filename+.
# Returns nil (after logging) when reading/parsing fails.
# Fixes: the File handle was opened and never closed (descriptor leak) —
# the block form guarantees closure; StandardError replaces the over-broad
# Exception rescue; start-up debug `p` calls removed (rescue diagnostics kept).
def load_service_definition(filename)
  File.open(filename) do |yaml_file|
    SoftwareServiceDefinition.from_yaml(yaml_file)
  end
rescue StandardError => e
  p :filename
  p filename
  SystemUtils.log_exception e
end
# Looks up the service definition for service_hash and runs the templater
# against the parent engine so templated fields are filled in.
# Returns the populated definition, or nil after logging on failure.
def fillin_template_for_service_def(service_hash)
  service_def = SoftwareServiceDefinition.find(service_hash[:type_path],service_hash[:publisher_namespace])
  # Fix: getManagedEngines takes no arguments (it is a zero-arity delegator
  # defined below), so the original call raised ArgumentError on every
  # invocation; loadManagedEngine is the lookup-by-name the code intended.
  container = loadManagedEngine(service_hash[:parent_engine])
  templater = Templater.new(SystemAccess.new,container)
  templater.proccess_templated_service_hash(service_def)
  service_def
rescue StandardError=>e
  p service_hash
  p service_def
  SystemUtils.log_exception e
end
# Loads every *.yaml service definition under the template dir for
# +typename+ and returns their Hash forms in an Array. A definition that
# parses to a String is treated as a load error and skipped (logged);
# per-entry failures are logged and the scan continues.
def load_avail_services_for_type(typename)
# p :load_avail_services_for_by_type
# p typename
retval = Array.new
dir = SysConfig.ServiceMapTemplateDir + "/" + typename
# p :dir
# p dir
if Dir.exists?(dir)
Dir.foreach(dir) do |service_dir_entry|
begin
if service_dir_entry.start_with?(".") == true
next
end
# p :service_dir_entry
# p service_dir_entry
if service_dir_entry.end_with?(".yaml")
service = load_service_definition(dir + "/" + service_dir_entry)
if service != nil
# p :service_as_serivce
# p service
# p :as_hash
# p service.to_h
# p :as_yaml
# p service.to_yaml()
if service.is_a?(String)
log_error_mesg("service yaml load error",service)
else
retval.push(service.to_h)
end
end
end
rescue Exception=>e
SystemUtils.log_exception e
puts dir.to_s + "/" + service_dir_entry
next
end
end
end
# p typename
# p retval
return retval
rescue Exception=>e
SystemUtils.log_exception e
end
# Fetches the configuration of the managed service named by
# service_param[:service_name].
# Returns the configurator result Hash (and mirrors its :stderr into
# @last_error), or false with @last_error set when the name is missing or
# the service cannot be loaded.
# Fix: previously a missing :service_name key left retval nil and the
# trailing `retval[:stderr]` raised NoMethodError.
def retrieve_service_configuration(service_param)
  unless service_param.has_key?(:service_name)
    @last_error = "no Service"
    return false
  end
  service = loadManagedService(service_param[:service_name])
  if service == false || service == nil
    @last_error = "no Service"
    return false
  end
  retval = service.retrieve_configurator(service_param)
  @last_error = retval[:stderr]
  retval
end
# Runs the configurator for the managed service named by
# service_param[:service_name].
# Returns true when the configurator reports result 0; false otherwise,
# leaving the failure reason (if any) in @last_error.
def update_service_configuration(service_param)
  return false unless service_param.has_key?(:service_name)
  service = loadManagedService(service_param[:service_name])
  if service == false || service.nil?
    @last_error = "no Service"
    return false
  end
  outcome = service.run_configurator(service_param)
  return true if outcome[:result] == 0
  @last_error = outcome[:stderr]
  false
end
# Attaches a sub-service. Requires params[:parent_service] to be a Hash
# carrying :publisher_namespace, :type_path and :service_handle.
# Fixed typo in the user-facing error message ("parrameters").
def attach_subservice(params)
  if subservice_params_complete?(params)
    return attach_service(params)
  end
  @last_error = "missing parameters"
  return false
end

# Detaches a sub-service; same parameter contract as attach_subservice.
# (Method name keeps the historical "dettach" spelling for callers.)
def dettach_subservice(params)
  if subservice_params_complete?(params)
    return dettach_service(params)
  end
  @last_error = "missing parameters"
  return false
end

# True when params carry the :parent_service keys both subservice
# operations require (shared predicate extracted from the two methods).
def subservice_params_complete?(params)
  params.has_key?(:parent_service) &&
    params[:parent_service].has_key?(:publisher_namespace) &&
    params[:parent_service].has_key?(:type_path) &&
    params[:parent_service].has_key?(:service_handle)
end
# Near-duplicate of load_avail_services_for_type, minus the String-error
# check; scans the template dir for +typename+ and returns definition Hashes.
def load_avail_services_for(typename)
# p :load_avail_services_for
# p typename
retval = Array.new
dir = SysConfig.ServiceMapTemplateDir + "/" + typename
# p :dir
# p dir
if Dir.exists?(dir)
Dir.foreach(dir) do |service_dir_entry|
begin
if service_dir_entry.start_with?(".") == true
next
end
# p :service_dir_entry
# p service_dir_entry
if service_dir_entry.end_with?(".yaml")
service = load_service_definition(dir + "/" + service_dir_entry)
if service != nil
retval.push(service.to_h)
end
end
rescue Exception=>e
SystemUtils.log_exception e
next
end
end
end
# p typename
# p retval
return retval
rescue Exception=>e
SystemUtils.log_exception e
end
# Maps each persistent service type of a ManagedEngine to the component
# service definitions available for that type. Returns nil for non-engines;
# errors are logged and yield nil.
def load_avail_component_services_for(engine)
retval = Hash.new
if engine.is_a?(ManagedEngine)
params = Hash.new
params[:engine_name]=engine.container_name
persistant_services = get_engine_persistant_services(params)
persistant_services.each do |service|
type_path = service[:type_path]
retval[type_path] = load_avail_services_for_type(type_path)
# p retval[type_path]
end
else
# p :load_avail_component_services_for_engine_got_a
# p engine.to_s
return nil
end
return retval
rescue Exception=>e
SystemUtils.log_exception e
end
# Applies runtime properties (currently :memory) to a stopped engine and
# rebuilds its container. Returns true on success, false with @last_error
# describing the failure.
# Fix: the original assigned to a plain `last_error` local (there is no
# writer, only attr_reader), so every error message was silently discarded
# and callers reading last_error saw nothing; all sites now set @last_error.
def set_engine_runtime_properties(params)
  #FIX ME also need to deal with Env Variables
  engine_name = params[:engine_name]
  engine = loadManagedEngine(engine_name)
  if engine.is_a?(EnginesOSapiResult) == true
    @last_error = engine.result_mesg
    return false
  end
  # Refuse to change runtime properties while the container is running.
  if engine.is_active == true
    @last_error = "Container is active"
    return false
  end
  if params.has_key?(:memory)
    if params[:memory] == engine.memory
      @last_error = "No Change in Memory Value"
      return false
    end
    if engine.update_memory(params[:memory]) == false
      @last_error = engine.last_error
      return false
    end
  end
  # The container must be recreated for the new properties to take effect.
  if engine.has_container? == true
    if destroy_container(engine) == false
      @last_error = engine.last_error
      return false
    end
  end
  if engine.create_container == false
    @last_error = engine.last_error
    return false
  end
  return true
end
# --- Engine/network/system info and loader delegators (SystemApi). ---
def set_engine_network_properties (engine, params)
return @system_api.set_engine_network_properties(engine,params)
end
def get_system_load_info
return @system_api.get_system_load_info
end
def get_system_memory_info
return @system_api.get_system_memory_info
end
def getManagedEngines
return @system_api.getManagedEngines
end
def loadManagedEngine(engine_name)
return @system_api.loadManagedEngine(engine_name)
end
def get_orphaned_services_tree
return loadServiceManager.get_orphaned_services_tree
end
def loadManagedService(service_name)
return @system_api.loadManagedService(service_name)
end
def getManagedServices
return @system_api.getManagedServices
end
def list_domains
return @system_api.list_domains
end
def list_managed_engines
return @system_api.list_managed_engines
end
def list_managed_services
return @system_api.list_managed_services
end
# Destroys a container via DockerApi and, on success, removes its cid file
# via SystemApi. Returns true/false.
# NOTE(review): clear_error is defined outside this chunk — confirm it
# resets @last_error; also rescuing Exception is over-broad (StandardError
# would be safer).
def destroy_container(container)
clear_error
begin
if @docker_api.destroy_container(container) != false
@system_api.destroy_container(container) #removes cid file
return true
else
return false
end
rescue Exception=>e
container.last_error=( "Failed To Destroy " + e.to_s)
SystemUtils.log_exception(e)
return false
end
end
# Regenerates the system SSH key for the engines user.
def generate_engines_user_ssh_key
return @system_api.regen_system_ssh_key
end
def system_update
return @system_api.system_update
end
# Deletes the container's image and, on success, its stored configs.
def delete_image(container)
begin
clear_error
if @docker_api.delete_image(container) == true
#only delete if del all otherwise backup
return @system_api.delete_container_configs(container)
end
return false
rescue Exception=>e
@last_error=( "Failed To Delete " + e.to_s)
SystemUtils.log_exception(e)
return false
end
end
#@return boolean indicating sucess
#@params [Hash] :engine_name
#Retrieves all persistant service registered to :engine_name and destroys the underlying service (fs db etc)
# They are removed from the tree if delete is sucessful
# Destroys every persistant service registered to params[:engine_name]
# (the underlying fs, db, etc.) and deregisters it. Stops at the first
# failure, reporting it through log_error_mesg.
#
# @param params [Hash] must contain :engine_name; :remove_all_application_data
#   is propagated to each service entry
# @return [Boolean] true only if every service was removed and deregistered
def delete_engine_persistant_services(params)
  sm = loadServiceManager()
  services = sm.get_engine_persistant_services(params)
  services.each do |service_hash|
    # Propagate the caller's data-removal choice to each service entry.
    service_hash[:remove_all_application_data] = params[:remove_all_application_data]
    if service_hash.has_key?(:service_container_name) == false
      log_error_mesg("Missing :service_container_name in service_hash",service_hash)
      return false
    end
    service = loadManagedService(service_hash[:service_container_name])
    if service == nil
      log_error_mesg("Failed to load container name keyed by :service_container_name ",service_hash)
      return false
    end
    # The service container must be running so it can perform its own teardown.
    if service.is_running == false
      log_error_mesg("Cannot remove service consumer if service is not running ",service_hash)
      return false
    end
    if service.remove_consumer(service_hash) == false
      log_error_mesg("Failed to remove service ",service_hash)
      return false
    end
    #REMOVE THE SERVICE HERE AND NOW
    # Deregister from the engine registry first, then the service registry.
    if sm.remove_from_engine_registery(service_hash) ==true
      if sm.remove_from_services_registry(service_hash) == false
        log_error_mesg("Cannot remove from Service Registry",service_hash)
        return false
      end
    else
      log_error_mesg("Cannot remove from Engine Registry",service_hash)
      return false
    end
  end
  return true
rescue Exception=>e
  @last_error=( "Failed To Delete " + e.to_s)
  SystemUtils.log_exception(e)
  return false
end
def delete_image_dependancies(params)
sm = loadServiceManager()
if sm.rm_remove_engine(params) == false
log_error_mesg("Failed to remove deleted Service",params)
return false
end
return true
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
# Runs a shell command (stderr merged into stdout) and reports success.
# Success requires a zero exit status AND output free of common failure
# markers (the text scan is a kludge; see FIXME below).
#
# @param cmd [String] shell command to execute
# @return [Boolean] true on success; false on failure (output kept in @last_error)
def run_system(cmd)
  clear_error
  begin
    cmd = cmd + " 2>&1" # capture stderr alongside stdout
    res= %x<#{cmd}>
    SystemUtils.debug_output("run system",res)
    #FIXME should be case insensitive The last one is a pure kludge
    #really need to get stderr and stdout separately
    # NOTE(review): `$? == 0` compares a Process::Status to an Integer;
    # `$?.exitstatus == 0` would be the explicit form — confirm before changing.
    if $? == 0 && res.downcase.include?("error") == false && res.downcase.include?("fail") == false && res.downcase.include?("could not resolve hostname") == false && res.downcase.include?("unsuccessful") == false
      return true
    else
      @last_error = res
      SystemUtils.debug_output("run system result",res)
      return false
    end
  rescue Exception=>e
    SystemUtils.log_exception(e)
    # BUG FIX: previously `return ret_val`, an undefined local that raised
    # NameError inside the rescue; return false like the other rescues here.
    return false
  end
end
# Runs the one-shot "volbuilder" helper container to populate the given
# container's volumes, then removes the helper.
#
# @param container [Object] managed container whose volumes are built
# @param username [String] passed to the helper as the fw_user env var
# @return [Boolean] true (helper-cleanup failures are logged, not fatal),
#   false only if an exception is raised
def run_volume_builder(container,username)
  clear_error
  begin
    # A leftover cid file means a previous volbuilder is still around:
    # stop and remove it before starting a fresh one.
    # FIX: File.exists? is a deprecated alias removed in Ruby 3.2; use File.exist?.
    if File.exist?(SysConfig.CidDir + "/volbuilder.cid") == true
      command = "docker stop volbuilder"
      run_system(command)
      command = "docker rm volbuilder"
      run_system(command)
      File.delete(SysConfig.CidDir + "/volbuilder.cid")
    end
    mapped_vols = get_volbuild_volmaps container
    command = "docker run --name volbuilder --memory=20m -e fw_user=" + username + " --cidfile /opt/engines/run/volbuilder.cid " + mapped_vols + " -t engines/volbuilder:" + SystemUtils.system_release + " /bin/sh /home/setup_vols.sh "
    SystemUtils.debug_output("Run volumen builder",command)
    run_system(command)
    #Note no -d so process will not return until setup.sh completes
    command = "docker rm volbuilder"
    if File.exist?(SysConfig.CidDir + "/volbuilder.cid") == true
      File.delete(SysConfig.CidDir + "/volbuilder.cid")
    end
    res = run_system(command)
    if res != true
      # BUG FIX: previously logged `res` (the boolean false); log the
      # captured error text instead.
      SystemUtils.log_error(@last_error)
      #don't return false as
    end
    return true
  rescue Exception=>e
    SystemUtils.log_exception(e)
    return false
  end
end
# Creates the docker container for the given managed container plus the
# matching system-side state (cid file etc.).
#
# @param container [Object] managed container/engine to create
# @return [Boolean] true on success, false on any failure
def create_container(container)
  clear_error
  begin
    if @system_api.clear_cid(container) != false
      @system_api.clear_container_var_run(container)
      if @docker_api.create_container(container) == true
        return @system_api.create_container(container)
      end
      # BUG FIX: previously fell through and returned nil when the docker
      # create failed; return an explicit false for a consistent Boolean API.
      return false
    else
      return false
    end
  rescue Exception=>e
    container.last_error=("Failed To Create " + e.to_s)
    SystemUtils.log_exception(e)
    return false
  end
end
def load_and_attach_persistant_services(container)
dirname = get_container_dir(container) + "/persistant/"
sm = loadServiceManager()
return sm.load_and_attach_services(dirname,container )
end
def load_and_attach_nonpersistant_services(container)
dirname = get_container_dir(container) + "/nonpersistant/"
sm = loadServiceManager()
return sm.load_and_attach_services(dirname,container)
end
# @param container [Object] managed container
# @return [String] path of the services directory inside the container's
#   state directory
def get_container_dir(container)
  "#{@system_api.container_state_dir(container)}/services/"
end
#install from fresh copy of blueprint in repositor
def reinstall_engine(engine)
clear_error
EngineBuilder.re_install_engine(engine,self)
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
#rebuilds image from current blueprint
def rebuild_image(container)
clear_error
begin
params=Hash.new
params[:engine_name] = container.container_name
params[:domain_name] = container.domain_name
params[:host_name] = container.hostname
params[:env_variables] = container.environments
params[:http_protocol] = container.protocol
params[:repository_url] = container.repo
params[:software_environment_variables] = container.environments
# custom_env=params
# @http_protocol = params[:http_protocol] = container.
builder = EngineBuilder.new(params, self)
return builder.rebuild_managed_container(container)
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
end
#FIXME Kludge should read from network namespace /proc ?
# Returns {:in => rx_bytes, :out => tx_bytes} for the named container by
# exec-ing `netstat` inside it and scraping the "bytes:" fields.
# On any failure both values are the string "n/a".
#
# @param container_name [String] docker container name
# @return [Hash] :in and :out byte counters (strings)
def get_container_network_metrics(container_name)
  begin
    ret_val = Hash.new
    clear_error
    # NOTE(review): this nested `def` defines error_result on the enclosing
    # class the first time the method runs; kept as-is to preserve behavior.
    def error_result
      ret_val = Hash.new
      ret_val[:in]="n/a"
      ret_val[:out]="n/a"
      return ret_val
    end
    commandargs="docker exec " + container_name + " netstat --interfaces -e | grep bytes |head -1 | awk '{ print $2 \" \" $6}' 2>&1"
    result = SystemUtils.execute_command(commandargs)
    p result
    if result[:result] != 0
      ret_val = error_result
    else
      res = result[:stdout]
      vals = res.split("bytes:")
      p res
      p vals
      # BUG FIX: was `vals.count < 2`, which took the success branch exactly
      # when the split produced too few fields and then read vals[1]/vals[2]
      # out of range. The RX/TX branch needs at least three fields.
      if vals.count > 2
        if vals[1] != nil && vals[2] != nil
          ret_val[:in] = vals[1].chop
          ret_val[:out] = vals[2].chop
        else
          ret_val = error_result
        end
      else
        ret_val = error_result
      end
      p ret_val
      return ret_val
    end
  rescue Exception=>e
    SystemUtils.log_exception(e)
    return error_result
  end
end
# Asks the system API whether the container has finished starting up.
#
# @param container [Object] managed container to query
# @return [Boolean] false if the underlying check raises
def is_startup_complete container
  clear_error
  @system_api.is_startup_complete(container)
rescue Exception=>e
  SystemUtils.log_exception(e)
  false
end
# Records msg (plus a truncated dump of the related object) as the last
# error and forwards both to the system-wide error log.
#
# @param msg [String] human-readable error text
# @param object [Object] related context; only its first 256 chars are kept
def log_error_mesg(msg,object)
  truncated = object.to_s.slice(0,256)
  @last_error = "#{msg}:#{truncated}"
  SystemUtils.log_error_mesg(msg,object)
end
def register_non_persistant_services(engine_name)
sm = loadServiceManager()
return sm.register_non_persistant_services(engine_name)
end
def deregister_non_persistant_services(engine_name)
sm = loadServiceManager()
return sm.deregister_non_persistant_services(engine_name)
end
#@return an [Array] of service_hashs of Orphaned persistant services match @params [Hash]
#:path_type :publisher_namespace
def get_orphaned_services(params)
return loadServiceManager.get_orphaned_services(params)
end
def clean_up_dangling_images
@docker_api.clean_up_dangling_images
end
#@ return [Boolean] indicating sucess
#For Maintanence ONLY
def delete_service_from_service_registry(service_hash)
sm = loadServiceManager()
return sm.remove_from_services_registry(service_hash)
end
def delete_service_from_engine_registry(service_hash)
sm = loadServiceManager()
return sm.remove_from_engine_registery(service_hash)
end
protected
def get_volbuild_volmaps container
begin
clear_error
state_dir = SysConfig.CidDir + "/containers/" + container.container_name + "/run/"
log_dir = SysConfig.SystemLogRoot + "/containers/" + container.container_name
volume_option = " -v " + state_dir + ":/client/state:rw "
volume_option += " -v " + log_dir + ":/client/log:rw "
if container.volumes != nil
container.volumes.each_value do |vol|
SystemUtils.debug_output("build vol maps",vol)
volume_option += " -v " + vol.localpath.to_s + ":/dest/fs:rw"
end
end
volume_option += " --volumes-from " + container.container_name
return volume_option
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
end
# Resets the recorded last-error message to an empty string.
def clear_error
  @last_error = ''
end
#@return an [Array] of service_hashs of Active persistant services match @params [Hash]
#:path_type :publisher_namespace
def get_active_persistant_services(params)
return loadServiceManager.get_active_persistant_services(params)
end
end
# correct < to >  (stray commit-message line from the file concatenation; converted to a comment so it no longer breaks Ruby parsing)
class EnginesCore
require "/opt/engines/lib/ruby/system/SystemUtils.rb"
require "/opt/engines/lib/ruby/system/DNSHosting.rb"
require_relative 'DockerApi.rb'
require_relative 'SystemApi.rb'
require_relative 'SystemPreferences.rb'
def initialize
@docker_api = DockerApi.new
@system_api = SystemApi.new(self) #will change to to docker_api and not self
@system_preferences = SystemPreferences.new
@last_error = String.new
end
attr_reader :last_error
def software_service_definition(params)
sm = loadServiceManager
return sm.software_service_definition(params)
end
#@return an [Array] of service_hashes regsitered against the Service params[:publisher_namespace] params[:type_path]
def get_registered_against_service(params)
sm = loadServiceManager
return sm.get_registered_against_service(params)
end
def add_domain(params)
return @system_api.add_domain(params)
end
#
# def remove_containers_cron_list(container_name)
# p :remove_containers_cron
# if @system_api.remove_containers_cron_list(container_name)
# cron_service = loadManagedService("cron")
# return @system_api.rebuild_crontab(cron_service)
# else
# return false
# end
# end
#
# def rebuild_crontab(cron_service)
# #acutally a rebuild (or resave) as hadh already removed from consumer list
# p :rebuild_crontab
# return @system_api.rebuild_crontab(cron_service)
# end
def remove_domain(params)
return @system_api.rm_domain(params[:domain_name],@system_api)
end
def update_domain(old_domain,params)
return @system_api.update_domain(old_domain,params,@system_api)
end
def signal_service_process(pid,sig,name)
container = loadManagedService(name)
return @docker_api.signal_container_process(pid,sig,container)
end
def start_container(container)
return @docker_api.start_container(container)
end
def inspect_container(container)
return @docker_api.inspect_container(container)
end
def stop_container(container)
return @docker_api.stop_container(container)
end
def pause_container(container)
return @docker_api.pause_container(container)
end
def unpause_container(container)
return @docker_api.unpause_container(container)
end
def ps_container(container)
return @docker_api.ps_container(container)
end
def logs_container(container)
return @docker_api.logs_container(container)
end
# def add_monitor(site_hash)
# return @system_api.add_monitor(site_hash)
# end
#
# def rm_monitor(site_hash)
# return @system_api.rm_monitor(site_hash)
# end
def get_build_report(engine_name)
return @system_api.get_build_report(engine_name)
end
def save_build_report(container,build_report)
return @system_api.save_build_report(container,build_report)
end
def save_container(container)
return @system_api.save_container(container)
end
def save_blueprint(blueprint,container)
return @system_api.save_blueprint(blueprint,container)
end
def load_blueprint(container)
return @system_api.load_blueprint(container)
end
def add_volume(site_hash)
return @system_api.add_volume(site_hash)
end
def rm_volume(site_hash)
return @system_api.rm_volume(site_hash)
end
def remove_self_hosted_domain(domain_name)
return @system_api.remove_self_hosted_domain(domain_name)
end
def add_self_hosted_domain(params)
return @system_api.add_self_hosted_domain(params)
end
def list_self_hosted_domains()
return @system_api.list_self_hosted_domains()
end
def update_self_hosted_domain(old_domain_name, params)
@system_api.update_self_hosted_domain(old_domain_name, params)
end
# def load_system_preferences
# return @system_api.load_system_preferences
# end
#
# def save_system_preferences(preferences)
# return @system_api.save_system_preferences(preferences)
# end
def get_container_memory_stats(container)
return @system_api.get_container_memory_stats(container)
end
def set_engine_hostname_details(container,params)
return @system_api.set_engine_hostname_details(container,params)
end
def image_exists?(container_name)
imageName = container_name
return @docker_api.image_exists?(imageName)
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
def list_attached_services_for(objectName,identifier)
sm = loadServiceManager()
return sm.list_attached_services_for(objectName,identifier)
rescue Exception=>e
SystemUtils.log_exception e
end
def list_avail_services_for(object)
objectname = object.class.name.split('::').last
# p :load_vail_services_for
# p objectname
services = load_avail_services_for(objectname)
subservices = load_avail_component_services_for(object)
retval = Hash.new
retval[:services] = services
retval[:subservices] = subservices
return retval
rescue Exception=>e
SystemUtils.log_exception e
end
def load_software_service(params)
sm = loadServiceManager()
# p :load_software_service
# p params
service_container = sm.get_software_service_container_name(params)
params[:service_container_name] = service_container
# p :service_container_name
# p service_container
service = loadManagedService(service_container)
if service == nil
return nil
end
return service
rescue Exception=>e
SystemUtils.log_exception e
end
def setup_email_params(params)
arg="smarthost_hostname=" + params[:smarthost_hostname] \
+ ":smarthost_username=" + params[:smarthost_username]\
+ ":smarthost_password=" + params[:smarthost_password]\
+ ":mail_name=smtp." + params[:default_domain]
container=loadManagedService("smtp")
return @docker_api.docker_exec(container,SysConfig.SetupParamsScript,arg)
rescue Exception=>e
SystemUtils.log_exception(e)
end
def set_engines_ssh_pw(params)
pass = params[:ssh_password]
cmd = "echo -e " + pass + "\n" + pass + " | passwd engines"
SystemUtils.debug_output( "ssh_pw",cmd)
SystemUtils.run_system(cmd)
end
def set_default_domain(params)
@system_preferences.set_default_domain(params)
end
def set_default_site(params)
@system_preferences.set_default_site(params)
end
def get_default_site()
@system_preferences.get_default_site
end
def get_default_domain()
@system_preferences.get_default_domain
end
def set_database_password(container_name,params)
arg = "mysql_password=" + params[:mysql_password] +":" \
+ "server=" + container_name + ":" \
+ "psql_password=" + params[:psql_password] #Need two args
if container_name
server_container = loadManagedService(container_name)
return @docker_api.docker_exec(server_container,SysConfig.SetupParamsScript,arg)
end
return true
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
#Attach the service defined in service_hash [Hash]
#@return boolean indicating sucess
def attach_service(service_hash)
p :attach_Service
p service_hash
service_hash = SystemUtils.symbolize_keys(service_hash)
if service_hash == nil
log_error_mesg("Attach Service passed a nil","")
return false
elsif service_hash.is_a?(Hash) == false
log_error_mesg("Attached Service passed a non Hash",service_hash)
return false
end
if service_hash.has_key?(:service_handle) == false || service_hash[:service_handle] == nil
service_handle_field = SoftwareServiceDefinition.service_handle_field(service_hash)
service_hash[:service_handle] = service_hash[:variables][service_handle_field.to_sym]
end
if service_hash.has_key?(:variables) == false
log_error_mesg("Attached Service passed no variables",service_hash)
return false
end
sm = loadServiceManager()
if sm.add_service(service_hash)
return sm.register_service_hash_with_service(service_hash)
else
log_error_mesg("register failed", service_hash)
end
return false
rescue Exception=>e
SystemUtils.log_exception e
end
def remove_orphaned_service(params)
sm = loadServiceManager()
return sm.remove_orphaned_service(params)
rescue Exception=>e
SystemUtils.log_exception e
end
def dettach_service(params)
sm = loadServiceManager()
return sm.remove_service(params)
# if service !=nil && service != false
# return service.remove_consumer(params)
# end
# @last_error = "Failed to dettach Service: " + @last_error
rescue Exception=>e
SystemUtils.log_exception e
return false
end
def list_providers_in_use
sm = loadServiceManager()
return sm.list_providers_in_use
end
def loadServiceManager()
if @service_manager == nil
@service_manager = ServiceManager.new(self)
return @service_manager
end
return @service_manager
end
def match_orphan_service(service_hash)
sm = loadServiceManager()
if sm.retrieve_orphan(service_hash) == false
return false
end
return true
end
#returns
def find_service_consumers(params)
sm = loadServiceManager()
return sm.find_service_consumers(params)
end
def service_is_registered?(service_hash)
sm = loadServiceManager()
return sm.service_is_registered?(service_hash)
end
def get_engine_persistant_services(params)
sm = loadServiceManager()
return sm.get_engine_persistant_services(params)
end
def managed_service_tree
sm = loadServiceManager()
return sm.managed_service_tree
end
def get_managed_engine_tree
sm = loadServiceManager()
return sm.get_managed_engine_tree
end
def find_engine_services(params)
sm = loadServiceManager()
return sm.find_engine_services(params)
end
def load_service_definition(filename)
yaml_file = File.open(filename)
p :open
p filename
return SoftwareServiceDefinition.from_yaml(yaml_file)
rescue Exception=>e
p :filename
p filename
SystemUtils.log_exception e
end
def fillin_template_for_service_def(service_hash)
service_def = SoftwareServiceDefinition.find(service_hash[:type_path],service_hash[:publisher_namespace])
container = getManagedEngines(service_hash[:parent_engine])
templater = Templater.new(SystemAccess.new,container)
templater.proccess_templated_service_hash(service_def)
return service_def
rescue Exception=>e
p service_hash
p service_def
SystemUtils.log_exception e
end
def load_avail_services_for_type(typename)
# p :load_avail_services_for_by_type
# p typename
retval = Array.new
dir = SysConfig.ServiceMapTemplateDir + "/" + typename
# p :dir
# p dir
if Dir.exists?(dir)
Dir.foreach(dir) do |service_dir_entry|
begin
if service_dir_entry.start_with?(".") == true
next
end
# p :service_dir_entry
# p service_dir_entry
if service_dir_entry.end_with?(".yaml")
service = load_service_definition(dir + "/" + service_dir_entry)
if service != nil
# p :service_as_serivce
# p service
# p :as_hash
# p service.to_h
# p :as_yaml
# p service.to_yaml()
if service.is_a?(String)
log_error_mesg("service yaml load error",service)
else
retval.push(service.to_h)
end
end
end
rescue Exception=>e
SystemUtils.log_exception e
puts dir.to_s + "/" + service_dir_entry
next
end
end
end
# p typename
# p retval
return retval
rescue Exception=>e
SystemUtils.log_exception e
end
def retrieve_service_configuration(service_param)
if service_param.has_key?(:service_name)
service = loadManagedService(service_param[:service_name])
if service != false && service != nil
retval = service.retrieve_configurator(service_param)
else
@last_error = "no Service"
return false
end
end
@last_error = retval[:stderr]
return retval
end
def update_service_configuration(service_param)
if service_param.has_key?(:service_name)
service = loadManagedService(service_param[:service_name])
if service != false && service != nil
retval = service.run_configurator(service_param)
if retval[:result] == 0
return true
else
@last_error = retval[:stderr]
end
else
@last_error = "no Service"
end
end
return false
end
def attach_subservice(params)
if params.has_key?(:parent_service) && params[:parent_service].has_key?(:publisher_namespace) && params[:parent_service].has_key?(:type_path) && params[:parent_service].has_key?(:service_handle)
return attach_service(params)
end
@last_error = "missing parrameters"
return false
end
def dettach_subservice(params)
if params.has_key?(:parent_service) && params[:parent_service].has_key?(:publisher_namespace) && params[:parent_service].has_key?(:type_path) && params[:parent_service].has_key?(:service_handle)
return dettach_service(params)
end
@last_error = "missing parrameters"
return false
end
def load_avail_services_for(typename)
# p :load_avail_services_for
# p typename
retval = Array.new
dir = SysConfig.ServiceMapTemplateDir + "/" + typename
# p :dir
# p dir
if Dir.exists?(dir)
Dir.foreach(dir) do |service_dir_entry|
begin
if service_dir_entry.start_with?(".") == true
next
end
# p :service_dir_entry
# p service_dir_entry
if service_dir_entry.end_with?(".yaml")
service = load_service_definition(dir + "/" + service_dir_entry)
if service != nil
retval.push(service.to_h)
end
end
rescue Exception=>e
SystemUtils.log_exception e
next
end
end
end
# p typename
# p retval
return retval
rescue Exception=>e
SystemUtils.log_exception e
end
def load_avail_component_services_for(engine)
retval = Hash.new
if engine.is_a?(ManagedEngine)
params = Hash.new
params[:engine_name]=engine.container_name
persistant_services = get_engine_persistant_services(params)
persistant_services.each do |service|
type_path = service[:type_path]
retval[type_path] = load_avail_services_for_type(type_path)
# p retval[type_path]
end
else
# p :load_avail_component_services_for_engine_got_a
# p engine.to_s
return nil
end
return retval
rescue Exception=>e
SystemUtils.log_exception e
end
def set_engine_runtime_properties(params)
#FIX ME also need to deal with Env Variables
engine_name = params[:engine_name]
engine = loadManagedEngine(engine_name)
if engine.is_a?(EnginesOSapiResult) == true
last_error = engine.result_mesg
return false
end
if engine.is_active == true
last_error="Container is active"
return false
end
if params.has_key?(:memory)
if params[:memory] == engine.memory
last_error="No Change in Memory Value"
return false
end
if engine.update_memory(params[:memory]) == false
last_error= engine.last_error
return false
end
end
if engine.has_container? == true
if destroy_container(engine) == false
last_error= engine.last_error
return false
end
end
if engine.create_container == false
last_error= engine.last_error
return false
end
return true
end
def set_engine_network_properties (engine, params)
return @system_api.set_engine_network_properties(engine,params)
end
def get_system_load_info
return @system_api.get_system_load_info
end
def get_system_memory_info
return @system_api.get_system_memory_info
end
def getManagedEngines
return @system_api.getManagedEngines
end
def loadManagedEngine(engine_name)
return @system_api.loadManagedEngine(engine_name)
end
def get_orphaned_services_tree
return loadServiceManager.get_orphaned_services_tree
end
def loadManagedService(service_name)
return @system_api.loadManagedService(service_name)
end
def getManagedServices
return @system_api.getManagedServices
end
def list_domains
return @system_api.list_domains
end
def list_managed_engines
return @system_api.list_managed_engines
end
def list_managed_services
return @system_api.list_managed_services
end
def destroy_container(container)
clear_error
begin
if @docker_api.destroy_container(container) != false
@system_api.destroy_container(container) #removes cid file
return true
else
return false
end
rescue Exception=>e
container.last_error=( "Failed To Destroy " + e.to_s)
SystemUtils.log_exception(e)
return false
end
end
def generate_engines_user_ssh_key
return @system_api.regen_system_ssh_key
end
def system_update
return @system_api.system_update
end
def delete_image(container)
begin
clear_error
if @docker_api.delete_image(container) == true
#only delete if del all otherwise backup
return @system_api.delete_container_configs(container)
end
return false
rescue Exception=>e
@last_error=( "Failed To Delete " + e.to_s)
SystemUtils.log_exception(e)
return false
end
end
#@return boolean indicating sucess
#@params [Hash] :engine_name
#Retrieves all persistant service registered to :engine_name and destroys the underlying service (fs db etc)
# They are removed from the tree if delete is sucessful
def delete_engine_persistant_services(params)
sm = loadServiceManager()
services = sm.get_engine_persistant_services(params)
services.each do |service_hash|
service_hash[:remove_all_application_data] = params[:remove_all_application_data]
if service_hash.has_key?(:service_container_name) == false
log_error_mesg("Missing :service_container_name in service_hash",service_hash)
return false
end
service = loadManagedService(service_hash[:service_container_name])
if service == nil
log_error_mesg("Failed to load container name keyed by :service_container_name ",service_hash)
return false
end
if service.is_running == false
log_error_mesg("Cannot remove service consumer if service is not running ",service_hash)
return false
end
if service.remove_consumer(service_hash) == false
log_error_mesg("Failed to remove service ",service_hash)
return false
end
#REMOVE THE SERVICE HERE AND NOW
if sm.remove_from_engine_registery(service_hash) ==true
if sm.remove_from_services_registry(service_hash) == false
log_error_mesg("Cannot remove from Service Registry",service_hash)
return false
end
else
log_error_mesg("Cannot remove from Engine Registry",service_hash)
return false
end
end
return true
rescue Exception=>e
@last_error=( "Failed To Delete " + e.to_s)
SystemUtils.log_exception(e)
return false
end
def delete_image_dependancies(params)
sm = loadServiceManager()
if sm.rm_remove_engine(params) == false
log_error_mesg("Failed to remove deleted Service",params)
return false
end
return true
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
# Runs a shell command (stderr merged into stdout) and reports success.
# Success requires a zero exit status AND output free of common failure
# markers (the text scan is a kludge; see FIXME below).
#
# @param cmd [String] shell command to execute
# @return [Boolean] true on success; false on failure (output kept in @last_error)
def run_system(cmd)
  clear_error
  begin
    cmd = cmd + " 2>&1" # capture stderr alongside stdout
    res= %x<#{cmd}>
    SystemUtils.debug_output("run system",res)
    #FIXME should be case insensitive The last one is a pure kludge
    #really need to get stderr and stdout separately
    # NOTE(review): `$? == 0` compares a Process::Status to an Integer;
    # `$?.exitstatus == 0` would be the explicit form — confirm before changing.
    if $? == 0 && res.downcase.include?("error") == false && res.downcase.include?("fail") == false && res.downcase.include?("could not resolve hostname") == false && res.downcase.include?("unsuccessful") == false
      return true
    else
      @last_error = res
      SystemUtils.debug_output("run system result",res)
      return false
    end
  rescue Exception=>e
    SystemUtils.log_exception(e)
    # BUG FIX: previously `return ret_val`, an undefined local that raised
    # NameError inside the rescue; return false like the other rescues here.
    return false
  end
end
# Runs the one-shot "volbuilder" helper container to populate the given
# container's volumes, then removes the helper.
#
# @param container [Object] managed container whose volumes are built
# @param username [String] passed to the helper as the fw_user env var
# @return [Boolean] true (helper-cleanup failures are logged, not fatal),
#   false only if an exception is raised
def run_volume_builder(container,username)
  clear_error
  begin
    # A leftover cid file means a previous volbuilder is still around:
    # stop and remove it before starting a fresh one.
    # FIX: File.exists? is a deprecated alias removed in Ruby 3.2; use File.exist?.
    if File.exist?(SysConfig.CidDir + "/volbuilder.cid") == true
      command = "docker stop volbuilder"
      run_system(command)
      command = "docker rm volbuilder"
      run_system(command)
      File.delete(SysConfig.CidDir + "/volbuilder.cid")
    end
    mapped_vols = get_volbuild_volmaps container
    command = "docker run --name volbuilder --memory=20m -e fw_user=" + username + " --cidfile /opt/engines/run/volbuilder.cid " + mapped_vols + " -t engines/volbuilder:" + SystemUtils.system_release + " /bin/sh /home/setup_vols.sh "
    SystemUtils.debug_output("Run volumen builder",command)
    run_system(command)
    #Note no -d so process will not return until setup.sh completes
    command = "docker rm volbuilder"
    if File.exist?(SysConfig.CidDir + "/volbuilder.cid") == true
      File.delete(SysConfig.CidDir + "/volbuilder.cid")
    end
    res = run_system(command)
    if res != true
      # BUG FIX: previously logged `res` (the boolean false); log the
      # captured error text instead.
      SystemUtils.log_error(@last_error)
      #don't return false as
    end
    return true
  rescue Exception=>e
    SystemUtils.log_exception(e)
    return false
  end
end
# Creates the docker container for the given managed container plus the
# matching system-side state (cid file etc.).
#
# @param container [Object] managed container/engine to create
# @return [Boolean] true on success, false on any failure
def create_container(container)
  clear_error
  begin
    if @system_api.clear_cid(container) != false
      @system_api.clear_container_var_run(container)
      if @docker_api.create_container(container) == true
        return @system_api.create_container(container)
      end
      # BUG FIX: previously fell through and returned nil when the docker
      # create failed; return an explicit false for a consistent Boolean API.
      return false
    else
      return false
    end
  rescue Exception=>e
    container.last_error=("Failed To Create " + e.to_s)
    SystemUtils.log_exception(e)
    return false
  end
end
def load_and_attach_persistant_services(container)
dirname = get_container_dir(container) + "/persistant/"
sm = loadServiceManager()
return sm.load_and_attach_services(dirname,container )
end
def load_and_attach_nonpersistant_services(container)
dirname = get_container_dir(container) + "/nonpersistant/"
sm = loadServiceManager()
return sm.load_and_attach_services(dirname,container)
end
def get_container_dir(container)
return @system_api.container_state_dir(container) +"/services/"
end
#install from fresh copy of blueprint in repositor
def reinstall_engine(engine)
clear_error
EngineBuilder.re_install_engine(engine,self)
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
#rebuilds image from current blueprint
def rebuild_image(container)
clear_error
begin
params=Hash.new
params[:engine_name] = container.container_name
params[:domain_name] = container.domain_name
params[:host_name] = container.hostname
params[:env_variables] = container.environments
params[:http_protocol] = container.protocol
params[:repository_url] = container.repo
params[:software_environment_variables] = container.environments
# custom_env=params
# @http_protocol = params[:http_protocol] = container.
builder = EngineBuilder.new(params, self)
return builder.rebuild_managed_container(container)
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
end
#FIXME Kludge should read from network namespace /proc ?
def get_container_network_metrics(container_name)
begin
ret_val = Hash.new
clear_error
def error_result
ret_val = Hash.new
ret_val[:in]="n/a"
ret_val[:out]="n/a"
return ret_val
end
commandargs="docker exec " + container_name + " netstat --interfaces -e | grep bytes |head -1 | awk '{ print $2 \" \" $6}' 2>&1"
result = SystemUtils.execute_command(commandargs)
p result
if result[:result] != 0
ret_val = error_result
else
res = result[:stdout]
vals = res.split("bytes:")
p res
p vals
if vals.count > 2
if vals[1] != nil && vals[2] != nil
ret_val[:in] = vals[1].chop
ret_val[:out] = vals[2].chop
else
ret_val = error_result
end
else
ret_val = error_result
end
p ret_val
return ret_val
end
rescue Exception=>e
SystemUtils.log_exception(e)
return error_result
end
end
def is_startup_complete container
clear_error
begin
return @system_api.is_startup_complete(container)
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
end
def log_error_mesg(msg,object)
obj_str = object.to_s.slice(0,256)
@last_error = msg +":" + obj_str
SystemUtils.log_error_mesg(msg,object)
end
def register_non_persistant_services(engine_name)
sm = loadServiceManager()
return sm.register_non_persistant_services(engine_name)
end
def deregister_non_persistant_services(engine_name)
sm = loadServiceManager()
return sm.deregister_non_persistant_services(engine_name)
end
#@return an [Array] of service_hashs of Orphaned persistant services match @params [Hash]
#:path_type :publisher_namespace
def get_orphaned_services(params)
return loadServiceManager.get_orphaned_services(params)
end
def clean_up_dangling_images
@docker_api.clean_up_dangling_images
end
#@ return [Boolean] indicating sucess
#For Maintanence ONLY
def delete_service_from_service_registry(service_hash)
sm = loadServiceManager()
return sm.remove_from_services_registry(service_hash)
end
def delete_service_from_engine_registry(service_hash)
sm = loadServiceManager()
return sm.remove_from_engine_registery(service_hash)
end
protected
def get_volbuild_volmaps container
begin
clear_error
state_dir = SysConfig.CidDir + "/containers/" + container.container_name + "/run/"
log_dir = SysConfig.SystemLogRoot + "/containers/" + container.container_name
volume_option = " -v " + state_dir + ":/client/state:rw "
volume_option += " -v " + log_dir + ":/client/log:rw "
if container.volumes != nil
container.volumes.each_value do |vol|
SystemUtils.debug_output("build vol maps",vol)
volume_option += " -v " + vol.localpath.to_s + ":/dest/fs:rw"
end
end
volume_option += " --volumes-from " + container.container_name
return volume_option
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
end
def clear_error
@last_error = ""
end
#@return an [Array] of service_hashs of Active persistant services match @params [Hash]
#:path_type :publisher_namespace
def get_active_persistant_services(params)
return loadServiceManager.get_active_persistant_services(params)
end
end
# | (dataset file separator, converted to a comment)
# require 'spec_helper'
require 'gitreport'
# require 'fakefs/spec_helpers'
# Specs for GitReport::Storage driven by a fake repository.
# NOTE(review): `stub!` is the legacy RSpec 1/2 message-stub API (removed
# in RSpec 3) — kept as-is to match whatever RSpec version this project pins.
describe 'GitReport::Storage' do
  # include FakeFS::SpecHelpers
  before :each do
    # Build a throwaway repo and point GitReport at it for every example.
    @repo = FakeRepository.new
    GitReport.stub!(:project).and_return(GitReport::Project.new(@repo.path))
    @project = GitReport::Project.new(@repo.path)
    @commit = GitReport::Commit.new(@project.log.first)
    @storage = GitReport::Storage.new('path','filename')
  end
  describe '#save!' do
    it 'should save the given data to a file' do
      pending # no assertions yet; example is deliberately pending
      @storage.save!("data")
    end
  end
  describe '#load' do
    # Bodyless example: RSpec reports it as pending.
    it 'should load previously stored data'
  end
end
# Storage specs finished. (stray non-code line from the concatenation, converted to a comment)
# require 'spec_helper'
require 'gitreport'
# Round-trip specs for GitReport::Storage using a real temp file.
describe 'GitReport::Storage' do
  before :each do
    @tempfile = Tempfile.new('storage')
    @tempdir = File.dirname(@tempfile.path)
    # NOTE(review): @tempfile is a Tempfile object, not a file name; both
    # Storage.new and the "#{@tempdir}/#{@tempfile}" interpolation below
    # depend on its string conversion — verify against Storage's API.
    @storage = GitReport::Storage.new(@tempdir, @tempfile)
    # Simple two-field value class used as the marshalled payload.
    class Foo
      attr_accessor :foo, :bar
      def initialize foo, bar
        @foo = foo
        @bar = bar
      end
    end
  end
  describe '#save!' do
    it 'should save the given object to a file' do
      f1 = Foo.new("foo1", "bar1")
      f2 = Foo.new("foo2", "bar2")
      @storage.save! [f1,f2]
      # Storage apparently writes Base64-encoded Marshal data; decode the
      # file by hand to check what actually landed on disk.
      restore = Marshal.load(Base64.decode64(File.read "#{@tempdir}/#{@tempfile}"))
      restore.first.foo.should == f1.foo
      restore.first.bar.should == f1.bar
      restore.last.foo.should == f2.foo
      restore.last.bar.should == f2.bar
    end
  end
  describe '#load' do
    it 'should load previously stored objects' do
      f1 = Foo.new("foo1", "bar1")
      f2 = Foo.new("foo2", "bar2")
      @storage.save! [f1,f2]
      # Round-trip through the public API this time.
      restore = @storage.load
      restore.first.foo.should == f1.foo
      restore.first.bar.should == f1.bar
      restore.last.foo.should == f2.foo
      restore.last.bar.should == f2.bar
    end
  end
end
|
require_relative './noko_doc'
# Scrapes data for Repositories and Users on Github.com
class GithubRepoScraper
  @github_doc = NokoDoc.new
  @current_repo = nil
  SECONDS_BETWEEN_REQUESTS = 0
  @BASE_URL = "https://github.com"
  @commits_created_this_session = 0
  @start_time = Time.now

  # TODO: add check so that these methods don't necessarily take and active record
  # model, because we don't want to hit the db everytime in the dispatcher
  # TODO: we could pass in a shallow repository model and only actually find the model
  # if we need to associate a commit, or actually do an update etc.
  class << self
    # Refreshes stored meta data (watchers, stars, forks, open issues) for
    # each repository. A repo whose Github URL raises an HTTP error is
    # destroyed.
    #
    # Example project's Github url vs raw url
    # - Github: https://github.com/rspec/rspec/blob/master/README.md
    # - Raw: https://raw.githubusercontent.com/rspec/rspec/master/README.md
    def update_repo_data(repos = Repository.all)
      repos.each do |repo|
        begin
          break unless get_repo_doc(repo)
          # TODO: add to update_repo_data to get repo name and owner name
          update_repo_meta(false)
          puts "Updated repo #{@current_repo.name}"
        rescue OpenURI::HTTPError => e
          repo.destroy
          puts "DESTROYED #{@current_repo.name} : its Github URL #{@current_repo.url} resulted in #{e.message}"
        end
      end
    end

    # Retrieves the open issues and comments for each repository.
    #
    # @param scrape_limit_opts [Hash] see #handle_scrape_limits
    # @param get_repo_meta [Boolean] also refresh repo meta data
    def issues(scrape_limit_opts = {}, get_repo_meta = false)
      handle_scrape_limits(scrape_limit_opts)

      @repositories.each do |repo|
        break unless get_repo_doc(repo, "/issues")
        update_repo_meta if get_repo_meta
        puts "Scraping issues for #{repo.name}"

        issues = [] # cache issues so we can cycle through without hitting the db
        loop do
          # Get all the issues from the current page
          raw_issues = @github_doc.doc.css("div.issues-listing ul li div.d-table")
          raw_issues.each do |raw_issue|
            issue = Issue.create(build_issue(raw_issue))
            puts "Creating Issue" if issue.id
            issues << issue
          end

          next_url_anchor = @github_doc.doc.css("a.next_page")
          if next_url_anchor.present?
            next_url_rel_path = next_url_anchor.attribute("href").value
            break unless @github_doc.new_doc(@BASE_URL + next_url_rel_path)
          else
            break
          end
        end

        # Get all the comments for each issue
        issues.each do |issue|
          doc_path = @BASE_URL + issue.url
          next unless @github_doc.new_doc(doc_path)

          raw_comments = @github_doc.doc.css("div.timeline-comment-wrapper")
          raw_comments.each do |raw_comment|
            comment_json = build_comment(raw_comment)
            # BUG FIX: store the foreign key, not the Issue object itself
            comment_json['issue_id'] = issue.id
            issue_comment = IssueComment.create(comment_json)
            # NOTE(review): .create always returns a record (truthy); consider
            # checking issue_comment.persisted? instead
            puts "Creating Issue Comment" if issue_comment
          end
        end
      end
    end

    # Retrieves the commits for each Repository
    #
    # NOTE: you can use all options together, but whichever one ends first
    # will be the one that stops the scraper
    #
    # Options
    #   repositories: repos to be scraped for data
    #   page_limit: maximum number of pages to iterate
    #   user_limit: max number of users to add
    def commits(scrape_limit_opts = {}, get_repo_meta = false)
      handle_scrape_limits(scrape_limit_opts)

      catch :scrape_limit_reached do
        @repositories.each do |repo|
          break unless get_repo_doc(repo, "/commits")
          update_repo_meta if get_repo_meta
          puts "Scraping #{repo.name} commits"

          catch :recent_commits_finished do
            traverse_commit_pagination
          end
        end
      end
    end

    private

    # TODO: we should cache all the users for a repo when a repo is requested, so
    # we don't have to hit the DB as often, because I'll have to get the name
    # of the user from the comment, search if it exist, then create it.
    # If we had them cached I could search those, if not found, create it.
    # Basically let's make that query when we get the repo.

    # Extracts one comment's attributes from its timeline node, creating the
    # commenting User if it does not exist yet.
    # @return [Hash] keys: user_id, github_created_at, body
    def build_comment(raw_comment)
      user_name = raw_comment.css("a.author").text
      user = User.find_by(github_username: user_name)
      unless user
        puts "Creating new user: #{user_name}"
        user = User.create(github_username: user_name)
      end

      comment_json = {}
      comment_json['user_id'] = user.id
      comment_json['github_created_at'] = raw_comment.css("a relative-time").attribute("datetime").value
      comment_json['body'] = raw_comment.css("td.comment-body").text
      comment_json
    end

    # Extracts one issue's attributes from its listing node.
    # @return [Hash] attributes for Issue.create
    def build_issue(raw_issue)
      issue = {}
      issue['repository_id'] = @current_repo.id
      issue['name'] = raw_issue.css("a.h4").text.strip
      issue['url'] = raw_issue.css("a.h4").attribute("href").value
      issue_number, open_date, creator = raw_issue.css("span.opened-by").text.strip.split("\n")
      issue['issue_number'] = issue_number[1..-1].to_i # drop the leading '#'
      issue['creator'] = creator.strip
      issue['open_date'] = open_date.split(" ")[1..-2].join(" ")
      issue
    end

    # Loads the repo page (optionally a sub-path such as "/commits") into
    # @github_doc and remembers the repo in @current_repo.
    # @return [Boolean] whether the document was fetched
    def get_repo_doc(repo, path = "")
      @current_repo = repo
      # TODO: consider making a psuedo object to pass around
      doc_path = @current_repo.url + path
      @github_doc.new_doc(doc_path)
    end

    # Persists meta data scraped from the currently loaded document.
    # @param get_readme [Boolean] also fetch the raw README content
    def update_repo_meta(get_readme = false)
      readme_content = get_readme ? repo_readme_content : nil

      # Grab general meta data that is available on the commits page
      @current_repo.update(
        watchers: repo_watchers,
        stars: repo_stars,
        forks: repo_forks,
        open_issues: repo_open_issues,
        readme_content: readme_content
      )
    end

    # Applies scraping limits; this can be added to the other scraper.
    def handle_scrape_limits(opts = {})
      @repositories = opts[:repositories] || Repository.all
      @page_limit = opts[:page_limit] || Float::INFINITY
      @user_limit = opts[:user_limit] || Float::INFINITY
    end

    # Walks the commit pagination, scraping each page until the page limit,
    # the last page, or a failed fetch stops it.
    def traverse_commit_pagination
      page_count = 1
      loop do
        fetch_commit_data
        throw :scrape_limit_reached if page_count >= @page_limit
        break unless @github_doc.doc.css('.pagination').any?
        page_count += 1
        next_path = @github_doc.doc.css('.pagination a')[0]['href']
        sleep SECONDS_BETWEEN_REQUESTS
        break unless @github_doc.new_doc(@BASE_URL + next_path)
      end
    end

    # Scrapes one page of commits, creating Users and Commits as needed.
    # Throws :recent_commits_finished once commits are older than a year and
    # :scrape_limit_reached when the user limit is hit.
    def fetch_commit_data
      @github_doc.doc.css('.commit').each do |commit_info|
        # BUG FIX: some .commit nodes carry no <relative-time> element; skip
        # them instead of crashing on relative_time[0]
        relative_time = commit_info.css('relative-time')
        next if relative_time.empty?

        commit_date = Time.parse(relative_time[0][:datetime])
        throw :recent_commits_finished unless commit_date.to_date >= last_years_time # for today: commit_date.today?

        # Not all avatars are users
        user_anchor = commit_info.css('.commit-avatar-cell a')[0]
        github_username = user_anchor['href'][1..-1] if user_anchor

        if !github_username.nil? && !User.exists?(github_username: github_username)
          user = User.create(github_username: github_username)
          puts "User CREATE github_username:#{user.github_username}"
        elsif !github_username.nil?
          user = User.find_by(github_username: github_username)
        end

        if user
          message = commit_info.css("a.message").text
          github_identifier = commit_info.css("a.sha").text.strip
          github_created_at = DateTime.parse(relative_time.first['datetime'])

          unless Commit.exists?(github_identifier: github_identifier)
            Commit.create(
              message: message,
              user: user,
              repository: @current_repo,
              github_identifier: github_identifier,
              github_created_at: github_created_at
            )
            @commits_created_this_session += 1
            puts "Commit CREATE identifier:#{github_identifier} by #{user.github_username}"
            puts "Commits created this session: #{@commits_created_this_session}"
            puts "Total time so far: #{((Time.now - @start_time) / 60).round(2)}"
          end
        end

        throw :scrape_limit_reached if User.count >= @user_limit
      end
    end

    # Cut-off date: one year before now.
    def last_years_time
      DateTime.now - 365
    end

    # Fetches the raw README text for the current repo, or nil if the code
    # page shows no README entry.
    # NOTE: Only available on the code subpage of the repo
    def repo_readme_content
      if @github_doc.doc.at('td span:contains("README")')
        # BUG FIX: swap the host, then append the branch/file path; the old
        # gsub spliced "/master/README.md" into the middle of the hostname.
        raw_file_url = @current_repo.url.sub('github.com', 'raw.githubusercontent.com') +
                       '/master/README.md'
        NokoDoc.new_temp_doc(raw_file_url).css('body p').text
      else
        nil
      end
    end

    # Reads the numeric social counter from the nth pagehead action item.
    def select_social_count(child = nil)
      @github_doc.doc.css("ul.pagehead-actions li:nth-child(#{child}) .social-count")
                 .text.strip.gsub(',', '').to_i
    end

    def repo_watchers
      select_social_count(1)
    end

    def repo_stars
      select_social_count(2)
    end

    def repo_forks
      select_social_count(3)
    end

    def repo_open_issues
      @github_doc.doc.css("a.reponav-item span:nth-child(2).counter").text.to_i
    end
  end
end
Fixed a problem where we tried to access a relative-time element that was empty
require_relative './noko_doc'
# Scrapes data for Repositories and Users on Github.com
class GithubRepoScraper
  @github_doc = NokoDoc.new
  @current_repo = nil
  SECONDS_BETWEEN_REQUESTS = 0
  @BASE_URL = "https://github.com"
  @commits_created_this_session = 0
  @start_time = Time.now

  # TODO: add check so that these methods don't necessarily take and active record
  # model, because we don't want to hit the db everytime in the dispatcher
  # TODO: we could pass in a shallow repository model and only actually find the model
  # if we need to associate a commit, or actually do an update etc.
  class << self
    # Refreshes stored meta data (watchers, stars, forks, open issues) for
    # each repository. A repo whose Github URL raises an HTTP error is
    # destroyed.
    #
    # Example project's Github url vs raw url
    # - Github: https://github.com/rspec/rspec/blob/master/README.md
    # - Raw: https://raw.githubusercontent.com/rspec/rspec/master/README.md
    def update_repo_data(repos = Repository.all)
      repos.each do |repo|
        begin
          break unless get_repo_doc(repo)
          # TODO: add to update_repo_data to get repo name and owner name
          update_repo_meta(false)
          puts "Updated repo #{@current_repo.name}"
        rescue OpenURI::HTTPError => e
          repo.destroy
          puts "DESTROYED #{@current_repo.name} : its Github URL #{@current_repo.url} resulted in #{e.message}"
        end
      end
    end

    # Retrieves the open issues and comments for each repository.
    #
    # @param scrape_limit_opts [Hash] see #handle_scrape_limits
    # @param get_repo_meta [Boolean] also refresh repo meta data
    def issues(scrape_limit_opts = {}, get_repo_meta = false)
      handle_scrape_limits(scrape_limit_opts)

      @repositories.each do |repo|
        break unless get_repo_doc(repo, "/issues")
        update_repo_meta if get_repo_meta
        puts "Scraping issues for #{repo.name}"

        issues = [] # cache issues so we can cycle through without hitting the db
        loop do
          # Get all the issues from the current page
          raw_issues = @github_doc.doc.css("div.issues-listing ul li div.d-table")
          raw_issues.each do |raw_issue|
            issue = Issue.create(build_issue(raw_issue))
            puts "Creating Issue" if issue.id
            issues << issue
          end

          next_url_anchor = @github_doc.doc.css("a.next_page")
          if next_url_anchor.present?
            next_url_rel_path = next_url_anchor.attribute("href").value
            break unless @github_doc.new_doc(@BASE_URL + next_url_rel_path)
          else
            break
          end
        end

        # Get all the comments for each issue
        issues.each do |issue|
          doc_path = @BASE_URL + issue.url
          next unless @github_doc.new_doc(doc_path)

          raw_comments = @github_doc.doc.css("div.timeline-comment-wrapper")
          raw_comments.each do |raw_comment|
            comment_json = build_comment(raw_comment)
            # BUG FIX: store the foreign key, not the Issue object itself
            comment_json['issue_id'] = issue.id
            issue_comment = IssueComment.create(comment_json)
            # NOTE(review): .create always returns a record (truthy); consider
            # checking issue_comment.persisted? instead
            puts "Creating Issue Comment" if issue_comment
          end
        end
      end
    end

    # Retrieves the commits for each Repository
    #
    # NOTE: you can use all options together, but whichever one ends first
    # will be the one that stops the scraper
    #
    # Options
    #   repositories: repos to be scraped for data
    #   page_limit: maximum number of pages to iterate
    #   user_limit: max number of users to add
    def commits(scrape_limit_opts = {}, get_repo_meta = false)
      handle_scrape_limits(scrape_limit_opts)

      catch :scrape_limit_reached do
        @repositories.each do |repo|
          break unless get_repo_doc(repo, "/commits")
          update_repo_meta if get_repo_meta
          puts "Scraping #{repo.name} commits"

          catch :recent_commits_finished do
            traverse_commit_pagination
          end
        end
      end
    end

    private

    # TODO: we should cache all the users for a repo when a repo is requested, so
    # we don't have to hit the DB as often, because I'll have to get the name
    # of the user from the comment, search if it exist, then create it.
    # If we had them cached I could search those, if not found, create it.
    # Basically let's make that query when we get the repo.

    # Extracts one comment's attributes from its timeline node, creating the
    # commenting User if it does not exist yet.
    # @return [Hash] keys: user_id, github_created_at, body
    def build_comment(raw_comment)
      user_name = raw_comment.css("a.author").text
      user = User.find_by(github_username: user_name)
      unless user
        puts "Creating new user: #{user_name}"
        user = User.create(github_username: user_name)
      end

      comment_json = {}
      comment_json['user_id'] = user.id
      comment_json['github_created_at'] = raw_comment.css("a relative-time").attribute("datetime").value
      comment_json['body'] = raw_comment.css("td.comment-body").text
      comment_json
    end

    # Extracts one issue's attributes from its listing node.
    # @return [Hash] attributes for Issue.create
    def build_issue(raw_issue)
      issue = {}
      issue['repository_id'] = @current_repo.id
      issue['name'] = raw_issue.css("a.h4").text.strip
      issue['url'] = raw_issue.css("a.h4").attribute("href").value
      issue_number, open_date, creator = raw_issue.css("span.opened-by").text.strip.split("\n")
      issue['issue_number'] = issue_number[1..-1].to_i # drop the leading '#'
      issue['creator'] = creator.strip
      issue['open_date'] = open_date.split(" ")[1..-2].join(" ")
      issue
    end

    # Loads the repo page (optionally a sub-path such as "/commits") into
    # @github_doc and remembers the repo in @current_repo.
    # @return [Boolean] whether the document was fetched
    def get_repo_doc(repo, path = "")
      @current_repo = repo
      # TODO: consider making a psuedo object to pass around
      doc_path = @current_repo.url + path
      @github_doc.new_doc(doc_path)
    end

    # Persists meta data scraped from the currently loaded document.
    # @param get_readme [Boolean] also fetch the raw README content
    def update_repo_meta(get_readme = false)
      readme_content = get_readme ? repo_readme_content : nil

      # Grab general meta data that is available on the commits page
      @current_repo.update(
        watchers: repo_watchers,
        stars: repo_stars,
        forks: repo_forks,
        open_issues: repo_open_issues,
        readme_content: readme_content
      )
    end

    # Applies scraping limits; this can be added to the other scraper.
    def handle_scrape_limits(opts = {})
      @repositories = opts[:repositories] || Repository.all
      @page_limit = opts[:page_limit] || Float::INFINITY
      @user_limit = opts[:user_limit] || Float::INFINITY
    end

    # Walks the commit pagination, scraping each page until the page limit,
    # the last page, or a failed fetch stops it.
    def traverse_commit_pagination
      page_count = 1
      loop do
        fetch_commit_data
        throw :scrape_limit_reached if page_count >= @page_limit
        break unless @github_doc.doc.css('.pagination').any?
        page_count += 1
        next_path = @github_doc.doc.css('.pagination a')[0]['href']
        sleep SECONDS_BETWEEN_REQUESTS
        break unless @github_doc.new_doc(@BASE_URL + next_path)
      end
    end

    # Scrapes one page of commits, creating Users and Commits as needed.
    # Throws :recent_commits_finished once commits are older than a year and
    # :scrape_limit_reached when the user limit is hit.
    def fetch_commit_data
      @github_doc.doc.css('.commit').each do |commit_info|
        # some .commit nodes carry no <relative-time> element; skip them
        relative_time = commit_info.css('relative-time')
        next if relative_time.empty?

        commit_date = Time.parse(relative_time[0][:datetime])
        throw :recent_commits_finished unless commit_date.to_date >= last_years_time # for today: commit_date.today?

        # Not all avatars are users
        user_anchor = commit_info.css('.commit-avatar-cell a')[0]
        github_username = user_anchor['href'][1..-1] if user_anchor

        if !github_username.nil? && !User.exists?(github_username: github_username)
          user = User.create(github_username: github_username)
          puts "User CREATE github_username:#{user.github_username}"
        elsif !github_username.nil?
          user = User.find_by(github_username: github_username)
        end

        if user
          message = commit_info.css("a.message").text
          github_identifier = commit_info.css("a.sha").text.strip
          github_created_at = DateTime.parse(relative_time.first['datetime'])

          unless Commit.exists?(github_identifier: github_identifier)
            Commit.create(
              message: message,
              user: user,
              repository: @current_repo,
              github_identifier: github_identifier,
              github_created_at: github_created_at
            )
            @commits_created_this_session += 1
            puts "Commit CREATE identifier:#{github_identifier} by #{user.github_username}"
            puts "Commits created this session: #{@commits_created_this_session}"
            puts "Total time so far: #{((Time.now - @start_time) / 60).round(2)}"
          end
        end

        throw :scrape_limit_reached if User.count >= @user_limit
      end
    end

    # Cut-off date: one year before now.
    def last_years_time
      DateTime.now - 365
    end

    # Fetches the raw README text for the current repo, or nil if the code
    # page shows no README entry.
    # NOTE: Only available on the code subpage of the repo
    def repo_readme_content
      if @github_doc.doc.at('td span:contains("README")')
        # BUG FIX: swap the host, then append the branch/file path; the old
        # gsub spliced "/master/README.md" into the middle of the hostname.
        raw_file_url = @current_repo.url.sub('github.com', 'raw.githubusercontent.com') +
                       '/master/README.md'
        NokoDoc.new_temp_doc(raw_file_url).css('body p').text
      else
        nil
      end
    end

    # Reads the numeric social counter from the nth pagehead action item.
    def select_social_count(child = nil)
      @github_doc.doc.css("ul.pagehead-actions li:nth-child(#{child}) .social-count")
                 .text.strip.gsub(',', '').to_i
    end

    def repo_watchers
      select_social_count(1)
    end

    def repo_stars
      select_social_count(2)
    end

    def repo_forks
      select_social_count(3)
    end

    def repo_open_issues
      @github_doc.doc.css("a.reponav-item span:nth-child(2).counter").text.to_i
    end
  end
end
|
Test dynamic accessor generation for output context class
require "spec_helper"
describe Yarrow::Output::Context do
  # context built from plain scalar hash values
  let(:context_hash) do
    Yarrow::Output::Context.new(number: 99, text: "plain value")
  end

  # fixture object exposed through the context's dynamic accessor
  class Value
    def text
      "nested value"
    end
  end

  let(:object_hash) do
    Yarrow::Output::Context.new(value: Value.new)
  end

  it 'generates dynamic accessors for hash values on initialization' do
    expect(context_hash.number).to eq(99)
    expect(context_hash.text).to eq("plain value")
  end

  it 'generates dynamic accessors for value objects on initialization' do
    expect(object_hash.value.text).to eq("nested value")
  end
end
|
module Searchkick
  # Drives reindexing of an ActiveRecord/Mongoid relation into a Searchkick
  # index, in batches, optionally asynchronously via background jobs whose
  # outstanding batches are tracked in a Redis set.
  class RelationIndexer
    attr_reader :index

    # @param index [Searchkick::Index] the target index
    def initialize(index)
      @index = index
    end

    # Reindexes the relation into the index.
    # @param mode [Symbol] e.g. :async to enqueue work instead of indexing inline
    # @param method_name [Symbol, nil] partial-reindex method to invoke
    # @param full [Boolean] with :async, enqueue one job per id batch
    # @param resume [Boolean] skip records presumed already indexed (ActiveRecord only)
    # @param scope [Symbol, nil] named scope sent to the relation before indexing
    def reindex(relation, mode:, method_name: nil, full: false, resume: false, scope: nil)
      # apply scopes
      if scope
        relation = relation.send(scope)
      elsif relation.respond_to?(:search_import)
        relation = relation.search_import
      end

      # remove unneeded loading for async
      if mode == :async
        if relation.respond_to?(:primary_key)
          # ActiveRecord: only ids are needed to enqueue jobs
          relation = relation.select(relation.primary_key).except(:includes, :preload)
        elsif relation.respond_to?(:only)
          # Mongoid equivalent
          relation = relation.only(:_id)
        end
      end

      if mode == :async && full
        return full_reindex_async(relation)
      end

      relation = resume_relation(relation) if resume

      reindex_options = {
        mode: mode,
        method_name: method_name,
        full: full
      }
      record_indexer = RecordIndexer.new(index)

      in_batches(relation) do |items|
        record_indexer.reindex(items, **reindex_options)
      end
    end

    # @return [Integer] number of async batches still outstanding in Redis
    def batches_left
      Searchkick.with_redis { |r| r.scard(batches_key) }
    end

    # Marks an async batch as finished by removing it from the Redis set.
    def batch_completed(batch_id)
      Searchkick.with_redis { |r| r.srem(batches_key, batch_id) }
    end

    private

    # Restricts the relation to records not yet present in the index.
    # NOTE(review): compares ids against total_docs — assumes sequential ids
    # with no deletions; confirm this is acceptable for the caller.
    def resume_relation(relation)
      if relation.respond_to?(:primary_key)
        # use total docs instead of max id since there's not a great way
        # to get the max _id without scripting since it's a string
        where = relation.arel_table[relation.primary_key].gt(index.total_docs)
        relation = relation.where(where)
      else
        raise Error, "Resume not supported for Mongoid"
      end
    end

    # Enqueues one BulkReindexJob per batch of record ids.
    def full_reindex_async(relation)
      batch_id = 1
      class_name = relation.searchkick_options[:class_name]

      in_batches(relation) do |items|
        batch_job(class_name, batch_id, items.map(&:id))
        batch_id += 1
      end
    end

    # Yields the relation in batches, preferring find_in_batches when available.
    def in_batches(relation, &block)
      if relation.respond_to?(:find_in_batches)
        # remove order to prevent possible warnings
        relation.except(:order).find_in_batches(batch_size: batch_size, &block)
      else
        each_batch(relation, batch_size: batch_size, &block)
      end
    end

    # Batch-iteration fallback (cursor-based, for Mongoid).
    def each_batch(relation, batch_size:)
      # https://github.com/karmi/tire/blob/master/lib/tire/model/import.rb
      # use cursor for Mongoid
      items = []
      relation.all.each do |item|
        items << item
        if items.length == batch_size
          yield items
          items = []
        end
      end
      yield items if items.any?
    end

    # Registers the batch in Redis, then enqueues the bulk reindex job.
    def batch_job(class_name, batch_id, record_ids)
      Searchkick.with_redis { |r| r.sadd(batches_key, batch_id) }
      Searchkick::BulkReindexJob.perform_later(
        class_name: class_name,
        index_name: index.name,
        batch_id: batch_id,
        record_ids: record_ids.map { |v| v.instance_of?(Integer) ? v : v.to_s }
      )
    end

    # Redis key holding this index's outstanding batch ids.
    def batches_key
      "searchkick:reindex:#{index.name}:batches"
    end

    # Batch size from index options (default 1000), memoized.
    def batch_size
      @batch_size ||= index.options[:batch_size] || 1000
    end
  end
end
Reordered methods [skip ci]
module Searchkick
  # Drives reindexing of an ActiveRecord/Mongoid relation into a Searchkick
  # index, in batches, optionally asynchronously via background jobs whose
  # outstanding batches are tracked in a Redis set.
  class RelationIndexer
    attr_reader :index

    # @param index [Searchkick::Index] the target index
    def initialize(index)
      @index = index
    end

    # Reindexes the relation into the index.
    # @param mode [Symbol] e.g. :async to enqueue work instead of indexing inline
    # @param method_name [Symbol, nil] partial-reindex method to invoke
    # @param full [Boolean] with :async, enqueue one job per id batch
    # @param resume [Boolean] skip records presumed already indexed (ActiveRecord only)
    # @param scope [Symbol, nil] named scope sent to the relation before indexing
    def reindex(relation, mode:, method_name: nil, full: false, resume: false, scope: nil)
      # apply scopes
      if scope
        relation = relation.send(scope)
      elsif relation.respond_to?(:search_import)
        relation = relation.search_import
      end

      # remove unneeded loading for async
      if mode == :async
        if relation.respond_to?(:primary_key)
          # ActiveRecord: only ids are needed to enqueue jobs
          relation = relation.select(relation.primary_key).except(:includes, :preload)
        elsif relation.respond_to?(:only)
          # Mongoid equivalent
          relation = relation.only(:_id)
        end
      end

      if mode == :async && full
        return full_reindex_async(relation)
      end

      relation = resume_relation(relation) if resume

      reindex_options = {
        mode: mode,
        method_name: method_name,
        full: full
      }
      record_indexer = RecordIndexer.new(index)

      in_batches(relation) do |items|
        record_indexer.reindex(items, **reindex_options)
      end
    end

    # @return [Integer] number of async batches still outstanding in Redis
    def batches_left
      Searchkick.with_redis { |r| r.scard(batches_key) }
    end

    # Marks an async batch as finished by removing it from the Redis set.
    def batch_completed(batch_id)
      Searchkick.with_redis { |r| r.srem(batches_key, batch_id) }
    end

    private

    # Restricts the relation to records not yet present in the index.
    # NOTE(review): compares ids against total_docs — assumes sequential ids
    # with no deletions; confirm this is acceptable for the caller.
    def resume_relation(relation)
      if relation.respond_to?(:primary_key)
        # use total docs instead of max id since there's not a great way
        # to get the max _id without scripting since it's a string
        where = relation.arel_table[relation.primary_key].gt(index.total_docs)
        relation = relation.where(where)
      else
        raise Error, "Resume not supported for Mongoid"
      end
    end

    # Yields the relation in batches, preferring find_in_batches when available.
    def in_batches(relation, &block)
      if relation.respond_to?(:find_in_batches)
        # remove order to prevent possible warnings
        relation.except(:order).find_in_batches(batch_size: batch_size, &block)
      else
        each_batch(relation, batch_size: batch_size, &block)
      end
    end

    # Batch-iteration fallback (cursor-based, for Mongoid).
    def each_batch(relation, batch_size:)
      # https://github.com/karmi/tire/blob/master/lib/tire/model/import.rb
      # use cursor for Mongoid
      items = []
      relation.all.each do |item|
        items << item
        if items.length == batch_size
          yield items
          items = []
        end
      end
      yield items if items.any?
    end

    # Batch size from index options (default 1000), memoized.
    def batch_size
      @batch_size ||= index.options[:batch_size] || 1000
    end

    # Enqueues one BulkReindexJob per batch of record ids.
    def full_reindex_async(relation)
      batch_id = 1
      class_name = relation.searchkick_options[:class_name]

      in_batches(relation) do |items|
        batch_job(class_name, batch_id, items.map(&:id))
        batch_id += 1
      end
    end

    # Registers the batch in Redis, then enqueues the bulk reindex job.
    def batch_job(class_name, batch_id, record_ids)
      Searchkick.with_redis { |r| r.sadd(batches_key, batch_id) }
      Searchkick::BulkReindexJob.perform_later(
        class_name: class_name,
        index_name: index.name,
        batch_id: batch_id,
        record_ids: record_ids.map { |v| v.instance_of?(Integer) ? v : v.to_s }
      )
    end

    # Redis key holding this index's outstanding batch ids.
    def batches_key
      "searchkick:reindex:#{index.name}:batches"
    end
  end
end
|
require 'rack/app_version/rake_task'
# Reopen Rack::AppVersion so the generate task writes a predictable version.
module Rack
  class AppVersion
    def self.generate_version
      '10000'
    end
  end
end

RSpec.describe 'rake tasks' do
  context 'tasks loaded' do
    before(:all) { Rack::AppVersion.load_tasks }

    # Runs the block while swallowing anything written to stdout.
    def silence_output(&block)
      expect(&block).to output(anything).to_stdout
    end

    it 'rakes app_version:init' do
      expect(FileUtils).to receive(:touch).with('.app_version').once
      silence_output { Rake::Task['app_version:init'].invoke }
    end

    it 'rakes app_version:generate' do
      # init must run first so generate's prerequisite state exists
      silence_output { Rake::Task['app_version:init'].invoke }
      expect(IO)
        .to receive(:write)
        .with(Rack::AppVersion::APP_VERSION_PATH, '10000').once
      silence_output { Rake::Task['app_version:generate'].invoke }
    end
  end

  context 'tasks not loaded' do
    before(:all) { Rake::Task.clear }

    it 'raises error on "rake app_version:init"' do
      expect {
        Rake::Task['app_version:init'].invoke
      }.to raise_error(RuntimeError, "Don't know how to build task 'app_version:init' (see --tasks)")
    end

    it 'raises error on "rake app_version:generate"' do
      expect {
        Rake::Task['app_version:generate'].invoke
      }.to raise_error(RuntimeError, "Don't know how to build task 'app_version:generate' (see --tasks)")
    end
  end
end
[chore] Just check if it raise RuntimeError
require 'rack/app_version/rake_task'
# Reopen Rack::AppVersion so the generate task writes a predictable version.
module Rack
  class AppVersion
    def self.generate_version
      '10000'
    end
  end
end

RSpec.describe 'rake tasks' do
  context 'tasks loaded' do
    before(:all) { Rack::AppVersion.load_tasks }

    # Runs the block while swallowing anything written to stdout.
    def silence_output(&block)
      expect(&block).to output(anything).to_stdout
    end

    it 'rakes app_version:init' do
      expect(FileUtils).to receive(:touch).with('.app_version').once
      silence_output { Rake::Task['app_version:init'].invoke }
    end

    it 'rakes app_version:generate' do
      # init must run first so generate's prerequisite state exists
      silence_output { Rake::Task['app_version:init'].invoke }
      expect(IO)
        .to receive(:write)
        .with(Rack::AppVersion::APP_VERSION_PATH, '10000').once
      silence_output { Rake::Task['app_version:generate'].invoke }
    end
  end

  context 'tasks not loaded' do
    before(:all) { Rake::Task.clear }

    # Only the error class is asserted; the exact message wording can vary
    # between rake versions.
    it 'raises error on "rake app_version:init"' do
      expect {
        Rake::Task['app_version:init'].invoke
      }.to raise_error(RuntimeError)
    end

    it 'raises error on "rake app_version:generate"' do
      expect {
        Rake::Task['app_version:generate'].invoke
      }.to raise_error(RuntimeError)
    end
  end
end
|
# encoding: UTF-8
#
# Copyright 2014, Deutsche Telekom AG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe 'ssh-hardening::server' do
  # converge
  cached(:chef_run) do
    ChefSpec::ServerRunner.new.converge(described_recipe)
  end

  it 'installs openssh-server' do
    expect(chef_run).to install_package('openssh-server')
  end

  it 'creates /etc/ssh/sshd_config' do
    expect(chef_run).to create_template('/etc/ssh/sshd_config')
      .with(mode: '0600')
      .with(owner: 'root')
      .with(group: 'root')
  end

  it 'enables the ssh server' do
    expect(chef_run).to enable_service('sshd')
  end

  it 'starts the server' do
    expect(chef_run).to start_service('sshd')
  end

  it 'creates the directory /etc/ssh' do
    expect(chef_run).to create_directory('/etc/ssh')
      .with(mode: '0755')
      .with(owner: 'root')
      .with(group: 'root')
  end

  # default hardening: weak algorithms must not appear in the rendered config
  it 'disables weak hmacs' do
    expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
      .with_content(/MACs [^#]*\bhmac-sha1\b/)
  end

  it 'disables weak kexs' do
    expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
      .with_content(/KexAlgorithms [^#]*\bdiffie-hellman-group1-sha1\b/)
  end

  it 'disables cbc ciphers' do
    expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
      .with_content(/Ciphers [^#]*-cbc\b/)
  end

  it 'enables ctr ciphers' do
    expect(chef_run).to render_file('/etc/ssh/sshd_config')
      .with_content(/Ciphers [^#]*\baes128-ctr\b/)
      .with_content(/Ciphers [^#]*\baes192-ctr\b/)
      .with_content(/Ciphers [^#]*\baes256-ctr\b/)
  end

  context 'with weak hmacs enabled' do
    cached(:chef_run) do
      # NOTE(review): node.set is the legacy ChefSpec attribute API —
      # deprecated in later Chef versions in favor of normal/override
      ChefSpec::ServerRunner.new do |node|
        node.set['ssh']['weak_hmac'] = true
      end.converge(described_recipe)
    end

    it 'allows weak hmacs' do
      expect(chef_run).to render_file('/etc/ssh/sshd_config')
        .with_content(/MACs [^#]*\bhmac-sha1\b/)
    end

    it 'still does not allow weak kexs' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/KexAlgorithms [^#]*\bdiffie-hellman-group1-sha1\b/)
    end

    # NOTE(review): "doss" is a typo for "does" in this description
    it 'still doss not allow cbc ciphers' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/Ciphers [^#]*-cbc\b/)
    end
  end

  context 'with weak kexs enabled' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new do |node|
        node.set['ssh']['weak_kex'] = true
      end.converge(described_recipe)
    end

    it 'allows weak kexs' do
      expect(chef_run).to render_file('/etc/ssh/sshd_config')
        .with_content(/KexAlgorithms [^#]*\bdiffie-hellman-group1-sha1\b/)
    end

    it 'still does not allow weak macs' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/MACs [^#]*\bhmac-sha1\b/)
    end

    it 'still does not allow cbc ciphers' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/Ciphers [^#]*-cbc\b/)
    end
  end

  context 'with cbc required' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new do |node|
        node.set['ssh']['cbc_required'] = true
      end.converge(described_recipe)
    end

    it 'allows cbc ciphers' do
      expect(chef_run).to render_file('/etc/ssh/sshd_config')
        .with_content(/Ciphers [^#]*\baes256-cbc\b/)
        .with_content(/Ciphers [^#]*\baes192-cbc\b/)
        .with_content(/Ciphers [^#]*\baes128-cbc\b/)
    end

    it 'still does not allow weak macs' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/MACs [^#]*\bhmac-sha1\b/)
    end

    it 'still does not allow weak kexs' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/KexAlgorithms [^#]*\bdiffie-hellman-group1-sha1\b/)
    end

    it 'still enables ctr ciphers' do
      expect(chef_run).to render_file('/etc/ssh/sshd_config')
        .with_content(/Ciphers [^#]*\baes128-ctr\b/)
        .with_content(/Ciphers [^#]*\baes192-ctr\b/)
        .with_content(/Ciphers [^#]*\baes256-ctr\b/)
    end
  end

  it 'restarts the ssh server on config changes' do
    resource = chef_run.template('/etc/ssh/sshd_config')
    expect(resource).to notify('service[sshd]').to(:restart).delayed
  end

  it 'creates .ssh directory for user root' do
    expect(chef_run).to create_directory('/root/.ssh')
      .with(mode: '0500')
      .with(owner: 'root')
      .with(group: 'root')
  end

  context 'without attribute allow_root_with_key' do
    it 'does not unlock root account' do
      expect(chef_run).to_not run_execute('unlock root account if it is locked')
    end
  end

  context 'with attribute allow_root_with_key' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new do |node|
        node.set['ssh']['allow_root_with_key'] = true
      end.converge(described_recipe)
    end

    it 'unlocks root account' do
      expect(chef_run).to run_execute('unlock root account if it is locked')
        .with(command: "sed 's/^root:\!/root:*/' /etc/shadow -i")
    end
  end

  context 'with users data bag' do
    cached(:chef_run) do
      # both singular ssh_rootkey and plural ssh_rootkeys forms are covered
      ChefSpec::ServerRunner.new do |_node, server|
        server.create_data_bag(
          'users',
          'user1' => { id: 'user1', ssh_rootkey: 'key-user1' },
          'user2' => { id: 'user2', ssh_rootkey: 'key-user2' },
          'user3' => { id: 'user3', ssh_rootkeys: %w(key1-user3 key2-user3) },
          'user4' => { id: 'user4', ssh_rootkeys: %w(key1-user4) }
        )
      end.converge(described_recipe)
    end

    it 'creates authorized_keys for root' do
      expect(chef_run).to create_template('/root/.ssh/authorized_keys')
        .with(mode: '0400')
        .with(owner: 'root')
        .with(group: 'root')
    end

    it 'authorizes files from the user data bag for root access' do
      expect(chef_run).to render_file('/root/.ssh/authorized_keys')
        .with_content(/^key-user1$/)
        .with_content(/^key-user2$/)
        .with_content(/^key1-user3$/)
        .with_content(/^key2-user3$/)
        .with_content(/^key1-user4$/)
    end
  end

  context 'without users data bag' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new.converge(described_recipe)
    end

    it 'does not raise an error' do
      expect { chef_run }.not_to raise_error
    end

    it 'does not touch authorized_keys by root' do
      expect(chef_run).to_not create_template('/root/.ssh/authorized_keys')
    end
  end
end
reword misleading test description
# encoding: UTF-8
#
# Copyright 2014, Deutsche Telekom AG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
# ChefSpec coverage for ssh-hardening::server: package/service management,
# sshd_config rendering, crypto-hardening toggles (weak_hmac / weak_kex /
# cbc_required) and root authorized_keys handling from the users data bag.
describe 'ssh-hardening::server' do
  # converge
  cached(:chef_run) do
    ChefSpec::ServerRunner.new.converge(described_recipe)
  end

  it 'installs openssh-server' do
    expect(chef_run).to install_package('openssh-server')
  end

  it 'creates /etc/ssh/sshd_config' do
    expect(chef_run).to create_template('/etc/ssh/sshd_config')
      .with(mode: '0600')
      .with(owner: 'root')
      .with(group: 'root')
  end

  it 'enables the ssh server' do
    expect(chef_run).to enable_service('sshd')
  end

  it 'starts the server' do
    expect(chef_run).to start_service('sshd')
  end

  it 'creates the directory /etc/ssh' do
    expect(chef_run).to create_directory('/etc/ssh')
      .with(mode: '0755')
      .with(owner: 'root')
      .with(group: 'root')
  end

  # The [^#]* in these regexes restricts matching to the non-comment part
  # of the rendered MACs/KexAlgorithms/Ciphers lines.
  it 'disables weak hmacs' do
    expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
      .with_content(/MACs [^#]*\bhmac-sha1\b/)
  end

  it 'disables weak kexs' do
    expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
      .with_content(/KexAlgorithms [^#]*\bdiffie-hellman-group1-sha1\b/)
  end

  it 'disables cbc ciphers' do
    expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
      .with_content(/Ciphers [^#]*-cbc\b/)
  end

  it 'enables ctr ciphers' do
    expect(chef_run).to render_file('/etc/ssh/sshd_config')
      .with_content(/Ciphers [^#]*\baes128-ctr\b/)
      .with_content(/Ciphers [^#]*\baes192-ctr\b/)
      .with_content(/Ciphers [^#]*\baes256-ctr\b/)
  end

  context 'with weak hmacs enabled' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new do |node|
        node.set['ssh']['weak_hmac'] = true
      end.converge(described_recipe)
    end

    it 'allows weak hmacs' do
      expect(chef_run).to render_file('/etc/ssh/sshd_config')
        .with_content(/MACs [^#]*\bhmac-sha1\b/)
    end

    it 'still does not allow weak kexs' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/KexAlgorithms [^#]*\bdiffie-hellman-group1-sha1\b/)
    end

    # Typo fixed in the description: "doss" -> "does".
    it 'still does not allow cbc ciphers' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/Ciphers [^#]*-cbc\b/)
    end
  end

  context 'with weak kexs enabled' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new do |node|
        node.set['ssh']['weak_kex'] = true
      end.converge(described_recipe)
    end

    it 'allows weak kexs' do
      expect(chef_run).to render_file('/etc/ssh/sshd_config')
        .with_content(/KexAlgorithms [^#]*\bdiffie-hellman-group1-sha1\b/)
    end

    it 'still does not allow weak macs' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/MACs [^#]*\bhmac-sha1\b/)
    end

    it 'still does not allow cbc ciphers' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/Ciphers [^#]*-cbc\b/)
    end
  end

  context 'with cbc required' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new do |node|
        node.set['ssh']['cbc_required'] = true
      end.converge(described_recipe)
    end

    it 'allows cbc ciphers' do
      expect(chef_run).to render_file('/etc/ssh/sshd_config')
        .with_content(/Ciphers [^#]*\baes256-cbc\b/)
        .with_content(/Ciphers [^#]*\baes192-cbc\b/)
        .with_content(/Ciphers [^#]*\baes128-cbc\b/)
    end

    it 'still does not allow weak macs' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/MACs [^#]*\bhmac-sha1\b/)
    end

    it 'still does not allow weak kexs' do
      expect(chef_run).not_to render_file('/etc/ssh/sshd_config')
        .with_content(/KexAlgorithms [^#]*\bdiffie-hellman-group1-sha1\b/)
    end

    it 'still enables ctr ciphers' do
      expect(chef_run).to render_file('/etc/ssh/sshd_config')
        .with_content(/Ciphers [^#]*\baes128-ctr\b/)
        .with_content(/Ciphers [^#]*\baes192-ctr\b/)
        .with_content(/Ciphers [^#]*\baes256-ctr\b/)
    end
  end

  it 'restarts the ssh server on config changes' do
    resource = chef_run.template('/etc/ssh/sshd_config')
    expect(resource).to notify('service[sshd]').to(:restart).delayed
  end

  it 'creates .ssh directory for user root' do
    expect(chef_run).to create_directory('/root/.ssh')
      .with(mode: '0500')
      .with(owner: 'root')
      .with(group: 'root')
  end

  context 'without attribute allow_root_with_key' do
    it 'does not unlock root account' do
      expect(chef_run).to_not run_execute('unlock root account if it is locked')
    end
  end

  context 'with attribute allow_root_with_key' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new do |node|
        node.set['ssh']['allow_root_with_key'] = true
      end.converge(described_recipe)
    end

    it 'unlocks root account' do
      expect(chef_run).to run_execute('unlock root account if it is locked')
        .with(command: "sed 's/^root:\!/root:*/' /etc/shadow -i")
    end
  end

  context 'with users data bag' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new do |_node, server|
        server.create_data_bag(
          'users',
          'user1' => { id: 'user1', ssh_rootkey: 'key-user1' },
          'user2' => { id: 'user2', ssh_rootkey: 'key-user2' },
          'user3' => { id: 'user3', ssh_rootkeys: %w(key1-user3 key2-user3) },
          'user4' => { id: 'user4', ssh_rootkeys: %w(key1-user4) }
        )
      end.converge(described_recipe)
    end

    it 'creates authorized_keys for root' do
      expect(chef_run).to create_template('/root/.ssh/authorized_keys')
        .with(mode: '0400')
        .with(owner: 'root')
        .with(group: 'root')
    end

    it 'authorizes keys from the user data bag for root access' do
      expect(chef_run).to render_file('/root/.ssh/authorized_keys')
        .with_content(/^key-user1$/)
        .with_content(/^key-user2$/)
        .with_content(/^key1-user3$/)
        .with_content(/^key2-user3$/)
        .with_content(/^key1-user4$/)
    end
  end

  context 'without users data bag' do
    cached(:chef_run) do
      ChefSpec::ServerRunner.new.converge(described_recipe)
    end

    it 'does not raise an error' do
      expect { chef_run }.not_to raise_error
    end

    it 'does not touch authorized_keys by root' do
      expect(chef_run).to_not create_template('/root/.ssh/authorized_keys')
    end
  end
end
|
require 'spec_helper'
# Specs for Reel::WebSocket: handshake, URL/header access, frame reads and
# writes, close semantics, and the post-upgrade behaviour of the raw
# connection object.
describe Reel::WebSocket do
  include WebSocketHelpers

  let(:example_message) { "Hello, World!" }
  let(:another_message) { "What's going on?" }

  it "performs websocket handshakes" do
    with_socket_pair do |client, connection|
      client << handshake.to_data
      websocket = connection.request
      websocket.should be_a Reel::WebSocket
      handshake.errors.should be_empty
    end
  end

  it "knows its URL" do
    with_websocket_pair do |_, websocket|
      websocket.url.should == example_path
    end
  end

  it "knows its headers" do
    with_websocket_pair do |_, websocket|
      websocket['Host'].should == example_host
    end
  end

  it "reads frames" do
    with_websocket_pair do |client, websocket|
      client << WebSocket::Message.new(example_message).to_data
      client << WebSocket::Message.new(another_message).to_data
      websocket.read.should == example_message
      websocket.read.should == another_message
    end
  end

  it "writes messages" do
    with_websocket_pair do |client, websocket|
      websocket.write example_message
      websocket.write another_message
      parser = WebSocket::Parser.new
      # Keep feeding raw bytes into the parser until a complete frame decodes.
      parser.append client.readpartial(4096) until first_message = parser.next_message
      first_message.should == example_message
      parser.append client.readpartial(4096) until next_message = parser.next_message
      next_message.should == another_message
    end
  end

  it "closes" do
    with_websocket_pair do |_, websocket|
      websocket.should_not be_closed
      websocket.close
      websocket.should be_closed
    end
  end

  it "raises a RequestError when connection used after it was upgraded" do
    with_socket_pair do |client, connection|
      client << handshake.to_data
      remote_host = "localhost"
      connection.remote_host.should == remote_host
      websocket = connection.request
      websocket.should be_a Reel::WebSocket
      # After the upgrade the plain connection is no longer usable...
      lambda { connection.remote_host }.should raise_error(Reel::RequestError)
      # ...but the websocket retains the value captured before the upgrade.
      websocket.remote_host.should == remote_host
    end
  end

  # Helper: performs the handshake and yields the raw client socket plus the
  # upgraded Reel::WebSocket to the example.
  def with_websocket_pair
    with_socket_pair do |client, connection|
      client << handshake.to_data
      websocket = connection.request
      websocket.should be_a Reel::WebSocket
      # Discard handshake
      client.readpartial(4096)
      yield client, websocket
    end
  end
end
Fix WebSocket specs: loosen the remote_host assertions instead of requiring exact string equality
require 'spec_helper'
# Specs for Reel::WebSocket: handshake, URL/header access, frame reads and
# writes, close semantics, and the post-upgrade behaviour of the connection.
describe Reel::WebSocket do
  include WebSocketHelpers

  let(:example_message) { "Hello, World!" }
  let(:another_message) { "What's going on?" }

  it "performs websocket handshakes" do
    with_socket_pair do |client, connection|
      client << handshake.to_data
      websocket = connection.request
      websocket.should be_a Reel::WebSocket
      handshake.errors.should be_empty
    end
  end

  it "knows its URL" do
    with_websocket_pair do |_, websocket|
      websocket.url.should == example_path
    end
  end

  it "knows its headers" do
    with_websocket_pair do |_, websocket|
      websocket['Host'].should == example_host
    end
  end

  it "reads frames" do
    with_websocket_pair do |client, websocket|
      client << WebSocket::Message.new(example_message).to_data
      client << WebSocket::Message.new(another_message).to_data
      websocket.read.should == example_message
      websocket.read.should == another_message
    end
  end

  it "writes messages" do
    with_websocket_pair do |client, websocket|
      websocket.write example_message
      websocket.write another_message
      parser = WebSocket::Parser.new
      # Keep feeding raw bytes into the parser until a complete frame decodes.
      parser.append client.readpartial(4096) until first_message = parser.next_message
      first_message.should == example_message
      parser.append client.readpartial(4096) until next_message = parser.next_message
      next_message.should == another_message
    end
  end

  it "closes" do
    with_websocket_pair do |_, websocket|
      websocket.should_not be_closed
      websocket.close
      websocket.should be_closed
    end
  end

  it "raises a RequestError when connection used after it was upgraded" do
    with_socket_pair do |client, connection|
      client << handshake.to_data
      remote_host = "localhost"
      # BUGFIX: `actual.should =~ "localhost"` invokes String#=~ with a String
      # argument, which raises TypeError. Use the `match` matcher, which
      # accepts a string pattern, to keep the intended loose comparison.
      connection.remote_host.should match(remote_host)
      websocket = connection.request
      websocket.should be_a Reel::WebSocket
      lambda { connection.remote_host }.should raise_error(Reel::RequestError)
      websocket.remote_host.should match(remote_host)
    end
  end

  # Helper: performs the handshake and yields the raw client socket plus the
  # upgraded Reel::WebSocket to the example.
  def with_websocket_pair
    with_socket_pair do |client, connection|
      client << handshake.to_data
      websocket = connection.request
      websocket.should be_a Reel::WebSocket
      # Discard handshake
      client.readpartial(4096)
      yield client, websocket
    end
  end
end
|
require 'spec_helper'
# Specs for the RSpec::Testrail reporter: initialisation, client creation
# and the .process hook that pushes results to the TestRail HTTP API
# (stubbed with WebMock).
describe RSpec::Testrail do
  include Helpers

  # Run name/description are derived from the current git branch and commit
  # of the checkout executing the suite.
  let(:options) do
    Hash(
      url: 'http://test.site',
      user: 'test@test.site',
      password: '12345678',
      project_id: 111,
      suite_id: 222,
      run_name: `git rev-parse --abbrev-ref HEAD`.strip,
      run_description: `git rev-parse HEAD`.strip
    )
  end

  before(:each) do
    RSpec::Testrail.reset
    RSpec::Testrail.init(options)
  end

  it 'has a version number' do
    expect(RSpec::Testrail::VERSION).not_to be nil
  end

  describe '.init' do
    it 'sets options' do
      expect(RSpec::Testrail.options).to eq(options)
    end
  end

  describe '.client' do
    it 'returns example of Client' do
      expect(RSpec::Testrail.client).to be_a_kind_of(RSpec::Testrail::Client)
    end
  end

  describe '.process', testrail_id: 123 do
    before(:each) { WebMock.reset! }

    it 'gets all runs' do |example|
      stub_get_runs
      stub_update_run
      stub_add_result_for_case example.metadata[:testrail_id]
      RSpec::Testrail.process(example)
      expect(WebMock).to have_requested(:get, "#{options[:url]}/index.php?/api/v2/get_runs/\
#{options[:project_id]}").with(basic_auth: [options[:user], options[:password]])
    end

    it 'gets run with specified name' do |example|
      stub_get_runs
      stub_update_run
      stub_add_result_for_case example.metadata[:testrail_id]
      RSpec::Testrail.process(example)
      expect(WebMock).to have_requested(:post, "#{options[:url]}/index.php?/api/v2/update_run/\
#{site_runs[0][:id]}")
        .with(basic_auth: [options[:user], options[:password]],
              body: { description: options[:run_description] }.to_json)
    end

    # Pending example: behaviour when no run matches the requested name.
    it 'posts new run with specified name if none found'
  end
end
Refactor tests
require 'spec_helper'
# Specs for the RSpec::Testrail reporter: initialisation, client creation
# and the .process hook that pushes results to the TestRail HTTP API
# (stubbed with WebMock).
describe RSpec::Testrail do
  include Helpers

  # Run name/description are derived from the current git branch and commit
  # of the checkout executing the suite.
  let(:options) do
    Hash(
      url: 'http://test.site',
      user: 'test@test.site',
      password: '12345678',
      project_id: 111,
      suite_id: 222,
      run_name: `git rev-parse --abbrev-ref HEAD`.strip,
      run_description: `git rev-parse HEAD`.strip
    )
  end

  before(:each) do
    RSpec::Testrail.reset
    RSpec::Testrail.init(options)
  end

  it 'has a version number' do
    expect(RSpec::Testrail::VERSION).not_to be nil
  end

  describe '.init' do
    it 'sets options' do
      expect(RSpec::Testrail.options).to eq(options)
    end
  end

  describe '.client' do
    it 'returns example of Client' do
      expect(RSpec::Testrail.client).to be_a_kind_of(RSpec::Testrail::Client)
    end
  end

  describe '.process', testrail_id: 123 do
    before(:each) do |example|
      # BUGFIX: the refactor dropped the per-example WebMock.reset! that the
      # previous version had. Without it, requests recorded in one example
      # leak into the next and the have_requested assertions below can pass
      # or fail spuriously.
      WebMock.reset!
      stub_get_runs
      stub_update_run
      stub_add_result_for_case example.metadata[:testrail_id]
      RSpec::Testrail.process(example)
    end

    it 'gets all runs' do
      expect(WebMock).to have_requested(:get, "#{options[:url]}/index.php?/api/v2/get_runs/\
#{options[:project_id]}").with(basic_auth: [options[:user], options[:password]])
    end

    it 'gets run with specified name' do
      expect(WebMock).to have_requested(:post, "#{options[:url]}/index.php?/api/v2/update_run/\
#{site_runs[0][:id]}")
        .with(basic_auth: [options[:user], options[:password]],
              body: { description: options[:run_description] }.to_json)
    end

    # Pending example: behaviour when no run matches the requested name.
    it 'posts new run with specified name if none found'
  end
end
|
Add test for RSpec matcher
Our custom matcher doesn't have a test. This commit fixes that so we
can confidently work on it.
require 'spec_helper'
require 'govuk-content-schema-test-helpers/rspec_matchers'
# Specs for the custom be_valid_against_schema RSpec matcher: a payload
# matching the fixture 'minidisc' schema passes, a non-matching one fails.
describe GovukContentSchemaTestHelpers::RSpecMatchers do
  include GovukContentSchemaTestHelpers::RSpecMatchers

  before do
    # Point the helpers at the fixture schemas and select publisher schemas.
    ENV['GOVUK_CONTENT_SCHEMAS_PATH'] = fixture_path
    GovukContentSchemaTestHelpers.configure do |c|
      c.schema_type = 'publisher'
    end
  end

  describe "#be_valid_against_schema" do
    it "correctly tests valid schemas" do
      expect(
        format: "minidisc"
      ).to be_valid_against_schema('minidisc')
    end

    it "fails for invalid schemas" do
      expect(
        not_format: "not minidisc"
      ).to_not be_valid_against_schema('minidisc')
    end
  end
end
|
# Shared examples exercised against every Delayed::Job backend (@backend is
# set by the including spec): creation, enqueueing, deserialization errors,
# job discovery, locking, naming, prioritization and lock clearing.
shared_examples_for 'a backend' do
  # Convenience factory: create a SimpleJob with the given extra attributes.
  def create_job(opts = {})
    @backend.create(opts.merge(:payload_object => SimpleJob.new))
  end

  before do
    SimpleJob.runs = 0
  end

  it "should set run_at automatically if not set" do
    @backend.create(:payload_object => ErrorJob.new ).run_at.should_not be_nil
  end

  it "should not set run_at automatically if already set" do
    later = @backend.db_time_now + 5.minutes
    @backend.create(:payload_object => ErrorJob.new, :run_at => later).run_at.should be_close(later, 1)
  end

  it "should raise ArgumentError when handler doesn't respond_to :perform" do
    lambda { @backend.enqueue(Object.new) }.should raise_error(ArgumentError)
  end

  it "should increase count after enqueuing items" do
    @backend.enqueue SimpleJob.new
    @backend.count.should == 1
  end

  it "should be able to set priority when enqueuing items" do
    @job = @backend.enqueue SimpleJob.new, 5
    @job.priority.should == 5
  end

  it "should be able to set run_at when enqueuing items" do
    later = @backend.db_time_now + 5.minutes
    @job = @backend.enqueue SimpleJob.new, 5, later
    @job.run_at.should be_close(later, 1)
  end

  it "should work with jobs in modules" do
    M::ModuleJob.runs = 0
    job = @backend.enqueue M::ModuleJob.new
    lambda { job.invoke_job }.should change { M::ModuleJob.runs }.from(0).to(1)
  end

  it "should raise an DeserializationError when the job class is totally unknown" do
    job = @backend.new :handler => "--- !ruby/object:JobThatDoesNotExist {}"
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  it "should try to load the class when it is unknown at the time of the deserialization" do
    job = @backend.new :handler => "--- !ruby/object:JobThatDoesNotExist {}"
    job.should_receive(:attempt_to_load).with('JobThatDoesNotExist').and_return(true)
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  it "should try include the namespace when loading unknown objects" do
    job = @backend.new :handler => "--- !ruby/object:Delayed::JobThatDoesNotExist {}"
    job.should_receive(:attempt_to_load).with('Delayed::JobThatDoesNotExist').and_return(true)
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  it "should also try to load structs when they are unknown (raises TypeError)" do
    job = @backend.new :handler => "--- !ruby/struct:JobThatDoesNotExist {}"
    job.should_receive(:attempt_to_load).with('JobThatDoesNotExist').and_return(true)
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  it "should try include the namespace when loading unknown structs" do
    job = @backend.new :handler => "--- !ruby/struct:Delayed::JobThatDoesNotExist {}"
    job.should_receive(:attempt_to_load).with('Delayed::JobThatDoesNotExist').and_return(true)
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  describe "find_available" do
    it "should not find failed jobs" do
      @job = create_job :attempts => 50, :failed_at => @backend.db_time_now
      @backend.find_available('worker', 5, 1.second).should_not include(@job)
    end

    it "should not find jobs scheduled for the future" do
      @job = create_job :run_at => (@backend.db_time_now + 1.minute)
      @backend.find_available('worker', 5, 4.hours).should_not include(@job)
    end

    it "should not find jobs locked by another worker" do
      @job = create_job(:locked_by => 'other_worker', :locked_at => @backend.db_time_now - 1.minute)
      @backend.find_available('worker', 5, 4.hours).should_not include(@job)
    end

    it "should find open jobs" do
      @job = create_job
      @backend.find_available('worker', 5, 4.hours).should include(@job)
    end

    it "should find expired jobs" do
      @job = create_job(:locked_by => 'worker', :locked_at => @backend.db_time_now - 2.minutes)
      @backend.find_available('worker', 5, 1.minute).should include(@job)
    end

    it "should find own jobs" do
      @job = create_job(:locked_by => 'worker', :locked_at => (@backend.db_time_now - 1.minutes))
      @backend.find_available('worker', 5, 4.hours).should include(@job)
    end
  end

  context "when another worker is already performing an task, it" do
    before :each do
      @job = @backend.create :payload_object => SimpleJob.new, :locked_by => 'worker1', :locked_at => @backend.db_time_now - 5.minutes
    end

    it "should not allow a second worker to get exclusive access" do
      @job.lock_exclusively!(4.hours, 'worker2').should == false
    end

    it "should allow a second worker to get exclusive access if the timeout has passed" do
      @job.lock_exclusively!(1.minute, 'worker2').should == true
    end

    it "should be able to get access to the task if it was started more then max_age ago" do
      @job.locked_at = 5.hours.ago
      @job.save
      @job.lock_exclusively! 4.hours, 'worker2'
      @job.reload
      @job.locked_by.should == 'worker2'
      @job.locked_at.should > 1.minute.ago
    end

    it "should not be found by another worker" do
      @backend.find_available('worker2', 1, 6.minutes).length.should == 0
    end

    it "should be found by another worker if the time has expired" do
      @backend.find_available('worker2', 1, 4.minutes).length.should == 1
    end

    it "should be able to get exclusive access again when the worker name is the same" do
      @job.lock_exclusively!(5.minutes, 'worker1').should be_true
      @job.lock_exclusively!(5.minutes, 'worker1').should be_true
      @job.lock_exclusively!(5.minutes, 'worker1').should be_true
    end
  end

  context "when another worker has worked on a task since the job was found to be available, it" do
    before :each do
      @job = @backend.create :payload_object => SimpleJob.new
      @job_copy_for_worker_2 = @backend.find(@job.id)
    end

    it "should not allow a second worker to get exclusive access if already successfully processed by worker1" do
      @job.destroy
      @job_copy_for_worker_2.lock_exclusively!(4.hours, 'worker2').should == false
    end

    it "should not allow a second worker to get exclusive access if failed to be processed by worker1 and run_at time is now in future (due to backing off behaviour)" do
      @job.update_attributes(:attempts => 1, :run_at => 1.day.from_now)
      @job_copy_for_worker_2.lock_exclusively!(4.hours, 'worker2').should == false
    end
  end

  context "#name" do
    it "should be the class name of the job that was enqueued" do
      @backend.create(:payload_object => ErrorJob.new ).name.should == 'ErrorJob'
    end

    it "should be the method that will be called if its a performable method object" do
      @job = Story.send_later(:create)
      @job.name.should == "Story.create"
    end

    it "should be the instance method that will be called if its a performable method object" do
      @job = Story.create(:text => "...").send_later(:save)
      @job.name.should == 'Story#save'
    end
  end

  context "worker prioritization" do
    before(:each) do
      Delayed::Worker.max_priority = nil
      Delayed::Worker.min_priority = nil
    end

    # IMPROVED: the old version built an `ordered` flag in a manual index
    # loop and never checked that any jobs were returned at all, so an empty
    # or short result passed vacuously. Assert the size explicitly and use
    # each_cons(2) for the pairwise ordering check.
    it "should fetch jobs ordered by priority" do
      10.times { @backend.enqueue SimpleJob.new, rand(10) }
      jobs = @backend.find_available('worker', 10)
      jobs.size.should == 10
      jobs.each_cons(2) do |a, b|
        a.priority.should <= b.priority
      end
    end
  end

  context "clear_locks!" do
    before do
      @job = create_job(:locked_by => 'worker', :locked_at => @backend.db_time_now)
    end

    it "should clear locks for the given worker" do
      @backend.clear_locks!('worker')
      @backend.find_available('worker2', 5, 1.minute).should include(@job)
    end

    it "should not clear locks for other workers" do
      @backend.clear_locks!('worker1')
      @backend.find_available('worker1', 5, 1.minute).should_not include(@job)
    end
  end

  context "unlock" do
    before do
      @job = create_job(:locked_by => 'worker', :locked_at => @backend.db_time_now)
    end

    it "should clear locks" do
      @job.unlock
      @job.locked_by.should be_nil
      @job.locked_at.should be_nil
    end
  end
end
Clean up priority spec
# Shared examples exercised against every Delayed::Job backend (@backend is
# set by the including spec): creation, enqueueing, deserialization errors,
# job discovery, locking, naming, prioritization and lock clearing.
shared_examples_for 'a backend' do
  # Convenience factory: create a SimpleJob with the given extra attributes.
  def create_job(opts = {})
    @backend.create(opts.merge(:payload_object => SimpleJob.new))
  end

  before do
    SimpleJob.runs = 0
  end

  it "should set run_at automatically if not set" do
    @backend.create(:payload_object => ErrorJob.new ).run_at.should_not be_nil
  end

  it "should not set run_at automatically if already set" do
    later = @backend.db_time_now + 5.minutes
    @backend.create(:payload_object => ErrorJob.new, :run_at => later).run_at.should be_close(later, 1)
  end

  it "should raise ArgumentError when handler doesn't respond_to :perform" do
    lambda { @backend.enqueue(Object.new) }.should raise_error(ArgumentError)
  end

  it "should increase count after enqueuing items" do
    @backend.enqueue SimpleJob.new
    @backend.count.should == 1
  end

  it "should be able to set priority when enqueuing items" do
    @job = @backend.enqueue SimpleJob.new, 5
    @job.priority.should == 5
  end

  it "should be able to set run_at when enqueuing items" do
    later = @backend.db_time_now + 5.minutes
    @job = @backend.enqueue SimpleJob.new, 5, later
    @job.run_at.should be_close(later, 1)
  end

  it "should work with jobs in modules" do
    M::ModuleJob.runs = 0
    job = @backend.enqueue M::ModuleJob.new
    lambda { job.invoke_job }.should change { M::ModuleJob.runs }.from(0).to(1)
  end

  it "should raise an DeserializationError when the job class is totally unknown" do
    job = @backend.new :handler => "--- !ruby/object:JobThatDoesNotExist {}"
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  it "should try to load the class when it is unknown at the time of the deserialization" do
    job = @backend.new :handler => "--- !ruby/object:JobThatDoesNotExist {}"
    job.should_receive(:attempt_to_load).with('JobThatDoesNotExist').and_return(true)
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  it "should try include the namespace when loading unknown objects" do
    job = @backend.new :handler => "--- !ruby/object:Delayed::JobThatDoesNotExist {}"
    job.should_receive(:attempt_to_load).with('Delayed::JobThatDoesNotExist').and_return(true)
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  it "should also try to load structs when they are unknown (raises TypeError)" do
    job = @backend.new :handler => "--- !ruby/struct:JobThatDoesNotExist {}"
    job.should_receive(:attempt_to_load).with('JobThatDoesNotExist').and_return(true)
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  it "should try include the namespace when loading unknown structs" do
    job = @backend.new :handler => "--- !ruby/struct:Delayed::JobThatDoesNotExist {}"
    job.should_receive(:attempt_to_load).with('Delayed::JobThatDoesNotExist').and_return(true)
    lambda { job.payload_object.perform }.should raise_error(Delayed::Backend::DeserializationError)
  end

  describe "find_available" do
    it "should not find failed jobs" do
      @job = create_job :attempts => 50, :failed_at => @backend.db_time_now
      @backend.find_available('worker', 5, 1.second).should_not include(@job)
    end

    it "should not find jobs scheduled for the future" do
      @job = create_job :run_at => (@backend.db_time_now + 1.minute)
      @backend.find_available('worker', 5, 4.hours).should_not include(@job)
    end

    it "should not find jobs locked by another worker" do
      @job = create_job(:locked_by => 'other_worker', :locked_at => @backend.db_time_now - 1.minute)
      @backend.find_available('worker', 5, 4.hours).should_not include(@job)
    end

    it "should find open jobs" do
      @job = create_job
      @backend.find_available('worker', 5, 4.hours).should include(@job)
    end

    it "should find expired jobs" do
      @job = create_job(:locked_by => 'worker', :locked_at => @backend.db_time_now - 2.minutes)
      @backend.find_available('worker', 5, 1.minute).should include(@job)
    end

    it "should find own jobs" do
      @job = create_job(:locked_by => 'worker', :locked_at => (@backend.db_time_now - 1.minutes))
      @backend.find_available('worker', 5, 4.hours).should include(@job)
    end
  end

  context "when another worker is already performing an task, it" do
    before :each do
      @job = @backend.create :payload_object => SimpleJob.new, :locked_by => 'worker1', :locked_at => @backend.db_time_now - 5.minutes
    end

    it "should not allow a second worker to get exclusive access" do
      @job.lock_exclusively!(4.hours, 'worker2').should == false
    end

    it "should allow a second worker to get exclusive access if the timeout has passed" do
      @job.lock_exclusively!(1.minute, 'worker2').should == true
    end

    it "should be able to get access to the task if it was started more then max_age ago" do
      @job.locked_at = 5.hours.ago
      @job.save
      @job.lock_exclusively! 4.hours, 'worker2'
      @job.reload
      @job.locked_by.should == 'worker2'
      @job.locked_at.should > 1.minute.ago
    end

    it "should not be found by another worker" do
      @backend.find_available('worker2', 1, 6.minutes).length.should == 0
    end

    it "should be found by another worker if the time has expired" do
      @backend.find_available('worker2', 1, 4.minutes).length.should == 1
    end

    it "should be able to get exclusive access again when the worker name is the same" do
      @job.lock_exclusively!(5.minutes, 'worker1').should be_true
      @job.lock_exclusively!(5.minutes, 'worker1').should be_true
      @job.lock_exclusively!(5.minutes, 'worker1').should be_true
    end
  end

  context "when another worker has worked on a task since the job was found to be available, it" do
    before :each do
      @job = @backend.create :payload_object => SimpleJob.new
      @job_copy_for_worker_2 = @backend.find(@job.id)
    end

    it "should not allow a second worker to get exclusive access if already successfully processed by worker1" do
      @job.destroy
      @job_copy_for_worker_2.lock_exclusively!(4.hours, 'worker2').should == false
    end

    it "should not allow a second worker to get exclusive access if failed to be processed by worker1 and run_at time is now in future (due to backing off behaviour)" do
      @job.update_attributes(:attempts => 1, :run_at => 1.day.from_now)
      @job_copy_for_worker_2.lock_exclusively!(4.hours, 'worker2').should == false
    end
  end

  context "#name" do
    it "should be the class name of the job that was enqueued" do
      @backend.create(:payload_object => ErrorJob.new ).name.should == 'ErrorJob'
    end

    it "should be the method that will be called if its a performable method object" do
      @job = Story.send_later(:create)
      @job.name.should == "Story.create"
    end

    it "should be the instance method that will be called if its a performable method object" do
      @job = Story.create(:text => "...").send_later(:save)
      @job.name.should == 'Story#save'
    end
  end

  context "worker prioritization" do
    before(:each) do
      Delayed::Worker.max_priority = nil
      Delayed::Worker.min_priority = nil
    end

    it "should fetch jobs ordered by priority" do
      10.times { @backend.enqueue SimpleJob.new, rand(10) }
      jobs = @backend.find_available('worker', 10)
      # Asserting the size first guards against a vacuous pass on an
      # empty/short result; each_cons(2) checks pairwise ordering.
      jobs.size.should == 10
      jobs.each_cons(2) do |a, b|
        a.priority.should <= b.priority
      end
    end
  end

  context "clear_locks!" do
    before do
      @job = create_job(:locked_by => 'worker', :locked_at => @backend.db_time_now)
    end

    it "should clear locks for the given worker" do
      @backend.clear_locks!('worker')
      @backend.find_available('worker2', 5, 1.minute).should include(@job)
    end

    it "should not clear locks for other workers" do
      @backend.clear_locks!('worker1')
      @backend.find_available('worker1', 5, 1.minute).should_not include(@job)
    end
  end

  context "unlock" do
    before do
      @job = create_job(:locked_by => 'worker', :locked_at => @backend.db_time_now)
    end

    it "should clear locks" do
      @job.unlock
      @job.locked_by.should be_nil
      @job.locked_at.should be_nil
    end
  end
end
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
# Specs for the Sinatra::Warden extension: login/logout flow, referrer-based
# redirects, the authorize!/user/logged_in? helpers, Rack::Flash integration
# and (pending) OAuth support. Runs against the TestingLogin Rack apps
# defined in spec_helper.
describe "Sinatra::Warden" do
  include Warden::Test::Helpers

  before(:each) do
    @user = User.create(:email => 'justin.smestad@gmail.com', :password => 'thedude')
  end

  after{ Warden.test_reset! }

  # Re-fetch the fixture user created in the before hook.
  def registered_user
    User.first(:email => 'justin.smestad@gmail.com')
  end

  it "should be a valid user" do
    @user.new?.should be_falsey
  end

  it "should create successfully" do
    @user.password.should == "thedude"
    User.authenticate('justin.smestad@gmail.com', 'thedude').should == @user
  end

  context "the authentication system" do
    it "should allow us to login as that user" do
      post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
      last_request.env['warden'].authenticated?.should == true
    end

    it "should allow us to logout after logging in" do
      post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
      last_request.env['warden'].authenticated?.should == true
      get '/logout'
      last_request.env['warden'].authenticated?.should == false
    end

    context "auth_use_referrer is disabled" do
      it "should not store :return_to" do
        get '/dashboard'
        follow_redirect!
        last_request.session[:return_to].should be_nil
      end

      it "should redirect to a default success URL" do
        get '/dashboard'
        follow_redirect!
        post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
        follow_redirect!
        last_request.path.should == '/welcome'
      end
    end

    context "when auth_use_referrer is set to true" do
      # Swap in the app variant that has auth_use_referrer enabled.
      def app; app_with_referrer; end

      it "should store referrer in user's session" do
        get '/dashboard'
        last_request.session[:return_to].should == "/dashboard"
      end

      it "should redirect to stored return_to URL" do
        get '/dashboard'
        last_request.session[:return_to].should == '/dashboard'
        login_as registered_user
        last_request.path.should == '/dashboard'
      end

      it "should remove :return_to from session" do
        get '/dashboard'
        follow_redirect!
        post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
        follow_redirect!
        last_request.session[:return_to].should be_nil
      end

      it "should default to :auth_success_path if there wasn't a return_to" do
        post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
        follow_redirect!
        last_request.path.should == '/welcome'
      end
    end

    context "TestingLoginAsRackApp" do
      def app; @app ||= TestingLoginAsRackApp; end

      # what happens here is you'll eventually get
      # "stack too deep" error if the following test fails
      it "should not get in a loop" do
        post '/login', :email => 'bad', :password => 'password'
        last_request.env['warden.options'][:action].should == 'unauthenticated'
      end
    end
  end

  context "the helpers" do
    context "the authorize! helper" do
      it "should redirect to root (default) if not logged in" do
        get '/admin'
        follow_redirect!
        last_request.url.should == 'http://example.org/'
      end

      it "should redirect to the passed path if available" do
        get '/dashboard'
        follow_redirect!
        last_request.url.should == 'http://example.org/login'
      end

      it "should allow access if user is logged in" do
        login_as registered_user
        get '/dashboard'
        last_response.body.should == "My Dashboard"
      end
    end

    context "the user helper" do
      it "should be aliased to current_user" do
        login_as registered_user
        get '/admin'
        last_response.body.should == "Welcome #{@user.email}"
      end

      it "should allow assignment of the user (user=)" do
        login_as registered_user
        get '/dashboard'
        last_request.env['warden'].user.should == @user
        john = User.create(:email => 'john.doe@hotmail.com', :password => 'secret')
        login_as john
        get '/dashboard'
        last_request.env['warden'].user.should == john
      end

      it "should return the current logged in user" do
        login_as registered_user
        get '/account'
        last_response.body.should == "#{@user.email}'s account page"
      end
    end

    context "the logged_in/authenticated? helper" do
      it "should be aliased as logged_in?" do
        login_as registered_user
        get '/check_login'
        last_response.body.should == "Hello Moto"
      end

      it "should return false when a user is not authenticated" do
        login_as registered_user
        get '/logout'
        last_request.env['warden'].authenticated?.should be_falsey
        get '/check_login'
        last_response.body.should == "Get out!"
      end
    end

    context "the warden helper" do
      it "returns the environment variables from warden" do
        get '/warden'
        last_response.body.should_not be_nil
      end
    end
  end

  context "Rack::Flash integration" do
    it "should return a success message" do
      post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
      last_request.env['x-rack.flash'][:success].should == "You have logged in successfully."
    end

    it "should return an error message" do
      post '/login', 'email' => 'bad', 'password' => 'wrong'
      last_request.env['x-rack.flash'][:error].should == "Could not log you in."
    end
  end

  context "OAuth support" do
    context "when enabled" do
      before do
        # NOTE(review): setup is commented out, so the xit examples below
        # are intentionally pending until OAuth support is testable.
        #TestingLogin.set(:auth_use_oauth, true)
        #@app = app
      end

      xit "should redirect to authorize_url" do
        get '/login'
        follow_redirect!
        last_request.url.should == "http://twitter.com/oauth/authorize"
      end

      xit "should redirect to a custom authorize_url, if set" do
        get '/login'
        follow_redirect!
        last_request.url.should == "http://facebook.com"
      end
    end
  end
end
Use Transpec to convert should-style assertions to the expect-style syntax
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "Sinatra::Warden" do
  include Warden::Test::Helpers

  # Create a fresh user for every example; reset Warden's test state after each.
  before(:each) do
    @user = User.create(:email => 'justin.smestad@gmail.com', :password => 'thedude')
  end
  after{ Warden.test_reset! }

  # Re-fetches the user created in the before hook.
  def registered_user
    User.first(:email => 'justin.smestad@gmail.com')
  end

  it "should be a valid user" do
    expect(@user.new?).to be_falsey
  end

  it "should create successfully" do
    expect(@user.password).to eq("thedude")
    expect(User.authenticate('justin.smestad@gmail.com', 'thedude')).to eq(@user)
  end

  context "the authentication system" do
    it "should allow us to login as that user" do
      post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
      expect(last_request.env['warden'].authenticated?).to eq(true)
    end

    it "should allow us to logout after logging in" do
      post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
      expect(last_request.env['warden'].authenticated?).to eq(true)
      get '/logout'
      expect(last_request.env['warden'].authenticated?).to eq(false)
    end

    context "auth_use_referrer is disabled" do
      it "should not store :return_to" do
        get '/dashboard'
        follow_redirect!
        expect(last_request.session[:return_to]).to be_nil
      end

      it "should redirect to a default success URL" do
        get '/dashboard'
        follow_redirect!
        post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
        follow_redirect!
        expect(last_request.path).to eq('/welcome')
      end
    end

    context "when auth_use_referrer is set to true" do
      # Swap in the app variant configured with auth_use_referrer enabled.
      def app; app_with_referrer; end

      it "should store referrer in user's session" do
        get '/dashboard'
        expect(last_request.session[:return_to]).to eq("/dashboard")
      end

      it "should redirect to stored return_to URL" do
        get '/dashboard'
        expect(last_request.session[:return_to]).to eq('/dashboard')
        login_as registered_user
        expect(last_request.path).to eq('/dashboard')
      end

      it "should remove :return_to from session" do
        get '/dashboard'
        follow_redirect!
        post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
        follow_redirect!
        expect(last_request.session[:return_to]).to be_nil
      end

      it "should default to :auth_success_path if there wasn't a return_to" do
        post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
        follow_redirect!
        expect(last_request.path).to eq('/welcome')
      end
    end

    context "TestingLoginAsRackApp" do
      def app; @app ||= TestingLoginAsRackApp; end

      # what happens here is you'll eventually get
      # "stack too deep" error if the following test fails
      it "should not get in a loop" do
        post '/login', :email => 'bad', :password => 'password'
        expect(last_request.env['warden.options'][:action]).to eq('unauthenticated')
      end
    end
  end

  context "the helpers" do
    context "the authorize! helper" do
      it "should redirect to root (default) if not logged in" do
        get '/admin'
        follow_redirect!
        expect(last_request.url).to eq('http://example.org/')
      end

      it "should redirect to the passed path if available" do
        get '/dashboard'
        follow_redirect!
        expect(last_request.url).to eq('http://example.org/login')
      end

      it "should allow access if user is logged in" do
        login_as registered_user
        get '/dashboard'
        expect(last_response.body).to eq("My Dashboard")
      end
    end

    context "the user helper" do
      it "should be aliased to current_user" do
        login_as registered_user
        get '/admin'
        expect(last_response.body).to eq("Welcome #{@user.email}")
      end

      it "should allow assignment of the user (user=)" do
        login_as registered_user
        get '/dashboard'
        expect(last_request.env['warden'].user).to eq(@user)
        john = User.create(:email => 'john.doe@hotmail.com', :password => 'secret')
        login_as john
        get '/dashboard'
        expect(last_request.env['warden'].user).to eq(john)
      end

      it "should return the current logged in user" do
        login_as registered_user
        get '/account'
        expect(last_response.body).to eq("#{@user.email}'s account page")
      end
    end

    context "the logged_in/authenticated? helper" do
      it "should be aliased as logged_in?" do
        login_as registered_user
        get '/check_login'
        expect(last_response.body).to eq("Hello Moto")
      end

      it "should return false when a user is not authenticated" do
        login_as registered_user
        get '/logout'
        expect(last_request.env['warden'].authenticated?).to be_falsey
        get '/check_login'
        expect(last_response.body).to eq("Get out!")
      end
    end

    context "the warden helper" do
      it "returns the environment variables from warden" do
        get '/warden'
        expect(last_response.body).not_to be_nil
      end
    end
  end

  context "Rack::Flash integration" do
    it "should return a success message" do
      post '/login', 'email' => 'justin.smestad@gmail.com', 'password' => 'thedude'
      expect(last_request.env['x-rack.flash'][:success]).to eq("You have logged in successfully.")
    end

    it "should return an error message" do
      post '/login', 'email' => 'bad', 'password' => 'wrong'
      expect(last_request.env['x-rack.flash'][:error]).to eq("Could not log you in.")
    end
  end

  context "OAuth support" do
    context "when enabled" do
      # NOTE(review): setup is commented out and the examples below are
      # disabled (xit) until the OAuth configuration is restored.
      before do
        #TestingLogin.set(:auth_use_oauth, true)
        #@app = app
      end

      xit "should redirect to authorize_url" do
        get '/login'
        follow_redirect!
        expect(last_request.url).to eq("http://twitter.com/oauth/authorize")
      end

      xit "should redirect to a custom authorize_url, if set" do
        get '/login'
        follow_redirect!
        expect(last_request.url).to eq("http://facebook.com")
      end
    end
  end
end
|
# frozen_string_literal: true

# RSpec shared context, activated via `log_capture: true` example metadata,
# that redirects log4j 1.x output into an in-memory stream so examples can
# assert on what was logged.
shared_context 'log capture', log_capture: true do
  # The log output captured so far, converted to a Ruby String.
  let(:log_capture) { @log_stream.toString }

  before do
    # In-memory sink; all existing appenders on the root logger are replaced
    # so captured output is exactly what this example logs.
    @log_stream = Java::java.io.ByteArrayOutputStream.new
    root_log4j_logger = Log4jruby::Logger.root.log4j_logger
    root_log4j_logger.removeAllAppenders
    # Pattern pulls fileName/methodName/lineNumber from the MDC
    # (presumably populated by Log4jruby — confirm against the logger setup).
    layout = Java::org.apache.log4j.PatternLayout
      .new('%5p %.50X{fileName} %X{methodName}:%X{lineNumber} - %m%n')
    appender = Java::org.apache.log4j.WriterAppender.new(layout, @log_stream)
    appender.setImmediateFlush(true)
    root_log4j_logger.addAppender(appender)
  end
end
chore: Update log4j2 implementation for testing log_capture
# frozen_string_literal: true

# RSpec shared context, activated via `log_capture: true` example metadata,
# that attaches an in-memory log4j2 appender so examples can assert on what
# was logged.
shared_context 'log capture', log_capture: true do
  # In-memory sink the appender writes into.
  let(:log_stream) { Java::java.io.ByteArrayOutputStream.new }
  # The log output captured so far, converted to a Ruby String.
  let(:log_capture) { log_stream.toString }

  before do
    # Pattern pulls fileName/methodName/lineNumber from the MDC/ThreadContext.
    layout = Java::org.apache.logging.log4j.core.layout.PatternLayout.newBuilder
      .withPattern('%5p %.50X{fileName} %X{methodName}:%X{lineNumber} - %m%n').build
    appender = Java::org.apache.logging.log4j.core.appender.OutputStreamAppender
      .newBuilder.setName('memory')
      .setImmediateFlush(true)
      .withLayout(layout).setTarget(log_stream).build
    # log4j2 logger attributes are inherited from config only
    # i.e. loggers will not inherit properties from private config on rootLogger
    root_config = Java::org.apache.logging.log4j.LogManager.rootLogger.get
    root_config.level = Java::org.apache.logging.log4j.Level::DEBUG
    # nil level/filter: the appender accepts whatever the logger config allows.
    root_config.addAppender(appender, nil, nil)
  end
end
|
module ResourceRepresentations
# Factory: picks the Representation subclass for +object+.
# ActiveRecord models get ActiveRecordRepresentation; otherwise we look up
# "<ClassName>Representation" in this namespace, falling back to
# DefaultRepresentation when no such class is defined.
#
# object   - the value to wrap
# template - the view/template context used by the HTML helpers
# name     - attribute name this value was reached through (optional)
# parent   - parent representation, used to build nested field names (optional)
def representation_for(object, template, name = nil, parent = nil)
  representation_class =
    if object.is_a?(ActiveRecord::Base)
      ActiveRecordRepresentation
    else
      begin
        "ResourceRepresentations::#{object.class.to_s.demodulize}Representation".constantize
      rescue NameError
        # Only a missing constant should trigger the fallback; the previous
        # inline `rescue` modifier silently swallowed every StandardError.
        DefaultRepresentation
      end
    end
  representation_class.new(object, template, name, parent)
end
module_function :representation_for
# Base wrapper around a value, exposing HTML form-helper methods.
# Subclasses specialise rendering per value type.
class Representation
  def initialize(value, template, name = nil, parent = nil)
    @value = value       # the wrapped object/attribute value
    @name = name         # attribute name used for HTML id/name attributes
    @template = template # view context (ActionView) for capture/concat helpers
    @parent = parent     # parent representation, for nested field names
  end

  # NOTE(review): returns the raw wrapped value itself, not a database id.
  def id
    @value
  end

  # HTML-escaped string form of the wrapped value.
  def to_s
    ERB::Util::h(@value.to_s)
  end

  # Yields self when a block is supplied; returns the block's result
  # (nil when no block is given).
  def with_block(&block)
    yield self if block_given?
  end

  # Lazily defines an accessor that wraps @value.<method_name> in its own
  # representation, memoised in @__<method_name>.
  # WARNING: method_name is interpolated into eval'd source; never feed
  # untrusted input through here.
  def method_missing(method_name, *args, &block)
    method = <<-EOF
      def #{method_name}(*args, &block)
        @__#{method_name} ||= ResourceRepresentations.representation_for(@value.#{method_name}, @template, "#{method_name}", self)
        @__#{method_name}.with_block(&block)
        @__#{method_name} if block.nil?
      end
    EOF
    # Define the method on the receiver's own class. The previous hard-coded
    # ActiveRecordRepresentation target meant any other subclass re-entered
    # method_missing forever (infinite recursion / stack overflow).
    self.class.class_eval(method, __FILE__, __LINE__)
    self.__send__(method_name, &block)
  end

  # Keep respond_to? consistent with method_missing's catch-all delegation.
  def respond_to_missing?(_method_name, _include_private = false)
    true
  end

  # Renders a <label> tag for this attribute.
  # +value+ defaults to the humanised attribute name when nil/omitted.
  def label(value = nil, html_options = {})
    tree = get_parents_tree
    for_attr_value = tree.join('_')
    tags = get_tags(html_options, { :for => for_attr_value })
    value = ERB::Util::h(@name.humanize) if value.nil?
    %Q{<label #{tags}>#{value}</label>}
  end

  protected

  # Walks the @parent links and returns the attribute names from the root
  # down to this node, e.g. ["user", "profile", "name"].
  def get_parents_tree
    children_names = [@name]
    parent = @parent
    until parent.nil?
      children_names.push(parent.instance_variable_get(:@name))
      parent = parent.instance_variable_get(:@parent)
    end
    children_names.reverse
  end

  # Builds a Rails-style nested param name: user[profile][name].
  # Mutates +tree+ (shifts its head); callers pass a throwaway array.
  def get_html_name_attribute_value(tree)
    name = tree.delete_at(0)
    tree.each { |x| name += "[" + x + "]" }
    name
  end

  # Merges user options over defaults and renders them as `key="value" `
  # attribute pairs, joined into one String. The old code returned the Array
  # itself, which interpolates as its #inspect form on Ruby >= 1.9.
  def get_tags(user_options, base_options)
    base_options.merge!(user_options)
    base_options.stringify_keys!
    base_options.map { |key, value| %(#{key}="#{value}" ) }.join
  end
end
# Fallback representation: renders plain HTML form controls for scalar values.
class DefaultRepresentation < Representation
  def text_field(html_options = {})
    input_field("text", html_options)
  end

  def text_area(html_options = {})
    tree = get_parents_tree
    id_attr_value = tree.join('_')
    tags = get_tags(html_options, {:id => id_attr_value, :name => get_html_name_attribute_value(tree)})
    %Q{<textarea #{tags}>#{to_s}</textarea>}
  end

  def password_field(html_options = {})
    input_field("password", html_options)
  end

  def radio_button(value, html_options = {})
    tree = get_parents_tree
    id_attr_value = tree.join('_') + "_#{value}"
    name_attr_value = get_html_name_attribute_value(tree)
    # NOTE(review): assumes @value and value respond to #capitalize (Strings);
    # the checked attribute is emitted as the literal "true"/"false" text.
    tags = get_tags(html_options, {:name => name_attr_value, :value=>value, :id=>id_attr_value, :checked=>"#{@value.capitalize==value.capitalize}"})
    %Q{<input type="radio" #{tags}/>}
  end

  # Label matching a radio button's generated id (<base>_<value>).
  def radio_button_label(radio_button_value, value = nil, html_options = {})
    tree = get_parents_tree
    for_attr_value = tree.join('_') + "_#{radio_button_value}"
    value = radio_button_value.capitalize if value.nil?
    tags = get_tags(html_options, {:for => for_attr_value})
    %Q{<label #{tags}>#{ERB::Util::h(value)}</label>}
  end

  private

  # Shared markup for single-line <input> controls; text and password fields
  # were copy-paste identical apart from the type attribute.
  def input_field(type, html_options)
    tree = get_parents_tree
    id_attr_value = tree.join('_')
    tags = get_tags(html_options, {:value => to_s, :id => id_attr_value, :name => get_html_name_attribute_value(tree)})
    %Q{<input type="#{type}" #{tags}/>}
  end
end
# Null-object representation: absorbs any message and renders nothing.
class NilClassRepresentation < Representation
  # Every unknown message simply returns the null object itself,
  # so chained attribute access on nil never raises.
  def method_missing(_method_name, *_args)
    self
  end

  # Deliberately ignores any given block (returns nil).
  def with_block(&block)
  end

  # Renders as an empty string.
  def to_s
    ''
  end
end
# Representation for ActiveRecord models; adds a form-builder entry point.
class ActiveRecordRepresentation < Representation
  # Renders a <form> wrapping the captured block output, writing directly to
  # the template's output buffer. Raises when no block is given, since the
  # form body comes from the block.
  # NOTE(review): relies on @template (ActionView) #capture/#concat/#form_tag;
  # the closing </form> is emitted manually — presumably form_tag here only
  # opens the tag. Confirm against the Rails version in use.
  def form(&block)
    raise "You need to provide block to form representation" unless block_given?
    content = @template.capture(self, &block)
    @template.concat(@template.form_tag(@value))
    @template.concat(content)
    @template.concat("</form>")
    self
  end
end
# Representation for time values; renders a Rails date select.
class TimeWithZoneRepresentation < Representation
  # Renders @template.date_select for the wrapped time.
  # passed_options override the :defaults derived from @value's day/month/year;
  # html_options are forwarded untouched.
  def select(passed_options = {}, html_options = {})
    options = {:defaults => {:day => @value.day, :month => @value.month, :year => @value.year}}
    options.merge!(passed_options)
    tree = get_parents_tree
    # Drop this attribute's own name: date_select takes the object name
    # (e.g. user[profile]) and the attribute name (@name) separately.
    tree.pop
    name = get_html_name_attribute_value(tree)
    @template.date_select(name, @name, options, html_options)
  end
end
end
Added hidden_field, file_field and check_box, but they still need testing in
the view
module ResourceRepresentations
# Factory: picks the Representation subclass for +object+.
# ActiveRecord models get ActiveRecordRepresentation; otherwise we look up
# "<ClassName>Representation" in this namespace, falling back to
# DefaultRepresentation when no such class is defined.
#
# object   - the value to wrap
# template - the view/template context used by the HTML helpers
# name     - attribute name this value was reached through (optional)
# parent   - parent representation, used to build nested field names (optional)
def representation_for(object, template, name = nil, parent = nil)
  representation_class =
    if object.is_a?(ActiveRecord::Base)
      ActiveRecordRepresentation
    else
      begin
        "ResourceRepresentations::#{object.class.to_s.demodulize}Representation".constantize
      rescue NameError
        # Only a missing constant should trigger the fallback; the previous
        # inline `rescue` modifier silently swallowed every StandardError.
        DefaultRepresentation
      end
    end
  representation_class.new(object, template, name, parent)
end
module_function :representation_for
# Base wrapper around a value, exposing HTML form-helper methods.
# Subclasses specialise rendering per value type.
class Representation
  def initialize(value, template, name = nil, parent = nil)
    @value = value       # the wrapped object/attribute value
    @name = name         # attribute name used for HTML id/name attributes
    @template = template # view context (ActionView) for capture/concat helpers
    @parent = parent     # parent representation, for nested field names
  end

  # NOTE(review): returns the raw wrapped value itself, not a database id.
  def id
    @value
  end

  # HTML-escaped string form of the wrapped value.
  def to_s
    ERB::Util::h(@value.to_s)
  end

  # Yields self when a block is supplied; returns the block's result
  # (nil when no block is given).
  def with_block(&block)
    yield self if block_given?
  end

  # Lazily defines an accessor that wraps @value.<method_name> in its own
  # representation, memoised in @__<method_name>.
  # WARNING: method_name is interpolated into eval'd source; never feed
  # untrusted input through here.
  def method_missing(method_name, *args, &block)
    method = <<-EOF
      def #{method_name}(*args, &block)
        @__#{method_name} ||= ResourceRepresentations.representation_for(@value.#{method_name}, @template, "#{method_name}", self)
        @__#{method_name}.with_block(&block)
        @__#{method_name} if block.nil?
      end
    EOF
    # Define the method on the receiver's own class. The previous hard-coded
    # ActiveRecordRepresentation target meant any other subclass re-entered
    # method_missing forever (infinite recursion / stack overflow).
    self.class.class_eval(method, __FILE__, __LINE__)
    self.__send__(method_name, &block)
  end

  # Keep respond_to? consistent with method_missing's catch-all delegation.
  def respond_to_missing?(_method_name, _include_private = false)
    true
  end

  # Renders a <label> tag for this attribute.
  # +value+ defaults to the humanised attribute name when nil/omitted.
  def label(value = nil, html_options = {})
    tree = get_parents_tree
    for_attr_value = tree.join('_')
    tags = get_tags(html_options, { :for => for_attr_value })
    value = ERB::Util::h(@name.humanize) if value.nil?
    %Q{<label #{tags}>#{value}</label>}
  end

  protected

  # Walks the @parent links and returns the attribute names from the root
  # down to this node, e.g. ["user", "profile", "name"].
  def get_parents_tree
    children_names = [@name]
    parent = @parent
    until parent.nil?
      children_names.push(parent.instance_variable_get(:@name))
      parent = parent.instance_variable_get(:@parent)
    end
    children_names.reverse
  end

  # Builds a Rails-style nested param name: user[profile][name].
  # Mutates +tree+ (shifts its head); callers pass a throwaway array.
  def get_html_name_attribute_value(tree)
    name = tree.delete_at(0)
    tree.each { |x| name += "[" + x + "]" }
    name
  end

  # Merges user options over defaults and renders them as `key="value" `
  # attribute pairs, joined into one String. The old code returned the Array
  # itself, which interpolates as its #inspect form on Ruby >= 1.9.
  def get_tags(user_options, base_options)
    base_options.merge!(user_options)
    base_options.stringify_keys!
    base_options.map { |key, value| %(#{key}="#{value}" ) }.join
  end
end
# Fallback representation: renders plain HTML form controls for scalar values.
class DefaultRepresentation < Representation
  # NOTE(review): not yet exercised from a view template.
  def check_box(checked_value = "1", unchecked_value = "0", html_options = {})
    tree = get_parents_tree
    id_attr_value = tree.join('_')
    name_attr_value = get_html_name_attribute_value(tree)
    tags = get_tags(html_options, {:value => checked_value, :id => id_attr_value, :name=>name_attr_value})
    # The hidden companion input mirrors Rails' check_box helper so an
    # unchecked box still submits +unchecked_value+.
    %Q{<input type="checkbox" #{tags}/>\n<input type="hidden" value="#{unchecked_value}" id="#{id_attr_value}" name="#{name_attr_value}"/>}
  end

  # NOTE(review): not yet exercised from a view template.
  def file_field(html_options = {})
    input_field("file", html_options)
  end

  # NOTE(review): not yet exercised from a view template.
  def hidden_field(html_options = {})
    input_field("hidden", html_options)
  end

  def text_field(html_options = {})
    input_field("text", html_options)
  end

  def text_area(html_options = {})
    tree = get_parents_tree
    id_attr_value = tree.join('_')
    tags = get_tags(html_options, {:id => id_attr_value, :name => get_html_name_attribute_value(tree)})
    %Q{<textarea #{tags}>\n#{to_s}\n</textarea>}
  end

  def password_field(html_options = {})
    input_field("password", html_options)
  end

  def radio_button(value, html_options = {})
    tree = get_parents_tree
    id_attr_value = tree.join('_') + "_#{value}"
    name_attr_value = get_html_name_attribute_value(tree)
    # NOTE(review): assumes @value and value respond to #capitalize (Strings);
    # the checked attribute is emitted as the literal "true"/"false" text.
    tags = get_tags(html_options, {:name => name_attr_value, :value=>value, :id=>id_attr_value, :checked=>"#{@value.capitalize==value.capitalize}"})
    %Q{<input type="radio" #{tags}/>}
  end

  # Label matching a radio button's generated id (<base>_<value>).
  def radio_button_label(radio_button_value, value = nil, html_options = {})
    tree = get_parents_tree
    for_attr_value = tree.join('_') + "_#{radio_button_value}"
    value = radio_button_value.capitalize if value.nil?
    tags = get_tags(html_options, {:for => for_attr_value})
    %Q{<label #{tags}>#{ERB::Util::h(value)}</label>}
  end

  private

  # Shared markup for single-line <input> controls; text, password, hidden and
  # file fields were copy-paste identical apart from the type attribute.
  def input_field(type, html_options)
    tree = get_parents_tree
    id_attr_value = tree.join('_')
    tags = get_tags(html_options, {:value => to_s, :id => id_attr_value, :name => get_html_name_attribute_value(tree)})
    %Q{<input type="#{type}" #{tags}/>}
  end
end
# Null-object representation: absorbs any message and renders nothing.
class NilClassRepresentation < Representation
  # Every unknown message simply returns the null object itself,
  # so chained attribute access on nil never raises.
  def method_missing(_method_name, *_args)
    self
  end

  # Deliberately ignores any given block (returns nil).
  def with_block(&block)
  end

  # Renders as an empty string.
  def to_s
    ''
  end
end
# Representation for ActiveRecord models; adds a form-builder entry point.
class ActiveRecordRepresentation < Representation
  # Renders a <form> wrapping the captured block output, writing directly to
  # the template's output buffer. Raises when no block is given, since the
  # form body comes from the block.
  # NOTE(review): relies on @template (ActionView) #capture/#concat/#form_tag;
  # the closing </form> is emitted manually — presumably form_tag here only
  # opens the tag. Confirm against the Rails version in use.
  def form(&block)
    raise "You need to provide block to form representation" unless block_given?
    content = @template.capture(self, &block)
    @template.concat(@template.form_tag(@value))
    @template.concat(content)
    @template.concat("</form>")
    self
  end
end
# Representation for time values; renders a Rails date select.
class TimeWithZoneRepresentation < Representation
  # Renders @template.date_select for the wrapped time.
  # passed_options override the :defaults derived from @value's day/month/year;
  # html_options are forwarded untouched.
  def select(passed_options = {}, html_options = {})
    options = {:defaults => {:day => @value.day, :month => @value.month, :year => @value.year}}
    options.merge!(passed_options)
    tree = get_parents_tree
    # Drop this attribute's own name: date_select takes the object name
    # (e.g. user[profile]) and the attribute name (@name) separately.
    tree.pop
    name = get_html_name_attribute_value(tree)
    @template.date_select(name, @name, options, html_options)
  end
end
end
|
require 'spec_helper'
describe CheckGcpProjectBillingWorker do
  describe '.perform' do
    let(:token) { 'bogustoken' }

    subject { described_class.new.perform('token_key') }

    context 'when there is a token in redis' do
      before do
        allow_any_instance_of(described_class).to receive(:get_token).and_return(token)
      end

      context 'when there is no lease' do
        before do
          # An exclusive lease is obtained, so the billing check may run.
          allow_any_instance_of(described_class).to receive(:try_obtain_lease_for).and_return('randomuuid')
        end

        it 'calls the service' do
          expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double])
          subject
        end

        it 'stores billing status in redis' do
          redis_double = double
          expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double])
          expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis_double)
          expect(redis_double).to receive(:set).with(described_class.redis_shared_state_key_for(token), anything)
          subject
        end
      end

      context 'when there is a lease' do
        before do
          # Another worker holds the lease; this run must be a no-op.
          allow_any_instance_of(described_class).to receive(:try_obtain_lease_for).and_return(false)
        end

        it 'does not call the service' do
          expect(CheckGcpProjectBillingService).not_to receive(:new)
          subject
        end
      end
    end

    context 'when there is no token in redis' do
      before do
        allow_any_instance_of(described_class).to receive(:get_token).and_return(nil)
      end

      it 'does not call the service' do
        expect(CheckGcpProjectBillingService).not_to receive(:new)
        subject
      end
    end
  end
end
Separate the let and subject lines in the spec
require 'spec_helper'
describe CheckGcpProjectBillingWorker do
  describe '.perform' do
    let(:token) { 'bogustoken' }

    subject { described_class.new.perform('token_key') }

    context 'when there is a token in redis' do
      before do
        allow_any_instance_of(described_class).to receive(:get_token).and_return(token)
      end

      context 'when there is no lease' do
        before do
          # An exclusive lease is obtained, so the billing check may run.
          allow_any_instance_of(described_class).to receive(:try_obtain_lease_for).and_return('randomuuid')
        end

        it 'calls the service' do
          expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double])
          subject
        end

        it 'stores billing status in redis' do
          redis_double = double
          expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double])
          expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis_double)
          expect(redis_double).to receive(:set).with(described_class.redis_shared_state_key_for(token), anything)
          subject
        end
      end

      context 'when there is a lease' do
        before do
          # Another worker holds the lease; this run must be a no-op.
          allow_any_instance_of(described_class).to receive(:try_obtain_lease_for).and_return(false)
        end

        it 'does not call the service' do
          expect(CheckGcpProjectBillingService).not_to receive(:new)
          subject
        end
      end
    end

    context 'when there is no token in redis' do
      before do
        allow_any_instance_of(described_class).to receive(:get_token).and_return(nil)
      end

      it 'does not call the service' do
        expect(CheckGcpProjectBillingService).not_to receive(:new)
        subject
      end
    end
  end
end
|
#
# Author:: Aliasgar Batterywala (<aliasgar.batterywala@clogeny.com>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../spec_helper"
require_relative "query_azure_mock"
require_relative "../../lib/azure/resource_management/ARM_interface"
require "chef/knife/bootstrap"
describe Chef::Knife::BootstrapAzure do
include AzureSpecHelper
include QueryAzureMock
include AzureUtility
before do
@bootstrap_azure_instance = create_instance(Chef::Knife::BootstrapAzure)
@service = @bootstrap_azure_instance.service
@bootstrap_azure_instance.config[:azure_dns_name] = "test-dns-01"
@bootstrap_azure_instance.name_args = ["test-vm-01"]
@server_role = Azure::Role.new("connection")
allow(@bootstrap_azure_instance.ui).to receive(:info)
allow(@bootstrap_azure_instance).to receive(:puts)
allow(@bootstrap_azure_instance).to receive(:check_license)
end
describe "parameters validation" do
it "raises error when azure_subscription_id is not specified" do
@bootstrap_azure_instance.config.delete(:azure_subscription_id)
expect(@bootstrap_azure_instance.ui).to receive(:error)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
it "raises error when azure_mgmt_cert is not specified" do
@bootstrap_azure_instance.config.delete(:azure_mgmt_cert)
expect(@bootstrap_azure_instance.ui).to receive(:error)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
it "raises error when azure_api_host_name is not specified" do
@bootstrap_azure_instance.config.delete(:azure_api_host_name)
expect(@bootstrap_azure_instance.ui).to receive(:error)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
it "raises error when server name is not specified" do
@bootstrap_azure_instance.name_args = []
expect(@service).to_not receive(:add_extension)
expect(@bootstrap_azure_instance.ui).to receive(
:error
).with("Please specify the SERVER name which needs to be bootstrapped via the Chef Extension.")
expect(Chef::Log).to receive(:debug).at_least(:once)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
it "raises error when more than one server name is specified" do
@bootstrap_azure_instance.name_args = %w{test-vm-01 test-vm-02 test-vm-03}
expect(@bootstrap_azure_instance.name_args.length).to be == 3
expect(@service).to_not receive(:add_extension)
expect(@bootstrap_azure_instance.ui).to receive(
:error
).with("Please specify only one SERVER name which needs to be bootstrapped via the Chef Extension.")
expect(Chef::Log).to receive(:debug).at_least(:once)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
it "raises error when server name specified does not exist under the given hosted service" do
expect(@bootstrap_azure_instance.name_args.length).to be == 1
expect(@service).to_not receive(:add_extension)
expect(@service).to receive(
:find_server
).and_return([])
expect(@bootstrap_azure_instance.ui).to receive(
:error
).with("Server test-vm-01 does not exist under the hosted service test-dns-01.")
expect(Chef::Log).to receive(:debug).at_least(:once)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
it "raises error when hosted service specified does not exist" do
expect(@bootstrap_azure_instance.name_args.length).to be == 1
expect(@service).to_not receive(:add_extension)
expect(@service).to receive(
:find_server
).and_return(nil)
expect(@bootstrap_azure_instance.ui).to receive(
:error
).with("Hosted service test-dns-01 does not exist.")
expect(Chef::Log).to receive(:debug).at_least(:once)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
it "raises error when hosted service name is not given but invalid server name is given" do
@bootstrap_azure_instance.config.delete(:azure_dns_name)
expect(@bootstrap_azure_instance.name_args.length).to be == 1
expect(@service).to_not receive(:add_extension)
expect(@service).to receive(
:find_server
).and_return(nil)
expect(@bootstrap_azure_instance.ui).to receive(
:error
).with("Server test-vm-01 does not exist.")
expect(Chef::Log).to receive(:debug).at_least(:once)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
context "server name specified do exist" do
context "hosted service name is specified in @bootstrap_azure_instance.config object" do
before do
@server_role.hostedservicename = "my_new_dns"
allow(@server_role).to receive_message_chain(
:os_type, :downcase
).and_return("windows")
allow(@server_role).to receive(
:deployname
).and_return("")
allow(@server_role).to receive(:role_xml).and_return("")
allow(@bootstrap_azure_instance).to receive(
:get_chef_extension_version
)
allow(@bootstrap_azure_instance).to receive(
:get_chef_extension_public_params
)
allow(@bootstrap_azure_instance).to receive(
:get_chef_extension_private_params
)
end
it "does not raise error when server name do exist and does not re-initializes azure_dns_name in bootstrap_azure_instance's config using server object" do
expect(@bootstrap_azure_instance.name_args.length).to be == 1
expect(@service).to receive(:add_extension)
expect(@service).to receive(
:find_server
).and_return(@server_role)
expect { @bootstrap_azure_instance.run }.not_to raise_error
expect(@bootstrap_azure_instance.config[:azure_dns_name]).to be == "test-dns-01"
end
end
context "hosted service name is not specified in @bootstrap_azure_instance.config object or anywhere else" do
before do
@bootstrap_azure_instance.config.delete(:azure_dns_name)
@server_role.hostedservicename = "my_new_dns"
allow(@server_role).to receive_message_chain(
:os_type, :downcase
).and_return("windows")
allow(@server_role).to receive(
:deployname
).and_return("")
allow(@server_role).to receive(:role_xml).and_return("")
allow(@bootstrap_azure_instance).to receive(
:get_chef_extension_version
)
allow(@bootstrap_azure_instance).to receive(
:get_chef_extension_public_params
)
allow(@bootstrap_azure_instance).to receive(
:get_chef_extension_private_params
)
end
it "does not raise error when server name do exist and initializes azure_dns_name in bootstrap_azure_instance's config using server object" do
expect(@bootstrap_azure_instance.name_args.length).to be == 1
expect(@service).to receive(:add_extension)
expect(@service).to receive(
:find_server
).and_return(@server_role)
expect { @bootstrap_azure_instance.run }.not_to raise_error
expect(@bootstrap_azure_instance.config[:azure_dns_name]).to be == "my_new_dns"
end
end
end
end
describe "extended_logs functionality" do
context "when extended_logs is false" do
it "deploys the Chef Extension on the server but then does not wait and fetch the chef-client run logs" do
expect(@bootstrap_azure_instance.name_args.length).to be == 1
expect(@bootstrap_azure_instance).to receive(:set_ext_params)
expect(@service).to receive(:add_extension)
expect(@bootstrap_azure_instance).to_not receive(:print)
expect(@bootstrap_azure_instance).to_not receive(:wait_until_extension_available)
expect(@bootstrap_azure_instance).to_not receive(:fetch_chef_client_logs)
@bootstrap_azure_instance.run
end
end
context "when extended_logs is true" do
before do
@bootstrap_azure_instance.config[:extended_logs] = true
end
it "deploys the Chef Extension on the server and also waits and fetch the chef-client run logs" do
expect(@bootstrap_azure_instance.name_args.length).to be == 1
expect(@bootstrap_azure_instance).to receive(:set_ext_params)
expect(@service).to receive(:add_extension)
expect(@bootstrap_azure_instance).to receive(:print).exactly(2).times
expect(@bootstrap_azure_instance).to receive(:wait_until_extension_available)
expect(@bootstrap_azure_instance).to receive(:fetch_chef_client_logs)
@bootstrap_azure_instance.run
end
context "when Chef Extension becomes available/ready within the prescribed timeout" do
it "successfully deploys the Chef Extension on the server and also y fetches the chef-client run logs without raising any error" do
expect(@bootstrap_azure_instance.name_args.length).to be == 1
expect(@bootstrap_azure_instance).to receive(:set_ext_params)
expect(@service).to receive(:add_extension)
expect(@bootstrap_azure_instance).to receive(:print).exactly(2).times
expect(@bootstrap_azure_instance).to receive(:wait_until_extension_available)
expect(@bootstrap_azure_instance).to receive(:fetch_chef_client_logs)
expect { @bootstrap_azure_instance.run }.to_not raise_error
end
end
context "when Chef Extension does not become available/ready within the prescribed timeout" do
it "successfully deploys the Chef Extension on the server but fails to fetch the chef-client run logs as extension is unavailable and so it raises error and exits" do
expect(@bootstrap_azure_instance.name_args.length).to be == 1
expect(@bootstrap_azure_instance).to receive(:set_ext_params)
expect(@service).to receive(:add_extension)
expect(@bootstrap_azure_instance).to receive(:print).exactly(1).times
allow(@bootstrap_azure_instance).to receive(
:wait_until_extension_available
).and_raise(
"\nUnable to fetch chef-client run logs as Chef Extension seems to be unavailable even after 11 minutes of its deployment.\n"
)
expect(@bootstrap_azure_instance).to_not receive(:fetch_chef_client_logs)
expect(@bootstrap_azure_instance.ui).to receive(:error).with(
"\nUnable to fetch chef-client run logs as Chef Extension seems to be unavailable even after 11 minutes of its deployment.\n"
)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
end
end
end
describe "os_type and os_version support validation" do
context "invalid os_type for the given server" do
before do
allow(@server_role).to receive(
:os_type
).and_return("Abc")
end
it "raises an error" do
expect(@service).to receive(
:find_server
).and_return(@server_role)
expect(@bootstrap_azure_instance.ui).to receive(
:error
).with("OS type Abc is not supported.")
expect(Chef::Log).to receive(:debug).at_least(:once)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
end
context "invalid os_version for the given Linux server" do
before do
allow(@server_role).to receive(
:os_type
).and_return("Linux")
allow(@server_role).to receive(
:os_version
).and_return("Suse")
end
it "raises an error" do
expect(@service).to receive(
:find_server
).and_return(@server_role)
expect(@bootstrap_azure_instance.ui).to receive(
:error
).with("OS version Suse for OS type Linux is not supported.")
expect(Chef::Log).to receive(:debug).at_least(:once)
expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
end
end
context "valid os_type and valid os_version" do
before do
allow(@server_role).to receive(
:deployname
).and_return("test-deploy-01")
allow(@server_role).to receive(
:role_xml
).and_return("vm-role-xml")
allow(@bootstrap_azure_instance).to receive(
:get_chef_extension_version
).and_return("1210.*")
allow(@bootstrap_azure_instance).to receive(
:get_chef_extension_public_params
).and_return(
"chef_ext_public_params"
)
allow(@bootstrap_azure_instance).to receive(
:get_chef_extension_private_params
).and_return(
"chef_ext_private_params"
)
end
context "for Linux" do
before do
allow(@server_role).to receive(
:os_type
).and_return("Linux")
allow(@server_role).to receive(
:os_version
).and_return("CentOS")
end
it "sets the extension parameters for Linux platform" do
expect(@service).to receive(
:find_server
).and_return(@server_role)
response = @bootstrap_azure_instance.set_ext_params
expect(response[:chef_extension]).to be == "LinuxChefClient"
expect(response[:azure_dns_name]).to be == "test-dns-01"
expect(response[:deploy_name]).to be == "test-deploy-01"
expect(response[:role_xml]).to be == "vm-role-xml"
expect(response[:azure_vm_name]).to be == "test-vm-01"
expect(response[:chef_extension_publisher]).to be == "Chef.Bootstrap.WindowsAzure"
expect(response[:chef_extension_version]).to be == "1210.*"
expect(response[:chef_extension_public_param]).to be == "chef_ext_public_params"
expect(response[:chef_extension_private_param]).to be == "chef_ext_private_params"
end
end
context "for Windows" do
before do
allow(@server_role).to receive(
:os_type
).and_return("Windows")
end
it "sets the extension parameters for Windows platform" do
expect(@service).to receive(
:find_server
).and_return(@server_role)
response = @bootstrap_azure_instance.set_ext_params
expect(response[:chef_extension]).to be == "ChefClient"
expect(response[:azure_dns_name]).to be == "test-dns-01"
expect(response[:deploy_name]).to be == "test-deploy-01"
expect(response[:role_xml]).to be == "vm-role-xml"
expect(response[:azure_vm_name]).to be == "test-vm-01"
expect(response[:chef_extension_publisher]).to be == "Chef.Bootstrap.WindowsAzure"
expect(response[:chef_extension_version]).to be == "1210.*"
expect(response[:chef_extension_public_param]).to be == "chef_ext_public_params"
expect(response[:chef_extension_private_param]).to be == "chef_ext_private_params"
end
end
end
end
describe "parse role list xml" do
it "reads os_type and os_version from role list 1 xml" do
role_list_1_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/parse_role_list_xml/role_list_1.xml"))
role = Azure::Role.new("connection")
role.parse_role_list_xml(role_list_1_xml)
expect(role.role_xml).to be == role_list_1_xml
expect(role.os_type).to be == "Linux"
expect(role.os_version).to be == "842c8b9c6cvxzcvxzcv048xvbvge2323qe4c3__OpenLogic-CentOS-67-20140205"
end
it "reads os_type and os_version from role list 2 xml" do
role_list_2_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/parse_role_list_xml/role_list_2.xml"))
role = Azure::Role.new("connection")
role.parse_role_list_xml(role_list_2_xml)
expect(role.role_xml).to be == role_list_2_xml
expect(role.os_type).to be == "Windows"
expect(role.os_version).to be == "a6dfsdfwerfdfc0bc8f24rwefsd4ds01__Windows-Server-2012-R2-20141128-en.us-127GB.vhd"
end
end
describe "add_extension" do
  # @service.add_extension is expected to wrap connection.roles.update with
  # progress messages on the UI.
  it "calls role update and prints success message on successful completion" do
    expect(@service.ui).to receive(:info).with(
      "Started with Chef Extension deployment on the server test-vm-01..."
    )
    expect(@service).to receive_message_chain(
      :connection, :roles, :update
    )
    expect(@service.ui).to receive(:info).with(
      "\nSuccessfully deployed Chef Extension on the server test-vm-01."
    )
    @service.add_extension(@bootstrap_azure_instance.name_args[0])
  end
  # NOTE(review): despite the description, no raise_error expectation is set
  # here — the failure is asserted to surface via Chef::Log only; confirm
  # add_extension is meant to swallow the exception.
  it "calls role update and raises error on unsuccessful completion" do
    expect(@service).to receive_message_chain(
      :connection, :roles, :update
    ).and_raise
    expect(Chef::Log).to receive(:error)
    expect(Chef::Log).to receive(:debug).at_least(:once)
    @service.add_extension(@bootstrap_azure_instance.name_args[0])
  end
end
describe "roles_update" do
  before do
    @roles = Azure::Roles.new("connection")
    # Azure::Role.new is stubbed so Roles#update operates on this double.
    @role = double("Role")
    allow(Azure::Role).to receive(:new).and_return(@role)
  end
  # Roles#update should delegate: first Role#setup_extension (here returning
  # nil), then Role#update with the server name, params hash, and that result.
  it "calls setup_extension and update methods of Role class" do
    expect(@role).to receive(
      :setup_extension
    ).with({}).and_return(nil)
    expect(@role).to receive(:update).with(
      @bootstrap_azure_instance.name_args[0], {}, nil
    )
    @roles.update(@bootstrap_azure_instance.name_args[0], {})
  end
end
describe "setup_extension" do
  before do
    @role = Azure::Role.new("connection")
    # Stub the XML transformation step so setup_extension's own wrapping /
    # serialization logic is exercised against a fixed "updated" document.
    updated_role_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/setup_extension/updated_role.xml"))
    allow(@role).to receive(:update_role_xml_for_extension).and_return(updated_role_xml)
    @update_role_xml_for_extension = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/setup_extension/update_role.xml"))
    allow(@role).to receive(:puts) # silence console output from the method
  end
  it "creates new xml for update role" do
    response = @role.setup_extension({})
    expect(response).to eq(@update_role_xml_for_extension.to_xml)
  end
end
describe "update_role_xml_for_extension" do
  # Each context feeds a role XML fixture in a different starting state and
  # compares the rewritten <Role> subtree against an expected output fixture.
  before do
    @params = {
      chef_extension_publisher: "Chef.Bootstrap.WindowsAzure",
      chef_extension_version: "1210.12",
      chef_extension_public_param: "MyPublicParamsValue",
      chef_extension_private_param: "MyPrivateParamsValue",
      azure_dns_name: @bootstrap_azure_instance.config[:azure_dns_name],
    }
    @role = Azure::Role.new("connection")
  end
  context "ResourceExtensionReferences node is not present in role xml" do
    before do
      @input_role_1_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/input_role_1.xml"))
      @output_role_1_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/output_role_1.xml"))
      @params[:chef_extension] = "LinuxChefClient"
    end
    it "adds ResourceExtensionReferences node with ChefExtension config" do
      response = @role.update_role_xml_for_extension(@input_role_1_xml.at_css("Role"), @params)
      expect(response.to_xml).to eq(@output_role_1_xml.at_css("Role").to_xml)
    end
  end
  context "ResourceExtensionReferences node is present in xml but it is empty" do
    before do
      @input_role_2_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/input_role_2.xml"))
      @output_role_2_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/output_role_2.xml"))
      @params[:chef_extension] = "ChefClient"
    end
    it "updates ResourceExtensionReferences node with ChefExtension config" do
      response = @role.update_role_xml_for_extension(@input_role_2_xml.at_css("Role"), @params)
      expect(response.to_xml).to eq(@output_role_2_xml.at_css("Role").to_xml)
    end
  end
  context "ResourceExtensionReferences node is present in role xml but ChefExtension is not installed on the server" do
    before do
      @input_role_3_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/input_role_3.xml"))
      @output_role_3_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/output_role_3.xml"))
      @params[:chef_extension] = "ChefClient"
    end
    it "adds ChefExtension config in ResourceExtensionReferences node" do
      response = @role.update_role_xml_for_extension(@input_role_3_xml.at_css("Role"), @params)
      expect(response.to_xml).to eq(@output_role_3_xml.at_css("Role").to_xml)
    end
  end
  context "ResourceExtensionReferences node is present in role xml and ChefExtension is already installed on the server" do
    before do
      @input_role_4_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/input_role_4.xml"))
      @params[:chef_extension] = "LinuxChefClient"
      @params[:azure_vm_name] = "test-vm-01"
    end
    # Re-installation is an error; only the input fixture is needed here.
    it "raises an error with message as 'ChefExtension is already installed on the server'" do
      expect { @role.update_role_xml_for_extension(@input_role_4_xml.at_css("Role"), @params) }.to raise_error("Chef Extension is already installed on the server test-vm-01.")
    end
  end
end
describe "role_update" do
  before do
    @role = Azure::Role.new("connection")
    @role.connection = double("Connection")
    allow(@role).to receive(:puts) # silence console output
  end
  # Role#update issues query_azure and inspects the response via
  # error_from_response_xml ([code, message] pair; empty strings == success).
  it "does not raise error on update role success" do
    expect(@role.connection).to receive(:query_azure)
    expect(@role).to receive(:error_from_response_xml).and_return(["", ""])
    expect(Chef::Log).to_not receive(:debug)
    expect { @role.update(@bootstrap_azure_instance.name_args[0], {}, "") }.not_to raise_error
  end
  it "raises an error on update role failure" do
    expect(@role.connection).to receive(:query_azure)
    expect(@role).to receive(:error_from_response_xml)
      .and_return(["InvalidXmlRequest", "The request body's XML was invalid or not correctly specified."])
    expect(Chef::Log).to receive(:debug).at_least(:once)
    expect { @role.update(@bootstrap_azure_instance.name_args[0], {}, "") }.to raise_error("Unable to update role:InvalidXmlRequest : The request body's XML was invalid or not correctly specified.")
  end
end
describe "get_chef_extension_version" do
  before do
    # Force the ASM (Service Management) code path: the method branches on
    # the interface class of @service.
    allow(@service).to receive(:instance_of?).with(
      Azure::ResourceManagement::ARMInterface
    ).and_return(false)
    allow(@service).to receive(:instance_of?).with(
      Azure::ServiceManagement::ASMInterface
    ).and_return(true)
  end
  context "when extension version is set in knife.rb" do
    before do
      @bootstrap_azure_instance.config[:azure_chef_extension_version] = "1012.10"
    end
    it "will pick up the extension version from knife.rb" do
      response = @bootstrap_azure_instance.get_chef_extension_version("MyChefClient")
      expect(response).to be == "1012.10"
    end
  end
  context "when extension version is not set in knife.rb" do
    before do
      @bootstrap_azure_instance.config.delete(:azure_chef_extension_version)
      # Extension catalog fixture from which the latest version is derived.
      extensions_list_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/extensions_list.xml"))
      allow(@service).to receive(
        :get_extension
      ).and_return(extensions_list_xml)
    end
    # The major version is resolved from the list itself ("1210.*") without
    # calling get_latest_chef_extension_version.
    it "will pick up the latest version of the extension" do
      expect(@service).to_not receive(:get_latest_chef_extension_version)
      response = @bootstrap_azure_instance.get_chef_extension_version("MyChefClient")
      expect(response).to be == "1210.*"
    end
  end
end
describe "wait_until_extension_available" do
  # wait_until_extension_available polls fetch_deployment until the Chef
  # Extension reports a status for the target VM (name_args[0]), sleeping 30s
  # between attempts and recursing. These contexts walk every decision point
  # using VMs from the extension_deployment_xml.xml fixture. mock_recursive_call
  # (defined below) aliases the real method so the recursive self-call can be
  # asserted with a message expectation while the aliased copy is invoked.
  context "extension_availability_wait_time has exceeded the extension_availability_wait_timeout" do
    before do
      @start_time = Time.now
    end
    # A timeout of -1 minutes is always exceeded, so it must raise immediately.
    it "raises error saying unable to fetch chef-client run logs" do
      expect { @bootstrap_azure_instance.wait_until_extension_available(@start_time, -1) }.to raise_error(
        "\nUnable to fetch chef-client run logs as Chef Extension seems to be unavailable even after -1 minutes of its deployment.\n"
      )
    end
  end
  context "extension_availability_wait_time has not exceeded the extension_availability_wait_timeout" do
    context "deployment not available" do
      before do
        @start_time = Time.now
        deployment = Nokogiri::XML("") # empty document => no deployment yet
        allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(deployment)
      end
      it "goes to sleep and then re-invokes the wait_until_extension_available method recursively" do
        mock_recursive_call
        expect(@bootstrap_azure_instance).to receive(:print).exactly(1).times
        expect(@bootstrap_azure_instance).to receive(:sleep).with(30)
        expect(@bootstrap_azure_instance).to receive(
          :wait_until_extension_available
        ).with(@start_time, 10)
        @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
      end
    end
    context "deployment available" do
      context "given role not available" do
        # Default name_args ("test-vm-01") is not present in the fixture.
        before do
          @start_time = Time.now
          deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
          allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(deployment)
        end
        it "goes to sleep and then re-invokes the wait_until_extension_available method recursively" do
          mock_recursive_call
          expect(@bootstrap_azure_instance).to receive(:print).exactly(1).times
          expect(@bootstrap_azure_instance).to receive(:sleep).with(30)
          expect(@bootstrap_azure_instance).to receive(
            :wait_until_extension_available
          ).with(@start_time, 10)
          @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
        end
      end
      context "given role available" do
        context "GuestAgent not ready" do
          before do
            @bootstrap_azure_instance.name_args = ["vm05"] # fixture VM with GuestAgent not ready
            @start_time = Time.now
            deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
            allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(deployment)
          end
          it "goes to sleep and then re-invokes the wait_until_extension_available method recursively" do
            mock_recursive_call
            expect(@bootstrap_azure_instance).to receive(:print).exactly(1).times
            expect(@bootstrap_azure_instance).to receive(:sleep).with(30)
            expect(@bootstrap_azure_instance).to receive(
              :wait_until_extension_available
            ).with(@start_time, 10)
            @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
          end
        end
        context "GuestAgent ready" do
          context "none of the extension status available" do
            before do
              @bootstrap_azure_instance.name_args = ["vm06"]
              @start_time = Time.now
              deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
              allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(deployment)
            end
            it "goes to sleep and then re-invokes the wait_until_extension_available method recursively" do
              mock_recursive_call
              expect(@bootstrap_azure_instance).to receive(:print).exactly(1).times
              expect(@bootstrap_azure_instance).to receive(:sleep).with(30)
              expect(@bootstrap_azure_instance).to receive(
                :wait_until_extension_available
              ).with(@start_time, 10)
              @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
            end
          end
          context "extension status(es) available apart from extension status for Chef Extension" do
            # Other extensions report status but Chef's does not => keep polling.
            context "example-1" do
              before do
                @bootstrap_azure_instance.name_args = ["vm01"]
                @start_time = Time.now
                deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
                allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(deployment)
              end
              it "goes to sleep and then re-invokes the wait_until_extension_available method recursively" do
                mock_recursive_call
                expect(@bootstrap_azure_instance).to receive(:print).exactly(1).times
                expect(@bootstrap_azure_instance).to receive(:sleep).with(30)
                expect(@bootstrap_azure_instance).to receive(
                  :wait_until_extension_available
                ).with(@start_time, 10)
                @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
              end
            end
            context "example-2" do
              before do
                @bootstrap_azure_instance.name_args = ["vm07"]
                @start_time = Time.now
                deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
                allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(deployment)
              end
              it "goes to sleep and then re-invokes the wait_until_extension_available method recursively" do
                mock_recursive_call
                expect(@bootstrap_azure_instance).to receive(:print).exactly(1).times
                expect(@bootstrap_azure_instance).to receive(:sleep).with(30)
                expect(@bootstrap_azure_instance).to receive(
                  :wait_until_extension_available
                ).with(@start_time, 10)
                @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
              end
            end
          end
          context "extension status(es) available including extension status for Chef Extension" do
            # Chef Extension status present => polling stops, no sleep/recursion.
            context "example-1" do
              before do
                @bootstrap_azure_instance.name_args = ["vm02"]
                @start_time = Time.now
                deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
                allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(deployment)
              end
              it "does not go to sleep and does not re-invoke the wait_until_extension_available method recursively" do
                mock_recursive_call
                expect(@bootstrap_azure_instance).to_not receive(:print)
                expect(@bootstrap_azure_instance).to_not receive(:sleep).with(30)
                expect(@bootstrap_azure_instance).to_not receive(
                  :wait_until_extension_available
                ).with(@start_time, 10)
                @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
              end
            end
            context "example-2" do
              before do
                @bootstrap_azure_instance.name_args = ["vm03"]
                @start_time = Time.now
                deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
                allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(deployment)
              end
              it "does not go to sleep and does not re-invoke the wait_until_extension_available method recursively" do
                mock_recursive_call
                expect(@bootstrap_azure_instance).to_not receive(:print)
                expect(@bootstrap_azure_instance).to_not receive(:sleep).with(30)
                expect(@bootstrap_azure_instance).to_not receive(
                  :wait_until_extension_available
                ).with(@start_time, 10)
                @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
              end
            end
            context "example-3" do
              before do
                @bootstrap_azure_instance.name_args = ["vm08"]
                @start_time = Time.now
                deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
                allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(deployment)
              end
              it "does not go to sleep and does not re-invoke the wait_until_extension_available method recursively" do
                mock_recursive_call
                expect(@bootstrap_azure_instance).to_not receive(:print)
                expect(@bootstrap_azure_instance).to_not receive(:sleep).with(30)
                expect(@bootstrap_azure_instance).to_not receive(
                  :wait_until_extension_available
                ).with(@start_time, 10)
                @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
              end
            end
          end
        end
      end
    end
  end
end
describe "fetch_deployment" do
  before do
    # Stub the service to return a fixed deployment name and fixture XML.
    allow(@bootstrap_azure_instance.service).to receive(
      :deployment_name
    ).and_return("deploymentExtension")
    deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
    allow(@bootstrap_azure_instance.service).to receive(
      :deployment
    ).and_return(deployment)
  end
  it "returns the deployment" do
    response = @bootstrap_azure_instance.fetch_deployment
    expect(response).to_not be nil
    expect(response.at_css("Deployment Name").text).to be == "deploymentExtension"
    expect(response.css("RoleInstanceList RoleInstance RoleName").class).to be == Nokogiri::XML::NodeSet
    # The fixture contains 8 role instances (vm01..vm08).
    expect(response.css("RoleInstanceList RoleInstance RoleName").children.count).to be == 8
  end
end
# Aliases wait_until_extension_available on @bootstrap_azure_instance's
# singleton class as wait_until_extension_available_mocked. Examples then
# stub the original name (to intercept the recursive self-call) while still
# executing the real implementation through the aliased name.
def mock_recursive_call
  @bootstrap_azure_instance.instance_eval do
    class << self
      alias wait_until_extension_available_mocked wait_until_extension_available
    end
  end
end
end
# Fix another typo
# Signed-off-by: Tim Smith <764ef62106582a09ed09dfa0b6bff7c05fd7d1e4@chef.io>
#
# Author:: Aliasgar Batterywala (<aliasgar.batterywala@clogeny.com>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../spec_helper"
require_relative "query_azure_mock"
require_relative "../../lib/azure/resource_management/ARM_interface"
require "chef/knife/bootstrap"
describe Chef::Knife::BootstrapAzure do
include AzureSpecHelper
include QueryAzureMock
include AzureUtility
before do
  # Common fixtures: a BootstrapAzure knife instance targeting server
  # "test-vm-01" under hosted service (DNS name) "test-dns-01".
  @bootstrap_azure_instance = create_instance(Chef::Knife::BootstrapAzure)
  @service = @bootstrap_azure_instance.service
  @bootstrap_azure_instance.config[:azure_dns_name] = "test-dns-01"
  @bootstrap_azure_instance.name_args = ["test-vm-01"]
  @server_role = Azure::Role.new("connection")
  # Silence UI/console output and the license check across all examples.
  allow(@bootstrap_azure_instance.ui).to receive(:info)
  allow(@bootstrap_azure_instance).to receive(:puts)
  allow(@bootstrap_azure_instance).to receive(:check_license)
end
describe "parameters validation" do
  # Missing mandatory Azure credentials/config cause ui.error + SystemExit.
  it "raises error when azure_subscription_id is not specified" do
    @bootstrap_azure_instance.config.delete(:azure_subscription_id)
    expect(@bootstrap_azure_instance.ui).to receive(:error)
    expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
  end
  it "raises error when azure_mgmt_cert is not specified" do
    @bootstrap_azure_instance.config.delete(:azure_mgmt_cert)
    expect(@bootstrap_azure_instance.ui).to receive(:error)
    expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
  end
  it "raises error when azure_api_host_name is not specified" do
    @bootstrap_azure_instance.config.delete(:azure_api_host_name)
    expect(@bootstrap_azure_instance.ui).to receive(:error)
    expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
  end
  # Exactly one SERVER name argument is required.
  it "raises error when server name is not specified" do
    @bootstrap_azure_instance.name_args = []
    expect(@service).to_not receive(:add_extension)
    expect(@bootstrap_azure_instance.ui).to receive(
      :error
    ).with("Please specify the SERVER name which needs to be bootstrapped via the Chef Extension.")
    expect(Chef::Log).to receive(:debug).at_least(:once)
    expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
  end
  it "raises error when more than one server name is specified" do
    @bootstrap_azure_instance.name_args = %w{test-vm-01 test-vm-02 test-vm-03}
    expect(@bootstrap_azure_instance.name_args.length).to be == 3
    expect(@service).to_not receive(:add_extension)
    expect(@bootstrap_azure_instance.ui).to receive(
      :error
    ).with("Please specify only one SERVER name which needs to be bootstrapped via the Chef Extension.")
    expect(Chef::Log).to receive(:debug).at_least(:once)
    expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
  end
  # find_server returning [] means the server is absent from the hosted
  # service; returning nil means the hosted service itself does not exist.
  it "raises error when server name specified does not exist under the given hosted service" do
    expect(@bootstrap_azure_instance.name_args.length).to be == 1
    expect(@service).to_not receive(:add_extension)
    expect(@service).to receive(
      :find_server
    ).and_return([])
    expect(@bootstrap_azure_instance.ui).to receive(
      :error
    ).with("Server test-vm-01 does not exist under the hosted service test-dns-01.")
    expect(Chef::Log).to receive(:debug).at_least(:once)
    expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
  end
  it "raises error when hosted service specified does not exist" do
    expect(@bootstrap_azure_instance.name_args.length).to be == 1
    expect(@service).to_not receive(:add_extension)
    expect(@service).to receive(
      :find_server
    ).and_return(nil)
    expect(@bootstrap_azure_instance.ui).to receive(
      :error
    ).with("Hosted service test-dns-01 does not exist.")
    expect(Chef::Log).to receive(:debug).at_least(:once)
    expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
  end
  it "raises error when hosted service name is not given but invalid server name is given" do
    @bootstrap_azure_instance.config.delete(:azure_dns_name)
    expect(@bootstrap_azure_instance.name_args.length).to be == 1
    expect(@service).to_not receive(:add_extension)
    expect(@service).to receive(
      :find_server
    ).and_return(nil)
    expect(@bootstrap_azure_instance.ui).to receive(
      :error
    ).with("Server test-vm-01 does not exist.")
    expect(Chef::Log).to receive(:debug).at_least(:once)
    expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
  end
  context "server name specified do exist" do
    context "hosted service name is specified in @bootstrap_azure_instance.config object" do
      before do
        # Server role reports a different hosted service than config; the
        # configured value should win in this context.
        @server_role.hostedservicename = "my_new_dns"
        allow(@server_role).to receive_message_chain(
          :os_type, :downcase
        ).and_return("windows")
        allow(@server_role).to receive(
          :deployname
        ).and_return("")
        allow(@server_role).to receive(:role_xml).and_return("")
        allow(@bootstrap_azure_instance).to receive(
          :get_chef_extension_version
        )
        allow(@bootstrap_azure_instance).to receive(
          :get_chef_extension_public_params
        )
        allow(@bootstrap_azure_instance).to receive(
          :get_chef_extension_private_params
        )
      end
      it "does not raise error when server name do exist and does not re-initializes azure_dns_name in bootstrap_azure_instance's config using server object" do
        expect(@bootstrap_azure_instance.name_args.length).to be == 1
        expect(@service).to receive(:add_extension)
        expect(@service).to receive(
          :find_server
        ).and_return(@server_role)
        expect { @bootstrap_azure_instance.run }.not_to raise_error
        expect(@bootstrap_azure_instance.config[:azure_dns_name]).to be == "test-dns-01"
      end
    end
    context "hosted service name is not specified in @bootstrap_azure_instance.config object or anywhere else" do
      before do
        # With no configured DNS name, run should adopt the server role's
        # hostedservicename into config.
        @bootstrap_azure_instance.config.delete(:azure_dns_name)
        @server_role.hostedservicename = "my_new_dns"
        allow(@server_role).to receive_message_chain(
          :os_type, :downcase
        ).and_return("windows")
        allow(@server_role).to receive(
          :deployname
        ).and_return("")
        allow(@server_role).to receive(:role_xml).and_return("")
        allow(@bootstrap_azure_instance).to receive(
          :get_chef_extension_version
        )
        allow(@bootstrap_azure_instance).to receive(
          :get_chef_extension_public_params
        )
        allow(@bootstrap_azure_instance).to receive(
          :get_chef_extension_private_params
        )
      end
      it "does not raise error when server name do exist and initializes azure_dns_name in bootstrap_azure_instance's config using server object" do
        expect(@bootstrap_azure_instance.name_args.length).to be == 1
        expect(@service).to receive(:add_extension)
        expect(@service).to receive(
          :find_server
        ).and_return(@server_role)
        expect { @bootstrap_azure_instance.run }.not_to raise_error
        expect(@bootstrap_azure_instance.config[:azure_dns_name]).to be == "my_new_dns"
      end
    end
  end
end
describe "extended_logs functionality" do
  # config[:extended_logs] controls whether run, after deploying the
  # extension, also waits for it and fetches chef-client run logs.
  context "when extended_logs is false" do
    it "deploys the Chef Extension on the server but then does not wait and fetch the chef-client run logs" do
      expect(@bootstrap_azure_instance.name_args.length).to be == 1
      expect(@bootstrap_azure_instance).to receive(:set_ext_params)
      expect(@service).to receive(:add_extension)
      expect(@bootstrap_azure_instance).to_not receive(:print)
      expect(@bootstrap_azure_instance).to_not receive(:wait_until_extension_available)
      expect(@bootstrap_azure_instance).to_not receive(:fetch_chef_client_logs)
      @bootstrap_azure_instance.run
    end
  end
  context "when extended_logs is true" do
    before do
      @bootstrap_azure_instance.config[:extended_logs] = true
    end
    it "deploys the Chef Extension on the server and also waits and fetch the chef-client run logs" do
      expect(@bootstrap_azure_instance.name_args.length).to be == 1
      expect(@bootstrap_azure_instance).to receive(:set_ext_params)
      expect(@service).to receive(:add_extension)
      expect(@bootstrap_azure_instance).to receive(:print).exactly(2).times
      expect(@bootstrap_azure_instance).to receive(:wait_until_extension_available)
      expect(@bootstrap_azure_instance).to receive(:fetch_chef_client_logs)
      @bootstrap_azure_instance.run
    end
    context "when Chef Extension becomes available/ready within the prescribed timeout" do
      it "successfully deploys the Chef Extension on the server and also successfully fetches the chef-client run logs without raising any error" do
        expect(@bootstrap_azure_instance.name_args.length).to be == 1
        expect(@bootstrap_azure_instance).to receive(:set_ext_params)
        expect(@service).to receive(:add_extension)
        expect(@bootstrap_azure_instance).to receive(:print).exactly(2).times
        expect(@bootstrap_azure_instance).to receive(:wait_until_extension_available)
        expect(@bootstrap_azure_instance).to receive(:fetch_chef_client_logs)
        expect { @bootstrap_azure_instance.run }.to_not raise_error
      end
    end
    context "when Chef Extension does not become available/ready within the prescribed timeout" do
      # The wait raising should surface as ui.error + SystemExit, and log
      # fetching must be skipped.
      it "successfully deploys the Chef Extension on the server but fails to fetch the chef-client run logs as extension is unavailable and so it raises error and exits" do
        expect(@bootstrap_azure_instance.name_args.length).to be == 1
        expect(@bootstrap_azure_instance).to receive(:set_ext_params)
        expect(@service).to receive(:add_extension)
        expect(@bootstrap_azure_instance).to receive(:print).exactly(1).times
        allow(@bootstrap_azure_instance).to receive(
          :wait_until_extension_available
        ).and_raise(
          "\nUnable to fetch chef-client run logs as Chef Extension seems to be unavailable even after 11 minutes of its deployment.\n"
        )
        expect(@bootstrap_azure_instance).to_not receive(:fetch_chef_client_logs)
        expect(@bootstrap_azure_instance.ui).to receive(:error).with(
          "\nUnable to fetch chef-client run logs as Chef Extension seems to be unavailable even after 11 minutes of its deployment.\n"
        )
        expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
      end
    end
  end
end
describe "os_type and os_version support validation" do
  # run validates the server's OS before deployment; set_ext_params builds
  # the extension parameter hash (Linux => "LinuxChefClient",
  # Windows => "ChefClient").
  context "invalid os_type for the given server" do
    before do
      allow(@server_role).to receive(
        :os_type
      ).and_return("Abc")
    end
    it "raises an error" do
      expect(@service).to receive(
        :find_server
      ).and_return(@server_role)
      expect(@bootstrap_azure_instance.ui).to receive(
        :error
      ).with("OS type Abc is not supported.")
      expect(Chef::Log).to receive(:debug).at_least(:once)
      expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
    end
  end
  context "invalid os_version for the given Linux server" do
    before do
      allow(@server_role).to receive(
        :os_type
      ).and_return("Linux")
      allow(@server_role).to receive(
        :os_version
      ).and_return("Suse")
    end
    it "raises an error" do
      expect(@service).to receive(
        :find_server
      ).and_return(@server_role)
      expect(@bootstrap_azure_instance.ui).to receive(
        :error
      ).with("OS version Suse for OS type Linux is not supported.")
      expect(Chef::Log).to receive(:debug).at_least(:once)
      expect { @bootstrap_azure_instance.run }.to raise_error(SystemExit)
    end
  end
  context "valid os_type and valid os_version" do
    before do
      # Shared stubs for the extension parameter lookups used by both
      # platform contexts below.
      allow(@server_role).to receive(
        :deployname
      ).and_return("test-deploy-01")
      allow(@server_role).to receive(
        :role_xml
      ).and_return("vm-role-xml")
      allow(@bootstrap_azure_instance).to receive(
        :get_chef_extension_version
      ).and_return("1210.*")
      allow(@bootstrap_azure_instance).to receive(
        :get_chef_extension_public_params
      ).and_return(
        "chef_ext_public_params"
      )
      allow(@bootstrap_azure_instance).to receive(
        :get_chef_extension_private_params
      ).and_return(
        "chef_ext_private_params"
      )
    end
    context "for Linux" do
      before do
        allow(@server_role).to receive(
          :os_type
        ).and_return("Linux")
        allow(@server_role).to receive(
          :os_version
        ).and_return("CentOS")
      end
      it "sets the extension parameters for Linux platform" do
        expect(@service).to receive(
          :find_server
        ).and_return(@server_role)
        response = @bootstrap_azure_instance.set_ext_params
        expect(response[:chef_extension]).to be == "LinuxChefClient"
        expect(response[:azure_dns_name]).to be == "test-dns-01"
        expect(response[:deploy_name]).to be == "test-deploy-01"
        expect(response[:role_xml]).to be == "vm-role-xml"
        expect(response[:azure_vm_name]).to be == "test-vm-01"
        expect(response[:chef_extension_publisher]).to be == "Chef.Bootstrap.WindowsAzure"
        expect(response[:chef_extension_version]).to be == "1210.*"
        expect(response[:chef_extension_public_param]).to be == "chef_ext_public_params"
        expect(response[:chef_extension_private_param]).to be == "chef_ext_private_params"
      end
    end
    context "for Windows" do
      before do
        # Windows needs no os_version stub; only os_type is checked here.
        allow(@server_role).to receive(
          :os_type
        ).and_return("Windows")
      end
      it "sets the extension parameters for Windows platform" do
        expect(@service).to receive(
          :find_server
        ).and_return(@server_role)
        response = @bootstrap_azure_instance.set_ext_params
        expect(response[:chef_extension]).to be == "ChefClient"
        expect(response[:azure_dns_name]).to be == "test-dns-01"
        expect(response[:deploy_name]).to be == "test-deploy-01"
        expect(response[:role_xml]).to be == "vm-role-xml"
        expect(response[:azure_vm_name]).to be == "test-vm-01"
        expect(response[:chef_extension_publisher]).to be == "Chef.Bootstrap.WindowsAzure"
        expect(response[:chef_extension_version]).to be == "1210.*"
        expect(response[:chef_extension_public_param]).to be == "chef_ext_public_params"
        expect(response[:chef_extension_private_param]).to be == "chef_ext_private_params"
      end
    end
  end
end
describe "parse role list xml" do
  # Runs the given role-list fixture through a fresh Azure::Role via
  # #parse_role_list_xml and returns [parsed_role, xml_document] so each
  # example can assert on both.
  def role_parsed_from(fixture_path)
    xml_doc = Nokogiri::XML(readFile(fixture_path))
    parsed = Azure::Role.new("connection")
    parsed.parse_role_list_xml(xml_doc)
    [parsed, xml_doc]
  end

  it "reads os_type and os_version from role list 1 xml" do
    parsed, xml_doc = role_parsed_from("bootstrap_azure_role_xmls/parse_role_list_xml/role_list_1.xml")
    expect(parsed.role_xml).to be == xml_doc
    expect(parsed.os_type).to be == "Linux"
    expect(parsed.os_version).to be == "842c8b9c6cvxzcvxzcv048xvbvge2323qe4c3__OpenLogic-CentOS-67-20140205"
  end

  it "reads os_type and os_version from role list 2 xml" do
    parsed, xml_doc = role_parsed_from("bootstrap_azure_role_xmls/parse_role_list_xml/role_list_2.xml")
    expect(parsed.role_xml).to be == xml_doc
    expect(parsed.os_type).to be == "Windows"
    expect(parsed.os_version).to be == "a6dfsdfwerfdfc0bc8f24rwefsd4ds01__Windows-Server-2012-R2-20141128-en.us-127GB.vhd"
  end
end
describe "add_extension" do
  # @service.add_extension is expected to wrap connection.roles.update with
  # progress messages on the UI.
  it "calls role update and prints success message on successful completion" do
    expect(@service.ui).to receive(:info).with(
      "Started with Chef Extension deployment on the server test-vm-01..."
    )
    expect(@service).to receive_message_chain(
      :connection, :roles, :update
    )
    expect(@service.ui).to receive(:info).with(
      "\nSuccessfully deployed Chef Extension on the server test-vm-01."
    )
    @service.add_extension(@bootstrap_azure_instance.name_args[0])
  end
  # NOTE(review): despite the description, no raise_error expectation is set
  # here — the failure is asserted to surface via Chef::Log only; confirm
  # add_extension is meant to swallow the exception.
  it "calls role update and raises error on unsuccessful completion" do
    expect(@service).to receive_message_chain(
      :connection, :roles, :update
    ).and_raise
    expect(Chef::Log).to receive(:error)
    expect(Chef::Log).to receive(:debug).at_least(:once)
    @service.add_extension(@bootstrap_azure_instance.name_args[0])
  end
end
describe "roles_update" do
  # Azure::Roles#update must delegate to a Role: first setup_extension with
  # the params hash, then update with the server name, params, and the
  # XML produced by setup_extension (nil in this example).
  before do
    @roles = Azure::Roles.new("connection")
    @role_double = double("Role")
    allow(Azure::Role).to receive(:new).and_return(@role_double)
  end

  it "calls setup_extension and update methods of Role class" do
    server_name = @bootstrap_azure_instance.name_args[0]
    expect(@role_double).to receive(:setup_extension).with({}).and_return(nil)
    expect(@role_double).to receive(:update).with(server_name, {}, nil)
    @roles.update(server_name, {})
  end
end
describe "setup_extension" do
  before do
    @role = Azure::Role.new("connection")
    # Stub the XML transformation step so setup_extension's own wrapping /
    # serialization logic is exercised against a fixed "updated" document.
    updated_role_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/setup_extension/updated_role.xml"))
    allow(@role).to receive(:update_role_xml_for_extension).and_return(updated_role_xml)
    @update_role_xml_for_extension = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/setup_extension/update_role.xml"))
    allow(@role).to receive(:puts) # silence console output from the method
  end
  it "creates new xml for update role" do
    response = @role.setup_extension({})
    expect(response).to eq(@update_role_xml_for_extension.to_xml)
  end
end
describe "update_role_xml_for_extension" do
  # Each context feeds a role XML fixture in a different starting state and
  # compares the rewritten <Role> subtree against an expected output fixture.
  before do
    @params = {
      chef_extension_publisher: "Chef.Bootstrap.WindowsAzure",
      chef_extension_version: "1210.12",
      chef_extension_public_param: "MyPublicParamsValue",
      chef_extension_private_param: "MyPrivateParamsValue",
      azure_dns_name: @bootstrap_azure_instance.config[:azure_dns_name],
    }
    @role = Azure::Role.new("connection")
  end
  context "ResourceExtensionReferences node is not present in role xml" do
    before do
      @input_role_1_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/input_role_1.xml"))
      @output_role_1_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/output_role_1.xml"))
      @params[:chef_extension] = "LinuxChefClient"
    end
    it "adds ResourceExtensionReferences node with ChefExtension config" do
      response = @role.update_role_xml_for_extension(@input_role_1_xml.at_css("Role"), @params)
      expect(response.to_xml).to eq(@output_role_1_xml.at_css("Role").to_xml)
    end
  end
  context "ResourceExtensionReferences node is present in xml but it is empty" do
    before do
      @input_role_2_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/input_role_2.xml"))
      @output_role_2_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/output_role_2.xml"))
      @params[:chef_extension] = "ChefClient"
    end
    it "updates ResourceExtensionReferences node with ChefExtension config" do
      response = @role.update_role_xml_for_extension(@input_role_2_xml.at_css("Role"), @params)
      expect(response.to_xml).to eq(@output_role_2_xml.at_css("Role").to_xml)
    end
  end
  context "ResourceExtensionReferences node is present in role xml but ChefExtension is not installed on the server" do
    before do
      @input_role_3_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/input_role_3.xml"))
      @output_role_3_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/output_role_3.xml"))
      @params[:chef_extension] = "ChefClient"
    end
    it "adds ChefExtension config in ResourceExtensionReferences node" do
      response = @role.update_role_xml_for_extension(@input_role_3_xml.at_css("Role"), @params)
      expect(response.to_xml).to eq(@output_role_3_xml.at_css("Role").to_xml)
    end
  end
  context "ResourceExtensionReferences node is present in role xml and ChefExtension is already installed on the server" do
    before do
      @input_role_4_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/update_role_xml_for_extension/input_role_4.xml"))
      @params[:chef_extension] = "LinuxChefClient"
      @params[:azure_vm_name] = "test-vm-01"
    end
    # Re-installation is an error; only the input fixture is needed here.
    it "raises an error with message as 'ChefExtension is already installed on the server'" do
      expect { @role.update_role_xml_for_extension(@input_role_4_xml.at_css("Role"), @params) }.to raise_error("Chef Extension is already installed on the server test-vm-01.")
    end
  end
end
# Covers Role#update: success is silent, failure raises with the error code
# and message parsed out of the Azure response XML.
describe "role_update" do
before do
@role = Azure::Role.new("connection")
@role.connection = double("Connection")
allow(@role).to receive(:puts)
end
it "does not raise error on update role success" do
expect(@role.connection).to receive(:query_azure)
# Empty code/message pair signals success from error_from_response_xml.
expect(@role).to receive(:error_from_response_xml).and_return(["", ""])
expect(Chef::Log).to_not receive(:debug)
expect { @role.update(@bootstrap_azure_instance.name_args[0], {}, "") }.not_to raise_error
end
it "raises an error on update role failure" do
expect(@role.connection).to receive(:query_azure)
expect(@role).to receive(:error_from_response_xml)
.and_return(["InvalidXmlRequest", "The request body's XML was invalid or not correctly specified."])
# Failure path is expected to log the raw response for debugging.
expect(Chef::Log).to receive(:debug).at_least(:once)
expect { @role.update(@bootstrap_azure_instance.name_args[0], {}, "") }.to raise_error("Unable to update role:InvalidXmlRequest : The request body's XML was invalid or not correctly specified.")
end
end
# Version resolution: an explicit knife.rb setting wins; otherwise the latest
# published version is derived from the extensions list returned by the
# (stubbed) ASM service interface.
describe "get_chef_extension_version" do
before do
# Force the ASM (Service Management) code path rather than ARM.
allow(@service).to receive(:instance_of?).with(
Azure::ResourceManagement::ARMInterface
).and_return(false)
allow(@service).to receive(:instance_of?).with(
Azure::ServiceManagement::ASMInterface
).and_return(true)
end
context "when extension version is set in knife.rb" do
before do
@bootstrap_azure_instance.config[:azure_chef_extension_version] = "1012.10"
end
it "will pick up the extension version from knife.rb" do
response = @bootstrap_azure_instance.get_chef_extension_version("MyChefClient")
expect(response).to be == "1012.10"
end
end
context "when extension version is not set in knife.rb" do
before do
@bootstrap_azure_instance.config.delete(:azure_chef_extension_version)
extensions_list_xml = Nokogiri::XML(readFile("bootstrap_azure_role_xmls/extensions_list.xml"))
allow(@service).to receive(
:get_extension
).and_return(extensions_list_xml)
end
it "will pick up the latest version of the extension" do
expect(@service).to_not receive(:get_latest_chef_extension_version)
# "1210.*" pins the major version while floating the minor.
response = @bootstrap_azure_instance.get_chef_extension_version("MyChefClient")
expect(response).to be == "1210.*"
end
end
end
# Specs for the recursive polling helper that waits for the Chef Extension to
# report status on the deployment before chef-client run logs are fetched.
# The original file repeated the same before/it pair for every scenario; the
# two outcomes are factored into shared examples, and each context keeps its
# original description and VM fixture so the reported spec output is unchanged.
describe "wait_until_extension_available" do
  # Shared stub: the method polls fetch_deployment, so scenarios only differ
  # in which deployment XML (and which VM in name_args) they use.
  def stub_deployment(xml)
    allow(@bootstrap_azure_instance).to receive(:fetch_deployment).and_return(xml)
  end

  # "Keep waiting" outcome: print one progress marker, sleep 30s, recurse.
  shared_examples "sleeps and retries" do
    it "goes to sleep and then re-invokes the wait_until_extension_available method recursively" do
      mock_recursive_call
      expect(@bootstrap_azure_instance).to receive(:print).exactly(1).times
      expect(@bootstrap_azure_instance).to receive(:sleep).with(30)
      expect(@bootstrap_azure_instance).to receive(
        :wait_until_extension_available
      ).with(@start_time, 10)
      @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
    end
  end

  # "Extension available" outcome: return without sleeping or recursing.
  shared_examples "returns without retrying" do
    it "does not go to sleep and does not re-invoke the wait_until_extension_available method recursively" do
      mock_recursive_call
      expect(@bootstrap_azure_instance).to_not receive(:print)
      expect(@bootstrap_azure_instance).to_not receive(:sleep).with(30)
      expect(@bootstrap_azure_instance).to_not receive(
        :wait_until_extension_available
      ).with(@start_time, 10)
      @bootstrap_azure_instance.wait_until_extension_available_mocked(@start_time, 10)
    end
  end

  context "extension_availability_wait_time has exceeded the extension_availability_wait_timeout" do
    before do
      @start_time = Time.now
    end
    it "raises error saying unable to fetch chef-client run logs" do
      # timeout of -1 minutes makes the deadline immediately exceeded.
      expect { @bootstrap_azure_instance.wait_until_extension_available(@start_time, -1) }.to raise_error(
        "\nUnable to fetch chef-client run logs as Chef Extension seems to be unavailable even after -1 minutes of its deployment.\n"
      )
    end
  end

  context "extension_availability_wait_time has not exceeded the extension_availability_wait_timeout" do
    context "deployment not available" do
      before do
        @start_time = Time.now
        # An empty document stands in for "no deployment yet".
        stub_deployment(Nokogiri::XML(""))
      end
      include_examples "sleeps and retries"
    end

    context "deployment available" do
      before do
        @start_time = Time.now
        stub_deployment(Nokogiri::XML(readFile("extension_deployment_xml.xml")))
      end

      context "given role not available" do
        include_examples "sleeps and retries"
      end

      context "given role available" do
        context "GuestAgent not ready" do
          before { @bootstrap_azure_instance.name_args = ["vm05"] }
          include_examples "sleeps and retries"
        end

        context "GuestAgent ready" do
          context "none of the extension status available" do
            before { @bootstrap_azure_instance.name_args = ["vm06"] }
            include_examples "sleeps and retries"
          end

          context "extension status(es) available apart from extension status for Chef Extension" do
            context "example-1" do
              before { @bootstrap_azure_instance.name_args = ["vm01"] }
              include_examples "sleeps and retries"
            end
            context "example-2" do
              before { @bootstrap_azure_instance.name_args = ["vm07"] }
              include_examples "sleeps and retries"
            end
          end

          context "extension status(es) available including extension status for Chef Extension" do
            context "example-1" do
              before { @bootstrap_azure_instance.name_args = ["vm02"] }
              include_examples "returns without retrying"
            end
            context "example-2" do
              before { @bootstrap_azure_instance.name_args = ["vm03"] }
              include_examples "returns without retrying"
            end
            context "example-3" do
              before { @bootstrap_azure_instance.name_args = ["vm08"] }
              include_examples "returns without retrying"
            end
          end
        end
      end
    end
  end
end
# Checks that fetch_deployment returns the service's deployment document and
# that the fixture's role-instance list parses as expected.
describe "fetch_deployment" do
before do
allow(@bootstrap_azure_instance.service).to receive(
:deployment_name
).and_return("deploymentExtension")
deployment = Nokogiri::XML readFile("extension_deployment_xml.xml")
allow(@bootstrap_azure_instance.service).to receive(
:deployment
).and_return(deployment)
end
it "returns the deployment" do
response = @bootstrap_azure_instance.fetch_deployment
expect(response).to_not be nil
expect(response.at_css("Deployment Name").text).to be == "deploymentExtension"
expect(response.css("RoleInstanceList RoleInstance RoleName").class).to be == Nokogiri::XML::NodeSet
# The fixture contains eight role instances (vm01..vm08).
expect(response.css("RoleInstanceList RoleInstance RoleName").children.count).to be == 8
end
end
# Exposes wait_until_extension_available under an aliased name on the
# instance's singleton class, so specs can stub the recursive call while
# still invoking the real implementation once via the alias.
def mock_recursive_call
  @bootstrap_azure_instance.singleton_class.send(
    :alias_method,
    :wait_until_extension_available_mocked,
    :wait_until_extension_available
  )
end
end
|
require 'persistent_enum'
require 'byebug'
require_relative '../spec_helper'
# Behavioural specs for PersistentEnum: table-backed and table-less enum
# models, constant caching, reconciliation with pre-existing rows, extra
# fields, postgres enum-typed ids, and a renamed name column.
RSpec.describe PersistentEnum, :database do
CONSTANTS = [:One, :Two, :Three, :Four]
before(:context) do
initialize_database
end
after(:each) do
destroy_test_models
end
# Core lookup behaviour shared by persisted and dummy (table-less) enums.
shared_examples "acts like an enum" do
# abstract :model
it "looks up each value" do
CONSTANTS.each do |c|
e = model.value_of(c)
expect(e).to be_present
expect(e.enum_constant).to be_a(String)
expect(e.to_sym).to eq(c)
expect(e).to eq(model[e.ordinal])
expect(e).to eq(model.const_get(c.upcase))
expect(e).to be_frozen
expect(e.enum_constant).to be_frozen
end
end
it "returns all values from the cache" do
expect(model.values.map(&:to_sym)).to contain_exactly(*CONSTANTS)
end
end
# Adds association behaviour that only applies when the enum has a table.
shared_examples "acts like a persisted enum" do
# abstract :model
include_examples "acts like an enum"
context "a referring model" do
let(:foreign_name) { model.model_name.singular }
let(:foreign_key) { foreign_name + "_id" }
let(:other_model) do
# Capture let values in locals: the create_test_model block is class_eval'd,
# so RSpec lets are not in scope inside it.
foreign_name = foreign_name()
foreign_key_type = model.columns.detect { |x| x.name == "id" }.sql_type
create_table = ->(t){
t.references foreign_name, type: foreign_key_type, foreign_key: true
}
create_test_model(:referrer, create_table) do
belongs_to_enum foreign_name
end
end
it "can be created from enum value" do
model.values.each do |v|
t = other_model.new(foreign_name => v)
expect(t).to be_valid
.and have_attributes(foreign_name => v,
foreign_key => v.ordinal)
end
end
it "can be created from constant name" do
model.values.each do |v|
t = other_model.new(foreign_name => v.enum_constant)
expect(t).to be_valid
.and have_attributes(foreign_name => v,
foreign_key => v.ordinal)
end
end
it "can be created from ordinal" do
model.values.each do |v|
t = other_model.new(foreign_key => v.ordinal)
expect(t).to be_valid
.and have_attributes(foreign_name => v,
foreign_key => v.ordinal)
end
end
end
end
context "with an enum model" do
let(:model) do
create_test_model(:with_table, ->(t){ t.string :name }) do
acts_as_enum(CONSTANTS)
end
end
it_behaves_like "acts like a persisted enum"
it "returns all values from the database" do
expect(model.all.map(&:to_sym)).to contain_exactly(*CONSTANTS)
end
it "is immutable" do
expect { model.create(name: "foo") }
.to raise_error(ActiveRecord::ReadOnlyRecord)
expect { model::ONE.name = "foo" }
.to raise_error(RuntimeError) # Frozen object
expect { model.first.update_attribute(:name, "foo") }
.to raise_error(ActiveRecord::ReadOnlyRecord)
expect { model.first.destroy }
.to raise_error(ActiveRecord::ReadOnlyRecord)
end
end
context "with a table-less enum" do
let(:model) do
create_test_model(:without_table, nil, create_table: false) do
acts_as_enum(CONSTANTS)
end
end
it_behaves_like "acts like an enum"
# Dummy values must still answer id/name via method, string and symbol access.
it "initializes dummy values correctly" do
model.values.each do |val|
i = val.ordinal
expect(i).to be_a(Integer)
expect(val.id).to eq(i)
expect(val["id"]).to eq(i)
expect(val[:id]).to eq(i)
c = val.enum_constant
expect(c).to be_a(String)
expect(val.name).to eq(c)
expect(val["name"]).to eq(c)
expect(val[:name]).to eq(c)
end
end
end
# Rows are inserted before acts_as_enum runs, to check reconciliation of
# required constants with pre-existing (and now-outdated) database state.
context "with existing data" do
let(:initial_ordinal) { 9998 }
let(:initial_constant) { CONSTANTS.first }
let(:existing_ordinal) { 9999 }
let(:existing_constant) { :Hello }
let!(:model) do
model = create_test_model(:with_existing, ->(t){ t.string :name })
@initial_value = model.create(id: initial_ordinal, name: initial_constant.to_s)
@existing_value = model.create(id: existing_ordinal, name: existing_constant.to_s)
model.acts_as_enum(CONSTANTS)
model
end
it_behaves_like "acts like a persisted enum"
let(:expected_all) { (CONSTANTS + [existing_constant])}
let(:expected_required) { CONSTANTS}
it "caches required values" do
expect(model.values.map(&:to_sym)).to contain_exactly(*expected_required)
end
it "caches all values" do
expect(model.all_values.map(&:to_sym)).to contain_exactly(*expected_all)
end
it "loads all values" do
expect(model.all.map(&:to_sym)).to contain_exactly(*expected_all)
end
let(:required_ordinals) { expected_required.map { |name| model.value_of!(name).ordinal } }
let(:all_ordinals) { expected_all.map { |name| model.value_of!(name).ordinal } }
it "caches required ordinals" do
expect(model.ordinals).to contain_exactly(*required_ordinals)
end
it "caches all ordinals" do
expect(model.all_ordinals).to contain_exactly(*all_ordinals)
end
it "loads all ordinals" do
expect(model.pluck(:id)).to contain_exactly(*all_ordinals)
end
it "respects initial value" do
expect(model[initial_ordinal]).to eq(@initial_value)
expect(model.value_of(initial_constant)).to eq(@initial_value)
expect(model.where(name: initial_constant).first).to eq(@initial_value)
end
it "respects existing value" do
expect(model[existing_ordinal]).to eq(@existing_value)
expect(model.value_of(existing_constant)).to eq(@existing_value)
expect(model.where(name: existing_constant).first).to eq(@existing_value)
end
it "marks existing model as non-active" do
expect(model[existing_ordinal]).to_not be_active
end
end
context "with cached constants" do
let(:model) do
create_test_model(:with_constants, ->(t){ t.string :name }) do
PersistentEnum.cache_constants(self, CONSTANTS)
end
end
it "caches all the constants" do
CONSTANTS.each do |c|
cached = model.const_get(c.upcase)
expect(cached).to be_present
expect(cached.name).to eq(c.to_s)
loaded = model.find_by(name: c.to_s)
expect(loaded).to be_present.and eq(cached)
end
end
end
# Constant-name normalisation: punctuation and repeated underscores collapse.
context "with complex constant names" do
let(:test_constants) do
{
"CamelCase" => "CAMEL_CASE",
:Symbolic => "SYMBOLIC",
"with.punctuation" => "WITH_PUNCTUATION",
"multiple_.underscores" => "MULTIPLE_UNDERSCORES"
}
end
let(:model) do
test_constants = test_constants()
create_test_model(:with_complex_names, ->(t){ t.string :name }) do
PersistentEnum.cache_constants(self, test_constants.keys)
end
end
it "caches the constant name as we expect" do
test_constants.each do |expected_name, expected_constant|
val = model.const_get(expected_constant)
expect(val).to be_present
expect(val.name).to eq(expected_name.to_s)
end
end
end
context "with extra fields" do
let(:members) do
{
:One => { count: 1 },
:Two => { count: 2 },
:Three => { count: 3 },
:Four => { count: 4 }
}
end
shared_examples "acts like an enum with extra fields" do
it "has all expected members with expected values" do
members.each do |name, fields|
ev = model.value_of(name)
# Ensure it exists and is correctly saved
expect(ev).to be_present
expect(model.values).to include(ev)
expect(model.all_values).to include(ev)
expect(ev).to eq(model[ev.ordinal])
# Ensure it's correctly saved
if model.table_exists?
expect(model.where(name: name).first).to eq(ev)
end
# and that fields have been correctly set
fields.each do |fname, fvalue|
expect(ev[fname]).to eq(fvalue)
end
end
end
end
shared_examples "acts like a persisted enum with extra fields" do
include_examples "acts like an enum with extra fields"
end
context "providing a hash" do
let(:model) do
members = members()
create_test_model(:with_extra_field, ->(t){ t.string :name; t.integer :count }) do
# pre-existing matching, non-matching, and outdated data
create(name: "One", count: 3)
create(name: "Two", count: 2)
create(name: "Zero", count: 0)
acts_as_enum(members)
end
end
it_behaves_like "acts like a persisted enum"
it_behaves_like "acts like a persisted enum with extra fields"
it "keeps outdated data" do
z = model.value_of("Zero")
expect(z).to be_present
expect(model[z.ordinal]).to eq(z)
expect(z.count).to eq(0)
expect(model.all_values).to include(z)
expect(model.values).not_to include(z)
end
end
context "using builder interface" do
let(:model) do
create_test_model(:with_extra_field_using_builder, ->(t){ t.string :name; t.integer :count }) do
acts_as_enum([]) do
One(count: 1)
Two(count: 2)
constant!(:Three, count: 3)
Four(count: 4)
end
end
end
it_behaves_like "acts like a persisted enum"
it_behaves_like "acts like a persisted enum with extra fields"
end
context "without table" do
let(:model) do
members = members()
create_test_model(:with_extra_field_without_table, nil, create_table: false) do
acts_as_enum(members)
end
end
it_behaves_like "acts like an enum"
it_behaves_like "acts like an enum with extra fields"
end
# Mismatches between declared members and table columns must raise eagerly.
it "must have attributes that match the table" do
expect {
create_test_model(:test_invalid_args_a, ->(t){ t.string :name; t.integer :count }) do
acts_as_enum([:One])
end
}.to raise_error(ArgumentError)
destroy_test_model(:test_invalid_args_a)
expect {
create_test_model(:test_invalid_args_b, ->(t){ t.string :name; t.integer :count }) do
acts_as_enum({ :One => { incorrect: 1 } })
end
}.to raise_error(ArgumentError)
destroy_test_model(:test_invalid_args_b)
expect {
create_test_model(:test_invalid_args_c, ->(t){ t.string :name }) do
acts_as_enum({ :One => { incorrect: 1 } })
end
}.to raise_error(ArgumentError)
destroy_test_model(:test_invalid_args_c)
end
end
context "using a postgresql enum valued id" do
let(:name) { "with_enum_id" }
let(:enum_type) { "#{name}_type" }
context "with table" do
before(:each) do
ActiveRecord::Base.connection.execute("CREATE TYPE #{enum_type} AS ENUM ()")
ActiveRecord::Base.connection.create_table(name.pluralize, id: false) do |t|
t.column :id, enum_type, primary_key: true, null: false
t.string :name
end
end
after(:each) do
ActiveRecord::Base.connection.execute("DROP TYPE #{enum_type} CASCADE")
end
let!(:model) do
enum_type = enum_type()
create_test_model(:with_enum_id, nil, create_table: false) do
acts_as_enum(CONSTANTS, sql_enum_type: enum_type)
end
end
it_behaves_like "acts like a persisted enum"
end
context "without table" do
let!(:model) do
enum_type = enum_type()
create_test_model(:no_table_enum_id, nil, create_table: false) do
acts_as_enum(CONSTANTS, sql_enum_type: enum_type)
end
end
it_behaves_like "acts like an enum"
end
end
context "with the name of the enum value column changed" do
let(:model) do
create_test_model(:test_new_name, ->(t){ t.string :namey }) do
acts_as_enum(CONSTANTS, name_attr: :namey)
end
end
it_behaves_like "acts like a persisted enum"
end
# DDL inside a transaction would be silently rolled back on some databases,
# so table creation is refused outright.
it "refuses to create a table in a transaction" do
expect {
ActiveRecord::Base.transaction do
create_test_model(:test_create_in_transaction, ->(t){ t.string :name }) do
acts_as_enum([:A, :B])
end
end
}.to raise_error(RuntimeError)
end
end
Port missing tests
require 'persistent_enum'
require 'byebug'
require_relative '../spec_helper'
# Behavioural specs for PersistentEnum, including validation of foreign keys
# that reference enum values (null, out-of-range ordinal, unknown constant).
RSpec.describe PersistentEnum, :database do
CONSTANTS = [:One, :Two, :Three, :Four]
before(:context) do
initialize_database
end
after(:each) do
destroy_test_models
end
# Core lookup behaviour shared by persisted and dummy (table-less) enums.
shared_examples "acts like an enum" do
# abstract :model
it "looks up each value" do
CONSTANTS.each do |c|
e = model.value_of(c)
expect(e).to be_present
expect(e.enum_constant).to be_a(String)
expect(e.to_sym).to eq(c)
expect(e).to eq(model[e.ordinal])
expect(e).to eq(model.const_get(c.upcase))
expect(e).to be_frozen
expect(e.enum_constant).to be_frozen
end
end
it "returns all values from the cache" do
expect(model.values.map(&:to_sym)).to contain_exactly(*CONSTANTS)
end
end
# Adds association behaviour that only applies when the enum has a table.
shared_examples "acts like a persisted enum" do
# abstract :model
include_examples "acts like an enum"
context "a referring model" do
let(:foreign_name) { model.model_name.singular }
let(:foreign_key) { foreign_name + "_id" }
let(:other_model) do
# Capture let values in locals: the create_test_model block is class_eval'd,
# so RSpec lets are not in scope inside it.
foreign_name = foreign_name()
foreign_key_type = model.columns.detect { |x| x.name == "id" }.sql_type
create_table = ->(t){
t.references foreign_name, type: foreign_key_type, foreign_key: true
}
create_test_model(:referrer, create_table) do
belongs_to_enum foreign_name
end
end
it "can be created from enum value" do
model.values.each do |v|
t = other_model.new(foreign_name => v)
expect(t).to be_valid
.and have_attributes(foreign_name => v,
foreign_key => v.ordinal)
end
end
it "can be created from constant name" do
model.values.each do |v|
t = other_model.new(foreign_name => v.enum_constant)
expect(t).to be_valid
.and have_attributes(foreign_name => v,
foreign_key => v.ordinal)
end
end
it "can be created from ordinal" do
model.values.each do |v|
t = other_model.new(foreign_key => v.ordinal)
expect(t).to be_valid
.and have_attributes(foreign_name => v,
foreign_key => v.ordinal)
end
end
# Association is optional: a nil foreign key remains valid.
it "can be created with null foreign key" do
t = other_model.new
expect(t).to be_valid
end
it "can not be created with invalid foreign key" do
t = other_model.new(foreign_key => -1)
expect(t).not_to be_valid
end
# An unknown constant cannot be resolved to a value at all, so assignment
# raises rather than producing an invalid record.
it "can not be created with invalid foreign constant" do
expect {
other_model.new(foreign_name => :BadConstant)
}.to raise_error(NameError)
end
end
end
context "with an enum model" do
let(:model) do
create_test_model(:with_table, ->(t){ t.string :name }) do
acts_as_enum(CONSTANTS)
end
end
it_behaves_like "acts like a persisted enum"
it "returns all values from the database" do
expect(model.all.map(&:to_sym)).to contain_exactly(*CONSTANTS)
end
it "is immutable" do
expect { model.create(name: "foo") }
.to raise_error(ActiveRecord::ReadOnlyRecord)
expect { model::ONE.name = "foo" }
.to raise_error(RuntimeError) # Frozen object
expect { model.first.update_attribute(:name, "foo") }
.to raise_error(ActiveRecord::ReadOnlyRecord)
expect { model.first.destroy }
.to raise_error(ActiveRecord::ReadOnlyRecord)
end
end
context "with a table-less enum" do
let(:model) do
create_test_model(:without_table, nil, create_table: false) do
acts_as_enum(CONSTANTS)
end
end
it_behaves_like "acts like an enum"
# Dummy values must still answer id/name via method, string and symbol access.
it "initializes dummy values correctly" do
model.values.each do |val|
i = val.ordinal
expect(i).to be_a(Integer)
expect(val.id).to eq(i)
expect(val["id"]).to eq(i)
expect(val[:id]).to eq(i)
c = val.enum_constant
expect(c).to be_a(String)
expect(val.name).to eq(c)
expect(val["name"]).to eq(c)
expect(val[:name]).to eq(c)
end
end
end
# Rows are inserted before acts_as_enum runs, to check reconciliation of
# required constants with pre-existing (and now-outdated) database state.
context "with existing data" do
let(:initial_ordinal) { 9998 }
let(:initial_constant) { CONSTANTS.first }
let(:existing_ordinal) { 9999 }
let(:existing_constant) { :Hello }
let!(:model) do
model = create_test_model(:with_existing, ->(t){ t.string :name })
@initial_value = model.create(id: initial_ordinal, name: initial_constant.to_s)
@existing_value = model.create(id: existing_ordinal, name: existing_constant.to_s)
model.acts_as_enum(CONSTANTS)
model
end
it_behaves_like "acts like a persisted enum"
let(:expected_all) { (CONSTANTS + [existing_constant])}
let(:expected_required) { CONSTANTS}
it "caches required values" do
expect(model.values.map(&:to_sym)).to contain_exactly(*expected_required)
end
it "caches all values" do
expect(model.all_values.map(&:to_sym)).to contain_exactly(*expected_all)
end
it "loads all values" do
expect(model.all.map(&:to_sym)).to contain_exactly(*expected_all)
end
let(:required_ordinals) { expected_required.map { |name| model.value_of!(name).ordinal } }
let(:all_ordinals) { expected_all.map { |name| model.value_of!(name).ordinal } }
it "caches required ordinals" do
expect(model.ordinals).to contain_exactly(*required_ordinals)
end
it "caches all ordinals" do
expect(model.all_ordinals).to contain_exactly(*all_ordinals)
end
it "loads all ordinals" do
expect(model.pluck(:id)).to contain_exactly(*all_ordinals)
end
it "respects initial value" do
expect(model[initial_ordinal]).to eq(@initial_value)
expect(model.value_of(initial_constant)).to eq(@initial_value)
expect(model.where(name: initial_constant).first).to eq(@initial_value)
end
it "respects existing value" do
expect(model[existing_ordinal]).to eq(@existing_value)
expect(model.value_of(existing_constant)).to eq(@existing_value)
expect(model.where(name: existing_constant).first).to eq(@existing_value)
end
it "marks existing model as non-active" do
expect(model[existing_ordinal]).to_not be_active
end
end
context "with cached constants" do
let(:model) do
create_test_model(:with_constants, ->(t){ t.string :name }) do
PersistentEnum.cache_constants(self, CONSTANTS)
end
end
it "caches all the constants" do
CONSTANTS.each do |c|
cached = model.const_get(c.upcase)
expect(cached).to be_present
expect(cached.name).to eq(c.to_s)
loaded = model.find_by(name: c.to_s)
expect(loaded).to be_present.and eq(cached)
end
end
end
# Constant-name normalisation: punctuation and repeated underscores collapse.
context "with complex constant names" do
let(:test_constants) do
{
"CamelCase" => "CAMEL_CASE",
:Symbolic => "SYMBOLIC",
"with.punctuation" => "WITH_PUNCTUATION",
"multiple_.underscores" => "MULTIPLE_UNDERSCORES"
}
end
let(:model) do
test_constants = test_constants()
create_test_model(:with_complex_names, ->(t){ t.string :name }) do
PersistentEnum.cache_constants(self, test_constants.keys)
end
end
it "caches the constant name as we expect" do
test_constants.each do |expected_name, expected_constant|
val = model.const_get(expected_constant)
expect(val).to be_present
expect(val.name).to eq(expected_name.to_s)
end
end
end
context "with extra fields" do
let(:members) do
{
:One => { count: 1 },
:Two => { count: 2 },
:Three => { count: 3 },
:Four => { count: 4 }
}
end
shared_examples "acts like an enum with extra fields" do
it "has all expected members with expected values" do
members.each do |name, fields|
ev = model.value_of(name)
# Ensure it exists and is correctly saved
expect(ev).to be_present
expect(model.values).to include(ev)
expect(model.all_values).to include(ev)
expect(ev).to eq(model[ev.ordinal])
# Ensure it's correctly saved
if model.table_exists?
expect(model.where(name: name).first).to eq(ev)
end
# and that fields have been correctly set
fields.each do |fname, fvalue|
expect(ev[fname]).to eq(fvalue)
end
end
end
end
shared_examples "acts like a persisted enum with extra fields" do
include_examples "acts like an enum with extra fields"
end
context "providing a hash" do
let(:model) do
members = members()
create_test_model(:with_extra_field, ->(t){ t.string :name; t.integer :count }) do
# pre-existing matching, non-matching, and outdated data
create(name: "One", count: 3)
create(name: "Two", count: 2)
create(name: "Zero", count: 0)
acts_as_enum(members)
end
end
it_behaves_like "acts like a persisted enum"
it_behaves_like "acts like a persisted enum with extra fields"
it "keeps outdated data" do
z = model.value_of("Zero")
expect(z).to be_present
expect(model[z.ordinal]).to eq(z)
expect(z.count).to eq(0)
expect(model.all_values).to include(z)
expect(model.values).not_to include(z)
end
end
context "using builder interface" do
let(:model) do
create_test_model(:with_extra_field_using_builder, ->(t){ t.string :name; t.integer :count }) do
acts_as_enum([]) do
One(count: 1)
Two(count: 2)
constant!(:Three, count: 3)
Four(count: 4)
end
end
end
it_behaves_like "acts like a persisted enum"
it_behaves_like "acts like a persisted enum with extra fields"
end
context "without table" do
let(:model) do
members = members()
create_test_model(:with_extra_field_without_table, nil, create_table: false) do
acts_as_enum(members)
end
end
it_behaves_like "acts like an enum"
it_behaves_like "acts like an enum with extra fields"
end
# Mismatches between declared members and table columns must raise eagerly.
it "must have attributes that match the table" do
expect {
create_test_model(:test_invalid_args_a, ->(t){ t.string :name; t.integer :count }) do
acts_as_enum([:One])
end
}.to raise_error(ArgumentError)
destroy_test_model(:test_invalid_args_a)
expect {
create_test_model(:test_invalid_args_b, ->(t){ t.string :name; t.integer :count }) do
acts_as_enum({ :One => { incorrect: 1 } })
end
}.to raise_error(ArgumentError)
destroy_test_model(:test_invalid_args_b)
expect {
create_test_model(:test_invalid_args_c, ->(t){ t.string :name }) do
acts_as_enum({ :One => { incorrect: 1 } })
end
}.to raise_error(ArgumentError)
destroy_test_model(:test_invalid_args_c)
end
end
context "using a postgresql enum valued id" do
let(:name) { "with_enum_id" }
let(:enum_type) { "#{name}_type" }
context "with table" do
before(:each) do
ActiveRecord::Base.connection.execute("CREATE TYPE #{enum_type} AS ENUM ()")
ActiveRecord::Base.connection.create_table(name.pluralize, id: false) do |t|
t.column :id, enum_type, primary_key: true, null: false
t.string :name
end
end
after(:each) do
ActiveRecord::Base.connection.execute("DROP TYPE #{enum_type} CASCADE")
end
let!(:model) do
enum_type = enum_type()
create_test_model(:with_enum_id, nil, create_table: false) do
acts_as_enum(CONSTANTS, sql_enum_type: enum_type)
end
end
it_behaves_like "acts like a persisted enum"
end
context "without table" do
let!(:model) do
enum_type = enum_type()
create_test_model(:no_table_enum_id, nil, create_table: false) do
acts_as_enum(CONSTANTS, sql_enum_type: enum_type)
end
end
it_behaves_like "acts like an enum"
end
end
context "with the name of the enum value column changed" do
let(:model) do
create_test_model(:test_new_name, ->(t){ t.string :namey }) do
acts_as_enum(CONSTANTS, name_attr: :namey)
end
end
it_behaves_like "acts like a persisted enum"
end
# DDL inside a transaction would be silently rolled back on some databases,
# so table creation is refused outright.
it "refuses to create a table in a transaction" do
expect {
ActiveRecord::Base.transaction do
create_test_model(:test_create_in_transaction, ->(t){ t.string :name }) do
acts_as_enum([:A, :B])
end
end
}.to raise_error(RuntimeError)
end
end
|
require 'spec_helper'
# ChefSpec coverage for rackspace_motd::default on the rhel platform family:
# the recipe should render /etc/motd including the configured extra text.
describe 'rackspace_motd::default' do
let(:file) {"/etc/motd"}
let(:chef_run) do
ChefSpec::Runner.new do |node|
node.set[:platform_family] = 'rhel'
node.set[:rackspace_motd][:additional_text] = 'some additional text'
end.converge(described_recipe)
end
it 'writes /etc/motd' do
expect(chef_run).to render_file(file).with_content('Chef-Client')
end
it 'writes /etc/motd with additional text' do
expect(chef_run).to render_file(file).with_content('some additional text')
end
end
Add Debian platform-family tests
require 'spec_helper'
# ChefSpec coverage for rackspace_motd::default per platform family:
# rhel renders /etc/motd while debian renders /etc/motd.tail.
describe 'rackspace_motd::default' do
context "platform family - rhel" do
let(:file) {"/etc/motd"}
let(:chef_run) do
ChefSpec::Runner.new do |node|
node.set[:platform_family] = 'rhel'
node.set[:rackspace_motd][:additional_text] = 'some additional text'
end.converge(described_recipe)
end
it 'writes /etc/motd' do
expect(chef_run).to render_file(file).with_content('Chef-Client')
end
it 'writes /etc/motd with additional text' do
expect(chef_run).to render_file(file).with_content('some additional text')
end
end
context "platform family - debian" do
let(:file) {"/etc/motd.tail"}
let(:chef_run) do
ChefSpec::Runner.new do |node|
node.set[:platform_family] = 'debian'
node.set[:rackspace_motd][:additional_text] = 'some additional text'
end.converge(described_recipe)
end
it 'writes /etc/motd.tail' do
expect(chef_run).to render_file(file).with_content('Chef-Client')
end
it 'writes /etc/motd.tail with additional text' do
expect(chef_run).to render_file(file).with_content('some additional text')
end
end
end
|
Added pending test for (currently broken) date-only timestamp support
require 'spec_helper'

module Resync
  describe TimeNode do
    # Pending spec (no body yet): W3C datetime permits reduced-precision
    # forms (e.g. year-only, year-month); support is currently broken.
    it 'supports all W3C datetime formats' # see http://www.w3.org/TR/NOTE-datetime
  end
end
|
Green
|
Remove defunct config setting
|
Don't use log_buddy in production
|
# encoding: utf-8
require 'sinatra'
require 'connexionz'
require 'haml'
require 'tropo-webapi-ruby'
require 'json'
set :sender_phone, ENV['SC_PHONE']
set :va_phone, ENV['VA_PHONE']
set :char_phone, ENV['CHAR_PHONE']
set :spanish_sc, ENV['SPANISH_SC']
set :spanish_va, ENV['SPANISH_VA']
set :spanish_char, ENV['SPANISH_CHAR']
use Rack::Session::Pool
# Entry point for English calls: store caller info in the session, greet
# the caller, collect a 5-digit stop number, then continue at /continue.json.
post '/index.json' do
  # Tropo POSTs a JSON session document as the request body.
  v = Tropo::Generator.parse request.env["rack.input"].read
  session[:from] = v[:session][:from]
  session[:to_phone] = v[:session][:to][:name]
  session[:network] = v[:session][:to][:network]
  session[:channel] = v[:session][:to][:channel]
  t = Tropo::Generator.new
  t.say "Welcome to yak bus"
  t.ask :name => 'digit',
    :timeout => 60,
    :say => {:value => "Enter the five digit bus stop number"},
    :choices => {:value => "[5 DIGITS]",:mode => "dtmf"}
  t.on :event => 'continue', :next => '/continue.json'
  t.response
end
# Second step of the English call flow: look up the stop number the caller
# entered and speak the arrival information, then offer another lookup.
post '/continue.json' do
  payload = Tropo::Generator.parse request.env["rack.input"].read
  tropo = Tropo::Generator.new
  stop_number = payload[:result][:actions][:digit][:value]
  # Pick the agency by which inbound phone number was dialled.
  location =
    if session[:to_phone] == settings.va_phone
      'va'
    elsif session[:to_phone] == settings.char_phone
      'char'
    else
      'sc'
    end
  stop = get_et_info(location, stop_number)
  tropo.say(:value => stop)
  tropo.on :event => 'continue', :next => '/next.json'
  tropo.response
end
# Entry point for Spanish calls: same flow as /index.json but with the
# "esperanza" TTS voice and the es-mx speech recognizer.
post '/spanish.json' do
  v = Tropo::Generator.parse request.env["rack.input"].read
  session[:from] = v[:session][:from]
  session[:to_phone] = v[:session][:to][:name]
  session[:network] = v[:session][:to][:network]
  session[:channel] = v[:session][:to][:channel]
  t = Tropo::Generator.new
  t.say "Bienvenido al bus yak", :voice =>"esperanza"
  t.ask :name => 'digit',
    :timeout => 60,
    :say => {:value => "Introduzca los cinco dígitos del número parada de autobús"},
    :voice => "esperanza",
    :choices => {:value => "[5 DIGITS]"},
    :recognizer => "es-mx"
  t.on :event => 'continue', :next => '/continue_spanish.json'
  t.response
end
# Second step of the Spanish call flow: look up the stop, translate the
# canned English fragments to Spanish and speak the result.
post '/continue_spanish.json' do
  v = Tropo::Generator.parse request.env["rack.input"].read
  t = Tropo::Generator.new
  answer = v[:result][:actions][:digit][:value]
  if session[:to_phone] == settings.spanish_va
    stop = get_et_info('va', answer)
  elsif session[:to_phone] == settings.spanish_char
    stop = get_et_info('char', answer)
  else
    stop = get_et_info('sc', answer)
  end
  # BUGFIX: the original used String#tr, which performs per-CHARACTER
  # transliteration (e.g. tr('Route','ruta') maps o→u, t→a, ...), not word
  # substitution. It mangled the message and broke the English-sentinel
  # comparisons below. Use gsub for whole-word replacement, and only after
  # the sentinel checks (the duplicated pre-check translations are gone).
  if stop == "No bus stop found"
    stop = "No encuentra la parada de autobús"
  elsif stop == "No arrivals for next 30 minutes"
    stop = "No hay llegadas para los próximos 30 minutos"
  elsif stop == "No arrival for next 45 minutes"
    stop = "No hay llegadas para los próximos 45 minutos"
  else
    stop = stop.gsub('Destination', 'destino').gsub('Route', 'ruta')
  end
  t.say(:value => stop, :voice =>"esperanza")
  t.on :event => 'continue', :next => '/next_spanish.json'
  t.response
end
# After speaking a stop's arrivals, ask whether the caller wants another
# lookup (1 = restart at /index.json, 2 = hang up).
post '/next.json' do
  v = Tropo::Generator.parse request.env["rack.input"].read
  t = Tropo::Generator.new
  t.ask :name => 'next', :bargein => true, :timeout => 60, :attempts => 1,
    :say => [{:event => "nomatch:1", :value => "That wasn't a valid answer. "},
             {:value => "Would you like hear another bus stop?
    Press 1 for yes; Press 2 to end this call."}],
    :choices => { :value => "true(1), false(2)"}
  t.on :event => 'continue', :next => '/index.json'
  t.on :event => 'hangup', :next => '/hangup.json'
  t.response
end
# SMS entry point: the message text is taken as the stop number and the
# reply is sent back over the same channel.
post '/sms_incoming.json' do
  t = Tropo::Generator.new
  v = Tropo::Generator.parse request.env["rack.input"].read
  # Phone settings carry a leading '+' that the session id lacks.
  from = v[:session][:to][:id]
  initial_text = v[:session][:initial_text]
  if from == settings.va_phone.tr('+','')
    stop = get_et_info('va', initial_text)
  elsif from == settings.char_phone.tr('+','')
    stop = get_et_info('char', initial_text)
  else
    stop = get_et_info('sc', initial_text)
  end
  t.say(:value => stop)
  t.hangup
  t.on :event => 'hangup', :next => '/hangup.json'
  t.response
end
# CDR callback from Tropo at call end; only logs the call duration.
post '/hangup.json' do
  v = Tropo::Generator.parse request.env["rack.input"].read
  puts " Call complete (CDR received). Call duration: #{v[:result][:session_duration]} second(s)"
end
# Looks up real-time arrival (ETA) info for a bus stop ("platform") from
# the Connexionz endpoint of the given agency and formats it as one line
# of text suitable for SMS or TTS output.
#
# location - 'va', 'char', or anything else (treated as 'sc')
# platform - the 5-digit platform (bus stop) number as a string
#
# Returns a String.
def get_et_info(location, platform)
  endpoint =
    case location
    when "va"   then "http://realtime.commuterpage.com"
    when "char" then "http://avlweb.charlottesville.org"
    else             "http://12.233.207.166"
    end
  @client = Connexionz::Client.new({:endpoint => endpoint})
  @platform_info = @client.route_position_et({:platformno => platform})
  # Cache the response root; the original repeated this chain throughout
  # and also assigned an unused local (`name`), which is removed here.
  et = @platform_info.route_position_et
  if et.platform.nil?
    "No bus stop found"
  else
    arrival_scope = et.content.max_arrival_scope
    route = et.platform.route
    if route.nil?
      "No arrivals for next #{arrival_scope} minutes"
    elsif route.class == Array
      # Several routes serve this stop: one message segment per route.
      @platforms = route
      sms_message = ""
      @platforms.each do |platform|
        sms_message += "Route #{platform.route_no}-Destination #{platform.destination.name}-ETA #{platform.destination.trip.eta } minutes "
      end
      sms_message
    else
      # Single route; it may still report several upcoming trips.
      eta = ""
      if route.destination.trip.is_a?(Array)
        route.destination.trip.each do |mult_eta|
          eta += "#{mult_eta.eta} min "
        end
      else
        eta = "#{route.destination.trip.eta} min"
      end
      "Route #{route.route_no} " + "-Destination #{route.destination.name} " + "-ETA #{eta}"
    end
  end
end
##################
### WEB ROUTES ###
##################

# Plain-HTTP views of the same data, one route per agency.
get '/' do
  haml :root
end

get '/sc/:name' do
  #matches "GET /sc/19812"
  get_et_info('sc',params[:name])
end

get '/va/:name' do
  #matches "GET /va/41215"
  get_et_info('va',params[:name])
end

get '/char/:name' do
  #matches "GET /char/19812"
  get_et_info('char',params[:name])
end
Added spanish next.json
# encoding: utf-8
require 'sinatra'
require 'connexionz'
require 'haml'
require 'tropo-webapi-ruby'
require 'json'
set :sender_phone, ENV['SC_PHONE']
set :va_phone, ENV['VA_PHONE']
set :char_phone, ENV['CHAR_PHONE']
set :spanish_sc, ENV['SPANISH_SC']
set :spanish_va, ENV['SPANISH_VA']
set :spanish_char, ENV['SPANISH_CHAR']
use Rack::Session::Pool
post '/index.json' do
v = Tropo::Generator.parse request.env["rack.input"].read
session[:from] = v[:session][:from]
session[:to_phone] = v[:session][:to][:name]
session[:network] = v[:session][:to][:network]
session[:channel] = v[:session][:to][:channel]
t = Tropo::Generator.new
t.say "Welcome to yak bus"
t.ask :name => 'digit',
:timeout => 60,
:say => {:value => "Enter the five digit bus stop number"},
:choices => {:value => "[5 DIGITS]",:mode => "dtmf"}
t.on :event => 'continue', :next => '/continue.json'
t.response
end
post '/continue.json' do
v = Tropo::Generator.parse request.env["rack.input"].read
t = Tropo::Generator.new
answer = v[:result][:actions][:digit][:value]
if session[:to_phone] == settings.va_phone
stop = get_et_info('va', answer)
elsif session[:to_phone] == settings.char_phone
stop = get_et_info('char', answer)
else
stop = get_et_info('sc', answer)
end
t.say(:value => stop)
t.on :event => 'continue', :next => '/next.json'
t.response
end
post '/next.json' do
v = Tropo::Generator.parse request.env["rack.input"].read
t = Tropo::Generator.new
t.ask :name => 'next', :bargein => true, :timeout => 60, :attempts => 1,
:say => [{:event => "nomatch:1", :value => "That wasn't a valid answer. "},
{:value => "Would you like hear another bus stop?
Press 1 for yes; Press 2 to end this call."}],
:choices => { :value => "true(1), false(2)"}
t.on :event => 'continue', :next => '/index.json'
t.on :event => 'hangup', :next => '/hangup.json'
t.response
end
post '/spanish.json' do
v = Tropo::Generator.parse request.env["rack.input"].read
session[:from] = v[:session][:from]
session[:to_phone] = v[:session][:to][:name]
session[:network] = v[:session][:to][:network]
session[:channel] = v[:session][:to][:channel]
t = Tropo::Generator.new
t.say "Bienvenido al bus yak", :voice =>"esperanza"
t.ask :name => 'digit',
:timeout => 60,
:say => {:value => "Introduzca los cinco dígitos del número parada de autobús"},
:voice => "esperanza",
:choices => {:value => "[5 DIGITS]"},
:recognizer => "es-mx"
t.on :event => 'continue', :next => '/continue_spanish.json'
t.response
end
# Second step of the Spanish call flow: look up the stop, translate the
# canned English fragments to Spanish and speak the result.
post '/continue_spanish.json' do
  v = Tropo::Generator.parse request.env["rack.input"].read
  t = Tropo::Generator.new
  answer = v[:result][:actions][:digit][:value]
  if session[:to_phone] == settings.spanish_va
    stop = get_et_info('va', answer)
  elsif session[:to_phone] == settings.spanish_char
    stop = get_et_info('char', answer)
  else
    stop = get_et_info('sc', answer)
  end
  # BUGFIX: String#tr transliterates characters, it does not replace
  # words — the original corrupted the message text and defeated the
  # sentinel comparisons below. gsub performs the intended substitution,
  # and only in the fall-through branch (duplicates removed).
  if stop == "No bus stop found"
    stop = "No encuentra la parada de autobús"
  elsif stop == "No arrivals for next 30 minutes"
    stop = "No hay llegadas para los próximos 30 minutos"
  elsif stop == "No arrival for next 45 minutes"
    stop = "No hay llegadas para los próximos 45 minutos"
  else
    stop = stop.gsub('Destination', 'destino').gsub('Route', 'ruta')
  end
  t.say(:value => stop, :voice =>"esperanza")
  t.on :event => 'continue', :next => '/next_spanish.json'
  t.response
end
# Spanish counterpart of /next.json: offer another lookup
# (1 = restart at /spanish.json, 2 = hang up).
post '/next_spanish.json' do
  v = Tropo::Generator.parse request.env["rack.input"].read
  t = Tropo::Generator.new
  t.ask :name => 'next', :bargein => true, :timeout => 60, :attempts => 1,
    :say => [{:event => "nomatch:1", :value => "Que no era una respuesta válida. "},
             {:value => "¿Te gustaría escuchar otra parada de autobús?
    Presione 1 para sí, Pulse 2 para poner fin a esta convocatoria."}],
    :choices => { :value => "true(1), false(2)"},
    :voice => "esperanza",
    :recognizer => "es-mx"
  t.on :event => 'continue', :next => '/spanish.json'
  t.on :event => 'hangup', :next => '/hangup.json'
  t.response
end
post '/sms_incoming.json' do
t = Tropo::Generator.new
v = Tropo::Generator.parse request.env["rack.input"].read
from = v[:session][:to][:id]
initial_text = v[:session][:initial_text]
if from == settings.va_phone.tr('+','')
stop = get_et_info('va', initial_text)
elsif from == settings.char_phone.tr('+','')
stop = get_et_info('char', initial_text)
else
stop = get_et_info('sc', initial_text)
end
t.say(:value => stop)
t.hangup
t.on :event => 'hangup', :next => '/hangup.json'
t.response
end
post '/hangup.json' do
v = Tropo::Generator.parse request.env["rack.input"].read
puts " Call complete (CDR received). Call duration: #{v[:result][:session_duration]} second(s)"
end
# Looks up real-time arrival (ETA) info for a bus stop ("platform") from
# the Connexionz endpoint of the given agency and formats it as one line
# of text suitable for SMS or TTS output.
#
# location - 'va', 'char', or anything else (treated as 'sc')
# platform - the 5-digit platform (bus stop) number as a string
#
# Returns a String.
def get_et_info(location, platform)
  endpoint =
    case location
    when "va"   then "http://realtime.commuterpage.com"
    when "char" then "http://avlweb.charlottesville.org"
    else             "http://12.233.207.166"
    end
  @client = Connexionz::Client.new({:endpoint => endpoint})
  @platform_info = @client.route_position_et({:platformno => platform})
  # Cache the response root; the original repeated this chain throughout
  # and also assigned an unused local (`name`), which is removed here.
  et = @platform_info.route_position_et
  if et.platform.nil?
    "No bus stop found"
  else
    arrival_scope = et.content.max_arrival_scope
    route = et.platform.route
    if route.nil?
      "No arrivals for next #{arrival_scope} minutes"
    elsif route.class == Array
      # Several routes serve this stop: one message segment per route.
      @platforms = route
      sms_message = ""
      @platforms.each do |platform|
        sms_message += "Route #{platform.route_no}-Destination #{platform.destination.name}-ETA #{platform.destination.trip.eta } minutes "
      end
      sms_message
    else
      # Single route; it may still report several upcoming trips.
      eta = ""
      if route.destination.trip.is_a?(Array)
        route.destination.trip.each do |mult_eta|
          eta += "#{mult_eta.eta} min "
        end
      else
        eta = "#{route.destination.trip.eta} min"
      end
      "Route #{route.route_no} " + "-Destination #{route.destination.name} " + "-ETA #{eta}"
    end
  end
end
##################
### WEB ROUTES ###
##################
get '/' do
haml :root
end
get '/sc/:name' do
#matches "GET /sc/19812"
get_et_info('sc',params[:name])
end
get '/va/:name' do
#matches "GET /va/41215"
get_et_info('va',params[:name])
end
get '/char/:name' do
#matches "GET /char/19812"
get_et_info('char',params[:name])
end
|
Moved responsibility of parameter URL generation to the url_parameter module.
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'list_to_columns/version'

# Gem specification for list_to_columns.
Gem::Specification.new do |spec|
  spec.name = 'list_to_columns'
  spec.version = ListToColumns::VERSION
  spec.authors = ['Christian Höltje']
  spec.email = ['docwhat@gerf.org']
  spec.summary = 'Formats a list into columns'
  spec.description = 'Given an Array of Strings, it formats it into '\
    'columns to make it more compact for terminal displays.'
  spec.homepage = 'https://github.com/docwhat/list_to_columns'
  spec.license = 'MIT'
  # Package everything git tracks except the test suites.
  spec.files = `git ls-files -z`
    .split("\x0")
    .reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir = 'exe'
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.add_development_dependency 'bundler', '~> 1.10'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'psych'
  spec.add_development_dependency 'rspec'
  spec.add_development_dependency 'rspec-given'
  spec.add_development_dependency 'rubocop'
  spec.add_development_dependency 'simplecov'
  spec.add_development_dependency 'coveralls'
  spec.add_development_dependency 'semver2', '~> 3.4'
end
I don't want to deal with ancient rubies
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'list_to_columns/version'

# Gem specification for list_to_columns.
Gem::Specification.new do |spec|
  spec.name = 'list_to_columns'
  spec.version = ListToColumns::VERSION
  spec.authors = ['Christian Höltje']
  spec.email = ['docwhat@gerf.org']
  spec.summary = 'Formats a list into columns'
  spec.description = 'Given an Array of Strings, it formats it into '\
    'columns to make it more compact for terminal displays.'
  spec.homepage = 'https://github.com/docwhat/list_to_columns'
  spec.license = 'MIT'
  # Package everything git tracks except the test suites.
  spec.files = `git ls-files -z`
    .split("\x0")
    .reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir = 'exe'
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  # Keyword-argument syntax and other 2.0 features are relied upon.
  spec.required_ruby_version = '>= 2.0.0'
  spec.add_development_dependency 'bundler', '~> 1.10'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'psych'
  spec.add_development_dependency 'rspec'
  spec.add_development_dependency 'rspec-given'
  spec.add_development_dependency 'rubocop'
  spec.add_development_dependency 'simplecov'
  spec.add_development_dependency 'coveralls'
  spec.add_development_dependency 'semver2', '~> 3.4'
end
|
change class names
|
#!/usr/bin/env ruby
require 'net/http'
require 'timeout'
module Sensu::Extension
  # Sensu handler extension that forwards metric check output to an
  # InfluxDB /write endpoint using the line protocol.
  class Influx < Handler

    @@extension_name = 'influxdb-extension'

    def name
      @@extension_name
    end

    def description
      'Outputs metrics to InfluxDB'
    end

    # Reads and validates this extension's settings and prepares the
    # write URI and HTTP client used by #run.
    def post_init
      influxdb_config = settings[@@extension_name]
      validate_config(influxdb_config)

      hostname = influxdb_config[:hostname]
      port = influxdb_config[:port] || 8086
      database = influxdb_config[:database]
      @username = influxdb_config[:username]
      @password = influxdb_config[:password]
      @timeout = influxdb_config[:timeout] || 15

      @uri = URI("http://#{hostname}:#{port}/write?db=#{database}")
      @http = Net::HTTP::new(@uri.host, @uri.port)

      @logger.info("#{@@extension_name}: Successfully initialized config: hostname: #{hostname}, port: #{port}, database: #{database}, username: #{@username}, timeout: #{@timeout}")
    end

    # Raises ArgumentError when the config is missing or lacks a required key.
    # NOTE(review): this checks STRING keys while post_init reads SYMBOL
    # keys — confirm how Sensu key-normalizes the settings hash.
    def validate_config(config)
      if config.nil?
        raise ArgumentError, "No configuration for #{@@extension_name} provided. Exiting..."
      end
      ["hostname", "database"].each do |required_setting|
        if config[required_setting].nil?
          raise ArgumentError, "Required setting #{required_setting} not provided to extension. This should be provided as JSON element with key '#{@@extension_name}'. Exiting..."
        end
      end
    end

    # Builds the comma-separated "k=v" tag string for the line protocol.
    def create_tags(event)
      begin
        # BUGFIX: guard BEFORE calling .sort — when :tags was absent,
        # nil.sort raised NoMethodError and the old nil-check never ran.
        if event[:client].has_key?(:tags)
          # sorting tags alphabetically in order to increase InfluxDB performance
          incoming_tags = Hash[event[:client][:tags].sort]
        else
          # if no tags are provided with the client, we add hostname as a tag.
          incoming_tags = {"hostname" => event[:client][:address]}
        end
        tag_strings = []
        incoming_tags.each { |key,value| tag_strings << "#{key}=#{value}" }
        tag_strings.join(",")
      rescue => e
        @logger.error("#{@@extension_name}: unable to create to tags from event data #{e.backtrace.to_s}")
      end
    end

    # Converts "measurement value timestamp" lines (graphite-style check
    # output) into InfluxDB line-protocol points with nanosecond timestamps.
    def create_payload(output, tags)
      points = []
      output.split(/\n/).each do |line|
        measurement, field_value, timestamp = line.split(/\s+/)
        timestamp_nano = Integer(timestamp) * (10 ** 9)
        point = "#{measurement},#{tags} value=#{field_value} #{timestamp_nano}"
        points << point
      end
      points.join("\n")
    end

    # Posts the event's metrics to InfluxDB in a fire-and-forget thread.
    def run(event)
      begin
        event = MultiJson.load(event)
        tags = create_tags(event)
        @logger.debug("created tags: #{tags}")
        payload = create_payload(event[:check][:output], tags)
        request = Net::HTTP::Post.new(@uri.request_uri)
        request.body = payload
        request.basic_auth(@username, @password)
        @logger.debug("writing payload #{payload} to endpoint #{@uri.to_s}")
        Thread.new do
          # BUGFIX: removed `request.finish` — Net::HTTP::Post has no
          # #finish method, so it raised NoMethodError inside this thread.
          @http.request(request)
        end
      rescue => e
        @logger.error("#{@@extension_name}: unable to post payload to influxdb - #{e.backtrace.to_s}")
      end
      yield("#{@@extension_name}: Handler finished", 0)
    end
  end
end
Fixes #2: check that :tags exists before sorting
#!/usr/bin/env ruby
require 'net/http'
require 'timeout'
module Sensu::Extension
  # Sensu handler extension that forwards metric check output to an
  # InfluxDB /write endpoint using the line protocol.
  class Influx < Handler

    @@extension_name = 'influxdb-extension'

    def name
      @@extension_name
    end

    def description
      'Outputs metrics to InfluxDB'
    end

    # Reads and validates this extension's settings and prepares the
    # write URI and HTTP client used by #run.
    def post_init
      influxdb_config = settings[@@extension_name]
      validate_config(influxdb_config)

      hostname = influxdb_config[:hostname]
      port = influxdb_config[:port] || 8086
      database = influxdb_config[:database]
      @username = influxdb_config[:username]
      @password = influxdb_config[:password]
      @timeout = influxdb_config[:timeout] || 15

      @uri = URI("http://#{hostname}:#{port}/write?db=#{database}")
      @http = Net::HTTP::new(@uri.host, @uri.port)

      @logger.info("#{@@extension_name}: Successfully initialized config: hostname: #{hostname}, port: #{port}, database: #{database}, username: #{@username}, timeout: #{@timeout}")
    end

    # Raises ArgumentError when the config is missing or lacks a required key.
    # NOTE(review): this checks STRING keys while post_init reads SYMBOL
    # keys — confirm how Sensu key-normalizes the settings hash.
    def validate_config(config)
      if config.nil?
        raise ArgumentError, "No configuration for #{@@extension_name} provided. Exiting..."
      end
      ["hostname", "database"].each do |required_setting|
        if config[required_setting].nil?
          raise ArgumentError, "Required setting #{required_setting} not provided to extension. This should be provided as JSON element with key '#{@@extension_name}'. Exiting..."
        end
      end
    end

    # Builds the comma-separated "k=v" tag string for the line protocol.
    def create_tags(event)
      begin
        if event[:client].has_key?(:tags)
          # sorting tags alphabetically in order to increase InfluxDB performance
          incoming_tags = Hash[event[:client][:tags].sort]
        else
          # if no tags are provided with the client, we add hostname as a tag.
          incoming_tags = {"hostname" => event[:client][:address]}
        end
        tag_strings = []
        incoming_tags.each { |key,value| tag_strings << "#{key}=#{value}" }
        tag_strings.join(",")
      rescue => e
        @logger.error("#{@@extension_name}: unable to create to tags from event data #{e.backtrace.to_s}")
      end
    end

    # Converts "measurement value timestamp" lines (graphite-style check
    # output) into InfluxDB line-protocol points with nanosecond timestamps.
    def create_payload(output, tags)
      points = []
      output.split(/\n/).each do |line|
        measurement, field_value, timestamp = line.split(/\s+/)
        timestamp_nano = Integer(timestamp) * (10 ** 9)
        point = "#{measurement},#{tags} value=#{field_value} #{timestamp_nano}"
        points << point
      end
      points.join("\n")
    end

    # Posts the event's metrics to InfluxDB in a fire-and-forget thread.
    def run(event)
      begin
        event = MultiJson.load(event)
        tags = create_tags(event)
        @logger.debug("created tags: #{tags}")
        payload = create_payload(event[:check][:output], tags)
        request = Net::HTTP::Post.new(@uri.request_uri)
        request.body = payload
        request.basic_auth(@username, @password)
        @logger.debug("#{@@extension_name}: writing payload #{payload} to endpoint #{@uri.to_s}")
        Thread.new do
          # BUGFIX: removed `request.finish` — Net::HTTP::Post has no
          # #finish method, so it raised NoMethodError inside this thread.
          @http.request(request)
        end
      rescue => e
        @logger.error("#{@@extension_name}: unable to post payload to influxdb - #{e.backtrace.to_s}")
      end
      yield("#{@@extension_name}: Handler finished", 0)
    end
  end
end
|
# Gem specification for locale_selector.
SPEC = Gem::Specification.new do |s|
  s.name = 'locale_selector'
  s.version = '1.93.0'
  s.summary = 'Wraps and improves ruby-gettext, provides UI for locale selection, maintains user preferences.'
  s.description = s.summary
  s.author = 'Vladimir Dobriakov'
  s.email = 'vd_extern@vfnet.de'
  s.homepage = 'http://github.com/geekq/locale_selector'
  # Static top-level files plus everything under generators/, lib/, tasks/.
  s.files = %w(MIT-LICENSE README.rdoc Rakefile TESTING.rdoc init.rb install.rb locale_selector.gemspec uninstall.rb) +
    Dir.glob("{generators,lib,tasks}/**/*")
  s.require_path = "lib"
  s.bindir = "bin"
  # NOTE(review): has_rdoc is deprecated (removed in newer RubyGems).
  s.has_rdoc = true
  s.extra_rdoc_files = ['README.rdoc', 'TESTING.rdoc', 'MIT-LICENSE']
  s.rdoc_options = ['--line-numbers', '--inline-source', '--promiscuous', '--main', 'README.rdoc']
  # Version lock matches this gem's own version scheme.
  s.add_dependency 'gettext', '1.93.0'
end
Replaced glob with an explicit list of files
# Gem specification for locale_selector.
SPEC = Gem::Specification.new do |s|
  s.name = 'locale_selector'
  s.version = '1.93.0'
  s.summary = 'Wraps and improves ruby-gettext, provides UI for locale selection, maintains user preferences.'
  s.description = s.summary
  s.author = 'Vladimir Dobriakov'
  s.email = 'vd_extern@vfnet.de'
  s.homepage = 'http://github.com/geekq/locale_selector'
  # Explicit file list (the glob was deliberately replaced by this list).
  s.files = %w(MIT-LICENSE README.rdoc Rakefile TESTING.rdoc init.rb install.rb locale_selector.gemspec uninstall.rb) +
    ['generators/gettext_hacks/templates/gettext_hacks.rb',
     'generators/gettext_hacks/gettext_hacks_generator.rb',
     'lib/locale_selector.rb',
     'tasks/gettext.rake' ]
  # BUGFIX: removed a leftover bare `Dir.glob("{generators,lib,tasks}/**/*")`
  # statement — its result was discarded (dead code that still hit the
  # filesystem on every gemspec evaluation).
  s.require_path = "lib"
  s.bindir = "bin"
  # NOTE(review): has_rdoc is deprecated (removed in newer RubyGems).
  s.has_rdoc = true
  s.extra_rdoc_files = ['README.rdoc', 'TESTING.rdoc', 'MIT-LICENSE']
  s.rdoc_options = ['--line-numbers', '--inline-source', '--promiscuous', '--main', 'README.rdoc']
  # Version lock matches this gem's own version scheme.
  s.add_dependency 'gettext', '1.93.0'
end
|
require 'sequel'
require_relative 'options'
module Sequelizer
  # Class that handles loading/interpretting the database options and
  # creates the Sequel connection
  class ConnectionMaker
    # The options for Sequel.connect
    attr :options

    # Accepts an optional set of database options
    #
    # If no options are provided, attempts to read options from
    # config/database.yml
    #
    # If config/database.yml doesn't exist, Dotenv is used to try to load a
    # .env file, then uses any SEQUELIZER_* environment variables as
    # database options
    def initialize(options = nil)
      @options = Options.new(options)
    end

    # Returns a Sequel connection to the database
    def connection
      Sequel.connect(options.to_hash)
    end
  end
end
Use URI/URL as a string in Sequel.connect
require 'sequel'
require_relative 'options'
module Sequelizer
  # Class that handles loading/interpretting the database options and
  # creates the Sequel connection
  class ConnectionMaker
    # The options for Sequel.connect
    attr :options

    # Accepts an optional set of database options
    #
    # If no options are provided, attempts to read options from
    # config/database.yml
    #
    # If config/database.yml doesn't exist, Dotenv is used to try to load a
    # .env file, then uses any SEQUELIZER_* environment variables as
    # database options
    def initialize(options = nil)
      @options = Options.new(options)
    end

    # Returns a Sequel connection to the database.
    #
    # When the options contain a :uri/:url, it is passed to Sequel.connect
    # as the connection string with the remaining options as the opts hash.
    def connection
      opts = options.to_hash
      if url = (opts.delete(:uri) || opts.delete(:url))
        Sequel.connect(url, opts)
      else
        # Reuse opts rather than recomputing options.to_hash (the deletes
        # above were no-ops in this branch, so the content is identical).
        Sequel.connect(opts)
      end
    end
  end
end
|
# Wraps the application settings in an ActiveModel object so the whole
# configuration can be validated (e.g. at boot time).
class SettingsValidationWrapper
  # Checks that each configured data path is a String naming an existing
  # directory, using the PathsInitializer fallbacks for implicit paths.
  class DataPathsValidator < ActiveModel::Validator
    def defaults(key)
      [Settings.paths[key], PathsInitializer::DEFAULT_PATHS[key]]
    end

    def set_directory(key)
      setting, fallback = defaults(key)
      PathsInitializer.prepare(setting, fallback)
    end

    # True when the path was not configured explicitly AND the implicitly
    # derived directory does not exist.
    def failure_condition_met?(key)
      setting, _fallback = defaults(key)
      setting.nil? && !File.directory?(set_directory(key))
    end

    def validate(record)
      PathsInitializer::DEFAULT_PATHS.each do |key, _default_value|
        dir = set_directory(key)
        # NOTE(review): direct assignment to record.errors[...] is
        # deprecated in newer Rails; errors.add would be preferred there.
        if failure_condition_met?(key)
          record.errors["yml__paths__#{key}".to_sym] =
            "Implicitly set data directory path '#{dir}' is not a directory."
        elsif !Settings.paths[key].is_a?(String)
          record.errors["yml__paths__#{key}".to_sym] = 'Is not a String value.'
        elsif !File.directory?(dir)
          record.errors["yml__paths__#{key}".to_sym] = 'Is not a directory.'
        end
      end
    end
  end

  include ActiveModel::Validations
  include SettingsValidationWrapper::Validators

  # We assume that deployment is done on a linux machine that has 'nproc'.
  # Counting processors is different on other machines. For them, we would need
  # to use a gem.
  # BUGFIX: `which` output ends in a newline; without chomp,
  # File.executable? was always false, silently disabling the
  # nproc-based upper bound on yml__hets__instances_count.
  NPROC_PATH = `which nproc`.chomp
  NPROC_AVAILABLE = NPROC_PATH.present? && File.executable?(NPROC_PATH)

  # Settings keys that must simply be present.
  PRESENCE = %i(yml__name
                yml__OMS
                yml__OMS_qualifier
                yml__action_mailer__delivery_method
                yml__action_mailer__smtp_settings__address
                yml__allow_unconfirmed_access_for_days
                yml__max_read_filesize
                yml__max_combined_diff_size
                yml__ontology_parse_timeout
                yml__footer
                yml__exception_notifier__email_prefix
                yml__exception_notifier__sender_address
                yml__exception_notifier__exception_recipients
                yml__paths__data
                yml__git__verify_url
                yml__git__default_branch
                yml__git__push_priority__commits
                yml__git__push_priority__changed_files_per_commit
                yml__git__fallbacks__committer_name
                yml__git__fallbacks__committer_email
                yml__allowed_iri_schemes
                yml__external_repository_name
                yml__formality_levels
                yml__license_models
                yml__ontology_types
                yml__tasks
                yml__hets__version_minimum_version
                yml__hets__version_minimum_revision
                yml__hets__stack_size
                yml__hets__cmd_line_options
                yml__hets__server_options
                yml__hets__env__LANG
                initializers__fqdn)

  PRESENCE_IN_PRODUCTION = %i(yml__hets__executable_path
                              yml__hets__instances_count)

  # Type constraints, grouped by expected class.
  BOOLEAN = %i(yml__exception_notifier__enabled
               yml__display_head_commit
               yml__display_symbols_tab
               yml__format_selection
               yml__action_mailer__perform_deliveries
               yml__action_mailer__raise_delivery_errors
               yml__action_mailer__smtp_settings__enable_starttls_auto
               initializers__consider_all_requests_local)

  # NOTE(review): Fixnum is deprecated (merged into Integer) in Ruby 2.4+.
  FIXNUM = %i(yml__hets__instances_count
              yml__action_mailer__smtp_settings__port
              yml__allow_unconfirmed_access_for_days
              yml__git__push_priority__commits
              yml__git__push_priority__changed_files_per_commit
              yml__access_token__expiration_minutes
              yml__hets__time_between_updates
              yml__hets__version_minimum_revision)

  FLOAT = %i(yml__hets__version_minimum_version)

  STRING = %i(yml__name
              yml__OMS
              yml__OMS_qualifier
              yml__email
              yml__action_mailer__smtp_settings__address
              yml__exception_notifier__email_prefix
              yml__exception_notifier__sender_address
              yml__paths__data
              yml__git__verify_url
              yml__git__default_branch
              yml__git__fallbacks__committer_name
              yml__git__fallbacks__committer_email
              yml__external_repository_name)

  ARRAY = %i(yml__footer
             yml__exception_notifier__exception_recipients
             yml__allowed_iri_schemes
             yml__formality_levels
             yml__license_models
             yml__ontology_types
             yml__tasks
             yml__hets__cmd_line_options
             yml__hets__server_options)

  DIRECTORY_PRODUCTION = %i(yml__paths__data)

  ELEMENT_PRESENT = %i(yml__allowed_iri_schemes
                       yml__hets__cmd_line_options
                       yml__hets__server_options)

  validates_with DataPathsValidator
  validates_presence_of(*PRESENCE)
  validates_presence_of(*PRESENCE_IN_PRODUCTION, if: :in_production?)

  BOOLEAN.each do |field|
    validates field, class: {in: [TrueClass, FalseClass]}
  end
  FIXNUM.each { |field| validates field, class: {in: [Fixnum]} }
  FLOAT.each { |field| validates field, class: {in: [Float]} }
  STRING.each { |field| validates field, class: {in: [String]} }
  ARRAY.each { |field| validates field, class: {in: [Array]} }
  DIRECTORY_PRODUCTION.each do |field|
    validates field, directory: true, if: :in_production?
  end
  ELEMENT_PRESENT.each { |field| validates field, elements_are_present: true }

  validates :initializers__secret_token,
            presence: true,
            length: {minimum: 64},
            if: :in_production?
  validates :yml__email,
            email_from_host: {hostname: ->(record) { record.initializers__fqdn }},
            if: :in_production?
  validates :yml__exception_notifier__exception_recipients,
            elements_are_email: true
  validates :yml__action_mailer__delivery_method,
            inclusion: {in: %i(sendmail smtp file test)}
  validates :yml__allow_unconfirmed_access_for_days,
            numericality: {greater_than_or_equal_to: 0}
  validates :yml__max_read_filesize, numericality: {greater_than: 1024}
  validates :yml__max_combined_diff_size, numericality: {greater_than: 2048}
  validates :yml__ontology_parse_timeout, numericality: {greater_than: 0}
  validates :yml__git__verify_url, format: URI.regexp
  validates :yml__git__push_priority__commits,
            numericality: {greater_than_or_equal_to: 1}
  validates :yml__git__push_priority__changed_files_per_commit,
            numericality: {greater_than_or_equal_to: 1}
  validates :yml__access_token__expiration_minutes,
            numericality: {greater_than_or_equal_to: 1}
  validates :yml__footer, elements_have_keys: {keys: %i(text)}
  validates :yml__formality_levels,
            elements_have_keys: {keys: %i(name description)}
  validates :yml__license_models, elements_have_keys: {keys: %i(name url)}
  validates :yml__ontology_types,
            elements_have_keys: {keys: %i(name description documentation)}
  validates :yml__tasks,
            elements_have_keys: {keys: %i(name description)}
  validates :initializers__log_level,
            inclusion: {in: %i(fatal error warn info debug)}
  validates :yml__hets__executable_path, executable: true, if: :in_production?

  # Cap the hets instance count at the machine's processor count when we
  # can determine it via nproc; otherwise only require it to be positive.
  if NPROC_AVAILABLE
    validates :yml__hets__instances_count,
              numericality: {greater_than: 0,
                             less_than_or_equal_to: `nproc`.to_i},
              if: :in_production?
  else
    validates :yml__hets__instances_count,
              numericality: {greater_than: 0},
              if: :in_production?
  end
  validates :yml__hets__time_between_updates,
            numericality: {greater_than_or_equal_to: 1}

  # Maps the first key portion to the object the remaining keys are read
  # from; :error signals an unknown prefix.
  def self.base(first_portion)
    case first_portion
    when 'yml'
      Settings
    when 'initializers'
      Ontohub::Application.config
    else
      :error
    end
  end

  # Walks the key chain through nested settings objects.
  def self.get_value(object, key_chain)
    key_chain.each do |key|
      if object.respond_to?(key)
        object = object.send(key)
      else
        # The nil value shall be caught by the presence validators.
        return nil
      end
    end
    object
  end

  protected

  def in_production?
    Rails.env.production?
  end

  # We use '__' as a separator. It will be replaced by a dot.
  # This uses the fact that our settings-keys never have two consecutive
  # underscores.
  # yml__git__verify_url maps to Settings.git.verify_url.
  # initializers__git__verify_url maps to @config.git.verify_url.
  def method_missing(method_name, *_args)
    portions = method_name.to_s.split('__')
    object = self.class.base(portions[0])
    key_chain = portions[1..-1]
    if object == :error || key_chain.blank?
      raise NoMethodError,
            "undefined method `#{method_name}' for #{self}:#{self.class}"
    end
    self.class.get_value(object, key_chain)
  end

  # Paired with method_missing so respond_to? reflects the virtual
  # settings accessors.
  def respond_to_missing?(method_name, include_private = false)
    portions = method_name.to_s.split('__')
    (self.class.base(portions[0]) != :error && portions[1..-1].present?) ||
      super
  end
end
Only validate the paths in production.
# Wraps the application configuration (settings.yml plus the Rails
# initializers) in an ActiveModel object so that presence, type and
# value constraints of all settings can be validated on boot.
#
# Attribute names use '__' as a path separator (see #method_missing):
# yml__git__verify_url reads Settings.git.verify_url and
# initializers__fqdn reads Ontohub::Application.config.fqdn.
class SettingsValidationWrapper
  # Only validate the paths in production.
  class DataPathsValidator < ActiveModel::Validator
    # Configured value and built-in fallback for the given path key.
    def defaults(key)
      [Settings.paths[key], PathsInitializer::DEFAULT_PATHS[key]]
    end

    # The directory that is effectively used for +key+ after the
    # fallback has been applied.
    def set_directory(key)
      setting, fallback = defaults(key)
      PathsInitializer.prepare(setting, fallback)
    end

    # True when no path is configured explicitly and the implicitly
    # chosen fallback is not an existing directory.
    def failure_condition_met?(key)
      setting, _fallback = defaults(key)
      setting.nil? && !File.directory?(set_directory(key))
    end

    # Adds an error for every data path that is implicitly broken,
    # not a String, or not an existing directory.
    def validate(record)
      PathsInitializer::DEFAULT_PATHS.each do |key, _default_value|
        dir = set_directory(key)
        # NOTE(review): record.errors[...]= is the legacy ActiveModel
        # errors API (an alias of errors.add on old Rails) -- confirm
        # it still behaves as "add" on the Rails version in use.
        if failure_condition_met?(key)
          record.errors["yml__paths__#{key}".to_sym] =
            "Implicitly set data directory path '#{dir}' is not a directory."
        elsif !Settings.paths[key].is_a?(String)
          record.errors["yml__paths__#{key}".to_sym] = 'Is not a String value.'
        elsif !File.directory?(dir)
          record.errors["yml__paths__#{key}".to_sym] = 'Is not a directory.'
        end
      end
    end
  end

  include ActiveModel::Validations
  include SettingsValidationWrapper::Validators

  # We assume that deployment is done on a linux machine that has 'nproc'.
  # Counting processors is different on other machines. For them, we would need
  # to use a gem.
  # `which` prints the path with a trailing newline; it must be chomped,
  # otherwise File.executable? is handed "...\n" and NPROC_AVAILABLE is
  # always false even when nproc exists.
  NPROC_PATH = `which nproc`.chomp
  NPROC_AVAILABLE = NPROC_PATH.present? && File.executable?(NPROC_PATH)

  # Settings that merely have to be present.
  PRESENCE = %i(yml__name
                yml__OMS
                yml__OMS_qualifier
                yml__action_mailer__delivery_method
                yml__action_mailer__smtp_settings__address
                yml__allow_unconfirmed_access_for_days
                yml__max_read_filesize
                yml__max_combined_diff_size
                yml__ontology_parse_timeout
                yml__footer
                yml__exception_notifier__email_prefix
                yml__exception_notifier__sender_address
                yml__exception_notifier__exception_recipients
                yml__paths__data
                yml__git__verify_url
                yml__git__default_branch
                yml__git__push_priority__commits
                yml__git__push_priority__changed_files_per_commit
                yml__git__fallbacks__committer_name
                yml__git__fallbacks__committer_email
                yml__allowed_iri_schemes
                yml__external_repository_name
                yml__formality_levels
                yml__license_models
                yml__ontology_types
                yml__tasks
                yml__hets__version_minimum_version
                yml__hets__version_minimum_revision
                yml__hets__stack_size
                yml__hets__cmd_line_options
                yml__hets__server_options
                yml__hets__env__LANG
                initializers__fqdn)
  # Settings that only have to be present on production machines.
  PRESENCE_IN_PRODUCTION = %i(yml__hets__executable_path
                              yml__hets__instances_count)
  # Type constraints, grouped by expected class.
  BOOLEAN = %i(yml__exception_notifier__enabled
               yml__display_head_commit
               yml__display_symbols_tab
               yml__format_selection
               yml__action_mailer__perform_deliveries
               yml__action_mailer__raise_delivery_errors
               yml__action_mailer__smtp_settings__enable_starttls_auto
               initializers__consider_all_requests_local)
  FIXNUM = %i(yml__hets__instances_count
              yml__action_mailer__smtp_settings__port
              yml__allow_unconfirmed_access_for_days
              yml__git__push_priority__commits
              yml__git__push_priority__changed_files_per_commit
              yml__access_token__expiration_minutes
              yml__hets__time_between_updates
              yml__hets__version_minimum_revision)
  FLOAT = %i(yml__hets__version_minimum_version)
  STRING = %i(yml__name
              yml__OMS
              yml__OMS_qualifier
              yml__email
              yml__action_mailer__smtp_settings__address
              yml__exception_notifier__email_prefix
              yml__exception_notifier__sender_address
              yml__paths__data
              yml__git__verify_url
              yml__git__default_branch
              yml__git__fallbacks__committer_name
              yml__git__fallbacks__committer_email
              yml__external_repository_name)
  ARRAY = %i(yml__footer
             yml__exception_notifier__exception_recipients
             yml__allowed_iri_schemes
             yml__formality_levels
             yml__license_models
             yml__ontology_types
             yml__tasks
             yml__hets__cmd_line_options
             yml__hets__server_options)
  # Paths that must be directories on production machines.
  DIRECTORY_PRODUCTION = %i(yml__paths__data)
  # Arrays whose elements must all be present (non-blank).
  ELEMENT_PRESENT = %i(yml__allowed_iri_schemes
                       yml__hets__cmd_line_options
                       yml__hets__server_options)

  validates_with DataPathsValidator, if: :in_production?
  validates_presence_of *PRESENCE
  validates_presence_of *PRESENCE_IN_PRODUCTION, if: :in_production?
  BOOLEAN.each do |field|
    validates field, class: {in: [TrueClass, FalseClass]}
  end
  FIXNUM.each { |field| validates field, class: {in: [Fixnum]} }
  FLOAT.each { |field| validates field, class: {in: [Float]} }
  STRING.each { |field| validates field, class: {in: [String]} }
  ARRAY.each { |field| validates field, class: {in: [Array]} }
  DIRECTORY_PRODUCTION.each do |field|
    validates field, directory: true, if: :in_production?
  end
  ELEMENT_PRESENT.each { |field| validates field, elements_are_present: true }
  # The secret token must be long enough to be safe in production.
  validates :initializers__secret_token,
            presence: true,
            length: {minimum: 64},
            if: :in_production?
  # The sender address must belong to our own host in production.
  validates :yml__email,
            email_from_host: {hostname: ->(record) { record.initializers__fqdn }},
            if: :in_production?
  validates :yml__exception_notifier__exception_recipients,
            elements_are_email: true
  validates :yml__action_mailer__delivery_method,
            inclusion: {in: %i(sendmail smtp file test)}
  validates :yml__allow_unconfirmed_access_for_days,
            numericality: {greater_than_or_equal_to: 0}
  validates :yml__max_read_filesize, numericality: {greater_than: 1024}
  validates :yml__max_combined_diff_size, numericality: {greater_than: 2048}
  validates :yml__ontology_parse_timeout, numericality: {greater_than: 0}
  validates :yml__git__verify_url, format: URI.regexp
  validates :yml__git__push_priority__commits,
            numericality: {greater_than_or_equal_to: 1}
  validates :yml__git__push_priority__changed_files_per_commit,
            numericality: {greater_than_or_equal_to: 1}
  validates :yml__access_token__expiration_minutes,
            numericality: {greater_than_or_equal_to: 1}
  validates :yml__footer, elements_have_keys: {keys: %i(text)}
  validates :yml__formality_levels,
            elements_have_keys: {keys: %i(name description)}
  validates :yml__license_models, elements_have_keys: {keys: %i(name url)}
  validates :yml__ontology_types,
            elements_have_keys: {keys: %i(name description documentation)}
  validates :yml__tasks,
            elements_have_keys: {keys: %i(name description)}
  validates :initializers__log_level,
            inclusion: {in: %i(fatal error warn info debug)}
  validates :yml__hets__executable_path, executable: true, if: :in_production?
  # With 'nproc' available, cap the hets instance count at the number of
  # processors; otherwise only require a positive count.
  if NPROC_AVAILABLE
    validates :yml__hets__instances_count,
              numericality: {greater_than: 0,
                             less_than_or_equal_to: `nproc`.to_i},
              if: :in_production?
  else
    validates :yml__hets__instances_count,
              numericality: {greater_than: 0},
              if: :in_production?
  end
  validates :yml__hets__time_between_updates,
            numericality: {greater_than_or_equal_to: 1}

  # Resolve the root configuration object for a settings namespace.
  # Returns :error for unknown prefixes so #method_missing can raise.
  def self.base(first_portion)
    case first_portion
    when 'yml'
      Settings
    when 'initializers'
      Ontohub::Application.config
    else
      :error
    end
  end

  # Walk the key chain, sending each key to the current object.
  # Returns nil when a key is not understood; the nil value shall be
  # caught by the presence validators.
  def self.get_value(object, key_chain)
    key_chain.each do |key|
      if object.respond_to?(key)
        object = object.send(key)
      else
        return nil
      end
    end
    object
  end

  protected

  # True only in the production Rails environment.
  def in_production?
    Rails.env.production?
  end

  # We use '__' as a separator. It will be replaced by a dot.
  # This uses the fact that our settings-keys never have two consecutive
  # underscores.
  # yml__git__verify_url maps to Settings.git.verify_url.
  # initializers__git__verify_url maps to @config.git.verify_url.
  def method_missing(method_name, *_args)
    portions = method_name.to_s.split('__')
    object = self.class.base(portions[0])
    key_chain = portions[1..-1]
    if object == :error || key_chain.blank?
      raise NoMethodError,
            "undefined method `#{method_name}' for #{self}:#{self.class}"
    end
    self.class.get_value(object, key_chain)
  end
end
|
require_relative('../spec_helper')
# Specs for the aggregate root: construction, event raising, version
# conflict detection, change summaries and state loading.
# (Fixed misspelled example descriptions: 'initilized' -> 'initialized'.)
describe RubyCqrs::Domain::Aggregate do
  let(:aggregate_id) { SomeDomain::AGGREGATE_ID }
  let(:aggregate) { SomeDomain::AggregateRoot.new }

  describe '#new' do
    it 'has aggregate_id initialized as a valid uuid' do
      expect(aggregate.aggregate_id).to be_a_valid_uuid
    end

    it 'has version initialized as 0' do
      expect(aggregate.version).to be_zero
    end

    it 'has source_version initialized as 0' do
      expect(aggregate.instance_variable_get(:@source_version)).to be_zero
    end
  end

  describe '#raise_event' do
    it 'raises NotADomainEventError when raising an object that is not a proper event' do
      expect { aggregate.fire_weird_stuff }.to raise_error(RubyCqrs::NotADomainEventError)
    end

    context 'after raising an event' do
      it 'has version increased by 1' do
        original_version = aggregate.version
        aggregate.test_fire
        expect(aggregate.version).to eq(original_version + 1)
      end

      it 'leaves source_version unchanged' do
        original_source_version = aggregate.instance_variable_get(:@source_version)
        aggregate.test_fire
        expect(aggregate.instance_variable_get(:@source_version)).to eq original_source_version
      end

      it 'calls #on_third_event' do
        expect(aggregate).to receive(:on_third_event)
        aggregate.test_fire
      end
    end
  end

  describe '#is_version_conflicted?' do
    let(:unsorted_events) { [SomeDomain::SecondEvent.new, SomeDomain::FirstEvent.new] }
    let(:state) { {:aggregate_id => aggregate_id, :events => unsorted_events} }
    let(:loaded_aggregate) { aggregate.send(:load_from, state); aggregate }

    it 'returns true when supplied client side version does not match the server side persisted source_version' do
      client_side_version = unsorted_events.size - 1
      expect(loaded_aggregate.is_version_conflicted? client_side_version).to be_truthy
    end

    it 'returns false when supplied client side version matches the server side persisted source_version' do
      client_side_version = unsorted_events.size
      expect(loaded_aggregate.is_version_conflicted? client_side_version).to be_falsy
    end
  end

  describe '#get_changes' do
    context 'after raising no event' do
      it 'returns nil' do
        expect(aggregate.send(:get_changes)).to be_nil
      end
    end

    context 'after raising 2 events' do
      it 'returns proper change summary' do
        aggregate.test_fire
        aggregate.test_fire_ag
        pending_changes = aggregate.send(:get_changes)
        expect(pending_changes[:events].size).to eq(2)
        expect(pending_changes[:events][0].version).to eq(1)
        expect(pending_changes[:events][1].version).to eq(2)
        expect(pending_changes[:aggregate_id]).to eq(aggregate.aggregate_id)
        expect(pending_changes[:aggregate_type]).to eq(aggregate.class.name)
        expect(pending_changes[:expecting_source_version]).to eq(0)
        expect(pending_changes[:expecting_version]).to eq(2)
      end
    end
  end

  describe '#load_from' do
    let(:unsorted_events) { [SomeDomain::SecondEvent.new, SomeDomain::FirstEvent.new] }
    let(:state) { {:aggregate_id => aggregate_id, :events => unsorted_events} }
    let(:loaded_aggregate) { aggregate.send(:load_from, state); aggregate }

    context 'when loading events' do
      # load_from runs after the expectations have been set up
      after(:each) { aggregate.send(:load_from, state) }

      it 'calls #on_first_event' do
        expect(aggregate).to receive(:on_first_event)
      end

      it 'calls #on_second_event' do
        expect(aggregate).to receive(:on_second_event)
      end

      it 'calls #on_first_event, #on_second_event in order' do
        expect(aggregate).to receive(:on_first_event).ordered
        expect(aggregate).to receive(:on_second_event).ordered
      end
    end

    context 'after events are loaded' do
      it "has aggregate_id set to the events' aggregate_id" do
        expect(loaded_aggregate.aggregate_id).to eq(aggregate_id)
      end

      it 'has version set to the number of loaded events' do
        expect(loaded_aggregate.version).to eq(unsorted_events.size)
      end

      it 'has source_version set to the number of loaded events' do
        expect(loaded_aggregate.instance_variable_get(:@source_version)).to eq(unsorted_events.size)
      end
    end
  end
end
Removed duplication by extracting the shared unsorted_events let
require_relative('../spec_helper')

# Specs for the aggregate root; unsorted_events is shared at the top
# level to avoid duplication between the describe blocks.
# (Fixed misspelled example descriptions: 'initilized' -> 'initialized'.)
describe RubyCqrs::Domain::Aggregate do
  let(:aggregate_id) { SomeDomain::AGGREGATE_ID }
  let(:aggregate) { SomeDomain::AggregateRoot.new }
  let(:unsorted_events) { [SomeDomain::SecondEvent.new, SomeDomain::FirstEvent.new] }

  describe '#new' do
    it 'has aggregate_id initialized as a valid uuid' do
      expect(aggregate.aggregate_id).to be_a_valid_uuid
    end

    it 'has version initialized as 0' do
      expect(aggregate.version).to be_zero
    end

    it 'has source_version initialized as 0' do
      expect(aggregate.instance_variable_get(:@source_version)).to be_zero
    end
  end

  describe '#raise_event' do
    it 'raises NotADomainEventError when raising an object that is not a proper event' do
      expect { aggregate.fire_weird_stuff }.to raise_error(RubyCqrs::NotADomainEventError)
    end

    context 'after raising an event' do
      it 'has version increased by 1' do
        original_version = aggregate.version
        aggregate.test_fire
        expect(aggregate.version).to eq(original_version + 1)
      end

      it 'leaves source_version unchanged' do
        original_source_version = aggregate.instance_variable_get(:@source_version)
        aggregate.test_fire
        expect(aggregate.instance_variable_get(:@source_version)).to eq original_source_version
      end

      it 'calls #on_third_event' do
        expect(aggregate).to receive(:on_third_event)
        aggregate.test_fire
      end
    end
  end

  describe '#is_version_conflicted?' do
    let(:state) { {:aggregate_id => aggregate_id, :events => unsorted_events} }
    let(:loaded_aggregate) { aggregate.send(:load_from, state); aggregate }

    it 'returns true when supplied client side version does not match the server side persisted source_version' do
      client_side_version = unsorted_events.size - 1
      expect(loaded_aggregate.is_version_conflicted? client_side_version).to be_truthy
    end

    it 'returns false when supplied client side version matches the server side persisted source_version' do
      client_side_version = unsorted_events.size
      expect(loaded_aggregate.is_version_conflicted? client_side_version).to be_falsy
    end
  end

  describe '#get_changes' do
    context 'after raising no event' do
      it 'returns nil' do
        expect(aggregate.send(:get_changes)).to be_nil
      end
    end

    context 'after raising 2 events' do
      it 'returns proper change summary' do
        aggregate.test_fire
        aggregate.test_fire_ag
        pending_changes = aggregate.send(:get_changes)
        expect(pending_changes[:events].size).to eq(2)
        expect(pending_changes[:events][0].version).to eq(1)
        expect(pending_changes[:events][1].version).to eq(2)
        expect(pending_changes[:aggregate_id]).to eq(aggregate.aggregate_id)
        expect(pending_changes[:aggregate_type]).to eq(aggregate.class.name)
        expect(pending_changes[:expecting_source_version]).to eq(0)
        expect(pending_changes[:expecting_version]).to eq(2)
      end
    end
  end

  describe '#load_from' do
    let(:state) { {:aggregate_id => aggregate_id, :events => unsorted_events} }
    let(:loaded_aggregate) { aggregate.send(:load_from, state); aggregate }

    context 'when loading events' do
      # load_from runs after the expectations have been set up
      after(:each) { aggregate.send(:load_from, state) }

      it 'calls #on_first_event' do
        expect(aggregate).to receive(:on_first_event)
      end

      it 'calls #on_second_event' do
        expect(aggregate).to receive(:on_second_event)
      end

      it 'calls #on_first_event, #on_second_event in order' do
        expect(aggregate).to receive(:on_first_event).ordered
        expect(aggregate).to receive(:on_second_event).ordered
      end
    end

    context 'after events are loaded' do
      it "has aggregate_id set to the events' aggregate_id" do
        expect(loaded_aggregate.aggregate_id).to eq(aggregate_id)
      end

      it 'has version set to the number of loaded events' do
        expect(loaded_aggregate.version).to eq(unsorted_events.size)
      end

      it 'has source_version set to the number of loaded events' do
        expect(loaded_aggregate.instance_variable_get(:@source_version)).to eq(unsorted_events.size)
      end
    end
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "array_2d"
  s.version = "0.0.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Luke Grecki"]
  s.date = "2012-10-08"
  s.description = "With this class you can create mutable 2D arrays and change subarrays."
  s.email = "lukegrecki@gmail.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/array_2d.rb",
    "test/helper.rb",
    "test/test_array_2d.rb"
  ]
  s.homepage = "http://github.com/lukegrecki/array_2d"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.10"
  # Fixed typo in the summary: "mutabke" -> "mutable".
  s.summary = "A lightweight mutable 2D array class"

  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rdoc>, [">= 0"])
      s.add_development_dependency(%q<bundler>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, [">= 0"])
    else
      s.add_dependency(%q<rdoc>, [">= 0"])
      s.add_dependency(%q<bundler>, [">= 0"])
      s.add_dependency(%q<jeweler>, [">= 0"])
    end
  else
    s.add_dependency(%q<rdoc>, [">= 0"])
    s.add_dependency(%q<bundler>, [">= 0"])
    s.add_dependency(%q<jeweler>, [">= 0"])
  end
end
Updated gemspec
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "array_2d"
  s.version = "0.1.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Luke Grecki"]
  s.date = "2012-10-08"
  s.description = "With this class you can create mutable 2D arrays and change subarrays."
  s.email = "lukegrecki@gmail.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "array_2d.gemspec",
    "lib/array_2d.rb",
    "test/helper.rb",
    "test/test_array_2d.rb"
  ]
  s.homepage = "http://github.com/lukegrecki/array_2d"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.10"
  # Fixed typo in the summary: "mutabke" -> "mutable".
  s.summary = "A lightweight mutable 2D array class"

  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rdoc>, [">= 0"])
      s.add_development_dependency(%q<bundler>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, [">= 0"])
    else
      s.add_dependency(%q<rdoc>, [">= 0"])
      s.add_dependency(%q<bundler>, [">= 0"])
      s.add_dependency(%q<jeweler>, [">= 0"])
    end
  else
    s.add_dependency(%q<rdoc>, [">= 0"])
    s.add_dependency(%q<bundler>, [">= 0"])
    s.add_dependency(%q<jeweler>, [">= 0"])
  end
end
|
require "sfn"
require "sparkle_formation"
module Sfn
  module CommandModule
    # Planning helpers
    module Planning

      # Create a new planner instance
      #
      # @param stack [Miasma::Models::Orchestration::Stack]
      # @return [Sfn::Planner, NilClass] nil when no planner exists for the provider
      def build_planner(stack)
        klass_name = stack.api.class.to_s.split("::").last
        if Planner.const_defined?(klass_name)
          Planner.const_get(klass_name).new(ui, config, arguments, stack)
        else
          warn "Failed to build planner for current provider. No provider implemented. (`#{klass_name}`)"
          nil
        end
      end

      # Display plan result on the UI
      #
      # @param result [Miasma::Models::Orchestration::Stack::Plan]
      def display_plan_information(result)
        ui.info ui.color("Pre-update resource planning report:", :bold)
        unless print_plan_result(result)
          ui.info "No resources life cycle changes detected in this update!"
        end
        # e.g. "create" or "update", derived from the command class name
        cmd = self.class.to_s.split("::").last.downcase
        ui.confirm "Apply this stack #{cmd}?" unless config[:plan_only]
      end

      # Print plan information to the UI
      #
      # Recurses into nested stacks first, then prints this plan's own
      # lifecycle sections. Returns true when anything was printed.
      #
      # @param info [Miasma::Models::Orchestration::Stack::Plan]
      # @param names [Array<String>] nested names
      def print_plan_result(info, names = [])
        said_any_things = false
        unless Array(info.stacks).empty?
          info.stacks.each do |s_name, s_info|
            result = print_plan_result(s_info, [*names, s_name].compact)
            said_any_things ||= result
          end
        end
        if !names.flatten.compact.empty? || info.name
          said_things = false
          output_name = names.empty? ? info.name : names.join(" > ")
          ui.puts
          # Fix: print output_name -- it was computed but unused, and
          # names.join printed an empty string when names was empty even
          # though info.name was available.
          ui.puts " #{ui.color("Update plan for:", :bold)} #{ui.color(output_name, :blue)}"
          unless Array(info.unknown).empty?
            ui.puts " #{ui.color("!!! Unknown update effect:", :red, :bold)}"
            print_plan_items(info, :unknown, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.unavailable).empty?
            ui.puts " #{ui.color("Update request not allowed:", :red, :bold)}"
            print_plan_items(info, :unavailable, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.replace).empty?
            ui.puts " #{ui.color("Resources to be replaced:", :red, :bold)}"
            print_plan_items(info, :replace, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.interrupt).empty?
            ui.puts " #{ui.color("Resources to be interrupted:", :yellow, :bold)}"
            print_plan_items(info, :interrupt, :yellow)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.remove).empty?
            ui.puts " #{ui.color("Resources to be removed:", :red, :bold)}"
            print_plan_items(info, :remove, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.add).empty?
            ui.puts " #{ui.color("Resources to be added:", :green, :bold)}"
            print_plan_items(info, :add, :green)
            ui.puts
            said_any_things = said_things = true
          end
          unless said_things
            ui.puts " #{ui.color("No resource lifecycle changes detected!", :green)}"
            ui.puts
            said_any_things = true
          end
        end
        said_any_things
      end

      # Print planning items
      #
      # @param info [Miasma::Models::Orchestration::Stack::Plan] plan
      # @param key [Symbol] key of items
      # @param color [Symbol] color to flag
      def print_plan_items(info, key, color)
        collection = info.send(key)
        # Column widths for aligned output
        max_name = collection.map(&:name).map(&:size).max
        max_type = collection.map(&:type).map(&:size).max
        max_p = collection.map(&:diffs).flatten(1).map(&:name).map(&:to_s).map(&:size).max
        max_o = collection.map(&:diffs).flatten(1).map(&:current).map(&:to_s).map(&:size).max
        collection.each do |val|
          name = val.name
          ui.print " " * 6
          ui.print ui.color("[#{val.type}]", color)
          ui.print " " * (max_type - val.type.size)
          ui.print " " * 4
          ui.print ui.color(name, :bold)
          properties = Array(val.diffs).map(&:name)
          unless properties.empty?
            ui.print " " * (max_name - name.size)
            ui.print " " * 4
            ui.print "Reason: `#{properties.join("`, `")}`"
          end
          ui.puts
          if config[:diffs]
            unless val.diffs.empty?
              p_name = nil
              val.diffs.each do |diff|
                if !diff.proposed.nil? || !diff.current.nil?
                  p_name = diff.name
                  ui.print " " * 8
                  ui.print "#{p_name}: "
                  ui.print " " * (max_p - p_name.size)
                  ui.print ui.color("-#{diff.current}", :red) if diff.current
                  ui.print " " * (max_o - diff.current.to_s.size)
                  ui.print " "
                  if diff.proposed == Sfn::Planner::RUNTIME_MODIFIED
                    ui.puts ui.color("+#{diff.current} <Dependency Modified>", :green)
                  else
                    if diff.proposed.nil?
                      ui.puts
                    else
                      ui.puts ui.color("+#{diff.proposed.to_s.gsub("__MODIFIED_REFERENCE_VALUE__", "<Dependency Modified>")}", :green)
                    end
                  end
                end
              end
              ui.puts if p_name
            end
          end
        end
      end
    end
  end
end
Include plan name if available
require "sfn"
require "sparkle_formation"
module Sfn
  module CommandModule
    # Planning helpers
    module Planning

      # Create a new planner instance
      #
      # @param stack [Miasma::Models::Orchestration::Stack]
      # @return [Sfn::Planner, NilClass] nil when no planner exists for the provider
      def build_planner(stack)
        klass_name = stack.api.class.to_s.split("::").last
        if Planner.const_defined?(klass_name)
          Planner.const_get(klass_name).new(ui, config, arguments, stack)
        else
          warn "Failed to build planner for current provider. No provider implemented. (`#{klass_name}`)"
          nil
        end
      end

      # Display plan result on the UI
      #
      # @param result [Miasma::Models::Orchestration::Stack::Plan]
      def display_plan_information(result)
        ui.info ui.color("Pre-update resource planning report:", :bold)
        unless print_plan_result(result, [result.name])
          ui.info "No resources life cycle changes detected in this update!"
        end
        # e.g. "create" or "update", derived from the command class name
        cmd = self.class.to_s.split("::").last.downcase
        ui.confirm "Apply this stack #{cmd}?" unless config[:plan_only]
      end

      # Print plan information to the UI
      #
      # Recurses into nested stacks first, then prints this plan's own
      # lifecycle sections. Returns true when anything was printed.
      #
      # @param info [Miasma::Models::Orchestration::Stack::Plan]
      # @param names [Array<String>] nested names
      def print_plan_result(info, names = [])
        said_any_things = false
        unless Array(info.stacks).empty?
          info.stacks.each do |s_name, s_info|
            result = print_plan_result(s_info, [*names, s_name].compact)
            said_any_things ||= result
          end
        end
        if !names.flatten.compact.empty? || info.name
          said_things = false
          output_name = names.empty? ? info.name : names.join(" > ")
          ui.puts
          # Fix: print output_name -- it was computed but unused, and
          # names.join printed an empty string when names was empty even
          # though info.name was available.
          ui.puts " #{ui.color("Update plan for:", :bold)} #{ui.color(output_name, :blue)}"
          unless Array(info.unknown).empty?
            ui.puts " #{ui.color("!!! Unknown update effect:", :red, :bold)}"
            print_plan_items(info, :unknown, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.unavailable).empty?
            ui.puts " #{ui.color("Update request not allowed:", :red, :bold)}"
            print_plan_items(info, :unavailable, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.replace).empty?
            ui.puts " #{ui.color("Resources to be replaced:", :red, :bold)}"
            print_plan_items(info, :replace, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.interrupt).empty?
            ui.puts " #{ui.color("Resources to be interrupted:", :yellow, :bold)}"
            print_plan_items(info, :interrupt, :yellow)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.remove).empty?
            ui.puts " #{ui.color("Resources to be removed:", :red, :bold)}"
            print_plan_items(info, :remove, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.add).empty?
            ui.puts " #{ui.color("Resources to be added:", :green, :bold)}"
            print_plan_items(info, :add, :green)
            ui.puts
            said_any_things = said_things = true
          end
          unless said_things
            ui.puts " #{ui.color("No resource lifecycle changes detected!", :green)}"
            ui.puts
            said_any_things = true
          end
        end
        said_any_things
      end

      # Print planning items
      #
      # @param info [Miasma::Models::Orchestration::Stack::Plan] plan
      # @param key [Symbol] key of items
      # @param color [Symbol] color to flag
      def print_plan_items(info, key, color)
        collection = info.send(key)
        # Column widths for aligned output
        max_name = collection.map(&:name).map(&:size).max
        max_type = collection.map(&:type).map(&:size).max
        max_p = collection.map(&:diffs).flatten(1).map(&:name).map(&:to_s).map(&:size).max
        max_o = collection.map(&:diffs).flatten(1).map(&:current).map(&:to_s).map(&:size).max
        collection.each do |val|
          name = val.name
          ui.print " " * 6
          ui.print ui.color("[#{val.type}]", color)
          ui.print " " * (max_type - val.type.size)
          ui.print " " * 4
          ui.print ui.color(name, :bold)
          properties = Array(val.diffs).map(&:name)
          unless properties.empty?
            ui.print " " * (max_name - name.size)
            ui.print " " * 4
            ui.print "Reason: `#{properties.join("`, `")}`"
          end
          ui.puts
          if config[:diffs]
            unless val.diffs.empty?
              p_name = nil
              val.diffs.each do |diff|
                if !diff.proposed.nil? || !diff.current.nil?
                  p_name = diff.name
                  ui.print " " * 8
                  ui.print "#{p_name}: "
                  ui.print " " * (max_p - p_name.size)
                  ui.print ui.color("-#{diff.current}", :red) if diff.current
                  ui.print " " * (max_o - diff.current.to_s.size)
                  ui.print " "
                  if diff.proposed == Sfn::Planner::RUNTIME_MODIFIED
                    ui.puts ui.color("+#{diff.current} <Dependency Modified>", :green)
                  else
                    if diff.proposed.nil?
                      ui.puts
                    else
                      ui.puts ui.color("+#{diff.proposed.to_s.gsub("__MODIFIED_REFERENCE_VALUE__", "<Dependency Modified>")}", :green)
                    end
                  end
                end
              end
              ui.puts if p_name
            end
          end
        end
      end
    end
  end
end
|
# Specs for ROM::Changeset: the builder function, diff computation and
# hash-like delegation behavior.
RSpec.describe ROM::Changeset do
  let(:jane) { { id: 2, name: "Jane" } }
  # A stubbed relation; only primary_key and (where stubbed) fetch are used.
  let(:relation) { double(ROM::Relation, primary_key: :id) }

  describe 'builder function' do
    it 'returns a create changeset for new data' do
      expect(ROM.Changeset(relation, name: "Jane")).to be_create
    end

    it 'returns an update changeset for persisted data' do
      expect(ROM.Changeset(relation, 2, jane)).to be_update
    end
  end

  describe '#diff' do
    it 'returns a hash with changes' do
      expect(relation).to receive(:fetch).with(2).and_return(jane)
      changeset = ROM::Changeset(relation, 2, name: "Jane Doe")
      expect(changeset.diff).to eql(name: "Jane Doe")
    end
  end

  describe '#diff?' do
    it 'returns true when data differs from the original tuple' do
      expect(relation).to receive(:fetch).with(2).and_return(jane)
      changeset = ROM::Changeset(relation, 2, name: "Jane Doe")
      expect(changeset).to be_diff
    end

    it 'returns false when data are equal to the original tuple' do
      expect(relation).to receive(:fetch).with(2).and_return(jane)
      changeset = ROM::Changeset(relation, 2, name: "Jane")
      expect(changeset).to_not be_diff
    end
  end

  describe 'quacks like a hash' do
    subject(:changeset) { ROM::Changeset.new(relation, data) }

    let(:data) { instance_double(Hash) }

    it 'delegates to its data hash' do
      expect(data).to receive(:[]).with(:name).and_return('Jane')
      expect(changeset[:name]).to eql('Jane')
    end

    it 'maintains its own type' do
      expect(data).to receive(:merge).with(foo: 'bar').and_return(foo: 'bar')
      new_changeset = changeset.merge(foo: 'bar')
      expect(new_changeset).to be_instance_of(ROM::Changeset)
      expect(new_changeset.options).to eql(changeset.options)
      expect(new_changeset.to_h).to eql(foo: 'bar')
    end

    it 'raises NoMethodError when an unknown message was sent' do
      expect { changeset.not_here }.to raise_error(NoMethodError, /not_here/)
    end
  end
end
Add a spec for invalid args in Changeset builder function
# Specs for ROM::Changeset: the builder function (including invalid
# arguments), diff computation and hash-like delegation behavior.
RSpec.describe ROM::Changeset do
  let(:jane) { { id: 2, name: "Jane" } }
  # A stubbed relation; only primary_key and (where stubbed) fetch are used.
  let(:relation) { double(ROM::Relation, primary_key: :id) }

  describe 'builder function' do
    it 'returns a create changeset for new data' do
      expect(ROM.Changeset(relation, name: "Jane")).to be_create
    end

    it 'returns an update changeset for persisted data' do
      expect(ROM.Changeset(relation, 2, jane)).to be_update
    end

    it 'raises ArgumentError when invalid args are passed' do
      expect { ROM.Changeset(1, 2, 3, 4) }.to raise_error(
        ArgumentError, 'ROM.Changeset accepts 2 or 3 arguments'
      )
    end
  end

  describe '#diff' do
    it 'returns a hash with changes' do
      expect(relation).to receive(:fetch).with(2).and_return(jane)
      changeset = ROM::Changeset(relation, 2, name: "Jane Doe")
      expect(changeset.diff).to eql(name: "Jane Doe")
    end
  end

  describe '#diff?' do
    it 'returns true when data differs from the original tuple' do
      expect(relation).to receive(:fetch).with(2).and_return(jane)
      changeset = ROM::Changeset(relation, 2, name: "Jane Doe")
      expect(changeset).to be_diff
    end

    it 'returns false when data are equal to the original tuple' do
      expect(relation).to receive(:fetch).with(2).and_return(jane)
      changeset = ROM::Changeset(relation, 2, name: "Jane")
      expect(changeset).to_not be_diff
    end
  end

  describe 'quacks like a hash' do
    subject(:changeset) { ROM::Changeset.new(relation, data) }

    let(:data) { instance_double(Hash) }

    it 'delegates to its data hash' do
      expect(data).to receive(:[]).with(:name).and_return('Jane')
      expect(changeset[:name]).to eql('Jane')
    end

    it 'maintains its own type' do
      expect(data).to receive(:merge).with(foo: 'bar').and_return(foo: 'bar')
      new_changeset = changeset.merge(foo: 'bar')
      expect(new_changeset).to be_instance_of(ROM::Changeset)
      expect(new_changeset.options).to eql(changeset.options)
      expect(new_changeset.to_h).to eql(foo: 'bar')
    end

    it 'raises NoMethodError when an unknown message was sent' do
      expect { changeset.not_here }.to raise_error(NoMethodError, /not_here/)
    end
  end
end
|
require "sfn"
require "sparkle_formation"
module Sfn
module CommandModule
# Planning helpers
module Planning
# Create a new planner instance
#
# Looks up a provider-specific planner class (named after the last
# segment of the stack API class) under the Planner namespace.
#
# @param stack [Miasma::Models::Orchestration::Stack]
# @return [Sfn::Planner, NilClass] nil when no planner is implemented
def build_planner(stack)
  provider = stack.api.class.to_s.split("::").last
  unless Planner.const_defined?(provider)
    warn "Failed to build planner for current provider. No provider implemented. (`#{provider}`)"
    return nil
  end
  Planner.const_get(provider).new(ui, config, arguments, stack)
end
# Display plan result on the UI
#
# Prints the planning report, then either announces that the plan will
# be applied automatically (:plan_apply), returns silently when only a
# plan was requested (:plan_only), or asks the user for confirmation.
#
# @param result [Miasma::Models::Orchestration::Stack::Plan]
def display_plan_information(result)
  ui.info ui.color("Pre-update resource planning report:", :bold)
  unless print_plan_result(result, [result.name])
    ui.info "No resources life cycle changes detected in this update!"
  end
  # e.g. "create" or "update", derived from the command class name
  cmd = self.class.to_s.split("::").last.downcase
  if config[:plan_apply]
    return ui.info "Applying this stack #{cmd}..."
  elsif config[:plan_only]
    return
  end
  ui.confirm "Apply this stack #{cmd}?"
end
# Print plan information to the UI
#
# Recurses into nested stacks first, then prints this plan's own
# lifecycle sections. Returns true when anything was printed.
#
# @param info [Miasma::Models::Orchestration::Stack::Plan]
# @param names [Array<String>] nested names
def print_plan_result(info, names = [])
  said_any_things = false
  unless Array(info.stacks).empty?
    info.stacks.each do |s_name, s_info|
      result = print_plan_result(s_info, [*names, s_name].compact)
      said_any_things ||= result
    end
  end
  if !names.flatten.compact.empty? || info.name
    said_things = false
    output_name = names.empty? ? info.name : names.join(" > ")
    ui.puts
    # Fix: print output_name -- it was computed but unused, and
    # names.join printed an empty string when names was empty even
    # though info.name was available.
    ui.puts " #{ui.color("Update plan for:", :bold)} #{ui.color(output_name, :blue)}"
    unless Array(info.unknown).empty?
      ui.puts " #{ui.color("!!! Unknown update effect:", :red, :bold)}"
      print_plan_items(info, :unknown, :red)
      ui.puts
      said_any_things = said_things = true
    end
    unless Array(info.unavailable).empty?
      ui.puts " #{ui.color("Update request not allowed:", :red, :bold)}"
      print_plan_items(info, :unavailable, :red)
      ui.puts
      said_any_things = said_things = true
    end
    unless Array(info.replace).empty?
      ui.puts " #{ui.color("Resources to be replaced:", :red, :bold)}"
      print_plan_items(info, :replace, :red)
      ui.puts
      said_any_things = said_things = true
    end
    unless Array(info.interrupt).empty?
      ui.puts " #{ui.color("Resources to be interrupted:", :yellow, :bold)}"
      print_plan_items(info, :interrupt, :yellow)
      ui.puts
      said_any_things = said_things = true
    end
    unless Array(info.remove).empty?
      ui.puts " #{ui.color("Resources to be removed:", :red, :bold)}"
      print_plan_items(info, :remove, :red)
      ui.puts
      said_any_things = said_things = true
    end
    unless Array(info.add).empty?
      ui.puts " #{ui.color("Resources to be added:", :green, :bold)}"
      print_plan_items(info, :add, :green)
      ui.puts
      said_any_things = said_things = true
    end
    unless said_things
      ui.puts " #{ui.color("No resource lifecycle changes detected!", :green)}"
      ui.puts
      said_any_things = true
    end
  end
  said_any_things
end
# Print planning items
#
# @param info [Miasma::Models::Orchestration::Stack::Plan] plan
# @param key [Symbol] key of items
# @param color [Symbol] color to flag
def print_plan_items(info, key, color)
collection = info.send(key)
max_name = collection.map(&:name).map(&:size).max
max_type = collection.map(&:type).map(&:size).max
max_p = collection.map(&:diffs).flatten(1).map(&:name).map(&:to_s).map(&:size).max
max_o = collection.map(&:diffs).flatten(1).map(&:current).map(&:to_s).map(&:size).max
collection.each do |val|
name = val.name
ui.print " " * 6
ui.print ui.color("[#{val.type}]", color)
ui.print " " * (max_type - val.type.size)
ui.print " " * 4
ui.print ui.color(name, :bold)
properties = Array(val.diffs).map(&:name)
unless properties.empty?
ui.print " " * (max_name - name.size)
ui.print " " * 4
ui.print "Reason: `#{properties.join("`, `")}`"
end
ui.puts
if config[:diffs]
unless val.diffs.empty?
p_name = nil
val.diffs.each do |diff|
if !diff.proposed.nil? || !diff.current.nil?
p_name = diff.name
ui.print " " * 8
ui.print "#{p_name}: "
ui.print " " * (max_p - p_name.size)
ui.print ui.color("-#{diff.current}", :red) if diff.current
ui.print " " * (max_o - diff.current.to_s.size)
ui.print " "
if diff.proposed == Sfn::Planner::RUNTIME_MODIFIED
ui.puts ui.color("+#{diff.current} <Dependency Modified>", :green)
else
if diff.proposed.nil?
ui.puts
else
ui.puts ui.color("+#{diff.proposed.to_s.gsub("__MODIFIED_REFERENCE_VALUE__", "<Dependency Modified>")}", :green)
end
end
end
end
ui.puts if p_name
end
end
end
end
end
end
end
Update wording for plan execution so it matches the terminology used by the commands.
require "sfn"
require "sparkle_formation"
module Sfn
  module CommandModule
    # Planning helpers
    module Planning
      # Build a provider-specific planner instance for the given stack.
      #
      # @param stack [Miasma::Models::Orchestration::Stack]
      # @return [Sfn::Planner, nil] nil when no planner is implemented for the provider
      def build_planner(stack)
        # Provider name is the final segment of the stack API class name
        klass_name = stack.api.class.to_s.split("::").last
        if Planner.const_defined?(klass_name)
          Planner.const_get(klass_name).new(ui, config, arguments, stack)
        else
          warn "Failed to build planner for current provider. No provider implemented. (`#{klass_name}`)"
          nil
        end
      end

      # Display plan result on the UI and, unless plan-only or auto-apply is
      # configured, prompt the user for confirmation.
      #
      # @param result [Miasma::Models::Orchestration::Stack::Plan]
      # @return [void]
      def display_plan_information(result)
        ui.info ui.color("Pre-update resource planning report:", :bold)
        unless print_plan_result(result, [result.name])
          ui.info "No resources life cycle changes detected in this update!"
        end
        if config[:plan_apply]
          return ui.info "Realizing this stack plan..."
        elsif config[:plan_only]
          return
        end
        ui.confirm "Realize this stack plan?"
      end

      # Print plan information to the UI, recursing into nested stack plans.
      #
      # @param info [Miasma::Models::Orchestration::Stack::Plan]
      # @param names [Array<String>] nested names
      # @return [TrueClass, FalseClass] true when any lifecycle info was printed
      def print_plan_result(info, names = [])
        said_any_things = false
        # Recurse into nested stack plans first
        unless Array(info.stacks).empty?
          info.stacks.each do |s_name, s_info|
            result = print_plan_result(s_info, [*names, s_name].compact)
            said_any_things ||= result
          end
        end
        if !names.flatten.compact.empty? || info.name
          said_things = false
          # FIX: output_name was previously computed but unused, so the header
          # printed an empty name whenever no nesting path was supplied; fall
          # back to the plan's own name in that case
          output_name = names.empty? ? info.name : names.join(" > ")
          ui.puts
          ui.puts " #{ui.color("Update plan for:", :bold)} #{ui.color(output_name, :blue)}"
          unless Array(info.unknown).empty?
            ui.puts " #{ui.color("!!! Unknown update effect:", :red, :bold)}"
            print_plan_items(info, :unknown, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.unavailable).empty?
            ui.puts " #{ui.color("Update request not allowed:", :red, :bold)}"
            print_plan_items(info, :unavailable, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.replace).empty?
            ui.puts " #{ui.color("Resources to be replaced:", :red, :bold)}"
            print_plan_items(info, :replace, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.interrupt).empty?
            ui.puts " #{ui.color("Resources to be interrupted:", :yellow, :bold)}"
            print_plan_items(info, :interrupt, :yellow)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.remove).empty?
            ui.puts " #{ui.color("Resources to be removed:", :red, :bold)}"
            print_plan_items(info, :remove, :red)
            ui.puts
            said_any_things = said_things = true
          end
          unless Array(info.add).empty?
            ui.puts " #{ui.color("Resources to be added:", :green, :bold)}"
            print_plan_items(info, :add, :green)
            ui.puts
            said_any_things = said_things = true
          end
          unless said_things
            ui.puts " #{ui.color("No resource lifecycle changes detected!", :green)}"
            ui.puts
            said_any_things = true
          end
        end
        said_any_things
      end

      # Print planning items for one lifecycle category, with aligned columns
      # and optional per-property diffs.
      #
      # @param info [Miasma::Models::Orchestration::Stack::Plan] plan
      # @param key [Symbol] key of items
      # @param color [Symbol] color to flag
      # @return [void]
      def print_plan_items(info, key, color)
        collection = info.send(key)
        # Column widths for aligned output
        max_name = collection.map(&:name).map(&:size).max
        max_type = collection.map(&:type).map(&:size).max
        max_p = collection.map(&:diffs).flatten(1).map(&:name).map(&:to_s).map(&:size).max
        max_o = collection.map(&:diffs).flatten(1).map(&:current).map(&:to_s).map(&:size).max
        collection.each do |val|
          name = val.name
          ui.print " " * 6
          ui.print ui.color("[#{val.type}]", color)
          ui.print " " * (max_type - val.type.size)
          ui.print " " * 4
          ui.print ui.color(name, :bold)
          properties = Array(val.diffs).map(&:name)
          unless properties.empty?
            ui.print " " * (max_name - name.size)
            ui.print " " * 4
            ui.print "Reason: `#{properties.join("`, `")}`"
          end
          ui.puts
          if config[:diffs]
            unless val.diffs.empty?
              p_name = nil
              val.diffs.each do |diff|
                # Only show entries that actually carry a value change
                if !diff.proposed.nil? || !diff.current.nil?
                  p_name = diff.name
                  ui.print " " * 8
                  ui.print "#{p_name}: "
                  ui.print " " * (max_p - p_name.size)
                  ui.print ui.color("-#{diff.current}", :red) if diff.current
                  ui.print " " * (max_o - diff.current.to_s.size)
                  ui.print " "
                  if diff.proposed == Sfn::Planner::RUNTIME_MODIFIED
                    # Value only changes because a dependency was modified
                    ui.puts ui.color("+#{diff.current} <Dependency Modified>", :green)
                  else
                    if diff.proposed.nil?
                      ui.puts
                    else
                      ui.puts ui.color("+#{diff.proposed.to_s.gsub("__MODIFIED_REFERENCE_VALUE__", "<Dependency Modified>")}", :green)
                    end
                  end
                end
              end
              ui.puts if p_name
            end
          end
        end
      end
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.