CombinedText stringlengths 4 3.42M |
|---|
require 'formula'
class Mongodb < Formula
# Boxen formula: installs the prebuilt OS X MongoDB 2.2.2 tarball as-is.
homepage 'http://www.mongodb.org/'
url 'http://fastdl.mongodb.org/osx/mongodb-osx-x86_64-2.2.2.tgz'
sha1 'b3808eeb6fe481f87db176cd3ab31119f94f7cc1'
version '2.2.2-boxen1'
skip_clean :all
def install
# Copy the unpacked tarball contents straight into the prefix.
prefix.install Dir['*']
end
end
Update brew to 2.4.2
require 'formula'
class Mongodb < Formula
# Boxen formula: installs the prebuilt OS X MongoDB 2.4.2 tarball as-is.
homepage 'http://www.mongodb.org/'
url 'http://fastdl.mongodb.org/osx/mongodb-osx-x86_64-2.4.2.tgz'
sha1 'e4e6a001a39b39a875bd24db986f3445400a8b64'
version '2.4.2-boxen1'
skip_clean :all
def install
# Copy the unpacked tarball contents straight into the prefix.
prefix.install Dir['*']
end
end
|
require 'active_tsv'
require 'benchmark'
module ActiveTsvTest
# Benchmark: repeatedly run a long chain of #where calls against a
# 10,000-row, 26-column ('a'..'z') TSV table. `b` is the benchmark driver
# (provides #reset_timer and #n iterations).
# NOTE(review): Tempfile is used but only 'active_tsv'/'benchmark' are
# required above — presumably active_tsv pulls in tempfile; verify.
def benchmark_where(b)
Tempfile.create(["", ".tsv"]) do |f|
f.puts [*'a'..'z'].join("\t")
10000.times do |i|
# Row i has column j (1-based) equal to i*j, so row 5000 matches a: 5000, b: 10000, ...
f.puts [*1..26].map{ |j| i * j }.join("\t")
end
bench_klass = Class.new(ActiveTsv::Base) do
self.table_path = f.path
end
f.flush
b.reset_timer
i = 0
n = 5000
while i < b.n
bench_klass.where(a: 1 * n, b: 2 * n, c: 3 * n)
.where(d: 4 * n, e: 5 * n, f: 6 * n)
.where(g: 7 * n, h: 8 * n, i: 9 * n)
.where(j: 10 * n, k: 11 * n, l: 12 * n)
.where(m: 13 * n, n: 14 * n, o: 15 * n)
.where(p: 16 * n, q: 17 * n, r: 18 * n)
.where(s: 19 * n, t: 20 * n, u: 21 * n)
.where(v: 22 * n, w: 23 * n, x: 24 * n)
.where(y: 25 * n, z: 26 * n).first
i += 1
end
end
end
end
Refactoring test
require 'active_tsv'
require 'benchmark'
module ActiveTsvBenchmarkTest
# Builds a temporary n-row, 26-column ('a'..'z') TSV table (row i has
# column j equal to i*j), wraps it in an anonymous ActiveTsv::Base
# subclass and yields [klass, n/2] (the middle row index) to the block.
# NOTE(review): Tempfile is used without an explicit require — presumably
# active_tsv loads tempfile; verify.
def run_with_temp_table(n)
Tempfile.create(["", ".tsv"]) do |f|
f.puts [*'a'..'z'].join("\t")
n.times do |i|
f.puts [*1..26].map{ |j| i * j }.join("\t")
end
bench_klass = Class.new(ActiveTsv::Base) do
self.table_path = f.path
end
f.flush
yield bench_klass, n / 2
end
end
# Benchmark: a long chain of #where calls matching the middle row.
def benchmark_where(b)
run_with_temp_table(10000) do |bench_klass, n|
b.reset_timer
i = 0
while i < b.n
bench_klass.where(a: 1 * n, b: 2 * n, c: 3 * n)
.where(d: 4 * n, e: 5 * n, f: 6 * n)
.where(g: 7 * n, h: 8 * n, i: 9 * n)
.where(j: 10 * n, k: 11 * n, l: 12 * n)
.where(m: 13 * n, n: 14 * n, o: 15 * n)
.where(p: 16 * n, q: 17 * n, r: 18 * n)
.where(s: 19 * n, t: 20 * n, u: 21 * n)
.where(v: 22 * n, w: 23 * n, x: 24 * n)
.where(y: 25 * n, z: 26 * n).first
i += 1
end
end
end
end
|
#!/opt/sensu/embedded/bin/ruby
#
# Check Cluster
#
require 'socket'
require 'net/http'
require 'rubygems'
require 'sensu'
require 'sensu/settings'
require 'sensu-plugin/check/cli'
require 'json'
class CheckCluster < Sensu::Plugin::Check::CLI
  option :cluster_name,
    :short => "-N NAME",
    :long => "--cluster-name NAME",
    :description => "Name of the cluster to use in the source of the alerts",
    :required => true

  option :check,
    :short => "-c CHECK",
    :long => "--check CHECK",
    :description => "Aggregate CHECK name",
    :required => true

  option :warning,
    :short => "-W PERCENT",
    :long => "--warning PERCENT",
    :description => "PERCENT non-ok before warning",
    :proc => proc {|a| a.to_i }

  option :critical,
    :short => "-C PERCENT",
    :long => "--critical PERCENT",
    :description => "PERCENT non-ok before critical",
    :proc => proc {|a| a.to_i }

  # Acquires a cluster-wide Redis lock, evaluates the check aggregate and
  # reports the result back to Sensu. Any RuntimeError (including the
  # "or raise" guards below) turns into a critical status.
  def run
    redis_config = sensu_settings[:redis] or raise "Redis config not available"
    redis = TinyRedisClient.new(redis_config[:host], redis_config[:port])
    lock_key = "lock:#{config[:cluster_name]}:#{config[:check]}"
    lock_interval = (cluster_check || target_check || {})[:interval] || 300
    RedisLocker.new(redis, lock_key, lock_interval).locked_run(self) do
      status, output = check_aggregate
      puts output
      send_payload status, output
      ok "Check executed successfully"
    end
  rescue RuntimeError => e
    critical "#{e.message} (#{e.class}): #{e.backtrace.inspect}"
  end

  private

  EXIT_CODES = Sensu::Plugin::EXIT_CODES

  # Minimum age (seconds) of aggregates requested from the Sensu API.
  AGGREGATE_AGE = 30

  # Returns [status, message] for the most recent aggregate of the check.
  def check_aggregate
    path = "/aggregates/#{config[:check]}"
    issued = api_request(path, :age => AGGREGATE_AGE)
    return EXIT_CODES['WARNING'], "No aggregates for #{config[:check]}" if issued.empty?
    time = issued.sort.last
    # BUG FIX: the original interpolated config[:age], which is not a
    # defined option (always nil); report the actual constant instead.
    return EXIT_CODES['WARNING'], "No aggregates older than #{AGGREGATE_AGE} seconds" unless time
    aggregate = api_request("#{path}/#{time}")
    check_thresholds(aggregate) { |status, msg| return status, msg }
    # check_pattern(aggregate) { |status, msg| return status, msg }
    return EXIT_CODES['OK'], "Aggregate looks GOOD"
  end

  # yielding means end of checking and sending payload to sensu
  def check_thresholds(aggregate)
    ok, total = aggregate.values_at("ok", "total")
    # Guard total == 0: (1 - ok/0.0) is NaN or -Infinity and #to_i raises.
    nz_pct = total.to_i.zero? ? 0 : ((1 - ok.to_f / total.to_f) * 100).to_i
    message = "Number of non-zero results exceeds threshold (#{nz_pct}% non-zero)"
    # BUG FIX: the original compared against undefined `percent_non_zero`,
    # raising NameError whenever a threshold was configured.
    if config[:critical] && nz_pct >= config[:critical]
      yield EXIT_CODES['CRITICAL'], message
    elsif config[:warning] && nz_pct >= config[:warning]
      yield EXIT_CODES['WARNING'], message
    else
      puts "Number of non-zero results: #{ok}/#{total} #{nz_pct}% - OK"
    end
  end

  # GET against the Sensu API; parses the JSON body, raises on non-2xx.
  def api_request(path, opts={})
    api = sensu_settings[:api]
    uri = URI("http://#{api[:host]}:#{api[:port]}#{path}")
    uri.query = URI.encode_www_form(opts)
    req = Net::HTTP::Get.new(uri)
    req.basic_auth api[:user], api[:password]
    res = Net::HTTP.start(uri.hostname, uri.port) do |http|
      http.request(req)
    end
    if res.is_a?(Net::HTTPSuccess)
      JSON.parse(res.body)
    else
      raise "Error querying sensu api: #{res.code} '#{res.body}'"
    end
  end

  def sensu_settings
    @sensu_settings ||=
      Sensu::Settings.get(:config_dirs => ["/etc/sensu/conf.d"]) or
      raise "Sensu settings not available"
  end

  # Sends the result as JSON to the local sensu-client socket (port 3030).
  def send_payload(status, output)
    payload = target_check.merge(
      :status => status,
      :output => output,
      :source => config[:cluster_name],
      :name => config[:check])
    payload[:runbook] = cluster_check[:runbook] if cluster_check[:runbook] != '-'
    payload[:tip] = cluster_check[:tip] if cluster_check[:tip] != '-'
    payload.delete :command
    sock = TCPSocket.new('localhost', 3030)
    sock.puts payload.to_json
    sock.close
  end

  # Settings entry for the cluster-level check; {} / env override in debug.
  def cluster_check
    return {} if ENV['DEBUG']
    return JSON.parse(ENV['DEBUG_CC']) if ENV['DEBUG_CC']
    sensu_settings[:checks][:"#{config[:cluster_name]}_#{config[:check]}"] or
      raise "#{config[:cluster_name]}_#{config[:check]} not found in sensu settings"
  end

  def target_check
    sensu_settings[:checks][config[:check]] or
      raise "#{config[:check]} not found in sensu settings"
  end
end
class RedisLocker
  # redis must respond to #echo (connection sanity check); interval is the
  # lock TTL in seconds; now is injectable for testing.
  def initialize(redis, key, interval, now = Time.now.to_i)
    raise "Redis connection check failed" unless "hello" == redis.echo("hello")
    @redis = redis
    @key = key
    @interval = interval.to_i
    @now = now
  end

  # Runs the block only if SETNX wins the lock; otherwise reports the lock
  # state via the status object (stale lock -> warning + expire, fresh
  # lock -> ok). Errors inside the block release the lock and go critical.
  def locked_run(status)
    expire if ENV['DEBUG_UNLOCK']
    if @redis.setnx(@key, @now) == 1
      puts "Lock acquired"
      begin
        expire @interval
        yield
      rescue => e
        expire
        status.critical "Releasing lock due to error: #{e} #{e.backtrace}"
      end
    elsif (ttl = @now - @redis.get(@key).to_i) > @interval
      expire
      status.warning "Locked for #{ttl} seconds, expired immediately"
    else
      status.ok "Lock expires in #{@interval - ttl} seconds"
    end
  end

  private

  # PEXPIRE takes milliseconds. BUG FIX: the original multiplied by 100,
  # which expired locks ten times too early.
  def expire(seconds=0)
    @redis.pexpire(@key, seconds*1000)
  end
end
class TinyRedisClient
  RN = "\r\n"

  def initialize(host='localhost', port=6379)
    @socket = TCPSocket.new(host, port)
  end

  # Every unknown method becomes a Redis command: the name plus args are
  # encoded with the RESP protocol and the reply is parsed synchronously.
  def method_missing(method, *args)
    args.unshift method
    data = ["*#{args.size}", *args.map {|arg| "$#{arg.to_s.size}#{RN}#{arg}"}]
    @socket.write(data.join(RN) << RN)
    parse_response
  end

  # FIX: companion to method_missing (was missing) — any command name is
  # "supported", so respond_to? reflects reality.
  def respond_to_missing?(method, include_private = false)
    true
  end

  # Parses a single RESP reply: simple string, integer, error, bulk string
  # or (recursively) a multi-bulk array. Returns nil for null replies.
  def parse_response
    case @socket.gets
    when /^\+(.*)\r\n$/ then $1
    when /^:(\d+)\r\n$/ then $1.to_i
    when /^-(.*)\r\n$/ then raise "Redis error: #{$1}"
    when /^\$([-\d]+)\r\n$/
      # Bulk reply: read payload plus trailing CRLF, then strip the CRLF.
      $1.to_i >= 0 ? @socket.read($1.to_i+2)[0..-3] : nil
    when /^\*([-\d]+)\r\n$/
      $1.to_i > 0 ? (1..$1.to_i).inject([]) { |a,_| a << parse_response } : nil
    end
  end

  def close
    @socket.close
  end
end
not enough zeroes
#!/opt/sensu/embedded/bin/ruby
#
# Check Cluster
#
require 'socket'
require 'net/http'
require 'rubygems'
require 'sensu'
require 'sensu/settings'
require 'sensu-plugin/check/cli'
require 'json'
class CheckCluster < Sensu::Plugin::Check::CLI
  option :cluster_name,
    :short => "-N NAME",
    :long => "--cluster-name NAME",
    :description => "Name of the cluster to use in the source of the alerts",
    :required => true

  option :check,
    :short => "-c CHECK",
    :long => "--check CHECK",
    :description => "Aggregate CHECK name",
    :required => true

  option :warning,
    :short => "-W PERCENT",
    :long => "--warning PERCENT",
    :description => "PERCENT non-ok before warning",
    :proc => proc {|a| a.to_i }

  option :critical,
    :short => "-C PERCENT",
    :long => "--critical PERCENT",
    :description => "PERCENT non-ok before critical",
    :proc => proc {|a| a.to_i }

  # Acquires a cluster-wide Redis lock, evaluates the check aggregate and
  # reports the result back to Sensu. Any RuntimeError (including the
  # "or raise" guards below) turns into a critical status.
  def run
    redis_config = sensu_settings[:redis] or raise "Redis config not available"
    redis = TinyRedisClient.new(redis_config[:host], redis_config[:port])
    lock_key = "lock:#{config[:cluster_name]}:#{config[:check]}"
    lock_interval = (cluster_check || target_check || {})[:interval] || 300
    RedisLocker.new(redis, lock_key, lock_interval).locked_run(self) do
      status, output = check_aggregate
      puts output
      send_payload status, output
      ok "Check executed successfully"
    end
  rescue RuntimeError => e
    critical "#{e.message} (#{e.class}): #{e.backtrace.inspect}"
  end

  private

  EXIT_CODES = Sensu::Plugin::EXIT_CODES

  # Minimum age (seconds) of aggregates requested from the Sensu API.
  AGGREGATE_AGE = 30

  # Returns [status, message] for the most recent aggregate of the check.
  def check_aggregate
    path = "/aggregates/#{config[:check]}"
    issued = api_request(path, :age => AGGREGATE_AGE)
    return EXIT_CODES['WARNING'], "No aggregates for #{config[:check]}" if issued.empty?
    time = issued.sort.last
    # BUG FIX: the original interpolated config[:age], which is not a
    # defined option (always nil); report the actual constant instead.
    return EXIT_CODES['WARNING'], "No aggregates older than #{AGGREGATE_AGE} seconds" unless time
    aggregate = api_request("#{path}/#{time}")
    check_thresholds(aggregate) { |status, msg| return status, msg }
    # check_pattern(aggregate) { |status, msg| return status, msg }
    return EXIT_CODES['OK'], "Aggregate looks GOOD"
  end

  # yielding means end of checking and sending payload to sensu
  def check_thresholds(aggregate)
    ok, total = aggregate.values_at("ok", "total")
    # Guard total == 0: (1 - ok/0.0) is NaN or -Infinity and #to_i raises.
    nz_pct = total.to_i.zero? ? 0 : ((1 - ok.to_f / total.to_f) * 100).to_i
    message = "Number of non-zero results exceeds threshold (#{nz_pct}% non-zero)"
    # BUG FIX: the original compared against undefined `percent_non_zero`,
    # raising NameError whenever a threshold was configured.
    if config[:critical] && nz_pct >= config[:critical]
      yield EXIT_CODES['CRITICAL'], message
    elsif config[:warning] && nz_pct >= config[:warning]
      yield EXIT_CODES['WARNING'], message
    else
      puts "Number of non-zero results: #{ok}/#{total} #{nz_pct}% - OK"
    end
  end

  # GET against the Sensu API; parses the JSON body, raises on non-2xx.
  def api_request(path, opts={})
    api = sensu_settings[:api]
    uri = URI("http://#{api[:host]}:#{api[:port]}#{path}")
    uri.query = URI.encode_www_form(opts)
    req = Net::HTTP::Get.new(uri)
    req.basic_auth api[:user], api[:password]
    res = Net::HTTP.start(uri.hostname, uri.port) do |http|
      http.request(req)
    end
    if res.is_a?(Net::HTTPSuccess)
      JSON.parse(res.body)
    else
      raise "Error querying sensu api: #{res.code} '#{res.body}'"
    end
  end

  def sensu_settings
    @sensu_settings ||=
      Sensu::Settings.get(:config_dirs => ["/etc/sensu/conf.d"]) or
      raise "Sensu settings not available"
  end

  # Sends the result as JSON to the local sensu-client socket (port 3030).
  def send_payload(status, output)
    payload = target_check.merge(
      :status => status,
      :output => output,
      :source => config[:cluster_name],
      :name => config[:check])
    payload[:runbook] = cluster_check[:runbook] if cluster_check[:runbook] != '-'
    payload[:tip] = cluster_check[:tip] if cluster_check[:tip] != '-'
    payload.delete :command
    sock = TCPSocket.new('localhost', 3030)
    sock.puts payload.to_json
    sock.close
  end

  # Settings entry for the cluster-level check; {} / env override in debug.
  def cluster_check
    return {} if ENV['DEBUG']
    return JSON.parse(ENV['DEBUG_CC']) if ENV['DEBUG_CC']
    sensu_settings[:checks][:"#{config[:cluster_name]}_#{config[:check]}"] or
      raise "#{config[:cluster_name]}_#{config[:check]} not found in sensu settings"
  end

  def target_check
    sensu_settings[:checks][config[:check]] or
      raise "#{config[:check]} not found in sensu settings"
  end
end
class RedisLocker
# redis must respond to #echo (used as a connection sanity check);
# interval is the lock TTL in seconds; now is injectable for testing.
def initialize(redis, key, interval, now = Time.now.to_i)
raise "Redis connection check failed" unless "hello" == redis.echo("hello")
@redis = redis
@key = key
@interval = interval.to_i
@now = now
end
# Runs the block only if SETNX wins the lock; otherwise reports the lock
# state via the status object (stale lock -> warning + expire, fresh
# lock -> ok). Errors inside the block release the lock and go critical.
def locked_run(status)
expire if ENV['DEBUG_UNLOCK']
if @redis.setnx(@key, @now) == 1
puts "Lock acquired"
begin
expire @interval
yield
rescue => e
expire
status.critical "Releasing lock due to error: #{e} #{e.backtrace}"
end
elsif (ttl = @now - @redis.get(@key).to_i) > @interval
expire
status.warning "Locked for #{ttl} seconds, expired immediately"
else
status.ok "Lock expires in #{@interval - ttl} seconds"
end
end
private
# PEXPIRE takes milliseconds, hence seconds*1000.
def expire(seconds=0)
@redis.pexpire(@key, seconds*1000)
end
end
class TinyRedisClient
  RN = "\r\n"

  def initialize(host='localhost', port=6379)
    @socket = TCPSocket.new(host, port)
  end

  # Every unknown method becomes a Redis command: the name plus args are
  # encoded with the RESP protocol and the reply is parsed synchronously.
  def method_missing(method, *args)
    args.unshift method
    data = ["*#{args.size}", *args.map {|arg| "$#{arg.to_s.size}#{RN}#{arg}"}]
    @socket.write(data.join(RN) << RN)
    parse_response
  end

  # FIX: companion to method_missing (was missing) — any command name is
  # "supported", so respond_to? reflects reality.
  def respond_to_missing?(method, include_private = false)
    true
  end

  # Parses a single RESP reply: simple string, integer, error, bulk string
  # or (recursively) a multi-bulk array. Returns nil for null replies.
  def parse_response
    case @socket.gets
    when /^\+(.*)\r\n$/ then $1
    when /^:(\d+)\r\n$/ then $1.to_i
    when /^-(.*)\r\n$/ then raise "Redis error: #{$1}"
    when /^\$([-\d]+)\r\n$/
      # Bulk reply: read payload plus trailing CRLF, then strip the CRLF.
      $1.to_i >= 0 ? @socket.read($1.to_i+2)[0..-3] : nil
    when /^\*([-\d]+)\r\n$/
      $1.to_i > 0 ? (1..$1.to_i).inject([]) { |a,_| a << parse_response } : nil
    end
  end

  def close
    @socket.close
  end
end
|
FactoryGirl.define do
# BUG FIX: Alchemy stores user roles as an array; the bare-string values
# here broke role lookups (confirmed by the follow-up factory file below).
factory :user, class: 'DummyUser' do
  sequence(:email) { |n| "john.#{n}@doe.com" }
  password 's3cr3t'

  factory :admin_user do
    alchemy_roles ['admin']
  end

  factory :member_user do
    alchemy_roles ['member']
  end

  factory :author_user do
    alchemy_roles ['author']
  end

  factory :editor_user do
    alchemy_roles ['editor']
  end
end
# Default site language (German), with sub-factories for other locales.
factory :language, :class => 'Alchemy::Language' do
name 'Deutsch'
code 'de'
default true
frontpage_name 'Intro'
page_layout 'intro'
public true
site { Alchemy::Site.first }
factory :klingonian do
name 'Klingonian'
code 'kl'
frontpage_name 'Tuq'
default false
end
factory :english do
name 'English'
code 'en'
frontpage_name 'Intro'
default false
end
end
# Base page factory; sub-factories cover language-root, public, system
# and restricted pages. Parent defaults to the language root page.
factory :page, :class => 'Alchemy::Page' do
language { Alchemy::Language.default || FactoryGirl.create(:language) }
sequence(:name) { |n| "A Page #{n}" }
parent_id { (Alchemy::Page.find_by_language_root(true) || FactoryGirl.create(:language_root_page)).id }
page_layout "standard"
# This speeds up creating of pages dramatically. Pass :do_not_autogenerate => false to generate elements
do_not_autogenerate true
factory :language_root_page do
name 'Startseite'
page_layout 'intro'
language_root true
public true
parent_id { Alchemy::Page.root.id }
end
factory :public_page do
sequence(:name) { |n| "A Public Page #{n}" }
public true
end
factory :systempage do
name "Systempage"
parent_id { Alchemy::Page.root.id }
language_root false
page_layout nil
language nil
end
factory :restricted_page do
name "Restricted page"
restricted true
end
end
# Cells, elements, pictures, contents/essences, attachments and misc
# fixtures used across the Alchemy spec suite.
factory :cell, :class => 'Alchemy::Cell' do
page { Alchemy::Page.find_by(language_root: true) || FactoryGirl.create(:language_root_page) }
name "a_cell"
end
factory :element, :class => 'Alchemy::Element' do
name 'article'
create_contents_after_create false
factory :unique_element do
unique true
name 'header'
end
end
factory :picture, :class => 'Alchemy::Picture' do
image_file File.new(File.expand_path('../../../../spec/fixtures/image.png', __FILE__))
name 'image'
image_file_name 'image.png'
upload_hash Time.now.hash
end
factory :content, :class => 'Alchemy::Content' do
name "text"
essence_type "Alchemy::EssenceText"
association :essence, :factory => :essence_text
end
factory :essence_text, :class => 'Alchemy::EssenceText' do
body ''
end
factory :essence_picture, :class => 'Alchemy::EssencePicture' do
picture
end
factory :essence_file, :class => 'Alchemy::EssenceFile' do
attachment
end
factory :attachment, :class => 'Alchemy::Attachment' do
file File.new(File.expand_path('../../../../spec/fixtures/image.png', __FILE__))
name 'image'
file_name 'image.png'
end
factory :event do
name 'My Event'
hidden_name 'not shown'
starts_at DateTime.new(2012, 03, 02, 8, 15)
ends_at DateTime.new(2012, 03, 02, 19, 30)
description "something\nfancy"
published false
entrance_fee 12.3
end
factory :site, class: 'Alchemy::Site' do
name 'A Site'
host 'domain.com'
end
end
Fixes user factories
The roles weren't arrays.
FactoryGirl.define do
# Roles are arrays — Alchemy expects a role collection, not a bare string.
factory :user, class: 'DummyUser' do
sequence(:email) { |n| "john.#{n}@doe.com" }
password 's3cr3t'
factory :admin_user do
alchemy_roles ['admin']
end
factory :member_user do
alchemy_roles ['member']
end
factory :author_user do
alchemy_roles ['author']
end
factory :editor_user do
alchemy_roles ['editor']
end
end
# Default site language (German), with sub-factories for other locales.
factory :language, :class => 'Alchemy::Language' do
name 'Deutsch'
code 'de'
default true
frontpage_name 'Intro'
page_layout 'intro'
public true
site { Alchemy::Site.first }
factory :klingonian do
name 'Klingonian'
code 'kl'
frontpage_name 'Tuq'
default false
end
factory :english do
name 'English'
code 'en'
frontpage_name 'Intro'
default false
end
end
# Base page factory; sub-factories cover language-root, public, system
# and restricted pages. Parent defaults to the language root page.
factory :page, :class => 'Alchemy::Page' do
language { Alchemy::Language.default || FactoryGirl.create(:language) }
sequence(:name) { |n| "A Page #{n}" }
parent_id { (Alchemy::Page.find_by_language_root(true) || FactoryGirl.create(:language_root_page)).id }
page_layout "standard"
# This speeds up creating of pages dramatically. Pass :do_not_autogenerate => false to generate elements
do_not_autogenerate true
factory :language_root_page do
name 'Startseite'
page_layout 'intro'
language_root true
public true
parent_id { Alchemy::Page.root.id }
end
factory :public_page do
sequence(:name) { |n| "A Public Page #{n}" }
public true
end
factory :systempage do
name "Systempage"
parent_id { Alchemy::Page.root.id }
language_root false
page_layout nil
language nil
end
factory :restricted_page do
name "Restricted page"
restricted true
end
end
# Cells, elements, pictures, contents/essences, attachments and misc
# fixtures used across the Alchemy spec suite.
factory :cell, :class => 'Alchemy::Cell' do
page { Alchemy::Page.find_by(language_root: true) || FactoryGirl.create(:language_root_page) }
name "a_cell"
end
factory :element, :class => 'Alchemy::Element' do
name 'article'
create_contents_after_create false
factory :unique_element do
unique true
name 'header'
end
end
factory :picture, :class => 'Alchemy::Picture' do
image_file File.new(File.expand_path('../../../../spec/fixtures/image.png', __FILE__))
name 'image'
image_file_name 'image.png'
upload_hash Time.now.hash
end
factory :content, :class => 'Alchemy::Content' do
name "text"
essence_type "Alchemy::EssenceText"
association :essence, :factory => :essence_text
end
factory :essence_text, :class => 'Alchemy::EssenceText' do
body ''
end
factory :essence_picture, :class => 'Alchemy::EssencePicture' do
picture
end
factory :essence_file, :class => 'Alchemy::EssenceFile' do
attachment
end
factory :attachment, :class => 'Alchemy::Attachment' do
file File.new(File.expand_path('../../../../spec/fixtures/image.png', __FILE__))
name 'image'
file_name 'image.png'
end
factory :event do
name 'My Event'
hidden_name 'not shown'
starts_at DateTime.new(2012, 03, 02, 8, 15)
ends_at DateTime.new(2012, 03, 02, 19, 30)
description "something\nfancy"
published false
entrance_fee 12.3
end
factory :site, class: 'Alchemy::Site' do
name 'A Site'
host 'domain.com'
end
end
|
module SharingTags
  # Gem release string.
  VERSION = '0.0.11'
end
update version to 0.0.12
module SharingTags
  # Gem release string.
  VERSION = '0.0.12'
end
|
module Sidekiq
  module Pool
    # Gem release string.
    VERSION = "1.5.3"
  end
end
Bump version
module Sidekiq
  module Pool
    # Gem release string.
    VERSION = "1.5.4"
  end
end
|
# coding: utf-8
require 'rubygems'
require "simple_downloader_td/version"
require "simple_downloader_td/command"
require 'msgpack'
require 'csv'
require 'json'
require 'td'
require 'td-client'
require 'zlib'
require 'fileutils'
module SimpleDownloaderTd
  # Downloads the result of a finished Treasure Data job as msgpack.gz and
  # converts it to "<job_id>.tsv" in the current directory.
  def self.download(job_id)
    # Get TD APIKEY in ~/.td/td.conf (the last "key = value" line wins).
    data = File.read(File.join(ENV['HOME'], '.td', 'td.conf'))
    data.each_line do |line|
      line.strip!
      case line
      when /^#/
        next
      when /\[(.+)\]/
        section = $~[1] # section header, currently unused
      when /^(\w+)\s*=\s*(.+?)\s*$/
        key = $~[1] # key name, currently unused
        val = $~[2]
        @APIKEY = val
      else
        STDOUT.puts "Can't read apikey."
      end
    end
    cln = TreasureData::Client.new(@APIKEY, {ssl: true})
    # get job result
    job = cln.job(job_id)
    # check job status
    unless job.finished? then
      STDOUT.puts "The job didn't finish yet."
      exit
    end
    # download job result as msgpack.gz
    begin
      # BUG FIX: `downloaded` must be initialized before the callback reads
      # it (it was a NameError on every call, silently swallowed below), and
      # the ratio must be floating point — integer division was always 0
      # until the download completed.
      downloaded = nil
      File.open("#{job_id}_tmp.msgpack.gz", "wb") do |f|
        job.result_format('msgpack.gz', f) do |compr_size|
          pct = (compr_size.to_f / job.result_size * 100).to_i
          if downloaded != pct
            STDOUT.puts "Downloaded: #{pct}%"
            downloaded = pct
          end
        end
      end
    rescue StandardError => e
      # was `rescue Exception`, which also swallowed SignalException/SystemExit
      STDERR.puts e
    end
    STDOUT.puts "Start converting..."
    # convert tsv file: header row from the schema, then one row per record
    begin
      File.open("#{job_id}.tsv", "w+") do |writer|
        src = MessagePack::Unpacker.new(Zlib::GzipReader.open("#{job_id}_tmp.msgpack.gz"))
        job.hive_result_schema.each do |schema|
          writer << schema[0] + "\t"
        end
        writer << "\n"
        src.each do |obj|
          writer << obj.join("\t") + "\n"
        end
      end
      File.delete("#{job_id}_tmp.msgpack.gz")
      STDOUT.puts "Finish."
    rescue StandardError => e
      STDERR.puts e
    end
  end
end
Revert "Fix download progress"
This reverts commit 325ce7dd906cec56811edf3840043c03d315ff77.
# coding: utf-8
require 'rubygems'
require "simple_downloader_td/version"
require "simple_downloader_td/command"
require 'msgpack'
require 'csv'
require 'json'
require 'td'
require 'td-client'
require 'zlib'
require 'fileutils'
module SimpleDownloaderTd
  # Downloads the result of a finished Treasure Data job as msgpack.gz and
  # converts it to "<job_id>.tsv" in the current directory.
  def self.download(job_id)
    # Get TD APIKEY in ~/.td/td.conf (the last "key = value" line wins).
    data = File.read(File.join(ENV['HOME'], '.td', 'td.conf'))
    data.each_line do |line|
      line.strip!
      case line
      when /^#/
        next
      when /\[(.+)\]/
        section = $~[1] # section header, currently unused
      when /^(\w+)\s*=\s*(.+?)\s*$/
        key = $~[1] # key name, currently unused
        val = $~[2]
        @APIKEY = val
      else
        STDOUT.puts "Can't read apikey."
      end
    end
    cln = TreasureData::Client.new(@APIKEY, {ssl: true})
    # get job result
    job = cln.job(job_id)
    # check job status
    unless job.finished? then
      STDOUT.puts "The job didn't finish yet."
      exit
    end
    # download job result as msgpack.gz
    begin
      File.open("#{job_id}_tmp.msgpack.gz", "wb") do |f|
        job.result_format('msgpack.gz', f) do |compr_size|
          # BUG FIX: integer division printed 0% until the very end;
          # compute the percentage in floating point.
          STDOUT.puts "Downloaded: #{(compr_size.to_f / job.result_size * 100).to_i}%"
        end
      end
    rescue StandardError => e
      # was `rescue Exception`, which also swallowed SignalException/SystemExit
      STDERR.puts e
    end
    STDOUT.puts "Start converting..."
    # convert tsv file: header row from the schema, then one row per record
    begin
      File.open("#{job_id}.tsv", "w+") do |writer|
        src = MessagePack::Unpacker.new(Zlib::GzipReader.open("#{job_id}_tmp.msgpack.gz"))
        job.hive_result_schema.each do |schema|
          writer << schema[0] + "\t"
        end
        writer << "\n"
        src.each do |obj|
          writer << obj.join("\t") + "\n"
        end
      end
      File.delete("#{job_id}_tmp.msgpack.gz")
      STDOUT.puts "Finish."
    rescue StandardError => e
      STDERR.puts e
    end
  end
end
|
module SimpleModel
  # Gem release string.
  VERSION = '1.0.1'
end
version bump
module SimpleModel
  # Gem release string.
  VERSION = '1.1.0'
end
|
module SimpleSlack
  # Gem release string.
  VERSION = '0.3.1'
end
change version
module SimpleSlack
  # Gem release string.
  VERSION = '0.3.2'
end
|
module SmartAssets
  # Gem release string.
  VERSION = "0.1.0"
end
bump to 0.2.0
module SmartAssets
  # Gem release string.
  VERSION = "0.2.0"
end
|
# frozen_string_literal: true
module SolidusI18n
  # Gem release string.
  VERSION = "2.1.0"
end
Bump SolidusI18n to 2.1.1
# frozen_string_literal: true
module SolidusI18n
  # Gem release string.
  VERSION = "2.1.1"
end
|
module SourceRoute
  # Collects TracePoint data for each traced call and renders it in the
  # configured output format. (No tests yet.)
  class Results
    DEFAULT_ATTRS = {
      call: [:defined_class, :event, :method_id],
      return: [:defined_class, :event, :method_id, :return_value]
    }

    def initialize(wrapper)
      @wrapper = wrapper
      @output_config = @wrapper.conditions.result_config
      @tp_event = @wrapper.conditions.event.to_sym
      # Default the selected attributes when tracing a single event type.
      if @output_config[:selected_attrs].nil? and [@wrapper.conditions.event].flatten.size == 1
        @output_config[:selected_attrs] = DEFAULT_ATTRS[@tp_event] - [:event]
      end
    end

    # Collects data from the trace point and dispatches it to the
    # configured output format. Returns the collected data.
    def output(trace_point_instance)
      @tp = trace_point_instance
      format = @output_config[:output_format]
      collect_data
      case format
      when Proc
        # BUG FIX: the original called format.call(tp) — `tp` was undefined
        # (should be @tp) — and converted the format with #to_sym up front,
        # so a Proc format could never reach this branch at all.
        format.call(@tp)
      else
        case format.to_sym
        when :console
          console_put
        when :test
          # do nothing at now
        else
          klass = "SourceRoute::Formats::#{format.to_s.capitalize}"
          ::SourceRoute.const_get(klass).render(self)
        end
      end
      @collect_data
    end

    private

    # Gathers trace-point attributes plus an optional trailing hash of
    # local/instance variables (dropped again if it stays empty).
    def collect_data
      collect_tp_data
      @collect_data.push({})
      collect_local_var_data
      collect_instance_var_data
      @collect_data.pop if @collect_data.last == {}
    end

    def collect_tp_data
      @collect_data = @output_config[:selected_attrs].map do |key|
        @tp.respond_to?(key) ? @tp.send(key) : nil
      end
    end

    def collect_local_var_data
      if @wrapper.conditions.result_config[:include_local_var]
        local_var_hash = {}
        @tp.binding.eval('local_variables').each do |v|
          local_var_hash[v] = @tp.binding.local_variable_get v
        end
        @collect_data.last.merge!(local_var: local_var_hash)
      end
    end

    def collect_instance_var_data
      if @wrapper.conditions.result_config[:include_instance_var]
        instance_var_hash = {}
        @tp.self.instance_variables.each do |key|
          instance_var_hash[key] = @tp.self.instance_variable_get(key)
        end
        @collect_data.last.merge!(instance_var: instance_var_hash)
      end
    end

    def console_put
      ap @collect_data
    end
  end
end
filter out :html output format. it can not be set in config cause a hard problem
module SourceRoute
  # Collects TracePoint data for each traced call and renders it in the
  # configured output format.
  class Results
    DEFAULT_ATTRS = {
      call: [:defined_class, :event, :method_id],
      return: [:defined_class, :event, :method_id, :return_value]
    }

    def initialize(wrapper)
      @wrapper = wrapper
      @output_config = @wrapper.conditions.result_config
      @tp_event = @wrapper.conditions.event.to_sym
      # Default the selected attributes when tracing a single event type.
      if @output_config[:selected_attrs].nil? and [@wrapper.conditions.event].flatten.size == 1
        @output_config[:selected_attrs] = DEFAULT_ATTRS[@tp_event] - [:event]
      end
    end

    # Collects data from the trace point and dispatches it to the
    # configured output format. Returns the collected data.
    def output(trace_point_instance)
      @tp = trace_point_instance
      format = @output_config[:output_format]
      collect_data
      case format
      when Proc
        # BUG FIX: the original called format.call(tp) — `tp` was undefined
        # (should be @tp) — and converted the format with #to_sym up front,
        # so a Proc format could never reach this branch at all.
        format.call(@tp)
      else
        case format.to_sym
        when :console
          console_put
        when :html
          # HTML cannot be generated here: rendering needs to run once the
          # traced application has finished, which we cannot detect yet.
        when :test
          # do nothing at now
        else
          klass = "SourceRoute::Formats::#{format.to_s.capitalize}"
          ::SourceRoute.const_get(klass).render(self)
        end
      end
      @collect_data
    end

    private

    # Gathers trace-point attributes plus an optional trailing hash of
    # local/instance variables (dropped again if it stays empty).
    def collect_data
      collect_tp_data
      @collect_data.push({})
      collect_local_var_data
      collect_instance_var_data
      @collect_data.pop if @collect_data.last == {}
    end

    def collect_tp_data
      @collect_data = @output_config[:selected_attrs].map do |key|
        @tp.respond_to?(key) ? @tp.send(key) : nil
      end
    end

    def collect_local_var_data
      if @wrapper.conditions.result_config[:include_local_var]
        local_var_hash = {}
        @tp.binding.eval('local_variables').each do |v|
          local_var_hash[v] = @tp.binding.local_variable_get v
        end
        @collect_data.last.merge!(local_var: local_var_hash)
      end
    end

    def collect_instance_var_data
      if @wrapper.conditions.result_config[:include_instance_var]
        instance_var_hash = {}
        @tp.self.instance_variables.each do |key|
          instance_var_hash[key] = @tp.self.instance_variable_get(key)
        end
        @collect_data.last.merge!(instance_var: instance_var_hash)
      end
    end

    def console_put
      ap @collect_data
    end
  end
end
|
module SourceRoute
  # Gem release string.
  VERSION = '0.1.3'
end
Version 0.1.4. No new feature — just an ugly but important fix.
module SourceRoute
  # Gem release string.
  VERSION = '0.1.4'
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: rack-ga-track 0.4.0 ruby lib
Gem::Specification.new do |s|
s.name = "rack-ga-track"
s.version = "0.4.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Daniel Nolan"]
s.date = "2014-07-21"
s.description = "If the user visits via a Google Analytics Campaign link,\n this middleware will track utm_source, utm_content, utm_term, utm_medium, utm_campaign, and time."
s.email = "dnolan@t1dexchange.org"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"lib/rack-ga-track.rb",
"rack-ga-track.gemspec",
"spec/helper.rb",
"spec/rack_ga_track_spec.rb"
]
s.homepage = "http://github.com/T1D/rack-ga-track"
s.licenses = ["MIT"]
s.rubygems_version = "2.3.0"
s.summary = "Tracks referrals via Google Analytics Campaign links."
# Jeweler emits three equivalent dependency lists to stay compatible with
# pre-1.2 RubyGems, which lacked add_runtime_dependency semantics.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rack>, [">= 1.5.2", "~> 1.5"])
s.add_development_dependency(%q<bundler>, [">= 1.6.2", "~> 1.6"])
s.add_development_dependency(%q<jeweler>, [">= 2.0.1", "~> 2.0"])
s.add_development_dependency(%q<simplecov>, [">= 0.8.2", "~> 0.8"])
s.add_development_dependency(%q<rack-test>, [">= 0.6.2", "~> 0.6"])
s.add_development_dependency(%q<minitest>, [">= 5.4.0", "~> 5.4"])
s.add_development_dependency(%q<timecop>, [">= 0.7.1", "~> 0.7"])
else
s.add_dependency(%q<rack>, [">= 1.5.2", "~> 1.5"])
s.add_dependency(%q<bundler>, [">= 1.6.2", "~> 1.6"])
s.add_dependency(%q<jeweler>, [">= 2.0.1", "~> 2.0"])
s.add_dependency(%q<simplecov>, [">= 0.8.2", "~> 0.8"])
s.add_dependency(%q<rack-test>, [">= 0.6.2", "~> 0.6"])
s.add_dependency(%q<minitest>, [">= 5.4.0", "~> 5.4"])
s.add_dependency(%q<timecop>, [">= 0.7.1", "~> 0.7"])
end
else
s.add_dependency(%q<rack>, [">= 1.5.2", "~> 1.5"])
s.add_dependency(%q<bundler>, [">= 1.6.2", "~> 1.6"])
s.add_dependency(%q<jeweler>, [">= 2.0.1", "~> 2.0"])
s.add_dependency(%q<simplecov>, [">= 0.8.2", "~> 0.8"])
s.add_dependency(%q<rack-test>, [">= 0.6.2", "~> 0.6"])
s.add_dependency(%q<minitest>, [">= 5.4.0", "~> 5.4"])
s.add_dependency(%q<timecop>, [">= 0.7.1", "~> 0.7"])
end
end
Regenerate gemspec for version 0.4.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: rack-ga-track 0.4.0 ruby lib
Gem::Specification.new do |s|
s.name = "rack-ga-track"
s.version = "0.4.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Daniel Nolan"]
s.date = "2014-07-23"
s.description = "If the user visits via a Google Analytics Campaign link,\n this middleware will track utm_source, utm_content, utm_term, utm_medium, utm_campaign, and time."
s.email = "dnolan@t1dexchange.org"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"lib/rack-ga-track.rb",
"rack-ga-track.gemspec",
"spec/helper.rb",
"spec/rack_ga_track_spec.rb"
]
s.homepage = "http://github.com/T1D/rack-ga-track"
s.licenses = ["MIT"]
s.rubygems_version = "2.3.0"
s.summary = "Tracks referrals via Google Analytics Campaign links."
# Jeweler emits three equivalent dependency lists to stay compatible with
# pre-1.2 RubyGems, which lacked add_runtime_dependency semantics.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rack>, [">= 1.5.2", "~> 1.5"])
s.add_development_dependency(%q<bundler>, [">= 1.6.2", "~> 1.6"])
s.add_development_dependency(%q<jeweler>, [">= 2.0.1", "~> 2.0"])
s.add_development_dependency(%q<simplecov>, [">= 0.8.2", "~> 0.8"])
s.add_development_dependency(%q<rack-test>, [">= 0.6.2", "~> 0.6"])
s.add_development_dependency(%q<minitest>, [">= 5.4.0", "~> 5.4"])
s.add_development_dependency(%q<timecop>, [">= 0.7.1", "~> 0.7"])
else
s.add_dependency(%q<rack>, [">= 1.5.2", "~> 1.5"])
s.add_dependency(%q<bundler>, [">= 1.6.2", "~> 1.6"])
s.add_dependency(%q<jeweler>, [">= 2.0.1", "~> 2.0"])
s.add_dependency(%q<simplecov>, [">= 0.8.2", "~> 0.8"])
s.add_dependency(%q<rack-test>, [">= 0.6.2", "~> 0.6"])
s.add_dependency(%q<minitest>, [">= 5.4.0", "~> 5.4"])
s.add_dependency(%q<timecop>, [">= 0.7.1", "~> 0.7"])
end
else
s.add_dependency(%q<rack>, [">= 1.5.2", "~> 1.5"])
s.add_dependency(%q<bundler>, [">= 1.6.2", "~> 1.6"])
s.add_dependency(%q<jeweler>, [">= 2.0.1", "~> 2.0"])
s.add_dependency(%q<simplecov>, [">= 0.8.2", "~> 0.8"])
s.add_dependency(%q<rack-test>, [">= 0.6.2", "~> 0.6"])
s.add_dependency(%q<minitest>, [">= 5.4.0", "~> 5.4"])
s.add_dependency(%q<timecop>, [">= 0.7.1", "~> 0.7"])
end
end
|
module Spinjs
  module Rails
    # Gem release string.
    VERSION = '0.0.3'
  end
end
Bump version
module Spinjs
  module Rails
    # Gem release string.
    VERSION = '0.0.4'
  end
end
|
module SportsSouth
  # Gem release string.
  VERSION = "2.0.6"
end
Version 2.0.7
module SportsSouth
  # Gem release string.
  VERSION = "2.0.7"
end
|
module SportsSouth
  # Gem release string.
  VERSION = "2.0"
end
Version 2.0.1
# Gem version for the sports_south gem (post version bump).
module SportsSouth
  VERSION = '2.0.1'
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for ragios-client 0.0.1 (jeweler-generated; edit the Rakefile instead).
Gem::Specification.new do |s|
  s.name = "ragios-client"
  s.version = "0.0.1"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["obi-a"]
  s.date = "2013-12-15"
  s.description = "ruby client for ragios"
  s.email = "obioraakubue@yahoo.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  # File manifest is frozen by jeweler at generation time.
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/ragios-client.rb",
    "spec/ragios-client_spec.rb"
  ]
  s.homepage = "http://github.com/obi-a/ragios-client"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.24"
  s.summary = "Ruby client for ragios"
  # Dependency declarations branch on RubyGems capabilities for
  # compatibility with very old RubyGems versions.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<rest-client>, [">= 0"])
      s.add_runtime_dependency(%q<yajl-ruby>, [">= 0"])
      s.add_development_dependency(%q<shoulda>, [">= 0"])
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.7"])
      s.add_development_dependency(%q<simplecov>, [">= 0"])
    else
      s.add_dependency(%q<rest-client>, [">= 0"])
      s.add_dependency(%q<yajl-ruby>, [">= 0"])
      s.add_dependency(%q<shoulda>, [">= 0"])
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<bundler>, ["~> 1.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
      s.add_dependency(%q<simplecov>, [">= 0"])
    end
  else
    s.add_dependency(%q<rest-client>, [">= 0"])
    s.add_dependency(%q<yajl-ruby>, [">= 0"])
    s.add_dependency(%q<shoulda>, [">= 0"])
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<bundler>, ["~> 1.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
    s.add_dependency(%q<simplecov>, [">= 0"])
  end
end
Regenerate gemspec for version 0.0.2
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for ragios-client 0.0.2 (jeweler-generated; edit the Rakefile instead).
Gem::Specification.new do |s|
  s.name = "ragios-client"
  s.version = "0.0.2"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["obi-a"]
  s.date = "2014-04-05"
  s.description = "ruby client for ragios"
  s.email = "obioraakubue@yahoo.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  # File manifest is frozen by jeweler at generation time.
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/ragios-client.rb",
    "ragios-client.gemspec",
    "spec/ragios-client_spec.rb"
  ]
  s.homepage = "http://github.com/obi-a/ragios-client"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.24"
  s.summary = "Ruby client for ragios"
  # Dependency declarations branch on RubyGems capabilities for
  # compatibility with very old RubyGems versions.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<rest-client>, [">= 0"])
      s.add_runtime_dependency(%q<yajl-ruby>, [">= 0"])
      s.add_development_dependency(%q<pry>, [">= 0"])
      s.add_development_dependency(%q<shoulda>, [">= 0"])
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.7"])
      s.add_development_dependency(%q<simplecov>, [">= 0"])
    else
      s.add_dependency(%q<rest-client>, [">= 0"])
      s.add_dependency(%q<yajl-ruby>, [">= 0"])
      s.add_dependency(%q<pry>, [">= 0"])
      s.add_dependency(%q<shoulda>, [">= 0"])
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<bundler>, ["~> 1.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
      s.add_dependency(%q<simplecov>, [">= 0"])
    end
  else
    s.add_dependency(%q<rest-client>, [">= 0"])
    s.add_dependency(%q<yajl-ruby>, [">= 0"])
    s.add_dependency(%q<pry>, [">= 0"])
    s.add_dependency(%q<shoulda>, [">= 0"])
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<bundler>, ["~> 1.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
    s.add_dependency(%q<simplecov>, [">= 0"])
  end
end
|
# Thin wrapper around a client object for one REST endpoint.
class Resource
  # endpoint: base path for this resource; client: object responding to #request.
  def initialize(endpoint, client)
    @endpoint = endpoint
    @client = client
  end

  # Fetch one record by id, or the whole collection when id is nil.
  def get(id = nil)
    path = id.nil? ? @endpoint : "#{@endpoint}/#{id}"
    @client.request(path)
  end

  # Persist a record; targets "<endpoint>/<id>" when the body carries an
  # "id" string key, otherwise the bare collection endpoint.
  def save(body)
    body = body.to_h if body.is_a?(OpenStruct)
    path = body.has_key?("id") ? "#{@endpoint}/#{body['id']}" : @endpoint
    @client.request(path, body)
  end
end
add filled option to get
# Thin wrapper around a client object for one REST endpoint.
class Resource
  # endpoint: base path for this resource; client: object responding to #request.
  def initialize(endpoint, client)
    @endpoint = endpoint
    @client = client
  end

  # Fetch one record (optionally "filled") when id is given, otherwise the
  # whole collection. `filled` defaults to false so existing callers are
  # unaffected; previously the filled variant could not be requested through
  # this entry point at all because `get` never forwarded it to getById.
  def get(id = nil, filled = false)
    if(id != nil)
      getById(id, filled)
    else
      getMany
    end
  end

  # GET "<endpoint>/<id>" or "<endpoint>/<id>/filled" when filled is truthy.
  def getById(id, filled = false)
    fill = filled ? "/filled" : ""
    @client.request("#{@endpoint}/#{id}#{fill}")
  end

  # GET the bare collection endpoint.
  def getMany()
    @client.request(@endpoint)
  end

  # Persist a record; targets "<endpoint>/<id>" when the body carries an
  # "id" string key, otherwise the bare collection endpoint.
  def save(body)
    if(body.is_a?(OpenStruct))
      body = body.to_h
    end
    if(body.has_key?("id"))
      path = "#{@endpoint}/#{body['id']}"
    else
      path = @endpoint
    end
    @client.request(path, body)
  end
end
# -*- encoding: utf-8 -*-
# Gemspec for rails_version; the version constant comes from lib/rails_version/version.
require File.expand_path('../lib/rails_version/version', __FILE__)

Gem::Specification.new do |gem|
  gem.authors = ["Jamon Holmgren"]
  gem.email = ["jamon@clearsightstudio.com"]
  gem.description = %q{ Pings a server and reports the current app's Rails version. }
  gem.summary = %q{ Pings a server and reports the current app's Rails version. }
  gem.homepage = ""
  # Manifest is everything git tracks, plus the legacy Rails plugin init file.
  gem.files = `git ls-files`.split($\)
  gem.files << "rails.init.rb"
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files = gem.files.grep(%r{^(test)/})
  gem.name = "rails_version"
  gem.require_paths = ["lib", "rails"]
  gem.version = RailsVersion::VERSION
  gem.add_development_dependency 'actionpack'
end
Removed unnecessary rails init file ref
# -*- encoding: utf-8 -*-
# Gemspec for rails_version; the version constant comes from lib/rails_version/version.
require File.expand_path('../lib/rails_version/version', __FILE__)

Gem::Specification.new do |gem|
  gem.authors = ["Jamon Holmgren"]
  gem.email = ["jamon@clearsightstudio.com"]
  gem.description = %q{ Pings a server and reports the current app's Rails version. }
  gem.summary = %q{ Pings a server and reports the current app's Rails version. }
  gem.homepage = ""
  # Manifest is everything git tracks at packaging time.
  gem.files = `git ls-files`.split($\)
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files = gem.files.grep(%r{^(test)/})
  gem.name = "rails_version"
  gem.require_paths = ["lib", "rails"]
  gem.version = RailsVersion::VERSION
  gem.add_development_dependency 'actionpack'
end
|
require 'sprockets-amd/view_helpers'
module SprocketsAmd
  # Rails integration: registers the view helpers when the app boots.
  class Engine < Rails::Engine
    initializer "sprockets-amd.view_helpers" do
      # Mixes ViewHelpers directly into every ActionView::Base instance.
      ActionView::Base.send :include, ViewHelpers
    end
  end
end
change way of adding helpers
require 'sprockets-amd/view_helpers'
module SprocketsAmd
  # Rails integration: exposes ViewHelpers to controllers/views on every
  # code reload (to_prepare) instead of patching ActionView::Base once.
  class Engine < Rails::Engine
    # Rails yields the application instance to initializer blocks; the
    # original block referenced `app` without declaring the block
    # parameter, which raises NameError when the initializer runs.
    initializer "sprockets-amd.view_helpers" do |app|
      app.config.to_prepare do
        ActionController::Base.send :helper, ViewHelpers
      end
    end
  end
end
# Installs packages I use regularly. Homebrew is required for this to
# work.
#
# Homebrew:
# /usr/bin/ruby -e "$(/usr/bin/curl -fsSL https://raw.github.com/mxcl/homebrew/master/Library/Contributions/install_homebrew.rb)"
#
# rbenv:
# git clone git://github.com/sstephenson/rbenv.git $HOME/.rbenv
#
# NPM:
# curl http://npmjs.org/install.sh | sh
# Rake tasks that provision a dev machine via Homebrew, RubyGems and Heroku.
namespace :packages do
  # Homebrew formulae installed from stable.
  PACKAGES = %w(
    ack
    couchdb
    ctags
    erlang
    git
    git-extras
    graphviz
    htop-osx
    hub
    jsl
    libxml2
    libxslt
    lorem
    memcached
    mongodb
    moreutils
    node
    phantomjs
    postgis
    postgresql
    pv
    redis
    rlwrap
    ruby-build
    siege
    tree
    vimpager
    wget
    zeromq
    zsh
    zsh-completions
  )

  # Formulae built from their repository HEAD.
  HEAD_PACKAGES = %w(
    rebar
    willgit
  )

  # Gems installed globally.
  GEMS = %w(
    cocoapods
    consular
    consular-iterm
    gem-browse
    gem-ctags
    motion-cocoapods
    pry
  )

  task :homebrew do
    # MacVim wants HEAD and its own vim to override the system one.
    system 'brew install --HEAD --override-system-vim macvim'
    system "brew install #{PACKAGES.join(' ')}"
    system "brew install --HEAD #{HEAD_PACKAGES.join(' ')}"
  end

  task :rubygems do
    system "gem install #{GEMS.join(' ')}"
  end

  task :heroku_plugins do
    system 'heroku plugins:install git://github.com/ddollar/heroku-accounts.git'
  end

  # NOTE(review): :taps is not defined in this view of the file -- presumably
  # defined elsewhere; verify it exists before running `rake packages:install`.
  desc 'Install dev dependencies'
  task :install => [:taps, :homebrew, :rubygems]
end
Add Heroku and Rbenv plugins to rake install
# Installs packages I use regularly. Homebrew is required for this to
# work.
#
# Homebrew:
# /usr/bin/ruby -e "$(/usr/bin/curl -fsSL https://raw.github.com/mxcl/homebrew/master/Library/Contributions/install_homebrew.rb)"
#
# rbenv:
# git clone git://github.com/sstephenson/rbenv.git $HOME/.rbenv
#
# NPM:
# curl http://npmjs.org/install.sh | sh
# Rake tasks that provision a dev machine via Homebrew, RubyGems, rbenv
# plugins and Heroku plugins.
namespace :packages do
  # Homebrew formulae installed from stable.
  PACKAGES = %w(
    ack
    couchdb
    ctags
    erlang
    git
    git-extras
    graphviz
    htop-osx
    hub
    jsl
    libxml2
    libxslt
    lorem
    memcached
    mongodb
    moreutils
    node
    phantomjs
    postgis
    postgresql
    pv
    redis
    rlwrap
    ruby-build
    siege
    tree
    vimpager
    wget
    zeromq
    zsh
    zsh-completions
  )

  # Formulae built from their repository HEAD.
  HEAD_PACKAGES = %w(
    rebar
    willgit
  )

  # Gems installed globally.
  GEMS = %w(
    cocoapods
    consular
    consular-iterm
    gem-browse
    gem-ctags
    motion-cocoapods
    pry
  )

  task :homebrew do
    # MacVim wants HEAD and its own vim to override the system one.
    system 'brew install --HEAD --override-system-vim macvim'
    system "brew install #{PACKAGES.join(' ')}"
    system "brew install --HEAD #{HEAD_PACKAGES.join(' ')}"
  end

  task :rubygems do
    system "gem install #{GEMS.join(' ')}"
  end

  task :rbenv_plugins do
    %w( git://github.com/sstephenson/ruby-build.git
        git://github.com/tpope/rbenv-ctags.git
        git://github.com/tpope/rbenv-readline.git ).each do |url|
      system "git clone #{url} ~/.rbenv/plugins/#{File.basename(url, '.git')}"
    end
  end

  task :heroku_plugins do
    %w( git://github.com/ddollar/heroku-accounts.git
        https://github.com/tpope/heroku-binstubs.git
        https://github.com/tpope/heroku-wildcards.git
        https://github.com/tpope/heroku-remote.git
        https://github.com/tpope/heroku-surrogate.git
        https://github.com/tpope/heroku-pgbackups-pull.git ).each do |url|
      system "heroku plugins:install #{url}"
    end
  end

  # The rbenv and Heroku plugin tasks existed but were never wired into
  # :install, so `rake packages:install` silently skipped them; they are
  # now part of the default install as the task description promises.
  desc 'Install dev dependencies'
  task :install => [:taps, :homebrew, :rubygems, :rbenv_plugins, :heroku_plugins]
end
|
# Comprehensively test a formula or pull request.
#
# Usage: brew test-bot [options...] <pull-request|formula>
#
# Options:
# --log: Writes log files under ./brewbot/
# --html: Writes html and log files under ./brewbot/
# --comment: Comment on the pull request
# --clean: Clean the Homebrew directory. Very dangerous. Use with care.
require 'formula'
require 'utils'
require 'date'
# Directory of contributed brew commands; holds the --html templates/CSS.
HOMEBREW_CONTRIBUTED_CMDS = HOMEBREW_REPOSITORY + "Library/Contributions/cmds/"
# One shell-command step of a test-bot run; tracks and reports its status.
class Step
  attr_reader :command
  attr_accessor :status

  # Registers itself on the owning Test and renders the initial HTML page.
  def initialize test, command
    @test = test
    @category = test.category
    @command = command
    # e.g. "brew install foo" -> "install" (dashes stripped for file names).
    @name = command.split[1].delete '-'
    @status = :running
    @test.steps << self
    write_html
  end

  # Log destination for this step; /dev/null unless logging was requested.
  def log_file_path full_path=true
    return "/dev/null" unless ARGV.include? "--log" or ARGV.include? "--html"
    file = "#{@category}.#{@name}.txt"
    return file unless @test.log_root and full_path
    @test.log_root + file
  end

  # Terminal colour name matching the current status.
  def status_colour
    case @status
    when :passed then "green"
    when :running then "orange"
    when :failed then "red"
    end
  end

  def status_upcase
    @status.to_s.upcase
  end

  # Prints the command, tab-padding so the status column aligns near col 80.
  def puts_command
    print "#{Tty.blue}==>#{Tty.white} #{@command}#{Tty.reset}"
    tabs = (80 - "PASSED".length + 1 - @command.length) / 8
    tabs.times{ print "\t" }
    $stdout.flush
  end

  def puts_result
    puts "#{Tty.send status_colour}#{status_upcase}#{Tty.reset}"
  end

  # Re-renders this commit's HTML page (only when --html was given).
  def write_html
    return unless @test.log_root and ARGV.include? "--html"
    open(@test.log_root + "index.html", "w") do |index|
      commit_html, css = @test.commit_html_and_css
      index.write commit_html.result binding
    end
  end

  # Runs `command` through the shell, capturing output into the log file,
  # and records pass/fail from the exit status.
  def self.run test, command
    step = new test, command
    step.puts_command
    `#{step.command} &>#{step.log_file_path}`
    step.status = $?.success? ? :passed : :failed
    step.puts_result
    step.write_html
  end
end
# One test-bot run: checks out the code under test, executes the brew
# steps, and records results (optionally as HTML/log files).
class Test
  attr_reader :log_root, :category, :name
  # NOTE(review): :core_changed is initialized to false and never set true
  # anywhere in this file -- #download assigns @homebrew_changed instead,
  # so `test.homebrew if test.core_changed` in self.run can never fire.
  # Looks like a variable-name mismatch; confirm intent.
  attr_reader :core_changed, :formulae
  attr_accessor :steps

  # HTML templates/CSS shared by every Test instance, loaded at most once.
  @@css = @@index_html = @@commit_html = nil

  def commit_html_and_css
    return @@commit_html, @@css
  end

  # arg is either a formula name or a pull-request number.
  def initialize arg
    begin
      Formula.factory arg
    rescue FormulaUnavailableError
      # Not a formula: only a positive integer (PR number) is accepted.
      odie "#{arg} is not a pull request number or formula." unless arg.to_i > 0
      @url = arg
      @formulae = []
    else
      @url = nil
      @formulae = [arg]
    end
    @start_sha1 = nil
    @category = __method__
    @steps = []
    @core_changed = false
    @brewbot_root = Pathname.pwd + "brewbot"
    FileUtils.mkdir_p @brewbot_root if ARGV.include? "--log" or ARGV.include? "--html"
    if ARGV.include? "--html" and not @@css
      require 'erb'
      root = HOMEBREW_CONTRIBUTED_CMDS/"brew-test-bot"
      @@css = IO.read root + "brew-test-bot.css"
      @@index_html = ERB.new IO.read root + "brew-test-bot.index.html.erb"
      @@commit_html = ERB.new IO.read root + "brew-test-bot.commit.html.erb"
    end
  end

  # Records this run's status and regenerates the top-level index page
  # listing every recorded run under the brewbot root.
  def write_root_html status
    return unless ARGV.include? "--html"
    FileUtils.mv Dir.glob("*.txt"), @log_root
    open(@log_root + "status.txt", "w") do |file|
      file.write status
    end
    dirs = []
    dates = []
    statuses = []
    Pathname.glob("#{@brewbot_root}/*/status.txt").each do |result|
      dirs << result.dirname.basename
      status_file = result.dirname + "status.txt"
      dates << File.mtime(status_file).strftime("%T %D")
      statuses << IO.read(status_file)
    end
    open(@brewbot_root + "index.html", "w") do |index|
      css = @@css
      index.write @@index_html.result binding
    end
  end

  # Fetches the code to test (pulls the PR when one was given) and works
  # out which formulae the diff touched.
  def download
    def current_sha1
      `git rev-parse --short HEAD`.strip
    end
    def current_branch
      `git symbolic-ref HEAD`.slice!("refs/heads/").strip
    end
    @category = __method__
    if @url
      `git am --abort 2>/dev/null`
      test "brew update" if current_branch == "master"
      @start_sha1 = current_sha1
      test "brew pull --clean #{@url}"
      end_sha1 = current_sha1
    else
      @start_sha1 = end_sha1 = current_sha1
    end
    name_prefix = @url ? @url : @formulae.first
    @name = "#{name_prefix}-#{end_sha1}"
    @log_root = @brewbot_root + @name
    FileUtils.mkdir_p @log_root if ARGV.include? "--log" or ARGV.include? "--html"
    write_root_html :running
    return unless @url and @start_sha1 != end_sha1 and steps.last.status == :passed
    `git diff #{@start_sha1}..#{end_sha1} --name-status`.each_line do |line|
      status, filename = line.split
      # Don't try and do anything to removed files.
      if (status == 'A' or status == 'M')
        if filename.include? '/Formula/'
          @formulae << File.basename(filename, '.rb')
        end
      end
      # NOTE(review): @homebrew_changed is never read; see attr_reader note.
      if filename.include? '/Homebrew/' or filename.include? 'bin/brew'
        @homebrew_changed = true
      end
    end
  end

  # Sanity checks before testing any formula.
  def setup
    @category = __method__
    test "brew doctor"
    test "brew --env"
    test "brew --config"
  end

  # Audit, build, test, bottle and uninstall one formula; later steps are
  # skipped if the install step fails.
  def formula formula
    @category = __method__.to_s + ".#{formula}"
    test "brew audit #{formula}"
    test "brew install --verbose --build-bottle #{formula}"
    return unless steps.last.status == :passed
    test "brew test #{formula}"
    test "brew bottle #{formula}"
    test "brew uninstall #{formula}"
  end

  # Runs Homebrew's own test suite.
  def homebrew
    @category = __method__
    test "brew tests"
  end

  # Returns the repository to its pre-run state; with --clean this hard
  # resets and removes untracked files (destructive).
  def cleanup
    @category = __method__
    if ARGV.include? "--clean"
      test "git reset --hard origin/master"
      test "git clean --force -dx"
    else
      `git diff --exit-code HEAD 2>/dev/null`
      odie "Uncommitted changes, aborting." unless $?.success?
      test "git reset --hard #{@start_sha1}" if @start_sha1
    end
  end

  def test cmd
    Step.run self, cmd
  end

  # Summarises step results, writes them out, and optionally posts a
  # comment on the GitHub pull request.
  def check_results
    message = "All tests passed and raring to brew."
    status = :passed
    steps.each do |step|
      case step.status
      when :passed then next
      when :running then raise
      when :failed then
        if status == :passed
          status = :failed
          message = ""
        end
        message += "#{step.command}: #{step.status.to_s.upcase}\n"
      end
    end
    write_root_html status
    if ARGV.include? "--comment" and @url
      # Credentials come from ~/.brewbot in "user:password" form.
      username, password = IO.read(File.expand_path('~/.brewbot')).split(':')
      url = "https://api.github.com/repos/mxcl/homebrew/issues/#{@url}/comments"
      require 'vendor/multi_json'
      json = MultiJson.encode(:body => message)
      curl url, "-X", "POST", "--user", "#{username}:#{password}", "--data", json, "-o", "/dev/null"
    end
  end

  # Full pipeline for one argument.
  def self.run url
    test = new url
    test.cleanup
    test.download
    test.setup
    test.formulae.each do |f|
      test.formula f
    end
    test.homebrew if test.core_changed
    test.cleanup
    test.check_results
  end
end
# Entry point: each named argument is a PR number or formula to test.
if ARGV.empty?
  odie 'This command requires at least one argument containing a pull request number or formula.'
end
# All git/brew commands below assume the Homebrew repository as CWD.
Dir.chdir HOMEBREW_REPOSITORY
ARGV.named.each do|arg|
  Test.run arg
end
brew-test-bot: Only run brew-test if test defined.
# Comprehensively test a formula or pull request.
#
# Usage: brew test-bot [options...] <pull-request|formula>
#
# Options:
# --log: Writes log files under ./brewbot/
# --html: Writes html and log files under ./brewbot/
# --comment: Comment on the pull request
# --clean: Clean the Homebrew directory. Very dangerous. Use with care.
require 'formula'
require 'utils'
require 'date'
# Directory of contributed brew commands; holds the --html templates/CSS.
HOMEBREW_CONTRIBUTED_CMDS = HOMEBREW_REPOSITORY + "Library/Contributions/cmds/"
# One shell-command step of a test-bot run; tracks and reports its status.
class Step
  attr_reader :command
  attr_accessor :status

  # Registers itself on the owning Test and renders the initial HTML page.
  def initialize test, command
    @test = test
    @category = test.category
    @command = command
    # e.g. "brew install foo" -> "install" (dashes stripped for file names).
    @name = command.split[1].delete '-'
    @status = :running
    @test.steps << self
    write_html
  end

  # Log destination for this step; /dev/null unless logging was requested.
  def log_file_path full_path=true
    return "/dev/null" unless ARGV.include? "--log" or ARGV.include? "--html"
    file = "#{@category}.#{@name}.txt"
    return file unless @test.log_root and full_path
    @test.log_root + file
  end

  # Terminal colour name matching the current status.
  def status_colour
    case @status
    when :passed then "green"
    when :running then "orange"
    when :failed then "red"
    end
  end

  def status_upcase
    @status.to_s.upcase
  end

  # Prints the command, tab-padding so the status column aligns near col 80.
  def puts_command
    print "#{Tty.blue}==>#{Tty.white} #{@command}#{Tty.reset}"
    tabs = (80 - "PASSED".length + 1 - @command.length) / 8
    tabs.times{ print "\t" }
    $stdout.flush
  end

  def puts_result
    puts "#{Tty.send status_colour}#{status_upcase}#{Tty.reset}"
  end

  # Re-renders this commit's HTML page (only when --html was given).
  def write_html
    return unless @test.log_root and ARGV.include? "--html"
    open(@test.log_root + "index.html", "w") do |index|
      commit_html, css = @test.commit_html_and_css
      index.write commit_html.result binding
    end
  end

  # Runs `command` through the shell, capturing output into the log file,
  # and records pass/fail from the exit status.
  def self.run test, command
    step = new test, command
    step.puts_command
    `#{step.command} &>#{step.log_file_path}`
    step.status = $?.success? ? :passed : :failed
    step.puts_result
    step.write_html
  end
end
# One test-bot run: checks out the code under test, executes the brew
# steps, and records results (optionally as HTML/log files).
class Test
  attr_reader :log_root, :category, :name
  attr_reader :core_changed, :formulae
  attr_accessor :steps

  # HTML templates/CSS shared by every Test instance, loaded at most once.
  @@css = @@index_html = @@commit_html = nil

  def commit_html_and_css
    return @@commit_html, @@css
  end

  # arg is either a formula name or a pull-request number.
  def initialize arg
    begin
      Formula.factory arg
    rescue FormulaUnavailableError
      # Not a formula: only a positive integer (PR number) is accepted.
      odie "#{arg} is not a pull request number or formula." unless arg.to_i > 0
      @url = arg
      @formulae = []
    else
      @url = nil
      @formulae = [arg]
    end
    @start_sha1 = nil
    @category = __method__
    @steps = []
    @core_changed = false
    @brewbot_root = Pathname.pwd + "brewbot"
    FileUtils.mkdir_p @brewbot_root if ARGV.include? "--log" or ARGV.include? "--html"
    if ARGV.include? "--html" and not @@css
      require 'erb'
      root = HOMEBREW_CONTRIBUTED_CMDS/"brew-test-bot"
      @@css = IO.read root + "brew-test-bot.css"
      @@index_html = ERB.new IO.read root + "brew-test-bot.index.html.erb"
      @@commit_html = ERB.new IO.read root + "brew-test-bot.commit.html.erb"
    end
  end

  # Records this run's status and regenerates the top-level index page
  # listing every recorded run under the brewbot root.
  def write_root_html status
    return unless ARGV.include? "--html"
    FileUtils.mv Dir.glob("*.txt"), @log_root
    open(@log_root + "status.txt", "w") do |file|
      file.write status
    end
    dirs = []
    dates = []
    statuses = []
    Pathname.glob("#{@brewbot_root}/*/status.txt").each do |result|
      dirs << result.dirname.basename
      status_file = result.dirname + "status.txt"
      dates << File.mtime(status_file).strftime("%T %D")
      statuses << IO.read(status_file)
    end
    open(@brewbot_root + "index.html", "w") do |index|
      css = @@css
      index.write @@index_html.result binding
    end
  end

  # Fetches the code to test (pulls the PR when one was given) and works
  # out which formulae the diff touched.
  def download
    def current_sha1
      `git rev-parse --short HEAD`.strip
    end
    def current_branch
      `git symbolic-ref HEAD`.slice!("refs/heads/").strip
    end
    @category = __method__
    if @url
      `git am --abort 2>/dev/null`
      test "brew update" if current_branch == "master"
      @start_sha1 = current_sha1
      test "brew pull --clean #{@url}"
      end_sha1 = current_sha1
    else
      @start_sha1 = end_sha1 = current_sha1
    end
    name_prefix = @url ? @url : @formulae.first
    @name = "#{name_prefix}-#{end_sha1}"
    @log_root = @brewbot_root + @name
    FileUtils.mkdir_p @log_root if ARGV.include? "--log" or ARGV.include? "--html"
    write_root_html :running
    return unless @url and @start_sha1 != end_sha1 and steps.last.status == :passed
    `git diff #{@start_sha1}..#{end_sha1} --name-status`.each_line do |line|
      status, filename = line.split
      # Don't try and do anything to removed files.
      if (status == 'A' or status == 'M')
        if filename.include? '/Formula/'
          @formulae << File.basename(filename, '.rb')
        end
      end
      if filename.include? '/Homebrew/' or filename.include? 'bin/brew'
        # Was `@homebrew_changed = true`, but nothing ever read that
        # variable: attr_reader :core_changed and self.run both look at
        # @core_changed, so `brew tests` never ran for core changes.
        @core_changed = true
      end
    end
  end

  # Sanity checks before testing any formula.
  def setup
    @category = __method__
    test "brew doctor"
    test "brew --env"
    test "brew --config"
  end

  # Audit, build, test, bottle and uninstall one formula; later steps are
  # skipped if the install step fails, and `brew test` only runs when the
  # formula actually defines a test.
  def formula formula
    @category = __method__.to_s + ".#{formula}"
    test "brew audit #{formula}"
    test "brew install --verbose --build-bottle #{formula}"
    return unless steps.last.status == :passed
    test "brew test #{formula}" if defined? Formula.factory(formula).test
    test "brew bottle #{formula}"
    test "brew uninstall #{formula}"
  end

  # Runs Homebrew's own test suite.
  def homebrew
    @category = __method__
    test "brew tests"
  end

  # Returns the repository to its pre-run state; with --clean this hard
  # resets and removes untracked files (destructive).
  def cleanup
    @category = __method__
    if ARGV.include? "--clean"
      test "git reset --hard origin/master"
      test "git clean --force -dx"
    else
      `git diff --exit-code HEAD 2>/dev/null`
      odie "Uncommitted changes, aborting." unless $?.success?
      test "git reset --hard #{@start_sha1}" if @start_sha1
    end
  end

  def test cmd
    Step.run self, cmd
  end

  # Summarises step results, writes them out, and optionally posts a
  # comment on the GitHub pull request.
  def check_results
    message = "All tests passed and raring to brew."
    status = :passed
    steps.each do |step|
      case step.status
      when :passed then next
      when :running then raise
      when :failed then
        if status == :passed
          status = :failed
          message = ""
        end
        message += "#{step.command}: #{step.status.to_s.upcase}\n"
      end
    end
    write_root_html status
    if ARGV.include? "--comment" and @url
      # Credentials come from ~/.brewbot in "user:password" form.
      username, password = IO.read(File.expand_path('~/.brewbot')).split(':')
      url = "https://api.github.com/repos/mxcl/homebrew/issues/#{@url}/comments"
      require 'vendor/multi_json'
      json = MultiJson.encode(:body => message)
      curl url, "-X", "POST", "--user", "#{username}:#{password}", "--data", json, "-o", "/dev/null"
    end
  end

  # Full pipeline for one argument.
  def self.run url
    test = new url
    test.cleanup
    test.download
    test.setup
    test.formulae.each do |f|
      test.formula f
    end
    test.homebrew if test.core_changed
    test.cleanup
    test.check_results
  end
end
# Entry point: each named argument is a PR number or formula to test.
if ARGV.empty?
  odie 'This command requires at least one argument containing a pull request number or formula.'
end
# All git/brew commands below assume the Homebrew repository as CWD.
Dir.chdir HOMEBREW_REPOSITORY
ARGV.named.each do|arg|
  Test.run arg
end
|
# Yields every gem name in the GEMS constant to the given block;
# iterates without effect when no block is supplied.
def for_each_gem
  GEMS.each { |gem_name| yield gem_name if block_given? }
end
# Builds one colored "path:line # description" string per failed example,
# prefixing each spec path with its gem directory.
def messages_for_failed_tests(failures_by_gem)
  failures_by_gem.flat_map do |gem_name, failures|
    failures.map do |failure|
      location = failure["file_path"].sub(".", gem_name)
      "#{location}:#{failure["line_number"]}".red + " # #{failure["full_description"]}".cyan
    end
  end
end
# Formats every failure across all gems, numbering them sequentially
# starting at 1 via format_failure.
def format_failures(failures_by_gem)
  counter = 1
  failures_by_gem.flat_map do |gem_name, failures|
    failures.map do |failure|
      rendered = format_failure(failure, gem_name, counter)
      counter += 1
      rendered
    end
  end
end
# Renders one failure as a numbered, colored block: description, the
# failing source line, the exception message, and the stack frame.
def format_failure(failure, gem_name, count)
  # "./spec/foo_spec.rb" -> "/spec/foo_spec.rb" so it can be prefixed with
  # the gem directory below.
  file_path = failure["file_path"].sub(".", "")
  # First backtrace frame that points into the spec file itself.
  stack_frame = failure["exception"]["backtrace"].detect { |frame| frame.include?(file_path) }
  # NOTE(review): file_path is interpolated unescaped into the regex, so a
  # path containing metacharacters would misbehave, and a nil stack_frame
  # (no matching frame) would raise here -- confirm inputs guarantee one.
  stack_frame_match = /#{gem_name}#{file_path}:(\d+).*/.match(stack_frame)
  code_line = get_line_from_file(stack_frame_match[1].to_i, "#{gem_name}#{file_path}").strip
  # Leading newline only before the very first failure in the report.
  formatted_failure = count == 1 ? "\n" : ""
  formatted_failure += "#{count}) #{failure["full_description"]}\n"
  formatted_failure += "Failure/Error: #{code_line}\n".red
  formatted_failure += "#{failure["exception"]["message"]}".red
  formatted_failure += "# #{stack_frame_match[0]}\n".cyan
end
# Installs gems via Bundler; on CI a shared cache path is reused between
# builds to speed things up.
def bundle_install
  path = ENV['CI'] ? " --path=#{File.expand_path("~/.fastlane_bundle")}" : nil
  sh "bundle check#{path} || bundle install#{path} --jobs=4 --retry=3"
end
# Runs bundle_install at the repo root and then inside every gem directory.
task :bundle_install_all do
  puts "Fetching dependencies in the root"
  bundle_install
  for_each_gem do |repo|
    Dir.chdir(repo) do
      puts "Fetching dependencies for #{repo}"
      bundle_install
    end
  end
end
# Returns the 1-indexed line (including its trailing newline) from the
# given file; the short-circuit return avoids reading past the target.
def get_line_from_file(line_number, file)
  File.foreach(file).with_index(1) do |line, current_number|
    return line if current_number == line_number
  end
end
desc "Test all fastlane projects"
# Runs rspec + rubocop in every gem directory, merges coverage, then prints
# a combined summary and raises if any repo failed.
task :test_all do
  require 'bundler/setup'
  require 'colored'
  require 'fileutils'
  require 'json'
  exceptions = []
  repos_with_exceptions = []
  # Machine-readable rspec output, written inside each gem directory.
  rspec_log_file = "rspec_logs.json"
  for_each_gem do |repo|
    box "Testing #{repo}"
    Dir.chdir(repo) do
      FileUtils.rm_f(rspec_log_file)
      begin
        # From https://github.com/bundler/bundler/issues/1424#issuecomment-2123080
        # Since we nest bundle exec in bundle exec
        Bundler.with_clean_env do
          rspec_command_parts = [
            "bundle exec rspec",
            "--format documentation",
            "--format j --out #{rspec_log_file}"
          ]
          # On CircleCI also emit JUnit XML for the test-report UI.
          if ENV['CIRCLECI']
            output_file = File.join(ENV['CIRCLE_TEST_REPORTS'], 'rspec', "#{repo}-junit.xml")
            rspec_command_parts << "--format RspecJunitFormatter --out #{output_file}"
          end
          sh rspec_command_parts.join(' ')
          sh "bundle exec rubocop"
        end
      rescue => ex
        # Keep testing the remaining repos; failures are summarised below.
        puts "[[FAILURE]] with repo '#{repo}' due to\n\n#{ex}\n\n"
        exceptions << "#{repo}: #{ex}"
        repos_with_exceptions << repo
      ensure
        if ENV["CIRCLECI"] && ENV["CIRCLE_ARTIFACTS"] && File.exist?(rspec_log_file)
          FileUtils.cp(rspec_log_file, File.join(ENV["CIRCLE_ARTIFACTS"], "rspec_logs_#{repo}.json"))
        end
      end
    end
  end
  # Merge each gem's SimpleCov result set into one combined result.
  require 'coveralls'
  require 'simplecov'
  SimpleCov.command_name('Unit Tests')
  r = {}
  for_each_gem do |repo|
    begin
      puts "Loading coverage data of #{repo}"
      data = JSON.parse(File.read(File.join(repo, "coverage", ".resultset.json")))
      r = SimpleCov::Result.from_hash(data).original_result.merge_resultset(r)
    rescue => ex
      puts "No test results found for #{repo} => #{ex}"
    end
  end
  # Aggregate failed examples and timing from each gem's JSON log.
  failed_tests_by_gem = {}
  example_count = 0
  duration = 0.0
  for_each_gem do |gem_name|
    failed_tests_by_gem[gem_name] = []
    log_file_path = File.join(gem_name, rspec_log_file)
    next unless File.readable?(log_file_path)
    log_json = JSON.parse(File.read(log_file_path))
    tests = log_json["examples"]
    summary = log_json["summary"]
    example_count += summary["example_count"]
    duration += summary["duration"]
    failed_tests_by_gem[gem_name] += tests.select { |r| r["status"] != "passed" }
  end
  failure_messages = messages_for_failed_tests(failed_tests_by_gem)
  puts ("*" * 80).yellow
  box "Testing Summary"
  puts "\nFinished in #{duration.round(3)} seconds"
  puts "#{example_count} examples, #{failure_messages.count} failure(s)".send(failure_messages.empty? ? :green : :red)
  unless failure_messages.empty?
    box "#{exceptions.count} repo(s) with test failures: " + repos_with_exceptions.join(", ")
    puts format_failures(failed_tests_by_gem)
    puts "Failed examples:"
    puts "#{failure_messages.join("\n")}\n"
  end
  if exceptions.empty?
    r.delete_if { |path, coverage| path.to_s.match(/spec/) }
    # Only upload coverage results if tests are successful
    Coveralls::SimpleCov::Formatter.new.format(SimpleCov::Result.new(r))
    puts "Success 🚀".green
  else
    box "Exceptions 💣"
    puts "\n" + exceptions.map(&:red).join("\n")
    raise "All tests did not complete successfully. Search for '[[FAILURE]]' in the build logs.".red
  end
end
Test coverage reports were not reliable (#5218)
* Test coverage reports were not reliable
* Fixed tests
# Yields every gem name in the GEMS constant to the given block;
# iterates without effect when no block is supplied.
def for_each_gem
  GEMS.each { |gem_name| yield gem_name if block_given? }
end
# Builds one colored "path:line # description" string per failed example,
# prefixing each spec path with its gem directory.
def messages_for_failed_tests(failures_by_gem)
  failures_by_gem.flat_map do |gem_name, failures|
    failures.map do |failure|
      location = failure["file_path"].sub(".", gem_name)
      "#{location}:#{failure["line_number"]}".red + " # #{failure["full_description"]}".cyan
    end
  end
end
# Formats every failure across all gems, numbering them sequentially
# starting at 1 via format_failure.
def format_failures(failures_by_gem)
  counter = 1
  failures_by_gem.flat_map do |gem_name, failures|
    failures.map do |failure|
      rendered = format_failure(failure, gem_name, counter)
      counter += 1
      rendered
    end
  end
end
# Renders one failure as a numbered, colored block: description, the
# failing source line, the exception message, and the stack frame.
def format_failure(failure, gem_name, count)
  # "./spec/foo_spec.rb" -> "/spec/foo_spec.rb" so it can be prefixed with
  # the gem directory below.
  file_path = failure["file_path"].sub(".", "")
  # First backtrace frame that points into the spec file itself.
  stack_frame = failure["exception"]["backtrace"].detect { |frame| frame.include?(file_path) }
  # NOTE(review): file_path is interpolated unescaped into the regex, so a
  # path containing metacharacters would misbehave, and a nil stack_frame
  # (no matching frame) would raise here -- confirm inputs guarantee one.
  stack_frame_match = /#{gem_name}#{file_path}:(\d+).*/.match(stack_frame)
  code_line = get_line_from_file(stack_frame_match[1].to_i, "#{gem_name}#{file_path}").strip
  # Leading newline only before the very first failure in the report.
  formatted_failure = count == 1 ? "\n" : ""
  formatted_failure += "#{count}) #{failure["full_description"]}\n"
  formatted_failure += "Failure/Error: #{code_line}\n".red
  formatted_failure += "#{failure["exception"]["message"]}".red
  formatted_failure += "# #{stack_frame_match[0]}\n".cyan
end
# Installs gems via Bundler; on CI a shared cache path is reused between
# builds to speed things up.
def bundle_install
  path = ENV['CI'] ? " --path=#{File.expand_path("~/.fastlane_bundle")}" : nil
  sh "bundle check#{path} || bundle install#{path} --jobs=4 --retry=3"
end
# Runs bundle_install at the repo root and then inside every gem directory.
task :bundle_install_all do
  puts "Fetching dependencies in the root"
  bundle_install
  for_each_gem do |repo|
    Dir.chdir(repo) do
      puts "Fetching dependencies for #{repo}"
      bundle_install
    end
  end
end
# Returns the 1-indexed line (including its trailing newline) from the
# given file; the short-circuit return avoids reading past the target.
def get_line_from_file(line_number, file)
  File.foreach(file).with_index(1) do |line, current_number|
    return line if current_number == line_number
  end
end
desc "Test all fastlane projects"
# Runs rspec + rubocop in every gem directory, then prints a combined
# summary and raises if any repo failed. Coverage merging/upload is
# disabled (commented out) because the reports were unreliable.
task :test_all do
  require 'bundler/setup'
  require 'colored'
  require 'fileutils'
  require 'json'
  exceptions = []
  repos_with_exceptions = []
  # Machine-readable rspec output, written inside each gem directory.
  rspec_log_file = "rspec_logs.json"
  for_each_gem do |repo|
    box "Testing #{repo}"
    Dir.chdir(repo) do
      FileUtils.rm_f(rspec_log_file)
      begin
        # From https://github.com/bundler/bundler/issues/1424#issuecomment-2123080
        # Since we nest bundle exec in bundle exec
        Bundler.with_clean_env do
          rspec_command_parts = [
            "bundle exec rspec",
            "--format documentation",
            "--format j --out #{rspec_log_file}"
          ]
          # On CircleCI also emit JUnit XML for the test-report UI.
          if ENV['CIRCLECI']
            output_file = File.join(ENV['CIRCLE_TEST_REPORTS'], 'rspec', "#{repo}-junit.xml")
            rspec_command_parts << "--format RspecJunitFormatter --out #{output_file}"
          end
          sh rspec_command_parts.join(' ')
          sh "bundle exec rubocop"
        end
      rescue => ex
        # Keep testing the remaining repos; failures are summarised below.
        puts "[[FAILURE]] with repo '#{repo}' due to\n\n#{ex}\n\n"
        exceptions << "#{repo}: #{ex}"
        repos_with_exceptions << repo
      ensure
        if ENV["CIRCLECI"] && ENV["CIRCLE_ARTIFACTS"] && File.exist?(rspec_log_file)
          FileUtils.cp(rspec_log_file, File.join(ENV["CIRCLE_ARTIFACTS"], "rspec_logs_#{repo}.json"))
        end
      end
    end
  end
  # Coverage merging intentionally disabled -- reports were not reliable.
  # require 'coveralls'
  # require 'simplecov'
  # SimpleCov.command_name('Unit Tests')
  # r = {}
  # for_each_gem do |repo|
  #   begin
  #     puts "Loading coverage data of #{repo}"
  #     data = JSON.parse(File.read(File.join(repo, "coverage", ".resultset.json")))
  #     r = SimpleCov::Result.from_hash(data).original_result.merge_resultset(r)
  #   rescue => ex
  #     puts "No test results found for #{repo} => #{ex}"
  #   end
  # end
  # Aggregate failed examples and timing from each gem's JSON log.
  failed_tests_by_gem = {}
  example_count = 0
  duration = 0.0
  for_each_gem do |gem_name|
    failed_tests_by_gem[gem_name] = []
    log_file_path = File.join(gem_name, rspec_log_file)
    next unless File.readable?(log_file_path)
    log_json = JSON.parse(File.read(log_file_path))
    tests = log_json["examples"]
    summary = log_json["summary"]
    example_count += summary["example_count"]
    duration += summary["duration"]
    failed_tests_by_gem[gem_name] += tests.select { |r| r["status"] != "passed" }
  end
  failure_messages = messages_for_failed_tests(failed_tests_by_gem)
  puts ("*" * 80).yellow
  box "Testing Summary"
  puts "\nFinished in #{duration.round(3)} seconds"
  puts "#{example_count} examples, #{failure_messages.count} failure(s)".send(failure_messages.empty? ? :green : :red)
  unless failure_messages.empty?
    box "#{exceptions.count} repo(s) with test failures: " + repos_with_exceptions.join(", ")
    puts format_failures(failed_tests_by_gem)
    puts "Failed examples:"
    puts "#{failure_messages.join("\n")}\n"
  end
  if exceptions.empty?
    # r.delete_if { |path, coverage| path.to_s.match(/spec/) }
    # Only upload coverage results if tests are successful
    # Coveralls::SimpleCov::Formatter.new.format(SimpleCov::Result.new(r))
    puts "Success 🚀".green
  else
    box "Exceptions 💣"
    puts "\n" + exceptions.map(&:red).join("\n")
    raise "All tests did not complete successfully. Search for '[[FAILURE]]' in the build logs.".red
  end
end
|
Added tests for language/node.rb
require "language/node"
# RSpec coverage for Homebrew's Language::Node helper module. `shutup`
# suppresses command output during the examples.
describe Language::Node do
  specify "#npm_cache_config" do
    shutup do
      ret_val = described_class.npm_cache_config
      expect(ret_val).to eq("cache=#{HOMEBREW_CACHE}/npm_cache\n")
    end
  end

  describe "#pack_for_installation" do
    it "raises error with non zero exitstatus" do
      shutup do
        expect{described_class.pack_for_installation}.to raise_error
      end
    end
    it "does not raise error with a zero exitstatus" do
      shutup do
        # Fake a successful `npm pack` exit status.
        allow_any_instance_of(Process::Status).to receive(:exitstatus).and_return(0)
        expect{described_class.pack_for_installation}.not_to raise_error
      end
    end
  end

  describe "#setup_npm_environment" do
    it "npmrc exists" do
      shutup do
        expect(described_class.setup_npm_environment).to be_nil
      end
    end
    it "npmrc does not exist" do
      shutup do
        # Force the "no .npmrc yet" branch.
        allow_any_instance_of(Pathname).to receive(:exist?).and_return(false)
        described_class.setup_npm_environment
      end
    end
  end

  specify "#std_npm_install_args" do
    shutup do
      npm_install_arg = "libexec"
      allow_any_instance_of(Process::Status).to receive(:exitstatus).and_return(0)
      resp = described_class.std_npm_install_args npm_install_arg
      expect(resp).to eq(["--verbose", "--global", "--prefix=#{npm_install_arg}", "#{Dir.pwd}/"])
    end
  end

  specify "#local_npm_install_args" do
    shutup do
      resp = described_class.local_npm_install_args
      expect(resp).to eq(["--verbose"])
    end
  end
end
require File.expand_path("../../store", __FILE__)
require File.expand_path("../../red_black_tree", __FILE__)
# Store implementation backed by a red-black tree; keys stay sorted, which
# makes range queries cheap.
class Store::RedBlackTree < Store
  def initialize
    @tree = ::RedBlackTree.new
  end

  # Applies a write batch.
  #
  # batch - enumerable of [key, value] pairs; a nil value means "delete key".
  #
  # BUGFIX: previously every key was deleted before being re-inserted, which
  # rebalanced the tree twice for a plain update. Assignment through []=
  # already replaces an existing entry, so we only delete on nil values.
  #
  # Returns self so calls can be chained.
  def apply batch
    batch.each do |k, v|
      if v.nil?
        @tree.delete k
      else
        @tree[k] = v
      end
    end
    self
  end

  # Point lookup for a single key.
  def get key
    @tree[key]
  end

  # Lazily enumerates [key, value] pairs with from <= key <= to, in order.
  def _range from, to
    # Descend from the root towards `from`, recording the path on a stack;
    # the stack then drives an explicit in-order traversal.
    n = @tree.root
    stack = []
    while n && !n.empty?
      break unless n.key
      d = from <=> n.key
      stack << n
      if d <= 0
        n = n.left
      else
        n = n.right
      end
    end
    Enumerator.new do |out|
      loop do
        break if stack.empty?
        n = stack.last
        # Keys are visited in ascending order, so stop once past `to`.
        break if n.key > to
        out << [n.key, n.value] if n.key >= from
        if n.right && !n.right.empty?
          # Successor is the leftmost node of the right subtree.
          n = n.right
          while n && !n.empty?
            stack << n
            n = n.left
          end
        else
          # Pop until we leave a left subtree; that ancestor is the successor.
          stack.pop
          while stack.length > 0 && stack.last.right == n
            n = stack.last
            stack.pop
          end
        end
      end
    end
  end

  # Duplicates share nothing: the underlying tree is copied as well.
  def dup
    new = super
    new.instance_variable_set :@tree, @tree.dup
    new
  end
end
Don't delete the node before setting it
require File.expand_path("../../store", __FILE__)
require File.expand_path("../../red_black_tree", __FILE__)
# Store implementation backed by a red-black tree; keys stay sorted, which
# makes range queries cheap.
class Store::RedBlackTree < Store
def initialize
@tree = ::RedBlackTree.new
end
# Applies a batch of [key, value] writes; a nil value deletes the key,
# otherwise the key is assigned directly (no delete-then-insert, so the
# tree is not rebalanced twice for a plain update).
def apply batch
batch.each do |k, v|
if v == nil
@tree.delete k
else
@tree[k] = v
end
end
self
end
# Point lookup for a single key.
def get key
@tree[key]
end
# Lazily enumerates [key, value] pairs with from <= key <= to, in order.
def _range from, to
# Descend from the root towards `from`, recording the path on a stack;
# the stack then drives an explicit in-order traversal.
n = @tree.root
stack = []
while n && !n.empty?
break unless n.key
d = from <=> n.key
stack << n
if d <= 0
n = n.left
else
n = n.right
end
end
Enumerator.new do |out|
loop do
break if stack.empty?
n = stack.last
# Keys are visited in ascending order, so stop once past `to`.
break if n.key > to
out << [n.key, n.value] if n.key >= from
if n.right && !n.right.empty?
# Successor is the leftmost node of the right subtree.
n = n.right
while n && !n.empty?
stack << n
n = n.left
end
else
# Pop until we leave a left subtree; that ancestor is the successor.
stack.pop
while stack.length > 0 && stack.last.right == n
n = stack.last
stack.pop
end
end
end
end
end
# Duplicates share nothing: the underlying tree is copied as well.
def dup
new = super
new.instance_variable_set :@tree, @tree.dup
new
end
end |
# -*- encoding: utf-8 -*-
# Gemspec for raygun_client: a client for the Raygun error-reporting API
# built on the Obsidian HTTP client stack.
Gem::Specification.new do |s|
s.name = 'raygun_client'
s.version = '0.2.0.0'
s.summary = 'Client for the Raygun API using the Obsidian HTTP client'
s.description = ' '
s.authors = ['Obsidian Software, Inc']
s.email = 'opensource@obsidianexchange.com'
s.homepage = 'https://github.com/obsidian-btc/raygun-client'
s.licenses = ['MIT']
s.require_paths = ['lib']
# Only lib/ is packaged; tests and docs are not shipped.
s.files = Dir.glob('{lib}/**/*')
s.platform = Gem::Platform::RUBY
s.required_ruby_version = '>= 2.2.3'
# Runtime dependencies (unpinned; resolved at install time).
s.add_runtime_dependency 'error_data'
s.add_runtime_dependency 'serialize'
s.add_runtime_dependency 'connection-client'
s.add_runtime_dependency 'controls'
s.add_runtime_dependency 'http-commands'
s.add_runtime_dependency 'telemetry'
s.add_runtime_dependency 'settings'
s.add_development_dependency 'test_bench'
end
Package version is increased from 0.2.0.0 to 0.2.0.1
(Changed logging when posting to Raygun is complete)
# -*- encoding: utf-8 -*-
# Gemspec for raygun_client: a client for the Raygun error-reporting API
# built on the Obsidian HTTP client stack.
Gem::Specification.new do |s|
s.name = 'raygun_client'
s.version = '0.2.0.1'
s.summary = 'Client for the Raygun API using the Obsidian HTTP client'
s.description = ' '
s.authors = ['Obsidian Software, Inc']
s.email = 'opensource@obsidianexchange.com'
s.homepage = 'https://github.com/obsidian-btc/raygun-client'
s.licenses = ['MIT']
s.require_paths = ['lib']
# Only lib/ is packaged; tests and docs are not shipped.
s.files = Dir.glob('{lib}/**/*')
s.platform = Gem::Platform::RUBY
s.required_ruby_version = '>= 2.2.3'
# Runtime dependencies (unpinned; resolved at install time).
s.add_runtime_dependency 'error_data'
s.add_runtime_dependency 'serialize'
s.add_runtime_dependency 'connection-client'
s.add_runtime_dependency 'controls'
s.add_runtime_dependency 'http-commands'
s.add_runtime_dependency 'telemetry'
s.add_runtime_dependency 'settings'
s.add_development_dependency 'test_bench'
end
|
module Subspp
  # Client-wide settings. token and site are plain read/write attributes;
  # host, api_version and plan_ids can be overridden through their writers
  # and fall back to sensible defaults when unset.
  class Configuration
    attr_accessor :token
    attr_accessor :site
    attr_writer :host, :api_version, :plan_ids

    # Mapping of local plan identifiers; empty unless configured.
    def plan_ids
      @plan_ids || {}
    end

    # API version segment used when building request paths.
    def api_version
      @api_version || 'v4'
    end

    # API endpoint; defaults to the hosted Pin Payments Subs service.
    def host
      @host || 'https://subs.pinpayments.com'
    end
  end
end
Configuration inherits from OpenStruct
require 'ostruct'
module Subspp
  # Client-wide settings. Inherits OpenStruct so callers may attach
  # arbitrary extra options; the explicit readers below layer defaults on
  # top of the ivar-backed writers.
  class Configuration < OpenStruct
    attr_accessor :token
    attr_accessor :site
    attr_writer :host, :api_version, :plan_ids

    # Mapping of local plan identifiers; empty unless configured.
    def plan_ids
      @plan_ids || {}
    end

    # API version segment used when building request paths.
    def api_version
      @api_version || 'v4'
    end

    # API endpoint; defaults to the hosted Pin Payments Subs service.
    def host
      @host || 'https://subs.pinpayments.com'
    end
  end
end
|
module Supercharged
  # Gem version string, bumped by the release process.
  VERSION = '2.0.2'
end
Version 2.0.3
module Supercharged
  # Gem version string, bumped by the release process.
  VERSION = '2.0.3'
end
|
# Versioning
module LonelyPlanetScrape
  # Gem release version (SemVer).
  VERSION = "2.0.1"
  # Release date, YYYY-MM-DD.
  DATE = "2015-10-11"
end
Added version number and date
# Versioning
module LonelyPlanetScrape
# Gem release version (SemVer).
VERSION = '0.1.0'
# Release date, YYYY-MM-DD.
DATE = '2015-10-16'
end |
namespace :fromthepage do
  desc "Display all transcriber names and emails"
  # Lists every user who has a transcription/edit deed in the collection.
  task :all_transcribers, [:collection_id] => :environment do |_t, args|
    collection_id = args.collection_id
    # Deed types that count as transcription work.
    trans_deeds = ["page_trans", "page_edit"]
    collection = Collection.find_by(id: collection_id)
    transcription_deeds = collection.deeds.where(deed_type: trans_deeds)
    user_deeds = transcription_deeds.distinct.pluck(:user_id)
    all_transcribers = User.where(id: user_deeds)
    all_transcribers.each do |user|
      puts "#{user.display_name} <#{user.email}>, "
    end
  end

  desc "Display new transcriber names"
  # Users whose first transcription deed falls inside [start_date, end_date].
  task :new_transcribers, [:collection_id, :start_date, :end_date] => :environment do |_t, args|
    collection_id = args.collection_id
    start_date = args.start_date
    end_date = args.end_date
    trans_deeds = ["page_trans", "page_edit"]
    collection = Collection.find_by(id: collection_id)
    transcription_deeds = collection.deeds.where(deed_type: trans_deeds)
    # Users active inside the window ...
    recent_trans_deeds = transcription_deeds.where("created_at >= ? AND created_at <= ?", start_date, end_date).distinct.pluck(:user_id)
    recent_users = User.where(id: recent_trans_deeds)
    # ... minus users who had already transcribed before the window.
    older_trans_deeds = transcription_deeds.where("created_at < ?", start_date).distinct.pluck(:user_id)
    older_users = User.where(id: older_trans_deeds)
    new_transcribers = recent_users - older_users
    if new_transcribers.empty?
      puts "No new transcribers"
    else
      new_transcribers.each do |user|
        puts "#{user.display_name} #{user.email}"
      end
    end
  end

  desc "Display recent activity in a collection"
  # Transcriptions and notes created inside [start_date, end_date].
  task :recent_activity, [:collection_id, :start_date, :end_date] => :environment do |_t, args|
    collection_id = args.collection_id
    start_date = args.start_date
    end_date = args.end_date
    trans_deeds = ["page_trans", "page_edit"]
    collection = Collection.find_by(id: collection_id)
    transcription_deeds = collection.deeds.where(deed_type: trans_deeds)
    note_deeds = collection.deeds.where(deed_type: "note_add")
    recent_notes = note_deeds.where("created_at >= ? AND created_at <= ?", start_date, end_date)
    recent_transcriptions = transcription_deeds.where("created_at >= ? AND created_at <= ?", start_date, end_date)
    puts "Recent Transcriptions:"
    recent_transcriptions.each do |deed|
      puts "Work: #{deed.work.title}, Page: #{deed.page.title}, User: #{deed.user.display_name}, Action: #{deed.deed_type}, Date: #{deed.created_at}"
    end
    puts "Recent Notes:"
    # BUGFIX: this previously iterated note_deeds (every note ever added),
    # silently ignoring the recent_notes date scope computed above.
    recent_notes.each do |n|
      puts "Work: #{n.work.title}, User: #{n.user.display_name}, Note: #{n.note.title}"
    end
  end
end
Combined the two reporting tasks (new transcribers and recent activity) into one
namespace :fromthepage do
desc "Display all transcriber names and emails"
# Lists every user who has a transcription/edit deed in the collection.
task :all_transcribers, [:collection_id] => :environment do |t, args|
collection_id = args.collection_id
# Deed types that count as transcription work.
trans_deeds = ["page_trans", "page_edit"]
collection = Collection.find_by(id: collection_id)
transcription_deeds = collection.deeds.where(deed_type: trans_deeds)
user_deeds = transcription_deeds.distinct.pluck(:user_id)
all_transcribers = User.where(id: user_deeds)
all_transcribers.each do |t|
puts "#{t.display_name} <#{t.email}>, "
end
end
desc "Display all recent activity for a collection"
# One report covering new transcribers, transcriptions and notes created
# inside [start_date, end_date].
task :recent_activity, [:collection_id, :start_date, :end_date] => :environment do |t, args|
collection_id = args.collection_id
start_date = args.start_date
end_date = args.end_date
trans_deeds = ["page_trans", "page_edit"]
collection = Collection.find_by(id: collection_id)
transcription_deeds = collection.deeds.where(deed_type: trans_deeds)
note_deeds = collection.deeds.where(deed_type: "note_add")
# notes and transcriptions created during the time frame
recent_notes = note_deeds.where("created_at >= ? AND created_at <= ?", start_date, end_date)
recent_transcriptions = transcription_deeds.where("created_at >= ? AND created_at <= ?", start_date, end_date)
#find recent users
recent_trans_deeds = recent_transcriptions.distinct.pluck(:user_id)
recent_users = User.where(id: recent_trans_deeds)
#find older users (from before time frame)
older_trans_deeds = transcription_deeds.where("created_at < ?", start_date).distinct.pluck(:user_id)
older_users = User.where(id: older_trans_deeds)
#find the difference between the recent and older lists
new_transcribers = recent_users - older_users
unless new_transcribers.empty?
puts "New Transcribers"
new_transcribers.each do |t|
puts "#{t.display_name} #{t.email}"
end
else
puts "No new transcribers"
end
unless recent_transcriptions.empty?
puts "Recent Transcriptions:"
recent_transcriptions.each do |t|
puts "Work: #{t.work.title}, Page: #{t.page.title}, User: #{t.user.display_name}, Action: #{t.deed_type}, Date: #{t.created_at}"
end
else
puts "No recent transcriptions"
end
unless recent_notes.empty?
puts "Recent Notes:"
recent_notes.each do |n|
puts "Work: #{n.work.title}, User: #{n.user.display_name}, Note: #{n.note.title}, Date: #{n.created_at}"
end
else
puts "No recent notes"
end
end
end
|
namespace :multitenancy do
  # Symlinks config/environments/<tenant>_<env>.rb to the base <env>.rb for
  # every tenant environment present in the database configuration.
  #
  # FIXES: the glob previously read "*_prodution.rb" (typo), so existing
  # production symlinks were never detected and were recreated every run;
  # the tasks now also depend on :environment so ActiveRecord is loaded.
  task :create => :environment do
    db_envs = ActiveRecord::Base.configurations.keys.select{ |k| k.match(/_development$|_production$|_test$/) }
    cd Rails.root.join('config', 'environments'), :verbose => true
    file_envs = Dir.glob "{*_development.rb,*_production.rb,*_test.rb}"
    (db_envs.map{ |e| e + '.rb' } - file_envs).each { |env| ln_s env.split('_').last, env }
  end

  # Removes symlinks for tenant environments no longer present in the
  # database configuration.
  task :remove => :environment do
    db_envs = ActiveRecord::Base.configurations.keys.select{ |k| k.match(/_development$|_production$|_test$/) }
    cd Rails.root.join('config', 'environments'), :verbose => true
    file_envs = Dir.glob "{*_development.rb,*_production.rb,*_test.rb}"
    (file_envs - db_envs.map{ |e| e + '.rb' }).each { |env| safe_unlink env }
  end
end

namespace :db do
  # Runs db:migrate for every tenant environment derived from Rails.env.
  task :migrate_other_environments => :environment do
    envs = ActiveRecord::Base.configurations.keys.select{ |k| k.match(/_#{Rails.env}$/) }
    envs.each do |e|
      puts "*** Migrating #{e}" if Rake.application.options.trace
      system "rake db:migrate RAILS_ENV=#{e}"
    end
  end

  # Piggy-back tenant migrations onto the standard db:migrate.
  task :migrate => :migrate_other_environments
end
Fixes multitenancy rake tasks
- Fixes a typo;
- Loads the environment first so ActiveRecord is available.
namespace :multitenancy do
# Symlinks config/environments/<tenant>_<env>.rb to the base <env>.rb for
# every tenant environment present in the database configuration.
task :create => :environment do
db_envs = ActiveRecord::Base.configurations.keys.select{ |k| k.match(/_development$|_production$|_test$/) }
cd Rails.root.join('config', 'environments'), :verbose => true
file_envs = Dir.glob "{*_development.rb,*_production.rb,*_test.rb}"
(db_envs.map{ |e| e + '.rb' } - file_envs).each { |env| ln_s env.split('_').last, env }
end
# Removes symlinks for tenant environments no longer present in the
# database configuration.
task :remove => :environment do
db_envs = ActiveRecord::Base.configurations.keys.select{ |k| k.match(/_development$|_production$|_test$/) }
cd Rails.root.join('config', 'environments'), :verbose => true
file_envs = Dir.glob "{*_development.rb,*_production.rb,*_test.rb}"
(file_envs - db_envs.map{ |e| e + '.rb' }).each { |env| safe_unlink env }
end
end
namespace :db do
# Runs db:migrate once per tenant environment derived from Rails.env.
task :migrate_other_environments => :environment do
envs = ActiveRecord::Base.configurations.keys.select{ |k| k.match(/_#{Rails.env}$/) }
envs.each do |e|
puts "*** Migrating #{e}" if Rake.application.options.trace
system "rake db:migrate RAILS_ENV=#{e}"
end
end
# Piggy-back tenant migrations onto the standard db:migrate.
task :migrate => :migrate_other_environments
end
|
# frozen_string_literal: true

# Load stormbreaker's bundled rake tasks -- but only when the gem is actually
# installed. BUGFIX: previously Gem::Specification.find_by_name was called
# unconditionally, which raises Gem::MissingSpecError on hosts without the
# gem and breaks every rake invocation.
if Gem.loaded_specs.key?('stormbreaker')
  spec = Gem::Specification.find_by_name 'stormbreaker'
  Dir.glob(File.join(spec.gem_dir, 'lib', 'stormbreaker', 'tasks', '*')).each { |f| load f }
end
rake should only load stormbreaker if its installed
Change-Id: I8db6e3b6561e0c6cb00c54e4cea274aaea2d95ac
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/267774
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
Reviewed-by: Brian Watson <4411c2aac809443ec63e2dd517de0f2dbc7408c6@instructure.com>
Reviewed-by: Ryan Norton <65299b45178f94900ff304765e8fbf07bd13b331@instructure.com>
QA-Review: Alex Slaughter <11669a22a05ead38ed9badafd55ef4c5386724b6@instructure.com>
Product-Review: Alex Slaughter <11669a22a05ead38ed9badafd55ef4c5386724b6@instructure.com>
# frozen_string_literal: true
# Load stormbreaker's bundled rake tasks, but only when the gem is actually
# installed; otherwise Gem::Specification.find_by_name would raise and break
# every rake invocation on hosts without the gem.
if Gem.loaded_specs.has_key?('stormbreaker')
spec = Gem::Specification.find_by_name 'stormbreaker'
Dir.glob(File.join(spec.gem_dir, 'lib', 'stormbreaker', 'tasks', '*')).each { |f| load f }
end
|
module Teamocil
  module Tmux
    # A tmux window described by the configuration file. Knows how to emit
    # the flat list of tmux commands that creates it, its panes, its layout
    # and its focus.
    class Window < ClosedStruct.new(:index, :root, :focus, :layout, :name, :panes)
      def initialize(object)
        super
        # Make sure paths like `~/foo/bar` work
        self.root = File.expand_path(root) if root
        self.panes ||= []
        self.panes = panes.each_with_index.map do |pane, index|
          # Support single command instead of `commands` key in Hash
          pane = { commands: [pane] } if pane.is_a?(String)
          # Panes need to know their position
          pane.merge! index: index
          # Panes need to know the window root directory
          pane.merge! root: root
          Teamocil::Tmux::Pane.new(pane)
        end
      end

      # Builds the ordered list of tmux commands for this window.
      def as_tmux
        [].tap do |tmux|
          # Rename the current window or create a new one
          if Teamocil.options[:here] && first?
            if root
              first_pane_index = panes.first.internal_index
              # Style fix: plain %(...) instead of the redundant %Q(...).
              tmux << Teamocil::Command::SendKeysToPane.new(index: first_pane_index, keys: %(cd "#{root}"))
              tmux << Teamocil::Command::SendKeysToPane.new(index: first_pane_index, keys: 'Enter')
            end
            tmux << Teamocil::Command::RenameWindow.new(name: name)
          else
            tmux << Teamocil::Command::NewWindow.new(name: name, root: root)
          end
          # Execute all panes commands
          tmux << panes.map(&:as_tmux).flatten
          # Select the window layout
          tmux << Teamocil::Command::SelectLayout.new(layout: layout) if layout
          # Set the focus on the right pane or the first one
          focused_pane = panes.find(&:focus)
          focused_index = focused_pane ? focused_pane.internal_index : Teamocil::Tmux::Pane.pane_base_index
          tmux << Teamocil::Command::SelectPane.new(index: focused_index)
        end.flatten
      end

      # Window index adjusted by tmux's base-index option and the windows
      # already open in the session.
      def internal_index
        index + self.class.window_base_index
      end

      # Computes (and caches) the index the next created window will get.
      def self.window_base_index
        @window_base_index ||= begin
          base_index = Teamocil::Tmux.option('base-index', default: 0)
          current_window_count = Teamocil::Tmux.window_count
          # If `--here` is specified, treat the current window as a new one
          current_window_count -= 1 if Teamocil.options[:here]
          base_index + current_window_count
        end
      end

      protected

      # True for the first configured window.
      def first?
        index.zero?
      end
    end
  end
end
Fix Rubocop error, again
module Teamocil
module Tmux
# A tmux window described by the configuration file. Knows how to emit the
# flat list of tmux commands that creates it, its panes, layout and focus.
class Window < ClosedStruct.new(:index, :root, :focus, :layout, :name, :panes)
def initialize(object)
super
# Make sure paths like `~/foo/bar` work
self.root = File.expand_path(root) if root
self.panes ||= []
self.panes = panes.each_with_index.map do |pane, index|
# Support single command instead of `commands` key in Hash
pane = { commands: [pane] } if pane.is_a?(String)
# Panes need to know their position
pane.merge! index: index
# Panes need to know the window root directory
pane.merge! root: root
Teamocil::Tmux::Pane.new(pane)
end
end
# Builds the ordered list of tmux commands for this window.
def as_tmux
[].tap do |tmux|
# Rename the current window or create a new one
if Teamocil.options[:here] && first?
if root
first_pane_index = panes.first.internal_index
tmux << Teamocil::Command::SendKeysToPane.new(index: first_pane_index, keys: %(cd "#{root}"))
tmux << Teamocil::Command::SendKeysToPane.new(index: first_pane_index, keys: 'Enter')
end
tmux << Teamocil::Command::RenameWindow.new(name: name)
else
tmux << Teamocil::Command::NewWindow.new(name: name, root: root)
end
# Execute all panes commands
tmux << panes.map(&:as_tmux).flatten
# Select the window layout
tmux << Teamocil::Command::SelectLayout.new(layout: layout) if layout
# Set the focus on the right pane or the first one
focused_pane = panes.find(&:focus)
focused_index = focused_pane ? focused_pane.internal_index : Teamocil::Tmux::Pane.pane_base_index
tmux << Teamocil::Command::SelectPane.new(index: focused_index)
end.flatten
end
# Window index adjusted by tmux's base-index option and the windows
# already open in the session.
def internal_index
index + self.class.window_base_index
end
# Computes (and caches) the index the next created window will get.
def self.window_base_index
@window_base_index ||= begin
base_index = Teamocil::Tmux.option('base-index', default: 0)
current_window_count = Teamocil::Tmux.window_count
# If `--here` is specified, treat the current window as a new one
current_window_count -= 1 if Teamocil.options[:here]
base_index + current_window_count
end
end
protected
# True for the first configured window.
def first?
index.zero?
end
end
end
end
|
module TelegramBot
  # An incoming Telegram message, deserialized via Virtus attributes.
  class Message
    include Virtus.model

    attribute :message_id, Integer
    alias_method :id, :message_id
    alias_method :to_i, :id

    attribute :from, User
    # Convenience alias: `message.user` reads better than `message.from`.
    alias_method :user, :from

    attribute :text, String
    attribute :date, DateTime
    attribute :chat, Channel
    attribute :reply_to_message, Message

    # Builds an OutMessage addressed to this message's chat; yields it to
    # the block (if given) for further setup and returns it.
    def reply(&block)
      reply = OutMessage.new(chat_id: chat.id)
      yield reply if block_given?
      reply
    end

    # Strips the bot's @username mention from the text (nil-safe).
    # NOTE(review): "\s" inside a double-quoted string is a literal space,
    # not the regex whitespace class (that would need "\\s") -- confirm intent
    # before changing, as both historical versions use this form.
    def get_command_for(bot)
      text && text.sub(Regexp.new("@#{bot.identity.username}($|\s|\.|,)", Regexp::IGNORECASE), '').strip
    end
  end
end
Aliasing user to from in Message
module TelegramBot
# An incoming Telegram message, deserialized via Virtus attributes.
class Message
include Virtus.model
attribute :message_id, Integer
alias_method :id, :message_id
alias_method :to_i, :id
attribute :from, User
# The sender; aliased so callers can write message.user.
alias_method :user, :from
attribute :text, String
attribute :date, DateTime
attribute :chat, Channel
attribute :reply_to_message, Message
# Builds an OutMessage addressed to this message's chat; yields it to the
# block (if given) for further setup and returns it.
def reply(&block)
reply = OutMessage.new(chat_id: chat.id)
yield reply if block_given?
reply
end
# Strips the bot's @username mention from the text (nil-safe).
# NOTE(review): "\s" inside a double-quoted string is a literal space, not
# the regex whitespace class (that would need "\\s") -- confirm intent.
def get_command_for(bot)
text && text.sub(Regexp.new("@#{bot.identity.username}($|\s|\.|,)", Regexp::IGNORECASE), '').strip
end
end
end
|
# Copyright (c) 2012 Kenichi Kamiya
require 'optionalargument'
require_relative 'progressbar/version'
require_relative 'progressbar/singleton_class'
module Terminal
  # Renders a textual progress bar such as
  #
  #    42% |#################                         |
  #
  # redrawing in place via "\r" until the bar is finished.
  class ProgressBar
    DEFAULT_WIDTH = 80
    CR = "\r".freeze
    EOL = "\n".freeze
    STOP = '|'.freeze
    SPACE = ' '.freeze
    # Columns consumed by the fixed decoration: "100% |" ... "|".
    DECORATION_LENGTH = "100% #{STOP}#{STOP}".length

    class Error < StandardError; end
    # Raised when the pointer would leave the 0..max_count range.
    class InvalidPointingError < Error; end

    attr_reader :max_count, :max_width, :pointer, :output
    alias_method :current_count, :pointer

    # Declarative validation/normalization for #initialize options.
    # @return [Class]
    OptArg = OptionalArgument.define {
      opt :body_char, must: true,
          condition: ->v{v.length == 1},
          adjuster: ->v{v.to_str.dup.freeze},
          aliases: [:mark]
      opt :max_count, default: 100,
          condition: AND(Integer, ->v{v >= 1}),
          adjuster: ->v{v.to_int}
      opt :max_width, default: DEFAULT_WIDTH,
          condition: AND(Integer, ->v{v >= 1}),
          adjuster: ->v{v.to_int}
      opt :output, default: $stderr,
          condition: AND(CAN(:print), CAN(:flush))
    }

    # @param [Hash] options
    # @option options [String, #to_str] :body_char (also :mark)
    # @option options [Integer, #to_int] :max_count
    # @option options [Integer, #to_int] :max_width
    # @option options [IO, StringIO, #print, #flush] :output
    def initialize(options={})
      opts = OptArg.parse options
      @body_char = opts.body_char
      @max_count = opts.max_count
      @max_width = opts.max_width
      @output = opts.output
      @pointer = 0
    end

    # @return [String] a defensive copy of the fill character
    def body_char
      @body_char.dup
    end

    # @return [Integer] columns available for the bar body
    def max_bar_width
      max_width - DECORATION_LENGTH
    end

    # @return [Integer] columns currently filled
    def current_bar_width
      percentage == 0 ? 0 : (max_bar_width * rational).to_int
    end

    # @return [Integer] completion percentage, 0..100
    #
    # BUGFIX: this used to be ((rational * (100 / @max_count)) * 100).to_int.
    # `100 / @max_count` is *integer* division, so for max_count > 100 the
    # factor was 0 (percentage stuck at 0) and for max_count < 100 the value
    # was inflated; the old formula only worked for the default of 100.
    def percentage
      (rational * 100).to_int
    end

    # @return [String]
    def bar
      "#{@body_char * current_bar_width}#{bar_padding}"
    end

    # @return [String] the full rendered line, e.g. " 42% |####    |"
    def line
      "#{percentage.to_s.rjust 3}% #{STOP}#{bar}#{STOP}"
    end

    # Writes the current line; "\r" keeps redrawing in place until the bar
    # finishes, then "\n" closes it.
    # @return [void]
    def flush
      @output.print line
      @output.print(finished? ? EOL : CR)
      @output.flush
    end

    # @param [Integer, #to_int] point
    def pointable?(point)
      int = point.to_int
      (int >= 0) && (int <= @max_count)
    end

    def finished?
      @pointer == @max_count
    end

    alias_method :end?, :finished?

    # @group Change Pointer

    # @param [Integer, #to_int] point
    # @return [point]
    def pointer=(point)
      int = point.to_int
      raise InvalidPointingError unless pointable? int
      @pointer = int
      point
    end

    # @param [Integer, #to_int] step
    # @return [step]
    def increment(step=1)
      new_pointer = @pointer + step.to_int
      raise InvalidPointingError unless pointable? new_pointer
      @pointer = new_pointer
      step
    end

    # @param [Integer, #to_int] step
    # @return [step]
    def decrement(step=1)
      increment(-step)
      step
    end

    # @return [void]
    def rewind
      @pointer = 0
      nil
    end

    # @return [void]
    def fast_forward
      @pointer = @max_count
      nil
    end

    alias_method :finish, :fast_forward

    # Bang variants (increment!, rewind!, ...) move the pointer and then
    # immediately redraw the bar.
    [:increment, :decrement, :rewind, :fast_forward, :finish].each do |changer|
      define_method :"#{changer}!" do |*args, &block|
        __send__ changer, *args, &block
        flush
      end
    end

    # @endgroup

    private

    # @return [String] spaces filling the unfilled remainder of the bar
    def bar_padding
      SPACE * (max_bar_width - current_bar_width)
    end

    # @return [Rational] pointer / max_count
    def rational
      Rational @pointer, @max_count
    end
  end
end
modify default terminal width for windows
# Copyright (c) 2012 Kenichi Kamiya
require 'optionalargument'
require_relative 'progressbar/version'
require_relative 'progressbar/singleton_class'
module Terminal
  # Renders a textual progress bar such as
  #
  #    42% |#################                       |
  #
  # redrawing in place via "\r" until the bar is finished.
  class ProgressBar
    # 78 rather than 80 so the line fits without wrapping on narrow
    # (per the original change note, Windows) consoles.
    DEFAULT_WIDTH = 78
    CR = "\r".freeze
    EOL = "\n".freeze
    STOP = '|'.freeze
    SPACE = ' '.freeze
    # Columns consumed by the fixed decoration: "100% |" ... "|".
    DECORATION_LENGTH = "100% #{STOP}#{STOP}".length

    class Error < StandardError; end
    # Raised when the pointer would leave the 0..max_count range.
    class InvalidPointingError < Error; end

    attr_reader :max_count, :max_width, :pointer, :output
    alias_method :current_count, :pointer

    # Declarative validation/normalization for #initialize options.
    # @return [Class]
    OptArg = OptionalArgument.define {
      opt :body_char, must: true,
          condition: ->v{v.length == 1},
          adjuster: ->v{v.to_str.dup.freeze},
          aliases: [:mark]
      opt :max_count, default: 100,
          condition: AND(Integer, ->v{v >= 1}),
          adjuster: ->v{v.to_int}
      opt :max_width, default: DEFAULT_WIDTH,
          condition: AND(Integer, ->v{v >= 1}),
          adjuster: ->v{v.to_int}
      opt :output, default: $stderr,
          condition: AND(CAN(:print), CAN(:flush))
    }

    # @param [Hash] options
    # @option options [String, #to_str] :body_char (also :mark)
    # @option options [Integer, #to_int] :max_count
    # @option options [Integer, #to_int] :max_width
    # @option options [IO, StringIO, #print, #flush] :output
    def initialize(options={})
      opts = OptArg.parse options
      @body_char = opts.body_char
      @max_count = opts.max_count
      @max_width = opts.max_width
      @output = opts.output
      @pointer = 0
    end

    # @return [String] a defensive copy of the fill character
    def body_char
      @body_char.dup
    end

    # @return [Integer] columns available for the bar body
    def max_bar_width
      max_width - DECORATION_LENGTH
    end

    # @return [Integer] columns currently filled
    def current_bar_width
      percentage == 0 ? 0 : (max_bar_width * rational).to_int
    end

    # @return [Integer] completion percentage, 0..100
    #
    # BUGFIX: this used to be ((rational * (100 / @max_count)) * 100).to_int.
    # `100 / @max_count` is *integer* division, so for max_count > 100 the
    # factor was 0 (percentage stuck at 0) and for max_count < 100 the value
    # was inflated; the old formula only worked for the default of 100.
    def percentage
      (rational * 100).to_int
    end

    # @return [String]
    def bar
      "#{@body_char * current_bar_width}#{bar_padding}"
    end

    # @return [String] the full rendered line, e.g. " 42% |####    |"
    def line
      "#{percentage.to_s.rjust 3}% #{STOP}#{bar}#{STOP}"
    end

    # Writes the current line; "\r" keeps redrawing in place until the bar
    # finishes, then "\n" closes it.
    # @return [void]
    def flush
      @output.print line
      @output.print(finished? ? EOL : CR)
      @output.flush
    end

    # @param [Integer, #to_int] point
    def pointable?(point)
      int = point.to_int
      (int >= 0) && (int <= @max_count)
    end

    def finished?
      @pointer == @max_count
    end

    alias_method :end?, :finished?

    # @group Change Pointer

    # @param [Integer, #to_int] point
    # @return [point]
    def pointer=(point)
      int = point.to_int
      raise InvalidPointingError unless pointable? int
      @pointer = int
      point
    end

    # @param [Integer, #to_int] step
    # @return [step]
    def increment(step=1)
      new_pointer = @pointer + step.to_int
      raise InvalidPointingError unless pointable? new_pointer
      @pointer = new_pointer
      step
    end

    # @param [Integer, #to_int] step
    # @return [step]
    def decrement(step=1)
      increment(-step)
      step
    end

    # @return [void]
    def rewind
      @pointer = 0
      nil
    end

    # @return [void]
    def fast_forward
      @pointer = @max_count
      nil
    end

    alias_method :finish, :fast_forward

    # Bang variants (increment!, rewind!, ...) move the pointer and then
    # immediately redraw the bar.
    [:increment, :decrement, :rewind, :fast_forward, :finish].each do |changer|
      define_method :"#{changer}!" do |*args, &block|
        __send__ changer, *args, &block
        flush
      end
    end

    # @endgroup

    private

    # @return [String] spaces filling the unfilled remainder of the bar
    def bar_padding
      SPACE * (max_bar_width - current_bar_width)
    end

    # @return [Rational] pointer / max_count
    def rational
      Rational @pointer, @max_count
    end
  end
end
|
module TerrImporter
  class Application
    # Command line options for the importer, parsed from ARGV into a Hash.
    # Keys include :import_css, :import_js, :import_images, :init,
    # :config_file, :verbose, :show_version, :show_help, :application_url
    # and :invalid_argument (set when parsing/validation fails).
    class Options < Hash
      attr_reader :opts, :orig_args

      # args - the raw argument vector (e.g. ARGV); a copy is kept so two
      #        Options instances can be merged by re-parsing.
      def initialize(args)
        super()
        @orig_args = args.clone
        self[:verbose] = true
        self[:show_help] = false
        require 'optparse'
        @opts = OptionParser.new do |o|
          o.banner = "Usage: #{File.basename($0)} [options] \n" +
              "Use #{File.basename($0)} [application_url] --init to initialize importer before first usage."
          o.separator ''
          o.separator 'Common options:'
          o.on('-a', '--all', 'export everything configured; javascripts, css files and images') do
            self[:import_css] = true
            self[:import_js] = true
            self[:import_images] = true
          end
          o.on('-c', '--css', 'export configured css files') { self[:import_css] = true }
          o.on('-i', '--img', 'export configured image files') { self[:import_images] = true }
          o.on('-j', '--js', 'export configured javascript files') { self[:import_js] = true }
          o.on('--init [CONFIG_EXISTS]', [:backup, :replace], 'create configuration file in current working directory. use optional argument to force file replacement (backup, replace)') do |init|
            self[:init] = init || true
          end
          o.on('-f', '--config CONFIG_FILE', 'use alternative configuration file') do |config_file|
            self[:config_file] = config_file
          end
          o.separator ''
          o.separator 'Additional configuration:'
          o.on('-v', '--[no-]verbose', 'run verbosely') do |v|
            self[:verbose] = v
          end
          o.on('--version', 'Show version') do
            self[:show_version] = true
          end
          o.on_tail('-h', '--help', 'display this help and exit') { self[:show_help] = true }
        end
        begin
          @opts.parse!(args)
          # Whatever remains after flag parsing is the application url.
          self[:application_url] = args.shift
          unless validate_application_url
            raise OptionParser::InvalidOption, "Application url is invalid, please use the form http:// or https://"
          end
        rescue OptionParser::InvalidOption => e
          self[:invalid_argument] = e.message
        end
      end

      # Re-parses the concatenation of both argument vectors.
      def merge(other)
        self.class.new(@orig_args + other.orig_args)
      end

      # True when no url was given (it is optional) or when it starts with
      # http:// or https://.
      #
      # BUGFIX: the previous implementation ended with `valid ||= true`,
      # turning every nil result (no match OR no url) into true, so an
      # invalid url was never rejected. Also anchored with \A instead of ^
      # so an embedded newline cannot sneak a match through.
      def validate_application_url
        url = self[:application_url]
        url.nil? || !(url =~ /\Ahttps?:\/\//).nil?
      end
    end
  end
end
Remove the application URL validator; validation is handled by the config validator
module TerrImporter
  class Application
    # Command line options for the importer, parsed from ARGV into a Hash.
    # Keys include :import_css, :import_js, :import_images, :init,
    # :config_file, :verbose, :show_version, :show_help, :application_url
    # and :invalid_argument (set when parsing fails).
    class Options < Hash
      attr_reader :opts, :orig_args

      # args - the raw argument vector (e.g. ARGV); a copy is kept so two
      #        Options instances can be merged by re-parsing.
      def initialize(args)
        super()
        @orig_args = args.clone
        self[:verbose] = true
        self[:show_help] = false
        require 'optparse'
        @opts = build_parser
        begin
          @opts.parse!(args)
          # Whatever remains after flag parsing is the application url.
          self[:application_url] = args.shift
        rescue OptionParser::InvalidOption => error
          self[:invalid_argument] = error.message
        end
      end

      # Re-parses the concatenation of both argument vectors.
      def merge(other)
        self.class.new(@orig_args + other.orig_args)
      end

      private

      # Builds the OptionParser whose callbacks populate this Hash.
      def build_parser
        OptionParser.new do |parser|
          parser.banner = "Usage: #{File.basename($0)} [options] \n" +
              "Use #{File.basename($0)} [application_url] --init to initialize importer before first usage."
          parser.separator ''
          parser.separator 'Common options:'
          parser.on('-a', '--all', 'export everything configured; javascripts, css files and images') do
            self[:import_css] = true
            self[:import_js] = true
            self[:import_images] = true
          end
          parser.on('-c', '--css', 'export configured css files') { self[:import_css] = true }
          parser.on('-i', '--img', 'export configured image files') { self[:import_images] = true }
          parser.on('-j', '--js', 'export configured javascript files') { self[:import_js] = true }
          parser.on('--init [CONFIG_EXISTS]', [:backup, :replace], 'create configuration file in current working directory. use optional argument to force file replacement (backup, replace)') do |init|
            self[:init] = init || true
          end
          parser.on('-f', '--config CONFIG_FILE', 'use alternative configuration file') do |config_file|
            self[:config_file] = config_file
          end
          parser.separator ''
          parser.separator 'Additional configuration:'
          parser.on('-v', '--[no-]verbose', 'run verbosely') { |v| self[:verbose] = v }
          parser.on('--version', 'Show version') { self[:show_version] = true }
          parser.on_tail('-h', '--help', 'display this help and exit') { self[:show_help] = true }
        end
      end
    end
  end
end
|
# Generated by rake task, last bump: patch3major1minor5
module TerrImporter
  # Current gem version.
  VERSION = '0.5.2'
end
version correction
# Generated by rake task, last bump: patch3major1minor5
module TerrImporter
  # Current gem version.
  VERSION = '0.5.4'
end
|
module TextHelpers
  # Gem version string, bumped by the release process.
  VERSION = '0.1.7'
end
Bump version to 0.1.8
module TextHelpers
  # Gem version string, bumped by the release process.
  VERSION = '0.1.8'
end
|
module Thrift
  # Expose the wrapped transport's socket timeout on the buffered wrapper,
  # so client code can set per-call timeouts.
  class BufferedTransport
    def timeout=(timeout)
      @transport.timeout = timeout
    end

    def timeout
      @transport.timeout
    end
  end

  # Same delegation for FramedTransport, so client-side timeouts also work
  # when framing is used (previously only BufferedTransport was patched).
  class FramedTransport
    def timeout=(timeout)
      @transport.timeout = timeout
    end

    def timeout
      @transport.timeout
    end
  end

  # Client mixin: reach through the input protocol to its transport.
  module Client
    def timeout=(timeout)
      @iprot.trans.timeout = timeout
    end

    def timeout
      @iprot.trans.timeout
    end
  end
end
Add support for specifying client-side timeouts when using FramedTransport
module Thrift
  # Expose the wrapped transport's socket timeout on the buffered wrapper,
  # so client code can set per-call timeouts.
  class BufferedTransport
    # Sets the timeout (seconds) on the wrapped transport.
    def timeout=(value)
      @transport.timeout = value
    end

    # Reads the timeout from the wrapped transport.
    def timeout
      @transport.timeout
    end
  end

  # Same delegation for the framed wrapper, so client-side timeouts also
  # work when framing is used.
  class FramedTransport
    def timeout=(value)
      @transport.timeout = value
    end

    def timeout
      @transport.timeout
    end
  end

  # Client mixin: reach through the input protocol to its transport.
  module Client
    def timeout=(value)
      @iprot.trans.timeout = value
    end

    def timeout
      @iprot.trans.timeout
    end
  end
end
|
module TransamCore
  # Gem version string, bumped by the release process.
  VERSION = '0.0.30a'
end
Bump version
module TransamCore
  # Gem version string, bumped by the release process.
  VERSION = '0.0.31'
end
|
module TransamCore
  # Gem version string, bumped by the release process.
  # NOTE(review): "0.7.01" is not canonical SemVer -- confirm intended.
  VERSION = '0.7.01'
end
Bump version
module TransamCore
  # Gem version string, bumped by the release process.
  VERSION = '0.7.2'
end
|
# Homebrew formula installing the ReactiveSocket command line tool.
class ReactivesocketCli < Formula
desc "ReactiveSocket CLI"
homepage "https://github.com/yschimke/reactivesocket-cli"
# Pre-release snapshot tarball; sha256 pins the exact artifact.
url "https://github.com/ReactiveSocket/reactivesocket-cli/releases/download/0.0.2-SNAPSHOT/reactivesocket-cli-0.0.2-SNAPSHOT.tar"
version "0.0.2-SNAPSHOT"
sha256 "d6137b50d66bb149e78c5e50cd0191213820cad1bae4a5dd5d3df8bfe8fa187c"
# The CLI runs on the JVM.
depends_on :java
def install
# Keep the distribution intact under libexec; expose only the launcher
# and its bash completion.
libexec.install Dir["*"]
bin.install_symlink "#{libexec}/bin/reactivesocket-cli"
bash_completion.install "#{libexec}/bash/completion.bash" => "reactivesocket-cli"
end
end
reactivesocket-cli 0.0.2
# Homebrew formula installing the ReactiveSocket command line tool.
class ReactivesocketCli < Formula
desc "ReactiveSocket CLI"
homepage "https://github.com/yschimke/reactivesocket-cli"
# Release tarball; sha256 pins the exact artifact.
url "https://github.com/ReactiveSocket/reactivesocket-cli/releases/download/0.0.2/reactivesocket-cli-0.0.2.tar"
version "0.0.2"
sha256 "a73ce35a2b865cb72a15ffb3019ab62c5ed71c7a29df71312282951a4f48a16f"
# The CLI runs on the JVM.
depends_on :java
def install
# Keep the distribution intact under libexec; expose only the launcher
# and its bash completion.
libexec.install Dir["*"]
bin.install_symlink "#{libexec}/bin/reactivesocket-cli"
bash_completion.install "#{libexec}/bash/completion.bash" => "reactivesocket-cli"
end
end
|
module TransamCore
  # Gem version string, bumped by the release process.
  VERSION = '0.2.0c'
end
Bump version
module TransamCore
  # Gem version string, bumped by the release process.
  VERSION = '0.2.0d'
end
|
module TransamCore
  # Gem version string, bumped by the release process.
  VERSION = '2.7.4'
end
Bump version
module TransamCore
  # Gem version string, bumped by the release process.
  VERSION = '2.8.0'
end
|
module TransamCore
  # Gem version string, bumped by the release process.
  VERSION = '0.0.24a'
end
Bump version
module TransamCore
  # Gem version string, bumped by the release process.
  VERSION = '0.0.24b'
end
|
module TransamSign
  # Gem version string, bumped by the release process.
  VERSION = '0.0.29'
end
Bump version
module TransamSign
  # Gem version string, bumped by the release process.
  VERSION = '0.0.30'
end
|
require 'multi_json'
require 'hashr'
require 'hot_bunnies'
require 'thread'
module Travis
  module Worker
    # Public: Represents a single Worker which is bound to a single VM instance.
    class Worker
      # Public: Returns the string name of the worker.
      attr_reader :name

      # Public: Returns the builds queue which the worker subscribes to.
      attr_reader :jobs_queue

      # Public: Returns the reporting channel used for streaming build results.
      attr_reader :reporting_channel

      # Public: Returns the Subscription to the jobs_queue.
      #
      # BUGFIX: this was `attr_reader :subscribtion`, a misspelled reader
      # backed by @subscribtion -- an ivar that is never assigned, so it
      # always returned nil while #run stored the real subscription in
      # @subscription. The reader now exposes @subscription; the misspelled
      # name is kept as an alias for backwards compatibility.
      attr_reader :subscription
      alias_method :subscribtion, :subscription

      # Public: Returns the virtual machine used by this worker.
      attr_reader :virtual_machine

      # Public: Instantiates a new worker.
      #
      # name              - The String name of the worker.
      # jobs_queue        - The Queue where jobs are published to.
      # reporting_channel - The Channel used for reporting build results.
      def initialize(name, jobs_queue, reporting_channel)
        @name = name
        @jobs_queue = jobs_queue
        @reporting_channel = reporting_channel
        @subscription = nil
        @virtual_machine = VirtualMachine::VirtualBox.new(name)
      end

      # Public: Prepares the VM and subscribes to the jobs_queue.
      #
      # Returns the worker.
      def run
        virtual_machine.prepare
        opts = { :ack => true, :blocking => false }
        @subscription = jobs_queue.subscribe(opts) do |meta, payload|
          begin
            process_job(meta, payload)
          rescue => e
            puts e.inspect
          end
        end
        announce("Subscribed to the '#{@jobs_queue.name}' queue.")
        self
      end

      # Public: Processes the build job using the messaging payload.
      #
      # metadata - The Headers from the messaging backend
      # payload  - The String payload.
      #
      # If the job fails due to the VM not being found, or if the ssh connection
      # encounters an error, then the job is requeued and the error is reraised.
      #
      # Returns true if the job completed correctly, or false if it fails
      # Raises VmNotFound if the VM can not be found
      # Raises Errno::ECONNREFUSED if the SSH connection is refused
      def process_job(metadata, payload)
        deserialized = deserialized_payload(payload)
        announce("Handling #{deserialized.inspect}")
        create_job_and_work(deserialized)
        announce("Done")
        confirm_job_completion(metadata)
        true
      rescue Travis::Worker::VirtualMachine::VmNotFound, Errno::ECONNREFUSED
        announce_error
        requeue(metadata)
        raise $!
      rescue Exception => e
        announce_error
        reject_job_completion(metadata)
        false
      end

      private

      # Internal: Creates a job from the payload, attaches a reporter
      # observer and executes it.
      #
      # payload - The job payload.
      def create_job_and_work(payload)
        job = Job.create(payload, virtual_machine)
        job.observers << Reporter.new(reporting_channel)
        job.work!
      end

      # Internal: Acks the message so the broker marks it as handled.
      def confirm_job_completion(metadata)
        metadata.ack
        announce("Acknowledged")
      end

      # Internal: Rejects the message without requeueing it.
      def reject_job_completion(metadata)
        announce("Caught an exception while dispatching a message:")
        announce_error
        metadata.reject
        announce("Rejected")
      end

      # Internal: Rejects the message and asks the broker to requeue it.
      def requeue(metadata)
        announce("#{$!.class.name}: #{$!.message}", $@)
        announce('Can not connect to VM. Stopping job processing ...')
        metadata.reject(:requeue => true)
      end

      # Internal: Prefixes log output with the worker name.
      def announce(what)
        puts "[#{name}] #{what}"
      end

      # Internal: Parses the JSON payload into a Hashr for dotted access.
      def deserialized_payload(payload)
        deserialized = MultiJson.decode(payload)
        Hashr.new(deserialized)
      end

      # Internal: Logs the current exception ($!) and its backtrace ($@).
      def announce_error
        announce("#{$!.class.name}: #{$!.message}")
        announce($@)
      end
    end
  end
end
minor update to the docs for the Worker class
require 'multi_json'
require 'hashr'
require 'hot_bunnies'
require 'thread'
module Travis
  module Worker
    # Represents a single Worker which is bound to a single VM instance.
    class Worker
      # Returns the string name of the worker.
      attr_reader :name

      # Returns the builds queue which the worker subscribes to.
      attr_reader :jobs_queue

      # Returns the reporting channel used for streaming build results.
      attr_reader :reporting_channel

      # Returns the Subscription to the jobs_queue.
      # (Fixes the misspelled `subscribtion` reader, which pointed at a
      # non-existent @subscribtion ivar and therefore always returned nil.)
      attr_reader :subscription

      # Deprecated: misspelled name kept so existing callers still work.
      alias subscribtion subscription

      # Returns the virtual machine used by this worker.
      attr_reader :virtual_machine

      # Instantiates a new worker.
      #
      # name              - The String name of the worker.
      # jobs_queue        - The Queue where jobs are published to.
      # reporting_channel - The Channel used for reporting build results.
      def initialize(name, jobs_queue, reporting_channel)
        @name = name
        @jobs_queue = jobs_queue
        @reporting_channel = reporting_channel
        @subscription = nil
        @virtual_machine = VirtualMachine::VirtualBox.new(name)
      end

      # Prepares the VM and subscribes to the jobs_queue.
      #
      # Returns the worker (self).
      def run
        virtual_machine.prepare
        opts = { :ack => true, :blocking => false }
        @subscription = jobs_queue.subscribe(opts) do |meta, payload|
          begin
            process_job(meta, payload)
          rescue => e
            puts e.inspect
          end
        end
        announce("Subscribed to the '#{@jobs_queue.name}' queue.")
        self
      end

      # Processes the build job using the messaging payload.
      #
      # metadata - The Headers from the messaging backend
      # payload  - The String payload.
      #
      # If the job fails due to the VM not being found, or if the ssh connection
      # encounters an error, then the job is requeued and the error is reraised.
      #
      # Returns true if the job completed correctly, or false if it fails
      # Raises VmNotFound if the VM can not be found
      # Raises Errno::ECONNREFUSED if the SSH connection is refused
      def process_job(metadata, payload)
        deserialized = deserialized_payload(payload)
        announce("Handling #{deserialized.inspect}")
        create_job_and_work(deserialized)
        announce("Done")
        confirm_job_completion(metadata)
        true
      rescue Travis::Worker::VirtualMachine::VmNotFound, Errno::ECONNREFUSED
        announce_error
        requeue(metadata)
        raise # bare raise re-raises the current exception ($!)
      # NOTE(review): rescuing Exception also swallows SignalException and
      # SystemExit; kept for compatibility, but StandardError is likely meant.
      rescue Exception
        announce_error
        reject_job_completion(metadata)
        false
      end

      private

      # Internal: Creates a job from the payload and executes it.
      #
      # payload - The deserialized job payload (Hashr).
      #
      # Returns the result of Job#work!.
      def create_job_and_work(payload)
        job = Job.create(payload, virtual_machine)
        job.observers << Reporter.new(reporting_channel)
        job.work!
      end

      # Internal: Acks the message so the broker marks the job as done.
      def confirm_job_completion(metadata)
        metadata.ack
        announce("Acknowledged")
      end

      # Internal: Rejects the message without requeueing it.
      def reject_job_completion(metadata)
        announce("Caught an exception while dispatching a message:")
        announce_error
        metadata.reject
        announce("Rejected")
      end

      # Internal: Rejects the message and asks the broker to requeue it.
      # (Fixes a crash: `announce` takes a single argument, but this method
      # previously passed the backtrace as a second argument.)
      def requeue(metadata)
        announce_error
        announce('Can not connect to VM. Stopping job processing ...')
        metadata.reject(:requeue => true)
      end

      # Internal: Prints a worker-prefixed log line.
      def announce(what)
        puts "[#{name}] #{what}"
      end

      # Internal: Parses the JSON payload into a Hashr for dot-style access.
      def deserialized_payload(payload)
        deserialized = MultiJson.decode(payload)
        Hashr.new(deserialized)
      end

      # Internal: Prints the current exception's message and backtrace.
      def announce_error
        announce("#{$!.class.name}: #{$!.message}")
        announce($@)
      end
    end
  end
end
|
# Interactive aeolus configure installation utility.
# Prompt the user for provider account and instance values and write them
# to a new puppet config files
require 'rubygems'
require 'highline/import'
puts "Press ^C at any time to terminate"
Signal.trap("INT") do
exit 1
end
def clear_screen
print "\e[H\e[2J"
true
end
NODE_YAML='/etc/aeolus-configure/nodes/custom'
IMAGE_TEMPLATE='/etc/aeolus-configure/custom_template.tdl'
PROFILE_RECIPE='/usr/share/aeolus-configure/modules/aeolus/manifests/profiles/custom.pp'
installed_component = nil
install_components = []
while ![:None, :All].include?(installed_component)
clear_screen
say "Select Aeolus Components to Install"
installed_component =
choose do |menu|
menu.prompt = "Install Aeolus Component: "
menu.choice :All
menu.choice :None
menu.choice :"Image Factory"
menu.choice :"Image Warehouse"
menu.choice :"Conductor"
end
if installed_component == :"Image Factory"
install_components << "- aeolus::image-factory"
elsif installed_component == :"Image Warehouse"
install_components << "- aeolus::iwhd"
elsif installed_component == :"Conductor"
install_components << "- aeolus::conductor"
elsif installed_component == :All
install_components << "- aeolus::conductor" <<
"- aeolus::image-factory" <<
"- aeolus::iwhd"
end
end
providers = []
if install_components.include? "- aeolus::conductor"
provider_port = 3001
profile=''
profile_requires = []
profile_packages = ''
profile_repos = ''
while clear_screen && agree("Add provider (y/n)? ")
name = ask("Cloud provider label: ")
type = choose do |menu|
menu.prompt = "Cloud provider type: "
menu.choice :mock
menu.choice :ec2
menu.choice :rackspace
menu.choice :rhevm
menu.choice :vsphere
end
providers << [name,type]
if type == :mock
profile += "aeolus::provider{#{name}:\n" +
" type => 'mock',\n" +
" port => '#{provider_port += 1}',\n" +
" require => Aeolus::Conductor::Login['admin'] }\n\n" +
"aeolus::conductor::provider::account{#{name}:\n" +
" provider => 'mock',\n" +
" type => 'mock',\n" +
" username => 'mockuser',\n" +
" password => 'mockpassword',\n" +
" require => Aeolus::Provider['#{name}'] }\n\n"
profile_requires << "Aeolus::Provider['#{name}']" <<
"Aeolus::Conductor::Provider::Account['#{name}']"
elsif type == :rackspace
username = ask("Rackspace Username: ")
api_key = ask("Rackspace API Key: "){ |q| q.echo = false }
profile += "aeolus::provider{#{name}:\n" +
" type => 'rackspace',\n" +
" port => '#{provider_port += 1}',\n" +
" require => Aeolus::Conductor::Login['admin'] }\n\n" +
"aeolus::conductor::provider::account{#{name}:\n" +
" provider => '#{name}',\n" +
" type => 'rackspace',\n" +
" username => '#{username}',\n" +
" password => '#{api_key}',\n" +
" require => Aeolus::Provider['#{name}'] }\n\n"
profile_requires << "Aeolus::Provider['#{name}']" <<
"Aeolus::Conductor::Provider::Account['#{name}']"
elsif type == :ec2
endpoint = ask("EC2 Endpoint: ")
access_key = ask("EC2 Access Key: ")
secret_access_key = ask("EC2 Secret Access Key: "){ |q| q.echo = false }
account_id = ask("EC2 Account ID: ")
public_cert = ask("EC2 Public Cert: ")
private_key = ask("EC2 Private Key: ")
profile += "aeolus::provider{#{name}:\n" +
" type => 'ec2',\n" +
" endpoint => '#{endpoint}',\n" +
" port => '#{provider_port += 1}',\n" +
" require => Aeolus::Conductor::Login['admin'] }\n\n" +
"aeolus::conductor::provider::account{#{name}:\n" +
" provider => '#{name}',\n" +
" type => 'ec2',\n" +
" username => '#{access_key}',\n" +
" password => '#{secret_access_key}',\n" +
" account_id => '#{account_id}',\n" +
" x509private => '#{private_key}',\n" +
" x509public => '#{public_cert}',\n" +
" require => Aeolus::Provider['#{name}'] }\n\n"
profile_requires << "Aeolus::Provider['#{name}']" <<
"Aeolus::Conductor::Provider::Account['#{name}']"
end
end
# TODO change to create image / deploy to providers (which to select)
while clear_screen && agree("Deploy an instance to providers (y/n)? ")
name = ask("Instance name: ")
providers.each { |provider|
pname,ptype = *provider
profile += "aeolus::image{#{pname}-#{name}:\n" +
" target => '#{ptype}',\n" +
" template => 'custom_template.tdl',\n" +
" provider => '#{pname}',\n" +
" hwp => '#{ptype == :rackspace ? 'hwp2' : 'hwp1' }',\n" +
" require => [Aeolus::Conductor::Provider::Account['#{pname}'], Aeolus::Conductor::Hwp['hwp1', 'hwp2']] }\n\n"
profile_requires << "Aeolus::Image['#{pname}-#{name}']"
}
while agree("Add package repo to instance (y/n)? ")
repo_name = ask("Name: ")
repo_uri = ask("URI: ")
profile_repos += "<repository name='#{repo_name}'>"
profile_repos += "<url>#{repo_uri}</url>"
profile_repos += "<signed>false</signed></repository>"
end
while agree("Add package to instance (y/n)? ") do
package_name = ask("Package Name: ")
profile_packages += "<package name='#{package_name}' />"
end
#
# while agree("Add file? ") do
# src_location = ask("File Source ")
# dst_location = ask("File Destination ")
# end
#
end
end
# create the profile
text = File.read PROFILE_RECIPE
File.open(PROFILE_RECIPE, 'w+'){|f|
requires = profile_requires.join(',')
requires += ", " unless requires == ""
f << text.gsub(/#AEOLUS_SEED_DATA_REQUIRES/, requires).
gsub(/#AEOLUS_SEED_DATA/, profile)
}
# create the node yaml
text = File.read NODE_YAML
File.open(NODE_YAML, 'w+'){|f|
f << text.gsub(/CUSTOM_CLASSES/, install_components.join("\n"))
}
# create the image template
text = File.read IMAGE_TEMPLATE
File.open(IMAGE_TEMPLATE, 'w+'){|f|
f << text.gsub(/<!--AEOLUS_PACKAGE_DATA-->/, profile_packages).
gsub(/<!--AEOLUS_REPO_DATA-->/, profile_repos)
}
Small fix for interactive installer use cases in which Conductor isn't selected
# Interactive aeolus configure installation utility.
# Prompt the user for provider account and instance values and write them
# to a new puppet config files
require 'rubygems'
require 'highline/import'
puts "Press ^C at any time to terminate"
Signal.trap("INT") do
exit 1
end
# Clears the terminal via ANSI escapes (cursor home + erase display).
# Always returns true so it can be chained inside boolean conditions,
# e.g. `while clear_screen && agree(...)`.
def clear_screen
  $stdout.print("\e[H\e[2J")
  true
end
# Paths of the puppet/config templates this installer rewrites in place.
NODE_YAML='/etc/aeolus-configure/nodes/custom'
IMAGE_TEMPLATE='/etc/aeolus-configure/custom_template.tdl'
PROFILE_RECIPE='/usr/share/aeolus-configure/modules/aeolus/manifests/profiles/custom.pp'

# Component selection menu: loops until the user picks :All or :None.
# NOTE(review): picking the same individual component twice appends a
# duplicate entry to install_components — confirm whether de-dup is wanted.
installed_component = nil
install_components = []
while ![:None, :All].include?(installed_component)
  clear_screen
  say "Select Aeolus Components to Install"
  installed_component =
    choose do |menu|
      menu.prompt = "Install Aeolus Component: "
      menu.choice :All
      menu.choice :None
      menu.choice :"Image Factory"
      menu.choice :"Image Warehouse"
      menu.choice :"Conductor"
    end
  # Map the menu selection onto the puppet class list written to NODE_YAML.
  if installed_component == :"Image Factory"
    install_components << "- aeolus::image-factory"
  elsif installed_component == :"Image Warehouse"
    install_components << "- aeolus::iwhd"
  elsif installed_component == :"Conductor"
    install_components << "- aeolus::conductor"
  elsif installed_component == :All
    install_components << "- aeolus::conductor" <<
      "- aeolus::image-factory" <<
      "- aeolus::iwhd"
  end
end
# Accumulators read by the config-file generation at the bottom of the
# script. They are initialized up front (not inside the Conductor branch)
# so they exist even when Conductor is not among the selected components.
providers = []
profile=''
profile_requires = []
profile_packages = ''
profile_repos = ''

# Provider and instance prompts only make sense when Conductor is installed.
if install_components.include? "- aeolus::conductor"
  provider_port = 3001
  while clear_screen && agree("Add provider (y/n)? ")
    name = ask("Cloud provider label: ")
    type = choose do |menu|
      menu.prompt = "Cloud provider type: "
      menu.choice :mock
      menu.choice :ec2
      menu.choice :rackspace
      menu.choice :rhevm
      menu.choice :vsphere
    end
    providers << [name,type]
    # Each branch appends a puppet provider + provider-account resource pair
    # to the profile and records the resource refs for the template's
    # dependency list.
    # NOTE(review): :rhevm and :vsphere are offered above but generate no
    # profile entries; instances deployed against them below will reference
    # provider accounts that were never defined — confirm intended.
    if type == :mock
      profile += "aeolus::provider{#{name}:\n" +
                 " type => 'mock',\n" +
                 " port => '#{provider_port += 1}',\n" +
                 " require => Aeolus::Conductor::Login['admin'] }\n\n" +
                 "aeolus::conductor::provider::account{#{name}:\n" +
                 " provider => 'mock',\n" +
                 " type => 'mock',\n" +
                 " username => 'mockuser',\n" +
                 " password => 'mockpassword',\n" +
                 " require => Aeolus::Provider['#{name}'] }\n\n"
      profile_requires << "Aeolus::Provider['#{name}']" <<
                          "Aeolus::Conductor::Provider::Account['#{name}']"
    elsif type == :rackspace
      username = ask("Rackspace Username: ")
      api_key = ask("Rackspace API Key: "){ |q| q.echo = false }
      profile += "aeolus::provider{#{name}:\n" +
                 " type => 'rackspace',\n" +
                 " port => '#{provider_port += 1}',\n" +
                 " require => Aeolus::Conductor::Login['admin'] }\n\n" +
                 "aeolus::conductor::provider::account{#{name}:\n" +
                 " provider => '#{name}',\n" +
                 " type => 'rackspace',\n" +
                 " username => '#{username}',\n" +
                 " password => '#{api_key}',\n" +
                 " require => Aeolus::Provider['#{name}'] }\n\n"
      profile_requires << "Aeolus::Provider['#{name}']" <<
                          "Aeolus::Conductor::Provider::Account['#{name}']"
    elsif type == :ec2
      endpoint = ask("EC2 Endpoint: ")
      access_key = ask("EC2 Access Key: ")
      secret_access_key = ask("EC2 Secret Access Key: "){ |q| q.echo = false }
      account_id = ask("EC2 Account ID: ")
      public_cert = ask("EC2 Public Cert: ")
      private_key = ask("EC2 Private Key: ")
      profile += "aeolus::provider{#{name}:\n" +
                 " type => 'ec2',\n" +
                 " endpoint => '#{endpoint}',\n" +
                 " port => '#{provider_port += 1}',\n" +
                 " require => Aeolus::Conductor::Login['admin'] }\n\n" +
                 "aeolus::conductor::provider::account{#{name}:\n" +
                 " provider => '#{name}',\n" +
                 " type => 'ec2',\n" +
                 " username => '#{access_key}',\n" +
                 " password => '#{secret_access_key}',\n" +
                 " account_id => '#{account_id}',\n" +
                 " x509private => '#{private_key}',\n" +
                 " x509public => '#{public_cert}',\n" +
                 " require => Aeolus::Provider['#{name}'] }\n\n"
      profile_requires << "Aeolus::Provider['#{name}']" <<
                          "Aeolus::Conductor::Provider::Account['#{name}']"
    end
  end
  # TODO change to create image / deploy to providers (which to select)
  while clear_screen && agree("Deploy an instance to providers (y/n)? ")
    name = ask("Instance name: ")
    # One aeolus::image resource per previously-configured provider;
    # rackspace uses hwp2, every other target hwp1.
    providers.each { |provider|
      pname,ptype = *provider
      profile += "aeolus::image{#{pname}-#{name}:\n" +
                 " target => '#{ptype}',\n" +
                 " template => 'custom_template.tdl',\n" +
                 " provider => '#{pname}',\n" +
                 " hwp => '#{ptype == :rackspace ? 'hwp2' : 'hwp1' }',\n" +
                 " require => [Aeolus::Conductor::Provider::Account['#{pname}'], Aeolus::Conductor::Hwp['hwp1', 'hwp2']] }\n\n"
      profile_requires << "Aeolus::Image['#{pname}-#{name}']"
    }
    # Repo/package fragments are accumulated as TDL XML snippets and spliced
    # into IMAGE_TEMPLATE at the end of the script.
    while agree("Add package repo to instance (y/n)? ")
      repo_name = ask("Name: ")
      repo_uri = ask("URI: ")
      profile_repos += "<repository name='#{repo_name}'>"
      profile_repos += "<url>#{repo_uri}</url>"
      profile_repos += "<signed>false</signed></repository>"
    end
    while agree("Add package to instance (y/n)? ") do
      package_name = ask("Package Name: ")
      profile_packages += "<package name='#{package_name}' />"
    end
    #
    # while agree("Add file? ") do
    #   src_location = ask("File Source ")
    #   dst_location = ask("File Destination ")
    # end
    #
  end
end
# create the profile: splice the generated resources and their dependency
# list into the custom profile puppet manifest (read-modify-write in place).
text = File.read PROFILE_RECIPE
File.open(PROFILE_RECIPE, 'w+'){|f|
  requires = profile_requires.join(',')
  # Trailing ", " so the template's own entries after the marker still parse.
  requires += ", " unless requires == ""
  f << text.gsub(/#AEOLUS_SEED_DATA_REQUIRES/, requires).
    gsub(/#AEOLUS_SEED_DATA/, profile)
}
# create the node yaml: inject the selected puppet classes.
text = File.read NODE_YAML
File.open(NODE_YAML, 'w+'){|f|
  f << text.gsub(/CUSTOM_CLASSES/, install_components.join("\n"))
}
# create the image template: inject the package/repo XML fragments.
text = File.read IMAGE_TEMPLATE
File.open(IMAGE_TEMPLATE, 'w+'){|f|
  f << text.gsub(/<!--AEOLUS_PACKAGE_DATA-->/, profile_packages).
    gsub(/<!--AEOLUS_REPO_DATA-->/, profile_repos)
}
|
# encoding: utf-8
require 'English'
require_relative 'choices'
require_relative 'enum_paginator'
require_relative 'paginator'
module TTY
class Prompt
# A class reponsible for rendering enumerated list menu.
# Used by {Prompt} to display static choice menu.
#
# @api private
class EnumList
PAGE_HELP = '(Press tab/right or left to reveal more choices)'.freeze
# Create instance of EnumList menu.
#
# @api public
def initialize(prompt, options = {})
@prompt = prompt
@prefix = options.fetch(:prefix) { @prompt.prefix }
@enum = options.fetch(:enum) { ')' }
@default = options.fetch(:default) { 1 }
@active_color = options.fetch(:active_color) { @prompt.active_color }
@help_color = options.fetch(:help_color) { @prompt.help_color }
@error_color = options.fetch(:error_color) { @prompt.error_color }
@cycle = options.fetch(:cycle) { false }
@input = nil
@done = false
@first_render = true
@failure = false
@active = @default
@choices = Choices.new
@per_page = options[:per_page]
@page_help = options[:page_help] || PAGE_HELP
@paginator = EnumPaginator.new
@page_active = @default
@prompt.subscribe(self)
end
# Set default option selected
#
# @api public
def default(default)
@default = default
end
# Set number of items per page
#
# @api public
def per_page(value)
@per_page = value
end
def page_size
(@per_page || Paginator::DEFAULT_PAGE_SIZE)
end
# Check if list is paginated
#
# @return [Boolean]
#
# @api private
def paginated?
@choices.size > page_size
end
# @param [String] text
# the help text to display per page
# @api pbulic
def page_help(text)
@page_help = text
end
# Set selecting active index using number pad
#
# @api public
def enum(value)
@enum = value
end
# Add a single choice
#
# @api public
def choice(*value, &block)
if block
@choices << (value << block)
else
@choices << value
end
end
# Add multiple choices
#
# @param [Array[Object]] values
# the values to add as choices
#
# @api public
def choices(values)
values.each { |val| choice(*val) }
end
# Call the list menu by passing question and choices
#
# @param [String] question
#
# @param
# @api public
def call(question, possibilities, &block)
choices(possibilities)
@question = question
block[self] if block
setup_defaults
render
end
def keypress(event)
if [:backspace, :delete].include?(event.key.name)
return if @input.empty?
@input.chop!
mark_choice_as_active
elsif event.value =~ /^\d+$/
@input += event.value
mark_choice_as_active
end
end
def keyreturn(*)
@failure = false
if (@input.to_i > 0 && @input.to_i <= @choices.size) || @input.empty?
@done = true
else
@input = ''
@failure = true
end
end
alias keyenter keyreturn
def keyright(*)
if (@page_active + page_size) <= @choices.size
@page_active += page_size
elsif @cycle
@page_active = 1
end
end
alias keytab keyright
def keyleft(*)
if (@page_active - page_size) >= 0
@page_active -= page_size
elsif @cycle
@page_active = @choices.size - 1
end
end
private
# Find active choice or set to default
#
# @return [nil]
#
# @api private
def mark_choice_as_active
if (@input.to_i > 0) && !@choices[@input.to_i - 1].nil?
@active = @input.to_i
else
@active = @default
end
@page_active = @active
end
# Validate default indexes to be within range
#
# @api private
def validate_defaults
return if @default >= 1 && @default <= @choices.size
raise ConfigurationError,
"default index `#{@default}` out of range (1 - #{@choices.size})"
end
# Setup default option and active selection
#
# @api private
def setup_defaults
validate_defaults
mark_choice_as_active
end
# Render a selection list.
#
# By default the result is printed out.
#
# @return [Object] value
# return the selected value
#
# @api private
def render
@input = ''
until @done
question = render_question
@prompt.print(question)
@prompt.print(render_error) if @failure
if paginated? && !@done
@prompt.print(render_page_help)
end
@prompt.read_keypress
question_lines = question.split($INPUT_RECORD_SEPARATOR, -1)
@prompt.print(refresh(question_lines_count(question_lines)))
end
@prompt.print(render_question)
answer
end
# Count how many screen lines the question spans
#
# @return [Integer]
#
# @api private
def question_lines_count(question_lines)
question_lines.reduce(0) do |acc, line|
acc + @prompt.count_screen_lines(line)
end
end
# Find value for the choice selected
#
# @return [nil, Object]
#
# @api private
def answer
@choices[@active - 1].value
end
# Determine area of the screen to clear
#
# @param [Integer] lines
# the lines to clear
#
# @return [String]
#
# @api private
def refresh(lines)
@prompt.clear_lines(lines) +
@prompt.cursor.clear_screen_down
end
# Render question with the menu options
#
# @return [String]
#
# @api private
def render_question
header = "#{@prefix}#{@question} #{render_header}\n"
unless @done
header << render_menu
header << render_footer
end
header
end
# Error message when incorrect index chosen
#
# @api private
def error_message
error = 'Please enter a valid number'
"\n" + @prompt.decorate('>>', @error_color) + ' ' + error
end
# Render error message and return cursor to position of input
#
# @return [String]
#
# @api private
def render_error
error = error_message.dup
if !paginated?
error << @prompt.cursor.prev_line
error << @prompt.cursor.forward(render_footer.size)
end
error
end
# Render chosen option
#
# @return [String]
#
# @api private
def render_header
return '' unless @done
return '' unless @active
selected_item = @choices[@active - 1].name.to_s
@prompt.decorate(selected_item, @active_color)
end
# Render footer for the indexed menu
#
# @return [String]
#
# @api private
def render_footer
" Choose 1-#{@choices.size} [#{@default}]: #{@input}"
end
# Pagination help message
#
# @return [String]
#
# @api private
def page_help_message
return '' unless paginated?
"\n" + @prompt.decorate(@page_help, @help_color)
end
# Render page help
#
# @return [String]
#
# @api private
def render_page_help
help = page_help_message.dup
if @failure
help << @prompt.cursor.prev_line
end
help << @prompt.cursor.prev_line
help << @prompt.cursor.forward(render_footer.size)
end
# Render menu with indexed choices to select from
#
# @return [String]
#
# @api private
def render_menu
output = ''
@paginator.paginate(@choices, @page_active, @per_page) do |choice, index|
num = (index + 1).to_s + @enum + ' '
selected = ' ' * 2 + num + choice.name
output << if index + 1 == @active
@prompt.decorate(selected.to_s, @active_color)
else
selected
end
output << "\n"
end
output
end
end # EnumList
end # Prompt
end # TTY
Change to ensure no string mutations
# encoding: utf-8
# frozen_string_literal: true
require 'English'
require_relative 'choices'
require_relative 'enum_paginator'
require_relative 'paginator'
module TTY
  class Prompt
    # A class responsible for rendering an enumerated list menu.
    # Used by {Prompt} to display a static choice menu.
    #
    # @api private
    class EnumList
      PAGE_HELP = '(Press tab/right or left to reveal more choices)'.freeze

      # Create instance of EnumList menu.
      #
      # @param [Prompt] prompt
      #   prompt used to read keypresses and print output; this instance
      #   subscribes itself to the prompt's key events
      # @param [Hash] options
      #   rendering options: :prefix, :enum, :default, :active_color,
      #   :help_color, :error_color, :cycle, :per_page, :page_help
      #
      # @api public
      def initialize(prompt, options = {})
        @prompt = prompt
        @prefix = options.fetch(:prefix) { @prompt.prefix }
        @enum = options.fetch(:enum) { ')' }
        @default = options.fetch(:default) { 1 }
        @active_color = options.fetch(:active_color) { @prompt.active_color }
        @help_color = options.fetch(:help_color) { @prompt.help_color }
        @error_color = options.fetch(:error_color) { @prompt.error_color }
        @cycle = options.fetch(:cycle) { false }
        @input = nil
        @done = false
        # NOTE(review): @first_render is set but never read in this class.
        @first_render = true
        @failure = false
        @active = @default
        @choices = Choices.new
        @per_page = options[:per_page]
        @page_help = options[:page_help] || PAGE_HELP
        @paginator = EnumPaginator.new
        @page_active = @default
        @prompt.subscribe(self)
      end

      # Set default option selected (1-based index)
      #
      # @api public
      def default(default)
        @default = default
      end

      # Set number of items per page
      #
      # @api public
      def per_page(value)
        @per_page = value
      end

      # Number of choices rendered per page (paginator default as fallback)
      def page_size
        (@per_page || Paginator::DEFAULT_PAGE_SIZE)
      end

      # Check if list is paginated
      #
      # @return [Boolean]
      #
      # @api private
      def paginated?
        @choices.size > page_size
      end

      # @param [String] text
      #   the help text to display per page
      # @api public
      def page_help(text)
        @page_help = text
      end

      # Set the enumeration character appended after each index, e.g. ')'
      #
      # @api public
      def enum(value)
        @enum = value
      end

      # Add a single choice
      #
      # @api public
      def choice(*value, &block)
        if block
          @choices << (value << block)
        else
          @choices << value
        end
      end

      # Add multiple choices
      #
      # @param [Array[Object]] values
      #   the values to add as choices
      #
      # @api public
      def choices(values)
        values.each { |val| choice(*val) }
      end

      # Call the list menu by passing question and choices
      #
      # @param [String] question
      #   the question shown above the menu
      # @param [Array] possibilities
      #   the selectable choices
      #
      # @return [Object] the value of the selected choice
      #
      # @api public
      def call(question, possibilities, &block)
        choices(possibilities)
        @question = question
        block[self] if block
        setup_defaults
        render
      end

      # Key-event handler: digits extend the typed index, backspace/delete
      # edit it; any change re-evaluates the active choice.
      def keypress(event)
        if [:backspace, :delete].include?(event.key.name)
          return if @input.empty?
          @input.chop!
          mark_choice_as_active
        elsif event.value =~ /^\d+$/
          @input += event.value
          mark_choice_as_active
        end
      end

      # Accept the typed index; empty input accepts the default.
      # Out-of-range input flags a failure and clears the input.
      def keyreturn(*)
        @failure = false
        if (@input.to_i > 0 && @input.to_i <= @choices.size) || @input.empty?
          @done = true
        else
          @input = ''
          @failure = true
        end
      end
      alias keyenter keyreturn

      # Advance one page (wrap to the first page when cycling is enabled).
      def keyright(*)
        if (@page_active + page_size) <= @choices.size
          @page_active += page_size
        elsif @cycle
          @page_active = 1
        end
      end
      alias keytab keyright

      # Go back one page (wrap when cycling is enabled).
      # NOTE(review): the cycle branch sets @page_active to size - 1, which
      # is the start of the last page only for some size/page combinations —
      # confirm the intended wrap target.
      def keyleft(*)
        if (@page_active - page_size) >= 0
          @page_active -= page_size
        elsif @cycle
          @page_active = @choices.size - 1
        end
      end

      private

      # Find active choice or set to default
      #
      # @return [Integer] the new page-active index (callers ignore it)
      #
      # @api private
      def mark_choice_as_active
        if (@input.to_i > 0) && !@choices[@input.to_i - 1].nil?
          @active = @input.to_i
        else
          @active = @default
        end
        @page_active = @active
      end

      # Validate default indexes to be within range
      #
      # @raise [ConfigurationError] when the default index is out of range
      #
      # @api private
      def validate_defaults
        return if @default >= 1 && @default <= @choices.size
        raise ConfigurationError,
              "default index `#{@default}` out of range (1 - #{@choices.size})"
      end

      # Setup default option and active selection
      #
      # @api private
      def setup_defaults
        validate_defaults
        mark_choice_as_active
      end

      # Render a selection list.
      #
      # By default the result is printed out.
      #
      # @return [Object] value
      #   return the selected value
      #
      # @api private
      def render
        @input = ''
        until @done
          question = render_question
          @prompt.print(question)
          @prompt.print(render_error) if @failure
          if paginated? && !@done
            @prompt.print(render_page_help)
          end
          @prompt.read_keypress
          # Split preserving trailing empties so blank lines are counted.
          question_lines = question.split($INPUT_RECORD_SEPARATOR, -1)
          @prompt.print(refresh(question_lines_count(question_lines)))
        end
        @prompt.print(render_question)
        answer
      end

      # Count how many screen lines the question spans
      #
      # @return [Integer]
      #
      # @api private
      def question_lines_count(question_lines)
        question_lines.reduce(0) do |acc, line|
          acc + @prompt.count_screen_lines(line)
        end
      end

      # Find value for the choice selected
      #
      # @return [nil, Object]
      #
      # @api private
      def answer
        @choices[@active - 1].value
      end

      # Determine area of the screen to clear
      #
      # @param [Integer] lines
      #   the lines to clear
      #
      # @return [String]
      #
      # @api private
      def refresh(lines)
        @prompt.clear_lines(lines) +
          @prompt.cursor.clear_screen_down
      end

      # Render question with the menu options
      #
      # @return [String]
      #
      # @api private
      def render_question
        # Built as an array and joined so no string literal is mutated
        # (this file is frozen_string_literal).
        header = ["#{@prefix}#{@question} #{render_header}\n"]
        unless @done
          header << render_menu
          header << render_footer
        end
        header.join
      end

      # Error message when incorrect index chosen
      #
      # @api private
      def error_message
        error = 'Please enter a valid number'
        "\n" + @prompt.decorate('>>', @error_color) + ' ' + error
      end

      # Render error message and return cursor to position of input
      #
      # @return [String]
      #
      # @api private
      def render_error
        error = error_message.dup
        if !paginated?
          error << @prompt.cursor.prev_line
          error << @prompt.cursor.forward(render_footer.size)
        end
        error
      end

      # Render chosen option (only once the selection is done)
      #
      # @return [String]
      #
      # @api private
      def render_header
        return '' unless @done
        return '' unless @active
        selected_item = @choices[@active - 1].name.to_s
        @prompt.decorate(selected_item, @active_color)
      end

      # Render footer for the indexed menu
      #
      # @return [String]
      #
      # @api private
      def render_footer
        "  Choose 1-#{@choices.size} [#{@default}]: #{@input}"
      end

      # Pagination help message
      #
      # @return [String]
      #
      # @api private
      def page_help_message
        return '' unless paginated?
        "\n" + @prompt.decorate(@page_help, @help_color)
      end

      # Render page help, moving the cursor back up to the input position
      #
      # @return [String]
      #
      # @api private
      def render_page_help
        help = page_help_message.dup
        if @failure
          help << @prompt.cursor.prev_line
        end
        help << @prompt.cursor.prev_line
        help << @prompt.cursor.forward(render_footer.size)
      end

      # Render menu with indexed choices to select from
      #
      # @return [String]
      #
      # @api private
      def render_menu
        # Array + join instead of appending to a string literal (frozen).
        output = []
        @paginator.paginate(@choices, @page_active, @per_page) do |choice, index|
          num = (index + 1).to_s + @enum + ' '
          selected = ' ' * 2 + num + choice.name
          output << if index + 1 == @active
                      @prompt.decorate(selected.to_s, @active_color)
                    else
                      selected
                    end
          output << "\n"
        end
        output.join
      end
    end # EnumList
  end # Prompt
end # TTY
|
require_relative 'sudoer'
require_relative 'elb_swapper'
require_relative 'aws_wrapper'
class Ec2ElbStarter < AwsWrapper
DEVICE_PATH = '/dev/sdb'
MOUNT_PATH = '/mnt/ebs'
def start(zone_id, name, size_in_gb)
create_key(name)
LOGGER.info("Created PK for #{name}")
create_group(name)
add_current_user_to_group(name)
LOGGER.info("Created group #{name}, and added current user")
instance_ids = start_instances(2, name).map(&:instance_id)
LOGGER.info("Started 2 EC2 instances #{instance_ids}")
instance_ids.each do |instance_id|
create_and_attach_volume(instance_id, DEVICE_PATH, size_in_gb)
end
LOGGER.info("Attached EBS volume to each instance")
elb_names = elb_names(name)
elb_a_names = elb_names.map{ |name| create_elb(name) }
instance_ids.zip(elb_names).each do |instance_id, elb_name|
register_instance_with_elb(instance_id, elb_name)
end
LOGGER.info("Created load balancers and registered instances")
# Maybe this would be better managed than inline?
# But then that would be another thing to clean up.
put_group_policy(name, {
'Effect' => 'Allow',
'Action' => 'elasticloadbalancing:*', # TODO: tighten
'Resource' => elb_names.map { |elb_name| elb_arn(elb_name) }
})
LOGGER.info("Create group policy for ELB")
name_target_pairs = cname_pair(name).zip(elb_a_names)
create_dns_cname_records(zone_id, name_target_pairs)
LOGGER.info("Created CNAMEs")
Sudoer.new(debug: @debug, availability_zone: @availability_zone).tap do |sudoer|
command = [
"yum update", # TODO: non-interactive?
"mkfs -t ext4 #{DEVICE_PATH}",
"mkdir #{MOUNT_PATH}",
"mount #{DEVICE_PATH} #{MOUNT_PATH}"
].join (' && ')
sudoer.sudo(zone_id, "demo.#{name}", command)
LOGGER.info("Swap instances and do it again.")
ElbSwapper.new(debug: @debug, availability_zone: @availability_zone).swap(zone_id, name)
sudoer.sudo(zone_id, "demo.#{name}", command)
end
LOGGER.info("Instances are up / EBS volumes are mounted.")
end
end
Make yum non-interactive.
require_relative 'sudoer'
require_relative 'elb_swapper'
require_relative 'aws_wrapper'
class Ec2ElbStarter < AwsWrapper
  # Device and mount point for the EBS data volume attached to each instance.
  DEVICE_PATH = '/dev/sdb'
  MOUNT_PATH = '/mnt/ebs'

  # Provisions a redundant pair of EC2 instances, each behind its own ELB,
  # with an attached/formatted EBS volume and DNS CNAMEs pointing at the ELBs.
  #
  # zone_id    - Route53 hosted zone that receives the CNAME records.
  # name       - base name used for the key pair, IAM group, ELBs and DNS.
  # size_in_gb - size of the EBS volume attached to each instance.
  #
  # Performs AWS side effects only; the return value is not meaningful.
  def start(zone_id, name, size_in_gb)
    create_key(name)
    LOGGER.info("Created PK for #{name}")
    create_group(name)
    add_current_user_to_group(name)
    LOGGER.info("Created group #{name}, and added current user")
    instance_ids = start_instances(2, name).map(&:instance_id)
    LOGGER.info("Started 2 EC2 instances #{instance_ids}")
    instance_ids.each do |instance_id|
      create_and_attach_volume(instance_id, DEVICE_PATH, size_in_gb)
    end
    LOGGER.info("Attached EBS volume to each instance")
    elb_names = elb_names(name)
    # Distinct block variable: the original `|name|` shadowed the method
    # argument `name`, hiding the base name inside the block.
    elb_a_names = elb_names.map { |elb_name| create_elb(elb_name) }
    instance_ids.zip(elb_names).each do |instance_id, elb_name|
      register_instance_with_elb(instance_id, elb_name)
    end
    LOGGER.info("Created load balancers and registered instances")
    # Maybe this would be better managed than inline?
    # But then that would be another thing to clean up.
    put_group_policy(name, {
      'Effect' => 'Allow',
      'Action' => 'elasticloadbalancing:*', # TODO: tighten
      'Resource' => elb_names.map { |elb_name| elb_arn(elb_name) }
    })
    LOGGER.info("Create group policy for ELB")
    name_target_pairs = cname_pair(name).zip(elb_a_names)
    create_dns_cname_records(zone_id, name_target_pairs)
    LOGGER.info("Created CNAMEs")
    Sudoer.new(debug: @debug, availability_zone: @availability_zone).tap do |sudoer|
      command = [
        "yum update --assumeyes", # non-interactive: must not prompt over SSH
        "mkfs -t ext4 #{DEVICE_PATH}",
        "mkdir #{MOUNT_PATH}",
        "mount #{DEVICE_PATH} #{MOUNT_PATH}"
      ].join(' && ') # no space before the paren: `join (...)` parses but warns
      sudoer.sudo(zone_id, "demo.#{name}", command)
      LOGGER.info("Swap instances and do it again.")
      ElbSwapper.new(debug: @debug, availability_zone: @availability_zone).swap(zone_id, name)
      sudoer.sudo(zone_id, "demo.#{name}", command)
    end
    LOGGER.info("Instances are up / EBS volumes are mounted.")
  end
end
|
# Namespace shared by Vagrant plugins.
module VagrantPlugins
  # Namespace for the Adam plugin.
  module Adam
    # Gem version string; also used by the gemspec.
    VERSION = '0.4.0a'
  end
end
Bump version up
# Namespace shared by Vagrant plugins.
module VagrantPlugins
  # Namespace for the Adam plugin.
  module Adam
    # Gem version string; also used by the gemspec.
    VERSION = '0.5.0a'
  end
end
|
#--------------------------------------------------------------------------
# Copyright (c) Microsoft Open Technologies, Inc.
# All Rights Reserved. Licensed under the Apache 2.0 License.
#--------------------------------------------------------------------------
require 'vagrant'
require 'azure'
module VagrantPlugins
  module WinAzure
    # Vagrant (plugin v2) provider configuration for Windows Azure.
    #
    # Every attribute starts as UNSET_VALUE; #finalize! resolves unset
    # credentials from the standard AZURE_* environment variables and
    # everything else to nil or a sensible default.
    class Config < Vagrant.plugin('2', :config)
      attr_accessor :mgmt_certificate
      attr_accessor :mgmt_endpoint
      attr_accessor :subscription_id
      attr_accessor :storage_acct_name
      attr_accessor :storage_access_key
      attr_accessor :vm_name
      attr_accessor :vm_user
      attr_accessor :vm_password
      attr_accessor :vm_image
      attr_accessor :vm_location
      attr_accessor :vm_affinity_group
      attr_accessor :cloud_service_name
      attr_accessor :deployment_name
      attr_accessor :tcp_endpoints
      attr_accessor :ssh_private_key_file
      attr_accessor :ssh_certificate_file
      attr_accessor :ssh_port
      attr_accessor :vm_size
      attr_accessor :winrm_transport
      attr_accessor :winrm_http_port
      attr_accessor :winrm_https_port
      attr_accessor :availability_set_name
      attr_accessor :state_read_timeout

      def initialize
        @storage_acct_name = UNSET_VALUE
        @storage_access_key = UNSET_VALUE
        @mgmt_certificate = UNSET_VALUE
        @mgmt_endpoint = UNSET_VALUE
        @subscription_id = UNSET_VALUE
        @vm_name = UNSET_VALUE
        @vm_user = UNSET_VALUE
        @vm_password = UNSET_VALUE
        @vm_image = UNSET_VALUE
        @vm_location = UNSET_VALUE
        @vm_affinity_group = UNSET_VALUE
        @cloud_service_name = UNSET_VALUE
        @deployment_name = UNSET_VALUE
        @tcp_endpoints = UNSET_VALUE
        @ssh_private_key_file = UNSET_VALUE
        @ssh_certificate_file = UNSET_VALUE
        @ssh_port = UNSET_VALUE
        @vm_size = UNSET_VALUE
        @winrm_transport = UNSET_VALUE
        @winrm_http_port = UNSET_VALUE
        @winrm_https_port = UNSET_VALUE
        @availability_set_name = UNSET_VALUE
        @state_read_timeout = UNSET_VALUE
      end

      # Resolve UNSET_VALUE placeholders: credentials fall back to AZURE_*
      # environment variables, everything else to nil or a default, then
      # generate storage/cloud-service names when none were provided.
      def finalize!
        @storage_acct_name = ENV["AZURE_STORAGE_ACCOUNT"] if \
          @storage_acct_name == UNSET_VALUE
        @storage_access_key = ENV["AZURE_STORAGE_ACCESS_KEY"] if \
          @storage_access_key == UNSET_VALUE
        @mgmt_certificate = ENV["AZURE_MANAGEMENT_CERTIFICATE"] if \
          @mgmt_certificate == UNSET_VALUE
        @mgmt_endpoint = ENV["AZURE_MANAGEMENT_ENDPOINT"] if \
          @mgmt_endpoint == UNSET_VALUE
        @subscription_id = ENV["AZURE_SUBSCRIPTION_ID"] if \
          @subscription_id == UNSET_VALUE
        @vm_name = nil if @vm_name == UNSET_VALUE
        @vm_user = 'vagrant' if @vm_user == UNSET_VALUE
        @vm_password = nil if @vm_password == UNSET_VALUE
        @vm_image = nil if @vm_image == UNSET_VALUE
        @vm_location = nil if @vm_location == UNSET_VALUE
        @vm_affinity_group = nil if @vm_affinity_group == UNSET_VALUE
        @cloud_service_name = nil if @cloud_service_name == UNSET_VALUE
        @deployment_name = nil if @deployment_name == UNSET_VALUE
        @tcp_endpoints = nil if @tcp_endpoints == UNSET_VALUE
        @ssh_private_key_file = nil if @ssh_private_key_file == UNSET_VALUE
        @ssh_certificate_file = nil if @ssh_certificate_file == UNSET_VALUE
        @ssh_port = nil if @ssh_port == UNSET_VALUE
        @vm_size = nil if @vm_size == UNSET_VALUE
        @winrm_transport = nil if @winrm_transport == UNSET_VALUE
        @winrm_http_port = nil if @winrm_http_port == UNSET_VALUE
        @winrm_https_port = nil if @winrm_https_port == UNSET_VALUE
        @availability_set_name = nil if @availability_set_name == UNSET_VALUE
        @state_read_timeout = 360 if @state_read_timeout == UNSET_VALUE
        # This done due to a bug in Ruby SDK - it doesn't generate a storage
        # account name if add_role = true.
        # Storage account names: lowercase alphanumerics, max 24 chars.
        if @storage_acct_name.nil? || @storage_acct_name.empty?
          @storage_acct_name = Azure::Core::Utility.random_string(
            "#{@vm_name}storage"
          ).gsub(/[^0-9a-z ]/i, '').downcase[0..23]
        end
        if @cloud_service_name.nil? || @cloud_service_name.empty?
          @cloud_service_name = Azure::Core::Utility.random_string(
            "#{@vm_name}-service-"
          )
        end
      end

      # Merge two configs; values set on +other+ win over ours.
      def merge(other)
        super.tap do |result|
          result.mgmt_certificate = other.mgmt_certificate || \
            self.mgmt_certificate
          result.mgmt_endpoint = other.mgmt_endpoint || \
            self.mgmt_endpoint
          result.subscription_id = other.subscription_id || \
            self.subscription_id
          # FIX: was `result.storage_account_name=`, which is not a defined
          # accessor (the attribute is `storage_acct_name`) and raised
          # NoMethodError whenever two configs were merged.
          result.storage_acct_name = other.storage_acct_name || \
            self.storage_acct_name
          result.storage_access_key = other.storage_access_key || \
            self.storage_access_key
        end
      end

      # Returns { "Windows Azure Provider" => [error keys] } for Vagrant.
      def validate(machine)
        errors = _detected_errors
        # Azure connection properties related validation.
        errors << "vagrant_azure.subscription_id.required" if \
          @subscription_id.nil?
        errors << "vagrant_azure.mgmt_certificate.required" if \
          @mgmt_certificate.nil?
        errors << "vagrant_azure.mgmt_endpoint.required" if \
          @mgmt_endpoint.nil?
        # Azure Virtual Machine related validation
        errors << "vagrant_azure.vm_name.required" if @vm_name.nil?
        { "Windows Azure Provider" => errors }
      end
    end
  end
end
Fix typo in config: Config#merge assigned `storage_account_name`, but the accessor is `storage_acct_name`, so merging two configs raised NoMethodError.
#--------------------------------------------------------------------------
# Copyright (c) Microsoft Open Technologies, Inc.
# All Rights Reserved. Licensed under the Apache 2.0 License.
#--------------------------------------------------------------------------
require 'vagrant'
require 'azure'
# Namespace for Vagrant plugins.
module VagrantPlugins
  module WinAzure
    # Vagrant (plugin v2) provider configuration for Windows Azure.
    #
    # Every attribute starts as UNSET_VALUE; #finalize! resolves unset
    # credentials from the standard AZURE_* environment variables and
    # everything else to nil or a sensible default.
    class Config < Vagrant.plugin('2', :config)
      attr_accessor :mgmt_certificate
      attr_accessor :mgmt_endpoint
      attr_accessor :subscription_id
      attr_accessor :storage_acct_name
      attr_accessor :storage_access_key
      attr_accessor :vm_name
      attr_accessor :vm_user
      attr_accessor :vm_password
      attr_accessor :vm_image
      attr_accessor :vm_location
      attr_accessor :vm_affinity_group
      attr_accessor :cloud_service_name
      attr_accessor :deployment_name
      attr_accessor :tcp_endpoints
      attr_accessor :ssh_private_key_file
      attr_accessor :ssh_certificate_file
      attr_accessor :ssh_port
      attr_accessor :vm_size
      attr_accessor :winrm_transport
      attr_accessor :winrm_http_port
      attr_accessor :winrm_https_port
      attr_accessor :availability_set_name
      attr_accessor :state_read_timeout

      # Mark every attribute unset so finalize! can tell "not configured"
      # apart from an explicit nil.
      def initialize
        @storage_acct_name = UNSET_VALUE
        @storage_access_key = UNSET_VALUE
        @mgmt_certificate = UNSET_VALUE
        @mgmt_endpoint = UNSET_VALUE
        @subscription_id = UNSET_VALUE
        @vm_name = UNSET_VALUE
        @vm_user = UNSET_VALUE
        @vm_password = UNSET_VALUE
        @vm_image = UNSET_VALUE
        @vm_location = UNSET_VALUE
        @vm_affinity_group = UNSET_VALUE
        @cloud_service_name = UNSET_VALUE
        @deployment_name = UNSET_VALUE
        @tcp_endpoints = UNSET_VALUE
        @ssh_private_key_file = UNSET_VALUE
        @ssh_certificate_file = UNSET_VALUE
        @ssh_port = UNSET_VALUE
        @vm_size = UNSET_VALUE
        @winrm_transport = UNSET_VALUE
        @winrm_http_port = UNSET_VALUE
        @winrm_https_port = UNSET_VALUE
        @availability_set_name = UNSET_VALUE
        @state_read_timeout = UNSET_VALUE
      end

      # Resolve UNSET_VALUE placeholders: credentials fall back to AZURE_*
      # environment variables, everything else to nil or a default, then
      # generate storage/cloud-service names when none were provided.
      def finalize!
        @storage_acct_name = ENV["AZURE_STORAGE_ACCOUNT"] if \
          @storage_acct_name == UNSET_VALUE
        @storage_access_key = ENV["AZURE_STORAGE_ACCESS_KEY"] if \
          @storage_access_key == UNSET_VALUE
        @mgmt_certificate = ENV["AZURE_MANAGEMENT_CERTIFICATE"] if \
          @mgmt_certificate == UNSET_VALUE
        @mgmt_endpoint = ENV["AZURE_MANAGEMENT_ENDPOINT"] if \
          @mgmt_endpoint == UNSET_VALUE
        @subscription_id = ENV["AZURE_SUBSCRIPTION_ID"] if \
          @subscription_id == UNSET_VALUE
        @vm_name = nil if @vm_name == UNSET_VALUE
        @vm_user = 'vagrant' if @vm_user == UNSET_VALUE
        @vm_password = nil if @vm_password == UNSET_VALUE
        @vm_image = nil if @vm_image == UNSET_VALUE
        @vm_location = nil if @vm_location == UNSET_VALUE
        @vm_affinity_group = nil if @vm_affinity_group == UNSET_VALUE
        @cloud_service_name = nil if @cloud_service_name == UNSET_VALUE
        @deployment_name = nil if @deployment_name == UNSET_VALUE
        @tcp_endpoints = nil if @tcp_endpoints == UNSET_VALUE
        @ssh_private_key_file = nil if @ssh_private_key_file == UNSET_VALUE
        @ssh_certificate_file = nil if @ssh_certificate_file == UNSET_VALUE
        @ssh_port = nil if @ssh_port == UNSET_VALUE
        @vm_size = nil if @vm_size == UNSET_VALUE
        @winrm_transport = nil if @winrm_transport == UNSET_VALUE
        @winrm_http_port = nil if @winrm_http_port == UNSET_VALUE
        @winrm_https_port = nil if @winrm_https_port == UNSET_VALUE
        @availability_set_name = nil if @availability_set_name == UNSET_VALUE
        @state_read_timeout = 360 if @state_read_timeout == UNSET_VALUE
        # This done due to a bug in Ruby SDK - it doesn't generate a storage
        # account name if add_role = true
        # (storage account names: lowercase alphanumerics, max 24 chars).
        if @storage_acct_name.nil? || @storage_acct_name.empty?
          @storage_acct_name = Azure::Core::Utility.random_string(
            "#{@vm_name}storage"
          ).gsub(/[^0-9a-z ]/i, '').downcase[0..23]
        end
        if @cloud_service_name.nil? || @cloud_service_name.empty?
          @cloud_service_name = Azure::Core::Utility.random_string(
            "#{@vm_name}-service-"
          )
        end
      end

      # Merge two configs; values set on +other+ win over ours.
      def merge(other)
        super.tap do |result|
          result.mgmt_certificate = other.mgmt_certificate || \
            self.mgmt_certificate
          result.mgmt_endpoint = other.mgmt_endpoint || \
            self.mgmt_endpoint
          result.subscription_id = other.subscription_id || \
            self.subscription_id
          result.storage_acct_name = other.storage_acct_name || \
            self.storage_acct_name
          result.storage_access_key = other.storage_access_key || \
            self.storage_access_key
        end
      end

      # Returns { "Windows Azure Provider" => [error keys] } for Vagrant.
      def validate(machine)
        errors = _detected_errors
        # Azure connection properties related validation.
        errors << "vagrant_azure.subscription_id.required" if \
          @subscription_id.nil?
        errors << "vagrant_azure.mgmt_certificate.required" if \
          @mgmt_certificate.nil?
        errors << "vagrant_azure.mgmt_endpoint.required" if \
          @mgmt_endpoint.nil?
        # Azure Virtual Machine related validation
        errors << "vagrant_azure.vm_name.required" if @vm_name.nil?
        { "Windows Azure Provider" => errors }
      end
    end
  end
end
|
module ValRequired
VERSION = "0.0.2"
end
Bumped version.
module ValRequired
VERSION = "0.0.3"
end
|
# encoding: utf-8
# 选曲列表
require 'view/dispose_bitmap'
module View
class SongList
  # Song-selection list: renders the window of songs around the current
  # selection plus the selected song's clear mark and best score.
  class Songs < Sprite
    include DisposeBitmap
    FONT = Font.new('simhei', 14)
    LINE_HEIGHT = 30
    SCORE_HEIGHT = 15

    def initialize(_)
      super
      self.bitmap = Bitmap.new(Graphics.width, Graphics.height)
      bitmap.font = FONT
      refresh
    end

    # Redraw only when the selected song has changed.
    def update
      refresh unless @songdata.equal?(songdata)
    end

    private

    # Song data at +offset+ rows from the current selection.
    def songdata(offset = 0)
      Scene.scene.songdata(offset)
    end

    def refresh
      @songdata = songdata
      bitmap.clear
      draw_up
      draw_current
      draw_down
    end

    # Draw the compact one-line info (title, level) for a song.
    def draw_simple_info(songdata, x, y, width = 340, height = LINE_HEIGHT)
      bitmap.draw_text(x, y, width, height, songdata.title)
      bitmap.draw_text(x, y, width, height, "★#{songdata.level}", 2)
    end

    # Draw the songs above the selection.
    def draw_up
      (-3).upto(-1) do |i|
        draw_simple_info(songdata(i), 60, 105 + i * LINE_HEIGHT)
      end
    end

    # Draw the songs below the selection.
    def draw_down
      1.upto(3) do |i|
        draw_simple_info(songdata(i), 60, 125 + i * LINE_HEIGHT)
      end
    end

    # Draw the selected song's info, clear mark and score.
    def draw_current
      draw_simple_info(@songdata, 120, 110, 340)
      draw_playdata Taiko.load(@songdata.name)
    end

    # Draw the clear-mark crown and best score.
    # FIX: previously nothing was drawn when there was no play data; now
    # crown index 0 (the "new"/unplayed mark) is drawn in that case.
    def draw_playdata(playdata)
      crown_type = case
                   when !playdata then 0
                   when playdata[:miss].zero? then 3
                   when playdata[:normal_clear] then 2
                   else 1
                   end
      bitmap.blt(65, 115, Cache.skin('clearmark'), Rect.new(28 * crown_type, 0, 28, 28))
      return unless playdata
      bitmap.draw_text(0, Graphics.height - 14, Graphics.width, 14, playdata[:score], 2)
    end
  end
end
end
view/song_list/songs.rb: draw the "new" icon
# encoding: utf-8
# 选曲列表
require 'view/dispose_bitmap'
module View
class SongList
  # Song-selection list: renders the window of songs around the current
  # selection plus the selected song's clear mark and best score.
  class Songs < Sprite
    include DisposeBitmap
    FONT = Font.new('simhei', 14)
    LINE_HEIGHT = 30
    SCORE_HEIGHT = 15

    def initialize(_)
      super
      self.bitmap = Bitmap.new(Graphics.width, Graphics.height)
      bitmap.font = FONT
      refresh
    end

    # Redraw only when the selected song has changed.
    def update
      refresh unless @songdata.equal?(songdata)
    end

    private

    # Song data at +offset+ rows from the current selection.
    def songdata(offset = 0)
      Scene.scene.songdata(offset)
    end

    def refresh
      @songdata = songdata
      bitmap.clear
      draw_up
      draw_current
      draw_down
    end

    # Draw the compact one-line info (title, level) for a song.
    def draw_simple_info(songdata, x, y, width = 340, height = LINE_HEIGHT)
      bitmap.draw_text(x, y, width, height, songdata.title)
      bitmap.draw_text(x, y, width, height, "★#{songdata.level}", 2)
    end

    # Draw the songs above the selection.
    def draw_up
      (-3).upto(-1) do |i|
        draw_simple_info(songdata(i), 60, 105 + i * LINE_HEIGHT)
      end
    end

    # Draw the songs below the selection.
    def draw_down
      1.upto(3) do |i|
        draw_simple_info(songdata(i), 60, 125 + i * LINE_HEIGHT)
      end
    end

    # Draw the selected song's info, clear mark and score.
    def draw_current
      draw_simple_info(@songdata, 120, 110, 340)
      draw_playdata Taiko.load(@songdata.name)
    end

    # Draw the clear-mark crown (index 0 = "new"/unplayed) and, when play
    # data exists, the best score at the bottom of the screen.
    def draw_playdata(playdata)
      crown_type = case
      when !playdata then 0
      when playdata[:miss].zero? then 3
      when playdata[:normal_clear] then 2
      else 1
      end
      bitmap.blt(65, 115, Cache.skin('clearmark'), Rect.new(28 * crown_type, 0, 28, 28))
      return unless playdata
      bitmap.draw_text(0, Graphics.height - 14, Graphics.width, 14, playdata[:score], 2)
    end
  end
end
end |
module Waveapi
class WaveService
  # Entry point for building Wave operations. Connection parameters are
  # accepted but not yet wired up (see TODO).
  def initialize(use_sandbox=false, server_rpc_base=nil, consumer_key='anonymous', consumer_secret='anonymous')
    # TODO: wire up sandbox/RPC/OAuth settings
    @context = Context.new
  end

  # Build the hash describing a brand-new blip with a temporary ("TBD") id.
  def self.new_blip_data(wave_id, wavelet_id, initial_content='', parent_blip_id=nil)
    temp_blip_id = "TBD_#{wavelet_id}_#{rand(1000000).to_s(16)}"
    {
      'waveId' => wave_id,
      'waveletId' => wavelet_id,
      'blipId' => temp_blip_id,
      'content' => initial_content,
      'parentBlipId' => parent_blip_id
    }
  end

  # Build paired [root_blip_data, wavelet_data] hashes for a new wave in
  # +domain+ with the given participants.
  def self.new_wavelet_data(domain, participants)
    wave_id = "#{domain}!TBD_#{rand(1000000).to_s(16)}"
    wavelet_id = "#{domain}!conv+root"
    root_blip_data = new_blip_data(wave_id, wavelet_id)
    wavelet_data = {
      'waveId' => wave_id,
      'waveletId' => wavelet_id,
      'rootBlipId' => root_blip_data['blipId'],
      'participants' => participants.participants
    }
    [root_blip_data, wavelet_data]
  end

  # Queue a CreateWavelet operation and return the new Wavelet.
  # +message+ may be a String, Wavelet, Hash or JSON.
  def new_wave(domain, participants=[], message='', proxy_for_id=nil, submit=false)
    # TODO: check if valid proxy for id
    case message
    when String; # do nothing
    # FIX: a Wavelet message previously fell through to the error branch
    # and raised ArgumentError; serialize it to JSON like a Hash.
    when Wavelet; message = message.to_json
    when Hash; message = message.to_json
    when JSON; message = message.to_s
    else; raise ArgumentError.new("Invalid message type: #{message.class.name}")
    end
    blip_data, wavelet_data = self.class.new_wavelet_data(domain, participants)
    operation = RobotCreateWaveletOperation.new(wavelet_data['waveId'],
      wavelet_data['waveletId'], wavelet_data, message)
    @context.add_operation(operation)
    root_blip = Blip.new(blip_data, @context)
    @context.add_blip(root_blip)
    created_wavelet = Wavelet.new(wavelet_data, @context)
    if submit
      # TODO
    end
    created_wavelet
  end

  def fetch_wavelet(wave_id, wavelet_id=nil, proxy_for_id=nil)
    raise 'Not Yet' # TODO
  end

  # Reconstruct a Wavelet (and its blips) from a robot JSON payload.
  def blind_wavelet(json, proxy_for_id=nil)
    json = JSON.parse(json.gsub("\n", '\n')) if json.is_a?(String) # TODO
    context = Context.new
    context.proxy_for(proxy_for_id)
    blips = Hash[*json['blips'].to_a.map{|k, v| [k, Blip.new(v, context)]}.flatten] # TODO: refactor
    context.message_bundle.blips = blips
    wavelet_from_json(json, context)
  end

  def wavelet_from_json(json, context)
    blips = {}
    threads = {}
    threads_data = json['threads'] || []
    # Paused here for now.
    # TODO: this still differs considerably from the reference implementation.
    Wavelet.new(json, context)
  end
end
end
Fix a bug: WaveService#new_wave raised ArgumentError when given a Wavelet as the message; serialize it to JSON like a Hash instead.
module Waveapi
class WaveService
  # Entry point for building Wave operations. Connection parameters are
  # accepted but not yet wired up (see TODO).
  def initialize(use_sandbox=false, server_rpc_base=nil, consumer_key='anonymous', consumer_secret='anonymous')
    # TODO: wire up sandbox/RPC/OAuth settings
    @context = Context.new
  end

  # Build the hash describing a brand-new blip with a temporary ("TBD") id.
  def self.new_blip_data(wave_id, wavelet_id, initial_content='', parent_blip_id=nil)
    temp_blip_id = "TBD_#{wavelet_id}_#{rand(1000000).to_s(16)}"
    {
      'waveId' => wave_id,
      'waveletId' => wavelet_id,
      'blipId' => temp_blip_id,
      'content' => initial_content,
      'parentBlipId' => parent_blip_id
    }
  end

  # Build paired [root_blip_data, wavelet_data] hashes for a new wave in
  # +domain+ with the given participants.
  def self.new_wavelet_data(domain, participants)
    wave_id = "#{domain}!TBD_#{rand(1000000).to_s(16)}"
    wavelet_id = "#{domain}!conv+root"
    root_blip_data = new_blip_data(wave_id, wavelet_id)
    wavelet_data = {
      'waveId' => wave_id,
      'waveletId' => wavelet_id,
      'rootBlipId' => root_blip_data['blipId'],
      'participants' => participants.participants
    }
    [root_blip_data, wavelet_data]
  end

  # Queue a CreateWavelet operation and return the new Wavelet.
  # +message+ may be a String, Wavelet, Hash or JSON.
  def new_wave(domain, participants=[], message='', proxy_for_id=nil, submit=false)
    # TODO: check if valid proxy for id
    case message
    when String; # do nothing
    when Wavelet; message = message.to_json
    when Hash; message = message.to_json
    when JSON; message = message.to_s
    else; raise ArgumentError.new("Invalid message type: #{message.class.name}")
    end
    blip_data, wavelet_data = self.class.new_wavelet_data(domain, participants)
    operation = RobotCreateWaveletOperation.new(wavelet_data['waveId'],
      wavelet_data['waveletId'], wavelet_data, message)
    @context.add_operation(operation)
    root_blip = Blip.new(blip_data, @context)
    @context.add_blip(root_blip)
    created_wavelet = Wavelet.new(wavelet_data, @context)
    if submit
      # TODO
    end
    created_wavelet
  end

  def fetch_wavelet(wave_id, wavelet_id=nil, proxy_for_id=nil)
    raise 'Not Yet' # TODO
  end

  # Reconstruct a Wavelet (and its blips) from a robot JSON payload.
  def blind_wavelet(json, proxy_for_id=nil)
    json = JSON.parse(json.gsub("\n", '\n')) if json.is_a?(String) # TODO
    context = Context.new
    context.proxy_for(proxy_for_id)
    blips = Hash[*json['blips'].to_a.map{|k, v| [k, Blip.new(v, context)]}.flatten] # TODO: refactor
    context.message_bundle.blips = blips
    wavelet_from_json(json, context)
  end

  def wavelet_from_json(json, context)
    blips = {}
    threads = {}
    threads_data = json['threads'] || []
    # Paused here for now.
    # TODO: this still differs considerably from the reference implementation.
    Wavelet.new(json, context)
  end
end
end
|
##########################################
#
# Module WFMStat
#
##########################################
module WFMStat
##########################################
#
# Class StatusEngine
#
##########################################
class StatusEngine
require 'workflowmgr/workflowdoc'
require 'workflowmgr/workflowdb'
require 'workflowmgr/cycledef'
require "workflowmgr/cycle"
require 'workflowmgr/dependency'
require 'workflowmgr/workflowconfig'
require 'workflowmgr/launchserver'
require 'workflowmgr/dbproxy'
require 'workflowmgr/workflowioproxy'
##########################################
#
# initialize
#
##########################################
def initialize(options)
  # Parsed command-line options (cycles, tasks, database, workflowdoc, ...).
  @options=options
  # Workflow manager configuration loaded from the YAML config file.
  @config=WorkflowMgr::WorkflowYAMLConfig.new
  # Base directory of the WFM installation (two levels above this file's dir).
  @wfmdir=File.dirname(File.dirname(File.expand_path(File.dirname(__FILE__))))
  # Proxy that will serve the workflow database (the database is NOT opened here).
  @dbServer=WorkflowMgr::DBProxy.new(@options.database,@config)
end # initialize
##########################################
#
# wfmstat
#
##########################################
# Top-level "rocotostat" entry point: opens the database and workflow
# document, then prints either a cycle summary or the full status listing.
def wfmstat
  begin
    # Open/Create the database
    @dbServer.dbopen
    # Set up an object to serve file stat info
    @workflowIOServer=WorkflowMgr::WorkflowIOProxy.new(@dbServer,@config)
    # Open the workflow document
    @workflowdoc = WorkflowMgr::WorkflowXMLDoc.new(@options.workflowdoc,@workflowIOServer)
    # Print a cycle summary report if requested; otherwise the per-task status
    if @options.summary
      print_summary
    else
      print_status
    end
  ensure
    # Make sure we release the workflow lock in the database and shutdown the dbserver
    unless @dbServer.nil?
      @dbServer.unlock_workflow if @locked
      @dbServer.stop! if @config.DatabaseServer
    end
    # Make sure to shut down the workflow file stat server
    unless @workflowIOServer.nil?
      @workflowIOServer.stop! if @config.WorkflowIOServer
    end
  end # ensure
end # wfmstat
##########################################
#
# checkTask
#
##########################################
# "rocotocheck" entry point: report everything known about one task of one
# cycle — task definition, dependency status, cycle state, job state, and
# (when no job exists) the reason the task cannot be submitted.
def checkTask
  begin
    # Open/Create the database
    @dbServer.dbopen
    # Set up an object to serve file stat info
    @workflowIOServer=WorkflowMgr::WorkflowIOProxy.new(@dbServer,@config)
    # Open the workflow document
    @workflowdoc = WorkflowMgr::WorkflowXMLDoc.new(@options.workflowdoc,@workflowIOServer)
    # Get cycle time and task name options
    cycletime=@options.cycles.first
    taskname=@options.tasks.first
    # Get the cycle (a placeholder Cycle if it is not in the database yet)
    cycle=@dbServer.get_cycles( {:start=>cycletime, :end=>cycletime } ).first || WorkflowMgr::Cycle.new(cycletime)
    # Get the task, localized to the requested cycle
    task=@workflowdoc.tasks[taskname]
    task=task.localize(cycletime) unless task.nil?
    # Collect every cycle a task dependency might reference, then fetch jobs
    jobcycles=[cycletime]
    @workflowdoc.taskdep_cycle_offsets.each do |offset|
      jobcycles << cycletime + offset
    end
    jobs=@dbServer.get_jobs(jobcycles)
    if jobs[taskname].nil?
      job=nil
    else
      job=jobs[taskname][cycletime]
    end
    # Print the task information
    print_taskinfo(task)
    # FIX: capture the dependency query results in locals initialized to
    # nil. Previously `dependencies` was never assigned, so the
    # print_violations call below crashed with a NameError for any task
    # whose job had not been submitted yet.
    dependencies=nil
    hangdependencies=nil
    unless task.nil?
      unless task.dependency.nil?
        dependencies=task.dependency.query(cycle.cycle,jobs,@workflowIOServer)
        printf "%2s%s\n", "","dependencies"
        print_deps(dependencies,0)
      end
      unless task.hangdependency.nil?
        hangdependencies=task.hangdependency.query(cycle.cycle,jobs,@workflowIOServer)
        printf "%2s%s\n", "","hang dependencies"
        print_deps(hangdependencies,0)
      end
    end
    # Print the cycle information
    print_cycleinfo(cycle)
    # Print the job information
    print_jobinfo(job)
    # Print throttling violations when no job has been submitted yet
    print_violations(task,cycle,dependencies) if job.nil?
  ensure
    # Make sure we release the workflow lock in the database and shutdown the dbserver
    unless @dbServer.nil?
      @dbServer.unlock_workflow if @locked
      @dbServer.stop! if @config.DatabaseServer
    end
    # Make sure to shut down the workflow file stat server
    unless @workflowIOServer.nil?
      @workflowIOServer.stop! if @config.WorkflowIOServer
    end
  end # ensure
end
##########################################
#
# getCycles
#
##########################################
# Resolve @options.cycles into three lists:
#   dbcycles    - cycles found in the database
#   xmlcycles   - cycles defined by the XML cycledefs but not yet in the DB
#                 (only computed for a Range request)
#   undefcycles - explicitly requested cycles unknown to the DB
# Returns [dbcycles, xmlcycles, undefcycles].
def getCycles
  # Initialize empty lists of cycles
  dbcycles=[]
  xmlcycles=[]
  undefcycles=[]
  # Get the cycles of interest that are in the database
  if @options.cycles.nil?
    # No cycles requested: report on the latest cycle only
    last_cycle=@dbServer.get_last_cycle
    dbcycles << last_cycle unless last_cycle.nil?
  elsif @options.cycles.is_a?(Range)
    # Get all cycles within the range
    dbcycles += @dbServer.get_cycles( {:start=>@options.cycles.first, :end=>@options.cycles.last } )
  elsif @options.cycles.is_a?(Array)
    # Get the specific cycles asked for
    @options.cycles.each do |c|
      cycle = @dbServer.get_cycles( {:start=>c, :end=>c } )
      if cycle.empty?
        undefcycles << WorkflowMgr::Cycle.new(c)
      else
        dbcycles += cycle
      end
    end
  else
    puts "Invalid cycle specification"
  end
  # Add cycles defined in XML that aren't in the database
  # We only need to do this when a range of cycles is requested
  if @options.cycles.is_a?(Range)
    # Get the cycle definitions
    cycledefs = @workflowdoc.cycledefs
    # Find every cycle in the range by stepping through the cycledefs
    # (the +60 advances past the cycle just found; cycles are minute-resolution)
    xml_cycle_times = []
    reftime=cycledefs.collect { |cdef| cdef.next(@options.cycles.first) }.compact.min
    while true do
      break if reftime.nil?
      break if reftime > @options.cycles.last
      xml_cycle_times << reftime
      reftime=cycledefs.collect { |cdef| cdef.next(reftime+60) }.compact.min
    end
    # Add the cycles that are in the XML but not in the DB
    xmlcycles = (xml_cycle_times - dbcycles.collect { |c| c.cycle } ).collect { |c| WorkflowMgr::Cycle.new(c) }
  end
  [dbcycles,xmlcycles,undefcycles]
end
##########################################
#
# print_summary
#
##########################################
# Print a one-line-per-cycle summary table: cycle time, state, and the
# activation/deactivation timestamps.
def print_summary
  dbcycles, xmlcycles, undefcycles = getCycles
  # Header and data rows share one format so the columns always line up.
  row = "%12s %8s %20s %20s\n"
  printf row, "CYCLE".center(12), "STATE".center(8),
         "ACTIVATED".center(20), "DEACTIVATED".center(20)
  (dbcycles + xmlcycles).sort.each do |cycle|
    printf row,
           cycle.cycle.strftime("%Y%m%d%H%M"),
           cycle.state.to_s.capitalize,
           cycle.activated_time_string.center(20),
           cycle.deactivated_time_string.center(20)
  end
end
##########################################
#
# print_status
#
##########################################
# Print the full job-status table. With @options.taskfirst the outer grouping
# is by task (one section per task, one row per cycle); otherwise it is by
# cycle (one section per cycle, one row per task). Rows for tasks/cycles with
# no job show "-" placeholders; exit status is only shown for finished jobs.
def print_status
  # Get cycles of interest
  dbcycles,xmlcycles,undefcycles=getCycles
  # Get the jobs from the database for the cycles of interest
  jobs=@dbServer.get_jobs(dbcycles.collect {|c| c.cycle})
  # Get the list of tasks from the workflow definition
  definedTasks=@workflowdoc.tasks
  # Print the job status info
  if @options.taskfirst
    format = "%20s %12s %24s %16s %16s %6s\n"
    header = "TASK".rjust(20),"CYCLE".rjust(12),"JOBID".rjust(24),
      "STATE".rjust(16),"EXIT STATUS".rjust(16),"TRIES".rjust(6)
    puts format % header
    # Union of tasks that have jobs and tasks defined in the XML,
    # optionally filtered by the task patterns given on the command line
    tasklist=jobs.keys | definedTasks.values.collect { |t| t.attributes[:name] }
    unless @options.tasks.nil?
      tasklist = tasklist.find_all { |task| @options.tasks.any? { |pattern| task=~/#{pattern}/ } }
    end
    # Sort by XML sequence order (unknown tasks last), then by natural
    # (numeric-aware) task name order
    tasklist=tasklist.sort_by { |t| [definedTasks[t].nil? ? 999999999 : definedTasks[t].seq, t.split(/(\d+)/).map { |i| i=~/\d+/ ? i.to_i : i }].flatten }
    tasklist.each do |task|
      printf "==================================================================================================================\n"
      # Print status of all jobs for this task
      cyclelist=(dbcycles | xmlcycles).collect { |c| c.cycle }.sort
      cyclelist.each do |cycle|
        if jobs[task].nil?
          jobdata=["-","-","-","-"]
        elsif jobs[task][cycle].nil?
          jobdata=["-","-","-","-"]
        else
          case jobs[task][cycle].state
            when "SUCCEEDED","DEAD","FAILED"
              # Finished job: the exit status is meaningful
              jobdata=[jobs[task][cycle].id,jobs[task][cycle].state,jobs[task][cycle].exit_status,jobs[task][cycle].tries]
            else
              jobdata=[jobs[task][cycle].id,jobs[task][cycle].state,"-",jobs[task][cycle].tries]
          end
        end
        puts format % ([task,cycle.strftime("%Y%m%d%H%M")] + jobdata)
      end
    end
  else
    format = "%12s %20s %24s %16s %16s %6s\n"
    header = "CYCLE".rjust(12),"TASK".rjust(20),"JOBID".rjust(24),
      "STATE".rjust(16),"EXIT STATUS".rjust(16),"TRIES".rjust(6)
    puts format % header
    # Print status of jobs for each cycle
    cyclelist=(dbcycles | xmlcycles).collect { |c| c.cycle }.sort
    cyclelist.each do |cycle|
      printf "==================================================================================================================\n"
      # Same task selection and ordering as the task-first branch above
      tasklist=jobs.keys | definedTasks.values.collect { |t| t.attributes[:name] }
      unless @options.tasks.nil?
        tasklist = tasklist.find_all { |task| @options.tasks.any? { |pattern| task=~/#{pattern}/ } }
      end
      tasklist=tasklist.sort_by { |t| [definedTasks[t].nil? ? 999999999 : definedTasks[t].seq, t.split(/(\d+)/).map { |i| i=~/\d+/ ? i.to_i : i }].flatten }
      tasklist.each do |task|
        if jobs[task].nil?
          jobdata=["-","-","-","-"]
        elsif jobs[task][cycle].nil?
          jobdata=["-","-","-","-"]
        else
          case jobs[task][cycle].state
            when "SUCCEEDED","DEAD","FAILED"
              # Finished job: the exit status is meaningful
              jobdata=[jobs[task][cycle].id,jobs[task][cycle].state,jobs[task][cycle].exit_status,jobs[task][cycle].tries]
            else
              jobdata=[jobs[task][cycle].id,jobs[task][cycle].state,"-",jobs[task][cycle].tries]
          end
        end
        puts format % ([cycle.strftime("%Y%m%d%H%M"),task] + jobdata)
      end
    end
  end
end
##########################################
#
# print_taskinfo
#
##########################################
# Print a task's attributes and environment variables, or a placeholder
# message when the task is not defined in the workflow document.
def print_taskinfo(task)
  puts
  if task.nil?
    puts "Task: Not defined in current workflow definition"
    return
  end
  puts "Task: #{task.attributes[:name]}"
  # Attribute keys may be symbols; order them by their string form.
  task.attributes.keys.sort_by(&:to_s).each do |attr|
    puts " #{attr}: #{task.attributes[attr]}"
  end
  puts " environment"
  task.envars.keys.sort.each do |envar|
    puts " #{envar} ==> #{task.envars[envar]}"
  end
end
##########################################
#
# print_cycleinfo
#
##########################################
# Print one cycle's state and lifecycle timestamps; "-" stands in for
# timestamps that do not apply (never activated / not done / not expired).
def print_cycleinfo(cycle)
  activated = cycle.activated != Time.at(0) ? cycle.activated : "-"
  completed = cycle.done? ? cycle.done : "-"
  expired = cycle.expired? ? cycle.expired : "-"
  puts
  puts "Cycle: #{cycle.cycle.strftime("%Y%m%d%H%M")}"
  puts " State: #{cycle.state}"
  puts " Activated: #{activated}"
  puts " Completed: #{completed}"
  puts " Expired: #{expired}"
end
##########################################
#
# print_jobinfo
#
##########################################
# Print one job's id, state and bookkeeping counters, or a placeholder
# message when the task has no job for this cycle yet.
def print_jobinfo(job)
  puts
  if job.nil?
    puts "Job: This task has not been submitted for this cycle"
    return
  end
  # Exit status is only meaningful once the job has finished.
  exit_status = job.done? ? job.exit_status : "-"
  puts "Job: #{job.id}"
  puts " State: #{job.state} (#{job.native_state})"
  puts " Exit Status: #{exit_status}"
  puts " Tries: #{job.tries}"
  puts " Unknown count: #{job.nunknowns}"
end
##########################################
#
# print_violations
#
##########################################
# Explain why a task has no submitted job: undefined task, inactive cycle,
# unsatisfied dependencies, or task/core throttle violations.
# +dependencies+ is the (possibly nil) result of Dependency#query.
def print_violations(task,cycle,dependencies)
  puts
  puts "Task can not be submitted because:"
  # Check for non-existent task
  if task.nil?
    puts " The task is not defined"
    return
  end
  # Check for inactive cycle
  unless cycle.active?
    puts " The cycle is not active"
    return
  end
  # Check for unsatisfied dependencies
  unless dependencies.nil?
    unless dependencies.first[:resolved]
      puts " Dependencies are not satisfied"
      return
    end
  end
  # Check for throttle violations: count the tasks and cores currently in
  # use by unfinished jobs of all active cycles
  active_cycles=@dbServer.get_active_cycles
  active_jobs=@dbServer.get_jobs(active_cycles.collect { |c| c.cycle })
  ncores=0
  ntasks=0
  active_jobs.keys.each do |jobtask|
    active_jobs[jobtask].keys.each do |jobcycle|
      if !active_jobs[jobtask][jobcycle].done?
        ntasks += 1
        ncores += active_jobs[jobtask][jobcycle].cores
      end
    end
  end
  # Would submitting this task exceed either throttle?
  if ntasks + 1 > @workflowdoc.taskthrottle
    puts " Task throttle violation (#{ntasks} of #{@workflowdoc.taskthrottle} tasks are already active)"
  end
  if ncores + task.attributes[:cores] > @workflowdoc.corethrottle
    puts " Core throttle violation (#{ncores} of #{@workflowdoc.corethrottle} cores are already in use)"
  end
end
##########################################
#
# print_deps
#
##########################################
# Recursively print a dependency query result. +deps+ is a nested array of
# {:dep, :msg} hashes; nesting depth +n+ controls the indentation (4 + 2n
# leading spaces per line).
def print_deps(deps, n)
  return if deps.nil?
  deps.each do |d|
    if d.is_a?(Array)
      # FIX: the recursive call previously carried a redundant
      # `if d.is_a?(Array)` modifier inside this branch, which had just
      # tested the same condition.
      print_deps(d, n + 1)
    else
      printf "%#{2*n+4}s%s %s\n", "", d[:dep], d[:msg]
    end
  end
end
end # Class StatusEngine
end # Module WorkflowMgr
Bug fix (third attempt — the first two fixes were incomplete): dependencies for jobs that had not started yet could not be checked, which caused rocotocheck to crash.
Better testing is needed; it is on the list, but urgent firefighting keeps taking priority.
##########################################
#
# Module WFMStat
#
##########################################
module WFMStat
##########################################
#
# Class StatusEngine
#
##########################################
class StatusEngine
require 'workflowmgr/workflowdoc'
require 'workflowmgr/workflowdb'
require 'workflowmgr/cycledef'
require "workflowmgr/cycle"
require 'workflowmgr/dependency'
require 'workflowmgr/workflowconfig'
require 'workflowmgr/launchserver'
require 'workflowmgr/dbproxy'
require 'workflowmgr/workflowioproxy'
##########################################
#
# initialize
#
##########################################
# Store the parsed command-line options, load the YAML configuration, and
# build the database proxy (the database itself is opened lazily later).
def initialize(options)
  @options = options
  @config = WorkflowMgr::WorkflowYAMLConfig.new
  # Installation root: three File.dirname hops up from this file.
  @wfmdir = File.dirname(File.dirname(File.expand_path(File.dirname(__FILE__))))
  @dbServer = WorkflowMgr::DBProxy.new(@options.database, @config)
end # initialize
##########################################
#
# wfmstat
#
##########################################
# Top-level "rocotostat" entry point: opens the database and workflow
# document, then prints either a cycle summary or the full status listing.
def wfmstat
  begin
    # Open/Create the database
    @dbServer.dbopen
    # Set up an object to serve file stat info
    @workflowIOServer=WorkflowMgr::WorkflowIOProxy.new(@dbServer,@config)
    # Open the workflow document
    @workflowdoc = WorkflowMgr::WorkflowXMLDoc.new(@options.workflowdoc,@workflowIOServer)
    # Print a cycle summary report if requested; otherwise the per-task status
    if @options.summary
      print_summary
    else
      print_status
    end
  ensure
    # Make sure we release the workflow lock in the database and shutdown the dbserver
    unless @dbServer.nil?
      @dbServer.unlock_workflow if @locked
      @dbServer.stop! if @config.DatabaseServer
    end
    # Make sure to shut down the workflow file stat server
    unless @workflowIOServer.nil?
      @workflowIOServer.stop! if @config.WorkflowIOServer
    end
  end # ensure
end # wfmstat
##########################################
#
# checkTask
#
##########################################
# "rocotocheck" entry point: report everything known about one task of one
# cycle — task definition, dependency status, cycle state, job state, and
# (when no job exists) the reason the task cannot be submitted.
def checkTask
  begin
    # Open/Create the database
    @dbServer.dbopen
    # Set up an object to serve file stat info
    @workflowIOServer=WorkflowMgr::WorkflowIOProxy.new(@dbServer,@config)
    # Open the workflow document
    @workflowdoc = WorkflowMgr::WorkflowXMLDoc.new(@options.workflowdoc,@workflowIOServer)
    # Get cycle time and task name options
    cycletime=@options.cycles.first
    taskname=@options.tasks.first
    # Get the cycle (a placeholder Cycle if it is not in the database yet)
    cycle=@dbServer.get_cycles( {:start=>cycletime, :end=>cycletime } ).first || WorkflowMgr::Cycle.new(cycletime)
    # Get the task, localized to the requested cycle
    task=@workflowdoc.tasks[taskname]
    task=task.localize(cycletime) unless task.nil?
    # Collect every cycle a task dependency might reference, then fetch jobs
    jobcycles=[cycletime]
    @workflowdoc.taskdep_cycle_offsets.each do |offset|
      jobcycles << cycletime + offset
    end
    jobs=@dbServer.get_jobs(jobcycles)
    if jobs[taskname].nil?
      job=nil
    else
      job=jobs[taskname][cycletime]
    end
    # Print the task information
    print_taskinfo(task)
    # Query and print task dependency info; the locals default to nil so
    # print_violations below can be called even when nothing was queried
    dependencies=nil
    hangdependencies=nil
    unless task.nil?
      unless task.dependency.nil?
        dependencies=task.dependency.query(cycle.cycle,jobs,@workflowIOServer)
        printf "%2s%s\n", "","dependencies"
        print_deps(dependencies,0)
      end
      unless task.hangdependency.nil?
        hangdependencies=task.hangdependency.query(cycle.cycle,jobs,@workflowIOServer)
        printf "%2s%s\n", "","hang dependencies"
        print_deps(hangdependencies,0)
      end
    end
    # Print the cycle information
    print_cycleinfo(cycle)
    # Print the job information
    print_jobinfo(job)
    # Print throttling violations when no job has been submitted yet
    print_violations(task,cycle,dependencies) if job.nil?
  ensure
    # Make sure we release the workflow lock in the database and shutdown the dbserver
    unless @dbServer.nil?
      @dbServer.unlock_workflow if @locked
      @dbServer.stop! if @config.DatabaseServer
    end
    # Make sure to shut down the workflow file stat server
    unless @workflowIOServer.nil?
      @workflowIOServer.stop! if @config.WorkflowIOServer
    end
  end # ensure
end
##########################################
#
# getCycles
#
##########################################
##
# getCycles: resolves the cycle selection in @options.cycles into three
# lists and returns [dbcycles, xmlcycles, undefcycles]:
#   dbcycles    - cycles found in the workflow database
#   xmlcycles   - cycles defined by the XML cycledefs but absent from the
#                 database (only populated for Range requests)
#   undefcycles - explicitly requested cycles defined nowhere
# With no request at all, only the most recent DB cycle is returned.
def getCycles
# Initialize empty lists of cycles
dbcycles=[]
xmlcycles=[]
undefcycles=[]
# Get the cycles of interest that are in the database
if @options.cycles.nil?
# Get the latest cycle
last_cycle=@dbServer.get_last_cycle
dbcycles << last_cycle unless last_cycle.nil?
elsif @options.cycles.is_a?(Range)
# Get all cycles within the range
dbcycles += @dbServer.get_cycles( {:start=>@options.cycles.first, :end=>@options.cycles.last } )
elsif @options.cycles.is_a?(Array)
# Get the specific cycles asked for
@options.cycles.each do |c|
cycle = @dbServer.get_cycles( {:start=>c, :end=>c } )
if cycle.empty?
undefcycles << WorkflowMgr::Cycle.new(c)
else
dbcycles += cycle
end
end
else
puts "Invalid cycle specification"
end
# Add cycles defined in XML that aren't in the database
# We only need to do this when a range of cycles is requested
if @options.cycles.is_a?(Range)
# Get the cycle definitions
cycledefs = @workflowdoc.cycledefs
# Find every cycle in the range
xml_cycle_times = []
# Earliest defined cycle at or after the start of the requested range
reftime=cycledefs.collect { |cdef| cdef.next(@options.cycles.first) }.compact.min
while true do
break if reftime.nil?
break if reftime > @options.cycles.last
xml_cycle_times << reftime
# Step past the current cycle; NOTE(review): the +60s assumes cycledef
# resolution is at least one minute -- confirm against CycleDef#next
reftime=cycledefs.collect { |cdef| cdef.next(reftime+60) }.compact.min
end
# Add the cycles that are in the XML but not in the DB
xmlcycles = (xml_cycle_times - dbcycles.collect { |c| c.cycle } ).collect { |c| WorkflowMgr::Cycle.new(c) }
end
[dbcycles,xmlcycles,undefcycles]
end
##########################################
#
# print_summary
#
##########################################
# Prints one summary row (state, activation time, deactivation time) for
# every cycle of interest, preceded by a column header.
def print_summary
  # Get cycles of interest
  dbcycles, xmlcycles, _undefcycles = getCycles
  # Header and data rows share one layout
  row_format = "%12s %8s %20s %20s\n"
  printf row_format, "CYCLE".center(12), "STATE".center(8),
         "ACTIVATED".center(20), "DEACTIVATED".center(20)
  # One row per cycle, in chronological order
  (dbcycles + xmlcycles).sort.each do |cycle|
    printf row_format, cycle.cycle.strftime("%Y%m%d%H%M"),
           cycle.state.to_s.capitalize,
           cycle.activated_time_string.center(20),
           cycle.deactivated_time_string.center(20)
  end
end
##########################################
#
# print_status
#
##########################################
# Prints a status table of every job for the cycles of interest, grouped
# either by task (when @options.taskfirst) or by cycle (the default).
# Improvements over the previous version: the per-row job columns and the
# task-list construction were duplicated between the two branches (and the
# task list was needlessly re-sorted inside the per-cycle loop); both are
# now computed once by the helpers below.
def print_status
  # Get cycles of interest
  dbcycles, xmlcycles, _undefcycles = getCycles
  # Get the jobs from the database for the cycles of interest
  jobs = @dbServer.get_jobs(dbcycles.collect { |c| c.cycle })
  # Get the list of tasks from the workflow definition
  definedTasks = @workflowdoc.tasks
  # Every cycle to report on, in chronological order
  cyclelist = (dbcycles | xmlcycles).collect { |c| c.cycle }.sort
  # Tasks filtered by the -t patterns and sorted in sequence order
  tasklist = status_tasklist(jobs, definedTasks)
  if @options.taskfirst
    format = "%20s %12s %24s %16s %16s %6s\n"
    puts format % ["TASK".rjust(20), "CYCLE".rjust(12), "JOBID".rjust(24),
                   "STATE".rjust(16), "EXIT STATUS".rjust(16), "TRIES".rjust(6)]
    tasklist.each do |task|
      printf "==================================================================================================================\n"
      cyclelist.each do |cycle|
        puts format % ([task, cycle.strftime("%Y%m%d%H%M")] + status_jobdata(jobs, task, cycle))
      end
    end
  else
    format = "%12s %20s %24s %16s %16s %6s\n"
    puts format % ["CYCLE".rjust(12), "TASK".rjust(20), "JOBID".rjust(24),
                   "STATE".rjust(16), "EXIT STATUS".rjust(16), "TRIES".rjust(6)]
    cyclelist.each do |cycle|
      printf "==================================================================================================================\n"
      tasklist.each do |task|
        puts format % ([cycle.strftime("%Y%m%d%H%M"), task] + status_jobdata(jobs, task, cycle))
      end
    end
  end
end

# Builds the display task list: the union of tasks that have DB jobs and
# tasks in the workflow definition, filtered by the -t patterns, sorted by
# task sequence number and then natural (digit-aware) name order.
def status_tasklist(jobs, definedTasks)
  tasklist = jobs.keys | definedTasks.values.collect { |t| t.attributes[:name] }
  unless @options.tasks.nil?
    tasklist = tasklist.find_all { |task| @options.tasks.any? { |pattern| task =~ /#{pattern}/ } }
  end
  # Tasks no longer in the workflow definition sort to the end (999999999)
  tasklist.sort_by { |t| [definedTasks[t].nil? ? 999999999 : definedTasks[t].seq, t.split(/(\d+)/).map { |i| i =~ /\d+/ ? i.to_i : i }].flatten }
end

# Returns the [jobid, state, exit status, tries] display columns for one
# task/cycle pair, with "-" placeholders when there is no job yet or the
# job has not reached a terminal state.
def status_jobdata(jobs, task, cycle)
  job = jobs[task] && jobs[task][cycle]
  return ["-", "-", "-", "-"] if job.nil?
  case job.state
  when "SUCCEEDED", "DEAD", "FAILED"
    [job.id, job.state, job.exit_status, job.tries]
  else
    # Exit status is meaningless until the job finishes
    [job.id, job.state, "-", job.tries]
  end
end
##########################################
#
# print_taskinfo
#
##########################################
# Prints the task's name, its attributes (sorted by name), and its
# environment variable assignments.  A nil task prints a placeholder line.
def print_taskinfo(task)
  puts
  if task.nil?
    puts "Task: Not defined in current workflow definition"
    return
  end
  puts "Task: #{task.attributes[:name]}"
  task.attributes.keys.sort_by(&:to_s).each do |attr|
    puts " #{attr}: #{task.attributes[attr]}"
  end
  puts " environment"
  task.envars.keys.sort.each do |envar|
    puts " #{envar} ==> #{task.envars[envar]}"
  end
end
##########################################
#
# print_cycleinfo
#
##########################################
# Prints the cycle's time stamp, state, and its activation / completion /
# expiration times ("-" when the event has not happened).
def print_cycleinfo(cycle)
  # Time.at(0) is the sentinel for "never activated"
  activated = cycle.activated != Time.at(0) ? cycle.activated : "-"
  completed = cycle.done? ? cycle.done : "-"
  expired = cycle.expired? ? cycle.expired : "-"
  puts
  puts "Cycle: #{cycle.cycle.strftime("%Y%m%d%H%M")}"
  puts " State: #{cycle.state}"
  puts " Activated: #{activated}"
  puts " Completed: #{completed}"
  puts " Expired: #{expired}"
end
##########################################
#
# print_jobinfo
#
##########################################
# Prints the job's id, state, exit status, try count, and unknown-state
# count.  A nil job prints a "not submitted" placeholder.
def print_jobinfo(job)
  puts
  if job.nil?
    puts "Job: This task has not been submitted for this cycle"
    return
  end
  # Exit status is only meaningful once the job is done
  exit_status = job.done? ? job.exit_status : "-"
  puts "Job: #{job.id}"
  puts " State: #{job.state} (#{job.native_state})"
  puts " Exit Status: #{exit_status}"
  puts " Tries: #{job.tries}"
  puts " Unknown count: #{job.nunknowns}"
end
##########################################
#
# print_violations
#
##########################################
##
# print_violations: explains why the given task/cycle pair cannot be
# submitted: undefined task, inactive cycle, unsatisfied dependencies, or
# task/core throttle limits already reached.  Called only when no job
# exists for the pair.
def print_violations(task,cycle,dependencies)
puts
puts "Task can not be submitted because:"
# Check for non-existent task
if task.nil?
puts " The task is not defined"
return
end
# Check for inactive cycle
unless cycle.active?
puts " The cycle is not active"
return
end
# Check for unsatisfied dependencies
unless dependencies.nil?
# NOTE(review): presumably the first element of the query result carries
# the overall resolution flag -- confirm against Dependency#query
unless dependencies.first[:resolved]
puts " Dependencies are not satisfied"
return
end
end
# Check for throttle violations
# Count the tasks and cores consumed by every unfinished job in every
# active cycle, then see whether adding this task would exceed the limits
active_cycles=@dbServer.get_active_cycles
active_jobs=@dbServer.get_jobs(active_cycles.collect { |c| c.cycle })
ncores=0
ntasks=0
active_jobs.keys.each do |jobtask|
active_jobs[jobtask].keys.each do |jobcycle|
if !active_jobs[jobtask][jobcycle].done?
ntasks += 1
ncores += active_jobs[jobtask][jobcycle].cores
end
end
end
if ntasks + 1 > @workflowdoc.taskthrottle
puts " Task throttle violation (#{ntasks} of #{@workflowdoc.taskthrottle} tasks are already active)"
end
if ncores + task.attributes[:cores] > @workflowdoc.corethrottle
puts " Core throttle violation (#{ncores} of #{@workflowdoc.corethrottle} cores are already in use)"
end
end
##########################################
#
# print_deps
#
##########################################
# Recursively prints a dependency query result tree.
# Fix: the inner recursion carried a redundant duplicate `if d.is_a?(Array)`
# modifier on a line already inside the `if d.is_a?(Array)` branch.
#
# deps - nested Array of dependency result Hashes (:dep, :msg), or nil
# n    - current nesting depth, controls the indentation width
def print_deps(deps, n)
  return if deps.nil?
  deps.each do |d|
    if d.is_a?(Array)
      # Nested sub-dependency group: recurse one level deeper
      print_deps(d, n + 1)
    else
      printf "%#{2 * n + 4}s%s %s\n", "", d[:dep], d[:msg]
    end
  end
end
end # Class StatusEngine
end # Module WorkflowMgr
|
require 'builder/xchar'
# Helper routines shared by the XLive web service clients.
module XLiveServices
  module Utils
    # Builds the WS-Addressing/WS-Security SOAP header for a request; the
    # compact RPS ticket is XML-escaped before being embedded.
    def self.BuildHeader(endpoint, action, compactRPSTicket)
      %{
<a:Action s:mustUnderstand="1">#{action}</a:Action>
<a:To s:mustUnderstand="1">#{endpoint}</a:To>
<o:Security s:mustUnderstand="1" xmlns:o="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd">
<cct:RpsSecurityToken wsu:Id="00000000-0000-0000-0000-000000000000" xmlns:cct="http://samples.microsoft.com/wcf/security/Extensibility/" xmlns:wsu="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
<cct:RpsTicket>#{Builder::XChar.encode(compactRPSTicket)}</cct:RpsTicket>
</cct:RpsSecurityToken>
</o:Security>
}
    end

    # Concatenates namespace + configurationName + '/' + operation name
    # into a SOAP action URI.
    def self.BuildAction(namespace, configurationName, name)
      "#{namespace}#{configurationName}/#{name}"
    end

    # Converts between Ruby values and the WCF serialization wire format.
    class Serialization
      # Wraps +data+ for transmission according to +type+; unknown types
      # serialize to an empty Hash.
      def self.Serialize(data, type)
        case type
        when 'enum'
          data.to_s
        when 'uint[]'
          { :'@xmlns:b' => 'http://schemas.microsoft.com/2003/10/Serialization/Arrays',
            :content! => { 'b:unsignedInt' => data } }
        when 'string[]'
          { :'@xmlns:b' => 'http://schemas.microsoft.com/2003/10/Serialization/Arrays',
            :content! => { 'b:string' => data } }
        else
          {}
        end
      end

      # Unwraps a response value; only 'string[]' is understood, all other
      # types deserialize to nil.  A missing/falsy value becomes [], and a
      # scalar is promoted to a one-element Array.
      def self.Deserialize(data, type)
        return nil unless type == 'string[]'
        value = data['string']
        if !value
          []
        elsif value.is_a?(Array)
          value
        else
          [value]
        end
      end
    end
  end
end
Add CleanURLs function in Utils
require 'uri'
require 'builder/xchar'
# Helper routines shared by the XLive web service clients.
module XLiveServices
  module Utils
    # Builds the WS-Addressing/WS-Security SOAP header for a request; the
    # compact RPS ticket is XML-escaped before being embedded.
    def self.BuildHeader(endpoint, action, compactRPSTicket)
      %{
<a:Action s:mustUnderstand="1">#{action}</a:Action>
<a:To s:mustUnderstand="1">#{endpoint}</a:To>
<o:Security s:mustUnderstand="1" xmlns:o="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd">
<cct:RpsSecurityToken wsu:Id="00000000-0000-0000-0000-000000000000" xmlns:cct="http://samples.microsoft.com/wcf/security/Extensibility/" xmlns:wsu="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
<cct:RpsTicket>#{Builder::XChar.encode(compactRPSTicket)}</cct:RpsTicket>
</cct:RpsSecurityToken>
</o:Security>
}
    end

    # Concatenates namespace + configurationName + '/' + operation name
    # into a SOAP action URI.
    def self.BuildAction(namespace, configurationName, name)
      namespace + configurationName + '/' + name
    end

    # Strips the query string and fragment from each URL and returns the
    # cleaned list.  Fix: replaced manual each/<< accumulation with map;
    # the URI constant it relies on now has an explicit require 'uri'.
    def self.CleanURLs(urls)
      urls.map do |url|
        uri = URI.parse(url)
        uri.query = nil
        uri.fragment = nil
        uri.to_s
      end
    end

    # Converts between Ruby values and the WCF serialization wire format.
    class Serialization
      # Wraps +data+ for transmission according to +type+; unknown types
      # serialize to an empty Hash.
      def self.Serialize(data, type)
        serialized = {}
        case type
        when 'enum'
          serialized = data.to_s
        when 'uint[]'
          serialized[:'@xmlns:b'] = 'http://schemas.microsoft.com/2003/10/Serialization/Arrays'
          serialized[:content!] = { 'b:unsignedInt' => data }
        when 'string[]'
          serialized[:'@xmlns:b'] = 'http://schemas.microsoft.com/2003/10/Serialization/Arrays'
          serialized[:content!] = { 'b:string' => data }
        end
        serialized
      end

      # Unwraps a response value; only 'string[]' is understood, all other
      # types deserialize to nil.  A missing value becomes [], a scalar is
      # promoted to a one-element Array.
      def self.Deserialize(data, type)
        unserialized = nil
        case type
        when 'string[]'
          unserialized = data['string']
          unserialized = [] unless unserialized
          unserialized = [unserialized] unless unserialized.is_a?(Array)
        end
        unserialized
      end
    end
  end
end
|
# YahooStocks gem namespace.
module YahooStocks
  # Gem version string; frozen so the shared constant cannot be mutated
  # in place by callers.
  VERSION = '0.0.1'.freeze
end
version bump
# YahooStocks gem namespace.
module YahooStocks
  # Gem version string; frozen so the shared constant cannot be mutated
  # in place by callers.
  VERSION = '0.0.2'.freeze
end
|
module Zeamays
  class Cob
    # Binary serialization support for cob row data.
    # Fix: freeze/freezed were duplicated one-line wrapper methods around
    # pack; they are now declared as aliases of it.
    module Freezing
      # Packs every row with the class's pack pattern and concatenates the
      # results into a single binary string.
      def pack
        @rows.collect { |row|
          row.pack(self.class.package_pattern)
        }.join("")
      end
      # NOTE(review): `freeze` shadows Object#freeze for including classes;
      # presumably intentional domain naming -- confirm before relying on
      # standard freeze semantics for Cob instances.
      alias freeze pack
      alias freezed pack
      module ClassMethods
        # Maps the class's gene_sequence type symbols to an Array#pack
        # directive string.
        def package_pattern
          gene_sequence.map { |type|
            case type
            when :i8 then 'C'
            when :i16 then 'n'
            when :i32 then 'N'
            when :integer then 'N'
            when :string then 'Z*'
            end
          }.join("")
        end
      end
      # Hook: give including classes the pattern helper as a class method.
      def self.included(me)
        me.extend ClassMethods
      end
    end
  end
end
use alias (pack, freeze, freezed)
module Zeamays
  class Cob
    # Binary serialization support for cob row data.
    module Freezing
      # Concatenates every row packed according to the class pack pattern.
      def pack
        @rows.map { |row| row.pack(self.class.package_pattern) }.join("")
      end
      # freeze and freezed are domain-named synonyms for pack
      alias_method :freeze, :pack
      alias_method :freezed, :pack
      module ClassMethods
        # Translates the class's gene_sequence type symbols into an
        # Array#pack directive string.
        def package_pattern
          gene_sequence.map do |type|
            case type
            when :i8 then 'C'
            when :i16 then 'n'
            when :i32 then 'N'
            when :integer then 'N'
            when :string then 'Z*'
            end
          end.join("")
        end
      end
      # Hook: expose package_pattern as a class method on includers.
      def self.included(me)
        me.extend ClassMethods
      end
    end
  end
end
|
require 'zombie_scout/ruby_project'
require 'zombie_scout/parser'
require 'zombie_scout/method_call_finder'
# Mission drives a scan of a Ruby project for "zombie" methods: methods
# that are defined somewhere in the project but never appear to be called.
module ZombieScout
class Mission
# globs - file globs selecting the Ruby sources to scan
def initialize(globs)
puts "Scouting out #{Dir.pwd}!"
@ruby_project = RubyProject.new(*globs)
end
# Runs the scan and prints each suspect as "location<TAB>name", then
# summary statistics.
def scout
start_time = Time.now
zombies.each do |zombie|
# Array#* with a String separator is join
puts [zombie.location, zombie.name] * "\t"
end
duration = Time.now - start_time
# NOTE(review): `methods` here is Kernel#methods (this Mission object's
# own method list), so the reported count looks unintended -- presumably
# it should be the number of scanned project methods; confirm.
puts "Scouted #{methods.size} methods in #{sources.size} files, in #{duration}."
puts "Found #{zombies.size} potential zombies."
end
private
# Lazily computes the suspect list: parse everything once, then keep only
# defined methods that appear "dead" per might_be_dead?.
def zombies
return @zombies unless @zombies.nil?
scout!
@zombies ||= @defined_methods.select { |method|
might_be_dead?(method)
}
end
# Parses every source file, collecting defined and called method names,
# then drops defined methods whose names were already seen called -- a
# cheap pre-filter before the per-method call count below.
def scout!
@defined_methods, @called_methods = [], []
sources.each do |ruby_source|
parser = ZombieScout::Parser.new(ruby_source)
@defined_methods.concat(parser.defined_methods)
@called_methods.concat(parser.called_methods)
end
@called_methods.uniq!
puts "Ignoring #{@called_methods.count} methods that we already saw called."
s = @defined_methods.size
@defined_methods.reject! do |method|
@called_methods.include?(method.name)
end
puts "This will save us #{s - @defined_methods.size} greps."
end
# All Ruby source files in the project (memoized)
def sources
@sources ||= @ruby_project.ruby_sources
end
# A method is a zombie candidate when it is referenced fewer than two
# times project-wide; presumably the definition itself counts as one
# hit -- confirm in MethodCallFinder#count_calls.
def might_be_dead?(method)
@method_call_counter ||= MethodCallFinder.new(@ruby_project)
@method_call_counter.count_calls(method.name) < 2
end
end
end
Delete puts-es about how many greps we save by tracking method calls
require 'zombie_scout/ruby_project'
require 'zombie_scout/parser'
require 'zombie_scout/method_call_finder'
# Mission drives a scan of a Ruby project for "zombie" methods: methods
# that are defined somewhere in the project but never appear to be called.
module ZombieScout
class Mission
# globs - file globs selecting the Ruby sources to scan
def initialize(globs)
puts "Scouting out #{Dir.pwd}!"
@ruby_project = RubyProject.new(*globs)
end
# Runs the scan and prints each suspect as "location<TAB>name", then
# summary statistics.
def scout
start_time = Time.now
zombies.each do |zombie|
# Array#* with a String separator is join
puts [zombie.location, zombie.name] * "\t"
end
duration = Time.now - start_time
# NOTE(review): `methods` here is Kernel#methods (this Mission object's
# own method list), so the reported count looks unintended -- presumably
# it should be the number of scanned project methods; confirm.
puts "Scouted #{methods.size} methods in #{sources.size} files, in #{duration}."
puts "Found #{zombies.size} potential zombies."
end
private
# Lazily computes the suspect list: parse everything once, then keep only
# defined methods that appear "dead" per might_be_dead?.
def zombies
return @zombies unless @zombies.nil?
scout!
@zombies ||= @defined_methods.select { |method|
might_be_dead?(method)
}
end
# Parses every source file, collecting defined and called method names,
# then drops defined methods whose names were already seen called -- a
# cheap pre-filter before the per-method call count below.
def scout!
@defined_methods, @called_methods = [], []
sources.each do |ruby_source|
parser = ZombieScout::Parser.new(ruby_source)
@defined_methods.concat(parser.defined_methods)
@called_methods.concat(parser.called_methods)
end
@called_methods.uniq!
@defined_methods.reject! do |method|
@called_methods.include?(method.name)
end
end
# All Ruby source files in the project (memoized)
def sources
@sources ||= @ruby_project.ruby_sources
end
# A method is a zombie candidate when it is referenced fewer than two
# times project-wide; presumably the definition itself counts as one
# hit -- confirm in MethodCallFinder#count_calls.
def might_be_dead?(method)
@method_call_counter ||= MethodCallFinder.new(@ruby_project)
@method_call_counter.count_calls(method.name) < 2
end
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for rego-ruby-ext 0.0.1: shared Ruby core-class extensions.
Gem::Specification.new do |s|
s.name = %q{rego-ruby-ext}
s.version = "0.0.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Alex Tkachev"]
s.date = %q{2011-08-21}
s.description = %q{Ruby core extensions that are common for all ReGO projects}
s.email = %q{tkachev.alex@gmail.com}
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
# Explicit file manifest; jeweler regenerates this list on 'rake gemspec'
s.files = [
".document",
".rspec",
"Gemfile",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"init.rb",
"lib/date.rb",
"lib/enumerable.rb",
"lib/nil.rb",
"lib/numeric.rb",
"lib/rego-ruby-ext.rb",
"lib/string-interpolation.rb",
"lib/string.rb",
"lib/symbol.rb",
"lib/time.rb",
"rego-ruby-ext.gemspec",
"spec/date_spec.rb",
"spec/enumerable_spec.rb",
"spec/nil_spec.rb",
"spec/numeric_spec.rb",
"spec/rego-ruby-ext_spec.rb",
"spec/spec_helper.rb",
"spec/string_interpolation_spec.rb",
"spec/string_spec.rb",
"spec/symbol_spec.rb",
"spec/time_spec.rb"
]
s.homepage = %q{http://github.com/alextk/rego-ruby-ext}
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.4.2}
s.summary = %q{Some basic ruby core classes extensions with tests}
# Development dependencies, declared via the API variant the active
# rubygems version supports (jeweler boilerplate)
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rdoc>, [">= 0"])
s.add_development_dependency(%q<rspec>, ["~> 2.3.0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<rcov>, [">= 0"])
else
s.add_dependency(%q<rdoc>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.3.0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
end
else
s.add_dependency(%q<rdoc>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.3.0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
end
end
version bump
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for rego-ruby-ext 0.0.2: shared Ruby core-class extensions.
Gem::Specification.new do |s|
s.name = %q{rego-ruby-ext}
s.version = "0.0.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Alex Tkachev"]
s.date = %q{2011-09-14}
s.description = %q{Ruby core extensions that are common for all ReGO projects}
s.email = %q{tkachev.alex@gmail.com}
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
# Explicit file manifest; jeweler regenerates this list on 'rake gemspec'
s.files = [
".document",
".rspec",
"Gemfile",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"init.rb",
"lib/date.rb",
"lib/enumerable.rb",
"lib/hash.rb",
"lib/nil.rb",
"lib/numeric.rb",
"lib/rego-ruby-ext.rb",
"lib/string-interpolation.rb",
"lib/string.rb",
"lib/symbol.rb",
"lib/time.rb",
"rego-ruby-ext.gemspec",
"spec/date_spec.rb",
"spec/enumerable_spec.rb",
"spec/hash_spec.rb",
"spec/nil_spec.rb",
"spec/numeric_spec.rb",
"spec/rego-ruby-ext_spec.rb",
"spec/spec_helper.rb",
"spec/string_interpolation_spec.rb",
"spec/string_spec.rb",
"spec/symbol_spec.rb",
"spec/time_spec.rb"
]
s.homepage = %q{http://github.com/alextk/rego-ruby-ext}
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.4.2}
s.summary = %q{Some basic ruby core classes extensions with tests}
# Development dependencies, declared via the API variant the active
# rubygems version supports (jeweler boilerplate)
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rdoc>, [">= 0"])
s.add_development_dependency(%q<rspec>, ["~> 2.3.0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<rcov>, [">= 0"])
else
s.add_dependency(%q<rdoc>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.3.0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
end
else
s.add_dependency(%q<rdoc>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.3.0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
end
end
|
# -*- encoding: utf-8 -*-
# Gemspec for representable (object <-> XML/JSON document mapping).
lib = File.expand_path('../lib/', __FILE__)
$:.unshift lib unless $:.include?(lib)
require 'representable/version'
Gem::Specification.new do |s|
s.name = "representable"
s.version = Representable::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Nick Sutterer"]
s.email = ["apotonick@gmail.com"]
s.homepage = "https://github.com/apotonick/representable/"
s.summary = %q{Maps representation documents from and to Ruby objects. Includes XML and JSON support, plain properties, collections and compositions.}
s.description = %q{Maps representation documents from and to Ruby objects. Includes XML and JSON support, plain properties, collections and compositions.}
# Package exactly what git tracks
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ["lib"]
# Runtime dependencies: XML and JSON backends
s.add_dependency "nokogiri"
s.add_dependency "multi_json"
s.add_development_dependency "rake"
s.add_development_dependency "test_xml"
s.add_development_dependency "minitest", "~> 4.0.0"
s.add_development_dependency "mocha", ">= 0.13.0"
s.add_development_dependency "mongoid"
s.add_development_dependency "virtus", "~> 0.5.0"
s.add_development_dependency "yajl-ruby"
end
Require minitest 5.0 for testing and the new test_xml 0.1.6.
# -*- encoding: utf-8 -*-
# Gemspec for representable (object <-> XML/JSON document mapping).
lib = File.expand_path('../lib/', __FILE__)
$:.unshift lib unless $:.include?(lib)
require 'representable/version'
Gem::Specification.new do |s|
s.name = "representable"
s.version = Representable::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Nick Sutterer"]
s.email = ["apotonick@gmail.com"]
s.homepage = "https://github.com/apotonick/representable/"
s.summary = %q{Maps representation documents from and to Ruby objects. Includes XML and JSON support, plain properties, collections and compositions.}
s.description = %q{Maps representation documents from and to Ruby objects. Includes XML and JSON support, plain properties, collections and compositions.}
# Package exactly what git tracks
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ["lib"]
# Runtime dependencies: XML and JSON backends
s.add_dependency "nokogiri"
s.add_dependency "multi_json"
s.add_development_dependency "rake"
# Tests require the newer test_xml API and the minitest 5 namespace
s.add_development_dependency "test_xml", ">= 0.1.6"
s.add_development_dependency "minitest", "~> 5.0.0"
s.add_development_dependency "mocha", ">= 0.13.0"
s.add_development_dependency "mongoid"
s.add_development_dependency "virtus", "~> 0.5.0"
s.add_development_dependency "yajl-ruby"
end
|
# CocoaPods spec for the MercadoPago iOS SDK (v4).
Pod::Spec.new do |s|
s.name = "MercadoPagoSDKV4"
s.version = "4.0.5"
s.summary = "MercadoPagoSDK"
s.homepage = "https://www.mercadopago.com"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = "Mercado Pago"
s.source = { :git => "https://github.com/mercadopago/px-ios.git", :tag => s.version.to_s }
s.swift_version = '4.0'
s.platform = :ios, '9.0'
s.requires_arc = true
s.default_subspec = 'Default'
# Core SDK sources plus asset catalogs, fonts, xibs and translations
s.subspec 'Default' do |default|
default.resources = ['MercadoPagoSDK/MercadoPagoSDK/*.xcassets','MercadoPagoSDK/MercadoPagoSDK/*/*.xcassets', 'MercadoPagoSDK/MercadoPagoSDK/*.ttf', 'MercadoPagoSDK/MercadoPagoSDK/**/**.{xib,strings}', 'MercadoPagoSDK/MercadoPagoSDK/Translations/**/**.{plist,strings}', 'MercadoPagoSDK/MercadoPagoSDK/Plist/*.plist', 'MercadoPagoSDK/MercadoPagoSDK/*.lproj']
default.source_files = ['MercadoPagoSDK/MercadoPagoSDK/**/**/**.{h,m,swift}']
# NOTE(review): the receiver here is `s`, not `default`, so MLUI is
# declared on the root spec rather than scoped to this subspec --
# confirm whether that is intended.
s.dependency 'MLUI', '~> 5.0'
end
# ESC subspec: layers the saved-security-code feature on Default via the
# MPESC_ENABLE compile flag
s.subspec 'ESC' do |esc|
esc.dependency 'MercadoPagoSDKV4/Default'
esc.dependency 'MLESCManager'
esc.pod_target_xcconfig = {
'OTHER_SWIFT_FLAGS[config=Debug]' => '-D MPESC_ENABLE',
'OTHER_SWIFT_FLAGS[config=Release]' => '-D MPESC_ENABLE',
'OTHER_SWIFT_FLAGS[config=Testflight]' => '-D MPESC_ENABLE'
}
end
end
[ci deploy]
# CocoaPods spec for the MercadoPago iOS SDK (v4).
Pod::Spec.new do |s|
s.name = "MercadoPagoSDKV4"
s.version = "4.0.5"
s.summary = "MercadoPagoSDK"
s.homepage = "https://www.mercadopago.com"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = "Mercado Pago"
s.source = { :git => "https://github.com/mercadopago/px-ios.git", :tag => s.version.to_s }
s.swift_version = '4.0'
s.platform = :ios, '9.0'
s.requires_arc = true
s.default_subspec = 'Default'
# Core SDK sources plus asset catalogs, fonts, xibs and translations
s.subspec 'Default' do |default|
default.resources = ['MercadoPagoSDK/MercadoPagoSDK/*.xcassets','MercadoPagoSDK/MercadoPagoSDK/*/*.xcassets', 'MercadoPagoSDK/MercadoPagoSDK/*.ttf', 'MercadoPagoSDK/MercadoPagoSDK/**/**.{xib,strings}', 'MercadoPagoSDK/MercadoPagoSDK/Translations/**/**.{plist,strings}', 'MercadoPagoSDK/MercadoPagoSDK/Plist/*.plist', 'MercadoPagoSDK/MercadoPagoSDK/*.lproj']
default.source_files = ['MercadoPagoSDK/MercadoPagoSDK/**/**/**.{h,m,swift}']
# NOTE(review): the receiver here is `s`, not `default`, so MLUI is
# declared on the root spec rather than scoped to this subspec --
# confirm whether that is intended.
s.dependency 'MLUI', '~> 5.0'
end
# ESC subspec: layers the saved-security-code feature on Default via the
# MPESC_ENABLE compile flag
s.subspec 'ESC' do |esc|
esc.dependency 'MercadoPagoSDKV4/Default'
esc.dependency 'MLESCManager'
esc.pod_target_xcconfig = {
'OTHER_SWIFT_FLAGS[config=Debug]' => '-D MPESC_ENABLE',
'OTHER_SWIFT_FLAGS[config=Release]' => '-D MPESC_ENABLE',
'OTHER_SWIFT_FLAGS[config=Testflight]' => '-D MPESC_ENABLE'
}
end
end
|
require 'spec_helper_acceptance'
describe 'cassandra class' do
cassandra_install_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_package = 'cassandra20'
$version = '2.0.17-1'
class { 'cassandra::java':
before => Class['cassandra']
}
} else {
if $::lsbdistid == 'Ubuntu' {
class { 'cassandra::java':
aptkey => {
'openjdk-r' => {
id => 'DA1A4A13543B466853BAF164EB9B1D8886F44E2A',
server => 'keyserver.ubuntu.com',
},
},
aptsource => {
'openjdk-r' => {
location => 'http://ppa.launchpad.net/openjdk-r/ppa/ubuntu',
comment => 'OpenJDK builds (all archs)',
release => $::lsbdistcodename,
repos => 'main',
},
},
package_name => 'openjdk-8-jdk',
}
} else {
class { 'cassandra::java':
aptkey => {
'ZuluJDK' => {
id => '27BC0C8CB3D81623F59BDADCB1998361219BD9C9',
server => 'keyserver.ubuntu.com',
},
},
aptsource => {
'ZuluJDK' => {
location => 'http://repos.azulsystems.com/debian',
comment => 'Zulu OpenJDK 8 for Debian',
release => 'stable',
repos => 'main',
},
},
package_name => 'zulu-8',
}
}
$cassandra_package = 'cassandra'
$version = '2.0.17'
exec { '/bin/chown root:root /etc/apt/sources.list.d/datastax.list':
unless => '/usr/bin/test -O /etc/apt/sources.list.d/datastax.list',
require => Class['cassandra::opscenter']
}
}
class { 'cassandra::datastax_repo': } ->
file { '/var/lib/cassandra':
ensure => directory,
} ->
file { '/var/lib/cassandra/commitlog':
ensure => directory,
} ->
file { '/var/lib/cassandra/caches':
ensure => directory,
} ->
file { [ '/var/lib/cassandra/data' ]:
ensure => directory,
} ->
class { 'cassandra':
cassandra_9822 => true,
cassandra_yaml_tmpl => 'cassandra/cassandra20.yaml.erb',
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
package_ensure => $version,
package_name => $cassandra_package,
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
class { '::cassandra::datastax_agent':
service_systemd => $service_systemd,
require => Class['cassandra']
}
class { '::cassandra::opscenter::pycrypto':
manage_epel => true,
before => Class['::cassandra::opscenter']
}
class { '::cassandra::opscenter':
config_purge => true,
service_systemd => $service_systemd,
require => Class['cassandra'],
}
cassandra::opscenter::cluster_name { 'Cluster1':
cassandra_seed_hosts => 'host1,host2',
}
EOS
describe '########### Cassandra 2.0 installation.' do
it 'should work with no errors' do
apply_manifest(cassandra_install_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(cassandra_install_pp,
catch_failures: true).exit_code).to be_zero
end
end
firewall_config_pp = <<-EOS
if $::osfamily == 'RedHat' {
$cassandra_package = 'cassandra20'
$version = '2.0.17-1'
} else {
$cassandra_package = 'cassandra'
$version = '2.0.17'
}
class { 'cassandra':
cassandra_9822 => true,
cassandra_yaml_tmpl => 'cassandra/cassandra20.yaml.erb',
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
package_ensure => $version,
package_name => $cassandra_package,
saved_caches_directory_mode => '0770',
}
include '::cassandra::datastax_agent'
include '::cassandra::opscenter'
# This really sucks but Docker, CentOS 6 and iptables don't play nicely
# together. Therefore we can't test the firewall on this platform :-(
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
include '::cassandra::firewall_ports'
}
EOS
describe '########### Firewall configuration.' do
it 'should work with no errors' do
apply_manifest(firewall_config_pp, catch_failures: true)
end
end
describe service('cassandra') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
describe service('datastax-agent') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
describe service('opscenterd') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
cassandra_uninstall20_pp = <<-EOS
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra20-tools'
$cassandra_package = 'cassandra20'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
}
package { [$cassandra_optutils_package, $cassandra_package ]:
ensure => absent
}
EOS
describe '########### Uninstall Cassandra 2.0.' do
it 'should work with no errors' do
apply_manifest(cassandra_uninstall20_pp, catch_failures: true)
end
end
cassandra_upgrade21_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra21-tools'
$cassandra_package = 'cassandra21'
$version = '2.1.13-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.1.13'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
class { 'cassandra::optutils':
ensure => $version,
package_name => $cassandra_optutils_package,
require => Class['cassandra']
}
EOS
describe '########### Cassandra 2.1 installation.' do
it 'should work with no errors' do
apply_manifest(cassandra_upgrade21_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(cassandra_upgrade21_pp,
catch_failures: true).exit_code).to be_zero
end
end
describe service('cassandra') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
cassandra_uninstall21_pp = <<-EOS
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra21-tools'
$cassandra_package = 'cassandra21'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
}
package { [$cassandra_optutils_package, $cassandra_package ]:
ensure => absent
}
EOS
describe '########### Uninstall Cassandra 2.1.' do
it 'should work with no errors' do
apply_manifest(cassandra_uninstall21_pp, catch_failures: true)
end
end
cassandra_upgrade22_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
class { 'cassandra::optutils':
package_ensure => $version,
package_name => $cassandra_optutils_package,
require => Class['cassandra']
}
EOS
describe '########### Cassandra 2.2 installation.' do
it 'should work with no errors' do
apply_manifest(cassandra_upgrade22_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(cassandra_upgrade22_pp,
catch_failures: true).exit_code).to be_zero
end
end
describe service('cassandra') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
schema_testing_create_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
$keyspaces = {
'mykeyspace' => {
ensure => present,
replication_map => {
keyspace_class => 'SimpleStrategy',
replication_factor => 1,
},
durable_writes => false,
},
}
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
class { 'cassandra::schema':
indexes => {
'users_lname_idx' => {
keyspace => 'mykeyspace',
table => 'users',
keys => 'lname',
},
},
keyspaces => $keyspaces,
tables => {
'users' => {
'keyspace' => 'mykeyspace',
'columns' => {
'userid' => 'int',
'fname' => 'text',
'lname' => 'text',
'PRIMARY KEY' => '(userid)',
},
},
},
}
}
EOS
describe '########### Schema create.' do
it 'should work with no errors' do
apply_manifest(schema_testing_create_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_create_pp,
catch_failures: true).exit_code).to be_zero
end
end
schema_testing_drop__index_and_cql_type_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
$cql_types = {
'address' => {
'keyspace' => 'Excalibur',
'ensure' => 'absent'
}
}
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
class { 'cassandra::schema':
indexes => {
'users_emails_idx' => {
ensure => absent,
keyspace => 'Excalibur',
table => 'users',
},
},
cql_types => $cql_types
}
}
EOS
describe '########### Schema drop (Indexes & Types).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop__index_and_cql_type_pp,
catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop__index_and_cql_type_pp,
catch_failures: true).exit_code).to be_zero
end
end
schema_testing_drop_table_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
class { 'cassandra::schema':
tables => {
'users' => {
ensure => absent,
keyspace => 'Excalibur',
},
},
}
}
EOS
describe '########### Schema drop (Tables).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_table_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_table_pp,
catch_failures: true).exit_code).to be_zero
end
end
schema_testing_drop_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
$keyspaces = {
'Excelsior' => {
ensure => absent,
}
}
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
class { 'cassandra::schema':
keyspaces => $keyspaces,
}
}
EOS
describe '########### Schema drop (Keyspaces).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_pp,
catch_failures: true).exit_code).to be_zero
end
end
cassandra_uninstall22_pp = <<-EOS
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
}
package { $cassandra_optutils_package:
ensure => absent
} ->
package { $cassandra_package:
ensure => absent
}
EOS
cassandra_upgrade30_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian'
and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra30-tools'
$cassandra_package = 'cassandra30'
$version = '3.0.3-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '3.0.3'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
hints_directory => '/var/lib/cassandra/hints',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
#service_systemd => $service_systemd
}
class { 'cassandra::optutils':
ensure => $version,
package_name => $cassandra_optutils_package,
require => Class['cassandra']
}
EOS
describe '########### Uninstall Cassandra 2.2.' do
it 'should work with no errors' do
apply_manifest(cassandra_uninstall22_pp, catch_failures: true)
end
end
describe '########### Cassandra 3.0 installation.' do
it 'should work with no errors' do
apply_manifest(cassandra_upgrade30_pp, catch_failures: true)
end
it 'Give Cassandra 3.0 a minute to fully come alive.' do
sleep 60
end
it 'check code is idempotent' do
expect(apply_manifest(cassandra_upgrade30_pp,
catch_failures: true).exit_code).to be_zero
end
end
describe service('cassandra') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
check_against_previous_version_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian'
and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra30-tools'
$cassandra_package = 'cassandra30'
$version = '3.0.3-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '3.0.3'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
hints_directory => '/var/lib/cassandra/hints',
package_ensure => $version,
package_name => $cassandra_package,
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd,
}
EOS
describe '########### Ensure config file does get updated.' do
it 'Initial install manifest again' do
apply_manifest(check_against_previous_version_pp,
catch_failures: true)
end
it 'Copy the current module to the side without error.' do
shell('cp -R /etc/puppet/modules/cassandra /var/tmp',
acceptable_exit_codes: 0)
end
it 'Remove the current module without error.' do
shell('puppet module uninstall locp-cassandra',
acceptable_exit_codes: 0)
end
it 'Install the latest module from the forge.' do
shell('puppet module install locp-cassandra',
acceptable_exit_codes: 0)
end
it 'Check install works without changes with previous module version.' do
expect(apply_manifest(check_against_previous_version_pp,
catch_failures: true).exit_code).to be_zero
end
end
describe '########### Gather service information (when in debug mode).' do
it 'Show the cassandra system log.' do
shell("grep -v -e '^INFO' -e '^\s*INFO' /var/log/cassandra/system.log")
end
end
end
Minor re-ordering of resources in acceptance tests.
require 'spec_helper_acceptance'
describe 'cassandra class' do
cassandra_install_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_package = 'cassandra20'
$version = '2.0.17-1'
class { 'cassandra::java':
before => Class['cassandra']
}
} else {
if $::lsbdistid == 'Ubuntu' {
class { 'cassandra::java':
aptkey => {
'openjdk-r' => {
id => 'DA1A4A13543B466853BAF164EB9B1D8886F44E2A',
server => 'keyserver.ubuntu.com',
},
},
aptsource => {
'openjdk-r' => {
location => 'http://ppa.launchpad.net/openjdk-r/ppa/ubuntu',
comment => 'OpenJDK builds (all archs)',
release => $::lsbdistcodename,
repos => 'main',
},
},
package_name => 'openjdk-8-jdk',
}
} else {
class { 'cassandra::java':
aptkey => {
'ZuluJDK' => {
id => '27BC0C8CB3D81623F59BDADCB1998361219BD9C9',
server => 'keyserver.ubuntu.com',
},
},
aptsource => {
'ZuluJDK' => {
location => 'http://repos.azulsystems.com/debian',
comment => 'Zulu OpenJDK 8 for Debian',
release => 'stable',
repos => 'main',
},
},
package_name => 'zulu-8',
}
}
$cassandra_package = 'cassandra'
$version = '2.0.17'
exec { '/bin/chown root:root /etc/apt/sources.list.d/datastax.list':
unless => '/usr/bin/test -O /etc/apt/sources.list.d/datastax.list',
require => Class['cassandra::opscenter']
}
}
class { 'cassandra::datastax_repo': } ->
file { '/var/lib/cassandra':
ensure => directory,
} ->
file { '/var/lib/cassandra/commitlog':
ensure => directory,
} ->
file { '/var/lib/cassandra/caches':
ensure => directory,
} ->
file { [ '/var/lib/cassandra/data' ]:
ensure => directory,
} ->
class { 'cassandra':
cassandra_9822 => true,
cassandra_yaml_tmpl => 'cassandra/cassandra20.yaml.erb',
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
package_ensure => $version,
package_name => $cassandra_package,
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
class { '::cassandra::datastax_agent':
service_systemd => $service_systemd,
require => Class['cassandra']
}
class { '::cassandra::opscenter::pycrypto':
manage_epel => true,
before => Class['::cassandra::opscenter'],
require => Class['::cassandra'],
}
class { '::cassandra::opscenter':
config_purge => true,
service_systemd => $service_systemd,
require => Class['cassandra'],
}
cassandra::opscenter::cluster_name { 'Cluster1':
cassandra_seed_hosts => 'host1,host2',
}
EOS
describe '########### Cassandra 2.0 installation.' do
it 'should work with no errors' do
apply_manifest(cassandra_install_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(cassandra_install_pp,
catch_failures: true).exit_code).to be_zero
end
end
firewall_config_pp = <<-EOS
if $::osfamily == 'RedHat' {
$cassandra_package = 'cassandra20'
$version = '2.0.17-1'
} else {
$cassandra_package = 'cassandra'
$version = '2.0.17'
}
class { 'cassandra':
cassandra_9822 => true,
cassandra_yaml_tmpl => 'cassandra/cassandra20.yaml.erb',
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
package_ensure => $version,
package_name => $cassandra_package,
saved_caches_directory_mode => '0770',
}
include '::cassandra::datastax_agent'
include '::cassandra::opscenter'
# This really sucks but Docker, CentOS 6 and iptables don't play nicely
# together. Therefore we can't test the firewall on this platform :-(
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
include '::cassandra::firewall_ports'
}
EOS
describe '########### Firewall configuration.' do
it 'should work with no errors' do
apply_manifest(firewall_config_pp, catch_failures: true)
end
end
describe service('cassandra') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
describe service('datastax-agent') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
describe service('opscenterd') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
cassandra_uninstall20_pp = <<-EOS
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra20-tools'
$cassandra_package = 'cassandra20'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
}
package { [$cassandra_optutils_package, $cassandra_package ]:
ensure => absent
}
EOS
describe '########### Uninstall Cassandra 2.0.' do
it 'should work with no errors' do
apply_manifest(cassandra_uninstall20_pp, catch_failures: true)
end
end
cassandra_upgrade21_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra21-tools'
$cassandra_package = 'cassandra21'
$version = '2.1.13-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.1.13'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
class { 'cassandra::optutils':
ensure => $version,
package_name => $cassandra_optutils_package,
require => Class['cassandra']
}
EOS
describe '########### Cassandra 2.1 installation.' do
it 'should work with no errors' do
apply_manifest(cassandra_upgrade21_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(cassandra_upgrade21_pp,
catch_failures: true).exit_code).to be_zero
end
end
describe service('cassandra') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
cassandra_uninstall21_pp = <<-EOS
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra21-tools'
$cassandra_package = 'cassandra21'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
}
package { [$cassandra_optutils_package, $cassandra_package ]:
ensure => absent
}
EOS
describe '########### Uninstall Cassandra 2.1.' do
it 'should work with no errors' do
apply_manifest(cassandra_uninstall21_pp, catch_failures: true)
end
end
cassandra_upgrade22_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
class { 'cassandra::optutils':
package_ensure => $version,
package_name => $cassandra_optutils_package,
require => Class['cassandra']
}
EOS
describe '########### Cassandra 2.2 installation.' do
it 'should work with no errors' do
apply_manifest(cassandra_upgrade22_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(cassandra_upgrade22_pp,
catch_failures: true).exit_code).to be_zero
end
end
describe service('cassandra') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
schema_testing_create_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
$keyspaces = {
'mykeyspace' => {
ensure => present,
replication_map => {
keyspace_class => 'SimpleStrategy',
replication_factor => 1,
},
durable_writes => false,
},
}
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
class { 'cassandra::schema':
indexes => {
'users_lname_idx' => {
keyspace => 'mykeyspace',
table => 'users',
keys => 'lname',
},
},
keyspaces => $keyspaces,
tables => {
'users' => {
'keyspace' => 'mykeyspace',
'columns' => {
'userid' => 'int',
'fname' => 'text',
'lname' => 'text',
'PRIMARY KEY' => '(userid)',
},
},
},
}
}
EOS
describe '########### Schema create.' do
it 'should work with no errors' do
apply_manifest(schema_testing_create_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_create_pp,
catch_failures: true).exit_code).to be_zero
end
end
schema_testing_drop__index_and_cql_type_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
$cql_types = {
'address' => {
'keyspace' => 'Excalibur',
'ensure' => 'absent'
}
}
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
class { 'cassandra::schema':
indexes => {
'users_emails_idx' => {
ensure => absent,
keyspace => 'Excalibur',
table => 'users',
},
},
cql_types => $cql_types
}
}
EOS
describe '########### Schema drop (Indexes & Types).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop__index_and_cql_type_pp,
catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop__index_and_cql_type_pp,
catch_failures: true).exit_code).to be_zero
end
end
schema_testing_drop_table_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
class { 'cassandra::schema':
tables => {
'users' => {
ensure => absent,
keyspace => 'Excalibur',
},
},
}
}
EOS
describe '########### Schema drop (Tables).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_table_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_table_pp,
catch_failures: true).exit_code).to be_zero
end
end
schema_testing_drop_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian' and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
$version = '2.2.5-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '2.2.5'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd
}
$keyspaces = {
'Excelsior' => {
ensure => absent,
}
}
if $::operatingsystem != CentOS and $::operatingsystemmajrelease != 6 {
class { 'cassandra::schema':
keyspaces => $keyspaces,
}
}
EOS
describe '########### Schema drop (Keyspaces).' do
it 'should work with no errors' do
apply_manifest(schema_testing_drop_pp, catch_failures: true)
end
it 'check code is idempotent' do
expect(apply_manifest(schema_testing_drop_pp,
catch_failures: true).exit_code).to be_zero
end
end
cassandra_uninstall22_pp = <<-EOS
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra22-tools'
$cassandra_package = 'cassandra22'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
}
package { $cassandra_optutils_package:
ensure => absent
} ->
package { $cassandra_package:
ensure => absent
}
EOS
cassandra_upgrade30_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian'
and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra30-tools'
$cassandra_package = 'cassandra30'
$version = '3.0.3-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '3.0.3'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
hints_directory => '/var/lib/cassandra/hints',
listen_interface => 'lo',
package_ensure => $version,
package_name => $cassandra_package,
rpc_interface => 'lo',
saved_caches_directory_mode => '0770',
#service_systemd => $service_systemd
}
class { 'cassandra::optutils':
ensure => $version,
package_name => $cassandra_optutils_package,
require => Class['cassandra']
}
EOS
describe '########### Uninstall Cassandra 2.2.' do
it 'should work with no errors' do
apply_manifest(cassandra_uninstall22_pp, catch_failures: true)
end
end
describe '########### Cassandra 3.0 installation.' do
it 'should work with no errors' do
apply_manifest(cassandra_upgrade30_pp, catch_failures: true)
end
it 'Give Cassandra 3.0 a minute to fully come alive.' do
sleep 60
end
it 'check code is idempotent' do
expect(apply_manifest(cassandra_upgrade30_pp,
catch_failures: true).exit_code).to be_zero
end
end
describe service('cassandra') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
check_against_previous_version_pp = <<-EOS
if $::osfamily == 'RedHat' and $::operatingsystemmajrelease == 7 {
$service_systemd = true
} elsif $::operatingsystem == 'Debian'
and $::operatingsystemmajrelease == 8 {
$service_systemd = true
} else {
$service_systemd = false
}
if $::osfamily == 'RedHat' {
$cassandra_optutils_package = 'cassandra30-tools'
$cassandra_package = 'cassandra30'
$version = '3.0.3-1'
} else {
$cassandra_optutils_package = 'cassandra-tools'
$cassandra_package = 'cassandra'
$version = '3.0.3'
}
class { 'cassandra':
cassandra_9822 => true,
commitlog_directory_mode => '0770',
data_file_directories_mode => '0770',
hints_directory => '/var/lib/cassandra/hints',
package_ensure => $version,
package_name => $cassandra_package,
saved_caches_directory_mode => '0770',
service_systemd => $service_systemd,
}
EOS
describe '########### Ensure config file does get updated.' do
it 'Initial install manifest again' do
apply_manifest(check_against_previous_version_pp,
catch_failures: true)
end
it 'Copy the current module to the side without error.' do
shell('cp -R /etc/puppet/modules/cassandra /var/tmp',
acceptable_exit_codes: 0)
end
it 'Remove the current module without error.' do
shell('puppet module uninstall locp-cassandra',
acceptable_exit_codes: 0)
end
it 'Install the latest module from the forge.' do
shell('puppet module install locp-cassandra',
acceptable_exit_codes: 0)
end
it 'Check install works without changes with previous module version.' do
expect(apply_manifest(check_against_previous_version_pp,
catch_failures: true).exit_code).to be_zero
end
end
describe '########### Gather service information (when in debug mode).' do
it 'Show the cassandra system log.' do
shell("grep -v -e '^INFO' -e '^\s*INFO' /var/log/cassandra/system.log")
end
end
end
|
require 'spec_helper_acceptance'
describe 'kallithea class' do
if ENV.has_key?('KALLITHEA_VERSION')
kallithea_version = ENV['KALLITHEA_VERSION']
kallithea_version_string = "'#{ENV['KALLITHEA_VERSION']}'"
else
kallithea_version = nil
kallithea_version_string = "undef"
end
context 'default parameters and manage_git, seed_db => true' do
# Using puppet_apply as a helper
it 'should work idempotently with no errors' do
pp = <<-EOS
class { 'kallithea':
seed_db => true,
manage_git => true,
version => #{kallithea_version_string},
}
EOS
# Run it twice and test for idempotency
apply_manifest(pp, :catch_failures => true)
apply_manifest(pp, :catch_changes => true)
end
# the puppetlabs/git module <=0.4.0 does not officially support Fedora, so
# we do some rudimentary testing here:
describe command('git --version') do
its(:exit_status) { should eq 0 }
end
describe file('/srv/kallithea/kallithea.db') do
it { should be_owned_by 'kallithea' }
end
describe service('kallithea') do
it { is_expected.to be_running }
it { is_expected.to be_enabled }
end
describe port(5000) do
it { should be_listening }
end
describe command('curl -I localhost:5000 2> /dev/null | head -n1') do
its(:exit_status) { should eq 0 }
its(:stdout) { should eq "HTTP/1.1 200 OK\n" }
end
describe command('puppet resource service kallithea ensure=stopped') do
its(:exit_status) { should eq 0 }
end
describe port(5000) do
it {
sleep(5)
should_not be_listening
}
end
describe command('puppet resource service kallithea ensure=running') do
its(:exit_status) { should eq 0 }
end
describe port(5000) do
it {
sleep(10)
should be_listening
}
end
describe command('/srv/kallithea/venv/bin/pip show kallithea') do
its(:exit_status) { should eq 0 }
if kallithea_version
its(:stdout) { should match /^Version: #{kallithea_version}$/ }
end
end
end
context 'change configuration with config_hash parameter' do
# Using puppet_apply as a helper
it 'should work idempotently with no errors' do
pp = <<-EOS
class { 'kallithea':
config_hash => {
'server:main' => {
'port' => '12345',
},
'DEFAULT' => {
'smtp_port' => '25',
}
}
}
EOS
# Run it twice and test for idempotency
apply_manifest(pp, :catch_failures => true)
apply_manifest(pp, :catch_changes => true)
end
describe port(12345) do
it {
sleep(10)
should be_listening
}
end
end
context 'downgrading to kallithea v0.2.1' do
# Using puppet_apply as a helper
it 'should work idempotently with no errors' do
pp = <<-EOS
class { 'kallithea':
version => '0.2.1',
}
EOS
# Run it twice and test for idempotency
apply_manifest(pp, :catch_failures => true)
apply_manifest(pp, :catch_changes => true)
end
describe command('/srv/kallithea/venv/bin/pip show kallithea') do
its(:stdout) { should match /^Version: 0.2.1$/ }
end
end
end
Add an acceptance test for the new port parameter.
require 'spec_helper_acceptance'

# Acceptance tests for the kallithea class: default parameters (with
# seed_db and manage_git enabled), configuration via config_hash and the
# port parameter, and downgrading to a pinned version.
describe 'kallithea class' do
  # CI can pin the version under test via KALLITHEA_VERSION; otherwise the
  # module default is used (version => undef in the manifest below).
  if ENV.has_key?('KALLITHEA_VERSION')
    kallithea_version = ENV['KALLITHEA_VERSION']
    kallithea_version_string = "'#{ENV['KALLITHEA_VERSION']}'"
  else
    kallithea_version = nil
    kallithea_version_string = "undef"
  end

  context 'default parameters and manage_git, seed_db => true' do
    # Using puppet_apply as a helper
    it 'should work idempotently with no errors' do
      pp = <<-EOS
class { 'kallithea':
  seed_db => true,
  manage_git => true,
  version => #{kallithea_version_string},
}
EOS

      # Run it twice and test for idempotency
      apply_manifest(pp, :catch_failures => true)
      apply_manifest(pp, :catch_changes => true)
    end

    # the puppetlabs/git module <=0.4.0 does not officially support Fedora, so
    # we do some rudimentary testing here:
    describe command('git --version') do
      its(:exit_status) { should eq 0 }
    end

    # The seeded database must belong to the service user.
    describe file('/srv/kallithea/kallithea.db') do
      it { should be_owned_by 'kallithea' }
    end

    describe service('kallithea') do
      it { is_expected.to be_running }
      it { is_expected.to be_enabled }
    end

    # Default listen port.
    describe port(5000) do
      it { should be_listening }
    end

    describe command('curl -I localhost:5000 2> /dev/null | head -n1') do
      its(:exit_status) { should eq 0 }
      its(:stdout) { should eq "HTTP/1.1 200 OK\n" }
    end

    # Stopping the service should release the port...
    describe command('puppet resource service kallithea ensure=stopped') do
      its(:exit_status) { should eq 0 }
    end

    describe port(5000) do
      it {
        sleep(5) # give the service time to shut down
        should_not be_listening
      }
    end

    # ...and starting it should bring the listener back.
    describe command('puppet resource service kallithea ensure=running') do
      its(:exit_status) { should eq 0 }
    end

    describe port(5000) do
      it {
        sleep(10) # give the service time to start up
        should be_listening
      }
    end

    describe command('/srv/kallithea/venv/bin/pip show kallithea') do
      its(:exit_status) { should eq 0 }
      if kallithea_version
        # Escape the version string so '.' is matched literally, not as
        # the regex wildcard.
        its(:stdout) { should match /^Version: #{Regexp.escape(kallithea_version)}$/ }
      end
    end
  end

  context 'change configuration with config_hash parameter' do
    # Using puppet_apply as a helper
    it 'should work idempotently with no errors' do
      pp = <<-EOS
class { 'kallithea':
  config_hash => {
    'server:main' => {
      'port' => '12345',
    },
    'DEFAULT' => {
      'smtp_port' => '25',
    }
  }
}
EOS

      # Run it twice and test for idempotency
      apply_manifest(pp, :catch_failures => true)
      apply_manifest(pp, :catch_changes => true)
    end

    describe port(12345) do
      it {
        sleep(10) # give the service time to restart on the new port
        should be_listening
      }
    end
  end

  context 'change configuration with port parameter' do
    # Using puppet_apply as a helper
    it 'should work idempotently with no errors' do
      pp = <<-EOS
class { 'kallithea':
  port => 1234,
}
EOS

      # Run it twice and test for idempotency
      apply_manifest(pp, :catch_failures => true)
      apply_manifest(pp, :catch_changes => true)
    end

    describe port(1234) do
      it {
        sleep(10) # give the service time to restart on the new port
        should be_listening
      }
    end
  end

  context 'downgrading to kallithea v0.2.1' do
    # Using puppet_apply as a helper
    it 'should work idempotently with no errors' do
      pp = <<-EOS
class { 'kallithea':
  version => '0.2.1',
}
EOS

      # Run it twice and test for idempotency
      apply_manifest(pp, :catch_failures => true)
      apply_manifest(pp, :catch_changes => true)
    end

    describe command('/srv/kallithea/venv/bin/pip show kallithea') do
      # Dots escaped so the pattern matches the literal version only.
      its(:stdout) { should match /^Version: 0\.2\.1$/ }
    end
  end
end
|
require 'spec_helper'

RSpec.describe 'bug_report_templates' do
  # Root of the gem checkout; handed to the template via ACTIVE_ADMIN_PATH.
  let(:active_admin_root) { File.expand_path('../..', __FILE__) }
  # Directory that holds the runnable bug-report templates.
  let(:chdir_path) { File.join(active_admin_root, 'lib', 'bug_report_templates') }

  # Executes the template in a child ruby process from inside the templates
  # directory, under a clean bundler environment. Truthy only on success.
  subject do
    env = {'ACTIVE_ADMIN_PATH' => active_admin_root}
    Bundler.with_clean_env do
      Dir.chdir(chdir_path) { system(env, Gem.ruby, template_path) }
    end
  end

  context 'when runs rails_5_master.rb' do
    let(:template_path) { 'rails_5_master.rb' }

    it 'passes' do
      expect(subject).to be_truthy
    end
  end
end
Cleanup output in bug_report_template test
Right now it's too verbose. Just print a dot like the other tests do.
require 'spec_helper'

RSpec.describe 'bug_report_templates' do
  # Root of the gem checkout; handed to the template via ACTIVE_ADMIN_PATH.
  let(:active_admin_root) { File.expand_path('../..', __FILE__) }
  # Directory that holds the runnable bug-report templates.
  let(:chdir_path) { File.join(active_admin_root, 'lib', 'bug_report_templates') }

  # Executes the template in a child ruby process from inside the templates
  # directory, under a clean bundler environment. Stdout is discarded so the
  # suite output stays quiet; truthy only on success.
  subject do
    env = {'ACTIVE_ADMIN_PATH' => active_admin_root}
    Bundler.with_clean_env do
      Dir.chdir(chdir_path) do
        system(env, Gem.ruby, template_path, out: File::NULL)
      end
    end
  end

  context 'when runs rails_5_master.rb' do
    let(:template_path) { 'rails_5_master.rb' }

    it 'passes' do
      expect(subject).to be_truthy
    end
  end
end
|
require 'spec_helper'

# Unit specs for Chewy::Query::Criteria: default state, the update_*
# mutators, merge semantics, and composition of the Elasticsearch
# request body (queries, filters, types joins).
describe Chewy::Query::Criteria do
  include ClassHelpers

  subject { described_class.new }

  # A freshly built criteria object is empty everywhere.
  its(:options) { should be_a Hash }
  its(:facets) { should == {} }
  its(:aggregations) { should == {} }
  its(:queries) { should == [] }
  its(:filters) { should == [] }
  its(:sort) { should == [] }
  its(:fields) { should == [] }
  its(:types) { should == [] }

  its(:none?) { should be_false }
  its(:facets?) { should be_false }
  its(:aggregations?) { should be_false }
  its(:queries?) { should be_false }
  its(:filters?) { should be_false }
  its(:sort?) { should be_false }
  its(:fields?) { should be_false }
  its(:types?) { should be_false }

  describe '#update_options' do
    specify { expect { subject.update_options(field: 'hello') }.to change { subject.options }.to(hash_including(field: 'hello')) }
  end

  describe '#update_facets' do
    specify { expect { subject.update_facets(field: 'hello') }.to change { subject.facets? }.to(true) }
    specify { expect { subject.update_facets(field: 'hello') }.to change { subject.facets }.to(field: 'hello') }
  end

  describe '#update_aggregations' do
    specify { expect { subject.update_aggregations(field: 'hello') }.to change { subject.aggregations? }.to(true) }
    specify { expect { subject.update_aggregations(field: 'hello') }.to change { subject.aggregations }.to(field: 'hello') }
  end

  describe '#update_queries' do
    specify { expect { subject.update_queries(field: 'hello') }.to change { subject.queries? }.to(true) }
    specify { expect { subject.update_queries(field: 'hello') }.to change { subject.queries }.to([field: 'hello']) }
    specify { expect { subject.update_queries(field: 'hello'); subject.update_queries(field: 'world') }
      .to change { subject.queries }.to([{field: 'hello'}, {field: 'world'}]) }
    # nil entries are dropped on update.
    specify { expect { subject.update_queries([{field: 'hello'}, {field: 'world'}, nil]) }
      .to change { subject.queries }.to([{field: 'hello'}, {field: 'world'}]) }
  end

  describe '#update_filters' do
    specify { expect { subject.update_filters(field: 'hello') }.to change { subject.filters? }.to(true) }
    specify { expect { subject.update_filters(field: 'hello') }.to change { subject.filters }.to([{field: 'hello'}]) }
    specify { expect { subject.update_filters(field: 'hello'); subject.update_filters(field: 'world') }
      .to change { subject.filters }.to([{field: 'hello'}, {field: 'world'}]) }
    # nil entries are dropped on update.
    specify { expect { subject.update_filters([{field: 'hello'}, {field: 'world'}, nil]) }
      .to change { subject.filters }.to([{field: 'hello'}, {field: 'world'}]) }
  end

  describe '#update_sort' do
    specify { expect { subject.update_sort(:field) }.to change { subject.sort? }.to(true) }
    specify { expect { subject.update_sort([:field]) }.to change { subject.sort }.to([:field]) }
    specify { expect { subject.update_sort([:field1, :field2]) }.to change { subject.sort }.to([:field1, :field2]) }
    specify { expect { subject.update_sort([{field: :asc}]) }.to change { subject.sort }.to([{field: :asc}]) }
    specify { expect { subject.update_sort([:field1, field2: {order: :asc}]) }.to change { subject.sort }.to([:field1, {field2: {order: :asc}}]) }
    specify { expect { subject.update_sort([{field1: {order: :asc}}, :field2]) }.to change { subject.sort }.to([{field1: {order: :asc}}, :field2]) }
    specify { expect { subject.update_sort([field1: :asc, field2: {order: :asc}]) }.to change { subject.sort }.to([{field1: :asc}, {field2: {order: :asc}}]) }
    specify { expect { subject.update_sort([{field1: {order: :asc}}, :field2, :field3]) }.to change { subject.sort }.to([{field1: {order: :asc}}, :field2, :field3]) }
    # Nested arrays and multi-key hashes are flattened into a uniform list.
    specify { expect { subject.update_sort([{field1: {order: :asc}}, [:field2, :field3]]) }.to change { subject.sort }.to([{field1: {order: :asc}}, :field2, :field3]) }
    specify { expect { subject.update_sort([{field1: {order: :asc}}, [:field2], :field3]) }.to change { subject.sort }.to([{field1: {order: :asc}}, :field2, :field3]) }
    specify { expect { subject.update_sort([{field1: {order: :asc}, field2: :desc}, [:field3], :field4]) }.to change { subject.sort }.to([{field1: {order: :asc}}, {field2: :desc}, :field3, :field4]) }
    specify { expect { subject.tap { |s| s.update_sort([field1: {order: :asc}, field2: :desc]) }.update_sort([[:field3], :field4]) }.to change { subject.sort }.to([{field1: {order: :asc}}, {field2: :desc}, :field3, :field4]) }
    # purge: true replaces the accumulated sort instead of appending.
    specify { expect { subject.tap { |s| s.update_sort([field1: {order: :asc}, field2: :desc]) }.update_sort([[:field3], :field4], purge: true) }.to change { subject.sort }.to([:field3, :field4]) }
  end

  describe '#update_fields' do
    specify { expect { subject.update_fields(:field) }.to change { subject.fields? }.to(true) }
    specify { expect { subject.update_fields(:field) }.to change { subject.fields }.to(['field']) }
    specify { expect { subject.update_fields([:field, :field]) }.to change { subject.fields }.to(['field']) }
    specify { expect { subject.update_fields([:field1, :field2]) }.to change { subject.fields }.to(['field1', 'field2']) }
    specify { expect { subject.tap { |s| s.update_fields(:field1) }.update_fields([:field2, :field3]) }
      .to change { subject.fields }.to(['field1', 'field2', 'field3']) }
    specify { expect { subject.tap { |s| s.update_fields(:field1) }.update_fields([:field2, :field3], purge: true) }
      .to change { subject.fields }.to(['field2', 'field3']) }
  end

  describe '#update_types' do
    specify { expect { subject.update_types(:type) }.to change { subject.types? }.to(true) }
    specify { expect { subject.update_types(:type) }.to change { subject.types }.to(['type']) }
    specify { expect { subject.update_types([:type, :type]) }.to change { subject.types }.to(['type']) }
    specify { expect { subject.update_types([:type1, :type2]) }.to change { subject.types }.to(['type1', 'type2']) }
    specify { expect { subject.tap { |s| s.update_types(:type1) }.update_types([:type2, :type3]) }
      .to change { subject.types }.to(['type1', 'type2', 'type3']) }
    specify { expect { subject.tap { |s| s.update_types(:type1) }.update_types([:type2, :type3], purge: true) }
      .to change { subject.types }.to(['type2', 'type3']) }
  end

  describe '#merge' do
    let(:criteria) { described_class.new }

    # #merge returns a new object, leaving both operands untouched.
    specify { subject.merge(criteria).should_not be_equal subject }
    specify { subject.merge(criteria).should_not be_equal criteria }
    specify { subject.tap { |c| c.update_options(opt1: 'hello') }
      .merge(criteria.tap { |c| c.update_options(opt2: 'hello') }).options.should include(opt1: 'hello', opt2: 'hello') }
    # Identical facets collapse into a single entry (was a duplicate-key
    # hash literal, which Ruby collapses anyway and warns about).
    specify { subject.tap { |c| c.update_facets(field1: 'hello') }
      .merge(criteria.tap { |c| c.update_facets(field1: 'hello') }).facets.should == {field1: 'hello'} }
    specify { subject.tap { |c| c.update_aggregations(field1: 'hello') }
      .merge(criteria.tap { |c| c.update_aggregations(field1: 'hello') }).aggregations.should == {field1: 'hello'} }
    specify { subject.tap { |c| c.update_queries(field1: 'hello') }
      .merge(criteria.tap { |c| c.update_queries(field2: 'hello') }).queries.should == [{field1: 'hello'}, {field2: 'hello'}] }
    specify { subject.tap { |c| c.update_filters(field1: 'hello') }
      .merge(criteria.tap { |c| c.update_filters(field2: 'hello') }).filters.should == [{field1: 'hello'}, {field2: 'hello'}] }
    specify { subject.tap { |c| c.update_sort(:field1) }
      .merge(criteria.tap { |c| c.update_sort(:field2) }).sort.should == [:field1, :field2] }
    specify { subject.tap { |c| c.update_fields(:field1) }
      .merge(criteria.tap { |c| c.update_fields(:field2) }).fields.should == ['field1', 'field2'] }
    specify { subject.tap { |c| c.update_types(:type1) }
      .merge(criteria.tap { |c| c.update_types(:type2) }).types.should == ['type1', 'type2'] }
  end

  describe '#merge!' do
    let(:criteria) { described_class.new }

    # #merge! mutates and returns the receiver.
    specify { subject.merge!(criteria).should be_equal subject }
    specify { subject.merge!(criteria).should_not be_equal criteria }
    specify { subject.tap { |c| c.update_options(opt1: 'hello') }
      .merge!(criteria.tap { |c| c.update_options(opt2: 'hello') }).options.should include(opt1: 'hello', opt2: 'hello') }
    # Identical facets collapse into a single entry (see #merge above).
    specify { subject.tap { |c| c.update_facets(field1: 'hello') }
      .merge!(criteria.tap { |c| c.update_facets(field1: 'hello') }).facets.should == {field1: 'hello'} }
    specify { subject.tap { |c| c.update_aggregations(field1: 'hello') }
      .merge!(criteria.tap { |c| c.update_aggregations(field1: 'hello') }).aggregations.should == {field1: 'hello'} }
    specify { subject.tap { |c| c.update_queries(field1: 'hello') }
      .merge!(criteria.tap { |c| c.update_queries(field2: 'hello') }).queries.should == [{field1: 'hello'}, {field2: 'hello'}] }
    specify { subject.tap { |c| c.update_filters(field1: 'hello') }
      .merge!(criteria.tap { |c| c.update_filters(field2: 'hello') }).filters.should == [{field1: 'hello'}, {field2: 'hello'}] }
    specify { subject.tap { |c| c.update_sort(:field1) }
      .merge!(criteria.tap { |c| c.update_sort(:field2) }).sort.should == [:field1, :field2] }
    specify { subject.tap { |c| c.update_fields(:field1) }
      .merge!(criteria.tap { |c| c.update_fields(:field2) }).fields.should == ['field1', 'field2'] }
    specify { subject.tap { |c| c.update_types(:type1) }
      .merge!(criteria.tap { |c| c.update_types(:type2) }).types.should == ['type1', 'type2'] }
  end

  describe '#request_body' do
    # Mutates the subject via the given block, then builds the request body.
    def request_body &block
      subject.instance_exec(&block) if block
      subject.request_body
    end

    specify { request_body.should == {body: {}} }
    specify { request_body { update_options(size: 10) }.should == {body: {size: 10}} }
    specify { request_body { update_options(from: 10) }.should == {body: {from: 10}} }
    specify { request_body { update_options(explain: true) }.should == {body: {explain: true}} }
    specify { request_body { update_queries(:query) }.should == {body: {query: :query}} }
    specify { request_body {
      update_options(from: 10); update_sort(:field); update_fields(:field); update_queries(:query)
    }.should == {body: {query: :query, from: 10, sort: [:field], _source: ['field']}} }
  end

  describe '#_composed_query' do
    def _composed_query &block
      subject.instance_exec(&block) if block
      subject.send(:_composed_query, subject.send(:_request_query), subject.send(:_request_filter))
    end

    specify { _composed_query.should be_nil }
    specify { _composed_query { update_queries(:query) }.should == {query: :query} }
    specify { _composed_query { update_queries([:query1, :query2]) }
      .should == {query: {bool: {must: [:query1, :query2]}}} }
    specify { _composed_query { update_options(query_mode: :should); update_queries([:query1, :query2]) }
      .should == {query: {bool: {should: [:query1, :query2]}}} }
    specify { _composed_query { update_options(query_mode: :dis_max); update_queries([:query1, :query2]) }
      .should == {query: {dis_max: {queries: [:query1, :query2]}}} }
    # Filters alone wrap a match_all query in a filtered query.
    specify { _composed_query { update_filters([:filter1, :filter2]) }
      .should == {query: {filtered: {query: {match_all: {}}, filter: {and: [:filter1, :filter2]}}}} }
    specify { _composed_query { update_filters([:filter1, :filter2]); update_queries([:query1, :query2]) }
      .should == {query: {filtered: {
        query: {bool: {must: [:query1, :query2]}},
        filter: {and: [:filter1, :filter2]}
      }}}
    }
    specify { _composed_query {
      update_options(query_mode: :should); update_options(filter_mode: :or);
      update_filters([:filter1, :filter2]); update_queries([:query1, :query2])
    }.should == {query: {filtered: {
      query: {bool: {should: [:query1, :query2]}},
      filter: {or: [:filter1, :filter2]}
    }}}
    }
  end

  describe '#_request_filter' do
    def _request_filter &block
      subject.instance_exec(&block) if block
      subject.send(:_request_filter)
    end

    specify { _request_filter.should be_nil }
    specify { _request_filter { update_types(:type) }.should == {type: {value: 'type'}} }
    specify { _request_filter { update_types([:type1, :type2]) }
      .should == {or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]} }
    specify { _request_filter { update_filters([:filter1, :filter2]) }
      .should == {and: [:filter1, :filter2]} }
    specify { _request_filter { update_options(filter_mode: :or); update_filters([:filter1, :filter2]) }
      .should == {or: [:filter1, :filter2]} }
    specify { _request_filter { update_options(filter_mode: :must); update_filters([:filter1, :filter2]) }
      .should == {bool: {must: [:filter1, :filter2]}} }
    specify { _request_filter { update_options(filter_mode: :should); update_filters([:filter1, :filter2]) }
      .should == {bool: {should: [:filter1, :filter2]}} }
    # Types and filters are always ANDed together, regardless of filter_mode.
    specify { _request_filter { update_types([:type1, :type2]); update_filters([:filter1, :filter2]) }
      .should == {and: [{or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]}, :filter1, :filter2]} }
    specify { _request_filter { update_options(filter_mode: :or); update_types([:type1, :type2]); update_filters([:filter1, :filter2]) }
      .should == {and: [{or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]}, {or: [:filter1, :filter2]}]} }
    specify { _request_filter { update_options(filter_mode: :must); update_types([:type1, :type2]); update_filters([:filter1, :filter2]) }
      .should == {and: [{or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]}, {bool: {must: [:filter1, :filter2]}}]} }
    specify { _request_filter { update_options(filter_mode: :should); update_types([:type1, :type2]); update_filters([:filter1, :filter2]) }
      .should == {and: [{or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]}, {bool: {should: [:filter1, :filter2]}}]} }
  end

  describe '#_request_types' do
    def _request_types &block
      subject.instance_exec(&block) if block
      subject.send(:_request_types)
    end

    specify { _request_types.should be_nil }
    specify { _request_types { update_types(:type1) }.should == {type: {value: 'type1'}} }
    specify { _request_types { update_types([:type1, :type2]) }
      .should == {or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]} }
  end

  describe '#_queries_join' do
    def _queries_join *args
      subject.send(:_queries_join, *args)
    end

    let(:query) { {term: {field: 'value'}} }

    specify { _queries_join([], :dis_max).should be_nil }
    specify { _queries_join([query], :dis_max).should == query }
    specify { _queries_join([query, query], :dis_max).should == {dis_max: {queries: [query, query]}} }

    # A numeric mode is a dis_max tie_breaker.
    specify { _queries_join([], 0.7).should be_nil }
    specify { _queries_join([query], 0.7).should == query }
    specify { _queries_join([query, query], 0.7).should == {dis_max: {queries: [query, query], tie_breaker: 0.7}} }

    specify { _queries_join([], :must).should be_nil }
    specify { _queries_join([query], :must).should == query }
    specify { _queries_join([query, query], :must).should == {bool: {must: [query, query]}} }

    specify { _queries_join([], :should).should be_nil }
    specify { _queries_join([query], :should).should == query }
    specify { _queries_join([query, query], :should).should == {bool: {should: [query, query]}} }

    # A percentage string becomes bool/should with minimum_should_match.
    specify { _queries_join([], '25%').should be_nil }
    specify { _queries_join([query], '25%').should == query }
    specify { _queries_join([query, query], '25%').should == {bool: {should: [query, query], minimum_should_match: '25%'}} }
  end

  describe '#_filters_join' do
    def _filters_join *args
      subject.send(:_filters_join, *args)
    end

    let(:filter) { {term: {field: 'value'}} }

    specify { _filters_join([], :and).should be_nil }
    specify { _filters_join([filter], :and).should == filter }
    specify { _filters_join([filter, filter], :and).should == {and: [filter, filter]} }

    specify { _filters_join([], :or).should be_nil }
    specify { _filters_join([filter], :or).should == filter }
    specify { _filters_join([filter, filter], :or).should == {or: [filter, filter]} }

    specify { _filters_join([], :must).should be_nil }
    specify { _filters_join([filter], :must).should == filter }
    specify { _filters_join([filter, filter], :must).should == {bool: {must: [filter, filter]}} }

    specify { _filters_join([], :should).should be_nil }
    specify { _filters_join([filter], :should).should == filter }
    specify { _filters_join([filter, filter], :should).should == {bool: {should: [filter, filter]}} }

    # A percentage string becomes bool/should with minimum_should_match.
    specify { _filters_join([], '25%').should be_nil }
    specify { _filters_join([filter], '25%').should == filter }
    specify { _filters_join([filter, filter], '25%').should == {bool: {should: [filter, filter], minimum_should_match: '25%'}} }
  end
end
Added basic spec for filtered_queries? configuration option
require 'spec_helper'
describe Chewy::Query::Criteria do
include ClassHelpers
subject { described_class.new }
its(:options) { should be_a Hash }
its(:facets) { should == {} }
its(:aggregations) { should == {} }
its(:queries) { should == [] }
its(:filters) { should == [] }
its(:sort) { should == [] }
its(:fields) { should == [] }
its(:types) { should == [] }
its(:none?){ should be_false }
its(:facets?) { should be_false }
its(:aggregations?) { should be_false }
its(:queries?) { should be_false }
its(:filters?) { should be_false }
its(:sort?) { should be_false }
its(:fields?) { should be_false }
its(:types?) { should be_false }
describe '#update_options' do
specify { expect { subject.update_options(field: 'hello') }.to change { subject.options }.to(hash_including(field: 'hello')) }
end
describe '#update_facets' do
specify { expect { subject.update_facets(field: 'hello') }.to change { subject.facets? }.to(true) }
specify { expect { subject.update_facets(field: 'hello') }.to change { subject.facets }.to(field: 'hello') }
end
describe '#update_aggregations' do
specify { expect { subject.update_aggregations(field: 'hello') }.to change { subject.aggregations? }.to(true) }
specify { expect { subject.update_aggregations(field: 'hello') }.to change { subject.aggregations }.to(field: 'hello') }
end
describe '#update_queries' do
specify { expect { subject.update_queries(field: 'hello') }.to change { subject.queries? }.to(true) }
specify { expect { subject.update_queries(field: 'hello') }.to change { subject.queries }.to([field: 'hello']) }
specify { expect { subject.update_queries(field: 'hello'); subject.update_queries(field: 'world') }
.to change { subject.queries }.to([{field: 'hello'}, {field: 'world'}]) }
specify { expect { subject.update_queries([{field: 'hello'}, {field: 'world'}, nil]) }
.to change { subject.queries }.to([{field: 'hello'}, {field: 'world'}]) }
end
describe '#update_filters' do
specify { expect { subject.update_filters(field: 'hello') }.to change { subject.filters? }.to(true) }
specify { expect { subject.update_filters(field: 'hello') }.to change { subject.filters }.to([{field: 'hello'}]) }
specify { expect { subject.update_filters(field: 'hello'); subject.update_filters(field: 'world') }
.to change { subject.filters }.to([{field: 'hello'}, {field: 'world'}]) }
specify { expect { subject.update_filters([{field: 'hello'}, {field: 'world'}, nil]) }
.to change { subject.filters }.to([{field: 'hello'}, {field: 'world'}]) }
end
describe '#update_sort' do
specify { expect { subject.update_sort(:field) }.to change { subject.sort? }.to(true) }
specify { expect { subject.update_sort([:field]) }.to change { subject.sort }.to([:field]) }
specify { expect { subject.update_sort([:field1, :field2]) }.to change { subject.sort }.to([:field1, :field2]) }
specify { expect { subject.update_sort([{field: :asc}]) }.to change { subject.sort }.to([{field: :asc}]) }
specify { expect { subject.update_sort([:field1, field2: {order: :asc}]) }.to change { subject.sort }.to([:field1, {field2: {order: :asc}}]) }
specify { expect { subject.update_sort([{field1: {order: :asc}}, :field2]) }.to change { subject.sort }.to([{field1: {order: :asc}}, :field2]) }
specify { expect { subject.update_sort([field1: :asc, field2: {order: :asc}]) }.to change { subject.sort }.to([{field1: :asc}, {field2: {order: :asc}}]) }
specify { expect { subject.update_sort([{field1: {order: :asc}}, :field2, :field3]) }.to change { subject.sort }.to([{field1: {order: :asc}}, :field2, :field3]) }
specify { expect { subject.update_sort([{field1: {order: :asc}}, [:field2, :field3]]) }.to change { subject.sort }.to([{field1: {order: :asc}}, :field2, :field3]) }
specify { expect { subject.update_sort([{field1: {order: :asc}}, [:field2], :field3]) }.to change { subject.sort }.to([{field1: {order: :asc}}, :field2, :field3]) }
specify { expect { subject.update_sort([{field1: {order: :asc}, field2: :desc}, [:field3], :field4]) }.to change { subject.sort }.to([{field1: {order: :asc}}, {field2: :desc}, :field3, :field4]) }
specify { expect { subject.tap { |s| s.update_sort([field1: {order: :asc}, field2: :desc]) }.update_sort([[:field3], :field4]) }.to change { subject.sort }.to([{field1: {order: :asc}}, {field2: :desc}, :field3, :field4]) }
specify { expect { subject.tap { |s| s.update_sort([field1: {order: :asc}, field2: :desc]) }.update_sort([[:field3], :field4], purge: true) }.to change { subject.sort }.to([:field3, :field4]) }
end
describe '#update_fields' do
specify { expect { subject.update_fields(:field) }.to change { subject.fields? }.to(true) }
specify { expect { subject.update_fields(:field) }.to change { subject.fields }.to(['field']) }
specify { expect { subject.update_fields([:field, :field]) }.to change { subject.fields }.to(['field']) }
specify { expect { subject.update_fields([:field1, :field2]) }.to change { subject.fields }.to(['field1', 'field2']) }
specify { expect { subject.tap { |s| s.update_fields(:field1) }.update_fields([:field2, :field3]) }
.to change { subject.fields }.to(['field1', 'field2', 'field3']) }
specify { expect { subject.tap { |s| s.update_fields(:field1) }.update_fields([:field2, :field3], purge: true) }
.to change { subject.fields }.to(['field2', 'field3']) }
end
describe '#update_types' do
specify { expect { subject.update_types(:type) }.to change { subject.types? }.to(true) }
specify { expect { subject.update_types(:type) }.to change { subject.types }.to(['type']) }
specify { expect { subject.update_types([:type, :type]) }.to change { subject.types }.to(['type']) }
specify { expect { subject.update_types([:type1, :type2]) }.to change { subject.types }.to(['type1', 'type2']) }
specify { expect { subject.tap { |s| s.update_types(:type1) }.update_types([:type2, :type3]) }
.to change { subject.types }.to(['type1', 'type2', 'type3']) }
specify { expect { subject.tap { |s| s.update_types(:type1) }.update_types([:type2, :type3], purge: true) }
.to change { subject.types }.to(['type2', 'type3']) }
end
describe '#merge' do
let(:criteria) { described_class.new }
specify { subject.merge(criteria).should_not be_equal subject }
specify { subject.merge(criteria).should_not be_equal criteria }
specify { subject.tap { |c| c.update_options(opt1: 'hello') }
.merge(criteria.tap { |c| c.update_options(opt2: 'hello') }).options.should include(opt1: 'hello', opt2: 'hello') }
specify { subject.tap { |c| c.update_facets(field1: 'hello') }
.merge(criteria.tap { |c| c.update_facets(field1: 'hello') }).facets.should == {field1: 'hello', field1: 'hello'} }
specify { subject.tap { |c| c.update_aggregations(field1: 'hello') }
.merge(criteria.tap { |c| c.update_aggregations(field1: 'hello') }).aggregations.should == {field1: 'hello', field1: 'hello'} }
specify { subject.tap { |c| c.update_queries(field1: 'hello') }
.merge(criteria.tap { |c| c.update_queries(field2: 'hello') }).queries.should == [{field1: 'hello'}, {field2: 'hello'}] }
specify { subject.tap { |c| c.update_filters(field1: 'hello') }
.merge(criteria.tap { |c| c.update_filters(field2: 'hello') }).filters.should == [{field1: 'hello'}, {field2: 'hello'}] }
specify { subject.tap { |c| c.update_sort(:field1) }
.merge(criteria.tap { |c| c.update_sort(:field2) }).sort.should == [:field1, :field2] }
specify { subject.tap { |c| c.update_fields(:field1) }
.merge(criteria.tap { |c| c.update_fields(:field2) }).fields.should == ['field1', 'field2'] }
specify { subject.tap { |c| c.update_types(:type1) }
.merge(criteria.tap { |c| c.update_types(:type2) }).types.should == ['type1', 'type2'] }
end
describe '#merge!' do
let(:criteria) { described_class.new }
specify { subject.merge!(criteria).should be_equal subject }
specify { subject.merge!(criteria).should_not be_equal criteria }
specify { subject.tap { |c| c.update_options(opt1: 'hello') }
.merge!(criteria.tap { |c| c.update_options(opt2: 'hello') }).options.should include(opt1: 'hello', opt2: 'hello') }
specify { subject.tap { |c| c.update_facets(field1: 'hello') }
.merge!(criteria.tap { |c| c.update_facets(field1: 'hello') }).facets.should == {field1: 'hello', field1: 'hello'} }
specify { subject.tap { |c| c.update_aggregations(field1: 'hello') }
.merge!(criteria.tap { |c| c.update_aggregations(field1: 'hello') }).aggregations.should == {field1: 'hello', field1: 'hello'} }
specify { subject.tap { |c| c.update_queries(field1: 'hello') }
.merge!(criteria.tap { |c| c.update_queries(field2: 'hello') }).queries.should == [{field1: 'hello'}, {field2: 'hello'}] }
specify { subject.tap { |c| c.update_filters(field1: 'hello') }
.merge!(criteria.tap { |c| c.update_filters(field2: 'hello') }).filters.should == [{field1: 'hello'}, {field2: 'hello'}] }
specify { subject.tap { |c| c.update_sort(:field1) }
.merge!(criteria.tap { |c| c.update_sort(:field2) }).sort.should == [:field1, :field2] }
specify { subject.tap { |c| c.update_fields(:field1) }
.merge!(criteria.tap { |c| c.update_fields(:field2) }).fields.should == ['field1', 'field2'] }
specify { subject.tap { |c| c.update_types(:type1) }
.merge!(criteria.tap { |c| c.update_types(:type2) }).types.should == ['type1', 'type2'] }
end
describe '#request_body' do
def request_body &block
subject.instance_exec(&block) if block
subject.request_body
end
specify { request_body.should == {body: {}} }
specify { request_body { update_options(size: 10) }.should == {body: {size: 10}} }
specify { request_body { update_options(from: 10) }.should == {body: {from: 10}} }
specify { request_body { update_options(explain: true) }.should == {body: {explain: true}} }
specify { request_body { update_queries(:query) }.should == {body: {query: :query}} }
specify { request_body {
update_options(from: 10); update_sort(:field); update_fields(:field); update_queries(:query)
}.should == {body: {query: :query, from: 10, sort: [:field], _source: ['field']}} }
context do
before { Chewy.filtered_queries = false }
specify { request_body {
update_queries(:query); update_filters(:filters);
}.should == {body: {query: :query, filter: :filters}} }
end
context do
before { Chewy.filtered_queries = true }
specify { request_body {
update_queries(:query); update_filters(:filters);
}.should == {body: {query: {filtered: {query: :query, filter: :filters}}}} }
end
end
describe '#_composed_query' do
def _composed_query &block
subject.instance_exec(&block) if block
subject.send(:_composed_query, subject.send(:_request_query), subject.send(:_request_filter))
end
specify { _composed_query.should be_nil }
specify { _composed_query { update_queries(:query) }.should == {query: :query} }
specify { _composed_query { update_queries([:query1, :query2]) }
.should == {query: {bool: {must: [:query1, :query2]}}} }
specify { _composed_query { update_options(query_mode: :should); update_queries([:query1, :query2]) }
.should == {query: {bool: {should: [:query1, :query2]}}} }
specify { _composed_query { update_options(query_mode: :dis_max); update_queries([:query1, :query2]) }
.should == {query: {dis_max: {queries: [:query1, :query2]}}} }
specify { _composed_query { update_filters([:filter1, :filter2]) }
.should == {query: {filtered: {query: {match_all: {}}, filter: {and: [:filter1, :filter2]}}}} }
specify { _composed_query { update_filters([:filter1, :filter2]); update_queries([:query1, :query2]) }
.should == {query: {filtered: {
query: {bool: {must: [:query1, :query2]}},
filter: {and: [:filter1, :filter2]}
}}}
}
specify { _composed_query {
update_options(query_mode: :should); update_options(filter_mode: :or);
update_filters([:filter1, :filter2]); update_queries([:query1, :query2])
}.should == {query: {filtered: {
query: {bool: {should: [:query1, :query2]}},
filter: {or: [:filter1, :filter2]}
}}}
}
end
describe '#_request_filter' do
def _request_filter &block
subject.instance_exec(&block) if block
subject.send(:_request_filter)
end
specify { _request_filter.should be_nil }
specify { _request_filter { update_types(:type) }.should == {type: {value: 'type'}} }
specify { _request_filter { update_types([:type1, :type2]) }
.should == {or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]} }
specify { _request_filter { update_filters([:filter1, :filter2]) }
.should == {and: [:filter1, :filter2]} }
specify { _request_filter { update_options(filter_mode: :or); update_filters([:filter1, :filter2]) }
.should == {or: [:filter1, :filter2]} }
specify { _request_filter { update_options(filter_mode: :must); update_filters([:filter1, :filter2]) }
.should == {bool: {must: [:filter1, :filter2]}} }
specify { _request_filter { update_options(filter_mode: :should); update_filters([:filter1, :filter2]) }
.should == {bool: {should: [:filter1, :filter2]}} }
specify { _request_filter { update_types([:type1, :type2]); update_filters([:filter1, :filter2]) }
.should == {and: [{or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]}, :filter1, :filter2]} }
specify { _request_filter { update_options(filter_mode: :or); update_types([:type1, :type2]); update_filters([:filter1, :filter2]) }
.should == {and: [{or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]}, {or: [:filter1, :filter2]}]} }
specify { _request_filter { update_options(filter_mode: :must); update_types([:type1, :type2]); update_filters([:filter1, :filter2]) }
.should == {and: [{or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]}, {bool: {must: [:filter1, :filter2]}}]} }
specify { _request_filter { update_options(filter_mode: :should); update_types([:type1, :type2]); update_filters([:filter1, :filter2]) }
.should == {and: [{or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]}, {bool: {should: [:filter1, :filter2]}}]} }
end
describe '#_request_types' do
# Applies the given block in the subject's context, then returns the
# private type-filter fragment under test.
def _request_types &block
subject.instance_exec(&block) if block
subject.send(:_request_types)
end
# No types => nil; one type => bare type filter; several => OR of type filters.
specify { _request_types.should be_nil }
specify { _request_types { update_types(:type1) }.should == {type: {value: 'type1'}} }
specify { _request_types { update_types([:type1, :type2]) }
.should == {or: [{type: {value: 'type1'}}, {type: {value: 'type2'}}]} }
end
describe '#_queries_join' do
# Thin wrapper around the private query-combining helper.
def _queries_join *args
subject.send(:_queries_join, *args)
end
let(:query) { {term: {field: 'value'}} }
# Regardless of mode: empty list => nil, single query => passed through as-is.
# :dis_max wraps multiple queries in a dis_max clause.
specify { _queries_join([], :dis_max).should be_nil }
specify { _queries_join([query], :dis_max).should == query }
specify { _queries_join([query, query], :dis_max).should == {dis_max: {queries: [query, query]}} }
# A numeric mode is treated as a dis_max tie_breaker.
specify { _queries_join([], 0.7).should be_nil }
specify { _queries_join([query], 0.7).should == query }
specify { _queries_join([query, query], 0.7).should == {dis_max: {queries: [query, query], tie_breaker: 0.7}} }
# :must / :should produce the corresponding bool clause.
specify { _queries_join([], :must).should be_nil }
specify { _queries_join([query], :must).should == query }
specify { _queries_join([query, query], :must).should == {bool: {must: [query, query]}} }
specify { _queries_join([], :should).should be_nil }
specify { _queries_join([query], :should).should == query }
specify { _queries_join([query, query], :should).should == {bool: {should: [query, query]}} }
# A percentage string becomes bool/should with minimum_should_match.
specify { _queries_join([], '25%').should be_nil }
specify { _queries_join([query], '25%').should == query }
specify { _queries_join([query, query], '25%').should == {bool: {should: [query, query], minimum_should_match: '25%'}} }
end
describe '#_filters_join' do
# Thin wrapper around the private filter-combining helper.
def _filters_join *args
subject.send(:_filters_join, *args)
end
let(:filter) { {term: {field: 'value'}} }
# Regardless of mode: empty list => nil, single filter => passed through as-is.
# :and / :or produce the plain and/or filter forms.
specify { _filters_join([], :and).should be_nil }
specify { _filters_join([filter], :and).should == filter }
specify { _filters_join([filter, filter], :and).should == {and: [filter, filter]} }
specify { _filters_join([], :or).should be_nil }
specify { _filters_join([filter], :or).should == filter }
specify { _filters_join([filter, filter], :or).should == {or: [filter, filter]} }
# :must / :should produce the corresponding bool clause.
specify { _filters_join([], :must).should be_nil }
specify { _filters_join([filter], :must).should == filter }
specify { _filters_join([filter, filter], :must).should == {bool: {must: [filter, filter]}} }
specify { _filters_join([], :should).should be_nil }
specify { _filters_join([filter], :should).should == filter }
specify { _filters_join([filter, filter], :should).should == {bool: {should: [filter, filter]}} }
# A percentage string becomes bool/should with minimum_should_match.
specify { _filters_join([], '25%').should be_nil }
specify { _filters_join([filter], '25%').should == filter }
specify { _filters_join([filter, filter], '25%').should == {bool: {should: [filter, filter], minimum_should_match: '25%'}} }
end
end
|
# encoding:utf-8
require 'test_helper'
require 'set'
# On Ruby 1.8 Complex/Rational live in stdlib and must be required
# (silently, to suppress their load-time warnings); 1.9+ has them built in.
if RUBY_VERSION < '1.9'
silently do
require 'complex'
require 'rational'
end
end
require 'bigdecimal'
# Build Zone, an ActiveSupport::TimeWithZone instance, used by the tests to
# verify that unsupported date classes are rejected by the serializer.
# When tzinfo/active_support are absent, fall back to a do-nothing mock class
# so the constant can still be constructed.
begin
require 'date'
require 'tzinfo'
require 'active_support/timezone'
Time.zone = "Pacific Time (US & Canada)"
Zone = Time.zone.now
rescue LoadError
#warn 'Mocking time with zone'
module ActiveSupport
class TimeWithZone
def initialize(utc_time, zone)
end
end
end
Zone = ActiveSupport::TimeWithZone.new(Time.now.utc, 'EST')
end
# Same idea for ActiveSupport::Multibyte::Chars: mock it as a bare String
# subclass when active_support is not installed.
begin
require 'active_support/multibyte/chars'
rescue LoadError
warn 'Mocking ActiveSupport::Multibyte::Chars'
module ActiveSupport
module Multibyte
class Chars < String
end
end
end
end
# Round-trip tests for the BSON codec (@encoder = BSON::BSON_CODER, which may
# be the pure-Ruby, C-extension or Java implementation). Each test serializes
# a Ruby document and checks the bytes and/or the deserialized result.
class BSONTest < Test::Unit::TestCase
  include BSON

  def setup
    @encoder = BSON::BSON_CODER
  end

  # Asserts that +doc+ serializes deterministically (two serializations give
  # identical bytes) and round-trips back to an equal document.
  # Pass :debug => true to dump the raw bytes and the deserialized document.
  def assert_doc_pass(doc, options={})
    bson = @encoder.serialize(doc)
    if options[:debug]
      puts "DEBUGGING DOC:"
      p bson.to_a
      puts "DESERIALIZES TO:"
      # FIX: the header above used to be printed with no document after it.
      p @encoder.deserialize(bson)
    end
    assert_equal @encoder.serialize(doc).to_a, bson.to_a
    assert_equal doc, @encoder.deserialize(bson)
  end

  def test_interface
    doc = { 'a' => 1 }
    bson = BSON.serialize(doc)
    assert_equal doc, BSON.deserialize(bson)
  end

  def test_read_bson_document
    # Hex dump of a one-document BSON stream: {_id: ObjectId, a: 1.0}.
    bson_file_data_h_star = ["21000000075f6964005115883c3d75c94d3aa18b63016100000000000000f03f00"]
    strio = StringIO.new(bson_file_data_h_star.pack('H*'))
    bson = BSON.read_bson_document(strio)
    doc = {"_id"=>BSON::ObjectId('5115883c3d75c94d3aa18b63'), "a"=>1.0}
    assert_equal doc, bson
  end

  def test_bson_ruby_interface
    doc = { 'a' => 1 }
    buf = BSON_RUBY.serialize(doc)
    bson = BSON::BSON_RUBY.new
    bson.instance_variable_set(:@buf, buf)
    assert_equal [12, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0], bson.to_a
    assert_equal "\f\x00\x00\x00\x10a\x00\x01\x00\x00\x00\x00", bson.to_s
    assert_equal [12, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0], bson.unpack
  end

  def test_bson_ruby_hex_dump
    doc = { 'a' => 1 }
    buf = BSON_RUBY.serialize(doc)
    bson = BSON_RUBY.new
    bson.instance_variable_set(:@buf, buf)
    doc_hex_dump = " 0: 0C 00 00 00 10 61 00 01\n 8: 00 00 00 00"
    assert_equal doc_hex_dump, bson.hex_dump
  end

  # Exercises the legacy deserialize_dbref_data path by hand-building the
  # wire format for a DBRef (length-prefixed namespace cstring + ObjectId).
  def test_bson_ruby_dbref_not_used
    buf = BSON::ByteBuffer.new
    val = ns = 'namespace'
    # Make a hole for the length
    len_pos = buf.position
    buf.put_int(0)
    # Save the string
    start_pos = buf.position
    BSON::BSON_RUBY.serialize_cstr(buf, val)
    end_pos = buf.position
    # Put the string size in front
    buf.put_int(end_pos - start_pos, len_pos)
    # Go back to where we were
    buf.position = end_pos
    oid = ObjectId.new
    buf.put_array(oid.to_a)
    buf.rewind
    bson = BSON::BSON_RUBY.new
    bson.instance_variable_set(:@buf, buf)
    assert_equal DBRef.new(ns, oid).to_s, bson.deserialize_dbref_data(buf).to_s
  end

  def test_require_hash
    assert_raise_error InvalidDocument, "takes a Hash" do
      BSON.serialize('foo')
    end
    assert_raise_error InvalidDocument, "takes a Hash" do
      BSON.serialize(Object.new)
    end
    assert_raise_error InvalidDocument, "takes a Hash" do
      BSON.serialize(Set.new)
    end
  end

  def test_string
    doc = {'doc' => 'hello, world'}
    assert_doc_pass(doc)
  end

  def test_valid_utf8_string
    doc = {'doc' => 'aé'}
    assert_doc_pass(doc)
  end

  def test_valid_active_support_multibyte_chars
    unless RUBY_PLATFORM =~ /java/
      doc = {'doc' => ActiveSupport::Multibyte::Chars.new('aé')}
      assert_doc_pass(doc)
      bson = @encoder.serialize(doc)
      doc = @encoder.deserialize(bson)
      assert_equal doc['doc'], 'aé'
    end
  end

  def test_valid_utf8_key
    doc = {'aé' => 'hello'}
    assert_doc_pass(doc)
  end

  def test_limit_max_bson_size
    doc = {'name' => 'a' * BSON::DEFAULT_MAX_BSON_SIZE}
    assert_raise InvalidDocument do
      assert @encoder.serialize(doc)
    end
  end

  def test_update_max_bson_size
    require 'ostruct'
    mock_conn = OpenStruct.new
    size = 7 * 1024 * 1024
    mock_conn.max_bson_size = size
    silently do
      assert_equal size, BSON_CODER.update_max_bson_size(mock_conn)
      assert_equal size, BSON_CODER.max_bson_size
    end
  end

  def test_round_trip
    doc = {'doc' => 123}
    @encoder.deserialize(@encoder.serialize(doc))
  end

  # In 1.8 we test that other string encodings raise an exception.
  # In 1.9 we test that they get auto-converted.
  if RUBY_VERSION < '1.9'
    unless RUBY_PLATFORM == 'java'
      require 'iconv'
      def test_non_utf8_string
        string = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
        doc = {'doc' => string}
        assert_raise InvalidStringEncoding do
          @encoder.serialize(doc)
        end
      end

      def test_non_utf8_key
        key = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
        doc = {key => 'hello'}
        assert_raise InvalidStringEncoding do
          @encoder.serialize(doc)
        end
      end
    end
  else
    unless RUBY_PLATFORM == 'java'
      def test_non_utf8_string
        assert_raise BSON::InvalidStringEncoding do
          BSON::BSON_CODER.serialize({'str' => 'aé'.encode('iso-8859-1')})
        end
      end

      def test_invalid_utf8_string
        str = "123\xD9"
        assert !str.valid_encoding?
        assert_raise BSON::InvalidStringEncoding do
          BSON::BSON_CODER.serialize({'str' => str})
        end
      end

      def test_non_utf8_key
        assert_raise BSON::InvalidStringEncoding do
          BSON::BSON_CODER.serialize({'aé'.encode('iso-8859-1') => 'hello'})
        end
      end

      def test_forced_encoding_with_valid_utf8
        doc = {'doc' => "\xC3\xB6".force_encoding("ISO-8859-1")}
        serialized = @encoder.serialize(doc)
        deserialized = @encoder.deserialize(serialized)
        assert_equal(doc['doc'], deserialized['doc'].force_encoding("ISO-8859-1"))
      end

      # Based on a test from sqlite3-ruby
      def test_default_internal_is_honored
        before_enc = Encoding.default_internal
        str = "壁に耳あり、障子に目あり"
        bson = BSON::BSON_CODER.serialize("x" => str)
        silently { Encoding.default_internal = 'EUC-JP' }
        out = BSON::BSON_CODER.deserialize(bson)["x"]
        assert_equal Encoding.default_internal, out.encoding
        assert_equal str.encode('EUC-JP'), out
        assert_equal str, out.encode(str.encoding)
      ensure
        silently { Encoding.default_internal = before_enc }
      end
    end
  end

  def test_code
    code = Code.new('this.a.b < this.b')
    assert_equal 17, code.length
    # FIX: don't assume @data appears immediately before @scope in #inspect —
    # on Ruby 1.8.7 instance-variable/Hash ordering is not guaranteed, so only
    # require that @data shows up somewhere in the inspect output.
    assert_match /<BSON::Code:\d+.*@data="this.a.b < this.b".*>/, code.inspect
    doc = {'$where' => code}
    assert_doc_pass(doc)
    code = 'this.c.d < this.e'.to_bson_code # core_ext.rb
    assert_equal BSON::Code, code.class
    assert_equal code, code.to_bson_code
  end

  def test_code_with_symbol
    assert_raise_error ArgumentError, "BSON::Code must be in the form of a String" do
      Code.new(:fubar)
    end
  end

  def test_code_with_scope
    doc = {'$where' => Code.new('this.a.b < this.b', {'foo' => 1})}
    assert_doc_pass(doc)
  end

  def test_double
    doc = {'doc' => 41.25}
    assert_doc_pass(doc)
  end

  def test_int
    doc = {'doc' => 42}
    assert_doc_pass(doc)
    doc = {"doc" => -5600}
    assert_doc_pass(doc)
    # 32-bit int boundaries.
    doc = {"doc" => 2147483647}
    assert_doc_pass(doc)
    doc = {"doc" => -2147483648}
    assert_doc_pass(doc)
  end

  def test_ordered_hash
    doc = BSON::OrderedHash.new
    doc["b"] = 1
    doc["a"] = 2
    doc["c"] = 3
    doc["d"] = 4
    assert_doc_pass(doc)
  end

  def test_object
    doc = {'doc' => {'age' => 42, 'name' => 'Spongebob', 'shoe_size' => 9.5}}
    assert_doc_pass(doc)
  end

  def test_embedded_document_with_nil
    doc = {'doc' => {'age' => 42, 'name' => nil, 'shoe_size' => 9.5}}
    assert_doc_pass(doc)
  end

  def test_embedded_document_with_date
    doc = {'doc' => {'age' => 42, 'date' => Time.now.utc, 'shoe_size' => 9.5}}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    assert doc2['doc']
    assert_equal 42, doc2['doc']['age']
    assert_equal 9.5, doc2['doc']['shoe_size']
    assert_in_delta Time.now, doc2['doc']['date'], 1
  end

  def test_oid
    doc = {'doc' => ObjectId.new}
    assert_doc_pass(doc)
  end

  def test_array
    doc = {'doc' => [1, 2, 'a', 'b']}
    assert_doc_pass(doc)
  end

  def test_array_keys
    doc = {'doc' => [1, 2, 'a', 'b']}
    bson = @encoder.serialize(doc).to_a
    # BSON arrays are documents keyed "0".."3" — check the ASCII key bytes.
    assert_equal 48, bson[14]
    assert_equal 49, bson[21]
    assert_equal 50, bson[28]
    assert_equal 51, bson[37]
  end

  def test_regex
    doc = {'doc' => /foobar/i}
    assert_doc_pass(doc)
  end

  def test_regex_multiline
    doc = {'doc' => /foobar/m}
    assert_doc_pass(doc)
  end

  def test_boolean
    doc = {'doc' => true}
    assert_doc_pass(doc)
  end

  def test_date
    doc = {'date' => Time.now}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    # Mongo only stores up to the millisecond
    assert_in_delta doc['date'], doc2['date'], 0.001
  end

  def test_date_in_array
    doc = {'date' => [Time.now.utc]}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    assert doc2
  end

  def test_date_returns_as_utc
    doc = {'date' => Time.now.utc}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    assert doc2['date'].utc?
  end

  def test_date_before_epoch
    # Windows Time cannot represent pre-epoch instants — skip there.
    if RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/ then return true end
    begin
      doc = {'date' => Time.utc(1600)}
      bson = @encoder.serialize(doc)
      doc2 = @encoder.deserialize(bson)
      # Mongo only stores up to the millisecond
      assert_in_delta doc['date'], doc2['date'], 2
    rescue ArgumentError
      # some versions of Ruby won't let you create pre-epoch Time instances
      #
      # TODO figure out how that will work if somebody has saved data
      # w/ early dates already and is just querying for it.
    end
  end

  # NOTE: method name keeps its historical typo ("exeption") so external
  # test-name filters keep matching.
  def test_exeption_on_using_unsupported_date_class
    [DateTime.now, Date.today, Zone].each do |invalid_date|
      doc = {:date => invalid_date}
      begin
        BSON::BSON_CODER.serialize(doc)
      rescue => e
      ensure
        if !invalid_date.is_a? Time
          assert_equal InvalidDocument, e.class
          assert_match(/UTC Time/, e.message)
        end
      end
    end
  end

  def test_dbref
    oid = ObjectId.new
    ns = 'namespace'
    doc = {}
    dbref = DBRef.new(ns, oid)
    assert_equal({"$id"=>oid, "$ns"=>ns}, dbref.to_hash)
    doc['dbref'] = dbref
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    # Java doesn't deserialize to DBRefs
    if RUBY_PLATFORM =~ /java/ && BSON.extension?
      assert_equal 'namespace', doc2['dbref']['$ns']
      assert_equal oid, doc2['dbref']['$id']
    else
      assert_equal 'namespace', doc2['dbref'].namespace
      assert_equal oid, doc2['dbref'].object_id
    end
  end

  def test_symbol
    doc = {'sym' => :foo}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    assert_equal :foo, doc2['sym']
  end

  def test_binary
    bin = Binary.new
    'binstring'.each_byte { |b| bin.put(b) }
    doc = {'bin' => bin}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    bin2 = doc2['bin']
    assert_kind_of Binary, bin2
    assert_equal 'binstring', bin2.to_s
    assert_equal Binary::SUBTYPE_SIMPLE, bin2.subtype
  end

  def test_binary_with_deprecated_subtype
    bin = Binary.new
    'binstring'.each_byte { |b| bin.put(b) }
    bin.subtype = Binary::SUBTYPE_BYTES
    doc = {'bin' => bin}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    bin2 = doc2['bin']
    assert_kind_of Binary, bin2
    assert_equal 'binstring', bin2.to_s
    assert_equal Binary::SUBTYPE_BYTES, bin2.subtype
  end

  def test_binary_with_string
    b = Binary.new('somebinarystring')
    doc = {'bin' => b}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    bin2 = doc2['bin']
    assert_kind_of Binary, bin2
    assert_equal 'somebinarystring', bin2.to_s
    assert_equal Binary::SUBTYPE_SIMPLE, bin2.subtype
  end

  def test_binary_type
    bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_USER_DEFINED)
    doc = {'bin' => bin}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    bin2 = doc2['bin']
    assert_kind_of Binary, bin2
    assert_equal [1, 2, 3, 4, 5], bin2.to_a
    assert_equal Binary::SUBTYPE_USER_DEFINED, bin2.subtype
  end

  # Java doesn't support binary subtype 0 yet
  if !(RUBY_PLATFORM =~ /java/)
    def test_binary_subtype_0
      bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_SIMPLE)
      doc = {'bin' => bin}
      bson = @encoder.serialize(doc)
      doc2 = @encoder.deserialize(bson)
      bin2 = doc2['bin']
      assert_kind_of Binary, bin2
      assert_equal [1, 2, 3, 4, 5], bin2.to_a
      assert_equal Binary::SUBTYPE_SIMPLE, bin2.subtype
    end
  end

  def test_binary_byte_buffer
    bb = Binary.new
    5.times { |i| bb.put(i + 1) }
    doc = {'bin' => bb}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
    bin2 = doc2['bin']
    assert_kind_of Binary, bin2
    assert_equal [1, 2, 3, 4, 5], bin2.to_a
    assert_equal Binary::SUBTYPE_SIMPLE, bin2.subtype
  end

  # serialize(doc, check_keys, move_id): with move_id=true, _id must be
  # written (and round-trip) as the first key.
  def test_put_id_first
    val = BSON::OrderedHash.new
    val['not_id'] = 1
    val['_id'] = 2
    roundtrip = @encoder.deserialize(@encoder.serialize(val, false, true).to_s)
    assert_kind_of BSON::OrderedHash, roundtrip
    assert_equal '_id', roundtrip.keys.first
    val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
    roundtrip = @encoder.deserialize(@encoder.serialize(val, false, true).to_s)
    assert_kind_of BSON::OrderedHash, roundtrip
    assert_equal '_id', roundtrip.keys.first
  end

  def test_nil_id
    doc = {"_id" => nil}
    assert_doc_pass(doc)
  end

  if !(RUBY_PLATFORM =~ /java/)
    def test_timestamp
      # val = {"test" => [4, 20]}
      result = @encoder.deserialize([0x13, 0x00, 0x00, 0x00,
                                     0x11, 0x74, 0x65, 0x73,
                                     0x74, 0x00, 0x04, 0x00,
                                     0x00, 0x00, 0x14, 0x00,
                                     0x00, 0x00, 0x00])
      silently do
        assert_equal 4, result["test"][0]
        assert_equal 20, result["test"][1]
      end
    end
  end

  def test_timestamp_type
    ts = Timestamp.new(5000, 100)
    doc = {:ts => ts}
    bson = @encoder.serialize(doc)
    assert_equal ts, @encoder.deserialize(bson)["ts"]
  end

  # Int64 boundaries: +/-2**63-ish values pass, one past them raises.
  def test_overflow
    doc = {"x" => 2**75}
    assert_raise RangeError do
      @encoder.serialize(doc)
    end
    doc = {"x" => 9223372036854775}
    assert_doc_pass(doc)
    doc = {"x" => 9223372036854775807}
    assert_doc_pass(doc)
    doc["x"] = doc["x"] + 1
    assert_raise RangeError do
      @encoder.serialize(doc)
    end
    doc = {"x" => -9223372036854775}
    assert_doc_pass(doc)
    doc = {"x" => -9223372036854775808}
    assert_doc_pass(doc)
    doc["x"] = doc["x"] - 1
    assert_raise RangeError do
      BSON::BSON_CODER.serialize(doc)
    end
  end

  def test_invalid_numeric_types
    [BigDecimal.new("1.0"), Complex(0, 1), Rational(2, 3)].each do |type|
      doc = {"x" => type}
      begin
        @encoder.serialize(doc)
      rescue => e
      ensure
        assert_equal InvalidDocument, e.class
        assert_match(/Cannot serialize/, e.message)
      end
    end
  end

  def test_do_not_change_original_object
    val = BSON::OrderedHash.new
    val['not_id'] = 1
    val['_id'] = 2
    assert val.keys.include?('_id')
    @encoder.serialize(val)
    assert val.keys.include?('_id')
    val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
    assert val.keys.include?(:_id)
    @encoder.serialize(val)
    assert val.keys.include?(:_id)
  end

  # note we only test for _id here because in the general case we will
  # write duplicates for :key and "key". _id is a special case because
  # we call has_key? to check for its existence rather than just iterating
  # over it like we do for the rest of the keys. thus, things like
  # HashWithIndifferentAccess can cause problems for _id but not for other
  # keys. rather than require rails to test with HWIA directly, we do this
  # somewhat hacky test.
  #
  # Note that the driver only eliminates duplicate ids when move_id is true.
  def test_no_duplicate_id
    dup = {"_id" => "foo", :_id => "foo"}
    one = {"_id" => "foo"}
    assert_equal @encoder.serialize(one, false, true).to_a, @encoder.serialize(dup, false, true).to_a
  end

  def test_duplicate_keys
    #dup = {"_foo" => "foo", :_foo => "foo"}
    #one = {"_foo" => "foo"}
    #assert_equal @encoder.serialize(one).to_a, @encoder.serialize(dup).to_a
    #warn "Pending test for duplicate keys"
  end

  def test_no_duplicate_id_when_moving_id
    dup = {"_id" => "foo", :_id => "foo"}
    one = {:_id => "foo"}
    assert_equal @encoder.serialize(one, false, true).to_s, @encoder.serialize(dup, false, true).to_s
  end

  # NUL bytes are legal in string values but not in keys or regexes.
  def test_null_character
    doc = {"a" => "\x00"}
    assert_doc_pass(doc)
    assert_raise InvalidDocument do
      @encoder.serialize({"\x00" => "a"})
    end
    assert_raise InvalidDocument do
      @encoder.serialize({"a" => (Regexp.compile "ab\x00c")})
    end
  end

  def test_max_key
    doc = {"a" => MaxKey.new}
    assert_doc_pass(doc)
  end

  def test_min_key
    doc = {"a" => MinKey.new}
    assert_doc_pass(doc)
  end

  def test_invalid_object
    o = Object.new
    assert_raise InvalidDocument do
      @encoder.serialize({:foo => o})
    end
    assert_raise InvalidDocument do
      @encoder.serialize({:foo => Date.today})
    end
  end

  # Raw-byte checks that move_id=true hoists _id to the front and
  # move_id=false leaves insertion order alone.
  def test_move_id
    a = BSON::OrderedHash.new
    a['text'] = 'abc'
    a['key'] = 'abc'
    a['_id'] = 1
    assert_equal ")\000\000\000\020_id\000\001\000\000\000\002text" +
                 "\000\004\000\000\000abc\000\002key\000\004\000\000\000abc\000\000",
                 @encoder.serialize(a, false, true).to_s
    assert_equal ")\000\000\000\002text\000\004\000\000\000abc\000\002key" +
                 "\000\004\000\000\000abc\000\020_id\000\001\000\000\000\000",
                 @encoder.serialize(a, false, false).to_s
  end

  def test_move_id_with_nested_doc
    b = BSON::OrderedHash.new
    b['text'] = 'abc'
    b['_id'] = 2
    c = BSON::OrderedHash.new
    c['text'] = 'abc'
    c['hash'] = b
    c['_id'] = 3
    # Only the top-level _id is moved; the embedded doc keeps its order.
    assert_equal ">\000\000\000\020_id\000\003\000\000\000\002text" +
                 "\000\004\000\000\000abc\000\003hash\000\034\000\000" +
                 "\000\002text\000\004\000\000\000abc\000\020_id\000\002\000\000\000\000\000",
                 @encoder.serialize(c, false, true).to_s
    # Java doesn't support this. Isn't actually necessary.
    if !(RUBY_PLATFORM =~ /java/)
      assert_equal ">\000\000\000\002text\000\004\000\000\000abc\000\003hash" +
                   "\000\034\000\000\000\002text\000\004\000\000\000abc\000\020_id" +
                   "\000\002\000\000\000\000\020_id\000\003\000\000\000\000",
                   @encoder.serialize(c, false, false).to_s
    end
  end

  # With check_keys=true, '$'-prefixed and dotted keys are rejected at the
  # top level and inside embedded documents; '$'/'.' elsewhere in a key is ok.
  def test_invalid_key_names
    assert @encoder.serialize({"hello" => "world"}, true)
    assert @encoder.serialize({"hello" => {"hello" => "world"}}, true)
    assert @encoder.serialize({"he$llo" => "world"}, true)
    assert @encoder.serialize({"hello" => {"hell$o" => "world"}}, true)
    assert_raise BSON::InvalidDocument do
      @encoder.serialize({"he\0llo" => "world"}, true)
    end
    assert_raise BSON::InvalidKeyName do
      @encoder.serialize({"$hello" => "world"}, true)
    end
    assert_raise BSON::InvalidKeyName do
      @encoder.serialize({"hello" => {"$hello" => "world"}}, true)
    end
    assert_raise BSON::InvalidKeyName do
      @encoder.serialize({".hello" => "world"}, true)
    end
    assert_raise BSON::InvalidKeyName do
      @encoder.serialize({"hello" => {".hello" => "world"}}, true)
    end
    assert_raise BSON::InvalidKeyName do
      @encoder.serialize({"hello." => "world"}, true)
    end
    assert_raise BSON::InvalidKeyName do
      @encoder.serialize({"hello" => {"hello." => "world"}}, true)
    end
    assert_raise BSON::InvalidKeyName do
      @encoder.serialize({"hel.lo" => "world"}, true)
    end
    assert_raise BSON::InvalidKeyName do
      @encoder.serialize({"hello" => {"hel.lo" => "world"}}, true)
    end
  end
end
Relax the BSON::Code #inspect test regexp so it passes on Ruby 1.8.7, where Hash and instance-variable ordering is not guaranteed
RUBY-548: As an engineer, I want to improve test code coverage
# encoding:utf-8
require 'test_helper'
require 'set'
# Ruby 1.8 ships Complex/Rational as stdlib requires (loaded silently to
# suppress their warnings); Ruby 1.9+ has them built in.
if RUBY_VERSION < '1.9'
silently do
require 'complex'
require 'rational'
end
end
require 'bigdecimal'
# Define Zone, an ActiveSupport::TimeWithZone instance, used by the tests to
# verify that unsupported date classes are rejected by the serializer.
# Falls back to a do-nothing mock class when tzinfo/active_support are absent.
begin
require 'date'
require 'tzinfo'
require 'active_support/timezone'
Time.zone = "Pacific Time (US & Canada)"
Zone = Time.zone.now
rescue LoadError
#warn 'Mocking time with zone'
module ActiveSupport
class TimeWithZone
def initialize(utc_time, zone)
end
end
end
Zone = ActiveSupport::TimeWithZone.new(Time.now.utc, 'EST')
end
# Likewise mock ActiveSupport::Multibyte::Chars as a bare String subclass
# when active_support is not installed.
begin
require 'active_support/multibyte/chars'
rescue LoadError
warn 'Mocking ActiveSupport::Multibyte::Chars'
module ActiveSupport
module Multibyte
class Chars < String
end
end
end
end
class BSONTest < Test::Unit::TestCase
include BSON
def setup
@encoder = BSON::BSON_CODER
end
def assert_doc_pass(doc, options={})
bson = @encoder.serialize(doc)
if options[:debug]
puts "DEBUGGING DOC:"
p bson.to_a
puts "DESERIALIZES TO:"
end
assert_equal @encoder.serialize(doc).to_a, bson.to_a
assert_equal doc, @encoder.deserialize(bson)
end
def test_interface
doc = { 'a' => 1 }
bson = BSON.serialize(doc)
assert_equal doc, BSON.deserialize(bson)
end
def test_read_bson_document
bson_file_data_h_star = ["21000000075f6964005115883c3d75c94d3aa18b63016100000000000000f03f00"]
strio = StringIO.new(bson_file_data_h_star.pack('H*'))
bson = BSON.read_bson_document(strio)
doc = {"_id"=>BSON::ObjectId('5115883c3d75c94d3aa18b63'), "a"=>1.0}
assert_equal doc, bson
end
def test_bson_ruby_interface
doc = { 'a' => 1 }
buf = BSON_RUBY.serialize(doc)
bson = BSON::BSON_RUBY.new
bson.instance_variable_set(:@buf, buf)
assert_equal [12, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0], bson.to_a
assert_equal "\f\x00\x00\x00\x10a\x00\x01\x00\x00\x00\x00", bson.to_s
assert_equal [12, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 0], bson.unpack
end
def test_bson_ruby_hex_dump
doc = { 'a' => 1 }
buf = BSON_RUBY.serialize(doc)
bson = BSON_RUBY.new
bson.instance_variable_set(:@buf, buf)
doc_hex_dump = " 0: 0C 00 00 00 10 61 00 01\n 8: 00 00 00 00"
assert_equal doc_hex_dump, bson.hex_dump
end
def test_bson_ruby_dbref_not_used
buf = BSON::ByteBuffer.new
val = ns = 'namespace'
# Make a hole for the length
len_pos = buf.position
buf.put_int(0)
# Save the string
start_pos = buf.position
BSON::BSON_RUBY.serialize_cstr(buf, val)
end_pos = buf.position
# Put the string size in front
buf.put_int(end_pos - start_pos, len_pos)
# Go back to where we were
buf.position = end_pos
oid = ObjectId.new
buf.put_array(oid.to_a)
buf.rewind
bson = BSON::BSON_RUBY.new
bson.instance_variable_set(:@buf, buf)
assert_equal DBRef.new(ns, oid).to_s, bson.deserialize_dbref_data(buf).to_s
end
def test_require_hash
assert_raise_error InvalidDocument, "takes a Hash" do
BSON.serialize('foo')
end
assert_raise_error InvalidDocument, "takes a Hash" do
BSON.serialize(Object.new)
end
assert_raise_error InvalidDocument, "takes a Hash" do
BSON.serialize(Set.new)
end
end
def test_string
doc = {'doc' => 'hello, world'}
assert_doc_pass(doc)
end
def test_valid_utf8_string
doc = {'doc' => 'aé'}
assert_doc_pass(doc)
end
def test_valid_active_support_multibyte_chars
unless RUBY_PLATFORM =~ /java/
doc = {'doc' => ActiveSupport::Multibyte::Chars.new('aé')}
assert_doc_pass(doc)
bson = @encoder.serialize(doc)
doc = @encoder.deserialize(bson)
assert_equal doc['doc'], 'aé'
end
end
def test_valid_utf8_key
doc = {'aé' => 'hello'}
assert_doc_pass(doc)
end
def test_limit_max_bson_size
doc = {'name' => 'a' * BSON::DEFAULT_MAX_BSON_SIZE}
assert_raise InvalidDocument do
assert @encoder.serialize(doc)
end
end
def test_update_max_bson_size
require 'ostruct'
mock_conn = OpenStruct.new
size = 7 * 1024 * 1024
mock_conn.max_bson_size = size
silently do
assert_equal size, BSON_CODER.update_max_bson_size(mock_conn)
assert_equal size, BSON_CODER.max_bson_size
end
end
def test_round_trip
doc = {'doc' => 123}
@encoder.deserialize(@encoder.serialize(doc))
end
# In 1.8 we test that other string encodings raise an exception.
# In 1.9 we test that they get auto-converted.
if RUBY_VERSION < '1.9'
unless RUBY_PLATFORM == 'java'
require 'iconv'
def test_non_utf8_string
string = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
doc = {'doc' => string}
assert_raise InvalidStringEncoding do
@encoder.serialize(doc)
end
end
def test_non_utf8_key
key = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
doc = {key => 'hello'}
assert_raise InvalidStringEncoding do
@encoder.serialize(doc)
end
end
end
else
unless RUBY_PLATFORM == 'java'
def test_non_utf8_string
assert_raise BSON::InvalidStringEncoding do
BSON::BSON_CODER.serialize({'str' => 'aé'.encode('iso-8859-1')})
end
end
def test_invalid_utf8_string
str = "123\xD9"
assert !str.valid_encoding?
assert_raise BSON::InvalidStringEncoding do
BSON::BSON_CODER.serialize({'str' => str})
end
end
def test_non_utf8_key
assert_raise BSON::InvalidStringEncoding do
BSON::BSON_CODER.serialize({'aé'.encode('iso-8859-1') => 'hello'})
end
end
def test_forced_encoding_with_valid_utf8
doc = {'doc' => "\xC3\xB6".force_encoding("ISO-8859-1")}
serialized = @encoder.serialize(doc)
deserialized = @encoder.deserialize(serialized)
assert_equal(doc['doc'], deserialized['doc'].force_encoding("ISO-8859-1"))
end
# Based on a test from sqlite3-ruby
def test_default_internal_is_honored
before_enc = Encoding.default_internal
str = "壁に耳あり、障子に目あり"
bson = BSON::BSON_CODER.serialize("x" => str)
silently { Encoding.default_internal = 'EUC-JP' }
out = BSON::BSON_CODER.deserialize(bson)["x"]
assert_equal Encoding.default_internal, out.encoding
assert_equal str.encode('EUC-JP'), out
assert_equal str, out.encode(str.encoding)
ensure
silently { Encoding.default_internal = before_enc }
end
end
end
def test_code
code = Code.new('this.a.b < this.b')
assert_equal 17, code.length
assert_match /<BSON::Code:\d+.*@data="this.a.b < this.b".*>/, code.inspect
doc = {'$where' => code}
assert_doc_pass(doc)
code = 'this.c.d < this.e'.to_bson_code # core_ext.rb
assert_equal BSON::Code, code.class
assert_equal code, code.to_bson_code
end
def test_code_with_symbol
assert_raise_error ArgumentError, "BSON::Code must be in the form of a String" do
Code.new(:fubar)
end
end
def test_code_with_scope
doc = {'$where' => Code.new('this.a.b < this.b', {'foo' => 1})}
assert_doc_pass(doc)
end
def test_double
doc = {'doc' => 41.25}
assert_doc_pass(doc)
end
def test_int
doc = {'doc' => 42}
assert_doc_pass(doc)
doc = {"doc" => -5600}
assert_doc_pass(doc)
doc = {"doc" => 2147483647}
assert_doc_pass(doc)
doc = {"doc" => -2147483648}
assert_doc_pass(doc)
end
def test_ordered_hash
doc = BSON::OrderedHash.new
doc["b"] = 1
doc["a"] = 2
doc["c"] = 3
doc["d"] = 4
assert_doc_pass(doc)
end
def test_object
doc = {'doc' => {'age' => 42, 'name' => 'Spongebob', 'shoe_size' => 9.5}}
assert_doc_pass(doc)
end
def test_embedded_document_with_nil
doc = {'doc' => {'age' => 42, 'name' => nil, 'shoe_size' => 9.5}}
assert_doc_pass(doc)
end
def test_embedded_document_with_date
doc = {'doc' => {'age' => 42, 'date' => Time.now.utc, 'shoe_size' => 9.5}}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
assert doc2['doc']
assert_equal 42, doc2['doc']['age']
assert_equal 9.5, doc2['doc']['shoe_size']
assert_in_delta Time.now, doc2['doc']['date'], 1
end
def test_oid
doc = {'doc' => ObjectId.new}
assert_doc_pass(doc)
end
def test_array
doc = {'doc' => [1, 2, 'a', 'b']}
assert_doc_pass(doc)
end
def test_array_keys
doc = {'doc' => [1, 2, 'a', 'b']}
bson = @encoder.serialize(doc).to_a
assert_equal 48, bson[14]
assert_equal 49, bson[21]
assert_equal 50, bson[28]
assert_equal 51, bson[37]
end
def test_regex
doc = {'doc' => /foobar/i}
assert_doc_pass(doc)
end
def test_regex_multiline
doc = {'doc' => /foobar/m}
assert_doc_pass(doc)
end
def test_boolean
doc = {'doc' => true}
assert_doc_pass(doc)
end
def test_date
doc = {'date' => Time.now}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
# Mongo only stores up to the millisecond
assert_in_delta doc['date'], doc2['date'], 0.001
end
def test_date_in_array
doc = {'date' => [Time.now.utc]}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
assert doc2
end
def test_date_returns_as_utc
doc = {'date' => Time.now.utc}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
assert doc2['date'].utc?
end
def test_date_before_epoch
if RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/ then return true end
begin
doc = {'date' => Time.utc(1600)}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
# Mongo only stores up to the millisecond
assert_in_delta doc['date'], doc2['date'], 2
rescue ArgumentError
# some versions of Ruby won't let you create pre-epoch Time instances
#
# TODO figure out how that will work if somebady has saved data
# w/ early dates already and is just querying for it.
end
end
def test_exeption_on_using_unsupported_date_class
[DateTime.now, Date.today, Zone].each do |invalid_date|
doc = {:date => invalid_date}
begin
BSON::BSON_CODER.serialize(doc)
rescue => e
ensure
if !invalid_date.is_a? Time
assert_equal InvalidDocument, e.class
assert_match(/UTC Time/, e.message)
end
end
end
end
def test_dbref
oid = ObjectId.new
ns = 'namespace'
doc = {}
dbref = DBRef.new(ns, oid)
assert_equal({"$id"=>oid, "$ns"=>ns}, dbref.to_hash)
doc['dbref'] = dbref
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
# Java doesn't deserialize to DBRefs
if RUBY_PLATFORM =~ /java/ && BSON.extension?
assert_equal 'namespace', doc2['dbref']['$ns']
assert_equal oid, doc2['dbref']['$id']
else
assert_equal 'namespace', doc2['dbref'].namespace
assert_equal oid, doc2['dbref'].object_id
end
end
def test_symbol
doc = {'sym' => :foo}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
assert_equal :foo, doc2['sym']
end
def test_binary
bin = Binary.new
'binstring'.each_byte { |b| bin.put(b) }
doc = {'bin' => bin}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
bin2 = doc2['bin']
assert_kind_of Binary, bin2
assert_equal 'binstring', bin2.to_s
assert_equal Binary::SUBTYPE_SIMPLE, bin2.subtype
end
def test_binary_with_deprecated_subtype
bin = Binary.new
'binstring'.each_byte { |b| bin.put(b) }
bin.subtype = Binary::SUBTYPE_BYTES
doc = {'bin' => bin}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
bin2 = doc2['bin']
assert_kind_of Binary, bin2
assert_equal 'binstring', bin2.to_s
assert_equal Binary::SUBTYPE_BYTES, bin2.subtype
end
def test_binary_with_string
b = Binary.new('somebinarystring')
doc = {'bin' => b}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
bin2 = doc2['bin']
assert_kind_of Binary, bin2
assert_equal 'somebinarystring', bin2.to_s
assert_equal Binary::SUBTYPE_SIMPLE, bin2.subtype
end
def test_binary_type
bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_USER_DEFINED)
doc = {'bin' => bin}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
bin2 = doc2['bin']
assert_kind_of Binary, bin2
assert_equal [1, 2, 3, 4, 5], bin2.to_a
assert_equal Binary::SUBTYPE_USER_DEFINED, bin2.subtype
end
# Java doesn't support binary subtype 0 yet
if !(RUBY_PLATFORM =~ /java/)
def test_binary_subtype_0
bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_SIMPLE)
doc = {'bin' => bin}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
bin2 = doc2['bin']
assert_kind_of Binary, bin2
assert_equal [1, 2, 3, 4, 5], bin2.to_a
assert_equal Binary::SUBTYPE_SIMPLE, bin2.subtype
end
end
def test_binary_byte_buffer
bb = Binary.new
5.times { |i| bb.put(i + 1) }
doc = {'bin' => bb}
bson = @encoder.serialize(doc)
doc2 = @encoder.deserialize(bson)
bin2 = doc2['bin']
assert_kind_of Binary, bin2
assert_equal [1, 2, 3, 4, 5], bin2.to_a
assert_equal Binary::SUBTYPE_SIMPLE, bin2.subtype
end
# With move_id enabled (third serialize argument true), the _id key is
# relocated to the front of the document regardless of insertion order
# or whether it was a string or a symbol key.
def test_put_id_first
  val = BSON::OrderedHash.new
  val['not_id'] = 1
  val['_id'] = 2
  roundtrip = @encoder.deserialize(@encoder.serialize(val, false, true).to_s)
  assert_kind_of BSON::OrderedHash, roundtrip
  assert_equal '_id', roundtrip.keys.first

  # Same check with a plain Hash and a symbol :_id key in the middle.
  val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
  roundtrip = @encoder.deserialize(@encoder.serialize(val, false, true).to_s)
  assert_kind_of BSON::OrderedHash, roundtrip
  assert_equal '_id', roundtrip.keys.first
end
# A document whose _id is explicitly nil must still round-trip cleanly.
def test_nil_id
  assert_doc_pass("_id" => nil)
end
if !(RUBY_PLATFORM =~ /java/)
  # Deserializes a hand-built BSON byte sequence containing a timestamp
  # element (type 0x11) and checks the two 32-bit halves.
  def test_timestamp
    # val = {"test" => [4, 20]}
    result = @encoder.deserialize([0x13, 0x00, 0x00, 0x00,
                                   0x11, 0x74, 0x65, 0x73,
                                   0x74, 0x00, 0x04, 0x00,
                                   0x00, 0x00, 0x14, 0x00,
                                   0x00, 0x00, 0x00])
    # Array-style access on a timestamp is deprecated; suppress warnings.
    silently do
      assert_equal 4, result["test"][0]
      assert_equal 20, result["test"][1]
    end
  end
end
# A Timestamp object (seconds, increment) round-trips as an equal
# Timestamp object.
def test_timestamp_type
  ts = Timestamp.new(5000, 100)
  doc = {:ts => ts}
  bson = @encoder.serialize(doc)
  assert_equal ts, @encoder.deserialize(bson)["ts"]
end
# Signed 64-bit boundary behaviour: integers outside the int64 range
# must raise RangeError, while values at the exact boundaries serialize.
def test_overflow
  doc = {"x" => 2**75}
  assert_raise RangeError do
    @encoder.serialize(doc)
  end

  doc = {"x" => 9223372036854775}
  assert_doc_pass(doc)

  doc = {"x" => 9223372036854775807} # 2**63 - 1, the int64 maximum
  assert_doc_pass(doc)

  doc["x"] = doc["x"] + 1 # one past the maximum must overflow
  assert_raise RangeError do
    @encoder.serialize(doc)
  end

  doc = {"x" => -9223372036854775}
  assert_doc_pass(doc)

  doc = {"x" => -9223372036854775808} # -2**63, the int64 minimum
  assert_doc_pass(doc)

  doc["x"] = doc["x"] - 1 # one past the minimum must overflow
  assert_raise RangeError do
    # FIX: use @encoder like every other assertion in this suite instead
    # of the BSON::BSON_CODER global, so the test exercises whichever
    # encoder the suite was configured with.
    @encoder.serialize(doc)
  end
end
# Numeric types with no BSON representation must raise InvalidDocument
# with an explanatory message.
def test_invalid_numeric_types
  # BigDecimal("1.0") instead of the deprecated/removed BigDecimal.new;
  # both forms work on every Ruby this suite supports.
  [BigDecimal("1.0"), Complex(0, 1), Rational(2, 3)].each do |type|
    doc = {"x" => type}
    # FIX: the old rescue/ensure pattern crashed with NoMethodError on
    # `e.message` when nothing was raised; assert_raise both enforces the
    # exception class and returns the exception for the message check.
    error = assert_raise(InvalidDocument) do
      @encoder.serialize(doc)
    end
    assert_match(/Cannot serialize/, error.message)
  end
end
# Serialization must not mutate the caller's document: the _id key
# (string or symbol) is still present afterwards, even though the
# encoder may reorder keys internally.
def test_do_not_change_original_object
  val = BSON::OrderedHash.new
  val['not_id'] = 1
  val['_id'] = 2
  assert val.keys.include?('_id')
  @encoder.serialize(val)
  assert val.keys.include?('_id')

  val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
  assert val.keys.include?(:_id)
  @encoder.serialize(val)
  assert val.keys.include?(:_id)
end
# note we only test for _id here because in the general case we will
# write duplicates for :key and "key". _id is a special case because
# we call has_key? to check for its existence rather than just iterating
# over it like we do for the rest of the keys. thus, things like
# HashWithIndifferentAccess can cause problems for _id but not for other
# keys. rather than require rails to test with HWIA directly, we do this
# somewhat hacky test.
#
# Note that the driver only eliminates duplicate ids when move_id is true.
# A doc carrying both "_id" and :_id must serialize the id only once
# (when move_id is true), producing the same bytes as a single-id doc.
def test_no_duplicate_id
  dup = {"_id" => "foo", :_id => "foo"}
  one = {"_id" => "foo"}
  assert_equal @encoder.serialize(one, false, true).to_a, @encoder.serialize(dup, false, true).to_a
end
# Pending: duplicate string/symbol keys other than _id are currently
# written twice (see the long comment above test_no_duplicate_id).
def test_duplicate_keys
  #dup = {"_foo" => "foo", :_foo => "foo"}
  #one = {"_foo" => "foo"}
  #assert_equal @encoder.serialize(one).to_a, @encoder.serialize(dup).to_a
  #warn "Pending test for duplicate keys"
end
# With move_id on, a symbol-keyed id and a duplicated string/symbol id
# pair serialize to identical byte strings.
def test_no_duplicate_id_when_moving_id
  dup = {"_id" => "foo", :_id => "foo"}
  one = {:_id => "foo"}
  assert_equal @encoder.serialize(one, false, true).to_s, @encoder.serialize(dup, false, true).to_s
end
# NUL bytes are legal inside string *values*, but not in key names or
# regexp sources — those must raise InvalidDocument.
def test_null_character
  doc = {"a" => "\x00"}
  assert_doc_pass(doc)

  assert_raise InvalidDocument do
    @encoder.serialize({"\x00" => "a"})
  end

  assert_raise InvalidDocument do
    @encoder.serialize({"a" => (Regexp.compile "ab\x00c")})
  end
end
# The MaxKey sentinel must survive a serialize/deserialize round trip.
def test_max_key
  assert_doc_pass("a" => MaxKey.new)
end
# The MinKey sentinel must survive a serialize/deserialize round trip.
def test_min_key
  assert_doc_pass("a" => MinKey.new)
end
# Plain Objects and Dates have no BSON encoding and must be rejected
# with InvalidDocument.
def test_invalid_object
  [Object.new, Date.today].each do |value|
    assert_raise InvalidDocument do
      @encoder.serialize({:foo => value})
    end
  end
end
# Byte-exact check of move_id: with the flag true, the int32 _id element
# (type \020) is emitted first; with it false, _id stays last as inserted.
def test_move_id
  a = BSON::OrderedHash.new
  a['text'] = 'abc'
  a['key'] = 'abc'
  a['_id']  = 1

  assert_equal ")\000\000\000\020_id\000\001\000\000\000\002text" +
               "\000\004\000\000\000abc\000\002key\000\004\000\000\000abc\000\000",
               @encoder.serialize(a, false, true).to_s

  assert_equal ")\000\000\000\002text\000\004\000\000\000abc\000\002key" +
               "\000\004\000\000\000abc\000\020_id\000\001\000\000\000\000",
               @encoder.serialize(a, false, false).to_s
end
# move_id must only hoist the *top-level* _id; the nested document's
# _id stays where it was in both serializations.
def test_move_id_with_nested_doc
  b = BSON::OrderedHash.new
  b['text'] = 'abc'
  b['_id']   = 2
  c = BSON::OrderedHash.new
  c['text'] = 'abc'
  c['hash'] = b
  c['_id']   = 3

  assert_equal ">\000\000\000\020_id\000\003\000\000\000\002text" +
               "\000\004\000\000\000abc\000\003hash\000\034\000\000" +
               "\000\002text\000\004\000\000\000abc\000\020_id\000\002\000\000\000\000\000",
               @encoder.serialize(c, false, true).to_s

  # Java doesn't support this. Isn't actually necessary.
  if !(RUBY_PLATFORM =~ /java/)
    assert_equal ">\000\000\000\002text\000\004\000\000\000abc\000\003hash" +
                 "\000\034\000\000\000\002text\000\004\000\000\000abc\000\020_id" +
                 "\000\002\000\000\000\000\020_id\000\003\000\000\000\000",
                 @encoder.serialize(c, false, false).to_s
  end
end
# With key checking enabled (second serialize argument true):
# - NUL bytes in keys raise InvalidDocument,
# - keys starting with '$' or containing '.' raise InvalidKeyName,
# - both rules apply at any nesting depth,
# - an *embedded* '$' (e.g. "he$llo") is legal.
def test_invalid_key_names
  assert @encoder.serialize({"hello" => "world"}, true)
  assert @encoder.serialize({"hello" => {"hello" => "world"}}, true)

  assert @encoder.serialize({"he$llo" => "world"}, true)
  assert @encoder.serialize({"hello" => {"hell$o" => "world"}}, true)

  assert_raise BSON::InvalidDocument do
    @encoder.serialize({"he\0llo" => "world"}, true)
  end

  assert_raise BSON::InvalidKeyName do
    @encoder.serialize({"$hello" => "world"}, true)
  end

  assert_raise BSON::InvalidKeyName do
    @encoder.serialize({"hello" => {"$hello" => "world"}}, true)
  end

  assert_raise BSON::InvalidKeyName do
    @encoder.serialize({".hello" => "world"}, true)
  end

  assert_raise BSON::InvalidKeyName do
    @encoder.serialize({"hello" => {".hello" => "world"}}, true)
  end

  assert_raise BSON::InvalidKeyName do
    @encoder.serialize({"hello." => "world"}, true)
  end

  assert_raise BSON::InvalidKeyName do
    @encoder.serialize({"hello" => {"hello." => "world"}}, true)
  end

  assert_raise BSON::InvalidKeyName do
    @encoder.serialize({"hel.lo" => "world"}, true)
  end

  assert_raise BSON::InvalidKeyName do
    @encoder.serialize({"hello" => {"hel.lo" => "world"}}, true)
  end
end
end
|
require "spec_helper"

# Specs for Clarification::Client: configuration handling, swapping the
# active models, and access to the Predict/Search/Train sub-objects.
RSpec.describe Clarification::Client do
  context "when first initialized" do
    it "should raise a configuration error if no configuration present" do
      Clarification.configuration = nil
      expect{Clarification::Client.new}.to raise_error(RuntimeError)
    end
    it "should initialize from the configuration" do
      Clarification.configure do |config|
        config.api_key = 'blurgh'
        config.default_public_models = [:food]
      end
      client = Clarification::Client.new
      expect(client.active_models.first.name).to eq(:food)
    end
  end
  context "when the models are changed" do
    it "should rewrite the active_models" do
      client = Clarification::Client.new
      client.set_active_models_by_public_key([:general, :focus])
      expect(client.active_models.count).to eq(2)
      expect(client.active_models.last.name).to eq(:focus)
      # NOTE(review): active_models appears to hold model objects (it is
      # queried via `.name` above), so include?(:food) is trivially false
      # regardless of content — this assertion is likely weaker than intended.
      expect(client.active_models.include?(:food)).to be(false)
    end
    context "when the models are changed and a single item is passed in" do
      it "should raise an error if the argument isn't an array" do
        client = Clarification::Client.new
        expect{client.set_active_models_by_public_key(:food)}.to raise_error(RuntimeError)
      end
      it "should give a meaningful error message about arrays" do
        client = Clarification::Client.new
        expect{client.set_active_models_by_public_key(:food)}.to raise_error(/array of model/)
      end
    end
  end
  describe "#predict" do
    it "the client should have accees to a predict object" do
      expect(Clarification::Client.new.predict.class).to eq Clarification::Predict
    end
  end
  describe "#search" do
    it "should have an instance of the search class accessible" do
      expect(Clarification::Client.new.search.class).to eq Clarification::Search
    end
  end
  describe "#train" do
    it "should have an instance of the train class accessible" do
      expect(Clarification::Client.new.train.class).to eq Clarification::Train
    end
  end
  describe "attr_readers" do
    it "should respond to active_public_models" do
      expect(Clarification::Client.new.respond_to? :active_models).to be true
    end
  end
end
update test for how models are updated and how Predict needs to be reinitialized.
require "spec_helper"

# Specs for Clarification::Client covering configuration, both ways of
# swapping the active models (public key list and explicit hash), and
# the Predict/Search/Train accessors.
RSpec.describe Clarification::Client do
  context "when first initialized" do
    it "should raise a configuration error if no configuration present" do
      Clarification.configuration = nil
      expect{Clarification::Client.new}.to raise_error(RuntimeError)
    end
    it "should initialize from the configuration" do
      Clarification.configure do |config|
        config.api_key = 'blurgh'
        config.default_public_models = [:food]
      end
      client = Clarification::Client.new
      expect(client.active_models.first.name).to eq(:food)
    end
  end
  context "when the models are changed" do
    describe "#set_active_models_by_public_key" do
      it "should rewrite the active_models" do
        client = Clarification::Client.new
        client.set_active_models_by_public_key([:general, :focus])
        expect(client.active_models.count).to eq(2)
        expect(client.active_models.last.name).to eq(:focus)
      end
      it "should update the associated Predict model of the client" do
        client = Clarification::Client.new
        client.set_active_models_by_public_key([:general, :focus])
        # instance_eval peeks at Predict's internal @models; brittle, but
        # it is the only way to observe the reinitialization from outside.
        models = client.predict.instance_eval{@models}
        expect(models.last.name).not_to eq(:food)
      end
      context "when the models are changed and a single item is passed in" do
        it "should raise an error if the argument isn't an array" do
          client = Clarification::Client.new
          expect{client.set_active_models_by_public_key(:food)}.to raise_error(RuntimeError)
        end
        it "should give a meaningful error message about arrays" do
          client = Clarification::Client.new
          expect{client.set_active_models_by_public_key(:food)}.to raise_error(/array of model/)
        end
      end
    end
    describe "#set_active_models" do
      it "should update the client models" do
        client = Clarification::Client.new
        client.set_active_models({blurgh: '123123123'})
        expect(client.active_models.first.name).to eq(:blurgh)
      end
      it "should update the models of the predict object" do
        client = Clarification::Client.new
        client.set_active_models({blurgh: '123123123'})
        models = client.predict.instance_eval {@models}
        expect(models.first.name).to eq(:blurgh)
      end
    end
  end
  describe "#predict" do
    it "the client should have accees to a predict object" do
      expect(Clarification::Client.new.predict.class).to eq Clarification::Predict
    end
  end
  describe "#search" do
    it "should have an instance of the search class accessible" do
      expect(Clarification::Client.new.search.class).to eq Clarification::Search
    end
  end
  describe "#train" do
    it "should have an instance of the train class accessible" do
      expect(Clarification::Client.new.train.class).to eq Clarification::Train
    end
  end
  describe "attr_readers" do
    it "should respond to active_public_models" do
      expect(Clarification::Client.new.respond_to? :active_models).to be true
    end
  end
end
require 'test/unit'
require 'veewee'
# Ordered integration tests for a full Veewee/VirtualBox box lifecycle:
# build, ssh, console typing, shutdown, rebuild, destroy. The numeric
# test-name suffixes enforce the execution order.
class TestVeeweeBuild < Test::Unit::TestCase
  def setup
    definition_dir=File.expand_path(File.join(File.dirname(__FILE__),"definitions"))
    #ENV['VEEWEE_LOG']="STDOUT"
    @ve=Veewee::Environment.new({ :definition_dir => definition_dir })
    @definition_name="test_definition"
    @vd=@ve.definitions[@definition_name]
    @box_name=@definition_name
    @vd.postinstall_files=["_test_me.sh"]
    @box=@ve.providers["virtualbox"].get_box(@box_name)
  end

  # First build of box
  # - the creation
  # - kickstart fetch
  # - postinstall execution
  def test_box_1_build
    assert_nothing_raised {
      @box.build({'auto' => true,'force' => true, 'nogui' => true })
      #@box.build({"auto" => true,"force" => true })
    }
  end

  # Run an ssh command
  def test_box_2_ssh
    assert_nothing_raised {
      result=@box.exec("who am i")
      assert_match(/root/,result.stdout)
    }
  end

  # Type on console
  def test_box_3_console_type
    assert_nothing_raised {
      @box.console_type(['echo "bla" > console.txt<Enter>'])
      result=@box.exec("cat console.txt")
      assert_match(/bla/,result.stdout)
    }
  end

  # Try shutdown
  def test_box_4_shutdown
    assert_nothing_raised {
      @box.halt
    }
  end

  # Now try build again (with no force flag)
  # NOTE(review): despite the comment, :force *is* passed here while a
  # Veewee::Error is still expected — confirm which behaviour the build
  # API actually guarantees for a rebuild.
  def test_box_5_build
    assert_raise(Veewee::Error) {
      #@box.build({"auto" => true})
      @box.build({"auto" => true,'force' => true, 'nogui' => true })
    }
  end

  def test_box_6_destroy
    assert_nothing_raised {
      @box.destroy
    }
  end

  #
  # def teardown
  # #@ve.destroy(@vm_name,@vd)
  #
  # end
end
a rebuild should raise nothing in a real test
require 'test/unit'
require 'veewee'
# Ordered integration tests for a full Veewee/VirtualBox box lifecycle:
# build, ssh, console typing, shutdown, rebuild, destroy. The numeric
# test-name suffixes enforce the execution order.
class TestVeeweeBuild < Test::Unit::TestCase
  def setup
    definition_dir=File.expand_path(File.join(File.dirname(__FILE__),"definitions"))
    #ENV['VEEWEE_LOG']="STDOUT"
    @ve=Veewee::Environment.new({ :definition_dir => definition_dir })
    @definition_name="test_definition"
    @vd=@ve.definitions[@definition_name]
    @box_name=@definition_name
    @vd.postinstall_files=["_test_me.sh"]
    @box=@ve.providers["virtualbox"].get_box(@box_name)
  end

  # First build of box
  # - the creation
  # - kickstart fetch
  # - postinstall execution
  def test_box_1_build
    assert_nothing_raised {
      @box.build({'auto' => true,'force' => true, 'nogui' => true })
      #@box.build({"auto" => true,"force" => true })
    }
  end

  # Run an ssh command
  def test_box_2_ssh
    assert_nothing_raised {
      result=@box.exec("who am i")
      assert_match(/root/,result.stdout)
    }
  end

  # Type on console
  def test_box_3_console_type
    assert_nothing_raised {
      @box.console_type(['echo "bla" > console.txt<Enter>'])
      result=@box.exec("cat console.txt")
      assert_match(/bla/,result.stdout)
    }
  end

  # Try shutdown
  def test_box_4_shutdown
    assert_nothing_raised {
      @box.halt
    }
  end

  # Rebuilding with the force flag should succeed without raising.
  # FIX: `assert_nothing_raise` is not a Test::Unit assertion and would
  # fail with NoMethodError; the intended call is assert_nothing_raised.
  def test_box_5_build
    assert_nothing_raised(Veewee::Error) {
      #@box.build({"auto" => true})
      @box.build({"auto" => true,'force' => true, 'nogui' => true })
    }
  end

  def test_box_6_destroy
    assert_nothing_raised {
      @box.destroy
    }
  end

  #
  # def teardown
  # #@ve.destroy(@vm_name,@vd)
  #
  # end
end
|
require 'spec_helper'

# Translator spec: plain English in, ehrmagerd-speak out.
describe Ehrmagerd::Translator do
  it 'translates a string into ehrmagerd-speak' do
    test_string1 = "Oh my god"
    expect(Ehrmagerd::Translator.translate(test_string1)).to eq "EHRMAGERD"
    test_string2 = "Dogecoins are dumb"
    # FIX: the second expectation re-translated test_string1, leaving
    # test_string2 unused and asserting against the wrong input.
    expect(Ehrmagerd::Translator.translate(test_string2)).to eq "DERGERCERNS ER DERMB"
  end
end
Update spec to use the test post request's string
require 'spec_helper'

# Translator spec: plain English in, ehrmagerd-speak out.
describe Ehrmagerd::Translator do
  it 'translates a string into ehrmagerd-speak' do
    test_string1 = "OhMyGod, Goosebumps, my favorite books!"
    expect(Ehrmagerd::Translator.translate(test_string1)).to eq "ERMAHGERDGERSERBERMPSMAH FRAVRIT BERKS!"
    test_string2 = "Dogecoins are dumb"
    # FIX: the second expectation re-translated test_string1, leaving
    # test_string2 unused and asserting against the wrong input.
    expect(Ehrmagerd::Translator.translate(test_string2)).to eq "DERGERCERNS ER DERMB"
  end
end
|
require "test_helper"
# Smoke tests for the Chartkick view helpers plus unit tests for
# chartkick_deep_merge's handling of nested option hashes.
class TestChartkick < Minitest::Test
  include Chartkick::Helper

  # TODO actual tests
  def setup
    @data = [[34, 42], [56, 49]]
  end

  def test_line_chart
    assert line_chart(@data)
  end

  def test_pie_chart
    assert pie_chart(@data)
  end

  def test_column_chart
    assert column_chart(@data)
  end

  # Passing an options hash to a helper must not mutate the caller's hash.
  def test_options_not_mutated
    options = {id: "boom"}
    line_chart @data, options
    assert_equal "boom", options[:id]
  end

  # Distinct inner keys under :library are merged together.
  # FIX: assert_equal takes (expected, actual) — the arguments were
  # reversed, which yields misleading "expected X, actual Y" failures.
  def test_chartkick_deep_merge_different_inner_key
    global_option = {library: {backgroundColor: "#eee"}}
    local_option = {library: {title: "test"}}
    correct_merge = {library: {backgroundColor: "#eee", title: "test"}}
    assert_equal correct_merge, chartkick_deep_merge(global_option, local_option)
  end

  # The local option wins when both sides define the same inner key.
  def test_chartkick_deep_merge_same_inner_key
    global_option = {library: {backgroundColor: "#eee"}}
    local_option = {library: {backgroundColor: "#fff"}}
    correct_merge = {library: {backgroundColor: "#fff"}}
    assert_equal correct_merge, chartkick_deep_merge(global_option, local_option)
  end
end
Better test code
require_relative "test_helper"
# Smoke tests for the Chartkick view helpers plus unit tests for
# chartkick_deep_merge's handling of nested option hashes.
class ChartkickTest < Minitest::Test
  include Chartkick::Helper

  def setup
    @data = [[34, 42], [56, 49]]
  end

  def test_line_chart
    assert line_chart(@data)
  end

  def test_pie_chart
    assert pie_chart(@data)
  end

  def test_column_chart
    assert column_chart(@data)
  end

  # Passing an options hash to a helper must not mutate the caller's hash.
  def test_options_not_mutated
    options = {id: "boom"}
    line_chart @data, options
    assert_equal "boom", options[:id]
  end

  # Distinct inner keys under :library are merged together.
  # FIX: assert_equal takes (expected, actual) — the arguments were
  # reversed, which yields misleading "expected X, actual Y" failures.
  def test_chartkick_deep_merge_different_inner_key
    global_option = {library: {backgroundColor: "#eee"}}
    local_option = {library: {title: "test"}}
    correct_merge = {library: {backgroundColor: "#eee", title: "test"}}
    assert_equal correct_merge, chartkick_deep_merge(global_option, local_option)
  end

  # The local option wins when both sides define the same inner key.
  def test_chartkick_deep_merge_same_inner_key
    global_option = {library: {backgroundColor: "#eee"}}
    local_option = {library: {backgroundColor: "#fff"}}
    correct_merge = {library: {backgroundColor: "#fff"}}
    assert_equal correct_merge, chartkick_deep_merge(global_option, local_option)
  end
end
|
require 'spec_helper'

# Admin feature spec: organisers can create events through the admin
# form; saving with no fields surfaces a validation message.
feature 'Event creation' do
  let(:member) { Fabricate(:member) }
  let(:chapter) { Fabricate(:chapter_with_groups) }

  describe 'an authorised member' do
    before do
      member.add_role(:organiser, chapter)
      login_as_admin(member)
    end

    describe 'can succesfuly create an event' do
      scenario 'when they fill in all mandatory fields' do
        sponsor = Fabricate(:sponsor)
        date = Time.zone.today + 2.days
        visit new_admin_event_path
        fill_in 'Event Name', with: 'A test event'
        fill_in 'Slug', with: 'a-test-event'
        fill_in 'Date', with: date
        fill_in 'Begins at', with: '16:00'
        fill_in 'Ends at', with: '18:00'
        fill_in 'Description', with: 'A test event description'
        fill_in 'RSVP instructions', with: 'Some instructions'
        fill_in 'Schedule', with: '9:00 Sign up & breakfast <br/> 9:30 kick off'
        fill_in 'Coach spaces', with: '19'
        fill_in 'Student spaces', with: '25'
        select sponsor.name, from: 'Venue'
        click_on 'Save'
        expect(page).to have_content('Event successfully created')
        expect(page).to have_content('A test event')
        expect(page).to have_content("#{I18n.l(date, format: :date)} 16:00 to 18:00")
        expect(page).to have_content('A test event description')
        expect(page).to have_content('25 student spots, 19 coach spots')
        # The <br/> entered above is rendered, so the visible text is the
        # schedule with the markup collapsed.
        expect(page).to have_content('9:00 Sign up & breakfast 9:30 kick off')
        within '#host' do
          expect(page).to have_content sponsor.name
          expect(page).to have_content sponsor.address.street
          expect(page).to have_content sponsor.address.city
        end
      end
    end

    describe 'can not create an event' do
      scenario 'when they don\'t fill in any of the events details' do
        visit new_admin_event_path
        click_on 'Save'
        expect(page).to have_content('Make sure you fill in all mandatory fields')
      end
    end
  end
end
resolve hidden element issue caused by Capybara upgrade
require 'spec_helper'

# Admin feature spec: organisers can create events through the admin
# form; saving with no fields surfaces a validation message.
feature 'Event creation' do
  let(:member) { Fabricate(:member) }
  let(:chapter) { Fabricate(:chapter_with_groups) }

  describe 'an authorised member' do
    before do
      member.add_role(:organiser, chapter)
      login_as_admin(member)
    end

    describe 'can succesfuly create an event' do
      scenario 'when they fill in all mandatory fields' do
        sponsor = Fabricate(:sponsor)
        date = Time.zone.today + 2.days
        visit new_admin_event_path
        fill_in 'Event Name', with: 'A test event'
        fill_in 'Slug', with: 'a-test-event'
        fill_in 'Date', with: date
        fill_in 'Begins at', with: '16:00'
        fill_in 'Ends at', with: '18:00'
        fill_in 'Description', with: 'A test event description'
        fill_in 'RSVP instructions', with: 'Some instructions'
        fill_in 'Schedule', with: '9:00 Sign up & breakfast <br/> 9:30 kick off'
        fill_in 'Coach spaces', with: '19'
        fill_in 'Student spaces', with: '25'
        select sponsor.name, from: 'Venue'
        click_on 'Save'
        expect(page).to have_content('Event successfully created')
        expect(page).to have_content('A test event')
        expect(page).to have_content("#{I18n.l(date, format: :date)} 16:00 to 18:00")
        expect(page).to have_content('A test event description')
        expect(page).to have_content('25 student spots, 19 coach spots')
        # The schedule element is hidden, so newer Capybara's default
        # visible-only matching misses it; fetch it with visible: false
        # and compare its text directly.
        expect(find('#schedule', visible: false).text).to eq('9:00 Sign up & breakfast 9:30 kick off')
        within '#host' do
          expect(page).to have_content sponsor.name
          expect(page).to have_content sponsor.address.street
          expect(page).to have_content sponsor.address.city
        end
      end
    end

    describe 'can not create an event' do
      scenario 'when they don\'t fill in any of the events details' do
        visit new_admin_event_path
        click_on 'Save'
        expect(page).to have_content('Make sure you fill in all mandatory fields')
      end
    end
  end
end
|
# coding: utf-8
require 'rails_helper'
require 'login_helper'

# JS feature specs for the dance editor: display of an existing dance,
# a no-op save round trip, and persisting the plain form fields.
describe 'Editing dances', js: true do
  it 'displays attributes of an existing dance' do
    with_login do |user|
      dance = FactoryGirl.create(:box_the_gnat_contra, user: user)
      visit edit_dance_path dance.id
      expect(page.body).to include(dance.title)
      expect(page.body).to include(dance.choreographer.name)
      expect(page.body).to include(dance.start_type)
      expect(page.body).to include(dance.hook)
      expect(page.body).to include(dance.preamble)
      expect(page.body).to_not match(/Becket/i)
      expect(page).to have_text('neighbors balance & swing')
      expect(page).to have_text('ladles allemande right 1½')
      expect(page.body).to include dance.notes
      expect(page).to have_current_path(edit_dance_path(dance.id))
    end
  end

  it 'editing a dance passes it\'s information through unchanged' do
    with_login do |user|
      choreographer = FactoryGirl.create(:choreographer, name: 'Becky Hill')
      dance1 = FactoryGirl.create(:box_the_gnat_contra, user: user, choreographer: choreographer)
      visit edit_dance_path dance1.id
      # Save without touching anything, then compare against a freshly
      # built (unsaved) copy of the same factory to prove nothing changed.
      click_button 'Save Dance'
      dance2 = FactoryGirl.build(:box_the_gnat_contra, user: user, choreographer: choreographer)
      dance1.reload
      expect(current_path).to eq dance_path dance1.id
      %w[title start_type figures_json hook preamble notes].each do |message|
        expect(dance1.send message).to eql dance2.send message
      end
      expect(dance1.choreographer.name).to eql dance2.choreographer.name
    end
  end

  it 'editing a dance saves form values (except figure editor edits)' do
    with_login do |user|
      dance = FactoryGirl.create(:box_the_gnat_contra, user: user)
      visit edit_dance_path dance.id
      fill_in 'dance_title', with: 'Call Me'
      fill_in 'dance[choreographer_name]', with: 'Cary Ravitz'
      fill_in 'dance[start_type]', with: 'Beckett'
      fill_in 'dance[hook]', with: 'wombatty'
      fill_in 'dance[preamble]', with: 'prerambling'
      fill_in 'dance[notes]', with: 'notey'
      choose 'Publish'
      click_button 'Save Dance'
      dance.reload
      expect(dance.title).to eq('Call Me')
      expect(dance.choreographer.name).to eq('Cary Ravitz')
      expect(dance.start_type).to eq('Beckett')
      expect(dance.hook).to eq('wombatty')
      expect(dance.preamble).to eq('prerambling')
      expect(dance.notes).to eq('notey')
    end
  end
end
editor spec
# coding: utf-8
require 'rails_helper'
require 'login_helper'

# JS feature specs for the dance editor: display of an existing dance,
# a no-op save round trip, persisting the plain form fields, and the
# live rewriting of shadow/neighbor role labels in the figure editor.
describe 'Editing dances', js: true do
  it 'displays attributes of an existing dance' do
    with_login do |user|
      dance = FactoryGirl.create(:box_the_gnat_contra, user: user)
      visit edit_dance_path dance.id
      expect(page.body).to include(dance.title)
      expect(page.body).to include(dance.choreographer.name)
      expect(page.body).to include(dance.start_type)
      expect(page.body).to include(dance.hook)
      expect(page.body).to include(dance.preamble)
      expect(page.body).to_not match(/Becket/i)
      expect(page).to have_text('neighbors balance & swing')
      expect(page).to have_text('ladles allemande right 1½')
      expect(page.body).to include dance.notes
      expect(page).to have_current_path(edit_dance_path(dance.id))
    end
  end

  it 'editing a dance passes it\'s information through unchanged' do
    with_login do |user|
      choreographer = FactoryGirl.create(:choreographer, name: 'Becky Hill')
      dance1 = FactoryGirl.create(:box_the_gnat_contra, user: user, choreographer: choreographer)
      visit edit_dance_path dance1.id
      # Save without touching anything, then compare against a freshly
      # built (unsaved) copy of the same factory to prove nothing changed.
      click_button 'Save Dance'
      dance2 = FactoryGirl.build(:box_the_gnat_contra, user: user, choreographer: choreographer)
      dance1.reload
      expect(current_path).to eq dance_path dance1.id
      %w[title start_type figures_json hook preamble notes].each do |message|
        expect(dance1.send message).to eql dance2.send message
      end
      expect(dance1.choreographer.name).to eql dance2.choreographer.name
    end
  end

  it 'editing a dance saves form values (except figure editor edits)' do
    with_login do |user|
      dance = FactoryGirl.create(:box_the_gnat_contra, user: user)
      visit edit_dance_path dance.id
      fill_in 'dance_title', with: 'Call Me'
      fill_in 'dance[choreographer_name]', with: 'Cary Ravitz'
      fill_in 'dance[start_type]', with: 'Beckett'
      fill_in 'dance[hook]', with: 'wombatty'
      fill_in 'dance[preamble]', with: 'prerambling'
      fill_in 'dance[notes]', with: 'notey'
      choose 'Publish'
      click_button 'Save Dance'
      dance.reload
      expect(dance.title).to eq('Call Me')
      expect(dance.choreographer.name).to eq('Cary Ravitz')
      expect(dance.start_type).to eq('Beckett')
      expect(dance.hook).to eq('wombatty')
      expect(dance.preamble).to eq('prerambling')
      expect(dance.notes).to eq('notey')
    end
  end

  it 'rewrites on the fly for 1st shadown & 2nd neigbor' do
    with_login do |user|
      # NOTE(review): the factory name spells "neigbors" — it must match
      # the factory definition, so do not "fix" it here alone.
      dance = FactoryGirl.create(:dance_with_all_shadows_and_neigbors, user: user)
      visit edit_dance_path dance.id
      # With 3rd neighbors present, labels are the numbered forms.
      expect(page).to_not have_content('next neighbors')
      expect(page).to have_content('2nd neighbors')
      expect(page).to have_content('1st shadows')
      # Removing the 3rd-neighbor figure downgrades "2nd" to "next".
      click_link('3rd neighbors swing')
      select('partners')
      expect(page).to have_content('next neighbors')
      expect(page).to_not have_content('2nd neighbors')
      # Removing/re-adding a 2nd-shadow figure toggles the "1st" prefix.
      click_link('2nd shadows swing')
      select('partners')
      expect(page).to_not have_content('1st shadows')
      select('2nd shadows')
      expect(page).to have_content('1st shadows')
    end
  end
end
|
require 'rails_helper'

# Feature specs for the experience archive: listing, showing, submitting
# (behind a CAPTCHA) and searching experiences.
describe Experience do
  describe 'GET /' do
    context 'there is no experiences on the archive' do
      it 'displays an empty archive message' do
        visit root_path
        expect(page).to have_content I18n.t('experiences.no_data')
      end
    end
    context 'there is some content on the archive' do
      it 'displays a list of experiences' do
        create_list(:experience, 5, is_approved: true)
        visit root_path
        expect(page).to have_selector('li.experience', count: 5)
      end
    end
  end
  describe 'GET /experience/:some_experience' do
    it 'displays the mandatory contents of an experience' do
      experience = create(:experience, is_approved: true)
      visit experience_path(experience)
      expect(page).to have_selector('div.experience_data')
      expect(page).to have_selector('div.report')
      expect(page).not_to have_selector('div.cocktail_data')
    end
    it 'displays the contents of an experience and its cocktail information' do
      experience = create(:experience_with_cocktail_data, is_approved: true)
      visit experience_path(experience)
      expect(page).to have_selector('div.experience_data')
      expect(page).to have_selector('div.cocktail_data')
      expect(page).to have_selector('div.report')
    end
  end
  describe 'GET /experiences/new' do
    it 'shows a disclaimer about the approval process' do
      visit new_experience_path
      expect(page).to have_content I18n.t('experiences.addition.disclaimer')
    end
    it 'shows a simple form to fill your report' do
      visit new_experience_path
      expect(page).to have_selector('form')
    end
    it 'has and requires to fill a CAPTCHA in order to submit' do
      visit new_experience_path
      expect(page).to have_xpath "//input[contains(@id, 'gotcha')]"
    end
  end
  describe 'POST /experiences' do
    before do
      Gotcha.skip_validation = true
      @form_fields = %w(title pseudonym set setting body).map { |field| "experience[#{field}]" }
    end
    it 'sends a report back to us for approval' do
      visit new_experience_path
      @form_fields.each do |field|
        fill_in field, with: 'Nature to be enslaved must enslave us.'
      end
      click_button I18n.t('experiences.addition.submit')
      expect(page).to have_content I18n.t('experiences.waiting_for_approval')
    end
  end
  describe 'GET /search' do
    context 'there is no content on the site' do
      it 'isn\'t available if there is no content on the site' do
        visit root_path
        # FIX: have_content matches page *text*, so the original
        # `not_to have_content('div.search')` trivially passed by looking
        # for the literal string "div.search"; have_selector actually
        # asserts the search box element is absent.
        expect(page).not_to have_selector('div.search')
      end
    end
    context 'there is some content on the site' do
      before do
        @experience = create(:experience, is_approved: true)
      end
      it 'appears on the homepage' do
        visit root_path
        expect(page).to have_selector('div.search')
      end
      it 'returns acceptable results' do
        visit root_path
        fill_in :query, with: (query = 'report')
        click_button I18n.t('experiences.search')
        # Parenthesized to avoid Ruby's ambiguous-regexp-argument warning.
        expect(page).to have_content(/Resultados de la búsqueda para: #{query}/)
        expect(page).to have_link I18n.t('experiences.random.keep_reading'),
                                  href: experience_path(@experience)
      end
    end
  end
end
Fix based on Hound comments.
require 'rails_helper'

# Feature specs for the experience archive: listing, showing, submitting
# (behind a CAPTCHA) and searching experiences.
describe Experience do
  describe 'GET /' do
    context 'there is no experiences on the archive' do
      it 'displays an empty archive message' do
        visit root_path
        expect(page).to have_content I18n.t('experiences.no_data')
      end
    end
    context 'there is some content on the archive' do
      it 'displays a list of experiences' do
        create_list(:experience, 5, is_approved: true)
        visit root_path
        expect(page).to have_selector('li.experience', count: 5)
      end
    end
  end
  describe 'GET /experience/:some_experience' do
    it 'displays the mandatory contents of an experience' do
      experience = create(:experience, is_approved: true)
      visit experience_path(experience)
      expect(page).to have_selector('div.experience_data')
      expect(page).to have_selector('div.report')
      expect(page).not_to have_selector('div.cocktail_data')
    end
    it 'displays the contents of an experience and its cocktail information' do
      experience = create(:experience_with_cocktail_data, is_approved: true)
      visit experience_path(experience)
      expect(page).to have_selector('div.experience_data')
      expect(page).to have_selector('div.cocktail_data')
      expect(page).to have_selector('div.report')
    end
  end
  describe 'GET /experiences/new' do
    it 'shows a disclaimer about the approval process' do
      visit new_experience_path
      expect(page).to have_content I18n.t('experiences.addition.disclaimer')
    end
    it 'shows a simple form to fill your report' do
      visit new_experience_path
      expect(page).to have_selector('form')
    end
    it 'has and requires to fill a CAPTCHA in order to submit' do
      visit new_experience_path
      expect(page).to have_xpath "//input[contains(@id, 'gotcha')]"
    end
  end
  describe 'POST /experiences' do
    before do
      Gotcha.skip_validation = true
      @fields = %w(title pseudonym set setting body).map { |f| "experience[#{f}]" }
    end
    it 'sends a report back to us for approval' do
      visit new_experience_path
      @fields.each do |field|
        fill_in field, with: 'Nature to be enslaved must enslave us.'
      end
      click_button I18n.t('experiences.addition.submit')
      expect(page).to have_content I18n.t('experiences.waiting_for_approval')
    end
  end
  describe 'GET /search' do
    context 'there is no content on the site' do
      it 'isn\'t available if there is no content on the site' do
        visit root_path
        # FIX: have_content matches page *text*, so the original
        # `not_to have_content('div.search')` trivially passed by looking
        # for the literal string "div.search"; have_selector actually
        # asserts the search box element is absent.
        expect(page).not_to have_selector('div.search')
      end
    end
    context 'there is some content on the site' do
      before do
        @experience = create(:experience, is_approved: true)
      end
      it 'appears on the homepage' do
        visit root_path
        expect(page).to have_selector('div.search')
      end
      it 'returns acceptable results' do
        visit root_path
        fill_in :query, with: (query = 'report')
        click_button I18n.t('experiences.search')
        # Parenthesized to avoid Ruby's ambiguous-regexp-argument warning.
        expect(page).to have_content(/Resultados de la búsqueda para: #{query}/)
        expect(page).to have_link I18n.t('experiences.random.keep_reading'),
                                  href: experience_path(@experience)
      end
    end
  end
end
|
# coding: utf-8
require 'pathname'
# Reads a JSON fixture from the fixtures/ directory that sits next to
# this spec file and returns its contents as a String.
def read_fixture(name)
  fixture_path = File.join(File.dirname(__FILE__), 'fixtures', "#{name}.json")
  File.read(fixture_path)
end
require 'guard/jasmine/runner'
RSpec.describe Guard::Jasmine::Runner do
let(:formatter) { Guard::Jasmine::Formatter }
let(:defaults) do
Guard::Jasmine::DEFAULT_OPTIONS.merge(
jasmine_url: 'http://localhost:8888/jasmine',
phantomjs_bin: '/usr/local/bin/phantomjs',
spec_dir: 'spec/javascripts'
)
end
let(:runner) { Guard::Jasmine::Runner.new(defaults) }
let(:phantomjs_empty_response) do
''
end
let(:phantomjs_invalid_response) do
<<-JSON
{ 1 }
JSON
end
let(:phantomjs_failure_response) { read_fixture('failure') }
let(:phantomjs_success_response) { read_fixture('success') }
let(:phantomjs_coverage_response) { read_fixture('coverage') }
let(:phantomjs_error_response) { '{ "error": "Cannot request Jasmine specs" }' }
let(:phantomjs_command) { "/usr/local/bin/phantomjs #{@project_path}/lib/guard/jasmine/phantomjs/guard-jasmine.js" }
before do
allow(formatter).to receive(:info)
allow(formatter).to receive(:debug)
allow(formatter).to receive(:error)
allow(formatter).to receive(:spec_failed)
allow(formatter).to receive(:suite_name)
allow(formatter).to receive(:notify)
allow(runner).to receive(:`) # `
allow(runner).to receive(:update_coverage)
allow(Guard::Compat::UI).to receive(:color_enabled?).and_return(true)
allow(Guard::Compat::UI).to receive(:info)
end
describe '#run' do
before do
allow(File).to receive(:foreach).and_yield 'describe "ErrorTest", ->'
allow(File).to receive(:exist?).and_return(true)
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_error_response)
end
context 'when passed an empty paths list' do
it 'returns false' do
expect(runner.run([])).to be_empty
end
end
context 'when the spec file does not exist' do
it 'does nothing' do
allow(File).to receive(:exist?).with('spec/javascripts').and_return(false)
expect(runner).not_to receive(:evaluate_response)
runner.run(['spec/javascripts'])
end
end
context 'when passed a line number' do
before do
allow(File).to receive(:readlines).and_return([
'describe "TestContext", ->', # 1
' describe "Inner TestContext", ->', # 2
' describe "Unrelated TestContext", ->', # 3
' it "does something", ->', # 4
' # some code', # 5
' # some assertion', # 6
' it "does something else", ->', # 7
' # some assertion', # 8
' it "does something a lot else", ->', # 9
' # some assertion' # 10
])
end
context 'with custom parameters' do
  # FIX: typo in the example description ("parmeters" -> "parameters").
  # Custom query_params are merged into the Jasmine URL alongside the
  # ?spec= filter derived from the file's describe.
  it 'sets the url query parameters' do
    expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?debug=true&myval=1&spec=ErrorTest\" 60000", 'r:UTF-8')
    runner.run(['spec/javascripts/a.js.coffee'], query_params: { debug: true, myval: 1 })
  end
end
context 'with the spec file name' do
it 'executes the example for line number on example' do
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=TestContext%20Inner%20TestContext%20does%20something%20else\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/a.js.coffee:7'])
end
it 'executes the example for line number within example' do
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=TestContext%20Inner%20TestContext%20does%20something%20else\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/a.js.coffee:8'])
end
it 'executes all examples within describe' do
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=TestContext\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/a.js.coffee:1'])
end
end
context 'with the cli argument' do
it 'executes the example for line number on example' do
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=TestContext%20Inner%20TestContext%20does%20something%20else\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/a.js.coffee'], line_number: 7)
end
it 'also sets custom parameters' do
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?debug=true&spec=TestContext%20Inner%20TestContext%20does%20something%20else\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/a.js.coffee'], line_number: 7, query_params: { debug: true })
end
end
end
context 'when passed the spec directory' do
# Running the whole directory means no ?spec= filter is appended to the URL.
it 'requests all jasmine specs from the server' do
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts'], notification: false)
end
it 'shows a start information in the console' do
expect(formatter).to receive(:info).with('Run all Jasmine suites', reset: true)
runner.run(['spec/javascripts'])
end
end
context 'for an erroneous Jasmine runner' do
it 'requests the jasmine specs from the server' do
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=ErrorTest\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/a.js.coffee'])
end
it 'shows the error in the console' do
expect(formatter).to receive(:error).with(
'An error occurred: Cannot request Jasmine specs'
)
runner.run(['spec/javascripts/a.js.coffee'])
end
it 'returns the errors' do
response = runner.run(['spec/javascripts/a.js.coffee'])
expect(response).to have_key('spec/javascripts/a.js.coffee')
end
it 'does not show coverage' do
expect(runner).not_to receive(:notify_coverage_result)
runner.run(['spec/javascripts/a.js.coffee'])
end
context 'with notifications' do
it 'shows an error notification' do
expect(formatter).to receive(:notify).with(
'An error occurred: Cannot request Jasmine specs',
title: 'Jasmine error',
image: :failed,
priority: 2
)
runner.run(['spec/javascripts/a.js.coffee'])
end
end
context 'without notifications' do
it 'does not shows an error notification' do
expect(formatter).not_to receive(:notify)
runner.run(['spec/javascripts/a.js.coffee'], notification: false)
end
end
end
context 'exceptions for the CLI runner' do
before do
allow(File).to receive(:foreach).and_yield 'describe "FailureTest", ->'
end
it 'raises an error with an empty JSON response' do
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_empty_response)
expect do
runner.run(['spec/javascripts/x/b.js.coffee'], is_cli: true)
end.to raise_error 'No response from Jasmine runner'
end
it 'raises an error with an invalid JSON response' do
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_invalid_response)
expect do
runner.run(['spec/javascripts/x/b.js.coffee'], is_cli: true)
end.to raise_error "Cannot decode JSON from PhantomJS runner, message received was:\n#{phantomjs_invalid_response}"
end
it 'raises an error with an error JSON response' do
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_error_response)
expect do
runner.run(['spec/javascripts/x/b.js.coffee'], is_cli: true)
end.to raise_error 'Runner error: Cannot request Jasmine specs'
end
end
context 'for a failing Jasmine runner' do
before do
allow(File).to receive(:foreach).and_yield 'describe "FailureTest", ->'
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_failure_response)
end
it 'requests the jasmine specs from the server' do
expect(File).to receive(:foreach).with('spec/javascripts/x/b.js.coffee').and_yield 'describe "FailureTest", ->'
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=FailureTest\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/x/b.js.coffee'])
end
it 'returns the failures' do
response = runner.run(['spec/javascripts/x/b.js.coffee'])
expect(response).to have_key('spec/javascripts/x/b.js.coffee')
end
it 'does not show coverage' do
expect(runner).not_to receive(:notify_coverage_result)
runner.run(['spec/javascripts/a.js.coffee'])
end
context 'with the specdoc set to :always' do
  # FIX: typo in the example description ("pendign" -> "pending").
  it 'shows the pending specs' do
    expect(formatter).to receive(:spec_pending).with(
      ' ○ Horribly Broken Spec'
    )
    runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :always, console: :never, errors: :never)
  end
end
context 'with the specdoc set to :never' do
context 'and console and errors set to :never' do
it 'shows the summary in the console' do
expect(formatter).to receive(:info).with(
'Run Jasmine suite spec/javascripts/x/b.js.coffee', reset: true
)
expect(formatter).not_to receive(:suite_name)
expect(formatter).not_to receive(:spec_failed)
expect(formatter).to receive(:error).with('4 specs, 1 pending, 2 failures')
runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :never, console: :never, errors: :never)
end
it 'hides the pending specs' do
expect(formatter).to_not receive(:spec_pending).with(
' ○ Horribly Broken Spec'
)
runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :never, console: :never, errors: :never)
end
end
context 'and console set to :failure' do
  it 'hides all messages' do
    # FIX: the original repeated identical `not_to receive(:suite_name)` /
    # `not_to receive(:spec_failed)` expectations several times over
    # (copy-paste duplication); a single expectation per message suffices
    # for rspec-mocks and keeps the example readable.
    expect(formatter).not_to receive(:suite_name)
    expect(formatter).not_to receive(:spec_failed)
    expect(formatter).not_to receive(:success)
    expect(formatter).to receive(:info).with(
      'Run Jasmine suite spec/javascripts/x/b.js.coffee', reset: true
    )
    expect(formatter).to receive(:info).with(
      'Finished in 0.01 seconds'
    )
    runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :never)
  end
end
context 'and console set to :always' do
  it 'hides all messages' do
    # FIX: the original mixed `not_to` and `to_not` while repeating the same
    # negative expectations several times (copy-paste duplication); one
    # expectation per message is sufficient for rspec-mocks, and `not_to`
    # is used consistently.
    expect(formatter).not_to receive(:suite_name)
    expect(formatter).not_to receive(:spec_failed)
    expect(formatter).not_to receive(:success)
    expect(formatter).to receive(:info).with(
      'Run Jasmine suite spec/javascripts/x/b.js.coffee', reset: true
    )
    expect(formatter).to receive(:info).with(
      'Finished in 0.01 seconds'
    )
    runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :never, console: :always)
  end
end
end
context 'with the specdoc set either :always or :failure' do
it 'shows the failed suites' do
expect(formatter).to receive(:suite_name).with(
'Failure suite'
)
expect(formatter).to receive(:spec_failed).with(
' ✘ Failure spec tests something'
)
expect(formatter).to receive(:spec_failed).with(
' ➤ ReferenceError: Can\'t find variable: a'
)
expect(formatter).to receive(:spec_failed).with(
' ➜ /path/to/file.js on line 255'
)
expect(formatter).to receive(:suite_name).with(
' Nested failure suite'
)
expect(formatter).to receive(:spec_failed).with(
' ✘ Failure spec 2 tests something'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always)
end
context 'with focus enabled' do
context 'and console and error set to :never' do
it 'does not show the passed specs' do
expect(formatter).not_to receive(:success).with(
' ✔ Success spec tests something'
)
expect(formatter).not_to receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
expect(formatter).not_to receive(:info).with(
' • Another console.log message'
)
expect(formatter).not_to receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :never, errors: :never, focus: true)
end
end
context 'and console and errors set to :failure' do
it 'shows the failed specs with logs' do
expect(formatter).to receive(:info).with(
' • console.log message'
)
expect(formatter).to_not receive(:success).with(
' ✔ Success spec tests something'
)
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
expect(formatter).to_not receive(:info).with(
' • Another console.log message'
)
expect(formatter).to_not receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :failure, errors: :failure, focus: true)
end
end
context 'and console set to :always' do
it 'shows the passed specs with logs' do
expect(formatter).to_not receive(:success).with(
' ✔ Success spec tests something'
)
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
expect(formatter).to_not receive(:info).with(
' • Another console.log message'
)
expect(formatter).to_not receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always, errors: :always, focus: true)
end
end
end
context 'with focus pending' do
it 'does show the passed specs' do
expect(formatter).to receive(:info).with(
' • Another console.log message'
)
expect(formatter).to receive(:info).with(
' • WARN: And even more console.log messages'
)
expect(formatter).to receive(:success).with(
' ✔ Success spec tests something'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always, focus: false)
end
end
context 'with console logs set to :always' do
it 'shows the failed console logs' do
expect(formatter).to receive(:info).with(
' • console.log message'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always)
end
end
context 'with error logs set to :always' do
it 'shows the errors logs' do
expect(formatter).to receive(:spec_failed).with(
" ➤ ReferenceError: Can't find variable: a"
)
runner.run(['spec/javascripts/x/b.js.coffee'], errors: :always)
end
end
context 'with console logs set to :never' do
it 'does not shows the console logs' do
expect(formatter).to_not receive(:info).with(
' • console.log message'
)
expect(formatter).to_not receive(:info).with(
' • Another console.log message'
)
expect(formatter).to_not receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :never)
end
end
context 'with error logs set to :never' do
it 'does not show the errors logs' do
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Error message in /path/to/file.js on line 255'
)
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
runner.run(['spec/javascripts/x/b.js.coffee'], errors: :never)
end
end
context 'with console logs set to :failure' do
it 'shows the console logs for failed specs' do
expect(formatter).to receive(:info).with(
' • console.log message'
)
expect(formatter).to_not receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :failure)
end
end
context 'with error logs set to :failure' do
it 'shows the error logs for failed specs' do
expect(formatter).to receive(:spec_failed).with(
" ➤ ReferenceError: Can't find variable: a"
)
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
runner.run(['spec/javascripts/x/b.js.coffee'], errors: :failure)
end
end
end
context 'with notifications' do
it 'shows the failing spec notification' do
expect(formatter).to receive(:notify).with(
"ReferenceError: Can't find variable: a in /path/to/file.js:255\nExpected true to equal false. in /path/to/file.js:255\nundefined' is not an object (evaluating 'killer.deployRobots') in model_spec.js:27\n4 specs, 1 pending, 2 failures\nin 0.01 seconds",
title: 'Jasmine suite failed',
image: :failed,
priority: 2
)
runner.run(['spec/javascripts/x/b.js.coffee'])
end
context 'with :max_error_notify' do
it 'shows only a single failing spec notification when set to 1' do
expect(formatter).to receive(:notify).with(
"ReferenceError: Can't find variable: a in /path/to/file.js:255\nExpected true to equal false. in /path/to/file.js:255\n4 specs, 1 pending, 2 failures\nin 0.01 seconds",
title: 'Jasmine suite failed',
image: :failed, priority: 2
)
runner.run(['spec/javascripts/x/b.js.coffee'], max_error_notify: 1)
end
it 'shows two failing specs notification when set to 2' do
expect(formatter).to receive(:notify).with(
"ReferenceError: Can't find variable: a in /path/to/file.js:255\nExpected true to equal false. in /path/to/file.js:255\nundefined' is not an object (evaluating 'killer.deployRobots') in model_spec.js:27\n4 specs, 1 pending, 2 failures\nin 0.01 seconds",
title: 'Jasmine suite failed',
image: :failed, priority: 2
)
runner.run(['spec/javascripts/x/b.js.coffee'], max_error_notify: 2)
end
end
context 'without notifications' do
it 'does not show a failure notification' do
expect(formatter).to_not receive(:notify)
runner.run(['spec/javascripts/x/b.js.coffee'], notification: false)
end
end
end
end
context 'for a successful Jasmine runner' do
before do
allow(File).to receive(:foreach).and_yield 'describe("SuccessTest", function() {'
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_success_response)
end
it 'requests the jasmine specs from the server' do
expect(File).to receive(:foreach).with('spec/javascripts/t.js').and_yield 'describe("SuccessTest", function() {'
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=SuccessTest\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/t.js'])
end
it 'returns the success' do
response = runner.run(['spec/javascripts/x/b.js.coffee'])
expect(response).to be_empty
end
context 'with coverage' do
context 'when coverage is present' do
before do
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_coverage_response)
allow(runner).to receive(:coverage_bin).and_return('/bin/istanbul')
allow(runner).to receive(:coverage_file).and_return('tmp/coverage.json')
allow(runner).to receive(:coverage_root).and_return('/projects/secret')
end
it 'notifies coverage when present' do
expect(runner).to receive(:notify_coverage_result)
runner.run(['spec/javascripts/t.js.coffee'], coverage: true)
end
context 'checking the coverage' do
before do
allow(runner).to receive(:generate_text_report)
end
it 'can check for statements coverage' do
expect(runner).to receive(:`).with('/bin/istanbul check-coverage --statements 12 tmp/coverage.json 2>&1').and_return '' # `
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, statements_threshold: 12)
end
it 'can check for functions coverage' do
expect(runner).to receive(:`).with('/bin/istanbul check-coverage --functions 12 tmp/coverage.json 2>&1').and_return '' # `
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, functions_threshold: 12)
end
it 'can check for branches coverage' do
expect(runner).to receive(:`).with('/bin/istanbul check-coverage --branches 12 tmp/coverage.json 2>&1').and_return '' # `
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, branches_threshold: 12)
end
it 'can check for lines coverage' do
expect(runner).to receive(:`).with('/bin/istanbul check-coverage --lines 12 tmp/coverage.json 2>&1').and_return ''
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, lines_threshold: 12)
end
context 'when enough is covered' do
before do
expect(runner).to receive(:`).and_return '' # `
end
it 'shows the success message' do
expect(formatter).to receive(:success).with("1 spec, 0 failures")
expect(formatter).to receive(:success).with('Code coverage succeed')
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, lines_threshold: 12)
end
it 'notifies the coverage success when not turned off' do
expect(formatter).to receive(:notify).with('All code is adequately covered with specs', title: 'Code coverage succeed')
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, lines_threshold: 12)
end
end
end
context 'without coverage summary' do
let(:text_report) do
<<-EOL
Using reporter [text]
-------------------------------+-----------+-----------+-----------+-----------+
File | % Stmts |% Branches | % Funcs | % Lines |
-------------------------------+-----------+-----------+-----------+-----------+
app/ | 98.04 | 75.86 | 86.67 | 98.04 |
test1.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |
test2.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |
-------------------------------+-----------+-----------+-----------+-----------+
All files | 98.04 | 75.86 | 86.67 | 98.04 |
-------------------------------+-----------+-----------+-----------+-----------+
done
EOL
end
before do
expect(runner).to receive(:`).with('/bin/istanbul report --root /projects/secret text tmp/coverage.json').and_return text_report # `
allow(runner).to receive(:check_coverage)
allow(runner).to receive(:puts)
end
it 'shows the summary text info' do
expect(formatter).to receive(:info).with('Spec coverage details:')
runner.run(['app/test1.js.coffee'], coverage: true)
end
context 'when running all specs' do
it 'shows all the important text report entries' do
expect(runner).to receive(:puts).with ''
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with 'File | % Stmts |% Branches | % Funcs | % Lines |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with ' app/ | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with ' test1.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with ' test2.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with 'All files | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with ''
runner.run(['spec/javascripts'], coverage: true)
end
end
context 'when running a single spec' do
it 'shows the single text report entry with its directory' do
expect(runner).to receive(:puts).with ''
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with 'File | % Stmts |% Branches | % Funcs | % Lines |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with ' app/ | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with ' test1.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with 'All files | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with ''
runner.run(['app/test1.js.coffee'], coverage: true)
end
end
end
context 'with coverage summary' do
let(:text_summary_report) do
<<-EOL
Using reporter [text-summary]
Statements : 98.04% ( 50/51 )
Branches : 75.86% ( 22/29 )
Functions : 86.67% ( 13/15 )
Lines : 98.04% ( 50/51 )
done
EOL
end
before do
expect(runner).to receive(:`).with('/bin/istanbul report --root /projects/secret text-summary tmp/coverage.json').and_return text_summary_report # `
allow(runner).to receive(:check_coverage)
allow(runner).to receive(:puts)
end
it 'shows the summary text info' do
expect(formatter).to receive(:info).with('Spec coverage summary:')
runner.run(['app/test1.js.coffee'], coverage: true, coverage_summary: true)
end
it 'shows the summary text report' do
expect(runner).to receive(:puts).with ''
expect(runner).to receive(:puts).with 'Statements : 98.04% ( 50/51 )'
expect(runner).to receive(:puts).with 'Branches : 75.86% ( 22/29 )'
expect(runner).to receive(:puts).with 'Functions : 86.67% ( 13/15 )'
expect(runner).to receive(:puts).with 'Lines : 98.04% ( 50/51 )'
expect(runner).to receive(:puts).with ''
runner.run(['app/test1.js.coffee'], coverage: true, coverage_summary: true)
end
end
context 'with coverage html report enabled' do
before do
allow(runner).to receive(:generate_text_report)
allow(runner).to receive(:`) # `
allow(runner).to receive(:check_coverage)
allow(runner).to receive(:coverage_report_directory).and_return('/coverage/report/directory')
end
it 'generates the html report' do
expect(runner).to receive(:`).with('/bin/istanbul report --dir /coverage/report/directory --root /projects/secret html tmp/coverage.json') # `
runner.run(['app/test1.js.coffee'], coverage: true, coverage_html: true)
end
it 'outputs the html report index page' do
expect(formatter).to receive(:info).with('Updated HTML report available at: /coverage/report/directory/index.html')
runner.run(['app/test1.js.coffee'], coverage: true, coverage_html: true)
end
end
context 'with the coverage html directory set' do
before do
allow(runner).to receive(:generate_text_report)
allow(runner).to receive(:`) # `
allow(runner).to receive(:check_coverage)
end
it 'uses the passed in file path' do
expect(runner).to receive(:coverage_report_directory)
runner.run(['app/test1.js.coffee'], coverage: true, coverage_html: true, coverage_html_dir: 'test/directory/')
end
end
context 'when istanbul is not found' do
it 'prints an error message telling the user istanbul could not be found' do
allow(runner).to receive(:coverage_bin).and_return(nil)
expect(formatter).to receive(:error).with('Skipping coverage report: unable to locate istanbul in your PATH')
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, statements_threshold: 12)
end
end
end
end
context 'with the specdoc set to :always' do
it 'shows the specdoc in the console' do
expect(formatter).to receive(:info).with(
'Run Jasmine suite spec/javascripts/x/t.js', reset: true
)
expect(formatter).to receive(:suite_name).with(
'Success suite'
)
expect(formatter).to receive(:suite_name).with(
' Nested success suite'
)
expect(formatter).to receive(:success).with(
' ✔ Success nested test tests something'
)
expect(formatter).to receive(:success).with(
'3 specs, 0 failures'
)
expect(formatter).to receive(:success).with(
' ✔ Success test tests something'
)
expect(formatter).to receive(:success).with(
' ✔ Another success test tests something'
)
runner.run(['spec/javascripts/x/t.js'], specdoc: :always)
end
context 'with console logs set to :always' do
it 'shows the console logs' do
expect(formatter).to receive(:info).with(
'Run Jasmine suite spec/javascripts/x/b.js.coffee', reset: true
)
expect(formatter).to receive(:info).with(
' • I can haz console.logs'
)
runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :always, console: :always)
end
end
context 'with console logs set to :never' do
it 'does not shows the console logs' do
expect(formatter).to_not receive(:info).with(
' • I can haz console.logs'
)
runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :always, console: :never)
end
end
end
context 'with the specdoc set to :never or :failure' do
it 'shows the summary in the console' do
expect(formatter).to receive(:info).with(
'Run Jasmine suite spec/javascripts/x/t.js', reset: true
)
expect(formatter).to_not receive(:suite_name)
expect(formatter).to receive(:success).with(
'3 specs, 0 failures'
)
runner.run(['spec/javascripts/x/t.js'], specdoc: :never)
end
context 'with console logs set to :always' do
it 'does not show the console logs' do
expect(formatter).to_not receive(:info).with(
' • I\'m a nested spec'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always)
end
end
end
context 'with notifications' do
it 'shows a success notification' do
expect(formatter).to receive(:notify).with(
"3 specs, 0 failures\nin 0.01 seconds",
title: 'Jasmine suite passed'
)
runner.run(['spec/javascripts/t.js'])
end
context 'with hide success notifications' do
it 'does not shows a success notification' do
expect(formatter).to_not receive(:notify)
runner.run(['spec/javascripts/t.js'], notification: true, hide_success: true)
end
end
end
context 'without notifications' do
it 'does not shows a success notification' do
expect(formatter).to_not receive(:notify)
runner.run(['spec/javascripts/t.js'], notification: false)
end
end
end
end
end
quote backticks to fix editor syntax highlighting
# coding: utf-8
require 'pathname'
# Returns the raw contents of the JSON fixture `name` (without extension)
# stored in the `fixtures` directory next to this spec file.
def read_fixture(name)
  fixture_path = File.join(File.dirname(__FILE__), 'fixtures', "#{name}.json")
  File.read(fixture_path)
end
require 'guard/jasmine/runner'
RSpec.describe Guard::Jasmine::Runner do
# The formatter is fully stubbed in the `before` block so specs can set
# message expectations on it.
let(:formatter) { Guard::Jasmine::Formatter }

# Default runner options pointing at a local Jasmine server and a fixed
# PhantomJS binary path so the generated shell commands are predictable.
let(:defaults) do
  Guard::Jasmine::DEFAULT_OPTIONS.merge(
    jasmine_url: 'http://localhost:8888/jasmine',
    phantomjs_bin: '/usr/local/bin/phantomjs',
    spec_dir: 'spec/javascripts'
  )
end

let(:runner) { Guard::Jasmine::Runner.new(defaults) }

# Canned PhantomJS outputs used to drive the runner's different code paths.
let(:phantomjs_empty_response) do
  ''
end
let(:phantomjs_invalid_response) do
  <<-JSON
{ 1 }
  JSON
end
let(:phantomjs_failure_response) { read_fixture('failure') }
let(:phantomjs_success_response) { read_fixture('success') }
let(:phantomjs_coverage_response) { read_fixture('coverage') }
let(:phantomjs_error_response) { '{ "error": "Cannot request Jasmine specs" }' }
# Full shell command used to invoke the PhantomJS runner script.
# NOTE(review): relies on @project_path being assigned outside this file — confirm.
let(:phantomjs_command) { "/usr/local/bin/phantomjs #{@project_path}/lib/guard/jasmine/phantomjs/guard-jasmine.js" }
# Silence all formatter output and stub the runner's side effects so specs
# only observe the message expectations they set explicitly.
before do
  allow(formatter).to receive(:info)
  allow(formatter).to receive(:debug)
  allow(formatter).to receive(:error)
  allow(formatter).to receive(:spec_failed)
  allow(formatter).to receive(:suite_name)
  allow(formatter).to receive(:notify)
  # Stub Kernel#` on the runner so no real shell command is executed.
  allow(runner).to receive(:'`')
  allow(runner).to receive(:update_coverage)
  allow(Guard::Compat::UI).to receive(:color_enabled?).and_return(true)
  allow(Guard::Compat::UI).to receive(:info)
end
describe '#run' do
before do
  # Every spec file appears to contain a single describe so the runner can
  # derive the ?spec= filter; IO.popen yields the canned error response
  # unless an individual spec overrides it.
  allow(File).to receive(:foreach).and_yield 'describe "ErrorTest", ->'
  allow(File).to receive(:exist?).and_return(true)
  allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_error_response)
end

context 'when passed an empty paths list' do
  # NOTE(review): the description says "returns false" but the assertion
  # checks for an empty result — consider renaming the example.
  it 'returns false' do
    expect(runner.run([])).to be_empty
  end
end

context 'when the spec file does not exist' do
  it 'does nothing' do
    allow(File).to receive(:exist?).with('spec/javascripts').and_return(false)
    expect(runner).not_to receive(:evaluate_response)
    runner.run(['spec/javascripts'])
  end
end
# Specs can be targeted by line number, either via a `file:line` suffix on
# the path or via the :line_number option. The runner maps the line to the
# innermost enclosing `it`/`describe` and passes it as the ?spec= query.
context 'when passed a line number' do
  before do
    allow(File).to receive(:readlines).and_return([
      'describe "TestContext", ->', # 1
      ' describe "Inner TestContext", ->', # 2
      ' describe "Unrelated TestContext", ->', # 3
      ' it "does something", ->', # 4
      ' # some code', # 5
      ' # some assertion', # 6
      ' it "does something else", ->', # 7
      ' # some assertion', # 8
      ' it "does something a lot else", ->', # 9
      ' # some assertion' # 10
    ])
  end
  context 'with custom parameters' do
    # Fixed typo in the example description ("parmeters" -> "parameters").
    it 'sets the url query parameters' do
      expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?debug=true&myval=1&spec=ErrorTest\" 60000", 'r:UTF-8')
      runner.run(['spec/javascripts/a.js.coffee'], query_params: { debug: true, myval: 1 })
    end
  end
  context 'with the spec file name' do
    it 'executes the example for line number on example' do
      expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=TestContext%20Inner%20TestContext%20does%20something%20else\" 60000", 'r:UTF-8')
      runner.run(['spec/javascripts/a.js.coffee:7'])
    end
    # A line inside an example body resolves to that example.
    it 'executes the example for line number within example' do
      expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=TestContext%20Inner%20TestContext%20does%20something%20else\" 60000", 'r:UTF-8')
      runner.run(['spec/javascripts/a.js.coffee:8'])
    end
    # A line on a describe runs the whole suite.
    it 'executes all examples within describe' do
      expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=TestContext\" 60000", 'r:UTF-8')
      runner.run(['spec/javascripts/a.js.coffee:1'])
    end
  end
  context 'with the cli argument' do
    it 'executes the example for line number on example' do
      expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=TestContext%20Inner%20TestContext%20does%20something%20else\" 60000", 'r:UTF-8')
      runner.run(['spec/javascripts/a.js.coffee'], line_number: 7)
    end
    it 'also sets custom parameters' do
      expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?debug=true&spec=TestContext%20Inner%20TestContext%20does%20something%20else\" 60000", 'r:UTF-8')
      runner.run(['spec/javascripts/a.js.coffee'], line_number: 7, query_params: { debug: true })
    end
  end
end
context 'when passed the spec directory' do
it 'requests all jasmine specs from the server' do
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts'], notification: false)
end
it 'shows a start information in the console' do
expect(formatter).to receive(:info).with('Run all Jasmine suites', reset: true)
runner.run(['spec/javascripts'])
end
end
context 'for an erroneous Jasmine runner' do
it 'requests the jasmine specs from the server' do
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=ErrorTest\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/a.js.coffee'])
end
it 'shows the error in the console' do
expect(formatter).to receive(:error).with(
'An error occurred: Cannot request Jasmine specs'
)
runner.run(['spec/javascripts/a.js.coffee'])
end
it 'returns the errors' do
response = runner.run(['spec/javascripts/a.js.coffee'])
expect(response).to have_key('spec/javascripts/a.js.coffee')
end
it 'does not show coverage' do
expect(runner).not_to receive(:notify_coverage_result)
runner.run(['spec/javascripts/a.js.coffee'])
end
context 'with notifications' do
it 'shows an error notification' do
expect(formatter).to receive(:notify).with(
'An error occurred: Cannot request Jasmine specs',
title: 'Jasmine error',
image: :failed,
priority: 2
)
runner.run(['spec/javascripts/a.js.coffee'])
end
end
context 'without notifications' do
it 'does not shows an error notification' do
expect(formatter).not_to receive(:notify)
runner.run(['spec/javascripts/a.js.coffee'], notification: false)
end
end
end
context 'exceptions for the CLI runner' do
before do
allow(File).to receive(:foreach).and_yield 'describe "FailureTest", ->'
end
it 'raises an error with an empty JSON response' do
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_empty_response)
expect do
runner.run(['spec/javascripts/x/b.js.coffee'], is_cli: true)
end.to raise_error 'No response from Jasmine runner'
end
it 'raises an error with an invalid JSON response' do
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_invalid_response)
expect do
runner.run(['spec/javascripts/x/b.js.coffee'], is_cli: true)
end.to raise_error "Cannot decode JSON from PhantomJS runner, message received was:\n#{phantomjs_invalid_response}"
end
it 'raises an error with an error JSON response' do
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_error_response)
expect do
runner.run(['spec/javascripts/x/b.js.coffee'], is_cli: true)
end.to raise_error 'Runner error: Cannot request Jasmine specs'
end
end
context 'for a failing Jasmine runner' do
before do
allow(File).to receive(:foreach).and_yield 'describe "FailureTest", ->'
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_failure_response)
end
it 'requests the jasmine specs from the server' do
expect(File).to receive(:foreach).with('spec/javascripts/x/b.js.coffee').and_yield 'describe "FailureTest", ->'
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=FailureTest\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/x/b.js.coffee'])
end
it 'returns the failures' do
response = runner.run(['spec/javascripts/x/b.js.coffee'])
expect(response).to have_key('spec/javascripts/x/b.js.coffee')
end
it 'does not show coverage' do
expect(runner).not_to receive(:notify_coverage_result)
runner.run(['spec/javascripts/a.js.coffee'])
end
context 'with the specdoc set to :always' do
  # Pending specs are reported even when console and error output are
  # suppressed. (Fixed typo in the description: "pendign" -> "pending".)
  it 'shows the pending specs' do
    expect(formatter).to receive(:spec_pending).with(
      ' ○ Horribly Broken Spec'
    )
    runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :always, console: :never, errors: :never)
  end
end
context 'with the specdoc set to :never' do
context 'and console and errors set to :never' do
it 'shows the summary in the console' do
expect(formatter).to receive(:info).with(
'Run Jasmine suite spec/javascripts/x/b.js.coffee', reset: true
)
expect(formatter).not_to receive(:suite_name)
expect(formatter).not_to receive(:spec_failed)
expect(formatter).to receive(:error).with('4 specs, 1 pending, 2 failures')
runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :never, console: :never, errors: :never)
end
it 'hides the pending specs' do
expect(formatter).to_not receive(:spec_pending).with(
' ○ Horribly Broken Spec'
)
runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :never, console: :never, errors: :never)
end
end
context 'and console set to :failure' do
  # Only the run header and the timing summary are printed; suite names,
  # spec results and the success summary are all suppressed.
  # (Removed copy-pasted duplicate negative expectations — each message
  # only needs to be forbidden once.)
  it 'hides all messages' do
    expect(formatter).not_to receive(:suite_name)
    expect(formatter).not_to receive(:spec_failed)
    expect(formatter).not_to receive(:success)
    expect(formatter).to receive(:info).with(
      'Run Jasmine suite spec/javascripts/x/b.js.coffee', reset: true
    )
    expect(formatter).to receive(:info).with(
      'Finished in 0.01 seconds'
    )
    runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :never)
  end
end
context 'and console set to :always' do
  # Even with console output enabled, specdoc :never suppresses all suite
  # and spec messages; only the run header and timing summary appear.
  # (Removed copy-pasted duplicate expectations and normalized the mixed
  # to_not / not_to usage to not_to.)
  it 'hides all messages' do
    expect(formatter).not_to receive(:suite_name)
    expect(formatter).not_to receive(:spec_failed)
    expect(formatter).not_to receive(:success)
    expect(formatter).to receive(:info).with(
      'Run Jasmine suite spec/javascripts/x/b.js.coffee', reset: true
    )
    expect(formatter).to receive(:info).with(
      'Finished in 0.01 seconds'
    )
    runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :never, console: :always)
  end
end
end
context 'with the specdoc set either :always or :failure' do
it 'shows the failed suites' do
expect(formatter).to receive(:suite_name).with(
'Failure suite'
)
expect(formatter).to receive(:spec_failed).with(
' ✘ Failure spec tests something'
)
expect(formatter).to receive(:spec_failed).with(
' ➤ ReferenceError: Can\'t find variable: a'
)
expect(formatter).to receive(:spec_failed).with(
' ➜ /path/to/file.js on line 255'
)
expect(formatter).to receive(:suite_name).with(
' Nested failure suite'
)
expect(formatter).to receive(:spec_failed).with(
' ✘ Failure spec 2 tests something'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always)
end
context 'with focus enabled' do
context 'and console and error set to :never' do
it 'does not show the passed specs' do
expect(formatter).not_to receive(:success).with(
' ✔ Success spec tests something'
)
expect(formatter).not_to receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
expect(formatter).not_to receive(:info).with(
' • Another console.log message'
)
expect(formatter).not_to receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :never, errors: :never, focus: true)
end
end
context 'and console and errors set to :failure' do
it 'shows the failed specs with logs' do
expect(formatter).to receive(:info).with(
' • console.log message'
)
expect(formatter).to_not receive(:success).with(
' ✔ Success spec tests something'
)
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
expect(formatter).to_not receive(:info).with(
' • Another console.log message'
)
expect(formatter).to_not receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :failure, errors: :failure, focus: true)
end
end
context 'and console set to :always' do
it 'shows the passed specs with logs' do
expect(formatter).to_not receive(:success).with(
' ✔ Success spec tests something'
)
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
expect(formatter).to_not receive(:info).with(
' • Another console.log message'
)
expect(formatter).to_not receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always, errors: :always, focus: true)
end
end
end
context 'with focus pending' do
it 'does show the passed specs' do
expect(formatter).to receive(:info).with(
' • Another console.log message'
)
expect(formatter).to receive(:info).with(
' • WARN: And even more console.log messages'
)
expect(formatter).to receive(:success).with(
' ✔ Success spec tests something'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always, focus: false)
end
end
context 'with console logs set to :always' do
it 'shows the failed console logs' do
expect(formatter).to receive(:info).with(
' • console.log message'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always)
end
end
context 'with error logs set to :always' do
it 'shows the errors logs' do
expect(formatter).to receive(:spec_failed).with(
" ➤ ReferenceError: Can't find variable: a"
)
runner.run(['spec/javascripts/x/b.js.coffee'], errors: :always)
end
end
context 'with console logs set to :never' do
it 'does not shows the console logs' do
expect(formatter).to_not receive(:info).with(
' • console.log message'
)
expect(formatter).to_not receive(:info).with(
' • Another console.log message'
)
expect(formatter).to_not receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :never)
end
end
context 'with error logs set to :never' do
it 'does not show the errors logs' do
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Error message in /path/to/file.js on line 255'
)
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
runner.run(['spec/javascripts/x/b.js.coffee'], errors: :never)
end
end
context 'with console logs set to :failure' do
it 'shows the console logs for failed specs' do
expect(formatter).to receive(:info).with(
' • console.log message'
)
expect(formatter).to_not receive(:info).with(
' • WARN: And even more console.log messages'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :failure)
end
end
context 'with error logs set to :failure' do
it 'shows the error logs for failed specs' do
expect(formatter).to receive(:spec_failed).with(
" ➤ ReferenceError: Can't find variable: a"
)
expect(formatter).to_not receive(:spec_failed).with(
' ➜ Exception: Another error message in /path/to/file.js on line 255'
)
runner.run(['spec/javascripts/x/b.js.coffee'], errors: :failure)
end
end
end
context 'with notifications' do
it 'shows the failing spec notification' do
expect(formatter).to receive(:notify).with(
"ReferenceError: Can't find variable: a in /path/to/file.js:255\nExpected true to equal false. in /path/to/file.js:255\nundefined' is not an object (evaluating 'killer.deployRobots') in model_spec.js:27\n4 specs, 1 pending, 2 failures\nin 0.01 seconds",
title: 'Jasmine suite failed',
image: :failed,
priority: 2
)
runner.run(['spec/javascripts/x/b.js.coffee'])
end
context 'with :max_error_notify' do
it 'shows only a single failing spec notification when set to 1' do
expect(formatter).to receive(:notify).with(
"ReferenceError: Can't find variable: a in /path/to/file.js:255\nExpected true to equal false. in /path/to/file.js:255\n4 specs, 1 pending, 2 failures\nin 0.01 seconds",
title: 'Jasmine suite failed',
image: :failed, priority: 2
)
runner.run(['spec/javascripts/x/b.js.coffee'], max_error_notify: 1)
end
it 'shows two failing specs notification when set to 2' do
expect(formatter).to receive(:notify).with(
"ReferenceError: Can't find variable: a in /path/to/file.js:255\nExpected true to equal false. in /path/to/file.js:255\nundefined' is not an object (evaluating 'killer.deployRobots') in model_spec.js:27\n4 specs, 1 pending, 2 failures\nin 0.01 seconds",
title: 'Jasmine suite failed',
image: :failed, priority: 2
)
runner.run(['spec/javascripts/x/b.js.coffee'], max_error_notify: 2)
end
end
context 'without notifications' do
it 'does not show a failure notification' do
expect(formatter).to_not receive(:notify)
runner.run(['spec/javascripts/x/b.js.coffee'], notification: false)
end
end
end
end
context 'for a successful Jasmine runner' do
before do
allow(File).to receive(:foreach).and_yield 'describe("SuccessTest", function() {'
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_success_response)
end
it 'requests the jasmine specs from the server' do
expect(File).to receive(:foreach).with('spec/javascripts/t.js').and_yield 'describe("SuccessTest", function() {'
expect(IO).to receive(:popen).with("#{phantomjs_command} \"http://localhost:8888/jasmine?spec=SuccessTest\" 60000", 'r:UTF-8')
runner.run(['spec/javascripts/t.js'])
end
it 'returns the success' do
response = runner.run(['spec/javascripts/x/b.js.coffee'])
expect(response).to be_empty
end
context 'with coverage' do
context 'when coverage is present' do
before do
allow(IO).to receive(:popen).and_return StringIO.new(phantomjs_coverage_response)
allow(runner).to receive(:coverage_bin).and_return('/bin/istanbul')
allow(runner).to receive(:coverage_file).and_return('tmp/coverage.json')
allow(runner).to receive(:coverage_root).and_return('/projects/secret')
end
it 'notifies coverage when present' do
expect(runner).to receive(:notify_coverage_result)
runner.run(['spec/javascripts/t.js.coffee'], coverage: true)
end
context 'checking the coverage' do
before do
allow(runner).to receive(:generate_text_report)
end
it 'can check for statements coverage' do
expect(runner).to receive(:'`').with('/bin/istanbul check-coverage --statements 12 tmp/coverage.json 2>&1').and_return ''
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, statements_threshold: 12)
end
it 'can check for functions coverage' do
expect(runner).to receive(:'`').with('/bin/istanbul check-coverage --functions 12 tmp/coverage.json 2>&1').and_return ''
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, functions_threshold: 12)
end
it 'can check for branches coverage' do
expect(runner).to receive(:'`').with('/bin/istanbul check-coverage --branches 12 tmp/coverage.json 2>&1').and_return ''
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, branches_threshold: 12)
end
it 'can check for lines coverage' do
expect(runner).to receive(:'`').with('/bin/istanbul check-coverage --lines 12 tmp/coverage.json 2>&1').and_return ''
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, lines_threshold: 12)
end
context 'when enough is covered' do
before do
expect(runner).to receive(:'`').and_return ''
end
it 'shows the success message' do
expect(formatter).to receive(:success).with("1 spec, 0 failures")
expect(formatter).to receive(:success).with('Code coverage succeed')
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, lines_threshold: 12)
end
it 'notifies the coverage success when not turned off' do
expect(formatter).to receive(:notify).with('All code is adequately covered with specs', title: 'Code coverage succeed')
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, lines_threshold: 12)
end
end
end
context 'without coverage summary' do
let(:text_report) do
<<-EOL
Using reporter [text]
-------------------------------+-----------+-----------+-----------+-----------+
File | % Stmts |% Branches | % Funcs | % Lines |
-------------------------------+-----------+-----------+-----------+-----------+
app/ | 98.04 | 75.86 | 86.67 | 98.04 |
test1.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |
test2.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |
-------------------------------+-----------+-----------+-----------+-----------+
All files | 98.04 | 75.86 | 86.67 | 98.04 |
-------------------------------+-----------+-----------+-----------+-----------+
done
EOL
end
before do
expect(runner).to receive(:'`').with('/bin/istanbul report --root /projects/secret text tmp/coverage.json').and_return text_report
allow(runner).to receive(:check_coverage)
allow(runner).to receive(:puts)
end
it 'shows the summary text info' do
expect(formatter).to receive(:info).with('Spec coverage details:')
runner.run(['app/test1.js.coffee'], coverage: true)
end
context 'when running all specs' do
it 'shows all the important text report entries' do
expect(runner).to receive(:puts).with ''
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with 'File | % Stmts |% Branches | % Funcs | % Lines |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with ' app/ | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with ' test1.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with ' test2.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with 'All files | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with ''
runner.run(['spec/javascripts'], coverage: true)
end
end
context 'when running a single spec' do
it 'shows the single text report entry with its directory' do
expect(runner).to receive(:puts).with ''
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with 'File | % Stmts |% Branches | % Funcs | % Lines |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with ' app/ | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with ' test1.js.coffee.erb | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with 'All files | 98.04 | 75.86 | 86.67 | 98.04 |'
expect(runner).to receive(:puts).with '-------------------------------+-----------+-----------+-----------+-----------+'
expect(runner).to receive(:puts).with ''
runner.run(['app/test1.js.coffee'], coverage: true)
end
end
end
context 'with coverage summary' do
let(:text_summary_report) do
<<-EOL
Using reporter [text-summary]
Statements : 98.04% ( 50/51 )
Branches : 75.86% ( 22/29 )
Functions : 86.67% ( 13/15 )
Lines : 98.04% ( 50/51 )
done
EOL
end
before do
expect(runner).to receive(:'`').with('/bin/istanbul report --root /projects/secret text-summary tmp/coverage.json').and_return text_summary_report
allow(runner).to receive(:check_coverage)
allow(runner).to receive(:puts)
end
it 'shows the summary text info' do
expect(formatter).to receive(:info).with('Spec coverage summary:')
runner.run(['app/test1.js.coffee'], coverage: true, coverage_summary: true)
end
it 'shows the summary text report' do
expect(runner).to receive(:puts).with ''
expect(runner).to receive(:puts).with 'Statements : 98.04% ( 50/51 )'
expect(runner).to receive(:puts).with 'Branches : 75.86% ( 22/29 )'
expect(runner).to receive(:puts).with 'Functions : 86.67% ( 13/15 )'
expect(runner).to receive(:puts).with 'Lines : 98.04% ( 50/51 )'
expect(runner).to receive(:puts).with ''
runner.run(['app/test1.js.coffee'], coverage: true, coverage_summary: true)
end
end
context 'with coverage html report enabled' do
before do
allow(runner).to receive(:generate_text_report)
allow(runner).to receive(:'`')
allow(runner).to receive(:check_coverage)
allow(runner).to receive(:coverage_report_directory).and_return('/coverage/report/directory')
end
it 'generates the html report' do
expect(runner).to receive(:'`').with('/bin/istanbul report --dir /coverage/report/directory --root /projects/secret html tmp/coverage.json')
runner.run(['app/test1.js.coffee'], coverage: true, coverage_html: true)
end
it 'outputs the html report index page' do
expect(formatter).to receive(:info).with('Updated HTML report available at: /coverage/report/directory/index.html')
runner.run(['app/test1.js.coffee'], coverage: true, coverage_html: true)
end
end
context 'with the coverage html directory set' do
before do
allow(runner).to receive(:generate_text_report)
allow(runner).to receive(:'`')
allow(runner).to receive(:check_coverage)
end
it 'uses the passed in file path' do
expect(runner).to receive(:coverage_report_directory)
runner.run(['app/test1.js.coffee'], coverage: true, coverage_html: true, coverage_html_dir: 'test/directory/')
end
end
context 'when istanbul is not found' do
it 'prints an error message telling the user istanbul could not be found' do
allow(runner).to receive(:coverage_bin).and_return(nil)
expect(formatter).to receive(:error).with('Skipping coverage report: unable to locate istanbul in your PATH')
runner.run(['spec/javascripts/t.js.coffee'], coverage: true, statements_threshold: 12)
end
end
end
end
context 'with the specdoc set to :always' do
it 'shows the specdoc in the console' do
expect(formatter).to receive(:info).with(
'Run Jasmine suite spec/javascripts/x/t.js', reset: true
)
expect(formatter).to receive(:suite_name).with(
'Success suite'
)
expect(formatter).to receive(:suite_name).with(
' Nested success suite'
)
expect(formatter).to receive(:success).with(
' ✔ Success nested test tests something'
)
expect(formatter).to receive(:success).with(
'3 specs, 0 failures'
)
expect(formatter).to receive(:success).with(
' ✔ Success test tests something'
)
expect(formatter).to receive(:success).with(
' ✔ Another success test tests something'
)
runner.run(['spec/javascripts/x/t.js'], specdoc: :always)
end
context 'with console logs set to :always' do
it 'shows the console logs' do
expect(formatter).to receive(:info).with(
'Run Jasmine suite spec/javascripts/x/b.js.coffee', reset: true
)
expect(formatter).to receive(:info).with(
' • I can haz console.logs'
)
runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :always, console: :always)
end
end
context 'with console logs set to :never' do
it 'does not shows the console logs' do
expect(formatter).to_not receive(:info).with(
' • I can haz console.logs'
)
runner.run(['spec/javascripts/x/b.js.coffee'], specdoc: :always, console: :never)
end
end
end
context 'with the specdoc set to :never or :failure' do
it 'shows the summary in the console' do
expect(formatter).to receive(:info).with(
'Run Jasmine suite spec/javascripts/x/t.js', reset: true
)
expect(formatter).to_not receive(:suite_name)
expect(formatter).to receive(:success).with(
'3 specs, 0 failures'
)
runner.run(['spec/javascripts/x/t.js'], specdoc: :never)
end
context 'with console logs set to :always' do
it 'does not show the console logs' do
expect(formatter).to_not receive(:info).with(
' • I\'m a nested spec'
)
runner.run(['spec/javascripts/x/b.js.coffee'], console: :always)
end
end
end
context 'with notifications' do
it 'shows a success notification' do
expect(formatter).to receive(:notify).with(
"3 specs, 0 failures\nin 0.01 seconds",
title: 'Jasmine suite passed'
)
runner.run(['spec/javascripts/t.js'])
end
context 'with hide success notifications' do
it 'does not shows a success notification' do
expect(formatter).to_not receive(:notify)
runner.run(['spec/javascripts/t.js'], notification: true, hide_success: true)
end
end
end
context 'without notifications' do
it 'does not shows a success notification' do
expect(formatter).to_not receive(:notify)
runner.run(['spec/javascripts/t.js'], notification: false)
end
end
end
end
end
|
# Passes when the timeline has no selection drawn, i.e. no elements exist
# under the .timeline-selection group.
RSpec::Matchers.define :have_no_selections do
  match do |page|
    expect(page).to have_no_selector(".timeline-selection *")
  end
end
# Passes when the timeline renders a selection fencepost (a <line> element)
# at the pixel position corresponding to the expected time.
RSpec::Matchers.define :have_fencepost do |expected|
  match do |page|
    epoch_ms = expected.to_time.to_i * 1000
    synchronize do
      px = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{epoch_ms})"
      # We match against the first few significant figures because there seems to be a double -> float
      # conversion going on in capybara-webkit that messes with precision. Poltergeist gets it right.
      px_prefix = px.to_s[0, 3]
      fencepost_lines = page.all(%(.timeline-selection line[x1^="#{px_prefix}"]))
      expect(fencepost_lines.size).to be > 0
    end
  end
end
# Passes when the timeline shows a highlighted selection rectangle spanning
# [start, stop], plus a fencepost at each edge of the range.
RSpec::Matchers.define :have_highlighted_selection do |start, stop|
  match do |page|
    from_ms = start.to_time.to_i * 1000
    to_ms = stop.to_time.to_i * 1000
    synchronize do
      from_px = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{from_ms})"
      to_px = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{to_ms})"
      rect_width = to_px - from_px
      # Compare only the first few significant figures to sidestep float
      # precision differences between drivers.
      selection_rects = page.all(%(.timeline-selection rect[x^="#{from_px.to_s[0, 3]}"][width^="#{rect_width.to_s[0, 3]}"]))
      expect(selection_rects.size).to be > 0
    end
    expect(page).to have_fencepost(start)
    expect(page).to have_fencepost(stop)
  end
end
# Passes when the timeline's visible range matches [start, stop], within a
# 1% tolerance to absorb rounding in the JS date math.
RSpec::Matchers.define :have_timeline_range do |start, stop|
  match do |page|
    expected_start_time = start.to_time.to_i * 1000
    expected_end_time = stop.to_time.to_i * 1000
    synchronize do
      actual_start_time = page.evaluate_script "$('#timeline').timeline('startTime')"
      actual_end_time = page.evaluate_script "$('#timeline').timeline('endTime')"
      # Allow 1% variance for rounding errors
      delta = 1000 * (start.to_i - stop.to_i).abs / 100
      expect(actual_start_time).to be_within(delta).of(expected_start_time)
      expect(actual_end_time).to be_within(delta).of(expected_end_time)
    end
  end

  # NOTE(review): failure_message_for_should is the RSpec 2 custom-matcher
  # API; RSpec 3 renamed it to failure_message — confirm the RSpec version
  # in use before upgrading.
  failure_message_for_should do |page|
    actual_start_time = page.evaluate_script "$('#timeline').timeline('startTime')"
    actual_start_time = Time.at(actual_start_time / 1000).utc.to_datetime if actual_start_time.present?
    actual_end_time = page.evaluate_script "$('#timeline').timeline('endTime')"
    actual_end_time = Time.at(actual_end_time / 1000).utc.to_datetime if actual_end_time.present?
    "expected to find a timeline range of #{start} - #{stop}, got #{actual_start_time} - #{actual_end_time}"
  end
end
# Passes when the timeline's end time is `dt` after a fixed "present"
# reference date (2014-03-01T00:00:00Z), within a 1% tolerance.
RSpec::Matchers.define :have_end_time do |dt|
  match do |page|
    present = DateTime.new(2014, 3, 1, 0, 0, 0, '+0')
    expected_end_time = (present + dt).to_i * 1000
    synchronize do
      actual_end_time = page.evaluate_script "$('#timeline').timeline('endTime')"
      # Allow 1% variance for rounding errors
      delta = 1000 * (dt.to_i).abs / 100
      expect(actual_end_time).to be_within(delta).of(expected_end_time)
    end
    true
  end

  # NOTE(review): this failure message computes the expected time from
  # Time.now, while the match block uses the fixed 2014-03-01 reference —
  # the reported expectation may not match what was actually asserted;
  # confirm which reference is intended.
  failure_message_for_should do |page|
    expected_end_time = Time.now + dt
    actual_end_time = page.evaluate_script "$('#timeline').timeline('endTime')"
    actual_end_time = Time.at(actual_end_time / 1000).utc.to_datetime if actual_end_time.present?
    "expected timeline to have end time of #{expected_end_time}, got #{actual_end_time}"
  end
end
# Passes when the element matching `selector` is horizontally translated by
# the pixel distance corresponding to the time span `dt`, within 5px.
RSpec::Matchers.define :have_time_offset do |selector, dt|
  match do |page|
    expected_dt_ms = dt.to_i * 1000
    synchronize do
      expected_dx = page.evaluate_script "-$('#timeline').timeline('timeSpanToPx', #{expected_dt_ms});"
      # The original used adjacent "" "..." "" string literals (a fake
      # triple-quote) — replaced with an explicit heredoc. Reads the
      # element's translate(x) offset; falls back to the sentinel
      # (-1234 + 48) when no transform attribute is present.
      actual_dx = page.evaluate_script <<-JS
        (function() {
          var transform = $('#timeline').find('#{selector}').attr('transform');
          return (transform && transform.length > 0) ? parseInt(transform.replace('translate(', ''), 10) : (-1234 + 48);
        })();
      JS
      # NOTE(review): the 48px subtraction presumably compensates for the
      # timeline's fixed left margin — confirm against the widget code.
      actual_dx -= 48
      # Allow 5px variance for rounding errors
      delta = 5
      expect(expected_dx).to be_within(delta).of(actual_dx)
    end
    true
  end
end
# Matcher: asserts the timeline renders a focus overlay for the given span —
# a rect whose width attribute starts with (start px + 100000) and a rect
# positioned at the end px. Prefix matches (^=) sidestep float precision.
RSpec::Matchers.define :have_focused_time_span do |start, stop|
  match do |page|
    expected_start_time = start.to_time.to_i * 1000
    expected_end_time = stop.to_time.to_i * 1000
    expected_start_px = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{expected_start_time});"
    expected_end_px = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{expected_end_time});"
    # Width offset corrected from 1000000 to 100000, matching the updated
    # overlay width calculation (EDSC-363) — presumably the overlay extends
    # 100000px left of the origin; confirm against the timeline widget.
    expect(page).to have_selector("rect[width^=\"#{(expected_start_px + 100000).to_i}\"]")
    expect(page).to have_selector("rect[x^=\"#{expected_end_px.to_i}\"]")
  end
end
# Matcher: asserts the applied temporal condition (global query when
# dataset_n is nil, otherwise the given project dataset's granules) matches
# start/stop (and optional recurring day range), with 1% timestamp tolerance.
RSpec::Matchers.define :have_temporal do |start, stop, range=nil, dataset_n=nil|
  match do |page|
    condition = []
    condition << start.to_i
    condition << stop.to_i
    condition += range unless range.nil?
    script = "(function(temporal) {"
    script += " return temporal.queryCondition();"
    if dataset_n.nil?
      script += "})(edsc.page.query.temporal.applied);"
    else
      script += "})(edsc.page.project.datasets()[#{dataset_n}].granulesModel.temporal.applied);"
    end
    synchronize do
      actual = page.evaluate_script(script).split(',')
      actual[0] = DateTime.parse(actual[0]).to_i
      actual[1] = DateTime.parse(actual[1]).to_i
      # 1% of the requested span, in seconds
      delta = (start.to_i - stop.to_i).abs / 100
      expect(actual.size).to eql(condition.size)
      expect(actual[0]).to be_within(delta).of(condition[0])
      expect(actual[1]).to be_within(delta).of(condition[1])
      if actual.size > 2
        # NOTE(review): actual[3] is compared against condition[2], not
        # condition[3] — looks like an off-by-one; confirm whether recurring
        # ranges are expected to have identical start/end days.
        expect(actual[2]).to eql(condition[2])
        expect(actual[3]).to eql(condition[2])
      end
    end
    true
  end
  failure_message_for_should do |page|
    script = "(function(temporal) {"
    script += " return temporal.queryCondition();"
    if dataset_n.nil?
      script += "})(edsc.page.query.temporal.applied);"
    else
      script += "})(edsc.page.project.datasets()[#{dataset_n}].granulesModel.temporal.applied);"
    end
    actual = page.evaluate_script(script).split(',').join(' - ')
    "expected a temporal range of #{start} - #{stop}, got #{actual}"
  end
end
# Matcher: passes when no temporal condition is applied, either on the main
# query (dataset_n nil) or on a specific project dataset's granules model.
RSpec::Matchers.define :have_no_temporal do |dataset_n=nil|
  match do |page|
    applied = if dataset_n.nil?
      "edsc.page.query.temporal.applied"
    else
      "edsc.page.project.datasets()[#{dataset_n}].granulesModel.temporal.applied"
    end
    script = ["(function(temporal) {", " return temporal.queryCondition();", "})(#{applied});"].join
    # An empty condition string means no temporal constraint is in effect.
    synchronize do
      expect(page.evaluate_script(script)).to eql('')
    end
    true
  end
end
EDSC-363: Update width calculation for timeline focus matcher
# Matcher: passes when the timeline's selection layer contains no elements.
RSpec::Matchers.define :have_no_selections do
  match { |page| expect(page).to have_no_selector(".timeline-selection *") }
end
# Matcher: asserts a selection fencepost line is drawn at the pixel position
# corresponding to the given time.
RSpec::Matchers.define :have_fencepost do |expected|
  match do |page|
    time = expected.to_time.to_i * 1000  # ms, as the JS widget expects
    synchronize do
      position = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{time})"
      # We match against the first few significant figures because there seems to be a double -> float
      # conversion going on in capybara-webkit that messes with precision. Poltergeist gets it right.
      matches = page.all(".timeline-selection line[x1^=\"#{position.to_s[0, 3]}\"]")
      expect(matches.size).to be > 0
    end
  end
end
# Matcher: asserts a highlighted selection rect spans [start, stop] on the
# timeline, plus a fencepost line at each endpoint.
RSpec::Matchers.define :have_highlighted_selection do |start, stop|
  match do |page|
    start_time = start.to_time.to_i * 1000
    stop_time = stop.to_time.to_i * 1000
    synchronize do
      start_pos = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{start_time})"
      stop_pos = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{stop_time})"
      width = stop_pos - start_pos
      # Prefix match on the first 3 chars to dodge float precision drift.
      matches = page.all(".timeline-selection rect[x^=\"#{start_pos.to_s[0, 3]}\"][width^=\"#{width.to_s[0, 3]}\"]")
      expect(matches.size).to be > 0
    end
    expect(page).to have_fencepost(start)
    expect(page).to have_fencepost(stop)
  end
end
# Matcher: asserts the timeline's visible range matches [start, stop],
# allowing a 1% tolerance (in ms) for rounding.
RSpec::Matchers.define :have_timeline_range do |start, stop|
  match do |page|
    expected_start_time = start.to_time.to_i * 1000
    expected_end_time = stop.to_time.to_i * 1000
    synchronize do
      actual_start_time = page.evaluate_script "$('#timeline').timeline('startTime')"
      actual_end_time = page.evaluate_script "$('#timeline').timeline('endTime')"
      # Allow 1% variance for rounding errors
      delta = 1000 * (start.to_i - stop.to_i).abs / 100
      expect(actual_start_time).to be_within(delta).of(expected_start_time)
      expect(actual_end_time).to be_within(delta).of(expected_end_time)
    end
  end
  failure_message_for_should do |page|
    actual_start_time = page.evaluate_script "$('#timeline').timeline('startTime')"
    actual_start_time = Time.at(actual_start_time / 1000).utc.to_datetime if actual_start_time.present?
    actual_end_time = page.evaluate_script "$('#timeline').timeline('endTime')"
    actual_end_time = Time.at(actual_end_time / 1000).utc.to_datetime if actual_end_time.present?
    "expected to find a timeline range of #{start} - #{stop}, got #{actual_start_time} - #{actual_end_time}"
  end
end
# Matcher: asserts the timeline's end time is `dt` past a fixed "present"
# reference date (2014-03-01 UTC), within a 1% tolerance (in ms).
RSpec::Matchers.define :have_end_time do |dt|
  match do |page|
    # Fixed reference date keeps the assertion deterministic across runs.
    present = DateTime.new(2014, 3, 1, 0, 0, 0, '+0')
    expected_end_time = (present + dt).to_i * 1000
    synchronize do
      actual_end_time = page.evaluate_script "$('#timeline').timeline('endTime')"
      # Allow 1% variance for rounding errors
      delta = 1000 * (dt.to_i).abs / 100
      expect(actual_end_time).to be_within(delta).of(expected_end_time)
    end
    true
  end
  failure_message_for_should do |page|
    # NOTE(review): Time.now here vs. fixed `present` in match — the reported
    # expected time can differ from the one actually asserted; confirm intent.
    expected_end_time = Time.now + dt
    actual_end_time = page.evaluate_script "$('#timeline').timeline('endTime')"
    actual_end_time = Time.at(actual_end_time / 1000).utc.to_datetime if actual_end_time.present?
    "expected timeline to have end time of #{expected_end_time}, got #{actual_end_time}"
  end
end
# Matcher: asserts the element matched by `selector` is horizontally
# translated by the pixel equivalent of time span `dt` (within 5px).
RSpec::Matchers.define :have_time_offset do |selector, dt|
  match do |page|
    expected_dt_ms = dt.to_i * 1000  # timeline API works in milliseconds
    synchronize do
      expected_dx = page.evaluate_script "-$('#timeline').timeline('timeSpanToPx', #{expected_dt_ms});"
      # NOTE(review): """ ... """ is three adjacent string literals that Ruby
      # concatenates — not a special syntax.
      actual_dx = page.evaluate_script """
(function() {
var transform = $('#timeline').find('#{selector}').attr('transform');
return (transform && transform.length > 0) ? parseInt(transform.replace('translate(', ''), 10) : (-1234 + 48);
})();
"""
      # No rounding
      actual_dx -= 48
      # Allow 5px variance for rounding errors
      delta = 5
      expect(expected_dx).to be_within(delta).of(actual_dx)
    end
    true
  end
end
# Matcher: asserts the timeline renders a focus overlay for the given span —
# a rect whose width attribute starts with (start px + 100000) and a rect at
# the end px. Prefix matches (^=) sidestep float precision issues.
RSpec::Matchers.define :have_focused_time_span do |start, stop|
  match do |page|
    expected_start_time = start.to_time.to_i * 1000
    expected_end_time = stop.to_time.to_i * 1000
    expected_start_px = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{expected_start_time});"
    expected_end_px = page.evaluate_script "$('#timeline').timeline('timeToPosition', #{expected_end_time});"
    # presumably the overlay extends 100000px left of the origin — confirm
    # against the timeline widget's width calculation.
    expect(page).to have_selector("rect[width^=\"#{(expected_start_px + 100000).to_i}\"]")
    expect(page).to have_selector("rect[x^=\"#{expected_end_px.to_i}\"]")
  end
end
# Matcher: asserts the applied temporal condition (global query when
# dataset_n is nil, otherwise the given project dataset's granules) matches
# start/stop (and optional recurring day range), with 1% timestamp tolerance.
RSpec::Matchers.define :have_temporal do |start, stop, range=nil, dataset_n=nil|
  match do |page|
    condition = []
    condition << start.to_i
    condition << stop.to_i
    condition += range unless range.nil?
    script = "(function(temporal) {"
    script += " return temporal.queryCondition();"
    if dataset_n.nil?
      script += "})(edsc.page.query.temporal.applied);"
    else
      script += "})(edsc.page.project.datasets()[#{dataset_n}].granulesModel.temporal.applied);"
    end
    synchronize do
      actual = page.evaluate_script(script).split(',')
      actual[0] = DateTime.parse(actual[0]).to_i
      actual[1] = DateTime.parse(actual[1]).to_i
      # 1% of the requested span, in seconds
      delta = (start.to_i - stop.to_i).abs / 100
      expect(actual.size).to eql(condition.size)
      expect(actual[0]).to be_within(delta).of(condition[0])
      expect(actual[1]).to be_within(delta).of(condition[1])
      if actual.size > 2
        # NOTE(review): actual[3] is compared against condition[2], not
        # condition[3] — looks like an off-by-one; confirm whether recurring
        # ranges are expected to have identical start/end days.
        expect(actual[2]).to eql(condition[2])
        expect(actual[3]).to eql(condition[2])
      end
    end
    true
  end
  failure_message_for_should do |page|
    script = "(function(temporal) {"
    script += " return temporal.queryCondition();"
    if dataset_n.nil?
      script += "})(edsc.page.query.temporal.applied);"
    else
      script += "})(edsc.page.project.datasets()[#{dataset_n}].granulesModel.temporal.applied);"
    end
    actual = page.evaluate_script(script).split(',').join(' - ')
    "expected a temporal range of #{start} - #{stop}, got #{actual}"
  end
end
# Matcher: passes when no temporal condition is applied, either on the main
# query (dataset_n nil) or on a specific project dataset's granules model.
RSpec::Matchers.define :have_no_temporal do |dataset_n=nil|
  match do |page|
    script = "(function(temporal) {"
    script += " return temporal.queryCondition();"
    if dataset_n.nil?
      script += "})(edsc.page.query.temporal.applied);"
    else
      script += "})(edsc.page.project.datasets()[#{dataset_n}].granulesModel.temporal.applied);"
    end
    synchronize do
      actual = page.evaluate_script(script)
      # An empty condition string means no temporal constraint is in effect.
      expect(actual).to eql('')
    end
    true
  end
end
|
require File.dirname(__FILE__) + '/../spec_helper'
include ApplicationHelper
include UsersHelper
include AuthenticatedTestHelper

# Specs for UsersHelper#link_to_user: href/text/title rendering and CSS classes.
describe UsersHelper do
  let(:user) { User.make! }
  describe "link_to_user" do
    it "should return an error string on a nil user" do
      expect( link_to_user( nil ) ).to eq 'deleted user'
    end
    it "should link to the given user" do
      expect( link_to_user( user ) ).to have_tag( "a[href='http://test.host/people/#{user.login}']" )
    end
    it "should use given link text if :content_text is specified" do
      expect( link_to_user( user, content_text: "Hello there!" ) ).to have_tag( "a", "Hello there!" )
    end
    it "should use the login as link text with no :content_method specified" do
      expect( link_to_user( user ) ).to have_tag( "a", user.login )
    end
    it "should use the name as link text with :content_method => :name" do
      expect( link_to_user( user, content_method: :name ) ).to have_tag( "a", user.name )
    end
    it "should use the login as title with no :title_method specified" do
      expect( link_to_user( user ) ).to have_tag( "a[title='#{user.login}']" )
    end
    # Fixed description: this example exercises :title_method, not :content_method.
    it "should use the name as link title with :title_method => :name" do
      expect( link_to_user( user , title_method: :name ) ).to have_tag( "a[title='#{user.name}']" )
    end
    it "should have nickname as a class by default" do
      expect( link_to_user( user ) ).to have_tag( "a.nickname" )
    end
    it "should take other classes and no longer have the nickname class" do
      result = link_to_user( user , class: "foo bar" )
      expect( result ).to have_tag( "a.foo" )
      expect( result ).to have_tag( "a.bar" )
    end
  end
end
Minor spec fix
require File.dirname(__FILE__) + '/../spec_helper'
include ApplicationHelper
include UsersHelper
include AuthenticatedTestHelper

# Specs for UsersHelper#link_to_user: href/text/title rendering and CSS classes.
describe UsersHelper do
  let(:user) { User.make! }
  describe "link_to_user" do
    it "should return an error string on a nil user" do
      expect( link_to_user( nil ) ).to eq 'deleted user'
    end
    it "should link to the given user" do
      expect( link_to_user( user ) ).to have_tag( "a[href='http://test.host/people/#{user.login}']" )
    end
    it "should use given link text if :content_text is specified" do
      expect( link_to_user( user, content_text: "Hello there!" ) ).to have_tag( "a", "Hello there!" )
    end
    it "should use the login as link text with no :content_method specified" do
      expect( link_to_user( user ) ).to have_tag( "a", user.login )
    end
    it "should use the name as link text with :content_method => :name" do
      expect( link_to_user( user, content_method: :name ) ).to have_tag( "a", user.name )
    end
    it "should use the login as title with no :title_method specified" do
      expect( link_to_user( user ) ).to have_tag( "a[title='#{user.login}']" )
    end
    it "should use the name as link title with :title_method => :name" do
      # The test matcher gets confused with the name has an apostrophe, even though the HTML is fine
      user.update_attributes( name: "Balthazar Brogdonovich" )
      expect( link_to_user( user , title_method: :name ) ).to have_tag( "a", with: { title: user.name } )
    end
    it "should have nickname as a class by default" do
      expect( link_to_user( user ) ).to have_tag( "a.nickname" )
    end
    it "should take other classes and no longer have the nickname class" do
      result = link_to_user( user , class: "foo bar" )
      expect( result ).to have_tag( "a.foo" )
      expect( result ).to have_tag( "a.bar" )
    end
  end
end
require "spec_helper"

# Specs for the GitHub tarball source: repo naming, install detection,
# download dispatch through the GitHub API, and install paths.
describe Henson::Source::GitHub do
  subject(:it) { described_class.new("foo", ">= 0", "bar/puppet-foo") }
  it "can be instantiated" do
    expect(it).to_not be_nil
  end
  it "inherits Henson::Source::Tarball" do
    expect(it).to be_a(Henson::Source::Tarball)
  end
  describe "#repo" do
    it "should return the repository name for the module" do
      expect(it.repo).to eq("bar/puppet-foo")
    end
  end
  describe "#installed?" do
    it "should always return false" do
      expect(it.installed?).to be_false
    end
  end
  describe "#download!" do
    let(:ui) { mock }
    before do
      Henson.ui = ui
    end
    it "should make an API request to download the module" do
      # Version and cache path are read from the source itself, so the
      # expectation stays valid regardless of environment.
      ui.expects(:debug).
        with("Downloading bar/puppet-foo@#{it.send(:version)} to #{it.send(:cache_path)}")
      it.send(:api).expects(:download_tag_for_repo).with(
        "bar/puppet-foo", it.send(:version), it.send(:cache_path)
      )
      it.send(:download!)
    end
  end
  describe "#install_path" do
    it "should return a Pathname object" do
      expect(it.send(:install_path)).to be_a(Pathname)
    end
    it "should return the path that the module will be installed into" do
      path = Pathname.new(Henson.settings[:path]) + "foo"
      expect(it.send(:install_path)).to eq(path)
    end
  end
end
Fix some more specs
require "spec_helper"

# Specs for the GitHub tarball source: repo naming, install detection,
# download dispatch through the GitHub API, and install paths.
describe Henson::Source::GitHub do
  subject(:it) { described_class.new("foo", ">= 0", "bar/puppet-foo") }
  it "can be instantiated" do
    expect(it).to_not be_nil
  end
  it "inherits Henson::Source::Tarball" do
    expect(it).to be_a(Henson::Source::Tarball)
  end
  describe "#repo" do
    it "should return the repository name for the module" do
      expect(it.repo).to eq("bar/puppet-foo")
    end
  end
  describe "#installed?" do
    it "should always return false" do
      expect(it.installed?).to be_false
    end
  end
  describe "#download!" do
    let(:ui) { mock }
    before do
      Henson.ui = ui
    end
    it "should make an API request to download the module" do
      it.expects(:version).returns("1.1.2").at_least(3)
      # Derive the cache path from the source instead of hard-coding an
      # absolute path from one developer's machine, so the spec passes in
      # any checkout location.
      cache_path = it.send(:cache_path)
      ui.expects(:debug).
        with("Downloading bar/puppet-foo@1.1.2 to #{cache_path}...")
      it.send(:api).expects(:download_tag_for_repo).with(
        'bar/puppet-foo',
        '1.1.2',
        cache_path.to_s
      )
      it.send(:download!)
    end
  end
  describe "#install_path" do
    it "should return a Pathname object" do
      expect(it.send(:install_path)).to be_a(Pathname)
    end
    it "should return the path that the module will be installed into" do
      path = Pathname.new(Henson.settings[:path]) + "foo"
      expect(it.send(:install_path)).to eq(path)
    end
  end
end
require 'spec_helper'

# Integration coverage for Danica::Wrapper::Negative rendering: the wrapped
# operand is negated and parenthesised in both gnuplot and TeX output.
describe 'integration of negative' do
  describe 'with a addition' do
    subject { Danica::Wrapper::Negative.new(Danica::Operator::Addition.new(1, 2, 3)) }

    describe '#to_gnu' do
      it 'returns the correct string' do
        expect(subject.to_gnu).to eq('-(1 + 2 + 3)')
      end
    end

    describe '#to_tex' do
      it 'returns the correct string' do
        expect(subject.to_tex).to eq('-\left(1 + 2 + 3\right)')
      end
    end
  end
end
Add more specs over negativity
require 'spec_helper'

# Integration coverage for Danica::Wrapper::Negative rendering (gnu / TeX).
describe 'integration of negative' do
  describe 'with a addition' do
    subject do
      Danica::Wrapper::Negative.new(
        Danica::Operator::Addition.new(1,2,3)
      )
    end
    describe '#to_gnu' do
      it 'returns the correct string' do
        expect(subject.to_gnu).to eq('-(1 + 2 + 3)')
      end
    end
    describe '#to_tex' do
      it 'returns the correct string' do
        expect(subject.to_tex).to eq('-\left(1 + 2 + 3\right)')
      end
    end
    describe 'when it is the result of an expression' do
      let(:x) { Danica::Wrapper::Variable.new(:x) }
      let(:y) { Danica::Wrapper::Variable.new(:y) }
      let(:z) { Danica::Wrapper::Variable.new(:z) }
      # NOTE(review): negative_parcel is never referenced by the subject
      # below, so the override in the nested context is a no-op — both
      # examples exercise the same `x - (y + z)` expression. The subject
      # should probably be `x - negative_parcel`; confirm and fix.
      let(:negative_parcel) { y + z }
      subject do
        x - (y + z)
      end
      it 'wraps parcel into a group' do
        expect(subject.to_gnu).to eq('x -(y + z)')
      end
      context 'when the negative parcel is an expression' do
        let(:negative_parcel) { Danica.build(:y, :z) { x + z } }
        it 'wraps parcel into a group' do
          expect(subject.to_gnu).to eq('x -(y + z)')
        end
      end
    end
  end
end
|
describe Bumbleworks::Task do
# Shared setup: a task built from a minimal Ruote workitem whose params name
# the task 'go_to_work' and carry a dispatched_at marker.
subject { described_class.new(workflow_item) }
let(:workflow_item) {
  Ruote::Workitem.new({
    'fields' => {
      'params' => {'task' => 'go_to_work'},
      'dispatched_at' => 'some time ago'
    }
  })
}
before :each do
  Bumbleworks::Ruote.register_participants
  Bumbleworks.start_worker!
end
# Shared-example coverage: entity-holder and comparable contracts.
it_behaves_like "an entity holder" do
  let(:holder) { described_class.new(workflow_item) }
  let(:storage_workitem) { Bumbleworks::Workitem.new(workflow_item) }
end
it_behaves_like "comparable" do
  subject { described_class.new(workflow_item) }
  let(:other) { described_class.new(workflow_item) }
  before(:each) do
    allow(workflow_item).to receive(:sid).and_return('blah-123-blah')
  end
end
describe '#not_completable_error_message' do
  it 'defaults to generic message' do
    task = described_class.new(workflow_item)
    expect(task.not_completable_error_message).to eq(
      "This task is not currently completable."
    )
  end
end
# .autoload_all registers Object-level autoloads for each task module file
# found under the configured tasks directory.
describe '.autoload_all' do
  it 'autoloads all task modules in directory' do
    Bumbleworks.root = File.join(fixtures_path, 'apps', 'with_default_directories')
    expect(Object).to receive(:autoload).with(:MakeSomeHoneyTask,
      File.join(Bumbleworks.root, 'tasks', 'make_some_honey_task.rb'))
    expect(Object).to receive(:autoload).with(:TasteThatMolassesTask,
      File.join(Bumbleworks.root, 'tasks', 'taste_that_molasses_task.rb'))
    described_class.autoload_all
  end
  it 'does nothing if using default path and directory does not exist' do
    Bumbleworks.root = File.join(fixtures_path, 'apps', 'minimal')
    described_class.autoload_all
  end
  it 'raises exception if using custom path and participants file does not exist' do
    Bumbleworks.root = File.join(fixtures_path, 'apps', 'minimal')
    Bumbleworks.tasks_directory = 'oysters'
    expect {
      described_class.autoload_all
    }.to raise_error(Bumbleworks::InvalidSetting)
  end
end
describe '#dispatched_at' do
  it 'returns dispatched_at timestamp from workitem' do
    expect(subject.dispatched_at).to eq 'some time ago'
  end
end
describe '#completable?' do
  it 'defaults to true on base task' do
    expect(subject).to be_completable
  end
end
# Construction requires a real Ruote::Workitem; #reload re-fetches the
# workitem from the storage participant by sid.
describe '.new' do
  it 'raises an error if workitem is nil' do
    expect {
      described_class.new(nil)
    }.to raise_error(ArgumentError, "Not a valid workitem")
  end
  it 'raises an error if workitem not a Ruote::Workitem' do
    expect {
      described_class.new('a string!')
    }.to raise_error(ArgumentError, "Not a valid workitem")
  end
  it 'succeeds when given workitem' do
    expect {
      described_class.new(workflow_item)
    }.not_to raise_error
  end
  it 'extends new object with task module' do
    expect_any_instance_of(described_class).to receive(:extend_module)
    described_class.new(workflow_item)
  end
end
describe '#reload' do
  it 'reloads the workitem from the storage participant' do
    allow(subject).to receive(:sid).and_return(:the_sid)
    expect(Bumbleworks.dashboard.storage_participant).to receive(
      :[]).with(:the_sid).and_return(:amazing_workitem)
    subject.reload
    expect(subject.instance_variable_get(:@workitem)).to eq(:amazing_workitem)
  end
end
# Hook dispatch: call_{before,after}_hooks invokes "#{phase}_#{event}" on the
# task itself and on every registered observer, forwarding extra arguments.
[:before, :after].each do |phase|
  describe "#call_#{phase}_hooks" do
    it "calls #{phase} hooks on task and all observers" do
      observer1, observer2 = double('observer1'), double('observer2')
      Bumbleworks.observers = [observer1, observer2]
      expect(subject).to receive(:"#{phase}_snoogle").with(:chachunga, :faloop)
      expect(observer1).to receive(:"#{phase}_snoogle").with(:chachunga, :faloop)
      expect(observer2).to receive(:"#{phase}_snoogle").with(:chachunga, :faloop)
      subject.send(:"call_#{phase}_hooks", :snoogle, :chachunga, :faloop)
    end
  end
end
describe '#on_dispatch' do
  it 'logs dispatch' do
    expect(subject).to receive(:log).with(:dispatch)
    subject.on_dispatch
  end
  it 'calls after hooks' do
    allow(subject).to receive(:log)
    expect(subject).to receive(:call_after_hooks).with(:dispatch)
    subject.on_dispatch
  end
end
# Module extension: every task gets Task::Base; a per-task module derived
# from the nickname ("GoToWorkTask") is mixed in when it exists.
describe '#extend_module' do
  it 'extends with base module and task module' do
    expect(subject).to receive(:task_module).and_return(:task_module_double)
    expect(subject).to receive(:extend).with(Bumbleworks::Task::Base).ordered
    expect(subject).to receive(:extend).with(:task_module_double).ordered
    subject.extend_module
  end
  it 'extends only with base module if no nickname' do
    allow(subject).to receive(:nickname).and_return(nil)
    expect(subject).to receive(:extend).with(Bumbleworks::Task::Base)
    subject.extend_module
  end
  it 'extends only with base module if task module does not exist' do
    expect(subject).to receive(:extend).with(Bumbleworks::Task::Base)
    subject.extend_module
  end
end
describe '#task_module' do
  it 'returns nil if no nickname' do
    allow(subject).to receive(:nickname).and_return(nil)
    expect(subject.task_module).to be_nil
  end
  it 'returns constantized task nickname with "Task" appended' do
    # Instantiate first so construction-time extension doesn't hit the stub.
    subject
    allow(Bumbleworks::Support).to receive(:constantize).with("GoToWorkTask").and_return(:the_task_module)
    expect(subject.task_module).to eq(:the_task_module)
  end
end
describe '#id' do
  it 'returns the sid from the workitem' do
    allow(workflow_item).to receive(:sid).and_return(:an_exciting_id)
    expect(subject.id).to eq(:an_exciting_id)
  end
end
# .find_by_id looks up a live task by flow-expression id and raises
# MissingWorkitem for nil, unknown, or unparseable ids.
describe '.find_by_id' do
  it 'returns the task for the given id' do
    Bumbleworks.define_process 'planting_a_noodle' do
      concurrence do
        noodle_gardener :task => 'plant_noodle_seed'
        horse_feeder :task => 'give_the_horse_a_bon_bon'
      end
    end
    Bumbleworks.launch!('planting_a_noodle')
    Bumbleworks.dashboard.wait_for(:horse_feeder)
    plant_noodle_seed_task = described_class.for_role('noodle_gardener').first
    give_the_horse_a_bon_bon_task = described_class.for_role('horse_feeder').first
    # checking for equality by comparing sid, which is the flow expression id
    # that identifies not only the expression, but its instance
    expect(described_class.find_by_id(plant_noodle_seed_task.id).sid).to eq(
      plant_noodle_seed_task.sid
    )
    expect(described_class.find_by_id(give_the_horse_a_bon_bon_task.id).sid).to eq(
      give_the_horse_a_bon_bon_task.sid
    )
  end
  it 'raises an error if id is nil' do
    expect {
      described_class.find_by_id(nil)
    }.to raise_error(described_class::MissingWorkitem)
  end
  it 'raises an error if workitem not found for given id' do
    expect {
      described_class.find_by_id('asdfasdf')
    }.to raise_error(described_class::MissingWorkitem)
  end
  it 'raises an error if id is unparseable by storage participant' do
    expect {
      described_class.find_by_id(:unparseable_because_i_am_a_symbol)
    }.to raise_error(described_class::MissingWorkitem)
  end
end
# Ordering: tasks can be sorted by workitem params (order_by_param/s) or by
# workflow fields (order_by_field/s); tasks missing the key sort last (asc).
context 'ordering' do
  before :each do
    Bumbleworks.define_process 'emergency_hamster_bullet' do
      concurrence do
        doctor :task => 'evince_concern', :priority => 3, :importance => 1000
        patient :task => 'panic', :priority => 2, :importance => 5
        nurse :task => 'roll_eyes', :priority => 4, :importance => 1000
        officer :task => 'appear_authoritative', :priority => 1, :importance => 1000
        rhubarb :task => 'sit_quietly', :importance => 80
      end
    end
  end
  context 'by params' do
    before(:each) do
      Bumbleworks.launch!('emergency_hamster_bullet')
      Bumbleworks.dashboard.wait_for(:rhubarb)
    end
    describe '.order_by_param' do
      it 'orders returned tasks by given param ascending by default' do
        tasks = described_class.order_by_param(:priority)
        # 'sit_quietly' has no :priority, so it sorts last.
        expect(tasks.map(&:nickname)).to eq([
          'appear_authoritative',
          'panic',
          'evince_concern',
          'roll_eyes',
          'sit_quietly'
        ])
      end
      it 'can order in reverse' do
        tasks = described_class.order_by_param(:priority, :desc)
        expect(tasks.map(&:nickname)).to eq([
          'sit_quietly',
          'roll_eyes',
          'evince_concern',
          'panic',
          'appear_authoritative'
        ])
      end
    end
    describe '.order_by_params' do
      it 'orders by multiple parameters' do
        tasks = described_class.order_by_params(:importance => :desc, :priority => :asc)
        expect(tasks.map(&:nickname)).to eq([
          'appear_authoritative',
          'evince_concern',
          'roll_eyes',
          'sit_quietly',
          'panic'
        ])
      end
    end
  end
  context 'by fields' do
    before(:each) do
      # Launch order deliberately differs from field order to prove sorting.
      @wf3 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 2, :strength => 3)
      Bumbleworks.dashboard.wait_for(:officer)
      @wf1 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 2, :strength => 1)
      Bumbleworks.dashboard.wait_for(:officer)
      @wf2 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 1, :strength => 2)
      Bumbleworks.dashboard.wait_for(:officer)
      @wf4 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 1, :strength => 4)
      Bumbleworks.dashboard.wait_for(:officer)
      @wf5 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 1)
      Bumbleworks.dashboard.wait_for(:officer)
    end
    describe '.order_by_field' do
      it 'orders returned tasks by given param ascending by default' do
        tasks = described_class.for_role('doctor').order_by_field(:strength)
        expect(tasks.map { |t| [t.nickname, t.wfid] }).to eq([
          ['evince_concern', @wf1.wfid],
          ['evince_concern', @wf2.wfid],
          ['evince_concern', @wf3.wfid],
          ['evince_concern', @wf4.wfid],
          ['evince_concern', @wf5.wfid]
        ])
      end
      it 'can order in reverse' do
        tasks = described_class.for_role('doctor').order_by_field(:strength, :desc)
        expect(tasks.map { |t| [t.nickname, t.wfid] }).to eq([
          ['evince_concern', @wf5.wfid],
          ['evince_concern', @wf4.wfid],
          ['evince_concern', @wf3.wfid],
          ['evince_concern', @wf2.wfid],
          ['evince_concern', @wf1.wfid]
        ])
      end
    end
    describe '.order_by_fields' do
      it 'orders by multiple parameters' do
        tasks = described_class.for_role('doctor').order_by_fields(:group => :asc, :strength => :desc)
        expect(tasks.map { |t| [t.nickname, t.wfid] }).to eq([
          ['evince_concern', @wf5.wfid],
          ['evince_concern', @wf4.wfid],
          ['evince_concern', @wf2.wfid],
          ['evince_concern', @wf3.wfid],
          ['evince_concern', @wf1.wfid]
        ])
      end
    end
  end
end
# .for_roles filters tasks by participant role; only participants with a
# :task param count as tasks. Accepts strings or symbols; nil/[] yield [].
describe '.for_roles' do
  before :each do
    Bumbleworks.define_process 'lowering_penguin_self_esteem' do
      concurrence do
        heckler :task => 'comment_on_dancing_ability'
        mother :oh_no => 'this_is_not_a_task'
        mother :task => 'ignore_pleas_for_attention'
        father :task => 'sit_around_watching_penguin_tv'
      end
    end
    Bumbleworks.launch!('lowering_penguin_self_esteem')
  end
  it 'returns tasks for all given roles' do
    Bumbleworks.dashboard.wait_for(:father)
    tasks = described_class.for_roles(['heckler', 'mother'])
    expect(tasks.map(&:nickname)).to eq([
      'comment_on_dancing_ability',
      'ignore_pleas_for_attention'
    ])
  end
  it 'works with symbolized role names' do
    Bumbleworks.dashboard.wait_for(:father)
    tasks = described_class.for_roles([:heckler, :mother])
    expect(tasks.map(&:nickname)).to eq([
      'comment_on_dancing_ability',
      'ignore_pleas_for_attention'
    ])
  end
  it 'returns empty array if no tasks found for given roles' do
    Bumbleworks.dashboard.wait_for(:father)
    expect(described_class.for_roles(['elephant'])).to be_empty
  end
  it 'returns empty array if given empty array' do
    Bumbleworks.dashboard.wait_for(:father)
    expect(described_class.for_roles([])).to be_empty
  end
  it 'returns empty array if given nil' do
    Bumbleworks.dashboard.wait_for(:father)
    expect(described_class.for_roles(nil)).to be_empty
  end
end
# .for_processes filters tasks by process instance (or id); .for_process is
# the single-process shortcut delegating to the same finder.
describe '.for_processes' do
  before :each do
    Bumbleworks.define_process 'spunking' do
      concurrence do
        spunker :task => 'spunk'
        nonspunker :task => 'complain'
      end
    end
    Bumbleworks.define_process 'rooting' do
      concurrence do
        rooter :task => 'get_the_rooting_on'
        armchair_critic :task => 'scoff'
      end
    end
    @spunking_process = Bumbleworks.launch!('spunking')
    @rooting_process_1 = Bumbleworks.launch!('rooting')
    @rooting_process_2 = Bumbleworks.launch!('rooting')
    Bumbleworks.dashboard.wait_for(:armchair_critic)
  end
  it 'returns tasks for given processes' do
    spunking_tasks = described_class.for_processes([@spunking_process])
    rooting_tasks = described_class.for_processes([@rooting_process_1])
    tasks_for_both = described_class.for_processes([@spunking_process, @rooting_process_1])
    expect(spunking_tasks.map(&:nickname)).to match_array(['spunk', 'complain'])
    expect(rooting_tasks.map(&:nickname)).to match_array(['get_the_rooting_on', 'scoff'])
    expect(tasks_for_both.map(&:nickname)).to match_array(['spunk', 'complain', 'get_the_rooting_on', 'scoff'])
  end
  it 'works with process ids as well' do
    spunking_tasks = described_class.for_processes([@spunking_process.id])
    expect(spunking_tasks.map(&:nickname)).to match_array(['spunk', 'complain'])
  end
  it 'returns empty array when no tasks for given process id' do
    expect(described_class.for_processes(['boop'])).to be_empty
  end
  it 'returns empty array if given empty array' do
    expect(described_class.for_processes([])).to be_empty
  end
  it 'returns empty array if given nil' do
    expect(described_class.for_processes(nil)).to be_empty
  end
end
describe '.for_process' do
  it 'acts as shortcut to .for_processes with one process' do
    allow_any_instance_of(described_class::Finder).to receive(:for_processes).with([:one_guy]).and_return(:aha)
    expect(described_class.for_process(:one_guy)).to eq(:aha)
  end
end
# .for_role returns every task for one role; .unclaimed returns tasks with
# no claimant, shrinking as tasks are claimed.
describe '.for_role' do
  it 'returns all tasks for given role' do
    Bumbleworks.define_process 'chalking' do
      concurrence do
        chalker :task => 'make_chalk_drawings'
        chalker :task => 'chalk_it_good_baby'
        hagrid :task => 'moan_endearingly'
      end
    end
    Bumbleworks.launch!('chalking')
    Bumbleworks.dashboard.wait_for(:hagrid)
    tasks = described_class.for_role('chalker')
    expect(tasks.map(&:nickname)).to eq([
      'make_chalk_drawings',
      'chalk_it_good_baby'
    ])
  end
end
describe '.unclaimed' do
  it 'returns all unclaimed tasks' do
    Bumbleworks.define_process 'dog-lifecycle' do
      concurrence do
        dog :task => 'eat'
        dog :task => 'bark'
        dog :task => 'pet_dog'
        cat :task => 'skip_and_jump'
      end
      dog :task => 'nap'
    end
    Bumbleworks.launch!('dog-lifecycle')
    Bumbleworks.dashboard.wait_for(:cat)
    @unclaimed = described_class.unclaimed
    expect(@unclaimed.map(&:nickname)).to match_array(['eat', 'bark', 'pet_dog', 'skip_and_jump'])
    described_class.all.each do |t|
      t.claim('radish') unless ['pet_dog', 'bark'].include?(t.nickname)
    end
    @unclaimed = described_class.unclaimed
    expect(@unclaimed.map(&:nickname)).to match_array(['pet_dog', 'bark'])
  end
end
# .claimed is the complement of .unclaimed; .completable filters on the
# task's completable? predicate (overridable per task module).
describe '.claimed' do
  it 'returns all claimed tasks' do
    Bumbleworks.define_process 'dog-lifecycle' do
      concurrence do
        dog :task => 'eat'
        dog :task => 'bark'
        dog :task => 'pet_dog'
        cat :task => 'skip_and_jump'
      end
      dog :task => 'nap'
    end
    Bumbleworks.launch!('dog-lifecycle')
    Bumbleworks.dashboard.wait_for(:cat)
    expect(described_class.claimed).to be_empty
    described_class.all.each_with_index do |t, i|
      t.claim("radish_#{i}") unless ['pet_dog', 'bark'].include?(t.nickname)
    end
    @claimed = described_class.claimed
    expect(@claimed.map(&:nickname)).to match_array(['eat', 'skip_and_jump'])
  end
end
describe '.completable' do
  it 'filters by completability' do
    # Task module matching 'wuggle_hands' overrides completable? to false.
    module WuggleHandsTask
      def completable?
        false
      end
    end
    Bumbleworks.define_process 'hand_waggling' do
      concurrence do
        a_fella :task => 'waggle_hands'
        a_monkey :task => 'wuggle_hands'
        a_lady :task => 'wiggle_hands'
      end
    end
    Bumbleworks.launch!('hand_waggling')
    Bumbleworks.dashboard.wait_for(:a_lady)
    tasks = described_class.completable
    expect(tasks.map { |t| [t.role, t.nickname] }).to eq([
      ['a_fella', 'waggle_hands'],
      ['a_lady', 'wiggle_hands']
    ])
    tasks = described_class.completable(false)
    expect(tasks.map { |t| [t.role, t.nickname] }).to eq([
      ['a_monkey', 'wuggle_hands']
    ])
  end
end
# Enumeration: .each/.map iterate over found tasks and respect chained
# filters; .all returns every task-param'd workitem regardless of role.
context 'iterators' do
  before :each do
    Bumbleworks.define_process 'life_on_tha_street' do
      concurrence do
        oscar :task => 'grouch_it_up'
        elmo :task => 'sing_a_tune'
        elmo :task => 'steal_booze'
        snuffy :task => 'eat_cabbage'
      end
    end
    Bumbleworks.launch!('life_on_tha_street')
    Bumbleworks.dashboard.wait_for(:snuffy)
  end
  describe '.each' do
    it 'executes for each found task' do
      list = []
      described_class.each { |t| list << t.nickname }
      expect(list).to match_array(['grouch_it_up', 'sing_a_tune', 'steal_booze', 'eat_cabbage'])
    end
  end
  describe '.map' do
    it 'maps result of yielding block with each task' do
      list = described_class.map { |t| t.nickname }
      expect(list).to match_array(['grouch_it_up', 'sing_a_tune', 'steal_booze', 'eat_cabbage'])
    end
  end
  context 'with queries' do
    it 'checks filters' do
      list = described_class.for_role('elmo').map { |t| t.nickname }
      expect(list).to match_array(['sing_a_tune', 'steal_booze'])
    end
  end
end
describe '.all' do
  before :each do
    Bumbleworks.define_process 'dog-lifecycle' do
      concurrence do
        dog_teeth :task => 'eat'
        dog_mouth :task => 'bark'
        everyone :task => 'pet_dog'
        the_universe_is_wonderful
        dog_legs :task => 'skip_and_jump'
      end
      dog_brain :task => 'nap'
    end
    Bumbleworks.launch!('dog-lifecycle')
  end
  it 'returns all tasks (with task param) in queue regardless of role' do
    Bumbleworks.dashboard.wait_for(:dog_legs)
    tasks = described_class.all
    # 'the_universe_is_wonderful' has no :task param so it is excluded.
    expect(tasks.map { |t| [t.role, t.nickname] }).to eq([
      ['dog_teeth', 'eat'],
      ['dog_mouth', 'bark'],
      ['everyone', 'pet_dog'],
      ['dog_legs', 'skip_and_jump']
    ])
  end
  it 'uses subclass for generation of tasks' do
    class MyOwnTask < Bumbleworks::Task; end
    Bumbleworks.dashboard.wait_for(:dog_legs)
    tasks = MyOwnTask.all
    expect(tasks).to be_all { |t| t.class == MyOwnTask }
    Object.send(:remove_const, :MyOwnTask)
  end
end
# Hash-style accessors: #[]= writes to and #[] reads from the workitem's
# fields hash (the process-wide payload shared between tasks).
describe '#[], #[]=' do
it 'sets values on workitem fields' do
subject['hive'] = 'bees at work'
expect(workflow_item.fields['hive']).to eq('bees at work')
end
# Description fixed: was "retuns value from workitem params", but the
# example reads from workflow_item.fields, not params.
it 'returns value from workitem fields' do
workflow_item.fields['nest'] = 'queen resting'
expect(subject['nest']).to eq('queen resting')
end
end
# #nickname is the task's 'task' param, captured at construction time.
describe '#nickname' do
it 'returns the "task" param' do
expect(subject.nickname).to eq('go_to_work')
end
it 'is immutable; cannot be changed by modifying the param' do
expect(subject.nickname).to eq('go_to_work')
# Mutating the underlying param after the fact must not leak through.
subject.params['task'] = 'what_is_wrong_with_you?'
expect(subject.nickname).to eq('go_to_work')
end
end
# #role mirrors the ruote workitem's participant_name.
describe '#role' do
it 'returns the workitem participant_name' do
Bumbleworks.define_process 'planting_a_noodle' do
noodle_gardener :task => 'plant_noodle_seed'
end
Bumbleworks.launch!('planting_a_noodle')
Bumbleworks.dashboard.wait_for(:noodle_gardener)
expect(described_class.all.first.role).to eq('noodle_gardener')
end
end
# .for_claimant filters the queue down to tasks claimed with a given token.
describe '.for_claimant' do
it 'returns all tasks claimed by given claimant' do
Bumbleworks.define_process 'dog-lifecycle' do
concurrence do
dog :task => 'eat'
dog :task => 'bark'
dog :task => 'pet_dog'
the_universe_is_wonderful
cat :task => 'skip_and_jump'
end
dog :task => 'nap'
end
Bumbleworks.launch!('dog-lifecycle')
Bumbleworks.dashboard.wait_for(:cat)
# Nothing claimed yet.
expect(described_class.for_claimant('radish')).to be_empty
described_class.all.each do |t|
t.claim('radish') unless t.nickname == 'pet_dog'
end
@tasks = described_class.for_claimant('radish')
expect(@tasks.map(&:nickname)).to match_array(['eat', 'bark', 'skip_and_jump'])
end
end
# .with_fields filters tasks by workitem field values; the two sequences
# diverge the 'bumby' field so different branches carry different values.
describe '.with_fields' do
before(:each) do
Bumbleworks.define_process 'divergination' do
concurrence do
sequence do
set 'bumby' => 'fancy'
bumber :task => 'wear_monocle'
end
sequence do
set 'bumby' => 'not_fancy'
concurrence do
bumber :task => 'wear_natties'
loofer :task => 'snuffle'
end
end
end
end
# :grumbles is a launch-time field, so it is visible on all three tasks.
Bumbleworks.launch!('divergination', :grumbles => true)
Bumbleworks.dashboard.wait_for(:loofer)
end
it 'returns all tasks with given field' do
expect(described_class.with_fields(:grumbles => true).count).to eq(3)
expect(described_class.with_fields(:bumby => 'fancy').count).to eq(1)
expect(described_class.with_fields(:bumby => 'not_fancy').count).to eq(2)
expect(described_class.with_fields(:what => 'ever')).to be_empty
end
it 'looks up multiple fields at once' do
expect(described_class.with_fields(:grumbles => true, :bumby => 'not_fancy').count).to eq(2)
expect(described_class.with_fields(:grumbles => false, :bumby => 'not_fancy')).to be_empty
end
it 'can be chained' do
expect(described_class.with_fields(:grumbles => true).with_fields(:bumby => 'fancy').count).to eq(1)
expect(described_class.with_fields(:grumbles => false).with_fields(:bumby => 'not_fancy')).to be_empty
end
end
# .for_entity matches tasks launched with an :entity (anything responding
# to #identifier, per the OpenStruct stand-in used here).
describe '.for_entity' do
it 'returns all tasks associated with given entity' do
fake_sandwich = OpenStruct.new(:identifier => 'rubies')
Bumbleworks.define_process 'existential_pb_and_j' do
concurrence do
sandwich :task => 'be_made'
sandwich :task => 'contemplate_being'
end
end
Bumbleworks.launch!('existential_pb_and_j', :entity => fake_sandwich)
Bumbleworks.dashboard.wait_for(:sandwich)
tasks = described_class.for_entity(fake_sandwich)
expect(tasks.size).to eq(2)
end
end
# .by_nickname filters on the 'task' param, regardless of role.
context '.by_nickname' do
it 'returns all tasks with given nickname' do
Bumbleworks.define_process 'animal_disagreements' do
concurrence do
turtle :task => 'be_a_big_jerk'
goose :task => 'punch_turtle'
rabbit :task => 'punch_turtle'
end
end
Bumbleworks.launch!('animal_disagreements')
Bumbleworks.dashboard.wait_for(:rabbit)
tasks = described_class.by_nickname('punch_turtle')
expect(tasks.map(&:role)).to match_array(['goose', 'rabbit'])
end
end
# Claim lifecycle: #claim stamps claimant/claimed_at params, #release clears
# them; both run before/after hooks unless :skip_callbacks, and both log.
context 'claiming things' do
before :each do
Bumbleworks.define_process 'planting_a_noodle' do
noodle_gardener :task => 'plant_noodle_seed'
end
Bumbleworks.launch!('planting_a_noodle')
Bumbleworks.dashboard.wait_for(:noodle_gardener)
@task = described_class.for_role('noodle_gardener').first
@task.claim('boss')
end
describe '#claim' do
it 'sets token on "claimant" param' do
expect(@task.params['claimant']).to eq('boss')
end
it 'sets claimed_at param' do
expect(@task.params['claimed_at']).not_to be_nil
end
it 'raises an error if already claimed by someone else' do
expect{@task.claim('peon')}.to raise_error described_class::AlreadyClaimed
end
it 'does not raise an error if attempting to claim by same token' do
# Re-claiming with the same token is idempotent.
expect{@task.claim('boss')}.not_to raise_error
end
it 'calls before_claim and after_claim callbacks' do
allow(subject).to receive(:log)
# .ordered pins the callback sandwich: before -> set_claimant -> after.
expect(subject).to receive(:before_claim).with(:doctor_claim).ordered
expect(subject).to receive(:set_claimant).ordered
expect(subject).to receive(:after_claim).with(:doctor_claim).ordered
subject.claim(:doctor_claim)
end
it 'skips callbacks if requested' do
task = described_class.new(workflow_item)
allow(task).to receive(:log)
expect(task).to receive(:before_claim).never
expect(task).to receive(:set_claimant)
expect(task).to receive(:after_claim).never
task.claim(:doctor_claim, :skip_callbacks => true)
end
it 'logs event' do
# The claim performed in the before block is the last log entry.
log_entry = Bumbleworks.logger.entries.last[:entry]
expect(log_entry[:action]).to eq(:claim)
expect(log_entry[:actor]).to eq('boss')
end
end
describe '#claimant' do
it 'returns token of who has claim' do
expect(@task.claimant).to eq('boss')
end
end
describe '#claimed_at' do
it 'returns claimed_at param' do
expect(@task.claimed_at).to eq(@task.params['claimed_at'])
end
end
describe '#claimed?' do
it 'returns true if claimed' do
expect(@task.claimed?).to be_truthy
end
it 'false otherwise' do
@task.params['claimant'] = nil
expect(@task.claimed?).to be_falsy
end
end
describe '#release' do
it "release claim on workitem" do
expect(@task).to be_claimed
@task.release
expect(@task).not_to be_claimed
end
it 'clears claimed_at param' do
@task.release
expect(@task.params['claimed_at']).to be_nil
end
it 'calls with hooks' do
# Release reuses the generic hook plumbing with the previous claimant.
expect(@task).to receive(:call_before_hooks).with(:release, 'boss').ordered
expect(@task).to receive(:set_claimant).ordered
expect(@task).to receive(:call_after_hooks).with(:release, 'boss').ordered
@task.release
end
it 'skips callbacks if requested' do
expect(@task).to receive(:call_before_hooks).never
expect(@task).to receive(:set_claimant)
expect(@task).to receive(:call_after_hooks).never
@task.release(:skip_callbacks => true)
end
it 'logs event' do
@task.release
log_entry = Bumbleworks.logger.entries.last[:entry]
expect(log_entry[:action]).to eq(:release)
expect(log_entry[:actor]).to eq('boss')
end
end
end
# #update persists fields/params without advancing the process; #complete
# persists and proceeds the workitem to the next expression. Both wrap the
# work in before/after hooks and write a log entry.
context 'updating workflow engine' do
before :each do
Bumbleworks.define_process 'dog-lifecycle' do
dog_mouth :task => 'eat_dinner', :state => 'still cooking'
dog_brain :task => 'cat_nap', :by => 'midnight'
end
Bumbleworks.launch!('dog-lifecycle')
end
describe '#update' do
it 'saves fields and params, but does not proceed process' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
task.params['state'] = 'is ready'
task.fields['meal'] = 'salted_rhubarb'
task.update
# Re-fetch to prove the changes were persisted, not just in memory.
task = described_class.for_role('dog_mouth').first
expect(task.params['state']).to eq('is ready')
expect(task.fields['meal']).to eq('salted_rhubarb')
end
it 'calls with hooks' do
allow(subject).to receive(:log)
expect(subject).to receive(:call_before_hooks).with(:update, :argue_mints).ordered
expect(subject).to receive(:update_workitem).ordered
expect(subject).to receive(:call_after_hooks).with(:update, :argue_mints).ordered
subject.update(:argue_mints)
end
it 'skips callbacks if requested' do
task = described_class.new(workflow_item)
allow(task).to receive(:log)
expect(task).to receive(:call_before_hooks).never
expect(task).to receive(:update_workitem)
expect(task).to receive(:call_after_hooks).never
task.update({:actual => :params}, {:skip_callbacks => true})
end
it 'reloads after updating workitem' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
allow(task).to receive(:log)
expect(described_class.storage_participant).to receive(:update).with(task.workitem).ordered
expect(task).to receive(:reload).ordered
task.update(:noofles)
end
it 'logs event' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
task.params['claimant'] = :some_user
task.update(:extra_data => :fancy)
expect(Bumbleworks.logger.entries.last).to eq({
:level => :info, :entry => {
:actor => "some_user", # claimant is a string after #reload
:action => :update,
:target_type => 'Task',
:target_id => task.id,
:metadata => {
:extra_data => :fancy,
:current_fields => task.fields
}
}
})
end
end
describe '#complete' do
it 'saves fields and proceeds to next expression' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
task.params['state'] = 'is ready'
task.fields['meal'] = 'root beer and a kite'
task.complete
expect(described_class.for_role('dog_mouth')).to be_empty
event = Bumbleworks.dashboard.wait_for :dog_brain
task = described_class.for_role('dog_brain').first
# Params are per-task; fields travel with the process to the next task.
expect(task.params['state']).to be_nil
expect(task.fields['meal']).to eq('root beer and a kite')
end
it 'throws exception if task is not completable' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
allow(task).to receive(:completable?).and_return(false)
allow(task).to receive(:not_completable_error_message).and_return('hogwash!')
# No hooks, no proceed: the completability check aborts before anything.
expect(task).to receive(:before_update).never
expect(task).to receive(:before_complete).never
expect(task).to receive(:proceed_workitem).never
expect(task).to receive(:after_complete).never
expect(task).to receive(:after_update).never
expect {
task.complete
}.to raise_error(Bumbleworks::Task::NotCompletable, "hogwash!")
expect(described_class.for_role('dog_mouth')).not_to be_empty
end
it 'calls update and complete callbacks' do
allow(subject).to receive(:log)
# Nesting order: update hooks wrap complete hooks.
expect(subject).to receive(:call_before_hooks).with(:update, :argue_mints).ordered
expect(subject).to receive(:call_before_hooks).with(:complete, :argue_mints).ordered
expect(subject).to receive(:proceed_workitem).ordered
expect(subject).to receive(:call_after_hooks).with(:complete, :argue_mints).ordered
expect(subject).to receive(:call_after_hooks).with(:update, :argue_mints).ordered
subject.complete(:argue_mints)
end
it 'skips callbacks if requested' do
task = described_class.new(workflow_item)
allow(task).to receive(:log)
expect(task).to receive(:call_before_hooks).never
expect(task).to receive(:proceed_workitem)
expect(task).to receive(:call_after_hooks).never
task.complete({:actual => :params}, {:skip_callbacks => true})
end
it 'logs event' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
task.params['claimant'] = :some_user
task.complete(:extra_data => :fancy)
expect(Bumbleworks.logger.entries.last).to eq({
:level => :info, :entry => {
:actor => :some_user,
:action => :complete,
:target_type => 'Task',
:target_id => task.id,
:metadata => {
:extra_data => :fancy,
:current_fields => task.fields
}
}
})
end
end
end
# Finder chaining: successive finder calls AND together by default;
# .where_any switches the chain (or a hash of criteria) to OR semantics.
describe 'chained queries' do
before(:each) do
# Module picked up by task-module extension: only 'pink' tasks named
# be_proud report completable? == true.
module BeProudTask
def completable?
role == 'pink'
end
end
Bumbleworks.define_process 'the_big_kachunko' do
concurrence do
red :task => 'be_really_mad'
blue :task => 'be_a_bit_sad'
yellow :task => 'be_scared'
green :task => 'be_envious'
green :task => 'be_proud'
pink :task => 'be_proud'
end
end
Bumbleworks.launch!('the_big_kachunko')
Bumbleworks.dashboard.wait_for(:pink)
described_class.by_nickname('be_really_mad').first.claim('crayon_box')
described_class.by_nickname('be_a_bit_sad').first.claim('crayon_box')
described_class.by_nickname('be_scared').first.claim('crayon_box')
end
it 'allows for AND-ed chained finders' do
tasks = described_class.
for_roles(['green', 'pink']).
by_nickname('be_proud')
expect(tasks.map(&:nickname)).to match_array(['be_proud', 'be_proud'])
tasks = described_class.
for_roles(['green', 'pink', 'blue']).
completable.
by_nickname('be_proud')
expect(tasks.map(&:nickname)).to match_array(['be_proud'])
expect(tasks.first.role).to eq('pink')
tasks = described_class.
for_claimant('crayon_box').
for_roles(['red', 'yellow', 'green'])
expect(tasks.map(&:nickname)).to match_array(['be_really_mad', 'be_scared'])
tasks = described_class.
for_claimant('crayon_box').
by_nickname('be_a_bit_sad').
for_role('blue')
expect(tasks.map(&:nickname)).to eq(['be_a_bit_sad'])
end
it 'allows for OR-ed chained finders' do
tasks = described_class.where_any.
for_role('blue').
by_nickname('be_proud')
expect(tasks.map(&:nickname)).to match_array(['be_a_bit_sad', 'be_proud', 'be_proud'])
tasks = described_class.where_any.
completable.
claimed
expect(tasks.map(&:nickname)).to match_array(['be_really_mad', 'be_scared', 'be_a_bit_sad', 'be_envious', 'be_proud'])
end
it 'allows for combination of AND-ed and OR-ed finders' do
tasks = described_class.
for_claimant('crayon_box').
for_roles(['red', 'yellow', 'green']).
where_any(:nickname => 'spittle', :role => 'red')
expect(tasks.map(&:nickname)).to match_array(['be_really_mad'])
end
end
# Class-level method_missing delegates unknown messages to a fresh Finder;
# messages the Finder doesn't know either bubble up as NoMethodError.
describe 'method missing' do
it 'calls method on new Finder object' do
allow_any_instance_of(described_class::Finder).to receive(:shabam!).with(:yay).and_return(:its_a_me)
expect(described_class.shabam!(:yay)).to eq(:its_a_me)
end
it 'falls back to method missing if no finder method' do
expect {
described_class.kerplunk!(:oh_no)
# Pin the expected class: a bare raise_error matches any StandardError
# (and is warned about by RSpec), which would mask unrelated failures.
}.to raise_error(NoMethodError)
end
end
# .next_available polls until a matching task appears (or times out).
describe '.next_available' do
it 'waits for one task to show up and returns it' do
Bumbleworks.define_process "lazy_bum_and_cool_guy" do
concurrence do
cool_guy :task => 'get_it_going_man'
sequence do
# The 'bum' task only appears after a 2-second ruote wait.
wait '2s'
bum :task => 'finally_get_a_round_tuit'
end
end
end
start_time = Time.now
Bumbleworks.launch!('lazy_bum_and_cool_guy')
task = described_class.for_role('bum').next_available
end_time = Time.now
expect(task.nickname).to eq('finally_get_a_round_tuit')
# Proves next_available actually blocked for the wait period.
expect(end_time - start_time).to be >= 2
end
it 'times out if task does not appear in time' do
Bumbleworks.define_process "really_lazy_bum_and_cool_guy" do
concurrence do
cool_guy :task => 'good_golly_never_mind_you'
sequence do
wait '2s'
bum :task => 'whatever_these_socks_are_tasty'
end
end
end
Bumbleworks.launch!('really_lazy_bum_and_cool_guy')
expect {
# 0.5s timeout < 2s wait, so the poll must give up.
described_class.for_role('bum').next_available(:timeout => 0.5)
}.to raise_error(Bumbleworks::Task::AvailabilityTimeout)
end
end
# Display helpers: humanized/titleized nickname, optionally suffixed with
# the associated entity (derived from entity_type/entity_id fields).
describe '#humanize' do
it "returns humanized version of task name when no entity" do
expect(subject.humanize).to eq('Go to work')
end
it "returns humanized version of task name with entity" do
subject[:entity_id] = '45'
subject[:entity_type] = 'RhubarbSandwich'
expect(subject.humanize).to eq('Go to work: Rhubarb sandwich 45')
end
it "returns humanized version of task name without entity if requested" do
subject[:entity_id] = '45'
subject[:entity_type] = 'RhubarbSandwich'
expect(subject.humanize(:entity => false)).to eq('Go to work')
end
end
describe '#to_s' do
it "is aliased to #titleize" do
allow(subject).to receive(:titleize).with(:the_args).and_return(:see_i_told_you_so)
expect(subject.to_s(:the_args)).to eq(:see_i_told_you_so)
end
end
describe '#titleize' do
it "returns titleized version of task name when no entity" do
expect(subject.titleize).to eq('Go To Work')
end
it "returns titleized version of task name with entity" do
subject[:entity_id] = '45'
subject[:entity_type] = 'RhubarbSandwich'
expect(subject.titleize).to eq('Go To Work: Rhubarb Sandwich 45')
end
it "returns titleized version of task name without entity if requested" do
subject[:entity_id] = '45'
subject[:entity_type] = 'RhubarbSandwich'
expect(subject.titleize(:entity => false)).to eq('Go To Work')
end
end
# Per-instance scratch space that is not persisted to the workitem.
describe '#temporary_storage' do
it 'returns an empty hash by default' do
task = described_class.new(workflow_item)
expect(task.temporary_storage).to eq({})
end
it 'persists stored values' do
task = described_class.new(workflow_item)
task.temporary_storage[:foo] = :bar
expect(task.temporary_storage[:foo]).to eq(:bar)
end
end
it 'has a CompletionFailed error class' do
expect(described_class::CompletionFailed.new).to be_a(StandardError)
end
end
# More test refactoring from rebase
describe Bumbleworks::Task do
# Shared setup: the subject wraps a minimal Ruote::Workitem whose only
# param is :task => 'go_to_work'; a worker is started for integration specs.
subject { described_class.new(workflow_item) }
let(:workflow_item) {
Ruote::Workitem.new({
'fields' => {
'params' => {'task' => 'go_to_work'},
'dispatched_at' => 'some time ago'
}
})
}
before :each do
Bumbleworks::Ruote.register_participants
Bumbleworks.start_worker!
end
# Shared-example groups exercised against this class.
it_behaves_like "an entity holder" do
let(:holder) { described_class.new(workflow_item) }
let(:storage_workitem) { Bumbleworks::Workitem.new(workflow_item) }
end
it_behaves_like "comparable" do
let(:other) { described_class.new(workflow_item) }
before(:each) do
# Comparison is sid-based, so both instances need a stable sid.
allow(workflow_item).to receive(:sid).and_return('blah-123-blah')
end
end
describe '#not_completable_error_message' do
it 'defaults to generic message' do
task = described_class.new(workflow_item)
expect(task.not_completable_error_message).to eq(
"This task is not currently completable."
)
end
end
# .autoload_all registers an Object.autoload for every *_task.rb under the
# configured tasks directory.
describe '.autoload_all' do
it 'autoloads all task modules in directory' do
Bumbleworks.root = File.join(fixtures_path, 'apps', 'with_default_directories')
expect(Object).to receive(:autoload).with(:MakeSomeHoneyTask,
File.join(Bumbleworks.root, 'tasks', 'make_some_honey_task.rb'))
expect(Object).to receive(:autoload).with(:TasteThatMolassesTask,
File.join(Bumbleworks.root, 'tasks', 'taste_that_molasses_task.rb'))
described_class.autoload_all
end
it 'does nothing if using default path and directory does not exist' do
Bumbleworks.root = File.join(fixtures_path, 'apps', 'minimal')
described_class.autoload_all
end
it 'raises exception if using custom path and participants file does not exist' do
# Explicitly-configured directories are required; defaults are optional.
Bumbleworks.root = File.join(fixtures_path, 'apps', 'minimal')
Bumbleworks.tasks_directory = 'oysters'
expect {
described_class.autoload_all
}.to raise_error(Bumbleworks::InvalidSetting)
end
end
describe '#dispatched_at' do
it 'returns dispatched_at timestamp from workitem' do
expect(subject.dispatched_at).to eq 'some time ago'
end
end
describe '#completable?' do
it 'defaults to true on base task' do
expect(subject).to be_completable
end
end
# Constructor requires a real Ruote::Workitem and extends the instance with
# its nickname-matched task module.
describe '.new' do
it 'raises an error if workitem is nil' do
expect {
described_class.new(nil)
}.to raise_error(ArgumentError, "Not a valid workitem")
end
it 'raises an error if workitem not a Ruote::Workitem' do
expect {
described_class.new('a string!')
}.to raise_error(ArgumentError, "Not a valid workitem")
end
it 'succeeds when given workitem' do
expect {
described_class.new(workflow_item)
}.not_to raise_error
end
it 'extends new object with task module' do
expect_any_instance_of(described_class).to receive(:extend_module)
described_class.new(workflow_item)
end
end
describe '#reload' do
it 'reloads the workitem from the storage participant' do
allow(subject).to receive(:sid).and_return(:the_sid)
expect(Bumbleworks.dashboard.storage_participant).to receive(
:[]).with(:the_sid).and_return(:amazing_workitem)
subject.reload
# White-box check that the cached workitem was replaced.
expect(subject.instance_variable_get(:@workitem)).to eq(:amazing_workitem)
end
end
# Generates one spec per hook phase: call_before_hooks/call_after_hooks must
# fan the event out to the task itself and every registered observer.
[:before, :after].each do |phase|
describe "#call_#{phase}_hooks" do
it "calls #{phase} hooks on task and all observers" do
observer1, observer2 = double('observer1'), double('observer2')
Bumbleworks.observers = [observer1, observer2]
expect(subject).to receive(:"#{phase}_snoogle").with(:chachunga, :faloop)
expect(observer1).to receive(:"#{phase}_snoogle").with(:chachunga, :faloop)
expect(observer2).to receive(:"#{phase}_snoogle").with(:chachunga, :faloop)
subject.send(:"call_#{phase}_hooks", :snoogle, :chachunga, :faloop)
end
end
end
describe '#on_dispatch' do
it 'logs dispatch' do
expect(subject).to receive(:log).with(:dispatch)
subject.on_dispatch
end
it 'calls after hooks' do
allow(subject).to receive(:log)
expect(subject).to receive(:call_after_hooks).with(:dispatch)
subject.on_dispatch
end
end
# #extend_module mixes in Task::Base plus, when it resolves, a module named
# after the nickname ("go_to_work" -> GoToWorkTask).
describe '#extend_module' do
it 'extends with base module and task module' do
expect(subject).to receive(:task_module).and_return(:task_module_double)
expect(subject).to receive(:extend).with(Bumbleworks::Task::Base).ordered
expect(subject).to receive(:extend).with(:task_module_double).ordered
subject.extend_module
end
it 'extends only with base module if no nickname' do
allow(subject).to receive(:nickname).and_return(nil)
expect(subject).to receive(:extend).with(Bumbleworks::Task::Base)
subject.extend_module
end
it 'extends only with base module if task module does not exist' do
expect(subject).to receive(:extend).with(Bumbleworks::Task::Base)
subject.extend_module
end
end
describe '#task_module' do
it 'returns nil if no nickname' do
allow(subject).to receive(:nickname).and_return(nil)
expect(subject.task_module).to be_nil
end
it 'returns constantized task nickname with "Task" appended' do
subject
allow(Bumbleworks::Support).to receive(:constantize).with("GoToWorkTask").and_return(:the_task_module)
expect(subject.task_module).to eq(:the_task_module)
end
end
describe '#id' do
it 'returns the sid from the workitem' do
allow(workflow_item).to receive(:sid).and_return(:an_exciting_id)
expect(subject.id).to eq(:an_exciting_id)
end
end
# .find_by_id looks a task up by its sid (flow expression id); any failure
# to resolve raises MissingWorkitem.
describe '.find_by_id' do
it 'returns the task for the given id' do
Bumbleworks.define_process 'planting_a_noodle' do
concurrence do
noodle_gardener :task => 'plant_noodle_seed'
horse_feeder :task => 'give_the_horse_a_bon_bon'
end
end
Bumbleworks.launch!('planting_a_noodle')
Bumbleworks.dashboard.wait_for(:horse_feeder)
plant_noodle_seed_task = described_class.for_role('noodle_gardener').first
give_the_horse_a_bon_bon_task = described_class.for_role('horse_feeder').first
# checking for equality by comparing sid, which is the flow expression id
# that identifies not only the expression, but its instance
expect(described_class.find_by_id(plant_noodle_seed_task.id).sid).to eq(
plant_noodle_seed_task.sid
)
expect(described_class.find_by_id(give_the_horse_a_bon_bon_task.id).sid).to eq(
give_the_horse_a_bon_bon_task.sid
)
end
it 'raises an error if id is nil' do
expect {
described_class.find_by_id(nil)
}.to raise_error(described_class::MissingWorkitem)
end
it 'raises an error if workitem not found for given id' do
expect {
described_class.find_by_id('asdfasdf')
}.to raise_error(described_class::MissingWorkitem)
end
it 'raises an error if id is unparseable by storage participant' do
expect {
described_class.find_by_id(:unparseable_because_i_am_a_symbol)
}.to raise_error(described_class::MissingWorkitem)
end
end
# Sorting finders: order_by_param(s) sorts on task params within one launch;
# order_by_field(s) sorts on workitem fields across multiple launches.
context 'ordering' do
before :each do
Bumbleworks.define_process 'emergency_hamster_bullet' do
concurrence do
doctor :task => 'evince_concern', :priority => 3, :importance => 1000
patient :task => 'panic', :priority => 2, :importance => 5
nurse :task => 'roll_eyes', :priority => 4, :importance => 1000
officer :task => 'appear_authoritative', :priority => 1, :importance => 1000
rhubarb :task => 'sit_quietly', :importance => 80
end
end
end
context 'by params' do
before(:each) do
Bumbleworks.launch!('emergency_hamster_bullet')
Bumbleworks.dashboard.wait_for(:rhubarb)
end
describe '.order_by_param' do
it 'orders returned tasks by given param ascending by default' do
# sit_quietly has no :priority; it sorts last in ascending order.
tasks = described_class.order_by_param(:priority)
expect(tasks.map(&:nickname)).to eq([
'appear_authoritative',
'panic',
'evince_concern',
'roll_eyes',
'sit_quietly'
])
end
it 'can order in reverse' do
tasks = described_class.order_by_param(:priority, :desc)
expect(tasks.map(&:nickname)).to eq([
'sit_quietly',
'roll_eyes',
'evince_concern',
'panic',
'appear_authoritative'
])
end
end
describe '.order_by_params' do
it 'orders by multiple parameters' do
# Primary key :importance desc, ties broken by :priority asc.
tasks = described_class.order_by_params(:importance => :desc, :priority => :asc)
expect(tasks.map(&:nickname)).to eq([
'appear_authoritative',
'evince_concern',
'roll_eyes',
'sit_quietly',
'panic'
])
end
end
end
context 'by fields' do
before(:each) do
# Five launches with differing :group/:strength fields; wait_for after
# each launch keeps creation order deterministic.
@wf3 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 2, :strength => 3)
Bumbleworks.dashboard.wait_for(:officer)
@wf1 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 2, :strength => 1)
Bumbleworks.dashboard.wait_for(:officer)
@wf2 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 1, :strength => 2)
Bumbleworks.dashboard.wait_for(:officer)
@wf4 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 1, :strength => 4)
Bumbleworks.dashboard.wait_for(:officer)
@wf5 = Bumbleworks.launch!('emergency_hamster_bullet', :group => 1)
Bumbleworks.dashboard.wait_for(:officer)
end
describe '.order_by_field' do
it 'orders returned tasks by given param ascending by default' do
tasks = described_class.for_role('doctor').order_by_field(:strength)
expect(tasks.map { |t| [t.nickname, t.wfid] }).to eq([
['evince_concern', @wf1.wfid],
['evince_concern', @wf2.wfid],
['evince_concern', @wf3.wfid],
['evince_concern', @wf4.wfid],
['evince_concern', @wf5.wfid]
])
end
it 'can order in reverse' do
tasks = described_class.for_role('doctor').order_by_field(:strength, :desc)
expect(tasks.map { |t| [t.nickname, t.wfid] }).to eq([
['evince_concern', @wf5.wfid],
['evince_concern', @wf4.wfid],
['evince_concern', @wf3.wfid],
['evince_concern', @wf2.wfid],
['evince_concern', @wf1.wfid]
])
end
end
describe '.order_by_fields' do
it 'orders by multiple parameters' do
tasks = described_class.for_role('doctor').order_by_fields(:group => :asc, :strength => :desc)
expect(tasks.map { |t| [t.nickname, t.wfid] }).to eq([
['evince_concern', @wf5.wfid],
['evince_concern', @wf4.wfid],
['evince_concern', @wf2.wfid],
['evince_concern', @wf3.wfid],
['evince_concern', @wf1.wfid]
])
end
end
end
end
# .for_roles returns tasks for any of the given roles (string or symbol);
# nil/empty input yields an empty result instead of raising.
describe '.for_roles' do
before :each do
Bumbleworks.define_process 'lowering_penguin_self_esteem' do
concurrence do
heckler :task => 'comment_on_dancing_ability'
# Not a task: no :task param, so it never appears in finder results.
mother :oh_no => 'this_is_not_a_task'
mother :task => 'ignore_pleas_for_attention'
father :task => 'sit_around_watching_penguin_tv'
end
end
Bumbleworks.launch!('lowering_penguin_self_esteem')
end
it 'returns tasks for all given roles' do
Bumbleworks.dashboard.wait_for(:father)
tasks = described_class.for_roles(['heckler', 'mother'])
expect(tasks.map(&:nickname)).to eq([
'comment_on_dancing_ability',
'ignore_pleas_for_attention'
])
end
it 'works with symbolized role names' do
Bumbleworks.dashboard.wait_for(:father)
tasks = described_class.for_roles([:heckler, :mother])
expect(tasks.map(&:nickname)).to eq([
'comment_on_dancing_ability',
'ignore_pleas_for_attention'
])
end
it 'returns empty array if no tasks found for given roles' do
Bumbleworks.dashboard.wait_for(:father)
expect(described_class.for_roles(['elephant'])).to be_empty
end
it 'returns empty array if given empty array' do
Bumbleworks.dashboard.wait_for(:father)
expect(described_class.for_roles([])).to be_empty
end
it 'returns empty array if given nil' do
Bumbleworks.dashboard.wait_for(:father)
expect(described_class.for_roles(nil)).to be_empty
end
end
# .for_processes filters by process instance, accepting either process
# objects or raw process ids.
describe '.for_processes' do
before :each do
Bumbleworks.define_process 'spunking' do
concurrence do
spunker :task => 'spunk'
nonspunker :task => 'complain'
end
end
Bumbleworks.define_process 'rooting' do
concurrence do
rooter :task => 'get_the_rooting_on'
armchair_critic :task => 'scoff'
end
end
@spunking_process = Bumbleworks.launch!('spunking')
@rooting_process_1 = Bumbleworks.launch!('rooting')
@rooting_process_2 = Bumbleworks.launch!('rooting')
Bumbleworks.dashboard.wait_for(:armchair_critic)
end
it 'returns tasks for given processes' do
spunking_tasks = described_class.for_processes([@spunking_process])
rooting_tasks = described_class.for_processes([@rooting_process_1])
tasks_for_both = described_class.for_processes([@spunking_process, @rooting_process_1])
expect(spunking_tasks.map(&:nickname)).to match_array(['spunk', 'complain'])
expect(rooting_tasks.map(&:nickname)).to match_array(['get_the_rooting_on', 'scoff'])
expect(tasks_for_both.map(&:nickname)).to match_array(['spunk', 'complain', 'get_the_rooting_on', 'scoff'])
end
it 'works with process ids as well' do
spunking_tasks = described_class.for_processes([@spunking_process.id])
expect(spunking_tasks.map(&:nickname)).to match_array(['spunk', 'complain'])
end
it 'returns empty array when no tasks for given process id' do
expect(described_class.for_processes(['boop'])).to be_empty
end
it 'returns empty array if given empty array' do
expect(described_class.for_processes([])).to be_empty
end
it 'returns empty array if given nil' do
expect(described_class.for_processes(nil)).to be_empty
end
end
describe '.for_process' do
it 'acts as shortcut to .for_processes with one process' do
allow_any_instance_of(described_class::Finder).to receive(:for_processes).with([:one_guy]).and_return(:aha)
expect(described_class.for_process(:one_guy)).to eq(:aha)
end
end
describe '.for_role' do
it 'returns all tasks for given role' do
Bumbleworks.define_process 'chalking' do
concurrence do
chalker :task => 'make_chalk_drawings'
chalker :task => 'chalk_it_good_baby'
hagrid :task => 'moan_endearingly'
end
end
Bumbleworks.launch!('chalking')
Bumbleworks.dashboard.wait_for(:hagrid)
tasks = described_class.for_role('chalker')
expect(tasks.map(&:nickname)).to eq([
'make_chalk_drawings',
'chalk_it_good_baby'
])
end
end
# .unclaimed / .claimed partition the queue by claimant presence.
describe '.unclaimed' do
it 'returns all unclaimed tasks' do
Bumbleworks.define_process 'dog-lifecycle' do
concurrence do
dog :task => 'eat'
dog :task => 'bark'
dog :task => 'pet_dog'
cat :task => 'skip_and_jump'
end
dog :task => 'nap'
end
Bumbleworks.launch!('dog-lifecycle')
Bumbleworks.dashboard.wait_for(:cat)
@unclaimed = described_class.unclaimed
expect(@unclaimed.map(&:nickname)).to match_array(['eat', 'bark', 'pet_dog', 'skip_and_jump'])
described_class.all.each do |t|
t.claim('radish') unless ['pet_dog', 'bark'].include?(t.nickname)
end
@unclaimed = described_class.unclaimed
expect(@unclaimed.map(&:nickname)).to match_array(['pet_dog', 'bark'])
end
end
describe '.claimed' do
it 'returns all claimed tasks' do
Bumbleworks.define_process 'dog-lifecycle' do
concurrence do
dog :task => 'eat'
dog :task => 'bark'
dog :task => 'pet_dog'
cat :task => 'skip_and_jump'
end
dog :task => 'nap'
end
Bumbleworks.launch!('dog-lifecycle')
Bumbleworks.dashboard.wait_for(:cat)
expect(described_class.claimed).to be_empty
# Distinct claimant tokens per task; .claimed must match all of them.
described_class.all.each_with_index do |t, i|
t.claim("radish_#{i}") unless ['pet_dog', 'bark'].include?(t.nickname)
end
@claimed = described_class.claimed
expect(@claimed.map(&:nickname)).to match_array(['eat', 'skip_and_jump'])
end
end
# .completable filters on each task's completable? predicate; passing false
# inverts the filter. WuggleHandsTask is picked up via nickname-module
# extension to make one task report not-completable.
describe '.completable' do
it 'filters by completability' do
module WuggleHandsTask
def completable?
false
end
end
Bumbleworks.define_process 'hand_waggling' do
concurrence do
a_fella :task => 'waggle_hands'
a_monkey :task => 'wuggle_hands'
a_lady :task => 'wiggle_hands'
end
end
Bumbleworks.launch!('hand_waggling')
Bumbleworks.dashboard.wait_for(:a_lady)
tasks = described_class.completable
expect(tasks.map { |t| [t.role, t.nickname] }).to eq([
['a_fella', 'waggle_hands'],
['a_lady', 'wiggle_hands']
])
tasks = described_class.completable(false)
expect(tasks.map { |t| [t.role, t.nickname] }).to eq([
['a_monkey', 'wuggle_hands']
])
end
end
# Enumerable surface of the task finder: .each and .map iterate over all
# current tasks.
context 'iterators' do
before :each do
Bumbleworks.define_process 'life_on_tha_street' do
concurrence do
oscar :task => 'grouch_it_up'
elmo :task => 'sing_a_tune'
elmo :task => 'steal_booze'
snuffy :task => 'eat_cabbage'
end
end
Bumbleworks.launch!('life_on_tha_street')
Bumbleworks.dashboard.wait_for(:snuffy)
end
describe '.each' do
it 'executes for each found task' do
list = []
described_class.each { |t| list << t.nickname }
expect(list).to match_array(['grouch_it_up', 'sing_a_tune', 'steal_booze', 'eat_cabbage'])
end
end
describe '.map' do
it 'maps result of yielding block with each task' do
list = described_class.map { |t| t.nickname }
expect(list).to match_array(['grouch_it_up', 'sing_a_tune', 'steal_booze', 'eat_cabbage'])
end
end
context 'with queries' do
it 'checks filters' do
list = described_class.for_role('elmo').map { |t| t.nickname }
expect(list).to match_array(['sing_a_tune', 'steal_booze'])
end
end
end
# .all returns every queued task that carries a :task param.
describe '.all' do
before :each do
Bumbleworks.define_process 'dog-lifecycle' do
concurrence do
dog_teeth :task => 'eat'
dog_mouth :task => 'bark'
everyone :task => 'pet_dog'
the_universe_is_wonderful
dog_legs :task => 'skip_and_jump'
end
dog_brain :task => 'nap'
end
Bumbleworks.launch!('dog-lifecycle')
end
it 'returns all tasks (with task param) in queue regardless of role' do
Bumbleworks.dashboard.wait_for(:dog_legs)
tasks = described_class.all
expect(tasks.map { |t| [t.role, t.nickname] }).to eq([
['dog_teeth', 'eat'],
['dog_mouth', 'bark'],
['everyone', 'pet_dog'],
['dog_legs', 'skip_and_jump']
])
end
it 'uses subclass for generation of tasks' do
# Temporary subclass; constant removed again to avoid leakage.
class MyOwnTask < Bumbleworks::Task; end
Bumbleworks.dashboard.wait_for(:dog_legs)
tasks = MyOwnTask.all
expect(tasks).to be_all { |t| t.class == MyOwnTask }
Object.send(:remove_const, :MyOwnTask)
end
end
# Specs for the Hash-style accessors, which read and write workitem fields.
describe '#[], #[]=' do
  it 'sets values on workitem fields' do
    subject['hive'] = 'bees at work'
    expect(workflow_item.fields['hive']).to eq('bees at work')
  end

  # Fixed typo in the example description ("retuns" -> "returns").
  # NOTE(review): the description says "params" but the example reads a
  # workitem *field* via #[] — presumably #[] delegates to fields; confirm
  # and rename the description accordingly.
  it 'returns value from workitem params' do
    workflow_item.fields['nest'] = 'queen resting'
    expect(subject['nest']).to eq('queen resting')
  end
end
# Specs for #nickname (the 'task' param captured at creation) and #role
# (the workitem participant name).
describe '#nickname' do
it 'returns the "task" param' do
expect(subject.nickname).to eq('go_to_work')
end
# Mutating params after the task is built must not change the nickname.
it 'is immutable; cannot be changed by modifying the param' do
expect(subject.nickname).to eq('go_to_work')
subject.params['task'] = 'what_is_wrong_with_you?'
expect(subject.nickname).to eq('go_to_work')
end
end
describe '#role' do
it 'returns the workitem participant_name' do
Bumbleworks.define_process 'planting_a_noodle' do
noodle_gardener :task => 'plant_noodle_seed'
end
Bumbleworks.launch!('planting_a_noodle')
Bumbleworks.dashboard.wait_for(:noodle_gardener)
expect(described_class.all.first.role).to eq('noodle_gardener')
end
end
# Specs for Task.for_claimant: filtering tasks by claim token.
describe '.for_claimant' do
it 'returns all tasks claimed by given claimant' do
Bumbleworks.define_process 'dog-lifecycle' do
concurrence do
dog :task => 'eat'
dog :task => 'bark'
dog :task => 'pet_dog'
the_universe_is_wonderful
cat :task => 'skip_and_jump'
end
dog :task => 'nap'
end
Bumbleworks.launch!('dog-lifecycle')
Bumbleworks.dashboard.wait_for(:cat)
# Before anything is claimed, the filter finds nothing.
expect(described_class.for_claimant('radish')).to be_empty
# Claim everything except 'pet_dog' so the filter has both matches and a
# non-match to distinguish.
described_class.all.each do |t|
t.claim('radish') unless t.nickname == 'pet_dog'
end
@tasks = described_class.for_claimant('radish')
expect(@tasks.map(&:nickname)).to match_array(['eat', 'bark', 'skip_and_jump'])
end
end
# Specs for Task.with_fields: filtering tasks by workitem field values.
# The process below diverges so that 'bumby' has a different value on each
# branch while 'grumbles' (a launch field) is shared by all tasks.
describe '.with_fields' do
before(:each) do
Bumbleworks.define_process 'divergination' do
concurrence do
sequence do
set 'bumby' => 'fancy'
bumber :task => 'wear_monocle'
end
sequence do
set 'bumby' => 'not_fancy'
concurrence do
bumber :task => 'wear_natties'
loofer :task => 'snuffle'
end
end
end
end
Bumbleworks.launch!('divergination', :grumbles => true)
Bumbleworks.dashboard.wait_for(:loofer)
end
it 'returns all tasks with given field' do
expect(described_class.with_fields(:grumbles => true).count).to eq(3)
expect(described_class.with_fields(:bumby => 'fancy').count).to eq(1)
expect(described_class.with_fields(:bumby => 'not_fancy').count).to eq(2)
expect(described_class.with_fields(:what => 'ever')).to be_empty
end
# Multiple keys in a single call are AND-ed together.
it 'looks up multiple fields at once' do
expect(described_class.with_fields(:grumbles => true, :bumby => 'not_fancy').count).to eq(2)
expect(described_class.with_fields(:grumbles => false, :bumby => 'not_fancy')).to be_empty
end
# Chained calls are also AND-ed.
it 'can be chained' do
expect(described_class.with_fields(:grumbles => true).with_fields(:bumby => 'fancy').count).to eq(1)
expect(described_class.with_fields(:grumbles => false).with_fields(:bumby => 'not_fancy')).to be_empty
end
end
# Specs for Task.for_entity (tasks launched with an :entity) and
# Task.by_nickname (tasks sharing a 'task' param value).
describe '.for_entity' do
it 'returns all tasks associated with given entity' do
# OpenStruct stands in for an entity object; presumably only #identifier
# is consulted when matching — TODO confirm against Task.for_entity.
fake_sandwich = OpenStruct.new(:identifier => 'rubies')
Bumbleworks.define_process 'existential_pb_and_j' do
concurrence do
sandwich :task => 'be_made'
sandwich :task => 'contemplate_being'
end
end
Bumbleworks.launch!('existential_pb_and_j', :entity => fake_sandwich)
Bumbleworks.dashboard.wait_for(:sandwich)
tasks = described_class.for_entity(fake_sandwich)
expect(tasks.size).to eq(2)
end
end
context '.by_nickname' do
it 'returns all tasks with given nickname' do
Bumbleworks.define_process 'animal_disagreements' do
concurrence do
turtle :task => 'be_a_big_jerk'
goose :task => 'punch_turtle'
rabbit :task => 'punch_turtle'
end
end
Bumbleworks.launch!('animal_disagreements')
Bumbleworks.dashboard.wait_for(:rabbit)
tasks = described_class.by_nickname('punch_turtle')
expect(tasks.map(&:role)).to match_array(['goose', 'rabbit'])
end
end
# Specs for the claiming lifecycle: #claim, #claimant, #claimed_at,
# #claimed?, and #release. A single task is created and claimed by 'boss'
# before each example. The callback examples use .ordered mock
# expectations, so the before/action/after sequence is part of the
# contract being verified.
context 'claiming things' do
before :each do
Bumbleworks.define_process 'planting_a_noodle' do
noodle_gardener :task => 'plant_noodle_seed'
end
Bumbleworks.launch!('planting_a_noodle')
Bumbleworks.dashboard.wait_for(:noodle_gardener)
@task = described_class.for_role('noodle_gardener').first
@task.claim('boss')
end
describe '#claim' do
it 'sets token on "claimant" param' do
expect(@task.params['claimant']).to eq('boss')
end
it 'sets claimed_at param' do
expect(@task.params['claimed_at']).not_to be_nil
end
it 'raises an error if already claimed by someone else' do
expect{@task.claim('peon')}.to raise_error described_class::AlreadyClaimed
end
# Re-claiming with the same token is a no-op, not an error.
it 'does not raise an error if attempting to claim by same token' do
expect{@task.claim('boss')}.not_to raise_error
end
# Callbacks must wrap set_claimant in before/after order.
it 'calls before_claim and after_claim callbacks' do
allow(subject).to receive(:log)
expect(subject).to receive(:before_claim).with(:doctor_claim).ordered
expect(subject).to receive(:set_claimant).ordered
expect(subject).to receive(:after_claim).with(:doctor_claim).ordered
subject.claim(:doctor_claim)
end
it 'skips callbacks if requested' do
allow(subject).to receive(:log)
expect(subject).to receive(:before_claim).never
expect(subject).to receive(:set_claimant)
expect(subject).to receive(:after_claim).never
subject.claim(:doctor_claim, :skip_callbacks => true)
end
# The claim performed in the before block is expected to have logged.
it 'logs event' do
log_entry = Bumbleworks.logger.entries.last[:entry]
expect(log_entry[:action]).to eq(:claim)
expect(log_entry[:actor]).to eq('boss')
end
end
describe '#claimant' do
it 'returns token of who has claim' do
expect(@task.claimant).to eq('boss')
end
end
describe '#claimed_at' do
it 'returns claimed_at param' do
expect(@task.claimed_at).to eq(@task.params['claimed_at'])
end
end
describe '#claimed?' do
it 'returns true if claimed' do
expect(@task.claimed?).to be_truthy
end
it 'false otherwise' do
@task.params['claimant'] = nil
expect(@task.claimed?).to be_falsy
end
end
describe '#release' do
it "release claim on workitem" do
expect(@task).to be_claimed
@task.release
expect(@task).not_to be_claimed
end
it 'clears claimed_at param' do
@task.release
expect(@task.params['claimed_at']).to be_nil
end
# Release goes through the generic hook mechanism with the current
# claimant ('boss') as the hook argument.
it 'calls with hooks' do
expect(@task).to receive(:call_before_hooks).with(:release, 'boss').ordered
expect(@task).to receive(:set_claimant).ordered
expect(@task).to receive(:call_after_hooks).with(:release, 'boss').ordered
@task.release
end
it 'skips callbacks if requested' do
expect(@task).to receive(:call_before_hooks).never
expect(@task).to receive(:set_claimant)
expect(@task).to receive(:call_after_hooks).never
@task.release(:skip_callbacks => true)
end
it 'logs event' do
@task.release
log_entry = Bumbleworks.logger.entries.last[:entry]
expect(log_entry[:action]).to eq(:release)
expect(log_entry[:actor]).to eq('boss')
end
end
end
# Specs for #update (persist fields/params without advancing the process)
# and #complete (persist and proceed to the next expression). The hook
# examples use .ordered expectations; #complete's hooks must nest inside
# #update's hooks.
context 'updating workflow engine' do
before :each do
Bumbleworks.define_process 'dog-lifecycle' do
dog_mouth :task => 'eat_dinner', :state => 'still cooking'
dog_brain :task => 'cat_nap', :by => 'midnight'
end
Bumbleworks.launch!('dog-lifecycle')
end
describe '#update' do
it 'saves fields and params, but does not proceed process' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
task.params['state'] = 'is ready'
task.fields['meal'] = 'salted_rhubarb'
task.update
# Re-fetch to prove the changes were persisted, not just in memory.
task = described_class.for_role('dog_mouth').first
expect(task.params['state']).to eq('is ready')
expect(task.fields['meal']).to eq('salted_rhubarb')
end
it 'calls with hooks' do
allow(subject).to receive(:log)
expect(subject).to receive(:call_before_hooks).with(:update, :argue_mints).ordered
expect(subject).to receive(:update_workitem).ordered
expect(subject).to receive(:call_after_hooks).with(:update, :argue_mints).ordered
subject.update(:argue_mints)
end
it 'skips callbacks if requested' do
allow(subject).to receive(:log)
expect(subject).to receive(:call_before_hooks).never
expect(subject).to receive(:update_workitem)
expect(subject).to receive(:call_after_hooks).never
subject.update({:actual => :params}, {:skip_callbacks => true})
end
it 'reloads after updating workitem' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
allow(task).to receive(:log)
expect(described_class.storage_participant).to receive(:update).with(task.workitem).ordered
expect(task).to receive(:reload).ordered
task.update(:noofles)
end
it 'logs event' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
task.params['claimant'] = :some_user
task.update(:extra_data => :fancy)
expect(Bumbleworks.logger.entries.last).to eq({
:level => :info, :entry => {
:actor => "some_user", # claimant is a string after #reload
:action => :update,
:target_type => 'Task',
:target_id => task.id,
:metadata => {
:extra_data => :fancy,
:current_fields => task.fields
}
}
})
end
end
describe '#complete' do
it 'saves fields and proceeds to next expression' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
task.params['state'] = 'is ready'
task.fields['meal'] = 'root beer and a kite'
task.complete
expect(described_class.for_role('dog_mouth')).to be_empty
# Fields carry over to the next task; params do not.
event = Bumbleworks.dashboard.wait_for :dog_brain
task = described_class.for_role('dog_brain').first
expect(task.params['state']).to be_nil
expect(task.fields['meal']).to eq('root beer and a kite')
end
# When completable? is false, no callbacks fire, the workitem does not
# proceed, and NotCompletable carries the custom message.
it 'throws exception if task is not completable' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
allow(task).to receive(:completable?).and_return(false)
allow(task).to receive(:not_completable_error_message).and_return('hogwash!')
expect(task).to receive(:before_update).never
expect(task).to receive(:before_complete).never
expect(task).to receive(:proceed_workitem).never
expect(task).to receive(:after_complete).never
expect(task).to receive(:after_update).never
expect {
task.complete
}.to raise_error(Bumbleworks::Task::NotCompletable, "hogwash!")
expect(described_class.for_role('dog_mouth')).not_to be_empty
end
# Hook nesting: before_update, before_complete, proceed, after_complete,
# after_update.
it 'calls update and complete callbacks' do
allow(subject).to receive(:log)
expect(subject).to receive(:call_before_hooks).with(:update, :argue_mints).ordered
expect(subject).to receive(:call_before_hooks).with(:complete, :argue_mints).ordered
expect(subject).to receive(:proceed_workitem).ordered
expect(subject).to receive(:call_after_hooks).with(:complete, :argue_mints).ordered
expect(subject).to receive(:call_after_hooks).with(:update, :argue_mints).ordered
subject.complete(:argue_mints)
end
it 'skips callbacks if requested' do
allow(subject).to receive(:log)
expect(subject).to receive(:call_before_hooks).never
expect(subject).to receive(:proceed_workitem)
expect(subject).to receive(:call_after_hooks).never
subject.complete({:actual => :params}, {:skip_callbacks => true})
end
it 'logs event' do
event = Bumbleworks.dashboard.wait_for :dog_mouth
task = described_class.for_role('dog_mouth').first
task.params['claimant'] = :some_user
task.complete(:extra_data => :fancy)
expect(Bumbleworks.logger.entries.last).to eq({
:level => :info, :entry => {
:actor => :some_user,
:action => :complete,
:target_type => 'Task',
:target_id => task.id,
:metadata => {
:extra_data => :fancy,
:current_fields => task.fields
}
}
})
end
end
end
# Specs for combining finder methods: chained calls AND by default,
# where_any switches to OR semantics.
describe 'chained queries' do
before(:each) do
# NOTE(review): BeProudTask is defined at the top level and never
# removed, so it leaks across examples; presumably it is mixed into
# tasks by role-module lookup so only 'pink' tasks are completable —
# TODO confirm against the Task module-loading behavior.
module BeProudTask
def completable?
role == 'pink'
end
end
Bumbleworks.define_process 'the_big_kachunko' do
concurrence do
red :task => 'be_really_mad'
blue :task => 'be_a_bit_sad'
yellow :task => 'be_scared'
green :task => 'be_envious'
green :task => 'be_proud'
pink :task => 'be_proud'
end
end
Bumbleworks.launch!('the_big_kachunko')
Bumbleworks.dashboard.wait_for(:pink)
described_class.by_nickname('be_really_mad').first.claim('crayon_box')
described_class.by_nickname('be_a_bit_sad').first.claim('crayon_box')
described_class.by_nickname('be_scared').first.claim('crayon_box')
end
it 'allows for AND-ed chained finders' do
tasks = described_class.
for_roles(['green', 'pink']).
by_nickname('be_proud')
expect(tasks.map(&:nickname)).to match_array(['be_proud', 'be_proud'])
tasks = described_class.
for_roles(['green', 'pink', 'blue']).
completable.
by_nickname('be_proud')
expect(tasks.map(&:nickname)).to match_array(['be_proud'])
expect(tasks.first.role).to eq('pink')
tasks = described_class.
for_claimant('crayon_box').
for_roles(['red', 'yellow', 'green'])
expect(tasks.map(&:nickname)).to match_array(['be_really_mad', 'be_scared'])
tasks = described_class.
for_claimant('crayon_box').
by_nickname('be_a_bit_sad').
for_role('blue')
expect(tasks.map(&:nickname)).to eq(['be_a_bit_sad'])
end
it 'allows for OR-ed chained finders' do
tasks = described_class.where_any.
for_role('blue').
by_nickname('be_proud')
expect(tasks.map(&:nickname)).to match_array(['be_a_bit_sad', 'be_proud', 'be_proud'])
tasks = described_class.where_any.
completable.
claimed
expect(tasks.map(&:nickname)).to match_array(['be_really_mad', 'be_scared', 'be_a_bit_sad', 'be_envious', 'be_proud'])
end
it 'allows for combination of AND-ed and OR-ed finders' do
tasks = described_class.
for_claimant('crayon_box').
for_roles(['red', 'yellow', 'green']).
where_any(:nickname => 'spittle', :role => 'red')
expect(tasks.map(&:nickname)).to match_array(['be_really_mad'])
end
end
# Specs for class-level method_missing: unknown class methods are first
# tried against a new Finder instance, then fall through to the default
# NoMethodError behavior.
describe 'method missing' do
  it 'calls method on new Finder object' do
    allow_any_instance_of(described_class::Finder).to receive(:shabam!).with(:yay).and_return(:its_a_me)
    expect(described_class.shabam!(:yay)).to eq(:its_a_me)
  end

  it 'falls back to method missing if no finder method' do
    # Pin the expected error class: a bare raise_error matcher matches any
    # StandardError (and triggers a warning in recent RSpec versions).
    expect {
      described_class.kerplunk!(:oh_no)
    }.to raise_error(NoMethodError)
  end
end
# Specs for Finder#next_available: blocking until a matching task appears,
# with an optional timeout.
describe '.next_available' do
it 'waits for one task to show up and returns it' do
Bumbleworks.define_process "lazy_bum_and_cool_guy" do
concurrence do
cool_guy :task => 'get_it_going_man'
sequence do
wait '2s'
bum :task => 'finally_get_a_round_tuit'
end
end
end
start_time = Time.now
Bumbleworks.launch!('lazy_bum_and_cool_guy')
task = described_class.for_role('bum').next_available
end_time = Time.now
expect(task.nickname).to eq('finally_get_a_round_tuit')
# The 'bum' task is gated behind a 2s wait, so next_available must have
# blocked at least that long.
expect(end_time - start_time).to be >= 2
end
it 'times out if task does not appear in time' do
Bumbleworks.define_process "really_lazy_bum_and_cool_guy" do
concurrence do
cool_guy :task => 'good_golly_never_mind_you'
sequence do
wait '2s'
bum :task => 'whatever_these_socks_are_tasty'
end
end
end
Bumbleworks.launch!('really_lazy_bum_and_cool_guy')
# 0.5s timeout expires well before the 2s wait releases the task.
expect {
described_class.for_role('bum').next_available(:timeout => 0.5)
}.to raise_error(Bumbleworks::Task::AvailabilityTimeout)
end
end
# Specs for the display helpers: #humanize, #to_s (alias of #titleize),
# and #titleize, each with and without entity information.
describe '#humanize' do
it "returns humanized version of task name when no entity" do
expect(subject.humanize).to eq('Go to work')
end
it "returns humanized version of task name with entity" do
subject[:entity_id] = '45'
subject[:entity_type] = 'RhubarbSandwich'
expect(subject.humanize).to eq('Go to work: Rhubarb sandwich 45')
end
it "returns humanized version of task name without entity if requested" do
subject[:entity_id] = '45'
subject[:entity_type] = 'RhubarbSandwich'
expect(subject.humanize(:entity => false)).to eq('Go to work')
end
end
describe '#to_s' do
it "is aliased to #titleize" do
allow(subject).to receive(:titleize).with(:the_args).and_return(:see_i_told_you_so)
expect(subject.to_s(:the_args)).to eq(:see_i_told_you_so)
end
end
describe '#titleize' do
it "returns titleized version of task name when no entity" do
expect(subject.titleize).to eq('Go To Work')
end
it "returns titleized version of task name with entity" do
subject[:entity_id] = '45'
subject[:entity_type] = 'RhubarbSandwich'
expect(subject.titleize).to eq('Go To Work: Rhubarb Sandwich 45')
end
it "returns titleized version of task name without entity if requested" do
subject[:entity_id] = '45'
subject[:entity_type] = 'RhubarbSandwich'
expect(subject.titleize(:entity => false)).to eq('Go To Work')
end
end
# Specs for #temporary_storage, an in-memory scratch hash on the task.
describe '#temporary_storage' do
it 'returns an empty hash by default' do
expect(subject.temporary_storage).to eq({})
end
# The same hash instance is returned on repeated calls, so writes stick.
it 'persists stored values' do
subject.temporary_storage[:foo] = :bar
expect(subject.temporary_storage[:foo]).to eq(:bar)
end
end
it 'has a CompletionFailed error class' do
expect(described_class::CompletionFailed.new).to be_a(StandardError)
end
end
|
# -*- coding: utf-8 -*-
# Copyright 2014 TIS Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Specs for the Consul KV client. Faraday.new is stubbed so that every
# connection the client builds uses the Faraday test adapter backed by
# @stubs, letting each example declare canned HTTP responses.
module Consul
module Client
describe KV do
before do
original_method = Faraday.method(:new)
# NOTE(review): @stubs is only assigned when Faraday.new is actually
# invoked (here, by KV.new below), so examples must not touch @stubs
# before @client is built.
Faraday.stub(:new) do |*args, &block|
@stubs = Faraday::Adapter::Test::Stubs.new
@test = original_method.call(*args) do |builder|
builder.adapter :test, @stubs
# NOTE(review): `yield` inside this block would raise LocalJumpError
# if a caller ever passed a block to Faraday.new; presumably
# `block.call(builder)` was intended. Never exercised because KV.new
# passes no block — TODO confirm.
yield block if block
end
end
@client = KV.new host: 'localhost'
end
describe '#get' do
# Registers a GET stub whose body mimics Consul's KV response format,
# with the value base64-encoded as Consul returns it.
def add_stub(path, value)
encoded_value = Base64.encode64(value).chomp
body = %([{"CreateIndex":5158,"ModifyIndex":5158,"LockIndex":0,"Key":"hoge","Flags":0,"Value":"#{encoded_value}"}])
@stubs.get(path) { [200, {}, body] }
end
it 'return nil if specified key does not exist' do
@stubs.get('/v1/kv/not_found') { [404, {}, ''] }
expect(@client.get('not_found')).to be_nil
end
it 'return string if consul does not return JSON format string' do
add_stub '/v1/kv/dummy', 'dummy_value'
expect(@client.get('dummy')).to be_is_a String
end
it 'request GET /v1/kv with key and return decoded response body' do
add_stub '/v1/kv/dummy', 'dummy_value'
expect(@client.get 'dummy').to eq('dummy_value')
end
it 'return hash if consul return JSON format string' do
add_stub '/v1/kv/json', '{ "key": "value" }'
expect(@client.get('json')).to be_is_a Hash
end
it 'request GET /v1/kv with key and return parsed decoded response body' do
add_stub '/v1/kv/json', '{ "key": "value" }'
expect(@client.get 'json').to eq('key' => 'value')
end
end
describe '#put' do
# A proc that must be invoked: used as the stub body to assert the
# request was actually made.
let(:should_yield) do
(-> {}).tap { |proc| proc.should_receive(:call) }
end
it 'will request PUT /v1/kv with key' do
@stubs.put('/v1/kv/dummy', &should_yield)
@client.put 'dummy', 'dummy_value'
end
it 'will request PUT /v1/kv with value' do
@stubs.put('/v1/kv/dummy') do |env|
expect(env.body).to eq('dummy_value')
end
@client.put 'dummy', 'dummy_value'
end
it 'will request PUT /v1/kv with JSON encoded value if value is Hash' do
@stubs.put('/v1/kv/dummy') do |env|
expect(env.body).to eq('{"key":"value"}')
end
@client.put 'dummy', key: 'value'
end
end
end
end
end
Refactoring: create the test stub used by Faraday earlier, before Faraday.new is stubbed
# -*- coding: utf-8 -*-
# Copyright 2014 TIS Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Specs for the Consul KV client (refactored setup). The test stub set is
# created up-front, before Faraday.new is stubbed, so examples can register
# canned responses regardless of when the client builds its connection.
module Consul
module Client
describe KV do
before do
@stubs = Faraday::Adapter::Test::Stubs.new
original_method = Faraday.method(:new)
Faraday.stub(:new) do |*args, &block|
original_method.call(*args) do |builder|
builder.adapter :test, @stubs
# NOTE(review): `yield` inside this block would raise LocalJumpError
# if a caller ever passed a block to Faraday.new; presumably
# `block.call(builder)` was intended. Never exercised because KV.new
# passes no block — TODO confirm.
yield block if block
end
end
@client = KV.new host: 'localhost'
end
describe '#get' do
# Registers a GET stub whose body mimics Consul's KV response format,
# with the value base64-encoded as Consul returns it.
def add_stub(path, value)
encoded_value = Base64.encode64(value).chomp
body = %([{"CreateIndex":5158,"ModifyIndex":5158,"LockIndex":0,"Key":"hoge","Flags":0,"Value":"#{encoded_value}"}])
@stubs.get(path) { [200, {}, body] }
end
it 'return nil if specified key does not exist' do
@stubs.get('/v1/kv/not_found') { [404, {}, ''] }
expect(@client.get('not_found')).to be_nil
end
it 'return string if consul does not return JSON format string' do
add_stub '/v1/kv/dummy', 'dummy_value'
expect(@client.get('dummy')).to be_is_a String
end
it 'request GET /v1/kv with key and return decoded response body' do
add_stub '/v1/kv/dummy', 'dummy_value'
expect(@client.get 'dummy').to eq('dummy_value')
end
it 'return hash if consul return JSON format string' do
add_stub '/v1/kv/json', '{ "key": "value" }'
expect(@client.get('json')).to be_is_a Hash
end
it 'request GET /v1/kv with key and return parsed decoded response body' do
add_stub '/v1/kv/json', '{ "key": "value" }'
expect(@client.get 'json').to eq('key' => 'value')
end
end
describe '#put' do
# A proc that must be invoked: used as the stub body to assert the
# request was actually made.
let(:should_yield) do
(-> {}).tap { |proc| proc.should_receive(:call) }
end
it 'will request PUT /v1/kv with key' do
@stubs.put('/v1/kv/dummy', &should_yield)
@client.put 'dummy', 'dummy_value'
end
it 'will request PUT /v1/kv with value' do
@stubs.put('/v1/kv/dummy') do |env|
expect(env.body).to eq('dummy_value')
end
@client.put 'dummy', 'dummy_value'
end
it 'will request PUT /v1/kv with JSON encoded value if value is Hash' do
@stubs.put('/v1/kv/dummy') do |env|
expect(env.body).to eq('{"key":"value"}')
end
@client.put 'dummy', key: 'value'
end
end
end
end
end
|
require 'spec_helper'
require 'flapjack/pikelet'
# Specs for Flapjack::Pikelet.create: building Generic (processor) and HTTP
# (web gateway) pikelets, starting them, and restarting/shutting down on
# error. All collaborators (Monitor, condition variables, Thread, the
# processor/server, the shutdown proc) are doubles; the .ordered and
# .twice expectations encode the startup sequence, so examples should not
# be reordered casually.
# NOTE(review): the four examples repeat most of their mock setup — could
# be extracted into shared contexts/helpers.
describe Flapjack::Pikelet, :logger => true do
let(:config) { double('config') }
let(:lock) { double(Monitor) }
let(:stop_cond) { double(MonitorMixin::ConditionVariable) }
let(:finished_cond) { double(MonitorMixin::ConditionVariable) }
let(:thread) { double(Thread) }
let(:shutdown) { double(Proc) }
it "creates and starts a processor pikelet" do
expect(config).to receive(:[]).with('logger').and_return(nil)
expect(config).to receive(:[]).with('max_runs').and_return(nil)
expect(finished_cond).to receive(:signal)
# new_cond is called twice: once for the stop condition, once for the
# finished condition.
expect(lock).to receive(:new_cond).twice.and_return(stop_cond, finished_cond)
expect(lock).to receive(:synchronize).and_yield
expect(Monitor).to receive(:new).and_return(lock)
processor = double('processor')
expect(processor).to receive(:start)
expect(Flapjack::Processor).to receive(:new).with(:lock => lock,
:stop_condition => stop_cond, :config => config).and_return(processor)
expect(Thread).to receive(:new).and_yield.and_return(thread)
pikelets = Flapjack::Pikelet.create('processor', shutdown, :config => config)
expect(pikelets).not_to be_nil
expect(pikelets.size).to eq(1)
pikelet = pikelets.first
expect(pikelet).to be_a(Flapjack::Pikelet::Generic)
pikelet.start
end
# max_runs = 2: the pikelet retries once after the first failure, then
# invokes the shutdown proc.
it "handles an exception from a processor pikelet, and restarts it, then shuts down" do
exc = RuntimeError.new
expect(config).to receive(:[]).with('logger').and_return(nil)
expect(config).to receive(:[]).with('max_runs').and_return(2)
expect(finished_cond).to receive(:signal)
expect(lock).to receive(:new_cond).twice.and_return(stop_cond, finished_cond)
expect(lock).to receive(:synchronize).and_yield
expect(Monitor).to receive(:new).and_return(lock)
processor = double('processor')
expect(processor).to receive(:start).twice.and_raise(exc)
expect(Flapjack::Processor).to receive(:new).with(:lock => lock,
:stop_condition => stop_cond, :config => config).and_return(processor)
expect(Thread).to receive(:new).and_yield.and_return(thread)
expect(shutdown).to receive(:call)
pikelets = Flapjack::Pikelet.create('processor', shutdown, :config => config)
expect(pikelets).not_to be_nil
expect(pikelets.size).to eq(1)
pikelet = pikelets.first
expect(pikelet).to be_a(Flapjack::Pikelet::Generic)
pikelet.start
end
it "creates and starts a http server gateway" do
expect(config).to receive(:[]).with('logger').and_return(nil)
expect(config).to receive(:[]).with('max_runs').and_return(nil)
expect(config).to receive(:[]).with('port').and_return(7654)
expect(config).to receive(:[]).with('timeout').and_return(90)
expect(finished_cond).to receive(:signal)
expect(lock).to receive(:new_cond).twice.and_return(stop_cond, finished_cond)
expect(lock).to receive(:synchronize).and_yield
expect(Monitor).to receive(:new).and_return(lock)
server = double('server')
expect(server).to receive(:mount).with('/', Rack::Handler::WEBrick,
Flapjack::Gateways::Web)
expect(server).to receive(:start)
expect(WEBrick::HTTPServer).to receive(:new).
with(:Port => 7654, :BindAddress => '127.0.0.1',
:AccessLog => [], :Logger => an_instance_of(::WEBrick::Log)).
and_return(server)
expect(Flapjack::Gateways::Web).to receive(:instance_variable_set).
with('@config', config)
expect(Thread).to receive(:new).and_yield.and_return(thread)
expect(Flapjack::Gateways::Web).to receive(:start)
pikelets = Flapjack::Pikelet.create('web', shutdown, :config => config)
expect(pikelets).not_to be_nil
expect(pikelets.size).to eq(1)
pikelet = pikelets.first
expect(pikelet).to be_a(Flapjack::Pikelet::HTTP)
pikelet.start
end
it "handles an exception from a http server gateway" do
exc = RuntimeError.new
expect(config).to receive(:[]).with('logger').and_return(nil)
expect(config).to receive(:[]).with('max_runs').and_return(nil)
expect(config).to receive(:[]).with('port').and_return(7654)
expect(config).to receive(:[]).with('timeout').and_return(90)
expect(finished_cond).to receive(:signal)
expect(lock).to receive(:new_cond).twice.and_return(stop_cond, finished_cond)
expect(lock).to receive(:synchronize).and_yield
expect(Monitor).to receive(:new).and_return(lock)
server = double('server')
expect(server).to receive(:mount).with('/', Rack::Handler::WEBrick,
Flapjack::Gateways::Web)
expect(server).to receive(:start).and_raise(exc)
expect(WEBrick::HTTPServer).to receive(:new).
with(:Port => 7654, :BindAddress => '127.0.0.1',
:AccessLog => [], :Logger => an_instance_of(::WEBrick::Log)).
and_return(server)
expect(Flapjack::Gateways::Web).to receive(:instance_variable_set).
with('@config', config)
expect(Thread).to receive(:new).and_yield.and_return(thread)
expect(shutdown).to receive(:call)
expect(Flapjack::Gateways::Web).to receive(:start)
pikelets = Flapjack::Pikelet.create('web', shutdown, :config => config)
expect(pikelets).not_to be_nil
expect(pikelets.size).to eq(1)
pikelet = pikelets.first
expect(pikelet).to be_a(Flapjack::Pikelet::HTTP)
pikelet.start
end
end
Update pikelet_spec to check 'bind_address' config option
require 'spec_helper'
require 'flapjack/pikelet'
# Specs for Flapjack::Pikelet.create (updated to cover the 'bind_address'
# config option read by the HTTP gateway). All collaborators are doubles;
# the .ordered and .twice expectations encode the startup sequence.
# NOTE(review): the four examples repeat most of their mock setup — could
# be extracted into shared contexts/helpers.
describe Flapjack::Pikelet, :logger => true do
let(:config) { double('config') }
let(:lock) { double(Monitor) }
let(:stop_cond) { double(MonitorMixin::ConditionVariable) }
let(:finished_cond) { double(MonitorMixin::ConditionVariable) }
let(:thread) { double(Thread) }
let(:shutdown) { double(Proc) }
it "creates and starts a processor pikelet" do
expect(config).to receive(:[]).with('logger').and_return(nil)
expect(config).to receive(:[]).with('max_runs').and_return(nil)
expect(finished_cond).to receive(:signal)
# new_cond is called twice: once for the stop condition, once for the
# finished condition.
expect(lock).to receive(:new_cond).twice.and_return(stop_cond, finished_cond)
expect(lock).to receive(:synchronize).and_yield
expect(Monitor).to receive(:new).and_return(lock)
processor = double('processor')
expect(processor).to receive(:start)
expect(Flapjack::Processor).to receive(:new).with(:lock => lock,
:stop_condition => stop_cond, :config => config).and_return(processor)
expect(Thread).to receive(:new).and_yield.and_return(thread)
pikelets = Flapjack::Pikelet.create('processor', shutdown, :config => config)
expect(pikelets).not_to be_nil
expect(pikelets.size).to eq(1)
pikelet = pikelets.first
expect(pikelet).to be_a(Flapjack::Pikelet::Generic)
pikelet.start
end
# max_runs = 2: the pikelet retries once after the first failure, then
# invokes the shutdown proc.
it "handles an exception from a processor pikelet, and restarts it, then shuts down" do
exc = RuntimeError.new
expect(config).to receive(:[]).with('logger').and_return(nil)
expect(config).to receive(:[]).with('max_runs').and_return(2)
expect(finished_cond).to receive(:signal)
expect(lock).to receive(:new_cond).twice.and_return(stop_cond, finished_cond)
expect(lock).to receive(:synchronize).and_yield
expect(Monitor).to receive(:new).and_return(lock)
processor = double('processor')
expect(processor).to receive(:start).twice.and_raise(exc)
expect(Flapjack::Processor).to receive(:new).with(:lock => lock,
:stop_condition => stop_cond, :config => config).and_return(processor)
expect(Thread).to receive(:new).and_yield.and_return(thread)
expect(shutdown).to receive(:call)
pikelets = Flapjack::Pikelet.create('processor', shutdown, :config => config)
expect(pikelets).not_to be_nil
expect(pikelets.size).to eq(1)
pikelet = pikelets.first
expect(pikelet).to be_a(Flapjack::Pikelet::Generic)
pikelet.start
end
it "creates and starts a http server gateway" do
expect(config).to receive(:[]).with('logger').and_return(nil)
expect(config).to receive(:[]).with('max_runs').and_return(nil)
# 'bind_address' from config is expected to flow through to WEBrick's
# :BindAddress option below.
expect(config).to receive(:[]).with('bind_address').and_return('127.0.0.1')
expect(config).to receive(:[]).with('port').and_return(7654)
expect(config).to receive(:[]).with('timeout').and_return(90)
expect(finished_cond).to receive(:signal)
expect(lock).to receive(:new_cond).twice.and_return(stop_cond, finished_cond)
expect(lock).to receive(:synchronize).and_yield
expect(Monitor).to receive(:new).and_return(lock)
server = double('server')
expect(server).to receive(:mount).with('/', Rack::Handler::WEBrick,
Flapjack::Gateways::Web)
expect(server).to receive(:start)
expect(WEBrick::HTTPServer).to receive(:new).
with(:Port => 7654, :BindAddress => '127.0.0.1',
:AccessLog => [], :Logger => an_instance_of(::WEBrick::Log)).
and_return(server)
expect(Flapjack::Gateways::Web).to receive(:instance_variable_set).
with('@config', config)
expect(Thread).to receive(:new).and_yield.and_return(thread)
expect(Flapjack::Gateways::Web).to receive(:start)
pikelets = Flapjack::Pikelet.create('web', shutdown, :config => config)
expect(pikelets).not_to be_nil
expect(pikelets.size).to eq(1)
pikelet = pikelets.first
expect(pikelet).to be_a(Flapjack::Pikelet::HTTP)
pikelet.start
end
it "handles an exception from a http server gateway" do
exc = RuntimeError.new
expect(config).to receive(:[]).with('logger').and_return(nil)
expect(config).to receive(:[]).with('max_runs').and_return(nil)
expect(config).to receive(:[]).with('bind_address').and_return('127.0.0.1')
expect(config).to receive(:[]).with('port').and_return(7654)
expect(config).to receive(:[]).with('timeout').and_return(90)
expect(finished_cond).to receive(:signal)
expect(lock).to receive(:new_cond).twice.and_return(stop_cond, finished_cond)
expect(lock).to receive(:synchronize).and_yield
expect(Monitor).to receive(:new).and_return(lock)
server = double('server')
expect(server).to receive(:mount).with('/', Rack::Handler::WEBrick,
Flapjack::Gateways::Web)
expect(server).to receive(:start).and_raise(exc)
expect(WEBrick::HTTPServer).to receive(:new).
with(:Port => 7654, :BindAddress => '127.0.0.1',
:AccessLog => [], :Logger => an_instance_of(::WEBrick::Log)).
and_return(server)
expect(Flapjack::Gateways::Web).to receive(:instance_variable_set).
with('@config', config)
expect(Thread).to receive(:new).and_yield.and_return(thread)
expect(shutdown).to receive(:call)
expect(Flapjack::Gateways::Web).to receive(:start)
pikelets = Flapjack::Pikelet.create('web', shutdown, :config => config)
expect(pikelets).not_to be_nil
expect(pikelets.size).to eq(1)
pikelet = pikelets.first
expect(pikelet).to be_a(Flapjack::Pikelet::HTTP)
pikelet.start
end
end
|
require "spec_helper"
require "hamster/set"
# Specs for Hamster::Set#none?, with and without a block, on empty and
# non-empty sets.
describe Hamster::Set do
  describe "#none?" do
    describe "when empty" do
      let(:empty_set) { Hamster.set }

      it "with a block returns true" do
        empty_set.none? {}.should == true
      end

      it "with no block returns true" do
        empty_set.none?.should == true
      end
    end

    describe "when not empty" do
      describe "with a block" do
        let(:set) { Hamster.set("A", "B", "C", nil) }

        # One example per element, including nil, to cover every match.
        ["A", "B", "C", nil].each do |value|
          it "returns false if the block ever returns true (#{value.inspect})" do
            set.none? { |item| item == value }.should == false
          end
        end

        it "returns true if the block always returns false" do
          set.none? { |item| item == "D" }.should == true
        end
      end

      describe "with no block" do
        it "returns false if any value is truthy" do
          Hamster.set(nil, false, true, "A").none?.should == false
        end

        it "returns true if all values are falsey" do
          Hamster.set(nil, false).none?.should == true
        end
      end
    end
  end
end
Updated style of Set#none? spec, added a test
require "spec_helper"
require "hamster/set"
# Specs for Hamster::Set#none?, covering the empty set, block and
# block-less forms, and short-circuit behaviour.
describe Hamster::Set do
describe "#none?" do
context "when empty" do
it "with a block returns true" do
Hamster.set.none? {}.should == true
end
it "with no block returns true" do
Hamster.set.none?.should == true
end
end
context "when not empty" do
context "with a block" do
let(:set) { Hamster.set("A", "B", "C", nil) }
# one example per member, including nil, to show any match wins
["A", "B", "C", nil].each do |value|
it "returns false if the block ever returns true (#{value.inspect})" do
set.none? { |item| item == value }.should == false
end
end
it "returns true if the block always returns false" do
set.none? { |item| item == "D" }.should == true
end
# short-circuit: none? must stop yielding once the block is truthy
it "stops iterating as soon as the block returns true" do
yielded = []
set.none? { |item| yielded << item; true }
yielded.size.should == 1
end
end
context "with no block" do
it "returns false if any value is truthy" do
Hamster.set(nil, false, true, "A").none?.should == false
end
it "returns true if all values are falsey" do
Hamster.set(nil, false).none?.should == true
end
end
end
end
end
require 'spec_helper'
# Specs for the MailChimp e-commerce order sync interface.
describe Spree::Chimpy::Interface::Orders do
let(:interface) { described_class.new }
let(:api) { double() }
let(:list) { double() }
# Builds and saves a completed order carrying a MailChimp OrderSource.
def create_order(options={})
user = create(:user, email: options[:email])
order = build(:completed_order_with_totals, user: user, email: options[:email])
order.source = Spree::Chimpy::OrderSource.new(email_id: options[:email_id], campaign_id: options[:campaign_id])
# we need to have a saved order in order to have a non-nil order number
# we need to stub :notify_mail_chimp otherwise sync will be called on the order on update!
order.stub(:notify_mail_chimp).and_return(true)
order.save
order
end
before do
Spree::Chimpy::Config.key = '1234'
Spree::Chimpy::Config.store_id = "super-store"
Spree::Chimpy.stub(list: list)
Mailchimp::API.should_receive(:new).with('1234', { timeout: 60 }).and_return(api)
end
context "adding an order" do
# NOTE(review): member email differs only in case from the order email,
# so matching appears case-insensitive — confirm in Interface::Orders.
it "sync when member info matches" do
order = create_order(email_id: 'id-abcd', campaign_id: '1234', email: 'user@example.com')
list.should_receive(:info).with('id-abcd').and_return(email: 'User@Example.com')
api.should_receive(:ecomm_order_add) do |h|
expect(h[:order][:id]).to eq order.number
expect(h[:order][:email_id]).to eq 'id-abcd'
expect(h[:order][:campaign_id]).to eq '1234'
end
interface.add(order)
end
# a member-info mismatch still adds the order, but without ids
it "skips mismatches member" do
order = create_order(email_id: 'id-abcd', email: 'user@example.com')
list.should_receive(:info).with('id-abcd').and_return({email: 'other@home.com'})
api.should_receive(:ecomm_order_add) do |h|
expect(h[:order][:id]).to eq order.number
expect(h[:order][:email_id]).to be_nil
expect(h[:order][:campaign_id]).to be_nil
end
interface.add(order)
end
end
it "removes an order" do
order = create_order(email: 'foo@example.com')
api.should_receive(:ecomm_order_del).with({store_id: 'super-store', order_id: order.number, throws_exceptions: false}).and_return(true)
expect(interface.remove(order)).to be_true
end
end
Add test expectations for auto-creating (subscribing) users for new orders
require 'spec_helper'
# Specs for the MailChimp e-commerce order sync interface, including the
# list subscription performed when an order is added.
describe Spree::Chimpy::Interface::Orders do
let(:interface) { described_class.new }
let(:api) { double() }
let(:list) { double() }
# Builds and saves a completed order carrying a MailChimp OrderSource.
def create_order(options={})
user = create(:user, email: options[:email])
order = build(:completed_order_with_totals, user: user, email: options[:email])
order.source = Spree::Chimpy::OrderSource.new(email_id: options[:email_id], campaign_id: options[:campaign_id])
# we need to have a saved order in order to have a non-nil order number
# we need to stub :notify_mail_chimp otherwise sync will be called on the order on update!
order.stub(:notify_mail_chimp).and_return(true)
order.save
order
end
before do
Spree::Chimpy::Config.key = '1234'
Spree::Chimpy::Config.store_id = "super-store"
Spree::Chimpy.stub(list: list)
Mailchimp::API.should_receive(:new).with('1234', { timeout: 60 }).and_return(api)
end
context "adding an order" do
it "sync when member info matches" do
order = create_order(email_id: 'id-abcd', campaign_id: '1234', email: 'user@example.com')
list.should_receive(:info).with('id-abcd').and_return(email: 'User@Example.com')
# the member's email is subscribed to the list as part of add
list.should_receive(:subscribe).with('User@Example.com').and_return(nil)
api.should_receive(:ecomm_order_add) do |h|
expect(h[:order][:id]).to eq order.number
expect(h[:order][:email_id]).to eq 'id-abcd'
expect(h[:order][:campaign_id]).to eq '1234'
end
interface.add(order)
end
# a member-info mismatch still subscribes and adds, but without ids
it "skips mismatches member" do
order = create_order(email_id: 'id-abcd', email: 'user@example.com')
list.should_receive(:info).with('id-abcd').and_return({email: 'other@home.com'})
list.should_receive(:subscribe).with('other@home.com').and_return(nil)
api.should_receive(:ecomm_order_add) do |h|
expect(h[:order][:id]).to eq order.number
expect(h[:order][:email_id]).to be_nil
expect(h[:order][:campaign_id]).to be_nil
end
interface.add(order)
end
end
it "removes an order" do
order = create_order(email: 'foo@example.com')
api.should_receive(:ecomm_order_del).with({store_id: 'super-store', order_id: order.number, throws_exceptions: false}).and_return(true)
expect(interface.remove(order)).to be_true
end
end
|
require File.expand_path('../spec_helper.rb', File.dirname(__FILE__))
require 'jcode'
# require 'result_processor' # already required by diff_to_html
require 'diff_to_html'
# Specs for ResultProcessor, which turns a token-level diff
# (:match / :discard_a / :discard_b entries) into highlighted
# removal/addition lines wrapped in <span class="rr"> / <span class="aa">.
describe ResultProcessor do
before(:all) do
create_test_input
end
it :processor do
processor = ResultProcessor.new(@diff)
removal, addition = processor.results
removal.should have(1).line
removal.first.should be_include(' <span class="rr">b</span>')
removal.first.should be_include('<span class="rr">ton</span>')
removal.first.split('<span>').should have(1).span # one occurrence (beginning of string)
addition.should have(1).line
addition.first.should be_include(' <span class="aa">s</span>')
addition.first.should be_include('<span class="aa">bmi</span>')
addition.first.split('<span>').should have(1).span # one occurrence (beginning of string)
end
# NOTE(review): when the strings share almost nothing, no highlight spans
# are emitted at all.
it "processor with almost no common part" do
@diff = [
{ :action => :match, :token => ' '},
{ :action => :match, :token => ' '},
{ :action => :discard_a, :token => 'button'},
{:action => :discard_b, :token => 'submit'},
{ :action => :match, :token => 'x'}
]
processor = ResultProcessor.new(@diff)
removal, addition = processor.results
removal.should have(1).line
removal.first.should == ' buttonx' # no highlight
addition.should have(1).line
addition.first.should == ' submitx' # no highlight
end
# trailing difference: the opened span must still be closed
it "close span tag when having difference at the end" do
diff = []
s1 = " submit_to_remote 'create_btn', 'Create', :url => { :action => 'cre"
s2 = " submit_to_remote 'create_btn', 'Create', :url => { :action => 'sub"
s1[0..s1.size-6].each_char do |c|
diff << { :action => :match, :token => c}
end
diff << { :action => :discard_a, :token => 'c'}
diff << { :action => :discard_a, :token => 'r'}
diff << { :action => :discard_a, :token => 'e'}
diff << { :action => :discard_b, :token => 's'}
diff << { :action => :discard_b, :token => 'u'}
diff << { :action => :discard_b, :token => 'b'}
processor = ResultProcessor.new(diff)
removal, addition = processor.results
removal.should have(1).line
removal.first.should be_include('action =><span class="rr">cre</span>')
addition.should have(1).line
addition.first.should be_include('action =><span class="aa">sub</span>')
end
# Builds the shared @diff fixture: "button_to_remote..." vs
# "submit_to_remote..." differing only in the leading word.
def create_test_input
@diff = []
s1 = " button_to_remote 'create_btn', 'Create', :url => { :action => 'create' }"
s2 = " submit_to_remote 'create_btn', 'Create', :url => { :action => 'create' }"
@diff = [
[:match, ' '],
[:match, ' '],
[:discard_a,'b'],
[:discard_b,'s'],
[:match, 'u'],
[:discard_b,'b'],
[:discard_b,'m'],
[:discard_b,'i'],
[:match, 't'],
[:discard_a,'t'],
[:discard_a,'o'],
[:discard_a,'n']]
@diff = @diff.collect { |d| { :action => d.first, :token => d.last}}
s1[@diff.size..-1].each_char do |c|
@diff << { :action => :match, :token => c}
end
end
end
Prettify spec
require File.expand_path('../spec_helper.rb', File.dirname(__FILE__))
require 'jcode'
# require 'result_processor' # already required by diff_to_html
require 'diff_to_html'
# Specs for ResultProcessor, which turns a token-level diff
# (:match / :discard_a / :discard_b entries) into highlighted
# removal/addition lines wrapped in <span class="rr"> / <span class="aa">.
describe ResultProcessor do
before(:all) do
create_test_input
end
it :processor do
processor = ResultProcessor.new(@diff)
removal, addition = processor.results
removal.should have(1).line
removal.first.should be_include(' <span class="rr">b</span>')
removal.first.should be_include('<span class="rr">ton</span>')
removal.first.split('<span>').should have(1).span # one occurrence (beginning of string)
addition.should have(1).line
addition.first.should be_include(' <span class="aa">s</span>')
addition.first.should be_include('<span class="aa">bmi</span>')
addition.first.split('<span>').should have(1).span # one occurrence (beginning of string)
end
# NOTE(review): when the strings share almost nothing, no highlight spans
# are emitted at all.
it "processor with almost no common part" do
@diff = [
{ :action => :match, :token => ' ' },
{ :action => :match, :token => ' ' },
{ :action => :discard_a, :token => 'button' },
{ :action => :discard_b, :token => 'submit' },
{ :action => :match, :token => 'x' }
]
processor = ResultProcessor.new(@diff)
removal, addition = processor.results
removal.should have(1).line
removal.first.should == ' buttonx' # no highlight
addition.should have(1).line
addition.first.should == ' submitx' # no highlight
end
# trailing difference: the opened span must still be closed
it "close span tag when having difference at the end" do
diff = []
s1 = " submit_to_remote 'create_btn', 'Create', :url => { :action => 'cre"
s2 = " submit_to_remote 'create_btn', 'Create', :url => { :action => 'sub"
s1[0..s1.size-6].each_char do |c|
diff << { :action => :match, :token => c}
end
diff << { :action => :discard_a, :token => 'c' }
diff << { :action => :discard_a, :token => 'r' }
diff << { :action => :discard_a, :token => 'e' }
diff << { :action => :discard_b, :token => 's' }
diff << { :action => :discard_b, :token => 'u' }
diff << { :action => :discard_b, :token => 'b' }
processor = ResultProcessor.new(diff)
removal, addition = processor.results
removal.should have(1).line
removal.first.should be_include('action =><span class="rr">cre</span>')
addition.should have(1).line
addition.first.should be_include('action =><span class="aa">sub</span>')
end
# Builds the shared @diff fixture: "button_to_remote..." vs
# "submit_to_remote..." differing only in the leading word.
def create_test_input
s1 = " button_to_remote 'create_btn', 'Create', :url => { :action => 'create' }"
s2 = " submit_to_remote 'create_btn', 'Create', :url => { :action => 'create' }"
@diff = [
[ :match, ' ' ],
[ :match, ' ' ],
[ :discard_a, 'b' ],
[ :discard_b, 's' ],
[ :match, 'u' ],
[ :discard_b, 'b' ],
[ :discard_b, 'm' ],
[ :discard_b, 'i' ],
[ :match, 't' ],
[ :discard_a, 't' ],
[ :discard_a, 'o' ],
[ :discard_a, 'n' ]
]
@diff = @diff.collect { |d| { :action => d.first, :token => d.last}}
s1[@diff.size..-1].each_char do |c|
@diff << { :action => :match, :token => c }
end
end
end
|
# encoding: UTF-8
require 'rails_helper'
RSpec.describe Duplicator, type: :model do
context 'when Plain Old Ruby Objects' do
# Minimal duplicable fixture: a leaf object identified solely by its id.
class SimpleObject
  def initialize(id)
    @id = id
  end

  # Leaf object: there are no children to duplicate.
  def initialize_duplicate(_duplicator)
  end

  # Value equality: two SimpleObjects are equal when class and state match.
  def ==(other)
    other.class == self.class && other.state == state
  end

  protected

  # Internal state used for the equality comparison.
  def state
    [@id]
  end
end
# ComplexObject has children
class ComplexObject
attr_reader :children
def initialize(id, children)
@id = id
@children = children
end
def initialize_duplicate(duplicator)
new_children = []
@children.each do |child|
new_child = duplicator.duplicate(child)
new_children << new_child unless new_child.nil?
end
# example if object has no accessor for the children
# self is already duplicated by Duplicator. Find the duplicated version
duplicator.duplicate(self).instance_variable_set(:@children, new_children)
end
def ==(other)
self.class == other.class && state == other.state
end
protected
def state
[@id, @children]
end
end
# Builds an acyclic graph of fixtures in instance variables:
#
#         ---> c1 ---> s2
#         |     |
#         |     -----> s1
#         |            |
#        c3 -----> c2 --
#         |
#         ---> s3
#
def create_complex_objects
@s1 = SimpleObject.new(1)
@s2 = SimpleObject.new(2)
@s3 = SimpleObject.new(3)
# setup 2 objects with overlapping children
@c1 = ComplexObject.new(11, [@s1, @s2])
@c2 = ComplexObject.new(12, [@s1])
# setup an even more complicated object
@c3 = ComplexObject.new(13, [@c1, @c2, @s3])
end
# Builds a graph containing a cycle (c3 -> c5 -> c4 -> c3).
def create_cyclic_graph
#
#         ------> c3 ------
#         |       ^       v
#  c1 -> c2       |       c5
#         |       |       |
#         ------> c4 <-----
#                 |
#                 ----> s1
#
@s1 = SimpleObject.new(1)
@c4 = ComplexObject.new(14, []) # assign children later
@c5 = ComplexObject.new(15, [@c4])
@c3 = ComplexObject.new(13, [@c5])
@c2 = ComplexObject.new(12, [@c3, @c4])
@c1 = ComplexObject.new(11, [@c2])
@c4.instance_variable_set(:@children, [@s1, @c3]) # create cycle
end
# Builds a second, disjoint cyclic graph (c22 <-> c23).
def create_second_cyclic_graph
#
#  ----> c22 <--
#  |      |    |
# c21 --  |    |
#  |   |--> |
#  ----------> c23
#
@c22 = ComplexObject.new(22, []) # assign children later
@c23 = ComplexObject.new(23, [@c22])
@c21 = ComplexObject.new(21, [@c22, @c23])
@c22.instance_variable_set(:@children, [@c23])
end
# Basic Duplicator contract on a leaf object: exclusion returns nil,
# default duplication copies by value, repeated calls return one copy.
context 'when SimpleObject is duplicated' do
before :each do
@obj_a = SimpleObject.new(2)
end
it 'is not duplicated if excluded' do
@duplicator = Duplicator.new([@obj_a])
duplicated_object = @duplicator.duplicate(@obj_a)
expect(duplicated_object).to be_nil
end
it 'is duplicated by default' do
@duplicator = Duplicator.new
duplicate_of_a = @duplicator.duplicate(@obj_a)
expect(duplicate_of_a).to_not be_nil
expect(duplicate_of_a).to_not be(@obj_a)
expect(duplicate_of_a).to eq(@obj_a)
end
# duplicating the same source twice must return the same copy object
it 'is duplicated once' do
@duplicator = Duplicator.new
duplicate_of_a = @duplicator.duplicate(@obj_a)
duplicate_of_a_2 = @duplicator.duplicate(@obj_a)
expect(duplicate_of_a).to_not be_nil
expect(duplicate_of_a).to be(duplicate_of_a_2)
end
end
# Duplicator contract on objects with children, with and without
# exclusion lists.
context 'when ComplexObject is duplicated' do
context 'without exclusions' do
before :each do
create_complex_objects
@duplicator = Duplicator.new
end
it 'duplicates object' do
dup_c1 = @duplicator.duplicate(@c1)
orig_children = @c1.children
dup_children = dup_c1.children
expect(dup_c1).to eq(@c1)
expect(dup_c1).to_not be(@c1)
# both children should be duplicated, same values, different objects
expect(dup_children.length).to be(2)
(0..1).each do |i|
expect(orig_children[i]).to eq(dup_children[i])
expect(orig_children[i]).to_not be(dup_children[i])
end
end
# a shared child (@s1) must map to a single duplicate
it 'duplicates objects referenced in 2 places once' do
dup_c1 = @duplicator.duplicate(@c1)
dup_c2 = @duplicator.duplicate(@c2)
# dup_s1 should be the same object
expect(dup_c1.children[0]).to be(dup_c2.children[0])
# test that @c1 and @c2 have the same values but are not the same object
expect(dup_c1).to eq(@c1)
expect(dup_c2).to eq(@c2)
expect(dup_c1).to_not be(@c1)
expect(dup_c2).to_not be(@c2)
end
it 'duplicates objects with ComplexObject children' do
dup_c3 = @duplicator.duplicate(@c3)
expect(dup_c3.children.length).to be(3)
expect(dup_c3.children[0]).to eq(@c1)
expect(dup_c3.children[1]).to eq(@c2)
expect(dup_c3.children[0]).to_not be(@c1)
expect(dup_c3.children[1]).to_not be(@c2)
end
end
context 'with exclusions' do
before :each do
create_complex_objects
end
# excluded children are simply dropped from the duplicate
it 'duplicates ComplexObject but not excluded children' do
duplicator = Duplicator.new([@s1, @s2])
dup_c1 = duplicator.duplicate(@c1)
expect(dup_c1.children).to be_empty
expect(dup_c1).to_not be(@c1)
end
it 'partially duplicates objects when some children are excluded' do
duplicator = Duplicator.new([@s2, @c2])
dup_c3 = duplicator.duplicate(@c3)
dup_c1_children = dup_c3.children[0].children
expect(dup_c3.children.length).to be(2)
expect(dup_c1_children.length).to be(1)
expect(dup_c1_children[0]).to eq(@s1)
expect(dup_c1_children[0]).to_not be(@s1)
end
end
end
# Duplication of whole object graphs: cycles, arrays of roots, and
# joined/disjoint graphs. Relies on the fixtures from create_cyclic_graph
# and create_second_cyclic_graph.
context 'when Plain Old Ruby Object graphs are duplicated' do
context 'with cycles' do
before :each do
create_cyclic_graph
end
it 'duplicates cyclic two object graph' do
c1 = ComplexObject.new(11, [])
c2 = ComplexObject.new(12, [c1])
c1.instance_variable_set(:@children, [c2])
duplicator = Duplicator.new
dup_c1 = duplicator.duplicate(c1)
expect(dup_c1).to eq(c1)
expect(dup_c1).to_not be(c1)
# exactly the two graph nodes should be registered, no extras
expect(duplicator.instance_variable_get(:@duplicated_objects).length).to be(2)
# the cycle must point back at the duplicate, not the original
expect(dup_c1.children[0].children[0]).to be(dup_c1)
end
it 'duplicates cyclic graph' do
duplicator = Duplicator.new
dup_c1 = duplicator.duplicate(@c1)
expect(dup_c1).to eq(@c1)
expect(dup_c1).to_not be(@c1)
end
it 'duplicates cyclic graph without excluded tail' do
duplicator = Duplicator.new([@s1])
dup_c1 = duplicator.duplicate(@c1)
# get original and duplicated node c4
c4 = @c1.children[0].children[1]
dup_c4 = dup_c1.children[0].children[1]
expect(dup_c1).to_not eq(@c1)
expect(dup_c4.children.length).to_not eq(c4.children.length)
end
it 'duplicates cyclic graph without c5' do
duplicator = Duplicator.new([@c5])
dup_c1 = duplicator.duplicate(@c1)
dup_c3 = dup_c1.children[0].children[0]
expect(dup_c1).to_not be(@c1)
expect(dup_c3.children).to be_empty
end
it 'duplicates cyclic graph without c4' do
duplicator = Duplicator.new([@c4])
dup_c1 = duplicator.duplicate(@c1)
# should be left with c1 -> c2 -> c3 -> c5
dup_c2 = dup_c1.children[0]
dup_c3 = dup_c2.children[0]
dup_c5 = dup_c3.children[0]
expect(dup_c1.children.length).to be(1)
expect(dup_c2.children.length).to be(1)
expect(dup_c3.children.length).to be(1)
expect(dup_c5.children.length).to be(0)
end
it 'duplicates sub-graph from c3' do
duplicator = Duplicator.new
dup_c3 = duplicator.duplicate(@c3)
dup_c4 = dup_c3.children[0].children[0]
expect(dup_c3).to_not be(@c3)
expect(dup_c4.children[0]).to eq(@s1)
# check cycle
expect(dup_c4.children[1]).to be(dup_c3)
end
end
context 'when an array of objects is duplicated' do
before :each do
create_cyclic_graph
create_second_cyclic_graph
end
it 'duplicates objects mentioned twice without creating extras' do
duplicator = Duplicator.new
duplicated_stuff = duplicator.duplicate([@c1, @c3])
dup_c3 = duplicated_stuff[0].children[0].children[0]
expect(duplicated_stuff.length).to be(2)
expect(duplicated_stuff[0]).to eq(@c1)
expect(duplicated_stuff[0]).to_not be(@c1)
expect(duplicated_stuff[1]).to eq(@c3)
expect(duplicated_stuff[1]).to_not be(@c3)
expect(duplicated_stuff[1]).to be(dup_c3)
end
it 'duplicates disjoint graphs' do
duplicator = Duplicator.new
duplicated_stuff = duplicator.duplicate([@c1, @c21])
expect(duplicated_stuff.length).to be(2)
expect(duplicated_stuff[0]).to eq(@c1)
expect(duplicated_stuff[0]).to_not be(@c1)
expect(duplicated_stuff[1]).to eq(@c21)
expect(duplicated_stuff[1]).to_not be(@c21)
end
end
context 'when joined graphs are duplicated' do
before :each do
create_cyclic_graph
create_second_cyclic_graph
# join graphs
c1_children = @c1.children
c1_children << @c21
@c1.instance_variable_set(:@children, c1_children)
end
# 9 = 8 ComplexObjects across both graphs plus s1
it 'duplicates all objects' do
duplicator = Duplicator.new
duplicator.duplicate(@c1)
duplicated_objects = duplicator.instance_variable_get(:@duplicated_objects)
expect(duplicated_objects.length).to be(9)
end
it 'duplicates cyclically joined graphs' do
duplicator = Duplicator.new
# join from c21 to c1
c21_children = @c21.children
c21_children << @c1
@c21.instance_variable_set(:@children, c21_children)
duplicator.duplicate(@c21)
duplicated_objects = duplicator.instance_variable_get(:@duplicated_objects)
expect(duplicated_objects.length).to be(9)
end
end
end
end
context 'when ActiveRecord objects' do
# Duplicable AR fixture with no associations.
class SimpleActiveRecord < ActiveRecord::Base
def initialize_duplicate(_duplicator)
end
end
temporary_table(:simple_active_records) do |t|
t.integer :data
end
# Duplicable AR fixture with a self-referential many-to-many
# (children/parents through the children_parents join table).
class ComplexActiveRecord < ActiveRecord::Base
has_and_belongs_to_many :children, class_name: 'ComplexActiveRecord',
foreign_key: 'parent_id',
join_table: :children_parents,
association_foreign_key: 'children_id'
has_and_belongs_to_many :parents, class_name: 'ComplexActiveRecord',
foreign_key: 'children_id',
join_table: :children_parents,
association_foreign_key: 'parent_id'
# Duplicate each child, dropping excluded (nil) ones, then attach them to
# this record's own duplicate via the association writer.
def initialize_duplicate(duplicator)
new_children = []
children.each do |child|
new_child = duplicator.duplicate(child)
new_children << new_child unless new_child.nil?
end
duplicator.duplicate(self).children = new_children
end
end
temporary_table(:complex_active_records) do |t|
t.integer :data
end
temporary_table(:children_parents) do |t|
t.integer :children_id, foreign_key: { references: :complex_active_records, primary_key: :id }
t.integer :parent_id, foreign_key: { references: :complex_active_records, primary_key: :id }
end
# Builds a cyclic AR graph in @car1..@car6 (cycle: c3 -> c5 -> c4 -> c3).
def create_ar_cyclic_graph
#
#         ------> c3 ------
#         |       ^       v
#  c1 -> c2       |       c5
#         |       |       |
#         ------> c4 <-----
#                 |
#                 ----> c6
#
@car1 = ComplexActiveRecord.new(data: 11)
@car1.save
@car2 = @car1.children.create(data: 12)
@car3 = @car2.children.create(data: 13)
@car4 = @car2.children.create(data: 14)
@car5 = @car3.children.create(data: 15)
@car6 = @car4.children.create(data: 16)
@car4.children << @car3
@car5.children << @car4
end
# Mirrors the Plain Old Ruby Object specs for AR-backed objects:
# leaf records, records with associations, then full graphs with cycles.
context 'when SimpleActiveRecord objects are duplicated' do
before :each do
@sar_1 = SimpleActiveRecord.new(data: 1)
@sar_1.save
end
with_temporary_table(:simple_active_records) do
it 'is duplicated by default' do
duplicator = Duplicator.new
duplicator.duplicate(@sar_1).save
all_records = SimpleActiveRecord.all
expect(SimpleActiveRecord.count).to be(2)
expect(all_records[0].data).to eq(all_records[1].data)
expect(all_records[0].id).to_not eq(all_records[1].id)
end
it 'is not duplicated if excluded' do
duplicator = Duplicator.new([@sar_1])
dup_sar_1 = duplicator.duplicate(@sar_1)
expect(dup_sar_1).to be_nil
expect(SimpleActiveRecord.count).to be(1)
end
# duplicating twice through one Duplicator saves a single extra row
it 'is duplicated once' do
duplicator = Duplicator.new
duplicator.duplicate(@sar_1).save
duplicator.duplicate(@sar_1).save
expect(SimpleActiveRecord.count).to be(2)
end
end
end
# ComplexActiveRecord objects have associations to themselves
context 'when ComplexActiveRecord objects are duplicated' do
with_temporary_table(:complex_active_records) do
with_temporary_table(:children_parents) do
it 'duplicates a ComplexActiveRecord object' do
# create object and some children
c1 = ComplexActiveRecord.new(data: 11)
c1.save
c1.children.create(data: 12)
c1.children.create(data: 13)
# duplicate object
duplicator = Duplicator.new
dup_c1 = duplicator.duplicate(c1)
dup_c1.save
# tests
expect(ComplexActiveRecord.count).to be(6)
expect(c1.data).to eq(dup_c1.data)
expect(c1).to_not be(dup_c1)
# both children should be duplicated, same values, different objects
(0..1).each do |i|
expect(c1.children[i].data).to eq(dup_c1.children[i].data)
expect(c1.children[i]).to_not be(dup_c1.children[i])
end
end
it 'duplicates object referenced in 2 places once' do
#
#  c1--|
#      |---> c5
#  c2--|
#
c1 = ComplexActiveRecord.new(data: 11)
c2 = ComplexActiveRecord.new(data: 12)
c1.save
c2.save
c1.children.create(data: 15)
c2.children = c1.children
duplicator = Duplicator.new
dup_c1 = duplicator.duplicate(c1)
dup_c1.save
dup_c2 = duplicator.duplicate(c2)
dup_c2.save
# the shared child is duplicated exactly once
expect(ComplexActiveRecord.count).to be(6)
expect(dup_c1.children[0]).to be(dup_c2.children[0])
expect(dup_c1.children[0].data).to be(15)
end
it 'duplicates multi-layer graphs' do
#
#      --> c2 ---> c3
#      |
#  c1 --> c4 ---> c5
#      |           ^
#      --> c6 ------
#
c1 = ComplexActiveRecord.new(data: 11)
c1.save
c2 = c1.children.create(data: 12)
c3 = c2.children.create(data: 13)
c4 = c1.children.create(data: 14)
c5 = c4.children.create(data: 15)
c6 = c1.children.create(data: 16)
c6.children = c4.children
duplicator = Duplicator.new
dup_c1 = duplicator.duplicate(c1)
dup_c1.save
dup_c2 = dup_c1.children[0]
dup_c3 = dup_c2.children[0]
dup_c4 = dup_c1.children[1]
dup_c5 = dup_c4.children[0]
dup_c6 = dup_c1.children[2]
# Check that data is duplicated correctly, duplicates not the same object
expect(ComplexActiveRecord.count).to be(12)
expect(dup_c1.data).to eq(c1.data)
expect(dup_c2.data).to eq(c2.data)
expect(dup_c3.data).to eq(c3.data)
expect(dup_c4.data).to eq(c4.data)
expect(dup_c5.data).to eq(c5.data)
expect(dup_c6.data).to eq(c6.data)
expect(dup_c1).to_not eq(c1)
expect(dup_c2).to_not eq(c2)
expect(dup_c3).to_not eq(c3)
expect(dup_c4).to_not eq(c4)
expect(dup_c5).to_not eq(c5)
expect(dup_c6).to_not eq(c6)
# Check associations
# dup_c1's children
expect(dup_c1.children.size).to be(3)
expect(dup_c1.children).to include(dup_c2)
expect(dup_c1.children).to include(dup_c4)
expect(dup_c1.children).to include(dup_c6)
# dup_c2's children
expect(dup_c2.children.size).to be(1)
expect(dup_c2.children).to include(dup_c3)
# dup_c3's children
expect(dup_c3.children.size).to be(0)
# dup_c4's children
expect(dup_c4.children.size).to be(1)
expect(dup_c4.children).to include(dup_c5)
# dup_c5's children
expect(dup_c5.children.size).to be(0)
# dup_c6's children
expect(dup_c6.children.size).to be(1)
expect(dup_c6.children).to include(dup_c5)
end
end
end
end
context 'when ComplexActiveRecord object graphs are duplicated' do
with_temporary_table(:complex_active_records) do
with_temporary_table(:children_parents) do
before :each do
create_ar_cyclic_graph
end
it 'duplicates cyclic two object graph' do
c1 = ComplexActiveRecord.new(data: 51)
c1.save
c2 = c1.children.create(data: 52)
c2.children << c1
duplicator = Duplicator.new
dup_c1 = duplicator.duplicate(c1)
dup_c2 = dup_c1.children[0]
# check that objects are duplicated
expect(dup_c1.data).to eq(c1.data)
expect(dup_c1).to_not be(c1)
expect(dup_c2.data).to eq(c2.data)
expect(dup_c2).to_not be(c2)
# check associations
expect(dup_c1.children).to include(dup_c2)
expect(dup_c2.children).to include(dup_c1)
expect(dup_c1.children).to_not include(c2)
expect(dup_c2.children).to_not include(c1)
end
it 'duplicates cyclic graph' do
duplicator = Duplicator.new
dup_c1 = duplicator.duplicate(@car1)
dup_c1.save
dup_c3 = dup_c1.children[0].children[0]
dup_c5 = dup_c3.children[0]
dup_c4 = dup_c1.children[0].children[1]
expect(ComplexActiveRecord.count).to be(12)
# check cycle data
expect(dup_c3.data).to be(13)
expect(dup_c4.data).to be(14)
expect(dup_c5.data).to be(15)
# check cycle associations
expect(dup_c5.children[0]).to be(dup_c4)
expect(dup_c4.children.count).to be(2)
expect(dup_c4.children).to include(dup_c3)
expect(dup_c3.children).to include(dup_c5)
expect(dup_c3.children.count).to be(1)
end
it 'duplicates cyclic graph without c4' do
duplicator = Duplicator.new([@car4])
dup_c1 = duplicator.duplicate(@car1)
dup_c1.save
# should be left with c1 -> c2 -> c3 -> c5
dup_c2 = dup_c1.children[0]
dup_c3 = dup_c2.children[0]
dup_c5 = dup_c3.children[0]
# check nodes duplicated
expect(dup_c1.data).to be(11)
expect(dup_c2.data).to be(12)
expect(dup_c3.data).to be(13)
expect(dup_c5.data).to be(15)
# check 1 child each
expect(dup_c1.children.length).to be(1)
expect(dup_c2.children.length).to be(1)
expect(dup_c3.children.length).to be(1)
expect(dup_c5.children.length).to be(0)
# check the children
expect(dup_c1.children).to include(dup_c2)
expect(dup_c2.children).to include(dup_c3)
expect(dup_c3.children).to include(dup_c5)
end
end
end
end
end
end
Add tests for ComplexActiveRecord
Make tests for ActiveRecord objects similar to those for Plain Old Ruby
Objects.
Tidy up and refactor some tests.
Write remaining tests so the 2 types of objects have similar tests.
Reorganise the ActiveRecord tests to match the structure of the Plain Old
Ruby Object tests.
Use ASCII characters in comments.
Use eq instead of be when comparing numbers.
Remove dependence on record count in some tests.
Remove hardcoding of data numbers in tests.
Use ActiveRecordClass.create instead of .new and then .save.
require 'rails_helper'
RSpec.describe Duplicator, type: :model do
context 'when Plain Old Ruby Objects' do
# Minimal duplicable fixture: a leaf object identified solely by its id.
class SimpleObject
  def initialize(id)
    @id = id
  end

  # Leaf object: there are no children to duplicate.
  def initialize_duplicate(_duplicator)
  end

  # Value equality: two SimpleObjects are equal when class and state match.
  def ==(other)
    other.class == self.class && other.state == state
  end

  protected

  # Internal state used for the equality comparison.
  def state
    [@id]
  end
end
# ComplexObject has children
class ComplexObject
attr_reader :children
def initialize(id, children)
@id = id
@children = children
end
def initialize_duplicate(duplicator)
new_children = []
@children.each do |child|
new_child = duplicator.duplicate(child)
new_children << new_child unless new_child.nil?
end
# example if object has no accessor for the children
# self is already duplicated by Duplicator. Find the duplicated version
duplicator.duplicate(self).instance_variable_set(:@children, new_children)
end
def ==(other)
self.class == other.class && state == other.state
end
protected
def state
[@id, @children]
end
end
# Builds an acyclic graph of fixtures in instance variables.
def create_complex_objects
#
#  ---> c1 ---> s2
#  |     |
#  |     -----> s1
#  |            |
# c3 -----> c2
#  |
#  ---> s3
#
@s1 = SimpleObject.new(1)
@s2 = SimpleObject.new(2)
@s3 = SimpleObject.new(3)
# setup 2 objects with overlapping children
@c1 = ComplexObject.new(11, [@s1, @s2])
@c2 = ComplexObject.new(12, [@s1])
# setup an even more complicated object
@c3 = ComplexObject.new(13, [@c1, @c2, @s3])
end
# Builds a graph containing a cycle (c3 -> c5 -> c4 -> c3).
def create_cyclic_graph
#
#  ------> c3 ------
#  |       ^       v
# c1-> c2  |       c5
#  |       |       |
#  ------> c4 <-----
#          |
#          ----> s1
#
@s1 = SimpleObject.new(1)
@c4 = ComplexObject.new(14, []) # assign children later
@c5 = ComplexObject.new(15, [@c4])
@c3 = ComplexObject.new(13, [@c5])
@c2 = ComplexObject.new(12, [@c3, @c4])
@c1 = ComplexObject.new(11, [@c2])
@c4.instance_variable_set(:@children, [@s1, @c3]) # create cycle
end
# Builds a second, disjoint cyclic graph (c22 <-> c23).
def create_second_cyclic_graph
#
#  ----> c22 <--
#  |      |    |
# c21 --  |    |
#  |   |--> |
#  ----------> c23
#
@c22 = ComplexObject.new(22, []) # assign children later
@c23 = ComplexObject.new(23, [@c22])
@c21 = ComplexObject.new(21, [@c22, @c23])
@c22.instance_variable_set(:@children, [@c23])
end
# Basic Duplicator contract on a leaf object: exclusion returns nil,
# default duplication copies by value, repeated calls return one copy.
context 'when SimpleObject is duplicated' do
before :each do
@obj_a = SimpleObject.new(2)
end
it 'is not duplicated if excluded' do
@duplicator = Duplicator.new([@obj_a])
duplicated_object = @duplicator.duplicate(@obj_a)
expect(duplicated_object).to be_nil
end
it 'is duplicated by default' do
@duplicator = Duplicator.new
duplicate_of_a = @duplicator.duplicate(@obj_a)
expect(duplicate_of_a).to_not be_nil
expect(duplicate_of_a).to_not be(@obj_a)
expect(duplicate_of_a).to eq(@obj_a)
end
# duplicating the same source twice must return the same copy object
it 'is duplicated once' do
@duplicator = Duplicator.new
duplicate_of_a = @duplicator.duplicate(@obj_a)
duplicate_of_a_2 = @duplicator.duplicate(@obj_a)
expect(duplicate_of_a).to_not be_nil
expect(duplicate_of_a).to be(duplicate_of_a_2)
end
end
# Duplicator contract on objects with children, with and without
# exclusion lists.
context 'when ComplexObject is duplicated' do
before :each do
create_complex_objects
end
context 'without exclusions' do
before :each do
@duplicator = Duplicator.new
end
it 'duplicates object' do
dup_c1 = @duplicator.duplicate(@c1)
orig_children = @c1.children
dup_children = dup_c1.children
expect(dup_c1).to eq(@c1)
expect(dup_c1).to_not be(@c1)
# both children should be duplicated, same values, different objects
expect(dup_children.length).to eq(2)
(0..1).each do |i|
expect(orig_children[i]).to eq(dup_children[i])
expect(orig_children[i]).to_not be(dup_children[i])
end
end
# a shared child (@s1) must map to a single duplicate
it 'duplicates objects referenced in 2 places once' do
dup_c1 = @duplicator.duplicate(@c1)
dup_c2 = @duplicator.duplicate(@c2)
# dup_s1 should be the same object
expect(dup_c1.children[0]).to be(dup_c2.children[0])
# test that @c1 and @c2 have the same values but are not the same object
expect(dup_c1).to eq(@c1)
expect(dup_c2).to eq(@c2)
expect(dup_c1).to_not be(@c1)
expect(dup_c2).to_not be(@c2)
end
it 'duplicates objects with ComplexObject children' do
dup_c3 = @duplicator.duplicate(@c3)
expect(dup_c3.children.length).to eq(3)
expect(dup_c3.children[0]).to eq(@c1)
expect(dup_c3.children[1]).to eq(@c2)
expect(dup_c3.children[0]).to_not be(@c1)
expect(dup_c3.children[1]).to_not be(@c2)
end
end
context 'with exclusions' do
# excluded children are simply dropped from the duplicate
it 'duplicates ComplexObject but not excluded children' do
duplicator = Duplicator.new([@s1, @s2])
dup_c1 = duplicator.duplicate(@c1)
expect(dup_c1.children).to be_empty
expect(dup_c1).to_not be(@c1)
end
it 'partially duplicates objects when some children are excluded' do
duplicator = Duplicator.new([@s2, @c2])
dup_c3 = duplicator.duplicate(@c3)
dup_c1_children = dup_c3.children[0].children
expect(dup_c3.children.length).to eq(2)
expect(dup_c1_children.length).to eq(1)
expect(dup_c1_children[0]).to eq(@s1)
expect(dup_c1_children[0]).to_not be(@s1)
end
end
end
context 'when Plain Old Ruby Object graphs are duplicated' do
context 'with cycles' do
before :each do
create_cyclic_graph
end
it 'duplicates cyclic two object graph' do
  # build a minimal two-node cycle: a <-> b
  node_a = ComplexObject.new(11, [])
  node_b = ComplexObject.new(12, [node_a])
  node_a.instance_variable_set(:@children, [node_b])
  duplicator = Duplicator.new
  dup_a = duplicator.duplicate(node_a)
  expect(dup_a).to eq(node_a)
  expect(dup_a).not_to be(node_a)
  # exactly the two nodes were duplicated, nothing more
  expect(duplicator.instance_variable_get(:@duplicated_objects).length).to eq(2)
  # and the cycle is preserved among the duplicates
  expect(dup_a.children[0].children[0]).to be(dup_a)
end
it 'duplicates cyclic graph' do
  duplicator = Duplicator.new
  # duplication must terminate despite the cycle and produce an
  # equal-but-distinct graph
  copy = duplicator.duplicate(@c1)
  expect(copy).to eq(@c1)
  expect(copy).not_to be(@c1)
end
# NOTE: the excluded tail of the plain-Ruby cyclic graph is @s1 (c6 only
# exists in the ActiveRecord variant of this graph); the old description
# was a stale copy from the AR spec.
it 'duplicates cyclic graph without excluded tail s1' do
  duplicator = Duplicator.new([@s1])
  dup_c1 = duplicator.duplicate(@c1)
  # get original and duplicated node c4 (c1 -> c2 -> [c3, c4])
  c4 = @c1.children[0].children[1]
  dup_c4 = dup_c1.children[0].children[1]
  # the duplicate differs in value because c4 lost its s1 child
  expect(dup_c1).to_not eq(@c1)
  expect(dup_c4.children.length).to_not eq(c4.children.length)
end
it 'duplicates cyclic graph without c5' do
  duplicator = Duplicator.new([@c5])
  dup_c1 = duplicator.duplicate(@c1)
  dup_c3 = dup_c1.children[0].children[0]
  expect(dup_c1).to_not be(@c1)
  # c3's only child was the excluded c5, so the duplicate of c3 is childless
  expect(dup_c3.children).to be_empty
end
it 'duplicates cyclic graph without c4' do
  duplicator = Duplicator.new([@c4])
  dup_c1 = duplicator.duplicate(@c1)
  # should be left with c1 -> c2 -> c3 -> c5
  dup_c2 = dup_c1.children[0]
  dup_c3 = dup_c2.children[0]
  dup_c5 = dup_c3.children[0]
  # excluding c4 breaks the cycle: a simple chain remains
  expect(dup_c1.children.length).to eq(1)
  expect(dup_c2.children.length).to eq(1)
  expect(dup_c3.children.length).to eq(1)
  expect(dup_c5.children.length).to eq(0)
end
# Starting inside the cycle must still duplicate the whole cycle.
it 'duplicates sub-graph from c3' do
  duplicator = Duplicator.new
  dup_c3 = duplicator.duplicate(@c3)
  dup_c4 = dup_c3.children[0].children[0]
  expect(dup_c3).to_not be(@c3)
  expect(dup_c4.children[0]).to eq(@s1)
  # check cycle
  expect(dup_c4.children[1]).to be(dup_c3)
end
end
context 'when an array of objects is duplicated' do
  before :each do
    create_cyclic_graph
    create_second_cyclic_graph
    @duplicator = Duplicator.new
  end

  it 'duplicates objects mentioned twice without creating extras' do
    duplicated_stuff = @duplicator.duplicate([@c1, @c3])
    # @c3 is also reachable from @c1 (c1 -> c2 -> c3), so the array entry
    # and the nested duplicate must be the same object
    dup_c3 = duplicated_stuff[0].children[0].children[0]
    expect(duplicated_stuff.length).to eq(2)
    expect(duplicated_stuff[0]).to eq(@c1)
    expect(duplicated_stuff[0]).to_not be(@c1)
    expect(duplicated_stuff[1]).to eq(@c3)
    expect(duplicated_stuff[1]).to_not be(@c3)
    expect(duplicated_stuff[1]).to be(dup_c3)
  end

  it 'duplicates disjoint graphs' do
    # @c1 and @c21 belong to two unconnected graphs
    duplicated_stuff = @duplicator.duplicate([@c1, @c21])
    expect(duplicated_stuff.length).to eq(2)
    expect(duplicated_stuff[0]).to eq(@c1)
    expect(duplicated_stuff[0]).to_not be(@c1)
    expect(duplicated_stuff[1]).to eq(@c21)
    expect(duplicated_stuff[1]).to_not be(@c21)
  end
end
context 'when joined graphs are duplicated' do
  before :each do
    create_cyclic_graph
    create_second_cyclic_graph
    # join graphs
    c1_children = @c1.children
    c1_children << @c21
    @c1.instance_variable_set(:@children, c1_children)
    @duplicator = Duplicator.new
  end

  it 'duplicates all objects' do
    @duplicator.duplicate(@c1)
    # 6 nodes from the first graph + 3 from the second = 9 duplicates
    duplicated_objects = @duplicator.instance_variable_get(:@duplicated_objects)
    expect(duplicated_objects.length).to eq(9)
  end

  it 'duplicates cyclically joined graphs' do
    # join from c21 to c1, making the two graphs mutually reachable
    c21_children = @c21.children
    c21_children << @c1
    @c21.instance_variable_set(:@children, c21_children)
    @duplicator.duplicate(@c21)
    duplicated_objects = @duplicator.instance_variable_get(:@duplicated_objects)
    expect(duplicated_objects.length).to eq(9)
  end
end
end
end
context 'when ActiveRecord objects' do
# Minimal ActiveRecord model for duplication tests.
# initialize_duplicate is intentionally a no-op: this model has no
# associations that would need rewiring after duplication.
class SimpleActiveRecord < ActiveRecord::Base
  def initialize_duplicate(_duplicator)
  end
end
temporary_table(:simple_active_records) do |t|
t.integer :data
end
# Self-referential model: a many-to-many parent/child relationship
# through the children_parents join table.
class ComplexActiveRecord < ActiveRecord::Base
  has_and_belongs_to_many :children, class_name: 'ComplexActiveRecord',
                                     foreign_key: 'parent_id',
                                     join_table: :children_parents,
                                     association_foreign_key: 'children_id'
  has_and_belongs_to_many :parents, class_name: 'ComplexActiveRecord',
                                    foreign_key: 'children_id',
                                    join_table: :children_parents,
                                    association_foreign_key: 'parent_id'

  # Hook invoked by Duplicator: duplicates every child (duplicate returns
  # nil for excluded ones, which are skipped) and attaches the duplicated
  # children to this record's own duplicate.
  def initialize_duplicate(duplicator)
    new_children = []
    children.each do |child|
      new_child = duplicator.duplicate(child)
      new_children << new_child unless new_child.nil?
    end
    duplicator.duplicate(self).children = new_children
  end
end
temporary_table(:complex_active_records) do |t|
t.integer :data
end
temporary_table(:children_parents) do |t|
t.integer :children_id, foreign_key: { references: :complex_active_records, primary_key: :id }
t.integer :parent_id, foreign_key: { references: :complex_active_records, primary_key: :id }
end
# Persists a cyclic ActiveRecord graph: cycle c3 -> c5 -> c4 -> c3,
# with the tail node c6 hanging off c4.
def create_ar_cyclic_graph
  #
  # ------> c3 ------
  # |        ^      v
  # c1-> c2  |      c5
  #       |  |      |
  # ------> c4 <-----
  #          |
  #          ----> c6
  #
  @car1 = ComplexActiveRecord.create(data: 11)
  @car2 = @car1.children.create(data: 12)
  @car3 = @car2.children.create(data: 13)
  @car4 = @car2.children.create(data: 14)
  @car5 = @car3.children.create(data: 15)
  @car6 = @car4.children.create(data: 16)
  # close the cycle through the join table
  @car4.children << @car3
  @car5.children << @car4
end
# Persists an acyclic ActiveRecord graph (a DAG): c25 is shared by c24
# and c26.
def create_ar_graph
  #
  # --> c22 ---> c23
  # |
  # c21 --> c24 ---> c25
  # |                 ^
  # --> c26 ----------|
  #
  @car21 = ComplexActiveRecord.create(data: 21)
  @car22 = @car21.children.create(data: 22)
  @car23 = @car22.children.create(data: 23)
  @car24 = @car21.children.create(data: 24)
  @car25 = @car24.children.create(data: 25)
  @car26 = @car21.children.create(data: 26)
  # c26 adopts c24's children, i.e. c25 becomes a shared child
  @car26.children = @car24.children
end
with_temporary_table(:simple_active_records) do
context 'when SimpleActiveRecord objects are duplicated' do
before :each do
@sar_1 = SimpleActiveRecord.create(data: 1)
end
it 'is duplicated by default' do
  duplicator = Duplicator.new
  duplicator.duplicate(@sar_1).save
  all_records = SimpleActiveRecord.all
  # original + duplicate: same data, distinct database ids
  expect(SimpleActiveRecord.count).to eq(2)
  expect(all_records[0].data).to eq(all_records[1].data)
  expect(all_records[0].id).to_not eq(all_records[1].id)
end
it 'is not duplicated if excluded' do
  # excluding the record itself makes duplicate return nil
  duplicator = Duplicator.new([@sar_1])
  expect(duplicator.duplicate(@sar_1)).to be_nil
end
it 'is duplicated once' do
  duplicator = Duplicator.new
  # Compare the duplicates themselves, not the return value of #save:
  # the old code captured save's boolean, so `expect(dup1).to be(dup2)`
  # was `true.equal?(true)` — a vacuous assertion.
  dup1 = duplicator.duplicate(@sar_1)
  dup2 = duplicator.duplicate(@sar_1)
  dup1.save
  expect(dup1).to be(dup2)
end
end
end
with_temporary_table(:complex_active_records) do
with_temporary_table(:children_parents) do
# ComplexActiveRecord objects have associations to themselves
context 'when ComplexActiveRecord objects are duplicated' do
before :each do
create_ar_graph
end
context 'without exclusions' do
before :each do
@duplicator = Duplicator.new
end
it 'duplicates a ComplexActiveRecord object' do
dup_c22 = @duplicator.duplicate(@car22)
dup_c22.save
expect(dup_c22.data).to eq(@car22.data)
expect(dup_c22.children.size).to eq(1)
expect(dup_c22.children[0].data).to eq(@car23.data)
expect(dup_c22.children[0]).to_not be(@car23)
end
it 'duplicates object referenced in 2 places once' do
dup_c24 = @duplicator.duplicate(@car24)
dup_c24.save
dup_c26 = @duplicator.duplicate(@car26)
dup_c26.save
expect(dup_c24.children[0]).to be(dup_c26.children[0])
expect(dup_c24.children[0].data).to eq(@car25.data)
end
it 'duplicates ComplexActiveRecord object with children' do
dup_c21 = @duplicator.duplicate(@car21)
dup_c21.save
dup_c22 = dup_c21.children[0]
dup_c23 = dup_c22.children[0]
dup_c24 = dup_c21.children[1]
dup_c25 = dup_c24.children[0]
dup_c26 = dup_c21.children[2]
# Check that data is duplicated correctly, duplicates not the same object
expect(dup_c21.data).to eq(@car21.data)
expect(dup_c22.data).to eq(@car22.data)
expect(dup_c23.data).to eq(@car23.data)
expect(dup_c24.data).to eq(@car24.data)
expect(dup_c25.data).to eq(@car25.data)
expect(dup_c26.data).to eq(@car26.data)
expect(dup_c21).to_not be(@car21)
expect(dup_c22).to_not be(@car22)
expect(dup_c23).to_not be(@car23)
expect(dup_c24).to_not be(@car24)
expect(dup_c25).to_not be(@car25)
expect(dup_c26).to_not be(@car26)
# Check associations
# dup_c21's children
expect(dup_c21.children.size).to eq(3)
expect(dup_c21.children).to include(dup_c22)
expect(dup_c21.children).to include(dup_c24)
expect(dup_c21.children).to include(dup_c26)
# dup_c22's children
expect(dup_c22.children.size).to eq(1)
expect(dup_c22.children).to include(dup_c23)
# dup_c23's children
expect(dup_c23.children.size).to eq(0)
# dup_c24's children
expect(dup_c24.children.size).to eq(1)
expect(dup_c24.children).to include(dup_c25)
# dup_c25's children
expect(dup_c25.children.size).to eq(0)
# dup_c26's children
expect(dup_c26.children.size).to eq(1)
expect(dup_c26.children).to include(dup_c25)
end
end
context 'with exclusions' do
it 'duplicates ComplexActiveRecord object but not excluded children' do
duplicator = Duplicator.new([@car24, @car26])
dup_c21 = duplicator.duplicate(@car21)
dup_c22 = dup_c21.children[0]
dup_c23 = dup_c22.children[0]
expect(dup_c21.data).to eq(21)
expect(dup_c22.data).to eq(22)
expect(dup_c23.data).to eq(23)
expect(dup_c21).to_not be(@car21)
expect(dup_c22).to_not be(@car22)
expect(dup_c23).to_not be(@car23)
expect(dup_c21.children.size).to eq(1)
expect(dup_c22.children.size).to eq(1)
end
it 'partially duplicates objects when some children are excluded' do
duplicator = Duplicator.new([@car22, @car25])
dup_c21 = duplicator.duplicate(@car21)
dup_c24 = dup_c21.children[0]
dup_c26 = dup_c21.children[1]
expect(dup_c21.data).to eq(21)
expect(dup_c24.data).to eq(24)
expect(dup_c26.data).to eq(26)
expect(dup_c21).to_not be(@car21)
expect(dup_c24).to_not be(@car24)
expect(dup_c26).to_not be(@car26)
expect(dup_c21.children.size).to eq(2)
expect(dup_c24.children.size).to eq(0)
expect(dup_c26.children.size).to eq(0)
end
end
end
context 'when ComplexActiveRecord object graphs are duplicated' do
before :each do
create_ar_cyclic_graph
end
context 'with cycles' do
it 'duplicates cyclic two object graph' do
c1 = ComplexActiveRecord.create(data: 51)
c2 = c1.children.create(data: 52)
c2.children << c1
duplicator = Duplicator.new
dup_c1 = duplicator.duplicate(c1)
dup_c2 = dup_c1.children[0]
# check that objects are duplicated
expect(dup_c1.data).to eq(c1.data)
expect(dup_c1).to_not be(c1)
expect(dup_c2.data).to eq(c2.data)
expect(dup_c2).to_not be(c2)
# check associations
expect(dup_c1.children).to include(dup_c2)
expect(dup_c2.children).to include(dup_c1)
expect(dup_c1.children).to_not include(c2)
expect(dup_c2.children).to_not include(c1)
end
it 'duplicates cyclic graph' do
duplicator = Duplicator.new
dup_c1 = duplicator.duplicate(@car1)
dup_c1.save
dup_c3 = dup_c1.children[0].children[0]
dup_c5 = dup_c3.children[0]
dup_c4 = dup_c1.children[0].children[1]
expect(ComplexActiveRecord.count).to eq(12)
# check cycle data
expect(dup_c3.data).to eq(13)
expect(dup_c4.data).to eq(14)
expect(dup_c5.data).to eq(15)
# check cycle associations
expect(dup_c5.children[0]).to be(dup_c4)
expect(dup_c4.children.size).to eq(2)
expect(dup_c4.children).to include(dup_c3)
expect(dup_c3.children).to include(dup_c5)
expect(dup_c3.children.size).to eq(1)
end
it 'duplicates cyclic graph without c4' do
duplicator = Duplicator.new([@car4])
dup_c1 = duplicator.duplicate(@car1)
dup_c1.save
# should be left with c1 -> c2 -> c3 -> c5
dup_c2 = dup_c1.children[0]
dup_c3 = dup_c2.children[0]
dup_c5 = dup_c3.children[0]
# check nodes duplicated
expect(dup_c1.data).to eq(@car1.data)
expect(dup_c2.data).to eq(@car2.data)
expect(dup_c3.data).to eq(@car3.data)
expect(dup_c5.data).to eq(@car5.data)
# check 1 child each
expect(dup_c1.children.size).to eq(1)
expect(dup_c2.children.size).to eq(1)
expect(dup_c3.children.size).to eq(1)
expect(dup_c5.children.size).to eq(0)
# check the children
expect(dup_c1.children).to include(dup_c2)
expect(dup_c2.children).to include(dup_c3)
expect(dup_c3.children).to include(dup_c5)
end
it 'duplicates sub-graph from c3' do
duplicator = Duplicator.new
dup_c3 = duplicator.duplicate(@car3)
dup_c3.save
# check cycle
expect(dup_c3.children[0].children[0].children).to include(dup_c3)
end
it 'duplicates cyclic graph without c5' do
duplicator = Duplicator.new([@car5])
dup_c2 = duplicator.duplicate(@car2)
dup_c3 = dup_c2.children[0]
# check that @car3 has no children (because @car5 was excluded)
expect(dup_c3.children).to be_empty
expect(dup_c2.children[1].children).to include(dup_c3)
end
it 'duplicates cyclic graph without excluded tail c6' do
duplicator = Duplicator.new([@car6])
dup_c2 = duplicator.duplicate(@car2)
dup_c4 = dup_c2.children[1]
expect(dup_c4.data).to eq(@car4.data)
expect(dup_c4.children.size).to eq(1)
end
end
context 'when an array of objects are duplicated' do
before :each do
create_ar_graph
@duplicator = Duplicator.new
end
it 'duplicates disjoint graphs' do
  duplicated_stuff = @duplicator.duplicate([@car1, @car21])
  expect(duplicated_stuff.length).to eq(2)
  expect(duplicated_stuff[0].data).to eq(@car1.data)
  expect(duplicated_stuff[0]).to_not be(@car1)
  # use eq, not be: we want value equality of the data column, not object
  # identity (the old `be` only passed thanks to MRI's integer interning)
  expect(duplicated_stuff[1].data).to eq(@car21.data)
  expect(duplicated_stuff[1]).to_not be(@car21)
end
it 'duplicates objects mentioned twice without creating extras' do
  # @car3 is also reachable from @car2, so both array entries must share
  # one duplicate
  duplicated_stuff = @duplicator.duplicate([@car2, @car3])
  dup_c3 = duplicated_stuff[0].children[0]
  expect(duplicated_stuff.length).to eq(2)
  expect(duplicated_stuff[0].data).to eq(@car2.data)
  expect(duplicated_stuff[0]).to_not be(@car2)
  # use eq, not be: value equality of the data column, not object identity
  expect(duplicated_stuff[1].data).to eq(@car3.data)
  expect(duplicated_stuff[1]).to_not be(@car3)
  expect(duplicated_stuff[1]).to be(dup_c3)
end
end
context 'when joined graphs are duplicated' do
before :each do
create_ar_graph
# join graphs
@car1.children << @car21
@duplicator = Duplicator.new
end
it 'duplicates all objects' do
dup_c1 = @duplicator.duplicate(@car1)
dup_c1.save
duplicated_objects = @duplicator.instance_variable_get(:@duplicated_objects)
expect(duplicated_objects.length).to eq(12)
end
it 'duplicates cyclically joined graphs' do
# join in the other direction
@car21.children << @car1
dup_c21 = @duplicator.duplicate(@car21)
dup_c21.save
duplicated_objects = @duplicator.instance_variable_get(:@duplicated_objects)
expect(duplicated_objects.length).to eq(12)
end
end
end
end
end
end
end
|
require "spec_helper"
describe Lob::Resources::Member do
before :each do
@sample_group_params = {
description: "Unsubscribe group",
metadata: {
test: 'stuff'
}
}
@sample_member_params = {
recipient: {
name: 'Sophie',
address_line1: '185 Berry St',
address_line2: '6100',
address_city: 'San Francisco',
address_state: 'CA',
address_zip: '94107'
},
description: "Some Member",
metadata: {
test: 'stuff'
}
}
end
subject { Lob::Client.new(api_key: API_KEY) }
describe "list" do
it "should list members in a group" do
new_group = subject.groups.create @sample_group_params
assert subject.members.list(new_group["id"])["object"] == "list"
end
end
describe "create" do
it "should create a member" do
new_group = subject.groups.create @sample_group_params
result = subject.members.create(new_group["id"], @sample_member_params)
result["description"].must_equal(@sample_member_params[:description])
end
end
describe "find" do
it "should find a member" do
new_group = subject.groups.create @sample_group_params
new_member = subject.members.create(new_group["id"], @sample_member_params)
find_result = subject.members.find(new_group["id"], new_member["id"])
find_result["description"].must_equal(@sample_member_params[:description])
end
end
describe "destroy" do
it "should delete a member" do
new_group = subject.groups.create @sample_group_params
new_member = subject.members.create(new_group["id"], @sample_member_params)
delete_result = subject.members.destroy(new_group["id"], new_member["id"])
assert_equal(new_member["id"], delete_result["id"])
end
end
end
Move group creation into the before(:each) block
require "spec_helper"
describe Lob::Resources::Member do
before :each do
@sample_group_params = {
description: "Unsubscribe group",
metadata: {
test: 'stuff'
}
}
@sample_member_params = {
recipient: {
name: 'Sophie',
address_line1: '185 Berry St',
address_line2: '6100',
address_city: 'San Francisco',
address_state: 'CA',
address_zip: '94107'
},
description: "Some Member",
metadata: {
test: 'stuff'
}
}
@new_group = subject.groups.create @sample_group_params
end
subject { Lob::Client.new(api_key: API_KEY) }
describe "list" do
it "should list members in a group" do
assert subject.members.list(@new_group["id"])["object"] == "list"
end
end
describe "create" do
it "should create a member" do
result = subject.members.create(@new_group["id"], @sample_member_params)
result["description"].must_equal(@sample_member_params[:description])
end
end
describe "find" do
it "should find a member" do
new_member = subject.members.create(@new_group["id"], @sample_member_params)
find_result = subject.members.find(@new_group["id"], new_member["id"])
find_result["description"].must_equal(@sample_member_params[:description])
end
end
describe "destroy" do
it "should delete a member" do
new_member = subject.members.create(@new_group["id"], @sample_member_params)
delete_result = subject.members.destroy(@new_group["id"], new_member["id"])
assert_equal(new_member["id"], delete_result["id"])
end
end
end
|
describe Metacrunch::UBPB::Transformations::MAB2SNR::VolumeCount do
it "456, 466, 476, 486, 496 works" do
["456", "466", "476", "486", "496"].each do |field|
mab = mab_builder do
datafield(field, ind2: "1") { subfield("a", "2") }
end
result = mab2snr(mab)
expect(result.first_value("control/volume_count")).to eq(2)
end
end
it "Lower fields have precedence over hight fields" do
mab = mab_builder do
datafield("456", ind2: "1") { subfield("a", "1") }
datafield("496", ind2: "1") { subfield("a", "2") }
end
result = mab2snr(mab)
expect(result.first_value("control/volume_count")).to eq(1)
end
end
Delete for now.
|
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
class TestShoe
extend Metamuse::Association
attr_accessor :color
def initialize(attrs={})
attrs.each {|k,v| send(:"#{k}=", v)}
end
end
class TestPerson
extend Metamuse::Association
has_many :shoes, TestShoe
end
TestShoe.class_eval { belongs_to :person, TestPerson }
describe Metamuse::Association do
it "creates a shoes accessor which returns an array" do
TestPerson.new.shoes.should be_kind_of(Array)
end
describe "adding shoes to the test person" do
before do
@person = TestPerson.new
end
it "adds a single red shoe" do
expect {
@person.shoes << {:color => "red"}
}.to change{@person.shoes.size}.by(1)
end
it "creates a new shoe when not appending a TestShoe" do
shoe = TestShoe.new
TestShoe.should_receive(:new).with({:color => 'red'}).and_return(shoe)
@person.shoes << {:color => "red"}
@person.shoes.last.should == shoe
end
it "appends a TestShoe" do
@person.shoes << TestShoe.new(:color => "red")
@person.shoes.last.should be_instance_of(TestShoe)
end
it "appends many shoes" do
@person.shoes << [TestShoe.new(:color => "red"), {:color => "blue"}]
@person.shoes.size.should == 2
@person.shoes.map {|s| s.class}.uniq.should == [TestShoe]
end
it "appends shoes when setting the collection" do
@person.shoes.should_receive(:<<).with({:color => 'red'})
@person.shoes = {:color => 'red'}
end
end
describe "TestShoe belongs to a TestPerson" do
before do
@person = TestPerson.new
end
it "sets the shoe's person equal to the person that added the shoe" do
@person.shoes << {:color => 'red', :person => @person}
@person.shoes.last.person.should == @person
end
it "sets the shoe's person when not explicitly available in the hash" do
@person.shoes << {:color => 'red'}
@person.shoes.last.person.should == @person
end
it "uses the person in the hash to override implicit belongs_to"
end
end
Remove misguided pending test
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
class TestShoe
extend Metamuse::Association
attr_accessor :color
def initialize(attrs={})
attrs.each {|k,v| send(:"#{k}=", v)}
end
end
class TestPerson
extend Metamuse::Association
has_many :shoes, TestShoe
end
TestShoe.class_eval { belongs_to :person, TestPerson }
describe Metamuse::Association do
it "creates a shoes accessor which returns an array" do
TestPerson.new.shoes.should be_kind_of(Array)
end
describe "adding shoes to the test person" do
before do
@person = TestPerson.new
end
it "adds a single red shoe" do
expect {
@person.shoes << {:color => "red"}
}.to change{@person.shoes.size}.by(1)
end
it "creates a new shoe when not appending a TestShoe" do
shoe = TestShoe.new
TestShoe.should_receive(:new).with({:color => 'red'}).and_return(shoe)
@person.shoes << {:color => "red"}
@person.shoes.last.should == shoe
end
it "appends a TestShoe" do
@person.shoes << TestShoe.new(:color => "red")
@person.shoes.last.should be_instance_of(TestShoe)
end
it "appends many shoes" do
@person.shoes << [TestShoe.new(:color => "red"), {:color => "blue"}]
@person.shoes.size.should == 2
@person.shoes.map {|s| s.class}.uniq.should == [TestShoe]
end
it "appends shoes when setting the collection" do
@person.shoes.should_receive(:<<).with({:color => 'red'})
@person.shoes = {:color => 'red'}
end
end
describe "TestShoe belongs to a TestPerson" do
before do
@person = TestPerson.new
end
it "sets the shoe's person equal to the person that added the shoe" do
@person.shoes << {:color => 'red', :person => @person}
@person.shoes.last.person.should == @person
end
it "sets the shoe's person when not explicitly available in the hash" do
@person.shoes << {:color => 'red'}
@person.shoes.last.person.should == @person
end
end
end
|
require 'spec_helper'
describe HetsInstance do
let(:general_version) { Hets.minimum_version.to_s }
let(:specific_version) { Hets.minimum_revision.to_s }
let(:hets_instance) { create_local_hets_instance }
context 'when registering a hets instance' do
context 'wrt. to update-jobs' do
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return(status: 200,
body: "v#{general_version}, #{specific_version}",
headers: {})
hets_instance
end
it 'should create a job' do
expect(HetsInstanceWorker.jobs.count).to eq(1)
end
it 'should have a job with the correct attributes' do
expect(HetsInstanceWorker.jobs.first).
to include('args' => [hets_instance.id])
end
end
end
context 'when choosing a hets instance' do
context 'and there is no hets instance recorded' do
it 'should raise the appropriate error' do
expect { HetsInstance.choose! }.
to raise_error(HetsInstance::NoRegisteredHetsInstanceError)
end
end
context 'and there is no acceptable hets instance' do
let(:hets_instance) { create_local_hets_instance }
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return(status: 500, body: "", headers: {})
hets_instance
end
it 'should raise the appropriate error' do
expect { HetsInstance.choose! }.
to raise_error(HetsInstance::NoSelectableHetsInstanceError)
end
context 'try again re-checks up-state' do
before do
allow(HetsInstance).to receive(:choose!).and_call_original
allow_any_instance_of(HetsInstance).
to receive(:set_up_state!).and_call_original
expect_any_instance_of(HetsInstance).
to receive(:set_up_state!).exactly(HetsInstance.count).times
begin
HetsInstance.choose!
rescue HetsInstance::NoSelectableHetsInstanceError
end
end
it 'should have tried again' do
expect(HetsInstance).to have_received(:choose!).with(no_args)
end
it 'should have not tried a second time' do
expect(HetsInstance).to have_received(:choose!).with(try_again: false)
end
it 'should have tried twice' do
expect(HetsInstance).to have_received(:choose!).twice
end
end
end
context 'and there is an acceptable hets instance' do
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return(status: 200,
body: "v#{general_version}, #{specific_version}",
headers: {})
hets_instance
end
it 'should return that hets instance' do
expect(HetsInstance.choose!).to eq(hets_instance)
end
end
context 'load balancing' do
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return({status: 200,
body: "v#{general_version}, #{specific_version}",
headers: {}})
end
context 'error on execution' do
let!(:hets_instance) { create :hets_instance, state: 'free' }
it 're-raises the error' do
  # an error raised inside the work block must propagate to the caller
  expect { HetsInstance.with_instance! { raise 'my_error' } }.
    to raise_error('my_error')
end
it 'frees the instance' do
  chosen = nil
  begin
    HetsInstance.with_instance! do |instance|
      chosen = instance
      raise 'my_error'
    end
  rescue StandardError
    # expected: with_instance! re-raises after freeing the instance
  end
  # Assert OUTSIDE the rescue: the old code only ran the expectation when
  # the error was rescued, so the example passed vacuously if no error
  # was raised at all.
  expect(chosen.state).to eq('free')
end
end
context 'free, force-free and busy are available' do
let!(:free) { create :hets_instance, state: 'free' }
let!(:force_free) { create :hets_instance, state: 'force-free' }
let!(:busy) { create :hets_instance, state: 'busy' }
it 'choose! chose the free instance' do
expect(HetsInstance.choose!.uri).to eq(free.uri)
end
it 'with_instance! returns the result of its block' do
expect(HetsInstance.with_instance! { |_i| :result }).to be(:result)
end
it 'with_instance! chose the free instance' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.uri).to eq(free.uri)
end
it 'choose! marks it as busy' do
instance = HetsInstance.choose!
expect(instance.state).to eq('busy')
end
it 'choose! does not increase the queue size' do
instance = HetsInstance.choose!
expect(instance.queue_size).to eq(0)
end
it 'with_instance! does not increase the queue size during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.queue_size).to eq(0)
end
end
it 'with_instance! does not increase the queue size after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.queue_size).to eq(0)
end
it 'finish_work! marks it as free' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.state).to eq('free')
end
it 'finish_work! does not decrease the queue size' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.queue_size).to eq(0)
end
it 'with_instance! marks it as busy during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.state).to eq('busy')
end
end
it 'with_instance! marks it as free after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.state).to eq('free')
end
end
context 'force-free and busy are available' do
let!(:force_free0) { create :hets_instance, state: 'force-free', queue_size: 0 }
let!(:force_free1) { create :hets_instance, state: 'force-free', queue_size: 1 }
let!(:busy) { create :hets_instance, state: 'busy' }
it 'choose! chose the force-free instance with queue size 0' do
expect(HetsInstance.choose!.uri).to eq(force_free0.uri)
end
it 'with_instance! chose the force-free instance with queue size 0' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.uri).to eq(force_free0.uri)
end
it 'choose! marks it as busy' do
instance = HetsInstance.choose!
expect(instance.state).to eq('busy')
end
it 'choose! increases the queue size' do
instance = HetsInstance.choose!
expect(instance.queue_size).to eq(1)
end
it 'with_instance! increases the queue size during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.queue_size).to eq(1)
end
end
it 'with_instance! does not increase the queue size after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.queue_size).to eq(0)
end
it 'finish_work! marks it as free' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.state).to eq('free')
end
it 'finish_work! decreases the queue size' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.queue_size).to eq(0)
end
it 'with_instance! marks it as busy during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.state).to eq('busy')
end
end
it 'with_instance! marks it as free after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.state).to eq('free')
end
end
context 'only busy are available' do
let!(:busy0) { create :hets_instance, state: 'busy', queue_size: 0 }
let!(:busy1) { create :hets_instance, state: 'busy', queue_size: 1 }
it 'choose! chose the busy instance with queue size 0' do
expect(HetsInstance.choose!.uri).to eq(busy0.uri)
end
it 'with_instance! chose the busy instance with queue size 0' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.uri).to eq(busy0.uri)
end
it 'choose! marks it as busy' do
instance = HetsInstance.choose!
expect(instance.state).to eq('busy')
end
it 'choose! increases the queue size' do
instance = HetsInstance.choose!
expect(instance.queue_size).to eq(1)
end
it 'with_instance! increases the queue size during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.queue_size).to eq(1)
end
end
it 'with_instance! does not increase the queue size after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.queue_size).to eq(0)
end
it 'finish_work! marks it as free' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.state).to eq('free')
end
it 'finish_work! decreases the queue size' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.queue_size).to eq(0)
end
it 'with_instance! marks it as busy during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.state).to eq('busy')
end
end
it 'with_instance! marks it as free after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.state).to eq('free')
end
end
# When every instance is busy AND already has queued work, the scheduler
# falls back to the busy instance with the smallest queue.
context 'only very busy are available' do
  let!(:busy0) { create :hets_instance, state: 'busy', queue_size: 1 }
  let!(:busy1) { create :hets_instance, state: 'busy', queue_size: 2 }

  # Description fixed: busy0 has queue size 1, not 0 — it is simply the
  # instance with the smallest queue.
  it 'choose! chose the busy instance with the smallest queue size' do
    expect(HetsInstance.choose!.uri).to eq(busy0.uri)
  end

  it 'with_instance! chose the busy instance with the smallest queue size' do
    chosen = nil
    HetsInstance.with_instance! { |instance| chosen = instance }
    expect(chosen.uri).to eq(busy0.uri)
  end

  it 'choose! marks it as busy' do
    instance = HetsInstance.choose!
    expect(instance.state).to eq('busy')
  end

  it 'choose! increases the queue size' do
    instance = HetsInstance.choose!
    expect(instance.queue_size).to eq(2)
  end

  it 'with_instance! increases the queue size during the work' do
    HetsInstance.with_instance! do |instance|
      expect(instance.queue_size).to eq(2)
    end
  end

  it 'with_instance! does not increase the queue size after the work' do
    chosen = nil
    HetsInstance.with_instance! { |instance| chosen = instance }
    expect(chosen.queue_size).to eq(1)
  end

  it 'finish_work! still marks it as busy' do
    instance = HetsInstance.choose!
    instance.finish_work!
    expect(instance.state).to eq('busy')
  end

  it 'finish_work! decreases the queue size' do
    instance = HetsInstance.choose!
    instance.finish_work!
    expect(instance.queue_size).to eq(1)
  end

  it 'with_instance! marks it as busy during the work' do
    HetsInstance.with_instance! do |instance|
      expect(instance.state).to eq('busy')
    end
  end

  # Description fixed: the queue is still non-empty after one unit of
  # work, so the instance stays busy (it previously said 'free' while
  # asserting 'busy').
  it 'with_instance! still marks it as busy after the work' do
    chosen = nil
    HetsInstance.with_instance! { |instance| chosen = instance }
    expect(chosen.state).to eq('busy')
  end
end
end
end
# Verifies the force-free lifecycle: set_busy! schedules a reclaiming
# worker, and set_force_free! only demotes instances that are busy.
context 'force-freeing an instance' do
  # The version endpoint answers 500, so the instance is considered down.
  before do
    stub_request(:get, %r{http://localhost:8\d{3}/version}).
      to_return(status: 500, body: "", headers: {})
  end

  context 'set_busy!' do
    let!(:hets_instance) { create :hets_instance, state: 'free' }

    # Stub the worker so no background job is actually enqueued.
    before do
      allow(HetsInstanceForceFreeWorker).to receive(:perform_in)
    end

    it 'calls the HetsInstanceForceFreeWorker' do
      hets_instance.set_busy!
      expect(HetsInstanceForceFreeWorker).
        to have_received(:perform_in).
        with(HetsInstance::FORCE_FREE_WAITING_PERIOD, hets_instance.id)
    end
  end

  context 'set_force_free!' do
    # Only 'busy' transitions to 'force-free'; other states are unchanged.
    context 'on a free instance' do
      let!(:hets_instance) { create :hets_instance, state: 'free' }
      before { hets_instance.set_force_free! }
      it 'is a no-op' do
        expect(hets_instance.state).to eq('free')
      end
    end

    context 'on a force-free instance' do
      let!(:hets_instance) { create :hets_instance, state: 'force-free' }
      before { hets_instance.set_force_free! }
      it 'is a no-op' do
        expect(hets_instance.state).to eq('force-free')
      end
    end

    context 'on a busy instance' do
      let!(:hets_instance) { create :hets_instance, state: 'busy' }
      before { hets_instance.set_force_free! }
      it 'change the state' do
        expect(hets_instance.state).to eq('force-free')
      end
    end
  end
end
# Creation probes the instance's /version endpoint and derives the
# up-state and version fields from the response.
context 'when creating a hets instance' do
  context 'and it has a reachable uri' do
    before do
      stub_request(:get, %r{http://localhost:8\d{3}/version}).
        to_return(status: 200,
                  body: "v#{general_version}, #{specific_version}",
                  headers: {})
    end

    it 'should have an up-state of true' do
      expect(hets_instance.up).to be(true)
    end

    it 'should have a non-nil version' do
      expect(hets_instance.version).to_not be(nil)
    end

    it 'should have a correct general_version' do
      expect(hets_instance.general_version).to eq(general_version)
    end

    it 'should have a correct specific version' do
      expect(hets_instance.specific_version).to eq(specific_version)
    end
  end

  context 'and it has a non-reachable uri' do
    before do
      stub_request(:get, %r{http://localhost:8\d{3}/version}).
        to_return(status: 500, body: "", headers: {})
    end

    it 'should have an up-state of false' do
      expect(hets_instance.up).to be(false)
    end

    it 'should have a nil version' do
      expect(hets_instance.version).to be(nil)
    end

    it 'should have a nil general_version' do
      expect(hets_instance.general_version).to be(nil)
    end

    it 'should have a nil specific version' do
      expect(hets_instance.specific_version).to be(nil)
    end
  end
end
end
Test timeout and host unreachable behaviour for hets.
require 'spec_helper'
describe HetsInstance do
let(:general_version) { Hets.minimum_version.to_s }
let(:specific_version) { Hets.minimum_revision.to_s }
let(:hets_instance) { create_local_hets_instance }
context 'when registering a hets instance' do
context 'wrt. to update-jobs' do
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return(status: 200,
body: "v#{general_version}, #{specific_version}",
headers: {})
hets_instance
end
it 'should create a job' do
expect(HetsInstanceWorker.jobs.count).to eq(1)
end
it 'should have a job with the correct attributes' do
expect(HetsInstanceWorker.jobs.first).
to include('args' => [hets_instance.id])
end
end
end
context 'when a hets instance is not reachable' do
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return(status: 200,
body: "v#{general_version}, #{specific_version}",
headers: {})
# require creation of the object
hets_instance
end
context 'because of an unreachable host' do
before do
allow_any_instance_of(UriFetcher::GetCaller).
to receive(:make_http_request).and_raise(Errno::EHOSTUNREACH)
HetsInstance.check_up_state!(hets_instance.id)
end
it 'the up value is false' do
expect(hets_instance.reload.up).to be(false)
end
end
context 'because of a timeout' do
before do
allow_any_instance_of(UriFetcher::GetCaller).
to receive(:make_http_request).and_raise(Net::ReadTimeout)
HetsInstance.check_up_state!(hets_instance.id)
end
it 'the up value is false' do
expect(hets_instance.reload.up).to be(false)
end
end
end
context 'when choosing a hets instance' do
context 'and there is no hets instance recorded' do
it 'should raise the appropriate error' do
expect { HetsInstance.choose! }.
to raise_error(HetsInstance::NoRegisteredHetsInstanceError)
end
end
context 'and there is no acceptable hets instance' do
let(:hets_instance) { create_local_hets_instance }
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return(status: 500, body: "", headers: {})
hets_instance
end
it 'should raise the appropriate error' do
expect { HetsInstance.choose! }.
to raise_error(HetsInstance::NoSelectableHetsInstanceError)
end
# choose! makes an initial attempt; when nothing is selectable it
# re-checks every instance's up-state and retries exactly once with
# try_again: false (so it cannot recurse forever).
context 'try again re-checks up-state' do
  before do
    allow(HetsInstance).to receive(:choose!).and_call_original
    allow_any_instance_of(HetsInstance).
      to receive(:set_up_state!).and_call_original
    expect_any_instance_of(HetsInstance).
      to receive(:set_up_state!).exactly(HetsInstance.count).times
    begin
      HetsInstance.choose!
    rescue HetsInstance::NoSelectableHetsInstanceError
    end
  end

  it 'should have made the initial attempt without arguments' do
    expect(HetsInstance).to have_received(:choose!).with(no_args)
  end

  # Description fixed: the previous wording ('should have not tried a
  # second time') contradicted the assertion, which verifies that the
  # retry DID happen with try_again: false.
  it 'should have retried with try_again: false' do
    expect(HetsInstance).to have_received(:choose!).with(try_again: false)
  end

  it 'should have tried twice' do
    expect(HetsInstance).to have_received(:choose!).twice
  end
end
end
context 'and there is an acceptable hets instance' do
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return(status: 200,
body: "v#{general_version}, #{specific_version}",
headers: {})
hets_instance
end
it 'should return that hets instance' do
expect(HetsInstance.choose!).to eq(hets_instance)
end
end
context 'load balancing' do
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return({status: 200,
body: "v#{general_version}, #{specific_version}",
headers: {}})
end
context 'error on execution' do
  let!(:hets_instance) { create :hets_instance, state: 'free' }

  it 're-raises the error' do
    expect do
      HetsInstance.with_instance! { raise 'my_error' }
    end.to raise_error('my_error')
  end

  # Bug fix: the expectation used to live inside the rescue clause, so
  # the example passed vacuously whenever no error was raised. It now
  # always runs after the rescued block.
  it 'frees the instance' do
    chosen = nil
    begin
      HetsInstance.with_instance! do |instance|
        chosen = instance
        raise 'my_error'
      end
    rescue StandardError
      # expected — with_instance! re-raises errors from the block
    end
    expect(chosen.state).to eq('free')
  end
end
context 'free, force-free and busy are available' do
let!(:free) { create :hets_instance, state: 'free' }
let!(:force_free) { create :hets_instance, state: 'force-free' }
let!(:busy) { create :hets_instance, state: 'busy' }
it 'choose! chose the free instance' do
expect(HetsInstance.choose!.uri).to eq(free.uri)
end
it 'with_instance! returns the result of its block' do
expect(HetsInstance.with_instance! { |_i| :result }).to be(:result)
end
it 'with_instance! chose the free instance' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.uri).to eq(free.uri)
end
it 'choose! marks it as busy' do
instance = HetsInstance.choose!
expect(instance.state).to eq('busy')
end
it 'choose! does not increase the queue size' do
instance = HetsInstance.choose!
expect(instance.queue_size).to eq(0)
end
it 'with_instance! does not increase the queue size during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.queue_size).to eq(0)
end
end
it 'with_instance! does not increase the queue size after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.queue_size).to eq(0)
end
it 'finish_work! marks it as free' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.state).to eq('free')
end
it 'finish_work! does not decrease the queue size' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.queue_size).to eq(0)
end
it 'with_instance! marks it as busy during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.state).to eq('busy')
end
end
it 'with_instance! marks it as free after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.state).to eq('free')
end
end
context 'force-free and busy are available' do
let!(:force_free0) { create :hets_instance, state: 'force-free', queue_size: 0 }
let!(:force_free1) { create :hets_instance, state: 'force-free', queue_size: 1 }
let!(:busy) { create :hets_instance, state: 'busy' }
it 'choose! chose the force-free instance with queue size 0' do
expect(HetsInstance.choose!.uri).to eq(force_free0.uri)
end
it 'with_instance! chose the force-free instance with queue size 0' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.uri).to eq(force_free0.uri)
end
it 'choose! marks it as busy' do
instance = HetsInstance.choose!
expect(instance.state).to eq('busy')
end
it 'choose! increases the queue size' do
instance = HetsInstance.choose!
expect(instance.queue_size).to eq(1)
end
it 'with_instance! increases the queue size during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.queue_size).to eq(1)
end
end
it 'with_instance! does not increase the queue size after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.queue_size).to eq(0)
end
it 'finish_work! marks it as free' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.state).to eq('free')
end
it 'finish_work! decreases the queue size' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.queue_size).to eq(0)
end
it 'with_instance! marks it as busy during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.state).to eq('busy')
end
end
it 'with_instance! marks it as free after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.state).to eq('free')
end
end
context 'only busy are available' do
let!(:busy0) { create :hets_instance, state: 'busy', queue_size: 0 }
let!(:busy1) { create :hets_instance, state: 'busy', queue_size: 1 }
it 'choose! chose the busy instance with queue size 0' do
expect(HetsInstance.choose!.uri).to eq(busy0.uri)
end
it 'with_instance! chose the busy instance with queue size 0' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.uri).to eq(busy0.uri)
end
it 'choose! marks it as busy' do
instance = HetsInstance.choose!
expect(instance.state).to eq('busy')
end
it 'choose! increases the queue size' do
instance = HetsInstance.choose!
expect(instance.queue_size).to eq(1)
end
it 'with_instance! increases the queue size during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.queue_size).to eq(1)
end
end
it 'with_instance! does not increase the queue size after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.queue_size).to eq(0)
end
it 'finish_work! marks it as free' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.state).to eq('free')
end
it 'finish_work! decreases the queue size' do
instance = HetsInstance.choose!
instance.finish_work!
expect(instance.queue_size).to eq(0)
end
it 'with_instance! marks it as busy during the work' do
HetsInstance.with_instance! do |instance|
expect(instance.state).to eq('busy')
end
end
it 'with_instance! marks it as free after the work' do
chosen = nil
HetsInstance.with_instance! { |instance| chosen = instance }
expect(chosen.state).to eq('free')
end
end
# When every instance is busy AND already has queued work, the scheduler
# falls back to the busy instance with the smallest queue.
context 'only very busy are available' do
  let!(:busy0) { create :hets_instance, state: 'busy', queue_size: 1 }
  let!(:busy1) { create :hets_instance, state: 'busy', queue_size: 2 }

  # Description fixed: busy0 has queue size 1, not 0 — it is simply the
  # instance with the smallest queue.
  it 'choose! chose the busy instance with the smallest queue size' do
    expect(HetsInstance.choose!.uri).to eq(busy0.uri)
  end

  it 'with_instance! chose the busy instance with the smallest queue size' do
    chosen = nil
    HetsInstance.with_instance! { |instance| chosen = instance }
    expect(chosen.uri).to eq(busy0.uri)
  end

  it 'choose! marks it as busy' do
    instance = HetsInstance.choose!
    expect(instance.state).to eq('busy')
  end

  it 'choose! increases the queue size' do
    instance = HetsInstance.choose!
    expect(instance.queue_size).to eq(2)
  end

  it 'with_instance! increases the queue size during the work' do
    HetsInstance.with_instance! do |instance|
      expect(instance.queue_size).to eq(2)
    end
  end

  it 'with_instance! does not increase the queue size after the work' do
    chosen = nil
    HetsInstance.with_instance! { |instance| chosen = instance }
    expect(chosen.queue_size).to eq(1)
  end

  it 'finish_work! still marks it as busy' do
    instance = HetsInstance.choose!
    instance.finish_work!
    expect(instance.state).to eq('busy')
  end

  it 'finish_work! decreases the queue size' do
    instance = HetsInstance.choose!
    instance.finish_work!
    expect(instance.queue_size).to eq(1)
  end

  it 'with_instance! marks it as busy during the work' do
    HetsInstance.with_instance! do |instance|
      expect(instance.state).to eq('busy')
    end
  end

  # Description fixed: the queue is still non-empty after one unit of
  # work, so the instance stays busy (it previously said 'free' while
  # asserting 'busy').
  it 'with_instance! still marks it as busy after the work' do
    chosen = nil
    HetsInstance.with_instance! { |instance| chosen = instance }
    expect(chosen.state).to eq('busy')
  end
end
end
end
context 'force-freeing an instance' do
before do
stub_request(:get, %r{http://localhost:8\d{3}/version}).
to_return(status: 500, body: "", headers: {})
end
context 'set_busy!' do
let!(:hets_instance) { create :hets_instance, state: 'free' }
before do
allow(HetsInstanceForceFreeWorker).to receive(:perform_in)
end
it 'calls the HetsInstanceForceFreeWorker' do
hets_instance.set_busy!
expect(HetsInstanceForceFreeWorker).
to have_received(:perform_in).
with(HetsInstance::FORCE_FREE_WAITING_PERIOD, hets_instance.id)
end
end
context 'set_force_free!' do
context 'on a free instance' do
let!(:hets_instance) { create :hets_instance, state: 'free' }
before { hets_instance.set_force_free! }
it 'is a no-op' do
expect(hets_instance.state).to eq('free')
end
end
context 'on a force-free instance' do
let!(:hets_instance) { create :hets_instance, state: 'force-free' }
before { hets_instance.set_force_free! }
it 'is a no-op' do
expect(hets_instance.state).to eq('force-free')
end
end
context 'on a busy instance' do
let!(:hets_instance) { create :hets_instance, state: 'busy' }
before { hets_instance.set_force_free! }
it 'change the state' do
expect(hets_instance.state).to eq('force-free')
end
end
end
end
# Creation probes the instance's /version endpoint and derives the
# up-state and version fields from the response.
context 'when creating a hets instance' do
  context 'and it has a reachable uri' do
    before do
      stub_request(:get, %r{http://localhost:8\d{3}/version}).
        to_return(status: 200,
                  body: "v#{general_version}, #{specific_version}",
                  headers: {})
    end

    it 'should have an up-state of true' do
      expect(hets_instance.up).to be(true)
    end

    it 'should have a non-nil version' do
      expect(hets_instance.version).to_not be(nil)
    end

    it 'should have a correct general_version' do
      expect(hets_instance.general_version).to eq(general_version)
    end

    it 'should have a correct specific version' do
      expect(hets_instance.specific_version).to eq(specific_version)
    end
  end

  context 'and it has a non-reachable uri' do
    before do
      stub_request(:get, %r{http://localhost:8\d{3}/version}).
        to_return(status: 500, body: "", headers: {})
    end

    it 'should have an up-state of false' do
      expect(hets_instance.up).to be(false)
    end

    it 'should have a nil version' do
      expect(hets_instance.version).to be(nil)
    end

    it 'should have a nil general_version' do
      expect(hets_instance.general_version).to be(nil)
    end

    it 'should have a nil specific version' do
      expect(hets_instance.specific_version).to be(nil)
    end
  end
end
end
|
require 'spec_helper'
describe ModuleResult do
describe 'associations' do
it { should have_one(:kalibro_module) }
# Usually we do not touch the database in unit tests, but this is a rather intricate self-relationship, so it's worth the cost.
context 'with children and parent associations' do
let(:parent_module_result) { FactoryGirl.create(:module_result) }
let(:child_module_result) { FactoryGirl.create(:module_result, parent: parent_module_result) }
describe 'children' do
it 'the parent should return the children' do
parent_module_result.children.should eq([child_module_result])
end
it 'should add a child' do
another_child = FactoryGirl.create(:module_result)
parent_module_result.children << another_child
parent_module_result.save
parent_module_result.children.should eq([another_child, child_module_result])
end
end
describe 'parent' do
it 'should return the child' do
child_module_result.parent.should eq(parent_module_result)
end
it 'should set the parent' do
another_parent = FactoryGirl.create(:module_result)
child_module_result.parent = another_parent
child_module_result.save
child_module_result.parent.should eq(another_parent)
end
end
end
end
describe 'method' do
describe 'initialize' do
context 'with valid attributes' do
let(:my_parent) { FactoryGirl.build(:module_result) }
let(:kalibro_module) { FactoryGirl.build(:kalibro_module) }
subject { FactoryGirl.build(:module_result, parent: my_parent) }
it 'should return an instance of ModuleResult' do
subject.should be_a(ModuleResult)
end
it 'should have the right attributes' do
subject.parent.should eq(my_parent)
subject.height.should eq(0)
subject.children.all.should be_empty
end
end
end
describe 'children' do
context 'when a module result has children' do
let(:child_module_result) { FactoryGirl.build(:module_result) }
let(:parent_module_result) { FactoryGirl.build(:module_result, children: [child_module_result]) }
it 'should set the children parents' do
child_module_result.parent = parent_module_result
parent_module_result.children.should eq([child_module_result])
child_module_result.parent.should eq(parent_module_result)
end
end
end
describe 'metric_result_for' do
subject { FactoryGirl.build(:module_result) }
let(:metric_result) {subject.metric_results.first}
context 'when a module result has the specific metric' do
let(:metric) { subject.metric_results.first.metric }
it 'should return the metric_result' do
subject.metric_result_for(metric).should eq(metric_result)
end
end
context 'when a module result does not have the specific metric' do
  let(:another_metric) { FactoryGirl.build(:native_metric) }

  # Description fixed: the previous wording ('should return the
  # metric_result') contradicted the nil assertion below.
  it 'should return nil' do
    subject.metric_result_for(another_metric).should be_nil
  end
end
end
describe 'add metric_result (not a method)' do
subject { FactoryGirl.build(:module_result, metric_results: []) }
let(:metric_result) {subject.metric_results.first}
it 'should add a metric_result using <<' do
subject.metric_results << metric_result
subject.metric_results.should include(metric_result)
end
end
end
end
Implement metric_results on ModuleResult instead of implementing metric_results_of(module_result_id) on MetricResult. ActiveRecord already provides this association for us. Only the corresponding test was implemented.
Signed-off-by: Renan Fichberg <08ad865304d5ec1ca5f842325ec3a9ce1ca22754@gmail.com>
require 'spec_helper'
describe ModuleResult do
describe 'associations' do
it { should have_one(:kalibro_module) }
# Usually we do not touch the database in unit tests, but this is a rather intricate self-relationship, so it's worth the cost.
context 'with children and parent associations' do
let(:parent_module_result) { FactoryGirl.create(:module_result) }
let(:child_module_result) { FactoryGirl.create(:module_result, parent: parent_module_result) }
describe 'children' do
it 'the parent should return the children' do
parent_module_result.children.should eq([child_module_result])
end
it 'should add a child' do
another_child = FactoryGirl.create(:module_result)
parent_module_result.children << another_child
parent_module_result.save
parent_module_result.children.should eq([another_child, child_module_result])
end
end
describe 'parent' do
it 'should return the child' do
child_module_result.parent.should eq(parent_module_result)
end
it 'should set the parent' do
another_parent = FactoryGirl.create(:module_result)
child_module_result.parent = another_parent
child_module_result.save
child_module_result.parent.should eq(another_parent)
end
end
end
end
describe 'method' do
describe 'initialize' do
context 'with valid attributes' do
let(:my_parent) { FactoryGirl.build(:module_result) }
let(:kalibro_module) { FactoryGirl.build(:kalibro_module) }
subject { FactoryGirl.build(:module_result, parent: my_parent) }
it 'should return an instance of ModuleResult' do
subject.should be_a(ModuleResult)
end
it 'should have the right attributes' do
subject.parent.should eq(my_parent)
subject.height.should eq(0)
subject.children.all.should be_empty
end
end
end
describe 'children' do
context 'when a module result has children' do
let(:child_module_result) { FactoryGirl.build(:module_result) }
let(:parent_module_result) { FactoryGirl.build(:module_result, children: [child_module_result]) }
it 'should set the children parents' do
child_module_result.parent = parent_module_result
parent_module_result.children.should eq([child_module_result])
child_module_result.parent.should eq(parent_module_result)
end
end
end
describe 'metric_result_for' do
subject { FactoryGirl.build(:module_result) }
let(:metric_result) {subject.metric_results.first}
context 'when a module result has the specific metric' do
let(:metric) { subject.metric_results.first.metric }
it 'should return the metric_result' do
subject.metric_result_for(metric).should eq(metric_result)
end
end
context 'when a module result does not have the specific metric' do
  let(:another_metric) { FactoryGirl.build(:native_metric) }

  # Description fixed: the previous wording ('should return the
  # metric_result') contradicted the nil assertion below.
  it 'should return nil' do
    subject.metric_result_for(another_metric).should be_nil
  end
end
end
describe 'add metric_result (not a method)' do
subject { FactoryGirl.build(:module_result, metric_results: []) }
let(:metric_result) {subject.metric_results.first}
it 'should add a metric_result using <<' do
subject.metric_results << metric_result
subject.metric_results.should include(metric_result)
end
end
end
describe 'records' do
context 'when accessing metric results (not a method)' do
subject { FactoryGirl.create(:module_result, metric_results: []) }
let(:metric_result) {subject.metric_results.first}
it 'should return the associated array of metric results' do
subject.metric_results << metric_result
subject.metric_results.should eq([metric_result])
end
end
end
end
|
RSpec.describe PageRegister do
let(:user) { FactoryGirl.build(:user) }
let(:params) { {} }
subject { described_class.new(page, params: params, current_user: user) }
describe '#save' do
before { allow(subject).to receive(:create_revision) }
describe 'saving the page' do
context 'user is an editor' do
let(:user) do
FactoryGirl.build(:user, role: Comfy::Cms::User.roles[:editor])
end
context 'new page' do
  let(:page) { FactoryGirl.build(:page) }

  # Editors may not create pages: saving must fail with a validation
  # error. (Description previously read 'allows the object to be
  # saved', which contradicted the assertions below.)
  it 'does not allow the object to be saved' do
    expect { subject.save }.to raise_error(ActiveRecord::RecordInvalid)
    expect(page.errors.full_messages)
      .to include('Insufficient permissions to change')
  end
end
context 'trying to save an "unsaved" existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'save_unsaved' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
context 'trying to save changes to an existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'save_changes' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
context 'trying to save changes to an existing PUBLISHED page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'save_draft_changes' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
context 'trying to save changes as draft' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'save_changes_as_draft' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
context 'trying to publish an existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'publish' } }
it 'does not allow the object to be saved' do
expect { subject.save }.to raise_error(ActiveRecord::RecordInvalid)
expect(page.errors.full_messages)
.to include('Insufficient permissions to change')
end
end
context 'trying to unpublish an existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'unpublish' } }
it 'does not allow the object to be saved' do
expect { subject.save }.to raise_error(ActiveRecord::RecordInvalid)
expect(page.errors.full_messages)
.to include('Insufficient permissions to change')
end
end
context 'trying to schedule an existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'schedule' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
end
context 'new page record' do
let(:page) { FactoryGirl.build(:page) }
let(:params) { {} }
it 'saves the page' do
expect(page).to receive(:save!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'existing page record with no state_event' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { {} }
it 'saves the page' do
expect(page).to receive(:save!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'existing page record with state_event' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'publish' } }
it 'does not save the page' do
expect(page).not_to receive(:save!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
end
describe 'updating page state' do
context 'new page record' do
let(:page) { FactoryGirl.build(:page) }
context 'PageRegister has state_event "save_unsaved"' do
let(:params) { { state_event: 'save_unsaved' } }
it 'updates the state of the page' do
expect(page).to receive(:update_state!).with('save_unsaved')
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'PageRegister has state_event which is not "save_unsaved"' do
let(:params) { { state_event: 'publish' } }
it 'does not update the state of the page' do
expect(page).not_to receive(:update_state!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
end
context 'existing page record' do
let(:page) { FactoryGirl.create(:page) }
context 'PageRegister has state_event "save_unsaved"' do
let(:params) { { state_event: 'save_unsaved' } }
it 'updates the state of the page' do
expect(page).to receive(:update_state!).with('save_unsaved')
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'PageRegister has state_event which is not "save_unsaved"' do
let(:params) { { state_event: 'publish' } }
it 'updates the state of the page' do
expect(page).to receive(:update_state!).with('publish')
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'PageRegister has no state_event' do
let(:params) { {} }
it 'does not update the state of the page' do
expect(page).not_to receive(:update_state!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
end
context 'existing PUBLISHED page record' do
let(:page) { FactoryGirl.create(:page) }
# Context description fixed: the params use 'save_draft_changes', not
# 'save_unsaved' as previously claimed.
context 'PageRegister has state_event "save_draft_changes"' do
  let(:params) { { state_event: 'save_draft_changes' } }

  it 'updates the state of the page' do
    expect(page).to receive(:update_state!).with('save_draft_changes')
    subject.save
  end

  it 'creates a revision' do
    subject.save
    expect(subject).to have_received(:create_revision)
  end
end
context 'PageRegister has state_event which is not "save_draft_changes"' do
let(:params) { { state_event: 'publish' } }
it 'updates the state of the page' do
expect(page).to receive(:update_state!).with('publish')
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'PageRegister has no state_event' do
let(:params) { {} }
it 'does not update the state of the page' do
expect(page).not_to receive(:update_state!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
end
end
end
end
Remove duplicate / redundant tests.
The context “existing PUBLISHED page record” was nearly identical to the preceding “existing page record” context, so the redundant examples were removed.
RSpec.describe PageRegister do
let(:user) { FactoryGirl.build(:user) }
let(:params) { {} }
subject { described_class.new(page, params: params, current_user: user) }
describe '#save' do
before { allow(subject).to receive(:create_revision) }
describe 'saving the page' do
context 'user is an editor' do
let(:user) do
FactoryGirl.build(:user, role: Comfy::Cms::User.roles[:editor])
end
context 'new page' do
  let(:page) { FactoryGirl.build(:page) }

  # Editors may not create pages: saving must fail with a validation
  # error. (Description previously read 'allows the object to be
  # saved', which contradicted the assertions below.)
  it 'does not allow the object to be saved' do
    expect { subject.save }.to raise_error(ActiveRecord::RecordInvalid)
    expect(page.errors.full_messages)
      .to include('Insufficient permissions to change')
  end
end
context 'trying to save an "unsaved" existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'save_unsaved' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
context 'trying to save changes to an existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'save_changes' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
context 'trying to save changes to an existing PUBLISHED page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'save_draft_changes' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
context 'trying to save changes as draft' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'save_changes_as_draft' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
context 'trying to publish an existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'publish' } }
it 'does not allow the object to be saved' do
expect { subject.save }.to raise_error(ActiveRecord::RecordInvalid)
expect(page.errors.full_messages)
.to include('Insufficient permissions to change')
end
end
context 'trying to unpublish an existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'unpublish' } }
it 'does not allow the object to be saved' do
expect { subject.save }.to raise_error(ActiveRecord::RecordInvalid)
expect(page.errors.full_messages)
.to include('Insufficient permissions to change')
end
end
context 'trying to schedule an existing page' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'schedule' } }
it 'updates the page' do
expect(page).to receive(:update_state!)
subject.save
end
end
end
context 'new page record' do
let(:page) { FactoryGirl.build(:page) }
let(:params) { {} }
it 'saves the page' do
expect(page).to receive(:save!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'existing page record with no state_event' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { {} }
it 'saves the page' do
expect(page).to receive(:save!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'existing page record with state_event' do
let(:page) { FactoryGirl.create(:page) }
let(:params) { { state_event: 'publish' } }
it 'does not save the page' do
expect(page).not_to receive(:save!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
end
describe 'updating page state' do
context 'new page record' do
let(:page) { FactoryGirl.build(:page) }
context 'PageRegister has state_event "save_unsaved"' do
let(:params) { { state_event: 'save_unsaved' } }
it 'updates the state of the page' do
expect(page).to receive(:update_state!).with('save_unsaved')
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'PageRegister has state_event which is not "save_unsaved"' do
let(:params) { { state_event: 'publish' } }
it 'does not update the state of the page' do
expect(page).not_to receive(:update_state!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
end
context 'existing PUBLISHED page record' do
let(:page) { FactoryGirl.create(:page) }
# Context description fixed: the params use 'save_draft_changes', not
# 'save_unsaved' as previously claimed.
context 'PageRegister has state_event "save_draft_changes"' do
  let(:params) { { state_event: 'save_draft_changes' } }

  it 'updates the state of the page' do
    expect(page).to receive(:update_state!).with('save_draft_changes')
    subject.save
  end

  it 'creates a revision' do
    subject.save
    expect(subject).to have_received(:create_revision)
  end
end
context 'PageRegister has state_event which is not "save_draft_changes"' do
let(:params) { { state_event: 'publish' } }
it 'updates the state of the page' do
expect(page).to receive(:update_state!).with('publish')
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
context 'PageRegister has no state_event' do
let(:params) { {} }
it 'does not update the state of the page' do
expect(page).not_to receive(:update_state!)
subject.save
end
it 'creates a revision' do
subject.save
expect(subject).to have_received(:create_revision)
end
end
end
end
end
end
|
search result model test
require 'rails_helper'

# Fixed: the spec previously described User although it exercises
# SearchResult; also modernized the hashrocket metadata syntax.
RSpec.describe SearchResult, type: :model do
  let(:search_result) { SearchResult.new(user_id: 1) }

  describe 'validate presence' do
    it { expect(search_result).to validate_presence_of(:budget) }
  end
end
|
require 'rails_helper'

describe SignUpSheet do
  describe '.add_signup_topic' do
    # Removed an accidental verbatim duplicate of this example.
    it 'will return an empty Hash when there are no topics' do
      assignment = double(Assignment)
      allow(assignment).to receive(:get_review_rounds) { nil }
      allow(Assignment).to receive(:find) { assignment }
      allow(SignUpTopic).to receive(:where) { nil }
      expect(SignUpSheet.add_signup_topic(2)).to eql({})
    end
  end
end
Removed accidental duplicate code
require 'rails_helper'
describe SignUpSheet do
describe '.add_signup_topic' do
it 'will return an empty Hash when there are no topics' do
assignment = double(Assignment)
allow(assignment).to receive(:get_review_rounds) { nil }
allow(Assignment).to receive(:find) { assignment }
allow(SignUpTopic).to receive(:where) { nil }
expect(SignUpSheet.add_signup_topic(2)).to eql({})
end
end
end |
require 'spec_helper'
require 'cancan/matchers'
require 'support/ability_helpers'
require 'spree/testing_support/bar_ability'
# Fake ability for testing registration of additional abilities
class FooAbility
  include CanCan::Ability

  def initialize(_user)
    # Everyone may list orders.
    can :index, Spree::Order
    # Everyone may update the single order whose id is 1.
    can :update, Spree::Order do |ord|
      ord.id == 1
    end
  end
end
# Specs for Spree::Ability: registration of extra ability classes, admin
# role handling, and guest-level access rules for the core resources.
describe Spree::Ability do
  let(:user) { create(:user) }
  let(:ability) { Spree::Ability.new(user) }
  # Overridden in child contexts that exercise token-based order access.
  let(:token) { nil }

  before do
    user.spree_roles.clear
  end

  # NOTE(review): this constant is never referenced below (the token
  # contexts use the literal 'TOKEN123'); confirm whether the shared
  # examples in support/ability_helpers depend on it — otherwise it is dead.
  TOKEN = 'token123'.freeze

  # Spree::Ability.abilities is process-global state, so reset it (and the
  # user's roles) after every example to avoid cross-example leakage.
  after(:each) {
    Spree::Ability.abilities = Set.new
    user.spree_roles = []
  }

  context 'register_ability' do
    it 'should add the ability to the list of abilties' do
      Spree::Ability.register_ability(FooAbility)
      expect(Spree::Ability.new(user).abilities).to_not be_empty
    end
    it 'should apply the registered abilities permissions' do
      Spree::Ability.register_ability(FooAbility)
      # FooAbility grants :update only on the order with id 1.
      expect(Spree::Ability.new(user).can?(:update, build(:order, id: 1))).to be_truthy
    end
  end

  context 'for general resource' do
    let(:resource) { Object.new }
    context 'with admin user' do
      before(:each) { allow(user).to receive(:has_spree_role?).and_return(true) }
      it_should_behave_like 'access granted'
      it_should_behave_like 'index allowed'
    end
    context 'with customer' do
      it_should_behave_like 'access denied'
      it_should_behave_like 'no index allowed'
    end
  end

  context 'for admin protected resources' do
    let(:resource) { Object.new }
    let(:resource_shipment) { Spree::Shipment.new }
    let(:resource_product) { Spree::Product.new }
    let(:resource_user) { Spree.user_class.new }
    let(:resource_order) { Spree::Order.new }
    let(:fakedispatch_user) { Spree.user_class.new }
    let(:fakedispatch_ability) { Spree::Ability.new(fakedispatch_user) }

    context 'with admin user' do
      it 'should be able to admin' do
        user.spree_roles << Spree::Role.find_or_create_by(name: 'admin')
        expect(ability).to be_able_to :admin, resource
        expect(ability).to be_able_to :index, resource_order
        expect(ability).to be_able_to :show, resource_product
        expect(ability).to be_able_to :create, resource_user
      end
    end

    context 'with fakedispatch user' do
      # BarAbility (from spree/testing_support/bar_ability) grants admin on
      # orders and shipments to users holding the 'bar' role only.
      it 'should be able to admin on the order and shipment pages' do
        user.spree_roles << Spree::Role.find_or_create_by(name: 'bar')
        Spree::Ability.register_ability(BarAbility)
        expect(ability).to_not be_able_to :admin, resource
        expect(ability).to be_able_to :admin, resource_order
        expect(ability).to be_able_to :index, resource_order
        expect(ability).to_not be_able_to :update, resource_order
        expect(ability).to be_able_to :admin, resource_shipment
        expect(ability).to be_able_to :index, resource_shipment
        expect(ability).to be_able_to :create, resource_shipment
        expect(ability).to_not be_able_to :admin, resource_product
        expect(ability).to_not be_able_to :update, resource_product
        expect(ability).to_not be_able_to :admin, resource_user
        expect(ability).to_not be_able_to :update, resource_user
        expect(ability).to be_able_to :update, user
        # It can create new users if it has access to :admin, User!!
        # TODO: change the Ability class so only users and customers get the extra permissions?
        Spree::Ability.remove_ability(BarAbility)
      end
    end

    context 'with customer' do
      it 'should not be able to admin' do
        expect(ability).to_not be_able_to :admin, resource
        expect(ability).to_not be_able_to :admin, resource_order
        expect(ability).to_not be_able_to :admin, resource_product
        expect(ability).to_not be_able_to :admin, resource_user
      end
    end
  end

  # Guest-level rules: most catalog/stock resources are read only; orders
  # and users are accessible only to their owner (or via a matching token).
  context 'as Guest User' do
    context 'for Country' do
      let(:resource) { Spree::Country.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for OptionType' do
      let(:resource) { Spree::OptionType.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for OptionValue' do
      # NOTE(review): resource is an OptionType, not an OptionValue — looks
      # like a copy-paste slip; confirm intent.
      let(:resource) { Spree::OptionType.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Order' do
      let(:resource) { Spree::Order.new }
      context 'requested by same user' do
        before(:each) { resource.user = user }
        it_should_behave_like 'access granted'
        it_should_behave_like 'no index allowed'
      end
      context 'requested by other user' do
        before(:each) { resource.user = Spree.user_class.new }
        it_should_behave_like 'create only'
      end
      context 'requested with proper token' do
        let(:token) { 'TOKEN123' }
        before(:each) { allow(resource).to receive_messages token: 'TOKEN123' }
        it_should_behave_like 'access granted'
        it_should_behave_like 'no index allowed'
      end
      context 'requested with inproper token' do
        let(:token) { 'FAIL' }
        before(:each) { allow(resource).to receive_messages token: 'TOKEN123' }
        it_should_behave_like 'create only'
      end
    end
    context 'for Product' do
      let(:resource) { Spree::Product.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for ProductProperty' do
      # NOTE(review): resource is a Product, not a ProductProperty — confirm.
      let(:resource) { Spree::Product.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Property' do
      # NOTE(review): resource is a Product, not a Property — confirm.
      let(:resource) { Spree::Product.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for State' do
      let(:resource) { Spree::State.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for StockItem' do
      let(:resource) { Spree::StockItem.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for StockLocation' do
      let(:resource) { Spree::StockLocation.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for StockMovement' do
      let(:resource) { Spree::StockMovement.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Taxons' do
      let(:resource) { Spree::Taxon.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Taxonomy' do
      let(:resource) { Spree::Taxonomy.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for User' do
      context 'requested by same user' do
        let(:resource) { user }
        it_should_behave_like 'access granted'
        it_should_behave_like 'no index allowed'
      end
      context 'requested by other user' do
        let(:resource) { Spree.user_class.new }
        it_should_behave_like 'create only'
      end
    end
    context 'for Variant' do
      let(:resource) { Spree::Variant.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Zone' do
      let(:resource) { Spree::Zone.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
  end
end
Add needed fake ability
require 'spec_helper'
require 'cancan/matchers'
require 'support/ability_helpers'
# Fake ability for testing registration of additional abilities
class FooAbility
  include CanCan::Ability

  def initialize(_user)
    # Everyone may list orders.
    can :index, Spree::Order
    # Everyone may update the single order whose id is 1.
    can :update, Spree::Order do |ord|
      ord.id == 1
    end
  end
end
# Specs for Spree::Ability: registration of extra ability classes, admin
# role handling, and guest-level access rules for the core resources.
describe Spree::Ability do
  let(:user) { create(:user) }
  let(:ability) { Spree::Ability.new(user) }
  # Overridden in child contexts that exercise token-based order access.
  let(:token) { nil }

  before do
    user.spree_roles.clear
  end

  # NOTE(review): this constant is never referenced below (the token
  # contexts use the literal 'TOKEN123'); confirm whether the shared
  # examples in support/ability_helpers depend on it — otherwise it is dead.
  TOKEN = 'token123'.freeze

  # Spree::Ability.abilities is process-global state, so reset it (and the
  # user's roles) after every example to avoid cross-example leakage.
  after(:each) {
    Spree::Ability.abilities = Set.new
    user.spree_roles = []
  }

  context 'register_ability' do
    it 'should add the ability to the list of abilties' do
      Spree::Ability.register_ability(FooAbility)
      expect(Spree::Ability.new(user).abilities).to_not be_empty
    end
    it 'should apply the registered abilities permissions' do
      Spree::Ability.register_ability(FooAbility)
      # FooAbility grants :update only on the order with id 1.
      expect(Spree::Ability.new(user).can?(:update, build(:order, id: 1))).to be_truthy
    end
  end

  context 'for general resource' do
    let(:resource) { Object.new }
    context 'with admin user' do
      before(:each) { allow(user).to receive(:has_spree_role?).and_return(true) }
      it_should_behave_like 'access granted'
      it_should_behave_like 'index allowed'
    end
    context 'with customer' do
      it_should_behave_like 'access denied'
      it_should_behave_like 'no index allowed'
    end
  end

  context 'for admin protected resources' do
    let(:resource) { Object.new }
    let(:resource_shipment) { Spree::Shipment.new }
    let(:resource_product) { Spree::Product.new }
    let(:resource_user) { Spree.user_class.new }
    let(:resource_order) { Spree::Order.new }
    let(:fakedispatch_user) { Spree.user_class.new }
    let(:fakedispatch_ability) { Spree::Ability.new(fakedispatch_user) }

    context 'with admin user' do
      it 'should be able to admin' do
        user.spree_roles << Spree::Role.find_or_create_by(name: 'admin')
        expect(ability).to be_able_to :admin, resource
        expect(ability).to be_able_to :index, resource_order
        expect(ability).to be_able_to :show, resource_product
        expect(ability).to be_able_to :create, resource_user
      end
    end

    context 'with fakedispatch user' do
      # Fake ability granting admin on Order only to holders of the 'bar'
      # role. NOTE(review): a `class` keyword inside a block defines the
      # constant at top level, so BarAbility leaks outside this context —
      # intentional here, but worth knowing.
      class BarAbility
        include CanCan::Ability
        def initialize(user)
          user ||= Spree::User.new
          return unless user.has_spree_role?('bar')
          can [:admin, :index, :show], Spree::Order
        end
      end
      it 'should be able to admin on the order and shipment pages' do
        user.spree_roles << Spree::Role.find_or_create_by(name: 'bar')
        Spree::Ability.register_ability(BarAbility)
        expect(ability).to_not be_able_to :admin, resource
        expect(ability).to be_able_to :admin, resource_order
        expect(ability).to be_able_to :index, resource_order
        expect(ability).to_not be_able_to :update, resource_order
        # NOTE(review): BarAbility above grants nothing on Shipment, yet these
        # shipment expectations pass via some other rule — confirm which
        # ability supplies them.
        expect(ability).to be_able_to :admin, resource_shipment
        expect(ability).to be_able_to :index, resource_shipment
        expect(ability).to be_able_to :create, resource_shipment
        expect(ability).to_not be_able_to :admin, resource_product
        expect(ability).to_not be_able_to :update, resource_product
        expect(ability).to_not be_able_to :admin, resource_user
        expect(ability).to_not be_able_to :update, resource_user
        expect(ability).to be_able_to :update, user
        # It can create new users if it has access to :admin, User!!
        # TODO: change the Ability class so only users and customers get the extra permissions?
        Spree::Ability.remove_ability(BarAbility)
      end
    end

    context 'with customer' do
      it 'should not be able to admin' do
        expect(ability).to_not be_able_to :admin, resource
        expect(ability).to_not be_able_to :admin, resource_order
        expect(ability).to_not be_able_to :admin, resource_product
        expect(ability).to_not be_able_to :admin, resource_user
      end
    end
  end

  # Guest-level rules: most catalog/stock resources are read only; orders
  # and users are accessible only to their owner (or via a matching token).
  context 'as Guest User' do
    context 'for Country' do
      let(:resource) { Spree::Country.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for OptionType' do
      let(:resource) { Spree::OptionType.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for OptionValue' do
      # NOTE(review): resource is an OptionType, not an OptionValue — looks
      # like a copy-paste slip; confirm intent.
      let(:resource) { Spree::OptionType.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Order' do
      let(:resource) { Spree::Order.new }
      context 'requested by same user' do
        before(:each) { resource.user = user }
        it_should_behave_like 'access granted'
        it_should_behave_like 'no index allowed'
      end
      context 'requested by other user' do
        before(:each) { resource.user = Spree.user_class.new }
        it_should_behave_like 'create only'
      end
      context 'requested with proper token' do
        let(:token) { 'TOKEN123' }
        before(:each) { allow(resource).to receive_messages token: 'TOKEN123' }
        it_should_behave_like 'access granted'
        it_should_behave_like 'no index allowed'
      end
      context 'requested with inproper token' do
        let(:token) { 'FAIL' }
        before(:each) { allow(resource).to receive_messages token: 'TOKEN123' }
        it_should_behave_like 'create only'
      end
    end
    context 'for Product' do
      let(:resource) { Spree::Product.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for ProductProperty' do
      # NOTE(review): resource is a Product, not a ProductProperty — confirm.
      let(:resource) { Spree::Product.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Property' do
      # NOTE(review): resource is a Product, not a Property — confirm.
      let(:resource) { Spree::Product.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for State' do
      let(:resource) { Spree::State.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for StockItem' do
      let(:resource) { Spree::StockItem.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for StockLocation' do
      let(:resource) { Spree::StockLocation.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for StockMovement' do
      let(:resource) { Spree::StockMovement.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Taxons' do
      let(:resource) { Spree::Taxon.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Taxonomy' do
      let(:resource) { Spree::Taxonomy.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for User' do
      context 'requested by same user' do
        let(:resource) { user }
        it_should_behave_like 'access granted'
        it_should_behave_like 'no index allowed'
      end
      context 'requested by other user' do
        let(:resource) { Spree.user_class.new }
        it_should_behave_like 'create only'
      end
    end
    context 'for Variant' do
      let(:resource) { Spree::Variant.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
    context 'for Zone' do
      let(:resource) { Spree::Zone.new }
      context 'requested by any user' do
        it_should_behave_like 'read only'
      end
    end
  end
end
|
require 'spec_helper'
# Model spec for SubjectQueue: validations, queue construction per user,
# reload/enqueue/dequeue class methods, and subject selection.
#
# Fix: `be_a(Fixnum)` replaced with `be_a(Integer)` — Fixnum was deprecated
# in Ruby 2.4 (integer unification) and removed in 3.2; Integer matches the
# same values.
RSpec.describe SubjectQueue, type: :model do
  let(:locked_factory) { :subject_queue }
  let(:locked_update) { {set_member_subject_ids: [1, 2, 3, 4]} }

  it_behaves_like "optimistically locked"

  it 'should have a valid factory' do
    expect(build(:subject_queue)).to be_valid
  end

  it 'should not be valid with out a workflow' do
    expect(build(:subject_queue, workflow: nil)).to_not be_valid
  end

  # Uniqueness is scoped over the (subject_set, workflow, user) triple.
  it 'should not be valid unless its is unique for the set, workflow, and user' do
    q = create(:subject_queue)
    expect(build(:subject_queue, subject_set: q.subject_set, workflow: q.workflow, user: q.user)).to_not be_valid
  end

  it 'should be valid if the subject set is different but the workflow and user are the same' do
    q = create(:subject_queue)
    expect(build(:subject_queue, workflow: q.workflow, user: q.user)).to be_valid
  end

  describe "::below_minimum" do
    let(:smses) { create_list(:set_member_subject, 21) }
    let!(:above_minimum) { create(:subject_queue, set_member_subjects: smses) }
    let!(:below_minimum) { create(:subject_queue, set_member_subjects: smses[0..5]) }

    it 'should return all the queue with less than the minimum number of subjects' do
      expect(SubjectQueue.below_minimum).to include(below_minimum)
    end

    it 'should not return queue with more than minimum' do
      expect(SubjectQueue.below_minimum).to_not include(above_minimum)
    end
  end

  describe "::create_for_user" do
    let(:workflow) {create(:workflow)}
    let(:user) { create(:user) }

    context "when no logged out queue" do
      # With no logged-out queue to copy, a background build is kicked off
      # and nil is returned to the caller.
      it 'should attempt to build a logged out queue' do
        expect(EnqueueSubjectQueueWorker).to receive(:perform_async).with(workflow.id, nil, nil)
        SubjectQueue.create_for_user(workflow, user)
      end
      it 'should return nil' do
        expect(SubjectQueue.create_for_user(workflow, user)).to be_nil
      end
    end

    context "queue saves" do
      let!(:logged_out_queue) do
        create(:subject_queue, workflow: workflow, user: nil, subject_set: nil)
      end
      let(:new_queue) { SubjectQueue.create_for_user(workflow, user) }

      it 'should return the new queue' do
        aggregate_failures "copied queue" do
          expect(new_queue).to be_a(SubjectQueue)
          expect(new_queue.id).to_not eq(logged_out_queue.id)
        end
      end

      it 'should add the logged out subjects to the new queue' do
        expect(new_queue.set_member_subject_ids).to match_array(logged_out_queue.set_member_subject_ids)
      end
    end
  end

  describe "::reload" do
    let(:sms) { create(:set_member_subject) }
    let(:smses) { create_list(:set_member_subject, 3).map(&:id) }
    let(:workflow) { create(:workflow) }

    context "when passed a subject set" do
      let(:subject_set) { create(:subject_set) }
      let(:not_updated_set) { create(:subject_set) }

      context "when the queue exists" do
        let!(:queue) do
          create(:subject_queue,
                 user: nil,
                 workflow: workflow,
                 set_member_subject_ids: [sms.id],
                 subject_set: subject_set)
        end
        let!(:not_updated_queue) do
          create(:subject_queue,
                 user: nil,
                 workflow: workflow,
                 set_member_subject_ids: [sms.id],
                 subject_set: not_updated_set)
        end

        before(:each) do
          SubjectQueue.reload(workflow, smses, set_id: subject_set.id)
          queue.reload
          not_updated_queue.reload
        end

        it 'should completely replace the queue for the given set' do
          expect(queue.set_member_subject_ids).to match_array(smses)
        end

        it 'should not update the set without the name' do
          expect(not_updated_queue.set_member_subject_ids).to_not match_array(smses)
        end
      end

      context "when no queue exists" do
        before(:each) do
          SubjectQueue.reload(workflow, smses, set_id: subject_set.id)
        end

        subject { SubjectQueue.find_by(workflow: workflow, subject_set: subject_set) }

        it 'should create a new queue with the given workflow' do
          expect(subject.workflow).to eq(workflow)
        end

        it 'should create a new queue with the given subject set' do
          expect(subject.subject_set).to eq(subject_set)
        end

        it 'should queue subject' do
          expect(subject.set_member_subject_ids).to match_array(smses)
        end
      end
    end

    context "when not passed a subject set" do
      context "when a queue exists" do
        let!(:queue) do
          create(:subject_queue,
                 user: nil,
                 workflow: workflow,
                 set_member_subject_ids: [sms.id])
        end

        it 'should reload the workflow queue' do
          SubjectQueue.reload(workflow, smses)
          queue.reload
          expect(queue.set_member_subject_ids).to eq(smses)
        end
      end

      context "when a queue does not exist" do
        before(:each) do
          SubjectQueue.reload(workflow, smses)
        end

        subject { SubjectQueue.find_by(workflow: workflow) }

        it 'should create a new queue with the given workflow' do
          expect(subject.workflow).to eq(workflow)
        end

        it 'should queue subject' do
          expect(subject.set_member_subject_ids).to eq(smses)
        end
      end
    end
  end

  describe "::dequeue_for_all" do
    let(:sms) { create(:set_member_subject) }
    let(:workflow) { create(:workflow) }
    let(:queue) { create_list(:subject_queue, 2, workflow: workflow, set_member_subject_ids: [sms.id]) }

    it "should remove the subject for all queue of the workflow" do
      SubjectQueue.dequeue_for_all(workflow.id, sms.id)
      expect(SubjectQueue.all.map(&:set_member_subject_ids)).to all( be_empty )
    end
  end

  describe "::enqueue_for_all" do
    let(:sms) { create(:set_member_subject) }
    let(:workflow) { create(:workflow) }
    let(:queue) { create_list(:subject_queue, 2, workflow: workflow) }

    it "should add the subject for all queue of the workflow" do
      SubjectQueue.enqueue_for_all(workflow.id, sms.id)
      expect(SubjectQueue.all.map(&:set_member_subject_ids)).to all( include(sms.id) )
    end
  end

  describe "::enqueue" do
    let(:workflow) { create(:workflow) }
    let(:subject_set) { create(:subject_set, workflows: [workflow]) }
    let(:sms) { create(:set_member_subject, subject_set: subject_set) }

    context "with a user" do
      let(:user) { create(:user) }

      context "nothing for user" do
        shared_examples "queue something" do
          it 'should create a new user_enqueue_subject' do
            expect do
              SubjectQueue.enqueue(workflow,
                                   ids,
                                   user: user)
            end.to change{ SubjectQueue.count }.from(0).to(1)
          end

          it 'should add subjects' do
            SubjectQueue.enqueue(workflow, ids, user: user)
            queue = SubjectQueue.find_by(workflow: workflow, user: user)
            expect(queue.set_member_subject_ids).to include(*ids)
          end
        end

        # Blank id lists must be a cheap no-op: no lookup, no update, nil out.
        shared_examples "does not queue anything" do |arg|
          it 'should not raise an error' do
            expect {
              SubjectQueue.enqueue(workflow, [], user: user)
            }.to_not raise_error
          end

          it 'not attempt to find or create a queue' do
            expect(SubjectQueue).to_not receive(:where)
            SubjectQueue.enqueue(workflow, [], user: user)
          end

          it 'should not call #enqueue_update' do
            expect_any_instance_of(SubjectQueue).to_not receive(:enqueue_update)
            SubjectQueue.enqueue(workflow, [], user: user)
          end

          it 'should return nil' do
            expect(SubjectQueue.enqueue(workflow, [], user: user)).to be_nil
          end
        end

        context "passing one sms_id" do
          let(:ids) { sms.id }
          it_behaves_like "queue something"
        end

        context "passing a set of sms_ids" do
          let(:ids) { create_list(:set_member_subject, 5).map(&:id) }
          it_behaves_like "queue something"
        end

        context "passing an empty set of sms_ids" do
          it_behaves_like "does not queue anything", []
        end

        context "passing a set with one nil value" do
          it_behaves_like "does not queue anything", [nil]
        end
      end

      context "subject queue exists for user" do
        let(:smses) { create_list(:set_member_subject, 3, subject_set: sms.subject_set) }
        let!(:sq) do
          create(:subject_queue,
                 set_member_subject_ids: smses.map(&:id),
                 user: user,
                 workflow: workflow)
        end

        context "when the append queue contains a seen before" do
          before(:each) do
            create(:user_seen_subject, user: user, workflow: workflow, subject_ids: [sms.subject_id])
          end

          it "should not enqueue dups" do
            SubjectQueue.enqueue(workflow, sms.id, user: user)
            expect(sq.reload.set_member_subject_ids).to eq(smses.map(&:id))
          end
        end

        context "when the queue is below the enqueue threshold" do
          it 'should add the sms id to the existing subject queue' do
            allow_any_instance_of(SubjectQueue).to receive(:below_minimum?).and_return(true)
            SubjectQueue.enqueue(workflow, sms.id, user: user)
            expect(sq.reload.set_member_subject_ids).to include(sms.id)
          end
        end

        context "when the queue is above the enqueue threshold" do
          it 'should not have more than the limit in the existing subject queue' do
            sq.set_member_subject_ids = (0..22).to_a - [sms.id]
            sq.save!
            SubjectQueue.enqueue(workflow, sms.id, user: user)
            expect(sq.reload.set_member_subject_ids.length).to eq(20)
          end
        end

        it 'should not have a duplicate in the set' do
          expected_ids = sq.set_member_subject_ids | [ sms.id ]
          SubjectQueue.enqueue(workflow, sms.id, user: user)
          new_sms_ids = sq.reload.set_member_subject_ids
          expect(new_sms_ids).to match_array(expected_ids)
        end

        it 'should maintain the order of the set' do
          ordered_list = sq.set_member_subject_ids | [ sms.id ]
          SubjectQueue.enqueue(workflow, sms.id, user: user)
          expect(sq.reload.set_member_subject_ids).to eq(ordered_list)
        end

        context "when a queue's existing SMSes are deleted", sidekiq: :inline do
          before(:each) do
            smses.map(&:id).each do |sms_id|
              QueueRemovalWorker.perform_async(sms_id, workflow.id)
            end
            SubjectQueue.enqueue(workflow, sms.id, user: user)
          end

          it "should only have the enqueued subject id in the queue" do
            expect(sq.reload.set_member_subject_ids).to eq([ sms.id ])
          end
        end

        describe "duplicate id queueing" do
          # A random strict subset of the already-queued ids, re-submitted
          # to prove enqueue de-duplicates.
          let(:q_dups) do
            sq.set_member_subject_ids.sample(sq.set_member_subject_ids.size - 1)
          end
          let(:enq_ids_with_dups) { [ sms.id ] | q_dups }
          let(:fake_sms_ids) { double(blank?: false) }

          describe "non-modifying queue updates" do
            before do
              allow_any_instance_of(SeenSubjectRemover)
                .to receive(:ids_to_enqueue)
                .and_return(enqueue_set)
            end

            context "with an empty queue" do
              let(:enqueue_set) { [] }
              it "should not modify the queue" do
                expect{
                  SubjectQueue.enqueue(workflow, fake_sms_ids, user: user)
                }.not_to change{
                  sq.set_member_subject_ids
                }
              end
            end

            context "with the same ids" do
              let(:enqueue_set) { sq.set_member_subject_ids }
              it "should not modify the queue" do
                expect{
                  SubjectQueue.enqueue(workflow, fake_sms_ids, user: user)
                }.not_to change{
                  sq.set_member_subject_ids
                }
              end
            end
          end

          context "when the append queue has dups" do
            it "should not enqueue dups" do
              SubjectQueue.enqueue(workflow, enq_ids_with_dups, user: user)
              non_dups = sq.set_member_subject_ids | [ sms.id ]
              expect(sq.reload.set_member_subject_ids).to match_array(non_dups)
            end
          end

          context "when the append queue grows too large" do
            it "should only queue #{SubjectQueue::DEFAULT_LENGTH} ids" do
              start = smses.last.id+1
              append_ids = (start..start+SubjectQueue::DEFAULT_LENGTH*2).to_a
              SubjectQueue.enqueue(workflow, append_ids, user: user)
              expect(sq.reload.set_member_subject_ids.length)
                .to eq(SubjectQueue::DEFAULT_LENGTH)
            end
          end
        end
      end
    end
  end

  describe "::dequeue" do
    let(:workflow) { create(:workflow) }
    let(:subject_set) { create(:subject_set, workflows: [workflow], project: workflow.project) }
    let(:sms) { create(:set_member_subject, subject_set: subject_set) }
    let!(:smses) { create_list(:set_member_subject, 3, subject_set: sms.subject_set) }

    context "with a user" do
      let(:user) { create(:user) }
      let!(:sq) do
        create(:subject_queue, user: user, workflow: workflow, set_member_subject_ids: smses.map(&:id))
      end
      let(:sms_id_to_dequeue) { smses.sample.id }
      let(:dequeue_list) { [ sms_id_to_dequeue ] }

      it 'should remove the subject given a user and workflow' do
        SubjectQueue.dequeue(workflow, dequeue_list, user: user)
        sms_ids = sq.reload.set_member_subject_ids
        expect(sms_ids).to_not include(sms_id_to_dequeue)
      end

      it 'should maintain the order of the set' do
        ordered_list = sq.set_member_subject_ids.reject { |id| id == sms_id_to_dequeue }
        SubjectQueue.dequeue(workflow, dequeue_list, user: user)
        expect(sq.reload.set_member_subject_ids).to eq(ordered_list)
      end

      context "passing an empty set of sms_ids" do
        it 'should not raise an error' do
          expect {
            SubjectQueue.dequeue(workflow, [], user: user)
          }.to_not raise_error
        end

        it 'not attempt find the queue' do
          expect(SubjectQueue).to_not receive(:where)
          SubjectQueue.dequeue(workflow, [], user: user)
        end

        it 'should not call #dequeue_update' do
          expect_any_instance_of(SubjectQueue).to_not receive(:dequeue_update)
          SubjectQueue.dequeue(workflow, [], user: user)
        end

        it 'should return nil' do
          expect(SubjectQueue.dequeue(workflow, [], user: user)).to be_nil
        end
      end
    end
  end

  describe "#next_subjects" do
    let(:ids) { (0..60).to_a }
    let(:sq) { build(:subject_queue, set_member_subject_ids: ids) }

    shared_examples "selects from the queue" do
      it 'should return a collection of ids' do
        # Fixnum was removed in Ruby 3.2; Integer covers the same values.
        expect(sq.next_subjects).to all( be_a(Integer) )
      end

      it 'should return 10 by default' do
        expect(sq.next_subjects.length).to eq(10)
      end

      it 'should accept an optional limit argument' do
        expect(sq.next_subjects(20).length).to eq(20)
      end

      it 'should randomly sample from the subject_ids' do
        expect(sq.next_subjects).to_not match_array(sq.set_member_subject_ids[0..9])
      end
    end

    context "when the queue has a user" do
      it_behaves_like "selects from the queue"
    end

    context "when the queue does not have a user" do
      let(:sq) { build(:subject_queue, set_member_subject_ids: ids, user: nil) }
      it_behaves_like "selects from the queue"
    end

    context "when the worklow is prioritized" do
      it "should select in order from the head of the queue" do
        allow_any_instance_of(Workflow).to receive(:prioritized).and_return(true)
        expect(sq.next_subjects).to match_array(sq.set_member_subject_ids[0..9])
      end
    end
  end

  describe "#below_minimum?" do
    let(:queue) { build(:subject_queue, set_member_subject_ids: subject_ids) }

    context "when less than #{SubjectQueue::MINIMUM_LENGTH} items" do
      let(:subject_ids) { create_list(:set_member_subject, 2).map(&:id) }
      it 'should return true' do
        expect(queue.below_minimum?).to be true
      end
    end

    context "when more than #{SubjectQueue::MINIMUM_LENGTH} items" do
      let(:subject_ids) do
        create_list(:set_member_subject, SubjectQueue::MINIMUM_LENGTH+1)
          .map(&:id)
      end
      it 'should return false' do
        expect(queue.below_minimum?).to be false
      end
    end
  end
end
Spec optimizations
Use id arrays where we can — no need to create lots of factory instances.
require 'spec_helper'
RSpec.describe SubjectQueue, type: :model do
let(:locked_factory) { :subject_queue }
let(:locked_update) { {set_member_subject_ids: [1, 2, 3, 4]} }
it_behaves_like "optimistically locked"
it 'should have a valid factory' do
expect(build(:subject_queue)).to be_valid
end
it 'should not be valid with out a workflow' do
expect(build(:subject_queue, workflow: nil)).to_not be_valid
end
it 'should not be valid unless its is unique for the set, workflow, and user' do
q = create(:subject_queue)
expect(build(:subject_queue, subject_set: q.subject_set, workflow: q.workflow, user: q.user)).to_not be_valid
end
it 'should be valid if the subject set is different but the workflow and user are the same' do
q = create(:subject_queue)
expect(build(:subject_queue, workflow: q.workflow, user: q.user)).to be_valid
end
describe "::below_minimum" do
let(:sms_ids) { (1..11).to_a }
let!(:above_minimum) { create(:subject_queue, set_member_subject_ids: sms_ids) }
let!(:below_minimum) { create(:subject_queue, set_member_subject_ids: sms_ids[0..5]) }
it 'should return all the queue with less than the minimum number of subjects' do
expect(SubjectQueue.below_minimum).to include(below_minimum)
end
it 'should not return queue with more than minimum' do
expect(SubjectQueue.below_minimum).to_not include(above_minimum)
end
end
describe "::create_for_user" do
let(:workflow) {create(:workflow)}
let(:user) { create(:user) }
context "when no logged out queue" do
it 'should attempt to build a logged out queue' do
expect(EnqueueSubjectQueueWorker).to receive(:perform_async).with(workflow.id, nil, nil)
SubjectQueue.create_for_user(workflow, user)
end
it 'should return nil' do
expect(SubjectQueue.create_for_user(workflow, user)).to be_nil
end
end
context "queue saves" do
let!(:logged_out_queue) do
create(:subject_queue, workflow: workflow, user: nil, subject_set: nil)
end
let(:new_queue) { SubjectQueue.create_for_user(workflow, user)}
it 'should return the new queue' do
aggregate_failures "copied queue" do
expect(new_queue).to be_a(SubjectQueue)
expect(new_queue.id).to_not eq(logged_out_queue.id)
end
end
it 'should add the logged out subjects to the new queue' do
expect(new_queue.set_member_subject_ids).to match_array(logged_out_queue.set_member_subject_ids)
end
end
end
describe "::reload" do
let(:sms_id) { [1] }
let(:smses) { (2..4).to_a }
let(:workflow) { create(:workflow) }
context "when passed a subject set" do
let(:subject_set) { create(:subject_set) }
let(:not_updated_set) { create(:subject_set) }
context "when the queue exists" do
let!(:queue) do
create(:subject_queue,
user: nil,
workflow: workflow,
set_member_subject_ids: sms_id,
subject_set: subject_set)
end
let!(:not_updated_queue) do
create(:subject_queue,
user: nil,
workflow: workflow,
set_member_subject_ids: sms_id,
subject_set: not_updated_set)
end
before(:each) do
SubjectQueue.reload(workflow, smses, set_id: subject_set.id)
queue.reload
not_updated_queue.reload
end
it 'should completely replace the queue for the given set' do
expect(queue.set_member_subject_ids).to match_array(smses)
end
it 'should not update the set without the name' do
expect(not_updated_queue.set_member_subject_ids).to_not match_array(smses)
end
end
context "when no queue exists" do
before(:each) do
SubjectQueue.reload(workflow, smses, set_id: subject_set.id)
end
subject { SubjectQueue.find_by(workflow: workflow, subject_set: subject_set) }
it 'should create a new queue with the given workflow' do
expect(subject.workflow).to eq(workflow)
end
it 'should create a new queue with the given subject set' do
expect(subject.subject_set).to eq(subject_set)
end
it 'should queue subject' do
expect(subject.set_member_subject_ids).to match_array(smses)
end
end
end
context "when not passed a subject set" do
context "when a queue exists" do
let!(:queue) do
create(:subject_queue,
user: nil,
workflow: workflow,
set_member_subject_ids: sms_id)
end
it 'should reload the workflow queue' do
SubjectQueue.reload(workflow, smses)
queue.reload
expect(queue.set_member_subject_ids).to eq(smses)
end
end
context "when a queue does not exist" do
before(:each) do
SubjectQueue.reload(workflow, smses)
end
subject { SubjectQueue.find_by(workflow: workflow) }
it 'should create a new queue with the given workflow' do
expect(subject.workflow).to eq(workflow)
end
it 'should queue subject' do
expect(subject.set_member_subject_ids).to eq(smses)
end
end
end
end
describe "::dequeue_for_all" do
let(:sms_id) { [1] }
let(:workflow) { create(:workflow) }
let(:queue) do
create_list(:subject_queue, 2, workflow: workflow, set_member_subject_ids: sms_id)
end
it "should remove the subject for all queue of the workflow" do
SubjectQueue.dequeue_for_all(workflow.id, sms_id)
expect(SubjectQueue.all.map(&:set_member_subject_ids)).to all( be_empty )
end
end
describe "::enqueue_for_all" do
let(:sms_id) { [1] }
let(:workflow) { create(:workflow) }
let(:queue) { create_list(:subject_queue, 2, workflow: workflow) }
it "should add the subject for all queue of the workflow" do
SubjectQueue.enqueue_for_all(workflow.id, sms_id)
expect(SubjectQueue.all.map(&:set_member_subject_ids)).to all( include(sms_id) )
end
end
describe "::enqueue" do
let(:workflow) { create(:workflow) }
let(:subject_set) { create(:subject_set, workflows: [workflow]) }
let(:sms_id) { [1] }
context "with a user" do
let(:user) { create(:user) }
context "nothing for user" do
shared_examples "queue something" do
it 'should create a new user_enqueue_subject' do
expect do
SubjectQueue.enqueue(workflow, ids, user: user)
end.to change{ SubjectQueue.count }.from(0).to(1)
end
it 'should add subjects' do
SubjectQueue.enqueue(workflow, ids, user: user)
queue = SubjectQueue.find_by(workflow: workflow, user: user)
expect(queue.set_member_subject_ids).to include(*ids)
end
end
shared_examples "does not queue anything" do |arg|
it 'should not raise an error' do
expect {
SubjectQueue.enqueue(workflow, [], user: user)
}.to_not raise_error
end
it 'not attempt to find or create a queue' do
expect(SubjectQueue).to_not receive(:where)
SubjectQueue.enqueue(workflow, [], user: user)
end
it 'should not call #enqueue_update' do
expect_any_instance_of(SubjectQueue).to_not receive(:enqueue_update)
SubjectQueue.enqueue(workflow, [], user: user)
end
it 'should return nil' do
expect(SubjectQueue.enqueue(workflow, [], user: user)).to be_nil
end
end
context "passing one sms_id" do
let(:ids) { sms_id }
it_behaves_like "queue something"
end
context "passing a set of sms_ids" do
let(:ids) { (1..5).to_a }
it_behaves_like "queue something"
end
context "passing an empty set of sms_ids" do
it_behaves_like "does not queue anything", []
end
context "passing a set with one nil value" do
it_behaves_like "does not queue anything", [nil]
end
end
context "subject queue exists for user" do
let(:sms) { create(:set_member_subject) }
let(:smses) { create_list(:set_member_subject, 3, subject_set: sms.subject_set) }
let(:sms_ids) { smses.map(&:id) }
let!(:sq) do
create(:subject_queue,
set_member_subject_ids: sms_ids,
user: user,
workflow: workflow)
end
context "when the append queue contains a seen before" do
before(:each) do
create(:user_seen_subject, user: user, workflow: workflow, subject_ids: [sms.subject_id])
end
it "should not enqueue dups" do
SubjectQueue.enqueue(workflow, sms.id, user: user)
expect(sq.reload.set_member_subject_ids).to eq(smses.map(&:id))
end
end
context "when the queue is below the enqueue threshold" do
it 'should add the sms id to the existing subject queue' do
allow_any_instance_of(SubjectQueue).to receive(:below_minimum?).and_return(true)
SubjectQueue.enqueue(workflow, sms.id, user: user)
expect(sq.reload.set_member_subject_ids).to include(sms.id)
end
end
context "when the queue is above the enqueue threshold" do
it 'should not have more than the limit in the existing subject queue' do
sq.set_member_subject_ids = (0..22).to_a - [sms.id]
sq.save!
SubjectQueue.enqueue(workflow, sms.id, user: user)
expect(sq.reload.set_member_subject_ids.length).to eq(20)
end
end
it 'should not have a duplicate in the set' do
expected_ids = sq.set_member_subject_ids | [ sms.id ]
SubjectQueue.enqueue(workflow, sms.id, user: user)
new_sms_ids = sq.reload.set_member_subject_ids
expect(new_sms_ids).to match_array(expected_ids)
end
it 'should maintain the order of the set' do
ordered_list = sq.set_member_subject_ids | [ sms.id ]
SubjectQueue.enqueue(workflow, sms.id, user: user)
expect(sq.reload.set_member_subject_ids).to eq(ordered_list)
end
context "when the queue is empty" do
let(:sms_ids) { [] }
it "should only enqueue the passed ids " do
SubjectQueue.enqueue(workflow, sms.id, user: user)
expect(sq.reload.set_member_subject_ids).to eq([ sms.id ])
end
end
describe "duplicate id queueing" do
let(:q_dups) do
sq.set_member_subject_ids.sample(sq.set_member_subject_ids.size - 1)
end
let(:enq_ids_with_dups) { [ sms.id ] | q_dups }
let(:fake_sms_ids) { double(blank?: false) }
describe "non-modifying queue updates" do
before do
allow_any_instance_of(SeenSubjectRemover)
.to receive(:ids_to_enqueue)
.and_return(enqueue_set)
end
context "with an empty queue" do
let(:enqueue_set) { [] }
it "should not modify the queue" do
expect{
SubjectQueue.enqueue(workflow, fake_sms_ids, user: user)
}.not_to change{
sq.set_member_subject_ids
}
end
end
context "with the same ids" do
let(:enqueue_set) { sq.set_member_subject_ids }
it "should not modify the queue" do
expect{
SubjectQueue.enqueue(workflow, fake_sms_ids, user: user)
}.not_to change{
sq.set_member_subject_ids
}
end
end
end
context "when the append queue has dups" do
it "should not enqueue dups" do
SubjectQueue.enqueue(workflow, enq_ids_with_dups, user: user)
non_dups = sq.set_member_subject_ids | [ sms.id ]
expect(sq.reload.set_member_subject_ids).to match_array(non_dups)
end
end
context "when the append queue grows too large" do
it "should only queue #{SubjectQueue::DEFAULT_LENGTH} ids" do
start = smses.last.id+1
append_ids = (start..start+SubjectQueue::DEFAULT_LENGTH*2).to_a
SubjectQueue.enqueue(workflow, append_ids, user: user)
expect(sq.reload.set_member_subject_ids.length)
.to eq(SubjectQueue::DEFAULT_LENGTH)
end
end
end
end
end
end
describe "::dequeue" do
let(:workflow) { create(:workflow) }
let(:subject_set) { create(:subject_set, workflows: [workflow], project: workflow.project) }
let(:sms) { create(:set_member_subject, subject_set: subject_set) }
let!(:smses) { create_list(:set_member_subject, 3, subject_set: sms.subject_set) }
context "with a user" do
let(:user) { create(:user) }
let!(:sq) do
create(:subject_queue, user: user, workflow: workflow, set_member_subject_ids: smses.map(&:id))
end
let(:sms_id_to_dequeue) { smses.sample.id }
let(:dequeue_list) { [ sms_id_to_dequeue ] }
it 'should remove the subject given a user and workflow' do
SubjectQueue.dequeue(workflow, dequeue_list, user: user)
sms_ids = sq.reload.set_member_subject_ids
expect(sms_ids).to_not include(sms_id_to_dequeue)
end
it 'should maintain the order of the set' do
ordered_list = sq.set_member_subject_ids.reject { |id| id == sms_id_to_dequeue }
SubjectQueue.dequeue(workflow, dequeue_list, user: user)
expect(sq.reload.set_member_subject_ids).to eq(ordered_list)
end
context "passing an empty set of sms_ids" do
it 'should not raise an error' do
expect {
SubjectQueue.dequeue(workflow, [], user: user)
}.to_not raise_error
end
it 'not attempt find the queue' do
expect(SubjectQueue).to_not receive(:where)
SubjectQueue.dequeue(workflow, [], user: user)
end
it 'should not call #dequeue_update' do
expect_any_instance_of(SubjectQueue).to_not receive(:dequeue_update)
SubjectQueue.dequeue(workflow, [], user: user)
end
it 'should return nil' do
expect(SubjectQueue.dequeue(workflow, [], user: user)).to be_nil
end
end
end
end
describe "#next_subjects" do
let(:ids) { (0..60).to_a }
let(:sq) { build(:subject_queue, set_member_subject_ids: ids) }
shared_examples "selects from the queue" do
it 'should return a collection of ids' do
expect(sq.next_subjects).to all( be_a(Fixnum) )
end
it 'should return 10 by default' do
expect(sq.next_subjects.length).to eq(10)
end
it 'should accept an optional limit argument' do
expect(sq.next_subjects(20).length).to eq(20)
end
it 'should randomly sample from the subject_ids' do
expect(sq.next_subjects).to_not match_array(sq.set_member_subject_ids[0..9])
end
end
context "when the queue has a user" do
it_behaves_like "selects from the queue"
end
context "when the queue does not have a user" do
let(:sq) { build(:subject_queue, set_member_subject_ids: ids, user: nil) }
it_behaves_like "selects from the queue"
end
context "when the worklow is prioritized" do
it "should select in order from the head of the queue" do
allow_any_instance_of(Workflow).to receive(:prioritized).and_return(true)
expect(sq.next_subjects).to match_array(sq.set_member_subject_ids[0..9])
end
end
end
describe "#below_minimum?" do
let(:queue) { build(:subject_queue, set_member_subject_ids: subject_ids) }
context "when less than #{SubjectQueue::MINIMUM_LENGTH} items" do
let(:subject_ids) { [1, 2] }
it 'should return true' do
expect(queue.below_minimum?).to be true
end
end
context "when more than #{SubjectQueue::MINIMUM_LENGTH} items" do
let(:subject_ids) { (0..SubjectQueue::MINIMUM_LENGTH+1).to_a }
it 'should return false' do
expect(queue.below_minimum?).to be false
end
end
end
end
|
# frozen_string_literal: true
require "faker"
# Destroys every record of +klass+, setting a +force_destroy+ flag first
# when the model supports one (some Folio models guard destruction).
def force_destroy(klass)
  klass.find_each do |record|
    record.try(:force_destroy=, true)
    record.destroy!
  end
end
# Wipe existing records so re-running the seeds is idempotent.
Folio::Atom::Base.destroy_all
Folio::Account.destroy_all
Folio::Lead.destroy_all
Folio::File.destroy_all
# These models guard destruction behind a force_destroy flag.
force_destroy Folio::Menu
force_destroy Folio::Page
force_destroy Folio::Site
# Creates and returns a Folio::Image pointing at a randomly sized picsum
# placeholder; pass square: true-ish to get a square image.
def unsplash_pic(square = false)
  Folio::Image.new.tap do |image|
    ratio = 0.5 + rand / 2
    width = (ratio * 2560).to_i
    height = (square ? ratio * 2560 : ratio * 1440).to_i
    image.file_url = "https://picsum.photos/#{width}/#{height}/?random"
    image.save!
  end
end
# Creates and returns a Folio::Image backed by the given file instance.
def file_pic(file_instance)
  Folio::Image.new.tap do |image|
    image.file = file_instance
    image.save!
  end
end
# Pre-seed a couple of standalone placeholder images.
2.times { unsplash_pic }

Folio::Site.create!(title: "Sinfin.digital",
                    domain: "sinfin.localhost",
                    locale: "cs",
                    locales: ["cs", "en", "de"],
                    email: "info@sinfin.cz",
                    phone: "+420 123 456 789",
                    social_links: {
                      facebook: "https://www.facebook.com/",
                      instagram: "https://www.instagram.com/",
                      twitter: "https://www.twitter.com/",
                    })

# "About" page with a cover and a small gallery of placeholder images.
about = Folio::Page.create!(title: "O nás",
                            published: true,
                            published_at: 1.month.ago)
about.cover = unsplash_pic
3.times { about.images << unsplash_pic }
about.image_placements.each do |ip|
  name = Faker::Name.name
  # `update_attributes!` was deprecated in Rails 6.0 and removed in 6.1;
  # `update!` is the drop-in replacement (available since Rails 4).
  ip.update!(alt: name, title: "Portrait of #{name}")
end
# Page with real fixture photos (exercises actual file processing).
night_sky = Folio::Page.create!(title: "Noční obloha", published: true, published_at: 1.month.ago, locale: :cs)
# NOTE(review): File.new handles are never closed — acceptable for a one-shot
# seed script, but confirm the file attachment library doesn't need them open.
night_photo = File.new(Rails.root.join("..", "fixtures", "folio", "photos", "night.jpg"))
night_sky.cover = file_pic(night_photo)
1.times { night_sky.images << file_pic(night_photo) }
reference = Folio::Page.create!(title: "Reference",
                                published: true,
                                published_at: 1.day.ago)
Folio::Page.create!(title: "Smart Cities", published: true, published_at: 1.month.ago)
vyvolejto = Folio::Page.create!(title: "Vyvolej.to", published: true, published_at: 1.month.ago)
# Fixture chosen to exercise IPTC/EXIF metadata with problematic encoding.
iptc_test = File.new(Rails.root.join("..", "fixtures", "folio", "photos", "downsized-exif-samples", "jpg", "tests", "46_UnicodeEncodeError.jpg"))
vyvolejto.cover = file_pic(iptc_test)
Folio::Page.create!(title: "Hidden", published: false)
Folio::Page.create!(title: "DAM", published: true)
# Flat page menu with two items pointing at the seeded pages.
menu = Folio::Menu::Page.create!(locale: :cs)
Folio::MenuItem.create!(menu: menu,
                        title: "Reference",
                        target: reference,
                        position: 0)
Folio::MenuItem.create!(menu: menu,
                        title: "About",
                        target: about,
                        position: 1)
# Superuser login for local development only.
if Rails.env.development?
  Folio::Account.create!(email: "test@test.test",
                         password: "test@test.test",
                         role: :superuser,
                         first_name: "Test",
                         last_name: "Dummy")
end
# Nestable menu: one top-level item, plus a "Wrap" item with two children.
nestable_menu = Dummy::Menu::Nestable.create!(locale: :cs)
Folio::MenuItem.create!(menu: nestable_menu,
                        title: "Reference",
                        target: reference,
                        position: 0)
wrap = Folio::MenuItem.create!(menu: nestable_menu,
                               title: "Wrap",
                               position: 1)
[reference, about].each do |target|
  Folio::MenuItem.create!(menu: nestable_menu,
                          target: target,
                          parent: wrap)
end
# Sample lead submission with structured additional data.
Folio::Lead.create!(name: "Test lead",
                    email: "test@lead.test",
                    note: "Hello",
                    additional_data: { test: "test", boolean: false })
Update dummy DB seeds: load fixture photos via Folio::Engine.root instead of paths relative to Rails.root.
# frozen_string_literal: true
require "faker"
# Destroys every record of +klass+, setting a +force_destroy+ flag first
# when the model supports one (some Folio models guard destruction).
def force_destroy(klass)
  klass.find_each do |record|
    record.try(:force_destroy=, true)
    record.destroy!
  end
end
# Wipe existing records so re-running the seeds is idempotent.
Folio::Atom::Base.destroy_all
Folio::Account.destroy_all
Folio::Lead.destroy_all
Folio::File.destroy_all
# These models guard destruction behind a force_destroy flag.
force_destroy Folio::Menu
force_destroy Folio::Page
force_destroy Folio::Site
# Creates and returns a Folio::Image pointing at a randomly sized picsum
# placeholder; pass square: true-ish to get a square image.
def unsplash_pic(square = false)
  Folio::Image.new.tap do |image|
    ratio = 0.5 + rand / 2
    width = (ratio * 2560).to_i
    height = (square ? ratio * 2560 : ratio * 1440).to_i
    image.file_url = "https://picsum.photos/#{width}/#{height}/?random"
    image.save!
  end
end
# Creates and returns a Folio::Image backed by the given file instance.
def file_pic(file_instance)
  Folio::Image.new.tap do |image|
    image.file = file_instance
    image.save!
  end
end
# Pre-seed a couple of standalone placeholder images.
2.times { unsplash_pic }

Folio::Site.create!(title: "Sinfin.digital",
                    domain: "sinfin.localhost",
                    locale: "cs",
                    locales: ["cs", "en", "de"],
                    email: "info@sinfin.cz",
                    phone: "+420 123 456 789",
                    social_links: {
                      facebook: "https://www.facebook.com/",
                      instagram: "https://www.instagram.com/",
                      twitter: "https://www.twitter.com/",
                    })

# "About" page with a cover and a small gallery of placeholder images.
about = Folio::Page.create!(title: "O nás",
                            published: true,
                            published_at: 1.month.ago)
about.cover = unsplash_pic
3.times { about.images << unsplash_pic }
about.image_placements.each do |ip|
  name = Faker::Name.name
  # `update_attributes!` was deprecated in Rails 6.0 and removed in 6.1;
  # `update!` is the drop-in replacement (available since Rails 4).
  ip.update!(alt: name, title: "Portrait of #{name}")
end
# Page with real fixture photos, loaded from the engine's own test fixtures.
night_sky = Folio::Page.create!(title: "Noční obloha", published: true, published_at: 1.month.ago, locale: :cs)
# NOTE(review): File.new handles are never closed — acceptable for a one-shot
# seed script, but confirm the file attachment library doesn't need them open.
night_photo = File.new(Folio::Engine.root.join("test/fixtures/folio/photos/night.jpg"))
night_sky.cover = file_pic(night_photo)
1.times { night_sky.images << file_pic(night_photo) }
reference = Folio::Page.create!(title: "Reference",
                                published: true,
                                published_at: 1.day.ago)
Folio::Page.create!(title: "Smart Cities", published: true, published_at: 1.month.ago)
vyvolejto = Folio::Page.create!(title: "Vyvolej.to", published: true, published_at: 1.month.ago)
# Fixture chosen to exercise IPTC/EXIF metadata with problematic encoding.
iptc_test = File.new(Folio::Engine.root.join("test/fixtures/folio/photos/downsized-exif-samples/jpg/tests/46_UnicodeEncodeError.jpg"))
vyvolejto.cover = file_pic(iptc_test)
Folio::Page.create!(title: "Hidden", published: false)
Folio::Page.create!(title: "DAM", published: true)
# Flat page menu with two items pointing at the seeded pages.
menu = Folio::Menu::Page.create!(locale: :cs)
Folio::MenuItem.create!(menu: menu,
                        title: "Reference",
                        target: reference,
                        position: 0)
Folio::MenuItem.create!(menu: menu,
                        title: "About",
                        target: about,
                        position: 1)
# Superuser login for local development only.
if Rails.env.development?
  Folio::Account.create!(email: "test@test.test",
                         password: "test@test.test",
                         role: :superuser,
                         first_name: "Test",
                         last_name: "Dummy")
end
# Nestable menu: one top-level item, plus a "Wrap" item with two children.
nestable_menu = Dummy::Menu::Nestable.create!(locale: :cs)
Folio::MenuItem.create!(menu: nestable_menu,
                        title: "Reference",
                        target: reference,
                        position: 0)
wrap = Folio::MenuItem.create!(menu: nestable_menu,
                               title: "Wrap",
                               position: 1)
[reference, about].each do |target|
  Folio::MenuItem.create!(menu: nestable_menu,
                          target: target,
                          parent: wrap)
end
# Sample lead submission with structured additional data.
Folio::Lead.create!(name: "Test lead",
                    email: "test@lead.test",
                    note: "Hello",
                    additional_data: { test: "test", boolean: false })
|
require 'rails_helper'
describe UserBuilding do
it { should have_many :units }
it { should validate_presence_of(:address) }
before(:each) do
UserBuilding.destroy_all # Clear out any newly-existing find-or-generate buildings.
@user_building = create(:user_building)
end
describe '.all_addresses' do
it 'returns as many addresses as there are user_buildings' do
expect(UserBuilding.all.count).to eq UserBuilding.all_addresses.count
end
end
describe '.find_or_generate' do
it 'should return a building by matching id' do
b = UserBuilding.find_or_generate(@user_building.id, nil)
expect(b).to eq(@user_building)
end
it 'should match exact address' do
b = UserBuilding.find_or_generate(nil, @user_building.address)
expect(b).to eq(@user_building)
end
[
'123 main st, cambridge, ma',
'123 main st cambridge ma', #fail
'123 Main st., Cambridge MA',
'123 main st., Cambridge, Massachussets 02138', #fail
'123 main st., Cambrdige, Masachussetts', #fail
'123 main st cambridge, massachussetts' #fail
].each do |address|
it "should return match or create by fuzzily-matching address '#{address}'" do
match = UserBuilding.find_or_generate(nil, address)
# Use this to see which addresses pass/fail.
# expect(match).to eq(@user_building)
# Expect match to be included in either the first (and only) building id or
# in the last building id.
expect([@user_building.id, UserBuilding.last.id]).to include(match.id)
# And just to be sure we're solid at our expected building count.
expect([1,2]).to include(UserBuilding.count)
end
# => Failed examples:
# rspec './spec/models/user_building_spec.rb[1:4:4]' # UserBuilding.find_or_generate should return match by address '123 main st cambridge ma'
# rspec './spec/models/user_building_spec.rb[1:4:6]' # UserBuilding.find_or_generate should return match by address '123 main st., Cambridge, Massachussets 02138'
# rspec './spec/models/user_building_spec.rb[1:4:7]' # UserBuilding.find_or_generate should return match by address '123 main st., Cambrdige, Masachussetts'
# rspec './spec/models/user_building_spec.rb[1:4:8]' # UserBuilding.find_or_generate should return match by address '123 main st cambridge, massachussetts'
end
end
end
Test UserBuilding address component parsing and writing via the after_save callback.
require 'rails_helper'
describe UserBuilding do
it { should have_many :units }
it { should validate_presence_of(:address) }
before(:each) do
UserBuilding.destroy_all # Clear out any newly-existing find-or-generate buildings.
@user_building = create(:user_building)
end
describe '.all_addresses' do
it 'returns as many addresses as there are user_buildings' do
expect(UserBuilding.all.count).to eq UserBuilding.all_addresses.count
end
end
describe '.find_or_generate' do
it 'should return a building by matching id' do
b = UserBuilding.find_or_generate(@user_building.id, nil)
expect(b).to eq(@user_building)
end
it 'should match exact address' do
b = UserBuilding.find_or_generate(nil, @user_building.address)
expect(b).to eq(@user_building)
end
[
'123 main st, cambridge, ma',
'123 main st cambridge ma', #fail
'123 Main st., Cambridge MA',
'123 main st., Cambridge, Massachussets 02138', #fail
'123 main st., Cambrdige, Masachussetts', #fail
'123 main st cambridge, massachussetts' #fail
].each do |address|
it "should return match or create by fuzzily-matching address '#{address}'" do
match = UserBuilding.find_or_generate(nil, address)
# Use this to see which addresses pass/fail.
# expect(match).to eq(@user_building)
# Expect match to be included in either the first (and only) building id or
# in the last building id.
expect([@user_building.id, UserBuilding.last.id]).to include(match.id)
# And just to be sure we're solid at our expected building count.
expect([1,2]).to include(UserBuilding.count)
end
# => Failed examples:
# rspec './spec/models/user_building_spec.rb[1:4:4]' # UserBuilding.find_or_generate should return match by address '123 main st cambridge ma'
# rspec './spec/models/user_building_spec.rb[1:4:6]' # UserBuilding.find_or_generate should return match by address '123 main st., Cambridge, Massachussets 02138'
# rspec './spec/models/user_building_spec.rb[1:4:7]' # UserBuilding.find_or_generate should return match by address '123 main st., Cambrdige, Masachussetts'
# rspec './spec/models/user_building_spec.rb[1:4:8]' # UserBuilding.find_or_generate should return match by address '123 main st cambridge, massachussetts'
end
end
describe '.parse_and_save_address_granules' do
# Note that this tests only a well-formed address, and thus
# signifies only that the components are being parsed,
# not their being parsed well.
it '.parse_and_save_address_granules should save parsed address components via after_save callback' do
new_address = '115 Prospect St, Cambridge, MA'
new_build_attrs = {address: new_address}
created = UserBuilding.create(new_build_attrs)
# Basic address did save.
expect(created).to have_attributes(address: new_address)
# Parsed address components.
expect(created.number).to_not be_nil
expect(created.street).to_not be_nil
expect(created.city).to_not be_nil
expect(created.state).to_not be_nil
end
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.