CombinedText stringlengths 4 3.42M |
|---|
#!/usr/bin/ruby
# Parses a dice expression given as the first CLI argument and prints its
# probability density plus two probability queries.
require_relative 'dice'
# Sum the density of every term of the parsed expression into one Density.
# NOTE(review): ARGV[0] is dup'ed, presumably because Dice::Parser mutates
# the string it is handed -- confirm against the parser implementation.
result=(Dice::Parser.new ARGV[0].dup).terms.inject(Density.new) { |i,term| i+=term.density }
# P(X >= -200) spans the whole support, so it doubles as a sanity check
# that the density sums to 1.
pp result>=-200
pp result
# P(X <= 5).
pp result<=5
#diedensity = ([DieDensity.new(3,[2])]*5).inject(:+)
#puts "Probability to have <=10 for 5d3r2:"
#pp (diedensity<=10)
#moddensity = ModifiedDieDensity.new(diedensity,3,StandardModifier.new)
#puts "Probability to have >40 for 3(5d3r2):"
#pp moddensity>40
nicer test output
#!/usr/bin/ruby
# Parses a dice expression from ARGV[0] and reports its density with
# labelled, human-readable output.
require_relative 'dice'
# Sum the density of every term of the parsed expression into one Density.
result=(Dice::Parser.new ARGV[0].dup).terms.inject(Density.new) { |i,term| i+=term.density }
# P(X >= -200) spans the whole support, so it must be exactly 1; anything
# else means probability mass was lost or duplicated.
if (result>=-200)==1
print "Densities sum up to 1 (OK)!\n"
else
# Report how far off the total probability mass is, as a Float.
print "Error: ", Rational(1-(result>=-200)).to_f, "\n"
end
print "Density: "
pp result
print "Probability that X<=5: "
print result<=5, "\n"
#diedensity = ([DieDensity.new(3,[2])]*5).inject(:+)
#puts "Probability to have <=10 for 5d3r2:"
#pp (diedensity<=10)
#moddensity = ModifiedDieDensity.new(diedensity,3,StandardModifier.new)
#puts "Probability to have >40 for 3(5d3r2):"
#pp moddensity>40
|
#!/usr/bin/ruby1.9.1
# -*- encoding: utf-8 -*-
require 'test/unit'
require './rcert'
class Tests < Test::Unit::TestCase
def test_random_string
size = 1 + rand(9)
s = Rcert::Problem.new(:problem_name).random_string(size)
assert_equal size, s.size
assert_match /\A\w+\z/, s
end
def test_problem_object
prob = Rcert::Problem.new :problem_name, <<-SRC
puts "<%= @say %>"
SRC
prob.option :say => 'hello'
prob.option :say => 'world'
prob.set_answer
assert_equal "以下のコードを実行したとき表示されるものを全て選択してください", prob.default_description
assert prob.select(0)[1]
assert !prob.select(1)[1]
end
def test_application_define_problem
app = Rcert::Application.new
prob = app.define_problem Rcert::Problem, :problem_name do
src <<-SRC
puts "#{random_string(5)}"[<%= @range %>]
SRC
option :range => "2..4"
option :range => "2...4"
end
prob.set_answer
assert prob.select(0)[1]
assert !prob.select(1)[1]
end
def test_problem
Rcert.application.clear
prob = problem :problem_name do
src <<-SRC
puts "#{random_string(5)}"[<%= @range %>]
SRC
option :range => "2..4"
option :range => "2...4"
end
prob.set_answer
assert prob.select(0)[1]
assert !prob.select(1)[1]
Rcert.application.clear
end
def test_application_run
Rcert.application.clear
problem :problem_1 do
src <<-SRC
puts "hello_world"[<%= @range %>]
SRC
option :range => "2..4"
option :range => "2...4"
end
method_problem :problem_2 do
src <<-SRC
x = ["Ruby", "Perl", "C"]
puts x.<%= @method_name %>
puts x[0]
SRC
option "first"
option "shift"
end
src1 = <<-RUBY
def foo
puts 'foo'
end
foo
RUBY
src2 = <<-RUBY
def foo
fail 'failed'
end
foo
RUBY
program_problem :problem_3 do
option src1
option src2
end
original_dir = Dir.pwd
Dir.chdir('./data/')
out = StringIO.new
Rcert.application.status(out)
assert_match %r"0/3$", out.string
begin
Rcert.application.run do|p|
p.set_answer
p.select(1)
end
out = StringIO.new
Rcert.application.status(out)
assert_match %r"0/3$", out.string
out.string = ""
Rcert.application.report_result(out)
assert_match /^problem_1:$/, out.string
assert_match /"llo\\n"/, out.string
assert_match /"ll\\n"$/, out.string
assert_match /^problem_2:$/, out.string
assert_match /first/, out.string
assert_match /shift/, out.string
assert_match /^problem_3:$/, out.string
assert_match /#{src1.sub(/\A\s*/, '')}/m, out.string
assert_match /#{src2.sub(/\A\s*/, '')}/m, out.string
Rcert.application.clear
ensure
Dir.chdir(original_dir)
end
end
def test_method_problem
Rcert.application.clear
prob = method_problem :problem_name do
src <<-SRC
x = ["Ruby", "Perl", "C"]
puts x.<%= @method_name %>
puts x[0]
SRC
option "first"
option "shift"
end
prob.set_answer
assert_equal "以下のように出力されるメソッドを全て選択してください", prob.default_description
out = prob.render
assert_match /Ruby\nRuby\n/, out
assert_match /first/, out
assert_match /shift/, out
assert prob.select(0)[1]
assert !prob.select(1)[1]
Rcert.application.clear
end
def test_problem_with_error_code
Rcert.application.clear
prob = method_problem :problem_1 do
src <<-SRC
[0, 1, 4, 9].<%= @method_name %> {|x, y| p [x, y]}
SRC
option "each_with_index"
option "no_such_method"
option "no_such_method2"
option "no_such_method3"
end
prob.set_answer
assert prob.select(0)[1]
assert !prob.select(3)[1]
prob = method_problem :problem_2 do
src <<-SRC
[0, 1, 4, 9].<%= @method_name %> {|x, y| p [x, y]}
SRC
option "no_such_method"
option "each_with_index"
option "no_such_method2"
option "no_such_method3"
end
prob.set_answer
assert !prob.select(0)[1]
assert prob.select(0, 2, 3)[1]
assert_equal "<error>", prob.options[0].out
Rcert.application.clear
end
def test_context
s = StringIO.new
$stdout = s
Rcert::Context.class_eval(<<-SRC, 'fname', 1)
def foo1234
puts "foo"
end
foo1234
SRC
assert_equal "foo\n", s.string
$stdout = STDOUT
end
def test_src_define_method
Rcert.application.clear
prob = program_problem :problem_name do
src <<-RUBY
def foo
<%= @src %>
end
foo
RUBY
option <<-RUBY
puts 'foo'
RUBY
option <<-RUBY
fail 'failed'
RUBY
end
prob.set_answer
assert_equal "以下のように出力されるコードを全て選択してください", prob.default_description
out = prob.render
assert_match /def foo/, out
assert_match /puts 'foo'/, out
assert_match /fail 'failed'/, out
assert prob.select(0)[1]
assert !prob.select(1)[1]
Rcert.application.clear
end
def test_application_run_argv
ARGV.clear
Rcert.application.clear
problem :p1 do
src <<-SRC
puts "hello_world"[<%= @range %>]
SRC
option :range => "2..4"
option :range => "2...4"
end
problem :p2 do
src <<-SRC
x = ["Ruby", "Perl", "C"]
puts x.<%= @method_name %>
puts x[0]
SRC
option :method_name => "first"
option :method_name => "shift"
end
problem :p3 do
src <<-SRC
x = ["Ruby", "Perl", "C"]
puts x.<%= @method_name %>
puts x[0]
SRC
option :method_name => "first"
option :method_name => "shift"
end
original_dir = Dir.pwd
Dir.chdir('./data/')
begin
plist = []
ARGV << "p2" << "p1"
Rcert.application.run do|p|
plist << p.name
[0, true]
end
assert_equal [:p1, :p2], plist
ensure
ARGV.clear
Rcert.application.clear
Dir.chdir(original_dir)
end
end
def test_rescue_syntax_error
Rcert.application.clear
program_problem :problem_name do
option "puts 'hello"
option "puts 'hello'"
end
Rcert.application.clear
end
def test_method_problem_select_multiple_answers
Rcert.application.clear
prob = method_problem :problem_name do
src <<-SRC
puts [1, 2, 3, 4].<%= @method_name %> {|x| x*x}.inspect
SRC
option "collect"
option "map"
option "each"
end
prob.set_answer
assert !prob.select(0)[1]
assert !prob.select(1)[1]
assert !prob.select(2)[1]
assert !prob.select(2, 1, 0)[1]
assert prob.select(0, 1)[1]
Rcert.application.clear
end
def test_problem_select_multiple_answers
Rcert.application.clear
prob = program_problem :problem_name do
option <<-SRC
puts ("Ca" 'fe')
SRC
option <<-'SRC'
puts (%q!Cafe!)
SRC
option <<-SRC
puts 0xCafe
SRC
option <<-SRC
puts ?C + ?a + ?f + ?e
SRC
option <<-SRC
puts (0800)
SRC
end
prob.set_answer
assert !prob.select(0)[1]
assert !prob.select(1)[1]
assert !prob.select(2)[1]
assert prob.select(0, 1, 3)[1]
Rcert.application.clear
end
def test_method_problem_error_messages
  Rcert.application.clear
  method_problem :problem_name do
    src <<-SRC
      puts [1, 2, 3, 4].<%= @method_name %> {|x| x*x}.inspect
    SRC
    option "collect"
    option "map"
    option "each"
  end
  original_dir = Dir.pwd
  Dir.chdir('./data/')
  # Answer with a deliberately wrong selection so report_result emits the
  # expected-vs-selected option names.
  Rcert.application.run do |p|
    p.set_answer
    p.select(0, 2)
  end
  out = StringIO.new
  Rcert.application.report_result(out)
  # Bug fix: the square brackets must be escaped. An unescaped [...] in a
  # regexp is a character class, so the old assertions matched ANY single
  # character from the set instead of the literal array dump, making the
  # checks vacuous.
  assert_match /\["map", "collect"\]/, out.string
  assert_match /\["collect", "each"\]/, out.string
  Rcert.application.clear
ensure
  Dir.chdir(original_dir)
end
def test_program_problem_error_messages
Rcert.application.clear
program_problem :problem_name do
option <<-SRC
puts ("Ca" 'fe')
SRC
option <<-'SRC'
puts (%q!Cafe!)
SRC
option <<-SRC
puts 0xCafe
SRC
option <<-SRC
puts ?C + ?a + ?f + ?e
SRC
option <<-SRC
puts (0800)
SRC
end
original_dir = Dir.pwd
Dir.chdir('./data/')
Rcert.application.run do|p|
p.set_answer
p.select(0, 2)
end
out = StringIO.new
Rcert.application.report_result(out)
assert_match /puts \(%q!Cafe!\)/, out.string
assert_match /puts \?C \+ \?a \+ \?f \+ \?e/, out.string
assert_match /puts \("Ca" 'fe'\)/, out.string
assert_match /puts 0xCafe/, out.string
Rcert.application.clear
ensure
Dir.chdir(original_dir)
end
end
ruby2.1で動くようにクラス名などを修正
#!/usr/bin/env ruby
# -*- encoding: utf-8 -*-
require 'minitest/autorun'
require './rcert'
class Tests < MiniTest::Test
def test_random_string
size = 1 + rand(9)
s = Rcert::Problem.new(:problem_name).random_string(size)
assert_equal size, s.size
assert_match /\A\w+\z/, s
end
def test_problem_object
prob = Rcert::Problem.new :problem_name, <<-SRC
puts "<%= @say %>"
SRC
prob.option :say => 'hello'
prob.option :say => 'world'
prob.set_answer
assert_equal "以下のコードを実行したとき表示されるものを全て選択してください", prob.default_description
assert prob.select(0)[1]
assert !prob.select(1)[1]
end
def test_application_define_problem
app = Rcert::Application.new
prob = app.define_problem Rcert::Problem, :problem_name do
src <<-SRC
puts "#{random_string(5)}"[<%= @range %>]
SRC
option :range => "2..4"
option :range => "2...4"
end
prob.set_answer
assert prob.select(0)[1]
assert !prob.select(1)[1]
end
def test_problem
Rcert.application.clear
prob = problem :problem_name do
src <<-SRC
puts "#{random_string(5)}"[<%= @range %>]
SRC
option :range => "2..4"
option :range => "2...4"
end
prob.set_answer
assert prob.select(0)[1]
assert !prob.select(1)[1]
Rcert.application.clear
end
def test_application_run
Rcert.application.clear
problem :problem_1 do
src <<-SRC
puts "hello_world"[<%= @range %>]
SRC
option :range => "2..4"
option :range => "2...4"
end
method_problem :problem_2 do
src <<-SRC
x = ["Ruby", "Perl", "C"]
puts x.<%= @method_name %>
puts x[0]
SRC
option "first"
option "shift"
end
src1 = <<-RUBY
def foo
puts 'foo'
end
foo
RUBY
src2 = <<-RUBY
def foo
fail 'failed'
end
foo
RUBY
program_problem :problem_3 do
option src1
option src2
end
original_dir = Dir.pwd
Dir.chdir('./data/')
out = StringIO.new
Rcert.application.status(out)
assert_match %r"0/3$", out.string
begin
Rcert.application.run do|p|
p.set_answer
p.select(1)
end
out = StringIO.new
Rcert.application.status(out)
assert_match %r"0/3$", out.string
out.string = ""
Rcert.application.report_result(out)
assert_match /^problem_1:$/, out.string
assert_match /"llo\\n"/, out.string
assert_match /"ll\\n"$/, out.string
assert_match /^problem_2:$/, out.string
assert_match /first/, out.string
assert_match /shift/, out.string
assert_match /^problem_3:$/, out.string
assert_match /#{src1.sub(/\A\s*/, '')}/m, out.string
assert_match /#{src2.sub(/\A\s*/, '')}/m, out.string
Rcert.application.clear
ensure
Dir.chdir(original_dir)
end
end
def test_method_problem
Rcert.application.clear
prob = method_problem :problem_name do
src <<-SRC
x = ["Ruby", "Perl", "C"]
puts x.<%= @method_name %>
puts x[0]
SRC
option "first"
option "shift"
end
prob.set_answer
assert_equal "以下のように出力されるメソッドを全て選択してください", prob.default_description
out = prob.render
assert_match /Ruby\nRuby\n/, out
assert_match /first/, out
assert_match /shift/, out
assert prob.select(0)[1]
assert !prob.select(1)[1]
Rcert.application.clear
end
def test_problem_with_error_code
Rcert.application.clear
prob = method_problem :problem_1 do
src <<-SRC
[0, 1, 4, 9].<%= @method_name %> {|x, y| p [x, y]}
SRC
option "each_with_index"
option "no_such_method"
option "no_such_method2"
option "no_such_method3"
end
prob.set_answer
assert prob.select(0)[1]
assert !prob.select(3)[1]
prob = method_problem :problem_2 do
src <<-SRC
[0, 1, 4, 9].<%= @method_name %> {|x, y| p [x, y]}
SRC
option "no_such_method"
option "each_with_index"
option "no_such_method2"
option "no_such_method3"
end
prob.set_answer
assert !prob.select(0)[1]
assert prob.select(0, 2, 3)[1]
assert_equal "<error>", prob.options[0].out
Rcert.application.clear
end
def test_context
s = StringIO.new
$stdout = s
Rcert::Context.class_eval(<<-SRC, 'fname', 1)
def foo1234
puts "foo"
end
foo1234
SRC
assert_equal "foo\n", s.string
$stdout = STDOUT
end
def test_src_define_method
Rcert.application.clear
prob = program_problem :problem_name do
src <<-RUBY
def foo
<%= @src %>
end
foo
RUBY
option <<-RUBY
puts 'foo'
RUBY
option <<-RUBY
fail 'failed'
RUBY
end
prob.set_answer
assert_equal "以下のように出力されるコードを全て選択してください", prob.default_description
out = prob.render
assert_match /def foo/, out
assert_match /puts 'foo'/, out
assert_match /fail 'failed'/, out
assert prob.select(0)[1]
assert !prob.select(1)[1]
Rcert.application.clear
end
def test_application_run_argv
ARGV.clear
Rcert.application.clear
problem :p1 do
src <<-SRC
puts "hello_world"[<%= @range %>]
SRC
option :range => "2..4"
option :range => "2...4"
end
problem :p2 do
src <<-SRC
x = ["Ruby", "Perl", "C"]
puts x.<%= @method_name %>
puts x[0]
SRC
option :method_name => "first"
option :method_name => "shift"
end
problem :p3 do
src <<-SRC
x = ["Ruby", "Perl", "C"]
puts x.<%= @method_name %>
puts x[0]
SRC
option :method_name => "first"
option :method_name => "shift"
end
original_dir = Dir.pwd
Dir.chdir('./data/')
begin
plist = []
ARGV << "p2" << "p1"
Rcert.application.run do|p|
plist << p.name
[0, true]
end
assert_equal [:p1, :p2], plist
ensure
ARGV.clear
Rcert.application.clear
Dir.chdir(original_dir)
end
end
def test_rescue_syntax_error
Rcert.application.clear
program_problem :problem_name do
option "puts 'hello"
option "puts 'hello'"
end
Rcert.application.clear
end
def test_method_problem_select_multiple_answers
Rcert.application.clear
prob = method_problem :problem_name do
src <<-SRC
puts [1, 2, 3, 4].<%= @method_name %> {|x| x*x}.inspect
SRC
option "collect"
option "map"
option "each"
end
prob.set_answer
assert !prob.select(0)[1]
assert !prob.select(1)[1]
assert !prob.select(2)[1]
assert !prob.select(2, 1, 0)[1]
assert prob.select(0, 1)[1]
Rcert.application.clear
end
def test_problem_select_multiple_answers
Rcert.application.clear
prob = program_problem :problem_name do
option <<-SRC
puts ("Ca" 'fe')
SRC
option <<-'SRC'
puts (%q!Cafe!)
SRC
option <<-SRC
puts 0xCafe
SRC
option <<-SRC
puts ?C + ?a + ?f + ?e
SRC
option <<-SRC
puts (0800)
SRC
end
prob.set_answer
assert !prob.select(0)[1]
assert !prob.select(1)[1]
assert !prob.select(2)[1]
assert prob.select(0, 1, 3)[1]
Rcert.application.clear
end
def test_method_problem_error_messages
  Rcert.application.clear
  method_problem :problem_name do
    src <<-SRC
      puts [1, 2, 3, 4].<%= @method_name %> {|x| x*x}.inspect
    SRC
    option "collect"
    option "map"
    option "each"
  end
  original_dir = Dir.pwd
  Dir.chdir('./data/')
  # Answer with a deliberately wrong selection so report_result emits the
  # expected-vs-selected option names.
  Rcert.application.run do |p|
    p.set_answer
    p.select(0, 2)
  end
  out = StringIO.new
  Rcert.application.report_result(out)
  # Bug fix: the square brackets must be escaped. An unescaped [...] in a
  # regexp is a character class, so the old assertions matched ANY single
  # character from the set instead of the literal array dump, making the
  # checks vacuous.
  assert_match /\["map", "collect"\]/, out.string
  assert_match /\["collect", "each"\]/, out.string
  Rcert.application.clear
ensure
  Dir.chdir(original_dir)
end
def test_program_problem_error_messages
Rcert.application.clear
program_problem :problem_name do
option <<-SRC
puts ("Ca" 'fe')
SRC
option <<-'SRC'
puts (%q!Cafe!)
SRC
option <<-SRC
puts 0xCafe
SRC
option <<-SRC
puts ?C + ?a + ?f + ?e
SRC
option <<-SRC
puts (0800)
SRC
end
original_dir = Dir.pwd
Dir.chdir('./data/')
Rcert.application.run do|p|
p.set_answer
p.select(0, 2)
end
out = StringIO.new
Rcert.application.report_result(out)
assert_match /puts \(%q!Cafe!\)/, out.string
assert_match /puts \?C \+ \?a \+ \?f \+ \?e/, out.string
assert_match /puts \("Ca" 'fe'\)/, out.string
assert_match /puts 0xCafe/, out.string
Rcert.application.clear
ensure
Dir.chdir(original_dir)
end
end
|
#!/usr/bin/env ruby
require 'factor_oracle'
# Returns every factor (contiguous substring/subsequence) of +a+, sorted.
# Relies only on +length+ and range indexing, so it works for Strings and
# Arrays alike, e.g. factors("abc") => ["a", "ab", "abc", "b", "bc", "c"].
# Duplicated factors are kept (factors("aa") => ["a", "a", "aa"]).
def factors(a)
  last = a.length - 1
  # flat_map builds the result directly instead of the original pattern of
  # using map purely for its side effect of pushing into an accumulator.
  (0..last).flat_map { |from|
    (from..last).map { |to| a[from..to] }
  }.sort
end
Removing extraneous file.
|
require 'dotenv'
Dotenv.load
ENV['RACK_ENV'] = 'test'
require_relative 'config/application'
require_relative 'app'
require 'minitest/autorun'
# End-to-end smoke tests for the Pension Guidance prototype, driven through
# Capybara's DSL against the in-process Rack app.
class SmokeTest < Minitest::Test
include Capybara::DSL
def setup
Capybara.app = Prototype.new
end
def teardown
# Drop session state so one test cannot leak cookies into the next.
Capybara.reset_sessions!
end
def test_homepage
visit '/'
# NOTE(review): a later revision of this file expects 'The Pension
# Guidance Service' (singular) -- confirm which copy the homepage renders.
assert page.has_content? 'The Pensions Guidance Service'
end
# Walks the full phone-session booking journey through to confirmation.
def test_book_appointment
visit '/'
click_on('speak to a pension expert')
click_on('Start now')
click_on('Book a phone session')
# Pick a date and time
select('Wednesday, 17 December - 4:00pm', from: 'slots[]')
click_on('Continue')
# Fill in contact details
within('.form') do
fill_in('First name', with: 'Clark')
fill_in('Last name', with: 'Kent')
fill_in('Email', with: 'clark.kent@gmail.com')
fill_in('Phone number to call you on', with: '07460123456')
fill_in('Memorable word', with: 'cryptonite')
end
click_on('Continue')
# Confirm booking
click_on('Confirm booking')
assert page.has_content? 'You’ve successfully booked a pension guidance session'
end
def test_find_local_branch
visit '/'
click_on('speak to a pension expert')
click_on('Start now')
# Fill in postcode
within('.form') do
fill_in('Postcode', with: 'SW18 4XP')
end
click_on('Find a local branch')
# The SW18 postcode is expected to resolve to the Tooting branch.
assert page.has_content? 'Tooting'
end
def test_tax_calculator
visit '/pension-tax-calculator'
# Fill in pension and income
fill_in('pension', with: '10000')
fill_in('income', with: '10000')
click_on('Calculate')
assert page.has_content? 'Total amount you may get after tax'
end
end
Fix assertion.
require 'dotenv'
Dotenv.load
ENV['RACK_ENV'] = 'test'
require_relative 'config/application'
require_relative 'app'
require 'minitest/autorun'
# End-to-end smoke tests for the Pension Guidance prototype, driven through
# Capybara's DSL against the in-process Rack app.
class SmokeTest < Minitest::Test
include Capybara::DSL
def setup
Capybara.app = Prototype.new
end
def teardown
# Drop session state so one test cannot leak cookies into the next.
Capybara.reset_sessions!
end
def test_homepage
visit '/'
assert page.has_content? 'The Pension Guidance Service'
end
# Walks the full phone-session booking journey through to confirmation.
def test_book_appointment
visit '/'
click_on('speak to a pension expert')
click_on('Start now')
click_on('Book a phone session')
# Pick a date and time
select('Wednesday, 17 December - 4:00pm', from: 'slots[]')
click_on('Continue')
# Fill in contact details
within('.form') do
fill_in('First name', with: 'Clark')
fill_in('Last name', with: 'Kent')
fill_in('Email', with: 'clark.kent@gmail.com')
fill_in('Phone number to call you on', with: '07460123456')
fill_in('Memorable word', with: 'cryptonite')
end
click_on('Continue')
# Confirm booking
click_on('Confirm booking')
assert page.has_content? 'You’ve successfully booked a pension guidance session'
end
def test_find_local_branch
visit '/'
click_on('speak to a pension expert')
click_on('Start now')
# Fill in postcode
within('.form') do
fill_in('Postcode', with: 'SW18 4XP')
end
click_on('Find a local branch')
# The SW18 postcode is expected to resolve to the Tooting branch.
assert page.has_content? 'Tooting'
end
def test_tax_calculator
visit '/pension-tax-calculator'
# Fill in pension and income
fill_in('pension', with: '10000')
fill_in('income', with: '10000')
click_on('Calculate')
assert page.has_content? 'Total amount you may get after tax'
end
end
|
# encoding: utf-8
require 'rubygems'
require 'mechanize'
# 参考
# https://github.com/CloCkWeRX/toowoomba/blob/master/scraper.rb
OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE
I_KNOW_THAT_OPENSSL_VERIFY_PEER_EQUALS_VERIFY_NONE_IS_WRONG = nil
# Scraper for the OneCoin "joined people" counter endpoint.
class Onecoiner
attr_accessor :uri, :agent
def initialize
@uri = 'https://www.onecoin.eu/tech/other/getJoinedPeople'
# Mechanize user-agent *alias*, not a raw User-Agent string.
@agent = 'Mac Safari'
end
# Builds a fresh Mechanize client per request, with TLS peer verification
# disabled (site certificate apparently not verifiable -- see link below).
def mechanize
Mechanize.new do |m|
m.user_agent_alias = agent
# http://sinyt.hateblo.jp/entry/2013/12/24/203528
m.verify_mode = OpenSSL::SSL::VERIFY_NONE
end
end
# Fetches the endpoint and returns the stripped response body, or nil when
# no page was yielded.
def all
result = nil
mechanize.get(uri) do |page|
begin
result = page.body.strip
# NOTE(review): this bare rescue only guards page.body.strip, which should
# not raise in practice; errors from #get itself are NOT caught here --
# the rescue likely belongs around the #get call. Confirm intent.
rescue
return result
end
end
result
end
end
fix some bugs
# encoding: utf-8
require 'rubygems'
require 'mechanize'
# 参考
# https://github.com/CloCkWeRX/toowoomba/blob/master/scraper.rb
OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE
I_KNOW_THAT_OPENSSL_VERIFY_PEER_EQUALS_VERIFY_NONE_IS_WRONG = nil
# Scraper for the OneCoin "joined people" counter endpoint.
class Onecoiner
  attr_accessor :uri, :agent

  def initialize
    @uri = 'https://www.onecoin.eu/tech/other/getJoinedPeople'
    # Mechanize user-agent alias, not a raw User-Agent string.
    @agent = 'Mac Safari'
  end

  # Builds a fresh Mechanize client per request, with TLS peer verification
  # disabled for this host.
  def mechanize
    Mechanize.new do |client|
      client.user_agent_alias = agent
      # http://sinyt.hateblo.jp/entry/2013/12/24/203528
      client.verify_mode = OpenSSL::SSL::VERIFY_NONE
    end
  end

  # Fetches the endpoint and returns the stripped response body; yields nil
  # when no page was produced, and bails out early on an HTTP error status.
  def all
    fetched = nil
    mechanize.get(uri) do |page|
      begin
        fetched = page.body.strip
      rescue Mechanize::ResponseCodeError
        return fetched
      end
    end
    fetched
  end
end
|
added a model
# Thin ActiveRecord model mapped onto the legacy `responses` table.
class ResponsesModel < ActiveRecord::Base
  # Explicit table name: the conventional name would be `responses_models`.
  self.table_name = 'responses'
end
|
class TorrentSearch
def self.get_cid(type, category)
return nil if category.nil? || category == ''
case type
when 'extratorrent'
{
:movies => 4,
:tv => 8,
:music => 5,
:book => 2
}.fetch(category.to_sym, nil)
when 'thepiratebay'
{
:movies => 200,
:tv => 200,
:music => 100,
:book => 601
}.fetch(category.to_sym, nil)
when 'torrentleech'
{
:movies => 'Movies',
:tv => 'TV',
:book => 'Books'
}.fetch(category.to_sym, nil)
end
rescue => e
$speaker.tell_error(e, "TorrentSearch.get_cid")
end
def self.get_results(type, keyword, limit, category = '', filter_dead = 1, url = nil, sort_by = 'seeders', filter_out = [], qualities = {})
tries ||= 3
get_results = {}
cid = self.get_cid(type, category)
case type
when 'thepiratebay'
@search = Tpb::Search.new(keyword.gsub(/\'\w/,''), cid)
get_results = @search.links
when 'torrentleech'
@search = TorrentLeech::Search.new(keyword, url, cid)
get_results = @search.links
when 'yggtorrent'
@search = Yggtorrent::Search.new(keyword, url)
get_results = @search.links
when 'wop'
@search = Wop::Search.new(keyword, url)
get_results = @search.links
end
if get_results['torrents']
get_results['torrents'].select! do |t|
t['name'].match(Regexp.new(Utils.regexify(keyword, 0), Regexp::IGNORECASE))
end
filter_out.each do |fout|
get_results['torrents'].select! { |t| t[fout].to_i != 0 }
end
get_results['torrents'].select! { |t| t['size'].to_f >= qualities['min_size'].to_f * 1024 * 1024 } unless qualities['min_size'].nil?
get_results['torrents'].select! { |t| t['size'].to_f <= qualities['max_size'].to_f * 1024 * 1024 } unless qualities['max_size'].nil?
get_results['torrents'].select! { |t| t['seeders'].to_i > filter_dead.to_i } if filter_dead.to_i > 0
get_results['torrents'].sort_by! { |t| -t[sort_by].to_i }
get_results['torrents'] = get_results['torrents'].first(limit.to_i)
end
get_results
rescue => e
$speaker.tell_error(e, "TorrentSearch.get_results")
retry unless (tries -= 1) <= 0
end
def self.get_torrent_file(type, did, name = '', url = '', destination_folder = $temp_dir)
$speaker.speak_up("Will download torrent '#{name}' from #{url}")
return did if $pretend > 0
case type
when 'yggtorrent', 'wop', 'torrentleech'
@search.download(url, destination_folder, did)
end
did
rescue => e
$speaker.tell_error(e, "TorrentSearch.get_torrent_file")
nil
end
def self.random_pick(site:, url:, sort_by:, output: 1, destination_folder: $temp_dir)
case site
when 'yggtorrent', 'torrentleech'
search = get_results(site, '', 25, 'movies', 2, url, sort_by, ['leechers'])
else
search = []
end
(1..[output.to_i,1].max).each do |i|
download_id = search.empty? || search['torrents'].nil? || search['torrents'][i - 1].nil? ? 0 : i
return if download_id == 0
name = search['torrents'][download_id.to_i - 1]['name']
url = search['torrents'][download_id.to_i - 1]['torrent_link'] ? search['torrents'][download_id.to_i - 1]['torrent_link'] : ''
self.get_torrent_file(site, name, name, url, destination_folder) if (url && url != '')
end
end
def self.search(keywords:, limit: 50, category: '', no_prompt: 0, filter_dead: 1, move_completed: '', rename_main: '', main_only: 0, only_on_trackers: [], qualities: {})
success = nil
begin
keywords = eval(keywords)
rescue Exception
keywords = [keywords]
end
keywords.each do |keyword|
success = nil
TORRENT_TRACKERS.map{|x| x[:name]}.each do |type|
break if success
next if !only_on_trackers.nil? && !only_on_trackers.empty? && !only_on_trackers.include?(type)
next if TORRENT_TRACKERS.map{|x| x[:name]}.first != type && $speaker.ask_if_needed("Search for '#{keyword}' torrent on #{type}? (y/n)", no_prompt, 'y') != 'y'
success = self.t_search(type, keyword, limit, category, no_prompt, filter_dead, move_completed, rename_main, main_only, qualities)
end
end
success
rescue => e
$speaker.tell_error(e, "TorrentSearch.search")
nil
end
def self.sort_results(results, qualities)
MediaInfo.sort_media_files(results.map{|t|t[:file] = t['name']; t}, qualities)
end
def self.get_site_keywords(type, category = '')
category && category != '' && $config[type] && $config[type]['site_specific_kw'] && $config[type]['site_specific_kw'][category] ? " #{$config[type]['site_specific_kw'][category]}" : ''
end
def self.t_search(type, keyword, limit = 50, category = '', no_prompt = 0, filter_dead = 1, move_completed = '', rename_main = '', main_only = 0, qualities = {})
qualities = eval(qualities) if qualities.is_a?(String)
success = nil
keyword_s = keyword + self.get_site_keywords(type, category)
search = self.get_results(type, keyword_s, limit, category, filter_dead, nil, 'seeders', [], qualities)
search = self.get_results(type, keyword, limit, category, filter_dead, nil, 'seeders', [], qualities) if keyword_s != keyword && (search.empty? || search['torrents'].nil? || search['torrents'].empty?)
search = self.get_results(type, keyword.gsub(/\(?\d{4}\)?/,''), limit, category, filter_dead, nil, 'seeders', [], qualities) if keyword.gsub(/\(?\d{4}\)?/,'') != keyword&& (search.empty? || search['torrents'].nil? || search['torrents'].empty?)
search['torrents'] = sort_results(search['torrents'], qualities) if !qualities.nil? && !qualities.empty?
if no_prompt.to_i == 0
i = 1
if search['torrents'].nil? || search['torrents'].empty?
$speaker.speak_up("No results for '#{search['query']}' on #{type}")
return success
end
$speaker.speak_up("Showing result for '#{search['query']}' on #{type} (#{search['torrents'].length} out of total #{search['total'].to_i})")
search['torrents'].each do |torrent|
$speaker.speak_up('---------------------------------------------------------------')
$speaker.speak_up("Index: #{i}")
$speaker.speak_up("Name: #{torrent['name']}")
$speaker.speak_up("Size: #{(torrent['size'].to_f / 1024 / 1024 / 1024).round(2)} GB")
$speaker.speak_up("Seeders: #{torrent['seeders']}")
$speaker.speak_up("Leechers: #{torrent['leechers']}")
$speaker.speak_up("Added: #{torrent['added']}")
$speaker.speak_up("Link: #{URI.escape(torrent['link'])}")
$speaker.speak_up('---------------------------------------------------------------')
i += 1
end
end
download_id = $speaker.ask_if_needed('Enter the index of the torrent you want to download, or just hit Enter if you do not want to download anything: ', no_prompt, 1).to_i
if download_id.to_i > 0 && search['torrents'][download_id.to_i - 1]
did = (Time.now.to_f * 1000).to_i
name = search['torrents'][download_id.to_i - 1]['name']
url = search['torrents'][download_id.to_i - 1]['torrent_link'] ? search['torrents'][download_id.to_i - 1]['torrent_link'] : ''
magnet = search['torrents'][download_id.to_i - 1]['magnet_link']
if (url && url != '')
success = self.get_torrent_file(type, did, name, url)
elsif magnet && magnet != ''
$pending_magnet_links[did] = magnet
success = did
end
$deluge_options[did] = {
't_name' => name,
'move_completed' => move_completed,
'rename_main' => rename_main,
'main_only' => main_only.to_i
} if success
end
success
end
end
small fix
class TorrentSearch
def self.get_cid(type, category)
return nil if category.nil? || category == ''
case type
when 'extratorrent'
{
:movies => 4,
:tv => 8,
:music => 5,
:book => 2
}.fetch(category.to_sym, nil)
when 'thepiratebay'
{
:movies => 200,
:tv => 200,
:music => 100,
:book => 601
}.fetch(category.to_sym, nil)
when 'torrentleech'
{
:movies => 'Movies',
:tv => 'TV',
:book => 'Books'
}.fetch(category.to_sym, nil)
end
rescue => e
$speaker.tell_error(e, "TorrentSearch.get_cid")
end
def self.get_results(type, keyword, limit, category = '', filter_dead = 1, url = nil, sort_by = 'seeders', filter_out = [], qualities = {})
tries ||= 3
get_results = {}
cid = self.get_cid(type, category)
case type
when 'thepiratebay'
@search = Tpb::Search.new(keyword.gsub(/\'\w/, ''), cid)
get_results = @search.links
when 'torrentleech'
@search = TorrentLeech::Search.new(keyword, url, cid)
get_results = @search.links
when 'yggtorrent'
@search = Yggtorrent::Search.new(keyword, url)
get_results = @search.links
when 'wop'
@search = Wop::Search.new(keyword, url)
get_results = @search.links
end
if get_results['torrents']
get_results['torrents'].select! do |t|
t['name'].match(Regexp.new(Utils.regexify(keyword, 0), Regexp::IGNORECASE))
end
filter_out.each do |fout|
get_results['torrents'].select! { |t| t[fout].to_i != 0 }
end
get_results['torrents'].select! { |t| t['size'].to_f >= qualities['min_size'].to_f * 1024 * 1024 } unless qualities['min_size'].nil?
get_results['torrents'].select! { |t| t['size'].to_f <= qualities['max_size'].to_f * 1024 * 1024 } unless qualities['max_size'].nil?
get_results['torrents'].select! { |t| t['seeders'].to_i > filter_dead.to_i } if filter_dead.to_i > 0
get_results['torrents'].sort_by! { |t| -t[sort_by].to_i }
get_results['torrents'] = get_results['torrents'].first(limit.to_i)
end
get_results
rescue => e
$speaker.tell_error(e, "TorrentSearch.get_results")
retry unless (tries -= 1) <= 0
end
def self.get_torrent_file(type, did, name = '', url = '', destination_folder = $temp_dir)
$speaker.speak_up("Will download torrent '#{name}' from #{url}")
return did if $pretend > 0
case type
when 'yggtorrent', 'wop', 'torrentleech'
@search.download(url, destination_folder, did)
end
did
rescue => e
$speaker.tell_error(e, "TorrentSearch.get_torrent_file")
nil
end
def self.random_pick(site:, url:, sort_by:, output: 1, destination_folder: $temp_dir)
case site
when 'yggtorrent', 'torrentleech'
search = get_results(site, '', 25, 'movies', 2, url, sort_by, ['leechers'])
else
search = []
end
(1..[output.to_i, 1].max).each do |i|
download_id = search.empty? || search['torrents'].nil? || search['torrents'][i - 1].nil? ? 0 : i
return if download_id == 0
name = search['torrents'][download_id.to_i - 1]['name']
url = search['torrents'][download_id.to_i - 1]['torrent_link'] ? search['torrents'][download_id.to_i - 1]['torrent_link'] : ''
self.get_torrent_file(site, name, name, url, destination_folder) if (url && url != '')
end
end
def self.search(keywords:, limit: 50, category: '', no_prompt: 0, filter_dead: 1, move_completed: '', rename_main: '', main_only: 0, only_on_trackers: [], qualities: {})
success = nil
begin
keywords = eval(keywords)
rescue Exception
keywords = [keywords]
end
keywords.each do |keyword|
success = nil
TORRENT_TRACKERS.map { |x| x[:name] }.each do |type|
break if success
next if !only_on_trackers.nil? && !only_on_trackers.empty? && !only_on_trackers.include?(type)
next if TORRENT_TRACKERS.map { |x| x[:name] }.first != type && $speaker.ask_if_needed("Search for '#{keyword}' torrent on #{type}? (y/n)", no_prompt, 'y') != 'y'
success = self.t_search(type, keyword, limit, category, no_prompt, filter_dead, move_completed, rename_main, main_only, qualities)
end
end
success
rescue => e
$speaker.tell_error(e, "TorrentSearch.search")
nil
end
def self.sort_results(results, qualities)
MediaInfo.sort_media_files(results.map { |t| t[:file] = t['name']; t }, qualities)
end
def self.get_site_keywords(type, category = '')
category && category != '' && $config[type] && $config[type]['site_specific_kw'] && $config[type]['site_specific_kw'][category] ? " #{$config[type]['site_specific_kw'][category]}" : ''
end
# Searches a single tracker for `keyword`, prints the results, optionally asks
# the user which entry to download, and queues the chosen torrent (file or
# magnet). Returns a millisecond-timestamp download id on success, nil otherwise.
#
# NOTE(review): URI.escape was removed in Ruby 3.0 — the Link line below will
# raise NoMethodError on modern Rubies; confirm the target Ruby version.
def self.t_search(type, keyword, limit = 50, category = '', no_prompt = 0, filter_dead = 1, move_completed = '', rename_main = '', main_only = 0, qualities = {})
# Callers may pass qualities as a serialized hash string.
qualities = eval(qualities) if qualities.is_a?(String)
success = nil
keyword_s = keyword + self.get_site_keywords(type, category)
search = self.get_results(type, keyword_s, limit, category, filter_dead, nil, 'seeders', [], qualities)
# Fallback 1: retry without the site-specific keywords.
search = self.get_results(type, keyword, limit, category, filter_dead, nil, 'seeders', [], qualities) if keyword_s != keyword && (search.empty? || search['torrents'].nil? || search['torrents'].empty?)
# Fallback 2: retry with any 4-digit year stripped from the keyword.
search = self.get_results(type, keyword.gsub(/\(?\d{4}\)?/, ''), limit, category, filter_dead, nil, 'seeders', [], qualities) if keyword.gsub(/\(?\d{4}\)?/, '') != keyword&& (search.empty? || search['torrents'].nil? || search['torrents'].empty?)
search['torrents'] = sort_results(search['torrents'], qualities) if !qualities.nil? && !qualities.empty?
i = 1
if search['torrents'].nil? || search['torrents'].empty?
$speaker.speak_up("No results for '#{search['query']}' on #{type}")
return success
end
$speaker.speak_up("Showing result for '#{search['query']}' on #{type} (#{search['torrents'].length} out of total #{search['total'].to_i})")
search['torrents'].each do |torrent|
$speaker.speak_up('---------------------------------------------------------------')
$speaker.speak_up("Index: #{i}")
$speaker.speak_up("Name: #{torrent['name']}")
$speaker.speak_up("Size: #{(torrent['size'].to_f / 1024 / 1024 / 1024).round(2)} GB")
$speaker.speak_up("Seeders: #{torrent['seeders']}")
$speaker.speak_up("Leechers: #{torrent['leechers']}")
$speaker.speak_up("Added: #{torrent['added']}")
$speaker.speak_up("Link: #{URI.escape(torrent['link'])}")
$speaker.speak_up('---------------------------------------------------------------')
i += 1
end
# With no_prompt set, this defaults to downloading the first (best) result.
download_id = $speaker.ask_if_needed('Enter the index of the torrent you want to download, or just hit Enter if you do not want to download anything: ', no_prompt, 1).to_i
if download_id.to_i > 0 && search['torrents'][download_id.to_i - 1]
# Millisecond timestamp doubles as a unique download id.
did = (Time.now.to_f * 1000).to_i
name = search['torrents'][download_id.to_i - 1]['name']
url = search['torrents'][download_id.to_i - 1]['torrent_link'] ? search['torrents'][download_id.to_i - 1]['torrent_link'] : ''
magnet = search['torrents'][download_id.to_i - 1]['magnet_link']
# Prefer a direct .torrent link; fall back to a magnet link if present.
if (url && url != '')
success = self.get_torrent_file(type, did, name, url)
elsif magnet && magnet != ''
$pending_magnet_links[did] = magnet
success = did
end
# Record post-download handling options for the deluge client.
$deluge_options[did] = {
't_name' => name,
'move_completed' => move_completed,
'rename_main' => rename_main,
'main_only' => main_only.to_i
} if success
end
success
end
end |
module R2Z2
module Commands
module Add
extend Discordrb::Commands::CommandContainer
# Simple liveness check.
command(:ping, bucket: :limit, description: 'Responds with pong') { |event| event.respond("Pong!") }
# Adds the mentioned user to the named role. Restricted to holders of the two
# whitelisted moderator roles.
command(:add_role, description: 'Enables easily adding a user to a list of roles', usage: 'add_role roll user') do |event, *role_name|
  break unless [289607519154864128, 289606790767837184].any? { |id| event.user.role?(id) }
  # BUG FIX: `roles { ... }` ignored the block and returned the full role list
  # (always truthy, and not a Role); use `find` to actually look the role up.
  role = event.server.roles.find { |r| r.name == role_name.join(' ') }
  next "Role not found: #{role_name.join(' ')}" unless role
  member = event.message.mentions.first.on(event.server)
  member.add_role(role)
  "I've added #{event.message.mentions.first} to #{role_name.join(' ')}"
end
# Hands out ice cream emoji.
command(:ice_cream, description: 'Gives out ice cream') do |event|
  event.respond(":ice_cream: :ice_cream:")
end
# Posts the bot's OAuth invite link.
command(:invite, bucket: :limit, description: 'Invite R2Z2 to your channel') do |event|
  event.respond("Invite me via #{R2Z2.invite_url}")
end
# Slaps a random (non-bot) online member with a trout.
command(:fish, bucket: :limit, rate_limit_message: '%User%, quit being a dick for %time% more seconds.', description: 'Apply directly to the forehead') do |event|
  members = event.server.online_members
  # BUG FIX: `reject!` returns nil when nothing was removed, making the chained
  # `map!` raise NoMethodError. Use the non-destructive forms instead.
  ids = members.reject(&:current_bot?).map { |m| "#{m.id}" }
  event.respond "*slaps around <@#{ids.sample}> with a large trout*"
end
# Runs a DuckDuckGo zero-click query and replies with the first related topic.
command(:search, description: 'Performs a duckduckgo search', usage: 'search <query>', min_args: 1) do |event, *query|
  results = DuckDuckGo.new.zeroclickinfo(query.join(' '))
  event << results.related_topics["_"][0].text
end
# Registers a new Twitch streamer (stored downcased) for the cron watcher.
# NOTE(review): $streamer_hash is never updated or persisted here, unlike
# delstreamer — presumably IDLookUp records the streamer as a side effect;
# confirm, otherwise the success message below is misleading.
command(:addstreamer, description: 'Adds a streamer', usage: 'addstreamer <username>', min_args: 1) do |event, name|
if (name.is_a? String) and !($streamer_hash.include? name.downcase)
streamer = R2Z2Twitch.new(name.downcase)
streamer.IDLookUp
event << "I've added " + name.downcase + " to the list of streamers"
else
event << "Enter a valid username"
end
end
# Removes a streamer from the tracked list and persists the change to disk.
command(:delstreamer, description: 'Removes a streamer', usage: 'delstreamer <username>', min_args: 1) do |event, name|
  # CONSISTENCY FIX: addstreamer stores keys downcased, so look the name up
  # downcased here too; previously `delstreamer Foo` could never match "foo".
  if (name.is_a? String) and ($streamer_hash.include? name.downcase)
    $streamer_hash.delete(name.downcase)
    open("#{Dir.pwd}/data/streamers.yaml", "w") { |f| f.write($streamer_hash.to_yaml) }
    event << "I've removed " + name.downcase + " from the list of streamers"
  else
    event << "Enter a valid username"
  end
end
# Every two minutes, announce any tracked streamer that has just gone live.
$timer.cron '*/2 * * * *' do
  message = $streamer_hash.keys.map do |key|
    streamer = R2Z2Twitch.new(key)
    streamer.IDLookUp
    streamer.StreamStatus if streamer.started_streaming?
  end.compact.join("\n")
  # ROBUSTNESS: Discord rejects empty message bodies — only post when at least
  # one streamer actually went live this tick.
  R2Z2.send_message(289603265677492245, message) unless message.empty?
end
# Reports the current status of every tracked streamer.
command(:allstream, description: 'Checks all streamers', usage: 'allstream') do |event|
  $streamer_hash.each_key do |key|
    streamer = R2Z2Twitch.new(key)
    streamer.IDLookUp
    event << streamer.StreamStatus
  end
  # BUG FIX: `return` inside this (non-lambda) command block raises
  # LocalJumpError when invoked; end with a plain nil so no extra reply is sent.
  nil
end
# Reports the live status of a single streamer.
command(:streamerstatus, description: 'Checks the status of a streamer', usage: 'streamerstatus <username>', min_args: 1) do |event, name|
  next event << "Enter a valid username" unless name.is_a?(String)
  streamer = R2Z2Twitch.new(name)
  streamer.IDLookUp
  event << streamer.StreamStatus
end
# Rolls <number> dice with <num2> sides each (capped at 100 dice).
command(:roll, description: 'Rolls a number of dice', usage: 'roll <number> <number>') do |event, number, num2|
  # BUG FIX: `x.to_i.is_a? Numeric` is always true, so the old checks never
  # rejected non-numeric input; additionally Random.rand(0) raises when the
  # die size parsed to zero. Validate both arguments as positive integers.
  if number.to_s =~ /\A\d+\z/ && num2.to_s =~ /\A[1-9]\d*\z/
    if number.to_i > 100
      event << "No, fuck you."
    else
      event << "Rolling #{number}d#{num2}"
      event << number.to_i.times.map { 1 + Random.rand(num2.to_i) }
    end
  else
    event << "I need numbers please."
  end
end
# Dumps the runtime statistics collected in $stats plus a rough ping estimate.
command(:stats, description: 'Shows bot statistics') do |event|
  latency_ms = ((Time.now - event.timestamp) * 1000).to_i
  [
    "Servers: #{$stats["servers"]}.",
    "Users: #{$stats["users"]}.",
    "Times mentioned: #{$stats["mentions"]}.",
    "Uptime: #{$stats["uptime"]}.",
    "Urls shortened: #{$stats["urls_shortened"]}.",
    "Youtube videos found: #{$stats["videos_found"]}",
    "Songs played: #{$stats["songs_played"]}",
    "Messages read: #{$stats["messages_read"]}.",
    "Ping: #{latency_ms}ms."
  ].each { |line| event << line }
end
# Shows the current music queue, or a hint when it is empty.
command(:queue, description: 'Displays current music queue.') do |event|
  player = event.server.music_player
  player.queue.empty? ? 'Queue is empty, use `add` to add more songs.' : "`#{player.table}`"
end
# Flips the repeat flag and reports the new state.
command(:repeat, description: 'Toggles repeat.', required_permissions: [:manage_server]) do |event|
  player = event.server.music_player
  player.repeat = !player.repeat
  "Repeat is now #{bool_to_words(player.repeat)}."
end
# Skips the currently playing song (manage-server only). `break` exits the
# command block early, replying with nothing, when there is nothing to skip.
command(:skip, description: 'Skips current song.', required_permissions: [:manage_server]) do |event|
break if event.server.music_player.queue.empty? || !event.server.music_player.playing?
# Signal the player loop to advance, then stop current playback immediately.
event.server.music_player.skip = true
event.voice.stop_playing if event.voice
nil
end
# Searches YouTube and replies with a short link to the first match.
command(:yt, description: 'Finds youtube videos.', min_args: 1, usage: 'yt <query>') do |event, *query|
  video = GOOGLE.find_video(query.join(' '))
  next 'Such video does not exist.' unless video
  $stats["videos_found"] += 1
  event << "https://youtu.be/#{video}"
end
# Walks the user through the music-bot workflow.
command(:musichelp, description: 'Displays information on how to use music features.') do |event|
  event << 'To start using music bot a user with `manage server` permission has to invite it to a channel by using `join` command.'
  event << 'Then you can add songs by using `add` command.'
  event << 'Use `queue` command to see added songs.'
  event << 'Users with `manage server` permission can remove songs from queue by using `clearqueue <id>` command.'
  # Typo fix in user-facing text: "automaticlly" -> "automatically".
  event << 'Each song will start playing automatically after the last one finishes.'
  event << "If you're not using music bot features anymore use `leave` command."
  event << 'You can find more help for each of these commands by using `help <commandname>` command.'
end
# Removes a single song by 1-based index, or disconnects entirely with `all`.
command(:clearqueue, description: 'Deletes songs from server queue.', usage: 'clearqueue <index/all>', required_permissions: [:manage_server], min_args: 1) do |event, argument|
  if argument.chomp == 'all'
    event.server.music_player.disconnect
  elsif argument.to_i.between?(1, event.server.music_player.queue.length)
    index = argument.to_i - 1
    if index.zero?
      # Index 1 is the currently-playing song: stop playback instead of
      # deleting from the queue, and drop repeat so it does not restart.
      event.voice.stop_playing
      event.server.music_player.repeat = false
    else
      event.server.music_player.delete_song_at(index)
    end
  else
    # BUG FIX: the old `elsif !argument.to_i.between?(...)` matched every
    # remaining input, so the final "Unknown argument" branch was unreachable.
    next "Can't find song with such index"
  end
  nil
end
#Makes R2 Leave a voice channel
# Disconnects the server's music player (clears queue and voice connection).
command(:leave, description: 'Makes the bot leave your voice channel.', required_permissions: [:manage_server]) do |event|
event.server.music_player.disconnect
nil
end
#Makes R2 join voice channel
# Connects the bot to the caller's voice channel and wires the server's music
# player to the new voice connection and the invoking text channel.
command(:join, description: 'Makes the bot join your voice channel.', required_permissions: [:manage_server]) do |event|
channel = event.user.voice_channel
# Check if channel is valid.
if !channel || channel == event.server.afk_channel
next 'First join a valid voice channel.'
end
# Try to join the voice channel.
begin
event.bot.voice_connect(channel)
# Encode with avconv — presumably the host only has avconv, not ffmpeg; confirm.
event.voice.encoder.use_avconv = true
rescue Discordrb::Errors::NoPermission
next 'Please make sure I have permission to join this channel.'
end
# Set voice object that should be used for playback.
event.server.music_player.voice = event.voice
# Set channel that should be used for bot responses.
event.server.music_player.channel = event.channel
LOGGER.debug "Music bot joined #{event.channel.id}."
"Joined \"#{channel.name}\". Use `add` command if you want to add songs to queue."
end
# Owner-only kill switch: announces and terminates the whole process.
command(:exit, help_available: false) do |event|
# Hard-coded owner user id gate; anyone else is ignored silently.
break unless event.user.id == 216142038574301195
R2Z2.send_message(event.channel.id, 'Beep')
exit
end
# Adds a song to server queue and starts playing it.
command(:add, description: 'Adds a song to server queue and starts playing it.', usage: 'add <query>', min_args: 1) do |event, *query|
  if !event.voice
    next 'First make me join your voice channel by using `join` command.'
  elsif event.server.music_player.queue.length >= MusicBot::MAX_SONGS_IN_QUEUE
    # Typo fix in user-facing text: removed the duplicated word in
    # "Music music queue is too long.".
    next 'Music queue is too long.'
  end
  # Find the video and let user know if it does not exist.
  query = query.join(' ')
  video_id = GOOGLE.find_video(query)
  next 'Such video does not exist.' unless video_id
  # Download the song and add it to queue.
  # If this succeeds then start playing it unless music is already being played.
  if event.server.music_player.add(video_id)
    $stats["songs_played"] += 1
    event.server.music_player.start_loop unless event.server.music_player.playing?
  end
  nil
end
end
end
end
Update commands.rb: look up roles with roles.find in add_role, stop echoing the raw mention in its reply, and add an owner-only eval command
module R2Z2
module Commands
module Add
extend Discordrb::Commands::CommandContainer
command(:ping, bucket: :limit, description: 'Responds with pong') do |event|
event.respond "Pong!"
end
# Adds the mentioned user to the named role; limited to the two moderator roles.
command(:add_role, description: 'Enables easily adding a user to a list of roles', usage: 'add_role roll user') do |event, _mention, *role_name|
  allowed = [289607519154864128, 289606790767837184].any? { |id| event.user.role?(id) }
  break unless allowed
  wanted = role_name.join(' ')
  role = event.server.roles.find { |r| r.name == wanted }
  next "Role not found: #{wanted}" unless role
  event.message.mentions.first.on(event.server).add_role(role)
  "I've added that user to #{wanted}"
end
# Owner-only arbitrary code execution in the bot process.
# SECURITY(review): eval of chat input is remote code execution by design; the
# only protection is the hard-coded owner id check, and help_available: true
# advertises the command in help — consider setting it to false.
command(:eval, help_available: true) do |event, *code|
break unless event.user.id == 216142038574301195
begin
eval code.join(' ')
rescue
# Reply with a generic error rather than leaking the exception to chat.
'An error occurred 😞'
end
end
command(:ice_cream, description: 'Gives out ice cream') do |event|
event.respond ":ice_cream: :ice_cream:"
end
command(:invite, bucket: :limit, description: 'Invite R2Z2 to your channel') do |event|
event.respond "Invite me via #{R2Z2.invite_url}"
end
# Slaps a random (non-bot) online member with a trout.
command(:fish, bucket: :limit, rate_limit_message: '%User%, quit being a dick for %time% more seconds.', description: 'Apply directly to the forehead') do |event|
  members = event.server.online_members
  # BUG FIX: `reject!` returns nil when nothing was removed, making the chained
  # `map!` raise NoMethodError. Use the non-destructive forms instead.
  ids = members.reject(&:current_bot?).map { |m| "#{m.id}" }
  event.respond "*slaps around <@#{ids.sample}> with a large trout*"
end
command(:search, description: 'Performs a duckduckgo search', usage: 'search <query>', min_args: 1) do |event, *query|
query = query.join(' ')
ddg = DuckDuckGo.new
search = ddg.zeroclickinfo(query)
event << search.related_topics["_"][0].text
end
command(:addstreamer, description: 'Adds a streamer', usage: 'addstreamer <username>', min_args: 1) do |event, name|
if (name.is_a? String) and !($streamer_hash.include? name.downcase)
streamer = R2Z2Twitch.new(name.downcase)
streamer.IDLookUp
event << "I've added " + name.downcase + " to the list of streamers"
else
event << "Enter a valid username"
end
end
command(:delstreamer, description: 'Removes a streamer', usage: 'delstreamer <username>', min_args: 1) do |event, name|
if (name.is_a? String) and ($streamer_hash.include? name)
$streamer_hash.delete(name)
open("#{Dir.pwd}/data/streamers.yaml", "w") { |f| f.write($streamer_hash.to_yaml) }
event << "I've removed " + name + " from the list of streamers"
else
event << "Enter a valid username"
end
end
# Every two minutes, announce any tracked streamer that has just gone live.
$timer.cron '*/2 * * * *' do
  message = $streamer_hash.keys.map do |key|
    streamer = R2Z2Twitch.new(key)
    streamer.IDLookUp
    streamer.StreamStatus if streamer.started_streaming?
  end.compact.join("\n")
  # ROBUSTNESS: Discord rejects empty message bodies — only post when at least
  # one streamer actually went live this tick.
  R2Z2.send_message(289603265677492245, message) unless message.empty?
end
# Reports the current status of every tracked streamer.
command(:allstream, description: 'Checks all streamers', usage: 'allstream') do |event|
  $streamer_hash.each_key do |key|
    streamer = R2Z2Twitch.new(key)
    streamer.IDLookUp
    event << streamer.StreamStatus
  end
  # BUG FIX: `return` inside this (non-lambda) command block raises
  # LocalJumpError when invoked; end with a plain nil so no extra reply is sent.
  nil
end
command(:streamerstatus, description: 'Checks the status of a streamer', usage: 'streamerstatus <username>', min_args: 1) do |event, name|
if name.is_a? String
streamer = R2Z2Twitch.new(name)
streamer.IDLookUp
event << streamer.StreamStatus
else
event << "Enter a valid username"
end
end
# Rolls <number> dice with <num2> sides each (capped at 100 dice).
command(:roll, description: 'Rolls a number of dice', usage: 'roll <number> <number>') do |event, number, num2|
  # BUG FIX: `x.to_i.is_a? Numeric` is always true, so the old checks never
  # rejected non-numeric input; additionally Random.rand(0) raises when the
  # die size parsed to zero. Validate both arguments as positive integers.
  if number.to_s =~ /\A\d+\z/ && num2.to_s =~ /\A[1-9]\d*\z/
    if number.to_i > 100
      event << "No, fuck you."
    else
      event << "Rolling #{number}d#{num2}"
      event << number.to_i.times.map { 1 + Random.rand(num2.to_i) }
    end
  else
    event << "I need numbers please."
  end
end
command(:stats, description: 'Shows bot statistics') do |event|
ping = ((Time.now - event.timestamp) * 1000).to_i
event << "Servers: #{$stats["servers"]}."
event << "Users: #{$stats["users"]}."
event << "Times mentioned: #{$stats["mentions"]}."
event << "Uptime: #{$stats["uptime"]}."
event << "Urls shortened: #{$stats["urls_shortened"]}."
event << "Youtube videos found: #{$stats["videos_found"]}"
event << "Songs played: #{$stats["songs_played"]}"
event << "Messages read: #{$stats["messages_read"]}."
event << "Ping: #{ping}ms."
end
command(:queue, description: 'Displays current music queue.') do |event|
if event.server.music_player.queue.empty?
'Queue is empty, use `add` to add more songs.'
else
"`#{event.server.music_player.table}`"
end
end
command(:repeat, description: 'Toggles repeat.', required_permissions: [:manage_server]) do |event|
event.server.music_player.repeat = !event.server.music_player.repeat
"Repeat is now #{bool_to_words(event.server.music_player.repeat)}."
end
command(:skip, description: 'Skips current song.', required_permissions: [:manage_server]) do |event|
break if event.server.music_player.queue.empty? || !event.server.music_player.playing?
event.server.music_player.skip = true
event.voice.stop_playing if event.voice
nil
end
command(:yt, description: 'Finds youtube videos.', min_args: 1, usage: 'yt <query>') do |event, *query|
video = GOOGLE.find_video(query.join(' '))
if video
$stats["videos_found"] += 1
event << "https://youtu.be/#{video}"
else
'Such video does not exist.'
end
end
# Walks the user through the music-bot workflow.
command(:musichelp, description: 'Displays information on how to use music features.') do |event|
  event << 'To start using music bot a user with `manage server` permission has to invite it to a channel by using `join` command.'
  event << 'Then you can add songs by using `add` command.'
  event << 'Use `queue` command to see added songs.'
  event << 'Users with `manage server` permission can remove songs from queue by using `clearqueue <id>` command.'
  # Typo fix in user-facing text: "automaticlly" -> "automatically".
  event << 'Each song will start playing automatically after the last one finishes.'
  event << "If you're not using music bot features anymore use `leave` command."
  event << 'You can find more help for each of these commands by using `help <commandname>` command.'
end
# Removes a single song by 1-based index, or disconnects entirely with `all`.
command(:clearqueue, description: 'Deletes songs from server queue.', usage: 'clearqueue <index/all>', required_permissions: [:manage_server], min_args: 1) do |event, argument|
  if argument.chomp == 'all'
    event.server.music_player.disconnect
  elsif argument.to_i.between?(1, event.server.music_player.queue.length)
    index = argument.to_i - 1
    if index.zero?
      # Index 1 is the currently-playing song: stop playback instead of
      # deleting from the queue, and drop repeat so it does not restart.
      event.voice.stop_playing
      event.server.music_player.repeat = false
    else
      event.server.music_player.delete_song_at(index)
    end
  else
    # BUG FIX: the old `elsif !argument.to_i.between?(...)` matched every
    # remaining input, so the final "Unknown argument" branch was unreachable.
    next "Can't find song with such index"
  end
  nil
end
#Makes R2 Leave a voice channel
command(:leave, description: 'Makes the bot leave your voice channel.', required_permissions: [:manage_server]) do |event|
event.server.music_player.disconnect
nil
end
#Makes R2 join voice channel
command(:join, description: 'Makes the bot join your voice channel.', required_permissions: [:manage_server]) do |event|
channel = event.user.voice_channel
# Check if channel is valid.
if !channel || channel == event.server.afk_channel
next 'First join a valid voice channel.'
end
# Try to join the voice channel.
begin
event.bot.voice_connect(channel)
event.voice.encoder.use_avconv = true
rescue Discordrb::Errors::NoPermission
next 'Please make sure I have permission to join this channel.'
end
# Set voice object that should be used for playback.
event.server.music_player.voice = event.voice
# Set channel that should be used for bot responses.
event.server.music_player.channel = event.channel
LOGGER.debug "Music bot joined #{event.channel.id}."
"Joined \"#{channel.name}\". Use `add` command if you want to add songs to queue."
end
command(:exit, help_available: false) do |event|
break unless event.user.id == 216142038574301195
R2Z2.send_message(event.channel.id, 'Beep')
exit
end
# Adds a song to server queue and starts playing it.
command(:add, description: 'Adds a song to server queue and starts playing it.', usage: 'add <query>', min_args: 1) do |event, *query|
if !event.voice
next 'First make me join your voice channel by using `join` command.'
elsif event.server.music_player.queue.length >= MusicBot::MAX_SONGS_IN_QUEUE
next 'Music music queue is too long.'
end
# Find the video and let user know if it does not exist.
query = query.join(' ')
video_id = GOOGLE.find_video(query)
next 'Such video does not exist.' unless video_id
# Download the song and add it to queue.
# If this succeeds then start playing it unless music is already being played.
if event.server.music_player.add(video_id)
$stats["songs_played"] += 1
event.server.music_player.start_loop unless event.server.music_player.playing?
end
nil
end
end
end
end
|
# Z3PO keyword responders.
module Z3PO
  # Memes a YouTube link (auto-deleted after 15s) whenever "4chan" is mentioned.
  Z3PO.message(with_text: "4chan") do |event|
    Z3PO.send_temporary_message(event.channel, "https://www.youtube.com/watch?v=GoRPVsN2SVM", 15)
  end
  # Forwards music-help requests to the music bot channel.
  Z3PO.message(with_text: "music help") do |event|
    Z3PO.send_message(313009155969384448, "<@#{event.user.id}>, please allow me")
    Z3PO.send_message(313009155969384448, "!musichelp")
  end
  # Deletes the triggering message and replies with one of two images.
  Z3PO.message(with_text: /that's my fetish/i) do |event|
    # BUG FIX: interpolating the user object (`event.user`) does not produce a
    # valid Discord mention; the <@...> syntax requires the numeric user id.
    image = Random.rand(2) == 1 ? "http://i.imgur.com/7PyvoJu.jpg" : "http://i.imgur.com/VOBdI06.jpg"
    event.message.delete
    Z3PO.send_message(event.channel, "<@#{event.user.id}>: #{image}")
  end
end
Z3PO: interpolate event.user.username instead of the whole user object in the fetish responses
# Z3PO keyword responders.
module Z3PO
  # Memes a YouTube link (auto-deleted after 15s) whenever "4chan" is mentioned.
  Z3PO.message(with_text: "4chan") do |event|
    Z3PO.send_temporary_message(event.channel, "https://www.youtube.com/watch?v=GoRPVsN2SVM", 15)
  end
  # Forwards music-help requests to the music bot channel.
  Z3PO.message(with_text: "music help") do |event|
    Z3PO.send_message(313009155969384448, "<@#{event.user.id}>, please allow me")
    Z3PO.send_message(313009155969384448, "!musichelp")
  end
  # Deletes the triggering message and replies with one of two images.
  Z3PO.message(with_text: /that's my fetish/i) do |event|
    # BUG FIX: Discord's <@...> mention syntax requires the numeric user id;
    # "<@#{username}>" is rendered as literal text, not a mention.
    image = Random.rand(2) == 1 ? "http://i.imgur.com/7PyvoJu.jpg" : "http://i.imgur.com/VOBdI06.jpg"
    event.message.delete
    Z3PO.send_message(event.channel, "<@#{event.user.id}>: #{image}")
  end
end
|
# Mixin with helpers for shelling out to the groonga/grndb binaries and
# capturing their output via log files set up by the including test case.
module CommandRunner
# Raised when an external command exits non-zero; exposes the captured stdout
# and stderr alongside the formatted message.
class Error < StandardError
attr_reader :output
attr_reader :error_output
def initialize(output, error_output, message)
@output = output
@error_output = error_output
super(message)
end
end
# Runs the given command line with stdout/stderr redirected to
# @output_log_path/@error_output_log_path, then returns [output, error_output].
# Raises Error with both captured streams embedded on failure.
def run_command(*command_line)
env = {}
options = {
:out => @output_log_path.to_s,
:err => @error_output_log_path.to_s,
}
succeeded = system(env, *command_line, options)
output = @output_log_path.read
error_output = @error_output_log_path.read
unless succeeded
message = <<-MESSAGE.chomp
failed to run: #{command_line.join(" ")}
-- output start --
#{output.chomp}
-- output end --
-- error output start --
#{error_output.chomp}
-- error output end --
MESSAGE
raise Error.new(output, error_output, message)
end
[output, error_output]
end
# Runs a groonga command against @database_path, creating the database with
# -n on first use.
def groonga(command, *arguments)
command_line = [
"groonga",
"--log-path", @log_path.to_s,
"--query-log-path", @query_log_path.to_s,
]
command_line << "-n" unless @database_path.exist?
command_line << @database_path.to_s
command_line << command
command_line.concat(arguments)
run_command(*command_line)
end
# Runs a grndb subcommand (e.g. "check", "recover") against @database_path.
# NOTE(review): uses the bare "grndb" name rather than grndb_path below —
# confirm plain PATH resolution is intended here.
def grndb(command, *arguments)
command_line = [
"grndb",
command,
@database_path.to_s,
]
command_line.concat(arguments)
run_command(*command_line)
end
# Looks up a program on PATH, preferring libtool "lt-" wrappers in .libs
# directories (for uninstalled builds). Returns nil when not found.
def find_program(name)
ENV["PATH"].split(File::PATH_SEPARATOR).each do |path|
lt_program_path = File.join(path, ".libs", "lt-#{name}")
return lt_program_path if File.exist?(lt_program_path)
program_path = File.join(path, name)
return program_path if File.exist?(program_path)
end
nil
end
# Path to the grndb executable, or nil when unavailable.
def grndb_path
find_program("grndb")
end
end
test grndb recover: add tests; run_command now returns a Result object instead of an [output, error_output] pair
# Mixin with helpers for shelling out to the groonga/grndb binaries and
# capturing their output via log files set up by the including test case.
module CommandRunner
# Raised when an external command exits non-zero; exposes the captured stdout
# and stderr alongside the formatted message.
class Error < StandardError
attr_reader :output
attr_reader :error_output
def initialize(output, error_output, message)
@output = output
@error_output = error_output
super(message)
end
end
# Value object returned by run_command on success, carrying the captured
# stdout and stderr of the command.
class Result
attr_reader :output
attr_reader :error_output
def initialize(output, error_output)
@output = output
@error_output = error_output
end
end
# Runs the given command line with stdout/stderr redirected to
# @output_log_path/@error_output_log_path, then returns a Result.
# Raises Error with both captured streams embedded on failure.
def run_command(*command_line)
env = {}
options = {
:out => @output_log_path.to_s,
:err => @error_output_log_path.to_s,
}
succeeded = system(env, *command_line, options)
output = @output_log_path.read
error_output = @error_output_log_path.read
unless succeeded
message = <<-MESSAGE.chomp
failed to run: #{command_line.join(" ")}
-- output start --
#{output.chomp}
-- output end --
-- error output start --
#{error_output.chomp}
-- error output end --
MESSAGE
raise Error.new(output, error_output, message)
end
Result.new(output, error_output)
end
# Runs a groonga command against @database_path, creating the database with
# -n on first use.
def groonga(command, *arguments)
command_line = [
"groonga",
"--log-path", @log_path.to_s,
"--query-log-path", @query_log_path.to_s,
]
command_line << "-n" unless @database_path.exist?
command_line << @database_path.to_s
command_line << command
command_line.concat(arguments)
run_command(*command_line)
end
# Runs a grndb subcommand (e.g. "check", "recover") against @database_path.
# NOTE(review): uses the bare "grndb" name rather than grndb_path below —
# confirm plain PATH resolution is intended here.
def grndb(command, *arguments)
command_line = [
"grndb",
command,
@database_path.to_s,
]
command_line.concat(arguments)
run_command(*command_line)
end
# Looks up a program on PATH, preferring libtool "lt-" wrappers in .libs
# directories (for uninstalled builds). Returns nil when not found.
def find_program(name)
ENV["PATH"].split(File::PATH_SEPARATOR).each do |path|
lt_program_path = File.join(path, ".libs", "lt-#{name}")
return lt_program_path if File.exist?(lt_program_path)
program_path = File.join(path, name)
return program_path if File.exist?(program_path)
end
nil
end
# Path to the grndb executable, or nil when unavailable.
def grndb_path
find_program("grndb")
end
end
|
require 'test_helper'
# Tests for Everypolitician::Popolo::Person and the People collection.
# FIX: replaced every `assert_equal nil, x` with `assert_nil x` — asserting
# equality against nil is deprecated in Minitest 5 and an error in Minitest 6.
class PersonTest < Minitest::Test
  def test_reading_popolo_people
    popolo = Everypolitician::Popolo::JSON.new(persons: [{ id: '123', name: 'Bob' }])
    assert_instance_of Everypolitician::Popolo::People, popolo.persons
    person = popolo.persons.first
    assert_instance_of Everypolitician::Popolo::Person, person
  end

  def test_no_persons_in_popolo_data
    popolo = Everypolitician::Popolo::JSON.new(other_data: [{ id: '123', foo: 'Bar' }])
    assert_equal true, popolo.persons.none?
  end

  def test_accessing_person_properties
    popolo = Everypolitician::Popolo::JSON.new(persons: [{ id: '123', name: 'Bob' }])
    person = popolo.persons.first
    assert person.key?(:id)
    assert_equal '123', person[:id]
  end

  def test_person_twitter_contact_details
    person = Everypolitician::Popolo::Person.new(
      contact_details: [{ type: 'twitter', value: 'bob' }]
    )
    assert_equal 'bob', person.twitter
  end

  def test_person_twitter_links
    person = Everypolitician::Popolo::Person.new(
      links: [{ note: 'twitter', url: 'https://twitter.com/bob' }]
    )
    assert_equal 'https://twitter.com/bob', person.twitter
  end

  def test_person_contact_details_and_twitter_links
    person = Everypolitician::Popolo::Person.new(
      contact_details: [{ note: 'cell', value: '+1-555-555-0100' }],
      links: [{ note: 'twitter', url: 'https://twitter.com/bob' }]
    )
    assert_equal 'https://twitter.com/bob', person.twitter
  end

  def test_accessing_basic_person_attributes
    person = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob', other_names: [])
    assert_equal '123', person.id
    assert_equal 'Bob', person.name
    assert_equal [], person.other_names
  end

  def test_person_name_at
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_equal person.name_at('2016-01-11'), 'Bob'
    person = Everypolitician::Popolo::Person.new(
      name: 'Bob',
      other_names: [
        { name: 'Robert', start_date: '1989-01-01', end_date: '1999-12-31' },
      ]
    )
    assert_equal 'Robert', person.name_at('1990-06-01')
    # Overlapping other_names for the queried date should raise.
    person = Everypolitician::Popolo::Person.new(
      name: 'Bob',
      other_names: [
        { name: 'Robert', start_date: '1989-01-01', end_date: '1999-12-31' },
        { name: 'Bobby', start_date: '1989-01-01', end_date: '2012-12-31' },
      ]
    )
    assert_raises Everypolitician::Popolo::Person::Error do
      person.name_at('1996-01-01')
    end
  end

  def test_person_facebook
    person = Everypolitician::Popolo::Person.new({})
    assert_nil person.facebook
    person = Everypolitician::Popolo::Person.new(
      links: [{ note: 'facebook', url: 'https://www.facebook.com/bob' }]
    )
    assert_equal 'https://www.facebook.com/bob', person.facebook
  end

  def test_person_identifier
    person = Everypolitician::Popolo::Person.new(
      identifiers: [
        { scheme: 'foo', identifier: 'bar' },
        { scheme: 'wikidata', identifier: 'zap' },
      ]
    )
    assert_equal 'bar', person.identifier('foo')
    assert_equal 'zap', person.identifier('wikidata')
  end

  def test_person_wikidata
    person = Everypolitician::Popolo::Person.new({})
    assert_nil person.wikidata
    person = Everypolitician::Popolo::Person.new(
      identifiers: [{ scheme: 'wikidata', identifier: 'Q153149' }]
    )
    assert_equal 'Q153149', person.wikidata
  end

  def test_person_contacts
    person = Everypolitician::Popolo::Person.new(
      contact_details: [
        { type: 'phone', value: '9304832' },
        { type: 'fax', value: '9304833' },
      ]
    )
    assert_equal '9304832', person.contact('phone')
    assert_equal '9304832', person.phone
    assert_equal '9304833', person.fax
  end

  def test_person_no_contacts
    person = Everypolitician::Popolo::Person.new({})
    assert_nil person.contact('phone')
    assert_nil person.phone
    assert_nil person.fax
  end

  def test_person_sort_name
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_equal 'Bob', person.sort_name
    person = Everypolitician::Popolo::Person.new(name: 'Bob', sort_name: 'Robert')
    assert_equal 'Robert', person.sort_name
  end

  def test_person_email
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_nil person.email
    person = Everypolitician::Popolo::Person.new(name: 'Bob', email: 'bob@example.org')
    assert_equal 'bob@example.org', person.email
  end

  def test_person_image
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_nil person.image
    person = Everypolitician::Popolo::Person.new(name: 'Bob', image: 'http://example.org/img.jpeg')
    assert_equal 'http://example.org/img.jpeg', person.image
  end

  def test_person_gender
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_nil person.gender
    person = Everypolitician::Popolo::Person.new(name: 'Bob', gender: 'male')
    assert_equal 'male', person.gender
  end

  def test_person_equality_based_on_id
    person1 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob')
    person2 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob', gender: 'male')
    assert_equal person1, person2
  end

  def test_person_equality_based_on_class
    person1 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob')
    organization = Everypolitician::Popolo::Organization.new(id: '123')
    refute_equal person1, organization
  end

  def test_persons_subtraction
    person1 = { id: '123', name: 'Alice' }
    person2 = { id: '456', name: 'Bob', gender: 'male' }
    all_people = Everypolitician::Popolo::People.new([person1, person2])
    just_person_1 = Everypolitician::Popolo::People.new([person1])
    assert_equal [Everypolitician::Popolo::Person.new(person2)], all_people - just_person_1
  end

  def test_honorific_prefix
    person1 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob', honorific_prefix: 'Dr')
    person2 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob')
    assert_equal 'Dr', person1.honorific_prefix
    assert_nil person2.honorific_prefix
  end

  def test_honorific_suffix
    person1 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob', honorific_suffix: 'PhD')
    person2 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob')
    assert_equal 'PhD', person1.honorific_suffix
    assert_nil person2.honorific_suffix
  end

  def test_person_memberships
    popolo = Everypolitician::Popolo::JSON.new(persons: [{ id: '123', name: 'Bob' }], memberships: [{ person_id: '123', start_date: '2016-01-01' }])
    memberships = popolo.persons.first.memberships
    assert_equal 1, memberships.size
    assert_equal '2016-01-01', memberships.first.start_date
  end
end
split wikidata and no-wikidata tests
require 'test_helper'

# Tests for Everypolitician::Popolo::Person and the People collection:
# attribute accessors, contact details, social-media links, identifiers,
# equality semantics, subtraction, and membership lookup.
#
# FIX: `assert_equal nil, x` is deprecated in Minitest 5 and raises in
# Minitest 6 — replaced throughout with `assert_nil x`.
class PersonTest < Minitest::Test
  def test_reading_popolo_people
    popolo = Everypolitician::Popolo::JSON.new(persons: [{ id: '123', name: 'Bob' }])
    assert_instance_of Everypolitician::Popolo::People, popolo.persons
    person = popolo.persons.first
    assert_instance_of Everypolitician::Popolo::Person, person
  end

  def test_no_persons_in_popolo_data
    popolo = Everypolitician::Popolo::JSON.new(other_data: [{ id: '123', foo: 'Bar' }])
    assert_equal true, popolo.persons.none?
  end

  def test_accessing_person_properties
    popolo = Everypolitician::Popolo::JSON.new(persons: [{ id: '123', name: 'Bob' }])
    person = popolo.persons.first
    assert person.key?(:id)
    assert_equal '123', person[:id]
  end

  def test_person_twitter_contact_details
    person = Everypolitician::Popolo::Person.new(
      contact_details: [{ type: 'twitter', value: 'bob' }]
    )
    assert_equal 'bob', person.twitter
  end

  def test_person_twitter_links
    person = Everypolitician::Popolo::Person.new(
      links: [{ note: 'twitter', url: 'https://twitter.com/bob' }]
    )
    assert_equal 'https://twitter.com/bob', person.twitter
  end

  def test_person_contact_details_and_twitter_links
    # When both contact details and links exist, the twitter link wins.
    person = Everypolitician::Popolo::Person.new(
      contact_details: [{ note: 'cell', value: '+1-555-555-0100' }],
      links: [{ note: 'twitter', url: 'https://twitter.com/bob' }]
    )
    assert_equal 'https://twitter.com/bob', person.twitter
  end

  def test_accessing_basic_person_attributes
    person = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob', other_names: [])
    assert_equal '123', person.id
    assert_equal 'Bob', person.name
    assert_equal [], person.other_names
  end

  def test_person_name_at
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_equal person.name_at('2016-01-11'), 'Bob'
    person = Everypolitician::Popolo::Person.new(
      name: 'Bob',
      other_names: [
        { name: 'Robert', start_date: '1989-01-01', end_date: '1999-12-31' },
      ]
    )
    assert_equal 'Robert', person.name_at('1990-06-01')
    # Overlapping date ranges are ambiguous and must raise.
    person = Everypolitician::Popolo::Person.new(
      name: 'Bob',
      other_names: [
        { name: 'Robert', start_date: '1989-01-01', end_date: '1999-12-31' },
        { name: 'Bobby', start_date: '1989-01-01', end_date: '2012-12-31' },
      ]
    )
    assert_raises Everypolitician::Popolo::Person::Error do
      person.name_at('1996-01-01')
    end
  end

  def test_person_facebook
    person = Everypolitician::Popolo::Person.new({})
    assert_nil person.facebook
    person = Everypolitician::Popolo::Person.new(
      links: [{ note: 'facebook', url: 'https://www.facebook.com/bob' }]
    )
    assert_equal 'https://www.facebook.com/bob', person.facebook
  end

  def test_person_identifier
    person = Everypolitician::Popolo::Person.new(
      identifiers: [
        { scheme: 'foo', identifier: 'bar' },
        { scheme: 'wikidata', identifier: 'zap' },
      ]
    )
    assert_equal 'bar', person.identifier('foo')
    assert_equal 'zap', person.identifier('wikidata')
  end

  def test_person_wikidata
    person = Everypolitician::Popolo::Person.new(
      identifiers: [{ scheme: 'wikidata', identifier: 'Q153149' }]
    )
    assert_equal 'Q153149', person.wikidata
  end

  def test_person_no_wikidata
    person = Everypolitician::Popolo::Person.new({})
    assert_nil person.wikidata
  end

  def test_person_contacts
    person = Everypolitician::Popolo::Person.new(
      contact_details: [
        { type: 'phone', value: '9304832' },
        { type: 'fax', value: '9304833' },
      ]
    )
    assert_equal '9304832', person.contact('phone')
    assert_equal '9304832', person.phone
    assert_equal '9304833', person.fax
  end

  def test_person_no_contacts
    person = Everypolitician::Popolo::Person.new({})
    assert_nil person.contact('phone')
    assert_nil person.phone
    assert_nil person.fax
  end

  def test_person_sort_name
    # sort_name falls back to name when not explicitly provided.
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_equal 'Bob', person.sort_name
    person = Everypolitician::Popolo::Person.new(name: 'Bob', sort_name: 'Robert')
    assert_equal 'Robert', person.sort_name
  end

  def test_person_email
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_nil person.email
    person = Everypolitician::Popolo::Person.new(name: 'Bob', email: 'bob@example.org')
    assert_equal 'bob@example.org', person.email
  end

  def test_person_image
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_nil person.image
    person = Everypolitician::Popolo::Person.new(name: 'Bob', image: 'http://example.org/img.jpeg')
    assert_equal 'http://example.org/img.jpeg', person.image
  end

  def test_person_gender
    person = Everypolitician::Popolo::Person.new(name: 'Bob')
    assert_nil person.gender
    person = Everypolitician::Popolo::Person.new(name: 'Bob', gender: 'male')
    assert_equal 'male', person.gender
  end

  def test_person_equality_based_on_id
    # Equality is keyed on id, not on the full attribute set.
    person1 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob')
    person2 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob', gender: 'male')
    assert_equal person1, person2
  end

  def test_person_equality_based_on_class
    person1 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob')
    organization = Everypolitician::Popolo::Organization.new(id: '123')
    refute_equal person1, organization
  end

  def test_persons_subtraction
    person1 = { id: '123', name: 'Alice' }
    person2 = { id: '456', name: 'Bob', gender: 'male' }
    all_people = Everypolitician::Popolo::People.new([person1, person2])
    just_person_1 = Everypolitician::Popolo::People.new([person1])
    assert_equal [Everypolitician::Popolo::Person.new(person2)], all_people - just_person_1
  end

  def test_honorific_prefix
    person1 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob', honorific_prefix: 'Dr')
    person2 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob')
    assert_equal 'Dr', person1.honorific_prefix
    assert_nil person2.honorific_prefix
  end

  def test_honorific_suffix
    person1 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob', honorific_suffix: 'PhD')
    person2 = Everypolitician::Popolo::Person.new(id: '123', name: 'Bob')
    assert_equal 'PhD', person1.honorific_suffix
    assert_nil person2.honorific_suffix
  end

  def test_person_memberships
    popolo = Everypolitician::Popolo::JSON.new(persons: [{ id: '123', name: 'Bob' }], memberships: [{ person_id: '123', start_date: '2016-01-01' }])
    memberships = popolo.persons.first.memberships
    assert_equal 1, memberships.size
    assert_equal '2016-01-01', memberships.first.start_date
  end
end
|
require 'test_helper'

# Controller tests for project folders: routing, member/non-member access
# control, folder deletion rules, default-folder creation, AJAX folder
# content display, and moving assets between folders and assays.
class FoldersControllerTest < ActionController::TestCase
  include AuthenticatedTestHelper

  def setup
    @member = Factory :user
    @project = @member.person.projects.first
    login_as @member
  end

  test "routes" do
    assert_generates "/projects/1/folders", {:controller=>"folders",:action=>"index",:project_id=>"1"}
    assert_generates "/projects/1/folders/7", {:controller=>"folders",:action=>"show",:project_id=>"1",:id=>"7"}
  end

  test "access as member" do
    get :index,:project_id=>@project.id
    assert_response :success
  end

  test "delete" do
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
    folder = Factory :project_folder,:project_id=>@project.id
    folder.add_assets(sop)
    child = folder.add_child("fred")
    child.save!
    unsorted_folder = Factory :project_folder,:project_id=>@project.id,:incoming=>true
    # Deleting the folder removes it and its child; its assets move to the
    # incoming (unsorted) folder.
    assert_difference("ProjectFolder.count",-2) do
      delete :destroy, :id => folder.id,:project_id=>@project.id
    end
    assert_redirected_to :project_folders
    unsorted_folder.reload
    @project.reload
    assert_equal [unsorted_folder],ProjectFolder.find(:all,:conditions=>{:project_id=>@project.id})
    assert_equal [sop],unsorted_folder.assets
  end

  test "cannot delete if not deletable" do
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
    folder = Factory :project_folder,:project_id=>@project.id,:deletable=>false
    folder.add_assets(sop)
    child = folder.add_child("fred")
    child.save!
    unsorted_folder = Factory :project_folder,:project_id=>@project.id,:incoming=>true
    assert_no_difference("ProjectFolder.count") do
      delete :destroy, :id => folder.id,:project_id=>@project.id
    end
    assert_redirected_to :project_folders
    assert_not_nil flash[:error]
    unsorted_folder.reload
    folder.reload
    @project.reload
    assert_equal [folder,child,unsorted_folder],ProjectFolder.find(:all,:conditions=>{:project_id=>@project.id}).sort_by(&:id)
    assert_equal [],unsorted_folder.assets
    assert_equal [sop],folder.assets
  end

  test "cannot delete other project" do
    project = Factory :project
    sop = Factory :sop, :project_ids=>[project.id],:policy=>Factory(:public_policy)
    folder = Factory :project_folder,:project_id=>project.id
    folder.add_assets(sop)
    child = folder.add_child("fred")
    child.save!
    unsorted_folder = Factory :project_folder,:project_id=>project.id,:incoming=>true
    assert_no_difference("ProjectFolder.count") do
      delete :destroy, :id => folder,:project_id=>project.id
    end
    assert_redirected_to :root
    unsorted_folder.reload
    project.reload
    assert_equal [folder,child,unsorted_folder],ProjectFolder.find(:all,:conditions=>{:project_id=>project.id}).sort_by(&:id)
    assert_equal [],unsorted_folder.assets
    assert_equal [sop],folder.assets
  end

  test "defaults created and old items assigned" do
    # FIX: was '@projec.id' — @projec is an undefined instance variable (nil),
    # so this line raised NoMethodError instead of exercising the controller.
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
    private_sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:private_policy)
    sop2 = Factory :sop, :project_ids=>[Factory(:project).id],:policy=>Factory(:public_policy)
    assert ProjectFolder.root_folders(@project).empty?
    assert_difference("ProjectFolderAsset.count",2) do
      get :index,:project_id=>@project.id
    end
    assert_response :success
    @project.reload
    assert !ProjectFolder.root_folders(@project).empty?
    assert_equal 2, ProjectFolder.new_items_folder(@project).assets.count
    assert ProjectFolder.new_items_folder(@project).assets.include?(sop)
    assert ProjectFolder.new_items_folder(@project).assets.include?(private_sop)
    assert !ProjectFolder.new_items_folder(@project).assets.include?(sop2)
  end

  test "defaults not created if exist" do
    folder=Factory :project_folder,:project=>@project
    assert_equal 1,ProjectFolder.root_folders(@project).count
    assert_no_difference("ProjectFolder.count") do
      get :index,:project_id=>@project.id
    end
    assert_response :success
    assert_equal [folder],ProjectFolder.root_folders(@project)
  end

  test "blocked access as non member" do
    login_as(Factory(:user))
    get :index,:project_id=>@project.id
    assert_redirected_to root_path
    assert_not_nil flash[:error]
  end

  test "should not show when logged out" do
    logout
    get :index,:project_id=>@project.id
    assert_redirected_to login_path
  end

  test "ajax request for folder contents" do
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
    folder = Factory :project_folder, :project_id=>@project.id
    folder.add_assets(sop)
    folder.save!
    xhr(:post,:display_contents,{:id=>folder.id,:project_id=>folder.project.id})
    assert_response :success
    assert @response.body.match(/Description.*Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF/)
  end

  test "ajax request for assay folder contents" do
    assay = Factory :experimental_assay,:contributor=>@member.person,:policy=>Factory(:public_policy),:title=>"Yp50U6BjlacF0r7HY5WXHEOP8E2UqXcv",:description=>"5Kx0432X6IbuzBi25BIi0OdY1xo4FRG3"
    assay.study.investigation.projects=[@project]
    assay.study.investigation.save!
    assert assay.can_view?
    xhr(:post,:display_contents,{:id=>"Assay_#{assay.id}",:project_id=>@project.id})
    assert_response :success
    assert @response.body.match(/Yp50U6BjlacF0r7HY5WXHEOP8E2UqXcv/)
    assert @response.body.match(/5Kx0432X6IbuzBi25BIi0OdY1xo4FRG3/)
  end

  test "ajax request for hidden assay folder contents fails" do
    assay = Factory :experimental_assay,:policy=>Factory(:private_policy),:title=>"Yp50U6BjlacF0r7HY5WXHEOP8E2UqXcv",:description=>"5Kx0432X6IbuzBi25BIi0OdY1xo4FRG3"
    assay.study.investigation.projects=[@project]
    assay.study.investigation.save!
    assert !assay.can_view?
    xhr(:post,:display_contents,{:id=>"Assay_#{assay.id}",:project_id=>@project.id})
    assert_redirected_to root_path
    assert !@response.body.match(/Yp50U6BjlacF0r7HY5WXHEOP8E2UqXcv/)
    assert !@response.body.match(/5Kx0432X6IbuzBi25BIi0OdY1xo4FRG3/)
  end

  test "ajax request for folder contents rejected from non project member" do
    login_as Factory(:user)
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
    folder = Factory :project_folder, :project_id=>@project.id
    folder.add_assets(sop)
    folder.save!
    xhr(:post,:display_contents,{:id=>folder.id,:project_id=>folder.project.id})
    assert_redirected_to root_path
    assert @response.body.match(/Description.*Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF/).nil?
  end

  test "move between folders" do
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
    folder = Factory :project_folder, :project_id=>@project.id
    other_folder = Factory :project_folder, :project_id=>@project.id
    folder.add_assets(sop)
    folder.save!
    xhr(:post,:move_asset_to,{:asset_id=>sop.id,:asset_type=>"Sop",:id=>folder.id,:dest_folder_id=>other_folder.id,:project_id=>folder.project.id})
    assert_response :success
    sop.reload
    other_folder.reload
    folder.reload
    assert_equal [other_folder],sop.folders
    assert_equal [],folder.assets
    assert_equal [sop],other_folder.assets
  end

  test "move asset to assay" do
    assay = Factory :experimental_assay,:contributor=>@member.person,:policy=>Factory(:public_policy)
    assay.study.investigation.projects=[@project]
    assay.study.investigation.save!
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
    folder = Factory :project_folder, :project_id=>@project.id
    folder.add_assets(sop)
    folder.save!
    # Moving into an assay relates the asset to the assay but leaves it in
    # its original folder.
    assert_difference("AssayAsset.count") do
      xhr(:post,:move_asset_to,{:asset_id=>sop.id,
                                :asset_type=>"Sop",
                                :id=>folder.id,
                                :dest_folder_id=>"Assay_#{assay.id}",
                                :project_id=>folder.project.id,
                                :orig_folder_element_id=>"sdfhsdk",
                                :dest_folder_element_id=>"oosdo"})
    end
    assert_response :success
    assay.reload
    assert_equal [sop],assay.assets.collect{|a| a.parent}
    assert_equal [sop],folder.assets
  end

  test "remove asset from assay" do
    assay = Factory :experimental_assay,:contributor=>@member.person,:policy=>Factory(:public_policy)
    assay.study.investigation.projects=[@project]
    assay.study.investigation.save!
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
    assay.relate(sop)
    folder = Seek::AssayFolder.new assay,@project
    assert_difference("AssayAsset.count",-1) do
      xhr(:post,:remove_asset,{:asset_id=>sop.id,:asset_type=>"Sop",:id=>folder.id,:project_id=>folder.project.id,:orig_folder_element_id=>"sdfhsdk"})
    end
    assay.reload
    assert_equal [],assay.assets
    assert_equal [],folder.assets
  end

  test "cannot move to other project folder" do
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
    folder = Factory :project_folder, :project_id=>@project.id
    other_folder = Factory :project_folder, :project_id=>Factory(:project).id
    folder.add_assets(sop)
    folder.save!
    xhr(:post,:move_asset_to,{:asset_id=>sop.id,:asset_type=>"Sop",:id=>folder.id,:dest_folder_id=>other_folder.id,:project_id=>folder.project.id})
    assert_response :success
    sop.reload
    other_folder.reload
    folder.reload
    assert_equal [folder],sop.folders
    assert_equal [],other_folder.assets
    assert_equal [sop],folder.assets
  end

  test "create a new child folder" do
    folder = Factory :project_folder, :project=>@project
    assert_difference("ProjectFolder.count") do
      xhr(:post,:create_folder,{:project_id=>@project.id,:id=>folder.id,:title=>"fred"})
    end
    assert_response :success
  end

  test "authorization on assets" do
    sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
    hidden_sop = Factory :sop,:project_ids=>[@project.id],:policy=>Factory(:private_policy),:description=>"viu2q6ng3iZ0ppS5X679pPo11LfF62pS"
    folder = Factory :project_folder, :project_id=>@project.id
    disable_authorization_checks do
      folder.add_assets([sop,hidden_sop])
      folder.save!
    end
    xhr(:post,:display_contents,{:id=>folder.id,:project_id=>folder.project.id})
    assert_response :success
    assert @response.body.match(/Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF/)
    assert @response.body.match(/viu2q6ng3iZ0ppS5X679pPo11LfF62pS/).nil?
  end

  test "display with assays" do
    assay = Factory :experimental_assay,:contributor=>@member.person,:policy=>Factory(:public_policy)
    assay.study.investigation.projects=[@project]
    assay.study.investigation.save!
    get :index,:project_id=>@project.id
    assert_response :success
  end

  test 'breadcrumb for project folder' do
    get :index,:project_id=>@project.id
    assert_response :success
    assert_select 'div.breadcrumbs', :text => /Home > Projects Index > #{@project.title} > Folders Index/, :count => 1 do
      assert_select "a[href=?]", root_path, :count => 1
      assert_select "a[href=?]", projects_url, :count => 1
      assert_select "a[href=?]", project_url(@project), :count => 1
    end
  end
end
Fix a typo in FoldersControllerTest
require 'test_helper'
# Controller tests for project folders: routing, member/non-member access
# control, folder deletion rules, default-folder creation, AJAX folder
# content display, and moving assets between folders and assays.
class FoldersControllerTest < ActionController::TestCase
include AuthenticatedTestHelper
# Log in as a member of the project under test.
def setup
@member = Factory :user
@project = @member.person.projects.first
login_as @member
end
test "routes" do
assert_generates "/projects/1/folders", {:controller=>"folders",:action=>"index",:project_id=>"1"}
assert_generates "/projects/1/folders/7", {:controller=>"folders",:action=>"show",:project_id=>"1",:id=>"7"}
end
test "access as member" do
get :index,:project_id=>@project.id
assert_response :success
end
# Deleting a folder removes it and its child; its assets move to the
# incoming (unsorted) folder.
test "delete" do
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
folder = Factory :project_folder,:project_id=>@project.id
folder.add_assets(sop)
child = folder.add_child("fred")
child.save!
unsorted_folder = Factory :project_folder,:project_id=>@project.id,:incoming=>true
assert_difference("ProjectFolder.count",-2) do
delete :destroy, :id => folder.id,:project_id=>@project.id
end
assert_redirected_to :project_folders
unsorted_folder.reload
@project.reload
assert_equal [unsorted_folder],ProjectFolder.find(:all,:conditions=>{:project_id=>@project.id})
assert_equal [sop],unsorted_folder.assets
end
# Folders flagged :deletable=>false must survive a destroy attempt.
test "cannot delete if not deletable" do
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
folder = Factory :project_folder,:project_id=>@project.id,:deletable=>false
folder.add_assets(sop)
child = folder.add_child("fred")
child.save!
unsorted_folder = Factory :project_folder,:project_id=>@project.id,:incoming=>true
assert_no_difference("ProjectFolder.count") do
delete :destroy, :id => folder.id,:project_id=>@project.id
end
assert_redirected_to :project_folders
assert_not_nil flash[:error]
unsorted_folder.reload
folder.reload
@project.reload
assert_equal [folder,child,unsorted_folder],ProjectFolder.find(:all,:conditions=>{:project_id=>@project.id}).sort_by(&:id)
assert_equal [],unsorted_folder.assets
assert_equal [sop],folder.assets
end
# A member of one project cannot delete folders belonging to another project.
test "cannot delete other project" do
project = Factory :project
sop = Factory :sop, :project_ids=>[project.id],:policy=>Factory(:public_policy)
folder = Factory :project_folder,:project_id=>project.id
folder.add_assets(sop)
child = folder.add_child("fred")
child.save!
unsorted_folder = Factory :project_folder,:project_id=>project.id,:incoming=>true
assert_no_difference("ProjectFolder.count") do
delete :destroy, :id => folder,:project_id=>project.id
end
assert_redirected_to :root
unsorted_folder.reload
project.reload
assert_equal [folder,child,unsorted_folder],ProjectFolder.find(:all,:conditions=>{:project_id=>project.id}).sort_by(&:id)
assert_equal [],unsorted_folder.assets
assert_equal [sop],folder.assets
end
# First visit to the index creates the default folder tree and assigns
# existing project assets (regardless of visibility) to the new-items folder.
test "defaults created and old items assigned" do
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
private_sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:private_policy)
sop2 = Factory :sop, :project_ids=>[Factory(:project).id],:policy=>Factory(:public_policy)
assert ProjectFolder.root_folders(@project).empty?
assert_difference("ProjectFolderAsset.count",2) do
get :index,:project_id=>@project.id
end
assert_response :success
@project.reload
assert !ProjectFolder.root_folders(@project).empty?
assert_equal 2, ProjectFolder.new_items_folder(@project).assets.count
assert ProjectFolder.new_items_folder(@project).assets.include?(sop)
assert ProjectFolder.new_items_folder(@project).assets.include?(private_sop)
assert !ProjectFolder.new_items_folder(@project).assets.include?(sop2)
end
test "defaults not created if exist" do
folder=Factory :project_folder,:project=>@project
assert_equal 1,ProjectFolder.root_folders(@project).count
assert_no_difference("ProjectFolder.count") do
get :index,:project_id=>@project.id
end
assert_response :success
assert_equal [folder],ProjectFolder.root_folders(@project)
end
test "blocked access as non member" do
login_as(Factory(:user))
get :index,:project_id=>@project.id
assert_redirected_to root_path
assert_not_nil flash[:error]
end
test "should not show when logged out" do
logout
get :index,:project_id=>@project.id
assert_redirected_to login_path
end
test "ajax request for folder contents" do
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
folder = Factory :project_folder, :project_id=>@project.id
folder.add_assets(sop)
folder.save!
xhr(:post,:display_contents,{:id=>folder.id,:project_id=>folder.project.id})
assert_response :success
assert @response.body.match(/Description.*Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF/)
end
test "ajax request for assay folder contents" do
assay = Factory :experimental_assay,:contributor=>@member.person,:policy=>Factory(:public_policy),:title=>"Yp50U6BjlacF0r7HY5WXHEOP8E2UqXcv",:description=>"5Kx0432X6IbuzBi25BIi0OdY1xo4FRG3"
assay.study.investigation.projects=[@project]
assay.study.investigation.save!
assert assay.can_view?
xhr(:post,:display_contents,{:id=>"Assay_#{assay.id}",:project_id=>@project.id})
assert_response :success
assert @response.body.match(/Yp50U6BjlacF0r7HY5WXHEOP8E2UqXcv/)
assert @response.body.match(/5Kx0432X6IbuzBi25BIi0OdY1xo4FRG3/)
end
test "ajax request for hidden assay folder contents fails" do
assay = Factory :experimental_assay,:policy=>Factory(:private_policy),:title=>"Yp50U6BjlacF0r7HY5WXHEOP8E2UqXcv",:description=>"5Kx0432X6IbuzBi25BIi0OdY1xo4FRG3"
assay.study.investigation.projects=[@project]
assay.study.investigation.save!
assert !assay.can_view?
xhr(:post,:display_contents,{:id=>"Assay_#{assay.id}",:project_id=>@project.id})
assert_redirected_to root_path
assert !@response.body.match(/Yp50U6BjlacF0r7HY5WXHEOP8E2UqXcv/)
assert !@response.body.match(/5Kx0432X6IbuzBi25BIi0OdY1xo4FRG3/)
end
test "ajax request for folder contents rejected from non project member" do
login_as Factory(:user)
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
folder = Factory :project_folder, :project_id=>@project.id
folder.add_assets(sop)
folder.save!
xhr(:post,:display_contents,{:id=>folder.id,:project_id=>folder.project.id})
assert_redirected_to root_path
assert @response.body.match(/Description.*Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF/).nil?
end
test "move between folders" do
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
folder = Factory :project_folder, :project_id=>@project.id
other_folder = Factory :project_folder, :project_id=>@project.id
folder.add_assets(sop)
folder.save!
xhr(:post,:move_asset_to,{:asset_id=>sop.id,:asset_type=>"Sop",:id=>folder.id,:dest_folder_id=>other_folder.id,:project_id=>folder.project.id})
assert_response :success
sop.reload
other_folder.reload
folder.reload
assert_equal [other_folder],sop.folders
assert_equal [],folder.assets
assert_equal [sop],other_folder.assets
end
# Moving into an assay relates the asset to the assay but leaves it in
# its original folder.
test "move asset to assay" do
assay = Factory :experimental_assay,:contributor=>@member.person,:policy=>Factory(:public_policy)
assay.study.investigation.projects=[@project]
assay.study.investigation.save!
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
folder = Factory :project_folder, :project_id=>@project.id
folder.add_assets(sop)
folder.save!
assert_difference("AssayAsset.count") do
xhr(:post,:move_asset_to,{:asset_id=>sop.id,
:asset_type=>"Sop",
:id=>folder.id,
:dest_folder_id=>"Assay_#{assay.id}",
:project_id=>folder.project.id,
:orig_folder_element_id=>"sdfhsdk",
:dest_folder_element_id=>"oosdo"})
end
assert_response :success
assay.reload
assert_equal [sop],assay.assets.collect{|a| a.parent}
assert_equal [sop],folder.assets
end
test "remove asset from assay" do
assay = Factory :experimental_assay,:contributor=>@member.person,:policy=>Factory(:public_policy)
assay.study.investigation.projects=[@project]
assay.study.investigation.save!
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy)
assay.relate(sop)
folder = Seek::AssayFolder.new assay,@project
assert_difference("AssayAsset.count",-1) do
xhr(:post,:remove_asset,{:asset_id=>sop.id,:asset_type=>"Sop",:id=>folder.id,:project_id=>folder.project.id,:orig_folder_element_id=>"sdfhsdk"})
end
assay.reload
assert_equal [],assay.assets
assert_equal [],folder.assets
end
test "cannot move to other project folder" do
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
folder = Factory :project_folder, :project_id=>@project.id
other_folder = Factory :project_folder, :project_id=>Factory(:project).id
folder.add_assets(sop)
folder.save!
xhr(:post,:move_asset_to,{:asset_id=>sop.id,:asset_type=>"Sop",:id=>folder.id,:dest_folder_id=>other_folder.id,:project_id=>folder.project.id})
assert_response :success
sop.reload
other_folder.reload
folder.reload
assert_equal [folder],sop.folders
assert_equal [],other_folder.assets
assert_equal [sop],folder.assets
end
test "create a new child folder" do
folder = Factory :project_folder, :project=>@project
assert_difference("ProjectFolder.count") do
xhr(:post,:create_folder,{:project_id=>@project.id,:id=>folder.id,:title=>"fred"})
end
assert_response :success
end
# Hidden (private) assets must not appear in the rendered folder contents.
test "authorization on assets" do
sop = Factory :sop, :project_ids=>[@project.id],:policy=>Factory(:public_policy),:description=>"Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF"
hidden_sop = Factory :sop,:project_ids=>[@project.id],:policy=>Factory(:private_policy),:description=>"viu2q6ng3iZ0ppS5X679pPo11LfF62pS"
folder = Factory :project_folder, :project_id=>@project.id
disable_authorization_checks do
folder.add_assets([sop,hidden_sop])
folder.save!
end
xhr(:post,:display_contents,{:id=>folder.id,:project_id=>folder.project.id})
assert_response :success
assert @response.body.match(/Ryz9z3Z9h70wzJ243we6k8RO5xI5f3UF/)
assert @response.body.match(/viu2q6ng3iZ0ppS5X679pPo11LfF62pS/).nil?
end
test "display with assays" do
assay = Factory :experimental_assay,:contributor=>@member.person,:policy=>Factory(:public_policy)
assay.study.investigation.projects=[@project]
assay.study.investigation.save!
get :index,:project_id=>@project.id
assert_response :success
end
test 'breadcrumb for project folder' do
get :index,:project_id=>@project.id
assert_response :success
assert_select 'div.breadcrumbs', :text => /Home > Projects Index > #{@project.title} > Folders Index/, :count => 1 do
assert_select "a[href=?]", root_path, :count => 1
assert_select "a[href=?]", projects_url, :count => 1
assert_select "a[href=?]", project_url(@project), :count => 1
end
end
end
|
require 'test_helper'

# Tests for ReportsController: the per-user report rendered as HTML, PDF
# and CSV.
class ReportsControllerTest < ActionController::TestCase
  def setup
    login_as(:bob)
  end

  context "on GET to :user_report" do
    context "with html" do
      setup { get :user }
      should_respond_with :success
      should_not_set_the_flash
    end

    context "with format=pdf" do
      # FIX: the format was misspelled 'dpf', an unknown MIME type, which is
      # why Rails answered 406 Not Acceptable (the TODO noted the download
      # itself worked). With 'pdf' the response succeeds.
      setup { get :user, :format => 'pdf' }
      should_respond_with :success
    end

    context "with format=csv" do
      setup { get :user, :format => 'csv' }
      should_respond_with :success
    end
  end
end
added tests for all reports
require 'test_helper'
# Tests for ReportsController: exercises every report (user, activity,
# billing, hours) in each supported format (html, pdf, csv, text).
class ReportsControllerTest < ActionController::TestCase
def setup
login_as(:bob)
end
context "Accessing reports," do
# Map of report action => extra request params; the billing report needs
# an explicit month and year.
reports = { :user => {},:activity => {}, :billing => { :month=>7, :year=>2009},:hours => {} }
# Generate one context per report so all actions share the same
# format-specific assertions.
reports.each do |report, params|
context "on GET to #{report.to_s}" do
context "with html" do
setup { get report }
should_respond_with :success
should respond_with_content_type(:html)
should_not_set_the_flash
end
context "with format=pdf" do
setup { get report, {:format => 'pdf', :tag=> tags(:timeflux).id }.merge(params) }
should_respond_with :success
should respond_with_content_type(:pdf)
end
context "with format=csv" do
setup { get report, {:format => 'csv', :tag=> tags(:timeflux).id }.merge(params) }
should_respond_with :success
should respond_with_content_type(:csv)
end
context "with format=text" do
setup { get report, {:format => 'text', :tag=> tags(:timeflux).id}.merge(params) }
should_respond_with :success
should respond_with_content_type(:text)
end
end
end
end
end
|
require File.dirname(__FILE__) + '/../test_helper'
# Tests for the welcome (landing) page.
class WelcomeControllerTest < ActionController::TestCase
# Welcome page when not logged in
def test_index_without_login
get :index
assert_response :success
# The login form (input with id 'login') should be present
assert_tag :input, :attributes => {:id => 'login'}
end
end
git-svn-id: svn+ssh://ko.meadowy.net/home/svn/inside/kozuchi/trunk@543 c938ebee-df10-0410-95fc-be8c4b0ebdd5
require File.dirname(__FILE__) + '/../test_helper'
# Tests for the welcome (landing) page, with and without a logged-in user.
class WelcomeControllerTest < ActionController::TestCase
# Welcome page when not logged in
def test_index_without_login
get :index
assert_response :success
# The login form (input with id 'login') should be present
assert_tag :input, :attributes => {:id => 'login'}
end
# Welcome page when logged in (session carries the user's id)
def test_index_with_login
get :index, nil, :user_id => users(:quentin).id
assert_response :success
# The login form should be absent for a logged-in user
assert_no_tag :input, :attributes => {:id => 'login'}
end
end
|
require 'test_helper'

# Remote (live-sandbox) tests for the Elavon gateway: purchase, authorize,
# capture and credit, plus the corresponding failure paths.
class RemoteElavonTest < Test::Unit::TestCase
  def setup
    @gateway = ElavonGateway.new(fixtures(:elavon))
    # FIX: Elavon no longer accepts the default test card, so use the
    # '4222222222222' test number (matches the later upstream fix for the
    # failing remote tests).
    @credit_card = credit_card('4222222222222')
    @bad_credit_card = credit_card('invalid')
    @options = {
      :email => "paul@domain.com",
      :description => 'Test Transaction',
      :billing_address => address
    }
    @amount = 100
  end

  def test_successful_purchase
    assert response = @gateway.purchase(@amount, @credit_card, @options)
    assert_success response
    assert response.test?
    # FIX: Elavon changed the success message from 'APPROVED' to 'APPROVAL'.
    assert_equal 'APPROVAL', response.message
    assert response.authorization
  end

  def test_unsuccessful_purchase
    assert response = @gateway.purchase(@amount, @bad_credit_card, @options)
    assert_failure response
    assert response.test?
    assert_equal 'The Credit Card Number supplied in the authorization request appears to be invalid.', response.message
  end

  def test_authorize_and_capture
    assert auth = @gateway.authorize(@amount, @credit_card, @options)
    assert_success auth
    assert_equal 'APPROVAL', auth.message
    assert auth.authorization
    assert capture = @gateway.capture(@amount, auth.authorization, :credit_card => @credit_card)
    assert_success capture
  end

  def test_unsuccessful_capture
    assert response = @gateway.capture(@amount, '', :credit_card => @credit_card)
    assert_failure response
    assert_equal 'The FORCE Approval Code supplied in the authorization request appears to be invalid or blank. The FORCE Approval Code must be 6 or less alphanumeric characters.', response.message
  end

  def test_unsuccessful_authorization
    @credit_card.number = "1234567890123"
    assert response = @gateway.authorize(@amount, @credit_card, @options)
    assert_failure response
    assert_equal 'The Credit Card Number supplied in the authorization request appears to be invalid.', response.message
  end

  def test_purchase_and_credit
    assert purchase = @gateway.purchase(@amount, @credit_card, @options)
    assert_success purchase
    assert purchase.authorization
    assert credit = @gateway.credit(@amount, @credit_card, @options)
    assert_success credit
    assert credit.authorization
  end
end
Elavon: Fix remote tests
Elavon rejected the test card we were using, and they changed the
APPROVED message that comes back to APPROVAL.
Closes #439.
require 'test_helper'
# Remote (live-sandbox) tests for the Elavon gateway: purchase, authorize,
# capture and credit, plus the corresponding failure paths. Uses the
# '4222222222222' test card and expects Elavon's 'APPROVAL' success message.
class RemoteElavonTest < Test::Unit::TestCase
def setup
@gateway = ElavonGateway.new(fixtures(:elavon))
@credit_card = credit_card('4222222222222')
@bad_credit_card = credit_card('invalid')
@options = {
:email => "paul@domain.com",
:description => 'Test Transaction',
:billing_address => address
}
@amount = 100
end
def test_successful_purchase
assert response = @gateway.purchase(@amount, @credit_card, @options)
assert_success response
assert response.test?
assert_equal 'APPROVAL', response.message
assert response.authorization
end
def test_unsuccessful_purchase
assert response = @gateway.purchase(@amount, @bad_credit_card, @options)
assert_failure response
assert response.test?
assert_equal 'The Credit Card Number supplied in the authorization request appears to be invalid.', response.message
end
def test_authorize_and_capture
assert auth = @gateway.authorize(@amount, @credit_card, @options)
assert_success auth
assert_equal 'APPROVAL', auth.message
assert auth.authorization
assert capture = @gateway.capture(@amount, auth.authorization, :credit_card => @credit_card)
assert_success capture
end
# Capturing with a blank authorization must fail with Elavon's FORCE
# Approval Code error.
def test_unsuccessful_capture
assert response = @gateway.capture(@amount, '', :credit_card => @credit_card)
assert_failure response
assert_equal 'The FORCE Approval Code supplied in the authorization request appears to be invalid or blank. The FORCE Approval Code must be 6 or less alphanumeric characters.', response.message
end
def test_unsuccessful_authorization
@credit_card.number = "1234567890123"
assert response = @gateway.authorize(@amount, @credit_card, @options)
assert_failure response
assert_equal 'The Credit Card Number supplied in the authorization request appears to be invalid.', response.message
end
def test_purchase_and_credit
assert purchase = @gateway.purchase(@amount, @credit_card, @options)
assert_success purchase
assert purchase.authorization
assert credit = @gateway.credit(@amount, @credit_card, @options)
assert_success credit
assert credit.authorization
end
end
|
Added missing test file.
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
require 'test_helper'
module Seahorse
  class Client
    # Specs for the basic HttpResponse value object: status code,
    # headers and body accessors, each settable via the constructor
    # or afterwards.
    describe HttpResponse do

      describe '#status_code' do

        it 'defaults to nil' do
          # must_be_nil instead of must_equal(nil): asserting equality
          # with nil is deprecated in Minitest.
          HttpResponse.new.status_code.must_be_nil
        end

        it 'can be set in the constructor' do
          HttpResponse.new(status_code: 200).status_code.must_equal(200)
        end

        # Fixed description typo: "mustable" -> "mutable".
        it 'is mutable' do
          resp = HttpResponse.new
          resp.status_code = 500
          resp.status_code.must_equal(500)
        end

      end

      describe '#headers' do

        it 'is a HeaderHash' do
          HttpResponse.new.headers.must_be_kind_of(HeaderHash)
        end

        # Fixed description typo: "a empty" -> "an empty".
        it 'defaults to an empty hash' do
          HttpResponse.new.headers.to_h.must_equal({})
        end

        it 'can be set in the constructor' do
          headers = HeaderHash.new
          response = HttpResponse.new(headers: headers)
          response.headers.must_be_same_as(headers)
        end

        it 'can be set' do
          headers = HeaderHash.new
          response = HttpResponse.new
          response.headers = headers
          response.headers.must_be_same_as(headers)
        end

      end

      describe '#body' do

        it 'defaults to an empty string' do
          HttpResponse.new.body.must_equal('')
        end

        it 'can be set in the constructor' do
          HttpResponse.new(body: 'body').body.must_equal('body')
        end

        it 'can be set' do
          response = HttpResponse.new
          response.body = 'body'
          response.body.must_equal('body')
        end

      end
    end
  end
end
|
require 'test_helper'
class UltimatepayUgcTest < Test::Unit::TestCase
  # Unit tests for the UltimatePay UGC gateway; ssl_post is stubbed so
  # no network access occurs.
  def setup
    @gateway = UltimatepayUgcGateway.new(
      :merchant_code => 'UC04',
      :login => 'login',
      :password => 'password',
      :secret_phrase => '5ebe2294ecd0e0f08eab7690d2a6ee69'
    )

    @authorize_options = {
      :user_id => 1,
      :username => 'shredmasterfresh',
      :ugc_pin => '99999999999'
    }
  end

  def test_valid_login?
    assert !@gateway.valid_login?('login', 'wrong')
    assert !@gateway.valid_login?('wrong', 'password')
    assert @gateway.valid_login?('login', 'password')
  end

  def test_successful_authorize
    @gateway.expects(:ssl_post).returns(successful_authorize_response)

    assert response = @gateway.authorize(@authorize_options)
    assert_instance_of Response, response
    assert_success response

    # Authorization token comes straight from the stubbed response.
    assert_equal 'BMhFnN4SohBrWODtHZn62GTAx3tm11SVWldvoE1Ulpc', response.authorization
    assert_equal 5.0, response.params['value']
    assert_equal 'USD', response.params['currency']
    assert response.test?
  end

  def test_unsuccessful_authorize
    @gateway.expects(:ssl_post).returns(failed_authorize_response)

    assert response = @gateway.authorize(@authorize_options)
    assert_instance_of Response, response
    assert_failure response
    assert response.test?
  end

  private

  # Canned gateway responses (urlencoded key=value pairs).
  def successful_authorize_response
    [
      'token=BMhFnN4SohBrWODtHZn62GTAx3tm11SVWldvoE1Ulpc',
      'result=auth',
      'status=active',
      'value=5',
      'currency=USD'
    ].join('&')
  end

  # Fix: this helper was defined twice with identical bodies; the second
  # definition silently overwrote the first, so one copy is removed.
  def failed_authorize_response
    'token=6h85pxerDfjwRMSmIAae3lXLXbkE87xjZxy9ytE1bW&result=failed&errorDetail=ugc_pin'
  end

  # Fixed method-name typo: succesful -> successful. Unused by the tests
  # in this revision.
  def successful_commit_response
    "token=iONyv5B13mKRTNtuYHxvV6wUuHz8fZrt9uKcGW90dJx&result=paid"
  end
end
Switch purchase method to authorize and change required values/expectations
require 'test_helper'
class UltimatepayUgcTest < Test::Unit::TestCase
  # Unit tests for the UltimatePay UGC gateway covering authorize and
  # capture; ssl_post is stubbed, so no network access occurs.
  def setup
    @gateway = UltimatepayUgcGateway.new(
      :merchant_code => 'UC04',
      :login => 'login',
      :password => 'password',
      :secret_phrase => '5ebe2294ecd0e0f08eab7690d2a6ee69'
    )

    @authorize_options = {
      :user_id => 1,
      :username => 'shredmasterfresh',
      :ugc_pin => '99999999999'
    }

    @capture_options = {
      :token => 'iONyv5B13mKRTNtuYHxvV6wUuHz8fZrt9uKcGW90dJx',
      :ugc_pin => '99999999999'
    }
  end

  def test_valid_login?
    assert !@gateway.valid_login?('login', 'wrong')
    assert !@gateway.valid_login?('wrong', 'password')
    assert @gateway.valid_login?('login', 'password')
  end

  def test_successful_authorize
    @gateway.expects(:ssl_post).returns(successful_authorize_response)

    resp = @gateway.authorize(@authorize_options)
    assert resp
    assert_instance_of Response, resp
    assert_success resp
    # Token, value and currency are parsed out of the canned response.
    assert_equal 'BMhFnN4SohBrWODtHZn62GTAx3tm11SVWldvoE1Ulpc', resp.authorization
    assert_equal 5.0, resp.params['value']
    assert_equal 'USD', resp.params['currency']
    assert resp.test?
  end

  def test_unsuccessful_authorize
    @gateway.expects(:ssl_post).returns(failed_authorize_response)

    resp = @gateway.authorize(@authorize_options)
    assert resp
    assert_instance_of Response, resp
    assert_failure resp
    assert resp.test?
  end

  def test_successful_capture
    @gateway.expects(:ssl_post).returns(successful_capture_response)

    resp = @gateway.capture(@capture_options)
    assert resp
    assert_instance_of Response, resp
    assert_success resp
    assert_equal 'BMhFnN4SohBrWODtHZn62GTAx3tm11SVWldvoE1Ulpc', resp.authorization
    assert resp.test?
  end

  def test_unsuccessful_capture
    @gateway.expects(:ssl_post).returns(failed_capture_response)

    resp = @gateway.capture(@capture_options)
    assert resp
    assert_instance_of Response, resp
    assert_failure resp
    assert resp.test?
  end

  private

  # Canned gateway responses (urlencoded key=value pairs).
  def successful_authorize_response
    [
      'token=BMhFnN4SohBrWODtHZn62GTAx3tm11SVWldvoE1Ulpc',
      'result=auth',
      'status=active',
      'value=5',
      'currency=USD'
    ].join('&')
  end

  def failed_authorize_response
    'token=6h85pxerDfjwRMSmIAae3lXLXbkE87xjZxy9ytE1bW&result=failed&errorDetail=ugc_pin'
  end

  def successful_capture_response
    "token=BMhFnN4SohBrWODtHZn62GTAx3tm11SVWldvoE1Ulpc&result=paid"
  end

  def failed_capture_response
    'token=6h85pxerDfjwRMSmIAae3lXLXbkE87xjZxy9ytE1bW&result=failed&errorDetail=ugc_pin'
  end
end
|
require 'test_helper'
class Superstore::Types::IntegerRangeTypeTest < Superstore::Types::TestCase
  # Open-ended bounds round-trip as nil in the encoded [min, max] pair.
  test 'encode' do
    assert_equal [4, 5], type.encode(4..5)
    assert_equal [4, nil], type.encode(4..Float::INFINITY)
    assert_equal [nil, 5], type.encode(-Float::INFINITY..5)
  end

  test 'decode' do
    assert_equal 4..5, type.decode([4, 5])
    assert_equal 4..Float::INFINITY, type.decode([4, nil])
    # Parenthesized: a bare leading unary minus on the first argument
    # triggers Ruby's "ambiguous first argument" warning.
    assert_equal((-Float::INFINITY..5), type.decode([nil, 5]))
  end

  test 'typecast' do
    assert_equal 1..5, type.typecast(1..5)
    assert_equal 1..5, type.typecast([1, 5])
    assert_equal 1..Float::INFINITY, type.typecast([1, nil])
    assert_equal((-Float::INFINITY..2), type.typecast([nil, 2]))
  end
end
Add a few more edge case tests.
1) Because Ruby supports max..min style ranges, some normalization is
necessary.
2) Ensure that [nil, nil] is handled. This is a valid postgres range
that could be decoded
require 'test_helper'
class Superstore::Types::IntegerRangeTypeTest < Superstore::Types::TestCase
  # Open-ended bounds round-trip as nil; reversed ranges are normalized
  # on encode/typecast, and [nil, nil] is a valid fully-open range.
  test 'encode' do
    assert_equal [4, 5], type.encode(4..5)
    assert_equal [4, nil], type.encode(4..Float::INFINITY)
    assert_equal [nil, 5], type.encode(-Float::INFINITY..5)
    # Reversed (max..min) ranges are normalized to min-first.
    assert_equal [0, 20], type.encode(20..0)
  end

  test 'decode' do
    assert_equal 4..5, type.decode([4, 5])
    assert_equal 4..Float::INFINITY, type.decode([4, nil])
    # Parenthesized: a bare leading unary minus on the first argument
    # triggers Ruby's "ambiguous first argument" warning.
    assert_equal((-Float::INFINITY..5), type.decode([nil, 5]))
    assert_equal((-Float::INFINITY..Float::INFINITY), type.decode([nil, nil]))
  end

  test 'typecast' do
    assert_equal 1..5, type.typecast(1..5)
    assert_equal 1..5, type.typecast(5..1)
    assert_equal 1..5, type.typecast([1, 5])
    assert_equal 1..Float::INFINITY, type.typecast([1, nil])
    assert_equal((-Float::INFINITY..2), type.typecast([nil, 2]))
    assert_equal((-Float::INFINITY..Float::INFINITY), type.typecast([nil, nil]))
  end
end
|
#!/usr/bin/ruby
require 'minitest'
require 'minitest/reporters'
require "minitest/autorun"
Minitest::Reporters.use! [Minitest::Reporters::SpecReporter.new(:color => true)]
require 'securerandom'
require_relative 'pubsub.rb'
SERVER=ENV["PUSHMODULE_SERVER"] || "127.0.0.1"
PORT=ENV["PUSHMODULE_PORT"] || "8082"
DEFAULT_CLIENT=:longpoll
#Typhoeus::Config.verbose = true
# Random 6-character base-36 identifier (derived from a SecureRandom hex
# string).
def short_id
  SecureRandom.hex.to_i(16).to_s(36)[0, 6]
end
# Full server URL for the given path fragment; a single leading slash on
# the fragment is tolerated and stripped.
def url(part="")
  part = part.sub(%r{\A/}, "")
  "http://#{SERVER}:#{PORT}/#{part}"
end
puts "Server at #{url}"
# Builds a Publisher/Subscriber pair for a (usually random) channel.
#
# concurrent_clients - number of concurrent subscriber clients.
# opt - :sub / :pub URL prefixes, :channel id, :timeout, :use_message_id,
#       :gzip, :retry_delay, :client.
#
# The calling test's method name is appended as ?test=... so requests are
# attributable in server logs. Returns [pub, sub].
# Fix: removed the dead local `urlpart` (opt[:urlpart] was read but the
# value was never used anywhere).
def pubsub(concurrent_clients=1, opt={})
  test_name = caller_locations(1,1)[0].label
  timeout = opt[:timeout]
  sub_url=opt[:sub] || "sub/broadcast/"
  pub_url=opt[:pub] || "pub/"
  chan_id = opt[:channel] || SecureRandom.hex
  sub = Subscriber.new url("#{sub_url}#{chan_id}?test=#{test_name}"), concurrent_clients, timeout: timeout, use_message_id: opt[:use_message_id], quit_message: 'FIN', gzip: opt[:gzip], retry_delay: opt[:retry_delay], client: opt[:client] || DEFAULT_CLIENT
  pub = Publisher.new url("#{pub_url}#{chan_id}?test=#{test_name}")
  return pub, sub
end
# Asserts that a subscriber saw exactly the messages the publisher sent,
# and that all concurrent clients saw each message.
def verify(pub, sub, check_errors=true)
  if check_errors
    assert sub.errors.empty?, "There were subscriber errors: \r\n#{sub.errors.join "\r\n"}"
  end
  matched, err = sub.messages.matches?(pub.messages)
  assert matched, err || "Messages don't match"
  idx = 0
  sub.messages.each do |msg|
    assert_equal sub.concurrency, msg.times_seen, "Concurrent subscribers didn't all receive message #{idx}."
    idx += 1
  end
end
class PubSubTest < Minitest::Test
# Integration tests for the push module's pub/sub endpoints. They talk to
# a live server at SERVER:PORT and rely on sleeps for timing, so they
# assume a local, responsive server.
def setup
# Subscriber/Publisher helpers are Celluloid actors; boot the runtime.
Celluloid.boot
end
# Interval-polling client: expects 404 before anything is published, 304
# (with matching Last-Modified/Etag) when no new message is ready, and
# delivery once data is published.
def test_interval_poll
pub, sub=pubsub 1, sub: "/sub/intervalpoll/", client: :intervalpoll, quit_message: 'FIN', retry_delay: 0.2
ws_sub=Subscriber.new(sub.url, 1, client: :websocket, quit_message: 'FIN')
sub.on_failure do |resp|
assert_equal resp.code, 404
false
end
#ws_sub.run
sub.run
sub.wait
sub.abort
sub.reset
sleep 0.4
# NOTE(review): ws_sub.run is commented out above, so this assertion
# runs against a subscriber that never connected -- confirm intent.
assert ws_sub.match_errors(/code 403/), "expected 403 for all subscribers, got #{sub.errors.pretty_inspect}"
ws_sub.terminate
pub.post ["hello this", "is a thing"]
sleep 0.3
pub.post ["oh now what", "is this even a thing?"]
sleep 0.1
sub.on_failure do |resp|
assert_equal resp.code, 304
assert_equal resp.headers["Last-Modified"], sub.client.last_modified, "304 not ready should have the same last-modified header as last msg"
assert_equal resp.headers["Etag"], sub.client.etag, "304 not ready should have the same Etag header as last msg"
false
end
sub.run
sub.wait
#should get a 304 at this point
sub.abort
sub.reset
pub.post "FIN"
sleep 2
sub.run
sub.wait
verify pub, sub
sub.terminate
end
# Channel status endpoint: checks message/subscriber counts across the
# plain-text, JSON, XML and three YAML content types.
def test_channel_info
require 'json'
require 'nokogiri'
require 'yaml'
subs=20
chan=SecureRandom.hex
pub, sub = pubsub(subs, channel: chan)
pub.nofail=true
pub.get
assert_equal 404, pub.response_code
pub.post ["hello", "what is this i don't even"]
assert_equal 202, pub.response_code
pub.get
assert_equal 200, pub.response_code
assert_match /last requested: -?\d+ sec/, pub.response_body
pub.get "text/json"
info_json=JSON.parse pub.response_body
assert_equal 2, info_json["messages"]
#assert_equal 0, info_json["requested"]
assert_equal 0, info_json["subscribers"]
sub.run
sleep 1
pub.get "text/json"
info_json=JSON.parse pub.response_body
assert_equal 2, info_json["messages"]
#assert_equal 0, info_json["requested"]
assert_equal subs, info_json["subscribers"], "text/json subscriber count"
pub.get "text/xml"
ix = Nokogiri::XML pub.response_body
assert_equal 2, ix.at_xpath('//messages').content.to_i
#assert_equal 0, ix.at_xpath('//requested').content.to_i
assert_equal subs, ix.at_xpath('//subscribers').content.to_i
pub.get "text/yaml"
yaml_resp1=pub.response_body
pub.get "application/yaml"
yaml_resp2=pub.response_body
pub.get "application/x-yaml"
yaml_resp3=pub.response_body
yam=YAML.load pub.response_body
assert_equal 2, yam["messages"]
#assert_equal 0, yam["requested"]
assert_equal subs, yam["subscribers"]
assert_equal yaml_resp1, yaml_resp2
assert_equal yaml_resp2, yaml_resp3
pub.accept="text/json"
pub.post "FIN"
#stats right before FIN was issued
info_json=JSON.parse pub.response_body
assert_equal 3, info_json["messages"]
#assert_equal 0, info_json["requested"]
assert_equal subs, info_json["subscribers"]
sub.wait
pub.get "text/json"
info_json=JSON.parse pub.response_body
assert_equal 3, info_json["messages"], "number of messages received by channel is wrong"
#assert_equal 0, info_json["requested"]
assert_equal 0, info_json["subscribers"], "channel should say there are no subscribers"
sub.terminate
end
# Builds a /sub/multi/ URL from the given publishers' channel ids, in
# shuffled order.
def multi_sub_url(pubs)
ids = pubs.map{|v| v.id}.shuffle
"/sub/multi/#{ids.join '/'}"
end
# Pairs a channel id with a Publisher for that channel (used by the
# multi-channel test below).
class MultiCheck
attr_accessor :id, :pub
def initialize(id)
self.id = id
self.pub = Publisher.new url("/pub/#{self.id}")
end
end
# NOTE(review): ends in binding.pry (debug leftover) and makes no
# assertions; it requires the pry gem and will halt the test run.
# Presumably this should be disabled or finished -- confirm.
def test_multi_n(n=2)
pubs = []
n.times do |i|
pubs << MultiCheck.new(short_id)
end
n = 50
scrambles = 1
subs = []
scrambles.times do |i|
subs << Subscriber.new(url(multi_sub_url(pubs)), n, quit_message: 'FIN')
end
subs.each &:run
pubs.each {|p| p.pub.post "FIRST from #{p.id}" }
10.times do |i|
pubs.each {|p| p.pub.post "hello #{i} from #{p.id}" }
end
puts "yeeha"
#latesubs = Subscriber.new(url(multi_sub_url(pubs)), n, quit_message: 'FIN')
#subs << latesubs
#latesubs.run
pubs.first.pub.post "FIN"
subs.each &:wait
sleep 1
binding.pry
end
def test_message_delivery
pub, sub = pubsub
sub.run
sleep 0.2
assert_equal 0, sub.messages.messages.count
pub.post "hi there"
assert_equal 201, pub.response_code, "publisher response code"
sleep 0.2
assert_equal 1, sub.messages.messages.count, "received message count"
pub.post "FIN"
assert_equal 201, pub.response_code, "publisher response code"
sleep 0.2
assert_equal 2, sub.messages.messages.count, "recelived messages count"
assert sub.messages.matches? pub.messages
sub.terminate
end
def test_publish_then_subscribe
pub, sub = pubsub
pub.post "hi there"
sub.run
pub.post "FIN"
sub.wait
assert_equal 2, sub.messages.messages.count
assert sub.messages.matches? pub.messages
sub.terminate
end
def test_authorized_channels
#must be published to before subscribing
n=5
pub, sub = pubsub n, timeout: 6, sub: "sub/authorized/"
sub.on_failure { false }
sub.run
sleep 1
sub.wait
assert_equal n, sub.finished
assert sub.match_errors(/code 403/), "expected 403 for all subscribers, got #{sub.errors.pretty_inspect}"
sub.reset
pub.post %w( fweep )
assert_match /20[12]/, pub.response_code.to_s
sleep 0.1
sub.run
sleep 0.1
pub.post ["fwoop", "FIN"] { assert_match /20[12]/, pub.response_code.to_s }
sub.wait
verify pub, sub
sub.terminate
end
def test_deletion
#delete active channel
par=5
pub, sub = pubsub par, timeout: 10
sub.on_failure { false }
sub.run
sleep 0.2
pub.delete
sleep 0.1
assert_equal 200, pub.response_code
assert_equal par, pub.response_body.match(/subscribers:\s+(\d)/)[1].to_i, "subscriber count after deletion"
sub.wait
assert sub.match_errors(/code 410/), "Expected subscriber code 410: Gone, instead was \"#{sub.errors.first}\""
#delete channel with no subscribers
pub, sub = pubsub 5, timeout: 1
pub.post "hello"
assert_equal 202, pub.response_code
pub.delete
assert_equal 200, pub.response_code
#delete nonexistent channel
pub, sub = pubsub
pub.nofail=true
pub.delete
assert_equal 404, pub.response_code
end
# With the nobuffer publisher, messages posted before any subscriber
# connects must not be delivered.
def test_no_message_buffer
chan_id=SecureRandom.hex
pub = Publisher.new url("/pub/nobuffer/#{chan_id}")
sub=[]
40.times do
sub.push Subscriber.new(url("/sub/broadcast/#{chan_id}"), 1, use_message_id: false, quit_message: 'FIN')
end
pub.post ["this message should not be delivered", "nor this one"]
sub.each {|s| s.run}
sleep 1
pub.post "received1"
sleep 1
pub.post "received2"
sleep 1
pub.post "FIN"
sub.each {|s| s.wait}
sub.each do |s|
assert s.errors.empty?, "There were subscriber errors: \r\n#{s.errors.join "\r\n"}"
ret, err = s.messages.matches? ["received1", "received2", "FIN"]
assert ret, err || "Messages don't match"
end
end
def test_channel_isolation
rands= %w( foo bar baz bax qqqqqqqqqqqqqqqqqqq eleven andsoon andsoforth feh )
pub=[]
sub=[]
10.times do |i|
pub[i], sub[i]=pubsub 15
sub[i].run
end
pub.each do |p|
rand(1..10).times do
p.post rands.sample
end
end
sleep 1
pub.each do |p|
p.post 'FIN'
end
sub.each do |s|
s.wait
end
pub.each_with_index do |p, i|
verify p, sub[i]
end
sub.each {|s| s.terminate }
end
def test_broadcast_3
test_broadcast 3
end
def test_broadcast_20
test_broadcast 20
end
def test_broadcast(clients=400)
pub, sub = pubsub clients
pub.post "!!"
sub.run #celluloid async FTW
#sleep 2
pub.post ["!!!!", "what is this", "it's nothing", "nothing at all really"]
pub.post "FIN"
sub.wait
sleep 0.5
verify pub, sub
sub.terminate
end
def test_broadcast_10000
test_broadcast 10000
end
# Disabled: the name doesn't start with test_, so Minitest skips it.
def dont_test_subscriber_concurrency
chan=SecureRandom.hex
pub_first = Publisher.new url("pub/first#{chan}")
pub_last = Publisher.new url("pub/last#{chan}")
sub_first, sub_last = [], []
{ url("sub/first/first#{chan}") => sub_first, url("sub/last/last#{chan}") => sub_last }.each do |url, arr|
3.times do
sub=Subscriber.new(url, 1, quit_message: 'FIN', timeout: 20)
sub.on_failure do |resp, req|
false
end
arr << sub
end
end
sub_first.each {|s| s.run; sleep 0.1 }
assert sub_first[0].no_errors?
sub_first[1..2].each do |s|
assert s.errors?
assert s.match_errors(/code 409/)
end
sub_last.each {|s| s.run; sleep 0.1 }
assert sub_last[2].no_errors?
sub_last[0..1].each do |s|
assert s.errors?
assert s.match_errors(/code 40[49]/)
end
pub_first.post %w( foo bar FIN )
pub_last.post %w( foobar baz somethingelse FIN )
sub_first[0].wait
sub_last[2].wait
verify pub_first, sub_first[0]
verify pub_last, sub_last[2]
sub_first[1..2].each{ |s| assert s.messages.count == 0 }
sub_last[0..1].each{ |s| assert s.messages.count == 0 }
[sub_first, sub_last].each {|sub| sub.each{|s| s.terminate}}
end
def test_queueing
pub, sub = pubsub 5
pub.post %w( what is this_thing andnow 555555555555555555555 eleven FIN ), 'text/plain'
sleep 0.3
sub.run
sub.wait
verify pub, sub
sub.terminate
end
def test_long_message(kb=1)
pub, sub = pubsub 10, timeout: 10
sub.run
sleep 0.2
pub.post ["#{"q"*((kb * 1024)-3)}end", "FIN"]
sub.wait
verify pub, sub
sub.terminate
end
#[5, 9, 9.5, 9.9, 10, 11, 15, 16, 17, 18, 19, 20, 30, 50, 100, 200, 300, 600, 900, 3000].each do |n|
# Generates test_long_message_<n>Kb methods for several payload sizes.
[5, 10, 20, 200, 900].each do |n|
define_method "test_long_message_#{n}Kb" do
test_long_message n
end
end
def test_message_length_range
pub, sub = pubsub 2, timeout: 15
sub.run
n=5
while n <= 10000 do
pub.post "T" * n
n=(n*1.01) + 1
sleep 0.001
end
pub.post "FIN"
sub.wait
verify pub, sub
sub.terminate
end
def test_message_timeout
pub, sub = pubsub 1, pub: "/pub/2_sec_message_timeout/", timeout: 10
pub.post %w( foo bar etcetera ) #these shouldn't get delivered
pub.messages.clear
sleep 3
#binding.pry
sub.run
sleep 1
pub.post %w( what is this even FIN )
sub.wait
verify pub, sub
sub.terminate
end
def test_subscriber_timeout
chan=SecureRandom.hex
sub=Subscriber.new(url("sub/timeout/#{chan}"), 5, timeout: 10)
sub.on_failure { false }
pub=Publisher.new url("pub/#{chan}")
sub.run
sleep 0.1
pub.post "hello"
sub.wait
verify pub, sub, false
assert sub.match_errors(/code 304/)
sub.terminate
end
# Asserts that a response header contains the given substring.
def assert_header_includes(response, header, str)
assert response.headers[header].include?(str), "Response header '#{header}:#{response.headers[header]}' must include \"#{str}\", but does not."
end
# CORS preflight: both sub and pub endpoints must answer OPTIONS with
# the expected Access-Control-* headers.
def test_options
chan=SecureRandom.hex
request = Typhoeus::Request.new url("sub/broadcast/#{chan}"), method: :OPTIONS
resp = request.run
assert_equal "*", resp.headers["Access-Control-Allow-Origin"]
%w( GET OPTIONS ).each {|v| assert_header_includes resp, "Access-Control-Allow-Methods", v}
%w( If-None-Match If-Modified-Since Origin ).each {|v| assert_header_includes resp, "Access-Control-Allow-Headers", v}
request = Typhoeus::Request.new url("pub/#{chan}"), method: :OPTIONS
resp = request.run
assert_equal "*", resp.headers["Access-Control-Allow-Origin"]
%w( GET POST DELETE OPTIONS ).each {|v| assert_header_includes resp, "Access-Control-Allow-Methods", v}
%w( Content-Type Origin ).each {|v| assert_header_includes resp, "Access-Control-Allow-Headers", v}
end
def test_gzip
#bug: turning on gzip cleared the response etag
pub, sub = pubsub 1, sub: "/sub/gzip/", gzip: true, retry_delay: 0.3
sub.run
sleep 0.1
pub.post ["2", "123456789A", "alsdjklsdhflsajkfhl", "boq"]
sleep 1
pub.post "foobar"
pub.post "FIN"
sleep 1
verify pub, sub
end
end
don't test multi just yet...
#!/usr/bin/ruby
require 'minitest'
require 'minitest/reporters'
require "minitest/autorun"
Minitest::Reporters.use! [Minitest::Reporters::SpecReporter.new(:color => true)]
require 'securerandom'
require_relative 'pubsub.rb'
SERVER=ENV["PUSHMODULE_SERVER"] || "127.0.0.1"
PORT=ENV["PUSHMODULE_PORT"] || "8082"
DEFAULT_CLIENT=:longpoll
#Typhoeus::Config.verbose = true
# Random 6-character base-36 identifier (derived from a SecureRandom hex
# string).
def short_id
  SecureRandom.hex.to_i(16).to_s(36)[0, 6]
end
# Full server URL for the given path fragment; a single leading slash on
# the fragment is tolerated and stripped.
def url(part="")
  part = part.sub(%r{\A/}, "")
  "http://#{SERVER}:#{PORT}/#{part}"
end
puts "Server at #{url}"
# Builds a Publisher/Subscriber pair for a (usually random) channel.
#
# concurrent_clients - number of concurrent subscriber clients.
# opt - :sub / :pub URL prefixes, :channel id, :timeout, :use_message_id,
#       :gzip, :retry_delay, :client.
#
# The calling test's method name is appended as ?test=... so requests are
# attributable in server logs. Returns [pub, sub].
# Fix: removed the dead local `urlpart` (opt[:urlpart] was read but the
# value was never used anywhere).
def pubsub(concurrent_clients=1, opt={})
  test_name = caller_locations(1,1)[0].label
  timeout = opt[:timeout]
  sub_url=opt[:sub] || "sub/broadcast/"
  pub_url=opt[:pub] || "pub/"
  chan_id = opt[:channel] || SecureRandom.hex
  sub = Subscriber.new url("#{sub_url}#{chan_id}?test=#{test_name}"), concurrent_clients, timeout: timeout, use_message_id: opt[:use_message_id], quit_message: 'FIN', gzip: opt[:gzip], retry_delay: opt[:retry_delay], client: opt[:client] || DEFAULT_CLIENT
  pub = Publisher.new url("#{pub_url}#{chan_id}?test=#{test_name}")
  return pub, sub
end
# Asserts that a subscriber saw exactly the messages the publisher sent,
# and that all concurrent clients saw each message.
def verify(pub, sub, check_errors=true)
  if check_errors
    assert sub.errors.empty?, "There were subscriber errors: \r\n#{sub.errors.join "\r\n"}"
  end
  matched, err = sub.messages.matches?(pub.messages)
  assert matched, err || "Messages don't match"
  idx = 0
  sub.messages.each do |msg|
    assert_equal sub.concurrency, msg.times_seen, "Concurrent subscribers didn't all receive message #{idx}."
    idx += 1
  end
end
class PubSubTest < Minitest::Test
# Integration tests for the push module's pub/sub endpoints. They talk to
# a live server at SERVER:PORT and rely on sleeps for timing, so they
# assume a local, responsive server.
def setup
# Subscriber/Publisher helpers are Celluloid actors; boot the runtime.
Celluloid.boot
end
# Interval-polling client: expects 404 before anything is published, 304
# (with matching Last-Modified/Etag) when no new message is ready, and
# delivery once data is published.
def test_interval_poll
pub, sub=pubsub 1, sub: "/sub/intervalpoll/", client: :intervalpoll, quit_message: 'FIN', retry_delay: 0.2
ws_sub=Subscriber.new(sub.url, 1, client: :websocket, quit_message: 'FIN')
sub.on_failure do |resp|
assert_equal resp.code, 404
false
end
#ws_sub.run
sub.run
sub.wait
sub.abort
sub.reset
sleep 0.4
# NOTE(review): ws_sub.run is commented out above, so this assertion
# runs against a subscriber that never connected -- confirm intent.
assert ws_sub.match_errors(/code 403/), "expected 403 for all subscribers, got #{sub.errors.pretty_inspect}"
ws_sub.terminate
pub.post ["hello this", "is a thing"]
sleep 0.3
pub.post ["oh now what", "is this even a thing?"]
sleep 0.1
sub.on_failure do |resp|
assert_equal resp.code, 304
assert_equal resp.headers["Last-Modified"], sub.client.last_modified, "304 not ready should have the same last-modified header as last msg"
assert_equal resp.headers["Etag"], sub.client.etag, "304 not ready should have the same Etag header as last msg"
false
end
sub.run
sub.wait
#should get a 304 at this point
sub.abort
sub.reset
pub.post "FIN"
sleep 2
sub.run
sub.wait
verify pub, sub
sub.terminate
end
# Channel status endpoint: checks message/subscriber counts across the
# plain-text, JSON, XML and three YAML content types.
def test_channel_info
require 'json'
require 'nokogiri'
require 'yaml'
subs=20
chan=SecureRandom.hex
pub, sub = pubsub(subs, channel: chan)
pub.nofail=true
pub.get
assert_equal 404, pub.response_code
pub.post ["hello", "what is this i don't even"]
assert_equal 202, pub.response_code
pub.get
assert_equal 200, pub.response_code
assert_match /last requested: -?\d+ sec/, pub.response_body
pub.get "text/json"
info_json=JSON.parse pub.response_body
assert_equal 2, info_json["messages"]
#assert_equal 0, info_json["requested"]
assert_equal 0, info_json["subscribers"]
sub.run
sleep 1
pub.get "text/json"
info_json=JSON.parse pub.response_body
assert_equal 2, info_json["messages"]
#assert_equal 0, info_json["requested"]
assert_equal subs, info_json["subscribers"], "text/json subscriber count"
pub.get "text/xml"
ix = Nokogiri::XML pub.response_body
assert_equal 2, ix.at_xpath('//messages').content.to_i
#assert_equal 0, ix.at_xpath('//requested').content.to_i
assert_equal subs, ix.at_xpath('//subscribers').content.to_i
pub.get "text/yaml"
yaml_resp1=pub.response_body
pub.get "application/yaml"
yaml_resp2=pub.response_body
pub.get "application/x-yaml"
yaml_resp3=pub.response_body
yam=YAML.load pub.response_body
assert_equal 2, yam["messages"]
#assert_equal 0, yam["requested"]
assert_equal subs, yam["subscribers"]
assert_equal yaml_resp1, yaml_resp2
assert_equal yaml_resp2, yaml_resp3
pub.accept="text/json"
pub.post "FIN"
#stats right before FIN was issued
info_json=JSON.parse pub.response_body
assert_equal 3, info_json["messages"]
#assert_equal 0, info_json["requested"]
assert_equal subs, info_json["subscribers"]
sub.wait
pub.get "text/json"
info_json=JSON.parse pub.response_body
assert_equal 3, info_json["messages"], "number of messages received by channel is wrong"
#assert_equal 0, info_json["requested"]
assert_equal 0, info_json["subscribers"], "channel should say there are no subscribers"
sub.terminate
end
# Builds a /sub/multi/ URL from the given publishers' channel ids, in
# shuffled order.
def multi_sub_url(pubs)
ids = pubs.map{|v| v.id}.shuffle
"/sub/multi/#{ids.join '/'}"
end
# Pairs a channel id with a Publisher for that channel (used by the
# multi-channel test below).
class MultiCheck
attr_accessor :id, :pub
def initialize(id)
self.id = id
self.pub = Publisher.new url("/pub/#{self.id}")
end
end
# Disabled multi-channel test: the name doesn't start with test_, so
# Minitest skips it. If run manually it still ends in binding.pry
# (requires the pry gem) and makes no assertions.
def no_test_multi_n(n=2)
pubs = []
n.times do |i|
pubs << MultiCheck.new(short_id)
end
n = 50
scrambles = 5
subs = []
scrambles.times do |i|
subs << Subscriber.new(url(multi_sub_url(pubs)), n, quit_message: 'FIN')
end
subs.each &:run
pubs.each {|p| p.pub.post "FIRST from #{p.id}" }
10.times do |i|
pubs.each {|p| p.pub.post "hello #{i} from #{p.id}" }
end
5.times do |i|
pubs.first.pub.post "yes #{i} from #{pubs.first.id}"
end
pubs.each do |p|
10.times do |i|
p.pub.post "hello #{i} from #{p.id}"
end
end
latesubs = Subscriber.new(url(multi_sub_url(pubs)), n, quit_message: 'FIN')
subs << latesubs
latesubs.run
10.times do |i|
pubs.each {|p| p.pub.post "hello again #{i} from #{p.id}" }
end
pubs.first.pub.post "FIN"
subs.each &:wait
sleep 1
binding.pry
end
def test_message_delivery
pub, sub = pubsub
sub.run
sleep 0.2
assert_equal 0, sub.messages.messages.count
pub.post "hi there"
assert_equal 201, pub.response_code, "publisher response code"
sleep 0.2
assert_equal 1, sub.messages.messages.count, "received message count"
pub.post "FIN"
assert_equal 201, pub.response_code, "publisher response code"
sleep 0.2
assert_equal 2, sub.messages.messages.count, "recelived messages count"
assert sub.messages.matches? pub.messages
sub.terminate
end
def test_publish_then_subscribe
pub, sub = pubsub
pub.post "hi there"
sub.run
pub.post "FIN"
sub.wait
assert_equal 2, sub.messages.messages.count
assert sub.messages.matches? pub.messages
sub.terminate
end
def test_authorized_channels
#must be published to before subscribing
n=5
pub, sub = pubsub n, timeout: 6, sub: "sub/authorized/"
sub.on_failure { false }
sub.run
sleep 1
sub.wait
assert_equal n, sub.finished
assert sub.match_errors(/code 403/), "expected 403 for all subscribers, got #{sub.errors.pretty_inspect}"
sub.reset
pub.post %w( fweep )
assert_match /20[12]/, pub.response_code.to_s
sleep 0.1
sub.run
sleep 0.1
pub.post ["fwoop", "FIN"] { assert_match /20[12]/, pub.response_code.to_s }
sub.wait
verify pub, sub
sub.terminate
end
def test_deletion
#delete active channel
par=5
pub, sub = pubsub par, timeout: 10
sub.on_failure { false }
sub.run
sleep 0.2
pub.delete
sleep 0.1
assert_equal 200, pub.response_code
assert_equal par, pub.response_body.match(/subscribers:\s+(\d)/)[1].to_i, "subscriber count after deletion"
sub.wait
assert sub.match_errors(/code 410/), "Expected subscriber code 410: Gone, instead was \"#{sub.errors.first}\""
#delete channel with no subscribers
pub, sub = pubsub 5, timeout: 1
pub.post "hello"
assert_equal 202, pub.response_code
pub.delete
assert_equal 200, pub.response_code
#delete nonexistent channel
pub, sub = pubsub
pub.nofail=true
pub.delete
assert_equal 404, pub.response_code
end
# With the nobuffer publisher, messages posted before any subscriber
# connects must not be delivered.
def test_no_message_buffer
chan_id=SecureRandom.hex
pub = Publisher.new url("/pub/nobuffer/#{chan_id}")
sub=[]
40.times do
sub.push Subscriber.new(url("/sub/broadcast/#{chan_id}"), 1, use_message_id: false, quit_message: 'FIN')
end
pub.post ["this message should not be delivered", "nor this one"]
sub.each {|s| s.run}
sleep 1
pub.post "received1"
sleep 1
pub.post "received2"
sleep 1
pub.post "FIN"
sub.each {|s| s.wait}
sub.each do |s|
assert s.errors.empty?, "There were subscriber errors: \r\n#{s.errors.join "\r\n"}"
ret, err = s.messages.matches? ["received1", "received2", "FIN"]
assert ret, err || "Messages don't match"
end
end
def test_channel_isolation
rands= %w( foo bar baz bax qqqqqqqqqqqqqqqqqqq eleven andsoon andsoforth feh )
pub=[]
sub=[]
10.times do |i|
pub[i], sub[i]=pubsub 15
sub[i].run
end
pub.each do |p|
rand(1..10).times do
p.post rands.sample
end
end
sleep 1
pub.each do |p|
p.post 'FIN'
end
sub.each do |s|
s.wait
end
pub.each_with_index do |p, i|
verify p, sub[i]
end
sub.each {|s| s.terminate }
end
def test_broadcast_3
test_broadcast 3
end
def test_broadcast_20
test_broadcast 20
end
def test_broadcast(clients=400)
pub, sub = pubsub clients
pub.post "!!"
sub.run #celluloid async FTW
#sleep 2
pub.post ["!!!!", "what is this", "it's nothing", "nothing at all really"]
pub.post "FIN"
sub.wait
sleep 0.5
verify pub, sub
sub.terminate
end
def test_broadcast_10000
test_broadcast 10000
end
# Disabled: the name doesn't start with test_, so Minitest skips it.
def dont_test_subscriber_concurrency
chan=SecureRandom.hex
pub_first = Publisher.new url("pub/first#{chan}")
pub_last = Publisher.new url("pub/last#{chan}")
sub_first, sub_last = [], []
{ url("sub/first/first#{chan}") => sub_first, url("sub/last/last#{chan}") => sub_last }.each do |url, arr|
3.times do
sub=Subscriber.new(url, 1, quit_message: 'FIN', timeout: 20)
sub.on_failure do |resp, req|
false
end
arr << sub
end
end
sub_first.each {|s| s.run; sleep 0.1 }
assert sub_first[0].no_errors?
sub_first[1..2].each do |s|
assert s.errors?
assert s.match_errors(/code 409/)
end
sub_last.each {|s| s.run; sleep 0.1 }
assert sub_last[2].no_errors?
sub_last[0..1].each do |s|
assert s.errors?
assert s.match_errors(/code 40[49]/)
end
pub_first.post %w( foo bar FIN )
pub_last.post %w( foobar baz somethingelse FIN )
sub_first[0].wait
sub_last[2].wait
verify pub_first, sub_first[0]
verify pub_last, sub_last[2]
sub_first[1..2].each{ |s| assert s.messages.count == 0 }
sub_last[0..1].each{ |s| assert s.messages.count == 0 }
[sub_first, sub_last].each {|sub| sub.each{|s| s.terminate}}
end
def test_queueing
pub, sub = pubsub 5
pub.post %w( what is this_thing andnow 555555555555555555555 eleven FIN ), 'text/plain'
sleep 0.3
sub.run
sub.wait
verify pub, sub
sub.terminate
end
def test_long_message(kb=1)
pub, sub = pubsub 10, timeout: 10
sub.run
sleep 0.2
pub.post ["#{"q"*((kb * 1024)-3)}end", "FIN"]
sub.wait
verify pub, sub
sub.terminate
end
#[5, 9, 9.5, 9.9, 10, 11, 15, 16, 17, 18, 19, 20, 30, 50, 100, 200, 300, 600, 900, 3000].each do |n|
# Generates test_long_message_<n>Kb methods for several payload sizes.
[5, 10, 20, 200, 900].each do |n|
define_method "test_long_message_#{n}Kb" do
test_long_message n
end
end
def test_message_length_range
pub, sub = pubsub 2, timeout: 15
sub.run
n=5
while n <= 10000 do
pub.post "T" * n
n=(n*1.01) + 1
sleep 0.001
end
pub.post "FIN"
sub.wait
verify pub, sub
sub.terminate
end
def test_message_timeout
pub, sub = pubsub 1, pub: "/pub/2_sec_message_timeout/", timeout: 10
pub.post %w( foo bar etcetera ) #these shouldn't get delivered
pub.messages.clear
sleep 3
#binding.pry
sub.run
sleep 1
pub.post %w( what is this even FIN )
sub.wait
verify pub, sub
sub.terminate
end
def test_subscriber_timeout
chan=SecureRandom.hex
sub=Subscriber.new(url("sub/timeout/#{chan}"), 5, timeout: 10)
sub.on_failure { false }
pub=Publisher.new url("pub/#{chan}")
sub.run
sleep 0.1
pub.post "hello"
sub.wait
verify pub, sub, false
assert sub.match_errors(/code 304/)
sub.terminate
end
# Asserts that a response header contains the given substring.
def assert_header_includes(response, header, str)
assert response.headers[header].include?(str), "Response header '#{header}:#{response.headers[header]}' must include \"#{str}\", but does not."
end
# CORS preflight: both sub and pub endpoints must answer OPTIONS with
# the expected Access-Control-* headers.
def test_options
chan=SecureRandom.hex
request = Typhoeus::Request.new url("sub/broadcast/#{chan}"), method: :OPTIONS
resp = request.run
assert_equal "*", resp.headers["Access-Control-Allow-Origin"]
%w( GET OPTIONS ).each {|v| assert_header_includes resp, "Access-Control-Allow-Methods", v}
%w( If-None-Match If-Modified-Since Origin ).each {|v| assert_header_includes resp, "Access-Control-Allow-Headers", v}
request = Typhoeus::Request.new url("pub/#{chan}"), method: :OPTIONS
resp = request.run
assert_equal "*", resp.headers["Access-Control-Allow-Origin"]
%w( GET POST DELETE OPTIONS ).each {|v| assert_header_includes resp, "Access-Control-Allow-Methods", v}
%w( Content-Type Origin ).each {|v| assert_header_includes resp, "Access-Control-Allow-Headers", v}
end
def test_gzip
#bug: turning on gzip cleared the response etag
pub, sub = pubsub 1, sub: "/sub/gzip/", gzip: true, retry_delay: 0.3
sub.run
sleep 0.1
pub.post ["2", "123456789A", "alsdjklsdhflsajkfhl", "boq"]
sleep 1
pub.post "foobar"
pub.post "FIN"
sleep 1
verify pub, sub
end
end
|
class Repositext
  class Process
    class Fix
      # Prepares primary repo for the first st_sync:
      # * Copies time slices from current STM CSV files to subtitle_import dir
      #   This will guarantee that every content AT file has a corresponding
      #   subtitle import file with up-to-date time slices.
      # * Sets the `last_st_sync_commit_for_this_file` attribute for every
      #   content AT file to the baseline from commit (st_sync_commit recorded
      #   in the primary repo's data.json file).
      class PrepareInitialPrimarySubtitleSync
        # Initialize a new fix process
        # @param options [Hash] with stringified keys
        # @option options [Config] 'config'
        # @option options [Array<String>] 'file_list' can be used at command
        #     line via file-selector to limit which
        #     files should be synced.
        # @option options [String, Nil] 'from-commit', optional, defaults to previous `to-commit`
        # @option options [Repository] 'primary_repository' the primary repo
        # @option options [IO] stids_inventory_file
        # @option options [String, Nil] 'to-commit', optional, defaults to most recent local git commit
        def initialize(options)
          @config = options['config']
          @file_list = options['file_list']
          # NOTE(review): dead store — #sync unconditionally overwrites
          # @from_git_commit with the value from the repo's data.json.
          @from_git_commit = options['from-commit']
          @primary_repository = options['primary_repository']
          @processed_files_count = 0
        end
        # Runs the fix: loads the baseline commit from the repo-level
        # data.json, processes every file path in @file_list, then prints a
        # summary. Progress is reported on stdout throughout.
        def sync
          puts
          puts "Preparing primary files for initial primary subtitle sync".color(:blue)
          puts
          puts " - Load baseline 'from_git_commit' from primary repo's data.json file:".color(:blue)
          @from_git_commit = @primary_repository.read_repo_level_data['st_sync_commit']
          puts " - #{ @from_git_commit.inspect }"
          puts " - Process primary files:".color(:blue)
          # Pick any content_type, doesn't matter which one
          content_type = ContentType.all(@primary_repository).first
          @file_list.each do |content_at_file_path|
            content_at_file = RFile::ContentAt.new(
              File.read(content_at_file_path),
              content_type.language,
              content_at_file_path,
              content_type
            )
            puts " - #{ content_at_file.repo_relative_path }"
            process_primary_file(content_at_file)
          end
          puts " - Finalize operation".color(:blue)
          finalize_operation
        end
      private
        # Copies the STM CSV time slices of one content AT file into its
        # corresponding subtitle import markers file.
        def process_primary_file(content_at_file)
          subtitle_import_markers_filename = content_at_file.corresponding_subtitle_import_markers_filename
          puts " - copy time slices from STM CSV file to #{ subtitle_import_markers_filename }"
          # Load time slices from STM CSV file
          stm_csv_file = content_at_file.corresponding_subtitle_markers_csv_file
          time_slices = []
          # NOTE(review): stm_csv_time_slices is assigned but never read.
          stm_csv_time_slices = stm_csv_file.each_row { |e|
            time_slices << [e['relativeMS'], e['samples']]
          }
          # Write time slices to st_imp_filename
          # Convert to CSV
          csv_string = CSV.generate(col_sep: "\t") do |csv|
            csv << Repositext::Utils::SubtitleMarkTools.csv_headers.first(2)
            time_slices.each do |row|
              csv << row
            end
          end
          File.write(subtitle_import_markers_filename, csv_string)
          # NOTE(review): the message below is printed but no code here writes
          # the 'last_st_sync_commit_for_this_file' attribute — confirm the
          # write happens elsewhere or is missing.
          puts " - set 'last_st_sync_commit_for_this_file'"
          @processed_files_count += 1
        end
        # Computes new subtitle char_lengths for all subtitles in content_at.
        # NOTE(review): not called from within this class as shown.
        # @param content_at_file [RFile::ContentAt] needs to be at toGitCommit
        # @return [Array<Integer>]
        def compute_new_char_lengths(content_at_file)
          Repositext::Utils::SubtitleMarkTools.extract_captions(
            content_at_file.contents
          ).map { |e| e[:char_length] }
        end
        # Prints the number of files processed.
        def finalize_operation
          puts " - Processed #{ @processed_files_count } primary files.".color(:green)
        end
      end
    end
  end
end
Updated Fix::PrepareInitialPrimarySubtitleSync: added the part that updates the data.json file
class Repositext
  class Process
    class Fix
      # Prepares primary repo for the first st_sync:
      # * Copies time slices from current STM CSV files to subtitle_import dir
      #   This will guarantee that every content AT file has a corresponding
      #   subtitle import file with up-to-date time slices.
      # * Sets the `last_st_sync_commit_for_this_file` attribute for every
      #   content AT file to the baseline from commit (st_sync_commit recorded
      #   in the primary repo's data.json file).
      class PrepareInitialPrimarySubtitleSync
        # Initialize a new fix process
        # @param options [Hash] with stringified keys
        # @option options [Config] 'config'
        # @option options [Array<String>] 'file_list' can be used at command
        #     line via file-selector to limit which
        #     files should be synced.
        # @option options [Repository] 'primary_repository' the primary repo
        def initialize(options)
          @config = options['config']
          @file_list = options['file_list']
          @primary_repository = options['primary_repository']
          @processed_files_count = 0
        end
        # Runs the fix: loads the baseline commit from the repo-level
        # data.json, processes every file path in @file_list, then prints a
        # summary. Progress is reported on stdout throughout.
        def sync
          puts
          puts "Preparing primary files for initial primary subtitle sync".color(:blue)
          puts
          puts " - Load baseline 'from_git_commit' from primary repo's data.json file:".color(:blue)
          @from_git_commit = @primary_repository.read_repo_level_data['st_sync_commit']
          puts " - #{ @from_git_commit.inspect }"
          puts " - Process primary files:".color(:blue)
          # Pick any content_type, doesn't matter which one
          content_type = ContentType.all(@primary_repository).first
          @file_list.each do |content_at_file_path|
            content_at_file = RFile::ContentAt.new(
              File.read(content_at_file_path),
              content_type.language,
              content_at_file_path,
              content_type
            )
            puts " - #{ content_at_file.repo_relative_path }"
            process_primary_file(content_at_file)
          end
          puts " - Finalize operation".color(:blue)
          finalize_operation
        end
      private
        # Copies the STM CSV time slices of one content AT file into its
        # corresponding subtitle import markers file and records the baseline
        # commit in the file's data.json record.
        def process_primary_file(content_at_file)
          subtitle_import_markers_filename = content_at_file.corresponding_subtitle_import_markers_filename
          puts " - copy time slices from STM CSV file to #{ subtitle_import_markers_filename }"
          # Load time slices from STM CSV file
          stm_csv_file = content_at_file.corresponding_subtitle_markers_csv_file
          time_slices = []
          # Fix: dropped the unused `stm_csv_time_slices` local that captured
          # each_row's return value but was never read.
          stm_csv_file.each_row { |e|
            time_slices << [e['relativeMS'], e['samples']]
          }
          # Convert time slices to CSV and write them to the import file.
          csv_string = CSV.generate(col_sep: "\t") do |csv|
            csv << Repositext::Utils::SubtitleMarkTools.csv_headers.first(2)
            time_slices.each do |row|
              csv << row
            end
          end
          File.write(subtitle_import_markers_filename, csv_string)
          puts " - set 'last_st_sync_commit_for_this_file'"
          content_at_file.update_file_level_data(
            'last_st_sync_commit_for_this_file' => @from_git_commit
          )
          @processed_files_count += 1
        end
        # Computes new subtitle char_lengths for all subtitles in content_at.
        # @param content_at_file [RFile::ContentAt] needs to be at toGitCommit
        # @return [Array<Integer>]
        def compute_new_char_lengths(content_at_file)
          Repositext::Utils::SubtitleMarkTools.extract_captions(
            content_at_file.contents
          ).map { |e| e[:char_length] }
        end
        # Prints the number of files processed.
        def finalize_operation
          puts " - Processed #{ @processed_files_count } primary files.".color(:green)
        end
      end
    end
  end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'dex/version'

# Gem packaging manifest for the dex command-line rolodex.
Gem::Specification.new do |gem|
  gem.name          = "dex"
  gem.version       = Dex::VERSION
  gem.authors       = ["ollieshmollie"]
  gem.email         = ["oliverduncan@icloud.com"]
  gem.summary       = "A command line rolodex."
  gem.homepage      = "https://www.github.com/ollieshmollie/dex"
  gem.license       = "MIT"

  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the
  # 'allowed_push_host' to a single host or delete this section to allow
  # pushing to any host.
  if gem.respond_to?(:metadata)
    gem.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
  else
    raise "RubyGems 2.0 or newer is required to protect against " \
      "public gem pushes."
  end

  # Package every git-tracked file except tests and specs.
  gem.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  gem.executables   = ["dex"]
  gem.require_paths = ["lib"]

  gem.add_runtime_dependency "sqlite3", "~> 1.3"
  gem.add_runtime_dependency "colored", "~> 1.2"
  gem.add_development_dependency "bundler", "~> 1.13"
  gem.add_development_dependency "rake", "~> 10.0"
end
Change allowed push host to rubygems
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'dex/version'

# Gem packaging manifest for the dex command-line rolodex.
Gem::Specification.new do |gem|
  gem.name          = "dex"
  gem.version       = Dex::VERSION
  gem.authors       = ["ollieshmollie"]
  gem.email         = ["oliverduncan@icloud.com"]
  gem.summary       = "A command line rolodex."
  gem.homepage      = "https://www.github.com/ollieshmollie/dex"
  gem.license       = "MIT"

  # Only allow pushes to the public RubyGems index (requires RubyGems 2.0+
  # for the metadata-based restriction to take effect).
  if gem.respond_to?(:metadata)
    gem.metadata['allowed_push_host'] = "https://rubygems.org"
  else
    raise "RubyGems 2.0 or newer is required to protect against " \
      "public gem pushes."
  end

  # Package every git-tracked file except tests and specs.
  gem.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  gem.executables   = ["dex"]
  gem.require_paths = ["lib"]

  gem.add_runtime_dependency "sqlite3", "~> 1.3"
  gem.add_runtime_dependency "colored", "~> 1.2"
  gem.add_development_dependency "bundler", "~> 1.13"
  gem.add_development_dependency "rake", "~> 10.0"
end
|
require 'set'
# Earley-style recognizer driven by a precomputed state machine (Marpa
# algorithm). Consumes one input symbol per position, building a chain of
# charts; acceptance is decided by the final chart.
class Marpa
  attr_accessor :chart, :source, :state_machine, :state_size
  attr_accessor :previous_chart

  # Build the recognizer for +grammar+ and prime the initial chart.
  def initialize grammar
    @state_machine = StateMachine.new(grammar)
    @state_size = @state_machine.size
    reset
  end

  # Discard parse progress and reinstall the start state in chart 0.
  # Returns self so calls can be chained.
  def reset
    @chart = Chart.new(0, state_size)
    @previous_chart = nil
    @chart.add state_machine.starting_state, @chart
    self
  end

  # Run the recognizer over +source+ — either an Enumerator of symbols or a
  # String consumed character by character. Returns true when accepted.
  def parse source
    @source = source
    stream = source.kind_of?(Enumerator) ? source : source.each_char
    stream.each { |sym| marpa_pass sym }
    success?
  end

  # Advance to the next input position and consume one symbol.
  def marpa_pass sym
    @previous_chart = chart
    @previous_chart.memoize_transitions
    @chart = Chart.new(@previous_chart.index.succ, state_size)
    consume sym
  end

  # Scan +sym+; unless nothing matched, run the reduction pass.
  def consume sym
    scan_pass sym
    #TODO: Slippers
    return if chart.empty?
    reduction_pass
  end

  # True when the current chart contains an accepting state.
  def success?
    chart.accept?
  end

  def scan_pass sym
    @previous_chart.scan(sym, chart)
  end

  def reduction_pass
    chart.reduce
    chart.memoize_transitions
  end
end
Save all charts in an array.
require 'set'
# Earley-style recognizer (Marpa algorithm) that additionally keeps every
# chart it builds in +charts+, indexed by input position, for later
# inspection.
class Marpa
  attr_accessor :chart, :source, :state_machine, :state_size
  attr_accessor :previous_chart, :charts

  # Build the recognizer for +grammar+ and prime the initial chart.
  # (@charts initialization now lives in #reset, which owns all
  # per-parse state.)
  def initialize grammar
    @state_machine = StateMachine.new(grammar)
    @state_size = state_machine.size
    reset
  end

  # Discard all parse progress and reinstall the start state in a fresh
  # chart 0. Fix: previously the saved chart list was reused across resets,
  # leaving stale charts from an earlier parse at indices >= 1; the list is
  # now emptied here. Returns self so calls can be chained.
  def reset
    @charts = []
    charts[0] = @chart = Chart.new(0, state_size)
    @previous_chart = nil
    chart.add state_machine.starting_state, @chart
    self
  end

  # Run the recognizer over +source+ — either an Enumerator of symbols or a
  # String consumed character by character. Returns true when accepted.
  def parse source
    @source = source
    consumer = method(:marpa_pass)
    generator = source.kind_of?(Enumerator) ? source : source.each_char
    generator.each.with_index &consumer
    success?
  end

  # Advance past input position +index+ and consume one symbol, recording
  # the new chart at charts[index + 1].
  def marpa_pass sym, index
    @previous_chart = chart
    @previous_chart.memoize_transitions
    @chart = Chart.new(@previous_chart.index.succ, state_size)
    charts[index + 1] = chart
    consume sym
  end

  # Scan +sym+; unless nothing matched, run the reduction pass.
  def consume sym
    scan_pass sym
    #TODO: Slippers
    return if chart.empty?
    reduction_pass
  end

  # True when the current chart contains an accepting state.
  def success?
    chart.accept?
  end

  def scan_pass sym
    @previous_chart.scan(sym, chart)
  end

  def reduction_pass
    chart.reduce
    chart.memoize_transitions
  end
end
|
# Marker modules for the format tests: an attribute declared with one of
# these types may be either a Hash (nested resource) or nil.
module Fog
  module Brightbox
    module Nullable
      module Account; end
      module Image; end
      module Interface; end
      module LoadBalancer; end
      module Server; end
      module Zone; end
    end
  end
end

# Mix every nullable marker into both Hash and NilClass so that
# `value.is_a?(marker)` holds for either representation.
Fog::Brightbox::Nullable.constants.each do |name|
  marker = Fog::Brightbox::Nullable.const_get(name)
  [Hash, NilClass].each { |host| host.send(:include, marker) }
end
# Expected-response format fixtures for the Brightbox Compute API tests.
class Brightbox
  module Compute
    module TestSupport
      # Ubuntu Lucid 10.04 server (i686)
      IMAGE_IDENTIFER = "img-2ab98"
    end

    module Formats
      # Embedded structures that are not API resources in their own right.
      module Struct
        LB_LISTENER = {
          "in" => Integer,
          "out" => Integer,
          "protocol" => String
        }

        LB_HEALTHCHECK = {
          "type" => String,
          "request" => String,
          "port" => Integer,
          "interval" => Integer,
          "timeout" => Integer,
          "threshold_up" => Integer,
          "threshold_down" => Integer
        }
      end

      # Abbreviated forms used when a resource is embedded inside another.
      module Nested
        ACCOUNT = {
          "name" => String,
          "resource_type" => String,
          "url" => String,
          "id" => String,
          "status" => String,
          "ram_limit" => Integer,
          "ram_used" => Integer,
          "cloud_ips_limit" => Integer,
          "cloud_ips_used" => Integer,
          "load_balancers_limit" => Integer,
          "load_balancers_used" => Integer
        }

        API_CLIENT = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "description" => String,
          "revoked_at" => Fog::Nullable::String
        }

        CLOUD_IP = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "public_ip" => String,
          "status" => String,
          "reverse_dns" => String
        }

        IMAGE = {
          "name" => String,
          "created_at" => String,
          "resource_type" => String,
          "arch" => String,
          "url" => String,
          "id" => String,
          "description" => String,
          "source" => String,
          "status" => String,
          "owner" => String
        }

        INTERFACE = {
          "resource_type" => String,
          "url" => String,
          "id" => String,
          "ipv4_address" => String,
          "mac_address" => String
        }

        SERVER = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "status" => String,
          "hostname" => String,
          "created_at" => String,
          "started_at" => Fog::Nullable::String,
          "deleted_at" => Fog::Nullable::String
        }

        SERVER_TYPE = {
          "name" => String,
          "handle" => Fog::Nullable::String,
          "cores" => Integer,
          "resource_type" => String,
          "disk_size" => Integer,
          "url" => String,
          "id" => String,
          "ram" => Integer,
          "status" => String
        }

        USER = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "email_address" => String
        }

        ZONE = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "handle" => Fog::Nullable::String
        }
      end

      # Forms returned by the collection (index) endpoints.
      module Collected
        API_CLIENT = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "description" => String,
          "revoked_at" => Fog::Nullable::String,
          "account" => Brightbox::Compute::Formats::Nested::ACCOUNT
        }

        CLOUD_IP = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "public_ip" => String,
          "status" => String,
          "reverse_dns" => String,
          "account" => Brightbox::Compute::Formats::Nested::ACCOUNT,
          "interface" => Fog::Brightbox::Nullable::Interface,
          "load_balancer" => Fog::Brightbox::Nullable::LoadBalancer,
          "server" => Fog::Brightbox::Nullable::Server
        }

        IMAGE = {
          "name" => String,
          "created_at" => String,
          "resource_type" => String,
          "arch" => String,
          "url" => String,
          "id" => String,
          "description" => String,
          "source" => String,
          "source_type" => String,
          "status" => String,
          "owner" => String,
          "public" => Fog::Boolean,
          "official" => Fog::Boolean,
          "compatibility_mode" => Fog::Boolean,
          "virtual_size" => Integer,
          "disk_size" => Integer,
          "ancestor" => Fog::Brightbox::Nullable::Image
        }

        LOAD_BALANCER = {
          "cloud_ips" => Array,
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "status" => String,
          "created_at" => String,
          "deleted_at" => Fog::Nullable::String
        }

        SERVER = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "status" => String,
          "hostname" => String,
          "created_at" => String,
          "started_at" => Fog::Nullable::String,
          "deleted_at" => Fog::Nullable::String,
          "account" => Brightbox::Compute::Formats::Nested::ACCOUNT,
          "server_type" => Brightbox::Compute::Formats::Nested::SERVER_TYPE,
          "cloud_ips" => [Brightbox::Compute::Formats::Nested::CLOUD_IP],
          "image" => Brightbox::Compute::Formats::Nested::IMAGE,
          "snapshots" => [Brightbox::Compute::Formats::Nested::IMAGE],
          "interfaces" => [Brightbox::Compute::Formats::Nested::INTERFACE],
          "zone" => Fog::Brightbox::Nullable::Zone
        }

        SERVER_TYPE = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "handle" => Fog::Nullable::String,
          "name" => String,
          "status" => String,
          "cores" => Integer,
          "ram" => Integer,
          "disk_size" => Integer
        }

        USER = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "email_address" => String,
          "email_verified" => Fog::Boolean,
          "accounts" => [Brightbox::Compute::Formats::Nested::ACCOUNT],
          "default_account" => NilClass
        }

        ZONE = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "handle" => Fog::Nullable::String
        }
      end

      # Full forms returned by the single-resource (show) endpoints.
      module Full
        ACCOUNT = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "status" => String,
          "address_1" => String,
          "address_2" => String,
          "city" => String,
          "county" => String,
          "postcode" => String,
          "country_code" => String,
          "country_name" => String,
          "vat_registration_number" => Fog::Nullable::String,
          "telephone_number" => String,
          "telephone_verified" => Fog::Boolean,
          "created_at" => String,
          "ram_limit" => Integer,
          "ram_used" => Integer,
          "cloud_ips_limit" => Integer,
          "cloud_ips_used" => Integer,
          "load_balancers_limit" => Integer,
          "load_balancers_used" => Integer,
          "library_ftp_host" => String,
          "library_ftp_user" => String,
          "library_ftp_password" => Fog::Nullable::String,
          "verified_telephone" => Fog::Nullable::String,
          "verified_at" => Fog::Nullable::String,
          "verified_ip" => Fog::Nullable::String,
          "owner" => Brightbox::Compute::Formats::Nested::USER,
          "users" => [Brightbox::Compute::Formats::Nested::USER],
          "clients" => [Brightbox::Compute::Formats::Nested::API_CLIENT],
          "servers" => [Brightbox::Compute::Formats::Nested::SERVER],
          "images" => [Brightbox::Compute::Formats::Nested::IMAGE],
          "zones" => [Brightbox::Compute::Formats::Nested::ZONE]
        }

        API_CLIENT = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "description" => String,
          "revoked_at" => Fog::Nullable::String,
          "secret" => Fog::Nullable::String,
          "account" => Brightbox::Compute::Formats::Nested::ACCOUNT
        }

        CLOUD_IP = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "public_ip" => String,
          "status" => String,
          "reverse_dns" => String,
          "account" => Fog::Brightbox::Nullable::Account,
          "interface" => Fog::Brightbox::Nullable::Interface,
          "load_balancer" => Fog::Brightbox::Nullable::LoadBalancer,
          "server" => Fog::Brightbox::Nullable::Server
        }

        IMAGE = {
          "name" => String,
          "created_at" => String,
          "resource_type" => String,
          "arch" => String,
          "url" => String,
          "id" => String,
          "description" => String,
          "source" => String,
          "source_type" => String,
          "status" => String,
          "owner" => String, # Account ID not object
          "public" => Fog::Boolean,
          "official" => Fog::Boolean,
          "compatibility_mode" => Fog::Boolean,
          "virtual_size" => Integer,
          "disk_size" => Integer,
          "ancestor" => Fog::Brightbox::Nullable::Image
        }

        INTERFACE = {
          "resource_type" => String,
          "url" => String,
          "id" => String,
          "ipv4_address" => String,
          "mac_address" => String,
          "server" => Brightbox::Compute::Formats::Nested::SERVER
        }

        LOAD_BALANCER = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "status" => String,
          "listeners" => [Brightbox::Compute::Formats::Struct::LB_LISTENER],
          "policy" => String,
          "healthcheck" => Brightbox::Compute::Formats::Struct::LB_HEALTHCHECK,
          "created_at" => String,
          "deleted_at" => Fog::Nullable::String,
          "account" => Brightbox::Compute::Formats::Nested::ACCOUNT,
          "nodes" => [Brightbox::Compute::Formats::Nested::SERVER],
          "cloud_ips" => [Brightbox::Compute::Formats::Nested::CLOUD_IP]
        }

        SERVER = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "status" => String,
          "hostname" => String,
          "created_at" => String,
          "started_at" => Fog::Nullable::String,
          "deleted_at" => Fog::Nullable::String,
          "user_data" => Fog::Nullable::String,
          "console_url" => Fog::Nullable::String,
          "console_token" => Fog::Nullable::String,
          "console_token_expires" => Fog::Nullable::String,
          "account" => Brightbox::Compute::Formats::Nested::ACCOUNT,
          "server_type" => Brightbox::Compute::Formats::Nested::SERVER_TYPE,
          "cloud_ips" => [Brightbox::Compute::Formats::Nested::CLOUD_IP],
          "image" => Brightbox::Compute::Formats::Nested::IMAGE,
          "snapshots" => [Brightbox::Compute::Formats::Nested::IMAGE],
          "interfaces" => [Brightbox::Compute::Formats::Nested::INTERFACE],
          "zone" => Brightbox::Compute::Formats::Nested::ZONE
        }

        SERVER_TYPE = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "handle" => Fog::Nullable::String,
          "name" => String,
          "status" => String,
          "cores" => Integer,
          "ram" => Integer,
          "disk_size" => Integer
        }

        USER = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "name" => String,
          "email_address" => String,
          "email_verified" => Fog::Boolean,
          "accounts" => [Brightbox::Compute::Formats::Nested::ACCOUNT],
          "default_account" => Fog::Brightbox::Nullable::Account,
          "ssh_key" => Fog::Nullable::String,
          "messaging_pref" => Fog::Boolean
        }

        ZONE = {
          "id" => String,
          "resource_type" => String,
          "url" => String,
          "handle" => String
        }
      end

      # Arrays of Collected forms, as returned by the list endpoints.
      module Collection
        API_CLIENTS = [Brightbox::Compute::Formats::Collected::API_CLIENT]
        CLOUD_IPS = [Brightbox::Compute::Formats::Collected::CLOUD_IP]
        IMAGES = [Brightbox::Compute::Formats::Collected::IMAGE]
        LOAD_BALANCERS = [Brightbox::Compute::Formats::Collected::LOAD_BALANCER]
        SERVERS = [Brightbox::Compute::Formats::Collected::SERVER]
        SERVER_TYPES = [Brightbox::Compute::Formats::Collected::SERVER_TYPE]
        USERS = [Brightbox::Compute::Formats::Collected::USER]
        ZONES = [Brightbox::Compute::Formats::Collected::ZONE]
      end
    end
  end
end
[compute|brightbox] Updated Account format test to allow valid_credit_card flag
# Marker modules for the format tests: an attribute declared with one of
# these types may be either a Hash (nested resource) or nil.
module Fog
  module Brightbox
    module Nullable
      module Account; end
      module Image; end
      module Interface; end
      module LoadBalancer; end
      module Server; end
      module Zone; end
    end
  end
end

# Mix every nullable marker into both Hash and NilClass so that
# `value.is_a?(marker)` holds for either representation.
Fog::Brightbox::Nullable.constants.each do |name|
  marker = Fog::Brightbox::Nullable.const_get(name)
  [Hash, NilClass].each { |host| host.send(:include, marker) }
end
class Brightbox
module Compute
module TestSupport
IMAGE_IDENTIFER = "img-2ab98" # Ubuntu Lucid 10.04 server (i686)
end
module Formats
module Struct
LB_LISTENER = {
"in" => Integer,
"out" => Integer,
"protocol" => String
}
LB_HEALTHCHECK = {
"type" => String,
"request" => String,
"port" => Integer,
"interval" => Integer,
"timeout" => Integer,
"threshold_up" => Integer,
"threshold_down" => Integer
}
end
module Nested
ACCOUNT = {
"name" => String,
"resource_type" => String,
"url" => String,
"id" => String,
"status" => String,
"ram_limit" => Integer,
"ram_used" => Integer,
"cloud_ips_limit" => Integer,
"cloud_ips_used" => Integer,
"load_balancers_limit" => Integer,
"load_balancers_used" => Integer
}
API_CLIENT = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"description" => String,
"revoked_at" => Fog::Nullable::String
}
CLOUD_IP = {
"id" => String,
"resource_type" => String,
"url" => String,
"public_ip" => String,
"status" => String,
"reverse_dns" => String
}
IMAGE = {
"name" => String,
"created_at" => String,
"resource_type" => String,
"arch" => String,
"url" => String,
"id" => String,
"description" => String,
"source" => String,
"status" => String,
"owner" => String
}
INTERFACE = {
"resource_type" => String,
"url" => String,
"id" => String,
"ipv4_address" => String,
"mac_address" => String
}
SERVER = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"status" => String,
"hostname" => String,
"created_at" => String,
"started_at" => Fog::Nullable::String,
"deleted_at" => Fog::Nullable::String
}
SERVER_TYPE = {
"name" => String,
"handle" => Fog::Nullable::String,
"cores" => Integer,
"resource_type" => String,
"disk_size" => Integer,
"url" => String,
"id" => String,
"ram" => Integer,
"status" => String
}
USER = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"email_address" => String
}
ZONE = {
"id" => String,
"resource_type" => String,
"url" => String,
"handle" => Fog::Nullable::String
}
end
module Collected
API_CLIENT = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"description" => String,
"revoked_at" => Fog::Nullable::String,
"account" => Brightbox::Compute::Formats::Nested::ACCOUNT
}
CLOUD_IP = {
"id" => String,
"resource_type" => String,
"url" => String,
"public_ip" => String,
"status" => String,
"reverse_dns" => String,
"account" => Brightbox::Compute::Formats::Nested::ACCOUNT,
"interface" => Fog::Brightbox::Nullable::Interface,
"load_balancer" => Fog::Brightbox::Nullable::LoadBalancer,
"server" => Fog::Brightbox::Nullable::Server
}
IMAGE = {
"name" => String,
"created_at" => String,
"resource_type" => String,
"arch" => String,
"url" => String,
"id" => String,
"description" => String,
"source" => String,
"source_type" => String,
"status" => String,
"owner" => String,
"public" => Fog::Boolean,
"official" => Fog::Boolean,
"compatibility_mode" => Fog::Boolean,
"virtual_size" => Integer,
"disk_size" => Integer,
"ancestor" => Fog::Brightbox::Nullable::Image
}
LOAD_BALANCER = {
"cloud_ips" => Array,
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"status" => String,
"created_at" => String,
"deleted_at" => Fog::Nullable::String
}
SERVER = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"status" => String,
"hostname" => String,
"created_at" => String,
"started_at" => Fog::Nullable::String,
"deleted_at" => Fog::Nullable::String,
"account" => Brightbox::Compute::Formats::Nested::ACCOUNT,
"server_type" => Brightbox::Compute::Formats::Nested::SERVER_TYPE,
"cloud_ips" => [Brightbox::Compute::Formats::Nested::CLOUD_IP],
"image" => Brightbox::Compute::Formats::Nested::IMAGE,
"snapshots" => [Brightbox::Compute::Formats::Nested::IMAGE],
"interfaces" => [Brightbox::Compute::Formats::Nested::INTERFACE],
"zone" => Fog::Brightbox::Nullable::Zone
}
SERVER_TYPE = {
"id" => String,
"resource_type" => String,
"url" => String,
"handle" => Fog::Nullable::String,
"name" => String,
"status" => String,
"cores" => Integer,
"ram" => Integer,
"disk_size" => Integer
}
USER = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"email_address" => String,
"email_verified" => Fog::Boolean,
"accounts" => [Brightbox::Compute::Formats::Nested::ACCOUNT],
"default_account" => NilClass
}
ZONE = {
"id" => String,
"resource_type" => String,
"url" => String,
"handle" => Fog::Nullable::String
}
end
module Full
ACCOUNT = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"status" => String,
"address_1" => String,
"address_2" => String,
"city" => String,
"county" => String,
"postcode" => String,
"country_code" => String,
"country_name" => String,
"vat_registration_number" => Fog::Nullable::String,
"telephone_number" => String,
"telephone_verified" => Fog::Boolean,
"created_at" => String,
"ram_limit" => Integer,
"ram_used" => Integer,
"cloud_ips_limit" => Integer,
"cloud_ips_used" => Integer,
"load_balancers_limit" => Integer,
"load_balancers_used" => Integer,
"library_ftp_host" => String,
"library_ftp_user" => String,
"library_ftp_password" => Fog::Nullable::String,
"verified_telephone" => Fog::Nullable::String,
"verified_at" => Fog::Nullable::String,
"verified_ip" => Fog::Nullable::String,
"valid_credit_card" => Fog::Boolean,
"owner" => Brightbox::Compute::Formats::Nested::USER,
"users" => [Brightbox::Compute::Formats::Nested::USER],
"clients" => [Brightbox::Compute::Formats::Nested::API_CLIENT],
"servers" => [Brightbox::Compute::Formats::Nested::SERVER],
"images" => [Brightbox::Compute::Formats::Nested::IMAGE],
"zones" => [Brightbox::Compute::Formats::Nested::ZONE]
}
API_CLIENT = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"description" => String,
"revoked_at" => Fog::Nullable::String,
"secret" => Fog::Nullable::String,
"account" => Brightbox::Compute::Formats::Nested::ACCOUNT
}
CLOUD_IP = {
"id" => String,
"resource_type" => String,
"url" => String,
"public_ip" => String,
"status" => String,
"reverse_dns" => String,
"account" => Fog::Brightbox::Nullable::Account,
"interface" => Fog::Brightbox::Nullable::Interface,
"load_balancer" => Fog::Brightbox::Nullable::LoadBalancer,
"server" => Fog::Brightbox::Nullable::Server
}
IMAGE = {
"name" => String,
"created_at" => String,
"resource_type" => String,
"arch" => String,
"url" => String,
"id" => String,
"description" => String,
"source" => String,
"source_type" => String,
"status" => String,
"owner" => String, # Account ID not object
"public" => Fog::Boolean,
"official" => Fog::Boolean,
"compatibility_mode" => Fog::Boolean,
"virtual_size" => Integer,
"disk_size" => Integer,
"ancestor" => Fog::Brightbox::Nullable::Image
}
INTERFACE = {
"resource_type" => String,
"url" => String,
"id" => String,
"ipv4_address" => String,
"mac_address" => String,
"server" => Brightbox::Compute::Formats::Nested::SERVER
}
LOAD_BALANCER = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"status" => String,
"listeners" => [Brightbox::Compute::Formats::Struct::LB_LISTENER],
"policy" => String,
"healthcheck" => Brightbox::Compute::Formats::Struct::LB_HEALTHCHECK,
"created_at" => String,
"deleted_at" => Fog::Nullable::String,
"account" => Brightbox::Compute::Formats::Nested::ACCOUNT,
"nodes" => [Brightbox::Compute::Formats::Nested::SERVER],
"cloud_ips" => [Brightbox::Compute::Formats::Nested::CLOUD_IP]
}
SERVER = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"status" => String,
"hostname" => String,
"created_at" => String,
"started_at" => Fog::Nullable::String,
"deleted_at" => Fog::Nullable::String,
"user_data" => Fog::Nullable::String,
"console_url" => Fog::Nullable::String,
"console_token" => Fog::Nullable::String,
"console_token_expires" => Fog::Nullable::String,
"account" => Brightbox::Compute::Formats::Nested::ACCOUNT,
"server_type" => Brightbox::Compute::Formats::Nested::SERVER_TYPE,
"cloud_ips" => [Brightbox::Compute::Formats::Nested::CLOUD_IP],
"image" => Brightbox::Compute::Formats::Nested::IMAGE,
"snapshots" => [Brightbox::Compute::Formats::Nested::IMAGE],
"interfaces" => [Brightbox::Compute::Formats::Nested::INTERFACE],
"zone" => Brightbox::Compute::Formats::Nested::ZONE
}
SERVER_TYPE = {
"id" => String,
"resource_type" => String,
"url" => String,
"handle" => Fog::Nullable::String,
"name" => String,
"status" => String,
"cores" => Integer,
"ram" => Integer,
"disk_size" => Integer
}
USER = {
"id" => String,
"resource_type" => String,
"url" => String,
"name" => String,
"email_address" => String,
"email_verified" => Fog::Boolean,
"accounts" => [Brightbox::Compute::Formats::Nested::ACCOUNT],
"default_account" => Fog::Brightbox::Nullable::Account,
"ssh_key" => Fog::Nullable::String,
"messaging_pref" => Fog::Boolean
}
ZONE = {
"id" => String,
"resource_type" => String,
"url" => String,
"handle" => String
}
end
# Collection responses: arrays of resources in their "collected"
# (abbreviated) representation.
module Collection
API_CLIENTS = [Brightbox::Compute::Formats::Collected::API_CLIENT]
CLOUD_IPS = [Brightbox::Compute::Formats::Collected::CLOUD_IP]
IMAGES = [Brightbox::Compute::Formats::Collected::IMAGE]
LOAD_BALANCERS = [Brightbox::Compute::Formats::Collected::LOAD_BALANCER]
SERVERS = [Brightbox::Compute::Formats::Collected::SERVER]
SERVER_TYPES = [Brightbox::Compute::Formats::Collected::SERVER_TYPE]
USERS = [Brightbox::Compute::Formats::Collected::USER]
ZONES = [Brightbox::Compute::Formats::Collected::ZONE]
end
end
end
end
|
require "formula"

# Homebrew formula for the Communi desktop IRC client (HEAD-only build).
class CommuniDesktop < Formula
  homepage "https://github.com/communi/communi-desktop"
  head "https://github.com/communi/communi-desktop.git"

  depends_on "qt5" => :build
  depends_on "libcommuni"

  def install
    ENV.deparallelize # if your formula fails when building in parallel
    # NOTE(review): the explicit `git submodule update --init --recursive`
    # was removed — Homebrew's git download strategy checks out submodules
    # itself when cloning the HEAD repository.
    system "qmake"
    system "make", "install"
  end

  test do
    system "true"
  end
end
Don't need to call git submodule
require "formula"
# Homebrew formula for the Communi desktop IRC client (HEAD-only build;
# no stable release URL is declared).
class CommuniDesktop < Formula
homepage "https://github.com/communi/communi-desktop"
head "https://github.com/communi/communi-desktop.git"
depends_on "qt5" => :build
depends_on "libcommuni"
def install
ENV.deparallelize # if your formula fails when building in parallel
system "qmake"
system "make", "install"
end
test do
# minimal smoke test: nothing installable to assert on for a GUI app
system "true"
end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'components/version'

# Gem metadata for the `components` gem.
Gem::Specification.new do |spec|
  spec.name          = "components"
  spec.version       = Components::VERSION
  spec.authors       = ["Anton Pleshivtsev"]
  spec.email         = ["anton@emby.ru"]
  # "TODO:" markers in description/summary trigger `gem build` warnings;
  # use plain placeholder text until real copy is written.
  spec.description   = 'Write a gem description'
  spec.summary       = 'Write a gem summary'
  spec.homepage      = ""
  spec.license       = "MIT"

  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "rails", "~> 3.2"
  spec.add_development_dependency "angularjs-rails"
  spec.add_development_dependency "jquery-rails"
  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "jasminerice"
  spec.add_development_dependency "sass-rails"
  spec.add_development_dependency "compass-rails"
end
Remove the TODO placeholder notices from components.gemspec
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'components/version'

# Gem metadata for the `components` gem.
Gem::Specification.new do |spec|
  spec.name          = "components"
  spec.version       = Components::VERSION
  spec.authors       = ["Anton Pleshivtsev"]
  spec.email         = ["anton@emby.ru"]
  spec.description   = 'Write a gem description'
  # fixed: summary previously carried a stray leading space
  spec.summary       = 'Write a gem summary'
  spec.homepage      = ""
  spec.license       = "MIT"

  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "rails", "~> 3.2"
  spec.add_development_dependency "angularjs-rails"
  spec.add_development_dependency "jquery-rails"
  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "jasminerice"
  spec.add_development_dependency "sass-rails"
  spec.add_development_dependency "compass-rails"
end
|
#!/usr/bin/env ruby
require 'io/console'
# Backport of #clamp for Rubies older than 2.4.
# Defined on Comparable (rather than the deprecated Fixnum) so every
# comparable type gains the method, matching the guard on the same module.
unless Comparable.instance_methods.include?(:clamp)
  module Comparable
    # Returns self constrained to the inclusive range [min, max].
    # Raises ArgumentError when min > max, mirroring the core method.
    def clamp(min, max)
      if min > max
        raise ArgumentError, 'min argument must be smaller than max argument'
      end
      return min if self <= min
      return max if self >= max
      self
    end
  end
end
module Femto
# Terminal text editor: owns the buffer, cursor and undo history for a
# single file and runs the raw-mode keyboard loop.
class Editor
# Loads the file (empty string if absent) and detects the line separator:
# CRLF when the data contains one anywhere, otherwise LF.
def initialize(filename)
@filename = filename
data = read_file_data
@line_sep = data["\r\n"] || "\n"
@buffer = Buffer.new(lines_from_data(data))
@cursor = Cursor.new
@history = History.new
end
# Convenience constructor: build an editor for +filename+ and run it.
def self.open(filename)
new(filename).run
end
# Main loop: puts the terminal into raw mode, then repeatedly renders and
# handles one keystroke. Exits only via `quit` (Ctrl-Q).
def run
IO.console.raw do
reset_screen
loop do
render
handle_input
end
end
end
private
attr_reader :buffer, :blank_buffer, :cursor, :history, :line_sep, :filename
# Redraws the whole buffer and positions the terminal cursor.
def render
clear_screen
print buffer
ANSI.move_cursor(cursor.row, cursor.col)
end
# Reads one character and dispatches Emacs-style control keys; any other
# printable character is inserted at the cursor.
def handle_input
char = $stdin.getc
case char
when "\cq" then quit
when "\cs" then save
when "\cp" then up
when "\cn" then down
when "\cf" then right
when "\cb" then left
when "\ca" then line_home
when "\ce" then line_end
when "\ch" then backspace
when "\cd" then delete
when "\cu" then delete_before
when "\ck" then delete_after
when "\c_" then history_undo
when "\cr" then history_redo
when "\r" then enter
else
insert_char(char) if char =~ /[[:print:]]/
end
end
def quit
reset_screen
exit
end
def up
@cursor = cursor.up(buffer)
end
def down
@cursor = cursor.down(buffer)
end
def right
@cursor = cursor.right(buffer)
end
def left
@cursor = cursor.left(buffer)
end
# Deletes the character before the cursor; at column 0 it joins the current
# line onto the previous one and walks the cursor back across the join.
def backspace
return if cursor.beginning_of_file?
store_snapshot
if cursor.col == 0
cursor_left = buffer.lines[cursor.row].size + 1
@buffer = buffer.join_lines(cursor.row - 1)
cursor_left.times { @cursor = cursor.left(buffer) }
else
@buffer = buffer.delete_char(cursor.row, cursor.col - 1)
@cursor = cursor.left(buffer)
end
end
# Deletes the character under the cursor; at end of line it joins the next
# line onto this one. The cursor does not move.
def delete
return if cursor.end_of_file?(buffer)
store_snapshot
if cursor.end_of_line?(buffer)
@buffer = buffer.join_lines(cursor.row)
else
@buffer = buffer.delete_char(cursor.row, cursor.col)
end
end
# Serializes the buffer using the detected separator, ensuring exactly one
# trailing separator for non-empty content.
def data
data = buffer.lines.join(line_sep).chomp(line_sep)
data << line_sep unless data.empty?
data
end
def save
open(filename, 'w') {|f| f << data }
end
# Splits the current line at the cursor and moves to the new line's start.
def enter
store_snapshot
@buffer = buffer.break_line(cursor.row, cursor.col)
@cursor = cursor.enter(buffer)
end
# On the first undo from a live (unsnapshotted) state, record it without
# advancing so a subsequent redo can return here.
def history_undo
return unless history.can_undo?
store_snapshot(false) unless history.can_redo?
@buffer, @cursor = history.undo
end
def history_redo
return unless history.can_redo?
@buffer, @cursor = history.redo
end
def insert_char(char)
store_snapshot
@buffer = buffer.insert_char(char, cursor.row, cursor.col)
@cursor = cursor.right(buffer)
end
# Records the current [buffer, cursor] pair for undo/redo.
def store_snapshot(advance = true)
history.save([buffer, cursor], advance)
end
def line_home
@cursor = cursor.line_home
end
def line_end
@cursor = cursor.line_end(buffer)
end
def delete_before
store_snapshot
@buffer = buffer.delete_before(cursor.row, cursor.col)
line_home
end
def delete_after
store_snapshot
@buffer = buffer.delete_after(cursor.row, cursor.col)
end
def reset_screen
ANSI.move_cursor(0, 0)
ANSI.clear_screen
end
# Flicker-free redraw: instead of erasing, overwrite the previous frame
# with spaces, then remember a blank copy of the current frame.
def clear_screen
ANSI.move_cursor(0, 0)
if blank_buffer
print blank_buffer # overwrite screen with spaces
ANSI.move_cursor(0, 0)
end
blank_lines = buffer.lines.map {|line| ' ' * line.size }
@blank_buffer = Buffer.new(blank_lines)
end
def read_file_data
if File.exist?(filename)
File.read(filename)
else
''
end
end
# An empty file still yields one (empty) editable line.
def lines_from_data(data)
if data.empty?
['']
else
data.split(line_sep)
end
end
end
# Persistent-style line store: every mutating operation returns a brand-new
# Buffer and leaves the receiver untouched (enables cheap undo snapshots).
class Buffer
  attr_reader :lines

  def initialize(lines)
    @lines = lines
  end

  # Render every line with CRLF endings for the raw-mode terminal.
  def to_s
    lines.map {|text| "#{text}\r\n" }.join
  end

  def lines_count
    lines.size
  end

  def line_length(row)
    lines[row].size
  end

  def delete_char(row, col)
    with_copy {|copy| copy.lines[row].slice!(col) }
  end

  def insert_char(char, row, col)
    with_copy do |copy|
      copy.lines[row] ||= '' # in case the file is empty
      copy.lines[row].insert(col, char)
    end
  end

  # Split line +row+ into two lines at column +col+.
  def break_line(row, col)
    with_copy do |copy|
      head = copy.lines[row][0...col]
      tail = copy.lines[row][col..-1]
      copy.lines[row..row] = [head, tail]
    end
  end

  def delete_before(row, col)
    with_copy {|copy| copy.lines[row][0...col] = '' }
  end

  def delete_after(row, col)
    with_copy {|copy| copy.lines[row][col..-1] = '' }
  end

  # Merge line +row+ and the one below it into a single line.
  def join_lines(row)
    with_copy {|copy| copy.lines[row..row + 1] = copy.lines[row..row + 1].join }
  end

  private

  # Deep-copies the line strings, yields the copy for mutation, returns it.
  def with_copy
    duplicate = Buffer.new(lines.map(&:dup))
    yield duplicate
    duplicate
  end
end
# Value object for the caret position; every movement returns a new Cursor
# so positions can be stored in undo snapshots safely.
class Cursor
  attr_reader :row, :col

  def initialize(row = 0, col = 0)
    @row = row
    @col = col
  end

  def up(buffer)
    Cursor.new(row - 1, col).clamp(buffer)
  end

  def down(buffer)
    Cursor.new(row + 1, col).clamp(buffer)
  end

  # Moving right past the end of a line wraps to the start of the next one
  # (unless already on the final line).
  def right(buffer)
    if end_of_line?(buffer)
      final_line?(buffer) ? self : Cursor.new(row + 1, 0)
    else
      Cursor.new(row, col + 1)
    end
  end

  # Moving left past column 0 wraps to the end of the previous line
  # (unless already on the first line).
  def left(buffer)
    if col > 0
      Cursor.new(row, col - 1)
    elsif row == 0
      self
    else
      Cursor.new(row - 1, buffer.line_length(row - 1))
    end
  end

  # Snap the position into the buffer's bounds (row first, so the column
  # limit is taken from the clamped row). Mutates and returns self.
  def clamp(buffer)
    @row = row.clamp(0, buffer.lines_count - 1)
    @col = col.clamp(0, buffer.line_length(row))
    self
  end

  def enter(buffer)
    down(buffer).line_home
  end

  def line_home
    Cursor.new(row, 0)
  end

  def line_end(buffer)
    Cursor.new(row, buffer.line_length(row))
  end

  def end_of_line?(buffer)
    col == buffer.line_length(row)
  end

  def final_line?(buffer)
    row == buffer.lines_count - 1
  end

  def end_of_file?(buffer)
    final_line?(buffer) && end_of_line?(buffer)
  end

  def beginning_of_file?
    row == 0 && col == 0
  end
end
# Linear undo/redo timeline of editor snapshots ([buffer, cursor] pairs).
class History
  def initialize
    @snapshots = []
    @current = -1
  end

  # Record +data+ as the newest snapshot, discarding any redo branch first.
  # Pass advance = false to record without moving the pointer (used to
  # capture the live state just before the first undo).
  def save(data, advance = true)
    snapshots.slice!(current + 1..-1) # branching; purge redo history
    snapshots.push(data)
    @current += 1 if advance
  end

  def can_undo?
    !undo_snapshot.nil?
  end

  # Returns the snapshot at the pointer, then steps the pointer back.
  def undo
    snapshot = undo_snapshot
    @current -= 1
    snapshot
  end

  def can_redo?
    !redo_snapshot.nil?
  end

  # Returns the snapshot two ahead of the pointer, then steps forward.
  def redo
    snapshot = redo_snapshot
    @current += 1
    snapshot
  end

  private

  attr_reader :snapshots, :current

  def undo_snapshot
    return nil if current < 0
    snapshots[current]
  end

  def redo_snapshot
    snapshots[current + 2]
  end
end
# Minimal ANSI escape-sequence helpers for the editor's screen drawing.
module ANSI
  class << self
    # Erase from the cursor position to the end of the screen.
    def clear_screen
      print "\e[J"
    end

    # Move to 0-based (row, col); ANSI coordinates are 1-based, hence +1.
    def move_cursor(row, col)
      print "\e[#{row + 1};#{col + 1}H"
    end
  end
end
end
# Script entry point: expects the filename as the first CLI argument;
# ARGV.fetch raises IndexError when it is missing, producing the usage line.
if __FILE__ == $0
begin
Femto::Editor.open(ARGV.fetch(0))
rescue IndexError
puts "Usage: #$0 file"
end
end
Remove blank line among returns
#!/usr/bin/env ruby
require 'io/console'
# Backport of #clamp for Rubies older than 2.4.
# Defined on Comparable (rather than the deprecated Fixnum) so every
# comparable type gains the method, matching the guard on the same module.
unless Comparable.instance_methods.include?(:clamp)
  module Comparable
    # Returns self constrained to the inclusive range [min, max].
    # Raises ArgumentError when min > max, mirroring the core method.
    def clamp(min, max)
      if min > max
        raise ArgumentError, 'min argument must be smaller than max argument'
      end
      return min if self <= min
      return max if self >= max
      self
    end
  end
end
module Femto
# Terminal text editor: owns the buffer, cursor and undo history for a
# single file and runs the raw-mode keyboard loop.
class Editor
# Loads the file (empty string if absent) and detects the line separator:
# CRLF when the data contains one anywhere, otherwise LF.
def initialize(filename)
@filename = filename
data = read_file_data
@line_sep = data["\r\n"] || "\n"
@buffer = Buffer.new(lines_from_data(data))
@cursor = Cursor.new
@history = History.new
end
# Convenience constructor: build an editor for +filename+ and run it.
def self.open(filename)
new(filename).run
end
# Main loop: puts the terminal into raw mode, then repeatedly renders and
# handles one keystroke. Exits only via `quit` (Ctrl-Q).
def run
IO.console.raw do
reset_screen
loop do
render
handle_input
end
end
end
private
attr_reader :buffer, :blank_buffer, :cursor, :history, :line_sep, :filename
# Redraws the whole buffer and positions the terminal cursor.
def render
clear_screen
print buffer
ANSI.move_cursor(cursor.row, cursor.col)
end
# Reads one character and dispatches Emacs-style control keys; any other
# printable character is inserted at the cursor.
def handle_input
char = $stdin.getc
case char
when "\cq" then quit
when "\cs" then save
when "\cp" then up
when "\cn" then down
when "\cf" then right
when "\cb" then left
when "\ca" then line_home
when "\ce" then line_end
when "\ch" then backspace
when "\cd" then delete
when "\cu" then delete_before
when "\ck" then delete_after
when "\c_" then history_undo
when "\cr" then history_redo
when "\r" then enter
else
insert_char(char) if char =~ /[[:print:]]/
end
end
def quit
reset_screen
exit
end
def up
@cursor = cursor.up(buffer)
end
def down
@cursor = cursor.down(buffer)
end
def right
@cursor = cursor.right(buffer)
end
def left
@cursor = cursor.left(buffer)
end
# Deletes the character before the cursor; at column 0 it joins the current
# line onto the previous one and walks the cursor back across the join.
def backspace
return if cursor.beginning_of_file?
store_snapshot
if cursor.col == 0
cursor_left = buffer.lines[cursor.row].size + 1
@buffer = buffer.join_lines(cursor.row - 1)
cursor_left.times { @cursor = cursor.left(buffer) }
else
@buffer = buffer.delete_char(cursor.row, cursor.col - 1)
@cursor = cursor.left(buffer)
end
end
# Deletes the character under the cursor; at end of line it joins the next
# line onto this one. The cursor does not move.
def delete
return if cursor.end_of_file?(buffer)
store_snapshot
if cursor.end_of_line?(buffer)
@buffer = buffer.join_lines(cursor.row)
else
@buffer = buffer.delete_char(cursor.row, cursor.col)
end
end
# Serializes the buffer using the detected separator, ensuring exactly one
# trailing separator for non-empty content.
def data
data = buffer.lines.join(line_sep).chomp(line_sep)
data << line_sep unless data.empty?
data
end
def save
open(filename, 'w') {|f| f << data }
end
# Splits the current line at the cursor and moves to the new line's start.
def enter
store_snapshot
@buffer = buffer.break_line(cursor.row, cursor.col)
@cursor = cursor.enter(buffer)
end
# On the first undo from a live (unsnapshotted) state, record it without
# advancing so a subsequent redo can return here.
def history_undo
return unless history.can_undo?
store_snapshot(false) unless history.can_redo?
@buffer, @cursor = history.undo
end
def history_redo
return unless history.can_redo?
@buffer, @cursor = history.redo
end
def insert_char(char)
store_snapshot
@buffer = buffer.insert_char(char, cursor.row, cursor.col)
@cursor = cursor.right(buffer)
end
# Records the current [buffer, cursor] pair for undo/redo.
def store_snapshot(advance = true)
history.save([buffer, cursor], advance)
end
def line_home
@cursor = cursor.line_home
end
def line_end
@cursor = cursor.line_end(buffer)
end
def delete_before
store_snapshot
@buffer = buffer.delete_before(cursor.row, cursor.col)
line_home
end
def delete_after
store_snapshot
@buffer = buffer.delete_after(cursor.row, cursor.col)
end
def reset_screen
ANSI.move_cursor(0, 0)
ANSI.clear_screen
end
# Flicker-free redraw: instead of erasing, overwrite the previous frame
# with spaces, then remember a blank copy of the current frame.
def clear_screen
ANSI.move_cursor(0, 0)
if blank_buffer
print blank_buffer # overwrite screen with spaces
ANSI.move_cursor(0, 0)
end
blank_lines = buffer.lines.map {|line| ' ' * line.size }
@blank_buffer = Buffer.new(blank_lines)
end
def read_file_data
if File.exist?(filename)
File.read(filename)
else
''
end
end
# An empty file still yields one (empty) editable line.
def lines_from_data(data)
if data.empty?
['']
else
data.split(line_sep)
end
end
end
# Persistent-style line store: every mutating operation returns a brand-new
# Buffer and leaves the receiver untouched (enables cheap undo snapshots).
class Buffer
  attr_reader :lines

  def initialize(lines)
    @lines = lines
  end

  # Render every line with CRLF endings for the raw-mode terminal.
  def to_s
    lines.map {|text| "#{text}\r\n" }.join
  end

  def lines_count
    lines.size
  end

  def line_length(row)
    lines[row].size
  end

  def delete_char(row, col)
    with_copy {|copy| copy.lines[row].slice!(col) }
  end

  def insert_char(char, row, col)
    with_copy do |copy|
      copy.lines[row] ||= '' # in case the file is empty
      copy.lines[row].insert(col, char)
    end
  end

  # Split line +row+ into two lines at column +col+.
  def break_line(row, col)
    with_copy do |copy|
      head = copy.lines[row][0...col]
      tail = copy.lines[row][col..-1]
      copy.lines[row..row] = [head, tail]
    end
  end

  def delete_before(row, col)
    with_copy {|copy| copy.lines[row][0...col] = '' }
  end

  def delete_after(row, col)
    with_copy {|copy| copy.lines[row][col..-1] = '' }
  end

  # Merge line +row+ and the one below it into a single line.
  def join_lines(row)
    with_copy {|copy| copy.lines[row..row + 1] = copy.lines[row..row + 1].join }
  end

  private

  # Deep-copies the line strings, yields the copy for mutation, returns it.
  def with_copy
    duplicate = Buffer.new(lines.map(&:dup))
    yield duplicate
    duplicate
  end
end
# Value object for the caret position; every movement returns a new Cursor
# so positions can be stored in undo snapshots safely.
class Cursor
  attr_reader :row, :col

  def initialize(row = 0, col = 0)
    @row = row
    @col = col
  end

  def up(buffer)
    Cursor.new(row - 1, col).clamp(buffer)
  end

  def down(buffer)
    Cursor.new(row + 1, col).clamp(buffer)
  end

  # Moving right past the end of a line wraps to the start of the next one
  # (unless already on the final line).
  def right(buffer)
    if end_of_line?(buffer)
      final_line?(buffer) ? self : Cursor.new(row + 1, 0)
    else
      Cursor.new(row, col + 1)
    end
  end

  # Moving left past column 0 wraps to the end of the previous line
  # (unless already on the first line).
  def left(buffer)
    if col > 0
      Cursor.new(row, col - 1)
    elsif row == 0
      self
    else
      Cursor.new(row - 1, buffer.line_length(row - 1))
    end
  end

  # Snap the position into the buffer's bounds (row first, so the column
  # limit is taken from the clamped row). Mutates and returns self.
  def clamp(buffer)
    @row = row.clamp(0, buffer.lines_count - 1)
    @col = col.clamp(0, buffer.line_length(row))
    self
  end

  def enter(buffer)
    down(buffer).line_home
  end

  def line_home
    Cursor.new(row, 0)
  end

  def line_end(buffer)
    Cursor.new(row, buffer.line_length(row))
  end

  def end_of_line?(buffer)
    col == buffer.line_length(row)
  end

  def final_line?(buffer)
    row == buffer.lines_count - 1
  end

  def end_of_file?(buffer)
    final_line?(buffer) && end_of_line?(buffer)
  end

  def beginning_of_file?
    row == 0 && col == 0
  end
end
# Linear undo/redo timeline of editor snapshots ([buffer, cursor] pairs).
class History
  def initialize
    @snapshots = []
    @current = -1
  end

  # Record +data+ as the newest snapshot, discarding any redo branch first.
  # Pass advance = false to record without moving the pointer (used to
  # capture the live state just before the first undo).
  def save(data, advance = true)
    snapshots.slice!(current + 1..-1) # branching; purge redo history
    snapshots.push(data)
    @current += 1 if advance
  end

  def can_undo?
    !undo_snapshot.nil?
  end

  # Returns the snapshot at the pointer, then steps the pointer back.
  def undo
    snapshot = undo_snapshot
    @current -= 1
    snapshot
  end

  def can_redo?
    !redo_snapshot.nil?
  end

  # Returns the snapshot two ahead of the pointer, then steps forward.
  def redo
    snapshot = redo_snapshot
    @current += 1
    snapshot
  end

  private

  attr_reader :snapshots, :current

  def undo_snapshot
    return nil if current < 0
    snapshots[current]
  end

  def redo_snapshot
    snapshots[current + 2]
  end
end
# Minimal ANSI escape-sequence helpers for the editor's screen drawing.
module ANSI
  class << self
    # Erase from the cursor position to the end of the screen.
    def clear_screen
      print "\e[J"
    end

    # Move to 0-based (row, col); ANSI coordinates are 1-based, hence +1.
    def move_cursor(row, col)
      print "\e[#{row + 1};#{col + 1}H"
    end
  end
end
end
# Script entry point: expects the filename as the first CLI argument;
# ARGV.fetch raises IndexError when it is missing, producing the usage line.
if __FILE__ == $0
begin
Femto::Editor.open(ARGV.fetch(0))
rescue IndexError
puts "Usage: #$0 file"
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE
# Instead, edit Jeweler::Tasks in Rakefile, and run `rake gemspec`
# -*- encoding: utf-8 -*-
# Gem metadata for esv 0.1.2 — jeweler-generated; regenerate with
# `rake gemspec` rather than hand-editing (see the header comment above).
Gem::Specification.new do |s|
s.name = %q{esv}
s.version = "0.1.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Geoffrey Dagley"]
s.date = %q{2009-09-08}
s.email = %q{gdagley@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"esv.gemspec",
"lib/esv.rb",
"lib/esv/bible.rb",
"lib/esv/reading_plan.rb",
"spec/esv/bible_spec.rb",
"spec/esv/reading_plan_spec.rb",
"spec/esv_spec.rb",
"spec/fixtures/passage_query.html",
"spec/fixtures/passage_query.xml",
"spec/fixtures/reading_plan_info.xml",
"spec/fixtures/reading_plan_query.html",
"spec/fixtures/reading_plan_query.xml",
"spec/spec_helper.rb"
]
s.homepage = %q{http://github.com/gdagley/esv}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.5}
s.summary = %q{Wrapper for English Standard Version (ESV) Bible Web Service. See ESV API docs http://www.esvapi.org/}
s.test_files = [
"spec/esv/bible_spec.rb",
"spec/esv/reading_plan_spec.rb",
"spec/esv_spec.rb",
"spec/spec_helper.rb"
]
# Dependency declarations branch on the RubyGems version loading this spec.
# NOTE(review): Gem::RubyGemsVersion is deprecated in modern RubyGems
# (Gem::VERSION replaced it) — confirm before running on a current Ruby.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<httparty>, [">= 0"])
s.add_development_dependency(%q<rspec>, [">= 0"])
s.add_development_dependency(%q<fakeweb>, [">= 0"])
else
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<rspec>, [">= 0"])
s.add_dependency(%q<fakeweb>, [">= 0"])
end
else
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<rspec>, [">= 0"])
s.add_dependency(%q<fakeweb>, [">= 0"])
end
end
Update gemspec for 0.1.3: add the esv and esv_daily_verse executables
# Generated by jeweler
# DO NOT EDIT THIS FILE
# Instead, edit Jeweler::Tasks in Rakefile, and run `rake gemspec`
# -*- encoding: utf-8 -*-
# Gem metadata for esv 0.1.3 — jeweler-generated; regenerate with
# `rake gemspec` rather than hand-editing (see the header comment above).
# This release adds the esv and esv_daily_verse executables.
Gem::Specification.new do |s|
s.name = %q{esv}
s.version = "0.1.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Geoffrey Dagley"]
s.date = %q{2009-09-16}
s.email = %q{gdagley@gmail.com}
s.executables = ["esv", "esv_daily_verse"]
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"bin/esv",
"bin/esv_daily_verse",
"esv.gemspec",
"lib/esv.rb",
"lib/esv/bible.rb",
"lib/esv/reading_plan.rb",
"spec/esv/bible_spec.rb",
"spec/esv/reading_plan_spec.rb",
"spec/esv_spec.rb",
"spec/fixtures/passage_query.html",
"spec/fixtures/passage_query.xml",
"spec/fixtures/reading_plan_info.xml",
"spec/fixtures/reading_plan_query.html",
"spec/fixtures/reading_plan_query.xml",
"spec/spec_helper.rb"
]
s.homepage = %q{http://github.com/gdagley/esv}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.5}
s.summary = %q{Wrapper for English Standard Version (ESV) Bible Web Service. See ESV API docs http://www.esvapi.org/}
s.test_files = [
"spec/esv/bible_spec.rb",
"spec/esv/reading_plan_spec.rb",
"spec/esv_spec.rb",
"spec/spec_helper.rb"
]
# Dependency declarations branch on the RubyGems version loading this spec.
# NOTE(review): Gem::RubyGemsVersion is deprecated in modern RubyGems
# (Gem::VERSION replaced it) — confirm before running on a current Ruby.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<httparty>, [">= 0"])
s.add_development_dependency(%q<rspec>, [">= 0"])
s.add_development_dependency(%q<fakeweb>, [">= 0"])
else
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<rspec>, [">= 0"])
s.add_dependency(%q<fakeweb>, [">= 0"])
end
else
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<rspec>, [">= 0"])
s.add_dependency(%q<fakeweb>, [">= 0"])
end
end
|
Import Formula: QGIS
Formula imported from Sharpie/homebrew-science.
require 'formula'
# True when the user requested GRASS support on the command line.
def grass?
  ARGV.include?("--with-grass")
end
# True when the user requested PostGIS support on the command line.
def postgis?
  ARGV.include?("--with-postgis")
end
# Shells out to the system `python` for its MAJOR.MINOR version string
# (e.g. "2.7"); used to build the site-packages path.
def py_version
`python -c 'import sys;print sys.version[:3]'`.chomp
end
# QWT 6.x has an insane build system---can't use the framework files it
# produces as they don't link properly. So, we use an internal static brew of
# QWT 5.2.2.
# Brewed statically and privately inside Qgis#install (see Qwt52.new.brew);
# never installed into the Homebrew prefix.
class Qwt52 < Formula
url 'http://sourceforge.net/projects/qwt/files/qwt/5.2.2/qwt-5.2.2.tar.bz2'
homepage 'http://qwt.sourceforge.net'
md5 '70d77e4008a6cc86763737f0f24726ca'
end
# QGIS requires a newer version of bison than OS X provides.
# Built privately inside Qgis#install (see internal_bison); not installed
# into the Homebrew prefix.
class Bison < Formula
url 'http://ftpmirror.gnu.org/bison/bison-2.4.3.tar.bz2'
homepage 'http://www.gnu.org/software/bison/'
md5 'c1d3ea81bc370dbd43b6f0b2cd21287e'
end
# Homebrew formula for QGIS 1.7.4, built as an app bundle against a private
# static QWT 5.2.2 and a private bison (see the helper formulae above).
class Qgis < Formula
homepage 'http://www.qgis.org'
url 'http://qgis.org/downloads/qgis-1.7.4.tar.bz2'
md5 'ad6e2bd8c5eb0c486939c420af5d8c44'
head 'https://github.com/qgis/Quantum-GIS.git', :branch => 'master'
devel do
url 'https://github.com/qgis/Quantum-GIS.git', :branch => 'release-1_8'
version '1.8dev'
end
def options
[
['--with-grass', 'Build support for GRASS GIS.'],
['--with-postgis', 'Build support for PostGIS databases.']
]
end
depends_on 'cmake' => :build
depends_on 'gsl'
depends_on 'PyQt'
depends_on 'gdal'
depends_on 'grass' if grass?
depends_on 'gettext' if grass? # For libintl
depends_on 'postgis' if postgis?
fails_with :clang do
build 318
# typo fixed: "ostrem" -> "ostream"
cause 'Cannot resolve std::ostream<< in SpatialIndex.h'
end
def install
internal_qwt = Pathname.new(Dir.getwd) + 'qwt52'
internal_bison = Pathname.new(Dir.getwd) + 'bison'
# Build a private bison new enough for QGIS's grammar files.
Bison.new.brew do
system "./configure", "--prefix=#{internal_bison}", "--disable-debug", "--disable-dependency-tracking"
system 'make install'
end
Qwt52.new.brew do
inreplace 'qwtconfig.pri' do |s|
# change_make_var won't work because there are leading spaces
s.gsub! /^\s*QWT_INSTALL_PREFIX\s*=(.*)$/, "QWT_INSTALL_PREFIX=#{internal_qwt}"
s.gsub! /^\s*INSTALLBASE\s*=(.*)$/, "INSTALLBASE=#{internal_qwt}"
# Removing the `QwtDll` config option will cause Qwt to build as a
# static library. We could build dynamic, but we would need to hit the
# results with `install_name_tool` to make sure the paths are right. As
# the QGIS main executable seems to be the only thing that links
# against this, I'm keeping it simple with a static lib.
s.gsub! /^(\s*CONFIG.*QwtDll)$/, ''
end
system 'qmake -spec macx-g++ -config release'
system 'make install'
end
cmake_args = std_cmake_parameters.split
cmake_args.concat %W[
-DQWT_INCLUDE_DIR=#{internal_qwt}/include
-DQWT_LIBRARY=#{internal_qwt}/lib/libqwt.a
-DBISON_EXECUTABLE=#{internal_bison}/bin/bison
]
if grass?
grass = Formula.factory 'grass'
gettext = Formula.factory 'gettext'
cmake_args << "-DGRASS_PREFIX=#{Dir[grass.prefix + 'grass-*']}"
# So that `libintl.h` can be found
ENV.append 'CXXFLAGS', "-I#{gettext.include}"
end
Dir.mkdir 'build'
Dir.chdir 'build' do
system 'cmake', '..', *cmake_args
system 'make install'
end
# Symlink the PyQGIS Python module somewhere convenient for users to put on
# their PYTHONPATH
py_lib = lib + "python#{py_version}/site-packages"
qgis_modules = prefix + 'QGIS.app/Contents/Resources/python/qgis'
py_lib.mkpath
ln_s qgis_modules, py_lib + 'qgis'
# Create script to launch QGIS app
(bin + 'qgis').write <<-EOS.undent
#!/bin/sh
# Ensure Python modules can be found when QGIS is running.
env PYTHONPATH=#{HOMEBREW_PREFIX}/lib/python#{py_version}/site-packages:$PYTHONPATH\\
open #{prefix}/QGIS.app
EOS
end
def caveats
<<-EOS
QGIS has been built as an application bundle. To make it easily available, a
wrapper script has been written that launches the app with environment
variables set so that Python modules will be functional:
qgis
The QGIS python modules have been symlinked to:
#{HOMEBREW_PREFIX}/lib/python#{py_version}/site-packages
If you are interested in PyQGIS development and are not using the Homebrew
Python formula, then you will need to ensure this directory is on your
PYTHONPATH.
EOS
end
end
|
# Homebrew formula for FREEC 7.2, distributed as a source tarball by
# Institut Curie (bioinfo.curie.fr).
class Freec < Formula
homepage "http://bioinfo.curie.fr/projects/freec/"
# tag "bioinformatics"
# doi "10.1093/bioinformatics/btr670"
url "http://bioinfo.curie.fr/projects/freec/src/FREEC_Linux64.tar.gz"
sha256 "dd8c0768ea0ed5bd36169fa68f9a3f48dd6f15889b9a60c7977b27bdb6da995d"
# version is declared explicitly because it cannot be parsed from the URL
version "7.2"
def install
# FAQ #20 Mac OS X building: http://bioinfo.curie.fr/projects/freec/FAQ.html
# values.h is not available on macOS; limits.h is the replacement header.
if OS.mac?
inreplace "myFunc.cpp", "values.h", "limits.h"
end
system "make"
bin.install "freec"
end
test do
# running freec with no arguments should print its version banner
assert_match "FREEC v#{version}", shell_output("freec 2>&1")
end
end
freec: add 7.2 bottle.
# Homebrew formula for FREEC 7.2, distributed as a source tarball by
# Institut Curie (bioinfo.curie.fr). This revision adds pre-built bottles.
class Freec < Formula
homepage "http://bioinfo.curie.fr/projects/freec/"
# tag "bioinformatics"
# doi "10.1093/bioinformatics/btr670"
url "http://bioinfo.curie.fr/projects/freec/src/FREEC_Linux64.tar.gz"
sha256 "dd8c0768ea0ed5bd36169fa68f9a3f48dd6f15889b9a60c7977b27bdb6da995d"
# version is declared explicitly because it cannot be parsed from the URL
version "7.2"
# Pre-built binary bottles hosted on the homebrew-science Bintray repo.
bottle do
root_url "https://homebrew.bintray.com/bottles-science"
cellar :any
sha256 "af7558fda0442a9c242abeb6a9492d82926197f14b31b3e0059a067189e1ae93" => :yosemite
sha256 "f02914ae0075e54a4378d771f9dd5a98aa67da035606040b707758f9ead7163d" => :mavericks
sha256 "d7571b435829f2f7356cefdf542cd4563f5e0df038673ce201ab7237bc3ff73b" => :mountain_lion
end
def install
# FAQ #20 Mac OS X building: http://bioinfo.curie.fr/projects/freec/FAQ.html
# values.h is not available on macOS; limits.h is the replacement header.
if OS.mac?
inreplace "myFunc.cpp", "values.h", "limits.h"
end
system "make"
bin.install "freec"
end
test do
# running freec with no arguments should print its version banner
assert_match "FREEC v#{version}", shell_output("freec 2>&1")
end
end
|
require 'rubygems'
require 'bud'
require 'progress_timer'
require 'membership'
# Protocol marker module for Raft; currently declares no interface of its own.
module RaftProtocol
end
# Raft leader election implemented in Bloom (bud). Declares the RPC channels
# and state tables from Figure 2 of the Raft paper, plus rules for election
# timeouts, vote casting/counting, and stepping down on stale terms.
# NOTE(review): the bare `stdio <~ ...` lines look like debug tracing.
module Raft
include RaftProtocol
include StaticMembership
import ProgressTimer => :timer
state do
# see Figure 2 in Raft paper to see definitions of RPCs
# TODO: do we need from field in responses?
channel :request_vote_request, [:@dest, :from, :term, :last_log_index, :last_log_term]
channel :request_vote_response, [:@dest, :from, :term, :is_granted]
channel :append_entries_request, [:@dest, :from, :term, :prev_log_index, :prev_log_term, :request_entry, :commit_index]
channel :append_entries_response, [:@dest, :from, :term, :is_success]
# all of the members in the system, host is respective ip_port
table :members, [:host]
table :server_state, [] => [:state]
table :current_term, [] => [:term]
scratch :max_term, [] => [:term]
# server we voted for in current term
table :voted_for, [:term] => [:candidate]
scratch :voted_for_in_current_term, [] => [:candidate]
scratch :voted_for_in_current_step, [] => [:candidate]
# keep record of all votes
table :votes, [:term, :from] => [:is_granted]
scratch :votes_granted_in_current_term, [:from]
scratch :request_vote_term_max, current_term.schema
end
# TODO: is <= right to update an empty key in a table? does it overwrite or result in error?
bootstrap do
# add all the members of the system except yourself
# TODO: create mechanism to add all members programatically
members <= [['localhost:54321'], ['localhost:54322'], ['localhost:54323'], ['localhost:54324'], ['localhost:54325']]
# TODO: is this going to work to remove yourself? need it to happen now, not later
members <- [[ip_port]]
server_state <= [['follower']]
current_term <= [[1]]
# start the timer with random timeout between 100-500 ms
timer.set_alarm <= [[100 + rand(400)]]
end
bloom :step_down do
# if we discover our term is stale, step down to follower and update our term
# TODO: do we have to reset timer if we revert to follower here?
# NOTE(review): `or`/`and` precedence groups this as
# (candidate) OR (leader AND stale-term) — confirm the intended grouping
# is ((candidate OR leader) AND stale-term). Same pattern below.
server_state <+- (server_state * request_vote_response * current_term).combos do |s, v, t|
['follower'] if s.state == 'candidate' or s.state == 'leader' and v.term > t.term
end
max_term <= request_vote_response.argmax([:term], :term) {|v| [v.term]}
current_term <+- (max_term * current_term).pairs do |m,c|
[m.term] if m.term > c.term
end
# same stale-term check as above, but driven by incoming vote requests
server_state <+- (server_state * request_vote_request * current_term).combos do |s, v, t|
['follower'] if s.state == 'candidate' or s.state == 'leader' and v.term > t.term
end
max_term <= request_vote_request.argmax([:term], :term) {|v| [v.term]}
current_term <+- (max_term * current_term).pairs do |m,c|
[m.term] if m.term > c.term
end
end
bloom :timeout do
stdio <~ [["timeout"]]
# increment current term
current_term <+- (timer.alarm * current_term).pairs {|a,t| [t.term + 1]}
# transition to candidate state
server_state <+- timer.alarm {|t| [['candidate']]}
# vote for yourself
votes <= (timer.alarm * current_term).pairs {|a,t| [t.term, ip_port, true]}
# reset the alarm
timer.set_alarm <= timer.alarm {|a| [100 + rand(400)]}
# send out request vote RPCs
request_vote_request <~ (timer.alarm * members * current_term).combos do |a,m,t|
# TODO: put actual indices in here after we implement logs
[m.host, ip_port, t.term, 0, 0]
end
stdio <~ [["end timeout"]]
end
# TODO: this might need to be done if we have to continually send if we don't get response
bloom :wait_for_vote_responses do
end
# TODO: have to change names of max_term and current_term and integrate because we are doing the same thing for vote_counting and vote_casting but on diff channels, maybe make a block for that?
bloom :vote_counting do
stdio <~ [["begin vote_counting"]]
# record votes if we are in the correct term
votes <= (server_state * request_vote_response * current_term).combos do |s, v, t|
[v.term, v.from, v.is_granted] if s.state == 'candidate' and v.term == t.term
end
# store votes granted in the current term
votes_granted_in_current_term <+ (server_state * votes * current_term).combos(votes.term => current_term.term) do |s, v, t|
[v.from] if s.state == 'candidate' and v.is_granted
end
# if we have the majority of votes, then we are leader
server_state <+- (server_state * votes_granted_in_current_term).pairs do |s, v|
['leader'] if s.state == 'candidate' and votes_granted_in_current_term.count > (members.count/2)
end
stdio <~ [["end vote_counting"]]
end
bloom :vote_casting do
stdio <~ [["begin vote_casting"]]
# TODO: if voted_for in current term is null AND the candidate's log is at least as complete as our local log, then grant our vote, reject others, and reset the election timeout
voted_for_in_current_term <= (voted_for * current_term).pairs(:term => :term) {|v, t| [v.candidate]}
# pick one request per step to consider granting
voted_for_in_current_step <= request_vote_request.argagg(:choose, :dest, [])
request_vote_response <~ (request_vote_request * voted_for_in_current_step * current_term).combos do |r, v, t|
if r.from == v.candidate and voted_for_in_current_term.count == 0
[r.from, ip_port, t.term, true]
else
[r.from, ip_port, t.term, false]
end
end
# granting a vote resets the election timeout
timer.set_alarm <= (request_vote_request * voted_for_in_current_step * current_term).combos do |r, v, t|
[100 + rand(400)] if r.from == v.candidate and voted_for_in_current_term.count == 0
end
voted_for <+ (voted_for_in_current_step * current_term).pairs do |v, t|
[t.term, v.candidate] if voted_for_in_current_term.count == 0
end
stdio <~ [["end vote_casting"]]
end
bloom :send_heartbeats do
end
end
arg switch back
require 'rubygems'
require 'bud'
require 'progress_timer'
require 'membership'
module RaftProtocol
end
# Raft leader election implemented in Bloom/Bud. RPC shapes follow Figure 2
# of the Raft paper (Ongaro & Ousterhout). Only election is implemented;
# log replication and heartbeats are TODO.
module Raft
  include RaftProtocol
  include StaticMembership
  import ProgressTimer => :timer

  state do
    # see Figure 2 in Raft paper to see definitions of RPCs
    # TODO: do we need from field in responses?
    channel :request_vote_request, [:@dest, :from, :term, :last_log_index, :last_log_term]
    channel :request_vote_response, [:@dest, :from, :term, :is_granted]
    channel :append_entries_request, [:@dest, :from, :term, :prev_log_index, :prev_log_term, :request_entry, :commit_index]
    channel :append_entries_response, [:@dest, :from, :term, :is_success]
    # all of the members in the system, host is respective ip_port
    table :members, [:host]
    # singleton collections (empty key): this server's role and term
    table :server_state, [] => [:state]
    table :current_term, [] => [:term]
    scratch :max_term, [] => [:term]
    # server we voted for in current term
    table :voted_for, [:term] => [:candidate]
    scratch :voted_for_in_current_term, [] => [:candidate]
    scratch :voted_for_in_current_step, [] => [:candidate]
    # keep record of all votes
    table :votes, [:term, :from] => [:is_granted]
    scratch :votes_granted_in_current_term, [:from]
    scratch :request_vote_term_max, current_term.schema
  end

  # TODO: is <= right to update an empty key in a table? does it overwrite or result in error?
  bootstrap do
    # add all the members of the system except yourself
    # TODO: create mechanism to add all members programatically
    members <= [['localhost:54321'], ['localhost:54322'], ['localhost:54323'], ['localhost:54324'], ['localhost:54325']]
    # TODO: is this going to work to remove yourself? need it to happen now, not later
    members <- [[ip_port]]
    server_state <= [['follower']]
    current_term <= [[1]]
    # start the timer with random timeout between 100-500 ms
    timer.set_alarm <= [[100 + rand(400)]]
  end

  bloom :step_down do
    # if we discover our term is stale, step down to follower and update our term
    # NOTE: `or` and `and` share the same low precedence in Ruby, so this parses
    # as ((candidate or leader) and v.term > t.term) — the intended reading.
    # TODO: do we have to reset timer if we revert to follower here?
    server_state <+- (server_state * request_vote_response * current_term).combos do |s, v, t|
      ['follower'] if s.state == 'candidate' or s.state == 'leader' and v.term > t.term
    end
    max_term <= request_vote_response.argmax([:term], :term) {|v| [v.term]}
    current_term <+- (max_term * current_term).pairs do |m,c|
      [m.term] if m.term > c.term
    end
    # same stale-term handling, but driven by incoming vote *requests*
    server_state <+- (server_state * request_vote_request * current_term).combos do |s, v, t|
      ['follower'] if s.state == 'candidate' or s.state == 'leader' and v.term > t.term
    end
    max_term <= request_vote_request.argmax([:term], :term) {|v| [v.term]}
    current_term <+- (max_term * current_term).pairs do |m,c|
      [m.term] if m.term > c.term
    end
  end

  bloom :timeout do
    stdio <~ [["timeout"]]
    # increment current term
    current_term <+- (timer.alarm * current_term).pairs {|a,t| [t.term + 1]}
    # transition to candidate state
    # BUG FIX: a mapped block yields ONE tuple; the previous [['candidate']]
    # stored the array ['candidate'] in the :state column, which broke every
    # s.state == 'candidate' comparison elsewhere in this module.
    server_state <+- timer.alarm {|t| ['candidate']}
    # vote for yourself
    votes <= (timer.alarm * current_term).pairs {|a,t| [t.term, ip_port, true]}
    # reset the alarm
    timer.set_alarm <= timer.alarm {|a| [100 + rand(400)]}
    # send out request vote RPCs
    request_vote_request <~ (timer.alarm * members * current_term).combos do |a,m,t|
      # TODO: put actual indicies in here after we implement logs
      [m.host, ip_port, t.term, 0, 0]
    end
    stdio <~ [["end timeout"]]
  end

  # TODO: this might need to be done if we have to continually send if we don't get response
  bloom :wait_for_vote_responses do
  end

  # TODO: have to change names of max_term and current_term and integrate because we are doing the same thing for vote_counting and vote_casting but on diff channels, maybe make a block for that?
  bloom :vote_counting do
    stdio <~ [["begin vote_counting"]]
    # record votes if we are in the correct term
    votes <= (server_state * request_vote_response * current_term).combos do |s, v, t|
      [v.term, v.from, v.is_granted] if s.state == 'candidate' and v.term == t.term
    end
    # store votes granted in the current term
    votes_granted_in_current_term <+ (server_state * votes * current_term).combos(votes.term => current_term.term) do |s, v, t|
      [v.from] if s.state == 'candidate' and v.is_granted
    end
    # if we have the majority of votes, then we are leader
    # (members.count/2 is integer division, so `>` is a strict-majority test)
    server_state <+- (server_state * votes_granted_in_current_term).pairs do |s, v|
      ['leader'] if s.state == 'candidate' and votes_granted_in_current_term.count > (members.count/2)
    end
    stdio <~ [["end vote_counting"]]
  end

  bloom :vote_casting do
    stdio <~ [["begin vote_casting"]]
    # TODO: if voted_for in current term is null AND the candidate's log is at least as complete as our local log, then grant our vote, reject others, and reset the election timeout
    voted_for_in_current_term <= (voted_for * current_term).pairs(:term => :term) {|v, t| [v.candidate]}
    # Pick exactly one request per timestep to consider.
    # NOTE(review): argagg returns full request_vote_request tuples, which do
    # not match voted_for_in_current_step's [] => [:candidate] schema — verify.
    voted_for_in_current_step <= request_vote_request.argagg(:choose, [], :dest)
    request_vote_response <~ (request_vote_request * voted_for_in_current_step * current_term).combos do |r, v, t|
      if r.from == v.candidate and voted_for_in_current_term.count == 0
        [r.from, ip_port, t.term, true]
      else
        [r.from, ip_port, t.term, false]
      end
    end
    # granting a vote also resets the election timeout (100-500 ms)
    timer.set_alarm <= (request_vote_request * voted_for_in_current_step * current_term).combos do |r, v, t|
      [100 + rand(400)] if r.from == v.candidate and voted_for_in_current_term.count == 0
    end
    voted_for <+ (voted_for_in_current_step * current_term).pairs do |v, t|
      [t.term, v.candidate] if voted_for_in_current_term.count == 0
    end
    stdio <~ [["end vote_casting"]]
  end

  # NOTE(review): leader AppendEntries heartbeats are not implemented yet.
  bloom :send_heartbeats do
  end
end
|
require 'formula'
# Command-line switches: each predicate reports whether the corresponding
# --enable-* flag was passed on the command line.
def cxx?
  ARGV.include?('--enable-cxx')
end

def fortran?
  ARGV.include?('--enable-fortran')
end

def java?
  ARGV.include?('--enable-java')
end

def objc?
  ARGV.include?('--enable-objc')
end

def objcxx?
  ARGV.include?('--enable-objcxx')
end

def build_everything?
  ARGV.include?('--enable-all-languages')
end

def nls?
  ARGV.include?('--enable-nls')
end

def profiledbuild?
  ARGV.include?('--enable-profiled-build')
end
# Prebuilt Eclipse Compiler for Java jar, fetched as-is (no build step);
# consumed by the Gcc45 formula below when Java support is enabled.
class Ecj < Formula
# Little Known Fact: ecj, the Eclipse Java Compiler, is required in order to
# produce a gcj compiler that can actually parse Java source code.
url 'ftp://sourceware.org/pub/java/ecj-4.5.jar'
sha1 '58c1d79c64c8cd718550f32a932ccfde8d1e6449'
end
# Homebrew formula for GCC 4.5, sandboxed under #{prefix}/gcc so it can
# coexist with other GCC installations (binaries get a -4.5 suffix).
class Gcc45 < Formula
  homepage 'http://gcc.gnu.org'
  url 'http://ftpmirror.gnu.org/gcc/gcc-4.5.3/gcc-4.5.3.tar.bz2'
  mirror 'http://ftp.gnu.org/gnu/gcc/gcc-4.5.3/gcc-4.5.3.tar.bz2'
  sha1 '73c45dfda5eef6b124be53e56828b5925198cc1b'

  depends_on 'gmp'
  depends_on 'libmpc'
  depends_on 'mpfr'

  # User-selectable build options; the matching predicates (cxx?, java?, ...)
  # are defined at the top of this file.
  def options
    [
      ['--enable-cxx', 'Build the g++ compiler'],
      ['--enable-fortran', 'Build the gfortran compiler'],
      # typo fix: "Buld" -> "Build" in the user-visible description
      ['--enable-java', 'Build the gcj compiler'],
      ['--enable-objc', 'Enable Objective-C language support'],
      ['--enable-objcxx', 'Enable Objective-C++ language support'],
      ['--enable-all-languages', 'Enable all compilers and languages, except Ada'],
      ['--enable-nls', 'Build with natural language support'],
      ['--enable-profiled-build', 'Make use of profile guided optimization when bootstrapping GCC']
    ]
  end

  # Dont strip compilers.
  skip_clean :all

  def install
    # Force 64-bit on systems that use it. Build failures reported for some
    # systems when this is not done.
    ENV.m64 if MacOS.prefer_64_bit?
    # GCC will suffer build errors if forced to use a particular linker.
    ENV.delete 'LD'
    # This is required on systems running a version newer than 10.6, and
    # it's probably a good idea regardless.
    #
    # https://trac.macports.org/ticket/27237
    ENV.append 'CXXFLAGS', '-U_GLIBCXX_DEBUG -U_GLIBCXX_DEBUG_PEDANTIC'
    gmp = Formula.factory 'gmp'
    mpfr = Formula.factory 'mpfr'
    libmpc = Formula.factory 'libmpc'
    # Sandbox the GCC lib, libexec and include directories so they don't wander
    # around telling small children there is no Santa Claus. This results in a
    # partially keg-only brew following suggestions outlined in the "How to
    # install multiple versions of GCC" section of the GCC FAQ:
    # http://gcc.gnu.org/faq.html#multiple
    gcc_prefix = prefix + 'gcc'
    args = [
      # Sandbox everything...
      "--prefix=#{gcc_prefix}",
      # ...except the stuff in share...
      "--datarootdir=#{share}",
      # ...and the binaries...
      "--bindir=#{bin}",
      # ...which are tagged with a suffix to distinguish them.
      "--program-suffix=-#{version.to_s.slice(/\d\.\d/)}",
      "--with-gmp=#{gmp.prefix}",
      "--with-mpfr=#{mpfr.prefix}",
      "--with-mpc=#{libmpc.prefix}",
      "--with-system-zlib",
      "--enable-stage1-checking",
      "--enable-plugin",
      "--disable-lto"
    ]
    args << '--disable-nls' unless nls?

    if build_everything?
      # Everything but Ada, which requires a pre-existing GCC Ada compiler
      # (gnat) to bootstrap.
      languages = %w[c c++ fortran java objc obj-c++]
    else
      # The C compiler is always built, but additional defaults can be added
      # here.
      languages = %w[c]
      languages << 'c++' if cxx?
      languages << 'fortran' if fortran?
      languages << 'java' if java?
      languages << 'objc' if objc?
      languages << 'obj-c++' if objcxx?
    end

    if java? or build_everything?
      source_dir = Pathname.new Dir.pwd
      Ecj.new.brew do |ecj|
        # Copying ecj.jar into the toplevel of the GCC source tree will cause
        # gcc to automagically package it into the installation. It *must* be
        # named ecj.jar and not ecj-version.jar in order for this to happen.
        mv "ecj-#{ecj.version}.jar", (source_dir + 'ecj.jar')
      end
    end

    mkdir 'build' do
      system '../configure', "--enable-languages=#{languages.join(',')}", *args
      if profiledbuild?
        # Takes longer to build, may bug out. Provided for those who want to
        # optimise all the way to 11.
        system 'make profiledbootstrap'
      else
        system 'make bootstrap'
      end
      # At this point `make check` could be invoked to run the testsuite. The
      # deja-gnu formula must be installed in order to do this.
      system 'make install'
      # `make install` neglects to transfer an essential plugin header file.
      Pathname.new(Dir[gcc_prefix.join *%w[** plugin include config]].first).install '../gcc/config/darwin-sections.def'
    end
  end
end
Avoid conflicts between different GCC installations
* Depends on external formulae ecj, drop internal ecj logic.
* Remove #{prefix}/share/man/man7.
* Backport changes from up-to-date GCC version
- New option syntax.
- Multilib support.
- Xcode-only support.
Closes #64.
Signed-off-by: Jack Nagel <43386ce32af96f5c56f2a88e458cb94cebee3751@gmail.com>
require 'formula'
# Homebrew formula for GCC 4.5 using the `option`/`build.include?` API.
# Sandboxed under #{prefix}/gcc; binaries get a -4.5 suffix.
class Gcc45 < Formula
  homepage 'http://gcc.gnu.org'
  url 'http://ftpmirror.gnu.org/gcc/gcc-4.5.3/gcc-4.5.3.tar.bz2'
  mirror 'http://ftp.gnu.org/gnu/gcc/gcc-4.5.3/gcc-4.5.3.tar.bz2'
  sha1 '73c45dfda5eef6b124be53e56828b5925198cc1b'

  option 'enable-cxx', 'Build the g++ compiler'
  option 'enable-fortran', 'Build the gfortran compiler'
  # typo fix: "Buld" -> "Build" in the user-visible description
  option 'enable-java', 'Build the gcj compiler'
  option 'enable-objc', 'Enable Objective-C language support'
  option 'enable-objcxx', 'Enable Objective-C++ language support'
  option 'enable-all-languages', 'Enable all compilers and languages, except Ada'
  option 'enable-nls', 'Build with native language support'
  option 'enable-profiled-build', 'Make use of profile guided optimization when bootstrapping GCC'
  option 'enable-multilib', 'Build with multilib support'

  depends_on 'gmp'
  depends_on 'libmpc'
  depends_on 'mpfr'
  depends_on 'ecj' if build.include? 'enable-java' or build.include? 'enable-all-languages'

  # Dont strip compilers.
  skip_clean :all

  def install
    # Force 64-bit on systems that use it. Build failures reported for some
    # systems when this is not done.
    ENV.m64 if MacOS.prefer_64_bit?
    # GCC will suffer build errors if forced to use a particular linker.
    ENV.delete 'LD'
    # This is required on systems running a version newer than 10.6, and
    # it's probably a good idea regardless.
    #
    # https://trac.macports.org/ticket/27237
    ENV.append 'CXXFLAGS', '-U_GLIBCXX_DEBUG -U_GLIBCXX_DEBUG_PEDANTIC'
    gmp = Formula.factory 'gmp'
    mpfr = Formula.factory 'mpfr'
    libmpc = Formula.factory 'libmpc'
    # Sandbox the GCC lib, libexec and include directories so they don't wander
    # around telling small children there is no Santa Claus. This results in a
    # partially keg-only brew following suggestions outlined in the "How to
    # install multiple versions of GCC" section of the GCC FAQ:
    # http://gcc.gnu.org/faq.html#multiple
    gcc_prefix = prefix + 'gcc'
    args = [
      # Sandbox everything...
      "--prefix=#{gcc_prefix}",
      # ...except the stuff in share...
      "--datarootdir=#{share}",
      # ...and the binaries...
      "--bindir=#{bin}",
      # ...which are tagged with a suffix to distinguish them.
      "--program-suffix=-#{version.to_s.slice(/\d\.\d/)}",
      "--with-gmp=#{gmp.prefix}",
      "--with-mpfr=#{mpfr.prefix}",
      "--with-mpc=#{libmpc.prefix}",
      "--with-system-zlib",
      "--enable-stage1-checking",
      "--enable-plugin",
      "--disable-lto"
    ]
    args << '--disable-nls' unless build.include? 'enable-nls'

    # BUG FIX: this revision dropped the top-level helper predicates, but this
    # branch still called the now-undefined build_everything? (NameError at
    # install time). Use build.include? like every other option check here.
    if build.include? 'enable-all-languages'
      # Everything but Ada, which requires a pre-existing GCC Ada compiler
      # (gnat) to bootstrap.
      languages = %w[c c++ fortran java objc obj-c++]
    else
      # The C compiler is always built, but additional defaults can be added
      # here.
      languages = %w[c]
      languages << 'c++' if build.include? 'enable-cxx'
      languages << 'fortran' if build.include? 'enable-fortran'
      languages << 'java' if build.include? 'enable-java'
      languages << 'objc' if build.include? 'enable-objc'
      languages << 'obj-c++' if build.include? 'enable-objcxx'
    end

    if build.include? 'enable-java' or build.include? 'enable-all-languages'
      ecj = Formula.factory 'ecj'
      args << "--with-ecj-jar=#{ecj.opt_prefix}/share/java/ecj.jar"
    end

    if build.include? 'enable-multilib'
      args << '--enable-multilib'
    else
      args << '--disable-multilib'
    end

    mkdir 'build' do
      unless MacOS::CLT.installed?
        # For Xcode-only systems, we need to tell the sysroot path.
        # 'native-system-header's will be appended
        args << "--with-native-system-header-dir=/usr/include"
        args << "--with-sysroot=#{MacOS.sdk_path}"
      end
      system '../configure', "--enable-languages=#{languages.join(',')}", *args
      if build.include? 'enable-profiled-build'
        # Takes longer to build, may bug out. Provided for those who want to
        # optimise all the way to 11.
        system 'make profiledbootstrap'
      else
        system 'make bootstrap'
      end
      # At this point `make check` could be invoked to run the testsuite. The
      # deja-gnu formula must be installed in order to do this.
      system 'make install'
      # `make install` neglects to transfer an essential plugin header file.
      Pathname.new(Dir[gcc_prefix.join *%w[** plugin include config]].first).install '../gcc/config/darwin-sections.def'
      # Remove conflicting manpages in man7
      man7.rmtree
    end
  end
end
|
New formula: GCC 4.6
* Version 4.6.4.
* Enable graphite loop optimization.
* Miscellaneous improvements.
require 'formula'
# Homebrew formula for GCC 4.6.4, sandboxed under #{prefix}/gcc with
# graphite loop optimization (ppl/cloog) and version-suffixed binaries.
class Gcc46 < Formula
  # Target triple CPU component for --build.
  # NOTE(review): returns nil for CPU types other than :intel/:ppc — the
  # resulting "--build=-apple-darwin..." would be malformed; verify intent.
  def arch
    if Hardware::CPU.type == :intel
      if MacOS.prefer_64_bit?
        'x86_64'
      else
        'i686'
      end
    elsif Hardware::CPU.type == :ppc
      if MacOS.prefer_64_bit?
        'ppc64'
      else
        'ppc'
      end
    end
  end

  # Darwin kernel release, e.g. "12.6.0", for the --build triple.
  def osmajor
    `uname -r`.chomp
  end

  homepage 'http://gcc.gnu.org'
  url 'http://ftpmirror.gnu.org/gcc/gcc-4.6.4/gcc-4.6.4.tar.bz2'
  mirror 'http://ftp.gnu.org/gnu/gcc/gcc-4.6.4/gcc-4.6.4.tar.bz2'
  sha1 '63933a8a5cf725626585dbba993c8b0f6db1335d'

  option 'enable-cxx', 'Build the g++ compiler'
  option 'enable-fortran', 'Build the gfortran compiler'
  # typo fix: "Buld" -> "Build" in the user-visible description
  option 'enable-java', 'Build the gcj compiler'
  option 'enable-objc', 'Enable Objective-C language support'
  option 'enable-objcxx', 'Enable Objective-C++ language support'
  option 'enable-all-languages', 'Enable all compilers and languages, except Ada'
  option 'enable-nls', 'Build with native language support (localization)'
  option 'enable-profiled-build', 'Make use of profile guided optimization when bootstrapping GCC'
  option 'enable-multilib', 'Build with multilib support'

  depends_on 'gmp4'
  depends_on 'libmpc08'
  depends_on 'mpfr2'
  depends_on 'ppl011'
  depends_on 'cloog-ppl015'
  depends_on 'ecj' if build.include? 'enable-java' or build.include? 'enable-all-languages'

  def install
    # GCC will suffer build errors if forced to use a particular linker.
    ENV.delete 'LD'

    if build.include? 'enable-all-languages'
      # Everything but Ada, which requires a pre-existing GCC Ada compiler
      # (gnat) to bootstrap. GCC 4.6.0 add go as a language option, but it is
      # currently only compilable on Linux.
      languages = %w[c c++ fortran java objc obj-c++]
    else
      # The C compiler is always built, but additional defaults can be added
      # here.
      languages = %w[c]
      languages << 'c++' if build.include? 'enable-cxx'
      languages << 'fortran' if build.include? 'enable-fortran'
      languages << 'java' if build.include? 'enable-java'
      languages << 'objc' if build.include? 'enable-objc'
      languages << 'obj-c++' if build.include? 'enable-objcxx'
    end

    # Sandbox the GCC lib, libexec and include directories so they don't wander
    # around telling small children there is no Santa Claus. This results in a
    # partially keg-only brew following suggestions outlined in the "How to
    # install multiple versions of GCC" section of the GCC FAQ:
    # http://gcc.gnu.org/faq.html#multiple
    gcc_prefix = prefix + 'gcc'

    args = [
      "--build=#{arch}-apple-darwin#{osmajor}",
      # Sandbox everything...
      "--prefix=#{gcc_prefix}",
      # ...except the stuff in share...
      "--datarootdir=#{share}",
      # ...and the binaries...
      "--bindir=#{bin}",
      # ...which are tagged with a suffix to distinguish them.
      "--enable-languages=#{languages.join(',')}",
      "--program-suffix=-#{version.to_s.slice(/\d\.\d/)}",
      "--with-gmp=#{Formula.factory('gmp4').opt_prefix}",
      "--with-mpfr=#{Formula.factory('mpfr2').opt_prefix}",
      "--with-mpc=#{Formula.factory('libmpc08').opt_prefix}",
      "--with-ppl=#{Formula.factory('ppl011').opt_prefix}",
      "--with-cloog=#{Formula.factory('cloog-ppl015').opt_prefix}",
      "--with-system-zlib",
      "--enable-libstdcxx-time=yes",
      "--enable-stage1-checking",
      "--enable-checking=release",
      "--enable-plugin",
      "--enable-lto",
      # a no-op unless --HEAD is built because in head warnings will raise errs.
      "--disable-werror"
    ]
    args << '--disable-nls' unless build.include? 'enable-nls'

    if build.include? 'enable-java' or build.include? 'enable-all-languages'
      args << "--with-ecj-jar=#{Formula.factory('ecj').opt_prefix}/share/java/ecj.jar"
    end

    if build.include? 'enable-multilib'
      args << '--enable-multilib'
    else
      args << '--disable-multilib'
    end

    mkdir 'build' do
      unless MacOS::CLT.installed?
        # For Xcode-only systems, we need to tell the sysroot path.
        # 'native-system-header's will be appended
        args << "--with-native-system-header-dir=/usr/include"
        args << "--with-sysroot=#{MacOS.sdk_path}"
      end
      system '../configure', *args
      if build.include? 'enable-profiled-build'
        # Takes longer to build, may bug out. Provided for those who want to
        # optimise all the way to 11.
        system 'make profiledbootstrap'
      else
        system 'make bootstrap'
      end
      # At this point `make check` could be invoked to run the testsuite. The
      # deja-gnu and autogen formulae must be installed in order to do this.
      system 'make install'
      # Remove conflicting manpages in man7
      man7.rmtree
    end
  end
end
|
# Homebrew formula: SUMO (Simulation of Urban Mobility) 0.25.0, revision 1.
class Sumo < Formula
desc "Simulation of Urban Mobility"
homepage "https://sourceforge.net/projects/sumo/"
url "https://downloads.sourceforge.net/project/sumo/sumo/version%200.25.0/sumo-all-0.25.0.tar.gz"
sha256 "e56552e4cd997ccab59b5c6828ca1e044e71e3ffe8c780831bf5aa18c5fdd18a"
revision 1
bottle do
cellar :any
sha256 "60e40e20adc9983d0ddf124b256ac5c1fd50ad7fccc1b27c703edc56d469231f" => :el_capitan
sha256 "1d68c7d7c1955bd548c2e02b70607798e644a073568069121d0d6946c927be60" => :yosemite
sha256 "ead5586821fcf6e32a1d8bbfe040b1f6349973f7963559fb0d44321f3ed84af1" => :mavericks
end
option "with-test", "Enable additional build-time checking"
deprecated_option "with-check" => "with-test"
depends_on :x11
depends_on "xerces-c"
depends_on "libpng"
depends_on "jpeg"
depends_on "libtiff"
depends_on "proj"
depends_on "gdal"
depends_on "fox"
depends_on :python
# Build-time only resources: googletest for the C++ unit tests, TextTest for
# the optional regression suite (only used with --with-test).
resource "gtest" do
url "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/googletest/gtest-1.7.0.zip"
sha256 "247ca18dd83f53deb1328be17e4b1be31514cedfc1e3424f672bf11fd7e0d60d"
end
resource "TextTest" do
url "https://pypi.python.org/packages/source/T/TextTest/TextTest-3.28.2.zip"
sha256 "2343b59425da2f24e3f9bea896e212e4caa370786c0a71312a4d9bd90ce1033b"
end
def install
# Build googletest in a staging dir, then copy the tree into the buildpath so
# configure can find gtest-config and the headers added to CFLAGS below.
resource("gtest").stage do
system "./configure"
system "make"
buildpath.install "../gtest-1.7.0"
end
ENV["LDFLAGS"] = "-lpython" # My compilation fails without this flag, despite :python dependency.
ENV.append_to_cflags "-I#{buildpath}/gtest-1.7.0/include"
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--with-python",
"--with-gtest-config=gtest-1.7.0/scripts/gtest-config"
system "make", "install"
# Copy tools/ to cellar. These contain some Python modules that have no setup.py.
prefix.install "tools"
# Basic tests, they are fast, so execute them always.
system "unittest/src/sumo-unittest"
# Additional tests. These take more time, and some fail on my machine...
if build.with? "test"
ENV.prepend_create_path "PYTHONPATH", buildpath/"vendor/lib/python2.7/site-packages"
resource("TextTest").stage { Language::Python.setup_install "python", buildpath/"vendor" }
ENV.prepend_create_path "PATH", buildpath/"vendor/bin"
system "tests/runTests.sh", "-l", "-zen", "-b", "homebrew-compilation-check"
end
# The unit-test binary is only needed at build time; drop it from the keg.
rm bin/"sumo-unittest"
end
def caveats; <<-EOS.undent
Some SUMO scripts require SUMO_HOME environmental variable:
export SUMO_HOME=#{prefix}
EOS
end
test do
# A simple hand-made test to see if sumo compiled and linked well.
(testpath/"hello.nod.xml").write <<-EOS.undent
<nodes>
<node id="1" x="-250.0" y="0.0" />
<node id="2" x="+250.0" y="0.0" />
<node id="3" x="+251.0" y="0.0" />
</nodes>
EOS
(testpath/"hello.edg.xml").write <<-EOS.undent
<edges>
<edge from="1" id="1to2" to="2" />
<edge from="2" id="out" to="3" />
</edges>
EOS
system "#{bin}/netconvert", "--node-files=#{testpath}/hello.nod.xml", "--edge-files=#{testpath}/hello.edg.xml", "--output-file=#{testpath}/hello.net.xml"
(testpath/"hello.rou.xml").write <<-EOS.undent
<routes>
<vType accel="1.0" decel="5.0" id="Car" length="2.0" maxSpeed="100.0" sigma="0.0" />
<route id="route0" edges="1to2 out"/>
<vehicle depart="1" id="veh0" route="route0" type="Car" />
</routes>
EOS
(testpath/"hello.sumocfg").write <<-EOS.undent
<configuration>
<input>
<net-file value="hello.net.xml"/>
<route-files value="hello.rou.xml"/>
</input>
<time>
<begin value="0"/>
<end value="10000"/>
</time>
</configuration>
EOS
system "#{bin}/sumo", "-c", "hello.sumocfg"
end
end
sumo: update 0.25.0_1 bottle.
# Homebrew formula: SUMO 0.25.0 revision 1 — bottle block rebuilt for
# sierra/el_capitan/yosemite; otherwise identical to the previous revision.
class Sumo < Formula
desc "Simulation of Urban Mobility"
homepage "https://sourceforge.net/projects/sumo/"
url "https://downloads.sourceforge.net/project/sumo/sumo/version%200.25.0/sumo-all-0.25.0.tar.gz"
sha256 "e56552e4cd997ccab59b5c6828ca1e044e71e3ffe8c780831bf5aa18c5fdd18a"
revision 1
bottle do
cellar :any
sha256 "de307bfd548b7598b4e0b440ef162eccd5abf8e6dec0005429b81156c040ca80" => :sierra
sha256 "5e865ad63c4897d59d145bb41fd0bb234e93939568d675c3fee9161ced86751d" => :el_capitan
sha256 "9ee6366bf8ac1d71427b0988a4009f11188a78111e17a351b22b721372ac66d6" => :yosemite
end
option "with-test", "Enable additional build-time checking"
deprecated_option "with-check" => "with-test"
depends_on :x11
depends_on "xerces-c"
depends_on "libpng"
depends_on "jpeg"
depends_on "libtiff"
depends_on "proj"
depends_on "gdal"
depends_on "fox"
depends_on :python
# Build-time only resources: googletest for the C++ unit tests, TextTest for
# the optional regression suite (only used with --with-test).
resource "gtest" do
url "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/googletest/gtest-1.7.0.zip"
sha256 "247ca18dd83f53deb1328be17e4b1be31514cedfc1e3424f672bf11fd7e0d60d"
end
resource "TextTest" do
url "https://pypi.python.org/packages/source/T/TextTest/TextTest-3.28.2.zip"
sha256 "2343b59425da2f24e3f9bea896e212e4caa370786c0a71312a4d9bd90ce1033b"
end
def install
# Build googletest in a staging dir, then copy the tree into the buildpath so
# configure can find gtest-config and the headers added to CFLAGS below.
resource("gtest").stage do
system "./configure"
system "make"
buildpath.install "../gtest-1.7.0"
end
ENV["LDFLAGS"] = "-lpython" # My compilation fails without this flag, despite :python dependency.
ENV.append_to_cflags "-I#{buildpath}/gtest-1.7.0/include"
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--with-python",
"--with-gtest-config=gtest-1.7.0/scripts/gtest-config"
system "make", "install"
# Copy tools/ to cellar. These contain some Python modules that have no setup.py.
prefix.install "tools"
# Basic tests, they are fast, so execute them always.
system "unittest/src/sumo-unittest"
# Additional tests. These take more time, and some fail on my machine...
if build.with? "test"
ENV.prepend_create_path "PYTHONPATH", buildpath/"vendor/lib/python2.7/site-packages"
resource("TextTest").stage { Language::Python.setup_install "python", buildpath/"vendor" }
ENV.prepend_create_path "PATH", buildpath/"vendor/bin"
system "tests/runTests.sh", "-l", "-zen", "-b", "homebrew-compilation-check"
end
# The unit-test binary is only needed at build time; drop it from the keg.
rm bin/"sumo-unittest"
end
def caveats; <<-EOS.undent
Some SUMO scripts require SUMO_HOME environmental variable:
export SUMO_HOME=#{prefix}
EOS
end
test do
# A simple hand-made test to see if sumo compiled and linked well.
(testpath/"hello.nod.xml").write <<-EOS.undent
<nodes>
<node id="1" x="-250.0" y="0.0" />
<node id="2" x="+250.0" y="0.0" />
<node id="3" x="+251.0" y="0.0" />
</nodes>
EOS
(testpath/"hello.edg.xml").write <<-EOS.undent
<edges>
<edge from="1" id="1to2" to="2" />
<edge from="2" id="out" to="3" />
</edges>
EOS
system "#{bin}/netconvert", "--node-files=#{testpath}/hello.nod.xml", "--edge-files=#{testpath}/hello.edg.xml", "--output-file=#{testpath}/hello.net.xml"
(testpath/"hello.rou.xml").write <<-EOS.undent
<routes>
<vType accel="1.0" decel="5.0" id="Car" length="2.0" maxSpeed="100.0" sigma="0.0" />
<route id="route0" edges="1to2 out"/>
<vehicle depart="1" id="veh0" route="route0" type="Car" />
</routes>
EOS
(testpath/"hello.sumocfg").write <<-EOS.undent
<configuration>
<input>
<net-file value="hello.net.xml"/>
<route-files value="hello.rou.xml"/>
</input>
<time>
<begin value="0"/>
<end value="10000"/>
</time>
</configuration>
EOS
system "#{bin}/sumo", "-c", "hello.sumocfg"
end
end
|
#!/usr/bin/env ruby
begin
require 'Win32/Console/ANSI' if RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/
%w(socket thread slop timeout).each { |r| require r }
rescue LoadError
abort "#{$0} requires slop and, if you're on Windows, win32console\nPlease run 'gem install slop win32console'"
end
# Crash the whole process if any thread raises (why isn't this the default?).
Thread.abort_on_exception = true

# Flush standard output immediately so the progress bar renders live.
$stdout.sync = true

# Version values
$ver_maj = 2
$ver_min = 0
$ver_rev = 0
$ver_str = [$ver_maj, $ver_min, $ver_rev].join('.')

# Default runtime configuration; overridden later by CLI flags / config file.
config = {
  "out-dir"       => './',
  "skip-existing" => false,
  "servers"       => {}
}
# Prints an ANSI-coloured error line to stdout.
def puts_error msg
  puts "! \e[31mERROR\e[0m: #{msg}"
end

# Prints an ANSI-coloured error line and terminates the process (Kernel#abort).
def puts_abort msg
  abort "! \e[31mERROR\e[0m: #{msg}"
end

# Prints an ANSI-coloured warning line to stdout.
# BUG FIX: the label previously read "WARNING:\e[0m:" — a doubled colon,
# inconsistent with the ERROR format above.
def puts_warning msg
  puts "! \e[33mWARNING\e[0m: #{msg}"
end
# Extend IO with a non-blocking line reader: bytes of an incomplete line are
# buffered in @rlnb_buffer across calls until the trailing newline arrives.
# Raises IO::WaitReadable (from read_nonblock) when no data is available.
class IO
  def gets_nonblock
    @rlnb_buffer ||= ""
    loop do
      char = self.read_nonblock(1)
      @rlnb_buffer += char
      next unless char == "\n"
      line = @rlnb_buffer
      @rlnb_buffer = ""
      return line
    end
  end
end
# Extend Array with an arithmetic mean. Note: with all-Integer elements this
# performs integer division, matching the original behaviour.
class Array
  def average
    reduce(:+) / size
  end
end
# Value object describing one XDCC request: server, channel, bot nick,
# pack number and a free-form info/description field (defaults to "*").
class XDCC_REQ
  attr_accessor :serv, :chan, :bot, :pack, :info

  def initialize serv, chan, bot, pack, info = "*"
    @serv, @chan, @bot, @pack, @info = serv, chan, bot, pack, info
  end

  # Two requests are equal when server, channel, bot and pack all match;
  # the info field is deliberately ignored.
  def eql? other
    [serv, chan, bot, pack] == [other.serv, other.chan, other.bot, other.pack]
  end

  def to_s
    "[ #{serv}, #{chan}, #{bot}, #{pack}, #{info} ]"
  end
end
# Value object holding the DCC SEND parameters (file name, size, peer ip/port)
# kept around while waiting for a DCC ACCEPT on a resumed transfer.
class XDCC_SEND
  attr_accessor :fname, :fsize, :ip, :port

  def initialize fname, fsize, ip, port
    @fname, @fsize, @ip, @port = fname, fsize, ip, port
  end

  def to_s
    "[ #{fname}, #{fsize}, #{ip}, #{port} ]"
  end
end
# Minimal observer mixin: register handlers with #on, fire them with #emit.
module Emitter
  # Lazily-created table mapping event type => list of handler blocks.
  def callbacks
    @callbacks ||= Hash.new { |table, event| table[event] = [] }
  end

  # Registers +block+ as a handler for +type+; returns self for chaining.
  def on type, &block
    callbacks[type].push block
    self
  end

  # Invokes every handler registered for +type+, passing +args+ through.
  def emit type, *args
    callbacks[type].each { |handler| handler.call(*args) }
  end
end
# Wraps the raw IRC TCP connection: queues outgoing lines in @buf and emits
# :READ / :WROTE / :WAITING / :CLOSED events via the Emitter mixin.
class Stream
  include Emitter
  attr_accessor :io, :buf

  # Connects to +serv+ on port 6667 with a 5-second limit. Aborts the
  # process (puts_abort) on DNS failure or timeout.
  # FIX: use Timeout.timeout — the bare Kernel#timeout alias from the
  # `timeout` library is deprecated and removed in modern Rubies.
  def initialize serv
    @buf = []
    Timeout.timeout(5) { @io = TCPSocket.new serv, 6667 }
  rescue SocketError => e
    puts_abort "Failed to connect to #{serv}! #{e.message}"
  rescue Timeout::Error
    puts_abort "Connection to #{serv} timed out!"
  end

  def disconnect
    @io.puts 'QUIT'
  end

  # Queues a line for the next write flush.
  def << data
    @buf << data
  end

  # Flushes every queued line to the socket, emitting :WROTE per line;
  # emits :CLOSED if the peer has gone away.
  def write
    @buf.each do |x|
      @io.puts x
      emit :WROTE, x
    end
    @buf = []
  rescue EOFError, Errno::ECONNRESET
    emit :CLOSED
  end

  # Non-blocking read of one line; emits :READ with the line, :WAITING when
  # no complete line is available, or :CLOSED on disconnect.
  def read
    read = @io.gets_nonblock
    emit :READ, read
  rescue IO::WaitReadable
    emit :WAITING
  rescue EOFError, Errno::ECONNRESET
    emit :CLOSED
  end
end
# Drives a Stream: pumps read/write ticks in a loop until stopped, and stops
# automatically when the stream reports :CLOSED.
class Bot
  attr_reader :stream

  def initialize stream
    @stream = stream
    stream.on(:CLOSED) { stop }
  end

  # Runs the tick loop until #stop is called.
  def start
    @running = true
    while @running
      tick
    end
  end

  def stop
    @running = false
  end

  # One pump iteration: drain incoming data, then flush queued writes.
  def tick
    stream.read
    stream.write
  end
end
# Formats a byte count with the closest binary unit, e.g. 2048 -> "2.0KB".
# FIXES: the clamp previously used `> length`, letting exp index one past the
# array (nil unit for huge sizes); Math.log(0) also raised FloatDomainError
# via to_i, so non-positive sizes are short-circuited.
def bytes_to_closest bytes
  fsize_arr = [ 'B', 'KB', 'MB', 'GB', 'TB' ]
  return "0.0B" if bytes <= 0
  exp = (Math.log(bytes) / Math.log(1024)).to_i
  exp = fsize_arr.length - 1 if exp > fsize_arr.length - 1
  bytes /= 1024.0 ** exp
  return "#{bytes.round(2)}#{fsize_arr[exp]}"
end
# Returns +fname+ unchanged if no file exists there; otherwise appends
# " (2)", " (3)", ... before the extension until an unused name is found.
# FIX: File.exists? was deprecated for years and removed in Ruby 3.2;
# File.exist? is the supported spelling.
def safe_fname fname
  return fname unless File.exist? fname
  ext = File.extname fname
  base = File.basename fname, ext
  dir = File.dirname fname
  cur = 2
  loop do
    candidate = "#{dir}/#{base} (#{cur})#{ext}"
    return candidate unless File.exist? candidate
    cur += 1
  end
end
# Rough human-readable phrase for +t+ seconds remaining. +t+ may be a Float
# (callers pass a computed rate estimate); Integer Ranges still match Floats
# in case/when via Range#===.
# NOTE(review): adjacent when-ranges overlap (2..45 then 45..90) and 91..1440
# skips values in (90, 91); `case` takes the first matching clause, so the
# ordering of these clauses is load-bearing — do not reorder.
def time_distance t
if t < 60
case t
when 0 then "- nevermind, done!"
when 1..4 then "in a moment!"
when 5..9 then "less than 10 seconds"
when 10..19 then "less than 20 seconds"
when 20..39 then "half a minute"
else "less than a minute"
end
else # Use minutes, to avoid big numbers
t = t / 60.0
case t.to_i
when 1 then "about a minute"
when 2..45 then "#{t.round} minutes"
when 45..90 then "about an hour"
when 91..1440 then "about #{(t / 60.0).round} hours"
when 1441..2520 then "about a day"
when 2521..86400 then "about #{(t / 1440.0).round} days"
else "about #{(t/ 43200.0).round} months"
end
end
end
# Past-tense elapsed-time phrase for +t+ seconds, e.g. "in 1 minute, 30 seconds".
# Returns "instantly!" for non-positive durations.
def time_elapsed t
return "instantly!" if t <= 0
# Split epoch-based GMT into [sec, min, hour, day, month, year] Integers.
ta = Time.at(t).gmtime.strftime('%S|%M|%H|%-d|%-m|%Y').split('|', 6).collect { |i| i.to_i }
# Rebase calendar fields so each is a zero-based count of elapsed units:
ta[-1] -= 1970 # years count from the Unix epoch
ta[-2] -= 1 # months are 1-based in strftime output
ta[-3] -= 1 # days of month are 1-based too
# Count the trailing zero (largest) units so they can be dropped below.
i = 0
ta.reverse.each do |x|
break if x != 0
i += 1
end
# Unit suffixes
suffix = [ "seconds", "minutes", "hours", "days", "months", "years" ];
# Don't use plural if x is 1 (drops the trailing "s")
plural = ->(x, y) { x == 1 ? y[0..-2] : y }
# Format string to "value unit"
format_str = ->(x) { "#{ta[x]} #{plural[ta[x], suffix[x]]}, " }
# Assemble largest-to-smallest unit, then trim the trailing ", ".
ta = ta.take(ta.length - i)
str = ""
(ta.length - 1).downto(0) { |x| str += format_str[x] }
"in #{str[0..-3]}"
end
# Downloads a file over a DCC connection from +ip+:+port+ into +fname+,
# rendering a live progress bar. +read+ > 0 appends to an existing file
# (DCC RESUME). Prints a success summary, or an error on network failure.
def dcc_download ip, port, fname, fsize, read = 0
sock = nil
begin
# NOTE(review): bare Kernel#timeout is deprecated/removed in modern Rubies;
# Timeout.timeout is the supported form — confirm target Ruby version.
timeout(5) { sock = TCPSocket.new ip, port }
rescue Timeout::Error
puts_abort "Connection to #{ip} timed out!"
end
# NOTE(review): `e` is not defined in this scope, so reaching this line with
# sock.nil? would raise NameError. In practice TCPSocket.new either returns
# a socket or raises, so this guard appears to be dead code.
puts_abort "Failed to connect to \"#{ip}:#{port}\": #{e}" if sock.nil?
fsize_clean = bytes_to_closest fsize
avgs, last_check, start_time = [], Time.now - 2, Time.now
fh = File.open fname, (read == 0 ? "w" : "a") # Write or append
# Form the status bar: 10-segment bar, percentage, transferred/total,
# current rate (avgs holds per-chunk byte counts since the last redraw).
print_bar = ->() {
print "\r\e[0K> [ \e[1;37m"
pc = read.to_f / fsize.to_f * 100.0
bars = (pc / 10).to_i
bars.times { print "#" }
(10 - bars).times { print " " }
avg = avgs.average * 1024.0
time_rem = time_distance ((fsize - read) / avg) * 8.0
print "\e[0m ] #{pc.round(2)}% #{bytes_to_closest read}/#{fsize_clean} \e[1;37m@\e[0m #{bytes_to_closest avg}/s \e[1;37min\e[0m #{time_rem}"
last_check = Time.now
avgs.clear
}
while buf = sock.readpartial(8192)
read += buf.bytesize
avgs << buf.bytesize
# Redraw at most once per second
print_bar[] if (Time.now - last_check) > 1 and not avgs.empty?
begin
# DCC protocol: acknowledge total bytes received as a 32-bit big-endian int
sock.write_nonblock [read].pack('N')
rescue Errno::EWOULDBLOCK
rescue Errno::EAGAIN => e
puts_error "#{File.basename fname} timed out! #{e}"
end
fh << buf
break if read >= fsize
end
print_bar.call unless avgs.empty?
elapsed_time = time_elapsed (Time.now - start_time).to_i
sock.close
fh.close
puts "\n! \e[1;32mSUCCESS\e[0m: downloaded #{File.basename fname} #{elapsed_time}"
rescue EOFError, SocketError => e
puts "\n! ERROR: #{File.basename fname} failed to download! #{e}"
end
if __FILE__ == $0 then
opts = Slop.parse! do
banner " Usage: #{$0} [options] [value] [links] [--files] [file1:file2:file3]\n"
on :help, :ignore_case => true
on 'v', 'version', 'Print version' do
puts "#{$0}: v#{$ver_str}"
exit
end
on 'config=', 'Config file location'
on 'user=', 'IRC \'USER\' for Ident'
on 'nick=', 'IRC nick'
on 'pass=', 'IRC \'PASS\' for Ident'
on 'realname=', 'Realname for \'USER\' Ident'
on 'nickserv=', 'Password for Nickserv'
on 'files=', 'Pass list of files to parse for links', as: Array, delimiter: ':'
on 'out-dir=', 'Output directory to save fiels to', :default => "./"
on 'skip-existing', 'Don\' download files that already exist', :default => false
end
if opts.help?
puts opts
puts "\n Examples"
puts " \txget.rb --config config.conf --nick test"
puts " \txget.rb --files test1.txt:test2.txt:test3.txt"
puts " \txget.rb irc.rizon.net/#news/ginpachi-sensei/1"
puts " \txget.rb irc.rizon.net/#news/ginpachi-sensei/41..46"
exit
end
# Get the config location
config_loc = opts["config"]
if config_loc.nil? or not File.exists? config_loc
config_loc = File.expand_path "~/.xget.conf"
config_loc = ".xget.conf" unless File.exists? config_loc
end
# Insert config settings from CLI arguments into the default "*" block.
# CLI flags are named 'realname'/'nickserv' but the config keys are
# :real/:nserv; map them explicitly (the original looked up opts["real"]
# and opts["nserv"], which never exist, so those flags were ignored).
cur_block = "*"
config["servers"][cur_block] = {}
{ "user" => :user, "nick" => :nick, "pass" => :pass,
  "realname" => :real, "nickserv" => :nserv }.each do |opt_name, conf_key|
  value = opts[opt_name]
  config["servers"][cur_block][conf_key] = value unless value.nil?
end
# Check if specified output directory actually exists
puts_abort "Out directory, \"#{opts["out-dir"]}\" doesn't exist!" unless Dir.exists? opts["out-dir"]
config["out-dir"] = opts["out-dir"].dup
config["out-dir"] << "/" unless config["out-dir"][-1] == "/"
# Parse config
config_copies = {}
File.open(config_loc, "r").each_line do |line|
next if line.length <= 1 or line[0] == '#'
if line =~ /^\[(\S+)\]$/ # Check if header
cur_block = $1
if cur_block.include? ',' # Check if header contains more than one server
tmp_split = cur_block.split(",")
next unless tmp_split[0] =~ /^(\w+?).(\w+?).(\w+?)$/
config_copies[tmp_split[0]] = []
tmp_split.each do |x| # Add all copies to copies hash
next if x == tmp_split[0] or not x =~ /^(\w+?).(\w+?).(\w+?)$/
config_copies[tmp_split[0]].push x unless config_copies[tmp_split[0]].include? x
end
cur_block = tmp_split[0]
end
# Set current block to the new header
config["servers"][cur_block] = {} unless config["servers"].has_key? cur_block
elsif line =~ /^(\S+)=(.*+?)$/
# Check if current line is specifying out directory
case $1
when "out-dir"
t_out_dir = File.expand_path $2
puts_abort "Out directory, \"#{t_out_dir}\" doesn't exist!" unless Dir.exists? t_out_dir
config[$1] = t_out_dir
config[$1] << "/" unless config[$1][-1] == "/"
next
when "skip-existing" then config[$1] = ($2 == "true")
else
# Add value to current header, default is *
t_sym = $1.downcase.to_sym
config["servers"][cur_block][t_sym] = $2 unless config["servers"][cur_block].has_key? t_sym
end
end
end
# Go through each and make copies of the original
unless config_copies.empty?
config_copies.each do |k,v|
v.each { |x| config["servers"][x] = config["servers"][k] }
end
end
# Take remaining arguments and all lines from --files arg and put into array
to_check = ($*)
if opts['files'] != nil and not opts['files'].empty?
opts['files'].each do |x|
File.open(x, "r").each_line { |y| to_check << y.chomp } if File.exists? x
end
end
if to_check.empty?
puts opts
abort "\n No jobs, nothing to do!"
end
# Parse to_check array for valid XDCC links, irc.serv.org/#chan/bot/pack
# A trailing "..N" requests an inclusive pack range, e.g. bot/41..46.
tmp_requests, tmp_range = [], []
to_check.each do |x|
  if x =~ /^(\w+?).(\w+?).(\w+?)\/#(\S+)\/(\S+)\/(\d+)(..\d+)?$/
    serv = [$1, $2, $3].join(".")
    info = (config["servers"].has_key?(serv) ? serv : "*")
    chan = "##{$4}"
    bot = $5
    pack = $6.to_i
    unless $7.nil?
      to_range = $7[2..-1].to_i # Clip off the ".."
      if pack >= to_range
        puts_error "Invalid range #{pack} to #{to_range} in \"#{x}\""
        next
      end
      tmp_range = ((pack + 1)..to_range).to_a
    end
    tmp_requests.push XDCC_REQ.new serv, chan, bot, pack, info
    # Convert range array to new requests. The original iterated
    # `rmp_range` — a typo that raised NameError whenever a pack range
    # was actually used.
    unless tmp_range.empty?
      tmp_range.each { |y| tmp_requests.push XDCC_REQ.new serv, chan, bot, y, info }
      tmp_range.clear
    end
  else
    puts_abort "#{x} is not a valid XDCC address\n XDCC Address format: irc.serv.com/#chan/bot/pack"
  end
end
# Remove duplicate entries from requests.
# The original nested-loop version never reset its inner index `j`, so
# after the first outer pass every recorded index was out of range and
# duplicates later in the list survived. Array#uniq with an explicit key
# (mirroring XDCC_REQ#eql?, which compares serv/chan/bot/pack) avoids
# the index bookkeeping entirely.
tmp_requests = tmp_requests.uniq { |r| [r.serv, r.chan, r.bot, r.pack] }
# Group requests by server: serv -> [XDCC_REQ, ...]
requests = {}
tmp_requests.each do |x|
  (requests[x.serv] ||= []) << x
end
if requests.empty?
  puts opts
  abort "\n No jobs, nothing to do!"
end
# Sort each server's requests by channel then pack, and list them.
# The original assigned the sorted array to the block parameter `v`,
# which never updated the hash — downloads ran in unsorted order.
requests.each do |k, v|
  puts "#{k} \e[1;37m->\e[0m"
  v.sort_by! { |x| [x.chan, x.pack] }
  v.each { |x| puts "\t#{x}" }
end
puts
# H-h-here we g-go...
requests.each do |k, v|
req, info = v[0], config["servers"][v[0].info]
last_chan, cur_req, motd = "", -1, false
nick_sent, nick_check, nick_valid = false, false, false
xdcc_sent, xdcc_accepted = false, false
xdcc_accept_time, xdcc_ret, req_send_time = nil, nil, nil
stream = Stream.new req.serv
bot = Bot.new stream
stream << "NICK #{info[:nick]}"
stream << "USER #{info[:user]} 0 * #{info[:real]}"
stream << "PASS #{info[:pass]}" unless info[:pass].nil?
# Handle read data
stream.on :READ do |data|
/^(?:[:](?<prefix>\S+) )?(?<type>\S+)(?: (?!:)(?<dest>.+?))?(?: [:](?<msg>.+))?$/ =~ data
#puts "\e[1;37m>>\e[0m #{prefix} | #{type} | #{dest} | #{msg}"
case type
when 'NOTICE'
if dest == 'AUTH'
if msg =~ /erroneous nickname/i
puts_error 'Login failed'
stream.disconnect
end
puts "> \e[1;32m#{msg}\e[0m"
else
if prefix =~ /^NickServ!/
if not nick_sent and info[:nserv] != nil
stream << "PRIVMSG NickServ :IDENTIFY #{info[:nserv]}"
nick_sent = true
elsif nick_sent and not nick_check
case msg
when /password incorrect/i
nick_valid = false
nick_check = true
when /password accepted/i
nick_valid = true
nick_check = true
end
end
puts "> \e[1;33m#{msg}\e[0m"
elsif prefix =~ /^#{Regexp.escape req.bot}!(.*)$/i
case msg
when /already requested that pack/i, /closing connection/i, /you have a dcc pending/i, /you can only have (\d+?) transfer at a time/i
puts_error msg
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
stream << 'QUIT'
else
puts "! #{prefix}: #{msg}"
end
end
end
when 'PRIVMSG'
if xdcc_sent and not xdcc_accepted and prefix =~ /#{Regexp.escape req.bot}!(.*)$/i
/^\001DCC SEND (?<fname>((".*?").*?|(\S+))) (?<ip>\d+) (?<port>\d+) (?<fsize>\d+)\001\015$/ =~ msg
unless $~.nil?
req_send_time = nil
tmp_fname = fname
fname = $1 if tmp_fname =~ /^"(.*)"$/
puts "Preparing to download: \e[36m#{fname}\e[0m"
fname = (config["out-dir"].dup << fname)
xdcc_ret = XDCC_SEND.new fname, fsize.to_i, [ip.to_i].pack('N').unpack('C4') * '.', port.to_i
# Check if the for unfinished download amd try to resume
if File.exists? xdcc_ret.fname and File.stat(xdcc_ret.fname).size < xdcc_ret.fsize
stream << "PRIVMSG #{req.bot} :\001DCC RESUME #{tmp_fname} #{xdcc_ret.port} #{File.stat(xdcc_ret.fname).size}\001"
xdcc_accepted = true
print "! Incomplete file detected. Attempting to resume..."
next # Skip and wait for "DCC ACCEPT"
elsif File.exists? xdcc_ret.fname
if config["skip-existing"]
puts_warning "File already exists, skipping..."
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
xdcc_sent, xdcc_accepted = false, false
xdcc_accept_time, xdcc_ret = nil, nil
next
else
puts_warnings "File already existing, using a safe name..."
xdcc_ret.fname = safe_fname xdcc_ret.fname
end
end
# It's a new download, start from beginning
Thread.new do
pid = fork do
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize
end
Process.wait pid
xdcc_sent, xdcc_accepted = false, false
xdcc_accept_time, xdcc_ret = nil, nil
end
end
elsif xdcc_accepted and xdcc_ret != nil and msg =~ /^\001DCC ACCEPT ((".*?").*?|(\S+)) (\d+) (\d+)\001\015$/
# DCC RESUME request accepted, continue the download!
xdcc_accept_time = nil
xdcc_accepted = false
puts "\e[1;32mSUCCESS\e[0m!"
Thread.new do
pid = fork do
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize, File.stat(xdcc_ret.fname).size
end
Process.wait pid
xdcc_sent, xdcc_accepted = false, false
xdcc_accept_time, xdcc_ret = nil, nil
end
end
when /^\d+?$/
type_i = type.to_i
case type_i
when 1 # Print welcome message, because it's nice
msg.sub!(/#{Regexp.escape info[:nick]}/, "\e[34m#{info[:nick]}\e[0m")
puts "! #{msg}"
when 400..533 # Handle errors, except 439
next if type_i == 439 # Skip 439
puts_error "#{msg}"
stream.disconnect
when 376 then motd = true # Mark the end of the MOTD
end
when 'PING' then stream << "PONG :#{msg}"
when 'ERROR' then (msg =~ /closing link/i ? puts(msg) : puts_error(msg))
end
end
# Handle things while waiting for data
stream.on :WAITING do
unless xdcc_accepted
if motd and not xdcc_sent
cur_req += 1
if cur_req >= v.length
stream.disconnect
next
end
req = v[cur_req]
if req.chan != last_chan
stream << "PART #{last_chan}" unless last_chan == ""
last_chan = req.chan
stream << "JOIN #{req.chan}"
end
sleep 1 unless cur_req == 0 # Cooldown between downloads
stream << "PRIVMSG #{req.bot} :XDCC SEND #{req.pack}"
req_send_time = Time.now
xdcc_sent = true
end
# Wait 3 seconds for DCC SEND response, if there isn't one, abort
if xdcc_sent and not req_send_time.nil? and not xdcc_accepted
if (Time.now - req_send_time).floor > 3
puts_error "#{req.bot} took too long to respond, are you sure it's a bot?"
stream.disconnect
bot.stop
end
end
# Wait 3 seconds for a DCC ACCEPT response, if there isn't one, don't resume
if xdcc_sent and xdcc_accepted and not xdcc_accept_time.nil?
if (Time.now - xdcc_accept_time).floor > 3
puts "FAILED! Bot client doesn't support resume!"
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize
end
end
end
end
# Print sent data, for debugging only really
stream.on :WROTE do |data|
#puts "\e[1;37m<<\e[0m #{data}"
end
# Start the bot
bot.start
end
end
# Added license to xget.rb (stray commit-message artifact; commented out so the file parses)
#!/usr/bin/env ruby
# xget.rb - xget
# Created by Rusty Shackleford on 2013/05/19
# Copyright (c) 2013, Rusty Shackleford
# All rights reserved.
begin
require 'Win32/Console/ANSI' if RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/
%w(socket thread slop timeout).each { |r| require r }
rescue LoadError
abort "#{$0} requires slop and, if you're on Windows, win32console\nPlease run 'gem install slop win32console'"
end
# Why isn't this enabled by default?
Thread.abort_on_exception = true
# Put standard output into syncronised mode
$stdout.sync = true
# Version values
$ver_maj, $ver_min, $ver_rev = 2, 0, 0
$ver_str = "#{$ver_maj}.#{$ver_min}.#{$ver_rev}"
# Default runtime configuration; overridden by CLI flags and by the
# ~/.xget.conf (or ./.xget.conf) config file parsed later in this file.
config = {
"out-dir" => './',
"skip-existing" => false,
"servers" => {} }
# Print a non-fatal error message with a red "ERROR" tag.
def puts_error msg
  tag = "\e[31mERROR\e[0m"
  puts "! #{tag}: #{msg}"
end
# Print a fatal error with a red "ERROR" tag and terminate the process
# (Kernel#abort exits with status 1 after writing to stderr).
def puts_abort msg
  text = "! \e[31mERROR\e[0m: #{msg}"
  abort text
end
# Print a warning with a yellow "WARNING" tag. Fixes the doubled colon
# in the original format string ("WARNING:\e[0m:" printed "WARNING:: ...").
def puts_warning msg
  puts "! \e[33mWARNING\e[0m: #{msg}"
end
# Extend IO with a non-blocking line reader. Bytes are accumulated in
# @rlnb_buffer across calls; once a "\n" arrives the complete line is
# returned and the buffer reset. When no data is ready, read_nonblock
# raises IO::WaitReadable, which propagates to the caller (Stream#read
# treats it as "idle").
class IO
  def gets_nonblock
    @rlnb_buffer ||= ""
    loop do
      byte = self.read_nonblock(1)
      @rlnb_buffer += byte
      next unless byte == "\n"
      line = @rlnb_buffer
      @rlnb_buffer = ""
      return line
    end
  end
end
# Extend Array with an arithmetic-mean helper. Note: with all-Integer
# elements this performs integer division, exactly like the original.
class Array
  def average
    reduce(:+) / size
  end
end
# Value object describing a single XDCC request: which server/channel,
# which bot, and which pack number. `info` names the config block whose
# credentials should be used ("*" is the default block).
class XDCC_REQ
  attr_accessor :serv, :chan, :bot, :pack, :info

  def initialize serv, chan, bot, pack, info = "*"
    @serv, @chan, @bot, @pack, @info = serv, chan, bot, pack, info
  end

  # Two requests are equal when everything except `info` matches.
  def eql? other
    [serv, chan, bot, pack] == [other.serv, other.chan, other.bot, other.pack]
  end

  def to_s
    "[ #{serv}, #{chan}, #{bot}, #{pack}, #{info} ]"
  end
end
# Holds the details of a DCC SEND offer (file name/size, sender ip/port)
# while waiting for a possible DCC ACCEPT reply to a resume request.
class XDCC_SEND
  attr_accessor :fname, :fsize, :ip, :port

  def initialize fname, fsize, ip, port
    @fname, @fsize, @ip, @port = fname, fsize, ip, port
  end

  def to_s
    "[ #{fname}, #{fsize}, #{ip}, #{port} ]"
  end
end
# Minimal pub/sub mixin: register handlers with #on(type) and fire them
# with #emit(type, *args). Handlers for a type run in registration order;
# emitting a type with no handlers is a no-op.
module Emitter
  def callbacks
    @callbacks ||= Hash.new { |hash, key| hash[key] = [] }
  end

  def on type, &block
    callbacks[type] << block
    self # returning self allows chaining: obj.on(:a){}.on(:b){}
  end

  def emit type, *args
    callbacks[type].each { |handler| handler.call(*args) }
  end
end
# Class to handle IRC stream and emit events.
# Thin event-driven wrapper over a TCPSocket (port 6667). Outgoing lines
# are queued with #<< and flushed by #write; #read pulls one line
# non-blockingly and emits :READ, :WAITING (no data ready) or :CLOSED.
class Stream
include Emitter
attr_accessor :io, :buf
def initialize serv
@buf = []
# 5-second connect timeout; any failure aborts the whole process
timeout(5) { @io = TCPSocket.new serv, 6667 }
rescue SocketError => e
puts_abort "Failed to connect to #{serv}! #{e.message}"
rescue Timeout::Error
puts_abort "Connection to #{serv} timed out!"
end
def disconnect
# Politely QUIT; the server closing the link later triggers :CLOSED
@io.puts 'QUIT'
end
def << data
@buf << data
end
def write
# Flush all queued lines, emitting :WROTE per line for debug hooks
@buf.each do |x|
@io.puts x
emit :WROTE, x
end
@buf = []
rescue EOFError, Errno::ECONNRESET
emit :CLOSED
end
def read
# Non-blocking single-line read (IO#gets_nonblock defined above)
read = @io.gets_nonblock
emit :READ, read
rescue IO::WaitReadable
emit :WAITING
rescue EOFError, Errno::ECONNRESET
emit :CLOSED
end
end
# Drives a Stream: repeatedly reads incoming data then flushes queued
# writes until stopped. Stops automatically when the stream emits :CLOSED.
class Bot
  attr_reader :stream

  def initialize stream
    @stream = stream
    stream.on :CLOSED do stop; end
  end

  # Run the read/write pump until #stop is called.
  def start
    @running = true
    loop do
      break unless @running
      tick
    end
  end

  def stop
    @running = false
  end

  # One pump iteration: read incoming data, then flush outgoing buffer.
  def tick
    stream.read
    stream.write
  end
end
# Human-readable size for a byte count, e.g. 1536 -> "1.5KB".
# Guards non-positive input (Math.log(0) is -Infinity and #to_i on it
# raises FloatDomainError) and clamps the exponent: the original allowed
# exp == fsize_arr.length, indexing past the array (nil suffix).
def bytes_to_closest bytes
  fsize_arr = [ 'B', 'KB', 'MB', 'GB', 'TB' ]
  return "0.0B" if bytes <= 0
  exp = (Math.log(bytes) / Math.log(1024)).to_i
  exp = fsize_arr.length - 1 if exp >= fsize_arr.length
  bytes /= 1024.0 ** exp
  "#{bytes.round(2)}#{fsize_arr[exp]}"
end
# Return a path that does not collide with an existing file by appending
# " (2)", " (3)", ... before the extension (Explorer-style).
# Uses File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
def safe_fname fname
  return fname unless File.exist? fname
  ext  = File.extname fname
  base = File.basename fname, ext
  dir  = File.dirname fname
  cur  = 2
  loop do
    candidate = "#{dir}/#{base} (#{cur})#{ext}"
    return candidate unless File.exist? candidate
    cur += 1
  end
end
# Rough "time remaining" in words for t seconds.
def time_distance t
  if t < 60
    case t
    when 0 then "- nevermind, done!"
    when 1..4 then "in a moment!"
    when 5..9 then "less than 10 seconds"
    when 10..19 then "less than 20 seconds"
    when 20..39 then "half a minute"
    else "less than a minute"
    end
  else # Work in minutes to avoid big numbers
    t = t / 60.0
    case t.to_i
    when 1 then "about a minute"
    when 2..45 then "#{t.round} minutes"
    when 46..90 then "about an hour" # was 45..90; 45 already matches above
    when 91..1440 then "about #{(t / 60.0).round} hours"
    when 1441..2520 then "about a day"
    when 2521..86400 then "about #{(t / 1440.0).round} days"
    else "about #{(t / 43200.0).round} months"
    end
  end
end
# Get elapsed time in words, e.g. 65 -> "in 1 minute, 5 seconds".
def time_elapsed t
return "instantly!" if t <= 0
# Format t as an epoch-relative GMT timestamp and split it into
# [sec, min, hour, day, month, year] integers (smallest unit first)
ta = Time.at(t).gmtime.strftime('%S|%M|%H|%-d|%-m|%Y').split('|', 6).collect { |i| i.to_i }
ta[-1] -= 1970 # years are counted from the epoch (1970)
ta[-2] -= 1 # months are 1-based in strftime output
ta[-3] -= 1 # days are 1-based in strftime output
# Count how many of the LARGEST units are zero so they can be dropped
i = 0
ta.reverse.each do |x|
break if x != 0
i += 1
end
# Unit suffixes, index-aligned with ta
suffix = [ "seconds", "minutes", "hours", "days", "months", "years" ];
# Drop the trailing "s" when the value is exactly 1
plural = ->(x, y) { x == 1 ? y[0..-2] : y }
# Format string to "value unit, "
format_str = ->(x) { "#{ta[x]} #{plural[ta[x], suffix[x]]}, " }
# Trim the all-zero leading units, then emit largest unit first
ta = ta.take(ta.length - i)
str = ""
(ta.length - 1).downto(0) { |x| str += format_str[x] }
"in #{str[0..-3]}" # strip the trailing ", "
end
# DCC download handler: connect to ip:port, stream the file to fname,
# acknowledging the running byte count per the DCC protocol, and draw a
# progress bar. A non-zero `read` resumes a partial download (append mode).
def dcc_download ip, port, fname, fsize, read = 0
  sock = nil
  begin
    # Timeout.timeout: Kernel#timeout is deprecated/removed in newer Rubies
    Timeout.timeout(5) { sock = TCPSocket.new ip, port }
  rescue Timeout::Error
    puts_abort "Connection to #{ip} timed out!"
  rescue SystemCallError => e
    # The original printed "#{e}" with `e` undefined here (NameError);
    # rescue the connect error explicitly so there is something to report.
    puts_abort "Failed to connect to \"#{ip}:#{port}\": #{e}"
  end
  fsize_clean = bytes_to_closest fsize
  avgs, last_check, start_time = [], Time.now - 2, Time.now
  fh = File.open fname, (read == 0 ? "w" : "a") # Write or append
  # Form the status bar (redrawn at most once per second)
  print_bar = ->() {
    print "\r\e[0K> [ \e[1;37m"
    pc = read.to_f / fsize.to_f * 100.0
    bars = (pc / 10).to_i
    bars.times { print "#" }
    (10 - bars).times { print " " }
    avg = avgs.average * 1024.0
    time_rem = time_distance ((fsize - read) / avg) * 8.0
    print "\e[0m ] #{pc.round(2)}% #{bytes_to_closest read}/#{fsize_clean} \e[1;37m@\e[0m #{bytes_to_closest avg}/s \e[1;37min\e[0m #{time_rem}"
    last_check = Time.now
    avgs.clear
  }
  while buf = sock.readpartial(8192)
    read += buf.bytesize
    avgs << buf.bytesize
    print_bar[] if (Time.now - last_check) > 1 and not avgs.empty?
    begin
      # DCC ack: total bytes received so far, network byte order
      sock.write_nonblock [read].pack('N')
    rescue Errno::EWOULDBLOCK
    rescue Errno::EAGAIN => e
      puts_error "#{File.basename fname} timed out! #{e}"
    end
    fh << buf
    break if read >= fsize
  end
  print_bar.call unless avgs.empty?
  elapsed_time = time_elapsed (Time.now - start_time).to_i
  sock.close
  fh.close
  puts "\n! \e[1;32mSUCCESS\e[0m: downloaded #{File.basename fname} #{elapsed_time}"
rescue EOFError, SocketError => e
  puts "\n! ERROR: #{File.basename fname} failed to download! #{e}"
end
if __FILE__ == $0 then
opts = Slop.parse! do
banner " Usage: #{$0} [options] [value] [links] [--files] [file1:file2:file3]\n"
on :help, :ignore_case => true
on 'v', 'version', 'Print version' do
puts "#{$0}: v#{$ver_str}"
exit
end
on 'config=', 'Config file location'
on 'user=', 'IRC \'USER\' for Ident'
on 'nick=', 'IRC nick'
on 'pass=', 'IRC \'PASS\' for Ident'
on 'realname=', 'Realname for \'USER\' Ident'
on 'nickserv=', 'Password for Nickserv'
on 'files=', 'Pass list of files to parse for links', as: Array, delimiter: ':'
on 'out-dir=', 'Output directory to save fiels to', :default => "./"
on 'skip-existing', 'Don\' download files that already exist', :default => false
end
if opts.help?
puts opts
puts "\n Examples"
puts " \txget.rb --config config.conf --nick test"
puts " \txget.rb --files test1.txt:test2.txt:test3.txt"
puts " \txget.rb irc.rizon.net/#news/ginpachi-sensei/1"
puts " \txget.rb irc.rizon.net/#news/ginpachi-sensei/41..46"
exit
end
# Get the config location
config_loc = opts["config"]
if config_loc.nil? or not File.exists? config_loc
config_loc = File.expand_path "~/.xget.conf"
config_loc = ".xget.conf" unless File.exists? config_loc
end
# Insert config settings from CLI arguments into the default "*" block.
# CLI flags are named 'realname'/'nickserv' but the config keys are
# :real/:nserv; map them explicitly (the original looked up opts["real"]
# and opts["nserv"], which never exist, so those flags were ignored).
cur_block = "*"
config["servers"][cur_block] = {}
{ "user" => :user, "nick" => :nick, "pass" => :pass,
  "realname" => :real, "nickserv" => :nserv }.each do |opt_name, conf_key|
  value = opts[opt_name]
  config["servers"][cur_block][conf_key] = value unless value.nil?
end
# Check if specified output directory actually exists
puts_abort "Out directory, \"#{opts["out-dir"]}\" doesn't exist!" unless Dir.exists? opts["out-dir"]
config["out-dir"] = opts["out-dir"].dup
config["out-dir"] << "/" unless config["out-dir"][-1] == "/"
# Parse config
config_copies = {}
File.open(config_loc, "r").each_line do |line|
next if line.length <= 1 or line[0] == '#'
if line =~ /^\[(\S+)\]$/ # Check if header
cur_block = $1
if cur_block.include? ',' # Check if header contains more than one server
tmp_split = cur_block.split(",")
next unless tmp_split[0] =~ /^(\w+?).(\w+?).(\w+?)$/
config_copies[tmp_split[0]] = []
tmp_split.each do |x| # Add all copies to copies hash
next if x == tmp_split[0] or not x =~ /^(\w+?).(\w+?).(\w+?)$/
config_copies[tmp_split[0]].push x unless config_copies[tmp_split[0]].include? x
end
cur_block = tmp_split[0]
end
# Set current block to the new header
config["servers"][cur_block] = {} unless config["servers"].has_key? cur_block
elsif line =~ /^(\S+)=(.*+?)$/
# Check if current line is specifying out directory
case $1
when "out-dir"
t_out_dir = File.expand_path $2
puts_abort "Out directory, \"#{t_out_dir}\" doesn't exist!" unless Dir.exists? t_out_dir
config[$1] = t_out_dir
config[$1] << "/" unless config[$1][-1] == "/"
next
when "skip-existing" then config[$1] = ($2 == "true")
else
# Add value to current header, default is *
t_sym = $1.downcase.to_sym
config["servers"][cur_block][t_sym] = $2 unless config["servers"][cur_block].has_key? t_sym
end
end
end
# Go through each and make copies of the original
unless config_copies.empty?
config_copies.each do |k,v|
v.each { |x| config["servers"][x] = config["servers"][k] }
end
end
# Take remaining arguments and all lines from --files arg and put into array
to_check = ($*)
if opts['files'] != nil and not opts['files'].empty?
opts['files'].each do |x|
File.open(x, "r").each_line { |y| to_check << y.chomp } if File.exists? x
end
end
if to_check.empty?
puts opts
abort "\n No jobs, nothing to do!"
end
# Parse to_check array for valid XDCC links, irc.serv.org/#chan/bot/pack
# A trailing "..N" requests an inclusive pack range, e.g. bot/41..46.
tmp_requests, tmp_range = [], []
to_check.each do |x|
  if x =~ /^(\w+?).(\w+?).(\w+?)\/#(\S+)\/(\S+)\/(\d+)(..\d+)?$/
    serv = [$1, $2, $3].join(".")
    info = (config["servers"].has_key?(serv) ? serv : "*")
    chan = "##{$4}"
    bot = $5
    pack = $6.to_i
    unless $7.nil?
      to_range = $7[2..-1].to_i # Clip off the ".."
      if pack >= to_range
        puts_error "Invalid range #{pack} to #{to_range} in \"#{x}\""
        next
      end
      tmp_range = ((pack + 1)..to_range).to_a
    end
    tmp_requests.push XDCC_REQ.new serv, chan, bot, pack, info
    # Convert range array to new requests. The original iterated
    # `rmp_range` — a typo that raised NameError whenever a pack range
    # was actually used.
    unless tmp_range.empty?
      tmp_range.each { |y| tmp_requests.push XDCC_REQ.new serv, chan, bot, y, info }
      tmp_range.clear
    end
  else
    puts_abort "#{x} is not a valid XDCC address\n XDCC Address format: irc.serv.com/#chan/bot/pack"
  end
end
# Remove duplicate entries from requests.
# The original nested-loop version never reset its inner index `j`, so
# after the first outer pass every recorded index was out of range and
# duplicates later in the list survived. Array#uniq with an explicit key
# (mirroring XDCC_REQ#eql?, which compares serv/chan/bot/pack) avoids
# the index bookkeeping entirely.
tmp_requests = tmp_requests.uniq { |r| [r.serv, r.chan, r.bot, r.pack] }
# Group requests by server: serv -> [XDCC_REQ, ...]
requests = {}
tmp_requests.each do |x|
  (requests[x.serv] ||= []) << x
end
if requests.empty?
  puts opts
  abort "\n No jobs, nothing to do!"
end
# Sort each server's requests by channel then pack, and list them.
# The original assigned the sorted array to the block parameter `v`,
# which never updated the hash — downloads ran in unsorted order.
requests.each do |k, v|
  puts "#{k} \e[1;37m->\e[0m"
  v.sort_by! { |x| [x.chan, x.pack] }
  v.each { |x| puts "\t#{x}" }
end
puts
# H-h-here we g-go...
requests.each do |k, v|
req, info = v[0], config["servers"][v[0].info]
last_chan, cur_req, motd = "", -1, false
nick_sent, nick_check, nick_valid = false, false, false
xdcc_sent, xdcc_accepted = false, false
xdcc_accept_time, xdcc_ret, req_send_time = nil, nil, nil
stream = Stream.new req.serv
bot = Bot.new stream
stream << "NICK #{info[:nick]}"
stream << "USER #{info[:user]} 0 * #{info[:real]}"
stream << "PASS #{info[:pass]}" unless info[:pass].nil?
# Handle read data
stream.on :READ do |data|
/^(?:[:](?<prefix>\S+) )?(?<type>\S+)(?: (?!:)(?<dest>.+?))?(?: [:](?<msg>.+))?$/ =~ data
#puts "\e[1;37m>>\e[0m #{prefix} | #{type} | #{dest} | #{msg}"
case type
when 'NOTICE'
if dest == 'AUTH'
if msg =~ /erroneous nickname/i
puts_error 'Login failed'
stream.disconnect
end
puts "> \e[1;32m#{msg}\e[0m"
else
if prefix =~ /^NickServ!/
if not nick_sent and info[:nserv] != nil
stream << "PRIVMSG NickServ :IDENTIFY #{info[:nserv]}"
nick_sent = true
elsif nick_sent and not nick_check
case msg
when /password incorrect/i
nick_valid = false
nick_check = true
when /password accepted/i
nick_valid = true
nick_check = true
end
end
puts "> \e[1;33m#{msg}\e[0m"
elsif prefix =~ /^#{Regexp.escape req.bot}!(.*)$/i
case msg
when /already requested that pack/i, /closing connection/i, /you have a dcc pending/i, /you can only have (\d+?) transfer at a time/i
puts_error msg
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
stream << 'QUIT'
else
puts "! #{prefix}: #{msg}"
end
end
end
when 'PRIVMSG'
if xdcc_sent and not xdcc_accepted and prefix =~ /#{Regexp.escape req.bot}!(.*)$/i
/^\001DCC SEND (?<fname>((".*?").*?|(\S+))) (?<ip>\d+) (?<port>\d+) (?<fsize>\d+)\001\015$/ =~ msg
unless $~.nil?
req_send_time = nil
tmp_fname = fname
fname = $1 if tmp_fname =~ /^"(.*)"$/
puts "Preparing to download: \e[36m#{fname}\e[0m"
fname = (config["out-dir"].dup << fname)
xdcc_ret = XDCC_SEND.new fname, fsize.to_i, [ip.to_i].pack('N').unpack('C4') * '.', port.to_i
# Check if the for unfinished download amd try to resume
if File.exists? xdcc_ret.fname and File.stat(xdcc_ret.fname).size < xdcc_ret.fsize
stream << "PRIVMSG #{req.bot} :\001DCC RESUME #{tmp_fname} #{xdcc_ret.port} #{File.stat(xdcc_ret.fname).size}\001"
xdcc_accepted = true
print "! Incomplete file detected. Attempting to resume..."
next # Skip and wait for "DCC ACCEPT"
elsif File.exists? xdcc_ret.fname
if config["skip-existing"]
puts_warning "File already exists, skipping..."
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
xdcc_sent, xdcc_accepted = false, false
xdcc_accept_time, xdcc_ret = nil, nil
next
else
puts_warnings "File already existing, using a safe name..."
xdcc_ret.fname = safe_fname xdcc_ret.fname
end
end
# It's a new download, start from beginning
Thread.new do
pid = fork do
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize
end
Process.wait pid
xdcc_sent, xdcc_accepted = false, false
xdcc_accept_time, xdcc_ret = nil, nil
end
end
elsif xdcc_accepted and xdcc_ret != nil and msg =~ /^\001DCC ACCEPT ((".*?").*?|(\S+)) (\d+) (\d+)\001\015$/
# DCC RESUME request accepted, continue the download!
xdcc_accept_time = nil
xdcc_accepted = false
puts "\e[1;32mSUCCESS\e[0m!"
Thread.new do
pid = fork do
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize, File.stat(xdcc_ret.fname).size
end
Process.wait pid
xdcc_sent, xdcc_accepted = false, false
xdcc_accept_time, xdcc_ret = nil, nil
end
end
when /^\d+?$/
type_i = type.to_i
case type_i
when 1 # Print welcome message, because it's nice
msg.sub!(/#{Regexp.escape info[:nick]}/, "\e[34m#{info[:nick]}\e[0m")
puts "! #{msg}"
when 400..533 # Handle errors, except 439
next if type_i == 439 # Skip 439
puts_error "#{msg}"
stream.disconnect
when 376 then motd = true # Mark the end of the MOTD
end
when 'PING' then stream << "PONG :#{msg}"
when 'ERROR' then (msg =~ /closing link/i ? puts(msg) : puts_error(msg))
end
end
# Handle things while waiting for data
stream.on :WAITING do
unless xdcc_accepted
if motd and not xdcc_sent
cur_req += 1
if cur_req >= v.length
stream.disconnect
next
end
req = v[cur_req]
if req.chan != last_chan
stream << "PART #{last_chan}" unless last_chan == ""
last_chan = req.chan
stream << "JOIN #{req.chan}"
end
sleep 1 unless cur_req == 0 # Cooldown between downloads
stream << "PRIVMSG #{req.bot} :XDCC SEND #{req.pack}"
req_send_time = Time.now
xdcc_sent = true
end
# Wait 3 seconds for DCC SEND response, if there isn't one, abort
if xdcc_sent and not req_send_time.nil? and not xdcc_accepted
if (Time.now - req_send_time).floor > 3
puts_error "#{req.bot} took too long to respond, are you sure it's a bot?"
stream.disconnect
bot.stop
end
end
# Wait 3 seconds for a DCC ACCEPT response, if there isn't one, don't resume
if xdcc_sent and xdcc_accepted and not xdcc_accept_time.nil?
if (Time.now - xdcc_accept_time).floor > 3
puts "FAILED! Bot client doesn't support resume!"
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize
end
end
end
end
# Print sent data, for debugging only really
stream.on :WROTE do |data|
#puts "\e[1;37m<<\e[0m #{data}"
end
# Start the bot
bot.start
end
end
# (stray "|" artifact line commented out so the file parses)
#!/usr/bin/env ruby
# xget.rb - xget
# Created by Rusty Shackleford on 2013/05/19
# Copyright (c) 2013, Rusty Shackleford
# All rights reserved.
begin
require 'Win32/Console/ANSI' if RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/
%w(socket thread slop timeout).each { |r| require r }
rescue LoadError
abort "#{$0} requires slop and, if you're on Windows, win32console\nPlease run 'gem install slop win32console'"
end
# Why isn't this enabled by default?
Thread.abort_on_exception = true
# Put standard output into syncronised mode
$stdout.sync = true
# Version values
$ver_maj, $ver_min, $ver_rev = 2, 1, 0
$ver_str = "#{$ver_maj}.#{$ver_min}.#{$ver_rev}"
config = {
"out-dir" => './',
"skip-existing" => false,
"servers" => {},
"sleep-interval" => 5,
"allow-queueing" => false
}
def puts_error msg
puts "! \e[31mERROR\e[0m: #{msg}"
end
def puts_abort msg
abort "! \e[31mERROR\e[0m: #{msg}"
end
# Print a warning with a yellow "WARNING" tag. Fixes the doubled colon
# in the original format string ("WARNING:\e[0m:" printed "WARNING:: ...").
def puts_warning msg
  puts "! \e[33mWARNING\e[0m: #{msg}"
end
# Extend IO to readlines without blocking
class IO
def gets_nonblock
@rlnb_buffer ||= ""
ch = nil
while ch = self.read_nonblock(1)
@rlnb_buffer += ch
if ch == "\n" then
res = @rlnb_buffer
@rlnb_buffer = ""
return res
end
end
end
end
# Extend Array to get averages
class Array
def average
inject(:+) / count
end
end
# Class to hold XDCC requests
class XDCC_REQ
attr_accessor :serv, :chan, :bot, :pack, :info
def initialize serv, chan, bot, pack, info = "*"
@serv = serv
@chan = chan
@bot = bot
@pack = pack
@info = info
end
def eql? other
self.serv == other.serv and self.chan == other.chan and self.bot == other.bot and self.pack == other.pack
end
def to_s
"[ #{self.serv}, #{self.chan}, #{self.bot}, #{self.pack}, #{self.info} ]"
end
end
# Class to hold DCC SEND info for when waiting for DCC ACCEPT
class XDCC_SEND
attr_accessor :fname, :fsize, :ip, :port
def initialize fname, fsize, ip, port
@fname = fname
@fsize = fsize
@ip = ip
@port = port
end
def to_s
"[ #{self.fname}, #{self.fsize}, #{self.ip}, #{self.port} ]"
end
end
# Class to emit events
module Emitter
def callbacks
@callbacks ||= Hash.new { |h, k| h[k] = [] }
end
def on type, &block
callbacks[type] << block
self
end
def emit type, *args
callbacks[type].each do |block|
block.call(*args)
end
end
end
# Class to handle IRC stream and emit events
class Stream
include Emitter
attr_accessor :io, :buf
def initialize serv
@buf = []
timeout(5) { @io = TCPSocket.new serv, 6667 }
rescue SocketError => e
puts_abort "Failed to connect to #{serv}! #{e.message}"
rescue Timeout::Error
puts_abort "Connection to #{serv} timed out!"
end
def disconnect
@io.puts 'QUIT'
end
def << data
@buf << data
end
def write
@buf.each do |x|
@io.puts x
emit :WROTE, x
end
@buf = []
rescue EOFError, Errno::ECONNRESET
emit :CLOSED
end
def read
read = @io.gets_nonblock
emit :READ, read
rescue IO::WaitReadable
emit :WAITING
rescue EOFError, Errno::ECONNRESET
emit :CLOSED
end
end
# Class to handle IRC stream
class Bot
attr_reader :stream
def initialize stream
@stream = stream
stream.on :CLOSED do stop; end
end
def start
@running = true
tick while @running
end
def stop
@running = false
end
def tick
stream.read
stream.write
end
end
# Human-readable size for a byte count, e.g. 1536 -> "1.5KB".
# Guards non-positive input (Math.log(0) is -Infinity and #to_i on it
# raises FloatDomainError) and clamps the exponent: the original allowed
# exp == fsize_arr.length, indexing past the array (nil suffix).
def bytes_to_closest bytes
  fsize_arr = [ 'B', 'KB', 'MB', 'GB', 'TB' ]
  return "0.0B" if bytes <= 0
  exp = (Math.log(bytes) / Math.log(1024)).to_i
  exp = fsize_arr.length - 1 if exp >= fsize_arr.length
  bytes /= 1024.0 ** exp
  "#{bytes.round(2)}#{fsize_arr[exp]}"
end
# Loop until there is no file with the same name
# Return +fname+ unchanged if the path is free, otherwise insert " (2)",
# " (3)", ... before the extension until an unused path is found.
def safe_fname fname
  # File.exist? — the File.exists? alias was removed in Ruby 3.2.
  return fname unless File.exist? fname
  ext = File.extname fname
  base = File.basename fname, ext
  dir = File.dirname fname
  cur = 2
  loop do
    candidate = "#{dir}/#{base} (#{cur})#{ext}"
    return candidate unless File.exist? candidate
    cur += 1
  end
end
# Get a close relative time remaining, in words
# Rough human wording for a time-remaining estimate.
# +t+ is seconds (may be a Float). Under a minute it is bucketed into short
# phrases; otherwise it is converted to minutes and bucketed again.
def time_distance t
  if t < 60
    return "- nevermind, done!" if t == 0
    return "in a moment!" if (1..4).cover?(t)
    return "less than 10 seconds" if (5..9).cover?(t)
    return "less than 20 seconds" if (10..19).cover?(t)
    return "half a minute" if (20..39).cover?(t)
    "less than a minute"
  else
    # Work in minutes to avoid big numbers.
    mins = t / 60.0
    whole = mins.to_i
    return "about a minute" if whole == 1
    return "#{mins.round} minutes" if (2..45).cover?(whole)
    return "about an hour" if (45..90).cover?(whole)
    return "about #{(mins / 60.0).round} hours" if (91..1440).cover?(whole)
    return "about a day" if (1441..2520).cover?(whole)
    return "about #{(mins / 1440.0).round} days" if (2521..86400).cover?(whole)
    "about #{(mins / 43200.0).round} months"
  end
end
# Get elapsed time in words
# Spell out an elapsed duration of +t+ seconds in words, largest unit first,
# e.g. 3661 -> "in 1 hour, 1 minute, 1 second".
def time_elapsed t
  return "instantly!" if t <= 0
  # Break t into [sec, min, hour, day, month, year] counts via the GMT epoch.
  parts = Time.at(t).gmtime.strftime('%S|%M|%H|%-d|%-m|%Y').split('|', 6).map(&:to_i)
  parts[-1] -= 1970 # years since the epoch
  parts[-2] -= 1    # months are 1-based
  parts[-3] -= 1    # days are 1-based
  # Count the largest units that are zero (they sit at the end of `parts`).
  zeros = 0
  parts.reverse_each do |v|
    break unless v.zero?
    zeros += 1
  end
  units = %w[seconds minutes hours days months years]
  parts = parts.take(parts.length - zeros)
  # Render largest unit first, trimming the plural "s" for a count of 1.
  pieces = (parts.length - 1).downto(0).map do |idx|
    count = parts[idx]
    label = count == 1 ? units[idx][0..-2] : units[idx]
    "#{count} #{label}"
  end
  "in #{pieces.join(', ')}"
end
# DCC download handler
# Download one file offered via DCC SEND, printing a live progress bar.
#
# ip, port - sender address from the CTCP DCC handshake
# fname    - local path to write
# fsize    - total expected size in bytes
# read     - bytes already on disk; non-zero resumes and appends
#
# Streams the socket to disk in 8 KiB chunks, acknowledging the running byte
# count back to the peer as required by the DCC protocol. EOF/socket errors
# are reported, not raised.
def dcc_download ip, port, fname, fsize, read = 0
  sock = nil
  begin
    # Timeout.timeout — the bare Kernel#timeout alias is gone on modern Rubies.
    Timeout.timeout(5) { sock = TCPSocket.new ip, port }
  rescue Timeout::Error
    puts_abort "Connection to #{ip} timed out!"
  end
  # Fixed: this message used to interpolate an undefined `e`, so reaching it
  # raised NameError instead of reporting the failure.
  puts_abort "Failed to connect to \"#{ip}:#{port}\"" if sock.nil?
  fsize_clean = bytes_to_closest fsize
  avgs, last_check, start_time = [], Time.now - 2, Time.now
  fh = File.open fname, (read == 0 ? "w" : "a") # Write or append
  # Render the status bar from the chunk sizes gathered since the last refresh.
  print_bar = ->() {
    print "\r\e[0K> [ \e[1;37m"
    pc = read.to_f / fsize.to_f * 100.0
    bars = (pc / 10).to_i
    bars.times { print "#" }
    (10 - bars).times { print " " }
    avg = avgs.average * 1024.0
    time_rem = time_distance ((fsize - read) / avg) * 8.0
    print "\e[0m ] #{pc.round(2)}% #{bytes_to_closest read}/#{fsize_clean} \e[1;37m@\e[0m #{bytes_to_closest avg}/s \e[1;37min\e[0m #{time_rem}"
    last_check = Time.now
    avgs.clear
  }
  while buf = sock.readpartial(8192)
    read += buf.bytesize
    avgs << buf.bytesize
    print_bar[] if (Time.now - last_check) > 1 and not avgs.empty?
    begin
      # DCC acknowledgement: total bytes received, big-endian 32-bit.
      sock.write_nonblock [read].pack('N')
    rescue Errno::EWOULDBLOCK
    rescue Errno::EAGAIN => e
      puts_error "#{File.basename fname} timed out! #{e}"
    end
    fh << buf
    break if read >= fsize
  end
  print_bar.call unless avgs.empty?
  elapsed_time = time_elapsed (Time.now - start_time).to_i
  sock.close
  fh.close
  puts "\n! \e[1;32mSUCCESS\e[0m: downloaded #{File.basename fname} #{elapsed_time}"
rescue EOFError, SocketError => e
  puts "\n! ERROR: #{File.basename fname} failed to download! #{e}"
end
if __FILE__ == $0 then
opts = Slop.parse do |o|
o.banner = " Usage: #{$0} [options] [value] [links] [--files] [file1:file2:file3]\n"
o.bool '-h', '--help', 'Prints help'
o.on '-v', '--version', 'Print version' do
puts "#{$0}: v#{$ver_str}"
exit
end
o.string '--config', 'Config file location'
o.string '--user', 'IRC \'USER\' for Ident'
o.string '--nick', 'IRC nick'
o.string '--pass', 'IRC \'PASS\' for Ident'
o.string '--realname', 'Realname for \'USER\' Ident'
o.string '--nickserv', 'Password for Nickserv'
o.array '--files', 'Pass list of files to parse for links', as: Array, delimiter: ':'
o.string '--out-dir', 'Output directory to save fiels to', :default => "./"
o.bool '--skip-existing', 'Don\' download files that already exist'
o.bool '--allow-queueing', 'Wait for pack to start downloading rather than fail immediately when queued'
o.int '--sleep-interval', 'Time in seconds to sleep before requesting next pack. Zero for no sleep.'
end
if opts.help?
puts opts
puts "\n Examples"
puts " \txget.rb --config config.conf --nick test"
puts " \txget.rb --files test1.txt:test2.txt:test3.txt"
puts " \txget.rb irc.rizon.net/#news/ginpachi-sensei/1"
puts " \txget.rb irc.rizon.net/#news/ginpachi-sensei/41..46"
exit
end
# Get the config location
config_loc = opts["config"]
config_loc = File.expand_path config_loc unless config_loc.nil?
if config_loc.nil? or not File.exists? config_loc
config_loc = File.expand_path "~/.xget.conf"
config_loc = ".xget.conf" unless File.exists? config_loc
unless File.exists? config_loc
puts "ERROR! Invalid config path '#{config_loc}''. Exiting!"
exit
end
end
# Insert config settings from arguments into config hash
cur_block = "*"
config["servers"][cur_block] = {}
%w(user nick pass real nserv).each do |x|
config["servers"][cur_block][x.to_sym] = opts[x] unless opts[x].nil?
end
# Check if specified output directory actually exists
puts_abort "Out directory, \"#{opts["out-dir"]}\" doesn't exist!" unless Dir.exists? opts["out-dir"]
config["out-dir"] = opts["out-dir"].dup
config["out-dir"] << "/" unless config["out-dir"][-1] == "/"
# Parse config
config_copies = {}
File.open(config_loc, "r").each_line do |line|
next if line.length <= 1 or line[0] == '#'
if line =~ /^\[(\S+)\]$/ # Check if header
cur_block = $1
if cur_block.include? ',' # Check if header contains more than one server
tmp_split = cur_block.split(",")
next unless tmp_split[0] =~ /^(\w+?).(\w+?).(\w+?)$/
config_copies[tmp_split[0]] = []
tmp_split.each do |x| # Add all copies to copies hash
next if x == tmp_split[0] or not x =~ /^(\w+?).(\w+?).(\w+?)$/
config_copies[tmp_split[0]].push x unless config_copies[tmp_split[0]].include? x
end
cur_block = tmp_split[0]
end
# Set current block to the new header
config["servers"][cur_block] = {} unless config["servers"].has_key? cur_block
elsif line =~ /^(\S+)=(.*+?)$/
# Check if current line is specifying out directory
case $1
when "out-dir"
t_out_dir = File.expand_path $2
puts_abort "Out directory, \"#{t_out_dir}\" doesn't exist!" unless Dir.exists? t_out_dir
config[$1] = t_out_dir
config[$1] << "/" unless config[$1][-1] == "/"
next
when "sleep-interval" then config[$1] = $2.to_i
when "skip-existing" then config[$1] = ($2 == "true")
when "allow-queueing" then config[$1] = ($2 == "true")
else
# Add value to current header, default is *
t_sym = $1.downcase.to_sym
config["servers"][cur_block][t_sym] = $2 unless config["servers"][cur_block].has_key? t_sym
end
end
end
# Go through each and make copies of the original
unless config_copies.empty?
config_copies.each do |k,v|
v.each { |x| config["servers"][x] = config["servers"][k] }
end
end
# Set the set the command line config options if specified
config["skip-existing"] = opts["skip-existing"] if opts["skip-existing"]
config["allow-queueing"] = opts["allow-queueing"] if opts["allow-queueing"]
config["sleep-interval"] = opts["sleep-interval"] unless opts["sleep-interval"].nil?
# Take remaining arguments and all lines from --files arg and put into array
to_check = ($*)
if opts['files'] != nil and not opts['files'].empty?
opts['files'].each do |x|
File.open(x, "r").each_line { |y| to_check << y.chomp } if File.exists? x
end
end
if to_check.empty?
puts opts
abort "\n No jobs, nothing to do!"
end
# Parse to_check array for valid XDCC links, irc.serv.org/#chan/bot/pack
tmp_requests, tmp_range = [], []
to_check.each do |x|
if x =~ /^(\w+?).(\w+?).(\w+?)\/#(\S+)\/(\S+)\/(\d+)(..\d+(\|\d+)?)?$/
serv = [$1, $2, $3].join(".")
info = (config["servers"].has_key?(serv) ? serv : "*")
chan = "##{$4}"
bot = $5
pack = $6.to_i
if $7.nil?
tmp_requests.push XDCC_REQ.new serv, chan, bot, pack, info
else
step = $8.nil? ? 1 : $8[1..-1].to_i
to_range = $7[2..-1].gsub(/(\|\d+)?$/, '').to_i # Clip off the ".." and the interval if present
if pack > to_range or pack == to_range
puts_error "Invalid range #{pack} to #{to_range} in \"#{x}\""
next
end
tmp_range =* (pack..to_range).step(step)
end
# Convert range array to new requests
unless tmp_range.empty?
tmp_range.each { |y| tmp_requests.push XDCC_REQ.new serv, chan, bot, y, info }
tmp_range.clear
end
else
puts_abort "#{x} is not a valid XDCC address\n XDCC Address format: irc.serv.com/#chan/bot/pack"
end
end
# Remove duplicate entries from requests
i = j = 0
to_pop = []
tmp_requests.each do |x|
tmp_requests.each do |y|
to_pop << j if x.eql? y if i != j
j += 1
end
i += 1
end
to_pop.each { |x| tmp_requests.delete_at(x) }
# Sort requests array to hash, serv {} -> chan {} -> requests []
requests = {}
tmp_requests.each do |x|
requests[x.serv] = [] unless requests.has_key? x.serv
requests[x.serv] << x
end
if requests.empty?
puts opts
abort "\n No jobs, nothing to do!"
end
# Sort requests by pack
requests.each do |k,v|
puts "#{k} \e[1;37m->\e[0m"
v = v.sort_by { |x| [x.chan, x.pack] }.each { |x| puts "\t#{x}" }
end
puts
# H-h-here we g-go...
requests.each do |k, v|
req, info = v[0], config["servers"][v[0].info]
last_chan, cur_req, motd = "", -1, false
nick_sent, nick_check, nick_valid = false, false, false
xdcc_sent, xdcc_accepted, xdcc_queued = false, false, false
xdcc_accept_time, xdcc_ret, req_send_time = nil, nil, nil
stream = Stream.new req.serv
bot = Bot.new stream
stream << "NICK #{info[:nick]}"
stream << "USER #{info[:user]} 0 * #{info[:real]}"
stream << "PASS #{info[:pass]}" unless info[:pass].nil?
# Handle read data
stream.on :READ do |data|
/^(?:[:](?<prefix>\S+) )?(?<type>\S+)(?: (?!:)(?<dest>.+?))?(?: [:](?<msg>.+))?$/ =~ data
#puts "\e[1;37m>>\e[0m #{prefix} | #{type} | #{dest} | #{msg}"
case type
when 'NOTICE'
if dest == 'AUTH'
if msg =~ /erroneous nickname/i
puts_error 'Login failed'
stream.disconnect
end
puts "> \e[1;32m#{msg}\e[0m"
else
if prefix =~ /^NickServ!/
if not nick_sent and info[:nserv] != nil
stream << "PRIVMSG NickServ :IDENTIFY #{info[:nserv]}"
nick_sent = true
elsif nick_sent and not nick_check
case msg
when /password incorrect/i
nick_valid = false
nick_check = true
when /password accepted/i
nick_valid = true
nick_check = true
end
end
puts "> \e[1;33m#{msg}\e[0m"
elsif prefix =~ /^#{Regexp.escape req.bot}!(.*)$/i
case msg
when /already requested that pack/i, /closing connection/i, /you have a dcc pending/i
puts_error msg
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
stream << 'QUIT'
when /you can only have (\d+?) transfer at a time/i
if config["allow-queueing"]
puts "! #{prefix}: #{msg}"
puts_warning "Pack queued, waiting for transfer to start..."
xdcc_queued = true
else
puts_error msg
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
stream << 'QUIT'
end
else
puts "! #{prefix}: #{msg}"
end
end
end
when 'PRIVMSG'
if xdcc_sent and not xdcc_accepted and prefix =~ /#{Regexp.escape req.bot}!(.*)$/i
/^\001DCC SEND (?<fname>((".*?").*?|(\S+))) (?<ip>\d+) (?<port>\d+) (?<fsize>\d+)\001\015$/ =~ msg
unless $~.nil?
req_send_time = nil
tmp_fname = fname
fname = $1 if tmp_fname =~ /^"(.*)"$/
puts "Preparing to download: \e[36m#{fname}\e[0m"
fname = (config["out-dir"].dup << fname)
xdcc_ret = XDCC_SEND.new fname, fsize.to_i, [ip.to_i].pack('N').unpack('C4') * '.', port.to_i
# Check if the for unfinished download amd try to resume
if File.exists? xdcc_ret.fname and File.stat(xdcc_ret.fname).size < xdcc_ret.fsize
stream << "PRIVMSG #{req.bot} :\001DCC RESUME #{tmp_fname} #{xdcc_ret.port} #{File.stat(xdcc_ret.fname).size}\001"
xdcc_accepted = true
print "! Incomplete file detected. Attempting to resume..."
next # Skip and wait for "DCC ACCEPT"
elsif File.exists? xdcc_ret.fname
if config["skip-existing"]
puts_warning "File already exists, skipping..."
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
xdcc_sent, xdcc_accepted, xdcc_queued = false, false, false
xdcc_accept_time, xdcc_ret = nil, nil
next
else
puts_warnings "File already existing, using a safe name..."
xdcc_ret.fname = safe_fname xdcc_ret.fname
end
end
# It's a new download, start from beginning
Thread.new do
pid = fork do
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize
end
Process.wait pid
xdcc_sent, xdcc_accepted, xdcc_queued = false, false, false
xdcc_accept_time, xdcc_ret = nil, nil
end
end
elsif xdcc_accepted and xdcc_ret != nil and msg =~ /^\001DCC ACCEPT ((".*?").*?|(\S+)) (\d+) (\d+)\001\015$/
# DCC RESUME request accepted, continue the download!
xdcc_accept_time = nil
xdcc_accepted = false
puts "\e[1;32mSUCCESS\e[0m!"
Thread.new do
pid = fork do
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize, File.stat(xdcc_ret.fname).size
end
Process.wait pid
xdcc_sent, xdcc_accepted, xdcc_queued = false, false, false
xdcc_accept_time, xdcc_ret = nil, nil
end
end
when /^\d+?$/
type_i = type.to_i
case type_i
when 1 # Print welcome message, because it's nice
msg.sub!(/#{Regexp.escape info[:nick]}/, "\e[34m#{info[:nick]}\e[0m")
puts "! #{msg}"
when 400..533 # Handle errors, except 439
next if type_i == 439 # Skip 439
puts_error "#{msg}"
stream.disconnect
when 376 then motd = true # Mark the end of the MOTD
end
when 'PING' then stream << "PONG :#{msg}"
when 'ERROR' then (msg =~ /closing link/i ? puts(msg) : puts_error(msg))
end
end
# Handle things while waiting for data
stream.on :WAITING do
unless xdcc_accepted
if motd and not xdcc_sent
cur_req += 1
if cur_req >= v.length
stream.disconnect
next
end
req = v[cur_req]
if req.chan != last_chan
stream << "PART #{last_chan}" unless last_chan == ""
last_chan = req.chan
stream << "JOIN #{req.chan}"
end
# Cooldown between downloads
if cur_req > 0
puts "Sleeping for #{config["sleep-interval"]} seconds before requesting the next pack"
sleep(config["sleep-interval"])
end
stream << "PRIVMSG #{req.bot} :XDCC SEND #{req.pack}"
req_send_time = Time.now
xdcc_sent = true
end
# Wait 3 seconds for DCC SEND response, if there isn't one, abort
if xdcc_sent and not req_send_time.nil? and not xdcc_accepted
if config["allow-queueing"] and xdcc_queued
next
end
if (Time.now - req_send_time).floor > 3
puts_error "#{req.bot} took too long to respond, are you sure it's a bot?"
stream.disconnect
bot.stop
end
end
# Wait 3 seconds for a DCC ACCEPT response, if there isn't one, don't resume
if xdcc_sent and xdcc_accepted and not xdcc_accept_time.nil?
if (Time.now - xdcc_accept_time).floor > 3
puts "FAILED! Bot client doesn't support resume!"
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize
end
end
end
end
# Print sent data, for debugging only really
stream.on :WROTE do |data|
#puts "\e[1;37m<<\e[0m #{data}"
end
# Start the bot
bot.start
end
end
Revert "Fixed last change"
This reverts commit 18da9851fe6403b12dfd10e6edd78ccab7312ce3.
#!/usr/bin/env ruby
# xget.rb - xget
# Created by Rusty Shackleford on 2013/05/19
# Copyright (c) 2013, Rusty Shackleford
# All rights reserved.
#
# Automated XDCC downloader: parses irc.serv.org/#chan/bot/pack style links,
# connects to the IRC server and fetches the packs over DCC.
begin
  # win32console supplies ANSI colour escape support on Windows terminals.
  require 'Win32/Console/ANSI' if RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/
  %w(socket thread slop timeout).each { |r| require r }
rescue LoadError
  abort "#{$0} requires slop and, if you're on Windows, win32console\nPlease run 'gem install slop win32console'"
end
# Why isn't this enabled by default?
Thread.abort_on_exception = true
# Put standard output into synchronised mode so the progress bar redraws.
$stdout.sync = true
# Version values
$ver_maj, $ver_min, $ver_rev = 2, 1, 0
$ver_str = "#{$ver_maj}.#{$ver_min}.#{$ver_rev}"
# Runtime configuration defaults; overridden by the config file and CLI flags.
config = {
  "out-dir" => './',
  "skip-existing" => false,
  "servers" => {},
  "sleep-interval" => 5,
  "allow-queueing" => false
}
# Print a red ERROR-tagged line to stdout (non-fatal; see puts_abort).
def puts_error msg
  puts format("! \e[31mERROR\e[0m: %s", msg)
end
# Print a red ERROR-tagged message to stderr and terminate the process
# (abort raises SystemExit with a non-zero status).
def puts_abort msg
  abort format("! \e[31mERROR\e[0m: %s", msg)
end
# Print a yellow WARNING-tagged line to stdout.
# Fixed: the tag previously rendered as "WARNING:: msg" because a colon sat
# both inside and after the colour escape; now matches puts_error's format.
def puts_warning msg
  puts "! \e[33mWARNING\e[0m: #{msg}"
end
# Extend IO to readlines without blocking
# Extend IO to read one line at a time without blocking.
class IO
  # Return one "\n"-terminated line, or raise instead of blocking.
  # Accumulates single bytes in @rlnb_buffer across calls, so a partial line
  # received now is completed by a later call. Propagates IO::WaitReadable
  # when no data is pending and EOFError at end of stream — callers
  # (Stream#read) rescue both.
  def gets_nonblock
    @rlnb_buffer ||= ""
    ch = nil
    # read_nonblock(1) never returns nil; the loop exits via the return below
    # or via a raised IO::WaitReadable / EOFError.
    while ch = self.read_nonblock(1)
      @rlnb_buffer += ch
      if ch == "\n" then
        res = @rlnb_buffer
        @rlnb_buffer = ""
        return res
      end
    end
  end
end
# Extend Array to get averages
# Extend Array with an arithmetic mean helper.
class Array
  # Mean of the elements.
  # NOTE(review): with all-Integer elements this truncates (integer
  # division), and an empty array raises NoMethodError (inject returns nil).
  # The callers in this file guard with `avgs.empty?` before calling.
  def average
    inject(:+) / count
  end
end
# Class to hold XDCC requests
# Value object describing one XDCC request: which server/channel/bot to ask
# and which pack number. +info+ names the config section supplying the IRC
# credentials ("*" is the default section).
class XDCC_REQ
  attr_accessor :serv, :chan, :bot, :pack, :info
  def initialize serv, chan, bot, pack, info = "*"
    @serv, @chan, @bot, @pack, @info = serv, chan, bot, pack, info
  end
  # Same job when server, channel, bot and pack match; +info+ is ignored.
  # (No matching #hash override — only used by the explicit duplicate scan,
  # not as a Hash key.)
  def eql? other
    [serv, chan, bot, pack] == [other.serv, other.chan, other.bot, other.pack]
  end
  # One-line summary used when listing queued jobs.
  def to_s
    "[ #{serv}, #{chan}, #{bot}, #{pack}, #{info} ]"
  end
end
# Class to hold DCC SEND info for when waiting for DCC ACCEPT
# Value holder for a pending DCC SEND offer (file name/size and the sender's
# address), kept around while waiting for a possible DCC ACCEPT.
class XDCC_SEND
  attr_accessor :fname, :fsize, :ip, :port
  def initialize fname, fsize, ip, port
    @fname, @fsize, @ip, @port = fname, fsize, ip, port
  end
  # Debug-friendly rendering of all four fields.
  def to_s
    "[ #{fname}, #{fsize}, #{ip}, #{port} ]"
  end
end
# Class to emit events
# Minimal pub/sub mix-in: register handler blocks with #on, fire them with #emit.
module Emitter
  # Lazily-built map of event type -> list of handler blocks.
  def callbacks
    @callbacks ||= Hash.new { |hash, key| hash[key] = [] }
  end
  # Register +block+ for +type+ events; returns self so registrations chain.
  def on type, &block
    callbacks[type].push block
    self
  end
  # Invoke every handler registered for +type+, passing +args+ through.
  def emit type, *args
    callbacks[type].each { |handler| handler.call(*args) }
  end
end
# Class to handle IRC stream and emit events
# Thin wrapper around the raw IRC TCP connection. Outbound lines queue in
# +buf+ and are flushed by #write; #read pulls one line and emits events
# (:READ, :WAITING, :CLOSED, :WROTE) that the Bot loop reacts to.
class Stream
  include Emitter
  attr_accessor :io, :buf
  # Connect to +serv+ on the plain IRC port 6667; aborts the process on
  # DNS/connect failure or after a 5 second timeout.
  def initialize serv
    @buf = []
    # Timeout.timeout — the bare Kernel#timeout alias is deprecated and was
    # removed from the timeout library on modern Rubies.
    Timeout.timeout(5) { @io = TCPSocket.new serv, 6667 }
  rescue SocketError => e
    puts_abort "Failed to connect to #{serv}! #{e.message}"
  rescue Timeout::Error
    puts_abort "Connection to #{serv} timed out!"
  end
  # Politely end the IRC session; the server closing the socket then surfaces
  # as :CLOSED on a later read/write.
  def disconnect
    @io.puts 'QUIT'
  end
  # Queue one outbound line (sent on the next #write flush).
  def << data
    @buf << data
  end
  # Flush queued lines, emitting :WROTE per line; :CLOSED on a dead socket.
  def write
    @buf.each do |x|
      @io.puts x
      emit :WROTE, x
    end
    @buf = []
  rescue EOFError, Errno::ECONNRESET
    emit :CLOSED
  end
  # Emit :READ with one line when available, :WAITING when no data is ready,
  # or :CLOSED when the connection has gone away.
  def read
    read = @io.gets_nonblock
    emit :READ, read
  rescue IO::WaitReadable
    emit :WAITING
  rescue EOFError, Errno::ECONNRESET
    emit :CLOSED
  end
end
# Class to handle IRC stream
# Drives a Stream: pumps read/write on every tick until stopped, stopping
# automatically when the stream reports :CLOSED.
class Bot
  attr_reader :stream
  def initialize stream
    @stream = stream
    stream.on(:CLOSED) { stop }
  end
  # Run the pump loop until #stop is called.
  def start
    @running = true
    tick while @running
  end
  def stop
    @running = false
  end
  # One pump cycle: drain incoming data, then flush queued output.
  def tick
    stream.read
    stream.write
  end
end
# Get relative size from bytes
# Convert a raw byte count into a human readable string, e.g. 2048 -> "2.0KB".
# Sizes beyond the largest unit are expressed in TB; non-positive input
# yields "0B" (Math.log(0) is -Infinity and previously crashed in to_i).
def bytes_to_closest bytes
  fsize_arr = [ 'B', 'KB', 'MB', 'GB', 'TB' ]
  return "0B" if bytes <= 0
  exp = (Math.log(bytes) / Math.log(1024)).to_i
  # Clamp to the last valid index. The old check (`> length`) let exp reach
  # fsize_arr.length, indexing past the array and printing a nil unit.
  exp = fsize_arr.length - 1 if exp > fsize_arr.length - 1
  bytes /= 1024.0 ** exp
  return "#{bytes.round(2)}#{fsize_arr[exp]}"
end
# Loop until there is no file with the same name
# Return +fname+ unchanged if the path is free, otherwise insert " (2)",
# " (3)", ... before the extension until an unused path is found.
def safe_fname fname
  # File.exist? — the File.exists? alias was removed in Ruby 3.2.
  return fname unless File.exist? fname
  ext = File.extname fname
  base = File.basename fname, ext
  dir = File.dirname fname
  cur = 2
  loop do
    candidate = "#{dir}/#{base} (#{cur})#{ext}"
    return candidate unless File.exist? candidate
    cur += 1
  end
end
# Get a close relative time remaining, in words
# Rough human wording for a time-remaining estimate.
# +t+ is seconds (may be a Float). Under a minute it is bucketed into short
# phrases; otherwise it is converted to minutes and bucketed again.
def time_distance t
  if t < 60
    return "- nevermind, done!" if t == 0
    return "in a moment!" if (1..4).cover?(t)
    return "less than 10 seconds" if (5..9).cover?(t)
    return "less than 20 seconds" if (10..19).cover?(t)
    return "half a minute" if (20..39).cover?(t)
    "less than a minute"
  else
    # Work in minutes to avoid big numbers.
    mins = t / 60.0
    whole = mins.to_i
    return "about a minute" if whole == 1
    return "#{mins.round} minutes" if (2..45).cover?(whole)
    return "about an hour" if (45..90).cover?(whole)
    return "about #{(mins / 60.0).round} hours" if (91..1440).cover?(whole)
    return "about a day" if (1441..2520).cover?(whole)
    return "about #{(mins / 1440.0).round} days" if (2521..86400).cover?(whole)
    "about #{(mins / 43200.0).round} months"
  end
end
# Get elapsed time in words
# Spell out an elapsed duration of +t+ seconds in words, largest unit first,
# e.g. 3661 -> "in 1 hour, 1 minute, 1 second".
def time_elapsed t
  return "instantly!" if t <= 0
  # Break t into [sec, min, hour, day, month, year] counts via the GMT epoch.
  parts = Time.at(t).gmtime.strftime('%S|%M|%H|%-d|%-m|%Y').split('|', 6).map(&:to_i)
  parts[-1] -= 1970 # years since the epoch
  parts[-2] -= 1    # months are 1-based
  parts[-3] -= 1    # days are 1-based
  # Count the largest units that are zero (they sit at the end of `parts`).
  zeros = 0
  parts.reverse_each do |v|
    break unless v.zero?
    zeros += 1
  end
  units = %w[seconds minutes hours days months years]
  parts = parts.take(parts.length - zeros)
  # Render largest unit first, trimming the plural "s" for a count of 1.
  pieces = (parts.length - 1).downto(0).map do |idx|
    count = parts[idx]
    label = count == 1 ? units[idx][0..-2] : units[idx]
    "#{count} #{label}"
  end
  "in #{pieces.join(', ')}"
end
# DCC download handler
# Download one file offered via DCC SEND, printing a live progress bar.
#
# ip, port - sender address from the CTCP DCC handshake
# fname    - local path to write
# fsize    - total expected size in bytes
# read     - bytes already on disk; non-zero resumes and appends
#
# Streams the socket to disk in 8 KiB chunks, acknowledging the running byte
# count back to the peer as required by the DCC protocol. EOF/socket errors
# are reported, not raised.
def dcc_download ip, port, fname, fsize, read = 0
  sock = nil
  begin
    # Timeout.timeout — the bare Kernel#timeout alias is gone on modern Rubies.
    Timeout.timeout(5) { sock = TCPSocket.new ip, port }
  rescue Timeout::Error
    puts_abort "Connection to #{ip} timed out!"
  end
  # Fixed: this message used to interpolate an undefined `e`, so reaching it
  # raised NameError instead of reporting the failure.
  puts_abort "Failed to connect to \"#{ip}:#{port}\"" if sock.nil?
  fsize_clean = bytes_to_closest fsize
  avgs, last_check, start_time = [], Time.now - 2, Time.now
  fh = File.open fname, (read == 0 ? "w" : "a") # Write or append
  # Render the status bar from the chunk sizes gathered since the last refresh.
  print_bar = ->() {
    print "\r\e[0K> [ \e[1;37m"
    pc = read.to_f / fsize.to_f * 100.0
    bars = (pc / 10).to_i
    bars.times { print "#" }
    (10 - bars).times { print " " }
    avg = avgs.average * 1024.0
    time_rem = time_distance ((fsize - read) / avg) * 8.0
    print "\e[0m ] #{pc.round(2)}% #{bytes_to_closest read}/#{fsize_clean} \e[1;37m@\e[0m #{bytes_to_closest avg}/s \e[1;37min\e[0m #{time_rem}"
    last_check = Time.now
    avgs.clear
  }
  while buf = sock.readpartial(8192)
    read += buf.bytesize
    avgs << buf.bytesize
    print_bar[] if (Time.now - last_check) > 1 and not avgs.empty?
    begin
      # DCC acknowledgement: total bytes received, big-endian 32-bit.
      sock.write_nonblock [read].pack('N')
    rescue Errno::EWOULDBLOCK
    rescue Errno::EAGAIN => e
      puts_error "#{File.basename fname} timed out! #{e}"
    end
    fh << buf
    break if read >= fsize
  end
  print_bar.call unless avgs.empty?
  elapsed_time = time_elapsed (Time.now - start_time).to_i
  sock.close
  fh.close
  puts "\n! \e[1;32mSUCCESS\e[0m: downloaded #{File.basename fname} #{elapsed_time}"
rescue EOFError, SocketError => e
  puts "\n! ERROR: #{File.basename fname} failed to download! #{e}"
end
if __FILE__ == $0 then
opts = Slop.parse do |o|
o.banner = " Usage: #{$0} [options] [value] [links] [--files] [file1:file2:file3]\n"
o.bool '-h', '--help', 'Prints help'
o.on '-v', '--version', 'Print version' do
puts "#{$0}: v#{$ver_str}"
exit
end
o.string '--config', 'Config file location'
o.string '--user', 'IRC \'USER\' for Ident'
o.string '--nick', 'IRC nick'
o.string '--pass', 'IRC \'PASS\' for Ident'
o.string '--realname', 'Realname for \'USER\' Ident'
o.string '--nickserv', 'Password for Nickserv'
o.array '--files', 'Pass list of files to parse for links', as: Array, delimiter: ':'
o.string '--out-dir', 'Output directory to save fiels to', :default => "./"
o.bool '--skip-existing', 'Don\' download files that already exist'
o.bool '--allow-queueing', 'Wait for pack to start downloading rather than fail immediately when queued'
o.int '--sleep-interval', 'Time in seconds to sleep before requesting next pack. Zero for no sleep.'
end
if opts.help?
puts opts
puts "\n Examples"
puts " \txget.rb --config config.conf --nick test"
puts " \txget.rb --files test1.txt:test2.txt:test3.txt"
puts " \txget.rb irc.rizon.net/#news/ginpachi-sensei/1"
puts " \txget.rb irc.rizon.net/#news/ginpachi-sensei/41..46"
exit
end
# Get the config location.
# Fixed: File.expand_path(nil) raises TypeError when --config is omitted, so
# only expand when the flag was given; otherwise fall back to ~/.xget.conf,
# then ./.xget.conf. (Also drops a stray quote from the error message.)
config_loc = opts["config"]
config_loc = File.expand_path config_loc unless config_loc.nil?
if config_loc.nil? or not File.exists? config_loc
  config_loc = File.expand_path "~/.xget.conf"
  config_loc = ".xget.conf" unless File.exists? config_loc
  unless File.exists? config_loc
    puts "ERROR! Invalid config path '#{config_loc}'. Exiting!"
    exit
  end
end
# Insert config settings from arguments into config hash
cur_block = "*"
config["servers"][cur_block] = {}
%w(user nick pass real nserv).each do |x|
config["servers"][cur_block][x.to_sym] = opts[x] unless opts[x].nil?
end
# Check if specified output directory actually exists
puts_abort "Out directory, \"#{opts["out-dir"]}\" doesn't exist!" unless Dir.exists? opts["out-dir"]
config["out-dir"] = opts["out-dir"].dup
config["out-dir"] << "/" unless config["out-dir"][-1] == "/"
# Parse config
config_copies = {}
File.open(config_loc, "r").each_line do |line|
next if line.length <= 1 or line[0] == '#'
if line =~ /^\[(\S+)\]$/ # Check if header
cur_block = $1
if cur_block.include? ',' # Check if header contains more than one server
tmp_split = cur_block.split(",")
next unless tmp_split[0] =~ /^(\w+?).(\w+?).(\w+?)$/
config_copies[tmp_split[0]] = []
tmp_split.each do |x| # Add all copies to copies hash
next if x == tmp_split[0] or not x =~ /^(\w+?).(\w+?).(\w+?)$/
config_copies[tmp_split[0]].push x unless config_copies[tmp_split[0]].include? x
end
cur_block = tmp_split[0]
end
# Set current block to the new header
config["servers"][cur_block] = {} unless config["servers"].has_key? cur_block
elsif line =~ /^(\S+)=(.*+?)$/
# Check if current line is specifying out directory
case $1
when "out-dir"
t_out_dir = File.expand_path $2
puts_abort "Out directory, \"#{t_out_dir}\" doesn't exist!" unless Dir.exists? t_out_dir
config[$1] = t_out_dir
config[$1] << "/" unless config[$1][-1] == "/"
next
when "sleep-interval" then config[$1] = $2.to_i
when "skip-existing" then config[$1] = ($2 == "true")
when "allow-queueing" then config[$1] = ($2 == "true")
else
# Add value to current header, default is *
t_sym = $1.downcase.to_sym
config["servers"][cur_block][t_sym] = $2 unless config["servers"][cur_block].has_key? t_sym
end
end
end
# Go through each and make copies of the original
unless config_copies.empty?
config_copies.each do |k,v|
v.each { |x| config["servers"][x] = config["servers"][k] }
end
end
# Set the set the command line config options if specified
config["skip-existing"] = opts["skip-existing"] if opts["skip-existing"]
config["allow-queueing"] = opts["allow-queueing"] if opts["allow-queueing"]
config["sleep-interval"] = opts["sleep-interval"] unless opts["sleep-interval"].nil?
# Take remaining arguments and all lines from --files arg and put into array
to_check = ($*)
if opts['files'] != nil and not opts['files'].empty?
opts['files'].each do |x|
File.open(x, "r").each_line { |y| to_check << y.chomp } if File.exists? x
end
end
if to_check.empty?
puts opts
abort "\n No jobs, nothing to do!"
end
# Parse to_check array for valid XDCC links, irc.serv.org/#chan/bot/pack
tmp_requests, tmp_range = [], []
to_check.each do |x|
if x =~ /^(\w+?).(\w+?).(\w+?)\/#(\S+)\/(\S+)\/(\d+)(..\d+(\|\d+)?)?$/
serv = [$1, $2, $3].join(".")
info = (config["servers"].has_key?(serv) ? serv : "*")
chan = "##{$4}"
bot = $5
pack = $6.to_i
if $7.nil?
tmp_requests.push XDCC_REQ.new serv, chan, bot, pack, info
else
step = $8.nil? ? 1 : $8[1..-1].to_i
to_range = $7[2..-1].gsub(/(\|\d+)?$/, '').to_i # Clip off the ".." and the interval if present
if pack > to_range or pack == to_range
puts_error "Invalid range #{pack} to #{to_range} in \"#{x}\""
next
end
tmp_range =* (pack..to_range).step(step)
end
# Convert range array to new requests
unless tmp_range.empty?
tmp_range.each { |y| tmp_requests.push XDCC_REQ.new serv, chan, bot, y, info }
tmp_range.clear
end
else
puts_abort "#{x} is not a valid XDCC address\n XDCC Address format: irc.serv.com/#chan/bot/pack"
end
end
# Remove duplicate entries from requests
i = j = 0
to_pop = []
tmp_requests.each do |x|
tmp_requests.each do |y|
to_pop << j if x.eql? y if i != j
j += 1
end
i += 1
end
to_pop.each { |x| tmp_requests.delete_at(x) }
# Sort requests array to hash, serv {} -> chan {} -> requests []
requests = {}
tmp_requests.each do |x|
requests[x.serv] = [] unless requests.has_key? x.serv
requests[x.serv] << x
end
if requests.empty?
puts opts
abort "\n No jobs, nothing to do!"
end
# Sort requests by pack
requests.each do |k,v|
puts "#{k} \e[1;37m->\e[0m"
v = v.sort_by { |x| [x.chan, x.pack] }.each { |x| puts "\t#{x}" }
end
puts
# H-h-here we g-go...
requests.each do |k, v|
req, info = v[0], config["servers"][v[0].info]
last_chan, cur_req, motd = "", -1, false
nick_sent, nick_check, nick_valid = false, false, false
xdcc_sent, xdcc_accepted, xdcc_queued = false, false, false
xdcc_accept_time, xdcc_ret, req_send_time = nil, nil, nil
stream = Stream.new req.serv
bot = Bot.new stream
stream << "NICK #{info[:nick]}"
stream << "USER #{info[:user]} 0 * #{info[:real]}"
stream << "PASS #{info[:pass]}" unless info[:pass].nil?
# Handle read data
stream.on :READ do |data|
/^(?:[:](?<prefix>\S+) )?(?<type>\S+)(?: (?!:)(?<dest>.+?))?(?: [:](?<msg>.+))?$/ =~ data
#puts "\e[1;37m>>\e[0m #{prefix} | #{type} | #{dest} | #{msg}"
case type
when 'NOTICE'
if dest == 'AUTH'
if msg =~ /erroneous nickname/i
puts_error 'Login failed'
stream.disconnect
end
puts "> \e[1;32m#{msg}\e[0m"
else
if prefix =~ /^NickServ!/
if not nick_sent and info[:nserv] != nil
stream << "PRIVMSG NickServ :IDENTIFY #{info[:nserv]}"
nick_sent = true
elsif nick_sent and not nick_check
case msg
when /password incorrect/i
nick_valid = false
nick_check = true
when /password accepted/i
nick_valid = true
nick_check = true
end
end
puts "> \e[1;33m#{msg}\e[0m"
elsif prefix =~ /^#{Regexp.escape req.bot}!(.*)$/i
case msg
when /already requested that pack/i, /closing connection/i, /you have a dcc pending/i
puts_error msg
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
stream << 'QUIT'
when /you can only have (\d+?) transfer at a time/i
if config["allow-queueing"]
puts "! #{prefix}: #{msg}"
puts_warning "Pack queued, waiting for transfer to start..."
xdcc_queued = true
else
puts_error msg
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
stream << 'QUIT'
end
else
puts "! #{prefix}: #{msg}"
end
end
end
when 'PRIVMSG'
if xdcc_sent and not xdcc_accepted and prefix =~ /#{Regexp.escape req.bot}!(.*)$/i
/^\001DCC SEND (?<fname>((".*?").*?|(\S+))) (?<ip>\d+) (?<port>\d+) (?<fsize>\d+)\001\015$/ =~ msg
unless $~.nil?
req_send_time = nil
tmp_fname = fname
fname = $1 if tmp_fname =~ /^"(.*)"$/
puts "Preparing to download: \e[36m#{fname}\e[0m"
fname = (config["out-dir"].dup << fname)
xdcc_ret = XDCC_SEND.new fname, fsize.to_i, [ip.to_i].pack('N').unpack('C4') * '.', port.to_i
# Check if the for unfinished download amd try to resume
if File.exists? xdcc_ret.fname and File.stat(xdcc_ret.fname).size < xdcc_ret.fsize
stream << "PRIVMSG #{req.bot} :\001DCC RESUME #{tmp_fname} #{xdcc_ret.port} #{File.stat(xdcc_ret.fname).size}\001"
xdcc_accepted = true
print "! Incomplete file detected. Attempting to resume..."
next # Skip and wait for "DCC ACCEPT"
elsif File.exists? xdcc_ret.fname
if config["skip-existing"]
puts_warning "File already exists, skipping..."
stream << "PRIVMSG #{req.bot} :XDCC CANCEL"
xdcc_sent, xdcc_accepted, xdcc_queued = false, false, false
xdcc_accept_time, xdcc_ret = nil, nil
next
else
puts_warnings "File already existing, using a safe name..."
xdcc_ret.fname = safe_fname xdcc_ret.fname
end
end
# It's a new download, start from beginning
Thread.new do
pid = fork do
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize
end
Process.wait pid
xdcc_sent, xdcc_accepted, xdcc_queued = false, false, false
xdcc_accept_time, xdcc_ret = nil, nil
end
end
elsif xdcc_accepted and xdcc_ret != nil and msg =~ /^\001DCC ACCEPT ((".*?").*?|(\S+)) (\d+) (\d+)\001\015$/
# DCC RESUME request accepted, continue the download!
xdcc_accept_time = nil
xdcc_accepted = false
puts "\e[1;32mSUCCESS\e[0m!"
Thread.new do
pid = fork do
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize, File.stat(xdcc_ret.fname).size
end
Process.wait pid
xdcc_sent, xdcc_accepted, xdcc_queued = false, false, false
xdcc_accept_time, xdcc_ret = nil, nil
end
end
when /^\d+?$/
type_i = type.to_i
case type_i
when 1 # Print welcome message, because it's nice
msg.sub!(/#{Regexp.escape info[:nick]}/, "\e[34m#{info[:nick]}\e[0m")
puts "! #{msg}"
when 400..533 # Handle errors, except 439
next if type_i == 439 # Skip 439
puts_error "#{msg}"
stream.disconnect
when 376 then motd = true # Mark the end of the MOTD
end
when 'PING' then stream << "PONG :#{msg}"
when 'ERROR' then (msg =~ /closing link/i ? puts(msg) : puts_error(msg))
end
end
# Handle things while waiting for data
stream.on :WAITING do
unless xdcc_accepted
if motd and not xdcc_sent
cur_req += 1
if cur_req >= v.length
stream.disconnect
next
end
req = v[cur_req]
if req.chan != last_chan
stream << "PART #{last_chan}" unless last_chan == ""
last_chan = req.chan
stream << "JOIN #{req.chan}"
end
# Cooldown between downloads
if cur_req > 0
puts "Sleeping for #{config["sleep-interval"]} seconds before requesting the next pack"
sleep(config["sleep-interval"])
end
stream << "PRIVMSG #{req.bot} :XDCC SEND #{req.pack}"
req_send_time = Time.now
xdcc_sent = true
end
# Wait 3 seconds for DCC SEND response, if there isn't one, abort
if xdcc_sent and not req_send_time.nil? and not xdcc_accepted
if config["allow-queueing"] and xdcc_queued
next
end
if (Time.now - req_send_time).floor > 3
puts_error "#{req.bot} took too long to respond, are you sure it's a bot?"
stream.disconnect
bot.stop
end
end
# Wait 3 seconds for a DCC ACCEPT response, if there isn't one, don't resume
if xdcc_sent and xdcc_accepted and not xdcc_accept_time.nil?
if (Time.now - xdcc_accept_time).floor > 3
puts "FAILED! Bot client doesn't support resume!"
puts "Connecting to: #{req.bot} @ #{xdcc_ret.ip}:#{xdcc_ret.port}"
dcc_download xdcc_ret.ip, xdcc_ret.port, xdcc_ret.fname, xdcc_ret.fsize
end
end
end
end
# Print sent data, for debugging only really
stream.on :WROTE do |data|
#puts "\e[1;37m<<\e[0m #{data}"
end
# Start the bot
bot.start
end
end
|
# mruby compatibility shim: mix a stripped-down copy of Kernel into IO.
# Kernel.dup yields an anonymous copy of the module; class_eval removes
# every instance method from that copy except #hash, and the now-inert
# module is included so IO gains Kernel in its ancestry without
# overriding any IO behavior.
# NOTE(review): presumably needed so Kernel-dependent code paths resolve
# on mruby — confirm against the targeted mruby build.
class IO
include(Kernel.dup.class_eval do
# Strip all copied methods except #hash so the mixin changes nothing.
(instance_methods - %i(hash)).each{ |m| remove_method m }
self
end)
end
Implemented IO#read_nonblock and IO#write_nonblock for mruby.
# mruby compatibility shim for IO: provides EAGAIN, the WaitReadable /
# WaitWritable marker modules, and select-based emulations of
# read_nonblock / write_nonblock when the platform lacks them.
class IO
# Mix in a stripped copy of Kernel (all methods except #hash removed) so
# IO gains Kernel in its ancestry without changing behavior.
include(Kernel.dup.class_eval do
(instance_methods - %i(hash)).each{ |m| remove_method m }
self
end)
# Define an EAGAIN placeholder when the platform doesn't provide one.
# NOTE(review): subclassing Exception (not StandardError) means a bare
# `rescue` will not catch it — confirm that is intended.
unless const_defined? :EAGAIN
class EAGAIN < Exception; end
end
# Standard non-blocking wait markers, mixed into EAGAIN so callers can
# rescue IO::WaitReadable / IO::WaitWritable as on CRuby.
unless const_defined? :WaitReadable
module WaitReadable; end
class EAGAIN
include WaitReadable
end
end
unless const_defined? :WaitWritable
module WaitWritable; end
class EAGAIN
include WaitWritable
end
end
# Emulate IO#read_nonblock: read via sysread only if a zero-timeout
# select reports readability, otherwise raise EAGAIN immediately.
unless method_defined? :read_nonblock
def read_nonblock(maxlen, outbuf = nil)
if IO.select [self], nil, nil, 0
sysread(maxlen, outbuf)
else
raise EAGAIN, 'Resource temporarily unavailable - read would block'
end
end
end
# Emulate IO#write_nonblock: write via syswrite only if a zero-timeout
# select reports writability, otherwise raise EAGAIN immediately.
unless method_defined? :write_nonblock
def write_nonblock(string)
if IO.select nil, [self], nil, 0
syswrite(string)
else
raise EAGAIN, 'Resource temporarily unavailable - write would block'
end
end
end
end
#!/usr/bin/env ruby
#
# gem install trollop
#
# apt-repair-sources: checks each configured apt source for a reachable
# package index and flags (or removes) dead entries.
# Collect the files to scan: the main sources.list plus any *.list
# fragments under sources.list.d.
work = []
if File.exist?("/etc/apt/sources.list")
work.push("/etc/apt/sources.list")
end
if File.directory?("/etc/apt/sources.list.d")
work += Dir["/etc/apt/sources.list.d/*.list"]
end
if work.length == 0
puts "Nothing to be done."
exit
end
require 'rubygems'
require 'trollop'
require 'net/http'
# Command-line options (Trollop DSL).
opts = Trollop::options do
version "apt-repair-sources 0.1.0 (c) 2011 Till Klampaeckel"
banner <<-EOS
This tool helps you clean out bad entries from apt's sources.
Usage:
sudo apt-repair-sources --dry-run|--fix-it-for-me
EOS
opt :dry_run, "Display bad entries, this is enabled by default (no changes)", :default => false
opt :fix_it_for_me, "Remove bad entries from the sources (changes will be made)", :default => false
end
# NOTE(review): debug leftovers — the script dumps the parsed options and
# exits here, so nothing below this point ever runs.
p opts
exit
# Parses one apt sources.list entry ("deb URL dist component...") and
# exposes its pieces for repository validation.
class AptRepairSources
  # @param line [String] one raw line from a sources.list file
  def initialize(line)
    @e = line.split(" ")
  end

  # Machine architecture as reported by dpkg (e.g. "amd64"), with all
  # whitespace stripped.
  # @return [String]
  def self.find_platform
    return `dpkg --print-architecture`.gsub(/\s+/, "")
  end

  # Components of the entry (everything after type, URL and dist).
  # Bug fix: the original shifted @e three times, destroying the parsed
  # line so any later get_type/get_url call returned wrong values. Use a
  # non-destructive slice so the accessors are order-independent.
  # @return [Array<String>]
  def get_el
    return @e[3..-1] || []
  end

  # Entry type: "deb" or "deb-src".
  # @return [String]
  def get_type
    return @e[0]
  end

  # Base URL of the dist index, always ending in "/".
  # @return [String]
  def get_url
    url = @e[1]
    if url[-1, 1] != "/"
      url += "/"
    end
    url += "dists/" + @e[2] + "/"
    return url
  end
end
# Hard-coded dry run: this first version never writes changes.
dry_run = true
# Target architecture, e.g. "amd64".
p = AptRepairSources::find_platform
work.each do |f|
File.open(f, "r") do |infile|
# Lines worth keeping; unreachable entries are re-added commented out.
keep = []
while (l = infile.gets)
if l.nil? || l.empty?
next
end
# Only "deb"/"deb-src" entries are validated; everything else is skipped.
unless l[0,3] == 'deb' || l[0,7] == 'deb-src'
next
end
helper = AptRepairSources.new(l)
type = helper.get_type
url = helper.get_url
el = helper.get_el
# Probe each component's index file with a HEAD request.
el.each do |t|
uri = url + t
if type == 'deb'
uri += "/binary-#{p}/Packages.gz"
else
uri += "/source/Sources.gz"
end
u = URI(uri)
Net::HTTP.start(u.host, u.port) do |http|
http.open_timeout = 1
http.read_timeout = 1
res = http.head(u.path)
if res.code == "200"
keep.push(l)
# `next` exits the HTTP block, skipping the commented-out push below.
next
end
if dry_run == true
puts "#{f}: #{uri} >> #{res.code}"
end
keep.push("#" + "#{l}");
end
end
end
# save to be safe
# NOTE(review): results are only printed, never written back to the file.
if dry_run != true
puts f
puts keep
end
end
end
Make the --dry-run / --fix-it-for-me options actually control dry_run.
#!/usr/bin/env ruby
#
# gem install trollop
#
# apt-repair-sources: checks each configured apt source for a reachable
# package index and flags (or removes) dead entries.
# Collect the files to scan: the main sources.list plus any *.list
# fragments under sources.list.d.
work = []
if File.exist?("/etc/apt/sources.list")
work.push("/etc/apt/sources.list")
end
if File.directory?("/etc/apt/sources.list.d")
work += Dir["/etc/apt/sources.list.d/*.list"]
end
if work.length == 0
puts "Nothing to be done."
exit
end
require 'rubygems'
require 'trollop'
require 'net/http'
# Command-line options (Trollop DSL).
opts = Trollop::options do
version "apt-repair-sources 0.1.0 (c) 2011 Till Klampaeckel"
banner <<-EOS
This tool helps you clean out bad entries from apt's sources.
Usage:
sudo apt-repair-sources --dry-run|--fix-it-for-me
EOS
opt :dry_run, "Display bad entries, this is enabled by default (no changes)", :default => false
opt :fix_it_for_me, "Remove bad entries from the sources (changes will be made)", :default => false
end
# Resolve the run mode: the two flags are mutually exclusive, and dry run
# is the default unless --fix-it-for-me was given explicitly.
if opts[:dry_run] == true && opts[:fix_it_for_me] == true
puts "Cannot have both."
exit 1
else
if opts[:fix_it_for_me_given] && opts[:fix_it_for_me] == true
dry_run = false
else
dry_run = true
end
end
# Parses one apt sources.list entry ("deb URL dist component...") and
# exposes its pieces for repository validation.
class AptRepairSources
  # @param line [String] one raw line from a sources.list file
  def initialize(line)
    @e = line.split(" ")
  end

  # Machine architecture as reported by dpkg (e.g. "amd64"), with all
  # whitespace stripped.
  # @return [String]
  def self.find_platform
    return `dpkg --print-architecture`.gsub(/\s+/, "")
  end

  # Components of the entry (everything after type, URL and dist).
  # Bug fix: the original shifted @e three times, destroying the parsed
  # line so any later get_type/get_url call returned wrong values. Use a
  # non-destructive slice so the accessors are order-independent.
  # @return [Array<String>]
  def get_el
    return @e[3..-1] || []
  end

  # Entry type: "deb" or "deb-src".
  # @return [String]
  def get_type
    return @e[0]
  end

  # Base URL of the dist index, always ending in "/".
  # @return [String]
  def get_url
    url = @e[1]
    if url[-1, 1] != "/"
      url += "/"
    end
    url += "dists/" + @e[2] + "/"
    return url
  end
end
# Default to a dry run only when option parsing above did not decide.
# Bug fix: this used to be an unconditional `dry_run = true`, which
# clobbered the value derived from --fix-it-for-me and made real
# (non-dry) runs impossible.
dry_run = true if dry_run.nil?
# Target architecture, e.g. "amd64".
p = AptRepairSources::find_platform
work.each do |f|
  File.open(f, "r") do |infile|
    # Lines worth keeping; unreachable entries are re-added commented out.
    keep = []
    while (l = infile.gets)
      if l.nil? || l.empty?
        next
      end
      # Only "deb"/"deb-src" entries are validated.
      unless l[0,3] == 'deb' || l[0,7] == 'deb-src'
        next
      end
      helper = AptRepairSources.new(l)
      type = helper.get_type
      url = helper.get_url
      el = helper.get_el
      # NOTE(review): a line with several components is pushed into `keep`
      # once per component, duplicating it in the output — confirm intent.
      el.each do |t|
        uri = url + t
        if type == 'deb'
          uri += "/binary-#{p}/Packages.gz"
        else
          uri += "/source/Sources.gz"
        end
        u = URI(uri)
        Net::HTTP.start(u.host, u.port) do |http|
          http.open_timeout = 1
          http.read_timeout = 1
          res = http.head(u.path)
          if res.code == "200"
            keep.push(l)
            # `next` exits the HTTP block, skipping the push below.
            next
          end
          if dry_run == true
            puts "#{f}: #{uri} >> #{res.code}"
          end
          # Comment out entries whose index could not be fetched.
          keep.push("#" + "#{l}")
        end
      end
    end
    # save to be safe
    # NOTE(review): even with --fix-it-for-me the cleaned list is only
    # printed, never written back to the file.
    if dry_run != true
      puts f
      puts keep
    end
  end
end
|
# coding: utf-8
# Gem specification for git-commit-autouser.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Pulls in GitCommitAutouser::VERSION.
require 'git_commit_autouser/version'

Gem::Specification.new do |spec|
  spec.name          = "git-commit-autouser"
  spec.version       = GitCommitAutouser::VERSION
  spec.authors       = ["Ryota Arai"]
  spec.email         = ["ryota.arai@gmail.com"]
  # Bug fix: the generated "TODO" placeholders make `gem build` reject
  # the spec; use the real one-line description and summary.
  spec.description   = %q{git-commit with auto user select}
  spec.summary       = %q{git-commit with auto user select}
  spec.homepage      = ""
  spec.license       = "MIT"

  # Package every git-tracked file; executables come from bin/.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
end
Fill in spec.description and spec.summary (replace the TODO placeholders).
# coding: utf-8
# Gem specification for git-commit-autouser ("git-commit with auto user
# select").
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Pulls in GitCommitAutouser::VERSION.
require 'git_commit_autouser/version'
Gem::Specification.new do |spec|
spec.name = "git-commit-autouser"
spec.version = GitCommitAutouser::VERSION
spec.authors = ["Ryota Arai"]
spec.email = ["ryota.arai@gmail.com"]
spec.description = %q{git-commit with auto user select}
spec.summary = %q{git-commit with auto user select}
spec.homepage = ""
spec.license = "MIT"
# Package every git-tracked file; executables come from bin/.
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
end
|
module Daybreak
# Daybreak::DB contains the public api for Daybreak. It includes
# Enumerable for functional goodies like map, each, reduce and friends.
# @api public
class DB
include Enumerable
attr_reader :file, :logsize
attr_writer :default
# @api private
def self.databases
at_exit do
until @databases.empty?
warn "Database #{@databases.first.file} was not closed, state might be inconsistent"
@databases.first.close
end
end unless @databases
@databases ||= []
end
# Create a new Daybreak::DB. The second argument is the default value
# to store when accessing a previously unset key, this follows the
# Hash standard.
# @param [String] file the path to the db file
# @param default the default value to store and return when a key is
# not yet in the database.
# @yield [key] a block that will return the default value to store.
# @yieldparam [String] key the key to be stored.
def initialize(file, options = {}, &block)
@file = file
@serializer = (options[:serializer] || Serializer::Default).new
@format = (options[:format] || Format).new(@serializer)
@default = block ? block : options[:default]
@queue = Queue.new
@out = File.open(@file, 'ab')
if @out.stat.size == 0
@out.write(@format.header)
@out.flush
end
reset
@thread = Thread.new(&method(:worker))
@mutex = Mutex.new # a global mutex for lock
sync
self.class.databases << self
end
# Return default value belonging to key
# @param key the default value to retrieve.
def default(key = nil)
@default.respond_to?(:call) ? @default.call(key) : @default
end
# Retrieve a value at key from the database. If the default value was specified
# when this database was created, that value will be set and returned. Aliased
# as <tt>get</tt>.
# @param key the value to retrieve from the database.
def [](key)
skey = @serializer.key_for(key)
if @table.has_key?(skey)
@table[skey]
elsif @default
value = default(key)
@queue << [skey, value]
@table[skey] = value
end
end
alias_method :get, :'[]'
# Set a key in the database to be written at some future date. If the data
# needs to be persisted immediately, call <tt>db.set(key, value, true)</tt>.
# @param [#to_s] key the key of the storage slot in the database
# @param value the value to store
def []=(key, value)
key = @serializer.key_for(key)
@queue << [key, value]
@table[key] = value
end
alias_method :set, :'[]='
# set! flushes data immediately to disk.
# @param key the key of the storage slot in the database
# @param value the value to store
def set!(key, value)
set(key, value)
flush
value
end
# Delete a key from the database
# @param key the key of the storage slot in the database
def delete(key)
key = @serializer.key_for(key)
@queue << [key]
@table.delete(key)
end
# delete! immediately deletes the key on disk.
# @param key the key of the storage slot in the database
def delete!(key)
value = delete(key)
flush
value
end
# Does this db have a value for this key?
# @param key the key to check if the DB has a key.
def has_key?(key)
@table.has_key?(@serializer.key_for(key))
end
alias_method :key?, :has_key?
alias_method :include?, :has_key?
alias_method :member?, :has_key?
def has_value?(value)
@table.has_value?(value)
end
alias_method :value?, :has_value?
# Return the number of stored items.
# @return [Integer]
def size
@table.size
end
alias_method :length, :size
# Return true if database is empty
# @return [Boolean]
def empty?
@table.empty?
end
# Iterate over the key, value pairs in the database.
# @yield [key, value] blk the iterator for each key value pair.
# @yieldparam key the key.
# @yieldparam value the value from the database.
def each(&block)
@table.each(&block)
end
# Return the keys in the db.
# @return [Array]
def keys
@table.keys
end
# Flush all changes
def flush
@queue.flush
end
# Sync the database with what is on disk, by first flushing changes, and
# then reading the file if necessary.
def sync
flush
buf = new_records
until buf.empty?
record = @format.deserialize(buf)
if record.size == 1
@table.delete(record.first)
else
@table[record.first] = record.last
end
@logsize += 1
end
end
# Lock the database for an exclusive commit accross processes and threads
# @yield a block where every change to the database is synced
def lock
@mutex.synchronize do
exclusive do
sync
result = yield
flush
result
end
end
end
# Remove all keys and values from the database
def clear
with_tmpfile do |path, file|
file.write(@format.header)
file.close
flush
# Clear acts like a compactification
File.rename(path, @file)
@in.close
reset
end
self
end
# Compact the database to remove stale commits and reduce the file size.
def compact
with_tmpfile do |path, file|
sync
compactsize = file.write(dump)
exclusive do
stat = @in.stat
# Return if database was compactified at the same time
# or compactified database has the same size.
return self if stat.nlink == 0 || stat.size == compactsize
# Append changed journal records if the database changed during compactification
file.write(@in.read(stat.size - @in.pos)) if stat.size > @in.pos
file.close
File.rename(path, @file)
end
sync
end
self
end
# Close the database for reading and writing.
def close
@queue << nil
@thread.join
@in.close
@out.close
@queue.stop if @queue.respond_to?(:stop)
self.class.databases.delete(self)
nil
end
private
# Read new records from journal log and return buffer
def new_records
stat = nil
loop do
@in.flock(File::LOCK_SH) unless @exclusive
stat = @in.stat
# Check if database was compactified in the meantime
# break if not
break if stat.nlink > 0
@in.close
reset
end
# Read new journal records
stat.size > @in.pos ? @in.read(stat.size - @in.pos) : ''
ensure
@in.flock(File::LOCK_UN) unless @exclusive
end
# Reset database reader
def reset
@logsize = 0
@in = File.open(@file, 'rb')
@format.read_header(@in)
@table = {}
end
# Return database dump as string
def dump
dump = @format.header
@table.each do |record|
dump << @format.serialize(record)
end
dump
end
# Worker thread
def worker
loop do
record = @queue.next
write_record(record) if record
@queue.pop
break unless record
end
rescue Exception => ex
warn "Daybreak worker: #{ex.message}"
retry
end
# Write a serialized record to the output stream and, when possible,
# advance the input stream past our own write so it is not re-read.
# Bug fix: `size` was first assigned inside the `exclusive` block, making
# it block-local, so reading it afterwards raised NameError on every
# call. Declare it in the method scope before entering the block.
def write_record(record)
  record = @format.serialize(record)
  size = nil
  exclusive do
    @out.write(record)
    # Flush to make sure the file is really updated
    @out.flush
    size = @out.stat.size
  end
  # Skip re-reading our own append if nothing else was written meanwhile.
  @in.pos = size if size == @in.pos + record.size
  @logsize += 1
end
# Lock database exclusively
def exclusive
return yield if @exclusive
begin
loop do
@out.flock(File::LOCK_EX)
# Check if database was compactified in the meantime
# break if not
break if @out.stat.nlink > 0
@out.close
@out = File.open(@file, 'ab')
end
@exclusive = true
yield
ensure
@out.flock(File::LOCK_UN)
@exclusive = false
end
end
# Open a temporary file next to the database and pass its path and handle
# to the block. The name embeds pid and thread id (base 36) so concurrent
# writers never collide; the file is closed and unlinked afterwards
# unless the block renamed it away.
# Bug fix: File.exists? was deprecated and removed in Ruby 3.2; use
# File.exist?. Also guard the ensure clause against File.open failing
# (file/path would otherwise raise NoMethodError/TypeError here).
def with_tmpfile
  path = [@file, $$.to_s(36), Thread.current.object_id.to_s(36)].join
  file = File.open(path, 'wb')
  yield(path, file)
ensure
  file.close if file && !file.closed?
  File.unlink(path) if path && File.exist?(path)
end
end
end
Add reopen methods for the input and output file handles.
module Daybreak
# Daybreak::DB contains the public api for Daybreak. It includes
# Enumerable for functional goodies like map, each, reduce and friends.
# @api public
class DB
include Enumerable
attr_reader :file, :logsize
attr_writer :default
# @api private
def self.databases
at_exit do
until @databases.empty?
warn "Database #{@databases.first.file} was not closed, state might be inconsistent"
@databases.first.close
end
end unless @databases
@databases ||= []
end
# Create a new Daybreak::DB. The second argument is the default value
# to store when accessing a previously unset key, this follows the
# Hash standard.
# @param [String] file the path to the db file
# @param default the default value to store and return when a key is
# not yet in the database.
# @yield [key] a block that will return the default value to store.
# @yieldparam [String] key the key to be stored.
def initialize(file, options = {}, &block)
@file = file
@serializer = (options[:serializer] || Serializer::Default).new
@format = (options[:format] || Format).new(@serializer)
@default = block ? block : options[:default]
@queue = Queue.new
@table = {}
reopen
@thread = Thread.new(&method(:worker))
@mutex = Mutex.new # a global mutex for lock
sync
self.class.databases << self
end
# Return default value belonging to key
# @param key the default value to retrieve.
def default(key = nil)
@default.respond_to?(:call) ? @default.call(key) : @default
end
# Retrieve a value at key from the database. If the default value was specified
# when this database was created, that value will be set and returned. Aliased
# as <tt>get</tt>.
# @param key the value to retrieve from the database.
def [](key)
skey = @serializer.key_for(key)
if @table.has_key?(skey)
@table[skey]
elsif @default
value = default(key)
@queue << [skey, value]
@table[skey] = value
end
end
alias_method :get, :'[]'
# Set a key in the database to be written at some future date. If the data
# needs to be persisted immediately, call <tt>db.set(key, value, true)</tt>.
# @param [#to_s] key the key of the storage slot in the database
# @param value the value to store
def []=(key, value)
key = @serializer.key_for(key)
@queue << [key, value]
@table[key] = value
end
alias_method :set, :'[]='
# set! flushes data immediately to disk.
# @param key the key of the storage slot in the database
# @param value the value to store
def set!(key, value)
set(key, value)
flush
value
end
# Delete a key from the database
# @param key the key of the storage slot in the database
def delete(key)
key = @serializer.key_for(key)
@queue << [key]
@table.delete(key)
end
# delete! immediately deletes the key on disk.
# @param key the key of the storage slot in the database
def delete!(key)
value = delete(key)
flush
value
end
# Does this db have a value for this key?
# @param key the key to check if the DB has a key.
def has_key?(key)
@table.has_key?(@serializer.key_for(key))
end
alias_method :key?, :has_key?
alias_method :include?, :has_key?
alias_method :member?, :has_key?
def has_value?(value)
@table.has_value?(value)
end
alias_method :value?, :has_value?
# Return the number of stored items.
# @return [Integer]
def size
@table.size
end
alias_method :length, :size
# Return true if database is empty
# @return [Boolean]
def empty?
@table.empty?
end
# Iterate over the key, value pairs in the database.
# @yield [key, value] blk the iterator for each key value pair.
# @yieldparam key the key.
# @yieldparam value the value from the database.
def each(&block)
@table.each(&block)
end
# Return the keys in the db.
# @return [Array]
def keys
@table.keys
end
# Flush all changes
def flush
@queue.flush
end
# Sync the database with what is on disk, by first flushing changes, and
# then reading the file if necessary.
def sync
flush
buf = new_records
until buf.empty?
record = @format.deserialize(buf)
if record.size == 1
@table.delete(record.first)
else
@table[record.first] = record.last
end
@logsize += 1
end
end
# Lock the database for an exclusive commit accross processes and threads
# @yield a block where every change to the database is synced
def lock
@mutex.synchronize do
exclusive do
sync
result = yield
flush
result
end
end
end
# Remove all keys and values from the database
def clear
with_tmpfile do |path, file|
file.write(@format.header)
file.close
flush
# Clear acts like a compactification
File.rename(path, @file)
@table.clear
reopen
end
self
end
# Compact the database to remove stale commits and reduce the file size.
def compact
with_tmpfile do |path, file|
sync
compactsize = file.write(dump)
exclusive do
stat = @in.stat
# Return if database was compactified at the same time
# or compactified database has the same size.
return self if stat.nlink == 0 || stat.size == compactsize
# Append changed journal records if the database changed during compactification
file.write(@in.read(stat.size - @in.pos)) if stat.size > @in.pos
file.close
File.rename(path, @file)
end
reopen
sync
end
self
end
# Close the database for reading and writing.
def close
@queue << nil
@thread.join
@in.close
@out.close
@queue.stop if @queue.respond_to?(:stop)
self.class.databases.delete(self)
nil
end
private
# Read new records from journal log and return buffer
def new_records
stat = nil
loop do
@in.flock(File::LOCK_SH) unless @exclusive
stat = @in.stat
# Check if database was compactified in the meantime
# break if not
break if stat.nlink > 0
@table.clear
reopen_in
end
# Read new journal records
stat.size > @in.pos ? @in.read(stat.size - @in.pos) : ''
ensure
@in.flock(File::LOCK_UN) unless @exclusive
end
# Reopen input
def reopen_in
@logsize = 0
@in.close if @in
@in = File.open(@file, 'rb')
@format.read_header(@in)
end
# Reopen output
def reopen_out
@out.close if @out
@out = File.open(@file, 'ab')
if @out.stat.size == 0
@out.write(@format.header)
@out.flush
end
end
# Reopen output and input
def reopen
reopen_out
reopen_in
end
# Return database dump as string
def dump
dump = @format.header
@table.each do |record|
dump << @format.serialize(record)
end
dump
end
# Worker thread
def worker
loop do
record = @queue.next
write_record(record) if record
@queue.pop
break unless record
end
rescue Exception => ex
warn "Daybreak worker: #{ex.message}"
retry
end
# Write a serialized record to the output stream and, when possible,
# advance the input stream past our own write so it is not re-read.
# Bug fix: `size` was first assigned inside the `exclusive` block, making
# it block-local, so reading it afterwards raised NameError on every
# call. Declare it in the method scope before entering the block.
def write_record(record)
  record = @format.serialize(record)
  size = nil
  exclusive do
    @out.write(record)
    # Flush to make sure the file is really updated
    @out.flush
    size = @out.stat.size
  end
  # Skip re-reading our own append if nothing else was written meanwhile.
  @in.pos = size if size == @in.pos + record.size
  @logsize += 1
end
# Lock database exclusively
def exclusive
return yield if @exclusive
begin
loop do
@out.flock(File::LOCK_EX)
# Check if database was compactified in the meantime
# break if not
break if @out.stat.nlink > 0
reopen_out
end
@exclusive = true
yield
ensure
@out.flock(File::LOCK_UN)
@exclusive = false
end
end
# Open a temporary file next to the database and pass its path and handle
# to the block. The name embeds pid and thread id (base 36) so concurrent
# writers never collide; the file is closed and unlinked afterwards
# unless the block renamed it away.
# Bug fix: File.exists? was deprecated and removed in Ruby 3.2; use
# File.exist?. Also guard the ensure clause against File.open failing
# (file/path would otherwise raise NoMethodError/TypeError here).
def with_tmpfile
  path = [@file, $$.to_s(36), Thread.current.object_id.to_s(36)].join
  file = File.open(path, 'wb')
  yield(path, file)
ensure
  file.close if file && !file.closed?
  File.unlink(path) if path && File.exist?(path)
end
end
end
|
module Daybreak
# Daybreak::DB contains the public api for Daybreak. It includes
# Enumerable for functional goodies like map, each, reduce and friends.
# @api public
class DB
include Enumerable
# Database file name
attr_reader :file
# Counter of how many records are in
attr_reader :logsize
# Set default value, can be a callable
attr_writer :default
@@databases = []
@@databases_mutex = Mutex.new
# A handler that will ensure that databases are closed and synced when the
# current process exits.
# Pops databases one at a time under the class mutex; DB#close removes
# the instance from @@databases, so the loop ends once the list is empty.
# @api private
def self.exit_handler
loop do
db = @@databases_mutex.synchronize { @@databases.first }
break unless db
warn "Daybreak database #{db.file} was not closed, state might be inconsistent"
begin
db.close
# NOTE(review): rescuing Exception here swallows even fatal errors so
# the remaining databases still get closed — confirm intent.
rescue Exception => ex
warn "Failed to close daybreak database: #{ex.message}"
end
end
end
at_exit { Daybreak::DB.exit_handler }
# Create a new Daybreak::DB. The second argument is the default value
# to store when accessing a previously unset key, this follows the
# Hash standard.
# @param [String] file the path to the db file
# @param [Hash] options a hash that contains the options for creating a new
# database. You can pass in :serializer, :format or :default.
# @yield [key] a block that will return the default value to store.
# @yieldparam [String] key the key to be stored.
def initialize(file, options = {}, &block)
@file = file
@serializer = (options[:serializer] || Serializer::Default).new
@format = (options[:format] || Format).new
@queue = Queue.new
@table = Hash.new(&method(:hash_default))
@default = block ? block : options[:default]
open
@mutex = Mutex.new # Mutex to make #lock thread safe
@worker = Thread.new(&method(:worker))
@worker.priority = -1
load
@@databases_mutex.synchronize { @@databases << self }
end
# Return default value belonging to key
# @param key the default value to retrieve.
def default(key = nil)
@table.default(key)
end
# Retrieve a value at key from the database. If the default value was specified
# when this database was created, that value will be set and returned. Aliased
# as <tt>get</tt>.
# @param key the value to retrieve from the database.
def [](key)
@table[@serializer.key_for(key)]
end
alias_method :get, :'[]'
# Set a key in the database to be written at some future date. If the data
# needs to be persisted immediately, call <tt>db.set(key, value, true)</tt>.
# @param [#to_s] key the key of the storage slot in the database
# @param value the value to store
def []=(key, value)
key = @serializer.key_for(key)
@queue << [key, value]
@table[key] = value
end
alias_method :set, :'[]='
# set! flushes data immediately to disk.
# @param key the key of the storage slot in the database
# @param value the value to store
def set!(key, value)
set(key, value)
flush
value
end
# Delete a key from the database
# @param key the key of the storage slot in the database
def delete(key)
key = @serializer.key_for(key)
@queue << [key]
@table.delete(key)
end
# Immediately delete the key on disk.
# @param key the key of the storage slot in the database
def delete!(key)
value = delete(key)
flush
value
end
# Batch-update the database with the given hash (fast path: the whole
# batch is queued as a single journal write). Keys are run through the
# serializer and the in-memory table is updated immediately.
# @param hash [Hash] key/value pairs to store
# @return [DB] self, for chaining
def update(hash)
  batch = {}
  hash.each { |key, value| batch[@serializer.key_for(key)] = value }
  @queue << batch
  @table.update(batch)
  self
end
# Updata database and flush data to disk.
def update!(hash)
update(hash)
flush
end
# Does this db have a value for this key?
# @param key the key to check if the DB has a key.
def has_key?(key)
@table.has_key?(@serializer.key_for(key))
end
alias_method :key?, :has_key?
alias_method :include?, :has_key?
alias_method :member?, :has_key?
def has_value?(value)
@table.has_value?(value)
end
alias_method :value?, :has_value?
# Return the number of stored items.
# @return [Integer]
def size
@table.size
end
alias_method :length, :size
# Utility method that will return the size of the database in bytes,
# useful for determining when to compact
def bytesize
@fd.stat.size unless closed?
end
# Return true if database is empty.
# @return [Boolean]
def empty?
@table.empty?
end
# Iterate over the key, value pairs in the database.
# @yield [key, value] blk the iterator for each key value pair.
# @yieldparam key the key.
# @yieldparam value the value from the database.
def each(&block)
@table.each(&block)
end
# Return the keys in the db.
# @return [Array]
def keys
@table.keys
end
# Flush all changes to disk.
def flush
@queue.flush
self
end
# Sync the database with what is on disk, by first flushing changes, and
# then reading the file if necessary.
def sync
flush
load
end
# Lock the database for an exclusive commit accross processes and threads
# @yield a block where every change to the database is synced
def lock
@mutex.synchronize do
# Flush everything to start with a clean state
# and to protect the @locked variable
flush
with_flock(File::LOCK_EX) do
load
result = yield
flush
result
end
end
end
# Remove all keys and values from the database.
def clear
flush
with_tmpfile do |path, file|
file.write(@format.header)
file.close
# Clear acts like a compactification
File.rename(path, @file)
end
@table.clear
open
self
end
# Compact the database to remove stale commits and reduce the file size.
def compact
sync
with_tmpfile do |path, file|
# Compactified database has the same size -> return
return self if @pos == file.write(dump)
with_flock(File::LOCK_EX) do
# Database was compactified in the meantime
if @pos != nil
# Append changed journal records if the database changed during compactification
file.write(read)
file.close
File.rename(path, @file)
end
end
end
open
load
end
# Close the database for reading and writing.
def close
@queue << nil
@worker.join
@fd.close
@queue.stop if @queue.respond_to?(:stop)
@@databases_mutex.synchronize { @@databases.delete(self) }
nil
end
# Check to see if we've already closed the database.
def closed?
@fd.closed?
end
private
# Default-value block installed in @table (see Hash.new in #initialize).
# When a missing key is read and a default is configured, compute the
# default (calling it with the key if it is callable), enqueue it for
# persistence and cache it in the table. Returns nil when no default is
# configured.
def hash_default(_, key)
  return if @default.nil?
  value = @default
  value = value.call(key) if value.respond_to?(:call)
  @queue << [key, value]
  @table[key] = value
end
# Update the @table with records
def load
buf = read
until buf.empty?
record = @format.parse(buf)
if record.size == 1
@table.delete(record.first)
else
@table[record.first] = @serializer.load(record.last)
end
@logsize += 1
end
self
end
# Open or reopen file
def open
@fd.close if @fd
@fd = File.open(@file, 'ab+')
@fd.advise(:sequential) if @fd.respond_to? :advise
stat = @fd.stat
@inode = stat.ino
@logsize = 0
write(@format.header) if stat.size == 0
@pos = nil
end
# Read new file content
def read
with_flock(File::LOCK_SH) do
# File was opened
unless @pos
@fd.pos = 0
@format.read_header(@fd)
else
@fd.pos = @pos
end
buf = @fd.read
@pos = @fd.pos
buf
end
end
# Return database dump as string
def dump
dump = @format.header
# each is faster than inject
@table.each do |record|
record[1] = @serializer.dump(record.last)
dump << @format.dump(record)
end
dump
end
# Worker thread
# Drains @queue: Hash items are batch writes, nil is the shutdown
# sentinel, anything else is a single record. Each item is popped after
# processing so flush waiters are released only once data hit disk.
def worker
loop do
case record = @queue.next
when Hash
write_batch(record)
when nil
@queue.pop
break
else
write_record(record)
end
@queue.pop
end
# NOTE(review): rescuing Exception (not StandardError) plus retry keeps
# the writer alive at all costs, but would also spin on fatal errors —
# confirm this is intentional.
rescue Exception => ex
warn "Daybreak worker: #{ex.message}"
retry
end
# Write batch update
def write_batch(records)
dump = ''
records.each do |record|
record[1] = @serializer.dump(record.last)
dump << @format.dump(record)
end
write(dump)
@logsize += records.size
end
# Write single record
def write_record(record)
record[1] = @serializer.dump(record.last) if record.size > 1
write(@format.dump(record))
@logsize += 1
end
# Write data to output stream and advance @pos
def write(dump)
with_flock(File::LOCK_EX) do
@fd.write(dump)
# Flush to make sure the file is really updated
@fd.flush
end
@pos = @fd.pos if @pos && @fd.pos == @pos + dump.bytesize
end
# Block with file lock
def with_flock(mode)
return yield if @locked
begin
loop do
# HACK: JRuby returns false if the process is already hold by the same process
# see https://github.com/jruby/jruby/issues/496
Thread.pass until @fd.flock(mode)
# Check if database was compactified in the meantime
# break if not
stat = @fd.stat
break if stat.nlink > 0 && stat.ino == @inode
open
end
@locked = true
yield
ensure
@fd.flock(File::LOCK_UN)
@locked = false
end
end
# Open a temporary file next to the database and pass its path and handle
# to the block. The name embeds pid and thread id (base 36) so concurrent
# writers never collide; the file is closed and unlinked afterwards
# unless the block renamed it away.
# Bug fix: File.exists? was deprecated and removed in Ruby 3.2; use
# File.exist?. Also guard the ensure clause against File.open failing
# (file/path would otherwise raise NoMethodError/TypeError here).
def with_tmpfile
  path = [@file, $$.to_s(36), Thread.current.object_id.to_s(36)].join
  file = File.open(path, 'wb')
  yield(path, file)
ensure
  file.close if file && !file.closed?
  File.unlink(path) if path && File.exist?(path)
end
end
end
Mark the class variables as private API (`@api private` annotations).
[ci skip]
module Daybreak
# Daybreak::DB contains the public api for Daybreak. It includes
# Enumerable for functional goodies like map, each, reduce and friends.
# @api public
class DB
include Enumerable
# Database file name
attr_reader :file
# Counter of how many records are in
attr_reader :logsize
# Set default value, can be a callable
attr_writer :default
# @api private
@@databases = []
# @api private
@@databases_mutex = Mutex.new
# A handler that will ensure that databases are closed and synced when the
# current process exits.
# @api private
def self.exit_handler
loop do
db = @@databases_mutex.synchronize { @@databases.first }
break unless db
warn "Daybreak database #{db.file} was not closed, state might be inconsistent"
begin
db.close
rescue Exception => ex
warn "Failed to close daybreak database: #{ex.message}"
end
end
end
at_exit { Daybreak::DB.exit_handler }
# Create a new Daybreak::DB. The second argument is the default value
# to store when accessing a previously unset key, this follows the
# Hash standard.
# @param [String] file the path to the db file
# @param [Hash] options a hash that contains the options for creating a new
# database. You can pass in :serializer, :format or :default.
# @yield [key] a block that will return the default value to store.
# @yieldparam [String] key the key to be stored.
def initialize(file, options = {}, &block)
@file = file
@serializer = (options[:serializer] || Serializer::Default).new
@format = (options[:format] || Format).new
@queue = Queue.new
@table = Hash.new(&method(:hash_default))
@default = block ? block : options[:default]
open
@mutex = Mutex.new # Mutex to make #lock thread safe
@worker = Thread.new(&method(:worker))
@worker.priority = -1
load
@@databases_mutex.synchronize { @@databases << self }
end
# Return default value belonging to key
# @param key the default value to retrieve.
def default(key = nil)
@table.default(key)
end
# Retrieve a value at key from the database. If the default value was specified
# when this database was created, that value will be set and returned. Aliased
# as <tt>get</tt>.
# @param key the value to retrieve from the database.
def [](key)
@table[@serializer.key_for(key)]
end
alias_method :get, :'[]'
# Set a key in the database to be written at some future date. If the data
# needs to be persisted immediately, call <tt>db.set(key, value, true)</tt>.
# @param [#to_s] key the key of the storage slot in the database
# @param value the value to store
def []=(key, value)
key = @serializer.key_for(key)
@queue << [key, value]
@table[key] = value
end
alias_method :set, :'[]='
# set! flushes data immediately to disk.
# @param key the key of the storage slot in the database
# @param value the value to store
def set!(key, value)
set(key, value)
flush
value
end
# Remove +key+ from the in-memory table and queue a deletion record
# (a one-element array) for the journal.
# @param key the key of the storage slot to remove
# @return the removed value, or nil
def delete(key)
  skey = @serializer.key_for(key)
  @queue << [skey]
  @table.delete(skey)
end
# Like #delete, but blocks until the deletion record is on disk.
# @param key the key of the storage slot to remove
# @return the removed value, or nil
def delete!(key)
  removed = delete(key)
  flush
  removed
end
# Batch-update the database from +hash+ (faster than repeated #set:
# the whole batch becomes a single queued write).
# @param [Hash] hash the key/value pairs to store
# @return [self]
def update(hash)
  serialized = hash.each_with_object({}) do |(key, value), acc|
    acc[@serializer.key_for(key)] = value
  end
  @queue << serialized
  @table.update(serialized)
  self
end
# Update database and flush data to disk.
# @param [Hash] hash the key/value pairs to store
def update!(hash)
  update(hash)
  flush
end
# Does this db have a value for this key?
# Aliased as key?, include? and member?.
# @param key the key to check for.
# @return [Boolean]
def has_key?(key)
  @table.key?(@serializer.key_for(key))
end
alias_method :key?, :has_key?
alias_method :include?, :has_key?
alias_method :member?, :has_key?
# Does this db contain +value+? Aliased as value?.
# @return [Boolean]
def has_value?(value)
  @table.value?(value)
end
alias_method :value?, :has_value?
# Return the number of stored items. Aliased as length.
# @return [Integer]
def size
  @table.length
end
alias_method :length, :size
# Size of the database file in bytes (nil once closed); useful for
# deciding when to #compact.
# @return [Integer, nil]
def bytesize
  closed? ? nil : @fd.stat.size
end
# Return true if database is empty.
# @return [Boolean]
def empty?
  @table.length.zero?
end
# Iterate over the key, value pairs in the database.
# @yield [key, value] one pair per stored record.
# @yieldparam key the key.
# @yieldparam value the value from the database.
def each(&block)
  @table.each_pair(&block)
end
# All keys currently stored in the db.
# @return [Array]
def keys
  @table.each_key.to_a
end
# Flush all changes to disk.
# Blocks until the worker thread has persisted every queued record.
# @return [self]
def flush
  @queue.flush
  self
end
# Sync the database with what is on disk, by first flushing changes, and
# then reading the file if necessary (picks up writes by other processes).
def sync
  flush
  load
end
# Lock the database for an exclusive commit across processes and threads
# @yield a block where every change to the database is synced
# @return the block's result
def lock
  @mutex.synchronize do
    # Flush everything to start with a clean state
    # and to protect the @locked variable
    flush
    with_flock(File::LOCK_EX) do
      # Pick up changes made by other processes before yielding.
      load
      result = yield
      # Persist the block's changes while still holding the file lock.
      flush
      result
    end
  end
end
# Remove all keys and values from the database.
# Implemented by atomically renaming a file containing only the format
# header over the journal.
# @return [self]
def clear
  flush
  with_tmpfile do |path, file|
    file.write(@format.header)
    file.close
    # Clear acts like a compactification
    File.rename(path, @file)
  end
  @table.clear
  # Reopen the fd: the rename replaced the inode we had open.
  open
  self
end
# Compact the database to remove stale commits and reduce the file size.
def compact
  sync
  with_tmpfile do |path, file|
    # Compactified database has the same size -> return
    return self if @pos == file.write(dump)
    with_flock(File::LOCK_EX) do
      # Database was compactified in the meantime
      # NOTE(review): @pos appears to be reset to nil by #open when the
      # file was replaced by another process — confirm before changing.
      if @pos != nil
        # Append changed journal records if the database changed during compactification
        file.write(read)
        file.close
        # Atomically swap the compacted file into place.
        File.rename(path, @file)
      end
    end
  end
  open
  load
end
# Close the database for reading and writing.
# Pushes a nil sentinel to stop the worker thread, waits for it to
# finish, closes the file and deregisters from the at_exit registry.
# @return [nil]
def close
  @queue << nil
  @worker.join
  @fd.close
  @queue.stop if @queue.respond_to?(:stop)
  @@databases_mutex.synchronize { @@databases.delete(self) }
  nil
end
# Check to see if we've already closed the database.
# @return [Boolean]
def closed?
  @fd.closed?
end
private
# The block used in @table for new entries.
# Applies the configured default (a callable or plain value), stores it
# in the table and queues it for persistence — mirroring Hash defaults.
def hash_default(_, key)
  if @default != nil
    value = @default.respond_to?(:call) ? @default.call(key) : @default
    @queue << [key, value]
    @table[key] = value
  end
end
# Update the @table with records appended since the last read.
# A one-element record encodes a deletion; a two-element record a write.
# @return [self]
def load
  buf = read
  until buf.empty?
    record = @format.parse(buf)
    if record.size == 1
      @table.delete(record.first)
    else
      @table[record.first] = @serializer.load(record.last)
    end
    @logsize += 1
  end
  self
end
# Open or reopen file
def open
  @fd.close if @fd
  # Append-mode binary handle; reads seek explicitly in #read.
  @fd = File.open(@file, 'ab+')
  @fd.advise(:sequential) if @fd.respond_to? :advise
  stat = @fd.stat
  # Remember the inode so #with_flock can detect file replacement.
  @inode = stat.ino
  @logsize = 0
  write(@format.header) if stat.size == 0
  # nil position tells #read to start from the header.
  @pos = nil
end
# Read the file content appended since the last call (or everything
# after the header when the file was just opened) under a shared lock.
# @return [String] the raw journal bytes not yet consumed
def read
  with_flock(File::LOCK_SH) do
    if @pos
      # Resume from where the previous read stopped.
      @fd.pos = @pos
    else
      # Freshly opened file: validate the header first.
      @fd.pos = 0
      @format.read_header(@fd)
    end
    data = @fd.read
    @pos = @fd.pos
    data
  end
end
# Serialize the whole table into one journal string, starting with
# the format header.
# @return [String]
def dump
  out = @format.header
  # each is faster than inject
  @table.each do |key, value|
    out << @format.dump([key, @serializer.dump(value)])
  end
  out
end
# Worker thread: drains @queue and persists records until the nil
# sentinel pushed by #close arrives.
def worker
  loop do
    case record = @queue.next
    when Hash
      # Batch update queued by #update.
      write_batch(record)
    when nil
      # Shutdown sentinel from #close: acknowledge and stop.
      @queue.pop
      break
    else
      write_record(record)
    end
    # Acknowledge the processed record so #flush can unblock.
    @queue.pop
  end
rescue Exception => ex
  # NOTE(review): rescuing Exception is normally discouraged, but the
  # worker must survive any failure and keep draining the queue.
  warn "Daybreak worker: #{ex.message}"
  retry
end
# Serialize a batch of records and append them to disk in one write.
def write_batch(records)
  buf = ''
  records.each do |key, value|
    buf << @format.dump([key, @serializer.dump(value)])
  end
  write(buf)
  @logsize += records.size
end
# Serialize and append a single record: [key] encodes a deletion,
# [key, value] a write.
def write_record(record)
  if record.size > 1
    record[1] = @serializer.dump(record.last)
  end
  write(@format.dump(record))
  @logsize += 1
end
# Write data to output stream and advance @pos
def write(dump)
  with_flock(File::LOCK_EX) do
    @fd.write(dump)
    # Flush to make sure the file is really updated
    @fd.flush
  end
  # Only advance @pos when our append landed exactly where expected;
  # otherwise another process wrote in between and #read must re-scan.
  @pos = @fd.pos if @pos && @fd.pos == @pos + dump.bytesize
end
# Block with file lock.
# Re-entrant within this DB instance via the @locked flag, so nested
# calls (e.g. #write inside #lock) do not deadlock.
def with_flock(mode)
  return yield if @locked
  begin
    loop do
      # HACK: JRuby returns false if the process is already hold by the same process
      # see https://github.com/jruby/jruby/issues/496
      Thread.pass until @fd.flock(mode)
      # Check if database was compactified in the meantime
      # break if not
      stat = @fd.stat
      break if stat.nlink > 0 && stat.ino == @inode
      # The file we locked was replaced or deleted; reopen and retry.
      open
    end
    @locked = true
    yield
  ensure
    @fd.flock(File::LOCK_UN)
    @locked = false
  end
end
# Open a temporary file next to the database and yield its path and
# handle to the block. The name embeds the PID and thread id (base 36)
# to avoid collisions. The file is always closed and unlinked.
# @yield [path, file]
# @return the block's result
def with_tmpfile
  path = [@file, $$.to_s(36), Thread.current.object_id.to_s(36)].join
  file = File.open(path, 'wb')
  yield(path, file)
ensure
  # Guard against File.open itself having raised (file would be nil).
  file.close if file && !file.closed?
  # File.exists? was removed in Ruby 3.2; File.exist? is the supported name.
  File.unlink(path) if path && File.exist?(path)
end
end
end
|
# Rake/test helpers: canned spec file lists plus helpers to configure
# RSpec rake tasks and to monitor CI builds with `top`.
module EvmTestHelper
  # Spec groupings; replication/gems/migrations/automation run separately.
  VMDB_SPECS = FileList["spec/**/*_spec.rb"].exclude(/^spec\/(replication|gems|migrations|automation)/)
  METRICS_SPECS = VMDB_SPECS + ['spec/coverage_helper.rb']
  REPLICATION_SPECS = FileList['spec/replication/**/*_spec.rb']
  MIGRATION_SPECS = FileList['spec/migrations/**/*_spec.rb'].sort
  AUTOMATION_SPECS = FileList['spec/automation/**/*_spec.rb']

  # Point the rake task at .rspec (or .rspec_ci on CI) and append extra opts.
  # NOTE(review): always passing --options prevents `.rspec-local` from
  # overriding defaults; consider passing it only on CI.
  def self.init_rspec_task(t, rspec_opts = [])
    rspec_opts_file = ".rspec#{"_ci" if ENV['CI']}"
    t.rspec_opts = ['--options', "\"#{Rails.root.join(rspec_opts_file)}\""] + rspec_opts
    t.verbose = false
  end

  # Run a rake task in a subshell from Rails.root; exit this process
  # with the child's status on failure so task chains abort early.
  def self.run_rake_via_shell(rake_command, env = {})
    cmd = "bundle exec rake #{rake_command}"
    cmd << " --trace" if Rake.application.options.trace
    _pid, status = Process.wait2(Kernel.spawn(env, cmd, :chdir => Rails.root))
    exit(status.exitstatus) if status.exitstatus != 0
  end

  # On CruiseControl builds, log `top` snapshots into the artifacts dir
  # for (at most) the expected duration of the run.
  def self.cc_start_top
    return if $cc_top_parent_process_id
    if ENV['CC_BUILD_ARTIFACTS'] && File.exist?(ENV['CC_BUILD_ARTIFACTS'])
      dest = File.join(ENV['CC_BUILD_ARTIFACTS'], 'top_output.log')
      max_run_time = 2.hours
      top_interval = 30.seconds
      top_iterations = max_run_time / top_interval
      # top
      # -b batch mode
      # -d delay time between top runs(in seconds)
      # -n number of iterations
      $cc_top_parent_process_id = Process.pid
      system("top -b -d #{top_interval} -n #{top_iterations} >> #{dest} &")
      # Only the process that started `top` kills it at exit.
      at_exit { system('killall top') if $cc_top_parent_process_id == Process.pid }
    end
  end
end
Allow rspec options from `.rspec` to be overridden
Defaults are defined in `.rspec`
- If you specify `--options .rspec`, then only `.rspec` is used.
- If you don’t specify it, `.rspec` will still be used, but
files like `.rspec-local` will override.
Now, it passes in `--options .rspec_ci` for ci systems,
but does not pass it in for developers, since it is not needed.
# Rake/test helpers: spec file lists, RSpec task setup, CI `top` logging.
module EvmTestHelper
  VMDB_SPECS = FileList["spec/**/*_spec.rb"].exclude(/^spec\/(replication|gems|migrations|automation)/)
  METRICS_SPECS = VMDB_SPECS + ['spec/coverage_helper.rb']
  REPLICATION_SPECS = FileList['spec/replication/**/*_spec.rb']
  MIGRATION_SPECS = FileList['spec/migrations/**/*_spec.rb'].sort
  AUTOMATION_SPECS = FileList['spec/automation/**/*_spec.rb']

  # On CI, prepend the .rspec_ci options file; developers rely on the
  # default .rspec / .rspec-local lookup instead.
  def self.init_rspec_task(t, rspec_opts = [])
    if ENV['CI']
      ci_options_file = "\"#{Rails.root.join(".rspec_ci")}\""
      rspec_opts = ['--options', ci_options_file] + rspec_opts
    end
    t.rspec_opts = rspec_opts
    t.verbose = false
  end

  # Shell out to a rake task from Rails.root; abort this process with
  # the child's exit status when it fails.
  def self.run_rake_via_shell(rake_command, env = {})
    command = "bundle exec rake #{rake_command}"
    command << " --trace" if Rake.application.options.trace
    _pid, status = Process.wait2(Kernel.spawn(env, command, :chdir => Rails.root))
    exit(status.exitstatus) unless status.exitstatus == 0
  end

  # On CruiseControl builds, log `top` snapshots into the artifacts
  # directory for (at most) the expected duration of the run.
  def self.cc_start_top
    return if $cc_top_parent_process_id
    artifacts = ENV['CC_BUILD_ARTIFACTS']
    return unless artifacts && File.exist?(artifacts)
    dest = File.join(artifacts, 'top_output.log')
    max_run_time = 2.hours
    top_interval = 30.seconds
    top_iterations = max_run_time / top_interval
    # top: -b batch mode, -d delay between runs (seconds), -n iterations
    $cc_top_parent_process_id = Process.pid
    system("top -b -d #{top_interval} -n #{top_iterations} >> #{dest} &")
    # Only the process that started `top` kills it at exit.
    at_exit { system('killall top') if $cc_top_parent_process_id == Process.pid }
  end
end
|
# coding: utf-8
# Gem specification for mspire-mass.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'mspire/mass/version'

Gem::Specification.new do |spec|
  spec.name = "mspire-mass"
  spec.version = Mspire::Mass::VERSION
  spec.authors = ["John T. Prince"]
  spec.email = ["jtprince@gmail.com"]
  spec.summary = %q{mspire library for mass calculations.}
  spec.description = %q{mspire library for mass calculations. Mainly holds constants for simple lookup.}
  spec.homepage = ""
  spec.license = "MIT"

  # Package every git-tracked file; executables come from bin/.
  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Runtime dependencies.
  [
    ["mspire-isotope", "~> 0.1.0"],
  ].each do |args|
    spec.add_dependency(*args)
  end

  # Development-only dependencies.
  [
    ["bundler", "~> 1.6.2"],
    ["rake"],
    ["rspec", "~> 2.14.1"],
    ["rdoc", "~> 4.1.1"],
    ["simplecov", "~> 0.8.2"],
  ].each do |args|
    spec.add_development_dependency(*args)
  end
end
progress
# coding: utf-8
# Gem specification for mspire-mass.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'mspire/mass/version'

Gem::Specification.new do |spec|
  spec.name = "mspire-mass"
  spec.version = Mspire::Mass::VERSION
  spec.authors = ["John T. Prince"]
  spec.email = ["jtprince@gmail.com"]
  spec.summary = %q{mspire library for mass calculations.}
  spec.description = %q{mspire library for mass calculations. Mainly holds constants for simple lookup.}
  spec.homepage = ""
  spec.license = "MIT"

  # Package every git-tracked file; executables come from bin/.
  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Runtime dependencies.
  [
    # should probably be its only dependency
    ["mspire-isotope", "~> 0.1.0"],
  ].each do |args|
    spec.add_dependency(*args)
  end

  # Development-only dependencies.
  [
    ["bundler", "~> 1.6.2"],
    ["rake"],
    ["rspec", "~> 2.14.1"],
    ["rdoc", "~> 4.1.1"],
    ["simplecov", "~> 0.8.2"],
  ].each do |args|
    spec.add_development_dependency(*args)
  end
end
|
# CocoaPods specification for the MAGE iOS SDK.
Pod::Spec.new do |s|
  s.name = "mage-ios-sdk"
  s.version = "0.0.2"
  s.summary = "iOS SDK for MAGE"
  s.description = <<-DESC
    iOS SDK for MAGE, assist with:
    * MAGE authentication.
    * MAGE observations CRUD operations
    * MAGE location services
  DESC
  s.homepage = "https://www.nga.mil"
  s.license = 'DOD'
  s.author = { "NGA" => "newmanw@bit-sys.com" }
  s.source = { :git => "https://git.***REMOVED***/mage/mage-ios-sdk.git", :tag => s.version.to_s }
  s.platform = :ios, '7.0'
  s.ios.deployment_target = '7.0'
  s.requires_arc = true
  s.source_files = 'MAGE/**/*.{h,m}'
  #s.ios.exclude_files = 'Classes/osx'
  #s.osx.exclude_files = 'Classes/ios'
  # s.public_header_files = 'Classes/**/*.h'
  s.frameworks = 'Foundation'
  s.dependency 'AFNetworking', '~> 2.1.0'
end
Package the SDK's plist preferences in a resource bundle
# CocoaPods specification for the MAGE iOS SDK.
Pod::Spec.new do |s|
  s.name = "mage-ios-sdk"
  s.version = "0.0.2"
  s.summary = "iOS SDK for MAGE"
  s.description = <<-DESC
    iOS SDK for MAGE, assist with:
    * MAGE authentication.
    * MAGE observations CRUD operations
    * MAGE location services
  DESC
  s.homepage = "https://www.nga.mil"
  s.license = 'DOD'
  s.author = { "NGA" => "newmanw@bit-sys.com" }
  s.source = { :git => "https://git.***REMOVED***/mage/mage-ios-sdk.git", :tag => s.version.to_s }
  s.platform = :ios, '7.0'
  s.ios.deployment_target = '7.0'
  s.requires_arc = true
  s.source_files = 'MAGE/**/*.{h,m}'
  #s.ios.exclude_files = 'Classes/osx'
  #s.osx.exclude_files = 'Classes/ios'
  # s.public_header_files = 'Classes/**/*.h'
  # Ship the SDK's plist preferences in a resource bundle.
  s.resource_bundle = { 'MageSDK' => ['MAGE/**/*.plist'] }
  s.frameworks = 'Foundation'
  s.dependency 'AFNetworking', '~> 2.1.0'
end
|
Testing if git clone was successful
This is a test!
|
#!/usr/bin/env ruby
#
# Dummy test
#
# Copyright (c) 2013 Michael Weibel <michael.weibel@gmail.com>
#
# License: MIT
#
# Requirements to use:
# Don't overload the HSR servers with the cronjob!
#
require 'rubygems'
require 'bundler'
require 'nokogiri'
require './didipasshsr'

Bundler.require(:default, (ENV['RACK_ENV'] ||= :test.to_s).to_sym)

# Expected parse results for the fixture in test/Semesterreport.html.
TEST_GRADES = {
  'Test 1 for DidIPassHSR' => '***',
  'Test 2 for DidIPassHSR' => '5.5',
  'Test 3 for DidIPassHSR' => '4.5',
  'Test 4 for DidIPassHSR' => '3.0'
}
TEST_SEMESTER = 'TestSemester'

runner = DidIPassHSR::Runner.new(ENV)
# clear cache
runner.cache.flush

#
# You think this is ugly or not a valid test? Contribute.
#
# Parse the fixture report and compare semester name and grades.
def test_parse(runner)
  puts "Test Parse...."
  html = File.read(File.join(Dir.pwd, 'test', 'Semesterreport.html'))
  page = Nokogiri::HTML::Document.parse(html)
  semester, new_grades = runner.parse(page)
  # Use && rather than `and`: `and` binds looser than assignment and is
  # not intended for boolean conditions.
  if semester == TEST_SEMESTER && TEST_GRADES == new_grades
    puts 'SUCCESS'
  else
    puts 'FAIL'
  end
  puts "\n"
end

# Send notifications for the test grades and verify the reported count.
def test_notify(runner, expected)
  puts "Test Notify..."
  num_grades = runner.notify(TEST_SEMESTER, TEST_GRADES)
  if num_grades == expected
    puts 'SUCCESS'
  else
    puts 'FAIL'
  end
  if expected > 0
    puts "\n"
    puts 'You may want to check the recipient now.'
  end
  puts "\n"
  num_grades
end

# A second notify run must hit the cache and report zero new grades.
def test_cache(runner)
  puts "Test cache (calling Test notify with expected = 0)..."
  test_notify(runner, 0)
end

# run it
test_parse(runner)
test_notify(runner, 3)
test_cache(runner)
Fix test: drop the rubygems/bundler setup, which is not needed to run the dummy test
#!/usr/bin/env ruby
#
# Dummy test
#
# Copyright (c) 2013 Michael Weibel <michael.weibel@gmail.com>
#
# License: MIT
#
# Requirements to use:
# Don't overload the HSR servers with the cronjob!
#
require 'nokogiri'
require './didipasshsr'

# Expected parse results for the fixture in test/Semesterreport.html.
TEST_GRADES = {
  'Test 1 for DidIPassHSR' => '***',
  'Test 2 for DidIPassHSR' => '5.5',
  'Test 3 for DidIPassHSR' => '4.5',
  'Test 4 for DidIPassHSR' => '3.0'
}
TEST_SEMESTER = 'TestSemester'

runner = DidIPassHSR::Runner.new(ENV)
# clear cache
runner.cache.flush

#
# You think this is ugly or not a valid test? Contribute.
#
# Parse the fixture report and compare semester name and grades.
def test_parse(runner)
  puts "Test Parse...."
  html = File.read(File.join(Dir.pwd, 'test', 'Semesterreport.html'))
  page = Nokogiri::HTML::Document.parse(html)
  semester, new_grades = runner.parse(page)
  # Use && rather than `and`: `and` binds looser than assignment and is
  # not intended for boolean conditions.
  if semester == TEST_SEMESTER && TEST_GRADES == new_grades
    puts 'SUCCESS'
  else
    puts 'FAIL'
  end
  puts "\n"
end

# Send notifications for the test grades and verify the reported count.
def test_notify(runner, expected)
  puts "Test Notify..."
  num_grades = runner.notify(TEST_SEMESTER, TEST_GRADES)
  if num_grades == expected
    puts 'SUCCESS'
  else
    puts 'FAIL'
  end
  if expected > 0
    puts "\n"
    puts 'You may want to check the recipient now.'
  end
  puts "\n"
  num_grades
end

# A second notify run must hit the cache and report zero new grades.
def test_cache(runner)
  puts "Test cache (calling Test notify with expected = 0)..."
  test_notify(runner, 0)
end

# run it
test_parse(runner)
test_notify(runner, 3)
test_cache(runner)
|
first commit
puts "hello world!" |
module Watir
# Base class for all <input>-backed elements.
class InputElement < Element
  # Locate the underlying OLE input element by @how/@what within the
  # type list each concrete subclass declares in INPUT_TYPES.
  def locate
    @o = @container.locate_input_element(@how, @what, self.class::INPUT_TYPES)
  end
  # @param container the enclosing Watir container (page or frame)
  # @param how [Symbol] the location strategy (:id, :name, ...)
  # @param what the value matched by the strategy (String or Regexp)
  def initialize(container, how, what)
    set_container container
    @how = how
    @what = what
    super(nil)
  end
end
#
# Input: Select
#
# This class is the way in which select boxes are manipulated.
# Normally a user would not need to create this object as it is returned by the Watir::Container#select_list method
class SelectList < InputElement
  INPUT_TYPES = ["select-one", "select-multiple"]
  attr_accessor :o

  # This method clears the selected items in the select box
  def clear
    assert_exists
    highlight(:set)
    wait = false
    @o.each do |selectBoxItem|
      if selectBoxItem.selected
        selectBoxItem.selected = false
        wait = true
      end
    end
    # Only wait for the page when a selection actually changed.
    @container.wait if wait
    highlight(:clear)
  end
  # private :clearSelection

  # This method selects an item, or items in a select box, by text.
  # Raises NoValueFoundException if the specified value is not found.
  # * item - the thing to select, string or reg exp
  def select(item)
    select_item_in_select_list(:text, item)
  end
  alias :set :select

  # Selects an item, or items in a select box, by value.
  # Raises NoValueFoundException if the specified value is not found.
  # * item - the value of the thing to select, string, reg exp or an array of string and reg exps
  def select_value(item)
    select_item_in_select_list(:value, item)
  end

  # BUG: Should be private
  # Selects something from the select box
  # * name - symbol :value or :text - how we find an item in the select box
  # * item - string or reg exp - what we are looking for
  def select_item_in_select_list(attribute, value)
    assert_exists
    highlight(:set)
    doBreak = false
    @container.log "Setting box #{@o.name} to #{attribute} #{value} "
    @o.each do |option| # items in the list
      if value.matches(option.invoke(attribute.to_s))
        if option.selected
          doBreak = true
          break
        else
          option.selected = true
          @o.fireEvent("onChange")
          @container.wait
          doBreak = true
          break
        end
      end
    end
    unless doBreak
      raise NoValueFoundException,
        "No option with #{attribute.to_s} of #{value} in this select element"
    end
    highlight(:clear)
  end

  # Returns all the items in the select list as an array.
  # An empty array is returned if the select box has no contents.
  # Raises UnknownObjectException if the select box is not found
  def options
    assert_exists
    @container.log "There are #{@o.length} items"
    returnArray = []
    @o.each { |thisItem| returnArray << thisItem.text }
    return returnArray
  end

  # Returns the selected items as an array.
  # Raises UnknownObjectException if the select box is not found.
  def selected_options
    assert_exists
    returnArray = []
    @container.log "There are #{@o.length} items"
    @o.each do |thisItem|
      if thisItem.selected
        @container.log "Item (#{thisItem.text}) is selected"
        returnArray << thisItem.text
      end
    end
    return returnArray
  end

  # Does the SelectList include the specified option (text)?
  def include? text_or_regexp
    getAllContents.grep(text_or_regexp).size > 0
  end

  # Is the specified option (text) selected? Raises exception of option does not exist.
  def selected? text_or_regexp
    # BUG FIX: this previously called the undefined method `includes?`,
    # which raised NoMethodError; the predicate defined above is `include?`.
    unless include? text_or_regexp
      raise UnknownObjectException, "Option #{text_or_regexp.inspect} not found."
    end
    getSelectedItems.grep(text_or_regexp).size > 0
  end

  # Wrap one option of this list found by attribute (:text/:value/:label).
  def option(attribute, value)
    assert_exists
    Option.new(self, attribute, value)
  end
end
# Read-only accessors that delegate to the wrapped @option OLE object.
# Mixed into Option and OptionWrapper.
module OptionAccess
  # Generate one delegating reader per exposed attribute.
  [:text, :value, :selected].each do |attribute|
    define_method(attribute) { @option.send(attribute) }
  end
end
# Thin wrapper exposing an OLE option object through OptionAccess.
class OptionWrapper
  include OptionAccess
  # @param option the OLE option object to wrap
  def initialize(option)
    @option = option
  end
end
# An item in a select list
class Option
  include OptionAccess
  include Watir::Exception
  # Locate the option matching +value+ by +attribute+ (:text, :value or
  # :label) within +select_list+; other attributes raise
  # MissingWayOfFindingObjectException. @option stays nil if no match.
  def initialize(select_list, attribute, value)
    @select_list = select_list
    @how = attribute
    @what = value
    @option = nil
    unless [:text, :value, :label].include? attribute
      raise MissingWayOfFindingObjectException,
        "Option does not support attribute #{@how}"
    end
    @select_list.o.each do |option| # items in the list
      if value.matches(option.invoke(attribute.to_s))
        @option = option
        break
      end
    end
  end
  # Raise UnknownObjectException when no matching option was found.
  def assert_exists
    unless @option
      raise UnknownObjectException,
        "Unable to locate an option using #{@how} and #{@what}"
    end
  end
  private :assert_exists
  # Select this option through the owning select list.
  def select
    assert_exists
    @select_list.select_item_in_select_list(@how, @what)
  end
end
#
# Input: Button
#
# Returned by the Watir::Container#button method
# Covers all clickable input types; behavior comes from InputElement.
class Button < InputElement
  INPUT_TYPES = ["button", "submit", "image", "reset"]
end
#
# Input: Text
#
# This class is the main class for Text Fields
# Normally a user would not need to create this object as it is returned by the Watir::Container#text_field method
class TextField < InputElement
  INPUT_TYPES = ["text", "password", "textarea"]
  def_wrap_guard :size
  # The field's maxlength attribute as an integer, or 0 when the
  # attribute is absent (the OLE call raises WIN32OLERuntimeError then).
  def maxlength
    assert_exists
    begin
      ole_object.invoke('maxlength').to_i
    rescue WIN32OLERuntimeError
      0
    end
  end
  # Returns true or false if the text field is read only.
  # Raises UnknownObjectException if the object can't be found.
  def_wrap :readonly?, :readOnly
  # Extra #to_s lines describing text-field specific state.
  def text_string_creator
    n = []
    n << "length:".ljust(TO_S_SIZE) + self.size.to_s
    n << "max length:".ljust(TO_S_SIZE) + self.maxlength.to_s
    n << "read only:".ljust(TO_S_SIZE) + self.readonly?.to_s
    n
  end
  private :text_string_creator
  def to_s
    assert_exists
    r = string_creator
    r += text_string_creator
    r.join("\n")
  end
  # Raise ObjectReadOnlyException when the field cannot be edited.
  def assert_not_readonly
    if self.readonly?
      raise ObjectReadOnlyException,
        "Textfield #{@how} and #{@what} is read only."
    end
  end
  # Returns true if the text field contents is matches the specified target,
  # which can be either a string or a regular expression.
  # Raises UnknownObjectException if the object can't be found
  def verify_contains(target) # FIXME: verify_contains should have same name and semantics as IE#contains_text (prolly make this work for all elements)
    assert_exists
    if target.kind_of? String
      return true if self.value == target
    elsif target.kind_of? Regexp
      return true if self.value.match(target) != nil
    end
    return false
  end
  # Drag the entire contents of the text field to another text field
  # 19 Jan 2005 - It is added as prototype functionality, and may change
  # * destination_how - symbol, :id, :name how we identify the drop target
  # * destination_what - string or regular expression, the name, id, etc of the text field that will be the drop target
  def drag_contents_to(destination_how, destination_what)
    assert_exists
    destination = @container.text_field(destination_how, destination_what)
    unless destination.exists?
      raise UnknownObjectException, "Unable to locate destination using #{destination_how } and #{destination_what } "
    end
    # Simulate the full HTML drag-and-drop event sequence.
    @o.focus
    @o.select
    value = self.value
    @o.fireEvent("onSelect")
    @o.fireEvent("ondragstart")
    @o.fireEvent("ondrag")
    destination.fireEvent("onDragEnter")
    destination.fireEvent("onDragOver")
    destination.fireEvent("ondrop")
    @o.fireEvent("ondragend")
    destination.value = destination.value + value.to_s
    self.value = ""
  end
  # Clears the contents of the text box.
  # Raises UnknownObjectException if the object can't be found
  # Raises ObjectDisabledException if the object is disabled
  # Raises ObjectReadOnlyException if the object is read only
  def clear
    assert_enabled
    assert_not_readonly
    highlight(:set)
    @o.scrollIntoView
    @o.focus
    @o.select
    @o.fireEvent("onSelect")
    @o.value = ""
    @o.fireEvent("onKeyPress")
    @o.fireEvent("onChange")
    @container.wait
    highlight(:clear)
  end
  # Appends the specified string value to the contents of the text box.
  # Raises UnknownObjectException if the object cant be found
  # Raises ObjectDisabledException if the object is disabled
  # Raises ObjectReadOnlyException if the object is read only
  def append(value)
    assert_enabled
    assert_not_readonly
    highlight(:set)
    @o.scrollIntoView
    @o.focus
    type_by_character(value)
    highlight(:clear)
  end
  # Sets the contents of the text box to the specified text value
  # Raises UnknownObjectException if the object cant be found
  # Raises ObjectDisabledException if the object is disabled
  # Raises ObjectReadOnlyException if the object is read only
  def set(value)
    assert_enabled
    assert_not_readonly
    highlight(:set)
    @o.scrollIntoView
    # type_keys (toggled by requires_typing/abhors_typing) chooses between
    # a realistic keystroke simulation and a fast direct assignment.
    if type_keys
      @o.focus
      @o.select
      @o.fireEvent("onSelect")
      @o.fireEvent("onKeyPress")
      @o.value = ""
      type_by_character(value)
      @o.fireEvent("onChange")
      @o.fireEvent("onBlur")
    else
      @o.value = limit_to_maxlength(value)
    end
    highlight(:clear)
  end
  # Sets the value of the text field directly.
  # It causes no events to be fired or exceptions to be raised,
  # so generally shouldn't be used.
  # It is preffered to use the set method.
  def value=(v)
    assert_exists
    @o.value = v.to_s
  end
  # Force #set to simulate individual keystrokes.
  def requires_typing
    @type_keys = true
    self
  end
  # Force #set to assign the value directly (no keystroke events).
  def abhors_typing
    @type_keys = false
    self
  end
  private
  # Type the characters in the specified string (value) one by one.
  # It should not be used externally.
  # * value - string - The string to enter into the text field
  def type_by_character(value)
    value = limit_to_maxlength(value)
    characters_in(value) do |c|
      sleep @container.typingspeed
      @o.value = @o.value.to_s + c
      @o.fireEvent("onKeyDown")
      @o.fireEvent("onKeyPress")
      @o.fireEvent("onKeyUp")
    end
  end
  # Supports double-byte characters
  # NOTE(review): `value[index] > 128` only works on Ruby 1.8 where
  # String#[] returned an Integer byte; on 1.9+ it returns a String —
  # confirm the targeted Ruby version.
  def characters_in(value)
    index = 0
    while index < value.length
      len = value[index] > 128 ? 2 : 1
      yield value[index, len]
      index += len
    end
  end
  # Return the value (a string), limited to the maxlength of the element.
  def limit_to_maxlength(value)
    return value if @o.invoke('type') =~ /textarea/i # text areas don't have maxlength
    if value.length > maxlength
      value = value[0 .. maxlength - 1]
      @container.log " Supplied string is #{value.length} chars, which exceeds the max length (#{maxlength}) of the field. Using value: #{value}"
    end
    value
  end
end
# this class can be used to access hidden field objects
# Normally a user would not need to create this object as it is returned by the Watir::Container#hidden method
class Hidden < TextField
  INPUT_TYPES = ["hidden"]
  # set is overridden in this class, as there is no way to set focus to a hidden field
  def set(n)
    self.value = n
  end
  # override the append method, so that focus isn't set to the hidden object
  def append(n)
    self.value = self.value.to_s + n.to_s
  end
  # override the clear method, so that focus isn't set to the hidden object
  def clear
    self.value = ""
  end
  # this method will do nothing, as you can't set focus to a hidden field
  def focus
  end
  # Hidden element is never visible - returns false.
  def visible?
    assert_exists
    false
  end
end
# For fields that accept file uploads
# Windows dialog is opened and handled in this case by autoit
# launching into a new process.
class FileField < InputElement
  INPUT_TYPES = ["file"]
  # set the file location in the Choose file dialog in a new process
  # will raise a Watir Exception if AutoIt is not correctly installed
  def set(path_to_file)
    assert_exists
    require 'watir/windowhelper'
    WindowHelper.check_autoit_installed
    begin
      # Spawn a secondary ruby process that drives the native dialog via
      # AutoIt; the dialog only appears after #click below opens it.
      thrd = Thread.new do
        popup_title = 'Choose file'
        file_field_set =
          "rubyw -e
            \"require 'win32ole'
            @autoit=WIN32OLE.new('AutoItX3.Control')
            waitresult=@autoit.WinWait('#{popup_title}', '', 15)
            sleep 1
            if waitresult == 1
              @autoit.ControlSetText('#{popup_title}', '', 'Edit1', '#{path_to_file}')
              @autoit.ControlSend('#{popup_title}', '', 'Button2', '{ENTER}')
            end\""
        system file_field_set
      end
      # Don't block the main thread for more than a second.
      thrd.join(1)
    rescue
      raise Watir::Exception::WatirException, "Problem accessing Choose file dialog"
    end
    click
  end
end
# This class is the class for radio buttons and check boxes.
# It contains methods common to both.
# Normally a user would not need to create this object as it is returned by the Watir::Container#checkbox or Watir::Container#radio methods
#
# most of the methods available to this element are inherited from the Element class
#
class RadioCheckCommon < InputElement
  # Locating additionally matches on @value, since several radios in a
  # group share the same name.
  def locate
    @o = @container.locate_input_element(@how, @what, self.class::INPUT_TYPES, @value)
  end
  # @param value optional value attribute used to disambiguate the element
  def initialize(container, how, what, value=nil)
    super container, how, what
    @value = value
  end
  # This method determines if a radio button or check box is set.
  # Returns true is set/checked or false if not set/checked.
  # Raises UnknownObjectException if its unable to locate an object.
  def set? # could be just "checked?"
    assert_exists
    return @o.checked
  end
  alias checked? set?
  # This method is the common code for setting or clearing checkboxes and radio.
  def set_clear_item(set)
    @o.checked = set
    @o.fireEvent("onClick")
    @container.wait
  end
  private :set_clear_item
end
#--
# this class makes the docs better
#++
# This class is the watir representation of a radio button.
class Radio < RadioCheckCommon
  INPUT_TYPES = ["radio"]
  # This method clears a radio button. One of them will almost always be set.
  # Returns true if set or false if not set.
  # Raises UnknownObjectException if its unable to locate an object
  # ObjectDisabledException if the object is disabled
  def clear
    assert_enabled
    highlight(:set)
    set_clear_item(false)
    highlight(:clear)
  end
  # This method sets the radio list item.
  # Raises UnknownObjectException if it's unable to locate an object
  # ObjectDisabledException if the object is disabled
  def set
    assert_enabled
    highlight(:set)
    @o.scrollIntoView
    set_clear_item(true)
    highlight(:clear)
  end
end
# This class is the watir representation of a check box.
class CheckBox < RadioCheckCommon
  INPUT_TYPES = ["checkbox"]
  # With no arguments supplied, sets the check box.
  # If the optional value is supplied, the checkbox is set, when its true and
  # cleared when its false
  # Raises UnknownObjectException if it's unable to locate an object
  # ObjectDisabledException if the object is disabled
  def set(value=true)
    assert_enabled
    highlight :set
    # Only fire the click when the state actually changes.
    unless @o.checked == value
      set_clear_item value
    end
    highlight :clear
  end
  # Clears a check box.
  # Raises UnknownObjectException if its unable to locate an object
  # ObjectDisabledException if the object is disabled
  def clear
    set false
  end
end
end
refactoring: whitespace
module Watir
# Base class for all <input>-backed elements.
class InputElement < Element
  # Locate the underlying OLE input element by @how/@what within the
  # type list each concrete subclass declares in INPUT_TYPES.
  def locate
    @o = @container.locate_input_element(@how, @what, self.class::INPUT_TYPES)
  end
  # @param container the enclosing Watir container (page or frame)
  # @param how [Symbol] the location strategy (:id, :name, ...)
  # @param what the value matched by the strategy (String or Regexp)
  def initialize(container, how, what)
    set_container container
    @how = how
    @what = what
    super(nil)
  end
end
#
# Input: Select
#
# This class is the way in which select boxes are manipulated.
# Normally a user would not need to create this object as it is returned by the Watir::Container#select_list method
class SelectList < InputElement
INPUT_TYPES = ["select-one", "select-multiple"]
attr_accessor :o
# This method clears the selected items in the select box
def clear
assert_exists
highlight(:set)
wait = false
@o.each do |selectBoxItem|
if selectBoxItem.selected
selectBoxItem.selected = false
wait = true
end
end
@container.wait if wait
highlight(:clear)
end
# private :clearSelection
# This method selects an item, or items in a select box, by text.
# Raises NoValueFoundException if the specified value is not found.
# * item - the thing to select, string or reg exp
def select(item)
select_item_in_select_list(:text, item)
end
alias :set :select
# Selects an item, or items in a select box, by value.
# Raises NoValueFoundException if the specified value is not found.
# * item - the value of the thing to select, string, reg exp or an array of string and reg exps
def select_value(item)
select_item_in_select_list(:value, item)
end
# BUG: Should be private
# Selects something from the select box
# * name - symbol :value or :text - how we find an item in the select box
# * item - string or reg exp - what we are looking for
def select_item_in_select_list(attribute, value)
assert_exists
highlight(:set)
doBreak = false
@container.log "Setting box #{@o.name} to #{attribute} #{value} "
@o.each do |option| # items in the list
if value.matches(option.invoke(attribute.to_s))
if option.selected
doBreak = true
break
else
option.selected = true
@o.fireEvent("onChange")
@container.wait
doBreak = true
break
end
end
end
unless doBreak
raise NoValueFoundException,
"No option with #{attribute.to_s} of #{value} in this select element"
end
highlight(:clear)
end
# Returns all the items in the select list as an array.
# An empty array is returned if the select box has no contents.
# Raises UnknownObjectException if the select box is not found
def options
assert_exists
@container.log "There are #{@o.length} items"
returnArray = []
@o.each { |thisItem| returnArray << thisItem.text }
return returnArray
end
# Returns the selected items as an array.
# Raises UnknownObjectException if the select box is not found.
def selected_options
assert_exists
returnArray = []
@container.log "There are #{@o.length} items"
@o.each do |thisItem|
if thisItem.selected
@container.log "Item (#{thisItem.text}) is selected"
returnArray << thisItem.text
end
end
return returnArray
end
# Does the SelectList include the specified option (text)?
def include? text_or_regexp
getAllContents.grep(text_or_regexp).size > 0
end
# Is the specified option (text) selected? Raises exception of option does not exist.
# Is the specified option (text) selected? Raises an exception if the
# option does not exist.
def selected? text_or_regexp
  # Fixed: this previously called `includes?`, which is not defined in
  # this class — the existence predicate defined above is `include?` —
  # so every call raised NoMethodError instead of checking selection.
  unless include? text_or_regexp
    raise UnknownObjectException, "Option #{text_or_regexp.inspect} not found."
  end
  getSelectedItems.grep(text_or_regexp).size > 0
end
# Builds an Option wrapper for the option matched by attribute/value,
# e.g. option(:text, "Ruby"). The lookup itself happens inside Option.
def option(attribute, value)
  assert_exists
  Option.new self, attribute, value
end
end
# Read-only accessors shared by Option and OptionWrapper; each reader
# simply forwards to the wrapped option object stored in @option.
module OptionAccess
  [:text, :value, :selected].each do |reader|
    define_method(reader) { @option.send(reader) }
  end
end
# Thin wrapper exposing the OptionAccess readers over a raw option object.
class OptionWrapper
  include OptionAccess
  def initialize(wrapped_option)
    @option = wrapped_option
  end
end
# An item in a select list
class Option
  include OptionAccess
  include Watir::Exception
  # Locates the option in +select_list+ whose +attribute+ (:text, :value
  # or :label) matches +value+. @option stays nil when nothing matches;
  # assert_exists raises later in that case.
  def initialize(select_list, attribute, value)
    @select_list = select_list
    @how = attribute
    @what = value
    @option = nil
    unless [:text, :value, :label].include? attribute
      raise MissingWayOfFindingObjectException,
        "Option does not support attribute #{@how}"
    end
    @select_list.o.each do |option| # items in the list
      if value.matches(option.invoke(attribute.to_s))
        @option = option
        break
      end
    end
  end
  # Raises UnknownObjectException unless the option was found at construction.
  def assert_exists
    unless @option
      raise UnknownObjectException,
        "Unable to locate an option using #{@how} and #{@what}"
    end
  end
  private :assert_exists
  # Selects this option via the owning select list (fires onChange there).
  def select
    assert_exists
    @select_list.select_item_in_select_list(@how, @what)
  end
end
#
# Input: Button
#
# Returned by the Watir::Container#button method
# Wraps input elements that behave as buttons.
class Button < InputElement
  # The HTML input types treated as buttons.
  INPUT_TYPES = %w[button submit image reset]
end
#
# Input: Text
#
# This class is the main class for Text Fields
# Normally a user would not need to create this object as it is returned by the Watir::Container#text_field method
class TextField < InputElement
  INPUT_TYPES = ["text", "password", "textarea"]
  # size is delegated to the OLE element; def_wrap_guard supplies the
  # exception-handling wrapper.
  def_wrap_guard :size
  # The element's maxlength attribute as an integer; 0 when the OLE call
  # fails (e.g. the attribute is absent).
  def maxlength
    assert_exists
    begin
      ole_object.invoke('maxlength').to_i
    rescue WIN32OLERuntimeError
      0
    end
  end
  # Returns true or false if the text field is read only.
  # Raises UnknownObjectException if the object can't be found.
  def_wrap :readonly?, :readOnly
  # Extra description lines (length / max length / read only) appended
  # to #to_s output.
  def text_string_creator
    n = []
    n << "length:".ljust(TO_S_SIZE) + self.size.to_s
    n << "max length:".ljust(TO_S_SIZE) + self.maxlength.to_s
    n << "read only:".ljust(TO_S_SIZE) + self.readonly?.to_s
    n
  end
  private :text_string_creator
  # Formatted description including text-field specific attributes.
  def to_s
    assert_exists
    r = string_creator
    r += text_string_creator
    r.join("\n")
  end
  # Raises ObjectReadOnlyException when the field cannot be written to.
  def assert_not_readonly
    if self.readonly?
      raise ObjectReadOnlyException,
        "Textfield #{@how} and #{@what} is read only."
    end
  end
  # Returns true if the text field contents is matches the specified target,
  # which can be either a string (exact equality) or a regular expression.
  # Raises UnknownObjectException if the object can't be found
  def verify_contains(target) # FIXME: verify_contains should have same name and semantics as IE#contains_text (prolly make this work for all elements)
    assert_exists
    if target.kind_of? String
      return true if self.value == target
    elsif target.kind_of? Regexp
      return true if self.value.match(target) != nil
    end
    return false
  end
  # Drag the entire contents of the text field to another text field
  # 19 Jan 2005 - It is added as prototype functionality, and may change
  # * destination_how - symbol, :id, :name how we identify the drop target
  # * destination_what - string or regular expression, the name, id, etc of the text field that will be the drop target
  def drag_contents_to(destination_how, destination_what)
    assert_exists
    destination = @container.text_field(destination_how, destination_what)
    unless destination.exists?
      raise UnknownObjectException, "Unable to locate destination using #{destination_how } and #{destination_what } "
    end
    @o.focus
    @o.select
    value = self.value
    # Replay the drag-and-drop event sequence on source and target, then
    # move the text by appending it to the destination and clearing here.
    @o.fireEvent("onSelect")
    @o.fireEvent("ondragstart")
    @o.fireEvent("ondrag")
    destination.fireEvent("onDragEnter")
    destination.fireEvent("onDragOver")
    destination.fireEvent("ondrop")
    @o.fireEvent("ondragend")
    destination.value = destination.value + value.to_s
    self.value = ""
  end
  # Clears the contents of the text box.
  # Raises UnknownObjectException if the object can't be found
  # Raises ObjectDisabledException if the object is disabled
  # Raises ObjectReadOnlyException if the object is read only
  def clear
    assert_enabled
    assert_not_readonly
    highlight(:set)
    @o.scrollIntoView
    @o.focus
    @o.select
    @o.fireEvent("onSelect")
    @o.value = ""
    @o.fireEvent("onKeyPress")
    @o.fireEvent("onChange")
    @container.wait
    highlight(:clear)
  end
  # Appends the specified string value to the contents of the text box.
  # Raises UnknownObjectException if the object cant be found
  # Raises ObjectDisabledException if the object is disabled
  # Raises ObjectReadOnlyException if the object is read only
  def append(value)
    assert_enabled
    assert_not_readonly
    highlight(:set)
    @o.scrollIntoView
    @o.focus
    type_by_character(value)
    highlight(:clear)
  end
  # Sets the contents of the text box to the specified text value
  # Raises UnknownObjectException if the object cant be found
  # Raises ObjectDisabledException if the object is disabled
  # Raises ObjectReadOnlyException if the object is read only
  def set(value)
    assert_enabled
    assert_not_readonly
    highlight(:set)
    @o.scrollIntoView
    if type_keys
      # Simulate real typing character by character, with key events.
      @o.focus
      @o.select
      @o.fireEvent("onSelect")
      @o.fireEvent("onKeyPress")
      @o.value = ""
      type_by_character(value)
      @o.fireEvent("onChange")
      @o.fireEvent("onBlur")
    else
      # Fast path: assign the (length-limited) value without key events.
      @o.value = limit_to_maxlength(value)
    end
    highlight(:clear)
  end
  # Sets the value of the text field directly.
  # It causes no events to be fired or exceptions to be raised,
  # so generally shouldn't be used.
  # It is preffered to use the set method.
  def value=(v)
    assert_exists
    @o.value = v.to_s
  end
  # Forces #set to type character-by-character for this field; returns self.
  def requires_typing
    @type_keys = true
    self
  end
  # Forces #set to assign the value directly for this field; returns self.
  def abhors_typing
    @type_keys = false
    self
  end
  private
  # Type the characters in the specified string (value) one by one.
  # It should not be used externally.
  # * value - string - The string to enter into the text field
  def type_by_character(value)
    value = limit_to_maxlength(value)
    characters_in(value) do |c|
      sleep @container.typingspeed
      @o.value = @o.value.to_s + c
      @o.fireEvent("onKeyDown")
      @o.fireEvent("onKeyPress")
      @o.fireEvent("onKeyUp")
    end
  end
  # Supports double-byte characters
  # NOTE(review): `value[index] > 128` relies on Ruby 1.8 String#[]
  # returning a byte (Fixnum); on Ruby 1.9+ it returns a one-character
  # String and this comparison raises — confirm the targeted Ruby version.
  def characters_in(value)
    index = 0
    while index < value.length
      len = value[index] > 128 ? 2 : 1
      yield value[index, len]
      index += len
    end
  end
  # Return the value (a string), limited to the maxlength of the element.
  def limit_to_maxlength(value)
    return value if @o.invoke('type') =~ /textarea/i # text areas don't have maxlength
    if value.length > maxlength
      value = value[0 .. maxlength - 1]
      @container.log " Supplied string is #{value.length} chars, which exceeds the max length (#{maxlength}) of the field. Using value: #{value}"
    end
    value
  end
end
# this class can be used to access hidden field objects
# Normally a user would not need to create this object as it is returned by the Watir::Container#hidden method
# Hidden input fields. Returned by Watir::Container#hidden. The
# focus-driven behaviour inherited from TextField is overridden because
# a hidden field can never receive focus.
class Hidden < TextField
  INPUT_TYPES = ["hidden"]
  # Assigns the value directly; no focus or typing is involved.
  def set(new_value)
    self.value = new_value
  end
  # Appends to the current value without focusing the element.
  def append(extra)
    self.value = self.value.to_s + extra.to_s
  end
  # Empties the field without focusing the element.
  def clear
    self.value = ""
  end
  # No-op: focus cannot be set on a hidden field.
  def focus
  end
  # Hidden element is never visible - returns false.
  def visible?
    assert_exists
    false
  end
end
# For fields that accept file uploads
# Windows dialog is opened and handled in this case by autoit
# launching into a new process.
class FileField < InputElement
  INPUT_TYPES = ["file"]
  # set the file location in the Choose file dialog in a new process
  # will raise a Watir Exception if AutoIt is not correctly installed
  #
  # The native file dialog blocks the browser, so a background thread
  # shells out to a second ruby process which drives AutoIt: it waits up
  # to 15s for the 'Choose file' window, fills in the path and presses
  # the confirm button. The click below is what actually opens the dialog.
  def set(path_to_file)
    assert_exists
    require 'watir/windowhelper'
    WindowHelper.check_autoit_installed
    begin
      thrd = Thread.new do
        popup_title = 'Choose file'
        # NOTE: this string is executed as a command line; its exact
        # layout (including embedded newlines) matters.
        file_field_set =
          "rubyw -e
\"require 'win32ole'
@autoit = WIN32OLE.new('AutoItX3.Control')
waitresult = @autoit.WinWait('#{popup_title}', '', 15)
sleep 1
if waitresult == 1
@autoit.ControlSetText('#{popup_title}', '', 'Edit1', '#{path_to_file}')
@autoit.ControlSend('#{popup_title}', '', 'Button2', '{ENTER}')
end\""
        system file_field_set
      end
      thrd.join(1)
    rescue
      raise Watir::Exception::WatirException, "Problem accessing Choose file dialog"
    end
    click
  end
end
# This class is the class for radio buttons and check boxes.
# It contains methods common to both.
# Normally a user would not need to create this object as it is returned by the Watir::Container#checkbox or Watir::Container#radio methods
#
# most of the methods available to this element are inherited from the Element class
#
# Shared behaviour for radio buttons and check boxes. Instances are
# normally obtained via Watir::Container#checkbox / #radio rather than
# constructed directly; most functionality comes from Element.
class RadioCheckCommon < InputElement
  def initialize(container, how, what, value=nil)
    super container, how, what
    @value = value
  end
  # Finds the underlying input element, additionally constrained by
  # @value when one was supplied.
  def locate
    @o = @container.locate_input_element(@how, @what, self.class::INPUT_TYPES, @value)
  end
  # True when the radio button / check box is currently checked.
  # Raises UnknownObjectException if the element cannot be located.
  def set?
    assert_exists
    @o.checked
  end
  alias checked? set?
  # Shared implementation for checking/unchecking: flips the checked
  # state, fires onClick and waits for the browser.
  def set_clear_item(state)
    @o.checked = state
    @o.fireEvent("onClick")
    @container.wait
  end
  private :set_clear_item
end
#--
# this class makes the docs better
#++
# This class is the watir representation of a radio button.
# The watir representation of a radio button.
class Radio < RadioCheckCommon
  INPUT_TYPES = %w[radio]
  # Unchecks the radio button (one button in a group is almost always set).
  # Raises UnknownObjectException if the element cannot be located and
  # ObjectDisabledException if it is disabled.
  def clear
    assert_enabled
    highlight :set
    set_clear_item false
    highlight :clear
  end
  # Checks the radio button, scrolling it into view first.
  # Raises UnknownObjectException / ObjectDisabledException as above.
  def set
    assert_enabled
    highlight :set
    @o.scrollIntoView
    set_clear_item true
    highlight :clear
  end
end
# This class is the watir representation of a check box.
# The watir representation of a check box.
class CheckBox < RadioCheckCommon
  INPUT_TYPES = %w[checkbox]
  # Checks the box; when +value+ is false, unchecks it instead. Events
  # are only fired when the state actually changes.
  # Raises UnknownObjectException if the element cannot be located and
  # ObjectDisabledException if it is disabled.
  def set(value=true)
    assert_enabled
    highlight :set
    set_clear_item(value) unless @o.checked == value
    highlight :clear
  end
  # Unchecks the box; equivalent to set(false).
  def clear
    set false
  end
end
end |
module Pakyow
class Application
class << self
attr_accessor :routes_proc, :middleware_proc, :configurations, :error_handlers
# Sets the path to the application file so it can be reloaded later.
#
def inherited(subclass)
  # Record the file that defined the application subclass so load_app can
  # re-load it later (auto_reload support). caller[0] is the subclass's
  # definition site.
  Pakyow::Configuration::App.application_path = parse_path_from_caller(caller[0])
end
# Extracts the file path from a single Kernel#caller entry such as
# "/app/app.rb:12:in `<main>'" => "/app/app.rb".
# Fixed: the original regexp used "(:?" where "(?:" was intended,
# producing accidental capture groups (and an optional stray colon).
# The value captured in [1] was unaffected, but the line/method suffix
# groups are now properly non-capturing. The parameter is also renamed
# so it no longer shadows Kernel#caller.
def parse_path_from_caller(caller_line)
  caller_line.match(/^(.+)(?::\d+(?::in `.+')?$)/)[1]
end
# Runs the application. Accepts the environment(s) to run, for example:
# run(:development)
# run([:development, :staging])
#
def run(*args)
  # No environment given => fall back to the configured default.
  self.load_config args.empty? || args.first.nil? ? [Configuration::Base.app.default_environment] : args
  return if running?
  @running = true
  builder = Rack::Builder.new
  builder.use(Rack::MethodOverride)
  # Let the application contribute middleware via the middleware { } DSL.
  builder.instance_eval(&self.middleware_proc) if self.middleware_proc
  builder.run(self.new)
  detect_handler.run(builder, :Host => Pakyow::Configuration::Base.server.host, :Port => Pakyow::Configuration::Base.server.port)
end
# Stages the application. Everything is loaded but the application is
# not started. Accepts the same arguments as #run.
#
# Stages the application: configuration and application files are loaded
# exactly as for #run, but no server is started. Accepts the same
# arguments as #run.
def stage(*args)
  load_config args.empty? || args.first.nil? ? [Configuration::Base.app.default_environment] : args
  return if staged?
  # Instantiating the application loads app files/presenter as a side
  # effect; the instance itself is not retained (previously it was
  # assigned to an unused local).
  self.new
  @staged = true
end
# Returns true if the application is running.
#
def running?
  @running
end
# Returns true if the application is staged. Only #stage sets this
# flag; #run does not.
def staged?
  @staged
end
# Convenience method for base configuration class.
#
def config
  Pakyow::Configuration::Base
end
# Creates configuration for a particular environment. Example:
#   configure(:development) { app.auto_reload = true }
# The block is stored per environment and later instance_eval'd against
# Configuration::Base by load_config when that environment is requested.
def configure(environment, &block)
  self.configurations ||= {}
  self.configurations[environment] = block
end
# Creates routes. Example:
# routes { get '/' { # do something } }
#
def routes(&block)
  # Stored, not executed: load_routes instance_evals this block against
  # the application instance on every (re)load.
  self.routes_proc = block
end
# Creates an error handler (currently 404 and 500 errors are handled).
# The handler can be created one of two ways:
#
# Define a controller/action for a particular error:
# error(404, :ApplicationController, :handle_404)
#
# Specify a block for a particular error:
# error(404) { # handle error }
#
# Registers a handler for an HTTP error code. A handler is either the
# given block or a {:controller, :action} pair built from the arguments.
def error(*args, &block)
  self.error_handlers ||= {}
  code, controller, action = args
  handler = block || { :controller => controller, :action => action }
  self.error_handlers[code] = handler
end
def middleware(&block)
  # Stored for #run, which instance_evals it against the Rack builder.
  self.middleware_proc = block
end
protected
# Applies the stored configure blocks for each requested environment.
# Environments without a stored block are skipped; no-op when nothing
# was ever configured.
def load_config(args)
  return unless self.configurations
  args << Configuration::Base.app.default_environment if args.empty?
  args.each do |env|
    env_config = self.configurations[env]
    Configuration::Base.instance_eval(&env_config) if env_config
  end
end
# Returns the first Rack handler that can be loaded, preferring thin,
# then mongrel, then webrick. (If none loads, the name array itself
# falls through as each's return value, matching the original code.)
def detect_handler
  %w[thin mongrel webrick].each do |server|
    begin
      return Rack::Handler.get(server)
    rescue LoadError, NameError
      # try the next server
    end
  end
end
end
include Helpers
attr_accessor :request, :response, :presenter, :route_store, :restful_routes
# Builds the application instance: registers it globally, prepares the
# static-file handler and (optionally) the presenter, then loads all
# application files.
def initialize
  Pakyow.app = self
  # Create static handler
  @static_handler = Rack::File.new(Configuration::Base.app.public_dir)
  # This configuration option will be set if a presenter is to be used
  if Configuration::Base.app.presenter
    # Create a new instance of the presenter
    self.presenter = Configuration::Base.app.presenter.new
  end
  # Load application files
  load_app
end
# Interrupts the application and returns response immediately.
#
def interrupt!
  @interrupted = true
  # Unwinds to the catch(:halt) inside #call, handing back the response.
  throw :halt, self.response
end
# Called on every request.
#
def call(env)
  start_time = Time.now.to_f
  # Handle static files
  if env['PATH_INFO'] =~ /\.(.*)$/ && File.exists?(File.join(Configuration::Base.app.public_dir, env['PATH_INFO']))
    @static = true
    @static_handler.call(env)
  else
    # The request object
    self.request = Request.new(env)
    # Reload application files (no-op unless auto_reload is on)
    load_app
    if Configuration::Base.app.presenter
      # Handle presentation for this request
      self.presenter.present_for_request(request)
    end
    Log.enter "Processing #{env['PATH_INFO']} (#{env['REMOTE_ADDR']} at #{Time.now}) [#{env['REQUEST_METHOD']}]"
    # The response object
    self.response = Rack::Response.new
    rhs = nil
    # Split a trailing ".format" off the path; a trailing slash on the
    # format is dropped.
    just_the_path, format = StringUtils.split_at_last_dot(self.request.path)
    self.request.format = ((format && (format[format.length - 1, 1] == '/')) ? format[0, format.length - 1] : format)
    # interrupt! throws :halt to exit routing early.
    catch(:halt) do
      rhs, packet = @route_store.get_block(just_the_path, self.request.method)
      packet[:vars].each_pair { |var, val|
        request.params[var] = val
      }
      self.request.route_spec = packet[:data][:route_spec] if packet[:data]
      restful_info = packet[:data][:restful] if packet[:data]
      self.request.restful = restful_info
      rhs.call() if rhs && !Pakyow::Configuration::App.ignore_routes
    end
    if !self.interrupted?
      if Configuration::Base.app.presenter
        self.response.body = [self.presenter.content]
      end
      # 404 if no facts matched and no views were found
      if !rhs && (!self.presenter || !self.presenter.presented?)
        self.handle_error(404)
        Log.enter "[404] Not Found"
        self.response.status = 404
      end
    end
    return finish!
  end
rescue StandardError => error
  self.request.error = error
  self.handle_error(500)
  Log.enter "[500] #{error}\n"
  Log.enter error.backtrace.join("\n") + "\n\n"
  # Start over with a fresh response for the error page.
  self.response = Rack::Response.new
  if Configuration::Base.app.errors_in_browser
    # Show errors in browser
    self.response.body << "<h4>#{CGI.escapeHTML(error.to_s)}</h4>"
    self.response.body << error.backtrace.join("<br />")
  end
  self.response.status = 500
  return finish!
ensure
  # Request timing is logged for everything except static files.
  unless @static
    end_time = Time.now.to_f
    difference = ((end_time - start_time) * 1000).to_f
    Log.enter "Completed in #{difference}ms | #{self.response.status} | [#{self.request.url}]"
    Log.enter
  end
end
# Sends a file in the response (immediately). Accepts a File object. Mime
# type is automatically detected.
#
# Reads the file fully into memory, replaces the current response with
# it, then interrupts the request (code after send_file does not run).
def send_file(source_file, send_as = nil, type = nil)
  path = source_file.is_a?(File) ? source_file.path : source_file
  send_as ||= path
  type ||= Rack::Mime.mime_type(".#{send_as.split('.')[-1]}")
  # Fixed: File.read closes the handle when done; the previous
  # File.open(path, "r").each_line loop leaked an open file descriptor
  # until garbage collection.
  data = File.read(path)
  self.response = Rack::Response.new(data, self.response.status, self.response.header.merge({ "Content-Type" => type }))
  interrupt!
end
# Sends data in the response (immediately). Accepts the data, mime type,
# and optional file name.
#
# Sends raw data in the response (immediately) with the given mime type;
# when file_name is supplied the data is flagged as a download attachment.
def send_data(data, type, file_name = nil)
  status = self.response ? self.response.status : 200
  headers = self.response ? self.response.header : {}
  headers = headers.merge("Content-Type" => type)
  if file_name
    headers = headers.merge("Content-disposition" => "attachment; filename=#{file_name}")
  end
  self.response = Rack::Response.new(data, status, headers)
  interrupt!
end
# Redirects to location (immediately).
#
# Issues an immediate redirect to +location+ with the given status code.
def redirect_to(location, status_code = 302)
  headers = (self.response ? self.response.header : {}).merge('Location' => location)
  self.response = Rack::Response.new('', status_code, headers)
  interrupt!
end
# Registers a route for GET requests. Route can be defined one of two ways:
# get('/', :ControllerClass, :action_method)
# get('/') { # do something }
#
# Routes for namespaced controllers (e.g. Admin::ControllerClass) can be defined like this:
# get('/', :Admin_ControllerClass, :action_method)
#
def get(route, controller = nil, action = nil, &block)
  register_route(route, block, :get, controller, action)
end
# Registers a route for POST requests (see #get).
def post(route, controller = nil, action = nil, &block)
  register_route(route, block, :post, controller, action)
end
# Registers a route for PUT requests (see #get).
def put(route, controller = nil, action = nil, &block)
  register_route(route, block, :put, controller, action)
end
# Registers a route for DELETE requests (see #get).
def delete(route, controller = nil, action = nil, &block)
  register_route(route, block, :delete, controller, action)
end
# Registers the default route, GET "/" (see #get).
def default(controller = nil, action = nil, &block)
  register_route('/', block, :get, controller, action)
end
# Creates REST routes for a resource. Arguments: url, controller, model
#
def restful(*args, &block)
  url, controller, model = args
  # Strip leading/trailing slashes so the scoped path joins cleanly.
  with_scope(:url => url.gsub(/^[\/]+|[\/]+$/,""), :model => model) do
    # A block defines nested resources under a ":<model>_id" segment.
    nest_scope(&block) if block_given?
    @restful_routes ||= {}
    @restful_routes[model] ||= {} if model
    @@restful_actions.each do |opts|
      action_url = current_path
      if suffix = opts[:url_suffix]
        action_url = File.join(action_url, suffix)
      end
      # Create the route
      register_route(action_url, nil, opts[:method], controller, opts[:action], true)
      # Store url for later use (currently used by Binder#action)
      @restful_routes[model][opts[:action]] = action_url if model
    end
    remove_scope
  end
end
# The seven conventional REST actions generated by #restful; when
# :url_suffix is present it is appended to the resource path.
@@restful_actions = [
  { :action => :index, :method => :get },
  { :action => :show, :method => :get, :url_suffix => ':id' },
  { :action => :new, :method => :get, :url_suffix => 'new' },
  { :action => :create, :method => :post },
  { :action => :edit, :method => :get, :url_suffix => 'edit/:id' },
  { :action => :update, :method => :put, :url_suffix => ':id' },
  { :action => :delete, :method => :delete, :url_suffix => ':id' }
]
protected
# True once interrupt! has fired for the current request (cleared by finish!).
def interrupted?
  @interrupted
end
# Handles route registration.
#
# Handles route registration. When a controller symbol is given, the
# handler becomes a lambda that instantiates the controller and
# dispatches the action; otherwise the user-supplied block is stored
# directly. The route plus metadata goes into the RouteStore.
def register_route(route, block, method, controller = nil, action = nil, restful = false)
  if controller
    # NOTE(review): eval resolves the controller symbol to a constant.
    # Acceptable only because these names come from application code,
    # never from request input — keep it that way.
    controller = eval(controller.to_s)
    action ||= Configuration::Base.app.default_action
    block = lambda {
      instance = controller.new
      request.controller = instance
      request.action = action
      instance.send(action)
    }
  end
  data = {:route_spec=>route}
  if restful
    data[:restful] = {:restful_action=>action}
  end
  @route_store.add_route(route, block, method, data)
end
# Pushes a URL segment (and the current model) onto the routing scope
# for the duration of the block. Note: the segment is NOT popped on
# exit; callers (see #restful) pop explicitly via remove_scope.
def with_scope(opts)
  @scope ||= {}
  @scope[:path] ||= []
  @scope[:model] = opts[:model]
  @scope[:path].push(opts[:url])
  yield
end
# Pops the most recent URL segment from the scope.
def remove_scope
  @scope[:path].pop
end
# Temporarily appends a ":<model>_id" segment while the block runs,
# nesting child routes under the parent resource.
def nest_scope(&block)
  @scope[:path] << ":#{StringUtils.underscore(@scope[:model].to_s)}_id"
  yield
  @scope[:path].pop
end
# Joins the accumulated scope segments into the current route path.
def current_path
  @scope[:path].join('/')
end
# Invokes the registered handler for an error code, if any. Proc
# handlers are called directly; controller/action handlers are
# instantiated and dispatched like a normal route.
def handle_error(code)
  return unless self.class.error_handlers
  return unless handler = self.class.error_handlers[code]
  if handler.is_a? Proc
    handler.call
  else
    c = eval(handler[:controller].to_s).new
    c.send(handler[:action])
  end
end
# Copies request cookies onto the response. Hash values are merged over
# week-long defaults; String values become week-long cookies; anything
# else (typically nil) produces an already-expired cookie, deleting it.
def set_cookies
  cookies = self.request.cookies
  return unless cookies && cookies != {}
  week = 604800
  cookies.each do |key, value|
    case value
    when Hash
      self.response.set_cookie(key, {:path => '/', :expires => Time.now + week}.merge(value))
    when String
      self.response.set_cookie(key, {:path => '/', :expires => Time.now + week}.merge({:value => value}))
    else
      self.response.set_cookie(key, {:path => '/', :expires => Time.now + week * -1 }.merge({:value => value}))
    end
  end
end
# Reloads all application files in application_path and presenter (if specified).
#
def load_app
  # Skip when already loaded — unless auto_reload is on, in which case
  # everything is re-loaded on every request.
  return if @loaded && !Configuration::Base.app.auto_reload
  @loaded = true
  # Reload Application
  load(Configuration::App.application_path)
  @loader = Loader.new unless @loader
  @loader.load!(Configuration::Base.app.src_dir)
  load_routes
  # Reload views
  if Configuration::Base.app.presenter
    self.presenter.reload!
  end
end
# Rebuilds the RouteStore and re-evaluates the application's routes
# block (if one was declared) against this instance.
def load_routes
  @route_store = RouteStore.new
  self.instance_eval(&self.class.routes_proc) if self.class.routes_proc
end
# Send the response and cleanup.
#
def finish!
  # Reset per-request flags for the next request.
  @interrupted = false
  @static = false
  # Set cookies
  set_cookies
  # Finished: hand the Rack status/headers/body triplet to the server.
  self.response.finish
end
end
end
Helpers are now available in error handlers
Views defined in error handlers are now presented
module Pakyow
class Application
class << self
attr_accessor :routes_proc, :middleware_proc, :configurations, :error_handlers
# Sets the path to the application file so it can be reloaded later.
#
def inherited(subclass)
Pakyow::Configuration::App.application_path = parse_path_from_caller(caller[0])
end
# Extracts the file path from a single Kernel#caller entry such as
# "/app/app.rb:12:in `<main>'" => "/app/app.rb".
# Fixed: the original regexp used "(:?" where "(?:" was intended,
# producing accidental capture groups (and an optional stray colon).
# The value captured in [1] was unaffected, but the line/method suffix
# groups are now properly non-capturing. The parameter is also renamed
# so it no longer shadows Kernel#caller.
def parse_path_from_caller(caller_line)
  caller_line.match(/^(.+)(?::\d+(?::in `.+')?$)/)[1]
end
# Runs the application. Accepts the environment(s) to run, for example:
# run(:development)
# run([:development, :staging])
#
def run(*args)
self.load_config args.empty? || args.first.nil? ? [Configuration::Base.app.default_environment] : args
return if running?
@running = true
builder = Rack::Builder.new
builder.use(Rack::MethodOverride)
builder.instance_eval(&self.middleware_proc) if self.middleware_proc
builder.run(self.new)
detect_handler.run(builder, :Host => Pakyow::Configuration::Base.server.host, :Port => Pakyow::Configuration::Base.server.port)
end
# Stages the application. Everything is loaded but the application is
# not started. Accepts the same arguments as #run.
#
# Stages the application: configuration and application files are loaded
# exactly as for #run, but no server is started. Accepts the same
# arguments as #run.
def stage(*args)
  load_config args.empty? || args.first.nil? ? [Configuration::Base.app.default_environment] : args
  return if staged?
  # Instantiating the application loads app files/presenter as a side
  # effect; the instance itself is not retained (previously it was
  # assigned to an unused local).
  self.new
  @staged = true
end
# Returns true if the application is running.
#
def running?
@running
end
# Returns true if the application is staged.
#
def staged?
@staged
end
# Convenience method for base configuration class.
#
def config
Pakyow::Configuration::Base
end
# Creates configuration for a particular environment. Example:
# configure(:development) { app.auto_reload = true }
#
def configure(environment, &block)
self.configurations ||= {}
self.configurations[environment] = block
end
# Creates routes. Example:
# routes { get '/' { # do something } }
#
def routes(&block)
self.routes_proc = block
end
# Creates an error handler (currently 404 and 500 errors are handled).
# The handler can be created one of two ways:
#
# Define a controller/action for a particular error:
# error(404, :ApplicationController, :handle_404)
#
# Specify a block for a particular error:
# error(404) { # handle error }
#
def error(*args, &block)
self.error_handlers ||= {}
code, controller, action = args
if block
self.error_handlers[code] = block
else
self.error_handlers[code] = {
:controller => controller,
:action => action
}
end
end
def middleware(&block)
self.middleware_proc = block
end
protected
def load_config(args)
if self.configurations
args << Configuration::Base.app.default_environment if args.empty?
args.each do |env|
next unless config = self.configurations[env]
Configuration::Base.instance_eval(&config)
end
end
end
def detect_handler
['thin', 'mongrel', 'webrick'].each do |server|
begin
return Rack::Handler.get(server)
rescue LoadError
rescue NameError
end
end
end
end
include Helpers
attr_accessor :request, :response, :presenter, :route_store, :restful_routes
def initialize
Pakyow.app = self
# Create static handler
@static_handler = Rack::File.new(Configuration::Base.app.public_dir)
# This configuration option will be set if a presenter is to be used
if Configuration::Base.app.presenter
# Create a new instance of the presenter
self.presenter = Configuration::Base.app.presenter.new
end
# Load application files
load_app
end
# Interrupts the application and returns response immediately.
#
def interrupt!
@interrupted = true
throw :halt, self.response
end
# Called on every request.
#
def call(env)
start_time = Time.now.to_f
# Handle static files
if env['PATH_INFO'] =~ /\.(.*)$/ && File.exists?(File.join(Configuration::Base.app.public_dir, env['PATH_INFO']))
@static = true
@static_handler.call(env)
else
# The request object
self.request = Request.new(env)
# Reload application files
load_app
if Configuration::Base.app.presenter
# Handle presentation for this request
self.presenter.present_for_request(request)
end
Log.enter "Processing #{env['PATH_INFO']} (#{env['REMOTE_ADDR']} at #{Time.now}) [#{env['REQUEST_METHOD']}]"
# The response object
self.response = Rack::Response.new
rhs = nil
just_the_path, format = StringUtils.split_at_last_dot(self.request.path)
self.request.format = ((format && (format[format.length - 1, 1] == '/')) ? format[0, format.length - 1] : format)
catch(:halt) do
rhs, packet = @route_store.get_block(just_the_path, self.request.method)
packet[:vars].each_pair { |var, val|
request.params[var] = val
}
self.request.route_spec = packet[:data][:route_spec] if packet[:data]
restful_info = packet[:data][:restful] if packet[:data]
self.request.restful = restful_info
rhs.call() if rhs && !Pakyow::Configuration::App.ignore_routes
end
if !self.interrupted?
if Configuration::Base.app.presenter
self.response.body = [self.presenter.content]
end
# 404 if no facts matched and no views were found
if !rhs && (!self.presenter || !self.presenter.presented?)
self.handle_error(404)
Log.enter "[404] Not Found"
self.response.status = 404
end
end
return finish!
end
rescue StandardError => error
self.request.error = error
self.handle_error(500)
Log.enter "[500] #{error}\n"
Log.enter error.backtrace.join("\n") + "\n\n"
# self.response = Rack::Response.new
if Configuration::Base.app.errors_in_browser
# Show errors in browser
self.response.body = []
self.response.body << "<h4>#{CGI.escapeHTML(error.to_s)}</h4>"
self.response.body << error.backtrace.join("<br />")
end
self.response.status = 500
return finish!
ensure
unless @static
end_time = Time.now.to_f
difference = ((end_time - start_time) * 1000).to_f
Log.enter "Completed in #{difference}ms | #{self.response.status} | [#{self.request.url}]"
Log.enter
end
end
# Sends a file in the response (immediately). Accepts a File object. Mime
# type is automatically detected.
#
# Reads the file fully into memory, replaces the current response with
# it, then interrupts the request (code after send_file does not run).
def send_file(source_file, send_as = nil, type = nil)
  path = source_file.is_a?(File) ? source_file.path : source_file
  send_as ||= path
  type ||= Rack::Mime.mime_type(".#{send_as.split('.')[-1]}")
  # Fixed: File.read closes the handle when done; the previous
  # File.open(path, "r").each_line loop leaked an open file descriptor
  # until garbage collection.
  data = File.read(path)
  self.response = Rack::Response.new(data, self.response.status, self.response.header.merge({ "Content-Type" => type }))
  interrupt!
end
# Sends data in the response (immediately). Accepts the data, mime type,
# and optional file name.
#
def send_data(data, type, file_name = nil)
status = self.response ? self.response.status : 200
headers = self.response ? self.response.header : {}
headers = headers.merge({ "Content-Type" => type })
headers = headers.merge({ "Content-disposition" => "attachment; filename=#{file_name}"}) if file_name
self.response = Rack::Response.new(data, status, headers)
interrupt!
end
# Redirects to location (immediately).
#
def redirect_to(location, status_code = 302)
headers = self.response ? self.response.header : {}
headers = headers.merge({'Location' => location})
self.response = Rack::Response.new('', status_code, headers)
interrupt!
end
# Registers a route for GET requests. Route can be defined one of two ways:
# get('/', :ControllerClass, :action_method)
# get('/') { # do something }
#
# Routes for namespaced controllers (e.g. Admin::ControllerClass) can be defined like this:
# get('/', :Admin_ControllerClass, :action_method)
#
def get(route, controller = nil, action = nil, &block)
register_route(route, block, :get, controller, action)
end
# Registers a route for POST requests (see #get).
#
def post(route, controller = nil, action = nil, &block)
register_route(route, block, :post, controller, action)
end
# Registers a route for PUT requests (see #get).
#
def put(route, controller = nil, action = nil, &block)
register_route(route, block, :put, controller, action)
end
# Registers a route for DELETE requests (see #get).
#
def delete(route, controller = nil, action = nil, &block)
register_route(route, block, :delete, controller, action)
end
# Registers the default route (see #get).
#
def default(controller = nil, action = nil, &block)
register_route('/', block, :get, controller, action)
end
# Creates REST routes for a resource. Arguments: url, controller, model
#
def restful(*args, &block)
url, controller, model = args
with_scope(:url => url.gsub(/^[\/]+|[\/]+$/,""), :model => model) do
nest_scope(&block) if block_given?
@restful_routes ||= {}
@restful_routes[model] ||= {} if model
@@restful_actions.each do |opts|
action_url = current_path
if suffix = opts[:url_suffix]
action_url = File.join(action_url, suffix)
end
# Create the route
register_route(action_url, nil, opts[:method], controller, opts[:action], true)
# Store url for later use (currently used by Binder#action)
@restful_routes[model][opts[:action]] = action_url if model
end
remove_scope
end
end
@@restful_actions = [
{ :action => :index, :method => :get },
{ :action => :show, :method => :get, :url_suffix => ':id' },
{ :action => :new, :method => :get, :url_suffix => 'new' },
{ :action => :create, :method => :post },
{ :action => :edit, :method => :get, :url_suffix => 'edit/:id' },
{ :action => :update, :method => :put, :url_suffix => ':id' },
{ :action => :delete, :method => :delete, :url_suffix => ':id' }
]
protected
def interrupted?
@interrupted
end
# Handles route registration.
#
def register_route(route, block, method, controller = nil, action = nil, restful = false)
if controller
controller = eval(controller.to_s)
action ||= Configuration::Base.app.default_action
block = lambda {
instance = controller.new
request.controller = instance
request.action = action
instance.send(action)
}
end
data = {:route_spec=>route}
if restful
data[:restful] = {:restful_action=>action}
end
@route_store.add_route(route, block, method, data)
end
def with_scope(opts)
@scope ||= {}
@scope[:path] ||= []
@scope[:model] = opts[:model]
@scope[:path] << opts[:url]
yield
end
def remove_scope
@scope[:path].pop
end
def nest_scope(&block)
@scope[:path].insert(-1, ":#{StringUtils.underscore(@scope[:model].to_s)}_id")
yield
@scope[:path].pop
end
def current_path
@scope[:path].join('/')
end
# Invokes the registered handler for an error code, if any. Proc
# handlers are instance_eval'd against the application so the request/
# response helpers work inside them; controller/action handlers are
# instantiated and dispatched like a normal route. Afterwards any views
# rendered by the handler become the response body via the presenter.
def handle_error(code)
  return unless self.class.error_handlers
  return unless handler = self.class.error_handlers[code]
  if handler.is_a? Proc
    Pakyow.app.instance_eval(&handler)
  else
    c = eval(handler[:controller].to_s).new
    c.send(handler[:action])
  end
  # Guard added: when the app is configured without a presenter,
  # self.presenter is nil (see #initialize) and this line previously
  # raised NoMethodError inside the error path, masking the original
  # error with a second failure.
  self.response.body = [self.presenter.content] if self.presenter
end
def set_cookies
if self.request.cookies && self.request.cookies != {}
self.request.cookies.each do |key, value|
if value.is_a?(Hash)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge(value))
elsif value.is_a?(String)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge({:value => value}))
else
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800 * -1 }.merge({:value => value}))
end
end
end
end
# Reloads all application files in application_path and presenter (if specified).
#
def load_app
return if @loaded && !Configuration::Base.app.auto_reload
@loaded = true
# Reload Application
load(Configuration::App.application_path)
@loader = Loader.new unless @loader
@loader.load!(Configuration::Base.app.src_dir)
load_routes
# Reload views
if Configuration::Base.app.presenter
self.presenter.reload!
end
end
def load_routes
@route_store = RouteStore.new
self.instance_eval(&self.class.routes_proc) if self.class.routes_proc
end
# Send the response and cleanup.
#
def finish!
@interrupted = false
@static = false
# Set cookies
set_cookies
# Finished
self.response.finish
end
end
end
|
module Pakyow
class Application
class << self
attr_accessor :routes_proc, :handlers_proc, :middleware_proc, :configurations
# Sets the path to the application file so it can be reloaded later.
#
def inherited(subclass)
Pakyow::Configuration::App.application_path = parse_path_from_caller(caller[0])
end
def parse_path_from_caller(caller)
caller.match(/^(.+)(:?:\d+(:?:in `.+')?$)/)[1]
end
# Runs the application. Accepts the environment(s) to run, for example:
# run(:development)
# run([:development, :staging])
#
def run(*args)
return if running?
@running = true
self.builder.run(self.prepare(*args))
detect_handler.run(builder, :Host => Pakyow::Configuration::Base.server.host, :Port => Pakyow::Configuration::Base.server.port)
end
# Stages the application. Everything is loaded but the application is
# not started. Accepts the same arguments as #run.
#
def stage(*args)
return if staged?
@staged = true
prepare(*args)
end
def builder
@builder ||= Rack::Builder.new
end
def prepared?
@prepared
end
# Returns true if the application is running.
#
def running?
@running
end
# Returns true if the application is staged.
#
def staged?
@staged
end
# Convenience method for base configuration class.
#
def config
Pakyow::Configuration::Base
end
# Creates configuration for a particular environment. Example:
# configure(:development) { app.auto_reload = true }
#
def configure(environment, &block)
self.configurations ||= {}
self.configurations[environment] = block
end
# Creates routes. Example:
# routes { get '/' { # do something } }
#
def routes(&block)
self.routes_proc = block
end
# Creates handlers for later execution.
# The handler can be created one of two ways:
#
# Define a controller/action handler with an associate response status:
# handler(name, 404, :ApplicationController, :handle_404)
#
# Specify a block as a handler:
# handler(name, 404) { # handle error }
#
# If a controller calls #invoke_handler!(name) then the
# handler defined for that code will be invoked.
#
def handlers(&block)
self.handlers_proc = block
end
def middleware(&block)
self.middleware_proc = block
end
protected
# Prepares the application for running or staging and returns an instance
# of the application.
def prepare(*args)
self.load_config args.empty? || args.first.nil? ? [Configuration::Base.app.default_environment] : args
return if prepared?
self.builder.use(Rack::MethodOverride)
self.builder.instance_eval(&self.middleware_proc) if self.middleware_proc
self.builder.use(Pakyow::Static) if Configuration::Base.app.static
self.builder.use(Pakyow::Logger) if Configuration::Base.app.log
self.builder.use(Pakyow::Reloader) if Configuration::Base.app.auto_reload
@prepared = true
$:.unshift(Dir.pwd) unless $:.include? Dir.pwd
return self.new
end
def load_config(args)
if self.configurations
args << Configuration::Base.app.default_environment if args.empty?
args.each do |env|
next unless config = self.configurations[env]
Configuration::Base.instance_eval(&config)
end
end
end
def detect_handler
['thin', 'mongrel', 'webrick'].each do |server|
begin
return Rack::Handler.get(server)
rescue LoadError
rescue NameError
end
end
end
end
include Helpers
attr_accessor :request, :response, :presenter, :route_store, :restful_routes, :handler_store
def initialize
Pakyow.app = self
@handler_name_to_code = {}
@handler_code_to_name = {}
# This configuration option will be set if a presenter is to be used
if Configuration::Base.app.presenter
# Create a new instance of the presenter
self.presenter = Configuration::Base.app.presenter.new
end
# Load application files
load_app
end
# Interrupts the application and returns response immediately.
#
def halt!
throw :halt, self.response
end
def invoke_route!(route, method=nil)
self.request.working_path = route
self.request.working_method = method if method
block = prepare_route_block(route, self.request.working_method)
throw :new_block, block
end
def invoke_handler!(name_or_code)
# TODO Think about all this
if block = @handler_store[name_or_code]
# we are given a name
code = @handler_name_to_code[name_or_code]
self.response.status = code if code
throw :new_block, block
elsif name = @handler_code_to_name[name_or_code]
# we are given a code
block = @handler_store[name]
self.response.status = name_or_code
throw :new_block, block
else
# no block to be found
# do we assume code if a number and set status?
self.response.status = name_or_code if name_or_code.is_a?(Fixnum)
# still need to stop execution, I think? But do nothing.
throw :new_block, nil
end
end
#TODO move to protected section
def prepare_route_block(route, method)
set_request_format_from_route(route)
controller_block, packet = @route_store.get_block(route, method)
self.request.params.merge!(HashUtils.strhash(packet[:vars]))
self.request.route_spec = packet[:data][:route_spec] if packet[:data]
self.request.restful = packet[:data][:restful] if packet[:data]
controller_block
end
#TODO move to protected section
def trampoline(block)
while block do
block = catch(:new_block) {
block.call()
# Getting here means that call() returned normally (not via a throw)
# By definition, we do not have a 404 since we matched a route to get the block to call
nil
} # end :invoke_route catch block
# If invoke_route! or invoke_handler! was called in the block, block will have a new value.
# If neither was called, block will be nil
if block && self.presenter
self.presenter.prepare_for_request(self.request)
end
end
end
# Called on every request.
#
def call(env)
self.request = Request.new(env)
self.response = Rack::Response.new
self.request.working_path = self.request.path
self.request.working_method = self.request.method
have_initial_route = false
catch(:halt) {
route_block = prepare_route_block(self.request.path, self.request.method)
have_initial_route = true if route_block
if self.presenter
self.presenter.prepare_for_request(self.request)
end
trampoline(route_block) if !Pakyow::Configuration::App.ignore_routes
if self.presenter
self.response.body = [self.presenter.content]
end
# 404 if no route matched and no views were found
if !have_initial_route && (!self.presenter || !self.presenter.presented?)
Log.enter "[404] Not Found"
handler404 = @handler_store[@handler_code_to_name[404]] if @handler_code_to_name[404]
if handler404
catch(:halt) {
if self.presenter
self.presenter.prepare_for_request(self.request)
end
trampoline(handler404)
if self.presenter then
self.response.body = [self.presenter.content]
end
}
end
self.response.status = 404
end
} #end :halt catch block
# This needs to be in the 'return' position (last statement)
finish!
rescue StandardError => error
self.request.error = error
handler500 = @handler_store[@handler_code_to_name[500]] if @handler_code_to_name[500]
Log.enter "Checking for a 500 handler"
if handler500
Log.enter "Have a 500 handler"
catch(:halt) {
if self.presenter
self.presenter.prepare_for_request(self.request)
end
trampoline(handler500)
if self.presenter then
self.response.body = [self.presenter.content]
end
} #end :halt catch block
end
self.response.status = 500
if Configuration::Base.app.errors_in_browser
self.response.body = []
self.response.body << "<h4>#{CGI.escapeHTML(error.to_s)}</h4>"
self.response.body << error.backtrace.join("<br />")
end
begin
# caught by other middleware (e.g. logger)
throw :error, error
rescue ArgumentError
end
finish!
end
# Sends a file in the response (immediately). Accepts a File object. Mime
# type is automatically detected.
#
def send_file(source_file, send_as = nil, type = nil)
path = source_file.is_a?(File) ? source_file.path : source_file
send_as ||= path
type ||= Rack::Mime.mime_type(".#{send_as.split('.')[-1]}")
data = ""
File.open(path, "r").each_line { |line| data << line }
self.response = Rack::Response.new(data, self.response.status, self.response.header.merge({ "Content-Type" => type }))
halt!
end
# Sends data in the response (immediately). Accepts the data, mime type,
# and optional file name.
#
def send_data(data, type, file_name = nil)
status = self.response ? self.response.status : 200
headers = self.response ? self.response.header : {}
headers = headers.merge({ "Content-Type" => type })
headers = headers.merge({ "Content-disposition" => "attachment; filename=#{file_name}"}) if file_name
self.response = Rack::Response.new(data, status, headers)
halt!
end
# Redirects to location (immediately).
#
def redirect_to(location, status_code = 302)
headers = self.response ? self.response.header : {}
headers = headers.merge({'Location' => location})
self.response = Rack::Response.new('', status_code, headers)
halt!
end
# Registers a route for GET requests. Route can be defined one of two ways:
# get('/', :ControllerClass, :action_method)
# get('/') { # do something }
#
# Routes for namespaced controllers (e.g. Admin::ControllerClass) can be defined like this:
# get('/', :Admin_ControllerClass, :action_method)
#
def get(route, *args, &block)
register_route(:user, route, block, :get, *args)
end
# Registers a route for POST requests (see #get).
#
def post(route, *args, &block)
register_route(:user, route, block, :post, *args)
end
# Registers a route for PUT requests (see #get).
#
def put(route, *args, &block)
register_route(:user, route, block, :put, *args)
end
# Registers a route for DELETE requests (see #get).
#
def delete(route, *args, &block)
register_route(:user, route, block, :delete, *args)
end
# Registers the default route (see #get).
#
def default(*args, &block)
register_route(:user, '/', block, :get, *args)
end
# Creates REST routes for a resource. Arguments: url, controller, model, hooks
#
def restful(url, controller, *args, &block)
model, hooks = parse_restful_args(args)
with_scope(:url => url.gsub(/^[\/]+|[\/]+$/,""), :model => model) do
nest_scope(&block) if block_given?
@restful_routes ||= {}
@restful_routes[model] ||= {} if model
@@restful_actions.each do |opts|
action_url = current_path
if suffix = opts[:url_suffix]
action_url = File.join(action_url, suffix)
end
# Create the route
register_route(:restful, action_url, nil, opts[:method], controller, opts[:action], hooks)
# Store url for later use (currently used by Binder#action)
@restful_routes[model][opts[:action]] = action_url if model
end
remove_scope
end
end
@@restful_actions = [
{ :action => :edit, :method => :get, :url_suffix => 'edit/:id' },
{ :action => :show, :method => :get, :url_suffix => ':id' },
{ :action => :new, :method => :get, :url_suffix => 'new' },
{ :action => :update, :method => :put, :url_suffix => ':id' },
{ :action => :delete, :method => :delete, :url_suffix => ':id' },
{ :action => :index, :method => :get },
{ :action => :create, :method => :post }
]
def hook(name, controller = nil, action = nil, &block)
block = build_controller_block(controller, action) if controller
@route_store.add_hook(name, block)
end
def handler(name, *args, &block)
code, controller, action = parse_handler_args(args)
if block_given?
@handler_store[name] = block
else
@handler_store[name] = build_controller_block(controller, action)
end
if code
@handler_name_to_code[name] = code
@handler_code_to_name[code] = name
end
end
#TODO: don't like this...
def reload
load_app
end
protected
def parse_route_args(args)
controller = args[0] if args[0] && (args[0].is_a?(Symbol) || args[0].is_a?(String))
action = args[1] if controller
hooks = args[2] if controller
unless controller
hooks = args[0] if args[0] && args[0].is_a?(Hash)
end
return controller, action, hooks
end
def parse_restful_args(args)
model = args[0] if args[0] && (args[0].is_a?(Symbol) || args[0].is_a?(String))
hooks = args[1] if model
unless model
hooks = args[0] if args[0] && args[0].is_a?(Hash)
end
return model, hooks
end
def parse_handler_args(args)
code = args[0] if args.length == 1 || args.length == 3
controller = args[1] if code
action = args[2] if code && args[2]
unless code
controller = args[0]
action = args[1] if args[1]
end
return code, controller, action
end
# Handles route registration.
#
def register_route(type, route, block, method, *args)
controller, action, hooks = parse_route_args(args)
if controller
block = build_controller_block(controller, action)
end
data = {:route_type=>type, :route_spec=>route}
if type == :restful
data[:restful] = {:restful_action=>action}
end
@route_store.add_route(route, block, method, data, hooks)
end
def build_controller_block(controller, action)
controller = eval(controller.to_s)
action ||= Configuration::Base.app.default_action
block = lambda {
instance = controller.new
request.controller = instance
request.action = action
instance.send(action)
}
block
end
def set_request_format_from_route(route)
route, format = StringUtils.split_at_last_dot(route)
self.request.format = ((format && (format[format.length - 1, 1] == '/')) ? format[0, format.length - 1] : format)
end
def with_scope(opts)
@scope ||= {}
@scope[:path] ||= []
@scope[:model] = opts[:model]
@scope[:path] << opts[:url]
yield
end
def remove_scope
@scope[:path].pop
end
def nest_scope(&block)
@scope[:path].insert(-1, ":#{StringUtils.underscore(@scope[:model].to_s)}_id")
yield
@scope[:path].pop
end
def current_path
@scope[:path].join('/')
end
def set_cookies
if self.request.cookies && self.request.cookies != {}
self.request.cookies.each do |key, value|
if value.is_a?(Hash)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge(value))
elsif value.is_a?(String)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge({:value => value}))
else
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800 * -1 }.merge({:value => value}))
end
end
end
end
# Reloads all application files in application_path and presenter (if specified).
#
def load_app
load(Configuration::App.application_path)
@loader = Loader.new unless @loader
@loader.load!(Configuration::Base.app.src_dir)
load_handlers
load_routes
# Reload views
if self.presenter
self.presenter.load
end
end
def load_handlers
@handler_store = {}
self.instance_eval(&self.class.handlers_proc) if self.class.handlers_proc
end
def load_routes
@route_store = RouteStore.new
self.instance_eval(&self.class.routes_proc) if self.class.routes_proc
end
# Send the response and cleanup.
#
def finish!
set_cookies
self.response.finish
end
end
end
Fixed handler definition parsing.
module Pakyow
class Application
class << self
attr_accessor :routes_proc, :handlers_proc, :middleware_proc, :configurations
# Sets the path to the application file so it can be reloaded later.
#
def inherited(subclass)
Pakyow::Configuration::App.application_path = parse_path_from_caller(caller[0])
end
def parse_path_from_caller(caller)
caller.match(/^(.+)(:?:\d+(:?:in `.+')?$)/)[1]
end
# Runs the application. Accepts the environment(s) to run, for example:
# run(:development)
# run([:development, :staging])
#
def run(*args)
return if running?
@running = true
self.builder.run(self.prepare(*args))
detect_handler.run(builder, :Host => Pakyow::Configuration::Base.server.host, :Port => Pakyow::Configuration::Base.server.port)
end
# Stages the application. Everything is loaded but the application is
# not started. Accepts the same arguments as #run.
#
def stage(*args)
return if staged?
@staged = true
prepare(*args)
end
def builder
@builder ||= Rack::Builder.new
end
def prepared?
@prepared
end
# Returns true if the application is running.
#
def running?
@running
end
# Returns true if the application is staged.
#
def staged?
@staged
end
# Convenience method for base configuration class.
#
def config
Pakyow::Configuration::Base
end
# Creates configuration for a particular environment. Example:
# configure(:development) { app.auto_reload = true }
#
def configure(environment, &block)
self.configurations ||= {}
self.configurations[environment] = block
end
# Creates routes. Example:
# routes { get '/' { # do something } }
#
def routes(&block)
self.routes_proc = block
end
# Creates handlers for later execution.
# The handler can be created one of two ways:
#
# Define a controller/action handler with an associate response status:
# handler(name, 404, :ApplicationController, :handle_404)
#
# Specify a block as a handler:
# handler(name, 404) { # handle error }
#
# If a controller calls #invoke_handler!(name) then the
# handler defined for that code will be invoked.
#
def handlers(&block)
self.handlers_proc = block
end
def middleware(&block)
self.middleware_proc = block
end
protected
# Prepares the application for running or staging and returns an instance
# of the application.
def prepare(*args)
self.load_config args.empty? || args.first.nil? ? [Configuration::Base.app.default_environment] : args
return if prepared?
self.builder.use(Rack::MethodOverride)
self.builder.instance_eval(&self.middleware_proc) if self.middleware_proc
self.builder.use(Pakyow::Static) if Configuration::Base.app.static
self.builder.use(Pakyow::Logger) if Configuration::Base.app.log
self.builder.use(Pakyow::Reloader) if Configuration::Base.app.auto_reload
@prepared = true
$:.unshift(Dir.pwd) unless $:.include? Dir.pwd
return self.new
end
def load_config(args)
if self.configurations
args << Configuration::Base.app.default_environment if args.empty?
args.each do |env|
next unless config = self.configurations[env]
Configuration::Base.instance_eval(&config)
end
end
end
def detect_handler
['thin', 'mongrel', 'webrick'].each do |server|
begin
return Rack::Handler.get(server)
rescue LoadError
rescue NameError
end
end
end
end
include Helpers
attr_accessor :request, :response, :presenter, :route_store, :restful_routes, :handler_store
def initialize
Pakyow.app = self
@handler_name_to_code = {}
@handler_code_to_name = {}
# This configuration option will be set if a presenter is to be used
if Configuration::Base.app.presenter
# Create a new instance of the presenter
self.presenter = Configuration::Base.app.presenter.new
end
# Load application files
load_app
end
# Interrupts the application and returns response immediately.
#
def halt!
throw :halt, self.response
end
def invoke_route!(route, method=nil)
self.request.working_path = route
self.request.working_method = method if method
block = prepare_route_block(route, self.request.working_method)
throw :new_block, block
end
def invoke_handler!(name_or_code)
# TODO Think about all this
if block = @handler_store[name_or_code]
# we are given a name
code = @handler_name_to_code[name_or_code]
self.response.status = code if code
throw :new_block, block
elsif name = @handler_code_to_name[name_or_code]
# we are given a code
block = @handler_store[name]
self.response.status = name_or_code
throw :new_block, block
else
# no block to be found
# do we assume code if a number and set status?
self.response.status = name_or_code if name_or_code.is_a?(Fixnum)
# still need to stop execution, I think? But do nothing.
throw :new_block, nil
end
end
#TODO move to protected section
def prepare_route_block(route, method)
set_request_format_from_route(route)
controller_block, packet = @route_store.get_block(route, method)
self.request.params.merge!(HashUtils.strhash(packet[:vars]))
self.request.route_spec = packet[:data][:route_spec] if packet[:data]
self.request.restful = packet[:data][:restful] if packet[:data]
controller_block
end
#TODO move to protected section
def trampoline(block)
while block do
block = catch(:new_block) {
block.call()
# Getting here means that call() returned normally (not via a throw)
# By definition, we do not have a 404 since we matched a route to get the block to call
nil
} # end :invoke_route catch block
# If invoke_route! or invoke_handler! was called in the block, block will have a new value.
# If neither was called, block will be nil
if block && self.presenter
self.presenter.prepare_for_request(self.request)
end
end
end
# Called on every request.
#
def call(env)
self.request = Request.new(env)
self.response = Rack::Response.new
self.request.working_path = self.request.path
self.request.working_method = self.request.method
have_initial_route = false
catch(:halt) {
route_block = prepare_route_block(self.request.path, self.request.method)
have_initial_route = true if route_block
if self.presenter
self.presenter.prepare_for_request(self.request)
end
trampoline(route_block) if !Pakyow::Configuration::App.ignore_routes
if self.presenter
self.response.body = [self.presenter.content]
end
# 404 if no route matched and no views were found
if !have_initial_route && (!self.presenter || !self.presenter.presented?)
Log.enter "[404] Not Found"
handler404 = @handler_store[@handler_code_to_name[404]] if @handler_code_to_name[404]
if handler404
catch(:halt) {
if self.presenter
self.presenter.prepare_for_request(self.request)
end
trampoline(handler404)
if self.presenter then
self.response.body = [self.presenter.content]
end
}
end
self.response.status = 404
end
} #end :halt catch block
# This needs to be in the 'return' position (last statement)
finish!
rescue StandardError => error
self.request.error = error
handler500 = @handler_store[@handler_code_to_name[500]] if @handler_code_to_name[500]
if handler500
catch(:halt) {
if self.presenter
self.presenter.prepare_for_request(self.request)
end
trampoline(handler500)
if self.presenter then
self.response.body = [self.presenter.content]
end
} #end :halt catch block
end
self.response.status = 500
if Configuration::Base.app.errors_in_browser
self.response.body = []
self.response.body << "<h4>#{CGI.escapeHTML(error.to_s)}</h4>"
self.response.body << error.backtrace.join("<br />")
end
begin
# caught by other middleware (e.g. logger)
throw :error, error
rescue ArgumentError
end
finish!
end
# Sends a file in the response (immediately). Accepts a File object. Mime
# type is automatically detected.
#
def send_file(source_file, send_as = nil, type = nil)
path = source_file.is_a?(File) ? source_file.path : source_file
send_as ||= path
type ||= Rack::Mime.mime_type(".#{send_as.split('.')[-1]}")
data = ""
File.open(path, "r").each_line { |line| data << line }
self.response = Rack::Response.new(data, self.response.status, self.response.header.merge({ "Content-Type" => type }))
halt!
end
# Sends data in the response (immediately). Accepts the data, mime type,
# and optional file name.
#
def send_data(data, type, file_name = nil)
status = self.response ? self.response.status : 200
headers = self.response ? self.response.header : {}
headers = headers.merge({ "Content-Type" => type })
headers = headers.merge({ "Content-disposition" => "attachment; filename=#{file_name}"}) if file_name
self.response = Rack::Response.new(data, status, headers)
halt!
end
# Redirects to location (immediately).
#
def redirect_to(location, status_code = 302)
headers = self.response ? self.response.header : {}
headers = headers.merge({'Location' => location})
self.response = Rack::Response.new('', status_code, headers)
halt!
end
# Registers a route for GET requests. Route can be defined one of two ways:
# get('/', :ControllerClass, :action_method)
# get('/') { # do something }
#
# Routes for namespaced controllers (e.g. Admin::ControllerClass) can be defined like this:
# get('/', :Admin_ControllerClass, :action_method)
#
def get(route, *args, &block)
register_route(:user, route, block, :get, *args)
end
# Registers a route for POST requests (see #get).
#
def post(route, *args, &block)
register_route(:user, route, block, :post, *args)
end
# Registers a route for PUT requests (see #get).
#
def put(route, *args, &block)
register_route(:user, route, block, :put, *args)
end
# Registers a route for DELETE requests (see #get).
#
def delete(route, *args, &block)
register_route(:user, route, block, :delete, *args)
end
# Registers the default route (see #get).
#
def default(*args, &block)
register_route(:user, '/', block, :get, *args)
end
# Creates REST routes for a resource. Arguments: url, controller, model, hooks
#
def restful(url, controller, *args, &block)
model, hooks = parse_restful_args(args)
with_scope(:url => url.gsub(/^[\/]+|[\/]+$/,""), :model => model) do
nest_scope(&block) if block_given?
@restful_routes ||= {}
@restful_routes[model] ||= {} if model
@@restful_actions.each do |opts|
action_url = current_path
if suffix = opts[:url_suffix]
action_url = File.join(action_url, suffix)
end
# Create the route
register_route(:restful, action_url, nil, opts[:method], controller, opts[:action], hooks)
# Store url for later use (currently used by Binder#action)
@restful_routes[model][opts[:action]] = action_url if model
end
remove_scope
end
end
@@restful_actions = [
{ :action => :edit, :method => :get, :url_suffix => 'edit/:id' },
{ :action => :show, :method => :get, :url_suffix => ':id' },
{ :action => :new, :method => :get, :url_suffix => 'new' },
{ :action => :update, :method => :put, :url_suffix => ':id' },
{ :action => :delete, :method => :delete, :url_suffix => ':id' },
{ :action => :index, :method => :get },
{ :action => :create, :method => :post }
]
def hook(name, controller = nil, action = nil, &block)
block = build_controller_block(controller, action) if controller
@route_store.add_hook(name, block)
end
def handler(name, *args, &block)
code, controller, action = parse_handler_args(args)
if block_given?
@handler_store[name] = block
else
@handler_store[name] = build_controller_block(controller, action)
end
if code
@handler_name_to_code[name] = code
@handler_code_to_name[code] = name
end
end
#TODO: don't like this...
def reload
load_app
end
protected
def parse_route_args(args)
controller = args[0] if args[0] && (args[0].is_a?(Symbol) || args[0].is_a?(String))
action = args[1] if controller
hooks = args[2] if controller
unless controller
hooks = args[0] if args[0] && args[0].is_a?(Hash)
end
return controller, action, hooks
end
def parse_restful_args(args)
model = args[0] if args[0] && (args[0].is_a?(Symbol) || args[0].is_a?(String))
hooks = args[1] if model
unless model
hooks = args[0] if args[0] && args[0].is_a?(Hash)
end
return model, hooks
end
def parse_handler_args(args)
code = args[0] if args[0] && args[0].is_a?(Fixnum)
controller = args[1] if code && args[1]
action = args[2] if controller && args[2]
unless code
controller = args[0] if args[0]
action = args[1] if controller && args[1]
end
return code, controller, action
end
# Handles route registration.
#
def register_route(type, route, block, method, *args)
controller, action, hooks = parse_route_args(args)
if controller
block = build_controller_block(controller, action)
end
data = {:route_type=>type, :route_spec=>route}
if type == :restful
data[:restful] = {:restful_action=>action}
end
@route_store.add_route(route, block, method, data, hooks)
end
def build_controller_block(controller, action)
controller = eval(controller.to_s)
action ||= Configuration::Base.app.default_action
block = lambda {
instance = controller.new
request.controller = instance
request.action = action
instance.send(action)
}
block
end
def set_request_format_from_route(route)
route, format = StringUtils.split_at_last_dot(route)
self.request.format = ((format && (format[format.length - 1, 1] == '/')) ? format[0, format.length - 1] : format)
end
def with_scope(opts)
@scope ||= {}
@scope[:path] ||= []
@scope[:model] = opts[:model]
@scope[:path] << opts[:url]
yield
end
def remove_scope
@scope[:path].pop
end
def nest_scope(&block)
@scope[:path].insert(-1, ":#{StringUtils.underscore(@scope[:model].to_s)}_id")
yield
@scope[:path].pop
end
def current_path
@scope[:path].join('/')
end
def set_cookies
if self.request.cookies && self.request.cookies != {}
self.request.cookies.each do |key, value|
if value.is_a?(Hash)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge(value))
elsif value.is_a?(String)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge({:value => value}))
else
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800 * -1 }.merge({:value => value}))
end
end
end
end
# Reloads all application files in application_path and presenter (if specified).
#
def load_app
load(Configuration::App.application_path)
@loader = Loader.new unless @loader
@loader.load!(Configuration::Base.app.src_dir)
load_handlers
load_routes
# Reload views
if self.presenter
self.presenter.load
end
end
def load_handlers
@handler_store = {}
self.instance_eval(&self.class.handlers_proc) if self.class.handlers_proc
end
def load_routes
@route_store = RouteStore.new
self.instance_eval(&self.class.routes_proc) if self.class.routes_proc
end
# Send the response and cleanup.
#
# Flushes any request cookies onto the response (see set_cookies), then
# finalizes the Rack response triple via Rack::Response#finish.
def finish!
set_cookies
self.response.finish
end
end
end
|
# Gem specification for pakyow-mailer. Paths are resolved relative to the
# gem directory when built from a combined repository checkout.
version = File.read(File.join(File.expand_path("../../VERSION", __FILE__))).strip
# File.exist? replaces the deprecated File.exists? alias (removed in Ruby 3.2).
presenter_path = File.exist?('pakyow-mailer') ? 'pakyow-mailer' : '.'

Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'pakyow-mailer'
  s.version = version
  s.summary = 'A library for delivering Pakyow views as mail.'
  s.description = 'A library for delivering Pakyow views as mail.'
  s.required_ruby_version = '>= 2.0.0'
  s.license = 'MIT'
  s.authors = ['Bryan Powell', 'Bret Young']
  s.email = 'bryan@metabahn.com'
  s.homepage = 'http://pakyow.com'
  s.rubyforge_project = 'pakyow-mailer' # NOTE(review): deprecated RubyGems attribute; kept for compatibility
  s.files = Dir[
    File.join(presenter_path, 'CHANGES'),
    File.join(presenter_path, 'README'),
    File.join(presenter_path, 'MIT-LICENSE'),
    File.join(presenter_path, 'lib','**','*')
  ]
  s.require_path = File.join(presenter_path, 'lib')

  s.add_dependency('pakyow-core', version)
  s.add_dependency('pakyow-presenter', version)
  s.add_dependency('mail', '~> 2.5')
  s.add_dependency('premailer', '~> 1.8')
  s.add_development_dependency('minitest', '~> 5.0')
end
Update the mail dependency
# Gem specification for pakyow-mailer. Paths are resolved relative to the
# gem directory when built from a combined repository checkout.
version = File.read(File.join(File.expand_path("../../VERSION", __FILE__))).strip
# File.exist? replaces the deprecated File.exists? alias (removed in Ruby 3.2).
presenter_path = File.exist?('pakyow-mailer') ? 'pakyow-mailer' : '.'

Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'pakyow-mailer'
  s.version = version
  s.summary = 'A library for delivering Pakyow views as mail.'
  s.description = 'A library for delivering Pakyow views as mail.'
  s.required_ruby_version = '>= 2.0.0'
  s.license = 'MIT'
  s.authors = ['Bryan Powell', 'Bret Young']
  s.email = 'bryan@metabahn.com'
  s.homepage = 'http://pakyow.com'
  s.rubyforge_project = 'pakyow-mailer' # NOTE(review): deprecated RubyGems attribute; kept for compatibility
  s.files = Dir[
    File.join(presenter_path, 'CHANGES'),
    File.join(presenter_path, 'README'),
    File.join(presenter_path, 'MIT-LICENSE'),
    File.join(presenter_path, 'lib','**','*')
  ]
  s.require_path = File.join(presenter_path, 'lib')

  s.add_dependency('pakyow-core', version)
  s.add_dependency('pakyow-presenter', version)
  s.add_dependency('mail', '~> 2.6')
  s.add_dependency('premailer', '~> 1.8')
  s.add_development_dependency('minitest', '~> 5.0')
end
|
# coding: utf-8
# Gem specification for the 'conceptual' DSL gem.
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "conceptual/version"
Gem::Specification.new do |spec|
spec.name = "conceptual"
spec.version = Conceptual::VERSION
spec.authors = ["Takuya \"Mura-Mi\" Murakami"]
spec.email = ["mura.mi.nantoka@gmail.com"]
spec.summary = %q{Simple DSL to describe conceptual model}
spec.description = %q{Simple DSL to describe conceptual model}
spec.homepage = "http://murataku.yokohama/conceptual"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
# NOTE(review): the placeholder "TODO" value intentionally blocks `gem push`
# until a real host is configured.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
# Package everything tracked by git, excluding test/spec/feature files.
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", ">= 1.14.6"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.3"
spec.add_development_dependency "guard-rspec"
spec.add_development_dependency "coveralls"
end
Enable to report test metrics in CircleCI
# coding: utf-8
# Gem specification for the 'conceptual' DSL gem.
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "conceptual/version"
Gem::Specification.new do |spec|
spec.name = "conceptual"
spec.version = Conceptual::VERSION
spec.authors = ["Takuya \"Mura-Mi\" Murakami"]
spec.email = ["mura.mi.nantoka@gmail.com"]
spec.summary = %q{Simple DSL to describe conceptual model}
spec.description = %q{Simple DSL to describe conceptual model}
spec.homepage = "http://murataku.yokohama/conceptual"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
# NOTE(review): the placeholder "TODO" value intentionally blocks `gem push`
# until a real host is configured.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
# Package everything tracked by git, excluding test/spec/feature files.
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", ">= 1.14.6"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.3"
spec.add_development_dependency "guard-rspec"
spec.add_development_dependency "coveralls"
# Emits JUnit-format XML test results so CI (CircleCI) can collect metrics.
spec.add_development_dependency "rspec_junit_formatter"
end
|
# Gem specification for librato-rack.
$:.push File.expand_path("../lib", __FILE__)
require "librato/rack/version"
Gem::Specification.new do |s|
s.name = "librato-rack"
s.version = Librato::Rack::VERSION
s.authors = ["Matt Sanders"]
s.email = ["matt@librato.com"]
s.homepage = "https://github.com/librato/librato-rack"
s.summary = "Use Librato Metrics with your rack application"
s.description = "Rack middleware to report key app statistics and custom instrumentation to the Librato Metrics service."
# Ship app/config/db/lib plus top-level docs; tests packaged separately.
s.files = Dir["{app,config,db,lib}/**/*"] + ["LICENSE", "Rakefile", "README.md", "CHANGELOG.md"]
s.test_files = Dir["test/**/*"]
s.add_dependency "librato-metrics", "~> 1.0.2"
s.add_development_dependency "minitest"
end
Sign gem when packaging
# Gem specification for librato-rack. The gem is signed when the private
# key is present on the packaging machine.
$:.push File.expand_path("../lib", __FILE__)
require "librato/rack/version"

Gem::Specification.new do |s|
  s.name = "librato-rack"
  s.version = Librato::Rack::VERSION
  s.authors = ["Matt Sanders"]
  s.email = ["matt@librato.com"]
  s.homepage = "https://github.com/librato/librato-rack"
  s.summary = "Use Librato Metrics with your rack application"
  s.description = "Rack middleware to report key app statistics and custom instrumentation to the Librato Metrics service."
  s.files = Dir["{app,config,db,lib}/**/*"] + ["LICENSE", "Rakefile", "README.md", "CHANGELOG.md"]
  s.test_files = Dir["test/**/*"]
  s.add_dependency "librato-metrics", "~> 1.0.2"
  s.add_development_dependency "minitest"

  # Sign the gem only when the private key is available locally.
  signing_key = File.expand_path("~/.gem/librato-private_key.pem")
  # File.exist? replaces the deprecated File.exists? alias (removed in Ruby 3.2).
  if File.exist?(signing_key)
    s.signing_key = signing_key
    s.cert_chain = ["certs/librato-public.pem"]
  end
end
|
# Resets the demo database to a known baseline: wipes all domain tables,
# then seeds roles, patients, pharmacies, users, and sample prescriptions
# (each with an associated prior-authorization request). Everything runs
# inside one transaction so a mid-seed failure leaves the database intact.
class DbResetter
  def self.reset
    ActiveRecord::Base.transaction do
      Patient.destroy_all
      PaRequest.destroy_all
      Role.destroy_all
      Prescription.destroy_all
      Pharmacy.destroy_all
      User.destroy_all
      CmmCallback.destroy_all

      Role.create! description: Role::DOCTOR
      Role.create! description: Role::STAFF

      patients = [
        {first_name:'Autopick', last_name:'Smith', gender:'f', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com', bin:'773836', pcn:'MOCKPBM',
         group_id:'ABC1'},
        {first_name:'Autopick', last_name:'Johnson', gender:'m', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com', bin:'773836', pcn:'MOCKPBM',
         group_id:'ABC1'},
        {first_name:'Amber', last_name:'Williams', gender:'f', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com'},
        {first_name:'Nathan', last_name:'Jones', gender:'m', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com'},
        {first_name:'Becky', last_name:'Brown', gender:'f', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com'},
        {first_name:'Mark', last_name:'Davis', gender:'m', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com'},
        {first_name:'Mike', last_name:'Miller', gender:'m', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com'},
        {first_name:'Amanda', last_name:'Wilson', gender:'f', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com'},
        {first_name:'Caitlin', last_name:'Moore', gender:'f', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com'},
        {first_name:'Suzy', last_name:'Taylor', gender:'f', date_of_birth:'10/01/1971',
         street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
         phone_number:'614-555-1212', email:'test@covermymeds.com'}
      ]
      patients.each do |patient|
        Patient.create! patient
      end

      pharmacies = [
        {name:'CVS Pharmacy', street:'759 Neil Ave.', city:'Columbus', state:'OH',
         fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
        {name:'Crosbys', street:'2609 N High St.', city:'Columbus', state:'OH',
         fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
        {name:'Columbus Prescription Pharms', street:'1020 High St', city:'Worthington', state:'OH',
         fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
        {name:'Walgreens', street:'1162 Harrisburg Pike', city:'Columbus', state:'OH',
         fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
        {name:'Giant Eagle', street:'1451 W 5th Ave', city:'Columbus', state:'OH',
         fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
        {name:'Walgreens', street:'3015 E. Livingston Ave', city:'Columbus', state:'OH',
         fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
        {name:'Central Ohio Compounding Pharmacy', street:'7870 Olentangy River Rd.', city:'Columbus', state:'OH',
         fax:'555-555-5555', phone:'555-555-1212', zip:'43201'}
      ]
      pharmacies.each do |pharmacy|
        Pharmacy.create! pharmacy
      end

      User.create!(first_name: 'Alexander',
                   last_name: 'Fleming',
                   role: Role.doctor,
                   npi: '1234567890',
                   email: 'afleming@example.com',
                   practice_name: 'CoverMyClinic',
                   practice_phone_number: '614-999-9999',
                   practice_street_1: '2 Miranova Pl.',
                   practice_street_2: 'Suite 1200',
                   practice_city: 'Columbus',
                   practice_state: 'OH',
                   practice_zip: '43215')
      User.create!(first_name: 'Staff', role: Role.staff, npi: nil)

      drugs = [
        {drug_number: "175366", quantity: 30, frequency: "qD", refills: 1, dispense_as_written: true,
         drug_name: "SpongeBob SquarePants Gummies", active: true },
        {drug_number: "122704", quantity: 30, frequency: "qD", refills: 1, dispense_as_written: true,
         drug_name: "Flintstones Gummies", active: true },
        {drug_number: "003485", quantity: 30, frequency: "qD", refills: 1, dispense_as_written: true,
         drug_name: "Bugs Bunny Vitamins/Minerals", active: true },
        {drug_number: "091833", quantity: 30, frequency: "qD", refills: 1, dispense_as_written: true,
         drug_name: "Bacon Flavor", active: true },
      ]
      Patient.first(drugs.count).zip(drugs).each do |patient, drug|
        # FIX: use create! (not new) so the prescription is persisted before
        # the PA request references it; a prescription built with `new` was
        # never saved, leaving the request pointing at a nil record.
        create_pa(patient.prescriptions.create!(drug.merge(date_prescribed: rand(1.year).seconds.ago)))
      end
    end
  end

  # Creates and submits a prior-authorization request for the given
  # (persisted) prescription via the CoverMyMeds client, then saves it
  # with the values returned by the service.
  def self.create_pa(prescription)
    pa_request = prescription.pa_requests.new(
      user: User.doctors.first,
      prescription: prescription,
      form_id: nil)
    response = CoverMyMeds.default_client.create_request RequestConfigurator.new(pa_request).request
    pa_request.set_cmm_values(response)
    pa_request.save!
  end
end
address nil prescription problem
# Resets the demo database to a known baseline: wipes all domain tables,
# then seeds roles, patients, pharmacies, users, and sample prescriptions
# (each with an associated prior-authorization request). Everything runs
# inside one transaction so a mid-seed failure leaves the database intact.
# WARNING: destructive — destroys all rows in the listed tables.
class DbResetter
def self.reset()
ActiveRecord::Base.transaction do
Patient.destroy_all
PaRequest.destroy_all
Role.destroy_all
Prescription.destroy_all
Pharmacy.destroy_all
User.destroy_all
CmmCallback.destroy_all
Role.create! description: Role::DOCTOR
Role.create! description: Role::STAFF
patients = [
{first_name:'Autopick', last_name:'Smith', gender:'f', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com', bin:'773836', pcn:'MOCKPBM',
group_id:'ABC1'},
{first_name:'Autopick', last_name:'Johnson', gender:'m', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com', bin:'773836', pcn:'MOCKPBM',
group_id:'ABC1'},
{first_name:'Amber', last_name:'Williams', gender:'f', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com'},
{first_name:'Nathan', last_name:'Jones', gender:'m', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com'},
{first_name:'Becky', last_name:'Brown', gender:'f', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com'},
{first_name:'Mark', last_name:'Davis', gender:'m', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com'},
{first_name:'Mike', last_name:'Miller', gender:'m', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com'},
{first_name:'Amanda', last_name:'Wilson', gender:'f', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com'},
{first_name:'Caitlin', last_name:'Moore', gender:'f', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com'},
{first_name:'Suzy', last_name:'Taylor', gender:'f', date_of_birth:'10/01/1971',
street_1:'221 Baker St.', street_2:'Apt B', city:'London', state:'OH', zip:'43210',
phone_number:'614-555-1212', email:'test@covermymeds.com'}
]
patients.each do |patient|
Patient.create! patient
end
pharmacies = [
{name:'CVS Pharmacy', street:'759 Neil Ave.', city:'Columbus', state:'OH',
fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
{name:'Crosbys', street:'2609 N High St.', city:'Columbus', state:'OH',
fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
{name:'Columbus Prescription Pharms', street:'1020 High St', city:'Worthington', state:'OH',
fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
{name:'Walgreens', street:'1162 Harrisburg Pike', city:'Columbus', state:'OH',
fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
{name:'Giant Eagle', street:'1451 W 5th Ave', city:'Columbus', state:'OH',
fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
{name:'Walgreens', street:'3015 E. Livingston Ave', city:'Columbus', state:'OH',
fax:'555-555-5555', phone:'555-555-1212', zip:'43201'},
{name:'Central Ohio Compounding Pharmacy', street:'7870 Olentangy River Rd.', city:'Columbus', state:'OH',
fax:'555-555-5555', phone:'555-555-1212', zip:'43201'}
]
pharmacies.each do |pharmacy|
Pharmacy.create! pharmacy
end
User.create!(first_name: 'Alexander',
last_name: 'Fleming',
role: Role.doctor,
npi: '1234567890',
email: 'afleming@example.com',
practice_name: 'CoverMyClinic',
practice_phone_number: '614-999-9999',
practice_street_1: '2 Miranova Pl.',
practice_street_2: 'Suite 1200',
practice_city: 'Columbus',
practice_state: 'OH',
practice_zip: '43215')
User.create!(first_name: 'Staff', role: Role.staff, npi: nil)
drugs = [
{drug_number: "175366", quantity: 30, frequency: "qD", refills: 1, dispense_as_written: true,
drug_name: "SpongeBob SquarePants Gummies", active: true },
{drug_number: "122704", quantity: 30, frequency: "qD", refills: 1, dispense_as_written: true,
drug_name: "Flintstones Gummies", active: true },
{drug_number: "003485", quantity: 30, frequency: "qD", refills: 1, dispense_as_written: true,
drug_name: "Bugs Bunny Vitamins/Minerals", active: true },
{drug_number: "091833", quantity: 30, frequency: "qD", refills: 1, dispense_as_written: true,
drug_name: "Bacon Flavor", active: true },
]
# create! (rather than new) persists each prescription before the PA
# request references it, avoiding a nil-prescription association.
Patient.first(drugs.count).zip(drugs).each do |patient, drug|
create_pa(patient.prescriptions.create!(drug.merge(date_prescribed: rand(1.year).seconds.ago)))
end
end
end
# Creates and submits a prior-authorization request for the given
# (persisted) prescription via the CoverMyMeds client, then saves it
# with the values returned by the service.
def self.create_pa(prescription)
pa_request = prescription.pa_requests.new(
user: User.doctors.first,
prescription: prescription,
form_id: nil)
response = CoverMyMeds.default_client.create_request RequestConfigurator.new(pa_request).request
pa_request.set_cmm_values(response)
pa_request.save!
end
end
|
module Delayed
# Raised when a stored job handler cannot be reconstructed from its YAML.
class DeserializationError < StandardError
end
# A job object that is persisted to the database.
# Contains the work object as a YAML field.
class Job < ActiveRecord::Base
  MAX_ATTEMPTS = 25
  MAX_RUN_TIME = 4.hours
  set_table_name :delayed_jobs

  # By default failed jobs are destroyed after too many attempts.
  # If you want to keep them around (perhaps to inspect the reason
  # for the failure), set this to false.
  cattr_accessor :destroy_failed_jobs
  self.destroy_failed_jobs = true

  # Every worker has a unique name which by default is the pid of the process.
  # There are some advantages to overriding this with something which survives worker restarts:
  # workers can safely resume working on tasks which are locked by themselves - the worker will assume it crashed before.
  cattr_accessor :worker_name
  self.worker_name = "host:#{Socket.gethostname} pid:#{Process.pid}" rescue "pid:#{Process.pid}"

  # Candidate predicate: due and unlocked (or stale-locked), or already ours, and not failed.
  NextTaskSQL = '(run_at <= ? AND (locked_at IS NULL OR locked_at < ?) OR (locked_by = ?)) AND failed_at IS NULL'
  NextTaskOrder = 'priority DESC, run_at ASC'
  # Extracts the class name from a YAML object tag when full deserialization fails.
  ParseObjectFromYaml = /\!ruby\/\w+\:([^\s]+)/

  cattr_accessor :min_priority, :max_priority
  self.min_priority = nil
  self.max_priority = nil

  # When a worker is exiting, make sure we don't have any locked jobs.
  def self.clear_locks!
    update_all("locked_by = null, locked_at = null", ["locked_by = ?", worker_name])
  end

  # Returns the failure timestamp (truthy) once the job has permanently failed.
  def failed?
    failed_at
  end
  alias_method :failed, :failed?

  # Lazily deserializes the YAML 'handler' column into the work object.
  def payload_object
    @payload_object ||= deserialize(self['handler'])
  end

  # Human-readable job name: the payload's display_name if it defines one,
  # otherwise the payload's class name.
  def name
    @name ||= begin
      payload = payload_object
      if payload.respond_to?(:display_name)
        payload.display_name
      else
        payload.class.name
      end
    end
  end

  def payload_object=(object)
    self['handler'] = object.to_yaml
  end

  # Reschedule the job in the future (when a job fails).
  # Uses an exponential scale depending on the number of failed attempts.
  def reschedule(message, backtrace = [], time = nil)
    if self.attempts < MAX_ATTEMPTS
      # Backoff uses the pre-increment attempts count: now + attempts^4 + 5s.
      time ||= Job.db_time_now + (attempts ** 4) + 5
      self.attempts += 1
      self.run_at = time
      self.last_error = message + "\n" + backtrace.join("\n")
      self.unlock
      save!
    else
      logger.info "* [JOB] PERMANENTLY removing #{self.name} because of #{attempts} consecutive failures."
      destroy_failed_jobs ? destroy : update_attribute(:failed_at, Time.now)
    end
  end

  # Try to run one job. Returns true/false (work done/work failed) or nil if job can't be locked.
  def run_with_lock(max_run_time, worker_name)
    logger.info "* [JOB] acquiring lock on #{name}"
    unless lock_exclusively!(max_run_time, worker_name)
      # We did not get the lock, some other worker process must have
      logger.warn "* [JOB] failed to acquire exclusive lock for #{name}"
      return nil # no work done
    end
    begin
      runtime = Benchmark.realtime do
        invoke_job # TODO: raise error if takes longer than max_run_time
        destroy
      end
      # TODO: warn if runtime > max_run_time ?
      logger.info "* [JOB] #{name} completed after %.4f" % runtime
      return true # did work
    rescue Exception => e
      # NOTE(review): deliberately rescues Exception (not StandardError) so
      # *any* failure reschedules the job - beware this also swallows
      # SignalException/SystemExit raised inside a job.
      reschedule e.message, e.backtrace
      log_exception(e)
      return false # work failed
    end
  end

  # Add a job to the queue
  def self.enqueue(*args, &block)
    object = block_given? ? EvaledJob.new(&block) : args.shift
    unless object.respond_to?(:perform) || block_given?
      raise ArgumentError, 'Cannot enqueue items which do not respond to perform'
    end
    priority = args.first || 0
    run_at = args[1]
    Job.create(:payload_object => object, :priority => priority.to_i, :run_at => run_at)
  end

  # Find a few candidate jobs to run (in case some immediately get locked by others).
  # Return in random order to prevent everyone trying to do the same head job at once.
  def self.find_available(limit = 5, max_run_time = MAX_RUN_TIME)
    time_now = db_time_now
    sql = NextTaskSQL.dup
    conditions = [time_now, time_now - max_run_time, worker_name]
    if self.min_priority
      sql << ' AND (priority >= ?)'
      conditions << min_priority
    end
    if self.max_priority
      sql << ' AND (priority <= ?)'
      conditions << max_priority
    end
    conditions.unshift(sql)
    records = ActiveRecord::Base.silence do
      find(:all, :conditions => conditions, :order => NextTaskOrder, :limit => limit)
    end
    records.shuffle # clearer and linear-time equivalent of sort_by { rand }
  end

  # Run the next job we can get an exclusive lock on.
  # If no jobs are left we return nil
  def self.reserve_and_run_one_job(max_run_time = MAX_RUN_TIME)
    # We get up to 5 jobs from the db. In case we cannot get exclusive access to a job we try the next.
    # This leads to a more even distribution of jobs across the worker processes.
    find_available(5, max_run_time).each do |job|
      t = job.run_with_lock(max_run_time, worker_name)
      return t unless t == nil # return if we did work (good or bad)
    end
    nil # we didn't do any work, all 5 were not lockable
  end

  # Lock this job for this worker.
  # Returns true if we have the lock, false otherwise.
  def lock_exclusively!(max_run_time, worker = worker_name)
    now = self.class.db_time_now
    affected_rows = if locked_by != worker
      # We don't own this job so we will update the locked_by name and the locked_at
      self.class.update_all(["locked_at = ?, locked_by = ?", now, worker], ["id = ? and (locked_at is null or locked_at < ?)", id, (now - max_run_time.to_i)])
    else
      # We already own this job, this may happen if the job queue crashes.
      # Simply resume and update the locked_at
      self.class.update_all(["locked_at = ?", now], ["id = ? and locked_by = ?", id, worker])
    end
    if affected_rows == 1
      self.locked_at = now
      self.locked_by = worker
      return true
    else
      return false
    end
  end

  # Unlock this job (note: not saved to DB)
  def unlock
    self.locked_at = nil
    self.locked_by = nil
  end

  # This is a good hook if you need to report job processing errors in additional or different ways
  def log_exception(error)
    logger.error "* [JOB] #{name} failed with #{error.class.name}: #{error.message} - #{attempts} failed attempts"
    logger.error(error)
  end

  # Do num jobs and return stats on success/failure.
  # Exit early if interrupted.
  def self.work_off(num = 100)
    success, failure = 0, 0
    num.times do
      case self.reserve_and_run_one_job
      when true
        success += 1
      when false
        failure += 1
      else
        break # leave if no work could be done
      end
      break if $exit # leave if we're exiting
    end
    return [success, failure]
  end

  # Moved into its own method so that new_relic can trace it.
  def invoke_job
    payload_object.perform
  end

  private

  # Reconstruct the work object from its YAML source, attempting to
  # autoload its class when it is not yet defined.
  def deserialize(source)
    handler = YAML.load(source) rescue nil
    unless handler.respond_to?(:perform)
      if handler.nil? && source =~ ParseObjectFromYaml
        handler_class = $1
      end
      attempt_to_load(handler_class || handler.class)
      handler = YAML.load(source)
    end
    return handler if handler.respond_to?(:perform)
    raise DeserializationError,
      'Job failed to load: Unknown handler. Try to manually require the appropriate file.'
  rescue TypeError, LoadError, NameError => e
    raise DeserializationError,
      "Job failed to load: #{e.message}. Try to manually require the required file."
  end

  # Constantize the object so that ActiveSupport can attempt
  # its auto loading magic. Will raise LoadError if not successful.
  def attempt_to_load(klass)
    klass.constantize
  end

  # Get the current time (GMT or local depending on DB)
  # Note: This does not ping the DB to get the time, so all your clients
  # must have synchronized clocks.
  def self.db_time_now
    (ActiveRecord::Base.default_timezone == :utc) ? Time.now.utc : Time.now
  end

  protected

  def before_save
    self.run_at ||= self.class.db_time_now
  end
end
# Wraps a string of Ruby source (produced by the enqueue block) so it can
# be stored in the queue and executed later.
class EvaledJob
def initialize
@job = yield
end
def perform
# NOTE(review): eval of stored strings executes arbitrary code; ensure
# job payloads can never be attacker-controlled.
eval(@job)
end
end
end
fixing Time.now to be Time.zone.now to honor the app set local TimeZone
module Delayed
# Raised when a stored job handler cannot be reconstructed from its YAML.
class DeserializationError < StandardError
end
# A job object that is persisted to the database.
# Contains the work object as a YAML field.
class Job < ActiveRecord::Base
MAX_ATTEMPTS = 25
MAX_RUN_TIME = 4.hours
set_table_name :delayed_jobs
# By default failed jobs are destroyed after too many attempts.
# If you want to keep them around (perhaps to inspect the reason
# for the failure), set this to false.
cattr_accessor :destroy_failed_jobs
self.destroy_failed_jobs = true
# Every worker has a unique name which by default is the pid of the process.
# There are some advantages to overriding this with something which survives worker restarts:
# Workers can safely resume working on tasks which are locked by themselves. The worker will assume that it crashed before.
cattr_accessor :worker_name
self.worker_name = "host:#{Socket.gethostname} pid:#{Process.pid}" rescue "pid:#{Process.pid}"
# Candidate predicate: due and unlocked (or stale-locked), or already ours, and not failed.
NextTaskSQL = '(run_at <= ? AND (locked_at IS NULL OR locked_at < ?) OR (locked_by = ?)) AND failed_at IS NULL'
NextTaskOrder = 'priority DESC, run_at ASC'
# Extracts the class name from a YAML object tag when full deserialization fails.
ParseObjectFromYaml = /\!ruby\/\w+\:([^\s]+)/
cattr_accessor :min_priority, :max_priority
self.min_priority = nil
self.max_priority = nil
# When a worker is exiting, make sure we don't have any locked jobs.
def self.clear_locks!
update_all("locked_by = null, locked_at = null", ["locked_by = ?", worker_name])
end
# Returns the failure timestamp (truthy) once the job has permanently failed.
def failed?
failed_at
end
alias_method :failed, :failed?
# Lazily deserializes the YAML 'handler' column into the work object.
def payload_object
@payload_object ||= deserialize(self['handler'])
end
# Human-readable job name: the payload's display_name if it defines one,
# otherwise the payload's class name.
def name
@name ||= begin
payload = payload_object
if payload.respond_to?(:display_name)
payload.display_name
else
payload.class.name
end
end
end
def payload_object=(object)
self['handler'] = object.to_yaml
end
# Reschedule the job in the future (when a job fails).
# Uses an exponential scale depending on the number of failed attempts.
def reschedule(message, backtrace = [], time = nil)
if self.attempts < MAX_ATTEMPTS
# Backoff uses the pre-increment attempts count: now + attempts^4 + 5s.
time ||= Job.db_time_now + (attempts ** 4) + 5
self.attempts += 1
self.run_at = time
self.last_error = message + "\n" + backtrace.join("\n")
self.unlock
save!
else
logger.info "* [JOB] PERMANENTLY removing #{self.name} because of #{attempts} consequetive failures."
destroy_failed_jobs ? destroy : update_attribute(:failed_at, Time.now)
end
end
# Try to run one job. Returns true/false (work done/work failed) or nil if job can't be locked.
def run_with_lock(max_run_time, worker_name)
logger.info "* [JOB] aquiring lock on #{name}"
unless lock_exclusively!(max_run_time, worker_name)
# We did not get the lock, some other worker process must have
logger.warn "* [JOB] failed to aquire exclusive lock for #{name}"
return nil # no work done
end
begin
runtime = Benchmark.realtime do
invoke_job # TODO: raise error if takes longer than max_run_time
destroy
end
# TODO: warn if runtime > max_run_time ?
logger.info "* [JOB] #{name} completed after %.4f" % runtime
return true # did work
# NOTE(review): deliberately rescues Exception (not StandardError) so any
# failure reschedules the job; this also swallows SignalException/SystemExit.
rescue Exception => e
reschedule e.message, e.backtrace
log_exception(e)
return false # work failed
end
end
# Add a job to the queue
def self.enqueue(*args, &block)
object = block_given? ? EvaledJob.new(&block) : args.shift
unless object.respond_to?(:perform) || block_given?
raise ArgumentError, 'Cannot enqueue items which do not respond to perform'
end
priority = args.first || 0
run_at = args[1]
Job.create(:payload_object => object, :priority => priority.to_i, :run_at => run_at)
end
# Find a few candidate jobs to run (in case some immediately get locked by others).
# Return in random order prevent everyone trying to do same head job at once.
def self.find_available(limit = 5, max_run_time = MAX_RUN_TIME)
time_now = db_time_now
sql = NextTaskSQL.dup
conditions = [time_now, time_now - max_run_time, worker_name]
if self.min_priority
sql << ' AND (priority >= ?)'
conditions << min_priority
end
if self.max_priority
sql << ' AND (priority <= ?)'
conditions << max_priority
end
conditions.unshift(sql)
records = ActiveRecord::Base.silence do
find(:all, :conditions => conditions, :order => NextTaskOrder, :limit => limit)
end
records.sort_by { rand() }
end
# Run the next job we can get an exclusive lock on.
# If no jobs are left we return nil
def self.reserve_and_run_one_job(max_run_time = MAX_RUN_TIME)
# We get up to 5 jobs from the db. In case we cannot get exclusive access to a job we try the next.
# this leads to a more even distribution of jobs across the worker processes
find_available(5, max_run_time).each do |job|
t = job.run_with_lock(max_run_time, worker_name)
return t unless t == nil # return if we did work (good or bad)
end
nil # we didn't do any work, all 5 were not lockable
end
# Lock this job for this worker.
# Returns true if we have the lock, false otherwise.
def lock_exclusively!(max_run_time, worker = worker_name)
now = self.class.db_time_now
affected_rows = if locked_by != worker
# We don't own this job so we will update the locked_by name and the locked_at
self.class.update_all(["locked_at = ?, locked_by = ?", now, worker], ["id = ? and (locked_at is null or locked_at < ?)", id, (now - max_run_time.to_i)])
else
# We already own this job, this may happen if the job queue crashes.
# Simply resume and update the locked_at
self.class.update_all(["locked_at = ?", now], ["id = ? and locked_by = ?", id, worker])
end
if affected_rows == 1
self.locked_at = now
self.locked_by = worker
return true
else
return false
end
end
# Unlock this job (note: not saved to DB)
def unlock
self.locked_at = nil
self.locked_by = nil
end
# This is a good hook if you need to report job processing errors in additional or different ways
def log_exception(error)
logger.error "* [JOB] #{name} failed with #{error.class.name}: #{error.message} - #{attempts} failed attempts"
logger.error(error)
end
# Do num jobs and return stats on success/failure.
# Exit early if interrupted.
def self.work_off(num = 100)
success, failure = 0, 0
num.times do
case self.reserve_and_run_one_job
when true
success += 1
when false
failure += 1
else
break # leave if no work could be done
end
break if $exit # leave if we're exiting
end
return [success, failure]
end
# Moved into its own method so that new_relic can trace it.
def invoke_job
payload_object.perform
end
private
# Reconstruct the work object from its YAML source, attempting to
# autoload its class when it is not yet defined.
def deserialize(source)
handler = YAML.load(source) rescue nil
unless handler.respond_to?(:perform)
if handler.nil? && source =~ ParseObjectFromYaml
handler_class = $1
end
attempt_to_load(handler_class || handler.class)
handler = YAML.load(source)
end
return handler if handler.respond_to?(:perform)
raise DeserializationError,
'Job failed to load: Unknown handler. Try to manually require the appropiate file.'
rescue TypeError, LoadError, NameError => e
raise DeserializationError,
"Job failed to load: #{e.message}. Try to manually require the required file."
end
# Constantize the object so that ActiveSupport can attempt
# its auto loading magic. Will raise LoadError if not successful.
def attempt_to_load(klass)
klass.constantize
end
# Get the current time (GMT or local depending on DB)
# Note: This does not ping the DB to get the time, so all your clients
# must have synchronized clocks.
def self.db_time_now
# Time.zone.now honors the application's configured time zone
# (requires ActiveSupport's Time.zone to be set).
(ActiveRecord::Base.default_timezone == :utc) ? Time.now.utc : Time.zone.now
end
protected
# Default run_at to "now" for jobs enqueued without an explicit time.
def before_save
self.run_at ||= self.class.db_time_now
end
end
# Wraps a string of Ruby source (produced by the enqueue block) so it can
# be stored in the queue and executed later.
class EvaledJob
def initialize
@job = yield
end
def perform
# NOTE(review): eval of stored strings executes arbitrary code; ensure
# job payloads can never be attacker-controlled.
eval(@job)
end
end
end
|
module Delayed
# Raised when a stored job handler cannot be reconstructed from its YAML.
class DeserializationError < StandardError
end
class Job < ActiveRecord::Base
MAX_ATTEMPTS = 25
set_table_name :delayed_jobs
cattr_accessor :worker_name, :min_priority, :max_priority
self.worker_name = "pid:#{Process.pid}"
self.min_priority = nil
self.max_priority = nil
NextTaskSQL = '(`locked_by` = ?) OR (`run_at` <= ? AND (`locked_at` IS NULL OR `locked_at` < ?))'
NextTaskOrder = 'priority DESC, run_at ASC'
ParseObjectFromYaml = /\!ruby\/\w+\:([^\s]+)/
class LockError < StandardError
end
def self.clear_locks!
connection.execute "UPDATE #{table_name} SET `locked_by`=NULL, `locked_at`=NULL WHERE `locked_by`=#{quote_value(worker_name)}"
end
def payload_object
@payload_object ||= deserialize(self['handler'])
end
def name
text = handler.gsub(/\n/, ' ')
"#{id} (#{text.length > 40 ? "#{text[0..40]}..." : text})"
end
def payload_object=(object)
self['handler'] = object.to_yaml
end
def reschedule(message, time = nil)
if self.attempts < MAX_ATTEMPTS
time ||= Job.db_time_now + (attempts ** 4) + 5
self.attempts += 1
self.run_at = time
self.last_error = message
self.unlock
save!
else
logger.info "* [JOB] PERMANENTLY removing #{self.name} because of #{attempts} consequetive failures."
destroy
end
end
def self.enqueue(object, priority = 0)
unless object.respond_to?(:perform)
raise ArgumentError, 'Cannot enqueue items which do not respond to perform'
end
Job.create(:payload_object => object, :priority => priority.to_i)
end
def self.find_available(limit = 5)
time_now = db_time_now
sql = NextTaskSQL.dup
conditions = [time_now, time_now, worker_name]
if self.min_priority
sql << ' AND (`priority` >= ?)'
conditions << min_priority
end
if self.max_priority
sql << ' AND (`priority` <= ?)'
conditions << max_priority
end
conditions.unshift(sql)
ActiveRecord::Base.silence do
find(:all, :conditions => conditions, :order => NextTaskOrder, :limit => limit)
end
end
# Get the payload of the next job we can get an exclusive lock on.
# If no jobs are left we return nil
def self.reserve(max_run_time = 4.hours)
# We get up to 5 jobs from the db. In face we cannot get exclusive access to a job we try the next.
# this leads to a more even distribution of jobs across the worker processes
find_available(5).each do |job|
begin
logger.info "* [JOB] aquiring lock on #{job.name}"
job.lock_exclusively!(max_run_time, worker_name)
runtime = Benchmark.realtime do
yield job.payload_object
job.destroy
end
logger.info "* [JOB] #{job.name} completed after %.4f" % runtime
return job
rescue LockError
# We did not get the lock, some other worker process must have
logger.warn "* [JOB] failed to aquire exclusive lock for #{job.name}"
rescue StandardError => e
job.reschedule e.message
logger.error "* [JOB] #{job.name} failed with #{e.class.name}: #{e.message} - #{job.attempts} failed attempts"
logger.error(e)
return job
end
end
nil
end
# This method is used internally by reserve method to ensure exclusive access
# to the given job. It will rise a LockError if it cannot get this lock.
def lock_exclusively!(max_run_time, worker = worker_name)
now = self.class.db_time_now
affected_rows = if locked_by != worker
# We don't own this job so we will update the locked_by name and the locked_at
connection.update(<<-end_sql, "#{self.class.name} Update to aquire exclusive lock")
UPDATE #{self.class.table_name}
SET `locked_at`=#{quote_value(now)}, `locked_by`=#{quote_value(worker)}
WHERE #{self.class.primary_key} = #{quote_value(id)} AND (`locked_at` IS NULL OR `locked_at` < #{quote_value(now - max_run_time.to_i)})
end_sql
else
# We already own this job, this may happen if the job queue crashes.
# Simply resume and update the locked_at
connection.update(<<-end_sql, "#{self.class.name} Update exclusive lock")
UPDATE #{self.class.table_name}
SET `locked_at`=#{quote_value(now)}
WHERE #{self.class.primary_key} = #{quote_value(id)} AND (`locked_by`=#{quote_value(worker)})
end_sql
end
unless affected_rows == 1
raise LockError, "Attempted to aquire exclusive lock failed"
end
self.locked_at = now
self.locked_by = worker
end
def unlock
self.locked_at = nil
self.locked_by = nil
end
def self.work_off(num = 100)
success, failure = 0, 0
num.times do
job = self.reserve do |j|
begin
j.perform
success += 1
rescue
failure += 1
raise
end
end
break if job.nil?
end
return [success, failure]
end
private
def deserialize(source)
attempt_to_load_file = true
begin
handler = YAML.load(source) rescue nil
return handler if handler.respond_to?(:perform)
if handler.nil?
if source =~ ParseObjectFromYaml
# Constantize the object so that ActiveSupport can attempt
# its auto loading magic. Will raise LoadError if not successful.
attempt_to_load($1)
# If successful, retry the yaml.load
handler = YAML.load(source)
return handler if handler.respond_to?(:perform)
end
end
if handler.is_a?(YAML::Object)
# Constantize the object so that ActiveSupport can attempt
# its auto loading magic. Will raise LoadError if not successful.
attempt_to_load(handler.class)
# If successful, retry the yaml.load
handler = YAML.load(source)
return handler if handler.respond_to?(:perform)
end
raise DeserializationError, 'Job failed to load: Unknown handler. Try to manually require the appropiate file.'
rescue TypeError, LoadError, NameError => e
raise DeserializationError, "Job failed to load: #{e.message}. Try to manually require the required file."
end
end
def attempt_to_load(klass)
klass.constantize
end
def self.db_time_now
(ActiveRecord::Base.default_timezone == :utc) ? Time.now.utc : Time.now
end
protected
def before_save
self.run_at ||= self.class.db_time_now
end
end
end
Extract log_exception method so that applications can use it as a hook to supply custom exception logging/notification, like Hoptoad for example.
module Delayed
  # Raised when a stored job payload cannot be deserialized back into an
  # object that responds to #perform.
  class DeserializationError < StandardError
  end

  # An ActiveRecord-backed job: the payload is an arbitrary object
  # serialized to YAML. Workers lock rows, run the payload's #perform,
  # and destroy the row on success.
  class Job < ActiveRecord::Base
    MAX_ATTEMPTS = 25
    set_table_name :delayed_jobs

    # Worker identity and optional priority window used when picking jobs.
    cattr_accessor :worker_name, :min_priority, :max_priority
    self.worker_name = "pid:#{Process.pid}"
    self.min_priority = nil
    self.max_priority = nil

    # A job is runnable when we already own its lock, or it is due and
    # unlocked (or its lock has expired).
    NextTaskSQL = '(`locked_by` = ?) OR (`run_at` <= ? AND (`locked_at` IS NULL OR `locked_at` < ?))'
    NextTaskOrder = 'priority DESC, run_at ASC'

    # Extracts the class name from a YAML type tag such as "!ruby/object:Foo".
    ParseObjectFromYaml = /\!ruby\/\w+\:([^\s]+)/

    # Raised when an exclusive lock on a job row cannot be obtained.
    class LockError < StandardError
    end

    # Release every lock held by this worker (e.g. after a crash/restart).
    def self.clear_locks!
      connection.execute "UPDATE #{table_name} SET `locked_by`=NULL, `locked_at`=NULL WHERE `locked_by`=#{quote_value(worker_name)}"
    end

    # The deserialized payload object (memoized).
    def payload_object
      @payload_object ||= deserialize(self['handler'])
    end

    # Short human-readable description: id plus a truncated handler dump.
    def name
      text = handler.gsub(/\n/, ' ')
      "#{id} (#{text.length > 40 ? "#{text[0..40]}..." : text})"
    end

    def payload_object=(object)
      self['handler'] = object.to_yaml
    end

    # Record the error and push run_at back with a polynomial backoff,
    # or destroy the job once MAX_ATTEMPTS is reached.
    def reschedule(message, time = nil)
      if self.attempts < MAX_ATTEMPTS
        time ||= Job.db_time_now + (attempts ** 4) + 5
        self.attempts += 1
        self.run_at = time
        self.last_error = message
        self.unlock
        save!
      else
        logger.info "* [JOB] PERMANENTLY removing #{self.name} because of #{attempts} consequetive failures."
        destroy
      end
    end

    # Queue a new job. The object must respond to #perform.
    def self.enqueue(object, priority = 0)
      unless object.respond_to?(:perform)
        raise ArgumentError, 'Cannot enqueue items which do not respond to perform'
      end
      Job.create(:payload_object => object, :priority => priority.to_i)
    end

    # Find up to +limit+ runnable jobs, honouring the optional priority window.
    def self.find_available(limit = 5)
      time_now = db_time_now
      sql = NextTaskSQL.dup
      conditions = [time_now, time_now, worker_name]
      if self.min_priority
        sql << ' AND (`priority` >= ?)'
        conditions << min_priority
      end
      if self.max_priority
        sql << ' AND (`priority` <= ?)'
        conditions << max_priority
      end
      conditions.unshift(sql)
      ActiveRecord::Base.silence do
        find(:all, :conditions => conditions, :order => NextTaskOrder, :limit => limit)
      end
    end

    # Get the payload of the next job we can get an exclusive lock on,
    # yield it, and destroy the job when the block completes.
    # Returns the job that ran (even on failure) or nil if no work was locked.
    def self.reserve(max_run_time = 4.hours)
      # We get up to 5 jobs from the db. In case we cannot get exclusive
      # access to a job we try the next; this leads to a more even
      # distribution of jobs across the worker processes.
      find_available(5).each do |job|
        begin
          logger.info "* [JOB] aquiring lock on #{job.name}"
          job.lock_exclusively!(max_run_time, worker_name)
          runtime = Benchmark.realtime do
            yield job.payload_object
            job.destroy
          end
          logger.info "* [JOB] #{job.name} completed after %.4f" % runtime
          return job
        rescue LockError
          # We did not get the lock, some other worker process must have
          logger.warn "* [JOB] failed to aquire exclusive lock for #{job.name}"
        rescue StandardError => e
          job.reschedule e.message
          # Fix: was `log_exception(job, error)` — `error` is not defined in
          # this scope; the rescued exception is bound to `e`.
          log_exception(job, e)
          return job
        end
      end
      nil
    end

    # Used internally by reserve to ensure exclusive access to the given
    # job. Raises LockError if the lock cannot be obtained.
    def lock_exclusively!(max_run_time, worker = worker_name)
      now = self.class.db_time_now
      affected_rows = if locked_by != worker
        # We don't own this job so we will update the locked_by name and the locked_at
        connection.update(<<-end_sql, "#{self.class.name} Update to aquire exclusive lock")
          UPDATE #{self.class.table_name}
          SET `locked_at`=#{quote_value(now)}, `locked_by`=#{quote_value(worker)}
          WHERE #{self.class.primary_key} = #{quote_value(id)} AND (`locked_at` IS NULL OR `locked_at` < #{quote_value(now - max_run_time.to_i)})
        end_sql
      else
        # We already own this job; this may happen if the job queue crashes.
        # Simply resume and refresh the locked_at timestamp.
        connection.update(<<-end_sql, "#{self.class.name} Update exclusive lock")
          UPDATE #{self.class.table_name}
          SET `locked_at`=#{quote_value(now)}
          WHERE #{self.class.primary_key} = #{quote_value(id)} AND (`locked_by`=#{quote_value(worker)})
        end_sql
      end
      unless affected_rows == 1
        raise LockError, "Attempted to aquire exclusive lock failed"
      end
      self.locked_at = now
      self.locked_by = worker
    end

    # Clear the lock fields in memory (not persisted here).
    def unlock
      self.locked_at = nil
      self.locked_by = nil
    end

    # This is a good hook if you need to report job processing errors in
    # additional or different ways (e.g. Hoptoad).
    def self.log_exception(job, error)
      logger.error "* [JOB] #{job.name} failed with #{error.class.name}: #{error.message} - #{job.attempts} failed attempts"
      logger.error(error)
    end

    # Run up to +num+ jobs; returns [successes, failures].
    # Stops early when no job could be reserved.
    def self.work_off(num = 100)
      success, failure = 0, 0
      num.times do
        job = self.reserve do |j|
          begin
            j.perform
            success += 1
          rescue
            failure += 1
            raise
          end
        end
        break if job.nil?
      end
      return [success, failure]
    end

    private

    # Rebuild the payload object from its YAML +source+, constantizing the
    # referenced class (via ActiveSupport autoloading) and retrying the
    # load when the first attempt fails. Raises DeserializationError.
    # Fixes: removed the unused local `attempt_to_load_file`; corrected the
    # "appropiate" misspelling in the error message.
    def deserialize(source)
      handler = YAML.load(source) rescue nil
      return handler if handler.respond_to?(:perform)
      if handler.nil? && source =~ ParseObjectFromYaml
        # Constantize the class named in the YAML tag, then retry the load.
        attempt_to_load($1)
        handler = YAML.load(source)
        return handler if handler.respond_to?(:perform)
      end
      if handler.is_a?(YAML::Object)
        # Same trick when YAML produced a generic placeholder object.
        attempt_to_load(handler.class)
        handler = YAML.load(source)
        return handler if handler.respond_to?(:perform)
      end
      raise DeserializationError, 'Job failed to load: Unknown handler. Try to manually require the appropriate file.'
    rescue TypeError, LoadError, NameError => e
      raise DeserializationError, "Job failed to load: #{e.message}. Try to manually require the required file."
    end

    # Constantize so ActiveSupport can attempt its autoloading magic.
    # Raises LoadError/NameError when the class cannot be found.
    def attempt_to_load(klass)
      klass.constantize
    end

    # Current time, GMT or local depending on the AR default timezone.
    # NOTE(review): uses Time.now (not Time.zone.now) for the non-UTC
    # case — confirm this is intended.
    def self.db_time_now
      (ActiveRecord::Base.default_timezone == :utc) ? Time.now.utc : Time.now
    end

    protected

    # Default run_at to "now" so new jobs become runnable immediately.
    def before_save
      self.run_at ||= self.class.db_time_now
    end
  end
end
require_relative 'assert_system'
require_relative 'failed'
require_relative 'print_to'
require 'json'
# The Dockerfile installs
# o) curl
# o) the Travis gem
# o) the /app/post_trigger.sh script
# Validates this repo's (image_name, from, test_framework) triple against
# the published images_info list, and triggers Travis builds of dependent
# repos. The surrounding Docker image provides curl, the travis gem and
# the /app/post_trigger.sh script.
class Travis

  def initialize(triple)
    @triple = triple
  end

  # Prints the triple when it matches the published list; otherwise
  # prints a diagnostic and exits with a failing status.
  def validate_triple
    if validated?
      print_to STDOUT, triple.inspect
    else
      print_to STDERR, *triple_diagnostic
      exit false
    end
  end

  # Kicks off Travis builds for every repo whose base image is ours.
  def trigger_dependents
    trigger(dependent_repos)
  end

  private

  attr_reader :triple

  include AssertSystem
  include Failed
  include PrintTo

  # - - - - - - - - - - - - - - - - - - - - -

  def image_name
    triple['image_name']
  end

  def from
    triple['from']
  end

  def test_framework?
    triple['test_framework']
  end

  # - - - - - - - - - - - - - - - - - - - - -

  # True when the published triples contain an entry whose image_name,
  # from and test_framework all match ours.
  def validated?
    found = triples.find { |_,tri| tri['image_name'] == image_name }
    return false if found.nil?
    # TODO: check if > 1 found
    found[1]['from'] == from &&
      found[1]['test_framework'] == test_framework?
  end

  # - - - - - - - - - - - - - - - - - - - - -

  def triples
    @triples ||= curled_triples
  end

  # Downloads and parses the published images_info.json.
  def curled_triples
    assert_system "curl -O --silent --fail #{triples_url}"
    json_parse('./' + triples_filename)
  end

  def triples_url
    github_org = 'https://raw.githubusercontent.com/cyber-dojo-languages'
    repo = 'images_info'
    branch = 'master'
    "#{github_org}/#{repo}/#{branch}/#{triples_filename}"
  end

  def triples_filename
    'images_info.json'
  end

  # Lines describing the entry that should exist for this repo.
  def triple_diagnostic
    [ '',
      triples_url,
      'does not contain an entry for:',
      '',
      "#{quoted('REPO')}: {",
      "  #{quoted('from')}: #{quoted(from)},",
      "  #{quoted('image_name')}: #{quoted(image_name)},",
      "  #{quoted('test_framework')}: #{test_framework?}",
      '},',
      ''
    ]
  end

  # - - - - - - - - - - - - - - - - - - - - -

  # POSTs a build trigger for each dependent repo.
  def trigger(repos)
    print_to STDOUT, "number of dependent repos: #{repos.size}"
    # Travis limits the number of triggers to 10 per hour.
    # You can see this in the trigger reponse:
    #    { ... "remaining_requests": 10, ... }
    # Once you get past 10, the output you get is
    #    Forbidden
    # Some repos have more than 10 immediate dependents on their own.
    # I shuffle the repos so, over time, all dependents are triggered.
    repos.shuffle.each do |repo_name|
      puts "  #{cdl}/#{repo_name}"
      output = assert_backtick "/app/post_trigger.sh #{token} #{cdl} #{repo_name}"
      print_to STDOUT, output
      print_to STDOUT, "\n", '- - - - - - - - -'
    end
  end

  # - - - - - - - - - - - - - - - - - - - - -

  def token
    @token ||= get_token
  end

  # Logs in, reads the Travis token, and always logs out again.
  # Fix: dropped the unused local `token` — the begin body's last
  # expression is the return value even with the ensure clause.
  def get_token
    login
    begin
      assert_backtick('travis token --org').strip
    ensure
      logout
    end
  end

  def login
    `travis login --skip-completion-check --github-token ${GITHUB_TOKEN}`
    status = $?.exitstatus
    unless status == success
      failed "exit_status == #{status}"
    end
  end

  def logout
    assert_system 'travis logout'
  end

  # - - - - - - - - - - - - - - - - - - - - -

  # Repos whose base image is the one this repo builds.
  def dependent_repos
    triples.keys.select { |key| triples[key]['from'] == image_name }
  end

  # - - - - - - - - - - - - - - - - - - - - -

  def quoted(s)
    '"' + s.to_s + '"'
  end

  # - - - - - - - - - - - - - - - - - - - - -

  def cdl
    'cyber-dojo-languages'
  end

  # - - - - - - - - - - - - - - - - - - - - -

  # Parses the downloaded JSON file.
  # Fix: the failure message contained a broken "#(unknown)" interpolation;
  # report the offending filename instead.
  def json_parse(filename)
    content = IO.read(filename)
    JSON.parse(content)
  rescue JSON::ParserError
    failed "error parsing JSON file:#{filename}"
  end
end
drop unused variable in notifier code
require_relative 'assert_system'
require_relative 'failed'
require_relative 'print_to'
require 'json'
# The Dockerfile installs
# o) curl
# o) the Travis gem
# o) the /app/post_trigger.sh script
# Validates this repo's (image_name, from, test_framework) triple against
# the published images_info list, and triggers Travis builds of dependent
# repos. The surrounding Docker image provides curl, the travis gem and
# the /app/post_trigger.sh script.
class Travis

  def initialize(triple)
    @triple = triple
  end

  # Prints the triple when it matches the published list; otherwise
  # prints a diagnostic and exits with a failing status.
  def validate_triple
    if validated?
      print_to STDOUT, triple.inspect
    else
      print_to STDERR, *triple_diagnostic
      exit false
    end
  end

  # Kicks off Travis builds for every repo whose base image is ours.
  def trigger_dependents
    trigger(dependent_repos)
  end

  private

  attr_reader :triple

  include AssertSystem
  include Failed
  include PrintTo

  # - - - - - - - - - - - - - - - - - - - - -

  def image_name
    triple['image_name']
  end

  def from
    triple['from']
  end

  def test_framework?
    triple['test_framework']
  end

  # - - - - - - - - - - - - - - - - - - - - -

  # True when the published triples contain an entry whose image_name,
  # from and test_framework all match ours.
  def validated?
    found = triples.find { |_,tri| tri['image_name'] == image_name }
    return false if found.nil?
    # TODO: check if > 1 found
    found[1]['from'] == from &&
      found[1]['test_framework'] == test_framework?
  end

  # - - - - - - - - - - - - - - - - - - - - -

  def triples
    @triples ||= curled_triples
  end

  # Downloads and parses the published images_info.json.
  def curled_triples
    assert_system "curl -O --silent --fail #{triples_url}"
    json_parse('./' + triples_filename)
  end

  def triples_url
    github_org = 'https://raw.githubusercontent.com/cyber-dojo-languages'
    repo = 'images_info'
    branch = 'master'
    "#{github_org}/#{repo}/#{branch}/#{triples_filename}"
  end

  def triples_filename
    'images_info.json'
  end

  # Lines describing the entry that should exist for this repo.
  def triple_diagnostic
    [ '',
      triples_url,
      'does not contain an entry for:',
      '',
      "#{quoted('REPO')}: {",
      "  #{quoted('from')}: #{quoted(from)},",
      "  #{quoted('image_name')}: #{quoted(image_name)},",
      "  #{quoted('test_framework')}: #{test_framework?}",
      '},',
      ''
    ]
  end

  # - - - - - - - - - - - - - - - - - - - - -

  # POSTs a build trigger for each dependent repo.
  def trigger(repos)
    print_to STDOUT, "number of dependent repos: #{repos.size}"
    # Travis limits the number of triggers to 10 per hour.
    # You can see this in the trigger reponse:
    #    { ... "remaining_requests": 10, ... }
    # Once you get past 10, the output you get is
    #    Forbidden
    # Some repos have more than 10 immediate dependents on their own.
    # I shuffle the repos so, over time, all dependents are triggered.
    repos.shuffle.each do |repo_name|
      puts "  #{cdl}/#{repo_name}"
      output = assert_backtick "/app/post_trigger.sh #{token} #{cdl} #{repo_name}"
      print_to STDOUT, output
      print_to STDOUT, "\n", '- - - - - - - - -'
    end
  end

  # - - - - - - - - - - - - - - - - - - - - -

  def token
    @token ||= get_token
  end

  # Logs in, reads the Travis token, and always logs out again.
  def get_token
    login
    begin
      assert_backtick('travis token --org').strip # returns the token
    ensure
      logout
    end
  end

  def login
    `travis login --skip-completion-check --github-token ${GITHUB_TOKEN}`
    status = $?.exitstatus
    unless status == success
      failed "exit_status == #{status}"
    end
  end

  def logout
    assert_system 'travis logout'
  end

  # - - - - - - - - - - - - - - - - - - - - -

  # Repos whose base image is the one this repo builds.
  def dependent_repos
    triples.keys.select { |key| triples[key]['from'] == image_name }
  end

  # - - - - - - - - - - - - - - - - - - - - -

  def quoted(s)
    '"' + s.to_s + '"'
  end

  # - - - - - - - - - - - - - - - - - - - - -

  def cdl
    'cyber-dojo-languages'
  end

  # - - - - - - - - - - - - - - - - - - - - -

  # Parses the downloaded JSON file.
  # Fix: the failure message contained a broken "#(unknown)" interpolation;
  # report the offending filename instead.
  def json_parse(filename)
    content = IO.read(filename)
    JSON.parse(content)
  rescue JSON::ParserError
    failed "error parsing JSON file:#{filename}"
  end
end
|
require 'thor'
require 'desktop'
module Desktop
# Thor-based command line interface for the desktop gem.
class CLI < Thor
desc 'set IMAGE_PATH', 'Set all desktops to the image at IMAGE_PATH'
long_desc <<-LONGDESC
`desktop set` will set the desktop image of all spaces on all monitors to
the image at `IMAGE_PATH`.
> $ desktop set /path/to/image.png
`IMAGE_PATH` can be a local file path or a URL.
> $ desktop set http://url.to/image.jpg
LONGDESC
# Hidden options (not shown in help); used internally/by tests.
option :default_image_path, :hide => true
option :skip_reload, :type => :boolean, :hide => true
# Sets the desktop image at +path+ (local path or URL). On the first
# permissions failure it fixes the image file permissions and retries
# once (already_failed guards the retry); a second failure aborts.
def set(path, already_failed = false)
osx = OSX.new(options[:default_image_path], options[:skip_reload])
image = HTTP.uri?(path) ? WebImage.new(path) : LocalImage.new(path)
begin
osx.desktop_image = image
rescue OSX::DesktopImagePermissionsError => e
fail_with_permissions_error(e) if already_failed
print_permissions_message
osx.update_desktop_image_permissions
set path, true
rescue OSX::DesktopImageMissingError
fail_with_missing_image_error(image)
end
end
private
# Prints an apology, the issue URL and the exception, then re-raises
# the current exception via the bare `fail`.
def fail_with_permissions_error(exception)
puts
print "Sorry, but I was unable to change your desktop image. "
puts "Please create an issue if you think this is my fault:"
puts
puts issue_url
puts
puts "Here's the error:"
puts
puts exception
fail
end
# Reports a missing image file and re-raises the current exception.
def fail_with_missing_image_error(image)
puts
puts "Sorry, but it looks like the image you provided does not exist:"
puts
puts image.path
puts
puts "Please create an issue if you think this is my fault:"
puts
puts issue_url
fail
end
# One-time explanation printed before fixing image file permissions.
def print_permissions_message
print "It looks like this is the first time you've tried to change "
puts "your desktop."
puts
print "We need to make your desktop image writable before we can "
puts "change it. This only needs to be done once."
puts
puts "$ #{OSX.chown_command}"
puts "$ #{OSX.chmod_command}"
puts
end
def issue_url
"https://github.com/chrishunt/desktop/issues/new"
end
end
end
Do not print stack trace on permissions error
require 'thor'
require 'desktop'
module Desktop
  # Thor-based command line interface for the desktop gem.
  class CLI < Thor
    desc 'set IMAGE_PATH', 'Set all desktops to the image at IMAGE_PATH'
    long_desc <<-LONGDESC
`desktop set` will set the desktop image of all spaces on all monitors to
the image at `IMAGE_PATH`.
> $ desktop set /path/to/image.png
`IMAGE_PATH` can be a local file path or a URL.
> $ desktop set http://url.to/image.jpg
    LONGDESC
    option :default_image_path, :hide => true
    option :skip_reload, :type => :boolean, :hide => true

    # Sets the desktop image at +path+ (local path or URL). On the first
    # permissions failure it fixes the image file permissions and retries
    # once (already_failed guards the retry); a second failure aborts.
    def set(path, already_failed = false)
      osx = OSX.new(options[:default_image_path], options[:skip_reload])
      image = HTTP.uri?(path) ? WebImage.new(path) : LocalImage.new(path)
      begin
        osx.desktop_image = image
      rescue OSX::DesktopImagePermissionsError
        # Fix: dropped the unused `=> e` exception binding.
        fail_with_permissions_error if already_failed
        print_permissions_message
        osx.update_desktop_image_permissions
        set path, true
      rescue OSX::DesktopImageMissingError
        fail_with_missing_image_error image
      end
    end

    private

    # Explains that permissions are still wrong, then re-raises the
    # current exception via the bare `fail`.
    def fail_with_permissions_error
      puts
      print "Sorry, but I was unable to change your desktop image. "
      puts "The permissions are still incorrect."
      puts
      puts "Did you type your password incorrectly?"
      print_issues_message
      fail
    end

    # Reports a missing image file and re-raises the current exception.
    def fail_with_missing_image_error(image)
      puts
      puts "Sorry, but it looks like the image you provided does not exist:"
      puts
      puts image.path
      print_issues_message
      fail
    end

    # One-time explanation printed before fixing image file permissions.
    def print_permissions_message
      print "It looks like this is the first time you've tried to change "
      puts "your desktop."
      puts
      print "We need to make your desktop image writable before we can "
      puts "change it. This only needs to be done once."
      puts
      puts "$ #{OSX.chown_command}"
      puts "$ #{OSX.chmod_command}"
      puts
    end

    def print_issues_message
      puts
      puts "Please create an issue if you think this is my fault:"
      puts
      puts "https://github.com/chrishunt/desktop/issues/new"
    end
  end
end
|
add nginx recipe
# Install the official nginx.org yum repository package for CentOS 7.
package 'http://nginx.org/packages/centos/7/noarch/RPMS/nginx-release-centos-7-0.el7.ngx.noarch.rpm' do
  # Fix: the guard previously queried the CentOS *6* release package, so
  # the CentOS 7 repo RPM was reinstalled on every converge.
  not_if 'rpm -q nginx-release-centos-7-0.el7.ngx.noarch'
end

package "nginx" do
  action :install
end

# Enable at boot and start immediately.
service "nginx" do
  action [:enable, :start]
end
|
class Article < ActiveRecord::Base
  # Versioned model exercising paper_trail's option forms.
  # Fix: wrap the option hashes that sit inside the :ignore/:only arrays
  # in explicit braces — a bare `key => value` in the middle of an array
  # literal is a syntax error on Ruby 1.8.
  has_paper_trail :ignore => [:title, { :abstract => Proc.new { |obj| ['ignore abstract', 'Other abstract'].include? obj.abstract } }],
                  :only => [:content, { :abstract => Proc.new { |obj| obj.abstract.present? } }],
                  :skip => [:file_upload],
                  :meta => {
                    :answer => 42,
                    :action => :action_data_provider_method,
                    :question => Proc.new { "31 + 11 = #{31 + 11}" },
                    :article_id => Proc.new { |article| article.id },
                    :title => :title
                  }

  # Example of a method-backed metadata provider referenced by :action.
  def action_data_provider_method
    self.object_id.to_s
  end
end
Wrapping up hash args for :only and :ignore on Article model in dummy app to fix tests for Ruby18
# Versioned model exercising paper_trail's option forms: conditional
# :ignore/:only (Proc-valued hash entries wrapped in braces for Ruby 1.8
# compatibility), :skip, and the various :meta value kinds (literal,
# method name symbol, and Procs with/without the record argument).
class Article < ActiveRecord::Base
has_paper_trail :ignore => [:title, { :abstract => Proc.new { |obj| ['ignore abstract', 'Other abstract'].include? obj.abstract } } ],
:only => [:content, { :abstract => Proc.new { |obj| obj.abstract.present? } } ],
:skip => [:file_upload],
:meta => {
:answer => 42,
:action => :action_data_provider_method,
:question => Proc.new { "31 + 11 = #{31 + 11}" },
:article_id => Proc.new { |article| article.id },
:title => :title
}
# Example of a method-backed metadata provider referenced by :action.
def action_data_provider_method
self.object_id.to_s
end
end
|
require_relative 'feature_test_helper'
module TrafficSpy
# Point Capybara at the Sinatra app under test.
Capybara.app = Server
# Feature tests for the POST /sources registration endpoint.
class IdentifierTest < FeatureTest
include Rack::Test::Methods
include Capybara::DSL
# Rack::Test hook: the app under test.
def app
Server
end
# Registration without a rootUrl is rejected with 400.
def test_error_when_status_missing_rootUrl
post '/sources', "identifier=jumpstartlab"
assert_equal 400, last_response.status
end
# Registration without an identifier is rejected with 400.
def test_error_when_status_missing_identifier
post '/sources', "rootUrl=http://google.com"
assert_equal 400, last_response.status
end
# A complete registration succeeds with 200.
def test_application_registration
post '/sources', "identifier=jumpstartlab&rootUrl=http://google.com"
assert_equal 200, last_response.status
end
end
end
added another test
require_relative 'feature_test_helper'
module TrafficSpy
# Point Capybara at the Sinatra app under test.
Capybara.app = Server
# Feature tests for the POST /sources registration endpoint.
class IdentifierTest < FeatureTest
include Rack::Test::Methods
include Capybara::DSL
# Rack::Test hook: the app under test.
def app
Server
end
# Registration without a rootUrl is rejected with 400.
def test_error_when_status_missing_rootUrl
post '/sources', "identifier=jumpstartlab"
assert_equal 400, last_response.status
end
# Registration without an identifier is rejected with 400.
def test_error_when_status_missing_identifier
post '/sources', "rootUrl=http://google.com"
assert_equal 400, last_response.status
end
# A complete registration succeeds with 200.
def test_application_registration
post '/sources', "identifier=jumpstartlab&rootUrl=http://google.com"
assert_equal 200, last_response.status
end
# Re-registering an existing identifier is rejected with 403.
def test_error_when_identifier_exists
post '/sources', "identifier=jumpstartlab&rootUrl=http://google.com"
assert_equal 200, last_response.status
post '/sources', "identifier=jumpstartlab&rootUrl=http://google.com"
assert_equal 403, last_response.status
end
end
end
|
# encoding: UTF-8
# Gem packaging metadata for the array_metrics gem.
Gem::Specification.new do |s|
s.name = "array_metrics"
s.version = "0.0.1"
s.platform = Gem::Platform::RUBY
s.authors = ["Ricardo Panaggio"]
s.email = ["panaggio.ricardo@gmail.com"]
s.homepage = "http://github.com/panaggio/array_metrics"
s.summary = "A bundle of metrics for Arrays"
s.description = "A bundle (not that much by now) of metrics for Arrays"
# File lists come from git, so the gem must be built from a checkout.
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files spec`.split("\n")
# NOTE(review): has_rdoc is deprecated and ignored by modern RubyGems.
s.has_rdoc = true
s.require_paths = ["lib"]
s.add_development_dependency "rspec", "~> 2.0"
end
Bump up version (from 0.0.1 to 0.0.2)
# encoding: UTF-8
# Gem packaging metadata for the array_metrics gem.
Gem::Specification.new do |s|
s.name = "array_metrics"
s.version = "0.0.2"
s.platform = Gem::Platform::RUBY
s.authors = ["Ricardo Panaggio"]
s.email = ["panaggio.ricardo@gmail.com"]
s.homepage = "http://github.com/panaggio/array_metrics"
s.summary = "A bundle of metrics for Arrays"
s.description = "A bundle (not that much by now) of metrics for Arrays"
# File lists come from git, so the gem must be built from a checkout.
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files spec`.split("\n")
# NOTE(review): has_rdoc is deprecated and ignored by modern RubyGems.
s.has_rdoc = true
s.require_paths = ["lib"]
s.add_development_dependency "rspec", "~> 2.0"
end
|
total stub of a removal script
#!/usr/bin/ruby
# TODO: add ability to redownload deb file and remove specific files... ala undeb but in reverse
# Remove the package named by ARGV[0] via dpkg.
# Fix: pass the package name as a discrete argument instead of interpolating
# it into a shell command string, which was vulnerable to shell injection
# (e.g. `remove 'foo"; rm -rf ~"'`).
system('sudo', 'dpkg', '-r', ARGV[0].to_s)
|
# Class Warfare, Validate a Credit Card Number
# I worked on this challenge with Michael Whelpley.
# I spent [#] hours on this challenge.
# Pseudocode
# Input:
# Output:
# Steps:
# Remove the spaces amongst the numbers.
# Check for valid input. 16 digit long and integer.
# Define instance variable
# Task 1:
# Convert the integer to an string, and Split the string then turn string into an array.
# Double the element if the index number is even.
# Task 2:
# Define accumulator with initial value of 0.
# Iterate through every element if it is single character, add that to the accumulator.
# If it is double characters, convert it to string, split the string and convert both of the characters back to integer, and then Iterate through sub array by adding all values.
#Task 3:
# modulus the sum with 10, if it equals to 0 then it is valid credit card.
# If it is false, return false.
# Initial Solution
# Don't forget to check on initialization for a card length
# of exactly 16 digits
# (array.dup.to_s.gsub(" ", "").length)
#
# class CreditCard
#
# def initialize(input)
# raise ArgumentError.new('The card is not valid') unless (input.to_s.length == 16 ) && (input.is_a? Integer)
# @card = input
# @accumulator = 0
# end
#
# def double_even_index
# array = @card.to_s.split(//)
# array.map! {|character| character.to_i }
# (0..15).each { |index| (array[index] =(array[index] * 2)) if index.even? }
# return array
# end
#
# def sum_digits(array)
# array.each do |digit|
# if (digit < 10)
# @accumulator += digit
# else
# digit.to_s.split(//).each {|x| @accumulator += x.to_i }
# end
# end
# end
#
# def check_card
# sum_digits(double_even_index)
# @accumulator % 10 == 0
# end
# end
# cc=CreditCard.new(1234567891234567)
# p cc.check_card
# Refactored Solution
# Luhn-style validator for 16-digit card numbers.
class CreditCard
  # Rejects anything that is not exactly 16 characters long when stringified.
  def initialize(card_number)
    raise ArgumentError, 'The card is not valid' unless card_number.to_s.length == 16
    @card_number = card_number
  end

  # True when the Luhn-style checksum of the card number is divisible by 10.
  def check_card
    doubled = doubled_digits(@card_number)
    split_and_sum(doubled) % 10 == 0
  end

  private

  # Doubles every digit sitting at an even (0-based) position.
  def doubled_digits(number)
    to_digits(number).each_with_index.map do |digit, position|
      position.even? ? digit * 2 : digit
    end
  end

  # Splits any multi-digit entries into single digits and totals everything.
  def split_and_sum(digits)
    to_digits(digits.join.to_i).reduce(:+)
  end

  # 1234 -> [1, 2, 3, 4]
  def to_digits(number)
    number.to_s.split(//).map(&:to_i)
  end
end
# Reflection
Refactor inital solution for credit card
# Class Warfare, Validate a Credit Card Number
# I worked on this challenge with Michael Whelpley.
# I spent [#] hours on this challenge.
# Pseudocode
# Input:
# Output:
# Steps:
# Remove the spaces amongst the numbers.
# Check for valid input. 16 digit long and integer.
# Define instance variable
# Task 1:
# Convert the integer to an string, and Split the string then turn string into an array.
# Double the element if the index number is even.
# Task 2:
# Define accumulator with initial value of 0.
# Iterate through every element if it is single character, add that to the accumulator.
# If it is double characters, convert it to string, split the string and convert both of the characters back to integer, and then Iterate through sub array by adding all values.
#Task 3:
# modulus the sum with 10, if it equals to 0 then it is valid credit card.
# If it is false, return false.
# Initial Solution
# Don't forget to check on initialization for a card length
# of exactly 16 digits
# (array.dup.to_s.gsub(" ", "").length)
#
# class CreditCard
#
# def initialize(input)
# raise ArgumentError.new('The card is not valid') unless (input.to_s.length == 16 ) && (input.is_a? Integer)
# @card = input
# @accumulator = 0
# end
#
# def double_even_index
# array = @card.to_s.split(//)
# array.map! {|character| character.to_i }
# (0..15).each { |index| (array[index] =(array[index] * 2)) if index.even? }
# return array
# end
#
# def sum_digits(array)
# array.each do |digit|
# if (digit < 10)
# @accumulator += digit
# else
# digit.to_s.split(//).each {|x| @accumulator += x.to_i }
# end
# end
# end
#
# def check_card
# sum_digits(double_even_index)
# @accumulator % 10 == 0
# end
# end
# cc=CreditCard.new(1234567891234567)
# p cc.check_card
# Refactored Solution
# Luhn-style validator for 16-digit card numbers.
class CreditCard
  # Rejects anything that is not exactly 16 characters long when stringified.
  def initialize(card_number)
    raise ArgumentError, 'The card is not valid' unless card_number.to_s.length == 16
    @card_number = card_number
  end

  # True when the Luhn-style checksum of the card number is divisible by 10.
  def check_card
    doubled = doubled_digits(@card_number)
    split_and_sum(doubled) % 10 == 0
  end

  private

  # Doubles every digit sitting at an even (0-based) position.
  def doubled_digits(number)
    to_digits(number).each_with_index.map do |digit, position|
      position.even? ? digit * 2 : digit
    end
  end

  # Splits any multi-digit entries into single digits and totals everything.
  def split_and_sum(digits)
    to_digits(digits.join.to_i).reduce(:+)
  end

  # 1234 -> [1, 2, 3, 4]
  def to_digits(number)
    number.to_s.split(//).map(&:to_i)
  end
end
# Reflection
|
# Class Warfare, Validate a Credit Card Number
# I worked on this challenge [by myself, with: ].
# I spent [#] hours on this challenge.
# Pseudocode
# Input: number to test
# Output: true or false
# Steps:
# Create initialize method
# create instance variable
# Check to make sure input is the correct length -- .to_s.length == 16. Creat Argument error if not.
# Create new method to double every other number
#create new array filled with the 16 integers
#Iterate through the array and double ever odd integer
#Create new method to seperate double digit numbers
#iterate through array for numbers larger than 9
#turn into a string
#split into individual numbers
#flatten the array
#Create new method to add numbers
#sum all of the numbers in the array
# Create new method to check
#check if sum is mod 10== 0
#return true or false
# Initial Solution
# Don't forget to check on initialization for a card length
# # of exactly 16 digits
# class CreditCard
# def initialize(num)
# if num.to_s.length != 16
# raise ArgumentError.new("Number must be 16 digits")
# else
# @num = num
# end
# end
# def double
# @num = @num.to_s.split("")
# @num = @num.map.with_index do |x, y|
# if (y + 1).odd?
# x.to_i * 2
# else
# x.to_i
# end
# end
# end
# def separate
# @arr = @num.map do |num|
# if num > 9
# num.to_s.split("").map {|x| x.to_i }
# else
# num
# end
# end
# end
# def sum
# @arr.flatten!
# @sum = 0
# @arr.each do |z|
# @sum += z
# end
# return @sum
# end
# def check_card
# ans = nil
# if @sum % 10 == 0
# ans = true
# else
# ans = false
# end
# return ans
# end
# end
# Refactored Solution
class CreditCard
  # Validates a 16-digit card number with a Luhn-style checksum.
  # Callers must invoke #double, #separate and #sum (in that order)
  # before #check_card; intermediate results live in instance variables.
  def initialize(num)
    raise ArgumentError, "Number must be 16 digits" unless num.to_s.length == 16
    @num = num
  end

  # Doubles every digit at an even (0-based) index.
  def double
    @num_array = @num.to_s.split("")
    @num_array = @num_array.map.with_index do |value, index|
      index.even? ? value.to_i * 2 : value.to_i
    end
  end

  # Splits any two-digit result (e.g. 14) into its component digits,
  # leaving single digits untouched.
  def separate
    @arr_2 = @num_array.map do |digit|
      digit > 9 ? digit.to_s.chars.map(&:to_i) : digit
    end
  end

  # Sums every digit produced by #separate.
  # Fix: use the non-destructive #flatten — #flatten! returns nil when
  # the array contains no nested arrays (i.e. no doubled digit exceeded
  # 9), which made the original crash with NoMethodError.
  def sum
    @sum = @arr_2.flatten.inject(:+)
  end

  # True when the digit sum is a multiple of 10.
  def check_card
    @sum % 10 == 0
  end
end
# Demo run: build a sample card, prepare the intermediate state, then
# print the digit sum and the validation result.
card = CreditCard.new(4408041234567901)
card.double
card.separate
p card.sum
p card.check_card
# Reflection
=begin
What was the most difficult part of this challenge for you and your pair?
We had trouble getting the rspec to work. Our code worked fine, but we were having an issue with the rspec. We had to stop before we figured it out, but we definitely solved the challenge; our code works.
What new methods did you find to help you when you refactored?
I learned about the inject method which basically just allows a person to easily get the sum of an array, you can also do other stuff with it but that is waht we used it for.
What concepts or learnings were you able to solidify in this challenge?
I now much better understand how to use built in ruby methods to solve challenges. I also no want to learn more about rspec. I also feel more comfortable working with arrays.
=end
Finished 6.7
# Class Warfare, Validate a Credit Card Number
# I worked on this challenge [by myself, with: ].
# I spent [#] hours on this challenge.
# Pseudocode
# Input: number to test
# Output: true or false
# Steps:
# Create initialize method
# create instance variable
# Check to make sure input is the correct length -- .to_s.length == 16. Creat Argument error if not.
# Create new method to double every other number
#create new array filled with the 16 integers
#Iterate through the array and double ever odd integer
#Create new method to seperate double digit numbers
#iterate through array for numbers larger than 9
#turn into a string
#split into individual numbers
#flatten the array
#Create new method to add numbers
#sum all of the numbers in the array
# Create new method to check
#check if sum is mod 10== 0
#return true or false
# Initial Solution
# Don't forget to check on initialization for a card length
# # of exactly 16 digits
# class CreditCard
# def initialize(num)
# if num.to_s.length != 16
# raise ArgumentError.new("Number must be 16 digits")
# else
# @num = num
# end
# end
# def double
# @num = @num.to_s.split("")
# @num = @num.map.with_index do |x, y|
# if (y + 1).odd?
# x.to_i * 2
# else
# x.to_i
# end
# end
# end
# def separate
# @arr = @num.map do |num|
# if num > 9
# num.to_s.split("").map {|x| x.to_i }
# else
# num
# end
# end
# end
# def sum
# @arr.flatten!
# @sum = 0
# @arr.each do |z|
# @sum += z
# end
# return @sum
# end
# def check_card
# ans = nil
# if @sum % 10 == 0
# ans = true
# else
# ans = false
# end
# return ans
# end
# end
# Refactored Solution
class CreditCard
  # Validates a 16-digit card number with a Luhn-style checksum.
  # #check_card is self-contained: it recomputes every intermediate
  # step, so callers do not need to call #double/#separate/#sum first.
  def initialize(num)
    raise ArgumentError, "Number must be 16 digits" unless num.to_s.length == 16
    @num = num
  end

  # Doubles every digit at an even (0-based) index.
  def double
    @num_array = @num.to_s.split("")
    @num_array = @num_array.map.with_index do |value, index|
      index.even? ? value.to_i * 2 : value.to_i
    end
  end

  # Splits any two-digit result (e.g. 14) into its component digits,
  # leaving single digits untouched.
  def separate
    @arr_2 = @num_array.map do |digit|
      digit > 9 ? digit.to_s.chars.map(&:to_i) : digit
    end
  end

  # Sums every digit produced by #separate.
  # Fix: use the non-destructive #flatten — #flatten! returns nil when
  # nothing was flattened (no doubled digit exceeded 9), crashing the
  # original with NoMethodError for cards such as 1234123412341234.
  def sum
    @sum = @arr_2.flatten.inject(:+)
  end

  # Runs the whole pipeline and returns true when the digit sum is a
  # multiple of 10.
  def check_card
    self.double
    self.separate
    self.sum
    @sum % 10 == 0
  end
end
# Demo run: prints the digit sum and validation result for a sample card.
credit = CreditCard.new(4408041234567901)
credit.double
credit.separate
p credit.sum
p credit.check_card
# Reflection
=begin
What was the most difficult part of this challenge for you and your pair?
We had trouble getting the rspec to work. Our code worked fine, but we were having an issue with the rspec. We had to stop before we figured it out, but we definitely solved the challenge; our code works.
What new methods did you find to help you when you refactored?
I learned about the inject method which basically just allows a person to easily get the sum of an array, you can also do other stuff with it but that is waht we used it for.
What concepts or learnings were you able to solidify in this challenge?
I now much better understand how to use built in ruby methods to solve challenges. I also no want to learn more about rspec. I also feel more comfortable working with arrays.
=end |
add final solution
# Class Warfare, Validate a Credit Card Number
# I worked on this challenge [by myself, with: Gabriel].
# I spent [#] hours on this challenge.
# Pseudocode
# Input: Credit card number
# Output: if the credit card is valid or not (TRUE or FALSE)
# Steps:
# + Check the length of the credit card digits. If length doesn't equal 16 digits return Arrgument error.
# Write a method to separate each digit (with spaces), put this in an array.
# + iterate the index to double every other digit starting from second to last
# + Starting with the second to last digit, double every other digit until you reach the first digit.
# + Doubled digits need to be broken apart, 10 becomes 1 + 0.
# + Sum all the untouched digits and the doubled digits
# + Check if the total is a multiple of ten, return message valid credit card number
class CreditCard
  # Validates a 16-digit card number: every digit at an even (0-based)
  # index is doubled, the digits of the results are summed, and the
  # card is valid when the total is a multiple of ten.
  def initialize(card_number)
    @card_number = card_number
    # Reject anything that is not exactly 16 digits long.
    # Fix: the original message string contained an accidental literal
    # newline plus indentation; use a single-line message.
    unless card_number.to_s.length == 16
      raise ArgumentError, 'Card length provided is not valid'
    end
  end

  # Returns true when the checksum is divisible by 10, false otherwise.
  def check_card
    doubled = @card_number.to_s.chars.map.with_index do |digit, index|
      index.odd? ? digit.to_i : digit.to_i * 2
    end
    # Joining and re-splitting breaks two-digit results (e.g. 14) into
    # their component digits before summing.
    total = doubled.join.chars.reduce(0) { |acc, ch| acc + ch.to_i }
    total % 10 == 0
  end
end
#refactor
#didn't have time to refactor
#reflections
# Reflection
# What was the most difficult part of this challenge for you and your pair?
# We struggled for many hours to make a clean code with interesting methods but failed to make it work in the end
# Had to resort to making a very drawn out code.
# What new methods did you find to help you when you refactored?
# no new methods (for me) were used in the solution we eventually handed in. In our first solutions we worked with index.odd that we found.
# What concepts or learnings were you able to solidify in this challenge?
# Go back to basic if it doesn't work out. Better that the code works than it looks good and doesn't, right?
|
#!/usr/bin/env ruby
# -------------------------------------------------------------------------- #
# Copyright 2002-2015, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
# Set up the environment for the driver
ONE_LOCATION = ENV["ONE_LOCATION"]
if !ONE_LOCATION
RUBY_LIB_LOCATION = "/usr/lib/one/ruby"
MAD_LOCATION = "/usr/lib/one/mads"
ETC_LOCATION = "/etc/one/"
else
RUBY_LIB_LOCATION = ONE_LOCATION + "/lib/ruby"
MAD_LOCATION = ONE_LOCATION + "/lib/mads"
ETC_LOCATION = ONE_LOCATION + "/etc/"
end
$: << RUBY_LIB_LOCATION
$: << MAD_LOCATION
require "VirtualMachineDriver"
require 'one_vnm'
require 'one_tm'
require 'getoptlong'
require 'ssh_stream'
require 'rexml/document'
require 'pp'
class VmmAction
# List of xpaths required by the VNM driver actions
XPATH_LIST = %w(
ID DEPLOY_ID
TEMPLATE/NIC
TEMPLATE/SECURITY_GROUP_RULE
HISTORY_RECORDS/HISTORY/HOSTNAME
)
attr_reader :data
# Initialize a VmmAction object
# @param[OpenNebula::ExecDriver] Driver to be used for the actions
# @param[String] Id of the VM
# @param[String] name of the actions as described in the VMM protocol
# @param[xml_data] data sent from OpenNebula core
def initialize(driver, id, action, xml_data)
# Initialize object with xml data
@vmm = driver
@id = id
@main_action = action
@xml_data = @vmm.decode(xml_data)
@data = Hash.new
get_data(:host)
get_data(:net_drv)
get_data(:deploy_id)
get_data(:checkpoint_file)
get_data(:local_dfile, :LOCAL_DEPLOYMENT_FILE)
get_data(:remote_dfile, :REMOTE_DEPLOYMENT_FILE)
# For migration
get_data(:dest_host, :MIGR_HOST)
get_data(:dest_driver, :MIGR_NET_DRV)
# For disk hotplugging
get_data(:disk_target_path)
get_data(:tm_command)
# VM template
vm_template = @xml_data.elements['VM'].to_s
@data[:vm] = Base64.encode64(vm_template).delete("\n")
# VM data for VNM
vm_template_xml = REXML::Document.new(vm_template).root
vm_vnm_xml = REXML::Document.new('<VM></VM>').root
XPATH_LIST.each do |xpath|
elements = vm_template_xml.elements.each(xpath) do |element|
add_element_to_path(vm_vnm_xml, element, xpath)
end
end
# Initialize streams and vnm
@ssh_src = @vmm.get_ssh_stream(action, @data[:host], @id)
@vnm_src = VirtualNetworkDriver.new(@data[:net_drv],
:local_actions => @vmm.options[:local_actions],
:message => vm_vnm_xml.to_s,
:ssh_stream => @ssh_src)
if @data[:dest_host] and !@data[:dest_host].empty?
@ssh_dst = @vmm.get_ssh_stream(action, @data[:dest_host], @id)
@vnm_dst = VirtualNetworkDriver.new(@data[:dest_driver],
:local_actions => @vmm.options[:local_actions],
:message => vm_vnm_xml.to_s,
:ssh_stream => @ssh_dst)
end
@tm = TransferManagerDriver.new(nil)
end
#Execute a set of steps defined with
# - :driver :vmm or :vnm to execute the step
# - :action for the step
# - :parameters command line paremeters for the action
# - :destination use next host
# - :fail_action steps to be executed if steps fail
# - :stdin for the action
# @param [Array] of steps
# Executes the given steps, closes both SSH streams and reports the
# overall result of the action back to the OpenNebula core.
# @param [Array] steps to be executed
# @param [String, nil] info_on_success kept for interface compatibility
def run(steps, info_on_success = nil)
    result = execute_steps(steps)

    # The streams are no longer needed once every step has run.
    [@ssh_src, @ssh_dst].each { |stream| stream.close if stream }

    # Report either the failure details or the info gathered by the
    # main action's step.
    info = if DriverExecHelper.failed?(result)
               @data[:failed_info]
           else
               @data["#{@main_action}_info".to_sym]
           end

    @vmm.send_message(VirtualMachineDriver::ACTION[@main_action],
                      result, @id, info)
end
private
DRIVER_NAMES = {
:vmm => "virtualization driver",
:vnm => "network driver",
:tm => "transfer manager driver"
}
# Executes a set of steps. If one step fails any recover action is performed
# and the step execution breaks.
# @param [Array] array of steps to be executed
# @return [String, Hash] "SUCCESS/FAILURE" for the step set, and
# information associated to each step (by :<action>_info). In case of
# failure information is also in [:failed_info]
def execute_steps(steps)
# Start from failure so an empty step list is reported as FAILURE.
result = DriverExecHelper.const_get(:RESULT)[:failure]
steps.each do |step|
# Execute Step
case step[:driver]
when :vmm
# :destination selects the migration target host and its SSH
# stream instead of the source host.
if step[:destination]
host = @data[:dest_host]
ssh = @ssh_dst
else
host = @data[:host]
ssh = @ssh_src
end
# The full driver message is sent on stdin unless the step
# supplies its own payload.
stdin = step[:stdin] || @xml_data.to_s
result, info = @vmm.do_action(get_parameters(step[:parameters]),
@id,
host,
step[:action],
:ssh_stream => ssh,
:respond => false,
:stdin => stdin)
when :vnm
if step[:destination]
vnm = @vnm_dst
else
vnm = @vnm_src
end
result, info = vnm.do_action(@id, step[:action],
:parameters => get_parameters(step[:parameters]))
when :tm
result, info = @tm.do_transfer_action(@id, step[:parameters])
else
result = DriverExecHelper.const_get(:RESULT)[:failure]
info = "No driver in #{step[:action]}"
end
# Save the step info
@data["#{step[:action]}_info".to_sym] = info.strip
# Roll back steps, store failed info and break steps
if DriverExecHelper.failed?(result)
# Recovery steps run first; they may themselves overwrite
# per-action info entries.
execute_steps(step[:fail_actions]) if step[:fail_actions]
@data[:failed_info] = info
@vmm.log(@id,
"Failed to execute #{DRIVER_NAMES[step[:driver]]} " \
"operation: #{step[:action]}.")
# :no_fail steps log the error but let the action continue with
# an overall SUCCESS result.
if step[:no_fail]
result = DriverExecHelper::RESULT[:success]
else
break
end
else
@vmm.log(@id,
"Successfully execute #{DRIVER_NAMES[step[:driver]]} " \
"operation: #{step[:action]}.")
end
end
return result
end
# Prepare the parameters for the action step generating a blank separated
# list of command arguments
# @param [Hash] an action step
# Builds a blank separated, quoted argument string for an action step.
# Symbols are resolved against the @data hash gathered from the driver
# message; any other value is used literally.
# @param [Array, nil] step_params parameters declared for the step
# @return [String] the command line fragment
def get_parameters(step_params)
    (step_params || []).map do |param|
        value = param.is_a?(Symbol) ? @data[param].to_s : param
        "'#{value}'"
    end.join(' ')
end
# Extracts data from the XML argument of the VMM action
# @param [Symbol] corresponding to a XML element
# @param [String] an xpath for the XML element
# @return [String] the element value
# Extracts a single element from the driver message XML into @data.
# @param [Symbol] name key to store the value under
# @param [Symbol, String, nil] xml_path optional explicit path; when
#   omitted the uppercased name is used as the element path
# @return [String, nil] the element text, or nil when the element is
#   missing (in which case @data is left untouched)
def get_data(name, xml_path = nil)
    path = (xml_path || name.to_s.upcase).to_s
    elem = @xml_data.elements[path]
    @data[name] = elem.text if elem
end
# Adds a REXML node to a specific xpath
#
# @param [REXML::Element] xml document to add to
# @param [REXML::Element] element to add
# @param [String] path where the element is inserted in the xml document
# @return [REXML::Element]
# Inserts +element+ into +xml+ under the given slash-separated +path+,
# creating every intermediate element. The last path component names
# the element itself and is not created separately.
# @param [REXML::Element] xml document to add to
# @param [REXML::Element] element to add
# @param [String] path where the element is inserted
# @return [REXML::Element] the document root that was passed in
def add_element_to_path(xml, element, path)
    parents = path.split('/')[0..-2]
    node = parents.inject(xml) do |current, name|
        name ? current.add_element(name) : current
    end
    node.add_element(element)
    xml
end
end
# The main class for the Sh driver
class ExecDriver < VirtualMachineDriver
attr_reader :options
# Initializes the VMM driver
# @param [String] hypervisor name identifies the plugin
# @param [OpenNebulaDriver::options]
# Initializes the VMM driver for a concrete hypervisor plugin.
# @param [String] hypervisor name identifies the plugin
# @param [Hash] options forwarded to OpenNebulaDriver
def initialize(hypervisor, options = {})
    @options = { :threaded => true }.merge!(options)

    # Use the configured remote shell, defaulting to bash.
    @shell = options[:shell] || 'bash'

    super("vmm/#{hypervisor}", @options)

    @hypervisor = hypervisor
end
# Creates an SshStream to execute commands on the target host
# @param[String] the hostname of the host
# @param[String] id of the VM to log messages
# @return [SshStreamCommand]
def get_ssh_stream(aname, host, id)
# NOTE(review): 'aname' is accepted for interface compatibility but is
# not used when building the stream.
SshStreamCommand.new(host,
@remote_scripts_base_path,
log_method(id), nil, @shell)
end
#---------------------------------------------------------------------------
# Virtual Machine Manager Protocol Actions
#---------------------------------------------------------------------------
#
# DEPLOY action, sends the deployment file to remote host
#
def deploy(id, drv_message)
action = VmmAction.new(self, id, :deploy, drv_message)
# ----------------------------------------------------------------------
# Initialization of deployment data
# ----------------------------------------------------------------------
local_dfile=action.data[:local_dfile]
if !local_dfile || File.zero?(local_dfile)
send_message(ACTION[:deploy],RESULT[:failure],id,
"Cannot open deployment file #{local_dfile}")
return
end
domain = File.read(local_dfile)
if action_is_local?(:deploy)
dfile = action.data[:local_dfile]
else
dfile = action.data[:remote_dfile]
end
# ----------------------------------------------------------------------
# Deployment Steps
# ----------------------------------------------------------------------
steps=[
# Execute pre-boot networking setup
{
:driver => :vnm,
:action => :pre
},
# Boot the Virtual Machine
{
:driver => :vmm,
:action => :deploy,
:parameters => [dfile, :host],
:stdin => domain,
},
# Execute post-boot networking setup
{
:driver => :vnm,
:action => :post,
:parameters => [:deploy_info],
:fail_actions => [
{
:driver => :vmm,
:action => :cancel,
:parameters => [:deploy_info, :host]
}
]
}
]
action.run(steps)
end
#
# SHUTDOWN action, graceful shutdown and network clean up
#
# SHUTDOWN: gracefully stops the VM, then tears down its networking.
def shutdown(id, drv_message)
    action = VmmAction.new(self, id, :shutdown, drv_message)

    # Shutdown the Virtual Machine
    shutdown_step = {
        :driver     => :vmm,
        :action     => :shutdown,
        :parameters => [:deploy_id, :host]
    }

    # Execute networking clean up operations
    clean_step = {
        :driver => :vnm,
        :action => :clean
    }

    action.run([shutdown_step, clean_step])
end
#
# CANCEL action, destroys a VM and network clean up
#
# CANCEL: destroys the VM, then tears down its networking.
def cancel(id, drv_message)
    action = VmmAction.new(self, id, :cancel, drv_message)

    # Cancel the Virtual Machine
    cancel_step = {
        :driver     => :vmm,
        :action     => :cancel,
        :parameters => [:deploy_id, :host]
    }

    # Execute networking clean up operations
    clean_step = {
        :driver => :vnm,
        :action => :clean
    }

    action.run([cancel_step, clean_step])
end
#
# SAVE action, stops the VM and saves its state, network is cleaned up
#
def save(id, drv_message)
# Suspend the VM to its checkpoint file on the host, then clean up
# its network configuration.
action = VmmAction.new(self, id, :save, drv_message)
steps=[
# Save the Virtual Machine state
{
:driver => :vmm,
:action => :save,
:parameters => [:deploy_id, :checkpoint_file, :host]
},
# Execute networking clean up operations
{
:driver => :vnm,
:action => :clean
}
]
action.run(steps)
end
#
# RESTORE action, restore a VM from a previous state, and restores network
#
def restore(id, drv_message)
# Bring a suspended VM back from its checkpoint file, recreating the
# network configuration around the restore.
action=VmmAction.new(self, id, :restore, drv_message)
steps=[
# Execute pre-boot networking setup
{
:driver => :vnm,
:action => :pre
},
# Restore the Virtual Machine from checkpoint
{
:driver => :vmm,
:action => :restore,
:parameters => [:checkpoint_file, :host, :deploy_id]
},
# Execute post-boot networking setup
{
:driver => :vnm,
:action => :post,
:parameters => [:deploy_id],
:fail_actions => [
# If the network post step fails the VM is destroyed.
{
:driver => :vmm,
:action => :cancel,
:parameters => [:deploy_id, :host]
}
],
}
]
action.run(steps)
end
#
# MIGRATE (live) action, migrates a VM to another host creating network
#
def migrate(id, drv_message)
action = VmmAction.new(self, id, :migrate, drv_message)
pre = "PRE"
post = "POST"
failed = "FAIL"
pre << action.data[:tm_command] << " " << action.data[:vm]
post << action.data[:tm_command] << " " << action.data[:vm]
failed << action.data[:tm_command] << " " << action.data[:vm]
steps=[
# Execute a pre-migrate TM setup
{
:driver => :tm,
:action => :tm_premigrate,
:parameters => pre.split
},
# Execute pre-boot networking setup on migrating host
{
:driver => :vnm,
:action => :pre,
:destination => true
},
# Migrate the Virtual Machine
{
:driver => :vmm,
:action => :migrate,
:parameters => [:deploy_id, :dest_host, :host],
:fail_actions => [
{
:driver => :tm,
:action => :tm_failmigrate,
:parameters => failed.split,
:no_fail => true
}
]
},
# Execute networking clean up operations
# NOTE: VM is now in the new host. If we fail from now on, oned will
# assume that the VM is in the previous host but it is in fact
# migrated. Log errors will be shown in vm.log
{
:driver => :vnm,
:action => :clean,
:no_fail => true
},
# Execute post-boot networking setup on migrating host
{
:driver => :vnm,
:action => :post,
:parameters => [:deploy_id],
:destination => :true,
:no_fail => true
},
{
:driver => :tm,
:action => :tm_postmigrate,
:parameters => post.split,
:no_fail => true
},
]
action.run(steps)
end
#
# POLL action, gets information of a VM
#
# POLL: queries the hypervisor for the current status of a VM.
def poll(id, drv_message)
    xml = decode(drv_message)

    host = xml.elements['HOST'].text
    vm_ref = xml.elements['DEPLOY_ID'].text

    do_action("#{vm_ref} #{host}", id, host, ACTION[:poll])
end
#
# REBOOT action, reboots a running VM
#
# REBOOT: gracefully reboots a running VM.
def reboot(id, drv_message)
    xml = decode(drv_message)

    host = xml.elements['HOST'].text
    vm_ref = xml.elements['DEPLOY_ID'].text

    do_action("#{vm_ref} #{host}", id, host, ACTION[:reboot])
end
#
# RESET action, resets a running VM
#
# RESET: hard-resets a running VM.
def reset(id, drv_message)
    xml = decode(drv_message)

    host = xml.elements['HOST'].text
    vm_ref = xml.elements['DEPLOY_ID'].text

    do_action("#{vm_ref} #{host}", id, host, ACTION[:reset])
end
#
# ATTACHDISK action, attaches a disk to a running VM
#
def attach_disk(id, drv_message)
action = ACTION[:attach_disk]
xml_data = decode(drv_message)
tm_command = ensure_xpath(xml_data, id, action, 'TM_COMMAND') || return
tm_rollback= xml_data.elements['TM_COMMAND_ROLLBACK'].text.strip
target_xpath = "VM/TEMPLATE/DISK[ATTACH='YES']/TARGET"
target = ensure_xpath(xml_data, id, action, target_xpath) || return
target_index = target.downcase[-1..-1].unpack('c').first - 97
action = VmmAction.new(self, id, :attach_disk, drv_message)
# Bug #1355, argument character limitation in ESX
# Message not used in vmware anyway
if @hypervisor == "vmware"
drv_message = "drv_message"
end
steps = [
# Perform a PROLOG on the disk
{
:driver => :tm,
:action => :tm_attach,
:parameters => tm_command.split
},
# Run the attach vmm script
{
:driver => :vmm,
:action => :attach_disk,
:parameters => [
:deploy_id,
:disk_target_path,
target,
target_index,
drv_message
],
:fail_actions => [
{
:driver => :tm,
:action => :tm_detach,
:parameters => tm_rollback.split
}
]
}
]
action.run(steps)
end
#
# DETACHDISK action, attaches a disk to a running VM
#
def detach_disk(id, drv_message)
# Unplug a disk from a running VM, then run the TM epilog on it.
action = ACTION[:detach_disk]
xml_data = decode(drv_message)
tm_command = ensure_xpath(xml_data, id, action, 'TM_COMMAND') || return
target_xpath = "VM/TEMPLATE/DISK[ATTACH='YES']/TARGET"
target = ensure_xpath(xml_data, id, action, target_xpath) || return
# Map the device letter (vda, vdb, ...) to a 0-based index (97 = 'a').
target_index = target.downcase[-1..-1].unpack('c').first - 97
# From here on 'action' holds the VmmAction object, not the name.
action = VmmAction.new(self, id, :detach_disk, drv_message)
steps = [
# Run the detach vmm script
{
:driver => :vmm,
:action => :detach_disk,
:parameters => [
:deploy_id,
:disk_target_path,
target,
target_index
]
},
# Perform an EPILOG on the disk
{
:driver => :tm,
:action => :tm_detach,
:parameters => tm_command.split
}
]
action.run(steps)
end
#
# SNAPSHOTCREATE action, creates a new system snapshot
#
# SNAPSHOTCREATE: asks the hypervisor to take a new system snapshot
# of the running VM.
def snapshot_create(id, drv_message)
    xml = decode(drv_message)

    host = xml.elements['HOST'].text
    deploy_id = xml.elements['DEPLOY_ID'].text

    # Numeric id of the snapshot marked as active in the template.
    snap_id = xml.elements["VM/TEMPLATE/SNAPSHOT[ACTIVE='YES']/SNAPSHOT_ID"].text.to_i

    do_action("#{deploy_id} #{snap_id}",
              id,
              host,
              ACTION[:snapshot_create],
              :script_name => "snapshot_create")
end
#
# SNAPSHOTREVERT action, reverts to a system snapshot
#
# SNAPSHOTREVERT: reverts the VM to a previously taken system snapshot.
def snapshot_revert(id, drv_message)
    xml = decode(drv_message)

    host = xml.elements['HOST'].text
    deploy_id = xml.elements['DEPLOY_ID'].text

    # Hypervisor-side name of the snapshot marked as active.
    snapshot_name = xml.elements["VM/TEMPLATE/SNAPSHOT[ACTIVE='YES']/HYPERVISOR_ID"].text

    do_action("#{deploy_id} #{snapshot_name}",
              id,
              host,
              ACTION[:snapshot_revert],
              :script_name => "snapshot_revert")
end
#
# SNAPSHOTDELETE action, deletes a system snapshot
#
# SNAPSHOTDELETE: deletes a previously taken system snapshot.
def snapshot_delete(id, drv_message)
    xml = decode(drv_message)

    host = xml.elements['HOST'].text
    deploy_id = xml.elements['DEPLOY_ID'].text

    # Hypervisor-side name of the snapshot marked as active.
    snapshot_name = xml.elements["VM/TEMPLATE/SNAPSHOT[ACTIVE='YES']/HYPERVISOR_ID"].text

    do_action("#{deploy_id} #{snapshot_name}",
              id,
              host,
              ACTION[:snapshot_delete],
              :script_name => "snapshot_delete")
end
#
# CLEANUP action, frees resources allocated in a host: VM and disk images
#
def cleanup(id, drv_message)
xml_data = decode(drv_message)
tm_command = xml_data.elements['TM_COMMAND'].text
mhost = xml_data.elements['MIGR_HOST'].text
deploy_id = xml_data.elements['DEPLOY_ID'].text
action = VmmAction.new(self, id, :cleanup, drv_message)
steps = Array.new
# Cancel the VM at host (only if we have a valid deploy-id)
if deploy_id && !deploy_id.empty?
steps <<
{
:driver => :vmm,
:action => :cancel,
:parameters => [:deploy_id, :host],
:no_fail => true
}
steps <<
{
:driver => :vnm,
:action => :clean,
:no_fail => true
}
end
# Cancel the VM at the previous host (in case of migration)
if mhost && !mhost.empty?
steps <<
{
:driver => :vmm,
:action => :cancel,
:parameters => [:deploy_id, :dest_host],
:destination => true,
:no_fail => true
}
steps <<
{
:driver => :vnm,
:action => :clean,
:destination => true,
:no_fail => true
}
end
# Cleans VM disk images and directory
tm_command.each_line { |tc|
tc.strip!
steps <<
{
:driver => :tm,
:action => :tm_delete,
:parameters => tc.split,
:no_fail => true
} if !tc.empty?
} if tm_command
action.run(steps)
end
#
# ATTACHNIC action to attach a new nic interface
#
# ATTACHNIC: plugs a new network interface into a running VM.
def attach_nic(id, drv_message)
    xml_data = decode(drv_message)

    # BRIDGE and MAC are mandatory in the NIC being attached; report a
    # failure to oned if they cannot be read.
    begin
        source = xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/BRIDGE"]
        source_ovs =
            xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/BRIDGE_OVS"]
        mac = xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/MAC"]

        source = source.text.strip
        source_ovs = source_ovs.text.strip if source_ovs
        mac = mac.text.strip
    rescue
        # Fix: the local 'action' was not assigned yet at this point, so
        # the original code raised NameError instead of notifying the
        # failure. Use the action name constant directly.
        send_message(ACTION[:attach_nic], RESULT[:failure], id,
            "Error in #{ACTION[:attach_nic]}, BRIDGE and MAC needed in NIC")
        return
    end

    # Optional NIC model (e.g. virtio); '-' when not set.
    model = xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/MODEL"]
    model = model.text if !model.nil?
    model = model.strip if !model.nil?
    model = "-" if model.nil?

    # Network driver in use; '-' when not set.
    net_drv = xml_data.elements["NET_DRV"]
    net_drv = net_drv.text if !net_drv.nil?
    net_drv = net_drv.strip if !net_drv.nil?
    net_drv = "-" if net_drv.nil?

    # Open vSwitch networks use the OVS bridge instead of the Linux one.
    source = source_ovs if net_drv == 'ovswitch' && source_ovs

    action = VmmAction.new(self, id, :attach_nic, drv_message)

    steps = [
        # Execute pre-attach networking setup
        {
            :driver => :vnm,
            :action => :pre
        },
        # Attach the new NIC
        {
            :driver     => :vmm,
            :action     => :attach_nic,
            :parameters => [:deploy_id, mac, source, model, net_drv]
        },
        # Execute post-boot networking setup
        {
            :driver       => :vnm,
            :action       => :post,
            :parameters   => [:deploy_id],
            :fail_actions => [
                # Detach again if the network post step fails.
                {
                    :driver     => :vmm,
                    :action     => :detach_nic,
                    :parameters => [:deploy_id, mac]
                }
            ]
        }
    ]

    action.run(steps)
end
#
# DETACHNIC action to detach a nic interface
#
# DETACHNIC: unplugs a network interface from a running VM.
def detach_nic(id, drv_message)
    xml_data = decode(drv_message)

    # MAC identifies the NIC to detach; report a failure to oned if it
    # cannot be read.
    begin
        mac = xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/MAC"]
        mac = mac.text.strip
    rescue
        # Fix: the local 'action' was not assigned yet at this point, so
        # the original code raised NameError instead of notifying the
        # failure. Use the action name constant directly.
        send_message(ACTION[:detach_nic], RESULT[:failure], id,
            "Error in #{ACTION[:detach_nic]}, MAC needed in NIC")
        return
    end

    action = VmmAction.new(self, id, :detach_nic, drv_message)

    steps = [
        # Detach the NIC
        {
            :driver     => :vmm,
            :action     => :detach_nic,
            :parameters => [:deploy_id, mac]
        },
        # Clean networking setup
        {
            :driver => :vnm,
            :action => :clean
        }
    ]

    action.run(steps)
end
#
# DISKSNAPSHOTCREATE action, takes a snapshot of a disk
#
def disk_snapshot_create(id, drv_message)
snap_action = prepare_snap_action(id, drv_message,
:disk_snapshot_create)
action = snap_action[:action]
strategy = snap_action[:strategy]
drv_message = snap_action[:drv_message]
target = snap_action[:target]
target_index = snap_action[:target_index]
xml_data = snap_action[:xml_data]
# Get TM command
tm_command = ensure_xpath(xml_data, id, action, 'TM_COMMAND') || return
# Build the process
case strategy
when :live
tm_command_split = tm_command.split
tm_command_split[0] += "_LIVE"
steps = [
{
:driver => :tm,
:action => :tm_snap_create_live,
:parameters => tm_command_split,
:no_fail => true
}
]
when :detach
steps = [
# detach_disk or save
{
:driver => :vmm,
:action => :detach_disk,
:parameters => [:deploy_id, :disk_target_path, target,
target_index]
},
# run TM
{
:driver => :tm,
:action => :tm_snap_create,
:parameters => tm_command.split,
:no_fail => true
},
# attach_disk or restore
{
:driver => :vmm,
:action => :attach_disk,
:parameters => [:deploy_id, :disk_target_path, target,
target_index, drv_message]
}
]
when :suspend
steps = [
# detach_disk or save
{
:driver => :vmm,
:action => :save,
:parameters => [:deploy_id, :checkpoint_file, :host]
},
# network drivers (clean)
{
:driver => :vnm,
:action => :clean
},
# run TM
{
:driver => :tm,
:action => :tm_snap_create,
:parameters => tm_command.split,
:no_fail => true
},
# network drivers (pre)
{
:driver => :vnm,
:action => :pre
},
# attach_disk or restore
{
:driver => :vmm,
:action => :restore,
:parameters => [:checkpoint_file, :host, :deploy_id]
},
# network drivers (post)
{
:driver => :vnm,
:action => :post,
:parameters => [:deploy_id],
:fail_actions => [
{
:driver => :vmm,
:action => :cancel,
:parameters => [:deploy_id, :host]
}
]
}
]
else
return
end
action.run(steps)
end
#
# DISKSNAPSHOTREVERT action, takes a snapshot of a disk
#
def disk_snapshot_revert(id, drv_message)
snap_action = prepare_snap_action(id, drv_message,
:disk_snapshot_revert)
action = snap_action[:action]
strategy = @options[:snapshots_strategy]
drv_message = snap_action[:drv_message]
target = snap_action[:target]
target_index = snap_action[:target_index]
xml_data = snap_action[:xml_data]
# Get TM command
tm_command = ensure_xpath(xml_data, id, action, 'TM_COMMAND') || return
case strategy
when :detach
steps = [
# Save VM state / detach the disk
{
:driver => :vmm,
:action => :detach_disk,
:parameters => [:deploy_id, :disk_target_path, target, target_index]
},
# Do the snapshot
{
:driver => :tm,
:action => :tm_snap_revert,
:parameters => tm_command.split,
:no_fail => true,
},
# Restore VM / attach the disk
{
:driver => :vmm,
:action => :attach_disk,
:parameters => [:deploy_id, :disk_target_path, target, target_index,
drv_message]
}
]
when :suspend
steps = [
# Save VM state / detach the disk
{
:driver => :vmm,
:action => :save,
:parameters => [:deploy_id, :checkpoint_file, :host]
},
# network drivers (clean)
{
:driver => :vnm,
:action => :clean
},
# Do the snapshot
{
:driver => :tm,
:action => :tm_snap_revert,
:parameters => tm_command.split,
:no_fail => true,
},
# network drivers (pre)
{
:driver => :vnm,
:action => :pre
},
# Restore VM / attach the disk
{
:driver => :vmm,
:action => :restore,
:parameters => [:checkpoint_file, :host, :deploy_id]
},
# network drivers (post)
{
:driver => :vnm,
:action => :post,
:parameters => [:deploy_id],
:fail_actions => [
{
:driver => :vmm,
:action => :cancel,
:parameters => [:deploy_id, :host]
}
]
}
]
else
return
end
action.run(steps)
end
private
# Reads the text of +xpath+ from the driver message, reporting a failure
# to oned and returning nil when the element is missing or empty.
# @param [REXML::Element] xml_data decoded driver message
# @param [String] id of the VM
# @param [String] action name used in the error message
# @param [String] xpath to look up
# @return [String, nil] stripped element text, or nil on failure
def ensure_xpath(xml_data, id, action, xpath)
    value = xml_data.elements[xpath].text.strip
    raise if value.empty?
    value
rescue StandardError
    send_message(action, RESULT[:failure], id,
        "Cannot perform #{action}, expecting #{xpath}")
    nil
end
# Builds the common context for the disk snapshot actions.
#
# @param [String] id of the VM
# @param [String] drv_message Base64 encoded XML sent by oned
# @param [Symbol] action :disk_snapshot_create or :disk_snapshot_revert
# @return [Hash] with :action (a VmmAction), :strategy, :drv_message
#   (re-encoded with ATTACH='YES' on the active disk), :target,
#   :target_index and :xml_data
def prepare_snap_action(id, drv_message, action)
    xml_data = decode(drv_message)

    # Make sure disk target has been defined
    target_xpath = "VM/TEMPLATE/DISK[DISK_SNAPSHOT_ACTIVE='YES']/TARGET"
    # NOTE(review): '|| return' makes this method return nil on error, but
    # callers index the result (snap_action[:action]) without a nil check —
    # confirm the intended error path.
    target = ensure_xpath(xml_data, id, ACTION[action],
                          target_xpath) || return

    # Map the target device letter to its index: 'a' -> 0, 'b' -> 1, ...
    target_index = target.downcase[-1..-1].unpack('c').first - 97

    # Always send ATTACH='YES' for the selected target in case it will end
    # up being a 'detach' strategy
    disk = xml_data.elements[target_xpath].parent
    attach = REXML::Element.new('ATTACH')
    attach.add_text('YES')
    disk.add(attach)

    drv_message = Base64.encode64(xml_data.to_s)

    action = VmmAction.new(self, id, action, drv_message)

    # Determine the strategy
    vmm_driver_path = 'VM/HISTORY_RECORDS/HISTORY/VMMMAD'
    tm_driver_path  = "VM/TEMPLATE/DISK[DISK_SNAPSHOT_ACTIVE='YES']/TM_MAD"

    vmm_driver = ensure_xpath(xml_data, id, action, vmm_driver_path) || return
    tm_driver  = ensure_xpath(xml_data, id, action, tm_driver_path) || return

    strategy = @options[:snapshots_strategy]

    # Upgrade to a live snapshot when this vmm/tm driver pair supports it
    if @options[:live_snapshots] && LIVE_DISK_SNAPSHOTS.include?("#{vmm_driver}-#{tm_driver}")
        strategy = :live
    end

    {
        :action       => action,
        :strategy     => strategy,
        :drv_message  => drv_message,
        :target       => target,
        :target_index => target_index,
        :xml_data     => xml_data
    }
end
end
################################################################################
#
# Virtual Machine Manager Execution Driver - Main Program
#
################################################################################
# vmm/tm driver pairs that support live disk snapshots, taken from the
# environment as a blank-separated list
if ENV['LIVE_DISK_SNAPSHOTS']
    LIVE_DISK_SNAPSHOTS = ENV['LIVE_DISK_SNAPSHOTS'].split
else
    LIVE_DISK_SNAPSHOTS = []
end

# Command line options of the driver
opts = GetoptLong.new(
    [ '--retries',           '-r', GetoptLong::OPTIONAL_ARGUMENT ],
    [ '--threads',           '-t', GetoptLong::OPTIONAL_ARGUMENT ],
    [ '--local',             '-l', GetoptLong::REQUIRED_ARGUMENT ],
    [ '--shell',             '-s', GetoptLong::REQUIRED_ARGUMENT ],
    [ '--parallel',          '-p', GetoptLong::NO_ARGUMENT ],
    [ '--live-snapshots',    '-i', GetoptLong::NO_ARGUMENT ],
    [ '--default-snapshots', '-d', GetoptLong::REQUIRED_ARGUMENT ]
)

# Defaults
hypervisor         = ''
retries            = 0
threads            = 15
shell              = 'bash'
local_actions      = {}
single_host        = true
live_snapshots     = false
snapshots_strategy = :suspend # Either :detach or :suspend

begin
    opts.each do |opt, arg|
        case opt
        when '--retries'
            retries = arg.to_i
        when '--threads'
            threads = arg.to_i
        when '--local'
            local_actions = OpenNebulaDriver.parse_actions_list(arg)
        when '--shell'
            shell = arg
        when '--parallel'
            single_host = false
        when '--default-snapshots'
            snapshots_strategy = arg.to_sym
        when '--live-snapshots'
            live_snapshots = true
        end
    end
rescue Exception => e
    # Invalid command line: exit with a non-zero status
    exit(-1)
end

# The hypervisor name is the only mandatory positional argument
if ARGV.length >= 1
    hypervisor = ARGV.shift
else
    exit(-1)
end

exec_driver = ExecDriver.new(hypervisor,
                :concurrency        => threads,
                :retries            => retries,
                :local_actions      => local_actions,
                :shell              => shell,
                :single_host        => single_host,
                :snapshots_strategy => snapshots_strategy,
                :live_snapshots     => live_snapshots)

exec_driver.start_driver
Fail when the live snapshot cannot be done
(cherry picked from commit ace8f04e34b2a8f4b31739b15a3ffa156f50101e)
#!/usr/bin/env ruby
# -------------------------------------------------------------------------- #
# Copyright 2002-2015, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
# Set up the environment for the driver: self-contained install when
# ONE_LOCATION is set, system-wide paths otherwise
ONE_LOCATION = ENV["ONE_LOCATION"]

if !ONE_LOCATION
    RUBY_LIB_LOCATION = "/usr/lib/one/ruby"
    MAD_LOCATION      = "/usr/lib/one/mads"
    ETC_LOCATION      = "/etc/one/"
else
    RUBY_LIB_LOCATION = ONE_LOCATION + "/lib/ruby"
    MAD_LOCATION      = ONE_LOCATION + "/lib/mads"
    ETC_LOCATION      = ONE_LOCATION + "/etc/"
end

# Make the driver libraries loadable
$: << RUBY_LIB_LOCATION
$: << MAD_LOCATION
require "VirtualMachineDriver"
require 'one_vnm'
require 'one_tm'
require 'getoptlong'
require 'ssh_stream'
require 'rexml/document'
require 'pp'
# Encapsulates the execution of one VMM protocol action as an ordered list
# of steps spanning the virtualization (:vmm), network (:vnm) and transfer
# manager (:tm) drivers, with per-step rollback (:fail_actions) support.
class VmmAction
    # List of xpaths required by the VNM driver actions
    XPATH_LIST = %w(
        ID DEPLOY_ID
        TEMPLATE/NIC
        TEMPLATE/SECURITY_GROUP_RULE
        HISTORY_RECORDS/HISTORY/HOSTNAME
    )

    # Hash with the data extracted from the XML message (:host, :deploy_id...)
    attr_reader :data

    # Initialize a VmmAction object
    # @param[OpenNebula::ExecDriver] Driver to be used for the actions
    # @param[String] Id of the VM
    # @param[String] name of the actions as described in the VMM protocol
    # @param[xml_data] data sent from OpenNebula core
    def initialize(driver, id, action, xml_data)
        # Initialize object with xml data
        @vmm = driver
        @id  = id

        @main_action = action
        @xml_data    = @vmm.decode(xml_data)

        @data = Hash.new

        get_data(:host)
        get_data(:net_drv)
        get_data(:deploy_id)
        get_data(:checkpoint_file)

        get_data(:local_dfile, :LOCAL_DEPLOYMENT_FILE)
        get_data(:remote_dfile, :REMOTE_DEPLOYMENT_FILE)

        # For migration
        get_data(:dest_host, :MIGR_HOST)
        get_data(:dest_driver, :MIGR_NET_DRV)

        # For disk hotplugging
        get_data(:disk_target_path)
        get_data(:tm_command)

        # VM template (Base64, newline-free, for passing on command lines)
        vm_template = @xml_data.elements['VM'].to_s
        @data[:vm]  = Base64.encode64(vm_template).delete("\n")

        # VM data for VNM: a pruned copy of the template containing only the
        # elements listed in XPATH_LIST
        vm_template_xml = REXML::Document.new(vm_template).root
        vm_vnm_xml      = REXML::Document.new('<VM></VM>').root

        XPATH_LIST.each do |xpath|
            elements = vm_template_xml.elements.each(xpath) do |element|
                add_element_to_path(vm_vnm_xml, element, xpath)
            end
        end

        # Initialize streams and vnm
        @ssh_src = @vmm.get_ssh_stream(action, @data[:host], @id)
        @vnm_src = VirtualNetworkDriver.new(@data[:net_drv],
                        :local_actions => @vmm.options[:local_actions],
                        :message       => vm_vnm_xml.to_s,
                        :ssh_stream    => @ssh_src)

        # A second stream/driver pair is only needed when migrating
        if @data[:dest_host] and !@data[:dest_host].empty?
            @ssh_dst = @vmm.get_ssh_stream(action, @data[:dest_host], @id)
            @vnm_dst = VirtualNetworkDriver.new(@data[:dest_driver],
                            :local_actions => @vmm.options[:local_actions],
                            :message       => vm_vnm_xml.to_s,
                            :ssh_stream    => @ssh_dst)
        end

        @tm = TransferManagerDriver.new(nil)
    end

    # Execute a set of steps defined with
    #  - :driver :vmm or :vnm to execute the step
    #  - :action for the step
    #  - :parameters command line parameters for the action
    #  - :destination use next host
    #  - :fail_actions steps to be executed if the step fails
    #  - :stdin for the action
    # @param [Array] of steps
    # (info_on_success is accepted for interface compatibility; it is not
    # used in this implementation)
    def run(steps, info_on_success = nil)
        result = execute_steps(steps)

        # Close the SSH streams before reporting back to oned
        @ssh_src.close if @ssh_src
        @ssh_dst.close if @ssh_dst

        # Prepare the info for the OpenNebula core
        if DriverExecHelper.failed?(result)
            info = @data[:failed_info]
        else
            info = @data["#{@main_action.to_s}_info".to_sym]
        end

        @vmm.send_message(VirtualMachineDriver::ACTION[@main_action],
                          result, @id, info)
    end

    private

    # Human-readable driver names used in log messages
    DRIVER_NAMES = {
        :vmm => "virtualization driver",
        :vnm => "network driver",
        :tm  => "transfer manager driver"
    }

    # Executes a set of steps. If one step fails any recover action is performed
    # and the step execution breaks.
    # @param [Array] array of steps to be executed
    # @return [String, Hash] "SUCCESS/FAILURE" for the step set, and
    # information associated to each step (by :<action>_info). In case of
    # failure information is also in [:failed_info]
    def execute_steps(steps)
        result = DriverExecHelper.const_get(:RESULT)[:failure]

        steps.each do |step|
            # Execute Step
            case step[:driver]
            when :vmm
                # :destination selects the migration target host/stream
                if step[:destination]
                    host = @data[:dest_host]
                    ssh  = @ssh_dst
                else
                    host = @data[:host]
                    ssh  = @ssh_src
                end

                stdin = step[:stdin] || @xml_data.to_s

                result, info = @vmm.do_action(get_parameters(step[:parameters]),
                                              @id,
                                              host,
                                              step[:action],
                                              :ssh_stream => ssh,
                                              :respond    => false,
                                              :stdin      => stdin)
            when :vnm
                if step[:destination]
                    vnm = @vnm_dst
                else
                    vnm = @vnm_src
                end

                result, info = vnm.do_action(@id, step[:action],
                            :parameters => get_parameters(step[:parameters]))
            when :tm
                result, info = @tm.do_transfer_action(@id, step[:parameters])
            else
                result = DriverExecHelper.const_get(:RESULT)[:failure]
                info   = "No driver in #{step[:action]}"
            end

            # Save the step info
            @data["#{step[:action]}_info".to_sym] = info.strip

            # Roll back steps, store failed info and break steps
            if DriverExecHelper.failed?(result)
                execute_steps(step[:fail_actions]) if step[:fail_actions]
                @data[:failed_info] = info

                @vmm.log(@id,
                         "Failed to execute #{DRIVER_NAMES[step[:driver]]} " \
                         "operation: #{step[:action]}.")

                if step[:no_fail]
                    # Tolerated failure: report success and keep going
                    result = DriverExecHelper::RESULT[:success]
                else
                    break
                end
            else
                @vmm.log(@id,
                         "Successfully execute #{DRIVER_NAMES[step[:driver]]} " \
                         "operation: #{step[:action]}.")
            end
        end

        return result
    end

    # Prepare the parameters for the action step generating a blank separated
    # list of command arguments
    # @param [Hash] an action step
    def get_parameters(step_params)
        parameters = step_params || []

        # Symbols are looked up in @data; anything else is used literally
        parameters.map do |param|
            if Symbol===param
                "\'#{@data[param].to_s}\'"
            else
                "\'#{param}\'"
            end
        end.join(' ')
    end

    # Extracts data from the XML argument of the VMM action
    # @param [Symbol] corresponding to a XML element
    # @param [String] an xpath for the XML element (defaults to the
    #   uppercased symbol name)
    # @return [String] the element value
    def get_data(name, xml_path=nil)
        if xml_path
            path=xml_path.to_s
        else
            path=name.to_s.upcase
        end

        if (elem = @xml_data.elements[path])
            @data[name]=elem.text
        end
    end

    # Adds a REXML node to a specific xpath
    #
    # @param [REXML::Element] xml document to add to
    # @param [REXML::Element] element to add
    # @param [String] path where the element is inserted in the xml document
    # @return [REXML::Element]
    def add_element_to_path(xml, element, path)
        root = xml
        # Create every intermediate element of the path, then append
        path.split('/')[0..-2].each do |path_element|
            xml = xml.add_element(path_element) if path_element
        end
        xml.add_element(element)
        root
    end
end
# The main class for the Sh driver
class ExecDriver < VirtualMachineDriver
attr_reader :options
# Initializes the VMM driver
# @param [String] hypervisor name identifies the plugin
# @param [OpenNebulaDriver::options]
def initialize(hypervisor, options={})
    # Threaded execution is on by default; callers may override it
    @options = { :threaded => true }.merge!(options)

    # Shell used on the remote hosts; bash unless the caller supplies one
    @shell = options[:shell] || 'bash'

    super("vmm/#{hypervisor}", @options)

    @hypervisor = hypervisor
end
# Creates an SshStream to execute commands on the target host
# @param[String] the action name (accepted for interface compatibility;
#   not used here)
# @param[String] the hostname of the host
# @param[String] id of the VM to log messages
# @return [SshStreamCommand]
def get_ssh_stream(aname, host, id)
    SshStreamCommand.new(host,
                         @remote_scripts_base_path,
                         log_method(id), nil, @shell)
end
#---------------------------------------------------------------------------
# Virtual Machine Manager Protocol Actions
#---------------------------------------------------------------------------
#
# DEPLOY action, sends the deployment file to remote host
#
def deploy(id, drv_message)
action = VmmAction.new(self, id, :deploy, drv_message)
# ----------------------------------------------------------------------
# Initialization of deployment data
# ----------------------------------------------------------------------
local_dfile=action.data[:local_dfile]
if !local_dfile || File.zero?(local_dfile)
send_message(ACTION[:deploy],RESULT[:failure],id,
"Cannot open deployment file #{local_dfile}")
return
end
domain = File.read(local_dfile)
if action_is_local?(:deploy)
dfile = action.data[:local_dfile]
else
dfile = action.data[:remote_dfile]
end
# ----------------------------------------------------------------------
# Deployment Steps
# ----------------------------------------------------------------------
steps=[
# Execute pre-boot networking setup
{
:driver => :vnm,
:action => :pre
},
# Boot the Virtual Machine
{
:driver => :vmm,
:action => :deploy,
:parameters => [dfile, :host],
:stdin => domain,
},
# Execute post-boot networking setup
{
:driver => :vnm,
:action => :post,
:parameters => [:deploy_info],
:fail_actions => [
{
:driver => :vmm,
:action => :cancel,
:parameters => [:deploy_info, :host]
}
]
}
]
action.run(steps)
end
#
# SHUTDOWN action: gracefully stops the guest, then cleans up networking
#
def shutdown(id, drv_message)
    vmm_action = VmmAction.new(self, id, :shutdown, drv_message)

    # Step 1: ask the hypervisor to shut the VM down
    shutdown_step = {
        :driver     => :vmm,
        :action     => :shutdown,
        :parameters => [:deploy_id, :host]
    }

    # Step 2: tear down the network configuration
    clean_step = {
        :driver => :vnm,
        :action => :clean
    }

    vmm_action.run([shutdown_step, clean_step])
end
#
# CANCEL action, destroys a VM and network clean up
#
def cancel(id, drv_message)
    action = VmmAction.new(self, id, :cancel, drv_message)

    steps=[
        # Cancel the Virtual Machine (forced destroy, no graceful shutdown)
        {
            :driver     => :vmm,
            :action     => :cancel,
            :parameters => [:deploy_id, :host]
        },
        # Execute networking clean up operations
        {
            :driver => :vnm,
            :action => :clean
        }
    ]

    action.run(steps)
end
#
# SAVE action, stops the VM and saves its state, network is cleaned up
#
def save(id, drv_message)
    action = VmmAction.new(self, id, :save, drv_message)

    steps=[
        # Save the Virtual Machine state to the checkpoint file
        {
            :driver     => :vmm,
            :action     => :save,
            :parameters => [:deploy_id, :checkpoint_file, :host]
        },
        # Execute networking clean up operations
        {
            :driver => :vnm,
            :action => :clean
        }
    ]

    action.run(steps)
end
#
# RESTORE action, restore a VM from a previous state, and restores network
#
def restore(id, drv_message)
    action=VmmAction.new(self, id, :restore, drv_message)

    steps=[
        # Execute pre-boot networking setup
        {
            :driver => :vnm,
            :action => :pre
        },
        # Restore the Virtual Machine from checkpoint
        {
            :driver     => :vmm,
            :action     => :restore,
            :parameters => [:checkpoint_file, :host, :deploy_id]
        },
        # Execute post-boot networking setup
        {
            :driver       => :vnm,
            :action       => :post,
            :parameters   => [:deploy_id],
            :fail_actions => [
                # Rollback: destroy the VM if the network setup fails
                {
                    :driver     => :vmm,
                    :action     => :cancel,
                    :parameters => [:deploy_id, :host]
                }
            ],
        }
    ]

    action.run(steps)
end
#
# MIGRATE (live) action, migrates a VM to another host creating network
#
def migrate(id, drv_message)
    action = VmmAction.new(self, id, :migrate, drv_message)

    # Argument lines for the TM migration hooks; each is
    # "<PRE|POST|FAIL><tm_command> <base64 VM template>".
    # NOTE(review): assumes :tm_command is present for migrations — a nil
    # value would raise here; confirm oned always sends TM_COMMAND.
    pre    = "PRE"
    post   = "POST"
    failed = "FAIL"

    pre    << action.data[:tm_command] << " " << action.data[:vm]
    post   << action.data[:tm_command] << " " << action.data[:vm]
    failed << action.data[:tm_command] << " " << action.data[:vm]

    steps=[
        # Execute a pre-migrate TM setup
        {
            :driver     => :tm,
            :action     => :tm_premigrate,
            :parameters => pre.split
        },
        # Execute pre-boot networking setup on migrating host
        {
            :driver      => :vnm,
            :action      => :pre,
            :destination => true
        },
        # Migrate the Virtual Machine
        {
            :driver       => :vmm,
            :action       => :migrate,
            :parameters   => [:deploy_id, :dest_host, :host],
            :fail_actions => [
                {
                    :driver     => :tm,
                    :action     => :tm_failmigrate,
                    :parameters => failed.split,
                    :no_fail    => true
                }
            ]
        },
        # Execute networking clean up operations
        # NOTE: VM is now in the new host. If we fail from now on, oned will
        # assume that the VM is in the previous host but it is in fact
        # migrated. Log errors will be shown in vm.log
        {
            :driver  => :vnm,
            :action  => :clean,
            :no_fail => true
        },
        # Execute post-boot networking setup on migrating host
        {
            :driver      => :vnm,
            :action      => :post,
            :parameters  => [:deploy_id],
            # Fix: boolean true instead of the symbol :true (a typo; both
            # values are truthy, so behavior is unchanged)
            :destination => true,
            :no_fail     => true
        },
        {
            :driver     => :tm,
            :action     => :tm_postmigrate,
            :parameters => post.split,
            :no_fail    => true
        },
    ]

    action.run(steps)
end
#
# POLL action, gets information of a VM
#
def poll(id, drv_message)
    data      = decode(drv_message)
    host      = data.elements['HOST'].text
    deploy_id = data.elements['DEPLOY_ID'].text

    # Direct script invocation: no VmmAction step list is needed
    do_action("#{deploy_id} #{host}", id, host, ACTION[:poll])
end
#
# REBOOT action, reboots a running VM
#
def reboot(id, drv_message)
    data      = decode(drv_message)
    host      = data.elements['HOST'].text
    deploy_id = data.elements['DEPLOY_ID'].text

    # Direct script invocation: no VmmAction step list is needed
    do_action("#{deploy_id} #{host}", id, host, ACTION[:reboot])
end
#
# RESET action, resets a running VM
#
def reset(id, drv_message)
    data      = decode(drv_message)
    host      = data.elements['HOST'].text
    deploy_id = data.elements['DEPLOY_ID'].text

    # Direct script invocation: no VmmAction step list is needed
    do_action("#{deploy_id} #{host}", id, host, ACTION[:reset])
end
#
# ATTACHDISK action, attaches a disk to a running VM
#
def attach_disk(id, drv_message)
    action   = ACTION[:attach_disk]
    xml_data = decode(drv_message)

    tm_command = ensure_xpath(xml_data, id, action, 'TM_COMMAND') || return
    tm_rollback= xml_data.elements['TM_COMMAND_ROLLBACK'].text.strip

    target_xpath = "VM/TEMPLATE/DISK[ATTACH='YES']/TARGET"
    target       = ensure_xpath(xml_data, id, action, target_xpath) || return

    # Map the target device letter to its index: 'a' -> 0, 'b' -> 1, ...
    target_index = target.downcase[-1..-1].unpack('c').first - 97

    action = VmmAction.new(self, id, :attach_disk, drv_message)

    # Bug #1355, argument character limitation in ESX
    # Message not used in vmware anyway
    if @hypervisor == "vmware"
        drv_message = "drv_message"
    end

    steps = [
        # Perform a PROLOG on the disk
        {
            :driver     => :tm,
            :action     => :tm_attach,
            :parameters => tm_command.split
        },
        # Run the attach vmm script
        {
            :driver       => :vmm,
            :action       => :attach_disk,
            :parameters   => [
                :deploy_id,
                :disk_target_path,
                target,
                target_index,
                drv_message
            ],
            :fail_actions => [
                # Rollback: undo the PROLOG if the attach fails
                {
                    :driver     => :tm,
                    :action     => :tm_detach,
                    :parameters => tm_rollback.split
                }
            ]
        }
    ]

    action.run(steps)
end
#
# DETACHDISK action, detaches a disk from a running VM
#
def detach_disk(id, drv_message)
    action   = ACTION[:detach_disk]
    xml_data = decode(drv_message)

    tm_command = ensure_xpath(xml_data, id, action, 'TM_COMMAND') || return

    target_xpath = "VM/TEMPLATE/DISK[ATTACH='YES']/TARGET"
    target       = ensure_xpath(xml_data, id, action, target_xpath) || return

    # Map the target device letter to its index: 'a' -> 0, 'b' -> 1, ...
    target_index = target.downcase[-1..-1].unpack('c').first - 97

    action = VmmAction.new(self, id, :detach_disk, drv_message)

    steps = [
        # Run the detach vmm script
        {
            :driver     => :vmm,
            :action     => :detach_disk,
            :parameters => [
                :deploy_id,
                :disk_target_path,
                target,
                target_index
            ]
        },
        # Perform an EPILOG on the disk
        {
            :driver     => :tm,
            :action     => :tm_detach,
            :parameters => tm_command.split
        }
    ]

    action.run(steps)
end
#
# SNAPSHOTCREATE action, creates a new system snapshot
#
def snapshot_create(id, drv_message)
    xml_data = decode(drv_message)

    host      = xml_data.elements['HOST'].text
    deploy_id = xml_data.elements['DEPLOY_ID'].text

    # Numeric id assigned by oned to the active snapshot
    snap_id_xpath = "VM/TEMPLATE/SNAPSHOT[ACTIVE='YES']/SNAPSHOT_ID"
    snap_id       = xml_data.elements[snap_id_xpath].text.to_i

    do_action("#{deploy_id} #{snap_id}",
              id,
              host,
              ACTION[:snapshot_create],
              :script_name => "snapshot_create")
end
#
# SNAPSHOTREVERT action, reverts to a system snapshot
#
def snapshot_revert(id, drv_message)
    xml_data = decode(drv_message)

    host      = xml_data.elements['HOST'].text
    deploy_id = xml_data.elements['DEPLOY_ID'].text

    # Hypervisor-side name of the active snapshot
    snap_id_xpath = "VM/TEMPLATE/SNAPSHOT[ACTIVE='YES']/HYPERVISOR_ID"
    snapshot_name = xml_data.elements[snap_id_xpath].text

    do_action("#{deploy_id} #{snapshot_name}",
              id,
              host,
              ACTION[:snapshot_revert],
              :script_name => "snapshot_revert")
end
#
# SNAPSHOTDELETE action, deletes a system snapshot
#
def snapshot_delete(id, drv_message)
    xml_data = decode(drv_message)

    host      = xml_data.elements['HOST'].text
    deploy_id = xml_data.elements['DEPLOY_ID'].text

    # Hypervisor-side name of the active snapshot
    snap_id_xpath = "VM/TEMPLATE/SNAPSHOT[ACTIVE='YES']/HYPERVISOR_ID"
    snapshot_name = xml_data.elements[snap_id_xpath].text

    do_action("#{deploy_id} #{snapshot_name}",
              id,
              host,
              ACTION[:snapshot_delete],
              :script_name => "snapshot_delete")
end
#
# CLEANUP action, frees resources allocated in a host: VM and disk images
#
def cleanup(id, drv_message)
    xml_data = decode(drv_message)

    tm_command = xml_data.elements['TM_COMMAND'].text
    mhost      = xml_data.elements['MIGR_HOST'].text
    deploy_id  = xml_data.elements['DEPLOY_ID'].text

    action = VmmAction.new(self, id, :cleanup, drv_message)
    steps  = Array.new

    # Every step is :no_fail — cleanup is best-effort

    # Cancel the VM at host (only if we have a valid deploy-id)
    if deploy_id && !deploy_id.empty?
        steps <<
            {
                :driver     => :vmm,
                :action     => :cancel,
                :parameters => [:deploy_id, :host],
                :no_fail    => true
            }
        steps <<
            {
                :driver  => :vnm,
                :action  => :clean,
                :no_fail => true
            }
    end

    # Cancel the VM at the previous host (in case of migration)
    if mhost && !mhost.empty?
        steps <<
            {
                :driver      => :vmm,
                :action      => :cancel,
                :parameters  => [:deploy_id, :dest_host],
                :destination => true,
                :no_fail     => true
            }
        steps <<
            {
                :driver      => :vnm,
                :action      => :clean,
                :destination => true,
                :no_fail     => true
            }
    end

    # Cleans VM disk images and directory: one tm_delete per TM_COMMAND line
    tm_command.each_line { |tc|
        tc.strip!

        steps <<
            {
                :driver     => :tm,
                :action     => :tm_delete,
                :parameters => tc.split,
                :no_fail    => true
            } if !tc.empty?
    } if tm_command

    action.run(steps)
end
#
# ATTACHNIC action to attach a new nic interface
#
def attach_nic(id, drv_message)
    xml_data = decode(drv_message)

    # Parse the mandatory NIC attributes; a missing one raises and is
    # reported back as a failure
    begin
        source = xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/BRIDGE"]
        source_ovs =
            xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/BRIDGE_OVS"]
        mac    = xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/MAC"]

        source     = source.text.strip
        source_ovs = source_ovs.text.strip if source_ovs
        mac        = mac.text.strip
    rescue
        # Fix: the local 'action' was referenced here before being assigned
        # (it is only set further down), which raised NameError instead of
        # reporting the failure to oned.
        send_message(ACTION[:attach_nic], RESULT[:failure], id,
            "Error in #{ACTION[:attach_nic]}, BRIDGE and MAC needed in NIC")
        return
    end

    # Optional attributes default to "-" so the argument list stays fixed
    model = xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/MODEL"]
    model = model.text if !model.nil?
    model = model.strip if !model.nil?
    model = "-" if model.nil?

    net_drv = xml_data.elements["NET_DRV"]
    net_drv = net_drv.text if !net_drv.nil?
    net_drv = net_drv.strip if !net_drv.nil?
    net_drv = "-" if net_drv.nil?

    # Open vSwitch networks use the OVS bridge instead of the Linux bridge
    source = source_ovs if net_drv == 'ovswitch' && source_ovs

    action = VmmAction.new(self, id, :attach_nic, drv_message)

    steps=[
        # Execute pre-attach networking setup
        {
            :driver => :vnm,
            :action => :pre
        },
        # Attach the new NIC
        {
            :driver     => :vmm,
            :action     => :attach_nic,
            :parameters => [:deploy_id, mac, source, model, net_drv]
        },
        # Execute post-boot networking setup
        {
            :driver       => :vnm,
            :action       => :post,
            :parameters   => [:deploy_id],
            :fail_actions => [
                # Rollback: remove the NIC if the network setup fails
                {
                    :driver     => :vmm,
                    :action     => :detach_nic,
                    :parameters => [:deploy_id, mac]
                }
            ]
        }
    ]

    action.run(steps)
end
#
# DETACHNIC action to detach a nic interface
#
def detach_nic(id, drv_message)
    xml_data = decode(drv_message)

    # The MAC of the NIC to detach is mandatory
    begin
        mac = xml_data.elements["VM/TEMPLATE/NIC[ATTACH='YES']/MAC"]
        mac = mac.text.strip
    rescue
        # Fix: the local 'action' was referenced here before being assigned
        # (it is only set further down), which raised NameError instead of
        # reporting the failure to oned.
        send_message(ACTION[:detach_nic], RESULT[:failure], id,
            "Error in #{ACTION[:detach_nic]}, MAC needed in NIC")
        return
    end

    action = VmmAction.new(self, id, :detach_nic, drv_message)

    steps=[
        # Detach the NIC
        {
            :driver     => :vmm,
            :action     => :detach_nic,
            :parameters => [:deploy_id, mac]
        },
        # Clean networking setup
        {
            :driver => :vnm,
            :action => :clean
        }
    ]

    action.run(steps)
end
#
# DISKSNAPSHOTCREATE action, takes a snapshot of a disk
#
def disk_snapshot_create(id, drv_message)
    snap_action = prepare_snap_action(id, drv_message,
                                      :disk_snapshot_create)

    action       = snap_action[:action]
    strategy     = snap_action[:strategy]
    drv_message  = snap_action[:drv_message]
    target       = snap_action[:target]
    target_index = snap_action[:target_index]
    xml_data     = snap_action[:xml_data]

    # Get TM command
    tm_command = ensure_xpath(xml_data, id, action, 'TM_COMMAND') || return

    # Build the process
    case strategy
    when :live
        # Snapshot without stopping the VM: delegate to the <TM>_LIVE script
        tm_command_split     = tm_command.split
        tm_command_split[0] += "_LIVE"

        steps = [
            {
                :driver     => :tm,
                :action     => :tm_snap_create_live,
                :parameters => tm_command_split
            }
        ]
    when :detach
        # Detach only the affected disk while the snapshot is taken
        steps = [
            # detach_disk or save
            {
                :driver     => :vmm,
                :action     => :detach_disk,
                :parameters => [:deploy_id, :disk_target_path, target,
                                target_index]
            },
            # run TM (:no_fail so the disk is always re-attached)
            {
                :driver     => :tm,
                :action     => :tm_snap_create,
                :parameters => tm_command.split,
                :no_fail    => true
            },
            # attach_disk or restore
            {
                :driver     => :vmm,
                :action     => :attach_disk,
                :parameters => [:deploy_id, :disk_target_path, target,
                                target_index, drv_message]
            }
        ]
    when :suspend
        # Suspend the whole VM (save state + clean network) around the snapshot
        steps = [
            # detach_disk or save
            {
                :driver     => :vmm,
                :action     => :save,
                :parameters => [:deploy_id, :checkpoint_file, :host]
            },
            # network drivers (clean)
            {
                :driver => :vnm,
                :action => :clean
            },
            # run TM (:no_fail so the VM is always restored)
            {
                :driver     => :tm,
                :action     => :tm_snap_create,
                :parameters => tm_command.split,
                :no_fail    => true
            },
            # network drivers (pre)
            {
                :driver => :vnm,
                :action => :pre
            },
            # attach_disk or restore
            {
                :driver     => :vmm,
                :action     => :restore,
                :parameters => [:checkpoint_file, :host, :deploy_id]
            },
            # network drivers (post)
            {
                :driver       => :vnm,
                :action       => :post,
                :parameters   => [:deploy_id],
                :fail_actions => [
                    # Rollback: destroy the VM if the network setup fails
                    {
                        :driver     => :vmm,
                        :action     => :cancel,
                        :parameters => [:deploy_id, :host]
                    }
                ]
            }
        ]
    else
        # Unknown strategy: nothing to do
        return
    end

    action.run(steps)
end
#
# DISKSNAPSHOTREVERT action, reverts a disk to a previously taken snapshot
#
def disk_snapshot_revert(id, drv_message)
    snap_action = prepare_snap_action(id, drv_message,
                                      :disk_snapshot_revert)

    action       = snap_action[:action]
    # NOTE(review): unlike disk_snapshot_create, the strategy comes straight
    # from the driver options and not from snap_action[:strategy]; a computed
    # :live strategy is therefore never used here — confirm this is intended.
    strategy     = @options[:snapshots_strategy]
    drv_message  = snap_action[:drv_message]
    target       = snap_action[:target]
    target_index = snap_action[:target_index]
    xml_data     = snap_action[:xml_data]

    # Get TM command
    tm_command = ensure_xpath(xml_data, id, action, 'TM_COMMAND') || return

    case strategy
    when :detach
        # Detach only the affected disk while the snapshot is reverted
        steps = [
            # Save VM state / detach the disk
            {
                :driver     => :vmm,
                :action     => :detach_disk,
                :parameters => [:deploy_id, :disk_target_path, target, target_index]
            },
            # Do the snapshot (:no_fail so the disk is always re-attached)
            {
                :driver     => :tm,
                :action     => :tm_snap_revert,
                :parameters => tm_command.split,
                :no_fail    => true,
            },
            # Restore VM / attach the disk
            {
                :driver     => :vmm,
                :action     => :attach_disk,
                :parameters => [:deploy_id, :disk_target_path, target, target_index,
                                drv_message]
            }
        ]
    when :suspend
        # Suspend the whole VM (save state + clean network) around the revert
        steps = [
            # Save VM state / detach the disk
            {
                :driver     => :vmm,
                :action     => :save,
                :parameters => [:deploy_id, :checkpoint_file, :host]
            },
            # network drivers (clean)
            {
                :driver => :vnm,
                :action => :clean
            },
            # Do the snapshot (:no_fail so the VM is always restored)
            {
                :driver     => :tm,
                :action     => :tm_snap_revert,
                :parameters => tm_command.split,
                :no_fail    => true,
            },
            # network drivers (pre)
            {
                :driver => :vnm,
                :action => :pre
            },
            # Restore VM / attach the disk
            {
                :driver     => :vmm,
                :action     => :restore,
                :parameters => [:checkpoint_file, :host, :deploy_id]
            },
            # network drivers (post)
            {
                :driver       => :vnm,
                :action       => :post,
                :parameters   => [:deploy_id],
                :fail_actions => [
                    # Rollback: destroy the VM if the network setup fails
                    {
                        :driver     => :vmm,
                        :action     => :cancel,
                        :parameters => [:deploy_id, :host]
                    }
                ]
            }
        ]
    else
        # Unknown strategy: nothing to do
        return
    end

    action.run(steps)
end
private
# Fetches the stripped text of the element at +xpath+; when the element is
# missing or its text is blank, reports a FAILURE message to oned and
# returns nil so callers can chain with `|| return`.
def ensure_xpath(xml_data, id, action, xpath)
    node = xml_data.elements[xpath]
    text = node.text.strip
    raise if text.empty?
    text
rescue
    send_message(action, RESULT[:failure], id,
        "Cannot perform #{action}, expecting #{xpath}")
    nil
end
# Builds the common context for the disk snapshot actions.
#
# @param [String] id of the VM
# @param [String] drv_message Base64 encoded XML sent by oned
# @param [Symbol] action :disk_snapshot_create or :disk_snapshot_revert
# @return [Hash] with :action (a VmmAction), :strategy, :drv_message
#   (re-encoded with ATTACH='YES' on the active disk), :target,
#   :target_index and :xml_data
def prepare_snap_action(id, drv_message, action)
    xml_data = decode(drv_message)

    # Make sure disk target has been defined
    target_xpath = "VM/TEMPLATE/DISK[DISK_SNAPSHOT_ACTIVE='YES']/TARGET"
    # NOTE(review): '|| return' makes this method return nil on error, but
    # callers index the result (snap_action[:action]) without a nil check —
    # confirm the intended error path.
    target = ensure_xpath(xml_data, id, ACTION[action],
                          target_xpath) || return

    # Map the target device letter to its index: 'a' -> 0, 'b' -> 1, ...
    target_index = target.downcase[-1..-1].unpack('c').first - 97

    # Always send ATTACH='YES' for the selected target in case it will end
    # up being a 'detach' strategy
    disk = xml_data.elements[target_xpath].parent
    attach = REXML::Element.new('ATTACH')
    attach.add_text('YES')
    disk.add(attach)

    drv_message = Base64.encode64(xml_data.to_s)

    action = VmmAction.new(self, id, action, drv_message)

    # Determine the strategy
    vmm_driver_path = 'VM/HISTORY_RECORDS/HISTORY/VMMMAD'
    tm_driver_path  = "VM/TEMPLATE/DISK[DISK_SNAPSHOT_ACTIVE='YES']/TM_MAD"

    vmm_driver = ensure_xpath(xml_data, id, action, vmm_driver_path) || return
    tm_driver  = ensure_xpath(xml_data, id, action, tm_driver_path) || return

    strategy = @options[:snapshots_strategy]

    # Upgrade to a live snapshot when this vmm/tm driver pair supports it
    if @options[:live_snapshots] && LIVE_DISK_SNAPSHOTS.include?("#{vmm_driver}-#{tm_driver}")
        strategy = :live
    end

    {
        :action       => action,
        :strategy     => strategy,
        :drv_message  => drv_message,
        :target       => target,
        :target_index => target_index,
        :xml_data     => xml_data
    }
end
end
################################################################################
#
# Virtual Machine Manager Execution Driver - Main Program
#
################################################################################
# vmm/tm driver pairs that support live disk snapshots, taken from the
# environment as a blank-separated list
if ENV['LIVE_DISK_SNAPSHOTS']
    LIVE_DISK_SNAPSHOTS = ENV['LIVE_DISK_SNAPSHOTS'].split
else
    LIVE_DISK_SNAPSHOTS = []
end

# Command line options of the driver
opts = GetoptLong.new(
    [ '--retries',           '-r', GetoptLong::OPTIONAL_ARGUMENT ],
    [ '--threads',           '-t', GetoptLong::OPTIONAL_ARGUMENT ],
    [ '--local',             '-l', GetoptLong::REQUIRED_ARGUMENT ],
    [ '--shell',             '-s', GetoptLong::REQUIRED_ARGUMENT ],
    [ '--parallel',          '-p', GetoptLong::NO_ARGUMENT ],
    [ '--live-snapshots',    '-i', GetoptLong::NO_ARGUMENT ],
    [ '--default-snapshots', '-d', GetoptLong::REQUIRED_ARGUMENT ]
)

# Defaults
hypervisor         = ''
retries            = 0
threads            = 15
shell              = 'bash'
local_actions      = {}
single_host        = true
live_snapshots     = false
snapshots_strategy = :suspend # Either :detach or :suspend

begin
    opts.each do |opt, arg|
        case opt
        when '--retries'
            retries = arg.to_i
        when '--threads'
            threads = arg.to_i
        when '--local'
            local_actions = OpenNebulaDriver.parse_actions_list(arg)
        when '--shell'
            shell = arg
        when '--parallel'
            single_host = false
        when '--default-snapshots'
            snapshots_strategy = arg.to_sym
        when '--live-snapshots'
            live_snapshots = true
        end
    end
rescue Exception => e
    # Invalid command line: exit with a non-zero status
    exit(-1)
end

# The hypervisor name is the only mandatory positional argument
if ARGV.length >= 1
    hypervisor = ARGV.shift
else
    exit(-1)
end

exec_driver = ExecDriver.new(hypervisor,
                :concurrency        => threads,
                :retries            => retries,
                :local_actions      => local_actions,
                :shell              => shell,
                :single_host        => single_host,
                :snapshots_strategy => snapshots_strategy,
                :live_snapshots     => live_snapshots)

exec_driver.start_driver
|
# Copyright (C) 2007, 2008, 2009, 2010, 2011 The Collaborative Software Foundation
#
# This file is part of TriSano.
#
# TriSano is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# TriSano is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with TriSano. If not, see http://www.gnu.org/licenses/agpl-3.0.txt.
class FormElement < ActiveRecord::Base
include Trisano::CorePathCallable

# presumably removes associated questions first — see delete_questions
before_destroy :delete_questions

# Elements form a nested-set tree per form/library, scoped by tree_id
acts_as_nested_set :scope => :tree_id

belongs_to :form
belongs_to :export_column

# sucks, but here so we can load the entire form tree as objects in one go
has_one :question, :foreign_key => "form_element_id", :dependent => :destroy

# Library roots: elements with no parent that belong to no form
named_scope :library_roots, :conditions => {
    :parent_id => nil,
    :form_id => nil
}

# Transient parent id used by save_and_add_to_form
attr_accessor :parent_element_id

# Separator used in export lookups
@@export_lookup_separator = "|||"

class InvalidFormStructure < ActiveRecord::ActiveRecordError; end
class IllegalCopyOperation < ActiveRecord::ActiveRecordError; end
# Generic save_and_add_to_form. Sub-classes with special needs override. Block can be used to add other
# post-saving activities in the transaction
#
# @return [true] on success, [nil] when invalid or when anything raises
def save_and_add_to_form
    if self.valid?
        begin
            transaction do
                parent_element = FormElement.find(parent_element_id)
                # Inherit tree/form placement from the parent
                self.tree_id = parent_element.tree_id
                self.form_id = parent_element.form_id
                self.save(false) # skip validations; checked by valid? above
                yield if block_given?
                parent_element.add_child(self)
                validate_form_structure
                return true
            end
        rescue Exception => ex
            # NOTE(review): rescuing Exception (not StandardError) also
            # swallows SystemExit/SignalException and discards 'ex' — confirm.
            return nil
        end
    end
end
# Updates the element's attributes and re-validates the form structure in
# one transaction.
#
# @return [true] on success, [nil] if the update or validation fails
def update_and_validate(attributes)
    begin
        transaction do
            if self.update_attributes(attributes)
                self.validate_form_structure
                return true
            else
                return nil
            end
        end
    rescue
        # A raise inside the transaction rolls it back; report failure
        return nil
    end
end
# Destroys the element, then re-validates the containing structure: the
# library tree when this element has no form, the form otherwise.
#
# @return [true] on success, [nil] on failure (transaction rolled back)
def destroy_and_validate
    begin
        transaction do
            self.destroy
            form.nil? ? validate_tree_structure : validate_form_structure
            return true
        end
    rescue
        return nil
    end
end
# Reorders this element's children to match the given id order, then
# re-validates the form structure.
#
# @param [Array] ids child ids in the desired order
# @return [true] on success, [nil] on failure (transaction rolled back)
def reorder_element_children(ids)
    begin
        transaction do
            self.reorder_children(ids)
            validate_form_structure
            return true
        end
    rescue
        return nil
    end
end
# Counts the direct children of the given STI class name within this tree.
def children_count_by_type(type_name)
    FormElement.calculate(:count, :type, :conditions => ["parent_id = ? and tree_id = ? and type = ?", self.id, self.tree_id, type_name])
end
# Direct children of the given STI class name, in left-to-right tree order.
def children_by_type(type_name)
    FormElement.find(:all, :conditions =>["parent_id = ? and tree_id = ? and type = ?", self.id, self.tree_id, type_name], :order => :lft)
end
# DEBT! Should make publish and add_to_library the same code
#
# Copies this element (with its subtree) into the library, either under the
# given group element or as a new library root.
#
# @return [FormElement] the copied root on success, [nil] on failure
def add_to_library(group_element=nil)
    begin
        transaction do
            options = { :parent => group_element, :is_template => true }
            # Reuse the group's tree, or start a brand new library tree
            options[:tree_id] = group_element ? group_element.tree_id : FormElement.next_tree_id
            result = copy_with_children(options)
            result.validate_tree_structure(self)
            return result
        end
    rescue
        return nil
    end
end
# Copies a library element (with children) underneath this element.
# Value sets may only be copied onto receivers that allow them
# (can_receive_value_set?). Returns true on success; on failure the
# exception message is added to this element's errors and re-raised.
def copy_from_library(library_element, options = {})
  begin
    transaction do
      if (library_element.class.name == "ValueSetElement" && !can_receive_value_set?)
        errors.add(:base, :failed_copy)
        raise IllegalCopyOperation
      end
      # Copy into this element's form and tree as a live (non-template) node;
      # explicit options may override.
      options = {
        :form_id => form_id,
        :tree_id => tree_id,
        :is_template => false }.merge(options)
      add_child(library_element.copy_with_children(options))
      validate_form_structure
      return true
    end
  rescue Exception => ex
    self.errors.add(:base, ex.message)
    raise
  end
end
# use this instead of cloning to spawn new nodes from old nodes
# The contract works like this:
#
# 1. Everything is cool, return the copied element
# 2. Not copiable. Return nil. Nothing copied, move along.
# 3. Something went terribly wrong. Raise exception.
#
# NOTE(review): `returning` is Rails 2's precursor of Object#tap. The copy
# is returned UNSAVED, with lft/rgt/parent_id cleared so nested-set
# bookkeeping is redone when the node is attached to its new tree.
def copy(options = {})
  options.symbolize_keys!
  returning self.class.new do |e|
    hash = {
      'form_id' => options[:form_id],
      'tree_id' => options[:tree_id],
      'is_template' => options[:is_template],
      'lft' => nil,
      'rgt' => nil,
      'parent_id' => nil
    }
    e.attributes = attributes.merge(hash)
  end
end
# Returns root node of the copied tree
#
# Copies this element and, recursively, all of its children into the
# tree given by options[:tree_id], attaching under options[:parent] when
# present. Returns nil without copying when #copy declines (see #copy's
# contract); raises when parent and tree_id disagree.
def copy_with_children(options = {})
  options.symbolize_keys!
  if options[:parent] and options[:parent].tree_id != options[:tree_id]
    raise("tree_id must match the parent element's tree_id, if parent element is not nil")
  end
  if e = copy(options)
    e.save!
    options[:parent].add_child(e) if options[:parent]
    # Children are copied under the freshly created node.
    copy_children(options.merge(:parent => e))
    e
  end
end
# Recursively copies each child subtree using the same options
# (options[:parent] is the already-copied counterpart of self).
def copy_children(options)
  children.each { |child| child.copy_with_children(options) }
end
# most form elements don't have a short name, so don't bother w/ db
# stoofs
#
# Default implementation: no short-name conflicts. Subclasses with short
# names override and return an array of conflict descriptions.
def compare_short_names(other_tree_element, options={})
  []
end
# Returns library elements (form_id IS NULL) matching a filter.
#
# With a blank :filter_by, all library roots are returned. Otherwise:
# * :direction == :to_library  -> GroupElements matching by name
# * anything else requires :type; :question_element matches on the
#   associated question's text, other types match on element name.
# NOTE(review): uses ILIKE, so this is PostgreSQL-specific SQL.
def self.filter_library(options)
  if options[:filter_by].blank?
    FormElement.roots(:conditions => ["form_id IS NULL"])
  else
    if options[:direction].to_sym == :to_library
      FormElement.find_by_sql(["SELECT * FROM form_elements WHERE form_id IS NULL AND type = 'GroupElement' and name ILIKE ? ", "%#{options[:filter_by]}%"])
    else
      raise Exception.new("No type specified for a from library filter") if options[:type].blank?
      if (options[:type] == :question_element)
        FormElement.find(:all,
          :conditions => ["form_id IS NULL AND type = ? AND form_elements.id IN (SELECT form_element_id FROM questions WHERE question_text ILIKE ?)", options[:type].to_s.camelcase, "%#{options[:filter_by]}%"],
          :include => [:question]
        )
      else
        FormElement.find_by_sql(["SELECT * FROM form_elements WHERE form_id IS NULL AND type = ? AND name ILIKE ?", options[:type].to_s.camelcase, "%#{options[:filter_by]}%"])
      end
    end
  end
end
# Checks the owning form's structural invariants. No-op for library
# elements (no form). On problems, each error is added to this element's
# errors and InvalidFormStructure is raised with the combined messages.
def validate_form_structure
  return if form.nil?
  structural_errors = form.structural_errors
  unless structural_errors.empty?
    structural_errors.each do |error|
      errors.add(:base, error)
    end
    raise InvalidFormStructure, errors.full_messages.join("\n")
  end
end
# Checks this element's tree invariants (see #structural_errors).
# When element_for_errors is given, the error symbols are attached to
# that element's errors collection. On any problem a RuntimeError is
# raised whose message is the inspected error array
# (NOTE(review): intentionally a bare string raise, unlike
# validate_form_structure's typed InvalidFormStructure).
def validate_tree_structure(element_for_errors=nil)
  structural_errors = self.structural_errors
  unless structural_errors.empty?
    if (element_for_errors)
      structural_errors.each do |error|
        element_for_errors.errors.add(:base, error)
      end
    end
    raise structural_errors.inspect
  end
end
# Contains generic nested set validation checks for the tree that this node is in.
#
# Form#structural_errors contains additional checks specific to a form tree.
#
# Returns an array of error symbols (empty when the tree is sound):
# :multiple_roots, :overlap (duplicate lft/rgt values), :structure_gaps
# (holes in the lft/rgt sequence), :orphans (parent_id pointing outside
# the tree), :corrupt_nesting (lft >= rgt).
# NOTE(review): tree_id is interpolated directly into the SQL; it comes
# from an integer column, but confirm it is never attacker-controlled.
def structural_errors
  structural_errors = []
  structural_errors << :multiple_roots if FormElement.find_by_sql("select id from form_elements where tree_id = #{self.tree_id} and parent_id is null").size > 1
  structural_errors << :overlap if FormElement.find_by_sql("
select result, count(*) from (SELECT lft as result FROM form_elements where tree_id = #{self.tree_id}
UNION ALL SELECT rgt FROM form_elements where tree_id = #{self.tree_id} order by result) as elements
group by result
having count(*) > 1;"
  ).size > 0
  structural_errors << :structure_gaps if FormElement.find_by_sql("
select l.result + 1 as start
from (SELECT lft as result FROM form_elements where tree_id = #{self.tree_id}
UNION SELECT rgt FROM form_elements where tree_id = #{self.tree_id} order by result) as l
left outer join (SELECT lft as result FROM form_elements where tree_id = #{self.tree_id}
UNION SELECT rgt FROM form_elements where tree_id = #{self.tree_id} order by result) as r on l.result + 1 = r.result
where r.result is null;"
  ).size > 1
  structural_errors << :orphans if FormElement.find_by_sql("
select id from form_elements where tree_id = #{self.tree_id} and parent_id not in (select id from form_elements where tree_id = #{self.tree_id});"
  ).size > 0
  structural_errors << :corrupt_nesting if FormElement.find_by_sql("select id, type, name, lft, rgt from form_elements where tree_id = #{self.tree_id} and lft >= rgt;").size > 0
  structural_errors
end
# Whether a value set may be copied beneath this element. The base
# implementation refuses; subclasses that accept value sets override.
def can_receive_value_set?
  false
end
# For condition-code elements, returns the external code as
# "code_name|||the_code" (see @@export_lookup_separator); nil otherwise.
# A missing ExternalCode is logged and re-raised as a translated error.
def code_condition_lookup
  if self.is_condition_code
    begin
      external_code = ExternalCode.find(self.condition)
      return "#{external_code.code_name}#{@@export_lookup_separator}#{external_code.the_code}"
    rescue Exception => ex
      logger.error ex
      raise(I18n.translate('form_element_could_not_find_external_code', :core_path => self.core_path))
    end
  end
end
# Returns "disease_group|||column_name" for this element's export column,
# or nil when none is linked. A missing column/disease group is logged
# and re-raised as a translated error that names the offending question
# or value set.
def cdc_export_column_lookup
  if self.export_column_id
    begin
      export_column = ExportColumn.find(self.export_column_id, :include => :export_disease_group)
      return "#{export_column.export_disease_group.name}#{@@export_lookup_separator}#{export_column.export_column_name}"
    rescue Exception => ex
      # Identify the element in user terms for the error message.
      if self.class.name == "QuestionElement"
        element_type = "question"
        identifier = self.question.question_text
      else
        element_type = "value set"
        identifier = self.name
      end
      logger.error ex
      raise(I18n.translate('form_element_export_column_or_disease_group_not_found', :element_type => element_type, :identifier => identifier))
    end
  end
end
# Returns "disease_group|||column|||value_from|||value_to" for this
# element's export conversion value, or nil when none is linked. On a
# lookup failure, builds a message that (for library elements) also names
# the library root at fault, then raises it as a RuntimeError.
def cdc_export_conversion_value_lookup
  if self.export_conversion_value_id
    begin
      export_conversion_value = ExportConversionValue.find(self.export_conversion_value_id)
      export_column = ExportColumn.find(export_conversion_value.export_column_id, :include => :export_disease_group)
      return "#{export_column.export_disease_group.name}#{@@export_lookup_separator}#{export_column.export_column_name}#{@@export_lookup_separator}#{export_conversion_value.value_from}#{@@export_lookup_separator}#{export_conversion_value.value_to}"
    rescue
      message = I18n.translate('form_element_something_not_found_for_value_element', :name => self.name)
      # Blank form_id means this is a library element; point at its root.
      if self.form_id.blank?
        message << " #{I18n.translate('form_element_library_element_at_fault', :name => self.root.class.human_name)} "
        if self.root.class.name == "QuestionElement"
          message << "'#{self.root.question.question_text}'."
        else
          message << "'#{self.root.name}'."
        end
      end
      raise message
    end
  end
end
# Public accessor for the "|||" separator used in export lookup strings.
def self.export_lookup_separator
  @@export_lookup_separator
end
# Deletes answers to questions under a follow up. Used to clear out answers
# to a follow up that no longer applies because its condition no longer matches
# the answer provided by the user.
#
# No-op unless follow_up is a FollowUpElement and event_id is present.
# Questions "under" the follow up are found via nested-set bounds
# (lft/rgt) within the same tree.
def self.delete_answers_to_follow_ups(event_id, follow_up)
  return unless follow_up.is_a?(FollowUpElement)
  unless (event_id.blank?)
    question_elements_to_delete = QuestionElement.find(:all, :include => :question,
      :conditions => ["lft > ? and rgt < ? and tree_id = ?", follow_up.lft, follow_up.rgt, follow_up.tree_id])
    question_elements_to_delete.each do |question_element|
      answer = Answer.find_by_event_id_and_question_id(event_id, question_element.question.id)
      answer.destroy unless answer.nil?
    end
  end
end
# Allocates a fresh tree id from the database sequence
# (PostgreSQL nextval on 'tree_id_generator').
def self.next_tree_id
  FormElement.find_by_sql("SELECT nextval('tree_id_generator')").first.nextval.to_i
end
# Delegates to the associated CoreField; raises NoMethodError when no
# core field element exists in the ancestry (core_field_element is nil).
def repeater?
  core_field.repeater?
end
# Looks up the CoreField record keyed by the nearest core field
# element's core_path.
def core_field
  CoreField.find_by_key(core_field_element.core_path)
end
# Returns the element's core_path attribute; when event_type is given,
# the leading "<something>_event[" prefix is replaced by event_type.
# NOTE(review): the sub consumes the "[" as well, so event_type is
# presumably expected to include it — confirm against callers.
# NOTE(review): `core_path ||=` assigns a fresh local each call (the
# local shadows this method); it works but the ||= is redundant.
def core_path(event_type=nil)
  core_path ||= read_attribute(:core_path)
  if event_type
    core_path.sub(/^(.+)_event\[/, event_type)
  else
    core_path
  end
end
# Walks up the ancestry to the nearest CoreFieldElement, starting with
# self. Returns nil when neither self nor any responding ancestor is one.
def core_field_element
  return self if is_a?(CoreFieldElement)
  return parent.core_field_element if parent and parent.respond_to?(:core_field_element)
  nil
end
protected

# A little hack to make sure that questions get deleted when a
# question element is deleted as part of a larger pruning operation.
#
# By default, acts_as_nested prunes children using delete_all. It
# can be configured to use destroy, but that has two problems
# 1) It's slow, 2) It's broken (it leaves gaps in the set).
#
# Collects the ids of QuestionElement children (others map to nil, which
# the IN (?) condition simply never matches) and bulk-deletes their
# Question rows. Wired up via before_destroy.
def delete_questions
  questions = self.children.collect {|child| child.id if child.is_a? QuestionElement}
  Question.delete_all ['form_element_id IN (?)', questions]
end
end
Better logging of form element create errors
# Copyright (C) 2007, 2008, 2009, 2010, 2011 The Collaborative Software Foundation
#
# This file is part of TriSano.
#
# TriSano is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# TriSano is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with TriSano. If not, see http://www.gnu.org/licenses/agpl-3.0.txt.
class FormElement < ActiveRecord::Base
include Trisano::CorePathCallable
before_destroy :delete_questions
acts_as_nested_set :scope => :tree_id
belongs_to :form
belongs_to :export_column
# sucks, but here so we can load the entire form tree as objects in one go
has_one :question, :foreign_key => "form_element_id", :dependent => :destroy
named_scope :library_roots, :conditions => {
:parent_id => nil,
:form_id => nil
}
attr_accessor :parent_element_id
@@export_lookup_separator = "|||"
class InvalidFormStructure < ActiveRecord::ActiveRecordError; end
class IllegalCopyOperation < ActiveRecord::ActiveRecordError; end
# Generic save_and_add_to_form. Sub-classes with special needs override. Block can be used to add other
# post-saving activities in the transaction.
#
# Returns true when the element is saved and attached under
# parent_element_id; returns nil when validation fails or anything
# raised inside the transaction rolls it back (the error is logged).
def save_and_add_to_form
  if self.valid?
    begin
      transaction do
        parent_element = FormElement.find(parent_element_id)
        self.tree_id = parent_element.tree_id
        self.form_id = parent_element.form_id
        self.save(false) # skip validations; #valid? already ran above
        yield if block_given?
        parent_element.add_child(self)
        validate_form_structure
        return true
      end
    rescue Exception => ex
      # FIX: join("\n") — the original join('\n') (single quotes) joined
      # the backtrace with a literal backslash-n, producing one unreadable
      # log line instead of one frame per line.
      Rails.logger.error "Unable to save form element #{self.inspect}: #{ex.message}.\n#{ex.backtrace.join("\n")}"
      return nil
    end
  end
end
def update_and_validate(attributes)
begin
transaction do
if self.update_attributes(attributes)
self.validate_form_structure
return true
else
return nil
end
end
rescue
return nil
end
end
def destroy_and_validate
begin
transaction do
self.destroy
form.nil? ? validate_tree_structure : validate_form_structure
return true
end
rescue
return nil
end
end
def reorder_element_children(ids)
begin
transaction do
self.reorder_children(ids)
validate_form_structure
return true
end
rescue
return nil
end
end
def children_count_by_type(type_name)
FormElement.calculate(:count, :type, :conditions => ["parent_id = ? and tree_id = ? and type = ?", self.id, self.tree_id, type_name])
end
def children_by_type(type_name)
FormElement.find(:all, :conditions =>["parent_id = ? and tree_id = ? and type = ?", self.id, self.tree_id, type_name], :order => :lft)
end
# DEBT! Should make publish and add_to_library the same code
def add_to_library(group_element=nil)
begin
transaction do
options = { :parent => group_element, :is_template => true }
options[:tree_id] = group_element ? group_element.tree_id : FormElement.next_tree_id
result = copy_with_children(options)
result.validate_tree_structure(self)
return result
end
rescue
return nil
end
end
def copy_from_library(library_element, options = {})
begin
transaction do
if (library_element.class.name == "ValueSetElement" && !can_receive_value_set?)
errors.add(:base, :failed_copy)
raise IllegalCopyOperation
end
options = {
:form_id => form_id,
:tree_id => tree_id,
:is_template => false }.merge(options)
add_child(library_element.copy_with_children(options))
validate_form_structure
return true
end
rescue Exception => ex
self.errors.add(:base, ex.message)
raise
end
end
# use this instead of cloning to spawn new nodes from old nodes
# The contract works like this:
#
# 1. Everything is cool, return the copied element
# 2. Not copiable. Return nil. Nothing copied, move along.
# 3. Something went terribly wrong. Raise exception.
#
def copy(options = {})
options.symbolize_keys!
returning self.class.new do |e|
hash = {
'form_id' => options[:form_id],
'tree_id' => options[:tree_id],
'is_template' => options[:is_template],
'lft' => nil,
'rgt' => nil,
'parent_id' => nil
}
e.attributes = attributes.merge(hash)
end
end
# Returns root node of the copied tree
def copy_with_children(options = {})
options.symbolize_keys!
if options[:parent] and options[:parent].tree_id != options[:tree_id]
raise("tree_id must match the parent element's tree_id, if parent element is not nil")
end
if e = copy(options)
e.save!
options[:parent].add_child(e) if options[:parent]
copy_children(options.merge(:parent => e))
e
end
end
def copy_children(options)
children.each do |child|
child.copy_with_children(options)
end
end
# most form elements don't have a short name, so don't bother w/ db
# stoofs
def compare_short_names(other_tree_element, options={})
[]
end
def self.filter_library(options)
if options[:filter_by].blank?
FormElement.roots(:conditions => ["form_id IS NULL"])
else
if options[:direction].to_sym == :to_library
FormElement.find_by_sql(["SELECT * FROM form_elements WHERE form_id IS NULL AND type = 'GroupElement' and name ILIKE ? ", "%#{options[:filter_by]}%"])
else
raise Exception.new("No type specified for a from library filter") if options[:type].blank?
if (options[:type] == :question_element)
FormElement.find(:all,
:conditions => ["form_id IS NULL AND type = ? AND form_elements.id IN (SELECT form_element_id FROM questions WHERE question_text ILIKE ?)", options[:type].to_s.camelcase, "%#{options[:filter_by]}%"],
:include => [:question]
)
else
FormElement.find_by_sql(["SELECT * FROM form_elements WHERE form_id IS NULL AND type = ? AND name ILIKE ?", options[:type].to_s.camelcase, "%#{options[:filter_by]}%"])
end
end
end
end
def validate_form_structure
return if form.nil?
structural_errors = form.structural_errors
unless structural_errors.empty?
structural_errors.each do |error|
errors.add(:base, error)
end
raise InvalidFormStructure, errors.full_messages.join("\n")
end
end
def validate_tree_structure(element_for_errors=nil)
structural_errors = self.structural_errors
unless structural_errors.empty?
if (element_for_errors)
structural_errors.each do |error|
element_for_errors.errors.add(:base, error)
end
end
raise structural_errors.inspect
end
end
# Contains generic nested set validation checks for the tree that this node is in.
#
# Form#structural_errors contains additional checks specific to a form tree.
def structural_errors
structural_errors = []
structural_errors << :multiple_roots if FormElement.find_by_sql("select id from form_elements where tree_id = #{self.tree_id} and parent_id is null").size > 1
structural_errors << :overlap if FormElement.find_by_sql("
select result, count(*) from (SELECT lft as result FROM form_elements where tree_id = #{self.tree_id}
UNION ALL SELECT rgt FROM form_elements where tree_id = #{self.tree_id} order by result) as elements
group by result
having count(*) > 1;"
).size > 0
structural_errors << :structure_gaps if FormElement.find_by_sql("
select l.result + 1 as start
from (SELECT lft as result FROM form_elements where tree_id = #{self.tree_id}
UNION SELECT rgt FROM form_elements where tree_id = #{self.tree_id} order by result) as l
left outer join (SELECT lft as result FROM form_elements where tree_id = #{self.tree_id}
UNION SELECT rgt FROM form_elements where tree_id = #{self.tree_id} order by result) as r on l.result + 1 = r.result
where r.result is null;"
).size > 1
structural_errors << :orphans if FormElement.find_by_sql("
select id from form_elements where tree_id = #{self.tree_id} and parent_id not in (select id from form_elements where tree_id = #{self.tree_id});"
).size > 0
structural_errors << :corrupt_nesting if FormElement.find_by_sql("select id, type, name, lft, rgt from form_elements where tree_id = #{self.tree_id} and lft >= rgt;").size > 0
structural_errors
end
def can_receive_value_set?
return false
end
def code_condition_lookup
if self.is_condition_code
begin
external_code = ExternalCode.find(self.condition)
return "#{external_code.code_name}#{@@export_lookup_separator}#{external_code.the_code}"
rescue Exception => ex
logger.error ex
raise(I18n.translate('form_element_could_not_find_external_code', :core_path => self.core_path))
end
end
end
def cdc_export_column_lookup
if self.export_column_id
begin
export_column = ExportColumn.find(self.export_column_id, :include => :export_disease_group)
return "#{export_column.export_disease_group.name}#{@@export_lookup_separator}#{export_column.export_column_name}"
rescue Exception => ex
if self.class.name == "QuestionElement"
element_type = "question"
identifier = self.question.question_text
else
element_type = "value set"
identifier = self.name
end
logger.error ex
raise(I18n.translate('form_element_export_column_or_disease_group_not_found', :element_type => element_type, :identifier => identifier))
end
end
end
def cdc_export_conversion_value_lookup
if self.export_conversion_value_id
begin
export_conversion_value = ExportConversionValue.find(self.export_conversion_value_id)
export_column = ExportColumn.find(export_conversion_value.export_column_id, :include => :export_disease_group)
return "#{export_column.export_disease_group.name}#{@@export_lookup_separator}#{export_column.export_column_name}#{@@export_lookup_separator}#{export_conversion_value.value_from}#{@@export_lookup_separator}#{export_conversion_value.value_to}"
rescue
message = I18n.translate('form_element_something_not_found_for_value_element', :name => self.name)
if self.form_id.blank?
message << " #{I18n.translate('form_element_library_element_at_fault', :name => self.root.class.human_name)} "
if self.root.class.name == "QuestionElement"
message << "'#{self.root.question.question_text}'."
else
message << "'#{self.root.name}'."
end
end
raise message
end
end
end
def self.export_lookup_separator
@@export_lookup_separator
end
# Deletes answers to questions under a follow up. Used to clear out answers
# to a follow up that no longer applies because its condition no longer matches
# the answer provided by the user.
def self.delete_answers_to_follow_ups(event_id, follow_up)
return unless follow_up.is_a?(FollowUpElement)
unless (event_id.blank?)
question_elements_to_delete = QuestionElement.find(:all, :include => :question,
:conditions => ["lft > ? and rgt < ? and tree_id = ?", follow_up.lft, follow_up.rgt, follow_up.tree_id])
question_elements_to_delete.each do |question_element|
answer = Answer.find_by_event_id_and_question_id(event_id, question_element.question.id)
answer.destroy unless answer.nil?
end
end
end
def self.next_tree_id
FormElement.find_by_sql("SELECT nextval('tree_id_generator')").first.nextval.to_i
end
def repeater?
core_field.repeater?
end
def core_field
CoreField.find_by_key(core_field_element.core_path)
end
def core_path(event_type=nil)
core_path ||= read_attribute(:core_path)
if event_type
core_path.sub(/^(.+)_event\[/, event_type)
else
core_path
end
end
def core_field_element
if self.is_a?(CoreFieldElement)
return self
elsif self.parent and self.parent.respond_to?(:core_field_element)
return self.parent.core_field_element
else
return nil
end
end
protected
# A little hack to make sure that questions get deleted when a
# question element is deleted as part of a larger pruning operation.
#
# By default, acts_as_nested prunes children using delete_all. It
# can be configured to use destroy, but that has two problems
# 1) It's slow, 2) It's broken (it leaves gaps in the set).
def delete_questions
questions = self.children.collect {|child| child.id if child.is_a? QuestionElement}
Question.delete_all ['form_element_id IN (?)', questions]
end
end
|
# -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'whenever-elasticbeanstalk/version'
# Gem specification for whenever-elasticbeanstalk.
Gem::Specification.new do |gem|
  gem.name          = "whenever-elasticbeanstalk"
  gem.version       = Whenever::Elasticbeanstalk::VERSION
  gem.authors       = ["Chad McGimpsey"]
  gem.email         = ["chad.mcgimpsey@gmail.com"]
  gem.description   = %q{Use whenever on AWS Elastic Beanstalk}
  gem.summary       = %q{Allows you to run cron jobs easily on one or all AWS Elastic Beanstalk instances.}
  gem.homepage      = "https://github.com/dignoe/whenever-elasticbeanstalk"

  # NOTE(review): both runtime dependencies are unpinned; consider
  # version constraints to guard against breaking upstream releases.
  gem.add_dependency('whenever')
  gem.add_dependency('aws-sdk')

  # Package every git-tracked file; executables are the tracked files
  # under bin/, derived from the same list.
  gem.files         = `git ls-files`.split($/)
  gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]
end
Changed the way that executables are added
# -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'whenever-elasticbeanstalk/version'
# Gem specification for whenever-elasticbeanstalk.
Gem::Specification.new do |gem|
  gem.name          = "whenever-elasticbeanstalk"
  gem.version       = Whenever::Elasticbeanstalk::VERSION
  gem.platform      = Gem::Platform::RUBY
  gem.authors       = ["Chad McGimpsey"]
  gem.email         = ["chad.mcgimpsey@gmail.com"]
  gem.description   = %q{Use Whenever on AWS Elastic Beanstalk}
  gem.summary       = %q{Allows you to run cron jobs easily on one or all AWS Elastic Beanstalk instances.}
  gem.homepage      = "https://github.com/dignoe/whenever-elasticbeanstalk"

  # NOTE(review): both runtime dependencies are unpinned; consider
  # version constraints to guard against breaking upstream releases.
  gem.add_dependency('whenever')
  gem.add_dependency('aws-sdk')

  # Package every git-tracked file; executables are taken directly from
  # the tracked contents of bin/.
  gem.files         = `git ls-files`.split($/)
  gem.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]
end
|
#!/usr/bin/env ruby
# -------------------------------------------------------------------------- #
# Copyright 2002-2011, OpenNebula Project Leads (OpenNebula.org) #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
$: << File.dirname(__FILE__)
require 'rexml/document'
require 'OpenNebulaNic'
# Driver defaults; :start_vlan is the first VLAN id handed out.
CONF = {
  :start_vlan => 2
}

# External commands the network drivers shell out to (mostly via sudo).
# Paths are distribution-specific; adjust as needed.
COMMANDS = {
  :ebtables => "sudo /sbin/ebtables",
  :iptables => "sudo /sbin/iptables",
  :brctl => "sudo /usr/sbin/brctl",
  :ip => "sudo /sbin/ip",
  :vconfig => "sudo /sbin/vconfig",
  :virsh => "virsh -c qemu:///system",
  :xm => "sudo /usr/sbin/xm",
  :ovs_vsctl=> "sudo /usr/local/bin/ovs-vsctl",
  :lsmod => "/sbin/lsmod"
}
# Thin read-only wrapper around the REXML root of a VM template.
class VM
  def initialize(vm_root)
    @vm_root = vm_root
  end

  # Returns the text of the element at the given XPath, or nil when the
  # root is nil, the element is absent, or it has no text.
  # FIX: guard val against nil — the original called val.text on a
  # missing element and raised NoMethodError.
  def [](element)
    if @vm_root
      val = @vm_root.elements[element]
      if val && val.text
        return val.text
      end
    end
    nil
  end
end
# Parses a VM template (XML) and exposes its NICs, with simple
# filtering, to the hypervisor-specific network drivers.
class OpenNebulaNetwork
  attr_reader :vm_info, :hypervisor, :nics

  # vm_tpl: XML string of the VM template.
  # hypervisor: "xen"/"kvm"; auto-detected from the host when nil.
  def initialize(vm_tpl, hypervisor=nil)
    @vm_root = REXML::Document.new(vm_tpl).root
    @vm = VM.new(@vm_root)
    @vm_info = Hash.new
    if !hypervisor
      hypervisor = detect_hypervisor
    end
    @hypervisor = hypervisor
    @nics = get_nics
    # Filtering starts disabled: all NICs are visible.
    @filtered_nics = @nics
  end

  # Restricts subsequent #process calls to NICs matching the filter
  # (delegated to Nics#get). Returns self for chaining.
  def filter(*filter)
    @filtered_nics = @nics.get(*filter)
    self
  end

  # Clears any active filter. Returns self for chaining.
  def unfilter
    @filtered_nics = @nics
    self
  end

  # Yields each currently-visible NIC to the block.
  def process(&block)
    if @filtered_nics
      @filtered_nics.each do |n|
        yield(n)
      end
    end
  end

  # Guesses the hypervisor from the host: "xen" when uname mentions xen,
  # "kvm" when the kvm module is loaded; nil otherwise.
  def detect_hypervisor
    uname_a = `uname -a`
    lsmod = `#{COMMANDS[:lsmod]}`
    if uname_a.match(/xen/i)
      "xen"
    elsif lsmod.match(/kvm/)
      "kvm"
    end
  end

  # Builds a Nics collection from the TEMPLATE/NIC elements; each NIC
  # attribute element becomes a symbol key (e.g. :mac, :ip).
  def get_nics
    nics = Nics.new(@hypervisor)
    @vm_root.elements.each("TEMPLATE/NIC") do |nic_element|
      nic = nics.new_nic
      nic_element.elements.each('*') do |nic_attribute|
        key = nic_attribute.xpath.split('/')[-1].downcase.to_sym
        nic[key] = nic_attribute.text
      end
      nic.get_info(@vm)
      nic.get_tap
      nics << nic
    end
    nics
  end

  # Parses `brctl show` into { bridge_name => [interfaces...] }.
  # Continuation lines (single column) belong to the previous bridge.
  def get_interfaces
    bridges = Hash.new
    brctl_exit =`#{COMMANDS[:brctl]} show`
    cur_bridge = ""
    brctl_exit.split("\n")[1..-1].each do |l|
      l = l.split
      if l.length > 1
        cur_bridge = l[0]
        bridges[cur_bridge] = Array.new
        bridges[cur_bridge] << l[3]
      else
        bridges[cur_bridge] << l[0]
      end
    end
    bridges
  end
end
feature #476: move all methods handling VM info to the VM class
#!/usr/bin/env ruby
# -------------------------------------------------------------------------- #
# Copyright 2002-2011, OpenNebula Project Leads (OpenNebula.org) #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
$: << File.dirname(__FILE__)
require 'rexml/document'
require 'OpenNebulaNic'
# Debug helper: prints msg framed by 80-dash rules. Strings are printed
# verbatim; anything else is pretty-printed. Returns nil.
def log(msg)
  require 'pp'
  puts "\n" + "-" * 80
  msg.instance_of?(String) ? puts(msg) : pp(msg)
  puts "-" * 80
  puts
end
CONF = {
:start_vlan => 2
}
COMMANDS = {
:ebtables => "sudo /sbin/ebtables",
:iptables => "sudo /sbin/iptables",
:brctl => "sudo /usr/sbin/brctl",
:ip => "sudo /sbin/ip",
:vconfig => "sudo /sbin/vconfig",
:virsh => "virsh -c qemu:///system",
:xm => "sudo /usr/sbin/xm",
:ovs_vsctl=> "sudo /usr/local/bin/ovs-vsctl",
:lsmod => "/sbin/lsmod"
}
# Wraps the REXML root of a VM template and owns the NIC collection
# (built for the given hypervisor at construction time).
class VM
  attr_accessor :nics, :filtered_nics

  def initialize(vm_root, hypervisor)
    @vm_root = vm_root
    @hypervisor = hypervisor
    get_nics
  end

  # Returns the text of the element at the given XPath, or nil when the
  # root is nil, the element is absent, or it has no text.
  # FIX: guard val against nil — the original called val.text on a
  # missing element and raised NoMethodError.
  def [](element)
    if @vm_root
      val = @vm_root.elements[element]
      if val && val.text
        return val.text
      end
    end
    nil
  end

  # Builds the Nics collection from TEMPLATE/NIC elements; each NIC
  # attribute element becomes a symbol key (e.g. :mac, :ip). Also resets
  # the active filter to "all NICs".
  def get_nics
    nics = Nics.new(@hypervisor)
    @vm_root.elements.each("TEMPLATE/NIC") do |nic_element|
      nic = nics.new_nic
      nic_element.elements.each('*') do |nic_attribute|
        key = nic_attribute.xpath.split('/')[-1].downcase.to_sym
        nic[key] = nic_attribute.text
      end
      nic.get_info(self)
      nic.get_tap
      nics << nic
    end
    @nics = nics
    @filtered_nics = nics
  end
end
# Driver front-end: builds a VM (which owns the NICs) from a template
# and provides NIC filtering/iteration plus host introspection.
class OpenNebulaNetwork
  attr_reader :hypervisor, :vm

  # vm_tpl: XML string of the VM template.
  # hypervisor: "xen"/"kvm"; auto-detected from the host when nil.
  def initialize(vm_tpl, hypervisor=nil)
    hypervisor = detect_hypervisor if !hypervisor
    @vm = VM.new(REXML::Document.new(vm_tpl).root, hypervisor)
  end

  # Restricts subsequent #process calls to NICs matching the filter
  # (delegated to Nics#get). Returns self for chaining.
  def filter(*filter)
    @vm.filtered_nics = @vm.nics.get(*filter)
    self
  end

  # Clears any active filter. Returns self for chaining.
  def unfilter
    @vm.filtered_nics = @vm.nics
    self
  end

  # Yields each currently-visible NIC to the block.
  def process(&block)
    if @vm.filtered_nics
      @vm.filtered_nics.each do |n|
        yield(n)
      end
    end
  end

  # Guesses the hypervisor from the host: "xen" when uname mentions xen,
  # "kvm" when the kvm module is loaded; nil otherwise.
  def detect_hypervisor
    uname_a = `uname -a`
    lsmod = `#{COMMANDS[:lsmod]}`
    if uname_a.match(/xen/i)
      "xen"
    elsif lsmod.match(/kvm/)
      "kvm"
    end
  end

  # Parses `brctl show` into { bridge_name => [interfaces...] }.
  # Continuation lines (single column) belong to the previous bridge.
  def get_interfaces
    bridges = Hash.new
    brctl_exit =`#{COMMANDS[:brctl]} show`
    cur_bridge = ""
    brctl_exit.split("\n")[1..-1].each do |l|
      l = l.split
      if l.length > 1
        cur_bridge = l[0]
        bridges[cur_bridge] = Array.new
        bridges[cur_bridge] << l[3]
      else
        bridges[cur_bridge] << l[0]
      end
    end
    bridges
  end
end
|
require 'test_helper'
#
# == MenuHelper Test
#
class MenuHelperTest < ActionView::TestCase
  include MenuHelper
  # NOTE(review): no test cases yet — this currently only verifies that
  # the helper module loads and mixes in cleanly.
end
Add tests for menu_helper
require 'test_helper'
#
# == MenuHelper Test
#
# Exercises MenuHelper#set_active_class against the controller/action
# params seeded in initialize_test ('homes'/'index').
class MenuHelperTest < ActionView::TestCase
  include MenuHelper

  setup :initialize_test

  test 'should return active class for home page' do
    assert_equal set_active_class('homes', 'index'), 'l-nav-item-active'
  end

  test 'should not return active class if action params is show for home page' do
    assert_nil set_active_class('homes', 'show')
  end

  # With no action argument, only the controller is compared.
  test 'should return active class if action params is false for home page' do
    assert_equal set_active_class('homes'), 'l-nav-item-active'
  end

  test 'should not return active class for contact page' do
    assert_nil set_active_class('contacts', 'new')
  end

  test 'should not return active class if action params is false for contact page' do
    assert_nil set_active_class('contacts')
  end

  private

  # Seeds the request params every test runs against.
  def initialize_test
    params[:controller] = 'homes'
    params[:action] = 'index'
  end
end
|
# frozen_string_literal: true
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
lib = File.expand_path "lib", __dir__
$LOAD_PATH.unshift lib unless $LOAD_PATH.include? lib
require "gapic/common/version"
Gem::Specification.new do |spec|
spec.name = "gapic-common"
spec.version = Gapic::Common::VERSION
spec.authors = ["Google API Authors"]
spec.email = ["googleapis-packages@google.com"]
spec.licenses = ["Apache-2.0"]
spec.summary = "Common code for GAPIC-generated API clients"
spec.homepage = "https://github.com/googleapis/gapic-generator-ruby"
spec.files = Dir.glob("lib/**/*") + Dir.glob("*.md") +
["LICENSE", ".yardopts"]
spec.require_paths = ["lib"]
spec.platform = Gem::Platform::RUBY
spec.required_ruby_version = ">= 2.5"
spec.add_dependency "faraday", "~> 1.3"
spec.add_dependency "googleapis-common-protos", ">= 1.3.11", "< 2.a"
spec.add_dependency "googleapis-common-protos-types", ">= 1.0.6", "< 2.a"
spec.add_dependency "googleauth", ">= 0.16.2", "< 2.a"
spec.add_dependency "google-protobuf", "~> 3.14"
spec.add_dependency "grpc", "~> 1.36"
spec.add_development_dependency "google-cloud-core", "~> 1.5"
spec.add_development_dependency "google-style", "~> 1.25.1"
spec.add_development_dependency "minitest", "~> 5.14"
spec.add_development_dependency "minitest-autotest", "~> 1.0"
spec.add_development_dependency "minitest-focus", "~> 1.1"
spec.add_development_dependency "minitest-rg", "~> 5.2"
spec.add_development_dependency "rake", ">= 12.0"
spec.add_development_dependency "redcarpet", "~> 3.0"
spec.add_development_dependency "yard", "~> 0.9"
end
feat(gapic-common): Require googleauth 0.17 for proper support of JWT creds with scopes (#647)
# frozen_string_literal: true
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
lib = File.expand_path "lib", __dir__
$LOAD_PATH.unshift lib unless $LOAD_PATH.include? lib
require "gapic/common/version"
# Gem specification for gapic-common.
Gem::Specification.new do |spec|
  spec.name          = "gapic-common"
  spec.version       = Gapic::Common::VERSION
  spec.authors       = ["Google API Authors"]
  spec.email         = ["googleapis-packages@google.com"]
  spec.licenses      = ["Apache-2.0"]
  spec.summary       = "Common code for GAPIC-generated API clients"
  spec.homepage      = "https://github.com/googleapis/gapic-generator-ruby"

  spec.files         = Dir.glob("lib/**/*") + Dir.glob("*.md") +
                       ["LICENSE", ".yardopts"]
  spec.require_paths = ["lib"]
  spec.platform      = Gem::Platform::RUBY

  spec.required_ruby_version = ">= 2.5"

  # Runtime dependencies: "< 2.a" caps below 2.x including prereleases.
  # googleauth >= 0.17.0 is required for JWT credentials with scopes.
  spec.add_dependency "faraday", "~> 1.3"
  spec.add_dependency "googleapis-common-protos", ">= 1.3.11", "< 2.a"
  spec.add_dependency "googleapis-common-protos-types", ">= 1.0.6", "< 2.a"
  spec.add_dependency "googleauth", ">= 0.17.0", "< 2.a"
  spec.add_dependency "google-protobuf", "~> 3.14"
  spec.add_dependency "grpc", "~> 1.36"

  spec.add_development_dependency "google-cloud-core", "~> 1.5"
  spec.add_development_dependency "google-style", "~> 1.25.1"
  spec.add_development_dependency "minitest", "~> 5.14"
  spec.add_development_dependency "minitest-autotest", "~> 1.0"
  spec.add_development_dependency "minitest-focus", "~> 1.1"
  spec.add_development_dependency "minitest-rg", "~> 5.2"
  spec.add_development_dependency "rake", ">= 12.0"
  spec.add_development_dependency "redcarpet", "~> 3.0"
  spec.add_development_dependency "yard", "~> 0.9"
end
|
# Gem version constant for the frank-cucumber gem (pre-release 2).
module Frank
module Cucumber
VERSION = "0.9.5.pre2"
end
end
Version bump: 0.9.5.pre2 -> 0.9.5.pre3
# Gem version constant for the frank-cucumber gem (pre-release 3).
module Frank
module Cucumber
VERSION = "0.9.5.pre3"
end
end
|
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
# RUBY_ENGINE is not defined on Ruby 1.8.7; default it so the Rubinius
# check further down works on every interpreter.
begin
RUBY_ENGINE
rescue NameError
RUBY_ENGINE = "ruby" # Not defined in Ruby 1.8.7
end
# Gem specification for rdf-microdata, a Microdata reader for Ruby.
Gem::Specification.new do |gem|
# Version and release date are derived from the VERSION file.
gem.version = File.read('VERSION').chomp
gem.date = File.mtime('VERSION').strftime('%Y-%m-%d')
gem.name = "rdf-microdata"
gem.homepage = "http://ruby-rdf.github.com/rdf-microdata"
gem.license = 'Public Domain' if gem.respond_to?(:license=)
gem.summary = "Microdata reader for Ruby."
gem.description = gem.summary
gem.rubyforge_project = 'rdf-microdata'
gem.authors = %w(Gregg Kellogg)
gem.email = 'public-rdf-ruby@w3.org'
gem.platform = Gem::Platform::RUBY
gem.files = %w(AUTHORS README UNLICENSE VERSION) + Dir.glob('lib/**/*.rb') + Dir.glob('etc/*')
gem.require_paths = %w(lib)
gem.extensions = %w()
gem.test_files = %w()
gem.has_rdoc = false # NOTE(review): has_rdoc is deprecated in newer RubyGems
gem.required_ruby_version = '>= 1.9.2'
gem.requirements = []
# Runtime dependencies.
gem.add_runtime_dependency 'rdf', '>= 1.1.0'
gem.add_runtime_dependency 'rdf-xsd', '>= 1.1.0'
gem.add_runtime_dependency 'htmlentities', '>= 4.3.0'
# Development dependencies.
gem.add_development_dependency 'nokogiri' , '>= 1.6.0'
gem.add_development_dependency 'equivalent-xml' , '>= 0.3.0'
gem.add_development_dependency 'open-uri-cached', '>= 0.0.5'
gem.add_development_dependency 'yard' , '>= 0.8.7'
gem.add_development_dependency 'spira', '= 0.0.12'
gem.add_development_dependency 'rspec', '>= 2.14.0'
gem.add_development_dependency 'rdf-spec', '>= 1.1.0'
gem.add_development_dependency 'rdf-rdfa', '>= 1.1.0'
gem.add_development_dependency 'rdf-turtle', '>= 1.1.0'
gem.add_development_dependency 'rdf-isomorphic', '>= 1.1.0'
# Rubinius has its own dependencies
if RUBY_ENGINE == "rbx" && RUBY_VERSION >= "2.1.0"
#gem.add_runtime_dependency "rubysl-bigdecimal"
#gem.add_runtime_dependency "rubysl-date"
#gem.add_runtime_dependency "rubysl-enumerator"
#gem.add_runtime_dependency "rubysl-readline"
#gem.add_runtime_dependency "rubysl-net-http"
#gem.add_runtime_dependency "rubysl-pathname"
#gem.add_runtime_dependency "rubysl-time"
#gem.add_runtime_dependency "rubysl-uri"
#gem.add_runtime_dependency "rubysl-weakref"
gem.add_development_dependency "rubysl-base64"
#gem.add_development_dependency "rubysl-fileutils"
gem.add_development_dependency "rubysl-open-uri"
gem.add_development_dependency "rubysl-prettyprint"
gem.add_development_dependency "rubysl-rexml"
gem.add_development_dependency "racc"
end
gem.post_install_message = nil
end
Make nokogiri a runtime dependency, not development.
This addresses issue #2. A previous attempt was made to make nokogiri optional (as with RDFa), but it's not really worth the trouble.
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
# RUBY_ENGINE is not defined on Ruby 1.8.7; default it so the Rubinius
# check further down works on every interpreter.
begin
RUBY_ENGINE
rescue NameError
RUBY_ENGINE = "ruby" # Not defined in Ruby 1.8.7
end
# Gem specification for rdf-microdata, a Microdata reader for Ruby.
# nokogiri is a runtime dependency here (see issue #2).
Gem::Specification.new do |gem|
# Version and release date are derived from the VERSION file.
gem.version = File.read('VERSION').chomp
gem.date = File.mtime('VERSION').strftime('%Y-%m-%d')
gem.name = "rdf-microdata"
gem.homepage = "http://ruby-rdf.github.com/rdf-microdata"
gem.license = 'Public Domain' if gem.respond_to?(:license=)
gem.summary = "Microdata reader for Ruby."
gem.description = gem.summary
gem.rubyforge_project = 'rdf-microdata'
gem.authors = %w(Gregg Kellogg)
gem.email = 'public-rdf-ruby@w3.org'
gem.platform = Gem::Platform::RUBY
gem.files = %w(AUTHORS README UNLICENSE VERSION) + Dir.glob('lib/**/*.rb') + Dir.glob('etc/*')
gem.require_paths = %w(lib)
gem.extensions = %w()
gem.test_files = %w()
gem.has_rdoc = false # NOTE(review): has_rdoc is deprecated in newer RubyGems
gem.required_ruby_version = '>= 1.9.2'
gem.requirements = []
# Runtime dependencies.
gem.add_runtime_dependency 'rdf', '>= 1.1.0'
gem.add_runtime_dependency 'rdf-xsd', '>= 1.1.0'
gem.add_runtime_dependency 'htmlentities', '>= 4.3.0'
gem.add_runtime_dependency 'nokogiri' , '>= 1.6.1'
# Development dependencies.
gem.add_development_dependency 'equivalent-xml' , '>= 0.3.0'
gem.add_development_dependency 'open-uri-cached', '>= 0.0.5'
gem.add_development_dependency 'yard' , '>= 0.8.7'
gem.add_development_dependency 'spira', '= 0.0.12'
gem.add_development_dependency 'rspec', '>= 2.14.0'
gem.add_development_dependency 'rdf-spec', '>= 1.1.0'
gem.add_development_dependency 'rdf-rdfa', '>= 1.1.0'
gem.add_development_dependency 'rdf-turtle', '>= 1.1.0'
gem.add_development_dependency 'rdf-isomorphic', '>= 1.1.0'
# Rubinius has its own dependencies
if RUBY_ENGINE == "rbx" && RUBY_VERSION >= "2.1.0"
#gem.add_runtime_dependency "rubysl-bigdecimal"
#gem.add_runtime_dependency "rubysl-date"
#gem.add_runtime_dependency "rubysl-enumerator"
#gem.add_runtime_dependency "rubysl-readline"
#gem.add_runtime_dependency "rubysl-net-http"
#gem.add_runtime_dependency "rubysl-pathname"
#gem.add_runtime_dependency "rubysl-time"
#gem.add_runtime_dependency "rubysl-uri"
#gem.add_runtime_dependency "rubysl-weakref"
gem.add_development_dependency "rubysl-base64"
#gem.add_development_dependency "rubysl-fileutils"
gem.add_development_dependency "rubysl-open-uri"
gem.add_development_dependency "rubysl-prettyprint"
gem.add_development_dependency "rubysl-rexml"
gem.add_development_dependency "racc"
end
gem.post_install_message = nil
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for hitfox_coupon_api v0.0.1, a simple JSON client
# for the HitFox coupon API.
Gem::Specification.new do |s|
s.name = %q{hitfox_coupon_api}
s.version = "0.0.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Gerrit Riessen"]
s.date = %q{2011-08-31}
s.description = %q{Simple JSON interface to the HitFox coupon API}
s.email = %q{gerrit.riessen@gmail.com}
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
".rvmrc",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"hitfox_coupon_api.gemspec",
"lib/hitfox_coupon_api.rb",
"lib/hitfox_coupon_api/application.rb",
"lib/hitfox_coupon_api/configuration.rb",
"lib/hitfox_coupon_api/coupon.rb",
"test/.login.yml.sample",
"test/helper.rb",
"test/test_hitfox_coupon_api.rb",
"test_shell"
]
s.homepage = %q{https://github.com/gorenje/hitfox_coupon_api}
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.6.2}
s.summary = %q{Simple JSON interface to the HitFox coupon API}
# Jeweler boilerplate: the three branches below declare the same dependency
# set in the form expected by the detected RubyGems version.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rest-client>, [">= 0"])
s.add_runtime_dependency(%q<json>, [">= 0"])
s.add_development_dependency(%q<rake>, ["= 0.8.7"])
s.add_development_dependency(%q<pry>, [">= 0"])
s.add_development_dependency(%q<pry-doc>, [">= 0"])
s.add_development_dependency(%q<gist>, [">= 0"])
s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<rcov>, [">= 0"])
s.add_development_dependency(%q<rr>, [">= 0"])
s.add_development_dependency(%q<cheat>, [">= 0"])
else
s.add_dependency(%q<rest-client>, [">= 0"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rake>, ["= 0.8.7"])
s.add_dependency(%q<pry>, [">= 0"])
s.add_dependency(%q<pry-doc>, [">= 0"])
s.add_dependency(%q<gist>, [">= 0"])
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
s.add_dependency(%q<rr>, [">= 0"])
s.add_dependency(%q<cheat>, [">= 0"])
end
else
s.add_dependency(%q<rest-client>, [">= 0"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rake>, ["= 0.8.7"])
s.add_dependency(%q<pry>, [">= 0"])
s.add_dependency(%q<pry-doc>, [">= 0"])
s.add_dependency(%q<gist>, [">= 0"])
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
s.add_dependency(%q<rr>, [">= 0"])
s.add_dependency(%q<cheat>, [">= 0"])
end
end
Regenerate gemspec for version 0.0.2
On branch: master
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for hitfox_coupon_api v0.0.2, a simple JSON client
# for the HitFox coupon API.
Gem::Specification.new do |s|
s.name = %q{hitfox_coupon_api}
s.version = "0.0.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Gerrit Riessen"]
s.date = %q{2011-08-31}
s.description = %q{Simple JSON interface to the HitFox coupon API}
s.email = %q{gerrit.riessen@gmail.com}
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
".rvmrc",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"hitfox_coupon_api.gemspec",
"lib/hitfox_coupon_api.rb",
"lib/hitfox_coupon_api/application.rb",
"lib/hitfox_coupon_api/configuration.rb",
"lib/hitfox_coupon_api/coupon.rb",
"test/.login.yml.sample",
"test/helper.rb",
"test/test_hitfox_coupon_api.rb",
"test_shell"
]
s.homepage = %q{https://github.com/gorenje/hitfox_coupon_api}
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.6.2}
s.summary = %q{Simple JSON interface to the HitFox coupon API}
# Jeweler boilerplate: the three branches below declare the same dependency
# set in the form expected by the detected RubyGems version.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rest-client>, [">= 0"])
s.add_runtime_dependency(%q<json>, [">= 0"])
s.add_development_dependency(%q<rake>, ["= 0.8.7"])
s.add_development_dependency(%q<pry>, [">= 0"])
s.add_development_dependency(%q<pry-doc>, [">= 0"])
s.add_development_dependency(%q<gist>, [">= 0"])
s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<rcov>, [">= 0"])
s.add_development_dependency(%q<rr>, [">= 0"])
s.add_development_dependency(%q<cheat>, [">= 0"])
else
s.add_dependency(%q<rest-client>, [">= 0"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rake>, ["= 0.8.7"])
s.add_dependency(%q<pry>, [">= 0"])
s.add_dependency(%q<pry-doc>, [">= 0"])
s.add_dependency(%q<gist>, [">= 0"])
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
s.add_dependency(%q<rr>, [">= 0"])
s.add_dependency(%q<cheat>, [">= 0"])
end
else
s.add_dependency(%q<rest-client>, [">= 0"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rake>, ["= 0.8.7"])
s.add_dependency(%q<pry>, [">= 0"])
s.add_dependency(%q<pry-doc>, [">= 0"])
s.add_dependency(%q<gist>, [">= 0"])
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
s.add_dependency(%q<rr>, [">= 0"])
s.add_dependency(%q<cheat>, [">= 0"])
end
end
|
# -*- encoding: utf-8 -*-
require 'rubygems' unless Object.const_defined?(:Gem)
# Load the library first so Bahia::VERSION is available below.
require File.dirname(__FILE__) + "/lib/bahia"
# Gem specification for bahia: aruba-style commandline acceptance testing
# for non-cucumber test frameworks.
Gem::Specification.new do |s|
s.name = "bahia"
s.version = Bahia::VERSION
s.authors = ["Gabriel Horner"]
s.email = "gabriel.horner@gmail.com"
s.homepage = "http://github.com/cldwalker/bahia"
s.summary = "aruba for non-cucumber test frameworks"
s.description = "Bahia - where commandline acceptance tests are easy, the people are festive and onde nasceu capoeira. In other words, aruba for any non-cucumber test framework."
s.required_rubygems_version = ">= 1.3.6"
s.files = Dir.glob(%w[{lib,test}/**/*.rb bin/* [A-Z]*.{md,txt,rdoc} ext/**/*.{rb,c} **/deps.rip]) + %w{Rakefile .gemspec}
s.extra_rdoc_files = ["README.md", "LICENSE.txt"]
s.license = 'MIT'
end
update gemspec
# -*- encoding: utf-8 -*-
require 'rubygems' unless Object.const_defined?(:Gem)
# Load the library first so Bahia::VERSION is available below.
require File.dirname(__FILE__) + "/lib/bahia"
# Gem specification for bahia: aruba-style commandline acceptance testing
# for non-cucumber test frameworks (specs packaged under spec/).
Gem::Specification.new do |s|
s.name = "bahia"
s.version = Bahia::VERSION
s.authors = ["Gabriel Horner"]
s.email = "gabriel.horner@gmail.com"
s.homepage = "http://github.com/cldwalker/bahia"
s.summary = "aruba for non-cucumber test frameworks"
s.description = "Bahia - where commandline acceptance tests are easy, the people are festive and onde nasceu capoeira. In other words, aruba for any non-cucumber test framework."
s.required_rubygems_version = ">= 1.3.6"
s.add_development_dependency 'rspec', '~> 2.7.0'
s.files = Dir.glob(%w[{lib,spec}/**/*.rb bin/* [A-Z]*.{md,txt,rdoc} ext/**/*.{rb,c} **/deps.rip]) + %w{Rakefile .gemspec}
s.extra_rdoc_files = ["README.md", "LICENSE.txt"]
s.license = 'MIT'
end
|
# encoding: UTF-8
# Load the version constant from the source tree.
require File.join(File.dirname(__FILE__), 'lib', 'social_stream', 'places', 'version')
# Gem specification for social_stream-places, the places activity
# extension for the Social Stream engine.
Gem::Specification.new do |s|
s.name = "social_stream-places"
s.version = SocialStream::Places::VERSION.dup
s.authors = ["Carolina García", "GING - DIT - UPM"]
s.summary = "Places support for Social Stream, the core for building social network websites"
s.description = "Social Stream is a Ruby on Rails engine providing your application with social networking features and activity streams.\n\nThis gem allow you to add places as a new social stream activity"
s.email = "holacarol@gmail.com"
s.homepage = "http://github.com/ging/social_stream-places"
s.files = `git ls-files`.split("\n")
# Gem dependencies
s.add_runtime_dependency('social_stream-base', '~> 1.0.0')
s.add_runtime_dependency('gmaps4rails','~> 1.5.2')
s.add_runtime_dependency('geocoder')
# Development Gem dependencies
end
Remove tilde from Carolina's last name
# encoding: UTF-8
# Load the version constant from the source tree.
require File.join(File.dirname(__FILE__), 'lib', 'social_stream', 'places', 'version')
# Gem specification for social_stream-places, the places activity
# extension for the Social Stream engine.
Gem::Specification.new do |s|
s.name = "social_stream-places"
s.version = SocialStream::Places::VERSION.dup
s.authors = ["Carolina Garcia", "GING - DIT - UPM"]
s.summary = "Places support for Social Stream, the core for building social network websites"
s.description = "Social Stream is a Ruby on Rails engine providing your application with social networking features and activity streams.\n\nThis gem allow you to add places as a new social stream activity"
s.email = "holacarol@gmail.com"
s.homepage = "http://github.com/ging/social_stream-places"
s.files = `git ls-files`.split("\n")
# Gem dependencies
s.add_runtime_dependency('social_stream-base', '~> 1.0.0')
s.add_runtime_dependency('gmaps4rails','~> 1.5.2')
s.add_runtime_dependency('geocoder')
# Development Gem dependencies
end
|
# Load FFI::VERSION from lib/ffi/version relative to this gemspec.
require File.expand_path("../lib/#{File.basename(__FILE__, '.gemspec')}/version", __FILE__)
# Gem specification for ffi, the Ruby foreign-function-interface library.
Gem::Specification.new do |s|
s.name = 'ffi'
s.version = FFI::VERSION
s.author = 'Wayne Meissner'
s.email = 'wmeissner@gmail.com'
s.homepage = 'http://wiki.github.com/ffi/ffi'
s.summary = 'Ruby FFI'
s.description = 'Ruby FFI library'
# Exclude pre-built per-Ruby-version lib directories from the package.
s.files = %w(ffi.gemspec LICENSE COPYING README.md Rakefile) + Dir.glob("{ext,gen,lib,spec,libtest}/**/*").reject { |f| f =~ /lib\/[12]\.[089]/}
s.extensions << 'ext/ffi_c/extconf.rb'
s.has_rdoc = false # NOTE(review): has_rdoc is deprecated in newer RubyGems
s.rdoc_options = %w[--exclude=ext/ffi_c/.*\.o$ --exclude=ffi_c\.(bundle|so)$]
s.license = 'BSD'
s.require_paths << 'ext/ffi_c'
s.required_ruby_version = '>= 1.8.7'
s.add_development_dependency 'rake'
s.add_development_dependency 'rake-compiler', '>=0.6.0'
s.add_development_dependency 'rspec'
s.add_development_dependency 'rubygems-tasks'
end
Reject .so and .bundle files in gem. (merge gone wrong...PR from @simi)
# Load FFI::VERSION from lib/ffi/version relative to this gemspec.
require File.expand_path("../lib/#{File.basename(__FILE__, '.gemspec')}/version", __FILE__)
# Gem specification for ffi, the Ruby foreign-function-interface library.
Gem::Specification.new do |s|
s.name = 'ffi'
s.version = FFI::VERSION
s.author = 'Wayne Meissner'
s.email = 'wmeissner@gmail.com'
s.homepage = 'http://wiki.github.com/ffi/ffi'
s.summary = 'Ruby FFI'
s.description = 'Ruby FFI library'
# Exclude pre-built per-version lib dirs plus any .so/.bundle binaries.
s.files = %w(ffi.gemspec LICENSE COPYING README.md Rakefile) + Dir.glob("{ext,gen,lib,spec,libtest}/**/*").reject { |f| f =~ /(lib\/[12]\.[089]|\.so|\.bundle)/}
s.extensions << 'ext/ffi_c/extconf.rb'
s.has_rdoc = false # NOTE(review): has_rdoc is deprecated in newer RubyGems
s.rdoc_options = %w[--exclude=ext/ffi_c/.*\.o$ --exclude=ffi_c\.(bundle|so)$]
s.license = 'BSD'
s.require_paths << 'ext/ffi_c'
s.required_ruby_version = '>= 1.8.7'
s.add_development_dependency 'rake'
s.add_development_dependency 'rake-compiler', '>=0.6.0'
s.add_development_dependency 'rspec'
s.add_development_dependency 'rubygems-tasks'
end
|
require "progressbar"
require "csv"
module Dsc
  # Abstract base class for dsc CLI commands.
  #
  # Subclasses override {transport_class} and inherit shared behavior:
  # parsing of the common CLI flags (debug level, fields, time filter,
  # detail level), output redirection (file or STDOUT) and connection /
  # authentication against a Deep Security Manager.
  class Command
    # Transport (model) class this command operates on.
    # @note Subclasses must override; the abstract base returns nil.
    def self.transport_class
      nil
    end

    # Unqualified name of the transport class, e.g. "HostDetail".
    # (Removed a useless local assignment from the original.)
    def self.transport_class_name
      transport_class.name.split('::').last || ''
    end

    # Transport class name split at camel-case boundaries, e.g. "Host Detail".
    def self.transport_class_string
      transport_class_name.split(/(?=[A-Z])/).join(" ")
    end

    # Symbol form of the transport class name, e.g. :host_detail.
    def self.command_symbol
      transport_class_name.split(/(?=[A-Z])/).join("_").downcase.to_sym
    end

    # Attribute mapping schema of the transport class.
    def self.schema
      transport_class.mappings
    end

    # Capture connection and output settings from the global CLI options.
    # @param global_options [Hash] parsed global options (GLI-style keys)
    def initialize(global_options)
      @hostname = global_options[:m]
      @port = global_options[:port].to_i
      @tenant = global_options[:t]
      @username = global_options[:u]
      @password = global_options[:p]
      @show_progress_bar = global_options[:P]
      @debug_level = parse_debug_level(global_options[:d])
      @output = global_options[:o]
    end

    # @group Debug Level flag

    # Valid debug levels
    # @return [Array<String>] Valid debug levels
    def self.valid_debug_levels
      DeepSecurity::LOG_MAPPING.keys
    end

    # String of debug levels for help string
    # @return [String] String of debug levels for help string
    def self.valid_debug_levels_string
      valid_debug_levels.join(", ")
    end

    # Parse debug level; unknown non-blank values fall back to :debug.
    # @return [nil, Symbol] nil when blank, otherwise a LOG_MAPPING key
    def parse_debug_level(argument)
      return nil if argument.blank?
      return argument.to_sym if DeepSecurity::LOG_MAPPING.keys.include?(argument.to_sym)
      :debug
    end

    # Define debug level argument
    # @return [void]
    def self.define_debug_flag(c)
      c.flag [:d, :debug], :desc => "Enable client debug output. (One of #{Dsc::Command.valid_debug_levels_string})", :arg_name => 'debug_level'
    end

    # @endgroup

    # Default fields shown when no --fields argument is given.
    # @note Subclasses override; the base class shows none.
    def self.default_fields
      []
    end

    # Comma-joined default fields for the help string.
    def self.default_fields_string
      default_fields.join(",")
    end

    # Sorted list of all fields the transport class exposes.
    def self.valid_fields
      transport_class.defined_attributes.sort
    end

    # Comma-joined valid fields for the help string.
    def self.valid_fields_string
      valid_fields.join(", ")
    end

    # Parse the --fields argument: either a comma separated list or the
    # name of a file containing such a list.
    # @raise [RuntimeError] if an unknown field is requested
    # @return [Array<String>] the validated field names
    def parse_fields(fields_string_or_filename)
      filename = File.absolute_path(fields_string_or_filename)
      # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
      if File.exist?(filename)
        fields_string = File.read(filename)
      else
        fields_string = fields_string_or_filename
      end
      fields = fields_string.split(",").map(&:strip)
      unknown_fields = fields.reject { |each| self.class.transport_class.has_attribute_chain(each) }
      raise "Unknown filename or field found (#{unknown_fields.join(', ')}) - known fields are: #{self.class.valid_fields.join(', ')}" unless unknown_fields.empty?
      fields
    end

    # Mapping of accepted --time_filter values to TimeFilter instances.
    def self.valid_time_filters
      {
        :last_hour => DeepSecurity::TimeFilter.last_hour,
        :last_24_hours => DeepSecurity::TimeFilter.last_24_hours,
        :last_7_days => DeepSecurity::TimeFilter.last_7_days,
        :last_day => DeepSecurity::TimeFilter.last_day
      }
    end

    # Comma-joined time filter names for the help string.
    def self.valid_time_filters_string
      valid_time_filters.keys.join(', ')
    end

    # Parse the --time_filter argument.
    # @raise [RuntimeError] if the filter name is unknown
    def parse_time_filter(string)
      filter = self.class.valid_time_filters[string.to_sym]
      raise "Unknown time filter" if filter.nil?
      filter
    end

    # Available host detail levels.
    def self.valid_detail_levels
      DeepSecurity::EnumHostDetailLevel.keys()
    end

    # Comma-joined detail levels for the help string.
    def self.valid_detail_levels_string
      valid_detail_levels.join(", ")
    end

    # Parse the --detail_level argument (case-insensitive).
    # @raise [RuntimeError] if the detail level is unknown
    def parse_detail_level(string)
      detail_level = DeepSecurity::EnumHostDetailLevel[string.upcase.strip]
      raise "Unknown detail level filter" if detail_level.nil?
      detail_level
    end

    # Yield an output IO: STDOUT when the -o option is '--', otherwise a
    # freshly opened file which is closed again afterwards (even on error).
    #
    # Bug fix: the original called File.open(option, 'w') with an undefined
    # local variable `option` instead of @output, raising NameError for any
    # file output, and leaked the handle if the block raised.
    def output
      if @output == '--'
        yield STDOUT
      else
        File.open(@output, 'w') { |file| yield file }
      end
    end

    # Yield a Manager proxy for the configured host/port/debug level.
    def connect
      yield DeepSecurity::Manager.server(@hostname, @port, @debug_level)
    end

    # Connect and authenticate, yielding the authenticated manager.
    # Prints a message on authentication failure; always disconnects.
    def authenticate
      connect do |dsm|
        begin
          dsm.connect(@tenant, @username, @password)
          yield dsm
        rescue DeepSecurity::AuthenticationFailedException => e
          puts "Authentication failed! #{e.message}"
        ensure
          dsm.disconnect()
        end
      end
    end

    # Print the manager's API version to the selected output.
    def print_api_version(options, args)
      output do |output|
        authenticate do |dsm|
          output.puts dsm.api_version()
        end
      end
    end

    # Print the manager's current time to the selected output.
    def print_manager_time(options, args)
      output do |output|
        authenticate do |dsm|
          output.puts dsm.manager_time()
        end
      end
    end

    # Print the transport schema as "key (type): description" lines.
    def print_schema(options, args)
      output do |output|
        schema = self.class.schema()
        schema.keys.sort.each do |key|
          output.puts "#{key} (#{schema[key].type_string}): #{schema[key].description}"
        end
      end
    end

    # Register the "list" subcommand for this transport class.
    def self.define_list_command(command)
      command.desc "List #{self.transport_class_string}s"
      command.command :list do |list|
        define_fields_argument(list)
        yield list if block_given?
        list.action do |global_options, options, args|
          self.new(global_options).list(options, args)
        end
      end
    end

    # Register the "schema" subcommand for this transport class.
    def self.define_schema_command(command)
      command.desc "Show #{self.transport_class_string} schema"
      command.command :schema do |schema|
        yield schema if block_given?
        schema.action do |global_options, options, args|
          self.new(global_options).print_schema(options, args)
        end
      end
    end

    # Define the --time_filter flag.
    def self.define_time_filter_argument(command)
      command.desc "A filter specifying the time interval to query (One of #{self.valid_time_filters_string})"
      command.default_value "last_day"
      command.flag [:time_filter]
    end

    # Define the --fields flag.
    def self.define_fields_argument(command)
      command.desc "A comma separated list of fields to display or a file containing those fields. (Available fields: #{self.valid_fields_string})"
      command.default_value self.default_fields_string
      command.flag [:fields]
    end

    # Define the --detail_level flag. (Fixed "specifiying" typo in help text.)
    def self.define_detail_level_argument(command)
      command.desc "A detail level specifying the extent of data returned. (Available values: #{self.valid_detail_levels_string})"
      command.default_value "low"
      command.flag [:detail_level]
    end
  end
end
Refactored --fields flag
require "progressbar"
require "csv"
module Dsc
  # Abstract base class for dsc CLI commands.
  #
  # Subclasses override {transport_class} and inherit shared behavior:
  # parsing of the common CLI flags (debug level, fields, time filter,
  # detail level), output redirection (file or STDOUT) and connection /
  # authentication against a Deep Security Manager.
  class Command
    # Transport (model) class this command operates on.
    # @note Subclasses must override; the abstract base returns nil.
    def self.transport_class
      nil
    end

    # Unqualified name of the transport class, e.g. "HostDetail".
    # (Removed a useless local assignment from the original.)
    def self.transport_class_name
      transport_class.name.split('::').last || ''
    end

    # Transport class name split at camel-case boundaries, e.g. "Host Detail".
    def self.transport_class_string
      transport_class_name.split(/(?=[A-Z])/).join(" ")
    end

    # Symbol form of the transport class name, e.g. :host_detail.
    def self.command_symbol
      transport_class_name.split(/(?=[A-Z])/).join("_").downcase.to_sym
    end

    # Attribute mapping schema of the transport class.
    def self.schema
      transport_class.mappings
    end

    # Capture connection and output settings from the global CLI options.
    # @param global_options [Hash] parsed global options (GLI-style keys)
    def initialize(global_options)
      @hostname = global_options[:m]
      @port = global_options[:port].to_i
      @tenant = global_options[:t]
      @username = global_options[:u]
      @password = global_options[:p]
      @show_progress_bar = global_options[:P]
      @debug_level = parse_debug_level(global_options[:d])
      @output = global_options[:o]
    end

    # @group Debug Level flag

    # Valid debug levels
    # @return [Array<String>] Valid debug levels
    def self.valid_debug_levels
      DeepSecurity::LOG_MAPPING.keys
    end

    # String of debug levels for help string
    # @return [String] String of debug levels for help string
    def self.valid_debug_levels_string
      valid_debug_levels.join(", ")
    end

    # Parse debug level argument; unknown non-blank values fall back to :debug.
    # @return [nil, Symbol] nil when blank, otherwise a LOG_MAPPING key
    def parse_debug_level(argument)
      return nil if argument.blank?
      return argument.to_sym if DeepSecurity::LOG_MAPPING.keys.include?(argument.to_sym)
      :debug
    end

    # Define debug level flag
    # @return [void]
    def self.define_debug_flag(command)
      command.flag [:d, :debug],
        :desc => "Enable client debug output. (One of #{Dsc::Command.valid_debug_levels_string})",
        :arg_name => 'debug_level'
    end

    # @endgroup

    # @group Fields flag

    # Default fields if no argument is given
    # @note Needs to be overridden by subclass
    # @return [Array<String>] Default fields if no argument is given
    def self.default_fields
      []
    end

    # String of default fields for help string
    # @return [String] String of default fields for help string
    def self.default_fields_string
      default_fields.join(",")
    end

    # Sorted list of available fields
    # @return [Array<String>] Sorted list of available fields
    def self.valid_fields
      transport_class.defined_attributes.sort
    end

    # String of available fields for help string
    # @return [String] String of available fields for help string
    def self.valid_fields_string
      valid_fields.join(", ")
    end

    # Parse fields argument. Either split the string or read from file.
    #
    # Bug fix: the original passed the undefined name
    # `fields_string_or_filename_argument` to File.absolute_path, raising
    # NameError on every call; the parameter is `fields_string_or_filename`.
    # @raise [RuntimeError] if an unknown field is requested
    # @return [Array<String>] parsed field names
    def parse_fields(fields_string_or_filename)
      filename = File.absolute_path(fields_string_or_filename)
      # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
      if File.exist?(filename)
        fields_string = File.read(filename)
      else
        fields_string = fields_string_or_filename
      end
      fields = fields_string.split(",").map(&:strip)
      unknown_fields = fields.reject { |each| self.class.transport_class.has_attribute_chain(each) }
      raise "Unknown filename or field found (#{unknown_fields.join(', ')}) - known fields are: #{self.class.valid_fields.join(', ')}" unless unknown_fields.empty?
      fields
    end

    # Define fields flag
    # @return [void]
    def self.define_fields_flag(command)
      command.flag [:fields],
        :desc => "A comma separated list of fields to display or a file containing those fields. (Available fields: #{self.valid_fields_string})",
        :default_value => self.default_fields_string
    end

    # @endgroup

    # Mapping of accepted --time_filter values to TimeFilter instances.
    def self.valid_time_filters
      {
        :last_hour => DeepSecurity::TimeFilter.last_hour,
        :last_24_hours => DeepSecurity::TimeFilter.last_24_hours,
        :last_7_days => DeepSecurity::TimeFilter.last_7_days,
        :last_day => DeepSecurity::TimeFilter.last_day
      }
    end

    # Comma-joined time filter names for the help string.
    def self.valid_time_filters_string
      valid_time_filters.keys.join(', ')
    end

    # Parse the --time_filter argument.
    # @raise [RuntimeError] if the filter name is unknown
    def parse_time_filter(string)
      filter = self.class.valid_time_filters[string.to_sym]
      raise "Unknown time filter" if filter.nil?
      filter
    end

    # Available host detail levels.
    def self.valid_detail_levels
      DeepSecurity::EnumHostDetailLevel.keys()
    end

    # Comma-joined detail levels for the help string.
    def self.valid_detail_levels_string
      valid_detail_levels.join(", ")
    end

    # Parse the --detail_level argument (case-insensitive).
    # @raise [RuntimeError] if the detail level is unknown
    def parse_detail_level(string)
      detail_level = DeepSecurity::EnumHostDetailLevel[string.upcase.strip]
      raise "Unknown detail level filter" if detail_level.nil?
      detail_level
    end

    # Yield an output IO: STDOUT when the -o option is '--', otherwise a
    # freshly opened file which is closed again afterwards (even on error).
    #
    # Bug fix: the original called File.open(option, 'w') with an undefined
    # local variable `option` instead of @output, raising NameError for any
    # file output, and leaked the handle if the block raised.
    def output
      if @output == '--'
        yield STDOUT
      else
        File.open(@output, 'w') { |file| yield file }
      end
    end

    # Yield a Manager proxy for the configured host/port/debug level.
    def connect
      yield DeepSecurity::Manager.server(@hostname, @port, @debug_level)
    end

    # Connect and authenticate, yielding the authenticated manager.
    # Prints a message on authentication failure; always disconnects.
    def authenticate
      connect do |dsm|
        begin
          dsm.connect(@tenant, @username, @password)
          yield dsm
        rescue DeepSecurity::AuthenticationFailedException => e
          puts "Authentication failed! #{e.message}"
        ensure
          dsm.disconnect()
        end
      end
    end

    # Print the manager's API version to the selected output.
    def print_api_version(options, args)
      output do |output|
        authenticate do |dsm|
          output.puts dsm.api_version()
        end
      end
    end

    # Print the manager's current time to the selected output.
    def print_manager_time(options, args)
      output do |output|
        authenticate do |dsm|
          output.puts dsm.manager_time()
        end
      end
    end

    # Print the transport schema as "key (type): description" lines.
    def print_schema(options, args)
      output do |output|
        schema = self.class.schema()
        schema.keys.sort.each do |key|
          output.puts "#{key} (#{schema[key].type_string}): #{schema[key].description}"
        end
      end
    end

    # Register the "list" subcommand for this transport class.
    def self.define_list_command(command)
      command.desc "List #{self.transport_class_string}s"
      command.command :list do |list|
        define_fields_flag(list)
        yield list if block_given?
        list.action do |global_options, options, args|
          self.new(global_options).list(options, args)
        end
      end
    end

    # Register the "schema" subcommand for this transport class.
    def self.define_schema_command(command)
      command.desc "Show #{self.transport_class_string} schema"
      command.command :schema do |schema|
        yield schema if block_given?
        schema.action do |global_options, options, args|
          self.new(global_options).print_schema(options, args)
        end
      end
    end

    # Define the --time_filter flag.
    def self.define_time_filter_argument(command)
      command.desc "A filter specifying the time interval to query (One of #{self.valid_time_filters_string})"
      command.default_value "last_day"
      command.flag [:time_filter]
    end

    # Define the --detail_level flag. (Fixed "specifiying" typo in help text.)
    def self.define_detail_level_argument(command)
      command.desc "A detail level specifying the extent of data returned. (Available values: #{self.valid_detail_levels_string})"
      command.default_value "low"
      command.flag [:detail_level]
    end
  end
end
class Ffmpeg28 < Formula
desc "Play, record, convert, and stream audio and video"
homepage "https://ffmpeg.org/"
url "https://ffmpeg.org/releases/ffmpeg-2.8.6.tar.bz2"
sha256 "40611e329bc354592c6f8f1deb033c31b91f80e91f5707ca4f9afceca78d8e62"
head "https://github.com/FFmpeg/FFmpeg.git"
bottle do
sha256 "8234d9f4885fb0f13c08e6dd9bdbb29f801c1b9961894bf73e2f49b92f9f2abd" => :el_capitan
sha256 "d8b21613d6ddbf6e4cfe1a95648a26b7b78aec4bedbba35ad39e39fefb3054ed" => :yosemite
sha256 "bc7b183c0eedcb5a8ab35be3b25cf00af89c867085a5fc0855d7ec74ab7d1e79" => :mavericks
end
conflicts_with "ffmpeg",
:because => "both install the same binaries"
option "without-x264", "Disable H.264 encoder"
option "without-lame", "Disable MP3 encoder"
option "without-libvo-aacenc", "Disable VisualOn AAC encoder"
option "without-xvid", "Disable Xvid MPEG-4 video encoder"
option "without-qtkit", "Disable deprecated QuickTime framework"
option "with-rtmpdump", "Enable RTMP protocol"
option "with-libass", "Enable ASS/SSA subtitle format"
option "with-opencore-amr", "Enable Opencore AMR NR/WB audio format"
option "with-openjpeg", "Enable JPEG 2000 image format"
option "with-openssl", "Enable SSL support"
option "with-libssh", "Enable SFTP protocol via libssh"
option "with-schroedinger", "Enable Dirac video format"
option "with-ffplay", "Enable FFplay media player"
option "with-tools", "Enable additional FFmpeg tools"
option "with-fdk-aac", "Enable the Fraunhofer FDK AAC library"
option "with-libvidstab", "Enable vid.stab support for video stabilization"
option "with-x265", "Enable x265 encoder"
option "with-libsoxr", "Enable the soxr resample library"
option "with-webp", "Enable using libwebp to encode WEBP images"
option "with-zeromq", "Enable using libzeromq to receive commands sent through a libzeromq client"
option "with-snappy", "Enable Snappy library"
option "with-dcadec", "Enable dcadec library"
depends_on "pkg-config" => :build
# manpages won't be built without texi2html
depends_on "texi2html" => :build
depends_on "yasm" => :build
depends_on "x264" => :recommended
depends_on "lame" => :recommended
depends_on "libvo-aacenc" => :recommended
depends_on "xvid" => :recommended
depends_on "faac" => :optional
depends_on "fontconfig" => :optional
depends_on "freetype" => :optional
depends_on "theora" => :optional
depends_on "libvorbis" => :optional
depends_on "libvpx" => :optional
depends_on "rtmpdump" => :optional
depends_on "opencore-amr" => :optional
depends_on "libass" => :optional
depends_on "openjpeg" => :optional
depends_on "sdl" if build.with? "ffplay"
depends_on "snappy" => :optional
depends_on "speex" => :optional
depends_on "schroedinger" => :optional
depends_on "fdk-aac" => :optional
depends_on "opus" => :optional
depends_on "frei0r" => :optional
depends_on "libcaca" => :optional
depends_on "libbluray" => :optional
depends_on "libsoxr" => :optional
depends_on "libquvi" => :optional
depends_on "libvidstab" => :optional
depends_on "x265" => :optional
depends_on "openssl" => :optional
depends_on "libssh" => :optional
depends_on "webp" => :optional
depends_on "zeromq" => :optional
depends_on "libbs2b" => :optional
depends_on "dcadec" => :optional
# Configures and builds FFmpeg: translates every formula option into the
# corresponding ./configure flag, builds, installs, and (with --with-tools)
# also installs the extra helper binaries from tools/.
def install
# Baseline configure arguments common to all option combinations.
args = ["--prefix=#{prefix}",
"--enable-shared",
"--enable-pthreads",
"--enable-gpl",
"--enable-version3",
"--enable-hardcoded-tables",
"--enable-avresample",
"--cc=#{ENV.cc}",
"--host-cflags=#{ENV.cflags}",
"--host-ldflags=#{ENV.ldflags}",
]
# OpenCL is only usable on OS X releases newer than Lion.
args << "--enable-opencl" if MacOS.version > :lion
# One configure switch per optional library, gated on the formula options.
args << "--enable-libx264" if build.with? "x264"
args << "--enable-libmp3lame" if build.with? "lame"
args << "--enable-libvo-aacenc" if build.with? "libvo-aacenc"
args << "--enable-libxvid" if build.with? "xvid"
args << "--enable-libsnappy" if build.with? "snappy"
args << "--enable-libfontconfig" if build.with? "fontconfig"
args << "--enable-libfreetype" if build.with? "freetype"
args << "--enable-libtheora" if build.with? "theora"
args << "--enable-libvorbis" if build.with? "libvorbis"
args << "--enable-libvpx" if build.with? "libvpx"
args << "--enable-librtmp" if build.with? "rtmpdump"
args << "--enable-libopencore-amrnb" << "--enable-libopencore-amrwb" if build.with? "opencore-amr"
args << "--enable-libfaac" if build.with? "faac"
args << "--enable-libass" if build.with? "libass"
args << "--enable-ffplay" if build.with? "ffplay"
args << "--enable-libssh" if build.with? "libssh"
args << "--enable-libspeex" if build.with? "speex"
args << "--enable-libschroedinger" if build.with? "schroedinger"
args << "--enable-libfdk-aac" if build.with? "fdk-aac"
args << "--enable-openssl" if build.with? "openssl"
args << "--enable-libopus" if build.with? "opus"
args << "--enable-frei0r" if build.with? "frei0r"
args << "--enable-libcaca" if build.with? "libcaca"
args << "--enable-libsoxr" if build.with? "libsoxr"
args << "--enable-libquvi" if build.with? "libquvi"
args << "--enable-libvidstab" if build.with? "libvidstab"
args << "--enable-libx265" if build.with? "x265"
args << "--enable-libwebp" if build.with? "webp"
args << "--enable-libzmq" if build.with? "zeromq"
args << "--enable-libbs2b" if build.with? "libbs2b"
args << "--enable-libdcadec" if build.with? "dcadec"
args << "--disable-indev=qtkit" if build.without? "qtkit"
if build.with? "openjpeg"
args << "--enable-libopenjpeg"
# Prefer libopenjpeg over FFmpeg's own jpeg2000 decoder.
args << "--disable-decoder=jpeg2000"
args << "--extra-cflags=" + `pkg-config --cflags libopenjpeg`.chomp
end
# These libraries are GPL-incompatible, and require ffmpeg be built with
# the "--enable-nonfree" flag, which produces unredistributable libraries
if %w[faac fdk-aac openssl].any? { |f| build.with? f }
args << "--enable-nonfree"
end
# A bug in a dispatch header on 10.10, included via CoreFoundation,
# prevents GCC from building VDA support. GCC has no problems on
# 10.9 and earlier.
# See: https://github.com/Homebrew/homebrew/issues/33741
if MacOS.version < :yosemite || ENV.compiler == :clang
args << "--enable-vda"
else
args << "--disable-vda"
end
# For 32-bit compilation under gcc 4.2, see:
# https://trac.macports.org/ticket/20938#comment:22
ENV.append_to_cflags "-mdynamic-no-pic" if Hardware.is_32_bit? && Hardware::CPU.intel? && ENV.compiler == :clang
system "./configure", *args
if MacOS.prefer_64_bit?
# Strip a linker flag from SHFLAGS that breaks 64-bit shared builds.
inreplace "config.mak" do |s|
shflags = s.get_make_var "SHFLAGS"
if shflags.gsub!(" -Wl,-read_only_relocs,suppress", "")
s.change_make_var! "SHFLAGS", shflags
end
end
end
system "make", "install"
if build.with? "tools"
# Build the auxiliary tools and install only the executable results.
system "make", "alltools"
bin.install Dir["tools/*"].select { |f| File.executable? f }
end
end
# Post-install message: explains why libfaac is disabled by default for
# licensing reasons and how to opt in, or which alternative AAC encoders
# can be used instead. Returns nil when the user built --with-faac.
def caveats
return unless build.without? "faac"
<<-EOS.undent
FFmpeg has been built without libfaac for licensing reasons;
libvo-aacenc is used by default.
To install with libfaac, you can:
brew reinstall ffmpeg28 --with-faac
You can also use the experimental FFmpeg encoder, libfdk-aac, or
libvo_aacenc to encode AAC audio:
ffmpeg -i input.wav -c:a aac -strict experimental output.m4a
Or:
brew reinstall ffmpeg28 --with-fdk-aac
ffmpeg -i input.wav -c:a libfdk_aac output.m4a
EOS
end
# Formula smoke test: generate a one-second synthetic test pattern with the
# installed ffmpeg and assert the output file was written.
test do
# Create an example mp4 file
system "#{bin}/ffmpeg", "-y", "-filter_complex",
"testsrc=rate=1:duration=1", "#{testpath}/video.mp4"
assert (testpath/"video.mp4").exist?
end
end
ffmpeg28: remove head
# Homebrew formula for the FFmpeg 2.8.x series, kept separate from the main
# `ffmpeg` formula (see conflicts_with below) so users can pin to 2.8.
class Ffmpeg28 < Formula
desc "Play, record, convert, and stream audio and video"
homepage "https://ffmpeg.org/"
url "https://ffmpeg.org/releases/ffmpeg-2.8.6.tar.bz2"
sha256 "40611e329bc354592c6f8f1deb033c31b91f80e91f5707ca4f9afceca78d8e62"
# Pre-built binary bottles for the supported OS X releases.
bottle do
sha256 "8234d9f4885fb0f13c08e6dd9bdbb29f801c1b9961894bf73e2f49b92f9f2abd" => :el_capitan
sha256 "d8b21613d6ddbf6e4cfe1a95648a26b7b78aec4bedbba35ad39e39fefb3054ed" => :yosemite
sha256 "bc7b183c0eedcb5a8ab35be3b25cf00af89c867085a5fc0855d7ec74ab7d1e79" => :mavericks
end
conflicts_with "ffmpeg",
:because => "both install the same binaries"
# Default-on features (disable with --without-*).
option "without-x264", "Disable H.264 encoder"
option "without-lame", "Disable MP3 encoder"
option "without-libvo-aacenc", "Disable VisualOn AAC encoder"
option "without-xvid", "Disable Xvid MPEG-4 video encoder"
option "without-qtkit", "Disable deprecated QuickTime framework"
# Opt-in features: each "with-*" option maps to a ./configure switch in install.
option "with-rtmpdump", "Enable RTMP protocol"
option "with-libass", "Enable ASS/SSA subtitle format"
option "with-opencore-amr", "Enable Opencore AMR NR/WB audio format"
option "with-openjpeg", "Enable JPEG 2000 image format"
option "with-openssl", "Enable SSL support"
option "with-libssh", "Enable SFTP protocol via libssh"
option "with-schroedinger", "Enable Dirac video format"
option "with-ffplay", "Enable FFplay media player"
option "with-tools", "Enable additional FFmpeg tools"
option "with-fdk-aac", "Enable the Fraunhofer FDK AAC library"
option "with-libvidstab", "Enable vid.stab support for video stabilization"
option "with-x265", "Enable x265 encoder"
option "with-libsoxr", "Enable the soxr resample library"
option "with-webp", "Enable using libwebp to encode WEBP images"
option "with-zeromq", "Enable using libzeromq to receive commands sent through a libzeromq client"
option "with-snappy", "Enable Snappy library"
option "with-dcadec", "Enable dcadec library"
# Build-time-only dependencies.
depends_on "pkg-config" => :build
# manpages won't be built without texi2html
depends_on "texi2html" => :build
depends_on "yasm" => :build
# Recommended codecs, installed unless explicitly disabled.
depends_on "x264" => :recommended
depends_on "lame" => :recommended
depends_on "libvo-aacenc" => :recommended
depends_on "xvid" => :recommended
# Optional libraries, pulled in only when the matching option is set.
depends_on "faac" => :optional
depends_on "fontconfig" => :optional
depends_on "freetype" => :optional
depends_on "theora" => :optional
depends_on "libvorbis" => :optional
depends_on "libvpx" => :optional
depends_on "rtmpdump" => :optional
depends_on "opencore-amr" => :optional
depends_on "libass" => :optional
depends_on "openjpeg" => :optional
depends_on "sdl" if build.with? "ffplay"
depends_on "snappy" => :optional
depends_on "speex" => :optional
depends_on "schroedinger" => :optional
depends_on "fdk-aac" => :optional
depends_on "opus" => :optional
depends_on "frei0r" => :optional
depends_on "libcaca" => :optional
depends_on "libbluray" => :optional
depends_on "libsoxr" => :optional
depends_on "libquvi" => :optional
depends_on "libvidstab" => :optional
depends_on "x265" => :optional
depends_on "openssl" => :optional
depends_on "libssh" => :optional
depends_on "webp" => :optional
depends_on "zeromq" => :optional
depends_on "libbs2b" => :optional
depends_on "dcadec" => :optional
# Configures and builds FFmpeg: translates every formula option into the
# corresponding ./configure flag, builds, installs, and (with --with-tools)
# also installs the extra helper binaries from tools/.
def install
args = ["--prefix=#{prefix}",
"--enable-shared",
"--enable-pthreads",
"--enable-gpl",
"--enable-version3",
"--enable-hardcoded-tables",
"--enable-avresample",
"--cc=#{ENV.cc}",
"--host-cflags=#{ENV.cflags}",
"--host-ldflags=#{ENV.ldflags}",
]
# OpenCL is only usable on OS X releases newer than Lion.
args << "--enable-opencl" if MacOS.version > :lion
args << "--enable-libx264" if build.with? "x264"
args << "--enable-libmp3lame" if build.with? "lame"
args << "--enable-libvo-aacenc" if build.with? "libvo-aacenc"
args << "--enable-libxvid" if build.with? "xvid"
args << "--enable-libsnappy" if build.with? "snappy"
args << "--enable-libfontconfig" if build.with? "fontconfig"
args << "--enable-libfreetype" if build.with? "freetype"
args << "--enable-libtheora" if build.with? "theora"
args << "--enable-libvorbis" if build.with? "libvorbis"
args << "--enable-libvpx" if build.with? "libvpx"
args << "--enable-librtmp" if build.with? "rtmpdump"
args << "--enable-libopencore-amrnb" << "--enable-libopencore-amrwb" if build.with? "opencore-amr"
args << "--enable-libfaac" if build.with? "faac"
args << "--enable-libass" if build.with? "libass"
args << "--enable-ffplay" if build.with? "ffplay"
args << "--enable-libssh" if build.with? "libssh"
args << "--enable-libspeex" if build.with? "speex"
args << "--enable-libschroedinger" if build.with? "schroedinger"
args << "--enable-libfdk-aac" if build.with? "fdk-aac"
args << "--enable-openssl" if build.with? "openssl"
args << "--enable-libopus" if build.with? "opus"
args << "--enable-frei0r" if build.with? "frei0r"
args << "--enable-libcaca" if build.with? "libcaca"
args << "--enable-libsoxr" if build.with? "libsoxr"
args << "--enable-libquvi" if build.with? "libquvi"
args << "--enable-libvidstab" if build.with? "libvidstab"
args << "--enable-libx265" if build.with? "x265"
args << "--enable-libwebp" if build.with? "webp"
args << "--enable-libzmq" if build.with? "zeromq"
args << "--enable-libbs2b" if build.with? "libbs2b"
args << "--enable-libdcadec" if build.with? "dcadec"
args << "--disable-indev=qtkit" if build.without? "qtkit"
if build.with? "openjpeg"
args << "--enable-libopenjpeg"
# Prefer libopenjpeg over FFmpeg's own jpeg2000 decoder.
args << "--disable-decoder=jpeg2000"
args << "--extra-cflags=" + `pkg-config --cflags libopenjpeg`.chomp
end
# These libraries are GPL-incompatible, and require ffmpeg be built with
# the "--enable-nonfree" flag, which produces unredistributable libraries
if %w[faac fdk-aac openssl].any? { |f| build.with? f }
args << "--enable-nonfree"
end
# A bug in a dispatch header on 10.10, included via CoreFoundation,
# prevents GCC from building VDA support. GCC has no problems on
# 10.9 and earlier.
# See: https://github.com/Homebrew/homebrew/issues/33741
if MacOS.version < :yosemite || ENV.compiler == :clang
args << "--enable-vda"
else
args << "--disable-vda"
end
# For 32-bit compilation under gcc 4.2, see:
# https://trac.macports.org/ticket/20938#comment:22
ENV.append_to_cflags "-mdynamic-no-pic" if Hardware.is_32_bit? && Hardware::CPU.intel? && ENV.compiler == :clang
system "./configure", *args
if MacOS.prefer_64_bit?
# Strip a linker flag from SHFLAGS that breaks 64-bit shared builds.
inreplace "config.mak" do |s|
shflags = s.get_make_var "SHFLAGS"
if shflags.gsub!(" -Wl,-read_only_relocs,suppress", "")
s.change_make_var! "SHFLAGS", shflags
end
end
end
system "make", "install"
if build.with? "tools"
system "make", "alltools"
bin.install Dir["tools/*"].select { |f| File.executable? f }
end
end
# Post-install message explaining the libfaac licensing situation and the
# available AAC encoder alternatives. Returns nil when built --with-faac.
def caveats
if build.without? "faac" then <<-EOS.undent
FFmpeg has been built without libfaac for licensing reasons;
libvo-aacenc is used by default.
To install with libfaac, you can:
brew reinstall ffmpeg28 --with-faac
You can also use the experimental FFmpeg encoder, libfdk-aac, or
libvo_aacenc to encode AAC audio:
ffmpeg -i input.wav -c:a aac -strict experimental output.m4a
Or:
brew reinstall ffmpeg28 --with-fdk-aac
ffmpeg -i input.wav -c:a libfdk_aac output.m4a
EOS
end
end
# Formula smoke test: generate a one-second synthetic test pattern and
# assert the output file was written.
test do
# Create an example mp4 file
system "#{bin}/ffmpeg", "-y", "-filter_complex",
"testsrc=rate=1:duration=1", "#{testpath}/video.mp4"
assert (testpath/"video.mp4").exist?
end
end
|
# Namespace module for the EDI gem.
module EDI
  # Gem release version (SemVer), referenced by the gemspec.
  # Frozen so the shared constant cannot be mutated at runtime.
  VERSION = "0.4.0".freeze
end
Version Bump
# Namespace module for the EDI gem.
module EDI
  # Gem release version (SemVer), referenced by the gemspec.
  # Frozen so the shared constant cannot be mutated at runtime.
  VERSION = "0.4.1".freeze
end
|
# Namespace module for the EDI gem.
module EDI
  # Gem release version (SemVer), referenced by the gemspec.
  # Frozen so the shared constant cannot be mutated at runtime.
  VERSION = "0.1.2".freeze
end
Version Bump
# Namespace module for the EDI gem.
module EDI
  # Gem release version (SemVer), referenced by the gemspec.
  # Frozen so the shared constant cannot be mutated at runtime.
  VERSION = "0.2.0".freeze
end
|
module Egnyte
  # Convenience wrappers on the API client for working with Link resources.
  class Client
    # Returns the ids of all links visible to the session.
    def links
      Link::all(@session)
    end

    # Returns link ids matching the given filter params.
    def links_where(params)
      Link::where(@session, params)
    end

    # Fetches a single link by id.
    def link(id)
      Link::find(@session, id)
    end

    # Creates and persists a new link from the given attributes.
    def create_link(params)
      Link::create(@session, params)
    end

    # Deletes the link with the given id.
    def delete_link(id)
      Link::delete(@session, id)
    end
  end

  # Represents an Egnyte share link.
  class Link
    # Attributes the API requires before a link can be created.
    @@required_attributes = ['path', 'type', 'accessibility']

    attr_accessor :path, :type, :accessibility, :send_email, :recipients, :messages, :copy_me, :notify, :link_to_current, :expiry_date, :expiry_clicks, :add_filename, :creation_date
    attr_reader :id

    # session - authenticated API session used for all requests.
    # params  - attribute hash; each key becomes an instance variable.
    def initialize(session, params)
      @session = session
      params.each do |k,v|
        instance_variable_set("@#{k}", v)
      end
    end

    def self.all(session)
      self.where(session)
    end

    # Builds and immediately saves a link; returns the persisted link.
    def self.create(session, params)
      link = self.new(session, params)
      link.save
    end

    def self.find(session, id)
      response = session.get("#{self.link_path(session)}/#{id}", return_parsed_response=true)
      self.new(session, response)
    end

    # Returns an array of link ids (empty when the response has none).
    def self.where(session, params=nil)
      url = self.link_path(session)
      url += Egnyte::Helper.params_to_s(params) if params
      parsed_body = session.get(url)
      parsed_body["ids"].nil? ? [] : parsed_body["ids"]
    end

    # Persists this link, then reloads all attributes from the server copy.
    # Raises Egnyte::MissingAttribute when required attributes are absent.
    def save
      raise Egnyte::MissingAttribute.new(missing_attributes) unless valid?
      response = @session.post(link_path, to_json, return_parsed_response=true)
      link = Egnyte::Link.find(@session, response['links'].first['id'])
      link.instance_variables.each do |ivar|
        instance_variable_set(ivar, link.instance_variable_get(ivar))
      end
      self
    end

    # Deletes this link on the server.
    # BUG FIX: previously delegated to Egnyte::User.delete, which targets the
    # users endpoint; a Link must be deleted via Egnyte::Link.delete.
    def delete
      Egnyte::Link.delete(@session, @id)
    end

    def self.delete(session, id)
      session.delete("#{self.link_path(session)}/#{id}", return_parsed_response=false)
    end

    # True when every required attribute is present.
    def valid?
      missing_attributes.empty?
    end

    # Names of required attributes that are unset (or falsy).
    def missing_attributes
      missing = @@required_attributes.collect do |param|
        param unless instance_variable_get("@#{param}")
      end
      missing.compact
    end

    # Serializes all attributes except the session/client references and
    # nil values into a JSON object keyed by attribute name.
    def to_json
      hash = {}
      instance_variables.each do |iv|
        next if [:@session, :@client].include? iv
        next if instance_variable_get(iv) == nil
        hash[iv.to_s[1..-1]] = instance_variable_get(iv)
      end
      hash.to_json
    end

    def link_path
      Egnyte::Link.link_path(@session)
    end

    # Base URL of the links endpoint for the given session.
    def self.link_path(session)
      "https://#{session.domain}.#{EGNYTE_DOMAIN}/#{session.api}/v1/links"
    end
  end
end
Bug fix: Link#delete now delegates to Egnyte::Link.delete instead of the unrelated Egnyte::User.delete.
module Egnyte
  # Thin Link-related facade methods exposed on the API client.
  class Client
    # All link ids visible to the session.
    def links
      Link.all(@session)
    end

    # Link ids matching the given filters.
    def links_where(filters)
      Link.where(@session, filters)
    end

    # A single link looked up by id.
    def link(link_id)
      Link.find(@session, link_id)
    end

    # Create and persist a link from an attribute hash.
    def create_link(attributes)
      Link.create(@session, attributes)
    end

    # Delete the link with the given id.
    def delete_link(link_id)
      Link.delete(@session, link_id)
    end
  end

  # An Egnyte share link resource.
  class Link
    # Attributes the API requires before a link can be created.
    @@required_attributes = ['path', 'type', 'accessibility']

    attr_accessor :path, :type, :accessibility, :send_email, :recipients, :messages, :copy_me, :notify, :link_to_current, :expiry_date, :expiry_clicks, :add_filename, :creation_date
    attr_reader :id

    # Stores the session and copies every attribute into an ivar of the
    # same name.
    def initialize(session, attributes)
      @session = session
      attributes.each { |name, value| instance_variable_set("@#{name}", value) }
    end

    def self.all(session)
      where(session)
    end

    # Build + save in one step; returns the persisted link.
    def self.create(session, attributes)
      new(session, attributes).save
    end

    def self.find(session, id)
      parsed = session.get("#{link_path(session)}/#{id}", true)
      new(session, parsed)
    end

    # Array of link ids (empty when the response carries none).
    def self.where(session, params = nil)
      url = link_path(session)
      url += Egnyte::Helper.params_to_s(params) if params
      ids = session.get(url)["ids"]
      ids.nil? ? [] : ids
    end

    # Persists this link and refreshes every attribute from the server's
    # copy. Raises Egnyte::MissingAttribute when required fields are unset.
    def save
      raise Egnyte::MissingAttribute.new(missing_attributes) unless valid?
      created = @session.post(link_path, to_json, true)
      persisted = Egnyte::Link.find(@session, created['links'].first['id'])
      persisted.instance_variables.each do |name|
        instance_variable_set(name, persisted.instance_variable_get(name))
      end
      self
    end

    # Removes this link on the server.
    def delete
      Egnyte::Link.delete(@session, @id)
    end

    def self.delete(session, id)
      session.delete("#{link_path(session)}/#{id}", false)
    end

    # True when no required attribute is missing.
    def valid?
      missing_attributes.empty?
    end

    # Required attribute names whose ivars are unset (or falsy).
    def missing_attributes
      @@required_attributes.reject { |name| instance_variable_get("@#{name}") }
    end

    # JSON object of all attributes, skipping the session/client references
    # and any nil values.
    def to_json
      payload = instance_variables.each_with_object({}) do |name, acc|
        next if [:@session, :@client].include?(name)
        value = instance_variable_get(name)
        acc[name.to_s[1..-1]] = value unless value.nil?
      end
      payload.to_json
    end

    def link_path
      Egnyte::Link.link_path(@session)
    end

    # Base URL of the links endpoint for the given session.
    def self.link_path(session)
      "https://#{session.domain}.#{EGNYTE_DOMAIN}/#{session.api}/v1/links"
    end
  end
end
|
require 'active_model'
# NOTE(review): global monkey-patch of ActiveModel::Errors. Merges another
# Errors object's messages hash into this one; messages for duplicate
# attribute keys are overwritten, not combined.
module ActiveModel
class Errors
def merge!(errors)
messages.merge!(errors.messages)
end
end
end
module Eldr
  # Mixin that turns a plain class into an Eldr action: wires in
  # ActiveModel validations and provides Rack-style response plumbing.
  module Action
    def self.included(base)
      base.include ActiveModel::Validations
      base.send(:attr_accessor, :env, :status, :body, :header)
    end

    # Response headers, lazily initialized to an empty hash.
    def header
      @header ||= {}
    end

    # Response body, defaults to an empty string.
    def body
      @body ||= ''
    end

    # True when no validation errors have been recorded.
    def valid?
      errors.count.zero?
    end

    # Request params placed in the env by the router.
    def params
      env['eldr.params']
    end

    # Rack response triple: [status, headers, body-enumerable].
    def to_a
      [status, header, [body]]
    end
    alias_method :to_ary, :to_a
  end
end
add proper configuration handling
require 'active_model'
# NOTE(review): global monkey-patch of ActiveModel::Errors. Merges another
# Errors object's messages hash into this one; messages for duplicate
# attribute keys are overwritten, not combined.
module ActiveModel
class Errors
def merge!(errors)
messages.merge!(errors.messages)
end
end
end
module Eldr
# Mixin that turns a plain class into an Eldr action: wires in ActiveModel
# validations, per-action configuration, and Rack-style response plumbing.
module Action
class << self
def included(klass)
klass.include ActiveModel::Validations
klass.send(:attr_accessor, :env, :configuration, :status, :body, :header)
end
# Module-level configuration shared by actions, lazily built.
def configuration
@configuration ||= Configuration.new
end
alias_method :config, :configuration
# Convenience setter that forwards to the configuration object.
def set(key, value)
configuration.set(key, value)
end
end
# Accepts an explicit configuration, falling back to a class-level default.
# NOTE(review): `self.class.configuration` resolves on the *including*
# class, which only gains an instance-level accessor from `included` above;
# this appears to rely on the including class also defining (or being
# extended with) a class-level `configuration` — confirm against callers.
def initialize(configuration = nil)
configuration ||= self.class.configuration
@configuration = configuration
end
# Response headers, lazily initialized.
def header
@header ||= {}
end
# Response body, defaults to an empty string.
def body
@body ||= ''
end
# True when no validation errors have been recorded.
def valid?
errors.count == 0
end
# Request params placed in the env by the router.
def params
env['eldr.params']
end
# Rack response triple: [status, headers, body-enumerable].
def to_a
[status, header, [body]]
end
alias_method :to_ary, :to_a
end
end
|
require_relative '../../helper'
# Integration specs for the LookerSDK role endpoints. Examples tagged :vcr
# replay recorded HTTP interactions, so the order and parameters of API
# calls must stay in sync with the cassettes.
describe LookerSDK::Client::Roles do
before(:each) do
LookerSDK.reset!
@client = LookerSDK::Client.new(:netrc => true, :netrc_file => File.join(fixture_path, '.netrc'))
end
# Creates a throwaway role (plus its role_type and role_domain), yields it,
# and always deletes all three afterwards — even if the example fails.
def with_role(&block)
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
role = LookerSDK.create_role(:name => "test_role", :role_domain_id => role_domain.id, :role_type_id => role_type.id)
begin
yield role
ensure
LookerSDK.delete_role(role.id).must_equal true
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
end
describe ".all_roles", :vcr do
it "returns all Looker roles" do
roles = LookerSDK.all_roles
roles.must_be_kind_of Array
# NOTE(review): assumes the test instance has exactly 2 roles — this is
# tied to the recorded cassette.
roles.length.must_equal 2
roles.each do |user|
user.must_be_kind_of Sawyer::Resource
end
end
end
describe ".create_role", :vcr do
it "creates a role" do
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
role = LookerSDK.create_role(:name => "test_role", :role_domain_id => role_domain.id, :role_type_id => role_type.id)
role.name.must_equal "test_role"
role.role_domain.id.must_equal role_domain.id
role.role_type.id.must_equal role_type.id
LookerSDK.delete_role(role.id).must_equal true
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
it "requires a name to create" do
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:role_domain_id => role_domain.id, :role_type_id => role_type.id)
end
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
it "requires a valid role_type_id to create" do
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:name => "test_role_domain", :role_domain_id => role_domain.id)
end
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:name => "test_role_domain", :role_domain_id => role_domain.id, :role_type_id => 9999)
end
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
it "requires a valid role_domain_id to create" do
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:name => "test_role_domain", :role_type_id => role_type.id)
end
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:name => "test_role_domain", :role_type_id => role_type.id, :role_domain_id => 9999)
end
LookerSDK.delete_role_type(role_type.id).must_equal true
end
end
describe ".update_role", :vcr do
it "updates a role" do
with_role do |role|
role_type = LookerSDK.create_role_type(:name => "new_test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "new_test_role_domain", :models => "all")
new_role = LookerSDK.update_role(role.id, {:name => "new_test_role", :role_domain_id => role_domain.id, :role_type_id => role_type.id})
new_role.name.must_equal "new_test_role"
new_role.role_domain.id.must_equal role_domain.id
new_role.role_type.id.must_equal role_type.id
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
end
it "allows update to same name" do
with_role do |role|
new_role = LookerSDK.update_role(role.id, {:name => role.name})
new_role.name.must_equal role.name
end
end
it "rejects update with duplicate name" do
with_role do |role|
new_role = LookerSDK.create_role(:name => "new_name", :role_domain_id => role.role_domain.id, :role_type_id => role.role_type.id)
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.update_role(role.id, {:name => new_role.name})
end
LookerSDK.delete_role(new_role.id).must_equal true
end
end
it "requires a valid role_type_id to update" do
with_role do |role|
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.update_role(role.id, :role_type_id => 9999)
end
end
end
it "requires a valid role_domain_id to update" do
with_role do |role|
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.update_role(role.id, :role_domain_id => 9999)
end
end
end
end
describe ".delete_role", :vcr do
it "deletes user created roles" do
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
role = LookerSDK.create_role(:name => "test_role", :role_domain_id => role_domain.id, :role_type_id => role_type.id)
LookerSDK.delete_role(role.id).must_equal true
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
it "will not delete (403) built in admin role" do
roles = LookerSDK.all_roles
admin_role = roles.select {|d| d.name == "Admin"}.first
admin_role.wont_be_nil
admin_role.role_domain.name.must_equal "All"
admin_role.role_type.name.must_equal "Admin"
assert_raises LookerSDK::Forbidden do
LookerSDK.delete_role(admin_role.id)
end
end
end
describe ".set_role_users", :vcr do
it "sets users of role" do
users = (1..5).map {|i| LookerSDK.create_user }
with_role do |role|
LookerSDK.set_role_users(role.id, users.map {|u| u.id })
new_user_ids = LookerSDK.role_users(role.id).map {|user| user.id}
users.map {|u| u.id}.each do |user_id|
new_user_ids.must_include user_id
end
end
users.each do |u|
LookerSDK.delete_user(u.id)
end
end
end
end
Added a test to ensure that users cannot be added to a role twice.
require_relative '../../helper'
# Integration specs for the LookerSDK role endpoints. Examples tagged :vcr
# replay recorded HTTP interactions, so the order and parameters of API
# calls must stay in sync with the cassettes.
describe LookerSDK::Client::Roles do
before(:each) do
LookerSDK.reset!
@client = LookerSDK::Client.new(:netrc => true, :netrc_file => File.join(fixture_path, '.netrc'))
end
# Creates a throwaway role (plus its role_type and role_domain), yields it,
# and always deletes all three afterwards — even if the example fails.
def with_role(&block)
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
role = LookerSDK.create_role(:name => "test_role", :role_domain_id => role_domain.id, :role_type_id => role_type.id)
begin
yield role
ensure
LookerSDK.delete_role(role.id).must_equal true
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
end
describe ".all_roles", :vcr do
it "returns all Looker roles" do
roles = LookerSDK.all_roles
roles.must_be_kind_of Array
# NOTE(review): assumes the test instance has exactly 2 roles — this is
# tied to the recorded cassette.
roles.length.must_equal 2
roles.each do |user|
user.must_be_kind_of Sawyer::Resource
end
end
end
describe ".create_role", :vcr do
it "creates a role" do
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
role = LookerSDK.create_role(:name => "test_role", :role_domain_id => role_domain.id, :role_type_id => role_type.id)
role.name.must_equal "test_role"
role.role_domain.id.must_equal role_domain.id
role.role_type.id.must_equal role_type.id
LookerSDK.delete_role(role.id).must_equal true
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
it "requires a name to create" do
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:role_domain_id => role_domain.id, :role_type_id => role_type.id)
end
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
it "requires a valid role_type_id to create" do
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:name => "test_role_domain", :role_domain_id => role_domain.id)
end
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:name => "test_role_domain", :role_domain_id => role_domain.id, :role_type_id => 9999)
end
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
it "requires a valid role_domain_id to create" do
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:name => "test_role_domain", :role_type_id => role_type.id)
end
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.create_role(:name => "test_role_domain", :role_type_id => role_type.id, :role_domain_id => 9999)
end
LookerSDK.delete_role_type(role_type.id).must_equal true
end
end
describe ".update_role", :vcr do
it "updates a role" do
with_role do |role|
role_type = LookerSDK.create_role_type(:name => "new_test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "new_test_role_domain", :models => "all")
new_role = LookerSDK.update_role(role.id, {:name => "new_test_role", :role_domain_id => role_domain.id, :role_type_id => role_type.id})
new_role.name.must_equal "new_test_role"
new_role.role_domain.id.must_equal role_domain.id
new_role.role_type.id.must_equal role_type.id
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
end
it "allows update to same name" do
with_role do |role|
new_role = LookerSDK.update_role(role.id, {:name => role.name})
new_role.name.must_equal role.name
end
end
it "rejects update with duplicate name" do
with_role do |role|
new_role = LookerSDK.create_role(:name => "new_name", :role_domain_id => role.role_domain.id, :role_type_id => role.role_type.id)
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.update_role(role.id, {:name => new_role.name})
end
LookerSDK.delete_role(new_role.id).must_equal true
end
end
it "requires a valid role_type_id to update" do
with_role do |role|
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.update_role(role.id, :role_type_id => 9999)
end
end
end
it "requires a valid role_domain_id to update" do
with_role do |role|
assert_raises LookerSDK::UnprocessableEntity do
LookerSDK.update_role(role.id, :role_domain_id => 9999)
end
end
end
end
describe ".delete_role", :vcr do
it "deletes user created roles" do
role_type = LookerSDK.create_role_type(:name => "test_role_type", :permissions => ["administer"])
role_domain = LookerSDK.create_role_domain(:name => "test_role_domain", :models => "all")
role = LookerSDK.create_role(:name => "test_role", :role_domain_id => role_domain.id, :role_type_id => role_type.id)
LookerSDK.delete_role(role.id).must_equal true
LookerSDK.delete_role_type(role_type.id).must_equal true
LookerSDK.delete_role_domain(role_domain.id).must_equal true
end
it "will not delete (403) built in admin role" do
roles = LookerSDK.all_roles
admin_role = roles.select {|d| d.name == "Admin"}.first
admin_role.wont_be_nil
admin_role.role_domain.name.must_equal "All"
admin_role.role_type.name.must_equal "Admin"
assert_raises LookerSDK::Forbidden do
LookerSDK.delete_role(admin_role.id)
end
end
end
describe ".set_role_users", :vcr do
it "sets users of role" do
users = (1..5).map {|i| LookerSDK.create_user }
with_role do |role|
LookerSDK.set_role_users(role.id, users.map {|u| u.id })
new_user_ids = LookerSDK.role_users(role.id).map {|user| user.id}
users.map {|u| u.id}.each do |user_id|
new_user_ids.must_include user_id
end
end
users.each do |u|
LookerSDK.delete_user(u.id)
end
end
# Guards against the API storing a user twice on one role when the input
# id list itself contains a duplicate.
it "wont set duplicate roles" do
users = (1..5).map { |i| LookerSDK.create_user }
with_role do |role|
# set the users to be all the user ids plus the first one twice.
LookerSDK.set_role_users(role.id, users.map {|u| u.id } << users.first.id)
new_user_ids = LookerSDK.role_users(role.id).map {|user| user.id}
new_user_ids.select {|user_id| user_id == users.first.id}.length.must_equal 1
end
users.each do |u|
LookerSDK.delete_user(u.id)
end
end
end
end
|
require 'spec_helper'
module Worker
# Specs for Worker#add_job: the worker must announce each new job on its
# output stream.
describe Worker do
  describe "#add_job" do
    # Lazily-built test double standing in for the worker's output stream,
    # and a worker wired to it; both are created on first reference.
    let(:output) { double('output') }
    let(:worker) { Worker.new(output) }

    it "sends a info message" do
      # The expectation is set before the job is added, so the double
      # verifies the announcement happens during add_job.
      output.should_receive(:puts).with('Received a new Job')
      worker.add_job("-w320 -h120", "povray.pov")
    end
  end
end
end
Refactoring of worker_spec.rb
require 'spec_helper'
module Worker
# Specs for Worker#add_job: the worker must announce each new job on its
# output stream. `output` and `worker` are lazy rspec `let` helpers.
describe Worker do
describe "#add_job" do
let(:output) {double('output')}
let(:worker) {Worker.new(output)}
it "sends a info message" do
# Expectation is set before add_job, so the double verifies the
# announcement happens during the call.
output.should_receive(:puts).with('Received a new Job')
worker.add_job("-w320 -h120", "povray.pov")
end
end
end
end |
require 'epub/constants'
require 'epub/parser/version'
require 'epub/parser/ocf'
require 'epub/parser/publication'
require 'epub/parser/content_document'
require 'zipruby'
require 'nokogiri'
module EPUB
  # Parses an EPUB archive into a book object (OCF container + package
  # document; content documents are not parsed yet).
  class Parser
    class << self
      # Convenience: parse +file+ in one shot and return the populated book.
      def parse(file, options = {})
        new(file, options).parse
      end
    end

    # filepath - path to the EPUB archive; must be a readable file.
    # options  - :book (a pre-built book object) or :class (a book class to
    #            instantiate); defaults to EPUB::Book.
    def initialize(filepath, options = {})
      raise "File #{filepath} not readable" unless File.readable_real? filepath
      @filepath = File.realpath filepath
      @book = create_book options
    end

    # Opens the zip archive and fills in the book's OCF and package data.
    def parse
      Zip::Archive.open @filepath do |zip|
        @book.ocf = OCF.parse(zip)
        @book.package = Publication.parse(zip, @book.rootfile_path)
        # @book.content_document =??? parse_content_document
        # ...
      end
      @book
    end

    private

    # Chooses the destination book object from the options.
    # BUG FIX: the branches previously referenced an undefined local
    # `options`; they must read from the +params+ argument.
    def create_book(params)
      case
      when params[:book]
        params[:book]
      when params[:class]
        params[:class].new
      else
        require 'epub/book'
        Book.new
      end
    end
  end
end
[BUG FIX] Fix a mistaken variable name in Parser#create_book: the case branches referenced `options` instead of the `params` argument.
require 'epub/constants'
require 'epub/parser/version'
require 'epub/parser/ocf'
require 'epub/parser/publication'
require 'epub/parser/content_document'
require 'zipruby'
require 'nokogiri'
module EPUB
  # Parses an EPUB archive into a book object (OCF container + package
  # document; content documents are not parsed yet).
  class Parser
    # Convenience: parse +file+ in one shot and return the populated book.
    def self.parse(file, options = {})
      new(file, options).parse
    end

    # filepath - path to the EPUB archive; must be a readable file.
    # options  - :book (a pre-built book object) or :class (a book class to
    #            instantiate); defaults to EPUB::Book.
    def initialize(filepath, options = {})
      raise "File #{filepath} not readable" unless File.readable_real? filepath
      @filepath = File.realpath filepath
      @book = create_book options
    end

    # Opens the zip archive and fills in the book's OCF and package data.
    def parse
      Zip::Archive.open @filepath do |zip|
        @book.ocf = OCF.parse(zip)
        @book.package = Publication.parse(zip, @book.rootfile_path)
        # @book.content_document =??? parse_content_document
        # ...
      end
      @book
    end

    private

    # Chooses the destination book object from the options.
    def create_book(opts)
      if opts[:book]
        opts[:book]
      elsif opts[:class]
        opts[:class].new
      else
        require 'epub/book'
        Book.new
      end
    end
  end
end
|
module Escualo
  # Manages per-user environment variables on a remote host over SSH.
  # Variables live as one `export` line per file under ~/.escualo/vars and
  # are sourced from ~/.escualorc on login.
  module Env
    # Installs the ~/.escualorc bootstrap and hooks it into ~/.bashrc.
    # Idempotent: the grep guard avoids duplicated source lines.
    def self.setup(ssh)
      source_escualorc = "'source ~/.escualorc'"
      ssh.exec! %Q{
mkdir -p ~/.escualo/vars && \
echo 'for var in ~/.escualo/vars/*; do source $var; done' > ~/.escualorc && \
chmod u+x ~/.escualorc && \
grep -q #{source_escualorc} ~/.bashrc || echo #{source_escualorc} >> ~/.bashrc
}
    end

    # Seeds the built-in variables: escualo version, locale, application env.
    def self.set_builtins(ssh, options)
      set ssh, ESCUALO_BASE_VERSION: Escualo::BASE_VERSION
      set ssh, Escualo::Env.locale_variables
      set ssh, Escualo::Env.environment_variables(options.env)
    end

    # All managed variables as KEY=VALUE lines (export keyword stripped).
    def self.list(ssh)
      ssh.exec!("cat ~/.escualo/vars/*").gsub("export ", '')
    end

    # Removes every managed variable, then reinstates the built-ins.
    def self.clean(ssh, options)
      ssh.exec!("rm ~/.escualo/vars/*")
      set_builtins ssh, options
    end

    # True when +variable+ exists remotely and could actually be read.
    def self.present?(ssh, variable)
      contents = get(ssh, variable)
      contents.present? && !contents.include?('No such file or directory')
    end

    def self.get(ssh, variable)
      ssh.exec!("cat ~/.escualo/vars/#{variable}")
    end

    # Writes one file per variable, each containing a single export line.
    def self.set(ssh, variables)
      variables.each do |name, value|
        ssh.exec!("echo 'export #{name}=#{value}' > ~/.escualo/vars/#{name}")
      end
    end

    def self.unset(ssh, variable_names)
      variable_names.each { |name| ssh.exec!("rm ~/.escualo/vars/#{name}") }
    end

    # Locale settings, every LC_* category forced to en_US.UTF-8.
    def self.locale_variables
      %w{LANG LC_ALL LC_NAME LC_IDENTIFICATION LC_PAPER LC_ADDRESS LC_TIME LC_NUMERIC LC_MONETARY LC_TELEPHONE LC_MEASUREMENT}.each_with_object({}) do |name, vars|
        vars[name] = 'en_US.UTF-8'
      end
    end

    # Framework-agnostic application-environment variables.
    def self.environment_variables(environment)
      %w{RAILS_ENV NODE_ENV RACK_ENV}.each_with_object({}) do |name, vars|
        vars[name] = environment
      end
    end

    # Space-separated KEY=VALUE string suitable for prefixing a command.
    def self.locale_export
      locale_variables.map { |name, value| "#{name}=#{value}" }.join(' ')
    end
  end
end
Preserving envs after clean
module Escualo
  # Remote environment-variable management over SSH. Each variable is
  # stored in its own file under ~/.escualo/vars as an `export NAME=value`
  # line; ~/.escualorc sources every such file.
  module Env
    # Creates the vars directory and the ~/.escualorc hook, and wires the
    # hook into ~/.bashrc only once (grep -q guard).
    def self.setup(ssh)
      source_escualorc = "'source ~/.escualorc'"
      ssh.exec! %Q{
mkdir -p ~/.escualo/vars && \
echo 'for var in ~/.escualo/vars/*; do source $var; done' > ~/.escualorc && \
chmod u+x ~/.escualorc && \
grep -q #{source_escualorc} ~/.bashrc || echo #{source_escualorc} >> ~/.bashrc
}
    end

    # Writes the version marker, locale variables and framework env
    # variables (derived from options.env).
    def self.set_builtins(ssh, options)
      set ssh, ESCUALO_BASE_VERSION: Escualo::BASE_VERSION
      set ssh, Escualo::Env.locale_variables
      set ssh, Escualo::Env.environment_variables(options.env)
    end

    # All variables as NAME=value lines, `export ` prefix removed.
    def self.list(ssh)
      ssh.exec!("cat ~/.escualo/vars/*").gsub("export ", '')
    end

    # Removes every variable file, then restores the builtins. The current
    # RACK_ENV value is captured first so the environment name survives
    # the wipe.
    def self.clean(ssh, options)
      rack_env = get(ssh, 'RACK_ENV')
      # BUG FIX: guard against a missing RACK_ENV file (cat prints an
      # error with no '=' instead of an export line, which used to crash
      # on nil.strip), and use plain Array#[] instead of the
      # ActiveSupport-only Array#second.
      options.env = rack_env.split('=')[1].strip if rack_env && rack_env.include?('=')
      ssh.exec!("rm ~/.escualo/vars/*")
      set_builtins ssh, options
    end

    # True when the variable's file exists with a value (cat's
    # "No such file or directory" output is not treated as a value).
    def self.present?(ssh, variable)
      value = get(ssh, variable)
      value.present? && !value.include?('No such file or directory')
    end

    # Raw file content, e.g. "export FOO=bar\n" (or cat's error text when
    # the variable is not set).
    def self.get(ssh, variable)
      ssh.exec!("cat ~/.escualo/vars/#{variable}")
    end

    # Writes one file per key/value pair. Values are interpolated into a
    # shell command unescaped — assumes trusted input; TODO confirm.
    def self.set(ssh, variables)
      variables.each do |key, value|
        ssh.exec!("echo 'export #{key}=#{value}' > ~/.escualo/vars/#{key}")
      end
    end

    # Deletes the file of each named variable.
    def self.unset(ssh, variable_names)
      variable_names.each do |name|
        ssh.exec!("rm ~/.escualo/vars/#{name}")
      end
    end

    # All LANG/LC_* variables pinned to en_US.UTF-8.
    def self.locale_variables
      %w{LANG LC_ALL LC_NAME LC_IDENTIFICATION LC_PAPER LC_ADDRESS LC_TIME LC_NUMERIC LC_MONETARY LC_TELEPHONE LC_MEASUREMENT}.map do |it|
        [it, 'en_US.UTF-8']
      end.to_h
    end

    # RAILS_ENV/NODE_ENV/RACK_ENV all set to the same environment name.
    def self.environment_variables(environment)
      %w{RAILS_ENV NODE_ENV RACK_ENV}.map do |it|
        [it, environment]
      end.to_h
    end

    # "KEY=value KEY=value ..." string, suitable for prefixing commands.
    def self.locale_export
      locale_variables.map { |key, value| "#{key}=#{value}" }.join(' ')
    end
  end
end
class String
  # Internal: Check a given string for misspelled TLDs and misspelled domains from popular e-mail providers.
  #
  # The fix-up passes run in a fixed order; each returns a new String that
  # the next pass operates on, so reordering them changes behavior.
  #
  # Examples
  #
  #   "joe@gmail.cmo".clean_up_typoed_email
  #   # => "joe@gmail.com"
  #
  #   "joe@yaho.com".clean_up_typoed_email
  #   # => "joe@yahoo.com"
  #
  # Returns the cleaned String.
  def clean_up_typoed_email
    downcase.
    remove_invalid_characters.
    fix_transposed_periods.
    remove_period_around_at_sign.
    handle_different_country_tlds.
    fix_coms_with_appended_letters.
    clean_up_funky_coms.
    clean_up_funky_nets.
    clean_up_funky_orgs.
    clean_up_gmail.
    clean_up_googlemail.
    clean_up_hotmail.
    clean_up_aol.
    clean_up_yahoo.
    clean_up_other_providers.
    clean_up_known_coms.
    add_a_period_if_they_forgot_it
  end

  protected

  # Strips whitespace/quotes/backslashes, squashes commas and double dots
  # into single dots, and collapses "@@".
  def remove_invalid_characters
    gsub(/(\s|\#|\'|\"|\\)*/, "").
    gsub(/(\,|\.\.)/, ".").
    gsub("@@", "@")
  end

  # "c.om" -> ".com", "n.et" -> ".net" at end of string.
  def fix_transposed_periods
    gsub(/c\.om$/, ".com").
    gsub(/n\.et$/, ".net")
    # can't do "o.gr" => ".org", as ".gr" is a valid TLD
  end

  def remove_period_around_at_sign
    gsub(/(\.@|@\.)/, "@")
  end

  # Common .co.uk / .co.jp misspellings.
  def handle_different_country_tlds
    gsub(/\.(o\.uk|couk|co\.um)$/, ".co.uk").
    gsub(/\.(cojp|co\.lp)$/, ".co.jp")
  end

  # Trailing junk after ".com" and near-misses like ".cox".
  def fix_coms_with_appended_letters
    gsub(/\.com\.$/, ".com").
    gsub(/\.com(?!castbiz|\.).*$/, ".com"). # fix extra letters after .com as long as they're not .comcastbiz or .com.anything
    gsub(/\.co[^op]$/, ".com")
  end

  # Scrambled/typoed ".com" endings, e.g. ".cmo", ".con", ".vom".
  def clean_up_funky_coms
    gsub(/\.c*(c|ci|coi|l|m|n|o|op|cp|0)*m+o*$/,".com").
    gsub(/\.(c|v|x)o+(m|n)$/,".com")
  end

  def clean_up_funky_nets
    gsub(/\.(nte*|n*et*)$/, ".net")
  end

  def clean_up_funky_orgs
    gsub(/\.o+g*r*g*$/, ".org") # require the o, to not false-positive .gr e-mails
  end

  # Provider-domain typo passes below each normalize one "@provider."
  # segment to its canonical spelling.
  def clean_up_googlemail
    gsub(/@(g(o)*)*le(n|m)*(a|i|l)+m*(a|i|k|l)*\./,"@googlemail.")
  end

  def clean_up_gmail
    gsub(/@g(n|m)*(a|i|l)+m*(a|i|k|l|o|u)*\./,"@gmail.")
  end

  def clean_up_hotmail
    gsub(/@h(i|o|p)*y*t*o*a*m*n*t*(a|i|k|l)*\./,"@hotmail.")
  end

  def clean_up_yahoo
    gsub(/@(ya|yh|ua|ah)+h*a*o+\./,"@yahoo.")
  end

  def clean_up_aol
    gsub(/@(ol|ao|ail)\./,"@aol.")
  end

  def clean_up_other_providers
    gsub(/@co*(m|n)+a*cas*t*\./,"@comcast.").
    gsub(/@sbc*gl*ob(a|l)l*\./, "@sbcglobal.").
    gsub(/@ver*i*z*on\./,"@verizon.").
    gsub(/@icl*oud\./,"@icloud.").
    gsub(/@outl*ook*\./,"@outlook.")
  end

  # Known providers are always .com: "gmail.net" -> "gmail.com" etc.
  def clean_up_known_coms
    gsub(/(aol|googlemail|gmail|hotmail|yahoo|icloud|outlook)\.(co|net|org)$/, '\1.com')
  end

  # "joecom" -> "joe.com" when the dot before the TLD is missing.
  def add_a_period_if_they_forgot_it
    gsub(/([^\.])(com|org|net)$/, '\1.\2')
  end
end
Fixing more funky .coms
class String
  # Internal: Check a given string for misspelled TLDs and misspelled
  # domains from popular e-mail providers.
  #
  # The fix-up passes run in a fixed order; each produces a new String
  # that the next pass operates on.
  #
  # Examples
  #
  #   "joe@gmail.cmo".clean_up_typoed_email
  #   # => "joe@gmail.com"
  #
  #   "joe@yaho.com".clean_up_typoed_email
  #   # => "joe@yahoo.com"
  #
  # Returns the cleaned String.
  def clean_up_typoed_email
    passes = %i[
      remove_invalid_characters
      fix_transposed_periods
      remove_period_around_at_sign
      handle_different_country_tlds
      fix_coms_with_appended_letters
      clean_up_funky_coms
      clean_up_funky_nets
      clean_up_funky_orgs
      clean_up_gmail
      clean_up_googlemail
      clean_up_hotmail
      clean_up_aol
      clean_up_yahoo
      clean_up_other_providers
      clean_up_known_coms
      add_a_period_if_they_forgot_it
    ]
    passes.inject(downcase) { |email, pass| email.send(pass) }
  end

  protected

  # Strips whitespace/quotes/backslashes, squashes commas and double dots
  # into single dots, and collapses "@@".
  def remove_invalid_characters
    no_junk = gsub(/(\s|\#|\'|\"|\\)*/, "")
    dotted = no_junk.gsub(/(\,|\.\.)/, ".")
    dotted.gsub("@@", "@")
  end

  # "c.om" -> ".com", "n.et" -> ".net" at end of string.
  # can't do "o.gr" => ".org", as ".gr" is a valid TLD
  def fix_transposed_periods
    fixed = gsub(/c\.om$/, ".com")
    fixed.gsub(/n\.et$/, ".net")
  end

  def remove_period_around_at_sign
    gsub(/(\.@|@\.)/, "@")
  end

  # Common .co.uk / .co.jp misspellings.
  def handle_different_country_tlds
    uk_fixed = gsub(/\.(o\.uk|couk|co\.um)$/, ".co.uk")
    uk_fixed.gsub(/\.(cojp|co\.lp)$/, ".co.jp")
  end

  # Trailing junk after ".com" and near-misses like ".con".
  def fix_coms_with_appended_letters
    result = gsub(/\.co[mn]\.com/, ".com")
    result = result.gsub(/\.com\.$/, ".com")
    # fix up to three extra letters after .com as long as they're not
    # .comcast or .com.anything (and not before the @)
    result = result.gsub(/\.com(?!cast|\.|@).{1,3}$/, ".com")
    result.gsub(/\.co[^op]$/, ".com")
  end

  # Scrambled/typoed ".com" endings, e.g. ".cmo", ".vom".
  def clean_up_funky_coms
    funky = gsub(/\.c*(c|ci|coi|l|m|n|o|op|cp|0)*m+o*$/,".com")
    funky.gsub(/\.(c|v|x)o+(m|n)$/,".com")
  end

  def clean_up_funky_nets
    gsub(/\.(nte*|n*et*)$/, ".net")
  end

  # require the o, to not false-positive .gr e-mails
  def clean_up_funky_orgs
    gsub(/\.o+g*r*g*$/, ".org")
  end

  # Provider-domain passes below each normalize one "@provider."
  # segment to its canonical spelling.
  def clean_up_googlemail
    gsub(/@(g(o)*)*le(n|m)*(a|i|l)+m*(a|i|k|l)*\./,"@googlemail.")
  end

  def clean_up_gmail
    gsub(/@g(n|m)*(a|i|l)+m*(a|i|k|l|o|u)*\./,"@gmail.")
  end

  def clean_up_hotmail
    gsub(/@h(i|o|p)*y*t*o*a*m*n*t*(a|i|k|l)*\./,"@hotmail.")
  end

  def clean_up_yahoo
    gsub(/@(ya|yh|ua|ah)+h*a*o+\./,"@yahoo.")
  end

  def clean_up_aol
    gsub(/@(ol|ao|ail)\./,"@aol.")
  end

  def clean_up_other_providers
    cleaned = gsub(/@co*(m|n)+a*cas*t*\./,"@comcast.")
    cleaned = cleaned.gsub(/@sbc*gl*ob(a|l)l*\./, "@sbcglobal.")
    cleaned = cleaned.gsub(/@ver*i*z*on\./,"@verizon.")
    cleaned = cleaned.gsub(/@icl*oud\./,"@icloud.")
    cleaned.gsub(/@outl*ook*\./,"@outlook.")
  end

  # Known providers are always .com: "gmail.net" -> "gmail.com" etc.
  def clean_up_known_coms
    gsub(/(aol|googlemail|gmail|hotmail|yahoo|icloud|outlook)\.(co|net|org)$/, '\1.com')
  end

  # "joecom" -> "joe.com" when the dot before the TLD is missing.
  def add_a_period_if_they_forgot_it
    gsub(/([^\.])(com|org|net)$/, '\1.\2')
  end
end
require 'test_helper'
# Tests for SimCtl's list-related commands: enumerating device types and
# runtimes, looking both up by name, and rejecting unknown methods.
class SimCtl::Command::ListTest < Minitest::Test
  context 'devicetype' do
    should 'find device type by name' do
      assert_kind_of SimCtl::DeviceType, SimCtl.devicetype(name: 'iPhone 5')
    end
  end

  # list_devicetypes must return parsed SimCtl::DeviceType objects.
  context 'list_devicetypes' do
    should 'contain some devicetypes' do
      assert SimCtl.list_devicetypes.count > 0
    end

    should 'be a SimCtl::DeviceType object' do
      assert_kind_of SimCtl::DeviceType, SimCtl.list_devicetypes.first
    end

    should 'parse identifier property' do
      assert SimCtl.list_devicetypes.first.identifier != nil
    end

    should 'parse name property' do
      assert SimCtl.list_devicetypes.first.name != nil
    end
  end

  # list_runtimes must return parsed SimCtl::Runtime objects.
  context 'list_runtimes' do
    should 'contain some runtimes' do
      assert SimCtl.list_runtimes.count > 0
    end

    should 'be a SimCtl::Runtime object' do
      assert_kind_of SimCtl::Runtime, SimCtl.list_runtimes.first
    end

    should 'parse availability property' do
      assert SimCtl.list_runtimes.first.availability != nil
    end

    should 'parse buildversion property' do
      assert SimCtl.list_runtimes.first.buildversion != nil
    end

    should 'parse identifier property' do
      assert SimCtl.list_runtimes.first.identifier != nil
    end

    should 'parse name property' do
      assert SimCtl.list_runtimes.first.name != nil
    end
  end

  context 'runtime' do
    should 'find runtime by name' do
      assert_kind_of SimCtl::Runtime, SimCtl.runtime(name: 'iOS 9.2')
    end
  end

  # Unknown command methods must not be silently swallowed.
  context 'unknown method' do
    should 'raise an exception' do
      assert_raises { SimCtl.foo }
    end
  end
end
Add test
require 'test_helper'
# Tests for SimCtl's list-related commands: enumerating device types and
# runtimes, latest-runtime lookup per platform, lookup by name, and
# rejecting unknown methods.
class SimCtl::Command::ListTest < Minitest::Test
  context 'devicetype' do
    should 'find device type by name' do
      assert_kind_of SimCtl::DeviceType, SimCtl.devicetype(name: 'iPhone 5')
    end
  end

  # list_devicetypes must return parsed SimCtl::DeviceType objects.
  context 'list_devicetypes' do
    should 'contain some devicetypes' do
      assert SimCtl.list_devicetypes.count > 0
    end

    should 'be a SimCtl::DeviceType object' do
      assert_kind_of SimCtl::DeviceType, SimCtl.list_devicetypes.first
    end

    should 'parse identifier property' do
      assert SimCtl.list_devicetypes.first.identifier != nil
    end

    should 'parse name property' do
      assert SimCtl.list_devicetypes.first.name != nil
    end
  end

  # list_runtimes must return parsed SimCtl::Runtime objects, and
  # Runtime.latest must work for every supported platform.
  context 'list_runtimes' do
    should 'contain some runtimes' do
      assert SimCtl.list_runtimes.count > 0
    end

    should 'be a SimCtl::Runtime object' do
      assert_kind_of SimCtl::Runtime, SimCtl.list_runtimes.first
    end

    should 'parse availability property' do
      assert SimCtl.list_runtimes.first.availability != nil
    end

    should 'parse buildversion property' do
      assert SimCtl.list_runtimes.first.buildversion != nil
    end

    should 'parse identifier property' do
      assert SimCtl.list_runtimes.first.identifier != nil
    end

    should 'parse name property' do
      assert SimCtl.list_runtimes.first.name != nil
    end

    should 'return latest ios runtime' do
      assert_kind_of SimCtl::Runtime, SimCtl::Runtime.latest(:ios)
    end

    should 'return latest tvos runtime' do
      assert_kind_of SimCtl::Runtime, SimCtl::Runtime.latest(:tvos)
    end

    should 'return latest watchos runtime' do
      assert_kind_of SimCtl::Runtime, SimCtl::Runtime.latest(:watchos)
    end
  end

  context 'runtime' do
    should 'find runtime by name' do
      assert_kind_of SimCtl::Runtime, SimCtl.runtime(name: 'iOS 9.2')
    end
  end

  # Unknown command methods must not be silently swallowed.
  context 'unknown method' do
    should 'raise an exception' do
      assert_raises { SimCtl.foo }
    end
  end
end
|
require "filestorage/version"
require "filestorage/local_filestorage"
# Top-level namespace for the file storage backends loaded by the
# requires above.
module Filestorage
end
lib/filestorage.rb: Define exceptions.
require "filestorage/version"
require "filestorage/local_filestorage"
# Top-level namespace for the file storage backends loaded by the
# requires above.
module Filestorage
  # Raised when an entry expected to exist is missing.
  class NotExist < StandardError; end
  # Raised when creating an entry that already exists.
  class AlreadyExist < StandardError; end
end
|
#--
# Copyright (c) 2015-2016, John Mettraux, jmettraux+flon@gmail.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Made in Japan.
#++
module Flor
module Lang include Raabro
# parsing
# -- terminals ---------------------------------------------------------

def null(i); str(:null, i, 'null'); end
def number(i); rex(:number, i, /-?[0-9]+(\.[0-9]+)?([eE][+-]?[0-9]+)?/); end
def tru(i); str(nil, i, 'true'); end
def fls(i); str(nil, i, 'false'); end
def boolean(i); alt(:boolean, i, :tru, :fls); end

# Double-quoted string: JSON-style escapes plus \uXXXX sequences.
def dqstring(i)
  rex(:dqstring, i, %r{
    "(
      \\["bfnrt] |
      \\u[0-9a-fA-F]{4} |
      [^"\\\b\f\n\r\t]
    )*"
  }x)
end

# Single-quoted string, same escape rules as dqstring.
def sqstring(i)
  rex(:sqstring, i, %r{
    '(
      \\['bfnrt] |
      \\u[0-9a-fA-F]{4} |
      [^'\\\b\f\n\r\t]
    )*'
  }x)
end

# Regex literal /.../ with optional trailing flags.
def rxstring(i)
  rex(:rxstring, i, %r{
    /(
      \\[\/bfnrt] |
      \\u[0-9a-fA-F]{4} |
      [^/\\\b\f\n\r\t]
    )*/[a-z]*
  }x)
end

# Bare symbol: any run of characters that is not whitespace/punctuation.
def symbol(i); rex(:symbol, i, /[^:;| \b\f\n\r\t"',()\[\]{}#\\]+/); end

# -- layout tokens -----------------------------------------------------

def comment(i); rex(nil, i, /#[^\r\n]*/); end
def ws_star(i); rex(nil, i, /[ \t]*/); end
def retnew(i); rex(nil, i, /[\r\n]*/); end
def colon(i); str(nil, i, ':'); end
def comma(i); str(nil, i, ','); end
def bslash(i); str(nil, i, '\\'); end
def pstart(i); str(nil, i, '('); end
def pend(i); str(nil, i, ')'); end
def sbstart(i); str(nil, i, '['); end
def sbend(i); str(nil, i, ']'); end
def pbstart(i); str(nil, i, '{'); end
def pbend(i); str(nil, i, '}'); end

# End of line: optional trailing comment, then newlines.
def eol(i); seq(nil, i, :ws_star, :comment, '?', :retnew); end
def postval(i); rep(nil, i, :eol, 0); end
def comma_eol(i); seq(nil, i, :comma, :eol, :ws_star); end
def bslash_eol(i); seq(nil, i, :bslash, :eol, :ws_star); end
def sep(i); alt(nil, i, :comma_eol, :bslash_eol, :ws_star); end
def comma_qmark_eol(i); seq(nil, i, :comma, '?', :eol); end
def coll_sep(i); alt(nil, i, :bslash_eol, :comma_qmark_eol, :ws_star); end

# -- collections -------------------------------------------------------

# Object entry "key: exp".
def ent(i); seq(:ent, i, :key, :postval, :colon, :postval, :exp, :postval); end
def ent_qmark(i); rep(nil, i, :ent, 0, 1); end
def exp_qmark(i); rep(nil, i, :exp, 0, 1); end
def obj(i); eseq(:obj, i, :pbstart, :ent_qmark, :coll_sep, :pbend); end
def arr(i); eseq(:arr, i, :sbstart, :exp_qmark, :coll_sep, :sbend); end
def par(i); seq(:par, i, :pstart, :eol, :ws_star, :node, :eol, :pend); end

def val(i)
  altg(:val, i,
    :panode, :par,
    :symbol, :sqstring, :dqstring, :rxstring,
    :arr, :obj,
    :number, :boolean, :null)
end
def val_ws(i); seq(nil, i, :val, :ws_star); end

# precedence
# %w[ or or ], %w[ and and ],
# %w[ equ == != <> ], %w[ lgt < > <= >= ], %w[ sum + - ], %w[ prd * / % ],
def ssprd(i); rex(:sop, i, /[\*\/%]/); end
def sssum(i); rex(:sop, i, /[+-]/); end
def sslgt(i); rex(:sop, i, /(<=?|>=?)/); end
def ssequ(i); rex(:sop, i, /(==?|!=|<>)/); end
def ssand(i); str(:sop, i, 'and'); end
def ssor(i); str(:sop, i, 'or'); end
def sprd(i); seq(nil, i, :ssprd, :eol, '?'); end
def ssum(i); seq(nil, i, :sssum, :eol, '?'); end
def slgt(i); seq(nil, i, :sslgt, :eol, '?'); end
def sequ(i); seq(nil, i, :ssequ, :eol, '?'); end
def sand(i); seq(nil, i, :ssand, :eol, '?'); end
def sor(i); seq(nil, i, :ssor, :eol, '?'); end

# Binary expression ladder: highest precedence (prd) innermost,
# lowest (or) outermost; :exp is an alias for the whole ladder.
def eprd(i); jseq(:exp, i, :val_ws, :sprd); end
def esum(i); jseq(:exp, i, :eprd, :ssum); end
def elgt(i); jseq(:exp, i, :esum, :slgt); end
def eequ(i); jseq(:exp, i, :elgt, :sequ); end
def eand(i); jseq(:exp, i, :eequ, :sand); end
def eor(i); jseq(:exp, i, :eand, :sor); end
alias exp eor

# -- lines and nodes ---------------------------------------------------

def key(i); seq(:key, i, :exp); end
def keycol(i); seq(nil, i, :key, :ws_star, :colon, :eol); end
def att(i); seq(:att, i, :sep, :keycol, '?', :exp); end
def head(i); seq(:head, i, :exp); end

# Leading indentation; ';' and '|' also participate (see Nod#read).
def indent(i); rex(:indent, i, /[|; \t]*/); end
def node(i); seq(:node, i, :indent, :head, :att, '*'); end

def line(i)
  seq(:line, i, :node, '?', :eol)
end

# Parenthesised group of lines.
def panode(i)
  seq(:panode, i, :pstart, :eol, :ws_star, :line, '*', :eol, :pend)
end

# Top level: zero or more lines.
def flon(i); rep(:flon, i, :line, 0); end
# rewriting
# -- rewriting: parse tree -> [ head, children, line ] arrays ----------

# 1-based line number of tree node +t+ within the parsed input.
def line_number(t)
  t.input.string[0..t.offset].scan("\n").count + 1
end
alias ln line_number

def rewrite_par(t)
  Nod.new(t.lookup(:node)).to_a
end

def rewrite_symbol(t); [ t.string, [], ln(t) ]; end
def rewrite_sqstring(t); [ '_sqs', t.string[1..-2], ln(t) ]; end
def rewrite_dqstring(t); [ '_dqs', t.string[1..-2], ln(t) ]; end
def rewrite_rxstring(t); [ '_rxs', t.string, ln(t) ]; end
def rewrite_boolean(t); [ '_boo', t.string == 'true', line_number(t) ]; end
def rewrite_null(t); [ '_nul', nil, line_number(t) ]; end

def rewrite_number(t)
  s = t.string; [ '_num', s.index('.') ? s.to_f : s.to_i, ln(t) ]
end

# {...} literal; children alternate rewritten key, value. An empty
# collection is encoded as 0, not [].
def rewrite_obj(t)
  cn =
    t.subgather(nil).inject([]) do |a, tt|
      a << rewrite(tt.c0.c0)
      a << rewrite(tt.c4)
    end
  cn = 0 if cn.empty?
  [ '_obj', cn, ln(t) ]
end

def rewrite_arr(t)
  cn = t.subgather(nil).collect { |n| rewrite(n) }
  cn = 0 if cn.empty?
  [ '_arr', cn, ln(t) ]
end

def rewrite_val(t)
  rewrite(t.c0)
end

# Folds a flat operand/operator sequence into nested trees, grouping
# consecutive runs of the same operator (left to right).
def rewrite_exp(t)
  return rewrite(t.c0) if t.children.size == 1
  cn = [ rewrite(t.c0) ]
  op = t.lookup(:sop).string
  tcn = t.children[2..-1].dup
  loop do
    c = tcn.shift; break unless c
    cn << rewrite(c)
    o = tcn.shift; break unless o
    o = o.lookup(:sop).string
    next if o == op
    cn = [ [ op, cn, cn.first[2] ] ]
    op = o
  end
  [ op, cn, cn.first[2] ]
end
# Intermediate tree node used to rebuild the indentation-based node
# hierarchy before emitting the final [ head, children, line ] arrays.
#
# BUG FIX: this class had one extra `end` after its closing `end`,
# which terminated `module Lang` early and left an unmatched `end` at
# the bottom of the file (syntax error). The extra `end` is removed.
class Nod

  attr_accessor :parent, :indent
  attr_reader :children

  def initialize(tree)
    @parent = nil
    @indent = -1            # root sentinel: every real node is deeper
    @head = 'sequence'
    @children = []
    @line = 0
    read(tree) if tree
  end

  # Attaches +node+ at the right depth: :east nodes nest two columns
  # deeper than self, :south nodes stay at self's depth; otherwise the
  # numeric indent decides, climbing back up through the parents until
  # a shallower ancestor accepts the node.
  def append(node)
    if node.indent == :east
      node.indent = self.indent + 2
    elsif node.indent == :south
      node.indent = self.indent
    end
    if node.indent > self.indent
      @children << node
      node.parent = self
    else
      @parent.append(node)
    end
  end

  # Serializes the node into [ head, children, line ], rewriting a
  # trailing bare `if`/`unless` attribute into an ife/unlesse wrapper.
  def to_a
    return @head if @head.is_a?(Array) && @children.empty?
    cn = @children.collect(&:to_a)
    # detect if/unless suffix
    atts =
      cn.inject([]) { |a, c| a << c[1] if c[0] == '_att'; a }
    i =
      atts.index { |c|
        c.size == 1 && %w[ if unless ].include?(c[0][0]) && c[0][1] == []
      }
    #return cn.first if @head == 'sequence' && @line == 0 && cn.size == 1
    return [ @head, cn, @line ] unless i
    # rewrite if/unless suffix
    t = [ atts[i][0][0] == 'if' ? 'ife' : 'unlesse', [], @line ]
    t[1].concat(atts[i + 1..-1].collect(&:first))
    t[1].push([ @head, cn[0, i], @line ])
    t
  end

  protected

  # Extracts indent, head and attribute children from a :node parse
  # tree. One ';' in the indent run means :east (child of previous),
  # several ';' or a '|' mean :south (sibling of previous).
  def read(tree)
    if it = tree.lookup(:indent)
      s = it.string
      semicount = s.count(';')
      pipe = s.index('|')
      @indent =
        if semicount == 1 then :east
        elsif semicount > 1 || pipe then :south
        else s.length; end
    end
    ht = tree.lookup(:head)
    @line = Lang.line_number(ht)
    @head = Flor::Lang.rewrite(ht.c0)
    @head = @head[0] if @head[0].is_a?(String) && @head[1] == []
    @children.concat(
      tree.children[2..-1].collect do |ct|
        v = Flor::Lang.rewrite(ct.clast)
        if kt = ct.lookup(:key)
          k = Flor::Lang.rewrite(kt.c0)
          [ '_att', [ k, v ], k[2] ]
        else
          [ '_att', [ v ], v[2] ]
        end
      end)
  end
end
# Builds the Nod hierarchy from the flat list of :node trees, then
# serializes it; a single root child is returned directly (no wrapping
# 'sequence').
def rewrite_flon(t)
  prev = root = Nod.new(nil)
  t.gather(:node).each do |nt|
    n = Nod.new(nt)
    prev.append(n)
    prev = n
  end
  root.children.count == 1 ? root.children.first.to_a : root.to_a
end
alias rewrite_panode rewrite_flon

# Entry point; accepts parse(input), parse(input, fname) or
# parse(input, opts). The filename, when given, is appended to the
# resulting tree.
def parse(input, fname=nil, opts={})
  opts = fname if fname.is_a?(Hash) && opts.empty?
  #Raabro.pp(super(input, debug: 2))
  #Raabro.pp(super(input, debug: 3))
  r = super(input, opts)
  r << fname if fname
  r
end
end # module Lang
# Consumes four hex digits from char enumerator +cs+ and returns the
# corresponding UTF-8 character.
def self.unescape_u(cs)
  digits = ''
  4.times { digits << cs.next }
  [ digits.to_i(16) ].pack('U*')
end

# Expands backslash escapes (\n, \t, \uXXXX, \\, \", \', ...) in +s+;
# unrecognized escapes are kept verbatim (backslash included).
def self.unescape(s)
  out = StringIO.new
  chars = s.each_char
  loop do
    char = chars.next
    break unless char
    unless char == '\\'
      out.print(char)
      next
    end
    escaped = chars.next
    case escaped
    when 'u' then out.print(unescape_u(chars))
    when '\\', '"', '\'' then out.print(escaped)
    when 'b' then out.print("\b")
    when 'f' then out.print("\f")
    when 'n' then out.print("\n")
    when 'r' then out.print("\r")
    when 't' then out.print("\t")
    else out.print("\\#{escaped}")
    end
  end
  out.string
end
end
remove extra end in parser
#--
# Copyright (c) 2015-2016, John Mettraux, jmettraux+flon@gmail.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Made in Japan.
#++
module Flor
module Lang include Raabro
# parsing
# -- terminals ---------------------------------------------------------

def null(i); str(:null, i, 'null'); end
def number(i); rex(:number, i, /-?[0-9]+(\.[0-9]+)?([eE][+-]?[0-9]+)?/); end
def tru(i); str(nil, i, 'true'); end
def fls(i); str(nil, i, 'false'); end
def boolean(i); alt(:boolean, i, :tru, :fls); end

# Double-quoted string: JSON-style escapes plus \uXXXX sequences.
def dqstring(i)
  rex(:dqstring, i, %r{
    "(
      \\["bfnrt] |
      \\u[0-9a-fA-F]{4} |
      [^"\\\b\f\n\r\t]
    )*"
  }x)
end

# Single-quoted string, same escape rules as dqstring.
def sqstring(i)
  rex(:sqstring, i, %r{
    '(
      \\['bfnrt] |
      \\u[0-9a-fA-F]{4} |
      [^'\\\b\f\n\r\t]
    )*'
  }x)
end

# Regex literal /.../ with optional trailing flags.
def rxstring(i)
  rex(:rxstring, i, %r{
    /(
      \\[\/bfnrt] |
      \\u[0-9a-fA-F]{4} |
      [^/\\\b\f\n\r\t]
    )*/[a-z]*
  }x)
end

# Bare symbol: any run of characters that is not whitespace/punctuation.
def symbol(i); rex(:symbol, i, /[^:;| \b\f\n\r\t"',()\[\]{}#\\]+/); end

# -- layout tokens -----------------------------------------------------

def comment(i); rex(nil, i, /#[^\r\n]*/); end
def ws_star(i); rex(nil, i, /[ \t]*/); end
def retnew(i); rex(nil, i, /[\r\n]*/); end
def colon(i); str(nil, i, ':'); end
def comma(i); str(nil, i, ','); end
def bslash(i); str(nil, i, '\\'); end
def pstart(i); str(nil, i, '('); end
def pend(i); str(nil, i, ')'); end
def sbstart(i); str(nil, i, '['); end
def sbend(i); str(nil, i, ']'); end
def pbstart(i); str(nil, i, '{'); end
def pbend(i); str(nil, i, '}'); end

# End of line: optional trailing comment, then newlines.
def eol(i); seq(nil, i, :ws_star, :comment, '?', :retnew); end
def postval(i); rep(nil, i, :eol, 0); end
def comma_eol(i); seq(nil, i, :comma, :eol, :ws_star); end
def bslash_eol(i); seq(nil, i, :bslash, :eol, :ws_star); end
def sep(i); alt(nil, i, :comma_eol, :bslash_eol, :ws_star); end
def comma_qmark_eol(i); seq(nil, i, :comma, '?', :eol); end
def coll_sep(i); alt(nil, i, :bslash_eol, :comma_qmark_eol, :ws_star); end

# -- collections -------------------------------------------------------

# Object entry "key: exp".
def ent(i); seq(:ent, i, :key, :postval, :colon, :postval, :exp, :postval); end
def ent_qmark(i); rep(nil, i, :ent, 0, 1); end
def exp_qmark(i); rep(nil, i, :exp, 0, 1); end
def obj(i); eseq(:obj, i, :pbstart, :ent_qmark, :coll_sep, :pbend); end
def arr(i); eseq(:arr, i, :sbstart, :exp_qmark, :coll_sep, :sbend); end
def par(i); seq(:par, i, :pstart, :eol, :ws_star, :node, :eol, :pend); end

def val(i)
  altg(:val, i,
    :panode, :par,
    :symbol, :sqstring, :dqstring, :rxstring,
    :arr, :obj,
    :number, :boolean, :null)
end
def val_ws(i); seq(nil, i, :val, :ws_star); end

# precedence
# %w[ or or ], %w[ and and ],
# %w[ equ == != <> ], %w[ lgt < > <= >= ], %w[ sum + - ], %w[ prd * / % ],
def ssprd(i); rex(:sop, i, /[\*\/%]/); end
def sssum(i); rex(:sop, i, /[+-]/); end
def sslgt(i); rex(:sop, i, /(<=?|>=?)/); end
def ssequ(i); rex(:sop, i, /(==?|!=|<>)/); end
def ssand(i); str(:sop, i, 'and'); end
def ssor(i); str(:sop, i, 'or'); end
def sprd(i); seq(nil, i, :ssprd, :eol, '?'); end
def ssum(i); seq(nil, i, :sssum, :eol, '?'); end
def slgt(i); seq(nil, i, :sslgt, :eol, '?'); end
def sequ(i); seq(nil, i, :ssequ, :eol, '?'); end
def sand(i); seq(nil, i, :ssand, :eol, '?'); end
def sor(i); seq(nil, i, :ssor, :eol, '?'); end

# Binary expression ladder: highest precedence (prd) innermost,
# lowest (or) outermost; :exp is an alias for the whole ladder.
def eprd(i); jseq(:exp, i, :val_ws, :sprd); end
def esum(i); jseq(:exp, i, :eprd, :ssum); end
def elgt(i); jseq(:exp, i, :esum, :slgt); end
def eequ(i); jseq(:exp, i, :elgt, :sequ); end
def eand(i); jseq(:exp, i, :eequ, :sand); end
def eor(i); jseq(:exp, i, :eand, :sor); end
alias exp eor

# -- lines and nodes ---------------------------------------------------

def key(i); seq(:key, i, :exp); end
def keycol(i); seq(nil, i, :key, :ws_star, :colon, :eol); end
def att(i); seq(:att, i, :sep, :keycol, '?', :exp); end
def head(i); seq(:head, i, :exp); end

# Leading indentation; ';' and '|' also participate (see Nod#read).
def indent(i); rex(:indent, i, /[|; \t]*/); end
def node(i); seq(:node, i, :indent, :head, :att, '*'); end

def line(i)
  seq(:line, i, :node, '?', :eol)
end

# Parenthesised group of lines.
def panode(i)
  seq(:panode, i, :pstart, :eol, :ws_star, :line, '*', :eol, :pend)
end

# Top level: zero or more lines.
def flon(i); rep(:flon, i, :line, 0); end
# rewriting
# -- rewriting: parse tree -> [ head, children, line ] arrays ----------

# 1-based line number of tree node +t+ within the parsed input.
def line_number(t)
  t.input.string[0..t.offset].scan("\n").count + 1
end
alias ln line_number

def rewrite_par(t)
  Nod.new(t.lookup(:node)).to_a
end

def rewrite_symbol(t); [ t.string, [], ln(t) ]; end
def rewrite_sqstring(t); [ '_sqs', t.string[1..-2], ln(t) ]; end
def rewrite_dqstring(t); [ '_dqs', t.string[1..-2], ln(t) ]; end
def rewrite_rxstring(t); [ '_rxs', t.string, ln(t) ]; end
def rewrite_boolean(t); [ '_boo', t.string == 'true', line_number(t) ]; end
def rewrite_null(t); [ '_nul', nil, line_number(t) ]; end

def rewrite_number(t)
  s = t.string; [ '_num', s.index('.') ? s.to_f : s.to_i, ln(t) ]
end

# {...} literal; children alternate rewritten key, value. An empty
# collection is encoded as 0, not [].
def rewrite_obj(t)
  cn =
    t.subgather(nil).inject([]) do |a, tt|
      a << rewrite(tt.c0.c0)
      a << rewrite(tt.c4)
    end
  cn = 0 if cn.empty?
  [ '_obj', cn, ln(t) ]
end

def rewrite_arr(t)
  cn = t.subgather(nil).collect { |n| rewrite(n) }
  cn = 0 if cn.empty?
  [ '_arr', cn, ln(t) ]
end

def rewrite_val(t)
  rewrite(t.c0)
end

# Folds a flat operand/operator sequence into nested trees, grouping
# consecutive runs of the same operator (left to right).
def rewrite_exp(t)
  return rewrite(t.c0) if t.children.size == 1
  cn = [ rewrite(t.c0) ]
  op = t.lookup(:sop).string
  tcn = t.children[2..-1].dup
  loop do
    c = tcn.shift; break unless c
    cn << rewrite(c)
    o = tcn.shift; break unless o
    o = o.lookup(:sop).string
    next if o == op
    cn = [ [ op, cn, cn.first[2] ] ]
    op = o
  end
  [ op, cn, cn.first[2] ]
end
# Intermediate tree node used to rebuild the indentation-based node
# hierarchy before emitting the final [ head, children, line ] arrays.
class Nod

  attr_accessor :parent, :indent
  attr_reader :children

  def initialize(tree)
    @parent = nil
    @indent = -1            # root sentinel: every real node is deeper
    @head = 'sequence'
    @children = []
    @line = 0
    read(tree) if tree
  end

  # Attaches +node+ at the right depth: :east nodes nest two columns
  # deeper than self, :south nodes stay at self's depth; otherwise the
  # numeric indent decides, climbing back up through the parents until
  # a shallower ancestor accepts the node.
  def append(node)
    if node.indent == :east
      node.indent = self.indent + 2
    elsif node.indent == :south
      node.indent = self.indent
    end
    if node.indent > self.indent
      @children << node
      node.parent = self
    else
      @parent.append(node)
    end
  end

  # Serializes the node into [ head, children, line ], rewriting a
  # trailing bare `if`/`unless` attribute into an ife/unlesse wrapper.
  def to_a
    return @head if @head.is_a?(Array) && @children.empty?
    cn = @children.collect(&:to_a)
    # detect if/unless suffix
    atts =
      cn.inject([]) { |a, c| a << c[1] if c[0] == '_att'; a }
    i =
      atts.index { |c|
        c.size == 1 && %w[ if unless ].include?(c[0][0]) && c[0][1] == []
      }
    #return cn.first if @head == 'sequence' && @line == 0 && cn.size == 1
    return [ @head, cn, @line ] unless i
    # rewrite if/unless suffix
    t = [ atts[i][0][0] == 'if' ? 'ife' : 'unlesse', [], @line ]
    t[1].concat(atts[i + 1..-1].collect(&:first))
    t[1].push([ @head, cn[0, i], @line ])
    t
  end

  protected

  # Extracts indent, head and attribute children from a :node parse
  # tree. One ';' in the indent run means :east (child of previous),
  # several ';' or a '|' mean :south (sibling of previous).
  def read(tree)
    if it = tree.lookup(:indent)
      s = it.string
      semicount = s.count(';')
      pipe = s.index('|')
      @indent =
        if semicount == 1 then :east
        elsif semicount > 1 || pipe then :south
        else s.length; end
    end
    ht = tree.lookup(:head)
    @line = Lang.line_number(ht)
    @head = Flor::Lang.rewrite(ht.c0)
    @head = @head[0] if @head[0].is_a?(String) && @head[1] == []
    @children.concat(
      tree.children[2..-1].collect do |ct|
        v = Flor::Lang.rewrite(ct.clast)
        if kt = ct.lookup(:key)
          k = Flor::Lang.rewrite(kt.c0)
          [ '_att', [ k, v ], k[2] ]
        else
          [ '_att', [ v ], v[2] ]
        end
      end)
  end
end
# Builds the Nod hierarchy from the flat list of :node trees, then
# serializes it; a single root child is returned directly (no wrapping
# 'sequence').
def rewrite_flon(t)
  prev = root = Nod.new(nil)
  t.gather(:node).each do |nt|
    n = Nod.new(nt)
    prev.append(n)
    prev = n
  end
  root.children.count == 1 ? root.children.first.to_a : root.to_a
end
alias rewrite_panode rewrite_flon

# Entry point; accepts parse(input), parse(input, fname) or
# parse(input, opts). The filename, when given, is appended to the
# resulting tree.
def parse(input, fname=nil, opts={})
  opts = fname if fname.is_a?(Hash) && opts.empty?
  #Raabro.pp(super(input, debug: 2))
  #Raabro.pp(super(input, debug: 3))
  r = super(input, opts)
  r << fname if fname
  r
end
end # module Lang
# Consumes four hex digits from char enumerator +cs+ and returns the
# corresponding UTF-8 character.
def self.unescape_u(cs)
  s = ''; 4.times { s << cs.next }
  [ s.to_i(16) ].pack('U*')
end

# Expands backslash escapes (\n, \t, \uXXXX, \\, \", \', ...) in +s+;
# unrecognized escapes are kept verbatim (backslash included).
def self.unescape(s)
  sio = StringIO.new
  cs = s.each_char
  loop do
    c = cs.next
    break unless c
    if c == '\\'
      case cn = cs.next
      when 'u' then sio.print(unescape_u(cs))
      when '\\', '"', '\'' then sio.print(cn)
      when 'b' then sio.print("\b")
      when 'f' then sio.print("\f")
      when 'n' then sio.print("\n")
      when 'r' then sio.print("\r")
      when 't' then sio.print("\t")
      else sio.print("\\#{cn}")
      end
    else
      sio.print(c)
    end
  end
  sio.string
end
end
|
require_relative '../lib/css.rb'
require 'minitest/unit'
require 'minitest/autorun'
require 'nokogiri'
# Tests for descendant ("A B") CSS selector support in Css.select.
class DescendantSelectorTest < MiniTest::Unit::TestCase
  def test_can_select_B_descendants_of_A
    doc = Nokogiri.XML "<a><b/><b/></a>"
    selector = "a b"
    result = Css.select(doc, selector)
    assert_equal 2, result.count, "Expected 2, but there were #{result.count}"
  end

  def test_can_select_different_numbers_of_B_descendants_of_A
    doc = Nokogiri.XML "<a><b/><b/><b/></a>"
    selector = "a b"
    assert_equal 3, Css.select(doc, selector).count
  end

  def test_can_select_different_numbers_of_C_descendants_of_A
    doc = Nokogiri.XML "<a><c/><c/><c/></a>"
    selector = "a c"
    assert_equal 3, Css.select(doc, selector).count
  end

  def test_can_select_names_as_descendants
    doc = Nokogiri.XML "<a><cat/><cat/><cat/></a>"
    selector = "a cat"
    # BUG FIX: was `assert 3, Css.select(...).count`, which always passes
    # (3 is truthy and the count was only used as the failure message).
    assert_equal 3, Css.select(doc, selector).count
  end

  # Elements outside the ancestor must not be matched.
  def test_can_actually_select_descendants
    doc = Nokogiri.XML "<root><a><b/><b/></a><b/></root>"
    selector = "a b"
    assert_equal 2, Css.select(doc, selector).count
  end

  def test_can_chain_descendant_seletors
    doc = Nokogiri.XML "<root><a><b><c/></b><d><c/></d></a></root>"
    selector = "a b c"
    result = Css.select(doc, selector)
    assert_equal 1, result.count
  end

  def test_descendant_selectors_can_be_nested_arbitrarily_deeply
    doc = Nokogiri.XML "<root><a><b><c><d><e><f/></e></d></c></b></a></root>"
    selector = "a b c d e f"
    result = Css.select(doc, selector)
    assert_equal 1, result.count
  end

  # "a c" must match c nested under intermediate elements, not just
  # direct children.
  def test_irrelevant_elements_are_ignored_in_descendant_selector
    doc = Nokogiri.XML "<a><b><c/></b></a>"
    selector = "a c"
    result = Css.select(doc, selector)
    assert_equal 1, result.count
  end
end
add an assertion
require_relative '../lib/css.rb'
require 'minitest/unit'
require 'minitest/autorun'
require 'nokogiri'
# Tests for descendant ("A B") CSS selector support in Css.select.
class DescendantSelectorTest < MiniTest::Unit::TestCase
  def test_can_select_B_descendants_of_A
    doc = Nokogiri.XML "<a><b/><b/></a>"
    selector = "a b"
    result = Css.select(doc, selector)
    assert_equal 2, result.count, "Expected 2, but there were #{result.count}"
  end

  def test_can_select_different_numbers_of_B_descendants_of_A
    doc = Nokogiri.XML "<a><b/><b/><b/></a>"
    selector = "a b"
    assert_equal 3, Css.select(doc, selector).count
  end

  def test_can_select_different_numbers_of_C_descendants_of_A
    doc = Nokogiri.XML "<a><c/><c/><c/></a>"
    selector = "a c"
    assert_equal 3, Css.select(doc, selector).count
  end

  def test_can_select_names_as_descendants
    doc = Nokogiri.XML "<a><cat/><cat/><cat/></a>"
    selector = "a cat"
    # BUG FIX: was `assert 3, Css.select(...).count`, which always passes
    # (3 is truthy and the count was only used as the failure message).
    assert_equal 3, Css.select(doc, selector).count
  end

  # Elements outside the ancestor must not be matched.
  def test_can_actually_select_descendants
    doc = Nokogiri.XML "<root><a><b/><b/></a><b/></root>"
    selector = "a b"
    assert_equal 2, Css.select(doc, selector).count
  end

  def test_can_chain_descendant_seletors
    doc = Nokogiri.XML "<root><a><b><c/></b><d><c/></d></a></root>"
    selector = "a b c"
    result = Css.select(doc, selector)
    assert_equal 1, result.count
  end

  def test_descendant_selectors_can_be_nested_arbitrarily_deeply
    doc = Nokogiri.XML "<root><a><b><c><d><e><f/></e></d></c></b></a></root>"
    selector = "a b c d e f"
    result = Css.select(doc, selector)
    assert_equal 1, result.count
    assert_equal "f", result.first.name
  end

  # "a c" must match c nested under intermediate elements, not just
  # direct children.
  def test_irrelevant_elements_are_ignored_in_descendant_selector
    doc = Nokogiri.XML "<a><b><c/></b></a>"
    selector = "a c"
    result = Css.select(doc, selector)
    assert_equal 1, result.count
  end
end
|
class AWS < Fog::Bin
  class << self
    # Maps a service key symbol to its Fog service class (no connection
    # is created). Raises ArgumentError for unknown keys.
    def class_for(key)
      case key
      when :auto_scaling
        Fog::AWS::AutoScaling
      when :cdn
        Fog::CDN::AWS
      when :cloud_formation
        Fog::AWS::CloudFormation
      when :cloud_watch
        Fog::AWS::CloudWatch
      when :compute
        Fog::Compute::AWS
      when :dns
        Fog::DNS::AWS
      when :elasticache
        Fog::AWS::Elasticache
      when :elb
        Fog::AWS::ELB
      when :emr
        Fog::AWS::EMR
      when :iam
        Fog::AWS::IAM
      when :sdb, :simpledb
        Fog::AWS::SimpleDB
      when :ses
        Fog::AWS::SES
      when :sqs
        Fog::AWS::SQS
      when :eu_storage, :storage
        Fog::Storage::AWS
      when :rds
        Fog::AWS::RDS
      when :sns
        Fog::AWS::SNS
      when :sts
        Fog::AWS::STS
      else
        # @todo Replace most instances of ArgumentError with NotImplementedError
        # @todo For a list of widely supported Exceptions, see:
        # => http://www.zenspider.com/Languages/Ruby/QuickRef.html#35
        raise ArgumentError, "Unsupported #{self} service: #{key}"
      end
    end

    # Returns a connection for the given service key, lazily created and
    # memoized across calls.
    def [](service)
      @@connections ||= Hash.new do |hash, key|
        hash[key] = case key
          when :auto_scaling
            Fog::AWS::AutoScaling.new
          when :cdn
            Fog::Logger.warning("AWS[:cdn] is not recommended, use CDN[:aws] for portability")
            Fog::CDN.new(:provider => 'AWS')
          when :cloud_formation
            Fog::AWS::CloudFormation.new
          when :cloud_watch
            Fog::AWS::CloudWatch.new
          when :compute
            # BUG FIX: the warning previously misspelled "Compute" as "Comptue".
            Fog::Logger.warning("AWS[:compute] is not recommended, use Compute[:aws] for portability")
            Fog::Compute.new(:provider => 'AWS')
          when :dns
            Fog::Logger.warning("AWS[:dns] is not recommended, use DNS[:aws] for portability")
            Fog::DNS.new(:provider => 'AWS')
          when :elasticache
            Fog::AWS::Elasticache.new
          when :elb
            Fog::AWS::ELB.new
          when :emr
            Fog::AWS::EMR.new
          when :iam
            Fog::AWS::IAM.new
          when :rds
            Fog::AWS::RDS.new
          when :eu_storage
            Fog::Storage.new(:provider => 'AWS', :region => 'eu-west-1')
          when :sdb, :simpledb
            Fog::AWS::SimpleDB.new
          when :ses
            Fog::AWS::SES.new
          when :sqs
            Fog::AWS::SQS.new
          when :storage
            Fog::Logger.warning("AWS[:storage] is not recommended, use Storage[:aws] for portability")
            Fog::Storage.new(:provider => 'AWS')
          when :sns
            Fog::AWS::SNS.new
          when :sts
            # Consistency fix: class_for supports :sts, so [] should too.
            Fog::AWS::STS.new
          else
            raise ArgumentError, "Unrecognized service: #{key.inspect}"
        end
      end
      @@connections[service]
    end

    # Keys of all services supported by the AWS provider.
    def services
      Fog::AWS.services
    end
  end
end
Fixed a typo in the warning.
# Fog::Bin shim exposing AWS services through the top-level AWS constant.
class AWS < Fog::Bin
class << self
# Maps a service key (:compute, :storage, ...) to the Fog class that
# implements it. Raises ArgumentError for unsupported keys.
def class_for(key)
case key
when :auto_scaling
Fog::AWS::AutoScaling
when :cdn
Fog::CDN::AWS
when :cloud_formation
Fog::AWS::CloudFormation
when :cloud_watch
Fog::AWS::CloudWatch
when :compute
Fog::Compute::AWS
when :dns
Fog::DNS::AWS
when :elasticache
Fog::AWS::Elasticache
when :elb
Fog::AWS::ELB
when :emr
Fog::AWS::EMR
when :iam
Fog::AWS::IAM
when :sdb, :simpledb
Fog::AWS::SimpleDB
when :ses
Fog::AWS::SES
when :sqs
Fog::AWS::SQS
when :eu_storage, :storage
Fog::Storage::AWS
when :rds
Fog::AWS::RDS
when :sns
Fog::AWS::SNS
when :sts
Fog::AWS::STS
else
# @todo Replace most instances of ArgumentError with NotImplementedError
# @todo For a list of widely supported Exceptions, see:
# => http://www.zenspider.com/Languages/Ruby/QuickRef.html#35
raise ArgumentError, "Unsupported #{self} service: #{key}"
end
end
# Returns a memoized connection for +service+; the Hash default block
# constructs each connection lazily on first access.
# NOTE(review): @@connections is a class variable, shared across the
# Fog::Bin inheritance tree — confirm that is intended.
def [](service)
@@connections ||= Hash.new do |hash, key|
hash[key] = case key
when :auto_scaling
Fog::AWS::AutoScaling.new
when :cdn
Fog::Logger.warning("AWS[:cdn] is not recommended, use CDN[:aws] for portability")
Fog::CDN.new(:provider => 'AWS')
when :cloud_formation
Fog::AWS::CloudFormation.new
when :cloud_watch
Fog::AWS::CloudWatch.new
when :compute
Fog::Logger.warning("AWS[:compute] is not recommended, use Compute[:aws] for portability")
Fog::Compute.new(:provider => 'AWS')
when :dns
Fog::Logger.warning("AWS[:dns] is not recommended, use DNS[:aws] for portability")
Fog::DNS.new(:provider => 'AWS')
when :elasticache
Fog::AWS::Elasticache.new
when :elb
Fog::AWS::ELB.new
when :emr
Fog::AWS::EMR.new
when :iam
Fog::AWS::IAM.new
when :rds
Fog::AWS::RDS.new
when :eu_storage
Fog::Storage.new(:provider => 'AWS', :region => 'eu-west-1')
when :sdb, :simpledb
Fog::AWS::SimpleDB.new
when :ses
Fog::AWS::SES.new
when :sqs
Fog::AWS::SQS.new
when :storage
Fog::Logger.warning("AWS[:storage] is not recommended, use Storage[:aws] for portability")
Fog::Storage.new(:provider => 'AWS')
when :sns
Fog::AWS::SNS.new
else
raise ArgumentError, "Unrecognized service: #{key.inspect}"
end
end
@@connections[service]
end
# Delegates to the Fog AWS provider for the supported service list.
def services
Fog::AWS.services
end
end
end
|
require 'fresh_redis/hash'
require 'fresh_redis/key'
require 'fresh_redis/string'
require 'fresh_redis/version'
class FreshRedis
# Mixes in the gem's own Hash- and String-command implementations
# (FreshRedis::Hash / FreshRedis::String from the requires above).
include Hash
include String
# Sentinel stored in place of nil — presumably because the backing
# store cannot round-trip nil values; confirm against Hash/String.
NIL_VALUE = "__FR_NIL__"
# redis:: the underlying redis connection object
# options:: defaults merged into every key built via #build_key
def initialize(redis, options={})
@redis = redis
@options = options
end
# Builds a storage key for +base_key+; per-call options override the
# instance-wide defaults.
def build_key(base_key, options={})
options = @options.merge(options)
Key.build(base_key, options)
end
private
# Substitutes the sentinel for falsey values (uses ||, so false is
# wrapped as well as nil).
def n(value)
value || NIL_VALUE
end
# Reverses #n: maps the sentinel back to nil, passes anything else through.
def un_n(value)
n?(value) ? nil : value
end
# True when +value+ is the nil sentinel.
def n?(value)
value == NIL_VALUE
end
end
Add a note reminding us to extract the nil handling into a separate module
require 'fresh_redis/hash'
require 'fresh_redis/key'
require 'fresh_redis/string'
require 'fresh_redis/version'
class FreshRedis
  include Hash
  include String

  # Sentinel written in place of nil values.
  NIL_VALUE = "__FR_NIL__"

  # Wraps a redis connection; +options+ are the defaults for key building.
  def initialize(redis, options={})
    @redis = redis
    @options = options
  end

  # Builds a storage key, with per-call options overriding the defaults.
  def build_key(base_key, options={})
    Key.build(base_key, @options.merge(options))
  end

  private

  # TODO: extract nil handling out to a separate module
  # Substitute the sentinel for falsey values.
  def n(value)
    value ? value : NIL_VALUE
  end

  # Map the sentinel back to nil; pass anything else through untouched.
  def un_n(value)
    return nil if n?(value)
    value
  end

  # Is +value+ the nil sentinel?
  def n?(value)
    NIL_VALUE == value
  end
end
|
require_relative 'cache'
require_relative 'gem_store'
require 'rubygems'
require 'yard'
# Value object pairing a gem name with one released version/platform
# combination.
class GemVersion
  attr_accessor :name, :version, :platform

  # All attributes are normalized to Strings.
  def initialize(name, version, platform)
    @name = name.to_s
    @version = version.to_s
    @platform = platform.to_s
  end

  # The bare version for the default "ruby" platform, otherwise
  # "version,platform".
  def to_s
    return version if platform == "ruby"
    [version, platform].join(',')
  end
end
# Synchronizes the local gem database with the remote RubyGems index and
# invalidates affected cache entries.
class GemUpdater
include YARD::Server
attr_accessor :gem, :settings, :app
class << self
# Fetches the full remote gem index as {name => [GemVersion, ...]}.
# Handles both the RubyGems 1.x and 2.x SpecFetcher APIs.
def fetch_remote_gems
libs = {}
if Gem::VERSION < '2.0'
Gem::SpecFetcher.fetcher.list(true).values.flatten(1).each do |info|
(libs[info[0]] ||= []) << GemVersion.new(*info)
end
else # RubyGems 2.x API
Gem::SpecFetcher.fetcher.available_specs(:released).first.values.flatten(1).each do |tuple|
(libs[tuple.name] ||= []) << GemVersion.new(tuple.name, tuple.version, tuple.platform)
end
end
libs
end
# Diffs the remote index against the RemoteGem table, writes changed
# version lists, flushes caches, and removes gems that vanished
# remotely. Returns [changed_gems, removed_gems].
def update_remote_gems
libs = fetch_remote_gems
store = GemStore.new
changed_gems = {}
removed_gems = []
RemoteGem.all.each do |row|
changed_gems[row.name] = row.versions.split(' ')
end
RemoteGem.db.transaction do
libs.each do |name, versions|
versions = pick_best_versions(versions)
# Same element count after union => version lists are identical;
# drop the gem from the changed set.
if changed_gems[name] && (versions|changed_gems[name]).size == versions.size
changed_gems.delete(name)
else
store[name] = versions
end
end
end
changed_gems.keys.each do |gem_name|
flush_cache(gem_name)
end
# deal with deleted gems
changed_gems.keys.each do |gem_name|
next if libs[gem_name]
removed_gems << gem_name
changed_gems.delete(gem_name)
store.delete(gem_name)
end
[changed_gems, removed_gems]
end
# Collapses per-platform duplicates to one entry per version string,
# preferring the "ruby" platform (unshifted to the front) over others.
def pick_best_versions(versions)
seen = {}
uniqversions = []
versions.each do |ver|
uniqversions |= [ver.version]
(seen[ver.version] ||= []).send(ver.platform == "ruby" ? :unshift : :push, ver)
end
uniqversions.map {|v| seen[v].first.to_s }
end
# Invalidates the gem index pages and this gem's own page.
def flush_cache(gem_name)
Cache.invalidate("/gems", "/gems/~#{gem_name[0, 1]}", "/gems/#{gem_name}")
end
end
# app:: the server application whose settings we reuse
def initialize(app, name, version, platform='ruby')
self.settings = app.settings
self.app = app
self.gem = GemVersion.new(name, version, platform)
end
# Adds this gem's version to the store (idempotent via Array#|).
def register
puts "Registering gem #{gem.name}-#{gem.version}"
store = GemStore.new
libs = (store[gem.name] || []).map {|v| v.version }
store[gem.name] = libs | [gem.version]
end
# TODO: improve this cache invalidation to be version specific
def flush_cache
self.class.flush_cache(gem.name)
end
end
Improve initial import speed of remote_gems
require_relative 'cache'
require_relative 'gem_store'
require 'version_sorter'
require 'rubygems'
require 'yard'
# One released name/version/platform combination of a remote gem.
class GemVersion
  attr_accessor :name, :version, :platform

  # Coerces every attribute to a String.
  def initialize(name, version, platform)
    @name = name.to_s
    @version = version.to_s
    @platform = platform.to_s
  end

  # "ruby"-platform gems render as just the version; anything else is
  # suffixed with ",platform".
  def to_s
    platform.eql?("ruby") ? version : "#{version},#{platform}"
  end
end
# Synchronizes the local gem database with the remote RubyGems index and
# invalidates affected cache entries.
class GemUpdater
include YARD::Server
attr_accessor :gem, :settings, :app
class << self
# Fetches the full remote gem index as {name => [GemVersion, ...]}.
# Handles both the RubyGems 1.x and 2.x SpecFetcher APIs.
def fetch_remote_gems
libs = {}
if Gem::VERSION < '2.0'
Gem::SpecFetcher.fetcher.list(true).values.flatten(1).each do |info|
(libs[info[0]] ||= []) << GemVersion.new(*info)
end
else # RubyGems 2.x API
Gem::SpecFetcher.fetcher.available_specs(:released).first.values.flatten(1).each do |tuple|
(libs[tuple.name] ||= []) << GemVersion.new(tuple.name, tuple.version, tuple.platform)
end
end
libs
end
# Diffs the remote index against the RemoteGem table, writes changed
# version lists, flushes caches, and removes gems that vanished
# remotely. Returns [changed_gems, removed_gems].
def update_remote_gems
libs = fetch_remote_gems
store = GemStore.new
changed_gems = {}
removed_gems = []
RemoteGem.all.each do |row|
changed_gems[row.name] = row.versions.split(' ')
end
RemoteGem.db.transaction do
libs.each do |name, versions|
versions = pick_best_versions(versions)
# Same element count after union => version lists are identical;
# drop the gem from the changed set.
if changed_gems[name] && (versions|changed_gems[name]).size == versions.size
changed_gems.delete(name)
elsif changed_gems[name]
store[name] = versions
else
# Brand-new gem: insert the row directly (fast path for the
# initial import, skipping GemStore's per-gem overhead).
RemoteGem.create(name: name,
versions: VersionSorter.sort(versions).join(" "))
end
end
end
changed_gems.keys.each do |gem_name|
flush_cache(gem_name)
end
# deal with deleted gems
changed_gems.keys.each do |gem_name|
next if libs[gem_name]
removed_gems << gem_name
changed_gems.delete(gem_name)
store.delete(gem_name)
end
[changed_gems, removed_gems]
end
# Collapses per-platform duplicates to one entry per version string,
# preferring the "ruby" platform (unshifted to the front) over others.
def pick_best_versions(versions)
seen = {}
uniqversions = []
versions.each do |ver|
uniqversions |= [ver.version]
(seen[ver.version] ||= []).send(ver.platform == "ruby" ? :unshift : :push, ver)
end
uniqversions.map {|v| seen[v].first.to_s }
end
# Invalidates the gem index pages and this gem's own page.
def flush_cache(gem_name)
Cache.invalidate("/gems", "/gems/~#{gem_name[0, 1]}", "/gems/#{gem_name}")
end
end
# app:: the server application whose settings we reuse
def initialize(app, name, version, platform='ruby')
self.settings = app.settings
self.app = app
self.gem = GemVersion.new(name, version, platform)
end
# Adds this gem's version to the store (idempotent via Array#|).
def register
puts "Registering gem #{gem.name}-#{gem.version}"
store = GemStore.new
libs = (store[gem.name] || []).map {|v| v.version }
store[gem.name] = libs | [gem.version]
end
# TODO: improve this cache invalidation to be version specific
def flush_cache
self.class.flush_cache(gem.name)
end
end
|
require 'gembots'
# useful math functions/methods
# Degrees-to-radians conversion helper mixed into Integer.
class Integer
  # Interprets the receiver as degrees; returns the angle in radians.
  def to_radian
    (Math::PI / 180.0) * self
  end
end
# 2x2 rotation matrix for +angle+ (radians). Negative angles get the
# transposed matrix, mirroring the original sign convention.
def rotation_matrix angle
  c = Math.cos angle
  s = Math.sin angle
  if angle < 0
    [[c, s], [-s, c]]
  else
    [[c, -s], [s, c]]
  end
end
class Gembots::Robot
  # robot's display name
  attr_accessor :name
  # coordinates and heading (degrees)
  attr_reader :x_pos
  attr_reader :y_pos
  attr_reader :angle
  # robot ids
  attr_reader :id
  attr_accessor :parent_id # i know it's possible to abuse this

  def initialize name = 'Robot'
    @name = name
    @x_pos = 0
    @y_pos = 0
    @angle = 0
    @id = self.object_id
    @parent_id = nil
  end

  # Returns a duplicate robot whose parent_id points back at this robot.
  def clone
    clone = self.dup
    clone.parent_id = self.id
    clone
  end

  # True when this robot was cloned from +robot+.
  def is_clone_of? robot
    @parent_id == robot.id
  end

  # Moves forward the distance specified; use negative numbers to move
  # in reverse. Only the eight 45-degree headings are supported by the
  # lookup table; other angles truncate down to the nearest step.
  def move dist=1
    directions = [
      [1, 0],   # 0
      [1, 1],   # 45
      [0, 1],   # 90
      [-1, 1],  # 135
      [-1, 0],  # 180
      [-1, -1], # 225
      [0, -1],  # 270
      [1, -1]   # 315
    ]
    # BUG FIX: the original index (360 / @angle - 1) raised
    # ZeroDivisionError at the default heading of 0 and mapped other
    # headings to the wrong rows (e.g. 90 picked [-1, 1], the "# 135"
    # row). Index by 45-degree steps instead; `% 8` also keeps a
    # wrapped heading of exactly 360 in range.
    step = (@angle / 45) % 8
    # NOTE(review): the original applied directions[...][0] to y and
    # [...][1] to x; kept as-is — confirm whether rows are [y, x] or [x, y].
    @y_pos += dist * directions[step][0]
    @x_pos += dist * directions[step][1]
  end

  # Rotates angle in degrees clockwise; use negative numbers to rotate
  # counter-clockwise.
  def turn angle
    @angle += angle
    # wrapping
    @angle -= 360 if @angle > 360
    @angle += 360 if @angle < 0
    # additional code to implement animation speed/timing if need be
    self.update
  end

  # Default no-op hooks so subclasses only override what they need.
  # Main loop idle code; interrupted whenever other hooks fire.
  def when_idle robot
  end

  # Called when this robot's angle/view detects another robot.
  def when_find_robot robot, target_robot
  end

  # Called when moving onto the same space as another robot.
  def when_robot_collision robot, target_robot
  end

  # Hook for telling the arena to refresh its state.
  def update *i
  end
end
Added arena accessor and arena update hooks to Robot
require 'gembots'
# useful math functions/methods
# Adds a degrees-to-radians conversion to Integer.
class Integer
  # Treats self as an angle in degrees; returns radians as a Float.
  def to_radian
    ratio = Math::PI / 180.0
    self * ratio
  end
end
# Builds the 2x2 rotation matrix for +angle+ in radians; a negative
# angle yields the transposed matrix (same convention as before).
def rotation_matrix angle
  cos_a, sin_a = Math.cos(angle), Math.sin(angle)
  return [[cos_a, sin_a], [-sin_a, cos_a]] if angle < 0
  [[cos_a, -sin_a], [sin_a, cos_a]]
end
class Gembots::Robot
  # robot's display name
  attr_accessor :name
  # coordinates and heading (degrees)
  attr_reader :x_pos
  attr_reader :y_pos
  attr_reader :angle
  # robot ids
  attr_reader :id
  attr_accessor :parent_id # i know it's possible to abuse this
  # robot arena
  # NOTE(review): @arena is never initialized, so it is nil until the
  # arena assigns itself — confirm the arena always does so before moves.
  attr_accessor :arena

  def initialize name = 'Robot'
    @name = name
    @x_pos = 0
    @y_pos = 0
    @angle = 0
    @id = self.object_id
    @parent_id = nil
  end

  # Returns a duplicate robot whose parent_id points back at this robot.
  def clone
    clone = self.dup
    clone.parent_id = self.id
    clone
  end

  # True when this robot was cloned from +robot+.
  def is_clone_of? robot
    @parent_id == robot.id
  end

  # Moves forward the distance specified; use negative numbers to move
  # in reverse. Only the eight 45-degree headings are supported by the
  # lookup table; other angles truncate down to the nearest step.
  def move dist=1
    directions = [
      [1, 0],   # 0
      [1, 1],   # 45
      [0, 1],   # 90
      [-1, 1],  # 135
      [-1, 0],  # 180
      [-1, -1], # 225
      [0, -1],  # 270
      [1, -1]   # 315
    ]
    # BUG FIX: the original index (360 / @angle - 1) raised
    # ZeroDivisionError at the default heading of 0 and mapped other
    # headings to the wrong rows (e.g. 90 picked [-1, 1], the "# 135"
    # row). Index by 45-degree steps instead; `% 8` also keeps a
    # wrapped heading of exactly 360 in range.
    step = (@angle / 45) % 8
    # NOTE(review): the original applied directions[...][0] to y and
    # [...][1] to x; kept as-is — confirm whether rows are [y, x] or [x, y].
    @y_pos += dist * directions[step][0]
    @x_pos += dist * directions[step][1]
    self.update @arena
  end

  # Rotates angle in degrees clockwise; use negative numbers to rotate
  # counter-clockwise.
  def turn angle
    @angle += angle
    # wrapping
    @angle -= 360 if @angle > 360
    @angle += 360 if @angle < 0
    # additional code to implement animation speed/timing if need be
    self.update @arena
  end

  # Default no-op hooks so subclasses only override what they need.
  # Main loop idle code; interrupted whenever other hooks fire.
  def when_idle robot
  end

  # Called when this robot's angle/view detects another robot.
  def when_find_robot robot, target_robot
  end

  # Called when moving onto the same space as another robot.
  def when_robot_collision robot, target_robot
  end

  # Hook for telling the arena to refresh its state.
  def update *i
  end
end
|
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'line/bot/api/version'
# Gem specification for the LINE Messaging API SDK for Ruby.
Gem::Specification.new do |spec|
spec.name = "line-bot-api"
spec.version = Line::Bot::API::VERSION
spec.authors = ["LINE Corporation"]
spec.email = ["kimoto@linecorp.com", "todaka.yusuke@linecorp.com", "masaki_kurosawa@linecorp.com"]
spec.description = "Line::Bot::API - SDK of the LINE Messaging API for Ruby"
spec.summary = "SDK of the LINE Messaging API"
spec.homepage = "https://github.com/line/line-bot-sdk-ruby"
spec.license = "Apache-2.0"
spec.files = %w(CONTRIBUTING.md LICENSE README.md line-bot-api.gemspec) + Dir['lib/**/*.rb']
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.required_ruby_version = '>= 2.0.0'
spec.add_development_dependency "addressable", "~> 2.3"
# bundler 2.x requires Ruby >= 2.3, hence the conditional pin.
spec.add_development_dependency "bundler", "~> 1.11" if RUBY_VERSION < "2.3"
spec.add_development_dependency 'rake', "~> 10.4"
spec.add_development_dependency "rspec", "~> 3.0"
# NOTE(review): webmock ~> 1.24 is several major versions behind —
# consider updating to a current 3.x release.
spec.add_development_dependency "webmock", "~> 1.24"
end
Update webmock requirement from ~> 1.24 to ~> 3.8
Updates the requirements on [webmock](https://github.com/bblimke/webmock) to permit the latest version.
- [Release notes](https://github.com/bblimke/webmock/releases)
- [Changelog](https://github.com/bblimke/webmock/blob/master/CHANGELOG.md)
- [Commits](https://github.com/bblimke/webmock/compare/v1.24.0...v3.8.3)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com>
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'line/bot/api/version'
# Gem specification for the LINE Messaging API SDK for Ruby.
Gem::Specification.new do |spec|
spec.name = "line-bot-api"
spec.version = Line::Bot::API::VERSION
spec.authors = ["LINE Corporation"]
spec.email = ["kimoto@linecorp.com", "todaka.yusuke@linecorp.com", "masaki_kurosawa@linecorp.com"]
spec.description = "Line::Bot::API - SDK of the LINE Messaging API for Ruby"
spec.summary = "SDK of the LINE Messaging API"
spec.homepage = "https://github.com/line/line-bot-sdk-ruby"
spec.license = "Apache-2.0"
spec.files = %w(CONTRIBUTING.md LICENSE README.md line-bot-api.gemspec) + Dir['lib/**/*.rb']
# NOTE(review): test_files is deprecated in recent RubyGems — confirm
# whether it can be dropped.
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.required_ruby_version = '>= 2.0.0'
spec.add_development_dependency "addressable", "~> 2.3"
# bundler 2.x requires Ruby >= 2.3, hence the conditional pin.
spec.add_development_dependency "bundler", "~> 1.11" if RUBY_VERSION < "2.3"
spec.add_development_dependency 'rake', "~> 10.4"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "webmock", "~> 3.8"
end
|
class Ginsu::Knife
# Returns the current status from the shared configuration.
# NOTE(review): @@config is a class variable assumed to be assigned
# elsewhere — raises NameError if it never was; confirm, and consider
# a class instance variable to avoid cross-subclass sharing.
def current_status
@@config.status
end
end
Added Ginsu::Knife::initialize.
class Ginsu::Knife
  # Mass-assigns any params that have a matching attribute writer, then
  # calls an optional #init hook.
  def initialize(params = {})
    params.each do |key, val|
      setter = "#{key}="
      # BUG FIX: the original guarded on `respond_to? key` (the reader),
      # which raised NoMethodError for attributes with a reader but no
      # writer, and skipped attributes with only a writer. Guard on the
      # setter we actually send.
      self.send(setter, val) if self.respond_to? setter
    end
    self.send(:init) if self.respond_to? :init
  end

  # Returns the current status from the shared configuration.
  # NOTE(review): @@config is a class variable assumed to be assigned
  # elsewhere — raises NameError if it never was; confirm.
  def current_status
    @@config.status
  end
end
|
require_relative 'git_crecord/git'
require_relative 'git_crecord/hunks'
require_relative 'git_crecord/ui'
require_relative 'git_crecord/version'
module GitCrecord
  # Entry point. Returns a boolean success flag that callers can map to
  # an exit status.
  def self.main(argv)
    if argv.include?('--version')
      puts VERSION
      # BUG FIX: this branch returned the Integer 0 while #run returns
      # booleans — 0 is truthy in Ruby, making the result inconsistent.
      # Return true so every path yields a boolean.
      true
    else
      run
    end
  end

  # Runs the interactive record UI from the repository root.
  # Returns false when there is nothing to record, otherwise the
  # boolean outcome of the selected UI action.
  def self.run
    Dir.chdir(Git.toplevel_dir) do
      files = Hunks.parse(Git.diff)
      files.concat(Hunks.untracked_files(Git.status))
      return false if files.empty?
      result = UI.run(files)
      return result.call == true if result.respond_to?(:call)
      # BUG FIX: was the Integer 0 — truthy, yet inconsistent with the
      # boolean returns above; use true.
      true
    end
  end
end
Use booleans as return-values of the program
require_relative 'git_crecord/git'
require_relative 'git_crecord/hunks'
require_relative 'git_crecord/ui'
require_relative 'git_crecord/version'
module GitCrecord
  # Entry point: prints the version and succeeds when --version is
  # given, otherwise runs the interactive record UI.
  def self.main(argv)
    return run unless argv.include?('--version')
    puts VERSION
    true
  end

  # Runs the record UI from the repository root. Returns false when
  # there is nothing to record, otherwise the boolean outcome of the
  # selected UI action (true when no action was returned).
  def self.run
    Dir.chdir(Git.toplevel_dir) do
      files = Hunks.parse(Git.diff)
      files.concat(Hunks.untracked_files(Git.status))
      return false if files.empty?
      outcome = UI.run(files)
      return true unless outcome.respond_to?(:call)
      outcome.call == true
    end
  end
end
|
require "gnip-stream/version"
require 'gnip-stream/stream'
require 'gnip-stream/data_buffer'
require 'gnip-stream/json_stream'
require 'gnip-stream/xml_stream'
require 'gnip-stream/powertrack_client'
require 'gnip-stream/facebook_client'
# Namespace for the gnip-stream gem; all functionality is provided by
# the files required above.
module GnipStream
end
ensure that ErrorReconnect class gets required.
require "gnip-stream/version"
require 'gnip-stream/stream'
require 'gnip-stream/data_buffer'
require 'gnip-stream/json_stream'
require 'gnip-stream/xml_stream'
require 'gnip-stream/powertrack_client'
require 'gnip-stream/facebook_client'
require 'gnip-stream/error_reconnect'
# Namespace for the gnip-stream gem; all functionality is provided by
# the files required above.
module GnipStream
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.